diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/debian/cabal-install-2.4.postinst cabal-install-2.4-2.4+git20181125.1.5e65672/debian/cabal-install-2.4.postinst --- cabal-install-2.4-2.4+git20181017.1.d899935/debian/cabal-install-2.4.postinst 2018-10-17 15:59:24.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/debian/cabal-install-2.4.postinst 2018-11-26 08:43:33.000000000 +0000 @@ -1,4 +1,4 @@ #! /bin/sh set -e -update-alternatives --install /opt/cabal/bin/cabal opt-cabal /opt/cabal/bin/cabal-2.4 20400 --slave /opt/ghc/bin/cabal opt-ghc-cabal /opt/cabal/bin/cabal-2.4 #DEBHELPER# +update-alternatives --install /opt/cabal/bin/cabal opt-cabal /opt/cabal/bin/cabal-2.4 20401 --slave /opt/ghc/bin/cabal opt-ghc-cabal /opt/cabal/bin/cabal-2.4 #DEBHELPER# diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/debian/changelog cabal-install-2.4-2.4+git20181125.1.5e65672/debian/changelog --- cabal-install-2.4-2.4+git20181017.1.d899935/debian/changelog 2018-10-17 16:00:13.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/debian/changelog 2018-11-26 08:44:22.000000000 +0000 @@ -1,5 +1,5 @@ -cabal-install-2.4 (2.4+git20181017.1.d899935-6~18.04) bionic; urgency=medium +cabal-install-2.4 (2.4+git20181125.1.5e65672-6~18.04) bionic; urgency=medium * Initial release - -- Herbert Valerio Riedel Wed, 17 Oct 2018 18:00:13 +0200 + -- Herbert Valerio Riedel Mon, 26 Nov 2018 09:44:22 +0100 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/buildplan.lst cabal-install-2.4-2.4+git20181125.1.5e65672/src/buildplan.lst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/buildplan.lst 2018-10-17 15:59:09.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/buildplan.lst 2018-11-26 08:43:07.000000000 +0000 @@ -5,20 +5,20 @@ echo-0.1.3 ed25519-0.0.5.0 mtl-2.2.2 -network-2.7.0.2 +network-2.8.0.0 random-1.1 stm-2.5.0.0 tar-0.5.1.0 text-1.2.3.1 zlib-0.6.2 -resolv-0.1.1.1 +resolv-0.1.1.2 edit-distance-0.2.2.1 parsec-3.1.13.0 hashable-1.2.7.0 
network-uri-2.6.1.0 -Cabal-2.4.0.1 +Cabal-2.4.1.0 async-2.2.1 HTTP-4000.3.12 zip-archive-0.3.3 hackage-security-0.5.3.0 -cabal-install-2.4.0.0 +cabal-install-2.4.1.0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Cabal.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Cabal.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Cabal.cabal 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Cabal.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,619 +0,0 @@ -name: Cabal -version: 2.4.0.1 -copyright: 2003-2018, Cabal Development Team (see AUTHORS file) -license: BSD3 -license-file: LICENSE -author: Cabal Development Team -maintainer: cabal-devel@haskell.org -homepage: http://www.haskell.org/cabal/ -bug-reports: https://github.com/haskell/cabal/issues -synopsis: A framework for packaging Haskell software -description: - The Haskell Common Architecture for Building Applications and - Libraries: a framework defining a common interface for authors to more - easily build their Haskell applications in a portable way. - . - The Haskell Cabal is part of a larger infrastructure for distributing, - organizing, and cataloging Haskell libraries and tools. -category: Distribution -cabal-version: >=1.10 -build-type: Simple --- If we use a new Cabal feature, this needs to be changed to Custom so --- we can bootstrap. - -extra-source-files: - README.md tests/README.md ChangeLog.md - doc/bugs-and-stability.rst doc/concepts-and-development.rst - doc/conf.py doc/config-and-install.rst doc/developing-packages.rst - doc/images/Cabal-dark.png doc/index.rst doc/installing-packages.rst - doc/intro.rst doc/misc.rst doc/nix-local-build-overview.rst - doc/nix-local-build.rst doc/file-format-changelog.rst doc/README.md - doc/references.inc - - -- Generated with 'make gen-extra-source-files' - -- Do NOT edit this section manually; instead, run the script. 
- -- BEGIN gen-extra-source-files - tests/ParserTests/errors/common1.cabal - tests/ParserTests/errors/common1.errors - tests/ParserTests/errors/common2.cabal - tests/ParserTests/errors/common2.errors - tests/ParserTests/errors/common3.cabal - tests/ParserTests/errors/common3.errors - tests/ParserTests/errors/forward-compat.cabal - tests/ParserTests/errors/forward-compat.errors - tests/ParserTests/errors/forward-compat2.cabal - tests/ParserTests/errors/forward-compat2.errors - tests/ParserTests/errors/forward-compat3.cabal - tests/ParserTests/errors/forward-compat3.errors - tests/ParserTests/errors/issue-5055-2.cabal - tests/ParserTests/errors/issue-5055-2.errors - tests/ParserTests/errors/issue-5055.cabal - tests/ParserTests/errors/issue-5055.errors - tests/ParserTests/errors/leading-comma.cabal - tests/ParserTests/errors/leading-comma.errors - tests/ParserTests/errors/noVersion.cabal - tests/ParserTests/errors/noVersion.errors - tests/ParserTests/errors/noVersion2.cabal - tests/ParserTests/errors/noVersion2.errors - tests/ParserTests/errors/range-ge-wild.cabal - tests/ParserTests/errors/range-ge-wild.errors - tests/ParserTests/errors/spdx-1.cabal - tests/ParserTests/errors/spdx-1.errors - tests/ParserTests/errors/spdx-2.cabal - tests/ParserTests/errors/spdx-2.errors - tests/ParserTests/errors/spdx-3.cabal - tests/ParserTests/errors/spdx-3.errors - tests/ParserTests/ipi/Includes2.cabal - tests/ParserTests/ipi/Includes2.expr - tests/ParserTests/ipi/Includes2.format - tests/ParserTests/ipi/internal-preprocessor-test.cabal - tests/ParserTests/ipi/internal-preprocessor-test.expr - tests/ParserTests/ipi/internal-preprocessor-test.format - tests/ParserTests/ipi/issue-2276-ghc-9885.cabal - tests/ParserTests/ipi/issue-2276-ghc-9885.expr - tests/ParserTests/ipi/issue-2276-ghc-9885.format - tests/ParserTests/ipi/transformers.cabal - tests/ParserTests/ipi/transformers.expr - tests/ParserTests/ipi/transformers.format - tests/ParserTests/regressions/MiniAgda.cabal - 
tests/ParserTests/regressions/MiniAgda.check - tests/ParserTests/regressions/Octree-0.5.cabal - tests/ParserTests/regressions/Octree-0.5.expr - tests/ParserTests/regressions/Octree-0.5.format - tests/ParserTests/regressions/bad-glob-syntax.cabal - tests/ParserTests/regressions/bad-glob-syntax.check - tests/ParserTests/regressions/cc-options-with-optimization.cabal - tests/ParserTests/regressions/cc-options-with-optimization.check - tests/ParserTests/regressions/common.cabal - tests/ParserTests/regressions/common.expr - tests/ParserTests/regressions/common.format - tests/ParserTests/regressions/common2.cabal - tests/ParserTests/regressions/common2.expr - tests/ParserTests/regressions/common2.format - tests/ParserTests/regressions/cxx-options-with-optimization.cabal - tests/ParserTests/regressions/cxx-options-with-optimization.check - tests/ParserTests/regressions/elif.cabal - tests/ParserTests/regressions/elif.expr - tests/ParserTests/regressions/elif.format - tests/ParserTests/regressions/elif2.cabal - tests/ParserTests/regressions/elif2.expr - tests/ParserTests/regressions/elif2.format - tests/ParserTests/regressions/encoding-0.8.cabal - tests/ParserTests/regressions/encoding-0.8.expr - tests/ParserTests/regressions/encoding-0.8.format - tests/ParserTests/regressions/extensions-paths-5054.cabal - tests/ParserTests/regressions/extensions-paths-5054.check - tests/ParserTests/regressions/generics-sop.cabal - tests/ParserTests/regressions/generics-sop.expr - tests/ParserTests/regressions/generics-sop.format - tests/ParserTests/regressions/ghc-option-j.cabal - tests/ParserTests/regressions/ghc-option-j.check - tests/ParserTests/regressions/haddock-api-2.18.1-check.cabal - tests/ParserTests/regressions/haddock-api-2.18.1-check.check - tests/ParserTests/regressions/issue-5055.cabal - tests/ParserTests/regressions/issue-5055.expr - tests/ParserTests/regressions/issue-5055.format - tests/ParserTests/regressions/issue-774.cabal - 
tests/ParserTests/regressions/issue-774.check - tests/ParserTests/regressions/issue-774.expr - tests/ParserTests/regressions/issue-774.format - tests/ParserTests/regressions/leading-comma.cabal - tests/ParserTests/regressions/leading-comma.expr - tests/ParserTests/regressions/leading-comma.format - tests/ParserTests/regressions/noVersion.cabal - tests/ParserTests/regressions/noVersion.expr - tests/ParserTests/regressions/noVersion.format - tests/ParserTests/regressions/nothing-unicode.cabal - tests/ParserTests/regressions/nothing-unicode.check - tests/ParserTests/regressions/nothing-unicode.expr - tests/ParserTests/regressions/nothing-unicode.format - tests/ParserTests/regressions/pre-1.6-glob.cabal - tests/ParserTests/regressions/pre-1.6-glob.check - tests/ParserTests/regressions/pre-2.4-globstar.cabal - tests/ParserTests/regressions/pre-2.4-globstar.check - tests/ParserTests/regressions/shake.cabal - tests/ParserTests/regressions/shake.expr - tests/ParserTests/regressions/shake.format - tests/ParserTests/regressions/spdx-1.cabal - tests/ParserTests/regressions/spdx-1.expr - tests/ParserTests/regressions/spdx-1.format - tests/ParserTests/regressions/spdx-2.cabal - tests/ParserTests/regressions/spdx-2.expr - tests/ParserTests/regressions/spdx-2.format - tests/ParserTests/regressions/spdx-3.cabal - tests/ParserTests/regressions/spdx-3.expr - tests/ParserTests/regressions/spdx-3.format - tests/ParserTests/regressions/th-lift-instances.cabal - tests/ParserTests/regressions/th-lift-instances.expr - tests/ParserTests/regressions/th-lift-instances.format - tests/ParserTests/regressions/wl-pprint-indef.cabal - tests/ParserTests/regressions/wl-pprint-indef.expr - tests/ParserTests/regressions/wl-pprint-indef.format - tests/ParserTests/warnings/bom.cabal - tests/ParserTests/warnings/bool.cabal - tests/ParserTests/warnings/deprecatedfield.cabal - tests/ParserTests/warnings/doubledash.cabal - tests/ParserTests/warnings/extratestmodule.cabal - 
tests/ParserTests/warnings/gluedop.cabal - tests/ParserTests/warnings/multiplesingular.cabal - tests/ParserTests/warnings/nbsp.cabal - tests/ParserTests/warnings/newsyntax.cabal - tests/ParserTests/warnings/oldsyntax.cabal - tests/ParserTests/warnings/subsection.cabal - tests/ParserTests/warnings/tab.cabal - tests/ParserTests/warnings/trailingfield.cabal - tests/ParserTests/warnings/unknownfield.cabal - tests/ParserTests/warnings/unknownsection.cabal - tests/ParserTests/warnings/utf8.cabal - tests/ParserTests/warnings/versiontag.cabal - tests/hackage/check.sh - tests/hackage/download.sh - tests/hackage/unpack.sh - tests/misc/ghc-supported-languages.hs - -- END gen-extra-source-files - -source-repository head - type: git - location: https://github.com/haskell/cabal/ - subdir: Cabal - -flag bundled-binary-generic - default: False - -library - build-depends: - array >= 0.4.0.1 && < 0.6, - base >= 4.6 && < 5, - bytestring >= 0.10.0.0 && < 0.11, - containers >= 0.5.0.0 && < 0.7, - deepseq >= 1.3.0.1 && < 1.5, - directory >= 1.2 && < 1.4, - filepath >= 1.3.0.1 && < 1.5, - pretty >= 1.1.1 && < 1.2, - process >= 1.1.0.2 && < 1.7, - time >= 1.4.0.1 && < 1.9 - - if flag(bundled-binary-generic) - build-depends: binary >= 0.5.1.1 && < 0.7 - else - build-depends: binary >= 0.7 && < 0.9 - - if os(windows) - build-depends: Win32 >= 2.3.0.0 && < 2.9 - else - build-depends: unix >= 2.6.0.0 && < 2.8 - - ghc-options: -Wall -fno-ignore-asserts -fwarn-tabs - if impl(ghc >= 8.0) - ghc-options: -Wcompat -Wnoncanonical-monad-instances - -Wnoncanonical-monadfail-instances - - exposed-modules: - Distribution.Backpack - Distribution.Backpack.Configure - Distribution.Backpack.ComponentsGraph - Distribution.Backpack.ConfiguredComponent - Distribution.Backpack.DescribeUnitId - Distribution.Backpack.FullUnitId - Distribution.Backpack.LinkedComponent - Distribution.Backpack.ModSubst - Distribution.Backpack.ModuleShape - Distribution.Backpack.PreModuleShape - Distribution.CabalSpecVersion - 
Distribution.Utils.IOData - Distribution.Utils.LogProgress - Distribution.Utils.MapAccum - Distribution.Compat.CreatePipe - Distribution.Compat.Directory - Distribution.Compat.Environment - Distribution.Compat.Exception - Distribution.Compat.Graph - Distribution.Compat.Internal.TempFile - Distribution.Compat.Newtype - Distribution.Compat.Prelude.Internal - Distribution.Compat.ReadP - Distribution.Compat.Semigroup - Distribution.Compat.Stack - Distribution.Compat.Time - Distribution.Compat.DList - Distribution.Compiler - Distribution.InstalledPackageInfo - Distribution.Types.AbiDependency - Distribution.Types.ExposedModule - Distribution.Types.InstalledPackageInfo - Distribution.Types.InstalledPackageInfo.FieldGrammar - Distribution.License - Distribution.Make - Distribution.ModuleName - Distribution.Package - Distribution.PackageDescription - Distribution.PackageDescription.Check - Distribution.PackageDescription.Configuration - Distribution.PackageDescription.PrettyPrint - Distribution.PackageDescription.Utils - Distribution.ParseUtils - Distribution.PrettyUtils - Distribution.ReadE - Distribution.Simple - Distribution.Simple.Bench - Distribution.Simple.Build - Distribution.Simple.Build.Macros - Distribution.Simple.Build.PathsModule - Distribution.Simple.BuildPaths - Distribution.Simple.BuildTarget - Distribution.Simple.BuildToolDepends - Distribution.Simple.CCompiler - Distribution.Simple.Command - Distribution.Simple.Compiler - Distribution.Simple.Configure - Distribution.Simple.Flag - Distribution.Simple.GHC - Distribution.Simple.GHCJS - Distribution.Simple.Haddock - Distribution.Simple.Doctest - Distribution.Simple.Glob - Distribution.Simple.HaskellSuite - Distribution.Simple.Hpc - Distribution.Simple.Install - Distribution.Simple.InstallDirs - Distribution.Simple.LocalBuildInfo - Distribution.Simple.PackageIndex - Distribution.Simple.PreProcess - Distribution.Simple.PreProcess.Unlit - Distribution.Simple.Program - Distribution.Simple.Program.Ar - 
Distribution.Simple.Program.Builtin - Distribution.Simple.Program.Db - Distribution.Simple.Program.Find - Distribution.Simple.Program.GHC - Distribution.Simple.Program.HcPkg - Distribution.Simple.Program.Hpc - Distribution.Simple.Program.Internal - Distribution.Simple.Program.Ld - Distribution.Simple.Program.ResponseFile - Distribution.Simple.Program.Run - Distribution.Simple.Program.Script - Distribution.Simple.Program.Strip - Distribution.Simple.Program.Types - Distribution.Simple.Register - Distribution.Simple.Setup - Distribution.Simple.SrcDist - Distribution.Simple.Test - Distribution.Simple.Test.ExeV10 - Distribution.Simple.Test.LibV09 - Distribution.Simple.Test.Log - Distribution.Simple.UHC - Distribution.Simple.UserHooks - Distribution.Simple.Utils - Distribution.SPDX - Distribution.SPDX.License - Distribution.SPDX.LicenseId - Distribution.SPDX.LicenseExceptionId - Distribution.SPDX.LicenseExpression - Distribution.SPDX.LicenseListVersion - Distribution.SPDX.LicenseReference - Distribution.System - Distribution.TestSuite - Distribution.Text - Distribution.Pretty - Distribution.Types.AbiHash - Distribution.Types.AnnotatedId - Distribution.Types.Benchmark - Distribution.Types.BenchmarkInterface - Distribution.Types.BenchmarkType - Distribution.Types.BuildInfo - Distribution.Types.BuildType - Distribution.Types.ComponentInclude - Distribution.Types.Dependency - Distribution.Types.ExeDependency - Distribution.Types.LegacyExeDependency - Distribution.Types.PkgconfigDependency - Distribution.Types.DependencyMap - Distribution.Types.ComponentId - Distribution.Types.MungedPackageId - Distribution.Types.PackageId - Distribution.Types.UnitId - Distribution.Types.Executable - Distribution.Types.ExecutableScope - Distribution.Types.Library - Distribution.Types.ForeignLib - Distribution.Types.ForeignLibType - Distribution.Types.ForeignLibOption - Distribution.Types.Module - Distribution.Types.ModuleReexport - Distribution.Types.ModuleRenaming - 
Distribution.Types.ComponentName - Distribution.Types.MungedPackageName - Distribution.Types.PackageName - Distribution.Types.PkgconfigName - Distribution.Types.UnqualComponentName - Distribution.Types.IncludeRenaming - Distribution.Types.Mixin - Distribution.Types.SetupBuildInfo - Distribution.Types.TestSuite - Distribution.Types.TestSuiteInterface - Distribution.Types.TestType - Distribution.Types.GenericPackageDescription - Distribution.Types.Condition - Distribution.Types.CondTree - Distribution.Types.HookedBuildInfo - Distribution.Types.PackageDescription - Distribution.Types.SourceRepo - Distribution.Types.Component - Distribution.Types.ComponentLocalBuildInfo - Distribution.Types.LocalBuildInfo - Distribution.Types.ComponentRequestedSpec - Distribution.Types.TargetInfo - Distribution.Types.Version - Distribution.Types.VersionRange - Distribution.Types.VersionInterval - Distribution.Utils.Generic - Distribution.Utils.NubList - Distribution.Utils.ShortText - Distribution.Utils.Progress - Distribution.Verbosity - Distribution.Version - Language.Haskell.Extension - Distribution.Compat.Binary - - -- Parsec parser-related modules - build-depends: - -- transformers-0.4.0.0 doesn't have record syntax e.g. 
for Identity - -- See also https://github.com/ekmett/transformers-compat/issues/35 - transformers (>= 0.3 && < 0.4) || (>=0.4.1.0 && <0.6), - mtl >= 2.1 && < 2.3, - text >= 1.2.3.0 && < 1.3, - parsec >= 3.1.13.0 && < 3.2 - exposed-modules: - Distribution.Compat.Parsing - Distribution.Compat.CharParsing - Distribution.FieldGrammar - Distribution.FieldGrammar.Class - Distribution.FieldGrammar.FieldDescrs - Distribution.FieldGrammar.Parsec - Distribution.FieldGrammar.Pretty - Distribution.PackageDescription.FieldGrammar - Distribution.PackageDescription.Parsec - Distribution.PackageDescription.Quirks - Distribution.Parsec.Class - Distribution.Parsec.Common - Distribution.Parsec.ConfVar - Distribution.Parsec.Field - Distribution.Parsec.FieldLineStream - Distribution.Parsec.Lexer - Distribution.Parsec.LexerMonad - Distribution.Parsec.Newtypes - Distribution.Parsec.ParseResult - Distribution.Parsec.Parser - - -- Lens functionality - exposed-modules: - Distribution.Compat.Lens - Distribution.Types.Lens - Distribution.Types.Benchmark.Lens - Distribution.Types.BuildInfo.Lens - Distribution.Types.Executable.Lens - Distribution.Types.ForeignLib.Lens - Distribution.Types.GenericPackageDescription.Lens - Distribution.Types.InstalledPackageInfo.Lens - Distribution.Types.Library.Lens - Distribution.Types.PackageDescription.Lens - Distribution.Types.PackageId.Lens - Distribution.Types.SetupBuildInfo.Lens - Distribution.Types.SourceRepo.Lens - Distribution.Types.TestSuite.Lens - - other-modules: - Distribution.Backpack.PreExistingComponent - Distribution.Backpack.ReadyComponent - Distribution.Backpack.MixLink - Distribution.Backpack.ModuleScope - Distribution.Backpack.UnifyM - Distribution.Backpack.Id - Distribution.Utils.UnionFind - Distribution.Utils.Base62 - Distribution.Compat.CopyFile - Distribution.Compat.GetShortPathName - Distribution.Compat.MonadFail - Distribution.Compat.Prelude - Distribution.Compat.SnocList - Distribution.GetOpt - Distribution.Lex - 
Distribution.Utils.String - Distribution.Simple.GHC.EnvironmentParser - Distribution.Simple.GHC.Internal - Distribution.Simple.GHC.ImplInfo - Paths_Cabal - - if flag(bundled-binary-generic) - other-modules: - Distribution.Compat.Binary.Class - Distribution.Compat.Binary.Generic - - default-language: Haskell2010 - other-extensions: - BangPatterns - CPP - DefaultSignatures - DeriveDataTypeable - DeriveFoldable - DeriveFunctor - DeriveGeneric - DeriveTraversable - ExistentialQuantification - FlexibleContexts - FlexibleInstances - GeneralizedNewtypeDeriving - ImplicitParams - KindSignatures - NondecreasingIndentation - OverloadedStrings - RankNTypes - RecordWildCards - ScopedTypeVariables - StandaloneDeriving - Trustworthy - TypeFamilies - TypeOperators - TypeSynonymInstances - UndecidableInstances - - if impl(ghc >= 7.11) - other-extensions: PatternSynonyms - --- Small, fast running tests. -test-suite unit-tests - type: exitcode-stdio-1.0 - hs-source-dirs: tests - other-modules: - Test.Laws - Test.QuickCheck.Utils - UnitTests.Distribution.Compat.CreatePipe - UnitTests.Distribution.Compat.ReadP - UnitTests.Distribution.Compat.Time - UnitTests.Distribution.Compat.Graph - UnitTests.Distribution.Simple.Glob - UnitTests.Distribution.Simple.Program.Internal - UnitTests.Distribution.Simple.Utils - UnitTests.Distribution.SPDX - UnitTests.Distribution.System - UnitTests.Distribution.Types.GenericPackageDescription - UnitTests.Distribution.Utils.Generic - UnitTests.Distribution.Utils.NubList - UnitTests.Distribution.Utils.ShortText - UnitTests.Distribution.Version - main-is: UnitTests.hs - build-depends: - array, - base, - bytestring, - containers, - directory, - filepath, - integer-logarithms >= 1.0.2 && <1.1, - tasty >= 1.1.0.3 && < 1.2, - tasty-hunit, - tasty-quickcheck, - tagged, - temporary, - text, - pretty, - QuickCheck >= 2.11.3 && < 2.12, - Cabal - ghc-options: -Wall - default-language: Haskell2010 - -test-suite parser-tests - type: exitcode-stdio-1.0 - hs-source-dirs: 
tests - main-is: ParserTests.hs - build-depends: - base, - base-compat >=0.10.4 && <0.11, - bytestring, - filepath, - tasty >= 1.1.0.3 && < 1.2, - tasty-hunit, - tasty-quickcheck, - tasty-golden >=2.3.1.1 && <2.4, - Diff >=0.3.4 && <0.4, - Cabal - ghc-options: -Wall - default-language: Haskell2010 - - if impl(ghc >= 7.8) - build-depends: - tree-diff >= 0.0.1 && <0.1 - other-modules: - Instances.TreeDiff - Instances.TreeDiff.Language - Instances.TreeDiff.SPDX - Instances.TreeDiff.Version - -test-suite check-tests - type: exitcode-stdio-1.0 - hs-source-dirs: tests - main-is: CheckTests.hs - build-depends: - base, - bytestring, - filepath, - tasty >= 1.1.0.3 && < 1.2, - tasty-golden >=2.3.1.1 && <2.4, - Diff >=0.3.4 && <0.4, - Cabal - ghc-options: -Wall - default-language: Haskell2010 - -test-suite custom-setup-tests - type: exitcode-stdio-1.0 - hs-source-dirs: tests/custom-setup - main-is: CustomSetupTests.hs - other-modules: - CabalDoctestSetup - IdrisSetup - build-depends: - Cabal, - base, - directory, - filepath, - process - default-language: Haskell2010 - -test-suite hackage-tests - type: exitcode-stdio-1.0 - main-is: HackageTests.hs - - -- TODO: need to get 01-index.tar on appveyor - if os(windows) - buildable: False - - hs-source-dirs: tests - - build-depends: - base, - Cabal, - bytestring, - deepseq, - containers, - directory, - filepath - - build-depends: - base-compat >=0.10.4 && <0.11, - base-orphans >=0.6 && <0.9, - optparse-applicative >=0.13.2.0 && <0.15, - tar >=0.5.0.3 && <0.6 - - if impl(ghc >= 7.8) - build-depends: - tree-diff >= 0.0.1 && <0.1 - other-modules: - Instances.TreeDiff - Instances.TreeDiff.Language - Instances.TreeDiff.SPDX - Instances.TreeDiff.Version - - ghc-options: -Wall -rtsopts -threaded - default-extensions: CPP - default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/ChangeLog.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/ChangeLog.md --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/ChangeLog.md 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/ChangeLog.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,878 +0,0 @@ -### 2.4.0.1 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) September 2018 - - * Allow arguments to be passed to `Setup.hs haddock` for `build-type:configure` - ([#5503](https://github.com/haskell/cabal/issues/5503)). - -# 2.4.0.0 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) September 2018 - - * Due to [#5119](https://github.com/haskell/cabal/issues/5119), the - `cabal check` warning for bounds on internal libraries has been - disabled. - * `Distribution.Simple.Haddock` now checks to ensure that it - does not erroneously call Haddock with no target modules. - ([#5232](https://github.com/haskell/cabal/issues/5232), - [#5459](https://github.com/haskell/cabal/issues/5459)). - * Add `getting` (less general than `to`) Lens combinator, - `non`) and an optics to access the modules in a component - of a `PackageDescription` by the `ComponentName`: - `componentBuildInfo` and `componentModules` - * Add `readGhcEnvironmentFile` to parse GHC environment files. - * Drop support for GHC 7.4, since it is out of our support window - (and has been for over a year!) - * Deprecate `preSDist`, `sDistHook`, and `postSDist` in service of - `new-sdist`, since they violate key invariants of the new-build - ecosystem. Use `autogen-modules` and `build-tool-depends` instead. - ([#5389](https://github.com/haskell/cabal/pull/5389)). 
- * Added `--repl-options` flag to `Setup repl` used to pass flags to the - underlying repl without affecting the `LocalBuildInfo` - ([#4247](https://github.com/haskell/cabal/issues/4247), - [#5287](https://github.com/haskell/cabal/pull/5287)) - * `KnownExtension`: added new extensions `BlockArguments` - ([#5101](https://github.com/haskell/cabal/issues/5101)), - `NumericUnderscores` - ([#5130]((https://github.com/haskell/cabal/issues/5130)), - `QuantifiedConstraints`, and `StarIsType`. - * `buildDepends` is removed from `PackageDescription`. It had long been - uselessly hanging about as top-level build-depends already got put - into per-component condition trees anyway. Now it's finally been put - out of its misery - ([#4383](https://github.com/haskell/cabal/issues/4283)). - * Added `Eta` to `CompilerFlavor` and to known compilers. - * `cabal haddock` now generates per-component documentation - ([#5226](https://github.com/haskell/cabal/issues/5226)). - * Wildcard improvements: - * Allow `**` wildcards in `data-files`, `extra-source-files` and - `extra-doc-files`. These allow a limited form of recursive - matching, and require `cabal-version: 2.4`. - ([#5284](https://github.com/haskell/cabal/issues/5284), - [#3178](https://github.com/haskell/cabal/issues/3178), et al.) - * With `cabal-version: 2.4`, when matching a wildcard, the - requirement for the full extension to match exactly has been - loosened. Instead, if the wildcard's extension is a suffix of the - file's extension, the file will be selected. For example, - previously `foo.en.html` would not match `*.html`, and - `foo.solaris.tar.gz` would not match `*.tar.gz`, but now both - do. 
This may lead to files unexpectedly being included by `sdist`; - please audit your package descriptions if you rely on this - behaviour to keep sensitive data out of distributed packages - ([#5372](https://github.com/haskell/cabal/pull/5372), - [#784](https://github.com/haskell/cabal/issues/784), - [#5057](https://github.com/haskell/cabal/issues/5057)). - * Wildcard syntax errors (misplaced `*`, etc), wildcards that - refer to missing directoies, and wildcards that do not match - anything are now all detected by `cabal check`. - * Wildcard ('globbing') functions have been moved from - `Distribution.Simple.Utils` to `Distribution.Simple.Glob` and - have been refactored. - * Fixed `cxx-options` and `cxx-sources` buildinfo fields for - separate compilation of C++ source files to correctly build and link - non-library components ([#5309](https://github.com/haskell/cabal/issues/5309)). - * Reduced warnings generated by hsc2hs and c2hs when `cxx-options` field - is present in a component. - * `cabal check` now warns if `-j` is used in `ghc-options` in a Cabal - file. ([#5277](https://github.com/haskell/cabal/issues/5277)) - * `install-includes` now works as expected with foreign libraries - ([#5302](https://github.com/haskell/cabal/issues/5299)). - * Removed support for JHC. - * Options listed in `ghc-options`, `cc-options`, `ld-options`, - `cxx-options`, `cpp-options` are not deduplicated anymore - ([#4449](https://github.com/haskell/cabal/issues/4449)). - * Deprecated `cabal hscolour` in favour of `cabal haddock --hyperlink-source` ([#5236](https://github.com/haskell/cabal/pull/5236/)). - * Recognize `powerpc64le` as architecture PPC64. - * Cabal now deduplicates more `-I` and `-L` and flags to avoid `E2BIG` - ([#5356](https://github.com/haskell/cabal/issues/5356)). 
- * With `build-type: configure`, avoid using backslashes to delimit - path components on Windows and warn about other unsafe characters - in the path to the source directory on all platforms - ([#5386](https://github.com/haskell/cabal/issues/5386)). - * `Distribution.PackageDescription.Check.checkPackageFiles` now - accepts a `Verbosity` argument. - * Added a parameter to - `Distribution.Backpack.ConfiguredComponent.toConfiguredComponent` in order to fix - [#5409](https://github.com/haskell/cabal/issues/5409). - * Partially silence `abi-depends` warnings - ([#5465](https://github.com/haskell/cabal/issues/5465)). - * Foreign libraries are now linked against the threaded RTS when the - 'ghc-options: -threaded' flag is used - ([#5431](https://github.com/haskell/cabal/pull/5431)). - * Pass command line arguments to `hsc2hs` using response files when possible - ([#3122](https://github.com/haskell/cabal/issues/3122)). - ----- - -## 2.2.0.1 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) March 2018 - - * Fix `checkPackageFiles` for relative directories ([#5206](https://github.com/haskell/cabal/issues/5206)) - - -# 2.2.0.0 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) March 2018 - - * The 2.2 migration guide gives advice on adapting Custom setup - scripts to backwards-incompatible changes in this release: - https://github.com/haskell/cabal/wiki/2.2-migration-guide. - * New Parsec-based parser for `.cabal` files is now the - default. This brings memory consumption and speed improvements, as - well as making new syntax extensions easier to implement. - * Support for common stanzas (#4751). - * Added elif-conditionals to `.cabal` syntax (#4750). - * The package license information can now be specified using the - SPDX syntax. This requires setting `cabal-version` to 2.2+ (#2547, - #5050). - * Support for GHC's numeric -g debug levels (#4673). 
- * Compilation with section splitting is now supported via the - `--enable-split-sections` flag (#4819) - * Fields with mandatory commas (e.g. build-depends) may now have a - leading or a trailing comma (either one, not both) (#4953) - * Added `virtual-modules` field, to allow modules that are not built - but registered (#4875). - * Use better defaulting for `build-type`; rename `PackageDescription`'s - `buildType` field to `buildTypeRaw` and introduce new `buildType` - function (#4958) - * `D.T.PackageDescription.allBuildInfo` now returns all build infos, not - only for buildable components (#5087). - * Removed `UnknownBuildType` constructor from `BuildType` (#5003). - * Added `HexFloatLiterals` to `KnownExtension`. - * Cabal will no longer try to build an empty set of `inputModules` - (#4890). - * `copyComponent` and `installIncludeFiles` will now look for - include headers in the build directory (`dist/build/...` by - default) as well (#4866). - * Added `cxx-options` and `cxx-sources` buildinfo fields for - separate compilation of C++ source files (#3700). - * Removed unused `--allow-newer`/`--allow-older` support from - `Setup configure` (#4527). - * Changed `FlagAssignment` to be an opaque `newtype` (#4849). - * Changed `rawSystemStdInOut` to use proper type to represent - binary and textual data; new `Distribution.Utils.IOData` module; - removed obsolete `startsWithBOM`, `fileHasBOM`, `fromUTF8`, - and `toUTF8` functions; add new `toUTF8BS`/`toUTF8LBS` - encoding functions. (#4666) - * Added a `cabal check` warning when the `.cabal` file name does - not match package name (#4592). - * The `ar` program now receives its arguments via a response file - (`@file`). Old behaviour can be restored with - `--disable-response-files` argument to `configure` or - `install` (#4596). - * Added `.Lens` modules, with optics for package description data - types (#4701). - * Support for building with Win32 version 2.6 (#4835). 
- * Change `compilerExtensions` and `ghcOptExtensionMap` to contain - `Maybe Flag`s, since a supported extention can lack a flag (#4443). - * Pretty-printing of `.cabal` files is slightly different due to - parser changes. For an example, see - https://mail.haskell.org/pipermail/cabal-devel/2017-December/010414.html. - * `--hyperlink-source` now uses Haddock's hyperlinker backend when - Haddock is new enough, falling back to HsColour otherwise. - * `D.S.defaultHookedPackageDesc` has been deprecated in favour of - `D.S.findHookedPackageDesc` (#4874). - * `D.S.getHookedBuildInfo` now takes an additional parameter - specifying the build directory path (#4874). - * Emit warning when encountering unknown GHC versions (#415). - -### 2.0.1.1 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) December 2017 - - * Don't pass `other-modules` to stub executable for detailed-0.9 - (#4918). - * Hpc: Use relative .mix search paths (#4917). - -## 2.0.1.0 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) November 2017 - - * Support for GHC's numeric -g debug levels (#4673). - * Added a new `Distribution.Verbosity.modifyVerbosity` combinator - (#4724). - * Added a new `cabal check` warning about unused, undeclared or - non-Unicode flags. Also, it warns about leading dash, which is - unusable but accepted if it's unused in conditionals. (#4687) - * Modify `allBuildInfo` to include foreign library info (#4763). - * Documentation fixes. - -### 2.0.0.2 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) July 2017 - - * See http://coldwa.st/e/blog/2017-09-09-Cabal-2-0.html - for more detailed release notes. - * The 2.0 migration guide gives advice on adapting Custom setup - scripts to backwards-incompatible changes in this release: - https://github.com/haskell/cabal/wiki/2.0-migration-guide - * Add CURRENT_PACKAGE_VERSION to cabal_macros.h (#4319) - * Dropped support for versions of GHC earlier than 6.12 (#3111). 
- * GHC compatibility window for the Cabal library has been extended - to five years (#3838). - * Convenience/internal libraries are now supported (#269). - An internal library is declared using the stanza `library - 'libname'`. Packages which use internal libraries can - result in multiple registrations; thus `--gen-pkg-config` - can now output a directory of registration scripts rather than - a single file. - * Backwards incompatible change to preprocessor interface: - the function in `PPSuffixHandler` now takes an additional - `ComponentLocalBuildInfo` specifying the build information - of the component being preprocessed. - * Backwards incompatible change to `cabal_macros.h` (#1893): we now - generate a macro file for each component which contains only - information about the direct dependencies of that component. - Consequently, `dist/build/autogen/cabal_macros.h` contains - only the macros for the library, and is not generated if a - package has no library; to find the macros for an executable - named `foobar`, look in `dist/build/foobar/autogen/cabal_macros.h`. - Similarly, if you used `autogenModulesDir` you should now - use `autogenComponentModulesDir`, which now requires a - `ComponentLocalBuildInfo` argument as well in order to - disambiguate which component the autogenerated files are for. - * Backwards incompatible change to `Component`: `TestSuite` and - `Benchmark` no longer have `testEnabled` and - `benchmarkEnabled`. If you used - `enabledTests` or `enabledBenchmarks`, please instead use - `enabledTestLBIs` and `enabledBenchLBIs` - (you will need a `LocalBuildInfo` for these functions.) - Additionally, the semantics of `withTest` and `withBench` - have changed: they now iterate over all buildable - such components, regardless of whether or not they have - been enabled; if you only want enabled components, - use `withTestLBI` and `withBenchLBI`. 
- `finalizePackageDescription` is deprecated: - its replacement `finalizePD` now takes an extra argument - `ComponentRequestedSpec` which specifies what components - are to be enabled: use this instead of modifying the - `Component` in a `GenericPackageDescription`. (As - it's not possible now, `finalizePackageDescription` - will assume tests/benchmarks are disabled.) - If you only need to test if a component is buildable - (i.e., it is marked buildable in the Cabal file) - use the new function `componentBuildable`. - * Backwards incompatible change to `PackageName` (#3896): - `PackageName` is now opaque; conversion to/from `String` now works - via (old) `unPackageName` and (new) `mkPackageName` functions. - * Backwards incompatible change to `ComponentId` (#3917): - `ComponentId` is now opaque; conversion to/from `String` now works - via `unComponentId` and `mkComponentId` functions. - * Backwards incompatible change to `AbiHash` (#3921): - `AbiHash` is now opaque; conversion to/from `String` now works - via `unAbiHash` and `mkAbiHash` functions. - * Backwards incompatible change to `FlagName` (#4062): - `FlagName` is now opaque; conversion to/from `String` now works - via `unFlagName` and `mkFlagName` functions. - * Backwards incompatible change to `Version` (#3905): - Version is now opaque; conversion to/from `[Int]` now works - via `versionNumbers` and `mkVersion` functions. - * Add support for `--allow-older` (dual to `--allow-newer`) (#3466) - * Improved an error message for process output decoding errors - (#3408). - * `getComponentLocalBuildInfo`, `withComponentsInBuildOrder` - and `componentsInBuildOrder` are deprecated in favor of a - new interface in `Distribution.Types.LocalBuildInfo`. - * New `autogen-modules` field. 
Modules that are built automatically at - setup, like Paths_PACKAGENAME or others created with a build-type - custom, appear on `other-modules` for the Library, Executable, - Test-Suite or Benchmark stanzas or also on `exposed-modules` for - libraries but are not really on the package when distributed. This - makes commands like sdist fail because the file is not found, so with - this new field modules that appear there are treated the same way as - Paths_PACKAGENAME was and there is no need to create complex build - hooks. Just add the module names on `other-modules` and - `exposed-modules` as always and on the new `autogen-modules` besides. - (#3656). - * New `./Setup configure` flag `--cabal-file`, allowing multiple - `.cabal` files in a single directory (#3553). Primarily intended for - internal use. - * Macros in `cabal_macros.h` are now ifndef'd, so that they - don't cause an error if the macro is already defined. (#3041) - * `./Setup configure` now accepts a single argument specifying - the component to be configured. The semantics of this mode - of operation are described in - - * Internal `build-tools` dependencies are now added to PATH - upon invocation of GHC, so that they can be conveniently - used via `-pgmF`. (#1541) - * Add support for new caret-style version range operator `^>=` (#3705) - * Verbosity `-v` now takes an extended format which allows - specifying exactly what you want to be logged. The format is - `[silent|normal|verbose|debug] flags`, where flags is a space - separated list of flags. At the moment, only the flags - +callsite and +callstack are supported; these report the - call site/stack of a logging output respectively (these - are only supported if Cabal is built with GHC 8.0/7.10.2 - or greater, respectively). 
- * New `Distribution.Utils.ShortText.ShortText` type for representing - short text strings compactly (#3898) - * Cabal no longer supports using a version bound to disambiguate - between an internal and external package (#4020). This should - not affect many people, as this mode of use already did not - work with the dependency solver. - * Support for "foreign libraries" (#2540), which are Haskell - libraries intended to be used by foreign languages like C. - Foreign libraries only work with GHC 7.8 and later. - * Added a technical preview version of integrated doctest support (#4480). - * Added a new `scope` field to the executable stanza. Executables - with `scope: private` get installed into - $libexecdir/$libexecsubdir. Additionally $libexecdir now has a - subdir structure similar to $lib(sub)dir to allow installing - private executables of different packages and package versions - alongside one another. Private executables are those that are - expected to be run by other programs rather than users. (#3461) - -## 1.24.2.0 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) December 2016 - * Fixed a bug in the handling of non-buildable components (#4094). - * Reverted a PVP-noncompliant API change in 1.24.1.0 (#4123). - * Bumped the directory upper bound to < 1.4 (#4158). - -## 1.24.1.0 [Ryan Thomas](mailto:ryan@ryant.org) October 2016 - * API addition: `differenceVersionRanges` (#3519). - * Fixed reexported-modules display mangling (#3928). - * Check that the correct cabal-version is specified when the - extra-doc-files field is present (#3825). - * Fixed an incorrect invocation of GetShortPathName that was - causing build failures on Windows (#3649). - * Linker flags are now set correctly on GHC >= 7.8 (#3443). - -# 1.24.0.0 [Ryan Thomas](mailto:ryan@ryant.org) March 2016 - * Support GHC 8. - * Deal with extra C sources from preprocessors (#238). - * Include cabal_macros.h when running c2hs (#2600). - * Don't recompile C sources unless needed (#2601). 
- * Read `builddir` option from `CABAL_BUILDDIR` environment variable. - * Add `--profiling-detail=$level` flag with a default for libraries - and executables of `exported-functions` and `toplevel-functions` - respectively (GHC's `-fprof-auto-{exported,top}` flags) (#193). - * New `custom-setup` stanza to specify setup deps. Setup is also built - with the cabal_macros.h style macros, for conditional compilation. - * Support Haddock response files (#2746). - * Fixed a bug in the Text instance for Platform (#2862). - * New `setup haddock` option: `--for-hackage` (#2852). - * New `--show-detail=direct`; like streaming, but allows the test - program to detect that is connected to a terminal, and works - reliable with a non-threaded runtime (#2911, and serves as a - work-around for #2398) - * Library support for multi-instance package DBs (#2948). - * Improved the `./Setup configure` solver (#3082, #3076). - * The `--allow-newer` option can be now used with `./Setup - configure` (#3163). - * Added a way to specify extra locations to find OS X frameworks - in (`extra-framework-dirs`). Can be used both in `.cabal` files and - as an argument to `./Setup configure` (#3158). - * Macros `VERSION_$pkgname` and `MIN_VERSION_$pkgname` are now - also generated for the current package. (#3235). - * Backpack is supported! Two new fields supported in Cabal - files: signatures and mixins; and a new flag - to setup scripts, `--instantiate-with`. See - https://github.com/ezyang/ghc-proposals/blob/backpack/proposals/0000-backpack.rst - for more details. - ----- - -## 1.22.8.0 [Ryan Thomas](mailto:ryan@ryant.org) March 2016 - * Distribution.Simple.Setup: remove job cap. Fixes #3191. - * Check all object file suffixes for recompilation. Fixes #3128. - * Move source files under `src/`. Fixes #3003. 
- -## 1.22.7.0 [Ryan Thomas](mailto:ryan@ryant.org) January 2016 - * Backport #3012 to the 1.22 branch - * Cabal.cabal: change build-type to Simple - * Add foldl' import - * The Cabal part for fully gcc-like response files - -## 1.22.6.0 [Ryan Thomas](mailto:ryan@ryant.org) December 2015 - * Relax upper bound to allow upcoming binary-0.8 - -## 1.22.5.0 [Ryan Thomas](mailto:ryan@ryant.org) November 2015 - * Don't recompile C sources unless needed (#2601). (Luke Iannini) - * Support Haddock response files. - * Add frameworks when linking a dynamic library. - -## 1.22.4.0 [Ryan Thomas](mailto:ryan@ryant.org) June 2015 - * Add libname install-dirs variable, use it by default. Fixes #2437. (Edward Z. Yang) - * Reduce temporary directory name length, fixes #2502. (Edward Z. Yang) - * Workaround for #2527. (Mikhail Glushenkov) - -## 1.22.3.0 [Ryan Thomas](mailto:ryan@ryant.org) April 2015 - * Fix for the ghcjs-pkg version number handling (Luite Stegeman) - * filterConfigureFlags: filter more flags (Mikhail Glushenkov) - * Cabal check will fail on -fprof-auto passed as a ghc-option - Fixes #2479 (John Chee) - -## 1.22.2.0 [Ryan Thomas](mailto:ryan@ryant.org) March 2015 - * Don't pass `--{en,dis}able-profiling` to old setup. - * Add -Wall police - * Fix dependencies on `old-time` - * Fix test interface detailed-0.9 with GHC 7.10 - * Fix HPC tests with GHC 7.10 - * Make sure to pass the package key to ghc - * Use `--package-{name|version}` when available for Haddock when available - * Put full package name and version in library names - * Fully specify package key format, so external tools can generate it. - -# 1.22.0.0 [Johan Tibell](mailto:johan.tibell@gmail.com) January 2015 - * Support GHC 7.10. - * Experimental support for emitting DWARF debug info. - * Preliminary support for relocatable packages. - * Allow cabal to be used inside cabal exec enviroments. - * hpc: support mutliple "ways" (e.g. profiling and vanilla). - * Support GHCJS. 
- * Improved command line documentation. - * Add `-none` constraint syntax for version ranges (#2093). - * Make the default doc index file path compiler/arch/os-dependent - (#2136). - * Warn instead of dying when generating documentation and hscolour - isn't installed (455f51622fa38347db62197a04bb0fa5b928ff17). - * Support the new BinaryLiterals extension - (1f25ab3c5eff311ada73c6c987061b80e9bbebd9). - * Warn about `ghc-prof-options: -auto-all` in `cabal check` (#2162). - * Add preliminary support for multiple instances of the same package - version installed side-by-side (#2002). - * New binary build config format - faster build times (#2076). - * Support module thinning and renaming (#2038). - * Add a new license type: UnspecifiedLicense (#2141). - * Remove support for Hugs and nhc98 (#2168). - * Invoke `tar` with `--formar ustar` if possible in `sdist` (#1903). - * Replace `--enable-library-coverage` with `--enable-coverage`, which - enables program coverage for all components (#1945). - * Suggest that `ExitFailure 9` is probably due to memory - exhaustion (#1522). - * Drop support for Haddock < 2.0 (#1808, #1718). - * Make `cabal test`/`cabal bench` build only what's needed for - running tests/benchmarks (#1821). - * Build shared libraries by default when linking executables dynamically. - * Build profiled libraries by default when profiling executables. - ----- - -### 1.20.0.4 [Ryan Thomas](mailto:ryan@ryant.org) January 2016 - * Cabal.cabal: change build-type to Simple. - -### 1.20.0.1 [Johan Tibell](mailto:johan.tibell@gmail.com) May 2014 - * Fix streaming test output. 
- -# 1.20.0.0 [Johan Tibell](mailto:johan.tibell@gmail.com) April 2014 - * Rewrite user guide - * Fix repl Ctrl+C handling - * Add haskell-suite compiler support - * Add __HADDOCK_VERSION__ define - * Allow specifying exact dependency version using hash - * Rename extra-html-files to extra-doc-files - * Add parallel build support for GHC 7.8 and later - * Don't call ranlib on OS X - * Avoid re-linking executables, test suites, and benchmarks - unnecessarily, shortening build times - * Add `--allow-newer` which allows upper version bounds to be - ignored - * Add `--enable-library-stripping` - * Add command for freezing dependencies - * Allow repl to be used outside Cabal packages - * Add `--require-sandbox` - * Don't use `--strip-unneeded` on OS X or iOS - * Add new license-files field got additional licenses - * Fix if(solaris) on some Solaris versions - * Don't use -dylib-install-name on OS X with GHC > 7.8 - * Add DragonFly as a known OS - * Improve pretty-printing of Cabal files - * Add test flag `--show-details=streaming` for real-time test output - * Add exec command - ----- - -## 1.10.2.0 [Duncan Coutts](mailto:duncan@community.haskell.org) June 2011 - * Include test suites in cabal sdist - * Fix for conditionals in test suite stanzas in `.cabal` files - * Fix permissions of directories created during install - * Fix for global builds when $HOME env var is not set - -## 1.10.1.0 [Duncan Coutts](mailto:duncan@community.haskell.org) February 2011 - * Improved error messages when test suites are not enabled - * Template parameters allowed in test `--test-option(s)` flag - * Improved documentation of the test feature - * Relaxed QA check on cabal-version when using test-suite sections - * `haddock` command now allows both `--hoogle` and `--html` at the same time - * Find ghc-version-specific instances of the hsc2hs program - * Preserve file executable permissions in sdist tarballs - * Pass gcc location and flags to ./configure scripts - * Get default gcc flags 
from ghc - -# 1.10.0.0 [Duncan Coutts](mailto:duncan@haskell.org) November 2010 - * New cabal test feature - * Initial support for UHC - * New default-language and other-languages fields (e.g. Haskell98/2010) - * New default-extensions and other-extensions fields - * Deprecated extensions field (for packages using cabal-version >=1.10) - * Cabal-version field must now only be of the form `>= x.y` - * Removed deprecated `--copy-prefix=` feature - * Auto-reconfigure when `.cabal` file changes - * Workaround for haddock overwriting .hi and .o files when using TH - * Extra cpp flags used with hsc2hs and c2hs (-D${os}_BUILD_OS etc) - * New cpp define VERSION_ gives string version of dependencies - * User guide source now in markdown format for easier editing - * Improved checks and error messages for C libraries and headers - * Removed BSD4 from the list of suggested licenses - * Updated list of known language extensions - * Fix for include paths to allow C code to import FFI stub.h files - * Fix for intra-package dependencies on OSX - * Stricter checks on various bits of `.cabal` file syntax - * Minor fixes for c2hs - ----- - -### 1.8.0.6 [Duncan Coutts](mailto:duncan@haskell.org) June 2010 - * Fix `register --global/--user` - -### 1.8.0.4 [Duncan Coutts](mailto:duncan@haskell.org) March 2010 - * Set dylib-install-name for dynalic libs on OSX - * Stricter configure check that compiler supports a package's extensions - * More configure-time warnings - * Hugs can compile Cabal lib again - * Default datadir now follows prefix on Windows - * Support for finding installed packages for hugs - * Cabal version macros now have proper parenthesis - * Reverted change to filter out deps of non-buildable components - * Fix for registering implace when using a specific package db - * Fix mismatch between $os and $arch path template variables - * Fix for finding ar.exe on Windows, always pick ghc's version - * Fix for intra-package dependencies with ghc-6.12 - -# 1.8.0.2 [Duncan 
Coutts](mailto:duncan@haskell.org) December 2009 - * Support for GHC-6.12 - * New unique installed package IDs which use a package hash - * Allow executables to depend on the lib within the same package - * Dependencies for each component apply only to that component - (previously applied to all the other components too) - * Added new known license MIT and versioned GPL and LGPL - * More liberal package version range syntax - * Package registration files are now UTF8 - * Support for LHC and JHC-0.7.2 - * Deprecated RecordPuns extension in favour of NamedFieldPuns - * Deprecated PatternSignatures extension in favor of ScopedTypeVariables - * New VersionRange semantic view as a sequence of intervals - * Improved package quality checks - * Minor simplification in a couple `Setup.hs` hooks - * Beginnings of a unit level testsuite using QuickCheck - * Various bug fixes - * Various internal cleanups - ----- - -### 1.6.0.2 [Duncan Coutts](mailto:duncan@haskell.org) February 2009 - * New configure-time check for C headers and libraries - * Added language extensions present in ghc-6.10 - * Added support for NamedFieldPuns extension in ghc-6.8 - * Fix in configure step for ghc-6.6 on Windows - * Fix warnings in `Path_pkgname.hs` module on Windows - * Fix for exotic flags in ld-options field - * Fix for using pkg-config in a package with a lib and an executable - * Fix for building haddock docs for exes that use the Paths module - * Fix for installing header files in subdirectories - * Fix for the case of building profiling libs but not ordinary libs - * Fix read-only attribute of installed files on Windows - * Ignore ghc -threaded flag when profiling in ghc-6.8 and older - -### 1.6.0.1 [Duncan Coutts](mailto:duncan@haskell.org) October 2008 - * Export a compat function to help alex and happy - -# 1.6.0.0 [Duncan Coutts](mailto:duncan@haskell.org) October 2008 - * Support for ghc-6.10 - * Source control repositories can now be specified in `.cabal` files - * Bug report URLs 
can be now specified in `.cabal` files - * Wildcards now allowed in data-files and extra-source-files fields - * New syntactic sugar for dependencies `build-depends: foo ==1.2.*` - * New cabal_macros.h provides macros to test versions of dependencies - * Relocatable bindists now possible on unix via env vars - * New `exposed` field allows packages to be not exposed by default - * Install dir flags can now use $os and $arch variables - * New `--builddir` flag allows multiple builds from a single sources dir - * cc-options now only apply to .c files, not for -fvia-C - * cc-options are not longer propagated to dependent packages - * The cpp/cc/ld-options fields no longer use `,` as a separator - * hsc2hs is now called using gcc instead of using ghc as gcc - * New api for manipulating sets and graphs of packages - * Internal api improvements and code cleanups - * Minor improvements to the user guide - * Miscellaneous minor bug fixes - ----- - -### 1.4.0.2 [Duncan Coutts](mailto:duncan@haskell.org) August 2008 - * Fix executable stripping default - * Fix striping exes on OSX that export dynamic symbols (like ghc) - * Correct the order of arguments given by `--prog-options=` - * Fix corner case with overlapping user and global packages - * Fix for modules that use pre-processing and `.hs-boot` files - * Clarify some points in the user guide and readme text - * Fix verbosity flags passed to sub-command like haddock - * Fix `sdist --snapshot` - * Allow meta-packages that contain no modules or C code - * Make the generated Paths module -Wall clean on Windows - -### 1.4.0.1 [Duncan Coutts](mailto:duncan@haskell.org) June 2008 - * Fix a bug which caused `.` to always be in the sources search path - * Haddock-2.2 and later do now support the `--hoogle` flag - -# 1.4.0.0 [Duncan Coutts](mailto:duncan@haskell.org) June 2008 - * Rewritten command line handling support - * Command line completion with bash - * Better support for Haddock 2 - * Improved support for nhc98 - * Removed 
support for ghc-6.2 - * Haddock markup in `.lhs` files now supported - * Default colour scheme for highlighted source code - * Default prefix for `--user` installs is now `$HOME/.cabal` - * All `.cabal` files are treaded as UTF-8 and must be valid - * Many checks added for common mistakes - * New `--package-db=` option for specific package databases - * Many internal changes to support cabal-install - * Stricter parsing for version strings, eg dissalows "1.05" - * Improved user guide introduction - * Programatica support removed - * New options `--program-prefix/suffix` allows eg versioned programs - * Support packages that use `.hs-boot` files - * Fix sdist for Main modules that require preprocessing - * New configure -O flag with optimisation level 0--2 - * Provide access to "`x-`" extension fields through the Cabal api - * Added check for broken installed packages - * Added warning about using inconsistent versions of dependencies - * Strip binary executable files by default with an option to disable - * New options to add site-specific include and library search paths - * Lift the restriction that libraries must have exposed-modules - * Many bugs fixed. 
- * Many internal structural improvements and code cleanups - ----- - -## 1.2.4.0 [Duncan Coutts](mailto:duncan@haskell.org) June 2008 - * Released with GHC 6.8.3 - * Backported several fixes and minor improvements from Cabal-1.4 - * Use a default colour scheme for sources with hscolour >=1.9 - * Support `--hyperlink-source` for Haddock >= 2.0 - * Fix for running in a non-writable directory - * Add OSX -framework arguments when linking executables - * Updates to the user guide - * Allow build-tools names to include + and _ - * Export autoconfUserHooks and simpleUserHooks - * Export ccLdOptionsBuildInfo for `Setup.hs` scripts - * Export unionBuildInfo and make BuildInfo an instance of Monoid - * Fix to allow the `main-is` module to use a pre-processor - -## 1.2.3.0 [Duncan Coutts](mailto:duncan@haskell.org) Nov 2007 - * Released with GHC 6.8.2 - * Includes full list of GHC language extensions - * Fix infamous `dist/conftest.c` bug - * Fix `configure --interfacedir=` - * Find ld.exe on Windows correctly - * Export PreProcessor constructor and mkSimplePreProcessor - * Fix minor bug in unlit code - * Fix some markup in the haddock docs - -## 1.2.2.0 [Duncan Coutts](mailto:duncan@haskell.org) Nov 2007 - * Released with GHC 6.8.1 - * Support haddock-2.0 - * Support building DSOs with GHC - * Require reconfiguring if the `.cabal` file has changed - * Fix os(windows) configuration test - * Fix building documentation - * Fix building packages on Solaris - * Other minor bug fixes - -## 1.2.1 [Duncan Coutts](mailto:duncan@haskell.org) Oct 2007 - * To be included in GHC 6.8.1 - * New field `cpp-options` used when preprocessing Haskell modules - * Fixes for hsc2hs when using ghc - * C source code gets compiled with -O2 by default - * OS aliases, to allow os(windows) rather than requiring os(mingw32) - * Fix cleaning of `stub` files - * Fix cabal-setup, command line ui that replaces `runhaskell Setup.hs` - * Build docs even when dependent packages docs are missing - * Allow the 
`--html-dir` to be specified at configure time - * Fix building with ghc-6.2 - * Other minor bug fixes and build fixes - -# 1.2.0 [Duncan Coutts](mailto:duncan.coutts@worc.ox.ac.uk) Sept 2007 - * To be included in GHC 6.8.x - * New configurations feature - * Can make haddock docs link to hilighted sources (with hscolour) - * New flag to allow linking to haddock docs on the web - * Supports pkg-config - * New field `build-tools` for tool dependencies - * Improved c2hs support - * Preprocessor output no longer clutters source dirs - * Separate `includes` and `install-includes` fields - * Makefile command to generate makefiles for building libs with GHC - * New `--docdir` configure flag - * Generic `--with-prog` `--prog-args` configure flags - * Better default installation paths on Windows - * Install paths can be specified relative to each other - * License files now installed - * Initial support for NHC (incomplete) - * Consistent treatment of verbosity - * Reduced verbosity of configure step by default - * Improved helpfulness of output messages - * Help output now clearer and fits in 80 columns - * New setup register `--gen-pkg-config` flag for distros - * Major internal refactoring, hooks api has changed - * Dozens of bug fixes - ----- - -### 1.1.6.2 [Duncan Coutts](mailto:duncan.coutts@worc.ox.ac.uk) May 2007 - - * Released with GHC 6.6.1 - * Handle windows text file encoding for `.cabal` files - * Fix compiling a executable for profiling that uses Template Haskell - * Other minor bug fixes and user guide clarifications - -### 1.1.6.1 [Duncan Coutts](mailto:duncan.coutts@worc.ox.ac.uk) Oct 2006 - - * fix unlit code - * fix escaping in register.sh - -## 1.1.6 [Duncan Coutts](mailto:duncan.coutts@worc.ox.ac.uk) Oct 2006 - - * Released with GHC 6.6 - * Added support for hoogle - * Allow profiling and normal builds of libs to be chosen indepentantly - * Default installation directories on Win32 changed - * Register haddock docs with ghc-pkg - * Get haddock to make 
hyperlinks to dependent package docs - * Added BangPatterns language extension - * Various bug fixes - -## 1.1.4 [Duncan Coutts](mailto:duncan.coutts@worc.ox.ac.uk) May 2006 - - * Released with GHC 6.4.2 - * Better support for packages that need to install header files - * cabal-setup added, but not installed by default yet - * Implemented `setup register --inplace` - * Have packages exposed by default with ghc-6.2 - * It is no longer necessary to run `configure` before `clean` or `sdist` - * Added support for ghc's `-split-objs` - * Initial support for JHC - * Ignore extension fields in `.cabal` files (fields begining with "`x-`") - * Some changes to command hooks API to improve consistency - * Hugs support improvements - * Added GeneralisedNewtypeDeriving language extension - * Added cabal-version field - * Support hidden modules with haddock - * Internal code refactoring - * More bug fixes - -## 1.1.3 [Isaac Jones](mailto:ijones@syntaxpolice.org) Sept 2005 - - * WARNING: Interfaces not documented in the user's guide may - change in future releases. - * Move building of GHCi .o libs to the build phase rather than - register phase. (from Duncan Coutts) - * Use .tar.gz for source package extension - * Uses GHC instead of cpphs if the latter is not available - * Added experimental "command hooks" which completely override the - default behavior of a command. - * Some bugfixes - -# 1.1.1 [Isaac Jones](mailto:ijones@syntaxpolice.org) July 2005 - - * WARNING: Interfaces not documented in the user's guide may - change in future releases. - * Handles recursive modules for GHC 6.2 and GHC 6.4. 
- * Added `setup test` command (Used with UserHook) - * implemented handling of _stub.{c,h,o} files - * Added support for profiling - * Changed install prefix of libraries (pref/pkgname-version - to prefix/pkgname-version/compname-version) - * Added pattern guards as a language extension - * Moved some functionality to Language.Haskell.Extension - * Register / unregister .bat files for windows - * Exposed more of the API - * Added support for the hide-all-packages flag in GHC > 6.4 - * Several bug fixes - ----- - -# 1.0 [Isaac Jones](mailto:ijones@syntaxpolice.org) March 11 2005 - - * Released with GHC 6.4, Hugs March 2005, and nhc98 1.18 - * Some sanity checking - ----- - -# 0.5 [Isaac Jones](mailto:ijones@syntaxpolice.org) Wed Feb 19 2005 - - * __WARNING__: this is a pre-release and the interfaces are - still likely to change until we reach a 1.0 release. - * Hooks interfaces changed - * Added preprocessors to user hooks - * No more executable-modules or hidden-modules. Use - `other-modules` instead. - * Certain fields moved into BuildInfo, much refactoring - * `extra-libs` -> `extra-libraries` - * Added `--gen-script` to configure and unconfigure. - * `modules-ghc` (etc) now `ghc-modules` (etc) - * added new fields including `synopsis` - * Lots of bug fixes - * spaces can sometimes be used instead of commas - * A user manual has appeared (Thanks, ross!) - * for ghc 6.4, configures versionsed depends properly - * more features to `./setup haddock` - ----- - -# 0.4 [Isaac Jones](mailto:ijones@syntaxpolice.org) Sun Jan 16 2005 - - * Much thanks to all the awesome fptools hackers who have been - working hard to build the Haskell Cabal! - - * __Interface Changes__: - - * __WARNING__: this is a pre-release and the interfaces are still - likely to change until we reach a 1.0 release. - - * Instead of Package.description, you should name your - description files .cabal. In particular, we suggest - that you name it .cabal, but this is not enforced - (yet). 
Multiple `.cabal` files in the same directory is an error, - at least for now. - - * `./setup install --install-prefix` is gone. Use `./setup copy` - `--copy-prefix` instead. - - * The `Modules` field is gone. Use `hidden-modules`, - `exposed-modules`, and `executable-modules`. - - * `Build-depends` is now a package-only field, and can't go into - executable stanzas. Build-depends is a package-to-package - relationship. - - * Some new fields. Use the Source. - - * __New Features__ - - * Cabal is now included as a package in the CVS version of - fptools. That means it'll be released as `-package Cabal` in - future versions of the compilers, and if you are a bleeding-edge - user, you can grab it from the CVS repository with the compilers. - - * Hugs compatibility and NHC98 compatibility should both be - improved. - - * Hooks Interface / Autoconf compatibility: Most of the hooks - interface is hidden for now, because it's not finalized. I have - exposed only `defaultMainWithHooks` and `defaultUserHooks`. This - allows you to use a ./configure script to preprocess - `foo.buildinfo`, which gets merged with `foo.cabal`. In future - releases, we'll expose UserHooks, but we're definitely going to - change the interface to those. The interface to the two functions - I've exposed should stay the same, though. - - * ./setup haddock is a baby feature which pre-processes the - source code with hscpp and runs haddock on it. This is brand new - and hardly tested, so you get to knock it around and see what you - think. - - * Some commands now actually implement verbosity. - - * The preprocessors have been tested a bit more, and seem to work - OK. Please give feedback if you use these. - ----- - -# 0.3 [Isaac Jones](mailto:ijones@syntaxpolice.org) Sun Jan 16 2005 - - * Unstable snapshot release - * From now on, stable releases are even. - ----- - -# 0.2 [Isaac Jones](mailto:ijones@syntaxpolice.org) - - * Adds more HUGS support and preprocessor support. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/ComponentsGraph.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/ComponentsGraph.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/ComponentsGraph.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/ComponentsGraph.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,97 +0,0 @@ --- | See -module Distribution.Backpack.ComponentsGraph ( - ComponentsGraph, - ComponentsWithDeps, - mkComponentsGraph, - componentsGraphToList, - dispComponentsWithDeps, - componentCycleMsg -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Package -import Distribution.PackageDescription as PD hiding (Flag) -import Distribution.Simple.BuildToolDepends -import Distribution.Simple.LocalBuildInfo -import Distribution.Types.ComponentRequestedSpec -import Distribution.Types.UnqualComponentName -import Distribution.Compat.Graph (Graph, Node(..)) -import qualified Distribution.Compat.Graph as Graph - -import Distribution.Text - ( Text(disp) ) -import Text.PrettyPrint - ------------------------------------------------------------------------------- --- Components graph ------------------------------------------------------------------------------- - --- | A graph of source-level components by their source-level --- dependencies --- -type ComponentsGraph = Graph (Node ComponentName Component) - --- | A list of components associated with the source level --- dependencies between them. --- -type ComponentsWithDeps = [(Component, [ComponentName])] - --- | Pretty-print 'ComponentsWithDeps'. 
--- -dispComponentsWithDeps :: ComponentsWithDeps -> Doc -dispComponentsWithDeps graph = - vcat [ hang (text "component" <+> disp (componentName c)) 4 - (vcat [ text "dependency" <+> disp cdep | cdep <- cdeps ]) - | (c, cdeps) <- graph ] - --- | Create a 'Graph' of 'Component', or report a cycle if there is a --- problem. --- -mkComponentsGraph :: ComponentRequestedSpec - -> PackageDescription - -> Either [ComponentName] ComponentsGraph -mkComponentsGraph enabled pkg_descr = - let g = Graph.fromDistinctList - [ N c (componentName c) (componentDeps c) - | c <- pkgBuildableComponents pkg_descr - , componentEnabled enabled c ] - in case Graph.cycles g of - [] -> Right g - ccycles -> Left [ componentName c | N c _ _ <- concat ccycles ] - where - -- The dependencies for the given component - componentDeps component = - (CExeName <$> getAllInternalToolDependencies pkg_descr bi) - - ++ [ if pkgname == packageName pkg_descr - then CLibName - else CSubLibName toolname - | Dependency pkgname _ <- targetBuildDepends bi - , let toolname = packageNameToUnqualComponentName pkgname - , toolname `elem` internalPkgDeps ] - where - bi = componentBuildInfo component - internalPkgDeps = map (conv . libName) (allLibraries pkg_descr) - conv Nothing = packageNameToUnqualComponentName $ packageName pkg_descr - conv (Just s) = s - --- | Given the package description and a 'PackageDescription' (used --- to determine if a package name is internal or not), sort the --- components in dependency order (fewest dependencies first). This is --- NOT necessarily the build order (although it is in the absence of --- Backpack.) --- -componentsGraphToList :: ComponentsGraph - -> ComponentsWithDeps -componentsGraphToList = - map (\(N c _ cs) -> (c, cs)) . Graph.revTopSort - --- | Error message when there is a cycle; takes the SCC of components. 
-componentCycleMsg :: [ComponentName] -> Doc -componentCycleMsg cnames = - text $ "Components in the package depend on each other in a cyclic way:\n " - ++ intercalate " depends on " - [ "'" ++ showComponentName cname ++ "'" - | cname <- cnames ++ [head cnames] ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/ConfiguredComponent.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/ConfiguredComponent.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/ConfiguredComponent.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/ConfiguredComponent.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,303 +0,0 @@ -{-# LANGUAGE PatternGuards #-} --- | See -module Distribution.Backpack.ConfiguredComponent ( - ConfiguredComponent(..), - cc_name, - cc_cid, - cc_pkgid, - toConfiguredComponent, - toConfiguredComponents, - dispConfiguredComponent, - - ConfiguredComponentMap, - extendConfiguredComponentMap, - - -- TODO: Should go somewhere else - newPackageDepsBehaviour -) where - -import Prelude () -import Distribution.Compat.Prelude hiding ((<>)) - -import Distribution.Backpack.Id - -import Distribution.Types.AnnotatedId -import Distribution.Types.Dependency -import Distribution.Types.ExeDependency -import Distribution.Types.IncludeRenaming -import Distribution.Types.ComponentId -import Distribution.Types.PackageId -import Distribution.Types.PackageName -import Distribution.Types.Mixin -import Distribution.Types.ComponentName -import Distribution.Types.UnqualComponentName -import Distribution.Types.ComponentInclude -import Distribution.Package -import Distribution.PackageDescription as PD hiding (Flag) -import Distribution.Simple.BuildToolDepends -import Distribution.Simple.Setup as Setup -import Distribution.Simple.LocalBuildInfo -import Distribution.Version -import 
Distribution.Utils.LogProgress -import Distribution.Utils.MapAccum -import Distribution.Utils.Generic - -import Control.Monad -import qualified Data.Set as Set -import qualified Data.Map as Map -import Distribution.Text -import Text.PrettyPrint - --- | A configured component, we know exactly what its 'ComponentId' is, --- and the 'ComponentId's of the things it depends on. -data ConfiguredComponent - = ConfiguredComponent { - -- | Unique identifier of component, plus extra useful info. - cc_ann_id :: AnnotatedId ComponentId, - -- | The fragment of syntax from the Cabal file describing this - -- component. - cc_component :: Component, - -- | Is this the public library component of the package? - -- (If we invoke Setup with an instantiation, this is the - -- component the instantiation applies to.) - -- Note that in one-component configure mode, this is - -- always True, because any component is the "public" one.) - cc_public :: Bool, - -- | Dependencies on executables from @build-tools@ and - -- @build-tool-depends@. - cc_exe_deps :: [AnnotatedId ComponentId], - -- | The mixins of this package, including both explicit (from - -- the @mixins@ field) and implicit (from @build-depends@). Not - -- mix-in linked yet; component configuration only looks at - -- 'ComponentId's. - cc_includes :: [ComponentInclude ComponentId IncludeRenaming] - } - - --- | Uniquely identifies a configured component. -cc_cid :: ConfiguredComponent -> ComponentId -cc_cid = ann_id . cc_ann_id - --- | The package this component came from. -cc_pkgid :: ConfiguredComponent -> PackageId -cc_pkgid = ann_pid . cc_ann_id - --- | The 'ComponentName' of a component; this uniquely identifies --- a fragment of syntax within a specified Cabal file describing the --- component. -cc_name :: ConfiguredComponent -> ComponentName -cc_name = ann_cname . cc_ann_id - --- | Pretty-print a 'ConfiguredComponent'. 
-dispConfiguredComponent :: ConfiguredComponent -> Doc -dispConfiguredComponent cc = - hang (text "component" <+> disp (cc_cid cc)) 4 - (vcat [ hsep $ [ text "include", disp (ci_id incl), disp (ci_renaming incl) ] - | incl <- cc_includes cc - ]) - --- | Construct a 'ConfiguredComponent', given that the 'ComponentId' --- and library/executable dependencies are known. The primary --- work this does is handling implicit @backpack-include@ fields. -mkConfiguredComponent - :: PackageDescription - -> ComponentId - -> [AnnotatedId ComponentId] -- lib deps - -> [AnnotatedId ComponentId] -- exe deps - -> Component - -> LogProgress ConfiguredComponent -mkConfiguredComponent pkg_descr this_cid lib_deps exe_deps component = do - -- Resolve each @mixins@ into the actual dependency - -- from @lib_deps@. - explicit_includes <- forM (mixins bi) $ \(Mixin name rns) -> do - let keys = fixFakePkgName pkg_descr name - aid <- case Map.lookup keys deps_map of - Nothing -> - dieProgress $ - text "Mix-in refers to non-existent package" <+> - quotes (disp name) $$ - text "(did you forget to add the package to build-depends?)" - Just r -> return r - return ComponentInclude { - ci_ann_id = aid, - ci_renaming = rns, - ci_implicit = False - } - - -- Any @build-depends@ which is not explicitly mentioned in - -- @backpack-include@ is converted into an "implicit" include. - let used_explicitly = Set.fromList (map ci_id explicit_includes) - implicit_includes - = map (\aid -> ComponentInclude { - ci_ann_id = aid, - ci_renaming = defaultIncludeRenaming, - ci_implicit = True - }) - $ filter (flip Set.notMember used_explicitly . 
ann_id) lib_deps - - return ConfiguredComponent { - cc_ann_id = AnnotatedId { - ann_id = this_cid, - ann_pid = package pkg_descr, - ann_cname = componentName component - }, - cc_component = component, - cc_public = is_public, - cc_exe_deps = exe_deps, - cc_includes = explicit_includes ++ implicit_includes - } - where - bi = componentBuildInfo component - deps_map = Map.fromList [ ((packageName dep, ann_cname dep), dep) - | dep <- lib_deps ] - is_public = componentName component == CLibName - -type ConfiguredComponentMap = - Map PackageName (Map ComponentName (AnnotatedId ComponentId)) - -toConfiguredComponent - :: PackageDescription - -> ComponentId - -> ConfiguredComponentMap - -> ConfiguredComponentMap - -> Component - -> LogProgress ConfiguredComponent -toConfiguredComponent pkg_descr this_cid lib_dep_map exe_dep_map component = do - lib_deps <- - if newPackageDepsBehaviour pkg_descr - then forM (targetBuildDepends bi) $ \(Dependency name _) -> do - let (pn, cn) = fixFakePkgName pkg_descr name - value <- case Map.lookup cn =<< Map.lookup pn lib_dep_map of - Nothing -> - dieProgress $ - text "Dependency on unbuildable (i.e. 'buildable: False')" <+> - text (showComponentName cn) <+> - text "from" <+> disp pn - Just v -> return v - return value - else return old_style_lib_deps - mkConfiguredComponent - pkg_descr this_cid - lib_deps exe_deps component - where - bi = componentBuildInfo component - -- lib_dep_map contains a mix of internal and external deps. - -- We want all the public libraries (dep_cn == CLibName) - -- of all external deps (dep /= pn). Note that this - -- excludes the public library of the current package: - -- this is not supported by old-style deps behavior - -- because it would imply a cyclic dependency for the - -- library itself. 
- old_style_lib_deps = [ e - | (pn, comp_map) <- Map.toList lib_dep_map - , pn /= packageName pkg_descr - , (cn, e) <- Map.toList comp_map - , cn == CLibName ] - -- We have to nub here, because 'getAllToolDependencies' may return - -- duplicates (see #4986). (NB: This is not needed for lib_deps, - -- since those elaborate into includes, for which there explicitly - -- may be multiple instances of a package) - exe_deps = ordNub $ - [ exe - | ExeDependency pn cn _ <- getAllToolDependencies pkg_descr bi - -- The error suppression here is important, because in general - -- we won't know about external dependencies (e.g., 'happy') - -- which the package is attempting to use (those deps are only - -- fed in when cabal-install uses this codepath.) - -- TODO: Let cabal-install request errors here - , Just exe <- [Map.lookup (CExeName cn) =<< Map.lookup pn exe_dep_map] - ] - --- | Also computes the 'ComponentId', and sets cc_public if necessary. --- This is Cabal-only; cabal-install won't use this. -toConfiguredComponent' - :: Bool -- use_external_internal_deps - -> FlagAssignment - -> PackageDescription - -> Bool -- deterministic - -> Flag String -- configIPID (todo: remove me) - -> Flag ComponentId -- configCID - -> ConfiguredComponentMap - -> Component - -> LogProgress ConfiguredComponent -toConfiguredComponent' use_external_internal_deps flags - pkg_descr deterministic ipid_flag cid_flag - dep_map component = do - cc <- toConfiguredComponent - pkg_descr this_cid - dep_map dep_map component - return $ if use_external_internal_deps - then cc { cc_public = True } - else cc - where - -- TODO: pass component names to it too! 
- this_cid = computeComponentId deterministic ipid_flag cid_flag (package pkg_descr) - (componentName component) (Just (deps, flags)) - deps = [ ann_id aid | m <- Map.elems dep_map - , aid <- Map.elems m ] - -extendConfiguredComponentMap - :: ConfiguredComponent - -> ConfiguredComponentMap - -> ConfiguredComponentMap -extendConfiguredComponentMap cc = - Map.insertWith Map.union - (pkgName (cc_pkgid cc)) - (Map.singleton (cc_name cc) (cc_ann_id cc)) - --- Compute the 'ComponentId's for a graph of 'Component's. The --- list of internal components must be topologically sorted --- based on internal package dependencies, so that any internal --- dependency points to an entry earlier in the list. --- --- TODO: This function currently restricts the input configured components to --- one version per package, by using the type ConfiguredComponentMap. It cannot --- be used to configure a component that depends on one version of a package for --- a library and another version for a build-tool. -toConfiguredComponents - :: Bool -- use_external_internal_deps - -> FlagAssignment - -> Bool -- deterministic - -> Flag String -- configIPID - -> Flag ComponentId -- configCID - -> PackageDescription - -> ConfiguredComponentMap - -> [Component] - -> LogProgress [ConfiguredComponent] -toConfiguredComponents - use_external_internal_deps flags deterministic ipid_flag cid_flag pkg_descr - dep_map comps - = fmap snd (mapAccumM go dep_map comps) - where - go m component = do - cc <- toConfiguredComponent' - use_external_internal_deps flags pkg_descr - deterministic ipid_flag cid_flag - m component - return (extendConfiguredComponentMap cc m, cc) - -newPackageDepsBehaviourMinVersion :: Version -newPackageDepsBehaviourMinVersion = mkVersion [1,7,1] - - --- In older cabal versions, there was only one set of package dependencies for --- the whole package. 
In this version, we can have separate dependencies per --- target, but we only enable this behaviour if the minimum cabal version --- specified is >= a certain minimum. Otherwise, for compatibility we use the --- old behaviour. -newPackageDepsBehaviour :: PackageDescription -> Bool -newPackageDepsBehaviour pkg = - specVersion pkg >= newPackageDepsBehaviourMinVersion - --- | 'build-depends:' stanzas are currently ambiguous as the external packages --- and internal libraries are specified the same. For now, we assume internal --- libraries shadow, and this function disambiguates accordingly, but soon the --- underlying ambiguity will be addressed. -fixFakePkgName :: PackageDescription -> PackageName -> (PackageName, ComponentName) -fixFakePkgName pkg_descr pn = - if subLibName `elem` internalLibraries - then (packageName pkg_descr, CSubLibName subLibName) - else (pn, CLibName) - where - subLibName = packageNameToUnqualComponentName pn - internalLibraries = mapMaybe libName (allLibraries pkg_descr) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/Configure.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/Configure.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/Configure.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/Configure.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,357 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE PatternGuards #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE NoMonoLocalBinds #-} -{-# LANGUAGE NondecreasingIndentation #-} - --- | See --- --- WARNING: The contents of this module are HIGHLY experimental. --- We may refactor it under you. 
-module Distribution.Backpack.Configure ( - configureComponentLocalBuildInfos, -) where - -import Prelude () -import Distribution.Compat.Prelude hiding ((<>)) - -import Distribution.Backpack -import Distribution.Backpack.FullUnitId -import Distribution.Backpack.PreExistingComponent -import Distribution.Backpack.ConfiguredComponent -import Distribution.Backpack.LinkedComponent -import Distribution.Backpack.ReadyComponent -import Distribution.Backpack.ComponentsGraph -import Distribution.Backpack.Id - -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Package -import qualified Distribution.InstalledPackageInfo as Installed -import Distribution.InstalledPackageInfo (InstalledPackageInfo - ,emptyInstalledPackageInfo) -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import Distribution.PackageDescription as PD hiding (Flag) -import Distribution.ModuleName -import Distribution.Simple.Setup as Setup -import Distribution.Simple.LocalBuildInfo -import Distribution.Types.AnnotatedId -import Distribution.Types.ComponentRequestedSpec -import Distribution.Types.ComponentInclude -import Distribution.Verbosity -import qualified Distribution.Compat.Graph as Graph -import Distribution.Compat.Graph (Graph, IsNode(..)) -import Distribution.Utils.LogProgress - -import Data.Either - ( lefts ) -import qualified Data.Set as Set -import qualified Data.Map as Map -import Distribution.Text -import Text.PrettyPrint - ------------------------------------------------------------------------------- --- Pipeline ------------------------------------------------------------------------------- - -configureComponentLocalBuildInfos - :: Verbosity - -> Bool -- use_external_internal_deps - -> ComponentRequestedSpec - -> Bool -- deterministic - -> Flag String -- configIPID - -> Flag ComponentId -- configCID - -> PackageDescription - -> [PreExistingComponent] - -> FlagAssignment -- 
configConfigurationsFlags - -> [(ModuleName, Module)] -- configInstantiateWith - -> InstalledPackageIndex - -> Compiler - -> LogProgress ([ComponentLocalBuildInfo], InstalledPackageIndex) -configureComponentLocalBuildInfos - verbosity use_external_internal_deps enabled deterministic ipid_flag cid_flag pkg_descr - prePkgDeps flagAssignment instantiate_with installedPackageSet comp = do - -- NB: In single component mode, this returns a *single* component. - -- In this graph, the graph is NOT closed. - graph0 <- case mkComponentsGraph enabled pkg_descr of - Left ccycle -> dieProgress (componentCycleMsg ccycle) - Right g -> return (componentsGraphToList g) - infoProgress $ hang (text "Source component graph:") 4 - (dispComponentsWithDeps graph0) - - let conf_pkg_map = Map.fromListWith Map.union - [(pc_pkgname pkg, - Map.singleton (pc_compname pkg) - (AnnotatedId { - ann_id = pc_cid pkg, - ann_pid = packageId pkg, - ann_cname = pc_compname pkg - })) - | pkg <- prePkgDeps] - graph1 <- toConfiguredComponents use_external_internal_deps - flagAssignment - deterministic ipid_flag cid_flag pkg_descr - conf_pkg_map (map fst graph0) - infoProgress $ hang (text "Configured component graph:") 4 - (vcat (map dispConfiguredComponent graph1)) - - let shape_pkg_map = Map.fromList - [ (pc_cid pkg, (pc_open_uid pkg, pc_shape pkg)) - | pkg <- prePkgDeps] - uid_lookup def_uid - | Just pkg <- PackageIndex.lookupUnitId installedPackageSet uid - = FullUnitId (Installed.installedComponentId pkg) - (Map.fromList (Installed.instantiatedWith pkg)) - | otherwise = error ("uid_lookup: " ++ display uid) - where uid = unDefUnitId def_uid - graph2 <- toLinkedComponents verbosity uid_lookup - (package pkg_descr) shape_pkg_map graph1 - - infoProgress $ - hang (text "Linked component graph:") 4 - (vcat (map dispLinkedComponent graph2)) - - let pid_map = Map.fromList $ - [ (pc_uid pkg, pc_munged_id pkg) - | pkg <- prePkgDeps] ++ - [ (Installed.installedUnitId pkg, mungedId pkg) - | (_, Module uid _) <- 
instantiate_with - , Just pkg <- [PackageIndex.lookupUnitId - installedPackageSet (unDefUnitId uid)] ] - subst = Map.fromList instantiate_with - graph3 = toReadyComponents pid_map subst graph2 - graph4 = Graph.revTopSort (Graph.fromDistinctList graph3) - - infoProgress $ hang (text "Ready component graph:") 4 - (vcat (map dispReadyComponent graph4)) - - toComponentLocalBuildInfos comp installedPackageSet pkg_descr prePkgDeps graph4 - ------------------------------------------------------------------------------- --- ComponentLocalBuildInfo ------------------------------------------------------------------------------- - -toComponentLocalBuildInfos - :: Compiler - -> InstalledPackageIndex -- FULL set - -> PackageDescription - -> [PreExistingComponent] -- external package deps - -> [ReadyComponent] - -> LogProgress ([ComponentLocalBuildInfo], - InstalledPackageIndex) -- only relevant packages -toComponentLocalBuildInfos - comp installedPackageSet pkg_descr externalPkgDeps graph = do - -- Check and make sure that every instantiated component exists. - -- We have to do this now, because prior to linking/instantiating - -- we don't actually know what the full set of 'UnitId's we need - -- are. - let -- TODO: This is actually a bit questionable performance-wise, - -- since we will pay for the ALL installed packages even if - -- they are not related to what we are building. This was true - -- in the old configure code. - external_graph :: Graph (Either InstalledPackageInfo ReadyComponent) - external_graph = Graph.fromDistinctList - . map Left - $ PackageIndex.allPackages installedPackageSet - internal_graph :: Graph (Either InstalledPackageInfo ReadyComponent) - internal_graph = Graph.fromDistinctList - . 
map Right - $ graph - combined_graph = Graph.unionRight external_graph internal_graph - Just local_graph = Graph.closure combined_graph (map nodeKey graph) - -- The database of transitively reachable installed packages that the - -- external components the package (as a whole) depends on. This will be - -- used in several ways: - -- - -- * We'll use it to do a consistency check so we're not depending - -- on multiple versions of the same package (TODO: someday relax - -- this for private dependencies.) See right below. - -- - -- * We'll pass it on in the LocalBuildInfo, where preprocessors - -- and other things will incorrectly use it to determine what - -- the include paths and everything should be. - -- - packageDependsIndex = PackageIndex.fromList (lefts local_graph) - fullIndex = Graph.fromDistinctList local_graph - case Graph.broken fullIndex of - [] -> return () - broken -> - -- TODO: ppr this - dieProgress . text $ - "The following packages are broken because other" - ++ " packages they depend on are missing. These broken " - ++ "packages must be rebuilt before they can be used.\n" - -- TODO: Undupe. - ++ unlines [ "installed package " - ++ display (packageId pkg) - ++ " is broken due to missing package " - ++ intercalate ", " (map display deps) - | (Left pkg, deps) <- broken ] - ++ unlines [ "planned package " - ++ display (packageId pkg) - ++ " is broken due to missing package " - ++ intercalate ", " (map display deps) - | (Right pkg, deps) <- broken ] - - -- In this section, we'd like to look at the 'packageDependsIndex' - -- and see if we've picked multiple versions of the same - -- installed package (this is bad, because it means you might - -- get an error could not match foo-0.1:Type with foo-0.2:Type). - -- - -- What is pseudoTopPkg for? I have no idea. It was used - -- in the very original commit which introduced checking for - -- inconsistencies 5115bb2be4e13841ea07dc9166b9d9afa5f0d012, - -- and then moved out of PackageIndex and put here later. 
- -- TODO: Try this code without it... - -- - -- TODO: Move this into a helper function - -- - -- TODO: This is probably wrong for Backpack - let pseudoTopPkg :: InstalledPackageInfo - pseudoTopPkg = emptyInstalledPackageInfo { - Installed.installedUnitId = mkLegacyUnitId (packageId pkg_descr), - Installed.sourcePackageId = packageId pkg_descr, - Installed.depends = map pc_uid externalPkgDeps - } - case PackageIndex.dependencyInconsistencies - . PackageIndex.insert pseudoTopPkg - $ packageDependsIndex of - [] -> return () - inconsistencies -> - warnProgress $ - hang (text "This package indirectly depends on multiple versions of the same" <+> - text "package. This is very likely to cause a compile failure.") 2 - (vcat [ text "package" <+> disp (packageName user) <+> - parens (disp (installedUnitId user)) <+> text "requires" <+> - disp inst - | (_dep_key, insts) <- inconsistencies - , (inst, users) <- insts - , user <- users ]) - let clbis = mkLinkedComponentsLocalBuildInfo comp graph - -- forM clbis $ \(clbi,deps) -> info verbosity $ "UNIT" ++ hashUnitId (componentUnitId clbi) ++ "\n" ++ intercalate "\n" (map hashUnitId deps) - return (clbis, packageDependsIndex) - --- Build ComponentLocalBuildInfo for each component we are going --- to build. 
--- --- This conversion is lossy; we lose some invariants from ReadyComponent -mkLinkedComponentsLocalBuildInfo - :: Compiler - -> [ReadyComponent] - -> [ComponentLocalBuildInfo] -mkLinkedComponentsLocalBuildInfo comp rcs = map go rcs - where - internalUnits = Set.fromList (map rc_uid rcs) - isInternal x = Set.member x internalUnits - go rc = - case rc_component rc of - CLib lib -> - let convModuleExport (modname', (Module uid modname)) - | this_uid == unDefUnitId uid - , modname' == modname - = Installed.ExposedModule modname' Nothing - | otherwise - = Installed.ExposedModule modname' - (Just (OpenModule (DefiniteUnitId uid) modname)) - convOpenModuleExport (modname', modu@(OpenModule uid modname)) - | uid == this_open_uid - , modname' == modname - = Installed.ExposedModule modname' Nothing - | otherwise - = Installed.ExposedModule modname' (Just modu) - convOpenModuleExport (_, OpenModuleVar _) - = error "convOpenModuleExport: top-level modvar" - exports = - -- Loses invariants - case rc_i rc of - Left indefc -> map convOpenModuleExport - $ Map.toList (indefc_provides indefc) - Right instc -> map convModuleExport - $ Map.toList (instc_provides instc) - insts = - case rc_i rc of - Left indefc -> [ (m, OpenModuleVar m) | m <- indefc_requires indefc ] - Right instc -> [ (m, OpenModule (DefiniteUnitId uid') m') - | (m, Module uid' m') <- instc_insts instc ] - - compat_name = computeCompatPackageName (packageName rc) (libName lib) - compat_key = computeCompatPackageKey comp compat_name (packageVersion rc) this_uid - - in LibComponentLocalBuildInfo { - componentPackageDeps = cpds, - componentUnitId = this_uid, - componentComponentId = this_cid, - componentInstantiatedWith = insts, - componentIsIndefinite_ = is_indefinite, - componentLocalName = cname, - componentInternalDeps = internal_deps, - componentExeDeps = exe_deps, - componentIncludes = includes, - componentExposedModules = exports, - componentIsPublic = rc_public rc, - componentCompatPackageKey = compat_key, - 
componentCompatPackageName = compat_name - } - CFLib _ -> - FLibComponentLocalBuildInfo { - componentUnitId = this_uid, - componentComponentId = this_cid, - componentLocalName = cname, - componentPackageDeps = cpds, - componentExeDeps = exe_deps, - componentInternalDeps = internal_deps, - componentIncludes = includes - } - CExe _ -> - ExeComponentLocalBuildInfo { - componentUnitId = this_uid, - componentComponentId = this_cid, - componentLocalName = cname, - componentPackageDeps = cpds, - componentExeDeps = exe_deps, - componentInternalDeps = internal_deps, - componentIncludes = includes - } - CTest _ -> - TestComponentLocalBuildInfo { - componentUnitId = this_uid, - componentComponentId = this_cid, - componentLocalName = cname, - componentPackageDeps = cpds, - componentExeDeps = exe_deps, - componentInternalDeps = internal_deps, - componentIncludes = includes - } - CBench _ -> - BenchComponentLocalBuildInfo { - componentUnitId = this_uid, - componentComponentId = this_cid, - componentLocalName = cname, - componentPackageDeps = cpds, - componentExeDeps = exe_deps, - componentInternalDeps = internal_deps, - componentIncludes = includes - } - where - this_uid = rc_uid rc - this_open_uid = rc_open_uid rc - this_cid = rc_cid rc - cname = componentName (rc_component rc) - cpds = rc_depends rc - exe_deps = map ann_id $ rc_exe_deps rc - is_indefinite = - case rc_i rc of - Left _ -> True - Right _ -> False - includes = - map (\ci -> (ci_id ci, ci_renaming ci)) $ - case rc_i rc of - Left indefc -> - indefc_includes indefc - Right instc -> - map (\ci -> ci { ci_ann_id = fmap DefiniteUnitId (ci_ann_id ci) }) - (instc_includes instc) - internal_deps = filter isInternal (nodeNeighbors rc) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/DescribeUnitId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/DescribeUnitId.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/DescribeUnitId.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/DescribeUnitId.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,61 +0,0 @@ -{-# LANGUAGE Rank2Types #-} -{-# LANGUAGE FlexibleContexts #-} -module Distribution.Backpack.DescribeUnitId where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.PackageId -import Distribution.Types.ComponentName -import Distribution.Compat.Stack -import Distribution.Verbosity -import Distribution.ModuleName -import Distribution.Text -import Distribution.Simple.Utils - -import Text.PrettyPrint - --- Unit identifiers have a well defined, machine-readable format, --- but this format isn't very user-friendly for users. This --- module defines some functions for solving common rendering --- problems one has for displaying these. --- --- There are three basic problems we tackle: --- --- - Users don't want to see pkg-0.5-inplace-libname, --- they want to see "library 'libname' from 'pkg-0.5'" --- --- - Users don't want to see the raw component identifier, which --- usually contains a wordy hash that doesn't matter. --- --- - Users don't want to see a hash of the instantiation: they --- want to see the actual instantiation, and they want it in --- interpretable form. --- - --- | Print a Setup message stating (1) what operation we are doing, --- for (2) which component (with enough details to uniquely identify --- the build in question.) --- -setupMessage' :: Text a => Verbosity - -> String -- ^ Operation being done (capitalized), on: - -> PackageIdentifier -- ^ Package - -> ComponentName -- ^ Component name - -> Maybe [(ModuleName, a)] -- ^ Instantiation, if available. 
- -- Polymorphic to take - -- 'OpenModule' or 'Module' - -> IO () -setupMessage' verbosity msg pkgid cname mb_insts = withFrozenCallStack $ do - noticeDoc verbosity $ - case mb_insts of - Just insts | not (null insts) -> - hang (msg_doc <+> text "instantiated with") 2 - (vcat [ disp k <+> text "=" <+> disp v - | (k,v) <- insts ]) $$ - for_doc - _ -> - msg_doc <+> for_doc - - where - msg_doc = text msg <+> text (showComponentName cname) - for_doc = text "for" <+> disp pkgid <<>> text ".." diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/FullUnitId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/FullUnitId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/FullUnitId.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/FullUnitId.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Backpack.FullUnitId ( - FullUnitId(..), - FullDb, - expandOpenUnitId, - expandUnitId -) where - -import Distribution.Backpack -import Distribution.Types.ComponentId -import Distribution.Compat.Prelude - --- Unlike OpenUnitId, which could direct to a UnitId. 
-data FullUnitId = FullUnitId ComponentId OpenModuleSubst - deriving (Show, Generic) - -type FullDb = DefUnitId -> FullUnitId - -expandOpenUnitId :: FullDb -> OpenUnitId -> FullUnitId -expandOpenUnitId _db (IndefFullUnitId cid subst) - = FullUnitId cid subst -expandOpenUnitId db (DefiniteUnitId uid) - = expandUnitId db uid - -expandUnitId :: FullDb -> DefUnitId -> FullUnitId -expandUnitId db uid = db uid diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/Id.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/Id.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/Id.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/Id.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,146 +0,0 @@ -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE PatternGuards #-} --- | See -module Distribution.Backpack.Id( - computeComponentId, - computeCompatPackageKey, - computeCompatPackageName, -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.UnqualComponentName -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.PackageDescription as PD hiding (Flag) -import Distribution.Simple.Setup as Setup -import qualified Distribution.Simple.InstallDirs as InstallDirs -import Distribution.Simple.LocalBuildInfo -import Distribution.Types.ComponentId -import Distribution.Types.PackageId -import Distribution.Types.UnitId -import Distribution.Types.MungedPackageName -import Distribution.Utils.Base62 -import Distribution.Version - -import Distribution.Text - ( display, simpleParse ) - --- | This method computes a default, "good enough" 'ComponentId' --- for a package. The intent is that cabal-install (or the user) will --- specify a more detailed IPID via the @--ipid@ flag if necessary. 
-computeComponentId - :: Bool -- deterministic mode - -> Flag String - -> Flag ComponentId - -> PackageIdentifier - -> ComponentName - -- This is used by cabal-install's legacy codepath - -> Maybe ([ComponentId], FlagAssignment) - -> ComponentId -computeComponentId deterministic mb_ipid mb_cid pid cname mb_details = - -- show is found to be faster than intercalate and then replacement of - -- special character used in intercalating. We cannot simply hash by - -- doubly concating list, as it just flatten out the nested list, so - -- different sources can produce same hash - let hash_suffix - | Just (dep_ipids, flags) <- mb_details - = "-" ++ hashToBase62 - -- For safety, include the package + version here - -- for GHC 7.10, where just the hash is used as - -- the package key - ( display pid - ++ show dep_ipids - ++ show flags ) - | otherwise = "" - generated_base = display pid ++ hash_suffix - explicit_base cid0 = fromPathTemplate (InstallDirs.substPathTemplate env - (toPathTemplate cid0)) - -- Hack to reuse install dirs machinery - -- NB: no real IPID available at this point - where env = packageTemplateEnv pid (mkUnitId "") - actual_base = case mb_ipid of - Flag ipid0 -> explicit_base ipid0 - NoFlag | deterministic -> display pid - | otherwise -> generated_base - in case mb_cid of - Flag cid -> cid - NoFlag -> mkComponentId $ actual_base - ++ (case componentNameString cname of - Nothing -> "" - Just s -> "-" ++ unUnqualComponentName s) - --- | In GHC 8.0, the string we pass to GHC to use for symbol --- names for a package can be an arbitrary, IPID-compatible string. --- However, prior to GHC 8.0 there are some restrictions on what --- format this string can be (due to how ghc-pkg parsed the key): --- --- 1. In GHC 7.10, the string had either be of the form --- foo_ABCD, where foo is a non-semantic alphanumeric/hyphenated --- prefix and ABCD is two base-64 encoded 64-bit integers, --- or a GHC 7.8 style identifier. --- --- 2. 
In GHC 7.8, the string had to be a valid package identifier --- like foo-0.1. --- --- So, the problem is that Cabal, in general, has a general IPID, --- but needs to figure out a package key / package ID that the --- old ghc-pkg will actually accept. But there's an EVERY WORSE --- problem: if ghc-pkg decides to parse an identifier foo-0.1-xxx --- as if it were a package identifier, which means it will SILENTLY --- DROP the "xxx" (because it's a tag, and Cabal does not allow tags.) --- So we must CONNIVE to ensure that we don't pick something that --- looks like this. --- --- So this function attempts to define a mapping into the old formats. --- --- The mapping for GHC 7.8 and before: --- --- * We use the *compatibility* package name and version. For --- public libraries this is just the package identifier; for --- internal libraries, it's something like "z-pkgname-z-libname-0.1". --- See 'computeCompatPackageName' for more details. --- --- The mapping for GHC 7.10: --- --- * For CLibName: --- If the IPID is of the form foo-0.1-ABCDEF where foo_ABCDEF would --- validly parse as a package key, we pass "ABCDEF". (NB: not --- all hashes parse this way, because GHC 7.10 mandated that --- these hashes be two base-62 encoded 64 bit integers), --- but hashes that Cabal generated using 'computeComponentId' --- are guaranteed to have this form. --- --- If it is not of this form, we rehash the IPID into the --- correct form and pass that. --- --- * For sub-components, we rehash the IPID into the correct format --- and pass that. --- -computeCompatPackageKey - :: Compiler - -> MungedPackageName - -> Version - -> UnitId - -> String -computeCompatPackageKey comp pkg_name pkg_version uid - | not (packageKeySupported comp) = - display pkg_name ++ "-" ++ display pkg_version - | not (unifiedIPIDRequired comp) = - let str = unUnitId uid -- assume no Backpack support - mb_verbatim_key - = case simpleParse str :: Maybe PackageId of - -- Something like 'foo-0.1', use it verbatim. 
- -- (NB: hash tags look like tags, so they are parsed, - -- so the extra equality check tests if a tag was dropped.) - Just pid0 | display pid0 == str -> Just str - _ -> Nothing - mb_truncated_key - = let cand = reverse (takeWhile isAlphaNum (reverse str)) - in if length cand == 22 && all isAlphaNum cand - then Just cand - else Nothing - rehashed_key = hashToBase62 str - in fromMaybe rehashed_key (mb_verbatim_key `mplus` mb_truncated_key) - | otherwise = display uid diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/LinkedComponent.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/LinkedComponent.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/LinkedComponent.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/LinkedComponent.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,399 +0,0 @@ -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TypeFamilies #-} --- | See -module Distribution.Backpack.LinkedComponent ( - LinkedComponent(..), - lc_insts, - lc_uid, - lc_cid, - lc_pkgid, - toLinkedComponent, - toLinkedComponents, - dispLinkedComponent, - LinkedComponentMap, - extendLinkedComponentMap, -) where - -import Prelude () -import Distribution.Compat.Prelude hiding ((<>)) - -import Distribution.Backpack -import Distribution.Backpack.FullUnitId -import Distribution.Backpack.ConfiguredComponent -import Distribution.Backpack.ModuleShape -import Distribution.Backpack.PreModuleShape -import Distribution.Backpack.ModuleScope -import Distribution.Backpack.UnifyM -import Distribution.Backpack.MixLink -import Distribution.Utils.MapAccum - -import Distribution.Types.AnnotatedId -import Distribution.Types.ComponentName -import Distribution.Types.ModuleRenaming -import Distribution.Types.IncludeRenaming -import Distribution.Types.ComponentInclude -import Distribution.Types.ComponentId 
-import Distribution.Types.PackageId -import Distribution.Package -import Distribution.PackageDescription as PD hiding (Flag) -import Distribution.ModuleName -import Distribution.Simple.LocalBuildInfo -import Distribution.Verbosity -import Distribution.Utils.LogProgress - -import qualified Data.Set as Set -import qualified Data.Map as Map -import Data.Traversable - ( mapM ) -import Distribution.Text - ( Text(disp) ) -import Text.PrettyPrint -import Data.Either - --- | A linked component is a component that has been mix-in linked, at --- which point we have determined how all the dependencies of the --- component are explicitly instantiated (in the form of an OpenUnitId). --- 'ConfiguredComponent' is mix-in linked into 'LinkedComponent', which --- is then instantiated into 'ReadyComponent'. -data LinkedComponent - = LinkedComponent { - -- | Uniquely identifies linked component - lc_ann_id :: AnnotatedId ComponentId, - -- | Corresponds to 'cc_component'. - lc_component :: Component, - -- | @build-tools@ and @build-tool-depends@ dependencies. - -- Corresponds to 'cc_exe_deps'. - lc_exe_deps :: [AnnotatedId OpenUnitId], - -- | Is this the public library of a package? Corresponds to - -- 'cc_public'. - lc_public :: Bool, - -- | Corresponds to 'cc_includes', but (1) this does not contain - -- includes of signature packages (packages with no exports), - -- and (2) the 'ModuleRenaming' for requirements (stored in - -- 'IncludeRenaming') has been removed, as it is reflected in - -- 'OpenUnitId'.) - lc_includes :: [ComponentInclude OpenUnitId ModuleRenaming], - -- | Like 'lc_includes', but this specifies includes on - -- signature packages which have no exports. - lc_sig_includes :: [ComponentInclude OpenUnitId ModuleRenaming], - -- | The module shape computed by mix-in linking. This is - -- newly computed from 'ConfiguredComponent' - lc_shape :: ModuleShape - } - --- | Uniquely identifies a 'LinkedComponent'. Corresponds to --- 'cc_cid'. 
-lc_cid :: LinkedComponent -> ComponentId -lc_cid = ann_id . lc_ann_id - --- | Corresponds to 'cc_pkgid'. -lc_pkgid :: LinkedComponent -> PackageId -lc_pkgid = ann_pid . lc_ann_id - --- | The 'OpenUnitId' of this component in the "default" instantiation. --- See also 'lc_insts'. 'LinkedComponent's cannot be instantiated --- (e.g., there is no 'ModSubst' instance for them). -lc_uid :: LinkedComponent -> OpenUnitId -lc_uid lc = IndefFullUnitId (lc_cid lc) . Map.fromList $ lc_insts lc - --- | The instantiation of 'lc_uid'; this always has the invariant --- that it is a mapping from a module name @A@ to @@ (the hole A). -lc_insts :: LinkedComponent -> [(ModuleName, OpenModule)] -lc_insts lc = [ (req, OpenModuleVar req) - | req <- Set.toList (modShapeRequires (lc_shape lc)) ] - -dispLinkedComponent :: LinkedComponent -> Doc -dispLinkedComponent lc = - hang (text "unit" <+> disp (lc_uid lc)) 4 $ - vcat [ text "include" <+> disp (ci_id incl) <+> disp (ci_renaming incl) - | incl <- lc_includes lc ] - $+$ - vcat [ text "signature include" <+> disp (ci_id incl) - | incl <- lc_sig_includes lc ] - $+$ dispOpenModuleSubst (modShapeProvides (lc_shape lc)) - -instance Package LinkedComponent where - packageId = lc_pkgid - -toLinkedComponent - :: Verbosity - -> FullDb - -> PackageId - -> LinkedComponentMap - -> ConfiguredComponent - -> LogProgress LinkedComponent -toLinkedComponent verbosity db this_pid pkg_map ConfiguredComponent { - cc_ann_id = aid@AnnotatedId { ann_id = this_cid }, - cc_component = component, - cc_exe_deps = exe_deps, - cc_public = is_public, - cc_includes = cid_includes - } = do - let - -- The explicitly specified requirements, provisions and - -- reexports from the Cabal file. These are only non-empty for - -- libraries; everything else is trivial. 
- (src_reqs :: [ModuleName], - src_provs :: [ModuleName], - src_reexports :: [ModuleReexport]) = - case component of - CLib lib -> (signatures lib, - exposedModules lib, - reexportedModules lib) - _ -> ([], [], []) - src_hidden = otherModules (componentBuildInfo component) - - -- Take each included ComponentId and resolve it into an - -- *unlinked* unit identity. We will use unification (relying - -- on the ModuleShape) to resolve these into linked identities. - unlinked_includes :: [ComponentInclude (OpenUnitId, ModuleShape) IncludeRenaming] - unlinked_includes = [ ComponentInclude (fmap lookupUid dep_aid) rns i - | ComponentInclude dep_aid rns i <- cid_includes ] - - lookupUid :: ComponentId -> (OpenUnitId, ModuleShape) - lookupUid cid = fromMaybe (error "linkComponent: lookupUid") - (Map.lookup cid pkg_map) - - let orErr (Right x) = return x - orErr (Left [err]) = dieProgress err - orErr (Left errs) = do - dieProgress (vcat (intersperse (text "") -- double newline! - [ hang (text "-") 2 err | err <- errs])) - - -- Pre-shaping - let pre_shape = mixLinkPreModuleShape $ - PreModuleShape { - preModShapeProvides = Set.fromList (src_provs ++ src_hidden), - preModShapeRequires = Set.fromList src_reqs - } : [ renamePreModuleShape (toPreModuleShape sh) rns - | ComponentInclude (AnnotatedId { ann_id = (_, sh) }) rns _ <- unlinked_includes ] - reqs = preModShapeRequires pre_shape - insts = [ (req, OpenModuleVar req) - | req <- Set.toList reqs ] - this_uid = IndefFullUnitId this_cid . Map.fromList $ insts - - -- OK, actually do unification - -- TODO: the unification monad might return errors, in which - -- case we have to deal. Use monadic bind for now. - (linked_shape0 :: ModuleScope, - linked_includes0 :: [ComponentInclude OpenUnitId ModuleRenaming], - linked_sig_includes0 :: [ComponentInclude OpenUnitId ModuleRenaming]) - <- orErr $ runUnifyM verbosity this_cid db $ do - -- The unification monad is implemented using mutable - -- references. 
Thus, we must convert our *pure* data - -- structures into mutable ones to perform unification. - - let convertMod :: (ModuleName -> ModuleSource) -> ModuleName -> UnifyM s (ModuleScopeU s) - convertMod from m = do - m_u <- convertModule (OpenModule this_uid m) - return (Map.singleton m [WithSource (from m) m_u], Map.empty) - -- Handle 'exposed-modules' - exposed_mod_shapes_u <- mapM (convertMod FromExposedModules) src_provs - -- Handle 'other-modules' - other_mod_shapes_u <- mapM (convertMod FromOtherModules) src_hidden - - -- Handle 'signatures' - let convertReq :: ModuleName -> UnifyM s (ModuleScopeU s) - convertReq req = do - req_u <- convertModule (OpenModuleVar req) - return (Map.empty, Map.singleton req [WithSource (FromSignatures req) req_u]) - req_shapes_u <- mapM convertReq src_reqs - - -- Handle 'mixins' - (incl_shapes_u, all_includes_u) <- fmap unzip (mapM convertInclude unlinked_includes) - - failIfErrs -- Prevent error cascade - -- Mix-in link everything! mixLink is the real workhorse. - shape_u <- mixLink $ exposed_mod_shapes_u - ++ other_mod_shapes_u - ++ req_shapes_u - ++ incl_shapes_u - - -- src_reqs_u <- mapM convertReq src_reqs - -- Read out all the final results by converting back - -- into a pure representation. 
- let convertIncludeU (ComponentInclude dep_aid rns i) = do - uid <- convertUnitIdU (ann_id dep_aid) - return (ComponentInclude { - ci_ann_id = dep_aid { ann_id = uid }, - ci_renaming = rns, - ci_implicit = i - }) - shape <- convertModuleScopeU shape_u - let (includes_u, sig_includes_u) = partitionEithers all_includes_u - incls <- mapM convertIncludeU includes_u - sig_incls <- mapM convertIncludeU sig_includes_u - return (shape, incls, sig_incls) - - let isNotLib (CLib _) = False - isNotLib _ = True - when (not (Set.null reqs) && isNotLib component) $ - dieProgress $ - hang (text "Non-library component has unfilled requirements:") - 4 (vcat [disp req | req <- Set.toList reqs]) - - -- NB: do NOT include hidden modules here: GHC 7.10's ghc-pkg - -- won't allow it (since someone could directly synthesize - -- an 'InstalledPackageInfo' that violates abstraction.) - -- Though, maybe it should be relaxed? - let src_hidden_set = Set.fromList src_hidden - linked_shape = linked_shape0 { - modScopeProvides = - -- Would rather use withoutKeys but need BC - Map.filterWithKey - (\k _ -> not (k `Set.member` src_hidden_set)) - (modScopeProvides linked_shape0) - } - - -- OK, compute the reexports - -- TODO: This code reports the errors for reexports one reexport at - -- a time. Better to collect them all up and report them all at - -- once. - let hdl :: [Either Doc a] -> LogProgress [a] - hdl es = - case partitionEithers es of - ([], rs) -> return rs - (ls, _) -> - dieProgress $ - hang (text "Problem with module re-exports:") 2 - (vcat [hang (text "-") 2 l | l <- ls]) - reexports_list <- hdl . 
(flip map) src_reexports $ \reex@(ModuleReexport mb_pn from to) -> do - case Map.lookup from (modScopeProvides linked_shape) of - Just cands@(x0:xs0) -> do - -- Make sure there is at least one candidate - (x, xs) <- - case mb_pn of - Just pn -> - let matches_pn (FromMixins pn' _ _) = pn == pn' - matches_pn (FromBuildDepends pn' _) = pn == pn' - matches_pn (FromExposedModules _) = pn == packageName this_pid - matches_pn (FromOtherModules _) = pn == packageName this_pid - matches_pn (FromSignatures _) = pn == packageName this_pid - in case filter (matches_pn . getSource) cands of - (x1:xs1) -> return (x1, xs1) - _ -> Left (brokenReexportMsg reex) - Nothing -> return (x0, xs0) - -- Test that all the candidates are consistent - case filter (\x' -> unWithSource x /= unWithSource x') xs of - [] -> return () - _ -> Left $ ambiguousReexportMsg reex x xs - return (to, unWithSource x) - _ -> - Left (brokenReexportMsg reex) - - -- TODO: maybe check this earlier; it's syntactically obvious. - let build_reexports m (k, v) - | Map.member k m = - dieProgress $ hsep - [ text "Module name ", disp k, text " is exported multiple times." ] - | otherwise = return (Map.insert k v m) - provs <- foldM build_reexports Map.empty $ - -- TODO: doublecheck we have checked for - -- src_provs duplicates already! 
- [ (mod_name, OpenModule this_uid mod_name) | mod_name <- src_provs ] ++ - reexports_list - - let final_linked_shape = ModuleShape provs (Map.keysSet (modScopeRequires linked_shape)) - - -- See Note Note [Signature package special case] - let (linked_includes, linked_sig_includes) - | Set.null reqs = (linked_includes0 ++ linked_sig_includes0, []) - | otherwise = (linked_includes0, linked_sig_includes0) - - return $ LinkedComponent { - lc_ann_id = aid, - lc_component = component, - lc_public = is_public, - -- These must be executables - lc_exe_deps = map (fmap (\cid -> IndefFullUnitId cid Map.empty)) exe_deps, - lc_shape = final_linked_shape, - lc_includes = linked_includes, - lc_sig_includes = linked_sig_includes - } - --- Note [Signature package special case] --- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ --- Suppose we have p-indef, which depends on str-sig and inherits --- the hole from that signature package. When we instantiate p-indef, --- it's a bit pointless to also go ahead and build str-sig, because --- str-sig cannot possibly have contributed any code to the package --- in question. Furthermore, because the signature was inherited to --- p-indef, if we test matching against p-indef, we also have tested --- matching against p-sig. In fact, skipping p-sig is *mandatory*, --- because p-indef may have thinned it (so that an implementation may --- match p-indef but not p-sig.) --- --- However, suppose that we have a package which mixes together str-sig --- and str-bytestring, with the intent of *checking* that str-sig is --- implemented by str-bytestring. Here, it's quite important to --- build an instantiated str-sig, since that is the only way we will --- actually end up testing if the matching works. 
Note that this --- admonition only applies if the package has NO requirements; if it --- has any requirements, we will typecheck it as an indefinite --- package, at which point the signature includes will be passed to --- GHC who will in turn actually do the checking to make sure they --- are instantiated correctly. - --- Handle mix-in linking for components. In the absence of Backpack, --- every ComponentId gets converted into a UnitId by way of SimpleUnitId. -toLinkedComponents - :: Verbosity - -> FullDb - -> PackageId - -> LinkedComponentMap - -> [ConfiguredComponent] - -> LogProgress [LinkedComponent] -toLinkedComponents verbosity db this_pid lc_map0 comps - = fmap snd (mapAccumM go lc_map0 comps) - where - go :: Map ComponentId (OpenUnitId, ModuleShape) - -> ConfiguredComponent - -> LogProgress (Map ComponentId (OpenUnitId, ModuleShape), LinkedComponent) - go lc_map cc = do - lc <- addProgressCtx (text "In the stanza" <+> text (componentNameStanza (cc_name cc))) $ - toLinkedComponent verbosity db this_pid lc_map cc - return (extendLinkedComponentMap lc lc_map, lc) - -type LinkedComponentMap = Map ComponentId (OpenUnitId, ModuleShape) - -extendLinkedComponentMap :: LinkedComponent - -> LinkedComponentMap - -> LinkedComponentMap -extendLinkedComponentMap lc m = - Map.insert (lc_cid lc) (lc_uid lc, lc_shape lc) m - -brokenReexportMsg :: ModuleReexport -> Doc -brokenReexportMsg (ModuleReexport (Just pn) from _to) = - vcat [ text "The package" <+> quotes (disp pn) - , text "does not export a module" <+> quotes (disp from) ] -brokenReexportMsg (ModuleReexport Nothing from _to) = - vcat [ text "The module" <+> quotes (disp from) - , text "is not exported by any suitable package." - , text "It occurs in neither the 'exposed-modules' of this package," - , text "nor any of its 'build-depends' dependencies." 
] - -ambiguousReexportMsg :: ModuleReexport -> ModuleWithSource -> [ModuleWithSource] -> Doc -ambiguousReexportMsg (ModuleReexport mb_pn from _to) y1 ys = - vcat [ text "Ambiguous reexport" <+> quotes (disp from) - , hang (text "It could refer to either:") 2 - (vcat (msg : msgs)) - , help_msg mb_pn ] - where - msg = text " " <+> displayModuleWithSource y1 - msgs = [text "or" <+> displayModuleWithSource y | y <- ys] - help_msg Nothing = - -- TODO: This advice doesn't help if the ambiguous exports - -- come from a package named the same thing - vcat [ text "The ambiguity can be resolved by qualifying the" - , text "re-export with a package name." - , text "The syntax is 'packagename:ModuleName [as NewName]'." ] - -- Qualifying won't help that much. - help_msg (Just _) = - vcat [ text "The ambiguity can be resolved by using the" - , text "mixins field to rename one of the module" - , text "names differently." ] - displayModuleWithSource y - = vcat [ quotes (disp (unWithSource y)) - , text "brought into scope by" <+> - dispModuleSource (getSource y) - ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/MixLink.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/MixLink.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/MixLink.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/MixLink.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,185 +0,0 @@ -{-# LANGUAGE NondecreasingIndentation #-} --- | See -module Distribution.Backpack.MixLink ( - mixLink, -) where - -import Prelude () -import Distribution.Compat.Prelude hiding (mod) - -import Distribution.Backpack -import Distribution.Backpack.UnifyM -import Distribution.Backpack.FullUnitId -import Distribution.Backpack.ModuleScope - -import qualified Distribution.Utils.UnionFind as UnionFind -import Distribution.ModuleName -import 
Distribution.Text -import Distribution.Types.ComponentId - -import Text.PrettyPrint -import Control.Monad -import qualified Data.Map as Map -import qualified Data.Foldable as F - ------------------------------------------------------------------------ --- Linking - --- | Given to scopes of provisions and requirements, link them together. -mixLink :: [ModuleScopeU s] -> UnifyM s (ModuleScopeU s) -mixLink scopes = do - let provs = Map.unionsWith (++) (map fst scopes) - -- Invariant: any identically named holes refer to same mutable cell - reqs = Map.unionsWith (++) (map snd scopes) - filled = Map.intersectionWithKey linkProvision provs reqs - F.sequenceA_ filled - let remaining = Map.difference reqs filled - return (provs, remaining) - --- | Link a list of possibly provided modules to a single --- requirement. This applies a side-condition that all --- of the provided modules at the same name are *actually* --- the same module. -linkProvision :: ModuleName - -> [ModuleWithSourceU s] -- provs - -> [ModuleWithSourceU s] -- reqs - -> UnifyM s [ModuleWithSourceU s] -linkProvision mod_name ret@(prov:provs) (req:reqs) = do - -- TODO: coalesce all the non-unifying modules together - forM_ provs $ \prov' -> do - -- Careful: read it out BEFORE unifying, because the - -- unification algorithm preemptively unifies modules - mod <- convertModuleU (unWithSource prov) - mod' <- convertModuleU (unWithSource prov') - r <- unify prov prov' - case r of - Just () -> return () - Nothing -> do - addErr $ - text "Ambiguous module" <+> quotes (disp mod_name) $$ - text "It could refer to" <+> - ( text " " <+> (quotes (disp mod) $$ in_scope_by (getSource prov)) $$ - text "or" <+> (quotes (disp mod') $$ in_scope_by (getSource prov')) ) $$ - link_doc - mod <- convertModuleU (unWithSource prov) - req_mod <- convertModuleU (unWithSource req) - self_cid <- fmap unify_self_cid getUnifEnv - case mod of - OpenModule (IndefFullUnitId cid _) _ - | cid == self_cid -> addErr $ - text "Cannot instantiate 
requirement" <+> quotes (disp mod_name) <+> - in_scope_by (getSource req) $$ - text "with locally defined module" <+> in_scope_by (getSource prov) $$ - text "as this would create a cyclic dependency, which GHC does not support." $$ - text "Try moving this module to a separate library, e.g.," $$ - text "create a new stanza: library 'sublib'." - _ -> return () - r <- unify prov req - case r of - Just () -> return () - Nothing -> do - -- TODO: Record and report WHERE the bad constraint came from - addErr $ text "Could not instantiate requirement" <+> quotes (disp mod_name) $$ - nest 4 (text "Expected:" <+> disp mod $$ - text "Actual: " <+> disp req_mod) $$ - parens (text "This can occur if an exposed module of" <+> - text "a libraries shares a name with another module.") $$ - link_doc - return ret - where - unify s1 s2 = tryM $ addErrContext short_link_doc - $ unifyModule (unWithSource s1) (unWithSource s2) - in_scope_by s = text "brought into scope by" <+> dispModuleSource s - short_link_doc = text "While filling requirement" <+> quotes (disp mod_name) - link_doc = text "While filling requirements of" <+> reqs_doc - reqs_doc - | null reqs = dispModuleSource (getSource req) - | otherwise = ( text " " <+> dispModuleSource (getSource req) $$ - vcat [ text "and" <+> dispModuleSource (getSource r) | r <- reqs]) -linkProvision _ _ _ = error "linkProvision" - - - ------------------------------------------------------------------------ --- The unification algorithm - --- This is based off of https://gist.github.com/amnn/559551517d020dbb6588 --- which is a translation from Huet's thesis. 
- -unifyUnitId :: UnitIdU s -> UnitIdU s -> UnifyM s () -unifyUnitId uid1_u uid2_u - | uid1_u == uid2_u = return () - | otherwise = do - xuid1 <- liftST $ UnionFind.find uid1_u - xuid2 <- liftST $ UnionFind.find uid2_u - case (xuid1, xuid2) of - (UnitIdThunkU u1, UnitIdThunkU u2) - | u1 == u2 -> return () - | otherwise -> - failWith $ hang (text "Couldn't match unit IDs:") 4 - (text " " <+> disp u1 $$ - text "and" <+> disp u2) - (UnitIdThunkU uid1, UnitIdU _ cid2 insts2) - -> unifyThunkWith cid2 insts2 uid2_u uid1 uid1_u - (UnitIdU _ cid1 insts1, UnitIdThunkU uid2) - -> unifyThunkWith cid1 insts1 uid1_u uid2 uid2_u - (UnitIdU _ cid1 insts1, UnitIdU _ cid2 insts2) - -> unifyInner cid1 insts1 uid1_u cid2 insts2 uid2_u - -unifyThunkWith :: ComponentId - -> Map ModuleName (ModuleU s) - -> UnitIdU s - -> DefUnitId - -> UnitIdU s - -> UnifyM s () -unifyThunkWith cid1 insts1 uid1_u uid2 uid2_u = do - db <- fmap unify_db getUnifEnv - let FullUnitId cid2 insts2' = expandUnitId db uid2 - insts2 <- convertModuleSubst insts2' - unifyInner cid1 insts1 uid1_u cid2 insts2 uid2_u - -unifyInner :: ComponentId - -> Map ModuleName (ModuleU s) - -> UnitIdU s - -> ComponentId - -> Map ModuleName (ModuleU s) - -> UnitIdU s - -> UnifyM s () -unifyInner cid1 insts1 uid1_u cid2 insts2 uid2_u = do - when (cid1 /= cid2) $ - -- TODO: if we had a package identifier, could be an - -- easier to understand error message. - failWith $ - hang (text "Couldn't match component IDs:") 4 - (text " " <+> disp cid1 $$ - text "and" <+> disp cid2) - -- The KEY STEP which makes this a Huet-style unification - -- algorithm. (Also a payoff of using union-find.) - -- We can build infinite unit IDs this way, which is necessary - -- for support mutual recursion. NB: union keeps the SECOND - -- descriptor, so we always arrange for a UnitIdThunkU to live - -- there. - liftST $ UnionFind.union uid1_u uid2_u - F.sequenceA_ $ Map.intersectionWith unifyModule insts1 insts2 - --- | Imperatively unify two modules. 
-unifyModule :: ModuleU s -> ModuleU s -> UnifyM s () -unifyModule mod1_u mod2_u - | mod1_u == mod2_u = return () - | otherwise = do - mod1 <- liftST $ UnionFind.find mod1_u - mod2 <- liftST $ UnionFind.find mod2_u - case (mod1, mod2) of - (ModuleVarU _, _) -> liftST $ UnionFind.union mod1_u mod2_u - (_, ModuleVarU _) -> liftST $ UnionFind.union mod2_u mod1_u - (ModuleU uid1 mod_name1, ModuleU uid2 mod_name2) -> do - when (mod_name1 /= mod_name2) $ - failWith $ - hang (text "Cannot match module names") 4 $ - text " " <+> disp mod_name1 $$ - text "and" <+> disp mod_name2 - -- NB: this is not actually necessary (because we'll - -- detect loops eventually in 'unifyUnitId'), but it - -- seems harmless enough - liftST $ UnionFind.union mod1_u mod2_u - unifyUnitId uid1 uid2 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/ModSubst.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/ModSubst.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/ModSubst.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/ModSubst.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,54 +0,0 @@ -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE PatternGuards #-} - --- | A type class 'ModSubst' for objects which can have 'ModuleSubst' --- applied to them. --- --- See also - -module Distribution.Backpack.ModSubst ( - ModSubst(..), -) where - -import Prelude () -import Distribution.Compat.Prelude hiding (mod) - -import Distribution.ModuleName - -import Distribution.Backpack - -import qualified Data.Map as Map -import Data.Set (Set) -import qualified Data.Set as Set - --- | Applying module substitutions to semantic objects. 
-class ModSubst a where - -- In notation, substitution is postfix, which implies - -- putting it on the right hand side, but for partial - -- application it's more convenient to have it on the left - -- hand side. - modSubst :: OpenModuleSubst -> a -> a - -instance ModSubst OpenModule where - modSubst subst (OpenModule cid mod_name) = OpenModule (modSubst subst cid) mod_name - modSubst subst mod@(OpenModuleVar mod_name) - | Just mod' <- Map.lookup mod_name subst = mod' - | otherwise = mod - -instance ModSubst OpenUnitId where - modSubst subst (IndefFullUnitId cid insts) = IndefFullUnitId cid (modSubst subst insts) - modSubst _subst uid = uid - -instance ModSubst (Set ModuleName) where - modSubst subst reqs - = Set.union (Set.difference reqs (Map.keysSet subst)) - (openModuleSubstFreeHoles subst) - --- Substitutions are functorial. NB: this means that --- there is an @instance 'ModSubst' 'ModuleSubst'@! -instance ModSubst a => ModSubst (Map k a) where - modSubst subst = fmap (modSubst subst) -instance ModSubst a => ModSubst [a] where - modSubst subst = fmap (modSubst subst) -instance ModSubst a => ModSubst (k, a) where - modSubst subst (x,y) = (x, modSubst subst y) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/ModuleScope.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/ModuleScope.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/ModuleScope.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/ModuleScope.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,131 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveTraversable #-} -{-# LANGUAGE DeriveFoldable #-} --- | See -module Distribution.Backpack.ModuleScope ( - -- * Module scopes - ModuleScope(..), - ModuleProvides, - ModuleRequires, - ModuleSource(..), - dispModuleSource, - WithSource(..), - unWithSource, - 
getSource, - ModuleWithSource, - emptyModuleScope, -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.ModuleName -import Distribution.Types.IncludeRenaming -import Distribution.Types.PackageName -import Distribution.Types.ComponentName - -import Distribution.Backpack -import Distribution.Backpack.ModSubst -import Distribution.Text - -import qualified Data.Map as Map -import Text.PrettyPrint - - ------------------------------------------------------------------------ --- Module scopes - --- Why is ModuleProvides so complicated? The basic problem is that --- we want to support this: --- --- package p where --- include q (A) --- include r (A) --- module B where --- import "q" A --- import "r" A --- --- Specifically, in Cabal today it is NOT an error have two modules in --- scope with the same identifier. So we need to preserve this for --- Backpack. The modification is that an ambiguous module name is --- OK... as long as it is NOT used to fill a requirement! --- --- So as a first try, we might try deferring unifying provisions that --- are being glommed together, and check for equality after the fact. --- But this doesn't work, because what if a multi-module provision --- is used to fill a requirement?! So you do the equality test --- IMMEDIATELY before a requirement fill happens... or never at all. --- --- Alternate strategy: go ahead and unify, and then if it is revealed --- that some requirements got filled "out-of-thin-air", error. - - --- | A 'ModuleScope' describes the modules and requirements that --- are in-scope as we are processing a Cabal package. Unlike --- a 'ModuleShape', there may be multiple modules in scope at --- the same 'ModuleName'; this is only an error if we attempt --- to use those modules to fill a requirement. A 'ModuleScope' --- can influence the 'ModuleShape' via a reexport. 
-data ModuleScope = ModuleScope { - modScopeProvides :: ModuleProvides, - modScopeRequires :: ModuleRequires - } - --- | An empty 'ModuleScope'. -emptyModuleScope :: ModuleScope -emptyModuleScope = ModuleScope Map.empty Map.empty - --- | Every 'Module' in scope at a 'ModuleName' is annotated with --- the 'PackageName' it comes from. -type ModuleProvides = Map ModuleName [ModuleWithSource] --- | INVARIANT: entries for ModuleName m, have msrc_module is OpenModuleVar m -type ModuleRequires = Map ModuleName [ModuleWithSource] --- TODO: consider newtping the two types above. - --- | Description of where a module participating in mixin linking came --- from. -data ModuleSource - = FromMixins PackageName ComponentName IncludeRenaming - | FromBuildDepends PackageName ComponentName - | FromExposedModules ModuleName - | FromOtherModules ModuleName - | FromSignatures ModuleName --- We don't have line numbers, but if we did, we'd want to record that --- too - --- TODO: Deduplicate this with Distribution.Backpack.UnifyM.ci_msg -dispModuleSource :: ModuleSource -> Doc -dispModuleSource (FromMixins pn cn incls) - = text "mixins:" <+> dispComponent pn cn <+> disp incls -dispModuleSource (FromBuildDepends pn cn) - = text "build-depends:" <+> dispComponent pn cn -dispModuleSource (FromExposedModules m) - = text "exposed-modules:" <+> disp m -dispModuleSource (FromOtherModules m) - = text "other-modules:" <+> disp m -dispModuleSource (FromSignatures m) - = text "signatures:" <+> disp m - --- Dependency -dispComponent :: PackageName -> ComponentName -> Doc -dispComponent pn cn = - -- NB: This syntax isn't quite the source syntax, but it - -- should be clear enough. To do source syntax, we'd - -- need to know what the package we're linking is. - case cn of - CLibName -> disp pn - CSubLibName ucn -> disp pn <<>> colon <<>> disp ucn - -- Case below shouldn't happen - _ -> disp pn <+> parens (disp cn) - --- | An 'OpenModule', annotated with where it came from in a Cabal file. 
-data WithSource a = WithSource ModuleSource a - deriving (Functor, Foldable, Traversable) -unWithSource :: WithSource a -> a -unWithSource (WithSource _ x) = x -getSource :: WithSource a -> ModuleSource -getSource (WithSource s _) = s -type ModuleWithSource = WithSource OpenModule - -instance ModSubst a => ModSubst (WithSource a) where - modSubst subst (WithSource s m) = WithSource s (modSubst subst m) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/ModuleShape.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/ModuleShape.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/ModuleShape.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/ModuleShape.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,83 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} --- | See -module Distribution.Backpack.ModuleShape ( - -- * Module shapes - ModuleShape(..), - emptyModuleShape, - shapeInstalledPackage, -) where - -import Prelude () -import Distribution.Compat.Prelude hiding (mod) - -import Distribution.ModuleName -import Distribution.InstalledPackageInfo as IPI - -import Distribution.Backpack.ModSubst -import Distribution.Backpack - -import qualified Data.Map as Map -import Data.Set (Set) -import qualified Data.Set as Set - ------------------------------------------------------------------------ --- Module shapes - --- | A 'ModuleShape' describes the provisions and requirements of --- a library. We can extract a 'ModuleShape' from an --- 'InstalledPackageInfo'. 
-data ModuleShape = ModuleShape { - modShapeProvides :: OpenModuleSubst, - modShapeRequires :: Set ModuleName - } - deriving (Eq, Show, Generic) - -instance Binary ModuleShape - -instance ModSubst ModuleShape where - modSubst subst (ModuleShape provs reqs) - = ModuleShape (modSubst subst provs) (modSubst subst reqs) - --- | The default module shape, with no provisions and no requirements. -emptyModuleShape :: ModuleShape -emptyModuleShape = ModuleShape Map.empty Set.empty - --- Food for thought: suppose we apply the Merkel tree optimization. --- Imagine this situation: --- --- component p --- signature H --- module P --- component h --- module H --- component a --- signature P --- module A --- component q(P) --- include p --- include h --- component r --- include q (P) --- include p (P) requires (H) --- include h (H) --- include a (A) requires (P) --- --- Component r should not have any conflicts, since after mix-in linking --- the two P imports will end up being the same, so we can properly --- instantiate it. But to know that q's P is p:P instantiated with h:H, --- we have to be able to expand its unit id. Maybe we can expand it --- lazily but in some cases it will need to be expanded. --- --- FWIW, the way that GHC handles this is by improving unit IDs as --- soon as it sees an improved one in the package database. This --- is a bit disgusting. 
-shapeInstalledPackage :: IPI.InstalledPackageInfo -> ModuleShape -shapeInstalledPackage ipi = ModuleShape (Map.fromList provs) reqs - where - uid = installedOpenUnitId ipi - provs = map shapeExposedModule (IPI.exposedModules ipi) - reqs = requiredSignatures ipi - shapeExposedModule (IPI.ExposedModule mod_name Nothing) - = (mod_name, OpenModule uid mod_name) - shapeExposedModule (IPI.ExposedModule mod_name (Just mod)) - = (mod_name, mod) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/PreExistingComponent.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/PreExistingComponent.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/PreExistingComponent.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/PreExistingComponent.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,67 +0,0 @@ --- | See -module Distribution.Backpack.PreExistingComponent ( - PreExistingComponent(..), - ipiToPreExistingComponent, -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Backpack.ModuleShape -import Distribution.Backpack -import Distribution.Types.ComponentId -import Distribution.Types.MungedPackageId -import Distribution.Types.PackageId -import Distribution.Types.UnitId -import Distribution.Types.ComponentName -import Distribution.Types.PackageName -import Distribution.Package - -import qualified Data.Map as Map -import qualified Distribution.InstalledPackageInfo as Installed -import Distribution.InstalledPackageInfo (InstalledPackageInfo) - --- | Stripped down version of 'LinkedComponent' for things --- we don't need to know how to build. -data PreExistingComponent - = PreExistingComponent { - -- | The actual name of the package. 
This may DISAGREE with 'pc_pkgid' - -- for internal dependencies: e.g., an internal component @lib@ may be - -- munged to @z-pkg-z-lib@, but we still want to use it when we see - -- @lib@ in @build-depends@ - pc_pkgname :: PackageName, - -- | The actual name of the component. - pc_compname :: ComponentName, - pc_munged_id :: MungedPackageId, - pc_uid :: UnitId, - pc_cid :: ComponentId, - pc_open_uid :: OpenUnitId, - pc_shape :: ModuleShape - } - --- | Convert an 'InstalledPackageInfo' into a 'PreExistingComponent', --- which was brought into scope under the 'PackageName' (important for --- a package qualified reference.) -ipiToPreExistingComponent :: InstalledPackageInfo -> PreExistingComponent -ipiToPreExistingComponent ipi = - PreExistingComponent { - pc_pkgname = packageName ipi, - pc_compname = libraryComponentName $ Installed.sourceLibName ipi, - pc_munged_id = mungedId ipi, - pc_uid = Installed.installedUnitId ipi, - pc_cid = Installed.installedComponentId ipi, - pc_open_uid = - IndefFullUnitId (Installed.installedComponentId ipi) - (Map.fromList (Installed.instantiatedWith ipi)), - pc_shape = shapeInstalledPackage ipi - } - -instance HasMungedPackageId PreExistingComponent where - mungedId = pc_munged_id - -instance Package PreExistingComponent where - packageId pec = PackageIdentifier (pc_pkgname pec) v - where MungedPackageId _ v = pc_munged_id pec - -instance HasUnitId PreExistingComponent where - installedUnitId = pc_uid diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/PreModuleShape.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/PreModuleShape.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/PreModuleShape.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/PreModuleShape.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -{-# LANGUAGE 
DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Backpack.PreModuleShape ( - PreModuleShape(..), - toPreModuleShape, - renamePreModuleShape, - mixLinkPreModuleShape, -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Data.Set (Set) -import qualified Data.Set as Set -import qualified Data.Map as Map - -import Distribution.Backpack.ModuleShape -import Distribution.Types.IncludeRenaming -import Distribution.Types.ModuleRenaming -import Distribution.ModuleName - -data PreModuleShape = PreModuleShape { - preModShapeProvides :: Set ModuleName, - preModShapeRequires :: Set ModuleName - } - deriving (Eq, Show, Generic) - -toPreModuleShape :: ModuleShape -> PreModuleShape -toPreModuleShape (ModuleShape provs reqs) = PreModuleShape (Map.keysSet provs) reqs - -renamePreModuleShape :: PreModuleShape -> IncludeRenaming -> PreModuleShape -renamePreModuleShape (PreModuleShape provs reqs) (IncludeRenaming prov_rn req_rn) = - PreModuleShape - (Set.fromList (mapMaybe prov_fn (Set.toList provs))) - (Set.map req_fn reqs) - where - prov_fn = interpModuleRenaming prov_rn - req_fn k = fromMaybe k (interpModuleRenaming req_rn k) - -mixLinkPreModuleShape :: [PreModuleShape] -> PreModuleShape -mixLinkPreModuleShape shapes = PreModuleShape provs (Set.difference reqs provs) - where - provs = Set.unions (map preModShapeProvides shapes) - reqs = Set.unions (map preModShapeRequires shapes) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/ReadyComponent.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/ReadyComponent.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/ReadyComponent.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/ReadyComponent.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,368 +0,0 @@ -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE 
PatternGuards #-} --- | See -module Distribution.Backpack.ReadyComponent ( - ReadyComponent(..), - InstantiatedComponent(..), - IndefiniteComponent(..), - rc_depends, - rc_uid, - rc_pkgid, - dispReadyComponent, - toReadyComponents, -) where - -import Prelude () -import Distribution.Compat.Prelude hiding ((<>)) - -import Distribution.Backpack -import Distribution.Backpack.LinkedComponent -import Distribution.Backpack.ModuleShape - -import Distribution.Types.AnnotatedId -import Distribution.Types.ModuleRenaming -import Distribution.Types.Component -import Distribution.Types.ComponentInclude -import Distribution.Types.ComponentId -import Distribution.Types.ComponentName -import Distribution.Types.PackageId -import Distribution.Types.UnitId -import Distribution.Compat.Graph (IsNode(..)) -import Distribution.Types.Module -import Distribution.Types.MungedPackageId -import Distribution.Types.MungedPackageName -import Distribution.Types.Library - -import Distribution.ModuleName -import Distribution.Package -import Distribution.Simple.Utils - -import qualified Control.Applicative as A -import qualified Data.Traversable as T - -import Control.Monad -import Text.PrettyPrint -import qualified Data.Map as Map - -import Distribution.Version -import Distribution.Text - --- | A 'ReadyComponent' is one that we can actually generate build --- products for. We have a ready component for the typecheck-only --- products of every indefinite package, as well as a ready component --- for every way these packages can be fully instantiated. --- -data ReadyComponent - = ReadyComponent { - rc_ann_id :: AnnotatedId UnitId, - -- | The 'OpenUnitId' for this package. At the moment, this - -- is used in only one case, which is to determine if an - -- export is of a module from this library (indefinite - -- libraries record these exports as 'OpenModule'); - -- 'rc_open_uid' can be conveniently used to test for - -- equality, whereas 'UnitId' cannot always be used in this - -- case. 
- rc_open_uid :: OpenUnitId, - -- | Corresponds to 'lc_cid'. Invariant: if 'rc_open_uid' - -- records a 'ComponentId', it coincides with this one. - rc_cid :: ComponentId, - -- | Corresponds to 'lc_component'. - rc_component :: Component, - -- | Corresponds to 'lc_exe_deps'. - -- Build-tools don't participate in mix-in linking. - -- (but what if they could?) - rc_exe_deps :: [AnnotatedId UnitId], - -- | Corresponds to 'lc_public'. - rc_public :: Bool, - -- | Extra metadata depending on whether or not this is an - -- indefinite library (typechecked only) or an instantiated - -- component (can be compiled). - rc_i :: Either IndefiniteComponent InstantiatedComponent - } - --- | The final, string 'UnitId' that will uniquely identify --- the compilation products of this component. -rc_uid :: ReadyComponent -> UnitId -rc_uid = ann_id . rc_ann_id - --- | Corresponds to 'lc_pkgid'. -rc_pkgid :: ReadyComponent -> PackageId -rc_pkgid = ann_pid . rc_ann_id - --- | An 'InstantiatedComponent' is a library which is fully instantiated --- (or, possibly, has no requirements at all.) -data InstantiatedComponent - = InstantiatedComponent { - -- | How this library was instantiated. - instc_insts :: [(ModuleName, Module)], - -- | Dependencies induced by 'instc_insts'. These are recorded - -- here because there isn't a convenient way otherwise to get - -- the 'PackageId' we need to fill 'componentPackageDeps' as needed. - instc_insts_deps :: [(UnitId, MungedPackageId)], - -- | The modules exported/reexported by this library. - instc_provides :: Map ModuleName Module, - -- | The dependencies which need to be passed to the compiler - -- to bring modules into scope. These always refer to installed - -- fully instantiated libraries. - instc_includes :: [ComponentInclude DefUnitId ModuleRenaming] - } - --- | An 'IndefiniteComponent' is a library with requirements --- which we will typecheck only. -data IndefiniteComponent - = IndefiniteComponent { - -- | The requirements of the library. 
- indefc_requires :: [ModuleName], - -- | The modules exported/reexported by this library. - indefc_provides :: Map ModuleName OpenModule, - -- | The dependencies which need to be passed to the compiler - -- to bring modules into scope. These are 'OpenUnitId' because - -- these may refer to partially instantiated libraries. - indefc_includes :: [ComponentInclude OpenUnitId ModuleRenaming] - } - --- | Compute the dependencies of a 'ReadyComponent' that should --- be recorded in the @depends@ field of 'InstalledPackageInfo'. -rc_depends :: ReadyComponent -> [(UnitId, MungedPackageId)] -rc_depends rc = ordNub $ - case rc_i rc of - Left indefc -> - map (\ci -> (abstractUnitId $ ci_id ci, toMungedPackageId ci)) - (indefc_includes indefc) - Right instc -> - map (\ci -> (unDefUnitId $ ci_id ci, toMungedPackageId ci)) - (instc_includes instc) - ++ instc_insts_deps instc - where - toMungedPackageId :: Text id => ComponentInclude id rn -> MungedPackageId - toMungedPackageId ci = - computeCompatPackageId - (ci_pkgid ci) - (case ci_cname ci of - CLibName -> Nothing - CSubLibName uqn -> Just uqn - _ -> error $ display (rc_cid rc) ++ - " depends on non-library " ++ display (ci_id ci)) - --- | Get the 'MungedPackageId' of a 'ReadyComponent' IF it is --- a library. 
-rc_munged_id :: ReadyComponent -> MungedPackageId -rc_munged_id rc = - computeCompatPackageId - (rc_pkgid rc) - (case rc_component rc of - CLib lib -> libName lib - _ -> error "rc_munged_id: not library") - -instance Package ReadyComponent where - packageId = rc_pkgid - -instance HasUnitId ReadyComponent where - installedUnitId = rc_uid - -instance IsNode ReadyComponent where - type Key ReadyComponent = UnitId - nodeKey = rc_uid - nodeNeighbors rc = - (case rc_i rc of - Right inst | [] <- instc_insts inst - -> [] - | otherwise - -> [newSimpleUnitId (rc_cid rc)] - _ -> []) ++ - ordNub (map fst (rc_depends rc)) ++ - map ann_id (rc_exe_deps rc) - -dispReadyComponent :: ReadyComponent -> Doc -dispReadyComponent rc = - hang (text (case rc_i rc of - Left _ -> "indefinite" - Right _ -> "definite") - <+> disp (nodeKey rc) - {- <+> dispModSubst (Map.fromList (lc_insts lc)) -} ) 4 $ - vcat [ text "depends" <+> disp uid - | uid <- nodeNeighbors rc ] - --- | The state of 'InstM'; a mapping from 'UnitId's to their --- ready component, or @Nothing@ if its an external --- component which we don't know how to build. -type InstS = Map UnitId (Maybe ReadyComponent) - --- | A state monad for doing instantiations (can't use actual --- State because that would be an extra dependency.) -newtype InstM a = InstM { runInstM :: InstS -> (a, InstS) } - -instance Functor InstM where - fmap f (InstM m) = InstM $ \s -> let (x, s') = m s - in (f x, s') - -instance A.Applicative InstM where - pure a = InstM $ \s -> (a, s) - InstM f <*> InstM x = InstM $ \s -> let (f', s') = f s - (x', s'') = x s' - in (f' x', s'') - -instance Monad InstM where - return = A.pure - InstM m >>= f = InstM $ \s -> let (x, s') = m s - in runInstM (f x) s' - --- | Given a list of 'LinkedComponent's, expand the module graph --- so that we have an instantiated graph containing all of the --- instantiated components we need to build. 
--- --- Instantiation intuitively follows the following algorithm: --- --- instantiate a definite unit id p[S]: --- recursively instantiate each module M in S --- recursively instantiate modules exported by this unit --- recursively instantiate dependencies substituted by S --- --- The implementation is a bit more involved to memoize instantiation --- if we have done it already. --- --- We also call 'improveUnitId' during this process, so that fully --- instantiated components are given 'HashedUnitId'. --- -toReadyComponents - :: Map UnitId MungedPackageId - -> Map ModuleName Module -- subst for the public component - -> [LinkedComponent] - -> [ReadyComponent] -toReadyComponents pid_map subst0 comps - = catMaybes (Map.elems ready_map) - where - cmap = Map.fromList [ (lc_cid lc, lc) | lc <- comps ] - - instantiateUnitId :: ComponentId -> Map ModuleName Module - -> InstM DefUnitId - instantiateUnitId cid insts = InstM $ \s -> - case Map.lookup uid s of - Nothing -> - -- Knot tied - let (r, s') = runInstM (instantiateComponent uid cid insts) - (Map.insert uid r s) - in (def_uid, Map.insert uid r s') - Just _ -> (def_uid, s) - where - -- The mkDefUnitId here indicates that we assume - -- that Cabal handles unit id hash allocation. - -- Good thing about hashing here: map is only on string. - -- Bad thing: have to repeatedly hash. 
- def_uid = mkDefUnitId cid insts - uid = unDefUnitId def_uid - - instantiateComponent - :: UnitId -> ComponentId -> Map ModuleName Module - -> InstM (Maybe ReadyComponent) - instantiateComponent uid cid insts - | Just lc <- Map.lookup cid cmap = do - provides <- T.mapM (substModule insts) (modShapeProvides (lc_shape lc)) - -- NB: lc_sig_includes is omitted here, because we don't - -- need them to build - includes <- forM (lc_includes lc) $ \ci -> do - uid' <- substUnitId insts (ci_id ci) - return ci { ci_ann_id = fmap (const uid') (ci_ann_id ci) } - exe_deps <- mapM (substExeDep insts) (lc_exe_deps lc) - s <- InstM $ \s -> (s, s) - let getDep (Module dep_def_uid _) - | let dep_uid = unDefUnitId dep_def_uid - -- Lose DefUnitId invariant for rc_depends - = [(dep_uid, - fromMaybe err_pid $ - Map.lookup dep_uid pid_map A.<|> - fmap rc_munged_id (join (Map.lookup dep_uid s)))] - where - err_pid = MungedPackageId - (mkMungedPackageName "nonexistent-package-this-is-a-cabal-bug") - (mkVersion [0]) - instc = InstantiatedComponent { - instc_insts = Map.toList insts, - instc_insts_deps = concatMap getDep (Map.elems insts), - instc_provides = provides, - instc_includes = includes - -- NB: there is no dependency on the - -- indefinite version of this instantiated package here, - -- as (1) it doesn't go in depends in the - -- IPI: it's not a run time dep, and (2) - -- we don't have to tell GHC about it, it - -- will match up the ComponentId - -- automatically - } - return $ Just ReadyComponent { - rc_ann_id = (lc_ann_id lc) { ann_id = uid }, - rc_open_uid = DefiniteUnitId (unsafeMkDefUnitId uid), - rc_cid = lc_cid lc, - rc_component = lc_component lc, - rc_exe_deps = exe_deps, - rc_public = lc_public lc, - rc_i = Right instc - } - | otherwise = return Nothing - - substUnitId :: Map ModuleName Module -> OpenUnitId -> InstM DefUnitId - substUnitId _ (DefiniteUnitId uid) = - return uid - substUnitId subst (IndefFullUnitId cid insts) = do - insts' <- substSubst subst insts - 
instantiateUnitId cid insts' - - -- NB: NOT composition - substSubst :: Map ModuleName Module - -> Map ModuleName OpenModule - -> InstM (Map ModuleName Module) - substSubst subst insts = T.mapM (substModule subst) insts - - substModule :: Map ModuleName Module -> OpenModule -> InstM Module - substModule subst (OpenModuleVar mod_name) - | Just m <- Map.lookup mod_name subst = return m - | otherwise = error "substModule: non-closing substitution" - substModule subst (OpenModule uid mod_name) = do - uid' <- substUnitId subst uid - return (Module uid' mod_name) - - substExeDep :: Map ModuleName Module - -> AnnotatedId OpenUnitId -> InstM (AnnotatedId UnitId) - substExeDep insts exe_aid = do - exe_uid' <- substUnitId insts (ann_id exe_aid) - return exe_aid { ann_id = unDefUnitId exe_uid' } - - indefiniteUnitId :: ComponentId -> InstM UnitId - indefiniteUnitId cid = do - let uid = newSimpleUnitId cid - r <- indefiniteComponent uid cid - InstM $ \s -> (uid, Map.insert uid r s) - - indefiniteComponent :: UnitId -> ComponentId -> InstM (Maybe ReadyComponent) - indefiniteComponent uid cid - | Just lc <- Map.lookup cid cmap = do - exe_deps <- mapM (substExeDep Map.empty) (lc_exe_deps lc) - let indefc = IndefiniteComponent { - indefc_requires = map fst (lc_insts lc), - indefc_provides = modShapeProvides (lc_shape lc), - indefc_includes = lc_includes lc ++ lc_sig_includes lc - } - return $ Just ReadyComponent { - rc_ann_id = (lc_ann_id lc) { ann_id = uid }, - rc_cid = lc_cid lc, - rc_open_uid = lc_uid lc, - rc_component = lc_component lc, - -- It's always fully built - rc_exe_deps = exe_deps, - rc_public = lc_public lc, - rc_i = Left indefc - } - | otherwise = return Nothing - - ready_map = snd $ runInstM work Map.empty - - work - | not (Map.null subst0) - , [lc] <- filter lc_public (Map.elems cmap) - = do _ <- instantiateUnitId (lc_cid lc) subst0 - return () - | otherwise - = forM_ (Map.elems cmap) $ \lc -> - if null (lc_insts lc) - then instantiateUnitId (lc_cid lc) Map.empty 
>> return () - else indefiniteUnitId (lc_cid lc) >> return () diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/UnifyM.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/UnifyM.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack/UnifyM.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack/UnifyM.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,628 +0,0 @@ -{-# LANGUAGE Rank2Types #-} -{-# LANGUAGE ScopedTypeVariables #-} --- | See -module Distribution.Backpack.UnifyM ( - -- * Unification monad - UnifyM, - runUnifyM, - failWith, - addErr, - failIfErrs, - tryM, - addErrContext, - addErrContextM, - liftST, - - UnifEnv(..), - getUnifEnv, - - -- * Modules and unit IDs - ModuleU, - ModuleU'(..), - convertModule, - convertModuleU, - - UnitIdU, - UnitIdU'(..), - convertUnitId, - convertUnitIdU, - - ModuleSubstU, - convertModuleSubstU, - convertModuleSubst, - - ModuleScopeU, - emptyModuleScopeU, - convertModuleScopeU, - - ModuleWithSourceU, - - convertInclude, - convertModuleProvides, - convertModuleProvidesU, - -) where - -import Prelude () -import Distribution.Compat.Prelude hiding (mod) - -import Distribution.Backpack.ModuleShape -import Distribution.Backpack.ModuleScope -import Distribution.Backpack.ModSubst -import Distribution.Backpack.FullUnitId -import Distribution.Backpack - -import qualified Distribution.Utils.UnionFind as UnionFind -import Distribution.ModuleName -import Distribution.Package -import Distribution.PackageDescription -import Distribution.Text -import Distribution.Types.IncludeRenaming -import Distribution.Types.ComponentInclude -import Distribution.Types.AnnotatedId -import Distribution.Types.ComponentName -import Distribution.Verbosity - -import Data.STRef -import Data.Traversable -import Control.Monad.ST -import qualified Data.Map as Map -import qualified 
Data.Set as Set -import Data.IntMap (IntMap) -import qualified Data.IntMap as IntMap -import qualified Data.Traversable as T -import Text.PrettyPrint - --- TODO: more detailed trace output on high verbosity would probably --- be appreciated by users debugging unification errors. Collect --- some good examples! - -data ErrMsg = ErrMsg { - err_msg :: Doc, - err_ctx :: [Doc] - } -type MsgDoc = Doc - -renderErrMsg :: ErrMsg -> MsgDoc -renderErrMsg ErrMsg { err_msg = msg, err_ctx = ctx } = - msg $$ vcat ctx - --- | The unification monad, this monad encapsulates imperative --- unification. -newtype UnifyM s a = UnifyM { unUnifyM :: UnifEnv s -> ST s (Maybe a) } - --- | Run a computation in the unification monad. -runUnifyM :: Verbosity -> ComponentId -> FullDb -> (forall s. UnifyM s a) -> Either [MsgDoc] a -runUnifyM verbosity self_cid db m - = runST $ do i <- newSTRef 0 - hmap <- newSTRef Map.empty - errs <- newSTRef [] - mb_r <- unUnifyM m UnifEnv { - unify_uniq = i, - unify_reqs = hmap, - unify_self_cid = self_cid, - unify_verbosity = verbosity, - unify_ctx = [], - unify_db = db, - unify_errs = errs } - final_errs <- readSTRef errs - case mb_r of - Just x | null final_errs -> return (Right x) - _ -> return (Left (map renderErrMsg (reverse final_errs))) --- NB: GHC 7.6 throws a hissy fit if you pattern match on 'm'. - -type ErrCtx s = MsgDoc - --- | The unification environment. -data UnifEnv s = UnifEnv { - -- | A supply of unique integers to label 'UnitIdU' - -- cells. This is used to determine loops in unit - -- identifiers (which can happen with mutual recursion.) - unify_uniq :: UnifRef s UnitIdUnique, - -- | The set of requirements in scope. When - -- a provision is brought into scope, we unify with - -- the requirement at the same module name to fill it. - -- This mapping grows monotonically. - unify_reqs :: UnifRef s (Map ModuleName (ModuleU s)), - -- | Component id of the unit we're linking. 
We use this - -- to detect if we fill a requirement with a local module, - -- which in principle should be OK but is not currently - -- supported by GHC. - unify_self_cid :: ComponentId, - -- | How verbose the error message should be - unify_verbosity :: Verbosity, - -- | The error reporting context - unify_ctx :: [ErrCtx s], - -- | The package index for expanding unit identifiers - unify_db :: FullDb, - -- | Accumulated errors - unify_errs :: UnifRef s [ErrMsg] - } - -instance Functor (UnifyM s) where - fmap f (UnifyM m) = UnifyM (fmap (fmap (fmap f)) m) - -instance Applicative (UnifyM s) where - pure = UnifyM . pure . pure . pure - UnifyM f <*> UnifyM x = UnifyM $ \r -> do - f' <- f r - case f' of - Nothing -> return Nothing - Just f'' -> do - x' <- x r - case x' of - Nothing -> return Nothing - Just x'' -> return (Just (f'' x'')) - -instance Monad (UnifyM s) where - return = pure - UnifyM m >>= f = UnifyM $ \r -> do - x <- m r - case x of - Nothing -> return Nothing - Just x' -> unUnifyM (f x') r - --- | Lift a computation from 'ST' monad to 'UnifyM' monad. --- Internal use only. 
-liftST :: ST s a -> UnifyM s a -liftST m = UnifyM $ \_ -> fmap Just m - -addErr :: MsgDoc -> UnifyM s () -addErr msg = do - env <- getUnifEnv - let err = ErrMsg { - err_msg = msg, - err_ctx = unify_ctx env - } - liftST $ modifySTRef (unify_errs env) (\errs -> err:errs) - -failWith :: MsgDoc -> UnifyM s a -failWith msg = do - addErr msg - failM - -failM :: UnifyM s a -failM = UnifyM $ \_ -> return Nothing - -failIfErrs :: UnifyM s () -failIfErrs = do - env <- getUnifEnv - errs <- liftST $ readSTRef (unify_errs env) - when (not (null errs)) failM - -tryM :: UnifyM s a -> UnifyM s (Maybe a) -tryM m = - UnifyM (\env -> do - mb_r <- unUnifyM m env - return (Just mb_r)) - -{- -otherFail :: ErrMsg -> UnifyM s a -otherFail s = UnifyM $ \_ -> return (Left s) - -unifyFail :: ErrMsg -> UnifyM s a -unifyFail err = do - env <- getUnifEnv - msg <- case unify_ctx env of - Nothing -> return (text "Unspecified unification error:" <+> err) - Just (ctx, mod1, mod2) - | unify_verbosity env > normal - -> do mod1' <- convertModuleU mod1 - mod2' <- convertModuleU mod2 - let extra = " (was unifying " ++ display mod1' - ++ " and " ++ display mod2' ++ ")" - return (ctx ++ err ++ extra) - | otherwise - -> return (ctx ++ err ++ " (for more information, pass -v flag)") - UnifyM $ \_ -> return (Left msg) --} - --- | A convenient alias for mutable references in the unification monad. -type UnifRef s a = STRef s a - --- | Imperatively read a 'UnifRef'. -readUnifRef :: UnifRef s a -> UnifyM s a -readUnifRef = liftST . readSTRef - --- | Imperatively write a 'UnifRef'. -writeUnifRef :: UnifRef s a -> a -> UnifyM s () -writeUnifRef x = liftST . writeSTRef x - --- | Get the current unification environment. -getUnifEnv :: UnifyM s (UnifEnv s) -getUnifEnv = UnifyM $ \r -> return (return r) - --- | Add a fixed message to the error context. -addErrContext :: Doc -> UnifyM s a -> UnifyM s a -addErrContext ctx m = addErrContextM ctx m - --- | Add a message to the error context. It may make monadic queries. 
-addErrContextM :: ErrCtx s -> UnifyM s a -> UnifyM s a -addErrContextM ctx m = - UnifyM $ \r -> unUnifyM m r { unify_ctx = ctx : unify_ctx r } - - ------------------------------------------------------------------------ --- The "unifiable" variants of the data types --- --- In order to properly do unification over infinite trees, we --- need to union find over 'Module's and 'UnitId's. The pure --- representation is ill-equipped to do this, so we convert --- from the pure representation into one which is indirected --- through union-find. 'ModuleU' handles hole variables; --- 'UnitIdU' handles mu-binders. - --- | Contents of a mutable 'ModuleU' reference. -data ModuleU' s - = ModuleU (UnitIdU s) ModuleName - | ModuleVarU ModuleName - --- | Contents of a mutable 'UnitIdU' reference. -data UnitIdU' s - = UnitIdU UnitIdUnique ComponentId (Map ModuleName (ModuleU s)) - | UnitIdThunkU DefUnitId - --- | A mutable version of 'Module' which can be imperatively unified. -type ModuleU s = UnionFind.Point s (ModuleU' s) - --- | A mutable version of 'UnitId' which can be imperatively unified. -type UnitIdU s = UnionFind.Point s (UnitIdU' s) - --- | An integer for uniquely labeling 'UnitIdU' nodes. We need --- these labels in order to efficiently serialize 'UnitIdU's into --- 'UnitId's (we use the label to check if any parent is the --- node in question, and if so insert a deBruijn index instead.) --- These labels must be unique across all 'UnitId's/'Module's which --- participate in unification! -type UnitIdUnique = Int - - ------------------------------------------------------------------------ --- Conversion to the unifiable data types - --- An environment for tracking the mu-bindings in scope. --- The invariant for a state @(m, i)@ is that [0..i] are --- keys of @m@; in fact, the @i-k@th entry is the @k@th --- de Bruijn index (this saves us from having to shift as --- we enter mu-binders.) 
-type MuEnv s = (IntMap (UnitIdU s), Int) - -extendMuEnv :: MuEnv s -> UnitIdU s -> MuEnv s -extendMuEnv (m, i) x = - (IntMap.insert (i + 1) x m, i + 1) - -{- -lookupMuEnv :: MuEnv s -> Int {- de Bruijn index -} -> UnitIdU s -lookupMuEnv (m, i) k = - case IntMap.lookup (i - k) m of - -- Technically a user can trigger this by giving us a - -- bad 'UnitId', so handle this better. - Nothing -> error "lookupMuEnv: out of bounds (malformed de Bruijn index)" - Just v -> v --} - -emptyMuEnv :: MuEnv s -emptyMuEnv = (IntMap.empty, -1) - --- The workhorse functions. These share an environment: --- * @UnifRef s UnitIdUnique@ - the unique label supply for 'UnitIdU' nodes --- * @UnifRef s (Map ModuleName moduleU)@ - the (lazily initialized) --- environment containing the implicitly universally quantified --- @hole:A@ binders. --- * @MuEnv@ - the environment for mu-binders. - -convertUnitId' :: MuEnv s - -> OpenUnitId - -> UnifyM s (UnitIdU s) --- TODO: this could be more lazy if we know there are no internal --- references -convertUnitId' _ (DefiniteUnitId uid) = - liftST $ UnionFind.fresh (UnitIdThunkU uid) -convertUnitId' stk (IndefFullUnitId cid insts) = do - fs <- fmap unify_uniq getUnifEnv - x <- liftST $ UnionFind.fresh (error "convertUnitId") -- tie the knot later - insts_u <- T.forM insts $ convertModule' (extendMuEnv stk x) - u <- readUnifRef fs - writeUnifRef fs (u+1) - y <- liftST $ UnionFind.fresh (UnitIdU u cid insts_u) - liftST $ UnionFind.union x y - return y --- convertUnitId' stk (UnitIdVar i) = return (lookupMuEnv stk i) - -convertModule' :: MuEnv s - -> OpenModule -> UnifyM s (ModuleU s) -convertModule' _stk (OpenModuleVar mod_name) = do - hmap <- fmap unify_reqs getUnifEnv - hm <- readUnifRef hmap - case Map.lookup mod_name hm of - Nothing -> do mod <- liftST $ UnionFind.fresh (ModuleVarU mod_name) - writeUnifRef hmap (Map.insert mod_name mod hm) - return mod - Just mod -> return mod -convertModule' stk (OpenModule uid mod_name) = do - uid_u <- 
convertUnitId' stk uid - liftST $ UnionFind.fresh (ModuleU uid_u mod_name) - -convertUnitId :: OpenUnitId -> UnifyM s (UnitIdU s) -convertUnitId = convertUnitId' emptyMuEnv - -convertModule :: OpenModule -> UnifyM s (ModuleU s) -convertModule = convertModule' emptyMuEnv - - - ------------------------------------------------------------------------ --- Substitutions - --- | The mutable counterpart of a 'ModuleSubst' (not defined here). -type ModuleSubstU s = Map ModuleName (ModuleU s) - --- | Conversion of 'ModuleSubst' to 'ModuleSubstU' -convertModuleSubst :: Map ModuleName OpenModule -> UnifyM s (Map ModuleName (ModuleU s)) -convertModuleSubst = T.mapM convertModule - --- | Conversion of 'ModuleSubstU' to 'ModuleSubst' -convertModuleSubstU :: ModuleSubstU s -> UnifyM s OpenModuleSubst -convertModuleSubstU = T.mapM convertModuleU - ------------------------------------------------------------------------ --- Conversion from the unifiable data types - --- An environment for tracking candidates for adding a mu-binding. --- The invariant for a state @(m, i)@, is that if we encounter a node --- labeled @k@ such that @m[k -> v]@, then we can replace this --- node with the de Bruijn index @i-v@ referring to an enclosing --- mu-binder; furthermore, @range(m) = [0..i]@. 
-type MooEnv = (IntMap Int, Int) - -emptyMooEnv :: MooEnv -emptyMooEnv = (IntMap.empty, -1) - -extendMooEnv :: MooEnv -> UnitIdUnique -> MooEnv -extendMooEnv (m, i) k = (IntMap.insert k (i + 1) m, i + 1) - -lookupMooEnv :: MooEnv -> UnitIdUnique -> Maybe Int -lookupMooEnv (m, i) k = - case IntMap.lookup k m of - Nothing -> Nothing - Just v -> Just (i-v) -- de Bruijn indexize - --- The workhorse functions - -convertUnitIdU' :: MooEnv -> UnitIdU s -> UnifyM s OpenUnitId -convertUnitIdU' stk uid_u = do - x <- liftST $ UnionFind.find uid_u - case x of - UnitIdThunkU uid -> return (DefiniteUnitId uid) - UnitIdU u cid insts_u -> - case lookupMooEnv stk u of - Just _i -> - failWith (text "Unsupported mutually recursive unit identifier") - -- return (UnitIdVar i) - Nothing -> do - insts <- T.forM insts_u $ convertModuleU' (extendMooEnv stk u) - return (IndefFullUnitId cid insts) - -convertModuleU' :: MooEnv -> ModuleU s -> UnifyM s OpenModule -convertModuleU' stk mod_u = do - mod <- liftST $ UnionFind.find mod_u - case mod of - ModuleVarU mod_name -> return (OpenModuleVar mod_name) - ModuleU uid_u mod_name -> do - uid <- convertUnitIdU' stk uid_u - return (OpenModule uid mod_name) - --- Helper functions - -convertUnitIdU :: UnitIdU s -> UnifyM s OpenUnitId -convertUnitIdU = convertUnitIdU' emptyMooEnv - -convertModuleU :: ModuleU s -> UnifyM s OpenModule -convertModuleU = convertModuleU' emptyMooEnv - --- | An empty 'ModuleScopeU'. -emptyModuleScopeU :: ModuleScopeU s -emptyModuleScopeU = (Map.empty, Map.empty) - - --- | The mutable counterpart of 'ModuleScope'. 
-type ModuleScopeU s = (ModuleProvidesU s, ModuleRequiresU s) --- | The mutable counterpart of 'ModuleProvides' -type ModuleProvidesU s = Map ModuleName [ModuleWithSourceU s] -type ModuleRequiresU s = ModuleProvidesU s -type ModuleWithSourceU s = WithSource (ModuleU s) - --- TODO: Deduplicate this with Distribution.Backpack.MixLink.dispSource -ci_msg :: ComponentInclude (OpenUnitId, ModuleShape) IncludeRenaming -> Doc -ci_msg ci - | ci_implicit ci = text "build-depends:" <+> pp_pn - | otherwise = text "mixins:" <+> pp_pn <+> disp (ci_renaming ci) - where - pn = pkgName (ci_pkgid ci) - pp_pn = - case ci_cname ci of - CLibName -> disp pn - CSubLibName cn -> disp pn <<>> colon <<>> disp cn - -- Shouldn't happen - cn -> disp pn <+> parens (disp cn) - --- | Convert a 'ModuleShape' into a 'ModuleScopeU', so we can do --- unification on it. -convertInclude - :: ComponentInclude (OpenUnitId, ModuleShape) IncludeRenaming - -> UnifyM s (ModuleScopeU s, - Either (ComponentInclude (UnitIdU s) ModuleRenaming) {- normal -} - (ComponentInclude (UnitIdU s) ModuleRenaming) {- sig -}) -convertInclude ci@(ComponentInclude { - ci_ann_id = AnnotatedId { - ann_id = (uid, ModuleShape provs reqs), - ann_pid = pid, - ann_cname = compname - }, - ci_renaming = incl@(IncludeRenaming prov_rns req_rns), - ci_implicit = implicit - }) = addErrContext (text "In" <+> ci_msg ci) $ do - let pn = packageName pid - the_source | implicit - = FromBuildDepends pn compname - | otherwise - = FromMixins pn compname incl - source = WithSource the_source - - -- Suppose our package has two requirements A and B, and - -- we include it with @requires (A as X)@ - -- There are three closely related things we compute based - -- off of @reqs@ and @reqs_rns@: - -- - -- 1. The requirement renaming (A -> X) - -- 2. The requirement substitution (A -> , B -> ) - - -- Requirement renaming. 
This is read straight off the syntax: - -- - -- [nothing] ==> [empty] - -- requires (B as Y) ==> B -> Y - -- - -- Requirement renamings are NOT injective: if two requirements - -- are mapped to the same name, the intent is to merge them - -- together. But they are *functions*, so @B as X, B as Y@ is - -- illegal. - - req_rename_list <- - case req_rns of - DefaultRenaming -> return [] - HidingRenaming _ -> do - -- Not valid here for requires! - addErr $ text "Unsupported syntax" <+> - quotes (text "requires hiding (...)") - return [] - ModuleRenaming rns -> return rns - - let req_rename_listmap :: Map ModuleName [ModuleName] - req_rename_listmap = - Map.fromListWith (++) [ (k,[v]) | (k,v) <- req_rename_list ] - req_rename <- sequenceA . flip Map.mapWithKey req_rename_listmap $ \k vs0 -> - case vs0 of - [] -> error "req_rename" - [v] -> return v - v:vs -> do addErr $ - text "Conflicting renamings of requirement" <+> quotes (disp k) $$ - text "Renamed to: " <+> vcat (map disp (v:vs)) - return v - - let req_rename_fn k = case Map.lookup k req_rename of - Nothing -> k - Just v -> v - - -- Requirement substitution. - -- - -- A -> X ==> A -> - let req_subst = fmap OpenModuleVar req_rename - - uid_u <- convertUnitId (modSubst req_subst uid) - - -- Requirement mapping. This is just taking the range of the - -- requirement substitution, and making a mapping so that it is - -- convenient to merge things together. It INCLUDES the implicit - -- mappings. - -- - -- A -> X ==> X -> , B -> - reqs_u <- convertModuleRequires . 
Map.fromList $ - [ (k, [source (OpenModuleVar k)]) - | k <- map req_rename_fn (Set.toList reqs) - ] - - -- Report errors if there were unused renamings - let leftover = Map.keysSet req_rename `Set.difference` reqs - unless (Set.null leftover) $ - addErr $ - hang (text "The" <+> text (showComponentName compname) <+> - text "from package" <+> quotes (disp pid) - <+> text "does not require:") 4 - (vcat (map disp (Set.toList leftover))) - - -- Provision computation is more complex. - -- For example, if we have: - -- - -- include p (A as X) requires (B as Y) - -- where A -> q[B=]:A - -- - -- Then we need: - -- - -- X -> [("p", q[B=]:A)] - -- - -- There are a bunch of clever ways to present the algorithm - -- but here is the simple one: - -- - -- 1. If we have a default renaming, apply req_subst - -- to provs and use that. - -- - -- 2. Otherwise, build a map by successively looking - -- up the referenced modules in the renaming in provs. - -- - -- Importantly, overlapping rename targets get accumulated - -- together. It's not an (immediate) error. - (pre_prov_scope, prov_rns') <- - case prov_rns of - DefaultRenaming -> return (Map.toList provs, prov_rns) - HidingRenaming hides -> - let hides_set = Set.fromList hides - in let r = [ (k,v) - | (k,v) <- Map.toList provs - , not (k `Set.member` hides_set) ] - -- GHC doesn't understand hiding, so expand it out! 
- in return (r, ModuleRenaming (map ((\x -> (x,x)).fst) r)) - ModuleRenaming rns -> do - r <- sequence - [ case Map.lookup from provs of - Just m -> return (to, m) - Nothing -> failWith $ - text "Package" <+> quotes (disp pid) <+> - text "does not expose the module" <+> quotes (disp from) - | (from, to) <- rns ] - return (r, prov_rns) - let prov_scope = modSubst req_subst - $ Map.fromListWith (++) - [ (k, [source v]) - | (k, v) <- pre_prov_scope ] - - provs_u <- convertModuleProvides prov_scope - - -- TODO: Assert that provs_u is empty if provs was empty - return ((provs_u, reqs_u), - -- NB: We test that requirements is not null so that - -- users can create packages with zero module exports - -- that cause some C library to linked in, etc. - (if Map.null provs && not (Set.null reqs) - then Right -- is sig - else Left) (ComponentInclude { - ci_ann_id = AnnotatedId { - ann_id = uid_u, - ann_pid = pid, - ann_cname = compname - }, - ci_renaming = prov_rns', - ci_implicit = ci_implicit ci - })) - --- | Convert a 'ModuleScopeU' to a 'ModuleScope'. -convertModuleScopeU :: ModuleScopeU s -> UnifyM s ModuleScope -convertModuleScopeU (provs_u, reqs_u) = do - provs <- convertModuleProvidesU provs_u - reqs <- convertModuleRequiresU reqs_u - -- TODO: Test that the requirements are still free. If they - -- are not, they got unified, and that's dodgy at best. 
- return (ModuleScope provs reqs) - --- | Convert a 'ModuleProvides' to a 'ModuleProvidesU' -convertModuleProvides :: ModuleProvides -> UnifyM s (ModuleProvidesU s) -convertModuleProvides = T.mapM (mapM (T.mapM convertModule)) - --- | Convert a 'ModuleProvidesU' to a 'ModuleProvides' -convertModuleProvidesU :: ModuleProvidesU s -> UnifyM s ModuleProvides -convertModuleProvidesU = T.mapM (mapM (T.mapM convertModuleU)) - -convertModuleRequires :: ModuleRequires -> UnifyM s (ModuleRequiresU s) -convertModuleRequires = convertModuleProvides - -convertModuleRequiresU :: ModuleRequiresU s -> UnifyM s ModuleRequires -convertModuleRequiresU = convertModuleProvidesU diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Backpack.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Backpack.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,311 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE PatternGuards #-} -{-# LANGUAGE RankNTypes #-} - --- | This module defines the core data types for Backpack. 
For more --- details, see: --- --- - -module Distribution.Backpack ( - -- * OpenUnitId - OpenUnitId(..), - openUnitIdFreeHoles, - mkOpenUnitId, - - -- * DefUnitId - DefUnitId, - unDefUnitId, - mkDefUnitId, - - -- * OpenModule - OpenModule(..), - openModuleFreeHoles, - - -- * OpenModuleSubst - OpenModuleSubst, - dispOpenModuleSubst, - dispOpenModuleSubstEntry, - parseOpenModuleSubst, - parseOpenModuleSubstEntry, - parsecOpenModuleSubst, - parsecOpenModuleSubstEntry, - openModuleSubstFreeHoles, - - -- * Conversions to 'UnitId' - abstractUnitId, - hashModuleSubst, -) where - -import Distribution.Compat.Prelude hiding (mod) -import Distribution.Compat.ReadP ((<++)) -import Distribution.Parsec.Class -import Distribution.Pretty -import Prelude () -import Text.PrettyPrint (hcat) - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - -import Distribution.ModuleName -import Distribution.Text -import Distribution.Types.ComponentId -import Distribution.Types.Module -import Distribution.Types.UnitId -import Distribution.Utils.Base62 - -import qualified Data.Map as Map -import Data.Set (Set) -import qualified Data.Set as Set - ------------------------------------------------------------------------ --- OpenUnitId - --- | An 'OpenUnitId' describes a (possibly partially) instantiated --- Backpack component, with a description of how the holes are filled --- in. Unlike 'OpenUnitId', the 'ModuleSubst' is kept in a structured --- form that allows for substitution (which fills in holes.) This form --- of unit cannot be installed. It must first be converted to a --- 'UnitId'. --- --- In the absence of Backpack, there are no holes to fill, so any such --- component always has an empty module substitution; thus we can lossly --- represent it as an 'OpenUnitId uid'. 
--- --- For a source component using Backpack, however, there is more --- structure as components may be parametrized over some signatures, and --- these \"holes\" may be partially or wholly filled. --- --- OpenUnitId plays an important role when we are mix-in linking, --- and is recorded to the installed packaged database for indefinite --- packages; however, for compiled packages that are fully instantiated, --- we instantiate 'OpenUnitId' into 'UnitId'. --- --- For more details see the Backpack spec --- --- - -data OpenUnitId - -- | Identifies a component which may have some unfilled holes; - -- specifying its 'ComponentId' and its 'OpenModuleSubst'. - -- TODO: Invariant that 'OpenModuleSubst' is non-empty? - -- See also the Text instance. - = IndefFullUnitId ComponentId OpenModuleSubst - -- | Identifies a fully instantiated component, which has - -- been compiled and abbreviated as a hash. The embedded 'UnitId' - -- MUST NOT be for an indefinite component; an 'OpenUnitId' - -- is guaranteed not to have any holes. - | DefiniteUnitId DefUnitId - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) --- TODO: cache holes? 
- -instance Binary OpenUnitId - -instance NFData OpenUnitId where - rnf (IndefFullUnitId cid subst) = rnf cid `seq` rnf subst - rnf (DefiniteUnitId uid) = rnf uid - -instance Pretty OpenUnitId where - pretty (IndefFullUnitId cid insts) - -- TODO: arguably a smart constructor to enforce invariant would be - -- better - | Map.null insts = pretty cid - | otherwise = pretty cid <<>> Disp.brackets (dispOpenModuleSubst insts) - pretty (DefiniteUnitId uid) = pretty uid - --- | --- --- >>> eitherParsec "foobar" :: Either String OpenUnitId ---Right (DefiniteUnitId (DefUnitId {unDefUnitId = UnitId "foobar"})) --- --- >>> eitherParsec "foo[Str=text-1.2.3:Data.Text.Text]" :: Either String OpenUnitId --- Right (IndefFullUnitId (ComponentId "foo") (fromList [(ModuleName ["Str"],OpenModule (DefiniteUnitId (DefUnitId {unDefUnitId = UnitId "text-1.2.3"})) (ModuleName ["Data","Text","Text"]))])) --- -instance Parsec OpenUnitId where - parsec = P.try parseOpenUnitId <|> fmap DefiniteUnitId parsec - where - parseOpenUnitId = do - cid <- parsec - insts <- P.between (P.char '[') (P.char ']') - parsecOpenModuleSubst - return (IndefFullUnitId cid insts) - -instance Text OpenUnitId where - parse = parseOpenUnitId <++ fmap DefiniteUnitId parse - where - parseOpenUnitId = do - cid <- parse - insts <- Parse.between (Parse.char '[') (Parse.char ']') - parseOpenModuleSubst - return (IndefFullUnitId cid insts) - --- | Get the set of holes ('ModuleVar') embedded in a 'UnitId'. -openUnitIdFreeHoles :: OpenUnitId -> Set ModuleName -openUnitIdFreeHoles (IndefFullUnitId _ insts) = openModuleSubstFreeHoles insts -openUnitIdFreeHoles _ = Set.empty - --- | Safe constructor from a UnitId. The only way to do this safely --- is if the instantiation is provided. -mkOpenUnitId :: UnitId -> ComponentId -> OpenModuleSubst -> OpenUnitId -mkOpenUnitId uid cid insts = - if Set.null (openModuleSubstFreeHoles insts) - then DefiniteUnitId (unsafeMkDefUnitId uid) -- invariant holds! 
- else IndefFullUnitId cid insts - ------------------------------------------------------------------------ --- DefUnitId - --- | Create a 'DefUnitId' from a 'ComponentId' and an instantiation --- with no holes. -mkDefUnitId :: ComponentId -> Map ModuleName Module -> DefUnitId -mkDefUnitId cid insts = - unsafeMkDefUnitId (mkUnitId - (unComponentId cid ++ maybe "" ("+"++) (hashModuleSubst insts))) - -- impose invariant! - ------------------------------------------------------------------------ --- OpenModule - --- | Unlike a 'Module', an 'OpenModule' is either an ordinary --- module from some unit, OR an 'OpenModuleVar', representing a --- hole that needs to be filled in. Substitutions are over --- module variables. -data OpenModule - = OpenModule OpenUnitId ModuleName - | OpenModuleVar ModuleName - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) - -instance Binary OpenModule - -instance NFData OpenModule where - rnf (OpenModule uid mod_name) = rnf uid `seq` rnf mod_name - rnf (OpenModuleVar mod_name) = rnf mod_name - -instance Pretty OpenModule where - pretty (OpenModule uid mod_name) = - hcat [pretty uid, Disp.text ":", pretty mod_name] - pretty (OpenModuleVar mod_name) = - hcat [Disp.char '<', pretty mod_name, Disp.char '>'] - --- | --- --- >>> eitherParsec "Includes2-0.1.0.0-inplace-mysql:Database.MySQL" :: Either String OpenModule --- Right (OpenModule (DefiniteUnitId (DefUnitId {unDefUnitId = UnitId "Includes2-0.1.0.0-inplace-mysql"})) (ModuleName ["Database","MySQL"])) --- -instance Parsec OpenModule where - parsec = parsecModuleVar <|> parsecOpenModule - where - parsecOpenModule = do - uid <- parsec - _ <- P.char ':' - mod_name <- parsec - return (OpenModule uid mod_name) - - parsecModuleVar = do - _ <- P.char '<' - mod_name <- parsec - _ <- P.char '>' - return (OpenModuleVar mod_name) - -instance Text OpenModule where - parse = parseModuleVar <++ parseOpenModule - where - parseOpenModule = do - uid <- parse - _ <- Parse.char ':' - mod_name <- parse 
- return (OpenModule uid mod_name) - parseModuleVar = do - _ <- Parse.char '<' - mod_name <- parse - _ <- Parse.char '>' - return (OpenModuleVar mod_name) - --- | Get the set of holes ('ModuleVar') embedded in a 'Module'. -openModuleFreeHoles :: OpenModule -> Set ModuleName -openModuleFreeHoles (OpenModuleVar mod_name) = Set.singleton mod_name -openModuleFreeHoles (OpenModule uid _n) = openUnitIdFreeHoles uid - ------------------------------------------------------------------------ --- OpenModuleSubst - --- | An explicit substitution on modules. --- --- NB: These substitutions are NOT idempotent, for example, a --- valid substitution is (A -> B, B -> A). -type OpenModuleSubst = Map ModuleName OpenModule - --- | Pretty-print the entries of a module substitution, suitable --- for embedding into a 'OpenUnitId' or passing to GHC via @--instantiate-with@. -dispOpenModuleSubst :: OpenModuleSubst -> Disp.Doc -dispOpenModuleSubst subst - = Disp.hcat - . Disp.punctuate Disp.comma - $ map dispOpenModuleSubstEntry (Map.toAscList subst) - --- | Pretty-print a single entry of a module substitution. -dispOpenModuleSubstEntry :: (ModuleName, OpenModule) -> Disp.Doc -dispOpenModuleSubstEntry (k, v) = disp k <<>> Disp.char '=' <<>> disp v - --- | Inverse to 'dispModSubst'. -parseOpenModuleSubst :: Parse.ReadP r OpenModuleSubst -parseOpenModuleSubst = fmap Map.fromList - . flip Parse.sepBy (Parse.char ',') - $ parseOpenModuleSubstEntry - --- | Inverse to 'dispModSubstEntry'. -parseOpenModuleSubstEntry :: Parse.ReadP r (ModuleName, OpenModule) -parseOpenModuleSubstEntry = - do k <- parse - _ <- Parse.char '=' - v <- parse - return (k, v) - --- | Inverse to 'dispModSubst'. --- --- @since 2.2 -parsecOpenModuleSubst :: CabalParsing m => m OpenModuleSubst -parsecOpenModuleSubst = fmap Map.fromList - . flip P.sepBy (P.char ',') - $ parsecOpenModuleSubstEntry - --- | Inverse to 'dispModSubstEntry'. 
--- --- @since 2.2 -parsecOpenModuleSubstEntry :: CabalParsing m => m (ModuleName, OpenModule) -parsecOpenModuleSubstEntry = - do k <- parsec - _ <- P.char '=' - v <- parsec - return (k, v) - --- | Get the set of holes ('ModuleVar') embedded in a 'OpenModuleSubst'. --- This is NOT the domain of the substitution. -openModuleSubstFreeHoles :: OpenModuleSubst -> Set ModuleName -openModuleSubstFreeHoles insts = Set.unions (map openModuleFreeHoles (Map.elems insts)) - ------------------------------------------------------------------------ --- Conversions to UnitId - --- | When typechecking, we don't demand that a freshly instantiated --- 'IndefFullUnitId' be compiled; instead, we just depend on the --- installed indefinite unit installed at the 'ComponentId'. -abstractUnitId :: OpenUnitId -> UnitId -abstractUnitId (DefiniteUnitId def_uid) = unDefUnitId def_uid -abstractUnitId (IndefFullUnitId cid _) = newSimpleUnitId cid - --- | Take a module substitution and hash it into a string suitable for --- 'UnitId'. Note that since this takes 'Module', not 'OpenModule', --- you are responsible for recursively converting 'OpenModule' --- into 'Module'. See also "Distribution.Backpack.ReadyComponent". -hashModuleSubst :: Map ModuleName Module -> Maybe String -hashModuleSubst subst - | Map.null subst = Nothing - | otherwise = - Just . 
hashToBase62 $ - concat [ display mod_name ++ "=" ++ display m ++ "\n" - | (mod_name, m) <- Map.toList subst] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/CabalSpecVersion.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/CabalSpecVersion.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/CabalSpecVersion.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/CabalSpecVersion.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,80 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.CabalSpecVersion where - -import Prelude () -import Distribution.Compat.Prelude -import qualified Data.Set as Set - --- | Different Cabal-the-spec versions. --- --- We branch based on this at least in the parser. --- -data CabalSpecVersion - = CabalSpecOld - | CabalSpecV1_22 - | CabalSpecV1_24 - | CabalSpecV2_0 - | CabalSpecV2_2 - | CabalSpecV2_4 - deriving (Eq, Ord, Show, Read, Enum, Bounded, Typeable, Data, Generic) - -cabalSpecLatest :: CabalSpecVersion -cabalSpecLatest = CabalSpecV2_4 - -cabalSpecFeatures :: CabalSpecVersion -> Set.Set CabalFeature -cabalSpecFeatures CabalSpecOld = Set.empty -cabalSpecFeatures CabalSpecV1_22 = Set.empty -cabalSpecFeatures CabalSpecV1_24 = Set.empty -cabalSpecFeatures CabalSpecV2_0 = Set.empty -cabalSpecFeatures CabalSpecV2_2 = Set.fromList - [ Elif - , CommonStanzas - ] -cabalSpecFeatures CabalSpecV2_4 = Set.fromList - [ Elif - , CommonStanzas - , Globstar - ] - -cabalSpecSupports :: CabalSpecVersion -> [Int] -> Bool -cabalSpecSupports CabalSpecOld v = v < [1,21] -cabalSpecSupports CabalSpecV1_22 v = v < [1,23] -cabalSpecSupports CabalSpecV1_24 v = v < [1,25] -cabalSpecSupports CabalSpecV2_0 v = v < [2,1] -cabalSpecSupports CabalSpecV2_2 v = v < [2,3] -cabalSpecSupports CabalSpecV2_4 _ = True - -specHasCommonStanzas :: CabalSpecVersion -> 
HasCommonStanzas -specHasCommonStanzas CabalSpecV2_2 = HasCommonStanzas -specHasCommonStanzas CabalSpecV2_4 = HasCommonStanzas -specHasCommonStanzas _ = NoCommonStanzas - -specHasElif :: CabalSpecVersion -> HasElif -specHasElif CabalSpecV2_2 = HasElif -specHasElif CabalSpecV2_4 = HasElif -specHasElif _ = NoElif - -------------------------------------------------------------------------------- --- Features -------------------------------------------------------------------------------- - -data CabalFeature - = Elif - | CommonStanzas - | Globstar - -- ^ Implemented in #5284. Not actually a change to the parser, - -- as filename patterns are opaque to it currently. - deriving (Eq, Ord, Show, Read, Enum, Bounded, Typeable, Data, Generic) - -------------------------------------------------------------------------------- --- Booleans -------------------------------------------------------------------------------- - -data HasElif = HasElif | NoElif - deriving (Eq, Show) - -data HasCommonStanzas = HasCommonStanzas | NoCommonStanzas - deriving (Eq, Show) - -data HasGlobstar = HasGlobstar | NoGlobstar diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Binary/Class.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Binary/Class.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Binary/Class.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Binary/Class.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,519 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE Trustworthy #-} -{-# LANGUAGE DefaultSignatures #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.Binary.Class --- Copyright : Lennart Kolmodin --- License : BSD3-style (see LICENSE) --- --- Maintainer : Lennart Kolmodin --- Stability : unstable --- Portability : 
portable to Hugs and GHC. Requires the FFI and some flexible instances --- --- Typeclass and instances for binary serialization. --- ------------------------------------------------------------------------------ - -module Distribution.Compat.Binary.Class ( - - -- * The Binary class - Binary(..) - - -- * Support for generics - , GBinary(..) - - ) where - -import Data.Word - -import Data.Binary.Put -import Data.Binary.Get - -import Control.Applicative ((<$>), (<*>), (*>)) -import Foreign - -import Data.ByteString.Lazy (ByteString) -import qualified Data.ByteString.Lazy as L - -import Data.Char (chr,ord) -import Data.List (unfoldr) -import Data.Foldable (traverse_) - --- And needed for the instances: -import qualified Data.ByteString as B -import qualified Data.Map as Map -import qualified Data.Set as Set -import qualified Data.IntMap as IntMap -import qualified Data.IntSet as IntSet -import qualified Data.Ratio as R - -import qualified Data.Tree as T - -import Data.Array.Unboxed - -import GHC.Generics - -import qualified Data.Sequence as Seq -import qualified Data.Foldable as Fold - ------------------------------------------------------------------------- - -class GBinary f where - gput :: f t -> Put - gget :: Get (f t) - --- | The 'Binary' class provides 'put' and 'get', methods to encode and --- decode a Haskell value to a lazy 'ByteString'. It mirrors the 'Read' and --- 'Show' classes for textual representation of Haskell types, and is --- suitable for serialising Haskell values to disk, over the network. --- --- For decoding and generating simple external binary formats (e.g. C --- structures), Binary may be used, but in general is not suitable --- for complex protocols. Instead use the 'Put' and 'Get' primitives --- directly. --- --- Instances of Binary should satisfy the following property: --- --- > decode . encode == id --- --- That is, the 'get' and 'put' methods should be the inverse of each --- other. 
A range of instances are provided for basic Haskell types. --- -class Binary t where - -- | Encode a value in the Put monad. - put :: t -> Put - -- | Decode a value in the Get monad - get :: Get t - - default put :: (Generic t, GBinary (Rep t)) => t -> Put - put = gput . from - - default get :: (Generic t, GBinary (Rep t)) => Get t - get = to `fmap` gget - ------------------------------------------------------------------------- --- Simple instances - --- The () type need never be written to disk: values of singleton type --- can be reconstructed from the type alone -instance Binary () where - put () = return () - get = return () - --- Bools are encoded as a byte in the range 0 .. 1 -instance Binary Bool where - put = putWord8 . fromIntegral . fromEnum - get = fmap (toEnum . fromIntegral) getWord8 - --- Values of type 'Ordering' are encoded as a byte in the range 0 .. 2 -instance Binary Ordering where - put = putWord8 . fromIntegral . fromEnum - get = fmap (toEnum . fromIntegral) getWord8 - ------------------------------------------------------------------------- --- Words and Ints - --- Words8s are written as bytes -instance Binary Word8 where - put = putWord8 - get = getWord8 - --- Words16s are written as 2 bytes in big-endian (network) order -instance Binary Word16 where - put = putWord16be - get = getWord16be - --- Words32s are written as 4 bytes in big-endian (network) order -instance Binary Word32 where - put = putWord32be - get = getWord32be - --- Words64s are written as 8 bytes in big-endian (network) order -instance Binary Word64 where - put = putWord64be - get = getWord64be - --- Int8s are written as a single byte. 
-instance Binary Int8 where - put i = put (fromIntegral i :: Word8) - get = fmap fromIntegral (get :: Get Word8) - --- Int16s are written as a 2 bytes in big endian format -instance Binary Int16 where - put i = put (fromIntegral i :: Word16) - get = fmap fromIntegral (get :: Get Word16) - --- Int32s are written as a 4 bytes in big endian format -instance Binary Int32 where - put i = put (fromIntegral i :: Word32) - get = fmap fromIntegral (get :: Get Word32) - --- Int64s are written as a 4 bytes in big endian format -instance Binary Int64 where - put i = put (fromIntegral i :: Word64) - get = fmap fromIntegral (get :: Get Word64) - ------------------------------------------------------------------------- - --- Words are are written as Word64s, that is, 8 bytes in big endian format -instance Binary Word where - put i = put (fromIntegral i :: Word64) - get = fmap fromIntegral (get :: Get Word64) - --- Ints are are written as Int64s, that is, 8 bytes in big endian format -instance Binary Int where - put i = put (fromIntegral i :: Int64) - get = fmap fromIntegral (get :: Get Int64) - ------------------------------------------------------------------------- --- --- Portable, and pretty efficient, serialisation of Integer --- - --- Fixed-size type for a subset of Integer -type SmallInt = Int32 - --- Integers are encoded in two ways: if they fit inside a SmallInt, --- they're written as a byte tag, and that value. If the Integer value --- is too large to fit in a SmallInt, it is written as a byte array, --- along with a sign and length field. 
- -instance Binary Integer where - - {-# INLINE put #-} - put n | n >= lo && n <= hi = do - putWord8 0 - put (fromIntegral n :: SmallInt) -- fast path - where - lo = fromIntegral (minBound :: SmallInt) :: Integer - hi = fromIntegral (maxBound :: SmallInt) :: Integer - - put n = do - putWord8 1 - put sign - put (unroll (abs n)) -- unroll the bytes - where - sign = fromIntegral (signum n) :: Word8 - - {-# INLINE get #-} - get = do - tag <- get :: Get Word8 - case tag of - 0 -> fmap fromIntegral (get :: Get SmallInt) - _ -> do sign <- get - bytes <- get - let v = roll bytes - return $! if sign == (1 :: Word8) then v else - v - --- --- Fold and unfold an Integer to and from a list of its bytes --- -unroll :: Integer -> [Word8] -unroll = unfoldr step - where - step 0 = Nothing - step i = Just (fromIntegral i, i `shiftR` 8) - -roll :: [Word8] -> Integer -roll = foldr unstep 0 - where - unstep b a = a `shiftL` 8 .|. fromIntegral b - -{- - --- --- An efficient, raw serialisation for Integer (GHC only) --- - --- TODO This instance is not architecture portable. GMP stores numbers as --- arrays of machine sized words, so the byte format is not portable across --- architectures with different endianness and word size. - -import Data.ByteString.Base (toForeignPtr,unsafePackAddress, memcpy) -import GHC.Base hiding (ord, chr) -import GHC.Prim -import GHC.Ptr (Ptr(..)) -import GHC.IOBase (IO(..)) - -instance Binary Integer where - put (S# i) = putWord8 0 *> put (I# i) - put (J# s ba) = do - putWord8 1 - put (I# s) - put (BA ba) - - get = do - b <- getWord8 - case b of - 0 -> do (I# i#) <- get - return (S# i#) - _ -> do (I# s#) <- get - (BA a#) <- get - return (J# s# a#) - -instance Binary ByteArray where - - -- Pretty safe. - put (BA ba) = - let sz = sizeofByteArray# ba -- (primitive) in *bytes* - addr = byteArrayContents# ba - bs = unsafePackAddress (I# sz) addr - in put bs -- write as a ByteString. easy, yay! - - -- Pretty scary. 
Should be quick though - get = do - (fp, off, n@(I# sz)) <- fmap toForeignPtr get -- so decode a ByteString - assert (off == 0) $ return $ unsafePerformIO $ do - (MBA arr) <- newByteArray sz -- and copy it into a ByteArray# - let to = byteArrayContents# (unsafeCoerce# arr) -- urk, is this safe? - withForeignPtr fp $ \from -> memcpy (Ptr to) from (fromIntegral n) - freezeByteArray arr - --- wrapper for ByteArray# -data ByteArray = BA {-# UNPACK #-} !ByteArray# -data MBA = MBA {-# UNPACK #-} !(MutableByteArray# RealWorld) - -newByteArray :: Int# -> IO MBA -newByteArray sz = IO $ \s -> - case newPinnedByteArray# sz s of { (# s', arr #) -> - (# s', MBA arr #) } - -freezeByteArray :: MutableByteArray# RealWorld -> IO ByteArray -freezeByteArray arr = IO $ \s -> - case unsafeFreezeByteArray# arr s of { (# s', arr' #) -> - (# s', BA arr' #) } - --} - -instance (Binary a,Integral a) => Binary (R.Ratio a) where - put r = put (R.numerator r) *> put (R.denominator r) - get = (R.%) <$> get <*> get - ------------------------------------------------------------------------- - --- Char is serialised as UTF-8 -instance Binary Char where - put a | c <= 0x7f = put (fromIntegral c :: Word8) - | c <= 0x7ff = do put (0xc0 .|. y) - put (0x80 .|. z) - | c <= 0xffff = do put (0xe0 .|. x) - put (0x80 .|. y) - put (0x80 .|. z) - | c <= 0x10ffff = do put (0xf0 .|. w) - put (0x80 .|. x) - put (0x80 .|. y) - put (0x80 .|. z) - | otherwise = error "Not a valid Unicode code point" - where - c = ord a - z, y, x, w :: Word8 - z = fromIntegral (c .&. 0x3f) - y = fromIntegral (shiftR c 6 .&. 0x3f) - x = fromIntegral (shiftR c 12 .&. 0x3f) - w = fromIntegral (shiftR c 18 .&. 0x7) - - get = do - let getByte = fmap (fromIntegral :: Word8 -> Int) get - shiftL6 = flip shiftL 6 :: Int -> Int - w <- getByte - r <- case () of - _ | w < 0x80 -> return w - | w < 0xe0 -> do - x <- fmap (xor 0x80) getByte - return (x .|. 
shiftL6 (xor 0xc0 w)) - | w < 0xf0 -> do - x <- fmap (xor 0x80) getByte - y <- fmap (xor 0x80) getByte - return (y .|. shiftL6 (x .|. shiftL6 - (xor 0xe0 w))) - | otherwise -> do - x <- fmap (xor 0x80) getByte - y <- fmap (xor 0x80) getByte - z <- fmap (xor 0x80) getByte - return (z .|. shiftL6 (y .|. shiftL6 - (x .|. shiftL6 (xor 0xf0 w)))) - return $! chr r - ------------------------------------------------------------------------- --- Instances for the first few tuples - -instance (Binary a, Binary b) => Binary (a,b) where - put (a,b) = put a *> put b - get = (,) <$> get <*> get - -instance (Binary a, Binary b, Binary c) => Binary (a,b,c) where - put (a,b,c) = put a *> put b *> put c - get = (,,) <$> get <*> get <*> get - -instance (Binary a, Binary b, Binary c, Binary d) => Binary (a,b,c,d) where - put (a,b,c,d) = put a *> put b *> put c *> put d - get = (,,,) <$> get <*> get <*> get <*> get - -instance (Binary a, Binary b, Binary c, Binary d, Binary e) => Binary (a,b,c,d,e) where - put (a,b,c,d,e) = put a *> put b *> put c *> put d *> put e - get = (,,,,) <$> get <*> get <*> get <*> get <*> get - --- --- and now just recurse: --- - -instance (Binary a, Binary b, Binary c, Binary d, Binary e, Binary f) - => Binary (a,b,c,d,e,f) where - put (a,b,c,d,e,f) = put (a,(b,c,d,e,f)) - get = do (a,(b,c,d,e,f)) <- get ; return (a,b,c,d,e,f) - -instance (Binary a, Binary b, Binary c, Binary d, Binary e, Binary f, Binary g) - => Binary (a,b,c,d,e,f,g) where - put (a,b,c,d,e,f,g) = put (a,(b,c,d,e,f,g)) - get = do (a,(b,c,d,e,f,g)) <- get ; return (a,b,c,d,e,f,g) - -instance (Binary a, Binary b, Binary c, Binary d, Binary e, - Binary f, Binary g, Binary h) - => Binary (a,b,c,d,e,f,g,h) where - put (a,b,c,d,e,f,g,h) = put (a,(b,c,d,e,f,g,h)) - get = do (a,(b,c,d,e,f,g,h)) <- get ; return (a,b,c,d,e,f,g,h) - -instance (Binary a, Binary b, Binary c, Binary d, Binary e, - Binary f, Binary g, Binary h, Binary i) - => Binary (a,b,c,d,e,f,g,h,i) where - put (a,b,c,d,e,f,g,h,i) = 
put (a,(b,c,d,e,f,g,h,i)) - get = do (a,(b,c,d,e,f,g,h,i)) <- get ; return (a,b,c,d,e,f,g,h,i) - -instance (Binary a, Binary b, Binary c, Binary d, Binary e, - Binary f, Binary g, Binary h, Binary i, Binary j) - => Binary (a,b,c,d,e,f,g,h,i,j) where - put (a,b,c,d,e,f,g,h,i,j) = put (a,(b,c,d,e,f,g,h,i,j)) - get = do (a,(b,c,d,e,f,g,h,i,j)) <- get ; return (a,b,c,d,e,f,g,h,i,j) - ------------------------------------------------------------------------- --- Container types - -instance Binary a => Binary [a] where - put l = put (length l) *> traverse_ put l - get = do n <- get :: Get Int - getMany n - --- | 'getMany n' get 'n' elements in order, without blowing the stack. -getMany :: Binary a => Int -> Get [a] -getMany n = go [] n - where - go xs 0 = return $! reverse xs - go xs i = do x <- get - -- we must seq x to avoid stack overflows due to laziness in - -- (>>=) - x `seq` go (x:xs) (i-1) -{-# INLINE getMany #-} - -instance (Binary a) => Binary (Maybe a) where - put Nothing = putWord8 0 - put (Just x) = putWord8 1 *> put x - get = do - w <- getWord8 - case w of - 0 -> return Nothing - _ -> fmap Just get - -instance (Binary a, Binary b) => Binary (Either a b) where - put (Left a) = putWord8 0 *> put a - put (Right b) = putWord8 1 *> put b - get = do - w <- getWord8 - case w of - 0 -> fmap Left get - _ -> fmap Right get - ------------------------------------------------------------------------- --- ByteStrings (have specially efficient instances) - -instance Binary B.ByteString where - put bs = do put (B.length bs) - putByteString bs - get = get >>= getByteString - --- --- Using old versions of fps, this is a type synonym, and non portable --- --- Requires 'flexible instances' --- -instance Binary ByteString where - put bs = do put (fromIntegral (L.length bs) :: Int) - putLazyByteString bs - get = get >>= getLazyByteString - ------------------------------------------------------------------------- --- Maps and Sets - -instance (Binary a) => Binary (Set.Set a) where 
- put s = put (Set.size s) *> traverse_ put (Set.toAscList s) - get = fmap Set.fromDistinctAscList get - -instance (Binary k, Binary e) => Binary (Map.Map k e) where - put m = put (Map.size m) *> traverse_ put (Map.toAscList m) - get = fmap Map.fromDistinctAscList get - -instance Binary IntSet.IntSet where - put s = put (IntSet.size s) *> traverse_ put (IntSet.toAscList s) - get = fmap IntSet.fromDistinctAscList get - -instance (Binary e) => Binary (IntMap.IntMap e) where - put m = put (IntMap.size m) *> traverse_ put (IntMap.toAscList m) - get = fmap IntMap.fromDistinctAscList get - ------------------------------------------------------------------------- --- Queues and Sequences - -instance (Binary e) => Binary (Seq.Seq e) where - put s = put (Seq.length s) *> Fold.traverse_ put s - get = do n <- get :: Get Int - rep Seq.empty n get - where rep xs 0 _ = return $! xs - rep xs n g = xs `seq` n `seq` do - x <- g - rep (xs Seq.|> x) (n-1) g - ------------------------------------------------------------------------- --- Floating point - -instance Binary Double where - put d = put (decodeFloat d) - get = encodeFloat <$> get <*> get - -instance Binary Float where - put f = put (decodeFloat f) - get = encodeFloat <$> get <*> get - ------------------------------------------------------------------------- --- Trees - -instance (Binary e) => Binary (T.Tree e) where - put (T.Node r s) = put r *> put s - get = T.Node <$> get <*> get - ------------------------------------------------------------------------- --- Arrays - -instance (Binary i, Ix i, Binary e) => Binary (Array i e) where - put a = do - put (bounds a) - put (rangeSize $ bounds a) -- write the length - traverse_ put (elems a) -- now the elems. - get = do - bs <- get - n <- get -- read the length - xs <- getMany n -- now the elems. - return (listArray bs xs) - --- --- The IArray UArray e constraint is non portable. 
Requires flexible instances --- -instance (Binary i, Ix i, Binary e, IArray UArray e) => Binary (UArray i e) where - put a = do - put (bounds a) - put (rangeSize $ bounds a) -- now write the length - traverse_ put (elems a) - get = do - bs <- get - n <- get - xs <- getMany n - return (listArray bs xs) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Binary/Generic.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Binary/Generic.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Binary/Generic.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Binary/Generic.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,128 +0,0 @@ -{-# LANGUAGE BangPatterns, CPP, FlexibleInstances, KindSignatures, - ScopedTypeVariables, Trustworthy, TypeOperators, TypeSynonymInstances #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.Binary.Generic --- Copyright : Bryan O'Sullivan --- License : BSD3-style (see LICENSE) --- --- Maintainer : Bryan O'Sullivan --- Stability : unstable --- Portability : Only works with GHC 7.2 and newer --- --- Instances for supporting GHC generics. 
--- ------------------------------------------------------------------------------ -module Distribution.Compat.Binary.Generic - ( - ) where - -import Control.Applicative -import Distribution.Compat.Binary.Class -import Data.Binary.Get -import Data.Binary.Put -import Data.Bits -import Data.Word -import GHC.Generics - --- Type without constructors -instance GBinary V1 where - gput _ = return () - gget = return undefined - --- Constructor without arguments -instance GBinary U1 where - gput U1 = return () - gget = return U1 - --- Product: constructor with parameters -instance (GBinary a, GBinary b) => GBinary (a :*: b) where - gput (x :*: y) = gput x >> gput y - gget = (:*:) <$> gget <*> gget - --- Metadata (constructor name, etc) -instance GBinary a => GBinary (M1 i c a) where - gput = gput . unM1 - gget = M1 <$> gget - --- Constants, additional parameters, and rank-1 recursion -instance Binary a => GBinary (K1 i a) where - gput = put . unK1 - gget = K1 <$> get - --- Borrowed from the cereal package. - --- The following GBinary instance for sums has support for serializing --- types with up to 2^64-1 constructors. It will use the minimal --- number of bytes needed to encode the constructor. For example when --- a type has 2^8 constructors or less it will use a single byte to --- encode the constructor. If it has 2^16 constructors or less it will --- use two bytes, and so on till 2^64-1. 
- -#define GUARD(WORD) (size - 1) <= fromIntegral (maxBound :: WORD) -#define PUTSUM(WORD) GUARD(WORD) = putSum (0 :: WORD) (fromIntegral size) -#define GETSUM(WORD) GUARD(WORD) = (get :: Get WORD) >>= checkGetSum (fromIntegral size) - -instance ( GSum a, GSum b - , GBinary a, GBinary b - , SumSize a, SumSize b) => GBinary (a :+: b) where - gput | PUTSUM(Word8) | PUTSUM(Word16) | PUTSUM(Word32) | PUTSUM(Word64) - | otherwise = sizeError "encode" size - where - size = unTagged (sumSize :: Tagged (a :+: b) Word64) - - gget | GETSUM(Word8) | GETSUM(Word16) | GETSUM(Word32) | GETSUM(Word64) - | otherwise = sizeError "decode" size - where - size = unTagged (sumSize :: Tagged (a :+: b) Word64) - -sizeError :: Show size => String -> size -> error -sizeError s size = - error $ "Can't " ++ s ++ " a type with " ++ show size ++ " constructors" - ------------------------------------------------------------------------- - -checkGetSum :: (Ord word, Num word, Bits word, GSum f) - => word -> word -> Get (f a) -checkGetSum size code | code < size = getSum code size - | otherwise = fail "Unknown encoding for constructor" -{-# INLINE checkGetSum #-} - -class GSum f where - getSum :: (Ord word, Num word, Bits word) => word -> word -> Get (f a) - putSum :: (Num w, Bits w, Binary w) => w -> w -> f a -> Put - -instance (GSum a, GSum b, GBinary a, GBinary b) => GSum (a :+: b) where - getSum !code !size | code < sizeL = L1 <$> getSum code sizeL - | otherwise = R1 <$> getSum (code - sizeL) sizeR - where - sizeL = size `shiftR` 1 - sizeR = size - sizeL - - putSum !code !size s = case s of - L1 x -> putSum code sizeL x - R1 x -> putSum (code + sizeL) sizeR x - where - sizeL = size `shiftR` 1 - sizeR = size - sizeL - -instance GBinary a => GSum (C1 c a) where - getSum _ _ = gget - - putSum !code _ x = put code *> gput x - ------------------------------------------------------------------------- - -class SumSize f where - sumSize :: Tagged f Word64 - -newtype Tagged (s :: * -> *) b = Tagged 
{unTagged :: b} - -instance (SumSize a, SumSize b) => SumSize (a :+: b) where - sumSize = Tagged $ unTagged (sumSize :: Tagged a Word64) + - unTagged (sumSize :: Tagged b Word64) - -instance SumSize (C1 c a) where - sumSize = Tagged 1 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Binary.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Binary.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Binary.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Binary.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,71 +0,0 @@ -{-# LANGUAGE CPP #-} -#if __GLASGOW_HASKELL__ >= 711 -{-# LANGUAGE PatternSynonyms #-} -#endif - -#ifndef MIN_VERSION_binary -#define MIN_VERSION_binary(x, y, z) 0 -#endif - -module Distribution.Compat.Binary - ( decodeOrFailIO - , decodeFileOrFail' -#if __GLASGOW_HASKELL__ >= 708 || MIN_VERSION_binary(0,7,0) - , module Data.Binary -#else - , Binary(..) - , decode, encode, encodeFile -#endif - ) where - -import Control.Exception (catch, evaluate) -#if __GLASGOW_HASKELL__ >= 711 -import Control.Exception (pattern ErrorCall) -#else -import Control.Exception (ErrorCall(..)) -#endif -import Data.ByteString.Lazy (ByteString) - -#if __GLASGOW_HASKELL__ >= 708 || MIN_VERSION_binary(0,7,0) - -import Data.Binary - --- | Lazily reconstruct a value previously written to a file. -decodeFileOrFail' :: Binary a => FilePath -> IO (Either String a) -decodeFileOrFail' f = either (Left . snd) Right `fmap` decodeFileOrFail f - -#else - -import Data.Binary.Get -import Data.Binary.Put -import qualified Data.ByteString.Lazy as BSL - -import Distribution.Compat.Binary.Class -import Distribution.Compat.Binary.Generic () - --- | Decode a value from a lazy ByteString, reconstructing the --- original structure. 
--- -decode :: Binary a => ByteString -> a -decode = runGet get - --- | Encode a value using binary serialisation to a lazy ByteString. --- -encode :: Binary a => a -> ByteString -encode = runPut . put -{-# INLINE encode #-} - --- | Lazily reconstruct a value previously written to a file. -decodeFileOrFail' :: Binary a => FilePath -> IO (Either String a) -decodeFileOrFail' f = decodeOrFailIO =<< BSL.readFile f - --- | Lazily serialise a value to a file -encodeFile :: Binary a => FilePath -> a -> IO () -encodeFile f = BSL.writeFile f . encode - -#endif - -decodeOrFailIO :: Binary a => ByteString -> IO (Either String a) -decodeOrFailIO bs = - catch (evaluate (decode bs) >>= return . Right) - $ \(ErrorCall str) -> return $ Left str diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/CharParsing.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/CharParsing.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/CharParsing.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/CharParsing.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,356 +0,0 @@ -{-# LANGUAGE GADTs #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# OPTIONS_GHC -fspec-constr -fspec-constr-count=8 #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.CharParsing --- Copyright : (c) Edward Kmett 2011 --- License : BSD3 --- --- Maintainer : ekmett@gmail.com --- Stability : experimental --- Portability : non-portable --- --- Parsers for character streams --- --- Originally in @parsers@ package. 
--- ------------------------------------------------------------------------------ -module Distribution.Compat.CharParsing - ( - -- * Combinators - oneOf -- :: CharParsing m => [Char] -> m Char - , noneOf -- :: CharParsing m => [Char] -> m Char - , spaces -- :: CharParsing m => m () - , space -- :: CharParsing m => m Char - , newline -- :: CharParsing m => m Char - , tab -- :: CharParsing m => m Char - , upper -- :: CharParsing m => m Char - , lower -- :: CharParsing m => m Char - , alphaNum -- :: CharParsing m => m Char - , letter -- :: CharParsing m => m Char - , digit -- :: CharParsing m => m Char - , hexDigit -- :: CharParsing m => m Char - , octDigit -- :: CharParsing m => m Char - , satisfyRange -- :: CharParsing m => Char -> Char -> m Char - -- * Class - , CharParsing(..) - -- * Cabal additions - , integral - , munch1 - , munch - , skipSpaces1 - , module Distribution.Compat.Parsing - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Control.Monad.Trans.Class (lift) -import Control.Monad.Trans.State.Lazy as Lazy -import Control.Monad.Trans.State.Strict as Strict -import Control.Monad.Trans.Writer.Lazy as Lazy -import Control.Monad.Trans.Writer.Strict as Strict -import Control.Monad.Trans.RWS.Lazy as Lazy -import Control.Monad.Trans.RWS.Strict as Strict -import Control.Monad.Trans.Reader (ReaderT (..)) -import Control.Monad.Trans.Identity (IdentityT (..)) -import Data.Char -import Data.Text (Text, unpack) - -import qualified Text.Parsec as Parsec -import qualified Distribution.Compat.ReadP as ReadP - -import Distribution.Compat.Parsing - --- | @oneOf cs@ succeeds if the current character is in the supplied --- list of characters @cs@. Returns the parsed character. See also --- 'satisfy'. 
--- --- > vowel = oneOf "aeiou" -oneOf :: CharParsing m => [Char] -> m Char -oneOf xs = satisfy (\c -> c `elem` xs) -{-# INLINE oneOf #-} - --- | As the dual of 'oneOf', @noneOf cs@ succeeds if the current --- character is /not/ in the supplied list of characters @cs@. Returns the --- parsed character. --- --- > consonant = noneOf "aeiou" -noneOf :: CharParsing m => [Char] -> m Char -noneOf xs = satisfy (\c -> c `notElem` xs) -{-# INLINE noneOf #-} - --- | Skips /zero/ or more white space characters. See also 'skipMany'. -spaces :: CharParsing m => m () -spaces = skipMany space "white space" -{-# INLINE spaces #-} - --- | Parses a white space character (any character which satisfies 'isSpace') --- Returns the parsed character. -space :: CharParsing m => m Char -space = satisfy isSpace "space" -{-# INLINE space #-} - --- | Parses a newline character (\'\\n\'). Returns a newline character. -newline :: CharParsing m => m Char -newline = char '\n' "new-line" -{-# INLINE newline #-} - --- | Parses a tab character (\'\\t\'). Returns a tab character. -tab :: CharParsing m => m Char -tab = char '\t' "tab" -{-# INLINE tab #-} - --- | Parses an upper case letter. Returns the parsed character. -upper :: CharParsing m => m Char -upper = satisfy isUpper "uppercase letter" -{-# INLINE upper #-} - --- | Parses a lower case character. Returns the parsed character. -lower :: CharParsing m => m Char -lower = satisfy isLower "lowercase letter" -{-# INLINE lower #-} - --- | Parses a letter or digit. Returns the parsed character. -alphaNum :: CharParsing m => m Char -alphaNum = satisfy isAlphaNum "letter or digit" -{-# INLINE alphaNum #-} - --- | Parses a letter (an upper case or lower case character). Returns the --- parsed character. -letter :: CharParsing m => m Char -letter = satisfy isAlpha "letter" -{-# INLINE letter #-} - --- | Parses a digit. Returns the parsed character. 
-digit :: CharParsing m => m Char -digit = satisfy isDigit "digit" -{-# INLINE digit #-} - --- | Parses a hexadecimal digit (a digit or a letter between \'a\' and --- \'f\' or \'A\' and \'F\'). Returns the parsed character. -hexDigit :: CharParsing m => m Char -hexDigit = satisfy isHexDigit "hexadecimal digit" -{-# INLINE hexDigit #-} - --- | Parses an octal digit (a character between \'0\' and \'7\'). Returns --- the parsed character. -octDigit :: CharParsing m => m Char -octDigit = satisfy isOctDigit "octal digit" -{-# INLINE octDigit #-} - -satisfyRange :: CharParsing m => Char -> Char -> m Char -satisfyRange a z = satisfy (\c -> c >= a && c <= z) -{-# INLINE satisfyRange #-} - --- | Additional functionality needed to parse character streams. -class Parsing m => CharParsing m where - -- | Parse a single character of the input, with UTF-8 decoding - satisfy :: (Char -> Bool) -> m Char - - -- | @char c@ parses a single character @c@. Returns the parsed - -- character (i.e. @c@). - -- - -- /e.g./ - -- - -- @semiColon = 'char' ';'@ - char :: Char -> m Char - char c = satisfy (c ==) show [c] - {-# INLINE char #-} - - -- | @notChar c@ parses any single character other than @c@. Returns the parsed - -- character. - notChar :: Char -> m Char - notChar c = satisfy (c /=) - {-# INLINE notChar #-} - - -- | This parser succeeds for any character. Returns the parsed character. - anyChar :: m Char - anyChar = satisfy (const True) - {-# INLINE anyChar #-} - - -- | @string s@ parses a sequence of characters given by @s@. Returns - -- the parsed string (i.e. @s@). - -- - -- > divOrMod = string "div" - -- > <|> string "mod" - string :: String -> m String - string s = s <$ try (traverse_ char s) show s - {-# INLINE string #-} - - -- | @text t@ parses a sequence of characters determined by the text @t@ Returns - -- the parsed text fragment (i.e. @t@). 
- -- - -- Using @OverloadedStrings@: - -- - -- > divOrMod = text "div" - -- > <|> text "mod" - text :: Text -> m Text - text t = t <$ string (unpack t) - {-# INLINE text #-} - -instance (CharParsing m, MonadPlus m) => CharParsing (Lazy.StateT s m) where - satisfy = lift . satisfy - {-# INLINE satisfy #-} - char = lift . char - {-# INLINE char #-} - notChar = lift . notChar - {-# INLINE notChar #-} - anyChar = lift anyChar - {-# INLINE anyChar #-} - string = lift . string - {-# INLINE string #-} - text = lift . text - {-# INLINE text #-} - -instance (CharParsing m, MonadPlus m) => CharParsing (Strict.StateT s m) where - satisfy = lift . satisfy - {-# INLINE satisfy #-} - char = lift . char - {-# INLINE char #-} - notChar = lift . notChar - {-# INLINE notChar #-} - anyChar = lift anyChar - {-# INLINE anyChar #-} - string = lift . string - {-# INLINE string #-} - text = lift . text - {-# INLINE text #-} - -instance (CharParsing m, MonadPlus m) => CharParsing (ReaderT e m) where - satisfy = lift . satisfy - {-# INLINE satisfy #-} - char = lift . char - {-# INLINE char #-} - notChar = lift . notChar - {-# INLINE notChar #-} - anyChar = lift anyChar - {-# INLINE anyChar #-} - string = lift . string - {-# INLINE string #-} - text = lift . text - {-# INLINE text #-} - -instance (CharParsing m, MonadPlus m, Monoid w) => CharParsing (Strict.WriterT w m) where - satisfy = lift . satisfy - {-# INLINE satisfy #-} - char = lift . char - {-# INLINE char #-} - notChar = lift . notChar - {-# INLINE notChar #-} - anyChar = lift anyChar - {-# INLINE anyChar #-} - string = lift . string - {-# INLINE string #-} - text = lift . text - {-# INLINE text #-} - -instance (CharParsing m, MonadPlus m, Monoid w) => CharParsing (Lazy.WriterT w m) where - satisfy = lift . satisfy - {-# INLINE satisfy #-} - char = lift . char - {-# INLINE char #-} - notChar = lift . notChar - {-# INLINE notChar #-} - anyChar = lift anyChar - {-# INLINE anyChar #-} - string = lift . 
string - {-# INLINE string #-} - text = lift . text - {-# INLINE text #-} - -instance (CharParsing m, MonadPlus m, Monoid w) => CharParsing (Lazy.RWST r w s m) where - satisfy = lift . satisfy - {-# INLINE satisfy #-} - char = lift . char - {-# INLINE char #-} - notChar = lift . notChar - {-# INLINE notChar #-} - anyChar = lift anyChar - {-# INLINE anyChar #-} - string = lift . string - {-# INLINE string #-} - text = lift . text - {-# INLINE text #-} - -instance (CharParsing m, MonadPlus m, Monoid w) => CharParsing (Strict.RWST r w s m) where - satisfy = lift . satisfy - {-# INLINE satisfy #-} - char = lift . char - {-# INLINE char #-} - notChar = lift . notChar - {-# INLINE notChar #-} - anyChar = lift anyChar - {-# INLINE anyChar #-} - string = lift . string - {-# INLINE string #-} - text = lift . text - {-# INLINE text #-} - -instance (CharParsing m, MonadPlus m) => CharParsing (IdentityT m) where - satisfy = lift . satisfy - {-# INLINE satisfy #-} - char = lift . char - {-# INLINE char #-} - notChar = lift . notChar - {-# INLINE notChar #-} - anyChar = lift anyChar - {-# INLINE anyChar #-} - string = lift . string - {-# INLINE string #-} - text = lift . 
text - {-# INLINE text #-} - -instance Parsec.Stream s m Char => CharParsing (Parsec.ParsecT s u m) where - satisfy = Parsec.satisfy - char = Parsec.char - notChar c = Parsec.satisfy (/= c) - anyChar = Parsec.anyChar - string = Parsec.string - -instance t ~ Char => CharParsing (ReadP.Parser r t) where - satisfy = ReadP.satisfy - char = ReadP.char - notChar c = ReadP.satisfy (/= c) - anyChar = ReadP.get - string = ReadP.string - -------------------------------------------------------------------------------- --- Our additions -------------------------------------------------------------------------------- - -integral :: (CharParsing m, Integral a) => m a -integral = toNumber <$> some d "integral" - where - toNumber = foldl' (\a b -> a * 10 + b) 0 - d = f <$> satisfyRange '0' '9' - f '0' = 0 - f '1' = 1 - f '2' = 2 - f '3' = 3 - f '4' = 4 - f '5' = 5 - f '6' = 6 - f '7' = 7 - f '8' = 8 - f '9' = 9 - f _ = error "panic! integral" -{-# INLINE integral #-} - --- | Greedily munch characters while predicate holds. --- Require at least one character. -munch1 :: CharParsing m => (Char -> Bool) -> m String -munch1 = some . satisfy -{-# INLINE munch1 #-} - --- | Greedely munch characters while predicate holds. --- Always succeeds. -munch :: CharParsing m => (Char -> Bool) -> m String -munch = many . 
satisfy -{-# INLINE munch #-} - -skipSpaces1 :: CharParsing m => m () -skipSpaces1 = skipSome space -{-# INLINE skipSpaces1 #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/CopyFile.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/CopyFile.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/CopyFile.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/CopyFile.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,153 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# OPTIONS_HADDOCK hide #-} -module Distribution.Compat.CopyFile ( - copyFile, - copyFileChanged, - filesEqual, - copyOrdinaryFile, - copyExecutableFile, - setFileOrdinary, - setFileExecutable, - setDirOrdinary, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Compat.Exception - -#ifndef mingw32_HOST_OS -import Distribution.Compat.Internal.TempFile - -import Control.Exception - ( bracketOnError, throwIO ) -import qualified Data.ByteString.Lazy as BSL -import System.IO.Error - ( ioeSetLocation ) -import System.Directory - ( doesFileExist, renameFile, removeFile ) -import System.FilePath - ( takeDirectory ) -import System.IO - ( IOMode(ReadMode), hClose, hGetBuf, hPutBuf, hFileSize - , withBinaryFile ) -import Foreign - ( allocaBytes ) - -import System.Posix.Types - ( FileMode ) -import System.Posix.Internals - ( c_chmod, withFilePath ) -import Foreign.C - ( throwErrnoPathIfMinus1_ ) - -#else /* else mingw32_HOST_OS */ - -import Control.Exception - ( throwIO ) -import qualified Data.ByteString.Lazy as BSL -import System.IO.Error - ( ioeSetLocation ) -import System.Directory - ( doesFileExist ) -import System.FilePath - ( isRelative, normalise ) -import System.IO - ( IOMode(ReadMode), hFileSize - , withBinaryFile ) - -import qualified System.Win32.File as Win32 ( copyFile ) -#endif /* mingw32_HOST_OS */ 
- -copyOrdinaryFile, copyExecutableFile :: FilePath -> FilePath -> NoCallStackIO () -copyOrdinaryFile src dest = copyFile src dest >> setFileOrdinary dest -copyExecutableFile src dest = copyFile src dest >> setFileExecutable dest - -setFileOrdinary, setFileExecutable, setDirOrdinary :: FilePath -> NoCallStackIO () -#ifndef mingw32_HOST_OS -setFileOrdinary path = setFileMode path 0o644 -- file perms -rw-r--r-- -setFileExecutable path = setFileMode path 0o755 -- file perms -rwxr-xr-x - -setFileMode :: FilePath -> FileMode -> NoCallStackIO () -setFileMode name m = - withFilePath name $ \s -> do - throwErrnoPathIfMinus1_ "setFileMode" name (c_chmod s m) -#else -setFileOrdinary _ = return () -setFileExecutable _ = return () -#endif --- This happens to be true on Unix and currently on Windows too: -setDirOrdinary = setFileExecutable - --- | Copies a file to a new destination. --- Often you should use `copyFileChanged` instead. -copyFile :: FilePath -> FilePath -> NoCallStackIO () -copyFile fromFPath toFPath = - copy - `catchIO` (\ioe -> throwIO (ioeSetLocation ioe "copyFile")) - where -#ifndef mingw32_HOST_OS - copy = withBinaryFile fromFPath ReadMode $ \hFrom -> - bracketOnError openTmp cleanTmp $ \(tmpFPath, hTmp) -> - do allocaBytes bufferSize $ copyContents hFrom hTmp - hClose hTmp - renameFile tmpFPath toFPath - openTmp = openBinaryTempFile (takeDirectory toFPath) ".copyFile.tmp" - cleanTmp (tmpFPath, hTmp) = do - hClose hTmp `catchIO` \_ -> return () - removeFile tmpFPath `catchIO` \_ -> return () - bufferSize = 4096 - - copyContents hFrom hTo buffer = do - count <- hGetBuf hFrom buffer bufferSize - when (count > 0) $ do - hPutBuf hTo buffer count - copyContents hFrom hTo buffer -#else - copy = Win32.copyFile (toExtendedLengthPath fromFPath) - (toExtendedLengthPath toFPath) - False - --- NOTE: Shamelessly lifted from System.Directory.Internal.Windows - --- | Add the @"\\\\?\\"@ prefix if necessary or possible. 
The path remains --- unchanged if the prefix is not added. This function can sometimes be used --- to bypass the @MAX_PATH@ length restriction in Windows API calls. -toExtendedLengthPath :: FilePath -> FilePath -toExtendedLengthPath path - | isRelative path = path - | otherwise = - case normalise path of - '\\' : '?' : '?' : '\\' : _ -> path - '\\' : '\\' : '?' : '\\' : _ -> path - '\\' : '\\' : '.' : '\\' : _ -> path - '\\' : subpath@('\\' : _) -> "\\\\?\\UNC" <> subpath - normalisedPath -> "\\\\?\\" <> normalisedPath -#endif /* mingw32_HOST_OS */ - --- | Like `copyFile`, but does not touch the target if source and destination --- are already byte-identical. This is recommended as it is useful for --- time-stamp based recompilation avoidance. -copyFileChanged :: FilePath -> FilePath -> NoCallStackIO () -copyFileChanged src dest = do - equal <- filesEqual src dest - unless equal $ copyFile src dest - --- | Checks if two files are byte-identical. --- Returns False if either of the files do not exist or if files --- are of different size. -filesEqual :: FilePath -> FilePath -> NoCallStackIO Bool -filesEqual f1 f2 = do - ex1 <- doesFileExist f1 - ex2 <- doesFileExist f2 - if not (ex1 && ex2) then return False else - withBinaryFile f1 ReadMode $ \h1 -> - withBinaryFile f2 ReadMode $ \h2 -> do - s1 <- hFileSize h1 - s2 <- hFileSize h2 - if s1 /= s2 - then return False - else do - c1 <- BSL.hGetContents h1 - c2 <- BSL.hGetContents h2 - return $! 
c1 == c2 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/CreatePipe.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/CreatePipe.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/CreatePipe.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/CreatePipe.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,77 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - -module Distribution.Compat.CreatePipe (createPipe) where - -import System.IO (Handle, hSetEncoding, localeEncoding) - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.Compat.Stack - --- The mingw32_HOST_OS CPP macro is GHC-specific -#ifdef mingw32_HOST_OS -import qualified Prelude -import Control.Exception (onException) -import Foreign.C.Error (throwErrnoIfMinus1_) -import Foreign.C.Types (CInt(..), CUInt(..)) -import Foreign.Ptr (Ptr) -import Foreign.Marshal.Array (allocaArray) -import Foreign.Storable (peek, peekElemOff) -import GHC.IO.FD (mkFD) -import GHC.IO.Device (IODeviceType(Stream)) -import GHC.IO.Handle.FD (mkHandleFromFD) -import System.IO (IOMode(ReadMode, WriteMode)) -#elif defined ghcjs_HOST_OS -#else -import System.Posix.IO (fdToHandle) -import qualified System.Posix.IO as Posix -#endif - -createPipe :: IO (Handle, Handle) --- The mingw32_HOST_OS CPP macro is GHC-specific -#ifdef mingw32_HOST_OS -createPipe = do - (readfd, writefd) <- allocaArray 2 $ \ pfds -> do - throwErrnoIfMinus1_ "_pipe" $ c__pipe pfds 2 ({- _O_BINARY -} 32768) - readfd <- peek pfds - writefd <- peekElemOff pfds 1 - return (readfd, writefd) - (do readh <- fdToHandle readfd ReadMode - writeh <- fdToHandle writefd WriteMode - hSetEncoding readh localeEncoding - hSetEncoding writeh localeEncoding - return (readh, writeh)) `onException` (close readfd >> close writefd) - where - 
fdToHandle :: CInt -> IOMode -> NoCallStackIO Handle - fdToHandle fd mode = do - (fd', deviceType) <- mkFD fd mode (Just (Stream, 0, 0)) False False - mkHandleFromFD fd' deviceType "" mode False Nothing - - close :: CInt -> IO () - close = throwErrnoIfMinus1_ "_close" . c__close - where _ = callStack -- TODO: attach call stack to exception - - _ = callStack -- TODO: attach call stack to exceptions - -foreign import ccall "io.h _pipe" c__pipe :: - Ptr CInt -> CUInt -> CInt -> Prelude.IO CInt - -foreign import ccall "io.h _close" c__close :: - CInt -> Prelude.IO CInt -#elif defined ghcjs_HOST_OS -createPipe = error "createPipe" - where - _ = callStack -#else -createPipe = do - (readfd, writefd) <- Posix.createPipe - readh <- fdToHandle readfd - writeh <- fdToHandle writefd - hSetEncoding readh localeEncoding - hSetEncoding writeh localeEncoding - return (readh, writeh) - where - _ = callStack -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Directory.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Directory.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Directory.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Directory.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,44 +0,0 @@ -{-# LANGUAGE CPP #-} - -module Distribution.Compat.Directory -( listDirectory -, makeAbsolute -, doesPathExist -) where - -#if MIN_VERSION_directory(1,2,7) -import System.Directory as Dir hiding (doesPathExist) -import System.Directory (doesPathExist) -#else -import System.Directory as Dir -#endif -#if !MIN_VERSION_directory(1,2,2) -import System.FilePath as Path -#endif - -#if !MIN_VERSION_directory(1,2,5) - -listDirectory :: FilePath -> IO [FilePath] -listDirectory path = - filter f `fmap` Dir.getDirectoryContents path - where f filename = filename /= "." && filename /= ".." 
- -#endif - -#if !MIN_VERSION_directory(1,2,2) - -makeAbsolute :: FilePath -> IO FilePath -makeAbsolute p | Path.isAbsolute p = return p - | otherwise = do - cwd <- Dir.getCurrentDirectory - return $ cwd p - -#endif - -#if !MIN_VERSION_directory(1,2,7) - -doesPathExist :: FilePath -> IO Bool -doesPathExist path = (||) <$> doesDirectoryExist path <*> doesFileExist path - -#endif - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/DList.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/DList.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/DList.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/DList.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,48 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.DList --- Copyright : (c) Ben Gamari 2015-2019 --- License : BSD3 --- --- Maintainer : cabal-dev@haskell.org --- Stability : experimental --- Portability : portable --- --- A very simple difference list. -module Distribution.Compat.DList ( - DList, - runDList, - singleton, - fromList, - toList, - snoc, -) where - -import Prelude () -import Distribution.Compat.Prelude - --- | Difference list. -newtype DList a = DList ([a] -> [a]) - -runDList :: DList a -> [a] -runDList (DList run) = run [] - --- | Make 'DList' with containing single element. -singleton :: a -> DList a -singleton a = DList (a:) - -fromList :: [a] -> DList a -fromList as = DList (as ++) - -toList :: DList a -> [a] -toList = runDList - -snoc :: DList a -> a -> DList a -snoc xs x = xs <> singleton x - -instance Monoid (DList a) where - mempty = DList id - mappend = (<>) - -instance Semigroup (DList a) where - DList a <> DList b = DList (a . 
b) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Environment.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Environment.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Environment.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Environment.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,134 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} -{-# OPTIONS_HADDOCK hide #-} - -module Distribution.Compat.Environment - ( getEnvironment, lookupEnv, setEnv, unsetEnv ) - where - -import Prelude () -import qualified Prelude -import Distribution.Compat.Prelude - -#ifndef mingw32_HOST_OS -#if __GLASGOW_HASKELL__ < 708 -import Foreign.C.Error (throwErrnoIf_) -#endif -#endif - -import qualified System.Environment as System -import System.Environment (lookupEnv) -#if __GLASGOW_HASKELL__ >= 708 -import System.Environment (unsetEnv) -#endif - -import Distribution.Compat.Stack - -#ifdef mingw32_HOST_OS -import Foreign.C -#if __GLASGOW_HASKELL__ < 708 -import Foreign.Ptr (nullPtr) -#endif -import GHC.Windows -#else -import Foreign.C.Types -import Foreign.C.String -import Foreign.C.Error (throwErrnoIfMinus1_) -import System.Posix.Internals ( withFilePath ) -#endif /* mingw32_HOST_OS */ - -getEnvironment :: NoCallStackIO [(String, String)] -#ifdef mingw32_HOST_OS --- On Windows, the names of environment variables are case-insensitive, but are --- often given in mixed-case (e.g. "PATH" is "Path"), so we have to normalise --- them. -getEnvironment = fmap upcaseVars System.getEnvironment - where - upcaseVars = map upcaseVar - upcaseVar (var, val) = (map toUpper var, val) -#else -getEnvironment = System.getEnvironment -#endif - --- | @setEnv name value@ sets the specified environment variable to @value@. 
--- --- Throws `Control.Exception.IOException` if either @name@ or @value@ is the --- empty string or contains an equals sign. -setEnv :: String -> String -> IO () -setEnv key value_ = setEnv_ key value - where - -- NOTE: Anything that follows NUL is ignored on both POSIX and Windows. We - -- still strip it manually so that the null check above succeeds if a value - -- starts with NUL. - value = takeWhile (/= '\NUL') value_ - -setEnv_ :: String -> String -> IO () - -#ifdef mingw32_HOST_OS - -setEnv_ key value = withCWString key $ \k -> withCWString value $ \v -> do - success <- c_SetEnvironmentVariable k v - unless success (throwGetLastError "setEnv") - where - _ = callStack -- TODO: attach CallStack to exception - -# if defined(i386_HOST_ARCH) -# define WINDOWS_CCONV stdcall -# elif defined(x86_64_HOST_ARCH) -# define WINDOWS_CCONV ccall -# else -# error Unknown mingw32 arch -# endif /* i386_HOST_ARCH */ - -foreign import WINDOWS_CCONV unsafe "windows.h SetEnvironmentVariableW" - c_SetEnvironmentVariable :: LPTSTR -> LPTSTR -> Prelude.IO Bool -#else -setEnv_ key value = do - withFilePath key $ \ keyP -> - withFilePath value $ \ valueP -> - throwErrnoIfMinus1_ "setenv" $ - c_setenv keyP valueP (fromIntegral (fromEnum True)) - where - _ = callStack -- TODO: attach CallStack to exception - -foreign import ccall unsafe "setenv" - c_setenv :: CString -> CString -> CInt -> Prelude.IO CInt -#endif /* mingw32_HOST_OS */ - -#if __GLASGOW_HASKELL__ < 708 - --- | @unsetEnv name@ removes the specified environment variable from the --- environment of the current process. --- --- Throws `Control.Exception.IOException` if @name@ is the empty string or --- contains an equals sign. 
--- --- @since 4.7.0.0 -unsetEnv :: String -> IO () -#ifdef mingw32_HOST_OS -unsetEnv key = withCWString key $ \k -> do - success <- c_SetEnvironmentVariable k nullPtr - unless success $ do - -- We consider unsetting an environment variable that does not exist not as - -- an error, hence we ignore eRROR_ENVVAR_NOT_FOUND. - err <- c_GetLastError - unless (err == eRROR_ENVVAR_NOT_FOUND) $ do - throwGetLastError "unsetEnv" - -eRROR_ENVVAR_NOT_FOUND :: DWORD -eRROR_ENVVAR_NOT_FOUND = 203 - -foreign import WINDOWS_CCONV unsafe "windows.h GetLastError" - c_GetLastError:: IO DWORD -#else -unsetEnv key = withFilePath key (throwErrnoIf_ (/= 0) "unsetEnv" . c_unsetenv) -#if __GLASGOW_HASKELL__ > 706 -foreign import ccall unsafe "__hsbase_unsetenv" c_unsetenv :: CString -> Prelude.IO CInt -#else --- HACK: We hope very hard that !UNSETENV_RETURNS_VOID -foreign import ccall unsafe "unsetenv" c_unsetenv :: CString -> Prelude.IO CInt -#endif -#endif - -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Exception.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Exception.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Exception.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Exception.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -{-# LANGUAGE CPP #-} -module Distribution.Compat.Exception ( - catchIO, - catchExit, - tryIO, - displayException, - ) where - -import System.Exit -import qualified Control.Exception as Exception -#if __GLASGOW_HASKELL__ >= 710 -import Control.Exception (displayException) -#endif - -tryIO :: IO a -> IO (Either Exception.IOException a) -tryIO = Exception.try - -catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a -catchIO = Exception.catch - -catchExit :: IO a -> (ExitCode -> IO a) -> IO a -catchExit = Exception.catch - -#if 
__GLASGOW_HASKELL__ < 710 -displayException :: Exception.Exception e => e -> String -displayException = show -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/GetShortPathName.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/GetShortPathName.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/GetShortPathName.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/GetShortPathName.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -{-# LANGUAGE CPP #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.GetShortPathName --- --- Maintainer : cabal-devel@haskell.org --- Portability : Windows-only --- --- Win32 API 'GetShortPathName' function. - -module Distribution.Compat.GetShortPathName ( getShortPathName ) - where - -import Prelude () -import Distribution.Compat.Prelude - -#ifdef mingw32_HOST_OS - -import qualified Prelude -import qualified System.Win32 as Win32 -import System.Win32 (LPCTSTR, LPTSTR, DWORD) -import Foreign.Marshal.Array (allocaArray) - -#ifdef x86_64_HOST_ARCH -#define WINAPI ccall -#else -#define WINAPI stdcall -#endif - -foreign import WINAPI unsafe "windows.h GetShortPathNameW" - c_GetShortPathName :: LPCTSTR -> LPTSTR -> DWORD -> Prelude.IO DWORD - --- | On Windows, retrieves the short path form of the specified path. On --- non-Windows, does nothing. See https://github.com/haskell/cabal/issues/3185. --- --- From MS's GetShortPathName docs: --- --- Passing NULL for [the second] parameter and zero for cchBuffer --- will always return the required buffer size for a --- specified lpszLongPath. --- -getShortPathName :: FilePath -> NoCallStackIO FilePath -getShortPathName path = - Win32.withTString path $ \c_path -> do - c_len <- Win32.failIfZero "GetShortPathName #1 failed!" 
$ - c_GetShortPathName c_path Win32.nullPtr 0 - let arr_len = fromIntegral c_len - allocaArray arr_len $ \c_out -> do - void $ Win32.failIfZero "GetShortPathName #2 failed!" $ - c_GetShortPathName c_path c_out c_len - Win32.peekTString c_out - -#else - -getShortPathName :: FilePath -> NoCallStackIO FilePath -getShortPathName path = return path - -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Graph.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Graph.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Graph.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Graph.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,404 +0,0 @@ -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE CPP #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE UndecidableInstances #-} -{-# LANGUAGE BangPatterns #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.Graph --- Copyright : (c) Edward Z. Yang 2016 --- License : BSD3 --- --- Maintainer : cabal-dev@haskell.org --- Stability : experimental --- Portability : portable --- --- A data type representing directed graphs, backed by "Data.Graph". --- It is strict in the node type. --- --- This is an alternative interface to "Data.Graph". In this interface, --- nodes (identified by the 'IsNode' type class) are associated with a --- key and record the keys of their neighbors. This interface is more --- convenient than 'Data.Graph.Graph', which requires vertices to be --- explicitly handled by integer indexes. --- --- The current implementation has somewhat peculiar performance --- characteristics. The asymptotics of all map-like operations mirror --- their counterparts in "Data.Map". 
However, to perform a graph --- operation, we first must build the "Data.Graph" representation, an --- operation that takes /O(V + E log V)/. However, this operation can --- be amortized across all queries on that particular graph. --- --- Some nodes may be broken, i.e., refer to neighbors which are not --- stored in the graph. In our graph algorithms, we transparently --- ignore such edges; however, you can easily query for the broken --- vertices of a graph using 'broken' (and should, e.g., to ensure that --- a closure of a graph is well-formed.) It's possible to take a closed --- subset of a broken graph and get a well-formed graph. --- ------------------------------------------------------------------------------ - -module Distribution.Compat.Graph ( - -- * Graph type - Graph, - IsNode(..), - -- * Query - null, - size, - member, - lookup, - -- * Construction - empty, - insert, - deleteKey, - deleteLookup, - -- * Combine - unionLeft, - unionRight, - -- * Graph algorithms - stronglyConnComp, - SCC(..), - cycles, - broken, - neighbors, - revNeighbors, - closure, - revClosure, - topSort, - revTopSort, - -- * Conversions - -- ** Maps - toMap, - -- ** Lists - fromDistinctList, - toList, - keys, - -- ** Sets - keysSet, - -- ** Graphs - toGraph, - -- * Node type - Node(..), - nodeValue, -) where - -import Prelude () -import qualified Distribution.Compat.Prelude as Prelude -import Distribution.Compat.Prelude hiding (lookup, null, empty) - -import Data.Graph (SCC(..)) -import qualified Data.Graph as G - -import qualified Data.Map.Strict as Map -import qualified Data.Set as Set -import qualified Data.Array as Array -import Data.Array ((!)) -import qualified Data.Tree as Tree -import Data.Either (partitionEithers) -import qualified Data.Foldable as Foldable - --- | A graph of nodes @a@. The nodes are expected to have instance --- of class 'IsNode'. 
-data Graph a - = Graph { - graphMap :: !(Map (Key a) a), - -- Lazily cached graph representation - graphForward :: G.Graph, - graphAdjoint :: G.Graph, - graphVertexToNode :: G.Vertex -> a, - graphKeyToVertex :: Key a -> Maybe G.Vertex, - graphBroken :: [(a, [Key a])] - } - deriving (Typeable) - --- NB: Not a Functor! (or Traversable), because you need --- to restrict Key a ~ Key b. We provide our own mapping --- functions. - --- General strategy is most operations are deferred to the --- Map representation. - -instance Show a => Show (Graph a) where - show = show . toList - -instance (IsNode a, Read a, Show (Key a)) => Read (Graph a) where - readsPrec d s = map (\(a,r) -> (fromDistinctList a, r)) (readsPrec d s) - -instance (IsNode a, Binary a, Show (Key a)) => Binary (Graph a) where - put x = put (toList x) - get = fmap fromDistinctList get - -instance (Eq (Key a), Eq a) => Eq (Graph a) where - g1 == g2 = graphMap g1 == graphMap g2 - -instance Foldable.Foldable Graph where - fold = Foldable.fold . graphMap - foldr f z = Foldable.foldr f z . graphMap - foldl f z = Foldable.foldl f z . graphMap - foldMap f = Foldable.foldMap f . graphMap - foldl' f z = Foldable.foldl' f z . graphMap - foldr' f z = Foldable.foldr' f z . graphMap -#ifdef MIN_VERSION_base -#if MIN_VERSION_base(4,8,0) - length = Foldable.length . graphMap - null = Foldable.null . graphMap - toList = Foldable.toList . graphMap - elem x = Foldable.elem x . graphMap - maximum = Foldable.maximum . graphMap - minimum = Foldable.minimum . graphMap - sum = Foldable.sum . graphMap - product = Foldable.product . graphMap -#endif -#endif - -instance (NFData a, NFData (Key a)) => NFData (Graph a) where - rnf Graph { - graphMap = m, - graphForward = gf, - graphAdjoint = ga, - graphVertexToNode = vtn, - graphKeyToVertex = ktv, - graphBroken = b - } = gf `seq` ga `seq` vtn `seq` ktv `seq` b `seq` rnf m - --- TODO: Data instance? 
- --- | The 'IsNode' class is used for datatypes which represent directed --- graph nodes. A node of type @a@ is associated with some unique key of --- type @'Key' a@; given a node we can determine its key ('nodeKey') --- and the keys of its neighbors ('nodeNeighbors'). -class Ord (Key a) => IsNode a where - type Key a - nodeKey :: a -> Key a - nodeNeighbors :: a -> [Key a] - -instance (IsNode a, IsNode b, Key a ~ Key b) => IsNode (Either a b) where - type Key (Either a b) = Key a - nodeKey (Left x) = nodeKey x - nodeKey (Right x) = nodeKey x - nodeNeighbors (Left x) = nodeNeighbors x - nodeNeighbors (Right x) = nodeNeighbors x - --- | A simple, trivial data type which admits an 'IsNode' instance. -data Node k a = N a k [k] - deriving (Show, Eq) - --- | Get the value from a 'Node'. -nodeValue :: Node k a -> a -nodeValue (N a _ _) = a - -instance Functor (Node k) where - fmap f (N a k ks) = N (f a) k ks - -instance Ord k => IsNode (Node k a) where - type Key (Node k a) = k - nodeKey (N _ k _) = k - nodeNeighbors (N _ _ ks) = ks - --- TODO: Maybe introduce a typeclass for items which just --- keys (so, Key associated type, and nodeKey method). But --- I didn't need it here, so I didn't introduce it. - --- Query - --- | /O(1)/. Is the graph empty? -null :: Graph a -> Bool -null = Map.null . toMap - --- | /O(1)/. The number of nodes in the graph. -size :: Graph a -> Int -size = Map.size . toMap - --- | /O(log V)/. Check if the key is in the graph. -member :: IsNode a => Key a -> Graph a -> Bool -member k g = Map.member k (toMap g) - --- | /O(log V)/. Lookup the node at a key in the graph. -lookup :: IsNode a => Key a -> Graph a -> Maybe a -lookup k g = Map.lookup k (toMap g) - --- Construction - --- | /O(1)/. The empty graph. -empty :: IsNode a => Graph a -empty = fromMap Map.empty - --- | /O(log V)/. Insert a node into a graph. -insert :: IsNode a => a -> Graph a -> Graph a -insert !n g = fromMap (Map.insert (nodeKey n) n (toMap g)) - --- | /O(log V)/. 
Delete the node at a key from the graph. -deleteKey :: IsNode a => Key a -> Graph a -> Graph a -deleteKey k g = fromMap (Map.delete k (toMap g)) - --- | /O(log V)/. Lookup and delete. This function returns the deleted --- value if it existed. -deleteLookup :: IsNode a => Key a -> Graph a -> (Maybe a, Graph a) -deleteLookup k g = - let (r, m') = Map.updateLookupWithKey (\_ _ -> Nothing) k (toMap g) - in (r, fromMap m') - --- Combining - --- | /O(V + V')/. Right-biased union, preferring entries --- from the second map when conflicts occur. --- @'nodeKey' x = 'nodeKey' (f x)@. -unionRight :: IsNode a => Graph a -> Graph a -> Graph a -unionRight g g' = fromMap (Map.union (toMap g') (toMap g)) - --- | /O(V + V')/. Left-biased union, preferring entries from --- the first map when conflicts occur. -unionLeft :: IsNode a => Graph a -> Graph a -> Graph a -unionLeft = flip unionRight - --- Graph-like operations - --- | /Ω(V + E)/. Compute the strongly connected components of a graph. --- Requires amortized construction of graph. -stronglyConnComp :: Graph a -> [SCC a] -stronglyConnComp g = map decode forest - where - forest = G.scc (graphForward g) - decode (Tree.Node v []) - | mentions_itself v = CyclicSCC [graphVertexToNode g v] - | otherwise = AcyclicSCC (graphVertexToNode g v) - decode other = CyclicSCC (dec other []) - where dec (Tree.Node v ts) vs - = graphVertexToNode g v : foldr dec vs ts - mentions_itself v = v `elem` (graphForward g ! v) --- Implementation copied from 'stronglyConnCompR' in 'Data.Graph'. - --- | /Ω(V + E)/. Compute the cycles of a graph. --- Requires amortized construction of graph. -cycles :: Graph a -> [[a]] -cycles g = [ vs | CyclicSCC vs <- stronglyConnComp g ] - --- | /O(1)/. Return a list of nodes paired with their broken --- neighbors (i.e., neighbor keys which are not in the graph). --- Requires amortized construction of graph. 
-broken :: Graph a -> [(a, [Key a])] -broken g = graphBroken g - --- | Lookup the immediate neighbors from a key in the graph. --- Requires amortized construction of graph. -neighbors :: Graph a -> Key a -> Maybe [a] -neighbors g k = do - v <- graphKeyToVertex g k - return (map (graphVertexToNode g) (graphForward g ! v)) - --- | Lookup the immediate reverse neighbors from a key in the graph. --- Requires amortized construction of graph. -revNeighbors :: Graph a -> Key a -> Maybe [a] -revNeighbors g k = do - v <- graphKeyToVertex g k - return (map (graphVertexToNode g) (graphAdjoint g ! v)) - --- | Compute the subgraph which is the closure of some set of keys. --- Returns @Nothing@ if one (or more) keys are not present in --- the graph. --- Requires amortized construction of graph. -closure :: Graph a -> [Key a] -> Maybe [a] -closure g ks = do - vs <- traverse (graphKeyToVertex g) ks - return (decodeVertexForest g (G.dfs (graphForward g) vs)) - --- | Compute the reverse closure of a graph from some set --- of keys. Returns @Nothing@ if one (or more) keys are not present in --- the graph. --- Requires amortized construction of graph. -revClosure :: Graph a -> [Key a] -> Maybe [a] -revClosure g ks = do - vs <- traverse (graphKeyToVertex g) ks - return (decodeVertexForest g (G.dfs (graphAdjoint g) vs)) - -flattenForest :: Tree.Forest a -> [a] -flattenForest = concatMap Tree.flatten - -decodeVertexForest :: Graph a -> Tree.Forest G.Vertex -> [a] -decodeVertexForest g = map (graphVertexToNode g) . flattenForest - --- | Topologically sort the nodes of a graph. --- Requires amortized construction of graph. -topSort :: Graph a -> [a] -topSort g = map (graphVertexToNode g) $ G.topSort (graphForward g) - --- | Reverse topologically sort the nodes of a graph. --- Requires amortized construction of graph. -revTopSort :: Graph a -> [a] -revTopSort g = map (graphVertexToNode g) $ G.topSort (graphAdjoint g) - --- Conversions - --- | /O(1)/. 
Convert a map from keys to nodes into a graph. --- The map must satisfy the invariant that --- @'fromMap' m == 'fromList' ('Data.Map.elems' m)@; --- if you can't fulfill this invariant use @'fromList' ('Data.Map.elems' m)@ --- instead. The values of the map are assumed to already --- be in WHNF. -fromMap :: IsNode a => Map (Key a) a -> Graph a -fromMap m - = Graph { graphMap = m - -- These are lazily computed! - , graphForward = g - , graphAdjoint = G.transposeG g - , graphVertexToNode = vertex_to_node - , graphKeyToVertex = key_to_vertex - , graphBroken = broke - } - where - try_key_to_vertex k = maybe (Left k) Right (key_to_vertex k) - - (brokenEdges, edges) - = unzip - $ [ partitionEithers (map try_key_to_vertex (nodeNeighbors n)) - | n <- ns ] - broke = filter (not . Prelude.null . snd) (zip ns brokenEdges) - - g = Array.listArray bounds edges - - ns = Map.elems m -- sorted ascending - vertices = zip (map nodeKey ns) [0..] - vertex_map = Map.fromAscList vertices - key_to_vertex k = Map.lookup k vertex_map - - vertex_to_node vertex = nodeTable ! vertex - - nodeTable = Array.listArray bounds ns - bounds = (0, Map.size m - 1) - --- | /O(V log V)/. Convert a list of nodes (with distinct keys) into a graph. -fromDistinctList :: (IsNode a, Show (Key a)) => [a] -> Graph a -fromDistinctList = fromMap - . Map.fromListWith (\_ -> duplicateError) - . map (\n -> n `seq` (nodeKey n, n)) - where - duplicateError n = error $ "Graph.fromDistinctList: duplicate key: " - ++ show (nodeKey n) - --- Map-like operations - --- | /O(V)/. Convert a graph into a list of nodes. -toList :: Graph a -> [a] -toList g = Map.elems (toMap g) - --- | /O(V)/. Convert a graph into a list of keys. -keys :: Graph a -> [Key a] -keys g = Map.keys (toMap g) - --- | /O(V)/. Convert a graph into a set of keys. -keysSet :: Graph a -> Set.Set (Key a) -keysSet g = Map.keysSet (toMap g) - --- | /O(1)/. Convert a graph into a map from keys to nodes. 
--- The resulting map @m@ is guaranteed to have the property that --- @'Prelude.all' (\(k,n) -> k == 'nodeKey' n) ('Data.Map.toList' m)@. -toMap :: Graph a -> Map (Key a) a -toMap = graphMap - --- Graph-like operations - --- | /O(1)/. Convert a graph into a 'Data.Graph.Graph'. --- Requires amortized construction of graph. -toGraph :: Graph a -> (G.Graph, G.Vertex -> a, Key a -> Maybe G.Vertex) -toGraph g = (graphForward g, graphVertexToNode g, graphKeyToVertex g) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Internal/TempFile.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Internal/TempFile.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Internal/TempFile.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Internal/TempFile.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,123 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# OPTIONS_HADDOCK hide #-} -module Distribution.Compat.Internal.TempFile ( - openTempFile, - openBinaryTempFile, - openNewBinaryFile, - createTempDirectory, - ) where - -import Distribution.Compat.Exception - -import System.FilePath (()) -import Foreign.C (CInt, eEXIST, getErrno, errnoToIOError) - -import System.IO (Handle, openTempFile, openBinaryTempFile) -import Data.Bits ((.|.)) -import System.Posix.Internals (c_open, c_close, o_CREAT, o_EXCL, o_RDWR, - o_BINARY, o_NONBLOCK, o_NOCTTY, - withFilePath, c_getpid) -import System.IO.Error (isAlreadyExistsError) -import GHC.IO.Handle.FD (fdToHandle) -import Control.Exception (onException) - -#if defined(mingw32_HOST_OS) || defined(ghcjs_HOST_OS) -import System.Directory ( createDirectory ) -#else -import qualified System.Posix -#endif - --- ------------------------------------------------------------ --- * temporary files --- ------------------------------------------------------------ - --- This is here for Haskell 
implementations that do not come with --- System.IO.openTempFile. This includes nhc-1.20, hugs-2006.9. --- TODO: This file should probably be removed. - --- This is a copy/paste of the openBinaryTempFile definition, but --- if uses 666 rather than 600 for the permissions. The base library --- needs to be changed to make this better. -openNewBinaryFile :: FilePath -> String -> IO (FilePath, Handle) -openNewBinaryFile dir template = do - pid <- c_getpid - findTempName pid - where - -- We split off the last extension, so we can use .foo.ext files - -- for temporary files (hidden on Unix OSes). Unfortunately we're - -- below file path in the hierarchy here. - (prefix,suffix) = - case break (== '.') $ reverse template of - -- First case: template contains no '.'s. Just re-reverse it. - (rev_suffix, "") -> (reverse rev_suffix, "") - -- Second case: template contains at least one '.'. Strip the - -- dot from the prefix and prepend it to the suffix (if we don't - -- do this, the unique number will get added after the '.' and - -- thus be part of the extension, which is wrong.) - (rev_suffix, '.':rest) -> (reverse rest, '.':reverse rev_suffix) - -- Otherwise, something is wrong, because (break (== '.')) should - -- always return a pair with either the empty string or a string - -- beginning with '.' as the second component. - _ -> error "bug in System.IO.openTempFile" - - oflags = rw_flags .|. o_EXCL .|. 
o_BINARY - - findTempName x = do - fd <- withFilePath filepath $ \ f -> - c_open f oflags 0o666 - if fd < 0 - then do - errno <- getErrno - if errno == eEXIST - then findTempName (x+1) - else ioError (errnoToIOError "openNewBinaryFile" errno Nothing (Just dir)) - else do - -- TODO: We want to tell fdToHandle what the file path is, - -- as any exceptions etc will only be able to report the - -- FD currently - h <- fdToHandle fd `onException` c_close fd - return (filepath, h) - where - filename = prefix ++ show x ++ suffix - filepath = dir `combine` filename - - -- FIXME: bits copied from System.FilePath - combine a b - | null b = a - | null a = b - | last a == pathSeparator = a ++ b - | otherwise = a ++ [pathSeparator] ++ b - --- FIXME: Should use System.FilePath library -pathSeparator :: Char -#ifdef mingw32_HOST_OS -pathSeparator = '\\' -#else -pathSeparator = '/' -#endif - --- FIXME: Copied from GHC.Handle -std_flags, output_flags, rw_flags :: CInt -std_flags = o_NONBLOCK .|. o_NOCTTY -output_flags = std_flags .|. o_CREAT -rw_flags = output_flags .|. 
o_RDWR - -createTempDirectory :: FilePath -> String -> IO FilePath -createTempDirectory dir template = do - pid <- c_getpid - findTempName pid - where - findTempName x = do - let dirpath = dir template ++ "-" ++ show x - r <- tryIO $ mkPrivateDir dirpath - case r of - Right _ -> return dirpath - Left e | isAlreadyExistsError e -> findTempName (x+1) - | otherwise -> ioError e - -mkPrivateDir :: String -> IO () -#if defined(mingw32_HOST_OS) || defined(ghcjs_HOST_OS) -mkPrivateDir s = createDirectory s -#else -mkPrivateDir s = System.Posix.createDirectory s 0o700 -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Lens.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Lens.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,264 +0,0 @@ -{-# LANGUAGE RankNTypes #-} --- | This module provides very basic lens functionality, without extra dependencies. --- --- For the documentation of the combinators see package. --- This module uses the same vocabulary. 
-module Distribution.Compat.Lens ( - -- * Types - Lens, - Lens', - Traversal, - Traversal', - -- ** LensLike - LensLike, - LensLike', - -- ** rank-1 types - Getting, - AGetter, - ASetter, - ALens, - ALens', - -- * Getter - view, - use, - getting, - -- * Setter - set, - over, - -- * Fold - toDListOf, - toListOf, - toSetOf, - -- * Lens - cloneLens, - aview, - -- * Common lenses - _1, _2, - -- * Operators - (&), - (^.), - (.~), (?~), (%~), - (.=), (?=), (%=), - (^#), - (#~), (#%~), - -- * Internal Comonads - Pretext (..), - -- * Cabal developer info - -- $development - ) where - -import Prelude() -import Distribution.Compat.Prelude - -import Control.Applicative (Const (..)) -import Data.Functor.Identity (Identity (..)) -import Control.Monad.State.Class (MonadState (..), gets, modify) - -import qualified Distribution.Compat.DList as DList -import qualified Data.Set as Set - -------------------------------------------------------------------------------- --- Types -------------------------------------------------------------------------------- - -type LensLike f s t a b = (a -> f b) -> s -> f t -type LensLike' f s a = (a -> f a) -> s -> f s - -type Lens s t a b = forall f. Functor f => LensLike f s t a b -type Traversal s t a b = forall f. 
Applicative f => LensLike f s t a b - -type Lens' s a = Lens s s a a -type Traversal' s a = Traversal s s a a - -type Getting r s a = LensLike (Const r) s s a a - -type AGetter s a = LensLike (Const a) s s a a -- this doens't exist in 'lens' -type ASetter s t a b = LensLike Identity s t a b -type ALens s t a b = LensLike (Pretext a b) s t a b - -type ALens' s a = ALens s s a a - -------------------------------------------------------------------------------- --- Getter -------------------------------------------------------------------------------- - -view :: Getting a s a -> s -> a -view l s = getConst (l Const s) -{-# INLINE view #-} - -use :: MonadState s m => Getting a s a -> m a -use l = gets (view l) -{-# INLINE use #-} - --- | @since 2.4 --- --- >>> (3 :: Int) ^. getting (+2) . getting show --- "5" -getting :: (s -> a) -> Getting r s a -getting k f = Const . getConst . f . k -{-# INLINE getting #-} - -------------------------------------------------------------------------------- --- Setter -------------------------------------------------------------------------------- - -set :: ASetter s t a b -> b -> s -> t -set l x = over l (const x) - -over :: ASetter s t a b -> (a -> b) -> s -> t -over l f s = runIdentity (l (\x -> Identity (f x)) s) - -------------------------------------------------------------------------------- --- Fold -------------------------------------------------------------------------------- - -toDListOf :: Getting (DList.DList a) s a -> s -> DList.DList a -toDListOf l s = getConst (l (\x -> Const (DList.singleton x)) s) - -toListOf :: Getting (DList.DList a) s a -> s -> [a] -toListOf l = DList.runDList . 
toDListOf l - -toSetOf :: Getting (Set.Set a) s a -> s -> Set.Set a -toSetOf l s = getConst (l (\x -> Const (Set.singleton x)) s) - -------------------------------------------------------------------------------- --- Lens -------------------------------------------------------------------------------- - -aview :: ALens s t a b -> s -> a -aview l = pretextPos . l pretextSell -{-# INLINE aview #-} - -{- -lens :: (s -> a) -> (s -> a -> s) -> Lens' s a -lens sa sbt afb s = sbt s <$> afb (sa s) --} - -------------------------------------------------------------------------------- --- Common -------------------------------------------------------------------------------- - -_1 :: Lens (a, c) (b, c) a b -_1 f (a, c) = flip (,) c <$> f a - -_2 :: Lens (c, a) (c, b) a b -_2 f (c, a) = (,) c <$> f a - -------------------------------------------------------------------------------- --- Operators -------------------------------------------------------------------------------- - --- | '&' is a reverse application operator -(&) :: a -> (a -> b) -> b -(&) = flip ($) -{-# INLINE (&) #-} -infixl 1 & - -infixl 8 ^., ^# -infixr 4 .~, %~, ?~ -infixr 4 #~, #%~ -infixr 4 .=, %=, ?= - -(^.) :: s -> Getting a s a -> a -s ^. l = getConst (l Const s) -{-# INLINE (^.) 
#-} - -(.~) :: ASetter s t a b -> b -> s -> t -(.~) = set -{-# INLINE (.~) #-} - -(?~) :: ASetter s t a (Maybe b) -> b -> s -> t -l ?~ b = set l (Just b) -{-# INLINE (?~) #-} - -(%~) :: ASetter s t a b -> (a -> b) -> s -> t -(%~) = over -{-# INLINE (%~) #-} - -(.=) :: MonadState s m => ASetter s s a b -> b -> m () -l .= b = modify (l .~ b) -{-# INLINE (.=) #-} - -(?=) :: MonadState s m => ASetter s s a (Maybe b) -> b -> m () -l ?= b = modify (l ?~ b) -{-# INLINE (?=) #-} - -(%=) :: MonadState s m => ASetter s s a b -> (a -> b) -> m () -l %= f = modify (l %~ f) -{-# INLINE (%=) #-} - -(^#) :: s -> ALens s t a b -> a -s ^# l = aview l s - -(#~) :: ALens s t a b -> b -> s -> t -(#~) l b s = pretextPeek b (l pretextSell s) -{-# INLINE (#~) #-} - -(#%~) :: ALens s t a b -> (a -> b) -> s -> t -(#%~) l f s = pretextPeeks f (l pretextSell s) -{-# INLINE (#%~) #-} - -pretextSell :: a -> Pretext a b b -pretextSell a = Pretext (\afb -> afb a) -{-# INLINE pretextSell #-} - -pretextPeeks :: (a -> b) -> Pretext a b t -> t -pretextPeeks f (Pretext m) = runIdentity $ m (\x -> Identity (f x)) -{-# INLINE pretextPeeks #-} - -pretextPeek :: b -> Pretext a b t -> t -pretextPeek b (Pretext m) = runIdentity $ m (\_ -> Identity b) -{-# INLINE pretextPeek #-} - -pretextPos :: Pretext a b t -> a -pretextPos (Pretext m) = getConst (m Const) -{-# INLINE pretextPos #-} - -cloneLens :: Functor f => ALens s t a b -> LensLike f s t a b -cloneLens l f s = runPretext (l pretextSell s) f -{-# INLINE cloneLens #-} - -------------------------------------------------------------------------------- --- Comonads -------------------------------------------------------------------------------- - --- | @lens@ variant is also parametrised by profunctor. -data Pretext a b t = Pretext { runPretext :: forall f. 
Functor f => (a -> f b) -> f t } - -instance Functor (Pretext a b) where - fmap f (Pretext pretext) = Pretext (\afb -> fmap f (pretext afb)) - -------------------------------------------------------------------------------- --- Documentation -------------------------------------------------------------------------------- - --- $development --- --- We cannot depend on @template-haskell@, because Cabal is a boot library. --- This fact makes defining optics a manual task. Here is a small recipe to --- make the process less tedious. --- --- First start a repl --- --- > cabal new-repl Cabal:hackage-tests --- --- Because @--extra-package@ isn't yet implemented, we use a test-suite --- with @generics-sop@ dependency. --- --- In the repl, we load a helper script: --- --- > :l ../generics-sop-lens.hs --- --- Now we are set up to derive lenses! --- --- > :m +Distribution.Types.SourceRepo --- > putStr $ genericLenses (Proxy :: Proxy SourceRepo) --- --- @ --- repoKind :: Lens' SourceRepo RepoKind --- repoKind f s = fmap (\\x -> s { T.repoKind = x }) (f (T.repoKind s)) --- \{-# INLINE repoKind #-\} --- ... --- @ --- --- /Note:/ You may need to adjust type-aliases, e.g. `String` to `FilePath`. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/MonadFail.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/MonadFail.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/MonadFail.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/MonadFail.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,36 +0,0 @@ -{-# LANGUAGE CPP #-} - --- | Compatibility layer for "Control.Monad.Fail" -module Distribution.Compat.MonadFail ( MonadFail(fail) ) where -#if __GLASGOW_HASKELL__ >= 800 --- provided by base-4.9.0.0 and later -import Control.Monad.Fail (MonadFail(fail)) -#else --- the following code corresponds to --- http://hackage.haskell.org/package/fail-4.9.0.0 -import qualified Prelude as P -import Distribution.Compat.Prelude hiding (fail) - -import Text.ParserCombinators.ReadP -import Text.ParserCombinators.ReadPrec - -class Monad m => MonadFail m where - fail :: String -> m a - --- instances provided by base-4.9 - -instance MonadFail Maybe where - fail _ = Nothing - -instance MonadFail [] where - fail _ = [] - -instance MonadFail P.IO where - fail = P.fail - -instance MonadFail ReadPrec where - fail = P.fail -- = P (\_ -> fail s) - -instance MonadFail ReadP where - fail = P.fail -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Newtype.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Newtype.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Newtype.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Newtype.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,70 +0,0 @@ -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE FunctionalDependencies #-} --- | Per Conor McBride, the 'Newtype' typeclass represents the 
packing and --- unpacking of a newtype, and allows you to operatate under that newtype with --- functions such as 'ala'. -module Distribution.Compat.Newtype ( - Newtype (..), - ala, - alaf, - pack', - unpack', - ) where - -import Data.Functor.Identity (Identity (..)) -import Data.Monoid (Sum (..), Product (..), Endo (..)) - --- | The @FunctionalDependencies@ version of 'Newtype' type-class. --- --- /Note:/ for actual newtypes the implementation can be --- @pack = coerce; unpack = coerce@. We don't have default implementation, --- because @Cabal@ have to support older than @base >= 4.7@ compilers. --- Also, 'Newtype' could witness a non-structural isomorphism. -class Newtype n o | n -> o where - pack :: o -> n - unpack :: n -> o - -instance Newtype (Identity a) a where - pack = Identity - unpack = runIdentity - -instance Newtype (Sum a) a where - pack = Sum - unpack = getSum - -instance Newtype (Product a) a where - pack = Product - unpack = getProduct - -instance Newtype (Endo a) (a -> a) where - pack = Endo - unpack = appEndo - --- | --- --- >>> ala Sum foldMap [1, 2, 3, 4 :: Int] --- 10 --- --- /Note:/ the user supplied function for the newtype is /ignored/. --- --- >>> ala (Sum . (+1)) foldMap [1, 2, 3, 4 :: Int] --- 10 -ala :: (Newtype n o, Newtype n' o') => (o -> n) -> ((o -> n) -> b -> n') -> (b -> o') -ala pa hof = alaf pa hof id - --- | --- --- >>> alaf Sum foldMap length ["cabal", "install"] --- 12 --- --- /Note:/ as with 'ala', the user supplied function for the newtype is /ignored/. -alaf :: (Newtype n o, Newtype n' o') => (o -> n) -> ((a -> n) -> b -> n') -> (a -> o) -> (b -> o') -alaf _ hof f = unpack . hof (pack . f) - --- | Variant of 'pack', which takes a phantom type. -pack' :: Newtype n o => (o -> n) -> o -> n -pack' _ = pack - --- | Variant of 'pack', which takes a phantom type. 
-unpack' :: Newtype n o => (o -> n) -> n -> o -unpack' _ = unpack diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Parsing.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Parsing.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Parsing.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Parsing.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,403 +0,0 @@ -{-# LANGUAGE GADTs, UndecidableInstances #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.Parsing --- Copyright : (c) Edward Kmett 2011-2012 --- License : BSD3 --- --- Maintainer : ekmett@gmail.com --- Stability : experimental --- Portability : non-portable --- --- Alternative parser combinators. --- --- Originally in @parsers@ package. --- ------------------------------------------------------------------------------ -module Distribution.Compat.Parsing - ( - -- * Parsing Combinators - choice - , option - , optional -- from Control.Applicative, parsec optionMaybe - , skipOptional -- parsec optional - , between - , some -- from Control.Applicative, parsec many1 - , many -- from Control.Applicative - , sepBy - , sepBy1 - -- , sepByNonEmpty - , sepEndBy1 - -- , sepEndByNonEmpty - , sepEndBy - , endBy1 - -- , endByNonEmpty - , endBy - , count - , chainl - , chainr - , chainl1 - , chainr1 - , manyTill - -- * Parsing Class - , Parsing(..) 
- ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Control.Applicative ((<**>), optional) -import Control.Monad.Trans.Class (lift) -import Control.Monad.Trans.State.Lazy as Lazy -import Control.Monad.Trans.State.Strict as Strict -import Control.Monad.Trans.Writer.Lazy as Lazy -import Control.Monad.Trans.Writer.Strict as Strict -import Control.Monad.Trans.RWS.Lazy as Lazy -import Control.Monad.Trans.RWS.Strict as Strict -import Control.Monad.Trans.Reader (ReaderT (..)) -import Control.Monad.Trans.Identity (IdentityT (..)) -import Data.Foldable (asum) - -import qualified Text.Parsec as Parsec -import qualified Distribution.Compat.ReadP as ReadP - --- | @choice ps@ tries to apply the parsers in the list @ps@ in order, --- until one of them succeeds. Returns the value of the succeeding --- parser. -choice :: Alternative m => [m a] -> m a -choice = asum -{-# INLINE choice #-} - --- | @option x p@ tries to apply parser @p@. If @p@ fails without --- consuming input, it returns the value @x@, otherwise the value --- returned by @p@. --- --- > priority = option 0 (digitToInt <$> digit) -option :: Alternative m => a -> m a -> m a -option x p = p <|> pure x -{-# INLINE option #-} - --- | @skipOptional p@ tries to apply parser @p@. It will parse @p@ or nothing. --- It only fails if @p@ fails after consuming input. It discards the result --- of @p@. (Plays the role of parsec's optional, which conflicts with Applicative's optional) -skipOptional :: Alternative m => m a -> m () -skipOptional p = (() <$ p) <|> pure () -{-# INLINE skipOptional #-} - --- | @between open close p@ parses @open@, followed by @p@ and @close@. --- Returns the value returned by @p@. --- --- > braces = between (symbol "{") (symbol "}") -between :: Applicative m => m bra -> m ket -> m a -> m a -between bra ket p = bra *> p <* ket -{-# INLINE between #-} - --- | @sepBy p sep@ parses /zero/ or more occurrences of @p@, separated --- by @sep@. Returns a list of values returned by @p@. 
--- --- > commaSep p = p `sepBy` (symbol ",") -sepBy :: Alternative m => m a -> m sep -> m [a] -sepBy p sep = sepBy1 p sep <|> pure [] -{-# INLINE sepBy #-} - --- | @sepBy1 p sep@ parses /one/ or more occurrences of @p@, separated --- by @sep@. Returns a list of values returned by @p@. -sepBy1 :: Alternative m => m a -> m sep -> m [a] -sepBy1 p sep = (:) <$> p <*> many (sep *> p) --- toList <$> sepByNonEmpty p sep -{-# INLINE sepBy1 #-} - -{- --- | @sepByNonEmpty p sep@ parses /one/ or more occurrences of @p@, separated --- by @sep@. Returns a non-empty list of values returned by @p@. -sepByNonEmpty :: Alternative m => m a -> m sep -> m (NonEmpty a) -sepByNonEmpty p sep = (:|) <$> p <*> many (sep *> p) -{-# INLINE sepByNonEmpty #-} --} - --- | @sepEndBy1 p sep@ parses /one/ or more occurrences of @p@, --- separated and optionally ended by @sep@. Returns a list of values --- returned by @p@. -sepEndBy1 :: Alternative m => m a -> m sep -> m [a] -sepEndBy1 p sep = (:) <$> p <*> ((sep *> sepEndBy p sep) <|> pure []) --- toList <$> sepEndByNonEmpty p sep - -{- --- | @sepEndByNonEmpty p sep@ parses /one/ or more occurrences of @p@, --- separated and optionally ended by @sep@. Returns a non-empty list of values --- returned by @p@. -sepEndByNonEmpty :: Alternative m => m a -> m sep -> m (NonEmpty a) -sepEndByNonEmpty p sep = (:|) <$> p <*> ((sep *> sepEndBy p sep) <|> pure []) --} - --- | @sepEndBy p sep@ parses /zero/ or more occurrences of @p@, --- separated and optionally ended by @sep@, ie. haskell style --- statements. Returns a list of values returned by @p@. --- --- > haskellStatements = haskellStatement `sepEndBy` semi -sepEndBy :: Alternative m => m a -> m sep -> m [a] -sepEndBy p sep = sepEndBy1 p sep <|> pure [] -{-# INLINE sepEndBy #-} - --- | @endBy1 p sep@ parses /one/ or more occurrences of @p@, separated --- and ended by @sep@. Returns a list of values returned by @p@. 
-endBy1 :: Alternative m => m a -> m sep -> m [a] -endBy1 p sep = some (p <* sep) -{-# INLINE endBy1 #-} - -{- --- | @endByNonEmpty p sep@ parses /one/ or more occurrences of @p@, separated --- and ended by @sep@. Returns a non-empty list of values returned by @p@. -endByNonEmpty :: Alternative m => m a -> m sep -> m (NonEmpty a) -endByNonEmpty p sep = some1 (p <* sep) -{-# INLINE endByNonEmpty #-} --} - --- | @endBy p sep@ parses /zero/ or more occurrences of @p@, separated --- and ended by @sep@. Returns a list of values returned by @p@. --- --- > cStatements = cStatement `endBy` semi -endBy :: Alternative m => m a -> m sep -> m [a] -endBy p sep = many (p <* sep) -{-# INLINE endBy #-} - --- | @count n p@ parses @n@ occurrences of @p@. If @n@ is smaller or --- equal to zero, the parser equals to @return []@. Returns a list of --- @n@ values returned by @p@. -count :: Applicative m => Int -> m a -> m [a] -count n p | n <= 0 = pure [] - | otherwise = sequenceA (replicate n p) -{-# INLINE count #-} - --- | @chainr p op x@ parses /zero/ or more occurrences of @p@, --- separated by @op@ Returns a value obtained by a /right/ associative --- application of all functions returned by @op@ to the values returned --- by @p@. If there are no occurrences of @p@, the value @x@ is --- returned. -chainr :: Alternative m => m a -> m (a -> a -> a) -> a -> m a -chainr p op x = chainr1 p op <|> pure x -{-# INLINE chainr #-} - --- | @chainl p op x@ parses /zero/ or more occurrences of @p@, --- separated by @op@. Returns a value obtained by a /left/ associative --- application of all functions returned by @op@ to the values returned --- by @p@. If there are zero occurrences of @p@, the value @x@ is --- returned. 
-chainl :: Alternative m => m a -> m (a -> a -> a) -> a -> m a -chainl p op x = chainl1 p op <|> pure x -{-# INLINE chainl #-} - --- | @chainl1 p op x@ parses /one/ or more occurrences of @p@, --- separated by @op@ Returns a value obtained by a /left/ associative --- application of all functions returned by @op@ to the values returned --- by @p@. . This parser can for example be used to eliminate left --- recursion which typically occurs in expression grammars. --- --- > expr = term `chainl1` addop --- > term = factor `chainl1` mulop --- > factor = parens expr <|> integer --- > --- > mulop = (*) <$ symbol "*" --- > <|> div <$ symbol "/" --- > --- > addop = (+) <$ symbol "+" --- > <|> (-) <$ symbol "-" -chainl1 :: Alternative m => m a -> m (a -> a -> a) -> m a -chainl1 p op = scan where - scan = p <**> rst - rst = (\f y g x -> g (f x y)) <$> op <*> p <*> rst <|> pure id -{-# INLINE chainl1 #-} - --- | @chainr1 p op x@ parses /one/ or more occurrences of @p@, --- separated by @op@ Returns a value obtained by a /right/ associative --- application of all functions returned by @op@ to the values returned --- by @p@. -chainr1 :: Alternative m => m a -> m (a -> a -> a) -> m a -chainr1 p op = scan where - scan = p <**> rst - rst = (flip <$> op <*> scan) <|> pure id -{-# INLINE chainr1 #-} - --- | @manyTill p end@ applies parser @p@ /zero/ or more times until --- parser @end@ succeeds. Returns the list of values returned by @p@. --- This parser can be used to scan comments: --- --- > simpleComment = do{ string "")) --- > } --- --- Note the overlapping parsers @anyChar@ and @string \"-->\"@, and --- therefore the use of the 'try' combinator. -manyTill :: Alternative m => m a -> m end -> m [a] -manyTill p end = go where go = ([] <$ end) <|> ((:) <$> p <*> go) -{-# INLINE manyTill #-} - -infixr 0 - --- | Additional functionality needed to describe parsers independent of input type. 
-class Alternative m => Parsing m where - -- | Take a parser that may consume input, and on failure, go back to - -- where we started and fail as if we didn't consume input. - try :: m a -> m a - - -- | Give a parser a name - () :: m a -> String -> m a - - -- | A version of many that discards its input. Specialized because it - -- can often be implemented more cheaply. - skipMany :: m a -> m () - skipMany p = () <$ many p - {-# INLINE skipMany #-} - - -- | @skipSome p@ applies the parser @p@ /one/ or more times, skipping - -- its result. (aka skipMany1 in parsec) - skipSome :: m a -> m () - skipSome p = p *> skipMany p - {-# INLINE skipSome #-} - - -- | Used to emit an error on an unexpected token - unexpected :: String -> m a - - -- | This parser only succeeds at the end of the input. This is not a - -- primitive parser but it is defined using 'notFollowedBy'. - -- - -- > eof = notFollowedBy anyChar "end of input" - eof :: m () - - -- | @notFollowedBy p@ only succeeds when parser @p@ fails. This parser - -- does not consume any input. This parser can be used to implement the - -- \'longest match\' rule. For example, when recognizing keywords (for - -- example @let@), we want to make sure that a keyword is not followed - -- by a legal identifier character, in which case the keyword is - -- actually an identifier (for example @lets@). We can program this - -- behaviour as follows: - -- - -- > keywordLet = try $ string "let" <* notFollowedBy alphaNum - notFollowedBy :: Show a => m a -> m () - -instance (Parsing m, MonadPlus m) => Parsing (Lazy.StateT s m) where - try (Lazy.StateT m) = Lazy.StateT $ try . m - {-# INLINE try #-} - Lazy.StateT m l = Lazy.StateT $ \s -> m s l - {-# INLINE () #-} - unexpected = lift . 
unexpected - {-# INLINE unexpected #-} - eof = lift eof - {-# INLINE eof #-} - notFollowedBy (Lazy.StateT m) = Lazy.StateT - $ \s -> notFollowedBy (fst <$> m s) >> return ((),s) - {-# INLINE notFollowedBy #-} - -instance (Parsing m, MonadPlus m) => Parsing (Strict.StateT s m) where - try (Strict.StateT m) = Strict.StateT $ try . m - {-# INLINE try #-} - Strict.StateT m l = Strict.StateT $ \s -> m s l - {-# INLINE () #-} - unexpected = lift . unexpected - {-# INLINE unexpected #-} - eof = lift eof - {-# INLINE eof #-} - notFollowedBy (Strict.StateT m) = Strict.StateT - $ \s -> notFollowedBy (fst <$> m s) >> return ((),s) - {-# INLINE notFollowedBy #-} - -instance (Parsing m, MonadPlus m) => Parsing (ReaderT e m) where - try (ReaderT m) = ReaderT $ try . m - {-# INLINE try #-} - ReaderT m l = ReaderT $ \e -> m e l - {-# INLINE () #-} - skipMany (ReaderT m) = ReaderT $ skipMany . m - {-# INLINE skipMany #-} - unexpected = lift . unexpected - {-# INLINE unexpected #-} - eof = lift eof - {-# INLINE eof #-} - notFollowedBy (ReaderT m) = ReaderT $ notFollowedBy . m - {-# INLINE notFollowedBy #-} - -instance (Parsing m, MonadPlus m, Monoid w) => Parsing (Strict.WriterT w m) where - try (Strict.WriterT m) = Strict.WriterT $ try m - {-# INLINE try #-} - Strict.WriterT m l = Strict.WriterT (m l) - {-# INLINE () #-} - unexpected = lift . unexpected - {-# INLINE unexpected #-} - eof = lift eof - {-# INLINE eof #-} - notFollowedBy (Strict.WriterT m) = Strict.WriterT - $ notFollowedBy (fst <$> m) >>= \x -> return (x, mempty) - {-# INLINE notFollowedBy #-} - -instance (Parsing m, MonadPlus m, Monoid w) => Parsing (Lazy.WriterT w m) where - try (Lazy.WriterT m) = Lazy.WriterT $ try m - {-# INLINE try #-} - Lazy.WriterT m l = Lazy.WriterT (m l) - {-# INLINE () #-} - unexpected = lift . 
unexpected - {-# INLINE unexpected #-} - eof = lift eof - {-# INLINE eof #-} - notFollowedBy (Lazy.WriterT m) = Lazy.WriterT - $ notFollowedBy (fst <$> m) >>= \x -> return (x, mempty) - {-# INLINE notFollowedBy #-} - -instance (Parsing m, MonadPlus m, Monoid w) => Parsing (Lazy.RWST r w s m) where - try (Lazy.RWST m) = Lazy.RWST $ \r s -> try (m r s) - {-# INLINE try #-} - Lazy.RWST m l = Lazy.RWST $ \r s -> m r s l - {-# INLINE () #-} - unexpected = lift . unexpected - {-# INLINE unexpected #-} - eof = lift eof - {-# INLINE eof #-} - notFollowedBy (Lazy.RWST m) = Lazy.RWST - $ \r s -> notFollowedBy ((\(a,_,_) -> a) <$> m r s) >>= \x -> return (x, s, mempty) - {-# INLINE notFollowedBy #-} - -instance (Parsing m, MonadPlus m, Monoid w) => Parsing (Strict.RWST r w s m) where - try (Strict.RWST m) = Strict.RWST $ \r s -> try (m r s) - {-# INLINE try #-} - Strict.RWST m l = Strict.RWST $ \r s -> m r s l - {-# INLINE () #-} - unexpected = lift . unexpected - {-# INLINE unexpected #-} - eof = lift eof - {-# INLINE eof #-} - notFollowedBy (Strict.RWST m) = Strict.RWST - $ \r s -> notFollowedBy ((\(a,_,_) -> a) <$> m r s) >>= \x -> return (x, s, mempty) - {-# INLINE notFollowedBy #-} - -instance (Parsing m, Monad m) => Parsing (IdentityT m) where - try = IdentityT . try . runIdentityT - {-# INLINE try #-} - IdentityT m l = IdentityT (m l) - {-# INLINE () #-} - skipMany = IdentityT . skipMany . runIdentityT - {-# INLINE skipMany #-} - unexpected = lift . unexpected - {-# INLINE unexpected #-} - eof = lift eof - {-# INLINE eof #-} - notFollowedBy (IdentityT m) = IdentityT $ notFollowedBy m - {-# INLINE notFollowedBy #-} - -instance (Parsec.Stream s m t, Show t) => Parsing (Parsec.ParsecT s u m) where - try = Parsec.try - () = (Parsec.) 
- skipMany = Parsec.skipMany - skipSome = Parsec.skipMany1 - unexpected = Parsec.unexpected - eof = Parsec.eof - notFollowedBy = Parsec.notFollowedBy - -instance t ~ Char => Parsing (ReadP.Parser r t) where - try = id - () = const - skipMany = ReadP.skipMany - skipSome = ReadP.skipMany1 - unexpected = const ReadP.pfail - eof = ReadP.eof - - -- TODO: we would like to have <++ here - notFollowedBy p = ((Just <$> p) ReadP.+++ pure Nothing) - >>= maybe (pure ()) (unexpected . show) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Prelude/Internal.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Prelude/Internal.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Prelude/Internal.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Prelude/Internal.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ --- | This module re-exports the non-exposed --- "Distribution.Compat.Prelude" module for --- reuse by @cabal-install@'s --- "Distribution.Client.Compat.Prelude" module. --- --- It is highly discouraged to rely on this module --- for @Setup.hs@ scripts since its API is /not/ --- stable. -module Distribution.Compat.Prelude.Internal - {-# WARNING "This modules' API is not stable. Use at your own risk, or better yet, use @base-compat@!" 
#-} - ( module Distribution.Compat.Prelude - ) where - -import Distribution.Compat.Prelude diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Prelude.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Prelude.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Prelude.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Prelude.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,204 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE TypeOperators #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE FlexibleContexts #-} - -#ifdef MIN_VERSION_base -#define MINVER_base_48 MIN_VERSION_base(4,8,0) -#define MINVER_base_47 MIN_VERSION_base(4,7,0) -#else -#define MINVER_base_48 (__GLASGOW_HASKELL__ >= 710) -#define MINVER_base_47 (__GLASGOW_HASKELL__ >= 708) -#endif - --- | This module does two things: --- --- * Acts as a compatiblity layer, like @base-compat@. --- --- * Provides commonly used imports. 
-module Distribution.Compat.Prelude ( - -- * Prelude - -- - -- Prelude is re-exported, following is hidden: - module BasePrelude, - -#if !MINVER_base_48 - -- * base 4.8 shim - Applicative(..), (<$), (<$>), - Monoid(..), -#endif - - -- * Common type-classes - Semigroup (..), - gmappend, gmempty, - Typeable, - Data, - Generic, - NFData (..), genericRnf, - Binary (..), - Alternative (..), - MonadPlus (..), - IsString (..), - - -- * Some types - IO, NoCallStackIO, - Map, - - -- * Data.Maybe - catMaybes, mapMaybe, - fromMaybe, - maybeToList, listToMaybe, - isNothing, isJust, - - -- * Data.List - unfoldr, - isPrefixOf, isSuffixOf, - intercalate, intersperse, - sort, sortBy, - nub, nubBy, - - -- * Data.Foldable - Foldable, foldMap, foldr, - null, length, - find, foldl', - traverse_, for_, - any, all, - - -- * Data.Traversable - Traversable, traverse, sequenceA, - for, - - -- * Control.Arrow - first, - - -- * Control.Monad - liftM, liftM2, - unless, when, - ap, void, - foldM, filterM, - - -- * Data.Char - isSpace, isDigit, isUpper, isAlpha, isAlphaNum, - chr, ord, - toLower, toUpper, - - -- * Data.Word & Data.Int - Word, - Word8, Word16, Word32, Word64, - Int8, Int16, Int32, Int64, - - -- * Text.PrettyPrint - (<<>>), - ) where - --- We also could hide few partial function -import Prelude as BasePrelude hiding - ( IO, mapM, mapM_, sequence, null, length, foldr, any, all -#if MINVER_base_48 - , Word - -- We hide them, as we import only some members - , Traversable, traverse, sequenceA - , Foldable, foldMap -#endif - ) - -#if !MINVER_base_48 -import Control.Applicative (Applicative (..), (<$), (<$>)) -import Distribution.Compat.Semigroup (Monoid (..)) -#else -import Data.Foldable (length, null) -#endif - -import Data.Foldable (Foldable (foldMap, foldr), find, foldl', for_, traverse_, any, all) -import Data.Traversable (Traversable (traverse, sequenceA), for) - -import Control.Applicative (Alternative (..)) -import Control.DeepSeq (NFData (..)) -import Data.Data (Data) -import 
Data.Typeable (Typeable) -import Distribution.Compat.Binary (Binary (..)) -import Distribution.Compat.Semigroup (Semigroup (..), gmappend, gmempty) -import GHC.Generics (Generic, Rep(..), - V1, U1(U1), K1(unK1), M1(unM1), - (:*:)((:*:)), (:+:)(L1,R1)) - -import Data.Map (Map) - -import Control.Arrow (first) -import Control.Monad hiding (mapM) -import Data.Char -import Data.List (intercalate, intersperse, isPrefixOf, - isSuffixOf, nub, nubBy, sort, sortBy, - unfoldr) -import Data.Maybe -import Data.String (IsString (..)) -import Data.Int -import Data.Word - -import qualified Text.PrettyPrint as Disp - -import qualified Prelude as OrigPrelude -import Distribution.Compat.Stack - -type IO a = WithCallStack (OrigPrelude.IO a) -type NoCallStackIO a = OrigPrelude.IO a - --- | New name for 'Text.PrettyPrint.<>' -(<<>>) :: Disp.Doc -> Disp.Doc -> Disp.Doc -(<<>>) = (Disp.<>) - -#if !MINVER_base_48 --- | Test whether the structure is empty. The default implementation is --- optimized for structures that are similar to cons-lists, because there --- is no general way to do better. -null :: Foldable t => t a -> Bool -null = foldr (\_ _ -> False) True - --- | Returns the size/length of a finite structure as an 'Int'. The --- default implementation is optimized for structures that are similar to --- cons-lists, because there is no general way to do better. -length :: Foldable t => t a -> Int -length = foldl' (\c _ -> c+1) 0 -#endif - - --- | "GHC.Generics"-based 'rnf' implementation --- --- This is needed in order to support @deepseq < 1.4@ which didn't --- have a 'Generic'-based default 'rnf' implementation yet. --- --- In order to define instances, use e.g. --- --- > instance NFData MyType where rnf = genericRnf --- --- The implementation has been taken from @deepseq-1.4.2@'s default --- 'rnf' implementation. -genericRnf :: (Generic a, GNFData (Rep a)) => a -> () -genericRnf = grnf . 
from - --- | Hidden internal type-class -class GNFData f where - grnf :: f a -> () - -instance GNFData V1 where - grnf = error "Control.DeepSeq.rnf: uninhabited type" - -instance GNFData U1 where - grnf U1 = () - -instance NFData a => GNFData (K1 i a) where - grnf = rnf . unK1 - {-# INLINEABLE grnf #-} - -instance GNFData a => GNFData (M1 i c a) where - grnf = grnf . unM1 - {-# INLINEABLE grnf #-} - -instance (GNFData a, GNFData b) => GNFData (a :*: b) where - grnf (x :*: y) = grnf x `seq` grnf y - {-# INLINEABLE grnf #-} - -instance (GNFData a, GNFData b) => GNFData (a :+: b) where - grnf (L1 x) = grnf x - grnf (R1 x) = grnf x - {-# INLINEABLE grnf #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/ReadP.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/ReadP.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/ReadP.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/ReadP.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,424 +0,0 @@ -{-# LANGUAGE GADTs #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.ReadP --- Copyright : (c) The University of Glasgow 2002 --- License : BSD-style (see the file libraries/base/LICENSE) --- --- Maintainer : libraries@haskell.org --- Portability : portable --- --- This is a library of parser combinators, originally written by Koen Claessen. --- It parses all alternatives in parallel, so it never keeps hold of --- the beginning of the input string, a common source of space leaks with --- other parsers. The '(+++)' choice combinator is genuinely commutative; --- it makes no difference which branch is \"shorter\". --- --- See also Koen's paper /Parallel Parsing Processes/ --- (). 
--- --- This version of ReadP has been locally hacked to make it H98, by --- Martin Sjögren --- --- The unit tests have been moved to UnitTest.Distribution.Compat.ReadP, by --- Mark Lentczner ------------------------------------------------------------------------------ - -module Distribution.Compat.ReadP - ( - -- * The 'ReadP' type - ReadP, -- :: * -> *; instance Functor, Monad, MonadPlus - - -- * Primitive operations - get, -- :: ReadP Char - look, -- :: ReadP String - (+++), -- :: ReadP a -> ReadP a -> ReadP a - (<++), -- :: ReadP a -> ReadP a -> ReadP a - gather, -- :: ReadP a -> ReadP (String, a) - - -- * Other operations - pfail, -- :: ReadP a - eof, -- :: ReadP () - satisfy, -- :: (Char -> Bool) -> ReadP Char - char, -- :: Char -> ReadP Char - string, -- :: String -> ReadP String - munch, -- :: (Char -> Bool) -> ReadP String - munch1, -- :: (Char -> Bool) -> ReadP String - skipSpaces, -- :: ReadP () - skipSpaces1,-- :: ReadP () - choice, -- :: [ReadP a] -> ReadP a - count, -- :: Int -> ReadP a -> ReadP [a] - between, -- :: ReadP open -> ReadP close -> ReadP a -> ReadP a - option, -- :: a -> ReadP a -> ReadP a - optional, -- :: ReadP a -> ReadP () - many, -- :: ReadP a -> ReadP [a] - many1, -- :: ReadP a -> ReadP [a] - skipMany, -- :: ReadP a -> ReadP () - skipMany1, -- :: ReadP a -> ReadP () - sepBy, -- :: ReadP a -> ReadP sep -> ReadP [a] - sepBy1, -- :: ReadP a -> ReadP sep -> ReadP [a] - endBy, -- :: ReadP a -> ReadP sep -> ReadP [a] - endBy1, -- :: ReadP a -> ReadP sep -> ReadP [a] - chainr, -- :: ReadP a -> ReadP (a -> a -> a) -> a -> ReadP a - chainl, -- :: ReadP a -> ReadP (a -> a -> a) -> a -> ReadP a - chainl1, -- :: ReadP a -> ReadP (a -> a -> a) -> ReadP a - chainr1, -- :: ReadP a -> ReadP (a -> a -> a) -> ReadP a - manyTill, -- :: ReadP a -> ReadP end -> ReadP [a] - - -- * Running a parser - ReadS, -- :: *; = String -> [(a,String)] - readP_to_S, -- :: ReadP a -> ReadS a - readS_to_P, -- :: ReadS a -> ReadP a - - -- ** Internal - Parser, - ) - 
where - -import Prelude () -import Distribution.Compat.Prelude hiding (many, get) - -import qualified Distribution.Compat.MonadFail as Fail - -import Control.Monad( replicateM, (>=>) ) - -infixr 5 +++, <++ - --- --------------------------------------------------------------------------- --- The P type --- is representation type -- should be kept abstract - -data P s a - = Get (s -> P s a) - | Look ([s] -> P s a) - | Fail - | Result a (P s a) - | Final [(a,[s])] -- invariant: list is non-empty! - --- Monad, MonadPlus - -instance Functor (P s) where - fmap = liftM - -instance Applicative (P s) where - pure x = Result x Fail - (<*>) = ap - -instance Monad (P s) where - return = pure - - (Get f) >>= k = Get (f >=> k) - (Look f) >>= k = Look (f >=> k) - Fail >>= _ = Fail - (Result x p) >>= k = k x `mplus` (p >>= k) - (Final r) >>= k = final [ys' | (x,s) <- r, ys' <- run (k x) s] - - fail = Fail.fail - -instance Fail.MonadFail (P s) where - fail _ = Fail - -instance Alternative (P s) where - empty = mzero - (<|>) = mplus - -instance MonadPlus (P s) where - mzero = Fail - - -- most common case: two gets are combined - Get f1 `mplus` Get f2 = Get (\c -> f1 c `mplus` f2 c) - - -- results are delivered as soon as possible - Result x p `mplus` q = Result x (p `mplus` q) - p `mplus` Result x q = Result x (p `mplus` q) - - -- fail disappears - Fail `mplus` p = p - p `mplus` Fail = p - - -- two finals are combined - -- final + look becomes one look and one final (=optimization) - -- final + sthg else becomes one look and one final - Final r `mplus` Final t = Final (r ++ t) - Final r `mplus` Look f = Look (\s -> Final (r ++ run (f s) s)) - Final r `mplus` p = Look (\s -> Final (r ++ run p s)) - Look f `mplus` Final r = Look (\s -> Final (run (f s) s ++ r)) - p `mplus` Final r = Look (\s -> Final (run p s ++ r)) - - -- two looks are combined (=optimization) - -- look + sthg else floats upwards - Look f `mplus` Look g = Look (\s -> f s `mplus` g s) - Look f `mplus` p = Look (\s -> 
f s `mplus` p) - p `mplus` Look f = Look (\s -> p `mplus` f s) - --- --------------------------------------------------------------------------- --- The ReadP type - -newtype Parser r s a = R ((a -> P s r) -> P s r) -type ReadP r a = Parser r Char a - --- Functor, Monad, MonadPlus - -instance Functor (Parser r s) where - fmap h (R f) = R (\k -> f (k . h)) - -instance Applicative (Parser r s) where - pure x = R (\k -> k x) - (<*>) = ap - -instance s ~ Char => Alternative (Parser r s) where - empty = pfail - (<|>) = (+++) - -instance Monad (Parser r s) where - return = pure - fail = Fail.fail - R m >>= f = R (\k -> m (\a -> let R m' = f a in m' k)) - -instance Fail.MonadFail (Parser r s) where - fail _ = R (const Fail) - -instance s ~ Char => MonadPlus (Parser r s) where - mzero = pfail - mplus = (+++) - --- --------------------------------------------------------------------------- --- Operations over P - -final :: [(a,[s])] -> P s a --- Maintains invariant for Final constructor -final [] = Fail -final r = Final r - -run :: P c a -> ([c] -> [(a, [c])]) -run (Get f) (c:s) = run (f c) s -run (Look f) s = run (f s) s -run (Result x p) s = (x,s) : run p s -run (Final r) _ = r -run _ _ = [] - --- --------------------------------------------------------------------------- --- Operations over ReadP - -get :: ReadP r Char --- ^ Consumes and returns the next character. --- Fails if there is no input left. -get = R Get - -look :: ReadP r String --- ^ Look-ahead: returns the part of the input that is left, without --- consuming it. -look = R Look - -pfail :: ReadP r a --- ^ Always fails. -pfail = R (const Fail) - -eof :: ReadP r () --- ^ Succeeds iff we are at the end of input -eof = do { s <- look - ; if null s then return () - else pfail } - -(+++) :: ReadP r a -> ReadP r a -> ReadP r a --- ^ Symmetric choice. 
-R f1 +++ R f2 = R (\k -> f1 k `mplus` f2 k) - -(<++) :: ReadP a a -> ReadP r a -> ReadP r a --- ^ Local, exclusive, left-biased choice: If left parser --- locally produces any result at all, then right parser is --- not used. -R f <++ q = - do s <- look - probe (f return) s 0 - where - probe (Get f') (c:s) n = probe (f' c) s (n+1 :: Int) - probe (Look f') s n = probe (f' s) s n - probe p@(Result _ _) _ n = discard n >> R (p >>=) - probe (Final r) _ _ = R (Final r >>=) - probe _ _ _ = q - - discard 0 = return () - discard n = get >> discard (n-1 :: Int) - -gather :: ReadP (String -> P Char r) a -> ReadP r (String, a) --- ^ Transforms a parser into one that does the same, but --- in addition returns the exact characters read. --- IMPORTANT NOTE: 'gather' gives a runtime error if its first argument --- is built using any occurrences of readS_to_P. -gather (R m) = - R (\k -> gath id (m (\a -> return (\s -> k (s,a))))) - where - gath l (Get f) = Get (\c -> gath (l.(c:)) (f c)) - gath _ Fail = Fail - gath l (Look f) = Look (gath l . f) - gath l (Result k p) = k (l []) `mplus` gath l p - gath _ (Final _) = error "do not use readS_to_P in gather!" - --- --------------------------------------------------------------------------- --- Derived operations - -satisfy :: (Char -> Bool) -> ReadP r Char --- ^ Consumes and returns the next character, if it satisfies the --- specified predicate. -satisfy p = do c <- get; if p c then return c else pfail - -char :: Char -> ReadP r Char --- ^ Parses and returns the specified character. -char c = satisfy (c ==) - -string :: String -> ReadP r String --- ^ Parses and returns the specified string. -string this = do s <- look; scan this s - where - scan [] _ = return this - scan (x:xs) (y:ys) | x == y = get >> scan xs ys - scan _ _ = pfail - -munch :: (Char -> Bool) -> ReadP r String --- ^ Parses the first zero or more characters satisfying the predicate. 
-munch p = - do s <- look - scan s - where - scan (c:cs) | p c = do _ <- get; s <- scan cs; return (c:s) - scan _ = do return "" - -munch1 :: (Char -> Bool) -> ReadP r String --- ^ Parses the first one or more characters satisfying the predicate. -munch1 p = - do c <- get - if p c then do s <- munch p; return (c:s) - else pfail - -choice :: [ReadP r a] -> ReadP r a --- ^ Combines all parsers in the specified list. -choice [] = pfail -choice [p] = p -choice (p:ps) = p +++ choice ps - -skipSpaces :: ReadP r () --- ^ Skips all whitespace. -skipSpaces = - do s <- look - skip s - where - skip (c:s) | isSpace c = do _ <- get; skip s - skip _ = do return () - -skipSpaces1 :: ReadP r () --- ^ Like 'skipSpaces' but succeeds only if there is at least one --- whitespace character to skip. -skipSpaces1 = satisfy isSpace >> skipSpaces - -count :: Int -> ReadP r a -> ReadP r [a] --- ^ @ count n p @ parses @n@ occurrences of @p@ in sequence. A list of --- results is returned. -count n p = replicateM n p - -between :: ReadP r open -> ReadP r close -> ReadP r a -> ReadP r a --- ^ @ between open close p @ parses @open@, followed by @p@ and finally --- @close@. Only the value of @p@ is returned. -between open close p = do _ <- open - x <- p - _ <- close - return x - -option :: a -> ReadP r a -> ReadP r a --- ^ @option x p@ will either parse @p@ or return @x@ without consuming --- any input. -option x p = p +++ return x - -optional :: ReadP r a -> ReadP r () --- ^ @optional p@ optionally parses @p@ and always returns @()@. -optional p = (p >> return ()) +++ return () - -many :: ReadP r a -> ReadP r [a] --- ^ Parses zero or more occurrences of the given parser. -many p = return [] +++ many1 p - -many1 :: ReadP r a -> ReadP r [a] --- ^ Parses one or more occurrences of the given parser. -many1 p = liftM2 (:) p (many p) - -skipMany :: ReadP r a -> ReadP r () --- ^ Like 'many', but discards the result. 
-skipMany p = many p >> return () - -skipMany1 :: ReadP r a -> ReadP r () --- ^ Like 'many1', but discards the result. -skipMany1 p = p >> skipMany p - -sepBy :: ReadP r a -> ReadP r sep -> ReadP r [a] --- ^ @sepBy p sep@ parses zero or more occurrences of @p@, separated by @sep@. --- Returns a list of values returned by @p@. -sepBy p sep = sepBy1 p sep +++ return [] - -sepBy1 :: ReadP r a -> ReadP r sep -> ReadP r [a] --- ^ @sepBy1 p sep@ parses one or more occurrences of @p@, separated by @sep@. --- Returns a list of values returned by @p@. -sepBy1 p sep = liftM2 (:) p (many (sep >> p)) - -endBy :: ReadP r a -> ReadP r sep -> ReadP r [a] --- ^ @endBy p sep@ parses zero or more occurrences of @p@, separated and ended --- by @sep@. -endBy p sep = many (do x <- p ; _ <- sep ; return x) - -endBy1 :: ReadP r a -> ReadP r sep -> ReadP r [a] --- ^ @endBy p sep@ parses one or more occurrences of @p@, separated and ended --- by @sep@. -endBy1 p sep = many1 (do x <- p ; _ <- sep ; return x) - -chainr :: ReadP r a -> ReadP r (a -> a -> a) -> a -> ReadP r a --- ^ @chainr p op x@ parses zero or more occurrences of @p@, separated by @op@. --- Returns a value produced by a /right/ associative application of all --- functions returned by @op@. If there are no occurrences of @p@, @x@ is --- returned. -chainr p op x = chainr1 p op +++ return x - -chainl :: ReadP r a -> ReadP r (a -> a -> a) -> a -> ReadP r a --- ^ @chainl p op x@ parses zero or more occurrences of @p@, separated by @op@. --- Returns a value produced by a /left/ associative application of all --- functions returned by @op@. If there are no occurrences of @p@, @x@ is --- returned. -chainl p op x = chainl1 p op +++ return x - -chainr1 :: ReadP r a -> ReadP r (a -> a -> a) -> ReadP r a --- ^ Like 'chainr', but parses one or more occurrences of @p@. 
-chainr1 p op = scan - where scan = p >>= rest - rest x = do f <- op - y <- scan - return (f x y) - +++ return x - -chainl1 :: ReadP r a -> ReadP r (a -> a -> a) -> ReadP r a --- ^ Like 'chainl', but parses one or more occurrences of @p@. -chainl1 p op = p >>= rest - where rest x = do f <- op - y <- p - rest (f x y) - +++ return x - -manyTill :: ReadP r a -> ReadP [a] end -> ReadP r [a] --- ^ @manyTill p end@ parses zero or more occurrences of @p@, until @end@ --- succeeds. Returns a list of values returned by @p@. -manyTill p end = scan - where scan = (end >> return []) <++ (liftM2 (:) p scan) - --- --------------------------------------------------------------------------- --- Converting between ReadP and Read - -readP_to_S :: ReadP a a -> ReadS a --- ^ Converts a parser into a Haskell ReadS-style function. --- This is the main way in which you can \"run\" a 'ReadP' parser: --- the expanded type is --- @ readP_to_S :: ReadP a -> String -> [(a,String)] @ -readP_to_S (R f) = run (f return) - -readS_to_P :: ReadS a -> ReadP r a --- ^ Converts a Haskell ReadS-style function into a parser. --- Warning: This introduces local backtracking in the resulting --- parser, and therefore a possible inefficiency. 
-readS_to_P r = - R (\k -> Look (\s -> final [bs'' | (a,s') <- r s, bs'' <- run (k a) s'])) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Semigroup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Semigroup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Semigroup.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Semigroup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,171 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE TypeOperators #-} - --- | Compatibility layer for "Data.Semigroup" -module Distribution.Compat.Semigroup - ( Semigroup((<>)) - , Mon.Monoid(..) - , All(..) - , Any(..) - - , Last'(..) - - , gmappend - , gmempty - ) where - -import Distribution.Compat.Binary (Binary) - -import Control.Applicative as App -import GHC.Generics -#if __GLASGOW_HASKELL__ >= 711 --- Data.Semigroup is available since GHC 8.0/base-4.9 -import Data.Semigroup -import qualified Data.Monoid as Mon -#else --- provide internal simplified non-exposed class for older GHCs -import Data.Monoid as Mon (Monoid(..), All(..), Any(..), Dual(..)) --- containers -import Data.Set (Set) -import Data.IntSet (IntSet) -import Data.Map (Map) -import Data.IntMap (IntMap) - - -class Semigroup a where - (<>) :: a -> a -> a - --- several primitive instances -instance Semigroup () where - _ <> _ = () - -instance Semigroup [a] where - (<>) = (++) - -instance Semigroup a => Semigroup (Dual a) where - Dual a <> Dual b = Dual (b <> a) - -instance Semigroup a => Semigroup (Maybe a) where - Nothing <> b = b - a <> Nothing = a - Just a <> Just b = Just (a <> b) - -instance Semigroup (Either a b) where - Left _ <> b = b - a <> _ = a - -instance Semigroup Ordering where - LT <> _ = LT - EQ <> y = y - GT <> _ = GT 
- -instance Semigroup b => Semigroup (a -> b) where - f <> g = \a -> f a <> g a - -instance Semigroup All where - All a <> All b = All (a && b) - -instance Semigroup Any where - Any a <> Any b = Any (a || b) - -instance (Semigroup a, Semigroup b) => Semigroup (a, b) where - (a,b) <> (a',b') = (a<>a',b<>b') - -instance (Semigroup a, Semigroup b, Semigroup c) - => Semigroup (a, b, c) where - (a,b,c) <> (a',b',c') = (a<>a',b<>b',c<>c') - -instance (Semigroup a, Semigroup b, Semigroup c, Semigroup d) - => Semigroup (a, b, c, d) where - (a,b,c,d) <> (a',b',c',d') = (a<>a',b<>b',c<>c',d<>d') - -instance (Semigroup a, Semigroup b, Semigroup c, Semigroup d, Semigroup e) - => Semigroup (a, b, c, d, e) where - (a,b,c,d,e) <> (a',b',c',d',e') = (a<>a',b<>b',c<>c',d<>d',e<>e') - --- containers instances -instance Semigroup IntSet where - (<>) = mappend - -instance Ord a => Semigroup (Set a) where - (<>) = mappend - -instance Semigroup (IntMap v) where - (<>) = mappend - -instance Ord k => Semigroup (Map k v) where - (<>) = mappend -#endif - --- | Cabal's own 'Data.Monoid.Last' copy to avoid requiring an orphan --- 'Binary' instance. --- --- Once the oldest `binary` version we support provides a 'Binary' --- instance for 'Data.Monoid.Last' we can remove this one here. --- --- NB: 'Data.Semigroup.Last' is defined differently and not a 'Monoid' -newtype Last' a = Last' { getLast' :: Maybe a } - deriving (Eq, Ord, Read, Show, Binary, - Functor, App.Applicative, Generic) - -instance Semigroup (Last' a) where - x <> Last' Nothing = x - _ <> x = x - -instance Monoid (Last' a) where - mempty = Last' Nothing - mappend = (<>) - -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- --- Stolen from Edward Kmett's BSD3-licensed `semigroups` package - --- | Generically generate a 'Semigroup' ('<>') operation for any type --- implementing 'Generic'. 
This operation will append two values --- by point-wise appending their component fields. It is only defined --- for product types. --- --- @ --- 'gmappend' a ('gmappend' b c) = 'gmappend' ('gmappend' a b) c --- @ -gmappend :: (Generic a, GSemigroup (Rep a)) => a -> a -> a -gmappend x y = to (gmappend' (from x) (from y)) - -class GSemigroup f where - gmappend' :: f p -> f p -> f p - -instance Semigroup a => GSemigroup (K1 i a) where - gmappend' (K1 x) (K1 y) = K1 (x <> y) - -instance GSemigroup f => GSemigroup (M1 i c f) where - gmappend' (M1 x) (M1 y) = M1 (gmappend' x y) - -instance (GSemigroup f, GSemigroup g) => GSemigroup (f :*: g) where - gmappend' (x1 :*: x2) (y1 :*: y2) = gmappend' x1 y1 :*: gmappend' x2 y2 - --- | Generically generate a 'Monoid' 'mempty' for any product-like type --- implementing 'Generic'. --- --- It is only defined for product types. --- --- @ --- 'gmappend' 'gmempty' a = a = 'gmappend' a 'gmempty' --- @ - -gmempty :: (Generic a, GMonoid (Rep a)) => a -gmempty = to gmempty' - -class GSemigroup f => GMonoid f where - gmempty' :: f p - -instance (Semigroup a, Monoid a) => GMonoid (K1 i a) where - gmempty' = K1 mempty - -instance GMonoid f => GMonoid (M1 i c f) where - gmempty' = M1 gmempty' - -instance (GMonoid f, GMonoid g) => GMonoid (f :*: g) where - gmempty' = gmempty' :*: gmempty' diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/SnocList.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/SnocList.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/SnocList.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/SnocList.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,33 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.SnocList --- License : BSD3 --- --- Maintainer : 
cabal-dev@haskell.org --- Stability : experimental --- Portability : portable --- --- A very reversed list. Has efficient `snoc` -module Distribution.Compat.SnocList ( - SnocList, - runSnocList, - snoc, -) where - -import Prelude () -import Distribution.Compat.Prelude - -newtype SnocList a = SnocList [a] - -snoc :: SnocList a -> a -> SnocList a -snoc (SnocList xs) x = SnocList (x : xs) - -runSnocList :: SnocList a -> [a] -runSnocList (SnocList xs) = reverse xs - -instance Semigroup (SnocList a) where - SnocList xs <> SnocList ys = SnocList (ys <> xs) - -instance Monoid (SnocList a) where - mempty = SnocList [] - mappend = (<>) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Stack.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Stack.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Stack.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Stack.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,113 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE ImplicitParams #-} -module Distribution.Compat.Stack ( - WithCallStack, - CallStack, - annotateCallStackIO, - withFrozenCallStack, - withLexicalCallStack, - callStack, - prettyCallStack, - parentSrcLocPrefix -) where - -import System.IO.Error - -#ifdef MIN_VERSION_base -#if MIN_VERSION_base(4,8,1) -#define GHC_STACK_SUPPORTED 1 -#endif -#endif - -#ifdef GHC_STACK_SUPPORTED -import GHC.Stack -#endif - -#ifdef GHC_STACK_SUPPORTED - -#if MIN_VERSION_base(4,9,0) -type WithCallStack a = HasCallStack => a -#elif MIN_VERSION_base(4,8,1) -type WithCallStack a = (?callStack :: CallStack) => a -#endif - -#if !MIN_VERSION_base(4,9,0) --- NB: Can't say WithCallStack (WithCallStack a -> a); --- Haskell doesn't support this kind of implicit parameter! 
--- See https://mail.haskell.org/pipermail/ghc-devs/2016-January/011096.html --- Since this function doesn't do anything, it's OK to --- give it a less good type. -withFrozenCallStack :: WithCallStack (a -> a) -withFrozenCallStack x = x - -callStack :: (?callStack :: CallStack) => CallStack -callStack = ?callStack - -prettyCallStack :: CallStack -> String -prettyCallStack = showCallStack -#endif - --- | Give the *parent* of the person who invoked this; --- so it's most suitable for being called from a utility function. --- You probably want to call this using 'withFrozenCallStack'; otherwise --- it's not very useful. We didn't implement this for base-4.8.1 --- because we cannot rely on freezing to have taken place. --- -parentSrcLocPrefix :: WithCallStack String -#if MIN_VERSION_base(4,9,0) -parentSrcLocPrefix = - case getCallStack callStack of - (_:(_, loc):_) -> showLoc loc - [(_, loc)] -> showLoc loc - [] -> error "parentSrcLocPrefix: empty call stack" - where - showLoc loc = - srcLocFile loc ++ ":" ++ show (srcLocStartLine loc) ++ ": " -#else -parentSrcLocPrefix = "Call sites not available with base < 4.9.0.0 (GHC 8.0): " -#endif - --- Yeah, this uses skivvy implementation details. 
-withLexicalCallStack :: (a -> WithCallStack (IO b)) -> WithCallStack (a -> IO b) -withLexicalCallStack f = - let stk = ?callStack - in \x -> let ?callStack = stk in f x - -#else - -data CallStack = CallStack - deriving (Eq, Show) - -type WithCallStack a = a - -withFrozenCallStack :: a -> a -withFrozenCallStack x = x - -callStack :: CallStack -callStack = CallStack - -prettyCallStack :: CallStack -> String -prettyCallStack _ = "Call stacks not available with base < 4.8.1.0 (GHC 7.10)" - -parentSrcLocPrefix :: String -parentSrcLocPrefix = "Call sites not available with base < 4.9.0.0 (GHC 8.0): " - -withLexicalCallStack :: (a -> IO b) -> a -> IO b -withLexicalCallStack f = f - -#endif - --- | This function is for when you *really* want to add a call --- stack to raised IO, but you don't have a --- 'Distribution.Verbosity.Verbosity' so you can't use --- 'Distribution.Simple.Utils.annotateIO'. If you have a 'Verbosity', --- please use that function instead. -annotateCallStackIO :: WithCallStack (IO a -> IO a) -annotateCallStackIO = modifyIOError f - where - f ioe = ioeSetErrorString ioe - . wrapCallStack - $ ioeGetErrorString ioe - wrapCallStack s = - prettyCallStack callStack ++ "\n" ++ s diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Time.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Time.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compat/Time.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compat/Time.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,190 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE ScopedTypeVariables #-} - -module Distribution.Compat.Time - ( ModTime(..) 
-- Needed for testing - , getModTime, getFileAge, getCurTime - , posixSecondsToModTime - , calibrateMtimeChangeDelay ) - where - -import Prelude () -import Distribution.Compat.Prelude - -import System.Directory ( getModificationTime ) - -import Distribution.Simple.Utils ( withTempDirectory ) -import Distribution.Verbosity ( silent ) - -import System.FilePath - -import Data.Time.Clock.POSIX ( POSIXTime, getPOSIXTime ) -import Data.Time ( diffUTCTime, getCurrentTime ) -import Data.Time.Clock.POSIX ( posixDayLength ) - - -#if defined mingw32_HOST_OS - -import qualified Prelude -import Data.Bits ((.|.), unsafeShiftL) -#if MIN_VERSION_base(4,7,0) -import Data.Bits (finiteBitSize) -#else -import Data.Bits (bitSize) -#endif - -import Foreign ( allocaBytes, peekByteOff ) -import System.IO.Error ( mkIOError, doesNotExistErrorType ) -import System.Win32.Types ( BOOL, DWORD, LPCTSTR, LPVOID, withTString ) - -#else - -import System.Posix.Files ( FileStatus, getFileStatus ) - -#if MIN_VERSION_unix(2,6,0) -import System.Posix.Files ( modificationTimeHiRes ) -#else -import System.Posix.Files ( modificationTime ) -#endif - -#endif - --- | An opaque type representing a file's modification time, represented --- internally as a 64-bit unsigned integer in the Windows UTC format. -newtype ModTime = ModTime Word64 - deriving (Binary, Bounded, Eq, Ord) - -instance Show ModTime where - show (ModTime x) = show x - -instance Read ModTime where - readsPrec p str = map (first ModTime) (readsPrec p str) - --- | Return modification time of the given file. Works around the low clock --- resolution problem that 'getModificationTime' has on GHC < 7.8. --- --- This is a modified version of the code originally written for Shake by Neil --- Mitchell. See module Development.Shake.FileInfo. -getModTime :: FilePath -> NoCallStackIO ModTime - -#if defined mingw32_HOST_OS - --- Directly against the Win32 API. 
-getModTime path = allocaBytes size_WIN32_FILE_ATTRIBUTE_DATA $ \info -> do - res <- getFileAttributesEx path info - if not res - then do - let err = mkIOError doesNotExistErrorType - "Distribution.Compat.Time.getModTime" - Nothing (Just path) - ioError err - else do - dwLow <- peekByteOff info - index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime - dwHigh <- peekByteOff info - index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime -#if MIN_VERSION_base(4,7,0) - let qwTime = - (fromIntegral (dwHigh :: DWORD) `unsafeShiftL` finiteBitSize dwHigh) - .|. (fromIntegral (dwLow :: DWORD)) -#else - let qwTime = - (fromIntegral (dwHigh :: DWORD) `unsafeShiftL` bitSize dwHigh) - .|. (fromIntegral (dwLow :: DWORD)) -#endif - return $! ModTime (qwTime :: Word64) - -#ifdef x86_64_HOST_ARCH -#define CALLCONV ccall -#else -#define CALLCONV stdcall -#endif - -foreign import CALLCONV "windows.h GetFileAttributesExW" - c_getFileAttributesEx :: LPCTSTR -> Int32 -> LPVOID -> Prelude.IO BOOL - -getFileAttributesEx :: String -> LPVOID -> NoCallStackIO BOOL -getFileAttributesEx path lpFileInformation = - withTString path $ \c_path -> - c_getFileAttributesEx c_path getFileExInfoStandard lpFileInformation - -getFileExInfoStandard :: Int32 -getFileExInfoStandard = 0 - -size_WIN32_FILE_ATTRIBUTE_DATA :: Int -size_WIN32_FILE_ATTRIBUTE_DATA = 36 - -index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime :: Int -index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime = 20 - -index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime :: Int -index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime = 24 - -#else - --- Directly against the unix library. -getModTime path = do - st <- getFileStatus path - return $! 
(extractFileTime st) - -extractFileTime :: FileStatus -> ModTime -extractFileTime x = posixTimeToModTime (modificationTimeHiRes x) - -#endif - -windowsTick, secToUnixEpoch :: Word64 -windowsTick = 10000000 -secToUnixEpoch = 11644473600 - --- | Convert POSIX seconds to ModTime. -posixSecondsToModTime :: Int64 -> ModTime -posixSecondsToModTime s = - ModTime $ ((fromIntegral s :: Word64) + secToUnixEpoch) * windowsTick - --- | Convert 'POSIXTime' to 'ModTime'. -posixTimeToModTime :: POSIXTime -> ModTime -posixTimeToModTime p = ModTime $ (ceiling $ p * 1e7) -- 100 ns precision - + (secToUnixEpoch * windowsTick) - --- | Return age of given file in days. -getFileAge :: FilePath -> NoCallStackIO Double -getFileAge file = do - t0 <- getModificationTime file - t1 <- getCurrentTime - return $ realToFrac (t1 `diffUTCTime` t0) / realToFrac posixDayLength - --- | Return the current time as 'ModTime'. -getCurTime :: NoCallStackIO ModTime -getCurTime = posixTimeToModTime `fmap` getPOSIXTime -- Uses 'gettimeofday'. - --- | Based on code written by Neil Mitchell for Shake. See --- 'sleepFileTimeCalibrate' in 'Test.Type'. Returns a pair --- of microsecond values: first, the maximum delay seen, and the --- recommended delay to use before testing for file modification change. --- The returned delay is never smaller --- than 10 ms, but never larger than 1 second. -calibrateMtimeChangeDelay :: IO (Int, Int) -calibrateMtimeChangeDelay = - withTempDirectory silent "." 
"calibration-" $ \dir -> do - let fileName = dir "probe" - mtimes <- for [1..25] $ \(i::Int) -> time $ do - writeFile fileName $ show i - t0 <- getModTime fileName - let spin j = do - writeFile fileName $ show (i,j) - t1 <- getModTime fileName - unless (t0 < t1) (spin $ j + 1) - spin (0::Int) - let mtimeChange = maximum mtimes - mtimeChange' = min 1000000 $ (max 10000 mtimeChange) * 2 - return (mtimeChange, mtimeChange') - where - time :: IO () -> IO Int - time act = do - t0 <- getCurrentTime - act - t1 <- getCurrentTime - return . ceiling $! (t1 `diffUTCTime` t0) * 1e6 -- microseconds diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compiler.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compiler.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Compiler.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Compiler.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,220 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compiler --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This has an enumeration of the various compilers that Cabal knows about. It --- also specifies the default compiler. Sadly you'll often see code that does --- case analysis on this compiler flavour enumeration like: --- --- > case compilerFlavor comp of --- > GHC -> GHC.getInstalledPackages verbosity packageDb progdb --- --- Obviously it would be better to use the proper 'Compiler' abstraction --- because that would keep all the compiler-specific code together. 
--- Unfortunately we cannot make this change yet without breaking the --- 'UserHooks' api, which would break all custom @Setup.hs@ files, so for the --- moment we just have to live with this deficiency. If you're interested, see --- ticket #57. - -module Distribution.Compiler ( - -- * Compiler flavor - CompilerFlavor(..), - buildCompilerId, - buildCompilerFlavor, - defaultCompilerFlavor, - parseCompilerFlavorCompat, - classifyCompilerFlavor, - knownCompilerFlavors, - - -- * Compiler id - CompilerId(..), - - -- * Compiler info - CompilerInfo(..), - unknownCompilerInfo, - AbiTag(..), abiTagString - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Language.Haskell.Extension - -import Distribution.Version (Version, mkVersion', nullVersion) - -import qualified System.Info (compilerName, compilerVersion) -import Distribution.Parsec.Class (Parsec (..)) -import Distribution.Pretty (Pretty (..)) -import Distribution.Text (Text(..), display) -import qualified Distribution.Compat.ReadP as Parse -import qualified Distribution.Compat.CharParsing as P -import qualified Text.PrettyPrint as Disp - -data CompilerFlavor = - GHC | GHCJS | NHC | YHC | Hugs | HBC | Helium | JHC | LHC | UHC | Eta - | HaskellSuite String -- string is the id of the actual compiler - | OtherCompiler String - deriving (Generic, Show, Read, Eq, Ord, Typeable, Data) - -instance Binary CompilerFlavor - -instance NFData CompilerFlavor where rnf = genericRnf - -knownCompilerFlavors :: [CompilerFlavor] -knownCompilerFlavors = - [GHC, GHCJS, NHC, YHC, Hugs, HBC, Helium, JHC, LHC, UHC, Eta] - -instance Pretty CompilerFlavor where - pretty (OtherCompiler name) = Disp.text name - pretty (HaskellSuite name) = Disp.text name - pretty NHC = Disp.text "nhc98" - pretty other = Disp.text (lowercase (show other)) - -instance Parsec CompilerFlavor where - parsec = classifyCompilerFlavor <$> component - where - component = do - cs <- P.munch1 isAlphaNum - if all isDigit cs then fail "all digits 
compiler name" else return cs - -instance Text CompilerFlavor where - parse = do - comp <- Parse.munch1 isAlphaNum - when (all isDigit comp) Parse.pfail - return (classifyCompilerFlavor comp) - -classifyCompilerFlavor :: String -> CompilerFlavor -classifyCompilerFlavor s = - fromMaybe (OtherCompiler s) $ lookup (lowercase s) compilerMap - where - compilerMap = [ (lowercase (display compiler), compiler) - | compiler <- knownCompilerFlavors ] - - ---TODO: In some future release, remove 'parseCompilerFlavorCompat' and use --- ordinary 'parse'. Also add ("nhc", NHC) to the above 'compilerMap'. - --- | Like 'classifyCompilerFlavor' but compatible with the old ReadS parser. --- --- It is compatible in the sense that it accepts only the same strings, --- eg "GHC" but not "ghc". However other strings get mapped to 'OtherCompiler'. --- The point of this is that we do not allow extra valid values that would --- upset older Cabal versions that had a stricter parser however we cope with --- new values more gracefully so that we'll be able to introduce new value in --- future without breaking things so much. --- -parseCompilerFlavorCompat :: Parse.ReadP r CompilerFlavor -parseCompilerFlavorCompat = do - comp <- Parse.munch1 isAlphaNum - when (all isDigit comp) Parse.pfail - case lookup comp compilerMap of - Just compiler -> return compiler - Nothing -> return (OtherCompiler comp) - where - compilerMap = [ (show compiler, compiler) - | compiler <- knownCompilerFlavors - , compiler /= YHC ] - -buildCompilerFlavor :: CompilerFlavor -buildCompilerFlavor = classifyCompilerFlavor System.Info.compilerName - -buildCompilerVersion :: Version -buildCompilerVersion = mkVersion' System.Info.compilerVersion - -buildCompilerId :: CompilerId -buildCompilerId = CompilerId buildCompilerFlavor buildCompilerVersion - --- | The default compiler flavour to pick when compiling stuff. This defaults --- to the compiler used to build the Cabal lib. 
--- --- However if it's not a recognised compiler then it's 'Nothing' and the user --- will have to specify which compiler they want. --- -defaultCompilerFlavor :: Maybe CompilerFlavor -defaultCompilerFlavor = case buildCompilerFlavor of - OtherCompiler _ -> Nothing - _ -> Just buildCompilerFlavor - --- ------------------------------------------------------------ --- * Compiler Id --- ------------------------------------------------------------ - -data CompilerId = CompilerId CompilerFlavor Version - deriving (Eq, Generic, Ord, Read, Show) - -instance Binary CompilerId - -instance NFData CompilerId where rnf = genericRnf - -instance Text CompilerId where - disp (CompilerId f v) - | v == nullVersion = disp f - | otherwise = disp f <<>> Disp.char '-' <<>> disp v - - parse = do - flavour <- parse - version <- (Parse.char '-' >> parse) Parse.<++ return nullVersion - return (CompilerId flavour version) - -lowercase :: String -> String -lowercase = map toLower - --- ------------------------------------------------------------ --- * Compiler Info --- ------------------------------------------------------------ - --- | Compiler information used for resolving configurations. Some --- fields can be set to Nothing to indicate that the information is --- unknown. - -data CompilerInfo = CompilerInfo { - compilerInfoId :: CompilerId, - -- ^ Compiler flavour and version. - compilerInfoAbiTag :: AbiTag, - -- ^ Tag for distinguishing incompatible ABI's on the same - -- architecture/os. - compilerInfoCompat :: Maybe [CompilerId], - -- ^ Other implementations that this compiler claims to be - -- compatible with, if known. - compilerInfoLanguages :: Maybe [Language], - -- ^ Supported language standards, if known. - compilerInfoExtensions :: Maybe [Extension] - -- ^ Supported extensions, if known. 
- } - deriving (Generic, Show, Read) - -instance Binary CompilerInfo - -data AbiTag - = NoAbiTag - | AbiTag String - deriving (Eq, Generic, Show, Read) - -instance Binary AbiTag - -instance Text AbiTag where - disp NoAbiTag = Disp.empty - disp (AbiTag tag) = Disp.text tag - - parse = do - tag <- Parse.munch (\c -> isAlphaNum c || c == '_') - if null tag then return NoAbiTag else return (AbiTag tag) - -abiTagString :: AbiTag -> String -abiTagString NoAbiTag = "" -abiTagString (AbiTag tag) = tag - --- | Make a CompilerInfo of which only the known information is its CompilerId, --- its AbiTag and that it does not claim to be compatible with other --- compiler id's. -unknownCompilerInfo :: CompilerId -> AbiTag -> CompilerInfo -unknownCompilerInfo compilerId abiTag = - CompilerInfo compilerId abiTag (Just []) Nothing Nothing diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/FieldGrammar/Class.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/FieldGrammar/Class.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/FieldGrammar/Class.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/FieldGrammar/Class.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,147 +0,0 @@ -module Distribution.FieldGrammar.Class ( - FieldGrammar (..), - uniqueField, - optionalField, - optionalFieldDef, - monoidalField, - deprecatedField', - ) where - -import Distribution.Compat.Lens -import Distribution.Compat.Prelude -import Prelude () - -import Data.Functor.Identity (Identity (..)) - -import Distribution.Compat.Newtype (Newtype) -import Distribution.Parsec.Class (Parsec) -import Distribution.Parsec.Field -import Distribution.Pretty (Pretty) - --- | 'FieldGrammar' is parametrised by --- --- * @s@ which is a structure we are parsing. We need this to provide prettyprinter --- functionality --- --- * @a@ type of the field. 
--- --- /Note:/ We'd like to have @forall s. Applicative (f s)@ context. --- -class FieldGrammar g where - -- | Unfocus, zoom out, /blur/ 'FieldGrammar'. - blurFieldGrammar :: ALens' a b -> g b c -> g a c - - -- | Field which should be defined, exactly once. - uniqueFieldAla - :: (Parsec b, Pretty b, Newtype b a) - => FieldName -- ^ field name - -> (a -> b) -- ^ 'Newtype' pack - -> ALens' s a -- ^ lens into the field - -> g s a - - -- | Boolean field with a default value. - booleanFieldDef - :: FieldName -- ^ field name - -> ALens' s Bool -- ^ lens into the field - -> Bool -- ^ default - -> g s Bool - - -- | Optional field. - optionalFieldAla - :: (Parsec b, Pretty b, Newtype b a) - => FieldName -- ^ field name - -> (a -> b) -- ^ 'pack' - -> ALens' s (Maybe a) -- ^ lens into the field - -> g s (Maybe a) - - -- | Optional field with default value. - optionalFieldDefAla - :: (Parsec b, Pretty b, Newtype b a, Eq a) - => FieldName -- ^ field name - -> (a -> b) -- ^ 'Newtype' pack - -> ALens' s a -- ^ @'Lens'' s a@: lens into the field - -> a -- ^ default value - -> g s a - - -- | Monoidal field. - -- - -- Values are combined with 'mappend'. - -- - -- /Note:/ 'optionalFieldAla' is a @monoidalField@ with 'Last' monoid. - -- - monoidalFieldAla - :: (Parsec b, Pretty b, Monoid a, Newtype b a) - => FieldName -- ^ field name - -> (a -> b) -- ^ 'pack' - -> ALens' s a -- ^ lens into the field - -> g s a - - -- | Parser matching all fields with a name starting with a prefix. - prefixedFields - :: FieldName -- ^ field name prefix - -> ALens' s [(String, String)] -- ^ lens into the field - -> g s [(String, String)] - - -- | Known field, which we don't parse, neither pretty print. - knownField :: FieldName -> g s () - - -- | Field which is parsed but not pretty printed. - hiddenField :: g s a -> g s a - - -- | Deprecated since - deprecatedSince - :: [Int] -- ^ version - -> String -- ^ deprecation message - -> g s a - -> g s a - - -- | Annotate field with since spec-version. 
- availableSince - :: [Int] -- ^ spec version - -> a -- ^ default value - -> g s a - -> g s a - --- | Field which can be defined at most once. -uniqueField - :: (FieldGrammar g, Parsec a, Pretty a) - => FieldName -- ^ field name - -> ALens' s a -- ^ lens into the field - -> g s a -uniqueField fn = uniqueFieldAla fn Identity - --- | Field which can be defined at most once. -optionalField - :: (FieldGrammar g, Parsec a, Pretty a) - => FieldName -- ^ field name - -> ALens' s (Maybe a) -- ^ lens into the field - -> g s (Maybe a) -optionalField fn = optionalFieldAla fn Identity - --- | Optional field with default value. -optionalFieldDef - :: (FieldGrammar g, Functor (g s), Parsec a, Pretty a, Eq a) - => FieldName -- ^ field name - -> ALens' s a -- ^ @'Lens'' s a@: lens into the field - -> a -- ^ default value - -> g s a -optionalFieldDef fn = optionalFieldDefAla fn Identity - --- | Field which can be define multiple times, and the results are @mappend@ed. -monoidalField - :: (FieldGrammar g, Parsec a, Pretty a, Monoid a) - => FieldName -- ^ field name - -> ALens' s a -- ^ lens into the field - -> g s a -monoidalField fn = monoidalFieldAla fn Identity - --- | Deprecated field. If found, warning is issued. --- --- /Note:/ also it's not pretty printed! 
--- -deprecatedField' - :: FieldGrammar g - => String -- ^ deprecation message - -> g s a - -> g s a -deprecatedField' = deprecatedSince [] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/FieldGrammar/FieldDescrs.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/FieldGrammar/FieldDescrs.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/FieldGrammar/FieldDescrs.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/FieldGrammar/FieldDescrs.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,86 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE RankNTypes #-} -module Distribution.FieldGrammar.FieldDescrs ( - FieldDescrs, - fieldDescrPretty, - fieldDescrParse, - fieldDescrsToList, - ) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Compat.Lens (aview, cloneLens) -import Distribution.Compat.Newtype -import Distribution.FieldGrammar -import Distribution.Pretty (pretty) -import Distribution.Utils.Generic (fromUTF8BS) - -import qualified Data.Map as Map -import qualified Distribution.Parsec.Class as P -import qualified Distribution.Parsec.Field as P -import qualified Text.PrettyPrint as Disp - --- strict pair -data SP s = SP - { pPretty :: !(s -> Disp.Doc) - , pParse :: !(forall m. P.CabalParsing m => s -> m s) - } - --- | A collection field parsers and pretty-printers. -newtype FieldDescrs s a = F { runF :: Map String (SP s) } - deriving (Functor) - -instance Applicative (FieldDescrs s) where - pure _ = F mempty - f <*> x = F (mappend (runF f) (runF x)) - -singletonF :: P.FieldName -> (s -> Disp.Doc) -> (forall m. P.CabalParsing m => s -> m s) -> FieldDescrs s a -singletonF fn f g = F $ Map.singleton (fromUTF8BS fn) (SP f g) - --- | Lookup a field value pretty-printer. 
-fieldDescrPretty :: FieldDescrs s a -> String -> Maybe (s -> Disp.Doc) -fieldDescrPretty (F m) fn = pPretty <$> Map.lookup fn m - --- | Lookup a field value parser. -fieldDescrParse :: P.CabalParsing m => FieldDescrs s a -> String -> Maybe (s -> m s) -fieldDescrParse (F m) fn = pParse <$> Map.lookup fn m - -fieldDescrsToList - :: P.CabalParsing m - => FieldDescrs s a - -> [(String, s -> Disp.Doc, s -> m s)] -fieldDescrsToList = map mk . Map.toList . runF where - mk (name, SP ppr parse) = (name, ppr, parse) - --- | /Note:/ default values are printed. -instance FieldGrammar FieldDescrs where - blurFieldGrammar l (F m) = F (fmap blur m) where - blur (SP f g) = SP (f . aview l) (cloneLens l g) - - booleanFieldDef fn l _def = singletonF fn f g where - f s = Disp.text (show (aview l s)) - g s = cloneLens l (const P.parsec) s - -- Note: eta expansion is needed for RankNTypes type-checking to work. - - uniqueFieldAla fn _pack l = singletonF fn f g where - f s = pretty (pack' _pack (aview l s)) - g s = cloneLens l (const (unpack' _pack <$> P.parsec)) s - - optionalFieldAla fn _pack l = singletonF fn f g where - f s = maybe mempty (pretty . pack' _pack) (aview l s) - g s = cloneLens l (const (Just . unpack' _pack <$> P.parsec)) s - - optionalFieldDefAla fn _pack l _def = singletonF fn f g where - f s = pretty (pack' _pack (aview l s)) - g s = cloneLens l (const (unpack' _pack <$> P.parsec)) s - - monoidalFieldAla fn _pack l = singletonF fn f g where - f s = pretty (pack' _pack (aview l s)) - g s = cloneLens l (\x -> mappend x . 
unpack' _pack <$> P.parsec) s - - prefixedFields _fnPfx _l = F mempty - knownField _ = pure () - deprecatedSince _ _ x = x - availableSince _ _ = id - hiddenField _ = F mempty diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/FieldGrammar/Parsec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/FieldGrammar/Parsec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/FieldGrammar/Parsec.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/FieldGrammar/Parsec.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,297 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} --- | This module provides a 'FieldGrammarParser', one way to parse --- @.cabal@ -like files. --- --- Fields can be specified multiple times in the .cabal files. The order of --- such entries is important, but the mutual ordering of different fields is --- not.Also conditional sections are considered after non-conditional data. --- The example of this silent-commutation quirk is the fact that --- --- @ --- buildable: True --- if os(linux) --- buildable: False --- @ --- --- and --- --- @ --- if os(linux) --- buildable: False --- buildable: True --- @ --- --- behave the same! This is the limitation of 'GeneralPackageDescription' --- structure. --- --- So we transform the list of fields @['Field' ann]@ into --- a map of grouped ordinary fields and a list of lists of sections: --- @'Fields' ann = 'Map' 'FieldName' ['NamelessField' ann]@ and @[['Section' ann]]@. --- --- We need list of list of sections, because we need to distinguish situations --- where there are fields in between. 
For example --- --- @ --- if flag(bytestring-lt-0_10_4) --- build-depends: bytestring < 0.10.4 --- --- default-language: Haskell2020 --- --- else --- build-depends: bytestring >= 0.10.4 --- --- @ --- --- is obviously invalid specification. --- --- We can parse 'Fields' like we parse @aeson@ objects, yet we use --- slighly higher-level API, so we can process unspecified fields, --- to report unknown fields and save custom @x-fields@. --- -module Distribution.FieldGrammar.Parsec ( - ParsecFieldGrammar, - parseFieldGrammar, - fieldGrammarKnownFieldList, - -- * Auxiliary - Fields, - NamelessField (..), - namelessFieldAnn, - Section (..), - runFieldParser, - runFieldParser', - ) where - -import Data.List (dropWhileEnd) -import Data.Ord (comparing) -import Data.Set (Set) -import Distribution.Compat.Newtype -import Distribution.Compat.Prelude -import Distribution.Simple.Utils (fromUTF8BS) -import Prelude () - -import qualified Data.ByteString as BS -import qualified Data.Set as Set -import qualified Data.Map.Strict as Map -import qualified Text.Parsec as P -import qualified Text.Parsec.Error as P - -import Distribution.CabalSpecVersion -import Distribution.FieldGrammar.Class -import Distribution.Parsec.Class -import Distribution.Parsec.Common -import Distribution.Parsec.Field -import Distribution.Parsec.FieldLineStream -import Distribution.Parsec.ParseResult - -------------------------------------------------------------------------------- --- Auxiliary types -------------------------------------------------------------------------------- - -type Fields ann = Map FieldName [NamelessField ann] - --- | Single field, without name, but with its annotation. -data NamelessField ann = MkNamelessField !ann [FieldLine ann] - deriving (Eq, Show, Functor) - -namelessFieldAnn :: NamelessField ann -> ann -namelessFieldAnn (MkNamelessField ann _) = ann - --- | The 'Section' constructor of 'Field'. 
-data Section ann = MkSection !(Name ann) [SectionArg ann] [Field ann] - deriving (Eq, Show, Functor) - -------------------------------------------------------------------------------- --- ParsecFieldGrammar -------------------------------------------------------------------------------- - -data ParsecFieldGrammar s a = ParsecFG - { fieldGrammarKnownFields :: !(Set FieldName) - , fieldGrammarKnownPrefixes :: !(Set FieldName) - , fieldGrammarParser :: !(CabalSpecVersion -> Fields Position -> ParseResult a) - } - deriving (Functor) - -parseFieldGrammar :: CabalSpecVersion -> Fields Position -> ParsecFieldGrammar s a -> ParseResult a -parseFieldGrammar v fields grammar = do - for_ (Map.toList (Map.filterWithKey isUnknownField fields)) $ \(name, nfields) -> - for_ nfields $ \(MkNamelessField pos _) -> - parseWarning pos PWTUnknownField $ "Unknown field: " ++ show name - -- TODO: fields allowed in this section - - -- parse - fieldGrammarParser grammar v fields - - where - isUnknownField k _ = not $ - k `Set.member` fieldGrammarKnownFields grammar - || any (`BS.isPrefixOf` k) (fieldGrammarKnownPrefixes grammar) - -fieldGrammarKnownFieldList :: ParsecFieldGrammar s a -> [FieldName] -fieldGrammarKnownFieldList = Set.toList . 
fieldGrammarKnownFields - -instance Applicative (ParsecFieldGrammar s) where - pure x = ParsecFG mempty mempty (\_ _ -> pure x) - {-# INLINE pure #-} - - ParsecFG f f' f'' <*> ParsecFG x x' x'' = ParsecFG - (mappend f x) - (mappend f' x') - (\v fields -> f'' v fields <*> x'' v fields) - {-# INLINE (<*>) #-} - -warnMultipleSingularFields :: FieldName -> [NamelessField Position] -> ParseResult () -warnMultipleSingularFields _ [] = pure () -warnMultipleSingularFields fn (x : xs) = do - let pos = namelessFieldAnn x - poss = map namelessFieldAnn xs - parseWarning pos PWTMultipleSingularField $ - "The field " <> show fn <> " is specified more than once at positions " ++ intercalate ", " (map showPos (pos : poss)) - -instance FieldGrammar ParsecFieldGrammar where - blurFieldGrammar _ (ParsecFG s s' parser) = ParsecFG s s' parser - - uniqueFieldAla fn _pack _extract = ParsecFG (Set.singleton fn) Set.empty parser - where - parser v fields = case Map.lookup fn fields of - Nothing -> parseFatalFailure zeroPos $ show fn ++ " field missing" - Just [] -> parseFatalFailure zeroPos $ show fn ++ " field missing" - Just [x] -> parseOne v x - Just xs -> do - warnMultipleSingularFields fn xs - last <$> traverse (parseOne v) xs - - parseOne v (MkNamelessField pos fls) = - unpack' _pack <$> runFieldParser pos parsec v fls - - booleanFieldDef fn _extract def = ParsecFG (Set.singleton fn) Set.empty parser - where - parser v fields = case Map.lookup fn fields of - Nothing -> pure def - Just [] -> pure def - Just [x] -> parseOne v x - Just xs -> do - warnMultipleSingularFields fn xs - last <$> traverse (parseOne v) xs - - parseOne v (MkNamelessField pos fls) = runFieldParser pos parsec v fls - - optionalFieldAla fn _pack _extract = ParsecFG (Set.singleton fn) Set.empty parser - where - parser v fields = case Map.lookup fn fields of - Nothing -> pure Nothing - Just [] -> pure Nothing - Just [x] -> parseOne v x - Just xs -> do - warnMultipleSingularFields fn xs - last <$> traverse (parseOne 
v) xs - - parseOne v (MkNamelessField pos fls) - | null fls = pure Nothing - | otherwise = Just . unpack' _pack <$> runFieldParser pos parsec v fls - - optionalFieldDefAla fn _pack _extract def = ParsecFG (Set.singleton fn) Set.empty parser - where - parser v fields = case Map.lookup fn fields of - Nothing -> pure def - Just [] -> pure def - Just [x] -> parseOne v x - Just xs -> do - warnMultipleSingularFields fn xs - last <$> traverse (parseOne v) xs - - parseOne v (MkNamelessField pos fls) - | null fls = pure def - | otherwise = unpack' _pack <$> runFieldParser pos parsec v fls - - monoidalFieldAla fn _pack _extract = ParsecFG (Set.singleton fn) Set.empty parser - where - parser v fields = case Map.lookup fn fields of - Nothing -> pure mempty - Just xs -> foldMap (unpack' _pack) <$> traverse (parseOne v) xs - - parseOne v (MkNamelessField pos fls) = runFieldParser pos parsec v fls - - prefixedFields fnPfx _extract = ParsecFG mempty (Set.singleton fnPfx) (\_ fs -> pure (parser fs)) - where - parser :: Fields Position -> [(String, String)] - parser values = reorder $ concatMap convert $ filter match $ Map.toList values - - match (fn, _) = fnPfx `BS.isPrefixOf` fn - convert (fn, fields) = - [ (pos, (fromUTF8BS fn, trim $ fromUTF8BS $ fieldlinesToBS fls)) - | MkNamelessField pos fls <- fields - ] - -- hack: recover the order of prefixed fields - reorder = map snd . sortBy (comparing fst) - trim :: String -> String - trim = dropWhile isSpace . 
dropWhileEnd isSpace - - availableSince vs def (ParsecFG names prefixes parser) = ParsecFG names prefixes parser' - where - parser' v values - | cabalSpecSupports v vs = parser v values - | otherwise = do - let unknownFields = Map.intersection values $ Map.fromSet (const ()) names - for_ (Map.toList unknownFields) $ \(name, fields) -> - for_ fields $ \(MkNamelessField pos _) -> - parseWarning pos PWTUnknownField $ - "The field " <> show name <> " is available since Cabal " ++ show vs - - pure def - - -- todo we know about this field - deprecatedSince (_ : _) _ grammar = grammar -- pass on non-empty version - deprecatedSince _ msg (ParsecFG names prefixes parser) = ParsecFG names prefixes parser' - where - parser' v values = do - let deprecatedFields = Map.intersection values $ Map.fromSet (const ()) names - for_ (Map.toList deprecatedFields) $ \(name, fields) -> - for_ fields $ \(MkNamelessField pos _) -> - parseWarning pos PWTDeprecatedField $ - "The field " <> show name <> " is deprecated. " ++ msg - - parser v values - - knownField fn = ParsecFG (Set.singleton fn) Set.empty (\_ _ -> pure ()) - - hiddenField = id - -------------------------------------------------------------------------------- --- Parsec -------------------------------------------------------------------------------- - -runFieldParser' :: Position -> ParsecParser a -> CabalSpecVersion -> FieldLineStream -> ParseResult a -runFieldParser' (Position row col) p v str = case P.runParser p' [] "" str of - Right (pok, ws) -> do - -- TODO: map pos - traverse_ (\(PWarning t pos w) -> parseWarning pos t w) ws - pure pok - Left err -> do - let ppos = P.errorPos err - -- Positions start from 1:1, not 0:0 - let epos = Position (row - 1 + P.sourceLine ppos) (col - 1 + P.sourceColumn ppos) - let msg = P.showErrorMessages - "or" "unknown parse error" "expecting" "unexpected" "end of input" - (P.errorMessages err) - let str' = unlines (filter (not . 
all isSpace) (fieldLineStreamToLines str)) - - parseFatalFailure epos $ msg ++ "\n" ++ "\n" ++ str' - where - p' = (,) <$ P.spaces <*> unPP p v <* P.spaces <* P.eof <*> P.getState - -fieldLineStreamToLines :: FieldLineStream -> [String] -fieldLineStreamToLines (FLSLast bs) = [ fromUTF8BS bs ] -fieldLineStreamToLines (FLSCons bs s) = fromUTF8BS bs : fieldLineStreamToLines s - -runFieldParser :: Position -> ParsecParser a -> CabalSpecVersion -> [FieldLine Position] -> ParseResult a -runFieldParser pp p v ls = runFieldParser' pos p v (fieldLinesToStream ls) - where - -- TODO: make per line lookup - pos = case ls of - [] -> pp - (FieldLine pos' _ : _) -> pos' - -fieldlinesToBS :: [FieldLine ann] -> BS.ByteString -fieldlinesToBS = BS.intercalate "\n" . map (\(FieldLine _ bs) -> bs) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/FieldGrammar/Pretty.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/FieldGrammar/Pretty.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/FieldGrammar/Pretty.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/FieldGrammar/Pretty.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,80 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -module Distribution.FieldGrammar.Pretty ( - PrettyFieldGrammar, - prettyFieldGrammar, - ) where - -import Distribution.Compat.Lens -import Distribution.Compat.Newtype -import Distribution.Compat.Prelude -import Distribution.Pretty (Pretty (..)) -import Distribution.Simple.Utils (fromUTF8BS) -import Prelude () -import Text.PrettyPrint (Doc) -import qualified Text.PrettyPrint as PP - -import Distribution.FieldGrammar.Class -import Distribution.ParseUtils (ppField) - -newtype PrettyFieldGrammar s a = PrettyFG - { fieldGrammarPretty :: s -> Doc - } - deriving (Functor) - -instance Applicative (PrettyFieldGrammar s) where - pure _ = PrettyFG (\_ -> mempty) - PrettyFG f <*> 
PrettyFG x = PrettyFG (\s -> f s PP.$$ x s) - --- | We can use 'PrettyFieldGrammar' to pp print the @s@. --- --- /Note:/ there is not trailing @($+$ text "")@. -prettyFieldGrammar :: PrettyFieldGrammar s a -> s -> Doc -prettyFieldGrammar = fieldGrammarPretty - -instance FieldGrammar PrettyFieldGrammar where - blurFieldGrammar f (PrettyFG pp) = PrettyFG (pp . aview f) - - uniqueFieldAla fn _pack l = PrettyFG $ \s -> - ppField (fromUTF8BS fn) (pretty (pack' _pack (aview l s))) - - booleanFieldDef fn l def = PrettyFG pp - where - pp s - | b == def = mempty - | otherwise = ppField (fromUTF8BS fn) (PP.text (show b)) - where - b = aview l s - - optionalFieldAla fn _pack l = PrettyFG pp - where - pp s = case aview l s of - Nothing -> mempty - Just a -> ppField (fromUTF8BS fn) (pretty (pack' _pack a)) - - optionalFieldDefAla fn _pack l def = PrettyFG pp - where - pp s - | x == def = mempty - | otherwise = ppField (fromUTF8BS fn) (pretty (pack' _pack x)) - where - x = aview l s - - monoidalFieldAla fn _pack l = PrettyFG pp - where - pp s = ppField (fromUTF8BS fn) (pretty (pack' _pack (aview l s))) - - prefixedFields _fnPfx l = PrettyFG (pp . aview l) - where - pp xs = PP.vcat - -- always print the field, even its Doc is empty - -- i.e. 
don't use ppField - [ PP.text n <<>> PP.colon PP.<+> (PP.vcat $ map PP.text $ lines s) - | (n, s) <- xs - -- fnPfx `isPrefixOf` n - ] - - knownField _ = pure () - deprecatedSince [] _ _ = PrettyFG (\_ -> mempty) - deprecatedSince _ _ x = x - availableSince _ _ = id - hiddenField _ = PrettyFG (\_ -> mempty) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/FieldGrammar.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/FieldGrammar.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/FieldGrammar.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/FieldGrammar.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,85 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE ScopedTypeVariables #-} --- | This module provides a way to specify a grammar of @.cabal@ -like files. -module Distribution.FieldGrammar ( - -- * Field grammar type - FieldGrammar (..), - uniqueField, - optionalField, - optionalFieldDef, - monoidalField, - deprecatedField', - -- * Concrete grammar implementations - ParsecFieldGrammar, - ParsecFieldGrammar', - parseFieldGrammar, - fieldGrammarKnownFieldList, - PrettyFieldGrammar, - PrettyFieldGrammar', - prettyFieldGrammar, - -- * Auxlilary - (^^^), - Section(..), - Fields, - partitionFields, - takeFields, - runFieldParser, - runFieldParser', - ) where - -import Distribution.Compat.Prelude -import Prelude () - -import qualified Data.Map.Strict as Map - -import Distribution.FieldGrammar.Class -import Distribution.FieldGrammar.Parsec -import Distribution.FieldGrammar.Pretty -import Distribution.Parsec.Field -import Distribution.Utils.Generic (spanMaybe) - -type ParsecFieldGrammar' a = ParsecFieldGrammar a a -type PrettyFieldGrammar' a = PrettyFieldGrammar a a - -infixl 5 ^^^ - --- | Reverse function application which binds tighter than '<$>' and '<*>'. 
--- Useful for refining grammar specification. --- --- @ --- \<*\> 'monoidalFieldAla' "extensions" (alaList' FSep MQuoted) oldExtensions --- ^^^ 'deprecatedSince' [1,12] "Please use 'default-extensions' or 'other-extensions' fields." --- @ -(^^^) :: a -> (a -> b) -> b -x ^^^ f = f x - --- | Partitioning state -data PS ann = PS (Fields ann) [Section ann] [[Section ann]] - --- | Partition field list into field map and groups of sections. -partitionFields :: [Field ann] -> (Fields ann, [[Section ann]]) -partitionFields = finalize . foldl' f (PS mempty mempty mempty) - where - finalize :: PS ann -> (Fields ann, [[Section ann]]) - finalize (PS fs s ss) - | null s = (fs, reverse ss) - | otherwise = (fs, reverse (reverse s : ss)) - - f :: PS ann -> Field ann -> PS ann - f (PS fs s ss) (Field (Name ann name) fss) = - PS (Map.insertWith (flip (++)) name [MkNamelessField ann fss] fs) [] ss' - where - ss' | null s = ss - | otherwise = reverse s : ss - f (PS fs s ss) (Section name sargs sfields) = - PS fs (MkSection name sargs sfields : s) ss - --- | Take all fields from the front. -takeFields :: [Field ann] -> (Fields ann, [Field ann]) -takeFields = finalize . 
spanMaybe match - where - finalize (fs, rest) = (Map.fromListWith (flip (++)) fs, rest) - - match (Field (Name ann name) fs) = Just (name, [MkNamelessField ann fs]) - match _ = Nothing diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/GetOpt.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/GetOpt.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/GetOpt.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/GetOpt.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,209 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.GetOpt --- Copyright : (c) Sven Panne 2002-2005 --- License : BSD-style (see the file libraries/base/LICENSE) --- --- Maintainer : libraries@haskell.org --- Portability : portable --- --- This is a fork of "System.Console.GetOpt" with the following changes: --- --- * Treat "cabal --flag command" as "cabal command --flag" e.g. --- "cabal -v configure" to mean "cabal configure -v" For flags that are --- not recognised as global flags, pass them on to the sub-command. See --- the difference in 'shortOpt'. --- --- * Line wrapping in the 'usageInfo' output, plus a more compact --- rendering of short options, and slightly less padding. --- --- If you want to take on the challenge of merging this with the GetOpt --- from the base package then go for it! --- -module Distribution.GetOpt ( - -- * GetOpt - getOpt, getOpt', - usageInfo, - ArgOrder(..), - OptDescr(..), - ArgDescr(..), - - -- * Example - -- | See "System.Console.GetOpt" for examples -) where - -import Prelude () -import Distribution.Compat.Prelude -import System.Console.GetOpt - ( ArgOrder(..), OptDescr(..), ArgDescr(..) 
) - -data OptKind a -- kind of cmd line arg (internal use only): - = Opt a -- an option - | UnreqOpt String -- an un-recognized option - | NonOpt String -- a non-option - | EndOfOpts -- end-of-options marker (i.e. "--") - | OptErr String -- something went wrong... - --- | Return a string describing the usage of a command, derived from --- the header (first argument) and the options described by the --- second argument. -usageInfo :: String -- header - -> [OptDescr a] -- option descriptors - -> String -- nicely formatted decription of options -usageInfo header optDescr = unlines (header:table) - where (ss,ls,ds) = unzip3 [ (intercalate ", " (map (fmtShort ad) sos) - ,concatMap (fmtLong ad) (take 1 los) - ,d) - | Option sos los ad d <- optDescr ] - ssWidth = (maximum . map length) ss - lsWidth = (maximum . map length) ls - dsWidth = 30 `max` (80 - (ssWidth + lsWidth + 3)) - table = [ " " ++ padTo ssWidth so' ++ - " " ++ padTo lsWidth lo' ++ - " " ++ d' - | (so,lo,d) <- zip3 ss ls ds - , (so',lo',d') <- fmtOpt dsWidth so lo d ] - padTo n x = take n (x ++ repeat ' ') - -fmtOpt :: Int -> String -> String -> String -> [(String, String, String)] -fmtOpt descrWidth so lo descr = - case wrapText descrWidth descr of - [] -> [(so,lo,"")] - (d:ds) -> (so,lo,d) : [ ("","",d') | d' <- ds ] - -fmtShort :: ArgDescr a -> Char -> String -fmtShort (NoArg _ ) so = "-" ++ [so] -fmtShort (ReqArg _ _) so = "-" ++ [so] -fmtShort (OptArg _ _) so = "-" ++ [so] - -- unlike upstream GetOpt we omit the arg name for short options - -fmtLong :: ArgDescr a -> String -> String -fmtLong (NoArg _ ) lo = "--" ++ lo -fmtLong (ReqArg _ ad) lo = "--" ++ lo ++ "=" ++ ad -fmtLong (OptArg _ ad) lo = "--" ++ lo ++ "[=" ++ ad ++ "]" - -wrapText :: Int -> String -> [String] -wrapText width = map unwords . wrap 0 [] . 
words - where wrap :: Int -> [String] -> [String] -> [[String]] - wrap 0 [] (w:ws) - | length w + 1 > width - = wrap (length w) [w] ws - wrap col line (w:ws) - | col + length w + 1 > width - = reverse line : wrap 0 [] (w:ws) - wrap col line (w:ws) - = let col' = col + length w + 1 - in wrap col' (w:line) ws - wrap _ [] [] = [] - wrap _ line [] = [reverse line] - -{-| -Process the command-line, and return the list of values that matched -(and those that didn\'t). The arguments are: - -* The order requirements (see 'ArgOrder') - -* The option descriptions (see 'OptDescr') - -* The actual command line arguments (presumably got from - 'System.Environment.getArgs'). - -'getOpt' returns a triple consisting of the option arguments, a list -of non-options, and a list of error messages. --} -getOpt :: ArgOrder a -- non-option handling - -> [OptDescr a] -- option descriptors - -> [String] -- the command-line arguments - -> ([a],[String],[String]) -- (options,non-options,error messages) -getOpt ordering optDescr args = (os,xs,es ++ map errUnrec us) - where (os,xs,us,es) = getOpt' ordering optDescr args - -{-| -This is almost the same as 'getOpt', but returns a quadruple -consisting of the option arguments, a list of non-options, a list of -unrecognized options, and a list of error messages. 
--} -getOpt' :: ArgOrder a -- non-option handling - -> [OptDescr a] -- option descriptors - -> [String] -- the command-line arguments - -> ([a],[String], [String] ,[String]) -- (options,non-options,unrecognized,error messages) -getOpt' _ _ [] = ([],[],[],[]) -getOpt' ordering optDescr (arg:args) = procNextOpt opt ordering - where procNextOpt (Opt o) _ = (o:os,xs,us,es) - procNextOpt (UnreqOpt u) _ = (os,xs,u:us,es) - procNextOpt (NonOpt x) RequireOrder = ([],x:rest,[],[]) - procNextOpt (NonOpt x) Permute = (os,x:xs,us,es) - procNextOpt (NonOpt x) (ReturnInOrder f) = (f x :os, xs,us,es) - procNextOpt EndOfOpts RequireOrder = ([],rest,[],[]) - procNextOpt EndOfOpts Permute = ([],rest,[],[]) - procNextOpt EndOfOpts (ReturnInOrder f) = (map f rest,[],[],[]) - procNextOpt (OptErr e) _ = (os,xs,us,e:es) - - (opt,rest) = getNext arg args optDescr - (os,xs,us,es) = getOpt' ordering optDescr rest - --- take a look at the next cmd line arg and decide what to do with it -getNext :: String -> [String] -> [OptDescr a] -> (OptKind a,[String]) -getNext ('-':'-':[]) rest _ = (EndOfOpts,rest) -getNext ('-':'-':xs) rest optDescr = longOpt xs rest optDescr -getNext ('-': x :xs) rest optDescr = shortOpt x xs rest optDescr -getNext a rest _ = (NonOpt a,rest) - --- handle long option -longOpt :: String -> [String] -> [OptDescr a] -> (OptKind a,[String]) -longOpt ls rs optDescr = long ads arg rs - where (opt,arg) = break (=='=') ls - getWith p = [ o | o@(Option _ xs _ _) <- optDescr - , isJust (find (p opt) xs)] - exact = getWith (==) - options = if null exact then getWith isPrefixOf else exact - ads = [ ad | Option _ _ ad _ <- options ] - optStr = "--" ++ opt - - long (_:_:_) _ rest = (errAmbig options optStr,rest) - long [NoArg a ] [] rest = (Opt a,rest) - long [NoArg _ ] ('=':_) rest = (errNoArg optStr,rest) - long [ReqArg _ d] [] [] = (errReq d optStr,[]) - long [ReqArg f _] [] (r:rest) = (Opt (f r),rest) - long [ReqArg f _] ('=':xs) rest = (Opt (f xs),rest) - long [OptArg f _] [] 
rest = (Opt (f Nothing),rest) - long [OptArg f _] ('=':xs) rest = (Opt (f (Just xs)),rest) - long _ _ rest = (UnreqOpt ("--"++ls),rest) - --- handle short option -shortOpt :: Char -> String -> [String] -> [OptDescr a] -> (OptKind a,[String]) -shortOpt y ys rs optDescr = short ads ys rs - where options = [ o | o@(Option ss _ _ _) <- optDescr, s <- ss, y == s ] - ads = [ ad | Option _ _ ad _ <- options ] - optStr = '-':[y] - - short (_:_:_) _ rest = (errAmbig options optStr,rest) - short (NoArg a :_) [] rest = (Opt a,rest) - short (NoArg a :_) xs rest = (Opt a,('-':xs):rest) - short (ReqArg _ d:_) [] [] = (errReq d optStr,[]) - short (ReqArg f _:_) [] (r:rest) = (Opt (f r),rest) - short (ReqArg f _:_) xs rest = (Opt (f xs),rest) - short (OptArg f _:_) [] rest = (Opt (f Nothing),rest) - short (OptArg f _:_) xs rest = (Opt (f (Just xs)),rest) - short [] [] rest = (UnreqOpt optStr,rest) - short [] xs rest = (UnreqOpt (optStr++xs),rest) - -- This is different vs upstream = (UnreqOpt optStr,('-':xs):rest) - -- Apparently this was part of the change so that flags that are - -- not recognised as global flags are passed on to the sub-command. - -- But why was no equivalent change required for longOpt? So could - -- this change go upstream? 
- --- miscellaneous error formatting - -errAmbig :: [OptDescr a] -> String -> OptKind a -errAmbig ods optStr = OptErr (usageInfo header ods) - where header = "option `" ++ optStr ++ "' is ambiguous; could be one of:" - -errReq :: String -> String -> OptKind a -errReq d optStr = OptErr ("option `" ++ optStr ++ "' requires an argument " ++ d ++ "\n") - -errUnrec :: String -> String -errUnrec optStr = "unrecognized option `" ++ optStr ++ "'\n" - -errNoArg :: String -> OptKind a -errNoArg optStr = OptErr ("option `" ++ optStr ++ "' doesn't allow an argument\n") diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/InstalledPackageInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/InstalledPackageInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/InstalledPackageInfo.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/InstalledPackageInfo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,150 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.InstalledPackageInfo --- Copyright : (c) The University of Glasgow 2004 --- --- Maintainer : libraries@haskell.org --- Portability : portable --- --- This is the information about an /installed/ package that --- is communicated to the @ghc-pkg@ program in order to register --- a package. @ghc-pkg@ now consumes this package format (as of version --- 6.4). This is specific to GHC at the moment. --- --- The @.cabal@ file format is for describing a package that is not yet --- installed. It has a lot of flexibility, like conditionals and dependency --- ranges. As such, that format is not at all suitable for describing a package --- that has already been built and installed. 
By the time we get to that stage, --- we have resolved all conditionals and resolved dependency version --- constraints to exact versions of dependent packages. So, this module defines --- the 'InstalledPackageInfo' data structure that contains all the info we keep --- about an installed package. There is a parser and pretty printer. The --- textual format is rather simpler than the @.cabal@ format: there are no --- sections, for example. - --- This module is meant to be local-only to Distribution... - -module Distribution.InstalledPackageInfo ( - InstalledPackageInfo(..), - installedPackageId, - installedComponentId, - installedOpenUnitId, - sourceComponentName, - requiredSignatures, - ExposedModule(..), - AbiDependency(..), - ParseResult(..), PError(..), PWarning, - emptyInstalledPackageInfo, - parseInstalledPackageInfo, - showInstalledPackageInfo, - showFullInstalledPackageInfo, - showInstalledPackageInfoField, - showSimpleInstalledPackageInfoField, - ) where - -import Distribution.Compat.Prelude -import Prelude () - -import Data.Set (Set) -import Distribution.Backpack -import Distribution.CabalSpecVersion (cabalSpecLatest) -import Distribution.FieldGrammar -import Distribution.FieldGrammar.FieldDescrs -import Distribution.ModuleName -import Distribution.Package hiding (installedPackageId, installedUnitId) -import Distribution.ParseUtils -import Distribution.Types.ComponentName -import Distribution.Utils.Generic (toUTF8BS) - -import qualified Data.Map as Map -import qualified Distribution.Parsec.Common as P -import qualified Distribution.Parsec.Parser as P -import qualified Distribution.Parsec.ParseResult as P -import qualified Text.Parsec.Error as Parsec -import qualified Text.Parsec.Pos as Parsec -import qualified Text.PrettyPrint as Disp - -import Distribution.Types.InstalledPackageInfo -import Distribution.Types.InstalledPackageInfo.FieldGrammar - - - -installedComponentId :: InstalledPackageInfo -> ComponentId -installedComponentId ipi = - case 
unComponentId (installedComponentId_ ipi) of - "" -> mkComponentId (unUnitId (installedUnitId ipi)) - _ -> installedComponentId_ ipi - --- | Get the indefinite unit identity representing this package. --- This IS NOT guaranteed to give you a substitution; for --- instantiated packages you will get @DefiniteUnitId (installedUnitId ipi)@. --- For indefinite libraries, however, you will correctly get --- an @OpenUnitId@ with the appropriate 'OpenModuleSubst'. -installedOpenUnitId :: InstalledPackageInfo -> OpenUnitId -installedOpenUnitId ipi - = mkOpenUnitId (installedUnitId ipi) (installedComponentId ipi) (Map.fromList (instantiatedWith ipi)) - --- | Returns the set of module names which need to be filled for --- an indefinite package, or the empty set if the package is definite. -requiredSignatures :: InstalledPackageInfo -> Set ModuleName -requiredSignatures ipi = openModuleSubstFreeHoles (Map.fromList (instantiatedWith ipi)) - -{-# DEPRECATED installedPackageId "Use installedUnitId instead" #-} --- | Backwards compatibility with Cabal pre-1.24. --- --- This type synonym is slightly awful because in cabal-install --- we define an 'InstalledPackageId' but it's a ComponentId, --- not a UnitId! 
-installedPackageId :: InstalledPackageInfo -> UnitId -installedPackageId = installedUnitId - --- ----------------------------------------------------------------------------- --- Munging - -sourceComponentName :: InstalledPackageInfo -> ComponentName -sourceComponentName ipi = - case sourceLibName ipi of - Nothing -> CLibName - Just qn -> CSubLibName qn - --- ----------------------------------------------------------------------------- --- Parsing - -parseInstalledPackageInfo :: String -> ParseResult InstalledPackageInfo -parseInstalledPackageInfo s = case P.readFields (toUTF8BS s) of - Left err -> ParseFailed (NoParse (show err) $ Parsec.sourceLine $ Parsec.errorPos err) - Right fs -> case partitionFields fs of - (fs', _) -> case P.runParseResult $ parseFieldGrammar cabalSpecLatest fs' ipiFieldGrammar of - (ws, Right x) -> ParseOk ws' x where - ws' = map (PWarning . P.showPWarning "") ws - (_, Left (_, errs)) -> ParseFailed (NoParse errs' 0) where - errs' = intercalate "; " $ map (\(P.PError _ msg) -> msg) errs - --- ----------------------------------------------------------------------------- --- Pretty-printing - --- | Pretty print 'InstalledPackageInfo'. --- --- @pkgRoot@ isn't printed, as ghc-pkg prints it manually (as GHC-8.4). -showInstalledPackageInfo :: InstalledPackageInfo -> String -showInstalledPackageInfo ipi = - showFullInstalledPackageInfo ipi { pkgRoot = Nothing } - --- | The variant of 'showInstalledPackageInfo' which outputs @pkgroot@ field too. -showFullInstalledPackageInfo :: InstalledPackageInfo -> String -showFullInstalledPackageInfo = Disp.render . (Disp.$+$ Disp.text "") . prettyFieldGrammar ipiFieldGrammar - --- | --- --- >>> let ipi = emptyInstalledPackageInfo { maintainer = "Tester" } --- >>> fmap ($ ipi) $ showInstalledPackageInfoField "maintainer" --- Just "maintainer: Tester" -showInstalledPackageInfoField :: String -> Maybe (InstalledPackageInfo -> String) -showInstalledPackageInfoField fn = - fmap (\g -> Disp.render . ppField fn . 
g) $ fieldDescrPretty ipiFieldGrammar fn - -showSimpleInstalledPackageInfoField :: String -> Maybe (InstalledPackageInfo -> String) -showSimpleInstalledPackageInfoField fn = - fmap (Disp.renderStyle myStyle .) $ fieldDescrPretty ipiFieldGrammar fn - where - myStyle = Disp.style { Disp.mode = Disp.LeftMode } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Lex.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Lex.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Lex.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Lex.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,40 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Lex --- Copyright : Ben Gamari 2015-2019 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module contains a simple lexer supporting quoted strings - -module Distribution.Lex ( - tokenizeQuotedWords - ) where - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.Compat.DList - -tokenizeQuotedWords :: String -> [String] -tokenizeQuotedWords = filter (not . null) . 
go False mempty - where - go :: Bool -- ^ in quoted region - -> DList Char -- ^ accumulator - -> String -- ^ string to be parsed - -> [String] -- ^ parse result - go _ accum [] - | [] <- accum' = [] - | otherwise = [accum'] - where accum' = runDList accum - - go False accum (c:cs) - | isSpace c = runDList accum : go False mempty cs - | c == '"' = go True accum cs - - go True accum (c:cs) - | c == '"' = go False accum cs - - go quoted accum (c:cs) - = go quoted (accum `mappend` singleton c) cs - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/License.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/License.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/License.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/License.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,275 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.License --- Description : The License data type. --- Copyright : Isaac Jones 2003-2005 --- Duncan Coutts 2008 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Package descriptions contain fields for specifying the name of a software --- license and the name of the file containing the text of that license. While --- package authors may choose any license they like, Cabal provides an --- enumeration of a small set of common free and open source software licenses. --- This is done so that Hackage can recognise licenses, so that tools can detect --- , --- and to deter --- . --- --- It is recommended that all package authors use the @license-file@ or --- @license-files@ fields in their package descriptions. Further information --- about these fields can be found in the --- . 
--- --- = Additional resources --- --- The following websites provide information about free and open source --- software licenses: --- --- * --- --- * --- --- = Disclaimer --- --- The descriptions of software licenses provided by this documentation are --- intended for informational purposes only and in no way constitute legal --- advice. Please read the text of the licenses and consult a lawyer for any --- advice regarding software licensing. - -module Distribution.License ( - License(..), - knownLicenses, - licenseToSPDX, - licenseFromSPDX, - ) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text -import Distribution.Version - -import qualified Distribution.Compat.CharParsing as P -import qualified Data.Map.Strict as Map -import qualified Distribution.Compat.ReadP as Parse -import qualified Distribution.SPDX as SPDX -import qualified Text.PrettyPrint as Disp - --- | Indicates the license under which a package's source code is released. --- Versions of the licenses not listed here will be rejected by Hackage and --- cause @cabal check@ to issue a warning. -data License = - -- TODO: * remove BSD4 - - -- | GNU General Public License, - -- or - -- . - GPL (Maybe Version) - - -- | . - | AGPL (Maybe Version) - - -- | GNU Lesser General Public License, - -- or - -- . - | LGPL (Maybe Version) - - -- | . - | BSD2 - - -- | . - | BSD3 - - -- | . - -- This license has not been approved by the OSI and is incompatible with - -- the GNU GPL. It is provided for historical reasons and should be avoided. - | BSD4 - - -- | . - | MIT - - -- | - | ISC - - -- | . - | MPL Version - - -- | . - | Apache (Maybe Version) - - -- | The author of a package disclaims any copyright to its source code and - -- dedicates it to the public domain. This is not a software license. 
Please - -- note that it is not possible to dedicate works to the public domain in - -- every jurisdiction, nor is a work that is in the public domain in one - -- jurisdiction necessarily in the public domain elsewhere. - | PublicDomain - - -- | Explicitly 'All Rights Reserved', eg for proprietary software. The - -- package may not be legally modified or redistributed by anyone but the - -- rightsholder. - | AllRightsReserved - - -- | No license specified which legally defaults to 'All Rights Reserved'. - -- The package may not be legally modified or redistributed by anyone but - -- the rightsholder. - | UnspecifiedLicense - - -- | Any other software license. - | OtherLicense - - -- | Indicates an erroneous license name. - | UnknownLicense String - deriving (Generic, Read, Show, Eq, Typeable, Data) - -instance Binary License - -instance NFData License where rnf = genericRnf - --- | The list of all currently recognised licenses. -knownLicenses :: [License] -knownLicenses = [ GPL unversioned, GPL (version [2]), GPL (version [3]) - , LGPL unversioned, LGPL (version [2, 1]), LGPL (version [3]) - , AGPL unversioned, AGPL (version [3]) - , BSD2, BSD3, MIT, ISC - , MPL (mkVersion [2, 0]) - , Apache unversioned, Apache (version [2, 0]) - , PublicDomain, AllRightsReserved, OtherLicense] - where - unversioned = Nothing - version = Just . mkVersion - --- | Convert old 'License' to SPDX 'SPDX.License'. --- Non-SPDX licenses are converted to 'SPDX.LicenseRef'. 
--- --- @since 2.2.0.0 -licenseToSPDX :: License -> SPDX.License -licenseToSPDX l = case l of - GPL v | v == version [2] -> spdx SPDX.GPL_2_0_only - GPL v | v == version [3] -> spdx SPDX.GPL_3_0_only - LGPL v | v == version [2,1] -> spdx SPDX.LGPL_2_1_only - LGPL v | v == version [3] -> spdx SPDX.LGPL_3_0_only - AGPL v | v == version [3] -> spdx SPDX.AGPL_3_0_only - BSD2 -> spdx SPDX.BSD_2_Clause - BSD3 -> spdx SPDX.BSD_3_Clause - BSD4 -> spdx SPDX.BSD_4_Clause - MIT -> spdx SPDX.MIT - ISC -> spdx SPDX.ISC - MPL v | v == mkVersion [2,0] -> spdx SPDX.MPL_2_0 - Apache v | v == version [2,0] -> spdx SPDX.Apache_2_0 - AllRightsReserved -> SPDX.NONE - UnspecifiedLicense -> SPDX.NONE - OtherLicense -> ref (SPDX.mkLicenseRef' Nothing "OtherLicense") - PublicDomain -> ref (SPDX.mkLicenseRef' Nothing "PublicDomain") - UnknownLicense str -> ref (SPDX.mkLicenseRef' Nothing str) - _ -> ref (SPDX.mkLicenseRef' Nothing $ prettyShow l) - where - version = Just . mkVersion - spdx = SPDX.License . SPDX.simpleLicenseExpression - ref r = SPDX.License $ SPDX.ELicense (SPDX.ELicenseRef r) Nothing - --- | Convert 'SPDX.License' to 'License', --- --- This is lossy conversion. We try our best. --- --- >>> licenseFromSPDX . licenseToSPDX $ BSD3 --- BSD3 --- --- >>> licenseFromSPDX . licenseToSPDX $ GPL (Just (mkVersion [3])) --- GPL (Just (mkVersion [3])) --- --- >>> licenseFromSPDX . licenseToSPDX $ PublicDomain --- UnknownLicense "LicenseRefPublicDomain" --- --- >>> licenseFromSPDX $ SPDX.License $ SPDX.simpleLicenseExpression SPDX.EUPL_1_1 --- UnknownLicense "EUPL-1.1" --- --- >>> licenseFromSPDX . 
licenseToSPDX $ AllRightsReserved --- AllRightsReserved --- --- >>> licenseFromSPDX <$> simpleParsec "BSD-3-Clause OR GPL-3.0-only" --- Just (UnknownLicense "BSD3ClauseORGPL30only") --- --- @since 2.2.0.0 -licenseFromSPDX :: SPDX.License -> License -licenseFromSPDX SPDX.NONE = AllRightsReserved -licenseFromSPDX l = - fromMaybe (mungle $ prettyShow l) $ Map.lookup l m - where - m :: Map.Map SPDX.License License - m = Map.fromList $ filter (isSimple . fst ) $ - map (\x -> (licenseToSPDX x, x)) knownLicenses - - isSimple (SPDX.License (SPDX.ELicense (SPDX.ELicenseId _) Nothing)) = True - isSimple _ = False - - mungle name = fromMaybe (UnknownLicense (mapMaybe mangle name)) (simpleParsec name) - - mangle c - | isAlphaNum c = Just c - | otherwise = Nothing - -instance Pretty License where - pretty (GPL version) = Disp.text "GPL" <<>> dispOptVersion version - pretty (LGPL version) = Disp.text "LGPL" <<>> dispOptVersion version - pretty (AGPL version) = Disp.text "AGPL" <<>> dispOptVersion version - pretty (MPL version) = Disp.text "MPL" <<>> dispVersion version - pretty (Apache version) = Disp.text "Apache" <<>> dispOptVersion version - pretty (UnknownLicense other) = Disp.text other - pretty other = Disp.text (show other) - -instance Parsec License where - parsec = do - name <- P.munch1 isAlphaNum - version <- P.optional (P.char '-' *> parsec) - return $! case (name, version :: Maybe Version) of - ("GPL", _ ) -> GPL version - ("LGPL", _ ) -> LGPL version - ("AGPL", _ ) -> AGPL version - ("BSD2", Nothing) -> BSD2 - ("BSD3", Nothing) -> BSD3 - ("BSD4", Nothing) -> BSD4 - ("ISC", Nothing) -> ISC - ("MIT", Nothing) -> MIT - ("MPL", Just version') -> MPL version' - ("Apache", _ ) -> Apache version - ("PublicDomain", Nothing) -> PublicDomain - ("AllRightsReserved", Nothing) -> AllRightsReserved - ("OtherLicense", Nothing) -> OtherLicense - _ -> UnknownLicense $ name ++ - maybe "" (('-':) . 
display) version - -instance Text License where - parse = do - name <- Parse.munch1 (\c -> isAlphaNum c && c /= '-') - version <- Parse.option Nothing (Parse.char '-' >> fmap Just parse) - return $! case (name, version :: Maybe Version) of - ("GPL", _ ) -> GPL version - ("LGPL", _ ) -> LGPL version - ("AGPL", _ ) -> AGPL version - ("BSD2", Nothing) -> BSD2 - ("BSD3", Nothing) -> BSD3 - ("BSD4", Nothing) -> BSD4 - ("ISC", Nothing) -> ISC - ("MIT", Nothing) -> MIT - ("MPL", Just version') -> MPL version' - ("Apache", _ ) -> Apache version - ("PublicDomain", Nothing) -> PublicDomain - ("AllRightsReserved", Nothing) -> AllRightsReserved - ("OtherLicense", Nothing) -> OtherLicense - _ -> UnknownLicense $ name ++ - maybe "" (('-':) . display) version - -dispOptVersion :: Maybe Version -> Disp.Doc -dispOptVersion Nothing = Disp.empty -dispOptVersion (Just v) = dispVersion v - -dispVersion :: Version -> Disp.Doc -dispVersion v = Disp.char '-' <<>> disp v diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Make.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Make.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Make.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Make.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,189 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Make --- Copyright : Martin Sjögren 2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is an alternative build system that delegates everything to the @make@ --- program. All the commands just end up calling @make@ with appropriate --- arguments. The intention was to allow preexisting packages that used --- makefiles to be wrapped into Cabal packages. 
In practice essentially all --- such packages were converted over to the \"Simple\" build system instead. --- Consequently this module is not used much and it certainly only sees cursory --- maintenance and no testing. Perhaps at some point we should stop pretending --- that it works. --- --- Uses the parsed command-line from "Distribution.Simple.Setup" in order to build --- Haskell tools using a back-end build system based on make. Obviously we --- assume that there is a configure script, and that after the ConfigCmd has --- been run, there is a Makefile. Further assumptions: --- --- [ConfigCmd] We assume the configure script accepts --- @--with-hc@, --- @--with-hc-pkg@, --- @--prefix@, --- @--bindir@, --- @--libdir@, --- @--libexecdir@, --- @--datadir@. --- --- [BuildCmd] We assume that the default Makefile target will build everything. --- --- [InstallCmd] We assume there is an @install@ target. Note that we assume that --- this does *not* register the package! --- --- [CopyCmd] We assume there is a @copy@ target, and a variable @$(destdir)@. --- The @copy@ target should probably just invoke @make install@ --- recursively (e.g. @$(MAKE) install prefix=$(destdir)\/$(prefix) --- bindir=$(destdir)\/$(bindir)@. The reason we can\'t invoke @make --- install@ directly here is that we don\'t know the value of @$(prefix)@. --- --- [SDistCmd] We assume there is a @dist@ target. --- --- [RegisterCmd] We assume there is a @register@ target and a variable @$(user)@. --- --- [UnregisterCmd] We assume there is an @unregister@ target. --- --- [HaddockCmd] We assume there is a @docs@ or @doc@ target. 
- - --- copy : --- $(MAKE) install prefix=$(destdir)/$(prefix) \ --- bindir=$(destdir)/$(bindir) \ - -module Distribution.Make ( - module Distribution.Package, - License(..), Version, - defaultMain, defaultMainArgs, defaultMainNoRead - ) where - -import Prelude () -import Distribution.Compat.Prelude - --- local -import Distribution.Compat.Exception -import Distribution.Package -import Distribution.Simple.Program -import Distribution.PackageDescription -import Distribution.Simple.Setup -import Distribution.Simple.Command - -import Distribution.Simple.Utils - -import Distribution.License -import Distribution.Version -import Distribution.Text - -import System.Environment (getArgs, getProgName) -import System.Exit - -defaultMain :: IO () -defaultMain = getArgs >>= defaultMainArgs - -defaultMainArgs :: [String] -> IO () -defaultMainArgs = defaultMainHelper - -{-# DEPRECATED defaultMainNoRead "it ignores its PackageDescription arg" #-} -defaultMainNoRead :: PackageDescription -> IO () -defaultMainNoRead = const defaultMain - -defaultMainHelper :: [String] -> IO () -defaultMainHelper args = - case commandsRun (globalCommand commands) commands args of - CommandHelp help -> printHelp help - CommandList opts -> printOptionsList opts - CommandErrors errs -> printErrors errs - CommandReadyToGo (flags, commandParse) -> - case commandParse of - _ | fromFlag (globalVersion flags) -> printVersion - | fromFlag (globalNumericVersion flags) -> printNumericVersion - CommandHelp help -> printHelp help - CommandList opts -> printOptionsList opts - CommandErrors errs -> printErrors errs - CommandReadyToGo action -> action - - where - printHelp help = getProgName >>= putStr . help - printOptionsList = putStr . 
unlines - printErrors errs = do - putStr (intercalate "\n" errs) - exitWith (ExitFailure 1) - printNumericVersion = putStrLn $ display cabalVersion - printVersion = putStrLn $ "Cabal library version " - ++ display cabalVersion - - progs = defaultProgramDb - commands = - [configureCommand progs `commandAddAction` configureAction - ,buildCommand progs `commandAddAction` buildAction - ,installCommand `commandAddAction` installAction - ,copyCommand `commandAddAction` copyAction - ,haddockCommand `commandAddAction` haddockAction - ,cleanCommand `commandAddAction` cleanAction - ,sdistCommand `commandAddAction` sdistAction - ,registerCommand `commandAddAction` registerAction - ,unregisterCommand `commandAddAction` unregisterAction - ] - -configureAction :: ConfigFlags -> [String] -> IO () -configureAction flags args = do - noExtraFlags args - let verbosity = fromFlag (configVerbosity flags) - rawSystemExit verbosity "sh" $ - "configure" - : configureArgs backwardsCompatHack flags - where backwardsCompatHack = True - -copyAction :: CopyFlags -> [String] -> IO () -copyAction flags args = do - noExtraFlags args - let destArgs = case fromFlag $ copyDest flags of - NoCopyDest -> ["install"] - CopyTo path -> ["copy", "destdir=" ++ path] - CopyToDb _ -> error "CopyToDb not supported via Make" - - rawSystemExit (fromFlag $ copyVerbosity flags) "make" destArgs - -installAction :: InstallFlags -> [String] -> IO () -installAction flags args = do - noExtraFlags args - rawSystemExit (fromFlag $ installVerbosity flags) "make" ["install"] - rawSystemExit (fromFlag $ installVerbosity flags) "make" ["register"] - -haddockAction :: HaddockFlags -> [String] -> IO () -haddockAction flags args = do - noExtraFlags args - rawSystemExit (fromFlag $ haddockVerbosity flags) "make" ["docs"] - `catchIO` \_ -> - rawSystemExit (fromFlag $ haddockVerbosity flags) "make" ["doc"] - -buildAction :: BuildFlags -> [String] -> IO () -buildAction flags args = do - noExtraFlags args - rawSystemExit (fromFlag $ 
buildVerbosity flags) "make" [] - -cleanAction :: CleanFlags -> [String] -> IO () -cleanAction flags args = do - noExtraFlags args - rawSystemExit (fromFlag $ cleanVerbosity flags) "make" ["clean"] - -sdistAction :: SDistFlags -> [String] -> IO () -sdistAction flags args = do - noExtraFlags args - rawSystemExit (fromFlag $ sDistVerbosity flags) "make" ["dist"] - -registerAction :: RegisterFlags -> [String] -> IO () -registerAction flags args = do - noExtraFlags args - rawSystemExit (fromFlag $ regVerbosity flags) "make" ["register"] - -unregisterAction :: RegisterFlags -> [String] -> IO () -unregisterAction flags args = do - noExtraFlags args - rawSystemExit (fromFlag $ regVerbosity flags) "make" ["unregister"] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/ModuleName.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/ModuleName.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/ModuleName.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/ModuleName.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,164 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.ModuleName --- Copyright : Duncan Coutts 2008 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Data type for Haskell module names. 
- -module Distribution.ModuleName ( - ModuleName (..), -- TODO: move Parsec instance here, don't export constructor - fromString, - fromComponents, - components, - toFilePath, - main, - simple, - -- * Internal - validModuleComponent, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Utils.ShortText -import System.FilePath ( pathSeparator ) - -import Distribution.Pretty -import Distribution.Parsec.Class -import Distribution.Text - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - --- | A valid Haskell module name. --- -newtype ModuleName = ModuleName ShortTextLst - deriving (Eq, Generic, Ord, Read, Show, Typeable, Data) - -instance Binary ModuleName - -instance NFData ModuleName where - rnf (ModuleName ms) = rnf ms - -instance Pretty ModuleName where - pretty (ModuleName ms) = - Disp.hcat (intersperse (Disp.char '.') (map Disp.text $ stlToStrings ms)) - -instance Parsec ModuleName where - parsec = fromComponents <$> P.sepBy1 component (P.char '.') - where - component = do - c <- P.satisfy isUpper - cs <- P.munch validModuleChar - return (c:cs) - -instance Text ModuleName where - parse = do - ms <- Parse.sepBy1 component (Parse.char '.') - return (ModuleName $ stlFromStrings ms) - - where - component = do - c <- Parse.satisfy isUpper - cs <- Parse.munch validModuleChar - return (c:cs) - -validModuleChar :: Char -> Bool -validModuleChar c = isAlphaNum c || c == '_' || c == '\'' - -validModuleComponent :: String -> Bool -validModuleComponent [] = False -validModuleComponent (c:cs) = isUpper c - && all validModuleChar cs - -{-# DEPRECATED simple "use ModuleName.fromString instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} -simple :: String -> ModuleName -simple str = ModuleName (stlFromStrings [str]) - --- | Construct a 'ModuleName' from a valid module name 'String'. 
--- --- This is just a convenience function intended for valid module strings. It is --- an error if it is used with a string that is not a valid module name. If you --- are parsing user input then use 'Distribution.Text.simpleParse' instead. --- -instance IsString ModuleName where - fromString string = fromComponents (split string) - where - split cs = case break (=='.') cs of - (chunk,[]) -> chunk : [] - (chunk,_:rest) -> chunk : split rest - --- | Construct a 'ModuleName' from valid module components, i.e. parts --- separated by dots. -fromComponents :: [String] -> ModuleName -fromComponents components' - | null components' = error zeroComponents - | all validModuleComponent components' = ModuleName (stlFromStrings components') - | otherwise = error badName - where - zeroComponents = "ModuleName.fromComponents: zero components" - badName = "ModuleName.fromComponents: invalid components " ++ show components' - --- | The module name @Main@. --- -main :: ModuleName -main = ModuleName (stlFromStrings ["Main"]) - --- | The individual components of a hierarchical module name. For example --- --- > components (fromString "A.B.C") = ["A", "B", "C"] --- -components :: ModuleName -> [String] -components (ModuleName ms) = stlToStrings ms - --- | Convert a module name to a file path, but without any file extension. --- For example: --- --- > toFilePath (fromString "A.B.C") = "A/B/C" --- -toFilePath :: ModuleName -> FilePath -toFilePath = intercalate [pathSeparator] . components - ----------------------------------------------------------------------------- --- internal helper - --- | Strict/unpacked representation of @[ShortText]@ -data ShortTextLst = STLNil - | STLCons !ShortText !ShortTextLst - deriving (Eq, Generic, Ord, Typeable, Data) - -instance NFData ShortTextLst where - rnf = flip seq () - -instance Show ShortTextLst where - showsPrec p = showsPrec p . stlToList - - -instance Read ShortTextLst where - readsPrec p = map (first stlFromList) . 
readsPrec p - -instance Binary ShortTextLst where - put = put . stlToList - get = stlFromList <$> get - -stlToList :: ShortTextLst -> [ShortText] -stlToList STLNil = [] -stlToList (STLCons st next) = st : stlToList next - -stlToStrings :: ShortTextLst -> [String] -stlToStrings = map fromShortText . stlToList - -stlFromList :: [ShortText] -> ShortTextLst -stlFromList [] = STLNil -stlFromList (x:xs) = STLCons x (stlFromList xs) - -stlFromStrings :: [String] -> ShortTextLst -stlFromStrings = stlFromList . map toShortText diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription/Check.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription/Check.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription/Check.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription/Check.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2244 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.PackageDescription.Check --- Copyright : Lennart Kolmodin 2008 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This has code for checking for various problems in packages. There is one --- set of checks that just looks at a 'PackageDescription' in isolation and --- another set of checks that also looks at files in the package. Some of the --- checks are basic sanity checks, others are portability standards that we'd --- like to encourage. There is a 'PackageCheck' type that distinguishes the --- different kinds of check so we can see which ones are appropriate to report --- in different situations. This code gets uses when configuring a package when --- we consider only basic problems. 
The higher standard is uses when when --- preparing a source tarball and by Hackage when uploading new packages. The --- reason for this is that we want to hold packages that are expected to be --- distributed to a higher standard than packages that are only ever expected --- to be used on the author's own environment. - -module Distribution.PackageDescription.Check ( - -- * Package Checking - PackageCheck(..), - checkPackage, - checkConfiguredPackage, - - -- ** Checking package contents - checkPackageFiles, - checkPackageContent, - CheckPackageContentOps(..), - checkPackageFileNames, - ) where - -import Distribution.Compat.Prelude -import Prelude () - -import Control.Monad (mapM) -import Data.List (group) -import Distribution.Compat.Lens -import Distribution.Compiler -import Distribution.License -import Distribution.Package -import Distribution.PackageDescription -import Distribution.PackageDescription.Configuration -import Distribution.Pretty (prettyShow) -import Distribution.Simple.BuildPaths (autogenPathsModuleName) -import Distribution.Simple.BuildToolDepends -import Distribution.Simple.CCompiler -import Distribution.Simple.Glob -import Distribution.Simple.Utils hiding (findPackageDesc, notice) -import Distribution.System -import Distribution.Text -import Distribution.Types.ComponentRequestedSpec -import Distribution.Types.CondTree -import Distribution.Types.ExeDependency -import Distribution.Types.UnqualComponentName -import Distribution.Utils.Generic (isAscii) -import Distribution.Verbosity -import Distribution.Version -import Language.Haskell.Extension -import System.FilePath - (splitDirectories, splitExtension, splitPath, takeExtension, takeFileName, (<.>), ()) - -import qualified Data.ByteString.Lazy as BS -import qualified Data.Map as Map -import qualified Distribution.Compat.DList as DList -import qualified Distribution.SPDX as SPDX -import qualified System.Directory as System - -import qualified System.Directory (getDirectoryContents) -import qualified 
System.FilePath.Windows as FilePath.Windows (isValid) - -import qualified Data.Set as Set - -import qualified Distribution.Types.BuildInfo.Lens as L -import qualified Distribution.Types.GenericPackageDescription.Lens as L -import qualified Distribution.Types.PackageDescription.Lens as L - --- | Results of some kind of failed package check. --- --- There are a range of severities, from merely dubious to totally insane. --- All of them come with a human readable explanation. In future we may augment --- them with more machine readable explanations, for example to help an IDE --- suggest automatic corrections. --- -data PackageCheck = - - -- | This package description is no good. There's no way it's going to - -- build sensibly. This should give an error at configure time. - PackageBuildImpossible { explanation :: String } - - -- | A problem that is likely to affect building the package, or an - -- issue that we'd like every package author to be aware of, even if - -- the package is never distributed. - | PackageBuildWarning { explanation :: String } - - -- | An issue that might not be a problem for the package author but - -- might be annoying or detrimental when the package is distributed to - -- users. We should encourage distributed packages to be free from these - -- issues, but occasionally there are justifiable reasons so we cannot - -- ban them entirely. - | PackageDistSuspicious { explanation :: String } - - -- | Like PackageDistSuspicious but will only display warnings - -- rather than causing abnormal exit when you run 'cabal check'. - | PackageDistSuspiciousWarn { explanation :: String } - - -- | An issue that is OK in the author's environment but is almost - -- certain to be a portability problem for other environments. We can - -- quite legitimately refuse to publicly distribute packages with these - -- problems. 
- | PackageDistInexcusable { explanation :: String } - deriving (Eq, Ord) - -instance Show PackageCheck where - show notice = explanation notice - -check :: Bool -> PackageCheck -> Maybe PackageCheck -check False _ = Nothing -check True pc = Just pc - -checkSpecVersion :: PackageDescription -> [Int] -> Bool -> PackageCheck - -> Maybe PackageCheck -checkSpecVersion pkg specver cond pc - | specVersion pkg >= mkVersion specver = Nothing - | otherwise = check cond pc - --- ------------------------------------------------------------ --- * Standard checks --- ------------------------------------------------------------ - --- | Check for common mistakes and problems in package descriptions. --- --- This is the standard collection of checks covering all aspects except --- for checks that require looking at files within the package. For those --- see 'checkPackageFiles'. --- --- It requires the 'GenericPackageDescription' and optionally a particular --- configuration of that package. If you pass 'Nothing' then we just check --- a version of the generic description using 'flattenPackageDescription'. 
--- -checkPackage :: GenericPackageDescription - -> Maybe PackageDescription - -> [PackageCheck] -checkPackage gpkg mpkg = - checkConfiguredPackage pkg - ++ checkConditionals gpkg - ++ checkPackageVersions gpkg - ++ checkDevelopmentOnlyFlags gpkg - ++ checkFlagNames gpkg - ++ checkUnusedFlags gpkg - ++ checkUnicodeXFields gpkg - ++ checkPathsModuleExtensions pkg - where - pkg = fromMaybe (flattenPackageDescription gpkg) mpkg - ---TODO: make this variant go away --- we should always know the GenericPackageDescription -checkConfiguredPackage :: PackageDescription -> [PackageCheck] -checkConfiguredPackage pkg = - checkSanity pkg - ++ checkFields pkg - ++ checkLicense pkg - ++ checkSourceRepos pkg - ++ checkGhcOptions pkg - ++ checkCCOptions pkg - ++ checkCxxOptions pkg - ++ checkCPPOptions pkg - ++ checkPaths pkg - ++ checkCabalVersion pkg - - --- ------------------------------------------------------------ --- * Basic sanity checks --- ------------------------------------------------------------ - --- | Check that this package description is sane. --- -checkSanity :: PackageDescription -> [PackageCheck] -checkSanity pkg = - catMaybes [ - - check (null . unPackageName . packageName $ pkg) $ - PackageBuildImpossible "No 'name' field." - - , check (nullVersion == packageVersion pkg) $ - PackageBuildImpossible "No 'version' field." - - , check (all ($ pkg) [ null . executables - , null . testSuites - , null . benchmarks - , null . allLibraries - , null . foreignLibs ]) $ - PackageBuildImpossible - "No executables, libraries, tests, or benchmarks found. Nothing to do." - - , check (any isNothing (map libName $ subLibraries pkg)) $ - PackageBuildImpossible $ "Found one or more unnamed internal libraries. " - ++ "Only the non-internal library can have the same name as the package." - - , check (not (null duplicateNames)) $ - PackageBuildImpossible $ "Duplicate sections: " - ++ commaSep (map unUnqualComponentName duplicateNames) - ++ ". 
The name of every library, executable, test suite," - ++ " and benchmark section in" - ++ " the package must be unique." - - -- NB: but it's OK for executables to have the same name! - -- TODO shouldn't need to compare on the string level - , check (any (== display (packageName pkg)) (display <$> subLibNames)) $ - PackageBuildImpossible $ "Illegal internal library name " - ++ display (packageName pkg) - ++ ". Internal libraries cannot have the same name as the package." - ++ " Maybe you wanted a non-internal library?" - ++ " If so, rewrite the section stanza" - ++ " from 'library: '" ++ display (packageName pkg) ++ "' to 'library'." - ] - --TODO: check for name clashes case insensitively: windows file systems cannot - --cope. - - ++ concatMap (checkLibrary pkg) (allLibraries pkg) - ++ concatMap (checkExecutable pkg) (executables pkg) - ++ concatMap (checkTestSuite pkg) (testSuites pkg) - ++ concatMap (checkBenchmark pkg) (benchmarks pkg) - - ++ catMaybes [ - - check (specVersion pkg > cabalVersion) $ - PackageBuildImpossible $ - "This package description follows version " - ++ display (specVersion pkg) ++ " of the Cabal specification. This " - ++ "tool only supports up to version " ++ display cabalVersion ++ "." - ] - where - -- The public 'library' gets special dispensation, because it - -- is common practice to export a library and name the executable - -- the same as the package. - subLibNames = catMaybes . 
map libName $ subLibraries pkg - exeNames = map exeName $ executables pkg - testNames = map testName $ testSuites pkg - bmNames = map benchmarkName $ benchmarks pkg - duplicateNames = dups $ subLibNames ++ exeNames ++ testNames ++ bmNames - -checkLibrary :: PackageDescription -> Library -> [PackageCheck] -checkLibrary pkg lib = - catMaybes [ - - check (not (null moduleDuplicates)) $ - PackageBuildImpossible $ - "Duplicate modules in library: " - ++ commaSep (map display moduleDuplicates) - - -- TODO: This check is bogus if a required-signature was passed through - , check (null (explicitLibModules lib) && null (reexportedModules lib)) $ - PackageDistSuspiciousWarn $ - "Library " ++ (case libName lib of - Nothing -> "" - Just n -> display n - ) ++ "does not expose any modules" - - -- check use of signatures sections - , checkVersion [1,25] (not (null (signatures lib))) $ - PackageDistInexcusable $ - "To use the 'signatures' field the package needs to specify " - ++ "at least 'cabal-version: 2.0'." - - -- check that all autogen-modules appear on other-modules or exposed-modules - , check - (not $ and $ map (flip elem (explicitLibModules lib)) (libModulesAutogen lib)) $ - PackageBuildImpossible $ - "An 'autogen-module' is neither on 'exposed-modules' or " - ++ "'other-modules'." 
- - ] - - where - checkVersion :: [Int] -> Bool -> PackageCheck -> Maybe PackageCheck - checkVersion ver cond pc - | specVersion pkg >= mkVersion ver = Nothing - | otherwise = check cond pc - - -- TODO: not sure if this check is always right in Backpack - moduleDuplicates = dups (explicitLibModules lib ++ - map moduleReexportName (reexportedModules lib)) - -checkExecutable :: PackageDescription -> Executable -> [PackageCheck] -checkExecutable pkg exe = - catMaybes [ - - check (null (modulePath exe)) $ - PackageBuildImpossible $ - "No 'main-is' field found for executable " ++ display (exeName exe) - - , check (not (null (modulePath exe)) - && (not $ fileExtensionSupportedLanguage $ modulePath exe)) $ - PackageBuildImpossible $ - "The 'main-is' field must specify a '.hs' or '.lhs' file " - ++ "(even if it is generated by a preprocessor), " - ++ "or it may specify a C/C++/obj-C source file." - - , checkSpecVersion pkg [1,17] - (fileExtensionSupportedLanguage (modulePath exe) - && takeExtension (modulePath exe) `notElem` [".hs", ".lhs"]) $ - PackageDistInexcusable $ - "The package uses a C/C++/obj-C source file for the 'main-is' field. " - ++ "To use this feature you must specify 'cabal-version: >= 1.18'." 
- - , check (not (null moduleDuplicates)) $ - PackageBuildImpossible $ - "Duplicate modules in executable '" ++ display (exeName exe) ++ "': " - ++ commaSep (map display moduleDuplicates) - - -- check that all autogen-modules appear on other-modules - , check - (not $ and $ map (flip elem (exeModules exe)) (exeModulesAutogen exe)) $ - PackageBuildImpossible $ - "On executable '" ++ display (exeName exe) ++ "' an 'autogen-module' is not " - ++ "on 'other-modules'" - ] - where - moduleDuplicates = dups (exeModules exe) - -checkTestSuite :: PackageDescription -> TestSuite -> [PackageCheck] -checkTestSuite pkg test = - catMaybes [ - - case testInterface test of - TestSuiteUnsupported tt@(TestTypeUnknown _ _) -> Just $ - PackageBuildWarning $ - quote (display tt) ++ " is not a known type of test suite. " - ++ "The known test suite types are: " - ++ commaSep (map display knownTestTypes) - - TestSuiteUnsupported tt -> Just $ - PackageBuildWarning $ - quote (display tt) ++ " is not a supported test suite version. " - ++ "The known test suite types are: " - ++ commaSep (map display knownTestTypes) - _ -> Nothing - - , check (not $ null moduleDuplicates) $ - PackageBuildImpossible $ - "Duplicate modules in test suite '" ++ display (testName test) ++ "': " - ++ commaSep (map display moduleDuplicates) - - , check mainIsWrongExt $ - PackageBuildImpossible $ - "The 'main-is' field must specify a '.hs' or '.lhs' file " - ++ "(even if it is generated by a preprocessor), " - ++ "or it may specify a C/C++/obj-C source file." - - , checkSpecVersion pkg [1,17] (mainIsNotHsExt && not mainIsWrongExt) $ - PackageDistInexcusable $ - "The package uses a C/C++/obj-C source file for the 'main-is' field. " - ++ "To use this feature you must specify 'cabal-version: >= 1.18'." 
- - -- check that all autogen-modules appear on other-modules - , check - (not $ and $ map - (flip elem (testModules test)) - (testModulesAutogen test) - ) $ - PackageBuildImpossible $ - "On test suite '" ++ display (testName test) ++ "' an 'autogen-module' is not " - ++ "on 'other-modules'" - ] - where - moduleDuplicates = dups $ testModules test - - mainIsWrongExt = case testInterface test of - TestSuiteExeV10 _ f -> not $ fileExtensionSupportedLanguage f - _ -> False - - mainIsNotHsExt = case testInterface test of - TestSuiteExeV10 _ f -> takeExtension f `notElem` [".hs", ".lhs"] - _ -> False - -checkBenchmark :: PackageDescription -> Benchmark -> [PackageCheck] -checkBenchmark _pkg bm = - catMaybes [ - - case benchmarkInterface bm of - BenchmarkUnsupported tt@(BenchmarkTypeUnknown _ _) -> Just $ - PackageBuildWarning $ - quote (display tt) ++ " is not a known type of benchmark. " - ++ "The known benchmark types are: " - ++ commaSep (map display knownBenchmarkTypes) - - BenchmarkUnsupported tt -> Just $ - PackageBuildWarning $ - quote (display tt) ++ " is not a supported benchmark version. " - ++ "The known benchmark types are: " - ++ commaSep (map display knownBenchmarkTypes) - _ -> Nothing - - , check (not $ null moduleDuplicates) $ - PackageBuildImpossible $ - "Duplicate modules in benchmark '" ++ display (benchmarkName bm) ++ "': " - ++ commaSep (map display moduleDuplicates) - - , check mainIsWrongExt $ - PackageBuildImpossible $ - "The 'main-is' field must specify a '.hs' or '.lhs' file " - ++ "(even if it is generated by a preprocessor)." 
- - -- check that all autogen-modules appear on other-modules - , check - (not $ and $ map - (flip elem (benchmarkModules bm)) - (benchmarkModulesAutogen bm) - ) $ - PackageBuildImpossible $ - "On benchmark '" ++ display (benchmarkName bm) ++ "' an 'autogen-module' is " - ++ "not on 'other-modules'" - ] - where - moduleDuplicates = dups $ benchmarkModules bm - - mainIsWrongExt = case benchmarkInterface bm of - BenchmarkExeV10 _ f -> takeExtension f `notElem` [".hs", ".lhs"] - _ -> False - --- ------------------------------------------------------------ --- * Additional pure checks --- ------------------------------------------------------------ - -checkFields :: PackageDescription -> [PackageCheck] -checkFields pkg = - catMaybes [ - - check (not . FilePath.Windows.isValid . display . packageName $ pkg) $ - PackageDistInexcusable $ - "Unfortunately, the package name '" ++ display (packageName pkg) - ++ "' is one of the reserved system file names on Windows. Many tools " - ++ "need to convert package names to file names so using this name " - ++ "would cause problems." - - , check ((isPrefixOf "z-") . display . packageName $ pkg) $ - PackageDistInexcusable $ - "Package names with the prefix 'z-' are reserved by Cabal and " - ++ "cannot be used." - - , check (isNothing (buildTypeRaw pkg) && specVersion pkg < mkVersion [2,1]) $ - PackageBuildWarning $ - "No 'build-type' specified. If you do not need a custom Setup.hs or " - ++ "./configure script then use 'build-type: Simple'." - - , check (isJust (setupBuildInfo pkg) && buildType pkg /= Custom) $ - PackageBuildWarning $ - "Ignoring the 'custom-setup' section because the 'build-type' is " - ++ "not 'Custom'. Use 'build-type: Custom' if you need to use a " - ++ "custom Setup.hs script." - - , check (not (null unknownCompilers)) $ - PackageBuildWarning $ - "Unknown compiler " ++ commaSep (map quote unknownCompilers) - ++ " in 'tested-with' field." 
- - , check (not (null unknownLanguages)) $ - PackageBuildWarning $ - "Unknown languages: " ++ commaSep unknownLanguages - - , check (not (null unknownExtensions)) $ - PackageBuildWarning $ - "Unknown extensions: " ++ commaSep unknownExtensions - - , check (not (null languagesUsedAsExtensions)) $ - PackageBuildWarning $ - "Languages listed as extensions: " - ++ commaSep languagesUsedAsExtensions - ++ ". Languages must be specified in either the 'default-language' " - ++ " or the 'other-languages' field." - - , check (not (null ourDeprecatedExtensions)) $ - PackageDistSuspicious $ - "Deprecated extensions: " - ++ commaSep (map (quote . display . fst) ourDeprecatedExtensions) - ++ ". " ++ unwords - [ "Instead of '" ++ display ext - ++ "' use '" ++ display replacement ++ "'." - | (ext, Just replacement) <- ourDeprecatedExtensions ] - - , check (null (category pkg)) $ - PackageDistSuspicious "No 'category' field." - - , check (null (maintainer pkg)) $ - PackageDistSuspicious "No 'maintainer' field." - - , check (null (synopsis pkg) && null (description pkg)) $ - PackageDistInexcusable "No 'synopsis' or 'description' field." - - , check (null (description pkg) && not (null (synopsis pkg))) $ - PackageDistSuspicious "No 'description' field." - - , check (null (synopsis pkg) && not (null (description pkg))) $ - PackageDistSuspicious "No 'synopsis' field." - - --TODO: recommend the bug reports URL, author and homepage fields - --TODO: recommend not using the stability field - --TODO: recommend specifying a source repo - - , check (length (synopsis pkg) >= 80) $ - PackageDistSuspicious - "The 'synopsis' field is rather long (max 80 chars is recommended)." - - -- See also https://github.com/haskell/cabal/pull/3479 - , check (not (null (description pkg)) - && length (description pkg) <= length (synopsis pkg)) $ - PackageDistSuspicious $ - "The 'description' field should be longer than the 'synopsis' " - ++ "field. 
" - ++ "It's useful to provide an informative 'description' to allow " - ++ "Haskell programmers who have never heard about your package to " - ++ "understand the purpose of your package. " - ++ "The 'description' field content is typically shown by tooling " - ++ "(e.g. 'cabal info', Haddock, Hackage) below the 'synopsis' which " - ++ "serves as a headline. " - ++ "Please refer to " - ++ " for more details." - - -- check use of impossible constraints "tested-with: GHC== 6.10 && ==6.12" - , check (not (null testedWithImpossibleRanges)) $ - PackageDistInexcusable $ - "Invalid 'tested-with' version range: " - ++ commaSep (map display testedWithImpossibleRanges) - ++ ". To indicate that you have tested a package with multiple " - ++ "different versions of the same compiler use multiple entries, " - ++ "for example 'tested-with: GHC==6.10.4, GHC==6.12.3' and not " - ++ "'tested-with: GHC==6.10.4 && ==6.12.3'." - - -- Disabled due to #5119: we generate loads of spurious instances of - -- this warning. Re-enabling this check should be part of the fix to - -- #5119. - , check (False && not (null depInternalLibraryWithExtraVersion)) $ - PackageBuildWarning $ - "The package has an extraneous version range for a dependency on an " - ++ "internal library: " - ++ commaSep (map display depInternalLibraryWithExtraVersion) - ++ ". This version range includes the current package but isn't needed " - ++ "as the current package's library will always be used." - - , check (not (null depInternalLibraryWithImpossibleVersion)) $ - PackageBuildImpossible $ - "The package has an impossible version range for a dependency on an " - ++ "internal library: " - ++ commaSep (map display depInternalLibraryWithImpossibleVersion) - ++ ". This version range does not include the current package, and must " - ++ "be removed as the current package's library will always be used." 
- - , check (not (null depInternalExecutableWithExtraVersion)) $ - PackageBuildWarning $ - "The package has an extraneous version range for a dependency on an " - ++ "internal executable: " - ++ commaSep (map display depInternalExecutableWithExtraVersion) - ++ ". This version range includes the current package but isn't needed " - ++ "as the current package's executable will always be used." - - , check (not (null depInternalExecutableWithImpossibleVersion)) $ - PackageBuildImpossible $ - "The package has an impossible version range for a dependency on an " - ++ "internal executable: " - ++ commaSep (map display depInternalExecutableWithImpossibleVersion) - ++ ". This version range does not include the current package, and must " - ++ "be removed as the current package's executable will always be used." - - , check (not (null depMissingInternalExecutable)) $ - PackageBuildImpossible $ - "The package depends on a missing internal executable: " - ++ commaSep (map display depInternalExecutableWithImpossibleVersion) - ] - where - unknownCompilers = [ name | (OtherCompiler name, _) <- testedWith pkg ] - unknownLanguages = [ name | bi <- allBuildInfo pkg - , UnknownLanguage name <- allLanguages bi ] - unknownExtensions = [ name | bi <- allBuildInfo pkg - , UnknownExtension name <- allExtensions bi - , name `notElem` map display knownLanguages ] - ourDeprecatedExtensions = nub $ catMaybes - [ find ((==ext) . fst) deprecatedExtensions - | bi <- allBuildInfo pkg - , ext <- allExtensions bi ] - languagesUsedAsExtensions = - [ name | bi <- allBuildInfo pkg - , UnknownExtension name <- allExtensions bi - , name `elem` map display knownLanguages ] - - testedWithImpossibleRanges = - [ Dependency (mkPackageName (display compiler)) vr - | (compiler, vr) <- testedWith pkg - , isNoVersion vr ] - - internalLibraries = - map (maybe (packageName pkg) (unqualComponentNameToPackageName) . 
libName) - (allLibraries pkg) - - internalExecutables = map exeName $ executables pkg - - internalLibDeps = - [ dep - | bi <- allBuildInfo pkg - , dep@(Dependency name _) <- targetBuildDepends bi - , name `elem` internalLibraries - ] - - internalExeDeps = - [ dep - | bi <- allBuildInfo pkg - , dep <- getAllToolDependencies pkg bi - , isInternal pkg dep - ] - - depInternalLibraryWithExtraVersion = - [ dep - | dep@(Dependency _ versionRange) <- internalLibDeps - , not $ isAnyVersion versionRange - , packageVersion pkg `withinRange` versionRange - ] - - depInternalLibraryWithImpossibleVersion = - [ dep - | dep@(Dependency _ versionRange) <- internalLibDeps - , not $ packageVersion pkg `withinRange` versionRange - ] - - depInternalExecutableWithExtraVersion = - [ dep - | dep@(ExeDependency _ _ versionRange) <- internalExeDeps - , not $ isAnyVersion versionRange - , packageVersion pkg `withinRange` versionRange - ] - - depInternalExecutableWithImpossibleVersion = - [ dep - | dep@(ExeDependency _ _ versionRange) <- internalExeDeps - , not $ packageVersion pkg `withinRange` versionRange - ] - - depMissingInternalExecutable = - [ dep - | dep@(ExeDependency _ eName _) <- internalExeDeps - , not $ eName `elem` internalExecutables - ] - - -checkLicense :: PackageDescription -> [PackageCheck] -checkLicense pkg = case licenseRaw pkg of - Right l -> checkOldLicense pkg l - Left l -> checkNewLicense pkg l - -checkNewLicense :: PackageDescription -> SPDX.License -> [PackageCheck] -checkNewLicense _pkg lic = catMaybes - [ check (lic == SPDX.NONE) $ - PackageDistInexcusable - "The 'license' field is missing or is NONE." - ] - -checkOldLicense :: PackageDescription -> License -> [PackageCheck] -checkOldLicense pkg lic = catMaybes - [ check (lic == UnspecifiedLicense) $ - PackageDistInexcusable - "The 'license' field is missing." - - , check (lic == AllRightsReserved) $ - PackageDistSuspicious - "The 'license' is AllRightsReserved. Is that really what you want?" 
- - , checkVersion [1,4] (lic `notElem` compatLicenses) $ - PackageDistInexcusable $ - "Unfortunately the license " ++ quote (prettyShow (license pkg)) - ++ " messes up the parser in earlier Cabal versions so you need to " - ++ "specify 'cabal-version: >= 1.4'. Alternatively if you require " - ++ "compatibility with earlier Cabal versions then use 'OtherLicense'." - - , case lic of - UnknownLicense l -> Just $ - PackageBuildWarning $ - quote ("license: " ++ l) ++ " is not a recognised license. The " - ++ "known licenses are: " - ++ commaSep (map display knownLicenses) - _ -> Nothing - - , check (lic == BSD4) $ - PackageDistSuspicious $ - "Using 'license: BSD4' is almost always a misunderstanding. 'BSD4' " - ++ "refers to the old 4-clause BSD license with the advertising " - ++ "clause. 'BSD3' refers the new 3-clause BSD license." - - , case unknownLicenseVersion (lic) of - Just knownVersions -> Just $ - PackageDistSuspicious $ - "'license: " ++ display (lic) ++ "' is not a known " - ++ "version of that license. The known versions are " - ++ commaSep (map display knownVersions) - ++ ". If this is not a mistake and you think it should be a known " - ++ "version then please file a ticket." - _ -> Nothing - - , check (lic `notElem` [ AllRightsReserved - , UnspecifiedLicense, PublicDomain] - -- AllRightsReserved and PublicDomain are not strictly - -- licenses so don't need license files. - && null (licenseFiles pkg)) $ - PackageDistSuspicious "A 'license-file' is not specified." 
- ] - where - unknownLicenseVersion (GPL (Just v)) - | v `notElem` knownVersions = Just knownVersions - where knownVersions = [ v' | GPL (Just v') <- knownLicenses ] - unknownLicenseVersion (LGPL (Just v)) - | v `notElem` knownVersions = Just knownVersions - where knownVersions = [ v' | LGPL (Just v') <- knownLicenses ] - unknownLicenseVersion (AGPL (Just v)) - | v `notElem` knownVersions = Just knownVersions - where knownVersions = [ v' | AGPL (Just v') <- knownLicenses ] - unknownLicenseVersion (Apache (Just v)) - | v `notElem` knownVersions = Just knownVersions - where knownVersions = [ v' | Apache (Just v') <- knownLicenses ] - unknownLicenseVersion _ = Nothing - - checkVersion :: [Int] -> Bool -> PackageCheck -> Maybe PackageCheck - checkVersion ver cond pc - | specVersion pkg >= mkVersion ver = Nothing - | otherwise = check cond pc - - compatLicenses = [ GPL Nothing, LGPL Nothing, AGPL Nothing, BSD3, BSD4 - , PublicDomain, AllRightsReserved - , UnspecifiedLicense, OtherLicense ] - -checkSourceRepos :: PackageDescription -> [PackageCheck] -checkSourceRepos pkg = - catMaybes $ concat [[ - - case repoKind repo of - RepoKindUnknown kind -> Just $ PackageDistInexcusable $ - quote kind ++ " is not a recognised kind of source-repository. " - ++ "The repo kind is usually 'head' or 'this'" - _ -> Nothing - - , check (isNothing (repoType repo)) $ - PackageDistInexcusable - "The source-repository 'type' is a required field." - - , check (isNothing (repoLocation repo)) $ - PackageDistInexcusable - "The source-repository 'location' is a required field." - - , check (repoType repo == Just CVS && isNothing (repoModule repo)) $ - PackageDistInexcusable - "For a CVS source-repository, the 'module' is a required field." - - , check (repoKind repo == RepoThis && isNothing (repoTag repo)) $ - PackageDistInexcusable $ - "For the 'this' kind of source-repository, the 'tag' is a required " - ++ "field. 
It should specify the tag corresponding to this version " - ++ "or release of the package." - - , check (maybe False isAbsoluteOnAnyPlatform (repoSubdir repo)) $ - PackageDistInexcusable - "The 'subdir' field of a source-repository must be a relative path." - ] - | repo <- sourceRepos pkg ] - ---TODO: check location looks like a URL for some repo types. - -checkGhcOptions :: PackageDescription -> [PackageCheck] -checkGhcOptions pkg = - catMaybes [ - - checkFlags ["-fasm"] $ - PackageDistInexcusable $ - "'ghc-options: -fasm' is unnecessary and will not work on CPU " - ++ "architectures other than x86, x86-64, ppc or sparc." - - , checkFlags ["-fvia-C"] $ - PackageDistSuspicious $ - "'ghc-options: -fvia-C' is usually unnecessary. If your package " - ++ "needs -via-C for correctness rather than performance then it " - ++ "is using the FFI incorrectly and will probably not work with GHC " - ++ "6.10 or later." - - , checkFlags ["-fhpc"] $ - PackageDistInexcusable $ - "'ghc-options: -fhpc' is not not necessary. Use the configure flag " - ++ " --enable-coverage instead." - - , checkFlags ["-prof"] $ - PackageBuildWarning $ - "'ghc-options: -prof' is not necessary and will lead to problems " - ++ "when used on a library. Use the configure flag " - ++ "--enable-library-profiling and/or --enable-profiling." - - , checkFlags ["-o"] $ - PackageBuildWarning $ - "'ghc-options: -o' is not needed. " - ++ "The output files are named automatically." - - , checkFlags ["-hide-package"] $ - PackageBuildWarning $ - "'ghc-options: -hide-package' is never needed. " - ++ "Cabal hides all packages." - - , checkFlags ["--make"] $ - PackageBuildWarning $ - "'ghc-options: --make' is never needed. Cabal uses this automatically." - - , checkFlags ["-main-is"] $ - PackageDistSuspicious $ - "'ghc-options: -main-is' is not portable." - - , checkNonTestAndBenchmarkFlags ["-O0", "-Onot"] $ - PackageDistSuspicious $ - "'ghc-options: -O0' is not needed. 
" - ++ "Use the --disable-optimization configure flag." - - , checkTestAndBenchmarkFlags ["-O0", "-Onot"] $ - PackageDistSuspiciousWarn $ - "'ghc-options: -O0' is not needed. " - ++ "Use the --disable-optimization configure flag." - - , checkFlags [ "-O", "-O1"] $ - PackageDistInexcusable $ - "'ghc-options: -O' is not needed. " - ++ "Cabal automatically adds the '-O' flag. " - ++ "Setting it yourself interferes with the --disable-optimization flag." - - , checkFlags ["-O2"] $ - PackageDistSuspiciousWarn $ - "'ghc-options: -O2' is rarely needed. " - ++ "Check that it is giving a real benefit " - ++ "and not just imposing longer compile times on your users." - - , checkFlags ["-split-sections"] $ - PackageBuildWarning $ - "'ghc-options: -split-sections' is not needed. " - ++ "Use the --enable-split-sections configure flag." - - , checkFlags ["-split-objs"] $ - PackageBuildWarning $ - "'ghc-options: -split-objs' is not needed. " - ++ "Use the --enable-split-objs configure flag." - - , checkFlags ["-optl-Wl,-s", "-optl-s"] $ - PackageDistInexcusable $ - "'ghc-options: -optl-Wl,-s' is not needed and is not portable to all" - ++ " operating systems. Cabal 1.4 and later automatically strip" - ++ " executables. Cabal also has a flag --disable-executable-stripping" - ++ " which is necessary when building packages for some Linux" - ++ " distributions and using '-optl-Wl,-s' prevents that from working." - - , checkFlags ["-fglasgow-exts"] $ - PackageDistSuspicious $ - "Instead of 'ghc-options: -fglasgow-exts' it is preferable to use " - ++ "the 'extensions' field." - - , check ("-threaded" `elem` lib_ghc_options) $ - PackageBuildWarning $ - "'ghc-options: -threaded' has no effect for libraries. It should " - ++ "only be used for executables." - - , check ("-rtsopts" `elem` lib_ghc_options) $ - PackageBuildWarning $ - "'ghc-options: -rtsopts' has no effect for libraries. It should " - ++ "only be used for executables." 
- - , check (any (\opt -> "-with-rtsopts" `isPrefixOf` opt) lib_ghc_options) $ - PackageBuildWarning $ - "'ghc-options: -with-rtsopts' has no effect for libraries. It " - ++ "should only be used for executables." - - , checkAlternatives "ghc-options" "extensions" - [ (flag, display extension) | flag <- all_ghc_options - , Just extension <- [ghcExtension flag] ] - - , checkAlternatives "ghc-options" "extensions" - [ (flag, extension) | flag@('-':'X':extension) <- all_ghc_options ] - - , checkAlternatives "ghc-options" "cpp-options" $ - [ (flag, flag) | flag@('-':'D':_) <- all_ghc_options ] - ++ [ (flag, flag) | flag@('-':'U':_) <- all_ghc_options ] - - , checkAlternatives "ghc-options" "include-dirs" - [ (flag, dir) | flag@('-':'I':dir) <- all_ghc_options ] - - , checkAlternatives "ghc-options" "extra-libraries" - [ (flag, lib) | flag@('-':'l':lib) <- all_ghc_options ] - - , checkAlternatives "ghc-options" "extra-lib-dirs" - [ (flag, dir) | flag@('-':'L':dir) <- all_ghc_options ] - - , checkAlternatives "ghc-options" "frameworks" - [ (flag, fmwk) | (flag@"-framework", fmwk) <- - zip all_ghc_options (safeTail all_ghc_options) ] - - , checkAlternatives "ghc-options" "extra-framework-dirs" - [ (flag, dir) | (flag@"-framework-path", dir) <- - zip all_ghc_options (safeTail all_ghc_options) ] - ] - - where - all_ghc_options = concatMap get_ghc_options (allBuildInfo pkg) - lib_ghc_options = concatMap (get_ghc_options . libBuildInfo) - (allLibraries pkg) - get_ghc_options bi = hcOptions GHC bi ++ hcProfOptions GHC bi - ++ hcSharedOptions GHC bi - - test_ghc_options = concatMap (get_ghc_options . testBuildInfo) - (testSuites pkg) - benchmark_ghc_options = concatMap (get_ghc_options . 
benchmarkBuildInfo) - (benchmarks pkg) - test_and_benchmark_ghc_options = test_ghc_options ++ - benchmark_ghc_options - non_test_and_benchmark_ghc_options = concatMap get_ghc_options - (allBuildInfo (pkg { testSuites = [] - , benchmarks = [] - })) - - checkFlags :: [String] -> PackageCheck -> Maybe PackageCheck - checkFlags flags = check (any (`elem` flags) all_ghc_options) - - checkTestAndBenchmarkFlags :: [String] -> PackageCheck -> Maybe PackageCheck - checkTestAndBenchmarkFlags flags = check (any (`elem` flags) test_and_benchmark_ghc_options) - - checkNonTestAndBenchmarkFlags :: [String] -> PackageCheck -> Maybe PackageCheck - checkNonTestAndBenchmarkFlags flags = check (any (`elem` flags) non_test_and_benchmark_ghc_options) - - ghcExtension ('-':'f':name) = case name of - "allow-overlapping-instances" -> enable OverlappingInstances - "no-allow-overlapping-instances" -> disable OverlappingInstances - "th" -> enable TemplateHaskell - "no-th" -> disable TemplateHaskell - "ffi" -> enable ForeignFunctionInterface - "no-ffi" -> disable ForeignFunctionInterface - "fi" -> enable ForeignFunctionInterface - "no-fi" -> disable ForeignFunctionInterface - "monomorphism-restriction" -> enable MonomorphismRestriction - "no-monomorphism-restriction" -> disable MonomorphismRestriction - "mono-pat-binds" -> enable MonoPatBinds - "no-mono-pat-binds" -> disable MonoPatBinds - "allow-undecidable-instances" -> enable UndecidableInstances - "no-allow-undecidable-instances" -> disable UndecidableInstances - "allow-incoherent-instances" -> enable IncoherentInstances - "no-allow-incoherent-instances" -> disable IncoherentInstances - "arrows" -> enable Arrows - "no-arrows" -> disable Arrows - "generics" -> enable Generics - "no-generics" -> disable Generics - "implicit-prelude" -> enable ImplicitPrelude - "no-implicit-prelude" -> disable ImplicitPrelude - "implicit-params" -> enable ImplicitParams - "no-implicit-params" -> disable ImplicitParams - "bang-patterns" -> enable BangPatterns 
- "no-bang-patterns" -> disable BangPatterns - "scoped-type-variables" -> enable ScopedTypeVariables - "no-scoped-type-variables" -> disable ScopedTypeVariables - "extended-default-rules" -> enable ExtendedDefaultRules - "no-extended-default-rules" -> disable ExtendedDefaultRules - _ -> Nothing - ghcExtension "-cpp" = enable CPP - ghcExtension _ = Nothing - - enable e = Just (EnableExtension e) - disable e = Just (DisableExtension e) - -checkCCOptions :: PackageDescription -> [PackageCheck] -checkCCOptions = checkCLikeOptions "C" "cc-options" ccOptions - -checkCxxOptions :: PackageDescription -> [PackageCheck] -checkCxxOptions = checkCLikeOptions "C++" "cxx-options" cxxOptions - -checkCLikeOptions :: String -> String -> (BuildInfo -> [String]) -> PackageDescription -> [PackageCheck] -checkCLikeOptions label prefix accessor pkg = - catMaybes [ - - checkAlternatives prefix "include-dirs" - [ (flag, dir) | flag@('-':'I':dir) <- all_cLikeOptions ] - - , checkAlternatives prefix "extra-libraries" - [ (flag, lib) | flag@('-':'l':lib) <- all_cLikeOptions ] - - , checkAlternatives prefix "extra-lib-dirs" - [ (flag, dir) | flag@('-':'L':dir) <- all_cLikeOptions ] - - , checkAlternatives "ld-options" "extra-libraries" - [ (flag, lib) | flag@('-':'l':lib) <- all_ldOptions ] - - , checkAlternatives "ld-options" "extra-lib-dirs" - [ (flag, dir) | flag@('-':'L':dir) <- all_ldOptions ] - - , checkCCFlags [ "-O", "-Os", "-O0", "-O1", "-O2", "-O3" ] $ - PackageDistSuspicious $ - "'"++prefix++": -O[n]' is generally not needed. When building with " - ++ " optimisations Cabal automatically adds '-O2' for "++label++" code. " - ++ "Setting it yourself interferes with the --disable-optimization flag." 
- ] - - where all_cLikeOptions = [ opts | bi <- allBuildInfo pkg - , opts <- accessor bi ] - all_ldOptions = [ opts | bi <- allBuildInfo pkg - , opts <- ldOptions bi ] - - checkCCFlags :: [String] -> PackageCheck -> Maybe PackageCheck - checkCCFlags flags = check (any (`elem` flags) all_cLikeOptions) - -checkCPPOptions :: PackageDescription -> [PackageCheck] -checkCPPOptions pkg = - catMaybes [ - checkAlternatives "cpp-options" "include-dirs" - [ (flag, dir) | flag@('-':'I':dir) <- all_cppOptions] - ] - where all_cppOptions = [ opts | bi <- allBuildInfo pkg - , opts <- cppOptions bi ] - -checkAlternatives :: String -> String -> [(String, String)] - -> Maybe PackageCheck -checkAlternatives badField goodField flags = - check (not (null badFlags)) $ - PackageBuildWarning $ - "Instead of " ++ quote (badField ++ ": " ++ unwords badFlags) - ++ " use " ++ quote (goodField ++ ": " ++ unwords goodFlags) - - where (badFlags, goodFlags) = unzip flags - -checkPaths :: PackageDescription -> [PackageCheck] -checkPaths pkg = - [ PackageBuildWarning $ - quote (kind ++ ": " ++ path) - ++ " is a relative path outside of the source tree. " - ++ "This will not work when generating a tarball with 'sdist'." - | (path, kind) <- relPaths ++ absPaths - , isOutsideTree path ] - ++ - [ PackageDistInexcusable $ - quote (kind ++ ": " ++ path) ++ " is an absolute path." - | (path, kind) <- relPaths - , isAbsoluteOnAnyPlatform path ] - ++ - [ PackageDistInexcusable $ - quote (kind ++ ": " ++ path) ++ " points inside the 'dist' " - ++ "directory. This is not reliable because the location of this " - ++ "directory is configurable by the user (or package manager). In " - ++ "addition the layout of the 'dist' directory is subject to change " - ++ "in future versions of Cabal." - | (path, kind) <- relPaths ++ absPaths - , isInsideDist path ] - ++ - [ PackageDistInexcusable $ - "The 'ghc-options' contains the path '" ++ path ++ "' which points " - ++ "inside the 'dist' directory. 
This is not reliable because the " - ++ "location of this directory is configurable by the user (or package " - ++ "manager). In addition the layout of the 'dist' directory is subject " - ++ "to change in future versions of Cabal." - | bi <- allBuildInfo pkg - , (GHC, flags) <- options bi - , path <- flags - , isInsideDist path ] - ++ - [ PackageDistInexcusable $ - "In the 'data-files' field: " ++ explainGlobSyntaxError pat err - | pat <- dataFiles pkg - , Left err <- [parseFileGlob (specVersion pkg) pat] - ] - ++ - [ PackageDistInexcusable $ - "In the 'extra-source-files' field: " ++ explainGlobSyntaxError pat err - | pat <- extraSrcFiles pkg - , Left err <- [parseFileGlob (specVersion pkg) pat] - ] - ++ - [ PackageDistInexcusable $ - "In the 'extra-doc-files' field: " ++ explainGlobSyntaxError pat err - | pat <- extraDocFiles pkg - , Left err <- [parseFileGlob (specVersion pkg) pat] - ] - where - isOutsideTree path = case splitDirectories path of - "..":_ -> True - ".":"..":_ -> True - _ -> False - isInsideDist path = case map lowercase (splitDirectories path) of - "dist" :_ -> True - ".":"dist":_ -> True - _ -> False - -- paths that must be relative - relPaths = - [ (path, "extra-source-files") | path <- extraSrcFiles pkg ] - ++ [ (path, "extra-tmp-files") | path <- extraTmpFiles pkg ] - ++ [ (path, "extra-doc-files") | path <- extraDocFiles pkg ] - ++ [ (path, "data-files") | path <- dataFiles pkg ] - ++ [ (path, "data-dir") | path <- [dataDir pkg]] - ++ [ (path, "license-file") | path <- licenseFiles pkg ] - ++ concat - [ [ (path, "asm-sources") | path <- asmSources bi ] - ++ [ (path, "cmm-sources") | path <- cmmSources bi ] - ++ [ (path, "c-sources") | path <- cSources bi ] - ++ [ (path, "cxx-sources") | path <- cxxSources bi ] - ++ [ (path, "js-sources") | path <- jsSources bi ] - ++ [ (path, "install-includes") | path <- installIncludes bi ] - ++ [ (path, "hs-source-dirs") | path <- hsSourceDirs bi ] - | bi <- allBuildInfo pkg ] - -- paths that are allowed 
to be absolute - absPaths = concat - [ [ (path, "includes") | path <- includes bi ] - ++ [ (path, "include-dirs") | path <- includeDirs bi ] - ++ [ (path, "extra-lib-dirs") | path <- extraLibDirs bi ] - | bi <- allBuildInfo pkg ] - ---TODO: check sets of paths that would be interpreted differently between Unix --- and windows, ie case-sensitive or insensitive. Things that might clash, or --- conversely be distinguished. - ---TODO: use the tar path checks on all the above paths - --- | Check that the package declares the version in the @\"cabal-version\"@ --- field correctly. --- -checkCabalVersion :: PackageDescription -> [PackageCheck] -checkCabalVersion pkg = - catMaybes [ - - -- check syntax of cabal-version field - check (specVersion pkg >= mkVersion [1,10] - && not simpleSpecVersionRangeSyntax) $ - PackageBuildWarning $ - "Packages relying on Cabal 1.10 or later must only specify a " - ++ "version range of the form 'cabal-version: >= x.y'. Use " - ++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'." - - -- check syntax of cabal-version field - , check (specVersion pkg < mkVersion [1,9] - && not simpleSpecVersionRangeSyntax) $ - PackageDistSuspicious $ - "It is recommended that the 'cabal-version' field only specify a " - ++ "version range of the form '>= x.y'. Use " - ++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'. " - ++ "Tools based on Cabal 1.10 and later will ignore upper bounds." - - -- check syntax of cabal-version field - , checkVersion [1,12] simpleSpecVersionSyntax $ - PackageBuildWarning $ - "With Cabal 1.10 or earlier, the 'cabal-version' field must use " - ++ "range syntax rather than a simple version number. Use " - ++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'." 
- - -- check syntax of cabal-version field - , check (specVersion pkg >= mkVersion [1,12] - && not simpleSpecVersionSyntax) $ - (if specVersion pkg >= mkVersion [2,0] then PackageDistSuspicious else PackageDistSuspiciousWarn) $ - "Packages relying on Cabal 1.12 or later should specify a " - ++ "specific version of the Cabal spec of the form " - ++ "'cabal-version: x.y'. " - ++ "Use 'cabal-version: " ++ display (specVersion pkg) ++ "'." - - -- check use of test suite sections - , checkVersion [1,8] (not (null $ testSuites pkg)) $ - PackageDistInexcusable $ - "The 'test-suite' section is new in Cabal 1.10. " - ++ "Unfortunately it messes up the parser in older Cabal versions " - ++ "so you must specify at least 'cabal-version: >= 1.8', but note " - ++ "that only Cabal 1.10 and later can actually run such test suites." - - -- check use of default-language field - -- note that we do not need to do an equivalent check for the - -- other-language field since that one does not change behaviour - , checkVersion [1,10] (any isJust (buildInfoField defaultLanguage)) $ - PackageBuildWarning $ - "To use the 'default-language' field the package needs to specify " - ++ "at least 'cabal-version: >= 1.10'." - - , check (specVersion pkg >= mkVersion [1,10] - && (any isNothing (buildInfoField defaultLanguage))) $ - PackageBuildWarning $ - "Packages using 'cabal-version: >= 1.10' must specify the " - ++ "'default-language' field for each component (e.g. Haskell98 or " - ++ "Haskell2010). If a component uses different languages in " - ++ "different modules then list the other ones in the " - ++ "'other-languages' field." - - , checkVersion [1,18] - (not . null $ extraDocFiles pkg) $ - PackageDistInexcusable $ - "To use the 'extra-doc-files' field the package needs to specify " - ++ "at least 'cabal-version: >= 1.18'." 
- - , checkVersion [2,0] - (not (null (subLibraries pkg))) $ - PackageDistInexcusable $ - "To use multiple 'library' sections or a named library section " - ++ "the package needs to specify at least 'cabal-version: 2.0'." - - -- check use of reexported-modules sections - , checkVersion [1,21] - (any (not.null.reexportedModules) (allLibraries pkg)) $ - PackageDistInexcusable $ - "To use the 'reexported-module' field the package needs to specify " - ++ "at least 'cabal-version: >= 1.22'." - - -- check use of thinning and renaming - , checkVersion [1,25] usesBackpackIncludes $ - PackageDistInexcusable $ - "To use the 'mixins' field the package needs to specify " - ++ "at least 'cabal-version: 2.0'." - - -- check use of 'extra-framework-dirs' field - , checkVersion [1,23] (any (not . null) (buildInfoField extraFrameworkDirs)) $ - -- Just a warning, because this won't break on old Cabal versions. - PackageDistSuspiciousWarn $ - "To use the 'extra-framework-dirs' field the package needs to specify" - ++ " at least 'cabal-version: >= 1.24'." - - -- check use of default-extensions field - -- don't need to do the equivalent check for other-extensions - , checkVersion [1,10] (any (not . null) (buildInfoField defaultExtensions)) $ - PackageBuildWarning $ - "To use the 'default-extensions' field the package needs to specify " - ++ "at least 'cabal-version: >= 1.10'." - - -- check use of extensions field - , check (specVersion pkg >= mkVersion [1,10] - && (any (not . null) (buildInfoField oldExtensions))) $ - PackageBuildWarning $ - "For packages using 'cabal-version: >= 1.10' the 'extensions' " - ++ "field is deprecated. The new 'default-extensions' field lists " - ++ "extensions that are used in all modules in the component, while " - ++ "the 'other-extensions' field lists extensions that are used in " - ++ "some modules, e.g. via the {-# LANGUAGE #-} pragma." 
- - -- check use of "foo (>= 1.0 && < 1.4) || >=1.8 " version-range syntax - , checkVersion [1,8] (not (null versionRangeExpressions)) $ - PackageDistInexcusable $ - "The package uses full version-range expressions " - ++ "in a 'build-depends' field: " - ++ commaSep (map displayRawDependency versionRangeExpressions) - ++ ". To use this new syntax the package needs to specify at least " - ++ "'cabal-version: >= 1.8'. Alternatively, if broader compatibility " - ++ "is important, then convert to conjunctive normal form, and use " - ++ "multiple 'build-depends:' lines, one conjunct per line." - - -- check use of "build-depends: foo == 1.*" syntax - , checkVersion [1,6] (not (null depsUsingWildcardSyntax)) $ - PackageDistInexcusable $ - "The package uses wildcard syntax in the 'build-depends' field: " - ++ commaSep (map display depsUsingWildcardSyntax) - ++ ". To use this new syntax the package need to specify at least " - ++ "'cabal-version: >= 1.6'. Alternatively, if broader compatibility " - ++ "is important then use: " ++ commaSep - [ display (Dependency name (eliminateWildcardSyntax versionRange)) - | Dependency name versionRange <- depsUsingWildcardSyntax ] - - -- check use of "build-depends: foo ^>= 1.2.3" syntax - , checkVersion [2,0] (not (null depsUsingMajorBoundSyntax)) $ - PackageDistInexcusable $ - "The package uses major bounded version syntax in the " - ++ "'build-depends' field: " - ++ commaSep (map display depsUsingMajorBoundSyntax) - ++ ". To use this new syntax the package need to specify at least " - ++ "'cabal-version: 2.0'. Alternatively, if broader compatibility " - ++ "is important then use: " ++ commaSep - [ display (Dependency name (eliminateMajorBoundSyntax versionRange)) - | Dependency name versionRange <- depsUsingMajorBoundSyntax ] - - , checkVersion [2,1] (any (not . 
null) - (concatMap buildInfoField - [ asmSources - , cmmSources - , extraBundledLibs - , extraLibFlavours ])) $ - PackageDistInexcusable $ - "The use of 'asm-sources', 'cmm-sources', 'extra-bundled-libraries' " - ++ " and 'extra-library-flavours' requires the package " - ++ " to specify at least 'cabal-version: >= 2.1'." - - , checkVersion [2,1] (any (not . null) - (buildInfoField virtualModules)) $ - PackageDistInexcusable $ - "The use of 'virtual-modules' requires the package " - ++ " to specify at least 'cabal-version: >= 2.1'." - - -- check use of "tested-with: GHC (>= 1.0 && < 1.4) || >=1.8 " syntax - , checkVersion [1,8] (not (null testedWithVersionRangeExpressions)) $ - PackageDistInexcusable $ - "The package uses full version-range expressions " - ++ "in a 'tested-with' field: " - ++ commaSep (map displayRawDependency testedWithVersionRangeExpressions) - ++ ". To use this new syntax the package needs to specify at least " - ++ "'cabal-version: >= 1.8'." - - -- check use of "tested-with: GHC == 6.12.*" syntax - , checkVersion [1,6] (not (null testedWithUsingWildcardSyntax)) $ - PackageDistInexcusable $ - "The package uses wildcard syntax in the 'tested-with' field: " - ++ commaSep (map display testedWithUsingWildcardSyntax) - ++ ". To use this new syntax the package need to specify at least " - ++ "'cabal-version: >= 1.6'. Alternatively, if broader compatibility " - ++ "is important then use: " ++ commaSep - [ display (Dependency name (eliminateWildcardSyntax versionRange)) - | Dependency name versionRange <- testedWithUsingWildcardSyntax ] - - -- check use of "source-repository" section - , checkVersion [1,6] (not (null (sourceRepos pkg))) $ - PackageDistInexcusable $ - "The 'source-repository' section is new in Cabal 1.6. " - ++ "Unfortunately it messes up the parser in earlier Cabal versions " - ++ "so you need to specify 'cabal-version: >= 1.6'." 
- - -- check for new language extensions - , checkVersion [1,2,3] (not (null mentionedExtensionsThatNeedCabal12)) $ - PackageDistInexcusable $ - "Unfortunately the language extensions " - ++ commaSep (map (quote . display) mentionedExtensionsThatNeedCabal12) - ++ " break the parser in earlier Cabal versions so you need to " - ++ "specify 'cabal-version: >= 1.2.3'. Alternatively if you require " - ++ "compatibility with earlier Cabal versions then you may be able to " - ++ "use an equivalent compiler-specific flag." - - , checkVersion [1,4] (not (null mentionedExtensionsThatNeedCabal14)) $ - PackageDistInexcusable $ - "Unfortunately the language extensions " - ++ commaSep (map (quote . display) mentionedExtensionsThatNeedCabal14) - ++ " break the parser in earlier Cabal versions so you need to " - ++ "specify 'cabal-version: >= 1.4'. Alternatively if you require " - ++ "compatibility with earlier Cabal versions then you may be able to " - ++ "use an equivalent compiler-specific flag." - - , check (specVersion pkg >= mkVersion [1,23] - && isNothing (setupBuildInfo pkg) - && buildType pkg == Custom) $ - PackageBuildWarning $ - "Packages using 'cabal-version: >= 1.24' with 'build-type: Custom' " - ++ "must use a 'custom-setup' section with a 'setup-depends' field " - ++ "that specifies the dependencies of the Setup.hs script itself. " - ++ "The 'setup-depends' field uses the same syntax as 'build-depends', " - ++ "so a simple example would be 'setup-depends: base, Cabal'." - - , check (specVersion pkg < mkVersion [1,23] - && isNothing (setupBuildInfo pkg) - && buildType pkg == Custom) $ - PackageDistSuspiciousWarn $ - "From version 1.24 cabal supports specifiying explicit dependencies " - ++ "for Custom setup scripts. Consider using cabal-version >= 1.24 and " - ++ "adding a 'custom-setup' section with a 'setup-depends' field " - ++ "that specifies the dependencies of the Setup.hs script itself. 
" - ++ "The 'setup-depends' field uses the same syntax as 'build-depends', " - ++ "so a simple example would be 'setup-depends: base, Cabal'." - - , check (specVersion pkg >= mkVersion [1,25] - && elem (autogenPathsModuleName pkg) allModuleNames - && not (elem (autogenPathsModuleName pkg) allModuleNamesAutogen) ) $ - PackageDistInexcusable $ - "Packages using 'cabal-version: 2.0' and the autogenerated " - ++ "module Paths_* must include it also on the 'autogen-modules' field " - ++ "besides 'exposed-modules' and 'other-modules'. This specifies that " - ++ "the module does not come with the package and is generated on " - ++ "setup. Modules built with a custom Setup.hs script also go here " - ++ "to ensure that commands like sdist don't fail." - - ] - where - -- Perform a check on packages that use a version of the spec less than - -- the version given. This is for cases where a new Cabal version adds - -- a new feature and we want to check that it is not used prior to that - -- version. - checkVersion :: [Int] -> Bool -> PackageCheck -> Maybe PackageCheck - checkVersion ver cond pc - | specVersion pkg >= mkVersion ver = Nothing - | otherwise = check cond pc - - buildInfoField field = map field (allBuildInfo pkg) - - versionRangeExpressions = - [ dep | dep@(Dependency _ vr) <- allBuildDepends pkg - , usesNewVersionRangeSyntax vr ] - - testedWithVersionRangeExpressions = - [ Dependency (mkPackageName (display compiler)) vr - | (compiler, vr) <- testedWith pkg - , usesNewVersionRangeSyntax vr ] - - simpleSpecVersionRangeSyntax = - either (const True) (cataVersionRange alg) (specVersionRaw pkg) - where - alg (OrLaterVersionF _) = True - alg _ = False - - -- is the cabal-version field a simple version number, rather than a range - simpleSpecVersionSyntax = - either (const True) (const False) (specVersionRaw pkg) - - usesNewVersionRangeSyntax :: VersionRange -> Bool - usesNewVersionRangeSyntax - = (> 2) -- uses the new syntax if depth is more than 2 - . 
cataVersionRange alg - where - alg (UnionVersionRangesF a b) = a + b - alg (IntersectVersionRangesF a b) = a + b - alg (VersionRangeParensF _) = 3 - alg _ = 1 :: Int - - depsUsingWildcardSyntax = [ dep | dep@(Dependency _ vr) <- allBuildDepends pkg - , usesWildcardSyntax vr ] - - depsUsingMajorBoundSyntax = [ dep | dep@(Dependency _ vr) <- allBuildDepends pkg - , usesMajorBoundSyntax vr ] - - usesBackpackIncludes = any (not . null . mixins) (allBuildInfo pkg) - - testedWithUsingWildcardSyntax = - [ Dependency (mkPackageName (display compiler)) vr - | (compiler, vr) <- testedWith pkg - , usesWildcardSyntax vr ] - - usesWildcardSyntax :: VersionRange -> Bool - usesWildcardSyntax = cataVersionRange alg - where - alg (WildcardVersionF _) = True - alg (UnionVersionRangesF a b) = a || b - alg (IntersectVersionRangesF a b) = a || b - alg (VersionRangeParensF a) = a - alg _ = False - - -- NB: this eliminates both, WildcardVersion and MajorBoundVersion - -- because when WildcardVersion is not support, neither is MajorBoundVersion - eliminateWildcardSyntax = hyloVersionRange embed projectVersionRange - where - embed (WildcardVersionF v) = intersectVersionRanges - (orLaterVersion v) (earlierVersion (wildcardUpperBound v)) - embed (MajorBoundVersionF v) = intersectVersionRanges - (orLaterVersion v) (earlierVersion (majorUpperBound v)) - embed vr = embedVersionRange vr - - usesMajorBoundSyntax :: VersionRange -> Bool - usesMajorBoundSyntax = cataVersionRange alg - where - alg (MajorBoundVersionF _) = True - alg (UnionVersionRangesF a b) = a || b - alg (IntersectVersionRangesF a b) = a || b - alg (VersionRangeParensF a) = a - alg _ = False - - eliminateMajorBoundSyntax = hyloVersionRange embed projectVersionRange - where - embed (MajorBoundVersionF v) = intersectVersionRanges - (orLaterVersion v) (earlierVersion (majorUpperBound v)) - embed vr = embedVersionRange vr - - mentionedExtensions = [ ext | bi <- allBuildInfo pkg - , ext <- allExtensions bi ] - 
mentionedExtensionsThatNeedCabal12 = - nub (filter (`elem` compatExtensionsExtra) mentionedExtensions) - - -- As of Cabal-1.4 we can add new extensions without worrying about - -- breaking old versions of cabal. - mentionedExtensionsThatNeedCabal14 = - nub (filter (`notElem` compatExtensions) mentionedExtensions) - - -- The known extensions in Cabal-1.2.3 - compatExtensions = - map EnableExtension - [ OverlappingInstances, UndecidableInstances, IncoherentInstances - , RecursiveDo, ParallelListComp, MultiParamTypeClasses - , FunctionalDependencies, Rank2Types - , RankNTypes, PolymorphicComponents, ExistentialQuantification - , ScopedTypeVariables, ImplicitParams, FlexibleContexts - , FlexibleInstances, EmptyDataDecls, CPP, BangPatterns - , TypeSynonymInstances, TemplateHaskell, ForeignFunctionInterface - , Arrows, Generics, NamedFieldPuns, PatternGuards - , GeneralizedNewtypeDeriving, ExtensibleRecords, RestrictedTypeSynonyms - , HereDocuments] ++ - map DisableExtension - [MonomorphismRestriction, ImplicitPrelude] ++ - compatExtensionsExtra - - -- The extra known extensions in Cabal-1.2.3 vs Cabal-1.1.6 - -- (Cabal-1.1.6 came with ghc-6.6. 
Cabal-1.2 came with ghc-6.8) - compatExtensionsExtra = - map EnableExtension - [ KindSignatures, MagicHash, TypeFamilies, StandaloneDeriving - , UnicodeSyntax, PatternSignatures, UnliftedFFITypes, LiberalTypeSynonyms - , TypeOperators, RecordWildCards, RecordPuns, DisambiguateRecordFields - , OverloadedStrings, GADTs, RelaxedPolyRec - , ExtendedDefaultRules, UnboxedTuples, DeriveDataTypeable - , ConstrainedClassMethods - ] ++ - map DisableExtension - [MonoPatBinds] - - allModuleNames = - (case library pkg of - Nothing -> [] - (Just lib) -> explicitLibModules lib - ) - ++ concatMap otherModules (allBuildInfo pkg) - - allModuleNamesAutogen = concatMap autogenModules (allBuildInfo pkg) - -displayRawDependency :: Dependency -> String -displayRawDependency (Dependency pkg vr) = - display pkg ++ " " ++ display vr - - --- ------------------------------------------------------------ --- * Checks on the GenericPackageDescription --- ------------------------------------------------------------ - --- | Check the build-depends fields for any weirdness or bad practise. --- -checkPackageVersions :: GenericPackageDescription -> [PackageCheck] -checkPackageVersions pkg = - catMaybes [ - - -- Check that the version of base is bounded above. - -- For example this bans "build-depends: base >= 3". - -- It should probably be "build-depends: base >= 3 && < 4" - -- which is the same as "build-depends: base == 3.*" - check (not (boundedAbove baseDependency)) $ - PackageDistInexcusable $ - "The dependency 'build-depends: base' does not specify an upper " - ++ "bound on the version number. Each major release of the 'base' " - ++ "package changes the API in various ways and most packages will " - ++ "need some changes to compile with it. The recommended practise " - ++ "is to specify an upper bound on the version of the 'base' " - ++ "package. This ensures your package will continue to build when a " - ++ "new major version of the 'base' package is released. 
If you are " - ++ "not sure what upper bound to use then use the next major " - ++ "version. For example if you have tested your package with 'base' " - ++ "version 4.5 and 4.6 then use 'build-depends: base >= 4.5 && < 4.7'." - - ] - where - -- TODO: What we really want to do is test if there exists any - -- configuration in which the base version is unbounded above. - -- However that's a bit tricky because there are many possible - -- configurations. As a cheap easy and safe approximation we will - -- pick a single "typical" configuration and check if that has an - -- open upper bound. To get a typical configuration we finalise - -- using no package index and the current platform. - finalised = finalizePD - mempty defaultComponentRequestedSpec (const True) - buildPlatform - (unknownCompilerInfo - (CompilerId buildCompilerFlavor nullVersion) - NoAbiTag) - [] pkg - baseDependency = case finalised of - Right (pkg', _) | not (null baseDeps) -> - foldr intersectVersionRanges anyVersion baseDeps - where - baseDeps = - [ vr | Dependency pname vr <- allBuildDepends pkg' - , pname == mkPackageName "base" ] - - -- Just in case finalizePD fails for any reason, - -- or if the package doesn't depend on the base package at all, - -- then we will just skip the check, since boundedAbove noVersion = True - _ -> noVersion - - boundedAbove :: VersionRange -> Bool - boundedAbove vr = case asVersionIntervals vr of - [] -> True -- this is the inconsistent version range. 
- intervals -> case last intervals of - (_, UpperBound _ _) -> True - (_, NoUpperBound ) -> False - - -checkConditionals :: GenericPackageDescription -> [PackageCheck] -checkConditionals pkg = - catMaybes [ - - check (not $ null unknownOSs) $ - PackageDistInexcusable $ - "Unknown operating system name " - ++ commaSep (map quote unknownOSs) - - , check (not $ null unknownArches) $ - PackageDistInexcusable $ - "Unknown architecture name " - ++ commaSep (map quote unknownArches) - - , check (not $ null unknownImpls) $ - PackageDistInexcusable $ - "Unknown compiler name " - ++ commaSep (map quote unknownImpls) - ] - where - unknownOSs = [ os | OS (OtherOS os) <- conditions ] - unknownArches = [ arch | Arch (OtherArch arch) <- conditions ] - unknownImpls = [ impl | Impl (OtherCompiler impl) _ <- conditions ] - conditions = concatMap fvs (maybeToList (condLibrary pkg)) - ++ concatMap (fvs . snd) (condSubLibraries pkg) - ++ concatMap (fvs . snd) (condForeignLibs pkg) - ++ concatMap (fvs . snd) (condExecutables pkg) - ++ concatMap (fvs . snd) (condTestSuites pkg) - ++ concatMap (fvs . snd) (condBenchmarks pkg) - fvs (CondNode _ _ ifs) = concatMap compfv ifs -- free variables - compfv (CondBranch c ct mct) = condfv c ++ fvs ct ++ maybe [] fvs mct - condfv c = case c of - Var v -> [v] - Lit _ -> [] - CNot c1 -> condfv c1 - COr c1 c2 -> condfv c1 ++ condfv c2 - CAnd c1 c2 -> condfv c1 ++ condfv c2 - -checkFlagNames :: GenericPackageDescription -> [PackageCheck] -checkFlagNames gpd - | null invalidFlagNames = [] - | otherwise = [ PackageDistInexcusable - $ "Suspicious flag names: " ++ unwords invalidFlagNames ++ ". " - ++ "To avoid ambiguity in command line interfaces, flag shouldn't " - ++ "start with a dash. Also for better compatibility, flag names " - ++ "shouldn't contain non-ascii characters." 
- ] - where - invalidFlagNames = - [ fn - | flag <- genPackageFlags gpd - , let fn = unFlagName (flagName flag) - , invalidFlagName fn - ] - -- starts with dash - invalidFlagName ('-':_) = True - -- mon ascii letter - invalidFlagName cs = any (not . isAscii) cs - -checkUnusedFlags :: GenericPackageDescription -> [PackageCheck] -checkUnusedFlags gpd - | declared == used = [] - | otherwise = [ PackageDistSuspicious - $ "Declared and used flag sets differ: " - ++ s declared ++ " /= " ++ s used ++ ". " - ] - where - s :: Set.Set FlagName -> String - s = commaSep . map unFlagName . Set.toList - - declared :: Set.Set FlagName - declared = toSetOf (L.genPackageFlags . traverse . L.flagName) gpd - - used :: Set.Set FlagName - used = mconcat - [ toSetOf (L.condLibrary . traverse . traverseCondTreeV . L._Flag) gpd - , toSetOf (L.condSubLibraries . traverse . _2 . traverseCondTreeV . L._Flag) gpd - , toSetOf (L.condForeignLibs . traverse . _2 . traverseCondTreeV . L._Flag) gpd - , toSetOf (L.condExecutables . traverse . _2 . traverseCondTreeV . L._Flag) gpd - , toSetOf (L.condTestSuites . traverse . _2 . traverseCondTreeV . L._Flag) gpd - , toSetOf (L.condBenchmarks . traverse . _2 . traverseCondTreeV . L._Flag) gpd - ] - -checkUnicodeXFields :: GenericPackageDescription -> [PackageCheck] -checkUnicodeXFields gpd - | null nonAsciiXFields = [] - | otherwise = [ PackageDistInexcusable - $ "Non ascii custom fields: " ++ unwords nonAsciiXFields ++ ". " - ++ "For better compatibility, custom field names " - ++ "shouldn't contain non-ascii characters." - ] - where - nonAsciiXFields :: [String] - nonAsciiXFields = [ n | (n, _) <- xfields, any (not . isAscii) n ] - - xfields :: [(String,String)] - xfields = DList.runDList $ mconcat - [ toDListOf (L.packageDescription . L.customFieldsPD . traverse) gpd - , toDListOf (L.traverseBuildInfos . L.customFieldsBI . traverse) gpd - ] - --- | cabal-version <2.2 + Paths_module + default-extensions: doesn't build. 
-checkPathsModuleExtensions :: PackageDescription -> [PackageCheck] -checkPathsModuleExtensions pd - | specVersion pd >= mkVersion [2,1] = [] - | any checkBI (allBuildInfo pd) || any checkLib (allLibraries pd) - = return $ PackageBuildImpossible $ unwords - [ "The package uses RebindableSyntax with OverloadedStrings or OverloadedLists" - , "in default-extensions, and also Paths_ autogen module." - , "That configuration is known to cause compile failures with Cabal < 2.2." - , "To use these default-extensions with Paths_ autogen module" - , "specify at least 'cabal-version: 2.2'." - ] - | otherwise = [] - where - mn = autogenPathsModuleName pd - - checkLib :: Library -> Bool - checkLib l = mn `elem` exposedModules l && checkExts (l ^. L.defaultExtensions) - - checkBI :: BuildInfo -> Bool - checkBI bi = - (mn `elem` otherModules bi || mn `elem` autogenModules bi) && - checkExts (bi ^. L.defaultExtensions) - - checkExts exts = rebind `elem` exts && (strings `elem` exts || lists `elem` exts) - where - rebind = EnableExtension RebindableSyntax - strings = EnableExtension OverloadedStrings - lists = EnableExtension OverloadedLists - -checkDevelopmentOnlyFlagsBuildInfo :: BuildInfo -> [PackageCheck] -checkDevelopmentOnlyFlagsBuildInfo bi = - catMaybes [ - - check has_WerrorWall $ - PackageDistInexcusable $ - "'ghc-options: -Wall -Werror' makes the package very easy to " - ++ "break with future GHC versions because new GHC versions often " - ++ "add new warnings. Use just 'ghc-options: -Wall' instead." - ++ extraExplanation - - , check (not has_WerrorWall && has_Werror) $ - PackageDistInexcusable $ - "'ghc-options: -Werror' makes the package easy to " - ++ "break with future GHC versions because new GHC versions often " - ++ "add new warnings. " - ++ extraExplanation - - , check (has_J) $ - PackageDistInexcusable $ - "'ghc-options: -j[N]' can make sense for specific user's setup," - ++ " but it is not appropriate for a distributed package." 
- ++ extraExplanation - - , checkFlags ["-fdefer-type-errors"] $ - PackageDistInexcusable $ - "'ghc-options: -fdefer-type-errors' is fine during development but " - ++ "is not appropriate for a distributed package. " - ++ extraExplanation - - -- -dynamic is not a debug flag - , check (any (\opt -> "-d" `isPrefixOf` opt && opt /= "-dynamic") - ghc_options) $ - PackageDistInexcusable $ - "'ghc-options: -d*' debug flags are not appropriate " - ++ "for a distributed package. " - ++ extraExplanation - - , checkFlags ["-fprof-auto", "-fprof-auto-top", "-fprof-auto-calls", - "-fprof-cafs", "-fno-prof-count-entries", - "-auto-all", "-auto", "-caf-all"] $ - PackageDistSuspicious $ - "'ghc-options/ghc-prof-options: -fprof*' profiling flags are typically not " - ++ "appropriate for a distributed library package. These flags are " - ++ "useful to profile this package, but when profiling other packages " - ++ "that use this one these flags clutter the profile output with " - ++ "excessive detail. If you think other packages really want to see " - ++ "cost centres from this package then use '-fprof-auto-exported' " - ++ "which puts cost centres only on exported functions. " - ++ extraExplanation - ] - where - extraExplanation = - " Alternatively, if you want to use this, make it conditional based " - ++ "on a Cabal configuration flag (with 'manual: True' and 'default: " - ++ "False') and enable that flag during development." 
- - has_WerrorWall = has_Werror && ( has_Wall || has_W ) - has_Werror = "-Werror" `elem` ghc_options - has_Wall = "-Wall" `elem` ghc_options - has_W = "-W" `elem` ghc_options - has_J = any - (\o -> case o of - "-j" -> True - ('-' : 'j' : d : _) -> isDigit d - _ -> False - ) - ghc_options - ghc_options = hcOptions GHC bi ++ hcProfOptions GHC bi - ++ hcSharedOptions GHC bi - - checkFlags :: [String] -> PackageCheck -> Maybe PackageCheck - checkFlags flags = check (any (`elem` flags) ghc_options) - -checkDevelopmentOnlyFlags :: GenericPackageDescription -> [PackageCheck] -checkDevelopmentOnlyFlags pkg = - concatMap checkDevelopmentOnlyFlagsBuildInfo - [ bi - | (conditions, bi) <- allConditionalBuildInfo - , not (any guardedByManualFlag conditions) ] - where - guardedByManualFlag = definitelyFalse - - -- We've basically got three-values logic here: True, False or unknown - -- hence this pattern to propagate the unknown cases properly. - definitelyFalse (Var (Flag n)) = maybe False not (Map.lookup n manualFlags) - definitelyFalse (Var _) = False - definitelyFalse (Lit b) = not b - definitelyFalse (CNot c) = definitelyTrue c - definitelyFalse (COr c1 c2) = definitelyFalse c1 && definitelyFalse c2 - definitelyFalse (CAnd c1 c2) = definitelyFalse c1 || definitelyFalse c2 - - definitelyTrue (Var (Flag n)) = fromMaybe False (Map.lookup n manualFlags) - definitelyTrue (Var _) = False - definitelyTrue (Lit b) = b - definitelyTrue (CNot c) = definitelyFalse c - definitelyTrue (COr c1 c2) = definitelyTrue c1 || definitelyTrue c2 - definitelyTrue (CAnd c1 c2) = definitelyTrue c1 && definitelyTrue c2 - - manualFlags = Map.fromList - [ (flagName flag, flagDefault flag) - | flag <- genPackageFlags pkg - , flagManual flag ] - - allConditionalBuildInfo :: [([Condition ConfVar], BuildInfo)] - allConditionalBuildInfo = - concatMap (collectCondTreePaths libBuildInfo) - (maybeToList (condLibrary pkg)) - - ++ concatMap (collectCondTreePaths libBuildInfo . 
snd) - (condSubLibraries pkg) - - ++ concatMap (collectCondTreePaths buildInfo . snd) - (condExecutables pkg) - - ++ concatMap (collectCondTreePaths testBuildInfo . snd) - (condTestSuites pkg) - - ++ concatMap (collectCondTreePaths benchmarkBuildInfo . snd) - (condBenchmarks pkg) - - -- get all the leaf BuildInfo, paired up with the path (in the tree sense) - -- of if-conditions that guard it - collectCondTreePaths :: (a -> b) - -> CondTree v c a - -> [([Condition v], b)] - collectCondTreePaths mapData = go [] - where - go conditions condNode = - -- the data at this level in the tree: - (reverse conditions, mapData (condTreeData condNode)) - - : concat - [ go (condition:conditions) ifThen - | (CondBranch condition ifThen _) <- condTreeComponents condNode ] - - ++ concat - [ go (condition:conditions) elseThen - | (CondBranch condition _ (Just elseThen)) <- condTreeComponents condNode ] - - --- ------------------------------------------------------------ --- * Checks involving files in the package --- ------------------------------------------------------------ - --- | Sanity check things that requires IO. It looks at the files in the --- package and expects to find the package unpacked in at the given file path. --- -checkPackageFiles :: Verbosity -> PackageDescription -> FilePath -> NoCallStackIO [PackageCheck] -checkPackageFiles verbosity pkg root = do - contentChecks <- checkPackageContent checkFilesIO pkg - preDistributionChecks <- checkPackageFilesPreDistribution verbosity pkg root - -- Sort because different platforms will provide files from - -- `getDirectoryContents` in different orders, and we'd like to be - -- stable for test output. - return (sort contentChecks ++ sort preDistributionChecks) - where - checkFilesIO = CheckPackageContentOps { - doesFileExist = System.doesFileExist . relative, - doesDirectoryExist = System.doesDirectoryExist . relative, - getDirectoryContents = System.Directory.getDirectoryContents . 
relative, - getFileContents = BS.readFile . relative - } - relative path = root path - --- | A record of operations needed to check the contents of packages. --- Used by 'checkPackageContent'. --- -data CheckPackageContentOps m = CheckPackageContentOps { - doesFileExist :: FilePath -> m Bool, - doesDirectoryExist :: FilePath -> m Bool, - getDirectoryContents :: FilePath -> m [FilePath], - getFileContents :: FilePath -> m BS.ByteString - } - --- | Sanity check things that requires looking at files in the package. --- This is a generalised version of 'checkPackageFiles' that can work in any --- monad for which you can provide 'CheckPackageContentOps' operations. --- --- The point of this extra generality is to allow doing checks in some virtual --- file system, for example a tarball in memory. --- -checkPackageContent :: Monad m => CheckPackageContentOps m - -> PackageDescription - -> m [PackageCheck] -checkPackageContent ops pkg = do - cabalBomError <- checkCabalFileBOM ops - cabalNameError <- checkCabalFileName ops pkg - licenseErrors <- checkLicensesExist ops pkg - setupError <- checkSetupExists ops pkg - configureError <- checkConfigureExists ops pkg - localPathErrors <- checkLocalPathsExist ops pkg - vcsLocation <- checkMissingVcsInfo ops pkg - - return $ licenseErrors - ++ catMaybes [cabalBomError, cabalNameError, setupError, configureError] - ++ localPathErrors - ++ vcsLocation - -checkCabalFileBOM :: Monad m => CheckPackageContentOps m - -> m (Maybe PackageCheck) -checkCabalFileBOM ops = do - epdfile <- findPackageDesc ops - case epdfile of - -- MASSIVE HACK. If the Cabal file doesn't exist, that is - -- a very strange situation to be in, because the driver code - -- in 'Distribution.Setup' ought to have noticed already! - -- But this can be an issue, see #3552 and also when - -- --cabal-file is specified. So if you can't find the file, - -- just don't bother with this check. - Left _ -> return $ Nothing - Right pdfile -> (flip check pc . 
BS.isPrefixOf bomUtf8) - `liftM` (getFileContents ops pdfile) - where pc = PackageDistInexcusable $ - pdfile ++ " starts with an Unicode byte order mark (BOM)." - ++ " This may cause problems with older cabal versions." - - where - bomUtf8 :: BS.ByteString - bomUtf8 = BS.pack [0xef,0xbb,0xbf] -- U+FEFF encoded as UTF8 - -checkCabalFileName :: Monad m => CheckPackageContentOps m - -> PackageDescription - -> m (Maybe PackageCheck) -checkCabalFileName ops pkg = do - -- findPackageDesc already takes care to detect missing/multiple - -- .cabal files; we don't include this check in 'findPackageDesc' in - -- order not to short-cut other checks which call 'findPackageDesc' - epdfile <- findPackageDesc ops - case epdfile of - -- see "MASSIVE HACK" note in 'checkCabalFileBOM' - Left _ -> return Nothing - Right pdfile - | takeFileName pdfile == expectedCabalname -> return Nothing - | otherwise -> return $ Just $ PackageDistInexcusable $ - "The filename " ++ pdfile ++ " does not match package name " ++ - "(expected: " ++ expectedCabalname ++ ")" - where - pkgname = unPackageName . packageName $ pkg - expectedCabalname = pkgname <.> "cabal" - - --- |Find a package description file in the given directory. Looks for --- @.cabal@ files. Like 'Distribution.Simple.Utils.findPackageDesc', --- but generalized over monads. -findPackageDesc :: Monad m => CheckPackageContentOps m - -> m (Either PackageCheck FilePath) -- ^.cabal -findPackageDesc ops - = do let dir = "." 
- files <- getDirectoryContents ops dir - -- to make sure we do not mistake a ~/.cabal/ dir for a .cabal - -- file we filter to exclude dirs and null base file names: - cabalFiles <- filterM (doesFileExist ops) - [ dir file - | file <- files - , let (name, ext) = splitExtension file - , not (null name) && ext == ".cabal" ] - case cabalFiles of - [] -> return (Left $ PackageBuildImpossible noDesc) - [cabalFile] -> return (Right cabalFile) - multiple -> return (Left $ PackageBuildImpossible - $ multiDesc multiple) - - where - noDesc :: String - noDesc = "No cabal file found.\n" - ++ "Please create a package description file .cabal" - - multiDesc :: [String] -> String - multiDesc l = "Multiple cabal files found while checking.\n" - ++ "Please use only one of: " - ++ intercalate ", " l - -checkLicensesExist :: Monad m => CheckPackageContentOps m - -> PackageDescription - -> m [PackageCheck] -checkLicensesExist ops pkg = do - exists <- mapM (doesFileExist ops) (licenseFiles pkg) - return - [ PackageBuildWarning $ - "The '" ++ fieldname ++ "' field refers to the file " - ++ quote file ++ " which does not exist." - | (file, False) <- zip (licenseFiles pkg) exists ] - where - fieldname | length (licenseFiles pkg) == 1 = "license-file" - | otherwise = "license-files" - -checkSetupExists :: Monad m => CheckPackageContentOps m - -> PackageDescription - -> m (Maybe PackageCheck) -checkSetupExists ops pkg = do - let simpleBuild = buildType pkg == Simple - hsexists <- doesFileExist ops "Setup.hs" - lhsexists <- doesFileExist ops "Setup.lhs" - return $ check (not simpleBuild && not hsexists && not lhsexists) $ - PackageDistInexcusable $ - "The package is missing a Setup.hs or Setup.lhs script." 
- -checkConfigureExists :: Monad m => CheckPackageContentOps m - -> PackageDescription - -> m (Maybe PackageCheck) -checkConfigureExists ops pd - | buildType pd == Configure = do - exists <- doesFileExist ops "configure" - return $ check (not exists) $ - PackageBuildWarning $ - "The 'build-type' is 'Configure' but there is no 'configure' script. " - ++ "You probably need to run 'autoreconf -i' to generate it." - | otherwise = return Nothing - -checkLocalPathsExist :: Monad m => CheckPackageContentOps m - -> PackageDescription - -> m [PackageCheck] -checkLocalPathsExist ops pkg = do - let dirs = [ (dir, kind) - | bi <- allBuildInfo pkg - , (dir, kind) <- - [ (dir, "extra-lib-dirs") | dir <- extraLibDirs bi ] - ++ [ (dir, "extra-framework-dirs") - | dir <- extraFrameworkDirs bi ] - ++ [ (dir, "include-dirs") | dir <- includeDirs bi ] - ++ [ (dir, "hs-source-dirs") | dir <- hsSourceDirs bi ] - , isRelativeOnAnyPlatform dir ] - missing <- filterM (liftM not . doesDirectoryExist ops . fst) dirs - return [ PackageBuildWarning { - explanation = quote (kind ++ ": " ++ dir) - ++ " directory does not exist." - } - | (dir, kind) <- missing ] - -checkMissingVcsInfo :: Monad m => CheckPackageContentOps m - -> PackageDescription - -> m [PackageCheck] -checkMissingVcsInfo ops pkg | null (sourceRepos pkg) = do - vcsInUse <- liftM or $ mapM (doesDirectoryExist ops) repoDirnames - if vcsInUse - then return [ PackageDistSuspicious message ] - else return [] - where - repoDirnames = [ dirname | repo <- knownRepoTypes - , dirname <- repoTypeDirname repo ] - message = "When distributing packages it is encouraged to specify source " - ++ "control information in the .cabal file using one or more " - ++ "'source-repository' sections. See the Cabal user guide for " - ++ "details." 
- -checkMissingVcsInfo _ _ = return [] - -repoTypeDirname :: RepoType -> [FilePath] -repoTypeDirname Darcs = ["_darcs"] -repoTypeDirname Git = [".git"] -repoTypeDirname SVN = [".svn"] -repoTypeDirname CVS = ["CVS"] -repoTypeDirname Mercurial = [".hg"] -repoTypeDirname GnuArch = [".arch-params"] -repoTypeDirname Bazaar = [".bzr"] -repoTypeDirname Monotone = ["_MTN"] -repoTypeDirname _ = [] - - --- ------------------------------------------------------------ --- * Checks involving files in the package --- ------------------------------------------------------------ - --- | Check the names of all files in a package for portability problems. This --- should be done for example when creating or validating a package tarball. --- -checkPackageFileNames :: [FilePath] -> [PackageCheck] -checkPackageFileNames files = - (take 1 . mapMaybe checkWindowsPath $ files) - ++ (take 1 . mapMaybe checkTarPath $ files) - -- If we get any of these checks triggering then we're likely to get - -- many, and that's probably not helpful, so return at most one. - -checkWindowsPath :: FilePath -> Maybe PackageCheck -checkWindowsPath path = - check (not $ FilePath.Windows.isValid path') $ - PackageDistInexcusable $ - "Unfortunately, the file " ++ quote path ++ " is not a valid file " - ++ "name on Windows which would cause portability problems for this " - ++ "package. Windows file names cannot contain any of the characters " - ++ "\":*?<>|\" and there are a few reserved names including \"aux\", " - ++ "\"nul\", \"con\", \"prn\", \"com1-9\", \"lpt1-9\" and \"clock$\"." - where - path' = ".\\" ++ path - -- force a relative name to catch invalid file names like "f:oo" which - -- otherwise parse as file "oo" in the current directory on the 'f' drive. - --- | Check a file name is valid for the portable POSIX tar format. --- --- The POSIX tar format has a restriction on the length of file names. It is --- unfortunately not a simple restriction like a maximum length. 
The exact --- restriction is that either the whole path be 100 characters or less, or it --- be possible to split the path on a directory separator such that the first --- part is 155 characters or less and the second part 100 characters or less. --- -checkTarPath :: FilePath -> Maybe PackageCheck -checkTarPath path - | length path > 255 = Just longPath - | otherwise = case pack nameMax (reverse (splitPath path)) of - Left err -> Just err - Right [] -> Nothing - Right (h:rest) -> case pack prefixMax remainder of - Left err -> Just err - Right [] -> Nothing - Right (_:_) -> Just noSplit - where - -- drop the '/' between the name and prefix: - remainder = init h : rest - - where - nameMax, prefixMax :: Int - nameMax = 100 - prefixMax = 155 - - pack _ [] = Left emptyName - pack maxLen (c:cs) - | n > maxLen = Left longName - | otherwise = Right (pack' maxLen n cs) - where n = length c - - pack' maxLen n (c:cs) - | n' <= maxLen = pack' maxLen n' cs - where n' = n + length c - pack' _ _ cs = cs - - longPath = PackageDistInexcusable $ - "The following file name is too long to store in a portable POSIX " - ++ "format tar archive. The maximum length is 255 ASCII characters.\n" - ++ "The file in question is:\n " ++ path - longName = PackageDistInexcusable $ - "The following file name is too long to store in a portable POSIX " - ++ "format tar archive. The maximum length for the name part (including " - ++ "extension) is 100 ASCII characters. The maximum length for any " - ++ "individual directory component is 155.\n" - ++ "The file in question is:\n " ++ path - noSplit = PackageDistInexcusable $ - "The following file name is too long to store in a portable POSIX " - ++ "format tar archive. While the total length is less than 255 ASCII " - ++ "characters, there are unfortunately further restrictions. 
It has to " - ++ "be possible to split the file path on a directory separator into " - ++ "two parts such that the first part fits in 155 characters or less " - ++ "and the second part fits in 100 characters or less. Basically you " - ++ "have to make the file name or directory names shorter, or you could " - ++ "split a long directory name into nested subdirectories with shorter " - ++ "names.\nThe file in question is:\n " ++ path - emptyName = PackageDistInexcusable $ - "Encountered a file with an empty name, something is very wrong! " - ++ "Files with an empty name cannot be stored in a tar archive or in " - ++ "standard file systems." - --- -------------------------------------------------------------- --- * Checks for missing content and other pre-distribution checks --- -------------------------------------------------------------- - --- | Similar to 'checkPackageContent', 'checkPackageFilesPreDistribution' --- inspects the files included in the package, but is primarily looking for --- files in the working tree that may have been missed or other similar --- problems that can only be detected pre-distribution. --- --- Because Hackage necessarily checks the uploaded tarball, it is too late to --- check these on the server; these checks only make sense in the development --- and package-creation environment. Hence we can use IO, rather than needing --- to pass a 'CheckPackageContentOps' dictionary around. -checkPackageFilesPreDistribution :: Verbosity -> PackageDescription -> FilePath -> NoCallStackIO [PackageCheck] --- Note: this really shouldn't return any 'Inexcusable' warnings, --- because that will make us say that Hackage would reject the package. --- But, because Hackage doesn't run these tests, that will be a lie! -checkPackageFilesPreDistribution = checkGlobFiles - --- | Discover problems with the package's wildcards. 
-checkGlobFiles :: Verbosity - -> PackageDescription - -> FilePath - -> NoCallStackIO [PackageCheck] -checkGlobFiles verbosity pkg root = - fmap concat $ for allGlobs $ \(field, dir, glob) -> - -- Note: we just skip over parse errors here; they're reported elsewhere. - case parseFileGlob (specVersion pkg) glob of - Left _ -> return [] - Right parsedGlob -> do - results <- runDirFileGlob verbosity (root dir) parsedGlob - let individualWarnings = results >>= getWarning field glob - noMatchesWarning = - [ PackageDistSuspiciousWarn $ - "In '" ++ field ++ "': the pattern '" ++ glob ++ "' does not" - ++ " match any files." - | all (not . suppressesNoMatchesWarning) results - ] - return (noMatchesWarning ++ individualWarnings) - where - adjustedDataDir = if null (dataDir pkg) then "." else dataDir pkg - allGlobs = concat - [ (,,) "extra-source-files" "." <$> extraSrcFiles pkg - , (,,) "extra-doc-files" "." <$> extraDocFiles pkg - , (,,) "data-files" adjustedDataDir <$> dataFiles pkg - ] - - -- If there's a missing directory in play, since our globs don't - -- (currently) support disjunction, that will always mean there are no - -- matches. The no matches error in this case is strictly less informative - -- than the missing directory error, so sit on it. - suppressesNoMatchesWarning (GlobMatch _) = True - suppressesNoMatchesWarning (GlobWarnMultiDot _) = False - suppressesNoMatchesWarning (GlobMissingDirectory _) = True - - getWarning :: String -> FilePath -> GlobResult FilePath -> [PackageCheck] - getWarning _ _ (GlobMatch _) = - [] - -- Before Cabal 2.4, the extensions of globs had to match the file - -- exactly. This has been relaxed in 2.4 to allow matching only the - -- suffix. This warning detects when pre-2.4 package descriptions are - -- omitting files purely because of the stricter check. 
- getWarning field glob (GlobWarnMultiDot file) = - [ PackageDistSuspiciousWarn $ - "In '" ++ field ++ "': the pattern '" ++ glob ++ "' does not" - ++ " match the file '" ++ file ++ "' because the extensions do not" - ++ " exactly match (e.g., foo.en.html does not exactly match *.html)." - ++ " To enable looser suffix-only matching, set 'cabal-version: 2.4' or higher." - ] - getWarning field glob (GlobMissingDirectory dir) = - [ PackageDistSuspiciousWarn $ - "In '" ++ field ++ "': the pattern '" ++ glob ++ "' attempts to" - ++ " match files in the directory '" ++ dir ++ "', but there is no" - ++ " directory by that name." - ] - --- ------------------------------------------------------------ --- * Utils --- ------------------------------------------------------------ - -quote :: String -> String -quote s = "'" ++ s ++ "'" - -commaSep :: [String] -> String -commaSep = intercalate ", " - -dups :: Ord a => [a] -> [a] -dups xs = [ x | (x:_:_) <- group (sort xs) ] - -fileExtensionSupportedLanguage :: FilePath -> Bool -fileExtensionSupportedLanguage path = - isHaskell || isC - where - extension = takeExtension path - isHaskell = extension `elem` [".hs", ".lhs"] - isC = isJust (filenameCDialect extension) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription/Configuration.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription/Configuration.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription/Configuration.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription/Configuration.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,599 +0,0 @@ --- -fno-warn-deprecations for use of Map.foldWithKey -{-# OPTIONS_GHC -fno-warn-deprecations #-} ------------------------------------------------------------------------------ --- | --- Module : 
Distribution.PackageDescription.Configuration --- Copyright : Thomas Schilling, 2007 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is about the cabal configurations feature. It exports --- 'finalizePD' and 'flattenPackageDescription' which are --- functions for converting 'GenericPackageDescription's down to --- 'PackageDescription's. It has code for working with the tree of conditions --- and resolving or flattening conditions. - -module Distribution.PackageDescription.Configuration ( - finalizePD, - finalizePackageDescription, - flattenPackageDescription, - - -- Utils - parseCondition, - freeVars, - extractCondition, - extractConditions, - addBuildableCondition, - mapCondTree, - mapTreeData, - mapTreeConds, - mapTreeConstrs, - transformAllBuildInfos, - transformAllBuildDepends, - ) where - -import Prelude () -import Distribution.Compat.Prelude - --- lens -import qualified Distribution.Types.BuildInfo.Lens as L -import qualified Distribution.Types.GenericPackageDescription.Lens as L -import qualified Distribution.Types.PackageDescription.Lens as L -import qualified Distribution.Types.SetupBuildInfo.Lens as L - -import Distribution.PackageDescription -import Distribution.PackageDescription.Utils -import Distribution.Version -import Distribution.Compiler -import Distribution.System -import Distribution.Simple.Utils -import Distribution.Text -import Distribution.Compat.Lens -import Distribution.Compat.ReadP as ReadP hiding ( char ) -import qualified Distribution.Compat.ReadP as ReadP ( char ) -import Distribution.Types.ComponentRequestedSpec -import Distribution.Types.ForeignLib -import Distribution.Types.Component -import Distribution.Types.Dependency -import Distribution.Types.PackageName -import Distribution.Types.UnqualComponentName -import Distribution.Types.CondTree -import Distribution.Types.Condition -import Distribution.Types.DependencyMap - -import qualified Data.Map.Strict as Map.Strict -import 
qualified Data.Map.Lazy as Map -import Data.Tree ( Tree(Node) ) - ------------------------------------------------------------------------------- - --- | Simplify a configuration condition using the OS and arch names. Returns --- the names of all the flags occurring in the condition. -simplifyWithSysParams :: OS -> Arch -> CompilerInfo -> Condition ConfVar - -> (Condition FlagName, [FlagName]) -simplifyWithSysParams os arch cinfo cond = (cond', flags) - where - (cond', flags) = simplifyCondition cond interp - interp (OS os') = Right $ os' == os - interp (Arch arch') = Right $ arch' == arch - interp (Impl comp vr) - | matchImpl (compilerInfoId cinfo) = Right True - | otherwise = case compilerInfoCompat cinfo of - -- fixme: treat Nothing as unknown, rather than empty list once we - -- support partial resolution of system parameters - Nothing -> Right False - Just compat -> Right (any matchImpl compat) - where - matchImpl (CompilerId c v) = comp == c && v `withinRange` vr - interp (Flag f) = Left f - --- TODO: Add instances and check --- --- prop_sC_idempotent cond a o = cond' == cond'' --- where --- cond' = simplifyCondition cond a o --- cond'' = simplifyCondition cond' a o --- --- prop_sC_noLits cond a o = isLit res || not (hasLits res) --- where --- res = simplifyCondition cond a o --- hasLits (Lit _) = True --- hasLits (CNot c) = hasLits c --- hasLits (COr l r) = hasLits l || hasLits r --- hasLits (CAnd l r) = hasLits l || hasLits r --- hasLits _ = False --- - --- | Parse a configuration condition from a string. -parseCondition :: ReadP r (Condition ConfVar) -parseCondition = condOr - where - condOr = sepBy1 condAnd (oper "||") >>= return . foldl1 COr - condAnd = sepBy1 cond (oper "&&")>>= return . foldl1 CAnd - cond = sp >> (boolLiteral +++ inparens condOr +++ notCond +++ osCond - +++ archCond +++ flagCond +++ implCond ) - inparens = between (ReadP.char '(' >> sp) (sp >> ReadP.char ')' >> sp) - notCond = ReadP.char '!' >> sp >> cond >>= return . 
CNot - osCond = string "os" >> sp >> inparens osIdent >>= return . Var - archCond = string "arch" >> sp >> inparens archIdent >>= return . Var - flagCond = string "flag" >> sp >> inparens flagIdent >>= return . Var - implCond = string "impl" >> sp >> inparens implIdent >>= return . Var - boolLiteral = fmap Lit parse - archIdent = fmap Arch parse - osIdent = fmap OS parse - flagIdent = fmap (Flag . mkFlagName . lowercase) (munch1 isIdentChar) - isIdentChar c = isAlphaNum c || c == '_' || c == '-' - oper s = sp >> string s >> sp - sp = skipSpaces - implIdent = do i <- parse - vr <- sp >> option anyVersion parse - return $ Impl i vr - ------------------------------------------------------------------------------- - --- | Result of dependency test. Isomorphic to @Maybe d@ but renamed for --- clarity. -data DepTestRslt d = DepOk | MissingDeps d - -instance Semigroup d => Monoid (DepTestRslt d) where - mempty = DepOk - mappend = (<>) - -instance Semigroup d => Semigroup (DepTestRslt d) where - DepOk <> x = x - x <> DepOk = x - (MissingDeps d) <> (MissingDeps d') = MissingDeps (d <> d') - - --- | Try to find a flag assignment that satisfies the constraints of all trees. --- --- Returns either the missing dependencies, or a tuple containing the --- resulting data, the associated dependencies, and the chosen flag --- assignments. --- --- In case of failure, the union of the dependencies that led to backtracking --- on all branches is returned. --- [TODO: Could also be specified with a function argument.] --- --- TODO: The current algorithm is rather naive. A better approach would be to: --- --- * Rule out possible paths, by taking a look at the associated dependencies. --- --- * Infer the required values for the conditions of these paths, and --- calculate the required domains for the variables used in these --- conditions. Then picking a flag assignment would be linear (I guess). 
--- --- This would require some sort of SAT solving, though, thus it's not --- implemented unless we really need it. --- -resolveWithFlags :: - [(FlagName,[Bool])] - -- ^ Domain for each flag name, will be tested in order. - -> ComponentRequestedSpec - -> OS -- ^ OS as returned by Distribution.System.buildOS - -> Arch -- ^ Arch as returned by Distribution.System.buildArch - -> CompilerInfo -- ^ Compiler information - -> [Dependency] -- ^ Additional constraints - -> [CondTree ConfVar [Dependency] PDTagged] - -> ([Dependency] -> DepTestRslt [Dependency]) -- ^ Dependency test function. - -> Either [Dependency] (TargetSet PDTagged, FlagAssignment) - -- ^ Either the missing dependencies (error case), or a pair of - -- (set of build targets with dependencies, chosen flag assignments) -resolveWithFlags dom enabled os arch impl constrs trees checkDeps = - either (Left . fromDepMapUnion) Right $ explore (build mempty dom) - where - extraConstrs = toDepMap constrs - - -- simplify trees by (partially) evaluating all conditions and converting - -- dependencies to dependency maps. - simplifiedTrees :: [CondTree FlagName DependencyMap PDTagged] - simplifiedTrees = map ( mapTreeConstrs toDepMap -- convert to maps - . addBuildableConditionPDTagged - . mapTreeConds (fst . simplifyWithSysParams os arch impl)) - trees - - -- @explore@ searches a tree of assignments, backtracking whenever a flag - -- introduces a dependency that cannot be satisfied. If there is no - -- solution, @explore@ returns the union of all dependencies that caused - -- it to backtrack. Since the tree is constructed lazily, we avoid some - -- computation overhead in the successful case. - explore :: Tree FlagAssignment - -> Either DepMapUnion (TargetSet PDTagged, FlagAssignment) - explore (Node flags ts) = - let targetSet = TargetSet $ flip map simplifiedTrees $ - -- apply additional constraints to all dependencies - first (`constrainBy` extraConstrs) . 
- simplifyCondTree (env flags) - deps = overallDependencies enabled targetSet - in case checkDeps (fromDepMap deps) of - DepOk | null ts -> Right (targetSet, flags) - | otherwise -> tryAll $ map explore ts - MissingDeps mds -> Left (toDepMapUnion mds) - - -- Builds a tree of all possible flag assignments. Internal nodes - -- have only partial assignments. - build :: FlagAssignment -> [(FlagName, [Bool])] -> Tree FlagAssignment - build assigned [] = Node assigned [] - build assigned ((fn, vals) : unassigned) = - Node assigned $ map (\v -> build (insertFlagAssignment fn v assigned) unassigned) vals - - tryAll :: [Either DepMapUnion a] -> Either DepMapUnion a - tryAll = foldr mp mz - - -- special version of `mplus' for our local purposes - mp :: Either DepMapUnion a -> Either DepMapUnion a -> Either DepMapUnion a - mp m@(Right _) _ = m - mp _ m@(Right _) = m - mp (Left xs) (Left ys) = - let union = Map.foldrWithKey (Map.Strict.insertWith combine) - (unDepMapUnion xs) (unDepMapUnion ys) - combine x y = simplifyVersionRange $ unionVersionRanges x y - in union `seq` Left (DepMapUnion union) - - -- `mzero' - mz :: Either DepMapUnion a - mz = Left (DepMapUnion Map.empty) - - env :: FlagAssignment -> FlagName -> Either FlagName Bool - env flags flag = (maybe (Left flag) Right . lookupFlagAssignment flag) flags - --- | Transforms a 'CondTree' by putting the input under the "then" branch of a --- conditional that is True when Buildable is True. If 'addBuildableCondition' --- can determine that Buildable is always True, it returns the input unchanged. --- If Buildable is always False, it returns the empty 'CondTree'. -addBuildableCondition :: (Eq v, Monoid a, Monoid c) => (a -> BuildInfo) - -> CondTree v c a - -> CondTree v c a -addBuildableCondition getInfo t = - case extractCondition (buildable . 
getInfo) t of - Lit True -> t - Lit False -> CondNode mempty mempty [] - c -> CondNode mempty mempty [condIfThen c t] - --- | This is a special version of 'addBuildableCondition' for the 'PDTagged' --- type. --- --- It is not simply a specialisation. It is more complicated than it --- ought to be because of the way the 'PDTagged' monoid instance works. The --- @mempty = 'PDNull'@ forgets the component type, which has the effect of --- completely deleting components that are not buildable. --- --- See for more details. --- -addBuildableConditionPDTagged :: (Eq v, Monoid c) => - CondTree v c PDTagged - -> CondTree v c PDTagged -addBuildableConditionPDTagged t = - case extractCondition (buildable . getInfo) t of - Lit True -> t - Lit False -> deleteConstraints t - c -> CondNode mempty mempty [condIfThenElse c t (deleteConstraints t)] - where - deleteConstraints = mapTreeConstrs (const mempty) - - getInfo :: PDTagged -> BuildInfo - getInfo (Lib l) = libBuildInfo l - getInfo (SubComp _ c) = componentBuildInfo c - getInfo PDNull = mempty - - --- Note: extracting buildable conditions. --- -------------------------------------- --- --- If the conditions in a cond tree lead to Buildable being set to False, then --- none of the dependencies for this cond tree should actually be taken into --- account. On the other hand, some of the flags may only be decided in the --- solver, so we cannot necessarily make the decision whether a component is --- Buildable or not prior to solving. --- --- What we are doing here is to partially evaluate a condition tree in order to --- extract the condition under which Buildable is True. The predicate determines --- whether data under a 'CondTree' is buildable. - --- | Extract conditions matched by the given predicate from all cond trees in a --- 'GenericPackageDescription'. -extractConditions :: (BuildInfo -> Bool) -> GenericPackageDescription - -> [Condition ConfVar] -extractConditions f gpkg = - concat [ - extractCondition (f . 
libBuildInfo) <$> maybeToList (condLibrary gpkg) - , extractCondition (f . libBuildInfo) . snd <$> condSubLibraries gpkg - , extractCondition (f . buildInfo) . snd <$> condExecutables gpkg - , extractCondition (f . testBuildInfo) . snd <$> condTestSuites gpkg - , extractCondition (f . benchmarkBuildInfo) . snd <$> condBenchmarks gpkg - ] - - --- | A map of dependencies that combines version ranges using 'unionVersionRanges'. -newtype DepMapUnion = DepMapUnion { unDepMapUnion :: Map PackageName VersionRange } - -toDepMapUnion :: [Dependency] -> DepMapUnion -toDepMapUnion ds = - DepMapUnion $ Map.fromListWith unionVersionRanges [ (p,vr) | Dependency p vr <- ds ] - -fromDepMapUnion :: DepMapUnion -> [Dependency] -fromDepMapUnion m = [ Dependency p vr | (p,vr) <- Map.toList (unDepMapUnion m) ] - -freeVars :: CondTree ConfVar c a -> [FlagName] -freeVars t = [ f | Flag f <- freeVars' t ] - where - freeVars' (CondNode _ _ ifs) = concatMap compfv ifs - compfv (CondBranch c ct mct) = condfv c ++ freeVars' ct ++ maybe [] freeVars' mct - condfv c = case c of - Var v -> [v] - Lit _ -> [] - CNot c' -> condfv c' - COr c1 c2 -> condfv c1 ++ condfv c2 - CAnd c1 c2 -> condfv c1 ++ condfv c2 - - ------------------------------------------------------------------------------- - --- | A set of targets with their package dependencies -newtype TargetSet a = TargetSet [(DependencyMap, a)] - --- | Combine the target-specific dependencies in a TargetSet to give the --- dependencies for the package as a whole. -overallDependencies :: ComponentRequestedSpec -> TargetSet PDTagged -> DependencyMap -overallDependencies enabled (TargetSet targets) = mconcat depss - where - (depss, _) = unzip $ filter (removeDisabledSections . snd) targets - removeDisabledSections :: PDTagged -> Bool - -- UGH. The embedded componentName in the 'Component's here is - -- BLANK. I don't know whose fault this is but I'll use the tag - -- instead. 
-- ezyang - removeDisabledSections (Lib _) = componentNameRequested enabled CLibName - removeDisabledSections (SubComp t c) - -- Do NOT use componentName - = componentNameRequested enabled - $ case c of - CLib _ -> CSubLibName t - CFLib _ -> CFLibName t - CExe _ -> CExeName t - CTest _ -> CTestName t - CBench _ -> CBenchName t - removeDisabledSections PDNull = True - --- | Collect up the targets in a TargetSet of tagged targets, storing the --- dependencies as we go. -flattenTaggedTargets :: TargetSet PDTagged -> (Maybe Library, [(UnqualComponentName, Component)]) -flattenTaggedTargets (TargetSet targets) = foldr untag (Nothing, []) targets where - untag (depMap, pdTagged) accum = case (pdTagged, accum) of - (Lib _, (Just _, _)) -> userBug "Only one library expected" - (Lib l, (Nothing, comps)) -> (Just $ redoBD l, comps) - (SubComp n c, (mb_lib, comps)) - | any ((== n) . fst) comps -> - userBug $ "There exist several components with the same name: '" ++ display n ++ "'" - | otherwise -> (mb_lib, (n, redoBD c) : comps) - (PDNull, x) -> x -- actually this should not happen, but let's be liberal - where - redoBD :: L.HasBuildInfo a => a -> a - redoBD = set L.targetBuildDepends $ fromDepMap depMap - ------------------------------------------------------------------------------- --- Convert GenericPackageDescription to PackageDescription --- - -data PDTagged = Lib Library - | SubComp UnqualComponentName Component - | PDNull - deriving Show - -instance Monoid PDTagged where - mempty = PDNull - mappend = (<>) - -instance Semigroup PDTagged where - PDNull <> x = x - x <> PDNull = x - Lib l <> Lib l' = Lib (l <> l') - SubComp n x <> SubComp n' x' | n == n' = SubComp n (x <> x') - _ <> _ = cabalBug "Cannot combine incompatible tags" - --- | Create a package description with all configurations resolved. 
--- --- This function takes a `GenericPackageDescription` and several environment --- parameters and tries to generate `PackageDescription` by finding a flag --- assignment that result in satisfiable dependencies. --- --- It takes as inputs a not necessarily complete specifications of flags --- assignments, an optional package index as well as platform parameters. If --- some flags are not assigned explicitly, this function will try to pick an --- assignment that causes this function to succeed. The package index is --- optional since on some platforms we cannot determine which packages have --- been installed before. When no package index is supplied, every dependency --- is assumed to be satisfiable, therefore all not explicitly assigned flags --- will get their default values. --- --- This function will fail if it cannot find a flag assignment that leads to --- satisfiable dependencies. (It will not try alternative assignments for --- explicitly specified flags.) In case of failure it will return the missing --- dependencies that it encountered when trying different flag assignments. --- On success, it will return the package description and the full flag --- assignment chosen. --- --- Note that this drops any stanzas which have @buildable: False@. While --- this is arguably the right thing to do, it means we give bad error --- messages in some situations, see #3858. --- -finalizePD :: - FlagAssignment -- ^ Explicitly specified flag assignments - -> ComponentRequestedSpec - -> (Dependency -> Bool) -- ^ Is a given dependency satisfiable from the set of - -- available packages? If this is unknown then use - -- True. - -> Platform -- ^ The 'Arch' and 'OS' - -> CompilerInfo -- ^ Compiler information - -> [Dependency] -- ^ Additional constraints - -> GenericPackageDescription - -> Either [Dependency] - (PackageDescription, FlagAssignment) - -- ^ Either missing dependencies or the resolved package - -- description along with the flag assignments chosen. 
-finalizePD userflags enabled satisfyDep - (Platform arch os) impl constraints - (GenericPackageDescription pkg flags mb_lib0 sub_libs0 flibs0 exes0 tests0 bms0) = do - (targetSet, flagVals) <- - resolveWithFlags flagChoices enabled os arch impl constraints condTrees check - let - (mb_lib, comps) = flattenTaggedTargets targetSet - mb_lib' = fmap libFillInDefaults mb_lib - comps' = flip map comps $ \(n,c) -> foldComponent - (\l -> CLib (libFillInDefaults l) { libName = Just n - , libExposed = False }) - (\l -> CFLib (flibFillInDefaults l) { foreignLibName = n }) - (\e -> CExe (exeFillInDefaults e) { exeName = n }) - (\t -> CTest (testFillInDefaults t) { testName = n }) - (\b -> CBench (benchFillInDefaults b) { benchmarkName = n }) - c - (sub_libs', flibs', exes', tests', bms') = partitionComponents comps' - return ( pkg { library = mb_lib' - , subLibraries = sub_libs' - , foreignLibs = flibs' - , executables = exes' - , testSuites = tests' - , benchmarks = bms' - } - , flagVals ) - where - -- Combine lib, exes, and tests into one list of @CondTree@s with tagged data - condTrees = maybeToList (fmap (mapTreeData Lib) mb_lib0) - ++ map (\(name,tree) -> mapTreeData (SubComp name . CLib) tree) sub_libs0 - ++ map (\(name,tree) -> mapTreeData (SubComp name . CFLib) tree) flibs0 - ++ map (\(name,tree) -> mapTreeData (SubComp name . CExe) tree) exes0 - ++ map (\(name,tree) -> mapTreeData (SubComp name . CTest) tree) tests0 - ++ map (\(name,tree) -> mapTreeData (SubComp name . CBench) tree) bms0 - - flagChoices = map (\(MkFlag n _ d manual) -> (n, d2c manual n d)) flags - d2c manual n b = case lookupFlagAssignment n userflags of - Just val -> [val] - Nothing - | manual -> [b] - | otherwise -> [b, not b] - --flagDefaults = map (\(n,x:_) -> (n,x)) flagChoices - check ds = let missingDeps = filter (not . 
satisfyDep) ds - in if null missingDeps - then DepOk - else MissingDeps missingDeps - -{-# DEPRECATED finalizePackageDescription "This function now always assumes tests and benchmarks are disabled; use finalizePD with ComponentRequestedSpec to specify something more specific. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} -finalizePackageDescription :: - FlagAssignment -- ^ Explicitly specified flag assignments - -> (Dependency -> Bool) -- ^ Is a given dependency satisfiable from the set of - -- available packages? If this is unknown then use - -- True. - -> Platform -- ^ The 'Arch' and 'OS' - -> CompilerInfo -- ^ Compiler information - -> [Dependency] -- ^ Additional constraints - -> GenericPackageDescription - -> Either [Dependency] - (PackageDescription, FlagAssignment) -finalizePackageDescription flags = finalizePD flags defaultComponentRequestedSpec - -{- -let tst_p = (CondNode [1::Int] [Distribution.Package.Dependency "a" AnyVersion] []) -let tst_p2 = (CondNode [1::Int] [Distribution.Package.Dependency "a" (EarlierVersion (Version [1,0] [])), Distribution.Package.Dependency "a" (LaterVersion (Version [2,0] []))] []) - -let p_index = Distribution.Simple.PackageIndex.fromList [Distribution.Package.PackageIdentifier "a" (Version [0,5] []), Distribution.Package.PackageIdentifier "a" (Version [2,5] [])] -let look = not . null . Distribution.Simple.PackageIndex.lookupDependency p_index -let looks ds = mconcat $ map (\d -> if look d then DepOk else MissingDeps [d]) ds -resolveWithFlags [] Distribution.System.Linux Distribution.System.I386 (Distribution.Compiler.GHC,Version [6,8,2] []) [tst_p] looks ===> Right ... -resolveWithFlags [] Distribution.System.Linux Distribution.System.I386 (Distribution.Compiler.GHC,Version [6,8,2] []) [tst_p2] looks ===> Left ... --} - --- | Flatten a generic package description by ignoring all conditions and just --- join the field descriptors into on package description. 
Note, however, --- that this may lead to inconsistent field values, since all values are --- joined into one field, which may not be possible in the original package --- description, due to the use of exclusive choices (if ... else ...). --- --- TODO: One particularly tricky case is defaulting. In the original package --- description, e.g., the source directory might either be the default or a --- certain, explicitly set path. Since defaults are filled in only after the --- package has been resolved and when no explicit value has been set, the --- default path will be missing from the package description returned by this --- function. -flattenPackageDescription :: GenericPackageDescription -> PackageDescription -flattenPackageDescription - (GenericPackageDescription pkg _ mlib0 sub_libs0 flibs0 exes0 tests0 bms0) = - pkg { library = mlib - , subLibraries = reverse sub_libs - , foreignLibs = reverse flibs - , executables = reverse exes - , testSuites = reverse tests - , benchmarks = reverse bms - } - where - mlib = f <$> mlib0 - where f lib = (libFillInDefaults . fst . ignoreConditions $ lib) { libName = Nothing } - sub_libs = flattenLib <$> sub_libs0 - flibs = flattenFLib <$> flibs0 - exes = flattenExe <$> exes0 - tests = flattenTst <$> tests0 - bms = flattenBm <$> bms0 - flattenLib (n, t) = libFillInDefaults $ (fst $ ignoreConditions t) - { libName = Just n, libExposed = False } - flattenFLib (n, t) = flibFillInDefaults $ (fst $ ignoreConditions t) - { foreignLibName = n } - flattenExe (n, t) = exeFillInDefaults $ (fst $ ignoreConditions t) - { exeName = n } - flattenTst (n, t) = testFillInDefaults $ (fst $ ignoreConditions t) - { testName = n } - flattenBm (n, t) = benchFillInDefaults $ (fst $ ignoreConditions t) - { benchmarkName = n } - --- This is in fact rather a hack. The original version just overrode the --- default values, however, when adding conditions we had to switch to a --- modifier-based approach. 
There, nothing is ever overwritten, but only --- joined together. --- --- This is the cleanest way i could think of, that doesn't require --- changing all field parsing functions to return modifiers instead. -libFillInDefaults :: Library -> Library -libFillInDefaults lib@(Library { libBuildInfo = bi }) = - lib { libBuildInfo = biFillInDefaults bi } - -flibFillInDefaults :: ForeignLib -> ForeignLib -flibFillInDefaults flib@(ForeignLib { foreignLibBuildInfo = bi }) = - flib { foreignLibBuildInfo = biFillInDefaults bi } - -exeFillInDefaults :: Executable -> Executable -exeFillInDefaults exe@(Executable { buildInfo = bi }) = - exe { buildInfo = biFillInDefaults bi } - -testFillInDefaults :: TestSuite -> TestSuite -testFillInDefaults tst@(TestSuite { testBuildInfo = bi }) = - tst { testBuildInfo = biFillInDefaults bi } - -benchFillInDefaults :: Benchmark -> Benchmark -benchFillInDefaults bm@(Benchmark { benchmarkBuildInfo = bi }) = - bm { benchmarkBuildInfo = biFillInDefaults bi } - -biFillInDefaults :: BuildInfo -> BuildInfo -biFillInDefaults bi = - if null (hsSourceDirs bi) - then bi { hsSourceDirs = [currentDir] } - else bi - --- Walk a 'GenericPackageDescription' and apply @onBuildInfo@/@onSetupBuildInfo@ --- to all nested 'BuildInfo'/'SetupBuildInfo' values. -transformAllBuildInfos :: (BuildInfo -> BuildInfo) - -> (SetupBuildInfo -> SetupBuildInfo) - -> GenericPackageDescription - -> GenericPackageDescription -transformAllBuildInfos onBuildInfo onSetupBuildInfo = - over L.traverseBuildInfos onBuildInfo - . over (L.packageDescription . L.setupBuildInfo . traverse) onSetupBuildInfo - --- | Walk a 'GenericPackageDescription' and apply @f@ to all nested --- @build-depends@ fields. -transformAllBuildDepends :: (Dependency -> Dependency) - -> GenericPackageDescription - -> GenericPackageDescription -transformAllBuildDepends f = - over (L.traverseBuildInfos . L.targetBuildDepends . traverse) f - . over (L.packageDescription . L.setupBuildInfo . traverse . L.setupDepends . 
traverse) f - -- cannot be point-free as normal because of higher rank - . over (\f' -> L.allCondTrees $ traverseCondTreeC f') (map f) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription/FieldGrammar.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription/FieldGrammar.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription/FieldGrammar.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription/FieldGrammar.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,532 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} --- | 'GenericPackageDescription' Field descriptions -module Distribution.PackageDescription.FieldGrammar ( - -- * Package description - packageDescriptionFieldGrammar, - -- * Library - libraryFieldGrammar, - -- * Foreign library - foreignLibFieldGrammar, - -- * Executable - executableFieldGrammar, - -- * Test suite - TestSuiteStanza (..), - testSuiteFieldGrammar, - validateTestSuite, - unvalidateTestSuite, - -- ** Lenses - testStanzaTestType, - testStanzaMainIs, - testStanzaTestModule, - testStanzaBuildInfo, - -- * Benchmark - BenchmarkStanza (..), - benchmarkFieldGrammar, - validateBenchmark, - unvalidateBenchmark, - -- ** Lenses - benchmarkStanzaBenchmarkType, - benchmarkStanzaMainIs, - benchmarkStanzaBenchmarkModule, - benchmarkStanzaBuildInfo, - -- * Flag - flagFieldGrammar, - -- * Source repository - sourceRepoFieldGrammar, - -- * Setup build info - setupBInfoFieldGrammar, - -- * Component build info - buildInfoFieldGrammar, - ) where - -import Distribution.Compat.Lens -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Compiler (CompilerFlavor (..)) -import Distribution.FieldGrammar -import Distribution.ModuleName (ModuleName) -import Distribution.Package -import Distribution.PackageDescription -import 
Distribution.Parsec.Common -import Distribution.Parsec.Newtypes -import Distribution.Parsec.ParseResult -import Distribution.Text (display) -import Distribution.Types.ExecutableScope -import Distribution.Types.ForeignLib -import Distribution.Types.ForeignLibType -import Distribution.Types.UnqualComponentName -import Distribution.Version (anyVersion) - -import qualified Distribution.SPDX as SPDX - -import qualified Distribution.Types.Lens as L - -------------------------------------------------------------------------------- --- PackageDescription -------------------------------------------------------------------------------- - -packageDescriptionFieldGrammar - :: (FieldGrammar g, Applicative (g PackageDescription), Applicative (g PackageIdentifier)) - => g PackageDescription PackageDescription -packageDescriptionFieldGrammar = PackageDescription - <$> optionalFieldDefAla "cabal-version" SpecVersion L.specVersionRaw (Right anyVersion) - <*> blurFieldGrammar L.package packageIdentifierGrammar - <*> optionalFieldDefAla "license" SpecLicense L.licenseRaw (Left SPDX.NONE) - <*> licenseFilesGrammar - <*> optionalFieldDefAla "copyright" FreeText L.copyright "" - <*> optionalFieldDefAla "maintainer" FreeText L.maintainer "" - <*> optionalFieldDefAla "author" FreeText L.author "" - <*> optionalFieldDefAla "stability" FreeText L.stability "" - <*> monoidalFieldAla "tested-with" (alaList' FSep TestedWith) L.testedWith - <*> optionalFieldDefAla "homepage" FreeText L.homepage "" - <*> optionalFieldDefAla "package-url" FreeText L.pkgUrl "" - <*> optionalFieldDefAla "bug-reports" FreeText L.bugReports "" - <*> pure [] -- source-repos are stanza - <*> optionalFieldDefAla "synopsis" FreeText L.synopsis "" - <*> optionalFieldDefAla "description" FreeText L.description "" - <*> optionalFieldDefAla "category" FreeText L.category "" - <*> prefixedFields "x-" L.customFieldsPD - <*> optionalField "build-type" L.buildTypeRaw - <*> pure Nothing -- custom-setup - -- components - <*> pure 
Nothing -- lib - <*> pure [] -- sub libs - <*> pure [] -- executables - <*> pure [] -- foreign libs - <*> pure [] -- test suites - <*> pure [] -- benchmarks - -- * Files - <*> monoidalFieldAla "data-files" (alaList' VCat FilePathNT) L.dataFiles - <*> optionalFieldDefAla "data-dir" FilePathNT L.dataDir "" - <*> monoidalFieldAla "extra-source-files" (alaList' VCat FilePathNT) L.extraSrcFiles - <*> monoidalFieldAla "extra-tmp-files" (alaList' VCat FilePathNT) L.extraTmpFiles - <*> monoidalFieldAla "extra-doc-files" (alaList' VCat FilePathNT) L.extraDocFiles - where - packageIdentifierGrammar = PackageIdentifier - <$> uniqueField "name" L.pkgName - <*> uniqueField "version" L.pkgVersion - - licenseFilesGrammar = (++) - -- TODO: neither field is deprecated - -- should we pretty print license-file if there's single license file - -- and license-files when more - <$> monoidalFieldAla "license-file" (alaList' FSep FilePathNT) L.licenseFiles - <*> monoidalFieldAla "license-files" (alaList' FSep FilePathNT) L.licenseFiles - ^^^ hiddenField - -------------------------------------------------------------------------------- --- Library -------------------------------------------------------------------------------- - -libraryFieldGrammar - :: (FieldGrammar g, Applicative (g Library), Applicative (g BuildInfo)) - => Maybe UnqualComponentName -> g Library Library -libraryFieldGrammar n = Library n - <$> monoidalFieldAla "exposed-modules" (alaList' VCat MQuoted) L.exposedModules - <*> monoidalFieldAla "reexported-modules" (alaList CommaVCat) L.reexportedModules - <*> monoidalFieldAla "signatures" (alaList' VCat MQuoted) L.signatures - ^^^ availableSince [2,0] [] - <*> booleanFieldDef "exposed" L.libExposed True - <*> blurFieldGrammar L.libBuildInfo buildInfoFieldGrammar -{-# SPECIALIZE libraryFieldGrammar :: Maybe UnqualComponentName -> ParsecFieldGrammar' Library #-} -{-# SPECIALIZE libraryFieldGrammar :: Maybe UnqualComponentName -> PrettyFieldGrammar' Library #-} - 
-------------------------------------------------------------------------------- --- Foreign library -------------------------------------------------------------------------------- - -foreignLibFieldGrammar - :: (FieldGrammar g, Applicative (g ForeignLib), Applicative (g BuildInfo)) - => UnqualComponentName -> g ForeignLib ForeignLib -foreignLibFieldGrammar n = ForeignLib n - <$> optionalFieldDef "type" L.foreignLibType ForeignLibTypeUnknown - <*> monoidalFieldAla "options" (alaList FSep) L.foreignLibOptions - <*> blurFieldGrammar L.foreignLibBuildInfo buildInfoFieldGrammar - <*> optionalField "lib-version-info" L.foreignLibVersionInfo - <*> optionalField "lib-version-linux" L.foreignLibVersionLinux - <*> monoidalFieldAla "mod-def-file" (alaList' FSep FilePathNT) L.foreignLibModDefFile -{-# SPECIALIZE foreignLibFieldGrammar :: UnqualComponentName -> ParsecFieldGrammar' ForeignLib #-} -{-# SPECIALIZE foreignLibFieldGrammar :: UnqualComponentName -> PrettyFieldGrammar' ForeignLib #-} - -------------------------------------------------------------------------------- --- Executable -------------------------------------------------------------------------------- - -executableFieldGrammar - :: (FieldGrammar g, Applicative (g Executable), Applicative (g BuildInfo)) - => UnqualComponentName -> g Executable Executable -executableFieldGrammar n = Executable n - -- main-is is optional as conditional blocks don't have it - <$> optionalFieldDefAla "main-is" FilePathNT L.modulePath "" - <*> optionalFieldDef "scope" L.exeScope ExecutablePublic - ^^^ availableSince [2,0] ExecutablePublic - <*> blurFieldGrammar L.buildInfo buildInfoFieldGrammar -{-# SPECIALIZE executableFieldGrammar :: UnqualComponentName -> ParsecFieldGrammar' Executable #-} -{-# SPECIALIZE executableFieldGrammar :: UnqualComponentName -> PrettyFieldGrammar' Executable #-} - -------------------------------------------------------------------------------- --- TestSuite 
-------------------------------------------------------------------------------- - --- | An intermediate type just used for parsing the test-suite stanza. --- After validation it is converted into the proper 'TestSuite' type. -data TestSuiteStanza = TestSuiteStanza - { _testStanzaTestType :: Maybe TestType - , _testStanzaMainIs :: Maybe FilePath - , _testStanzaTestModule :: Maybe ModuleName - , _testStanzaBuildInfo :: BuildInfo - } - -instance L.HasBuildInfo TestSuiteStanza where - buildInfo = testStanzaBuildInfo - -testStanzaTestType :: Lens' TestSuiteStanza (Maybe TestType) -testStanzaTestType f s = fmap (\x -> s { _testStanzaTestType = x }) (f (_testStanzaTestType s)) -{-# INLINE testStanzaTestType #-} - -testStanzaMainIs :: Lens' TestSuiteStanza (Maybe FilePath) -testStanzaMainIs f s = fmap (\x -> s { _testStanzaMainIs = x }) (f (_testStanzaMainIs s)) -{-# INLINE testStanzaMainIs #-} - -testStanzaTestModule :: Lens' TestSuiteStanza (Maybe ModuleName) -testStanzaTestModule f s = fmap (\x -> s { _testStanzaTestModule = x }) (f (_testStanzaTestModule s)) -{-# INLINE testStanzaTestModule #-} - -testStanzaBuildInfo :: Lens' TestSuiteStanza BuildInfo -testStanzaBuildInfo f s = fmap (\x -> s { _testStanzaBuildInfo = x }) (f (_testStanzaBuildInfo s)) -{-# INLINE testStanzaBuildInfo #-} - -testSuiteFieldGrammar - :: (FieldGrammar g, Applicative (g TestSuiteStanza), Applicative (g BuildInfo)) - => g TestSuiteStanza TestSuiteStanza -testSuiteFieldGrammar = TestSuiteStanza - <$> optionalField "type" testStanzaTestType - <*> optionalFieldAla "main-is" FilePathNT testStanzaMainIs - <*> optionalField "test-module" testStanzaTestModule - <*> blurFieldGrammar testStanzaBuildInfo buildInfoFieldGrammar - -validateTestSuite :: Position -> TestSuiteStanza -> ParseResult TestSuite -validateTestSuite pos stanza = case _testStanzaTestType stanza of - Nothing -> return $ - emptyTestSuite { testBuildInfo = _testStanzaBuildInfo stanza } - - Just tt@(TestTypeUnknown _ _) -> - pure 
emptyTestSuite - { testInterface = TestSuiteUnsupported tt - , testBuildInfo = _testStanzaBuildInfo stanza - } - - Just tt | tt `notElem` knownTestTypes -> - pure emptyTestSuite - { testInterface = TestSuiteUnsupported tt - , testBuildInfo = _testStanzaBuildInfo stanza - } - - Just tt@(TestTypeExe ver) -> case _testStanzaMainIs stanza of - Nothing -> do - parseFailure pos (missingField "main-is" tt) - pure emptyTestSuite - Just file -> do - when (isJust (_testStanzaTestModule stanza)) $ - parseWarning pos PWTExtraBenchmarkModule (extraField "test-module" tt) - pure emptyTestSuite - { testInterface = TestSuiteExeV10 ver file - , testBuildInfo = _testStanzaBuildInfo stanza - } - - Just tt@(TestTypeLib ver) -> case _testStanzaTestModule stanza of - Nothing -> do - parseFailure pos (missingField "test-module" tt) - pure emptyTestSuite - Just module_ -> do - when (isJust (_testStanzaMainIs stanza)) $ - parseWarning pos PWTExtraMainIs (extraField "main-is" tt) - pure emptyTestSuite - { testInterface = TestSuiteLibV09 ver module_ - , testBuildInfo = _testStanzaBuildInfo stanza - } - - where - missingField name tt = "The '" ++ name ++ "' field is required for the " - ++ display tt ++ " test suite type." - - extraField name tt = "The '" ++ name ++ "' field is not used for the '" - ++ display tt ++ "' test suite type." 
- -unvalidateTestSuite :: TestSuite -> TestSuiteStanza -unvalidateTestSuite t = TestSuiteStanza - { _testStanzaTestType = ty - , _testStanzaMainIs = ma - , _testStanzaTestModule = mo - , _testStanzaBuildInfo = testBuildInfo t - } - where - (ty, ma, mo) = case testInterface t of - TestSuiteExeV10 ver file -> (Just $ TestTypeExe ver, Just file, Nothing) - TestSuiteLibV09 ver modu -> (Just $ TestTypeLib ver, Nothing, Just modu) - _ -> (Nothing, Nothing, Nothing) - -------------------------------------------------------------------------------- --- Benchmark -------------------------------------------------------------------------------- - --- | An intermediate type just used for parsing the benchmark stanza. --- After validation it is converted into the proper 'Benchmark' type. -data BenchmarkStanza = BenchmarkStanza - { _benchmarkStanzaBenchmarkType :: Maybe BenchmarkType - , _benchmarkStanzaMainIs :: Maybe FilePath - , _benchmarkStanzaBenchmarkModule :: Maybe ModuleName - , _benchmarkStanzaBuildInfo :: BuildInfo - } - -instance L.HasBuildInfo BenchmarkStanza where - buildInfo = benchmarkStanzaBuildInfo - -benchmarkStanzaBenchmarkType :: Lens' BenchmarkStanza (Maybe BenchmarkType) -benchmarkStanzaBenchmarkType f s = fmap (\x -> s { _benchmarkStanzaBenchmarkType = x }) (f (_benchmarkStanzaBenchmarkType s)) -{-# INLINE benchmarkStanzaBenchmarkType #-} - -benchmarkStanzaMainIs :: Lens' BenchmarkStanza (Maybe FilePath) -benchmarkStanzaMainIs f s = fmap (\x -> s { _benchmarkStanzaMainIs = x }) (f (_benchmarkStanzaMainIs s)) -{-# INLINE benchmarkStanzaMainIs #-} - -benchmarkStanzaBenchmarkModule :: Lens' BenchmarkStanza (Maybe ModuleName) -benchmarkStanzaBenchmarkModule f s = fmap (\x -> s { _benchmarkStanzaBenchmarkModule = x }) (f (_benchmarkStanzaBenchmarkModule s)) -{-# INLINE benchmarkStanzaBenchmarkModule #-} - -benchmarkStanzaBuildInfo :: Lens' BenchmarkStanza BuildInfo -benchmarkStanzaBuildInfo f s = fmap (\x -> s { _benchmarkStanzaBuildInfo = x }) (f 
(_benchmarkStanzaBuildInfo s)) -{-# INLINE benchmarkStanzaBuildInfo #-} - -benchmarkFieldGrammar - :: (FieldGrammar g, Applicative (g BenchmarkStanza), Applicative (g BuildInfo)) - => g BenchmarkStanza BenchmarkStanza -benchmarkFieldGrammar = BenchmarkStanza - <$> optionalField "type" benchmarkStanzaBenchmarkType - <*> optionalFieldAla "main-is" FilePathNT benchmarkStanzaMainIs - <*> optionalField "benchmark-module" benchmarkStanzaBenchmarkModule - <*> blurFieldGrammar benchmarkStanzaBuildInfo buildInfoFieldGrammar - -validateBenchmark :: Position -> BenchmarkStanza -> ParseResult Benchmark -validateBenchmark pos stanza = case _benchmarkStanzaBenchmarkType stanza of - Nothing -> pure emptyBenchmark - { benchmarkBuildInfo = _benchmarkStanzaBuildInfo stanza } - - Just tt@(BenchmarkTypeUnknown _ _) -> pure emptyBenchmark - { benchmarkInterface = BenchmarkUnsupported tt - , benchmarkBuildInfo = _benchmarkStanzaBuildInfo stanza - } - - Just tt | tt `notElem` knownBenchmarkTypes -> pure emptyBenchmark - { benchmarkInterface = BenchmarkUnsupported tt - , benchmarkBuildInfo = _benchmarkStanzaBuildInfo stanza - } - - Just tt@(BenchmarkTypeExe ver) -> case _benchmarkStanzaMainIs stanza of - Nothing -> do - parseFailure pos (missingField "main-is" tt) - pure emptyBenchmark - Just file -> do - when (isJust (_benchmarkStanzaBenchmarkModule stanza)) $ - parseWarning pos PWTExtraBenchmarkModule (extraField "benchmark-module" tt) - pure emptyBenchmark - { benchmarkInterface = BenchmarkExeV10 ver file - , benchmarkBuildInfo = _benchmarkStanzaBuildInfo stanza - } - - where - missingField name tt = "The '" ++ name ++ "' field is required for the " - ++ display tt ++ " benchmark type." - - extraField name tt = "The '" ++ name ++ "' field is not used for the '" - ++ display tt ++ "' benchmark type." 
- -unvalidateBenchmark :: Benchmark -> BenchmarkStanza -unvalidateBenchmark b = BenchmarkStanza - { _benchmarkStanzaBenchmarkType = ty - , _benchmarkStanzaMainIs = ma - , _benchmarkStanzaBenchmarkModule = mo - , _benchmarkStanzaBuildInfo = benchmarkBuildInfo b - } - where - (ty, ma, mo) = case benchmarkInterface b of - BenchmarkExeV10 ver "" -> (Just $ BenchmarkTypeExe ver, Nothing, Nothing) - BenchmarkExeV10 ver ma' -> (Just $ BenchmarkTypeExe ver, Just ma', Nothing) - _ -> (Nothing, Nothing, Nothing) - -------------------------------------------------------------------------------- --- Build info -------------------------------------------------------------------------------- - -buildInfoFieldGrammar - :: (FieldGrammar g, Applicative (g BuildInfo)) - => g BuildInfo BuildInfo -buildInfoFieldGrammar = BuildInfo - <$> booleanFieldDef "buildable" L.buildable True - <*> monoidalFieldAla "build-tools" (alaList CommaFSep) L.buildTools - ^^^ deprecatedSince [2,0] "Please use 'build-tool-depends' field" - <*> monoidalFieldAla "build-tool-depends" (alaList CommaFSep) L.buildToolDepends - -- {- ^^^ availableSince [2,0] [] -} - -- here, we explicitly want to recognise build-tool-depends for all Cabal files - -- as otherwise cabal new-build cannot really work. - -- - -- I.e. 
we don't want trigger unknown field warning - <*> monoidalFieldAla "cpp-options" (alaList' NoCommaFSep Token') L.cppOptions - <*> monoidalFieldAla "asm-options" (alaList' NoCommaFSep Token') L.asmOptions - <*> monoidalFieldAla "cmm-options" (alaList' NoCommaFSep Token') L.cmmOptions - <*> monoidalFieldAla "cc-options" (alaList' NoCommaFSep Token') L.ccOptions - <*> monoidalFieldAla "cxx-options" (alaList' NoCommaFSep Token') L.cxxOptions - ^^^ availableSince [2,1] [] -- TODO change to 2,2 when version is bumped - <*> monoidalFieldAla "ld-options" (alaList' NoCommaFSep Token') L.ldOptions - <*> monoidalFieldAla "pkgconfig-depends" (alaList CommaFSep) L.pkgconfigDepends - <*> monoidalFieldAla "frameworks" (alaList' FSep Token) L.frameworks - <*> monoidalFieldAla "extra-framework-dirs" (alaList' FSep FilePathNT) L.extraFrameworkDirs - <*> monoidalFieldAla "asm-sources" (alaList' VCat FilePathNT) L.asmSources - <*> monoidalFieldAla "cmm-sources" (alaList' VCat FilePathNT) L.cmmSources - <*> monoidalFieldAla "c-sources" (alaList' VCat FilePathNT) L.cSources - <*> monoidalFieldAla "cxx-sources" (alaList' VCat FilePathNT) L.cxxSources - ^^^ availableSince [2,1] [] -- TODO change to 2,2 when version is bumped - <*> monoidalFieldAla "js-sources" (alaList' VCat FilePathNT) L.jsSources - <*> hsSourceDirsGrammar - <*> monoidalFieldAla "other-modules" (alaList' VCat MQuoted) L.otherModules - <*> monoidalFieldAla "virtual-modules" (alaList' VCat MQuoted) L.virtualModules - ^^^ availableSince [2,1] [] -- TODO change to 2,2 when version is bumped - <*> monoidalFieldAla "autogen-modules" (alaList' VCat MQuoted) L.autogenModules - <*> optionalFieldAla "default-language" MQuoted L.defaultLanguage - <*> monoidalFieldAla "other-languages" (alaList' FSep MQuoted) L.otherLanguages - <*> monoidalFieldAla "default-extensions" (alaList' FSep MQuoted) L.defaultExtensions - <*> monoidalFieldAla "other-extensions" (alaList' FSep MQuoted) L.otherExtensions - <*> monoidalFieldAla "extensions" 
(alaList' FSep MQuoted) L.oldExtensions - ^^^ deprecatedSince [1,12] "Please use 'default-extensions' or 'other-extensions' fields." - <*> monoidalFieldAla "extra-libraries" (alaList' VCat Token) L.extraLibs - <*> monoidalFieldAla "extra-ghci-libraries" (alaList' VCat Token) L.extraGHCiLibs - <*> monoidalFieldAla "extra-bundled-libraries" (alaList' VCat Token) L.extraBundledLibs - <*> monoidalFieldAla "extra-library-flavours" (alaList' VCat Token) L.extraLibFlavours - <*> monoidalFieldAla "extra-lib-dirs" (alaList' FSep FilePathNT) L.extraLibDirs - <*> monoidalFieldAla "include-dirs" (alaList' FSep FilePathNT) L.includeDirs - <*> monoidalFieldAla "includes" (alaList' FSep FilePathNT) L.includes - <*> monoidalFieldAla "install-includes" (alaList' FSep FilePathNT) L.installIncludes - <*> optionsFieldGrammar - <*> profOptionsFieldGrammar - <*> sharedOptionsFieldGrammar - <*> pure [] -- static-options ??? - <*> prefixedFields "x-" L.customFieldsBI - <*> monoidalFieldAla "build-depends" (alaList CommaVCat) L.targetBuildDepends - <*> monoidalFieldAla "mixins" (alaList CommaVCat) L.mixins - ^^^ availableSince [2,0] [] -{-# SPECIALIZE buildInfoFieldGrammar :: ParsecFieldGrammar' BuildInfo #-} -{-# SPECIALIZE buildInfoFieldGrammar :: PrettyFieldGrammar' BuildInfo #-} - -hsSourceDirsGrammar - :: (FieldGrammar g, Applicative (g BuildInfo)) - => g BuildInfo [FilePath] -hsSourceDirsGrammar = (++) - <$> monoidalFieldAla "hs-source-dirs" (alaList' FSep FilePathNT) L.hsSourceDirs - <*> monoidalFieldAla "hs-source-dir" (alaList' FSep FilePathNT) L.hsSourceDirs - ^^^ deprecatedField' "Please use 'hs-source-dirs'" - -optionsFieldGrammar - :: (FieldGrammar g, Applicative (g BuildInfo)) - => g BuildInfo [(CompilerFlavor, [String])] -optionsFieldGrammar = combine - <$> monoidalFieldAla "ghc-options" (alaList' NoCommaFSep Token') (extract GHC) - <*> monoidalFieldAla "ghcjs-options" (alaList' NoCommaFSep Token') (extract GHCJS) - -- NOTE: Hugs, NHC and JHC are not supported anymore, but 
these - -- fields are kept around so that we can still parse legacy .cabal - -- files that have them. - <* knownField "jhc-options" - <* knownField "hugs-options" - <* knownField "nhc98-options" - where - extract :: CompilerFlavor -> ALens' BuildInfo [String] - extract flavor = L.options . lookupLens flavor - - combine ghc ghcjs = - f GHC ghc ++ f GHCJS ghcjs - where - f _flavor [] = [] - f flavor opts = [(flavor, opts)] - -profOptionsFieldGrammar - :: (FieldGrammar g, Applicative (g BuildInfo)) - => g BuildInfo [(CompilerFlavor, [String])] -profOptionsFieldGrammar = combine - <$> monoidalFieldAla "ghc-prof-options" (alaList' NoCommaFSep Token') (extract GHC) - <*> monoidalFieldAla "ghcjs-prof-options" (alaList' NoCommaFSep Token') (extract GHCJS) - where - extract :: CompilerFlavor -> ALens' BuildInfo [String] - extract flavor = L.profOptions . lookupLens flavor - - combine ghc ghcjs = f GHC ghc ++ f GHCJS ghcjs - where - f _flavor [] = [] - f flavor opts = [(flavor, opts)] - -sharedOptionsFieldGrammar - :: (FieldGrammar g, Applicative (g BuildInfo)) - => g BuildInfo [(CompilerFlavor, [String])] -sharedOptionsFieldGrammar = combine - <$> monoidalFieldAla "ghc-shared-options" (alaList' NoCommaFSep Token') (extract GHC) - <*> monoidalFieldAla "ghcjs-shared-options" (alaList' NoCommaFSep Token') (extract GHCJS) - where - extract :: CompilerFlavor -> ALens' BuildInfo [String] - extract flavor = L.sharedOptions . lookupLens flavor - - combine ghc ghcjs = f GHC ghc ++ f GHCJS ghcjs - where - f _flavor [] = [] - f flavor opts = [(flavor, opts)] - -lookupLens :: (Functor f, Ord k) => k -> LensLike' f [(k, [v])] [v] -lookupLens k f kvs = str kvs <$> f (gtr kvs) - where - gtr = fromMaybe [] . 
lookup k - - str [] v = [(k, v)] - str (x@(k',_):xs) v - | k == k' = (k, v) : xs - | otherwise = x : str xs v - -------------------------------------------------------------------------------- --- Flag -------------------------------------------------------------------------------- - -flagFieldGrammar - :: (FieldGrammar g, Applicative (g Flag)) - => FlagName -> g Flag Flag -flagFieldGrammar name = MkFlag name - <$> optionalFieldDefAla "description" FreeText L.flagDescription "" - <*> booleanFieldDef "default" L.flagDefault True - <*> booleanFieldDef "manual" L.flagManual False -{-# SPECIALIZE flagFieldGrammar :: FlagName -> ParsecFieldGrammar' Flag #-} -{-# SPECIALIZE flagFieldGrammar :: FlagName -> PrettyFieldGrammar' Flag #-} - -------------------------------------------------------------------------------- --- SourceRepo -------------------------------------------------------------------------------- - -sourceRepoFieldGrammar - :: (FieldGrammar g, Applicative (g SourceRepo)) - => RepoKind -> g SourceRepo SourceRepo -sourceRepoFieldGrammar kind = SourceRepo kind - <$> optionalField "type" L.repoType - <*> optionalFieldAla "location" FreeText L.repoLocation - <*> optionalFieldAla "module" Token L.repoModule - <*> optionalFieldAla "branch" Token L.repoBranch - <*> optionalFieldAla "tag" Token L.repoTag - <*> optionalFieldAla "subdir" FilePathNT L.repoSubdir -{-# SPECIALIZE sourceRepoFieldGrammar :: RepoKind -> ParsecFieldGrammar' SourceRepo #-} -{-# SPECIALIZE sourceRepoFieldGrammar :: RepoKind ->PrettyFieldGrammar' SourceRepo #-} - -------------------------------------------------------------------------------- --- SetupBuildInfo -------------------------------------------------------------------------------- - -setupBInfoFieldGrammar - :: (FieldGrammar g, Functor (g SetupBuildInfo)) - => Bool -> g SetupBuildInfo SetupBuildInfo -setupBInfoFieldGrammar def = flip SetupBuildInfo def - <$> monoidalFieldAla "setup-depends" (alaList CommaVCat) L.setupDepends -{-# 
SPECIALIZE setupBInfoFieldGrammar :: Bool -> ParsecFieldGrammar' SetupBuildInfo #-} -{-# SPECIALIZE setupBInfoFieldGrammar :: Bool ->PrettyFieldGrammar' SetupBuildInfo #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription/Parsec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription/Parsec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription/Parsec.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription/Parsec.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,806 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE Rank2Types #-} -{-# LANGUAGE ScopedTypeVariables #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.PackageDescription.Parsec --- Copyright : Isaac Jones 2003-2005 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This defined parsers and partial pretty printers for the @.cabal@ format. 
- -module Distribution.PackageDescription.Parsec ( - -- * Package descriptions - readGenericPackageDescription, - parseGenericPackageDescription, - parseGenericPackageDescriptionMaybe, - - -- ** Parsing - ParseResult, - runParseResult, - - -- * New-style spec-version - scanSpecVersion, - - -- ** Supplementary build information - readHookedBuildInfo, - parseHookedBuildInfo, - ) where - -import Distribution.Compat.Prelude -import Prelude () - -import Control.Monad (guard) -import Control.Monad.State.Strict (StateT, execStateT) -import Control.Monad.Trans.Class (lift) -import Data.List (partition) -import Distribution.CabalSpecVersion -import Distribution.Compat.Lens -import Distribution.FieldGrammar -import Distribution.FieldGrammar.Parsec (NamelessField (..)) -import Distribution.PackageDescription -import Distribution.PackageDescription.FieldGrammar -import Distribution.PackageDescription.Quirks (patchQuirks) -import Distribution.Parsec.Class (parsec, simpleParsec) -import Distribution.Parsec.Common -import Distribution.Parsec.ConfVar (parseConditionConfVar) -import Distribution.Parsec.Field (FieldName, getName) -import Distribution.Parsec.FieldLineStream (fieldLineStreamFromBS) -import Distribution.Parsec.LexerMonad (LexWarning, toPWarnings) -import Distribution.Parsec.Newtypes (CommaFSep, List, SpecVersion (..), Token) -import Distribution.Parsec.Parser -import Distribution.Parsec.ParseResult -import Distribution.Pretty (prettyShow) -import Distribution.Simple.Utils (fromUTF8BS) -import Distribution.Text (display) -import Distribution.Types.CondTree -import Distribution.Types.Dependency (Dependency) -import Distribution.Types.ForeignLib -import Distribution.Types.ForeignLibType (knownForeignLibTypes) -import Distribution.Types.GenericPackageDescription (emptyGenericPackageDescription) -import Distribution.Types.PackageDescription (specVersion') -import Distribution.Types.UnqualComponentName (UnqualComponentName, mkUnqualComponentName) -import 
Distribution.Utils.Generic (breakMaybe, unfoldrM, validateUTF8) -import Distribution.Verbosity (Verbosity) -import Distribution.Version - (LowerBound (..), Version, asVersionIntervals, mkVersion, orLaterVersion, version0, - versionNumbers) - -import qualified Data.ByteString as BS -import qualified Data.ByteString.Char8 as BS8 -import qualified Data.Map.Strict as Map -import qualified Distribution.Compat.Newtype as Newtype -import qualified Distribution.Types.BuildInfo.Lens as L -import qualified Distribution.Types.GenericPackageDescription.Lens as L -import qualified Distribution.Types.PackageDescription.Lens as L -import qualified Text.Parsec as P - --- --------------------------------------------------------------- --- Parsing --- --------------------------------------------------------------- - --- | Parse the given package file. -readGenericPackageDescription :: Verbosity -> FilePath -> IO GenericPackageDescription -readGenericPackageDescription = readAndParseFile parseGenericPackageDescription - ------------------------------------------------------------------------------- --- | Parses the given file into a 'GenericPackageDescription'. --- --- In Cabal 1.2 the syntax for package descriptions was changed to a format --- with sections and possibly indented property descriptions. --- -parseGenericPackageDescription :: BS.ByteString -> ParseResult GenericPackageDescription -parseGenericPackageDescription bs = do - -- set scanned version - setCabalSpecVersion ver - -- if we get too new version, fail right away - case ver of - Just v | v > mkVersion [2,4] -> parseFailure zeroPos - "Unsupported cabal-version. See https://github.com/haskell/cabal/issues/4899." - _ -> pure () - - case readFields' bs' of - Right (fs, lexWarnings) -> do - when patched $ - parseWarning zeroPos PWTQuirkyCabalFile "Legacy cabal file" - -- UTF8 is validated in a prepass step, afterwards parsing is lenient. 
- parseGenericPackageDescription' ver lexWarnings (validateUTF8 bs') fs - -- TODO: better marshalling of errors - Left perr -> parseFatalFailure pos (show perr) where - ppos = P.errorPos perr - pos = Position (P.sourceLine ppos) (P.sourceColumn ppos) - where - (patched, bs') = patchQuirks bs - ver = scanSpecVersion bs' - --- | 'Maybe' variant of 'parseGenericPackageDescription' -parseGenericPackageDescriptionMaybe :: BS.ByteString -> Maybe GenericPackageDescription -parseGenericPackageDescriptionMaybe = - either (const Nothing) Just . snd . runParseResult . parseGenericPackageDescription - -fieldlinesToBS :: [FieldLine ann] -> BS.ByteString -fieldlinesToBS = BS.intercalate "\n" . map (\(FieldLine _ bs) -> bs) - --- Monad in which sections are parsed -type SectionParser = StateT SectionS ParseResult - --- | State of section parser -data SectionS = SectionS - { _stateGpd :: !GenericPackageDescription - , _stateCommonStanzas :: !(Map String CondTreeBuildInfo) - } - -stateGpd :: Lens' SectionS GenericPackageDescription -stateGpd f (SectionS gpd cs) = (\x -> SectionS x cs) <$> f gpd -{-# INLINE stateGpd #-} - -stateCommonStanzas :: Lens' SectionS (Map String CondTreeBuildInfo) -stateCommonStanzas f (SectionS gpd cs) = SectionS gpd <$> f cs -{-# INLINE stateCommonStanzas #-} - --- Note [Accumulating parser] --- --- This parser has two "states": --- * first we parse fields of PackageDescription --- * then we parse sections (libraries, executables, etc) -parseGenericPackageDescription' - :: Maybe Version - -> [LexWarning] - -> Maybe Int - -> [Field Position] - -> ParseResult GenericPackageDescription -parseGenericPackageDescription' cabalVerM lexWarnings utf8WarnPos fs = do - parseWarnings (toPWarnings lexWarnings) - for_ utf8WarnPos $ \pos -> - parseWarning zeroPos PWTUTF $ "UTF8 encoding problem at byte offset " ++ show pos - let (syntax, fs') = sectionizeFields fs - let (fields, sectionFields) = takeFields fs' - - -- cabal-version - cabalVer <- case cabalVerM of - Just 
v -> return v - Nothing -> case Map.lookup "cabal-version" fields >>= safeLast of - Nothing -> return version0 - Just (MkNamelessField pos fls) -> do - v <- specVersion' . Newtype.unpack' SpecVersion <$> runFieldParser pos parsec cabalSpecLatest fls - when (v >= mkVersion [2,1]) $ parseFailure pos $ - "cabal-version should be at the beginning of the file starting with spec version 2.2. " ++ - "See https://github.com/haskell/cabal/issues/4899" - - return v - - let specVer - | cabalVer >= mkVersion [2,3] = CabalSpecV2_4 - | cabalVer >= mkVersion [2,1] = CabalSpecV2_2 - | cabalVer >= mkVersion [1,25] = CabalSpecV2_0 - | cabalVer >= mkVersion [1,23] = CabalSpecV1_24 - | cabalVer >= mkVersion [1,21] = CabalSpecV1_22 - | otherwise = CabalSpecOld - - -- reset cabal version - setCabalSpecVersion (Just cabalVer) - - -- Package description - pd <- parseFieldGrammar specVer fields packageDescriptionFieldGrammar - - -- Check that scanned and parsed versions match. - unless (cabalVer == specVersion pd) $ parseFailure zeroPos $ - "Scanned and parsed cabal-versions don't match " ++ - prettyShow cabalVer ++ " /= " ++ prettyShow (specVersion pd) - - maybeWarnCabalVersion syntax pd - - -- Sections - let gpd = emptyGenericPackageDescription & L.packageDescription .~ pd - - view stateGpd <$> execStateT (goSections specVer sectionFields) (SectionS gpd Map.empty) - where - safeLast :: [a] -> Maybe a - safeLast = listToMaybe . reverse - - newSyntaxVersion :: Version - newSyntaxVersion = mkVersion [1, 2] - - maybeWarnCabalVersion :: Syntax -> PackageDescription -> ParseResult () - maybeWarnCabalVersion syntax pkg - | syntax == NewSyntax && specVersion pkg < newSyntaxVersion - = parseWarning zeroPos PWTNewSyntax $ - "A package using section syntax must specify at least\n" - ++ "'cabal-version: >= 1.2'." 
- - maybeWarnCabalVersion syntax pkg - | syntax == OldSyntax && specVersion pkg >= newSyntaxVersion - = parseWarning zeroPos PWTOldSyntax $ - "A package using 'cabal-version: " - ++ displaySpecVersion (specVersionRaw pkg) - ++ "' must use section syntax. See the Cabal user guide for details." - where - displaySpecVersion (Left version) = display version - displaySpecVersion (Right versionRange) = - case asVersionIntervals versionRange of - [] {- impossible -} -> display versionRange - ((LowerBound version _, _):_) -> display (orLaterVersion version) - - maybeWarnCabalVersion _ _ = return () - -goSections :: CabalSpecVersion -> [Field Position] -> SectionParser () -goSections specVer = traverse_ process - where - process (Field (Name pos name) _) = - lift $ parseWarning pos PWTTrailingFields $ - "Ignoring trailing fields after sections: " ++ show name - process (Section name args secFields) = - parseSection name args secFields - - snoc x xs = xs ++ [x] - - hasCommonStanzas = specHasCommonStanzas specVer - - -- we need signature, because this is polymorphic, but not-closed - parseCondTree' - :: FromBuildInfo a - => ParsecFieldGrammar' a -- ^ grammar - -> Map String CondTreeBuildInfo -- ^ common stanzas - -> [Field Position] - -> ParseResult (CondTree ConfVar [Dependency] a) - parseCondTree' = parseCondTreeWithCommonStanzas specVer - - parseSection :: Name Position -> [SectionArg Position] -> [Field Position] -> SectionParser () - parseSection (Name pos name) args fields - | hasCommonStanzas == NoCommonStanzas, name == "common" = lift $ do - parseWarning pos PWTUnknownSection $ "Ignoring section: common. You should set cabal-version: 2.2 or larger to use common stanzas." 
- - | name == "common" = do - commonStanzas <- use stateCommonStanzas - name' <- lift $ parseCommonName pos args - biTree <- lift $ parseCondTree' buildInfoFieldGrammar commonStanzas fields - - case Map.lookup name' commonStanzas of - Nothing -> stateCommonStanzas .= Map.insert name' biTree commonStanzas - Just _ -> lift $ parseFailure pos $ - "Duplicate common stanza: " ++ name' - - | name == "library" && null args = do - commonStanzas <- use stateCommonStanzas - lib <- lift $ parseCondTree' (libraryFieldGrammar Nothing) commonStanzas fields - -- TODO: check that library is defined once - stateGpd . L.condLibrary ?= lib - - -- Sublibraries - -- TODO: check cabal-version - | name == "library" = do - commonStanzas <- use stateCommonStanzas - name' <- parseUnqualComponentName pos args - lib <- lift $ parseCondTree' (libraryFieldGrammar $ Just name') commonStanzas fields - -- TODO check duplicate name here? - stateGpd . L.condSubLibraries %= snoc (name', lib) - - -- TODO: check cabal-version - | name == "foreign-library" = do - commonStanzas <- use stateCommonStanzas - name' <- parseUnqualComponentName pos args - flib <- lift $ parseCondTree' (foreignLibFieldGrammar name') commonStanzas fields - - let hasType ts = foreignLibType ts /= foreignLibType mempty - unless (onAllBranches hasType flib) $ lift $ parseFailure pos $ concat - [ "Foreign library " ++ show (display name') - , " is missing required field \"type\" or the field " - , "is not present in all conditional branches. The " - , "available test types are: " - , intercalate ", " (map display knownForeignLibTypes) - ] - - -- TODO check duplicate name here? - stateGpd . L.condForeignLibs %= snoc (name', flib) - - | name == "executable" = do - commonStanzas <- use stateCommonStanzas - name' <- parseUnqualComponentName pos args - exe <- lift $ parseCondTree' (executableFieldGrammar name') commonStanzas fields - -- TODO check duplicate name here? - stateGpd . 
L.condExecutables %= snoc (name', exe) - - | name == "test-suite" = do - commonStanzas <- use stateCommonStanzas - name' <- parseUnqualComponentName pos args - testStanza <- lift $ parseCondTree' testSuiteFieldGrammar commonStanzas fields - testSuite <- lift $ traverse (validateTestSuite pos) testStanza - - let hasType ts = testInterface ts /= testInterface mempty - unless (onAllBranches hasType testSuite) $ lift $ parseFailure pos $ concat - [ "Test suite " ++ show (display name') - , " is missing required field \"type\" or the field " - , "is not present in all conditional branches. The " - , "available test types are: " - , intercalate ", " (map display knownTestTypes) - ] - - -- TODO check duplicate name here? - stateGpd . L.condTestSuites %= snoc (name', testSuite) - - | name == "benchmark" = do - commonStanzas <- use stateCommonStanzas - name' <- parseUnqualComponentName pos args - benchStanza <- lift $ parseCondTree' benchmarkFieldGrammar commonStanzas fields - bench <- lift $ traverse (validateBenchmark pos) benchStanza - - let hasType ts = benchmarkInterface ts /= benchmarkInterface mempty - unless (onAllBranches hasType bench) $ lift $ parseFailure pos $ concat - [ "Benchmark " ++ show (display name') - , " is missing required field \"type\" or the field " - , "is not present in all conditional branches. The " - , "available benchmark types are: " - , intercalate ", " (map display knownBenchmarkTypes) - ] - - -- TODO check duplicate name here? - stateGpd . L.condBenchmarks %= snoc (name', bench) - - | name == "flag" = do - name' <- parseNameBS pos args - name'' <- lift $ runFieldParser' pos parsec specVer (fieldLineStreamFromBS name') `recoverWith` mkFlagName "" - flag <- lift $ parseFields specVer fields (flagFieldGrammar name'') - -- Check default flag - stateGpd . L.genPackageFlags %= snoc flag - - | name == "custom-setup" && null args = do - sbi <- lift $ parseFields specVer fields (setupBInfoFieldGrammar False) - stateGpd . L.packageDescription . 
L.setupBuildInfo ?= sbi - - | name == "source-repository" = do - kind <- lift $ case args of - [SecArgName spos secName] -> - runFieldParser' spos parsec specVer (fieldLineStreamFromBS secName) `recoverWith` RepoHead - [] -> do - parseFailure pos "'source-repository' requires exactly one argument" - pure RepoHead - _ -> do - parseFailure pos $ "Invalid source-repository kind " ++ show args - pure RepoHead - - sr <- lift $ parseFields specVer fields (sourceRepoFieldGrammar kind) - stateGpd . L.packageDescription . L.sourceRepos %= snoc sr - - | otherwise = lift $ - parseWarning pos PWTUnknownSection $ "Ignoring section: " ++ show name - -parseName :: Position -> [SectionArg Position] -> SectionParser String -parseName pos args = fromUTF8BS <$> parseNameBS pos args - -parseNameBS :: Position -> [SectionArg Position] -> SectionParser BS.ByteString --- TODO: use strict parser -parseNameBS pos args = case args of - [SecArgName _pos secName] -> - pure secName - [SecArgStr _pos secName] -> - pure secName - [] -> do - lift $ parseFailure pos "name required" - pure "" - _ -> do - -- TODO: pretty print args - lift $ parseFailure pos $ "Invalid name " ++ show args - pure "" - -parseCommonName :: Position -> [SectionArg Position] -> ParseResult String -parseCommonName pos args = case args of - [SecArgName _pos secName] -> - pure $ fromUTF8BS secName - [SecArgStr _pos secName] -> - pure $ fromUTF8BS secName - [] -> do - parseFailure pos $ "name required" - pure "" - _ -> do - -- TODO: pretty print args - parseFailure pos $ "Invalid name " ++ show args - pure "" - --- TODO: avoid conversion to 'String'. -parseUnqualComponentName :: Position -> [SectionArg Position] -> SectionParser UnqualComponentName -parseUnqualComponentName pos args = mkUnqualComponentName <$> parseName pos args - --- | Parse a non-recursive list of fields. 
-parseFields - :: CabalSpecVersion - -> [Field Position] -- ^ fields to be parsed - -> ParsecFieldGrammar' a - -> ParseResult a -parseFields v fields grammar = do - let (fs0, ss) = partitionFields fields - traverse_ (traverse_ warnInvalidSubsection) ss - parseFieldGrammar v fs0 grammar - -warnInvalidSubsection :: Section Position -> ParseResult () -warnInvalidSubsection (MkSection (Name pos name) _ _) = - void (parseFailure pos $ "invalid subsection " ++ show name) - -parseCondTree - :: forall a c. - CabalSpecVersion - -> HasElif -- ^ accept @elif@ - -> ParsecFieldGrammar' a -- ^ grammar - -> (a -> c) -- ^ condition extractor - -> [Field Position] - -> ParseResult (CondTree ConfVar c a) -parseCondTree v hasElif grammar cond = go - where - go fields = do - let (fs, ss) = partitionFields fields - x <- parseFieldGrammar v fs grammar - branches <- concat <$> traverse parseIfs ss - return (CondNode x (cond x) branches) -- TODO: branches - - parseIfs :: [Section Position] -> ParseResult [CondBranch ConfVar c a] - parseIfs [] = return [] - parseIfs (MkSection (Name _ name) test fields : sections) | name == "if" = do - test' <- parseConditionConfVar test - fields' <- go fields - -- TODO: else - (elseFields, sections') <- parseElseIfs sections - return (CondBranch test' fields' elseFields : sections') - parseIfs (MkSection (Name pos name) _ _ : sections) = do - parseWarning pos PWTInvalidSubsection $ "invalid subsection " ++ show name - parseIfs sections - - parseElseIfs - :: [Section Position] - -> ParseResult (Maybe (CondTree ConfVar c a), [CondBranch ConfVar c a]) - parseElseIfs [] = return (Nothing, []) - parseElseIfs (MkSection (Name pos name) args fields : sections) | name == "else" = do - unless (null args) $ - parseFailure pos $ "`else` section has section arguments " ++ show args - elseFields <- go fields - sections' <- parseIfs sections - return (Just elseFields, sections') - - - - parseElseIfs (MkSection (Name _ name) test fields : sections) | hasElif == HasElif, 
name == "elif" = do - -- TODO: check cabal-version - test' <- parseConditionConfVar test - fields' <- go fields - (elseFields, sections') <- parseElseIfs sections - -- we parse an empty 'Fields', to get empty value for a node - a <- parseFieldGrammar v mempty grammar - return (Just $ CondNode a (cond a) [CondBranch test' fields' elseFields], sections') - - parseElseIfs (MkSection (Name pos name) _ _ : sections) | name == "elif" = do - parseWarning pos PWTInvalidSubsection $ "invalid subsection \"elif\". You should set cabal-version: 2.2 or larger to use elif-conditionals." - (,) Nothing <$> parseIfs sections - - parseElseIfs sections = (,) Nothing <$> parseIfs sections - -{- Note [Accumulating parser] - -Note: Outdated a bit - -In there parser, @'FieldDescr' a@ is transformed into @Map FieldName (a -> -FieldParser a)@. The weird value is used because we accumulate structure of -@a@ by folding over the fields. There are various reasons for that: - -* Almost all fields are optional - -* This is simple approach so declarative bi-directional format (parsing and -printing) of structure could be specified (list of @'FieldDescr' a@) - -* There are surface syntax fields corresponding to single field in the file: - @license-file@ and @license-files@ - -* This is quite safe approach. - -When/if we re-implement the parser to support formatting preservging roundtrip -with new AST, this all need to be rewritten. --} - -------------------------------------------------------------------------------- --- Common stanzas -------------------------------------------------------------------------------- - --- $commonStanzas --- --- [Note: Common stanzas] --- --- In Cabal 2.2 we support simple common stanzas: --- --- * Commons stanzas define 'BuildInfo' --- --- * import "fields" can only occur at top of other stanzas (think: imports) --- --- In particular __there aren't__ --- --- * implicit stanzas --- --- * More specific common stanzas (executable, test-suite). 
--- --- --- The approach uses the fact that 'BuildInfo' is a 'Monoid': --- --- @ --- mergeCommonStanza' :: HasBuildInfo comp => BuildInfo -> comp -> comp --- mergeCommonStanza' bi = over L.BuildInfo (bi <>) --- @ --- --- Real 'mergeCommonStanza' is more complicated as we have to deal with --- conditional trees. --- --- The approach is simple, and have good properties: --- --- * Common stanzas are parsed exactly once, even if not-used. Thus we report errors in them. --- -type CondTreeBuildInfo = CondTree ConfVar [Dependency] BuildInfo - --- | Create @a@ from 'BuildInfo'. --- --- Law: @view buildInfo . fromBuildInfo = id@ -class L.HasBuildInfo a => FromBuildInfo a where - fromBuildInfo :: BuildInfo -> a - -instance FromBuildInfo BuildInfo where fromBuildInfo = id -instance FromBuildInfo Library where fromBuildInfo bi = set L.buildInfo bi emptyLibrary -instance FromBuildInfo ForeignLib where fromBuildInfo bi = set L.buildInfo bi emptyForeignLib -instance FromBuildInfo Executable where fromBuildInfo bi = set L.buildInfo bi emptyExecutable - -instance FromBuildInfo TestSuiteStanza where - fromBuildInfo = TestSuiteStanza Nothing Nothing Nothing - -instance FromBuildInfo BenchmarkStanza where - fromBuildInfo = BenchmarkStanza Nothing Nothing Nothing - -parseCondTreeWithCommonStanzas - :: forall a. FromBuildInfo a - => CabalSpecVersion - -> ParsecFieldGrammar' a -- ^ grammar - -> Map String CondTreeBuildInfo -- ^ common stanzas - -> [Field Position] - -> ParseResult (CondTree ConfVar [Dependency] a) -parseCondTreeWithCommonStanzas v grammar commonStanzas = goImports [] - where - hasElif = specHasElif v - hasCommonStanzas = specHasCommonStanzas v - - getList' :: List CommaFSep Token String -> [String] - getList' = Newtype.unpack - - -- parse leading imports - -- not supported: - goImports acc (Field (Name pos name) _ : fields) | name == "import", hasCommonStanzas == NoCommonStanzas = do - parseWarning pos PWTUnknownField "Unknown field: import. 
You should set cabal-version: 2.2 or larger to use common stanzas" - goImports acc fields - -- supported: - goImports acc (Field (Name pos name) fls : fields) | name == "import" = do - names <- getList' <$> runFieldParser pos parsec v fls - names' <- for names $ \commonName -> - case Map.lookup commonName commonStanzas of - Nothing -> do - parseFailure pos $ "Undefined common stanza imported: " ++ commonName - pure Nothing - Just commonTree -> - pure (Just commonTree) - - goImports (acc ++ catMaybes names') fields - - -- Go to parsing condTree after first non-import 'Field'. - goImports acc fields = go acc fields - - -- parse actual CondTree - go :: [CondTreeBuildInfo] -> [Field Position] -> ParseResult (CondTree ConfVar [Dependency] a) - go bis fields = do - x <- parseCondTree v hasElif grammar (view L.targetBuildDepends) fields - pure $ foldr mergeCommonStanza x bis - -mergeCommonStanza - :: forall a. FromBuildInfo a - => CondTree ConfVar [Dependency] BuildInfo - -> CondTree ConfVar [Dependency] a - -> CondTree ConfVar [Dependency] a -mergeCommonStanza (CondNode bi _ bis) (CondNode x _ cs) = - CondNode x' (x' ^. L.targetBuildDepends) cs' - where - -- new value is old value with buildInfo field _prepended_. - x' = x & L.buildInfo %~ (bi <>) - - -- tree components are appended together. - cs' = map (fmap fromBuildInfo) bis ++ cs - -------------------------------------------------------------------------------- --- Branches -------------------------------------------------------------------------------- - --- Check that a property holds on all branches of a condition tree -onAllBranches :: forall v c a. Monoid a => (a -> Bool) -> CondTree v c a -> Bool -onAllBranches p = go mempty - where - -- If the current level of the tree satisfies the property, then we are - -- done. If not, then one of the conditional branches below the current node - -- must satisfy it. 
Each node may have multiple immediate children; we only - -- one need one to satisfy the property because the configure step uses - -- 'mappend' to join together the results of flag resolution. - go :: a -> CondTree v c a -> Bool - go acc ct = let acc' = acc `mappend` condTreeData ct - in p acc' || any (goBranch acc') (condTreeComponents ct) - - -- Both the 'true' and the 'false' block must satisfy the property. - goBranch :: a -> CondBranch v c a -> Bool - goBranch _ (CondBranch _ _ Nothing) = False - goBranch acc (CondBranch _ t (Just e)) = go acc t && go acc e - -------------------------------------------------------------------------------- --- Old syntax -------------------------------------------------------------------------------- - --- TODO: move to own module - --- | "Sectionize" an old-style Cabal file. A sectionized file has: --- --- * all global fields at the beginning, followed by --- --- * all flag declarations, followed by --- --- * an optional library section, and an arbitrary number of executable --- sections (in any order). --- --- The current implementation just gathers all library-specific fields --- in a library section and wraps all executable stanzas in an executable --- section. -sectionizeFields :: [Field ann] -> (Syntax, [Field ann]) -sectionizeFields fs = case classifyFields fs of - Just fields -> (OldSyntax, convert fields) - Nothing -> (NewSyntax, fs) - where - -- return 'Just' if all fields are simple fields - classifyFields :: [Field ann] -> Maybe [(Name ann, [FieldLine ann])] - classifyFields = traverse f - where - f (Field name fieldlines) = Just (name, fieldlines) - f _ = Nothing - - trim = BS.dropWhile isSpace' . BS.reverse . BS.dropWhile isSpace' . BS.reverse - isSpace' = (== 32) - - convert :: [(Name ann, [FieldLine ann])] -> [Field ann] - convert fields = - let - toField (name, ls) = Field name ls - -- "build-depends" is a local field now. 
To be backwards - -- compatible, we still allow it as a global field in old-style - -- package description files and translate it to a local field by - -- adding it to every non-empty section - (hdr0, exes0) = break ((=="executable") . getName . fst) fields - (hdr, libfs0) = partition (not . (`elem` libFieldNames) . getName . fst) hdr0 - - (deps, libfs) = partition ((== "build-depends") . getName . fst) - libfs0 - - exes = unfoldr toExe exes0 - toExe [] = Nothing - toExe ((Name pos n, ls) : r) - | n == "executable" = - let (efs, r') = break ((== "executable") . getName . fst) r - in Just (Section (Name pos "executable") [SecArgName pos $ trim $ fieldlinesToBS ls] (map toField $ deps ++ efs), r') - toExe _ = error "unexpected input to 'toExe'" - - lib = case libfs of - [] -> [] - ((Name pos _, _) : _) -> - [Section (Name pos "library") [] (map toField $ deps ++ libfs)] - - in map toField hdr ++ lib ++ exes - --- | See 'sectionizeFields'. -data Syntax = OldSyntax | NewSyntax - deriving (Eq, Show) - --- TODO: -libFieldNames :: [FieldName] -libFieldNames = fieldGrammarKnownFieldList (libraryFieldGrammar Nothing) - -------------------------------------------------------------------------------- --- Suplementary build information -------------------------------------------------------------------------------- - -readHookedBuildInfo :: Verbosity -> FilePath -> IO HookedBuildInfo -readHookedBuildInfo = readAndParseFile parseHookedBuildInfo - -parseHookedBuildInfo :: BS.ByteString -> ParseResult HookedBuildInfo -parseHookedBuildInfo bs = case readFields' bs of - Right (fs, lexWarnings) -> do - parseHookedBuildInfo' lexWarnings fs - -- TODO: better marshalling of errors - Left perr -> parseFatalFailure zeroPos (show perr) - -parseHookedBuildInfo' - :: [LexWarning] - -> [Field Position] - -> ParseResult HookedBuildInfo -parseHookedBuildInfo' lexWarnings fs = do - parseWarnings (toPWarnings lexWarnings) - (mLibFields, exes) <- stanzas fs - mLib <- parseLib mLibFields - biExes 
<- traverse parseExe exes - return (mLib, biExes) - where - parseLib :: Fields Position -> ParseResult (Maybe BuildInfo) - parseLib fields - | Map.null fields = pure Nothing - | otherwise = Just <$> parseFieldGrammar cabalSpecLatest fields buildInfoFieldGrammar - - parseExe :: (UnqualComponentName, Fields Position) -> ParseResult (UnqualComponentName, BuildInfo) - parseExe (n, fields) = do - bi <- parseFieldGrammar cabalSpecLatest fields buildInfoFieldGrammar - pure (n, bi) - - stanzas :: [Field Position] -> ParseResult (Fields Position, [(UnqualComponentName, Fields Position)]) - stanzas fields = do - let (hdr0, exes0) = breakMaybe isExecutableField fields - hdr <- toFields hdr0 - exes <- unfoldrM (traverse toExe) exes0 - pure (hdr, exes) - - toFields :: [Field Position] -> ParseResult (Fields Position) - toFields fields = do - let (fields', ss) = partitionFields fields - traverse_ (traverse_ warnInvalidSubsection) ss - pure fields' - - toExe - :: ([FieldLine Position], [Field Position]) - -> ParseResult ((UnqualComponentName, Fields Position), Maybe ([FieldLine Position], [Field Position])) - toExe (fss, fields) = do - name <- runFieldParser zeroPos parsec cabalSpecLatest fss - let (hdr0, rest) = breakMaybe isExecutableField fields - hdr <- toFields hdr0 - pure ((name, hdr), rest) - - isExecutableField (Field (Name _ name) fss) - | name == "executable" = Just fss - | otherwise = Nothing - isExecutableField _ = Nothing - --- | Quickly scan new-style spec-version --- --- A new-style spec-version declaration begins the .cabal file and --- follow the following case-insensitive grammar (expressed in --- RFC5234 ABNF): --- --- @ --- newstyle-spec-version-decl = "cabal-version" *WS ":" *WS newstyle-pec-version *WS --- --- spec-version = NUM "." NUM [ "." 
NUM ] --- --- NUM = DIGIT0 / DIGITP 1*DIGIT0 --- DIGIT0 = %x30-39 --- DIGITP = %x31-39 --- WS = %20 --- @ --- -scanSpecVersion :: BS.ByteString -> Maybe Version -scanSpecVersion bs = do - fstline':_ <- pure (BS8.lines bs) - - -- parse - -- normalise: remove all whitespace, convert to lower-case - let fstline = BS.map toLowerW8 $ BS.filter (/= 0x20) fstline' - ["cabal-version",vers] <- pure (BS8.split ':' fstline) - - -- parse - -- - -- This is currently more tolerant regarding leading 0 digits. - -- - ver <- simpleParsec (BS8.unpack vers) - guard $ case versionNumbers ver of - [_,_] -> True - [_,_,_] -> True - _ -> False - - pure ver - where - -- | Translate ['A'..'Z'] to ['a'..'z'] - toLowerW8 :: Word8 -> Word8 - toLowerW8 w | 0x40 < w && w < 0x5b = w+0x20 - | otherwise = w diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription/PrettyPrint.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription/PrettyPrint.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription/PrettyPrint.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription/PrettyPrint.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,247 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.PackageDescription.PrettyPrint --- Copyright : Jürgen Nicklisch-Franken 2010 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Pretty printing for cabal files --- ------------------------------------------------------------------------------ - -module Distribution.PackageDescription.PrettyPrint ( - -- * Generic package descriptions - writeGenericPackageDescription, - showGenericPackageDescription, - - -- * Package descriptions - writePackageDescription, - 
showPackageDescription, - - -- ** Supplementary build information - writeHookedBuildInfo, - showHookedBuildInfo, -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.Dependency -import Distribution.Types.ForeignLib (ForeignLib (foreignLibName)) -import Distribution.Types.UnqualComponentName -import Distribution.Types.CondTree - -import Distribution.PackageDescription -import Distribution.Simple.Utils -import Distribution.ParseUtils -import Distribution.Text - -import Distribution.FieldGrammar (PrettyFieldGrammar', prettyFieldGrammar) -import Distribution.PackageDescription.FieldGrammar - (packageDescriptionFieldGrammar, buildInfoFieldGrammar, - flagFieldGrammar, foreignLibFieldGrammar, libraryFieldGrammar, - benchmarkFieldGrammar, testSuiteFieldGrammar, - setupBInfoFieldGrammar, sourceRepoFieldGrammar, executableFieldGrammar) - -import qualified Distribution.PackageDescription.FieldGrammar as FG - -import Text.PrettyPrint - (hsep, space, parens, char, nest, ($$), (<+>), - text, vcat, ($+$), Doc, render) - -import qualified Data.ByteString.Lazy.Char8 as BS.Char8 - --- | Writes a .cabal file from a generic package description -writeGenericPackageDescription :: FilePath -> GenericPackageDescription -> NoCallStackIO () -writeGenericPackageDescription fpath pkg = writeUTF8File fpath (showGenericPackageDescription pkg) - --- | Writes a generic package description to a string -showGenericPackageDescription :: GenericPackageDescription -> String -showGenericPackageDescription = render . ($+$ text "") . 
ppGenericPackageDescription - -ppGenericPackageDescription :: GenericPackageDescription -> Doc -ppGenericPackageDescription gpd = - ppPackageDescription (packageDescription gpd) - $+$ ppSetupBInfo (setupBuildInfo (packageDescription gpd)) - $+$ ppGenPackageFlags (genPackageFlags gpd) - $+$ ppCondLibrary (condLibrary gpd) - $+$ ppCondSubLibraries (condSubLibraries gpd) - $+$ ppCondForeignLibs (condForeignLibs gpd) - $+$ ppCondExecutables (condExecutables gpd) - $+$ ppCondTestSuites (condTestSuites gpd) - $+$ ppCondBenchmarks (condBenchmarks gpd) - -ppPackageDescription :: PackageDescription -> Doc -ppPackageDescription pd = - prettyFieldGrammar packageDescriptionFieldGrammar pd - $+$ ppSourceRepos (sourceRepos pd) - -ppSourceRepos :: [SourceRepo] -> Doc -ppSourceRepos [] = mempty -ppSourceRepos (hd:tl) = ppSourceRepo hd $+$ ppSourceRepos tl - -ppSourceRepo :: SourceRepo -> Doc -ppSourceRepo repo = - emptyLine $ text "source-repository" <+> disp kind $+$ - nest indentWith (prettyFieldGrammar (sourceRepoFieldGrammar kind) repo) - where - kind = repoKind repo - -ppSetupBInfo :: Maybe SetupBuildInfo -> Doc -ppSetupBInfo Nothing = mempty -ppSetupBInfo (Just sbi) - | defaultSetupDepends sbi = mempty - | otherwise = - emptyLine $ text "custom-setup" $+$ - nest indentWith (prettyFieldGrammar (setupBInfoFieldGrammar False) sbi) - -ppGenPackageFlags :: [Flag] -> Doc -ppGenPackageFlags flds = vcat [ppFlag f | f <- flds] - -ppFlag :: Flag -> Doc -ppFlag flag@(MkFlag name _ _ _) = - emptyLine $ text "flag" <+> ppFlagName name $+$ - nest indentWith (prettyFieldGrammar (flagFieldGrammar name) flag) - -ppCondTree2 :: PrettyFieldGrammar' s -> CondTree ConfVar [Dependency] s -> Doc -ppCondTree2 grammar = go - where - -- TODO: recognise elif opportunities - go (CondNode it _ ifs) = - prettyFieldGrammar grammar it - $+$ vcat (map ppIf ifs) - - ppIf (CondBranch c thenTree Nothing) --- | isEmpty thenDoc = mempty - | otherwise = ppIfCondition c $$ nest indentWith thenDoc - where - thenDoc 
= go thenTree - - ppIf (CondBranch c thenTree (Just elseTree)) = - case (False, False) of - -- case (isEmpty thenDoc, isEmpty elseDoc) of - (True, True) -> mempty - (False, True) -> ppIfCondition c $$ nest indentWith thenDoc - (True, False) -> ppIfCondition (cNot c) $$ nest indentWith elseDoc - (False, False) -> (ppIfCondition c $$ nest indentWith thenDoc) - $+$ (text "else" $$ nest indentWith elseDoc) - where - thenDoc = go thenTree - elseDoc = go elseTree - -ppCondLibrary :: Maybe (CondTree ConfVar [Dependency] Library) -> Doc -ppCondLibrary Nothing = mempty -ppCondLibrary (Just condTree) = - emptyLine $ text "library" $+$ - nest indentWith (ppCondTree2 (libraryFieldGrammar Nothing) condTree) - -ppCondSubLibraries :: [(UnqualComponentName, CondTree ConfVar [Dependency] Library)] -> Doc -ppCondSubLibraries libs = vcat - [ emptyLine $ (text "library" <+> disp n) $+$ - nest indentWith (ppCondTree2 (libraryFieldGrammar $ Just n) condTree) - | (n, condTree) <- libs - ] - -ppCondForeignLibs :: [(UnqualComponentName, CondTree ConfVar [Dependency] ForeignLib)] -> Doc -ppCondForeignLibs flibs = vcat - [ emptyLine $ (text "foreign-library" <+> disp n) $+$ - nest indentWith (ppCondTree2 (foreignLibFieldGrammar n) condTree) - | (n, condTree) <- flibs - ] - -ppCondExecutables :: [(UnqualComponentName, CondTree ConfVar [Dependency] Executable)] -> Doc -ppCondExecutables exes = vcat - [ emptyLine $ (text "executable" <+> disp n) $+$ - nest indentWith (ppCondTree2 (executableFieldGrammar n) condTree) - | (n, condTree) <- exes - ] - -ppCondTestSuites :: [(UnqualComponentName, CondTree ConfVar [Dependency] TestSuite)] -> Doc -ppCondTestSuites suites = vcat - [ emptyLine $ (text "test-suite" <+> disp n) $+$ - nest indentWith (ppCondTree2 testSuiteFieldGrammar (fmap FG.unvalidateTestSuite condTree)) - | (n, condTree) <- suites - ] - -ppCondBenchmarks :: [(UnqualComponentName, CondTree ConfVar [Dependency] Benchmark)] -> Doc -ppCondBenchmarks suites = vcat - [ emptyLine $ (text 
"benchmark" <+> disp n) $+$ - nest indentWith (ppCondTree2 benchmarkFieldGrammar (fmap FG.unvalidateBenchmark condTree)) - | (n, condTree) <- suites - ] - -ppCondition :: Condition ConfVar -> Doc -ppCondition (Var x) = ppConfVar x -ppCondition (Lit b) = text (show b) -ppCondition (CNot c) = char '!' <<>> (ppCondition c) -ppCondition (COr c1 c2) = parens (hsep [ppCondition c1, text "||" - <+> ppCondition c2]) -ppCondition (CAnd c1 c2) = parens (hsep [ppCondition c1, text "&&" - <+> ppCondition c2]) -ppConfVar :: ConfVar -> Doc -ppConfVar (OS os) = text "os" <<>> parens (disp os) -ppConfVar (Arch arch) = text "arch" <<>> parens (disp arch) -ppConfVar (Flag name) = text "flag" <<>> parens (ppFlagName name) -ppConfVar (Impl c v) = text "impl" <<>> parens (disp c <+> disp v) - -ppFlagName :: FlagName -> Doc -ppFlagName = text . unFlagName - -ppIfCondition :: (Condition ConfVar) -> Doc -ppIfCondition c = (emptyLine $ text "if" <+> ppCondition c) - -emptyLine :: Doc -> Doc -emptyLine d = text "" $+$ d - --- | @since 2.0.0.2 -writePackageDescription :: FilePath -> PackageDescription -> NoCallStackIO () -writePackageDescription fpath pkg = writeUTF8File fpath (showPackageDescription pkg) - ---TODO: make this use section syntax --- add equivalent for GenericPackageDescription - --- | @since 2.0.0.2 -showPackageDescription :: PackageDescription -> String -showPackageDescription = showGenericPackageDescription . 
pdToGpd - -pdToGpd :: PackageDescription -> GenericPackageDescription -pdToGpd pd = GenericPackageDescription - { packageDescription = pd - , genPackageFlags = [] - , condLibrary = mkCondTree <$> library pd - , condSubLibraries = mkCondTreeL <$> subLibraries pd - , condForeignLibs = mkCondTree' foreignLibName <$> foreignLibs pd - , condExecutables = mkCondTree' exeName <$> executables pd - , condTestSuites = mkCondTree' testName <$> testSuites pd - , condBenchmarks = mkCondTree' benchmarkName <$> benchmarks pd - } - where - -- We set CondTree's [Dependency] to an empty list, as it - -- is not pretty printed anyway. - mkCondTree x = CondNode x [] [] - mkCondTreeL l = (fromMaybe (mkUnqualComponentName "") (libName l), CondNode l [] []) - - mkCondTree' - :: (a -> UnqualComponentName) - -> a -> (UnqualComponentName, CondTree ConfVar [Dependency] a) - mkCondTree' f x = (f x, CondNode x [] []) - --- | @since 2.0.0.2 -writeHookedBuildInfo :: FilePath -> HookedBuildInfo -> NoCallStackIO () -writeHookedBuildInfo fpath = writeFileAtomic fpath . BS.Char8.pack - . 
showHookedBuildInfo - --- | @since 2.0.0.2 -showHookedBuildInfo :: HookedBuildInfo -> String -showHookedBuildInfo (mb_lib_bi, ex_bis) = render $ - maybe mempty (prettyFieldGrammar buildInfoFieldGrammar) mb_lib_bi - $$ vcat - [ space - $$ (text "executable:" <+> disp name) - $$ prettyFieldGrammar buildInfoFieldGrammar bi - | (name, bi) <- ex_bis - ] - $+$ text "" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription/Quirks.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription/Quirks.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription/Quirks.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription/Quirks.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,203 +0,0 @@ -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE OverloadedStrings #-} --- | --- --- @since 2.2.0.0 -module Distribution.PackageDescription.Quirks (patchQuirks) where - -import Prelude () -import Distribution.Compat.Prelude -import GHC.Fingerprint (Fingerprint (..), fingerprintData) -import Foreign.Ptr (castPtr) -import System.IO.Unsafe (unsafeDupablePerformIO) - -import qualified Data.ByteString as BS -import qualified Data.ByteString.Unsafe as BS -import qualified Data.Map as Map - --- | Patch legacy @.cabal@ file contents to allow parsec parser to accept --- all of Hackage. --- --- Bool part of the result tells whether the output is modified. 
--- --- @since 2.2.0.0 -patchQuirks :: BS.ByteString -> (Bool, BS.ByteString) -patchQuirks bs = case Map.lookup (BS.take 256 bs, md5 bs) patches of - Nothing -> (False, bs) - Just (post, f) - | post /= md5 output -> (False, bs) - | otherwise -> (True, output) - where - output = f bs - -md5 :: BS.ByteString -> Fingerprint -md5 bs = unsafeDupablePerformIO $ BS.unsafeUseAsCStringLen bs $ \(ptr, len) -> - fingerprintData (castPtr ptr) len - --- | 'patches' contains first 256 bytes, pre- and post-fingerprints and a patch function. --- --- -patches :: Map.Map (BS.ByteString, Fingerprint) (Fingerprint, BS.ByteString -> BS.ByteString) -patches = Map.fromList - -- http://hackage.haskell.org/package/unicode-transforms-0.3.3 - -- other-modules: . - -- ReadP assumed dot is empty line - [ mk "-- This file has been generated from package.yaml by hpack version 0.17.0.\n--\n-- see: https://github.com/sol/hpack\n\nname: unicode-transforms\nversion: 0.3.3\nsynopsis: Unicode normalization\ndescription: Fast Unic" - (Fingerprint 15958160436627155571 10318709190730872881) - (Fingerprint 11008465475756725834 13815629925116264363) - (bsRemove " other-modules:\n .\n") -- TODO: remove traling \n to test structural-diff - -- http://hackage.haskell.org/package/DSTM-0.1.2 - -- http://hackage.haskell.org/package/DSTM-0.1.1 - -- http://hackage.haskell.org/package/DSTM-0.1 - -- Other Modules: no dash - -- ReadP parsed as section - , mk "Name: DSTM\nVersion: 0.1.2\nCopyright: (c) 2010, Frank Kupke\nLicense: LGPL\nLicense-File: LICENSE\nAuthor: Frank Kupke\nMaintainer: frk@informatik.uni-kiel.de\nCabal-Version: >= 1.2.3\nStability: provisional\nSynopsis: A framework for using STM within distributed " - (Fingerprint 6919263071548559054 9050746360708965827) - (Fingerprint 17015177514298962556 11943164891661867280) - (bsReplace "Other modules:" "-- ") - , mk "Name: DSTM\nVersion: 0.1.1\nCopyright: (c) 2010, Frank Kupke\nLicense: LGPL\nLicense-File: LICENSE\nAuthor: Frank Kupke\nMaintainer: 
frk@informatik.uni-kiel.de\nCabal-Version: >= 1.2.3\nStability: provisional\nSynopsis: A framework for using STM within distributed " - (Fingerprint 17313105789069667153 9610429408495338584) - (Fingerprint 17250946493484671738 17629939328766863497) - (bsReplace "Other modules:" "-- ") - , mk "Name: DSTM\nVersion: 0.1\nCopyright: (c) 2010, Frank Kupke\nLicense: LGPL\nLicense-File: LICENSE\nAuthor: Frank Kupke\nMaintainer: frk@informatik.uni-kiel.de\nCabal-Version: >= 1.2.3\nStability: provisional\nSynopsis: A framework for using STM within distributed sy" - (Fingerprint 10502599650530614586 16424112934471063115) - (Fingerprint 13562014713536696107 17899511905611879358) - (bsReplace "Other modules:" "-- ") - -- http://hackage.haskell.org/package/control-monad-exception-mtl-0.10.3 - , mk "name: control-monad-exception-mtl\nversion: 0.10.3\nCabal-Version: >= 1.10\nbuild-type: Simple\nlicense: PublicDomain\nauthor: Pepe Iborra\nmaintainer: pepeiborra@gmail.com\nhomepage: http://pepeiborra.github.com/control-monad-exception\nsynopsis: MTL instances f" - (Fingerprint 18274748422558568404 4043538769550834851) - (Fingerprint 11395257416101232635 4303318131190196308) - (bsReplace " default- extensions:" "unknown-section") - -- http://hackage.haskell.org/package/vacuum-opengl-0.0 - -- \DEL character - , mk "Name: vacuum-opengl\nVersion: 0.0\nSynopsis: Visualize live Haskell data structures using vacuum, graphviz and OpenGL.\nDescription: \DELVisualize live Haskell data structures using vacuum, graphviz and OpenGL.\n " - (Fingerprint 5946760521961682577 16933361639326309422) - (Fingerprint 14034745101467101555 14024175957788447824) - (bsRemove "\DEL") - , mk "Name: vacuum-opengl\nVersion: 0.0.1\nSynopsis: Visualize live Haskell data structures using vacuum, graphviz and OpenGL.\nDescription: \DELVisualize live Haskell data structures using vacuum, graphviz and OpenGL.\n " - (Fingerprint 10790950110330119503 1309560249972452700) - (Fingerprint 1565743557025952928 
13645502325715033593) - (bsRemove "\DEL") - -- http://hackage.haskell.org/package/ixset-1.0.4 - -- {- comments -} - , mk "Name: ixset\nVersion: 1.0.4\nSynopsis: Efficient relational queries on Haskell sets.\nDescription:\n Create and query sets that are indexed by multiple indices.\nLicense: BSD3\nLicense-file: COPYING\nAut" - (Fingerprint 11886092342440414185 4150518943472101551) - (Fingerprint 5731367240051983879 17473925006273577821) - (bsRemoveStarting "{-") - -- : after section - -- http://hackage.haskell.org/package/ds-kanren - , mk "name: ds-kanren\nversion: 0.2.0.0\nsynopsis: A subset of the miniKanren language\ndescription:\n ds-kanren is an implementation of the language.\n .\n == What's in ds-kanren?\n .\n ['dis" - (Fingerprint 2804006762382336875 9677726932108735838) - (Fingerprint 9830506174094917897 12812107316777006473) - (bsReplace "Test-Suite test-unify:" "Test-Suite \"test-unify:\"" . bsReplace "Test-Suite test-list-ops:" "Test-Suite \"test-list-ops:\"") - , mk "name: ds-kanren\nversion: 0.2.0.1\nsynopsis: A subset of the miniKanren language\ndescription:\n ds-kanren is an implementation of the language.\n\nlicense: MIT\nlicense-file: " - (Fingerprint 9130259649220396193 2155671144384738932) - (Fingerprint 1847988234352024240 4597789823227580457) - (bsReplace "Test-Suite test-unify:" "Test-Suite \"test-unify:\"" . 
bsReplace "Test-Suite test-list-ops:" "Test-Suite \"test-list-ops:\"") - , mk "name: metric\nversion: 0.1.4\nsynopsis: Metric spaces.\nlicense: MIT\nlicense-file: LICENSE\nauthor: Vikram Verma\nmaintainer: me@vikramverma.com\ncategory: Data\nbuild-type:" - (Fingerprint 6150019278861565482 3066802658031228162) - (Fingerprint 9124826020564520548 15629704249829132420) - (bsReplace "test-suite metric-tests:" "test-suite \"metric-tests:\"") - , mk "name: metric\nversion: 0.2.0\nsynopsis: Metric spaces.\nlicense: MIT\nlicense-file: LICENSE\nauthor: Vikram Verma\nmaintainer: me@vikramverma.com\ncategory: Data\nbuild-type:" - (Fingerprint 4639805967994715694 7859317050376284551) - (Fingerprint 5566222290622325231 873197212916959151) - (bsReplace "test-suite metric-tests:" "test-suite \"metric-tests:\"") - , mk "name: phasechange\ncategory: Data\nversion: 0.1\nauthor: G\195\161bor Lehel\nmaintainer: G\195\161bor Lehel \nhomepage: http://github.com/glehel/phasechange\ncopyright: Copyright (C) 2012 G\195\161bor Lehel\nlicense: " - (Fingerprint 10546509771395401582 245508422312751943) - (Fingerprint 5169853482576003304 7247091607933993833) - (bsReplace "impl(ghc >= 7.4):" "erroneous-section" . 
bsReplace "impl(ghc >= 7.6):" "erroneous-section") - , mk "Name: smartword\nSynopsis: Web based flash card for Word Smart I and II vocabularies\nVersion: 0.0.0.5\nHomepage: http://kyagrd.dyndns.org/~kyagrd/project/smartword/\nCategory: Web,Education\nLicense: " - (Fingerprint 7803544783533485151 10807347873998191750) - (Fingerprint 1665635316718752601 16212378357991151549) - (bsReplace "build depends:" "--") - , mk "name: shelltestrunner\n-- sync with README.md, ANNOUNCE:\nversion: 1.3\ncategory: Testing\nsynopsis: A tool for testing command-line programs.\ndescription:\n shelltestrunner is a cross-platform tool for testing command-line\n program" - (Fingerprint 4403237110790078829 15392625961066653722) - (Fingerprint 10218887328390239431 4644205837817510221) - (bsReplace "other modules:" "--") - -- &&! - -- http://hackage.haskell.org/package/hblas-0.3.0.0 - , mk "-- Initial hblas.cabal generated by cabal init. For further \n-- documentation, see http://haskell.org/cabal/users-guide/\n\n-- The name of the package.\nname: hblas\n\n-- The package version. See the Haskell package versioning policy (PVP) \n-- " - (Fingerprint 8570120150072467041 18315524331351505945) - (Fingerprint 10838007242302656005 16026440017674974175) - (bsReplace "&&!" "&& !") - , mk "-- Initial hblas.cabal generated by cabal init. For further \n-- documentation, see http://haskell.org/cabal/users-guide/\n\n-- The name of the package.\nname: hblas\n\n-- The package version. See the Haskell package versioning policy (PVP) \n-- " - (Fingerprint 5262875856214215155 10846626274067555320) - (Fingerprint 3022954285783401045 13395975869915955260) - (bsReplace "&&!" "&& !") - , mk "-- Initial hblas.cabal generated by cabal init. For further \n-- documentation, see http://haskell.org/cabal/users-guide/\n\n-- The name of the package.\nname: hblas\n\n-- The package version. 
See the Haskell package versioning policy (PVP) \n-- " - (Fingerprint 54222628930951453 5526514916844166577) - (Fingerprint 1749630806887010665 8607076506606977549) - (bsReplace "&&!" "&& !") - , mk "-- Initial hblas.cabal generated by cabal init. For further\n-- documentation, see http://haskell.org/cabal/users-guide/\n\n-- The name of the package.\nname: hblas\n\n-- The package version. See the Haskell package versioning policy (PVP)\n-- fo" - (Fingerprint 6817250511240350300 15278852712000783849) - (Fingerprint 15757717081429529536 15542551865099640223) - (bsReplace "&&!" "&& !") - , mk "-- Initial hblas.cabal generated by cabal init. For further\n-- documentation, see http://haskell.org/cabal/users-guide/\n\n-- The name of the package.\nname: hblas\n\n-- The package version. See the Haskell package versioning policy (PVP)\n-- fo" - (Fingerprint 8310050400349211976 201317952074418615) - (Fingerprint 10283381191257209624 4231947623042413334) - (bsReplace "&&!" "&& !") - , mk "-- Initial hblas.cabal generated by cabal init. For further\n-- documentation, see http://haskell.org/cabal/users-guide/\n\n-- The name of the package.\nname: hblas\n\n-- The package version. See the Haskell package versioning policy (PVP)\n-- fo" - (Fingerprint 7010988292906098371 11591884496857936132) - (Fingerprint 6158672440010710301 6419743768695725095) - (bsReplace "&&!" "&& !") - , mk "-- Initial hblas.cabal generated by cabal init. For further\r\n-- documentation, see http://haskell.org/cabal/users-guide/\r\n\r\n-- The name of the package.\r\nname: hblas\r\n\r\n-- The package version. See the Haskell package versioning policy (PVP)" - (Fingerprint 2076850805659055833 16615160726215879467) - (Fingerprint 10634706281258477722 5285812379517916984) - (bsReplace "&&!" "&& !") - , mk "-- Initial hblas.cabal generated by cabal init. For further\r\n-- documentation, see http://haskell.org/cabal/users-guide/\r\n\r\n-- The name of the package.\r\nname: hblas\r\n\r\n-- The package version. 
See the Haskell package versioning policy (PVP)" - (Fingerprint 11850020631622781099 11956481969231030830) - (Fingerprint 13702868780337762025 13383526367149067158) - (bsReplace "&&!" "&& !") - , mk "-- Initial hblas.cabal generated by cabal init. For further\n-- documentation, see http://haskell.org/cabal/users-guide/\n\n-- The name of the package.\nname: hblas\n\n-- The package version. See the Haskell package versioning policy (PVP)\n-- fo" - (Fingerprint 13690322768477779172 19704059263540994) - (Fingerprint 11189374824645442376 8363528115442591078) - (bsReplace "&&!" "&& !") - ] - where - mk a b c d = ((a, b), (c, d)) - --- | Helper to create entries in patches -_makePatchKey :: FilePath -> (BS.ByteString -> BS.ByteString) -> NoCallStackIO () -_makePatchKey fp transform = do - contents <- BS.readFile fp - let output = transform contents - let Fingerprint hi lo = md5 contents - let Fingerprint hi' lo' = md5 output - putStrLn - $ showString " , mk " - . shows (BS.take 256 contents) - . showString "\n (Fingerprint " - . shows hi - . showString " " - . shows lo - . showString ")\n (Fingerprint " - . shows hi' - . showString " " - . shows lo' - . 
showString ")" - $ "" - -------------------------------------------------------------------------------- --- Patch helpers -------------------------------------------------------------------------------- - -bsRemove - :: BS.ByteString -- ^ needle - -> BS.ByteString -> BS.ByteString -bsRemove needle haystack = case BS.breakSubstring needle haystack of - (h, t) -> BS.append h (BS.drop (BS.length needle) t) - -bsReplace - :: BS.ByteString -- ^ needle - -> BS.ByteString -- ^ replacement - -> BS.ByteString -> BS.ByteString -bsReplace needle repl haystack = case BS.breakSubstring needle haystack of - (h, t) - | not (BS.null t) -> BS.append h (BS.append repl (BS.drop (BS.length needle) t)) - | otherwise -> haystack - -bsRemoveStarting - :: BS.ByteString -- ^ needle - -> BS.ByteString -> BS.ByteString -bsRemoveStarting needle haystack = case BS.breakSubstring needle haystack of - (h, _) -> h diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription/Utils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription/Utils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription/Utils.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription/Utils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.PackageDescription.Utils --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Common utils used by modules under Distribution.PackageDescription.*. - -module Distribution.PackageDescription.Utils ( - cabalBug, userBug - ) where - --- ---------------------------------------------------------------------------- --- Exception and logging utils - -userBug :: String -> a -userBug msg = error $ msg ++ ". This is a bug in your .cabal file." 
- -cabalBug :: String -> a -cabalBug msg = error $ msg ++ ". This is possibly a bug in Cabal.\n" - ++ "Please report it to the developers: " - ++ "https://github.com/haskell/cabal/issues/new" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PackageDescription.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PackageDescription.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,139 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.PackageDescription --- Copyright : Isaac Jones 2003-2005 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Backwards compatibility reexport of everything you need to know --- about @.cabal@ files. 
- -module Distribution.PackageDescription ( - -- * Package descriptions - PackageDescription(..), - emptyPackageDescription, - specVersion, - buildType, - license, - descCabalVersion, - BuildType(..), - knownBuildTypes, - allLibraries, - - -- ** Renaming (syntactic) - ModuleRenaming(..), - defaultRenaming, - - -- ** Libraries - Library(..), - ModuleReexport(..), - emptyLibrary, - withLib, - hasPublicLib, - hasLibs, - explicitLibModules, - libModulesAutogen, - libModules, - - -- ** Executables - Executable(..), - emptyExecutable, - withExe, - hasExes, - exeModules, - exeModulesAutogen, - - -- * Tests - TestSuite(..), - TestSuiteInterface(..), - TestType(..), - testType, - knownTestTypes, - emptyTestSuite, - hasTests, - withTest, - testModules, - testModulesAutogen, - - -- * Benchmarks - Benchmark(..), - BenchmarkInterface(..), - BenchmarkType(..), - benchmarkType, - knownBenchmarkTypes, - emptyBenchmark, - hasBenchmarks, - withBenchmark, - benchmarkModules, - benchmarkModulesAutogen, - - -- * Build information - BuildInfo(..), - emptyBuildInfo, - allBuildInfo, - allLanguages, - allExtensions, - usedExtensions, - usesTemplateHaskellOrQQ, - hcOptions, - hcProfOptions, - hcSharedOptions, - hcStaticOptions, - - -- ** Supplementary build information - allBuildDepends, - enabledBuildDepends, - ComponentName(..), - defaultLibName, - HookedBuildInfo, - emptyHookedBuildInfo, - updatePackageDescription, - - -- * package configuration - GenericPackageDescription(..), - Flag(..), emptyFlag, - FlagName, mkFlagName, unFlagName, - FlagAssignment, mkFlagAssignment, unFlagAssignment, - nullFlagAssignment, showFlagValue, - diffFlagAssignment, lookupFlagAssignment, insertFlagAssignment, - dispFlagAssignment, parseFlagAssignment, parsecFlagAssignment, - findDuplicateFlagAssignments, - CondTree(..), ConfVar(..), Condition(..), - cNot, cAnd, cOr, - - -- * Source repositories - SourceRepo(..), - RepoKind(..), - RepoType(..), - knownRepoTypes, - emptySourceRepo, - - -- * Custom setup build 
information - SetupBuildInfo(..), - ) where - -import Prelude () ---import Distribution.Compat.Prelude - -import Distribution.Types.Library -import Distribution.Types.TestSuite -import Distribution.Types.Executable -import Distribution.Types.Benchmark -import Distribution.Types.TestType -import Distribution.Types.TestSuiteInterface -import Distribution.Types.BenchmarkType -import Distribution.Types.BenchmarkInterface -import Distribution.Types.ModuleRenaming -import Distribution.Types.ModuleReexport -import Distribution.Types.BuildInfo -import Distribution.Types.SetupBuildInfo -import Distribution.Types.BuildType -import Distribution.Types.GenericPackageDescription -import Distribution.Types.CondTree -import Distribution.Types.Condition -import Distribution.Types.PackageDescription -import Distribution.Types.ComponentName -import Distribution.Types.HookedBuildInfo -import Distribution.Types.SourceRepo diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Package.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Package.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Package.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Package.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,102 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Package --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Defines a package identifier along with a parser and pretty printer for it. --- 'PackageIdentifier's consist of a name and an exact version. It also defines --- a 'Dependency' data type. 
A dependency is a package name and a version --- range, like @\"foo >= 1.2 && < 2\"@. - -module Distribution.Package - ( module Distribution.Types.AbiHash - , module Distribution.Types.ComponentId - , module Distribution.Types.PackageId - , module Distribution.Types.UnitId - , module Distribution.Types.Module - , module Distribution.Types.PackageName - , module Distribution.Types.PkgconfigName - , module Distribution.Types.Dependency - , Package(..), packageName, packageVersion - , HasMungedPackageId(..), mungedName', mungedVersion' - , HasUnitId(..) - , installedPackageId - , PackageInstalled(..) - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Version - ( Version ) - -import Distribution.Types.AbiHash -import Distribution.Types.ComponentId -import Distribution.Types.Dependency -import Distribution.Types.MungedPackageId -import Distribution.Types.PackageId -import Distribution.Types.UnitId -import Distribution.Types.Module -import Distribution.Types.MungedPackageName -import Distribution.Types.PackageName -import Distribution.Types.PkgconfigName - --- | Class of things that have a 'PackageIdentifier' --- --- Types in this class are all notions of a package. This allows us to have --- different types for the different phases that packages go though, from --- simple name\/id, package description, configured or installed packages. --- --- Not all kinds of packages can be uniquely identified by a --- 'PackageIdentifier'. In particular, installed packages cannot, there may be --- many installed instances of the same source package. --- -class Package pkg where - packageId :: pkg -> PackageIdentifier - -mungedName' :: HasMungedPackageId pkg => pkg -> MungedPackageName -mungedName' = mungedName . mungedId - -mungedVersion' :: HasMungedPackageId munged => munged -> Version -mungedVersion' = mungedVersion . 
mungedId - -class HasMungedPackageId pkg where - mungedId :: pkg -> MungedPackageId - -instance Package PackageIdentifier where - packageId = id - -packageName :: Package pkg => pkg -> PackageName -packageName = pkgName . packageId - -packageVersion :: Package pkg => pkg -> Version -packageVersion = pkgVersion . packageId - -instance HasMungedPackageId MungedPackageId where - mungedId = id - --- | Packages that have an installed unit ID -class Package pkg => HasUnitId pkg where - installedUnitId :: pkg -> UnitId - -{-# DEPRECATED installedPackageId "Use installedUnitId instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} --- | Compatibility wrapper for Cabal pre-1.24. -installedPackageId :: HasUnitId pkg => pkg -> UnitId -installedPackageId = installedUnitId - --- | Class of installed packages. --- --- The primary data type which is an instance of this package is --- 'InstalledPackageInfo', but when we are doing install plans in Cabal install --- we may have other, installed package-like things which contain more metadata. --- Installed packages have exact dependencies 'installedDepends'. 
-class (HasUnitId pkg) => PackageInstalled pkg where - installedDepends :: pkg -> [UnitId] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/Class.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/Class.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/Class.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/Class.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,353 +0,0 @@ -{-# LANGUAGE GADTs #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE ScopedTypeVariables #-} -module Distribution.Parsec.Class ( - Parsec(..), - ParsecParser (..), - runParsecParser, - simpleParsec, - lexemeParsec, - eitherParsec, - explicitEitherParsec, - -- * CabalParsing & warnings - CabalParsing (..), - PWarnType (..), - -- * Utilities - parsecToken, - parsecToken', - parsecFilePath, - parsecQuoted, - parsecMaybeQuoted, - parsecCommaList, - parsecLeadingCommaList, - parsecOptCommaList, - parsecStandard, - parsecUnqualComponentName, - ) where - -import Data.Char (digitToInt, intToDigit) -import Data.Functor.Identity (Identity (..)) -import Data.List (transpose) -import Distribution.CabalSpecVersion -import Distribution.Compat.Prelude -import Distribution.Parsec.FieldLineStream -import Distribution.Parsec.Common (PWarnType (..), PWarning (..), Position (..)) -import Numeric (showIntAtBase) -import Prelude () - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.MonadFail as Fail -import qualified Distribution.Compat.ReadP as ReadP -import qualified Text.Parsec as Parsec - -------------------------------------------------------------------------------- --- Class -------------------------------------------------------------------------------- - --- | Class for parsing with @parsec@. Mainly used for @.cabal@ file fields. 
-class Parsec a where - parsec :: CabalParsing m => m a - --- | Parsing class which --- --- * can report Cabal parser warnings. --- --- * knows @cabal-version@ we work with --- -class (P.CharParsing m, MonadPlus m) => CabalParsing m where - parsecWarning :: PWarnType -> String -> m () - - parsecHaskellString :: m String - parsecHaskellString = stringLiteral - - askCabalSpecVersion :: m CabalSpecVersion - -instance t ~ Char => CabalParsing (ReadP.Parser r t) where - parsecWarning _ _ = pure () - askCabalSpecVersion = pure cabalSpecLatest - --- | 'parsec' /could/ consume trailing spaces, this function /will/ consume. -lexemeParsec :: (CabalParsing m, Parsec a) => m a -lexemeParsec = parsec <* P.spaces - -newtype ParsecParser a = PP { unPP - :: CabalSpecVersion -> Parsec.Parsec FieldLineStream [PWarning] a - } - -liftParsec :: Parsec.Parsec FieldLineStream [PWarning] a -> ParsecParser a -liftParsec p = PP $ \_ -> p - -instance Functor ParsecParser where - fmap f p = PP $ \v -> fmap f (unPP p v) - {-# INLINE fmap #-} - - x <$ p = PP $ \v -> x <$ unPP p v - {-# INLINE (<$) #-} - -instance Applicative ParsecParser where - pure = liftParsec . 
pure - {-# INLINE pure #-} - - f <*> x = PP $ \v -> unPP f v <*> unPP x v - {-# INLINE (<*>) #-} - f *> x = PP $ \v -> unPP f v *> unPP x v - {-# INLINE (*>) #-} - f <* x = PP $ \v -> unPP f v <* unPP x v - {-# INLINE (<*) #-} - -instance Alternative ParsecParser where - empty = liftParsec empty - - a <|> b = PP $ \v -> unPP a v <|> unPP b v - {-# INLINE (<|>) #-} - - many p = PP $ \v -> many (unPP p v) - {-# INLINE many #-} - - some p = PP $ \v -> some (unPP p v) - {-# INLINE some #-} - -instance Monad ParsecParser where - return = pure - - m >>= k = PP $ \v -> unPP m v >>= \x -> unPP (k x) v - {-# INLINE (>>=) #-} - (>>) = (*>) - {-# INLINE (>>) #-} - - fail = Fail.fail - -instance MonadPlus ParsecParser where - mzero = empty - mplus = (<|>) - -instance Fail.MonadFail ParsecParser where - fail = P.unexpected - -instance P.Parsing ParsecParser where - try p = PP $ \v -> P.try (unPP p v) - p d = PP $ \v -> unPP p v P. d - skipMany p = PP $ \v -> P.skipMany (unPP p v) - skipSome p = PP $ \v -> P.skipSome (unPP p v) - unexpected = liftParsec . P.unexpected - eof = liftParsec P.eof - notFollowedBy p = PP $ \v -> P.notFollowedBy (unPP p v) - -instance P.CharParsing ParsecParser where - satisfy = liftParsec . P.satisfy - char = liftParsec . P.char - notChar = liftParsec . P.notChar - anyChar = liftParsec P.anyChar - string = liftParsec . P.string - -instance CabalParsing ParsecParser where - parsecWarning t w = liftParsec $ Parsec.modifyState (PWarning t (Position 0 0) w :) - askCabalSpecVersion = PP pure - --- | Parse a 'String' with 'lexemeParsec'. -simpleParsec :: Parsec a => String -> Maybe a -simpleParsec - = either (const Nothing) Just - . runParsecParser lexemeParsec "" - . fieldLineStreamFromString - --- | Parse a 'String' with 'lexemeParsec'. -eitherParsec :: Parsec a => String -> Either String a -eitherParsec = explicitEitherParsec parsec - --- | Parse a 'String' with given 'ParsecParser'. Trailing whitespace is accepted. 
-explicitEitherParsec :: ParsecParser a -> String -> Either String a -explicitEitherParsec parser - = either (Left . show) Right - . runParsecParser (parser <* P.spaces) "" - . fieldLineStreamFromString - --- | Run 'ParsecParser' with 'cabalSpecLatest'. -runParsecParser :: ParsecParser a -> FilePath -> FieldLineStream -> Either Parsec.ParseError a -runParsecParser p n = Parsec.runParser (unPP p cabalSpecLatest <* P.eof) [] n - -instance Parsec a => Parsec (Identity a) where - parsec = Identity <$> parsec - -instance Parsec Bool where - parsec = P.munch1 isAlpha >>= postprocess - where - postprocess str - | str == "True" = pure True - | str == "False" = pure False - | lstr == "true" = parsecWarning PWTBoolCase caseWarning *> pure True - | lstr == "false" = parsecWarning PWTBoolCase caseWarning *> pure False - | otherwise = fail $ "Not a boolean: " ++ str - where - lstr = map toLower str - caseWarning = - "Boolean values are case sensitive, use 'True' or 'False'." - --- | @[^ ,]@ -parsecToken :: CabalParsing m => m String -parsecToken = parsecHaskellString <|> ((P.munch1 (\x -> not (isSpace x) && x /= ',') P. "identifier" ) >>= checkNotDoubleDash) - --- | @[^ ]@ -parsecToken' :: CabalParsing m => m String -parsecToken' = parsecHaskellString <|> ((P.munch1 (not . isSpace) P. "token") >>= checkNotDoubleDash) - -checkNotDoubleDash :: CabalParsing m => String -> m String -checkNotDoubleDash s = do - when (s == "--") $ parsecWarning PWTDoubleDash $ unwords - [ "Double-dash token found." - , "Note: there are no end-of-line comments in .cabal files, only whole line comments." - , "Use \"--\" (quoted double dash) to silence this warning, if you actually want -- token" - ] - - return s - -parsecFilePath :: CabalParsing m => m FilePath -parsecFilePath = parsecToken - --- | Parse a benchmark/test-suite types. 
-parsecStandard :: (CabalParsing m, Parsec ver) => (ver -> String -> a) -> m a -parsecStandard f = do - cs <- some $ P.try (component <* P.char '-') - ver <- parsec - let name = map toLower (intercalate "-" cs) - return $! f ver name - where - component = do - cs <- P.munch1 isAlphaNum - if all isDigit cs then fail "all digit component" else return cs - -- each component must contain an alphabetic character, to avoid - -- ambiguity in identifiers like foo-1 (the 1 is the version number). - -parsecCommaList :: CabalParsing m => m a -> m [a] -parsecCommaList p = P.sepBy (p <* P.spaces) (P.char ',' *> P.spaces P. "comma") - --- | Like 'parsecCommaList' but accept leading or trailing comma. --- --- @ --- p (comma p)* -- p `sepBy` comma --- (comma p)* -- leading comma --- (p comma)* -- trailing comma --- @ -parsecLeadingCommaList :: CabalParsing m => m a -> m [a] -parsecLeadingCommaList p = do - c <- P.optional comma - case c of - Nothing -> P.sepEndBy1 lp comma <|> pure [] - Just _ -> P.sepBy1 lp comma - where - lp = p <* P.spaces - comma = P.char ',' *> P.spaces P. "comma" - -parsecOptCommaList :: CabalParsing m => m a -> m [a] -parsecOptCommaList p = P.sepBy (p <* P.spaces) (P.optional comma) - where - comma = P.char ',' *> P.spaces - --- | Content isn't unquoted -parsecQuoted :: CabalParsing m => m a -> m a -parsecQuoted = P.between (P.char '"') (P.char '"') - --- | @parsecMaybeQuoted p = 'parsecQuoted' p <|> p@. -parsecMaybeQuoted :: CabalParsing m => m a -> m a -parsecMaybeQuoted p = parsecQuoted p <|> p - -parsecUnqualComponentName :: CabalParsing m => m String -parsecUnqualComponentName = intercalate "-" <$> P.sepBy1 component (P.char '-') - where - component :: CabalParsing m => m String - component = do - cs <- P.munch1 isAlphaNum - if all isDigit cs - then fail "all digits in portion of unqualified component name" - else return cs - -stringLiteral :: forall m. 
P.CharParsing m => m String -stringLiteral = lit where - lit :: m String - lit = foldr (maybe id (:)) "" - <$> P.between (P.char '"') (P.char '"' P. "end of string") (many stringChar) - P. "string" - - stringChar :: m (Maybe Char) - stringChar = Just <$> stringLetter - <|> stringEscape - P. "string character" - - stringLetter :: m Char - stringLetter = P.satisfy (\c -> (c /= '"') && (c /= '\\') && (c > '\026')) - - stringEscape :: m (Maybe Char) - stringEscape = P.char '\\' *> esc where - esc :: m (Maybe Char) - esc = Nothing <$ escapeGap - <|> Nothing <$ escapeEmpty - <|> Just <$> escapeCode - - escapeEmpty, escapeGap :: m Char - escapeEmpty = P.char '&' - escapeGap = P.skipSpaces1 *> (P.char '\\' P. "end of string gap") - -escapeCode :: forall m. P.CharParsing m => m Char -escapeCode = (charEsc <|> charNum <|> charAscii <|> charControl) P. "escape code" - where - charControl, charNum :: m Char - charControl = (\c -> toEnum (fromEnum c - fromEnum '@')) <$> (P.char '^' *> (P.upper <|> P.char '@')) - charNum = toEnum <$> num - where - num :: m Int - num = bounded 10 maxchar - <|> (P.char 'o' *> bounded 8 maxchar) - <|> (P.char 'x' *> bounded 16 maxchar) - maxchar = fromEnum (maxBound :: Char) - - bounded :: Int -> Int -> m Int - bounded base bnd = foldl' (\x d -> base * x + digitToInt d) 0 - <$> bounded' (take base thedigits) (map digitToInt $ showIntAtBase base intToDigit bnd "") - where - thedigits :: [m Char] - thedigits = map P.char ['0'..'9'] ++ map P.oneOf (transpose [['A'..'F'],['a'..'f']]) - - toomuch :: m a - toomuch = P.unexpected "out-of-range numeric escape sequence" - - bounded', bounded'' :: [m Char] -> [Int] -> m [Char] - bounded' dps@(zero:_) bds = P.skipSome zero *> ([] <$ P.notFollowedBy (P.choice dps) <|> bounded'' dps bds) - <|> bounded'' dps bds - bounded' [] _ = error "bounded called with base 0" - bounded'' dps [] = [] <$ P.notFollowedBy (P.choice dps) <|> toomuch - bounded'' dps (bd : bds) = let anyd :: m Char - anyd = P.choice dps - - nomore 
:: m () - nomore = P.notFollowedBy anyd <|> toomuch - - (low, ex : high) = splitAt bd dps - in ((:) <$> P.choice low <*> atMost (length bds) anyd) <* nomore - <|> ((:) <$> ex <*> ([] <$ nomore <|> bounded'' dps bds)) - <|> if not (null bds) - then (:) <$> P.choice high <*> atMost (length bds - 1) anyd <* nomore - else empty - atMost n p | n <= 0 = pure [] - | otherwise = ((:) <$> p <*> atMost (n - 1) p) <|> pure [] - - charEsc :: m Char - charEsc = P.choice $ parseEsc <$> escMap - - parseEsc (c,code) = code <$ P.char c - escMap = zip "abfnrtv\\\"\'" "\a\b\f\n\r\t\v\\\"\'" - - charAscii :: m Char - charAscii = P.choice $ parseAscii <$> asciiMap - - parseAscii (asc,code) = P.try $ code <$ P.string asc - asciiMap = zip (ascii3codes ++ ascii2codes) (ascii3 ++ ascii2) - ascii2codes, ascii3codes :: [String] - ascii2codes = [ "BS","HT","LF","VT","FF","CR","SO" - , "SI","EM","FS","GS","RS","US","SP"] - ascii3codes = ["NUL","SOH","STX","ETX","EOT","ENQ","ACK" - ,"BEL","DLE","DC1","DC2","DC3","DC4","NAK" - ,"SYN","ETB","CAN","SUB","ESC","DEL"] - ascii2, ascii3 :: String - ascii2 = "\BS\HT\LF\VT\FF\CR\SO\SI\EM\FS\GS\RS\US\SP" - ascii3 = "\NUL\SOH\STX\ETX\EOT\ENQ\ACK\BEL\DLE\DC1\DC2\DC3\DC4\NAK\SYN\ETB\CAN\SUB\ESC\DEL" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/Common.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/Common.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/Common.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/Common.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,100 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} --- | Module containing small types -module Distribution.Parsec.Common ( - -- * Diagnostics - PError (..), - showPError, - PWarning (..), - PWarnType (..), - showPWarning, - -- * Position - Position (..), - incPos, - retPos, - showPos, - zeroPos, - ) where - 
-import Distribution.Compat.Prelude -import Prelude () -import System.FilePath (normalise) - --- | Parser error. -data PError = PError Position String - deriving (Show, Generic) - -instance Binary PError -instance NFData PError where rnf = genericRnf - --- | Type of parser warning. We do classify warnings. --- --- Different application may decide not to show some, or have fatal behaviour on others -data PWarnType - = PWTOther -- ^ Unclassified warning - | PWTUTF -- ^ Invalid UTF encoding - | PWTBoolCase -- ^ @true@ or @false@, not @True@ or @False@ - | PWTVersionTag -- ^ there are version with tags - | PWTNewSyntax -- ^ New syntax used, but no @cabal-version: >= 1.2@ specified - | PWTOldSyntax -- ^ Old syntax used, and @cabal-version >= 1.2@ specified - | PWTDeprecatedField - | PWTInvalidSubsection - | PWTUnknownField - | PWTUnknownSection - | PWTTrailingFields - | PWTExtraMainIs -- ^ extra main-is field - | PWTExtraTestModule -- ^ extra test-module field - | PWTExtraBenchmarkModule -- ^ extra benchmark-module field - | PWTLexNBSP - | PWTLexBOM - | PWTLexTab - | PWTQuirkyCabalFile -- ^ legacy cabal file that we know how to patch - | PWTDoubleDash -- ^ Double dash token, most likely it's a mistake - it's not a comment - | PWTMultipleSingularField -- ^ e.g. name or version should be specified only once. - | PWTBuildTypeDefault -- ^ Workaround for derive-package having build-type: Default. See . - | PWTVersionLeadingZeros -- ^ See https://github.com/haskell-infra/hackage-trustees/issues/128 - deriving (Eq, Ord, Show, Enum, Bounded, Generic) - -instance Binary PWarnType -instance NFData PWarnType where rnf = genericRnf - --- | Parser warning. 
-data PWarning = PWarning !PWarnType !Position String - deriving (Show, Generic) - -instance Binary PWarning -instance NFData PWarning where rnf = genericRnf - -showPWarning :: FilePath -> PWarning -> String -showPWarning fpath (PWarning _ pos msg) = - normalise fpath ++ ":" ++ showPos pos ++ ": " ++ msg - -showPError :: FilePath -> PError -> String -showPError fpath (PError pos msg) = - normalise fpath ++ ":" ++ showPos pos ++ ": " ++ msg - -------------------------------------------------------------------------------- --- Position -------------------------------------------------------------------------------- - --- | 1-indexed row and column positions in a file. -data Position = Position - {-# UNPACK #-} !Int -- row - {-# UNPACK #-} !Int -- column - deriving (Eq, Ord, Show, Generic) - -instance Binary Position -instance NFData Position where rnf = genericRnf - --- | Shift position by n columns to the right. -incPos :: Int -> Position -> Position -incPos n (Position row col) = Position row (col + n) - --- | Shift position to beginning of next row. 
-retPos :: Position -> Position -retPos (Position row _col) = Position (row + 1) 1 - -showPos :: Position -> String -showPos (Position row col) = show row ++ ":" ++ show col - -zeroPos :: Position -zeroPos = Position 0 0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/ConfVar.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/ConfVar.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/ConfVar.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/ConfVar.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,124 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} -module Distribution.Parsec.ConfVar (parseConditionConfVar) where - -import Distribution.Compat.CharParsing (char, integral) -import Distribution.Compat.Prelude -import Distribution.Parsec.Class (Parsec (..), runParsecParser) -import Distribution.Parsec.Common -import Distribution.Parsec.FieldLineStream -import Distribution.Parsec.Field (SectionArg (..)) -import Distribution.Parsec.ParseResult -import Distribution.Types.Condition -import Distribution.Types.GenericPackageDescription (ConfVar (..)) -import Distribution.Version - (anyVersion, earlierVersion, intersectVersionRanges, laterVersion, majorBoundVersion, - mkVersion, noVersion, orEarlierVersion, orLaterVersion, thisVersion, unionVersionRanges, - withinVersion) -import Prelude () - -import qualified Text.Parsec as P -import qualified Text.Parsec.Error as P - --- | Parse @'Condition' 'ConfVar'@ from section arguments provided by parsec --- based outline parser. -parseConditionConfVar :: [SectionArg Position] -> ParseResult (Condition ConfVar) -parseConditionConfVar args = - -- The name of the input file is irrelevant, as we reformat the error message. 
- case P.runParser (parser <* P.eof) () "" args of - Right x -> pure x - Left err -> do - -- Mangle the position to the actual one - let ppos = P.errorPos err - let epos = Position (P.sourceLine ppos) (P.sourceColumn ppos) - let msg = P.showErrorMessages - "or" "unknown parse error" "expecting" "unexpected" "end of input" - (P.errorMessages err) - parseFailure epos msg - pure $ Lit True - -type Parser = P.Parsec [SectionArg Position] () - -parser :: Parser (Condition ConfVar) -parser = condOr - where - condOr = P.sepBy1 condAnd (oper "||") >>= return . foldl1 COr - condAnd = P.sepBy1 cond (oper "&&") >>= return . foldl1 CAnd - cond = P.choice - [ boolLiteral, parens condOr, notCond, osCond, archCond, flagCond, implCond ] - - notCond = CNot <$ oper "!" <*> cond - - boolLiteral = Lit <$> boolLiteral' - osCond = Var . OS <$ string "os" <*> parens fromParsec - flagCond = Var . Flag <$ string "flag" <*> parens fromParsec - archCond = Var . Arch <$ string "arch" <*> parens fromParsec - implCond = Var <$ string "impl" <*> parens implCond' - - implCond' = Impl - <$> fromParsec - <*> P.option anyVersion versionRange - - version = fromParsec - versionStar = mkVersion <$> fromParsec' versionStar' <* oper "*" - versionStar' = some (integral <* char '.') - - versionRange = expr - where - expr = foldl1 unionVersionRanges <$> P.sepBy1 term (oper "||") - term = foldl1 intersectVersionRanges <$> P.sepBy1 factor (oper "&&") - - factor = P.choice - $ parens expr - : parseAnyVersion - : parseNoVersion - : parseWildcardRange - : map parseRangeOp rangeOps - - parseAnyVersion = anyVersion <$ string "-any" - parseNoVersion = noVersion <$ string "-none" - - parseWildcardRange = P.try $ withinVersion <$ oper "==" <*> versionStar - - parseRangeOp (s,f) = P.try (f <$ oper s <*> version) - rangeOps = [ ("<", earlierVersion), - ("<=", orEarlierVersion), - (">", laterVersion), - (">=", orLaterVersion), - ("^>=", majorBoundVersion), - ("==", thisVersion) ] - - -- Number token can have many dots 
in it: SecArgNum (Position 65 15) "7.6.1" - identBS = tokenPrim $ \t -> case t of - SecArgName _ s -> Just s - _ -> Nothing - - boolLiteral' = tokenPrim $ \t -> case t of - SecArgName _ s - | s == "True" -> Just True - | s == "true" -> Just True - | s == "False" -> Just False - | s == "false" -> Just False - _ -> Nothing - - string s = tokenPrim $ \t -> case t of - SecArgName _ s' | s == s' -> Just () - _ -> Nothing - - oper o = tokenPrim $ \t -> case t of - SecArgOther _ o' | o == o' -> Just () - _ -> Nothing - - parens = P.between (oper "(") (oper ")") - - tokenPrim = P.tokenPrim prettySectionArg updatePosition - -- TODO: check where the errors are reported - updatePosition x _ _ = x - prettySectionArg = show - - fromParsec :: Parsec a => Parser a - fromParsec = fromParsec' parsec - - fromParsec' p = do - bs <- identBS - let fls = fieldLineStreamFromBS bs - either (fail . show) pure (runParsecParser p "" fls) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/Field.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/Field.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/Field.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/Field.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,96 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} --- | Cabal-like file AST types: 'Field', 'Section' etc --- --- These types are parametrized by an annotation. 
-module Distribution.Parsec.Field ( - -- * Cabal file - Field (..), - fieldName, - fieldAnn, - fieldUniverse, - FieldLine (..), - SectionArg (..), - sectionArgAnn, - -- * Name - FieldName, - Name (..), - mkName, - getName, - nameAnn, - ) where - -import Prelude () -import Distribution.Compat.Prelude -import Data.ByteString (ByteString) -import qualified Data.ByteString.Char8 as B -import qualified Data.Char as Char - -------------------------------------------------------------------------------- --- Cabal file -------------------------------------------------------------------------------- - --- | A Cabal-like file consists of a series of fields (@foo: bar@) and sections (@library ...@). -data Field ann - = Field !(Name ann) [FieldLine ann] - | Section !(Name ann) [SectionArg ann] [Field ann] - deriving (Eq, Show, Functor) - --- | Section of field name -fieldName :: Field ann -> Name ann -fieldName (Field n _ ) = n -fieldName (Section n _ _) = n - -fieldAnn :: Field ann -> ann -fieldAnn = nameAnn . fieldName - --- | All transitive descendands of 'Field', including itself. --- --- /Note:/ the resulting list is never empty. --- -fieldUniverse :: Field ann -> [Field ann] -fieldUniverse f@(Section _ _ fs) = f : concatMap fieldUniverse fs -fieldUniverse f@(Field _ _) = [f] - --- | A line of text representing the value of a field from a Cabal file. --- A field may contain multiple lines. --- --- /Invariant:/ 'ByteString' has no newlines. -data FieldLine ann = FieldLine !ann !ByteString - deriving (Eq, Show, Functor) - --- | Section arguments, e.g. name of the library -data SectionArg ann - = SecArgName !ann !ByteString - -- ^ identifier, or omething which loos like number. Also many dot numbers, i.e. "7.6.3" - | SecArgStr !ann !ByteString - -- ^ quoted string - | SecArgOther !ann !ByteString - -- ^ everything else, mm. operators (e.g. in if-section conditionals) - deriving (Eq, Show, Functor) - --- | Extract annotation from 'SectionArg'. 
-sectionArgAnn :: SectionArg ann -> ann -sectionArgAnn (SecArgName ann _) = ann -sectionArgAnn (SecArgStr ann _) = ann -sectionArgAnn (SecArgOther ann _) = ann - -------------------------------------------------------------------------------- --- Name -------------------------------------------------------------------------------- - -type FieldName = ByteString - --- | A field name. --- --- /Invariant/: 'ByteString' is lower-case ASCII. -data Name ann = Name !ann !FieldName - deriving (Eq, Show, Functor) - -mkName :: ann -> FieldName -> Name ann -mkName ann bs = Name ann (B.map Char.toLower bs) - -getName :: Name ann -> FieldName -getName (Name _ bs) = bs - -nameAnn :: Name ann -> ann -nameAnn (Name ann _) = ann diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/FieldLineStream.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/FieldLineStream.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/FieldLineStream.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/FieldLineStream.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,96 +0,0 @@ -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE OverloadedStrings , ScopedTypeVariables #-} -{-# OPTIONS_GHC -Wall -Werror #-} -module Distribution.Parsec.FieldLineStream ( - FieldLineStream (..), - fieldLinesToStream, - fieldLineStreamFromString, - fieldLineStreamFromBS, - ) where - -import Data.Bits -import Data.ByteString (ByteString) -import Distribution.Compat.Prelude -import Distribution.Parsec.Field (FieldLine (..)) -import Distribution.Utils.Generic (toUTF8BS) -import Prelude () - -import qualified Data.ByteString as BS -import qualified Text.Parsec as Parsec - --- | This is essentially a lazy bytestring, but chunks are glued with newline '\n'. 
-data FieldLineStream - = FLSLast !ByteString - | FLSCons {-# UNPACK #-} !ByteString FieldLineStream - deriving Show - -fieldLinesToStream :: [FieldLine ann] -> FieldLineStream -fieldLinesToStream [] = end -fieldLinesToStream [FieldLine _ bs] = FLSLast bs -fieldLinesToStream (FieldLine _ bs : fs) = FLSCons bs (fieldLinesToStream fs) - -end :: FieldLineStream -end = FLSLast "" - --- | Convert 'String' to 'FieldLineStream'. --- --- /Note:/ inefficient! -fieldLineStreamFromString :: String -> FieldLineStream -fieldLineStreamFromString = FLSLast . toUTF8BS - -fieldLineStreamFromBS :: ByteString -> FieldLineStream -fieldLineStreamFromBS = FLSLast - -instance Monad m => Parsec.Stream FieldLineStream m Char where - uncons (FLSLast bs) = return $ case BS.uncons bs of - Nothing -> Nothing - Just (c, bs') -> Just (unconsChar c bs' (\bs'' -> FLSLast bs'') end) - - uncons (FLSCons bs s) = return $ case BS.uncons bs of - -- as lines are glued with '\n', we return '\n' here! - Nothing -> Just ('\n', s) - Just (c, bs') -> Just (unconsChar c bs' (\bs'' -> FLSCons bs'' s) s) - --- Bssed on implementation 'decodeStringUtf8' -unconsChar :: forall a. Word8 -> ByteString -> (ByteString -> a) -> a -> (Char, a) -unconsChar c0 bs0 f next - | c0 <= 0x7F = (chr (fromIntegral c0), f bs0) - | c0 <= 0xBF = (replacementChar, f bs0) - | c0 <= 0xDF = twoBytes - | c0 <= 0xEF = moreBytes 3 0x800 bs0 (fromIntegral $ c0 .&. 0xF) - | c0 <= 0xF7 = moreBytes 4 0x10000 bs0 (fromIntegral $ c0 .&. 0x7) - | c0 <= 0xFB = moreBytes 5 0x200000 bs0 (fromIntegral $ c0 .&. 0x3) - | c0 <= 0xFD = moreBytes 6 0x4000000 bs0 (fromIntegral $ c0 .&. 0x1) - | otherwise = error $ "not implemented " ++ show c0 - where - twoBytes = case BS.uncons bs0 of - Nothing -> (replacementChar, next) - Just (c1, bs1) - | c1 .&. 0xC0 == 0x80 -> - if d >= 0x80 - then (chr d, f bs1) - else (replacementChar, f bs1) - | otherwise -> (replacementChar, f bs1) - where - d = (fromIntegral (c0 .&. 0x1F) `shiftL` 6) .|. fromIntegral (c1 .&. 
0x3F) - - moreBytes :: Int -> Int -> ByteString -> Int -> (Char, a) - moreBytes 1 overlong bs' acc - | overlong <= acc, acc <= 0x10FFFF, acc < 0xD800 || 0xDFFF < acc - = (chr acc, f bs') - | otherwise - = (replacementChar, f bs') - - moreBytes byteCount overlong bs' acc = case BS.uncons bs' of - Nothing -> (replacementChar, f bs') - Just (cn, bs1) - | cn .&. 0xC0 == 0x80 -> moreBytes - (byteCount-1) - overlong - bs1 - ((acc `shiftL` 6) .|. fromIntegral cn .&. 0x3F) - | otherwise -> (replacementChar, f bs1) - -replacementChar :: Char -replacementChar = '\xfffd' diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/Lexer.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/Lexer.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/Lexer.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/Lexer.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,422 +0,0 @@ -{-# OPTIONS_GHC -fno-warn-unused-binds -fno-warn-missing-signatures #-} -{-# LANGUAGE CPP,MagicHash #-} -{-# LINE 1 "boot/Lexer.x" #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Parsec.Lexer --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Lexer for the cabal files. -{-# LANGUAGE CPP #-} -{-# LANGUAGE BangPatterns #-} -#ifdef CABAL_PARSEC_DEBUG -{-# LANGUAGE PatternGuards #-} -#endif -{-# OPTIONS_GHC -fno-warn-unused-imports #-} -module Distribution.Parsec.Lexer - (ltest, lexToken, Token(..), LToken(..) - ,bol_section, in_section, in_field_layout, in_field_braces - ,mkLexState) where - --- [Note: boostrapping parsec parser] --- --- We manually produce the `Lexer.hs` file from `boot/Lexer.x` (make lexer) --- because boostrapping cabal-install would be otherwise tricky. 
--- Alex is (atm) tricky package to build, cabal-install has some magic --- to move bundled generated files in place, so rather we don't depend --- on it before we can build it ourselves. --- Therefore there is one thing less to worry in bootstrap.sh, which is a win. --- --- See also https://github.com/haskell/cabal/issues/4633 --- - -import Prelude () -import qualified Prelude as Prelude -import Distribution.Compat.Prelude - -import Distribution.Parsec.LexerMonad -import Distribution.Parsec.Common (Position (..), incPos, retPos) -import Data.ByteString (ByteString) -import qualified Data.ByteString as B -import qualified Data.ByteString.Char8 as B.Char8 -import qualified Data.Word as Word - -#ifdef CABAL_PARSEC_DEBUG -import Debug.Trace -import qualified Data.Vector as V -import qualified Data.Text as T -import qualified Data.Text.Encoding as T -import qualified Data.Text.Encoding.Error as T -#endif - -#if __GLASGOW_HASKELL__ >= 603 -#include "ghcconfig.h" -#elif defined(__GLASGOW_HASKELL__) -#include "config.h" -#endif -#if __GLASGOW_HASKELL__ >= 503 -import Data.Array -import Data.Array.Base (unsafeAt) -#else -import Array -#endif -#if __GLASGOW_HASKELL__ >= 503 -import GHC.Exts -#else -import GlaExts -#endif -alex_tab_size :: Int -alex_tab_size = 8 -alex_base :: AlexAddr -alex_base = AlexA# 
"\x12\xff\xff\xff\xf9\xff\xff\xff\xfb\xff\xff\xff\x01\x00\x00\x00\x2f\x00\x00\x00\x50\x00\x00\x00\xd0\x00\x00\x00\x48\xff\xff\xff\xdc\xff\xff\xff\x51\xff\xff\xff\x6d\xff\xff\xff\x6f\xff\xff\xff\x50\x01\x00\x00\x74\x01\x00\x00\x70\xff\xff\xff\x68\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\xa3\x01\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x6a\x00\x00\x00\xd1\x01\x00\x00\xfb\x01\x00\x00\x7b\x02\x00\x00\xfb\x02\x00\x00\x00\x00\x00\x00\x7b\x03\x00\x00\x7d\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0d\x00\x00\x00\x6d\x00\x00\x00\x6b\x00\x00\x00\xfc\x03\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x6f\x00\x00\x00\x1c\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x12\x00\x00\x00"# - -alex_table :: AlexAddr -alex_table = AlexA# "\x00\x00\x09\x00\x0f\x00\x11\x00\x02\x00\x11\x00\x12\x00\x00\x00\x12\x00\x13\x00\x03\x00\x11\x00\x07\x00\x10\x00\x12\x00\x25\x00\x14\x00\x11\x00\x10\x00\x11\x00\x14\x00\x11\x00\x12\x00\x23\x00\x12\x00\x0f\x00\x28\x00\x02\x00\x2e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x08\x00\x10\x00\x00\x00\x14\x00\x00\x00\x00\x00\x08\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2a\x00\x2e\x00\xff\xff\xff\xff\x2f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2a\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x26\x00\x28\x00\xff\xff\xff\xff\x29\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x26\x00\x0f\x00\x11\x00\x17\x00\x26\x00\x12\x00\x25\x00\x11\x00\x2a\x00\x00\x00\x12\x00\x00\x00\x15\x00\x00\x00\x16\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x17\x00\x26\x00\x00\x00\x25\x00\x00\x00\x2a\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x2c\x00\x00\x00\x2d\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00\x0e\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x17\x00\x23\x00\xff\xff\xff\xff\x24\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x17\x00\x1e\x00\x0d\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\x1f\x00\x1f\x00\x1e\x00\x1e\x00\x1e\x00\x19\x00\x1a\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x20\x00\x00\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0a\x00\x1f\x00\x1e\x00\x1f\x00\x1e\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x21\x00\x1e\x00\x22\x00\x1e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x1d\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff
\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x1c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x0c\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x0c\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x1e\x00\xff\xff\x1e\x00\x1e\x00\x1e\x00\x1e\x00\xff\xff\xff\xff\xff\xff\x1e\x00\x1e\x00\x1e\x00\x18\x00\x1a\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x1e\x00\xff\xff\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x1e\x00\xff\xff\x1e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff
\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x1e\x00\xff\xff\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\xff\xff\xff\xff\x1e\x00\x1e\x00\x1e\x00\x1a\x00\x1a\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x1e\x00\xff\xff\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x1e\x00\xff\xff\x1e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff
\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x1c\x00\x1e\x00\x00\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x1e\x00\x00\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1e\x00\xff\xff\x1e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff
\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"# - -alex_check :: AlexAddr -alex_check = AlexA# 
"\xff\xff\xef\x00\x09\x00\x0a\x00\x09\x00\x0a\x00\x0d\x00\xbf\x00\x0d\x00\x2d\x00\x09\x00\x0a\x00\xbb\x00\xa0\x00\x0d\x00\xa0\x00\xa0\x00\x0a\x00\x09\x00\x0a\x00\x09\x00\x0a\x00\x0d\x00\x0a\x00\x0d\x00\x20\x00\x0a\x00\x20\x00\x0a\x00\xff\xff\xff\xff\xff\xff\xff\xff\x20\x00\xff\xff\xff\xff\xff\xff\xff\xff\x2d\x00\xff\xff\x2d\x00\x20\x00\xff\xff\x20\x00\xff\xff\xff\xff\x2d\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\x20\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\x20\x00\x09\x00\x0a\x00\x09\x00\x09\x00\x0d\x00\x09\x00\x0a\x00\x09\x00\xff\xff\x0d\x00\xff\xff\x7b\x00\xff\xff\x7d\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x20\x00\xff\xff\x20\x00\x20\x00\xff\xff\x20\x00\xff\xff\x20\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2d\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7b\x00\xff\xff\x7d\x00\xff\xff\x7f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc2\x00\xff\xff\xc2\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc2\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc2\x00\xff\xff\xc2\x00\xff\xff\x7f\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\x20\x00\x21\x00\x22\x00\x23\x00\x24\x00\x25\x00\x26\x00\xff\xff\x28\x00\x29\x0
0\x2a\x00\x2b\x00\x2c\x00\x2d\x00\x2e\x00\x2f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3a\x00\xff\xff\x3c\x00\x3d\x00\x3e\x00\x3f\x00\x40\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc2\x00\x5b\x00\x5c\x00\x5d\x00\x5e\x00\xc2\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7b\x00\x7c\x00\x7d\x00\x7e\x00\x7f\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\xff\xff\xff\xff\x22\x00\xff\xff\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\xff\xff\xff\xff\x22\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x5c\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7f\x00\x5c\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\xff\xff\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\xff\xff\xff\xff\x7f\x0
0\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\x20\x00\x21\x00\x22\x00\x23\x00\x24\x00\x25\x00\x26\x00\x7f\x00\x28\x00\x29\x00\x2a\x00\x2b\x00\x2c\x00\x2d\x00\x2e\x00\x2f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3a\x00\xff\xff\x3c\x00\x3d\x00\x3e\x00\x3f\x00\x40\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x5b\x00\x5c\x00\x5d\x00\x5e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7b\x00\x7c\x00\x7d\x00\x7e\x00\x7f\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\x20\x00\x21\x00\x22\x00\x23\x00\x24\x00\x25\x00\x26\x00\xff\xff\x28\x00\x29\x00\x2a\x00\x2b\x00\x2c\x00\x2d\x00\x2e\x00\x2f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3a\x00\xff\xff\x3c\x00\x3d\x00\x3e\x00\x3f\x00\x40\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x5b\x00\x5c\x00\x5d\x00\x5e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf
f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7b\x00\x7c\x00\x7d\x00\x7e\x00\x7f\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\x20\x00\x21\x00\x22\x00\x23\x00\x24\x00\x25\x00\x26\x00\xff\xff\x28\x00\x29\x00\x2a\x00\x2b\x00\x2c\x00\xff\xff\xff\xff\x2f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3a\x00\xff\xff\x3c\x00\x3d\x00\x3e\x00\x3f\x00\x40\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x5b\x00\x5c\x00\x5d\x00\x5e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7b\x00\x7c\x00\x7d\x00\x7e\x00\x7f\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\xff\xff\xff\xff\x22\x00\x21\x00\xff\xff\x23\x00\x24\x00\x25\x00\x26\x00\xff\xff\xff\xff\xff\xff\x2a\x00\x2b\x00\x2c\x00\x2d\x00\x2e\x00\x2f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3c\x00\x3d\x00\x3e\x00\x3f\x00\x40\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x5c\x00\xff\xff\x5c\x00\xff\xff\x5e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf
f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7c\x00\x7f\x00\x7e\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\xff\xff\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\xff\xff\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7b\x00\xff\xff\x7d\x00\xff\xff\x7f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf
f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"# - -alex_deflt :: AlexAddr -alex_deflt = AlexA# "\xff\xff\xff\xff\xff\xff\xff\xff\x2b\x00\x27\x00\x1b\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x0d\x00\x0d\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x13\x00\xff\xff\xff\xff\xff\xff\xff\xff\x18\x00\x1b\x00\x1b\x00\x1b\x00\xff\xff\x0d\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x27\x00\xff\xff\xff\xff\xff\xff\x2b\x00\xff\xff\xff\xff\xff\xff\xff\xff"# - -alex_accept = listArray (0::Int,47) [AlexAcc (alex_action_0),AlexAcc (alex_action_20),AlexAcc (alex_action_16),AlexAcc (alex_action_3),AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAcc (alex_action_1),AlexAcc (alex_action_1),AlexAccSkip,AlexAcc (alex_action_3),AlexAcc (alex_action_4),AlexAcc (alex_action_5),AlexAccSkip,AlexAccSkip,AlexAcc (alex_action_8),AlexAcc (alex_action_8),AlexAcc (alex_action_8),AlexAcc (alex_action_9),AlexAcc (alex_action_9),AlexAcc (alex_action_10),AlexAcc (alex_action_11),AlexAcc (alex_action_12),AlexAcc (alex_action_13),AlexAcc (alex_action_14),AlexAcc (alex_action_15),AlexAcc (alex_action_15),AlexAcc (alex_action_16),AlexAccSkip,AlexAcc (alex_action_18),AlexAcc (alex_action_19),AlexAcc (alex_action_19),AlexAccSkip,AlexAcc (alex_action_22),AlexAcc (alex_action_23),AlexAcc (alex_action_24),AlexAcc (alex_action_25),AlexAcc (alex_action_25)] -{-# LINE 151 "boot/Lexer.x" #-} - --- | Tokens of outer cabal file structure. 
Field values are treated opaquely. -data Token = TokSym !ByteString -- ^ Haskell-like identifier, number or operator - | TokStr !ByteString -- ^ String in quotes - | TokOther !ByteString -- ^ Operators and parens - | Indent !Int -- ^ Indentation token - | TokFieldLine !ByteString -- ^ Lines after @:@ - | Colon - | OpenBrace - | CloseBrace - | EOF - | LexicalError InputStream --TODO: add separate string lexical error - deriving Show - -data LToken = L !Position !Token - deriving Show - -toki :: (ByteString -> Token) -> Position -> Int -> ByteString -> Lex LToken -toki t pos len input = return $! L pos (t (B.take len input)) - -tok :: Token -> Position -> Int -> ByteString -> Lex LToken -tok t pos _len _input = return $! L pos t - -checkLeadingWhitespace :: Int -> ByteString -> Lex Int -checkLeadingWhitespace len bs - | B.any (== 9) (B.take len bs) = do - addWarning LexWarningTab - checkWhitespace len bs - | otherwise = checkWhitespace len bs - -checkWhitespace :: Int -> ByteString -> Lex Int -checkWhitespace len bs - | B.any (== 194) (B.take len bs) = do - addWarning LexWarningNBSP - return $ len - B.count 194 (B.take len bs) - | otherwise = return len - --- ----------------------------------------------------------------------------- --- The input type - -type AlexInput = InputStream - -alexInputPrevChar :: AlexInput -> Char -alexInputPrevChar _ = error "alexInputPrevChar not used" - -alexGetByte :: AlexInput -> Maybe (Word.Word8,AlexInput) -alexGetByte = B.uncons - -lexicalError :: Position -> InputStream -> Lex LToken -lexicalError pos inp = do - setInput B.empty - return $! 
L pos (LexicalError inp) - -lexToken :: Lex LToken -lexToken = do - pos <- getPos - inp <- getInput - st <- getStartCode - case alexScan inp st of - AlexEOF -> return (L pos EOF) - AlexError inp' -> - let !len_bytes = B.length inp - B.length inp' in - --FIXME: we want len_chars here really - -- need to decode utf8 up to this point - lexicalError (incPos len_bytes pos) inp' - AlexSkip inp' len_chars -> do - checkPosition pos inp inp' len_chars - adjustPos (incPos len_chars) - setInput inp' - lexToken - AlexToken inp' len_chars action -> do - checkPosition pos inp inp' len_chars - adjustPos (incPos len_chars) - setInput inp' - let !len_bytes = B.length inp - B.length inp' - t <- action pos len_bytes inp - --traceShow t $ return tok - return t - -checkPosition :: Position -> ByteString -> ByteString -> Int -> Lex () -#ifdef CABAL_PARSEC_DEBUG -checkPosition pos@(Position lineno colno) inp inp' len_chars = do - text_lines <- getDbgText - let len_bytes = B.length inp - B.length inp' - pos_txt | lineno-1 < V.length text_lines = T.take len_chars (T.drop (colno-1) (text_lines V.! (lineno-1))) - | otherwise = T.empty - real_txt = B.take len_bytes inp - when (pos_txt /= T.decodeUtf8 real_txt) $ - traceShow (pos, pos_txt, T.decodeUtf8 real_txt) $ - traceShow (take 3 (V.toList text_lines)) $ return () - where - getDbgText = Lex $ \s@LexState{ dbgText = txt } -> LexResult s txt -#else -checkPosition _ _ _ _ = return () -#endif - -lexAll :: Lex [LToken] -lexAll = do - t <- lexToken - case t of - L _ EOF -> return [t] - _ -> do ts <- lexAll - return (t : ts) - -ltest :: Int -> String -> Prelude.IO () -ltest code s = - let (ws, xs) = execLexer (setStartCode code >> lexAll) (B.Char8.pack s) - in traverse_ print ws >> traverse_ print xs - -mkLexState :: ByteString -> LexState -mkLexState input = LexState - { curPos = Position 1 1 - , curInput = input - , curCode = 0 - , warnings = [] -#ifdef CABAL_PARSEC_DEBUG - , dbgText = V.fromList . lines' . 
T.decodeUtf8With T.lenientDecode $ input -#endif - } - -#ifdef CABAL_PARSEC_DEBUG -lines' :: T.Text -> [T.Text] -lines' s1 - | T.null s1 = [] - | otherwise = case T.break (\c -> c == '\r' || c == '\n') s1 of - (l, s2) | Just (c,s3) <- T.uncons s2 - -> case T.uncons s3 of - Just ('\n', s4) | c == '\r' -> l `T.snoc` '\r' `T.snoc` '\n' : lines' s4 - _ -> l `T.snoc` c : lines' s3 - - | otherwise - -> [l] -#endif - -bol_field_braces,bol_field_layout,bol_section,in_field_braces,in_field_layout,in_section :: Int -bol_field_braces = 1 -bol_field_layout = 2 -bol_section = 3 -in_field_braces = 4 -in_field_layout = 5 -in_section = 6 -alex_action_0 = \_ len _ -> do - when (len /= 0) $ addWarning LexWarningBOM - setStartCode bol_section - lexToken - -alex_action_1 = \_pos len inp -> checkWhitespace len inp >> adjustPos retPos >> lexToken -alex_action_3 = \pos len inp -> checkLeadingWhitespace len inp >> - if B.length inp == len - then return (L pos EOF) - else setStartCode in_section - >> return (L pos (Indent len)) -alex_action_4 = tok OpenBrace -alex_action_5 = tok CloseBrace -alex_action_8 = toki TokSym -alex_action_9 = \pos len inp -> return $! 
L pos (TokStr (B.take (len - 2) (B.tail inp))) -alex_action_10 = toki TokOther -alex_action_11 = toki TokOther -alex_action_12 = tok Colon -alex_action_13 = tok OpenBrace -alex_action_14 = tok CloseBrace -alex_action_15 = \_ _ _ -> adjustPos retPos >> setStartCode bol_section >> lexToken -alex_action_16 = \pos len inp -> checkLeadingWhitespace len inp >>= \len' -> - if B.length inp == len - then return (L pos EOF) - else setStartCode in_field_layout - >> return (L pos (Indent len')) -alex_action_18 = toki TokFieldLine -alex_action_19 = \_ _ _ -> adjustPos retPos >> setStartCode bol_field_layout >> lexToken -alex_action_20 = \_ _ _ -> setStartCode in_field_braces >> lexToken -alex_action_22 = toki TokFieldLine -alex_action_23 = tok OpenBrace -alex_action_24 = tok CloseBrace -alex_action_25 = \_ _ _ -> adjustPos retPos >> setStartCode bol_field_braces >> lexToken -{-# LINE 1 "templates/GenericTemplate.hs" #-} -{-# LINE 1 "templates/GenericTemplate.hs" #-} -{-# LINE 1 "" #-} -{-# LINE 1 "" #-} -{-# LINE 10 "" #-} -# 1 "/usr/include/stdc-predef.h" 1 3 4 - -# 17 "/usr/include/stdc-predef.h" 3 4 - -{-# LINE 10 "" #-} -{-# LINE 1 "/opt/ghc/7.10.3/lib/ghc-7.10.3/include/ghcversion.h" #-} - -{-# LINE 10 "" #-} -{-# LINE 1 "templates/GenericTemplate.hs" #-} --- ----------------------------------------------------------------------------- --- ALEX TEMPLATE --- --- This code is in the PUBLIC DOMAIN; you may copy it freely and use --- it for any purpose whatsoever. - --- ----------------------------------------------------------------------------- --- INTERNALS and main scanner engine - -{-# LINE 21 "templates/GenericTemplate.hs" #-} - --- Do not remove this comment. Required to fix CPP parsing when using GCC and a clang-compiled alex. 
-#if __GLASGOW_HASKELL__ > 706 -#define GTE(n,m) (tagToEnum# (n >=# m)) -#define EQ(n,m) (tagToEnum# (n ==# m)) -#else -#define GTE(n,m) (n >=# m) -#define EQ(n,m) (n ==# m) -#endif -{-# LINE 51 "templates/GenericTemplate.hs" #-} - -data AlexAddr = AlexA# Addr# --- Do not remove this comment. Required to fix CPP parsing when using GCC and a clang-compiled alex. -#if __GLASGOW_HASKELL__ < 503 -uncheckedShiftL# = shiftL# -#endif - -{-# INLINE alexIndexInt16OffAddr #-} -alexIndexInt16OffAddr (AlexA# arr) off = -#ifdef WORDS_BIGENDIAN - narrow16Int# i - where - i = word2Int# ((high `uncheckedShiftL#` 8#) `or#` low) - high = int2Word# (ord# (indexCharOffAddr# arr (off' +# 1#))) - low = int2Word# (ord# (indexCharOffAddr# arr off')) - off' = off *# 2# -#else - indexInt16OffAddr# arr off -#endif - -{-# INLINE alexIndexInt32OffAddr #-} -alexIndexInt32OffAddr (AlexA# arr) off = -#ifdef WORDS_BIGENDIAN - narrow32Int# i - where - i = word2Int# ((b3 `uncheckedShiftL#` 24#) `or#` - (b2 `uncheckedShiftL#` 16#) `or#` - (b1 `uncheckedShiftL#` 8#) `or#` b0) - b3 = int2Word# (ord# (indexCharOffAddr# arr (off' +# 3#))) - b2 = int2Word# (ord# (indexCharOffAddr# arr (off' +# 2#))) - b1 = int2Word# (ord# (indexCharOffAddr# arr (off' +# 1#))) - b0 = int2Word# (ord# (indexCharOffAddr# arr off')) - off' = off *# 4# -#else - indexInt32OffAddr# arr off -#endif - -#if __GLASGOW_HASKELL__ < 503 -quickIndex arr i = arr ! i -#else --- GHC >= 503, unsafeAt is available from Data.Array.Base. 
-quickIndex = unsafeAt -#endif - --- ----------------------------------------------------------------------------- --- Main lexing routines - -data AlexReturn a - = AlexEOF - | AlexError !AlexInput - | AlexSkip !AlexInput !Int - | AlexToken !AlexInput !Int a - --- alexScan :: AlexInput -> StartCode -> AlexReturn a -alexScan input (I# (sc)) - = alexScanUser undefined input (I# (sc)) - -alexScanUser user input (I# (sc)) - = case alex_scan_tkn user input 0# input sc AlexNone of - (AlexNone, input') -> - case alexGetByte input of - Nothing -> - - AlexEOF - Just _ -> - - AlexError input' - - (AlexLastSkip input'' len, _) -> - - AlexSkip input'' len - - (AlexLastAcc k input''' len, _) -> - - AlexToken input''' len k - --- Push the input through the DFA, remembering the most recent accepting --- state it encountered. - -alex_scan_tkn user orig_input len input s last_acc = - input `seq` -- strict in the input - let - new_acc = (check_accs (alex_accept `quickIndex` (I# (s)))) - in - new_acc `seq` - case alexGetByte input of - Nothing -> (new_acc, input) - Just (c, new_input) -> - - case fromIntegral c of { (I# (ord_c)) -> - let - base = alexIndexInt32OffAddr alex_base s - offset = (base +# ord_c) - check = alexIndexInt16OffAddr alex_check offset - - new_s = if GTE(offset,0#) && EQ(check,ord_c) - then alexIndexInt16OffAddr alex_table offset - else alexIndexInt16OffAddr alex_deflt s - in - case new_s of - -1# -> (new_acc, input) - -- on an error, we want to keep the input *before* the - -- character that failed, not after. 
- _ -> alex_scan_tkn user orig_input (if c < 0x80 || c >= 0xC0 then (len +# 1#) else len) - -- note that the length is increased ONLY if this is the 1st byte in a char encoding) - new_input new_s new_acc - } - where - check_accs (AlexAccNone) = last_acc - check_accs (AlexAcc a ) = AlexLastAcc a input (I# (len)) - check_accs (AlexAccSkip) = AlexLastSkip input (I# (len)) -{-# LINE 198 "templates/GenericTemplate.hs" #-} - -data AlexLastAcc a - = AlexNone - | AlexLastAcc a !AlexInput !Int - | AlexLastSkip !AlexInput !Int - -instance Functor AlexLastAcc where - fmap _ AlexNone = AlexNone - fmap f (AlexLastAcc x y z) = AlexLastAcc (f x) y z - fmap _ (AlexLastSkip x y) = AlexLastSkip x y - -data AlexAcc a user - = AlexAccNone - | AlexAcc a - | AlexAccSkip diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/LexerMonad.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/LexerMonad.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/LexerMonad.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/LexerMonad.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,153 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Parsec.LexerMonad --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable -module Distribution.Parsec.LexerMonad ( - InputStream, - LexState(..), - LexResult(..), - - Lex(..), - execLexer, - - getPos, - setPos, - adjustPos, - - getInput, - setInput, - - getStartCode, - setStartCode, - - LexWarning(..), - LexWarningType(..), - addWarning, - toPWarnings, - - ) where - -import qualified Data.ByteString as B -import Distribution.Compat.Prelude -import Distribution.Parsec.Common (PWarnType (..), PWarning (..), Position (..), showPos) -import Prelude () - -import qualified 
Data.Map.Strict as Map - -#ifdef CABAL_PARSEC_DEBUG --- testing only: -import qualified Data.Text as T -import qualified Data.Text.Encoding as T -import qualified Data.Vector as V -#endif - --- simple state monad -newtype Lex a = Lex { unLex :: LexState -> LexResult a } - -instance Functor Lex where - fmap = liftM - -instance Applicative Lex where - pure = returnLex - (<*>) = ap - -instance Monad Lex where - return = pure - (>>=) = thenLex - -data LexResult a = LexResult {-# UNPACK #-} !LexState a - -data LexWarningType - = LexWarningNBSP -- ^ Encountered non breaking space - | LexWarningBOM -- ^ BOM at the start of the cabal file - | LexWarningTab -- ^ Leading tags - deriving (Eq, Ord, Show) - -data LexWarning = LexWarning !LexWarningType - {-# UNPACK #-} !Position - deriving (Show) - -toPWarnings :: [LexWarning] -> [PWarning] -toPWarnings - = map (uncurry toWarning) - . Map.toList - . Map.fromListWith (++) - . map (\(LexWarning t p) -> (t, [p])) - where - toWarning LexWarningBOM poss = - PWarning PWTLexBOM (head poss) "Byte-order mark found at the beginning of the file" - toWarning LexWarningNBSP poss = - PWarning PWTLexNBSP (head poss) $ "Non breaking spaces at " ++ intercalate ", " (map showPos poss) - toWarning LexWarningTab poss = - PWarning PWTLexTab (head poss) $ "Tabs used as indentation at " ++ intercalate ", " (map showPos poss) - -data LexState = LexState { - curPos :: {-# UNPACK #-} !Position, -- ^ position at current input location - curInput :: {-# UNPACK #-} !InputStream, -- ^ the current input - curCode :: {-# UNPACK #-} !StartCode, -- ^ lexer code - warnings :: [LexWarning] -#ifdef CABAL_PARSEC_DEBUG - , dbgText :: V.Vector T.Text -- ^ input lines, to print pretty debug info -#endif - } --TODO: check if we should cache the first token - -- since it looks like parsec's uncons can be called many times on the same input - -type StartCode = Int -- ^ An @alex@ lexer start code -type InputStream = B.ByteString - - - --- | Execute the given lexer on the 
supplied input stream. -execLexer :: Lex a -> InputStream -> ([LexWarning], a) -execLexer (Lex lexer) input = - case lexer initialState of - LexResult LexState{ warnings = ws } result -> (ws, result) - where - initialState = LexState - -- TODO: add 'startPosition' - { curPos = Position 1 1 - , curInput = input - , curCode = 0 - , warnings = [] -#ifdef CABAL_PARSEC_DEBUG - , dbgText = V.fromList . T.lines . T.decodeUtf8 $ input -#endif - } - -{-# INLINE returnLex #-} -returnLex :: a -> Lex a -returnLex a = Lex $ \s -> LexResult s a - -{-# INLINE thenLex #-} -thenLex :: Lex a -> (a -> Lex b) -> Lex b -(Lex m) `thenLex` k = Lex $ \s -> case m s of LexResult s' a -> (unLex (k a)) s' - -setPos :: Position -> Lex () -setPos pos = Lex $ \s -> LexResult s{ curPos = pos } () - -getPos :: Lex Position -getPos = Lex $ \s@LexState{ curPos = pos } -> LexResult s pos - -adjustPos :: (Position -> Position) -> Lex () -adjustPos f = Lex $ \s@LexState{ curPos = pos } -> LexResult s{ curPos = f pos } () - -getInput :: Lex InputStream -getInput = Lex $ \s@LexState{ curInput = i } -> LexResult s i - -setInput :: InputStream -> Lex () -setInput i = Lex $ \s -> LexResult s{ curInput = i } () - -getStartCode :: Lex Int -getStartCode = Lex $ \s@LexState{ curCode = c } -> LexResult s c - -setStartCode :: Int -> Lex () -setStartCode c = Lex $ \s -> LexResult s{ curCode = c } () - --- | Add warning at the current position -addWarning :: LexWarningType -> Lex () -addWarning wt = Lex $ \s@LexState{ curPos = pos, warnings = ws } -> - LexResult s{ warnings = LexWarning wt pos : ws } () diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/Newtypes.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/Newtypes.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/Newtypes.hs 2018-10-17 15:59:03.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/Newtypes.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,278 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE FunctionalDependencies #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE TypeSynonymInstances #-} --- | This module provides @newtype@ wrappers to be used with "Distribution.FieldGrammar". -module Distribution.Parsec.Newtypes ( - -- * List - alaList, - alaList', - -- ** Modifiers - CommaVCat (..), - CommaFSep (..), - VCat (..), - FSep (..), - NoCommaFSep (..), - -- ** Type - List, - -- * Version & License - SpecVersion (..), - TestedWith (..), - SpecLicense (..), - -- * Identifiers - Token (..), - Token' (..), - MQuoted (..), - FreeText (..), - FilePathNT (..), - ) where - -import Distribution.Compat.Newtype -import Distribution.Compat.Prelude -import Prelude () - -import Data.Functor.Identity (Identity (..)) -import Data.List (dropWhileEnd) -import Distribution.CabalSpecVersion -import Distribution.Compiler (CompilerFlavor) -import Distribution.License (License) -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Version - (LowerBound (..), Version, VersionRange, anyVersion, asVersionIntervals, mkVersion) -import Text.PrettyPrint (Doc, comma, fsep, punctuate, vcat, (<+>)) - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.SPDX as SPDX - --- | Vertical list with commas. Displayed with 'vcat' -data CommaVCat = CommaVCat - --- | Paragraph fill list with commas. Displayed with 'fsep' -data CommaFSep = CommaFSep - --- | Vertical list with optional commas. Displayed with 'vcat'. -data VCat = VCat - --- | Paragraph fill list with optional commas. Displayed with 'fsep'. -data FSep = FSep - --- | Paragraph fill list without commas. Displayed with 'fsep'. -data NoCommaFSep = NoCommaFSep - --- | Proxy, internal to this module. 
-data P sep = P - -class Sep sep where - prettySep :: P sep -> [Doc] -> Doc - - parseSep :: CabalParsing m => P sep -> m a -> m [a] - -instance Sep CommaVCat where - prettySep _ = vcat . punctuate comma - parseSep _ p = do - v <- askCabalSpecVersion - if v >= CabalSpecV2_2 then parsecLeadingCommaList p else parsecCommaList p -instance Sep CommaFSep where - prettySep _ = fsep . punctuate comma - parseSep _ p = do - v <- askCabalSpecVersion - if v >= CabalSpecV2_2 then parsecLeadingCommaList p else parsecCommaList p -instance Sep VCat where - prettySep _ = vcat - parseSep _ = parsecOptCommaList -instance Sep FSep where - prettySep _ = fsep - parseSep _ = parsecOptCommaList -instance Sep NoCommaFSep where - prettySep _ = fsep - parseSep _ p = many (p <* P.spaces) - --- | List separated with optional commas. Displayed with @sep@, arguments of --- type @a@ are parsed and pretty-printed as @b@. -newtype List sep b a = List { getList :: [a] } - --- | 'alaList' and 'alaList'' are simply 'List', with additional phantom --- arguments to constraint the resulting type --- --- >>> :t alaList VCat --- alaList VCat :: [a] -> List VCat (Identity a) a --- --- >>> :t alaList' FSep Token --- alaList' FSep Token :: [String] -> List FSep Token String --- -alaList :: sep -> [a] -> List sep (Identity a) a -alaList _ = List - --- | More general version of 'alaList'. -alaList' :: sep -> (a -> b) -> [a] -> List sep b a -alaList' _ _ = List - -instance Newtype (List sep wrapper a) [a] where - pack = List - unpack = getList - -instance (Newtype b a, Sep sep, Parsec b) => Parsec (List sep b a) where - parsec = pack . map (unpack :: b -> a) <$> parseSep (P :: P sep) parsec - -instance (Newtype b a, Sep sep, Pretty b) => Pretty (List sep b a) where - pretty = prettySep (P :: P sep) . map (pretty . (pack :: a -> b)) . 
unpack - --- | Haskell string or @[^ ,]+@ -newtype Token = Token { getToken :: String } - -instance Newtype Token String where - pack = Token - unpack = getToken - -instance Parsec Token where - parsec = pack <$> parsecToken - -instance Pretty Token where - pretty = showToken . unpack - --- | Haskell string or @[^ ]+@ -newtype Token' = Token' { getToken' :: String } - -instance Newtype Token' String where - pack = Token' - unpack = getToken' - -instance Parsec Token' where - parsec = pack <$> parsecToken' - -instance Pretty Token' where - pretty = showToken . unpack - --- | Either @"quoted"@ or @un-quoted@. -newtype MQuoted a = MQuoted { getMQuoted :: a } - -instance Newtype (MQuoted a) a where - pack = MQuoted - unpack = getMQuoted - -instance Parsec a => Parsec (MQuoted a) where - parsec = pack <$> parsecMaybeQuoted parsec - -instance Pretty a => Pretty (MQuoted a) where - pretty = pretty . unpack - --- | Version range or just version, i.e. @cabal-version@ field. --- --- There are few things to consider: --- --- * Starting with 2.2 the cabal-version field should be the first field in the --- file and only exact version is accepted. Therefore if we get e.g. --- @>= 2.2@, we fail. --- See --- -newtype SpecVersion = SpecVersion { getSpecVersion :: Either Version VersionRange } - -instance Newtype SpecVersion (Either Version VersionRange) where - pack = SpecVersion - unpack = getSpecVersion - -instance Parsec SpecVersion where - parsec = pack <$> parsecSpecVersion - where - parsecSpecVersion = Left <$> parsec <|> Right <$> range - range = do - vr <- parsec - if specVersionFromRange vr >= mkVersion [2,1] - then fail "cabal-version higher than 2.2 cannot be specified as a range. See https://github.com/haskell/cabal/issues/4899" - else return vr - -instance Pretty SpecVersion where - pretty = either pretty pretty . 
unpack - -specVersionFromRange :: VersionRange -> Version -specVersionFromRange versionRange = case asVersionIntervals versionRange of - [] -> mkVersion [0] - ((LowerBound version _, _):_) -> version - --- | SPDX License expression or legacy license -newtype SpecLicense = SpecLicense { getSpecLicense :: Either SPDX.License License } - -instance Newtype SpecLicense (Either SPDX.License License) where - pack = SpecLicense - unpack = getSpecLicense - -instance Parsec SpecLicense where - parsec = do - v <- askCabalSpecVersion - if v >= CabalSpecV2_2 - then SpecLicense . Left <$> parsec - else SpecLicense . Right <$> parsec - -instance Pretty SpecLicense where - pretty = either pretty pretty . unpack - --- | Version range or just version -newtype TestedWith = TestedWith { getTestedWith :: (CompilerFlavor, VersionRange) } - -instance Newtype TestedWith (CompilerFlavor, VersionRange) where - pack = TestedWith - unpack = getTestedWith - -instance Parsec TestedWith where - parsec = pack <$> parsecTestedWith - -instance Pretty TestedWith where - pretty x = case unpack x of - (compiler, vr) -> pretty compiler <+> pretty vr - --- | This is /almost/ @'many' 'Distribution.Compat.P.anyChar'@, but it --- --- * trims whitespace from ends of the lines, --- --- * converts lines with only single dot into empty line. --- -newtype FreeText = FreeText { getFreeText :: String } - -instance Newtype FreeText String where - pack = FreeText - unpack = getFreeText - -instance Parsec FreeText where - parsec = pack . dropDotLines <$ P.spaces <*> many P.anyChar - where - -- Example package with dot lines - -- http://hackage.haskell.org/package/copilot-cbmc-0.1/copilot-cbmc.cabal - dropDotLines "." = "." - dropDotLines x = intercalate "\n" . map dotToEmpty . lines $ x - dotToEmpty x | trim' x == "." = "" - dotToEmpty x = trim x - - trim' :: String -> String - trim' = dropWhileEnd (`elem` (" \t" :: String)) - - trim :: String -> String - trim = dropWhile isSpace . 
dropWhileEnd isSpace - -instance Pretty FreeText where - pretty = showFreeText . unpack - --- | Filepath are parsed as 'Token'. -newtype FilePathNT = FilePathNT { getFilePathNT :: String } - -instance Newtype FilePathNT String where - pack = FilePathNT - unpack = getFilePathNT - -instance Parsec FilePathNT where - parsec = pack <$> parsecToken - -instance Pretty FilePathNT where - pretty = showFilePath . unpack - -------------------------------------------------------------------------------- --- Internal -------------------------------------------------------------------------------- - -parsecTestedWith :: CabalParsing m => m (CompilerFlavor, VersionRange) -parsecTestedWith = do - name <- lexemeParsec - ver <- parsec <|> pure anyVersion - return (name, ver) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/ParseResult.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/ParseResult.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/ParseResult.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/ParseResult.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,184 +0,0 @@ -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE CPP #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} --- | A parse result type for parsers from AST to Haskell types. 
-module Distribution.Parsec.ParseResult ( - ParseResult, - runParseResult, - recoverWith, - parseWarning, - parseWarnings, - parseFailure, - parseFatalFailure, - parseFatalFailure', - getCabalSpecVersion, - setCabalSpecVersion, - readAndParseFile, - parseString - ) where - -import qualified Data.ByteString.Char8 as BS -import Distribution.Compat.Prelude -import Distribution.Parsec.Common - ( PError (..), PWarnType (..), PWarning (..), Position (..), zeroPos - , showPWarning, showPError) -import Distribution.Simple.Utils (die', warn) -import Distribution.Verbosity (Verbosity) -import Distribution.Version (Version) -import Prelude () -import System.Directory (doesFileExist) - -#if MIN_VERSION_base(4,10,0) -import Control.Applicative (Applicative (..)) -#endif - --- | A monad with failure and accumulating errors and warnings. -newtype ParseResult a = PR - { unPR - :: forall r. PRState - -> (PRState -> r) -- failure, but we were able to recover a new-style spec-version declaration - -> (PRState -> a -> r) -- success - -> r - } - -data PRState = PRState ![PWarning] ![PError] !(Maybe Version) - -emptyPRState :: PRState -emptyPRState = PRState [] [] Nothing - --- | Destruct a 'ParseResult' into the emitted warnings and either --- a successful value or --- list of errors and possibly recovered a spec-version declaration. 
-runParseResult :: ParseResult a -> ([PWarning], Either (Maybe Version, [PError]) a) -runParseResult pr = unPR pr emptyPRState failure success - where - failure (PRState warns errs v) = (warns, Left (v, errs)) - success (PRState warns [] _) x = (warns, Right x) - -- If there are any errors, don't return the result - success (PRState warns errs v) _ = (warns, Left (v, errs)) - -instance Functor ParseResult where - fmap f (PR pr) = PR $ \ !s failure success -> - pr s failure $ \ !s' a -> - success s' (f a) - {-# INLINE fmap #-} - -instance Applicative ParseResult where - pure x = PR $ \ !s _ success -> success s x - {-# INLINE pure #-} - - f <*> x = PR $ \ !s0 failure success -> - unPR f s0 failure $ \ !s1 f' -> - unPR x s1 failure $ \ !s2 x' -> - success s2 (f' x') - {-# INLINE (<*>) #-} - - x *> y = PR $ \ !s0 failure success -> - unPR x s0 failure $ \ !s1 _ -> - unPR y s1 failure success - {-# INLINE (*>) #-} - - x <* y = PR $ \ !s0 failure success -> - unPR x s0 failure $ \ !s1 x' -> - unPR y s1 failure $ \ !s2 _ -> - success s2 x' - {-# INLINE (<*) #-} - -#if MIN_VERSION_base(4,10,0) - liftA2 f x y = PR $ \ !s0 failure success -> - unPR x s0 failure $ \ !s1 x' -> - unPR y s1 failure $ \ !s2 y' -> - success s2 (f x' y') - {-# INLINE liftA2 #-} -#endif - -instance Monad ParseResult where - return = pure - (>>) = (*>) - - m >>= k = PR $ \ !s failure success -> - unPR m s failure $ \ !s' a -> - unPR (k a) s' failure success - {-# INLINE (>>=) #-} - --- | "Recover" the parse result, so we can proceed parsing. --- 'runParseResult' will still result in 'Nothing', if there are recorded errors. -recoverWith :: ParseResult a -> a -> ParseResult a -recoverWith (PR pr) x = PR $ \ !s _failure success -> - pr s (\ !s' -> success s' x) success - --- | Set cabal spec version. -setCabalSpecVersion :: Maybe Version -> ParseResult () -setCabalSpecVersion v = PR $ \(PRState warns errs _) _failure success -> - success (PRState warns errs v) () - --- | Get cabal spec version. 
-getCabalSpecVersion :: ParseResult (Maybe Version) -getCabalSpecVersion = PR $ \s@(PRState _ _ v) _failure success -> - success s v - --- | Add a warning. This doesn't fail the parsing process. -parseWarning :: Position -> PWarnType -> String -> ParseResult () -parseWarning pos t msg = PR $ \(PRState warns errs v) _failure success -> - success (PRState (PWarning t pos msg : warns) errs v) () - --- | Add multiple warnings at once. -parseWarnings :: [PWarning] -> ParseResult () -parseWarnings newWarns = PR $ \(PRState warns errs v) _failure success -> - success (PRState (newWarns ++ warns) errs v) () - --- | Add an error, but not fail the parser yet. --- --- For fatal failure use 'parseFatalFailure' -parseFailure :: Position -> String -> ParseResult () -parseFailure pos msg = PR $ \(PRState warns errs v) _failure success -> - success (PRState warns (PError pos msg : errs) v) () - --- | Add an fatal error. -parseFatalFailure :: Position -> String -> ParseResult a -parseFatalFailure pos msg = PR $ \(PRState warns errs v) failure _success -> - failure (PRState warns (PError pos msg : errs) v) - --- | A 'mzero'. -parseFatalFailure' :: ParseResult a -parseFatalFailure' = PR pr - where - pr (PRState warns [] v) failure _success = failure (PRState warns [err] v) - pr s failure _success = failure s - - err = PError zeroPos "Unknown fatal error" - --- | Helper combinator to do parsing plumbing for files. --- --- Given a parser and a filename, return the parse of the file, --- after checking if the file exists. --- --- Argument order is chosen to encourage partial application. -readAndParseFile - :: (BS.ByteString -> ParseResult a) -- ^ File contents to final value parser - -> Verbosity -- ^ Verbosity level - -> FilePath -- ^ File to read - -> IO a -readAndParseFile parser verbosity fpath = do - exists <- doesFileExist fpath - unless exists $ - die' verbosity $ - "Error Parsing: file \"" ++ fpath ++ "\" doesn't exist. Cannot continue." 
- bs <- BS.readFile fpath - parseString parser verbosity fpath bs - -parseString - :: (BS.ByteString -> ParseResult a) -- ^ File contents to final value parser - -> Verbosity -- ^ Verbosity level - -> String -- ^ File name - -> BS.ByteString - -> IO a -parseString parser verbosity name bs = do - let (warnings, result) = runParseResult (parser bs) - traverse_ (warn verbosity . showPWarning name) warnings - case result of - Right x -> return x - Left (_, errors) -> do - traverse_ (warn verbosity . showPError name) errors - die' verbosity $ "Failed parsing \"" ++ name ++ "\"." diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/Parser.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/Parser.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Parsec/Parser.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Parsec/Parser.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,378 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE PatternGuards #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Parsec.Parser --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable -module Distribution.Parsec.Parser ( - -- * Types - Field(..), - Name(..), - FieldLine(..), - SectionArg(..), - -- * Grammar and parsing - -- $grammar - readFields, - readFields', -#ifdef CABAL_PARSEC_DEBUG - -- * Internal - parseFile, - parseStr, - parseBS, -#endif - ) where - -import Control.Monad (guard) -import qualified Data.ByteString.Char8 as B8 -import Data.Functor.Identity -import Distribution.Compat.Prelude -import Distribution.Parsec.Common -import Distribution.Parsec.Field -import Distribution.Parsec.Lexer -import Distribution.Parsec.LexerMonad - (LexResult (..), LexState (..), 
LexWarning (..), unLex) -import Prelude () -import Text.Parsec.Combinator hiding (eof, notFollowedBy) -import Text.Parsec.Error -import Text.Parsec.Pos -import Text.Parsec.Prim hiding (many, (<|>)) - -#ifdef CABAL_PARSEC_DEBUG -import qualified Data.Text as T -import qualified Data.Text.Encoding as T -import qualified Data.Text.Encoding.Error as T -#endif - --- | The 'LexState'' (with a prime) is an instance of parsec's 'Stream' --- wrapped around lexer's 'LexState' (without a prime) -data LexState' = LexState' !LexState (LToken, LexState') - -mkLexState' :: LexState -> LexState' -mkLexState' st = LexState' st - (case unLex lexToken st of LexResult st' tok -> (tok, mkLexState' st')) - -type Parser a = ParsecT LexState' () Identity a - -instance Stream LexState' Identity LToken where - uncons (LexState' _ (tok, st')) = - case tok of - L _ EOF -> return Nothing - _ -> return (Just (tok, st')) - --- | Get lexer warnings accumulated so far -getLexerWarnings :: Parser [LexWarning] -getLexerWarnings = do - LexState' (LexState { warnings = ws }) _ <- getInput - return ws - --- | Set Alex code i.e. the mode "state" lexer is in. -setLexerMode :: Int -> Parser () -setLexerMode code = do - LexState' ls _ <- getInput - setInput $! 
mkLexState' ls { curCode = code } - -getToken :: (Token -> Maybe a) -> Parser a -getToken getTok = getTokenWithPos (\(L _ t) -> getTok t) - -getTokenWithPos :: (LToken -> Maybe a) -> Parser a -getTokenWithPos getTok = tokenPrim (\(L _ t) -> describeToken t) updatePos getTok - where - updatePos :: SourcePos -> LToken -> LexState' -> SourcePos - updatePos pos (L (Position col line) _) _ = newPos (sourceName pos) col line - -describeToken :: Token -> String -describeToken t = case t of - TokSym s -> "symbol " ++ show s - TokStr s -> "string " ++ show s - TokOther s -> "operator " ++ show s - Indent _ -> "new line" - TokFieldLine _ -> "field content" - Colon -> "\":\"" - OpenBrace -> "\"{\"" - CloseBrace -> "\"}\"" --- SemiColon -> "\";\"" - EOF -> "end of file" - LexicalError is -> "character in input " ++ show (B8.head is) - -tokSym :: Parser (Name Position) -tokSym', tokStr, tokOther :: Parser (SectionArg Position) -tokIndent :: Parser Int -tokColon, tokOpenBrace, tokCloseBrace :: Parser () -tokFieldLine :: Parser (FieldLine Position) - -tokSym = getTokenWithPos $ \t -> case t of L pos (TokSym x) -> Just (mkName pos x); _ -> Nothing -tokSym' = getTokenWithPos $ \t -> case t of L pos (TokSym x) -> Just (SecArgName pos x); _ -> Nothing -tokStr = getTokenWithPos $ \t -> case t of L pos (TokStr x) -> Just (SecArgStr pos x); _ -> Nothing -tokOther = getTokenWithPos $ \t -> case t of L pos (TokOther x) -> Just (SecArgOther pos x); _ -> Nothing -tokIndent = getToken $ \t -> case t of Indent x -> Just x; _ -> Nothing -tokColon = getToken $ \t -> case t of Colon -> Just (); _ -> Nothing -tokOpenBrace = getToken $ \t -> case t of OpenBrace -> Just (); _ -> Nothing -tokCloseBrace = getToken $ \t -> case t of CloseBrace -> Just (); _ -> Nothing -tokFieldLine = getTokenWithPos $ \t -> case t of L pos (TokFieldLine s) -> Just (FieldLine pos s); _ -> Nothing - -colon, openBrace, closeBrace :: Parser () - -sectionArg :: Parser (SectionArg Position) -sectionArg = tokSym' <|> tokStr 
<|> tokOther "section parameter" - -fieldSecName :: Parser (Name Position) -fieldSecName = tokSym "field or section name" - -colon = tokColon "\":\"" -openBrace = tokOpenBrace "\"{\"" -closeBrace = tokCloseBrace "\"}\"" - -fieldContent :: Parser (FieldLine Position) -fieldContent = tokFieldLine "field contents" - -newtype IndentLevel = IndentLevel Int - -zeroIndentLevel :: IndentLevel -zeroIndentLevel = IndentLevel 0 - -incIndentLevel :: IndentLevel -> IndentLevel -incIndentLevel (IndentLevel i) = IndentLevel (succ i) - -indentOfAtLeast :: IndentLevel -> Parser IndentLevel -indentOfAtLeast (IndentLevel i) = try $ do - j <- tokIndent - guard (j >= i) "indentation of at least " ++ show i - return (IndentLevel j) - - -newtype LexerMode = LexerMode Int - -inLexerMode :: LexerMode -> Parser p -> Parser p -inLexerMode (LexerMode mode) p = - do setLexerMode mode; x <- p; setLexerMode in_section; return x - - ------------------------ --- Cabal file grammar --- - --- $grammar --- --- @ --- CabalStyleFile ::= SecElems --- --- SecElems ::= SecElem* '\n'? --- SecElem ::= '\n' SecElemLayout | SecElemBraces --- SecElemLayout ::= FieldLayout | FieldBraces | SectionLayout | SectionBraces --- SecElemBraces ::= FieldInline | FieldBraces | SectionBraces --- FieldLayout ::= name ':' line? ('\n' line)* --- FieldBraces ::= name ':' '\n'? '{' content '}' --- FieldInline ::= name ':' content --- SectionLayout ::= name arg* SecElems --- SectionBraces ::= name arg* '\n'? '{' SecElems '}' --- @ --- --- and the same thing but left factored... --- --- @ --- SecElems ::= SecElem* --- SecElem ::= '\n' name SecElemLayout --- | name SecElemBraces --- SecElemLayout ::= ':' FieldLayoutOrBraces --- | arg* SectionLayoutOrBraces --- FieldLayoutOrBraces ::= '\n'? '{' content '}' --- | line? ('\n' line)* --- SectionLayoutOrBraces ::= '\n'? '{' SecElems '\n'? '}' --- | SecElems --- SecElemBraces ::= ':' FieldInlineOrBraces --- | arg* '\n'? '{' SecElems '\n'? '}' --- FieldInlineOrBraces ::= '\n'? 
'{' content '}' --- | content --- @ --- --- Note how we have several productions with the sequence: --- --- > '\n'? '{' --- --- That is, an optional newline (and indent) followed by a @{@ token. --- In the @SectionLayoutOrBraces@ case you can see that this makes it --- not fully left factored (because @SecElems@ can start with a @\n@). --- Fully left factoring here would be ugly, and though we could use a --- lookahead of two tokens to resolve the alternatives, we can't --- conveniently use Parsec's 'try' here to get a lookahead of only two. --- So instead we deal with this case in the lexer by making a line --- where the first non-space is @{@ lex as just the @{@ token, without --- the usual indent token. Then in the parser we can resolve everything --- with just one token of lookahead and so without using 'try'. - --- Top level of a file using cabal syntax --- -cabalStyleFile :: Parser [Field Position] -cabalStyleFile = do es <- elements zeroIndentLevel - eof - return es - --- Elements that live at the top level or inside a section, ie fields --- and sectionscontent --- --- elements ::= element* -elements :: IndentLevel -> Parser [Field Position] -elements ilevel = many (element ilevel) - --- An individual element, ie a field or a section. These can either use --- layout style or braces style. For layout style then it must start on --- a line on its own (so that we know its indentation level). --- --- element ::= '\n' name elementInLayoutContext --- | name elementInNonLayoutContext -element :: IndentLevel -> Parser (Field Position) -element ilevel = - (do ilevel' <- indentOfAtLeast ilevel - name <- fieldSecName - elementInLayoutContext (incIndentLevel ilevel') name) - <|> (do name <- fieldSecName - elementInNonLayoutContext name) - --- An element (field or section) that is valid in a layout context. --- In a layout context we can have fields and sections that themselves --- either use layout style or that use braces style. 
--- --- elementInLayoutContext ::= ':' fieldLayoutOrBraces --- | arg* sectionLayoutOrBraces -elementInLayoutContext :: IndentLevel -> Name Position -> Parser (Field Position) -elementInLayoutContext ilevel name = - (do colon; fieldLayoutOrBraces ilevel name) - <|> (do args <- many sectionArg - elems <- sectionLayoutOrBraces ilevel - return (Section name args elems)) - --- An element (field or section) that is valid in a non-layout context. --- In a non-layout context we can have only have fields and sections that --- themselves use braces style, or inline style fields. --- --- elementInNonLayoutContext ::= ':' FieldInlineOrBraces --- | arg* '\n'? '{' elements '\n'? '}' -elementInNonLayoutContext :: Name Position -> Parser (Field Position) -elementInNonLayoutContext name = - (do colon; fieldInlineOrBraces name) - <|> (do args <- many sectionArg - openBrace - elems <- elements zeroIndentLevel - optional tokIndent - closeBrace - return (Section name args elems)) - --- The body of a field, using either layout style or braces style. --- --- fieldLayoutOrBraces ::= '\n'? '{' content '}' --- | line? ('\n' line)* -fieldLayoutOrBraces :: IndentLevel -> Name Position -> Parser (Field Position) -fieldLayoutOrBraces ilevel name = braces <|> fieldLayout - where - braces = do - openBrace - ls <- inLexerMode (LexerMode in_field_braces) (many fieldContent) - closeBrace - return (Field name ls) - fieldLayout = inLexerMode (LexerMode in_field_layout) $ do - l <- optionMaybe fieldContent - ls <- many (do _ <- indentOfAtLeast ilevel; fieldContent) - return $ case l of - Nothing -> Field name ls - Just l' -> Field name (l' : ls) - --- The body of a section, using either layout style or braces style. --- --- sectionLayoutOrBraces ::= '\n'? '{' elements \n? 
'}' --- | elements -sectionLayoutOrBraces :: IndentLevel -> Parser [Field Position] -sectionLayoutOrBraces ilevel = - (do openBrace - elems <- elements zeroIndentLevel - optional tokIndent - closeBrace - return elems) - <|> (elements ilevel) - --- The body of a field, using either inline style or braces. --- --- fieldInlineOrBraces ::= '\n'? '{' content '}' --- | content -fieldInlineOrBraces :: Name Position -> Parser (Field Position) -fieldInlineOrBraces name = - (do openBrace - ls <- inLexerMode (LexerMode in_field_braces) (many fieldContent) - closeBrace - return (Field name ls)) - <|> (do ls <- inLexerMode (LexerMode in_field_braces) (option [] (fmap (\l -> [l]) fieldContent)) - return (Field name ls)) - - --- | Parse cabal style 'B8.ByteString' into list of 'Field's, i.e. the cabal AST. -readFields :: B8.ByteString -> Either ParseError [Field Position] -readFields s = fmap fst (readFields' s) - --- | Like 'readFields' but also return lexer warnings -readFields' :: B8.ByteString -> Either ParseError ([Field Position], [LexWarning]) -readFields' s = do - parse parser "the input" lexSt - where - parser = do - fields <- cabalStyleFile - ws <- getLexerWarnings - pure (fields, ws) - - lexSt = mkLexState' (mkLexState s) - -#ifdef CABAL_PARSEC_DEBUG -parseTest' :: Show a => Parsec LexState' () a -> SourceName -> B8.ByteString -> IO () -parseTest' p fname s = - case parse p fname (lexSt s) of - Left err -> putStrLn (formatError s err) - - Right x -> print x - where - lexSt = mkLexState' . mkLexState - -parseFile :: Show a => Parser a -> FilePath -> IO () -parseFile p f = B8.readFile f >>= \s -> parseTest' p f s - -parseStr :: Show a => Parser a -> String -> IO () -parseStr p = parseBS p . 
B8.pack - -parseBS :: Show a => Parser a -> B8.ByteString -> IO () -parseBS p = parseTest' p "" - -formatError :: B8.ByteString -> ParseError -> String -formatError input perr = - unlines - [ "Parse error "++ show (errorPos perr) ++ ":" - , errLine - , indicator ++ errmsg ] - where - pos = errorPos perr - ls = lines' (T.decodeUtf8With T.lenientDecode input) - errLine = T.unpack (ls !! (sourceLine pos - 1)) - indicator = replicate (sourceColumn pos) ' ' ++ "^" - errmsg = showErrorMessages "or" "unknown parse error" - "expecting" "unexpected" "end of file" - (errorMessages perr) - --- | Handles windows/osx/unix line breaks uniformly -lines' :: T.Text -> [T.Text] -lines' s1 - | T.null s1 = [] - | otherwise = case T.break (\c -> c == '\r' || c == '\n') s1 of - (l, s2) | Just (c,s3) <- T.uncons s2 - -> case T.uncons s3 of - Just ('\n', s4) | c == '\r' -> l : lines' s4 - _ -> l : lines' s3 - | otherwise -> [l] -#endif - -eof :: Parser () -eof = notFollowedBy anyToken "end of file" - where - notFollowedBy :: Parser LToken -> Parser () - notFollowedBy p = try ( (do L _ t <- try p; unexpected (describeToken t)) - <|> return ()) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/ParseUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/ParseUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/ParseUtils.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/ParseUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,715 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.ParseUtils --- Copyright : (c) The University of Glasgow 2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Utilities for parsing 'PackageDescription' and 'InstalledPackageInfo'. 
--- --- The @.cabal@ file format is not trivial, especially with the introduction --- of configurations and the section syntax that goes with that. This module --- has a bunch of parsing functions that is used by the @.cabal@ parser and a --- couple others. It has the parsing framework code and also little parsers for --- many of the formats we get in various @.cabal@ file fields, like module --- names, comma separated lists etc. - --- This module is meant to be local-only to Distribution... - -{-# OPTIONS_HADDOCK hide #-} -{-# LANGUAGE Rank2Types #-} -module Distribution.ParseUtils ( - LineNo, PError(..), PWarning(..), locatedErrorMsg, syntaxError, warning, - runP, runE, ParseResult(..), catchParseError, parseFail, showPWarning, - Field(..), fName, lineNo, - FieldDescr(..), ppField, ppFields, readFields, readFieldsFlat, - showFields, showSingleNamedField, showSimpleSingleNamedField, - parseFields, parseFieldsFlat, - parseFilePathQ, parseTokenQ, parseTokenQ', - parseModuleNameQ, - parseOptVersion, parsePackageName, - parseTestedWithQ, parseLicenseQ, parseLanguageQ, parseExtensionQ, - parseSepList, parseCommaList, parseOptCommaList, - showFilePath, showToken, showTestedWith, showFreeText, parseFreeText, - field, simpleField, listField, listFieldWithSep, spaceListField, - commaListField, commaListFieldWithSep, commaNewLineListField, - optsField, liftField, boolField, parseQuoted, parseMaybeQuoted, indentWith, - readPToMaybe, - - UnrecFieldParser, warnUnrec, ignoreUnrec, - ) where - -import Prelude () -import Distribution.Compat.Prelude hiding (get) - -import Distribution.Compiler -import Distribution.License -import Distribution.Version -import Distribution.ModuleName -import qualified Distribution.Compat.MonadFail as Fail -import Distribution.Compat.ReadP as ReadP hiding (get) -import Distribution.ReadE -import Distribution.Compat.Newtype -import Distribution.Parsec.Newtypes (TestedWith (..)) -import Distribution.Text -import Distribution.Utils.Generic -import 
Distribution.Pretty -import Language.Haskell.Extension - -import Text.PrettyPrint - ( Doc, render, style, renderStyle - , text, colon, nest, punctuate, comma, sep - , fsep, hsep, isEmpty, vcat, mode, Mode (..) - , ($+$), (<+>) - ) -import Data.Tree as Tree (Tree(..), flatten) -import qualified Data.Map as Map -import System.FilePath (normalise) - --- ----------------------------------------------------------------------------- - -type LineNo = Int - -data PError = AmbiguousParse String LineNo - | NoParse String LineNo - | TabsError LineNo - | FromString String (Maybe LineNo) - deriving (Eq, Show) - -data PWarning = PWarning String - | UTFWarning LineNo String - deriving (Eq, Show) - -showPWarning :: FilePath -> PWarning -> String -showPWarning fpath (PWarning msg) = - normalise fpath ++ ": " ++ msg -showPWarning fpath (UTFWarning line fname) = - normalise fpath ++ ":" ++ show line - ++ ": Invalid UTF-8 text in the '" ++ fname ++ "' field." - -data ParseResult a = ParseFailed PError | ParseOk [PWarning] a - deriving Show - -instance Functor ParseResult where - fmap _ (ParseFailed err) = ParseFailed err - fmap f (ParseOk ws x) = ParseOk ws $ f x - -instance Applicative ParseResult where - pure = ParseOk [] - (<*>) = ap - - -instance Monad ParseResult where - return = pure - ParseFailed err >>= _ = ParseFailed err - ParseOk ws x >>= f = case f x of - ParseFailed err -> ParseFailed err - ParseOk ws' x' -> ParseOk (ws'++ws) x' - fail = Fail.fail - -instance Fail.MonadFail ParseResult where - fail s = ParseFailed (FromString s Nothing) - -catchParseError :: ParseResult a -> (PError -> ParseResult a) - -> ParseResult a -p@(ParseOk _ _) `catchParseError` _ = p -ParseFailed e `catchParseError` k = k e - -parseFail :: PError -> ParseResult a -parseFail = ParseFailed - -runP :: LineNo -> String -> ReadP a a -> String -> ParseResult a -runP line fieldname p s = - case [ x | (x,"") <- results ] of - [a] -> ParseOk (utf8Warnings line fieldname s) a - --TODO: what is this double 
parse thing all about? - -- Can't we just do the all isSpace test the first time? - [] -> case [ x | (x,ys) <- results, all isSpace ys ] of - [a] -> ParseOk (utf8Warnings line fieldname s) a - [] -> ParseFailed (NoParse fieldname line) - _ -> ParseFailed (AmbiguousParse fieldname line) - _ -> ParseFailed (AmbiguousParse fieldname line) - where results = readP_to_S p s - -runE :: LineNo -> String -> ReadE a -> String -> ParseResult a -runE line fieldname p s = - case runReadE p s of - Right a -> ParseOk (utf8Warnings line fieldname s) a - Left e -> syntaxError line $ - "Parse of field '" ++ fieldname ++ "' failed (" ++ e ++ "): " ++ s - -utf8Warnings :: LineNo -> String -> String -> [PWarning] -utf8Warnings line fieldname s = - take 1 [ UTFWarning n fieldname - | (n,l) <- zip [line..] (lines s) - , '\xfffd' `elem` l ] - -locatedErrorMsg :: PError -> (Maybe LineNo, String) -locatedErrorMsg (AmbiguousParse f n) = (Just n, - "Ambiguous parse in field '"++f++"'.") -locatedErrorMsg (NoParse f n) = (Just n, - "Parse of field '"++f++"' failed.") -locatedErrorMsg (TabsError n) = (Just n, "Tab used as indentation.") -locatedErrorMsg (FromString s n) = (n, s) - -syntaxError :: LineNo -> String -> ParseResult a -syntaxError n s = ParseFailed $ FromString s (Just n) - -tabsError :: LineNo -> ParseResult a -tabsError ln = ParseFailed $ TabsError ln - -warning :: String -> ParseResult () -warning s = ParseOk [PWarning s] () - --- | Field descriptor. The parameter @a@ parameterizes over where the field's --- value is stored in. -data FieldDescr a - = FieldDescr - { fieldName :: String - , fieldGet :: a -> Doc - , fieldSet :: LineNo -> String -> a -> ParseResult a - -- ^ @fieldSet n str x@ Parses the field value from the given input - -- string @str@ and stores the result in @x@ if the parse was - -- successful. Otherwise, reports an error on line number @n@. 
- } - -field :: String -> (a -> Doc) -> ReadP a a -> FieldDescr a -field name showF readF = - FieldDescr name showF (\line val _st -> runP line name readF val) - --- Lift a field descriptor storing into an 'a' to a field descriptor storing --- into a 'b'. -liftField :: (b -> a) -> (a -> b -> b) -> FieldDescr a -> FieldDescr b -liftField get set (FieldDescr name showF parseF) - = FieldDescr name (showF . get) - (\line str b -> do - a <- parseF line str (get b) - return (set a b)) - --- Parser combinator for simple fields. Takes a field name, a pretty printer, --- a parser function, an accessor, and a setter, returns a FieldDescr over the --- compoid structure. -simpleField :: String -> (a -> Doc) -> ReadP a a - -> (b -> a) -> (a -> b -> b) -> FieldDescr b -simpleField name showF readF get set - = liftField get set $ field name showF readF - -commaListFieldWithSep :: Separator -> String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -commaListFieldWithSep separator name showF readF get set = - liftField get set' $ - field name showF' (parseCommaList readF) - where - set' xs b = set (get b ++ xs) b - showF' = separator . punctuate comma . map showF - -commaListField :: String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -commaListField = commaListFieldWithSep fsep - -commaNewLineListField :: String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -commaNewLineListField = commaListFieldWithSep sep - -spaceListField :: String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -spaceListField name showF readF get set = - liftField get set' $ - field name showF' (parseSpaceList readF) - where - set' xs b = set (get b ++ xs) b - showF' = fsep . 
map showF - -listFieldWithSep :: Separator -> String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -listFieldWithSep separator name showF readF get set = - liftField get set' $ - field name showF' (parseOptCommaList readF) - where - set' xs b = set (get b ++ xs) b - showF' = separator . map showF - -listField :: String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -listField = listFieldWithSep fsep - -optsField :: String -> CompilerFlavor -> (b -> [(CompilerFlavor,[String])]) - -> ([(CompilerFlavor,[String])] -> b -> b) -> FieldDescr b -optsField name flavor get set = - liftField (fromMaybe [] . lookup flavor . get) - (\opts b -> set (reorder (update flavor opts (get b))) b) $ - field name showF (sepBy parseTokenQ' (munch1 isSpace)) - where - update _ opts l | all null opts = l --empty opts as if no opts - update f opts [] = [(f,opts)] - update f opts ((f',opts'):rest) - | f == f' = (f, opts' ++ opts) : rest - | otherwise = (f',opts') : update f opts rest - reorder = sortBy (comparing fst) - showF = hsep . map text - --- TODO: this is a bit smelly hack. It's because we want to parse bool fields --- liberally but not accept new parses. We cannot do that with ReadP --- because it does not support warnings. We need a new parser framework! -boolField :: String -> (b -> Bool) -> (Bool -> b -> b) -> FieldDescr b -boolField name get set = liftField get set (FieldDescr name showF readF) - where - showF = text . show - readF line str _ - | str == "True" = ParseOk [] True - | str == "False" = ParseOk [] False - | lstr == "true" = ParseOk [caseWarning] True - | lstr == "false" = ParseOk [caseWarning] False - | otherwise = ParseFailed (NoParse name line) - where - lstr = lowercase str - caseWarning = PWarning $ - "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'." 
- -ppFields :: [FieldDescr a] -> a -> Doc -ppFields fields x = - vcat [ ppField name (getter x) | FieldDescr name getter _ <- fields ] - -ppField :: String -> Doc -> Doc -ppField name fielddoc - | isEmpty fielddoc = mempty - | name `elem` nestedFields = text name <<>> colon $+$ nest indentWith fielddoc - | otherwise = text name <<>> colon <+> fielddoc - where - nestedFields = - [ "description" - , "build-depends" - , "data-files" - , "extra-source-files" - , "extra-tmp-files" - , "exposed-modules" - , "asm-sources" - , "cmm-sources" - , "c-sources" - , "js-sources" - , "extra-libraries" - , "includes" - , "install-includes" - , "other-modules" - , "autogen-modules" - , "depends" - ] - -showFields :: [FieldDescr a] -> a -> String -showFields fields = render . ($+$ text "") . ppFields fields - -showSingleNamedField :: [FieldDescr a] -> String -> Maybe (a -> String) -showSingleNamedField fields f = - case [ get | (FieldDescr f' get _) <- fields, f' == f ] of - [] -> Nothing - (get:_) -> Just (render . ppField f . get) - -showSimpleSingleNamedField :: [FieldDescr a] -> String -> Maybe (a -> String) -showSimpleSingleNamedField fields f = - case [ get | (FieldDescr f' get _) <- fields, f' == f ] of - [] -> Nothing - (get:_) -> Just (renderStyle myStyle . 
get) - where myStyle = style { mode = LeftMode } - -parseFields :: [FieldDescr a] -> a -> String -> ParseResult a -parseFields fields initial str = - readFields str >>= accumFields fields initial - -parseFieldsFlat :: [FieldDescr a] -> a -> String -> ParseResult a -parseFieldsFlat fields initial str = - readFieldsFlat str >>= accumFields fields initial - -accumFields :: [FieldDescr a] -> a -> [Field] -> ParseResult a -accumFields fields = foldM setField - where - fieldMap = Map.fromList - [ (name, f) | f@(FieldDescr name _ _) <- fields ] - setField accum (F line name value) = case Map.lookup name fieldMap of - Just (FieldDescr _ _ set) -> set line value accum - Nothing -> do - warning ("Unrecognized field " ++ name ++ " on line " ++ show line) - return accum - setField accum f = do - warning ("Unrecognized stanza on line " ++ show (lineNo f)) - return accum - --- | The type of a function which, given a name-value pair of an --- unrecognized field, and the current structure being built, --- decides whether to incorporate the unrecognized field --- (by returning Just x, where x is a possibly modified version --- of the structure being built), or not (by returning Nothing). -type UnrecFieldParser a = (String,String) -> a -> Maybe a - --- | A default unrecognized field parser which simply returns Nothing, --- i.e. ignores all unrecognized fields, so warnings will be generated. -warnUnrec :: UnrecFieldParser a -warnUnrec _ _ = Nothing - --- | A default unrecognized field parser which silently (i.e. no --- warnings will be generated) ignores unrecognized fields, by --- returning the structure being built unmodified. -ignoreUnrec :: UnrecFieldParser a -ignoreUnrec _ = Just - ------------------------------------------------------------------------------- - --- The data type for our three syntactic categories -data Field - = F LineNo String String - -- ^ A regular @: @ field - | Section LineNo String String [Field] - -- ^ A section with a name and possible parameter. 
The syntactic - -- structure is: - -- - -- @ - -- { - -- * - -- } - -- @ - | IfBlock LineNo String [Field] [Field] - -- ^ A conditional block with an optional else branch: - -- - -- @ - -- if { - -- * - -- } else { - -- * - -- } - -- @ - deriving (Show - ,Eq) -- for testing - -lineNo :: Field -> LineNo -lineNo (F n _ _) = n -lineNo (Section n _ _ _) = n -lineNo (IfBlock n _ _ _) = n - -fName :: Field -> String -fName (F _ n _) = n -fName (Section _ n _ _) = n -fName _ = error "fname: not a field or section" - -readFields :: String -> ParseResult [Field] -readFields input = ifelse - =<< traverse (mkField 0) - =<< mkTree tokens - - where ls = (lines . normaliseLineEndings) input - tokens = (concatMap tokeniseLine . trimLines) ls - -readFieldsFlat :: String -> ParseResult [Field] -readFieldsFlat input = traverse (mkField 0) - =<< mkTree tokens - where ls = (lines . normaliseLineEndings) input - tokens = (concatMap tokeniseLineFlat . trimLines) ls - --- attach line number and determine indentation -trimLines :: [String] -> [(LineNo, Indent, HasTabs, String)] -trimLines ls = [ (lineno, indent, hastabs, trimTrailing l') - | (lineno, l) <- zip [1..] ls - , let (sps, l') = span isSpace l - indent = length sps - hastabs = '\t' `elem` sps - , validLine l' ] - where validLine ('-':'-':_) = False -- Comment - validLine [] = False -- blank line - validLine _ = True - --- | We parse generically based on indent level and braces '{' '}'. To do that --- we split into lines and then '{' '}' tokens and other spans within a line. -data Token = - -- | The 'Line' token is for bits that /start/ a line, eg: - -- - -- > "\n blah blah { blah" - -- - -- tokenises to: - -- - -- > [Line n 2 False "blah blah", OpenBracket, Span n "blah"] - -- - -- so lines are the only ones that can have nested layout, since they - -- have a known indentation level. - -- - -- eg: we can't have this: - -- - -- > if ... 
{ - -- > } else - -- > other - -- - -- because other cannot nest under else, since else doesn't start a line - -- so cannot have nested layout. It'd have to be: - -- - -- > if ... { - -- > } - -- > else - -- > other - -- - -- but that's not so common, people would normally use layout or - -- brackets not both in a single @if else@ construct. - -- - -- > if ... { foo : bar } - -- > else - -- > other - -- - -- this is OK - Line LineNo Indent HasTabs String - | Span LineNo String -- ^ span in a line, following brackets - | OpenBracket LineNo | CloseBracket LineNo - -type Indent = Int -type HasTabs = Bool - --- | Tokenise a single line, splitting on '{' '}' and the spans in between. --- Also trims leading & trailing space on those spans within the line. -tokeniseLine :: (LineNo, Indent, HasTabs, String) -> [Token] -tokeniseLine (n0, i, t, l) = case split n0 l of - (Span _ l':ss) -> Line n0 i t l' :ss - cs -> cs - where split _ "" = [] - split n s = case span (\c -> c /='}' && c /= '{') s of - ("", '{' : s') -> OpenBracket n : split n s' - (w , '{' : s') -> mkspan n w (OpenBracket n : split n s') - ("", '}' : s') -> CloseBracket n : split n s' - (w , '}' : s') -> mkspan n w (CloseBracket n : split n s') - (w , _) -> mkspan n w [] - - mkspan n s ss | null s' = ss - | otherwise = Span n s' : ss - where s' = trimTrailing (trimLeading s) - -tokeniseLineFlat :: (LineNo, Indent, HasTabs, String) -> [Token] -tokeniseLineFlat (n0, i, t, l) - | null l' = [] - | otherwise = [Line n0 i t l'] - where - l' = trimTrailing (trimLeading l) - -trimLeading, trimTrailing :: String -> String -trimLeading = dropWhile isSpace -trimTrailing = dropWhileEndLE isSpace - - -type SyntaxTree = Tree (LineNo, HasTabs, String) - --- | Parse the stream of tokens into a tree of them, based on indent \/ layout -mkTree :: [Token] -> ParseResult [SyntaxTree] -mkTree toks = - layout 0 [] toks >>= \(trees, trailing) -> case trailing of - [] -> return trees - OpenBracket n:_ -> syntaxError n "mismatched 
brackets, unexpected {" - CloseBracket n:_ -> syntaxError n "mismatched brackets, unexpected }" - -- the following two should never happen: - Span n l :_ -> syntaxError n $ "unexpected span: " ++ show l - Line n _ _ l :_ -> syntaxError n $ "unexpected line: " ++ show l - - --- | Parse the stream of tokens into a tree of them, based on indent --- This parse state expect to be in a layout context, though possibly --- nested within a braces context so we may still encounter closing braces. -layout :: Indent -- ^ indent level of the parent\/previous line - -> [SyntaxTree] -- ^ accumulating param, trees in this level - -> [Token] -- ^ remaining tokens - -> ParseResult ([SyntaxTree], [Token]) - -- ^ collected trees on this level and trailing tokens -layout _ a [] = return (reverse a, []) -layout i a (s@(Line _ i' _ _):ss) | i' < i = return (reverse a, s:ss) -layout i a (Line n _ t l:OpenBracket n':ss) = do - (sub, ss') <- braces n' [] ss - layout i (Node (n,t,l) sub:a) ss' - -layout i a (Span n l:OpenBracket n':ss) = do - (sub, ss') <- braces n' [] ss - layout i (Node (n,False,l) sub:a) ss' - --- look ahead to see if following lines are more indented, giving a sub-tree -layout i a (Line n i' t l:ss) = do - lookahead <- layout (i'+1) [] ss - case lookahead of - ([], _) -> layout i (Node (n,t,l) [] :a) ss - (ts, ss') -> layout i (Node (n,t,l) ts :a) ss' - -layout _ _ ( OpenBracket n :_) = syntaxError n "unexpected '{'" -layout _ a (s@(CloseBracket _):ss) = return (reverse a, s:ss) -layout _ _ ( Span n l : _) = syntaxError n $ "unexpected span: " - ++ show l - --- | Parse the stream of tokens into a tree of them, based on explicit braces --- This parse state expects to find a closing bracket. 
-braces :: LineNo -- ^ line of the '{', used for error messages - -> [SyntaxTree] -- ^ accumulating param, trees in this level - -> [Token] -- ^ remaining tokens - -> ParseResult ([SyntaxTree],[Token]) - -- ^ collected trees on this level and trailing tokens -braces m a (Line n _ t l:OpenBracket n':ss) = do - (sub, ss') <- braces n' [] ss - braces m (Node (n,t,l) sub:a) ss' - -braces m a (Span n l:OpenBracket n':ss) = do - (sub, ss') <- braces n' [] ss - braces m (Node (n,False,l) sub:a) ss' - -braces m a (Line n i t l:ss) = do - lookahead <- layout (i+1) [] ss - case lookahead of - ([], _) -> braces m (Node (n,t,l) [] :a) ss - (ts, ss') -> braces m (Node (n,t,l) ts :a) ss' - -braces m a (Span n l:ss) = braces m (Node (n,False,l) []:a) ss -braces _ a (CloseBracket _:ss) = return (reverse a, ss) -braces n _ [] = syntaxError n $ "opening brace '{'" - ++ "has no matching closing brace '}'" -braces _ _ (OpenBracket n:_) = syntaxError n "unexpected '{'" - --- | Convert the parse tree into the Field AST --- Also check for dodgy uses of tabs in indentation. -mkField :: Int -> SyntaxTree -> ParseResult Field -mkField d (Node (n,t,_) _) | d >= 1 && t = tabsError n -mkField d (Node (n,_,l) ts) = case span (\c -> isAlphaNum c || c == '-') l of - ([], _) -> syntaxError n $ "unrecognised field or section: " ++ show l - (name, rest) -> case trimLeading rest of - (':':rest') -> do let followingLines = concatMap Tree.flatten ts - tabs = not (null [()| (_,True,_) <- followingLines ]) - if tabs && d >= 1 - then tabsError n - else return $ F n (map toLower name) - (fieldValue rest' followingLines) - rest' -> do ts' <- traverse (mkField (d+1)) ts - return (Section n (map toLower name) rest' ts') - where fieldValue firstLine followingLines = - let firstLine' = trimLeading firstLine - followingLines' = map (\(_,_,s) -> stripDot s) followingLines - allLines | null firstLine' = followingLines' - | otherwise = firstLine' : followingLines' - in intercalate "\n" allLines - stripDot "." 
= "" - stripDot s = s - --- | Convert if/then/else 'Section's to 'IfBlock's -ifelse :: [Field] -> ParseResult [Field] -ifelse [] = return [] -ifelse (Section n "if" cond thenpart - :Section _ "else" as elsepart:fs) - | null cond = syntaxError n "'if' with missing condition" - | null thenpart = syntaxError n "'then' branch of 'if' is empty" - | not (null as) = syntaxError n "'else' takes no arguments" - | null elsepart = syntaxError n "'else' branch of 'if' is empty" - | otherwise = do tp <- ifelse thenpart - ep <- ifelse elsepart - fs' <- ifelse fs - return (IfBlock n cond tp ep:fs') -ifelse (Section n "if" cond thenpart:fs) - | null cond = syntaxError n "'if' with missing condition" - | null thenpart = syntaxError n "'then' branch of 'if' is empty" - | otherwise = do tp <- ifelse thenpart - fs' <- ifelse fs - return (IfBlock n cond tp []:fs') -ifelse (Section n "else" _ _:_) = syntaxError n - "stray 'else' with no preceding 'if'" -ifelse (Section n s a fs':fs) = do fs'' <- ifelse fs' - fs''' <- ifelse fs - return (Section n s a fs'' : fs''') -ifelse (f:fs) = do fs' <- ifelse fs - return (f : fs') - ------------------------------------------------------------------------------- - --- |parse a module name -parseModuleNameQ :: ReadP r ModuleName -parseModuleNameQ = parseMaybeQuoted parse - -parseFilePathQ :: ReadP r FilePath -parseFilePathQ = parseTokenQ - -- removed until normalise is no longer broken, was: - -- liftM normalise parseTokenQ - -betweenSpaces :: ReadP r a -> ReadP r a -betweenSpaces act = do skipSpaces - res <- act - skipSpaces - return res - -parsePackageName :: ReadP r String -parsePackageName = do - ns <- sepBy1 component (char '-') - return $ intercalate "-" ns - where - component = do - cs <- munch1 isAlphaNum - if all isDigit cs then pfail else return cs - -- each component must contain an alphabetic character, to avoid - -- ambiguity in identifiers like foo-1 (the 1 is the version number). 
- -parseOptVersion :: ReadP r Version -parseOptVersion = parseMaybeQuoted ver - where ver :: ReadP r Version - ver = parse <++ return nullVersion - -parseTestedWithQ :: ReadP r (CompilerFlavor,VersionRange) -parseTestedWithQ = parseMaybeQuoted tw - where - tw :: ReadP r (CompilerFlavor,VersionRange) - tw = do compiler <- parseCompilerFlavorCompat - version <- betweenSpaces $ parse <++ return anyVersion - return (compiler,version) - -parseLicenseQ :: ReadP r License -parseLicenseQ = parseMaybeQuoted parse - --- urgh, we can't define optQuotes :: ReadP r a -> ReadP r a --- because the "compat" version of ReadP isn't quite powerful enough. In --- particular, the type of <++ is ReadP r r -> ReadP r a -> ReadP r a --- Hence the trick above to make 'lic' polymorphic. - -parseLanguageQ :: ReadP r Language -parseLanguageQ = parseMaybeQuoted parse - -parseExtensionQ :: ReadP r Extension -parseExtensionQ = parseMaybeQuoted parse - -parseHaskellString :: ReadP r String -parseHaskellString = readS_to_P reads - -parseTokenQ :: ReadP r String -parseTokenQ = parseHaskellString <++ munch1 (\x -> not (isSpace x) && x /= ',') - -parseTokenQ' :: ReadP r String -parseTokenQ' = parseHaskellString <++ munch1 (not . isSpace) - -parseSepList :: ReadP r b - -> ReadP r a -- ^The parser for the stuff between commas - -> ReadP r [a] -parseSepList sepr p = sepBy p separator - where separator = betweenSpaces sepr - -parseSpaceList :: ReadP r a -- ^The parser for the stuff between commas - -> ReadP r [a] -parseSpaceList p = sepBy p skipSpaces - -parseCommaList :: ReadP r a -- ^The parser for the stuff between commas - -> ReadP r [a] -parseCommaList = parseSepList (ReadP.char ',') - -parseOptCommaList :: ReadP r a -- ^The parser for the stuff between commas - -> ReadP r [a] -parseOptCommaList = parseSepList (optional (ReadP.char ',')) - -parseQuoted :: ReadP r a -> ReadP r a -parseQuoted = between (ReadP.char '"') (ReadP.char '"') - -parseMaybeQuoted :: (forall r. 
ReadP r a) -> ReadP r' a -parseMaybeQuoted p = parseQuoted p <++ p - -parseFreeText :: ReadP.ReadP s String -parseFreeText = ReadP.munch (const True) - -readPToMaybe :: ReadP a a -> String -> Maybe a -readPToMaybe p str = listToMaybe [ r | (r,s) <- readP_to_S p str - , all isSpace s ] - -------------------------------------------------------------------------------- --- Internal -------------------------------------------------------------------------------- - -showTestedWith :: (CompilerFlavor, VersionRange) -> Doc -showTestedWith = pretty . pack' TestedWith diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Pretty.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Pretty.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Pretty.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Pretty.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,95 +0,0 @@ -module Distribution.Pretty ( - Pretty (..), - prettyShow, - defaultStyle, - flatStyle, - -- * Utilities - showFilePath, - showToken, - showFreeText, - indentWith, - -- * Deprecated - Separator, - ) where - -import Prelude () -import Distribution.Compat.Prelude -import Data.Functor.Identity (Identity (..)) - -import qualified Text.PrettyPrint as PP - -class Pretty a where - pretty :: a -> PP.Doc - -instance Pretty Bool where - pretty = PP.text . show - -instance Pretty Int where - pretty = PP.text . show - -instance Pretty a => Pretty (Identity a) where - pretty = pretty . runIdentity - -prettyShow :: Pretty a => a -> String -prettyShow = PP.renderStyle defaultStyle . pretty - --- | The default rendering style used in Cabal for console --- output. It has a fixed page width and adds line breaks --- automatically. 
-defaultStyle :: PP.Style -defaultStyle = PP.Style { PP.mode = PP.PageMode - , PP.lineLength = 79 - , PP.ribbonsPerLine = 1.0 - } - --- | A style for rendering all on one line. -flatStyle :: PP.Style -flatStyle = PP.Style { PP.mode = PP.LeftMode - , PP.lineLength = err "lineLength" - , PP.ribbonsPerLine = err "ribbonsPerLine" - } - where - err x = error ("flatStyle: tried to access " ++ x ++ " in LeftMode. " ++ - "This should never happen and indicates a bug in Cabal.") - -------------------------------------------------------------------------------- --- Utilities -------------------------------------------------------------------------------- - --- TODO: remove when ReadP parser is gone. -type Separator = [PP.Doc] -> PP.Doc - -showFilePath :: FilePath -> PP.Doc -showFilePath = showToken - -showToken :: String -> PP.Doc -showToken str - -- if token looks like a comment (starts with --), print it in quotes - | "--" `isPrefixOf` str = PP.text (show str) - -- also if token ends with a colon (e.g. executable name), print it in quotes - | ":" `isSuffixOf` str = PP.text (show str) - | not (any dodgy str) && not (null str) = PP.text str - | otherwise = PP.text (show str) - where - dodgy c = isSpace c || c == ',' - - --- | Pretty-print free-format text, ensuring that it is vertically aligned, --- and with blank lines replaced by dots for correct re-parsing. -showFreeText :: String -> PP.Doc -showFreeText "" = mempty -showFreeText s = PP.vcat [ PP.text (if null l then "." else l) | l <- lines_ s ] - --- | 'lines_' breaks a string up into a list of strings at newline --- characters. The resulting strings do not contain newlines. 
-lines_ :: String -> [String] -lines_ [] = [""] -lines_ s = - let (l, s') = break (== '\n') s - in l : case s' of - [] -> [] - (_:s'') -> lines_ s'' - --- | the indentation used for pretty printing -indentWith :: Int -indentWith = 4 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PrettyUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PrettyUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/PrettyUtils.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/PrettyUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.PrettyUtils --- Copyright : (c) The University of Glasgow 2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Utilities for pretty printing. -{-# OPTIONS_HADDOCK hide #-} -module Distribution.PrettyUtils {-# DEPRECATED "Use Distribution.Pretty. This module will be removed in Cabal-3.0 (est. Oct 2018)." 
#-} ( - Separator, - -- * Internal - showFilePath, - showToken, - showTestedWith, - showFreeText, - indentWith, - ) where - -import Distribution.Pretty -import Distribution.ParseUtils diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/ReadE.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/ReadE.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/ReadE.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/ReadE.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,64 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.ReadE --- Copyright : Jose Iborra 2008 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Simple parsing with failure - -module Distribution.ReadE ( - -- * ReadE - ReadE(..), succeedReadE, failReadE, - -- * Projections - parseReadE, readEOrFail, - readP_to_E, - parsecToReadE, - ) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Compat.ReadP -import Distribution.Parsec.Class -import Distribution.Parsec.FieldLineStream - --- | Parser with simple error reporting -newtype ReadE a = ReadE {runReadE :: String -> Either ErrorMsg a} -type ErrorMsg = String - -instance Functor ReadE where - fmap f (ReadE p) = ReadE $ \txt -> case p txt of - Right a -> Right (f a) - Left err -> Left err - -succeedReadE :: (String -> a) -> ReadE a -succeedReadE f = ReadE (Right . f) - -failReadE :: ErrorMsg -> ReadE a -failReadE = ReadE . const . Left - -parseReadE :: ReadE a -> ReadP r a -parseReadE (ReadE p) = do - txt <- look - either fail return (p txt) - -readEOrFail :: ReadE a -> String -> a -readEOrFail r = either error id . runReadE r - --- {-# DEPRECATED readP_to_E "Use parsecToReadE. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." 
#-} -readP_to_E :: (String -> ErrorMsg) -> ReadP a a -> ReadE a -readP_to_E err r = - ReadE $ \txt -> case [ p | (p, s) <- readP_to_S r txt - , all isSpace s ] - of [] -> Left (err txt) - (p:_) -> Right p - -parsecToReadE :: (String -> ErrorMsg) -> ParsecParser a -> ReadE a -parsecToReadE err p = ReadE $ \txt -> - case runParsecParser p "" (fieldLineStreamFromString txt) of - Right x -> Right x - Left _e -> Left (err txt) --- TODO: use parsec error to make 'ErrorMsg'. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Bench.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Bench.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Bench.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Bench.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,126 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Bench --- Copyright : Johan Tibell 2011 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is the entry point into running the benchmarks in a built --- package. It performs the \"@.\/setup bench@\" action. It runs --- benchmarks designated in the package description. 
- -module Distribution.Simple.Bench - ( bench - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.UnqualComponentName -import qualified Distribution.PackageDescription as PD -import Distribution.Simple.BuildPaths -import Distribution.Simple.Compiler -import Distribution.Simple.InstallDirs -import qualified Distribution.Simple.LocalBuildInfo as LBI -import Distribution.Simple.Setup -import Distribution.Simple.UserHooks -import Distribution.Simple.Utils -import Distribution.Text - -import System.Exit ( ExitCode(..), exitFailure, exitSuccess ) -import System.Directory ( doesFileExist ) -import System.FilePath ( (), (<.>) ) - --- | Perform the \"@.\/setup bench@\" action. -bench :: Args -- ^positional command-line arguments - -> PD.PackageDescription -- ^information from the .cabal file - -> LBI.LocalBuildInfo -- ^information from the configure step - -> BenchmarkFlags -- ^flags sent to benchmark - -> IO () -bench args pkg_descr lbi flags = do - let verbosity = fromFlag $ benchmarkVerbosity flags - benchmarkNames = args - pkgBenchmarks = PD.benchmarks pkg_descr - enabledBenchmarks = map fst (LBI.enabledBenchLBIs pkg_descr lbi) - - -- Run the benchmark - doBench :: PD.Benchmark -> IO ExitCode - doBench bm = - case PD.benchmarkInterface bm of - PD.BenchmarkExeV10 _ _ -> do - let cmd = LBI.buildDir lbi name name <.> exeExtension (LBI.hostPlatform lbi) - options = map (benchOption pkg_descr lbi bm) $ - benchmarkOptions flags - -- Check that the benchmark executable exists. - exists <- doesFileExist cmd - unless exists $ die' verbosity $ - "Error: Could not find benchmark program \"" - ++ cmd ++ "\". Did you build the package first?" - - notice verbosity $ startMessage name - -- This will redirect the child process - -- stdout/stderr to the parent process. 
- exitcode <- rawSystemExitCode verbosity cmd options - notice verbosity $ finishMessage name exitcode - return exitcode - - _ -> do - notice verbosity $ "No support for running " - ++ "benchmark " ++ name ++ " of type: " - ++ display (PD.benchmarkType bm) - exitFailure - where name = unUnqualComponentName $ PD.benchmarkName bm - - unless (PD.hasBenchmarks pkg_descr) $ do - notice verbosity "Package has no benchmarks." - exitSuccess - - when (PD.hasBenchmarks pkg_descr && null enabledBenchmarks) $ - die' verbosity $ "No benchmarks enabled. Did you remember to configure with " - ++ "\'--enable-benchmarks\'?" - - bmsToRun <- case benchmarkNames of - [] -> return enabledBenchmarks - names -> for names $ \bmName -> - let benchmarkMap = zip enabledNames enabledBenchmarks - enabledNames = map PD.benchmarkName enabledBenchmarks - allNames = map PD.benchmarkName pkgBenchmarks - in case lookup (mkUnqualComponentName bmName) benchmarkMap of - Just t -> return t - _ | mkUnqualComponentName bmName `elem` allNames -> - die' verbosity $ "Package configured with benchmark " - ++ bmName ++ " disabled." - | otherwise -> die' verbosity $ "no such benchmark: " ++ bmName - - let totalBenchmarks = length bmsToRun - notice verbosity $ "Running " ++ show totalBenchmarks ++ " benchmarks..." - exitcodes <- traverse doBench bmsToRun - let allOk = totalBenchmarks == length (filter (== ExitSuccess) exitcodes) - unless allOk exitFailure - where - startMessage name = "Benchmark " ++ name ++ ": RUNNING...\n" - finishMessage name exitcode = "Benchmark " ++ name ++ ": " - ++ (case exitcode of - ExitSuccess -> "FINISH" - ExitFailure _ -> "ERROR") - - --- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't --- necessarily a path. 
-benchOption :: PD.PackageDescription - -> LBI.LocalBuildInfo - -> PD.Benchmark - -> PathTemplate - -> String -benchOption pkg_descr lbi bm template = - fromPathTemplate $ substPathTemplate env template - where - env = initialPathTemplateEnv - (PD.package pkg_descr) (LBI.localUnitId lbi) - (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++ - [(BenchmarkNameVar, toPathTemplate $ unUnqualComponentName $ PD.benchmarkName bm)] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Build/Macros.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Build/Macros.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Build/Macros.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Build/Macros.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,158 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Build.Macros --- Copyright : Simon Marlow 2008 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Generate cabal_macros.h - CPP macros for package version testing --- --- When using CPP you get --- --- > VERSION_ --- > MIN_VERSION_(A,B,C) --- --- for each /package/ in @build-depends@, which is true if the version of --- /package/ in use is @>= A.B.C@, using the normal ordering on version --- numbers. --- --- TODO Figure out what to do about backpack and internal libraries. 
It is very --- suspecious that this stuff works with munged package identifiers -module Distribution.Simple.Build.Macros ( - generate, - generatePackageVersionMacros, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Version -import Distribution.PackageDescription -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.Program.Db -import Distribution.Simple.Program.Types -import Distribution.Types.MungedPackageId -import Distribution.Types.MungedPackageName -import Distribution.Types.PackageId -import Distribution.Text - --- ------------------------------------------------------------ --- * Generate cabal_macros.h --- ------------------------------------------------------------ - --- Invariant: HeaderLines always has a trailing newline -type HeaderLines = String - -line :: String -> HeaderLines -line str = str ++ "\n" - -ifndef :: String -> HeaderLines -> HeaderLines -ifndef macro body = - line ("#ifndef " ++ macro) ++ - body ++ - line ("#endif /* " ++ macro ++ " */") - -define :: String -> Maybe [String] -> String -> HeaderLines -define macro params val = - line ("#define " ++ macro ++ f params ++ " " ++ val) - where - f Nothing = "" - f (Just xs) = "(" ++ intercalate "," xs ++ ")" - -defineStr :: String -> String -> HeaderLines -defineStr macro str = define macro Nothing (show str) - -ifndefDefine :: String -> Maybe [String] -> String -> HeaderLines -ifndefDefine macro params str = - ifndef macro (define macro params str) - -ifndefDefineStr :: String -> String -> HeaderLines -ifndefDefineStr macro str = - ifndef macro (defineStr macro str) - --- | The contents of the @cabal_macros.h@ for the given configured package. 
--- -generate :: PackageDescription -> LocalBuildInfo -> ComponentLocalBuildInfo -> String -generate pkg_descr lbi clbi = - "/* DO NOT EDIT: This file is automatically generated by Cabal */\n\n" ++ - generatePackageVersionMacros - (package pkg_descr : map getPid (componentPackageDeps clbi)) ++ - generateToolVersionMacros (configuredPrograms . withPrograms $ lbi) ++ - generateComponentIdMacro lbi clbi ++ - generateCurrentPackageVersion pkg_descr - where - getPid (_, MungedPackageId mpn v) = - PackageIdentifier pn v - where - -- NB: Drop the component name! We're just reporting package versions. - -- This would have to be revisited if you are allowed to depend - -- on different versions of the same package - pn = fst (decodeCompatPackageName mpn) - --- | Helper function that generates just the @VERSION_pkg@ and @MIN_VERSION_pkg@ --- macros for a list of package ids (usually used with the specific deps of --- a configured package). --- -generatePackageVersionMacros :: [PackageId] -> String -generatePackageVersionMacros pkgids = concat - [ line ("/* package " ++ display pkgid ++ " */") - ++ generateMacros "" pkgname version - | pkgid@(PackageIdentifier name version) <- pkgids - , let pkgname = map fixchar (display name) - ] - --- | Helper function that generates just the @TOOL_VERSION_pkg@ and --- @MIN_TOOL_VERSION_pkg@ macros for a list of configured programs. --- -generateToolVersionMacros :: [ConfiguredProgram] -> String -generateToolVersionMacros progs = concat - [ line ("/* tool " ++ progid ++ " */") - ++ generateMacros "TOOL_" progname version - | prog <- progs - , isJust . programVersion $ prog - , let progid = programId prog ++ "-" ++ display version - progname = map fixchar (programId prog) - Just version = programVersion prog - ] - --- | Common implementation of 'generatePackageVersionMacros' and --- 'generateToolVersionMacros'. 
--- -generateMacros :: String -> String -> Version -> String -generateMacros macro_prefix name version = - concat - [ifndefDefineStr (macro_prefix ++ "VERSION_" ++ name) (display version) - ,ifndefDefine ("MIN_" ++ macro_prefix ++ "VERSION_" ++ name) - (Just ["major1","major2","minor"]) - $ concat [ - "(\\\n" - ," (major1) < ",major1," || \\\n" - ," (major1) == ",major1," && (major2) < ",major2," || \\\n" - ," (major1) == ",major1," && (major2) == ",major2," && (minor) <= ",minor,")" - ] - ,"\n"] - where - (major1:major2:minor:_) = map show (versionNumbers version ++ repeat 0) - --- | Generate the @CURRENT_COMPONENT_ID@ definition for the component ID --- of the current package. -generateComponentIdMacro :: LocalBuildInfo -> ComponentLocalBuildInfo -> String -generateComponentIdMacro _lbi clbi = - concat $ - [case clbi of - LibComponentLocalBuildInfo{} -> - ifndefDefineStr "CURRENT_PACKAGE_KEY" (componentCompatPackageKey clbi) - _ -> "" - ,ifndefDefineStr "CURRENT_COMPONENT_ID" (display (componentComponentId clbi)) - ] - --- | Generate the @CURRENT_PACKAGE_VERSION@ definition for the declared version --- of the current package. 
-generateCurrentPackageVersion :: PackageDescription -> String -generateCurrentPackageVersion pd = - ifndefDefineStr "CURRENT_PACKAGE_VERSION" (display (pkgVersion (package pd))) - -fixchar :: Char -> Char -fixchar '-' = '_' -fixchar c = c diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Build/PathsModule.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Build/PathsModule.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Build/PathsModule.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Build/PathsModule.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,353 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Build.Macros --- Copyright : Isaac Jones 2003-2005, --- Ross Paterson 2006, --- Duncan Coutts 2007-2008 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Generating the Paths_pkgname module. --- --- This is a module that Cabal generates for the benefit of packages. It --- enables them to find their version number and find any installed data files --- at runtime. This code should probably be split off into another module. 
--- -module Distribution.Simple.Build.PathsModule ( - generate, pkgPathEnvVar - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.System -import Distribution.Simple.Compiler -import Distribution.Package -import Distribution.PackageDescription -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.BuildPaths -import Distribution.Simple.Utils -import Distribution.Text -import Distribution.Version - -import System.FilePath ( pathSeparator ) - --- ------------------------------------------------------------ --- * Building Paths_.hs --- ------------------------------------------------------------ - -generate :: PackageDescription -> LocalBuildInfo -> ComponentLocalBuildInfo -> String -generate pkg_descr lbi clbi = - let pragmas = - cpp_pragma - ++ no_rebindable_syntax_pragma - ++ ffi_pragmas - ++ warning_pragmas - - cpp_pragma - | supports_cpp = "{-# LANGUAGE CPP #-}\n" - | otherwise = "" - - -- -XRebindableSyntax is problematic because when paired with - -- -XOverloadedLists, 'fromListN' is not in scope, - -- or -XOverloadedStrings 'fromString' is not in scope, - -- so we disable 'RebindableSyntax'. 
- no_rebindable_syntax_pragma - | supports_rebindable_syntax = "{-# LANGUAGE NoRebindableSyntax #-}\n" - | otherwise = "" - - ffi_pragmas - | absolute = "" - | supports_language_pragma = - "{-# LANGUAGE ForeignFunctionInterface #-}\n" - | otherwise = - "{-# OPTIONS_GHC -fffi #-}\n" - - warning_pragmas = - "{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}\n" - - foreign_imports - | absolute = "" - | otherwise = - "import Foreign\n"++ - "import Foreign.C\n" - - reloc_imports - | reloc = - "import System.Environment (getExecutablePath)\n" - | otherwise = "" - - header = - pragmas++ - "module " ++ display paths_modulename ++ " (\n"++ - " version,\n"++ - " getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir,\n"++ - " getDataFileName, getSysconfDir\n"++ - " ) where\n"++ - "\n"++ - foreign_imports++ - "import qualified Control.Exception as Exception\n"++ - "import Data.Version (Version(..))\n"++ - "import System.Environment (getEnv)\n"++ - reloc_imports ++ - "import Prelude\n"++ - "\n"++ - (if supports_cpp - then - ("#if defined(VERSION_base)\n"++ - "\n"++ - "#if MIN_VERSION_base(4,0,0)\n"++ - "catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a\n"++ - "#else\n"++ - "catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a\n"++ - "#endif\n"++ - "\n"++ - "#else\n"++ - "catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a\n"++ - "#endif\n") - else - "catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a\n")++ - "catchIO = Exception.catch\n" ++ - "\n"++ - "version :: Version"++ - "\nversion = Version " ++ show branch ++ " []" - where branch = versionNumbers $ packageVersion pkg_descr - - body - | reloc = - "\n\nbindirrel :: FilePath\n" ++ - "bindirrel = " ++ show flat_bindirreloc ++ - "\n"++ - "\ngetBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath\n"++ - "getBinDir = "++mkGetEnvOrReloc "bindir" flat_bindirreloc++"\n"++ - "getLibDir = "++mkGetEnvOrReloc "libdir" flat_libdirreloc++"\n"++ - "getDynLibDir = 
"++mkGetEnvOrReloc "libdir" flat_dynlibdirreloc++"\n"++ - "getDataDir = "++mkGetEnvOrReloc "datadir" flat_datadirreloc++"\n"++ - "getLibexecDir = "++mkGetEnvOrReloc "libexecdir" flat_libexecdirreloc++"\n"++ - "getSysconfDir = "++mkGetEnvOrReloc "sysconfdir" flat_sysconfdirreloc++"\n"++ - "\n"++ - "getDataFileName :: FilePath -> IO FilePath\n"++ - "getDataFileName name = do\n"++ - " dir <- getDataDir\n"++ - " return (dir `joinFileName` name)\n"++ - "\n"++ - get_prefix_reloc_stuff++ - "\n"++ - filename_stuff - | absolute = - "\nbindir, libdir, dynlibdir, datadir, libexecdir, sysconfdir :: FilePath\n"++ - "\nbindir = " ++ show flat_bindir ++ - "\nlibdir = " ++ show flat_libdir ++ - "\ndynlibdir = " ++ show flat_dynlibdir ++ - "\ndatadir = " ++ show flat_datadir ++ - "\nlibexecdir = " ++ show flat_libexecdir ++ - "\nsysconfdir = " ++ show flat_sysconfdir ++ - "\n"++ - "\ngetBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath\n"++ - "getBinDir = "++mkGetEnvOr "bindir" "return bindir"++"\n"++ - "getLibDir = "++mkGetEnvOr "libdir" "return libdir"++"\n"++ - "getDynLibDir = "++mkGetEnvOr "dynlibdir" "return dynlibdir"++"\n"++ - "getDataDir = "++mkGetEnvOr "datadir" "return datadir"++"\n"++ - "getLibexecDir = "++mkGetEnvOr "libexecdir" "return libexecdir"++"\n"++ - "getSysconfDir = "++mkGetEnvOr "sysconfdir" "return sysconfdir"++"\n"++ - "\n"++ - "getDataFileName :: FilePath -> IO FilePath\n"++ - "getDataFileName name = do\n"++ - " dir <- getDataDir\n"++ - " return (dir ++ "++path_sep++" ++ name)\n" - | otherwise = - "\nprefix, bindirrel :: FilePath" ++ - "\nprefix = " ++ show flat_prefix ++ - "\nbindirrel = " ++ show (fromMaybe (error "PathsModule.generate") flat_bindirrel) ++ - "\n\n"++ - "getBinDir :: IO FilePath\n"++ - "getBinDir = getPrefixDirRel bindirrel\n\n"++ - "getLibDir :: IO FilePath\n"++ - "getLibDir = "++mkGetDir flat_libdir flat_libdirrel++"\n\n"++ - "getDynLibDir :: IO FilePath\n"++ - "getDynLibDir = "++mkGetDir 
flat_dynlibdir flat_dynlibdirrel++"\n\n"++ - "getDataDir :: IO FilePath\n"++ - "getDataDir = "++ mkGetEnvOr "datadir" - (mkGetDir flat_datadir flat_datadirrel)++"\n\n"++ - "getLibexecDir :: IO FilePath\n"++ - "getLibexecDir = "++mkGetDir flat_libexecdir flat_libexecdirrel++"\n\n"++ - "getSysconfDir :: IO FilePath\n"++ - "getSysconfDir = "++mkGetDir flat_sysconfdir flat_sysconfdirrel++"\n\n"++ - "getDataFileName :: FilePath -> IO FilePath\n"++ - "getDataFileName name = do\n"++ - " dir <- getDataDir\n"++ - " return (dir `joinFileName` name)\n"++ - "\n"++ - get_prefix_stuff++ - "\n"++ - filename_stuff - in header++body - - where - cid = componentUnitId clbi - - InstallDirs { - prefix = flat_prefix, - bindir = flat_bindir, - libdir = flat_libdir, - dynlibdir = flat_dynlibdir, - datadir = flat_datadir, - libexecdir = flat_libexecdir, - sysconfdir = flat_sysconfdir - } = absoluteComponentInstallDirs pkg_descr lbi cid NoCopyDest - InstallDirs { - bindir = flat_bindirrel, - libdir = flat_libdirrel, - dynlibdir = flat_dynlibdirrel, - datadir = flat_datadirrel, - libexecdir = flat_libexecdirrel, - sysconfdir = flat_sysconfdirrel - } = prefixRelativeComponentInstallDirs (packageId pkg_descr) lbi cid - - flat_bindirreloc = shortRelativePath flat_prefix flat_bindir - flat_libdirreloc = shortRelativePath flat_prefix flat_libdir - flat_dynlibdirreloc = shortRelativePath flat_prefix flat_dynlibdir - flat_datadirreloc = shortRelativePath flat_prefix flat_datadir - flat_libexecdirreloc = shortRelativePath flat_prefix flat_libexecdir - flat_sysconfdirreloc = shortRelativePath flat_prefix flat_sysconfdir - - mkGetDir _ (Just dirrel) = "getPrefixDirRel " ++ show dirrel - mkGetDir dir Nothing = "return " ++ show dir - - mkGetEnvOrReloc var dirrel = "catchIO (getEnv \""++var'++"\")" ++ - " (\\_ -> getPrefixDirReloc \"" ++ dirrel ++ - "\")" - where var' = pkgPathEnvVar pkg_descr var - - mkGetEnvOr var expr = "catchIO (getEnv \""++var'++"\")"++ - " (\\_ -> "++expr++")" - where var' = 
pkgPathEnvVar pkg_descr var - - -- In several cases we cannot make relocatable installations - absolute = - hasLibs pkg_descr -- we can only make progs relocatable - || isNothing flat_bindirrel -- if the bin dir is an absolute path - || not (supportsRelocatableProgs (compilerFlavor (compiler lbi))) - - reloc = relocatable lbi - - supportsRelocatableProgs GHC = case buildOS of - Windows -> True - _ -> False - supportsRelocatableProgs GHCJS = case buildOS of - Windows -> True - _ -> False - supportsRelocatableProgs _ = False - - paths_modulename = autogenPathsModuleName pkg_descr - - get_prefix_stuff = get_prefix_win32 supports_cpp buildArch - - path_sep = show [pathSeparator] - - supports_cpp = supports_language_pragma - supports_rebindable_syntax= ghc_newer_than (mkVersion [7,0,1]) - supports_language_pragma = ghc_newer_than (mkVersion [6,6,1]) - - ghc_newer_than minVersion = - case compilerCompatVersion GHC (compiler lbi) of - Nothing -> False - Just version -> version `withinRange` orLaterVersion minVersion - --- | Generates the name of the environment variable controlling the path --- component of interest. --- --- Note: The format of these strings is part of Cabal's public API; --- changing this function constitutes a *backwards-compatibility* break. -pkgPathEnvVar :: PackageDescription - -> String -- ^ path component; one of \"bindir\", \"libdir\", - -- \"datadir\", \"libexecdir\", or \"sysconfdir\" - -> String -- ^ environment variable name -pkgPathEnvVar pkg_descr var = - showPkgName (packageName pkg_descr) ++ "_" ++ var - where - showPkgName = map fixchar . 
display - fixchar '-' = '_' - fixchar c = c - -get_prefix_reloc_stuff :: String -get_prefix_reloc_stuff = - "getPrefixDirReloc :: FilePath -> IO FilePath\n"++ - "getPrefixDirReloc dirRel = do\n"++ - " exePath <- getExecutablePath\n"++ - " let (bindir,_) = splitFileName exePath\n"++ - " return ((bindir `minusFileName` bindirrel) `joinFileName` dirRel)\n" - -get_prefix_win32 :: Bool -> Arch -> String -get_prefix_win32 supports_cpp arch = - "getPrefixDirRel :: FilePath -> IO FilePath\n"++ - "getPrefixDirRel dirRel = try_size 2048 -- plenty, PATH_MAX is 512 under Win32.\n"++ - " where\n"++ - " try_size size = allocaArray (fromIntegral size) $ \\buf -> do\n"++ - " ret <- c_GetModuleFileName nullPtr buf size\n"++ - " case ret of\n"++ - " 0 -> return (prefix `joinFileName` dirRel)\n"++ - " _ | ret < size -> do\n"++ - " exePath <- peekCWString buf\n"++ - " let (bindir,_) = splitFileName exePath\n"++ - " return ((bindir `minusFileName` bindirrel) `joinFileName` dirRel)\n"++ - " | otherwise -> try_size (size * 2)\n"++ - "\n"++ - (case supports_cpp of - False -> "" - True -> "#if defined(i386_HOST_ARCH)\n"++ - "# define WINDOWS_CCONV stdcall\n"++ - "#elif defined(x86_64_HOST_ARCH)\n"++ - "# define WINDOWS_CCONV ccall\n"++ - "#else\n"++ - "# error Unknown mingw32 arch\n"++ - "#endif\n")++ - "foreign import " ++ cconv ++ " unsafe \"windows.h GetModuleFileNameW\"\n"++ - " c_GetModuleFileName :: Ptr () -> CWString -> Int32 -> IO Int32\n" - where cconv = if supports_cpp - then "WINDOWS_CCONV" - else case arch of - I386 -> "stdcall" - X86_64 -> "ccall" - _ -> error "win32 supported only with I386, X86_64" - -filename_stuff :: String -filename_stuff = - "minusFileName :: FilePath -> String -> FilePath\n"++ - "minusFileName dir \"\" = dir\n"++ - "minusFileName dir \".\" = dir\n"++ - "minusFileName dir suffix =\n"++ - " minusFileName (fst (splitFileName dir)) (fst (splitFileName suffix))\n"++ - "\n"++ - "joinFileName :: String -> String -> FilePath\n"++ - "joinFileName \"\" fname = 
fname\n"++ - "joinFileName \".\" fname = fname\n"++ - "joinFileName dir \"\" = dir\n"++ - "joinFileName dir fname\n"++ - " | isPathSeparator (last dir) = dir++fname\n"++ - " | otherwise = dir++pathSeparator:fname\n"++ - "\n"++ - "splitFileName :: FilePath -> (String, String)\n"++ - "splitFileName p = (reverse (path2++drive), reverse fname)\n"++ - " where\n"++ - " (path,drive) = case p of\n"++ - " (c:':':p') -> (reverse p',[':',c])\n"++ - " _ -> (reverse p ,\"\")\n"++ - " (fname,path1) = break isPathSeparator path\n"++ - " path2 = case path1 of\n"++ - " [] -> \".\"\n"++ - " [_] -> path1 -- don't remove the trailing slash if \n"++ - " -- there is only one character\n"++ - " (c:path') | isPathSeparator c -> path'\n"++ - " _ -> path1\n"++ - "\n"++ - "pathSeparator :: Char\n"++ - (case buildOS of - Windows -> "pathSeparator = '\\\\'\n" - _ -> "pathSeparator = '/'\n") ++ - "\n"++ - "isPathSeparator :: Char -> Bool\n"++ - (case buildOS of - Windows -> "isPathSeparator c = c == '/' || c == '\\\\'\n" - _ -> "isPathSeparator c = c == '/'\n") diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Build.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Build.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Build.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Build.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,696 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Build --- Copyright : Isaac Jones 2003-2005, --- Ross Paterson 2006, --- Duncan Coutts 2007-2008, 2012 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is the entry point to actually building the modules in a package. 
It --- doesn't actually do much itself, most of the work is delegated to --- compiler-specific actions. It does do some non-compiler specific bits like --- running pre-processors. --- - -module Distribution.Simple.Build ( - build, repl, - startInterpreter, - - initialBuildSteps, - createInternalPackageDB, - componentInitialBuildSteps, - writeAutogenFiles, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.Dependency -import Distribution.Types.LocalBuildInfo -import Distribution.Types.TargetInfo -import Distribution.Types.ComponentRequestedSpec -import Distribution.Types.ForeignLib -import Distribution.Types.MungedPackageId -import Distribution.Types.MungedPackageName -import Distribution.Types.UnqualComponentName -import Distribution.Types.ComponentLocalBuildInfo -import Distribution.Types.ExecutableScope - -import Distribution.Package -import Distribution.Backpack -import Distribution.Backpack.DescribeUnitId -import qualified Distribution.Simple.GHC as GHC -import qualified Distribution.Simple.GHCJS as GHCJS -import qualified Distribution.Simple.UHC as UHC -import qualified Distribution.Simple.HaskellSuite as HaskellSuite -import qualified Distribution.Simple.PackageIndex as Index - -import qualified Distribution.Simple.Build.Macros as Build.Macros -import qualified Distribution.Simple.Build.PathsModule as Build.PathsModule -import qualified Distribution.Simple.Program.HcPkg as HcPkg - -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.PackageDescription hiding (Flag) -import qualified Distribution.InstalledPackageInfo as IPI -import Distribution.InstalledPackageInfo (InstalledPackageInfo) -import qualified Distribution.ModuleName as ModuleName - -import Distribution.Simple.Setup -import Distribution.Simple.BuildTarget -import Distribution.Simple.BuildToolDepends -import Distribution.Simple.PreProcess -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.Program.Types -import 
Distribution.Simple.Program.Db -import Distribution.Simple.BuildPaths -import Distribution.Simple.Configure -import Distribution.Simple.Register -import Distribution.Simple.Test.LibV09 -import Distribution.Simple.Utils - -import Distribution.System -import Distribution.Text -import Distribution.Verbosity - -import Distribution.Compat.Graph (IsNode(..)) - -import Control.Monad -import qualified Data.Set as Set -import System.FilePath ( (), (<.>), takeDirectory ) -import System.Directory ( getCurrentDirectory ) - --- ----------------------------------------------------------------------------- --- |Build the libraries and executables in this package. - -build :: PackageDescription -- ^ Mostly information from the .cabal file - -> LocalBuildInfo -- ^ Configuration information - -> BuildFlags -- ^ Flags that the user passed to build - -> [ PPSuffixHandler ] -- ^ preprocessors to run before compiling - -> IO () -build pkg_descr lbi flags suffixes = do - targets <- readTargetInfos verbosity pkg_descr lbi (buildArgs flags) - let componentsToBuild = neededTargetsInBuildOrder' pkg_descr lbi (map nodeKey targets) - info verbosity $ "Component build order: " - ++ intercalate ", " - (map (showComponentName . componentLocalName . 
targetCLBI) - componentsToBuild) - - when (null targets) $ - -- Only bother with this message if we're building the whole package - setupMessage verbosity "Building" (packageId pkg_descr) - - internalPackageDB <- createInternalPackageDB verbosity lbi distPref - - (\f -> foldM_ f (installedPkgs lbi) componentsToBuild) $ \index target -> do - let comp = targetComponent target - clbi = targetCLBI target - componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity - let bi = componentBuildInfo comp - progs' = addInternalBuildTools pkg_descr lbi bi (withPrograms lbi) - lbi' = lbi { - withPrograms = progs', - withPackageDB = withPackageDB lbi ++ [internalPackageDB], - installedPkgs = index - } - mb_ipi <- buildComponent verbosity (buildNumJobs flags) pkg_descr - lbi' suffixes comp clbi distPref - return (maybe index (Index.insert `flip` index) mb_ipi) - return () - where - distPref = fromFlag (buildDistPref flags) - verbosity = fromFlag (buildVerbosity flags) - - -repl :: PackageDescription -- ^ Mostly information from the .cabal file - -> LocalBuildInfo -- ^ Configuration information - -> ReplFlags -- ^ Flags that the user passed to build - -> [ PPSuffixHandler ] -- ^ preprocessors to run before compiling - -> [String] - -> IO () -repl pkg_descr lbi flags suffixes args = do - let distPref = fromFlag (replDistPref flags) - verbosity = fromFlag (replVerbosity flags) - - target <- readTargetInfos verbosity pkg_descr lbi args >>= \r -> case r of - -- This seems DEEPLY questionable. - [] -> return (head (allTargetsInBuildOrder' pkg_descr lbi)) - [target] -> return target - _ -> die' verbosity $ "The 'repl' command does not support multiple targets at once." - let componentsToBuild = neededTargetsInBuildOrder' pkg_descr lbi [nodeKey target] - debug verbosity $ "Component build order: " - ++ intercalate ", " - (map (showComponentName . componentLocalName . 
targetCLBI) - componentsToBuild) - - internalPackageDB <- createInternalPackageDB verbosity lbi distPref - - let lbiForComponent comp lbi' = - lbi' { - withPackageDB = withPackageDB lbi ++ [internalPackageDB], - withPrograms = addInternalBuildTools pkg_descr lbi' - (componentBuildInfo comp) (withPrograms lbi') - } - - -- build any dependent components - sequence_ - [ do let clbi = targetCLBI subtarget - comp = targetComponent subtarget - lbi' = lbiForComponent comp lbi - componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity - buildComponent verbosity NoFlag - pkg_descr lbi' suffixes comp clbi distPref - | subtarget <- init componentsToBuild ] - - -- REPL for target components - let clbi = targetCLBI target - comp = targetComponent target - lbi' = lbiForComponent comp lbi - replFlags = replReplOptions flags - componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity - replComponent replFlags verbosity pkg_descr lbi' suffixes comp clbi distPref - - --- | Start an interpreter without loading any package files. -startInterpreter :: Verbosity -> ProgramDb -> Compiler -> Platform - -> PackageDBStack -> IO () -startInterpreter verbosity programDb comp platform packageDBs = - case compilerFlavor comp of - GHC -> GHC.startInterpreter verbosity programDb comp platform packageDBs - GHCJS -> GHCJS.startInterpreter verbosity programDb comp platform packageDBs - _ -> die' verbosity "A REPL is not supported with this compiler." 
- -buildComponent :: Verbosity - -> Flag (Maybe Int) - -> PackageDescription - -> LocalBuildInfo - -> [PPSuffixHandler] - -> Component - -> ComponentLocalBuildInfo - -> FilePath - -> IO (Maybe InstalledPackageInfo) -buildComponent verbosity numJobs pkg_descr lbi suffixes - comp@(CLib lib) clbi distPref = do - preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes - extras <- preprocessExtras verbosity comp lbi - setupMessage' verbosity "Building" (packageId pkg_descr) - (componentLocalName clbi) (maybeComponentInstantiatedWith clbi) - let libbi = libBuildInfo lib - lib' = lib { libBuildInfo = addExtraCxxSources (addExtraCSources libbi extras) extras } - buildLib verbosity numJobs pkg_descr lbi lib' clbi - - let oneComponentRequested (OneComponentRequestedSpec _) = True - oneComponentRequested _ = False - -- Don't register inplace if we're only building a single component; - -- it's not necessary because there won't be any subsequent builds - -- that need to tag us - if (not (oneComponentRequested (componentEnabledSpec lbi))) - then do - -- Register the library in-place, so exes can depend - -- on internally defined libraries. - pwd <- getCurrentDirectory - let -- The in place registration uses the "-inplace" suffix, not an ABI hash - installedPkgInfo = inplaceInstalledPackageInfo pwd distPref pkg_descr - -- NB: Use a fake ABI hash to avoid - -- needing to recompute it every build. 
- (mkAbiHash "inplace") lib' lbi clbi - - debug verbosity $ "Registering inplace:\n" ++ (IPI.showInstalledPackageInfo installedPkgInfo) - registerPackage verbosity (compiler lbi) (withPrograms lbi) - (withPackageDB lbi) installedPkgInfo - HcPkg.defaultRegisterOptions { - HcPkg.registerMultiInstance = True - } - return (Just installedPkgInfo) - else return Nothing - -buildComponent verbosity numJobs pkg_descr lbi suffixes - comp@(CFLib flib) clbi _distPref = do - preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes - setupMessage' verbosity "Building" (packageId pkg_descr) - (componentLocalName clbi) (maybeComponentInstantiatedWith clbi) - buildFLib verbosity numJobs pkg_descr lbi flib clbi - return Nothing - -buildComponent verbosity numJobs pkg_descr lbi suffixes - comp@(CExe exe) clbi _ = do - preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes - extras <- preprocessExtras verbosity comp lbi - setupMessage' verbosity "Building" (packageId pkg_descr) - (componentLocalName clbi) (maybeComponentInstantiatedWith clbi) - let ebi = buildInfo exe - exe' = exe { buildInfo = addExtraCSources ebi extras } - buildExe verbosity numJobs pkg_descr lbi exe' clbi - return Nothing - - -buildComponent verbosity numJobs pkg_descr lbi suffixes - comp@(CTest test@TestSuite { testInterface = TestSuiteExeV10{} }) - clbi _distPref = do - let exe = testSuiteExeV10AsExe test - preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes - extras <- preprocessExtras verbosity comp lbi - setupMessage' verbosity "Building" (packageId pkg_descr) - (componentLocalName clbi) (maybeComponentInstantiatedWith clbi) - let ebi = buildInfo exe - exe' = exe { buildInfo = addExtraCSources ebi extras } - buildExe verbosity numJobs pkg_descr lbi exe' clbi - return Nothing - - -buildComponent verbosity numJobs pkg_descr lbi0 suffixes - comp@(CTest - test@TestSuite { testInterface = TestSuiteLibV09{} }) - clbi -- This ComponentLocalBuildInfo corresponds to a 
detailed - -- test suite and not a real component. It should not - -- be used, except to construct the CLBIs for the - -- library and stub executable that will actually be - -- built. - distPref = do - pwd <- getCurrentDirectory - let (pkg, lib, libClbi, lbi, ipi, exe, exeClbi) = - testSuiteLibV09AsLibAndExe pkg_descr test clbi lbi0 distPref pwd - preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes - extras <- preprocessExtras verbosity comp lbi - setupMessage' verbosity "Building" (packageId pkg_descr) - (componentLocalName clbi) (maybeComponentInstantiatedWith clbi) - buildLib verbosity numJobs pkg lbi lib libClbi - -- NB: need to enable multiple instances here, because on 7.10+ - -- the package name is the same as the library, and we still - -- want the registration to go through. - registerPackage verbosity (compiler lbi) (withPrograms lbi) - (withPackageDB lbi) ipi - HcPkg.defaultRegisterOptions { - HcPkg.registerMultiInstance = True - } - let ebi = buildInfo exe - -- NB: The stub executable is linked against the test-library - -- which already contains all `other-modules`, so we need - -- to remove those from the stub-exe's build-info - exe' = exe { buildInfo = (addExtraCSources ebi extras) { otherModules = [] } } - buildExe verbosity numJobs pkg_descr lbi exe' exeClbi - return Nothing -- Can't depend on test suite - - -buildComponent verbosity _ _ _ _ - (CTest TestSuite { testInterface = TestSuiteUnsupported tt }) - _ _ = - die' verbosity $ "No support for building test suite type " ++ display tt - - -buildComponent verbosity numJobs pkg_descr lbi suffixes - comp@(CBench bm@Benchmark { benchmarkInterface = BenchmarkExeV10 {} }) - clbi _ = do - let (exe, exeClbi) = benchmarkExeV10asExe bm clbi - preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes - extras <- preprocessExtras verbosity comp lbi - setupMessage' verbosity "Building" (packageId pkg_descr) - (componentLocalName clbi) (maybeComponentInstantiatedWith clbi) - let ebi 
= buildInfo exe - exe' = exe { buildInfo = addExtraCSources ebi extras } - buildExe verbosity numJobs pkg_descr lbi exe' exeClbi - return Nothing - - -buildComponent verbosity _ _ _ _ - (CBench Benchmark { benchmarkInterface = BenchmarkUnsupported tt }) - _ _ = - die' verbosity $ "No support for building benchmark type " ++ display tt - - --- | Add extra C sources generated by preprocessing to build --- information. -addExtraCSources :: BuildInfo -> [FilePath] -> BuildInfo -addExtraCSources bi extras = bi { cSources = new } - where new = Set.toList $ old `Set.union` exs - old = Set.fromList $ cSources bi - exs = Set.fromList extras - - --- | Add extra C++ sources generated by preprocessing to build --- information. -addExtraCxxSources :: BuildInfo -> [FilePath] -> BuildInfo -addExtraCxxSources bi extras = bi { cxxSources = new } - where new = Set.toList $ old `Set.union` exs - old = Set.fromList $ cxxSources bi - exs = Set.fromList extras - - -replComponent :: [String] - -> Verbosity - -> PackageDescription - -> LocalBuildInfo - -> [PPSuffixHandler] - -> Component - -> ComponentLocalBuildInfo - -> FilePath - -> IO () -replComponent replFlags verbosity pkg_descr lbi suffixes - comp@(CLib lib) clbi _ = do - preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes - extras <- preprocessExtras verbosity comp lbi - let libbi = libBuildInfo lib - lib' = lib { libBuildInfo = libbi { cSources = cSources libbi ++ extras } } - replLib replFlags verbosity pkg_descr lbi lib' clbi - -replComponent replFlags verbosity pkg_descr lbi suffixes - comp@(CFLib flib) clbi _ = do - preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes - replFLib replFlags verbosity pkg_descr lbi flib clbi - -replComponent replFlags verbosity pkg_descr lbi suffixes - comp@(CExe exe) clbi _ = do - preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes - extras <- preprocessExtras verbosity comp lbi - let ebi = buildInfo exe - exe' = exe { buildInfo = ebi { cSources 
= cSources ebi ++ extras } } - replExe replFlags verbosity pkg_descr lbi exe' clbi - - -replComponent replFlags verbosity pkg_descr lbi suffixes - comp@(CTest test@TestSuite { testInterface = TestSuiteExeV10{} }) - clbi _distPref = do - let exe = testSuiteExeV10AsExe test - preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes - extras <- preprocessExtras verbosity comp lbi - let ebi = buildInfo exe - exe' = exe { buildInfo = ebi { cSources = cSources ebi ++ extras } } - replExe replFlags verbosity pkg_descr lbi exe' clbi - - -replComponent replFlags verbosity pkg_descr lbi0 suffixes - comp@(CTest - test@TestSuite { testInterface = TestSuiteLibV09{} }) - clbi distPref = do - pwd <- getCurrentDirectory - let (pkg, lib, libClbi, lbi, _, _, _) = - testSuiteLibV09AsLibAndExe pkg_descr test clbi lbi0 distPref pwd - preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes - extras <- preprocessExtras verbosity comp lbi - let libbi = libBuildInfo lib - lib' = lib { libBuildInfo = libbi { cSources = cSources libbi ++ extras } } - replLib replFlags verbosity pkg lbi lib' libClbi - - -replComponent _ verbosity _ _ _ - (CTest TestSuite { testInterface = TestSuiteUnsupported tt }) - _ _ = - die' verbosity $ "No support for building test suite type " ++ display tt - - -replComponent replFlags verbosity pkg_descr lbi suffixes - comp@(CBench bm@Benchmark { benchmarkInterface = BenchmarkExeV10 {} }) - clbi _ = do - let (exe, exeClbi) = benchmarkExeV10asExe bm clbi - preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes - extras <- preprocessExtras verbosity comp lbi - let ebi = buildInfo exe - exe' = exe { buildInfo = ebi { cSources = cSources ebi ++ extras } } - replExe replFlags verbosity pkg_descr lbi exe' exeClbi - - -replComponent _ verbosity _ _ _ - (CBench Benchmark { benchmarkInterface = BenchmarkUnsupported tt }) - _ _ = - die' verbosity $ "No support for building benchmark type " ++ display tt - 
----------------------------------------------------- --- Shared code for buildComponent and replComponent --- - --- | Translate a exe-style 'TestSuite' component into an exe for building -testSuiteExeV10AsExe :: TestSuite -> Executable -testSuiteExeV10AsExe test@TestSuite { testInterface = TestSuiteExeV10 _ mainFile } = - Executable { - exeName = testName test, - modulePath = mainFile, - exeScope = ExecutablePublic, - buildInfo = testBuildInfo test - } -testSuiteExeV10AsExe TestSuite{} = error "testSuiteExeV10AsExe: wrong kind" - --- | Translate a lib-style 'TestSuite' component into a lib + exe for building -testSuiteLibV09AsLibAndExe :: PackageDescription - -> TestSuite - -> ComponentLocalBuildInfo - -> LocalBuildInfo - -> FilePath - -> FilePath - -> (PackageDescription, - Library, ComponentLocalBuildInfo, - LocalBuildInfo, - IPI.InstalledPackageInfo, - Executable, ComponentLocalBuildInfo) -testSuiteLibV09AsLibAndExe pkg_descr - test@TestSuite { testInterface = TestSuiteLibV09 _ m } - clbi lbi distPref pwd = - (pkg, lib, libClbi, lbi, ipi, exe, exeClbi) - where - bi = testBuildInfo test - lib = Library { - libName = Nothing, - exposedModules = [ m ], - reexportedModules = [], - signatures = [], - libExposed = True, - libBuildInfo = bi - } - -- This is, like, the one place where we use a CTestName for a library. - -- Should NOT use library name, since that could conflict! 
- PackageIdentifier pkg_name pkg_ver = package pkg_descr - compat_name = computeCompatPackageName pkg_name (Just (testName test)) - compat_key = computeCompatPackageKey (compiler lbi) compat_name pkg_ver (componentUnitId clbi) - libClbi = LibComponentLocalBuildInfo - { componentPackageDeps = componentPackageDeps clbi - , componentInternalDeps = componentInternalDeps clbi - , componentIsIndefinite_ = False - , componentExeDeps = componentExeDeps clbi - , componentLocalName = CSubLibName (testName test) - , componentIsPublic = False - , componentIncludes = componentIncludes clbi - , componentUnitId = componentUnitId clbi - , componentComponentId = componentComponentId clbi - , componentInstantiatedWith = [] - , componentCompatPackageName = compat_name - , componentCompatPackageKey = compat_key - , componentExposedModules = [IPI.ExposedModule m Nothing] - } - pkg = pkg_descr { - package = (package pkg_descr) { pkgName = mkPackageName $ unMungedPackageName compat_name } - , executables = [] - , testSuites = [] - , subLibraries = [lib] - } - ipi = inplaceInstalledPackageInfo pwd distPref pkg (mkAbiHash "") lib lbi libClbi - testDir = buildDir lbi stubName test - stubName test ++ "-tmp" - testLibDep = thisPackageVersion $ package pkg - exe = Executable { - exeName = mkUnqualComponentName $ stubName test, - modulePath = stubFilePath test, - exeScope = ExecutablePublic, - buildInfo = (testBuildInfo test) { - hsSourceDirs = [ testDir ], - targetBuildDepends = testLibDep - : (targetBuildDepends $ testBuildInfo test) - } - } - -- | The stub executable needs a new 'ComponentLocalBuildInfo' - -- that exposes the relevant test suite library. 
- deps = (IPI.installedUnitId ipi, mungedId ipi) - : (filter (\(_, x) -> let name = unMungedPackageName $ mungedName x - in name == "Cabal" || name == "base") - (componentPackageDeps clbi)) - exeClbi = ExeComponentLocalBuildInfo { - -- TODO: this is a hack, but as long as this is unique - -- (doesn't clobber something) we won't run into trouble - componentUnitId = mkUnitId (stubName test), - componentComponentId = mkComponentId (stubName test), - componentInternalDeps = [componentUnitId clbi], - componentExeDeps = [], - componentLocalName = CExeName $ mkUnqualComponentName $ stubName test, - componentPackageDeps = deps, - -- Assert DefUnitId invariant! - -- Executable can't be indefinite, so dependencies must - -- be definite packages. - componentIncludes = zip (map (DefiniteUnitId . unsafeMkDefUnitId . fst) deps) - (repeat defaultRenaming) - } -testSuiteLibV09AsLibAndExe _ TestSuite{} _ _ _ _ = error "testSuiteLibV09AsLibAndExe: wrong kind" - - --- | Translate a exe-style 'Benchmark' component into an exe for building -benchmarkExeV10asExe :: Benchmark -> ComponentLocalBuildInfo - -> (Executable, ComponentLocalBuildInfo) -benchmarkExeV10asExe bm@Benchmark { benchmarkInterface = BenchmarkExeV10 _ f } - clbi = - (exe, exeClbi) - where - exe = Executable { - exeName = benchmarkName bm, - modulePath = f, - exeScope = ExecutablePublic, - buildInfo = benchmarkBuildInfo bm - } - exeClbi = ExeComponentLocalBuildInfo { - componentUnitId = componentUnitId clbi, - componentComponentId = componentComponentId clbi, - componentLocalName = CExeName (benchmarkName bm), - componentInternalDeps = componentInternalDeps clbi, - componentExeDeps = componentExeDeps clbi, - componentPackageDeps = componentPackageDeps clbi, - componentIncludes = componentIncludes clbi - } -benchmarkExeV10asExe Benchmark{} _ = error "benchmarkExeV10asExe: wrong kind" - --- | Initialize a new package db file for libraries defined --- internally to the package. 
-createInternalPackageDB :: Verbosity -> LocalBuildInfo -> FilePath - -> IO PackageDB -createInternalPackageDB verbosity lbi distPref = do - existsAlready <- doesPackageDBExist dbPath - when existsAlready $ deletePackageDB dbPath - createPackageDB verbosity (compiler lbi) (withPrograms lbi) False dbPath - return (SpecificPackageDB dbPath) - where - dbPath = internalPackageDBPath lbi distPref - -addInternalBuildTools :: PackageDescription -> LocalBuildInfo -> BuildInfo - -> ProgramDb -> ProgramDb -addInternalBuildTools pkg lbi bi progs = - foldr updateProgram progs internalBuildTools - where - internalBuildTools = - [ simpleConfiguredProgram toolName' (FoundOnSystem toolLocation) - | toolName <- getAllInternalToolDependencies pkg bi - , let toolName' = unUnqualComponentName toolName - , let toolLocation = buildDir lbi toolName' toolName' <.> exeExtension (hostPlatform lbi) ] - - --- TODO: build separate libs in separate dirs so that we can build --- multiple libs, e.g. for 'LibTest' library-style test suites -buildLib :: Verbosity -> Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO () -buildLib verbosity numJobs pkg_descr lbi lib clbi = - case compilerFlavor (compiler lbi) of - GHC -> GHC.buildLib verbosity numJobs pkg_descr lbi lib clbi - GHCJS -> GHCJS.buildLib verbosity numJobs pkg_descr lbi lib clbi - UHC -> UHC.buildLib verbosity pkg_descr lbi lib clbi - HaskellSuite {} -> HaskellSuite.buildLib verbosity pkg_descr lbi lib clbi - _ -> die' verbosity "Building is not supported with this compiler." - --- | Build a foreign library --- --- NOTE: We assume that we already checked that we can actually build the --- foreign library in configure. 
-buildFLib :: Verbosity -> Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> ForeignLib -> ComponentLocalBuildInfo -> IO () -buildFLib verbosity numJobs pkg_descr lbi flib clbi = - case compilerFlavor (compiler lbi) of - GHC -> GHC.buildFLib verbosity numJobs pkg_descr lbi flib clbi - _ -> die' verbosity "Building is not supported with this compiler." - -buildExe :: Verbosity -> Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> Executable -> ComponentLocalBuildInfo -> IO () -buildExe verbosity numJobs pkg_descr lbi exe clbi = - case compilerFlavor (compiler lbi) of - GHC -> GHC.buildExe verbosity numJobs pkg_descr lbi exe clbi - GHCJS -> GHCJS.buildExe verbosity numJobs pkg_descr lbi exe clbi - UHC -> UHC.buildExe verbosity pkg_descr lbi exe clbi - _ -> die' verbosity "Building is not supported with this compiler." - -replLib :: [String] -> Verbosity -> PackageDescription - -> LocalBuildInfo -> Library -> ComponentLocalBuildInfo - -> IO () -replLib replFlags verbosity pkg_descr lbi lib clbi = - case compilerFlavor (compiler lbi) of - -- 'cabal repl' doesn't need to support 'ghc --make -j', so we just pass - -- NoFlag as the numJobs parameter. - GHC -> GHC.replLib replFlags verbosity NoFlag pkg_descr lbi lib clbi - GHCJS -> GHCJS.replLib replFlags verbosity NoFlag pkg_descr lbi lib clbi - _ -> die' verbosity "A REPL is not supported for this compiler." - -replExe :: [String] -> Verbosity -> PackageDescription - -> LocalBuildInfo -> Executable -> ComponentLocalBuildInfo - -> IO () -replExe replFlags verbosity pkg_descr lbi exe clbi = - case compilerFlavor (compiler lbi) of - GHC -> GHC.replExe replFlags verbosity NoFlag pkg_descr lbi exe clbi - GHCJS -> GHCJS.replExe replFlags verbosity NoFlag pkg_descr lbi exe clbi - _ -> die' verbosity "A REPL is not supported for this compiler." 
- -replFLib :: [String] -> Verbosity -> PackageDescription - -> LocalBuildInfo -> ForeignLib -> ComponentLocalBuildInfo - -> IO () -replFLib replFlags verbosity pkg_descr lbi exe clbi = - case compilerFlavor (compiler lbi) of - GHC -> GHC.replFLib replFlags verbosity NoFlag pkg_descr lbi exe clbi - _ -> die' verbosity "A REPL is not supported for this compiler." - --- | Runs 'componentInitialBuildSteps' on every configured component. -initialBuildSteps :: FilePath -- ^"dist" prefix - -> PackageDescription -- ^mostly information from the .cabal file - -> LocalBuildInfo -- ^Configuration information - -> Verbosity -- ^The verbosity to use - -> IO () -initialBuildSteps distPref pkg_descr lbi verbosity = - withAllComponentsInBuildOrder pkg_descr lbi $ \_comp clbi -> - componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity - --- | Creates the autogenerated files for a particular configured component. -componentInitialBuildSteps :: FilePath -- ^"dist" prefix - -> PackageDescription -- ^mostly information from the .cabal file - -> LocalBuildInfo -- ^Configuration information - -> ComponentLocalBuildInfo - -> Verbosity -- ^The verbosity to use - -> IO () -componentInitialBuildSteps _distPref pkg_descr lbi clbi verbosity = do - createDirectoryIfMissingVerbose verbosity True (componentBuildDir lbi clbi) - - writeAutogenFiles verbosity pkg_descr lbi clbi - --- | Generate and write out the Paths_.hs and cabal_macros.h files --- -writeAutogenFiles :: Verbosity - -> PackageDescription - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> IO () -writeAutogenFiles verbosity pkg lbi clbi = do - createDirectoryIfMissingVerbose verbosity True (autogenComponentModulesDir lbi clbi) - - let pathsModulePath = autogenComponentModulesDir lbi clbi - ModuleName.toFilePath (autogenPathsModuleName pkg) <.> "hs" - pathsModuleDir = takeDirectory pathsModulePath - -- Ensure that the directory exists! 
- createDirectoryIfMissingVerbose verbosity True pathsModuleDir - rewriteFileEx verbosity pathsModulePath (Build.PathsModule.generate pkg lbi clbi) - - --TODO: document what we're doing here, and move it to its own function - case clbi of - LibComponentLocalBuildInfo { componentInstantiatedWith = insts } -> - -- Write out empty hsig files for all requirements, so that GHC - -- has a source file to look at it when it needs to typecheck - -- a signature. It's harmless to write these out even when - -- there is a real hsig file written by the user, since - -- include path ordering ensures that the real hsig file - -- will always be picked up before the autogenerated one. - for_ (map fst insts) $ \mod_name -> do - let sigPath = autogenComponentModulesDir lbi clbi - ModuleName.toFilePath mod_name <.> "hsig" - createDirectoryIfMissingVerbose verbosity True (takeDirectory sigPath) - rewriteFileEx verbosity sigPath $ - "{-# LANGUAGE NoImplicitPrelude #-}\n" ++ - "signature " ++ display mod_name ++ " where" - _ -> return () - - let cppHeaderPath = autogenComponentModulesDir lbi clbi cppHeaderName - rewriteFileEx verbosity cppHeaderPath (Build.Macros.generate pkg lbi clbi) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/BuildPaths.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/BuildPaths.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/BuildPaths.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/BuildPaths.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,251 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.BuildPaths --- Copyright : Isaac Jones 2003-2004, --- Duncan Coutts 2008 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- 
Portability : portable --- --- A bunch of dirs, paths and file names used for intermediate build steps. --- - -module Distribution.Simple.BuildPaths ( - defaultDistPref, srcPref, - haddockDirName, hscolourPref, haddockPref, - autogenModulesDir, - autogenPackageModulesDir, - autogenComponentModulesDir, - - autogenModuleName, - autogenPathsModuleName, - cppHeaderName, - haddockName, - - mkGenericStaticLibName, - mkLibName, - mkProfLibName, - mkGenericSharedLibName, - mkSharedLibName, - mkStaticLibName, - - exeExtension, - objExtension, - dllExtension, - staticLibExtension, - -- * Source files & build directories - getSourceFiles, getLibSourceFiles, getExeSourceFiles, - getFLibSourceFiles, exeBuildDir, flibBuildDir, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.ForeignLib -import Distribution.Types.UnqualComponentName (unUnqualComponentName) -import Distribution.Package -import Distribution.ModuleName as ModuleName -import Distribution.Compiler -import Distribution.PackageDescription -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.Setup -import Distribution.Text -import Distribution.System -import Distribution.Verbosity -import Distribution.Simple.Utils - -import System.FilePath ((), (<.>), normalise) - --- --------------------------------------------------------------------------- --- Build directories and files - -srcPref :: FilePath -> FilePath -srcPref distPref = distPref "src" - -hscolourPref :: HaddockTarget -> FilePath -> PackageDescription -> FilePath -hscolourPref = haddockPref - --- | This is the name of the directory in which the generated haddocks --- should be stored. It does not include the @/doc/html@ prefix. -haddockDirName :: HaddockTarget -> PackageDescription -> FilePath -haddockDirName ForDevelopment = display . packageName -haddockDirName ForHackage = (++ "-docs") . display . packageId - --- | The directory to which generated haddock documentation should be written. 
-haddockPref :: HaddockTarget -> FilePath -> PackageDescription -> FilePath -haddockPref haddockTarget distPref pkg_descr - = distPref "doc" "html" haddockDirName haddockTarget pkg_descr - --- | The directory in which we put auto-generated modules for EVERY --- component in the package. See deprecation notice. -{-# DEPRECATED autogenModulesDir "If you can, use 'autogenComponentModulesDir' instead, but if you really wanted package-global generated modules, use 'autogenPackageModulesDir'. In Cabal 2.0, we avoid using autogenerated files which apply to all components, because the information you often want in these files, e.g., dependency information, is best specified per component, so that reconfiguring a different component (e.g., enabling tests) doesn't force the entire to be rebuilt. 'autogenPackageModulesDir' still provides a place to put files you want to apply to the entire package, but most users of 'autogenModulesDir' should seriously consider 'autogenComponentModulesDir' if you really wanted the module to apply to one component." #-} -autogenModulesDir :: LocalBuildInfo -> String -autogenModulesDir = autogenPackageModulesDir - --- | The directory in which we put auto-generated modules for EVERY --- component in the package. -autogenPackageModulesDir :: LocalBuildInfo -> String -autogenPackageModulesDir lbi = buildDir lbi "global-autogen" - --- | The directory in which we put auto-generated modules for a --- particular component. -autogenComponentModulesDir :: LocalBuildInfo -> ComponentLocalBuildInfo -> String -autogenComponentModulesDir lbi clbi = componentBuildDir lbi clbi "autogen" --- NB: Look at 'checkForeignDeps' for where a simplified version of this --- has been copy-pasted. 
- -cppHeaderName :: String -cppHeaderName = "cabal_macros.h" - -{-# DEPRECATED autogenModuleName "Use autogenPathsModuleName instead" #-} --- |The name of the auto-generated module associated with a package -autogenModuleName :: PackageDescription -> ModuleName -autogenModuleName = autogenPathsModuleName - --- | The name of the auto-generated Paths_* module associated with a package -autogenPathsModuleName :: PackageDescription -> ModuleName -autogenPathsModuleName pkg_descr = - ModuleName.fromString $ - "Paths_" ++ map fixchar (display (packageName pkg_descr)) - where fixchar '-' = '_' - fixchar c = c - -haddockName :: PackageDescription -> FilePath -haddockName pkg_descr = display (packageName pkg_descr) <.> "haddock" - --- ----------------------------------------------------------------------------- --- Source File helper - -getLibSourceFiles :: Verbosity - -> LocalBuildInfo - -> Library - -> ComponentLocalBuildInfo - -> IO [(ModuleName.ModuleName, FilePath)] -getLibSourceFiles verbosity lbi lib clbi = getSourceFiles verbosity searchpaths modules - where - bi = libBuildInfo lib - modules = allLibModules lib clbi - searchpaths = componentBuildDir lbi clbi : hsSourceDirs bi ++ - [ autogenComponentModulesDir lbi clbi - , autogenPackageModulesDir lbi ] - -getExeSourceFiles :: Verbosity - -> LocalBuildInfo - -> Executable - -> ComponentLocalBuildInfo - -> IO [(ModuleName.ModuleName, FilePath)] -getExeSourceFiles verbosity lbi exe clbi = do - moduleFiles <- getSourceFiles verbosity searchpaths modules - srcMainPath <- findFile (hsSourceDirs bi) (modulePath exe) - return ((ModuleName.main, srcMainPath) : moduleFiles) - where - bi = buildInfo exe - modules = otherModules bi - searchpaths = autogenComponentModulesDir lbi clbi - : autogenPackageModulesDir lbi - : exeBuildDir lbi exe : hsSourceDirs bi - -getFLibSourceFiles :: Verbosity - -> LocalBuildInfo - -> ForeignLib - -> ComponentLocalBuildInfo - -> IO [(ModuleName.ModuleName, FilePath)] -getFLibSourceFiles verbosity 
lbi flib clbi = getSourceFiles verbosity searchpaths modules - where - bi = foreignLibBuildInfo flib - modules = otherModules bi - searchpaths = autogenComponentModulesDir lbi clbi - : autogenPackageModulesDir lbi - : flibBuildDir lbi flib : hsSourceDirs bi - -getSourceFiles :: Verbosity -> [FilePath] - -> [ModuleName.ModuleName] - -> IO [(ModuleName.ModuleName, FilePath)] -getSourceFiles verbosity dirs modules = flip traverse modules $ \m -> fmap ((,) m) $ - findFileWithExtension ["hs", "lhs", "hsig", "lhsig"] dirs (ModuleName.toFilePath m) - >>= maybe (notFound m) (return . normalise) - where - notFound module_ = die' verbosity $ "can't find source for module " ++ display module_ - --- | The directory where we put build results for an executable -exeBuildDir :: LocalBuildInfo -> Executable -> FilePath -exeBuildDir lbi exe = buildDir lbi nm nm ++ "-tmp" - where - nm = unUnqualComponentName $ exeName exe - --- | The directory where we put build results for a foreign library -flibBuildDir :: LocalBuildInfo -> ForeignLib -> FilePath -flibBuildDir lbi flib = buildDir lbi nm nm ++ "-tmp" - where - nm = unUnqualComponentName $ foreignLibName flib - --- --------------------------------------------------------------------------- --- Library file names - --- | Create a library name for a static library from a given name. --- Prepends 'lib' and appends the static library extension ('.a'). -mkGenericStaticLibName :: String -> String -mkGenericStaticLibName lib = "lib" ++ lib <.> "a" - -mkLibName :: UnitId -> String -mkLibName lib = mkGenericStaticLibName (getHSLibraryName lib) - -mkProfLibName :: UnitId -> String -mkProfLibName lib = mkGenericStaticLibName (getHSLibraryName lib ++ "_p") - --- | Create a library name for a shared lirbary from a given name. --- Prepends 'lib' and appends the '-' --- as well as the shared library extension. 
-mkGenericSharedLibName :: Platform -> CompilerId -> String -> String -mkGenericSharedLibName platform (CompilerId compilerFlavor compilerVersion) lib - = mconcat [ "lib", lib, "-", comp <.> dllExtension platform ] - where comp = display compilerFlavor ++ display compilerVersion - --- Implement proper name mangling for dynamical shared objects --- libHS- --- e.g. libHSbase-2.1-ghc6.6.1.so -mkSharedLibName :: Platform -> CompilerId -> UnitId -> String -mkSharedLibName platform comp lib - = mkGenericSharedLibName platform comp (getHSLibraryName lib) - --- Static libs are named the same as shared libraries, only with --- a different extension. -mkStaticLibName :: Platform -> CompilerId -> UnitId -> String -mkStaticLibName platform (CompilerId compilerFlavor compilerVersion) lib - = "lib" ++ getHSLibraryName lib ++ "-" ++ comp <.> staticLibExtension platform - where comp = display compilerFlavor ++ display compilerVersion - --- ------------------------------------------------------------ --- * Platform file extensions --- ------------------------------------------------------------ - --- | Default extension for executable files on the current platform. --- (typically @\"\"@ on Unix and @\"exe\"@ on Windows or OS\/2) -exeExtension :: Platform -> String -exeExtension (Platform _arch os) = case os of - Windows -> "exe" - _ -> "" - --- | Extension for object files. For GHC the extension is @\"o\"@. -objExtension :: String -objExtension = "o" - --- | Extension for dynamically linked (or shared) libraries --- (typically @\"so\"@ on Unix and @\"dll\"@ on Windows) -dllExtension :: Platform -> String -dllExtension (Platform _arch os)= case os of - Windows -> "dll" - OSX -> "dylib" - _ -> "so" - --- | Extension for static libraries --- --- TODO: Here, as well as in dllExtension, it's really the target OS that we're --- interested in, not the build OS. 
-staticLibExtension :: Platform -> String -staticLibExtension (Platform _arch os) = case os of - Windows -> "lib" - _ -> "a" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/BuildTarget.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/BuildTarget.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/BuildTarget.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/BuildTarget.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1038 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.BuildTargets --- Copyright : (c) Duncan Coutts 2012 --- License : BSD-like --- --- Maintainer : duncan@community.haskell.org --- --- Handling for user-specified build targets ------------------------------------------------------------------------------ -module Distribution.Simple.BuildTarget ( - -- * Main interface - readTargetInfos, - readBuildTargets, -- in case you don't have LocalBuildInfo - - -- * Build targets - BuildTarget(..), - showBuildTarget, - QualLevel(..), - buildTargetComponentName, - - -- * Parsing user build targets - UserBuildTarget, - readUserBuildTargets, - showUserBuildTarget, - UserBuildTargetProblem(..), - reportUserBuildTargetProblems, - - -- * Resolving build targets - resolveBuildTargets, - BuildTargetProblem(..), - reportBuildTargetProblems, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.TargetInfo -import Distribution.Types.LocalBuildInfo -import Distribution.Types.ComponentRequestedSpec -import Distribution.Types.ForeignLib -import Distribution.Types.UnqualComponentName - -import Distribution.Package -import Distribution.PackageDescription -import 
Distribution.ModuleName -import Distribution.Simple.LocalBuildInfo -import Distribution.Text -import Distribution.Simple.Utils -import Distribution.Verbosity - -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.ReadP ( (+++), (<++) ) -import Distribution.ParseUtils ( readPToMaybe ) - -import Control.Monad ( msum ) -import Data.List ( stripPrefix, groupBy, partition ) -import Data.Either ( partitionEithers ) -import System.FilePath as FilePath - ( dropExtension, normalise, splitDirectories, joinPath, splitPath - , hasTrailingPathSeparator ) -import System.Directory ( doesFileExist, doesDirectoryExist ) -import qualified Data.Map as Map - --- | Take a list of 'String' build targets, and parse and validate them --- into actual 'TargetInfo's to be built/registered/whatever. -readTargetInfos :: Verbosity -> PackageDescription -> LocalBuildInfo -> [String] -> IO [TargetInfo] -readTargetInfos verbosity pkg_descr lbi args = do - build_targets <- readBuildTargets verbosity pkg_descr args - checkBuildTargets verbosity pkg_descr lbi build_targets - --- ------------------------------------------------------------ --- * User build targets --- ------------------------------------------------------------ - --- | Various ways that a user may specify a build target. --- -data UserBuildTarget = - - -- | A target specified by a single name. This could be a component - -- module or file. - -- - -- > cabal build foo - -- > cabal build Data.Foo - -- > cabal build Data/Foo.hs Data/Foo.hsc - -- - UserBuildTargetSingle String - - -- | A target specified by a qualifier and name. This could be a component - -- name qualified by the component namespace kind, or a module or file - -- qualified by the component name. 
- -- - -- > cabal build lib:foo exe:foo - -- > cabal build foo:Data.Foo - -- > cabal build foo:Data/Foo.hs - -- - | UserBuildTargetDouble String String - - -- | A fully qualified target, either a module or file qualified by a - -- component name with the component namespace kind. - -- - -- > cabal build lib:foo:Data/Foo.hs exe:foo:Data/Foo.hs - -- > cabal build lib:foo:Data.Foo exe:foo:Data.Foo - -- - | UserBuildTargetTriple String String String - deriving (Show, Eq, Ord) - - --- ------------------------------------------------------------ --- * Resolved build targets --- ------------------------------------------------------------ - --- | A fully resolved build target. --- -data BuildTarget = - - -- | A specific component - -- - BuildTargetComponent ComponentName - - -- | A specific module within a specific component. - -- - | BuildTargetModule ComponentName ModuleName - - -- | A specific file within a specific component. - -- - | BuildTargetFile ComponentName FilePath - deriving (Eq, Show, Generic) - -instance Binary BuildTarget - -buildTargetComponentName :: BuildTarget -> ComponentName -buildTargetComponentName (BuildTargetComponent cn) = cn -buildTargetComponentName (BuildTargetModule cn _) = cn -buildTargetComponentName (BuildTargetFile cn _) = cn - --- | Read a list of user-supplied build target strings and resolve them to --- 'BuildTarget's according to a 'PackageDescription'. If there are problems --- with any of the targets e.g. they don't exist or are misformatted, throw an --- 'IOException'. 
-readBuildTargets :: Verbosity -> PackageDescription -> [String] -> IO [BuildTarget] -readBuildTargets verbosity pkg targetStrs = do - let (uproblems, utargets) = readUserBuildTargets targetStrs - reportUserBuildTargetProblems verbosity uproblems - - utargets' <- traverse checkTargetExistsAsFile utargets - - let (bproblems, btargets) = resolveBuildTargets pkg utargets' - reportBuildTargetProblems verbosity bproblems - - return btargets - -checkTargetExistsAsFile :: UserBuildTarget -> NoCallStackIO (UserBuildTarget, Bool) -checkTargetExistsAsFile t = do - fexists <- existsAsFile (fileComponentOfTarget t) - return (t, fexists) - - where - existsAsFile f = do - exists <- doesFileExist f - case splitPath f of - (d:_) | hasTrailingPathSeparator d -> doesDirectoryExist d - (d:_:_) | not exists -> doesDirectoryExist d - _ -> return exists - - fileComponentOfTarget (UserBuildTargetSingle s1) = s1 - fileComponentOfTarget (UserBuildTargetDouble _ s2) = s2 - fileComponentOfTarget (UserBuildTargetTriple _ _ s3) = s3 - - --- ------------------------------------------------------------ --- * Parsing user targets --- ------------------------------------------------------------ - -readUserBuildTargets :: [String] -> ([UserBuildTargetProblem] - ,[UserBuildTarget]) -readUserBuildTargets = partitionEithers . 
map readUserBuildTarget - -readUserBuildTarget :: String -> Either UserBuildTargetProblem - UserBuildTarget -readUserBuildTarget targetstr = - case readPToMaybe parseTargetApprox targetstr of - Nothing -> Left (UserBuildTargetUnrecognised targetstr) - Just tgt -> Right tgt - - where - parseTargetApprox :: Parse.ReadP r UserBuildTarget - parseTargetApprox = - (do a <- tokenQ - return (UserBuildTargetSingle a)) - +++ (do a <- token - _ <- Parse.char ':' - b <- tokenQ - return (UserBuildTargetDouble a b)) - +++ (do a <- token - _ <- Parse.char ':' - b <- token - _ <- Parse.char ':' - c <- tokenQ - return (UserBuildTargetTriple a b c)) - - token = Parse.munch1 (\x -> not (isSpace x) && x /= ':') - tokenQ = parseHaskellString <++ token - parseHaskellString :: Parse.ReadP r String - parseHaskellString = Parse.readS_to_P reads - -data UserBuildTargetProblem - = UserBuildTargetUnrecognised String - deriving Show - -reportUserBuildTargetProblems :: Verbosity -> [UserBuildTargetProblem] -> IO () -reportUserBuildTargetProblems verbosity problems = do - case [ target | UserBuildTargetUnrecognised target <- problems ] of - [] -> return () - target -> - die' verbosity $ unlines - [ "Unrecognised build target '" ++ name ++ "'." - | name <- target ] - ++ "Examples:\n" - ++ " - build foo -- component name " - ++ "(library, executable, test-suite or benchmark)\n" - ++ " - build Data.Foo -- module name\n" - ++ " - build Data/Foo.hsc -- file name\n" - ++ " - build lib:foo exe:foo -- component qualified by kind\n" - ++ " - build foo:Data.Foo -- module qualified by component\n" - ++ " - build foo:Data/Foo.hsc -- file qualified by component" - -showUserBuildTarget :: UserBuildTarget -> String -showUserBuildTarget = intercalate ":" . 
getComponents - where - getComponents (UserBuildTargetSingle s1) = [s1] - getComponents (UserBuildTargetDouble s1 s2) = [s1,s2] - getComponents (UserBuildTargetTriple s1 s2 s3) = [s1,s2,s3] - --- | Unless you use 'QL1', this function is PARTIAL; --- use 'showBuildTarget' instead. -showBuildTarget' :: QualLevel -> PackageId -> BuildTarget -> String -showBuildTarget' ql pkgid bt = - showUserBuildTarget (renderBuildTarget ql bt pkgid) - --- | Unambiguously render a 'BuildTarget', so that it can --- be parsed in all situations. -showBuildTarget :: PackageId -> BuildTarget -> String -showBuildTarget pkgid t = - showBuildTarget' (qlBuildTarget t) pkgid t - where - qlBuildTarget BuildTargetComponent{} = QL2 - qlBuildTarget _ = QL3 - - --- ------------------------------------------------------------ --- * Resolving user targets to build targets --- ------------------------------------------------------------ - -{- -stargets = - [ BuildTargetComponent (CExeName "foo") - , BuildTargetModule (CExeName "foo") (mkMn "Foo") - , BuildTargetModule (CExeName "tst") (mkMn "Foo") - ] - where - mkMn :: String -> ModuleName - mkMn = fromJust . simpleParse - -ex_pkgid :: PackageIdentifier -Just ex_pkgid = simpleParse "thelib" --} - --- | Given a bunch of user-specified targets, try to resolve what it is they --- refer to. --- -resolveBuildTargets :: PackageDescription - -> [(UserBuildTarget, Bool)] - -> ([BuildTargetProblem], [BuildTarget]) -resolveBuildTargets pkg = partitionEithers - . 
map (uncurry (resolveBuildTarget pkg)) - -resolveBuildTarget :: PackageDescription -> UserBuildTarget -> Bool - -> Either BuildTargetProblem BuildTarget -resolveBuildTarget pkg userTarget fexists = - case findMatch (matchBuildTarget pkg userTarget fexists) of - Unambiguous target -> Right target - Ambiguous targets -> Left (BuildTargetAmbiguous userTarget targets') - where targets' = disambiguateBuildTargets - (packageId pkg) - userTarget - targets - None errs -> Left (classifyMatchErrors errs) - - where - classifyMatchErrors errs - | not (null expected) = let (things, got:_) = unzip expected in - BuildTargetExpected userTarget things got - | not (null nosuch) = BuildTargetNoSuch userTarget nosuch - | otherwise = error $ "resolveBuildTarget: internal error in matching" - where - expected = [ (thing, got) | MatchErrorExpected thing got <- errs ] - nosuch = [ (thing, got) | MatchErrorNoSuch thing got <- errs ] - - -data BuildTargetProblem - = BuildTargetExpected UserBuildTarget [String] String - -- ^ [expected thing] (actually got) - | BuildTargetNoSuch UserBuildTarget [(String, String)] - -- ^ [(no such thing, actually got)] - | BuildTargetAmbiguous UserBuildTarget [(UserBuildTarget, BuildTarget)] - deriving Show - - -disambiguateBuildTargets :: PackageId -> UserBuildTarget -> [BuildTarget] - -> [(UserBuildTarget, BuildTarget)] -disambiguateBuildTargets pkgid original = - disambiguate (userTargetQualLevel original) - where - disambiguate ql ts - | null amb = unamb - | otherwise = unamb ++ disambiguate (succ ql) amb - where - (amb, unamb) = step ql ts - - userTargetQualLevel (UserBuildTargetSingle _ ) = QL1 - userTargetQualLevel (UserBuildTargetDouble _ _ ) = QL2 - userTargetQualLevel (UserBuildTargetTriple _ _ _) = QL3 - - step :: QualLevel -> [BuildTarget] - -> ([BuildTarget], [(UserBuildTarget, BuildTarget)]) - step ql = (\(amb, unamb) -> (map snd $ concat amb, concat unamb)) - . partition (\g -> length g > 1) - . groupBy (equating fst) - . 
sortBy (comparing fst) - . map (\t -> (renderBuildTarget ql t pkgid, t)) - -data QualLevel = QL1 | QL2 | QL3 - deriving (Enum, Show) - -renderBuildTarget :: QualLevel -> BuildTarget -> PackageId -> UserBuildTarget -renderBuildTarget ql target pkgid = - case ql of - QL1 -> UserBuildTargetSingle s1 where s1 = single target - QL2 -> UserBuildTargetDouble s1 s2 where (s1, s2) = double target - QL3 -> UserBuildTargetTriple s1 s2 s3 where (s1, s2, s3) = triple target - - where - single (BuildTargetComponent cn ) = dispCName cn - single (BuildTargetModule _ m) = display m - single (BuildTargetFile _ f) = f - - double (BuildTargetComponent cn ) = (dispKind cn, dispCName cn) - double (BuildTargetModule cn m) = (dispCName cn, display m) - double (BuildTargetFile cn f) = (dispCName cn, f) - - triple (BuildTargetComponent _ ) = error "triple BuildTargetComponent" - triple (BuildTargetModule cn m) = (dispKind cn, dispCName cn, display m) - triple (BuildTargetFile cn f) = (dispKind cn, dispCName cn, f) - - dispCName = componentStringName pkgid - dispKind = showComponentKindShort . componentKind - -reportBuildTargetProblems :: Verbosity -> [BuildTargetProblem] -> IO () -reportBuildTargetProblems verbosity problems = do - - case [ (t, e, g) | BuildTargetExpected t e g <- problems ] of - [] -> return () - targets -> - die' verbosity $ unlines - [ "Unrecognised build target '" ++ showUserBuildTarget target - ++ "'.\n" - ++ "Expected a " ++ intercalate " or " expected - ++ ", rather than '" ++ got ++ "'." - | (target, expected, got) <- targets ] - - case [ (t, e) | BuildTargetNoSuch t e <- problems ] of - [] -> return () - targets -> - die' verbosity $ unlines - [ "Unknown build target '" ++ showUserBuildTarget target - ++ "'.\nThere is no " - ++ intercalate " or " [ mungeThing thing ++ " '" ++ got ++ "'" - | (thing, got) <- nosuch ] ++ "." 
- | (target, nosuch) <- targets ] - where - mungeThing "file" = "file target" - mungeThing thing = thing - - case [ (t, ts) | BuildTargetAmbiguous t ts <- problems ] of - [] -> return () - targets -> - die' verbosity $ unlines - [ "Ambiguous build target '" ++ showUserBuildTarget target - ++ "'. It could be:\n " - ++ unlines [ " "++ showUserBuildTarget ut ++ - " (" ++ showBuildTargetKind bt ++ ")" - | (ut, bt) <- amb ] - | (target, amb) <- targets ] - - where - showBuildTargetKind (BuildTargetComponent _ ) = "component" - showBuildTargetKind (BuildTargetModule _ _) = "module" - showBuildTargetKind (BuildTargetFile _ _) = "file" - - ----------------------------------- --- Top level BuildTarget matcher --- - -matchBuildTarget :: PackageDescription - -> UserBuildTarget -> Bool -> Match BuildTarget -matchBuildTarget pkg = \utarget fexists -> - case utarget of - UserBuildTargetSingle str1 -> - matchBuildTarget1 cinfo str1 fexists - - UserBuildTargetDouble str1 str2 -> - matchBuildTarget2 cinfo str1 str2 fexists - - UserBuildTargetTriple str1 str2 str3 -> - matchBuildTarget3 cinfo str1 str2 str3 fexists - where - cinfo = pkgComponentInfo pkg - -matchBuildTarget1 :: [ComponentInfo] -> String -> Bool -> Match BuildTarget -matchBuildTarget1 cinfo str1 fexists = - matchComponent1 cinfo str1 - `matchPlusShadowing` matchModule1 cinfo str1 - `matchPlusShadowing` matchFile1 cinfo str1 fexists - - -matchBuildTarget2 :: [ComponentInfo] -> String -> String -> Bool - -> Match BuildTarget -matchBuildTarget2 cinfo str1 str2 fexists = - matchComponent2 cinfo str1 str2 - `matchPlusShadowing` matchModule2 cinfo str1 str2 - `matchPlusShadowing` matchFile2 cinfo str1 str2 fexists - - -matchBuildTarget3 :: [ComponentInfo] -> String -> String -> String -> Bool - -> Match BuildTarget -matchBuildTarget3 cinfo str1 str2 str3 fexists = - matchModule3 cinfo str1 str2 str3 - `matchPlusShadowing` matchFile3 cinfo str1 str2 str3 fexists - - -data ComponentInfo = ComponentInfo { - cinfoName :: 
ComponentName, - cinfoStrName :: ComponentStringName, - cinfoSrcDirs :: [FilePath], - cinfoModules :: [ModuleName], - cinfoHsFiles :: [FilePath], -- other hs files (like main.hs) - cinfoAsmFiles:: [FilePath], - cinfoCmmFiles:: [FilePath], - cinfoCFiles :: [FilePath], - cinfoCxxFiles:: [FilePath], - cinfoJsFiles :: [FilePath] - } - -type ComponentStringName = String - -pkgComponentInfo :: PackageDescription -> [ComponentInfo] -pkgComponentInfo pkg = - [ ComponentInfo { - cinfoName = componentName c, - cinfoStrName = componentStringName pkg (componentName c), - cinfoSrcDirs = hsSourceDirs bi, - cinfoModules = componentModules c, - cinfoHsFiles = componentHsFiles c, - cinfoAsmFiles= asmSources bi, - cinfoCmmFiles= cmmSources bi, - cinfoCFiles = cSources bi, - cinfoCxxFiles= cxxSources bi, - cinfoJsFiles = jsSources bi - } - | c <- pkgComponents pkg - , let bi = componentBuildInfo c ] - -componentStringName :: Package pkg => pkg -> ComponentName -> ComponentStringName -componentStringName pkg CLibName = display (packageName pkg) -componentStringName _ (CSubLibName name) = unUnqualComponentName name -componentStringName _ (CFLibName name) = unUnqualComponentName name -componentStringName _ (CExeName name) = unUnqualComponentName name -componentStringName _ (CTestName name) = unUnqualComponentName name -componentStringName _ (CBenchName name) = unUnqualComponentName name - -componentModules :: Component -> [ModuleName] --- TODO: Use of 'explicitLibModules' here is a bit wrong: --- a user could very well ask to build a specific signature --- that was inherited from other packages. To fix this --- we have to plumb 'LocalBuildInfo' through this code. --- Fortunately, this is only used by 'pkgComponentInfo' --- Please don't export this function unless you plan on fixing --- this. 
-componentModules (CLib lib) = explicitLibModules lib -componentModules (CFLib flib) = foreignLibModules flib -componentModules (CExe exe) = exeModules exe -componentModules (CTest test) = testModules test -componentModules (CBench bench) = benchmarkModules bench - -componentHsFiles :: Component -> [FilePath] -componentHsFiles (CExe exe) = [modulePath exe] -componentHsFiles (CTest TestSuite { - testInterface = TestSuiteExeV10 _ mainfile - }) = [mainfile] -componentHsFiles (CBench Benchmark { - benchmarkInterface = BenchmarkExeV10 _ mainfile - }) = [mainfile] -componentHsFiles _ = [] - -{- -ex_cs :: [ComponentInfo] -ex_cs = - [ (mkC (CExeName "foo") ["src1", "src1/src2"] ["Foo", "Src2.Bar", "Bar"]) - , (mkC (CExeName "tst") ["src1", "test"] ["Foo"]) - ] - where - mkC n ds ms = ComponentInfo n (componentStringName pkgid n) ds (map mkMn ms) - mkMn :: String -> ModuleName - mkMn = fromJust . simpleParse - pkgid :: PackageIdentifier - Just pkgid = simpleParse "thelib" --} - ------------------------------- --- Matching component kinds --- - -data ComponentKind = LibKind | FLibKind | ExeKind | TestKind | BenchKind - deriving (Eq, Ord, Show) - -componentKind :: ComponentName -> ComponentKind -componentKind CLibName = LibKind -componentKind (CSubLibName _) = LibKind -componentKind (CFLibName _) = FLibKind -componentKind (CExeName _) = ExeKind -componentKind (CTestName _) = TestKind -componentKind (CBenchName _) = BenchKind - -cinfoKind :: ComponentInfo -> ComponentKind -cinfoKind = componentKind . 
cinfoName - -matchComponentKind :: String -> Match ComponentKind -matchComponentKind s - | s `elem` ["lib", "library"] = return' LibKind - | s `elem` ["flib", "foreign-lib", "foreign-library"] = return' FLibKind - | s `elem` ["exe", "executable"] = return' ExeKind - | s `elem` ["tst", "test", "test-suite"] = return' TestKind - | s `elem` ["bench", "benchmark"] = return' BenchKind - | otherwise = matchErrorExpected "component kind" s - where - return' ck = increaseConfidence >> return ck - -showComponentKind :: ComponentKind -> String -showComponentKind LibKind = "library" -showComponentKind FLibKind = "foreign-library" -showComponentKind ExeKind = "executable" -showComponentKind TestKind = "test-suite" -showComponentKind BenchKind = "benchmark" - -showComponentKindShort :: ComponentKind -> String -showComponentKindShort LibKind = "lib" -showComponentKindShort FLibKind = "flib" -showComponentKindShort ExeKind = "exe" -showComponentKindShort TestKind = "test" -showComponentKindShort BenchKind = "bench" - ------------------------------- --- Matching component targets --- - -matchComponent1 :: [ComponentInfo] -> String -> Match BuildTarget -matchComponent1 cs = \str1 -> do - guardComponentName str1 - c <- matchComponentName cs str1 - return (BuildTargetComponent (cinfoName c)) - -matchComponent2 :: [ComponentInfo] -> String -> String -> Match BuildTarget -matchComponent2 cs = \str1 str2 -> do - ckind <- matchComponentKind str1 - guardComponentName str2 - c <- matchComponentKindAndName cs ckind str2 - return (BuildTargetComponent (cinfoName c)) - --- utils: - -guardComponentName :: String -> Match () -guardComponentName s - | all validComponentChar s - && not (null s) = increaseConfidence - | otherwise = matchErrorExpected "component name" s - where - validComponentChar c = isAlphaNum c || c == '.' 
- || c == '_' || c == '-' || c == '\'' - -matchComponentName :: [ComponentInfo] -> String -> Match ComponentInfo -matchComponentName cs str = - orNoSuchThing "component" str - $ increaseConfidenceFor - $ matchInexactly caseFold - [ (cinfoStrName c, c) | c <- cs ] - str - -matchComponentKindAndName :: [ComponentInfo] -> ComponentKind -> String - -> Match ComponentInfo -matchComponentKindAndName cs ckind str = - orNoSuchThing (showComponentKind ckind ++ " component") str - $ increaseConfidenceFor - $ matchInexactly (\(ck, cn) -> (ck, caseFold cn)) - [ ((cinfoKind c, cinfoStrName c), c) | c <- cs ] - (ckind, str) - - ------------------------------- --- Matching module targets --- - -matchModule1 :: [ComponentInfo] -> String -> Match BuildTarget -matchModule1 cs = \str1 -> do - guardModuleName str1 - nubMatchErrors $ do - c <- tryEach cs - let ms = cinfoModules c - m <- matchModuleName ms str1 - return (BuildTargetModule (cinfoName c) m) - -matchModule2 :: [ComponentInfo] -> String -> String -> Match BuildTarget -matchModule2 cs = \str1 str2 -> do - guardComponentName str1 - guardModuleName str2 - c <- matchComponentName cs str1 - let ms = cinfoModules c - m <- matchModuleName ms str2 - return (BuildTargetModule (cinfoName c) m) - -matchModule3 :: [ComponentInfo] -> String -> String -> String - -> Match BuildTarget -matchModule3 cs str1 str2 str3 = do - ckind <- matchComponentKind str1 - guardComponentName str2 - c <- matchComponentKindAndName cs ckind str2 - guardModuleName str3 - let ms = cinfoModules c - m <- matchModuleName ms str3 - return (BuildTargetModule (cinfoName c) m) - --- utils: - -guardModuleName :: String -> Match () -guardModuleName s - | all validModuleChar s - && not (null s) = increaseConfidence - | otherwise = matchErrorExpected "module name" s - where - validModuleChar c = isAlphaNum c || c == '.' 
|| c == '_' || c == '\'' - -matchModuleName :: [ModuleName] -> String -> Match ModuleName -matchModuleName ms str = - orNoSuchThing "module" str - $ increaseConfidenceFor - $ matchInexactly caseFold - [ (display m, m) - | m <- ms ] - str - - ------------------------------- --- Matching file targets --- - -matchFile1 :: [ComponentInfo] -> String -> Bool -> Match BuildTarget -matchFile1 cs str1 exists = - nubMatchErrors $ do - c <- tryEach cs - filepath <- matchComponentFile c str1 exists - return (BuildTargetFile (cinfoName c) filepath) - - -matchFile2 :: [ComponentInfo] -> String -> String -> Bool -> Match BuildTarget -matchFile2 cs str1 str2 exists = do - guardComponentName str1 - c <- matchComponentName cs str1 - filepath <- matchComponentFile c str2 exists - return (BuildTargetFile (cinfoName c) filepath) - - -matchFile3 :: [ComponentInfo] -> String -> String -> String -> Bool - -> Match BuildTarget -matchFile3 cs str1 str2 str3 exists = do - ckind <- matchComponentKind str1 - guardComponentName str2 - c <- matchComponentKindAndName cs ckind str2 - filepath <- matchComponentFile c str3 exists - return (BuildTargetFile (cinfoName c) filepath) - - -matchComponentFile :: ComponentInfo -> String -> Bool -> Match FilePath -matchComponentFile c str fexists = - expecting "file" str $ - matchPlus - (matchFileExists str fexists) - (matchPlusShadowing - (msum [ matchModuleFileRooted dirs ms str - , matchOtherFileRooted dirs hsFiles str ]) - (msum [ matchModuleFileUnrooted ms str - , matchOtherFileUnrooted hsFiles str - , matchOtherFileUnrooted cFiles str - , matchOtherFileUnrooted jsFiles str ])) - where - dirs = cinfoSrcDirs c - ms = cinfoModules c - hsFiles = cinfoHsFiles c - cFiles = cinfoCFiles c - jsFiles = cinfoJsFiles c - - --- utils - -matchFileExists :: FilePath -> Bool -> Match a -matchFileExists _ False = mzero -matchFileExists fname True = do increaseConfidence - matchErrorNoSuch "file" fname - -matchModuleFileUnrooted :: [ModuleName] -> String -> Match 
FilePath -matchModuleFileUnrooted ms str = do - let filepath = normalise str - _ <- matchModuleFileStem ms filepath - return filepath - -matchModuleFileRooted :: [FilePath] -> [ModuleName] -> String -> Match FilePath -matchModuleFileRooted dirs ms str = nubMatches $ do - let filepath = normalise str - filepath' <- matchDirectoryPrefix dirs filepath - _ <- matchModuleFileStem ms filepath' - return filepath - -matchModuleFileStem :: [ModuleName] -> FilePath -> Match ModuleName -matchModuleFileStem ms = - increaseConfidenceFor - . matchInexactly caseFold - [ (toFilePath m, m) | m <- ms ] - . dropExtension - -matchOtherFileRooted :: [FilePath] -> [FilePath] -> FilePath -> Match FilePath -matchOtherFileRooted dirs fs str = do - let filepath = normalise str - filepath' <- matchDirectoryPrefix dirs filepath - _ <- matchFile fs filepath' - return filepath - -matchOtherFileUnrooted :: [FilePath] -> FilePath -> Match FilePath -matchOtherFileUnrooted fs str = do - let filepath = normalise str - _ <- matchFile fs filepath - return filepath - -matchFile :: [FilePath] -> FilePath -> Match FilePath -matchFile fs = increaseConfidenceFor - . matchInexactly caseFold [ (f, f) | f <- fs ] - -matchDirectoryPrefix :: [FilePath] -> FilePath -> Match FilePath -matchDirectoryPrefix dirs filepath = - exactMatches $ - catMaybes - [ stripDirectory (normalise dir) filepath | dir <- dirs ] - where - stripDirectory :: FilePath -> FilePath -> Maybe FilePath - stripDirectory dir fp = - joinPath `fmap` stripPrefix (splitDirectories dir) (splitDirectories fp) - - ------------------------------- --- Matching monad --- - --- | A matcher embodies a way to match some input as being some recognised --- value. In particular it deals with multiple and ambiguous matches. --- --- There are various matcher primitives ('matchExactly', 'matchInexactly'), --- ways to combine matchers ('ambiguousWith', 'shadows') and finally we can --- run a matcher against an input using 'findMatch'. 
--- - -data Match a = NoMatch Confidence [MatchError] - | ExactMatch Confidence [a] - | InexactMatch Confidence [a] - deriving Show - -type Confidence = Int - -data MatchError = MatchErrorExpected String String - | MatchErrorNoSuch String String - deriving (Show, Eq) - - -instance Alternative Match where - empty = mzero - (<|>) = mplus - -instance MonadPlus Match where - mzero = matchZero - mplus = matchPlus - -matchZero :: Match a -matchZero = NoMatch 0 [] - --- | Combine two matchers. Exact matches are used over inexact matches --- but if we have multiple exact, or inexact then the we collect all the --- ambiguous matches. --- -matchPlus :: Match a -> Match a -> Match a -matchPlus (ExactMatch d1 xs) (ExactMatch d2 xs') = - ExactMatch (max d1 d2) (xs ++ xs') -matchPlus a@(ExactMatch _ _ ) (InexactMatch _ _ ) = a -matchPlus a@(ExactMatch _ _ ) (NoMatch _ _ ) = a -matchPlus (InexactMatch _ _ ) b@(ExactMatch _ _ ) = b -matchPlus (InexactMatch d1 xs) (InexactMatch d2 xs') = - InexactMatch (max d1 d2) (xs ++ xs') -matchPlus a@(InexactMatch _ _ ) (NoMatch _ _ ) = a -matchPlus (NoMatch _ _ ) b@(ExactMatch _ _ ) = b -matchPlus (NoMatch _ _ ) b@(InexactMatch _ _ ) = b -matchPlus a@(NoMatch d1 ms) b@(NoMatch d2 ms') - | d1 > d2 = a - | d1 < d2 = b - | otherwise = NoMatch d1 (ms ++ ms') - --- | Combine two matchers. This is similar to 'ambiguousWith' with the --- difference that an exact match from the left matcher shadows any exact --- match on the right. Inexact matches are still collected however. 
--- -matchPlusShadowing :: Match a -> Match a -> Match a -matchPlusShadowing a@(ExactMatch _ _) (ExactMatch _ _) = a -matchPlusShadowing a b = matchPlus a b - -instance Functor Match where - fmap _ (NoMatch d ms) = NoMatch d ms - fmap f (ExactMatch d xs) = ExactMatch d (fmap f xs) - fmap f (InexactMatch d xs) = InexactMatch d (fmap f xs) - -instance Applicative Match where - pure a = ExactMatch 0 [a] - (<*>) = ap - -instance Monad Match where - return = pure - - NoMatch d ms >>= _ = NoMatch d ms - ExactMatch d xs >>= f = addDepth d - $ foldr matchPlus matchZero (map f xs) - InexactMatch d xs >>= f = addDepth d . forceInexact - $ foldr matchPlus matchZero (map f xs) - -addDepth :: Confidence -> Match a -> Match a -addDepth d' (NoMatch d msgs) = NoMatch (d'+d) msgs -addDepth d' (ExactMatch d xs) = ExactMatch (d'+d) xs -addDepth d' (InexactMatch d xs) = InexactMatch (d'+d) xs - -forceInexact :: Match a -> Match a -forceInexact (ExactMatch d ys) = InexactMatch d ys -forceInexact m = m - ------------------------------- --- Various match primitives --- - -matchErrorExpected, matchErrorNoSuch :: String -> String -> Match a -matchErrorExpected thing got = NoMatch 0 [MatchErrorExpected thing got] -matchErrorNoSuch thing got = NoMatch 0 [MatchErrorNoSuch thing got] - -expecting :: String -> String -> Match a -> Match a -expecting thing got (NoMatch 0 _) = matchErrorExpected thing got -expecting _ _ m = m - -orNoSuchThing :: String -> String -> Match a -> Match a -orNoSuchThing thing got (NoMatch 0 _) = matchErrorNoSuch thing got -orNoSuchThing _ _ m = m - -increaseConfidence :: Match () -increaseConfidence = ExactMatch 1 [()] - -increaseConfidenceFor :: Match a -> Match a -increaseConfidenceFor m = m >>= \r -> increaseConfidence >> return r - -nubMatches :: Eq a => Match a -> Match a -nubMatches (NoMatch d msgs) = NoMatch d msgs -nubMatches (ExactMatch d xs) = ExactMatch d (nub xs) -nubMatches (InexactMatch d xs) = InexactMatch d (nub xs) - -nubMatchErrors :: Match a -> 
Match a -nubMatchErrors (NoMatch d msgs) = NoMatch d (nub msgs) -nubMatchErrors (ExactMatch d xs) = ExactMatch d xs -nubMatchErrors (InexactMatch d xs) = InexactMatch d xs - --- | Lift a list of matches to an exact match. --- -exactMatches, inexactMatches :: [a] -> Match a - -exactMatches [] = matchZero -exactMatches xs = ExactMatch 0 xs - -inexactMatches [] = matchZero -inexactMatches xs = InexactMatch 0 xs - -tryEach :: [a] -> Match a -tryEach = exactMatches - - ------------------------------- --- Top level match runner --- - --- | Given a matcher and a key to look up, use the matcher to find all the --- possible matches. There may be 'None', a single 'Unambiguous' match or --- you may have an 'Ambiguous' match with several possibilities. --- -findMatch :: Eq b => Match b -> MaybeAmbiguous b -findMatch match = - case match of - NoMatch _ msgs -> None (nub msgs) - ExactMatch _ xs -> checkAmbiguous xs - InexactMatch _ xs -> checkAmbiguous xs - where - checkAmbiguous xs = case nub xs of - [x] -> Unambiguous x - xs' -> Ambiguous xs' - -data MaybeAmbiguous a = None [MatchError] | Unambiguous a | Ambiguous [a] - deriving Show - - ------------------------------- --- Basic matchers --- - -{- --- | A primitive matcher that looks up a value in a finite 'Map'. The --- value must match exactly. --- -matchExactly :: forall a b. Ord a => [(a, b)] -> (a -> Match b) -matchExactly xs = - \x -> case Map.lookup x m of - Nothing -> matchZero - Just ys -> ExactMatch 0 ys - where - m :: Ord a => Map a [b] - m = Map.fromListWith (++) [ (k,[x]) | (k,x) <- xs ] --} - --- | A primitive matcher that looks up a value in a finite 'Map'. It checks --- for an exact or inexact match. We get an inexact match if the match --- is not exact, but the canonical forms match. It takes a canonicalisation --- function for this purpose. 
--- --- So for example if we used string case fold as the canonicalisation --- function, then we would get case insensitive matching (but it will still --- report an exact match when the case matches too). --- -matchInexactly :: (Ord a, Ord a') => - (a -> a') -> - [(a, b)] -> (a -> Match b) -matchInexactly cannonicalise xs = - \x -> case Map.lookup x m of - Just ys -> exactMatches ys - Nothing -> case Map.lookup (cannonicalise x) m' of - Just ys -> inexactMatches ys - Nothing -> matchZero - where - m = Map.fromListWith (++) [ (k,[x]) | (k,x) <- xs ] - - -- the map of canonicalised keys to groups of inexact matches - m' = Map.mapKeysWith (++) cannonicalise m - - - ------------------------------- --- Utils --- - -caseFold :: String -> String -caseFold = lowercase - - --- | Check that the given build targets are valid in the current context. --- --- Also swizzle into a more convenient form. --- -checkBuildTargets :: Verbosity -> PackageDescription -> LocalBuildInfo -> [BuildTarget] - -> IO [TargetInfo] -checkBuildTargets _ pkg_descr lbi [] = - return (allTargetsInBuildOrder' pkg_descr lbi) - -checkBuildTargets verbosity pkg_descr lbi targets = do - - let (enabled, disabled) = - partitionEithers - [ case componentDisabledReason (componentEnabledSpec lbi) comp of - Nothing -> Left target' - Just reason -> Right (cname, reason) - | target <- targets - , let target'@(cname,_) = swizzleTarget target - , let comp = getComponent pkg_descr cname ] - - case disabled of - [] -> return () - ((cname,reason):_) -> die' verbosity $ formatReason (showComponentName cname) reason - - for_ [ (c, t) | (c, Just t) <- enabled ] $ \(c, t) -> - warn verbosity $ "Ignoring '" ++ either display id t ++ ". The whole " - ++ showComponentName c ++ " will be processed. 
(Support for " - ++ "module and file targets has not been implemented yet.)" - - -- Pick out the actual CLBIs for each of these cnames - enabled' <- for enabled $ \(cname, _) -> do - case componentNameTargets' pkg_descr lbi cname of - [] -> error "checkBuildTargets: nothing enabled" - [target] -> return target - _targets -> error "checkBuildTargets: multiple copies enabled" - - return enabled' - - where - swizzleTarget (BuildTargetComponent c) = (c, Nothing) - swizzleTarget (BuildTargetModule c m) = (c, Just (Left m)) - swizzleTarget (BuildTargetFile c f) = (c, Just (Right f)) - - formatReason cn DisabledComponent = - "Cannot process the " ++ cn ++ " because the component is marked " - ++ "as disabled in the .cabal file." - formatReason cn DisabledAllTests = - "Cannot process the " ++ cn ++ " because test suites are not " - ++ "enabled. Run configure with the flag --enable-tests" - formatReason cn DisabledAllBenchmarks = - "Cannot process the " ++ cn ++ " because benchmarks are not " - ++ "enabled. Re-run configure with the flag --enable-benchmarks" - formatReason cn (DisabledAllButOne cn') = - "Cannot process the " ++ cn ++ " because this package was " - ++ "configured only to build " ++ cn' ++ ". Re-run configure " - ++ "with the argument " ++ cn diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/BuildToolDepends.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/BuildToolDepends.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/BuildToolDepends.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/BuildToolDepends.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,96 +0,0 @@ --- | --- --- This modules provides functions for working with both the legacy --- "build-tools" field, and its replacement, "build-tool-depends". 
Prefer using --- the functions contained to access those fields directly. -module Distribution.Simple.BuildToolDepends where - -import Prelude () -import Distribution.Compat.Prelude - -import qualified Data.Map as Map - -import Distribution.Package -import Distribution.PackageDescription -import Distribution.Types.ExeDependency -import Distribution.Types.LegacyExeDependency -import Distribution.Types.UnqualComponentName - --- | Desugar a "build-tools" entry into proper a executable dependency if --- possible. --- --- An entry can be so desguared in two cases: --- --- 1. The name in build-tools matches a locally defined executable. The --- executable dependency produced is on that exe in the current package. --- --- 2. The name in build-tools matches a hard-coded set of known tools. For now, --- the executable dependency produced is one an executable in a package of --- the same, but the hard-coding could just as well be per-key. --- --- The first cases matches first. -desugarBuildTool :: PackageDescription - -> LegacyExeDependency - -> Maybe ExeDependency -desugarBuildTool pkg led = - if foundLocal - then Just $ ExeDependency (packageName pkg) toolName reqVer - else Map.lookup name whiteMap - where - LegacyExeDependency name reqVer = led - toolName = mkUnqualComponentName name - foundLocal = toolName `elem` map exeName (executables pkg) - whitelist = [ "hscolour", "haddock", "happy", "alex", "hsc2hs", "c2hs" - , "cpphs", "greencard", "hspec-discover" - ] - whiteMap = Map.fromList $ flip map whitelist $ \n -> - (n, ExeDependency (mkPackageName n) (mkUnqualComponentName n) reqVer) - --- | Get everything from "build-tool-depends", along with entries from --- "build-tools" that we know how to desugar. --- --- This should almost always be used instead of just accessing the --- `buildToolDepends` field directly. 
-getAllToolDependencies :: PackageDescription - -> BuildInfo - -> [ExeDependency] -getAllToolDependencies pkg bi = - buildToolDepends bi ++ mapMaybe (desugarBuildTool pkg) (buildTools bi) - --- | Does the given executable dependency map to this current package? --- --- This is a tiny function, but used in a number of places. --- --- This function is only sound to call on `BuildInfo`s from the given package --- description. This is because it just filters the package names of each --- dependency, and does not check whether version bounds in fact exclude the --- current package, or the referenced components in fact exist in the current --- package. --- --- This is OK because when a package is loaded, it is checked (in --- `Distribution.Package.Check`) that dependencies matching internal components --- do indeed have version bounds accepting the current package, and any --- depended-on component in the current package actually exists. In fact this --- check is performed by gathering the internal tool dependencies of each --- component of the package according to this module, and ensuring those --- properties on each so-gathered dependency. --- --- version bounds and components of the package are unchecked. This is because --- we sanitize exe deps so that the matching name implies these other --- conditions. -isInternal :: PackageDescription -> ExeDependency -> Bool -isInternal pkg (ExeDependency n _ _) = n == packageName pkg - - --- | Get internal "build-tool-depends", along with internal "build-tools" --- --- This is a tiny function, but used in a number of places. The same --- restrictions that apply to `isInternal` also apply to this function. 
-getAllInternalToolDependencies :: PackageDescription - -> BuildInfo - -> [UnqualComponentName] -getAllInternalToolDependencies pkg bi = - [ toolname - | dep@(ExeDependency _ toolname _) <- getAllToolDependencies pkg bi - , isInternal pkg dep - ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/CCompiler.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/CCompiler.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/CCompiler.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/CCompiler.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,123 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.CCompiler --- Copyright : 2011, Dan Knapp --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This simple package provides types and functions for interacting with --- C compilers. Currently it's just a type enumerating extant C-like --- languages, which we call dialects. - -{- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of Isaac Jones nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -} - -module Distribution.Simple.CCompiler ( - CDialect(..), - cSourceExtensions, - cDialectFilenameExtension, - filenameCDialect - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import System.FilePath - ( takeExtension ) - - --- | Represents a dialect of C. The Monoid instance expresses backward --- compatibility, in the sense that 'mappend a b' is the least inclusive --- dialect which both 'a' and 'b' can be correctly interpreted as. -data CDialect = C - | ObjectiveC - | CPlusPlus - | ObjectiveCPlusPlus - deriving (Eq, Show) - -instance Monoid CDialect where - mempty = C - mappend = (<>) - -instance Semigroup CDialect where - C <> anything = anything - ObjectiveC <> CPlusPlus = ObjectiveCPlusPlus - CPlusPlus <> ObjectiveC = ObjectiveCPlusPlus - _ <> ObjectiveCPlusPlus = ObjectiveCPlusPlus - ObjectiveC <> _ = ObjectiveC - CPlusPlus <> _ = CPlusPlus - ObjectiveCPlusPlus <> _ = ObjectiveCPlusPlus - --- | A list of all file extensions which are recognized as possibly containing --- some dialect of C code. Note that this list is only for source files, --- not for header files. 
-cSourceExtensions :: [String] -cSourceExtensions = ["c", "i", "ii", "m", "mi", "mm", "M", "mii", "cc", "cp", - "cxx", "cpp", "CPP", "c++", "C"] - - --- | Takes a dialect of C and whether code is intended to be passed through --- the preprocessor, and returns a filename extension for containing that --- code. -cDialectFilenameExtension :: CDialect -> Bool -> String -cDialectFilenameExtension C True = "c" -cDialectFilenameExtension C False = "i" -cDialectFilenameExtension ObjectiveC True = "m" -cDialectFilenameExtension ObjectiveC False = "mi" -cDialectFilenameExtension CPlusPlus True = "cpp" -cDialectFilenameExtension CPlusPlus False = "ii" -cDialectFilenameExtension ObjectiveCPlusPlus True = "mm" -cDialectFilenameExtension ObjectiveCPlusPlus False = "mii" - - --- | Infers from a filename's extension the dialect of C which it contains, --- and whether it is intended to be passed through the preprocessor. -filenameCDialect :: String -> Maybe (CDialect, Bool) -filenameCDialect filename = do - extension <- case takeExtension filename of - '.':ext -> Just ext - _ -> Nothing - case extension of - "c" -> return (C, True) - "i" -> return (C, False) - "ii" -> return (CPlusPlus, False) - "m" -> return (ObjectiveC, True) - "mi" -> return (ObjectiveC, False) - "mm" -> return (ObjectiveCPlusPlus, True) - "M" -> return (ObjectiveCPlusPlus, True) - "mii" -> return (ObjectiveCPlusPlus, False) - "cc" -> return (CPlusPlus, True) - "cp" -> return (CPlusPlus, True) - "cxx" -> return (CPlusPlus, True) - "cpp" -> return (CPlusPlus, True) - "CPP" -> return (CPlusPlus, True) - "c++" -> return (CPlusPlus, True) - "C" -> return (CPlusPlus, True) - _ -> Nothing diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Command.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Command.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Command.hs 2018-10-17 15:59:02.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Command.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,621 +0,0 @@ -{-# LANGUAGE ExistentialQuantification #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Command --- Copyright : Duncan Coutts 2007 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : non-portable (ExistentialQuantification) --- --- This is to do with command line handling. The Cabal command line is --- organised into a number of named sub-commands (much like darcs). The --- 'CommandUI' abstraction represents one of these sub-commands, with a name, --- description, a set of flags. Commands can be associated with actions and --- run. It handles some common stuff automatically, like the @--help@ and --- command line completion flags. It is designed to allow other tools make --- derived commands. This feature is used heavily in @cabal-install@. 
- -module Distribution.Simple.Command ( - - -- * Command interface - CommandUI(..), - commandShowOptions, - CommandParse(..), - commandParseArgs, - getNormalCommandDescriptions, - helpCommandUI, - - -- ** Constructing commands - ShowOrParseArgs(..), - usageDefault, - usageAlternatives, - mkCommandUI, - hiddenCommand, - - -- ** Associating actions with commands - Command, - commandAddAction, - noExtraFlags, - - -- ** Building lists of commands - CommandType(..), - CommandSpec(..), - commandFromSpec, - - -- ** Running commands - commandsRun, - --- * Option Fields - OptionField(..), Name, - --- ** Constructing Option Fields - option, multiOption, - --- ** Liftings & Projections - liftOption, viewAsFieldDescr, - --- * Option Descriptions - OptDescr(..), Description, SFlags, LFlags, OptFlags, ArgPlaceHolder, - --- ** OptDescr 'smart' constructors - MkOptDescr, - reqArg, reqArg', optArg, optArg', noArg, - boolOpt, boolOpt', choiceOpt, choiceOptFromEnum - - ) where - -import Prelude () -import Distribution.Compat.Prelude hiding (get) - -import qualified Distribution.GetOpt as GetOpt -import Distribution.Text -import Distribution.ParseUtils -import Distribution.ReadE -import Distribution.Simple.Utils - -import Text.PrettyPrint ( punctuate, cat, comma, text ) -import Text.PrettyPrint as PP ( empty ) - -data CommandUI flags = CommandUI { - -- | The name of the command as it would be entered on the command line. - -- For example @\"build\"@. - commandName :: String, - -- | A short, one line description of the command to use in help texts. - commandSynopsis :: String, - -- | A function that maps a program name to a usage summary for this - -- command. - commandUsage :: String -> String, - -- | Additional explanation of the command to use in help texts. 
- commandDescription :: Maybe (String -> String), - -- | Post-Usage notes and examples in help texts - commandNotes :: Maybe (String -> String), - -- | Initial \/ empty flags - commandDefaultFlags :: flags, - -- | All the Option fields for this command - commandOptions :: ShowOrParseArgs -> [OptionField flags] - } - -data ShowOrParseArgs = ShowArgs | ParseArgs -type Name = String -type Description = String - --- | We usually have a data type for storing configuration values, where --- every field stores a configuration option, and the user sets --- the value either via command line flags or a configuration file. --- An individual OptionField models such a field, and we usually --- build a list of options associated to a configuration data type. -data OptionField a = OptionField { - optionName :: Name, - optionDescr :: [OptDescr a] } - --- | An OptionField takes one or more OptDescrs, describing the command line --- interface for the field. -data OptDescr a = ReqArg Description OptFlags ArgPlaceHolder - (ReadE (a->a)) (a -> [String]) - - | OptArg Description OptFlags ArgPlaceHolder - (ReadE (a->a)) (a->a) (a -> [Maybe String]) - - | ChoiceOpt [(Description, OptFlags, a->a, a -> Bool)] - - | BoolOpt Description OptFlags{-True-} OptFlags{-False-} - (Bool -> a -> a) (a-> Maybe Bool) - --- | Short command line option strings -type SFlags = [Char] --- | Long command line option strings -type LFlags = [String] -type OptFlags = (SFlags,LFlags) -type ArgPlaceHolder = String - - --- | Create an option taking a single OptDescr. --- No explicit Name is given for the Option, the name is the first LFlag given. -option :: SFlags -> LFlags -> Description -> get -> set -> MkOptDescr get set a - -> OptionField a -option sf lf@(n:_) d get set arg = OptionField n [arg sf lf d get set] -option _ _ _ _ _ _ = error $ "Distribution.command.option: " - ++ "An OptionField must have at least one LFlag" - --- | Create an option taking several OptDescrs. 
--- You will have to give the flags and description individually to the --- OptDescr constructor. -multiOption :: Name -> get -> set - -> [get -> set -> OptDescr a] -- ^MkOptDescr constructors partially - -- applied to flags and description. - -> OptionField a -multiOption n get set args = OptionField n [arg get set | arg <- args] - -type MkOptDescr get set a = SFlags -> LFlags -> Description -> get -> set - -> OptDescr a - --- | Create a string-valued command line interface. -reqArg :: Monoid b => ArgPlaceHolder -> ReadE b -> (b -> [String]) - -> MkOptDescr (a -> b) (b -> a -> a) a -reqArg ad mkflag showflag sf lf d get set = - ReqArg d (sf,lf) ad (fmap (\a b -> set (get b `mappend` a) b) mkflag) - (showflag . get) - --- | Create a string-valued command line interface with a default value. -optArg :: Monoid b => ArgPlaceHolder -> ReadE b -> b -> (b -> [Maybe String]) - -> MkOptDescr (a -> b) (b -> a -> a) a -optArg ad mkflag def showflag sf lf d get set = - OptArg d (sf,lf) ad (fmap (\a b -> set (get b `mappend` a) b) mkflag) - (\b -> set (get b `mappend` def) b) - (showflag . get) - --- | (String -> a) variant of "reqArg" -reqArg' :: Monoid b => ArgPlaceHolder -> (String -> b) -> (b -> [String]) - -> MkOptDescr (a -> b) (b -> a -> a) a -reqArg' ad mkflag showflag = - reqArg ad (succeedReadE mkflag) showflag - --- | (String -> a) variant of "optArg" -optArg' :: Monoid b => ArgPlaceHolder -> (Maybe String -> b) - -> (b -> [Maybe String]) - -> MkOptDescr (a -> b) (b -> a -> a) a -optArg' ad mkflag showflag = - optArg ad (succeedReadE (mkflag . 
Just)) def showflag - where def = mkflag Nothing - -noArg :: (Eq b) => b -> MkOptDescr (a -> b) (b -> a -> a) a -noArg flag sf lf d = choiceOpt [(flag, (sf,lf), d)] sf lf d - -boolOpt :: (b -> Maybe Bool) -> (Bool -> b) -> SFlags -> SFlags - -> MkOptDescr (a -> b) (b -> a -> a) a -boolOpt g s sfT sfF _sf _lf@(n:_) d get set = - BoolOpt d (sfT, ["enable-"++n]) (sfF, ["disable-"++n]) (set.s) (g.get) -boolOpt _ _ _ _ _ _ _ _ _ = error - "Distribution.Simple.Setup.boolOpt: unreachable" - -boolOpt' :: (b -> Maybe Bool) -> (Bool -> b) -> OptFlags -> OptFlags - -> MkOptDescr (a -> b) (b -> a -> a) a -boolOpt' g s ffT ffF _sf _lf d get set = BoolOpt d ffT ffF (set.s) (g . get) - --- | create a Choice option -choiceOpt :: Eq b => [(b,OptFlags,Description)] - -> MkOptDescr (a -> b) (b -> a -> a) a -choiceOpt aa_ff _sf _lf _d get set = ChoiceOpt alts - where alts = [(d,flags, set alt, (==alt) . get) | (alt,flags,d) <- aa_ff] - --- | create a Choice option out of an enumeration type. --- As long flags, the Show output is used. As short flags, the first character --- which does not conflict with a previous one is used. -choiceOptFromEnum :: (Bounded b, Enum b, Show b, Eq b) => - MkOptDescr (a -> b) (b -> a -> a) a -choiceOptFromEnum _sf _lf d get = - choiceOpt [ (x, (sf, [map toLower $ show x]), d') - | (x, sf) <- sflags' - , let d' = d ++ show x] - _sf _lf d get - where sflags' = foldl f [] [firstOne..] 
- f prev x = let prevflags = concatMap snd prev in - prev ++ take 1 [(x, [toLower sf]) - | sf <- show x, isAlpha sf - , toLower sf `notElem` prevflags] - firstOne = minBound `asTypeOf` get undefined - -commandGetOpts :: ShowOrParseArgs -> CommandUI flags - -> [GetOpt.OptDescr (flags -> flags)] -commandGetOpts showOrParse command = - concatMap viewAsGetOpt (commandOptions command showOrParse) - -viewAsGetOpt :: OptionField a -> [GetOpt.OptDescr (a->a)] -viewAsGetOpt (OptionField _n aa) = concatMap optDescrToGetOpt aa - where - optDescrToGetOpt (ReqArg d (cs,ss) arg_desc set _) = - [GetOpt.Option cs ss (GetOpt.ReqArg set' arg_desc) d] - where set' = readEOrFail set - optDescrToGetOpt (OptArg d (cs,ss) arg_desc set def _) = - [GetOpt.Option cs ss (GetOpt.OptArg set' arg_desc) d] - where set' Nothing = def - set' (Just txt) = readEOrFail set txt - optDescrToGetOpt (ChoiceOpt alts) = - [GetOpt.Option sf lf (GetOpt.NoArg set) d | (d,(sf,lf),set,_) <- alts ] - optDescrToGetOpt (BoolOpt d (sfT, lfT) ([], []) set _) = - [ GetOpt.Option sfT lfT (GetOpt.NoArg (set True)) d ] - optDescrToGetOpt (BoolOpt d ([], []) (sfF, lfF) set _) = - [ GetOpt.Option sfF lfF (GetOpt.NoArg (set False)) d ] - optDescrToGetOpt (BoolOpt d (sfT,lfT) (sfF, lfF) set _) = - [ GetOpt.Option sfT lfT (GetOpt.NoArg (set True)) ("Enable " ++ d) - , GetOpt.Option sfF lfF (GetOpt.NoArg (set False)) ("Disable " ++ d) ] - --- | to view as a FieldDescr, we sort the list of interfaces (Req > Bool > --- Choice > Opt) and consider only the first one. 
-viewAsFieldDescr :: OptionField a -> FieldDescr a -viewAsFieldDescr (OptionField _n []) = - error "Distribution.command.viewAsFieldDescr: unexpected" -viewAsFieldDescr (OptionField n dd) = FieldDescr n get set - where - optDescr = head $ sortBy cmp dd - - cmp :: OptDescr a -> OptDescr a -> Ordering - ReqArg{} `cmp` ReqArg{} = EQ - ReqArg{} `cmp` _ = GT - BoolOpt{} `cmp` ReqArg{} = LT - BoolOpt{} `cmp` BoolOpt{} = EQ - BoolOpt{} `cmp` _ = GT - ChoiceOpt{} `cmp` ReqArg{} = LT - ChoiceOpt{} `cmp` BoolOpt{} = LT - ChoiceOpt{} `cmp` ChoiceOpt{} = EQ - ChoiceOpt{} `cmp` _ = GT - OptArg{} `cmp` OptArg{} = EQ - OptArg{} `cmp` _ = LT - --- get :: a -> Doc - get t = case optDescr of - ReqArg _ _ _ _ ppr -> - (cat . punctuate comma . map text . ppr) t - - OptArg _ _ _ _ _ ppr -> - case ppr t of [] -> PP.empty - (Nothing : _) -> text "True" - (Just a : _) -> text a - - ChoiceOpt alts -> - fromMaybe PP.empty $ listToMaybe - [ text lf | (_,(_,lf:_), _,enabled) <- alts, enabled t] - - BoolOpt _ _ _ _ enabled -> (maybe PP.empty disp . enabled) t - --- set :: LineNo -> String -> a -> ParseResult a - set line val a = - case optDescr of - ReqArg _ _ _ readE _ -> ($ a) `liftM` runE line n readE val - -- We parse for a single value instead of a - -- list, as one can't really implement - -- parseList :: ReadE a -> ReadE [a] with - -- the current ReadE definition - ChoiceOpt{} -> - case getChoiceByLongFlag optDescr val of - Just f -> return (f a) - _ -> syntaxError line val - - BoolOpt _ _ _ setV _ -> (`setV` a) `liftM` runP line n parse val - - OptArg _ _ _ readE _ _ -> ($ a) `liftM` runE line n readE val - -- Optional arguments are parsed just like - -- required arguments here; we don't - -- provide a method to set an OptArg field - -- to the default value. 
- -getChoiceByLongFlag :: OptDescr b -> String -> Maybe (b->b) -getChoiceByLongFlag (ChoiceOpt alts) val = listToMaybe - [ set | (_,(_sf,lf:_), set, _) <- alts - , lf == val] - -getChoiceByLongFlag _ _ = - error "Distribution.command.getChoiceByLongFlag: expected a choice option" - -getCurrentChoice :: OptDescr a -> a -> [String] -getCurrentChoice (ChoiceOpt alts) a = - [ lf | (_,(_sf,lf:_), _, currentChoice) <- alts, currentChoice a] - -getCurrentChoice _ _ = error "Command.getChoice: expected a Choice OptDescr" - - -liftOption :: (b -> a) -> (a -> (b -> b)) -> OptionField a -> OptionField b -liftOption get' set' opt = - opt { optionDescr = liftOptDescr get' set' `map` optionDescr opt} - - -liftOptDescr :: (b -> a) -> (a -> (b -> b)) -> OptDescr a -> OptDescr b -liftOptDescr get' set' (ChoiceOpt opts) = - ChoiceOpt [ (d, ff, liftSet get' set' set , (get . get')) - | (d, ff, set, get) <- opts] - -liftOptDescr get' set' (OptArg d ff ad set def get) = - OptArg d ff ad (liftSet get' set' `fmap` set) - (liftSet get' set' def) (get . get') - -liftOptDescr get' set' (ReqArg d ff ad set get) = - ReqArg d ff ad (liftSet get' set' `fmap` set) (get . get') - -liftOptDescr get' set' (BoolOpt d ffT ffF set get) = - BoolOpt d ffT ffF (liftSet get' set' . set) (get . 
get') - -liftSet :: (b -> a) -> (a -> (b -> b)) -> (a -> a) -> b -> b -liftSet get' set' set x = set' (set $ get' x) x - --- | Show flags in the standard long option command line format -commandShowOptions :: CommandUI flags -> flags -> [String] -commandShowOptions command v = concat - [ showOptDescr v od | o <- commandOptions command ParseArgs - , od <- optionDescr o] - where - maybePrefix [] = [] - maybePrefix (lOpt:_) = ["--" ++ lOpt] - - showOptDescr :: a -> OptDescr a -> [String] - showOptDescr x (BoolOpt _ (_,lfTs) (_,lfFs) _ enabled) - = case enabled x of - Nothing -> [] - Just True -> maybePrefix lfTs - Just False -> maybePrefix lfFs - showOptDescr x c@ChoiceOpt{} - = ["--" ++ val | val <- getCurrentChoice c x] - showOptDescr x (ReqArg _ (_ssff,lf:_) _ _ showflag) - = [ "--"++lf++"="++flag - | flag <- showflag x ] - showOptDescr x (OptArg _ (_ssff,lf:_) _ _ _ showflag) - = [ case flag of - Just s -> "--"++lf++"="++s - Nothing -> "--"++lf - | flag <- showflag x ] - showOptDescr _ _ - = error "Distribution.Simple.Command.showOptDescr: unreachable" - - -commandListOptions :: CommandUI flags -> [String] -commandListOptions command = - concatMap listOption $ - addCommonFlags ShowArgs $ -- This is a slight hack, we don't want - -- "--list-options" showing up in the - -- list options output, so use ShowArgs - commandGetOpts ShowArgs command - where - listOption (GetOpt.Option shortNames longNames _ _) = - [ "-" ++ [name] | name <- shortNames ] - ++ [ "--" ++ name | name <- longNames ] - --- | The help text for this command with descriptions of all the options. -commandHelp :: CommandUI flags -> String -> String -commandHelp command pname = - commandSynopsis command - ++ "\n\n" - ++ commandUsage command pname - ++ ( case commandDescription command of - Nothing -> "" - Just desc -> '\n': desc pname) - ++ "\n" - ++ ( if cname == "" - then "Global flags:" - else "Flags for " ++ cname ++ ":" ) - ++ ( GetOpt.usageInfo "" - . 
addCommonFlags ShowArgs - $ commandGetOpts ShowArgs command ) - ++ ( case commandNotes command of - Nothing -> "" - Just notes -> '\n': notes pname) - where cname = commandName command - --- | Default "usage" documentation text for commands. -usageDefault :: String -> String -> String -usageDefault name pname = - "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n\n" - ++ "Flags for " ++ name ++ ":" - --- | Create "usage" documentation from a list of parameter --- configurations. -usageAlternatives :: String -> [String] -> String -> String -usageAlternatives name strs pname = unlines - [ start ++ pname ++ " " ++ name ++ " " ++ s - | let starts = "Usage: " : repeat " or: " - , (start, s) <- zip starts strs - ] - --- | Make a Command from standard 'GetOpt' options. -mkCommandUI :: String -- ^ name - -> String -- ^ synopsis - -> [String] -- ^ usage alternatives - -> flags -- ^ initial\/empty flags - -> (ShowOrParseArgs -> [OptionField flags]) -- ^ options - -> CommandUI flags -mkCommandUI name synopsis usages flags options = CommandUI - { commandName = name - , commandSynopsis = synopsis - , commandDescription = Nothing - , commandNotes = Nothing - , commandUsage = usageAlternatives name usages - , commandDefaultFlags = flags - , commandOptions = options - } - --- | Common flags that apply to every command -data CommonFlag = HelpFlag | ListOptionsFlag - -commonFlags :: ShowOrParseArgs -> [GetOpt.OptDescr CommonFlag] -commonFlags showOrParseArgs = case showOrParseArgs of - ShowArgs -> [help] - ParseArgs -> [help, list] - where - help = GetOpt.Option helpShortFlags ["help"] (GetOpt.NoArg HelpFlag) - "Show this help text" - helpShortFlags = case showOrParseArgs of - ShowArgs -> ['h'] - ParseArgs -> ['h', '?'] - list = GetOpt.Option [] ["list-options"] (GetOpt.NoArg ListOptionsFlag) - "Print a list of command line flags" - -addCommonFlags :: ShowOrParseArgs - -> [GetOpt.OptDescr a] - -> [GetOpt.OptDescr (Either CommonFlag a)] -addCommonFlags showOrParseArgs options = - map 
(fmapOptDesc Left) (commonFlags showOrParseArgs) - ++ map (fmapOptDesc Right) options - where fmapOptDesc f (GetOpt.Option s l d m) = - GetOpt.Option s l (fmapArgDesc f d) m - fmapArgDesc f (GetOpt.NoArg a) = GetOpt.NoArg (f a) - fmapArgDesc f (GetOpt.ReqArg s d) = GetOpt.ReqArg (f . s) d - fmapArgDesc f (GetOpt.OptArg s d) = GetOpt.OptArg (f . s) d - --- | Parse a bunch of command line arguments --- -commandParseArgs :: CommandUI flags - -> Bool -- ^ Is the command a global or subcommand? - -> [String] - -> CommandParse (flags -> flags, [String]) -commandParseArgs command global args = - let options = addCommonFlags ParseArgs - $ commandGetOpts ParseArgs command - order | global = GetOpt.RequireOrder - | otherwise = GetOpt.Permute - in case GetOpt.getOpt' order options args of - (flags, _, _, _) - | any listFlag flags -> CommandList (commandListOptions command) - | any helpFlag flags -> CommandHelp (commandHelp command) - where listFlag (Left ListOptionsFlag) = True; listFlag _ = False - helpFlag (Left HelpFlag) = True; helpFlag _ = False - (flags, opts, opts', []) - | global || null opts' -> CommandReadyToGo (accum flags, mix opts opts') - | otherwise -> CommandErrors (unrecognised opts') - (_, _, _, errs) -> CommandErrors errs - - where -- Note: It is crucial to use reverse function composition here or to - -- reverse the flags here as we want to process the flags left to right - -- but data flow in function composition is right to left. - accum flags = foldr (flip (.)) id [ f | Right f <- flags ] - unrecognised opts = [ "unrecognized " - ++ "'" ++ (commandName command) ++ "'" - ++ " option `" ++ opt ++ "'\n" - | opt <- opts ] - -- For unrecognised global flags we put them in the position just after - -- the command, if there is one. This gives us a chance to parse them - -- as sub-command rather than global flags. 
- mix [] ys = ys - mix (x:xs) ys = x:ys++xs - -data CommandParse flags = CommandHelp (String -> String) - | CommandList [String] - | CommandErrors [String] - | CommandReadyToGo flags -instance Functor CommandParse where - fmap _ (CommandHelp help) = CommandHelp help - fmap _ (CommandList opts) = CommandList opts - fmap _ (CommandErrors errs) = CommandErrors errs - fmap f (CommandReadyToGo flags) = CommandReadyToGo (f flags) - - -data CommandType = NormalCommand | HiddenCommand -data Command action = - Command String String ([String] -> CommandParse action) CommandType - --- | Mark command as hidden. Hidden commands don't show up in the 'progname --- help' or 'progname --help' output. -hiddenCommand :: Command action -> Command action -hiddenCommand (Command name synopsys f _cmdType) = - Command name synopsys f HiddenCommand - -commandAddAction :: CommandUI flags - -> (flags -> [String] -> action) - -> Command action -commandAddAction command action = - Command (commandName command) - (commandSynopsis command) - (fmap (uncurry applyDefaultArgs) . 
commandParseArgs command False) - NormalCommand - - where applyDefaultArgs mkflags args = - let flags = mkflags (commandDefaultFlags command) - in action flags args - -commandsRun :: CommandUI a - -> [Command action] - -> [String] - -> CommandParse (a, CommandParse action) -commandsRun globalCommand commands args = - case commandParseArgs globalCommand True args of - CommandHelp help -> CommandHelp help - CommandList opts -> CommandList (opts ++ commandNames) - CommandErrors errs -> CommandErrors errs - CommandReadyToGo (mkflags, args') -> case args' of - ("help":cmdArgs) -> handleHelpCommand cmdArgs - (name:cmdArgs) -> case lookupCommand name of - [Command _ _ action _] - -> CommandReadyToGo (flags, action cmdArgs) - _ -> CommandReadyToGo (flags, badCommand name) - [] -> CommandReadyToGo (flags, noCommand) - where flags = mkflags (commandDefaultFlags globalCommand) - - where - lookupCommand cname = [ cmd | cmd@(Command cname' _ _ _) <- commands' - , cname' == cname ] - noCommand = CommandErrors ["no command given (try --help)\n"] - badCommand cname = CommandErrors ["unrecognised command: " ++ cname - ++ " (try --help)\n"] - commands' = commands ++ [commandAddAction helpCommandUI undefined] - commandNames = [ name | (Command name _ _ NormalCommand) <- commands' ] - - -- A bit of a hack: support "prog help" as a synonym of "prog --help" - -- furthermore, support "prog help command" as "prog command --help" - handleHelpCommand cmdArgs = - case commandParseArgs helpCommandUI True cmdArgs of - CommandHelp help -> CommandHelp help - CommandList list -> CommandList (list ++ commandNames) - CommandErrors _ -> CommandHelp globalHelp - CommandReadyToGo (_,[]) -> CommandHelp globalHelp - CommandReadyToGo (_,(name:cmdArgs')) -> - case lookupCommand name of - [Command _ _ action _] -> - case action ("--help":cmdArgs') of - CommandHelp help -> CommandHelp help - CommandList _ -> CommandList [] - _ -> CommandHelp globalHelp - _ -> badCommand name - - where globalHelp = 
commandHelp globalCommand - --- | Utility function, many commands do not accept additional flags. This --- action fails with a helpful error message if the user supplies any extra. --- -noExtraFlags :: [String] -> IO () -noExtraFlags [] = return () -noExtraFlags extraFlags = - dieNoVerbosity $ "Unrecognised flags: " ++ intercalate ", " extraFlags ---TODO: eliminate this function and turn it into a variant on commandAddAction --- instead like commandAddActionNoArgs that doesn't supply the [String] - --- | Helper function for creating globalCommand description -getNormalCommandDescriptions :: [Command action] -> [(String, String)] -getNormalCommandDescriptions cmds = - [ (name, description) - | Command name description _ NormalCommand <- cmds ] - -helpCommandUI :: CommandUI () -helpCommandUI = - (mkCommandUI - "help" - "Help about commands." - ["[FLAGS]", "COMMAND [FLAGS]"] - () - (const [])) - { - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " help help\n" - ++ " Oh, appararently you already know this.\n" - } - --- | wraps a @CommandUI@ together with a function that turns it into a @Command@. --- By hiding the type of flags for the UI allows construction of a list of all UIs at the --- top level of the program. That list can then be used for generation of manual page --- as well as for executing the selected command. -data CommandSpec action - = forall flags. 
CommandSpec (CommandUI flags) (CommandUI flags -> Command action) CommandType - -commandFromSpec :: CommandSpec a -> Command a -commandFromSpec (CommandSpec ui action _) = action ui diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Compiler.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Compiler.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Compiler.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Compiler.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,438 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveDataTypeable #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Compiler --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This should be a much more sophisticated abstraction than it is. Currently --- it's just a bit of data about the compiler, like its flavour and name and --- version. The reason it's just data is because currently it has to be in --- 'Read' and 'Show' so it can be saved along with the 'LocalBuildInfo'. The --- only interesting bit of info it contains is a mapping between language --- extensions and compiler command line flags. This module also defines a --- 'PackageDB' type which is used to refer to package databases. Most compilers --- only know about a single global package collection but GHC has a global and --- per-user one and it lets you create arbitrary other package databases. We do --- not yet fully support this latter feature. 
- -module Distribution.Simple.Compiler ( - -- * Haskell implementations - module Distribution.Compiler, - Compiler(..), - showCompilerId, showCompilerIdWithAbi, - compilerFlavor, compilerVersion, - compilerCompatFlavor, - compilerCompatVersion, - compilerInfo, - - -- * Support for package databases - PackageDB(..), - PackageDBStack, - registrationPackageDB, - absolutePackageDBPaths, - absolutePackageDBPath, - - -- * Support for optimisation levels - OptimisationLevel(..), - flagToOptimisationLevel, - - -- * Support for debug info levels - DebugInfoLevel(..), - flagToDebugInfoLevel, - - -- * Support for language extensions - Flag, - languageToFlags, - unsupportedLanguages, - extensionsToFlags, - unsupportedExtensions, - parmakeSupported, - reexportedModulesSupported, - renamingPackageFlagsSupported, - unifiedIPIDRequired, - packageKeySupported, - unitIdSupported, - coverageSupported, - profilingSupported, - backpackSupported, - arResponseFilesSupported, - libraryDynDirSupported, - - -- * Support for profiling detail levels - ProfDetailLevel(..), - knownProfDetailLevels, - flagToProfDetailLevel, - showProfDetailLevel, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Compiler -import Distribution.Version -import Distribution.Text -import Language.Haskell.Extension -import Distribution.Simple.Utils - -import Control.Monad (join) -import qualified Data.Map as Map (lookup) -import System.Directory (canonicalizePath) - -data Compiler = Compiler { - compilerId :: CompilerId, - -- ^ Compiler flavour and version. - compilerAbiTag :: AbiTag, - -- ^ Tag for distinguishing incompatible ABI's on the same - -- architecture/os. - compilerCompat :: [CompilerId], - -- ^ Other implementations that this compiler claims to be - -- compatible with. - compilerLanguages :: [(Language, Flag)], - -- ^ Supported language standards. - compilerExtensions :: [(Extension, Maybe Flag)], - -- ^ Supported extensions. 
- compilerProperties :: Map String String - -- ^ A key-value map for properties not covered by the above fields. - } - deriving (Eq, Generic, Typeable, Show, Read) - -instance Binary Compiler - -showCompilerId :: Compiler -> String -showCompilerId = display . compilerId - -showCompilerIdWithAbi :: Compiler -> String -showCompilerIdWithAbi comp = - display (compilerId comp) ++ - case compilerAbiTag comp of - NoAbiTag -> [] - AbiTag xs -> '-':xs - -compilerFlavor :: Compiler -> CompilerFlavor -compilerFlavor = (\(CompilerId f _) -> f) . compilerId - -compilerVersion :: Compiler -> Version -compilerVersion = (\(CompilerId _ v) -> v) . compilerId - - --- | Is this compiler compatible with the compiler flavour we're interested in? --- --- For example this checks if the compiler is actually GHC or is another --- compiler that claims to be compatible with some version of GHC, e.g. GHCJS. --- --- > if compilerCompatFlavor GHC compiler then ... else ... --- -compilerCompatFlavor :: CompilerFlavor -> Compiler -> Bool -compilerCompatFlavor flavor comp = - flavor == compilerFlavor comp - || flavor `elem` [ flavor' | CompilerId flavor' _ <- compilerCompat comp ] - - --- | Is this compiler compatible with the compiler flavour we're interested in, --- and if so what version does it claim to be compatible with. --- --- For example this checks if the compiler is actually GHC-7.x or is another --- compiler that claims to be compatible with some GHC-7.x version. --- --- > case compilerCompatVersion GHC compiler of --- > Just (Version (7:_)) -> ... --- > _ -> ... --- -compilerCompatVersion :: CompilerFlavor -> Compiler -> Maybe Version -compilerCompatVersion flavor comp - | compilerFlavor comp == flavor = Just (compilerVersion comp) - | otherwise = - listToMaybe [ v | CompilerId fl v <- compilerCompat comp, fl == flavor ] - -compilerInfo :: Compiler -> CompilerInfo -compilerInfo c = CompilerInfo (compilerId c) - (compilerAbiTag c) - (Just . compilerCompat $ c) - (Just . map fst . 
compilerLanguages $ c) - (Just . map fst . compilerExtensions $ c) - --- ------------------------------------------------------------ --- * Package databases --- ------------------------------------------------------------ - --- |Some compilers have a notion of a database of available packages. --- For some there is just one global db of packages, other compilers --- support a per-user or an arbitrary db specified at some location in --- the file system. This can be used to build isloated environments of --- packages, for example to build a collection of related packages --- without installing them globally. --- -data PackageDB = GlobalPackageDB - | UserPackageDB - | SpecificPackageDB FilePath - deriving (Eq, Generic, Ord, Show, Read) - -instance Binary PackageDB - --- | We typically get packages from several databases, and stack them --- together. This type lets us be explicit about that stacking. For example --- typical stacks include: --- --- > [GlobalPackageDB] --- > [GlobalPackageDB, UserPackageDB] --- > [GlobalPackageDB, SpecificPackageDB "package.conf.inplace"] --- --- Note that the 'GlobalPackageDB' is invariably at the bottom since it --- contains the rts, base and other special compiler-specific packages. --- --- We are not restricted to using just the above combinations. In particular --- we can use several custom package dbs and the user package db together. --- --- When it comes to writing, the top most (last) package is used. --- -type PackageDBStack = [PackageDB] - --- | Return the package that we should register into. This is the package db at --- the top of the stack. 
--- -registrationPackageDB :: PackageDBStack -> PackageDB -registrationPackageDB [] = error "internal error: empty package db set" -registrationPackageDB dbs = last dbs - --- | Make package paths absolute - - -absolutePackageDBPaths :: PackageDBStack -> NoCallStackIO PackageDBStack -absolutePackageDBPaths = traverse absolutePackageDBPath - -absolutePackageDBPath :: PackageDB -> NoCallStackIO PackageDB -absolutePackageDBPath GlobalPackageDB = return GlobalPackageDB -absolutePackageDBPath UserPackageDB = return UserPackageDB -absolutePackageDBPath (SpecificPackageDB db) = - SpecificPackageDB `liftM` canonicalizePath db - --- ------------------------------------------------------------ --- * Optimisation levels --- ------------------------------------------------------------ - --- | Some compilers support optimising. Some have different levels. --- For compilers that do not the level is just capped to the level --- they do support. --- -data OptimisationLevel = NoOptimisation - | NormalOptimisation - | MaximumOptimisation - deriving (Bounded, Enum, Eq, Generic, Read, Show) - -instance Binary OptimisationLevel - -flagToOptimisationLevel :: Maybe String -> OptimisationLevel -flagToOptimisationLevel Nothing = NormalOptimisation -flagToOptimisationLevel (Just s) = case reads s of - [(i, "")] - | i >= fromEnum (minBound :: OptimisationLevel) - && i <= fromEnum (maxBound :: OptimisationLevel) - -> toEnum i - | otherwise -> error $ "Bad optimisation level: " ++ show i - ++ ". Valid values are 0..2" - _ -> error $ "Can't parse optimisation level " ++ s - --- ------------------------------------------------------------ --- * Debug info levels --- ------------------------------------------------------------ - --- | Some compilers support emitting debug info. Some have different --- levels. For compilers that do not the level is just capped to the --- level they do support. 
--- -data DebugInfoLevel = NoDebugInfo - | MinimalDebugInfo - | NormalDebugInfo - | MaximalDebugInfo - deriving (Bounded, Enum, Eq, Generic, Read, Show) - -instance Binary DebugInfoLevel - -flagToDebugInfoLevel :: Maybe String -> DebugInfoLevel -flagToDebugInfoLevel Nothing = NormalDebugInfo -flagToDebugInfoLevel (Just s) = case reads s of - [(i, "")] - | i >= fromEnum (minBound :: DebugInfoLevel) - && i <= fromEnum (maxBound :: DebugInfoLevel) - -> toEnum i - | otherwise -> error $ "Bad debug info level: " ++ show i - ++ ". Valid values are 0..3" - _ -> error $ "Can't parse debug info level " ++ s - --- ------------------------------------------------------------ --- * Languages and Extensions --- ------------------------------------------------------------ - -unsupportedLanguages :: Compiler -> [Language] -> [Language] -unsupportedLanguages comp langs = - [ lang | lang <- langs - , isNothing (languageToFlag comp lang) ] - -languageToFlags :: Compiler -> Maybe Language -> [Flag] -languageToFlags comp = filter (not . null) - . catMaybes . map (languageToFlag comp) - . maybe [Haskell98] (\x->[x]) - -languageToFlag :: Compiler -> Language -> Maybe Flag -languageToFlag comp ext = lookup ext (compilerLanguages comp) - - --- |For the given compiler, return the extensions it does not support. -unsupportedExtensions :: Compiler -> [Extension] -> [Extension] -unsupportedExtensions comp exts = - [ ext | ext <- exts - , isNothing (extensionToFlag' comp ext) ] - -type Flag = String - --- |For the given compiler, return the flags for the supported extensions. -extensionsToFlags :: Compiler -> [Extension] -> [Flag] -extensionsToFlags comp = nub . filter (not . null) - . catMaybes . map (extensionToFlag comp) - --- | Looks up the flag for a given extension, for a given compiler. --- Ignores the subtlety of extensions which lack associated flags. 
-extensionToFlag :: Compiler -> Extension -> Maybe Flag -extensionToFlag comp ext = join (extensionToFlag' comp ext) - --- | Looks up the flag for a given extension, for a given compiler. --- However, the extension may be valid for the compiler but not have a flag. --- For example, NondecreasingIndentation is enabled by default on GHC 7.0.4, --- hence it is considered a supported extension but not an accepted flag. --- --- The outer layer of Maybe indicates whether the extensions is supported, while --- the inner layer indicates whether it has a flag. --- When building strings, it is often more convenient to use 'extensionToFlag', --- which ignores the difference. -extensionToFlag' :: Compiler -> Extension -> Maybe (Maybe Flag) -extensionToFlag' comp ext = lookup ext (compilerExtensions comp) - --- | Does this compiler support parallel --make mode? -parmakeSupported :: Compiler -> Bool -parmakeSupported = ghcSupported "Support parallel --make" - --- | Does this compiler support reexported-modules? -reexportedModulesSupported :: Compiler -> Bool -reexportedModulesSupported = ghcSupported "Support reexported-modules" - --- | Does this compiler support thinning/renaming on package flags? -renamingPackageFlagsSupported :: Compiler -> Bool -renamingPackageFlagsSupported = ghcSupported - "Support thinning and renaming package flags" - --- | Does this compiler have unified IPIDs (so no package keys) -unifiedIPIDRequired :: Compiler -> Bool -unifiedIPIDRequired = ghcSupported "Requires unified installed package IDs" - --- | Does this compiler support package keys? -packageKeySupported :: Compiler -> Bool -packageKeySupported = ghcSupported "Uses package keys" - --- | Does this compiler support unit IDs? -unitIdSupported :: Compiler -> Bool -unitIdSupported = ghcSupported "Uses unit IDs" - --- | Does this compiler support Backpack? 
-backpackSupported :: Compiler -> Bool -backpackSupported = ghcSupported "Support Backpack" - --- | Does this compiler support a package database entry with: --- "dynamic-library-dirs"? -libraryDynDirSupported :: Compiler -> Bool -libraryDynDirSupported comp = case compilerFlavor comp of - GHC -> - -- Not just v >= mkVersion [8,0,1,20161022], as there - -- are many GHC 8.1 nightlies which don't support this. - ((v >= mkVersion [8,0,1,20161022] && v < mkVersion [8,1]) || - v >= mkVersion [8,1,20161021]) - _ -> False - where - v = compilerVersion comp - --- | Does this compiler's "ar" command supports response file --- arguments (i.e. @file-style arguments). -arResponseFilesSupported :: Compiler -> Bool -arResponseFilesSupported = ghcSupported "ar supports at file" - --- | Does this compiler support Haskell program coverage? -coverageSupported :: Compiler -> Bool -coverageSupported comp = - case compilerFlavor comp of - GHC -> True - GHCJS -> True - _ -> False - --- | Does this compiler support profiling? -profilingSupported :: Compiler -> Bool -profilingSupported comp = - case compilerFlavor comp of - GHC -> True - GHCJS -> True - _ -> False - --- | Utility function for GHC only features -ghcSupported :: String -> Compiler -> Bool -ghcSupported key comp = - case compilerFlavor comp of - GHC -> checkProp - GHCJS -> checkProp - _ -> False - where checkProp = - case Map.lookup key (compilerProperties comp) of - Just "YES" -> True - _ -> False - --- ------------------------------------------------------------ --- * Profiling detail level --- ------------------------------------------------------------ - --- | Some compilers (notably GHC) support profiling and can instrument --- programs so the system can account costs to different functions. There are --- different levels of detail that can be used for this accounting. 
--- For compilers that do not support this notion or the particular detail --- levels, this is either ignored or just capped to some similar level --- they do support. --- -data ProfDetailLevel = ProfDetailNone - | ProfDetailDefault - | ProfDetailExportedFunctions - | ProfDetailToplevelFunctions - | ProfDetailAllFunctions - | ProfDetailOther String - deriving (Eq, Generic, Read, Show) - -instance Binary ProfDetailLevel - -flagToProfDetailLevel :: String -> ProfDetailLevel -flagToProfDetailLevel "" = ProfDetailDefault -flagToProfDetailLevel s = - case lookup (lowercase s) - [ (name, value) - | (primary, aliases, value) <- knownProfDetailLevels - , name <- primary : aliases ] - of Just value -> value - Nothing -> ProfDetailOther s - -knownProfDetailLevels :: [(String, [String], ProfDetailLevel)] -knownProfDetailLevels = - [ ("default", [], ProfDetailDefault) - , ("none", [], ProfDetailNone) - , ("exported-functions", ["exported"], ProfDetailExportedFunctions) - , ("toplevel-functions", ["toplevel", "top"], ProfDetailToplevelFunctions) - , ("all-functions", ["all"], ProfDetailAllFunctions) - ] - -showProfDetailLevel :: ProfDetailLevel -> String -showProfDetailLevel dl = case dl of - ProfDetailNone -> "none" - ProfDetailDefault -> "default" - ProfDetailExportedFunctions -> "exported-functions" - ProfDetailToplevelFunctions -> "toplevel-functions" - ProfDetailAllFunctions -> "all-functions" - ProfDetailOther other -> other diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Configure.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Configure.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Configure.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Configure.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2032 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE FlexibleContexts 
#-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Configure --- Copyright : Isaac Jones 2003-2005 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This deals with the /configure/ phase. It provides the 'configure' action --- which is given the package description and configure flags. It then tries --- to: configure the compiler; resolves any conditionals in the package --- description; resolve the package dependencies; check if all the extensions --- used by this package are supported by the compiler; check that all the build --- tools are available (including version checks if appropriate); checks for --- any required @pkg-config@ packages (updating the 'BuildInfo' with the --- results) --- --- Then based on all this it saves the info in the 'LocalBuildInfo' and writes --- it out to the @dist\/setup-config@ file. It also displays various details to --- the user, the amount of information displayed depending on the verbosity --- level. 
- -module Distribution.Simple.Configure (configure, - writePersistBuildConfig, - getConfigStateFile, - getPersistBuildConfig, - checkPersistBuildConfigOutdated, - tryGetPersistBuildConfig, - maybeGetPersistBuildConfig, - findDistPref, findDistPrefOrDefault, - getInternalPackages, - computeComponentId, - computeCompatPackageKey, - computeCompatPackageName, - localBuildInfoFile, - getInstalledPackages, - getInstalledPackagesMonitorFiles, - getPackageDBContents, - configCompiler, configCompilerAux, - configCompilerEx, configCompilerAuxEx, - computeEffectiveProfiling, - ccLdOptionsBuildInfo, - checkForeignDeps, - interpretPackageDbFlags, - ConfigStateFileError(..), - tryGetConfigStateFile, - platformDefines, - ) - where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Compiler -import Distribution.Types.IncludeRenaming -import Distribution.Utils.NubList -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Simple.PreProcess -import Distribution.Package -import qualified Distribution.InstalledPackageInfo as Installed -import Distribution.InstalledPackageInfo (InstalledPackageInfo) -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import Distribution.PackageDescription as PD hiding (Flag) -import Distribution.Types.PackageDescription as PD -import Distribution.PackageDescription.PrettyPrint -import Distribution.PackageDescription.Configuration -import Distribution.PackageDescription.Check hiding (doesFileExist) -import Distribution.Simple.BuildToolDepends -import Distribution.Simple.Program -import Distribution.Simple.Setup as Setup -import Distribution.Simple.BuildTarget -import Distribution.Simple.LocalBuildInfo -import Distribution.Types.ExeDependency -import Distribution.Types.LegacyExeDependency -import Distribution.Types.PkgconfigDependency -import Distribution.Types.MungedPackageName -import Distribution.Types.LocalBuildInfo -import 
Distribution.Types.ComponentRequestedSpec -import Distribution.Types.ForeignLib -import Distribution.Types.ForeignLibType -import Distribution.Types.ForeignLibOption -import Distribution.Types.Mixin -import Distribution.Types.UnqualComponentName -import Distribution.Simple.Utils -import Distribution.System -import Distribution.Version -import Distribution.Verbosity -import qualified Distribution.Compat.Graph as Graph -import Distribution.Compat.Stack -import Distribution.Backpack.Configure -import Distribution.Backpack.DescribeUnitId -import Distribution.Backpack.PreExistingComponent -import Distribution.Backpack.ConfiguredComponent (newPackageDepsBehaviour) -import Distribution.Backpack.Id -import Distribution.Utils.LogProgress - -import qualified Distribution.Simple.GHC as GHC -import qualified Distribution.Simple.GHCJS as GHCJS -import qualified Distribution.Simple.UHC as UHC -import qualified Distribution.Simple.HaskellSuite as HaskellSuite - -import Control.Exception - ( ErrorCall, Exception, evaluate, throw, throwIO, try ) -import Control.Monad ( forM, forM_ ) -import Distribution.Compat.Binary ( decodeOrFailIO, encode ) -import Distribution.Compat.Directory ( listDirectory ) -import Data.ByteString.Lazy ( ByteString ) -import qualified Data.ByteString as BS -import qualified Data.ByteString.Lazy.Char8 as BLC8 -import Data.List - ( (\\), partition, inits, stripPrefix, intersect ) -import Data.Either - ( partitionEithers ) -import qualified Data.Map as Map -import System.Directory - ( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory - , removeFile) -import System.FilePath - ( (), isAbsolute, takeDirectory ) -import Distribution.Compat.Directory - ( doesPathExist ) -import qualified System.Info - ( compilerName, compilerVersion ) -import System.IO - ( hPutStrLn, hClose ) -import Distribution.Text - ( Text(disp), defaultStyle, display, simpleParse ) -import Text.PrettyPrint - ( Doc, (<+>), ($+$), char, comma, hsep, nest - , punctuate, quotes, 
render, renderStyle, sep, text ) -import Distribution.Compat.Environment ( lookupEnv ) -import Distribution.Compat.Exception ( catchExit, catchIO ) - - -type UseExternalInternalDeps = Bool - --- | The errors that can be thrown when reading the @setup-config@ file. -data ConfigStateFileError - = ConfigStateFileNoHeader -- ^ No header found. - | ConfigStateFileBadHeader -- ^ Incorrect header. - | ConfigStateFileNoParse -- ^ Cannot parse file contents. - | ConfigStateFileMissing -- ^ No file! - | ConfigStateFileBadVersion PackageIdentifier PackageIdentifier - (Either ConfigStateFileError LocalBuildInfo) -- ^ Mismatched version. - deriving (Typeable) - --- | Format a 'ConfigStateFileError' as a user-facing error message. -dispConfigStateFileError :: ConfigStateFileError -> Doc -dispConfigStateFileError ConfigStateFileNoHeader = - text "Saved package config file header is missing." - <+> text "Re-run the 'configure' command." -dispConfigStateFileError ConfigStateFileBadHeader = - text "Saved package config file header is corrupt." - <+> text "Re-run the 'configure' command." -dispConfigStateFileError ConfigStateFileNoParse = - text "Saved package config file is corrupt." - <+> text "Re-run the 'configure' command." -dispConfigStateFileError ConfigStateFileMissing = - text "Run the 'configure' command first." -dispConfigStateFileError (ConfigStateFileBadVersion oldCabal oldCompiler _) = - text "Saved package config file is outdated:" - $+$ badCabal $+$ badCompiler - $+$ text "Re-run the 'configure' command." - where - badCabal = - text "• the Cabal version changed from" - <+> disp oldCabal <+> "to" <+> disp currentCabalId - badCompiler - | oldCompiler == currentCompilerId = mempty - | otherwise = - text "• the compiler changed from" - <+> disp oldCompiler <+> "to" <+> disp currentCompilerId - -instance Show ConfigStateFileError where - show = renderStyle defaultStyle . 
dispConfigStateFileError - -instance Exception ConfigStateFileError - --- | Read the 'localBuildInfoFile'. Throw an exception if the file is --- missing, if the file cannot be read, or if the file was created by an older --- version of Cabal. -getConfigStateFile :: FilePath -- ^ The file path of the @setup-config@ file. - -> IO LocalBuildInfo -getConfigStateFile filename = do - exists <- doesFileExist filename - unless exists $ throwIO ConfigStateFileMissing - -- Read the config file into a strict ByteString to avoid problems with - -- lazy I/O, then convert to lazy because the binary package needs that. - contents <- BS.readFile filename - let (header, body) = BLC8.span (/='\n') (BLC8.fromChunks [contents]) - - headerParseResult <- try $ evaluate $ parseHeader header - let (cabalId, compId) = - case headerParseResult of - Left (_ :: ErrorCall) -> throw ConfigStateFileBadHeader - Right x -> x - - let getStoredValue = do - result <- decodeOrFailIO (BLC8.tail body) - case result of - Left _ -> throw ConfigStateFileNoParse - Right x -> return x - deferErrorIfBadVersion act - | cabalId /= currentCabalId = do - eResult <- try act - throw $ ConfigStateFileBadVersion cabalId compId eResult - | otherwise = act - deferErrorIfBadVersion getStoredValue - where - _ = callStack -- TODO: attach call stack to exception - --- | Read the 'localBuildInfoFile', returning either an error or the local build --- info. -tryGetConfigStateFile :: FilePath -- ^ The file path of the @setup-config@ file. - -> IO (Either ConfigStateFileError LocalBuildInfo) -tryGetConfigStateFile = try . getConfigStateFile - --- | Try to read the 'localBuildInfoFile'. -tryGetPersistBuildConfig :: FilePath -- ^ The @dist@ directory path. - -> IO (Either ConfigStateFileError LocalBuildInfo) -tryGetPersistBuildConfig = try . getPersistBuildConfig - --- | Read the 'localBuildInfoFile'. 
Throw an exception if the file is --- missing, if the file cannot be read, or if the file was created by an older --- version of Cabal. -getPersistBuildConfig :: FilePath -- ^ The @dist@ directory path. - -> IO LocalBuildInfo -getPersistBuildConfig = getConfigStateFile . localBuildInfoFile - --- | Try to read the 'localBuildInfoFile'. -maybeGetPersistBuildConfig :: FilePath -- ^ The @dist@ directory path. - -> IO (Maybe LocalBuildInfo) -maybeGetPersistBuildConfig = - liftM (either (const Nothing) Just) . tryGetPersistBuildConfig - --- | After running configure, output the 'LocalBuildInfo' to the --- 'localBuildInfoFile'. -writePersistBuildConfig :: FilePath -- ^ The @dist@ directory path. - -> LocalBuildInfo -- ^ The 'LocalBuildInfo' to write. - -> NoCallStackIO () -writePersistBuildConfig distPref lbi = do - createDirectoryIfMissing False distPref - writeFileAtomic (localBuildInfoFile distPref) $ - BLC8.unlines [showHeader pkgId, encode lbi] - where - pkgId = localPackage lbi - --- | Identifier of the current Cabal package. -currentCabalId :: PackageIdentifier -currentCabalId = PackageIdentifier (mkPackageName "Cabal") cabalVersion - --- | Identifier of the current compiler package. -currentCompilerId :: PackageIdentifier -currentCompilerId = PackageIdentifier (mkPackageName System.Info.compilerName) - (mkVersion' System.Info.compilerVersion) - --- | Parse the @setup-config@ file header, returning the package identifiers --- for Cabal and the compiler. -parseHeader :: ByteString -- ^ The file contents. 
- -> (PackageIdentifier, PackageIdentifier) -parseHeader header = case BLC8.words header of - ["Saved", "package", "config", "for", pkgId, "written", "by", cabalId, - "using", compId] -> - fromMaybe (throw ConfigStateFileBadHeader) $ do - _ <- simpleParse (BLC8.unpack pkgId) :: Maybe PackageIdentifier - cabalId' <- simpleParse (BLC8.unpack cabalId) - compId' <- simpleParse (BLC8.unpack compId) - return (cabalId', compId') - _ -> throw ConfigStateFileNoHeader - --- | Generate the @setup-config@ file header. -showHeader :: PackageIdentifier -- ^ The processed package. - -> ByteString -showHeader pkgId = BLC8.unwords - [ "Saved", "package", "config", "for" - , BLC8.pack $ display pkgId - , "written", "by" - , BLC8.pack $ display currentCabalId - , "using" - , BLC8.pack $ display currentCompilerId - ] - --- | Check that localBuildInfoFile is up-to-date with respect to the --- .cabal file. -checkPersistBuildConfigOutdated :: FilePath -> FilePath -> NoCallStackIO Bool -checkPersistBuildConfigOutdated distPref pkg_descr_file = - pkg_descr_file `moreRecentFile` localBuildInfoFile distPref - --- | Get the path of @dist\/setup-config@. -localBuildInfoFile :: FilePath -- ^ The @dist@ directory path. - -> FilePath -localBuildInfoFile distPref = distPref "setup-config" - --- ----------------------------------------------------------------------------- --- * Configuration --- ----------------------------------------------------------------------------- - --- | Return the \"dist/\" prefix, or the default prefix. The prefix is taken --- from (in order of highest to lowest preference) the override prefix, the --- \"CABAL_BUILDDIR\" environment variable, or the default prefix. 
-findDistPref :: FilePath -- ^ default \"dist\" prefix - -> Setup.Flag FilePath -- ^ override \"dist\" prefix - -> NoCallStackIO FilePath -findDistPref defDistPref overrideDistPref = do - envDistPref <- liftM parseEnvDistPref (lookupEnv "CABAL_BUILDDIR") - return $ fromFlagOrDefault defDistPref (mappend envDistPref overrideDistPref) - where - parseEnvDistPref env = - case env of - Just distPref | not (null distPref) -> toFlag distPref - _ -> NoFlag - --- | Return the \"dist/\" prefix, or the default prefix. The prefix is taken --- from (in order of highest to lowest preference) the override prefix, the --- \"CABAL_BUILDDIR\" environment variable, or 'defaultDistPref' is used. Call --- this function to resolve a @*DistPref@ flag whenever it is not known to be --- set. (The @*DistPref@ flags are always set to a definite value before --- invoking 'UserHooks'.) -findDistPrefOrDefault :: Setup.Flag FilePath -- ^ override \"dist\" prefix - -> NoCallStackIO FilePath -findDistPrefOrDefault = findDistPref defaultDistPref - --- |Perform the \"@.\/setup configure@\" action. --- Returns the @.setup-config@ file. -configure :: (GenericPackageDescription, HookedBuildInfo) - -> ConfigFlags -> IO LocalBuildInfo -configure (pkg_descr0, pbi) cfg = do - -- Determine the component we are configuring, if a user specified - -- one on the command line. We use a fake, flattened version of - -- the package since at this point, we're not really sure what - -- components we *can* configure. @Nothing@ means that we should - -- configure everything (the old behavior). 
- (mb_cname :: Maybe ComponentName) <- do - let flat_pkg_descr = flattenPackageDescription pkg_descr0 - targets <- readBuildTargets verbosity flat_pkg_descr (configArgs cfg) - -- TODO: bleat if you use the module/file syntax - let targets' = [ cname | BuildTargetComponent cname <- targets ] - case targets' of - _ | null (configArgs cfg) -> return Nothing - [cname] -> return (Just cname) - [] -> die' verbosity "No valid component targets found" - _ -> die' verbosity "Can only configure either single component or all of them" - - let use_external_internal_deps = isJust mb_cname - case mb_cname of - Nothing -> setupMessage verbosity "Configuring" (packageId pkg_descr0) - Just cname -> setupMessage' verbosity "Configuring" (packageId pkg_descr0) - cname (Just (configInstantiateWith cfg)) - - -- configCID is only valid for per-component configure - when (isJust (flagToMaybe (configCID cfg)) && isNothing mb_cname) $ - die' verbosity "--cid is only supported for per-component configure" - - checkDeprecatedFlags verbosity cfg - checkExactConfiguration verbosity pkg_descr0 cfg - - -- Where to build the package - let buildDir :: FilePath -- e.g. 
dist/build - -- fromFlag OK due to Distribution.Simple calling - -- findDistPrefOrDefault to fill it in - buildDir = fromFlag (configDistPref cfg) "build" - createDirectoryIfMissingVerbose (lessVerbose verbosity) True buildDir - - -- What package database(s) to use - let packageDbs :: PackageDBStack - packageDbs - = interpretPackageDbFlags - (fromFlag (configUserInstall cfg)) - (configPackageDBs cfg) - - -- comp: the compiler we're building with - -- compPlatform: the platform we're building for - -- programDb: location and args of all programs we're - -- building with - (comp :: Compiler, - compPlatform :: Platform, - programDb :: ProgramDb) - <- configCompilerEx - (flagToMaybe (configHcFlavor cfg)) - (flagToMaybe (configHcPath cfg)) - (flagToMaybe (configHcPkg cfg)) - (mkProgramDb cfg (configPrograms cfg)) - (lessVerbose verbosity) - - -- The InstalledPackageIndex of all installed packages - installedPackageSet :: InstalledPackageIndex - <- getInstalledPackages (lessVerbose verbosity) comp - packageDbs programDb - - -- The set of package names which are "shadowed" by internal - -- packages, and which component they map to - let internalPackageSet :: Map PackageName (Maybe UnqualComponentName) - internalPackageSet = getInternalPackages pkg_descr0 - - -- Make a data structure describing what components are enabled. - let enabled :: ComponentRequestedSpec - enabled = case mb_cname of - Just cname -> OneComponentRequestedSpec cname - Nothing -> ComponentRequestedSpec - -- The flag name (@--enable-tests@) is a - -- little bit of a misnomer, because - -- just passing this flag won't - -- "enable", in our internal - -- nomenclature; it's just a request; a - -- @buildable: False@ might make it - -- not possible to enable. - { testsRequested = fromFlag (configTests cfg) - , benchmarksRequested = - fromFlag (configBenchmarks cfg) } - -- Some sanity checks related to enabling components. 
- when (isJust mb_cname - && (fromFlag (configTests cfg) || fromFlag (configBenchmarks cfg))) $ - die' verbosity $ "--enable-tests/--enable-benchmarks are incompatible with" ++ - " explicitly specifying a component to configure." - - -- allConstraints: The set of all 'Dependency's we have. Used ONLY - -- to 'configureFinalizedPackage'. - -- requiredDepsMap: A map from 'PackageName' to the specifically - -- required 'InstalledPackageInfo', due to --dependency - -- - -- NB: These constraints are to be applied to ALL components of - -- a package. Thus, it's not an error if allConstraints contains - -- more constraints than is necessary for a component (another - -- component might need it.) - -- - -- NB: The fact that we bundle all the constraints together means - -- that is not possible to configure a test-suite to use one - -- version of a dependency, and the executable to use another. - (allConstraints :: [Dependency], - requiredDepsMap :: Map PackageName InstalledPackageInfo) - <- either (die' verbosity) return $ - combinedConstraints (configConstraints cfg) - (configDependencies cfg) - installedPackageSet - - -- pkg_descr: The resolved package description, that does not contain any - -- conditionals, because we have have an assignment for - -- every flag, either picking them ourselves using a - -- simple naive algorithm, or having them be passed to - -- us by 'configConfigurationsFlags') - -- flags: The 'FlagAssignment' that the conditionals were - -- resolved with. - -- - -- NB: Why doesn't finalizing a package also tell us what the - -- dependencies are (e.g. when we run the naive algorithm, - -- we are checking if dependencies are satisfiable)? The - -- primary reason is that we may NOT have done any solving: - -- if the flags are all chosen for us, this step is a simple - -- matter of flattening according to that assignment. It's - -- cleaner to then configure the dependencies afterwards. 
- (pkg_descr :: PackageDescription, - flags :: FlagAssignment) - <- configureFinalizedPackage verbosity cfg enabled - allConstraints - (dependencySatisfiable - use_external_internal_deps - (fromFlagOrDefault False (configExactConfiguration cfg)) - (packageName pkg_descr0) - installedPackageSet - internalPackageSet - requiredDepsMap) - comp - compPlatform - pkg_descr0 - - debug verbosity $ "Finalized package description:\n" - ++ showPackageDescription pkg_descr - - let cabalFileDir = maybe "." takeDirectory $ - flagToMaybe (configCabalFilePath cfg) - checkCompilerProblems verbosity comp pkg_descr enabled - checkPackageProblems verbosity cabalFileDir pkg_descr0 - (updatePackageDescription pbi pkg_descr) - - -- The list of 'InstalledPackageInfo' recording the selected - -- dependencies on external packages. - -- - -- Invariant: For any package name, there is at most one package - -- in externalPackageDeps which has that name. - -- - -- NB: The dependency selection is global over ALL components - -- in the package (similar to how allConstraints and - -- requiredDepsMap are global over all components). In particular, - -- if *any* component (post-flag resolution) has an unsatisfiable - -- dependency, we will fail. This can sometimes be undesirable - -- for users, see #1786 (benchmark conflicts with executable), - -- - -- In the presence of Backpack, these package dependencies are - -- NOT complete: they only ever include the INDEFINITE - -- dependencies. After we apply an instantiation, we'll get - -- definite references which constitute extra dependencies. - -- (Why not have cabal-install pass these in explicitly? - -- For one it's deterministic; for two, we need to associate - -- them with renamings which would require a far more complicated - -- input scheme than what we have today.) 
- externalPkgDeps :: [PreExistingComponent] - <- configureDependencies - verbosity - use_external_internal_deps - internalPackageSet - installedPackageSet - requiredDepsMap - pkg_descr - enabled - - -- Compute installation directory templates, based on user - -- configuration. - -- - -- TODO: Move this into a helper function. - defaultDirs :: InstallDirTemplates - <- defaultInstallDirs' use_external_internal_deps - (compilerFlavor comp) - (fromFlag (configUserInstall cfg)) - (hasLibs pkg_descr) - let installDirs :: InstallDirTemplates - installDirs = combineInstallDirs fromFlagOrDefault - defaultDirs (configInstallDirs cfg) - - -- Check languages and extensions - -- TODO: Move this into a helper function. - let langlist = nub $ catMaybes $ map defaultLanguage - (enabledBuildInfos pkg_descr enabled) - let langs = unsupportedLanguages comp langlist - when (not (null langs)) $ - die' verbosity $ "The package " ++ display (packageId pkg_descr0) - ++ " requires the following languages which are not " - ++ "supported by " ++ display (compilerId comp) ++ ": " - ++ intercalate ", " (map display langs) - let extlist = nub $ concatMap allExtensions (enabledBuildInfos pkg_descr enabled) - let exts = unsupportedExtensions comp extlist - when (not (null exts)) $ - die' verbosity $ "The package " ++ display (packageId pkg_descr0) - ++ " requires the following language extensions which are not " - ++ "supported by " ++ display (compilerId comp) ++ ": " - ++ intercalate ", " (map display exts) - - -- Check foreign library build requirements - let flibs = [flib | CFLib flib <- enabledComponents pkg_descr enabled] - let unsupportedFLibs = unsupportedForeignLibs comp compPlatform flibs - when (not (null unsupportedFLibs)) $ - die' verbosity $ "Cannot build some foreign libraries: " - ++ intercalate "," unsupportedFLibs - - -- Configure certain external build tools, see below for which ones. 
- let requiredBuildTools = do - bi <- enabledBuildInfos pkg_descr enabled - -- First, we collect any tool dep that we know is external. This is, - -- in practice: - -- - -- 1. `build-tools` entries on the whitelist - -- - -- 2. `build-tool-depends` that aren't from the current package. - let externBuildToolDeps = - [ LegacyExeDependency (unUnqualComponentName eName) versionRange - | buildTool@(ExeDependency _ eName versionRange) - <- getAllToolDependencies pkg_descr bi - , not $ isInternal pkg_descr buildTool ] - -- Second, we collect any build-tools entry we don't know how to - -- desugar. We'll never have any idea how to build them, so we just - -- hope they are already on the PATH. - let unknownBuildTools = - [ buildTool - | buildTool <- buildTools bi - , Nothing == desugarBuildTool pkg_descr buildTool ] - externBuildToolDeps ++ unknownBuildTools - - programDb' <- - configureAllKnownPrograms (lessVerbose verbosity) programDb - >>= configureRequiredPrograms verbosity requiredBuildTools - - (pkg_descr', programDb'') <- - configurePkgconfigPackages verbosity pkg_descr programDb' enabled - - -- Compute internal component graph - -- - -- The general idea is that we take a look at all the source level - -- components (which may build-depends on each other) and form a graph. - -- From there, we build a ComponentLocalBuildInfo for each of the - -- components, which lets us actually build each component. - -- internalPackageSet - -- use_external_internal_deps - (buildComponents :: [ComponentLocalBuildInfo], - packageDependsIndex :: InstalledPackageIndex) <- - runLogProgress verbosity $ configureComponentLocalBuildInfos - verbosity - use_external_internal_deps - enabled - (fromFlagOrDefault False (configDeterministic cfg)) - (configIPID cfg) - (configCID cfg) - pkg_descr - externalPkgDeps - (configConfigurationsFlags cfg) - (configInstantiateWith cfg) - installedPackageSet - comp - - -- Decide if we're going to compile with split sections. 
- split_sections :: Bool <- - if not (fromFlag $ configSplitSections cfg) - then return False - else case compilerFlavor comp of - GHC | compilerVersion comp >= mkVersion [8,0] - -> return True - GHCJS - -> return True - _ -> do warn verbosity - ("this compiler does not support " ++ - "--enable-split-sections; ignoring") - return False - - -- Decide if we're going to compile with split objects. - split_objs :: Bool <- - if not (fromFlag $ configSplitObjs cfg) - then return False - else case compilerFlavor comp of - _ | split_sections - -> do warn verbosity - ("--enable-split-sections and " ++ - "--enable-split-objs are mutually" ++ - "exclusive; ignoring the latter") - return False - GHC - -> return True - GHCJS - -> return True - _ -> do warn verbosity - ("this compiler does not support " ++ - "--enable-split-objs; ignoring") - return False - - let ghciLibByDefault = - case compilerId comp of - CompilerId GHC _ -> - -- If ghc is non-dynamic, then ghci needs object files, - -- so we build one by default. - -- - -- Technically, archive files should be sufficient for ghci, - -- but because of GHC bug #8942, it has never been safe to - -- rely on them. By the time that bug was fixed, ghci had - -- been changed to read shared libraries instead of archive - -- files (see next code block). - not (GHC.isDynamic comp) - CompilerId GHCJS _ -> - not (GHCJS.isDynamic comp) - _ -> False - - let sharedLibsByDefault - | fromFlag (configDynExe cfg) = - -- build a shared library if dynamically-linked - -- executables are requested - True - | otherwise = case compilerId comp of - CompilerId GHC _ -> - -- if ghc is dynamic, then ghci needs a shared - -- library, so we build one by default. - GHC.isDynamic comp - CompilerId GHCJS _ -> - GHCJS.isDynamic comp - _ -> False - withSharedLib_ = - -- build shared libraries if required by GHC or by the - -- executable linking mode, but allow the user to force - -- building only static library archives with - -- --disable-shared. 
- fromFlagOrDefault sharedLibsByDefault $ configSharedLib cfg - - withStaticLib_ = - -- build a static library (all dependent libraries rolled - -- into a huge .a archive) via GHCs -staticlib flag. - fromFlagOrDefault False $ configStaticLib cfg - - withDynExe_ = fromFlag $ configDynExe cfg - when (withDynExe_ && not withSharedLib_) $ warn verbosity $ - "Executables will use dynamic linking, but a shared library " - ++ "is not being built. Linking will fail if any executables " - ++ "depend on the library." - - setProfLBI <- configureProfiling verbosity cfg comp - - setCoverageLBI <- configureCoverage verbosity cfg comp - - let reloc = fromFlagOrDefault False $ configRelocatable cfg - - let buildComponentsMap = - foldl' (\m clbi -> Map.insertWith (++) - (componentLocalName clbi) [clbi] m) - Map.empty buildComponents - - let lbi = (setCoverageLBI . setProfLBI) - LocalBuildInfo { - configFlags = cfg, - flagAssignment = flags, - componentEnabledSpec = enabled, - extraConfigArgs = [], -- Currently configure does not - -- take extra args, but if it - -- did they would go here. 
- installDirTemplates = installDirs, - compiler = comp, - hostPlatform = compPlatform, - buildDir = buildDir, - cabalFilePath = flagToMaybe (configCabalFilePath cfg), - componentGraph = Graph.fromDistinctList buildComponents, - componentNameMap = buildComponentsMap, - installedPkgs = packageDependsIndex, - pkgDescrFile = Nothing, - localPkgDescr = pkg_descr', - withPrograms = programDb'', - withVanillaLib = fromFlag $ configVanillaLib cfg, - withSharedLib = withSharedLib_, - withStaticLib = withStaticLib_, - withDynExe = withDynExe_, - withProfLib = False, - withProfLibDetail = ProfDetailNone, - withProfExe = False, - withProfExeDetail = ProfDetailNone, - withOptimization = fromFlag $ configOptimization cfg, - withDebugInfo = fromFlag $ configDebugInfo cfg, - withGHCiLib = fromFlagOrDefault ghciLibByDefault $ - configGHCiLib cfg, - splitSections = split_sections, - splitObjs = split_objs, - stripExes = fromFlag $ configStripExes cfg, - stripLibs = fromFlag $ configStripLibs cfg, - exeCoverage = False, - libCoverage = False, - withPackageDB = packageDbs, - progPrefix = fromFlag $ configProgPrefix cfg, - progSuffix = fromFlag $ configProgSuffix cfg, - relocatable = reloc - } - - when reloc (checkRelocatable verbosity pkg_descr lbi) - - -- TODO: This is not entirely correct, because the dirs may vary - -- across libraries/executables - let dirs = absoluteInstallDirs pkg_descr lbi NoCopyDest - relative = prefixRelativeInstallDirs (packageId pkg_descr) lbi - - -- PKGROOT: allowing ${pkgroot} to be passed as --prefix to - -- cabal configure, is only a hidden option. It allows packages - -- to be relocatable with their package database. This however - -- breaks when the Paths_* or other includes are used that - -- contain hard coded paths. This is still an open TODO. - -- - -- Allowing ${pkgroot} here, however requires less custom hooks - -- in scripts that *really* want ${pkgroot}. 
See haskell/cabal/#4872 - unless (isAbsolute (prefix dirs) - || "${pkgroot}" `isPrefixOf` prefix dirs) $ die' verbosity $ - "expected an absolute directory name for --prefix: " ++ prefix dirs - - when ("${pkgroot}" `isPrefixOf` prefix dirs) $ - warn verbosity $ "Using ${pkgroot} in prefix " ++ prefix dirs - ++ " will not work if you rely on the Path_* module " - ++ " or other hard coded paths. Cabal does not yet " - ++ " support fully relocatable builds! " - ++ " See #462 #2302 #2994 #3305 #3473 #3586 #3909 #4097 #4291 #4872" - - info verbosity $ "Using " ++ display currentCabalId - ++ " compiled by " ++ display currentCompilerId - info verbosity $ "Using compiler: " ++ showCompilerId comp - info verbosity $ "Using install prefix: " ++ prefix dirs - - let dirinfo name dir isPrefixRelative = - info verbosity $ name ++ " installed in: " ++ dir ++ relNote - where relNote = case buildOS of - Windows | not (hasLibs pkg_descr) - && isNothing isPrefixRelative - -> " (fixed location)" - _ -> "" - - dirinfo "Executables" (bindir dirs) (bindir relative) - dirinfo "Libraries" (libdir dirs) (libdir relative) - dirinfo "Dynamic Libraries" (dynlibdir dirs) (dynlibdir relative) - dirinfo "Private executables" (libexecdir dirs) (libexecdir relative) - dirinfo "Data files" (datadir dirs) (datadir relative) - dirinfo "Documentation" (docdir dirs) (docdir relative) - dirinfo "Configuration files" (sysconfdir dirs) (sysconfdir relative) - - sequence_ [ reportProgram verbosity prog configuredProg - | (prog, configuredProg) <- knownPrograms programDb'' ] - - return lbi - - where - verbosity = fromFlag (configVerbosity cfg) - -mkProgramDb :: ConfigFlags -> ProgramDb -> ProgramDb -mkProgramDb cfg initialProgramDb = programDb - where - programDb = userSpecifyArgss (configProgramArgs cfg) - . userSpecifyPaths (configProgramPaths cfg) - . 
setProgramSearchPath searchpath - $ initialProgramDb - searchpath = getProgramSearchPath initialProgramDb - ++ map ProgramSearchPathDir - (fromNubList $ configProgramPathExtra cfg) - --- ----------------------------------------------------------------------------- --- Helper functions for configure - --- | Check if the user used any deprecated flags. -checkDeprecatedFlags :: Verbosity -> ConfigFlags -> IO () -checkDeprecatedFlags verbosity cfg = do - unless (configProfExe cfg == NoFlag) $ do - let enable | fromFlag (configProfExe cfg) = "enable" - | otherwise = "disable" - warn verbosity - ("The flag --" ++ enable ++ "-executable-profiling is deprecated. " - ++ "Please use --" ++ enable ++ "-profiling instead.") - - unless (configLibCoverage cfg == NoFlag) $ do - let enable | fromFlag (configLibCoverage cfg) = "enable" - | otherwise = "disable" - warn verbosity - ("The flag --" ++ enable ++ "-library-coverage is deprecated. " - ++ "Please use --" ++ enable ++ "-coverage instead.") - --- | Sanity check: if '--exact-configuration' was given, ensure that the --- complete flag assignment was specified on the command line. -checkExactConfiguration :: Verbosity -> GenericPackageDescription -> ConfigFlags -> IO () -checkExactConfiguration verbosity pkg_descr0 cfg = - when (fromFlagOrDefault False (configExactConfiguration cfg)) $ do - let cmdlineFlags = map fst (unFlagAssignment (configConfigurationsFlags cfg)) - allFlags = map flagName . genPackageFlags $ pkg_descr0 - diffFlags = allFlags \\ cmdlineFlags - when (not . null $ diffFlags) $ - die' verbosity $ "'--exact-configuration' was given, " - ++ "but the following flags were not specified: " - ++ intercalate ", " (map show diffFlags) - --- | Create a PackageIndex that makes *any libraries that might be* --- defined internally to this package look like installed packages, in --- case an executable should refer to any of them as dependencies. 
--- --- It must be *any libraries that might be* defined rather than the --- actual definitions, because these depend on conditionals in the .cabal --- file, and we haven't resolved them yet. finalizePD --- does the resolution of conditionals, and it takes internalPackageSet --- as part of its input. -getInternalPackages :: GenericPackageDescription - -> Map PackageName (Maybe UnqualComponentName) -getInternalPackages pkg_descr0 = - -- TODO: some day, executables will be fair game here too! - let pkg_descr = flattenPackageDescription pkg_descr0 - f lib = case libName lib of - Nothing -> (packageName pkg_descr, Nothing) - Just n' -> (unqualComponentNameToPackageName n', Just n') - in Map.fromList (map f (allLibraries pkg_descr)) - --- | Returns true if a dependency is satisfiable. This function may --- report a dependency satisfiable even when it is not, but not vice --- versa. This is to be passed to finalizePD. -dependencySatisfiable - :: Bool -- ^ use external internal deps? - -> Bool -- ^ exact configuration? - -> PackageName - -> InstalledPackageIndex -- ^ installed set - -> Map PackageName (Maybe UnqualComponentName) -- ^ internal set - -> Map PackageName InstalledPackageInfo -- ^ required dependencies - -> (Dependency -> Bool) -dependencySatisfiable - use_external_internal_deps - exact_config pn installedPackageSet internalPackageSet requiredDepsMap - d@(Dependency depName vr) - - | exact_config - -- When we're given '--exact-configuration', we assume that all - -- dependencies and flags are exactly specified on the command - -- line. Thus we only consult the 'requiredDepsMap'. Note that - -- we're not doing the version range check, so if there's some - -- dependency that wasn't specified on the command line, - -- 'finalizePD' will fail. - -- TODO: mention '--exact-configuration' in the error message - -- when this fails? 
- = if isInternalDep && not use_external_internal_deps - -- Except for internal deps, when we're NOT per-component mode; - -- those are just True. - then True - else depName `Map.member` requiredDepsMap - - | isInternalDep - = if use_external_internal_deps - -- When we are doing per-component configure, we now need to - -- test if the internal dependency is in the index. This has - -- DIFFERENT semantics from normal dependency satisfiability. - then internalDepSatisfiable - -- If a 'PackageName' is defined by an internal component, the dep is - -- satisfiable (we're going to build it ourselves) - else True - - | otherwise - = depSatisfiable - - where - isInternalDep = Map.member depName internalPackageSet - - depSatisfiable = - not . null $ PackageIndex.lookupDependency installedPackageSet d - - internalDepSatisfiable = - not . null $ PackageIndex.lookupInternalDependency - installedPackageSet (Dependency pn vr) cn - where - cn | pn == depName - = Nothing - | otherwise - -- Reinterpret the "package name" as an unqualified component - -- name - = Just (mkUnqualComponentName (unPackageName depName)) - --- | Finalize a generic package description. The workhorse is --- 'finalizePD' but there's a bit of other nattering --- about necessary. --- --- TODO: what exactly is the business with @flaggedTests@ and --- @flaggedBenchmarks@? -configureFinalizedPackage - :: Verbosity - -> ConfigFlags - -> ComponentRequestedSpec - -> [Dependency] - -> (Dependency -> Bool) -- ^ tests if a dependency is satisfiable. - -- Might say it's satisfiable even when not. 
- -> Compiler - -> Platform - -> GenericPackageDescription - -> IO (PackageDescription, FlagAssignment) -configureFinalizedPackage verbosity cfg enabled - allConstraints satisfies comp compPlatform pkg_descr0 = do - - (pkg_descr0', flags) <- - case finalizePD - (configConfigurationsFlags cfg) - enabled - satisfies - compPlatform - (compilerInfo comp) - allConstraints - pkg_descr0 - of Right r -> return r - Left missing -> - die' verbosity $ "Encountered missing dependencies:\n" - ++ (render . nest 4 . sep . punctuate comma - . map (disp . simplifyDependency) - $ missing) - - -- add extra include/lib dirs as specified in cfg - -- we do it here so that those get checked too - let pkg_descr = addExtraIncludeLibDirs pkg_descr0' - - unless (nullFlagAssignment flags) $ - info verbosity $ "Flags chosen: " - ++ intercalate ", " [ unFlagName fn ++ "=" ++ display value - | (fn, value) <- unFlagAssignment flags ] - - return (pkg_descr, flags) - where - addExtraIncludeLibDirs pkg_descr = - let extraBi = mempty { extraLibDirs = configExtraLibDirs cfg - , extraFrameworkDirs = configExtraFrameworkDirs cfg - , PD.includeDirs = configExtraIncludeDirs cfg} - modifyLib l = l{ libBuildInfo = libBuildInfo l - `mappend` extraBi } - modifyExecutable e = e{ buildInfo = buildInfo e - `mappend` extraBi} - modifyForeignLib f = f{ foreignLibBuildInfo = foreignLibBuildInfo f - `mappend` extraBi} - modifyTestsuite t = t{ testBuildInfo = testBuildInfo t - `mappend` extraBi} - modifyBenchmark b = b{ benchmarkBuildInfo = benchmarkBuildInfo b - `mappend` extraBi} - in pkg_descr - { library = modifyLib `fmap` library pkg_descr - , subLibraries = modifyLib `map` subLibraries pkg_descr - , executables = modifyExecutable `map` executables pkg_descr - , foreignLibs = modifyForeignLib `map` foreignLibs pkg_descr - , testSuites = modifyTestsuite `map` testSuites pkg_descr - , benchmarks = modifyBenchmark `map` benchmarks pkg_descr - } - --- | Check for use of Cabal features which require compiler support 
-checkCompilerProblems :: Verbosity -> Compiler -> PackageDescription -> ComponentRequestedSpec -> IO () -checkCompilerProblems verbosity comp pkg_descr enabled = do - unless (renamingPackageFlagsSupported comp || - all (all (isDefaultIncludeRenaming . mixinIncludeRenaming) . mixins) - (enabledBuildInfos pkg_descr enabled)) $ - die' verbosity $ "Your compiler does not support thinning and renaming on " - ++ "package flags. To use this feature you must use " - ++ "GHC 7.9 or later." - - when (any (not.null.PD.reexportedModules) (PD.allLibraries pkg_descr) - && not (reexportedModulesSupported comp)) $ - die' verbosity $ "Your compiler does not support module re-exports. To use " - ++ "this feature you must use GHC 7.9 or later." - - when (any (not.null.PD.signatures) (PD.allLibraries pkg_descr) - && not (backpackSupported comp)) $ - die' verbosity $ "Your compiler does not support Backpack. To use " - ++ "this feature you must use GHC 8.1 or later." - --- | Select dependencies for the package. 
-configureDependencies - :: Verbosity - -> UseExternalInternalDeps - -> Map PackageName (Maybe UnqualComponentName) -- ^ internal packages - -> InstalledPackageIndex -- ^ installed packages - -> Map PackageName InstalledPackageInfo -- ^ required deps - -> PackageDescription - -> ComponentRequestedSpec - -> IO [PreExistingComponent] -configureDependencies verbosity use_external_internal_deps - internalPackageSet installedPackageSet requiredDepsMap pkg_descr enableSpec = do - let failedDeps :: [FailedDependency] - allPkgDeps :: [ResolvedDependency] - (failedDeps, allPkgDeps) = partitionEithers - [ (\s -> (dep, s)) <$> status - | dep <- enabledBuildDepends pkg_descr enableSpec - , let status = selectDependency (package pkg_descr) - internalPackageSet installedPackageSet - requiredDepsMap use_external_internal_deps dep ] - - internalPkgDeps = [ pkgid - | (_, InternalDependency pkgid) <- allPkgDeps ] - -- NB: we have to SAVE the package name, because this is the only - -- way we can be able to resolve package names in the package - -- description. - externalPkgDeps = [ pec - | (_, ExternalDependency pec) <- allPkgDeps ] - - when (not (null internalPkgDeps) - && not (newPackageDepsBehaviour pkg_descr)) $ - die' verbosity $ "The field 'build-depends: " - ++ intercalate ", " (map (display . packageName) internalPkgDeps) - ++ "' refers to a library which is defined within the same " - ++ "package. To use this feature the package must specify at " - ++ "least 'cabal-version: >= 1.8'." - - reportFailedDependencies verbosity failedDeps - reportSelectedDependencies verbosity allPkgDeps - - return externalPkgDeps - --- | Select and apply coverage settings for the build based on the --- 'ConfigFlags' and 'Compiler'. 
-configureCoverage :: Verbosity -> ConfigFlags -> Compiler - -> IO (LocalBuildInfo -> LocalBuildInfo) -configureCoverage verbosity cfg comp = do - let tryExeCoverage = fromFlagOrDefault False (configCoverage cfg) - tryLibCoverage = fromFlagOrDefault tryExeCoverage - (mappend (configCoverage cfg) (configLibCoverage cfg)) - if coverageSupported comp - then do - let apply lbi = lbi { libCoverage = tryLibCoverage - , exeCoverage = tryExeCoverage - } - return apply - else do - let apply lbi = lbi { libCoverage = False - , exeCoverage = False - } - when (tryExeCoverage || tryLibCoverage) $ warn verbosity - ("The compiler " ++ showCompilerId comp ++ " does not support " - ++ "program coverage. Program coverage has been disabled.") - return apply - --- | Compute the effective value of the profiling flags --- @--enable-library-profiling@ and @--enable-executable-profiling@ --- from the specified 'ConfigFlags'. This may be useful for --- external Cabal tools which need to interact with Setup in --- a backwards-compatible way: the most predictable mechanism --- for enabling profiling across many legacy versions is to --- NOT use @--enable-profiling@ and use those two flags instead. --- --- Note that @--enable-executable-profiling@ also affects profiling --- of benchmarks and (non-detailed) test suites. -computeEffectiveProfiling :: ConfigFlags -> (Bool {- lib -}, Bool {- exe -}) -computeEffectiveProfiling cfg = - -- The --profiling flag sets the default for both libs and exes, - -- but can be overidden by --library-profiling, or the old deprecated - -- --executable-profiling flag. 
- -- - -- The --profiling-detail and --library-profiling-detail flags behave - -- similarly - let tryExeProfiling = fromFlagOrDefault False - (mappend (configProf cfg) (configProfExe cfg)) - tryLibProfiling = fromFlagOrDefault tryExeProfiling - (mappend (configProf cfg) (configProfLib cfg)) - in (tryLibProfiling, tryExeProfiling) - --- | Select and apply profiling settings for the build based on the --- 'ConfigFlags' and 'Compiler'. -configureProfiling :: Verbosity -> ConfigFlags -> Compiler - -> IO (LocalBuildInfo -> LocalBuildInfo) -configureProfiling verbosity cfg comp = do - let (tryLibProfiling, tryExeProfiling) = computeEffectiveProfiling cfg - - tryExeProfileLevel = fromFlagOrDefault ProfDetailDefault - (configProfDetail cfg) - tryLibProfileLevel = fromFlagOrDefault ProfDetailDefault - (mappend - (configProfDetail cfg) - (configProfLibDetail cfg)) - - checkProfileLevel (ProfDetailOther other) = do - warn verbosity - ("Unknown profiling detail level '" ++ other - ++ "', using default.\nThe profiling detail levels are: " - ++ intercalate ", " - [ name | (name, _, _) <- knownProfDetailLevels ]) - return ProfDetailDefault - checkProfileLevel other = return other - - (exeProfWithoutLibProf, applyProfiling) <- - if profilingSupported comp - then do - exeLevel <- checkProfileLevel tryExeProfileLevel - libLevel <- checkProfileLevel tryLibProfileLevel - let apply lbi = lbi { withProfLib = tryLibProfiling - , withProfLibDetail = libLevel - , withProfExe = tryExeProfiling - , withProfExeDetail = exeLevel - } - return (tryExeProfiling && not tryLibProfiling, apply) - else do - let apply lbi = lbi { withProfLib = False - , withProfLibDetail = ProfDetailNone - , withProfExe = False - , withProfExeDetail = ProfDetailNone - } - when (tryExeProfiling || tryLibProfiling) $ warn verbosity - ("The compiler " ++ showCompilerId comp ++ " does not support " - ++ "profiling. 
Profiling has been disabled.") - return (False, apply) - - when exeProfWithoutLibProf $ warn verbosity - ("Executables will be built with profiling, but library " - ++ "profiling is disabled. Linking will fail if any executables " - ++ "depend on the library.") - - return applyProfiling - --- ----------------------------------------------------------------------------- --- Configuring package dependencies - -reportProgram :: Verbosity -> Program -> Maybe ConfiguredProgram -> IO () -reportProgram verbosity prog Nothing - = info verbosity $ "No " ++ programName prog ++ " found" -reportProgram verbosity prog (Just configuredProg) - = info verbosity $ "Using " ++ programName prog ++ version ++ location - where location = case programLocation configuredProg of - FoundOnSystem p -> " found on system at: " ++ p - UserSpecified p -> " given by user at: " ++ p - version = case programVersion configuredProg of - Nothing -> "" - Just v -> " version " ++ display v - -hackageUrl :: String -hackageUrl = "http://hackage.haskell.org/package/" - -type ResolvedDependency = (Dependency, DependencyResolution) - -data DependencyResolution - -- | An external dependency from the package database, OR an - -- internal dependency which we are getting from the package - -- database. - = ExternalDependency PreExistingComponent - -- | An internal dependency ('PackageId' should be a library name) - -- which we are going to have to build. (The - -- 'PackageId' here is a hack to get a modest amount of - -- polymorphism out of the 'Package' typeclass.) - | InternalDependency PackageId - -data FailedDependency = DependencyNotExists PackageName - | DependencyMissingInternal PackageName PackageName - | DependencyNoVersion Dependency - --- | Test for a package dependency and record the version we have installed. 
-selectDependency :: PackageId -- ^ Package id of current package - -> Map PackageName (Maybe UnqualComponentName) - -> InstalledPackageIndex -- ^ Installed packages - -> Map PackageName InstalledPackageInfo - -- ^ Packages for which we have been given specific deps to - -- use - -> UseExternalInternalDeps -- ^ Are we configuring a - -- single component? - -> Dependency - -> Either FailedDependency DependencyResolution -selectDependency pkgid internalIndex installedIndex requiredDepsMap - use_external_internal_deps - dep@(Dependency dep_pkgname vr) = - -- If the dependency specification matches anything in the internal package - -- index, then we prefer that match to anything in the second. - -- For example: - -- - -- Name: MyLibrary - -- Version: 0.1 - -- Library - -- .. - -- Executable my-exec - -- build-depends: MyLibrary - -- - -- We want "build-depends: MyLibrary" always to match the internal library - -- even if there is a newer installed library "MyLibrary-0.2". - case Map.lookup dep_pkgname internalIndex of - Just cname -> if use_external_internal_deps - then do_external (Just cname) - else do_internal - _ -> do_external Nothing - where - - -- It's an internal library, and we're not per-component build - do_internal = Right $ InternalDependency - $ PackageIdentifier dep_pkgname $ packageVersion pkgid - - -- We have to look it up externally - do_external is_internal = do - ipi <- case Map.lookup dep_pkgname requiredDepsMap of - -- If we know the exact pkg to use, then use it. - Just pkginstance -> Right pkginstance - -- Otherwise we just pick an arbitrary instance of the latest version. 
- Nothing -> - case is_internal of - Nothing -> do_external_external - Just mb_uqn -> do_external_internal mb_uqn - return $ ExternalDependency $ ipiToPreExistingComponent ipi - - -- It's an external package, normal situation - do_external_external = - case PackageIndex.lookupDependency installedIndex dep of - [] -> Left (DependencyNotExists dep_pkgname) - pkgs -> Right $ head $ snd $ last pkgs - - -- It's an internal library, being looked up externally - do_external_internal mb_uqn = - case PackageIndex.lookupInternalDependency installedIndex - (Dependency (packageName pkgid) vr) mb_uqn of - [] -> Left (DependencyMissingInternal dep_pkgname (packageName pkgid)) - pkgs -> Right $ head $ snd $ last pkgs - -reportSelectedDependencies :: Verbosity - -> [ResolvedDependency] -> IO () -reportSelectedDependencies verbosity deps = - info verbosity $ unlines - [ "Dependency " ++ display (simplifyDependency dep) - ++ ": using " ++ display pkgid - | (dep, resolution) <- deps - , let pkgid = case resolution of - ExternalDependency pkg' -> packageId pkg' - InternalDependency pkgid' -> pkgid' ] - -reportFailedDependencies :: Verbosity -> [FailedDependency] -> IO () -reportFailedDependencies _ [] = return () -reportFailedDependencies verbosity failed = - die' verbosity (intercalate "\n\n" (map reportFailedDependency failed)) - - where - reportFailedDependency (DependencyNotExists pkgname) = - "there is no version of " ++ display pkgname ++ " installed.\n" - ++ "Perhaps you need to download and install it from\n" - ++ hackageUrl ++ display pkgname ++ "?" 
- - reportFailedDependency (DependencyMissingInternal pkgname real_pkgname) = - "internal dependency " ++ display pkgname ++ " not installed.\n" - ++ "Perhaps you need to configure and install it first?\n" - ++ "(This library was defined by " ++ display real_pkgname ++ ")" - - reportFailedDependency (DependencyNoVersion dep) = - "cannot satisfy dependency " ++ display (simplifyDependency dep) ++ "\n" - --- | List all installed packages in the given package databases. --- Non-existent package databases do not cause errors, they just get skipped --- with a warning and treated as empty ones, since technically they do not --- contain any package. -getInstalledPackages :: Verbosity -> Compiler - -> PackageDBStack -- ^ The stack of package databases. - -> ProgramDb - -> IO InstalledPackageIndex -getInstalledPackages verbosity comp packageDBs progdb = do - when (null packageDBs) $ - die' verbosity $ "No package databases have been specified. If you use " - ++ "--package-db=clear, you must follow it with --package-db= " - ++ "with 'global', 'user' or a specific file." - - info verbosity "Reading installed packages..." - -- do not check empty packagedbs (ghc-pkg would error out) - packageDBs' <- filterM packageDBExists packageDBs - case compilerFlavor comp of - GHC -> GHC.getInstalledPackages verbosity comp packageDBs' progdb - GHCJS -> GHCJS.getInstalledPackages verbosity packageDBs' progdb - UHC -> UHC.getInstalledPackages verbosity comp packageDBs' progdb - HaskellSuite {} -> - HaskellSuite.getInstalledPackages verbosity packageDBs' progdb - flv -> die' verbosity $ "don't know how to find the installed packages for " - ++ display flv - where - packageDBExists (SpecificPackageDB path) = do - exists <- doesPathExist path - unless exists $ - warn verbosity $ "Package db " <> path <> " does not exist yet" - return exists - -- Checking the user and global package dbs is more complicated and needs - -- way more data. 
Also ghc-pkg won't error out unless the user/global - -- pkgdb is overridden with an empty one, so we just don't check for them. - packageDBExists UserPackageDB = pure True - packageDBExists GlobalPackageDB = pure True - --- | Like 'getInstalledPackages', but for a single package DB. --- --- NB: Why isn't this always a fall through to 'getInstalledPackages'? --- That is because 'getInstalledPackages' performs some sanity checks --- on the package database stack in question. However, when sandboxes --- are involved these sanity checks are not desirable. -getPackageDBContents :: Verbosity -> Compiler - -> PackageDB -> ProgramDb - -> IO InstalledPackageIndex -getPackageDBContents verbosity comp packageDB progdb = do - info verbosity "Reading installed packages..." - case compilerFlavor comp of - GHC -> GHC.getPackageDBContents verbosity packageDB progdb - GHCJS -> GHCJS.getPackageDBContents verbosity packageDB progdb - -- For other compilers, try to fall back on 'getInstalledPackages'. - _ -> getInstalledPackages verbosity comp [packageDB] progdb - - --- | A set of files (or directories) that can be monitored to detect when --- there might have been a change in the installed packages. --- -getInstalledPackagesMonitorFiles :: Verbosity -> Compiler - -> PackageDBStack - -> ProgramDb -> Platform - -> IO [FilePath] -getInstalledPackagesMonitorFiles verbosity comp packageDBs progdb platform = - case compilerFlavor comp of - GHC -> GHC.getInstalledPackagesMonitorFiles - verbosity platform progdb packageDBs - other -> do - warn verbosity $ "don't know how to find change monitoring files for " - ++ "the installed package databases for " ++ display other - return [] - --- | The user interface specifies the package dbs to use with a combination of --- @--global@, @--user@ and @--package-db=global|user|clear|$file@. --- This function combines the global/user flag and interprets the package-db --- flag into a single package db stack. 
--- -interpretPackageDbFlags :: Bool -> [Maybe PackageDB] -> PackageDBStack -interpretPackageDbFlags userInstall specificDBs = - extra initialStack specificDBs - where - initialStack | userInstall = [GlobalPackageDB, UserPackageDB] - | otherwise = [GlobalPackageDB] - - extra dbs' [] = dbs' - extra _ (Nothing:dbs) = extra [] dbs - extra dbs' (Just db:dbs) = extra (dbs' ++ [db]) dbs - --- We are given both --constraint="foo < 2.0" style constraints and also --- specific packages to pick via --dependency="foo=foo-2.0-177d5cdf20962d0581". --- --- When finalising the package we have to take into account the specific --- installed deps we've been given, and the finalise function expects --- constraints, so we have to translate these deps into version constraints. --- --- But after finalising we then have to make sure we pick the right specific --- deps in the end. So we still need to remember which installed packages to --- pick. -combinedConstraints :: [Dependency] -> - [(PackageName, ComponentId)] -> - InstalledPackageIndex -> - Either String ([Dependency], - Map PackageName InstalledPackageInfo) -combinedConstraints constraints dependencies installedPackages = do - - when (not (null badComponentIds)) $ - Left $ render $ text "The following package dependencies were requested" - $+$ nest 4 (dispDependencies badComponentIds) - $+$ text "however the given installed package instance does not exist." - - --TODO: we don't check that all dependencies are used! - - return (allConstraints, idConstraintMap) - - where - allConstraints :: [Dependency] - allConstraints = constraints - ++ [ thisPackageVersion (packageId pkg) - | (_, _, Just pkg) <- dependenciesPkgInfo ] - - idConstraintMap :: Map PackageName InstalledPackageInfo - idConstraintMap = Map.fromList - -- NB: do NOT use the packageName from - -- dependenciesPkgInfo! 
- [ (pn, pkg) - | (pn, _, Just pkg) <- dependenciesPkgInfo ] - - -- The dependencies along with the installed package info, if it exists - dependenciesPkgInfo :: [(PackageName, ComponentId, - Maybe InstalledPackageInfo)] - dependenciesPkgInfo = - [ (pkgname, cid, mpkg) - | (pkgname, cid) <- dependencies - , let mpkg = PackageIndex.lookupComponentId - installedPackages cid - ] - - -- If we looked up a package specified by an installed package id - -- (i.e. someone has written a hash) and didn't find it then it's - -- an error. - badComponentIds = - [ (pkgname, cid) - | (pkgname, cid, Nothing) <- dependenciesPkgInfo ] - - dispDependencies deps = - hsep [ text "--dependency=" - <<>> quotes (disp pkgname <<>> char '=' <<>> disp cid) - | (pkgname, cid) <- deps ] - --- ----------------------------------------------------------------------------- --- Configuring program dependencies - -configureRequiredPrograms :: Verbosity -> [LegacyExeDependency] -> ProgramDb - -> IO ProgramDb -configureRequiredPrograms verbosity deps progdb = - foldM (configureRequiredProgram verbosity) progdb deps - --- | Configure a required program, ensuring that it exists in the PATH --- (or where the user has specified the program must live) and making it --- available for use via the 'ProgramDb' interface. If the program is --- known (exists in the input 'ProgramDb'), we will make sure that the --- program matches the required version; otherwise we will accept --- any version of the program and assume that it is a simpleProgram. -configureRequiredProgram :: Verbosity -> ProgramDb -> LegacyExeDependency - -> IO ProgramDb -configureRequiredProgram verbosity progdb - (LegacyExeDependency progName verRange) = - case lookupKnownProgram progName progdb of - Nothing -> - -- Try to configure it as a 'simpleProgram' automatically - -- - -- There's a bit of a story behind this line. In old versions - -- of Cabal, there were only internal build-tools dependencies. 
So the - -- behavior in this case was: - -- - -- - If a build-tool dependency was internal, don't do - -- any checking. - -- - -- - If it was external, call 'configureRequiredProgram' to - -- "configure" the executable. In particular, if - -- the program was not "known" (present in 'ProgramDb'), - -- then we would just error. This was fine, because - -- the only way a program could be executed from 'ProgramDb' - -- is if some library code from Cabal actually called it, - -- and the pre-existing Cabal code only calls known - -- programs from 'defaultProgramDb', and so if it - -- is calling something else, you have a Custom setup - -- script, and in that case you are expected to register - -- the program you want to call in the ProgramDb. - -- - -- OK, so that was fine, until I (ezyang, in 2016) refactored - -- Cabal to support per-component builds. In this case, what - -- was previously an internal build-tool dependency now became - -- an external one, and now previously "internal" dependencies - -- are now external. But these are permitted to exist even - -- when they are not previously configured (something that - -- can only occur by a Custom script.) - -- - -- So, I decided, "Fine, let's just accept these in any - -- case." Thus this line. The alternative would have been to - -- somehow detect when a build-tools dependency was "internal" (by - -- looking at the unflattened package description) but this - -- would also be incompatible with future work to support - -- external executable dependencies: we definitely cannot - -- assume they will be preinitialized in the 'ProgramDb'. - configureProgram verbosity (simpleProgram progName) progdb - Just prog - -- requireProgramVersion always requires the program have a version - -- but if the user says "build-depends: foo" ie no version constraint - -- then we should not fail if we cannot discover the program version. 
- | verRange == anyVersion -> do - (_, progdb') <- requireProgram verbosity prog progdb - return progdb' - | otherwise -> do - (_, _, progdb') <- requireProgramVersion verbosity prog verRange progdb - return progdb' - --- ----------------------------------------------------------------------------- --- Configuring pkg-config package dependencies - -configurePkgconfigPackages :: Verbosity -> PackageDescription - -> ProgramDb -> ComponentRequestedSpec - -> IO (PackageDescription, ProgramDb) -configurePkgconfigPackages verbosity pkg_descr progdb enabled - | null allpkgs = return (pkg_descr, progdb) - | otherwise = do - (_, _, progdb') <- requireProgramVersion - (lessVerbose verbosity) pkgConfigProgram - (orLaterVersion $ mkVersion [0,9,0]) progdb - traverse_ requirePkg allpkgs - mlib' <- traverse addPkgConfigBILib (library pkg_descr) - libs' <- traverse addPkgConfigBILib (subLibraries pkg_descr) - exes' <- traverse addPkgConfigBIExe (executables pkg_descr) - tests' <- traverse addPkgConfigBITest (testSuites pkg_descr) - benches' <- traverse addPkgConfigBIBench (benchmarks pkg_descr) - let pkg_descr' = pkg_descr { library = mlib', - subLibraries = libs', executables = exes', - testSuites = tests', benchmarks = benches' } - return (pkg_descr', progdb') - - where - allpkgs = concatMap pkgconfigDepends (enabledBuildInfos pkg_descr enabled) - pkgconfig = getDbProgramOutput (lessVerbose verbosity) - pkgConfigProgram progdb - - requirePkg dep@(PkgconfigDependency pkgn range) = do - version <- pkgconfig ["--modversion", pkg] - `catchIO` (\_ -> die' verbosity notFound) - `catchExit` (\_ -> die' verbosity notFound) - case simpleParse version of - Nothing -> die' verbosity "parsing output of pkg-config --modversion failed" - Just v | not (withinRange v range) -> die' verbosity (badVersion v) - | otherwise -> info verbosity (depSatisfied v) - where - notFound = "The pkg-config package '" ++ pkg ++ "'" - ++ versionRequirement - ++ " is required but it could not be found." 
- badVersion v = "The pkg-config package '" ++ pkg ++ "'" - ++ versionRequirement - ++ " is required but the version installed on the" - ++ " system is version " ++ display v - depSatisfied v = "Dependency " ++ display dep - ++ ": using version " ++ display v - - versionRequirement - | isAnyVersion range = "" - | otherwise = " version " ++ display range - - pkg = unPkgconfigName pkgn - - -- Adds pkgconfig dependencies to the build info for a component - addPkgConfigBI compBI setCompBI comp = do - bi <- pkgconfigBuildInfo (pkgconfigDepends (compBI comp)) - return $ setCompBI comp (compBI comp `mappend` bi) - - -- Adds pkgconfig dependencies to the build info for a library - addPkgConfigBILib = addPkgConfigBI libBuildInfo $ - \lib bi -> lib { libBuildInfo = bi } - - -- Adds pkgconfig dependencies to the build info for an executable - addPkgConfigBIExe = addPkgConfigBI buildInfo $ - \exe bi -> exe { buildInfo = bi } - - -- Adds pkgconfig dependencies to the build info for a test suite - addPkgConfigBITest = addPkgConfigBI testBuildInfo $ - \test bi -> test { testBuildInfo = bi } - - -- Adds pkgconfig dependencies to the build info for a benchmark - addPkgConfigBIBench = addPkgConfigBI benchmarkBuildInfo $ - \bench bi -> bench { benchmarkBuildInfo = bi } - - pkgconfigBuildInfo :: [PkgconfigDependency] -> NoCallStackIO BuildInfo - pkgconfigBuildInfo [] = return mempty - pkgconfigBuildInfo pkgdeps = do - let pkgs = nub [ display pkg | PkgconfigDependency pkg _ <- pkgdeps ] - ccflags <- pkgconfig ("--cflags" : pkgs) - ldflags <- pkgconfig ("--libs" : pkgs) - return (ccLdOptionsBuildInfo (words ccflags) (words ldflags)) - --- | Makes a 'BuildInfo' from C compiler and linker flags. --- --- This can be used with the output from configuration programs like pkg-config --- and similar package-specific programs like mysql-config, freealut-config etc. 
--- For example: --- --- > ccflags <- getDbProgramOutput verbosity prog progdb ["--cflags"] --- > ldflags <- getDbProgramOutput verbosity prog progdb ["--libs"] --- > return (ccldOptionsBuildInfo (words ccflags) (words ldflags)) --- -ccLdOptionsBuildInfo :: [String] -> [String] -> BuildInfo -ccLdOptionsBuildInfo cflags ldflags = - let (includeDirs', cflags') = partition ("-I" `isPrefixOf`) cflags - (extraLibs', ldflags') = partition ("-l" `isPrefixOf`) ldflags - (extraLibDirs', ldflags'') = partition ("-L" `isPrefixOf`) ldflags' - in mempty { - PD.includeDirs = map (drop 2) includeDirs', - PD.extraLibs = map (drop 2) extraLibs', - PD.extraLibDirs = map (drop 2) extraLibDirs', - PD.ccOptions = cflags', - PD.ldOptions = ldflags'' - } - --- ----------------------------------------------------------------------------- --- Determining the compiler details - -configCompilerAuxEx :: ConfigFlags - -> IO (Compiler, Platform, ProgramDb) -configCompilerAuxEx cfg = configCompilerEx (flagToMaybe $ configHcFlavor cfg) - (flagToMaybe $ configHcPath cfg) - (flagToMaybe $ configHcPkg cfg) - programDb - (fromFlag (configVerbosity cfg)) - where - programDb = mkProgramDb cfg defaultProgramDb - -configCompilerEx :: Maybe CompilerFlavor -> Maybe FilePath -> Maybe FilePath - -> ProgramDb -> Verbosity - -> IO (Compiler, Platform, ProgramDb) -configCompilerEx Nothing _ _ _ verbosity = die' verbosity "Unknown compiler" -configCompilerEx (Just hcFlavor) hcPath hcPkg progdb verbosity = do - (comp, maybePlatform, programDb) <- case hcFlavor of - GHC -> GHC.configure verbosity hcPath hcPkg progdb - GHCJS -> GHCJS.configure verbosity hcPath hcPkg progdb - UHC -> UHC.configure verbosity hcPath hcPkg progdb - HaskellSuite {} -> HaskellSuite.configure verbosity hcPath hcPkg progdb - _ -> die' verbosity "Unknown compiler" - return (comp, fromMaybe buildPlatform maybePlatform, programDb) - --- Ideally we would like to not have separate configCompiler* and --- configCompiler*Ex sets of functions, but 
there are many custom setup scripts --- in the wild that are using them, so the versions with old types are kept for --- backwards compatibility. Platform was added to the return triple in 1.18. - -{-# DEPRECATED configCompiler - "'configCompiler' is deprecated. Use 'configCompilerEx' instead." #-} -configCompiler :: Maybe CompilerFlavor -> Maybe FilePath -> Maybe FilePath - -> ProgramDb -> Verbosity - -> IO (Compiler, ProgramDb) -configCompiler mFlavor hcPath hcPkg progdb verbosity = - fmap (\(a,_,b) -> (a,b)) $ configCompilerEx mFlavor hcPath hcPkg progdb verbosity - -{-# DEPRECATED configCompilerAux - "configCompilerAux is deprecated. Use 'configCompilerAuxEx' instead." #-} -configCompilerAux :: ConfigFlags - -> IO (Compiler, ProgramDb) -configCompilerAux = fmap (\(a,_,b) -> (a,b)) . configCompilerAuxEx - --- ----------------------------------------------------------------------------- --- Testing C lib and header dependencies - --- Try to build a test C program which includes every header and links every --- lib. If that fails, try to narrow it down by preprocessing (only) and linking --- with individual headers and libs. If none is the obvious culprit then give a --- generic error message. --- TODO: produce a log file from the compiler errors, if any. -checkForeignDeps :: PackageDescription -> LocalBuildInfo -> Verbosity -> IO () -checkForeignDeps pkg lbi verbosity = - ifBuildsWith allHeaders (commonCcArgs ++ makeLdArgs allLibs) -- I'm feeling - -- lucky - (return ()) - (do missingLibs <- findMissingLibs - missingHdr <- findOffendingHdr - explainErrors missingHdr missingLibs) - where - allHeaders = collectField PD.includes - allLibs = collectField PD.extraLibs - - ifBuildsWith headers args success failure = do - checkDuplicateHeaders - ok <- builds (makeProgram headers) args - if ok then success else failure - - -- Ensure that there is only one header with a given name - -- in either the generated (most likely by `configure`) - -- build directory (e.g. 
`dist/build`) or in the source directory. - -- - -- If it exists in both, we'll remove the one in the source - -- directory, as the generated should take precedence. - -- - -- C compilers like to prefer source local relative includes, - -- so the search paths provided to the compiler via -I are - -- ignored if the included file can be found relative to the - -- including file. As such we need to take drastic measures - -- and delete the offending file in the source directory. - checkDuplicateHeaders = do - let relIncDirs = filter (not . isAbsolute) (collectField PD.includeDirs) - isHeader = isSuffixOf ".h" - genHeaders <- forM relIncDirs $ \dir -> - fmap (dir ) . filter isHeader <$> listDirectory (buildDir lbi dir) - `catchIO` (\_ -> return []) - srcHeaders <- forM relIncDirs $ \dir -> - fmap (dir ) . filter isHeader <$> listDirectory (baseDir lbi dir) - `catchIO` (\_ -> return []) - let commonHeaders = concat genHeaders `intersect` concat srcHeaders - forM_ commonHeaders $ \hdr -> do - warn verbosity $ "Duplicate header found in " - ++ (buildDir lbi hdr) - ++ " and " - ++ (baseDir lbi hdr) - ++ "; removing " - ++ (baseDir lbi hdr) - removeFile (baseDir lbi hdr) - - findOffendingHdr = - ifBuildsWith allHeaders ccArgs - (return Nothing) - (go . tail . inits $ allHeaders) - where - go [] = return Nothing -- cannot happen - go (hdrs:hdrsInits) = - -- Try just preprocessing first - ifBuildsWith hdrs cppArgs - -- If that works, try compiling too - (ifBuildsWith hdrs ccArgs - (go hdrsInits) - (return . Just . Right . last $ hdrs)) - (return . Just . Left . last $ hdrs) - - cppArgs = "-E":commonCppArgs -- preprocess only - ccArgs = "-c":commonCcArgs -- don't try to link - - findMissingLibs = ifBuildsWith [] (makeLdArgs allLibs) - (return []) - (filterM (fmap not . libExists) allLibs) - - libExists lib = builds (makeProgram []) (makeLdArgs [lib]) - - baseDir lbi' = fromMaybe "." 
(takeDirectory <$> cabalFilePath lbi') - - commonCppArgs = platformDefines lbi - -- TODO: This is a massive hack, to work around the - -- fact that the test performed here should be - -- PER-component (c.f. the "I'm Feeling Lucky"; we - -- should NOT be glomming everything together.) - ++ [ "-I" ++ buildDir lbi "autogen" ] - -- `configure' may generate headers in the build directory - ++ [ "-I" ++ buildDir lbi dir | dir <- ordNub (collectField PD.includeDirs) - , not (isAbsolute dir)] - -- we might also reference headers from the packages directory. - ++ [ "-I" ++ baseDir lbi dir | dir <- ordNub (collectField PD.includeDirs) - , not (isAbsolute dir)] - ++ [ "-I" ++ dir | dir <- ordNub (collectField PD.includeDirs) - , isAbsolute dir] - ++ ["-I" ++ baseDir lbi] - ++ collectField PD.cppOptions - ++ collectField PD.ccOptions - ++ [ "-I" ++ dir - | dir <- ordNub [ dir - | dep <- deps - , dir <- Installed.includeDirs dep ] - -- dedupe include dirs of dependencies - -- to prevent quadratic blow-up - ] - ++ [ opt - | dep <- deps - , opt <- Installed.ccOptions dep ] - - commonCcArgs = commonCppArgs - ++ collectField PD.ccOptions - ++ [ opt - | dep <- deps - , opt <- Installed.ccOptions dep ] - - commonLdArgs = [ "-L" ++ dir | dir <- ordNub (collectField PD.extraLibDirs) ] - ++ collectField PD.ldOptions - ++ [ "-L" ++ dir - | dir <- ordNub [ dir - | dep <- deps - , dir <- Installed.libraryDirs dep ] - ] - --TODO: do we also need dependent packages' ld options? 
- makeLdArgs libs = [ "-l"++lib | lib <- libs ] ++ commonLdArgs - - makeProgram hdrs = unlines $ - [ "#include \"" ++ hdr ++ "\"" | hdr <- hdrs ] ++ - ["int main(int argc, char** argv) { return 0; }"] - - collectField f = concatMap f allBi - allBi = enabledBuildInfos pkg (componentEnabledSpec lbi) - deps = PackageIndex.topologicalOrder (installedPkgs lbi) - - builds program args = do - tempDir <- getTemporaryDirectory - withTempFile tempDir ".c" $ \cName cHnd -> - withTempFile tempDir "" $ \oNname oHnd -> do - hPutStrLn cHnd program - hClose cHnd - hClose oHnd - _ <- getDbProgramOutput verbosity - gccProgram (withPrograms lbi) (cName:"-o":oNname:args) - return True - `catchIO` (\_ -> return False) - `catchExit` (\_ -> return False) - - explainErrors Nothing [] = return () -- should be impossible! - explainErrors _ _ - | isNothing . lookupProgram gccProgram . withPrograms $ lbi - - = die' verbosity $ unlines - [ "No working gcc", - "This package depends on foreign library but we cannot " - ++ "find a working C compiler. If you have it in a " - ++ "non-standard location you can use the --with-gcc " - ++ "flag to specify it." ] - - explainErrors hdr libs = die' verbosity $ unlines $ - [ if plural - then "Missing dependencies on foreign libraries:" - else "Missing dependency on a foreign library:" - | missing ] - ++ case hdr of - Just (Left h) -> ["* Missing (or bad) header file: " ++ h ] - _ -> [] - ++ case libs of - [] -> [] - [lib] -> ["* Missing (or bad) C library: " ++ lib] - _ -> ["* Missing (or bad) C libraries: " ++ intercalate ", " libs] - ++ [if plural then messagePlural else messageSingular | missing] - ++ case hdr of - Just (Left _) -> [ headerCppMessage ] - Just (Right h) -> [ (if missing then "* " else "") - ++ "Bad header file: " ++ h - , headerCcMessage ] - _ -> [] - - where - plural = length libs >= 2 - -- Is there something missing? 
(as opposed to broken) - missing = not (null libs) - || case hdr of Just (Left _) -> True; _ -> False - - messageSingular = - "This problem can usually be solved by installing the system " - ++ "package that provides this library (you may need the " - ++ "\"-dev\" version). If the library is already installed " - ++ "but in a non-standard location then you can use the flags " - ++ "--extra-include-dirs= and --extra-lib-dirs= to specify " - ++ "where it is." - ++ "If the library file does exist, it may contain errors that " - ++ "are caught by the C compiler at the preprocessing stage. " - ++ "In this case you can re-run configure with the verbosity " - ++ "flag -v3 to see the error messages." - messagePlural = - "This problem can usually be solved by installing the system " - ++ "packages that provide these libraries (you may need the " - ++ "\"-dev\" versions). If the libraries are already installed " - ++ "but in a non-standard location then you can use the flags " - ++ "--extra-include-dirs= and --extra-lib-dirs= to specify " - ++ "where they are." - ++ "If the library files do exist, it may contain errors that " - ++ "are caught by the C compiler at the preprocessing stage. " - ++ "In this case you can re-run configure with the verbosity " - ++ "flag -v3 to see the error messages." - headerCppMessage = - "If the header file does exist, it may contain errors that " - ++ "are caught by the C compiler at the preprocessing stage. " - ++ "In this case you can re-run configure with the verbosity " - ++ "flag -v3 to see the error messages." - headerCcMessage = - "The header file contains a compile error. " - ++ "You can re-run configure with the verbosity flag " - ++ "-v3 to see the error messages from the C compiler." - --- | Output package check warnings and errors. Exit if any errors. 
-checkPackageProblems :: Verbosity - -> FilePath - -- ^ Path to the @.cabal@ file's directory - -> GenericPackageDescription - -> PackageDescription - -> IO () -checkPackageProblems verbosity dir gpkg pkg = do - ioChecks <- checkPackageFiles verbosity pkg dir - let pureChecks = checkPackage gpkg (Just pkg) - errors = [ e | PackageBuildImpossible e <- pureChecks ++ ioChecks ] - warnings = [ w | PackageBuildWarning w <- pureChecks ++ ioChecks ] - if null errors - then traverse_ (warn verbosity) warnings - else die' verbosity (intercalate "\n\n" errors) - --- | Preform checks if a relocatable build is allowed -checkRelocatable :: Verbosity - -> PackageDescription - -> LocalBuildInfo - -> IO () -checkRelocatable verbosity pkg lbi - = sequence_ [ checkOS - , checkCompiler - , packagePrefixRelative - , depsPrefixRelative - ] - where - -- Check if the OS support relocatable builds. - -- - -- If you add new OS' to this list, and your OS supports dynamic libraries - -- and RPATH, make sure you add your OS to RPATH-support list of: - -- Distribution.Simple.GHC.getRPaths - checkOS - = unless (os `elem` [ OSX, Linux ]) - $ die' verbosity $ "Operating system: " ++ display os ++ - ", does not support relocatable builds" - where - (Platform _ os) = hostPlatform lbi - - -- Check if the Compiler support relocatable builds - checkCompiler - = unless (compilerFlavor comp `elem` [ GHC ]) - $ die' verbosity $ "Compiler: " ++ show comp ++ - ", does not support relocatable builds" - where - comp = compiler lbi - - -- Check if all the install dirs are relative to same prefix - packagePrefixRelative - = unless (relativeInstallDirs installDirs) - $ die' verbosity $ "Installation directories are not prefix_relative:\n" ++ - show installDirs - where - -- NB: should be good enough to check this against the default - -- component ID, but if we wanted to be strictly correct we'd - -- check for each ComponentId. 
- installDirs = absoluteInstallDirs pkg lbi NoCopyDest - p = prefix installDirs - relativeInstallDirs (InstallDirs {..}) = - all isJust - (fmap (stripPrefix p) - [ bindir, libdir, dynlibdir, libexecdir, includedir, datadir - , docdir, mandir, htmldir, haddockdir, sysconfdir] ) - - -- Check if the library dirs of the dependencies that are in the package - -- database to which the package is installed are relative to the - -- prefix of the package - depsPrefixRelative = do - pkgr <- GHC.pkgRoot verbosity lbi (last (withPackageDB lbi)) - traverse_ (doCheck pkgr) ipkgs - where - doCheck pkgr ipkg - | maybe False (== pkgr) (Installed.pkgRoot ipkg) - = traverse_ (\l -> when (isNothing $ stripPrefix p l) (die' verbosity (msg l))) - (Installed.libraryDirs ipkg) - | otherwise - = return () - -- NB: should be good enough to check this against the default - -- component ID, but if we wanted to be strictly correct we'd - -- check for each ComponentId. - installDirs = absoluteInstallDirs pkg lbi NoCopyDest - p = prefix installDirs - ipkgs = PackageIndex.allPackages (installedPkgs lbi) - msg l = "Library directory of a dependency: " ++ show l ++ - "\nis not relative to the installation prefix:\n" ++ - show p - --- ----------------------------------------------------------------------------- --- Testing foreign library requirements - -unsupportedForeignLibs :: Compiler -> Platform -> [ForeignLib] -> [String] -unsupportedForeignLibs comp platform = - mapMaybe (checkForeignLibSupported comp platform) - -checkForeignLibSupported :: Compiler -> Platform -> ForeignLib -> Maybe String -checkForeignLibSupported comp platform flib = go (compilerFlavor comp) - where - go :: CompilerFlavor -> Maybe String - go GHC - | compilerVersion comp < mkVersion [7,8] = unsupported [ - "Building foreign libraires is only supported with GHC >= 7.8" - ] - | otherwise = goGhcPlatform platform - go _ = unsupported [ - "Building foreign libraries is currently only supported with ghc" - ] - - goGhcPlatform 
:: Platform -> Maybe String - goGhcPlatform (Platform X86_64 OSX ) = goGhcOsx (foreignLibType flib) - goGhcPlatform (Platform I386 Linux ) = goGhcLinux (foreignLibType flib) - goGhcPlatform (Platform X86_64 Linux ) = goGhcLinux (foreignLibType flib) - goGhcPlatform (Platform I386 Windows) = goGhcWindows (foreignLibType flib) - goGhcPlatform (Platform X86_64 Windows) = goGhcWindows (foreignLibType flib) - goGhcPlatform _ = unsupported [ - "Building foreign libraries is currently only supported on OSX, " - , "Linux and Windows" - ] - - goGhcOsx :: ForeignLibType -> Maybe String - goGhcOsx ForeignLibNativeShared - | not (null (foreignLibModDefFile flib)) = unsupported [ - "Module definition file not supported on OSX" - ] - | not (null (foreignLibVersionInfo flib)) = unsupported [ - "Foreign library versioning not currently supported on OSX" - ] - | otherwise = - Nothing - goGhcOsx _ = unsupported [ - "We can currently only build shared foreign libraries on OSX" - ] - - goGhcLinux :: ForeignLibType -> Maybe String - goGhcLinux ForeignLibNativeShared - | not (null (foreignLibModDefFile flib)) = unsupported [ - "Module definition file not supported on Linux" - ] - | not (null (foreignLibVersionInfo flib)) - && not (null (foreignLibVersionLinux flib)) = unsupported [ - "You must not specify both lib-version-info and lib-version-linux" - ] - | otherwise = - Nothing - goGhcLinux _ = unsupported [ - "We can currently only build shared foreign libraries on Linux" - ] - - goGhcWindows :: ForeignLibType -> Maybe String - goGhcWindows ForeignLibNativeShared - | not standalone = unsupported [ - "We can currently only build standalone libraries on Windows. Use\n" - , " if os(Windows)\n" - , " options: standalone\n" - , "in your foreign-library stanza." - ] - | not (null (foreignLibVersionInfo flib)) = unsupported [ - "Foreign library versioning not currently supported on Windows.\n" - , "You can specify module definition files in the mod-def-file field." 
- ] - | otherwise = - Nothing - goGhcWindows _ = unsupported [ - "We can currently only build shared foreign libraries on Windows" - ] - - standalone :: Bool - standalone = ForeignLibStandalone `elem` foreignLibOptions flib - - unsupported :: [String] -> Maybe String - unsupported = Just . concat diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Doctest.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Doctest.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Doctest.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Doctest.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,187 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Doctest --- Copyright : Moritz Angermann 2017 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module deals with the @doctest@ command. 
- --- Note: this module is modelled after Distribution.Simple.Haddock - -module Distribution.Simple.Doctest ( - doctest - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import qualified Distribution.Simple.GHC as GHC -import qualified Distribution.Simple.GHCJS as GHCJS - --- local -import Distribution.PackageDescription as PD hiding (Flag) -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Simple.Program.GHC -import Distribution.Simple.Program -import Distribution.Simple.PreProcess -import Distribution.Simple.Setup -import Distribution.Simple.Build -import Distribution.Simple.LocalBuildInfo hiding (substPathTemplate) -import Distribution.Simple.Register (internalPackageDBPath) -import Distribution.Simple.BuildPaths -import Distribution.Simple.Utils -import Distribution.System -import Distribution.Version -import Distribution.Verbosity - --- ----------------------------------------------------------------------------- --- Types - --- | A record that represents the arguments to the doctest executable. 
-data DoctestArgs = DoctestArgs { - argTargets :: [FilePath] - -- ^ Modules to process - , argGhcOptions :: Flag (GhcOptions, Version) -} deriving (Show, Generic) - --- ----------------------------------------------------------------------------- --- Doctest support - -doctest :: PackageDescription - -> LocalBuildInfo - -> [PPSuffixHandler] - -> DoctestFlags - -> IO () -doctest pkg_descr lbi suffixes doctestFlags = do - let verbosity = flag doctestVerbosity - distPref = flag doctestDistPref - flag f = fromFlag $ f doctestFlags - tmpFileOpts = defaultTempFileOptions - lbi' = lbi { withPackageDB = withPackageDB lbi - ++ [SpecificPackageDB (internalPackageDBPath lbi distPref)] } - - (doctestProg, _version, _) <- - requireProgramVersion verbosity doctestProgram - (orLaterVersion (mkVersion [0,11,3])) (withPrograms lbi) - - withAllComponentsInBuildOrder pkg_descr lbi $ \component clbi -> do - componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity - preprocessComponent pkg_descr component lbi clbi False verbosity suffixes - - case component of - CLib lib -> do - withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi) "tmp" $ - \tmp -> do - inFiles <- map snd <$> getLibSourceFiles verbosity lbi lib clbi - args <- mkDoctestArgs verbosity tmp lbi' clbi inFiles (libBuildInfo lib) - runDoctest verbosity (compiler lbi) (hostPlatform lbi) doctestProg args - CExe exe -> do - withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi) "tmp" $ - \tmp -> do - inFiles <- map snd <$> getExeSourceFiles verbosity lbi exe clbi - args <- mkDoctestArgs verbosity tmp lbi' clbi inFiles (buildInfo exe) - runDoctest verbosity (compiler lbi) (hostPlatform lbi) doctestProg args - CFLib _ -> return () -- do not doctest foreign libs - CTest _ -> return () -- do not doctest tests - CBench _ -> return () -- do not doctest benchmarks - --- ----------------------------------------------------------------------------- --- Contributions to DoctestArgs (see also Haddock.hs for very similar 
code). - -componentGhcOptions :: Verbosity -> LocalBuildInfo - -> BuildInfo -> ComponentLocalBuildInfo -> FilePath - -> GhcOptions -componentGhcOptions verbosity lbi bi clbi odir = - let f = case compilerFlavor (compiler lbi) of - GHC -> GHC.componentGhcOptions - GHCJS -> GHCJS.componentGhcOptions - _ -> error $ - "Distribution.Simple.Doctest.componentGhcOptions:" ++ - "doctest only supports GHC and GHCJS" - in f verbosity lbi bi clbi odir - -mkDoctestArgs :: Verbosity - -> FilePath - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> [FilePath] - -> BuildInfo - -> IO DoctestArgs -mkDoctestArgs verbosity tmp lbi clbi inFiles bi = do - let vanillaOpts = (componentGhcOptions normal lbi bi clbi (buildDir lbi)) - { ghcOptOptimisation = mempty -- no optimizations when runnign doctest - -- disable -Wmissing-home-modules - , ghcOptWarnMissingHomeModules = mempty - -- clear out ghc-options: these are likely not meant for doctest. - -- If so, should be explicitly specified via doctest-ghc-options: again. - , ghcOptExtra = mempty - , ghcOptCabal = toFlag False - - , ghcOptObjDir = toFlag tmp - , ghcOptHiDir = toFlag tmp - , ghcOptStubDir = toFlag tmp } - sharedOpts = vanillaOpts - { ghcOptDynLinkMode = toFlag GhcDynamicOnly - , ghcOptFPic = toFlag True - , ghcOptHiSuffix = toFlag "dyn_hi" - , ghcOptObjSuffix = toFlag "dyn_o" - , ghcOptExtra = hcSharedOptions GHC bi} - opts <- if withVanillaLib lbi - then return vanillaOpts - else if withSharedLib lbi - then return sharedOpts - else die' verbosity $ "Must have vanilla or shared lirbaries " - ++ "enabled in order to run doctest" - ghcVersion <- maybe (die' verbosity "Compiler has no GHC version") - return - (compilerCompatVersion GHC (compiler lbi)) - return $ DoctestArgs - { argTargets = inFiles - , argGhcOptions = toFlag (opts, ghcVersion) - } - - --- ----------------------------------------------------------------------------- --- Call doctest with the specified arguments. 
-runDoctest :: Verbosity - -> Compiler - -> Platform - -> ConfiguredProgram - -> DoctestArgs - -> IO () -runDoctest verbosity comp platform doctestProg args = do - renderArgs verbosity comp platform args $ - \(flags, files) -> do - runProgram verbosity doctestProg (flags <> files) - -renderArgs :: Verbosity - -> Compiler - -> Platform - -> DoctestArgs - -> (([String],[FilePath]) -> IO a) - -> IO a -renderArgs _verbosity comp platform args k = do - k (flags, argTargets args) - where - flags :: [String] - flags = mconcat - [ pure "--no-magic" -- disable doctests automagic discovery heuristics - , pure "-fdiagnostics-color=never" -- disable ghc's color diagnostics. - , [ opt | (opts, _ghcVer) <- flagToList (argGhcOptions args) - , opt <- renderGhcOptions comp platform opts ] - ] - --- ------------------------------------------------------------------------------ --- Boilerplate Monoid instance. -instance Monoid DoctestArgs where - mempty = gmempty - mappend = (<>) - -instance Semigroup DoctestArgs where - (<>) = gmappend diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Flag.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Flag.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Flag.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Flag.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,124 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Flag --- Copyright : Isaac Jones 2003-2004 --- Duncan Coutts 2007 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Defines the 'Flag' type and it's 'Monoid' instance, see --- --- for an explanation. 
--- --- Split off from "Distribution.Simple.Setup" to break import cycles. -module Distribution.Simple.Flag ( - Flag(..), - allFlags, - toFlag, - fromFlag, - fromFlagOrDefault, - flagToMaybe, - flagToList, - maybeToFlag, - BooleanFlag(..) ) where - -import Prelude () -import Distribution.Compat.Prelude hiding (get) -import Distribution.Compat.Stack - --- ------------------------------------------------------------ --- * Flag type --- ------------------------------------------------------------ - --- | All flags are monoids, they come in two flavours: --- --- 1. list flags eg --- --- > --ghc-option=foo --ghc-option=bar --- --- gives us all the values ["foo", "bar"] --- --- 2. singular value flags, eg: --- --- > --enable-foo --disable-foo --- --- gives us Just False --- So this Flag type is for the latter singular kind of flag. --- Its monoid instance gives us the behaviour where it starts out as --- 'NoFlag' and later flags override earlier ones. --- -data Flag a = Flag a | NoFlag deriving (Eq, Generic, Show, Read) - -instance Binary a => Binary (Flag a) - -instance Functor Flag where - fmap f (Flag x) = Flag (f x) - fmap _ NoFlag = NoFlag - -instance Applicative Flag where - (Flag x) <*> y = x <$> y - NoFlag <*> _ = NoFlag - pure = Flag - -instance Monoid (Flag a) where - mempty = NoFlag - mappend = (<>) - -instance Semigroup (Flag a) where - _ <> f@(Flag _) = f - f <> NoFlag = f - -instance Bounded a => Bounded (Flag a) where - minBound = toFlag minBound - maxBound = toFlag maxBound - -instance Enum a => Enum (Flag a) where - fromEnum = fromEnum . fromFlag - toEnum = toFlag . toEnum - enumFrom (Flag a) = map toFlag . 
enumFrom $ a - enumFrom _ = [] - enumFromThen (Flag a) (Flag b) = toFlag `map` enumFromThen a b - enumFromThen _ _ = [] - enumFromTo (Flag a) (Flag b) = toFlag `map` enumFromTo a b - enumFromTo _ _ = [] - enumFromThenTo (Flag a) (Flag b) (Flag c) = toFlag `map` enumFromThenTo a b c - enumFromThenTo _ _ _ = [] - -toFlag :: a -> Flag a -toFlag = Flag - -fromFlag :: WithCallStack (Flag a -> a) -fromFlag (Flag x) = x -fromFlag NoFlag = error "fromFlag NoFlag. Use fromFlagOrDefault" - -fromFlagOrDefault :: a -> Flag a -> a -fromFlagOrDefault _ (Flag x) = x -fromFlagOrDefault def NoFlag = def - -flagToMaybe :: Flag a -> Maybe a -flagToMaybe (Flag x) = Just x -flagToMaybe NoFlag = Nothing - -flagToList :: Flag a -> [a] -flagToList (Flag x) = [x] -flagToList NoFlag = [] - -allFlags :: [Flag Bool] -> Flag Bool -allFlags flags = if all (\f -> fromFlagOrDefault False f) flags - then Flag True - else NoFlag - -maybeToFlag :: Maybe a -> Flag a -maybeToFlag Nothing = NoFlag -maybeToFlag (Just x) = Flag x - --- | Types that represent boolean flags. 
-class BooleanFlag a where - asBool :: a -> Bool - -instance BooleanFlag Bool where - asBool = id diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/GHC/EnvironmentParser.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/GHC/EnvironmentParser.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/GHC/EnvironmentParser.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/GHC/EnvironmentParser.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,51 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE RecordWildCards #-} -module Distribution.Simple.GHC.EnvironmentParser - ( parseGhcEnvironmentFile, readGhcEnvironmentFile, ParseErrorExc(..) ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Simple.Compiler - ( PackageDB(..) ) -import Distribution.Simple.GHC.Internal - ( GhcEnvironmentFileEntry(..) 
) -import Distribution.Types.UnitId - ( mkUnitId ) - -import Control.Exception - ( Exception, throwIO ) -import qualified Text.Parsec as P -import Text.Parsec.String - ( Parser, parseFromFile ) - -parseEnvironmentFileLine :: Parser GhcEnvironmentFileEntry -parseEnvironmentFileLine = GhcEnvFileComment <$> comment - <|> GhcEnvFilePackageId <$> unitId - <|> GhcEnvFilePackageDb <$> packageDb - <|> pure GhcEnvFileClearPackageDbStack <* clearDb - where - comment = P.string "--" *> P.many (P.noneOf "\r\n") - unitId = P.try $ P.string "package-id" *> P.spaces *> - (mkUnitId <$> P.many1 (P.satisfy $ \c -> isAlphaNum c || c `elem` "-_.+")) - packageDb = (P.string "global-package-db" *> pure GlobalPackageDB) - <|> (P.string "user-package-db" *> pure UserPackageDB) - <|> (P.string "package-db" *> P.spaces *> (SpecificPackageDB <$> P.many1 (P.noneOf "\r\n") <* P.lookAhead P.endOfLine)) - clearDb = P.string "clear-package-db" - -newtype ParseErrorExc = ParseErrorExc P.ParseError - deriving (Show, Typeable) - -instance Exception ParseErrorExc - -parseGhcEnvironmentFile :: Parser [GhcEnvironmentFileEntry] -parseGhcEnvironmentFile = parseEnvironmentFileLine `P.sepEndBy` P.endOfLine <* P.eof - -readGhcEnvironmentFile :: FilePath -> IO [GhcEnvironmentFileEntry] -readGhcEnvironmentFile path = - either (throwIO . 
ParseErrorExc) return =<< - parseFromFile parseGhcEnvironmentFile path diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/GHC/ImplInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/GHC/ImplInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/GHC/ImplInfo.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/GHC/ImplInfo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,93 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.GHC.ImplInfo --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module contains the data structure describing invocation --- details for a GHC or GHC-derived compiler, such as supported flags --- and workarounds for bugs. - -module Distribution.Simple.GHC.ImplInfo ( - GhcImplInfo(..), getImplInfo, - ghcVersionImplInfo, ghcjsVersionImplInfo - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Simple.Compiler -import Distribution.Version - -{- | - Information about features and quirks of a GHC-based implementation. - - Compiler flavors based on GHC behave similarly enough that some of - the support code for them is shared. Every implementation has its - own peculiarities, that may or may not be a direct result of the - underlying GHC version. This record keeps track of these differences. - - All shared code (i.e. everything not in the Distribution.Simple.FLAVOR - module) should use implementation info rather than version numbers - to test for supported features. 
--} - -data GhcImplInfo = GhcImplInfo - { supportsHaskell2010 :: Bool -- ^ -XHaskell2010 and -XHaskell98 flags - , reportsNoExt :: Bool -- ^ --supported-languages gives Ext and NoExt - , alwaysNondecIndent :: Bool -- ^ NondecreasingIndentation is always on - , flagGhciScript :: Bool -- ^ -ghci-script flag supported - , flagProfAuto :: Bool -- ^ new style -fprof-auto* flags - , flagPackageConf :: Bool -- ^ use package-conf instead of package-db - , flagDebugInfo :: Bool -- ^ -g flag supported - , supportsDebugLevels :: Bool -- ^ supports numeric @-g@ levels - , supportsPkgEnvFiles :: Bool -- ^ picks up @.ghc.environment@ files - , flagWarnMissingHomeModules :: Bool -- ^ -Wmissing-home-modules is supported - } - -getImplInfo :: Compiler -> GhcImplInfo -getImplInfo comp = - case compilerFlavor comp of - GHC -> ghcVersionImplInfo (compilerVersion comp) - GHCJS -> case compilerCompatVersion GHC comp of - Just ghcVer -> ghcjsVersionImplInfo (compilerVersion comp) ghcVer - _ -> error ("Distribution.Simple.GHC.Props.getImplProps: " ++ - "could not find GHC version for GHCJS compiler") - x -> error ("Distribution.Simple.GHC.Props.getImplProps only works" ++ - "for GHC-like compilers (GHC, GHCJS)" ++ - ", but found " ++ show x) - -ghcVersionImplInfo :: Version -> GhcImplInfo -ghcVersionImplInfo ver = GhcImplInfo - { supportsHaskell2010 = v >= [7] - , reportsNoExt = v >= [7] - , alwaysNondecIndent = v < [7,1] - , flagGhciScript = v >= [7,2] - , flagProfAuto = v >= [7,4] - , flagPackageConf = v < [7,5] - , flagDebugInfo = v >= [7,10] - , supportsDebugLevels = v >= [8,0] - , supportsPkgEnvFiles = v >= [8,0,1,20160901] -- broken in 8.0.1, fixed in 8.0.2 - , flagWarnMissingHomeModules = v >= [8,2] - } - where - v = versionNumbers ver - -ghcjsVersionImplInfo :: Version -- ^ The GHCJS version - -> Version -- ^ The GHC version - -> GhcImplInfo -ghcjsVersionImplInfo _ghcjsver ghcver = GhcImplInfo - { supportsHaskell2010 = True - , reportsNoExt = True - , alwaysNondecIndent = False - 
, flagGhciScript = True - , flagProfAuto = True - , flagPackageConf = False - , flagDebugInfo = False - , supportsDebugLevels = ghcv >= [8,0] - , supportsPkgEnvFiles = ghcv >= [8,0,2] --TODO: check this works in ghcjs - , flagWarnMissingHomeModules = ghcv >= [8,2] - } - where - ghcv = versionNumbers ghcver diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/GHC/Internal.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/GHC/Internal.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/GHC/Internal.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/GHC/Internal.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,609 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.GHC.Internal --- Copyright : Isaac Jones 2003-2007 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module contains functions shared by GHC (Distribution.Simple.GHC) --- and GHC-derived compilers. 
- -module Distribution.Simple.GHC.Internal ( - configureToolchain, - getLanguages, - getExtensions, - targetPlatform, - getGhcInfo, - componentCcGhcOptions, - componentCxxGhcOptions, - componentGhcOptions, - mkGHCiLibName, - filterGhciFlags, - ghcLookupProperty, - getHaskellObjects, - mkGhcOptPackages, - substTopDir, - checkPackageDbEnvVar, - profDetailLevelFlag, - -- * GHC platform and version strings - ghcArchString, - ghcOsString, - ghcPlatformAndVersionString, - -- * Constructing GHC environment files - GhcEnvironmentFileEntry(..), - writeGhcEnvironmentFile, - simpleGhcEnvironmentFile, - ghcEnvironmentFileName, - renderGhcEnvironmentFile, - renderGhcEnvironmentFileEntry, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Simple.GHC.ImplInfo -import Distribution.Types.ComponentLocalBuildInfo -import Distribution.Backpack -import Distribution.InstalledPackageInfo -import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo -import Distribution.PackageDescription as PD hiding (Flag) -import Distribution.Compat.Exception -import Distribution.Lex -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Simple.Program.GHC -import Distribution.Simple.Setup -import qualified Distribution.ModuleName as ModuleName -import Distribution.Simple.Program -import Distribution.Simple.LocalBuildInfo -import Distribution.Types.UnitId -import Distribution.Types.LocalBuildInfo -import Distribution.Types.TargetInfo -import Distribution.Simple.Utils -import Distribution.Simple.BuildPaths -import Distribution.System -import Distribution.Text ( display, simpleParse ) -import Distribution.Utils.NubList ( toNubListR ) -import Distribution.Verbosity -import Distribution.Compat.Stack -import Distribution.Version (Version) -import Language.Haskell.Extension - -import qualified Data.Map as Map -import qualified Data.ByteString.Lazy.Char8 as BS -import System.Directory ( getDirectoryContents, getTemporaryDirectory ) -import 
System.Environment ( getEnv ) -import System.FilePath ( (), (<.>), takeExtension - , takeDirectory, takeFileName) -import System.IO ( hClose, hPutStrLn ) - -targetPlatform :: [(String, String)] -> Maybe Platform -targetPlatform ghcInfo = platformFromTriple =<< lookup "Target platform" ghcInfo - --- | Adjust the way we find and configure gcc and ld --- -configureToolchain :: GhcImplInfo - -> ConfiguredProgram - -> Map String String - -> ProgramDb - -> ProgramDb -configureToolchain _implInfo ghcProg ghcInfo = - addKnownProgram gccProgram { - programFindLocation = findProg gccProgramName extraGccPath, - programPostConf = configureGcc - } - . addKnownProgram ldProgram { - programFindLocation = findProg ldProgramName extraLdPath, - programPostConf = configureLd - } - . addKnownProgram arProgram { - programFindLocation = findProg arProgramName extraArPath - } - . addKnownProgram stripProgram { - programFindLocation = findProg stripProgramName extraStripPath - } - where - compilerDir = takeDirectory (programPath ghcProg) - base_dir = takeDirectory compilerDir - mingwBinDir = base_dir "mingw" "bin" - isWindows = case buildOS of Windows -> True; _ -> False - binPrefix = "" - - maybeName :: Program -> Maybe FilePath -> String - maybeName prog = maybe (programName prog) (dropExeExtension . takeFileName) - - gccProgramName = maybeName gccProgram mbGccLocation - ldProgramName = maybeName ldProgram mbLdLocation - arProgramName = maybeName arProgram mbArLocation - stripProgramName = maybeName stripProgram mbStripLocation - - mkExtraPath :: Maybe FilePath -> FilePath -> [FilePath] - mkExtraPath mbPath mingwPath | isWindows = mbDir ++ [mingwPath] - | otherwise = mbDir - where - mbDir = maybeToList . 
fmap takeDirectory $ mbPath - - extraGccPath = mkExtraPath mbGccLocation windowsExtraGccDir - extraLdPath = mkExtraPath mbLdLocation windowsExtraLdDir - extraArPath = mkExtraPath mbArLocation windowsExtraArDir - extraStripPath = mkExtraPath mbStripLocation windowsExtraStripDir - - -- on Windows finding and configuring ghc's gcc & binutils is a bit special - (windowsExtraGccDir, windowsExtraLdDir, - windowsExtraArDir, windowsExtraStripDir) = - let b = mingwBinDir binPrefix - in (b, b, b, b) - - findProg :: String -> [FilePath] - -> Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])) - findProg progName extraPath v searchpath = - findProgramOnSearchPath v searchpath' progName - where - searchpath' = (map ProgramSearchPathDir extraPath) ++ searchpath - - -- Read tool locations from the 'ghc --info' output. Useful when - -- cross-compiling. - mbGccLocation = Map.lookup "C compiler command" ghcInfo - mbLdLocation = Map.lookup "ld command" ghcInfo - mbArLocation = Map.lookup "ar command" ghcInfo - mbStripLocation = Map.lookup "strip command" ghcInfo - - ccFlags = getFlags "C compiler flags" - -- GHC 7.8 renamed "Gcc Linker flags" to "C compiler link flags" - -- and "Ld Linker flags" to "ld flags" (GHC #4862). - gccLinkerFlags = getFlags "Gcc Linker flags" ++ getFlags "C compiler link flags" - ldLinkerFlags = getFlags "Ld Linker flags" ++ getFlags "ld flags" - - -- It appears that GHC 7.6 and earlier encode the tokenized flags as a - -- [String] in these settings whereas later versions just encode the flags as - -- String. - -- - -- We first try to parse as a [String] and if this fails then tokenize the - -- flags ourself. 
- getFlags :: String -> [String] - getFlags key = - case Map.lookup key ghcInfo of - Nothing -> [] - Just flags - | (flags', ""):_ <- reads flags -> flags' - | otherwise -> tokenizeQuotedWords flags - - configureGcc :: Verbosity -> ConfiguredProgram -> NoCallStackIO ConfiguredProgram - configureGcc _v gccProg = do - return gccProg { - programDefaultArgs = programDefaultArgs gccProg - ++ ccFlags ++ gccLinkerFlags - } - - configureLd :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram - configureLd v ldProg = do - ldProg' <- configureLd' v ldProg - return ldProg' { - programDefaultArgs = programDefaultArgs ldProg' ++ ldLinkerFlags - } - - -- we need to find out if ld supports the -x flag - configureLd' :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram - configureLd' verbosity ldProg = do - tempDir <- getTemporaryDirectory - ldx <- withTempFile tempDir ".c" $ \testcfile testchnd -> - withTempFile tempDir ".o" $ \testofile testohnd -> do - hPutStrLn testchnd "int foo() { return 0; }" - hClose testchnd; hClose testohnd - runProgram verbosity ghcProg - [ "-hide-all-packages" - , "-c", testcfile - , "-o", testofile - ] - withTempFile tempDir ".o" $ \testofile' testohnd' -> - do - hClose testohnd' - _ <- getProgramOutput verbosity ldProg - ["-x", "-r", testofile, "-o", testofile'] - return True - `catchIO` (\_ -> return False) - `catchExit` (\_ -> return False) - if ldx - then return ldProg { programDefaultArgs = ["-x"] } - else return ldProg - -getLanguages :: Verbosity -> GhcImplInfo -> ConfiguredProgram - -> NoCallStackIO [(Language, String)] -getLanguages _ implInfo _ - -- TODO: should be using --supported-languages rather than hard coding - | supportsHaskell2010 implInfo = return [(Haskell98, "-XHaskell98") - ,(Haskell2010, "-XHaskell2010")] - | otherwise = return [(Haskell98, "")] - -getGhcInfo :: Verbosity -> GhcImplInfo -> ConfiguredProgram - -> IO [(String, String)] -getGhcInfo verbosity _implInfo ghcProg = do - xs <- getProgramOutput verbosity 
(suppressOverrideArgs ghcProg) - ["--info"] - case reads xs of - [(i, ss)] - | all isSpace ss -> - return i - _ -> - die' verbosity "Can't parse --info output of GHC" - -getExtensions :: Verbosity -> GhcImplInfo -> ConfiguredProgram - -> IO [(Extension, Maybe String)] -getExtensions verbosity implInfo ghcProg = do - str <- getProgramOutput verbosity (suppressOverrideArgs ghcProg) - ["--supported-languages"] - let extStrs = if reportsNoExt implInfo - then lines str - else -- Older GHCs only gave us either Foo or NoFoo, - -- so we have to work out the other one ourselves - [ extStr'' - | extStr <- lines str - , let extStr' = case extStr of - 'N' : 'o' : xs -> xs - _ -> "No" ++ extStr - , extStr'' <- [extStr, extStr'] - ] - let extensions0 = [ (ext, Just $ "-X" ++ display ext) - | Just ext <- map simpleParse extStrs ] - extensions1 = if alwaysNondecIndent implInfo - then -- ghc-7.2 split NondecreasingIndentation off - -- into a proper extension. Before that it - -- was always on. - -- Since it was not a proper extension, it could - -- not be turned off, hence we omit a - -- DisableExtension entry here. - (EnableExtension NondecreasingIndentation, Nothing) : - extensions0 - else extensions0 - return extensions1 - -componentCcGhcOptions :: Verbosity -> GhcImplInfo -> LocalBuildInfo - -> BuildInfo -> ComponentLocalBuildInfo - -> FilePath -> FilePath - -> GhcOptions -componentCcGhcOptions verbosity _implInfo lbi bi clbi odir filename = - mempty { - -- Respect -v0, but don't crank up verbosity on GHC if - -- Cabal verbosity is requested. For that, use --ghc-option=-v instead! 
- ghcOptVerbosity = toFlag (min verbosity normal), - ghcOptMode = toFlag GhcModeCompile, - ghcOptInputFiles = toNubListR [filename], - - ghcOptCppIncludePath = toNubListR $ [autogenComponentModulesDir lbi clbi - ,autogenPackageModulesDir lbi - ,odir] - -- includes relative to the package - ++ PD.includeDirs bi - -- potential includes generated by `configure' - -- in the build directory - ++ [buildDir lbi dir | dir <- PD.includeDirs bi], - ghcOptHideAllPackages= toFlag True, - ghcOptPackageDBs = withPackageDB lbi, - ghcOptPackages = toNubListR $ mkGhcOptPackages clbi, - ghcOptCcOptions = (case withOptimization lbi of - NoOptimisation -> [] - _ -> ["-O2"]) ++ - (case withDebugInfo lbi of - NoDebugInfo -> [] - MinimalDebugInfo -> ["-g1"] - NormalDebugInfo -> ["-g"] - MaximalDebugInfo -> ["-g3"]) ++ - PD.ccOptions bi, - ghcOptObjDir = toFlag odir - } - - -componentCxxGhcOptions :: Verbosity -> GhcImplInfo -> LocalBuildInfo - -> BuildInfo -> ComponentLocalBuildInfo - -> FilePath -> FilePath - -> GhcOptions -componentCxxGhcOptions verbosity _implInfo lbi bi clbi odir filename = - mempty { - -- Respect -v0, but don't crank up verbosity on GHC if - -- Cabal verbosity is requested. For that, use --ghc-option=-v instead! 
- ghcOptVerbosity = toFlag (min verbosity normal), - ghcOptMode = toFlag GhcModeCompile, - ghcOptInputFiles = toNubListR [filename], - - ghcOptCppIncludePath = toNubListR $ [autogenComponentModulesDir lbi clbi - ,autogenPackageModulesDir lbi - ,odir] - -- includes relative to the package - ++ PD.includeDirs bi - -- potential includes generated by `configure' - -- in the build directory - ++ [buildDir lbi dir | dir <- PD.includeDirs bi], - ghcOptHideAllPackages= toFlag True, - ghcOptPackageDBs = withPackageDB lbi, - ghcOptPackages = toNubListR $ mkGhcOptPackages clbi, - ghcOptCxxOptions = (case withOptimization lbi of - NoOptimisation -> [] - _ -> ["-O2"]) ++ - (case withDebugInfo lbi of - NoDebugInfo -> [] - MinimalDebugInfo -> ["-g1"] - NormalDebugInfo -> ["-g"] - MaximalDebugInfo -> ["-g3"]) ++ - PD.cxxOptions bi, - ghcOptObjDir = toFlag odir - } - - -componentGhcOptions :: Verbosity -> GhcImplInfo -> LocalBuildInfo - -> BuildInfo -> ComponentLocalBuildInfo -> FilePath - -> GhcOptions -componentGhcOptions verbosity implInfo lbi bi clbi odir = - mempty { - -- Respect -v0, but don't crank up verbosity on GHC if - -- Cabal verbosity is requested. For that, use --ghc-option=-v instead! 
- ghcOptVerbosity = toFlag (min verbosity normal), - ghcOptCabal = toFlag True, - ghcOptThisUnitId = case clbi of - LibComponentLocalBuildInfo { componentCompatPackageKey = pk } - -> toFlag pk - _ -> mempty, - ghcOptThisComponentId = case clbi of - LibComponentLocalBuildInfo { componentComponentId = cid - , componentInstantiatedWith = insts } -> - if null insts - then mempty - else toFlag cid - _ -> mempty, - ghcOptInstantiatedWith = case clbi of - LibComponentLocalBuildInfo { componentInstantiatedWith = insts } - -> insts - _ -> [], - ghcOptNoCode = toFlag $ componentIsIndefinite clbi, - ghcOptHideAllPackages = toFlag True, - ghcOptWarnMissingHomeModules = toFlag $ flagWarnMissingHomeModules implInfo, - ghcOptPackageDBs = withPackageDB lbi, - ghcOptPackages = toNubListR $ mkGhcOptPackages clbi, - ghcOptSplitSections = toFlag (splitSections lbi), - ghcOptSplitObjs = toFlag (splitObjs lbi), - ghcOptSourcePathClear = toFlag True, - ghcOptSourcePath = toNubListR $ [odir] ++ (hsSourceDirs bi) - ++ [autogenComponentModulesDir lbi clbi] - ++ [autogenPackageModulesDir lbi], - ghcOptCppIncludePath = toNubListR $ [autogenComponentModulesDir lbi clbi - ,autogenPackageModulesDir lbi - ,odir] - -- includes relative to the package - ++ PD.includeDirs bi - -- potential includes generated by `configure' - -- in the build directory - ++ [buildDir lbi dir | dir <- PD.includeDirs bi], - ghcOptCppOptions = cppOptions bi, - ghcOptCppIncludes = toNubListR $ - [autogenComponentModulesDir lbi clbi cppHeaderName], - ghcOptFfiIncludes = toNubListR $ PD.includes bi, - ghcOptObjDir = toFlag odir, - ghcOptHiDir = toFlag odir, - ghcOptStubDir = toFlag odir, - ghcOptOutputDir = toFlag odir, - ghcOptOptimisation = toGhcOptimisation (withOptimization lbi), - ghcOptDebugInfo = toFlag (withDebugInfo lbi), - ghcOptExtra = hcOptions GHC bi, - ghcOptExtraPath = toNubListR $ exe_paths, - ghcOptLanguage = toFlag (fromMaybe Haskell98 (defaultLanguage bi)), - -- Unsupported extensions have already been 
checked by configure - ghcOptExtensions = toNubListR $ usedExtensions bi, - ghcOptExtensionMap = Map.fromList . compilerExtensions $ (compiler lbi) - } - where - toGhcOptimisation NoOptimisation = mempty --TODO perhaps override? - toGhcOptimisation NormalOptimisation = toFlag GhcNormalOptimisation - toGhcOptimisation MaximumOptimisation = toFlag GhcMaximumOptimisation - - exe_paths = [ componentBuildDir lbi (targetCLBI exe_tgt) - | uid <- componentExeDeps clbi - -- TODO: Ugh, localPkgDescr - , Just exe_tgt <- [unitIdTarget' (localPkgDescr lbi) lbi uid] ] - --- | Strip out flags that are not supported in ghci -filterGhciFlags :: [String] -> [String] -filterGhciFlags = filter supported - where - supported ('-':'O':_) = False - supported "-debug" = False - supported "-threaded" = False - supported "-ticky" = False - supported "-eventlog" = False - supported "-prof" = False - supported "-unreg" = False - supported _ = True - -mkGHCiLibName :: UnitId -> String -mkGHCiLibName lib = getHSLibraryName lib <.> "o" - -ghcLookupProperty :: String -> Compiler -> Bool -ghcLookupProperty prop comp = - case Map.lookup prop (compilerProperties comp) of - Just "YES" -> True - _ -> False - --- when using -split-objs, we need to search for object files in the --- Module_split directory for each module. 
-getHaskellObjects :: GhcImplInfo -> Library -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> FilePath -> String -> Bool -> NoCallStackIO [FilePath] -getHaskellObjects _implInfo lib lbi clbi pref wanted_obj_ext allow_split_objs - | splitObjs lbi && allow_split_objs = do - let splitSuffix = "_" ++ wanted_obj_ext ++ "_split" - dirs = [ pref (ModuleName.toFilePath x ++ splitSuffix) - | x <- allLibModules lib clbi ] - objss <- traverse getDirectoryContents dirs - let objs = [ dir obj - | (objs',dir) <- zip objss dirs, obj <- objs', - let obj_ext = takeExtension obj, - '.':wanted_obj_ext == obj_ext ] - return objs - | otherwise = - return [ pref ModuleName.toFilePath x <.> wanted_obj_ext - | x <- allLibModules lib clbi ] - -mkGhcOptPackages :: ComponentLocalBuildInfo - -> [(OpenUnitId, ModuleRenaming)] -mkGhcOptPackages = componentIncludes - -substTopDir :: FilePath -> InstalledPackageInfo -> InstalledPackageInfo -substTopDir topDir ipo - = ipo { - InstalledPackageInfo.importDirs - = map f (InstalledPackageInfo.importDirs ipo), - InstalledPackageInfo.libraryDirs - = map f (InstalledPackageInfo.libraryDirs ipo), - InstalledPackageInfo.includeDirs - = map f (InstalledPackageInfo.includeDirs ipo), - InstalledPackageInfo.frameworkDirs - = map f (InstalledPackageInfo.frameworkDirs ipo), - InstalledPackageInfo.haddockInterfaces - = map f (InstalledPackageInfo.haddockInterfaces ipo), - InstalledPackageInfo.haddockHTMLs - = map f (InstalledPackageInfo.haddockHTMLs ipo) - } - where f ('$':'t':'o':'p':'d':'i':'r':rest) = topDir ++ rest - f x = x - --- Cabal does not use the environment variable GHC{,JS}_PACKAGE_PATH; let --- users know that this is the case. See ticket #335. Simply ignoring it is --- not a good idea, since then ghc and cabal are looking at different sets --- of package DBs and chaos is likely to ensue. --- --- An exception to this is when running cabal from within a `cabal exec` --- environment. 
In this case, `cabal exec` will set the --- CABAL_SANDBOX_PACKAGE_PATH to the same value that it set --- GHC{,JS}_PACKAGE_PATH to. If that is the case it is OK to allow --- GHC{,JS}_PACKAGE_PATH. -checkPackageDbEnvVar :: Verbosity -> String -> String -> IO () -checkPackageDbEnvVar verbosity compilerName packagePathEnvVar = do - mPP <- lookupEnv packagePathEnvVar - when (isJust mPP) $ do - mcsPP <- lookupEnv "CABAL_SANDBOX_PACKAGE_PATH" - unless (mPP == mcsPP) abort - where - lookupEnv :: String -> NoCallStackIO (Maybe String) - lookupEnv name = (Just `fmap` getEnv name) - `catchIO` const (return Nothing) - abort = - die' verbosity $ "Use of " ++ compilerName ++ "'s environment variable " - ++ packagePathEnvVar ++ " is incompatible with Cabal. Use the " - ++ "flag --package-db to specify a package database (it can be " - ++ "used multiple times)." - - _ = callStack -- TODO: output stack when erroring - -profDetailLevelFlag :: Bool -> ProfDetailLevel -> Flag GhcProfAuto -profDetailLevelFlag forLib mpl = - case mpl of - ProfDetailNone -> mempty - ProfDetailDefault | forLib -> toFlag GhcProfAutoExported - | otherwise -> toFlag GhcProfAutoToplevel - ProfDetailExportedFunctions -> toFlag GhcProfAutoExported - ProfDetailToplevelFunctions -> toFlag GhcProfAutoToplevel - ProfDetailAllFunctions -> toFlag GhcProfAutoAll - ProfDetailOther _ -> mempty - - --- ----------------------------------------------------------------------------- --- GHC platform and version strings - --- | GHC's rendering of its host or target 'Arch' as used in its platform --- strings and certain file locations (such as user package db location). --- -ghcArchString :: Arch -> String -ghcArchString PPC = "powerpc" -ghcArchString PPC64 = "powerpc64" -ghcArchString other = display other - --- | GHC's rendering of its host or target 'OS' as used in its platform --- strings and certain file locations (such as user package db location). 
--- -ghcOsString :: OS -> String -ghcOsString Windows = "mingw32" -ghcOsString OSX = "darwin" -ghcOsString Solaris = "solaris2" -ghcOsString other = display other - --- | GHC's rendering of its platform and compiler version string as used in --- certain file locations (such as user package db location). --- For example @x86_64-linux-7.10.4@ --- -ghcPlatformAndVersionString :: Platform -> Version -> String -ghcPlatformAndVersionString (Platform arch os) version = - intercalate "-" [ ghcArchString arch, ghcOsString os, display version ] - - --- ----------------------------------------------------------------------------- --- Constructing GHC environment files - --- | The kinds of entries we can stick in a @.ghc.environment@ file. --- -data GhcEnvironmentFileEntry = - GhcEnvFileComment String -- ^ @-- a comment@ - | GhcEnvFilePackageId UnitId -- ^ @package-id foo-1.0-4fe301a...@ - | GhcEnvFilePackageDb PackageDB -- ^ @global-package-db@, - -- @user-package-db@ or - -- @package-db blah/package.conf.d/@ - | GhcEnvFileClearPackageDbStack -- ^ @clear-package-db@ - deriving (Eq, Ord, Show) - --- | Make entries for a GHC environment file based on a 'PackageDBStack' and --- a bunch of package (unit) ids. --- --- If you need to do anything more complicated then either use this as a basis --- and add more entries, or just make all the entries directly. --- -simpleGhcEnvironmentFile :: PackageDBStack - -> [UnitId] - -> [GhcEnvironmentFileEntry] -simpleGhcEnvironmentFile packageDBs pkgids = - GhcEnvFileClearPackageDbStack - : map GhcEnvFilePackageDb packageDBs - ++ map GhcEnvFilePackageId pkgids - --- | Write a @.ghc.environment-$arch-$os-$ver@ file in the given directory. --- --- The 'Platform' and GHC 'Version' are needed as part of the file name. --- --- Returns the name of the file written. 
-writeGhcEnvironmentFile :: FilePath -- ^ directory in which to put it - -> Platform -- ^ the GHC target platform - -> Version -- ^ the GHC version - -> [GhcEnvironmentFileEntry] -- ^ the content - -> NoCallStackIO FilePath -writeGhcEnvironmentFile directory platform ghcversion entries = do - writeFileAtomic envfile . BS.pack . renderGhcEnvironmentFile $ entries - return envfile - where - envfile = directory ghcEnvironmentFileName platform ghcversion - --- | The @.ghc.environment-$arch-$os-$ver@ file name --- -ghcEnvironmentFileName :: Platform -> Version -> FilePath -ghcEnvironmentFileName platform ghcversion = - ".ghc.environment." ++ ghcPlatformAndVersionString platform ghcversion - --- | Render a bunch of GHC environment file entries --- -renderGhcEnvironmentFile :: [GhcEnvironmentFileEntry] -> String -renderGhcEnvironmentFile = - unlines . map renderGhcEnvironmentFileEntry - --- | Render an individual GHC environment file entry --- -renderGhcEnvironmentFileEntry :: GhcEnvironmentFileEntry -> String -renderGhcEnvironmentFileEntry entry = case entry of - GhcEnvFileComment comment -> format comment - where format = intercalate "\n" . map ("-- " ++) . 
lines - GhcEnvFilePackageId pkgid -> "package-id " ++ display pkgid - GhcEnvFilePackageDb pkgdb -> - case pkgdb of - GlobalPackageDB -> "global-package-db" - UserPackageDB -> "user-package-db" - SpecificPackageDB dbfile -> "package-db " ++ dbfile - GhcEnvFileClearPackageDbStack -> "clear-package-db" - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/GHC.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/GHC.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/GHC.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/GHC.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1942 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE CPP #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.GHC --- Copyright : Isaac Jones 2003-2007 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is a fairly large module. It contains most of the GHC-specific code for --- configuring, building and installing packages. It also exports a function --- for finding out what packages are already installed. Configuring involves --- finding the @ghc@ and @ghc-pkg@ programs, finding what language extensions --- this version of ghc supports and returning a 'Compiler' value. --- --- 'getInstalledPackages' involves calling the @ghc-pkg@ program to find out --- what packages are installed. --- --- Building is somewhat complex as there is quite a bit of information to take --- into account. We have to build libs and programs, possibly for profiling and --- shared libs. We have to support building libraries that will be usable by --- GHCi and also ghc's @-split-objs@ feature. We have to compile any C files --- using ghc. 
Linking, especially for @split-objs@ is remarkably complex, --- partly because there tend to be 1,000's of @.o@ files and this can often be --- more than we can pass to the @ld@ or @ar@ programs in one go. --- --- Installing for libs and exes involves finding the right files and copying --- them to the right places. One of the more tricky things about this module is --- remembering the layout of files in the build directory (which is not --- explicitly documented) and thus what search dirs are used for various kinds --- of files. - -module Distribution.Simple.GHC ( - getGhcInfo, - configure, - getInstalledPackages, - getInstalledPackagesMonitorFiles, - getPackageDBContents, - buildLib, buildFLib, buildExe, - replLib, replFLib, replExe, - startInterpreter, - installLib, installFLib, installExe, - libAbiHash, - hcPkgInfo, - registerPackage, - componentGhcOptions, - componentCcGhcOptions, - getLibDir, - isDynamic, - getGlobalPackageDB, - pkgRoot, - -- * Constructing and deconstructing GHC environment files - Internal.GhcEnvironmentFileEntry(..), - Internal.simpleGhcEnvironmentFile, - Internal.renderGhcEnvironmentFile, - Internal.writeGhcEnvironmentFile, - Internal.ghcPlatformAndVersionString, - readGhcEnvironmentFile, - parseGhcEnvironmentFile, - ParseErrorExc(..), - -- * Version-specific implementation quirks - getImplInfo, - GhcImplInfo(..) 
- ) where - -import Prelude () -import Distribution.Compat.Prelude - -import qualified Distribution.Simple.GHC.Internal as Internal -import Distribution.Simple.GHC.ImplInfo -import Distribution.Simple.GHC.EnvironmentParser -import Distribution.PackageDescription.Utils (cabalBug) -import Distribution.PackageDescription as PD -import Distribution.InstalledPackageInfo (InstalledPackageInfo) -import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.LocalBuildInfo -import Distribution.Types.ComponentLocalBuildInfo -import qualified Distribution.Simple.Hpc as Hpc -import Distribution.Simple.BuildPaths -import Distribution.Simple.Utils -import Distribution.Package -import qualified Distribution.ModuleName as ModuleName -import Distribution.ModuleName (ModuleName) -import Distribution.Simple.Program -import Distribution.Simple.Program.Builtin (runghcProgram) -import qualified Distribution.Simple.Program.HcPkg as HcPkg -import qualified Distribution.Simple.Program.Ar as Ar -import qualified Distribution.Simple.Program.Ld as Ld -import qualified Distribution.Simple.Program.Strip as Strip -import Distribution.Simple.Program.GHC -import Distribution.Simple.Setup -import qualified Distribution.Simple.Setup as Cabal -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Version -import Distribution.System -import Distribution.Verbosity -import Distribution.Text -import Distribution.Types.ForeignLib -import Distribution.Types.ForeignLibType -import Distribution.Types.ForeignLibOption -import Distribution.Types.UnqualComponentName -import Distribution.Utils.NubList -import Language.Haskell.Extension - -import Control.Monad (msum) -import Data.Char (isLower) -import qualified Data.Map as Map -import System.Directory - ( doesFileExist, getAppUserDataDirectory, createDirectoryIfMissing - , 
canonicalizePath, removeFile, renameFile ) -import System.FilePath ( (), (<.>), takeExtension - , takeDirectory, replaceExtension - ,isRelative ) -import qualified System.Info -#ifndef mingw32_HOST_OS -import System.Posix (createSymbolicLink) -#endif /* mingw32_HOST_OS */ - --- ----------------------------------------------------------------------------- --- Configuring - -configure :: Verbosity -> Maybe FilePath -> Maybe FilePath - -> ProgramDb - -> IO (Compiler, Maybe Platform, ProgramDb) -configure verbosity hcPath hcPkgPath conf0 = do - - (ghcProg, ghcVersion, progdb1) <- - requireProgramVersion verbosity ghcProgram - (orLaterVersion (mkVersion [7,0,1])) - (userMaybeSpecifyPath "ghc" hcPath conf0) - let implInfo = ghcVersionImplInfo ghcVersion - - -- Cabal currently supports ghc >= 7.0.1 && < 8.7 - unless (ghcVersion < mkVersion [8,7]) $ - warn verbosity $ - "Unknown/unsupported 'ghc' version detected " - ++ "(Cabal " ++ display cabalVersion ++ " supports 'ghc' version < 8.7): " - ++ programPath ghcProg ++ " is version " ++ display ghcVersion - - -- This is slightly tricky, we have to configure ghc first, then we use the - -- location of ghc to help find ghc-pkg in the case that the user did not - -- specify the location of ghc-pkg directly: - (ghcPkgProg, ghcPkgVersion, progdb2) <- - requireProgramVersion verbosity ghcPkgProgram { - programFindLocation = guessGhcPkgFromGhcPath ghcProg - } - anyVersion (userMaybeSpecifyPath "ghc-pkg" hcPkgPath progdb1) - - when (ghcVersion /= ghcPkgVersion) $ die' verbosity $ - "Version mismatch between ghc and ghc-pkg: " - ++ programPath ghcProg ++ " is version " ++ display ghcVersion ++ " " - ++ programPath ghcPkgProg ++ " is version " ++ display ghcPkgVersion - - -- Likewise we try to find the matching hsc2hs and haddock programs. 
- let hsc2hsProgram' = hsc2hsProgram { - programFindLocation = guessHsc2hsFromGhcPath ghcProg - } - haddockProgram' = haddockProgram { - programFindLocation = guessHaddockFromGhcPath ghcProg - } - hpcProgram' = hpcProgram { - programFindLocation = guessHpcFromGhcPath ghcProg - } - runghcProgram' = runghcProgram { - programFindLocation = guessRunghcFromGhcPath ghcProg - } - progdb3 = addKnownProgram haddockProgram' $ - addKnownProgram hsc2hsProgram' $ - addKnownProgram hpcProgram' $ - addKnownProgram runghcProgram' progdb2 - - languages <- Internal.getLanguages verbosity implInfo ghcProg - extensions0 <- Internal.getExtensions verbosity implInfo ghcProg - - ghcInfo <- Internal.getGhcInfo verbosity implInfo ghcProg - let ghcInfoMap = Map.fromList ghcInfo - extensions = -- workaround https://ghc.haskell.org/ticket/11214 - filterExt JavaScriptFFI $ - -- see 'filterExtTH' comment below - filterExtTH $ extensions0 - - -- starting with GHC 8.0, `TemplateHaskell` will be omitted from - -- `--supported-extensions` when it's not available. - -- for older GHCs we can use the "Have interpreter" property to - -- filter out `TemplateHaskell` - filterExtTH | ghcVersion < mkVersion [8] - , Just "NO" <- Map.lookup "Have interpreter" ghcInfoMap - = filterExt TemplateHaskell - | otherwise = id - - filterExt ext = filter ((/= EnableExtension ext) . fst) - - let comp = Compiler { - compilerId = CompilerId GHC ghcVersion, - compilerAbiTag = NoAbiTag, - compilerCompat = [], - compilerLanguages = languages, - compilerExtensions = extensions, - compilerProperties = ghcInfoMap - } - compPlatform = Internal.targetPlatform ghcInfo - -- configure gcc and ld - progdb4 = Internal.configureToolchain implInfo ghcProg ghcInfoMap progdb3 - return (comp, compPlatform, progdb4) - --- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find --- the corresponding tool; e.g. 
if the tool is ghc-pkg, we try looking --- for a versioned or unversioned ghc-pkg in the same dir, that is: --- --- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe) --- > /usr/local/bin/ghc-pkg-6.6.1(.exe) --- > /usr/local/bin/ghc-pkg(.exe) --- -guessToolFromGhcPath :: Program -> ConfiguredProgram - -> Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])) -guessToolFromGhcPath tool ghcProg verbosity searchpath - = do let toolname = programName tool - given_path = programPath ghcProg - given_dir = takeDirectory given_path - real_path <- canonicalizePath given_path - let real_dir = takeDirectory real_path - versionSuffix path = takeVersionSuffix (dropExeExtension path) - given_suf = versionSuffix given_path - real_suf = versionSuffix real_path - guessNormal dir = dir toolname <.> exeExtension buildPlatform - guessGhcVersioned dir suf = dir (toolname ++ "-ghc" ++ suf) - <.> exeExtension buildPlatform - guessVersioned dir suf = dir (toolname ++ suf) - <.> exeExtension buildPlatform - mkGuesses dir suf | null suf = [guessNormal dir] - | otherwise = [guessGhcVersioned dir suf, - guessVersioned dir suf, - guessNormal dir] - guesses = mkGuesses given_dir given_suf ++ - if real_path == given_path - then [] - else mkGuesses real_dir real_suf - info verbosity $ "looking for tool " ++ toolname - ++ " near compiler in " ++ given_dir - debug verbosity $ "candidate locations: " ++ show guesses - exists <- traverse doesFileExist guesses - case [ file | (file, True) <- zip guesses exists ] of - -- If we can't find it near ghc, fall back to the usual - -- method. - [] -> programFindLocation tool verbosity searchpath - (fp:_) -> do info verbosity $ "found " ++ toolname ++ " in " ++ fp - let lookedAt = map fst - . 
takeWhile (\(_file, exist) -> not exist) - $ zip guesses exists - return (Just (fp, lookedAt)) - - where takeVersionSuffix :: FilePath -> String - takeVersionSuffix = takeWhileEndLE isSuffixChar - - isSuffixChar :: Char -> Bool - isSuffixChar c = isDigit c || c == '.' || c == '-' - --- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a --- corresponding ghc-pkg, we try looking for both a versioned and unversioned --- ghc-pkg in the same dir, that is: --- --- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe) --- > /usr/local/bin/ghc-pkg-6.6.1(.exe) --- > /usr/local/bin/ghc-pkg(.exe) --- -guessGhcPkgFromGhcPath :: ConfiguredProgram - -> Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])) -guessGhcPkgFromGhcPath = guessToolFromGhcPath ghcPkgProgram - --- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a --- corresponding hsc2hs, we try looking for both a versioned and unversioned --- hsc2hs in the same dir, that is: --- --- > /usr/local/bin/hsc2hs-ghc-6.6.1(.exe) --- > /usr/local/bin/hsc2hs-6.6.1(.exe) --- > /usr/local/bin/hsc2hs(.exe) --- -guessHsc2hsFromGhcPath :: ConfiguredProgram - -> Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])) -guessHsc2hsFromGhcPath = guessToolFromGhcPath hsc2hsProgram - --- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a --- corresponding haddock, we try looking for both a versioned and unversioned --- haddock in the same dir, that is: --- --- > /usr/local/bin/haddock-ghc-6.6.1(.exe) --- > /usr/local/bin/haddock-6.6.1(.exe) --- > /usr/local/bin/haddock(.exe) --- -guessHaddockFromGhcPath :: ConfiguredProgram - -> Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])) -guessHaddockFromGhcPath = guessToolFromGhcPath haddockProgram - -guessHpcFromGhcPath :: ConfiguredProgram - -> Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])) -guessHpcFromGhcPath = guessToolFromGhcPath hpcProgram - -guessRunghcFromGhcPath :: 
ConfiguredProgram - -> Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])) -guessRunghcFromGhcPath = guessToolFromGhcPath runghcProgram - - -getGhcInfo :: Verbosity -> ConfiguredProgram -> IO [(String, String)] -getGhcInfo verbosity ghcProg = Internal.getGhcInfo verbosity implInfo ghcProg - where - Just version = programVersion ghcProg - implInfo = ghcVersionImplInfo version - --- | Given a single package DB, return all installed packages. -getPackageDBContents :: Verbosity -> PackageDB -> ProgramDb - -> IO InstalledPackageIndex -getPackageDBContents verbosity packagedb progdb = do - pkgss <- getInstalledPackages' verbosity [packagedb] progdb - toPackageIndex verbosity pkgss progdb - --- | Given a package DB stack, return all installed packages. -getInstalledPackages :: Verbosity -> Compiler -> PackageDBStack - -> ProgramDb - -> IO InstalledPackageIndex -getInstalledPackages verbosity comp packagedbs progdb = do - checkPackageDbEnvVar verbosity - checkPackageDbStack verbosity comp packagedbs - pkgss <- getInstalledPackages' verbosity packagedbs progdb - index <- toPackageIndex verbosity pkgss progdb - return $! hackRtsPackage index - - where - hackRtsPackage index = - case PackageIndex.lookupPackageName index (mkPackageName "rts") of - [(_,[rts])] - -> PackageIndex.insert (removeMingwIncludeDir rts) index - _ -> index -- No (or multiple) ghc rts package is registered!! - -- Feh, whatever, the ghc test suite does some crazy stuff. - --- | Given a list of @(PackageDB, InstalledPackageInfo)@ pairs, produce a --- @PackageIndex@. Helper function used by 'getPackageDBContents' and --- 'getInstalledPackages'. -toPackageIndex :: Verbosity - -> [(PackageDB, [InstalledPackageInfo])] - -> ProgramDb - -> IO InstalledPackageIndex -toPackageIndex verbosity pkgss progdb = do - -- On Windows, various fields have $topdir/foo rather than full - -- paths. 
We need to substitute the right value in so that when - -- we, for example, call gcc, we have proper paths to give it. - topDir <- getLibDir' verbosity ghcProg - let indices = [ PackageIndex.fromList (map (Internal.substTopDir topDir) pkgs) - | (_, pkgs) <- pkgss ] - return $! mconcat indices - - where - Just ghcProg = lookupProgram ghcProgram progdb - -getLibDir :: Verbosity -> LocalBuildInfo -> IO FilePath -getLibDir verbosity lbi = - dropWhileEndLE isSpace `fmap` - getDbProgramOutput verbosity ghcProgram - (withPrograms lbi) ["--print-libdir"] - -getLibDir' :: Verbosity -> ConfiguredProgram -> IO FilePath -getLibDir' verbosity ghcProg = - dropWhileEndLE isSpace `fmap` - getProgramOutput verbosity ghcProg ["--print-libdir"] - - --- | Return the 'FilePath' to the global GHC package database. -getGlobalPackageDB :: Verbosity -> ConfiguredProgram -> IO FilePath -getGlobalPackageDB verbosity ghcProg = - dropWhileEndLE isSpace `fmap` - getProgramOutput verbosity ghcProg ["--print-global-package-db"] - --- | Return the 'FilePath' to the per-user GHC package database. -getUserPackageDB :: Verbosity -> ConfiguredProgram -> Platform -> NoCallStackIO FilePath -getUserPackageDB _verbosity ghcProg platform = do - -- It's rather annoying that we have to reconstruct this, because ghc - -- hides this information from us otherwise. But for certain use cases - -- like change monitoring it really can't remain hidden. 
- appdir <- getAppUserDataDirectory "ghc" - return (appdir platformAndVersion packageConfFileName) - where - platformAndVersion = Internal.ghcPlatformAndVersionString - platform ghcVersion - packageConfFileName = "package.conf.d" - Just ghcVersion = programVersion ghcProg - -checkPackageDbEnvVar :: Verbosity -> IO () -checkPackageDbEnvVar verbosity = - Internal.checkPackageDbEnvVar verbosity "GHC" "GHC_PACKAGE_PATH" - -checkPackageDbStack :: Verbosity -> Compiler -> PackageDBStack -> IO () -checkPackageDbStack verbosity comp = - if flagPackageConf implInfo - then checkPackageDbStackPre76 verbosity - else checkPackageDbStackPost76 verbosity - where implInfo = ghcVersionImplInfo (compilerVersion comp) - -checkPackageDbStackPost76 :: Verbosity -> PackageDBStack -> IO () -checkPackageDbStackPost76 _ (GlobalPackageDB:rest) - | GlobalPackageDB `notElem` rest = return () -checkPackageDbStackPost76 verbosity rest - | GlobalPackageDB `elem` rest = - die' verbosity $ "If the global package db is specified, it must be " - ++ "specified first and cannot be specified multiple times" -checkPackageDbStackPost76 _ _ = return () - -checkPackageDbStackPre76 :: Verbosity -> PackageDBStack -> IO () -checkPackageDbStackPre76 _ (GlobalPackageDB:rest) - | GlobalPackageDB `notElem` rest = return () -checkPackageDbStackPre76 verbosity rest - | GlobalPackageDB `notElem` rest = - die' verbosity $ "With current ghc versions the global package db is always used " - ++ "and must be listed first. This ghc limitation is lifted in GHC 7.6," - ++ "see http://hackage.haskell.org/trac/ghc/ticket/5977" -checkPackageDbStackPre76 verbosity _ = - die' verbosity $ "If the global package db is specified, it must be " - ++ "specified first and cannot be specified multiple times" - --- GHC < 6.10 put "$topdir/include/mingw" in rts's installDirs. This --- breaks when you want to use a different gcc, so we need to filter --- it out. 
-removeMingwIncludeDir :: InstalledPackageInfo -> InstalledPackageInfo -removeMingwIncludeDir pkg = - let ids = InstalledPackageInfo.includeDirs pkg - ids' = filter (not . ("mingw" `isSuffixOf`)) ids - in pkg { InstalledPackageInfo.includeDirs = ids' } - --- | Get the packages from specific PackageDBs, not cumulative. --- -getInstalledPackages' :: Verbosity -> [PackageDB] -> ProgramDb - -> IO [(PackageDB, [InstalledPackageInfo])] -getInstalledPackages' verbosity packagedbs progdb = - sequenceA - [ do pkgs <- HcPkg.dump (hcPkgInfo progdb) verbosity packagedb - return (packagedb, pkgs) - | packagedb <- packagedbs ] - -getInstalledPackagesMonitorFiles :: Verbosity -> Platform - -> ProgramDb - -> [PackageDB] - -> IO [FilePath] -getInstalledPackagesMonitorFiles verbosity platform progdb = - traverse getPackageDBPath - where - getPackageDBPath :: PackageDB -> IO FilePath - getPackageDBPath GlobalPackageDB = - selectMonitorFile =<< getGlobalPackageDB verbosity ghcProg - - getPackageDBPath UserPackageDB = - selectMonitorFile =<< getUserPackageDB verbosity ghcProg platform - - getPackageDBPath (SpecificPackageDB path) = selectMonitorFile path - - -- GHC has old style file dbs, and new style directory dbs. - -- Note that for dir style dbs, we only need to monitor the cache file, not - -- the whole directory. The ghc program itself only reads the cache file - -- so it's safe to only monitor this one file. 
- selectMonitorFile path = do - isFileStyle <- doesFileExist path - if isFileStyle then return path - else return (path "package.cache") - - Just ghcProg = lookupProgram ghcProgram progdb - - --- ----------------------------------------------------------------------------- --- Building a library - -buildLib :: Verbosity -> Cabal.Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO () -buildLib = buildOrReplLib Nothing - -replLib :: [String] -> Verbosity - -> Cabal.Flag (Maybe Int) -> PackageDescription - -> LocalBuildInfo -> Library - -> ComponentLocalBuildInfo -> IO () -replLib = buildOrReplLib . Just - -buildOrReplLib :: Maybe [String] -> Verbosity - -> Cabal.Flag (Maybe Int) -> PackageDescription - -> LocalBuildInfo -> Library - -> ComponentLocalBuildInfo -> IO () -buildOrReplLib mReplFlags verbosity numJobs pkg_descr lbi lib clbi = do - let uid = componentUnitId clbi - libTargetDir = componentBuildDir lbi clbi - whenVanillaLib forceVanilla = - when (forceVanilla || withVanillaLib lbi) - whenProfLib = when (withProfLib lbi) - whenSharedLib forceShared = - when (forceShared || withSharedLib lbi) - whenStaticLib forceStatic = - when (forceStatic || withStaticLib lbi) - whenGHCiLib = when (withGHCiLib lbi && withVanillaLib lbi) - forRepl = maybe False (const True) mReplFlags - ifReplLib = when forRepl - replFlags = fromMaybe mempty mReplFlags - comp = compiler lbi - ghcVersion = compilerVersion comp - implInfo = getImplInfo comp - platform@(Platform _hostArch hostOS) = hostPlatform lbi - has_code = not (componentIsIndefinite clbi) - - (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi) - let runGhcProg = runGHC verbosity ghcProg comp platform - - let libBi = libBuildInfo lib - - let isGhcDynamic = isDynamic comp - dynamicTooSupported = supportsDynamicToo comp - doingTH = usesTemplateHaskellOrQQ libBi - forceVanillaLib = doingTH && not isGhcDynamic - forceSharedLib = doingTH && isGhcDynamic - -- 
TH always needs default libs, even when building for profiling - - -- Determine if program coverage should be enabled and if so, what - -- '-hpcdir' should be. - let isCoverageEnabled = libCoverage lbi - -- TODO: Historically HPC files have been put into a directory which - -- has the package name. I'm going to avoid changing this for - -- now, but it would probably be better for this to be the - -- component ID instead... - pkg_name = display (PD.package pkg_descr) - distPref = fromFlag $ configDistPref $ configFlags lbi - hpcdir way - | forRepl = mempty -- HPC is not supported in ghci - | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way pkg_name - | otherwise = mempty - - createDirectoryIfMissingVerbose verbosity True libTargetDir - -- TODO: do we need to put hs-boot files into place for mutually recursive - -- modules? - let cLikeFiles = fromNubListR $ toNubListR (cSources libBi) <> toNubListR (cxxSources libBi) - cObjs = map (`replaceExtension` objExtension) cLikeFiles - baseOpts = componentGhcOptions verbosity lbi libBi clbi libTargetDir - vanillaOpts = baseOpts `mappend` mempty { - ghcOptMode = toFlag GhcModeMake, - ghcOptNumJobs = numJobs, - ghcOptInputModules = toNubListR $ allLibModules lib clbi, - ghcOptHPCDir = hpcdir Hpc.Vanilla - } - - profOpts = vanillaOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptProfilingAuto = Internal.profDetailLevelFlag True - (withProfLibDetail lbi), - ghcOptHiSuffix = toFlag "p_hi", - ghcOptObjSuffix = toFlag "p_o", - ghcOptExtra = hcProfOptions GHC libBi, - ghcOptHPCDir = hpcdir Hpc.Prof - } - - sharedOpts = vanillaOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptFPic = toFlag True, - ghcOptHiSuffix = toFlag "dyn_hi", - ghcOptObjSuffix = toFlag "dyn_o", - ghcOptExtra = hcSharedOptions GHC libBi, - ghcOptHPCDir = hpcdir Hpc.Dyn - } - linkerOpts = mempty { - ghcOptLinkOptions = PD.ldOptions libBi, - ghcOptLinkLibs = extraLibs libBi, - ghcOptLinkLibPath = toNubListR $ 
extraLibDirs libBi, - ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi, - ghcOptLinkFrameworkDirs = toNubListR $ PD.extraFrameworkDirs libBi, - ghcOptInputFiles = toNubListR - [libTargetDir x | x <- cObjs] - } - replOpts = vanillaOpts { - ghcOptExtra = Internal.filterGhciFlags - (ghcOptExtra vanillaOpts) - <> replFlags, - ghcOptNumJobs = mempty - } - `mappend` linkerOpts - `mappend` mempty { - ghcOptMode = toFlag GhcModeInteractive, - ghcOptOptimisation = toFlag GhcNoOptimisation - } - - vanillaSharedOpts = vanillaOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, - ghcOptDynHiSuffix = toFlag "dyn_hi", - ghcOptDynObjSuffix = toFlag "dyn_o", - ghcOptHPCDir = hpcdir Hpc.Dyn - } - - unless (forRepl || null (allLibModules lib clbi)) $ - do let vanilla = whenVanillaLib forceVanillaLib (runGhcProg vanillaOpts) - shared = whenSharedLib forceSharedLib (runGhcProg sharedOpts) - useDynToo = dynamicTooSupported && - (forceVanillaLib || withVanillaLib lbi) && - (forceSharedLib || withSharedLib lbi) && - null (hcSharedOptions GHC libBi) - if not has_code - then vanilla - else - if useDynToo - then do - runGhcProg vanillaSharedOpts - case (hpcdir Hpc.Dyn, hpcdir Hpc.Vanilla) of - (Cabal.Flag dynDir, Cabal.Flag vanillaDir) -> - -- When the vanilla and shared library builds are done - -- in one pass, only one set of HPC module interfaces - -- are generated. This set should suffice for both - -- static and dynamically linked executables. We copy - -- the modules interfaces so they are available under - -- both ways. - copyDirectoryRecursive verbosity dynDir vanillaDir - _ -> return () - else if isGhcDynamic - then do shared; vanilla - else do vanilla; shared - whenProfLib (runGhcProg profOpts) - - -- Build any C++ sources separately. - unless (not has_code || null (cxxSources libBi)) $ do - info verbosity "Building C++ Sources..." 
- sequence_ - [ do let baseCxxOpts = Internal.componentCxxGhcOptions verbosity implInfo - lbi libBi clbi libTargetDir filename - vanillaCxxOpts = if isGhcDynamic - then baseCxxOpts { ghcOptFPic = toFlag True } - else baseCxxOpts - profCxxOpts = vanillaCxxOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptObjSuffix = toFlag "p_o" - } - sharedCxxOpts = vanillaCxxOpts `mappend` mempty { - ghcOptFPic = toFlag True, - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptObjSuffix = toFlag "dyn_o" - } - odir = fromFlag (ghcOptObjDir vanillaCxxOpts) - createDirectoryIfMissingVerbose verbosity True odir - let runGhcProgIfNeeded cxxOpts = do - needsRecomp <- checkNeedsRecompilation filename cxxOpts - when needsRecomp $ runGhcProg cxxOpts - runGhcProgIfNeeded vanillaCxxOpts - unless forRepl $ - whenSharedLib forceSharedLib (runGhcProgIfNeeded sharedCxxOpts) - unless forRepl $ whenProfLib (runGhcProgIfNeeded profCxxOpts) - | filename <- cxxSources libBi] - - when has_code . ifReplLib $ do - when (null (allLibModules lib clbi)) $ warn verbosity "No exposed modules" - ifReplLib (runGhcProg replOpts) - - -- build any C sources - -- TODO: Add support for S and CMM files. - unless (not has_code || null (cSources libBi)) $ do - info verbosity "Building C Sources..." - sequence_ - [ do let baseCcOpts = Internal.componentCcGhcOptions verbosity implInfo - lbi libBi clbi libTargetDir filename - vanillaCcOpts = if isGhcDynamic - -- Dynamic GHC requires C sources to be built - -- with -fPIC for REPL to work. See #2207. 
- then baseCcOpts { ghcOptFPic = toFlag True } - else baseCcOpts - profCcOpts = vanillaCcOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptObjSuffix = toFlag "p_o" - } - sharedCcOpts = vanillaCcOpts `mappend` mempty { - ghcOptFPic = toFlag True, - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptObjSuffix = toFlag "dyn_o" - } - odir = fromFlag (ghcOptObjDir vanillaCcOpts) - createDirectoryIfMissingVerbose verbosity True odir - let runGhcProgIfNeeded ccOpts = do - needsRecomp <- checkNeedsRecompilation filename ccOpts - when needsRecomp $ runGhcProg ccOpts - runGhcProgIfNeeded vanillaCcOpts - unless forRepl $ - whenSharedLib forceSharedLib (runGhcProgIfNeeded sharedCcOpts) - unless forRepl $ whenProfLib (runGhcProgIfNeeded profCcOpts) - | filename <- cSources libBi] - - -- TODO: problem here is we need the .c files built first, so we can load them - -- with ghci, but .c files can depend on .h files generated by ghc by ffi - -- exports. - - -- link: - when has_code . unless forRepl $ do - info verbosity "Linking..." 
- let cProfObjs = map (`replaceExtension` ("p_" ++ objExtension)) - (cSources libBi ++ cxxSources libBi) - cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension)) - (cSources libBi ++ cxxSources libBi) - compiler_id = compilerId (compiler lbi) - vanillaLibFilePath = libTargetDir mkLibName uid - profileLibFilePath = libTargetDir mkProfLibName uid - sharedLibFilePath = libTargetDir mkSharedLibName (hostPlatform lbi) compiler_id uid - staticLibFilePath = libTargetDir mkStaticLibName (hostPlatform lbi) compiler_id uid - ghciLibFilePath = libTargetDir Internal.mkGHCiLibName uid - libInstallPath = libdir $ absoluteComponentInstallDirs pkg_descr lbi uid NoCopyDest - sharedLibInstallPath = libInstallPath mkSharedLibName (hostPlatform lbi) compiler_id uid - - stubObjs <- catMaybes <$> sequenceA - [ findFileWithExtension [objExtension] [libTargetDir] - (ModuleName.toFilePath x ++"_stub") - | ghcVersion < mkVersion [7,2] -- ghc-7.2+ does not make _stub.o files - , x <- allLibModules lib clbi ] - stubProfObjs <- catMaybes <$> sequenceA - [ findFileWithExtension ["p_" ++ objExtension] [libTargetDir] - (ModuleName.toFilePath x ++"_stub") - | ghcVersion < mkVersion [7,2] -- ghc-7.2+ does not make _stub.o files - , x <- allLibModules lib clbi ] - stubSharedObjs <- catMaybes <$> sequenceA - [ findFileWithExtension ["dyn_" ++ objExtension] [libTargetDir] - (ModuleName.toFilePath x ++"_stub") - | ghcVersion < mkVersion [7,2] -- ghc-7.2+ does not make _stub.o files - , x <- allLibModules lib clbi ] - - hObjs <- Internal.getHaskellObjects implInfo lib lbi clbi - libTargetDir objExtension True - hProfObjs <- - if withProfLib lbi - then Internal.getHaskellObjects implInfo lib lbi clbi - libTargetDir ("p_" ++ objExtension) True - else return [] - hSharedObjs <- - if withSharedLib lbi - then Internal.getHaskellObjects implInfo lib lbi clbi - libTargetDir ("dyn_" ++ objExtension) False - else return [] - - unless (null hObjs && null cObjs && null stubObjs) $ do - rpaths <- getRPaths 
lbi clbi - - let staticObjectFiles = - hObjs - ++ map (libTargetDir ) cObjs - ++ stubObjs - profObjectFiles = - hProfObjs - ++ map (libTargetDir ) cProfObjs - ++ stubProfObjs - ghciObjFiles = - hObjs - ++ map (libTargetDir ) cObjs - ++ stubObjs - dynamicObjectFiles = - hSharedObjs - ++ map (libTargetDir ) cSharedObjs - ++ stubSharedObjs - -- After the relocation lib is created we invoke ghc -shared - -- with the dependencies spelled out as -package arguments - -- and ghc invokes the linker with the proper library paths - ghcSharedLinkArgs = - mempty { - ghcOptShared = toFlag True, - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptInputFiles = toNubListR dynamicObjectFiles, - ghcOptOutputFile = toFlag sharedLibFilePath, - ghcOptExtra = hcSharedOptions GHC libBi, - -- For dynamic libs, Mac OS/X needs to know the install location - -- at build time. This only applies to GHC < 7.8 - see the - -- discussion in #1660. - ghcOptDylibName = if hostOS == OSX - && ghcVersion < mkVersion [7,8] - then toFlag sharedLibInstallPath - else mempty, - ghcOptHideAllPackages = toFlag True, - ghcOptNoAutoLinkPackages = toFlag True, - ghcOptPackageDBs = withPackageDB lbi, - ghcOptThisUnitId = case clbi of - LibComponentLocalBuildInfo { componentCompatPackageKey = pk } - -> toFlag pk - _ -> mempty, - ghcOptThisComponentId = case clbi of - LibComponentLocalBuildInfo { componentInstantiatedWith = insts } -> - if null insts - then mempty - else toFlag (componentComponentId clbi) - _ -> mempty, - ghcOptInstantiatedWith = case clbi of - LibComponentLocalBuildInfo { componentInstantiatedWith = insts } - -> insts - _ -> [], - ghcOptPackages = toNubListR $ - Internal.mkGhcOptPackages clbi , - ghcOptLinkLibs = extraLibs libBi, - ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi, - ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi, - ghcOptLinkFrameworkDirs = - toNubListR $ PD.extraFrameworkDirs libBi, - ghcOptRPaths = rpaths - } - ghcStaticLinkArgs = - mempty { - ghcOptStaticLib = 
toFlag True, - ghcOptInputFiles = toNubListR staticObjectFiles, - ghcOptOutputFile = toFlag staticLibFilePath, - ghcOptExtra = hcStaticOptions GHC libBi, - ghcOptHideAllPackages = toFlag True, - ghcOptNoAutoLinkPackages = toFlag True, - ghcOptPackageDBs = withPackageDB lbi, - ghcOptThisUnitId = case clbi of - LibComponentLocalBuildInfo { componentCompatPackageKey = pk } - -> toFlag pk - _ -> mempty, - ghcOptThisComponentId = case clbi of - LibComponentLocalBuildInfo { componentInstantiatedWith = insts } -> - if null insts - then mempty - else toFlag (componentComponentId clbi) - _ -> mempty, - ghcOptInstantiatedWith = case clbi of - LibComponentLocalBuildInfo { componentInstantiatedWith = insts } - -> insts - _ -> [], - ghcOptPackages = toNubListR $ - Internal.mkGhcOptPackages clbi , - ghcOptLinkLibs = extraLibs libBi, - ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi - } - - info verbosity (show (ghcOptPackages ghcSharedLinkArgs)) - - whenVanillaLib False $ - Ar.createArLibArchive verbosity lbi vanillaLibFilePath staticObjectFiles - - whenProfLib $ - Ar.createArLibArchive verbosity lbi profileLibFilePath profObjectFiles - - whenGHCiLib $ do - (ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi) - Ld.combineObjectFiles verbosity lbi ldProg - ghciLibFilePath ghciObjFiles - - whenSharedLib False $ - runGhcProg ghcSharedLinkArgs - - whenStaticLib False $ - runGhcProg ghcStaticLinkArgs - --- | Start a REPL without loading any source files. 
-startInterpreter :: Verbosity -> ProgramDb -> Compiler -> Platform - -> PackageDBStack -> IO () -startInterpreter verbosity progdb comp platform packageDBs = do - let replOpts = mempty { - ghcOptMode = toFlag GhcModeInteractive, - ghcOptPackageDBs = packageDBs - } - checkPackageDbStack verbosity comp packageDBs - (ghcProg, _) <- requireProgram verbosity ghcProgram progdb - runGHC verbosity ghcProg comp platform replOpts - --- ----------------------------------------------------------------------------- --- Building an executable or foreign library - --- | Build a foreign library -buildFLib - :: Verbosity -> Cabal.Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> ForeignLib -> ComponentLocalBuildInfo -> IO () -buildFLib v njobs pkg lbi = gbuild v njobs pkg lbi . GBuildFLib - -replFLib - :: [String] -> Verbosity - -> Cabal.Flag (Maybe Int) -> PackageDescription - -> LocalBuildInfo -> ForeignLib - -> ComponentLocalBuildInfo -> IO () -replFLib replFlags v njobs pkg lbi = - gbuild v njobs pkg lbi . GReplFLib replFlags - --- | Build an executable with GHC. --- -buildExe - :: Verbosity -> Cabal.Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> Executable -> ComponentLocalBuildInfo -> IO () -buildExe v njobs pkg lbi = gbuild v njobs pkg lbi . GBuildExe - -replExe - :: [String] -> Verbosity - -> Cabal.Flag (Maybe Int) -> PackageDescription - -> LocalBuildInfo -> Executable - -> ComponentLocalBuildInfo -> IO () -replExe replFlags v njobs pkg lbi = - gbuild v njobs pkg lbi . GReplExe replFlags - --- | Building an executable, starting the REPL, and building foreign --- libraries are all very similar and implemented in 'gbuild'. The --- 'GBuildMode' distinguishes between the various kinds of operation. 
-data GBuildMode = - GBuildExe Executable - | GReplExe [String] Executable - | GBuildFLib ForeignLib - | GReplFLib [String] ForeignLib - -gbuildInfo :: GBuildMode -> BuildInfo -gbuildInfo (GBuildExe exe) = buildInfo exe -gbuildInfo (GReplExe _ exe) = buildInfo exe -gbuildInfo (GBuildFLib flib) = foreignLibBuildInfo flib -gbuildInfo (GReplFLib _ flib) = foreignLibBuildInfo flib - -gbuildName :: GBuildMode -> String -gbuildName (GBuildExe exe) = unUnqualComponentName $ exeName exe -gbuildName (GReplExe _ exe) = unUnqualComponentName $ exeName exe -gbuildName (GBuildFLib flib) = unUnqualComponentName $ foreignLibName flib -gbuildName (GReplFLib _ flib) = unUnqualComponentName $ foreignLibName flib - -gbuildTargetName :: LocalBuildInfo -> GBuildMode -> String -gbuildTargetName lbi (GBuildExe exe) = exeTargetName (hostPlatform lbi) exe -gbuildTargetName lbi (GReplExe _ exe) = exeTargetName (hostPlatform lbi) exe -gbuildTargetName lbi (GBuildFLib flib) = flibTargetName lbi flib -gbuildTargetName lbi (GReplFLib _ flib) = flibTargetName lbi flib - -exeTargetName :: Platform -> Executable -> String -exeTargetName platform exe = unUnqualComponentName (exeName exe) `withExt` exeExtension platform - --- | Target name for a foreign library (the actual file name) --- --- We do not use mkLibName and co here because the naming for foreign libraries --- is slightly different (we don't use "_p" or compiler version suffices, and we --- don't want the "lib" prefix on Windows). --- --- TODO: We do use `dllExtension` and co here, but really that's wrong: they --- use the OS used to build cabal to determine which extension to use, rather --- than the target OS (but this is wrong elsewhere in Cabal as well). 
-flibTargetName :: LocalBuildInfo -> ForeignLib -> String -flibTargetName lbi flib = - case (os, foreignLibType flib) of - (Windows, ForeignLibNativeShared) -> nm <.> "dll" - (Windows, ForeignLibNativeStatic) -> nm <.> "lib" - (Linux, ForeignLibNativeShared) -> "lib" ++ nm <.> versionedExt - (_other, ForeignLibNativeShared) -> "lib" ++ nm <.> dllExtension (hostPlatform lbi) - (_other, ForeignLibNativeStatic) -> "lib" ++ nm <.> staticLibExtension (hostPlatform lbi) - (_any, ForeignLibTypeUnknown) -> cabalBug "unknown foreign lib type" - where - nm :: String - nm = unUnqualComponentName $ foreignLibName flib - - os :: OS - os = let (Platform _ os') = hostPlatform lbi - in os' - - -- If a foreign lib foo has lib-version-info 5:1:2 or - -- lib-version-linux 3.2.1, it should be built as libfoo.so.3.2.1 - -- Libtool's version-info data is translated into library versions in a - -- nontrivial way: so refer to libtool documentation. - versionedExt :: String - versionedExt = - let nums = foreignLibVersion flib os - in foldl (<.>) "so" (map show nums) - --- | Name for the library when building. --- --- If the `lib-version-info` field or the `lib-version-linux` field of --- a foreign library target is set, we need to incorporate that --- version into the SONAME field. --- --- If a foreign library foo has lib-version-info 5:1:2, it should be --- built as libfoo.so.3.2.1. We want it to get soname libfoo.so.3. --- However, GHC does not allow overriding soname by setting linker --- options, as it sets a soname of its own (namely the output --- filename), after the user-supplied linker options. Hence, we have --- to compile the library with the soname as its filename. We rename --- the compiled binary afterwards. --- --- This method allows to adjust the name of the library at build time --- such that the correct soname can be set. 
-flibBuildName :: LocalBuildInfo -> ForeignLib -> String -flibBuildName lbi flib - -- On linux, if a foreign-library has version data, the first digit is used - -- to produce the SONAME. - | (os, foreignLibType flib) == - (Linux, ForeignLibNativeShared) - = let nums = foreignLibVersion flib os - in "lib" ++ nm <.> foldl (<.>) "so" (map show (take 1 nums)) - | otherwise = flibTargetName lbi flib - where - os :: OS - os = let (Platform _ os') = hostPlatform lbi - in os' - - nm :: String - nm = unUnqualComponentName $ foreignLibName flib - -gbuildIsRepl :: GBuildMode -> Bool -gbuildIsRepl (GBuildExe _) = False -gbuildIsRepl (GReplExe _ _) = True -gbuildIsRepl (GBuildFLib _) = False -gbuildIsRepl (GReplFLib _ _) = True - -gbuildNeedDynamic :: LocalBuildInfo -> GBuildMode -> Bool -gbuildNeedDynamic lbi bm = - case bm of - GBuildExe _ -> withDynExe lbi - GReplExe _ _ -> withDynExe lbi - GBuildFLib flib -> withDynFLib flib - GReplFLib _ flib -> withDynFLib flib - where - withDynFLib flib = - case foreignLibType flib of - ForeignLibNativeShared -> - ForeignLibStandalone `notElem` foreignLibOptions flib - ForeignLibNativeStatic -> - False - ForeignLibTypeUnknown -> - cabalBug "unknown foreign lib type" - -gbuildModDefFiles :: GBuildMode -> [FilePath] -gbuildModDefFiles (GBuildExe _) = [] -gbuildModDefFiles (GReplExe _ _) = [] -gbuildModDefFiles (GBuildFLib flib) = foreignLibModDefFile flib -gbuildModDefFiles (GReplFLib _ flib) = foreignLibModDefFile flib - --- | "Main" module name when overridden by @ghc-options: -main-is ...@ --- or 'Nothing' if no @-main-is@ flag could be found. --- --- In case of 'Nothing', 'Distribution.ModuleName.main' can be assumed. 
-exeMainModuleName :: Executable -> Maybe ModuleName -exeMainModuleName Executable{buildInfo = bnfo} = - -- GHC honors the last occurence of a module name updated via -main-is - -- - -- Moreover, -main-is when parsed left-to-right can update either - -- the "Main" module name, or the "main" function name, or both, - -- see also 'decodeMainIsArg'. - msum $ reverse $ map decodeMainIsArg $ findIsMainArgs ghcopts - where - ghcopts = hcOptions GHC bnfo - - findIsMainArgs [] = [] - findIsMainArgs ("-main-is":arg:rest) = arg : findIsMainArgs rest - findIsMainArgs (_:rest) = findIsMainArgs rest - --- | Decode argument to '-main-is' --- --- Returns 'Nothing' if argument set only the function name. --- --- This code has been stolen/refactored from GHC's DynFlags.setMainIs --- function. The logic here is deliberately imperfect as it is --- intended to be bug-compatible with GHC's parser. See discussion in --- https://github.com/haskell/cabal/pull/4539#discussion_r118981753. -decodeMainIsArg :: String -> Maybe ModuleName -decodeMainIsArg arg - | not (null main_fn) && isLower (head main_fn) - -- The arg looked like "Foo.Bar.baz" - = Just (ModuleName.fromString main_mod) - | isUpper (head arg) -- The arg looked like "Foo" or "Foo.Bar" - = Just (ModuleName.fromString arg) - | otherwise -- The arg looked like "baz" - = Nothing - where - (main_mod, main_fn) = splitLongestPrefix arg (== '.') - - splitLongestPrefix :: String -> (Char -> Bool) -> (String,String) - splitLongestPrefix str pred' - | null r_pre = (str, []) - | otherwise = (reverse (tail r_pre), reverse r_suf) - -- 'tail' drops the char satisfying 'pred' - where (r_suf, r_pre) = break pred' (reverse str) - - --- | A collection of: --- * C input files --- * C++ input files --- * GHC input files --- * GHC input modules --- --- Used to correctly build and link sources. 
-data BuildSources = BuildSources { - cSourcesFiles :: [FilePath], - cxxSourceFiles :: [FilePath], - inputSourceFiles :: [FilePath], - inputSourceModules :: [ModuleName] - } - --- | Locate and return the 'BuildSources' required to build and link. -gbuildSources :: Verbosity - -> Version -- ^ specVersion - -> FilePath - -> GBuildMode - -> IO BuildSources -gbuildSources verbosity specVer tmpDir bm = - case bm of - GBuildExe exe -> exeSources exe - GReplExe _ exe -> exeSources exe - GBuildFLib flib -> return $ flibSources flib - GReplFLib _ flib -> return $ flibSources flib - where - exeSources :: Executable -> IO BuildSources - exeSources exe@Executable{buildInfo = bnfo, modulePath = modPath} = do - main <- findFile (tmpDir : hsSourceDirs bnfo) modPath - let mainModName = fromMaybe ModuleName.main $ exeMainModuleName exe - otherModNames = exeModules exe - - if isHaskell main - then - if specVer < mkVersion [2] && (mainModName `elem` otherModNames) - then do - -- The cabal manual clearly states that `other-modules` is - -- intended for non-main modules. However, there's at least one - -- important package on Hackage (happy-1.19.5) which - -- violates this. We workaround this here so that we don't - -- invoke GHC with e.g. 'ghc --make Main src/Main.hs' which - -- would result in GHC complaining about duplicate Main - -- modules. - -- - -- Finally, we only enable this workaround for - -- specVersion < 2, as 'cabal-version:>=2.0' cabal files - -- have no excuse anymore to keep doing it wrong... ;-) - warn verbosity $ "Enabling workaround for Main module '" - ++ display mainModName - ++ "' listed in 'other-modules' illegally!" 
- - return BuildSources { - cSourcesFiles = cSources bnfo, - cxxSourceFiles = cxxSources bnfo, - inputSourceFiles = [main], - inputSourceModules = filter (/= mainModName) $ exeModules exe - } - - else return BuildSources { - cSourcesFiles = cSources bnfo, - cxxSourceFiles = cxxSources bnfo, - inputSourceFiles = [main], - inputSourceModules = exeModules exe - } - else let (csf, cxxsf) - | isCxx main = ( cSources bnfo, main : cxxSources bnfo) - -- if main is not a Haskell source - -- and main is not a C++ source - -- then we assume that it is a C source - | otherwise = (main : cSources bnfo, cxxSources bnfo) - - in return BuildSources { - cSourcesFiles = csf, - cxxSourceFiles = cxxsf, - inputSourceFiles = [], - inputSourceModules = exeModules exe - } - - flibSources :: ForeignLib -> BuildSources - flibSources flib@ForeignLib{foreignLibBuildInfo = bnfo} = - BuildSources { - cSourcesFiles = cSources bnfo, - cxxSourceFiles = cxxSources bnfo, - inputSourceFiles = [], - inputSourceModules = foreignLibModules flib - } - - isHaskell :: FilePath -> Bool - isHaskell fp = elem (takeExtension fp) [".hs", ".lhs"] - - isCxx :: FilePath -> Bool - isCxx fp = elem (takeExtension fp) [".cpp", ".cxx", ".c++"] - --- | Generic build function. See comment for 'GBuildMode'. 
-gbuild :: Verbosity -> Cabal.Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> GBuildMode -> ComponentLocalBuildInfo -> IO () -gbuild verbosity numJobs pkg_descr lbi bm clbi = do - (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi) - let replFlags = case bm of - GReplExe flags _ -> flags - GReplFLib flags _ -> flags - GBuildExe{} -> mempty - GBuildFLib{} -> mempty - comp = compiler lbi - platform = hostPlatform lbi - implInfo = getImplInfo comp - runGhcProg = runGHC verbosity ghcProg comp platform - - let (bnfo, threaded) = case bm of - GBuildFLib _ -> popThreadedFlag (gbuildInfo bm) - _ -> (gbuildInfo bm, False) - - -- the name that GHC really uses (e.g., with .exe on Windows for executables) - let targetName = gbuildTargetName lbi bm - let targetDir = buildDir lbi (gbuildName bm) - let tmpDir = targetDir (gbuildName bm ++ "-tmp") - createDirectoryIfMissingVerbose verbosity True targetDir - createDirectoryIfMissingVerbose verbosity True tmpDir - - -- TODO: do we need to put hs-boot files into place for mutually recursive - -- modules? FIX: what about exeName.hi-boot? - - -- Determine if program coverage should be enabled and if so, what - -- '-hpcdir' should be. 
- let isCoverageEnabled = exeCoverage lbi - distPref = fromFlag $ configDistPref $ configFlags lbi - hpcdir way - | gbuildIsRepl bm = mempty -- HPC is not supported in ghci - | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way (gbuildName bm) - | otherwise = mempty - - rpaths <- getRPaths lbi clbi - buildSources <- gbuildSources verbosity (specVersion pkg_descr) tmpDir bm - - let cSrcs = cSourcesFiles buildSources - cxxSrcs = cxxSourceFiles buildSources - inputFiles = inputSourceFiles buildSources - inputModules = inputSourceModules buildSources - isGhcDynamic = isDynamic comp - dynamicTooSupported = supportsDynamicToo comp - cObjs = map (`replaceExtension` objExtension) cSrcs - cxxObjs = map (`replaceExtension` objExtension) cxxSrcs - needDynamic = gbuildNeedDynamic lbi bm - needProfiling = withProfExe lbi - - -- build executables - baseOpts = (componentGhcOptions verbosity lbi bnfo clbi tmpDir) - `mappend` mempty { - ghcOptMode = toFlag GhcModeMake, - ghcOptInputFiles = toNubListR inputFiles, - ghcOptInputModules = toNubListR inputModules - } - staticOpts = baseOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcStaticOnly, - ghcOptHPCDir = hpcdir Hpc.Vanilla - } - profOpts = baseOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptProfilingAuto = Internal.profDetailLevelFlag False - (withProfExeDetail lbi), - ghcOptHiSuffix = toFlag "p_hi", - ghcOptObjSuffix = toFlag "p_o", - ghcOptExtra = hcProfOptions GHC bnfo, - ghcOptHPCDir = hpcdir Hpc.Prof - } - dynOpts = baseOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - -- TODO: Does it hurt to set -fPIC for executables? 
- ghcOptFPic = toFlag True, - ghcOptHiSuffix = toFlag "dyn_hi", - ghcOptObjSuffix = toFlag "dyn_o", - ghcOptExtra = hcSharedOptions GHC bnfo, - ghcOptHPCDir = hpcdir Hpc.Dyn - } - dynTooOpts = staticOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, - ghcOptDynHiSuffix = toFlag "dyn_hi", - ghcOptDynObjSuffix = toFlag "dyn_o", - ghcOptHPCDir = hpcdir Hpc.Dyn - } - linkerOpts = mempty { - ghcOptLinkOptions = PD.ldOptions bnfo, - ghcOptLinkLibs = extraLibs bnfo, - ghcOptLinkLibPath = toNubListR $ extraLibDirs bnfo, - ghcOptLinkFrameworks = toNubListR $ - PD.frameworks bnfo, - ghcOptLinkFrameworkDirs = toNubListR $ - PD.extraFrameworkDirs bnfo, - ghcOptInputFiles = toNubListR - [tmpDir x | x <- cObjs ++ cxxObjs] - } - dynLinkerOpts = mempty { - ghcOptRPaths = rpaths - } - replOpts = baseOpts { - ghcOptExtra = Internal.filterGhciFlags - (ghcOptExtra baseOpts) - <> replFlags - } - -- For a normal compile we do separate invocations of ghc for - -- compiling as for linking. But for repl we have to do just - -- the one invocation, so that one has to include all the - -- linker stuff too, like -l flags and any .o files from C - -- files etc. - `mappend` linkerOpts - `mappend` mempty { - ghcOptMode = toFlag GhcModeInteractive, - ghcOptOptimisation = toFlag GhcNoOptimisation - } - commonOpts | needProfiling = profOpts - | needDynamic = dynOpts - | otherwise = staticOpts - compileOpts | useDynToo = dynTooOpts - | otherwise = commonOpts - withStaticExe = not needProfiling && not needDynamic - - -- For building exe's that use TH with -prof or -dynamic we actually have - -- to build twice, once without -prof/-dynamic and then again with - -- -prof/-dynamic. This is because the code that TH needs to run at - -- compile time needs to be the vanilla ABI so it can be loaded up and run - -- by the compiler. - -- With dynamic-by-default GHC the TH object files loaded at compile-time - -- need to be .dyn_o instead of .o. 
- doingTH = usesTemplateHaskellOrQQ bnfo - -- Should we use -dynamic-too instead of compiling twice? - useDynToo = dynamicTooSupported && isGhcDynamic - && doingTH && withStaticExe - && null (hcSharedOptions GHC bnfo) - compileTHOpts | isGhcDynamic = dynOpts - | otherwise = staticOpts - compileForTH - | gbuildIsRepl bm = False - | useDynToo = False - | isGhcDynamic = doingTH && (needProfiling || withStaticExe) - | otherwise = doingTH && (needProfiling || needDynamic) - - -- Build static/dynamic object files for TH, if needed. - when compileForTH $ - runGhcProg compileTHOpts { ghcOptNoLink = toFlag True - , ghcOptNumJobs = numJobs } - - -- Do not try to build anything if there are no input files. - -- This can happen if the cabal file ends up with only cSrcs - -- but no Haskell modules. - unless ((null inputFiles && null inputModules) - || gbuildIsRepl bm) $ - runGhcProg compileOpts { ghcOptNoLink = toFlag True - , ghcOptNumJobs = numJobs } - - -- build any C++ sources - unless (null cxxSrcs) $ do - info verbosity "Building C++ Sources..." - sequence_ - [ do let baseCxxOpts = Internal.componentCxxGhcOptions verbosity implInfo - lbi bnfo clbi tmpDir filename - vanillaCxxOpts = if isGhcDynamic - -- Dynamic GHC requires C++ sources to be built - -- with -fPIC for REPL to work. See #2207. - then baseCxxOpts { ghcOptFPic = toFlag True } - else baseCxxOpts - profCxxOpts = vanillaCxxOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True - } - sharedCxxOpts = vanillaCxxOpts `mappend` mempty { - ghcOptFPic = toFlag True, - ghcOptDynLinkMode = toFlag GhcDynamicOnly - } - opts | needProfiling = profCxxOpts - | needDynamic = sharedCxxOpts - | otherwise = vanillaCxxOpts - -- TODO: Placing all Haskell, C, & C++ objects in a single directory - -- Has the potential for file collisions. In general we would - -- consider this a user error. However, we should strive to - -- add a warning if this occurs. 
- odir = fromFlag (ghcOptObjDir opts) - createDirectoryIfMissingVerbose verbosity True odir - needsRecomp <- checkNeedsRecompilation filename opts - when needsRecomp $ - runGhcProg opts - | filename <- cxxSrcs ] - - -- build any C sources - unless (null cSrcs) $ do - info verbosity "Building C Sources..." - sequence_ - [ do let baseCcOpts = Internal.componentCcGhcOptions verbosity implInfo - lbi bnfo clbi tmpDir filename - vanillaCcOpts = if isGhcDynamic - -- Dynamic GHC requires C sources to be built - -- with -fPIC for REPL to work. See #2207. - then baseCcOpts { ghcOptFPic = toFlag True } - else baseCcOpts - profCcOpts = vanillaCcOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True - } - sharedCcOpts = vanillaCcOpts `mappend` mempty { - ghcOptFPic = toFlag True, - ghcOptDynLinkMode = toFlag GhcDynamicOnly - } - opts | needProfiling = profCcOpts - | needDynamic = sharedCcOpts - | otherwise = vanillaCcOpts - odir = fromFlag (ghcOptObjDir opts) - createDirectoryIfMissingVerbose verbosity True odir - needsRecomp <- checkNeedsRecompilation filename opts - when needsRecomp $ - runGhcProg opts - | filename <- cSrcs ] - - -- TODO: problem here is we need the .c files built first, so we can load them - -- with ghci, but .c files can depend on .h files generated by ghc by ffi - -- exports. - case bm of - GReplExe _ _ -> runGhcProg replOpts - GReplFLib _ _ -> runGhcProg replOpts - GBuildExe _ -> do - let linkOpts = commonOpts - `mappend` linkerOpts - `mappend` mempty { - ghcOptLinkNoHsMain = toFlag (null inputFiles) - } - `mappend` (if withDynExe lbi then dynLinkerOpts else mempty) - - info verbosity "Linking..." 
- -- Work around old GHCs not relinking in this - -- situation, see #3294 - let target = targetDir targetName - when (compilerVersion comp < mkVersion [7,7]) $ do - e <- doesFileExist target - when e (removeFile target) - runGhcProg linkOpts { ghcOptOutputFile = toFlag target } - GBuildFLib flib -> do - let rtsInfo = extractRtsInfo lbi - rtsOptLinkLibs = [ - if needDynamic - then if threaded - then dynRtsThreadedLib (rtsDynamicInfo rtsInfo) - else dynRtsVanillaLib (rtsDynamicInfo rtsInfo) - else if threaded - then statRtsThreadedLib (rtsStaticInfo rtsInfo) - else statRtsVanillaLib (rtsStaticInfo rtsInfo) - ] - linkOpts = case foreignLibType flib of - ForeignLibNativeShared -> - commonOpts - `mappend` linkerOpts - `mappend` dynLinkerOpts - `mappend` mempty { - ghcOptLinkNoHsMain = toFlag True, - ghcOptShared = toFlag True, - ghcOptLinkLibs = rtsOptLinkLibs, - ghcOptLinkLibPath = toNubListR $ rtsLibPaths rtsInfo, - ghcOptFPic = toFlag True, - ghcOptLinkModDefFiles = toNubListR $ gbuildModDefFiles bm - } - -- See Note [RPATH] - `mappend` ifNeedsRPathWorkaround lbi mempty { - ghcOptLinkOptions = ["-Wl,--no-as-needed"] - , ghcOptLinkLibs = ["ffi"] - } - ForeignLibNativeStatic -> - -- this should be caught by buildFLib - -- (and if we do implement tihs, we probably don't even want to call - -- ghc here, but rather Ar.createArLibArchive or something) - cabalBug "static libraries not yet implemented" - ForeignLibTypeUnknown -> - cabalBug "unknown foreign lib type" - -- We build under a (potentially) different filename to set a - -- soname on supported platforms. See also the note for - -- @flibBuildName@. - info verbosity "Linking..." - let buildName = flibBuildName lbi flib - runGhcProg linkOpts { ghcOptOutputFile = toFlag (targetDir buildName) } - renameFile (targetDir buildName) (targetDir targetName) - -{- -Note [RPATH] -~~~~~~~~~~~~ - -Suppose that the dynamic library depends on `base`, but not (directly) on -`integer-gmp` (which, however, is a dependency of `base`). 
We will link the -library as - - gcc ... -lHSbase-4.7.0.2-ghc7.8.4 -lHSinteger-gmp-0.5.1.0-ghc7.8.4 ... - -However, on systems (like Ubuntu) where the linker gets called with `-as-needed` -by default, the linker will notice that `integer-gmp` isn't actually a direct -dependency and hence omit the link. - -Then when we attempt to link a C program against this dynamic library, the -_static_ linker will attempt to verify that all symbols can be resolved. The -dynamic library itself does not require any symbols from `integer-gmp`, but -`base` does. In order to verify that the symbols used by `base` can be -resolved, the static linker needs to be able to _find_ integer-gmp. - -Finding the `base` dependency is simple, because the dynamic elf header -(`readelf -d`) for the library that we have created looks something like - - (NEEDED) Shared library: [libHSbase-4.7.0.2-ghc7.8.4.so] - (RPATH) Library rpath: [/path/to/base-4.7.0.2:...] - -However, when it comes to resolving the dependency on `integer-gmp`, it needs -to look at the dynamic header for `base`. On modern ghc (7.8 and higher) this -looks something like - - (NEEDED) Shared library: [libHSinteger-gmp-0.5.1.0-ghc7.8.4.so] - (RPATH) Library rpath: [$ORIGIN/../integer-gmp-0.5.1.0:...] - -This specifies the location of `integer-gmp` _in terms of_ the location of base -(using the `$ORIGIN`) variable. But here's the crux: when the static linker -attempts to verify that all symbols can be resolved, [**IT DOES NOT RESOLVE -`$ORIGIN`**](http://stackoverflow.com/questions/6323603/ld-using-rpath-origin-inside-a-shared-library-recursive). -As a consequence, it will not be able to resolve the symbols and report the -missing symbols as errors, _even though the dynamic linker **would** be able to -resolve these symbols_. 
We can tell the static linker not to report these -errors by using `--unresolved-symbols=ignore-all` and all will be fine when we -run the program ([(indeed, this is what the gold linker -does)](https://sourceware.org/ml/binutils/2013-05/msg00038.html), but it makes -the resulting library more difficult to use. - -Instead what we can do is make sure that the generated dynamic library has -explicit top-level dependencies on these libraries. This means that the static -linker knows where to find them, and when we have transitive dependencies on -the same libraries the linker will only load them once, so we avoid needing to -look at the `RPATH` of our dependencies. We can do this by passing -`--no-as-needed` to the linker, so that it doesn't omit any libraries. - -Note that on older ghc (7.6 and before) the Haskell libraries don't have an -RPATH set at all, which makes it even more important that we make these -top-level dependencies. - -Finally, we have to explicitly link against `libffi` for the same reason. For -newer ghc this _happens_ to be unnecessary on many systems because `libffi` is -a library which is not specific to GHC, and when the static linker verifies -that all symbols can be resolved it will find the `libffi` that is globally -installed (completely independent from ghc). Of course, this may well be the -_wrong_ version of `libffi`, but it's quite possible that symbol resolution -happens to work. This is of course the wrong approach, which is why we link -explicitly against `libffi` so that we will find the _right_ version of -`libffi`. --} - --- | Do we need the RPATH workaround? --- --- See Note [RPATH]. 
-ifNeedsRPathWorkaround :: Monoid a => LocalBuildInfo -> a -> a -ifNeedsRPathWorkaround lbi a = - case hostPlatform lbi of - Platform _ Linux -> a - _otherwise -> mempty - -data DynamicRtsInfo = DynamicRtsInfo { - dynRtsVanillaLib :: FilePath - , dynRtsThreadedLib :: FilePath - , dynRtsDebugLib :: FilePath - , dynRtsEventlogLib :: FilePath - , dynRtsThreadedDebugLib :: FilePath - , dynRtsThreadedEventlogLib :: FilePath - } - -data StaticRtsInfo = StaticRtsInfo { - statRtsVanillaLib :: FilePath - , statRtsThreadedLib :: FilePath - , statRtsDebugLib :: FilePath - , statRtsEventlogLib :: FilePath - , statRtsThreadedDebugLib :: FilePath - , statRtsThreadedEventlogLib :: FilePath - , statRtsProfilingLib :: FilePath - , statRtsThreadedProfilingLib :: FilePath - } - -data RtsInfo = RtsInfo { - rtsDynamicInfo :: DynamicRtsInfo - , rtsStaticInfo :: StaticRtsInfo - , rtsLibPaths :: [FilePath] - } - --- | Extract (and compute) information about the RTS library --- --- TODO: This hardcodes the name as @HSrts-ghc@. I don't know if we can --- find this information somewhere. We can lookup the 'hsLibraries' field of --- 'InstalledPackageInfo' but it will tell us @["HSrts", "Cffi"]@, which --- doesn't really help. 
-extractRtsInfo :: LocalBuildInfo -> RtsInfo -extractRtsInfo lbi = - case PackageIndex.lookupPackageName (installedPkgs lbi) (mkPackageName "rts") of - [(_, [rts])] -> aux rts - _otherwise -> error "No (or multiple) ghc rts package is registered" - where - aux :: InstalledPackageInfo -> RtsInfo - aux rts = RtsInfo { - rtsDynamicInfo = DynamicRtsInfo { - dynRtsVanillaLib = withGhcVersion "HSrts" - , dynRtsThreadedLib = withGhcVersion "HSrts_thr" - , dynRtsDebugLib = withGhcVersion "HSrts_debug" - , dynRtsEventlogLib = withGhcVersion "HSrts_l" - , dynRtsThreadedDebugLib = withGhcVersion "HSrts_thr_debug" - , dynRtsThreadedEventlogLib = withGhcVersion "HSrts_thr_l" - } - , rtsStaticInfo = StaticRtsInfo { - statRtsVanillaLib = "HSrts" - , statRtsThreadedLib = "HSrts_thr" - , statRtsDebugLib = "HSrts_debug" - , statRtsEventlogLib = "HSrts_l" - , statRtsThreadedDebugLib = "HSrts_thr_debug" - , statRtsThreadedEventlogLib = "HSrts_thr_l" - , statRtsProfilingLib = "HSrts_p" - , statRtsThreadedProfilingLib = "HSrts_thr_p" - } - , rtsLibPaths = InstalledPackageInfo.libraryDirs rts - } - withGhcVersion = (++ ("-ghc" ++ display (compilerVersion (compiler lbi)))) - --- | Returns True if the modification date of the given source file is newer than --- the object file we last compiled for it, or if no object file exists yet. -checkNeedsRecompilation :: FilePath -> GhcOptions -> NoCallStackIO Bool -checkNeedsRecompilation filename opts = filename `moreRecentFile` oname - where oname = getObjectFileName filename opts - --- | Finds the object file name of the given source file -getObjectFileName :: FilePath -> GhcOptions -> FilePath -getObjectFileName filename opts = oname - where odir = fromFlag (ghcOptObjDir opts) - oext = fromFlagOrDefault "o" (ghcOptObjSuffix opts) - oname = odir replaceExtension filename oext - --- | Calculate the RPATHs for the component we are building. --- --- Calculates relative RPATHs when 'relocatable' is set. 
-getRPaths :: LocalBuildInfo - -> ComponentLocalBuildInfo -- ^ Component we are building - -> NoCallStackIO (NubListR FilePath) -getRPaths lbi clbi | supportRPaths hostOS = do - libraryPaths <- depLibraryPaths False (relocatable lbi) lbi clbi - let hostPref = case hostOS of - OSX -> "@loader_path" - _ -> "$ORIGIN" - relPath p = if isRelative p then hostPref p else p - rpaths = toNubListR (map relPath libraryPaths) - return rpaths - where - (Platform _ hostOS) = hostPlatform lbi - compid = compilerId . compiler $ lbi - - -- The list of RPath-supported operating systems below reflects the - -- platforms on which Cabal's RPATH handling is tested. It does _NOT_ - -- reflect whether the OS supports RPATH. - - -- E.g. when this comment was written, the *BSD operating systems were - -- untested with regards to Cabal RPATH handling, and were hence set to - -- 'False', while those operating systems themselves do support RPATH. - supportRPaths Linux   = True - supportRPaths Windows = False - supportRPaths OSX   = True - supportRPaths FreeBSD   = - case compid of - CompilerId GHC ver | ver >= mkVersion [7,10,2] -> True - _ -> False - supportRPaths OpenBSD   = False - supportRPaths NetBSD   = False - supportRPaths DragonFly = False - supportRPaths Solaris = False - supportRPaths AIX = False - supportRPaths HPUX = False - supportRPaths IRIX = False - supportRPaths HaLVM = False - supportRPaths IOS = False - supportRPaths Android = False - supportRPaths Ghcjs = False - supportRPaths Hurd = False - supportRPaths (OtherOS _) = False - -- Do _not_ add a default case so that we get a warning here when a new OS - -- is added. - -getRPaths _ _ = return mempty - --- | Remove the "-threaded" flag when building a foreign library, as it has no --- effect when used with "-shared". Returns the updated 'BuildInfo', along --- with whether or not the flag was present, so we can use it to link against --- the appropriate RTS on our own. 
-popThreadedFlag :: BuildInfo -> (BuildInfo, Bool) -popThreadedFlag bi = - ( bi { options = filterHcOptions (/= "-threaded") (options bi) } - , hasThreaded (options bi)) - - where - filterHcOptions :: (String -> Bool) - -> [(CompilerFlavor, [String])] - -> [(CompilerFlavor, [String])] - filterHcOptions p hcoptss = - [ (hc, if hc == GHC then filter p opts else opts) - | (hc, opts) <- hcoptss ] - - hasThreaded :: [(CompilerFlavor, [String])] -> Bool - hasThreaded hcoptss = - or [ if hc == GHC then elem "-threaded" opts else False - | (hc, opts) <- hcoptss ] - --- | Extracts a String representing a hash of the ABI of a built --- library. It can fail if the library has not yet been built. --- -libAbiHash :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO String -libAbiHash verbosity _pkg_descr lbi lib clbi = do - let - libBi = libBuildInfo lib - comp = compiler lbi - platform = hostPlatform lbi - vanillaArgs0 = - (componentGhcOptions verbosity lbi libBi clbi (componentBuildDir lbi clbi)) - `mappend` mempty { - ghcOptMode = toFlag GhcModeAbiHash, - ghcOptInputModules = toNubListR $ exposedModules lib - } - vanillaArgs = - -- Package DBs unnecessary, and break ghc-cabal. See #3633 - -- BUT, put at least the global database so that 7.4 doesn't - -- break. 
- vanillaArgs0 { ghcOptPackageDBs = [GlobalPackageDB] - , ghcOptPackages = mempty } - sharedArgs = vanillaArgs `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptFPic = toFlag True, - ghcOptHiSuffix = toFlag "dyn_hi", - ghcOptObjSuffix = toFlag "dyn_o", - ghcOptExtra = hcSharedOptions GHC libBi - } - profArgs = vanillaArgs `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptProfilingAuto = Internal.profDetailLevelFlag True - (withProfLibDetail lbi), - ghcOptHiSuffix = toFlag "p_hi", - ghcOptObjSuffix = toFlag "p_o", - ghcOptExtra = hcProfOptions GHC libBi - } - ghcArgs - | withVanillaLib lbi = vanillaArgs - | withSharedLib lbi = sharedArgs - | withProfLib lbi = profArgs - | otherwise = error "libAbiHash: Can't find an enabled library way" - - (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi) - hash <- getProgramInvocationOutput verbosity - (ghcInvocation ghcProg comp platform ghcArgs) - return (takeWhile (not . isSpace) hash) - -componentGhcOptions :: Verbosity -> LocalBuildInfo - -> BuildInfo -> ComponentLocalBuildInfo -> FilePath - -> GhcOptions -componentGhcOptions verbosity lbi = - Internal.componentGhcOptions verbosity implInfo lbi - where - comp = compiler lbi - implInfo = getImplInfo comp - -componentCcGhcOptions :: Verbosity -> LocalBuildInfo - -> BuildInfo -> ComponentLocalBuildInfo - -> FilePath -> FilePath - -> GhcOptions -componentCcGhcOptions verbosity lbi = - Internal.componentCcGhcOptions verbosity implInfo lbi - where - comp = compiler lbi - implInfo = getImplInfo comp - --- ----------------------------------------------------------------------------- --- Installing - --- |Install executables for GHC. 
-installExe :: Verbosity - -> LocalBuildInfo - -> FilePath -- ^Where to copy the files to - -> FilePath -- ^Build location - -> (FilePath, FilePath) -- ^Executable (prefix,suffix) - -> PackageDescription - -> Executable - -> IO () -installExe verbosity lbi binDir buildPref - (progprefix, progsuffix) _pkg exe = do - createDirectoryIfMissingVerbose verbosity True binDir - let exeName' = unUnqualComponentName $ exeName exe - exeFileName = exeTargetName (hostPlatform lbi) exe - fixedExeBaseName = progprefix ++ exeName' ++ progsuffix - installBinary dest = do - installExecutableFile verbosity - (buildPref exeName' exeFileName) - (dest <.> exeExtension (hostPlatform lbi)) - when (stripExes lbi) $ - Strip.stripExe verbosity (hostPlatform lbi) (withPrograms lbi) - (dest <.> exeExtension (hostPlatform lbi)) - installBinary (binDir fixedExeBaseName) - --- |Install foreign library for GHC. -installFLib :: Verbosity - -> LocalBuildInfo - -> FilePath -- ^install location - -> FilePath -- ^Build location - -> PackageDescription - -> ForeignLib - -> IO () -installFLib verbosity lbi targetDir builtDir _pkg flib = - install (foreignLibIsShared flib) - builtDir - targetDir - (flibTargetName lbi flib) - where - install isShared srcDir dstDir name = do - let src = srcDir name - dst = dstDir name - createDirectoryIfMissingVerbose verbosity True targetDir - -- TODO: Should we strip? (stripLibs lbi) - if isShared - then installExecutableFile verbosity src dst - else installOrdinaryFile verbosity src dst - -- Now install appropriate symlinks if library is versioned - let (Platform _ os) = hostPlatform lbi - when (not (null (foreignLibVersion flib os))) $ do - when (os /= Linux) $ die' verbosity - -- It should be impossible to get here. - "Can't install foreign-library symlink on non-Linux OS" -#ifndef mingw32_HOST_OS - -- 'createSymbolicLink file1 file2' creates a symbolic link - -- named 'file2' which points to the file 'file1'. 
- -- Note that we do want a symlink to 'name' rather than - -- 'dst', because the symlink will be relative to the - -- directory it's created in. - -- Finally, we first create the symlinks in a temporary - -- directory and then rename to simulate 'ln --force'. - withTempDirectory verbosity dstDir nm $ \tmpDir -> do - let link1 = flibBuildName lbi flib - link2 = "lib" ++ nm <.> "so" - createSymbolicLink name (tmpDir link1) - renameFile (tmpDir link1) (dstDir link1) - createSymbolicLink name (tmpDir link2) - renameFile (tmpDir link2) (dstDir link2) - where - nm :: String - nm = unUnqualComponentName $ foreignLibName flib -#endif /* mingw32_HOST_OS */ - - --- |Install for ghc, .hi, .a and, if --with-ghci given, .o -installLib :: Verbosity - -> LocalBuildInfo - -> FilePath -- ^install location - -> FilePath -- ^install location for dynamic libraries - -> FilePath -- ^Build location - -> PackageDescription - -> Library - -> ComponentLocalBuildInfo - -> IO () -installLib verbosity lbi targetDir dynlibTargetDir _builtDir _pkg lib clbi = do - -- copy .hi files over: - whenVanilla $ copyModuleFiles "hi" - whenProf $ copyModuleFiles "p_hi" - whenShared $ copyModuleFiles "dyn_hi" - - -- copy the built library files over: - whenHasCode $ do - whenVanilla $ do - sequence_ [ installOrdinary builtDir targetDir (mkGenericStaticLibName (l ++ f)) - | l <- getHSLibraryName (componentUnitId clbi):(extraBundledLibs (libBuildInfo lib)) - , f <- "":extraLibFlavours (libBuildInfo lib) - ] - whenProf $ installOrdinary builtDir targetDir profileLibName - whenGHCi $ installOrdinary builtDir targetDir ghciLibName - whenShared $ installShared builtDir dynlibTargetDir sharedLibName - - where - builtDir = componentBuildDir lbi clbi - - install isShared srcDir dstDir name = do - let src = srcDir name - dst = dstDir name - - createDirectoryIfMissingVerbose verbosity True dstDir - - if isShared - then installExecutableFile verbosity src dst - else installOrdinaryFile verbosity src dst - - when 
(stripLibs lbi) $ Strip.stripLib verbosity - (hostPlatform lbi) (withPrograms lbi) dst - - installOrdinary = install False - installShared = install True - - copyModuleFiles ext = - findModuleFiles [builtDir] [ext] (allLibModules lib clbi) - >>= installOrdinaryFiles verbosity targetDir - - compiler_id = compilerId (compiler lbi) - uid = componentUnitId clbi - profileLibName = mkProfLibName uid - ghciLibName = Internal.mkGHCiLibName uid - sharedLibName = (mkSharedLibName (hostPlatform lbi) compiler_id) uid - - hasLib = not $ null (allLibModules lib clbi) - && null (cSources (libBuildInfo lib)) - && null (cxxSources (libBuildInfo lib)) - has_code = not (componentIsIndefinite clbi) - whenHasCode = when has_code - whenVanilla = when (hasLib && withVanillaLib lbi) - whenProf = when (hasLib && withProfLib lbi && has_code) - whenGHCi = when (hasLib && withGHCiLib lbi && has_code) - whenShared = when (hasLib && withSharedLib lbi && has_code) - --- ----------------------------------------------------------------------------- --- Registering - -hcPkgInfo :: ProgramDb -> HcPkg.HcPkgInfo -hcPkgInfo progdb = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = ghcPkgProg - , HcPkg.noPkgDbStack = v < [6,9] - , HcPkg.noVerboseFlag = v < [6,11] - , HcPkg.flagPackageConf = v < [7,5] - , HcPkg.supportsDirDbs = v >= [6,8] - , HcPkg.requiresDirDbs = v >= [7,10] - , HcPkg.nativeMultiInstance = v >= [7,10] - , HcPkg.recacheMultiInstance = v >= [6,12] - , HcPkg.suppressFilesCheck = v >= [6,6] - } - where - v = versionNumbers ver - Just ghcPkgProg = lookupProgram ghcPkgProgram progdb - Just ver = programVersion ghcPkgProg - -registerPackage - :: Verbosity - -> ProgramDb - -> PackageDBStack - -> InstalledPackageInfo - -> HcPkg.RegisterOptions - -> IO () -registerPackage verbosity progdb packageDbs installedPkgInfo registerOptions = - HcPkg.register (hcPkgInfo progdb) verbosity packageDbs - installedPkgInfo registerOptions - -pkgRoot :: Verbosity -> LocalBuildInfo -> PackageDB -> IO FilePath -pkgRoot 
verbosity lbi = pkgRoot' - where - pkgRoot' GlobalPackageDB = - let Just ghcProg = lookupProgram ghcProgram (withPrograms lbi) - in fmap takeDirectory (getGlobalPackageDB verbosity ghcProg) - pkgRoot' UserPackageDB = do - appDir <- getAppUserDataDirectory "ghc" - let ver = compilerVersion (compiler lbi) - subdir = System.Info.arch ++ '-':System.Info.os - ++ '-':display ver - rootDir = appDir subdir - -- We must create the root directory for the user package database if it - -- does not yet exists. Otherwise '${pkgroot}' will resolve to a - -- directory at the time of 'ghc-pkg register', and registration will - -- fail. - createDirectoryIfMissing True rootDir - return rootDir - pkgRoot' (SpecificPackageDB fp) = return (takeDirectory fp) - --- ----------------------------------------------------------------------------- --- Utils - -isDynamic :: Compiler -> Bool -isDynamic = Internal.ghcLookupProperty "GHC Dynamic" - -supportsDynamicToo :: Compiler -> Bool -supportsDynamicToo = Internal.ghcLookupProperty "Support dynamic-too" - -withExt :: FilePath -> String -> FilePath -withExt fp ext = fp <.> if takeExtension fp /= ('.':ext) then ext else "" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/GHCJS.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/GHCJS.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/GHCJS.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/GHCJS.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,890 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - -module Distribution.Simple.GHCJS ( - configure, getInstalledPackages, getPackageDBContents, - buildLib, buildExe, - replLib, replExe, - startInterpreter, - installLib, installExe, - libAbiHash, - hcPkgInfo, - registerPackage, - componentGhcOptions, - getLibDir, - isDynamic, - getGlobalPackageDB, - 
runCmd - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.UnqualComponentName -import Distribution.Simple.GHC.ImplInfo -import qualified Distribution.Simple.GHC.Internal as Internal -import Distribution.PackageDescription as PD -import Distribution.InstalledPackageInfo -import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.LocalBuildInfo -import qualified Distribution.Simple.Hpc as Hpc -import Distribution.Simple.BuildPaths -import Distribution.Simple.Utils -import Distribution.Simple.Program -import qualified Distribution.Simple.Program.HcPkg as HcPkg -import qualified Distribution.Simple.Program.Ar as Ar -import qualified Distribution.Simple.Program.Ld as Ld -import qualified Distribution.Simple.Program.Strip as Strip -import Distribution.Simple.Program.GHC -import Distribution.Simple.Setup hiding ( Flag ) -import qualified Distribution.Simple.Setup as Cabal -import Distribution.Simple.Compiler hiding ( Flag ) -import Distribution.Version -import Distribution.System -import Distribution.Verbosity -import Distribution.Utils.NubList -import Distribution.Text -import Distribution.Types.UnitId - -import qualified Data.Map as Map -import System.Directory ( doesFileExist ) -import System.FilePath ( (), (<.>), takeExtension - , takeDirectory, replaceExtension ) - -configure :: Verbosity -> Maybe FilePath -> Maybe FilePath - -> ProgramDb - -> IO (Compiler, Maybe Platform, ProgramDb) -configure verbosity hcPath hcPkgPath progdb0 = do - (ghcjsProg, ghcjsVersion, progdb1) <- - requireProgramVersion verbosity ghcjsProgram - (orLaterVersion (mkVersion [0,1])) - (userMaybeSpecifyPath "ghcjs" hcPath progdb0) - Just ghcjsGhcVersion <- findGhcjsGhcVersion verbosity (programPath ghcjsProg) - let implInfo = ghcjsVersionImplInfo ghcjsVersion ghcjsGhcVersion - - -- This is slightly tricky, we have to configure ghcjs first, then we use 
the - -- location of ghcjs to help find ghcjs-pkg in the case that the user did not - -- specify the location of ghc-pkg directly: - (ghcjsPkgProg, ghcjsPkgVersion, progdb2) <- - requireProgramVersion verbosity ghcjsPkgProgram { - programFindLocation = guessGhcjsPkgFromGhcjsPath ghcjsProg - } - anyVersion (userMaybeSpecifyPath "ghcjs-pkg" hcPkgPath progdb1) - - Just ghcjsPkgGhcjsVersion <- findGhcjsPkgGhcjsVersion - verbosity (programPath ghcjsPkgProg) - - when (ghcjsVersion /= ghcjsPkgGhcjsVersion) $ die' verbosity $ - "Version mismatch between ghcjs and ghcjs-pkg: " - ++ programPath ghcjsProg ++ " is version " ++ display ghcjsVersion ++ " " - ++ programPath ghcjsPkgProg ++ " is version " ++ display ghcjsPkgGhcjsVersion - - when (ghcjsGhcVersion /= ghcjsPkgVersion) $ die' verbosity $ - "Version mismatch between ghcjs and ghcjs-pkg: " - ++ programPath ghcjsProg - ++ " was built with GHC version " ++ display ghcjsGhcVersion ++ " " - ++ programPath ghcjsPkgProg - ++ " was built with GHC version " ++ display ghcjsPkgVersion - - -- be sure to use our versions of hsc2hs, c2hs, haddock and ghc - let hsc2hsProgram' = - hsc2hsProgram { programFindLocation = - guessHsc2hsFromGhcjsPath ghcjsProg } - c2hsProgram' = - c2hsProgram { programFindLocation = - guessC2hsFromGhcjsPath ghcjsProg } - - haddockProgram' = - haddockProgram { programFindLocation = - guessHaddockFromGhcjsPath ghcjsProg } - progdb3 = addKnownPrograms [ hsc2hsProgram', c2hsProgram', haddockProgram' ] progdb2 - - languages <- Internal.getLanguages verbosity implInfo ghcjsProg - extensions <- Internal.getExtensions verbosity implInfo ghcjsProg - - ghcInfo <- Internal.getGhcInfo verbosity implInfo ghcjsProg - let ghcInfoMap = Map.fromList ghcInfo - - let comp = Compiler { - compilerId = CompilerId GHCJS ghcjsVersion, - compilerAbiTag = AbiTag $ - "ghc" ++ intercalate "_" (map show . 
versionNumbers $ ghcjsGhcVersion), - compilerCompat = [CompilerId GHC ghcjsGhcVersion], - compilerLanguages = languages, - compilerExtensions = extensions, - compilerProperties = ghcInfoMap - } - compPlatform = Internal.targetPlatform ghcInfo - -- configure gcc and ld - let progdb4 = if ghcjsNativeToo comp - then Internal.configureToolchain implInfo - ghcjsProg ghcInfoMap progdb3 - else progdb3 - return (comp, compPlatform, progdb4) - -ghcjsNativeToo :: Compiler -> Bool -ghcjsNativeToo = Internal.ghcLookupProperty "Native Too" - -guessGhcjsPkgFromGhcjsPath :: ConfiguredProgram -> Verbosity - -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) -guessGhcjsPkgFromGhcjsPath = guessToolFromGhcjsPath ghcjsPkgProgram - -guessHsc2hsFromGhcjsPath :: ConfiguredProgram -> Verbosity - -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) -guessHsc2hsFromGhcjsPath = guessToolFromGhcjsPath hsc2hsProgram - -guessC2hsFromGhcjsPath :: ConfiguredProgram -> Verbosity - -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) -guessC2hsFromGhcjsPath = guessToolFromGhcjsPath c2hsProgram - -guessHaddockFromGhcjsPath :: ConfiguredProgram -> Verbosity - -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) -guessHaddockFromGhcjsPath = guessToolFromGhcjsPath haddockProgram - -guessToolFromGhcjsPath :: Program -> ConfiguredProgram - -> Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])) -guessToolFromGhcjsPath tool ghcjsProg verbosity searchpath - = do let toolname = programName tool - path = programPath ghcjsProg - dir = takeDirectory path - versionSuffix = takeVersionSuffix (dropExeExtension path) - guessNormal = dir toolname <.> exeExtension buildPlatform - guessGhcjsVersioned = dir (toolname ++ "-ghcjs" ++ versionSuffix) - <.> exeExtension buildPlatform - guessGhcjs = dir (toolname ++ "-ghcjs") - <.> exeExtension buildPlatform - guessVersioned = dir (toolname ++ versionSuffix) <.> exeExtension buildPlatform - guesses | null versionSuffix = [guessGhcjs, 
guessNormal] - | otherwise = [guessGhcjsVersioned, - guessGhcjs, - guessVersioned, - guessNormal] - info verbosity $ "looking for tool " ++ toolname - ++ " near compiler in " ++ dir - exists <- traverse doesFileExist guesses - case [ file | (file, True) <- zip guesses exists ] of - -- If we can't find it near ghc, fall back to the usual - -- method. - [] -> programFindLocation tool verbosity searchpath - (fp:_) -> do info verbosity $ "found " ++ toolname ++ " in " ++ fp - let lookedAt = map fst - . takeWhile (\(_file, exist) -> not exist) - $ zip guesses exists - return (Just (fp, lookedAt)) - - where takeVersionSuffix :: FilePath -> String - takeVersionSuffix = reverse . takeWhile (`elem ` "0123456789.-") . - reverse - --- | Given a single package DB, return all installed packages. -getPackageDBContents :: Verbosity -> PackageDB -> ProgramDb - -> IO InstalledPackageIndex -getPackageDBContents verbosity packagedb progdb = do - pkgss <- getInstalledPackages' verbosity [packagedb] progdb - toPackageIndex verbosity pkgss progdb - --- | Given a package DB stack, return all installed packages. -getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramDb - -> IO InstalledPackageIndex -getInstalledPackages verbosity packagedbs progdb = do - checkPackageDbEnvVar verbosity - checkPackageDbStack verbosity packagedbs - pkgss <- getInstalledPackages' verbosity packagedbs progdb - index <- toPackageIndex verbosity pkgss progdb - return $! index - -toPackageIndex :: Verbosity - -> [(PackageDB, [InstalledPackageInfo])] - -> ProgramDb - -> IO InstalledPackageIndex -toPackageIndex verbosity pkgss progdb = do - -- On Windows, various fields have $topdir/foo rather than full - -- paths. We need to substitute the right value in so that when - -- we, for example, call gcc, we have proper paths to give it. - topDir <- getLibDir' verbosity ghcjsProg - let indices = [ PackageIndex.fromList (map (Internal.substTopDir topDir) pkgs) - | (_, pkgs) <- pkgss ] - return $! 
(mconcat indices) - - where - Just ghcjsProg = lookupProgram ghcjsProgram progdb - -checkPackageDbEnvVar :: Verbosity -> IO () -checkPackageDbEnvVar verbosity = - Internal.checkPackageDbEnvVar verbosity "GHCJS" "GHCJS_PACKAGE_PATH" - -checkPackageDbStack :: Verbosity -> PackageDBStack -> IO () -checkPackageDbStack _ (GlobalPackageDB:rest) - | GlobalPackageDB `notElem` rest = return () -checkPackageDbStack verbosity rest - | GlobalPackageDB `notElem` rest = - die' verbosity $ "With current ghc versions the global package db is always used " - ++ "and must be listed first. This ghc limitation may be lifted in " - ++ "future, see http://hackage.haskell.org/trac/ghc/ticket/5977" -checkPackageDbStack verbosity _ = - die' verbosity $ "If the global package db is specified, it must be " - ++ "specified first and cannot be specified multiple times" - -getInstalledPackages' :: Verbosity -> [PackageDB] -> ProgramDb - -> IO [(PackageDB, [InstalledPackageInfo])] -getInstalledPackages' verbosity packagedbs progdb = - sequenceA - [ do pkgs <- HcPkg.dump (hcPkgInfo progdb) verbosity packagedb - return (packagedb, pkgs) - | packagedb <- packagedbs ] - -getLibDir :: Verbosity -> LocalBuildInfo -> IO FilePath -getLibDir verbosity lbi = - (reverse . dropWhile isSpace . reverse) `fmap` - getDbProgramOutput verbosity ghcjsProgram - (withPrograms lbi) ["--print-libdir"] - -getLibDir' :: Verbosity -> ConfiguredProgram -> IO FilePath -getLibDir' verbosity ghcjsProg = - (reverse . dropWhile isSpace . reverse) `fmap` - getProgramOutput verbosity ghcjsProg ["--print-libdir"] - --- | Return the 'FilePath' to the global GHC package database. -getGlobalPackageDB :: Verbosity -> ConfiguredProgram -> IO FilePath -getGlobalPackageDB verbosity ghcjsProg = - (reverse . dropWhile isSpace . 
reverse) `fmap` - getProgramOutput verbosity ghcjsProg ["--print-global-package-db"] - -toJSLibName :: String -> String -toJSLibName lib - | takeExtension lib `elem` [".dll",".dylib",".so"] - = replaceExtension lib "js_so" - | takeExtension lib == ".a" = replaceExtension lib "js_a" - | otherwise = lib <.> "js_a" - -buildLib :: Verbosity -> Cabal.Flag (Maybe Int) -> PackageDescription - -> LocalBuildInfo -> Library -> ComponentLocalBuildInfo - -> IO () -buildLib = buildOrReplLib Nothing - -replLib :: [String] -> Verbosity - -> Cabal.Flag (Maybe Int) -> PackageDescription - -> LocalBuildInfo -> Library - -> ComponentLocalBuildInfo -> IO () -replLib = buildOrReplLib . Just - -buildOrReplLib :: Maybe [String] -> Verbosity - -> Cabal.Flag (Maybe Int) -> PackageDescription - -> LocalBuildInfo -> Library - -> ComponentLocalBuildInfo -> IO () -buildOrReplLib mReplFlags verbosity numJobs pkg_descr lbi lib clbi = do - let uid = componentUnitId clbi - libTargetDir = buildDir lbi - whenVanillaLib forceVanilla = - when (not forRepl && (forceVanilla || withVanillaLib lbi)) - whenProfLib = when (not forRepl && withProfLib lbi) - whenSharedLib forceShared = - when (not forRepl && (forceShared || withSharedLib lbi)) - whenGHCiLib = when (not forRepl && withGHCiLib lbi && withVanillaLib lbi) - forRepl = maybe False (const True) mReplFlags - ifReplLib = when forRepl - replFlags = fromMaybe mempty mReplFlags - comp = compiler lbi - platform = hostPlatform lbi - implInfo = getImplInfo comp - nativeToo = ghcjsNativeToo comp - - (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) - let runGhcjsProg = runGHC verbosity ghcjsProg comp platform - libBi = libBuildInfo lib - isGhcjsDynamic = isDynamic comp - dynamicTooSupported = supportsDynamicToo comp - doingTH = usesTemplateHaskellOrQQ libBi - forceVanillaLib = doingTH && not isGhcjsDynamic - forceSharedLib = doingTH && isGhcjsDynamic - -- TH always needs default libs, even when building for profiling - - -- Determine 
if program coverage should be enabled and if so, what - -- '-hpcdir' should be. - let isCoverageEnabled = libCoverage lbi - pkg_name = display $ PD.package pkg_descr - distPref = fromFlag $ configDistPref $ configFlags lbi - hpcdir way - | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way pkg_name - | otherwise = mempty - - createDirectoryIfMissingVerbose verbosity True libTargetDir - -- TODO: do we need to put hs-boot files into place for mutually recursive - -- modules? - let cObjs = map (`replaceExtension` objExtension) (cSources libBi) - jsSrcs = jsSources libBi - baseOpts = componentGhcOptions verbosity lbi libBi clbi libTargetDir - linkJsLibOpts = mempty { - ghcOptExtra = - [ "-link-js-lib" , getHSLibraryName uid - , "-js-lib-outputdir", libTargetDir ] ++ - jsSrcs - } - vanillaOptsNoJsLib = baseOpts `mappend` mempty { - ghcOptMode = toFlag GhcModeMake, - ghcOptNumJobs = numJobs, - ghcOptInputModules = toNubListR $ allLibModules lib clbi, - ghcOptHPCDir = hpcdir Hpc.Vanilla - } - vanillaOpts = vanillaOptsNoJsLib `mappend` linkJsLibOpts - - profOpts = adjustExts "p_hi" "p_o" vanillaOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptExtra = ghcjsProfOptions libBi, - ghcOptHPCDir = hpcdir Hpc.Prof - } - sharedOpts = adjustExts "dyn_hi" "dyn_o" vanillaOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptFPic = toFlag True, - ghcOptExtra = ghcjsSharedOptions libBi, - ghcOptHPCDir = hpcdir Hpc.Dyn - } - linkerOpts = mempty { - ghcOptLinkOptions = PD.ldOptions libBi, - ghcOptLinkLibs = extraLibs libBi, - ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi, - ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi, - ghcOptInputFiles = - toNubListR $ [libTargetDir x | x <- cObjs] ++ jsSrcs - } - replOpts = vanillaOptsNoJsLib { - ghcOptExtra = Internal.filterGhciFlags - (ghcOptExtra vanillaOpts) - <> replFlags, - ghcOptNumJobs = mempty - } - `mappend` linkerOpts - `mappend` mempty { - ghcOptMode = toFlag GhcModeInteractive, 
- ghcOptOptimisation = toFlag GhcNoOptimisation - } - - vanillaSharedOpts = vanillaOpts `mappend` - mempty { - ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, - ghcOptDynHiSuffix = toFlag "dyn_hi", - ghcOptDynObjSuffix = toFlag "dyn_o", - ghcOptHPCDir = hpcdir Hpc.Dyn - } - - unless (forRepl || (null (allLibModules lib clbi) && null jsSrcs && null cObjs)) $ - do let vanilla = whenVanillaLib forceVanillaLib (runGhcjsProg vanillaOpts) - shared = whenSharedLib forceSharedLib (runGhcjsProg sharedOpts) - useDynToo = dynamicTooSupported && - (forceVanillaLib || withVanillaLib lbi) && - (forceSharedLib || withSharedLib lbi) && - null (ghcjsSharedOptions libBi) - if useDynToo - then do - runGhcjsProg vanillaSharedOpts - case (hpcdir Hpc.Dyn, hpcdir Hpc.Vanilla) of - (Cabal.Flag dynDir, Cabal.Flag vanillaDir) -> do - -- When the vanilla and shared library builds are done - -- in one pass, only one set of HPC module interfaces - -- are generated. This set should suffice for both - -- static and dynamically linked executables. We copy - -- the modules interfaces so they are available under - -- both ways. - copyDirectoryRecursive verbosity dynDir vanillaDir - _ -> return () - else if isGhcjsDynamic - then do shared; vanilla - else do vanilla; shared - whenProfLib (runGhcjsProg profOpts) - - -- build any C sources - unless (null (cSources libBi) || not nativeToo) $ do - info verbosity "Building C Sources..." 
- sequence_ - [ do let vanillaCcOpts = - (Internal.componentCcGhcOptions verbosity implInfo - lbi libBi clbi libTargetDir filename) - profCcOpts = vanillaCcOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptObjSuffix = toFlag "p_o" - } - sharedCcOpts = vanillaCcOpts `mappend` mempty { - ghcOptFPic = toFlag True, - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptObjSuffix = toFlag "dyn_o" - } - odir = fromFlag (ghcOptObjDir vanillaCcOpts) - createDirectoryIfMissingVerbose verbosity True odir - runGhcjsProg vanillaCcOpts - whenSharedLib forceSharedLib (runGhcjsProg sharedCcOpts) - whenProfLib (runGhcjsProg profCcOpts) - | filename <- cSources libBi] - - -- TODO: problem here is we need the .c files built first, so we can load them - -- with ghci, but .c files can depend on .h files generated by ghc by ffi - -- exports. - unless (null (allLibModules lib clbi)) $ - ifReplLib (runGhcjsProg replOpts) - - -- link: - when (nativeToo && not forRepl) $ do - info verbosity "Linking..." 
- let cProfObjs = map (`replaceExtension` ("p_" ++ objExtension)) - (cSources libBi) - cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension)) - (cSources libBi) - compiler_id = compilerId (compiler lbi) - vanillaLibFilePath = libTargetDir mkLibName uid - profileLibFilePath = libTargetDir mkProfLibName uid - sharedLibFilePath = libTargetDir mkSharedLibName (hostPlatform lbi) compiler_id uid - ghciLibFilePath = libTargetDir Internal.mkGHCiLibName uid - - hObjs <- Internal.getHaskellObjects implInfo lib lbi clbi - libTargetDir objExtension True - hProfObjs <- - if (withProfLib lbi) - then Internal.getHaskellObjects implInfo lib lbi clbi - libTargetDir ("p_" ++ objExtension) True - else return [] - hSharedObjs <- - if (withSharedLib lbi) - then Internal.getHaskellObjects implInfo lib lbi clbi - libTargetDir ("dyn_" ++ objExtension) False - else return [] - - unless (null hObjs && null cObjs) $ do - - let staticObjectFiles = - hObjs - ++ map (libTargetDir ) cObjs - profObjectFiles = - hProfObjs - ++ map (libTargetDir ) cProfObjs - ghciObjFiles = - hObjs - ++ map (libTargetDir ) cObjs - dynamicObjectFiles = - hSharedObjs - ++ map (libTargetDir ) cSharedObjs - -- After the relocation lib is created we invoke ghc -shared - -- with the dependencies spelled out as -package arguments - -- and ghc invokes the linker with the proper library paths - ghcSharedLinkArgs = - mempty { - ghcOptShared = toFlag True, - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptInputFiles = toNubListR dynamicObjectFiles, - ghcOptOutputFile = toFlag sharedLibFilePath, - ghcOptExtra = ghcjsSharedOptions libBi, - ghcOptNoAutoLinkPackages = toFlag True, - ghcOptPackageDBs = withPackageDB lbi, - ghcOptPackages = toNubListR $ - Internal.mkGhcOptPackages clbi, - ghcOptLinkLibs = extraLibs libBi, - ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi - } - - whenVanillaLib False $ do - Ar.createArLibArchive verbosity lbi vanillaLibFilePath staticObjectFiles - - whenProfLib $ do - 
Ar.createArLibArchive verbosity lbi profileLibFilePath profObjectFiles - - whenGHCiLib $ do - (ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi) - Ld.combineObjectFiles verbosity lbi ldProg - ghciLibFilePath ghciObjFiles - - whenSharedLib False $ - runGhcjsProg ghcSharedLinkArgs - --- | Start a REPL without loading any source files. -startInterpreter :: Verbosity -> ProgramDb -> Compiler -> Platform - -> PackageDBStack -> IO () -startInterpreter verbosity progdb comp platform packageDBs = do - let replOpts = mempty { - ghcOptMode = toFlag GhcModeInteractive, - ghcOptPackageDBs = packageDBs - } - checkPackageDbStack verbosity packageDBs - (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram progdb - runGHC verbosity ghcjsProg comp platform replOpts - -buildExe :: Verbosity -> Cabal.Flag (Maybe Int) - -> PackageDescription -> LocalBuildInfo - -> Executable -> ComponentLocalBuildInfo -> IO () -buildExe = buildOrReplExe Nothing - -replExe :: [String] -> Verbosity - -> Cabal.Flag (Maybe Int) -> PackageDescription - -> LocalBuildInfo -> Executable - -> ComponentLocalBuildInfo -> IO () -replExe = buildOrReplExe . 
Just - -buildOrReplExe :: Maybe [String] -> Verbosity - -> Cabal.Flag (Maybe Int) -> PackageDescription - -> LocalBuildInfo -> Executable - -> ComponentLocalBuildInfo -> IO () -buildOrReplExe mReplFlags verbosity numJobs _pkg_descr lbi - exe@Executable { exeName = exeName', modulePath = modPath } clbi = do - - (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) - let forRepl = maybe False (const True) mReplFlags - replFlags = fromMaybe mempty mReplFlags - comp = compiler lbi - platform = hostPlatform lbi - implInfo = getImplInfo comp - runGhcjsProg = runGHC verbosity ghcjsProg comp platform - exeBi = buildInfo exe - - let exeName'' = unUnqualComponentName exeName' - -- exeNameReal, the name that GHC really uses (with .exe on Windows) - let exeNameReal = exeName'' <.> - (if takeExtension exeName'' /= ('.':exeExtension buildPlatform) - then exeExtension buildPlatform - else "") - - let targetDir = (buildDir lbi) exeName'' - let exeDir = targetDir (exeName'' ++ "-tmp") - createDirectoryIfMissingVerbose verbosity True targetDir - createDirectoryIfMissingVerbose verbosity True exeDir - -- TODO: do we need to put hs-boot files into place for mutually recursive - -- modules? FIX: what about exeName.hi-boot? - - -- Determine if program coverage should be enabled and if so, what - -- '-hpcdir' should be. 
- let isCoverageEnabled = exeCoverage lbi - distPref = fromFlag $ configDistPref $ configFlags lbi - hpcdir way - | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way exeName'' - | otherwise = mempty - - -- build executables - - srcMainFile <- findFile (exeDir : hsSourceDirs exeBi) modPath - let isGhcjsDynamic = isDynamic comp - dynamicTooSupported = supportsDynamicToo comp - buildRunner = case clbi of - ExeComponentLocalBuildInfo {} -> False - _ -> True - isHaskellMain = elem (takeExtension srcMainFile) [".hs", ".lhs"] - jsSrcs = jsSources exeBi - cSrcs = cSources exeBi ++ [srcMainFile | not isHaskellMain] - cObjs = map (`replaceExtension` objExtension) cSrcs - nativeToo = ghcjsNativeToo comp - baseOpts = (componentGhcOptions verbosity lbi exeBi clbi exeDir) - `mappend` mempty { - ghcOptMode = toFlag GhcModeMake, - ghcOptInputFiles = toNubListR $ - [ srcMainFile | isHaskellMain], - ghcOptInputModules = toNubListR $ - [ m | not isHaskellMain, m <- exeModules exe], - ghcOptExtra = - if buildRunner then ["-build-runner"] - else mempty - } - staticOpts = baseOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcStaticOnly, - ghcOptHPCDir = hpcdir Hpc.Vanilla - } - profOpts = adjustExts "p_hi" "p_o" baseOpts `mappend` mempty { - ghcOptProfilingMode = toFlag True, - ghcOptExtra = ghcjsProfOptions exeBi, - ghcOptHPCDir = hpcdir Hpc.Prof - } - dynOpts = adjustExts "dyn_hi" "dyn_o" baseOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptExtra = ghcjsSharedOptions exeBi, - ghcOptHPCDir = hpcdir Hpc.Dyn - } - dynTooOpts = adjustExts "dyn_hi" "dyn_o" staticOpts `mappend` mempty { - ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, - ghcOptHPCDir = hpcdir Hpc.Dyn - } - linkerOpts = mempty { - ghcOptLinkOptions = PD.ldOptions exeBi, - ghcOptLinkLibs = extraLibs exeBi, - ghcOptLinkLibPath = toNubListR $ extraLibDirs exeBi, - ghcOptLinkFrameworks = toNubListR $ PD.frameworks exeBi, - ghcOptInputFiles = toNubListR $ - [exeDir x | x <- cObjs] ++ jsSrcs - 
} - replOpts = baseOpts { - ghcOptExtra = Internal.filterGhciFlags - (ghcOptExtra baseOpts) - <> replFlags - } - -- For a normal compile we do separate invocations of ghc for - -- compiling as for linking. But for repl we have to do just - -- the one invocation, so that one has to include all the - -- linker stuff too, like -l flags and any .o files from C - -- files etc. - `mappend` linkerOpts - `mappend` mempty { - ghcOptMode = toFlag GhcModeInteractive, - ghcOptOptimisation = toFlag GhcNoOptimisation - } - commonOpts | withProfExe lbi = profOpts - | withDynExe lbi = dynOpts - | otherwise = staticOpts - compileOpts | useDynToo = dynTooOpts - | otherwise = commonOpts - withStaticExe = (not $ withProfExe lbi) && (not $ withDynExe lbi) - - -- For building exe's that use TH with -prof or -dynamic we actually have - -- to build twice, once without -prof/-dynamic and then again with - -- -prof/-dynamic. This is because the code that TH needs to run at - -- compile time needs to be the vanilla ABI so it can be loaded up and run - -- by the compiler. - -- With dynamic-by-default GHC the TH object files loaded at compile-time - -- need to be .dyn_o instead of .o. - doingTH = usesTemplateHaskellOrQQ exeBi - -- Should we use -dynamic-too instead of compiling twice? - useDynToo = dynamicTooSupported && isGhcjsDynamic - && doingTH && withStaticExe && null (ghcjsSharedOptions exeBi) - compileTHOpts | isGhcjsDynamic = dynOpts - | otherwise = staticOpts - compileForTH - | forRepl = False - | useDynToo = False - | isGhcjsDynamic = doingTH && (withProfExe lbi || withStaticExe) - | otherwise = doingTH && (withProfExe lbi || withDynExe lbi) - - linkOpts = commonOpts `mappend` - linkerOpts `mappend` mempty { - ghcOptLinkNoHsMain = toFlag (not isHaskellMain) - } - - -- Build static/dynamic object files for TH, if needed. 
- when compileForTH $ - runGhcjsProg compileTHOpts { ghcOptNoLink = toFlag True - , ghcOptNumJobs = numJobs } - - unless forRepl $ - runGhcjsProg compileOpts { ghcOptNoLink = toFlag True - , ghcOptNumJobs = numJobs } - - -- build any C sources - unless (null cSrcs || not nativeToo) $ do - info verbosity "Building C Sources..." - sequence_ - [ do let opts = (Internal.componentCcGhcOptions verbosity implInfo lbi exeBi - clbi exeDir filename) `mappend` mempty { - ghcOptDynLinkMode = toFlag (if withDynExe lbi - then GhcDynamicOnly - else GhcStaticOnly), - ghcOptProfilingMode = toFlag (withProfExe lbi) - } - odir = fromFlag (ghcOptObjDir opts) - createDirectoryIfMissingVerbose verbosity True odir - runGhcjsProg opts - | filename <- cSrcs ] - - -- TODO: problem here is we need the .c files built first, so we can load them - -- with ghci, but .c files can depend on .h files generated by ghc by ffi - -- exports. - when forRepl $ runGhcjsProg replOpts - - -- link: - unless forRepl $ do - info verbosity "Linking..." 
- runGhcjsProg linkOpts { ghcOptOutputFile = toFlag (targetDir exeNameReal) } - --- |Install for ghc, .hi, .a and, if --with-ghci given, .o -installLib :: Verbosity - -> LocalBuildInfo - -> FilePath -- ^install location - -> FilePath -- ^install location for dynamic libraries - -> FilePath -- ^Build location - -> PackageDescription - -> Library - -> ComponentLocalBuildInfo - -> IO () -installLib verbosity lbi targetDir dynlibTargetDir builtDir _pkg lib clbi = do - whenVanilla $ copyModuleFiles "js_hi" - whenProf $ copyModuleFiles "js_p_hi" - whenShared $ copyModuleFiles "js_dyn_hi" - - whenVanilla $ installOrdinary builtDir targetDir $ toJSLibName vanillaLibName - whenProf $ installOrdinary builtDir targetDir $ toJSLibName profileLibName - whenShared $ installShared builtDir dynlibTargetDir $ toJSLibName sharedLibName - - when (ghcjsNativeToo $ compiler lbi) $ do - -- copy .hi files over: - whenVanilla $ copyModuleFiles "hi" - whenProf $ copyModuleFiles "p_hi" - whenShared $ copyModuleFiles "dyn_hi" - - -- copy the built library files over: - whenVanilla $ installOrdinaryNative builtDir targetDir vanillaLibName - whenProf $ installOrdinaryNative builtDir targetDir profileLibName - whenGHCi $ installOrdinaryNative builtDir targetDir ghciLibName - whenShared $ installSharedNative builtDir dynlibTargetDir sharedLibName - - where - install isShared isJS srcDir dstDir name = do - let src = srcDir name - dst = dstDir name - createDirectoryIfMissingVerbose verbosity True dstDir - - if isShared - then installExecutableFile verbosity src dst - else installOrdinaryFile verbosity src dst - - when (stripLibs lbi && not isJS) $ - Strip.stripLib verbosity - (hostPlatform lbi) (withPrograms lbi) dst - - installOrdinary = install False True - installShared = install True True - - installOrdinaryNative = install False False - installSharedNative = install True False - - copyModuleFiles ext = - findModuleFiles [builtDir] [ext] (allLibModules lib clbi) - >>= installOrdinaryFiles 
verbosity targetDir - - compiler_id = compilerId (compiler lbi) - uid = componentUnitId clbi - vanillaLibName = mkLibName uid - profileLibName = mkProfLibName uid - ghciLibName = Internal.mkGHCiLibName uid - sharedLibName = (mkSharedLibName (hostPlatform lbi) compiler_id) uid - - hasLib = not $ null (allLibModules lib clbi) - && null (cSources (libBuildInfo lib)) - whenVanilla = when (hasLib && withVanillaLib lbi) - whenProf = when (hasLib && withProfLib lbi) - whenGHCi = when (hasLib && withGHCiLib lbi) - whenShared = when (hasLib && withSharedLib lbi) - -installExe :: Verbosity - -> LocalBuildInfo - -> FilePath -- ^Where to copy the files to - -> FilePath -- ^Build location - -> (FilePath, FilePath) -- ^Executable (prefix,suffix) - -> PackageDescription - -> Executable - -> IO () -installExe verbosity lbi binDir buildPref - (progprefix, progsuffix) _pkg exe = do - createDirectoryIfMissingVerbose verbosity True binDir - let exeName' = unUnqualComponentName $ exeName exe - exeFileName = exeName' - fixedExeBaseName = progprefix ++ exeName' ++ progsuffix - installBinary dest = do - runDbProgram verbosity ghcjsProgram (withPrograms lbi) $ - [ "--install-executable" - , buildPref exeName' exeFileName - , "-o", dest - ] ++ - case (stripExes lbi, lookupProgram stripProgram $ withPrograms lbi) of - (True, Just strip) -> ["-strip-program", programPath strip] - _ -> [] - installBinary (binDir fixedExeBaseName) - -libAbiHash :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO String -libAbiHash verbosity _pkg_descr lbi lib clbi = do - let - libBi = libBuildInfo lib - comp = compiler lbi - platform = hostPlatform lbi - vanillaArgs = - (componentGhcOptions verbosity lbi libBi clbi (buildDir lbi)) - `mappend` mempty { - ghcOptMode = toFlag GhcModeAbiHash, - ghcOptInputModules = toNubListR $ PD.exposedModules lib - } - profArgs = adjustExts "js_p_hi" "js_p_o" vanillaArgs `mappend` mempty { - ghcOptProfilingMode = toFlag True, - 
ghcOptExtra = ghcjsProfOptions libBi - } - ghcArgs | withVanillaLib lbi = vanillaArgs - | withProfLib lbi = profArgs - | otherwise = error "libAbiHash: Can't find an enabled library way" - -- - (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) - hash <- getProgramInvocationOutput verbosity - (ghcInvocation ghcjsProg comp platform ghcArgs) - return (takeWhile (not . isSpace) hash) - -adjustExts :: String -> String -> GhcOptions -> GhcOptions -adjustExts hiSuf objSuf opts = - opts `mappend` mempty { - ghcOptHiSuffix = toFlag hiSuf, - ghcOptObjSuffix = toFlag objSuf - } - -registerPackage :: Verbosity - -> ProgramDb - -> PackageDBStack - -> InstalledPackageInfo - -> HcPkg.RegisterOptions - -> IO () -registerPackage verbosity progdb packageDbs installedPkgInfo registerOptions = - HcPkg.register (hcPkgInfo progdb) verbosity packageDbs - installedPkgInfo registerOptions - -componentGhcOptions :: Verbosity -> LocalBuildInfo - -> BuildInfo -> ComponentLocalBuildInfo -> FilePath - -> GhcOptions -componentGhcOptions verbosity lbi bi clbi odir = - let opts = Internal.componentGhcOptions verbosity implInfo lbi bi clbi odir - comp = compiler lbi - implInfo = getImplInfo comp - in opts { ghcOptExtra = ghcOptExtra opts `mappend` hcOptions GHCJS bi - } - -ghcjsProfOptions :: BuildInfo -> [String] -ghcjsProfOptions bi = - hcProfOptions GHC bi `mappend` hcProfOptions GHCJS bi - -ghcjsSharedOptions :: BuildInfo -> [String] -ghcjsSharedOptions bi = - hcSharedOptions GHC bi `mappend` hcSharedOptions GHCJS bi - -isDynamic :: Compiler -> Bool -isDynamic = Internal.ghcLookupProperty "GHC Dynamic" - -supportsDynamicToo :: Compiler -> Bool -supportsDynamicToo = Internal.ghcLookupProperty "Support dynamic-too" - -findGhcjsGhcVersion :: Verbosity -> FilePath -> IO (Maybe Version) -findGhcjsGhcVersion verbosity pgm = - findProgramVersion "--numeric-ghc-version" id verbosity pgm - -findGhcjsPkgGhcjsVersion :: Verbosity -> FilePath -> IO (Maybe Version) 
-findGhcjsPkgGhcjsVersion verbosity pgm = - findProgramVersion "--numeric-ghcjs-version" id verbosity pgm - --- ----------------------------------------------------------------------------- --- Registering - -hcPkgInfo :: ProgramDb -> HcPkg.HcPkgInfo -hcPkgInfo progdb = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = ghcjsPkgProg - , HcPkg.noPkgDbStack = False - , HcPkg.noVerboseFlag = False - , HcPkg.flagPackageConf = False - , HcPkg.supportsDirDbs = True - , HcPkg.requiresDirDbs = ver >= v7_10 - , HcPkg.nativeMultiInstance = ver >= v7_10 - , HcPkg.recacheMultiInstance = True - , HcPkg.suppressFilesCheck = True - } - where - v7_10 = mkVersion [7,10] - Just ghcjsPkgProg = lookupProgram ghcjsPkgProgram progdb - Just ver = programVersion ghcjsPkgProg - --- | Get the JavaScript file name and command and arguments to run a --- program compiled by GHCJS --- the exe should be the base program name without exe extension -runCmd :: ProgramDb -> FilePath - -> (FilePath, FilePath, [String]) -runCmd progdb exe = - ( script - , programPath ghcjsProg - , programDefaultArgs ghcjsProg ++ programOverrideArgs ghcjsProg ++ ["--run"] - ) - where - script = exe <.> "jsexe" "all" <.> "js" - Just ghcjsProg = lookupProgram ghcjsProgram progdb diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Glob.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Glob.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Glob.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Glob.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,295 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Glob --- Copyright : Isaac Jones, Simon Marlow 2003-2004 --- License : BSD3 --- 
portions Copyright (c) 2007, Galois Inc. --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Simple file globbing. - -module Distribution.Simple.Glob ( - GlobSyntaxError(..), - GlobResult(..), - matchDirFileGlob, - runDirFileGlob, - fileGlobMatches, - parseFileGlob, - explainGlobSyntaxError, - Glob, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Control.Monad (guard) - -import Distribution.Simple.Utils -import Distribution.Verbosity -import Distribution.Version - -import System.Directory (getDirectoryContents, doesDirectoryExist, doesFileExist) -import System.FilePath (joinPath, splitExtensions, splitDirectories, takeFileName, (), (<.>)) - --- Note throughout that we use splitDirectories, not splitPath. On --- Posix, this makes no difference, but, because Windows accepts both --- slash and backslash as its path separators, if we left in the --- separators from the glob we might not end up properly normalised. - -data GlobResult a - = GlobMatch a - -- ^ The glob matched the value supplied. - | GlobWarnMultiDot a - -- ^ The glob did not match the value supplied because the - -- cabal-version is too low and the extensions on the file did - -- not precisely match the glob's extensions, but rather the - -- glob was a proper suffix of the file's extensions; i.e., if - -- not for the low cabal-version, it would have matched. - | GlobMissingDirectory FilePath - -- ^ The glob couldn't match because the directory named doesn't - -- exist. The directory will be as it appears in the glob (i.e., - -- relative to the directory passed to 'matchDirFileGlob', and, - -- for 'data-files', relative to 'data-dir'). - deriving (Show, Eq, Ord, Functor) - --- | Extract the matches from a list of 'GlobResult's. --- --- Note: throws away the 'GlobMissingDirectory' results; chances are --- that you want to check for these and error out if any are present. 
-globMatches :: [GlobResult a] -> [a] -globMatches input = [ a | GlobMatch a <- input ] - -data GlobSyntaxError - = StarInDirectory - | StarInFileName - | StarInExtension - | NoExtensionOnStar - | EmptyGlob - | LiteralFileNameGlobStar - | VersionDoesNotSupportGlobStar - | VersionDoesNotSupportGlob - deriving (Eq, Show) - -explainGlobSyntaxError :: FilePath -> GlobSyntaxError -> String -explainGlobSyntaxError filepath StarInDirectory = - "invalid file glob '" ++ filepath - ++ "'. A wildcard '**' is only allowed as the final parent" - ++ " directory. Stars must not otherwise appear in the parent" - ++ " directories." -explainGlobSyntaxError filepath StarInExtension = - "invalid file glob '" ++ filepath - ++ "'. Wildcards '*' are only allowed as the" - ++ " file's base name, not in the file extension." -explainGlobSyntaxError filepath StarInFileName = - "invalid file glob '" ++ filepath - ++ "'. Wildcards '*' may only totally replace the" - ++ " file's base name, not only parts of it." -explainGlobSyntaxError filepath NoExtensionOnStar = - "invalid file glob '" ++ filepath - ++ "'. If a wildcard '*' is used it must be with an file extension." -explainGlobSyntaxError filepath LiteralFileNameGlobStar = - "invalid file glob '" ++ filepath - ++ "'. If a wildcard '**' is used as a parent directory, the" - ++ " file's base name must be a wildcard '*'." -explainGlobSyntaxError _ EmptyGlob = - "invalid file glob. A glob cannot be the empty string." -explainGlobSyntaxError filepath VersionDoesNotSupportGlobStar = - "invalid file glob '" ++ filepath - ++ "'. Using the double-star syntax requires 'cabal-version: 2.4'" - ++ " or greater. Alternatively, for compatibility with earlier Cabal" - ++ " versions, list the included directories explicitly." -explainGlobSyntaxError filepath VersionDoesNotSupportGlob = - "invalid file glob '" ++ filepath - ++ "'. Using star wildcards requires 'cabal-version: >= 1.6'. 
" - ++ "Alternatively if you require compatibility with earlier Cabal " - ++ "versions then list all the files explicitly." - -data IsRecursive = Recursive | NonRecursive - -data MultiDot = MultiDotDisabled | MultiDotEnabled - -data Glob - = GlobStem FilePath Glob - -- ^ A single subdirectory component + remainder. - | GlobFinal GlobFinal - -data GlobFinal - = FinalMatch IsRecursive MultiDot String - -- ^ First argument: Is this a @**/*.ext@ pattern? - -- Second argument: should we match against the exact extensions, or accept a suffix? - -- Third argument: the extensions to accept. - | FinalLit FilePath - -- ^ Literal file name. - -reconstructGlob :: Glob -> FilePath -reconstructGlob (GlobStem dir glob) = - dir reconstructGlob glob -reconstructGlob (GlobFinal final) = case final of - FinalMatch Recursive _ exts -> "**" "*" <.> exts - FinalMatch NonRecursive _ exts -> "*" <.> exts - FinalLit path -> path - --- | Returns 'Nothing' if the glob didn't match at all, or 'Just' the --- result if the glob matched (or would have matched with a higher --- cabal-version). 
-fileGlobMatches :: Glob -> FilePath -> Maybe (GlobResult FilePath) -fileGlobMatches pat candidate = do - match <- fileGlobMatchesSegments pat (splitDirectories candidate) - return (candidate <$ match) - -fileGlobMatchesSegments :: Glob -> [FilePath] -> Maybe (GlobResult ()) -fileGlobMatchesSegments _ [] = Nothing -fileGlobMatchesSegments pat (seg : segs) = case pat of - GlobStem dir pat' -> do - guard (dir == seg) - fileGlobMatchesSegments pat' segs - GlobFinal final -> case final of - FinalMatch Recursive multidot ext -> do - let (candidateBase, candidateExts) = splitExtensions (last $ seg:segs) - guard (not (null candidateBase)) - checkExt multidot ext candidateExts - FinalMatch NonRecursive multidot ext -> do - let (candidateBase, candidateExts) = splitExtensions seg - guard (null segs && not (null candidateBase)) - checkExt multidot ext candidateExts - FinalLit filename -> do - guard (null segs && filename == seg) - return (GlobMatch ()) - -checkExt - :: MultiDot - -> String -- ^ The pattern's extension - -> String -- ^ The candidate file's extension - -> Maybe (GlobResult ()) -checkExt multidot ext candidate - | ext == candidate = Just (GlobMatch ()) - | ext `isSuffixOf` candidate = case multidot of - MultiDotDisabled -> Just (GlobWarnMultiDot ()) - MultiDotEnabled -> Just (GlobMatch ()) - | otherwise = Nothing - -parseFileGlob :: Version -> FilePath -> Either GlobSyntaxError Glob -parseFileGlob version filepath = case reverse (splitDirectories filepath) of - [] -> - Left EmptyGlob - (filename : "**" : segments) - | allowGlobStar -> do - ext <- case splitExtensions filename of - ("*", ext) | '*' `elem` ext -> Left StarInExtension - | null ext -> Left NoExtensionOnStar - | otherwise -> Right ext - _ -> Left LiteralFileNameGlobStar - foldM addStem (GlobFinal $ FinalMatch Recursive multidot ext) segments - | otherwise -> Left VersionDoesNotSupportGlobStar - (filename : segments) -> do - pat <- case splitExtensions filename of - ("*", ext) | not allowGlob -> Left 
VersionDoesNotSupportGlob - | '*' `elem` ext -> Left StarInExtension - | null ext -> Left NoExtensionOnStar - | otherwise -> Right (FinalMatch NonRecursive multidot ext) - (_, ext) | '*' `elem` ext -> Left StarInExtension - | '*' `elem` filename -> Left StarInFileName - | otherwise -> Right (FinalLit filename) - foldM addStem (GlobFinal pat) segments - where - allowGlob = version >= mkVersion [1,6] - allowGlobStar = version >= mkVersion [2,4] - addStem pat seg - | '*' `elem` seg = Left StarInDirectory - | otherwise = Right (GlobStem seg pat) - multidot - | version >= mkVersion [2,4] = MultiDotEnabled - | otherwise = MultiDotDisabled - --- | This will 'die'' when the glob matches no files, or if the glob --- refers to a missing directory, or if the glob fails to parse. --- --- The returned values do not include the supplied @dir@ prefix, which --- must itself be a valid directory (hence, it can't be the empty --- string). -matchDirFileGlob :: Verbosity -> Version -> FilePath -> FilePath -> IO [FilePath] -matchDirFileGlob verbosity version dir filepath = case parseFileGlob version filepath of - Left err -> die' verbosity $ explainGlobSyntaxError filepath err - Right glob -> do - results <- runDirFileGlob verbosity dir glob - let missingDirectories = - [ missingDir | GlobMissingDirectory missingDir <- results ] - matches = globMatches results - -- Check for missing directories first, since we'll obviously have - -- no matches in that case. - for_ missingDirectories $ \ missingDir -> - die' verbosity $ - "filepath wildcard '" ++ filepath ++ "' refers to the directory" - ++ " '" ++ missingDir ++ "', which does not exist or is not a directory." - when (null matches) $ die' verbosity $ - "filepath wildcard '" ++ filepath - ++ "' does not match any files." - return matches - --- | Match files against a pre-parsed glob, starting in a directory. 
--- --- The returned values do not include the supplied @dir@ prefix, which --- must itself be a valid directory (hence, it can't be the empty --- string). -runDirFileGlob :: Verbosity -> FilePath -> Glob -> IO [GlobResult FilePath] -runDirFileGlob verbosity rawDir pat = do - -- The default data-dir is null. Our callers -should- be - -- converting that to '.' themselves, but it's a certainty that - -- some future call-site will forget and trigger a really - -- hard-to-debug failure if we don't check for that here. - when (null rawDir) $ - warn verbosity $ - "Null dir passed to runDirFileGlob; interpreting it " - ++ "as '.'. This is probably an internal error." - let dir = if null rawDir then "." else rawDir - debug verbosity $ "Expanding glob '" ++ reconstructGlob pat ++ "' in directory '" ++ dir ++ "'." - -- This function might be called from the project root with dir as - -- ".". Walking the tree starting there involves going into .git/ - -- and dist-newstyle/, which is a lot of work for no reward, so - -- extract the constant prefix from the pattern and start walking - -- there, and only walk as much as we need to: recursively if **, - -- the whole directory if *, and just the specific file if it's a - -- literal. - let (prefixSegments, final) = splitConstantPrefix pat - joinedPrefix = joinPath prefixSegments - case final of - FinalMatch recursive multidot exts -> do - let prefix = dir joinedPrefix - directoryExists <- doesDirectoryExist prefix - if directoryExists - then do - candidates <- case recursive of - Recursive -> getDirectoryContentsRecursive prefix - NonRecursive -> filterM (doesFileExist . 
(prefix )) =<< getDirectoryContents prefix - let checkName candidate = do - let (candidateBase, candidateExts) = splitExtensions $ takeFileName candidate - guard (not (null candidateBase)) - match <- checkExt multidot exts candidateExts - return (joinedPrefix candidate <$ match) - return $ mapMaybe checkName candidates - else - return [ GlobMissingDirectory joinedPrefix ] - FinalLit fn -> do - exists <- doesFileExist (dir joinedPrefix fn) - return [ GlobMatch (joinedPrefix fn) | exists ] - -unfoldr' :: (a -> Either r (b, a)) -> a -> ([b], r) -unfoldr' f a = case f a of - Left r -> ([], r) - Right (b, a') -> case unfoldr' f a' of - (bs, r) -> (b : bs, r) - --- | Extract the (possibly null) constant prefix from the pattern. --- This has the property that, if @(pref, final) = splitConstantPrefix pat@, --- then @pat === foldr GlobStem (GlobFinal final) pref@. -splitConstantPrefix :: Glob -> ([FilePath], GlobFinal) -splitConstantPrefix = unfoldr' step - where - step (GlobStem seg pat) = Right (seg, pat) - step (GlobFinal pat) = Left pat diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Haddock.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Haddock.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Haddock.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Haddock.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,900 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Haddock --- Copyright : Isaac Jones 2003-2005 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module deals with the @haddock@ and @hscolour@ commands. 
--- It uses information about installed packages (from @ghc-pkg@) to find the --- locations of documentation for dependent packages, so it can create links. --- --- The @hscolour@ support allows generating HTML versions of the original --- source, with coloured syntax highlighting. - -module Distribution.Simple.Haddock ( - haddock, hscolour, - - haddockPackagePaths - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import qualified Distribution.Simple.GHC as GHC -import qualified Distribution.Simple.GHCJS as GHCJS - --- local -import Distribution.Backpack.DescribeUnitId -import Distribution.Types.ForeignLib -import Distribution.Types.UnqualComponentName -import Distribution.Types.ComponentLocalBuildInfo -import Distribution.Types.ExecutableScope -import Distribution.Types.LocalBuildInfo -import Distribution.Types.TargetInfo -import Distribution.Package -import qualified Distribution.ModuleName as ModuleName -import Distribution.PackageDescription as PD hiding (Flag) -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Simple.Glob -import Distribution.Simple.Program.GHC -import Distribution.Simple.Program.ResponseFile -import Distribution.Simple.Program -import Distribution.Simple.PreProcess -import Distribution.Simple.Setup -import Distribution.Simple.Build -import Distribution.Simple.BuildTarget -import Distribution.Simple.InstallDirs -import Distribution.Simple.LocalBuildInfo hiding (substPathTemplate) -import Distribution.Simple.BuildPaths -import Distribution.Simple.Register -import qualified Distribution.Simple.Program.HcPkg as HcPkg -import qualified Distribution.Simple.PackageIndex as PackageIndex -import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo -import Distribution.InstalledPackageInfo ( InstalledPackageInfo ) -import Distribution.Simple.Utils -import Distribution.System -import Distribution.Text -import Distribution.Utils.NubList -import Distribution.Version -import Distribution.Verbosity 
-import Language.Haskell.Extension - -import Distribution.Compat.Semigroup (All (..), Any (..)) - -import Control.Monad -import Data.Either ( rights ) - -import System.Directory (getCurrentDirectory, doesDirectoryExist, doesFileExist) -import System.FilePath ( (), (<.>), normalise, isAbsolute ) -import System.IO (hClose, hPutStrLn, hSetEncoding, utf8) - --- ------------------------------------------------------------------------------ --- Types - --- | A record that represents the arguments to the haddock executable, a product --- monoid. -data HaddockArgs = HaddockArgs { - argInterfaceFile :: Flag FilePath, - -- ^ Path to the interface file, relative to argOutputDir, required. - argPackageName :: Flag PackageIdentifier, - -- ^ Package name, required. - argHideModules :: (All,[ModuleName.ModuleName]), - -- ^ (Hide modules ?, modules to hide) - argIgnoreExports :: Any, - -- ^ Ignore export lists in modules? - argLinkSource :: Flag (Template,Template,Template), - -- ^ (Template for modules, template for symbols, template for lines). - argLinkedSource :: Flag Bool, - -- ^ Generate hyperlinked sources - argQuickJump :: Flag Bool, - -- ^ Generate quickjump index - argCssFile :: Flag FilePath, - -- ^ Optional custom CSS file. - argContents :: Flag String, - -- ^ Optional URL to contents page. - argVerbose :: Any, - argOutput :: Flag [Output], - -- ^ HTML or Hoogle doc or both? Required. - argInterfaces :: [(FilePath, Maybe String, Maybe String)], - -- ^ [(Interface file, URL to the HTML docs and hyperlinked-source for links)]. - argOutputDir :: Directory, - -- ^ Where to generate the documentation. - argTitle :: Flag String, - -- ^ Page title, required. - argPrologue :: Flag String, - -- ^ Prologue text, required. - argGhcOptions :: GhcOptions, - -- ^ Additional flags to pass to GHC. - argGhcLibDir :: Flag FilePath, - -- ^ To find the correct GHC, required. - argTargets :: [FilePath] - -- ^ Modules to process. 
-} deriving Generic - --- | The FilePath of a directory, it's a monoid under '()'. -newtype Directory = Dir { unDir' :: FilePath } deriving (Read,Show,Eq,Ord) - -unDir :: Directory -> FilePath -unDir = normalise . unDir' - -type Template = String - -data Output = Html | Hoogle - --- ------------------------------------------------------------------------------ --- Haddock support - -haddock :: PackageDescription - -> LocalBuildInfo - -> [PPSuffixHandler] - -> HaddockFlags - -> IO () -haddock pkg_descr _ _ haddockFlags - | not (hasLibs pkg_descr) - && not (fromFlag $ haddockExecutables haddockFlags) - && not (fromFlag $ haddockTestSuites haddockFlags) - && not (fromFlag $ haddockBenchmarks haddockFlags) - && not (fromFlag $ haddockForeignLibs haddockFlags) - = - warn (fromFlag $ haddockVerbosity haddockFlags) $ - "No documentation was generated as this package does not contain " - ++ "a library. Perhaps you want to use the --executables, --tests," - ++ " --benchmarks or --foreign-libraries flags." - -haddock pkg_descr lbi suffixes flags' = do - let verbosity = flag haddockVerbosity - comp = compiler lbi - platform = hostPlatform lbi - - quickJmpFlag = haddockQuickJump flags' - flags = case haddockTarget of - ForDevelopment -> flags' - ForHackage -> flags' - { haddockHoogle = Flag True - , haddockHtml = Flag True - , haddockHtmlLocation = Flag (pkg_url ++ "/docs") - , haddockContents = Flag (toPathTemplate pkg_url) - , haddockLinkedSource = Flag True - , haddockQuickJump = Flag True - } - pkg_url = "/package/$pkg-$version" - flag f = fromFlag $ f flags - - tmpFileOpts = defaultTempFileOptions - { optKeepTempFiles = flag haddockKeepTempFiles } - htmlTemplate = fmap toPathTemplate . flagToMaybe . 
haddockHtmlLocation - $ flags - haddockTarget = - fromFlagOrDefault ForDevelopment (haddockForHackage flags') - - (haddockProg, version, _) <- - requireProgramVersion verbosity haddockProgram - (orLaterVersion (mkVersion [2,0])) (withPrograms lbi) - - -- various sanity checks - when (flag haddockHoogle && version < mkVersion [2,2]) $ - die' verbosity "Haddock 2.0 and 2.1 do not support the --hoogle flag." - - - when (flag haddockQuickJump && version < mkVersion [2,19]) $ do - let msg = "Haddock prior to 2.19 does not support the --quickjump flag." - alt = "The generated documentation won't have the QuickJump feature." - if Flag True == quickJmpFlag - then die' verbosity msg - else warn verbosity (msg ++ "\n" ++ alt) - - haddockGhcVersionStr <- getProgramOutput verbosity haddockProg - ["--ghc-version"] - case (simpleParse haddockGhcVersionStr, compilerCompatVersion GHC comp) of - (Nothing, _) -> die' verbosity "Could not get GHC version from Haddock" - (_, Nothing) -> die' verbosity "Could not get GHC version from compiler" - (Just haddockGhcVersion, Just ghcVersion) - | haddockGhcVersion == ghcVersion -> return () - | otherwise -> die' verbosity $ - "Haddock's internal GHC version must match the configured " - ++ "GHC version.\n" - ++ "The GHC version is " ++ display ghcVersion ++ " but " - ++ "haddock is using GHC version " ++ display haddockGhcVersion - - -- the tools match the requests, we can proceed - - -- We fall back to using HsColour only for versions of Haddock which don't - -- support '--hyperlinked-sources'. 
- when (flag haddockLinkedSource && version < mkVersion [2,17]) $ - hscolour' (warn verbosity) haddockTarget pkg_descr lbi suffixes - (defaultHscolourFlags `mappend` haddockToHscolour flags) - - libdirArgs <- getGhcLibDir verbosity lbi - let commonArgs = mconcat - [ libdirArgs - , fromFlags (haddockTemplateEnv lbi (packageId pkg_descr)) flags - , fromPackageDescription haddockTarget pkg_descr ] - - targets <- readTargetInfos verbosity pkg_descr lbi (haddockArgs flags) - - let - targets' = - case targets of - [] -> allTargetsInBuildOrder' pkg_descr lbi - _ -> targets - - internalPackageDB <- - createInternalPackageDB verbosity lbi (flag haddockDistPref) - - (\f -> foldM_ f (installedPkgs lbi) targets') $ \index target -> do - - let component = targetComponent target - clbi = targetCLBI target - - componentInitialBuildSteps (flag haddockDistPref) pkg_descr lbi clbi verbosity - - let - lbi' = lbi { - withPackageDB = withPackageDB lbi ++ [internalPackageDB], - installedPkgs = index - } - - preprocessComponent pkg_descr component lbi' clbi False verbosity suffixes - let - doExe com = case (compToExe com) of - Just exe -> do - withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi') "tmp" $ - \tmp -> do - exeArgs <- fromExecutable verbosity tmp lbi' clbi htmlTemplate - version exe - let exeArgs' = commonArgs `mappend` exeArgs - runHaddock verbosity tmpFileOpts comp platform - haddockProg exeArgs' - Nothing -> do - warn (fromFlag $ haddockVerbosity flags) - "Unsupported component, skipping..." 
- return () - -- We define 'smsg' once and then reuse it inside the case, so that - -- we don't say we are running Haddock when we actually aren't - -- (e.g., Haddock is not run on non-libraries) - smsg :: IO () - smsg = setupMessage' verbosity "Running Haddock on" (packageId pkg_descr) - (componentLocalName clbi) (maybeComponentInstantiatedWith clbi) - case component of - CLib lib -> do - withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi) "tmp" $ - \tmp -> do - smsg - libArgs <- fromLibrary verbosity tmp lbi' clbi htmlTemplate - version lib - let libArgs' = commonArgs `mappend` libArgs - runHaddock verbosity tmpFileOpts comp platform haddockProg libArgs' - - case libName lib of - Just _ -> do - pwd <- getCurrentDirectory - - let - ipi = inplaceInstalledPackageInfo - pwd (flag haddockDistPref) pkg_descr - (mkAbiHash "inplace") lib lbi' clbi - - debug verbosity $ "Registering inplace:\n" - ++ (InstalledPackageInfo.showInstalledPackageInfo ipi) - - registerPackage verbosity (compiler lbi') (withPrograms lbi') - (withPackageDB lbi') ipi - HcPkg.defaultRegisterOptions { - HcPkg.registerMultiInstance = True - } - - return $ PackageIndex.insert ipi index - Nothing -> - pure index - - CFLib flib -> (when (flag haddockForeignLibs) $ do - withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi') "tmp" $ - \tmp -> do - smsg - flibArgs <- fromForeignLib verbosity tmp lbi' clbi htmlTemplate - version flib - let libArgs' = commonArgs `mappend` flibArgs - runHaddock verbosity tmpFileOpts comp platform haddockProg libArgs') - - >> return index - - CExe _ -> (when (flag haddockExecutables) $ smsg >> doExe component) >> return index - CTest _ -> (when (flag haddockTestSuites) $ smsg >> doExe component) >> return index - CBench _ -> (when (flag haddockBenchmarks) $ smsg >> doExe component) >> return index - - for_ (extraDocFiles pkg_descr) $ \ fpath -> do - files <- matchDirFileGlob verbosity (specVersion pkg_descr) "." 
fpath - for_ files $ copyFileTo verbosity (unDir $ argOutputDir commonArgs) - --- ------------------------------------------------------------------------------ --- Contributions to HaddockArgs (see also Doctest.hs for very similar code). - -fromFlags :: PathTemplateEnv -> HaddockFlags -> HaddockArgs -fromFlags env flags = - mempty { - argHideModules = (maybe mempty (All . not) - $ flagToMaybe (haddockInternal flags), mempty), - argLinkSource = if fromFlag (haddockLinkedSource flags) - then Flag ("src/%{MODULE/./-}.html" - ,"src/%{MODULE/./-}.html#%{NAME}" - ,"src/%{MODULE/./-}.html#line-%{LINE}") - else NoFlag, - argLinkedSource = haddockLinkedSource flags, - argQuickJump = haddockQuickJump flags, - argCssFile = haddockCss flags, - argContents = fmap (fromPathTemplate . substPathTemplate env) - (haddockContents flags), - argVerbose = maybe mempty (Any . (>= deafening)) - . flagToMaybe $ haddockVerbosity flags, - argOutput = - Flag $ case [ Html | Flag True <- [haddockHtml flags] ] ++ - [ Hoogle | Flag True <- [haddockHoogle flags] ] - of [] -> [ Html ] - os -> os, - argOutputDir = maybe mempty Dir . flagToMaybe $ haddockDistPref flags, - - argGhcOptions = mempty { ghcOptExtra = ghcArgs } - } - where - ghcArgs = fromMaybe [] . lookup "ghc" . 
haddockProgramArgs $ flags - -fromPackageDescription :: HaddockTarget -> PackageDescription -> HaddockArgs -fromPackageDescription haddockTarget pkg_descr = - mempty { argInterfaceFile = Flag $ haddockName pkg_descr, - argPackageName = Flag $ packageId $ pkg_descr, - argOutputDir = Dir $ - "doc" "html" haddockDirName haddockTarget pkg_descr, - argPrologue = Flag $ if null desc then synopsis pkg_descr - else desc, - argTitle = Flag $ showPkg ++ subtitle - } - where - desc = PD.description pkg_descr - showPkg = display (packageId pkg_descr) - subtitle | null (synopsis pkg_descr) = "" - | otherwise = ": " ++ synopsis pkg_descr - -componentGhcOptions :: Verbosity -> LocalBuildInfo - -> BuildInfo -> ComponentLocalBuildInfo -> FilePath - -> GhcOptions -componentGhcOptions verbosity lbi bi clbi odir = - let f = case compilerFlavor (compiler lbi) of - GHC -> GHC.componentGhcOptions - GHCJS -> GHCJS.componentGhcOptions - _ -> error $ - "Distribution.Simple.Haddock.componentGhcOptions:" ++ - "haddock only supports GHC and GHCJS" - in f verbosity lbi bi clbi odir - -mkHaddockArgs :: Verbosity - -> FilePath - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> Maybe PathTemplate -- ^ template for HTML location - -> Version - -> [FilePath] - -> BuildInfo - -> IO HaddockArgs -mkHaddockArgs verbosity tmp lbi clbi htmlTemplate haddockVersion inFiles bi = do - ifaceArgs <- getInterfaces verbosity lbi clbi htmlTemplate - let vanillaOpts = (componentGhcOptions normal lbi bi clbi (buildDir lbi)) { - -- Noooooooooo!!!!!111 - -- haddock stomps on our precious .hi - -- and .o files. Workaround by telling - -- haddock to write them elsewhere. 
- ghcOptObjDir = toFlag tmp, - ghcOptHiDir = toFlag tmp, - ghcOptStubDir = toFlag tmp - } `mappend` getGhcCppOpts haddockVersion bi - sharedOpts = vanillaOpts { - ghcOptDynLinkMode = toFlag GhcDynamicOnly, - ghcOptFPic = toFlag True, - ghcOptHiSuffix = toFlag "dyn_hi", - ghcOptObjSuffix = toFlag "dyn_o", - ghcOptExtra = hcSharedOptions GHC bi - - } - opts <- if withVanillaLib lbi - then return vanillaOpts - else if withSharedLib lbi - then return sharedOpts - else die' verbosity $ "Must have vanilla or shared libraries " - ++ "enabled in order to run haddock" - - return ifaceArgs { - argGhcOptions = opts, - argTargets = inFiles - } - -fromLibrary :: Verbosity - -> FilePath - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> Maybe PathTemplate -- ^ template for HTML location - -> Version - -> Library - -> IO HaddockArgs -fromLibrary verbosity tmp lbi clbi htmlTemplate haddockVersion lib = do - inFiles <- map snd `fmap` getLibSourceFiles verbosity lbi lib clbi - args <- mkHaddockArgs verbosity tmp lbi clbi htmlTemplate haddockVersion - inFiles (libBuildInfo lib) - return args { - argHideModules = (mempty, otherModules (libBuildInfo lib)) - } - -fromExecutable :: Verbosity - -> FilePath - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> Maybe PathTemplate -- ^ template for HTML location - -> Version - -> Executable - -> IO HaddockArgs -fromExecutable verbosity tmp lbi clbi htmlTemplate haddockVersion exe = do - inFiles <- map snd `fmap` getExeSourceFiles verbosity lbi exe clbi - args <- mkHaddockArgs verbosity tmp lbi clbi htmlTemplate - haddockVersion inFiles (buildInfo exe) - return args { - argOutputDir = Dir $ unUnqualComponentName $ exeName exe, - argTitle = Flag $ unUnqualComponentName $ exeName exe - } - -fromForeignLib :: Verbosity - -> FilePath - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> Maybe PathTemplate -- ^ template for HTML location - -> Version - -> ForeignLib - -> IO HaddockArgs -fromForeignLib verbosity tmp lbi clbi htmlTemplate 
haddockVersion flib = do - inFiles <- map snd `fmap` getFLibSourceFiles verbosity lbi flib clbi - args <- mkHaddockArgs verbosity tmp lbi clbi htmlTemplate - haddockVersion inFiles (foreignLibBuildInfo flib) - return args { - argOutputDir = Dir $ unUnqualComponentName $ foreignLibName flib, - argTitle = Flag $ unUnqualComponentName $ foreignLibName flib - } - -compToExe :: Component -> Maybe Executable -compToExe comp = - case comp of - CTest test@TestSuite { testInterface = TestSuiteExeV10 _ f } -> - Just Executable { - exeName = testName test, - modulePath = f, - exeScope = ExecutablePublic, - buildInfo = testBuildInfo test - } - CBench bench@Benchmark { benchmarkInterface = BenchmarkExeV10 _ f } -> - Just Executable { - exeName = benchmarkName bench, - modulePath = f, - exeScope = ExecutablePublic, - buildInfo = benchmarkBuildInfo bench - } - CExe exe -> Just exe - _ -> Nothing - -getInterfaces :: Verbosity - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> Maybe PathTemplate -- ^ template for HTML location - -> IO HaddockArgs -getInterfaces verbosity lbi clbi htmlTemplate = do - (packageFlags, warnings) <- haddockPackageFlags verbosity lbi clbi htmlTemplate - traverse_ (warn (verboseUnmarkOutput verbosity)) warnings - return $ mempty { - argInterfaces = packageFlags - } - -getGhcCppOpts :: Version - -> BuildInfo - -> GhcOptions -getGhcCppOpts haddockVersion bi = - mempty { - ghcOptExtensions = toNubListR [EnableExtension CPP | needsCpp], - ghcOptCppOptions = defines - } - where - needsCpp = EnableExtension CPP `elem` usedExtensions bi - defines = [haddockVersionMacro] - haddockVersionMacro = "-D__HADDOCK_VERSION__=" - ++ show (v1 * 1000 + v2 * 10 + v3) - where - [v1, v2, v3] = take 3 $ versionNumbers haddockVersion ++ [0,0] - -getGhcLibDir :: Verbosity -> LocalBuildInfo - -> IO HaddockArgs -getGhcLibDir verbosity lbi = do - l <- case compilerFlavor (compiler lbi) of - GHC -> GHC.getLibDir verbosity lbi - GHCJS -> GHCJS.getLibDir verbosity lbi - _ -> error 
"haddock only supports GHC and GHCJS" - return $ mempty { argGhcLibDir = Flag l } - --- ------------------------------------------------------------------------------ --- | Call haddock with the specified arguments. -runHaddock :: Verbosity - -> TempFileOptions - -> Compiler - -> Platform - -> ConfiguredProgram - -> HaddockArgs - -> IO () -runHaddock verbosity tmpFileOpts comp platform haddockProg args - | null (argTargets args) = warn verbosity $ - "Haddocks are being requested, but there aren't any modules given " - ++ "to create documentation for." - | otherwise = do - let haddockVersion = fromMaybe (error "unable to determine haddock version") - (programVersion haddockProg) - renderArgs verbosity tmpFileOpts haddockVersion comp platform args $ - \(flags,result)-> do - - runProgram verbosity haddockProg flags - - notice verbosity $ "Documentation created: " ++ result - - -renderArgs :: Verbosity - -> TempFileOptions - -> Version - -> Compiler - -> Platform - -> HaddockArgs - -> (([String], FilePath) -> IO a) - -> IO a -renderArgs verbosity tmpFileOpts version comp platform args k = do - let haddockSupportsUTF8 = version >= mkVersion [2,14,4] - haddockSupportsResponseFiles = version > mkVersion [2,16,2] - createDirectoryIfMissingVerbose verbosity True outputDir - withTempFileEx tmpFileOpts outputDir "haddock-prologue.txt" $ - \prologueFileName h -> do - do - when haddockSupportsUTF8 (hSetEncoding h utf8) - hPutStrLn h $ fromFlag $ argPrologue args - hClose h - let pflag = "--prologue=" ++ prologueFileName - renderedArgs = pflag : renderPureArgs version comp platform args - if haddockSupportsResponseFiles - then - withResponseFile - verbosity - tmpFileOpts - outputDir - "haddock-response.txt" - (if haddockSupportsUTF8 then Just utf8 else Nothing) - renderedArgs - (\responseFileName -> k (["@" ++ responseFileName], result)) - else - k (renderedArgs, result) - where - outputDir = (unDir $ argOutputDir args) - result = intercalate ", " - . 
map (\o -> outputDir - case o of - Html -> "index.html" - Hoogle -> pkgstr <.> "txt") - $ arg argOutput - where - pkgstr = display $ packageName pkgid - pkgid = arg argPackageName - arg f = fromFlag $ f args - -renderPureArgs :: Version -> Compiler -> Platform -> HaddockArgs -> [String] -renderPureArgs version comp platform args = concat - [ (:[]) . (\f -> "--dump-interface="++ unDir (argOutputDir args) f) - . fromFlag . argInterfaceFile $ args - - , if isVersion 2 16 - then (\pkg -> [ "--package-name=" ++ display (pkgName pkg) - , "--package-version="++display (pkgVersion pkg) - ]) - . fromFlag . argPackageName $ args - else [] - - , [ "--since-qual=external" | isVersion 2 20 ] - - , [ "--quickjump" | isVersion 2 19 - , fromFlag . argQuickJump $ args ] - - , [ "--hyperlinked-source" | isVersion 2 17 - , fromFlag . argLinkedSource $ args ] - - , (\(All b,xs) -> bool (map (("--hide=" ++). display) xs) [] b) - . argHideModules $ args - - , bool ["--ignore-all-exports"] [] . getAny . argIgnoreExports $ args - - , maybe [] (\(m,e,l) -> - ["--source-module=" ++ m - ,"--source-entity=" ++ e] - ++ if isVersion 2 14 then ["--source-entity-line=" ++ l] - else [] - ) . flagToMaybe . argLinkSource $ args - - , maybe [] ((:[]) . ("--css="++)) . flagToMaybe . argCssFile $ args - - , maybe [] ((:[]) . ("--use-contents="++)) . flagToMaybe . argContents $ args - - , bool [] [verbosityFlag] . getAny . argVerbose $ args - - , map (\o -> case o of Hoogle -> "--hoogle"; Html -> "--html") - . fromFlag . argOutput $ args - - , renderInterfaces . argInterfaces $ args - - , (:[]) . ("--odir="++) . unDir . argOutputDir $ args - - , (:[]) . ("--title="++) - . (bool (++" (internal documentation)") - id (getAny $ argIgnoreExports args)) - . fromFlag . argTitle $ args - - , [ "--optghc=" ++ opt | let opts = argGhcOptions args - , opt <- renderGhcOptions comp platform opts ] - - , maybe [] (\l -> ["-B"++l]) $ - flagToMaybe (argGhcLibDir args) -- error if Nothing? 
- - , argTargets $ args - ] - where - renderInterfaces = map renderInterface - - renderInterface :: (FilePath, Maybe FilePath, Maybe FilePath) -> String - renderInterface (i, html, hypsrc) = "--read-interface=" ++ - (intercalate "," $ concat [ [ x | Just x <- [html] ] - , [ x | Just _ <- [html] - -- only render hypsrc path if html path - -- is given and hyperlinked-source is - -- enabled - , Just x <- [hypsrc] - , isVersion 2 17 - , fromFlag . argLinkedSource $ args - ] - , [ i ] - ]) - - bool a b c = if c then a else b - isVersion major minor = version >= mkVersion [major,minor] - verbosityFlag - | isVersion 2 5 = "--verbosity=1" - | otherwise = "--verbose" - ---------------------------------------------------------------------------------- - --- | Given a list of 'InstalledPackageInfo's, return a list of interfaces and --- HTML paths, and an optional warning for packages with missing documentation. -haddockPackagePaths :: [InstalledPackageInfo] - -> Maybe (InstalledPackageInfo -> FilePath) - -> NoCallStackIO ([( FilePath -- path to interface - -- file - - , Maybe FilePath -- url to html - -- documentation - - , Maybe FilePath -- url to hyperlinked - -- source - )] - , Maybe String -- warning about - -- missing documentation - ) -haddockPackagePaths ipkgs mkHtmlPath = do - interfaces <- sequenceA - [ case interfaceAndHtmlPath ipkg of - Nothing -> return (Left (packageId ipkg)) - Just (interface, html) -> do - - (html', hypsrc') <- - case html of - Just htmlPath -> do - let hypSrcPath = htmlPath defaultHyperlinkedSourceDirectory - hypSrcExists <- doesDirectoryExist hypSrcPath - return $ ( Just (fixFileUrl htmlPath) - , if hypSrcExists - then Just (fixFileUrl hypSrcPath) - else Nothing - ) - Nothing -> return (Nothing, Nothing) - - exists <- doesFileExist interface - if exists - then return (Right (interface, html', hypsrc')) - else return (Left pkgid) - | ipkg <- ipkgs, let pkgid = packageId ipkg - , pkgName pkgid `notElem` noHaddockWhitelist - ] - - let missing = 
[ pkgid | Left pkgid <- interfaces ] - warning = "The documentation for the following packages are not " - ++ "installed. No links will be generated to these packages: " - ++ intercalate ", " (map display missing) - flags = rights interfaces - - return (flags, if null missing then Nothing else Just warning) - - where - -- Don't warn about missing documentation for these packages. See #1231. - noHaddockWhitelist = map mkPackageName [ "rts" ] - - -- Actually extract interface and HTML paths from an 'InstalledPackageInfo'. - interfaceAndHtmlPath :: InstalledPackageInfo - -> Maybe (FilePath, Maybe FilePath) - interfaceAndHtmlPath pkg = do - interface <- listToMaybe (InstalledPackageInfo.haddockInterfaces pkg) - html <- case mkHtmlPath of - Nothing -> listToMaybe (InstalledPackageInfo.haddockHTMLs pkg) - Just mkPath -> Just (mkPath pkg) - return (interface, if null html then Nothing else Just html) - - -- The 'haddock-html' field in the hc-pkg output is often set as a - -- native path, but we need it as a URL. See #1064. Also don't "fix" - -- the path if it is an interpolated one. - fixFileUrl f | Nothing <- mkHtmlPath - , isAbsolute f = "file://" ++ f - | otherwise = f - - -- 'src' is the default hyperlinked source directory ever since. It is - -- not possible to configure that directory in any way in haddock. 
- defaultHyperlinkedSourceDirectory = "src" - - -haddockPackageFlags :: Verbosity - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> Maybe PathTemplate - -> IO ([( FilePath -- path to interface - -- file - - , Maybe FilePath -- url to html - -- documentation - - , Maybe FilePath -- url to hyperlinked - -- source - )] - , Maybe String -- warning about - -- missing documentation - ) -haddockPackageFlags verbosity lbi clbi htmlTemplate = do - let allPkgs = installedPkgs lbi - directDeps = map fst (componentPackageDeps clbi) - transitiveDeps <- case PackageIndex.dependencyClosure allPkgs directDeps of - Left x -> return x - Right inf -> die' verbosity $ "internal error when calculating transitive " - ++ "package dependencies.\nDebug info: " ++ show inf - haddockPackagePaths (PackageIndex.allPackages transitiveDeps) mkHtmlPath - where - mkHtmlPath = fmap expandTemplateVars htmlTemplate - expandTemplateVars tmpl pkg = - fromPathTemplate . substPathTemplate (env pkg) $ tmpl - env pkg = haddockTemplateEnv lbi (packageId pkg) - - -haddockTemplateEnv :: LocalBuildInfo -> PackageIdentifier -> PathTemplateEnv -haddockTemplateEnv lbi pkg_id = - (PrefixVar, prefix (installDirTemplates lbi)) - -- We want the legacy unit ID here, because it gives us nice paths - -- (Haddock people don't care about the dependencies) - : initialPathTemplateEnv - pkg_id - (mkLegacyUnitId pkg_id) - (compilerInfo (compiler lbi)) - (hostPlatform lbi) - --- ------------------------------------------------------------------------------ --- hscolour support. - -hscolour :: PackageDescription - -> LocalBuildInfo - -> [PPSuffixHandler] - -> HscolourFlags - -> IO () -hscolour = hscolour' dieNoVerbosity ForDevelopment - -hscolour' :: (String -> IO ()) -- ^ Called when the 'hscolour' exe is not found. 
- -> HaddockTarget - -> PackageDescription - -> LocalBuildInfo - -> [PPSuffixHandler] - -> HscolourFlags - -> IO () -hscolour' onNoHsColour haddockTarget pkg_descr lbi suffixes flags = - either onNoHsColour (\(hscolourProg, _, _) -> go hscolourProg) =<< - lookupProgramVersion verbosity hscolourProgram - (orLaterVersion (mkVersion [1,8])) (withPrograms lbi) - where - go :: ConfiguredProgram -> IO () - go hscolourProg = do - warn verbosity $ - "the 'cabal hscolour' command is deprecated in favour of 'cabal " ++ - "haddock --hyperlink-source' and will be removed in the next major " ++ - "release." - - setupMessage verbosity "Running hscolour for" (packageId pkg_descr) - createDirectoryIfMissingVerbose verbosity True $ - hscolourPref haddockTarget distPref pkg_descr - - withAllComponentsInBuildOrder pkg_descr lbi $ \comp clbi -> do - componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity - preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes - let - doExe com = case (compToExe com) of - Just exe -> do - let outputDir = hscolourPref haddockTarget distPref pkg_descr - unUnqualComponentName (exeName exe) "src" - runHsColour hscolourProg outputDir =<< getExeSourceFiles verbosity lbi exe clbi - Nothing -> do - warn (fromFlag $ hscolourVerbosity flags) - "Unsupported component, skipping..." 
- return () - case comp of - CLib lib -> do - let outputDir = hscolourPref haddockTarget distPref pkg_descr "src" - runHsColour hscolourProg outputDir =<< getLibSourceFiles verbosity lbi lib clbi - CFLib flib -> do - let outputDir = hscolourPref haddockTarget distPref pkg_descr - unUnqualComponentName (foreignLibName flib) "src" - runHsColour hscolourProg outputDir =<< getFLibSourceFiles verbosity lbi flib clbi - CExe _ -> when (fromFlag (hscolourExecutables flags)) $ doExe comp - CTest _ -> when (fromFlag (hscolourTestSuites flags)) $ doExe comp - CBench _ -> when (fromFlag (hscolourBenchmarks flags)) $ doExe comp - - stylesheet = flagToMaybe (hscolourCSS flags) - - verbosity = fromFlag (hscolourVerbosity flags) - distPref = fromFlag (hscolourDistPref flags) - - runHsColour prog outputDir moduleFiles = do - createDirectoryIfMissingVerbose verbosity True outputDir - - case stylesheet of -- copy the CSS file - Nothing | programVersion prog >= Just (mkVersion [1,9]) -> - runProgram verbosity prog - ["-print-css", "-o" ++ outputDir "hscolour.css"] - | otherwise -> return () - Just s -> copyFileVerbose verbosity s (outputDir "hscolour.css") - - for_ moduleFiles $ \(m, inFile) -> - runProgram verbosity prog - ["-css", "-anchor", "-o" ++ outFile m, inFile] - where - outFile m = outputDir - intercalate "-" (ModuleName.components m) <.> "html" - -haddockToHscolour :: HaddockFlags -> HscolourFlags -haddockToHscolour flags = - HscolourFlags { - hscolourCSS = haddockHscolourCss flags, - hscolourExecutables = haddockExecutables flags, - hscolourTestSuites = haddockTestSuites flags, - hscolourBenchmarks = haddockBenchmarks flags, - hscolourForeignLibs = haddockForeignLibs flags, - hscolourVerbosity = haddockVerbosity flags, - hscolourDistPref = haddockDistPref flags, - hscolourCabalFilePath = haddockCabalFilePath flags - } - --- ------------------------------------------------------------------------------ --- Boilerplate Monoid instance. 
-instance Monoid HaddockArgs where - mempty = gmempty - mappend = (<>) - -instance Semigroup HaddockArgs where - (<>) = gmappend - -instance Monoid Directory where - mempty = Dir "." - mappend = (<>) - -instance Semigroup Directory where - Dir m <> Dir n = Dir $ m n diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/HaskellSuite.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/HaskellSuite.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/HaskellSuite.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/HaskellSuite.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,228 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - -module Distribution.Simple.HaskellSuite where - -import Prelude () -import Distribution.Compat.Prelude - -import qualified Data.Map as Map (empty) - -import Distribution.Simple.Program -import Distribution.Simple.Compiler as Compiler -import Distribution.Simple.Utils -import Distribution.Simple.BuildPaths -import Distribution.Verbosity -import Distribution.Version -import Distribution.Text -import Distribution.Package -import Distribution.InstalledPackageInfo hiding (includeDirs) -import Distribution.Simple.PackageIndex as PackageIndex -import Distribution.PackageDescription -import Distribution.Simple.LocalBuildInfo -import Distribution.System (Platform) -import Distribution.Compat.Exception -import Language.Haskell.Extension -import Distribution.Simple.Program.Builtin - -configure - :: Verbosity -> Maybe FilePath -> Maybe FilePath - -> ProgramDb -> IO (Compiler, Maybe Platform, ProgramDb) -configure verbosity mbHcPath hcPkgPath progdb0 = do - - -- We have no idea how a haskell-suite tool is named, so we require at - -- least some information from the user. 
- hcPath <- - let msg = "You have to provide name or path of a haskell-suite tool (-w PATH)" - in maybe (die' verbosity msg) return mbHcPath - - when (isJust hcPkgPath) $ - warn verbosity "--with-hc-pkg option is ignored for haskell-suite" - - (comp, confdCompiler, progdb1) <- configureCompiler hcPath progdb0 - - -- Update our pkg tool. It uses the same executable as the compiler, but - -- all command start with "pkg" - (confdPkg, _) <- requireProgram verbosity haskellSuitePkgProgram progdb1 - let progdb2 = - updateProgram - confdPkg - { programLocation = programLocation confdCompiler - , programDefaultArgs = ["pkg"] - } - progdb1 - - return (comp, Nothing, progdb2) - - where - configureCompiler hcPath progdb0' = do - let - haskellSuiteProgram' = - haskellSuiteProgram - { programFindLocation = \v p -> findProgramOnSearchPath v p hcPath } - - -- NB: cannot call requireProgram right away — it'd think that - -- the program is already configured and won't reconfigure it again. - -- Instead, call configureProgram directly first. - progdb1 <- configureProgram verbosity haskellSuiteProgram' progdb0' - (confdCompiler, progdb2) <- requireProgram verbosity haskellSuiteProgram' progdb1 - - extensions <- getExtensions verbosity confdCompiler - languages <- getLanguages verbosity confdCompiler - (compName, compVersion) <- - getCompilerVersion verbosity confdCompiler - - let - comp = Compiler { - compilerId = CompilerId (HaskellSuite compName) compVersion, - compilerAbiTag = Compiler.NoAbiTag, - compilerCompat = [], - compilerLanguages = languages, - compilerExtensions = extensions, - compilerProperties = Map.empty - } - - return (comp, confdCompiler, progdb2) - -hstoolVersion :: Verbosity -> FilePath -> IO (Maybe Version) -hstoolVersion = findProgramVersion "--hspkg-version" id - -numericVersion :: Verbosity -> FilePath -> IO (Maybe Version) -numericVersion = findProgramVersion "--compiler-version" (last . 
words) - -getCompilerVersion :: Verbosity -> ConfiguredProgram -> IO (String, Version) -getCompilerVersion verbosity prog = do - output <- rawSystemStdout verbosity (programPath prog) ["--compiler-version"] - let - parts = words output - name = concat $ init parts -- there shouldn't be any spaces in the name anyway - versionStr = last parts - version <- - maybe (die' verbosity "haskell-suite: couldn't determine compiler version") return $ - simpleParse versionStr - return (name, version) - -getExtensions :: Verbosity -> ConfiguredProgram -> IO [(Extension, Maybe Compiler.Flag)] -getExtensions verbosity prog = do - extStrs <- - lines `fmap` - rawSystemStdout verbosity (programPath prog) ["--supported-extensions"] - return - [ (ext, Just $ "-X" ++ display ext) | Just ext <- map simpleParse extStrs ] - -getLanguages :: Verbosity -> ConfiguredProgram -> IO [(Language, Compiler.Flag)] -getLanguages verbosity prog = do - langStrs <- - lines `fmap` - rawSystemStdout verbosity (programPath prog) ["--supported-languages"] - return - [ (ext, "-G" ++ display ext) | Just ext <- map simpleParse langStrs ] - --- Other compilers do some kind of a packagedb stack check here. Not sure --- if we need something like that as well. -getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramDb - -> IO InstalledPackageIndex -getInstalledPackages verbosity packagedbs progdb = - liftM (PackageIndex.fromList . 
concat) $ for packagedbs $ \packagedb -> - do str <- - getDbProgramOutput verbosity haskellSuitePkgProgram progdb - ["dump", packageDbOpt packagedb] - `catchExit` \_ -> die' verbosity $ "pkg dump failed" - case parsePackages str of - Right ok -> return ok - _ -> die' verbosity "failed to parse output of 'pkg dump'" - - where - parsePackages str = - let parsed = map parseInstalledPackageInfo (splitPkgs str) - in case [ msg | ParseFailed msg <- parsed ] of - [] -> Right [ pkg | ParseOk _ pkg <- parsed ] - msgs -> Left msgs - - splitPkgs :: String -> [String] - splitPkgs = map unlines . splitWith ("---" ==) . lines - where - splitWith :: (a -> Bool) -> [a] -> [[a]] - splitWith p xs = ys : case zs of - [] -> [] - _:ws -> splitWith p ws - where (ys,zs) = break p xs - -buildLib - :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO () -buildLib verbosity pkg_descr lbi lib clbi = do - -- In future, there should be a mechanism for the compiler to request any - -- number of the above parameters (or their parts) — in particular, - -- pieces of PackageDescription. - -- - -- For now, we only pass those that we know are used. 
- - let odir = buildDir lbi - bi = libBuildInfo lib - srcDirs = hsSourceDirs bi ++ [odir] - dbStack = withPackageDB lbi - language = fromMaybe Haskell98 (defaultLanguage bi) - progdb = withPrograms lbi - pkgid = packageId pkg_descr - - runDbProgram verbosity haskellSuiteProgram progdb $ - [ "compile", "--build-dir", odir ] ++ - concat [ ["-i", d] | d <- srcDirs ] ++ - concat [ ["-I", d] | d <- [autogenComponentModulesDir lbi clbi - ,autogenPackageModulesDir lbi - ,odir] ++ includeDirs bi ] ++ - [ packageDbOpt pkgDb | pkgDb <- dbStack ] ++ - [ "--package-name", display pkgid ] ++ - concat [ ["--package-id", display ipkgid ] - | (ipkgid, _) <- componentPackageDeps clbi ] ++ - ["-G", display language] ++ - concat [ ["-X", display ex] | ex <- usedExtensions bi ] ++ - cppOptions (libBuildInfo lib) ++ - [ display modu | modu <- allLibModules lib clbi ] - - - -installLib - :: Verbosity - -> LocalBuildInfo - -> FilePath -- ^install location - -> FilePath -- ^install location for dynamic libraries - -> FilePath -- ^Build location - -> PackageDescription - -> Library - -> ComponentLocalBuildInfo - -> IO () -installLib verbosity lbi targetDir dynlibTargetDir builtDir pkg lib clbi = do - let progdb = withPrograms lbi - runDbProgram verbosity haskellSuitePkgProgram progdb $ - [ "install-library" - , "--build-dir", builtDir - , "--target-dir", targetDir - , "--dynlib-target-dir", dynlibTargetDir - , "--package-id", display $ packageId pkg - ] ++ map display (allLibModules lib clbi) - -registerPackage - :: Verbosity - -> ProgramDb - -> PackageDBStack - -> InstalledPackageInfo - -> IO () -registerPackage verbosity progdb packageDbs installedPkgInfo = do - (hspkg, _) <- requireProgram verbosity haskellSuitePkgProgram progdb - - runProgramInvocation verbosity $ - (programInvocation hspkg - ["update", packageDbOpt $ last packageDbs]) - { progInvokeInput = Just $ showInstalledPackageInfo installedPkgInfo } - -initPackageDB :: Verbosity -> ProgramDb -> FilePath -> IO () -initPackageDB 
verbosity progdb dbPath = - runDbProgram verbosity haskellSuitePkgProgram progdb - ["init", dbPath] - -packageDbOpt :: PackageDB -> String -packageDbOpt GlobalPackageDB = "--global" -packageDbOpt UserPackageDB = "--user" -packageDbOpt (SpecificPackageDB db) = "--package-db=" ++ db diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Hpc.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Hpc.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Hpc.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Hpc.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,149 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Hpc --- Copyright : Thomas Tuegel 2011 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides functions for locating various HPC-related paths and --- a function for adding the necessary options to a PackageDescription to --- build test suites with HPC enabled. - -module Distribution.Simple.Hpc - ( Way(..), guessWay - , htmlDir - , mixDir - , tixDir - , tixFilePath - , markupPackage - , markupTest - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.UnqualComponentName -import Distribution.ModuleName ( main ) -import Distribution.PackageDescription - ( TestSuite(..) - , testModules - ) -import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo(..) 
) -import Distribution.Simple.Program - ( hpcProgram - , requireProgramVersion - ) -import Distribution.Simple.Program.Hpc ( markup, union ) -import Distribution.Simple.Utils ( notice ) -import Distribution.Version ( anyVersion ) -import Distribution.Verbosity ( Verbosity() ) -import System.Directory ( createDirectoryIfMissing, doesFileExist ) -import System.FilePath - --- ------------------------------------------------------------------------- --- Haskell Program Coverage - -data Way = Vanilla | Prof | Dyn - deriving (Bounded, Enum, Eq, Read, Show) - -hpcDir :: FilePath -- ^ \"dist/\" prefix - -> Way - -> FilePath -- ^ Directory containing component's HPC .mix files -hpcDir distPref way = distPref "hpc" wayDir - where - wayDir = case way of - Vanilla -> "vanilla" - Prof -> "prof" - Dyn -> "dyn" - -mixDir :: FilePath -- ^ \"dist/\" prefix - -> Way - -> FilePath -- ^ Component name - -> FilePath -- ^ Directory containing test suite's .mix files -mixDir distPref way name = hpcDir distPref way "mix" name - -tixDir :: FilePath -- ^ \"dist/\" prefix - -> Way - -> FilePath -- ^ Component name - -> FilePath -- ^ Directory containing test suite's .tix files -tixDir distPref way name = hpcDir distPref way "tix" name - --- | Path to the .tix file containing a test suite's sum statistics. -tixFilePath :: FilePath -- ^ \"dist/\" prefix - -> Way - -> FilePath -- ^ Component name - -> FilePath -- ^ Path to test suite's .tix file -tixFilePath distPref way name = tixDir distPref way name name <.> "tix" - -htmlDir :: FilePath -- ^ \"dist/\" prefix - -> Way - -> FilePath -- ^ Component name - -> FilePath -- ^ Path to test suite's HTML markup directory -htmlDir distPref way name = hpcDir distPref way "html" name - --- | Attempt to guess the way the test suites in this package were compiled --- and linked with the library so the correct module interfaces are found. 
-guessWay :: LocalBuildInfo -> Way -guessWay lbi - | withProfExe lbi = Prof - | withDynExe lbi = Dyn - | otherwise = Vanilla - --- | Generate the HTML markup for a test suite. -markupTest :: Verbosity - -> LocalBuildInfo - -> FilePath -- ^ \"dist/\" prefix - -> String -- ^ Library name - -> TestSuite - -> IO () -markupTest verbosity lbi distPref libName suite = do - tixFileExists <- doesFileExist $ tixFilePath distPref way $ testName' - when tixFileExists $ do - -- behaviour of 'markup' depends on version, so we need *a* version - -- but no particular one - (hpc, hpcVer, _) <- requireProgramVersion verbosity - hpcProgram anyVersion (withPrograms lbi) - let htmlDir_ = htmlDir distPref way testName' - markup hpc hpcVer verbosity - (tixFilePath distPref way testName') mixDirs - htmlDir_ - (testModules suite ++ [ main ]) - notice verbosity $ "Test coverage report written to " - ++ htmlDir_ "hpc_index" <.> "html" - where - way = guessWay lbi - testName' = unUnqualComponentName $ testName suite - mixDirs = map (mixDir distPref way) [ testName', libName ] - --- | Generate the HTML markup for all of a package's test suites. 
-markupPackage :: Verbosity - -> LocalBuildInfo - -> FilePath -- ^ \"dist/\" prefix - -> String -- ^ Library name - -> [TestSuite] - -> IO () -markupPackage verbosity lbi distPref libName suites = do - let tixFiles = map (tixFilePath distPref way) testNames - tixFilesExist <- traverse doesFileExist tixFiles - when (and tixFilesExist) $ do - -- behaviour of 'markup' depends on version, so we need *a* version - -- but no particular one - (hpc, hpcVer, _) <- requireProgramVersion verbosity - hpcProgram anyVersion (withPrograms lbi) - let outFile = tixFilePath distPref way libName - htmlDir' = htmlDir distPref way libName - excluded = concatMap testModules suites ++ [ main ] - createDirectoryIfMissing True $ takeDirectory outFile - union hpc verbosity tixFiles outFile excluded - markup hpc hpcVer verbosity outFile mixDirs htmlDir' excluded - notice verbosity $ "Package coverage report written to " - ++ htmlDir' "hpc_index.html" - where - way = guessWay lbi - testNames = fmap (unUnqualComponentName . 
testName) suites - mixDirs = map (mixDir distPref way) $ libName : testNames diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/InstallDirs.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/InstallDirs.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/InstallDirs.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/InstallDirs.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,614 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.InstallDirs --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This manages everything to do with where files get installed (though does --- not get involved with actually doing any installation). It provides an --- 'InstallDirs' type which is a set of directories for where to install --- things. It also handles the fact that we use templates in these install --- dirs. For example most install dirs are relative to some @$prefix@ and by --- changing the prefix all other dirs still end up changed appropriately. So it --- provides a 'PathTemplate' type and functions for substituting for these --- templates. 
- -module Distribution.Simple.InstallDirs ( - InstallDirs(..), - InstallDirTemplates, - defaultInstallDirs, - defaultInstallDirs', - combineInstallDirs, - absoluteInstallDirs, - CopyDest(..), - prefixRelativeInstallDirs, - substituteInstallDirTemplates, - - PathTemplate, - PathTemplateVariable(..), - PathTemplateEnv, - toPathTemplate, - fromPathTemplate, - combinePathTemplate, - substPathTemplate, - initialPathTemplateEnv, - platformTemplateEnv, - compilerTemplateEnv, - packageTemplateEnv, - abiTemplateEnv, - installDirsTemplateEnv, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Compat.Environment (lookupEnv) -import Distribution.Package -import Distribution.System -import Distribution.Compiler -import Distribution.Text - -import System.Directory (getAppUserDataDirectory) -import System.FilePath - ( (), isPathSeparator - , pathSeparator, dropDrive - , takeDirectory ) - -#ifdef mingw32_HOST_OS -import qualified Prelude -import Foreign -import Foreign.C -#endif - --- --------------------------------------------------------------------------- --- Installation directories - - --- | The directories where we will install files for packages. --- --- We have several different directories for different types of files since --- many systems have conventions whereby different types of files in a package --- are installed in different directories. This is particularly the case on --- Unix style systems. 
--- -data InstallDirs dir = InstallDirs { - prefix :: dir, - bindir :: dir, - libdir :: dir, - libsubdir :: dir, - dynlibdir :: dir, - flibdir :: dir, -- ^ foreign libraries - libexecdir :: dir, - libexecsubdir:: dir, - includedir :: dir, - datadir :: dir, - datasubdir :: dir, - docdir :: dir, - mandir :: dir, - htmldir :: dir, - haddockdir :: dir, - sysconfdir :: dir - } deriving (Eq, Read, Show, Functor, Generic) - -instance Binary dir => Binary (InstallDirs dir) - -instance (Semigroup dir, Monoid dir) => Monoid (InstallDirs dir) where - mempty = gmempty - mappend = (<>) - -instance Semigroup dir => Semigroup (InstallDirs dir) where - (<>) = gmappend - -combineInstallDirs :: (a -> b -> c) - -> InstallDirs a - -> InstallDirs b - -> InstallDirs c -combineInstallDirs combine a b = InstallDirs { - prefix = prefix a `combine` prefix b, - bindir = bindir a `combine` bindir b, - libdir = libdir a `combine` libdir b, - libsubdir = libsubdir a `combine` libsubdir b, - dynlibdir = dynlibdir a `combine` dynlibdir b, - flibdir = flibdir a `combine` flibdir b, - libexecdir = libexecdir a `combine` libexecdir b, - libexecsubdir= libexecsubdir a `combine` libexecsubdir b, - includedir = includedir a `combine` includedir b, - datadir = datadir a `combine` datadir b, - datasubdir = datasubdir a `combine` datasubdir b, - docdir = docdir a `combine` docdir b, - mandir = mandir a `combine` mandir b, - htmldir = htmldir a `combine` htmldir b, - haddockdir = haddockdir a `combine` haddockdir b, - sysconfdir = sysconfdir a `combine` sysconfdir b - } - -appendSubdirs :: (a -> a -> a) -> InstallDirs a -> InstallDirs a -appendSubdirs append dirs = dirs { - libdir = libdir dirs `append` libsubdir dirs, - libexecdir = libexecdir dirs `append` libexecsubdir dirs, - datadir = datadir dirs `append` datasubdir dirs, - libsubdir = error "internal error InstallDirs.libsubdir", - libexecsubdir = error "internal error InstallDirs.libexecsubdir", - datasubdir = error "internal error 
InstallDirs.datasubdir" - } - --- | The installation directories in terms of 'PathTemplate's that contain --- variables. --- --- The defaults for most of the directories are relative to each other, in --- particular they are all relative to a single prefix. This makes it --- convenient for the user to override the default installation directory --- by only having to specify --prefix=... rather than overriding each --- individually. This is done by allowing $-style variables in the dirs. --- These are expanded by textual substitution (see 'substPathTemplate'). --- --- A few of these installation directories are split into two components, the --- dir and subdir. The full installation path is formed by combining the two --- together with @\/@. The reason for this is compatibility with other Unix --- build systems which also support @--libdir@ and @--datadir@. We would like --- users to be able to configure @--libdir=\/usr\/lib64@ for example but --- because by default we want to support installing multiple versions of --- packages and building the same package for multiple compilers we append the --- libsubdir to get: @\/usr\/lib64\/$libname\/$compiler@. --- --- An additional complication is the need to support relocatable packages on --- systems which support such things, like Windows. 
--- -type InstallDirTemplates = InstallDirs PathTemplate - --- --------------------------------------------------------------------------- --- Default installation directories - -defaultInstallDirs :: CompilerFlavor -> Bool -> Bool -> IO InstallDirTemplates -defaultInstallDirs = defaultInstallDirs' False - -defaultInstallDirs' :: Bool {- use external internal deps -} - -> CompilerFlavor -> Bool -> Bool -> IO InstallDirTemplates -defaultInstallDirs' True comp userInstall hasLibs = do - dflt <- defaultInstallDirs' False comp userInstall hasLibs - -- Be a bit more hermetic about per-component installs - return dflt { datasubdir = toPathTemplate $ "$abi" "$libname", - docdir = toPathTemplate $ "$datadir" "doc" "$abi" "$libname" - } -defaultInstallDirs' False comp userInstall _hasLibs = do - installPrefix <- - if userInstall - then do - mDir <- lookupEnv "CABAL_DIR" - case mDir of - Nothing -> getAppUserDataDirectory "cabal" - Just dir -> return dir - else case buildOS of - Windows -> do windowsProgramFilesDir <- getWindowsProgramFilesDir - return (windowsProgramFilesDir "Haskell") - _ -> return "/usr/local" - installLibDir <- - case buildOS of - Windows -> return "$prefix" - _ -> return ("$prefix" "lib") - return $ fmap toPathTemplate $ InstallDirs { - prefix = installPrefix, - bindir = "$prefix" "bin", - libdir = installLibDir, - libsubdir = case comp of - UHC -> "$pkgid" - _other -> "$abi" "$libname", - dynlibdir = "$libdir" case comp of - UHC -> "$pkgid" - _other -> "$abi", - libexecsubdir= "$abi" "$pkgid", - flibdir = "$libdir", - libexecdir = case buildOS of - Windows -> "$prefix" "$libname" - _other -> "$prefix" "libexec", - includedir = "$libdir" "$libsubdir" "include", - datadir = case buildOS of - Windows -> "$prefix" - _other -> "$prefix" "share", - datasubdir = "$abi" "$pkgid", - docdir = "$datadir" "doc" "$abi" "$pkgid", - mandir = "$datadir" "man", - htmldir = "$docdir" "html", - haddockdir = "$htmldir", - sysconfdir = "$prefix" "etc" - } - --- 
--------------------------------------------------------------------------- --- Converting directories, absolute or prefix-relative - --- | Substitute the install dir templates into each other. --- --- To prevent cyclic substitutions, only some variables are allowed in --- particular dir templates. If out of scope vars are present, they are not --- substituted for. Checking for any remaining unsubstituted vars can be done --- as a subsequent operation. --- --- The reason it is done this way is so that in 'prefixRelativeInstallDirs' we --- can replace 'prefix' with the 'PrefixVar' and get resulting --- 'PathTemplate's that still have the 'PrefixVar' in them. Doing this makes it --- each to check which paths are relative to the $prefix. --- -substituteInstallDirTemplates :: PathTemplateEnv - -> InstallDirTemplates -> InstallDirTemplates -substituteInstallDirTemplates env dirs = dirs' - where - dirs' = InstallDirs { - -- So this specifies exactly which vars are allowed in each template - prefix = subst prefix [], - bindir = subst bindir [prefixVar], - libdir = subst libdir [prefixVar, bindirVar], - libsubdir = subst libsubdir [], - dynlibdir = subst dynlibdir [prefixVar, bindirVar, libdirVar], - flibdir = subst flibdir [prefixVar, bindirVar, libdirVar], - libexecdir = subst libexecdir prefixBinLibVars, - libexecsubdir = subst libexecsubdir [], - includedir = subst includedir prefixBinLibVars, - datadir = subst datadir prefixBinLibVars, - datasubdir = subst datasubdir [], - docdir = subst docdir prefixBinLibDataVars, - mandir = subst mandir (prefixBinLibDataVars ++ [docdirVar]), - htmldir = subst htmldir (prefixBinLibDataVars ++ [docdirVar]), - haddockdir = subst haddockdir (prefixBinLibDataVars ++ - [docdirVar, htmldirVar]), - sysconfdir = subst sysconfdir prefixBinLibVars - } - subst dir env' = substPathTemplate (env'++env) (dir dirs) - - prefixVar = (PrefixVar, prefix dirs') - bindirVar = (BindirVar, bindir dirs') - libdirVar = (LibdirVar, libdir dirs') - 
libsubdirVar = (LibsubdirVar, libsubdir dirs') - datadirVar = (DatadirVar, datadir dirs') - datasubdirVar = (DatasubdirVar, datasubdir dirs') - docdirVar = (DocdirVar, docdir dirs') - htmldirVar = (HtmldirVar, htmldir dirs') - prefixBinLibVars = [prefixVar, bindirVar, libdirVar, libsubdirVar] - prefixBinLibDataVars = prefixBinLibVars ++ [datadirVar, datasubdirVar] - --- | Convert from abstract install directories to actual absolute ones by --- substituting for all the variables in the abstract paths, to get real --- absolute path. -absoluteInstallDirs :: PackageIdentifier - -> UnitId - -> CompilerInfo - -> CopyDest - -> Platform - -> InstallDirs PathTemplate - -> InstallDirs FilePath -absoluteInstallDirs pkgId libname compilerId copydest platform dirs = - (case copydest of - CopyTo destdir -> fmap ((destdir ) . dropDrive) - CopyToDb dbdir -> fmap (substPrefix "${pkgroot}" (takeDirectory dbdir)) - _ -> id) - . appendSubdirs () - . fmap fromPathTemplate - $ substituteInstallDirTemplates env dirs - where - env = initialPathTemplateEnv pkgId libname compilerId platform - substPrefix pre root path - | pre `isPrefixOf` path = root ++ drop (length pre) path - | otherwise = path - - --- |The location prefix for the /copy/ command. -data CopyDest - = NoCopyDest - | CopyTo FilePath - | CopyToDb FilePath - -- ^ when using the ${pkgroot} as prefix. The CopyToDb will - -- adjust the paths to be relative to the provided package - -- database when copying / installing. - deriving (Eq, Show, Generic) - -instance Binary CopyDest - --- | Check which of the paths are relative to the installation $prefix. --- --- If any of the paths are not relative, ie they are absolute paths, then it --- prevents us from making a relocatable package (also known as a \"prefix --- independent\" package). 
--- -prefixRelativeInstallDirs :: PackageIdentifier - -> UnitId - -> CompilerInfo - -> Platform - -> InstallDirTemplates - -> InstallDirs (Maybe FilePath) -prefixRelativeInstallDirs pkgId libname compilerId platform dirs = - fmap relative - . appendSubdirs combinePathTemplate - $ -- substitute the path template into each other, except that we map - -- \$prefix back to $prefix. We're trying to end up with templates that - -- mention no vars except $prefix. - substituteInstallDirTemplates env dirs { - prefix = PathTemplate [Variable PrefixVar] - } - where - env = initialPathTemplateEnv pkgId libname compilerId platform - - -- If it starts with $prefix then it's relative and produce the relative - -- path by stripping off $prefix/ or $prefix - relative dir = case dir of - PathTemplate cs -> fmap (fromPathTemplate . PathTemplate) (relative' cs) - relative' (Variable PrefixVar : Ordinary (s:rest) : rest') - | isPathSeparator s = Just (Ordinary rest : rest') - relative' (Variable PrefixVar : rest) = Just rest - relative' _ = Nothing - --- --------------------------------------------------------------------------- --- Path templates - --- | An abstract path, possibly containing variables that need to be --- substituted for to get a real 'FilePath'. 
--- -newtype PathTemplate = PathTemplate [PathComponent] - deriving (Eq, Ord, Generic) - -instance Binary PathTemplate - -data PathComponent = - Ordinary FilePath - | Variable PathTemplateVariable - deriving (Eq, Ord, Generic) - -instance Binary PathComponent - -data PathTemplateVariable = - PrefixVar -- ^ The @$prefix@ path variable - | BindirVar -- ^ The @$bindir@ path variable - | LibdirVar -- ^ The @$libdir@ path variable - | LibsubdirVar -- ^ The @$libsubdir@ path variable - | DynlibdirVar -- ^ The @$dynlibdir@ path variable - | DatadirVar -- ^ The @$datadir@ path variable - | DatasubdirVar -- ^ The @$datasubdir@ path variable - | DocdirVar -- ^ The @$docdir@ path variable - | HtmldirVar -- ^ The @$htmldir@ path variable - | PkgNameVar -- ^ The @$pkg@ package name path variable - | PkgVerVar -- ^ The @$version@ package version path variable - | PkgIdVar -- ^ The @$pkgid@ package Id path variable, eg @foo-1.0@ - | LibNameVar -- ^ The @$libname@ path variable - | CompilerVar -- ^ The compiler name and version, eg @ghc-6.6.1@ - | OSVar -- ^ The operating system name, eg @windows@ or @linux@ - | ArchVar -- ^ The CPU architecture name, eg @i386@ or @x86_64@ - | AbiVar -- ^ The Compiler's ABI identifier, $arch-$os-$compiler-$abitag - | AbiTagVar -- ^ The optional ABI tag for the compiler - | ExecutableNameVar -- ^ The executable name; used in shell wrappers - | TestSuiteNameVar -- ^ The name of the test suite being run - | TestSuiteResultVar -- ^ The result of the test suite being run, eg - -- @pass@, @fail@, or @error@. - | BenchmarkNameVar -- ^ The name of the benchmark being run - deriving (Eq, Ord, Generic) - -instance Binary PathTemplateVariable - -type PathTemplateEnv = [(PathTemplateVariable, PathTemplate)] - --- | Convert a 'FilePath' to a 'PathTemplate' including any template vars. --- -toPathTemplate :: FilePath -> PathTemplate -toPathTemplate = PathTemplate . 
read -- TODO: eradicateNoParse - --- | Convert back to a path, any remaining vars are included --- -fromPathTemplate :: PathTemplate -> FilePath -fromPathTemplate (PathTemplate template) = show template - -combinePathTemplate :: PathTemplate -> PathTemplate -> PathTemplate -combinePathTemplate (PathTemplate t1) (PathTemplate t2) = - PathTemplate (t1 ++ [Ordinary [pathSeparator]] ++ t2) - -substPathTemplate :: PathTemplateEnv -> PathTemplate -> PathTemplate -substPathTemplate environment (PathTemplate template) = - PathTemplate (concatMap subst template) - - where subst component@(Ordinary _) = [component] - subst component@(Variable variable) = - case lookup variable environment of - Just (PathTemplate components) -> components - Nothing -> [component] - --- | The initial environment has all the static stuff but no paths -initialPathTemplateEnv :: PackageIdentifier - -> UnitId - -> CompilerInfo - -> Platform - -> PathTemplateEnv -initialPathTemplateEnv pkgId libname compiler platform = - packageTemplateEnv pkgId libname - ++ compilerTemplateEnv compiler - ++ platformTemplateEnv platform - ++ abiTemplateEnv compiler platform - -packageTemplateEnv :: PackageIdentifier -> UnitId -> PathTemplateEnv -packageTemplateEnv pkgId uid = - [(PkgNameVar, PathTemplate [Ordinary $ display (packageName pkgId)]) - ,(PkgVerVar, PathTemplate [Ordinary $ display (packageVersion pkgId)]) - -- Invariant: uid is actually a HashedUnitId. Hard to enforce because - -- it's an API change. 
- ,(LibNameVar, PathTemplate [Ordinary $ display uid]) - ,(PkgIdVar, PathTemplate [Ordinary $ display pkgId]) - ] - -compilerTemplateEnv :: CompilerInfo -> PathTemplateEnv -compilerTemplateEnv compiler = - [(CompilerVar, PathTemplate [Ordinary $ display (compilerInfoId compiler)]) - ] - -platformTemplateEnv :: Platform -> PathTemplateEnv -platformTemplateEnv (Platform arch os) = - [(OSVar, PathTemplate [Ordinary $ display os]) - ,(ArchVar, PathTemplate [Ordinary $ display arch]) - ] - -abiTemplateEnv :: CompilerInfo -> Platform -> PathTemplateEnv -abiTemplateEnv compiler (Platform arch os) = - [(AbiVar, PathTemplate [Ordinary $ display arch ++ '-':display os ++ - '-':display (compilerInfoId compiler) ++ - case compilerInfoAbiTag compiler of - NoAbiTag -> "" - AbiTag tag -> '-':tag]) - ,(AbiTagVar, PathTemplate [Ordinary $ abiTagString (compilerInfoAbiTag compiler)]) - ] - -installDirsTemplateEnv :: InstallDirs PathTemplate -> PathTemplateEnv -installDirsTemplateEnv dirs = - [(PrefixVar, prefix dirs) - ,(BindirVar, bindir dirs) - ,(LibdirVar, libdir dirs) - ,(LibsubdirVar, libsubdir dirs) - ,(DynlibdirVar, dynlibdir dirs) - ,(DatadirVar, datadir dirs) - ,(DatasubdirVar, datasubdir dirs) - ,(DocdirVar, docdir dirs) - ,(HtmldirVar, htmldir dirs) - ] - - --- --------------------------------------------------------------------------- --- Parsing and showing path templates: - --- The textual format is that of an ordinary Haskell String, eg --- "$prefix/bin" --- and this gets parsed to the internal representation as a sequence of path --- spans which are either strings or variables, eg: --- PathTemplate [Variable PrefixVar, Ordinary "/bin" ] - -instance Show PathTemplateVariable where - show PrefixVar = "prefix" - show LibNameVar = "libname" - show BindirVar = "bindir" - show LibdirVar = "libdir" - show LibsubdirVar = "libsubdir" - show DynlibdirVar = "dynlibdir" - show DatadirVar = "datadir" - show DatasubdirVar = "datasubdir" - show DocdirVar = "docdir" - show 
HtmldirVar = "htmldir" - show PkgNameVar = "pkg" - show PkgVerVar = "version" - show PkgIdVar = "pkgid" - show CompilerVar = "compiler" - show OSVar = "os" - show ArchVar = "arch" - show AbiTagVar = "abitag" - show AbiVar = "abi" - show ExecutableNameVar = "executablename" - show TestSuiteNameVar = "test-suite" - show TestSuiteResultVar = "result" - show BenchmarkNameVar = "benchmark" - -instance Read PathTemplateVariable where - readsPrec _ s = - take 1 - [ (var, drop (length varStr) s) - | (varStr, var) <- vars - , varStr `isPrefixOf` s ] - -- NB: order matters! Longer strings first - where vars = [("prefix", PrefixVar) - ,("bindir", BindirVar) - ,("libdir", LibdirVar) - ,("libsubdir", LibsubdirVar) - ,("dynlibdir", DynlibdirVar) - ,("datadir", DatadirVar) - ,("datasubdir", DatasubdirVar) - ,("docdir", DocdirVar) - ,("htmldir", HtmldirVar) - ,("pkgid", PkgIdVar) - ,("libname", LibNameVar) - ,("pkgkey", LibNameVar) -- backwards compatibility - ,("pkg", PkgNameVar) - ,("version", PkgVerVar) - ,("compiler", CompilerVar) - ,("os", OSVar) - ,("arch", ArchVar) - ,("abitag", AbiTagVar) - ,("abi", AbiVar) - ,("executablename", ExecutableNameVar) - ,("test-suite", TestSuiteNameVar) - ,("result", TestSuiteResultVar) - ,("benchmark", BenchmarkNameVar)] - -instance Show PathComponent where - show (Ordinary path) = path - show (Variable var) = '$':show var - showList = foldr (\x -> (shows x .)) id - -instance Read PathComponent where - -- for some reason we collapse multiple $ symbols here - readsPrec _ = lex0 - where lex0 [] = [] - lex0 ('$':'$':s') = lex0 ('$':s') - lex0 ('$':s') = case [ (Variable var, s'') - | (var, s'') <- reads s' ] of - [] -> lex1 "$" s' - ok -> ok - lex0 s' = lex1 [] s' - lex1 "" "" = [] - lex1 acc "" = [(Ordinary (reverse acc), "")] - lex1 acc ('$':'$':s) = lex1 acc ('$':s) - lex1 acc ('$':s) = [(Ordinary (reverse acc), '$':s)] - lex1 acc (c:s) = lex1 (c:acc) s - readList [] = [([],"")] - readList s = [ (component:components, s'') - | (component, s') 
<- reads s - , (components, s'') <- readList s' ] - -instance Show PathTemplate where - show (PathTemplate template) = show (show template) - -instance Read PathTemplate where - readsPrec p s = [ (PathTemplate template, s') - | (path, s') <- readsPrec p s - , (template, "") <- reads path ] - --- --------------------------------------------------------------------------- --- Internal utilities - -getWindowsProgramFilesDir :: NoCallStackIO FilePath -getWindowsProgramFilesDir = do -#ifdef mingw32_HOST_OS - m <- shGetFolderPath csidl_PROGRAM_FILES -#else - let m = Nothing -#endif - return (fromMaybe "C:\\Program Files" m) - -#ifdef mingw32_HOST_OS -shGetFolderPath :: CInt -> NoCallStackIO (Maybe FilePath) -shGetFolderPath n = - allocaArray long_path_size $ \pPath -> do - r <- c_SHGetFolderPath nullPtr n nullPtr 0 pPath - if (r /= 0) - then return Nothing - else do s <- peekCWString pPath; return (Just s) - where - long_path_size = 1024 -- MAX_PATH is 260, this should be plenty - -csidl_PROGRAM_FILES :: CInt -csidl_PROGRAM_FILES = 0x0026 --- csidl_PROGRAM_FILES_COMMON :: CInt --- csidl_PROGRAM_FILES_COMMON = 0x002b - -#ifdef x86_64_HOST_ARCH -#define CALLCONV ccall -#else -#define CALLCONV stdcall -#endif - -foreign import CALLCONV unsafe "shlobj.h SHGetFolderPathW" - c_SHGetFolderPath :: Ptr () - -> CInt - -> Ptr () - -> CInt - -> CWString - -> Prelude.IO CInt -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Install.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Install.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Install.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Install.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,268 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - 
------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Install --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is the entry point into installing a built package. Performs the --- \"@.\/setup install@\" and \"@.\/setup copy@\" actions. It moves files into --- place based on the prefix argument. It does the generic bits and then calls --- compiler-specific functions to do the rest. - -module Distribution.Simple.Install ( - install, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.TargetInfo -import Distribution.Types.LocalBuildInfo -import Distribution.Types.ForeignLib -import Distribution.Types.PackageDescription -import Distribution.Types.UnqualComponentName -import Distribution.Types.ExecutableScope - -import Distribution.Package -import Distribution.PackageDescription -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.BuildPaths (haddockName, haddockPref) -import Distribution.Simple.Glob (matchDirFileGlob) -import Distribution.Simple.Utils - ( createDirectoryIfMissingVerbose - , installDirectoryContents, installOrdinaryFile, isInSearchPath - , die', info, noticeNoWrap, warn ) -import Distribution.Simple.Compiler - ( CompilerFlavor(..), compilerFlavor ) -import Distribution.Simple.Setup - ( CopyFlags(..), fromFlag, HaddockTarget(ForDevelopment) ) -import Distribution.Simple.BuildTarget - -import qualified Distribution.Simple.GHC as GHC -import qualified Distribution.Simple.GHCJS as GHCJS -import qualified Distribution.Simple.UHC as UHC -import qualified Distribution.Simple.HaskellSuite as HaskellSuite -import Distribution.Compat.Graph (IsNode(..)) - -import System.Directory - ( doesDirectoryExist, doesFileExist ) -import System.FilePath - ( takeFileName, takeDirectory, (), isRelative ) - -import Distribution.Verbosity -import Distribution.Text 
- ( display ) - --- |Perform the \"@.\/setup install@\" and \"@.\/setup copy@\" --- actions. Move files into place based on the prefix argument. --- --- This does NOT register libraries, you should call 'register' --- to do that. - -install :: PackageDescription -- ^information from the .cabal file - -> LocalBuildInfo -- ^information from the configure step - -> CopyFlags -- ^flags sent to copy or install - -> IO () -install pkg_descr lbi flags = do - checkHasLibsOrExes - targets <- readTargetInfos verbosity pkg_descr lbi (copyArgs flags) - - copyPackage verbosity pkg_descr lbi distPref copydest - - -- It's not necessary to do these in build-order, but it's harmless - withNeededTargetsInBuildOrder' pkg_descr lbi (map nodeKey targets) $ \target -> - let comp = targetComponent target - clbi = targetCLBI target - in copyComponent verbosity pkg_descr lbi comp clbi copydest - where - distPref = fromFlag (copyDistPref flags) - verbosity = fromFlag (copyVerbosity flags) - copydest = fromFlag (copyDest flags) - - checkHasLibsOrExes = - unless (hasLibs pkg_descr || hasForeignLibs pkg_descr || hasExes pkg_descr) $ - die' verbosity "No executables and no library found. Nothing to do." - --- | Copy package global files. -copyPackage :: Verbosity -> PackageDescription - -> LocalBuildInfo -> FilePath -> CopyDest -> IO () -copyPackage verbosity pkg_descr lbi distPref copydest = do - let -- This is a bit of a hack, to handle files which are not - -- per-component (data files and Haddock files.) - InstallDirs { - datadir = dataPref, - -- NB: The situation with Haddock is a bit delicate. On the - -- one hand, the easiest to understand Haddock documentation - -- path is pkgname-0.1, which means it's per-package (not - -- per-component). But this means that it's impossible to - -- install Haddock documentation for internal libraries. We'll - -- keep this constraint for now; this means you can't use - -- Cabal to Haddock internal libraries. This does not seem - -- like a big problem. 
- docdir = docPref, - htmldir = htmlPref, - haddockdir = interfacePref} - -- Notice use of 'absoluteInstallDirs' (not the - -- per-component variant). This means for non-library - -- packages we'll just pick a nondescriptive foo-0.1 - = absoluteInstallDirs pkg_descr lbi copydest - - -- Install (package-global) data files - installDataFiles verbosity pkg_descr dataPref - - -- Install (package-global) Haddock files - -- TODO: these should be done per-library - docExists <- doesDirectoryExist $ haddockPref ForDevelopment distPref pkg_descr - info verbosity ("directory " ++ haddockPref ForDevelopment distPref pkg_descr ++ - " does exist: " ++ show docExists) - - -- TODO: this is a bit questionable, Haddock files really should - -- be per library (when there are convenience libraries.) - when docExists $ do - createDirectoryIfMissingVerbose verbosity True htmlPref - installDirectoryContents verbosity - (haddockPref ForDevelopment distPref pkg_descr) htmlPref - -- setPermissionsRecursive [Read] htmlPref - -- The haddock interface file actually already got installed - -- in the recursive copy, but now we install it where we actually - -- want it to be (normally the same place). We could remove the - -- copy in htmlPref first. 
- let haddockInterfaceFileSrc = haddockPref ForDevelopment distPref pkg_descr - haddockName pkg_descr - haddockInterfaceFileDest = interfacePref haddockName pkg_descr - -- We only generate the haddock interface file for libs, So if the - -- package consists only of executables there will not be one: - exists <- doesFileExist haddockInterfaceFileSrc - when exists $ do - createDirectoryIfMissingVerbose verbosity True interfacePref - installOrdinaryFile verbosity haddockInterfaceFileSrc - haddockInterfaceFileDest - - let lfiles = licenseFiles pkg_descr - unless (null lfiles) $ do - createDirectoryIfMissingVerbose verbosity True docPref - sequence_ - [ installOrdinaryFile verbosity lfile (docPref takeFileName lfile) - | lfile <- lfiles ] - --- | Copy files associated with a component. -copyComponent :: Verbosity -> PackageDescription - -> LocalBuildInfo -> Component -> ComponentLocalBuildInfo - -> CopyDest - -> IO () -copyComponent verbosity pkg_descr lbi (CLib lib) clbi copydest = do - let InstallDirs{ - libdir = libPref, - dynlibdir = dynlibPref, - includedir = incPref - } = absoluteComponentInstallDirs pkg_descr lbi (componentUnitId clbi) copydest - buildPref = componentBuildDir lbi clbi - - case libName lib of - Nothing -> noticeNoWrap verbosity ("Installing library in " ++ libPref) - Just n -> noticeNoWrap verbosity ("Installing internal library " ++ display n ++ " in " ++ libPref) - - -- install include files for all compilers - they may be needed to compile - -- haskell files (using the CPP extension) - installIncludeFiles verbosity (libBuildInfo lib) lbi buildPref incPref - - case compilerFlavor (compiler lbi) of - GHC -> GHC.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr lib clbi - GHCJS -> GHCJS.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr lib clbi - UHC -> UHC.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr lib clbi - HaskellSuite _ -> HaskellSuite.installLib - verbosity lbi libPref dynlibPref buildPref 
pkg_descr lib clbi - _ -> die' verbosity $ "installing with " - ++ display (compilerFlavor (compiler lbi)) - ++ " is not implemented" - -copyComponent verbosity pkg_descr lbi (CFLib flib) clbi copydest = do - let InstallDirs{ - flibdir = flibPref, - includedir = incPref - } = absoluteComponentInstallDirs pkg_descr lbi (componentUnitId clbi) copydest - buildPref = componentBuildDir lbi clbi - - noticeNoWrap verbosity ("Installing foreign library " ++ unUnqualComponentName (foreignLibName flib) ++ " in " ++ flibPref) - installIncludeFiles verbosity (foreignLibBuildInfo flib) lbi buildPref incPref - - case compilerFlavor (compiler lbi) of - GHC -> GHC.installFLib verbosity lbi flibPref buildPref pkg_descr flib - _ -> die' verbosity $ "installing foreign lib with " - ++ display (compilerFlavor (compiler lbi)) - ++ " is not implemented" - -copyComponent verbosity pkg_descr lbi (CExe exe) clbi copydest = do - let installDirs = absoluteComponentInstallDirs pkg_descr lbi (componentUnitId clbi) copydest - -- the installers know how to find the actual location of the - -- binaries - buildPref = buildDir lbi - uid = componentUnitId clbi - pkgid = packageId pkg_descr - binPref | ExecutablePrivate <- exeScope exe = libexecdir installDirs - | otherwise = bindir installDirs - progPrefixPref = substPathTemplate pkgid lbi uid (progPrefix lbi) - progSuffixPref = substPathTemplate pkgid lbi uid (progSuffix lbi) - progFix = (progPrefixPref, progSuffixPref) - noticeNoWrap verbosity ("Installing executable " ++ display (exeName exe) - ++ " in " ++ binPref) - inPath <- isInSearchPath binPref - when (not inPath) $ - warn verbosity ("The directory " ++ binPref - ++ " is not in the system search path.") - case compilerFlavor (compiler lbi) of - GHC -> GHC.installExe verbosity lbi binPref buildPref progFix pkg_descr exe - GHCJS -> GHCJS.installExe verbosity lbi binPref buildPref progFix pkg_descr exe - UHC -> return () - HaskellSuite {} -> return () - _ -> die' verbosity $ "installing with " 
- ++ display (compilerFlavor (compiler lbi)) - ++ " is not implemented" - --- Nothing to do for benchmark/testsuite -copyComponent _ _ _ (CBench _) _ _ = return () -copyComponent _ _ _ (CTest _) _ _ = return () - --- | Install the files listed in data-files --- -installDataFiles :: Verbosity -> PackageDescription -> FilePath -> IO () -installDataFiles verbosity pkg_descr destDataDir = - flip traverse_ (dataFiles pkg_descr) $ \ file -> do - let srcDataDirRaw = dataDir pkg_descr - srcDataDir = if null srcDataDirRaw - then "." - else srcDataDirRaw - files <- matchDirFileGlob verbosity (specVersion pkg_descr) srcDataDir file - let dir = takeDirectory file - createDirectoryIfMissingVerbose verbosity True (destDataDir dir) - sequence_ [ installOrdinaryFile verbosity (srcDataDir file') - (destDataDir file') - | file' <- files ] - --- | Install the files listed in install-includes for a library --- -installIncludeFiles :: Verbosity -> BuildInfo -> LocalBuildInfo -> FilePath -> FilePath -> IO () -installIncludeFiles verbosity libBi lbi buildPref destIncludeDir = do - let relincdirs = "." 
: filter isRelative (includeDirs libBi) - incdirs = [ baseDir lbi dir | dir <- relincdirs ] - ++ [ buildPref dir | dir <- relincdirs ] - incs <- traverse (findInc incdirs) (installIncludes libBi) - sequence_ - [ do createDirectoryIfMissingVerbose verbosity True destDir - installOrdinaryFile verbosity srcFile destFile - | (relFile, srcFile) <- incs - , let destFile = destIncludeDir relFile - destDir = takeDirectory destFile ] - where - baseDir lbi' = fromMaybe "" (takeDirectory <$> cabalFilePath lbi') - findInc [] file = die' verbosity ("can't find include file " ++ file) - findInc (dir:dirs) file = do - let path = dir file - exists <- doesFileExist path - if exists then return (file, path) else findInc dirs file diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/LocalBuildInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/LocalBuildInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/LocalBuildInfo.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/LocalBuildInfo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,386 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.LocalBuildInfo --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Once a package has been configured we have resolved conditionals and --- dependencies, configured the compiler and other needed external programs. --- The 'LocalBuildInfo' is used to hold all this information. It holds the --- install dirs, the compiler, the exact package dependencies, the configured --- programs, the package database to use and a bunch of miscellaneous configure --- flags. 
It gets saved and reloaded from a file (@dist\/setup-config@). It gets --- passed in to very many subsequent build actions. - -module Distribution.Simple.LocalBuildInfo ( - LocalBuildInfo(..), - externalPackageDeps, - localComponentId, - localUnitId, - localCompatPackageKey, - - -- * Buildable package components - Component(..), - ComponentName(..), - defaultLibName, - showComponentName, - componentNameString, - ComponentLocalBuildInfo(..), - componentBuildDir, - foldComponent, - componentName, - componentBuildInfo, - componentBuildable, - pkgComponents, - pkgBuildableComponents, - lookupComponent, - getComponent, - getComponentLocalBuildInfo, - allComponentsInBuildOrder, - componentsInBuildOrder, - depLibraryPaths, - allLibModules, - - withAllComponentsInBuildOrder, - withComponentsInBuildOrder, - withComponentsLBI, - withLibLBI, - withExeLBI, - withBenchLBI, - withTestLBI, - enabledTestLBIs, - enabledBenchLBIs, - - -- * Installation directories - module Distribution.Simple.InstallDirs, - absoluteInstallDirs, prefixRelativeInstallDirs, - absoluteComponentInstallDirs, prefixRelativeComponentInstallDirs, - substPathTemplate, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.Component -import Distribution.Types.PackageId -import Distribution.Types.UnitId -import Distribution.Types.ComponentName -import Distribution.Types.UnqualComponentName -import Distribution.Types.PackageDescription -import Distribution.Types.ComponentLocalBuildInfo -import Distribution.Types.LocalBuildInfo -import Distribution.Types.TargetInfo - -import Distribution.Simple.InstallDirs hiding (absoluteInstallDirs, - prefixRelativeInstallDirs, - substPathTemplate, ) -import qualified Distribution.Simple.InstallDirs as InstallDirs -import Distribution.PackageDescription -import qualified Distribution.InstalledPackageInfo as Installed -import Distribution.Package -import Distribution.ModuleName -import Distribution.Simple.Compiler -import 
Distribution.Simple.PackageIndex -import Distribution.Simple.Utils -import Distribution.Text -import qualified Distribution.Compat.Graph as Graph - -import Data.List (stripPrefix) -import System.FilePath -import qualified Data.Map as Map - -import System.Directory (doesDirectoryExist, canonicalizePath) - --- ----------------------------------------------------------------------------- --- Configuration information of buildable components - -componentBuildDir :: LocalBuildInfo -> ComponentLocalBuildInfo -> FilePath --- For now, we assume that libraries/executables/test-suites/benchmarks --- are only ever built once. With Backpack, we need a special case for --- libraries so that we can handle building them multiple times. -componentBuildDir lbi clbi - = buildDir lbi - case componentLocalName clbi of - CLibName -> - if display (componentUnitId clbi) == display (componentComponentId clbi) - then "" - else display (componentUnitId clbi) - CSubLibName s -> - if display (componentUnitId clbi) == display (componentComponentId clbi) - then unUnqualComponentName s - else display (componentUnitId clbi) - CFLibName s -> unUnqualComponentName s - CExeName s -> unUnqualComponentName s - CTestName s -> unUnqualComponentName s - CBenchName s -> unUnqualComponentName s - -{-# DEPRECATED getComponentLocalBuildInfo "This function is not well-defined, because a 'ComponentName' does not uniquely identify a 'ComponentLocalBuildInfo'. If you have a 'TargetInfo', you should use 'targetCLBI' to get the 'ComponentLocalBuildInfo'. Otherwise, use 'componentNameTargets' to get all possible 'ComponentLocalBuildInfo's. This will be removed in Cabal 2.2." 
#-} -getComponentLocalBuildInfo :: LocalBuildInfo -> ComponentName -> ComponentLocalBuildInfo -getComponentLocalBuildInfo lbi cname = - case componentNameCLBIs lbi cname of - [clbi] -> clbi - [] -> - error $ "internal error: there is no configuration data " - ++ "for component " ++ show cname - clbis -> - error $ "internal error: the component name " ++ show cname - ++ "is ambiguous. Refers to: " - ++ intercalate ", " (map (display . componentUnitId) clbis) - --- | Perform the action on each enabled 'library' in the package --- description with the 'ComponentLocalBuildInfo'. -withLibLBI :: PackageDescription -> LocalBuildInfo - -> (Library -> ComponentLocalBuildInfo -> IO ()) -> IO () -withLibLBI pkg lbi f = - withAllTargetsInBuildOrder' pkg lbi $ \target -> - case targetComponent target of - CLib lib -> f lib (targetCLBI target) - _ -> return () - --- | Perform the action on each enabled 'Executable' in the package --- description. Extended version of 'withExe' that also gives corresponding --- build info. -withExeLBI :: PackageDescription -> LocalBuildInfo - -> (Executable -> ComponentLocalBuildInfo -> IO ()) -> IO () -withExeLBI pkg lbi f = - withAllTargetsInBuildOrder' pkg lbi $ \target -> - case targetComponent target of - CExe exe -> f exe (targetCLBI target) - _ -> return () - --- | Perform the action on each enabled 'Benchmark' in the package --- description. 
-withBenchLBI :: PackageDescription -> LocalBuildInfo - -> (Benchmark -> ComponentLocalBuildInfo -> IO ()) -> IO () -withBenchLBI pkg lbi f = - sequence_ [ f test clbi | (test, clbi) <- enabledBenchLBIs pkg lbi ] - -withTestLBI :: PackageDescription -> LocalBuildInfo - -> (TestSuite -> ComponentLocalBuildInfo -> IO ()) -> IO () -withTestLBI pkg lbi f = - sequence_ [ f test clbi | (test, clbi) <- enabledTestLBIs pkg lbi ] - -enabledTestLBIs :: PackageDescription -> LocalBuildInfo - -> [(TestSuite, ComponentLocalBuildInfo)] -enabledTestLBIs pkg lbi = - [ (test, targetCLBI target) - | target <- allTargetsInBuildOrder' pkg lbi - , CTest test <- [targetComponent target] ] - -enabledBenchLBIs :: PackageDescription -> LocalBuildInfo - -> [(Benchmark, ComponentLocalBuildInfo)] -enabledBenchLBIs pkg lbi = - [ (bench, targetCLBI target) - | target <- allTargetsInBuildOrder' pkg lbi - , CBench bench <- [targetComponent target] ] - -{-# DEPRECATED withComponentsLBI "Use withAllComponentsInBuildOrder" #-} -withComponentsLBI :: PackageDescription -> LocalBuildInfo - -> (Component -> ComponentLocalBuildInfo -> IO ()) - -> IO () -withComponentsLBI = withAllComponentsInBuildOrder - --- | Perform the action on each buildable 'Library' or 'Executable' (Component) --- in the PackageDescription, subject to the build order specified by the --- 'compBuildOrder' field of the given 'LocalBuildInfo' -withAllComponentsInBuildOrder :: PackageDescription -> LocalBuildInfo - -> (Component -> ComponentLocalBuildInfo -> IO ()) - -> IO () -withAllComponentsInBuildOrder pkg lbi f = - withAllTargetsInBuildOrder' pkg lbi $ \target -> - f (targetComponent target) (targetCLBI target) - -{-# DEPRECATED withComponentsInBuildOrder "You have got a 'TargetInfo' right? Use 'withNeededTargetsInBuildOrder' on the 'UnitId's you can 'nodeKey' out." 
#-} -withComponentsInBuildOrder :: PackageDescription -> LocalBuildInfo - -> [ComponentName] - -> (Component -> ComponentLocalBuildInfo -> IO ()) - -> IO () -withComponentsInBuildOrder pkg lbi cnames f = - withNeededTargetsInBuildOrder' pkg lbi uids $ \target -> - f (targetComponent target) (targetCLBI target) - where uids = concatMap (componentNameToUnitIds lbi) cnames - -allComponentsInBuildOrder :: LocalBuildInfo - -> [ComponentLocalBuildInfo] -allComponentsInBuildOrder lbi = - Graph.topSort (componentGraph lbi) - --- | Private helper function for some of the deprecated implementations. -componentNameToUnitIds :: LocalBuildInfo -> ComponentName -> [UnitId] -componentNameToUnitIds lbi cname = - case Map.lookup cname (componentNameMap lbi) of - Just clbis -> map componentUnitId clbis - Nothing -> error $ "componentNameToUnitIds " ++ display cname - -{-# DEPRECATED componentsInBuildOrder "You've got 'TargetInfo' right? Use 'neededTargetsInBuildOrder' on the 'UnitId's you can 'nodeKey' out." #-} -componentsInBuildOrder :: LocalBuildInfo -> [ComponentName] - -> [ComponentLocalBuildInfo] -componentsInBuildOrder lbi cnames - -- NB: use of localPkgDescr here is safe because we throw out the - -- result immediately afterwards - = map targetCLBI (neededTargetsInBuildOrder' (localPkgDescr lbi) lbi uids) - where uids = concatMap (componentNameToUnitIds lbi) cnames - --- ----------------------------------------------------------------------------- --- A random function that has no business in this module - --- | Determine the directories containing the dynamic libraries of the --- transitive dependencies of the component we are building. --- --- When wanted, and possible, returns paths relative to the installDirs 'prefix' -depLibraryPaths :: Bool -- ^ Building for inplace? 
- -> Bool -- ^ Generate prefix-relative library paths - -> LocalBuildInfo - -> ComponentLocalBuildInfo -- ^ Component that is being built - -> NoCallStackIO [FilePath] -depLibraryPaths inplace relative lbi clbi = do - let pkgDescr = localPkgDescr lbi - installDirs = absoluteComponentInstallDirs pkgDescr lbi (componentUnitId clbi) NoCopyDest - executable = case clbi of - ExeComponentLocalBuildInfo {} -> True - _ -> False - relDir | executable = bindir installDirs - | otherwise = libdir installDirs - - let -- TODO: this is kind of inefficient - internalDeps = [ uid - | (uid, _) <- componentPackageDeps clbi - -- Test that it's internal - , sub_target <- allTargetsInBuildOrder' pkgDescr lbi - , componentUnitId (targetCLBI (sub_target)) == uid ] - internalLibs = [ getLibDir (targetCLBI sub_target) - | sub_target <- neededTargetsInBuildOrder' - pkgDescr lbi internalDeps ] - {- - -- This is better, but it doesn't work, because we may be passed a - -- CLBI which doesn't actually exist, and was faked up when we - -- were building a test suite/benchmark. See #3599 for proposal - -- to fix this. - let internalCLBIs = filter ((/= componentUnitId clbi) . componentUnitId) - . map targetCLBI - $ neededTargetsInBuildOrder lbi [componentUnitId clbi] - internalLibs = map getLibDir internalCLBIs - -} - getLibDir sub_clbi - | inplace = componentBuildDir lbi sub_clbi - | otherwise = dynlibdir (absoluteComponentInstallDirs pkgDescr lbi (componentUnitId sub_clbi) NoCopyDest) - - -- Why do we go through all the trouble of a hand-crafting - -- internalLibs, when 'installedPkgs' actually contains the - -- internal libraries? The trouble is that 'installedPkgs' - -- may contain *inplace* entries, which we must NOT use for - -- not inplace 'depLibraryPaths' (e.g., for RPATH calculation). - -- See #4025 for more details. 
This is all horrible but it - -- is a moot point if you are using a per-component build, - -- because you never have any internal libraries in this case; - -- they're all external. - let external_ipkgs = filter is_external (allPackages (installedPkgs lbi)) - is_external ipkg = not (installedUnitId ipkg `elem` internalDeps) - -- First look for dynamic libraries in `dynamic-library-dirs`, and use - -- `library-dirs` as a fall back. - getDynDir pkg = case Installed.libraryDynDirs pkg of - [] -> Installed.libraryDirs pkg - d -> d - allDepLibDirs = concatMap getDynDir external_ipkgs - - allDepLibDirs' = internalLibs ++ allDepLibDirs - allDepLibDirsC <- traverse canonicalizePathNoFail allDepLibDirs' - - let p = prefix installDirs - prefixRelative l = isJust (stripPrefix p l) - libPaths - | relative && - prefixRelative relDir = map (\l -> - if prefixRelative l - then shortRelativePath relDir l - else l - ) allDepLibDirsC - | otherwise = allDepLibDirsC - - return libPaths - where - -- 'canonicalizePath' fails on UNIX when the directory does not exists. - -- So just don't canonicalize when it doesn't exist. - canonicalizePathNoFail p = do - exists <- doesDirectoryExist p - if exists - then canonicalizePath p - else return p - --- | Get all module names that needed to be built by GHC; i.e., all --- of these 'ModuleName's have interface files associated with them --- that need to be installed. -allLibModules :: Library -> ComponentLocalBuildInfo -> [ModuleName] -allLibModules lib clbi = - ordNub $ - explicitLibModules lib ++ - case clbi of - LibComponentLocalBuildInfo { componentInstantiatedWith = insts } -> map fst insts - _ -> [] - --- ----------------------------------------------------------------------------- --- Wrappers for a couple functions from InstallDirs - --- | Backwards compatibility function which computes the InstallDirs --- assuming that @$libname@ points to the public library (or some fake --- package identifier if there is no public library.) 
IF AT ALL --- POSSIBLE, please use 'absoluteComponentInstallDirs' instead. -absoluteInstallDirs :: PackageDescription -> LocalBuildInfo - -> CopyDest - -> InstallDirs FilePath -absoluteInstallDirs pkg lbi copydest = - absoluteComponentInstallDirs pkg lbi (localUnitId lbi) copydest - --- | See 'InstallDirs.absoluteInstallDirs'. -absoluteComponentInstallDirs :: PackageDescription -> LocalBuildInfo - -> UnitId - -> CopyDest - -> InstallDirs FilePath -absoluteComponentInstallDirs pkg lbi uid copydest = - InstallDirs.absoluteInstallDirs - (packageId pkg) - uid - (compilerInfo (compiler lbi)) - copydest - (hostPlatform lbi) - (installDirTemplates lbi) - --- | Backwards compatibility function which computes the InstallDirs --- assuming that @$libname@ points to the public library (or some fake --- package identifier if there is no public library.) IF AT ALL --- POSSIBLE, please use 'prefixRelativeComponentInstallDirs' instead. -prefixRelativeInstallDirs :: PackageId -> LocalBuildInfo - -> InstallDirs (Maybe FilePath) -prefixRelativeInstallDirs pkg_descr lbi = - prefixRelativeComponentInstallDirs pkg_descr lbi (localUnitId lbi) - --- |See 'InstallDirs.prefixRelativeInstallDirs' -prefixRelativeComponentInstallDirs :: PackageId -> LocalBuildInfo - -> UnitId - -> InstallDirs (Maybe FilePath) -prefixRelativeComponentInstallDirs pkg_descr lbi uid = - InstallDirs.prefixRelativeInstallDirs - (packageId pkg_descr) - uid - (compilerInfo (compiler lbi)) - (hostPlatform lbi) - (installDirTemplates lbi) - -substPathTemplate :: PackageId -> LocalBuildInfo - -> UnitId - -> PathTemplate -> FilePath -substPathTemplate pkgid lbi uid = fromPathTemplate - . 
( InstallDirs.substPathTemplate env ) - where env = initialPathTemplateEnv - pkgid - uid - (compilerInfo (compiler lbi)) - (hostPlatform lbi) - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/PackageIndex.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/PackageIndex.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/PackageIndex.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/PackageIndex.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,723 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE FlexibleInstances #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.PackageIndex --- Copyright : (c) David Himmelstrup 2005, --- Bjorn Bringert 2007, --- Duncan Coutts 2008-2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- An index of packages whose primary key is 'UnitId'. Public libraries --- are additionally indexed by 'PackageName' and 'Version'. --- Technically, these are an index of *units* (so we should eventually --- rename it to 'UnitIndex'); but in the absence of internal libraries --- or Backpack each unit is equivalent to a package. --- --- While 'PackageIndex' is parametric over what it actually records, --- it is in fact only ever instantiated with a single element: --- The 'InstalledPackageIndex' (defined here) contains a graph of --- 'InstalledPackageInfo's representing the packages in a --- package database stack. It is used in a variety of ways: --- --- * The primary use to let Cabal access the same installed --- package database which is used by GHC during compilation. --- For example, this data structure is used by 'ghc-pkg' --- and 'Cabal' to do consistency checks on the database --- (are the references closed). 
--- --- * Given a set of dependencies, we can compute the transitive --- closure of dependencies. This is to check if the versions --- of packages are consistent, and also needed by multiple --- tools (Haddock must be explicitly told about the every --- transitive package to do cross-package linking; --- preprocessors must know about the include paths of all --- transitive dependencies.) --- --- This 'PackageIndex' is NOT to be confused with --- 'Distribution.Client.PackageIndex', which indexes packages only by --- 'PackageName' (this makes it suitable for indexing source packages, --- for which we don't know 'UnitId's.) --- -module Distribution.Simple.PackageIndex ( - -- * Package index data type - InstalledPackageIndex, - PackageIndex, - - -- * Creating an index - fromList, - - -- * Updates - merge, - - insert, - - deleteUnitId, - deleteSourcePackageId, - deletePackageName, --- deleteDependency, - - -- * Queries - - -- ** Precise lookups - lookupUnitId, - lookupComponentId, - lookupSourcePackageId, - lookupPackageId, - lookupPackageName, - lookupDependency, - lookupInternalDependency, - - -- ** Case-insensitive searches - searchByName, - SearchResult(..), - searchByNameSubstring, - - -- ** Bulk queries - allPackages, - allPackagesByName, - allPackagesBySourcePackageId, - allPackagesBySourcePackageIdAndLibName, - - -- ** Special queries - brokenPackages, - dependencyClosure, - reverseDependencyClosure, - topologicalOrder, - reverseTopologicalOrder, - dependencyInconsistencies, - dependencyCycles, - dependencyGraph, - moduleNameIndex, - - -- * Backwards compatibility - deleteInstalledPackageId, - lookupInstalledPackageId, - ) where - -import Prelude () -import Distribution.Compat.Prelude hiding (lookup) -import qualified Data.Map.Strict as Map - -import Distribution.Package -import Distribution.Backpack -import Distribution.ModuleName -import qualified Distribution.InstalledPackageInfo as IPI -import Distribution.Version -import Distribution.Simple.Utils -import 
Distribution.Types.UnqualComponentName - -import Control.Exception (assert) -import Data.Array ((!)) -import qualified Data.Array as Array -import qualified Data.Graph as Graph -import Data.List as List ( groupBy, deleteBy, deleteFirstsBy ) -import qualified Data.Tree as Tree -import Control.Monad -import Distribution.Compat.Stack - --- | The collection of information about packages from one or more 'PackageDB's. --- These packages generally should have an instance of 'PackageInstalled' --- --- Packages are uniquely identified in by their 'UnitId', they can --- also be efficiently looked up by package name or by name and version. --- -data PackageIndex a = PackageIndex { - -- The primary index. Each InstalledPackageInfo record is uniquely identified - -- by its UnitId. - -- - unitIdIndex :: !(Map UnitId a), - - -- This auxiliary index maps package names (case-sensitively) to all the - -- versions and instances of that package. This allows us to find all - -- versions satisfying a dependency. - -- - -- It is a three-level index. The first level is the package name, - -- the second is the package version and the final level is instances - -- of the same package version. These are unique by UnitId - -- and are kept in preference order. - -- - -- FIXME: Clarify what "preference order" means. Check that this invariant is - -- preserved. See #1463 for discussion. - packageIdIndex :: !(Map (PackageName, Maybe UnqualComponentName) (Map Version [a])) - - } deriving (Eq, Generic, Show, Read) - -instance Binary a => Binary (PackageIndex a) - --- | The default package index which contains 'InstalledPackageInfo'. Normally --- use this. 
-type InstalledPackageIndex = PackageIndex IPI.InstalledPackageInfo - -instance Monoid (PackageIndex IPI.InstalledPackageInfo) where - mempty = PackageIndex Map.empty Map.empty - mappend = (<>) - --save one mappend with empty in the common case: - mconcat [] = mempty - mconcat xs = foldr1 mappend xs - -instance Semigroup (PackageIndex IPI.InstalledPackageInfo) where - (<>) = merge - -{-# NOINLINE invariant #-} -invariant :: WithCallStack (InstalledPackageIndex -> Bool) -invariant (PackageIndex pids pnames) = - -- trace (show pids' ++ "\n" ++ show pnames') $ - pids' == pnames' - where - pids' = map installedUnitId (Map.elems pids) - pnames' = sort - [ assert pinstOk (installedUnitId pinst) - | ((pname, plib), pvers) <- Map.toList pnames - , let pversOk = not (Map.null pvers) - , (pver, pinsts) <- assert pversOk $ Map.toList pvers - , let pinsts' = sortBy (comparing installedUnitId) pinsts - pinstsOk = all (\g -> length g == 1) - (groupBy (equating installedUnitId) pinsts') - , pinst <- assert pinstsOk $ pinsts' - , let pinstOk = packageName pinst == pname - && packageVersion pinst == pver - && IPI.sourceLibName pinst == plib - ] - -- If you see this invariant failing (ie the assert in mkPackageIndex below) - -- then one thing to check is if it is happening in fromList. Check if the - -- second list above (the sort [...] bit) is ending up with duplicates. This - -- has been observed in practice once due to a messed up ghc-pkg db. How/why - -- it became messed up was not discovered. - - --- --- * Internal helpers --- - -mkPackageIndex :: WithCallStack (Map UnitId IPI.InstalledPackageInfo - -> Map (PackageName, Maybe UnqualComponentName) - (Map Version [IPI.InstalledPackageInfo]) - -> InstalledPackageIndex) -mkPackageIndex pids pnames = assert (invariant index) index - where index = PackageIndex pids pnames - - --- --- * Construction --- - --- | Build an index out of a bunch of packages. --- --- If there are duplicates by 'UnitId' then later ones mask earlier --- ones. 
--- -fromList :: [IPI.InstalledPackageInfo] -> InstalledPackageIndex -fromList pkgs = mkPackageIndex pids pnames - where - pids = Map.fromList [ (installedUnitId pkg, pkg) | pkg <- pkgs ] - pnames = - Map.fromList - [ (liftM2 (,) packageName IPI.sourceLibName (head pkgsN), pvers) - | pkgsN <- groupBy (equating (liftM2 (,) packageName IPI.sourceLibName)) - . sortBy (comparing (liftM3 (,,) packageName IPI.sourceLibName packageVersion)) - $ pkgs - , let pvers = - Map.fromList - [ (packageVersion (head pkgsNV), - nubBy (equating installedUnitId) (reverse pkgsNV)) - | pkgsNV <- groupBy (equating packageVersion) pkgsN - ] - ] - --- --- * Updates --- - --- | Merge two indexes. --- --- Packages from the second mask packages from the first if they have the exact --- same 'UnitId'. --- --- For packages with the same source 'PackageId', packages from the second are --- \"preferred\" over those from the first. Being preferred means they are top --- result when we do a lookup by source 'PackageId'. This is the mechanism we --- use to prefer user packages over global packages. --- -merge :: InstalledPackageIndex -> InstalledPackageIndex - -> InstalledPackageIndex -merge (PackageIndex pids1 pnames1) (PackageIndex pids2 pnames2) = - mkPackageIndex (Map.unionWith (\_ y -> y) pids1 pids2) - (Map.unionWith (Map.unionWith mergeBuckets) pnames1 pnames2) - where - -- Packages in the second list mask those in the first, however preferred - -- packages go first in the list. - mergeBuckets xs ys = ys ++ (xs \\ ys) - (\\) = deleteFirstsBy (equating installedUnitId) - - --- | Inserts a single package into the index. --- --- This is equivalent to (but slightly quicker than) using 'mappend' or --- 'merge' with a singleton index. 
--- -insert :: IPI.InstalledPackageInfo -> InstalledPackageIndex -> InstalledPackageIndex -insert pkg (PackageIndex pids pnames) = - mkPackageIndex pids' pnames' - - where - pids' = Map.insert (installedUnitId pkg) pkg pids - pnames' = insertPackageName pnames - insertPackageName = - Map.insertWith (\_ -> insertPackageVersion) - (packageName pkg, IPI.sourceLibName pkg) - (Map.singleton (packageVersion pkg) [pkg]) - - insertPackageVersion = - Map.insertWith (\_ -> insertPackageInstance) - (packageVersion pkg) [pkg] - - insertPackageInstance pkgs = - pkg : deleteBy (equating installedUnitId) pkg pkgs - - --- | Removes a single installed package from the index. --- -deleteUnitId :: UnitId -> InstalledPackageIndex - -> InstalledPackageIndex -deleteUnitId ipkgid original@(PackageIndex pids pnames) = - case Map.updateLookupWithKey (\_ _ -> Nothing) ipkgid pids of - (Nothing, _) -> original - (Just spkgid, pids') -> mkPackageIndex pids' - (deletePkgName spkgid pnames) - - where - deletePkgName spkgid = - Map.update (deletePkgVersion spkgid) (packageName spkgid, IPI.sourceLibName spkgid) - - deletePkgVersion spkgid = - (\m -> if Map.null m then Nothing else Just m) - . Map.update deletePkgInstance (packageVersion spkgid) - - deletePkgInstance = - (\xs -> if null xs then Nothing else Just xs) - . List.deleteBy (\_ pkg -> installedUnitId pkg == ipkgid) undefined - --- | Backwards compatibility wrapper for Cabal pre-1.24. -{-# DEPRECATED deleteInstalledPackageId "Use deleteUnitId instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} -deleteInstalledPackageId :: UnitId -> InstalledPackageIndex - -> InstalledPackageIndex -deleteInstalledPackageId = deleteUnitId - --- | Removes all packages with this source 'PackageId' from the index. 
--- -deleteSourcePackageId :: PackageId -> InstalledPackageIndex - -> InstalledPackageIndex -deleteSourcePackageId pkgid original@(PackageIndex pids pnames) = - -- NB: Doesn't delete internal packages - case Map.lookup (packageName pkgid, Nothing) pnames of - Nothing -> original - Just pvers -> case Map.lookup (packageVersion pkgid) pvers of - Nothing -> original - Just pkgs -> mkPackageIndex - (foldl' (flip (Map.delete . installedUnitId)) pids pkgs) - (deletePkgName pnames) - where - deletePkgName = - Map.update deletePkgVersion (packageName pkgid, Nothing) - - deletePkgVersion = - (\m -> if Map.null m then Nothing else Just m) - . Map.delete (packageVersion pkgid) - - --- | Removes all packages with this (case-sensitive) name from the index. --- --- NB: Does NOT delete internal libraries from this package. --- -deletePackageName :: PackageName -> InstalledPackageIndex - -> InstalledPackageIndex -deletePackageName name original@(PackageIndex pids pnames) = - case Map.lookup (name, Nothing) pnames of - Nothing -> original - Just pvers -> mkPackageIndex - (foldl' (flip (Map.delete . installedUnitId)) pids - (concat (Map.elems pvers))) - (Map.delete (name, Nothing) pnames) - -{- --- | Removes all packages satisfying this dependency from the index. --- -deleteDependency :: Dependency -> PackageIndex -> PackageIndex -deleteDependency (Dependency name verstionRange) = - delete' name (\pkg -> packageVersion pkg `withinRange` verstionRange) --} - --- --- * Bulk queries --- - --- | Get all the packages from the index. --- -allPackages :: PackageIndex a -> [a] -allPackages = Map.elems . unitIdIndex - --- | Get all the packages from the index. --- --- They are grouped by package name (case-sensitively). --- --- (Doesn't include private libraries.) 
--- -allPackagesByName :: PackageIndex a -> [(PackageName, [a])] -allPackagesByName index = - [ (pkgname, concat (Map.elems pvers)) - | ((pkgname, Nothing), pvers) <- Map.toList (packageIdIndex index) ] - --- | Get all the packages from the index. --- --- They are grouped by source package id (package name and version). --- --- (Doesn't include private libraries) --- -allPackagesBySourcePackageId :: HasUnitId a => PackageIndex a - -> [(PackageId, [a])] -allPackagesBySourcePackageId index = - [ (packageId ipkg, ipkgs) - | ((_, Nothing), pvers) <- Map.toList (packageIdIndex index) - , ipkgs@(ipkg:_) <- Map.elems pvers ] - --- | Get all the packages from the index. --- --- They are grouped by source package id and library name. --- --- This DOES include internal libraries. -allPackagesBySourcePackageIdAndLibName :: HasUnitId a => PackageIndex a - -> [((PackageId, Maybe UnqualComponentName), [a])] -allPackagesBySourcePackageIdAndLibName index = - [ ((packageId ipkg, ln), ipkgs) - | ((_, ln), pvers) <- Map.toList (packageIdIndex index) - , ipkgs@(ipkg:_) <- Map.elems pvers ] - --- --- * Lookups --- - --- | Does a lookup by unit identifier. --- --- Since multiple package DBs mask each other by 'UnitId', --- then we get back at most one package. --- -lookupUnitId :: PackageIndex a -> UnitId - -> Maybe a -lookupUnitId index uid = Map.lookup uid (unitIdIndex index) - --- | Does a lookup by component identifier. In the absence --- of Backpack, this is just a 'lookupUnitId'. --- -lookupComponentId :: PackageIndex a -> ComponentId - -> Maybe a -lookupComponentId index cid = - Map.lookup (newSimpleUnitId cid) (unitIdIndex index) - --- | Backwards compatibility for Cabal pre-1.24. -{-# DEPRECATED lookupInstalledPackageId "Use lookupUnitId instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} -lookupInstalledPackageId :: PackageIndex a -> UnitId - -> Maybe a -lookupInstalledPackageId = lookupUnitId - - --- | Does a lookup by source package id (name & version). 
--- --- There can be multiple installed packages with the same source 'PackageId' --- but different 'UnitId'. They are returned in order of --- preference, with the most preferred first. --- -lookupSourcePackageId :: PackageIndex a -> PackageId -> [a] -lookupSourcePackageId index pkgid = - -- Do not lookup internal libraries - case Map.lookup (packageName pkgid, Nothing) (packageIdIndex index) of - Nothing -> [] - Just pvers -> case Map.lookup (packageVersion pkgid) pvers of - Nothing -> [] - Just pkgs -> pkgs -- in preference order - --- | Convenient alias of 'lookupSourcePackageId', but assuming only --- one package per package ID. -lookupPackageId :: PackageIndex a -> PackageId -> Maybe a -lookupPackageId index pkgid = case lookupSourcePackageId index pkgid of - [] -> Nothing - [pkg] -> Just pkg - _ -> error "Distribution.Simple.PackageIndex: multiple matches found" - --- | Does a lookup by source package name. --- -lookupPackageName :: PackageIndex a -> PackageName - -> [(Version, [a])] -lookupPackageName index name = - -- Do not match internal libraries - case Map.lookup (name, Nothing) (packageIdIndex index) of - Nothing -> [] - Just pvers -> Map.toList pvers - - --- | Does a lookup by source package name and a range of versions. --- --- We get back any number of versions of the specified package name, all --- satisfying the version range constraint. --- --- This does NOT work for internal dependencies, DO NOT use this --- function on those; use 'lookupInternalDependency' instead. --- --- INVARIANT: List of eligible 'IPI.InstalledPackageInfo' is non-empty. --- -lookupDependency :: InstalledPackageIndex -> Dependency - -> [(Version, [IPI.InstalledPackageInfo])] -lookupDependency index dep = - -- Yes, a little bit of a misnomer here! - lookupInternalDependency index dep Nothing - --- | Does a lookup by source package name and a range of versions. 
--- --- We get back any number of versions of the specified package name, all --- satisfying the version range constraint. --- --- INVARIANT: List of eligible 'IPI.InstalledPackageInfo' is non-empty. --- -lookupInternalDependency :: InstalledPackageIndex -> Dependency - -> Maybe UnqualComponentName - -> [(Version, [IPI.InstalledPackageInfo])] -lookupInternalDependency index (Dependency name versionRange) libn = - case Map.lookup (name, libn) (packageIdIndex index) of - Nothing -> [] - Just pvers -> [ (ver, pkgs') - | (ver, pkgs) <- Map.toList pvers - , ver `withinRange` versionRange - , let pkgs' = filter eligible pkgs - -- Enforce the invariant - , not (null pkgs') - ] - where - -- When we select for dependencies, we ONLY want to pick up indefinite - -- packages, or packages with no instantiations. We'll do mix-in - -- linking to improve any such package into an instantiated one - -- later. - eligible pkg = IPI.indefinite pkg || null (IPI.instantiatedWith pkg) - - --- --- * Case insensitive name lookups --- - --- | Does a case-insensitive search by package name. --- --- If there is only one package that compares case-insensitively to this name --- then the search is unambiguous and we get back all versions of that package. --- If several match case-insensitively but one matches exactly then it is also --- unambiguous. --- --- If however several match case-insensitively and none match exactly then we --- have an ambiguous result, and we get back all the versions of all the --- packages. The list of ambiguous results is split by exact package name. So --- it is a non-empty list of non-empty lists. --- -searchByName :: PackageIndex a -> String -> SearchResult [a] -searchByName index name = - -- Don't match internal packages - case [ pkgs | pkgs@((pname, Nothing),_) <- Map.toList (packageIdIndex index) - , lowercase (unPackageName pname) == lname ] of - [] -> None - [(_,pvers)] -> Unambiguous (concat (Map.elems pvers)) - pkgss -> case find ((mkPackageName name ==) . 
fst . fst) pkgss of - Just (_,pvers) -> Unambiguous (concat (Map.elems pvers)) - Nothing -> Ambiguous (map (concat . Map.elems . snd) pkgss) - where lname = lowercase name - -data SearchResult a = None | Unambiguous a | Ambiguous [a] - --- | Does a case-insensitive substring search by package name. --- --- That is, all packages that contain the given string in their name. --- -searchByNameSubstring :: PackageIndex a -> String -> [a] -searchByNameSubstring index searchterm = - [ pkg - -- Don't match internal packages - | ((pname, Nothing), pvers) <- Map.toList (packageIdIndex index) - , lsearchterm `isInfixOf` lowercase (unPackageName pname) - , pkgs <- Map.elems pvers - , pkg <- pkgs ] - where lsearchterm = lowercase searchterm - - --- --- * Special queries --- - --- None of the stuff below depends on the internal representation of the index. --- - --- | Find if there are any cycles in the dependency graph. If there are no --- cycles the result is @[]@. --- --- This actually computes the strongly connected components. So it gives us a --- list of groups of packages where within each group they all depend on each --- other, directly or indirectly. --- -dependencyCycles :: PackageInstalled a => PackageIndex a -> [[a]] -dependencyCycles index = - [ vs | Graph.CyclicSCC vs <- Graph.stronglyConnComp adjacencyList ] - where - adjacencyList = [ (pkg, installedUnitId pkg, installedDepends pkg) - | pkg <- allPackages index ] - - --- | All packages that have immediate dependencies that are not in the index. --- --- Returns such packages along with the dependencies that they're missing. --- -brokenPackages :: PackageInstalled a => PackageIndex a - -> [(a, [UnitId])] -brokenPackages index = - [ (pkg, missing) - | pkg <- allPackages index - , let missing = [ pkg' | pkg' <- installedDepends pkg - , isNothing (lookupUnitId index pkg') ] - , not (null missing) ] - --- | Tries to take the transitive closure of the package dependencies. 
--- --- If the transitive closure is complete then it returns that subset of the --- index. Otherwise it returns the broken packages as in 'brokenPackages'. --- --- * Note that if the result is @Right []@ it is because at least one of --- the original given 'PackageId's do not occur in the index. --- -dependencyClosure :: InstalledPackageIndex - -> [UnitId] - -> Either (InstalledPackageIndex) - [(IPI.InstalledPackageInfo, [UnitId])] -dependencyClosure index pkgids0 = case closure mempty [] pkgids0 of - (completed, []) -> Left completed - (completed, _) -> Right (brokenPackages completed) - where - closure completed failed [] = (completed, failed) - closure completed failed (pkgid:pkgids) = case lookupUnitId index pkgid of - Nothing -> closure completed (pkgid:failed) pkgids - Just pkg -> case lookupUnitId completed (installedUnitId pkg) of - Just _ -> closure completed failed pkgids - Nothing -> closure completed' failed pkgids' - where completed' = insert pkg completed - pkgids' = installedDepends pkg ++ pkgids - --- | Takes the transitive closure of the packages reverse dependencies. --- --- * The given 'PackageId's must be in the index. --- -reverseDependencyClosure :: PackageInstalled a => PackageIndex a - -> [UnitId] - -> [a] -reverseDependencyClosure index = - map vertexToPkg - . concatMap Tree.flatten - . Graph.dfs reverseDepGraph - . map (fromMaybe noSuchPkgId . pkgIdToVertex) - - where - (depGraph, vertexToPkg, pkgIdToVertex) = dependencyGraph index - reverseDepGraph = Graph.transposeG depGraph - noSuchPkgId = error "reverseDependencyClosure: package is not in the graph" - -topologicalOrder :: PackageInstalled a => PackageIndex a -> [a] -topologicalOrder index = map toPkgId - . Graph.topSort - $ graph - where (graph, toPkgId, _) = dependencyGraph index - -reverseTopologicalOrder :: PackageInstalled a => PackageIndex a -> [a] -reverseTopologicalOrder index = map toPkgId - . Graph.topSort - . 
Graph.transposeG - $ graph - where (graph, toPkgId, _) = dependencyGraph index - --- | Builds a graph of the package dependencies. --- --- Dependencies on other packages that are not in the index are discarded. --- You can check if there are any such dependencies with 'brokenPackages'. --- -dependencyGraph :: PackageInstalled a => PackageIndex a - -> (Graph.Graph, - Graph.Vertex -> a, - UnitId -> Maybe Graph.Vertex) -dependencyGraph index = (graph, vertex_to_pkg, id_to_vertex) - where - graph = Array.listArray bounds - [ [ v | Just v <- map id_to_vertex (installedDepends pkg) ] - | pkg <- pkgs ] - - pkgs = sortBy (comparing packageId) (allPackages index) - vertices = zip (map installedUnitId pkgs) [0..] - vertex_map = Map.fromList vertices - id_to_vertex pid = Map.lookup pid vertex_map - - vertex_to_pkg vertex = pkgTable ! vertex - - pkgTable = Array.listArray bounds pkgs - topBound = length pkgs - 1 - bounds = (0, topBound) - --- | We maintain the invariant that, for any 'DepUniqueKey', there --- is only one instance of the package in our database. -type DepUniqueKey = (PackageName, Maybe UnqualComponentName, Map ModuleName OpenModule) - --- | Given a package index where we assume we want to use all the packages --- (use 'dependencyClosure' if you need to get such a index subset) find out --- if the dependencies within it use consistent versions of each package. --- Return all cases where multiple packages depend on different versions of --- some other package. --- --- Each element in the result is a package name along with the packages that --- depend on it and the versions they require. These are guaranteed to be --- distinct. --- -dependencyInconsistencies :: InstalledPackageIndex - -- At DepUniqueKey... - -> [(DepUniqueKey, - -- There were multiple packages (BAD!) 
- [(UnitId, - -- And here are the packages which - -- immediately depended on it - [IPI.InstalledPackageInfo])])] -dependencyInconsistencies index = do - (dep_key, insts_map) <- Map.toList inverseIndex - let insts = Map.toList insts_map - guard (length insts >= 2) - return (dep_key, insts) - where - inverseIndex :: Map DepUniqueKey (Map UnitId [IPI.InstalledPackageInfo]) - inverseIndex = Map.fromListWith (Map.unionWith (++)) $ do - pkg <- allPackages index - dep_ipid <- installedDepends pkg - Just dep <- [lookupUnitId index dep_ipid] - let dep_key = (packageName dep, IPI.sourceLibName dep, - Map.fromList (IPI.instantiatedWith dep)) - return (dep_key, Map.singleton dep_ipid [pkg]) - --- | A rough approximation of GHC's module finder, takes a --- 'InstalledPackageIndex' and turns it into a map from module names to their --- source packages. It's used to initialize the @build-deps@ field in @cabal --- init@. -moduleNameIndex :: InstalledPackageIndex -> Map ModuleName [IPI.InstalledPackageInfo] -moduleNameIndex index = - Map.fromListWith (++) $ do - pkg <- allPackages index - IPI.ExposedModule m reexport <- IPI.exposedModules pkg - case reexport of - Nothing -> return (m, [pkg]) - Just (OpenModuleVar _) -> [] - Just (OpenModule _ m') | m == m' -> [] - | otherwise -> return (m', [pkg]) - -- The heuristic is this: we want to prefer the original package - -- which originally exported a module. However, if a reexport - -- also *renamed* the module (m /= m'), then we have to use the - -- downstream package, since the upstream package has the wrong - -- module name! 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/PreProcess/Unlit.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/PreProcess/Unlit.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/PreProcess/Unlit.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/PreProcess/Unlit.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,167 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.PreProcess.Unlit --- Copyright : ... --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Remove the \"literal\" markups from a Haskell source file, including --- \"@>@\", \"@\\begin{code}@\", \"@\\end{code}@\", and \"@#@\" - --- This version is interesting because instead of striping comment lines, it --- turns them into "-- " style comments. This allows using haddock markup --- in literate scripts without having to use "> --" prefix. - -module Distribution.Simple.PreProcess.Unlit (unlit,plain) where - -import Prelude () -import Distribution.Compat.Prelude - -import Data.List (mapAccumL) - -data Classified = BirdTrack String | Blank String | Ordinary String - | Line !Int String | CPP String - | BeginCode | EndCode - -- output only: - | Error String | Comment String - --- | No unliteration. 
-plain :: String -> String -> String -plain _ hs = hs - -classify :: String -> Classified -classify ('>':s) = BirdTrack s -classify ('#':s) = case tokens s of - (line:file:_) | all isDigit line - && length file >= 2 - && head file == '"' - && last file == '"' - -> Line (read line) (tail (init file)) -- TODO:eradicateNoParse - _ -> CPP s - where tokens = unfoldr $ \str -> case lex str of - (t@(_:_), str'):_ -> Just (t, str') - _ -> Nothing -classify ('\\':s) - | "begin{code}" `isPrefixOf` s = BeginCode - | "end{code}" `isPrefixOf` s = EndCode -classify s | all isSpace s = Blank s -classify s = Ordinary s - --- So the weird exception for comment indenting is to make things work with --- haddock, see classifyAndCheckForBirdTracks below. -unclassify :: Bool -> Classified -> String -unclassify _ (BirdTrack s) = ' ':s -unclassify _ (Blank s) = s -unclassify _ (Ordinary s) = s -unclassify _ (Line n file) = "# " ++ show n ++ " " ++ show file -unclassify _ (CPP s) = '#':s -unclassify True (Comment "") = " --" -unclassify True (Comment s) = " -- " ++ s -unclassify False (Comment "") = "--" -unclassify False (Comment s) = "-- " ++ s -unclassify _ _ = internalError - --- | 'unlit' takes a filename (for error reports), and transforms the --- given string, to eliminate the literate comments from the program text. -unlit :: FilePath -> String -> Either String String -unlit file input = - let (usesBirdTracks, classified) = classifyAndCheckForBirdTracks - . inlines - $ input - in either (Left . unlines . map (unclassify usesBirdTracks)) - Right - . checkErrors - . reclassify - $ classified - - where - -- So haddock requires comments and code to align, since it treats comments - -- as following the layout rule. This is a pain for us since bird track - -- style literate code typically gets indented by two since ">" is replaced - -- by " " and people usually use one additional space of indent ie - -- "> then the code". 
On the other hand we cannot just go and indent all - -- the comments by two since that does not work for latex style literate - -- code. So the hacky solution we use here is that if we see any bird track - -- style code then we'll indent all comments by two, otherwise by none. - -- Of course this will not work for mixed latex/bird track .lhs files but - -- nobody does that, it's silly and specifically recommended against in the - -- H98 unlit spec. - -- - classifyAndCheckForBirdTracks = - flip mapAccumL False $ \seenBirdTrack line -> - let classification = classify line - in (seenBirdTrack || isBirdTrack classification, classification) - - isBirdTrack (BirdTrack _) = True - isBirdTrack _ = False - - checkErrors ls = case [ e | Error e <- ls ] of - [] -> Left ls - (message:_) -> Right (f ++ ":" ++ show n ++ ": " ++ message) - where (f, n) = errorPos file 1 ls - errorPos f n [] = (f, n) - errorPos f n (Error _:_) = (f, n) - errorPos _ _ (Line n' f':ls) = errorPos f' n' ls - errorPos f n (_ :ls) = errorPos f (n+1) ls - --- Here we model a state machine, with each state represented by --- a local function. We only have four states (well, five, --- if you count the error state), but the rules --- to transition between then are not so simple. --- Would it be simpler to have more states? --- --- Each state represents the type of line that was last read --- i.e. are we in a comment section, or a latex-code section, --- or a bird-code section, etc? 
-reclassify :: [Classified] -> [Classified] -reclassify = blank -- begin in blank state - where - latex [] = [] - latex (EndCode :ls) = Blank "" : comment ls - latex (BeginCode :_ ) = [Error "\\begin{code} in code section"] - latex (BirdTrack l:ls) = Ordinary ('>':l) : latex ls - latex ( l:ls) = l : latex ls - - blank [] = [] - blank (EndCode :_ ) = [Error "\\end{code} without \\begin{code}"] - blank (BeginCode :ls) = Blank "" : latex ls - blank (BirdTrack l:ls) = BirdTrack l : bird ls - blank (Ordinary l:ls) = Comment l : comment ls - blank ( l:ls) = l : blank ls - - bird [] = [] - bird (EndCode :_ ) = [Error "\\end{code} without \\begin{code}"] - bird (BeginCode :ls) = Blank "" : latex ls - bird (Blank l :ls) = Blank l : blank ls - bird (Ordinary _:_ ) = [Error "program line before comment line"] - bird ( l:ls) = l : bird ls - - comment [] = [] - comment (EndCode :_ ) = [Error "\\end{code} without \\begin{code}"] - comment (BeginCode :ls) = Blank "" : latex ls - comment (CPP l :ls) = CPP l : comment ls - comment (BirdTrack _:_ ) = [Error "comment line before program line"] - -- a blank line and another ordinary line following a comment - -- will be treated as continuing the comment. Otherwise it's - -- then end of the comment, with a blank line. - comment (Blank l:ls@(Ordinary _:_)) = Comment l : comment ls - comment (Blank l:ls) = Blank l : blank ls - comment (Line n f :ls) = Line n f : comment ls - comment (Ordinary l:ls) = Comment l : comment ls - comment (Comment _: _) = internalError - comment (Error _: _) = internalError - --- Re-implementation of 'lines', for better efficiency (but decreased laziness). --- Also, importantly, accepts non-standard DOS and Mac line ending characters. 
-inlines :: String -> [String] -inlines xs = lines' xs id - where - lines' [] acc = [acc []] - lines' ('\^M':'\n':s) acc = acc [] : lines' s id -- DOS - lines' ('\^M':s) acc = acc [] : lines' s id -- MacOS - lines' ('\n':s) acc = acc [] : lines' s id -- Unix - lines' (c:s) acc = lines' s (acc . (c:)) - -internalError :: a -internalError = error "unlit: internal error" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/PreProcess.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/PreProcess.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/PreProcess.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/PreProcess.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,752 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.PreProcess --- Copyright : (c) 2003-2005, Isaac Jones, Malcolm Wallace --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This defines a 'PreProcessor' abstraction which represents a pre-processor --- that can transform one kind of file into another. There is also a --- 'PPSuffixHandler' which is a combination of a file extension and a function --- for configuring a 'PreProcessor'. It defines a bunch of known built-in --- preprocessors like @cpp@, @cpphs@, @c2hs@, @hsc2hs@, @happy@, @alex@ etc and --- lists them in 'knownSuffixHandlers'. On top of this it provides a function --- for actually preprocessing some sources given a bunch of known suffix --- handlers. This module is not as good as it could be, it could really do with --- a rewrite to address some of the problems we have with pre-processors. 
- -module Distribution.Simple.PreProcess (preprocessComponent, preprocessExtras, - knownSuffixHandlers, ppSuffixes, - PPSuffixHandler, PreProcessor(..), - mkSimplePreProcessor, runSimplePreProcessor, - ppCpp, ppCpp', ppGreenCard, ppC2hs, ppHsc2hs, - ppHappy, ppAlex, ppUnlit, platformDefines - ) - where - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.Compat.Stack - -import Distribution.Simple.PreProcess.Unlit -import Distribution.Backpack.DescribeUnitId -import Distribution.Package -import qualified Distribution.ModuleName as ModuleName -import Distribution.ModuleName (ModuleName) -import Distribution.PackageDescription as PD -import qualified Distribution.InstalledPackageInfo as Installed -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.CCompiler -import Distribution.Simple.Compiler -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.BuildPaths -import Distribution.Simple.Utils -import Distribution.Simple.Program -import Distribution.Simple.Program.ResponseFile -import Distribution.Simple.Test.LibV09 -import Distribution.System -import Distribution.Text -import Distribution.Version -import Distribution.Verbosity -import Distribution.Types.ForeignLib -import Distribution.Types.UnqualComponentName - -import System.Directory (doesFileExist) -import System.Info (os, arch) -import System.FilePath (splitExtension, dropExtensions, (), (<.>), - takeDirectory, normalise, replaceExtension, - takeExtensions) - --- |The interface to a preprocessor, which may be implemented using an --- external program, but need not be. The arguments are the name of --- the input file, the name of the output file and a verbosity level. 
--- Here is a simple example that merely prepends a comment to the given --- source file: --- --- > ppTestHandler :: PreProcessor --- > ppTestHandler = --- > PreProcessor { --- > platformIndependent = True, --- > runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> --- > do info verbosity (inFile++" has been preprocessed to "++outFile) --- > stuff <- readFile inFile --- > writeFile outFile ("-- preprocessed as a test\n\n" ++ stuff) --- > return ExitSuccess --- --- We split the input and output file names into a base directory and the --- rest of the file name. The input base dir is the path in the list of search --- dirs that this file was found in. The output base dir is the build dir where --- all the generated source files are put. --- --- The reason for splitting it up this way is that some pre-processors don't --- simply generate one output .hs file from one input file but have --- dependencies on other generated files (notably c2hs, where building one --- .hs file may require reading other .chi files, and then compiling the .hs --- file may require reading a generated .h file). In these cases the generated --- files need to embed relative path names to each other (eg the generated .hs --- file mentions the .h file in the FFI imports). This path must be relative to --- the base directory where the generated files are located, it cannot be --- relative to the top level of the build tree because the compilers do not --- look for .h files relative to there, ie we do not use \"-I .\", instead we --- use \"-I dist\/build\" (or whatever dist dir has been set by the user) --- --- Most pre-processors do not care of course, so mkSimplePreProcessor and --- runSimplePreProcessor functions handle the simple case. --- -data PreProcessor = PreProcessor { - - -- Is the output of the pre-processor platform independent? eg happy output - -- is portable haskell but c2hs's output is platform dependent. 
- -- This matters since only platform independent generated code can be - -- included into a source tarball. - platformIndependent :: Bool, - - -- TODO: deal with pre-processors that have implementation dependent output - -- eg alex and happy have --ghc flags. However we can't really include - -- ghc-specific code into supposedly portable source tarballs. - - runPreProcessor :: (FilePath, FilePath) -- Location of the source file relative to a base dir - -> (FilePath, FilePath) -- Output file name, relative to an output base dir - -> Verbosity -- verbosity - -> IO () -- Should exit if the preprocessor fails - } - --- | Function to determine paths to possible extra C sources for a --- preprocessor: just takes the path to the build directory and uses --- this to search for C sources with names that match the --- preprocessor's output name format. -type PreProcessorExtras = FilePath -> IO [FilePath] - - -mkSimplePreProcessor :: (FilePath -> FilePath -> Verbosity -> IO ()) - -> (FilePath, FilePath) - -> (FilePath, FilePath) -> Verbosity -> IO () -mkSimplePreProcessor simplePP - (inBaseDir, inRelativeFile) - (outBaseDir, outRelativeFile) verbosity = simplePP inFile outFile verbosity - where inFile = normalise (inBaseDir inRelativeFile) - outFile = normalise (outBaseDir outRelativeFile) - -runSimplePreProcessor :: PreProcessor -> FilePath -> FilePath -> Verbosity - -> IO () -runSimplePreProcessor pp inFile outFile verbosity = - runPreProcessor pp (".", inFile) (".", outFile) verbosity - --- |A preprocessor for turning non-Haskell files with the given extension --- into plain Haskell source files. -type PPSuffixHandler - = (String, BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor) - --- | Apply preprocessors to the sources from 'hsSourceDirs' for a given --- component (lib, exe, or test suite). 
-preprocessComponent :: PackageDescription - -> Component - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> Bool - -> Verbosity - -> [PPSuffixHandler] - -> IO () -preprocessComponent pd comp lbi clbi isSrcDist verbosity handlers = do - -- NB: never report instantiation here; we'll report it properly when - -- building. - setupMessage' verbosity "Preprocessing" (packageId pd) - (componentLocalName clbi) (Nothing :: Maybe [(ModuleName, Module)]) - case comp of - (CLib lib@Library{ libBuildInfo = bi }) -> do - let dirs = hsSourceDirs bi ++ [autogenComponentModulesDir lbi clbi - ,autogenPackageModulesDir lbi] - for_ (map ModuleName.toFilePath $ allLibModules lib clbi) $ - pre dirs (componentBuildDir lbi clbi) (localHandlers bi) - (CFLib flib@ForeignLib { foreignLibBuildInfo = bi, foreignLibName = nm }) -> do - let nm' = unUnqualComponentName nm - let flibDir = buildDir lbi nm' nm' ++ "-tmp" - dirs = hsSourceDirs bi ++ [autogenComponentModulesDir lbi clbi - ,autogenPackageModulesDir lbi] - for_ (map ModuleName.toFilePath $ foreignLibModules flib) $ - pre dirs flibDir (localHandlers bi) - (CExe exe@Executable { buildInfo = bi, exeName = nm }) -> do - let nm' = unUnqualComponentName nm - let exeDir = buildDir lbi nm' nm' ++ "-tmp" - dirs = hsSourceDirs bi ++ [autogenComponentModulesDir lbi clbi - ,autogenPackageModulesDir lbi] - for_ (map ModuleName.toFilePath $ otherModules bi) $ - pre dirs exeDir (localHandlers bi) - pre (hsSourceDirs bi) exeDir (localHandlers bi) $ - dropExtensions (modulePath exe) - CTest test@TestSuite{ testName = nm } -> do - let nm' = unUnqualComponentName nm - case testInterface test of - TestSuiteExeV10 _ f -> - preProcessTest test f $ buildDir lbi nm' nm' ++ "-tmp" - TestSuiteLibV09 _ _ -> do - let testDir = buildDir lbi stubName test - stubName test ++ "-tmp" - writeSimpleTestStub test testDir - preProcessTest test (stubFilePath test) testDir - TestSuiteUnsupported tt -> - die' verbosity $ "No support for preprocessing test " - ++ "suite 
type " ++ display tt - CBench bm@Benchmark{ benchmarkName = nm } -> do - let nm' = unUnqualComponentName nm - case benchmarkInterface bm of - BenchmarkExeV10 _ f -> - preProcessBench bm f $ buildDir lbi nm' nm' ++ "-tmp" - BenchmarkUnsupported tt -> - die' verbosity $ "No support for preprocessing benchmark " - ++ "type " ++ display tt - where - builtinHaskellSuffixes = ["hs", "lhs", "hsig", "lhsig"] - builtinCSuffixes = cSourceExtensions - builtinSuffixes = builtinHaskellSuffixes ++ builtinCSuffixes - localHandlers bi = [(ext, h bi lbi clbi) | (ext, h) <- handlers] - pre dirs dir lhndlrs fp = - preprocessFile dirs dir isSrcDist fp verbosity builtinSuffixes lhndlrs - preProcessTest test = preProcessComponent (testBuildInfo test) - (testModules test) - preProcessBench bm = preProcessComponent (benchmarkBuildInfo bm) - (benchmarkModules bm) - preProcessComponent bi modules exePath dir = do - let biHandlers = localHandlers bi - sourceDirs = hsSourceDirs bi ++ [ autogenComponentModulesDir lbi clbi - , autogenPackageModulesDir lbi ] - sequence_ [ preprocessFile sourceDirs dir isSrcDist - (ModuleName.toFilePath modu) verbosity builtinSuffixes - biHandlers - | modu <- modules ] - preprocessFile (dir : (hsSourceDirs bi)) dir isSrcDist - (dropExtensions $ exePath) verbosity - builtinSuffixes biHandlers - ---TODO: try to list all the modules that could not be found --- not just the first one. It's annoying and slow due to the need --- to reconfigure after editing the .cabal file each time. - --- |Find the first extension of the file that exists, and preprocess it --- if required. 
-preprocessFile - :: [FilePath] -- ^source directories - -> FilePath -- ^build directory - -> Bool -- ^preprocess for sdist - -> FilePath -- ^module file name - -> Verbosity -- ^verbosity - -> [String] -- ^builtin suffixes - -> [(String, PreProcessor)] -- ^possible preprocessors - -> IO () -preprocessFile searchLoc buildLoc forSDist baseFile verbosity builtinSuffixes handlers = do - -- look for files in the various source dirs with this module name - -- and a file extension of a known preprocessor - psrcFiles <- findFileWithExtension' (map fst handlers) searchLoc baseFile - case psrcFiles of - -- no preprocessor file exists, look for an ordinary source file - -- just to make sure one actually exists at all for this module. - -- Note: by looking in the target/output build dir too, we allow - -- source files to appear magically in the target build dir without - -- any corresponding "real" source file. This lets custom Setup.hs - -- files generate source modules directly into the build dir without - -- the rest of the build system being aware of it (somewhat dodgy) - Nothing -> do - bsrcFiles <- findFileWithExtension builtinSuffixes (buildLoc : searchLoc) baseFile - case bsrcFiles of - Nothing -> - die' verbosity $ "can't find source for " ++ baseFile - ++ " in " ++ intercalate ", " searchLoc - _ -> return () - -- found a pre-processable file in one of the source dirs - Just (psrcLoc, psrcRelFile) -> do - let (srcStem, ext) = splitExtension psrcRelFile - psrcFile = psrcLoc psrcRelFile - pp = fromMaybe (error "Distribution.Simple.PreProcess: Just expected") - (lookup (tailNotNull ext) handlers) - -- Preprocessing files for 'sdist' is different from preprocessing - -- for 'build'. When preprocessing for sdist we preprocess to - -- avoid that the user has to have the preprocessors available. 
- -- ATM, we don't have a way to specify which files are to be - -- preprocessed and which not, so for sdist we only process - -- platform independent files and put them into the 'buildLoc' - -- (which we assume is set to the temp. directory that will become - -- the tarball). - --TODO: eliminate sdist variant, just supply different handlers - when (not forSDist || forSDist && platformIndependent pp) $ do - -- look for existing pre-processed source file in the dest dir to - -- see if we really have to re-run the preprocessor. - ppsrcFiles <- findFileWithExtension builtinSuffixes [buildLoc] baseFile - recomp <- case ppsrcFiles of - Nothing -> return True - Just ppsrcFile -> - psrcFile `moreRecentFile` ppsrcFile - when recomp $ do - let destDir = buildLoc dirName srcStem - createDirectoryIfMissingVerbose verbosity True destDir - runPreProcessorWithHsBootHack pp - (psrcLoc, psrcRelFile) - (buildLoc, srcStem <.> "hs") - - where - dirName = takeDirectory - tailNotNull [] = [] - tailNotNull x = tail x - - -- FIXME: This is a somewhat nasty hack. GHC requires that hs-boot files - -- be in the same place as the hs files, so if we put the hs file in dist/ - -- then we need to copy the hs-boot file there too. This should probably be - -- done another way. Possibly we should also be looking for .lhs-boot - -- files, but I think that preprocessors only produce .hs files. 
- runPreProcessorWithHsBootHack pp - (inBaseDir, inRelativeFile) - (outBaseDir, outRelativeFile) = do - runPreProcessor pp - (inBaseDir, inRelativeFile) - (outBaseDir, outRelativeFile) verbosity - - exists <- doesFileExist inBoot - when exists $ copyFileVerbose verbosity inBoot outBoot - - where - inBoot = replaceExtension inFile "hs-boot" - outBoot = replaceExtension outFile "hs-boot" - - inFile = normalise (inBaseDir inRelativeFile) - outFile = normalise (outBaseDir outRelativeFile) - --- ------------------------------------------------------------ --- * known preprocessors --- ------------------------------------------------------------ - -ppGreenCard :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor -ppGreenCard _ lbi _ - = PreProcessor { - platformIndependent = False, - runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> - runDbProgram verbosity greencardProgram (withPrograms lbi) - (["-tffi", "-o" ++ outFile, inFile]) - } - --- This one is useful for preprocessors that can't handle literate source. --- We also need a way to chain preprocessors. 
-ppUnlit :: PreProcessor -ppUnlit = - PreProcessor { - platformIndependent = True, - runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> - withUTF8FileContents inFile $ \contents -> - either (writeUTF8File outFile) (die' verbosity) (unlit inFile contents) - } - -ppCpp :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor -ppCpp = ppCpp' [] - -ppCpp' :: [String] -> BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor -ppCpp' extraArgs bi lbi clbi = - case compilerFlavor (compiler lbi) of - GHC -> ppGhcCpp ghcProgram (const True) args bi lbi clbi - GHCJS -> ppGhcCpp ghcjsProgram (const True) args bi lbi clbi - _ -> ppCpphs args bi lbi clbi - where cppArgs = getCppOptions bi lbi - args = cppArgs ++ extraArgs - -ppGhcCpp :: Program -> (Version -> Bool) - -> [String] -> BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor -ppGhcCpp program xHs extraArgs _bi lbi clbi = - PreProcessor { - platformIndependent = False, - runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do - (prog, version, _) <- requireProgramVersion verbosity - program anyVersion (withPrograms lbi) - runProgram verbosity prog $ - ["-E", "-cpp"] - -- This is a bit of an ugly hack. We're going to - -- unlit the file ourselves later on if appropriate, - -- so we need GHC not to unlit it now or it'll get - -- double-unlitted. In the future we might switch to - -- using cpphs --unlit instead. 
- ++ (if xHs version then ["-x", "hs"] else []) - ++ [ "-optP-include", "-optP"++ (autogenComponentModulesDir lbi clbi cppHeaderName) ] - ++ ["-o", outFile, inFile] - ++ extraArgs - } - -ppCpphs :: [String] -> BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor -ppCpphs extraArgs _bi lbi clbi = - PreProcessor { - platformIndependent = False, - runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do - (cpphsProg, cpphsVersion, _) <- requireProgramVersion verbosity - cpphsProgram anyVersion (withPrograms lbi) - runProgram verbosity cpphsProg $ - ("-O" ++ outFile) : inFile - : "--noline" : "--strip" - : (if cpphsVersion >= mkVersion [1,6] - then ["--include="++ (autogenComponentModulesDir lbi clbi cppHeaderName)] - else []) - ++ extraArgs - } - -ppHsc2hs :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor -ppHsc2hs bi lbi clbi = - PreProcessor { - platformIndependent = False, - runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do - (gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi) - (hsc2hsProg, hsc2hsVersion, _) <- requireProgramVersion verbosity - hsc2hsProgram anyVersion (withPrograms lbi) - -- See Trac #13896 and https://github.com/haskell/cabal/issues/3122. - let hsc2hsSupportsResponseFiles = hsc2hsVersion >= mkVersion [0,68,4] - pureArgs = genPureArgs gccProg inFile outFile - if hsc2hsSupportsResponseFiles - then withResponseFile - verbosity - defaultTempFileOptions - (takeDirectory outFile) - "hsc2hs-response.txt" - Nothing - pureArgs - (\responseFileName -> - runProgram verbosity hsc2hsProg ["@"++ responseFileName]) - else runProgram verbosity hsc2hsProg pureArgs - } - where - -- Returns a list of command line arguments that can either be passed - -- directly, or via a response file. 
- genPureArgs :: ConfiguredProgram -> String -> String -> [String] - genPureArgs gccProg inFile outFile = - [ "--cc=" ++ programPath gccProg - , "--ld=" ++ programPath gccProg ] - - -- Additional gcc options - ++ [ "--cflag=" ++ opt | opt <- programDefaultArgs gccProg - ++ programOverrideArgs gccProg ] - ++ [ "--lflag=" ++ opt | opt <- programDefaultArgs gccProg - ++ programOverrideArgs gccProg ] - - -- OSX frameworks: - ++ [ what ++ "=-F" ++ opt - | isOSX - , opt <- nub (concatMap Installed.frameworkDirs pkgs) - , what <- ["--cflag", "--lflag"] ] - ++ [ "--lflag=" ++ arg - | isOSX - , opt <- PD.frameworks bi ++ concatMap Installed.frameworks pkgs - , arg <- ["-framework", opt] ] - - -- Note that on ELF systems, wherever we use -L, we must also use -R - -- because presumably that -L dir is not on the normal path for the - -- system's dynamic linker. This is needed because hsc2hs works by - -- compiling a C program and then running it. - - ++ [ "--cflag=" ++ opt | opt <- platformDefines lbi ] - - -- Options from the current package: - ++ [ "--cflag=-I" ++ dir | dir <- PD.includeDirs bi ] - ++ [ "--cflag=-I" ++ buildDir lbi dir | dir <- PD.includeDirs bi ] - ++ [ "--cflag=" ++ opt | opt <- PD.ccOptions bi - ++ PD.cppOptions bi - -- hsc2hs uses the C ABI - -- We assume that there are only C sources - -- and C++ functions are exported via a C - -- interface and wrapped in a C source file. - -- Therefore we do not supply C++ flags - -- because there will not be C++ sources. - -- - -- DO NOT add PD.cxxOptions unless this changes! 
- ] - ++ [ "--cflag=" ++ opt | opt <- - [ "-I" ++ autogenComponentModulesDir lbi clbi, - "-I" ++ autogenPackageModulesDir lbi, - "-include", autogenComponentModulesDir lbi clbi cppHeaderName ] ] - ++ [ "--lflag=-L" ++ opt | opt <- PD.extraLibDirs bi ] - ++ [ "--lflag=-Wl,-R," ++ opt | isELF - , opt <- PD.extraLibDirs bi ] - ++ [ "--lflag=-l" ++ opt | opt <- PD.extraLibs bi ] - ++ [ "--lflag=" ++ opt | opt <- PD.ldOptions bi ] - - -- Options from dependent packages - ++ [ "--cflag=" ++ opt - | pkg <- pkgs - , opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ] - ++ [ opt | opt <- Installed.ccOptions pkg ] ] - ++ [ "--lflag=" ++ opt - | pkg <- pkgs - , opt <- [ "-L" ++ opt | opt <- Installed.libraryDirs pkg ] - ++ [ "-Wl,-R," ++ opt | isELF - , opt <- Installed.libraryDirs pkg ] - ++ [ "-l" ++ opt | opt <- Installed.extraLibraries pkg ] - ++ [ opt | opt <- Installed.ldOptions pkg ] ] - ++ ["-o", outFile, inFile] - - hacked_index = packageHacks (installedPkgs lbi) - -- Look only at the dependencies of the current component - -- being built! This relies on 'installedPkgs' maintaining - -- 'InstalledPackageInfo' for internal deps too; see #2971. - pkgs = PackageIndex.topologicalOrder $ - case PackageIndex.dependencyClosure hacked_index - (map fst (componentPackageDeps clbi)) of - Left index' -> index' - Right inf -> - error ("ppHsc2hs: broken closure: " ++ show inf) - isOSX = case buildOS of OSX -> True; _ -> False - isELF = case buildOS of OSX -> False; Windows -> False; AIX -> False; _ -> True; - packageHacks = case compilerFlavor (compiler lbi) of - GHC -> hackRtsPackage - GHCJS -> hackRtsPackage - _ -> id - -- We don't link in the actual Haskell libraries of our dependencies, so - -- the -u flags in the ldOptions of the rts package mean linking fails on - -- OS X (its ld is a tad stricter than gnu ld). 
Thus we remove the - -- ldOptions for GHC's rts package: - hackRtsPackage index = - case PackageIndex.lookupPackageName index (mkPackageName "rts") of - [(_, [rts])] - -> PackageIndex.insert rts { Installed.ldOptions = [] } index - _ -> error "No (or multiple) ghc rts package is registered!!" - -ppHsc2hsExtras :: PreProcessorExtras -ppHsc2hsExtras buildBaseDir = filter ("_hsc.c" `isSuffixOf`) `fmap` - getDirectoryContentsRecursive buildBaseDir - -ppC2hs :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor -ppC2hs bi lbi clbi = - PreProcessor { - platformIndependent = False, - runPreProcessor = \(inBaseDir, inRelativeFile) - (outBaseDir, outRelativeFile) verbosity -> do - (c2hsProg, _, _) <- requireProgramVersion verbosity - c2hsProgram (orLaterVersion (mkVersion [0,15])) - (withPrograms lbi) - (gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi) - runProgram verbosity c2hsProg $ - - -- Options from the current package: - [ "--cpp=" ++ programPath gccProg, "--cppopts=-E" ] - ++ [ "--cppopts=" ++ opt | opt <- getCppOptions bi lbi ] - ++ [ "--cppopts=-include" ++ (autogenComponentModulesDir lbi clbi cppHeaderName) ] - ++ [ "--include=" ++ outBaseDir ] - - -- Options from dependent packages - ++ [ "--cppopts=" ++ opt - | pkg <- pkgs - , opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ] - ++ [ opt | opt@('-':c:_) <- Installed.ccOptions pkg - -- c2hs uses the C ABI - -- We assume that there are only C sources - -- and C++ functions are exported via a C - -- interface and wrapped in a C source file. - -- Therefore we do not supply C++ flags - -- because there will not be C++ sources. - -- - -- - -- DO NOT add Installed.cxxOptions unless this changes! 
- , c `elem` "DIU" ] ] - --TODO: install .chi files for packages, so we can --include - -- those dirs here, for the dependencies - - -- input and output files - ++ [ "--output-dir=" ++ outBaseDir - , "--output=" ++ outRelativeFile - , inBaseDir inRelativeFile ] - } - where - pkgs = PackageIndex.topologicalOrder (installedPkgs lbi) - -ppC2hsExtras :: PreProcessorExtras -ppC2hsExtras d = filter (\p -> takeExtensions p == ".chs.c") `fmap` - getDirectoryContentsRecursive d - ---TODO: perhaps use this with hsc2hs too ---TODO: remove cc-options from cpphs for cabal-version: >= 1.10 ---TODO: Refactor and add separate getCppOptionsForHs, getCppOptionsForCxx, & getCppOptionsForC --- instead of combining all these cases in a single function. This blind combination can --- potentially lead to compilation inconsistencies. -getCppOptions :: BuildInfo -> LocalBuildInfo -> [String] -getCppOptions bi lbi - = platformDefines lbi - ++ cppOptions bi - ++ ["-I" ++ dir | dir <- PD.includeDirs bi] - ++ [opt | opt@('-':c:_) <- PD.ccOptions bi ++ PD.cxxOptions bi, c `elem` "DIU"] - -platformDefines :: LocalBuildInfo -> [String] -platformDefines lbi = - case compilerFlavor comp of - GHC -> - ["-D__GLASGOW_HASKELL__=" ++ versionInt version] ++ - ["-D" ++ os ++ "_BUILD_OS=1"] ++ - ["-D" ++ arch ++ "_BUILD_ARCH=1"] ++ - map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++ - map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr - GHCJS -> - compatGlasgowHaskell ++ - ["-D__GHCJS__=" ++ versionInt version] ++ - ["-D" ++ os ++ "_BUILD_OS=1"] ++ - ["-D" ++ arch ++ "_BUILD_ARCH=1"] ++ - map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++ - map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr - HaskellSuite {} -> - ["-D__HASKELL_SUITE__"] ++ - map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++ - map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr - _ -> [] - where - comp = compiler lbi - Platform hostArch hostOS = hostPlatform lbi - version = compilerVersion comp - compatGlasgowHaskell = - 
maybe [] (\v -> ["-D__GLASGOW_HASKELL__=" ++ versionInt v]) - (compilerCompatVersion GHC comp) - -- TODO: move this into the compiler abstraction - -- FIXME: this forces GHC's crazy 4.8.2 -> 408 convention on all - -- the other compilers. Check if that's really what they want. - versionInt :: Version -> String - versionInt v = case versionNumbers v of - [] -> "1" - [n] -> show n - n1:n2:_ -> - -- 6.8.x -> 608 - -- 6.10.x -> 610 - let s1 = show n1 - s2 = show n2 - middle = case s2 of - _ : _ : _ -> "" - _ -> "0" - in s1 ++ middle ++ s2 - - osStr = case hostOS of - Linux -> ["linux"] - Windows -> ["mingw32"] - OSX -> ["darwin"] - FreeBSD -> ["freebsd"] - OpenBSD -> ["openbsd"] - NetBSD -> ["netbsd"] - DragonFly -> ["dragonfly"] - Solaris -> ["solaris2"] - AIX -> ["aix"] - HPUX -> ["hpux"] - IRIX -> ["irix"] - HaLVM -> [] - IOS -> ["ios"] - Android -> ["android"] - Ghcjs -> ["ghcjs"] - Hurd -> ["hurd"] - OtherOS _ -> [] - archStr = case hostArch of - I386 -> ["i386"] - X86_64 -> ["x86_64"] - PPC -> ["powerpc"] - PPC64 -> ["powerpc64"] - Sparc -> ["sparc"] - Arm -> ["arm"] - AArch64 -> ["aarch64"] - Mips -> ["mips"] - SH -> [] - IA64 -> ["ia64"] - S390 -> ["s390"] - Alpha -> ["alpha"] - Hppa -> ["hppa"] - Rs6000 -> ["rs6000"] - M68k -> ["m68k"] - Vax -> ["vax"] - JavaScript -> ["javascript"] - OtherArch _ -> [] - -ppHappy :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor -ppHappy _ lbi _ = pp { platformIndependent = True } - where pp = standardPP lbi happyProgram (hcFlags hc) - hc = compilerFlavor (compiler lbi) - hcFlags GHC = ["-agc"] - hcFlags GHCJS = ["-agc"] - hcFlags _ = [] - -ppAlex :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor -ppAlex _ lbi _ = pp { platformIndependent = True } - where pp = standardPP lbi alexProgram (hcFlags hc) - hc = compilerFlavor (compiler lbi) - hcFlags GHC = ["-g"] - hcFlags GHCJS = ["-g"] - hcFlags _ = [] - -standardPP :: LocalBuildInfo -> Program -> [String] -> PreProcessor 
-standardPP lbi prog args = - PreProcessor { - platformIndependent = False, - runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> - runDbProgram verbosity prog (withPrograms lbi) - (args ++ ["-o", outFile, inFile]) - } - --- |Convenience function; get the suffixes of these preprocessors. -ppSuffixes :: [ PPSuffixHandler ] -> [String] -ppSuffixes = map fst - --- |Standard preprocessors: GreenCard, c2hs, hsc2hs, happy, alex and cpphs. -knownSuffixHandlers :: [ PPSuffixHandler ] -knownSuffixHandlers = - [ ("gc", ppGreenCard) - , ("chs", ppC2hs) - , ("hsc", ppHsc2hs) - , ("x", ppAlex) - , ("y", ppHappy) - , ("ly", ppHappy) - , ("cpphs", ppCpp) - ] - --- |Standard preprocessors with possible extra C sources: c2hs, hsc2hs. -knownExtrasHandlers :: [ PreProcessorExtras ] -knownExtrasHandlers = [ ppC2hsExtras, ppHsc2hsExtras ] - --- | Find any extra C sources generated by preprocessing that need to --- be added to the component (addresses issue #238). -preprocessExtras :: Verbosity - -> Component - -> LocalBuildInfo - -> IO [FilePath] -preprocessExtras verbosity comp lbi = case comp of - CLib _ -> pp $ buildDir lbi - (CExe Executable { exeName = nm }) -> do - let nm' = unUnqualComponentName nm - pp $ buildDir lbi nm' nm' ++ "-tmp" - (CFLib ForeignLib { foreignLibName = nm }) -> do - let nm' = unUnqualComponentName nm - pp $ buildDir lbi nm' nm' ++ "-tmp" - CTest test -> do - let nm' = unUnqualComponentName $ testName test - case testInterface test of - TestSuiteExeV10 _ _ -> - pp $ buildDir lbi nm' nm' ++ "-tmp" - TestSuiteLibV09 _ _ -> - pp $ buildDir lbi stubName test stubName test ++ "-tmp" - TestSuiteUnsupported tt -> die' verbosity $ "No support for preprocessing test " - ++ "suite type " ++ display tt - CBench bm -> do - let nm' = unUnqualComponentName $ benchmarkName bm - case benchmarkInterface bm of - BenchmarkExeV10 _ _ -> - pp $ buildDir lbi nm' nm' ++ "-tmp" - BenchmarkUnsupported tt -> - die' verbosity $ "No support for preprocessing benchmark 
" - ++ "type " ++ display tt - where - pp :: FilePath -> IO [FilePath] - pp dir = (map (dir ) . filter not_sub . concat) - <$> for knownExtrasHandlers - (withLexicalCallStack (\f -> f dir)) - -- TODO: This is a terrible hack to work around #3545 while we don't - -- reorganize the directory layout. Basically, for the main - -- library, we might accidentally pick up autogenerated sources for - -- our subcomponents, because they are all stored as subdirectories - -- in dist/build. This is a cheap and cheerful check to prevent - -- this from happening. It is not particularly correct; for example - -- if a user has a test suite named foobar and puts their C file in - -- foobar/foo.c, this test will incorrectly exclude it. But I - -- didn't want to break BC... - not_sub p = and [ not (pre `isPrefixOf` p) | pre <- component_dirs ] - component_dirs = component_names (localPkgDescr lbi) - -- TODO: libify me - component_names pkg_descr = fmap unUnqualComponentName $ - mapMaybe libName (subLibraries pkg_descr) ++ - map exeName (executables pkg_descr) ++ - map testName (testSuites pkg_descr) ++ - map benchmarkName (benchmarks pkg_descr) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Ar.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Ar.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Ar.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Ar.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,191 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE NondecreasingIndentation #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Ar --- Copyright : Duncan Coutts 2009 --- --- Maintainer : cabal-devel@haskell.org --- 
Portability : portable --- --- This module provides an library interface to the @ar@ program. - -module Distribution.Simple.Program.Ar ( - createArLibArchive, - multiStageProgramInvocation - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import qualified Data.ByteString as BS -import qualified Data.ByteString.Char8 as BS8 -import Distribution.Compat.CopyFile (filesEqual) -import Distribution.Simple.Compiler (arResponseFilesSupported) -import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(..)) -import Distribution.Simple.Program - ( ProgramInvocation, arProgram, requireProgram ) -import Distribution.Simple.Program.ResponseFile - ( withResponseFile ) -import Distribution.Simple.Program.Run - ( programInvocation, multiStageProgramInvocation - , runProgramInvocation ) -import Distribution.Simple.Setup - ( fromFlagOrDefault, configUseResponseFiles ) -import Distribution.Simple.Utils - ( defaultTempFileOptions, dieWithLocation', withTempDirectory ) -import Distribution.System - ( Arch(..), OS(..), Platform(..) ) -import Distribution.Verbosity - ( Verbosity, deafening, verbose ) -import System.Directory (doesFileExist, renameFile) -import System.FilePath ((), splitFileName) -import System.IO - ( Handle, IOMode(ReadWriteMode), SeekMode(AbsoluteSeek) - , hFileSize, hSeek, withBinaryFile ) - --- | Call @ar@ to create a library archive from a bunch of object files. --- -createArLibArchive :: Verbosity -> LocalBuildInfo - -> FilePath -> [FilePath] -> IO () -createArLibArchive verbosity lbi targetPath files = do - (ar, _) <- requireProgram verbosity arProgram progDb - - let (targetDir, targetName) = splitFileName targetPath - withTempDirectory verbosity targetDir "objs" $ \ tmpDir -> do - let tmpPath = tmpDir targetName - - -- The args to use with "ar" are actually rather subtle and system-dependent. - -- In particular we have the following issues: - -- - -- -- On OS X, "ar q" does not make an archive index. Archives with no - -- index cannot be used. 
- -- - -- -- GNU "ar r" will not let us add duplicate objects, only "ar q" lets us - -- do that. We have duplicates because of modules like "A.M" and "B.M" - -- both make an object file "M.o" and ar does not consider the directory. - -- - -- Our solution is to use "ar r" in the simple case when one call is enough. - -- When we need to call ar multiple times we use "ar q" and for the last - -- call on OSX we use "ar qs" so that it'll make the index. - - let simpleArgs = case hostOS of - OSX -> ["-r", "-s"] - _ -> ["-r"] - - initialArgs = ["-q"] - finalArgs = case hostOS of - OSX -> ["-q", "-s"] - _ -> ["-q"] - - extraArgs = verbosityOpts verbosity ++ [tmpPath] - - simple = programInvocation ar (simpleArgs ++ extraArgs) - initial = programInvocation ar (initialArgs ++ extraArgs) - middle = initial - final = programInvocation ar (finalArgs ++ extraArgs) - - oldVersionManualOverride = - fromFlagOrDefault False $ configUseResponseFiles $ configFlags lbi - responseArgumentsNotSupported = - not (arResponseFilesSupported (compiler lbi)) - - invokeWithResponesFile :: FilePath -> ProgramInvocation - invokeWithResponesFile atFile = - programInvocation ar $ - simpleArgs ++ extraArgs ++ ['@' : atFile] - - if oldVersionManualOverride || responseArgumentsNotSupported - then - sequence_ - [ runProgramInvocation verbosity inv - | inv <- multiStageProgramInvocation - simple (initial, middle, final) files ] - else - withResponseFile verbosity defaultTempFileOptions tmpDir "ar.rsp" Nothing files $ - \path -> runProgramInvocation verbosity $ invokeWithResponesFile path - - unless (hostArch == Arm -- See #1537 - || hostOS == AIX) $ -- AIX uses its own "ar" format variant - wipeMetadata verbosity tmpPath - equal <- filesEqual tmpPath targetPath - unless equal $ renameFile tmpPath targetPath - - where - progDb = withPrograms lbi - Platform hostArch hostOS = hostPlatform lbi - verbosityOpts v - | v >= deafening = ["-v"] - | v >= verbose = [] - | otherwise = ["-c"] -- Do not warn if library 
had to be created. - --- | @ar@ by default includes various metadata for each object file in their --- respective headers, so the output can differ for the same inputs, making --- it difficult to avoid re-linking. GNU @ar@(1) has a deterministic mode --- (@-D@) flag that always writes zero for the mtime, UID and GID, and 0644 --- for the file mode. However detecting whether @-D@ is supported seems --- rather harder than just re-implementing this feature. -wipeMetadata :: Verbosity -> FilePath -> IO () -wipeMetadata verbosity path = do - -- Check for existence first (ReadWriteMode would create one otherwise) - exists <- doesFileExist path - unless exists $ wipeError "Temporary file disappeared" - withBinaryFile path ReadWriteMode $ \ h -> hFileSize h >>= wipeArchive h - - where - wipeError msg = dieWithLocation' verbosity path Nothing $ - "Distribution.Simple.Program.Ar.wipeMetadata: " ++ msg - archLF = "!\x0a" -- global magic, 8 bytes - x60LF = "\x60\x0a" -- header magic, 2 bytes - metadata = BS.concat - [ "0 " -- mtime, 12 bytes - , "0 " -- UID, 6 bytes - , "0 " -- GID, 6 bytes - , "0644 " -- mode, 8 bytes - ] - headerSize :: Int - headerSize = 60 - - -- http://en.wikipedia.org/wiki/Ar_(Unix)#File_format_details - wipeArchive :: Handle -> Integer -> IO () - wipeArchive h archiveSize = do - global <- BS.hGet h (BS.length archLF) - unless (global == archLF) $ wipeError "Bad global header" - wipeHeader (toInteger $ BS.length archLF) - - where - wipeHeader :: Integer -> IO () - wipeHeader offset = case compare offset archiveSize of - EQ -> return () - GT -> wipeError (atOffset "Archive truncated") - LT -> do - header <- BS.hGet h headerSize - unless (BS.length header == headerSize) $ - wipeError (atOffset "Short header") - let magic = BS.drop 58 header - unless (magic == x60LF) . wipeError . 
atOffset $ - "Bad magic " ++ show magic ++ " in header" - - let name = BS.take 16 header - let size = BS.take 10 $ BS.drop 48 header - objSize <- case reads (BS8.unpack size) of - [(n, s)] | all isSpace s -> return n - _ -> wipeError (atOffset "Bad file size in header") - - let replacement = BS.concat [ name, metadata, size, magic ] - unless (BS.length replacement == headerSize) $ - wipeError (atOffset "Something has gone terribly wrong") - hSeek h AbsoluteSeek offset - BS.hPut h replacement - - let nextHeader = offset + toInteger headerSize + - -- Odd objects are padded with an extra '\x0a' - if odd objSize then objSize + 1 else objSize - hSeek h AbsoluteSeek nextHeader - wipeHeader nextHeader - - where - atOffset msg = msg ++ " at offset " ++ show offset diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Builtin.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Builtin.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Builtin.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Builtin.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,349 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Builtin --- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- The module defines all the known built-in 'Program's. --- --- Where possible we try to find their version numbers. 
--- -module Distribution.Simple.Program.Builtin ( - - -- * The collection of unconfigured and configured programs - builtinPrograms, - - -- * Programs that Cabal knows about - ghcProgram, - ghcPkgProgram, - runghcProgram, - ghcjsProgram, - ghcjsPkgProgram, - hmakeProgram, - jhcProgram, - haskellSuiteProgram, - haskellSuitePkgProgram, - uhcProgram, - gccProgram, - arProgram, - stripProgram, - happyProgram, - alexProgram, - hsc2hsProgram, - c2hsProgram, - cpphsProgram, - hscolourProgram, - doctestProgram, - haddockProgram, - greencardProgram, - ldProgram, - tarProgram, - cppProgram, - pkgConfigProgram, - hpcProgram, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Simple.Program.GHC -import Distribution.Simple.Program.Find -import Distribution.Simple.Program.Internal -import Distribution.Simple.Program.Run -import Distribution.Simple.Program.Types -import Distribution.Simple.Utils -import Distribution.Compat.Exception -import Distribution.Verbosity -import Distribution.Version - -import qualified Data.Map as Map - --- ------------------------------------------------------------ --- * Known programs --- ------------------------------------------------------------ - --- | The default list of programs. --- These programs are typically used internally to Cabal. 
-builtinPrograms :: [Program] -builtinPrograms = - [ - -- compilers and related progs - ghcProgram - , runghcProgram - , ghcPkgProgram - , ghcjsProgram - , ghcjsPkgProgram - , haskellSuiteProgram - , haskellSuitePkgProgram - , hmakeProgram - , jhcProgram - , uhcProgram - , hpcProgram - -- preprocessors - , hscolourProgram - , doctestProgram - , haddockProgram - , happyProgram - , alexProgram - , hsc2hsProgram - , c2hsProgram - , cpphsProgram - , greencardProgram - -- platform toolchain - , gccProgram - , arProgram - , stripProgram - , ldProgram - , tarProgram - -- configuration tools - , pkgConfigProgram - ] - -ghcProgram :: Program -ghcProgram = (simpleProgram "ghc") { - programFindVersion = findProgramVersion "--numeric-version" id, - - -- Workaround for https://ghc.haskell.org/trac/ghc/ticket/8825 - -- (spurious warning on non-english locales) - programPostConf = \_verbosity ghcProg -> - do let ghcProg' = ghcProg { - programOverrideEnv = ("LANGUAGE", Just "en") - : programOverrideEnv ghcProg - } - -- Only the 7.8 branch seems to be affected. Fixed in 7.8.4. 
- affectedVersionRange = intersectVersionRanges - (laterVersion $ mkVersion [7,8,0]) - (earlierVersion $ mkVersion [7,8,4]) - return $ maybe ghcProg - (\v -> if withinRange v affectedVersionRange - then ghcProg' else ghcProg) - (programVersion ghcProg), - - programNormaliseArgs = normaliseGhcArgs - } - -runghcProgram :: Program -runghcProgram = (simpleProgram "runghc") { - programFindVersion = findProgramVersion "--version" $ \str -> - case words str of - -- "runghc 7.10.3" - (_:ver:_) -> ver - _ -> "" - } - -ghcPkgProgram :: Program -ghcPkgProgram = (simpleProgram "ghc-pkg") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "ghc-pkg --version" gives a string like - -- "GHC package manager version 6.4.1" - case words str of - (_:_:_:_:ver:_) -> ver - _ -> "" - } - -ghcjsProgram :: Program -ghcjsProgram = (simpleProgram "ghcjs") { - programFindVersion = findProgramVersion "--numeric-ghcjs-version" id - } - --- note: version is the version number of the GHC version that ghcjs-pkg was built with -ghcjsPkgProgram :: Program -ghcjsPkgProgram = (simpleProgram "ghcjs-pkg") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "ghcjs-pkg --version" gives a string like - -- "GHCJS package manager version 6.4.1" - case words str of - (_:_:_:_:ver:_) -> ver - _ -> "" - } - -hmakeProgram :: Program -hmakeProgram = (simpleProgram "hmake") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "hmake --version" gives a string line - -- "/usr/local/bin/hmake: 3.13 (2006-11-01)" - case words str of - (_:ver:_) -> ver - _ -> "" - } - -jhcProgram :: Program -jhcProgram = (simpleProgram "jhc") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- invoking "jhc --version" gives a string like - -- "jhc 0.3.20080208 (wubgipkamcep-2) - -- compiled by ghc-6.8 on a x86_64 running linux" - case words str of - (_:ver:_) -> ver - _ -> "" - } - -uhcProgram :: Program -uhcProgram = (simpleProgram 
"uhc") { - programFindVersion = findProgramVersion "--version-dotted" id - } - -hpcProgram :: Program -hpcProgram = (simpleProgram "hpc") - { - programFindVersion = findProgramVersion "version" $ \str -> - case words str of - (_ : _ : _ : ver : _) -> ver - _ -> "" - } - --- This represents a haskell-suite compiler. Of course, the compiler --- itself probably is not called "haskell-suite", so this is not a real --- program. (But we don't know statically the name of the actual compiler, --- so this is the best we can do.) --- --- Having this Program value serves two purposes: --- --- 1. We can accept options for the compiler in the form of --- --- --haskell-suite-option(s)=... --- --- 2. We can find a program later using this static id (with --- requireProgram). --- --- The path to the real compiler is found and recorded in the ProgramDb --- during the configure phase. -haskellSuiteProgram :: Program -haskellSuiteProgram = (simpleProgram "haskell-suite") { - -- pretend that the program exists, otherwise it won't be in the - -- "configured" state - programFindLocation = \_verbosity _searchPath -> - return $ Just ("haskell-suite-dummy-location", []) - } - --- This represent a haskell-suite package manager. See the comments for --- haskellSuiteProgram. -haskellSuitePkgProgram :: Program -haskellSuitePkgProgram = (simpleProgram "haskell-suite-pkg") { - programFindLocation = \_verbosity _searchPath -> - return $ Just ("haskell-suite-pkg-dummy-location", []) - } - - -happyProgram :: Program -happyProgram = (simpleProgram "happy") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "happy --version" gives a string like - -- "Happy Version 1.16 Copyright (c) ...." 
- case words str of - (_:_:ver:_) -> ver - _ -> "" - } - -alexProgram :: Program -alexProgram = (simpleProgram "alex") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "alex --version" gives a string like - -- "Alex version 2.1.0, (c) 2003 Chris Dornan and Simon Marlow" - case words str of - (_:_:ver:_) -> takeWhile (\x -> isDigit x || x == '.') ver - _ -> "" - } - -gccProgram :: Program -gccProgram = (simpleProgram "gcc") { - programFindVersion = findProgramVersion "-dumpversion" id - } - -arProgram :: Program -arProgram = simpleProgram "ar" - -stripProgram :: Program -stripProgram = (simpleProgram "strip") { - programFindVersion = \verbosity -> - findProgramVersion "--version" stripExtractVersion (lessVerbose verbosity) - } - -hsc2hsProgram :: Program -hsc2hsProgram = (simpleProgram "hsc2hs") { - programFindVersion = - findProgramVersion "--version" $ \str -> - -- Invoking "hsc2hs --version" gives a string like "hsc2hs version 0.66" - case words str of - (_:_:ver:_) -> ver - _ -> "" - } - -c2hsProgram :: Program -c2hsProgram = (simpleProgram "c2hs") { - programFindVersion = findProgramVersion "--numeric-version" id - } - -cpphsProgram :: Program -cpphsProgram = (simpleProgram "cpphs") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "cpphs --version" gives a string like "cpphs 1.3" - case words str of - (_:ver:_) -> ver - _ -> "" - } - -hscolourProgram :: Program -hscolourProgram = (simpleProgram "hscolour") { - programFindLocation = \v p -> findProgramOnSearchPath v p "HsColour", - programFindVersion = findProgramVersion "-version" $ \str -> - -- Invoking "HsColour -version" gives a string like "HsColour 1.7" - case words str of - (_:ver:_) -> ver - _ -> "" - } - --- TODO: Ensure that doctest is built against the same GHC as the one --- that's being used. Same for haddock. @phadej pointed this out. 
-doctestProgram :: Program -doctestProgram = (simpleProgram "doctest") { - programFindLocation = \v p -> findProgramOnSearchPath v p "doctest" - , programFindVersion = findProgramVersion "--version" $ \str -> - -- "doctest version 0.11.2" - case words str of - (_:_:ver:_) -> ver - _ -> "" - } - -haddockProgram :: Program -haddockProgram = (simpleProgram "haddock") { - programFindVersion = findProgramVersion "--version" $ \str -> - -- Invoking "haddock --version" gives a string like - -- "Haddock version 0.8, (c) Simon Marlow 2006" - case words str of - (_:_:ver:_) -> takeWhile (`elem` ('.':['0'..'9'])) ver - _ -> "", - - programNormaliseArgs = \_ _ args -> args - } - -greencardProgram :: Program -greencardProgram = simpleProgram "greencard" - -ldProgram :: Program -ldProgram = simpleProgram "ld" - -tarProgram :: Program -tarProgram = (simpleProgram "tar") { - -- See #1901. Some versions of 'tar' (OpenBSD, NetBSD, ...) don't support the - -- '--format' option. - programPostConf = \verbosity tarProg -> do - tarHelpOutput <- getProgramInvocationOutput - verbosity (programInvocation tarProg ["--help"]) - -- Some versions of tar don't support '--help'. 
- `catchIO` (\_ -> return "") - let k = "Supports --format" - v = if ("--format" `isInfixOf` tarHelpOutput) then "YES" else "NO" - m = Map.insert k v (programProperties tarProg) - return $ tarProg { programProperties = m } - } - -cppProgram :: Program -cppProgram = simpleProgram "cpp" - -pkgConfigProgram :: Program -pkgConfigProgram = (simpleProgram "pkg-config") { - programFindVersion = findProgramVersion "--version" id - } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Db.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Db.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Db.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Db.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,485 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE DeriveDataTypeable #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Db --- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This provides a 'ProgramDb' type which holds configured and not-yet --- configured programs. It is the parameter to lots of actions elsewhere in --- Cabal that need to look up and run programs. If we had a Cabal monad, --- the 'ProgramDb' would probably be a reader or state component of it. --- --- One nice thing about using it is that any program that is --- registered with Cabal will get some \"configure\" and \".cabal\" --- helpers like --with-foo-args --foo-path= and extra-foo-args. --- --- There's also a hook for adding programs in a Setup.lhs script. See --- hookedPrograms in 'Distribution.Simple.UserHooks'. 
This gives a --- hook user the ability to get the above flags and such so that they --- don't have to write all the PATH logic inside Setup.lhs. - -module Distribution.Simple.Program.Db ( - -- * The collection of configured programs we can run - ProgramDb, - emptyProgramDb, - defaultProgramDb, - restoreProgramDb, - - -- ** Query and manipulate the program db - addKnownProgram, - addKnownPrograms, - lookupKnownProgram, - knownPrograms, - getProgramSearchPath, - setProgramSearchPath, - modifyProgramSearchPath, - userSpecifyPath, - userSpecifyPaths, - userMaybeSpecifyPath, - userSpecifyArgs, - userSpecifyArgss, - userSpecifiedArgs, - lookupProgram, - updateProgram, - configuredPrograms, - - -- ** Query and manipulate the program db - configureProgram, - configureAllKnownPrograms, - unconfigureProgram, - lookupProgramVersion, - reconfigurePrograms, - requireProgram, - requireProgramVersion, - - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Simple.Program.Types -import Distribution.Simple.Program.Find -import Distribution.Simple.Program.Builtin -import Distribution.Simple.Utils -import Distribution.Version -import Distribution.Text -import Distribution.Verbosity - -import Control.Monad (join) -import Data.Tuple (swap) -import qualified Data.Map as Map - --- ------------------------------------------------------------ --- * Programs database --- ------------------------------------------------------------ - --- | The configuration is a collection of information about programs. It --- contains information both about configured programs and also about programs --- that we are yet to configure. --- --- The idea is that we start from a collection of unconfigured programs and one --- by one we try to configure them at which point we move them into the --- configured collection. For unconfigured programs we record not just the --- 'Program' but also any user-provided arguments and location for the program. 
-data ProgramDb = ProgramDb { - unconfiguredProgs :: UnconfiguredProgs, - progSearchPath :: ProgramSearchPath, - configuredProgs :: ConfiguredProgs - } - deriving (Typeable) - -type UnconfiguredProgram = (Program, Maybe FilePath, [ProgArg]) -type UnconfiguredProgs = Map.Map String UnconfiguredProgram -type ConfiguredProgs = Map.Map String ConfiguredProgram - - -emptyProgramDb :: ProgramDb -emptyProgramDb = ProgramDb Map.empty defaultProgramSearchPath Map.empty - -defaultProgramDb :: ProgramDb -defaultProgramDb = restoreProgramDb builtinPrograms emptyProgramDb - - --- internal helpers: -updateUnconfiguredProgs :: (UnconfiguredProgs -> UnconfiguredProgs) - -> ProgramDb -> ProgramDb -updateUnconfiguredProgs update progdb = - progdb { unconfiguredProgs = update (unconfiguredProgs progdb) } - -updateConfiguredProgs :: (ConfiguredProgs -> ConfiguredProgs) - -> ProgramDb -> ProgramDb -updateConfiguredProgs update progdb = - progdb { configuredProgs = update (configuredProgs progdb) } - - --- Read & Show instances are based on listToFM - --- | Note that this instance does not preserve the known 'Program's. --- See 'restoreProgramDb' for details. --- -instance Show ProgramDb where - show = show . Map.toAscList . configuredProgs - --- | Note that this instance does not preserve the known 'Program's. --- See 'restoreProgramDb' for details. --- -instance Read ProgramDb where - readsPrec p s = - [ (emptyProgramDb { configuredProgs = Map.fromList s' }, r) - | (s', r) <- readsPrec p s ] - --- | Note that this instance does not preserve the known 'Program's. --- See 'restoreProgramDb' for details. --- -instance Binary ProgramDb where - put db = do - put (progSearchPath db) - put (configuredProgs db) - - get = do - searchpath <- get - progs <- get - return $! 
emptyProgramDb { - progSearchPath = searchpath, - configuredProgs = progs - } - - --- | The 'Read'\/'Show' and 'Binary' instances do not preserve all the --- unconfigured 'Programs' because 'Program' is not in 'Read'\/'Show' because --- it contains functions. So to fully restore a deserialised 'ProgramDb' use --- this function to add back all the known 'Program's. --- --- * It does not add the default programs, but you probably want them, use --- 'builtinPrograms' in addition to any extra you might need. --- -restoreProgramDb :: [Program] -> ProgramDb -> ProgramDb -restoreProgramDb = addKnownPrograms - - --- ------------------------------- --- Managing unconfigured programs - --- | Add a known program that we may configure later --- -addKnownProgram :: Program -> ProgramDb -> ProgramDb -addKnownProgram prog = updateUnconfiguredProgs $ - Map.insertWith combine (programName prog) (prog, Nothing, []) - where combine _ (_, path, args) = (prog, path, args) - - -addKnownPrograms :: [Program] -> ProgramDb -> ProgramDb -addKnownPrograms progs progdb = foldl' (flip addKnownProgram) progdb progs - - -lookupKnownProgram :: String -> ProgramDb -> Maybe Program -lookupKnownProgram name = - fmap (\(p,_,_)->p) . Map.lookup name . unconfiguredProgs - - -knownPrograms :: ProgramDb -> [(Program, Maybe ConfiguredProgram)] -knownPrograms progdb = - [ (p,p') | (p,_,_) <- Map.elems (unconfiguredProgs progdb) - , let p' = Map.lookup (programName p) (configuredProgs progdb) ] - --- | Get the current 'ProgramSearchPath' used by the 'ProgramDb'. --- This is the default list of locations where programs are looked for when --- configuring them. This can be overridden for specific programs (with --- 'userSpecifyPath'), and specific known programs can modify or ignore this --- search path in their own configuration code. --- -getProgramSearchPath :: ProgramDb -> ProgramSearchPath -getProgramSearchPath = progSearchPath - --- | Change the current 'ProgramSearchPath' used by the 'ProgramDb'. 
--- This will affect programs that are configured from here on, so you --- should usually set it before configuring any programs. --- -setProgramSearchPath :: ProgramSearchPath -> ProgramDb -> ProgramDb -setProgramSearchPath searchpath db = db { progSearchPath = searchpath } - --- | Modify the current 'ProgramSearchPath' used by the 'ProgramDb'. --- This will affect programs that are configured from here on, so you --- should usually modify it before configuring any programs. --- -modifyProgramSearchPath :: (ProgramSearchPath -> ProgramSearchPath) - -> ProgramDb - -> ProgramDb -modifyProgramSearchPath f db = - setProgramSearchPath (f $ getProgramSearchPath db) db - --- |User-specify this path. Basically override any path information --- for this program in the configuration. If it's not a known --- program ignore it. --- -userSpecifyPath :: String -- ^Program name - -> FilePath -- ^user-specified path to the program - -> ProgramDb -> ProgramDb -userSpecifyPath name path = updateUnconfiguredProgs $ - flip Map.update name $ \(prog, _, args) -> Just (prog, Just path, args) - - -userMaybeSpecifyPath :: String -> Maybe FilePath - -> ProgramDb -> ProgramDb -userMaybeSpecifyPath _ Nothing progdb = progdb -userMaybeSpecifyPath name (Just path) progdb = userSpecifyPath name path progdb - - --- |User-specify the arguments for this program. Basically override --- any args information for this program in the configuration. If it's --- not a known program, ignore it.. -userSpecifyArgs :: String -- ^Program name - -> [ProgArg] -- ^user-specified args - -> ProgramDb - -> ProgramDb -userSpecifyArgs name args' = - updateUnconfiguredProgs - (flip Map.update name $ - \(prog, path, args) -> Just (prog, path, args ++ args')) - . updateConfiguredProgs - (flip Map.update name $ - \prog -> Just prog { programOverrideArgs = programOverrideArgs prog - ++ args' }) - - --- | Like 'userSpecifyPath' but for a list of progs and their paths. 
--- -userSpecifyPaths :: [(String, FilePath)] - -> ProgramDb - -> ProgramDb -userSpecifyPaths paths progdb = - foldl' (\progdb' (prog, path) -> userSpecifyPath prog path progdb') progdb paths - - --- | Like 'userSpecifyPath' but for a list of progs and their args. --- -userSpecifyArgss :: [(String, [ProgArg])] - -> ProgramDb - -> ProgramDb -userSpecifyArgss argss progdb = - foldl' (\progdb' (prog, args) -> userSpecifyArgs prog args progdb') progdb argss - - --- | Get the path that has been previously specified for a program, if any. --- -userSpecifiedPath :: Program -> ProgramDb -> Maybe FilePath -userSpecifiedPath prog = - join . fmap (\(_,p,_)->p) . Map.lookup (programName prog) . unconfiguredProgs - - --- | Get any extra args that have been previously specified for a program. --- -userSpecifiedArgs :: Program -> ProgramDb -> [ProgArg] -userSpecifiedArgs prog = - maybe [] (\(_,_,as)->as) . Map.lookup (programName prog) . unconfiguredProgs - - --- ----------------------------- --- Managing configured programs - --- | Try to find a configured program -lookupProgram :: Program -> ProgramDb -> Maybe ConfiguredProgram -lookupProgram prog = Map.lookup (programName prog) . configuredProgs - - --- | Update a configured program in the database. -updateProgram :: ConfiguredProgram -> ProgramDb - -> ProgramDb -updateProgram prog = updateConfiguredProgs $ - Map.insert (programId prog) prog - - --- | List all configured programs. -configuredPrograms :: ProgramDb -> [ConfiguredProgram] -configuredPrograms = Map.elems . configuredProgs - --- --------------------------- --- Configuring known programs - --- | Try to configure a specific program. If the program is already included in --- the collection of unconfigured programs then we use any user-supplied --- location and arguments. If the program gets configured successfully it gets --- added to the configured collection. --- --- Note that it is not a failure if the program cannot be configured. 
It's only --- a failure if the user supplied a location and the program could not be found --- at that location. --- --- The reason for it not being a failure at this stage is that we don't know up --- front all the programs we will need, so we try to configure them all. --- To verify that a program was actually successfully configured use --- 'requireProgram'. --- -configureProgram :: Verbosity - -> Program - -> ProgramDb - -> IO ProgramDb -configureProgram verbosity prog progdb = do - let name = programName prog - maybeLocation <- case userSpecifiedPath prog progdb of - Nothing -> - programFindLocation prog verbosity (progSearchPath progdb) - >>= return . fmap (swap . fmap FoundOnSystem . swap) - Just path -> do - absolute <- doesExecutableExist path - if absolute - then return (Just (UserSpecified path, [])) - else findProgramOnSearchPath verbosity (progSearchPath progdb) path - >>= maybe (die' verbosity notFound) - (return . Just . swap . fmap UserSpecified . swap) - where notFound = "Cannot find the program '" ++ name - ++ "'. User-specified path '" - ++ path ++ "' does not refer to an executable and " - ++ "the program is not on the system path." - case maybeLocation of - Nothing -> return progdb - Just (location, triedLocations) -> do - version <- programFindVersion prog verbosity (locationPath location) - newPath <- programSearchPathAsPATHVar (progSearchPath progdb) - let configuredProg = ConfiguredProgram { - programId = name, - programVersion = version, - programDefaultArgs = [], - programOverrideArgs = userSpecifiedArgs prog progdb, - programOverrideEnv = [("PATH", Just newPath)], - programProperties = Map.empty, - programLocation = location, - programMonitorFiles = triedLocations - } - configuredProg' <- programPostConf prog verbosity configuredProg - return (updateConfiguredProgs (Map.insert name configuredProg') progdb) - - --- | Configure a bunch of programs using 'configureProgram'. Just a 'foldM'. 
--- -configurePrograms :: Verbosity - -> [Program] - -> ProgramDb - -> IO ProgramDb -configurePrograms verbosity progs progdb = - foldM (flip (configureProgram verbosity)) progdb progs - - --- | Unconfigure a program. This is basically a hack and you shouldn't --- use it, but it can be handy for making sure a 'requireProgram' --- actually reconfigures. -unconfigureProgram :: String -> ProgramDb -> ProgramDb -unconfigureProgram progname = - updateConfiguredProgs $ Map.delete progname - --- | Try to configure all the known programs that have not yet been configured. --- -configureAllKnownPrograms :: Verbosity - -> ProgramDb - -> IO ProgramDb -configureAllKnownPrograms verbosity progdb = - configurePrograms verbosity - [ prog | (prog,_,_) <- Map.elems notYetConfigured ] progdb - where - notYetConfigured = unconfiguredProgs progdb - `Map.difference` configuredProgs progdb - - --- | reconfigure a bunch of programs given new user-specified args. It takes --- the same inputs as 'userSpecifyPath' and 'userSpecifyArgs' and for all progs --- with a new path it calls 'configureProgram'. --- -reconfigurePrograms :: Verbosity - -> [(String, FilePath)] - -> [(String, [ProgArg])] - -> ProgramDb - -> IO ProgramDb -reconfigurePrograms verbosity paths argss progdb = do - configurePrograms verbosity progs - . userSpecifyPaths paths - . userSpecifyArgss argss - $ progdb - - where - progs = catMaybes [ lookupKnownProgram name progdb | (name,_) <- paths ] - - --- | Check that a program is configured and available to be run. --- --- It raises an exception if the program could not be configured, otherwise --- it returns the configured program. 
--- -requireProgram :: Verbosity -> Program -> ProgramDb - -> IO (ConfiguredProgram, ProgramDb) -requireProgram verbosity prog progdb = do - - -- If it's not already been configured, try to configure it now - progdb' <- case lookupProgram prog progdb of - Nothing -> configureProgram verbosity prog progdb - Just _ -> return progdb - - case lookupProgram prog progdb' of - Nothing -> die' verbosity notFound - Just configuredProg -> return (configuredProg, progdb') - - where notFound = "The program '" ++ programName prog - ++ "' is required but it could not be found." - - --- | Check that a program is configured and available to be run. --- --- Additionally check that the program version number is suitable and return --- it. For example you could require 'AnyVersion' or @'orLaterVersion' --- ('Version' [1,0] [])@ --- --- It returns the configured program, its version number and a possibly updated --- 'ProgramDb'. If the program could not be configured or the version is --- unsuitable, it returns an error value. --- -lookupProgramVersion - :: Verbosity -> Program -> VersionRange -> ProgramDb - -> IO (Either String (ConfiguredProgram, Version, ProgramDb)) -lookupProgramVersion verbosity prog range programDb = do - - -- If it's not already been configured, try to configure it now - programDb' <- case lookupProgram prog programDb of - Nothing -> configureProgram verbosity prog programDb - Just _ -> return programDb - - case lookupProgram prog programDb' of - Nothing -> return $! Left notFound - Just configuredProg@ConfiguredProgram { programLocation = location } -> - case programVersion configuredProg of - Just version - | withinRange version range -> - return $! Right (configuredProg, version ,programDb') - | otherwise -> - return $! Left (badVersion version location) - Nothing -> - return $! Left (unknownVersion location) - - where notFound = "The program '" - ++ programName prog ++ "'" ++ versionRequirement - ++ " is required but it could not be found." 
- badVersion v l = "The program '" - ++ programName prog ++ "'" ++ versionRequirement - ++ " is required but the version found at " - ++ locationPath l ++ " is version " ++ display v - unknownVersion l = "The program '" - ++ programName prog ++ "'" ++ versionRequirement - ++ " is required but the version of " - ++ locationPath l ++ " could not be determined." - versionRequirement - | isAnyVersion range = "" - | otherwise = " version " ++ display range - --- | Like 'lookupProgramVersion', but raises an exception in case of error --- instead of returning 'Left errMsg'. --- -requireProgramVersion :: Verbosity -> Program -> VersionRange - -> ProgramDb - -> IO (ConfiguredProgram, Version, ProgramDb) -requireProgramVersion verbosity prog range programDb = - join $ either (die' verbosity) return `fmap` - lookupProgramVersion verbosity prog range programDb diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Find.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Find.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Find.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Find.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,187 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Find --- Copyright : Duncan Coutts 2013 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- A somewhat extended notion of the normal program search path concept. --- --- Usually when finding executables we just want to look in the usual places --- using the OS's usual method for doing so. 
In Haskell the normal OS-specific --- method is captured by 'findExecutable'. On all common OSs that makes use of --- a @PATH@ environment variable, (though on Windows it is not just the @PATH@). --- --- However it is sometimes useful to be able to look in additional locations --- without having to change the process-global @PATH@ environment variable. --- So we need an extension of the usual 'findExecutable' that can look in --- additional locations, either before, after or instead of the normal OS --- locations. --- -module Distribution.Simple.Program.Find ( - -- * Program search path - ProgramSearchPath, - ProgramSearchPathEntry(..), - defaultProgramSearchPath, - findProgramOnSearchPath, - programSearchPathAsPATHVar, - getSystemSearchPath, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Verbosity -import Distribution.Simple.Utils -import Distribution.System -import Distribution.Compat.Environment - -import qualified System.Directory as Directory - ( findExecutable ) -import System.FilePath as FilePath - ( (), (<.>), splitSearchPath, searchPathSeparator, getSearchPath - , takeDirectory ) -#if defined(mingw32_HOST_OS) -import qualified System.Win32 as Win32 -#endif - --- | A search path to use when locating executables. This is analogous --- to the unix @$PATH@ or win32 @%PATH%@ but with the ability to use --- the system default method for finding executables ('findExecutable' which --- on unix is simply looking on the @$PATH@ but on win32 is a bit more --- complicated). --- --- The default to use is @[ProgSearchPathDefault]@ but you can add extra dirs --- either before, after or instead of the default, e.g. here we add an extra --- dir to search after the usual ones. 
--- --- > ['ProgramSearchPathDefault', 'ProgramSearchPathDir' dir] --- -type ProgramSearchPath = [ProgramSearchPathEntry] -data ProgramSearchPathEntry = - ProgramSearchPathDir FilePath -- ^ A specific dir - | ProgramSearchPathDefault -- ^ The system default - deriving (Eq, Generic) - -instance Binary ProgramSearchPathEntry - -defaultProgramSearchPath :: ProgramSearchPath -defaultProgramSearchPath = [ProgramSearchPathDefault] - -findProgramOnSearchPath :: Verbosity -> ProgramSearchPath - -> FilePath -> IO (Maybe (FilePath, [FilePath])) -findProgramOnSearchPath verbosity searchpath prog = do - debug verbosity $ "Searching for " ++ prog ++ " in path." - res <- tryPathElems [] searchpath - case res of - Nothing -> debug verbosity ("Cannot find " ++ prog ++ " on the path") - Just (path, _) -> debug verbosity ("Found " ++ prog ++ " at "++ path) - return res - where - tryPathElems :: [[FilePath]] -> [ProgramSearchPathEntry] - -> IO (Maybe (FilePath, [FilePath])) - tryPathElems _ [] = return Nothing - tryPathElems tried (pe:pes) = do - res <- tryPathElem pe - case res of - (Nothing, notfoundat) -> tryPathElems (notfoundat : tried) pes - (Just foundat, notfoundat) -> return (Just (foundat, alltried)) - where - alltried = concat (reverse (notfoundat : tried)) - - tryPathElem :: ProgramSearchPathEntry -> NoCallStackIO (Maybe FilePath, [FilePath]) - tryPathElem (ProgramSearchPathDir dir) = - findFirstExe [ dir prog <.> ext | ext <- exeExtensions ] - - -- On windows, getSystemSearchPath is not guaranteed 100% correct so we - -- use findExecutable and then approximate the not-found-at locations. 
- tryPathElem ProgramSearchPathDefault | buildOS == Windows = do - mExe <- findExecutable prog - syspath <- getSystemSearchPath - case mExe of - Nothing -> - let notfoundat = [ dir prog | dir <- syspath ] in - return (Nothing, notfoundat) - - Just foundat -> do - let founddir = takeDirectory foundat - notfoundat = [ dir prog - | dir <- takeWhile (/= founddir) syspath ] - return (Just foundat, notfoundat) - - -- On other OSs we can just do the simple thing - tryPathElem ProgramSearchPathDefault = do - dirs <- getSystemSearchPath - findFirstExe [ dir prog <.> ext | dir <- dirs, ext <- exeExtensions ] - - findFirstExe :: [FilePath] -> NoCallStackIO (Maybe FilePath, [FilePath]) - findFirstExe = go [] - where - go fs' [] = return (Nothing, reverse fs') - go fs' (f:fs) = do - isExe <- doesExecutableExist f - if isExe - then return (Just f, reverse fs') - else go (f:fs') fs - --- | Interpret a 'ProgramSearchPath' to construct a new @$PATH@ env var. --- Note that this is close but not perfect because on Windows the search --- algorithm looks at more than just the @%PATH%@. -programSearchPathAsPATHVar :: ProgramSearchPath -> NoCallStackIO String -programSearchPathAsPATHVar searchpath = do - ess <- traverse getEntries searchpath - return (intercalate [searchPathSeparator] (concat ess)) - where - getEntries (ProgramSearchPathDir dir) = return [dir] - getEntries ProgramSearchPathDefault = do - env <- getEnvironment - return (maybe [] splitSearchPath (lookup "PATH" env)) - --- | Get the system search path. On Unix systems this is just the @$PATH@ env --- var, but on windows it's a bit more complicated. 
--- -getSystemSearchPath :: NoCallStackIO [FilePath] -getSystemSearchPath = fmap nub $ do -#if defined(mingw32_HOST_OS) - processdir <- takeDirectory `fmap` Win32.getModuleFileName Win32.nullHANDLE - currentdir <- Win32.getCurrentDirectory - systemdir <- Win32.getSystemDirectory - windowsdir <- Win32.getWindowsDirectory - pathdirs <- FilePath.getSearchPath - let path = processdir : currentdir - : systemdir : windowsdir - : pathdirs - return path -#else - FilePath.getSearchPath -#endif - -#ifdef MIN_VERSION_directory -#if MIN_VERSION_directory(1,2,1) -#define HAVE_directory_121 -#endif -#endif - -findExecutable :: FilePath -> NoCallStackIO (Maybe FilePath) -#ifdef HAVE_directory_121 -findExecutable = Directory.findExecutable -#else -findExecutable prog = do - -- With directory < 1.2.1 'findExecutable' doesn't check that the path - -- really refers to an executable. - mExe <- Directory.findExecutable prog - case mExe of - Just exe -> do - exeExists <- doesExecutableExist exe - if exeExists - then return mExe - else return Nothing - _ -> return mExe -#endif - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/GHC.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/GHC.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/GHC.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/GHC.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,752 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE RecordWildCards #-} - -module Distribution.Simple.Program.GHC ( - GhcOptions(..), - GhcMode(..), - GhcOptimisation(..), - GhcDynLinkMode(..), - GhcProfAuto(..), - - ghcInvocation, - renderGhcOptions, - - runGHC, - - packageDbArgsDb, - normaliseGhcArgs - - ) where - -import Prelude () -import Distribution.Compat.Prelude 
- -import Distribution.Backpack -import Distribution.Simple.GHC.ImplInfo -import Distribution.PackageDescription hiding (Flag) -import Distribution.ModuleName -import Distribution.Simple.Compiler hiding (Flag) -import qualified Distribution.Simple.Compiler as Compiler (Flag) -import Distribution.Simple.Flag -import Distribution.Simple.Program.Types -import Distribution.Simple.Program.Run -import Distribution.System -import Distribution.Text -import Distribution.Types.ComponentId -import Distribution.Verbosity -import Distribution.Version -import Distribution.Utils.NubList -import Language.Haskell.Extension - -import Data.List (stripPrefix) -import qualified Data.Map as Map -import Data.Monoid (All(..), Any(..), Endo(..), First(..)) -import Data.Set (Set) -import qualified Data.Set as Set - -normaliseGhcArgs :: Maybe Version -> PackageDescription -> [String] -> [String] -normaliseGhcArgs (Just ghcVersion) PackageDescription{..} ghcArgs - | ghcVersion `withinRange` supportedGHCVersions - = argumentFilters $ filter simpleFilters ghcArgs - where - supportedGHCVersions :: VersionRange - supportedGHCVersions = intersectVersionRanges - (orLaterVersion (mkVersion [8,0])) - (earlierVersion (mkVersion [8,5])) - - from :: Monoid m => [Int] -> m -> m - from version flags - | ghcVersion `withinRange` orLaterVersion (mkVersion version) = flags - | otherwise = mempty - - checkComponentWarnings :: (a -> BuildInfo) -> [a] -> All - checkComponentWarnings getInfo = foldMap $ checkComponent . getInfo - where - checkComponent :: BuildInfo -> All - checkComponent = - foldMap checkWarnings . filterGhcOptions . 
allBuildInfoOptions - - allBuildInfoOptions :: BuildInfo -> [(CompilerFlavor, [String])] - allBuildInfoOptions = - mconcat [options, profOptions, sharedOptions, staticOptions] - - filterGhcOptions :: [(CompilerFlavor, [String])] -> [[String]] - filterGhcOptions l = [opts | (GHC, opts) <- l] - - libs, exes, tests, benches :: All - libs = checkComponentWarnings libBuildInfo $ - maybeToList library ++ subLibraries - - exes = checkComponentWarnings buildInfo $ executables - tests = checkComponentWarnings testBuildInfo $ testSuites - benches = checkComponentWarnings benchmarkBuildInfo $ benchmarks - - safeToFilterWarnings :: Bool - safeToFilterWarnings = getAll $ mconcat - [checkWarnings ghcArgs, libs, exes, tests, benches] - - checkWarnings :: [String] -> All - checkWarnings = All . Set.null . foldr alter Set.empty - where - alter :: String -> Set String -> Set String - alter flag = appEndo $ mconcat - [ \s -> Endo $ if s == "-Werror" then Set.insert s else id - , \s -> Endo $ if s == "-Wwarn" then const Set.empty else id - , from [8,4] $ markFlag "-Werror=" Set.insert - , from [8,4] $ markFlag "-Wwarn=" Set.delete - , from [8,4] $ markFlag "-Wno-error=" Set.delete - ] flag - - markFlag - :: String - -> (String -> Set String -> Set String) - -> String - -> Endo (Set String) - markFlag name update flag = Endo $ case stripPrefix name flag of - Just rest | not (null rest) -> update rest - _ -> id - - flagArgumentFilter :: [String] -> [String] -> [String] - flagArgumentFilter flags = go - where - makeFilter :: String -> String -> First ([String] -> [String]) - makeFilter flag arg = First $ filterRest <$> stripPrefix flag arg - where - filterRest leftOver = case dropEq leftOver of - [] -> drop 1 - _ -> id - - checkFilter :: String -> Maybe ([String] -> [String]) - checkFilter = getFirst . 
mconcat (map makeFilter flags) - - go :: [String] -> [String] - go [] = [] - go (arg:args) = case checkFilter arg of - Just f -> go (f args) - Nothing -> arg : go args - - argumentFilters :: [String] -> [String] - argumentFilters = flagArgumentFilter ["-ghci-script", "-H"] - - simpleFilters :: String -> Bool - simpleFilters = not . getAny . mconcat - [ flagIn simpleFlags - , Any . isPrefixOf "-ddump-" - , Any . isPrefixOf "-dsuppress-" - , Any . isPrefixOf "-dno-suppress-" - , flagIn $ invertibleFlagSet "-" ["ignore-dot-ghci"] - , flagIn . invertibleFlagSet "-f" . mconcat $ - [ [ "reverse-errors", "warn-unused-binds" ] - , from [8,2] - [ "diagnostics-show-caret", "local-ghci-history" - , "show-warning-groups", "hide-source-paths" - , "show-hole-constraints" - ] - , from [8,4] ["show-loaded-modules"] - ] - , flagIn . invertibleFlagSet "-d" $ [ "ppr-case-as-let", "ppr-ticks" ] - , isOptIntFlag - , isIntFlag - , if safeToFilterWarnings - then isWarning <> (Any . ("-w"==)) - else mempty - ] - - flagIn :: Set String -> String -> Any - flagIn set flag = Any $ Set.member flag set - - isWarning :: String -> Any - isWarning = mconcat $ map ((Any .) . isPrefixOf) - ["-fwarn-", "-fno-warn-", "-W", "-Wno-"] - - simpleFlags :: Set String - simpleFlags = Set.fromList . mconcat $ - [ [ "-n", "-#include", "-Rghc-timing", "-dsuppress-all", "-dstg-stats" - , "-dth-dec-file", "-dsource-stats", "-dverbose-core2core" - , "-dverbose-stg2stg", "-dcore-lint", "-dstg-lint", "-dcmm-lint" - , "-dasm-lint", "-dannot-lint", "-dshow-passes", "-dfaststring-stats" - , "-fno-max-relevant-binds", "-recomp", "-no-recomp", "-fforce-recomp" - , "-fno-force-recomp", "-interactive-print" - ] - - , from [8,2] - [ "-fno-max-errors", "-fdiagnostics-color=auto" - , "-fdiagnostics-color=always", "-fdiagnostics-color=never" - , "-dppr-debug", "-dno-debug-output" - ] - - , from [8,4] - [ "-ddebug-output", "-fno-max-valid-substitutions" ] - ] - - isOptIntFlag :: String -> Any - isOptIntFlag = mconcat . 
map (dropIntFlag True) $ ["-v", "-j"] - - isIntFlag :: String -> Any - isIntFlag = mconcat . map (dropIntFlag False) . mconcat $ - [ [ "-fmax-relevant-binds", "-ddpr-user-length", "-ddpr-cols" - , "-dtrace-level", "-fghci-hist-size" ] - , from [8,2] ["-fmax-uncovered-patterns", "-fmax-errors"] - , from [8,4] ["-fmax-valid-substitutions"] - ] - - dropIntFlag :: Bool -> String -> String -> Any - dropIntFlag isOpt flag input = Any $ case stripPrefix flag input of - Nothing -> False - Just rest | isOpt && null rest -> True - | otherwise -> case parseInt rest of - Just _ -> True - Nothing -> False - where - parseInt :: String -> Maybe Int - parseInt = readMaybe . dropEq - - readMaybe :: Read a => String -> Maybe a - readMaybe s = case reads s of - [(x, "")] -> Just x - _ -> Nothing - - dropEq :: String -> String - dropEq ('=':s) = s - dropEq s = s - - invertibleFlagSet :: String -> [String] -> Set String - invertibleFlagSet prefix flagNames = - Set.fromList $ (++) <$> [prefix, prefix ++ "no-"] <*> flagNames - -normaliseGhcArgs _ _ args = args - --- | A structured set of GHC options/flags --- --- Note that options containing lists fall into two categories: --- --- * options that can be safely deduplicated, e.g. input modules or --- enabled extensions; --- * options that cannot be deduplicated in general without changing --- semantics, e.g. extra ghc options or linking options. -data GhcOptions = GhcOptions { - - -- | The major mode for the ghc invocation. - ghcOptMode :: Flag GhcMode, - - -- | Any extra options to pass directly to ghc. These go at the end and hence - -- override other stuff. - ghcOptExtra :: [String], - - -- | Extra default flags to pass directly to ghc. These go at the beginning - -- and so can be overridden by other stuff. - ghcOptExtraDefault :: [String], - - ----------------------- - -- Inputs and outputs - - -- | The main input files; could be .hs, .hi, .c, .o, depending on mode. 
- ghcOptInputFiles :: NubListR FilePath, - - -- | The names of input Haskell modules, mainly for @--make@ mode. - ghcOptInputModules :: NubListR ModuleName, - - -- | Location for output file; the @ghc -o@ flag. - ghcOptOutputFile :: Flag FilePath, - - -- | Location for dynamic output file in 'GhcStaticAndDynamic' mode; - -- the @ghc -dyno@ flag. - ghcOptOutputDynFile :: Flag FilePath, - - -- | Start with an empty search path for Haskell source files; - -- the @ghc -i@ flag (@-i@ on its own with no path argument). - ghcOptSourcePathClear :: Flag Bool, - - -- | Search path for Haskell source files; the @ghc -i@ flag. - ghcOptSourcePath :: NubListR FilePath, - - ------------- - -- Packages - - -- | The unit ID the modules will belong to; the @ghc -this-unit-id@ - -- flag (or @-this-package-key@ or @-package-name@ on older - -- versions of GHC). This is a 'String' because we assume you've - -- already figured out what the correct format for this string is - -- (we need to handle backwards compatibility.) - ghcOptThisUnitId :: Flag String, - - -- | GHC doesn't make any assumptions about the format of - -- definite unit ids, so when we are instantiating a package it - -- needs to be told explicitly what the component being instantiated - -- is. This only gets set when 'ghcOptInstantiatedWith' is non-empty - ghcOptThisComponentId :: Flag ComponentId, - - -- | How the requirements of the package being compiled are to - -- be filled. When typechecking an indefinite package, the 'OpenModule' - -- is always a 'OpenModuleVar'; otherwise, it specifies the installed module - -- that instantiates a package. - ghcOptInstantiatedWith :: [(ModuleName, OpenModule)], - - -- | No code? (But we turn on interface writing - ghcOptNoCode :: Flag Bool, - - -- | GHC package databases to use, the @ghc -package-conf@ flag. - ghcOptPackageDBs :: PackageDBStack, - - -- | The GHC packages to bring into scope when compiling, - -- the @ghc -package-id@ flags. 
- ghcOptPackages :: - NubListR (OpenUnitId, ModuleRenaming), - - -- | Start with a clean package set; the @ghc -hide-all-packages@ flag - ghcOptHideAllPackages :: Flag Bool, - - -- | Warn about modules, not listed in command line - ghcOptWarnMissingHomeModules :: Flag Bool, - - -- | Don't automatically link in Haskell98 etc; the @ghc - -- -no-auto-link-packages@ flag. - ghcOptNoAutoLinkPackages :: Flag Bool, - - ----------------- - -- Linker stuff - - -- | Names of libraries to link in; the @ghc -l@ flag. - ghcOptLinkLibs :: [FilePath], - - -- | Search path for libraries to link in; the @ghc -L@ flag. - ghcOptLinkLibPath :: NubListR FilePath, - - -- | Options to pass through to the linker; the @ghc -optl@ flag. - ghcOptLinkOptions :: [String], - - -- | OSX only: frameworks to link in; the @ghc -framework@ flag. - ghcOptLinkFrameworks :: NubListR String, - - -- | OSX only: Search path for frameworks to link in; the - -- @ghc -framework-path@ flag. - ghcOptLinkFrameworkDirs :: NubListR String, - - -- | Don't do the link step, useful in make mode; the @ghc -no-link@ flag. - ghcOptNoLink :: Flag Bool, - - -- | Don't link in the normal RTS @main@ entry point; the @ghc -no-hs-main@ - -- flag. - ghcOptLinkNoHsMain :: Flag Bool, - - -- | Module definition files (Windows specific) - ghcOptLinkModDefFiles :: NubListR FilePath, - - -------------------- - -- C and CPP stuff - - -- | Options to pass through to the C compiler; the @ghc -optc@ flag. - ghcOptCcOptions :: [String], - - -- | Options to pass through to the C++ compiler. - ghcOptCxxOptions :: [String], - - -- | Options to pass through to CPP; the @ghc -optP@ flag. - ghcOptCppOptions :: [String], - - -- | Search path for CPP includes like header files; the @ghc -I@ flag. - ghcOptCppIncludePath :: NubListR FilePath, - - -- | Extra header files to include at CPP stage; the @ghc -optP-include@ flag. 
- ghcOptCppIncludes :: NubListR FilePath, - - -- | Extra header files to include for old-style FFI; the @ghc -#include@ flag. - ghcOptFfiIncludes :: NubListR FilePath, - - ---------------------------- - -- Language and extensions - - -- | The base language; the @ghc -XHaskell98@ or @-XHaskell2010@ flag. - ghcOptLanguage :: Flag Language, - - -- | The language extensions; the @ghc -X@ flag. - ghcOptExtensions :: NubListR Extension, - - -- | A GHC version-dependent mapping of extensions to flags. This must be - -- set to be able to make use of the 'ghcOptExtensions'. - ghcOptExtensionMap :: Map Extension (Maybe Compiler.Flag), - - ---------------- - -- Compilation - - -- | What optimisation level to use; the @ghc -O@ flag. - ghcOptOptimisation :: Flag GhcOptimisation, - - -- | Emit debug info; the @ghc -g@ flag. - ghcOptDebugInfo :: Flag DebugInfoLevel, - - -- | Compile in profiling mode; the @ghc -prof@ flag. - ghcOptProfilingMode :: Flag Bool, - - -- | Automatically add profiling cost centers; the @ghc -fprof-auto*@ flags. - ghcOptProfilingAuto :: Flag GhcProfAuto, - - -- | Use the \"split sections\" feature; the @ghc -split-sections@ flag. - ghcOptSplitSections :: Flag Bool, - - -- | Use the \"split object files\" feature; the @ghc -split-objs@ flag. - ghcOptSplitObjs :: Flag Bool, - - -- | Run N jobs simultaneously (if possible). - ghcOptNumJobs :: Flag (Maybe Int), - - -- | Enable coverage analysis; the @ghc -fhpc -hpcdir@ flags. 
- ghcOptHPCDir :: Flag FilePath, - - ---------------- - -- GHCi - - -- | Extra GHCi startup scripts; the @-ghci-script@ flag - ghcOptGHCiScripts :: [FilePath], - - ------------------------ - -- Redirecting outputs - - ghcOptHiSuffix :: Flag String, - ghcOptObjSuffix :: Flag String, - ghcOptDynHiSuffix :: Flag String, -- ^ only in 'GhcStaticAndDynamic' mode - ghcOptDynObjSuffix :: Flag String, -- ^ only in 'GhcStaticAndDynamic' mode - ghcOptHiDir :: Flag FilePath, - ghcOptObjDir :: Flag FilePath, - ghcOptOutputDir :: Flag FilePath, - ghcOptStubDir :: Flag FilePath, - - -------------------- - -- Creating libraries - - ghcOptDynLinkMode :: Flag GhcDynLinkMode, - ghcOptStaticLib :: Flag Bool, - ghcOptShared :: Flag Bool, - ghcOptFPic :: Flag Bool, - ghcOptDylibName :: Flag String, - ghcOptRPaths :: NubListR FilePath, - - --------------- - -- Misc flags - - -- | Get GHC to be quiet or verbose with what it's doing; the @ghc -v@ flag. - ghcOptVerbosity :: Flag Verbosity, - - -- | Put the extra folders in the PATH environment variable we invoke - -- GHC with - ghcOptExtraPath :: NubListR FilePath, - - -- | Let GHC know that it is Cabal that's calling it. - -- Modifies some of the GHC error messages. - ghcOptCabal :: Flag Bool - -} deriving (Show, Generic) - - -data GhcMode = GhcModeCompile -- ^ @ghc -c@ - | GhcModeLink -- ^ @ghc@ - | GhcModeMake -- ^ @ghc --make@ - | GhcModeInteractive -- ^ @ghci@ \/ @ghc --interactive@ - | GhcModeAbiHash -- ^ @ghc --abi-hash@ --- | GhcModeDepAnalysis -- ^ @ghc -M@ --- | GhcModeEvaluate -- ^ @ghc -e@ - deriving (Show, Eq) - -data GhcOptimisation = GhcNoOptimisation -- ^ @-O0@ - | GhcNormalOptimisation -- ^ @-O@ - | GhcMaximumOptimisation -- ^ @-O2@ - | GhcSpecialOptimisation String -- ^ e.g. 
@-Odph@ - deriving (Show, Eq) - -data GhcDynLinkMode = GhcStaticOnly -- ^ @-static@ - | GhcDynamicOnly -- ^ @-dynamic@ - | GhcStaticAndDynamic -- ^ @-static -dynamic-too@ - deriving (Show, Eq) - -data GhcProfAuto = GhcProfAutoAll -- ^ @-fprof-auto@ - | GhcProfAutoToplevel -- ^ @-fprof-auto-top@ - | GhcProfAutoExported -- ^ @-fprof-auto-exported@ - deriving (Show, Eq) - -runGHC :: Verbosity -> ConfiguredProgram -> Compiler -> Platform -> GhcOptions - -> IO () -runGHC verbosity ghcProg comp platform opts = do - runProgramInvocation verbosity (ghcInvocation ghcProg comp platform opts) - - -ghcInvocation :: ConfiguredProgram -> Compiler -> Platform -> GhcOptions - -> ProgramInvocation -ghcInvocation prog comp platform opts = - (programInvocation prog (renderGhcOptions comp platform opts)) { - progInvokePathEnv = fromNubListR (ghcOptExtraPath opts) - } - -renderGhcOptions :: Compiler -> Platform -> GhcOptions -> [String] -renderGhcOptions comp _platform@(Platform _arch os) opts - | compilerFlavor comp `notElem` [GHC, GHCJS] = - error $ "Distribution.Simple.Program.GHC.renderGhcOptions: " - ++ "compiler flavor must be 'GHC' or 'GHCJS'!" 
- | otherwise = - concat - [ case flagToMaybe (ghcOptMode opts) of - Nothing -> [] - Just GhcModeCompile -> ["-c"] - Just GhcModeLink -> [] - Just GhcModeMake -> ["--make"] - Just GhcModeInteractive -> ["--interactive"] - Just GhcModeAbiHash -> ["--abi-hash"] --- Just GhcModeDepAnalysis -> ["-M"] --- Just GhcModeEvaluate -> ["-e", expr] - - , ghcOptExtraDefault opts - - , [ "-no-link" | flagBool ghcOptNoLink ] - - --------------- - -- Misc flags - - , maybe [] verbosityOpts (flagToMaybe (ghcOptVerbosity opts)) - - , [ "-fbuilding-cabal-package" | flagBool ghcOptCabal ] - - ---------------- - -- Compilation - - , case flagToMaybe (ghcOptOptimisation opts) of - Nothing -> [] - Just GhcNoOptimisation -> ["-O0"] - Just GhcNormalOptimisation -> ["-O"] - Just GhcMaximumOptimisation -> ["-O2"] - Just (GhcSpecialOptimisation s) -> ["-O" ++ s] -- eg -Odph - - , case flagToMaybe (ghcOptDebugInfo opts) of - Nothing -> [] - Just NoDebugInfo -> [] - Just MinimalDebugInfo -> ["-g1"] - Just NormalDebugInfo -> ["-g2"] - Just MaximalDebugInfo -> ["-g3"] - - , [ "-prof" | flagBool ghcOptProfilingMode ] - - , case flagToMaybe (ghcOptProfilingAuto opts) of - _ | not (flagBool ghcOptProfilingMode) - -> [] - Nothing -> [] - Just GhcProfAutoAll - | flagProfAuto implInfo -> ["-fprof-auto"] - | otherwise -> ["-auto-all"] -- not the same, but close - Just GhcProfAutoToplevel - | flagProfAuto implInfo -> ["-fprof-auto-top"] - | otherwise -> ["-auto-all"] - Just GhcProfAutoExported - | flagProfAuto implInfo -> ["-fprof-auto-exported"] - | otherwise -> ["-auto"] - - , [ "-split-sections" | flagBool ghcOptSplitSections ] - , [ "-split-objs" | flagBool ghcOptSplitObjs ] - - , case flagToMaybe (ghcOptHPCDir opts) of - Nothing -> [] - Just hpcdir -> ["-fhpc", "-hpcdir", hpcdir] - - , if parmakeSupported comp - then case ghcOptNumJobs opts of - NoFlag -> [] - Flag n -> ["-j" ++ maybe "" show n] - else [] - - -------------------- - -- Creating libraries - - , [ "-staticlib" | flagBool 
ghcOptStaticLib ] - , [ "-shared" | flagBool ghcOptShared ] - , case flagToMaybe (ghcOptDynLinkMode opts) of - Nothing -> [] - Just GhcStaticOnly -> ["-static"] - Just GhcDynamicOnly -> ["-dynamic"] - Just GhcStaticAndDynamic -> ["-static", "-dynamic-too"] - , [ "-fPIC" | flagBool ghcOptFPic ] - - , concat [ ["-dylib-install-name", libname] | libname <- flag ghcOptDylibName ] - - ------------------------ - -- Redirecting outputs - - , concat [ ["-osuf", suf] | suf <- flag ghcOptObjSuffix ] - , concat [ ["-hisuf", suf] | suf <- flag ghcOptHiSuffix ] - , concat [ ["-dynosuf", suf] | suf <- flag ghcOptDynObjSuffix ] - , concat [ ["-dynhisuf",suf] | suf <- flag ghcOptDynHiSuffix ] - , concat [ ["-outputdir", dir] | dir <- flag ghcOptOutputDir ] - , concat [ ["-odir", dir] | dir <- flag ghcOptObjDir ] - , concat [ ["-hidir", dir] | dir <- flag ghcOptHiDir ] - , concat [ ["-stubdir", dir] | dir <- flag ghcOptStubDir ] - - ----------------------- - -- Source search path - - , [ "-i" | flagBool ghcOptSourcePathClear ] - , [ "-i" ++ dir | dir <- flags ghcOptSourcePath ] - - -------------------- - - -------------------- - -- CPP, C, and C++ stuff - - , [ "-I" ++ dir | dir <- flags ghcOptCppIncludePath ] - , [ "-optP" ++ opt | opt <- ghcOptCppOptions opts] - , concat [ [ "-optP-include", "-optP" ++ inc] - | inc <- flags ghcOptCppIncludes ] - , [ "-optc" ++ opt | opt <- ghcOptCcOptions opts] - , [ "-optc" ++ opt | opt <- ghcOptCxxOptions opts] - - ----------------- - -- Linker stuff - - , [ "-optl" ++ opt | opt <- ghcOptLinkOptions opts] - , ["-l" ++ lib | lib <- ghcOptLinkLibs opts] - , ["-L" ++ dir | dir <- flags ghcOptLinkLibPath ] - , if isOSX - then concat [ ["-framework", fmwk] - | fmwk <- flags ghcOptLinkFrameworks ] - else [] - , if isOSX - then concat [ ["-framework-path", path] - | path <- flags ghcOptLinkFrameworkDirs ] - else [] - , [ "-no-hs-main" | flagBool ghcOptLinkNoHsMain ] - , [ "-dynload deploy" | not (null (flags ghcOptRPaths)) ] - , concat [ [ 
"-optl-Wl,-rpath," ++ dir] - | dir <- flags ghcOptRPaths ] - , [ modDefFile | modDefFile <- flags ghcOptLinkModDefFiles ] - - ------------- - -- Packages - - , concat [ [ case () of - _ | unitIdSupported comp -> "-this-unit-id" - | packageKeySupported comp -> "-this-package-key" - | otherwise -> "-package-name" - , this_arg ] - | this_arg <- flag ghcOptThisUnitId ] - - , concat [ ["-this-component-id", display this_cid ] - | this_cid <- flag ghcOptThisComponentId ] - - , if null (ghcOptInstantiatedWith opts) - then [] - else "-instantiated-with" - : intercalate "," (map (\(n,m) -> display n ++ "=" - ++ display m) - (ghcOptInstantiatedWith opts)) - : [] - - , concat [ ["-fno-code", "-fwrite-interface"] | flagBool ghcOptNoCode ] - - , [ "-hide-all-packages" | flagBool ghcOptHideAllPackages ] - , [ "-Wmissing-home-modules" | flagBool ghcOptWarnMissingHomeModules ] - , [ "-no-auto-link-packages" | flagBool ghcOptNoAutoLinkPackages ] - - , packageDbArgs implInfo (ghcOptPackageDBs opts) - - , concat $ let space "" = "" - space xs = ' ' : xs - in [ ["-package-id", display ipkgid ++ space (display rns)] - | (ipkgid,rns) <- flags ghcOptPackages ] - - ---------------------------- - -- Language and extensions - - , if supportsHaskell2010 implInfo - then [ "-X" ++ display lang | lang <- flag ghcOptLanguage ] - else [] - - , [ ext' - | ext <- flags ghcOptExtensions - , ext' <- case Map.lookup ext (ghcOptExtensionMap opts) of - Just (Just arg) -> [arg] - Just Nothing -> [] - Nothing -> - error $ "Distribution.Simple.Program.GHC.renderGhcOptions: " - ++ display ext ++ " not present in ghcOptExtensionMap." 
- ] - - ---------------- - -- GHCi - - , concat [ [ "-ghci-script", script ] | script <- ghcOptGHCiScripts opts - , flagGhciScript implInfo ] - - --------------- - -- Inputs - - , [ display modu | modu <- flags ghcOptInputModules ] - , flags ghcOptInputFiles - - , concat [ [ "-o", out] | out <- flag ghcOptOutputFile ] - , concat [ [ "-dyno", out] | out <- flag ghcOptOutputDynFile ] - - --------------- - -- Extra - - , ghcOptExtra opts - - ] - - - where - implInfo = getImplInfo comp - isOSX = os == OSX - flag flg = flagToList (flg opts) - flags flg = fromNubListR . flg $ opts - flagBool flg = fromFlagOrDefault False (flg opts) - -verbosityOpts :: Verbosity -> [String] -verbosityOpts verbosity - | verbosity >= deafening = ["-v"] - | verbosity >= normal = [] - | otherwise = ["-w", "-v0"] - - --- | GHC <7.6 uses '-package-conf' instead of '-package-db'. -packageDbArgsConf :: PackageDBStack -> [String] -packageDbArgsConf dbstack = case dbstack of - (GlobalPackageDB:UserPackageDB:dbs) -> concatMap specific dbs - (GlobalPackageDB:dbs) -> ("-no-user-package-conf") - : concatMap specific dbs - _ -> ierror - where - specific (SpecificPackageDB db) = [ "-package-conf", db ] - specific _ = ierror - ierror = error $ "internal error: unexpected package db stack: " - ++ show dbstack - --- | GHC >= 7.6 uses the '-package-db' flag. See --- https://ghc.haskell.org/trac/ghc/ticket/5977. 
-packageDbArgsDb :: PackageDBStack -> [String] --- special cases to make arguments prettier in common scenarios -packageDbArgsDb dbstack = case dbstack of - (GlobalPackageDB:UserPackageDB:dbs) - | all isSpecific dbs -> concatMap single dbs - (GlobalPackageDB:dbs) - | all isSpecific dbs -> "-no-user-package-db" - : concatMap single dbs - dbs -> "-clear-package-db" - : concatMap single dbs - where - single (SpecificPackageDB db) = [ "-package-db", db ] - single GlobalPackageDB = [ "-global-package-db" ] - single UserPackageDB = [ "-user-package-db" ] - isSpecific (SpecificPackageDB _) = True - isSpecific _ = False - -packageDbArgs :: GhcImplInfo -> PackageDBStack -> [String] -packageDbArgs implInfo - | flagPackageConf implInfo = packageDbArgsConf - | otherwise = packageDbArgsDb - --- ----------------------------------------------------------------------------- --- Boilerplate Monoid instance for GhcOptions - -instance Monoid GhcOptions where - mempty = gmempty - mappend = (<>) - -instance Semigroup GhcOptions where - (<>) = gmappend diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/HcPkg.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/HcPkg.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/HcPkg.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/HcPkg.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,495 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.HcPkg --- Copyright : Duncan Coutts 2009, 2013 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides an library interface to the @hc-pkg@ program. --- Currently only GHC and GHCJS have hc-pkg programs. 
- -module Distribution.Simple.Program.HcPkg ( - -- * Types - HcPkgInfo(..), - RegisterOptions(..), - defaultRegisterOptions, - - -- * Actions - init, - invoke, - register, - unregister, - recache, - expose, - hide, - dump, - describe, - list, - - -- * Program invocations - initInvocation, - registerInvocation, - unregisterInvocation, - recacheInvocation, - exposeInvocation, - hideInvocation, - dumpInvocation, - describeInvocation, - listInvocation, - ) where - -import Prelude () -import Distribution.Compat.Prelude hiding (init) - -import Distribution.InstalledPackageInfo -import Distribution.Simple.Compiler -import Distribution.Simple.Program.Types -import Distribution.Simple.Program.Run -import Distribution.Simple.Utils -import Distribution.Text -import Distribution.Types.ComponentId -import Distribution.Types.PackageId -import Distribution.Types.UnitId -import Distribution.Verbosity -import Distribution.Compat.Exception - -import Data.List - ( stripPrefix ) -import System.FilePath as FilePath - ( (), (<.>) - , splitPath, splitDirectories, joinPath, isPathSeparator ) -import qualified System.FilePath.Posix as FilePath.Posix - --- | Information about the features and capabilities of an @hc-pkg@ --- program. --- -data HcPkgInfo = HcPkgInfo - { hcPkgProgram :: ConfiguredProgram - , noPkgDbStack :: Bool -- ^ no package DB stack supported - , noVerboseFlag :: Bool -- ^ hc-pkg does not support verbosity flags - , flagPackageConf :: Bool -- ^ use package-conf option instead of package-db - , supportsDirDbs :: Bool -- ^ supports directory style package databases - , requiresDirDbs :: Bool -- ^ requires directory style package databases - , nativeMultiInstance :: Bool -- ^ supports --enable-multi-instance flag - , recacheMultiInstance :: Bool -- ^ supports multi-instance via recache - , suppressFilesCheck :: Bool -- ^ supports --force-files or equivalent - } - - --- | Call @hc-pkg@ to initialise a package database at the location {path}. 
--- --- > hc-pkg init {path} --- -init :: HcPkgInfo -> Verbosity -> Bool -> FilePath -> IO () -init hpi verbosity preferCompat path - | not (supportsDirDbs hpi) - || (not (requiresDirDbs hpi) && preferCompat) - = writeFile path "[]" - - | otherwise - = runProgramInvocation verbosity (initInvocation hpi verbosity path) - --- | Run @hc-pkg@ using a given package DB stack, directly forwarding the --- provided command-line arguments to it. -invoke :: HcPkgInfo -> Verbosity -> PackageDBStack -> [String] -> IO () -invoke hpi verbosity dbStack extraArgs = - runProgramInvocation verbosity invocation - where - args = packageDbStackOpts hpi dbStack ++ extraArgs - invocation = programInvocation (hcPkgProgram hpi) args - --- | Additional variations in the behaviour for 'register'. -data RegisterOptions = RegisterOptions { - -- | Allows re-registering \/ overwriting an existing package - registerAllowOverwrite :: Bool, - - -- | Insist on the ability to register multiple instances of a - -- single version of a single package. This will fail if the @hc-pkg@ - -- does not support it, see 'nativeMultiInstance' and - -- 'recacheMultiInstance'. - registerMultiInstance :: Bool, - - -- | Require that no checks are performed on the existence of package - -- files mentioned in the registration info. This must be used if - -- registering prior to putting the files in their final place. This will - -- fail if the @hc-pkg@ does not support it, see 'suppressFilesCheck'. - registerSuppressFilesCheck :: Bool - } - --- | Defaults are @True@, @False@ and @False@ -defaultRegisterOptions :: RegisterOptions -defaultRegisterOptions = RegisterOptions { - registerAllowOverwrite = True, - registerMultiInstance = False, - registerSuppressFilesCheck = False - } - --- | Call @hc-pkg@ to register a package. 
--- --- > hc-pkg register {filename | -} [--user | --global | --package-db] --- -register :: HcPkgInfo -> Verbosity -> PackageDBStack - -> InstalledPackageInfo - -> RegisterOptions - -> IO () -register hpi verbosity packagedbs pkgInfo registerOptions - | registerMultiInstance registerOptions - , not (nativeMultiInstance hpi || recacheMultiInstance hpi) - = die' verbosity $ "HcPkg.register: the compiler does not support " - ++ "registering multiple instances of packages." - - | registerSuppressFilesCheck registerOptions - , not (suppressFilesCheck hpi) - = die' verbosity $ "HcPkg.register: the compiler does not support " - ++ "suppressing checks on files." - - -- This is a trick. Older versions of GHC do not support the - -- --enable-multi-instance flag for ghc-pkg register but it turns out that - -- the same ability is available by using ghc-pkg recache. The recache - -- command is there to support distro package managers that like to work - -- by just installing files and running update commands, rather than - -- special add/remove commands. So the way to register by this method is - -- to write the package registration file directly into the package db and - -- then call hc-pkg recache. 
- -- - | registerMultiInstance registerOptions - , recacheMultiInstance hpi - = do let pkgdb = last packagedbs - writeRegistrationFileDirectly verbosity hpi pkgdb pkgInfo - recache hpi verbosity pkgdb - - | otherwise - = runProgramInvocation verbosity - (registerInvocation hpi verbosity packagedbs pkgInfo registerOptions) - -writeRegistrationFileDirectly :: Verbosity - -> HcPkgInfo - -> PackageDB - -> InstalledPackageInfo - -> IO () -writeRegistrationFileDirectly verbosity hpi (SpecificPackageDB dir) pkgInfo - | supportsDirDbs hpi - = do let pkgfile = dir display (installedUnitId pkgInfo) <.> "conf" - writeUTF8File pkgfile (showInstalledPackageInfo pkgInfo) - - | otherwise - = die' verbosity $ "HcPkg.writeRegistrationFileDirectly: compiler does not support dir style package dbs" - -writeRegistrationFileDirectly verbosity _ _ _ = - -- We don't know here what the dir for the global or user dbs are, - -- if that's needed it'll require a bit more plumbing to support. - die' verbosity $ "HcPkg.writeRegistrationFileDirectly: only supports SpecificPackageDB for now" - - --- | Call @hc-pkg@ to unregister a package --- --- > hc-pkg unregister [pkgid] [--user | --global | --package-db] --- -unregister :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO () -unregister hpi verbosity packagedb pkgid = - runProgramInvocation verbosity - (unregisterInvocation hpi verbosity packagedb pkgid) - - --- | Call @hc-pkg@ to recache the registered packages. --- --- > hc-pkg recache [--user | --global | --package-db] --- -recache :: HcPkgInfo -> Verbosity -> PackageDB -> IO () -recache hpi verbosity packagedb = - runProgramInvocation verbosity - (recacheInvocation hpi verbosity packagedb) - - --- | Call @hc-pkg@ to expose a package. 
--- --- > hc-pkg expose [pkgid] [--user | --global | --package-db] --- -expose :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO () -expose hpi verbosity packagedb pkgid = - runProgramInvocation verbosity - (exposeInvocation hpi verbosity packagedb pkgid) - --- | Call @hc-pkg@ to retrieve a specific package --- --- > hc-pkg describe [pkgid] [--user | --global | --package-db] --- -describe :: HcPkgInfo -> Verbosity -> PackageDBStack -> PackageId -> IO [InstalledPackageInfo] -describe hpi verbosity packagedb pid = do - - output <- getProgramInvocationOutput verbosity - (describeInvocation hpi verbosity packagedb pid) - `catchIO` \_ -> return "" - - case parsePackages output of - Left ok -> return ok - _ -> die' verbosity $ "failed to parse output of '" - ++ programId (hcPkgProgram hpi) ++ " describe " ++ display pid ++ "'" - --- | Call @hc-pkg@ to hide a package. --- --- > hc-pkg hide [pkgid] [--user | --global | --package-db] --- -hide :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO () -hide hpi verbosity packagedb pkgid = - runProgramInvocation verbosity - (hideInvocation hpi verbosity packagedb pkgid) - - --- | Call @hc-pkg@ to get all the details of all the packages in the given --- package database. --- -dump :: HcPkgInfo -> Verbosity -> PackageDB -> IO [InstalledPackageInfo] -dump hpi verbosity packagedb = do - - output <- getProgramInvocationOutput verbosity - (dumpInvocation hpi verbosity packagedb) - `catchIO` \e -> die' verbosity $ programId (hcPkgProgram hpi) ++ " dump failed: " - ++ displayException e - - case parsePackages output of - Left ok -> return ok - _ -> die' verbosity $ "failed to parse output of '" - ++ programId (hcPkgProgram hpi) ++ " dump'" - -parsePackages :: String -> Either [InstalledPackageInfo] [PError] -parsePackages str = - let parsed = map parseInstalledPackageInfo (splitPkgs str) - in case [ msg | ParseFailed msg <- parsed ] of - [] -> Left [ setUnitId - . 
maybe id mungePackagePaths (pkgRoot pkg) - $ pkg - | ParseOk _ pkg <- parsed ] - msgs -> Right msgs - ---TODO: this could be a lot faster. We're doing normaliseLineEndings twice --- and converting back and forth with lines/unlines. -splitPkgs :: String -> [String] -splitPkgs = checkEmpty . map unlines . splitWith ("---" ==) . lines - where - -- Handle the case of there being no packages at all. - checkEmpty [s] | all isSpace s = [] - checkEmpty ss = ss - - splitWith :: (a -> Bool) -> [a] -> [[a]] - splitWith p xs = ys : case zs of - [] -> [] - _:ws -> splitWith p ws - where (ys,zs) = break p xs - -mungePackagePaths :: FilePath -> InstalledPackageInfo -> InstalledPackageInfo --- Perform path/URL variable substitution as per the Cabal ${pkgroot} spec --- (http://www.haskell.org/pipermail/libraries/2009-May/011772.html) --- Paths/URLs can be relative to ${pkgroot} or ${pkgrooturl}. --- The "pkgroot" is the directory containing the package database. -mungePackagePaths pkgroot pkginfo = - pkginfo { - importDirs = mungePaths (importDirs pkginfo), - includeDirs = mungePaths (includeDirs pkginfo), - libraryDirs = mungePaths (libraryDirs pkginfo), - libraryDynDirs = mungePaths (libraryDynDirs pkginfo), - frameworkDirs = mungePaths (frameworkDirs pkginfo), - haddockInterfaces = mungePaths (haddockInterfaces pkginfo), - haddockHTMLs = mungeUrls (haddockHTMLs pkginfo) - } - where - mungePaths = map mungePath - mungeUrls = map mungeUrl - - mungePath p = case stripVarPrefix "${pkgroot}" p of - Just p' -> pkgroot p' - Nothing -> p - - mungeUrl p = case stripVarPrefix "${pkgrooturl}" p of - Just p' -> toUrlPath pkgroot p' - Nothing -> p - - toUrlPath r p = "file:///" - -- URLs always use posix style '/' separators: - ++ FilePath.Posix.joinPath (r : FilePath.splitDirectories p) - - stripVarPrefix var p = - case splitPath p of - (root:path') -> case stripPrefix var root of - Just [sep] | isPathSeparator sep -> Just (joinPath path') - _ -> Nothing - _ -> Nothing - - --- Older 
installed package info files did not have the installedUnitId --- field, so if it is missing then we fill it as the source package ID. --- NB: Internal libraries not supported. -setUnitId :: InstalledPackageInfo -> InstalledPackageInfo -setUnitId pkginfo@InstalledPackageInfo { - installedUnitId = uid, - sourcePackageId = pid - } | unUnitId uid == "" - = pkginfo { - installedUnitId = mkLegacyUnitId pid, - installedComponentId_ = mkComponentId (display pid) - } -setUnitId pkginfo = pkginfo - - --- | Call @hc-pkg@ to get the source package Id of all the packages in the --- given package database. --- --- This is much less information than with 'dump', but also rather quicker. --- Note in particular that it does not include the 'UnitId', just --- the source 'PackageId' which is not necessarily unique in any package db. --- -list :: HcPkgInfo -> Verbosity -> PackageDB - -> IO [PackageId] -list hpi verbosity packagedb = do - - output <- getProgramInvocationOutput verbosity - (listInvocation hpi verbosity packagedb) - `catchIO` \_ -> die' verbosity $ programId (hcPkgProgram hpi) ++ " list failed" - - case parsePackageIds output of - Just ok -> return ok - _ -> die' verbosity $ "failed to parse output of '" - ++ programId (hcPkgProgram hpi) ++ " list'" - - where - parsePackageIds = traverse simpleParse . 
words - --------------------------- --- The program invocations --- - -initInvocation :: HcPkgInfo -> Verbosity -> FilePath -> ProgramInvocation -initInvocation hpi verbosity path = - programInvocation (hcPkgProgram hpi) args - where - args = ["init", path] - ++ verbosityOpts hpi verbosity - -registerInvocation - :: HcPkgInfo -> Verbosity -> PackageDBStack - -> InstalledPackageInfo - -> RegisterOptions - -> ProgramInvocation -registerInvocation hpi verbosity packagedbs pkgInfo registerOptions = - (programInvocation (hcPkgProgram hpi) (args "-")) { - progInvokeInput = Just (showInstalledPackageInfo pkgInfo), - progInvokeInputEncoding = IOEncodingUTF8 - } - where - cmdname - | registerAllowOverwrite registerOptions = "update" - | registerMultiInstance registerOptions = "update" - | otherwise = "register" - - args file = [cmdname, file] - ++ (if noPkgDbStack hpi - then [packageDbOpts hpi (last packagedbs)] - else packageDbStackOpts hpi packagedbs) - ++ [ "--enable-multi-instance" - | registerMultiInstance registerOptions ] - ++ [ "--force-files" - | registerSuppressFilesCheck registerOptions ] - ++ verbosityOpts hpi verbosity - -unregisterInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId - -> ProgramInvocation -unregisterInvocation hpi verbosity packagedb pkgid = - programInvocation (hcPkgProgram hpi) $ - ["unregister", packageDbOpts hpi packagedb, display pkgid] - ++ verbosityOpts hpi verbosity - - -recacheInvocation :: HcPkgInfo -> Verbosity -> PackageDB - -> ProgramInvocation -recacheInvocation hpi verbosity packagedb = - programInvocation (hcPkgProgram hpi) $ - ["recache", packageDbOpts hpi packagedb] - ++ verbosityOpts hpi verbosity - - -exposeInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId - -> ProgramInvocation -exposeInvocation hpi verbosity packagedb pkgid = - programInvocation (hcPkgProgram hpi) $ - ["expose", packageDbOpts hpi packagedb, display pkgid] - ++ verbosityOpts hpi verbosity - -describeInvocation :: HcPkgInfo -> 
Verbosity -> PackageDBStack -> PackageId - -> ProgramInvocation -describeInvocation hpi verbosity packagedbs pkgid = - programInvocation (hcPkgProgram hpi) $ - ["describe", display pkgid] - ++ (if noPkgDbStack hpi - then [packageDbOpts hpi (last packagedbs)] - else packageDbStackOpts hpi packagedbs) - ++ verbosityOpts hpi verbosity - -hideInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId - -> ProgramInvocation -hideInvocation hpi verbosity packagedb pkgid = - programInvocation (hcPkgProgram hpi) $ - ["hide", packageDbOpts hpi packagedb, display pkgid] - ++ verbosityOpts hpi verbosity - - -dumpInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> ProgramInvocation -dumpInvocation hpi _verbosity packagedb = - (programInvocation (hcPkgProgram hpi) args) { - progInvokeOutputEncoding = IOEncodingUTF8 - } - where - args = ["dump", packageDbOpts hpi packagedb] - ++ verbosityOpts hpi silent - -- We use verbosity level 'silent' because it is important that we - -- do not contaminate the output with info/debug messages. - -listInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> ProgramInvocation -listInvocation hpi _verbosity packagedb = - (programInvocation (hcPkgProgram hpi) args) { - progInvokeOutputEncoding = IOEncodingUTF8 - } - where - args = ["list", "--simple-output", packageDbOpts hpi packagedb] - ++ verbosityOpts hpi silent - -- We use verbosity level 'silent' because it is important that we - -- do not contaminate the output with info/debug messages. 
- - -packageDbStackOpts :: HcPkgInfo -> PackageDBStack -> [String] -packageDbStackOpts hpi dbstack = case dbstack of - (GlobalPackageDB:UserPackageDB:dbs) -> "--global" - : "--user" - : map specific dbs - (GlobalPackageDB:dbs) -> "--global" - : ("--no-user-" ++ packageDbFlag hpi) - : map specific dbs - _ -> ierror - where - specific (SpecificPackageDB db) = "--" ++ packageDbFlag hpi ++ "=" ++ db - specific _ = ierror - ierror :: a - ierror = error ("internal error: unexpected package db stack: " ++ show dbstack) - -packageDbFlag :: HcPkgInfo -> String -packageDbFlag hpi - | flagPackageConf hpi - = "package-conf" - | otherwise - = "package-db" - -packageDbOpts :: HcPkgInfo -> PackageDB -> String -packageDbOpts _ GlobalPackageDB = "--global" -packageDbOpts _ UserPackageDB = "--user" -packageDbOpts hpi (SpecificPackageDB db) = "--" ++ packageDbFlag hpi ++ "=" ++ db - -verbosityOpts :: HcPkgInfo -> Verbosity -> [String] -verbosityOpts hpi v - | noVerboseFlag hpi - = [] - | v >= deafening = ["-v2"] - | v == silent = ["-v0"] - | otherwise = [] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Hpc.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Hpc.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Hpc.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Hpc.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,111 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Hpc --- Copyright : Thomas Tuegel 2011 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides an library interface to the @hpc@ program. 
- -module Distribution.Simple.Program.Hpc - ( markup - , union - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Control.Monad (mapM) -import System.Directory (makeRelativeToCurrentDirectory) - -import Distribution.ModuleName -import Distribution.Simple.Program.Run -import Distribution.Simple.Program.Types -import Distribution.Text -import Distribution.Simple.Utils -import Distribution.Verbosity -import Distribution.Version - --- | Invoke hpc with the given parameters. --- --- Prior to HPC version 0.7 (packaged with GHC 7.8), hpc did not handle --- multiple .mix paths correctly, so we print a warning, and only pass it the --- first path in the list. This means that e.g. test suites that import their --- library as a dependency can still work, but those that include the library --- modules directly (in other-modules) don't. -markup :: ConfiguredProgram - -> Version - -> Verbosity - -> FilePath -- ^ Path to .tix file - -> [FilePath] -- ^ Paths to .mix file directories - -> FilePath -- ^ Path where html output should be located - -> [ModuleName] -- ^ List of modules to exclude from report - -> IO () -markup hpc hpcVer verbosity tixFile hpcDirs destDir excluded = do - hpcDirs' <- if withinRange hpcVer (orLaterVersion version07) - then return hpcDirs - else do - warn verbosity $ "Your version of HPC (" ++ display hpcVer - ++ ") does not properly handle multiple search paths. " - ++ "Coverage report generation may fail unexpectedly. These " - ++ "issues are addressed in version 0.7 or later (GHC 7.8 or " - ++ "later)." - ++ if null droppedDirs - then "" - else " The following search paths have been abandoned: " - ++ show droppedDirs - return passedDirs - - -- Prior to GHC 8.0, hpc assumes all .mix paths are relative. 
- hpcDirs'' <- mapM makeRelativeToCurrentDirectory hpcDirs' - - runProgramInvocation verbosity - (markupInvocation hpc tixFile hpcDirs'' destDir excluded) - where - version07 = mkVersion [0, 7] - (passedDirs, droppedDirs) = splitAt 1 hpcDirs - -markupInvocation :: ConfiguredProgram - -> FilePath -- ^ Path to .tix file - -> [FilePath] -- ^ Paths to .mix file directories - -> FilePath -- ^ Path where html output should be - -- located - -> [ModuleName] -- ^ List of modules to exclude from - -- report - -> ProgramInvocation -markupInvocation hpc tixFile hpcDirs destDir excluded = - let args = [ "markup", tixFile - , "--destdir=" ++ destDir - ] - ++ map ("--hpcdir=" ++) hpcDirs - ++ ["--exclude=" ++ display moduleName - | moduleName <- excluded ] - in programInvocation hpc args - -union :: ConfiguredProgram - -> Verbosity - -> [FilePath] -- ^ Paths to .tix files - -> FilePath -- ^ Path to resultant .tix file - -> [ModuleName] -- ^ List of modules to exclude from union - -> IO () -union hpc verbosity tixFiles outFile excluded = - runProgramInvocation verbosity - (unionInvocation hpc tixFiles outFile excluded) - -unionInvocation :: ConfiguredProgram - -> [FilePath] -- ^ Paths to .tix files - -> FilePath -- ^ Path to resultant .tix file - -> [ModuleName] -- ^ List of modules to exclude from union - -> ProgramInvocation -unionInvocation hpc tixFiles outFile excluded = - programInvocation hpc $ concat - [ ["sum", "--union"] - , tixFiles - , ["--output=" ++ outFile] - , ["--exclude=" ++ display moduleName - | moduleName <- excluded ] - ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Internal.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Internal.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Internal.hs 2018-10-17 15:59:02.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Internal.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,46 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Internal --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Internal utilities used by Distribution.Simple.Program.*. - -module Distribution.Simple.Program.Internal ( - stripExtractVersion, - ) where - -import Prelude () -import Distribution.Compat.Prelude - --- | Extract the version number from the output of 'strip --version'. --- --- Invoking "strip --version" gives very inconsistent results. We ignore --- everything in parentheses (see #2497), look for the first word that starts --- with a number, and try parsing out the first two components of it. Non-GNU --- 'strip' doesn't appear to have a version flag. -stripExtractVersion :: String -> String -stripExtractVersion str = - let numeric "" = False - numeric (x:_) = isDigit x - - -- Filter out everything in parentheses. - filterPar' :: Int -> [String] -> [String] - filterPar' _ [] = [] - filterPar' n (x:xs) - | n >= 0 && "(" `isPrefixOf` x = filterPar' (n+1) ((tail x):xs) - | n > 0 && ")" `isSuffixOf` x = filterPar' (n-1) xs - | n > 0 = filterPar' n xs - | otherwise = x:filterPar' n xs - - filterPar = filterPar' 0 - - in case dropWhile (not . numeric) (filterPar . words $ str) of - (ver:_) -> - -- take the first two version components - let isDot = (== '.') - (major, rest) = break isDot ver - minor = takeWhile isDigit (dropWhile isDot rest) - in major ++ "." 
++ minor - _ -> "" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Ld.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Ld.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Ld.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Ld.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,93 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Ld --- Copyright : Duncan Coutts 2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides an library interface to the @ld@ linker program. - -module Distribution.Simple.Program.Ld ( - combineObjectFiles, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Simple.Compiler (arResponseFilesSupported) -import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(..)) -import Distribution.Simple.Program.ResponseFile - ( withResponseFile ) -import Distribution.Simple.Program.Run - ( ProgramInvocation, programInvocation, multiStageProgramInvocation - , runProgramInvocation ) -import Distribution.Simple.Program.Types - ( ConfiguredProgram(..) ) -import Distribution.Simple.Setup - ( fromFlagOrDefault, configUseResponseFiles ) -import Distribution.Simple.Utils - ( defaultTempFileOptions ) -import Distribution.Verbosity - ( Verbosity ) - -import System.Directory - ( renameFile ) -import System.FilePath - ( (<.>), takeDirectory ) - --- | Call @ld -r@ to link a bunch of object files together. 
--- -combineObjectFiles :: Verbosity -> LocalBuildInfo -> ConfiguredProgram - -> FilePath -> [FilePath] -> IO () -combineObjectFiles verbosity lbi ld target files = do - - -- Unlike "ar", the "ld" tool is not designed to be used with xargs. That is, - -- if we have more object files than fit on a single command line then we - -- have a slight problem. What we have to do is link files in batches into - -- a temp object file and then include that one in the next batch. - - let simpleArgs = ["-r", "-o", target] - - initialArgs = ["-r", "-o", target] - middleArgs = ["-r", "-o", target, tmpfile] - finalArgs = middleArgs - - simple = programInvocation ld simpleArgs - initial = programInvocation ld initialArgs - middle = programInvocation ld middleArgs - final = programInvocation ld finalArgs - - targetDir = takeDirectory target - - invokeWithResponesFile :: FilePath -> ProgramInvocation - invokeWithResponesFile atFile = - programInvocation ld $ simpleArgs ++ ['@' : atFile] - - oldVersionManualOverride = - fromFlagOrDefault False $ configUseResponseFiles $ configFlags lbi - -- Whether ghc's ar supports response files is a good proxy for - -- whether ghc's ld supports them as well. 
- responseArgumentsNotSupported = - not (arResponseFilesSupported (compiler lbi)) - - if oldVersionManualOverride || responseArgumentsNotSupported - then - run $ multiStageProgramInvocation simple (initial, middle, final) files - else - withResponseFile verbosity defaultTempFileOptions targetDir "ld.rsp" Nothing files $ - \path -> runProgramInvocation verbosity $ invokeWithResponesFile path - - where - tmpfile = target <.> "tmp" -- perhaps should use a proper temp file - - run :: [ProgramInvocation] -> IO () - run [] = return () - run [inv] = runProgramInvocation verbosity inv - run (inv:invs) = do runProgramInvocation verbosity inv - renameFile target tmpfile - run invs - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/ResponseFile.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/ResponseFile.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/ResponseFile.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/ResponseFile.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,60 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} ----------------------------------------------------------------------------- --- | --- Module : Distribution.Simple.Program.ResponseFile --- Copyright : (c) Sergey Vinokurov 2017 --- License : BSD3-style --- --- Maintainer : cabal-devel@haskell.org --- Created : 23 July 2017 ----------------------------------------------------------------------------- - -module Distribution.Simple.Program.ResponseFile (withResponseFile) where - -import Prelude () -import System.IO (TextEncoding, hSetEncoding, hPutStr, hClose) - -import Distribution.Compat.Prelude -import Distribution.Simple.Utils (TempFileOptions, withTempFileEx, debug) -import Distribution.Verbosity - -withResponseFile - :: Verbosity - -> TempFileOptions - -> 
FilePath -- ^ Working directory to create response file in. - -> FilePath -- ^ Template for response file name. - -> Maybe TextEncoding -- ^ Encoding to use for response file contents. - -> [String] -- ^ Arguments to put into response file. - -> (FilePath -> IO a) - -> IO a -withResponseFile verbosity tmpFileOpts workDir fileNameTemplate encoding arguments f = - withTempFileEx tmpFileOpts workDir fileNameTemplate $ \responseFileName hf -> do - traverse_ (hSetEncoding hf) encoding - let responseContents = unlines $ map escapeResponseFileArg arguments - hPutStr hf responseContents - hClose hf - debug verbosity $ responseFileName ++ " contents: <<<" - debug verbosity responseContents - debug verbosity $ ">>> " ++ responseFileName - f responseFileName - --- Support a gcc-like response file syntax. Each separate --- argument and its possible parameter(s), will be separated in the --- response file by an actual newline; all other whitespace, --- single quotes, double quotes, and the character used for escaping --- (backslash) are escaped. The called program will need to do a similar --- inverse operation to de-escape and re-constitute the argument list. -escapeResponseFileArg :: String -> String -escapeResponseFileArg = reverse . 
foldl' escape [] - where - escape :: String -> Char -> String - escape cs c = - case c of - '\\' -> c:'\\':cs - '\'' -> c:'\\':cs - '"' -> c:'\\':cs - _ | isSpace c -> c:'\\':cs - | otherwise -> c:cs - - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Run.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Run.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Run.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Run.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,283 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Run --- Copyright : Duncan Coutts 2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides a data type for program invocations and functions to --- run them. - -module Distribution.Simple.Program.Run ( - ProgramInvocation(..), - IOEncoding(..), - emptyProgramInvocation, - simpleProgramInvocation, - programInvocation, - multiStageProgramInvocation, - - runProgramInvocation, - getProgramInvocationOutput, - getProgramInvocationOutputAndErrors, - - getEffectiveEnvironment, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Simple.Program.Types -import Distribution.Simple.Utils -import Distribution.Verbosity -import Distribution.Compat.Environment - -import qualified Data.Map as Map -import System.FilePath -import System.Exit - ( ExitCode(..), exitWith ) - --- | Represents a specific invocation of a specific program. --- --- This is used as an intermediate type between deciding how to call a program --- and actually doing it. 
This provides the opportunity to the caller to --- adjust how the program will be called. These invocations can either be run --- directly or turned into shell or batch scripts. --- -data ProgramInvocation = ProgramInvocation { - progInvokePath :: FilePath, - progInvokeArgs :: [String], - progInvokeEnv :: [(String, Maybe String)], - -- Extra paths to add to PATH - progInvokePathEnv :: [FilePath], - progInvokeCwd :: Maybe FilePath, - progInvokeInput :: Maybe String, - progInvokeInputEncoding :: IOEncoding, - progInvokeOutputEncoding :: IOEncoding - } - -data IOEncoding = IOEncodingText -- locale mode text - | IOEncodingUTF8 -- always utf8 - -encodeToIOData :: IOEncoding -> String -> IOData -encodeToIOData IOEncodingText = IODataText -encodeToIOData IOEncodingUTF8 = IODataBinary . toUTF8LBS - -emptyProgramInvocation :: ProgramInvocation -emptyProgramInvocation = - ProgramInvocation { - progInvokePath = "", - progInvokeArgs = [], - progInvokeEnv = [], - progInvokePathEnv = [], - progInvokeCwd = Nothing, - progInvokeInput = Nothing, - progInvokeInputEncoding = IOEncodingText, - progInvokeOutputEncoding = IOEncodingText - } - -simpleProgramInvocation :: FilePath -> [String] -> ProgramInvocation -simpleProgramInvocation path args = - emptyProgramInvocation { - progInvokePath = path, - progInvokeArgs = args - } - -programInvocation :: ConfiguredProgram -> [String] -> ProgramInvocation -programInvocation prog args = - emptyProgramInvocation { - progInvokePath = programPath prog, - progInvokeArgs = programDefaultArgs prog - ++ args - ++ programOverrideArgs prog, - progInvokeEnv = programOverrideEnv prog - } - - -runProgramInvocation :: Verbosity -> ProgramInvocation -> IO () -runProgramInvocation verbosity - ProgramInvocation { - progInvokePath = path, - progInvokeArgs = args, - progInvokeEnv = [], - progInvokePathEnv = [], - progInvokeCwd = Nothing, - progInvokeInput = Nothing - } = - rawSystemExit verbosity path args - -runProgramInvocation verbosity - ProgramInvocation { 
- progInvokePath = path, - progInvokeArgs = args, - progInvokeEnv = envOverrides, - progInvokePathEnv = extraPath, - progInvokeCwd = mcwd, - progInvokeInput = Nothing - } = do - pathOverride <- getExtraPathEnv envOverrides extraPath - menv <- getEffectiveEnvironment (envOverrides ++ pathOverride) - exitCode <- rawSystemIOWithEnv verbosity - path args - mcwd menv - Nothing Nothing Nothing - when (exitCode /= ExitSuccess) $ - exitWith exitCode - -runProgramInvocation verbosity - ProgramInvocation { - progInvokePath = path, - progInvokeArgs = args, - progInvokeEnv = envOverrides, - progInvokePathEnv = extraPath, - progInvokeCwd = mcwd, - progInvokeInput = Just inputStr, - progInvokeInputEncoding = encoding - } = do - pathOverride <- getExtraPathEnv envOverrides extraPath - menv <- getEffectiveEnvironment (envOverrides ++ pathOverride) - (_, errors, exitCode) <- rawSystemStdInOut verbosity - path args - mcwd menv - (Just input) IODataModeBinary - when (exitCode /= ExitSuccess) $ - die' verbosity $ "'" ++ path ++ "' exited with an error:\n" ++ errors - where - input = encodeToIOData encoding inputStr - -getProgramInvocationOutput :: Verbosity -> ProgramInvocation -> IO String -getProgramInvocationOutput verbosity inv = do - (output, errors, exitCode) <- getProgramInvocationOutputAndErrors verbosity inv - when (exitCode /= ExitSuccess) $ - die' verbosity $ "'" ++ progInvokePath inv ++ "' exited with an error:\n" ++ errors - return output - - -getProgramInvocationOutputAndErrors :: Verbosity -> ProgramInvocation - -> IO (String, String, ExitCode) -getProgramInvocationOutputAndErrors verbosity - ProgramInvocation { - progInvokePath = path, - progInvokeArgs = args, - progInvokeEnv = envOverrides, - progInvokePathEnv = extraPath, - progInvokeCwd = mcwd, - progInvokeInput = minputStr, - progInvokeOutputEncoding = encoding - } = do - let mode = case encoding of IOEncodingUTF8 -> IODataModeBinary - IOEncodingText -> IODataModeText - - decode (IODataBinary b) = 
normaliseLineEndings (fromUTF8LBS b) - decode (IODataText s) = s - - pathOverride <- getExtraPathEnv envOverrides extraPath - menv <- getEffectiveEnvironment (envOverrides ++ pathOverride) - (output, errors, exitCode) <- rawSystemStdInOut verbosity - path args - mcwd menv - input mode - return (decode output, errors, exitCode) - where - input = encodeToIOData encoding <$> minputStr - -getExtraPathEnv :: [(String, Maybe String)] -> [FilePath] -> NoCallStackIO [(String, Maybe String)] -getExtraPathEnv _ [] = return [] -getExtraPathEnv env extras = do - mb_path <- case lookup "PATH" env of - Just x -> return x - Nothing -> lookupEnv "PATH" - let extra = intercalate [searchPathSeparator] extras - path' = case mb_path of - Nothing -> extra - Just path -> extra ++ searchPathSeparator : path - return [("PATH", Just path')] - --- | Return the current environment extended with the given overrides. --- If an entry is specified twice in @overrides@, the second entry takes --- precedence. --- -getEffectiveEnvironment :: [(String, Maybe String)] - -> NoCallStackIO (Maybe [(String, String)]) -getEffectiveEnvironment [] = return Nothing -getEffectiveEnvironment overrides = - fmap (Just . Map.toList . apply overrides . Map.fromList) getEnvironment - where - apply os env = foldl' (flip update) env os - update (var, Nothing) = Map.delete var - update (var, Just val) = Map.insert var val - --- | Like the unix xargs program. Useful for when we've got very long command --- lines that might overflow an OS limit on command line length and so you --- need to invoke a command multiple times to get all the args in. --- --- It takes four template invocations corresponding to the simple, initial, --- middle and last invocations. 
If the number of args given is small enough --- that we can get away with just a single invocation then the simple one is --- used: --- --- > $ simple args --- --- If the number of args given means that we need to use multiple invocations --- then the templates for the initial, middle and last invocations are used: --- --- > $ initial args_0 --- > $ middle args_1 --- > $ middle args_2 --- > ... --- > $ final args_n --- -multiStageProgramInvocation - :: ProgramInvocation - -> (ProgramInvocation, ProgramInvocation, ProgramInvocation) - -> [String] - -> [ProgramInvocation] -multiStageProgramInvocation simple (initial, middle, final) args = - - let argSize inv = length (progInvokePath inv) - + foldl' (\s a -> length a + 1 + s) 1 (progInvokeArgs inv) - fixedArgSize = maximum (map argSize [simple, initial, middle, final]) - chunkSize = maxCommandLineSize - fixedArgSize - - in case splitChunks chunkSize args of - [] -> [ simple ] - - [c] -> [ simple `appendArgs` c ] - - (c:cs) -> [ initial `appendArgs` c ] - ++ [ middle `appendArgs` c'| c' <- init cs ] - ++ [ final `appendArgs` c'| let c' = last cs ] - - where - appendArgs :: ProgramInvocation -> [String] -> ProgramInvocation - inv `appendArgs` as = inv { progInvokeArgs = progInvokeArgs inv ++ as } - - splitChunks :: Int -> [[a]] -> [[[a]]] - splitChunks len = unfoldr $ \s -> - if null s then Nothing - else Just (chunk len s) - - chunk :: Int -> [[a]] -> ([[a]], [[a]]) - chunk len (s:_) | length s >= len = error toolong - chunk len ss = chunk' [] len ss - - chunk' :: [[a]] -> Int -> [[a]] -> ([[a]], [[a]]) - chunk' acc len (s:ss) - | len' < len = chunk' (s:acc) (len-len'-1) ss - where len' = length s - chunk' acc _ ss = (reverse acc, ss) - - toolong = "multiStageProgramInvocation: a single program arg is larger " - ++ "than the maximum command line length!" 
- - ---FIXME: discover this at configure time or runtime on unix --- The value is 32k on Windows and posix specifies a minimum of 4k --- but all sensible unixes use more than 4k. --- we could use getSysVar ArgumentLimit but that's in the unix lib --- -maxCommandLineSize :: Int -maxCommandLineSize = 30 * 1024 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Script.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Script.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Script.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Script.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,108 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Script --- Copyright : Duncan Coutts 2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides an library interface to the @hc-pkg@ program. --- Currently only GHC and LHC have hc-pkg programs. - -module Distribution.Simple.Program.Script ( - - invocationAsSystemScript, - invocationAsShellScript, - invocationAsBatchFile, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Simple.Program.Run -import Distribution.System - --- | Generate a system script, either POSIX shell script or Windows batch file --- as appropriate for the given system. --- -invocationAsSystemScript :: OS -> ProgramInvocation -> String -invocationAsSystemScript Windows = invocationAsBatchFile -invocationAsSystemScript _ = invocationAsShellScript - - --- | Generate a POSIX shell script that invokes a program. 
--- -invocationAsShellScript :: ProgramInvocation -> String -invocationAsShellScript - ProgramInvocation { - progInvokePath = path, - progInvokeArgs = args, - progInvokeEnv = envExtra, - progInvokeCwd = mcwd, - progInvokeInput = minput - } = unlines $ - [ "#!/bin/sh" ] - ++ concatMap setEnv envExtra - ++ [ "cd " ++ quote cwd | cwd <- maybeToList mcwd ] - ++ [ (case minput of - Nothing -> "" - Just input -> "echo " ++ quote input ++ " | ") - ++ unwords (map quote $ path : args) ++ " \"$@\""] - - where - setEnv (var, Nothing) = ["unset " ++ var, "export " ++ var] - setEnv (var, Just val) = ["export " ++ var ++ "=" ++ quote val] - - quote :: String -> String - quote s = "'" ++ escape s ++ "'" - - escape [] = [] - escape ('\'':cs) = "'\\''" ++ escape cs - escape (c :cs) = c : escape cs - - --- | Generate a Windows batch file that invokes a program. --- -invocationAsBatchFile :: ProgramInvocation -> String -invocationAsBatchFile - ProgramInvocation { - progInvokePath = path, - progInvokeArgs = args, - progInvokeEnv = envExtra, - progInvokeCwd = mcwd, - progInvokeInput = minput - } = unlines $ - [ "@echo off" ] - ++ map setEnv envExtra - ++ [ "cd \"" ++ cwd ++ "\"" | cwd <- maybeToList mcwd ] - ++ case minput of - Nothing -> - [ path ++ concatMap (' ':) args ] - - Just input -> - [ "(" ] - ++ [ "echo " ++ escape line | line <- lines input ] - ++ [ ") | " - ++ "\"" ++ path ++ "\"" - ++ concatMap (\arg -> ' ':quote arg) args ] - - where - setEnv (var, Nothing) = "set " ++ var ++ "=" - setEnv (var, Just val) = "set " ++ var ++ "=" ++ escape val - - quote :: String -> String - quote s = "\"" ++ escapeQ s ++ "\"" - - escapeQ [] = [] - escapeQ ('"':cs) = "\"\"\"" ++ escapeQ cs - escapeQ (c :cs) = c : escapeQ cs - - escape [] = [] - escape ('|':cs) = "^|" ++ escape cs - escape ('<':cs) = "^<" ++ escape cs - escape ('>':cs) = "^>" ++ escape cs - escape ('&':cs) = "^&" ++ escape cs - escape ('(':cs) = "^(" ++ escape cs - escape (')':cs) = "^)" ++ escape cs - escape ('^':cs) = 
"^^" ++ escape cs - escape (c :cs) = c : escape cs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Strip.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Strip.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Strip.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Strip.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,75 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Strip --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module provides an library interface to the @strip@ program. - -module Distribution.Simple.Program.Strip (stripLib, stripExe) - where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Simple.Program -import Distribution.Simple.Utils -import Distribution.System -import Distribution.Verbosity -import Distribution.Version - -import System.FilePath (takeBaseName) - -runStrip :: Verbosity -> ProgramDb -> FilePath -> [String] -> IO () -runStrip verbosity progDb path args = - case lookupProgram stripProgram progDb of - Just strip -> runProgram verbosity strip (args ++ [path]) - Nothing -> unless (buildOS == Windows) $ - -- Don't bother warning on windows, we don't expect them to - -- have the strip program anyway. 
- warn verbosity $ "Unable to strip executable or library '" - ++ (takeBaseName path) - ++ "' (missing the 'strip' program)" - -stripExe :: Verbosity -> Platform -> ProgramDb -> FilePath -> IO () -stripExe verbosity (Platform _arch os) progdb path = - runStrip verbosity progdb path args - where - args = case os of - OSX -> ["-x"] -- By default, stripping the ghc binary on at least - -- some OS X installations causes: - -- HSbase-3.0.o: unknown symbol `_environ'" - -- The -x flag fixes that. - _ -> [] - -stripLib :: Verbosity -> Platform -> ProgramDb -> FilePath -> IO () -stripLib verbosity (Platform arch os) progdb path = do - case os of - OSX -> -- '--strip-unneeded' is not supported on OS X, iOS, AIX, or - -- Solaris. See #1630. - return () - IOS -> return () - AIX -> return () - Solaris -> return () - Windows -> -- Stripping triggers a bug in 'strip.exe' for - -- libraries with lots identically named modules. See - -- #1784. - return() - Linux | arch == I386 -> - -- Versions of 'strip' on 32-bit Linux older than 2.18 are - -- broken. See #2339. 
- let okVersion = orLaterVersion (mkVersion [2,18]) - in case programVersion =<< lookupProgram stripProgram progdb of - Just v | withinRange v okVersion -> - runStrip verbosity progdb path args - _ -> warn verbosity $ "Unable to strip library '" - ++ (takeBaseName path) - ++ "' (version of 'strip' too old; " - ++ "requires >= 2.18 on 32-bit Linux)" - _ -> runStrip verbosity progdb path args - where - args = ["--strip-unneeded"] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program/Types.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,187 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE DeriveDataTypeable #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program.Types --- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This provides an abstraction which deals with configuring and running --- programs. A 'Program' is a static notion of a known program. A --- 'ConfiguredProgram' is a 'Program' that has been found on the current --- machine and is ready to be run (possibly with some user-supplied default --- args). Configuring a program involves finding its location and if necessary --- finding its version. There's reasonable default behavior for trying to find --- \"foo\" in PATH, being able to override its location, etc. 
--- -module Distribution.Simple.Program.Types ( - -- * Program and functions for constructing them - Program(..), - ProgramSearchPath, - ProgramSearchPathEntry(..), - simpleProgram, - - -- * Configured program and related functions - ConfiguredProgram(..), - programPath, - suppressOverrideArgs, - ProgArg, - ProgramLocation(..), - simpleConfiguredProgram, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.PackageDescription -import Distribution.Simple.Program.Find -import Distribution.Version -import Distribution.Verbosity - -import qualified Data.Map as Map - --- | Represents a program which can be configured. --- --- Note: rather than constructing this directly, start with 'simpleProgram' and --- override any extra fields. --- -data Program = Program { - -- | The simple name of the program, eg. ghc - programName :: String, - - -- | A function to search for the program if its location was not - -- specified by the user. Usually this will just be a call to - -- 'findProgramOnSearchPath'. - -- - -- It is supplied with the prevailing search path which will typically - -- just be used as-is, but can be extended or ignored as needed. - -- - -- For the purpose of change monitoring, in addition to the location - -- where the program was found, it returns all the other places that - -- were tried. - -- - programFindLocation :: Verbosity -> ProgramSearchPath - -> IO (Maybe (FilePath, [FilePath])), - - -- | Try to find the version of the program. For many programs this is - -- not possible or is not necessary so it's OK to return Nothing. - programFindVersion :: Verbosity -> FilePath -> IO (Maybe Version), - - -- | A function to do any additional configuration after we have - -- located the program (and perhaps identified its version). For example - -- it could add args, or environment vars. 
- programPostConf :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram, - -- | A function that filters any arguments that don't impact the output - -- from a commandline. Used to limit the volatility of dependency hashes - -- when using new-build. - programNormaliseArgs :: Maybe Version -> PackageDescription -> [String] -> [String] - } -instance Show Program where - show (Program name _ _ _ _) = "Program: " ++ name - -type ProgArg = String - --- | Represents a program which has been configured and is thus ready to be run. --- --- These are usually made by configuring a 'Program', but if you have to --- construct one directly then start with 'simpleConfiguredProgram' and --- override any extra fields. --- -data ConfiguredProgram = ConfiguredProgram { - -- | Just the name again - programId :: String, - - -- | The version of this program, if it is known. - programVersion :: Maybe Version, - - -- | Default command-line args for this program. - -- These flags will appear first on the command line, so they can be - -- overridden by subsequent flags. - programDefaultArgs :: [String], - - -- | Override command-line args for this program. - -- These flags will appear last on the command line, so they override - -- all earlier flags. - programOverrideArgs :: [String], - - -- | Override environment variables for this program. - -- These env vars will extend\/override the prevailing environment of - -- the current to form the environment for the new process. - programOverrideEnv :: [(String, Maybe String)], - - -- | A key-value map listing various properties of the program, useful - -- for feature detection. Populated during the configuration step, key - -- names depend on the specific program. - programProperties :: Map.Map String String, - - -- | Location of the program. eg. @\/usr\/bin\/ghc-6.4@ - programLocation :: ProgramLocation, - - -- | In addition to the 'programLocation' where the program was found, - -- these are additional locations that were looked at. 
The combination - -- of ths found location and these not-found locations can be used to - -- monitor to detect when the re-configuring the program might give a - -- different result (e.g. found in a different location). - -- - programMonitorFiles :: [FilePath] - } - deriving (Eq, Generic, Read, Show, Typeable) - -instance Binary ConfiguredProgram - --- | Where a program was found. Also tells us whether it's specified by user or --- not. This includes not just the path, but the program as well. -data ProgramLocation - = UserSpecified { locationPath :: FilePath } - -- ^The user gave the path to this program, - -- eg. --ghc-path=\/usr\/bin\/ghc-6.6 - | FoundOnSystem { locationPath :: FilePath } - -- ^The program was found automatically. - deriving (Eq, Generic, Read, Show) - -instance Binary ProgramLocation - --- | The full path of a configured program. -programPath :: ConfiguredProgram -> FilePath -programPath = locationPath . programLocation - --- | Suppress any extra arguments added by the user. -suppressOverrideArgs :: ConfiguredProgram -> ConfiguredProgram -suppressOverrideArgs prog = prog { programOverrideArgs = [] } - --- | Make a simple named program. --- --- By default we'll just search for it in the path and not try to find the --- version name. You can override these behaviours if necessary, eg: --- --- > (simpleProgram "foo") { programFindLocation = ... , programFindVersion ... } --- -simpleProgram :: String -> Program -simpleProgram name = Program { - programName = name, - programFindLocation = \v p -> findProgramOnSearchPath v p name, - programFindVersion = \_ _ -> return Nothing, - programPostConf = \_ p -> return p, - programNormaliseArgs = \_ _ -> id - } - --- | Make a simple 'ConfiguredProgram'. 
--- --- > simpleConfiguredProgram "foo" (FoundOnSystem path) --- -simpleConfiguredProgram :: String -> ProgramLocation -> ConfiguredProgram -simpleConfiguredProgram name loc = ConfiguredProgram { - programId = name, - programVersion = Nothing, - programDefaultArgs = [], - programOverrideArgs = [], - programOverrideEnv = [], - programProperties = Map.empty, - programLocation = loc, - programMonitorFiles = [] -- did not look in any other locations - } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Program.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Program.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,239 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Program --- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This provides an abstraction which deals with configuring and running --- programs. A 'Program' is a static notion of a known program. A --- 'ConfiguredProgram' is a 'Program' that has been found on the current --- machine and is ready to be run (possibly with some user-supplied default --- args). Configuring a program involves finding its location and if necessary --- finding its version. There is also a 'ProgramDb' type which holds --- configured and not-yet configured programs. It is the parameter to lots of --- actions elsewhere in Cabal that need to look up and run programs. If we had --- a Cabal monad, the 'ProgramDb' would probably be a reader or --- state component of it. 
--- --- The module also defines all the known built-in 'Program's and the --- 'defaultProgramDb' which contains them all. --- --- One nice thing about using it is that any program that is --- registered with Cabal will get some \"configure\" and \".cabal\" --- helpers like --with-foo-args --foo-path= and extra-foo-args. --- --- There's also good default behavior for trying to find \"foo\" in --- PATH, being able to override its location, etc. --- --- There's also a hook for adding programs in a Setup.lhs script. See --- hookedPrograms in 'Distribution.Simple.UserHooks'. This gives a --- hook user the ability to get the above flags and such so that they --- don't have to write all the PATH logic inside Setup.lhs. - -module Distribution.Simple.Program ( - -- * Program and functions for constructing them - Program(..) - , ProgramSearchPath - , ProgramSearchPathEntry(..) - , simpleProgram - , findProgramOnSearchPath - , defaultProgramSearchPath - , findProgramVersion - - -- * Configured program and related functions - , ConfiguredProgram(..) - , programPath - , ProgArg - , ProgramLocation(..) - , runProgram - , getProgramOutput - , suppressOverrideArgs - - -- * Program invocations - , ProgramInvocation(..) 
- , emptyProgramInvocation - , simpleProgramInvocation - , programInvocation - , runProgramInvocation - , getProgramInvocationOutput - - -- * The collection of unconfigured and configured programs - , builtinPrograms - - -- * The collection of configured programs we can run - , ProgramDb - , defaultProgramDb - , emptyProgramDb - , restoreProgramDb - , addKnownProgram - , addKnownPrograms - , lookupKnownProgram - , knownPrograms - , getProgramSearchPath - , setProgramSearchPath - , userSpecifyPath - , userSpecifyPaths - , userMaybeSpecifyPath - , userSpecifyArgs - , userSpecifyArgss - , userSpecifiedArgs - , lookupProgram - , lookupProgramVersion - , updateProgram - , configureProgram - , configureAllKnownPrograms - , reconfigurePrograms - , requireProgram - , requireProgramVersion - , runDbProgram - , getDbProgramOutput - - -- * Programs that Cabal knows about - , ghcProgram - , ghcPkgProgram - , ghcjsProgram - , ghcjsPkgProgram - , hmakeProgram - , jhcProgram - , uhcProgram - , gccProgram - , arProgram - , stripProgram - , happyProgram - , alexProgram - , hsc2hsProgram - , c2hsProgram - , cpphsProgram - , hscolourProgram - , doctestProgram - , haddockProgram - , greencardProgram - , ldProgram - , tarProgram - , cppProgram - , pkgConfigProgram - , hpcProgram - - -- * deprecated - , ProgramConfiguration - , emptyProgramConfiguration - , defaultProgramConfiguration - , restoreProgramConfiguration - , rawSystemProgram - , rawSystemProgramStdout - , rawSystemProgramConf - , rawSystemProgramStdoutConf - , findProgramOnPath - , findProgramLocation - - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Simple.Program.Types -import Distribution.Simple.Program.Run -import Distribution.Simple.Program.Db -import Distribution.Simple.Program.Builtin -import Distribution.Simple.Program.Find -import Distribution.Simple.Utils -import Distribution.Verbosity - --- | Runs the given configured program. 
-runProgram :: Verbosity -- ^Verbosity - -> ConfiguredProgram -- ^The program to run - -> [ProgArg] -- ^Any /extra/ arguments to add - -> IO () -runProgram verbosity prog args = - runProgramInvocation verbosity (programInvocation prog args) - - --- | Runs the given configured program and gets the output. --- -getProgramOutput :: Verbosity -- ^Verbosity - -> ConfiguredProgram -- ^The program to run - -> [ProgArg] -- ^Any /extra/ arguments to add - -> IO String -getProgramOutput verbosity prog args = - getProgramInvocationOutput verbosity (programInvocation prog args) - - --- | Looks up the given program in the program database and runs it. --- -runDbProgram :: Verbosity -- ^verbosity - -> Program -- ^The program to run - -> ProgramDb -- ^look up the program here - -> [ProgArg] -- ^Any /extra/ arguments to add - -> IO () -runDbProgram verbosity prog programDb args = - case lookupProgram prog programDb of - Nothing -> die' verbosity notFound - Just configuredProg -> runProgram verbosity configuredProg args - where - notFound = "The program '" ++ programName prog - ++ "' is required but it could not be found" - --- | Looks up the given program in the program database and runs it. --- -getDbProgramOutput :: Verbosity -- ^verbosity - -> Program -- ^The program to run - -> ProgramDb -- ^look up the program here - -> [ProgArg] -- ^Any /extra/ arguments to add - -> IO String -getDbProgramOutput verbosity prog programDb args = - case lookupProgram prog programDb of - Nothing -> die' verbosity notFound - Just configuredProg -> getProgramOutput verbosity configuredProg args - where - notFound = "The program '" ++ programName prog - ++ "' is required but it could not be found" - - ---------------------- --- Deprecated aliases --- - -{-# DEPRECATED rawSystemProgram "use runProgram instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." 
#-} -rawSystemProgram :: Verbosity -> ConfiguredProgram - -> [ProgArg] -> IO () -rawSystemProgram = runProgram - -{-# DEPRECATED rawSystemProgramStdout "use getProgramOutput instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} -rawSystemProgramStdout :: Verbosity -> ConfiguredProgram - -> [ProgArg] -> IO String -rawSystemProgramStdout = getProgramOutput - -{-# DEPRECATED rawSystemProgramConf "use runDbProgram instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} -rawSystemProgramConf :: Verbosity -> Program -> ProgramConfiguration - -> [ProgArg] -> IO () -rawSystemProgramConf = runDbProgram - -{-# DEPRECATED rawSystemProgramStdoutConf "use getDbProgramOutput instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} -rawSystemProgramStdoutConf :: Verbosity -> Program -> ProgramConfiguration - -> [ProgArg] -> IO String -rawSystemProgramStdoutConf = getDbProgramOutput - -{-# DEPRECATED ProgramConfiguration "use ProgramDb instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} -type ProgramConfiguration = ProgramDb - -{-# DEPRECATED emptyProgramConfiguration "use emptyProgramDb instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} -{-# DEPRECATED defaultProgramConfiguration "use defaultProgramDb instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} -emptyProgramConfiguration, defaultProgramConfiguration :: ProgramConfiguration -emptyProgramConfiguration = emptyProgramDb -defaultProgramConfiguration = defaultProgramDb - -{-# DEPRECATED restoreProgramConfiguration "use restoreProgramDb instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} -restoreProgramConfiguration :: [Program] -> ProgramConfiguration - -> ProgramConfiguration -restoreProgramConfiguration = restoreProgramDb - -{-# DEPRECATED findProgramOnPath "use findProgramOnSearchPath instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." 
#-} -findProgramOnPath :: String -> Verbosity -> IO (Maybe FilePath) -findProgramOnPath name verbosity = - fmap (fmap fst) $ - findProgramOnSearchPath verbosity defaultProgramSearchPath name diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Register.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Register.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Register.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Register.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,593 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Register --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module deals with registering and unregistering packages. There are a --- couple ways it can do this, one is to do it directly. Another is to generate --- a script that can be run later to do it. The idea here being that the user --- is shielded from the details of what command to use for package registration --- for a particular compiler. In practice this aspect was not especially --- popular so we also provide a way to simply generate the package registration --- file which then must be manually passed to @ghc-pkg@. It is possible to --- generate registration information for where the package is to be installed, --- or alternatively to register the package in place in the build tree. The --- latter is occasionally handy, and will become more important when we try to --- build multi-package systems. --- --- This module does not delegate anything to the per-compiler modules but just --- mixes it all in in this module, which is rather unsatisfactory. 
The script --- generation and the unregister feature are not well used or tested. - -module Distribution.Simple.Register ( - register, - unregister, - - internalPackageDBPath, - - initPackageDB, - doesPackageDBExist, - createPackageDB, - deletePackageDB, - - abiHash, - invokeHcPkg, - registerPackage, - HcPkg.RegisterOptions(..), - HcPkg.defaultRegisterOptions, - generateRegistrationInfo, - inplaceInstalledPackageInfo, - absoluteInstalledPackageInfo, - generalInstalledPackageInfo, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.TargetInfo -import Distribution.Types.LocalBuildInfo -import Distribution.Types.ComponentLocalBuildInfo - -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.BuildPaths -import Distribution.Simple.BuildTarget - -import qualified Distribution.Simple.GHC as GHC -import qualified Distribution.Simple.GHCJS as GHCJS -import qualified Distribution.Simple.UHC as UHC -import qualified Distribution.Simple.HaskellSuite as HaskellSuite -import qualified Distribution.Simple.PackageIndex as Index - -import Distribution.Backpack.DescribeUnitId -import Distribution.Simple.Compiler -import Distribution.Simple.Program -import Distribution.Simple.Program.Script -import qualified Distribution.Simple.Program.HcPkg as HcPkg -import Distribution.Simple.Setup -import Distribution.PackageDescription -import Distribution.Package -import Distribution.License (licenseToSPDX, licenseFromSPDX) -import qualified Distribution.InstalledPackageInfo as IPI -import Distribution.InstalledPackageInfo (InstalledPackageInfo) -import Distribution.Simple.Utils -import Distribution.Utils.MapAccum -import Distribution.System -import Distribution.Text -import Distribution.Types.ComponentName -import Distribution.Verbosity as Verbosity -import Distribution.Version -import Distribution.Compat.Graph (IsNode(nodeKey)) - -import System.FilePath ((), (<.>), isAbsolute) -import System.Directory - -import Data.List (partition) 
-import qualified Data.ByteString.Lazy.Char8 as BS.Char8 - --- ----------------------------------------------------------------------------- --- Registration - -register :: PackageDescription -> LocalBuildInfo - -> RegisterFlags -- ^Install in the user's database?; verbose - -> IO () -register pkg_descr lbi0 flags = - -- Duncan originally asked for us to not register/install files - -- when there was no public library. But with per-component - -- configure, we legitimately need to install internal libraries - -- so that we can get them. So just unconditionally install. - doRegister - where - doRegister = do - targets <- readTargetInfos verbosity pkg_descr lbi0 (regArgs flags) - - -- It's important to register in build order, because ghc-pkg - -- will complain if a dependency is not registered. - let componentsToRegister - = neededTargetsInBuildOrder' pkg_descr lbi0 (map nodeKey targets) - - (_, ipi_mbs) <- - mapAccumM `flip` installedPkgs lbi0 `flip` componentsToRegister $ \index tgt -> - case targetComponent tgt of - CLib lib -> do - let clbi = targetCLBI tgt - lbi = lbi0 { installedPkgs = index } - ipi <- generateOne pkg_descr lib lbi clbi flags - return (Index.insert ipi index, Just ipi) - _ -> return (index, Nothing) - - registerAll pkg_descr lbi0 flags (catMaybes ipi_mbs) - where - verbosity = fromFlag (regVerbosity flags) - -generateOne :: PackageDescription -> Library -> LocalBuildInfo -> ComponentLocalBuildInfo - -> RegisterFlags - -> IO InstalledPackageInfo -generateOne pkg lib lbi clbi regFlags - = do - absPackageDBs <- absolutePackageDBPaths packageDbs - installedPkgInfo <- generateRegistrationInfo - verbosity pkg lib lbi clbi inplace reloc distPref - (registrationPackageDB absPackageDBs) - info verbosity (IPI.showInstalledPackageInfo installedPkgInfo) - return installedPkgInfo - where - inplace = fromFlag (regInPlace regFlags) - reloc = relocatable lbi - -- FIXME: there's really no guarantee this will work. 
- -- registering into a totally different db stack can - -- fail if dependencies cannot be satisfied. - packageDbs = nub $ withPackageDB lbi - ++ maybeToList (flagToMaybe (regPackageDB regFlags)) - distPref = fromFlag (regDistPref regFlags) - verbosity = fromFlag (regVerbosity regFlags) - -registerAll :: PackageDescription -> LocalBuildInfo -> RegisterFlags - -> [InstalledPackageInfo] - -> IO () -registerAll pkg lbi regFlags ipis - = do - when (fromFlag (regPrintId regFlags)) $ do - for_ ipis $ \installedPkgInfo -> - -- Only print the public library's IPI - when (packageId installedPkgInfo == packageId pkg - && IPI.sourceLibName installedPkgInfo == Nothing) $ - putStrLn (display (IPI.installedUnitId installedPkgInfo)) - - -- Three different modes: - case () of - _ | modeGenerateRegFile -> writeRegistrationFileOrDirectory - | modeGenerateRegScript -> writeRegisterScript - | otherwise -> do - for_ ipis $ \ipi -> do - setupMessage' verbosity "Registering" (packageId pkg) - (libraryComponentName (IPI.sourceLibName ipi)) - (Just (IPI.instantiatedWith ipi)) - registerPackage verbosity (compiler lbi) (withPrograms lbi) - packageDbs ipi HcPkg.defaultRegisterOptions - - where - modeGenerateRegFile = isJust (flagToMaybe (regGenPkgConf regFlags)) - regFile = fromMaybe (display (packageId pkg) <.> "conf") - (fromFlag (regGenPkgConf regFlags)) - - modeGenerateRegScript = fromFlag (regGenScript regFlags) - - -- FIXME: there's really no guarantee this will work. - -- registering into a totally different db stack can - -- fail if dependencies cannot be satisfied. 
- packageDbs = nub $ withPackageDB lbi - ++ maybeToList (flagToMaybe (regPackageDB regFlags)) - verbosity = fromFlag (regVerbosity regFlags) - - writeRegistrationFileOrDirectory = do - -- Handles overwriting both directory and file - deletePackageDB regFile - case ipis of - [installedPkgInfo] -> do - info verbosity ("Creating package registration file: " ++ regFile) - writeUTF8File regFile (IPI.showInstalledPackageInfo installedPkgInfo) - _ -> do - info verbosity ("Creating package registration directory: " ++ regFile) - createDirectory regFile - let num_ipis = length ipis - lpad m xs = replicate (m - length ys) '0' ++ ys - where ys = take m xs - number i = lpad (length (show num_ipis)) (show i) - for_ (zip ([1..] :: [Int]) ipis) $ \(i, installedPkgInfo) -> - writeUTF8File (regFile (number i ++ "-" ++ display (IPI.installedUnitId installedPkgInfo))) - (IPI.showInstalledPackageInfo installedPkgInfo) - - writeRegisterScript = - case compilerFlavor (compiler lbi) of - UHC -> notice verbosity "Registration scripts not needed for uhc" - _ -> withHcPkg verbosity - "Registration scripts are not implemented for this compiler" - (compiler lbi) (withPrograms lbi) - (writeHcPkgRegisterScript verbosity ipis packageDbs) - - -generateRegistrationInfo :: Verbosity - -> PackageDescription - -> Library - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> Bool - -> Bool - -> FilePath - -> PackageDB - -> IO InstalledPackageInfo -generateRegistrationInfo verbosity pkg lib lbi clbi inplace reloc distPref packageDb = do - --TODO: eliminate pwd! - pwd <- getCurrentDirectory - - installedPkgInfo <- - if inplace - -- NB: With an inplace installation, the user may run './Setup - -- build' to update the library files, without reregistering. - -- In this case, it is critical that the ABI hash not flip. 
- then return (inplaceInstalledPackageInfo pwd distPref - pkg (mkAbiHash "inplace") lib lbi clbi) - else do - abi_hash <- abiHash verbosity pkg distPref lbi lib clbi - if reloc - then relocRegistrationInfo verbosity - pkg lib lbi clbi abi_hash packageDb - else return (absoluteInstalledPackageInfo - pkg abi_hash lib lbi clbi) - - - return installedPkgInfo - --- | Compute the 'AbiHash' of a library that we built inplace. -abiHash :: Verbosity - -> PackageDescription - -> FilePath - -> LocalBuildInfo - -> Library - -> ComponentLocalBuildInfo - -> IO AbiHash -abiHash verbosity pkg distPref lbi lib clbi = - case compilerFlavor comp of - GHC -> do - fmap mkAbiHash $ GHC.libAbiHash verbosity pkg lbi' lib clbi - GHCJS -> do - fmap mkAbiHash $ GHCJS.libAbiHash verbosity pkg lbi' lib clbi - _ -> return (mkAbiHash "") - where - comp = compiler lbi - lbi' = lbi { - withPackageDB = withPackageDB lbi - ++ [SpecificPackageDB (internalPackageDBPath lbi distPref)] - } - -relocRegistrationInfo :: Verbosity - -> PackageDescription - -> Library - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> AbiHash - -> PackageDB - -> IO InstalledPackageInfo -relocRegistrationInfo verbosity pkg lib lbi clbi abi_hash packageDb = - case (compilerFlavor (compiler lbi)) of - GHC -> do fs <- GHC.pkgRoot verbosity lbi packageDb - return (relocatableInstalledPackageInfo - pkg abi_hash lib lbi clbi fs) - _ -> die' verbosity - "Distribution.Simple.Register.relocRegistrationInfo: \ - \not implemented for this compiler" - -initPackageDB :: Verbosity -> Compiler -> ProgramDb -> FilePath -> IO () -initPackageDB verbosity comp progdb dbPath = - createPackageDB verbosity comp progdb False dbPath - --- | Create an empty package DB at the specified location. 
-createPackageDB :: Verbosity -> Compiler -> ProgramDb -> Bool - -> FilePath -> IO () -createPackageDB verbosity comp progdb preferCompat dbPath = - case compilerFlavor comp of - GHC -> HcPkg.init (GHC.hcPkgInfo progdb) verbosity preferCompat dbPath - GHCJS -> HcPkg.init (GHCJS.hcPkgInfo progdb) verbosity False dbPath - UHC -> return () - HaskellSuite _ -> HaskellSuite.initPackageDB verbosity progdb dbPath - _ -> die' verbosity $ - "Distribution.Simple.Register.createPackageDB: " - ++ "not implemented for this compiler" - -doesPackageDBExist :: FilePath -> NoCallStackIO Bool -doesPackageDBExist dbPath = do - -- currently one impl for all compiler flavours, but could change if needed - dir_exists <- doesDirectoryExist dbPath - if dir_exists - then return True - else doesFileExist dbPath - -deletePackageDB :: FilePath -> NoCallStackIO () -deletePackageDB dbPath = do - -- currently one impl for all compiler flavours, but could change if needed - dir_exists <- doesDirectoryExist dbPath - if dir_exists - then removeDirectoryRecursive dbPath - else do file_exists <- doesFileExist dbPath - when file_exists $ removeFile dbPath - --- | Run @hc-pkg@ using a given package DB stack, directly forwarding the --- provided command-line arguments to it. -invokeHcPkg :: Verbosity -> Compiler -> ProgramDb -> PackageDBStack - -> [String] -> IO () -invokeHcPkg verbosity comp progdb dbStack extraArgs = - withHcPkg verbosity "invokeHcPkg" comp progdb - (\hpi -> HcPkg.invoke hpi verbosity dbStack extraArgs) - -withHcPkg :: Verbosity -> String -> Compiler -> ProgramDb - -> (HcPkg.HcPkgInfo -> IO a) -> IO a -withHcPkg verbosity name comp progdb f = - case compilerFlavor comp of - GHC -> f (GHC.hcPkgInfo progdb) - GHCJS -> f (GHCJS.hcPkgInfo progdb) - _ -> die' verbosity ("Distribution.Simple.Register." 
++ name ++ ":\ - \not implemented for this compiler") - -registerPackage :: Verbosity - -> Compiler - -> ProgramDb - -> PackageDBStack - -> InstalledPackageInfo - -> HcPkg.RegisterOptions - -> IO () -registerPackage verbosity comp progdb packageDbs installedPkgInfo registerOptions = - case compilerFlavor comp of - GHC -> GHC.registerPackage verbosity progdb packageDbs installedPkgInfo registerOptions - GHCJS -> GHCJS.registerPackage verbosity progdb packageDbs installedPkgInfo registerOptions - HaskellSuite {} -> - HaskellSuite.registerPackage verbosity progdb packageDbs installedPkgInfo - _ | HcPkg.registerMultiInstance registerOptions - -> die' verbosity "Registering multiple package instances is not yet supported for this compiler" - UHC -> UHC.registerPackage verbosity comp progdb packageDbs installedPkgInfo - _ -> die' verbosity "Registering is not implemented for this compiler" - -writeHcPkgRegisterScript :: Verbosity - -> [InstalledPackageInfo] - -> PackageDBStack - -> HcPkg.HcPkgInfo - -> IO () -writeHcPkgRegisterScript verbosity ipis packageDbs hpi = do - let genScript installedPkgInfo = - let invocation = HcPkg.registerInvocation hpi Verbosity.normal - packageDbs installedPkgInfo - HcPkg.defaultRegisterOptions - in invocationAsSystemScript buildOS invocation - scripts = map genScript ipis - -- TODO: Do something more robust here - regScript = unlines scripts - - info verbosity ("Creating package registration script: " ++ regScriptFileName) - writeUTF8File regScriptFileName regScript - setFileExecutable regScriptFileName - -regScriptFileName :: FilePath -regScriptFileName = case buildOS of - Windows -> "register.bat" - _ -> "register.sh" - - --- ----------------------------------------------------------------------------- --- Making the InstalledPackageInfo - --- | Construct 'InstalledPackageInfo' for a library in a package, given a set --- of installation directories. 
--- -generalInstalledPackageInfo - :: ([FilePath] -> [FilePath]) -- ^ Translate relative include dir paths to - -- absolute paths. - -> PackageDescription - -> AbiHash - -> Library - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> InstallDirs FilePath - -> InstalledPackageInfo -generalInstalledPackageInfo adjustRelIncDirs pkg abi_hash lib lbi clbi installDirs = - IPI.InstalledPackageInfo { - IPI.sourcePackageId = packageId pkg, - IPI.installedUnitId = componentUnitId clbi, - IPI.installedComponentId_ = componentComponentId clbi, - IPI.instantiatedWith = componentInstantiatedWith clbi, - IPI.sourceLibName = libName lib, - IPI.compatPackageKey = componentCompatPackageKey clbi, - -- If GHC >= 8.4 we register with SDPX, otherwise with legacy license - IPI.license = - if ghc84 - then Left $ either id licenseToSPDX $ licenseRaw pkg - else Right $ either licenseFromSPDX id $ licenseRaw pkg, - IPI.copyright = copyright pkg, - IPI.maintainer = maintainer pkg, - IPI.author = author pkg, - IPI.stability = stability pkg, - IPI.homepage = homepage pkg, - IPI.pkgUrl = pkgUrl pkg, - IPI.synopsis = synopsis pkg, - IPI.description = description pkg, - IPI.category = category pkg, - IPI.abiHash = abi_hash, - IPI.indefinite = componentIsIndefinite clbi, - IPI.exposed = libExposed lib, - IPI.exposedModules = componentExposedModules clbi - -- add virtual modules into the list of exposed modules for the - -- package database as well. 
- ++ map (\name -> IPI.ExposedModule name Nothing) (virtualModules bi), - IPI.hiddenModules = otherModules bi, - IPI.trusted = IPI.trusted IPI.emptyInstalledPackageInfo, - IPI.importDirs = [ libdir installDirs | hasModules ], - IPI.libraryDirs = libdirs, - IPI.libraryDynDirs = dynlibdirs, - IPI.dataDir = datadir installDirs, - IPI.hsLibraries = (if hasLibrary - then [getHSLibraryName (componentUnitId clbi)] - else []) ++ extraBundledLibs bi, - IPI.extraLibraries = extraLibs bi, - IPI.extraGHCiLibraries = extraGHCiLibs bi, - IPI.includeDirs = absinc ++ adjustRelIncDirs relinc, - IPI.includes = includes bi, - IPI.depends = depends, - IPI.abiDepends = [], -- due to #5465 - IPI.ccOptions = [], -- Note. NOT ccOptions bi! - -- We don't want cc-options to be propagated - -- to C compilations in other packages. - IPI.cxxOptions = [], -- Also. NOT cxxOptions bi! - IPI.ldOptions = ldOptions bi, - IPI.frameworks = frameworks bi, - IPI.frameworkDirs = extraFrameworkDirs bi, - IPI.haddockInterfaces = [haddockdir installDirs haddockName pkg], - IPI.haddockHTMLs = [htmldir installDirs], - IPI.pkgRoot = Nothing - } - where - ghc84 = case compilerId $ compiler lbi of - CompilerId GHC v -> v >= mkVersion [8, 4] - _ -> False - - bi = libBuildInfo lib - --TODO: unclear what the root cause of the - -- duplication is, but we nub it here for now: - depends = ordNub $ map fst (componentPackageDeps clbi) - (absinc, relinc) = partition isAbsolute (includeDirs bi) - hasModules = not $ null (allLibModules lib clbi) - comp = compiler lbi - hasLibrary = (hasModules || not (null (cSources bi)) - || not (null (asmSources bi)) - || not (null (cmmSources bi)) - || not (null (cxxSources bi)) - || (not (null (jsSources bi)) && - compilerFlavor comp == GHCJS)) - && not (componentIsIndefinite clbi) - (libdirs, dynlibdirs) - | not hasLibrary - = (extraLibDirs bi, []) - -- the dynamic-library-dirs defaults to the library-dirs if not specified, - -- so this works whether the dynamic-library-dirs field is 
supported or not - - | libraryDynDirSupported comp - = (libdir installDirs : extraLibDirs bi, - dynlibdir installDirs : extraLibDirs bi) - - | otherwise - = (libdir installDirs : dynlibdir installDirs : extraLibDirs bi, []) - -- the compiler doesn't understand the dynamic-library-dirs field so we - -- add the dyn directory to the "normal" list in the library-dirs field - --- | Construct 'InstalledPackageInfo' for a library that is in place in the --- build tree. --- --- This function knows about the layout of in place packages. --- -inplaceInstalledPackageInfo :: FilePath -- ^ top of the build tree - -> FilePath -- ^ location of the dist tree - -> PackageDescription - -> AbiHash - -> Library - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> InstalledPackageInfo -inplaceInstalledPackageInfo inplaceDir distPref pkg abi_hash lib lbi clbi = - generalInstalledPackageInfo adjustRelativeIncludeDirs - pkg abi_hash lib lbi clbi installDirs - where - adjustRelativeIncludeDirs = map (inplaceDir ) - libTargetDir = componentBuildDir lbi clbi - installDirs = - (absoluteComponentInstallDirs pkg lbi (componentUnitId clbi) NoCopyDest) { - libdir = inplaceDir libTargetDir, - dynlibdir = inplaceDir libTargetDir, - datadir = inplaceDir dataDir pkg, - docdir = inplaceDocdir, - htmldir = inplaceHtmldir, - haddockdir = inplaceHtmldir - } - inplaceDocdir = inplaceDir distPref "doc" - inplaceHtmldir = inplaceDocdir "html" display (packageName pkg) - - --- | Construct 'InstalledPackageInfo' for the final install location of a --- library package. --- --- This function knows about the layout of installed packages. 
--- -absoluteInstalledPackageInfo :: PackageDescription - -> AbiHash - -> Library - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> InstalledPackageInfo -absoluteInstalledPackageInfo pkg abi_hash lib lbi clbi = - generalInstalledPackageInfo adjustReativeIncludeDirs - pkg abi_hash lib lbi clbi installDirs - where - -- For installed packages we install all include files into one dir, - -- whereas in the build tree they may live in multiple local dirs. - adjustReativeIncludeDirs _ - | null (installIncludes bi) = [] - | otherwise = [includedir installDirs] - bi = libBuildInfo lib - installDirs = absoluteComponentInstallDirs pkg lbi (componentUnitId clbi) NoCopyDest - - -relocatableInstalledPackageInfo :: PackageDescription - -> AbiHash - -> Library - -> LocalBuildInfo - -> ComponentLocalBuildInfo - -> FilePath - -> InstalledPackageInfo -relocatableInstalledPackageInfo pkg abi_hash lib lbi clbi pkgroot = - generalInstalledPackageInfo adjustReativeIncludeDirs - pkg abi_hash lib lbi clbi installDirs - where - -- For installed packages we install all include files into one dir, - -- whereas in the build tree they may live in multiple local dirs. - adjustReativeIncludeDirs _ - | null (installIncludes bi) = [] - | otherwise = [includedir installDirs] - bi = libBuildInfo lib - - installDirs = fmap (("${pkgroot}" ) . 
shortRelativePath pkgroot) - $ absoluteComponentInstallDirs pkg lbi (componentUnitId clbi) NoCopyDest - --- ----------------------------------------------------------------------------- --- Unregistration - -unregister :: PackageDescription -> LocalBuildInfo -> RegisterFlags -> IO () -unregister pkg lbi regFlags = do - let pkgid = packageId pkg - genScript = fromFlag (regGenScript regFlags) - verbosity = fromFlag (regVerbosity regFlags) - packageDb = fromFlagOrDefault (registrationPackageDB (withPackageDB lbi)) - (regPackageDB regFlags) - unreg hpi = - let invocation = HcPkg.unregisterInvocation - hpi Verbosity.normal packageDb pkgid - in if genScript - then writeFileAtomic unregScriptFileName - (BS.Char8.pack $ invocationAsSystemScript buildOS invocation) - else runProgramInvocation verbosity invocation - setupMessage verbosity "Unregistering" pkgid - withHcPkg verbosity "unregistering is only implemented for GHC and GHCJS" - (compiler lbi) (withPrograms lbi) unreg - -unregScriptFileName :: FilePath -unregScriptFileName = case buildOS of - Windows -> "unregister.bat" - _ -> "unregister.sh" - -internalPackageDBPath :: LocalBuildInfo -> FilePath -> FilePath -internalPackageDBPath lbi distPref = - case compilerFlavor (compiler lbi) of - UHC -> UHC.inplacePackageDbPath lbi - _ -> distPref "package.conf.inplace" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Setup.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2316 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - 
------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Setup --- Copyright : Isaac Jones 2003-2004 --- Duncan Coutts 2007 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is a big module, but not very complicated. The code is very regular --- and repetitive. It defines the command line interface for all the Cabal --- commands. For each command (like @configure@, @build@ etc) it defines a type --- that holds all the flags, the default set of flags and a 'CommandUI' that --- maps command line flags to and from the corresponding flags type. --- --- All the flags types are instances of 'Monoid', see --- --- for an explanation. --- --- The types defined here get used in the front end and especially in --- @cabal-install@ which has to do quite a bit of manipulating sets of command --- line flags. --- --- This is actually relatively nice, it works quite well. The main change it --- needs is to unify it with the code for managing sets of fields that can be --- read and written from files. This would allow us to save configure flags in --- config files. 
- -module Distribution.Simple.Setup ( - - GlobalFlags(..), emptyGlobalFlags, defaultGlobalFlags, globalCommand, - ConfigFlags(..), emptyConfigFlags, defaultConfigFlags, configureCommand, - configPrograms, - configAbsolutePaths, readPackageDbList, showPackageDbList, - CopyFlags(..), emptyCopyFlags, defaultCopyFlags, copyCommand, - InstallFlags(..), emptyInstallFlags, defaultInstallFlags, installCommand, - DoctestFlags(..), emptyDoctestFlags, defaultDoctestFlags, doctestCommand, - HaddockTarget(..), - HaddockFlags(..), emptyHaddockFlags, defaultHaddockFlags, haddockCommand, - HscolourFlags(..), emptyHscolourFlags, defaultHscolourFlags, hscolourCommand, - BuildFlags(..), emptyBuildFlags, defaultBuildFlags, buildCommand, - buildVerbose, - ReplFlags(..), defaultReplFlags, replCommand, - CleanFlags(..), emptyCleanFlags, defaultCleanFlags, cleanCommand, - RegisterFlags(..), emptyRegisterFlags, defaultRegisterFlags, registerCommand, - unregisterCommand, - SDistFlags(..), emptySDistFlags, defaultSDistFlags, sdistCommand, - TestFlags(..), emptyTestFlags, defaultTestFlags, testCommand, - TestShowDetails(..), - BenchmarkFlags(..), emptyBenchmarkFlags, - defaultBenchmarkFlags, benchmarkCommand, - CopyDest(..), - configureArgs, configureOptions, configureCCompiler, configureLinker, - buildOptions, haddockOptions, installDirsOptions, - programDbOptions, programDbPaths', - programConfigurationOptions, programConfigurationPaths', - programFlagsDescription, - replOptions, - splitArgs, - - defaultDistPref, optionDistPref, - - Flag(..), - toFlag, - fromFlag, - fromFlagOrDefault, - flagToMaybe, - flagToList, - maybeToFlag, - BooleanFlag(..), - boolOpt, boolOpt', trueArg, falseArg, - optionVerbosity, optionNumJobs, readPToMaybe ) where - -import Prelude () -import Distribution.Compat.Prelude hiding (get) - -import Distribution.Compiler -import Distribution.ReadE -import Distribution.Text -import Distribution.Parsec.Class -import Distribution.Pretty -import qualified 
Distribution.Compat.ReadP as Parse -import qualified Distribution.Compat.CharParsing as P -import Distribution.ParseUtils (readPToMaybe) -import qualified Text.PrettyPrint as Disp -import Distribution.ModuleName -import Distribution.PackageDescription hiding (Flag) -import Distribution.Simple.Command hiding (boolOpt, boolOpt') -import qualified Distribution.Simple.Command as Command -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Simple.Flag -import Distribution.Simple.Utils -import Distribution.Simple.Program -import Distribution.Simple.InstallDirs -import Distribution.Verbosity -import Distribution.Utils.NubList -import Distribution.Types.Dependency -import Distribution.Types.ComponentId -import Distribution.Types.Module -import Distribution.Types.PackageName - -import Distribution.Compat.Stack -import Distribution.Compat.Semigroup (Last' (..)) - -import Data.Function (on) - --- FIXME Not sure where this should live -defaultDistPref :: FilePath -defaultDistPref = "dist" - --- ------------------------------------------------------------ --- * Global flags --- ------------------------------------------------------------ - --- In fact since individual flags types are monoids and these are just sets of --- flags then they are also monoids pointwise. This turns out to be really --- useful. The mempty is the set of empty flags and mappend allows us to --- override specific flags. For example we can start with default flags and --- override with the ones we get from a file or the command line, or both. - --- | Flags that apply at the top level, not to any sub-command. 
-data GlobalFlags = GlobalFlags { - globalVersion :: Flag Bool, - globalNumericVersion :: Flag Bool - } deriving (Generic) - -defaultGlobalFlags :: GlobalFlags -defaultGlobalFlags = GlobalFlags { - globalVersion = Flag False, - globalNumericVersion = Flag False - } - -globalCommand :: [Command action] -> CommandUI GlobalFlags -globalCommand commands = CommandUI - { commandName = "" - , commandSynopsis = "" - , commandUsage = \pname -> - "This Setup program uses the Haskell Cabal Infrastructure.\n" - ++ "See http://www.haskell.org/cabal/ for more information.\n" - ++ "\n" - ++ "Usage: " ++ pname ++ " [GLOBAL FLAGS] [COMMAND [FLAGS]]\n" - , commandDescription = Just $ \pname -> - let - commands' = commands ++ [commandAddAction helpCommandUI undefined] - cmdDescs = getNormalCommandDescriptions commands' - maxlen = maximum $ [length name | (name, _) <- cmdDescs] - align str = str ++ replicate (maxlen - length str) ' ' - in - "Commands:\n" - ++ unlines [ " " ++ align name ++ " " ++ descr - | (name, descr) <- cmdDescs ] - ++ "\n" - ++ "For more information about a command use\n" - ++ " " ++ pname ++ " COMMAND --help\n\n" - ++ "Typical steps for installing Cabal packages:\n" - ++ concat [ " " ++ pname ++ " " ++ x ++ "\n" - | x <- ["configure", "build", "install"]] - , commandNotes = Nothing - , commandDefaultFlags = defaultGlobalFlags - , commandOptions = \_ -> - [option ['V'] ["version"] - "Print version information" - globalVersion (\v flags -> flags { globalVersion = v }) - trueArg - ,option [] ["numeric-version"] - "Print just the version number" - globalNumericVersion (\v flags -> flags { globalNumericVersion = v }) - trueArg - ] - } - -emptyGlobalFlags :: GlobalFlags -emptyGlobalFlags = mempty - -instance Monoid GlobalFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup GlobalFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Config flags --- 
------------------------------------------------------------ - --- | Flags to @configure@ command. --- --- IMPORTANT: every time a new flag is added, 'D.C.Setup.filterConfigureFlags' --- should be updated. --- IMPORTANT: every time a new flag is added, it should be added to the Eq instance -data ConfigFlags = ConfigFlags { - -- This is the same hack as in 'buildArgs' and 'copyArgs'. - -- TODO: Stop using this eventually when 'UserHooks' gets changed - configArgs :: [String], - - --FIXME: the configPrograms is only here to pass info through to configure - -- because the type of configure is constrained by the UserHooks. - -- when we change UserHooks next we should pass the initial - -- ProgramDb directly and not via ConfigFlags - configPrograms_ :: Last' ProgramDb, -- ^All programs that - -- @cabal@ may run - - configProgramPaths :: [(String, FilePath)], -- ^user specified programs paths - configProgramArgs :: [(String, [String])], -- ^user specified programs args - configProgramPathExtra :: NubList FilePath, -- ^Extend the $PATH - configHcFlavor :: Flag CompilerFlavor, -- ^The \"flavor\" of the - -- compiler, e.g. GHC. - configHcPath :: Flag FilePath, -- ^given compiler location - configHcPkg :: Flag FilePath, -- ^given hc-pkg location - configVanillaLib :: Flag Bool, -- ^Enable vanilla library - configProfLib :: Flag Bool, -- ^Enable profiling in the library - configSharedLib :: Flag Bool, -- ^Build shared library - configStaticLib :: Flag Bool, -- ^Build static library - configDynExe :: Flag Bool, -- ^Enable dynamic linking of the - -- executables. - configProfExe :: Flag Bool, -- ^Enable profiling in the - -- executables. - configProf :: Flag Bool, -- ^Enable profiling in the library - -- and executables. - configProfDetail :: Flag ProfDetailLevel, -- ^Profiling detail level - -- in the library and executables. 
- configProfLibDetail :: Flag ProfDetailLevel, -- ^Profiling detail level - -- in the library - configConfigureArgs :: [String], -- ^Extra arguments to @configure@ - configOptimization :: Flag OptimisationLevel, -- ^Enable optimization. - configProgPrefix :: Flag PathTemplate, -- ^Installed executable prefix. - configProgSuffix :: Flag PathTemplate, -- ^Installed executable suffix. - configInstallDirs :: InstallDirs (Flag PathTemplate), -- ^Installation - -- paths - configScratchDir :: Flag FilePath, - configExtraLibDirs :: [FilePath], -- ^ path to search for extra libraries - configExtraFrameworkDirs :: [FilePath], -- ^ path to search for extra - -- frameworks (OS X only) - configExtraIncludeDirs :: [FilePath], -- ^ path to search for header files - configIPID :: Flag String, -- ^ explicit IPID to be used - configCID :: Flag ComponentId, -- ^ explicit CID to be used - configDeterministic :: Flag Bool, -- ^ be as deterministic as possible - -- (e.g., invariant over GHC, database, - -- etc). Used by the test suite - - configDistPref :: Flag FilePath, -- ^"dist" prefix - configCabalFilePath :: Flag FilePath, -- ^ Cabal file to use - configVerbosity :: Flag Verbosity, -- ^verbosity level - configUserInstall :: Flag Bool, -- ^The --user\/--global flag - configPackageDBs :: [Maybe PackageDB], -- ^Which package DBs to use - configGHCiLib :: Flag Bool, -- ^Enable compiling library for GHCi - configSplitSections :: Flag Bool, -- ^Enable -split-sections with GHC - configSplitObjs :: Flag Bool, -- ^Enable -split-objs with GHC - configStripExes :: Flag Bool, -- ^Enable executable stripping - configStripLibs :: Flag Bool, -- ^Enable library stripping - configConstraints :: [Dependency], -- ^Additional constraints for - -- dependencies. - configDependencies :: [(PackageName, ComponentId)], - -- ^The packages depended on. - configInstantiateWith :: [(ModuleName, Module)], - -- ^ The requested Backpack instantiation. 
If empty, either this - -- package does not use Backpack, or we just want to typecheck - -- the indefinite package. - configConfigurationsFlags :: FlagAssignment, - configTests :: Flag Bool, -- ^Enable test suite compilation - configBenchmarks :: Flag Bool, -- ^Enable benchmark compilation - configCoverage :: Flag Bool, -- ^Enable program coverage - configLibCoverage :: Flag Bool, -- ^Enable program coverage (deprecated) - configExactConfiguration :: Flag Bool, - -- ^All direct dependencies and flags are provided on the command line by - -- the user via the '--dependency' and '--flags' options. - configFlagError :: Flag String, - -- ^Halt and show an error message indicating an error in flag assignment - configRelocatable :: Flag Bool, -- ^ Enable relocatable package built - configDebugInfo :: Flag DebugInfoLevel, -- ^ Emit debug info. - configUseResponseFiles :: Flag Bool - -- ^ Whether to use response files at all. They're used for such tools - -- as haddock, or or ld. - } - deriving (Generic, Read, Show) - -instance Binary ConfigFlags - --- | More convenient version of 'configPrograms'. Results in an --- 'error' if internal invariant is violated. -configPrograms :: WithCallStack (ConfigFlags -> ProgramDb) -configPrograms = maybe (error "FIXME: remove configPrograms") id . getLast' . 
configPrograms_ - -instance Eq ConfigFlags where - (==) a b = - -- configPrograms skipped: not user specified, has no Eq instance - equal configProgramPaths - && equal configProgramArgs - && equal configProgramPathExtra - && equal configHcFlavor - && equal configHcPath - && equal configHcPkg - && equal configVanillaLib - && equal configProfLib - && equal configSharedLib - && equal configStaticLib - && equal configDynExe - && equal configProfExe - && equal configProf - && equal configProfDetail - && equal configProfLibDetail - && equal configConfigureArgs - && equal configOptimization - && equal configProgPrefix - && equal configProgSuffix - && equal configInstallDirs - && equal configScratchDir - && equal configExtraLibDirs - && equal configExtraIncludeDirs - && equal configIPID - && equal configDeterministic - && equal configDistPref - && equal configVerbosity - && equal configUserInstall - && equal configPackageDBs - && equal configGHCiLib - && equal configSplitSections - && equal configSplitObjs - && equal configStripExes - && equal configStripLibs - && equal configConstraints - && equal configDependencies - && equal configConfigurationsFlags - && equal configTests - && equal configBenchmarks - && equal configCoverage - && equal configLibCoverage - && equal configExactConfiguration - && equal configFlagError - && equal configRelocatable - && equal configDebugInfo - && equal configUseResponseFiles - where - equal f = on (==) f a b - -configAbsolutePaths :: ConfigFlags -> NoCallStackIO ConfigFlags -configAbsolutePaths f = - (\v -> f { configPackageDBs = v }) - `liftM` traverse (maybe (return Nothing) (liftM Just . 
absolutePackageDBPath)) - (configPackageDBs f) - -defaultConfigFlags :: ProgramDb -> ConfigFlags -defaultConfigFlags progDb = emptyConfigFlags { - configArgs = [], - configPrograms_ = pure progDb, - configHcFlavor = maybe NoFlag Flag defaultCompilerFlavor, - configVanillaLib = Flag True, - configProfLib = NoFlag, - configSharedLib = NoFlag, - configStaticLib = NoFlag, - configDynExe = Flag False, - configProfExe = NoFlag, - configProf = NoFlag, - configProfDetail = NoFlag, - configProfLibDetail= NoFlag, - configOptimization = Flag NormalOptimisation, - configProgPrefix = Flag (toPathTemplate ""), - configProgSuffix = Flag (toPathTemplate ""), - configDistPref = NoFlag, - configCabalFilePath = NoFlag, - configVerbosity = Flag normal, - configUserInstall = Flag False, --TODO: reverse this -#if defined(mingw32_HOST_OS) - -- See #1589. - configGHCiLib = Flag True, -#else - configGHCiLib = NoFlag, -#endif - configSplitSections = Flag False, - configSplitObjs = Flag False, -- takes longer, so turn off by default - configStripExes = Flag True, - configStripLibs = Flag True, - configTests = Flag False, - configBenchmarks = Flag False, - configCoverage = Flag False, - configLibCoverage = NoFlag, - configExactConfiguration = Flag False, - configFlagError = NoFlag, - configRelocatable = Flag False, - configDebugInfo = Flag NoDebugInfo, - configUseResponseFiles = NoFlag - } - -configureCommand :: ProgramDb -> CommandUI ConfigFlags -configureCommand progDb = CommandUI - { commandName = "configure" - , commandSynopsis = "Prepare to build the package." 
- , commandDescription = Just $ \_ -> wrapText $ - "Configure how the package is built by setting " - ++ "package (and other) flags.\n" - ++ "\n" - ++ "The configuration affects several other commands, " - ++ "including build, test, bench, run, repl.\n" - , commandNotes = Just $ \_pname -> programFlagsDescription progDb - , commandUsage = \pname -> - "Usage: " ++ pname ++ " configure [FLAGS]\n" - , commandDefaultFlags = defaultConfigFlags progDb - , commandOptions = \showOrParseArgs -> - configureOptions showOrParseArgs - ++ programDbPaths progDb showOrParseArgs - configProgramPaths (\v fs -> fs { configProgramPaths = v }) - ++ programDbOption progDb showOrParseArgs - configProgramArgs (\v fs -> fs { configProgramArgs = v }) - ++ programDbOptions progDb showOrParseArgs - configProgramArgs (\v fs -> fs { configProgramArgs = v }) - } - --- | Inverse to 'dispModSubstEntry'. -parsecModSubstEntry :: ParsecParser (ModuleName, Module) -parsecModSubstEntry = do - k <- parsec - _ <- P.char '=' - v <- parsec - return (k, v) - --- | Pretty-print a single entry of a module substitution. 
-dispModSubstEntry :: (ModuleName, Module) -> Disp.Doc -dispModSubstEntry (k, v) = disp k <<>> Disp.char '=' <<>> disp v - -configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags] -configureOptions showOrParseArgs = - [optionVerbosity configVerbosity - (\v flags -> flags { configVerbosity = v }) - ,optionDistPref - configDistPref (\d flags -> flags { configDistPref = d }) - showOrParseArgs - - ,option [] ["compiler"] "compiler" - configHcFlavor (\v flags -> flags { configHcFlavor = v }) - (choiceOpt [ (Flag GHC, ("g", ["ghc"]), "compile with GHC") - , (Flag GHCJS, ([] , ["ghcjs"]), "compile with GHCJS") - , (Flag UHC, ([] , ["uhc"]), "compile with UHC") - -- "haskell-suite" compiler id string will be replaced - -- by a more specific one during the configure stage - , (Flag (HaskellSuite "haskell-suite"), ([] , ["haskell-suite"]), - "compile with a haskell-suite compiler")]) - - ,option "" ["cabal-file"] - "use this Cabal file" - configCabalFilePath (\v flags -> flags { configCabalFilePath = v }) - (reqArgFlag "PATH") - - ,option "w" ["with-compiler"] - "give the path to a particular compiler" - configHcPath (\v flags -> flags { configHcPath = v }) - (reqArgFlag "PATH") - - ,option "" ["with-hc-pkg"] - "give the path to the package tool" - configHcPkg (\v flags -> flags { configHcPkg = v }) - (reqArgFlag "PATH") - ] - ++ map liftInstallDirs installDirsOptions - ++ [option "" ["program-prefix"] - "prefix to be applied to installed executables" - configProgPrefix - (\v flags -> flags { configProgPrefix = v }) - (reqPathTemplateArgFlag "PREFIX") - - ,option "" ["program-suffix"] - "suffix to be applied to installed executables" - configProgSuffix (\v flags -> flags { configProgSuffix = v } ) - (reqPathTemplateArgFlag "SUFFIX") - - ,option "" ["library-vanilla"] - "Vanilla libraries" - configVanillaLib (\v flags -> flags { configVanillaLib = v }) - (boolOpt [] []) - - ,option "p" ["library-profiling"] - "Library profiling" - configProfLib (\v flags -> flags { 
configProfLib = v }) - (boolOpt "p" []) - - ,option "" ["shared"] - "Shared library" - configSharedLib (\v flags -> flags { configSharedLib = v }) - (boolOpt [] []) - - ,option "" ["static"] - "Static library" - configStaticLib (\v flags -> flags { configStaticLib = v }) - (boolOpt [] []) - - ,option "" ["executable-dynamic"] - "Executable dynamic linking" - configDynExe (\v flags -> flags { configDynExe = v }) - (boolOpt [] []) - - ,option "" ["profiling"] - "Executable and library profiling" - configProf (\v flags -> flags { configProf = v }) - (boolOpt [] []) - - ,option "" ["executable-profiling"] - "Executable profiling (DEPRECATED)" - configProfExe (\v flags -> flags { configProfExe = v }) - (boolOpt [] []) - - ,option "" ["profiling-detail"] - ("Profiling detail level for executable and library (default, " ++ - "none, exported-functions, toplevel-functions, all-functions).") - configProfDetail (\v flags -> flags { configProfDetail = v }) - (reqArg' "level" (Flag . flagToProfDetailLevel) - showProfDetailLevelFlag) - - ,option "" ["library-profiling-detail"] - "Profiling detail level for libraries only." - configProfLibDetail (\v flags -> flags { configProfLibDetail = v }) - (reqArg' "level" (Flag . flagToProfDetailLevel) - showProfDetailLevelFlag) - - ,multiOption "optimization" - configOptimization (\v flags -> flags { configOptimization = v }) - [optArg' "n" (Flag . flagToOptimisationLevel) - (\f -> case f of - Flag NoOptimisation -> [] - Flag NormalOptimisation -> [Nothing] - Flag MaximumOptimisation -> [Just "2"] - _ -> []) - "O" ["enable-optimization","enable-optimisation"] - "Build with optimization (n is 0--2, default is 1)", - noArg (Flag NoOptimisation) [] - ["disable-optimization","disable-optimisation"] - "Build without optimization" - ] - - ,multiOption "debug-info" - configDebugInfo (\v flags -> flags { configDebugInfo = v }) - [optArg' "n" (Flag . 
flagToDebugInfoLevel) - (\f -> case f of - Flag NoDebugInfo -> [] - Flag MinimalDebugInfo -> [Just "1"] - Flag NormalDebugInfo -> [Nothing] - Flag MaximalDebugInfo -> [Just "3"] - _ -> []) - "" ["enable-debug-info"] - "Emit debug info (n is 0--3, default is 0)", - noArg (Flag NoDebugInfo) [] - ["disable-debug-info"] - "Don't emit debug info" - ] - - ,option "" ["library-for-ghci"] - "compile library for use with GHCi" - configGHCiLib (\v flags -> flags { configGHCiLib = v }) - (boolOpt [] []) - - ,option "" ["split-sections"] - "compile library code such that unneeded definitions can be dropped from the final executable (GHC 7.8+)" - configSplitSections (\v flags -> flags { configSplitSections = v }) - (boolOpt [] []) - - ,option "" ["split-objs"] - "split library into smaller objects to reduce binary sizes (GHC 6.6+)" - configSplitObjs (\v flags -> flags { configSplitObjs = v }) - (boolOpt [] []) - - ,option "" ["executable-stripping"] - "strip executables upon installation to reduce binary sizes" - configStripExes (\v flags -> flags { configStripExes = v }) - (boolOpt [] []) - - ,option "" ["library-stripping"] - "strip libraries upon installation to reduce binary sizes" - configStripLibs (\v flags -> flags { configStripLibs = v }) - (boolOpt [] []) - - ,option "" ["configure-option"] - "Extra option for configure" - configConfigureArgs (\v flags -> flags { configConfigureArgs = v }) - (reqArg' "OPT" (\x -> [x]) id) - - ,option "" ["user-install"] - "doing a per-user installation" - configUserInstall (\v flags -> flags { configUserInstall = v }) - (boolOpt' ([],["user"]) ([], ["global"])) - - ,option "" ["package-db"] - ( "Append the given package database to the list of package" - ++ " databases used (to satisfy dependencies and register into)." - ++ " May be a specific file, 'global' or 'user'. The initial list" - ++ " is ['global'], ['global', 'user'], or ['global', $sandbox]," - ++ " depending on context. Use 'clear' to reset the list to empty." 
- ++ " See the user guide for details.") - configPackageDBs (\v flags -> flags { configPackageDBs = v }) - (reqArg' "DB" readPackageDbList showPackageDbList) - - ,option "f" ["flags"] - "Force values for the given flags in Cabal conditionals in the .cabal file. E.g., --flags=\"debug -usebytestrings\" forces the flag \"debug\" to true and \"usebytestrings\" to false." - configConfigurationsFlags (\v flags -> flags { configConfigurationsFlags = v }) - (reqArg "FLAGS" - (parsecToReadE (\err -> "Invalid flag assignment: " ++ err) parsecFlagAssignment) - showFlagAssignment) - - ,option "" ["extra-include-dirs"] - "A list of directories to search for header files" - configExtraIncludeDirs (\v flags -> flags {configExtraIncludeDirs = v}) - (reqArg' "PATH" (\x -> [x]) id) - - ,option "" ["deterministic"] - "Try to be as deterministic as possible (used by the test suite)" - configDeterministic (\v flags -> flags {configDeterministic = v}) - (boolOpt [] []) - - ,option "" ["ipid"] - "Installed package ID to compile this package as" - configIPID (\v flags -> flags {configIPID = v}) - (reqArgFlag "IPID") - - ,option "" ["cid"] - "Installed component ID to compile this component as" - (fmap display . 
configCID) (\v flags -> flags {configCID = fmap mkComponentId v}) - (reqArgFlag "CID") - - ,option "" ["extra-lib-dirs"] - "A list of directories to search for external libraries" - configExtraLibDirs (\v flags -> flags {configExtraLibDirs = v}) - (reqArg' "PATH" (\x -> [x]) id) - - ,option "" ["extra-framework-dirs"] - "A list of directories to search for external frameworks (OS X only)" - configExtraFrameworkDirs - (\v flags -> flags {configExtraFrameworkDirs = v}) - (reqArg' "PATH" (\x -> [x]) id) - - ,option "" ["extra-prog-path"] - "A list of directories to search for required programs (in addition to the normal search locations)" - configProgramPathExtra (\v flags -> flags {configProgramPathExtra = v}) - (reqArg' "PATH" (\x -> toNubList [x]) fromNubList) - - ,option "" ["constraint"] - "A list of additional constraints on the dependencies." - configConstraints (\v flags -> flags { configConstraints = v}) - (reqArg "DEPENDENCY" - (parsecToReadE (const "dependency expected") ((\x -> [x]) `fmap` parsec)) - (map display)) - - ,option "" ["dependency"] - "A list of exact dependencies. E.g., --dependency=\"void=void-0.5.8-177d5cdf20962d0581fe2e4932a6c309\"" - configDependencies (\v flags -> flags { configDependencies = v}) - (reqArg "NAME=CID" - (parsecToReadE (const "dependency expected") ((\x -> [x]) `fmap` parsecDependency)) - (map (\x -> display (fst x) ++ "=" ++ display (snd x)))) - - ,option "" ["instantiate-with"] - "A mapping of signature names to concrete module instantiations." - configInstantiateWith (\v flags -> flags { configInstantiateWith = v }) - (reqArg "NAME=MOD" - (parsecToReadE ("Cannot parse module substitution: " ++) (fmap (:[]) parsecModSubstEntry)) - (map (Disp.renderStyle defaultStyle . dispModSubstEntry))) - - ,option "" ["tests"] - "dependency checking and compilation for test suites listed in the package description file." 
- configTests (\v flags -> flags { configTests = v }) - (boolOpt [] []) - - ,option "" ["coverage"] - "build package with Haskell Program Coverage. (GHC only)" - configCoverage (\v flags -> flags { configCoverage = v }) - (boolOpt [] []) - - ,option "" ["library-coverage"] - "build package with Haskell Program Coverage. (GHC only) (DEPRECATED)" - configLibCoverage (\v flags -> flags { configLibCoverage = v }) - (boolOpt [] []) - - ,option "" ["exact-configuration"] - "All direct dependencies and flags are provided on the command line." - configExactConfiguration - (\v flags -> flags { configExactConfiguration = v }) - trueArg - - ,option "" ["benchmarks"] - "dependency checking and compilation for benchmarks listed in the package description file." - configBenchmarks (\v flags -> flags { configBenchmarks = v }) - (boolOpt [] []) - - ,option "" ["relocatable"] - "building a package that is relocatable. (GHC only)" - configRelocatable (\v flags -> flags { configRelocatable = v}) - (boolOpt [] []) - - ,option "" ["response-files"] - "enable workaround for old versions of programs like \"ar\" that do not support @file arguments" - configUseResponseFiles - (\v flags -> flags { configUseResponseFiles = v }) - (boolOpt' ([], ["disable-response-files"]) ([], [])) - ] - where - liftInstallDirs = - liftOption configInstallDirs (\v flags -> flags { configInstallDirs = v }) - - reqPathTemplateArgFlag title _sf _lf d get set = - reqArgFlag title _sf _lf d - (fmap fromPathTemplate . get) (set . fmap toPathTemplate) - -showFlagAssignment :: FlagAssignment -> [String] -showFlagAssignment = map showFlagValue' . unFlagAssignment - where - -- We can't use 'showFlagValue' because legacy custom-setups don't - -- support the '+' prefix in --flags; so we omit the (redundant) + prefix; - -- NB: we assume that we never have to set/enable '-'-prefixed flags here. 
- showFlagValue' :: (FlagName, Bool) -> String - showFlagValue' (f, True) = unFlagName f - showFlagValue' (f, False) = '-' : unFlagName f - -readPackageDbList :: String -> [Maybe PackageDB] -readPackageDbList "clear" = [Nothing] -readPackageDbList "global" = [Just GlobalPackageDB] -readPackageDbList "user" = [Just UserPackageDB] -readPackageDbList other = [Just (SpecificPackageDB other)] - -showPackageDbList :: [Maybe PackageDB] -> [String] -showPackageDbList = map showPackageDb - where - showPackageDb Nothing = "clear" - showPackageDb (Just GlobalPackageDB) = "global" - showPackageDb (Just UserPackageDB) = "user" - showPackageDb (Just (SpecificPackageDB db)) = db - -showProfDetailLevelFlag :: Flag ProfDetailLevel -> [String] -showProfDetailLevelFlag NoFlag = [] -showProfDetailLevelFlag (Flag dl) = [showProfDetailLevel dl] - -parsecDependency :: ParsecParser (PackageName, ComponentId) -parsecDependency = do - x <- parsec - _ <- P.char '=' - y <- parsec - return (x, y) - -installDirsOptions :: [OptionField (InstallDirs (Flag PathTemplate))] -installDirsOptions = - [ option "" ["prefix"] - "bake this prefix in preparation of installation" - prefix (\v flags -> flags { prefix = v }) - installDirArg - - , option "" ["bindir"] - "installation directory for executables" - bindir (\v flags -> flags { bindir = v }) - installDirArg - - , option "" ["libdir"] - "installation directory for libraries" - libdir (\v flags -> flags { libdir = v }) - installDirArg - - , option "" ["libsubdir"] - "subdirectory of libdir in which libs are installed" - libsubdir (\v flags -> flags { libsubdir = v }) - installDirArg - - , option "" ["dynlibdir"] - "installation directory for dynamic libraries" - dynlibdir (\v flags -> flags { dynlibdir = v }) - installDirArg - - , option "" ["libexecdir"] - "installation directory for program executables" - libexecdir (\v flags -> flags { libexecdir = v }) - installDirArg - - , option "" ["libexecsubdir"] - "subdirectory of libexecdir in which private 
executables are installed" - libexecsubdir (\v flags -> flags { libexecsubdir = v }) - installDirArg - - , option "" ["datadir"] - "installation directory for read-only data" - datadir (\v flags -> flags { datadir = v }) - installDirArg - - , option "" ["datasubdir"] - "subdirectory of datadir in which data files are installed" - datasubdir (\v flags -> flags { datasubdir = v }) - installDirArg - - , option "" ["docdir"] - "installation directory for documentation" - docdir (\v flags -> flags { docdir = v }) - installDirArg - - , option "" ["htmldir"] - "installation directory for HTML documentation" - htmldir (\v flags -> flags { htmldir = v }) - installDirArg - - , option "" ["haddockdir"] - "installation directory for haddock interfaces" - haddockdir (\v flags -> flags { haddockdir = v }) - installDirArg - - , option "" ["sysconfdir"] - "installation directory for configuration files" - sysconfdir (\v flags -> flags { sysconfdir = v }) - installDirArg - ] - where - installDirArg _sf _lf d get set = - reqArgFlag "DIR" _sf _lf d - (fmap fromPathTemplate . get) (set . fmap toPathTemplate) - -emptyConfigFlags :: ConfigFlags -emptyConfigFlags = mempty - -instance Monoid ConfigFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup ConfigFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Copy flags --- ------------------------------------------------------------ - --- | Flags to @copy@: (destdir, copy-prefix (backwards compat), verbosity) -data CopyFlags = CopyFlags { - copyDest :: Flag CopyDest, - copyDistPref :: Flag FilePath, - copyVerbosity :: Flag Verbosity, - -- This is the same hack as in 'buildArgs'. 
But I (ezyang) don't - -- think it's a hack, it's the right way to make hooks more robust - -- TODO: Stop using this eventually when 'UserHooks' gets changed - copyArgs :: [String], - copyCabalFilePath :: Flag FilePath - } - deriving (Show, Generic) - -defaultCopyFlags :: CopyFlags -defaultCopyFlags = CopyFlags { - copyDest = Flag NoCopyDest, - copyDistPref = NoFlag, - copyVerbosity = Flag normal, - copyArgs = [], - copyCabalFilePath = mempty - } - -copyCommand :: CommandUI CopyFlags -copyCommand = CommandUI - { commandName = "copy" - , commandSynopsis = "Copy the files of all/specific components to install locations." - , commandDescription = Just $ \_ -> wrapText $ - "Components encompass executables and libraries. " - ++ "Does not call register, and allows a prefix at install time. " - ++ "Without the --destdir flag, configure determines location.\n" - , commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " copy " - ++ " All the components in the package\n" - ++ " " ++ pname ++ " copy foo " - ++ " A component (i.e. lib, exe, test suite)" - , commandUsage = usageAlternatives "copy" $ - [ "[FLAGS]" - , "COMPONENTS [FLAGS]" - ] - , commandDefaultFlags = defaultCopyFlags - , commandOptions = \showOrParseArgs -> case showOrParseArgs of - ShowArgs -> filter ((`notElem` ["target-package-db"]) - . optionName) $ copyOptions ShowArgs - ParseArgs -> copyOptions ParseArgs -} - -copyOptions :: ShowOrParseArgs -> [OptionField CopyFlags] -copyOptions showOrParseArgs = - [optionVerbosity copyVerbosity (\v flags -> flags { copyVerbosity = v }) - - ,optionDistPref - copyDistPref (\d flags -> flags { copyDistPref = d }) - showOrParseArgs - - ,option "" ["destdir"] - "directory to copy files to, prepended to installation directories" - copyDest (\v flags -> case copyDest flags of - Flag (CopyToDb _) -> error "Use either 'destdir' or 'target-package-db'." - _ -> flags { copyDest = v }) - (reqArg "DIR" (succeedReadE (Flag . 
CopyTo)) - (\f -> case f of Flag (CopyTo p) -> [p]; _ -> [])) - - ,option "" ["target-package-db"] - "package database to copy files into. Required when using ${pkgroot} prefix." - copyDest (\v flags -> case copyDest flags of - NoFlag -> flags { copyDest = v } - Flag NoCopyDest -> flags { copyDest = v } - _ -> error "Use either 'destdir' or 'target-package-db'.") - (reqArg "DATABASE" (succeedReadE (Flag . CopyToDb)) - (\f -> case f of Flag (CopyToDb p) -> [p]; _ -> [])) - ] - -emptyCopyFlags :: CopyFlags -emptyCopyFlags = mempty - -instance Monoid CopyFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup CopyFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Install flags --- ------------------------------------------------------------ - --- | Flags to @install@: (package db, verbosity) -data InstallFlags = InstallFlags { - installPackageDB :: Flag PackageDB, - installDest :: Flag CopyDest, - installDistPref :: Flag FilePath, - installUseWrapper :: Flag Bool, - installInPlace :: Flag Bool, - installVerbosity :: Flag Verbosity, - -- this is only here, because we can not - -- change the hooks API. - installCabalFilePath :: Flag FilePath - } - deriving (Show, Generic) - -defaultInstallFlags :: InstallFlags -defaultInstallFlags = InstallFlags { - installPackageDB = NoFlag, - installDest = Flag NoCopyDest, - installDistPref = NoFlag, - installUseWrapper = Flag False, - installInPlace = Flag False, - installVerbosity = Flag normal, - installCabalFilePath = mempty - } - -installCommand :: CommandUI InstallFlags -installCommand = CommandUI - { commandName = "install" - , commandSynopsis = - "Copy the files into the install locations. Run register." - , commandDescription = Just $ \_ -> wrapText $ - "Unlike the copy command, install calls the register command." 
- ++ "If you want to install into a location that is not what was" - ++ "specified in the configure step, use the copy command.\n" - , commandNotes = Nothing - , commandUsage = \pname -> - "Usage: " ++ pname ++ " install [FLAGS]\n" - , commandDefaultFlags = defaultInstallFlags - , commandOptions = \showOrParseArgs -> case showOrParseArgs of - ShowArgs -> filter ((`notElem` ["target-package-db"]) - . optionName) $ installOptions ShowArgs - ParseArgs -> installOptions ParseArgs - } - -installOptions :: ShowOrParseArgs -> [OptionField InstallFlags] -installOptions showOrParseArgs = - [optionVerbosity installVerbosity (\v flags -> flags { installVerbosity = v }) - ,optionDistPref - installDistPref (\d flags -> flags { installDistPref = d }) - showOrParseArgs - - ,option "" ["inplace"] - "install the package in the install subdirectory of the dist prefix, so it can be used without being installed" - installInPlace (\v flags -> flags { installInPlace = v }) - trueArg - - ,option "" ["shell-wrappers"] - "using shell script wrappers around executables" - installUseWrapper (\v flags -> flags { installUseWrapper = v }) - (boolOpt [] []) - - ,option "" ["package-db"] "" - installPackageDB (\v flags -> flags { installPackageDB = v }) - (choiceOpt [ (Flag UserPackageDB, ([],["user"]), - "upon configuration register this package in the user's local package database") - , (Flag GlobalPackageDB, ([],["global"]), - "(default) upon configuration register this package in the system-wide package database")]) - ,option "" ["target-package-db"] - "package database to install into. Required when using ${pkgroot} prefix." - installDest (\v flags -> flags { installDest = v }) - (reqArg "DATABASE" (succeedReadE (Flag . 
CopyToDb)) - (\f -> case f of Flag (CopyToDb p) -> [p]; _ -> [])) - ] - -emptyInstallFlags :: InstallFlags -emptyInstallFlags = mempty - -instance Monoid InstallFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup InstallFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * SDist flags --- ------------------------------------------------------------ - --- | Flags to @sdist@: (snapshot, verbosity) -data SDistFlags = SDistFlags { - sDistSnapshot :: Flag Bool, - sDistDirectory :: Flag FilePath, - sDistDistPref :: Flag FilePath, - sDistListSources :: Flag FilePath, - sDistVerbosity :: Flag Verbosity - } - deriving (Show, Generic) - -defaultSDistFlags :: SDistFlags -defaultSDistFlags = SDistFlags { - sDistSnapshot = Flag False, - sDistDirectory = mempty, - sDistDistPref = NoFlag, - sDistListSources = mempty, - sDistVerbosity = Flag normal - } - -sdistCommand :: CommandUI SDistFlags -sdistCommand = CommandUI - { commandName = "sdist" - , commandSynopsis = - "Generate a source distribution file (.tar.gz)." 
- , commandDescription = Nothing - , commandNotes = Nothing - , commandUsage = \pname -> - "Usage: " ++ pname ++ " sdist [FLAGS]\n" - , commandDefaultFlags = defaultSDistFlags - , commandOptions = \showOrParseArgs -> - [optionVerbosity sDistVerbosity (\v flags -> flags { sDistVerbosity = v }) - ,optionDistPref - sDistDistPref (\d flags -> flags { sDistDistPref = d }) - showOrParseArgs - - ,option "" ["list-sources"] - "Just write a list of the package's sources to a file" - sDistListSources (\v flags -> flags { sDistListSources = v }) - (reqArgFlag "FILE") - - ,option "" ["snapshot"] - "Produce a snapshot source distribution" - sDistSnapshot (\v flags -> flags { sDistSnapshot = v }) - trueArg - - ,option "" ["output-directory"] - ("Generate a source distribution in the given directory, " - ++ "without creating a tarball") - sDistDirectory (\v flags -> flags { sDistDirectory = v }) - (reqArgFlag "DIR") - ] - } - -emptySDistFlags :: SDistFlags -emptySDistFlags = mempty - -instance Monoid SDistFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup SDistFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Register flags --- ------------------------------------------------------------ - --- | Flags to @register@ and @unregister@: (user package, gen-script, --- in-place, verbosity) -data RegisterFlags = RegisterFlags { - regPackageDB :: Flag PackageDB, - regGenScript :: Flag Bool, - regGenPkgConf :: Flag (Maybe FilePath), - regInPlace :: Flag Bool, - regDistPref :: Flag FilePath, - regPrintId :: Flag Bool, - regVerbosity :: Flag Verbosity, - -- Same as in 'buildArgs' and 'copyArgs' - regArgs :: [String], - regCabalFilePath :: Flag FilePath - } - deriving (Show, Generic) - -defaultRegisterFlags :: RegisterFlags -defaultRegisterFlags = RegisterFlags { - regPackageDB = NoFlag, - regGenScript = Flag False, - regGenPkgConf = NoFlag, - regInPlace = Flag False, - regDistPref = NoFlag, - regPrintId = Flag False, - 
regArgs = [], - regCabalFilePath = mempty, - regVerbosity = Flag normal - } - -registerCommand :: CommandUI RegisterFlags -registerCommand = CommandUI - { commandName = "register" - , commandSynopsis = - "Register this package with the compiler." - , commandDescription = Nothing - , commandNotes = Nothing - , commandUsage = \pname -> - "Usage: " ++ pname ++ " register [FLAGS]\n" - , commandDefaultFlags = defaultRegisterFlags - , commandOptions = \showOrParseArgs -> - [optionVerbosity regVerbosity (\v flags -> flags { regVerbosity = v }) - ,optionDistPref - regDistPref (\d flags -> flags { regDistPref = d }) - showOrParseArgs - - ,option "" ["packageDB"] "" - regPackageDB (\v flags -> flags { regPackageDB = v }) - (choiceOpt [ (Flag UserPackageDB, ([],["user"]), - "upon registration, register this package in the user's local package database") - , (Flag GlobalPackageDB, ([],["global"]), - "(default)upon registration, register this package in the system-wide package database")]) - - ,option "" ["inplace"] - "register the package in the build location, so it can be used without being installed" - regInPlace (\v flags -> flags { regInPlace = v }) - trueArg - - ,option "" ["gen-script"] - "instead of registering, generate a script to register later" - regGenScript (\v flags -> flags { regGenScript = v }) - trueArg - - ,option "" ["gen-pkg-config"] - "instead of registering, generate a package registration file/directory" - regGenPkgConf (\v flags -> flags { regGenPkgConf = v }) - (optArg' "PKG" Flag flagToList) - - ,option "" ["print-ipid"] - "print the installed package ID calculated for this package" - regPrintId (\v flags -> flags { regPrintId = v }) - trueArg - ] - } - -unregisterCommand :: CommandUI RegisterFlags -unregisterCommand = CommandUI - { commandName = "unregister" - , commandSynopsis = - "Unregister this package with the compiler." 
- , commandDescription = Nothing - , commandNotes = Nothing - , commandUsage = \pname -> - "Usage: " ++ pname ++ " unregister [FLAGS]\n" - , commandDefaultFlags = defaultRegisterFlags - , commandOptions = \showOrParseArgs -> - [optionVerbosity regVerbosity (\v flags -> flags { regVerbosity = v }) - ,optionDistPref - regDistPref (\d flags -> flags { regDistPref = d }) - showOrParseArgs - - ,option "" ["user"] "" - regPackageDB (\v flags -> flags { regPackageDB = v }) - (choiceOpt [ (Flag UserPackageDB, ([],["user"]), - "unregister this package in the user's local package database") - , (Flag GlobalPackageDB, ([],["global"]), - "(default) unregister this package in the system-wide package database")]) - - ,option "" ["gen-script"] - "Instead of performing the unregister command, generate a script to unregister later" - regGenScript (\v flags -> flags { regGenScript = v }) - trueArg - ] - } - -emptyRegisterFlags :: RegisterFlags -emptyRegisterFlags = mempty - -instance Monoid RegisterFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup RegisterFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * HsColour flags --- ------------------------------------------------------------ - -data HscolourFlags = HscolourFlags { - hscolourCSS :: Flag FilePath, - hscolourExecutables :: Flag Bool, - hscolourTestSuites :: Flag Bool, - hscolourBenchmarks :: Flag Bool, - hscolourForeignLibs :: Flag Bool, - hscolourDistPref :: Flag FilePath, - hscolourVerbosity :: Flag Verbosity, - hscolourCabalFilePath :: Flag FilePath - } - deriving (Show, Generic) - -emptyHscolourFlags :: HscolourFlags -emptyHscolourFlags = mempty - -defaultHscolourFlags :: HscolourFlags -defaultHscolourFlags = HscolourFlags { - hscolourCSS = NoFlag, - hscolourExecutables = Flag False, - hscolourTestSuites = Flag False, - hscolourBenchmarks = Flag False, - hscolourDistPref = NoFlag, - hscolourForeignLibs = Flag False, - hscolourVerbosity = Flag normal, - 
hscolourCabalFilePath = mempty - } - -instance Monoid HscolourFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup HscolourFlags where - (<>) = gmappend - -hscolourCommand :: CommandUI HscolourFlags -hscolourCommand = CommandUI - { commandName = "hscolour" - , commandSynopsis = - "Generate HsColour colourised code, in HTML format." - , commandDescription = Just (\_ -> "Requires the hscolour program.\n") - , commandNotes = Just $ \_ -> - "Deprecated in favour of 'cabal haddock --hyperlink-source'." - , commandUsage = \pname -> - "Usage: " ++ pname ++ " hscolour [FLAGS]\n" - , commandDefaultFlags = defaultHscolourFlags - , commandOptions = \showOrParseArgs -> - [optionVerbosity hscolourVerbosity - (\v flags -> flags { hscolourVerbosity = v }) - ,optionDistPref - hscolourDistPref (\d flags -> flags { hscolourDistPref = d }) - showOrParseArgs - - ,option "" ["executables"] - "Run hscolour for Executables targets" - hscolourExecutables (\v flags -> flags { hscolourExecutables = v }) - trueArg - - ,option "" ["tests"] - "Run hscolour for Test Suite targets" - hscolourTestSuites (\v flags -> flags { hscolourTestSuites = v }) - trueArg - - ,option "" ["benchmarks"] - "Run hscolour for Benchmark targets" - hscolourBenchmarks (\v flags -> flags { hscolourBenchmarks = v }) - trueArg - - ,option "" ["foreign-libraries"] - "Run hscolour for Foreign Library targets" - hscolourForeignLibs (\v flags -> flags { hscolourForeignLibs = v }) - trueArg - - ,option "" ["all"] - "Run hscolour for all targets" - (\f -> allFlags [ hscolourExecutables f - , hscolourTestSuites f - , hscolourBenchmarks f - , hscolourForeignLibs f - ]) - (\v flags -> flags { hscolourExecutables = v - , hscolourTestSuites = v - , hscolourBenchmarks = v - , hscolourForeignLibs = v - }) - trueArg - - ,option "" ["css"] - "Use a cascading style sheet" - hscolourCSS (\v flags -> flags { hscolourCSS = v }) - (reqArgFlag "PATH") - ] - } - --- 
------------------------------------------------------------ --- * Doctest flags --- ------------------------------------------------------------ - -data DoctestFlags = DoctestFlags { - doctestProgramPaths :: [(String, FilePath)], - doctestProgramArgs :: [(String, [String])], - doctestDistPref :: Flag FilePath, - doctestVerbosity :: Flag Verbosity - } - deriving (Show, Generic) - -defaultDoctestFlags :: DoctestFlags -defaultDoctestFlags = DoctestFlags { - doctestProgramPaths = mempty, - doctestProgramArgs = [], - doctestDistPref = NoFlag, - doctestVerbosity = Flag normal - } - -doctestCommand :: CommandUI DoctestFlags -doctestCommand = CommandUI - { commandName = "doctest" - , commandSynopsis = "Run doctest tests." - , commandDescription = Just $ \_ -> - "Requires the program doctest, version 0.12.\n" - , commandNotes = Nothing - , commandUsage = \pname -> - "Usage: " ++ pname ++ " doctest [FLAGS]\n" - , commandDefaultFlags = defaultDoctestFlags - , commandOptions = \showOrParseArgs -> - doctestOptions showOrParseArgs - ++ programDbPaths progDb ParseArgs - doctestProgramPaths (\v flags -> flags { doctestProgramPaths = v }) - ++ programDbOption progDb showOrParseArgs - doctestProgramArgs (\v fs -> fs { doctestProgramArgs = v }) - ++ programDbOptions progDb ParseArgs - doctestProgramArgs (\v flags -> flags { doctestProgramArgs = v }) - } - where - progDb = addKnownProgram doctestProgram - emptyProgramDb - -doctestOptions :: ShowOrParseArgs -> [OptionField DoctestFlags] -doctestOptions showOrParseArgs = - [optionVerbosity doctestVerbosity - (\v flags -> flags { doctestVerbosity = v }) - ,optionDistPref - doctestDistPref (\d flags -> flags { doctestDistPref = d }) - showOrParseArgs - ] - -emptyDoctestFlags :: DoctestFlags -emptyDoctestFlags = mempty - -instance Monoid DoctestFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup DoctestFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Haddock flags --- 
------------------------------------------------------------ - - --- | When we build haddock documentation, there are two cases: --- --- 1. We build haddocks only for the current development version, --- intended for local use and not for distribution. In this case, --- we store the generated documentation in @/doc/html/@. --- --- 2. We build haddocks for intended for uploading them to hackage. --- In this case, we need to follow the layout that hackage expects --- from documentation tarballs, and we might also want to use different --- flags than for development builds, so in this case we store the generated --- documentation in @/doc/html/-docs@. -data HaddockTarget = ForHackage | ForDevelopment deriving (Eq, Show, Generic) - -instance Binary HaddockTarget - -instance Text HaddockTarget where - disp ForHackage = Disp.text "for-hackage" - disp ForDevelopment = Disp.text "for-development" - - parse = Parse.choice [ Parse.string "for-hackage" >> return ForHackage - , Parse.string "for-development" >> return ForDevelopment] - -data HaddockFlags = HaddockFlags { - haddockProgramPaths :: [(String, FilePath)], - haddockProgramArgs :: [(String, [String])], - haddockHoogle :: Flag Bool, - haddockHtml :: Flag Bool, - haddockHtmlLocation :: Flag String, - haddockForHackage :: Flag HaddockTarget, - haddockExecutables :: Flag Bool, - haddockTestSuites :: Flag Bool, - haddockBenchmarks :: Flag Bool, - haddockForeignLibs :: Flag Bool, - haddockInternal :: Flag Bool, - haddockCss :: Flag FilePath, - haddockLinkedSource :: Flag Bool, - haddockQuickJump :: Flag Bool, - haddockHscolourCss :: Flag FilePath, - haddockContents :: Flag PathTemplate, - haddockDistPref :: Flag FilePath, - haddockKeepTempFiles:: Flag Bool, - haddockVerbosity :: Flag Verbosity, - haddockCabalFilePath :: Flag FilePath, - haddockArgs :: [String] - } - deriving (Show, Generic) - -defaultHaddockFlags :: HaddockFlags -defaultHaddockFlags = HaddockFlags { - haddockProgramPaths = mempty, - haddockProgramArgs = 
[], - haddockHoogle = Flag False, - haddockHtml = Flag False, - haddockHtmlLocation = NoFlag, - haddockForHackage = NoFlag, - haddockExecutables = Flag False, - haddockTestSuites = Flag False, - haddockBenchmarks = Flag False, - haddockForeignLibs = Flag False, - haddockInternal = Flag False, - haddockCss = NoFlag, - haddockLinkedSource = Flag False, - haddockQuickJump = Flag False, - haddockHscolourCss = NoFlag, - haddockContents = NoFlag, - haddockDistPref = NoFlag, - haddockKeepTempFiles= Flag False, - haddockVerbosity = Flag normal, - haddockCabalFilePath = mempty, - haddockArgs = mempty - } - -haddockCommand :: CommandUI HaddockFlags -haddockCommand = CommandUI - { commandName = "haddock" - , commandSynopsis = "Generate Haddock HTML documentation." - , commandDescription = Just $ \_ -> - "Requires the program haddock, version 2.x.\n" - , commandNotes = Nothing - , commandUsage = usageAlternatives "haddock" $ - [ "[FLAGS]" - , "COMPONENTS [FLAGS]" - ] - , commandDefaultFlags = defaultHaddockFlags - , commandOptions = \showOrParseArgs -> - haddockOptions showOrParseArgs - ++ programDbPaths progDb ParseArgs - haddockProgramPaths (\v flags -> flags { haddockProgramPaths = v}) - ++ programDbOption progDb showOrParseArgs - haddockProgramArgs (\v fs -> fs { haddockProgramArgs = v }) - ++ programDbOptions progDb ParseArgs - haddockProgramArgs (\v flags -> flags { haddockProgramArgs = v}) - } - where - progDb = addKnownProgram haddockProgram - $ addKnownProgram ghcProgram - $ emptyProgramDb - -haddockOptions :: ShowOrParseArgs -> [OptionField HaddockFlags] -haddockOptions showOrParseArgs = - [optionVerbosity haddockVerbosity - (\v flags -> flags { haddockVerbosity = v }) - ,optionDistPref - haddockDistPref (\d flags -> flags { haddockDistPref = d }) - showOrParseArgs - - ,option "" ["keep-temp-files"] - "Keep temporary files" - haddockKeepTempFiles (\b flags -> flags { haddockKeepTempFiles = b }) - trueArg - - ,option "" ["hoogle"] - "Generate a hoogle database" - 
haddockHoogle (\v flags -> flags { haddockHoogle = v }) - trueArg - - ,option "" ["html"] - "Generate HTML documentation (the default)" - haddockHtml (\v flags -> flags { haddockHtml = v }) - trueArg - - ,option "" ["html-location"] - "Location of HTML documentation for pre-requisite packages" - haddockHtmlLocation (\v flags -> flags { haddockHtmlLocation = v }) - (reqArgFlag "URL") - - ,option "" ["for-hackage"] - "Collection of flags to generate documentation suitable for upload to hackage" - haddockForHackage (\v flags -> flags { haddockForHackage = v }) - (noArg (Flag ForHackage)) - - ,option "" ["executables"] - "Run haddock for Executables targets" - haddockExecutables (\v flags -> flags { haddockExecutables = v }) - trueArg - - ,option "" ["tests"] - "Run haddock for Test Suite targets" - haddockTestSuites (\v flags -> flags { haddockTestSuites = v }) - trueArg - - ,option "" ["benchmarks"] - "Run haddock for Benchmark targets" - haddockBenchmarks (\v flags -> flags { haddockBenchmarks = v }) - trueArg - - ,option "" ["foreign-libraries"] - "Run haddock for Foreign Library targets" - haddockForeignLibs (\v flags -> flags { haddockForeignLibs = v }) - trueArg - - ,option "" ["all"] - "Run haddock for all targets" - (\f -> allFlags [ haddockExecutables f - , haddockTestSuites f - , haddockBenchmarks f - , haddockForeignLibs f - ]) - (\v flags -> flags { haddockExecutables = v - , haddockTestSuites = v - , haddockBenchmarks = v - , haddockForeignLibs = v - }) - trueArg - - ,option "" ["internal"] - "Run haddock for internal modules and include all symbols" - haddockInternal (\v flags -> flags { haddockInternal = v }) - trueArg - - ,option "" ["css"] - "Use PATH as the haddock stylesheet" - haddockCss (\v flags -> flags { haddockCss = v }) - (reqArgFlag "PATH") - - ,option "" ["hyperlink-source","hyperlink-sources","hyperlinked-source"] - "Hyperlink the documentation to the source code" - haddockLinkedSource (\v flags -> flags { haddockLinkedSource = v }) - 
trueArg - - ,option "" ["quickjump"] - "Generate an index for interactive documentation navigation" - haddockQuickJump (\v flags -> flags { haddockQuickJump = v }) - trueArg - - ,option "" ["hscolour-css"] - "Use PATH as the HsColour stylesheet" - haddockHscolourCss (\v flags -> flags { haddockHscolourCss = v }) - (reqArgFlag "PATH") - - ,option "" ["contents-location"] - "Bake URL in as the location for the contents page" - haddockContents (\v flags -> flags { haddockContents = v }) - (reqArg' "URL" - (toFlag . toPathTemplate) - (flagToList . fmap fromPathTemplate)) - ] - -emptyHaddockFlags :: HaddockFlags -emptyHaddockFlags = mempty - -instance Monoid HaddockFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup HaddockFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Clean flags --- ------------------------------------------------------------ - -data CleanFlags = CleanFlags { - cleanSaveConf :: Flag Bool, - cleanDistPref :: Flag FilePath, - cleanVerbosity :: Flag Verbosity, - cleanCabalFilePath :: Flag FilePath - } - deriving (Show, Generic) - -defaultCleanFlags :: CleanFlags -defaultCleanFlags = CleanFlags { - cleanSaveConf = Flag False, - cleanDistPref = NoFlag, - cleanVerbosity = Flag normal, - cleanCabalFilePath = mempty - } - -cleanCommand :: CommandUI CleanFlags -cleanCommand = CommandUI - { commandName = "clean" - , commandSynopsis = "Clean up after a build." 
- , commandDescription = Just $ \_ -> - "Removes .hi, .o, preprocessed sources, etc.\n" - , commandNotes = Nothing - , commandUsage = \pname -> - "Usage: " ++ pname ++ " clean [FLAGS]\n" - , commandDefaultFlags = defaultCleanFlags - , commandOptions = \showOrParseArgs -> - [optionVerbosity cleanVerbosity (\v flags -> flags { cleanVerbosity = v }) - ,optionDistPref - cleanDistPref (\d flags -> flags { cleanDistPref = d }) - showOrParseArgs - - ,option "s" ["save-configure"] - "Do not remove the configuration file (dist/setup-config) during cleaning. Saves need to reconfigure." - cleanSaveConf (\v flags -> flags { cleanSaveConf = v }) - trueArg - ] - } - -emptyCleanFlags :: CleanFlags -emptyCleanFlags = mempty - -instance Monoid CleanFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup CleanFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Build flags --- ------------------------------------------------------------ - -data BuildFlags = BuildFlags { - buildProgramPaths :: [(String, FilePath)], - buildProgramArgs :: [(String, [String])], - buildDistPref :: Flag FilePath, - buildVerbosity :: Flag Verbosity, - buildNumJobs :: Flag (Maybe Int), - -- TODO: this one should not be here, it's just that the silly - -- UserHooks stop us from passing extra info in other ways - buildArgs :: [String], - buildCabalFilePath :: Flag FilePath - } - deriving (Read, Show, Generic) - -{-# DEPRECATED buildVerbose "Use buildVerbosity instead" #-} -buildVerbose :: BuildFlags -> Verbosity -buildVerbose = fromFlagOrDefault normal . 
buildVerbosity - -defaultBuildFlags :: BuildFlags -defaultBuildFlags = BuildFlags { - buildProgramPaths = mempty, - buildProgramArgs = [], - buildDistPref = mempty, - buildVerbosity = Flag normal, - buildNumJobs = mempty, - buildArgs = [], - buildCabalFilePath = mempty - } - -buildCommand :: ProgramDb -> CommandUI BuildFlags -buildCommand progDb = CommandUI - { commandName = "build" - , commandSynopsis = "Compile all/specific components." - , commandDescription = Just $ \_ -> wrapText $ - "Components encompass executables, tests, and benchmarks.\n" - ++ "\n" - ++ "Affected by configuration options, see `configure`.\n" - , commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " build " - ++ " All the components in the package\n" - ++ " " ++ pname ++ " build foo " - ++ " A component (i.e. lib, exe, test suite)\n\n" - ++ programFlagsDescription progDb ---TODO: re-enable once we have support for module/file targets --- ++ " " ++ pname ++ " build Foo.Bar " --- ++ " A module\n" --- ++ " " ++ pname ++ " build Foo/Bar.hs" --- ++ " A file\n\n" --- ++ "If a target is ambiguous it can be qualified with the component " --- ++ "name, e.g.\n" --- ++ " " ++ pname ++ " build foo:Foo.Bar\n" --- ++ " " ++ pname ++ " build testsuite1:Foo/Bar.hs\n" - , commandUsage = usageAlternatives "build" $ - [ "[FLAGS]" - , "COMPONENTS [FLAGS]" - ] - , commandDefaultFlags = defaultBuildFlags - , commandOptions = \showOrParseArgs -> - [ optionVerbosity - buildVerbosity (\v flags -> flags { buildVerbosity = v }) - - , optionDistPref - buildDistPref (\d flags -> flags { buildDistPref = d }) showOrParseArgs - ] - ++ buildOptions progDb showOrParseArgs - } - -buildOptions :: ProgramDb -> ShowOrParseArgs - -> [OptionField BuildFlags] -buildOptions progDb showOrParseArgs = - [ optionNumJobs - buildNumJobs (\v flags -> flags { buildNumJobs = v }) - ] - - ++ programDbPaths progDb showOrParseArgs - buildProgramPaths (\v flags -> flags { buildProgramPaths = v}) - - ++ programDbOption progDb 
showOrParseArgs - buildProgramArgs (\v fs -> fs { buildProgramArgs = v }) - - ++ programDbOptions progDb showOrParseArgs - buildProgramArgs (\v flags -> flags { buildProgramArgs = v}) - -emptyBuildFlags :: BuildFlags -emptyBuildFlags = mempty - -instance Monoid BuildFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup BuildFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * REPL Flags --- ------------------------------------------------------------ - -data ReplFlags = ReplFlags { - replProgramPaths :: [(String, FilePath)], - replProgramArgs :: [(String, [String])], - replDistPref :: Flag FilePath, - replVerbosity :: Flag Verbosity, - replReload :: Flag Bool, - replReplOptions :: [String] - } - deriving (Show, Generic) - -defaultReplFlags :: ReplFlags -defaultReplFlags = ReplFlags { - replProgramPaths = mempty, - replProgramArgs = [], - replDistPref = NoFlag, - replVerbosity = Flag normal, - replReload = Flag False, - replReplOptions = [] - } - -instance Monoid ReplFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup ReplFlags where - (<>) = gmappend - -replCommand :: ProgramDb -> CommandUI ReplFlags -replCommand progDb = CommandUI - { commandName = "repl" - , commandSynopsis = - "Open an interpreter session for the given component." - , commandDescription = Just $ \pname -> wrapText $ - "If the current directory contains no package, ignores COMPONENT " - ++ "parameters and opens an interactive interpreter session; if a " - ++ "sandbox is present, its package database will be used.\n" - ++ "\n" - ++ "Otherwise, (re)configures with the given or default flags, and " - ++ "loads the interpreter with the relevant modules. 
For executables, " - ++ "tests and benchmarks, loads the main module (and its " - ++ "dependencies); for libraries all exposed/other modules.\n" - ++ "\n" - ++ "The default component is the library itself, or the executable " - ++ "if that is the only component.\n" - ++ "\n" - ++ "Support for loading specific modules is planned but not " - ++ "implemented yet. For certain scenarios, `" ++ pname - ++ " exec -- ghci :l Foo` may be used instead. Note that `exec` will " - ++ "not (re)configure and you will have to specify the location of " - ++ "other modules, if required.\n" - - , commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " repl " - ++ " The first component in the package\n" - ++ " " ++ pname ++ " repl foo " - ++ " A named component (i.e. lib, exe, test suite)\n" - ++ " " ++ pname ++ " repl --repl-options=\"-lstdc++\"" - ++ " Specifying flags for interpreter\n" ---TODO: re-enable once we have support for module/file targets --- ++ " " ++ pname ++ " repl Foo.Bar " --- ++ " A module\n" --- ++ " " ++ pname ++ " repl Foo/Bar.hs" --- ++ " A file\n\n" --- ++ "If a target is ambiguous it can be qualified with the component " --- ++ "name, e.g.\n" --- ++ " " ++ pname ++ " repl foo:Foo.Bar\n" --- ++ " " ++ pname ++ " repl testsuite1:Foo/Bar.hs\n" - , commandUsage = \pname -> "Usage: " ++ pname ++ " repl [COMPONENT] [FLAGS]\n" - , commandDefaultFlags = defaultReplFlags - , commandOptions = \showOrParseArgs -> - optionVerbosity replVerbosity (\v flags -> flags { replVerbosity = v }) - : optionDistPref - replDistPref (\d flags -> flags { replDistPref = d }) - showOrParseArgs - - : programDbPaths progDb showOrParseArgs - replProgramPaths (\v flags -> flags { replProgramPaths = v}) - - ++ programDbOption progDb showOrParseArgs - replProgramArgs (\v flags -> flags { replProgramArgs = v}) - - ++ programDbOptions progDb showOrParseArgs - replProgramArgs (\v flags -> flags { replProgramArgs = v}) - - ++ case showOrParseArgs of - ParseArgs -> - [ option "" 
["reload"] - "Used from within an interpreter to update files." - replReload (\v flags -> flags { replReload = v }) - trueArg - ] - _ -> [] - ++ map liftReplOption (replOptions showOrParseArgs) - } - where - liftReplOption = liftOption replReplOptions (\v flags -> flags { replReplOptions = v }) - -replOptions :: ShowOrParseArgs -> [OptionField [String]] -replOptions _ = [ option [] ["repl-options"] "use this option for the repl" id - const (reqArg "FLAG" (succeedReadE (:[])) id) ] - --- ------------------------------------------------------------ --- * Test flags --- ------------------------------------------------------------ - -data TestShowDetails = Never | Failures | Always | Streaming | Direct - deriving (Eq, Ord, Enum, Bounded, Show) - -knownTestShowDetails :: [TestShowDetails] -knownTestShowDetails = [minBound..maxBound] - -instance Pretty TestShowDetails where - pretty = Disp.text . lowercase . show - -instance Parsec TestShowDetails where - parsec = maybe (fail "invalid TestShowDetails") return . classify =<< ident - where - ident = P.munch1 (\c -> isAlpha c || c == '_' || c == '-') - classify str = lookup (lowercase str) enumMap - enumMap :: [(String, TestShowDetails)] - enumMap = [ (display x, x) - | x <- knownTestShowDetails ] - -instance Text TestShowDetails where - parse = maybe Parse.pfail return . classify =<< ident - where - ident = Parse.munch1 (\c -> isAlpha c || c == '_' || c == '-') - classify str = lookup (lowercase str) enumMap - enumMap :: [(String, TestShowDetails)] - enumMap = [ (display x, x) - | x <- knownTestShowDetails ] - ---TODO: do we need this instance? 
-instance Monoid TestShowDetails where - mempty = Never - mappend = (<>) - -instance Semigroup TestShowDetails where - a <> b = if a < b then b else a - -data TestFlags = TestFlags { - testDistPref :: Flag FilePath, - testVerbosity :: Flag Verbosity, - testHumanLog :: Flag PathTemplate, - testMachineLog :: Flag PathTemplate, - testShowDetails :: Flag TestShowDetails, - testKeepTix :: Flag Bool, - -- TODO: think about if/how options are passed to test exes - testOptions :: [PathTemplate] - } deriving (Generic) - -defaultTestFlags :: TestFlags -defaultTestFlags = TestFlags { - testDistPref = NoFlag, - testVerbosity = Flag normal, - testHumanLog = toFlag $ toPathTemplate $ "$pkgid-$test-suite.log", - testMachineLog = toFlag $ toPathTemplate $ "$pkgid.log", - testShowDetails = toFlag Failures, - testKeepTix = toFlag False, - testOptions = [] - } - -testCommand :: CommandUI TestFlags -testCommand = CommandUI - { commandName = "test" - , commandSynopsis = - "Run all/specific tests in the test suite." - , commandDescription = Just $ \pname -> wrapText $ - "If necessary (re)configures with `--enable-tests` flag and builds" - ++ " the test suite.\n" - ++ "\n" - ++ "Remember that the tests' dependencies must be installed if there" - ++ " are additional ones; e.g. 
with `" ++ pname - ++ " install --only-dependencies --enable-tests`.\n" - ++ "\n" - ++ "By defining UserHooks in a custom Setup.hs, the package can" - ++ " define actions to be executed before and after running tests.\n" - , commandNotes = Nothing - , commandUsage = usageAlternatives "test" - [ "[FLAGS]" - , "TESTCOMPONENTS [FLAGS]" - ] - , commandDefaultFlags = defaultTestFlags - , commandOptions = \showOrParseArgs -> - [ optionVerbosity testVerbosity (\v flags -> flags { testVerbosity = v }) - , optionDistPref - testDistPref (\d flags -> flags { testDistPref = d }) - showOrParseArgs - , option [] ["log"] - ("Log all test suite results to file (name template can use " - ++ "$pkgid, $compiler, $os, $arch, $test-suite, $result)") - testHumanLog (\v flags -> flags { testHumanLog = v }) - (reqArg' "TEMPLATE" - (toFlag . toPathTemplate) - (flagToList . fmap fromPathTemplate)) - , option [] ["machine-log"] - ("Produce a machine-readable log file (name template can use " - ++ "$pkgid, $compiler, $os, $arch, $result)") - testMachineLog (\v flags -> flags { testMachineLog = v }) - (reqArg' "TEMPLATE" - (toFlag . toPathTemplate) - (flagToList . fmap fromPathTemplate)) - , option [] ["show-details"] - ("'always': always show results of individual test cases. " - ++ "'never': never show results of individual test cases. " - ++ "'failures': show results of failing test cases. " - ++ "'streaming': show results of test cases in real time." - ++ "'direct': send results of test cases in real time; no log file.") - testShowDetails (\v flags -> flags { testShowDetails = v }) - (reqArg "FILTER" - (parsecToReadE (\_ -> "--show-details flag expects one of " - ++ intercalate ", " - (map display knownTestShowDetails)) - (fmap toFlag parsec)) - (flagToList . 
fmap display)) - , option [] ["keep-tix-files"] - "keep .tix files for HPC between test runs" - testKeepTix (\v flags -> flags { testKeepTix = v}) - trueArg - , option [] ["test-options"] - ("give extra options to test executables " - ++ "(name templates can use $pkgid, $compiler, " - ++ "$os, $arch, $test-suite)") - testOptions (\v flags -> flags { testOptions = v }) - (reqArg' "TEMPLATES" (map toPathTemplate . splitArgs) - (const [])) - , option [] ["test-option"] - ("give extra option to test executables " - ++ "(no need to quote options containing spaces, " - ++ "name template can use $pkgid, $compiler, " - ++ "$os, $arch, $test-suite)") - testOptions (\v flags -> flags { testOptions = v }) - (reqArg' "TEMPLATE" (\x -> [toPathTemplate x]) - (map fromPathTemplate)) - ] - } - -emptyTestFlags :: TestFlags -emptyTestFlags = mempty - -instance Monoid TestFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup TestFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Benchmark flags --- ------------------------------------------------------------ - -data BenchmarkFlags = BenchmarkFlags { - benchmarkDistPref :: Flag FilePath, - benchmarkVerbosity :: Flag Verbosity, - benchmarkOptions :: [PathTemplate] - } deriving (Generic) - -defaultBenchmarkFlags :: BenchmarkFlags -defaultBenchmarkFlags = BenchmarkFlags { - benchmarkDistPref = NoFlag, - benchmarkVerbosity = Flag normal, - benchmarkOptions = [] - } - -benchmarkCommand :: CommandUI BenchmarkFlags -benchmarkCommand = CommandUI - { commandName = "bench" - , commandSynopsis = - "Run all/specific benchmarks." - , commandDescription = Just $ \pname -> wrapText $ - "If necessary (re)configures with `--enable-benchmarks` flag and" - ++ " builds the benchmarks.\n" - ++ "\n" - ++ "Remember that the benchmarks' dependencies must be installed if" - ++ " there are additional ones; e.g. 
with `" ++ pname - ++ " install --only-dependencies --enable-benchmarks`.\n" - ++ "\n" - ++ "By defining UserHooks in a custom Setup.hs, the package can" - ++ " define actions to be executed before and after running" - ++ " benchmarks.\n" - , commandNotes = Nothing - , commandUsage = usageAlternatives "bench" - [ "[FLAGS]" - , "BENCHCOMPONENTS [FLAGS]" - ] - , commandDefaultFlags = defaultBenchmarkFlags - , commandOptions = \showOrParseArgs -> - [ optionVerbosity benchmarkVerbosity - (\v flags -> flags { benchmarkVerbosity = v }) - , optionDistPref - benchmarkDistPref (\d flags -> flags { benchmarkDistPref = d }) - showOrParseArgs - , option [] ["benchmark-options"] - ("give extra options to benchmark executables " - ++ "(name templates can use $pkgid, $compiler, " - ++ "$os, $arch, $benchmark)") - benchmarkOptions (\v flags -> flags { benchmarkOptions = v }) - (reqArg' "TEMPLATES" (map toPathTemplate . splitArgs) - (const [])) - , option [] ["benchmark-option"] - ("give extra option to benchmark executables " - ++ "(no need to quote options containing spaces, " - ++ "name template can use $pkgid, $compiler, " - ++ "$os, $arch, $benchmark)") - benchmarkOptions (\v flags -> flags { benchmarkOptions = v }) - (reqArg' "TEMPLATE" (\x -> [toPathTemplate x]) - (map fromPathTemplate)) - ] - } - -emptyBenchmarkFlags :: BenchmarkFlags -emptyBenchmarkFlags = mempty - -instance Monoid BenchmarkFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup BenchmarkFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Shared options utils --- ------------------------------------------------------------ - -programFlagsDescription :: ProgramDb -> String -programFlagsDescription progDb = - "The flags --with-PROG and --PROG-option(s) can be used with" - ++ " the following programs:" - ++ (concatMap (\line -> "\n " ++ unwords line) . wrapLine 77 . 
sort) - [ programName prog | (prog, _) <- knownPrograms progDb ] - ++ "\n" - --- | For each known program @PROG@ in 'progDb', produce a @with-PROG@ --- 'OptionField'. -programDbPaths - :: ProgramDb - -> ShowOrParseArgs - -> (flags -> [(String, FilePath)]) - -> ([(String, FilePath)] -> (flags -> flags)) - -> [OptionField flags] -programDbPaths progDb showOrParseArgs get set = - programDbPaths' ("with-" ++) progDb showOrParseArgs get set - -{-# DEPRECATED programConfigurationPaths' "Use programDbPaths' instead" #-} - --- | Like 'programDbPaths', but allows to customise the option name. -programDbPaths', programConfigurationPaths' - :: (String -> String) - -> ProgramDb - -> ShowOrParseArgs - -> (flags -> [(String, FilePath)]) - -> ([(String, FilePath)] -> (flags -> flags)) - -> [OptionField flags] - -programConfigurationPaths' = programDbPaths' - -programDbPaths' mkName progDb showOrParseArgs get set = - case showOrParseArgs of - -- we don't want a verbose help text list so we just show a generic one: - ShowArgs -> [withProgramPath "PROG"] - ParseArgs -> map (withProgramPath . programName . fst) - (knownPrograms progDb) - where - withProgramPath prog = - option "" [mkName prog] - ("give the path to " ++ prog) - get set - (reqArg' "PATH" (\path -> [(prog, path)]) - (\progPaths -> [ path | (prog', path) <- progPaths, prog==prog' ])) - --- | For each known program @PROG@ in 'progDb', produce a @PROG-option@ --- 'OptionField'. -programDbOption - :: ProgramDb - -> ShowOrParseArgs - -> (flags -> [(String, [String])]) - -> ([(String, [String])] -> (flags -> flags)) - -> [OptionField flags] -programDbOption progDb showOrParseArgs get set = - case showOrParseArgs of - -- we don't want a verbose help text list so we just show a generic one: - ShowArgs -> [programOption "PROG"] - ParseArgs -> map (programOption . programName . 
fst) - (knownPrograms progDb) - where - programOption prog = - option "" [prog ++ "-option"] - ("give an extra option to " ++ prog ++ - " (no need to quote options containing spaces)") - get set - (reqArg' "OPT" (\arg -> [(prog, [arg])]) - (\progArgs -> concat [ args - | (prog', args) <- progArgs, prog==prog' ])) - -{-# DEPRECATED programConfigurationOptions "Use programDbOptions instead" #-} - --- | For each known program @PROG@ in 'progDb', produce a @PROG-options@ --- 'OptionField'. -programDbOptions, programConfigurationOptions - :: ProgramDb - -> ShowOrParseArgs - -> (flags -> [(String, [String])]) - -> ([(String, [String])] -> (flags -> flags)) - -> [OptionField flags] - -programConfigurationOptions = programDbOptions - -programDbOptions progDb showOrParseArgs get set = - case showOrParseArgs of - -- we don't want a verbose help text list so we just show a generic one: - ShowArgs -> [programOptions "PROG"] - ParseArgs -> map (programOptions . programName . fst) - (knownPrograms progDb) - where - programOptions prog = - option "" [prog ++ "-options"] - ("give extra options to " ++ prog) - get set - (reqArg' "OPTS" (\args -> [(prog, splitArgs args)]) (const [])) - --- ------------------------------------------------------------ --- * GetOpt Utils --- ------------------------------------------------------------ - -boolOpt :: SFlags -> SFlags - -> MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a -boolOpt = Command.boolOpt flagToMaybe Flag - -boolOpt' :: OptFlags -> OptFlags - -> MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a -boolOpt' = Command.boolOpt' flagToMaybe Flag - -trueArg, falseArg :: MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a -trueArg sfT lfT = boolOpt' (sfT, lfT) ([], []) sfT lfT -falseArg sfF lfF = boolOpt' ([], []) (sfF, lfF) sfF lfF - -reqArgFlag :: ArgPlaceHolder -> SFlags -> LFlags -> Description -> - (b -> Flag String) -> (Flag String -> b -> b) -> OptDescr b -reqArgFlag ad = reqArg ad (succeedReadE Flag) flagToList - 
-optionDistPref :: (flags -> Flag FilePath) - -> (Flag FilePath -> flags -> flags) - -> ShowOrParseArgs - -> OptionField flags -optionDistPref get set = \showOrParseArgs -> - option "" (distPrefFlagName showOrParseArgs) - ( "The directory where Cabal puts generated build files " - ++ "(default " ++ defaultDistPref ++ ")") - get set - (reqArgFlag "DIR") - where - distPrefFlagName ShowArgs = ["builddir"] - distPrefFlagName ParseArgs = ["builddir", "distdir", "distpref"] - -optionVerbosity :: (flags -> Flag Verbosity) - -> (Flag Verbosity -> flags -> flags) - -> OptionField flags -optionVerbosity get set = - option "v" ["verbose"] - "Control verbosity (n is 0--3, default verbosity level is 1)" - get set - (optArg "n" (fmap Flag flagToVerbosity) - (Flag verbose) -- default Value if no n is given - (fmap (Just . showForCabal) . flagToList)) - -optionNumJobs :: (flags -> Flag (Maybe Int)) - -> (Flag (Maybe Int) -> flags -> flags) - -> OptionField flags -optionNumJobs get set = - option "j" ["jobs"] - "Run NUM jobs simultaneously (or '$ncpus' if no NUM is given)." - get set - (optArg "NUM" (fmap Flag numJobsParser) - (Flag Nothing) - (map (Just . maybe "$ncpus" show) . flagToList)) - where - numJobsParser :: ReadE (Maybe Int) - numJobsParser = ReadE $ \s -> - case s of - "$ncpus" -> Right Nothing - _ -> case reads s of - [(n, "")] - | n < 1 -> Left "The number of jobs should be 1 or more." - | otherwise -> Right (Just n) - _ -> Left "The jobs value should be a number or '$ncpus'" - --- ------------------------------------------------------------ --- * Other Utils --- ------------------------------------------------------------ - --- | Arguments to pass to a @configure@ script, e.g. generated by --- @autoconf@. 
-configureArgs :: Bool -> ConfigFlags -> [String] -configureArgs bcHack flags - = hc_flag - ++ optFlag "with-hc-pkg" configHcPkg - ++ optFlag' "prefix" prefix - ++ optFlag' "bindir" bindir - ++ optFlag' "libdir" libdir - ++ optFlag' "libexecdir" libexecdir - ++ optFlag' "datadir" datadir - ++ optFlag' "sysconfdir" sysconfdir - ++ configConfigureArgs flags - where - hc_flag = case (configHcFlavor flags, configHcPath flags) of - (_, Flag hc_path) -> [hc_flag_name ++ hc_path] - (Flag hc, NoFlag) -> [hc_flag_name ++ display hc] - (NoFlag,NoFlag) -> [] - hc_flag_name - --TODO kill off thic bc hack when defaultUserHooks is removed. - | bcHack = "--with-hc=" - | otherwise = "--with-compiler=" - optFlag name config_field = case config_field flags of - Flag p -> ["--" ++ name ++ "=" ++ p] - NoFlag -> [] - optFlag' name config_field = optFlag name (fmap fromPathTemplate - . config_field - . configInstallDirs) - -configureCCompiler :: Verbosity -> ProgramDb - -> IO (FilePath, [String]) -configureCCompiler verbosity progdb = configureProg verbosity progdb gccProgram - -configureLinker :: Verbosity -> ProgramDb -> IO (FilePath, [String]) -configureLinker verbosity progdb = configureProg verbosity progdb ldProgram - -configureProg :: Verbosity -> ProgramDb -> Program - -> IO (FilePath, [String]) -configureProg verbosity programDb prog = do - (p, _) <- requireProgram verbosity prog programDb - let pInv = programInvocation p [] - return (progInvokePath pInv, progInvokeArgs pInv) - --- | Helper function to split a string into a list of arguments. 
--- It's supposed to handle quoted things sensibly, eg: --- --- > splitArgs "--foo=\"C:/Program Files/Bar/" --baz" --- > = ["--foo=C:/Program Files/Bar", "--baz"] --- --- > splitArgs "\"-DMSGSTR=\\\"foo bar\\\"\" --baz" --- > = ["-DMSGSTR=\"foo bar\"","--baz"] --- -splitArgs :: String -> [String] -splitArgs = space [] - where - space :: String -> String -> [String] - space w [] = word w [] - space w ( c :s) - | isSpace c = word w (space [] s) - space w ('"':s) = string w s - space w s = nonstring w s - - string :: String -> String -> [String] - string w [] = word w [] - string w ('"':s) = space w s - string w ('\\':'"':s) = string ('"':w) s - string w ( c :s) = string (c:w) s - - nonstring :: String -> String -> [String] - nonstring w [] = word w [] - nonstring w ('"':s) = string w s - nonstring w ( c :s) = space (c:w) s - - word [] s = s - word w s = reverse w : s - --- The test cases kinda have to be rewritten from the ground up... :/ ---hunitTests :: [Test] ---hunitTests = --- let m = [("ghc", GHC), ("nhc98", NHC), ("hugs", Hugs)] --- (flags, commands', unkFlags, ers) --- = getOpt Permute options ["configure", "foobar", "--prefix=/foo", "--ghc", "--nhc98", "--hugs", "--with-compiler=/comp", "--unknown1", "--unknown2", "--install-prefix=/foo", "--user", "--global"] --- in [TestLabel "very basic option parsing" $ TestList [ --- "getOpt flags" ~: "failed" ~: --- [Prefix "/foo", GhcFlag, NhcFlag, HugsFlag, --- WithCompiler "/comp", InstPrefix "/foo", UserFlag, GlobalFlag] --- ~=? flags, --- "getOpt commands" ~: "failed" ~: ["configure", "foobar"] ~=? commands', --- "getOpt unknown opts" ~: "failed" ~: --- ["--unknown1", "--unknown2"] ~=? unkFlags, --- "getOpt errors" ~: "failed" ~: [] ~=? ers], --- --- TestLabel "test location of various compilers" $ TestList --- ["configure parsing for prefix and compiler flag" ~: "failed" ~: --- (Right (ConfigCmd (Just comp, Nothing, Just "/usr/local"), [])) --- ~=? 
(parseArgs ["--prefix=/usr/local", "--"++name, "configure"]) --- | (name, comp) <- m], --- --- TestLabel "find the package tool" $ TestList --- ["configure parsing for prefix comp flag, withcompiler" ~: "failed" ~: --- (Right (ConfigCmd (Just comp, Just "/foo/comp", Just "/usr/local"), [])) --- ~=? (parseArgs ["--prefix=/usr/local", "--"++name, --- "--with-compiler=/foo/comp", "configure"]) --- | (name, comp) <- m], --- --- TestLabel "simpler commands" $ TestList --- [flag ~: "failed" ~: (Right (flagCmd, [])) ~=? (parseArgs [flag]) --- | (flag, flagCmd) <- [("build", BuildCmd), --- ("install", InstallCmd Nothing False), --- ("sdist", SDistCmd), --- ("register", RegisterCmd False)] --- ] --- ] - -{- Testing ideas: - * IO to look for hugs and hugs-pkg (which hugs, etc) - * quickCheck to test permutations of arguments - * what other options can we over-ride with a command-line flag? --} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/SrcDist.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/SrcDist.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/SrcDist.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/SrcDist.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,510 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.SrcDist --- Copyright : Simon Marlow 2004 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This handles the @sdist@ command. The module exports an 'sdist' action but --- also some of the phases that make it up so that other tools can use just the --- bits they need. 
In particular the preparation of the tree of files to go --- into the source tarball is separated from actually building the source --- tarball. --- --- The 'createArchive' action uses the external @tar@ program and assumes that --- it accepts the @-z@ flag. Neither of these assumptions are valid on Windows. --- The 'sdist' action now also does some distribution QA checks. - --- NOTE: FIX: we don't have a great way of testing this module, since --- we can't easily look inside a tarball once its created. - -module Distribution.Simple.SrcDist ( - -- * The top level action - sdist, - - -- ** Parts of 'sdist' - printPackageProblems, - prepareTree, - createArchive, - - -- ** Snapshots - prepareSnapshotTree, - snapshotPackage, - snapshotVersion, - dateToSnapshotNumber, - - -- * Extracting the source files - listPackageSources - - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.PackageDescription hiding (Flag) -import Distribution.PackageDescription.Check hiding (doesFileExist) -import Distribution.Package -import Distribution.ModuleName -import qualified Distribution.ModuleName as ModuleName -import Distribution.Version -import Distribution.Simple.Glob -import Distribution.Simple.Utils -import Distribution.Simple.Setup -import Distribution.Simple.PreProcess -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.BuildPaths -import Distribution.Simple.Program -import Distribution.Text -import Distribution.Types.ForeignLib -import Distribution.Verbosity - -import Data.List (partition) -import qualified Data.Map as Map -import Data.Time (UTCTime, getCurrentTime, toGregorian, utctDay) -import System.Directory ( doesFileExist ) -import System.IO (IOMode(WriteMode), hPutStrLn, withFile) -import System.FilePath ((), (<.>), dropExtension, isRelative) -import Control.Monad - --- |Create a source distribution. 
-sdist :: PackageDescription -- ^information from the tarball - -> Maybe LocalBuildInfo -- ^Information from configure - -> SDistFlags -- ^verbosity & snapshot - -> (FilePath -> FilePath) -- ^build prefix (temp dir) - -> [PPSuffixHandler] -- ^ extra preprocessors (includes suffixes) - -> IO () -sdist pkg mb_lbi flags mkTmpDir pps = - - -- When given --list-sources, just output the list of sources to a file. - case (sDistListSources flags) of - Flag path -> withFile path WriteMode $ \outHandle -> do - (ordinary, maybeExecutable) <- listPackageSources verbosity pkg pps - traverse_ (hPutStrLn outHandle) ordinary - traverse_ (hPutStrLn outHandle) maybeExecutable - notice verbosity $ "List of package sources written to file '" - ++ path ++ "'" - NoFlag -> do - -- do some QA - printPackageProblems verbosity pkg - - when (isNothing mb_lbi) $ - warn verbosity "Cannot run preprocessors. Run 'configure' command first." - - date <- getCurrentTime - let pkg' | snapshot = snapshotPackage date pkg - | otherwise = pkg - - case flagToMaybe (sDistDirectory flags) of - Just targetDir -> do - generateSourceDir targetDir pkg' - info verbosity $ "Source directory created: " ++ targetDir - - Nothing -> do - createDirectoryIfMissingVerbose verbosity True tmpTargetDir - withTempDirectory verbosity tmpTargetDir "sdist." 
$ \tmpDir -> do - let targetDir = tmpDir tarBallName pkg' - generateSourceDir targetDir pkg' - targzFile <- createArchive verbosity pkg' mb_lbi tmpDir targetPref - notice verbosity $ "Source tarball created: " ++ targzFile - - where - generateSourceDir targetDir pkg' = do - - setupMessage verbosity "Building source dist for" (packageId pkg') - prepareTree verbosity pkg' mb_lbi targetDir pps - when snapshot $ - overwriteSnapshotPackageDesc verbosity pkg' targetDir - - verbosity = fromFlag (sDistVerbosity flags) - snapshot = fromFlag (sDistSnapshot flags) - - distPref = fromFlag $ sDistDistPref flags - targetPref = distPref - tmpTargetDir = mkTmpDir distPref - --- | List all source files of a package. Returns a tuple of lists: first --- component is a list of ordinary files, second one is a list of those files --- that may be executable. -listPackageSources :: Verbosity -- ^ verbosity - -> PackageDescription -- ^ info from the cabal file - -> [PPSuffixHandler] -- ^ extra preprocessors (include - -- suffixes) - -> IO ([FilePath], [FilePath]) -listPackageSources verbosity pkg_descr0 pps = do - -- Call helpers that actually do all work. - ordinary <- listPackageSourcesOrdinary verbosity pkg_descr pps - maybeExecutable <- listPackageSourcesMaybeExecutable verbosity pkg_descr - return (ordinary, maybeExecutable) - where - pkg_descr = filterAutogenModules pkg_descr0 - --- | List those source files that may be executable (e.g. the configure script). -listPackageSourcesMaybeExecutable :: Verbosity -> PackageDescription -> IO [FilePath] -listPackageSourcesMaybeExecutable verbosity pkg_descr = - -- Extra source files. - fmap concat . for (extraSrcFiles pkg_descr) $ \fpath -> - matchDirFileGlob verbosity (specVersion pkg_descr) "." fpath - --- | List those source files that should be copied with ordinary permissions. 
-listPackageSourcesOrdinary :: Verbosity - -> PackageDescription - -> [PPSuffixHandler] - -> IO [FilePath] -listPackageSourcesOrdinary verbosity pkg_descr pps = - fmap concat . sequenceA $ - [ - -- Library sources. - fmap concat - . withAllLib $ \Library { - exposedModules = modules, - signatures = sigs, - libBuildInfo = libBi - } -> - allSourcesBuildInfo verbosity libBi pps (modules ++ sigs) - - -- Executables sources. - , fmap concat - . withAllExe $ \Executable { modulePath = mainPath, buildInfo = exeBi } -> do - biSrcs <- allSourcesBuildInfo verbosity exeBi pps [] - mainSrc <- findMainExeFile exeBi pps mainPath - return (mainSrc:biSrcs) - - -- Foreign library sources - , fmap concat - . withAllFLib $ \flib@(ForeignLib { foreignLibBuildInfo = flibBi }) -> do - biSrcs <- allSourcesBuildInfo verbosity flibBi pps [] - defFiles <- mapM (findModDefFile flibBi pps) (foreignLibModDefFile flib) - return (defFiles ++ biSrcs) - - -- Test suites sources. - , fmap concat - . withAllTest $ \t -> do - let bi = testBuildInfo t - case testInterface t of - TestSuiteExeV10 _ mainPath -> do - biSrcs <- allSourcesBuildInfo verbosity bi pps [] - srcMainFile <- findMainExeFile bi pps mainPath - return (srcMainFile:biSrcs) - TestSuiteLibV09 _ m -> - allSourcesBuildInfo verbosity bi pps [m] - TestSuiteUnsupported tp -> die' verbosity $ "Unsupported test suite type: " - ++ show tp - - -- Benchmarks sources. - , fmap concat - . withAllBenchmark $ \bm -> do - let bi = benchmarkBuildInfo bm - case benchmarkInterface bm of - BenchmarkExeV10 _ mainPath -> do - biSrcs <- allSourcesBuildInfo verbosity bi pps [] - srcMainFile <- findMainExeFile bi pps mainPath - return (srcMainFile:biSrcs) - BenchmarkUnsupported tp -> die' verbosity $ "Unsupported benchmark type: " - ++ show tp - - -- Data files. - , fmap concat - . for (dataFiles pkg_descr) $ \filename -> - let srcDataDirRaw = dataDir pkg_descr - srcDataDir = if null srcDataDirRaw - then "." 
- else srcDataDirRaw - in fmap (fmap (srcDataDir )) $ - matchDirFileGlob verbosity (specVersion pkg_descr) srcDataDir filename - - -- Extra doc files. - , fmap concat - . for (extraDocFiles pkg_descr) $ \ filename -> - matchDirFileGlob verbosity (specVersion pkg_descr) "." filename - - -- License file(s). - , return (licenseFiles pkg_descr) - - -- Install-include files. - , fmap concat - . withAllLib $ \ l -> do - let lbi = libBuildInfo l - relincdirs = "." : filter isRelative (includeDirs lbi) - traverse (fmap snd . findIncludeFile verbosity relincdirs) (installIncludes lbi) - - -- Setup script, if it exists. - , fmap (maybe [] (\f -> [f])) $ findSetupFile "" - - -- The .cabal file itself. - , fmap (\d -> [d]) (defaultPackageDesc verbosity) - - ] - where - -- We have to deal with all libs and executables, so we have local - -- versions of these functions that ignore the 'buildable' attribute: - withAllLib action = traverse action (allLibraries pkg_descr) - withAllFLib action = traverse action (foreignLibs pkg_descr) - withAllExe action = traverse action (executables pkg_descr) - withAllTest action = traverse action (testSuites pkg_descr) - withAllBenchmark action = traverse action (benchmarks pkg_descr) - - --- |Prepare a directory tree of source files. -prepareTree :: Verbosity -- ^verbosity - -> PackageDescription -- ^info from the cabal file - -> Maybe LocalBuildInfo - -> FilePath -- ^source tree to populate - -> [PPSuffixHandler] -- ^extra preprocessors (includes suffixes) - -> IO () -prepareTree verbosity pkg_descr0 mb_lbi targetDir pps = do - -- If the package was configured then we can run platform-independent - -- pre-processors and include those generated files. 
- case mb_lbi of - Just lbi | not (null pps) -> do - let lbi' = lbi{ buildDir = targetDir buildDir lbi } - withAllComponentsInBuildOrder pkg_descr lbi' $ \c clbi -> - preprocessComponent pkg_descr c lbi' clbi True verbosity pps - _ -> return () - - (ordinary, mExecutable) <- listPackageSources verbosity pkg_descr0 pps - installOrdinaryFiles verbosity targetDir (zip (repeat []) ordinary) - installMaybeExecutableFiles verbosity targetDir (zip (repeat []) mExecutable) - maybeCreateDefaultSetupScript targetDir - - where - pkg_descr = filterAutogenModules pkg_descr0 - --- | Find the setup script file, if it exists. -findSetupFile :: FilePath -> NoCallStackIO (Maybe FilePath) -findSetupFile targetDir = do - hsExists <- doesFileExist setupHs - lhsExists <- doesFileExist setupLhs - if hsExists - then return (Just setupHs) - else if lhsExists - then return (Just setupLhs) - else return Nothing - where - setupHs = targetDir "Setup.hs" - setupLhs = targetDir "Setup.lhs" - --- | Create a default setup script in the target directory, if it doesn't exist. -maybeCreateDefaultSetupScript :: FilePath -> NoCallStackIO () -maybeCreateDefaultSetupScript targetDir = do - mSetupFile <- findSetupFile targetDir - case mSetupFile of - Just _setupFile -> return () - Nothing -> do - writeUTF8File (targetDir "Setup.hs") $ unlines [ - "import Distribution.Simple", - "main = defaultMain"] - --- | Find the main executable file. 
-findMainExeFile :: BuildInfo -> [PPSuffixHandler] -> FilePath -> IO FilePath -findMainExeFile exeBi pps mainPath = do - ppFile <- findFileWithExtension (ppSuffixes pps) (hsSourceDirs exeBi) - (dropExtension mainPath) - case ppFile of - Nothing -> findFile (hsSourceDirs exeBi) mainPath - Just pp -> return pp - --- | Find a module definition file --- --- TODO: I don't know if this is right -findModDefFile :: BuildInfo -> [PPSuffixHandler] -> FilePath -> IO FilePath -findModDefFile flibBi _pps modDefPath = - findFile (".":hsSourceDirs flibBi) modDefPath - --- | Given a list of include paths, try to find the include file named --- @f@. Return the name of the file and the full path, or exit with error if --- there's no such file. -findIncludeFile :: Verbosity -> [FilePath] -> String -> IO (String, FilePath) -findIncludeFile verbosity [] f = die' verbosity ("can't find include file " ++ f) -findIncludeFile verbosity (d:ds) f = do - let path = (d f) - b <- doesFileExist path - if b then return (f,path) else findIncludeFile verbosity ds f - --- | Remove the auto-generated modules (like 'Paths_*') from 'exposed-modules' --- and 'other-modules'. -filterAutogenModules :: PackageDescription -> PackageDescription -filterAutogenModules pkg_descr0 = mapLib filterAutogenModuleLib $ - mapAllBuildInfo filterAutogenModuleBI pkg_descr0 - where - mapLib f pkg = pkg { library = fmap f (library pkg) - , subLibraries = map f (subLibraries pkg) } - filterAutogenModuleLib lib = lib { - exposedModules = filter (filterFunction (libBuildInfo lib)) (exposedModules lib) - } - filterAutogenModuleBI bi = bi { - otherModules = filter (filterFunction bi) (otherModules bi) - } - pathsModule = autogenPathsModuleName pkg_descr0 - filterFunction bi = \mn -> - mn /= pathsModule - && not (mn `elem` autogenModules bi) - --- | Prepare a directory tree of source files for a snapshot version. 
--- It is expected that the appropriate snapshot version has already been set --- in the package description, eg using 'snapshotPackage' or 'snapshotVersion'. --- -prepareSnapshotTree :: Verbosity -- ^verbosity - -> PackageDescription -- ^info from the cabal file - -> Maybe LocalBuildInfo - -> FilePath -- ^source tree to populate - -> [PPSuffixHandler] -- ^extra preprocessors (includes - -- suffixes) - -> IO () -prepareSnapshotTree verbosity pkg mb_lbi targetDir pps = do - prepareTree verbosity pkg mb_lbi targetDir pps - overwriteSnapshotPackageDesc verbosity pkg targetDir - -overwriteSnapshotPackageDesc :: Verbosity -- ^verbosity - -> PackageDescription -- ^info from the cabal file - -> FilePath -- ^source tree - -> IO () -overwriteSnapshotPackageDesc verbosity pkg targetDir = do - -- We could just writePackageDescription targetDescFile pkg_descr, - -- but that would lose comments and formatting. - descFile <- defaultPackageDesc verbosity - withUTF8FileContents descFile $ - writeUTF8File (targetDir descFile) - . unlines . map (replaceVersion (packageVersion pkg)) . lines - - where - replaceVersion :: Version -> String -> String - replaceVersion version line - | "version:" `isPrefixOf` map toLower line - = "version: " ++ display version - | otherwise = line - --- | Modifies a 'PackageDescription' by appending a snapshot number --- corresponding to the given date. --- -snapshotPackage :: UTCTime -> PackageDescription -> PackageDescription -snapshotPackage date pkg = - pkg { - package = pkgid { pkgVersion = snapshotVersion date (pkgVersion pkgid) } - } - where pkgid = packageId pkg - --- | Modifies a 'Version' by appending a snapshot number corresponding --- to the given date. --- -snapshotVersion :: UTCTime -> Version -> Version -snapshotVersion date = alterVersion (++ [dateToSnapshotNumber date]) - --- | Given a date produce a corresponding integer representation. --- For example given a date @18/03/2008@ produce the number @20080318@. 
--- -dateToSnapshotNumber :: UTCTime -> Int -dateToSnapshotNumber date = case toGregorian (utctDay date) of - (year, month, day) -> - fromIntegral year * 10000 - + month * 100 - + day - --- | Callback type for use by sdistWith. -type CreateArchiveFun = Verbosity -- ^verbosity - -> PackageDescription -- ^info from cabal file - -> Maybe LocalBuildInfo -- ^info from configure - -> FilePath -- ^source tree to archive - -> FilePath -- ^name of archive to create - -> IO FilePath - --- | Create an archive from a tree of source files, and clean up the tree. -createArchive :: CreateArchiveFun -createArchive verbosity pkg_descr mb_lbi tmpDir targetPref = do - let tarBallFilePath = targetPref tarBallName pkg_descr <.> "tar.gz" - - (tarProg, _) <- requireProgram verbosity tarProgram - (maybe defaultProgramDb withPrograms mb_lbi) - let formatOptSupported = maybe False (== "YES") $ - Map.lookup "Supports --format" - (programProperties tarProg) - runProgram verbosity tarProg $ - -- Hmm: I could well be skating on thinner ice here by using the -C option - -- (=> seems to be supported at least by GNU and *BSD tar) [The - -- prev. solution used pipes and sub-command sequences to set up the paths - -- correctly, which is problematic in a Windows setting.] - ["-czf", tarBallFilePath, "-C", tmpDir] - ++ (if formatOptSupported then ["--format", "ustar"] else []) - ++ [tarBallName pkg_descr] - return tarBallFilePath - --- | Given a buildinfo, return the names of all source files. -allSourcesBuildInfo :: Verbosity - -> BuildInfo - -> [PPSuffixHandler] -- ^ Extra preprocessors - -> [ModuleName] -- ^ Exposed modules - -> IO [FilePath] -allSourcesBuildInfo verbosity bi pps modules = do - let searchDirs = hsSourceDirs bi - sources <- fmap concat $ sequenceA $ - [ let file = ModuleName.toFilePath module_ - -- NB: *Not* findFileWithExtension, because the same source - -- file may show up in multiple paths due to a conditional; - -- we need to package all of them. See #367. 
- in findAllFilesWithExtension suffixes searchDirs file - >>= nonEmpty (notFound module_) return - | module_ <- modules ++ otherModules bi ] - bootFiles <- sequenceA - [ let file = ModuleName.toFilePath module_ - fileExts = ["hs-boot", "lhs-boot"] - in findFileWithExtension fileExts (hsSourceDirs bi) file - | module_ <- modules ++ otherModules bi ] - - return $ sources ++ catMaybes bootFiles ++ cSources bi ++ cxxSources bi ++ jsSources bi - - where - nonEmpty x _ [] = x - nonEmpty _ f xs = f xs - suffixes = ppSuffixes pps ++ ["hs", "lhs", "hsig", "lhsig"] - notFound m = die' verbosity $ "Error: Could not find module: " ++ display m - ++ " with any suffix: " ++ show suffixes ++ ". If the module " - ++ "is autogenerated it should be added to 'autogen-modules'." - - --- | Note: must be called with the CWD set to the directory containing --- the '.cabal' file. -printPackageProblems :: Verbosity -> PackageDescription -> IO () -printPackageProblems verbosity pkg_descr = do - ioChecks <- checkPackageFiles verbosity pkg_descr "." - let pureChecks = checkConfiguredPackage pkg_descr - isDistError (PackageDistSuspicious _) = False - isDistError (PackageDistSuspiciousWarn _) = False - isDistError _ = True - (errors, warnings) = partition isDistError (pureChecks ++ ioChecks) - unless (null errors) $ - notice verbosity $ "Distribution quality errors:\n" - ++ unlines (map explanation errors) - unless (null warnings) $ - notice verbosity $ "Distribution quality warnings:\n" - ++ unlines (map explanation warnings) - unless (null errors) $ - notice verbosity - "Note: the public hackage server would reject this package." - ------------------------------------------------------------- - --- | The name of the tarball without extension --- -tarBallName :: PackageDescription -> String -tarBallName = display . 
packageId - -mapAllBuildInfo :: (BuildInfo -> BuildInfo) - -> (PackageDescription -> PackageDescription) -mapAllBuildInfo f pkg = pkg { - library = fmap mapLibBi (library pkg), - subLibraries = fmap mapLibBi (subLibraries pkg), - foreignLibs = fmap mapFLibBi (foreignLibs pkg), - executables = fmap mapExeBi (executables pkg), - testSuites = fmap mapTestBi (testSuites pkg), - benchmarks = fmap mapBenchBi (benchmarks pkg) - } - where - mapLibBi lib = lib { libBuildInfo = f (libBuildInfo lib) } - mapFLibBi flib = flib { foreignLibBuildInfo = f (foreignLibBuildInfo flib) } - mapExeBi exe = exe { buildInfo = f (buildInfo exe) } - mapTestBi tst = tst { testBuildInfo = f (testBuildInfo tst) } - mapBenchBi bm = bm { benchmarkBuildInfo = f (benchmarkBuildInfo bm) } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Test/ExeV10.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Test/ExeV10.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Test/ExeV10.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Test/ExeV10.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,175 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - -module Distribution.Simple.Test.ExeV10 - ( runTest - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.UnqualComponentName -import Distribution.Compat.CreatePipe -import Distribution.Compat.Environment -import qualified Distribution.PackageDescription as PD -import Distribution.Simple.Build.PathsModule -import Distribution.Simple.BuildPaths -import Distribution.Simple.Compiler -import Distribution.Simple.Hpc -import Distribution.Simple.InstallDirs -import qualified Distribution.Simple.LocalBuildInfo as LBI -import qualified Distribution.Types.LocalBuildInfo as LBI -import Distribution.Simple.Setup -import 
Distribution.Simple.Test.Log -import Distribution.Simple.Utils -import Distribution.System -import Distribution.TestSuite -import Distribution.Text -import Distribution.Verbosity - -import Control.Concurrent (forkIO) -import System.Directory - ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist - , getCurrentDirectory, removeDirectoryRecursive ) -import System.Exit ( ExitCode(..) ) -import System.FilePath ( (), (<.>) ) -import System.IO ( hGetContents, stdout, stderr ) - -runTest :: PD.PackageDescription - -> LBI.LocalBuildInfo - -> LBI.ComponentLocalBuildInfo - -> TestFlags - -> PD.TestSuite - -> IO TestSuiteLog -runTest pkg_descr lbi clbi flags suite = do - let isCoverageEnabled = LBI.testCoverage lbi - way = guessWay lbi - tixDir_ = tixDir distPref way testName' - - pwd <- getCurrentDirectory - existingEnv <- getEnvironment - - let cmd = LBI.buildDir lbi testName' - testName' <.> exeExtension (LBI.hostPlatform lbi) - -- Check that the test executable exists. - exists <- doesFileExist cmd - unless exists $ die' verbosity $ "Error: Could not find test program \"" ++ cmd - ++ "\". Did you build the package first?" - - -- Remove old .tix files if appropriate. - unless (fromFlag $ testKeepTix flags) $ do - exists' <- doesDirectoryExist tixDir_ - when exists' $ removeDirectoryRecursive tixDir_ - - -- Create directory for HPC files. 
- createDirectoryIfMissing True tixDir_ - - -- Write summary notices indicating start of test suite - notice verbosity $ summarizeSuiteStart $ testName' - - (wOut, wErr, logText) <- case details of - Direct -> return (stdout, stderr, "") - _ -> do - (rOut, wOut) <- createPipe - - -- Read test executable's output lazily (returns immediately) - logText <- hGetContents rOut - -- Force the IO manager to drain the test output pipe - void $ forkIO $ length logText `seq` return () - - -- '--show-details=streaming': print the log output in another thread - when (details == Streaming) $ void $ forkIO $ putStr logText - - return (wOut, wOut, logText) - - -- Run the test executable - let opts = map (testOption pkg_descr lbi suite) - (testOptions flags) - dataDirPath = pwd PD.dataDir pkg_descr - tixFile = pwd tixFilePath distPref way (testName') - pkgPathEnv = (pkgPathEnvVar pkg_descr "datadir", dataDirPath) - : existingEnv - shellEnv = [("HPCTIXFILE", tixFile) | isCoverageEnabled] ++ pkgPathEnv - - -- Add (DY)LD_LIBRARY_PATH if needed - shellEnv' <- if LBI.withDynExe lbi - then do let (Platform _ os) = LBI.hostPlatform lbi - paths <- LBI.depLibraryPaths True False lbi clbi - return (addLibraryPath os paths shellEnv) - else return shellEnv - - exit <- rawSystemIOWithEnv verbosity cmd opts Nothing (Just shellEnv') - -- these handles are automatically closed - Nothing (Just wOut) (Just wErr) - - -- Generate TestSuiteLog from executable exit code and a machine- - -- readable test log. 
- let suiteLog = buildLog exit - - -- Write summary notice to log file indicating start of test suite - appendFile (logFile suiteLog) $ summarizeSuiteStart $ testName' - - -- Append contents of temporary log file to the final human- - -- readable log file - appendFile (logFile suiteLog) logText - - -- Write end-of-suite summary notice to log file - appendFile (logFile suiteLog) $ summarizeSuiteFinish suiteLog - - -- Show the contents of the human-readable log file on the terminal - -- if there is a failure and/or detailed output is requested - let whenPrinting = when $ - ( details == Always || - details == Failures && not (suitePassed $ testLogs suiteLog)) - -- verbosity overrides show-details - && verbosity >= normal - whenPrinting $ putStr $ unlines $ lines logText - - -- Write summary notice to terminal indicating end of test suite - notice verbosity $ summarizeSuiteFinish suiteLog - - when isCoverageEnabled $ - markupTest verbosity lbi distPref (display $ PD.package pkg_descr) suite - - return suiteLog - where - testName' = unUnqualComponentName $ PD.testName suite - - distPref = fromFlag $ testDistPref flags - verbosity = fromFlag $ testVerbosity flags - details = fromFlag $ testShowDetails flags - testLogDir = distPref "test" - - buildLog exit = - let r = case exit of - ExitSuccess -> Pass - ExitFailure c -> Fail $ "exit code: " ++ show c - --n = unUnqualComponentName $ PD.testName suite - l = TestLog - { testName = testName' - , testOptionsReturned = [] - , testResult = r - } - in TestSuiteLog - { testSuiteName = PD.testName suite - , testLogs = l - , logFile = - testLogDir - testSuiteLogPath (fromFlag $ testHumanLog flags) - pkg_descr lbi testName' l - } - --- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't --- necessarily a path. 
-testOption :: PD.PackageDescription - -> LBI.LocalBuildInfo - -> PD.TestSuite - -> PathTemplate - -> String -testOption pkg_descr lbi suite template = - fromPathTemplate $ substPathTemplate env template - where - env = initialPathTemplateEnv - (PD.package pkg_descr) (LBI.localUnitId lbi) - (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++ - [(TestSuiteNameVar, toPathTemplate $ unUnqualComponentName $ PD.testName suite)] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Test/LibV09.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Test/LibV09.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Test/LibV09.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Test/LibV09.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,275 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - -module Distribution.Simple.Test.LibV09 - ( runTest - -- Test stub - , simpleTestStub - , stubFilePath, stubMain, stubName, stubWriteLog - , writeSimpleTestStub - ) where - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.Types.UnqualComponentName - -import Distribution.Compat.CreatePipe -import Distribution.Compat.Environment -import Distribution.Compat.Internal.TempFile -import Distribution.ModuleName -import qualified Distribution.PackageDescription as PD -import Distribution.Simple.Build.PathsModule -import Distribution.Simple.BuildPaths -import Distribution.Simple.Compiler -import Distribution.Simple.Hpc -import Distribution.Simple.InstallDirs -import qualified Distribution.Simple.LocalBuildInfo as LBI -import qualified Distribution.Types.LocalBuildInfo as LBI -import Distribution.Simple.Setup -import Distribution.Simple.Test.Log -import Distribution.Simple.Utils -import Distribution.System -import Distribution.TestSuite -import Distribution.Text -import 
Distribution.Verbosity - -import qualified Control.Exception as CE -import System.Directory - ( createDirectoryIfMissing, canonicalizePath - , doesDirectoryExist, doesFileExist - , getCurrentDirectory, removeDirectoryRecursive, removeFile - , setCurrentDirectory ) -import System.Exit ( exitSuccess, exitWith, ExitCode(..) ) -import System.FilePath ( (), (<.>) ) -import System.IO ( hClose, hGetContents, hPutStr ) -import System.Process (StdStream(..), waitForProcess) - -runTest :: PD.PackageDescription - -> LBI.LocalBuildInfo - -> LBI.ComponentLocalBuildInfo - -> TestFlags - -> PD.TestSuite - -> IO TestSuiteLog -runTest pkg_descr lbi clbi flags suite = do - let isCoverageEnabled = LBI.testCoverage lbi - way = guessWay lbi - - pwd <- getCurrentDirectory - existingEnv <- getEnvironment - - let cmd = LBI.buildDir lbi stubName suite - stubName suite <.> exeExtension (LBI.hostPlatform lbi) - -- Check that the test executable exists. - exists <- doesFileExist cmd - unless exists $ - die' verbosity $ "Error: Could not find test program \"" ++ cmd - ++ "\". Did you build the package first?" - - -- Remove old .tix files if appropriate. - unless (fromFlag $ testKeepTix flags) $ do - let tDir = tixDir distPref way testName' - exists' <- doesDirectoryExist tDir - when exists' $ removeDirectoryRecursive tDir - - -- Create directory for HPC files. 
- createDirectoryIfMissing True $ tixDir distPref way testName' - - -- Write summary notices indicating start of test suite - notice verbosity $ summarizeSuiteStart testName' - - suiteLog <- CE.bracket openCabalTemp deleteIfExists $ \tempLog -> do - - (rOut, wOut) <- createPipe - - -- Run test executable - (Just wIn, _, _, process) <- do - let opts = map (testOption pkg_descr lbi suite) $ testOptions flags - dataDirPath = pwd PD.dataDir pkg_descr - tixFile = pwd tixFilePath distPref way testName' - pkgPathEnv = (pkgPathEnvVar pkg_descr "datadir", dataDirPath) - : existingEnv - shellEnv = [("HPCTIXFILE", tixFile) | isCoverageEnabled] - ++ pkgPathEnv - -- Add (DY)LD_LIBRARY_PATH if needed - shellEnv' <- - if LBI.withDynExe lbi - then do - let (Platform _ os) = LBI.hostPlatform lbi - paths <- LBI.depLibraryPaths True False lbi clbi - cpath <- canonicalizePath $ LBI.componentBuildDir lbi clbi - return (addLibraryPath os (cpath : paths) shellEnv) - else return shellEnv - createProcessWithEnv verbosity cmd opts Nothing (Just shellEnv') - -- these handles are closed automatically - CreatePipe (UseHandle wOut) (UseHandle wOut) - - hPutStr wIn $ show (tempLog, PD.testName suite) - hClose wIn - - -- Append contents of temporary log file to the final human- - -- readable log file - logText <- hGetContents rOut - -- Force the IO manager to drain the test output pipe - length logText `seq` return () - - exitcode <- waitForProcess process - unless (exitcode == ExitSuccess) $ do - debug verbosity $ cmd ++ " returned " ++ show exitcode - - -- Generate final log file name - let finalLogName l = testLogDir - testSuiteLogPath - (fromFlag $ testHumanLog flags) pkg_descr lbi - (unUnqualComponentName $ testSuiteName l) (testLogs l) - -- Generate TestSuiteLog from executable exit code and a machine- - -- readable test log - suiteLog <- fmap ((\l -> l { logFile = finalLogName l }) . 
read) -- TODO: eradicateNoParse - $ readFile tempLog - - -- Write summary notice to log file indicating start of test suite - appendFile (logFile suiteLog) $ summarizeSuiteStart testName' - - appendFile (logFile suiteLog) logText - - -- Write end-of-suite summary notice to log file - appendFile (logFile suiteLog) $ summarizeSuiteFinish suiteLog - - -- Show the contents of the human-readable log file on the terminal - -- if there is a failure and/or detailed output is requested - let details = fromFlag $ testShowDetails flags - whenPrinting = when $ (details > Never) - && (not (suitePassed $ testLogs suiteLog) || details == Always) - && verbosity >= normal - whenPrinting $ putStr $ unlines $ lines logText - - return suiteLog - - -- Write summary notice to terminal indicating end of test suite - notice verbosity $ summarizeSuiteFinish suiteLog - - when isCoverageEnabled $ - markupTest verbosity lbi distPref (display $ PD.package pkg_descr) suite - - return suiteLog - where - testName' = unUnqualComponentName $ PD.testName suite - - deleteIfExists file = do - exists <- doesFileExist file - when exists $ removeFile file - - testLogDir = distPref "test" - openCabalTemp = do - (f, h) <- openTempFile testLogDir $ "cabal-test-" <.> "log" - hClose h >> return f - - distPref = fromFlag $ testDistPref flags - verbosity = fromFlag $ testVerbosity flags - --- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't --- necessarily a path. 
-testOption :: PD.PackageDescription - -> LBI.LocalBuildInfo - -> PD.TestSuite - -> PathTemplate - -> String -testOption pkg_descr lbi suite template = - fromPathTemplate $ substPathTemplate env template - where - env = initialPathTemplateEnv - (PD.package pkg_descr) (LBI.localUnitId lbi) - (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++ - [(TestSuiteNameVar, toPathTemplate $ unUnqualComponentName $ PD.testName suite)] - --- Test stub ---------- - --- | The name of the stub executable associated with a library 'TestSuite'. -stubName :: PD.TestSuite -> FilePath -stubName t = unUnqualComponentName (PD.testName t) ++ "Stub" - --- | The filename of the source file for the stub executable associated with a --- library 'TestSuite'. -stubFilePath :: PD.TestSuite -> FilePath -stubFilePath t = stubName t <.> "hs" - --- | Write the source file for a library 'TestSuite' stub executable. -writeSimpleTestStub :: PD.TestSuite -- ^ library 'TestSuite' for which a stub - -- is being created - -> FilePath -- ^ path to directory where stub source - -- should be located - -> NoCallStackIO () -writeSimpleTestStub t dir = do - createDirectoryIfMissing True dir - let filename = dir stubFilePath t - PD.TestSuiteLibV09 _ m = PD.testInterface t - writeFile filename $ simpleTestStub m - --- | Source code for library test suite stub executable -simpleTestStub :: ModuleName -> String -simpleTestStub m = unlines - [ "module Main ( main ) where" - , "import Distribution.Simple.Test.LibV09 ( stubMain )" - , "import " ++ show (disp m) ++ " ( tests )" - , "main :: IO ()" - , "main = stubMain tests" - ] - --- | Main function for test stubs. Once, it was written directly into the stub, --- but minimizing the amount of code actually in the stub maximizes the number --- of detectable errors when Cabal is compiled. 
-stubMain :: IO [Test] -> IO () -stubMain tests = do - (f, n) <- fmap read getContents -- TODO: eradicateNoParse - dir <- getCurrentDirectory - results <- (tests >>= stubRunTests) `CE.catch` errHandler - setCurrentDirectory dir - stubWriteLog f n results - where - errHandler :: CE.SomeException -> NoCallStackIO TestLogs - errHandler e = case CE.fromException e of - Just CE.UserInterrupt -> CE.throwIO e - _ -> return $ TestLog { testName = "Cabal test suite exception", - testOptionsReturned = [], - testResult = Error $ show e } - --- | The test runner used in library "TestSuite" stub executables. Runs a list --- of 'Test's. An executable calling this function is meant to be invoked as --- the child of a Cabal process during @.\/setup test@. A 'TestSuiteLog', --- provided by Cabal, is read from the standard input; it supplies the name of --- the test suite and the location of the machine-readable test suite log file. --- Human-readable log information is written to the standard output for capture --- by the calling Cabal process. -stubRunTests :: [Test] -> IO TestLogs -stubRunTests tests = do - logs <- traverse stubRunTests' tests - return $ GroupLogs "Default" logs - where - stubRunTests' (Test t) = do - l <- run t >>= finish - summarizeTest normal Always l - return l - where - finish (Finished result) = - return TestLog - { testName = name t - , testOptionsReturned = defaultOptions t - , testResult = result - } - finish (Progress _ next) = next >>= finish - stubRunTests' g@(Group {}) = do - logs <- traverse stubRunTests' $ groupTests g - return $ GroupLogs (groupName g) logs - stubRunTests' (ExtraOptions _ t) = stubRunTests' t - maybeDefaultOption opt = - maybe Nothing (\d -> Just (optionName opt, d)) $ optionDefault opt - defaultOptions testInst = mapMaybe maybeDefaultOption $ options testInst - --- | From a test stub, write the 'TestSuiteLog' to temporary file for the calling --- Cabal process to read. 
-stubWriteLog :: FilePath -> UnqualComponentName -> TestLogs -> NoCallStackIO () -stubWriteLog f n logs = do - let testLog = TestSuiteLog { testSuiteName = n, testLogs = logs, logFile = f } - writeFile (logFile testLog) $ show testLog - when (suiteError logs) $ exitWith $ ExitFailure 2 - when (suiteFailed logs) $ exitWith $ ExitFailure 1 - exitSuccess diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Test/Log.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Test/Log.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Test/Log.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Test/Log.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,164 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - -module Distribution.Simple.Test.Log - ( PackageLog(..) - , TestLogs(..) - , TestSuiteLog(..) - , countTestResults - , localPackageLog - , summarizePackage - , summarizeSuiteFinish, summarizeSuiteStart - , summarizeTest - , suiteError, suiteFailed, suitePassed - , testSuiteLogPath - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Package -import Distribution.Types.UnqualComponentName -import qualified Distribution.PackageDescription as PD -import Distribution.Simple.Compiler -import Distribution.Simple.InstallDirs -import qualified Distribution.Simple.LocalBuildInfo as LBI -import Distribution.Simple.Setup -import Distribution.Simple.Utils -import Distribution.System -import Distribution.TestSuite -import Distribution.Verbosity -import Distribution.Text - --- | Logs all test results for a package, broken down first by test suite and --- then by test case. 
-data PackageLog = PackageLog - { package :: PackageId - , compiler :: CompilerId - , platform :: Platform - , testSuites :: [TestSuiteLog] - } - deriving (Read, Show, Eq) - --- | A 'PackageLog' with package and platform information specified. -localPackageLog :: PD.PackageDescription -> LBI.LocalBuildInfo -> PackageLog -localPackageLog pkg_descr lbi = PackageLog - { package = PD.package pkg_descr - , compiler = compilerId $ LBI.compiler lbi - , platform = LBI.hostPlatform lbi - , testSuites = [] - } - --- | Logs test suite results, itemized by test case. -data TestSuiteLog = TestSuiteLog - { testSuiteName :: UnqualComponentName - , testLogs :: TestLogs - , logFile :: FilePath -- path to human-readable log file - } - deriving (Read, Show, Eq) - -data TestLogs - = TestLog - { testName :: String - , testOptionsReturned :: Options - , testResult :: Result - } - | GroupLogs String [TestLogs] - deriving (Read, Show, Eq) - --- | Count the number of pass, fail, and error test results in a 'TestLogs' --- tree. -countTestResults :: TestLogs - -> (Int, Int, Int) -- ^ Passes, fails, and errors, - -- respectively. -countTestResults = go (0, 0, 0) - where - go (p, f, e) (TestLog { testResult = r }) = - case r of - Pass -> (p + 1, f, e) - Fail _ -> (p, f + 1, e) - Error _ -> (p, f, e + 1) - go (p, f, e) (GroupLogs _ ts) = foldl go (p, f, e) ts - --- | From a 'TestSuiteLog', determine if the test suite passed. -suitePassed :: TestLogs -> Bool -suitePassed l = - case countTestResults l of - (_, 0, 0) -> True - _ -> False - --- | From a 'TestSuiteLog', determine if the test suite failed. -suiteFailed :: TestLogs -> Bool -suiteFailed l = - case countTestResults l of - (_, 0, _) -> False - _ -> True - --- | From a 'TestSuiteLog', determine if the test suite encountered errors. 
-suiteError :: TestLogs -> Bool -suiteError l = - case countTestResults l of - (_, _, 0) -> False - _ -> True - -resultString :: TestLogs -> String -resultString l | suiteError l = "error" - | suiteFailed l = "fail" - | otherwise = "pass" - -testSuiteLogPath :: PathTemplate - -> PD.PackageDescription - -> LBI.LocalBuildInfo - -> String -- ^ test suite name - -> TestLogs -- ^ test suite results - -> FilePath -testSuiteLogPath template pkg_descr lbi test_name result = - fromPathTemplate $ substPathTemplate env template - where - env = initialPathTemplateEnv - (PD.package pkg_descr) (LBI.localUnitId lbi) - (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) - ++ [ (TestSuiteNameVar, toPathTemplate test_name) - , (TestSuiteResultVar, toPathTemplate $ resultString result) - ] - --- | Print a summary to the console after all test suites have been run --- indicating the number of successful test suites and cases. Returns 'True' if --- all test suites passed and 'False' otherwise. -summarizePackage :: Verbosity -> PackageLog -> IO Bool -summarizePackage verbosity packageLog = do - let counts = map (countTestResults . testLogs) $ testSuites packageLog - (passed, failed, errors) = foldl1 addTriple counts - totalCases = passed + failed + errors - passedSuites = length - $ filter (suitePassed . testLogs) - $ testSuites packageLog - totalSuites = length $ testSuites packageLog - notice verbosity $ show passedSuites ++ " of " ++ show totalSuites - ++ " test suites (" ++ show passed ++ " of " - ++ show totalCases ++ " test cases) passed." - return $! passedSuites == totalSuites - where - addTriple (p1, f1, e1) (p2, f2, e2) = (p1 + p2, f1 + f2, e1 + e2) - --- | Print a summary of a single test case's result to the console, supressing --- output for certain verbosity or test filter levels. 
-summarizeTest :: Verbosity -> TestShowDetails -> TestLogs -> IO () -summarizeTest _ _ (GroupLogs {}) = return () -summarizeTest verbosity details t = - when shouldPrint $ notice verbosity $ "Test case " ++ testName t - ++ ": " ++ show (testResult t) - where shouldPrint = (details > Never) && (notPassed || details == Always) - notPassed = testResult t /= Pass - --- | Print a summary of the test suite's results on the console, suppressing --- output for certain verbosity or test filter levels. -summarizeSuiteFinish :: TestSuiteLog -> String -summarizeSuiteFinish testLog = unlines - [ "Test suite " ++ display (testSuiteName testLog) ++ ": " ++ resStr - , "Test suite logged to: " ++ logFile testLog - ] - where resStr = map toUpper (resultString $ testLogs testLog) - -summarizeSuiteStart :: String -> String -summarizeSuiteStart n = "Test suite " ++ n ++ ": RUNNING...\n" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Test.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Test.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Test.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Test.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,137 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Test --- Copyright : Thomas Tuegel 2010 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is the entry point into testing a built package. It performs the --- \"@.\/setup test@\" action. It runs test suites designated in the package --- description and reports on the results. 
- -module Distribution.Simple.Test - ( test - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.UnqualComponentName -import qualified Distribution.PackageDescription as PD -import Distribution.Simple.Compiler -import Distribution.Simple.Hpc -import Distribution.Simple.InstallDirs -import qualified Distribution.Simple.LocalBuildInfo as LBI -import qualified Distribution.Types.LocalBuildInfo as LBI -import Distribution.Simple.Setup -import Distribution.Simple.UserHooks -import qualified Distribution.Simple.Test.ExeV10 as ExeV10 -import qualified Distribution.Simple.Test.LibV09 as LibV09 -import Distribution.Simple.Test.Log -import Distribution.Simple.Utils -import Distribution.TestSuite -import Distribution.Text - -import System.Directory - ( createDirectoryIfMissing, doesFileExist, getDirectoryContents - , removeFile ) -import System.Exit ( exitFailure, exitSuccess ) -import System.FilePath ( () ) - --- |Perform the \"@.\/setup test@\" action. -test :: Args -- ^positional command-line arguments - -> PD.PackageDescription -- ^information from the .cabal file - -> LBI.LocalBuildInfo -- ^information from the configure step - -> TestFlags -- ^flags sent to test - -> IO () -test args pkg_descr lbi flags = do - let verbosity = fromFlag $ testVerbosity flags - machineTemplate = fromFlag $ testMachineLog flags - distPref = fromFlag $ testDistPref flags - testLogDir = distPref "test" - testNames = args - pkgTests = PD.testSuites pkg_descr - enabledTests = LBI.enabledTestLBIs pkg_descr lbi - - doTest :: ((PD.TestSuite, LBI.ComponentLocalBuildInfo), - Maybe TestSuiteLog) -> IO TestSuiteLog - doTest ((suite, clbi), _) = - case PD.testInterface suite of - PD.TestSuiteExeV10 _ _ -> - ExeV10.runTest pkg_descr lbi clbi flags suite - - PD.TestSuiteLibV09 _ _ -> - LibV09.runTest pkg_descr lbi clbi flags suite - - _ -> return TestSuiteLog - { testSuiteName = PD.testName suite - , testLogs = TestLog - { testName = unUnqualComponentName $ 
PD.testName suite - , testOptionsReturned = [] - , testResult = - Error $ "No support for running test suite type: " - ++ show (disp $ PD.testType suite) - } - , logFile = "" - } - - unless (PD.hasTests pkg_descr) $ do - notice verbosity "Package has no test suites." - exitSuccess - - when (PD.hasTests pkg_descr && null enabledTests) $ - die' verbosity $ - "No test suites enabled. Did you remember to configure with " - ++ "\'--enable-tests\'?" - - testsToRun <- case testNames of - [] -> return $ zip enabledTests $ repeat Nothing - names -> for names $ \tName -> - let testMap = zip enabledNames enabledTests - enabledNames = map (PD.testName . fst) enabledTests - allNames = map PD.testName pkgTests - tCompName = mkUnqualComponentName tName - in case lookup tCompName testMap of - Just t -> return (t, Nothing) - _ | tCompName `elem` allNames -> - die' verbosity $ "Package configured with test suite " - ++ tName ++ " disabled." - | otherwise -> die' verbosity $ "no such test: " ++ tName - - createDirectoryIfMissing True testLogDir - - -- Delete ordinary files from test log directory. - getDirectoryContents testLogDir - >>= filterM doesFileExist . map (testLogDir ) - >>= traverse_ removeFile - - let totalSuites = length testsToRun - notice verbosity $ "Running " ++ show totalSuites ++ " test suites..." - suites <- traverse doTest testsToRun - let packageLog = (localPackageLog pkg_descr lbi) { testSuites = suites } - packageLogFile = () testLogDir - $ packageLogPath machineTemplate pkg_descr lbi - allOk <- summarizePackage verbosity packageLog - writeFile packageLogFile $ show packageLog - - when (LBI.testCoverage lbi) $ - markupPackage verbosity lbi distPref (display $ PD.package pkg_descr) $ - map (fst . 
fst) testsToRun - - unless allOk exitFailure - -packageLogPath :: PathTemplate - -> PD.PackageDescription - -> LBI.LocalBuildInfo - -> FilePath -packageLogPath template pkg_descr lbi = - fromPathTemplate $ substPathTemplate env template - where - env = initialPathTemplateEnv - (PD.package pkg_descr) (LBI.localUnitId lbi) - (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/UHC.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/UHC.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/UHC.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/UHC.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,293 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.UHC --- Copyright : Andres Loeh 2009 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module contains most of the UHC-specific code for configuring, building --- and installing packages. --- --- Thanks to the authors of the other implementation-specific files, in --- particular to Isaac Jones, Duncan Coutts and Henning Thielemann, for --- inspiration on how to design this module. 
- -module Distribution.Simple.UHC ( - configure, getInstalledPackages, - buildLib, buildExe, installLib, registerPackage, inplacePackageDbPath - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Compat.ReadP -import Distribution.InstalledPackageInfo -import Distribution.Package hiding (installedUnitId) -import Distribution.PackageDescription -import Distribution.Simple.BuildPaths -import Distribution.Simple.Compiler as C -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.PackageIndex -import Distribution.Simple.Program -import Distribution.Simple.Utils -import Distribution.Text -import Distribution.Types.MungedPackageId -import Distribution.Verbosity -import Distribution.Version -import Distribution.System -import Language.Haskell.Extension - -import qualified Data.Map as Map ( empty ) -import System.Directory -import System.FilePath - --- ----------------------------------------------------------------------------- --- Configuring - -configure :: Verbosity -> Maybe FilePath -> Maybe FilePath - -> ProgramDb -> IO (Compiler, Maybe Platform, ProgramDb) -configure verbosity hcPath _hcPkgPath progdb = do - - (_uhcProg, uhcVersion, progdb') <- - requireProgramVersion verbosity uhcProgram - (orLaterVersion (mkVersion [1,0,2])) - (userMaybeSpecifyPath "uhc" hcPath progdb) - - let comp = Compiler { - compilerId = CompilerId UHC uhcVersion, - compilerAbiTag = C.NoAbiTag, - compilerCompat = [], - compilerLanguages = uhcLanguages, - compilerExtensions = uhcLanguageExtensions, - compilerProperties = Map.empty - } - compPlatform = Nothing - return (comp, compPlatform, progdb') - -uhcLanguages :: [(Language, C.Flag)] -uhcLanguages = [(Haskell98, "")] - --- | The flags for the supported extensions. 
-uhcLanguageExtensions :: [(Extension, Maybe C.Flag)] -uhcLanguageExtensions = - let doFlag (f, (enable, disable)) = [(EnableExtension f, enable), - (DisableExtension f, disable)] - alwaysOn = (Nothing, Nothing{- wrong -}) - in concatMap doFlag - [(CPP, (Just "--cpp", Nothing{- wrong -})), - (PolymorphicComponents, alwaysOn), - (ExistentialQuantification, alwaysOn), - (ForeignFunctionInterface, alwaysOn), - (UndecidableInstances, alwaysOn), - (MultiParamTypeClasses, alwaysOn), - (Rank2Types, alwaysOn), - (PatternSignatures, alwaysOn), - (EmptyDataDecls, alwaysOn), - (ImplicitPrelude, (Nothing, Just "--no-prelude"{- wrong -})), - (TypeOperators, alwaysOn), - (OverlappingInstances, alwaysOn), - (FlexibleInstances, alwaysOn)] - -getInstalledPackages :: Verbosity -> Compiler -> PackageDBStack -> ProgramDb - -> IO InstalledPackageIndex -getInstalledPackages verbosity comp packagedbs progdb = do - let compilerid = compilerId comp - systemPkgDir <- getGlobalPackageDir verbosity progdb - userPkgDir <- getUserPackageDir - let pkgDirs = nub (concatMap (packageDbPaths userPkgDir systemPkgDir) packagedbs) - -- putStrLn $ "pkgdirs: " ++ show pkgDirs - pkgs <- liftM (map addBuiltinVersions . 
concat) $ - traverse (\ d -> getDirectoryContents d >>= filterM (isPkgDir (display compilerid) d)) - pkgDirs - -- putStrLn $ "pkgs: " ++ show pkgs - let iPkgs = - map mkInstalledPackageInfo $ - concatMap parsePackage $ - pkgs - -- putStrLn $ "installed pkgs: " ++ show iPkgs - return (fromList iPkgs) - -getGlobalPackageDir :: Verbosity -> ProgramDb -> IO FilePath -getGlobalPackageDir verbosity progdb = do - output <- getDbProgramOutput verbosity - uhcProgram progdb ["--meta-pkgdir-system"] - -- call to "lines" necessary, because pkgdir contains an extra newline at the end - let [pkgdir] = lines output - return pkgdir - -getUserPackageDir :: NoCallStackIO FilePath -getUserPackageDir = do - homeDir <- getHomeDirectory - return $ homeDir ".cabal" "lib" -- TODO: determine in some other way - -packageDbPaths :: FilePath -> FilePath -> PackageDB -> [FilePath] -packageDbPaths user system db = - case db of - GlobalPackageDB -> [ system ] - UserPackageDB -> [ user ] - SpecificPackageDB path -> [ path ] - --- | Hack to add version numbers to UHC-built-in packages. This should sooner or --- later be fixed on the UHC side. -addBuiltinVersions :: String -> String -{- -addBuiltinVersions "uhcbase" = "uhcbase-1.0" -addBuiltinVersions "base" = "base-3.0" -addBuiltinVersions "array" = "array-0.2" --} -addBuiltinVersions xs = xs - --- | Name of the installed package config file. -installedPkgConfig :: String -installedPkgConfig = "installed-pkg-config" - --- | Check if a certain dir contains a valid package. Currently, we are --- looking only for the presence of an installed package configuration. --- TODO: Actually make use of the information provided in the file. -isPkgDir :: String -> String -> String -> NoCallStackIO Bool -isPkgDir _ _ ('.' : _) = return False -- ignore files starting with a . 
-isPkgDir c dir xs = do - let candidate = dir uhcPackageDir xs c - -- putStrLn $ "trying: " ++ candidate - doesFileExist (candidate installedPkgConfig) - -parsePackage :: String -> [PackageId] -parsePackage x = map fst (filter (\ (_,y) -> null y) (readP_to_S parse x)) - --- | Create a trivial package info from a directory name. -mkInstalledPackageInfo :: PackageId -> InstalledPackageInfo -mkInstalledPackageInfo p = emptyInstalledPackageInfo - { installedUnitId = mkLegacyUnitId p, - sourcePackageId = p } - - --- ----------------------------------------------------------------------------- --- Building - -buildLib :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Library -> ComponentLocalBuildInfo -> IO () -buildLib verbosity pkg_descr lbi lib clbi = do - - systemPkgDir <- getGlobalPackageDir verbosity (withPrograms lbi) - userPkgDir <- getUserPackageDir - let runUhcProg = runDbProgram verbosity uhcProgram (withPrograms lbi) - let uhcArgs = -- set package name - ["--pkg-build=" ++ display (packageId pkg_descr)] - -- common flags lib/exe - ++ constructUHCCmdLine userPkgDir systemPkgDir - lbi (libBuildInfo lib) clbi - (buildDir lbi) verbosity - -- source files - -- suboptimal: UHC does not understand module names, so - -- we replace periods by path separators - ++ map (map (\ c -> if c == '.' 
then pathSeparator else c)) - (map display (allLibModules lib clbi)) - - runUhcProg uhcArgs - - return () - -buildExe :: Verbosity -> PackageDescription -> LocalBuildInfo - -> Executable -> ComponentLocalBuildInfo -> IO () -buildExe verbosity _pkg_descr lbi exe clbi = do - systemPkgDir <- getGlobalPackageDir verbosity (withPrograms lbi) - userPkgDir <- getUserPackageDir - let runUhcProg = runDbProgram verbosity uhcProgram (withPrograms lbi) - let uhcArgs = -- common flags lib/exe - constructUHCCmdLine userPkgDir systemPkgDir - lbi (buildInfo exe) clbi - (buildDir lbi) verbosity - -- output file - ++ ["--output", buildDir lbi display (exeName exe)] - -- main source module - ++ [modulePath exe] - runUhcProg uhcArgs - -constructUHCCmdLine :: FilePath -> FilePath - -> LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo - -> FilePath -> Verbosity -> [String] -constructUHCCmdLine user system lbi bi clbi odir verbosity = - -- verbosity - (if verbosity >= deafening then ["-v4"] - else if verbosity >= normal then [] - else ["-v0"]) - ++ hcOptions UHC bi - -- flags for language extensions - ++ languageToFlags (compiler lbi) (defaultLanguage bi) - ++ extensionsToFlags (compiler lbi) (usedExtensions bi) - -- packages - ++ ["--hide-all-packages"] - ++ uhcPackageDbOptions user system (withPackageDB lbi) - ++ ["--package=uhcbase"] - ++ ["--package=" ++ display (mungedName pkgid) | (_, pkgid) <- componentPackageDeps clbi ] - -- search paths - ++ ["-i" ++ odir] - ++ ["-i" ++ l | l <- nub (hsSourceDirs bi)] - ++ ["-i" ++ autogenComponentModulesDir lbi clbi] - ++ ["-i" ++ autogenPackageModulesDir lbi] - -- cpp options - ++ ["--optP=" ++ opt | opt <- cppOptions bi] - -- output path - ++ ["--odir=" ++ odir] - -- optimization - ++ (case withOptimization lbi of - NoOptimisation -> ["-O0"] - NormalOptimisation -> ["-O1"] - MaximumOptimisation -> ["-O2"]) - -uhcPackageDbOptions :: FilePath -> FilePath -> PackageDBStack -> [String] -uhcPackageDbOptions user system db = map (\ x -> 
"--pkg-searchpath=" ++ x) - (concatMap (packageDbPaths user system) db) - --- ----------------------------------------------------------------------------- --- Installation - -installLib :: Verbosity -> LocalBuildInfo - -> FilePath -> FilePath -> FilePath - -> PackageDescription -> Library -> ComponentLocalBuildInfo -> IO () -installLib verbosity _lbi targetDir _dynlibTargetDir builtDir pkg _library _clbi = do - -- putStrLn $ "dest: " ++ targetDir - -- putStrLn $ "built: " ++ builtDir - installDirectoryContents verbosity (builtDir display (packageId pkg)) targetDir - --- currently hard-coded UHC code generator and variant to use -uhcTarget, uhcTargetVariant :: String -uhcTarget = "bc" -uhcTargetVariant = "plain" - --- root directory for a package in UHC -uhcPackageDir :: String -> String -> FilePath -uhcPackageSubDir :: String -> FilePath -uhcPackageDir pkgid compilerid = pkgid uhcPackageSubDir compilerid -uhcPackageSubDir compilerid = compilerid uhcTarget uhcTargetVariant - --- ----------------------------------------------------------------------------- --- Registering - -registerPackage - :: Verbosity - -> Compiler - -> ProgramDb - -> PackageDBStack - -> InstalledPackageInfo - -> IO () -registerPackage verbosity comp progdb packageDbs installedPkgInfo = do - dbdir <- case last packageDbs of - GlobalPackageDB -> getGlobalPackageDir verbosity progdb - UserPackageDB -> getUserPackageDir - SpecificPackageDB dir -> return dir - let pkgdir = dbdir uhcPackageDir (display pkgid) (display compilerid) - createDirectoryIfMissingVerbose verbosity True pkgdir - writeUTF8File (pkgdir installedPkgConfig) - (showInstalledPackageInfo installedPkgInfo) - where - pkgid = sourcePackageId installedPkgInfo - compilerid = compilerId comp - -inplacePackageDbPath :: LocalBuildInfo -> FilePath -inplacePackageDbPath lbi = buildDir lbi diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/UserHooks.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/UserHooks.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/UserHooks.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/UserHooks.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,225 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.UserHooks --- Copyright : Isaac Jones 2003-2005 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This defines the API that @Setup.hs@ scripts can use to customise the way --- the build works. This module just defines the 'UserHooks' type. The --- predefined sets of hooks that implement the @Simple@, @Make@ and @Configure@ --- build systems are defined in "Distribution.Simple". The 'UserHooks' is a big --- record of functions. There are 3 for each action, a pre, post and the action --- itself. There are few other miscellaneous hooks, ones to extend the set of --- programs and preprocessors and one to override the function used to read the --- @.cabal@ file. --- --- This hooks type is widely agreed to not be the right solution. Partly this --- is because changes to it usually break custom @Setup.hs@ files and yet many --- internal code changes do require changes to the hooks. For example we cannot --- pass any extra parameters to most of the functions that implement the --- various phases because it would involve changing the types of the --- corresponding hook. At some point it will have to be replaced. 
- -module Distribution.Simple.UserHooks ( - UserHooks(..), Args, - emptyUserHooks, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.PackageDescription -import Distribution.Simple.Program -import Distribution.Simple.Command -import Distribution.Simple.PreProcess -import Distribution.Simple.Setup -import Distribution.Simple.LocalBuildInfo - -type Args = [String] - --- | Hooks allow authors to add specific functionality before and after a --- command is run, and also to specify additional preprocessors. --- --- * WARNING: The hooks interface is under rather constant flux as we try to --- understand users needs. Setup files that depend on this interface may --- break in future releases. -data UserHooks = UserHooks { - - -- | Used for @.\/setup test@ - runTests :: Args -> Bool -> PackageDescription -> LocalBuildInfo -> IO (), - -- | Read the description file - readDesc :: IO (Maybe GenericPackageDescription), - -- | Custom preprocessors in addition to and overriding 'knownSuffixHandlers'. - hookedPreProcessors :: [ PPSuffixHandler ], - -- | These programs are detected at configure time. Arguments for them are - -- added to the configure command. - hookedPrograms :: [Program], - - -- |Hook to run before configure command - preConf :: Args -> ConfigFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during configure. - confHook :: (GenericPackageDescription, HookedBuildInfo) - -> ConfigFlags -> IO LocalBuildInfo, - -- |Hook to run after configure command - postConf :: Args -> ConfigFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before build command. Second arg indicates verbosity level. - preBuild :: Args -> BuildFlags -> IO HookedBuildInfo, - - -- |Over-ride this hook to get different behavior during build. - buildHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> BuildFlags -> IO (), - -- |Hook to run after build command. Second arg indicates verbosity level. 
- postBuild :: Args -> BuildFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before repl command. Second arg indicates verbosity level. - preRepl :: Args -> ReplFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during interpretation. - replHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> ReplFlags -> [String] -> IO (), - -- |Hook to run after repl command. Second arg indicates verbosity level. - postRepl :: Args -> ReplFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before clean command. Second arg indicates verbosity level. - preClean :: Args -> CleanFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during clean. - cleanHook :: PackageDescription -> () -> UserHooks -> CleanFlags -> IO (), - -- |Hook to run after clean command. Second arg indicates verbosity level. - postClean :: Args -> CleanFlags -> PackageDescription -> () -> IO (), - - -- |Hook to run before copy command - preCopy :: Args -> CopyFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during copy. - copyHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> CopyFlags -> IO (), - -- |Hook to run after copy command - postCopy :: Args -> CopyFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before install command - preInst :: Args -> InstallFlags -> IO HookedBuildInfo, - - -- |Over-ride this hook to get different behavior during install. - instHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> InstallFlags -> IO (), - -- |Hook to run after install command. postInst should be run - -- on the target, not on the build machine. - postInst :: Args -> InstallFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before sdist command. Second arg indicates verbosity level. - preSDist :: Args -> SDistFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during sdist. 
- sDistHook :: PackageDescription -> Maybe LocalBuildInfo -> UserHooks -> SDistFlags -> IO (), - -- |Hook to run after sdist command. Second arg indicates verbosity level. - postSDist :: Args -> SDistFlags -> PackageDescription -> Maybe LocalBuildInfo -> IO (), - - -- |Hook to run before register command - preReg :: Args -> RegisterFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during registration. - regHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> RegisterFlags -> IO (), - -- |Hook to run after register command - postReg :: Args -> RegisterFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before unregister command - preUnreg :: Args -> RegisterFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during unregistration. - unregHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> RegisterFlags -> IO (), - -- |Hook to run after unregister command - postUnreg :: Args -> RegisterFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before hscolour command. Second arg indicates verbosity level. - preHscolour :: Args -> HscolourFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during hscolour. - hscolourHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> HscolourFlags -> IO (), - -- |Hook to run after hscolour command. Second arg indicates verbosity level. - postHscolour :: Args -> HscolourFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before doctest command. Second arg indicates verbosity level. - preDoctest :: Args -> DoctestFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during doctest. - doctestHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> DoctestFlags -> IO (), - -- |Hook to run after doctest command. Second arg indicates verbosity level. 
- postDoctest :: Args -> DoctestFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before haddock command. Second arg indicates verbosity level. - preHaddock :: Args -> HaddockFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during haddock. - haddockHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> HaddockFlags -> IO (), - -- |Hook to run after haddock command. Second arg indicates verbosity level. - postHaddock :: Args -> HaddockFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before test command. - preTest :: Args -> TestFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during test. - testHook :: Args -> PackageDescription -> LocalBuildInfo -> UserHooks -> TestFlags -> IO (), - -- |Hook to run after test command. - postTest :: Args -> TestFlags -> PackageDescription -> LocalBuildInfo -> IO (), - - -- |Hook to run before bench command. - preBench :: Args -> BenchmarkFlags -> IO HookedBuildInfo, - -- |Over-ride this hook to get different behavior during bench. - benchHook :: Args -> PackageDescription -> LocalBuildInfo -> UserHooks -> BenchmarkFlags -> IO (), - -- |Hook to run after bench command. - postBench :: Args -> BenchmarkFlags -> PackageDescription -> LocalBuildInfo -> IO () - } - -{-# DEPRECATED runTests "Please use the new testing interface instead!" #-} -{-# DEPRECATED preSDist "SDist hooks violate the invariants of new-sdist. Use 'autogen-modules' and 'build-tool-depends' instead." #-} -{-# DEPRECATED sDistHook "SDist hooks violate the invariants of new-sdist. Use 'autogen-modules' and 'build-tool-depends' instead." #-} -{-# DEPRECATED postSDist "SDist hooks violate the invariants of new-sdist. Use 'autogen-modules' and 'build-tool-depends' instead." #-} - --- |Empty 'UserHooks' which do nothing. 
-emptyUserHooks :: UserHooks -emptyUserHooks - = UserHooks { - runTests = ru, - readDesc = return Nothing, - hookedPreProcessors = [], - hookedPrograms = [], - preConf = rn', - confHook = (\_ _ -> return (error "No local build info generated during configure. Over-ride empty configure hook.")), - postConf = ru, - preBuild = rn', - buildHook = ru, - postBuild = ru, - preRepl = \_ _ -> return emptyHookedBuildInfo, - replHook = \_ _ _ _ _ -> return (), - postRepl = ru, - preClean = rn, - cleanHook = ru, - postClean = ru, - preCopy = rn', - copyHook = ru, - postCopy = ru, - preInst = rn, - instHook = ru, - postInst = ru, - preSDist = rn, - sDistHook = ru, - postSDist = ru, - preReg = rn', - regHook = ru, - postReg = ru, - preUnreg = rn, - unregHook = ru, - postUnreg = ru, - preHscolour = rn, - hscolourHook = ru, - postHscolour = ru, - preDoctest = rn, - doctestHook = ru, - postDoctest = ru, - preHaddock = rn', - haddockHook = ru, - postHaddock = ru, - preTest = rn', - testHook = \_ -> ru, - postTest = ru, - preBench = rn', - benchHook = \_ -> ru, - postBench = ru - } - where rn args _ = noExtraFlags args >> return emptyHookedBuildInfo - rn' _ _ = return emptyHookedBuildInfo - ru _ _ _ _ = return () diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Utils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Utils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple/Utils.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple/Utils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1515 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE BangPatterns #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Utils 
--- Copyright : Isaac Jones, Simon Marlow 2003-2004 --- License : BSD3 --- portions Copyright (c) 2007, Galois Inc. --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- A large and somewhat miscellaneous collection of utility functions used --- throughout the rest of the Cabal lib and in other tools that use the Cabal --- lib like @cabal-install@. It has a very simple set of logging actions. It --- has low level functions for running programs, a bunch of wrappers for --- various directory and file functions that do extra logging. - -module Distribution.Simple.Utils ( - cabalVersion, - - -- * logging and errors - -- Old style - die, dieWithLocation, - -- New style - dieNoVerbosity, - die', dieWithLocation', - dieNoWrap, - topHandler, topHandlerWith, - warn, - notice, noticeNoWrap, noticeDoc, - setupMessage, - info, infoNoWrap, - debug, debugNoWrap, - chattyTry, - annotateIO, - printRawCommandAndArgs, printRawCommandAndArgsAndEnv, - withOutputMarker, - - -- * exceptions - handleDoesNotExist, - - -- * running programs - rawSystemExit, - rawSystemExitCode, - rawSystemExitWithEnv, - rawSystemStdout, - rawSystemStdInOut, - rawSystemIOWithEnv, - createProcessWithEnv, - maybeExit, - xargs, - findProgramLocation, - findProgramVersion, - - -- ** 'IOData' re-export - -- - -- These types are re-exported from - -- "Distribution.Utils.IOData" for convience as they're - -- exposed in the API of 'rawSystemStdInOut' - IOData(..), - IODataMode(..), - - -- * copying files - smartCopySources, - createDirectoryIfMissingVerbose, - copyFileVerbose, - copyDirectoryRecursiveVerbose, - copyFiles, - copyFileTo, - - -- * installing files - installOrdinaryFile, - installExecutableFile, - installMaybeExecutableFile, - installOrdinaryFiles, - installExecutableFiles, - installMaybeExecutableFiles, - installDirectoryContents, - copyDirectoryRecursive, - - -- * File permissions - doesExecutableExist, - setFileOrdinary, - setFileExecutable, - - -- * file names - 
currentDir, - shortRelativePath, - dropExeExtension, - exeExtensions, - - -- * finding files - findFile, - findFirstFile, - findFileWithExtension, - findFileWithExtension', - findAllFilesWithExtension, - findModuleFile, - findModuleFiles, - getDirectoryContentsRecursive, - - -- * environment variables - isInSearchPath, - addLibraryPath, - - -- * modification time - moreRecentFile, - existsAndIsMoreRecentThan, - - -- * temp files and dirs - TempFileOptions(..), defaultTempFileOptions, - withTempFile, withTempFileEx, - withTempDirectory, withTempDirectoryEx, - createTempDirectory, - - -- * .cabal and .buildinfo files - defaultPackageDesc, - findPackageDesc, - tryFindPackageDesc, - defaultHookedPackageDesc, - findHookedPackageDesc, - - -- * reading and writing files safely - withFileContents, - writeFileAtomic, - rewriteFile, - rewriteFileEx, - - -- * Unicode - fromUTF8BS, - fromUTF8LBS, - toUTF8BS, - toUTF8LBS, - readUTF8File, - withUTF8FileContents, - writeUTF8File, - normaliseLineEndings, - - -- * BOM - ignoreBOM, - - -- * generic utils - dropWhileEndLE, - takeWhileEndLE, - equating, - comparing, - isInfixOf, - intercalate, - lowercase, - listUnion, - listUnionRight, - ordNub, - ordNubBy, - ordNubRight, - safeTail, - unintersperse, - wrapText, - wrapLine, - - -- * FilePath stuff - isAbsoluteOnAnyPlatform, - isRelativeOnAnyPlatform, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Text -import Distribution.Utils.Generic -import Distribution.Utils.IOData (IOData(..), IODataMode(..)) -import qualified Distribution.Utils.IOData as IOData -import Distribution.ModuleName as ModuleName -import Distribution.System -import Distribution.Version -import Distribution.Compat.CopyFile -import Distribution.Compat.Internal.TempFile -import Distribution.Compat.Exception -import Distribution.Compat.Stack -import Distribution.Verbosity -import Distribution.Types.PackageId - -#if __GLASGOW_HASKELL__ < 711 -#ifdef VERSION_base -#define 
BOOTSTRAPPED_CABAL 1 -#endif -#else -#ifdef CURRENT_PACKAGE_KEY -#define BOOTSTRAPPED_CABAL 1 -#endif -#endif - -#ifdef BOOTSTRAPPED_CABAL -import qualified Paths_Cabal (version) -#endif - -import Control.Concurrent.MVar - ( newEmptyMVar, putMVar, takeMVar ) -import Data.Typeable - ( cast ) -import qualified Data.ByteString.Lazy.Char8 as BS.Char8 - -import System.Directory - ( Permissions(executable), getDirectoryContents, getPermissions - , doesDirectoryExist, doesFileExist, removeFile, findExecutable - , getModificationTime, createDirectory, removeDirectoryRecursive ) -import System.Environment - ( getProgName ) -import System.Exit - ( exitWith, ExitCode(..) ) -import System.FilePath - ( normalise, (), (<.>) - , getSearchPath, joinPath, takeDirectory, splitExtension - , splitDirectories, searchPathSeparator ) -import System.IO - ( Handle, hSetBinaryMode, hGetContents, stderr, stdout, hPutStr, hFlush - , hClose, hSetBuffering, BufferMode(..) ) -import System.IO.Error -import System.IO.Unsafe - ( unsafeInterleaveIO ) -import qualified Control.Exception as Exception - -import Data.Time.Clock.POSIX (getPOSIXTime, POSIXTime) -import Control.Exception (IOException, evaluate, throwIO) -import Control.Concurrent (forkIO) -import Numeric (showFFloat) -import qualified System.Process as Process - ( CreateProcess(..), StdStream(..), proc) -import System.Process - ( ProcessHandle, createProcess, rawSystem, runInteractiveProcess - , showCommandForUser, waitForProcess) - -import qualified Text.PrettyPrint as Disp - --- We only get our own version number when we're building with ourselves -cabalVersion :: Version -#if defined(BOOTSTRAPPED_CABAL) -cabalVersion = mkVersion' Paths_Cabal.version -#elif defined(CABAL_VERSION) -cabalVersion = mkVersion [CABAL_VERSION] -#else -cabalVersion = mkVersion [1,9999] --used when bootstrapping -#endif - --- ---------------------------------------------------------------------------- --- Exception and logging utils - --- Cabal's logging 
infrastructure has a few constraints: --- --- * We must make all logging formatting and emissions decisions based --- on the 'Verbosity' parameter, which is the only parameter that is --- plumbed to enough call-sites to actually be used for this matter. --- (One of Cabal's "big mistakes" is to have never have defined a --- monad of its own.) --- --- * When we 'die', we must raise an IOError. This a backwards --- compatibility consideration, because that's what we've raised --- previously, and if we change to any other exception type, --- exception handlers which match on IOError will no longer work. --- One case where it is known we rely on IOError being catchable --- is 'readPkgConfigDb' in cabal-install; there may be other --- user code that also assumes this. --- --- * The 'topHandler' does not know what 'Verbosity' is, because --- it gets called before we've done command line parsing (where --- the 'Verbosity' parameter would come from). --- --- This leads to two big architectural choices: --- --- * Although naively we might imagine 'Verbosity' to be a simple --- enumeration type, actually it is a full-on abstract data type --- that may contain arbitrarily complex information. At the --- moment, it is fully representable as a string, but we might --- eventually also use verbosity to let users register their --- own logging handler. --- --- * When we call 'die', we perform all the formatting and addition --- of extra information we need, and then ship this in the IOError --- to the top-level handler. Here are alternate designs that --- don't work: --- --- a) Ship the unformatted info to the handler. This doesn't --- work because at the point the handler gets the message, --- we've lost call stacks, and even if we did, we don't have access --- to 'Verbosity' to decide whether or not to render it. --- --- b) Print the information at the 'die' site, then raise an --- error. 
This means that if the exception is subsequently --- caught by a handler, we will still have emitted the output, --- which is not the correct behavior. --- --- For the top-level handler to "know" that an error message --- contains one of these fully formatted packets, we set a sentinel --- in one of IOError's extra fields. This is handled by --- 'ioeSetVerbatim' and 'ioeGetVerbatim'. --- - -{-# DEPRECATED dieWithLocation "Messages thrown with dieWithLocation can't be controlled with Verbosity; use dieWithLocation' instead" #-} -dieWithLocation :: FilePath -> Maybe Int -> String -> IO a -dieWithLocation filename lineno msg = - ioError . setLocation lineno - . flip ioeSetFileName (normalise filename) - $ userError msg - where - setLocation Nothing err = err - setLocation (Just n) err = ioeSetLocation err (show n) - _ = callStack -- TODO: Attach CallStack to exception - -{-# DEPRECATED die "Messages thrown with die can't be controlled with Verbosity; use die' instead, or dieNoVerbosity if Verbosity truly is not available" #-} -die :: String -> IO a -die = dieNoVerbosity - -dieNoVerbosity :: String -> IO a -dieNoVerbosity msg - = ioError (userError msg) - where - _ = callStack -- TODO: Attach CallStack to exception - --- | Tag an 'IOError' whose error string should be output to the screen --- verbatim. -ioeSetVerbatim :: IOError -> IOError -ioeSetVerbatim e = ioeSetLocation e "dieVerbatim" - --- | Check if an 'IOError' should be output verbatim to screen. -ioeGetVerbatim :: IOError -> Bool -ioeGetVerbatim e = ioeGetLocation e == "dieVerbatim" - --- | Create a 'userError' whose error text will be output verbatim -verbatimUserError :: String -> IOError -verbatimUserError = ioeSetVerbatim . userError - -dieWithLocation' :: Verbosity -> FilePath -> Maybe Int -> String -> IO a -dieWithLocation' verbosity filename mb_lineno msg = withFrozenCallStack $ do - ts <- getPOSIXTime - pname <- getProgName - ioError . verbatimUserError - . 
withMetadata ts AlwaysMark VerboseTrace verbosity - . wrapTextVerbosity verbosity - $ pname ++ ": " ++ - filename ++ (case mb_lineno of - Just lineno -> ":" ++ show lineno - Nothing -> "") ++ - ": " ++ msg - -die' :: Verbosity -> String -> IO a -die' verbosity msg = withFrozenCallStack $ do - ts <- getPOSIXTime - pname <- getProgName - ioError . verbatimUserError - . withMetadata ts AlwaysMark VerboseTrace verbosity - . wrapTextVerbosity verbosity - $ pname ++ ": " ++ msg - -dieNoWrap :: Verbosity -> String -> IO a -dieNoWrap verbosity msg = withFrozenCallStack $ do - -- TODO: should this have program name or not? - ts <- getPOSIXTime - ioError . verbatimUserError - . withMetadata ts AlwaysMark VerboseTrace verbosity - $ msg - --- | Given a block of IO code that may raise an exception, annotate --- it with the metadata from the current scope. Use this as close --- to external code that raises IO exceptions as possible, since --- this function unconditionally wraps the error message with a trace --- (so it is NOT idempotent.) -annotateIO :: Verbosity -> IO a -> IO a -annotateIO verbosity act = do - ts <- getPOSIXTime - modifyIOError (f ts) act - where - f ts ioe = ioeSetErrorString ioe - . withMetadata ts NeverMark VerboseTrace verbosity - $ ioeGetErrorString ioe - - -{-# NOINLINE topHandlerWith #-} -topHandlerWith :: forall a. (Exception.SomeException -> IO a) -> IO a -> IO a -topHandlerWith cont prog = do - -- By default, stderr to a terminal device is NoBuffering. 
But this - -- is *really slow* - hSetBuffering stderr LineBuffering - Exception.catches prog [ - Exception.Handler rethrowAsyncExceptions - , Exception.Handler rethrowExitStatus - , Exception.Handler handle - ] - where - -- Let async exceptions rise to the top for the default top-handler - rethrowAsyncExceptions :: Exception.AsyncException -> NoCallStackIO a - rethrowAsyncExceptions a = throwIO a - - -- ExitCode gets thrown asynchronously too, and we don't want to print it - rethrowExitStatus :: ExitCode -> NoCallStackIO a - rethrowExitStatus = throwIO - - -- Print all other exceptions - handle :: Exception.SomeException -> NoCallStackIO a - handle se = do - hFlush stdout - pname <- getProgName - hPutStr stderr (message pname se) - cont se - - message :: String -> Exception.SomeException -> String - message pname (Exception.SomeException se) = - case cast se :: Maybe Exception.IOException of - Just ioe - | ioeGetVerbatim ioe -> - -- Use the message verbatim - ioeGetErrorString ioe ++ "\n" - | isUserError ioe -> - let file = case ioeGetFileName ioe of - Nothing -> "" - Just path -> path ++ location ++ ": " - location = case ioeGetLocation ioe of - l@(n:_) | isDigit n -> ':' : l - _ -> "" - detail = ioeGetErrorString ioe - in wrapText (pname ++ ": " ++ file ++ detail) - _ -> - displaySomeException se ++ "\n" - --- | BC wrapper around 'Exception.displayException'. -displaySomeException :: Exception.Exception e => e -> String -displaySomeException se = -#if __GLASGOW_HASKELL__ < 710 - show se -#else - Exception.displayException se -#endif - -topHandler :: IO a -> IO a -topHandler prog = topHandlerWith (const $ exitWith (ExitFailure 1)) prog - --- | Non fatal conditions that may be indicative of an error or problem. --- --- We display these at the 'normal' verbosity level. --- -warn :: Verbosity -> String -> IO () -warn verbosity msg = withFrozenCallStack $ do - when (verbosity >= normal) $ do - ts <- getPOSIXTime - hFlush stdout - hPutStr stderr . 
withMetadata ts NormalMark FlagTrace verbosity - . wrapTextVerbosity verbosity - $ "Warning: " ++ msg - --- | Useful status messages. --- --- We display these at the 'normal' verbosity level. --- --- This is for the ordinary helpful status messages that users see. Just --- enough information to know that things are working but not floods of detail. --- -notice :: Verbosity -> String -> IO () -notice verbosity msg = withFrozenCallStack $ do - when (verbosity >= normal) $ do - ts <- getPOSIXTime - hPutStr stdout . withMetadata ts NormalMark FlagTrace verbosity - . wrapTextVerbosity verbosity - $ msg - --- | Display a message at 'normal' verbosity level, but without --- wrapping. --- -noticeNoWrap :: Verbosity -> String -> IO () -noticeNoWrap verbosity msg = withFrozenCallStack $ do - when (verbosity >= normal) $ do - ts <- getPOSIXTime - hPutStr stdout . withMetadata ts NormalMark FlagTrace verbosity $ msg - --- | Pretty-print a 'Disp.Doc' status message at 'normal' verbosity --- level. Use this if you need fancy formatting. --- -noticeDoc :: Verbosity -> Disp.Doc -> IO () -noticeDoc verbosity msg = withFrozenCallStack $ do - when (verbosity >= normal) $ do - ts <- getPOSIXTime - hPutStr stdout . withMetadata ts NormalMark FlagTrace verbosity - . Disp.renderStyle defaultStyle $ msg - --- | Display a "setup status message". Prefer using setupMessage' --- if possible. --- -setupMessage :: Verbosity -> String -> PackageIdentifier -> IO () -setupMessage verbosity msg pkgid = withFrozenCallStack $ do - noticeNoWrap verbosity (msg ++ ' ': display pkgid ++ "...") - --- | More detail on the operation of some action. --- --- We display these messages when the verbosity level is 'verbose' --- -info :: Verbosity -> String -> IO () -info verbosity msg = withFrozenCallStack $ - when (verbosity >= verbose) $ do - ts <- getPOSIXTime - hPutStr stdout . withMetadata ts NeverMark FlagTrace verbosity - . 
wrapTextVerbosity verbosity - $ msg - -infoNoWrap :: Verbosity -> String -> IO () -infoNoWrap verbosity msg = withFrozenCallStack $ - when (verbosity >= verbose) $ do - ts <- getPOSIXTime - hPutStr stdout . withMetadata ts NeverMark FlagTrace verbosity - $ msg - --- | Detailed internal debugging information --- --- We display these messages when the verbosity level is 'deafening' --- -debug :: Verbosity -> String -> IO () -debug verbosity msg = withFrozenCallStack $ - when (verbosity >= deafening) $ do - ts <- getPOSIXTime - hPutStr stdout . withMetadata ts NeverMark FlagTrace verbosity - . wrapTextVerbosity verbosity - $ msg - -- ensure that we don't lose output if we segfault/infinite loop - hFlush stdout - --- | A variant of 'debug' that doesn't perform the automatic line --- wrapping. Produces better output in some cases. -debugNoWrap :: Verbosity -> String -> IO () -debugNoWrap verbosity msg = withFrozenCallStack $ - when (verbosity >= deafening) $ do - ts <- getPOSIXTime - hPutStr stdout . withMetadata ts NeverMark FlagTrace verbosity - $ msg - -- ensure that we don't lose output if we segfault/infinite loop - hFlush stdout - --- | Perform an IO action, catching any IO exceptions and printing an error --- if one occurs. -chattyTry :: String -- ^ a description of the action we were attempting - -> IO () -- ^ the action itself - -> IO () -chattyTry desc action = - catchIO action $ \exception -> - putStrLn $ "Error while " ++ desc ++ ": " ++ show exception - --- | Run an IO computation, returning @e@ if it raises a "file --- does not exist" error. 
-handleDoesNotExist :: a -> NoCallStackIO a -> NoCallStackIO a -handleDoesNotExist e = - Exception.handleJust - (\ioe -> if isDoesNotExistError ioe then Just ioe else Nothing) - (\_ -> return e) - --- ----------------------------------------------------------------------------- --- Helper functions - --- | Wraps text unless the @+nowrap@ verbosity flag is active -wrapTextVerbosity :: Verbosity -> String -> String -wrapTextVerbosity verb - | isVerboseNoWrap verb = withTrailingNewline - | otherwise = withTrailingNewline . wrapText - - --- | Prepends a timestamp if @+timestamp@ verbosity flag is set --- --- This is used by 'withMetadata' --- -withTimestamp :: Verbosity -> POSIXTime -> String -> String -withTimestamp v ts msg - | isVerboseTimestamp v = msg' - | otherwise = msg -- no-op - where - msg' = case lines msg of - [] -> tsstr "\n" - l1:rest -> unlines (tsstr (' ':l1) : map (contpfx++) rest) - - -- format timestamp to be prepended to first line with msec precision - tsstr = showFFloat (Just 3) (realToFrac ts :: Double) - - -- continuation prefix for subsequent lines of msg - contpfx = replicate (length (tsstr " ")) ' ' - --- | Wrap output with a marker if @+markoutput@ verbosity flag is set. --- --- NB: Why is markoutput done with start/end markers, and not prefixes? --- Markers are more convenient to add (if we want to add prefixes, --- we have to 'lines' and then 'map'; here's it's just some --- concatenates). Note that even in the prefix case, we can't --- guarantee that the markers are unambiguous, because some of --- Cabal's output comes straight from external programs, where --- we don't have the ability to interpose on the output. 
--- --- This is used by 'withMetadata' --- -withOutputMarker :: Verbosity -> String -> String -withOutputMarker v xs | not (isVerboseMarkOutput v) = xs -withOutputMarker _ "" = "" -- Minor optimization, don't mark uselessly -withOutputMarker _ xs = - "-----BEGIN CABAL OUTPUT-----\n" ++ - withTrailingNewline xs ++ - "-----END CABAL OUTPUT-----\n" - --- | Append a trailing newline to a string if it does not --- already have a trailing newline. --- -withTrailingNewline :: String -> String -withTrailingNewline "" = "" -withTrailingNewline (x:xs) = x : go x xs - where - go _ (c:cs) = c : go c cs - go '\n' "" = "" - go _ "" = "\n" - --- | Prepend a call-site and/or call-stack based on Verbosity --- -withCallStackPrefix :: WithCallStack (TraceWhen -> Verbosity -> String -> String) -withCallStackPrefix tracer verbosity s = withFrozenCallStack $ - (if isVerboseCallSite verbosity - then parentSrcLocPrefix ++ - -- Hack: need a newline before starting output marker :( - if isVerboseMarkOutput verbosity - then "\n" - else "" - else "") ++ - (case traceWhen verbosity tracer of - Just pre -> pre ++ prettyCallStack callStack ++ "\n" - Nothing -> "") ++ - s - --- | When should we emit the call stack? We always emit --- for internal errors, emit the trace for errors when we --- are in verbose mode, and otherwise only emit it if --- explicitly asked for using the @+callstack@ verbosity --- flag. (At the moment, 'AlwaysTrace' is not used. --- -data TraceWhen - = AlwaysTrace - | VerboseTrace - | FlagTrace - deriving (Eq) - --- | Determine if we should emit a call stack. --- If we trace, it also emits any prefix we should append. -traceWhen :: Verbosity -> TraceWhen -> Maybe String -traceWhen _ AlwaysTrace = Just "" -traceWhen v VerboseTrace | v >= verbose = Just "" -traceWhen v FlagTrace | isVerboseCallStack v = Just "----\n" -traceWhen _ _ = Nothing - --- | When should we output the marker? 
Things like 'die' --- always get marked, but a 'NormalMark' will only be --- output if we're not a quiet verbosity. --- -data MarkWhen = AlwaysMark | NormalMark | NeverMark - --- | Add all necessary metadata to a logging message --- -withMetadata :: WithCallStack (POSIXTime -> MarkWhen -> TraceWhen -> Verbosity -> String -> String) -withMetadata ts marker tracer verbosity x = withFrozenCallStack $ - -- NB: order matters. Output marker first because we - -- don't want to capture call stacks. - withTrailingNewline - . withCallStackPrefix tracer verbosity - . (case marker of - AlwaysMark -> withOutputMarker verbosity - NormalMark | not (isVerboseQuiet verbosity) - -> withOutputMarker verbosity - | otherwise - -> id - NeverMark -> id) - -- Clear out any existing markers - . clearMarkers - . withTimestamp verbosity ts - $ x - -clearMarkers :: String -> String -clearMarkers s = unlines . filter isMarker $ lines s - where - isMarker "-----BEGIN CABAL OUTPUT-----" = False - isMarker "-----END CABAL OUTPUT-----" = False - isMarker _ = True - --- ----------------------------------------------------------------------------- --- rawSystem variants -maybeExit :: IO ExitCode -> IO () -maybeExit cmd = do - res <- cmd - unless (res == ExitSuccess) $ exitWith res - -printRawCommandAndArgs :: Verbosity -> FilePath -> [String] -> IO () -printRawCommandAndArgs verbosity path args = withFrozenCallStack $ - printRawCommandAndArgsAndEnv verbosity path args Nothing Nothing - -printRawCommandAndArgsAndEnv :: Verbosity - -> FilePath - -> [String] - -> Maybe FilePath - -> Maybe [(String, String)] - -> IO () -printRawCommandAndArgsAndEnv verbosity path args mcwd menv = do - case menv of - Just env -> debugNoWrap verbosity ("Environment: " ++ show env) - Nothing -> return () - case mcwd of - Just cwd -> debugNoWrap verbosity ("Working directory: " ++ show cwd) - Nothing -> return () - infoNoWrap verbosity (showCommandForUser path args) - --- Exit with the same exit code if the subcommand fails 
-rawSystemExit :: Verbosity -> FilePath -> [String] -> IO () -rawSystemExit verbosity path args = withFrozenCallStack $ do - printRawCommandAndArgs verbosity path args - hFlush stdout - exitcode <- rawSystem path args - unless (exitcode == ExitSuccess) $ do - debug verbosity $ path ++ " returned " ++ show exitcode - exitWith exitcode - -rawSystemExitCode :: Verbosity -> FilePath -> [String] -> IO ExitCode -rawSystemExitCode verbosity path args = withFrozenCallStack $ do - printRawCommandAndArgs verbosity path args - hFlush stdout - exitcode <- rawSystem path args - unless (exitcode == ExitSuccess) $ do - debug verbosity $ path ++ " returned " ++ show exitcode - return exitcode - -rawSystemExitWithEnv :: Verbosity - -> FilePath - -> [String] - -> [(String, String)] - -> IO () -rawSystemExitWithEnv verbosity path args env = withFrozenCallStack $ do - printRawCommandAndArgsAndEnv verbosity path args Nothing (Just env) - hFlush stdout - (_,_,_,ph) <- createProcess $ - (Process.proc path args) { Process.env = (Just env) -#ifdef MIN_VERSION_process -#if MIN_VERSION_process(1,2,0) --- delegate_ctlc has been added in process 1.2, and we still want to be able to --- bootstrap GHC on systems not having that version - , Process.delegate_ctlc = True -#endif -#endif - } - exitcode <- waitForProcess ph - unless (exitcode == ExitSuccess) $ do - debug verbosity $ path ++ " returned " ++ show exitcode - exitWith exitcode - --- Closes the passed in handles before returning. 
-rawSystemIOWithEnv :: Verbosity - -> FilePath - -> [String] - -> Maybe FilePath -- ^ New working dir or inherit - -> Maybe [(String, String)] -- ^ New environment or inherit - -> Maybe Handle -- ^ stdin - -> Maybe Handle -- ^ stdout - -> Maybe Handle -- ^ stderr - -> IO ExitCode -rawSystemIOWithEnv verbosity path args mcwd menv inp out err = withFrozenCallStack $ do - (_,_,_,ph) <- createProcessWithEnv verbosity path args mcwd menv - (mbToStd inp) (mbToStd out) (mbToStd err) - exitcode <- waitForProcess ph - unless (exitcode == ExitSuccess) $ do - debug verbosity $ path ++ " returned " ++ show exitcode - return exitcode - where - mbToStd :: Maybe Handle -> Process.StdStream - mbToStd = maybe Process.Inherit Process.UseHandle - -createProcessWithEnv :: - Verbosity - -> FilePath - -> [String] - -> Maybe FilePath -- ^ New working dir or inherit - -> Maybe [(String, String)] -- ^ New environment or inherit - -> Process.StdStream -- ^ stdin - -> Process.StdStream -- ^ stdout - -> Process.StdStream -- ^ stderr - -> IO (Maybe Handle, Maybe Handle, Maybe Handle,ProcessHandle) - -- ^ Any handles created for stdin, stdout, or stderr - -- with 'CreateProcess', and a handle to the process. -createProcessWithEnv verbosity path args mcwd menv inp out err = withFrozenCallStack $ do - printRawCommandAndArgsAndEnv verbosity path args mcwd menv - hFlush stdout - (inp', out', err', ph) <- createProcess $ - (Process.proc path args) { - Process.cwd = mcwd - , Process.env = menv - , Process.std_in = inp - , Process.std_out = out - , Process.std_err = err -#ifdef MIN_VERSION_process -#if MIN_VERSION_process(1,2,0) --- delegate_ctlc has been added in process 1.2, and we still want to be able to --- bootstrap GHC on systems not having that version - , Process.delegate_ctlc = True -#endif -#endif - } - return (inp', out', err', ph) - --- | Run a command and return its output. --- --- The output is assumed to be text in the locale encoding. 
--- -rawSystemStdout :: Verbosity -> FilePath -> [String] -> IO String -rawSystemStdout verbosity path args = withFrozenCallStack $ do - (IODataText output, errors, exitCode) <- rawSystemStdInOut verbosity path args - Nothing Nothing - Nothing IODataModeText - when (exitCode /= ExitSuccess) $ - die errors - return output - --- | Run a command and return its output, errors and exit status. Optionally --- also supply some input. Also provides control over whether the binary/text --- mode of the input and output. --- -rawSystemStdInOut :: Verbosity - -> FilePath -- ^ Program location - -> [String] -- ^ Arguments - -> Maybe FilePath -- ^ New working dir or inherit - -> Maybe [(String, String)] -- ^ New environment or inherit - -> Maybe IOData -- ^ input text and binary mode - -> IODataMode -- ^ output in binary mode - -> IO (IOData, String, ExitCode) -- ^ output, errors, exit -rawSystemStdInOut verbosity path args mcwd menv input outputMode = withFrozenCallStack $ do - printRawCommandAndArgs verbosity path args - - Exception.bracket - (runInteractiveProcess path args mcwd menv) - (\(inh,outh,errh,_) -> hClose inh >> hClose outh >> hClose errh) - $ \(inh,outh,errh,pid) -> do - - -- output mode depends on what the caller wants - -- but the errors are always assumed to be text (in the current locale) - hSetBinaryMode errh False - - -- fork off a couple threads to pull on the stderr and stdout - -- so if the process writes to stderr we do not block. 
- - err <- hGetContents errh - - out <- IOData.hGetContents outh outputMode - - mv <- newEmptyMVar - let force str = do - mberr <- Exception.try (evaluate (rnf str) >> return ()) - putMVar mv (mberr :: Either IOError ()) - _ <- forkIO $ force out - _ <- forkIO $ force err - - -- push all the input, if any - case input of - Nothing -> return () - Just inputData -> do - -- input mode depends on what the caller wants - IOData.hPutContents inh inputData - --TODO: this probably fails if the process refuses to consume - -- or if it closes stdin (eg if it exits) - - -- wait for both to finish, in either order - mberr1 <- takeMVar mv - mberr2 <- takeMVar mv - - -- wait for the program to terminate - exitcode <- waitForProcess pid - unless (exitcode == ExitSuccess) $ - debug verbosity $ path ++ " returned " ++ show exitcode - ++ if null err then "" else - " with error message:\n" ++ err - ++ case input of - Nothing -> "" - Just d | IOData.null d -> "" - Just (IODataText inp) -> "\nstdin input:\n" ++ inp - Just (IODataBinary inp) -> "\nstdin input (binary):\n" ++ show inp - - -- Check if we we hit an exception while consuming the output - -- (e.g. a text decoding error) - reportOutputIOError mberr1 - reportOutputIOError mberr2 - - return (out, err, exitcode) - where - reportOutputIOError :: Either IOError () -> NoCallStackIO () - reportOutputIOError = - either (\e -> throwIO (ioeSetFileName e ("output of " ++ path))) - return - - -{-# DEPRECATED findProgramLocation - "No longer used within Cabal, try findProgramOnSearchPath" #-} --- | Look for a program on the path. -findProgramLocation :: Verbosity -> FilePath -> IO (Maybe FilePath) -findProgramLocation verbosity prog = withFrozenCallStack $ do - debug verbosity $ "searching for " ++ prog ++ " in path." 
- res <- findExecutable prog - case res of - Nothing -> debug verbosity ("Cannot find " ++ prog ++ " on the path") - Just path -> debug verbosity ("found " ++ prog ++ " at "++ path) - return res - - --- | Look for a program and try to find it's version number. It can accept --- either an absolute path or the name of a program binary, in which case we --- will look for the program on the path. --- -findProgramVersion :: String -- ^ version args - -> (String -> String) -- ^ function to select version - -- number from program output - -> Verbosity - -> FilePath -- ^ location - -> IO (Maybe Version) -findProgramVersion versionArg selectVersion verbosity path = withFrozenCallStack $ do - str <- rawSystemStdout verbosity path [versionArg] - `catchIO` (\_ -> return "") - `catchExit` (\_ -> return "") - let version :: Maybe Version - version = simpleParse (selectVersion str) - case version of - Nothing -> warn verbosity $ "cannot determine version of " ++ path - ++ " :\n" ++ show str - Just v -> debug verbosity $ path ++ " is version " ++ display v - return version - - --- | Like the Unix xargs program. Useful for when we've got very long command --- lines that might overflow an OS limit on command line length and so you --- need to invoke a command multiple times to get all the args in. --- --- Use it with either of the rawSystem variants above. For example: --- --- > xargs (32*1024) (rawSystemExit verbosity) prog fixedArgs bigArgs --- -xargs :: Int -> ([String] -> IO ()) - -> [String] -> [String] -> IO () -xargs maxSize rawSystemFun fixedArgs bigArgs = - let fixedArgSize = sum (map length fixedArgs) + length fixedArgs - chunkSize = maxSize - fixedArgSize - in traverse_ (rawSystemFun . 
(fixedArgs ++)) (chunks chunkSize bigArgs) - - where chunks len = unfoldr $ \s -> - if null s then Nothing - else Just (chunk [] len s) - - chunk acc _ [] = (reverse acc,[]) - chunk acc len (s:ss) - | len' < len = chunk (s:acc) (len-len'-1) ss - | otherwise = (reverse acc, s:ss) - where len' = length s - --- ------------------------------------------------------------ --- * File Utilities --- ------------------------------------------------------------ - ----------------- --- Finding files - --- | Find a file by looking in a search path. The file path must match exactly. --- -findFile :: [FilePath] -- ^search locations - -> FilePath -- ^File Name - -> IO FilePath -findFile searchPath fileName = - findFirstFile id - [ path fileName - | path <- nub searchPath] - >>= maybe (die $ fileName ++ " doesn't exist") return - --- | Find a file by looking in a search path with one of a list of possible --- file extensions. The file base name should be given and it will be tried --- with each of the extensions in each element of the search path. --- -findFileWithExtension :: [String] - -> [FilePath] - -> FilePath - -> NoCallStackIO (Maybe FilePath) -findFileWithExtension extensions searchPath baseName = - findFirstFile id - [ path baseName <.> ext - | path <- nub searchPath - , ext <- nub extensions ] - -findAllFilesWithExtension :: [String] - -> [FilePath] - -> FilePath - -> NoCallStackIO [FilePath] -findAllFilesWithExtension extensions searchPath basename = - findAllFiles id - [ path basename <.> ext - | path <- nub searchPath - , ext <- nub extensions ] - --- | Like 'findFileWithExtension' but returns which element of the search path --- the file was found in, and the file path relative to that base directory. 
--- -findFileWithExtension' :: [String] - -> [FilePath] - -> FilePath - -> NoCallStackIO (Maybe (FilePath, FilePath)) -findFileWithExtension' extensions searchPath baseName = - findFirstFile (uncurry ()) - [ (path, baseName <.> ext) - | path <- nub searchPath - , ext <- nub extensions ] - -findFirstFile :: (a -> FilePath) -> [a] -> NoCallStackIO (Maybe a) -findFirstFile file = findFirst - where findFirst [] = return Nothing - findFirst (x:xs) = do exists <- doesFileExist (file x) - if exists - then return (Just x) - else findFirst xs - -findAllFiles :: (a -> FilePath) -> [a] -> NoCallStackIO [a] -findAllFiles file = filterM (doesFileExist . file) - --- | Finds the files corresponding to a list of Haskell module names. --- --- As 'findModuleFile' but for a list of module names. --- -findModuleFiles :: [FilePath] -- ^ build prefix (location of objects) - -> [String] -- ^ search suffixes - -> [ModuleName] -- ^ modules - -> IO [(FilePath, FilePath)] -findModuleFiles searchPath extensions moduleNames = - traverse (findModuleFile searchPath extensions) moduleNames - --- | Find the file corresponding to a Haskell module name. --- --- This is similar to 'findFileWithExtension'' but specialised to a module --- name. The function fails if the file corresponding to the module is missing. --- -findModuleFile :: [FilePath] -- ^ build prefix (location of objects) - -> [String] -- ^ search suffixes - -> ModuleName -- ^ module - -> IO (FilePath, FilePath) -findModuleFile searchPath extensions mod_name = - maybe notFound return - =<< findFileWithExtension' extensions searchPath - (ModuleName.toFilePath mod_name) - where - notFound = die $ "Error: Could not find module: " ++ display mod_name - ++ " with any suffix: " ++ show extensions - ++ " in the search path: " ++ show searchPath - --- | List all the files in a directory and all subdirectories. --- --- The order places files in sub-directories after all the files in their --- parent directories. 
The list is generated lazily so is not well defined if --- the source directory structure changes before the list is used. --- -getDirectoryContentsRecursive :: FilePath -> IO [FilePath] -getDirectoryContentsRecursive topdir = recurseDirectories [""] - where - recurseDirectories :: [FilePath] -> IO [FilePath] - recurseDirectories [] = return [] - recurseDirectories (dir:dirs) = unsafeInterleaveIO $ do - (files, dirs') <- collect [] [] =<< getDirectoryContents (topdir dir) - files' <- recurseDirectories (dirs' ++ dirs) - return (files ++ files') - - where - collect files dirs' [] = return (reverse files - ,reverse dirs') - collect files dirs' (entry:entries) | ignore entry - = collect files dirs' entries - collect files dirs' (entry:entries) = do - let dirEntry = dir entry - isDirectory <- doesDirectoryExist (topdir dirEntry) - if isDirectory - then collect files (dirEntry:dirs') entries - else collect (dirEntry:files) dirs' entries - - ignore ['.'] = True - ignore ['.', '.'] = True - ignore _ = False - ------------------------- --- Environment variables - --- | Is this directory in the system search path? -isInSearchPath :: FilePath -> NoCallStackIO Bool -isInSearchPath path = fmap (elem path) getSearchPath - -addLibraryPath :: OS - -> [FilePath] - -> [(String,String)] - -> [(String,String)] -addLibraryPath os paths = addEnv - where - pathsString = intercalate [searchPathSeparator] paths - ldPath = case os of - OSX -> "DYLD_LIBRARY_PATH" - _ -> "LD_LIBRARY_PATH" - - addEnv [] = [(ldPath,pathsString)] - addEnv ((key,value):xs) - | key == ldPath = - if null value - then (key,pathsString):xs - else (key,value ++ (searchPathSeparator:pathsString)):xs - | otherwise = (key,value):addEnv xs - --------------------- --- Modification time - --- | Compare the modification times of two files to see if the first is newer --- than the second. The first file must exist but the second need not. --- The expected use case is when the second file is generated using the first. 
--- In this use case, if the result is True then the second file is out of date. --- -moreRecentFile :: FilePath -> FilePath -> NoCallStackIO Bool -moreRecentFile a b = do - exists <- doesFileExist b - if not exists - then return True - else do tb <- getModificationTime b - ta <- getModificationTime a - return (ta > tb) - --- | Like 'moreRecentFile', but also checks that the first file exists. -existsAndIsMoreRecentThan :: FilePath -> FilePath -> NoCallStackIO Bool -existsAndIsMoreRecentThan a b = do - exists <- doesFileExist a - if not exists - then return False - else a `moreRecentFile` b - ----------------------------------------- --- Copying and installing files and dirs - --- | Same as 'createDirectoryIfMissing' but logs at higher verbosity levels. --- -createDirectoryIfMissingVerbose :: Verbosity - -> Bool -- ^ Create its parents too? - -> FilePath - -> IO () -createDirectoryIfMissingVerbose verbosity create_parents path0 - | create_parents = withFrozenCallStack $ createDirs (parents path0) - | otherwise = withFrozenCallStack $ createDirs (take 1 (parents path0)) - where - parents = reverse . scanl1 () . splitDirectories . normalise - - createDirs [] = return () - createDirs (dir:[]) = createDir dir throwIO - createDirs (dir:dirs) = - createDir dir $ \_ -> do - createDirs dirs - createDir dir throwIO - - createDir :: FilePath -> (IOException -> IO ()) -> IO () - createDir dir notExistHandler = do - r <- tryIO $ createDirectoryVerbose verbosity dir - case (r :: Either IOException ()) of - Right () -> return () - Left e - | isDoesNotExistError e -> notExistHandler e - -- createDirectory (and indeed POSIX mkdir) does not distinguish - -- between a dir already existing and a file already existing. So we - -- check for it here. Unfortunately there is a slight race condition - -- here, but we think it is benign. 
It could report an exception in - -- the case that the dir did exist but another process deletes the - -- directory and creates a file in its place before we can check - -- that the directory did indeed exist. - | isAlreadyExistsError e -> (do - isDir <- doesDirectoryExist dir - unless isDir $ throwIO e - ) `catchIO` ((\_ -> return ()) :: IOException -> IO ()) - | otherwise -> throwIO e - -createDirectoryVerbose :: Verbosity -> FilePath -> IO () -createDirectoryVerbose verbosity dir = withFrozenCallStack $ do - info verbosity $ "creating " ++ dir - createDirectory dir - setDirOrdinary dir - --- | Copies a file without copying file permissions. The target file is created --- with default permissions. Any existing target file is replaced. --- --- At higher verbosity levels it logs an info message. --- -copyFileVerbose :: Verbosity -> FilePath -> FilePath -> IO () -copyFileVerbose verbosity src dest = withFrozenCallStack $ do - info verbosity ("copy " ++ src ++ " to " ++ dest) - copyFile src dest - --- | Install an ordinary file. This is like a file copy but the permissions --- are set appropriately for an installed file. On Unix it is \"-rw-r--r--\" --- while on Windows it uses the default permissions for the target directory. --- -installOrdinaryFile :: Verbosity -> FilePath -> FilePath -> IO () -installOrdinaryFile verbosity src dest = withFrozenCallStack $ do - info verbosity ("Installing " ++ src ++ " to " ++ dest) - copyOrdinaryFile src dest - --- | Install an executable file. This is like a file copy but the permissions --- are set appropriately for an installed file. On Unix it is \"-rwxr-xr-x\" --- while on Windows it uses the default permissions for the target directory. 
--- -installExecutableFile :: Verbosity -> FilePath -> FilePath -> IO () -installExecutableFile verbosity src dest = withFrozenCallStack $ do - info verbosity ("Installing executable " ++ src ++ " to " ++ dest) - copyExecutableFile src dest - --- | Install a file that may or not be executable, preserving permissions. -installMaybeExecutableFile :: Verbosity -> FilePath -> FilePath -> IO () -installMaybeExecutableFile verbosity src dest = withFrozenCallStack $ do - perms <- getPermissions src - if (executable perms) --only checks user x bit - then installExecutableFile verbosity src dest - else installOrdinaryFile verbosity src dest - --- | Given a relative path to a file, copy it to the given directory, preserving --- the relative path and creating the parent directories if needed. -copyFileTo :: Verbosity -> FilePath -> FilePath -> IO () -copyFileTo verbosity dir file = withFrozenCallStack $ do - let targetFile = dir file - createDirectoryIfMissingVerbose verbosity True (takeDirectory targetFile) - installOrdinaryFile verbosity file targetFile - --- | Common implementation of 'copyFiles', 'installOrdinaryFiles', --- 'installExecutableFiles' and 'installMaybeExecutableFiles'. -copyFilesWith :: (Verbosity -> FilePath -> FilePath -> IO ()) - -> Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO () -copyFilesWith doCopy verbosity targetDir srcFiles = withFrozenCallStack $ do - - -- Create parent directories for everything - let dirs = map (targetDir ) . nub . map (takeDirectory . snd) $ srcFiles - traverse_ (createDirectoryIfMissingVerbose verbosity True) dirs - - -- Copy all the files - sequence_ [ let src = srcBase srcFile - dest = targetDir srcFile - in doCopy verbosity src dest - | (srcBase, srcFile) <- srcFiles ] - --- | Copies a bunch of files to a target directory, preserving the directory --- structure in the target location. The target directories are created if they --- do not exist. 
--- --- The files are identified by a pair of base directory and a path relative to --- that base. It is only the relative part that is preserved in the --- destination. --- --- For example: --- --- > copyFiles normal "dist/src" --- > [("", "src/Foo.hs"), ("dist/build/", "src/Bar.hs")] --- --- This would copy \"src\/Foo.hs\" to \"dist\/src\/src\/Foo.hs\" and --- copy \"dist\/build\/src\/Bar.hs\" to \"dist\/src\/src\/Bar.hs\". --- --- This operation is not atomic. Any IO failure during the copy (including any --- missing source files) leaves the target in an unknown state so it is best to --- use it with a freshly created directory so that it can be simply deleted if --- anything goes wrong. --- -copyFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO () -copyFiles v fp fs = withFrozenCallStack (copyFilesWith copyFileVerbose v fp fs) - --- | This is like 'copyFiles' but uses 'installOrdinaryFile'. --- -installOrdinaryFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO () -installOrdinaryFiles v fp fs = withFrozenCallStack (copyFilesWith installOrdinaryFile v fp fs) - --- | This is like 'copyFiles' but uses 'installExecutableFile'. --- -installExecutableFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] - -> IO () -installExecutableFiles v fp fs = withFrozenCallStack (copyFilesWith installExecutableFile v fp fs) - --- | This is like 'copyFiles' but uses 'installMaybeExecutableFile'. --- -installMaybeExecutableFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] - -> IO () -installMaybeExecutableFiles v fp fs = withFrozenCallStack (copyFilesWith installMaybeExecutableFile v fp fs) - --- | This installs all the files in a directory to a target location, --- preserving the directory layout. All the files are assumed to be ordinary --- rather than executable files. 
--- -installDirectoryContents :: Verbosity -> FilePath -> FilePath -> IO () -installDirectoryContents verbosity srcDir destDir = withFrozenCallStack $ do - info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.") - srcFiles <- getDirectoryContentsRecursive srcDir - installOrdinaryFiles verbosity destDir [ (srcDir, f) | f <- srcFiles ] - --- | Recursively copy the contents of one directory to another path. -copyDirectoryRecursive :: Verbosity -> FilePath -> FilePath -> IO () -copyDirectoryRecursive verbosity srcDir destDir = withFrozenCallStack $ do - info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.") - srcFiles <- getDirectoryContentsRecursive srcDir - copyFilesWith (const copyFile) verbosity destDir [ (srcDir, f) - | f <- srcFiles ] - -------------------- --- File permissions - --- | Like 'doesFileExist', but also checks that the file is executable. -doesExecutableExist :: FilePath -> NoCallStackIO Bool -doesExecutableExist f = do - exists <- doesFileExist f - if exists - then do perms <- getPermissions f - return (executable perms) - else return False - ---------------------------------- --- Deprecated file copy functions - -{-# DEPRECATED smartCopySources - "Use findModuleFiles and copyFiles or installOrdinaryFiles" #-} -smartCopySources :: Verbosity -> [FilePath] -> FilePath - -> [ModuleName] -> [String] -> IO () -smartCopySources verbosity searchPath targetDir moduleNames extensions = withFrozenCallStack $ - findModuleFiles searchPath extensions moduleNames - >>= copyFiles verbosity targetDir - -{-# DEPRECATED copyDirectoryRecursiveVerbose - "You probably want installDirectoryContents instead" #-} -copyDirectoryRecursiveVerbose :: Verbosity -> FilePath -> FilePath -> IO () -copyDirectoryRecursiveVerbose verbosity srcDir destDir = withFrozenCallStack $ do - info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.") - srcFiles <- getDirectoryContentsRecursive srcDir - copyFiles verbosity destDir [ 
(srcDir, f) | f <- srcFiles ] - ---------------------------- --- Temporary files and dirs - --- | Advanced options for 'withTempFile' and 'withTempDirectory'. -data TempFileOptions = TempFileOptions { - optKeepTempFiles :: Bool -- ^ Keep temporary files? - } - -defaultTempFileOptions :: TempFileOptions -defaultTempFileOptions = TempFileOptions { optKeepTempFiles = False } - --- | Use a temporary filename that doesn't already exist. --- -withTempFile :: FilePath -- ^ Temp dir to create the file in - -> String -- ^ File name template. See 'openTempFile'. - -> (FilePath -> Handle -> IO a) -> IO a -withTempFile tmpDir template action = - withTempFileEx defaultTempFileOptions tmpDir template action - --- | A version of 'withTempFile' that additionally takes a 'TempFileOptions' --- argument. -withTempFileEx :: TempFileOptions - -> FilePath -- ^ Temp dir to create the file in - -> String -- ^ File name template. See 'openTempFile'. - -> (FilePath -> Handle -> IO a) -> IO a -withTempFileEx opts tmpDir template action = - Exception.bracket - (openTempFile tmpDir template) - (\(name, handle) -> do hClose handle - unless (optKeepTempFiles opts) $ - handleDoesNotExist () . removeFile $ name) - (withLexicalCallStack (uncurry action)) - --- | Create and use a temporary directory. --- --- Creates a new temporary directory inside the given directory, making use --- of the template. The temp directory is deleted after use. For example: --- --- > withTempDirectory verbosity "src" "sdist." $ \tmpDir -> do ... --- --- The @tmpDir@ will be a new subdirectory of the given directory, e.g. --- @src/sdist.342@. --- -withTempDirectory :: Verbosity -> FilePath -> String -> (FilePath -> IO a) -> IO a -withTempDirectory verbosity targetDir template f = withFrozenCallStack $ - withTempDirectoryEx verbosity defaultTempFileOptions targetDir template - (withLexicalCallStack f) - --- | A version of 'withTempDirectory' that additionally takes a --- 'TempFileOptions' argument. 
-withTempDirectoryEx :: Verbosity -> TempFileOptions - -> FilePath -> String -> (FilePath -> IO a) -> IO a -withTempDirectoryEx _verbosity opts targetDir template f = withFrozenCallStack $ - Exception.bracket - (createTempDirectory targetDir template) - (unless (optKeepTempFiles opts) - . handleDoesNotExist () . removeDirectoryRecursive) - (withLexicalCallStack f) - ------------------------------------ --- Safely reading and writing files - -{-# DEPRECATED rewriteFile "Use rewriteFileEx so that Verbosity is respected" #-} -rewriteFile :: FilePath -> String -> IO () -rewriteFile = rewriteFileEx normal - --- | Write a file but only if it would have new content. If we would be writing --- the same as the existing content then leave the file as is so that we do not --- update the file's modification time. --- --- NB: the file is assumed to be ASCII-encoded. -rewriteFileEx :: Verbosity -> FilePath -> String -> IO () -rewriteFileEx verbosity path newContent = - flip catchIO mightNotExist $ do - existingContent <- annotateIO verbosity $ readFile path - _ <- evaluate (length existingContent) - unless (existingContent == newContent) $ - annotateIO verbosity $ - writeFileAtomic path (BS.Char8.pack newContent) - where - mightNotExist e | isDoesNotExistError e - = annotateIO verbosity $ writeFileAtomic path - (BS.Char8.pack newContent) - | otherwise - = ioError e - --- | The path name that represents the current directory. --- In Unix, it's @\".\"@, but this is system-specific. --- (E.g. AmigaOS uses the empty string @\"\"@ for the current directory.) -currentDir :: FilePath -currentDir = "." 
- -shortRelativePath :: FilePath -> FilePath -> FilePath -shortRelativePath from to = - case dropCommonPrefix (splitDirectories from) (splitDirectories to) of - (stuff, path) -> joinPath (map (const "..") stuff ++ path) - where - dropCommonPrefix :: Eq a => [a] -> [a] -> ([a],[a]) - dropCommonPrefix (x:xs) (y:ys) - | x == y = dropCommonPrefix xs ys - dropCommonPrefix xs ys = (xs,ys) - --- | Drop the extension if it's one of 'exeExtensions', or return the path --- unchanged. -dropExeExtension :: FilePath -> FilePath -dropExeExtension filepath = - case splitExtension filepath of - (filepath', extension) | extension `elem` exeExtensions -> filepath' - | otherwise -> filepath - --- | List of possible executable file extensions on the current platform. -exeExtensions :: [String] -exeExtensions = case buildOS of - -- Possible improvement: on Windows, read the list of extensions from the - -- PATHEXT environment variable. By default PATHEXT is ".com; .exe; .bat; - -- .cmd". - Windows -> ["", "exe"] - Ghcjs -> ["", "exe"] - _ -> [""] - --- ------------------------------------------------------------ --- * Finding the description file --- ------------------------------------------------------------ - --- |Package description file (/pkgname/@.cabal@) -defaultPackageDesc :: Verbosity -> IO FilePath -defaultPackageDesc _verbosity = tryFindPackageDesc currentDir - --- |Find a package description file in the given directory. Looks for --- @.cabal@ files. 
-findPackageDesc :: FilePath -- ^Where to look - -> NoCallStackIO (Either String FilePath) -- ^.cabal -findPackageDesc dir - = do files <- getDirectoryContents dir - -- to make sure we do not mistake a ~/.cabal/ dir for a .cabal - -- file we filter to exclude dirs and null base file names: - cabalFiles <- filterM doesFileExist - [ dir file - | file <- files - , let (name, ext) = splitExtension file - , not (null name) && ext == ".cabal" ] - case cabalFiles of - [] -> return (Left noDesc) - [cabalFile] -> return (Right cabalFile) - multiple -> return (Left $ multiDesc multiple) - - where - noDesc :: String - noDesc = "No cabal file found.\n" - ++ "Please create a package description file .cabal" - - multiDesc :: [String] -> String - multiDesc l = "Multiple cabal files found.\n" - ++ "Please use only one of: " - ++ intercalate ", " l - --- |Like 'findPackageDesc', but calls 'die' in case of error. -tryFindPackageDesc :: FilePath -> IO FilePath -tryFindPackageDesc dir = either die return =<< findPackageDesc dir - -{-# DEPRECATED defaultHookedPackageDesc "Use findHookedPackageDesc with the proper base directory instead" #-} --- |Optional auxiliary package information file (/pkgname/@.buildinfo@) -defaultHookedPackageDesc :: IO (Maybe FilePath) -defaultHookedPackageDesc = findHookedPackageDesc currentDir - --- |Find auxiliary package information in the given directory. --- Looks for @.buildinfo@ files. 
-findHookedPackageDesc - :: FilePath -- ^Directory to search - -> IO (Maybe FilePath) -- ^/dir/@\/@/pkgname/@.buildinfo@, if present -findHookedPackageDesc dir = do - files <- getDirectoryContents dir - buildInfoFiles <- filterM doesFileExist - [ dir file - | file <- files - , let (name, ext) = splitExtension file - , not (null name) && ext == buildInfoExt ] - case buildInfoFiles of - [] -> return Nothing - [f] -> return (Just f) - _ -> die ("Multiple files with extension " ++ buildInfoExt) - -buildInfoExt :: String -buildInfoExt = ".buildinfo" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Simple.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Simple.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,855 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple --- Copyright : Isaac Jones 2003-2005 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This is the command line front end to the Simple build system. When given --- the parsed command-line args and package information, is able to perform --- basic commands like configure, build, install, register, etc. --- --- This module exports the main functions that Setup.hs scripts use. It --- re-exports the 'UserHooks' type, the standard entry points like --- 'defaultMain' and 'defaultMainWithHooks' and the predefined sets of --- 'UserHooks' that custom @Setup.hs@ scripts can extend to add their own --- behaviour. --- --- This module isn't called \"Simple\" because it's simple. Far from --- it. 
It's called \"Simple\" because it does complicated things to --- simple software. --- --- The original idea was that there could be different build systems that all --- presented the same compatible command line interfaces. There is still a --- "Distribution.Make" system but in practice no packages use it. - -{- -Work around this warning: -libraries/Cabal/Distribution/Simple.hs:78:0: - Warning: In the use of `runTests' - (imported from Distribution.Simple.UserHooks): - Deprecated: "Please use the new testing interface instead!" --} -{-# OPTIONS_GHC -fno-warn-deprecations #-} - -module Distribution.Simple ( - module Distribution.Package, - module Distribution.Version, - module Distribution.License, - module Distribution.Simple.Compiler, - module Language.Haskell.Extension, - -- * Simple interface - defaultMain, defaultMainNoRead, defaultMainArgs, - -- * Customization - UserHooks(..), Args, - defaultMainWithHooks, defaultMainWithHooksArgs, - defaultMainWithHooksNoRead, defaultMainWithHooksNoReadArgs, - -- ** Standard sets of hooks - simpleUserHooks, - autoconfUserHooks, - defaultUserHooks, emptyUserHooks, - -- ** Utils - defaultHookedPackageDesc - ) where - -import Prelude () -import Control.Exception (try) -import Distribution.Compat.Prelude - --- local -import Distribution.Simple.Compiler hiding (Flag) -import Distribution.Simple.UserHooks -import Distribution.Package -import Distribution.PackageDescription hiding (Flag) -import Distribution.PackageDescription.Configuration -import Distribution.Simple.Program -import Distribution.Simple.Program.Db -import Distribution.Simple.PreProcess -import Distribution.Simple.Setup -import Distribution.Simple.Command - -import Distribution.Simple.Build -import Distribution.Simple.SrcDist -import Distribution.Simple.Register - -import Distribution.Simple.Configure - -import Distribution.Simple.LocalBuildInfo -import Distribution.Simple.Bench -import Distribution.Simple.BuildPaths -import Distribution.Simple.Test -import 
Distribution.Simple.Install -import Distribution.Simple.Haddock -import Distribution.Simple.Doctest -import Distribution.Simple.Utils -import Distribution.Utils.NubList -import Distribution.Verbosity -import Language.Haskell.Extension -import Distribution.Version -import Distribution.License -import Distribution.Text - --- Base -import System.Environment (getArgs, getProgName) -import System.Directory (removeFile, doesFileExist - ,doesDirectoryExist, removeDirectoryRecursive) -import System.Exit (exitWith,ExitCode(..)) -import System.FilePath (searchPathSeparator, takeDirectory, (), splitDirectories, dropDrive) -import Distribution.Compat.Directory (makeAbsolute) -import Distribution.Compat.Environment (getEnvironment) -import Distribution.Compat.GetShortPathName (getShortPathName) - -import Data.List (unionBy, (\\)) - -import Distribution.PackageDescription.Parsec - --- | A simple implementation of @main@ for a Cabal setup script. --- It reads the package description file using IO, and performs the --- action specified on the command line. -defaultMain :: IO () -defaultMain = getArgs >>= defaultMainHelper simpleUserHooks - --- | A version of 'defaultMain' that is passed the command line --- arguments, rather than getting them from the environment. -defaultMainArgs :: [String] -> IO () -defaultMainArgs = defaultMainHelper simpleUserHooks - --- | A customizable version of 'defaultMain'. -defaultMainWithHooks :: UserHooks -> IO () -defaultMainWithHooks hooks = getArgs >>= defaultMainHelper hooks - --- | A customizable version of 'defaultMain' that also takes the command --- line arguments. -defaultMainWithHooksArgs :: UserHooks -> [String] -> IO () -defaultMainWithHooksArgs = defaultMainHelper - --- | Like 'defaultMain', but accepts the package description as input --- rather than using IO to read it. 
-defaultMainNoRead :: GenericPackageDescription -> IO () -defaultMainNoRead = defaultMainWithHooksNoRead simpleUserHooks - --- | A customizable version of 'defaultMainNoRead'. -defaultMainWithHooksNoRead :: UserHooks -> GenericPackageDescription -> IO () -defaultMainWithHooksNoRead hooks pkg_descr = - getArgs >>= - defaultMainHelper hooks { readDesc = return (Just pkg_descr) } - --- | A customizable version of 'defaultMainNoRead' that also takes the --- command line arguments. --- --- @since 2.2.0.0 -defaultMainWithHooksNoReadArgs :: UserHooks -> GenericPackageDescription -> [String] -> IO () -defaultMainWithHooksNoReadArgs hooks pkg_descr = - defaultMainHelper hooks { readDesc = return (Just pkg_descr) } - -defaultMainHelper :: UserHooks -> Args -> IO () -defaultMainHelper hooks args = topHandler $ - case commandsRun (globalCommand commands) commands args of - CommandHelp help -> printHelp help - CommandList opts -> printOptionsList opts - CommandErrors errs -> printErrors errs - CommandReadyToGo (flags, commandParse) -> - case commandParse of - _ | fromFlag (globalVersion flags) -> printVersion - | fromFlag (globalNumericVersion flags) -> printNumericVersion - CommandHelp help -> printHelp help - CommandList opts -> printOptionsList opts - CommandErrors errs -> printErrors errs - CommandReadyToGo action -> action - - where - printHelp help = getProgName >>= putStr . help - printOptionsList = putStr . 
unlines - printErrors errs = do - putStr (intercalate "\n" errs) - exitWith (ExitFailure 1) - printNumericVersion = putStrLn $ display cabalVersion - printVersion = putStrLn $ "Cabal library version " - ++ display cabalVersion - - progs = addKnownPrograms (hookedPrograms hooks) defaultProgramDb - commands = - [configureCommand progs `commandAddAction` - \fs as -> configureAction hooks fs as >> return () - ,buildCommand progs `commandAddAction` buildAction hooks - ,replCommand progs `commandAddAction` replAction hooks - ,installCommand `commandAddAction` installAction hooks - ,copyCommand `commandAddAction` copyAction hooks - ,doctestCommand `commandAddAction` doctestAction hooks - ,haddockCommand `commandAddAction` haddockAction hooks - ,cleanCommand `commandAddAction` cleanAction hooks - ,sdistCommand `commandAddAction` sdistAction hooks - ,hscolourCommand `commandAddAction` hscolourAction hooks - ,registerCommand `commandAddAction` registerAction hooks - ,unregisterCommand `commandAddAction` unregisterAction hooks - ,testCommand `commandAddAction` testAction hooks - ,benchmarkCommand `commandAddAction` benchAction hooks - ] - --- | Combine the preprocessors in the given hooks with the --- preprocessors built into cabal. 
-allSuffixHandlers :: UserHooks - -> [PPSuffixHandler] -allSuffixHandlers hooks - = overridesPP (hookedPreProcessors hooks) knownSuffixHandlers - where - overridesPP :: [PPSuffixHandler] -> [PPSuffixHandler] -> [PPSuffixHandler] - overridesPP = unionBy (\x y -> fst x == fst y) - -configureAction :: UserHooks -> ConfigFlags -> Args -> IO LocalBuildInfo -configureAction hooks flags args = do - distPref <- findDistPrefOrDefault (configDistPref flags) - let flags' = flags { configDistPref = toFlag distPref - , configArgs = args } - - -- See docs for 'HookedBuildInfo' - pbi <- preConf hooks args flags' - - (mb_pd_file, pkg_descr0) <- confPkgDescr hooks verbosity - (flagToMaybe (configCabalFilePath flags)) - - let epkg_descr = (pkg_descr0, pbi) - - localbuildinfo0 <- confHook hooks epkg_descr flags' - - -- remember the .cabal filename if we know it - -- and all the extra command line args - let localbuildinfo = localbuildinfo0 { - pkgDescrFile = mb_pd_file, - extraConfigArgs = args - } - writePersistBuildConfig distPref localbuildinfo - - let pkg_descr = localPkgDescr localbuildinfo - postConf hooks args flags' pkg_descr localbuildinfo - return localbuildinfo - where - verbosity = fromFlag (configVerbosity flags) - -confPkgDescr :: UserHooks -> Verbosity -> Maybe FilePath - -> IO (Maybe FilePath, GenericPackageDescription) -confPkgDescr hooks verbosity mb_path = do - mdescr <- readDesc hooks - case mdescr of - Just descr -> return (Nothing, descr) - Nothing -> do - pdfile <- case mb_path of - Nothing -> defaultPackageDesc verbosity - Just path -> return path - info verbosity "Using Parsec parser" - descr <- readGenericPackageDescription verbosity pdfile - return (Just pdfile, descr) - -buildAction :: UserHooks -> BuildFlags -> Args -> IO () -buildAction hooks flags args = do - distPref <- findDistPrefOrDefault (buildDistPref flags) - let verbosity = fromFlag $ buildVerbosity flags - lbi <- getBuildConfig hooks verbosity distPref - let flags' = flags { buildDistPref = 
toFlag distPref - , buildCabalFilePath = maybeToFlag (cabalFilePath lbi)} - - progs <- reconfigurePrograms verbosity - (buildProgramPaths flags') - (buildProgramArgs flags') - (withPrograms lbi) - - hookedAction preBuild buildHook postBuild - (return lbi { withPrograms = progs }) - hooks flags' { buildArgs = args } args - -replAction :: UserHooks -> ReplFlags -> Args -> IO () -replAction hooks flags args = do - distPref <- findDistPrefOrDefault (replDistPref flags) - let verbosity = fromFlag $ replVerbosity flags - flags' = flags { replDistPref = toFlag distPref } - - lbi <- getBuildConfig hooks verbosity distPref - progs <- reconfigurePrograms verbosity - (replProgramPaths flags') - (replProgramArgs flags') - (withPrograms lbi) - - -- As far as I can tell, the only reason this doesn't use - -- 'hookedActionWithArgs' is because the arguments of 'replHook' - -- takes the args explicitly. UGH. -- ezyang - pbi <- preRepl hooks args flags' - let pkg_descr0 = localPkgDescr lbi - sanityCheckHookedBuildInfo pkg_descr0 pbi - let pkg_descr = updatePackageDescription pbi pkg_descr0 - lbi' = lbi { withPrograms = progs - , localPkgDescr = pkg_descr } - replHook hooks pkg_descr lbi' hooks flags' args - postRepl hooks args flags' pkg_descr lbi' - -hscolourAction :: UserHooks -> HscolourFlags -> Args -> IO () -hscolourAction hooks flags args = do - distPref <- findDistPrefOrDefault (hscolourDistPref flags) - let verbosity = fromFlag $ hscolourVerbosity flags - lbi <- getBuildConfig hooks verbosity distPref - let flags' = flags { hscolourDistPref = toFlag distPref - , hscolourCabalFilePath = maybeToFlag (cabalFilePath lbi)} - - hookedAction preHscolour hscolourHook postHscolour - (getBuildConfig hooks verbosity distPref) - hooks flags' args - -doctestAction :: UserHooks -> DoctestFlags -> Args -> IO () -doctestAction hooks flags args = do - distPref <- findDistPrefOrDefault (doctestDistPref flags) - let verbosity = fromFlag $ doctestVerbosity flags - flags' = flags { 
doctestDistPref = toFlag distPref } - - lbi <- getBuildConfig hooks verbosity distPref - progs <- reconfigurePrograms verbosity - (doctestProgramPaths flags') - (doctestProgramArgs flags') - (withPrograms lbi) - - hookedAction preDoctest doctestHook postDoctest - (return lbi { withPrograms = progs }) - hooks flags' args - -haddockAction :: UserHooks -> HaddockFlags -> Args -> IO () -haddockAction hooks flags args = do - distPref <- findDistPrefOrDefault (haddockDistPref flags) - let verbosity = fromFlag $ haddockVerbosity flags - lbi <- getBuildConfig hooks verbosity distPref - let flags' = flags { haddockDistPref = toFlag distPref - , haddockCabalFilePath = maybeToFlag (cabalFilePath lbi)} - - progs <- reconfigurePrograms verbosity - (haddockProgramPaths flags') - (haddockProgramArgs flags') - (withPrograms lbi) - - hookedAction preHaddock haddockHook postHaddock - (return lbi { withPrograms = progs }) - hooks flags' { haddockArgs = args } args - -cleanAction :: UserHooks -> CleanFlags -> Args -> IO () -cleanAction hooks flags args = do - distPref <- findDistPrefOrDefault (cleanDistPref flags) - - elbi <- tryGetBuildConfig hooks verbosity distPref - let flags' = flags { cleanDistPref = toFlag distPref - , cleanCabalFilePath = case elbi of - Left _ -> mempty - Right lbi -> maybeToFlag (cabalFilePath lbi)} - - pbi <- preClean hooks args flags' - - (_, ppd) <- confPkgDescr hooks verbosity Nothing - -- It might seem like we are doing something clever here - -- but we're really not: if you look at the implementation - -- of 'clean' in the end all the package description is - -- used for is to clear out @extra-tmp-files@. IMO, - -- the configure script goo should go into @dist@ too! 
- -- -- ezyang - let pkg_descr0 = flattenPackageDescription ppd - -- We don't sanity check for clean as an error - -- here would prevent cleaning: - --sanityCheckHookedBuildInfo pkg_descr0 pbi - let pkg_descr = updatePackageDescription pbi pkg_descr0 - - cleanHook hooks pkg_descr () hooks flags' - postClean hooks args flags' pkg_descr () - where - verbosity = fromFlag (cleanVerbosity flags) - -copyAction :: UserHooks -> CopyFlags -> Args -> IO () -copyAction hooks flags args = do - distPref <- findDistPrefOrDefault (copyDistPref flags) - let verbosity = fromFlag $ copyVerbosity flags - lbi <- getBuildConfig hooks verbosity distPref - let flags' = flags { copyDistPref = toFlag distPref - , copyCabalFilePath = maybeToFlag (cabalFilePath lbi)} - hookedAction preCopy copyHook postCopy - (getBuildConfig hooks verbosity distPref) - hooks flags' { copyArgs = args } args - -installAction :: UserHooks -> InstallFlags -> Args -> IO () -installAction hooks flags args = do - distPref <- findDistPrefOrDefault (installDistPref flags) - let verbosity = fromFlag $ installVerbosity flags - lbi <- getBuildConfig hooks verbosity distPref - let flags' = flags { installDistPref = toFlag distPref - , installCabalFilePath = maybeToFlag (cabalFilePath lbi)} - hookedAction preInst instHook postInst - (getBuildConfig hooks verbosity distPref) - hooks flags' args - -sdistAction :: UserHooks -> SDistFlags -> Args -> IO () -sdistAction hooks flags args = do - distPref <- findDistPrefOrDefault (sDistDistPref flags) - let flags' = flags { sDistDistPref = toFlag distPref } - pbi <- preSDist hooks args flags' - - mlbi <- maybeGetPersistBuildConfig distPref - - -- NB: It would be TOTALLY WRONG to use the 'PackageDescription' - -- store in the 'LocalBuildInfo' for the rest of @sdist@, because - -- that would result in only the files that would be built - -- according to the user's configure being packaged up. 
- -- In fact, it is not obvious why we need to read the - -- 'LocalBuildInfo' in the first place, except that we want - -- to do some architecture-independent preprocessing which - -- needs to be configured. This is totally awful, see - -- GH#130. - - (_, ppd) <- confPkgDescr hooks verbosity Nothing - - let pkg_descr0 = flattenPackageDescription ppd - sanityCheckHookedBuildInfo pkg_descr0 pbi - let pkg_descr = updatePackageDescription pbi pkg_descr0 - mlbi' = fmap (\lbi -> lbi { localPkgDescr = pkg_descr }) mlbi - - sDistHook hooks pkg_descr mlbi' hooks flags' - postSDist hooks args flags' pkg_descr mlbi' - where - verbosity = fromFlag (sDistVerbosity flags) - -testAction :: UserHooks -> TestFlags -> Args -> IO () -testAction hooks flags args = do - distPref <- findDistPrefOrDefault (testDistPref flags) - let verbosity = fromFlag $ testVerbosity flags - flags' = flags { testDistPref = toFlag distPref } - - localBuildInfo <- getBuildConfig hooks verbosity distPref - let pkg_descr = localPkgDescr localBuildInfo - -- It is safe to do 'runTests' before the new test handler because the - -- default action is a no-op and if the package uses the old test interface - -- the new handler will find no tests. 
- runTests hooks args False pkg_descr localBuildInfo - hookedActionWithArgs preTest testHook postTest - (getBuildConfig hooks verbosity distPref) - hooks flags' args - -benchAction :: UserHooks -> BenchmarkFlags -> Args -> IO () -benchAction hooks flags args = do - distPref <- findDistPrefOrDefault (benchmarkDistPref flags) - let verbosity = fromFlag $ benchmarkVerbosity flags - flags' = flags { benchmarkDistPref = toFlag distPref } - hookedActionWithArgs preBench benchHook postBench - (getBuildConfig hooks verbosity distPref) - hooks flags' args - -registerAction :: UserHooks -> RegisterFlags -> Args -> IO () -registerAction hooks flags args = do - distPref <- findDistPrefOrDefault (regDistPref flags) - let verbosity = fromFlag $ regVerbosity flags - lbi <- getBuildConfig hooks verbosity distPref - let flags' = flags { regDistPref = toFlag distPref - , regCabalFilePath = maybeToFlag (cabalFilePath lbi)} - hookedAction preReg regHook postReg - (getBuildConfig hooks verbosity distPref) - hooks flags' { regArgs = args } args - -unregisterAction :: UserHooks -> RegisterFlags -> Args -> IO () -unregisterAction hooks flags args = do - distPref <- findDistPrefOrDefault (regDistPref flags) - let verbosity = fromFlag $ regVerbosity flags - lbi <- getBuildConfig hooks verbosity distPref - let flags' = flags { regDistPref = toFlag distPref - , regCabalFilePath = maybeToFlag (cabalFilePath lbi)} - hookedAction preUnreg unregHook postUnreg - (getBuildConfig hooks verbosity distPref) - hooks flags' args - -hookedAction :: (UserHooks -> Args -> flags -> IO HookedBuildInfo) - -> (UserHooks -> PackageDescription -> LocalBuildInfo - -> UserHooks -> flags -> IO ()) - -> (UserHooks -> Args -> flags -> PackageDescription - -> LocalBuildInfo -> IO ()) - -> IO LocalBuildInfo - -> UserHooks -> flags -> Args -> IO () -hookedAction pre_hook cmd_hook = - hookedActionWithArgs pre_hook (\h _ pd lbi uh flags -> - cmd_hook h pd lbi uh flags) - -hookedActionWithArgs :: (UserHooks -> Args -> 
flags -> IO HookedBuildInfo) - -> (UserHooks -> Args -> PackageDescription -> LocalBuildInfo - -> UserHooks -> flags -> IO ()) - -> (UserHooks -> Args -> flags -> PackageDescription - -> LocalBuildInfo -> IO ()) - -> IO LocalBuildInfo - -> UserHooks -> flags -> Args -> IO () -hookedActionWithArgs pre_hook cmd_hook post_hook - get_build_config hooks flags args = do - pbi <- pre_hook hooks args flags - lbi0 <- get_build_config - let pkg_descr0 = localPkgDescr lbi0 - sanityCheckHookedBuildInfo pkg_descr0 pbi - let pkg_descr = updatePackageDescription pbi pkg_descr0 - lbi = lbi0 { localPkgDescr = pkg_descr } - cmd_hook hooks args pkg_descr lbi hooks flags - post_hook hooks args flags pkg_descr lbi - -sanityCheckHookedBuildInfo :: PackageDescription -> HookedBuildInfo -> IO () -sanityCheckHookedBuildInfo PackageDescription { library = Nothing } (Just _,_) - = die $ "The buildinfo contains info for a library, " - ++ "but the package does not have a library." - -sanityCheckHookedBuildInfo pkg_descr (_, hookExes) - | not (null nonExistant) - = die $ "The buildinfo contains info for an executable called '" - ++ display (head nonExistant) ++ "' but the package does not have a " - ++ "executable with that name." - where - pkgExeNames = nub (map exeName (executables pkg_descr)) - hookExeNames = nub (map fst hookExes) - nonExistant = hookExeNames \\ pkgExeNames - -sanityCheckHookedBuildInfo _ _ = return () - --- | Try to read the 'localBuildInfoFile' -tryGetBuildConfig :: UserHooks -> Verbosity -> FilePath - -> IO (Either ConfigStateFileError LocalBuildInfo) -tryGetBuildConfig u v = try . getBuildConfig u v - - --- | Read the 'localBuildInfoFile' or throw an exception. 
-getBuildConfig :: UserHooks -> Verbosity -> FilePath -> IO LocalBuildInfo -getBuildConfig hooks verbosity distPref = do - lbi_wo_programs <- getPersistBuildConfig distPref - -- Restore info about unconfigured programs, since it is not serialized - let lbi = lbi_wo_programs { - withPrograms = restoreProgramDb - (builtinPrograms ++ hookedPrograms hooks) - (withPrograms lbi_wo_programs) - } - - case pkgDescrFile lbi of - Nothing -> return lbi - Just pkg_descr_file -> do - outdated <- checkPersistBuildConfigOutdated distPref pkg_descr_file - if outdated - then reconfigure pkg_descr_file lbi - else return lbi - - where - reconfigure :: FilePath -> LocalBuildInfo -> IO LocalBuildInfo - reconfigure pkg_descr_file lbi = do - notice verbosity $ pkg_descr_file ++ " has been changed. " - ++ "Re-configuring with most recently used options. " - ++ "If this fails, please run configure manually.\n" - let cFlags = configFlags lbi - let cFlags' = cFlags { - -- Since the list of unconfigured programs is not serialized, - -- restore it to the same value as normally used at the beginning - -- of a configure run: - configPrograms_ = restoreProgramDb - (builtinPrograms ++ hookedPrograms hooks) - `fmap` configPrograms_ cFlags, - - -- Use the current, not saved verbosity level: - configVerbosity = Flag verbosity - } - configureAction hooks cFlags' (extraConfigArgs lbi) - - --- -------------------------------------------------------------------------- --- Cleaning - -clean :: PackageDescription -> CleanFlags -> IO () -clean pkg_descr flags = do - let distPref = fromFlagOrDefault defaultDistPref $ cleanDistPref flags - notice verbosity "cleaning..." - - maybeConfig <- if fromFlag (cleanSaveConf flags) - then maybeGetPersistBuildConfig distPref - else return Nothing - - -- remove the whole dist/ directory rather than tracking exactly what files - -- we created in there. 
- chattyTry "removing dist/" $ do - exists <- doesDirectoryExist distPref - when exists (removeDirectoryRecursive distPref) - - -- Any extra files the user wants to remove - traverse_ removeFileOrDirectory (extraTmpFiles pkg_descr) - - -- If the user wanted to save the config, write it back - traverse_ (writePersistBuildConfig distPref) maybeConfig - - where - removeFileOrDirectory :: FilePath -> NoCallStackIO () - removeFileOrDirectory fname = do - isDir <- doesDirectoryExist fname - isFile <- doesFileExist fname - if isDir then removeDirectoryRecursive fname - else when isFile $ removeFile fname - verbosity = fromFlag (cleanVerbosity flags) - --- -------------------------------------------------------------------------- --- Default hooks - --- | Hooks that correspond to a plain instantiation of the --- \"simple\" build system -simpleUserHooks :: UserHooks -simpleUserHooks = - emptyUserHooks { - confHook = configure, - postConf = finalChecks, - buildHook = defaultBuildHook, - replHook = defaultReplHook, - copyHook = \desc lbi _ f -> install desc lbi f, - -- 'install' has correct 'copy' behavior with params - testHook = defaultTestHook, - benchHook = defaultBenchHook, - instHook = defaultInstallHook, - sDistHook = \p l h f -> sdist p l f srcPref (allSuffixHandlers h), - cleanHook = \p _ _ f -> clean p f, - hscolourHook = \p l h f -> hscolour p l (allSuffixHandlers h) f, - haddockHook = \p l h f -> haddock p l (allSuffixHandlers h) f, - doctestHook = \p l h f -> doctest p l (allSuffixHandlers h) f, - regHook = defaultRegHook, - unregHook = \p l _ f -> unregister p l f - } - where - finalChecks _args flags pkg_descr lbi = - checkForeignDeps pkg_descr lbi (lessVerbose verbosity) - where - verbosity = fromFlag (configVerbosity flags) - --- | Basic autoconf 'UserHooks': --- --- * 'postConf' runs @.\/configure@, if present. 
--- --- * the pre-hooks 'preBuild', 'preClean', 'preCopy', 'preInst', --- 'preReg' and 'preUnreg' read additional build information from --- /package/@.buildinfo@, if present. --- --- Thus @configure@ can use local system information to generate --- /package/@.buildinfo@ and possibly other files. - -{-# DEPRECATED defaultUserHooks - "Use simpleUserHooks or autoconfUserHooks, unless you need Cabal-1.2\n compatibility in which case you must stick with defaultUserHooks" #-} -defaultUserHooks :: UserHooks -defaultUserHooks = autoconfUserHooks { - confHook = \pkg flags -> do - let verbosity = fromFlag (configVerbosity flags) - warn verbosity - "defaultUserHooks in Setup script is deprecated." - confHook autoconfUserHooks pkg flags, - postConf = oldCompatPostConf - } - -- This is the annoying old version that only runs configure if it exists. - -- It's here for compatibility with existing Setup.hs scripts. See: - -- https://github.com/haskell/cabal/issues/158 - where oldCompatPostConf args flags pkg_descr lbi - = do let verbosity = fromFlag (configVerbosity flags) - baseDir lbi' = fromMaybe "" (takeDirectory <$> cabalFilePath lbi') - - confExists <- doesFileExist $ (baseDir lbi) "configure" - when confExists $ - runConfigureScript verbosity - backwardsCompatHack flags lbi - - pbi <- getHookedBuildInfo (buildDir lbi) verbosity - sanityCheckHookedBuildInfo pkg_descr pbi - let pkg_descr' = updatePackageDescription pbi pkg_descr - lbi' = lbi { localPkgDescr = pkg_descr' } - postConf simpleUserHooks args flags pkg_descr' lbi' - - backwardsCompatHack = True - -autoconfUserHooks :: UserHooks -autoconfUserHooks - = simpleUserHooks - { - postConf = defaultPostConf, - preBuild = readHookWithArgs buildVerbosity buildDistPref, -- buildCabalFilePath, - preCopy = readHookWithArgs copyVerbosity copyDistPref, - preClean = readHook cleanVerbosity cleanDistPref, - preInst = readHook installVerbosity installDistPref, - preHscolour = readHook hscolourVerbosity hscolourDistPref, - preHaddock 
= readHookWithArgs haddockVerbosity haddockDistPref, - preReg = readHook regVerbosity regDistPref, - preUnreg = readHook regVerbosity regDistPref - } - where defaultPostConf :: Args -> ConfigFlags -> PackageDescription - -> LocalBuildInfo -> IO () - defaultPostConf args flags pkg_descr lbi - = do let verbosity = fromFlag (configVerbosity flags) - baseDir lbi' = fromMaybe "" (takeDirectory <$> cabalFilePath lbi') - confExists <- doesFileExist $ (baseDir lbi) "configure" - if confExists - then runConfigureScript verbosity - backwardsCompatHack flags lbi - else die "configure script not found." - - pbi <- getHookedBuildInfo (buildDir lbi) verbosity - sanityCheckHookedBuildInfo pkg_descr pbi - let pkg_descr' = updatePackageDescription pbi pkg_descr - lbi' = lbi { localPkgDescr = pkg_descr' } - postConf simpleUserHooks args flags pkg_descr' lbi' - - backwardsCompatHack = False - - readHookWithArgs :: (a -> Flag Verbosity) - -> (a -> Flag FilePath) - -> Args -> a - -> IO HookedBuildInfo - readHookWithArgs get_verbosity get_dist_pref _ flags = do - dist_dir <- findDistPrefOrDefault (get_dist_pref flags) - getHookedBuildInfo (dist_dir "build") verbosity - where - verbosity = fromFlag (get_verbosity flags) - - readHook :: (a -> Flag Verbosity) - -> (a -> Flag FilePath) - -> Args -> a -> IO HookedBuildInfo - readHook get_verbosity get_dist_pref a flags = do - noExtraFlags a - dist_dir <- findDistPrefOrDefault (get_dist_pref flags) - getHookedBuildInfo (dist_dir "build") verbosity - where - verbosity = fromFlag (get_verbosity flags) - -runConfigureScript :: Verbosity -> Bool -> ConfigFlags -> LocalBuildInfo - -> IO () -runConfigureScript verbosity backwardsCompatHack flags lbi = do - env <- getEnvironment - let programDb = withPrograms lbi - (ccProg, ccFlags) <- configureCCompiler verbosity programDb - ccProgShort <- getShortPathName ccProg - -- The C compiler's compilation and linker flags (e.g. 
- -- "C compiler flags" and "Gcc Linker flags" from GHC) have already - -- been merged into ccFlags, so we set both CFLAGS and LDFLAGS - -- to ccFlags - -- We don't try and tell configure which ld to use, as we don't have - -- a way to pass its flags too - configureFile <- makeAbsolute $ - fromMaybe "." (takeDirectory <$> cabalFilePath lbi) "configure" - -- autoconf is fussy about filenames, and has a set of forbidden - -- characters that can't appear in the build directory, etc: - -- https://www.gnu.org/software/autoconf/manual/autoconf.html#File-System-Conventions - -- - -- This has caused hard-to-debug failures in the past (#5368), so we - -- detect some cases early and warn with a clear message. Windows's - -- use of backslashes is problematic here, so we'll switch to - -- slashes, but we do still want to fail on backslashes in POSIX - -- paths. - -- - -- TODO: We don't check for colons, tildes or leading dashes. We - -- also should check the builddir's path, destdir, and all other - -- paths as well. - let configureFile' = intercalate "/" $ splitDirectories configureFile - for_ badAutoconfCharacters $ \(c, cname) -> - when (c `elem` dropDrive configureFile') $ - warn verbosity $ - "The path to the './configure' script, '" ++ configureFile' - ++ "', contains the character '" ++ [c] ++ "' (" ++ cname ++ ")." - ++ " This may cause the script to fail with an obscure error, or for" - ++ " building the package to fail later." 
- let extraPath = fromNubList $ configProgramPathExtra flags - let cflagsEnv = maybe (unwords ccFlags) (++ (" " ++ unwords ccFlags)) - $ lookup "CFLAGS" env - spSep = [searchPathSeparator] - pathEnv = maybe (intercalate spSep extraPath) - ((intercalate spSep extraPath ++ spSep)++) $ lookup "PATH" env - overEnv = ("CFLAGS", Just cflagsEnv) : - [("PATH", Just pathEnv) | not (null extraPath)] - args' = configureFile':args ++ ["CC=" ++ ccProgShort] - shProg = simpleProgram "sh" - progDb = modifyProgramSearchPath - (\p -> map ProgramSearchPathDir extraPath ++ p) emptyProgramDb - shConfiguredProg <- lookupProgram shProg - `fmap` configureProgram verbosity shProg progDb - case shConfiguredProg of - Just sh -> runProgramInvocation verbosity $ - (programInvocation (sh {programOverrideEnv = overEnv}) args') - { progInvokeCwd = Just (buildDir lbi) } - Nothing -> die notFoundMsg - - where - args = configureArgs backwardsCompatHack flags - - badAutoconfCharacters = - [ (' ', "space") - , ('\t', "tab") - , ('\n', "newline") - , ('\0', "null") - , ('"', "double quote") - , ('#', "hash") - , ('$', "dollar sign") - , ('&', "ampersand") - , ('\'', "single quote") - , ('(', "left bracket") - , (')', "right bracket") - , ('*', "star") - , (';', "semicolon") - , ('<', "less-than sign") - , ('=', "equals sign") - , ('>', "greater-than sign") - , ('?', "question mark") - , ('[', "left square bracket") - , ('\\', "backslash") - , ('`', "backtick") - , ('|', "pipe") - ] - - notFoundMsg = "The package has a './configure' script. " - ++ "If you are on Windows, This requires a " - ++ "Unix compatibility toolchain such as MinGW+MSYS or Cygwin. " - ++ "If you are not on Windows, ensure that an 'sh' command " - ++ "is discoverable in your path." 
- -getHookedBuildInfo :: FilePath -> Verbosity -> IO HookedBuildInfo -getHookedBuildInfo build_dir verbosity = do - maybe_infoFile <- findHookedPackageDesc build_dir - case maybe_infoFile of - Nothing -> return emptyHookedBuildInfo - Just infoFile -> do - info verbosity $ "Reading parameters from " ++ infoFile - readHookedBuildInfo verbosity infoFile - -defaultTestHook :: Args -> PackageDescription -> LocalBuildInfo - -> UserHooks -> TestFlags -> IO () -defaultTestHook args pkg_descr localbuildinfo _ flags = - test args pkg_descr localbuildinfo flags - -defaultBenchHook :: Args -> PackageDescription -> LocalBuildInfo - -> UserHooks -> BenchmarkFlags -> IO () -defaultBenchHook args pkg_descr localbuildinfo _ flags = - bench args pkg_descr localbuildinfo flags - -defaultInstallHook :: PackageDescription -> LocalBuildInfo - -> UserHooks -> InstallFlags -> IO () -defaultInstallHook pkg_descr localbuildinfo _ flags = do - let copyFlags = defaultCopyFlags { - copyDistPref = installDistPref flags, - copyDest = installDest flags, - copyVerbosity = installVerbosity flags - } - install pkg_descr localbuildinfo copyFlags - let registerFlags = defaultRegisterFlags { - regDistPref = installDistPref flags, - regInPlace = installInPlace flags, - regPackageDB = installPackageDB flags, - regVerbosity = installVerbosity flags - } - when (hasLibs pkg_descr) $ register pkg_descr localbuildinfo registerFlags - -defaultBuildHook :: PackageDescription -> LocalBuildInfo - -> UserHooks -> BuildFlags -> IO () -defaultBuildHook pkg_descr localbuildinfo hooks flags = - build pkg_descr localbuildinfo flags (allSuffixHandlers hooks) - -defaultReplHook :: PackageDescription -> LocalBuildInfo - -> UserHooks -> ReplFlags -> [String] -> IO () -defaultReplHook pkg_descr localbuildinfo hooks flags args = - repl pkg_descr localbuildinfo flags (allSuffixHandlers hooks) args - -defaultRegHook :: PackageDescription -> LocalBuildInfo - -> UserHooks -> RegisterFlags -> IO () -defaultRegHook pkg_descr 
localbuildinfo _ flags = - if hasLibs pkg_descr - then register pkg_descr localbuildinfo flags - else setupMessage (fromFlag (regVerbosity flags)) - "Package contains no library to register:" (packageId pkg_descr) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseExceptionId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseExceptionId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseExceptionId.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseExceptionId.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,213 +0,0 @@ --- This file is generated. See Makefile's spdx rule -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.SPDX.LicenseExceptionId ( - LicenseExceptionId (..), - licenseExceptionId, - licenseExceptionName, - mkLicenseExceptionId, - licenseExceptionIdList, - ) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Pretty -import Distribution.Parsec.Class -import Distribution.Utils.Generic (isAsciiAlphaNum) -import Distribution.SPDX.LicenseListVersion - -import qualified Data.Map.Strict as Map -import qualified Distribution.Compat.CharParsing as P -import qualified Text.PrettyPrint as Disp - -------------------------------------------------------------------------------- --- LicenseExceptionId -------------------------------------------------------------------------------- - --- | SPDX License identifier -data LicenseExceptionId - = DS389_exception -- ^ @389-exception@, 389 Directory Server Exception - | Autoconf_exception_2_0 -- ^ @Autoconf-exception-2.0@, Autoconf exception 2.0 - | Autoconf_exception_3_0 -- ^ @Autoconf-exception-3.0@, Autoconf exception 3.0 - | Bison_exception_2_2 -- ^ @Bison-exception-2.2@, Bison exception 2.2 - | Bootloader_exception -- ^ @Bootloader-exception@, 
Bootloader Distribution Exception - | Classpath_exception_2_0 -- ^ @Classpath-exception-2.0@, Classpath exception 2.0 - | CLISP_exception_2_0 -- ^ @CLISP-exception-2.0@, CLISP exception 2.0 - | DigiRule_FOSS_exception -- ^ @DigiRule-FOSS-exception@, DigiRule FOSS License Exception - | ECos_exception_2_0 -- ^ @eCos-exception-2.0@, eCos exception 2.0 - | Fawkes_Runtime_exception -- ^ @Fawkes-Runtime-exception@, Fawkes Runtime Exception - | FLTK_exception -- ^ @FLTK-exception@, FLTK exception - | Font_exception_2_0 -- ^ @Font-exception-2.0@, Font exception 2.0 - | Freertos_exception_2_0 -- ^ @freertos-exception-2.0@, FreeRTOS Exception 2.0 - | GCC_exception_2_0 -- ^ @GCC-exception-2.0@, GCC Runtime Library exception 2.0 - | GCC_exception_3_1 -- ^ @GCC-exception-3.1@, GCC Runtime Library exception 3.1 - | Gnu_javamail_exception -- ^ @gnu-javamail-exception@, GNU JavaMail exception - | I2p_gpl_java_exception -- ^ @i2p-gpl-java-exception@, i2p GPL+Java Exception - | Libtool_exception -- ^ @Libtool-exception@, Libtool Exception - | Linux_syscall_note -- ^ @Linux-syscall-note@, Linux Syscall Note - | LLVM_exception -- ^ @LLVM-exception@, LLVM Exception, SPDX License List 3.2 - | LZMA_exception -- ^ @LZMA-exception@, LZMA exception - | Mif_exception -- ^ @mif-exception@, Macros and Inline Functions Exception - | Nokia_Qt_exception_1_1 -- ^ @Nokia-Qt-exception-1.1@, Nokia Qt LGPL exception 1.1 - | OCCT_exception_1_0 -- ^ @OCCT-exception-1.0@, Open CASCADE Exception 1.0 - | OpenJDK_assembly_exception_1_0 -- ^ @OpenJDK-assembly-exception-1.0@, OpenJDK Assembly exception 1.0, SPDX License List 3.2 - | Openvpn_openssl_exception -- ^ @openvpn-openssl-exception@, OpenVPN OpenSSL Exception - | PS_or_PDF_font_exception_20170817 -- ^ @PS-or-PDF-font-exception-20170817@, PS/PDF font exception (2017-08-17), SPDX License List 3.2 - | Qt_GPL_exception_1_0 -- ^ @Qt-GPL-exception-1.0@, Qt GPL exception 1.0, SPDX License List 3.2 - | Qt_LGPL_exception_1_1 -- ^ @Qt-LGPL-exception-1.1@, Qt 
LGPL exception 1.1, SPDX License List 3.2 - | Qwt_exception_1_0 -- ^ @Qwt-exception-1.0@, Qwt exception 1.0 - | U_boot_exception_2_0 -- ^ @u-boot-exception-2.0@, U-Boot exception 2.0 - | WxWindows_exception_3_1 -- ^ @WxWindows-exception-3.1@, WxWindows Library Exception 3.1 - deriving (Eq, Ord, Enum, Bounded, Show, Read, Typeable, Data, Generic) - -instance Binary LicenseExceptionId - -instance Pretty LicenseExceptionId where - pretty = Disp.text . licenseExceptionId - -instance Parsec LicenseExceptionId where - parsec = do - n <- some $ P.satisfy $ \c -> isAsciiAlphaNum c || c == '-' || c == '.' - v <- askCabalSpecVersion - maybe (fail $ "Unknown SPDX license exception identifier: " ++ n) return $ - mkLicenseExceptionId (cabalSpecVersionToSPDXListVersion v) n - -instance NFData LicenseExceptionId where - rnf l = l `seq` () - -------------------------------------------------------------------------------- --- License Data -------------------------------------------------------------------------------- - --- | License SPDX identifier, e.g. @"BSD-3-Clause"@. 
-licenseExceptionId :: LicenseExceptionId -> String -licenseExceptionId DS389_exception = "389-exception" -licenseExceptionId Autoconf_exception_2_0 = "Autoconf-exception-2.0" -licenseExceptionId Autoconf_exception_3_0 = "Autoconf-exception-3.0" -licenseExceptionId Bison_exception_2_2 = "Bison-exception-2.2" -licenseExceptionId Bootloader_exception = "Bootloader-exception" -licenseExceptionId Classpath_exception_2_0 = "Classpath-exception-2.0" -licenseExceptionId CLISP_exception_2_0 = "CLISP-exception-2.0" -licenseExceptionId DigiRule_FOSS_exception = "DigiRule-FOSS-exception" -licenseExceptionId ECos_exception_2_0 = "eCos-exception-2.0" -licenseExceptionId Fawkes_Runtime_exception = "Fawkes-Runtime-exception" -licenseExceptionId FLTK_exception = "FLTK-exception" -licenseExceptionId Font_exception_2_0 = "Font-exception-2.0" -licenseExceptionId Freertos_exception_2_0 = "freertos-exception-2.0" -licenseExceptionId GCC_exception_2_0 = "GCC-exception-2.0" -licenseExceptionId GCC_exception_3_1 = "GCC-exception-3.1" -licenseExceptionId Gnu_javamail_exception = "gnu-javamail-exception" -licenseExceptionId I2p_gpl_java_exception = "i2p-gpl-java-exception" -licenseExceptionId Libtool_exception = "Libtool-exception" -licenseExceptionId Linux_syscall_note = "Linux-syscall-note" -licenseExceptionId LLVM_exception = "LLVM-exception" -licenseExceptionId LZMA_exception = "LZMA-exception" -licenseExceptionId Mif_exception = "mif-exception" -licenseExceptionId Nokia_Qt_exception_1_1 = "Nokia-Qt-exception-1.1" -licenseExceptionId OCCT_exception_1_0 = "OCCT-exception-1.0" -licenseExceptionId OpenJDK_assembly_exception_1_0 = "OpenJDK-assembly-exception-1.0" -licenseExceptionId Openvpn_openssl_exception = "openvpn-openssl-exception" -licenseExceptionId PS_or_PDF_font_exception_20170817 = "PS-or-PDF-font-exception-20170817" -licenseExceptionId Qt_GPL_exception_1_0 = "Qt-GPL-exception-1.0" -licenseExceptionId Qt_LGPL_exception_1_1 = "Qt-LGPL-exception-1.1" -licenseExceptionId 
Qwt_exception_1_0 = "Qwt-exception-1.0" -licenseExceptionId U_boot_exception_2_0 = "u-boot-exception-2.0" -licenseExceptionId WxWindows_exception_3_1 = "WxWindows-exception-3.1" - --- | License name, e.g. @"GNU General Public License v2.0 only"@ -licenseExceptionName :: LicenseExceptionId -> String -licenseExceptionName DS389_exception = "389 Directory Server Exception" -licenseExceptionName Autoconf_exception_2_0 = "Autoconf exception 2.0" -licenseExceptionName Autoconf_exception_3_0 = "Autoconf exception 3.0" -licenseExceptionName Bison_exception_2_2 = "Bison exception 2.2" -licenseExceptionName Bootloader_exception = "Bootloader Distribution Exception" -licenseExceptionName Classpath_exception_2_0 = "Classpath exception 2.0" -licenseExceptionName CLISP_exception_2_0 = "CLISP exception 2.0" -licenseExceptionName DigiRule_FOSS_exception = "DigiRule FOSS License Exception" -licenseExceptionName ECos_exception_2_0 = "eCos exception 2.0" -licenseExceptionName Fawkes_Runtime_exception = "Fawkes Runtime Exception" -licenseExceptionName FLTK_exception = "FLTK exception" -licenseExceptionName Font_exception_2_0 = "Font exception 2.0" -licenseExceptionName Freertos_exception_2_0 = "FreeRTOS Exception 2.0" -licenseExceptionName GCC_exception_2_0 = "GCC Runtime Library exception 2.0" -licenseExceptionName GCC_exception_3_1 = "GCC Runtime Library exception 3.1" -licenseExceptionName Gnu_javamail_exception = "GNU JavaMail exception" -licenseExceptionName I2p_gpl_java_exception = "i2p GPL+Java Exception" -licenseExceptionName Libtool_exception = "Libtool Exception" -licenseExceptionName Linux_syscall_note = "Linux Syscall Note" -licenseExceptionName LLVM_exception = "LLVM Exception" -licenseExceptionName LZMA_exception = "LZMA exception" -licenseExceptionName Mif_exception = "Macros and Inline Functions Exception" -licenseExceptionName Nokia_Qt_exception_1_1 = "Nokia Qt LGPL exception 1.1" -licenseExceptionName OCCT_exception_1_0 = "Open CASCADE Exception 1.0" 
-licenseExceptionName OpenJDK_assembly_exception_1_0 = "OpenJDK Assembly exception 1.0" -licenseExceptionName Openvpn_openssl_exception = "OpenVPN OpenSSL Exception" -licenseExceptionName PS_or_PDF_font_exception_20170817 = "PS/PDF font exception (2017-08-17)" -licenseExceptionName Qt_GPL_exception_1_0 = "Qt GPL exception 1.0" -licenseExceptionName Qt_LGPL_exception_1_1 = "Qt LGPL exception 1.1" -licenseExceptionName Qwt_exception_1_0 = "Qwt exception 1.0" -licenseExceptionName U_boot_exception_2_0 = "U-Boot exception 2.0" -licenseExceptionName WxWindows_exception_3_1 = "WxWindows Library Exception 3.1" - -------------------------------------------------------------------------------- --- Creation -------------------------------------------------------------------------------- - -licenseExceptionIdList :: LicenseListVersion -> [LicenseExceptionId] -licenseExceptionIdList LicenseListVersion_3_0 = - [] - ++ bulkOfLicenses -licenseExceptionIdList LicenseListVersion_3_2 = - [ LLVM_exception - , OpenJDK_assembly_exception_1_0 - , PS_or_PDF_font_exception_20170817 - , Qt_GPL_exception_1_0 - , Qt_LGPL_exception_1_1 - ] - ++ bulkOfLicenses - --- | Create a 'LicenseExceptionId' from a 'String'. 
-mkLicenseExceptionId :: LicenseListVersion -> String -> Maybe LicenseExceptionId -mkLicenseExceptionId LicenseListVersion_3_0 s = Map.lookup s stringLookup_3_0 -mkLicenseExceptionId LicenseListVersion_3_2 s = Map.lookup s stringLookup_3_2 - -stringLookup_3_0 :: Map String LicenseExceptionId -stringLookup_3_0 = Map.fromList $ map (\i -> (licenseExceptionId i, i)) $ - licenseExceptionIdList LicenseListVersion_3_0 - -stringLookup_3_2 :: Map String LicenseExceptionId -stringLookup_3_2 = Map.fromList $ map (\i -> (licenseExceptionId i, i)) $ - licenseExceptionIdList LicenseListVersion_3_2 - --- | License exceptions in all SPDX License lists -bulkOfLicenses :: [LicenseExceptionId] -bulkOfLicenses = - [ DS389_exception - , Autoconf_exception_2_0 - , Autoconf_exception_3_0 - , Bison_exception_2_2 - , Bootloader_exception - , Classpath_exception_2_0 - , CLISP_exception_2_0 - , DigiRule_FOSS_exception - , ECos_exception_2_0 - , Fawkes_Runtime_exception - , FLTK_exception - , Font_exception_2_0 - , Freertos_exception_2_0 - , GCC_exception_2_0 - , GCC_exception_3_1 - , Gnu_javamail_exception - , I2p_gpl_java_exception - , Libtool_exception - , Linux_syscall_note - , LZMA_exception - , Mif_exception - , Nokia_Qt_exception_1_1 - , OCCT_exception_1_0 - , Openvpn_openssl_exception - , Qwt_exception_1_0 - , U_boot_exception_2_0 - , WxWindows_exception_3_1 - ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseExpression.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseExpression.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseExpression.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseExpression.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,161 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.SPDX.LicenseExpression ( - 
LicenseExpression (..), - SimpleLicenseExpression (..), - simpleLicenseExpression, - ) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.SPDX.LicenseExceptionId -import Distribution.SPDX.LicenseId -import Distribution.SPDX.LicenseListVersion -import Distribution.SPDX.LicenseReference -import Distribution.Utils.Generic (isAsciiAlphaNum) -import Text.PrettyPrint ((<+>)) - -import qualified Distribution.Compat.CharParsing as P -import qualified Text.PrettyPrint as Disp - --- | SPDX License Expression. --- --- @ --- idstring = 1*(ALPHA \/ DIGIT \/ "-" \/ "." ) --- license id = \ --- license exception id = \ --- license ref = [\"DocumentRef-"1*(idstring)":"]\"LicenseRef-"1*(idstring) --- --- simple expression = license id \/ license id"+" \/ license ref --- --- compound expression = 1*1(simple expression \/ --- simple expression \"WITH" license exception id \/ --- compound expression \"AND" compound expression \/ --- compound expression \"OR" compound expression ) \/ --- "(" compound expression ")" ) --- --- license expression = 1*1(simple expression / compound expression) --- @ -data LicenseExpression - = ELicense !SimpleLicenseExpression !(Maybe LicenseExceptionId) - | EAnd !LicenseExpression !LicenseExpression - | EOr !LicenseExpression !LicenseExpression - deriving (Show, Read, Eq, Ord, Typeable, Data, Generic) - --- | Simple License Expressions. -data SimpleLicenseExpression - = ELicenseId LicenseId - -- ^ An SPDX License List Short Form Identifier. For example: @GPL-2.0-only@ - | ELicenseIdPlus LicenseId - -- ^ An SPDX License List Short Form Identifier with a unary"+" operator suffix to represent the current version of the license or any later version. 
For example: @GPL-2.0+@ - | ELicenseRef LicenseRef - -- ^ A SPDX user defined license reference: For example: @LicenseRef-23@, @LicenseRef-MIT-Style-1@, or @DocumentRef-spdx-tool-1.2:LicenseRef-MIT-Style-2@ - deriving (Show, Read, Eq, Ord, Typeable, Data, Generic) - -simpleLicenseExpression :: LicenseId -> LicenseExpression -simpleLicenseExpression i = ELicense (ELicenseId i) Nothing - -instance Binary LicenseExpression -instance Binary SimpleLicenseExpression - -instance Pretty LicenseExpression where - pretty = go 0 - where - go :: Int -> LicenseExpression -> Disp.Doc - go _ (ELicense lic exc) = - let doc = pretty lic - in maybe id (\e d -> d <+> Disp.text "WITH" <+> pretty e) exc doc - go d (EAnd e1 e2) = parens (d < 0) $ go 0 e1 <+> Disp.text "AND" <+> go 0 e2 - go d (EOr e1 e2) = parens (d < 1) $ go 1 e1 <+> Disp.text "OR" <+> go 1 e2 - - - parens False doc = doc - parens True doc = Disp.parens doc - -instance Pretty SimpleLicenseExpression where - pretty (ELicenseId i) = pretty i - pretty (ELicenseIdPlus i) = pretty i <<>> Disp.char '+' - pretty (ELicenseRef r) = pretty r - -instance Parsec SimpleLicenseExpression where - parsec = idstring >>= simple where - simple n - | Just l <- "LicenseRef-" `isPrefixOfMaybe` n = - maybe (fail $ "Incorrect LicenseRef format: " ++ n) (return . ELicenseRef) $ mkLicenseRef Nothing l - | Just d <- "DocumentRef-" `isPrefixOfMaybe` n = do - _ <- P.string ":LicenseRef-" - l <- idstring - maybe (fail $ "Incorrect LicenseRef format:" ++ n) (return . ELicenseRef) $ mkLicenseRef (Just d) l - | otherwise = do - v <- askCabalSpecVersion - l <- maybe (fail $ "Unknown SPDX license identifier: '" ++ n ++ "' " ++ licenseIdMigrationMessage n) return $ - mkLicenseId (cabalSpecVersionToSPDXListVersion v) n - orLater <- isJust <$> P.optional (P.char '+') - if orLater - then return (ELicenseIdPlus l) - else return (ELicenseId l) - -idstring :: P.CharParsing m => m String -idstring = P.munch1 $ \c -> isAsciiAlphaNum c || c == '-' || c == '.' 
- --- returns suffix part -isPrefixOfMaybe :: Eq a => [a] -> [a] -> Maybe [a] -isPrefixOfMaybe pfx s - | pfx `isPrefixOf` s = Just (drop (length pfx) s) - | otherwise = Nothing - -instance Parsec LicenseExpression where - parsec = expr - where - expr = compoundOr - - simple = do - s <- parsec - exc <- exception - return $ ELicense s exc - - exception = P.optional $ P.try (spaces1 *> P.string "WITH" *> spaces1) *> parsec - - compoundOr = do - x <- compoundAnd - l <- P.optional $ P.try (spaces1 *> P.string "OR" *> spaces1) *> compoundOr - return $ maybe id (flip EOr) l x - - compoundAnd = do - x <- compound - l <- P.optional $ P.try (spaces1 *> P.string "AND" *> spaces1) *> compoundAnd - return $ maybe id (flip EAnd) l x - - compound = braces <|> simple - - -- NOTE: we require that there's a space around AND & OR operators, - -- i.e. @(MIT)AND(MIT)@ will cause parse-error. - braces = do - _ <- P.char '(' - _ <- P.spaces - x <- expr - _ <- P.char ')' - return x - - spaces1 = P.space *> P.spaces - --- notes: --- --- There MUST NOT be whitespace between a license­id and any following "+".  This supports easy parsing and --- backwards compatibility.  There MUST be whitespace on either side of the operator "WITH".  There MUST be --- whitespace and/or parentheses on either side of the operators "AND" and "OR". --- --- We handle that by having greedy 'idstring' parser, so MITAND would parse as invalid license identifier. 
- -instance NFData LicenseExpression where - rnf (ELicense s e) = rnf s `seq` rnf e - rnf (EAnd x y) = rnf x `seq` rnf y - rnf (EOr x y) = rnf x `seq` rnf y - -instance NFData SimpleLicenseExpression where - rnf (ELicenseId i) = rnf i - rnf (ELicenseIdPlus i) = rnf i - rnf (ELicenseRef r) = rnf r diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/SPDX/License.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/SPDX/License.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/SPDX/License.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/SPDX/License.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,64 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.SPDX.License ( - License (..), - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Pretty -import Distribution.Parsec.Class -import Distribution.SPDX.LicenseExpression - -import qualified Distribution.Compat.CharParsing as P -import qualified Text.PrettyPrint as Disp - --- | Declared license. --- See [section 3.15 of SPDX Specification 2.1](https://spdx.org/spdx-specification-21-web-version#h.1hmsyys) --- --- /Note:/ the NOASSERTION case is omitted. --- --- Old 'License' can be migrated using following rules: --- --- * @AllRightsReserved@ and @UnspecifiedLicense@ to 'NONE'. --- No license specified which legally defaults to /All Rights Reserved/. --- The package may not be legally modified or redistributed by anyone but --- the rightsholder. --- --- * @OtherLicense@ can be converted to 'LicenseRef' pointing to the file --- in the package. --- --- * @UnknownLicense@ i.e. other licenses of the form @name-x.y@, should be --- covered by SPDX license list, otherwise use 'LicenseRef'. --- --- * @PublicDomain@ isn't covered. Consider using CC0. --- See --- for more information. 
--- -data License - = NONE - -- ^ if the package contains no license information whatsoever; or - | License LicenseExpression - -- ^ A valid SPDX License Expression as defined in Appendix IV. - deriving (Show, Read, Eq, Ord, Typeable, Data, Generic) - -instance Binary License - -instance NFData License where - rnf NONE = () - rnf (License l) = rnf l - -instance Pretty License where - pretty NONE = Disp.text "NONE" - pretty (License l) = pretty l - --- | --- >>> eitherParsec "BSD-3-Clause AND MIT" :: Either String License --- Right (License (EAnd (ELicense (ELicenseId BSD_3_Clause) Nothing) (ELicense (ELicenseId MIT) Nothing))) --- --- >>> eitherParsec "NONE" :: Either String License --- Right NONE --- -instance Parsec License where - parsec = NONE <$ P.try (P.string "NONE") <|> License <$> parsec diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseId.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseId.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1885 +0,0 @@ --- This file is generated. 
See Makefile's spdx rule -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.SPDX.LicenseId ( - LicenseId (..), - licenseId, - licenseName, - licenseIsOsiApproved, - mkLicenseId, - licenseIdList, - -- * Helpers - licenseIdMigrationMessage, - ) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Pretty -import Distribution.Parsec.Class -import Distribution.Utils.Generic (isAsciiAlphaNum) -import Distribution.SPDX.LicenseListVersion - -import qualified Data.Map.Strict as Map -import qualified Distribution.Compat.CharParsing as P -import qualified Text.PrettyPrint as Disp - -------------------------------------------------------------------------------- --- LicenseId -------------------------------------------------------------------------------- - --- | SPDX License identifier -data LicenseId - = NullBSD -- ^ @0BSD@, BSD Zero Clause License - | AAL -- ^ @AAL@, Attribution Assurance License - | Abstyles -- ^ @Abstyles@, Abstyles License - | Adobe_2006 -- ^ @Adobe-2006@, Adobe Systems Incorporated Source Code License Agreement - | Adobe_Glyph -- ^ @Adobe-Glyph@, Adobe Glyph List License - | ADSL -- ^ @ADSL@, Amazon Digital Services License - | AFL_1_1 -- ^ @AFL-1.1@, Academic Free License v1.1 - | AFL_1_2 -- ^ @AFL-1.2@, Academic Free License v1.2 - | AFL_2_0 -- ^ @AFL-2.0@, Academic Free License v2.0 - | AFL_2_1 -- ^ @AFL-2.1@, Academic Free License v2.1 - | AFL_3_0 -- ^ @AFL-3.0@, Academic Free License v3.0 - | Afmparse -- ^ @Afmparse@, Afmparse License - | AGPL_1_0 -- ^ @AGPL-1.0@, Affero General Public License v1.0, SPDX License List 3.0 - | AGPL_1_0_only -- ^ @AGPL-1.0-only@, Affero General Public License v1.0 only, SPDX License List 3.2 - | AGPL_1_0_or_later -- ^ @AGPL-1.0-or-later@, Affero General Public License v1.0 or later, SPDX License List 3.2 - | AGPL_3_0_only -- ^ @AGPL-3.0-only@, GNU Affero General Public License v3.0 only - | AGPL_3_0_or_later -- ^ @AGPL-3.0-or-later@, GNU Affero 
General Public License v3.0 or later - | Aladdin -- ^ @Aladdin@, Aladdin Free Public License - | AMDPLPA -- ^ @AMDPLPA@, AMD's plpa_map.c License - | AML -- ^ @AML@, Apple MIT License - | AMPAS -- ^ @AMPAS@, Academy of Motion Picture Arts and Sciences BSD - | ANTLR_PD -- ^ @ANTLR-PD@, ANTLR Software Rights Notice - | Apache_1_0 -- ^ @Apache-1.0@, Apache License 1.0 - | Apache_1_1 -- ^ @Apache-1.1@, Apache License 1.1 - | Apache_2_0 -- ^ @Apache-2.0@, Apache License 2.0 - | APAFML -- ^ @APAFML@, Adobe Postscript AFM License - | APL_1_0 -- ^ @APL-1.0@, Adaptive Public License 1.0 - | APSL_1_0 -- ^ @APSL-1.0@, Apple Public Source License 1.0 - | APSL_1_1 -- ^ @APSL-1.1@, Apple Public Source License 1.1 - | APSL_1_2 -- ^ @APSL-1.2@, Apple Public Source License 1.2 - | APSL_2_0 -- ^ @APSL-2.0@, Apple Public Source License 2.0 - | Artistic_1_0_cl8 -- ^ @Artistic-1.0-cl8@, Artistic License 1.0 w/clause 8 - | Artistic_1_0_Perl -- ^ @Artistic-1.0-Perl@, Artistic License 1.0 (Perl) - | Artistic_1_0 -- ^ @Artistic-1.0@, Artistic License 1.0 - | Artistic_2_0 -- ^ @Artistic-2.0@, Artistic License 2.0 - | Bahyph -- ^ @Bahyph@, Bahyph License - | Barr -- ^ @Barr@, Barr License - | Beerware -- ^ @Beerware@, Beerware License - | BitTorrent_1_0 -- ^ @BitTorrent-1.0@, BitTorrent Open Source License v1.0 - | BitTorrent_1_1 -- ^ @BitTorrent-1.1@, BitTorrent Open Source License v1.1 - | Borceux -- ^ @Borceux@, Borceux license - | BSD_1_Clause -- ^ @BSD-1-Clause@, BSD 1-Clause License - | BSD_2_Clause_FreeBSD -- ^ @BSD-2-Clause-FreeBSD@, BSD 2-Clause FreeBSD License - | BSD_2_Clause_NetBSD -- ^ @BSD-2-Clause-NetBSD@, BSD 2-Clause NetBSD License - | BSD_2_Clause_Patent -- ^ @BSD-2-Clause-Patent@, BSD-2-Clause Plus Patent License - | BSD_2_Clause -- ^ @BSD-2-Clause@, BSD 2-Clause "Simplified" License - | BSD_3_Clause_Attribution -- ^ @BSD-3-Clause-Attribution@, BSD with attribution - | BSD_3_Clause_Clear -- ^ @BSD-3-Clause-Clear@, BSD 3-Clause Clear License - | BSD_3_Clause_LBNL -- ^ 
@BSD-3-Clause-LBNL@, Lawrence Berkeley National Labs BSD variant license - | BSD_3_Clause_No_Nuclear_License_2014 -- ^ @BSD-3-Clause-No-Nuclear-License-2014@, BSD 3-Clause No Nuclear License 2014 - | BSD_3_Clause_No_Nuclear_License -- ^ @BSD-3-Clause-No-Nuclear-License@, BSD 3-Clause No Nuclear License - | BSD_3_Clause_No_Nuclear_Warranty -- ^ @BSD-3-Clause-No-Nuclear-Warranty@, BSD 3-Clause No Nuclear Warranty - | BSD_3_Clause -- ^ @BSD-3-Clause@, BSD 3-Clause "New" or "Revised" License - | BSD_4_Clause_UC -- ^ @BSD-4-Clause-UC@, BSD-4-Clause (University of California-Specific) - | BSD_4_Clause -- ^ @BSD-4-Clause@, BSD 4-Clause "Original" or "Old" License - | BSD_Protection -- ^ @BSD-Protection@, BSD Protection License - | BSD_Source_Code -- ^ @BSD-Source-Code@, BSD Source Code Attribution - | BSL_1_0 -- ^ @BSL-1.0@, Boost Software License 1.0 - | Bzip2_1_0_5 -- ^ @bzip2-1.0.5@, bzip2 and libbzip2 License v1.0.5 - | Bzip2_1_0_6 -- ^ @bzip2-1.0.6@, bzip2 and libbzip2 License v1.0.6 - | Caldera -- ^ @Caldera@, Caldera License - | CATOSL_1_1 -- ^ @CATOSL-1.1@, Computer Associates Trusted Open Source License 1.1 - | CC_BY_1_0 -- ^ @CC-BY-1.0@, Creative Commons Attribution 1.0 Generic - | CC_BY_2_0 -- ^ @CC-BY-2.0@, Creative Commons Attribution 2.0 Generic - | CC_BY_2_5 -- ^ @CC-BY-2.5@, Creative Commons Attribution 2.5 Generic - | CC_BY_3_0 -- ^ @CC-BY-3.0@, Creative Commons Attribution 3.0 Unported - | CC_BY_4_0 -- ^ @CC-BY-4.0@, Creative Commons Attribution 4.0 International - | CC_BY_NC_1_0 -- ^ @CC-BY-NC-1.0@, Creative Commons Attribution Non Commercial 1.0 Generic - | CC_BY_NC_2_0 -- ^ @CC-BY-NC-2.0@, Creative Commons Attribution Non Commercial 2.0 Generic - | CC_BY_NC_2_5 -- ^ @CC-BY-NC-2.5@, Creative Commons Attribution Non Commercial 2.5 Generic - | CC_BY_NC_3_0 -- ^ @CC-BY-NC-3.0@, Creative Commons Attribution Non Commercial 3.0 Unported - | CC_BY_NC_4_0 -- ^ @CC-BY-NC-4.0@, Creative Commons Attribution Non Commercial 4.0 International - | CC_BY_NC_ND_1_0 -- 
^ @CC-BY-NC-ND-1.0@, Creative Commons Attribution Non Commercial No Derivatives 1.0 Generic - | CC_BY_NC_ND_2_0 -- ^ @CC-BY-NC-ND-2.0@, Creative Commons Attribution Non Commercial No Derivatives 2.0 Generic - | CC_BY_NC_ND_2_5 -- ^ @CC-BY-NC-ND-2.5@, Creative Commons Attribution Non Commercial No Derivatives 2.5 Generic - | CC_BY_NC_ND_3_0 -- ^ @CC-BY-NC-ND-3.0@, Creative Commons Attribution Non Commercial No Derivatives 3.0 Unported - | CC_BY_NC_ND_4_0 -- ^ @CC-BY-NC-ND-4.0@, Creative Commons Attribution Non Commercial No Derivatives 4.0 International - | CC_BY_NC_SA_1_0 -- ^ @CC-BY-NC-SA-1.0@, Creative Commons Attribution Non Commercial Share Alike 1.0 Generic - | CC_BY_NC_SA_2_0 -- ^ @CC-BY-NC-SA-2.0@, Creative Commons Attribution Non Commercial Share Alike 2.0 Generic - | CC_BY_NC_SA_2_5 -- ^ @CC-BY-NC-SA-2.5@, Creative Commons Attribution Non Commercial Share Alike 2.5 Generic - | CC_BY_NC_SA_3_0 -- ^ @CC-BY-NC-SA-3.0@, Creative Commons Attribution Non Commercial Share Alike 3.0 Unported - | CC_BY_NC_SA_4_0 -- ^ @CC-BY-NC-SA-4.0@, Creative Commons Attribution Non Commercial Share Alike 4.0 International - | CC_BY_ND_1_0 -- ^ @CC-BY-ND-1.0@, Creative Commons Attribution No Derivatives 1.0 Generic - | CC_BY_ND_2_0 -- ^ @CC-BY-ND-2.0@, Creative Commons Attribution No Derivatives 2.0 Generic - | CC_BY_ND_2_5 -- ^ @CC-BY-ND-2.5@, Creative Commons Attribution No Derivatives 2.5 Generic - | CC_BY_ND_3_0 -- ^ @CC-BY-ND-3.0@, Creative Commons Attribution No Derivatives 3.0 Unported - | CC_BY_ND_4_0 -- ^ @CC-BY-ND-4.0@, Creative Commons Attribution No Derivatives 4.0 International - | CC_BY_SA_1_0 -- ^ @CC-BY-SA-1.0@, Creative Commons Attribution Share Alike 1.0 Generic - | CC_BY_SA_2_0 -- ^ @CC-BY-SA-2.0@, Creative Commons Attribution Share Alike 2.0 Generic - | CC_BY_SA_2_5 -- ^ @CC-BY-SA-2.5@, Creative Commons Attribution Share Alike 2.5 Generic - | CC_BY_SA_3_0 -- ^ @CC-BY-SA-3.0@, Creative Commons Attribution Share Alike 3.0 Unported - | CC_BY_SA_4_0 -- ^ 
@CC-BY-SA-4.0@, Creative Commons Attribution Share Alike 4.0 International - | CC0_1_0 -- ^ @CC0-1.0@, Creative Commons Zero v1.0 Universal - | CDDL_1_0 -- ^ @CDDL-1.0@, Common Development and Distribution License 1.0 - | CDDL_1_1 -- ^ @CDDL-1.1@, Common Development and Distribution License 1.1 - | CDLA_Permissive_1_0 -- ^ @CDLA-Permissive-1.0@, Community Data License Agreement Permissive 1.0 - | CDLA_Sharing_1_0 -- ^ @CDLA-Sharing-1.0@, Community Data License Agreement Sharing 1.0 - | CECILL_1_0 -- ^ @CECILL-1.0@, CeCILL Free Software License Agreement v1.0 - | CECILL_1_1 -- ^ @CECILL-1.1@, CeCILL Free Software License Agreement v1.1 - | CECILL_2_0 -- ^ @CECILL-2.0@, CeCILL Free Software License Agreement v2.0 - | CECILL_2_1 -- ^ @CECILL-2.1@, CeCILL Free Software License Agreement v2.1 - | CECILL_B -- ^ @CECILL-B@, CeCILL-B Free Software License Agreement - | CECILL_C -- ^ @CECILL-C@, CeCILL-C Free Software License Agreement - | ClArtistic -- ^ @ClArtistic@, Clarified Artistic License - | CNRI_Jython -- ^ @CNRI-Jython@, CNRI Jython License - | CNRI_Python_GPL_Compatible -- ^ @CNRI-Python-GPL-Compatible@, CNRI Python Open Source GPL Compatible License Agreement - | CNRI_Python -- ^ @CNRI-Python@, CNRI Python License - | Condor_1_1 -- ^ @Condor-1.1@, Condor Public License v1.1 - | CPAL_1_0 -- ^ @CPAL-1.0@, Common Public Attribution License 1.0 - | CPL_1_0 -- ^ @CPL-1.0@, Common Public License 1.0 - | CPOL_1_02 -- ^ @CPOL-1.02@, Code Project Open License 1.02 - | Crossword -- ^ @Crossword@, Crossword License - | CrystalStacker -- ^ @CrystalStacker@, CrystalStacker License - | CUA_OPL_1_0 -- ^ @CUA-OPL-1.0@, CUA Office Public License v1.0 - | Cube -- ^ @Cube@, Cube License - | Curl -- ^ @curl@, curl License - | D_FSL_1_0 -- ^ @D-FSL-1.0@, Deutsche Freie Software Lizenz - | Diffmark -- ^ @diffmark@, diffmark license - | DOC -- ^ @DOC@, DOC License - | Dotseqn -- ^ @Dotseqn@, Dotseqn License - | DSDP -- ^ @DSDP@, DSDP License - | Dvipdfm -- ^ @dvipdfm@, dvipdfm License 
- | ECL_1_0 -- ^ @ECL-1.0@, Educational Community License v1.0 - | ECL_2_0 -- ^ @ECL-2.0@, Educational Community License v2.0 - | EFL_1_0 -- ^ @EFL-1.0@, Eiffel Forum License v1.0 - | EFL_2_0 -- ^ @EFL-2.0@, Eiffel Forum License v2.0 - | EGenix -- ^ @eGenix@, eGenix.com Public License 1.1.0 - | Entessa -- ^ @Entessa@, Entessa Public License v1.0 - | EPL_1_0 -- ^ @EPL-1.0@, Eclipse Public License 1.0 - | EPL_2_0 -- ^ @EPL-2.0@, Eclipse Public License 2.0 - | ErlPL_1_1 -- ^ @ErlPL-1.1@, Erlang Public License v1.1 - | EUDatagrid -- ^ @EUDatagrid@, EU DataGrid Software License - | EUPL_1_0 -- ^ @EUPL-1.0@, European Union Public License 1.0 - | EUPL_1_1 -- ^ @EUPL-1.1@, European Union Public License 1.1 - | EUPL_1_2 -- ^ @EUPL-1.2@, European Union Public License 1.2 - | Eurosym -- ^ @Eurosym@, Eurosym License - | Fair -- ^ @Fair@, Fair License - | Frameworx_1_0 -- ^ @Frameworx-1.0@, Frameworx Open License 1.0 - | FreeImage -- ^ @FreeImage@, FreeImage Public License v1.0 - | FSFAP -- ^ @FSFAP@, FSF All Permissive License - | FSFUL -- ^ @FSFUL@, FSF Unlimited License - | FSFULLR -- ^ @FSFULLR@, FSF Unlimited License (with License Retention) - | FTL -- ^ @FTL@, Freetype Project License - | GFDL_1_1_only -- ^ @GFDL-1.1-only@, GNU Free Documentation License v1.1 only - | GFDL_1_1_or_later -- ^ @GFDL-1.1-or-later@, GNU Free Documentation License v1.1 or later - | GFDL_1_2_only -- ^ @GFDL-1.2-only@, GNU Free Documentation License v1.2 only - | GFDL_1_2_or_later -- ^ @GFDL-1.2-or-later@, GNU Free Documentation License v1.2 or later - | GFDL_1_3_only -- ^ @GFDL-1.3-only@, GNU Free Documentation License v1.3 only - | GFDL_1_3_or_later -- ^ @GFDL-1.3-or-later@, GNU Free Documentation License v1.3 or later - | Giftware -- ^ @Giftware@, Giftware License - | GL2PS -- ^ @GL2PS@, GL2PS License - | Glide -- ^ @Glide@, 3dfx Glide License - | Glulxe -- ^ @Glulxe@, Glulxe License - | Gnuplot -- ^ @gnuplot@, gnuplot License - | GPL_1_0_only -- ^ @GPL-1.0-only@, GNU General Public License 
v1.0 only - | GPL_1_0_or_later -- ^ @GPL-1.0-or-later@, GNU General Public License v1.0 or later - | GPL_2_0_only -- ^ @GPL-2.0-only@, GNU General Public License v2.0 only - | GPL_2_0_or_later -- ^ @GPL-2.0-or-later@, GNU General Public License v2.0 or later - | GPL_3_0_only -- ^ @GPL-3.0-only@, GNU General Public License v3.0 only - | GPL_3_0_or_later -- ^ @GPL-3.0-or-later@, GNU General Public License v3.0 or later - | GSOAP_1_3b -- ^ @gSOAP-1.3b@, gSOAP Public License v1.3b - | HaskellReport -- ^ @HaskellReport@, Haskell Language Report License - | HPND -- ^ @HPND@, Historical Permission Notice and Disclaimer - | IBM_pibs -- ^ @IBM-pibs@, IBM PowerPC Initialization and Boot Software - | ICU -- ^ @ICU@, ICU License - | IJG -- ^ @IJG@, Independent JPEG Group License - | ImageMagick -- ^ @ImageMagick@, ImageMagick License - | IMatix -- ^ @iMatix@, iMatix Standard Function Library Agreement - | Imlib2 -- ^ @Imlib2@, Imlib2 License - | Info_ZIP -- ^ @Info-ZIP@, Info-ZIP License - | Intel_ACPI -- ^ @Intel-ACPI@, Intel ACPI Software License Agreement - | Intel -- ^ @Intel@, Intel Open Source License - | Interbase_1_0 -- ^ @Interbase-1.0@, Interbase Public License v1.0 - | IPA -- ^ @IPA@, IPA Font License - | IPL_1_0 -- ^ @IPL-1.0@, IBM Public License v1.0 - | ISC -- ^ @ISC@, ISC License - | JasPer_2_0 -- ^ @JasPer-2.0@, JasPer License - | JSON -- ^ @JSON@, JSON License - | LAL_1_2 -- ^ @LAL-1.2@, Licence Art Libre 1.2 - | LAL_1_3 -- ^ @LAL-1.3@, Licence Art Libre 1.3 - | Latex2e -- ^ @Latex2e@, Latex2e License - | Leptonica -- ^ @Leptonica@, Leptonica License - | LGPL_2_0_only -- ^ @LGPL-2.0-only@, GNU Library General Public License v2 only - | LGPL_2_0_or_later -- ^ @LGPL-2.0-or-later@, GNU Library General Public License v2 or later - | LGPL_2_1_only -- ^ @LGPL-2.1-only@, GNU Lesser General Public License v2.1 only - | LGPL_2_1_or_later -- ^ @LGPL-2.1-or-later@, GNU Lesser General Public License v2.1 or later - | LGPL_3_0_only -- ^ @LGPL-3.0-only@, GNU Lesser General 
Public License v3.0 only - | LGPL_3_0_or_later -- ^ @LGPL-3.0-or-later@, GNU Lesser General Public License v3.0 or later - | LGPLLR -- ^ @LGPLLR@, Lesser General Public License For Linguistic Resources - | Libpng -- ^ @Libpng@, libpng License - | Libtiff -- ^ @libtiff@, libtiff License - | LiLiQ_P_1_1 -- ^ @LiLiQ-P-1.1@, Licence Libre du Québec – Permissive version 1.1 - | LiLiQ_R_1_1 -- ^ @LiLiQ-R-1.1@, Licence Libre du Québec – Réciprocité version 1.1 - | LiLiQ_Rplus_1_1 -- ^ @LiLiQ-Rplus-1.1@, Licence Libre du Québec – Réciprocité forte version 1.1 - | Linux_OpenIB -- ^ @Linux-OpenIB@, Linux Kernel Variant of OpenIB.org license, SPDX License List 3.2 - | LPL_1_0 -- ^ @LPL-1.0@, Lucent Public License Version 1.0 - | LPL_1_02 -- ^ @LPL-1.02@, Lucent Public License v1.02 - | LPPL_1_0 -- ^ @LPPL-1.0@, LaTeX Project Public License v1.0 - | LPPL_1_1 -- ^ @LPPL-1.1@, LaTeX Project Public License v1.1 - | LPPL_1_2 -- ^ @LPPL-1.2@, LaTeX Project Public License v1.2 - | LPPL_1_3a -- ^ @LPPL-1.3a@, LaTeX Project Public License v1.3a - | LPPL_1_3c -- ^ @LPPL-1.3c@, LaTeX Project Public License v1.3c - | MakeIndex -- ^ @MakeIndex@, MakeIndex License - | MirOS -- ^ @MirOS@, MirOS License - | MIT_0 -- ^ @MIT-0@, MIT No Attribution, SPDX License List 3.2 - | MIT_advertising -- ^ @MIT-advertising@, Enlightenment License (e16) - | MIT_CMU -- ^ @MIT-CMU@, CMU License - | MIT_enna -- ^ @MIT-enna@, enna License - | MIT_feh -- ^ @MIT-feh@, feh License - | MIT -- ^ @MIT@, MIT License - | MITNFA -- ^ @MITNFA@, MIT +no-false-attribs license - | Motosoto -- ^ @Motosoto@, Motosoto License - | Mpich2 -- ^ @mpich2@, mpich2 License - | MPL_1_0 -- ^ @MPL-1.0@, Mozilla Public License 1.0 - | MPL_1_1 -- ^ @MPL-1.1@, Mozilla Public License 1.1 - | MPL_2_0_no_copyleft_exception -- ^ @MPL-2.0-no-copyleft-exception@, Mozilla Public License 2.0 (no copyleft exception) - | MPL_2_0 -- ^ @MPL-2.0@, Mozilla Public License 2.0 - | MS_PL -- ^ @MS-PL@, Microsoft Public License - | MS_RL -- ^ @MS-RL@, 
Microsoft Reciprocal License - | MTLL -- ^ @MTLL@, Matrix Template Library License - | Multics -- ^ @Multics@, Multics License - | Mup -- ^ @Mup@, Mup License - | NASA_1_3 -- ^ @NASA-1.3@, NASA Open Source Agreement 1.3 - | Naumen -- ^ @Naumen@, Naumen Public License - | NBPL_1_0 -- ^ @NBPL-1.0@, Net Boolean Public License v1 - | NCSA -- ^ @NCSA@, University of Illinois/NCSA Open Source License - | Net_SNMP -- ^ @Net-SNMP@, Net-SNMP License - | NetCDF -- ^ @NetCDF@, NetCDF license - | Newsletr -- ^ @Newsletr@, Newsletr License - | NGPL -- ^ @NGPL@, Nethack General Public License - | NLOD_1_0 -- ^ @NLOD-1.0@, Norwegian Licence for Open Government Data - | NLPL -- ^ @NLPL@, No Limit Public License - | Nokia -- ^ @Nokia@, Nokia Open Source License - | NOSL -- ^ @NOSL@, Netizen Open Source License - | Noweb -- ^ @Noweb@, Noweb License - | NPL_1_0 -- ^ @NPL-1.0@, Netscape Public License v1.0 - | NPL_1_1 -- ^ @NPL-1.1@, Netscape Public License v1.1 - | NPOSL_3_0 -- ^ @NPOSL-3.0@, Non-Profit Open Software License 3.0 - | NRL -- ^ @NRL@, NRL License - | NTP -- ^ @NTP@, NTP License - | OCCT_PL -- ^ @OCCT-PL@, Open CASCADE Technology Public License - | OCLC_2_0 -- ^ @OCLC-2.0@, OCLC Research Public License 2.0 - | ODbL_1_0 -- ^ @ODbL-1.0@, ODC Open Database License v1.0 - | ODC_By_1_0 -- ^ @ODC-By-1.0@, Open Data Commons Attribution License v1.0, SPDX License List 3.2 - | OFL_1_0 -- ^ @OFL-1.0@, SIL Open Font License 1.0 - | OFL_1_1 -- ^ @OFL-1.1@, SIL Open Font License 1.1 - | OGTSL -- ^ @OGTSL@, Open Group Test Suite License - | OLDAP_1_1 -- ^ @OLDAP-1.1@, Open LDAP Public License v1.1 - | OLDAP_1_2 -- ^ @OLDAP-1.2@, Open LDAP Public License v1.2 - | OLDAP_1_3 -- ^ @OLDAP-1.3@, Open LDAP Public License v1.3 - | OLDAP_1_4 -- ^ @OLDAP-1.4@, Open LDAP Public License v1.4 - | OLDAP_2_0_1 -- ^ @OLDAP-2.0.1@, Open LDAP Public License v2.0.1 - | OLDAP_2_0 -- ^ @OLDAP-2.0@, Open LDAP Public License v2.0 (or possibly 2.0A and 2.0B) - | OLDAP_2_1 -- ^ @OLDAP-2.1@, Open LDAP Public 
License v2.1 - | OLDAP_2_2_1 -- ^ @OLDAP-2.2.1@, Open LDAP Public License v2.2.1 - | OLDAP_2_2_2 -- ^ @OLDAP-2.2.2@, Open LDAP Public License 2.2.2 - | OLDAP_2_2 -- ^ @OLDAP-2.2@, Open LDAP Public License v2.2 - | OLDAP_2_3 -- ^ @OLDAP-2.3@, Open LDAP Public License v2.3 - | OLDAP_2_4 -- ^ @OLDAP-2.4@, Open LDAP Public License v2.4 - | OLDAP_2_5 -- ^ @OLDAP-2.5@, Open LDAP Public License v2.5 - | OLDAP_2_6 -- ^ @OLDAP-2.6@, Open LDAP Public License v2.6 - | OLDAP_2_7 -- ^ @OLDAP-2.7@, Open LDAP Public License v2.7 - | OLDAP_2_8 -- ^ @OLDAP-2.8@, Open LDAP Public License v2.8 - | OML -- ^ @OML@, Open Market License - | OpenSSL -- ^ @OpenSSL@, OpenSSL License - | OPL_1_0 -- ^ @OPL-1.0@, Open Public License v1.0 - | OSET_PL_2_1 -- ^ @OSET-PL-2.1@, OSET Public License version 2.1 - | OSL_1_0 -- ^ @OSL-1.0@, Open Software License 1.0 - | OSL_1_1 -- ^ @OSL-1.1@, Open Software License 1.1 - | OSL_2_0 -- ^ @OSL-2.0@, Open Software License 2.0 - | OSL_2_1 -- ^ @OSL-2.1@, Open Software License 2.1 - | OSL_3_0 -- ^ @OSL-3.0@, Open Software License 3.0 - | PDDL_1_0 -- ^ @PDDL-1.0@, ODC Public Domain Dedication & License 1.0 - | PHP_3_0 -- ^ @PHP-3.0@, PHP License v3.0 - | PHP_3_01 -- ^ @PHP-3.01@, PHP License v3.01 - | Plexus -- ^ @Plexus@, Plexus Classworlds License - | PostgreSQL -- ^ @PostgreSQL@, PostgreSQL License - | Psfrag -- ^ @psfrag@, psfrag License - | Psutils -- ^ @psutils@, psutils License - | Python_2_0 -- ^ @Python-2.0@, Python License 2.0 - | Qhull -- ^ @Qhull@, Qhull License - | QPL_1_0 -- ^ @QPL-1.0@, Q Public License 1.0 - | Rdisc -- ^ @Rdisc@, Rdisc License - | RHeCos_1_1 -- ^ @RHeCos-1.1@, Red Hat eCos Public License v1.1 - | RPL_1_1 -- ^ @RPL-1.1@, Reciprocal Public License 1.1 - | RPL_1_5 -- ^ @RPL-1.5@, Reciprocal Public License 1.5 - | RPSL_1_0 -- ^ @RPSL-1.0@, RealNetworks Public Source License v1.0 - | RSA_MD -- ^ @RSA-MD@, RSA Message-Digest License - | RSCPL -- ^ @RSCPL@, Ricoh Source Code Public License - | Ruby -- ^ @Ruby@, Ruby License - | 
SAX_PD -- ^ @SAX-PD@, Sax Public Domain Notice - | Saxpath -- ^ @Saxpath@, Saxpath License - | SCEA -- ^ @SCEA@, SCEA Shared Source License - | Sendmail -- ^ @Sendmail@, Sendmail License - | SGI_B_1_0 -- ^ @SGI-B-1.0@, SGI Free Software License B v1.0 - | SGI_B_1_1 -- ^ @SGI-B-1.1@, SGI Free Software License B v1.1 - | SGI_B_2_0 -- ^ @SGI-B-2.0@, SGI Free Software License B v2.0 - | SimPL_2_0 -- ^ @SimPL-2.0@, Simple Public License 2.0 - | SISSL_1_2 -- ^ @SISSL-1.2@, Sun Industry Standards Source License v1.2 - | SISSL -- ^ @SISSL@, Sun Industry Standards Source License v1.1 - | Sleepycat -- ^ @Sleepycat@, Sleepycat License - | SMLNJ -- ^ @SMLNJ@, Standard ML of New Jersey License - | SMPPL -- ^ @SMPPL@, Secure Messaging Protocol Public License - | SNIA -- ^ @SNIA@, SNIA Public License 1.1 - | Spencer_86 -- ^ @Spencer-86@, Spencer License 86 - | Spencer_94 -- ^ @Spencer-94@, Spencer License 94 - | Spencer_99 -- ^ @Spencer-99@, Spencer License 99 - | SPL_1_0 -- ^ @SPL-1.0@, Sun Public License v1.0 - | SugarCRM_1_1_3 -- ^ @SugarCRM-1.1.3@, SugarCRM Public License v1.1.3 - | SWL -- ^ @SWL@, Scheme Widget Library (SWL) Software License Agreement - | TCL -- ^ @TCL@, TCL/TK License - | TCP_wrappers -- ^ @TCP-wrappers@, TCP Wrappers License - | TMate -- ^ @TMate@, TMate Open Source License - | TORQUE_1_1 -- ^ @TORQUE-1.1@, TORQUE v2.5+ Software License v1.1 - | TOSL -- ^ @TOSL@, Trusster Open Source License - | TU_Berlin_1_0 -- ^ @TU-Berlin-1.0@, Technische Universitaet Berlin License 1.0, SPDX License List 3.2 - | TU_Berlin_2_0 -- ^ @TU-Berlin-2.0@, Technische Universitaet Berlin License 2.0, SPDX License List 3.2 - | Unicode_DFS_2015 -- ^ @Unicode-DFS-2015@, Unicode License Agreement - Data Files and Software (2015) - | Unicode_DFS_2016 -- ^ @Unicode-DFS-2016@, Unicode License Agreement - Data Files and Software (2016) - | Unicode_TOU -- ^ @Unicode-TOU@, Unicode Terms of Use - | Unlicense -- ^ @Unlicense@, The Unlicense - | UPL_1_0 -- ^ @UPL-1.0@, Universal Permissive 
License v1.0 - | Vim -- ^ @Vim@, Vim License - | VOSTROM -- ^ @VOSTROM@, VOSTROM Public License for Open Source - | VSL_1_0 -- ^ @VSL-1.0@, Vovida Software License v1.0 - | W3C_19980720 -- ^ @W3C-19980720@, W3C Software Notice and License (1998-07-20) - | W3C_20150513 -- ^ @W3C-20150513@, W3C Software Notice and Document License (2015-05-13) - | W3C -- ^ @W3C@, W3C Software Notice and License (2002-12-31) - | Watcom_1_0 -- ^ @Watcom-1.0@, Sybase Open Watcom Public License 1.0 - | Wsuipa -- ^ @Wsuipa@, Wsuipa License - | WTFPL -- ^ @WTFPL@, Do What The F*ck You Want To Public License - | X11 -- ^ @X11@, X11 License - | Xerox -- ^ @Xerox@, Xerox License - | XFree86_1_1 -- ^ @XFree86-1.1@, XFree86 License 1.1 - | Xinetd -- ^ @xinetd@, xinetd License - | Xnet -- ^ @Xnet@, X.Net License - | Xpp -- ^ @xpp@, XPP License - | XSkat -- ^ @XSkat@, XSkat License - | YPL_1_0 -- ^ @YPL-1.0@, Yahoo! Public License v1.0 - | YPL_1_1 -- ^ @YPL-1.1@, Yahoo! Public License v1.1 - | Zed -- ^ @Zed@, Zed License - | Zend_2_0 -- ^ @Zend-2.0@, Zend License v2.0 - | Zimbra_1_3 -- ^ @Zimbra-1.3@, Zimbra Public License v1.3 - | Zimbra_1_4 -- ^ @Zimbra-1.4@, Zimbra Public License v1.4 - | Zlib_acknowledgement -- ^ @zlib-acknowledgement@, zlib/libpng License with Acknowledgement - | Zlib -- ^ @Zlib@, zlib License - | ZPL_1_1 -- ^ @ZPL-1.1@, Zope Public License 1.1 - | ZPL_2_0 -- ^ @ZPL-2.0@, Zope Public License 2.0 - | ZPL_2_1 -- ^ @ZPL-2.1@, Zope Public License 2.1 - deriving (Eq, Ord, Enum, Bounded, Show, Read, Typeable, Data, Generic) - -instance Binary LicenseId - -instance Pretty LicenseId where - pretty = Disp.text . licenseId - --- | --- >>> eitherParsec "BSD-3-Clause" :: Either String LicenseId --- Right BSD_3_Clause --- --- >>> eitherParsec "BSD3" :: Either String LicenseId --- Left "...Unknown SPDX license identifier: 'BSD3' Do you mean BSD-3-Clause?" --- -instance Parsec LicenseId where - parsec = do - n <- some $ P.satisfy $ \c -> isAsciiAlphaNum c || c == '-' || c == '.' 
- v <- askCabalSpecVersion - maybe (fail $ "Unknown SPDX license identifier: '" ++ n ++ "' " ++ licenseIdMigrationMessage n) return $ - mkLicenseId (cabalSpecVersionToSPDXListVersion v) n - -instance NFData LicenseId where - rnf l = l `seq` () - --- | Help message for migrating from non-SPDX license identifiers. --- --- Old 'License' is almost SPDX, except for 'BSD2', 'BSD3'. This function --- suggests SPDX variant: --- --- >>> licenseIdMigrationMessage "BSD3" --- "Do you mean BSD-3-Clause?" --- --- Also 'OtherLicense', 'AllRightsReserved', and 'PublicDomain' aren't --- valid SPDX identifiers --- --- >>> traverse_ (print . licenseIdMigrationMessage) [ "OtherLicense", "AllRightsReserved", "PublicDomain" ] --- "SPDX license list contains plenty of licenses. See https://spdx.org/licenses/. Also they can be combined into complex expressions with AND and OR." --- "You can use NONE as a value of license field." --- "Public Domain is a complex matter. See https://wiki.spdx.org/view/Legal_Team/Decisions/Dealing_with_Public_Domain_within_SPDX_Files. Consider using a proper license." --- --- SPDX License list version 3.0 introduced "-only" and "-or-later" variants for GNU family of licenses. --- See --- >>> licenseIdMigrationMessage "GPL-2.0" --- "SPDX license list 3.0 deprecated suffixless variants of GNU family of licenses. Use GPL-2.0-only or GPL-2.0-or-later." --- --- For other common licenses their old license format coincides with the SPDX identifiers: --- --- >>> traverse eitherParsec ["GPL-2.0-only", "GPL-3.0-only", "LGPL-2.1-only", "MIT", "ISC", "MPL-2.0", "Apache-2.0"] :: Either String [LicenseId] --- Right [GPL_2_0_only,GPL_3_0_only,LGPL_2_1_only,MIT,ISC,MPL_2_0,Apache_2_0] --- -licenseIdMigrationMessage :: String -> String -licenseIdMigrationMessage = go where - go l | gnuVariant l = "SPDX license list 3.0 deprecated suffixless variants of GNU family of licenses. Use " ++ l ++ "-only or " ++ l ++ "-or-later." - go "BSD3" = "Do you mean BSD-3-Clause?" 
- go "BSD2" = "Do you mean BSD-2-Clause?" - go "AllRightsReserved" = "You can use NONE as a value of license field." - go "OtherLicense" = "SPDX license list contains plenty of licenses. See https://spdx.org/licenses/. Also they can be combined into complex expressions with AND and OR." - go "PublicDomain" = "Public Domain is a complex matter. See https://wiki.spdx.org/view/Legal_Team/Decisions/Dealing_with_Public_Domain_within_SPDX_Files. Consider using a proper license." - - -- otherwise, we don't know - go _ = "" - - gnuVariant = flip elem ["GPL-2.0", "GPL-3.0", "LGPL-2.1", "LGPL-3.0", "AGPL-3.0" ] - -------------------------------------------------------------------------------- --- License Data -------------------------------------------------------------------------------- - --- | License SPDX identifier, e.g. @"BSD-3-Clause"@. -licenseId :: LicenseId -> String -licenseId NullBSD = "0BSD" -licenseId AAL = "AAL" -licenseId Abstyles = "Abstyles" -licenseId Adobe_2006 = "Adobe-2006" -licenseId Adobe_Glyph = "Adobe-Glyph" -licenseId ADSL = "ADSL" -licenseId AFL_1_1 = "AFL-1.1" -licenseId AFL_1_2 = "AFL-1.2" -licenseId AFL_2_0 = "AFL-2.0" -licenseId AFL_2_1 = "AFL-2.1" -licenseId AFL_3_0 = "AFL-3.0" -licenseId Afmparse = "Afmparse" -licenseId AGPL_1_0 = "AGPL-1.0" -licenseId AGPL_1_0_only = "AGPL-1.0-only" -licenseId AGPL_1_0_or_later = "AGPL-1.0-or-later" -licenseId AGPL_3_0_only = "AGPL-3.0-only" -licenseId AGPL_3_0_or_later = "AGPL-3.0-or-later" -licenseId Aladdin = "Aladdin" -licenseId AMDPLPA = "AMDPLPA" -licenseId AML = "AML" -licenseId AMPAS = "AMPAS" -licenseId ANTLR_PD = "ANTLR-PD" -licenseId Apache_1_0 = "Apache-1.0" -licenseId Apache_1_1 = "Apache-1.1" -licenseId Apache_2_0 = "Apache-2.0" -licenseId APAFML = "APAFML" -licenseId APL_1_0 = "APL-1.0" -licenseId APSL_1_0 = "APSL-1.0" -licenseId APSL_1_1 = "APSL-1.1" -licenseId APSL_1_2 = "APSL-1.2" -licenseId APSL_2_0 = "APSL-2.0" -licenseId Artistic_1_0_cl8 = "Artistic-1.0-cl8" -licenseId Artistic_1_0_Perl 
= "Artistic-1.0-Perl" -licenseId Artistic_1_0 = "Artistic-1.0" -licenseId Artistic_2_0 = "Artistic-2.0" -licenseId Bahyph = "Bahyph" -licenseId Barr = "Barr" -licenseId Beerware = "Beerware" -licenseId BitTorrent_1_0 = "BitTorrent-1.0" -licenseId BitTorrent_1_1 = "BitTorrent-1.1" -licenseId Borceux = "Borceux" -licenseId BSD_1_Clause = "BSD-1-Clause" -licenseId BSD_2_Clause_FreeBSD = "BSD-2-Clause-FreeBSD" -licenseId BSD_2_Clause_NetBSD = "BSD-2-Clause-NetBSD" -licenseId BSD_2_Clause_Patent = "BSD-2-Clause-Patent" -licenseId BSD_2_Clause = "BSD-2-Clause" -licenseId BSD_3_Clause_Attribution = "BSD-3-Clause-Attribution" -licenseId BSD_3_Clause_Clear = "BSD-3-Clause-Clear" -licenseId BSD_3_Clause_LBNL = "BSD-3-Clause-LBNL" -licenseId BSD_3_Clause_No_Nuclear_License_2014 = "BSD-3-Clause-No-Nuclear-License-2014" -licenseId BSD_3_Clause_No_Nuclear_License = "BSD-3-Clause-No-Nuclear-License" -licenseId BSD_3_Clause_No_Nuclear_Warranty = "BSD-3-Clause-No-Nuclear-Warranty" -licenseId BSD_3_Clause = "BSD-3-Clause" -licenseId BSD_4_Clause_UC = "BSD-4-Clause-UC" -licenseId BSD_4_Clause = "BSD-4-Clause" -licenseId BSD_Protection = "BSD-Protection" -licenseId BSD_Source_Code = "BSD-Source-Code" -licenseId BSL_1_0 = "BSL-1.0" -licenseId Bzip2_1_0_5 = "bzip2-1.0.5" -licenseId Bzip2_1_0_6 = "bzip2-1.0.6" -licenseId Caldera = "Caldera" -licenseId CATOSL_1_1 = "CATOSL-1.1" -licenseId CC_BY_1_0 = "CC-BY-1.0" -licenseId CC_BY_2_0 = "CC-BY-2.0" -licenseId CC_BY_2_5 = "CC-BY-2.5" -licenseId CC_BY_3_0 = "CC-BY-3.0" -licenseId CC_BY_4_0 = "CC-BY-4.0" -licenseId CC_BY_NC_1_0 = "CC-BY-NC-1.0" -licenseId CC_BY_NC_2_0 = "CC-BY-NC-2.0" -licenseId CC_BY_NC_2_5 = "CC-BY-NC-2.5" -licenseId CC_BY_NC_3_0 = "CC-BY-NC-3.0" -licenseId CC_BY_NC_4_0 = "CC-BY-NC-4.0" -licenseId CC_BY_NC_ND_1_0 = "CC-BY-NC-ND-1.0" -licenseId CC_BY_NC_ND_2_0 = "CC-BY-NC-ND-2.0" -licenseId CC_BY_NC_ND_2_5 = "CC-BY-NC-ND-2.5" -licenseId CC_BY_NC_ND_3_0 = "CC-BY-NC-ND-3.0" -licenseId CC_BY_NC_ND_4_0 = "CC-BY-NC-ND-4.0" 
-licenseId CC_BY_NC_SA_1_0 = "CC-BY-NC-SA-1.0" -licenseId CC_BY_NC_SA_2_0 = "CC-BY-NC-SA-2.0" -licenseId CC_BY_NC_SA_2_5 = "CC-BY-NC-SA-2.5" -licenseId CC_BY_NC_SA_3_0 = "CC-BY-NC-SA-3.0" -licenseId CC_BY_NC_SA_4_0 = "CC-BY-NC-SA-4.0" -licenseId CC_BY_ND_1_0 = "CC-BY-ND-1.0" -licenseId CC_BY_ND_2_0 = "CC-BY-ND-2.0" -licenseId CC_BY_ND_2_5 = "CC-BY-ND-2.5" -licenseId CC_BY_ND_3_0 = "CC-BY-ND-3.0" -licenseId CC_BY_ND_4_0 = "CC-BY-ND-4.0" -licenseId CC_BY_SA_1_0 = "CC-BY-SA-1.0" -licenseId CC_BY_SA_2_0 = "CC-BY-SA-2.0" -licenseId CC_BY_SA_2_5 = "CC-BY-SA-2.5" -licenseId CC_BY_SA_3_0 = "CC-BY-SA-3.0" -licenseId CC_BY_SA_4_0 = "CC-BY-SA-4.0" -licenseId CC0_1_0 = "CC0-1.0" -licenseId CDDL_1_0 = "CDDL-1.0" -licenseId CDDL_1_1 = "CDDL-1.1" -licenseId CDLA_Permissive_1_0 = "CDLA-Permissive-1.0" -licenseId CDLA_Sharing_1_0 = "CDLA-Sharing-1.0" -licenseId CECILL_1_0 = "CECILL-1.0" -licenseId CECILL_1_1 = "CECILL-1.1" -licenseId CECILL_2_0 = "CECILL-2.0" -licenseId CECILL_2_1 = "CECILL-2.1" -licenseId CECILL_B = "CECILL-B" -licenseId CECILL_C = "CECILL-C" -licenseId ClArtistic = "ClArtistic" -licenseId CNRI_Jython = "CNRI-Jython" -licenseId CNRI_Python_GPL_Compatible = "CNRI-Python-GPL-Compatible" -licenseId CNRI_Python = "CNRI-Python" -licenseId Condor_1_1 = "Condor-1.1" -licenseId CPAL_1_0 = "CPAL-1.0" -licenseId CPL_1_0 = "CPL-1.0" -licenseId CPOL_1_02 = "CPOL-1.02" -licenseId Crossword = "Crossword" -licenseId CrystalStacker = "CrystalStacker" -licenseId CUA_OPL_1_0 = "CUA-OPL-1.0" -licenseId Cube = "Cube" -licenseId Curl = "curl" -licenseId D_FSL_1_0 = "D-FSL-1.0" -licenseId Diffmark = "diffmark" -licenseId DOC = "DOC" -licenseId Dotseqn = "Dotseqn" -licenseId DSDP = "DSDP" -licenseId Dvipdfm = "dvipdfm" -licenseId ECL_1_0 = "ECL-1.0" -licenseId ECL_2_0 = "ECL-2.0" -licenseId EFL_1_0 = "EFL-1.0" -licenseId EFL_2_0 = "EFL-2.0" -licenseId EGenix = "eGenix" -licenseId Entessa = "Entessa" -licenseId EPL_1_0 = "EPL-1.0" -licenseId EPL_2_0 = "EPL-2.0" -licenseId ErlPL_1_1 = 
"ErlPL-1.1" -licenseId EUDatagrid = "EUDatagrid" -licenseId EUPL_1_0 = "EUPL-1.0" -licenseId EUPL_1_1 = "EUPL-1.1" -licenseId EUPL_1_2 = "EUPL-1.2" -licenseId Eurosym = "Eurosym" -licenseId Fair = "Fair" -licenseId Frameworx_1_0 = "Frameworx-1.0" -licenseId FreeImage = "FreeImage" -licenseId FSFAP = "FSFAP" -licenseId FSFUL = "FSFUL" -licenseId FSFULLR = "FSFULLR" -licenseId FTL = "FTL" -licenseId GFDL_1_1_only = "GFDL-1.1-only" -licenseId GFDL_1_1_or_later = "GFDL-1.1-or-later" -licenseId GFDL_1_2_only = "GFDL-1.2-only" -licenseId GFDL_1_2_or_later = "GFDL-1.2-or-later" -licenseId GFDL_1_3_only = "GFDL-1.3-only" -licenseId GFDL_1_3_or_later = "GFDL-1.3-or-later" -licenseId Giftware = "Giftware" -licenseId GL2PS = "GL2PS" -licenseId Glide = "Glide" -licenseId Glulxe = "Glulxe" -licenseId Gnuplot = "gnuplot" -licenseId GPL_1_0_only = "GPL-1.0-only" -licenseId GPL_1_0_or_later = "GPL-1.0-or-later" -licenseId GPL_2_0_only = "GPL-2.0-only" -licenseId GPL_2_0_or_later = "GPL-2.0-or-later" -licenseId GPL_3_0_only = "GPL-3.0-only" -licenseId GPL_3_0_or_later = "GPL-3.0-or-later" -licenseId GSOAP_1_3b = "gSOAP-1.3b" -licenseId HaskellReport = "HaskellReport" -licenseId HPND = "HPND" -licenseId IBM_pibs = "IBM-pibs" -licenseId ICU = "ICU" -licenseId IJG = "IJG" -licenseId ImageMagick = "ImageMagick" -licenseId IMatix = "iMatix" -licenseId Imlib2 = "Imlib2" -licenseId Info_ZIP = "Info-ZIP" -licenseId Intel_ACPI = "Intel-ACPI" -licenseId Intel = "Intel" -licenseId Interbase_1_0 = "Interbase-1.0" -licenseId IPA = "IPA" -licenseId IPL_1_0 = "IPL-1.0" -licenseId ISC = "ISC" -licenseId JasPer_2_0 = "JasPer-2.0" -licenseId JSON = "JSON" -licenseId LAL_1_2 = "LAL-1.2" -licenseId LAL_1_3 = "LAL-1.3" -licenseId Latex2e = "Latex2e" -licenseId Leptonica = "Leptonica" -licenseId LGPL_2_0_only = "LGPL-2.0-only" -licenseId LGPL_2_0_or_later = "LGPL-2.0-or-later" -licenseId LGPL_2_1_only = "LGPL-2.1-only" -licenseId LGPL_2_1_or_later = "LGPL-2.1-or-later" -licenseId LGPL_3_0_only = 
"LGPL-3.0-only" -licenseId LGPL_3_0_or_later = "LGPL-3.0-or-later" -licenseId LGPLLR = "LGPLLR" -licenseId Libpng = "Libpng" -licenseId Libtiff = "libtiff" -licenseId LiLiQ_P_1_1 = "LiLiQ-P-1.1" -licenseId LiLiQ_R_1_1 = "LiLiQ-R-1.1" -licenseId LiLiQ_Rplus_1_1 = "LiLiQ-Rplus-1.1" -licenseId Linux_OpenIB = "Linux-OpenIB" -licenseId LPL_1_0 = "LPL-1.0" -licenseId LPL_1_02 = "LPL-1.02" -licenseId LPPL_1_0 = "LPPL-1.0" -licenseId LPPL_1_1 = "LPPL-1.1" -licenseId LPPL_1_2 = "LPPL-1.2" -licenseId LPPL_1_3a = "LPPL-1.3a" -licenseId LPPL_1_3c = "LPPL-1.3c" -licenseId MakeIndex = "MakeIndex" -licenseId MirOS = "MirOS" -licenseId MIT_0 = "MIT-0" -licenseId MIT_advertising = "MIT-advertising" -licenseId MIT_CMU = "MIT-CMU" -licenseId MIT_enna = "MIT-enna" -licenseId MIT_feh = "MIT-feh" -licenseId MIT = "MIT" -licenseId MITNFA = "MITNFA" -licenseId Motosoto = "Motosoto" -licenseId Mpich2 = "mpich2" -licenseId MPL_1_0 = "MPL-1.0" -licenseId MPL_1_1 = "MPL-1.1" -licenseId MPL_2_0_no_copyleft_exception = "MPL-2.0-no-copyleft-exception" -licenseId MPL_2_0 = "MPL-2.0" -licenseId MS_PL = "MS-PL" -licenseId MS_RL = "MS-RL" -licenseId MTLL = "MTLL" -licenseId Multics = "Multics" -licenseId Mup = "Mup" -licenseId NASA_1_3 = "NASA-1.3" -licenseId Naumen = "Naumen" -licenseId NBPL_1_0 = "NBPL-1.0" -licenseId NCSA = "NCSA" -licenseId Net_SNMP = "Net-SNMP" -licenseId NetCDF = "NetCDF" -licenseId Newsletr = "Newsletr" -licenseId NGPL = "NGPL" -licenseId NLOD_1_0 = "NLOD-1.0" -licenseId NLPL = "NLPL" -licenseId Nokia = "Nokia" -licenseId NOSL = "NOSL" -licenseId Noweb = "Noweb" -licenseId NPL_1_0 = "NPL-1.0" -licenseId NPL_1_1 = "NPL-1.1" -licenseId NPOSL_3_0 = "NPOSL-3.0" -licenseId NRL = "NRL" -licenseId NTP = "NTP" -licenseId OCCT_PL = "OCCT-PL" -licenseId OCLC_2_0 = "OCLC-2.0" -licenseId ODbL_1_0 = "ODbL-1.0" -licenseId ODC_By_1_0 = "ODC-By-1.0" -licenseId OFL_1_0 = "OFL-1.0" -licenseId OFL_1_1 = "OFL-1.1" -licenseId OGTSL = "OGTSL" -licenseId OLDAP_1_1 = "OLDAP-1.1" -licenseId OLDAP_1_2 
= "OLDAP-1.2" -licenseId OLDAP_1_3 = "OLDAP-1.3" -licenseId OLDAP_1_4 = "OLDAP-1.4" -licenseId OLDAP_2_0_1 = "OLDAP-2.0.1" -licenseId OLDAP_2_0 = "OLDAP-2.0" -licenseId OLDAP_2_1 = "OLDAP-2.1" -licenseId OLDAP_2_2_1 = "OLDAP-2.2.1" -licenseId OLDAP_2_2_2 = "OLDAP-2.2.2" -licenseId OLDAP_2_2 = "OLDAP-2.2" -licenseId OLDAP_2_3 = "OLDAP-2.3" -licenseId OLDAP_2_4 = "OLDAP-2.4" -licenseId OLDAP_2_5 = "OLDAP-2.5" -licenseId OLDAP_2_6 = "OLDAP-2.6" -licenseId OLDAP_2_7 = "OLDAP-2.7" -licenseId OLDAP_2_8 = "OLDAP-2.8" -licenseId OML = "OML" -licenseId OpenSSL = "OpenSSL" -licenseId OPL_1_0 = "OPL-1.0" -licenseId OSET_PL_2_1 = "OSET-PL-2.1" -licenseId OSL_1_0 = "OSL-1.0" -licenseId OSL_1_1 = "OSL-1.1" -licenseId OSL_2_0 = "OSL-2.0" -licenseId OSL_2_1 = "OSL-2.1" -licenseId OSL_3_0 = "OSL-3.0" -licenseId PDDL_1_0 = "PDDL-1.0" -licenseId PHP_3_0 = "PHP-3.0" -licenseId PHP_3_01 = "PHP-3.01" -licenseId Plexus = "Plexus" -licenseId PostgreSQL = "PostgreSQL" -licenseId Psfrag = "psfrag" -licenseId Psutils = "psutils" -licenseId Python_2_0 = "Python-2.0" -licenseId Qhull = "Qhull" -licenseId QPL_1_0 = "QPL-1.0" -licenseId Rdisc = "Rdisc" -licenseId RHeCos_1_1 = "RHeCos-1.1" -licenseId RPL_1_1 = "RPL-1.1" -licenseId RPL_1_5 = "RPL-1.5" -licenseId RPSL_1_0 = "RPSL-1.0" -licenseId RSA_MD = "RSA-MD" -licenseId RSCPL = "RSCPL" -licenseId Ruby = "Ruby" -licenseId SAX_PD = "SAX-PD" -licenseId Saxpath = "Saxpath" -licenseId SCEA = "SCEA" -licenseId Sendmail = "Sendmail" -licenseId SGI_B_1_0 = "SGI-B-1.0" -licenseId SGI_B_1_1 = "SGI-B-1.1" -licenseId SGI_B_2_0 = "SGI-B-2.0" -licenseId SimPL_2_0 = "SimPL-2.0" -licenseId SISSL_1_2 = "SISSL-1.2" -licenseId SISSL = "SISSL" -licenseId Sleepycat = "Sleepycat" -licenseId SMLNJ = "SMLNJ" -licenseId SMPPL = "SMPPL" -licenseId SNIA = "SNIA" -licenseId Spencer_86 = "Spencer-86" -licenseId Spencer_94 = "Spencer-94" -licenseId Spencer_99 = "Spencer-99" -licenseId SPL_1_0 = "SPL-1.0" -licenseId SugarCRM_1_1_3 = "SugarCRM-1.1.3" -licenseId SWL = "SWL" 
-licenseId TCL = "TCL" -licenseId TCP_wrappers = "TCP-wrappers" -licenseId TMate = "TMate" -licenseId TORQUE_1_1 = "TORQUE-1.1" -licenseId TOSL = "TOSL" -licenseId TU_Berlin_1_0 = "TU-Berlin-1.0" -licenseId TU_Berlin_2_0 = "TU-Berlin-2.0" -licenseId Unicode_DFS_2015 = "Unicode-DFS-2015" -licenseId Unicode_DFS_2016 = "Unicode-DFS-2016" -licenseId Unicode_TOU = "Unicode-TOU" -licenseId Unlicense = "Unlicense" -licenseId UPL_1_0 = "UPL-1.0" -licenseId Vim = "Vim" -licenseId VOSTROM = "VOSTROM" -licenseId VSL_1_0 = "VSL-1.0" -licenseId W3C_19980720 = "W3C-19980720" -licenseId W3C_20150513 = "W3C-20150513" -licenseId W3C = "W3C" -licenseId Watcom_1_0 = "Watcom-1.0" -licenseId Wsuipa = "Wsuipa" -licenseId WTFPL = "WTFPL" -licenseId X11 = "X11" -licenseId Xerox = "Xerox" -licenseId XFree86_1_1 = "XFree86-1.1" -licenseId Xinetd = "xinetd" -licenseId Xnet = "Xnet" -licenseId Xpp = "xpp" -licenseId XSkat = "XSkat" -licenseId YPL_1_0 = "YPL-1.0" -licenseId YPL_1_1 = "YPL-1.1" -licenseId Zed = "Zed" -licenseId Zend_2_0 = "Zend-2.0" -licenseId Zimbra_1_3 = "Zimbra-1.3" -licenseId Zimbra_1_4 = "Zimbra-1.4" -licenseId Zlib_acknowledgement = "zlib-acknowledgement" -licenseId Zlib = "Zlib" -licenseId ZPL_1_1 = "ZPL-1.1" -licenseId ZPL_2_0 = "ZPL-2.0" -licenseId ZPL_2_1 = "ZPL-2.1" - --- | License name, e.g. 
@"GNU General Public License v2.0 only"@ -licenseName :: LicenseId -> String -licenseName NullBSD = "BSD Zero Clause License" -licenseName AAL = "Attribution Assurance License" -licenseName Abstyles = "Abstyles License" -licenseName Adobe_2006 = "Adobe Systems Incorporated Source Code License Agreement" -licenseName Adobe_Glyph = "Adobe Glyph List License" -licenseName ADSL = "Amazon Digital Services License" -licenseName AFL_1_1 = "Academic Free License v1.1" -licenseName AFL_1_2 = "Academic Free License v1.2" -licenseName AFL_2_0 = "Academic Free License v2.0" -licenseName AFL_2_1 = "Academic Free License v2.1" -licenseName AFL_3_0 = "Academic Free License v3.0" -licenseName Afmparse = "Afmparse License" -licenseName AGPL_1_0 = "Affero General Public License v1.0" -licenseName AGPL_1_0_only = "Affero General Public License v1.0 only" -licenseName AGPL_1_0_or_later = "Affero General Public License v1.0 or later" -licenseName AGPL_3_0_only = "GNU Affero General Public License v3.0 only" -licenseName AGPL_3_0_or_later = "GNU Affero General Public License v3.0 or later" -licenseName Aladdin = "Aladdin Free Public License" -licenseName AMDPLPA = "AMD's plpa_map.c License" -licenseName AML = "Apple MIT License" -licenseName AMPAS = "Academy of Motion Picture Arts and Sciences BSD" -licenseName ANTLR_PD = "ANTLR Software Rights Notice" -licenseName Apache_1_0 = "Apache License 1.0" -licenseName Apache_1_1 = "Apache License 1.1" -licenseName Apache_2_0 = "Apache License 2.0" -licenseName APAFML = "Adobe Postscript AFM License" -licenseName APL_1_0 = "Adaptive Public License 1.0" -licenseName APSL_1_0 = "Apple Public Source License 1.0" -licenseName APSL_1_1 = "Apple Public Source License 1.1" -licenseName APSL_1_2 = "Apple Public Source License 1.2" -licenseName APSL_2_0 = "Apple Public Source License 2.0" -licenseName Artistic_1_0_cl8 = "Artistic License 1.0 w/clause 8" -licenseName Artistic_1_0_Perl = "Artistic License 1.0 (Perl)" -licenseName Artistic_1_0 = "Artistic 
License 1.0" -licenseName Artistic_2_0 = "Artistic License 2.0" -licenseName Bahyph = "Bahyph License" -licenseName Barr = "Barr License" -licenseName Beerware = "Beerware License" -licenseName BitTorrent_1_0 = "BitTorrent Open Source License v1.0" -licenseName BitTorrent_1_1 = "BitTorrent Open Source License v1.1" -licenseName Borceux = "Borceux license" -licenseName BSD_1_Clause = "BSD 1-Clause License" -licenseName BSD_2_Clause_FreeBSD = "BSD 2-Clause FreeBSD License" -licenseName BSD_2_Clause_NetBSD = "BSD 2-Clause NetBSD License" -licenseName BSD_2_Clause_Patent = "BSD-2-Clause Plus Patent License" -licenseName BSD_2_Clause = "BSD 2-Clause \"Simplified\" License" -licenseName BSD_3_Clause_Attribution = "BSD with attribution" -licenseName BSD_3_Clause_Clear = "BSD 3-Clause Clear License" -licenseName BSD_3_Clause_LBNL = "Lawrence Berkeley National Labs BSD variant license" -licenseName BSD_3_Clause_No_Nuclear_License_2014 = "BSD 3-Clause No Nuclear License 2014" -licenseName BSD_3_Clause_No_Nuclear_License = "BSD 3-Clause No Nuclear License" -licenseName BSD_3_Clause_No_Nuclear_Warranty = "BSD 3-Clause No Nuclear Warranty" -licenseName BSD_3_Clause = "BSD 3-Clause \"New\" or \"Revised\" License" -licenseName BSD_4_Clause_UC = "BSD-4-Clause (University of California-Specific)" -licenseName BSD_4_Clause = "BSD 4-Clause \"Original\" or \"Old\" License" -licenseName BSD_Protection = "BSD Protection License" -licenseName BSD_Source_Code = "BSD Source Code Attribution" -licenseName BSL_1_0 = "Boost Software License 1.0" -licenseName Bzip2_1_0_5 = "bzip2 and libbzip2 License v1.0.5" -licenseName Bzip2_1_0_6 = "bzip2 and libbzip2 License v1.0.6" -licenseName Caldera = "Caldera License" -licenseName CATOSL_1_1 = "Computer Associates Trusted Open Source License 1.1" -licenseName CC_BY_1_0 = "Creative Commons Attribution 1.0 Generic" -licenseName CC_BY_2_0 = "Creative Commons Attribution 2.0 Generic" -licenseName CC_BY_2_5 = "Creative Commons Attribution 2.5 Generic" 
-licenseName CC_BY_3_0 = "Creative Commons Attribution 3.0 Unported" -licenseName CC_BY_4_0 = "Creative Commons Attribution 4.0 International" -licenseName CC_BY_NC_1_0 = "Creative Commons Attribution Non Commercial 1.0 Generic" -licenseName CC_BY_NC_2_0 = "Creative Commons Attribution Non Commercial 2.0 Generic" -licenseName CC_BY_NC_2_5 = "Creative Commons Attribution Non Commercial 2.5 Generic" -licenseName CC_BY_NC_3_0 = "Creative Commons Attribution Non Commercial 3.0 Unported" -licenseName CC_BY_NC_4_0 = "Creative Commons Attribution Non Commercial 4.0 International" -licenseName CC_BY_NC_ND_1_0 = "Creative Commons Attribution Non Commercial No Derivatives 1.0 Generic" -licenseName CC_BY_NC_ND_2_0 = "Creative Commons Attribution Non Commercial No Derivatives 2.0 Generic" -licenseName CC_BY_NC_ND_2_5 = "Creative Commons Attribution Non Commercial No Derivatives 2.5 Generic" -licenseName CC_BY_NC_ND_3_0 = "Creative Commons Attribution Non Commercial No Derivatives 3.0 Unported" -licenseName CC_BY_NC_ND_4_0 = "Creative Commons Attribution Non Commercial No Derivatives 4.0 International" -licenseName CC_BY_NC_SA_1_0 = "Creative Commons Attribution Non Commercial Share Alike 1.0 Generic" -licenseName CC_BY_NC_SA_2_0 = "Creative Commons Attribution Non Commercial Share Alike 2.0 Generic" -licenseName CC_BY_NC_SA_2_5 = "Creative Commons Attribution Non Commercial Share Alike 2.5 Generic" -licenseName CC_BY_NC_SA_3_0 = "Creative Commons Attribution Non Commercial Share Alike 3.0 Unported" -licenseName CC_BY_NC_SA_4_0 = "Creative Commons Attribution Non Commercial Share Alike 4.0 International" -licenseName CC_BY_ND_1_0 = "Creative Commons Attribution No Derivatives 1.0 Generic" -licenseName CC_BY_ND_2_0 = "Creative Commons Attribution No Derivatives 2.0 Generic" -licenseName CC_BY_ND_2_5 = "Creative Commons Attribution No Derivatives 2.5 Generic" -licenseName CC_BY_ND_3_0 = "Creative Commons Attribution No Derivatives 3.0 Unported" -licenseName CC_BY_ND_4_0 = 
"Creative Commons Attribution No Derivatives 4.0 International" -licenseName CC_BY_SA_1_0 = "Creative Commons Attribution Share Alike 1.0 Generic" -licenseName CC_BY_SA_2_0 = "Creative Commons Attribution Share Alike 2.0 Generic" -licenseName CC_BY_SA_2_5 = "Creative Commons Attribution Share Alike 2.5 Generic" -licenseName CC_BY_SA_3_0 = "Creative Commons Attribution Share Alike 3.0 Unported" -licenseName CC_BY_SA_4_0 = "Creative Commons Attribution Share Alike 4.0 International" -licenseName CC0_1_0 = "Creative Commons Zero v1.0 Universal" -licenseName CDDL_1_0 = "Common Development and Distribution License 1.0" -licenseName CDDL_1_1 = "Common Development and Distribution License 1.1" -licenseName CDLA_Permissive_1_0 = "Community Data License Agreement Permissive 1.0" -licenseName CDLA_Sharing_1_0 = "Community Data License Agreement Sharing 1.0" -licenseName CECILL_1_0 = "CeCILL Free Software License Agreement v1.0" -licenseName CECILL_1_1 = "CeCILL Free Software License Agreement v1.1" -licenseName CECILL_2_0 = "CeCILL Free Software License Agreement v2.0" -licenseName CECILL_2_1 = "CeCILL Free Software License Agreement v2.1" -licenseName CECILL_B = "CeCILL-B Free Software License Agreement" -licenseName CECILL_C = "CeCILL-C Free Software License Agreement" -licenseName ClArtistic = "Clarified Artistic License" -licenseName CNRI_Jython = "CNRI Jython License" -licenseName CNRI_Python_GPL_Compatible = "CNRI Python Open Source GPL Compatible License Agreement" -licenseName CNRI_Python = "CNRI Python License" -licenseName Condor_1_1 = "Condor Public License v1.1" -licenseName CPAL_1_0 = "Common Public Attribution License 1.0" -licenseName CPL_1_0 = "Common Public License 1.0" -licenseName CPOL_1_02 = "Code Project Open License 1.02" -licenseName Crossword = "Crossword License" -licenseName CrystalStacker = "CrystalStacker License" -licenseName CUA_OPL_1_0 = "CUA Office Public License v1.0" -licenseName Cube = "Cube License" -licenseName Curl = "curl License" 
-licenseName D_FSL_1_0 = "Deutsche Freie Software Lizenz" -licenseName Diffmark = "diffmark license" -licenseName DOC = "DOC License" -licenseName Dotseqn = "Dotseqn License" -licenseName DSDP = "DSDP License" -licenseName Dvipdfm = "dvipdfm License" -licenseName ECL_1_0 = "Educational Community License v1.0" -licenseName ECL_2_0 = "Educational Community License v2.0" -licenseName EFL_1_0 = "Eiffel Forum License v1.0" -licenseName EFL_2_0 = "Eiffel Forum License v2.0" -licenseName EGenix = "eGenix.com Public License 1.1.0" -licenseName Entessa = "Entessa Public License v1.0" -licenseName EPL_1_0 = "Eclipse Public License 1.0" -licenseName EPL_2_0 = "Eclipse Public License 2.0" -licenseName ErlPL_1_1 = "Erlang Public License v1.1" -licenseName EUDatagrid = "EU DataGrid Software License" -licenseName EUPL_1_0 = "European Union Public License 1.0" -licenseName EUPL_1_1 = "European Union Public License 1.1" -licenseName EUPL_1_2 = "European Union Public License 1.2" -licenseName Eurosym = "Eurosym License" -licenseName Fair = "Fair License" -licenseName Frameworx_1_0 = "Frameworx Open License 1.0" -licenseName FreeImage = "FreeImage Public License v1.0" -licenseName FSFAP = "FSF All Permissive License" -licenseName FSFUL = "FSF Unlimited License" -licenseName FSFULLR = "FSF Unlimited License (with License Retention)" -licenseName FTL = "Freetype Project License" -licenseName GFDL_1_1_only = "GNU Free Documentation License v1.1 only" -licenseName GFDL_1_1_or_later = "GNU Free Documentation License v1.1 or later" -licenseName GFDL_1_2_only = "GNU Free Documentation License v1.2 only" -licenseName GFDL_1_2_or_later = "GNU Free Documentation License v1.2 or later" -licenseName GFDL_1_3_only = "GNU Free Documentation License v1.3 only" -licenseName GFDL_1_3_or_later = "GNU Free Documentation License v1.3 or later" -licenseName Giftware = "Giftware License" -licenseName GL2PS = "GL2PS License" -licenseName Glide = "3dfx Glide License" -licenseName Glulxe = "Glulxe License" 
-licenseName Gnuplot = "gnuplot License" -licenseName GPL_1_0_only = "GNU General Public License v1.0 only" -licenseName GPL_1_0_or_later = "GNU General Public License v1.0 or later" -licenseName GPL_2_0_only = "GNU General Public License v2.0 only" -licenseName GPL_2_0_or_later = "GNU General Public License v2.0 or later" -licenseName GPL_3_0_only = "GNU General Public License v3.0 only" -licenseName GPL_3_0_or_later = "GNU General Public License v3.0 or later" -licenseName GSOAP_1_3b = "gSOAP Public License v1.3b" -licenseName HaskellReport = "Haskell Language Report License" -licenseName HPND = "Historical Permission Notice and Disclaimer" -licenseName IBM_pibs = "IBM PowerPC Initialization and Boot Software" -licenseName ICU = "ICU License" -licenseName IJG = "Independent JPEG Group License" -licenseName ImageMagick = "ImageMagick License" -licenseName IMatix = "iMatix Standard Function Library Agreement" -licenseName Imlib2 = "Imlib2 License" -licenseName Info_ZIP = "Info-ZIP License" -licenseName Intel_ACPI = "Intel ACPI Software License Agreement" -licenseName Intel = "Intel Open Source License" -licenseName Interbase_1_0 = "Interbase Public License v1.0" -licenseName IPA = "IPA Font License" -licenseName IPL_1_0 = "IBM Public License v1.0" -licenseName ISC = "ISC License" -licenseName JasPer_2_0 = "JasPer License" -licenseName JSON = "JSON License" -licenseName LAL_1_2 = "Licence Art Libre 1.2" -licenseName LAL_1_3 = "Licence Art Libre 1.3" -licenseName Latex2e = "Latex2e License" -licenseName Leptonica = "Leptonica License" -licenseName LGPL_2_0_only = "GNU Library General Public License v2 only" -licenseName LGPL_2_0_or_later = "GNU Library General Public License v2 or later" -licenseName LGPL_2_1_only = "GNU Lesser General Public License v2.1 only" -licenseName LGPL_2_1_or_later = "GNU Lesser General Public License v2.1 or later" -licenseName LGPL_3_0_only = "GNU Lesser General Public License v3.0 only" -licenseName LGPL_3_0_or_later = "GNU Lesser 
General Public License v3.0 or later" -licenseName LGPLLR = "Lesser General Public License For Linguistic Resources" -licenseName Libpng = "libpng License" -licenseName Libtiff = "libtiff License" -licenseName LiLiQ_P_1_1 = "Licence Libre du Qu\233bec \8211 Permissive version 1.1" -licenseName LiLiQ_R_1_1 = "Licence Libre du Qu\233bec \8211 R\233ciprocit\233 version 1.1" -licenseName LiLiQ_Rplus_1_1 = "Licence Libre du Qu\233bec \8211 R\233ciprocit\233 forte version 1.1" -licenseName Linux_OpenIB = "Linux Kernel Variant of OpenIB.org license" -licenseName LPL_1_0 = "Lucent Public License Version 1.0" -licenseName LPL_1_02 = "Lucent Public License v1.02" -licenseName LPPL_1_0 = "LaTeX Project Public License v1.0" -licenseName LPPL_1_1 = "LaTeX Project Public License v1.1" -licenseName LPPL_1_2 = "LaTeX Project Public License v1.2" -licenseName LPPL_1_3a = "LaTeX Project Public License v1.3a" -licenseName LPPL_1_3c = "LaTeX Project Public License v1.3c" -licenseName MakeIndex = "MakeIndex License" -licenseName MirOS = "MirOS License" -licenseName MIT_0 = "MIT No Attribution" -licenseName MIT_advertising = "Enlightenment License (e16)" -licenseName MIT_CMU = "CMU License" -licenseName MIT_enna = "enna License" -licenseName MIT_feh = "feh License" -licenseName MIT = "MIT License" -licenseName MITNFA = "MIT +no-false-attribs license" -licenseName Motosoto = "Motosoto License" -licenseName Mpich2 = "mpich2 License" -licenseName MPL_1_0 = "Mozilla Public License 1.0" -licenseName MPL_1_1 = "Mozilla Public License 1.1" -licenseName MPL_2_0_no_copyleft_exception = "Mozilla Public License 2.0 (no copyleft exception)" -licenseName MPL_2_0 = "Mozilla Public License 2.0" -licenseName MS_PL = "Microsoft Public License" -licenseName MS_RL = "Microsoft Reciprocal License" -licenseName MTLL = "Matrix Template Library License" -licenseName Multics = "Multics License" -licenseName Mup = "Mup License" -licenseName NASA_1_3 = "NASA Open Source Agreement 1.3" -licenseName Naumen = 
"Naumen Public License" -licenseName NBPL_1_0 = "Net Boolean Public License v1" -licenseName NCSA = "University of Illinois/NCSA Open Source License" -licenseName Net_SNMP = "Net-SNMP License" -licenseName NetCDF = "NetCDF license" -licenseName Newsletr = "Newsletr License" -licenseName NGPL = "Nethack General Public License" -licenseName NLOD_1_0 = "Norwegian Licence for Open Government Data" -licenseName NLPL = "No Limit Public License" -licenseName Nokia = "Nokia Open Source License" -licenseName NOSL = "Netizen Open Source License" -licenseName Noweb = "Noweb License" -licenseName NPL_1_0 = "Netscape Public License v1.0" -licenseName NPL_1_1 = "Netscape Public License v1.1" -licenseName NPOSL_3_0 = "Non-Profit Open Software License 3.0" -licenseName NRL = "NRL License" -licenseName NTP = "NTP License" -licenseName OCCT_PL = "Open CASCADE Technology Public License" -licenseName OCLC_2_0 = "OCLC Research Public License 2.0" -licenseName ODbL_1_0 = "ODC Open Database License v1.0" -licenseName ODC_By_1_0 = "Open Data Commons Attribution License v1.0" -licenseName OFL_1_0 = "SIL Open Font License 1.0" -licenseName OFL_1_1 = "SIL Open Font License 1.1" -licenseName OGTSL = "Open Group Test Suite License" -licenseName OLDAP_1_1 = "Open LDAP Public License v1.1" -licenseName OLDAP_1_2 = "Open LDAP Public License v1.2" -licenseName OLDAP_1_3 = "Open LDAP Public License v1.3" -licenseName OLDAP_1_4 = "Open LDAP Public License v1.4" -licenseName OLDAP_2_0_1 = "Open LDAP Public License v2.0.1" -licenseName OLDAP_2_0 = "Open LDAP Public License v2.0 (or possibly 2.0A and 2.0B)" -licenseName OLDAP_2_1 = "Open LDAP Public License v2.1" -licenseName OLDAP_2_2_1 = "Open LDAP Public License v2.2.1" -licenseName OLDAP_2_2_2 = "Open LDAP Public License 2.2.2" -licenseName OLDAP_2_2 = "Open LDAP Public License v2.2" -licenseName OLDAP_2_3 = "Open LDAP Public License v2.3" -licenseName OLDAP_2_4 = "Open LDAP Public License v2.4" -licenseName OLDAP_2_5 = "Open LDAP Public License 
v2.5" -licenseName OLDAP_2_6 = "Open LDAP Public License v2.6" -licenseName OLDAP_2_7 = "Open LDAP Public License v2.7" -licenseName OLDAP_2_8 = "Open LDAP Public License v2.8" -licenseName OML = "Open Market License" -licenseName OpenSSL = "OpenSSL License" -licenseName OPL_1_0 = "Open Public License v1.0" -licenseName OSET_PL_2_1 = "OSET Public License version 2.1" -licenseName OSL_1_0 = "Open Software License 1.0" -licenseName OSL_1_1 = "Open Software License 1.1" -licenseName OSL_2_0 = "Open Software License 2.0" -licenseName OSL_2_1 = "Open Software License 2.1" -licenseName OSL_3_0 = "Open Software License 3.0" -licenseName PDDL_1_0 = "ODC Public Domain Dedication & License 1.0" -licenseName PHP_3_0 = "PHP License v3.0" -licenseName PHP_3_01 = "PHP License v3.01" -licenseName Plexus = "Plexus Classworlds License" -licenseName PostgreSQL = "PostgreSQL License" -licenseName Psfrag = "psfrag License" -licenseName Psutils = "psutils License" -licenseName Python_2_0 = "Python License 2.0" -licenseName Qhull = "Qhull License" -licenseName QPL_1_0 = "Q Public License 1.0" -licenseName Rdisc = "Rdisc License" -licenseName RHeCos_1_1 = "Red Hat eCos Public License v1.1" -licenseName RPL_1_1 = "Reciprocal Public License 1.1" -licenseName RPL_1_5 = "Reciprocal Public License 1.5" -licenseName RPSL_1_0 = "RealNetworks Public Source License v1.0" -licenseName RSA_MD = "RSA Message-Digest License " -licenseName RSCPL = "Ricoh Source Code Public License" -licenseName Ruby = "Ruby License" -licenseName SAX_PD = "Sax Public Domain Notice" -licenseName Saxpath = "Saxpath License" -licenseName SCEA = "SCEA Shared Source License" -licenseName Sendmail = "Sendmail License" -licenseName SGI_B_1_0 = "SGI Free Software License B v1.0" -licenseName SGI_B_1_1 = "SGI Free Software License B v1.1" -licenseName SGI_B_2_0 = "SGI Free Software License B v2.0" -licenseName SimPL_2_0 = "Simple Public License 2.0" -licenseName SISSL_1_2 = "Sun Industry Standards Source License v1.2" 
-licenseName SISSL = "Sun Industry Standards Source License v1.1" -licenseName Sleepycat = "Sleepycat License" -licenseName SMLNJ = "Standard ML of New Jersey License" -licenseName SMPPL = "Secure Messaging Protocol Public License" -licenseName SNIA = "SNIA Public License 1.1" -licenseName Spencer_86 = "Spencer License 86" -licenseName Spencer_94 = "Spencer License 94" -licenseName Spencer_99 = "Spencer License 99" -licenseName SPL_1_0 = "Sun Public License v1.0" -licenseName SugarCRM_1_1_3 = "SugarCRM Public License v1.1.3" -licenseName SWL = "Scheme Widget Library (SWL) Software License Agreement" -licenseName TCL = "TCL/TK License" -licenseName TCP_wrappers = "TCP Wrappers License" -licenseName TMate = "TMate Open Source License" -licenseName TORQUE_1_1 = "TORQUE v2.5+ Software License v1.1" -licenseName TOSL = "Trusster Open Source License" -licenseName TU_Berlin_1_0 = "Technische Universitaet Berlin License 1.0" -licenseName TU_Berlin_2_0 = "Technische Universitaet Berlin License 2.0" -licenseName Unicode_DFS_2015 = "Unicode License Agreement - Data Files and Software (2015)" -licenseName Unicode_DFS_2016 = "Unicode License Agreement - Data Files and Software (2016)" -licenseName Unicode_TOU = "Unicode Terms of Use" -licenseName Unlicense = "The Unlicense" -licenseName UPL_1_0 = "Universal Permissive License v1.0" -licenseName Vim = "Vim License" -licenseName VOSTROM = "VOSTROM Public License for Open Source" -licenseName VSL_1_0 = "Vovida Software License v1.0" -licenseName W3C_19980720 = "W3C Software Notice and License (1998-07-20)" -licenseName W3C_20150513 = "W3C Software Notice and Document License (2015-05-13)" -licenseName W3C = "W3C Software Notice and License (2002-12-31)" -licenseName Watcom_1_0 = "Sybase Open Watcom Public License 1.0" -licenseName Wsuipa = "Wsuipa License" -licenseName WTFPL = "Do What The F*ck You Want To Public License" -licenseName X11 = "X11 License" -licenseName Xerox = "Xerox License" -licenseName XFree86_1_1 = "XFree86 
License 1.1" -licenseName Xinetd = "xinetd License" -licenseName Xnet = "X.Net License" -licenseName Xpp = "XPP License" -licenseName XSkat = "XSkat License" -licenseName YPL_1_0 = "Yahoo! Public License v1.0" -licenseName YPL_1_1 = "Yahoo! Public License v1.1" -licenseName Zed = "Zed License" -licenseName Zend_2_0 = "Zend License v2.0" -licenseName Zimbra_1_3 = "Zimbra Public License v1.3" -licenseName Zimbra_1_4 = "Zimbra Public License v1.4" -licenseName Zlib_acknowledgement = "zlib/libpng License with Acknowledgement" -licenseName Zlib = "zlib License" -licenseName ZPL_1_1 = "Zope Public License 1.1" -licenseName ZPL_2_0 = "Zope Public License 2.0" -licenseName ZPL_2_1 = "Zope Public License 2.1" - --- | Whether the license is approved by Open Source Initiative (OSI). --- --- See . -licenseIsOsiApproved :: LicenseId -> Bool -licenseIsOsiApproved NullBSD = False -licenseIsOsiApproved AAL = True -licenseIsOsiApproved Abstyles = False -licenseIsOsiApproved Adobe_2006 = False -licenseIsOsiApproved Adobe_Glyph = False -licenseIsOsiApproved ADSL = False -licenseIsOsiApproved AFL_1_1 = True -licenseIsOsiApproved AFL_1_2 = True -licenseIsOsiApproved AFL_2_0 = True -licenseIsOsiApproved AFL_2_1 = True -licenseIsOsiApproved AFL_3_0 = True -licenseIsOsiApproved Afmparse = False -licenseIsOsiApproved AGPL_1_0 = False -licenseIsOsiApproved AGPL_1_0_only = False -licenseIsOsiApproved AGPL_1_0_or_later = False -licenseIsOsiApproved AGPL_3_0_only = True -licenseIsOsiApproved AGPL_3_0_or_later = True -licenseIsOsiApproved Aladdin = False -licenseIsOsiApproved AMDPLPA = False -licenseIsOsiApproved AML = False -licenseIsOsiApproved AMPAS = False -licenseIsOsiApproved ANTLR_PD = False -licenseIsOsiApproved Apache_1_0 = False -licenseIsOsiApproved Apache_1_1 = True -licenseIsOsiApproved Apache_2_0 = True -licenseIsOsiApproved APAFML = False -licenseIsOsiApproved APL_1_0 = True -licenseIsOsiApproved APSL_1_0 = True -licenseIsOsiApproved APSL_1_1 = True -licenseIsOsiApproved APSL_1_2 
= True -licenseIsOsiApproved APSL_2_0 = True -licenseIsOsiApproved Artistic_1_0_cl8 = True -licenseIsOsiApproved Artistic_1_0_Perl = True -licenseIsOsiApproved Artistic_1_0 = True -licenseIsOsiApproved Artistic_2_0 = True -licenseIsOsiApproved Bahyph = False -licenseIsOsiApproved Barr = False -licenseIsOsiApproved Beerware = False -licenseIsOsiApproved BitTorrent_1_0 = False -licenseIsOsiApproved BitTorrent_1_1 = False -licenseIsOsiApproved Borceux = False -licenseIsOsiApproved BSD_1_Clause = False -licenseIsOsiApproved BSD_2_Clause_FreeBSD = False -licenseIsOsiApproved BSD_2_Clause_NetBSD = False -licenseIsOsiApproved BSD_2_Clause_Patent = True -licenseIsOsiApproved BSD_2_Clause = True -licenseIsOsiApproved BSD_3_Clause_Attribution = False -licenseIsOsiApproved BSD_3_Clause_Clear = False -licenseIsOsiApproved BSD_3_Clause_LBNL = False -licenseIsOsiApproved BSD_3_Clause_No_Nuclear_License_2014 = False -licenseIsOsiApproved BSD_3_Clause_No_Nuclear_License = False -licenseIsOsiApproved BSD_3_Clause_No_Nuclear_Warranty = False -licenseIsOsiApproved BSD_3_Clause = True -licenseIsOsiApproved BSD_4_Clause_UC = False -licenseIsOsiApproved BSD_4_Clause = False -licenseIsOsiApproved BSD_Protection = False -licenseIsOsiApproved BSD_Source_Code = False -licenseIsOsiApproved BSL_1_0 = True -licenseIsOsiApproved Bzip2_1_0_5 = False -licenseIsOsiApproved Bzip2_1_0_6 = False -licenseIsOsiApproved Caldera = False -licenseIsOsiApproved CATOSL_1_1 = True -licenseIsOsiApproved CC_BY_1_0 = False -licenseIsOsiApproved CC_BY_2_0 = False -licenseIsOsiApproved CC_BY_2_5 = False -licenseIsOsiApproved CC_BY_3_0 = False -licenseIsOsiApproved CC_BY_4_0 = False -licenseIsOsiApproved CC_BY_NC_1_0 = False -licenseIsOsiApproved CC_BY_NC_2_0 = False -licenseIsOsiApproved CC_BY_NC_2_5 = False -licenseIsOsiApproved CC_BY_NC_3_0 = False -licenseIsOsiApproved CC_BY_NC_4_0 = False -licenseIsOsiApproved CC_BY_NC_ND_1_0 = False -licenseIsOsiApproved CC_BY_NC_ND_2_0 = False -licenseIsOsiApproved 
CC_BY_NC_ND_2_5 = False -licenseIsOsiApproved CC_BY_NC_ND_3_0 = False -licenseIsOsiApproved CC_BY_NC_ND_4_0 = False -licenseIsOsiApproved CC_BY_NC_SA_1_0 = False -licenseIsOsiApproved CC_BY_NC_SA_2_0 = False -licenseIsOsiApproved CC_BY_NC_SA_2_5 = False -licenseIsOsiApproved CC_BY_NC_SA_3_0 = False -licenseIsOsiApproved CC_BY_NC_SA_4_0 = False -licenseIsOsiApproved CC_BY_ND_1_0 = False -licenseIsOsiApproved CC_BY_ND_2_0 = False -licenseIsOsiApproved CC_BY_ND_2_5 = False -licenseIsOsiApproved CC_BY_ND_3_0 = False -licenseIsOsiApproved CC_BY_ND_4_0 = False -licenseIsOsiApproved CC_BY_SA_1_0 = False -licenseIsOsiApproved CC_BY_SA_2_0 = False -licenseIsOsiApproved CC_BY_SA_2_5 = False -licenseIsOsiApproved CC_BY_SA_3_0 = False -licenseIsOsiApproved CC_BY_SA_4_0 = False -licenseIsOsiApproved CC0_1_0 = False -licenseIsOsiApproved CDDL_1_0 = True -licenseIsOsiApproved CDDL_1_1 = False -licenseIsOsiApproved CDLA_Permissive_1_0 = False -licenseIsOsiApproved CDLA_Sharing_1_0 = False -licenseIsOsiApproved CECILL_1_0 = False -licenseIsOsiApproved CECILL_1_1 = False -licenseIsOsiApproved CECILL_2_0 = False -licenseIsOsiApproved CECILL_2_1 = True -licenseIsOsiApproved CECILL_B = False -licenseIsOsiApproved CECILL_C = False -licenseIsOsiApproved ClArtistic = False -licenseIsOsiApproved CNRI_Jython = False -licenseIsOsiApproved CNRI_Python_GPL_Compatible = False -licenseIsOsiApproved CNRI_Python = True -licenseIsOsiApproved Condor_1_1 = False -licenseIsOsiApproved CPAL_1_0 = True -licenseIsOsiApproved CPL_1_0 = True -licenseIsOsiApproved CPOL_1_02 = False -licenseIsOsiApproved Crossword = False -licenseIsOsiApproved CrystalStacker = False -licenseIsOsiApproved CUA_OPL_1_0 = True -licenseIsOsiApproved Cube = False -licenseIsOsiApproved Curl = False -licenseIsOsiApproved D_FSL_1_0 = False -licenseIsOsiApproved Diffmark = False -licenseIsOsiApproved DOC = False -licenseIsOsiApproved Dotseqn = False -licenseIsOsiApproved DSDP = False -licenseIsOsiApproved Dvipdfm = False 
-licenseIsOsiApproved ECL_1_0 = True -licenseIsOsiApproved ECL_2_0 = True -licenseIsOsiApproved EFL_1_0 = True -licenseIsOsiApproved EFL_2_0 = True -licenseIsOsiApproved EGenix = False -licenseIsOsiApproved Entessa = True -licenseIsOsiApproved EPL_1_0 = True -licenseIsOsiApproved EPL_2_0 = True -licenseIsOsiApproved ErlPL_1_1 = False -licenseIsOsiApproved EUDatagrid = True -licenseIsOsiApproved EUPL_1_0 = False -licenseIsOsiApproved EUPL_1_1 = True -licenseIsOsiApproved EUPL_1_2 = True -licenseIsOsiApproved Eurosym = False -licenseIsOsiApproved Fair = True -licenseIsOsiApproved Frameworx_1_0 = True -licenseIsOsiApproved FreeImage = False -licenseIsOsiApproved FSFAP = False -licenseIsOsiApproved FSFUL = False -licenseIsOsiApproved FSFULLR = False -licenseIsOsiApproved FTL = False -licenseIsOsiApproved GFDL_1_1_only = False -licenseIsOsiApproved GFDL_1_1_or_later = False -licenseIsOsiApproved GFDL_1_2_only = False -licenseIsOsiApproved GFDL_1_2_or_later = False -licenseIsOsiApproved GFDL_1_3_only = False -licenseIsOsiApproved GFDL_1_3_or_later = False -licenseIsOsiApproved Giftware = False -licenseIsOsiApproved GL2PS = False -licenseIsOsiApproved Glide = False -licenseIsOsiApproved Glulxe = False -licenseIsOsiApproved Gnuplot = False -licenseIsOsiApproved GPL_1_0_only = False -licenseIsOsiApproved GPL_1_0_or_later = False -licenseIsOsiApproved GPL_2_0_only = True -licenseIsOsiApproved GPL_2_0_or_later = True -licenseIsOsiApproved GPL_3_0_only = True -licenseIsOsiApproved GPL_3_0_or_later = True -licenseIsOsiApproved GSOAP_1_3b = False -licenseIsOsiApproved HaskellReport = False -licenseIsOsiApproved HPND = True -licenseIsOsiApproved IBM_pibs = False -licenseIsOsiApproved ICU = False -licenseIsOsiApproved IJG = False -licenseIsOsiApproved ImageMagick = False -licenseIsOsiApproved IMatix = False -licenseIsOsiApproved Imlib2 = False -licenseIsOsiApproved Info_ZIP = False -licenseIsOsiApproved Intel_ACPI = False -licenseIsOsiApproved Intel = True -licenseIsOsiApproved 
Interbase_1_0 = False -licenseIsOsiApproved IPA = True -licenseIsOsiApproved IPL_1_0 = True -licenseIsOsiApproved ISC = True -licenseIsOsiApproved JasPer_2_0 = False -licenseIsOsiApproved JSON = False -licenseIsOsiApproved LAL_1_2 = False -licenseIsOsiApproved LAL_1_3 = False -licenseIsOsiApproved Latex2e = False -licenseIsOsiApproved Leptonica = False -licenseIsOsiApproved LGPL_2_0_only = True -licenseIsOsiApproved LGPL_2_0_or_later = True -licenseIsOsiApproved LGPL_2_1_only = True -licenseIsOsiApproved LGPL_2_1_or_later = True -licenseIsOsiApproved LGPL_3_0_only = True -licenseIsOsiApproved LGPL_3_0_or_later = True -licenseIsOsiApproved LGPLLR = False -licenseIsOsiApproved Libpng = False -licenseIsOsiApproved Libtiff = False -licenseIsOsiApproved LiLiQ_P_1_1 = True -licenseIsOsiApproved LiLiQ_R_1_1 = True -licenseIsOsiApproved LiLiQ_Rplus_1_1 = True -licenseIsOsiApproved Linux_OpenIB = False -licenseIsOsiApproved LPL_1_0 = True -licenseIsOsiApproved LPL_1_02 = True -licenseIsOsiApproved LPPL_1_0 = False -licenseIsOsiApproved LPPL_1_1 = False -licenseIsOsiApproved LPPL_1_2 = False -licenseIsOsiApproved LPPL_1_3a = False -licenseIsOsiApproved LPPL_1_3c = True -licenseIsOsiApproved MakeIndex = False -licenseIsOsiApproved MirOS = True -licenseIsOsiApproved MIT_0 = True -licenseIsOsiApproved MIT_advertising = False -licenseIsOsiApproved MIT_CMU = False -licenseIsOsiApproved MIT_enna = False -licenseIsOsiApproved MIT_feh = False -licenseIsOsiApproved MIT = True -licenseIsOsiApproved MITNFA = False -licenseIsOsiApproved Motosoto = True -licenseIsOsiApproved Mpich2 = False -licenseIsOsiApproved MPL_1_0 = True -licenseIsOsiApproved MPL_1_1 = True -licenseIsOsiApproved MPL_2_0_no_copyleft_exception = True -licenseIsOsiApproved MPL_2_0 = True -licenseIsOsiApproved MS_PL = True -licenseIsOsiApproved MS_RL = True -licenseIsOsiApproved MTLL = False -licenseIsOsiApproved Multics = True -licenseIsOsiApproved Mup = False -licenseIsOsiApproved NASA_1_3 = True -licenseIsOsiApproved 
Naumen = True -licenseIsOsiApproved NBPL_1_0 = False -licenseIsOsiApproved NCSA = True -licenseIsOsiApproved Net_SNMP = False -licenseIsOsiApproved NetCDF = False -licenseIsOsiApproved Newsletr = False -licenseIsOsiApproved NGPL = True -licenseIsOsiApproved NLOD_1_0 = False -licenseIsOsiApproved NLPL = False -licenseIsOsiApproved Nokia = True -licenseIsOsiApproved NOSL = False -licenseIsOsiApproved Noweb = False -licenseIsOsiApproved NPL_1_0 = False -licenseIsOsiApproved NPL_1_1 = False -licenseIsOsiApproved NPOSL_3_0 = True -licenseIsOsiApproved NRL = False -licenseIsOsiApproved NTP = True -licenseIsOsiApproved OCCT_PL = False -licenseIsOsiApproved OCLC_2_0 = True -licenseIsOsiApproved ODbL_1_0 = False -licenseIsOsiApproved ODC_By_1_0 = False -licenseIsOsiApproved OFL_1_0 = False -licenseIsOsiApproved OFL_1_1 = True -licenseIsOsiApproved OGTSL = True -licenseIsOsiApproved OLDAP_1_1 = False -licenseIsOsiApproved OLDAP_1_2 = False -licenseIsOsiApproved OLDAP_1_3 = False -licenseIsOsiApproved OLDAP_1_4 = False -licenseIsOsiApproved OLDAP_2_0_1 = False -licenseIsOsiApproved OLDAP_2_0 = False -licenseIsOsiApproved OLDAP_2_1 = False -licenseIsOsiApproved OLDAP_2_2_1 = False -licenseIsOsiApproved OLDAP_2_2_2 = False -licenseIsOsiApproved OLDAP_2_2 = False -licenseIsOsiApproved OLDAP_2_3 = False -licenseIsOsiApproved OLDAP_2_4 = False -licenseIsOsiApproved OLDAP_2_5 = False -licenseIsOsiApproved OLDAP_2_6 = False -licenseIsOsiApproved OLDAP_2_7 = False -licenseIsOsiApproved OLDAP_2_8 = False -licenseIsOsiApproved OML = False -licenseIsOsiApproved OpenSSL = False -licenseIsOsiApproved OPL_1_0 = False -licenseIsOsiApproved OSET_PL_2_1 = True -licenseIsOsiApproved OSL_1_0 = True -licenseIsOsiApproved OSL_1_1 = False -licenseIsOsiApproved OSL_2_0 = True -licenseIsOsiApproved OSL_2_1 = True -licenseIsOsiApproved OSL_3_0 = True -licenseIsOsiApproved PDDL_1_0 = False -licenseIsOsiApproved PHP_3_0 = True -licenseIsOsiApproved PHP_3_01 = False -licenseIsOsiApproved Plexus = False 
-licenseIsOsiApproved PostgreSQL = True -licenseIsOsiApproved Psfrag = False -licenseIsOsiApproved Psutils = False -licenseIsOsiApproved Python_2_0 = True -licenseIsOsiApproved Qhull = False -licenseIsOsiApproved QPL_1_0 = True -licenseIsOsiApproved Rdisc = False -licenseIsOsiApproved RHeCos_1_1 = False -licenseIsOsiApproved RPL_1_1 = True -licenseIsOsiApproved RPL_1_5 = True -licenseIsOsiApproved RPSL_1_0 = True -licenseIsOsiApproved RSA_MD = False -licenseIsOsiApproved RSCPL = True -licenseIsOsiApproved Ruby = False -licenseIsOsiApproved SAX_PD = False -licenseIsOsiApproved Saxpath = False -licenseIsOsiApproved SCEA = False -licenseIsOsiApproved Sendmail = False -licenseIsOsiApproved SGI_B_1_0 = False -licenseIsOsiApproved SGI_B_1_1 = False -licenseIsOsiApproved SGI_B_2_0 = False -licenseIsOsiApproved SimPL_2_0 = True -licenseIsOsiApproved SISSL_1_2 = False -licenseIsOsiApproved SISSL = True -licenseIsOsiApproved Sleepycat = True -licenseIsOsiApproved SMLNJ = False -licenseIsOsiApproved SMPPL = False -licenseIsOsiApproved SNIA = False -licenseIsOsiApproved Spencer_86 = False -licenseIsOsiApproved Spencer_94 = False -licenseIsOsiApproved Spencer_99 = False -licenseIsOsiApproved SPL_1_0 = True -licenseIsOsiApproved SugarCRM_1_1_3 = False -licenseIsOsiApproved SWL = False -licenseIsOsiApproved TCL = False -licenseIsOsiApproved TCP_wrappers = False -licenseIsOsiApproved TMate = False -licenseIsOsiApproved TORQUE_1_1 = False -licenseIsOsiApproved TOSL = False -licenseIsOsiApproved TU_Berlin_1_0 = False -licenseIsOsiApproved TU_Berlin_2_0 = False -licenseIsOsiApproved Unicode_DFS_2015 = False -licenseIsOsiApproved Unicode_DFS_2016 = False -licenseIsOsiApproved Unicode_TOU = False -licenseIsOsiApproved Unlicense = False -licenseIsOsiApproved UPL_1_0 = True -licenseIsOsiApproved Vim = False -licenseIsOsiApproved VOSTROM = False -licenseIsOsiApproved VSL_1_0 = True -licenseIsOsiApproved W3C_19980720 = False -licenseIsOsiApproved W3C_20150513 = False -licenseIsOsiApproved 
W3C = True -licenseIsOsiApproved Watcom_1_0 = True -licenseIsOsiApproved Wsuipa = False -licenseIsOsiApproved WTFPL = False -licenseIsOsiApproved X11 = False -licenseIsOsiApproved Xerox = False -licenseIsOsiApproved XFree86_1_1 = False -licenseIsOsiApproved Xinetd = False -licenseIsOsiApproved Xnet = True -licenseIsOsiApproved Xpp = False -licenseIsOsiApproved XSkat = False -licenseIsOsiApproved YPL_1_0 = False -licenseIsOsiApproved YPL_1_1 = False -licenseIsOsiApproved Zed = False -licenseIsOsiApproved Zend_2_0 = False -licenseIsOsiApproved Zimbra_1_3 = False -licenseIsOsiApproved Zimbra_1_4 = False -licenseIsOsiApproved Zlib_acknowledgement = False -licenseIsOsiApproved Zlib = True -licenseIsOsiApproved ZPL_1_1 = False -licenseIsOsiApproved ZPL_2_0 = True -licenseIsOsiApproved ZPL_2_1 = False - -------------------------------------------------------------------------------- --- Creation -------------------------------------------------------------------------------- - -licenseIdList :: LicenseListVersion -> [LicenseId] -licenseIdList LicenseListVersion_3_0 = - [ AGPL_1_0 - ] - ++ bulkOfLicenses -licenseIdList LicenseListVersion_3_2 = - [ AGPL_1_0_only - , AGPL_1_0_or_later - , Linux_OpenIB - , MIT_0 - , ODC_By_1_0 - , TU_Berlin_1_0 - , TU_Berlin_2_0 - ] - ++ bulkOfLicenses - --- | Create a 'LicenseId' from a 'String'. 
-mkLicenseId :: LicenseListVersion -> String -> Maybe LicenseId -mkLicenseId LicenseListVersion_3_0 s = Map.lookup s stringLookup_3_0 -mkLicenseId LicenseListVersion_3_2 s = Map.lookup s stringLookup_3_2 - -stringLookup_3_0 :: Map String LicenseId -stringLookup_3_0 = Map.fromList $ map (\i -> (licenseId i, i)) $ - licenseIdList LicenseListVersion_3_0 - -stringLookup_3_2 :: Map String LicenseId -stringLookup_3_2 = Map.fromList $ map (\i -> (licenseId i, i)) $ - licenseIdList LicenseListVersion_3_2 - --- | Licenses in all SPDX License lists -bulkOfLicenses :: [LicenseId] -bulkOfLicenses = - [ NullBSD - , AAL - , Abstyles - , Adobe_2006 - , Adobe_Glyph - , ADSL - , AFL_1_1 - , AFL_1_2 - , AFL_2_0 - , AFL_2_1 - , AFL_3_0 - , Afmparse - , AGPL_3_0_only - , AGPL_3_0_or_later - , Aladdin - , AMDPLPA - , AML - , AMPAS - , ANTLR_PD - , Apache_1_0 - , Apache_1_1 - , Apache_2_0 - , APAFML - , APL_1_0 - , APSL_1_0 - , APSL_1_1 - , APSL_1_2 - , APSL_2_0 - , Artistic_1_0_cl8 - , Artistic_1_0_Perl - , Artistic_1_0 - , Artistic_2_0 - , Bahyph - , Barr - , Beerware - , BitTorrent_1_0 - , BitTorrent_1_1 - , Borceux - , BSD_1_Clause - , BSD_2_Clause_FreeBSD - , BSD_2_Clause_NetBSD - , BSD_2_Clause_Patent - , BSD_2_Clause - , BSD_3_Clause_Attribution - , BSD_3_Clause_Clear - , BSD_3_Clause_LBNL - , BSD_3_Clause_No_Nuclear_License_2014 - , BSD_3_Clause_No_Nuclear_License - , BSD_3_Clause_No_Nuclear_Warranty - , BSD_3_Clause - , BSD_4_Clause_UC - , BSD_4_Clause - , BSD_Protection - , BSD_Source_Code - , BSL_1_0 - , Bzip2_1_0_5 - , Bzip2_1_0_6 - , Caldera - , CATOSL_1_1 - , CC_BY_1_0 - , CC_BY_2_0 - , CC_BY_2_5 - , CC_BY_3_0 - , CC_BY_4_0 - , CC_BY_NC_1_0 - , CC_BY_NC_2_0 - , CC_BY_NC_2_5 - , CC_BY_NC_3_0 - , CC_BY_NC_4_0 - , CC_BY_NC_ND_1_0 - , CC_BY_NC_ND_2_0 - , CC_BY_NC_ND_2_5 - , CC_BY_NC_ND_3_0 - , CC_BY_NC_ND_4_0 - , CC_BY_NC_SA_1_0 - , CC_BY_NC_SA_2_0 - , CC_BY_NC_SA_2_5 - , CC_BY_NC_SA_3_0 - , CC_BY_NC_SA_4_0 - , CC_BY_ND_1_0 - , CC_BY_ND_2_0 - , CC_BY_ND_2_5 - , CC_BY_ND_3_0 - 
, CC_BY_ND_4_0 - , CC_BY_SA_1_0 - , CC_BY_SA_2_0 - , CC_BY_SA_2_5 - , CC_BY_SA_3_0 - , CC_BY_SA_4_0 - , CC0_1_0 - , CDDL_1_0 - , CDDL_1_1 - , CDLA_Permissive_1_0 - , CDLA_Sharing_1_0 - , CECILL_1_0 - , CECILL_1_1 - , CECILL_2_0 - , CECILL_2_1 - , CECILL_B - , CECILL_C - , ClArtistic - , CNRI_Jython - , CNRI_Python_GPL_Compatible - , CNRI_Python - , Condor_1_1 - , CPAL_1_0 - , CPL_1_0 - , CPOL_1_02 - , Crossword - , CrystalStacker - , CUA_OPL_1_0 - , Cube - , Curl - , D_FSL_1_0 - , Diffmark - , DOC - , Dotseqn - , DSDP - , Dvipdfm - , ECL_1_0 - , ECL_2_0 - , EFL_1_0 - , EFL_2_0 - , EGenix - , Entessa - , EPL_1_0 - , EPL_2_0 - , ErlPL_1_1 - , EUDatagrid - , EUPL_1_0 - , EUPL_1_1 - , EUPL_1_2 - , Eurosym - , Fair - , Frameworx_1_0 - , FreeImage - , FSFAP - , FSFUL - , FSFULLR - , FTL - , GFDL_1_1_only - , GFDL_1_1_or_later - , GFDL_1_2_only - , GFDL_1_2_or_later - , GFDL_1_3_only - , GFDL_1_3_or_later - , Giftware - , GL2PS - , Glide - , Glulxe - , Gnuplot - , GPL_1_0_only - , GPL_1_0_or_later - , GPL_2_0_only - , GPL_2_0_or_later - , GPL_3_0_only - , GPL_3_0_or_later - , GSOAP_1_3b - , HaskellReport - , HPND - , IBM_pibs - , ICU - , IJG - , ImageMagick - , IMatix - , Imlib2 - , Info_ZIP - , Intel_ACPI - , Intel - , Interbase_1_0 - , IPA - , IPL_1_0 - , ISC - , JasPer_2_0 - , JSON - , LAL_1_2 - , LAL_1_3 - , Latex2e - , Leptonica - , LGPL_2_0_only - , LGPL_2_0_or_later - , LGPL_2_1_only - , LGPL_2_1_or_later - , LGPL_3_0_only - , LGPL_3_0_or_later - , LGPLLR - , Libpng - , Libtiff - , LiLiQ_P_1_1 - , LiLiQ_R_1_1 - , LiLiQ_Rplus_1_1 - , LPL_1_0 - , LPL_1_02 - , LPPL_1_0 - , LPPL_1_1 - , LPPL_1_2 - , LPPL_1_3a - , LPPL_1_3c - , MakeIndex - , MirOS - , MIT_advertising - , MIT_CMU - , MIT_enna - , MIT_feh - , MIT - , MITNFA - , Motosoto - , Mpich2 - , MPL_1_0 - , MPL_1_1 - , MPL_2_0_no_copyleft_exception - , MPL_2_0 - , MS_PL - , MS_RL - , MTLL - , Multics - , Mup - , NASA_1_3 - , Naumen - , NBPL_1_0 - , NCSA - , Net_SNMP - , NetCDF - , Newsletr - , NGPL - , NLOD_1_0 - , 
NLPL - , Nokia - , NOSL - , Noweb - , NPL_1_0 - , NPL_1_1 - , NPOSL_3_0 - , NRL - , NTP - , OCCT_PL - , OCLC_2_0 - , ODbL_1_0 - , OFL_1_0 - , OFL_1_1 - , OGTSL - , OLDAP_1_1 - , OLDAP_1_2 - , OLDAP_1_3 - , OLDAP_1_4 - , OLDAP_2_0_1 - , OLDAP_2_0 - , OLDAP_2_1 - , OLDAP_2_2_1 - , OLDAP_2_2_2 - , OLDAP_2_2 - , OLDAP_2_3 - , OLDAP_2_4 - , OLDAP_2_5 - , OLDAP_2_6 - , OLDAP_2_7 - , OLDAP_2_8 - , OML - , OpenSSL - , OPL_1_0 - , OSET_PL_2_1 - , OSL_1_0 - , OSL_1_1 - , OSL_2_0 - , OSL_2_1 - , OSL_3_0 - , PDDL_1_0 - , PHP_3_0 - , PHP_3_01 - , Plexus - , PostgreSQL - , Psfrag - , Psutils - , Python_2_0 - , Qhull - , QPL_1_0 - , Rdisc - , RHeCos_1_1 - , RPL_1_1 - , RPL_1_5 - , RPSL_1_0 - , RSA_MD - , RSCPL - , Ruby - , SAX_PD - , Saxpath - , SCEA - , Sendmail - , SGI_B_1_0 - , SGI_B_1_1 - , SGI_B_2_0 - , SimPL_2_0 - , SISSL_1_2 - , SISSL - , Sleepycat - , SMLNJ - , SMPPL - , SNIA - , Spencer_86 - , Spencer_94 - , Spencer_99 - , SPL_1_0 - , SugarCRM_1_1_3 - , SWL - , TCL - , TCP_wrappers - , TMate - , TORQUE_1_1 - , TOSL - , Unicode_DFS_2015 - , Unicode_DFS_2016 - , Unicode_TOU - , Unlicense - , UPL_1_0 - , Vim - , VOSTROM - , VSL_1_0 - , W3C_19980720 - , W3C_20150513 - , W3C - , Watcom_1_0 - , Wsuipa - , WTFPL - , X11 - , Xerox - , XFree86_1_1 - , Xinetd - , Xnet - , Xpp - , XSkat - , YPL_1_0 - , YPL_1_1 - , Zed - , Zend_2_0 - , Zimbra_1_3 - , Zimbra_1_4 - , Zlib_acknowledgement - , Zlib - , ZPL_1_1 - , ZPL_2_0 - , ZPL_2_1 - ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseListVersion.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseListVersion.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseListVersion.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseListVersion.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -module Distribution.SPDX.LicenseListVersion 
( - LicenseListVersion (..), - cabalSpecVersionToSPDXListVersion, - ) where - -import Distribution.CabalSpecVersion - --- | SPDX License List version @Cabal@ is aware of. -data LicenseListVersion - = LicenseListVersion_3_0 - | LicenseListVersion_3_2 - deriving (Eq, Ord, Show, Enum, Bounded) - -cabalSpecVersionToSPDXListVersion :: CabalSpecVersion -> LicenseListVersion -cabalSpecVersionToSPDXListVersion CabalSpecV2_4 = LicenseListVersion_3_2 -cabalSpecVersionToSPDXListVersion _ = LicenseListVersion_3_0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseReference.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseReference.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseReference.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/SPDX/LicenseReference.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,79 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.SPDX.LicenseReference ( - LicenseRef, - licenseRef, - licenseDocumentRef, - mkLicenseRef, - mkLicenseRef', - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Utils.Generic (isAsciiAlphaNum) -import Distribution.Pretty -import Distribution.Parsec.Class - -import qualified Distribution.Compat.CharParsing as P -import qualified Text.PrettyPrint as Disp - --- | A user defined license reference denoted by @LicenseRef-[idstring]@ (for a license not on the SPDX License List); -data LicenseRef = LicenseRef - { _lrDocument :: !(Maybe String) - , _lrLicense :: !String - } - deriving (Show, Read, Eq, Ord, Typeable, Data, Generic) - --- | License reference. -licenseRef :: LicenseRef -> String -licenseRef = _lrLicense - --- | Document reference. 
-licenseDocumentRef :: LicenseRef -> Maybe String -licenseDocumentRef = _lrDocument - -instance Binary LicenseRef - -instance NFData LicenseRef where - rnf (LicenseRef d l) = rnf d `seq` rnf l - -instance Pretty LicenseRef where - pretty (LicenseRef Nothing l) = Disp.text "LicenseRef-" <<>> Disp.text l - pretty (LicenseRef (Just d) l) = - Disp.text "DocumentRef-" <<>> Disp.text d <<>> Disp.char ':' <<>> Disp.text "LicenseRef-" <<>> Disp.text l - -instance Parsec LicenseRef where - parsec = name <|> doc - where - name = do - _ <- P.string "LicenseRef-" - n <- some $ P.satisfy $ \c -> isAsciiAlphaNum c || c == '-' || c == '.' - pure (LicenseRef Nothing n) - - doc = do - _ <- P.string "DocumentRef-" - d <- some $ P.satisfy $ \c -> isAsciiAlphaNum c || c == '-' || c == '.' - _ <- P.char ':' - _ <- P.string "LicenseRef-" - n <- some $ P.satisfy $ \c -> isAsciiAlphaNum c || c == '-' || c == '.' - pure (LicenseRef (Just d) n) - --- | Create 'LicenseRef' from optional document ref and name. -mkLicenseRef :: Maybe String -> String -> Maybe LicenseRef -mkLicenseRef d l = do - d' <- traverse checkIdString d - l' <- checkIdString l - pure (LicenseRef d' l') - where - checkIdString s - | all (\c -> isAsciiAlphaNum c || c == '-' || c == '.') s = Just s - | otherwise = Nothing - --- | Like 'mkLicenseRef' but convert invalid characters into @-@. -mkLicenseRef' :: Maybe String -> String -> LicenseRef -mkLicenseRef' d l = LicenseRef (fmap f d) (f l) - where - f = map g - g c | isAsciiAlphaNum c || c == '-' || c == '.' 
= c - | otherwise = '-' diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/SPDX.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/SPDX.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/SPDX.hs 2018-10-17 15:59:02.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/SPDX.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,40 +0,0 @@ --- | This module implements SPDX specification version 2.1 with a version 3.0 license list. --- --- Specification is available on -module Distribution.SPDX ( - -- * License - License (..), - -- * License expression - LicenseExpression (..), - SimpleLicenseExpression (..), - simpleLicenseExpression, - -- * License identifier - LicenseId (..), - licenseId, - licenseName, - licenseIsOsiApproved, - mkLicenseId, - licenseIdList, - -- * License exception - LicenseExceptionId (..), - licenseExceptionId, - licenseExceptionName, - mkLicenseExceptionId, - licenseExceptionIdList, - -- * License reference - LicenseRef, - licenseRef, - licenseDocumentRef, - mkLicenseRef, - mkLicenseRef', - -- * License list version - LicenseListVersion (..), - cabalSpecVersionToSPDXListVersion, - ) where - -import Distribution.SPDX.LicenseExceptionId -import Distribution.SPDX.License -import Distribution.SPDX.LicenseId -import Distribution.SPDX.LicenseExpression -import Distribution.SPDX.LicenseReference -import Distribution.SPDX.LicenseListVersion diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/System.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/System.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/System.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/System.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,300 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE 
DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.System --- Copyright : Duncan Coutts 2007-2008 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Cabal often needs to do slightly different things on specific platforms. You --- probably know about the 'System.Info.os' however using that is very --- inconvenient because it is a string and different Haskell implementations --- do not agree on using the same strings for the same platforms! (In --- particular see the controversy over \"windows\" vs \"mingw32\"). So to make it --- more consistent and easy to use we have an 'OS' enumeration. --- -module Distribution.System ( - -- * Operating System - OS(..), - buildOS, - - -- * Machine Architecture - Arch(..), - buildArch, - - -- * Platform is a pair of arch and OS - Platform(..), - buildPlatform, - platformFromTriple, - - -- * Internal - knownOSs, - knownArches, - - -- * Classification - ClassificationStrictness (..), - classifyOS, - classifyArch, - ) where - -import Prelude () -import Distribution.Compat.Prelude -import Control.Applicative (liftA2) - -import qualified System.Info (os, arch) -import Distribution.Utils.Generic (lowercase) - -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text - -import qualified Distribution.Compat.ReadP as Parse -import qualified Distribution.Compat.CharParsing as P -import qualified Text.PrettyPrint as Disp - --- | How strict to be when classifying strings into the 'OS' and 'Arch' enums. --- --- The reason we have multiple ways to do the classification is because there --- are two situations where we need to do it. --- --- For parsing OS and arch names in .cabal files we really want everyone to be --- referring to the same or or arch by the same name. Variety is not a virtue --- in this case. We don't mind about case though. 
--- --- For the System.Info.os\/arch different Haskell implementations use different --- names for the same or\/arch. Also they tend to distinguish versions of an --- OS\/arch which we just don't care about. --- --- The 'Compat' classification allows us to recognise aliases that are already --- in common use but it allows us to distinguish them from the canonical name --- which enables us to warn about such deprecated aliases. --- -data ClassificationStrictness = Permissive | Compat | Strict - --- ------------------------------------------------------------ --- * Operating System --- ------------------------------------------------------------ - --- | These are the known OS names: Linux, Windows, OSX --- ,FreeBSD, OpenBSD, NetBSD, DragonFly --- ,Solaris, AIX, HPUX, IRIX --- ,HaLVM ,Hurd ,IOS, Android,Ghcjs --- --- The following aliases can also be used:, --- * Windows aliases: mingw32, win32, cygwin32 --- * OSX alias: darwin --- * Hurd alias: gnu --- * FreeBSD alias: kfreebsdgnu --- * Solaris alias: solaris2 --- -data OS = Linux | Windows | OSX -- tier 1 desktop OSs - | FreeBSD | OpenBSD | NetBSD -- other free Unix OSs - | DragonFly - | Solaris | AIX | HPUX | IRIX -- ageing Unix OSs - | HaLVM -- bare metal / VMs / hypervisors - | Hurd -- GNU's microkernel - | IOS | Android -- mobile OSs - | Ghcjs - | OtherOS String - deriving (Eq, Generic, Ord, Show, Read, Typeable, Data) - -instance Binary OS - -instance NFData OS where rnf = genericRnf - -knownOSs :: [OS] -knownOSs = [Linux, Windows, OSX - ,FreeBSD, OpenBSD, NetBSD, DragonFly - ,Solaris, AIX, HPUX, IRIX - ,HaLVM - ,Hurd - ,IOS, Android - ,Ghcjs] - -osAliases :: ClassificationStrictness -> OS -> [String] -osAliases Permissive Windows = ["mingw32", "win32", "cygwin32"] -osAliases Compat Windows = ["mingw32", "win32"] -osAliases _ OSX = ["darwin"] -osAliases _ Hurd = ["gnu"] -osAliases Permissive FreeBSD = ["kfreebsdgnu"] -osAliases Compat FreeBSD = ["kfreebsdgnu"] -osAliases Permissive Solaris = ["solaris2"] 
-osAliases Compat Solaris = ["solaris2"] -osAliases _ Android = ["linux-android"] -osAliases _ _ = [] - -instance Pretty OS where - pretty (OtherOS name) = Disp.text name - pretty other = Disp.text (lowercase (show other)) - -instance Parsec OS where - parsec = classifyOS Compat <$> parsecIdent - -instance Text OS where - parse = fmap (classifyOS Compat) ident - -classifyOS :: ClassificationStrictness -> String -> OS -classifyOS strictness s = - fromMaybe (OtherOS s) $ lookup (lowercase s) osMap - where - osMap = [ (name, os) - | os <- knownOSs - , name <- display os : osAliases strictness os ] - -buildOS :: OS -buildOS = classifyOS Permissive System.Info.os - --- ------------------------------------------------------------ --- * Machine Architecture --- ------------------------------------------------------------ - --- | These are the known Arches: I386, X86_64, PPC, PPC64, Sparc, --- Arm, AArch64, Mips, SH, IA64, S39, Alpha, Hppa, Rs6000, M68k, --- Vax, and JavaScript. --- --- The following aliases can also be used: --- * PPC alias: powerpc --- * PPC64 alias : powerpc64, powerpc64le --- * Sparc aliases: sparc64, sun4 --- * Mips aliases: mipsel, mipseb --- * Arm aliases: armeb, armel --- * AArch64 aliases: arm64 --- -data Arch = I386 | X86_64 | PPC | PPC64 | Sparc - | Arm | AArch64 | Mips | SH - | IA64 | S390 - | Alpha | Hppa | Rs6000 - | M68k | Vax - | JavaScript - | OtherArch String - deriving (Eq, Generic, Ord, Show, Read, Typeable, Data) - -instance Binary Arch - -instance NFData Arch where rnf = genericRnf - -knownArches :: [Arch] -knownArches = [I386, X86_64, PPC, PPC64, Sparc - ,Arm, AArch64, Mips, SH - ,IA64, S390 - ,Alpha, Hppa, Rs6000 - ,M68k, Vax - ,JavaScript] - -archAliases :: ClassificationStrictness -> Arch -> [String] -archAliases Strict _ = [] -archAliases Compat _ = [] -archAliases _ PPC = ["powerpc"] -archAliases _ PPC64 = ["powerpc64", "powerpc64le"] -archAliases _ Sparc = ["sparc64", "sun4"] -archAliases _ Mips = ["mipsel", "mipseb"] 
-archAliases _ Arm = ["armeb", "armel"] -archAliases _ AArch64 = ["arm64"] -archAliases _ _ = [] - -instance Pretty Arch where - pretty (OtherArch name) = Disp.text name - pretty other = Disp.text (lowercase (show other)) - -instance Parsec Arch where - parsec = classifyArch Strict <$> parsecIdent - -instance Text Arch where - parse = fmap (classifyArch Strict) ident - -classifyArch :: ClassificationStrictness -> String -> Arch -classifyArch strictness s = - fromMaybe (OtherArch s) $ lookup (lowercase s) archMap - where - archMap = [ (name, arch) - | arch <- knownArches - , name <- display arch : archAliases strictness arch ] - -buildArch :: Arch -buildArch = classifyArch Permissive System.Info.arch - --- ------------------------------------------------------------ --- * Platform --- ------------------------------------------------------------ - -data Platform = Platform Arch OS - deriving (Eq, Generic, Ord, Show, Read, Typeable, Data) - -instance Binary Platform - -instance NFData Platform where rnf = genericRnf - -instance Pretty Platform where - pretty (Platform arch os) = pretty arch <<>> Disp.char '-' <<>> pretty os - -instance Parsec Platform where - parsec = do - arch <- parsecDashlessArch - _ <- P.char '-' - os <- parsec - return (Platform arch os) - where - parsecDashlessArch = classifyArch Strict <$> dashlessIdent - - dashlessIdent = liftA2 (:) firstChar rest - where - firstChar = P.satisfy isAlpha - rest = P.munch (\c -> isAlphaNum c || c == '_') - -instance Text Platform where - -- TODO: there are ambigious platforms like: `arch-word-os` - -- which could be parsed as - -- * Platform "arch-word" "os" - -- * Platform "arch" "word-os" - -- We could support that preferring variants 'OtherOS' or 'OtherArch' - -- - -- For now we split into arch and os parts on the first dash. 
- parse = do - arch <- parseDashlessArch - _ <- Parse.char '-' - os <- parse - return (Platform arch os) - where - parseDashlessArch :: Parse.ReadP r Arch - parseDashlessArch = fmap (classifyArch Strict) dashlessIdent - - dashlessIdent :: Parse.ReadP r String - dashlessIdent = liftM2 (:) firstChar rest - where firstChar = Parse.satisfy isAlpha - rest = Parse.munch (\c -> isAlphaNum c || c == '_') - --- | The platform Cabal was compiled on. In most cases, --- @LocalBuildInfo.hostPlatform@ should be used instead (the platform we're --- targeting). -buildPlatform :: Platform -buildPlatform = Platform buildArch buildOS - --- Utils: - -ident :: Parse.ReadP r String -ident = liftM2 (:) firstChar rest - where firstChar = Parse.satisfy isAlpha - rest = Parse.munch (\c -> isAlphaNum c || c == '_' || c == '-') - -parsecIdent :: CabalParsing m => m String -parsecIdent = (:) <$> firstChar <*> rest - where - firstChar = P.satisfy isAlpha - rest = P.munch (\c -> isAlphaNum c || c == '_' || c == '-') - -platformFromTriple :: String -> Maybe Platform -platformFromTriple triple = - fmap fst (listToMaybe $ Parse.readP_to_S parseTriple triple) - where parseWord = Parse.munch1 (\c -> isAlphaNum c || c == '_') - parseTriple = do - arch <- fmap (classifyArch Permissive) parseWord - _ <- Parse.char '-' - _ <- parseWord -- Skip vendor - _ <- Parse.char '-' - os <- fmap (classifyOS Permissive) ident -- OS may have hyphens, like - -- 'nto-qnx' - return $ Platform arch os diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/TestSuite.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/TestSuite.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/TestSuite.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/TestSuite.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,102 +0,0 @@ -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - 
------------------------------------------------------------------------------ --- | --- Module : Distribution.TestSuite --- Copyright : Thomas Tuegel 2010 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This module defines the detailed test suite interface which makes it --- possible to expose individual tests to Cabal or other test agents. - -module Distribution.TestSuite - ( TestInstance(..) - , OptionDescr(..) - , OptionType(..) - , Test(..) - , Options - , Progress(..) - , Result(..) - , testGroup - ) where - -import Prelude () -import Distribution.Compat.Prelude - -data TestInstance = TestInstance - { run :: IO Progress -- ^ Perform the test. - , name :: String -- ^ A name for the test, unique within a - -- test suite. - , tags :: [String] -- ^ Users can select groups of tests by - -- their tags. - , options :: [OptionDescr] -- ^ Descriptions of the options recognized - -- by this test. - , setOption :: String -> String -> Either String TestInstance - -- ^ Try to set the named option to the given value. Returns an error - -- message if the option is not supported or the value could not be - -- correctly parsed; otherwise, a 'TestInstance' with the option set to - -- the given value is returned. - } - -data OptionDescr = OptionDescr - { optionName :: String - , optionDescription :: String -- ^ A human-readable description of the - -- option to guide the user setting it. 
- , optionType :: OptionType - , optionDefault :: Maybe String - } - deriving (Eq, Read, Show) - -data OptionType - = OptionFile - { optionFileMustExist :: Bool - , optionFileIsDir :: Bool - , optionFileExtensions :: [String] - } - | OptionString - { optionStringMultiline :: Bool - } - | OptionNumber - { optionNumberIsInt :: Bool - , optionNumberBounds :: (Maybe String, Maybe String) - } - | OptionBool - | OptionEnum [String] - | OptionSet [String] - | OptionRngSeed - deriving (Eq, Read, Show) - -data Test - = Test TestInstance - | Group - { groupName :: String - , concurrently :: Bool - -- ^ If true, then children of this group may be run in parallel. - -- Note that this setting is not inherited by children. In - -- particular, consider a group F with "concurrently = False" that - -- has some children, including a group T with "concurrently = - -- True". The children of group T may be run concurrently with each - -- other, as long as none are run at the same time as any of the - -- direct children of group F. - , groupTests :: [Test] - } - | ExtraOptions [OptionDescr] Test - -type Options = [(String, String)] - -data Progress = Finished Result - | Progress String (IO Progress) - -data Result = Pass - | Fail String - | Error String - deriving (Eq, Read, Show) - --- | Create a named group of tests, which are assumed to be safe to run in --- parallel. 
-testGroup :: String -> [Test] -> Test -testGroup n ts = Group { groupName = n, concurrently = True, groupTests = ts } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Text.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Text.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Text.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Text.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,102 +0,0 @@ -{-# LANGUAGE DefaultSignatures #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Text --- Copyright : Duncan Coutts 2007 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This defines a 'Text' class which is a bit like the 'Read' and 'Show' --- classes. The difference is that it uses a modern pretty printer and parser --- system and the format is not expected to be Haskell concrete syntax but --- rather the external human readable representation used by Cabal. --- -module Distribution.Text ( - Text(..), - defaultStyle, - display, - flatStyle, - simpleParse, - stdParse, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Data.Functor.Identity (Identity (..)) -import Distribution.Pretty -import Distribution.Parsec.Class -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - -import Data.Version (Version(Version)) - --- | /Note:/ this class will soon be deprecated. --- It's not yet, so that we are @-Wall@ clean. -class Text a where - disp :: a -> Disp.Doc - default disp :: Pretty a => a -> Disp.Doc - disp = pretty - - parse :: Parse.ReadP r a - default parse :: Parsec a => Parse.ReadP r a - parse = parsec - --- | Pretty-prints with the default style. -display :: Text a => a -> String -display = Disp.renderStyle defaultStyle . 
disp - -simpleParse :: Text a => String -> Maybe a -simpleParse str = case [ p | (p, s) <- Parse.readP_to_S parse str - , all isSpace s ] of - [] -> Nothing - (p:_) -> Just p - -stdParse :: Text ver => (ver -> String -> res) -> Parse.ReadP r res -stdParse f = do - cs <- Parse.sepBy1 component (Parse.char '-') - _ <- Parse.char '-' - ver <- parse - let name = intercalate "-" cs - return $! f ver (lowercase name) - where - component = do - cs <- Parse.munch1 isAlphaNum - if all isDigit cs then Parse.pfail else return cs - -- each component must contain an alphabetic character, to avoid - -- ambiguity in identifiers like foo-1 (the 1 is the version number). - -lowercase :: String -> String -lowercase = map toLower - --- ----------------------------------------------------------------------------- --- Instances for types from the base package - -instance Text Bool where - parse = Parse.choice [ (Parse.string "True" Parse.+++ - Parse.string "true") >> return True - , (Parse.string "False" Parse.+++ - Parse.string "false") >> return False ] - -instance Text Int where - parse = fmap negate (Parse.char '-' >> parseNat) Parse.+++ parseNat - -instance Text a => Text (Identity a) where - disp = disp . runIdentity - parse = fmap Identity parse - --- | Parser for non-negative integers. -parseNat :: Parse.ReadP r Int -parseNat = read `fmap` Parse.munch1 isDigit -- TODO: eradicateNoParse - - -instance Text Version where - disp (Version branch _tags) -- Death to version tags!! 
- = Disp.hcat (Disp.punctuate (Disp.char '.') (map Disp.int branch)) - - parse = do - branch <- Parse.sepBy1 parseNat (Parse.char '.') - -- allow but ignore tags: - _tags <- Parse.many (Parse.char '-' >> Parse.munch1 isAlphaNum) - return (Version branch []) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/AbiDependency.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/AbiDependency.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/AbiDependency.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/AbiDependency.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,52 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Types.AbiDependency where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse -import qualified Distribution.Package as Package -import qualified Text.PrettyPrint as Disp - --- | An ABI dependency is a dependency on a library which also --- records the ABI hash ('abiHash') of the library it depends --- on. --- --- The primary utility of this is to enable an extra sanity when --- GHC loads libraries: it can check if the dependency has a matching --- ABI and if not, refuse to load this library. This information --- is critical if we are shadowing libraries; differences in the --- ABI hash let us know what packages get shadowed by the new version --- of a package. 
-data AbiDependency = AbiDependency { - depUnitId :: Package.UnitId, - depAbiHash :: Package.AbiHash - } - deriving (Eq, Generic, Read, Show) - -instance Pretty AbiDependency where - pretty (AbiDependency uid abi) = - disp uid <<>> Disp.char '=' <<>> disp abi - -instance Parsec AbiDependency where - parsec = do - uid <- parsec - _ <- P.char '=' - abi <- parsec - return (AbiDependency uid abi) - -instance Text AbiDependency where - parse = do - uid <- parse - _ <- Parse.char '=' - abi <- parse - return (AbiDependency uid abi) - -instance Binary AbiDependency - -instance NFData AbiDependency where rnf = genericRnf diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/AbiHash.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/AbiHash.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/AbiHash.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/AbiHash.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,66 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} - -module Distribution.Types.AbiHash - ( AbiHash, unAbiHash, mkAbiHash - ) where - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.Utils.ShortText - -import qualified Distribution.Compat.ReadP as Parse -import qualified Distribution.Compat.CharParsing as P -import Distribution.Text -import Distribution.Pretty -import Distribution.Parsec.Class - -import Text.PrettyPrint (text) - --- | ABI Hashes --- --- Use 'mkAbiHash' and 'unAbiHash' to convert from/to a --- 'String'. 
--- --- This type is opaque since @Cabal-2.0@ --- --- @since 2.0.0.2 -newtype AbiHash = AbiHash ShortText - deriving (Eq, Show, Read, Generic) - --- | Construct a 'AbiHash' from a 'String' --- --- 'mkAbiHash' is the inverse to 'unAbiHash' --- --- Note: No validations are performed to ensure that the resulting --- 'AbiHash' is valid --- --- @since 2.0.0.2 -unAbiHash :: AbiHash -> String -unAbiHash (AbiHash h) = fromShortText h - --- | Convert 'AbiHash' to 'String' --- --- @since 2.0.0.2 -mkAbiHash :: String -> AbiHash -mkAbiHash = AbiHash . toShortText - --- | 'mkAbiHash' --- --- @since 2.0.0.2 -instance IsString AbiHash where - fromString = mkAbiHash - -instance Binary AbiHash - -instance NFData AbiHash where rnf = genericRnf - -instance Pretty AbiHash where - pretty = text . unAbiHash - -instance Parsec AbiHash where - parsec = fmap mkAbiHash (P.munch isAlphaNum) - -instance Text AbiHash where - parse = fmap mkAbiHash (Parse.munch isAlphaNum) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/AnnotatedId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/AnnotatedId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/AnnotatedId.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/AnnotatedId.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -module Distribution.Types.AnnotatedId ( - AnnotatedId(..) -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Package -import Distribution.Types.ComponentName - --- | An 'AnnotatedId' is a 'ComponentId', 'UnitId', etc. --- which is annotated with some other useful information --- that is useful for printing to users, etc. 
--- --- Invariant: if ann_id x == ann_id y, then ann_pid x == ann_pid y --- and ann_cname x == ann_cname y -data AnnotatedId id = AnnotatedId { - ann_pid :: PackageId, - ann_cname :: ComponentName, - ann_id :: id - } - deriving (Show) - -instance Eq id => Eq (AnnotatedId id) where - x == y = ann_id x == ann_id y - -instance Ord id => Ord (AnnotatedId id) where - compare x y = compare (ann_id x) (ann_id y) - -instance Package (AnnotatedId id) where - packageId = ann_pid - -instance Functor AnnotatedId where - fmap f (AnnotatedId pid cn x) = AnnotatedId pid cn (f x) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Benchmark/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Benchmark/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Benchmark/Lens.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Benchmark/Lens.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -module Distribution.Types.Benchmark.Lens ( - Benchmark, - module Distribution.Types.Benchmark.Lens, - ) where - -import Distribution.Compat.Lens -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Types.Benchmark (Benchmark) -import Distribution.Types.BenchmarkInterface (BenchmarkInterface) -import Distribution.Types.BuildInfo (BuildInfo) -import Distribution.Types.UnqualComponentName (UnqualComponentName) - -import qualified Distribution.Types.Benchmark as T - -benchmarkName :: Lens' Benchmark UnqualComponentName -benchmarkName f s = fmap (\x -> s { T.benchmarkName = x }) (f (T.benchmarkName s)) -{-# INLINE benchmarkName #-} - -benchmarkInterface :: Lens' Benchmark BenchmarkInterface -benchmarkInterface f s = fmap (\x -> s { T.benchmarkInterface = x }) (f (T.benchmarkInterface s)) -{-# INLINE benchmarkInterface #-} - -benchmarkBuildInfo :: Lens' Benchmark BuildInfo -benchmarkBuildInfo 
f s = fmap (\x -> s { T.benchmarkBuildInfo = x }) (f (T.benchmarkBuildInfo s)) -{-# INLINE benchmarkBuildInfo #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Benchmark.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Benchmark.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Benchmark.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Benchmark.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,77 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.Benchmark ( - Benchmark(..), - emptyBenchmark, - benchmarkType, - benchmarkModules, - benchmarkModulesAutogen -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.BuildInfo -import Distribution.Types.BenchmarkType -import Distribution.Types.BenchmarkInterface -import Distribution.Types.UnqualComponentName - -import Distribution.ModuleName - -import qualified Distribution.Types.BuildInfo.Lens as L - --- | A \"benchmark\" stanza in a cabal file. 
--- -data Benchmark = Benchmark { - benchmarkName :: UnqualComponentName, - benchmarkInterface :: BenchmarkInterface, - benchmarkBuildInfo :: BuildInfo - } - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary Benchmark - -instance NFData Benchmark where rnf = genericRnf - -instance L.HasBuildInfo Benchmark where - buildInfo f (Benchmark x1 x2 x3) = fmap (\y1 -> Benchmark x1 x2 y1) (f x3) - -instance Monoid Benchmark where - mempty = Benchmark { - benchmarkName = mempty, - benchmarkInterface = mempty, - benchmarkBuildInfo = mempty - } - mappend = (<>) - -instance Semigroup Benchmark where - a <> b = Benchmark { - benchmarkName = combine' benchmarkName, - benchmarkInterface = combine benchmarkInterface, - benchmarkBuildInfo = combine benchmarkBuildInfo - } - where combine field = field a `mappend` field b - combine' field = case ( unUnqualComponentName $ field a - , unUnqualComponentName $ field b) of - ("", _) -> field b - (_, "") -> field a - (x, y) -> error $ "Ambiguous values for test field: '" - ++ x ++ "' and '" ++ y ++ "'" - -emptyBenchmark :: Benchmark -emptyBenchmark = mempty - -benchmarkType :: Benchmark -> BenchmarkType -benchmarkType benchmark = case benchmarkInterface benchmark of - BenchmarkExeV10 ver _ -> BenchmarkTypeExe ver - BenchmarkUnsupported benchmarktype -> benchmarktype - --- | Get all the module names from a benchmark. -benchmarkModules :: Benchmark -> [ModuleName] -benchmarkModules benchmark = otherModules (benchmarkBuildInfo benchmark) - --- | Get all the auto generated module names from a benchmark. --- This are a subset of 'benchmarkModules'. 
-benchmarkModulesAutogen :: Benchmark -> [ModuleName] -benchmarkModulesAutogen benchmark = autogenModules (benchmarkBuildInfo benchmark) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/BenchmarkInterface.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/BenchmarkInterface.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/BenchmarkInterface.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/BenchmarkInterface.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,46 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.BenchmarkInterface ( - BenchmarkInterface(..), -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.BenchmarkType -import Distribution.Version - --- | The benchmark interfaces that are currently defined. Each --- benchmark must specify which interface it supports. --- --- More interfaces may be defined in future, either new revisions or --- totally new interfaces. --- -data BenchmarkInterface = - - -- | Benchmark interface \"exitcode-stdio-1.0\". The benchmark - -- takes the form of an executable. It returns a zero exit code - -- for success, non-zero for failure. The stdout and stderr - -- channels may be logged. It takes no command line parameters - -- and nothing on stdin. - -- - BenchmarkExeV10 Version FilePath - - -- | A benchmark that does not conform to one of the above - -- interfaces for the given reason (e.g. unknown benchmark type). 
- -- - | BenchmarkUnsupported BenchmarkType - deriving (Eq, Generic, Read, Show, Typeable, Data) - -instance Binary BenchmarkInterface - -instance NFData BenchmarkInterface where rnf = genericRnf - -instance Monoid BenchmarkInterface where - mempty = BenchmarkUnsupported (BenchmarkTypeUnknown mempty nullVersion) - mappend = (<>) - -instance Semigroup BenchmarkInterface where - a <> (BenchmarkUnsupported _) = a - _ <> b = b diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/BenchmarkType.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/BenchmarkType.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/BenchmarkType.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/BenchmarkType.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,47 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.BenchmarkType ( - BenchmarkType(..), - knownBenchmarkTypes, -) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text -import Distribution.Version -import Text.PrettyPrint (char, text) - --- | The \"benchmark-type\" field in the benchmark stanza. --- -data BenchmarkType = BenchmarkTypeExe Version - -- ^ \"type: exitcode-stdio-x.y\" - | BenchmarkTypeUnknown String Version - -- ^ Some unknown benchmark type e.g. 
\"type: foo\" - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary BenchmarkType - -instance NFData BenchmarkType where rnf = genericRnf - -knownBenchmarkTypes :: [BenchmarkType] -knownBenchmarkTypes = [ BenchmarkTypeExe (mkVersion [1,0]) ] - -instance Pretty BenchmarkType where - pretty (BenchmarkTypeExe ver) = text "exitcode-stdio-" <<>> pretty ver - pretty (BenchmarkTypeUnknown name ver) = text name <<>> char '-' <<>> pretty ver - -instance Parsec BenchmarkType where - parsec = parsecStandard $ \ver name -> case name of - "exitcode-stdio" -> BenchmarkTypeExe ver - _ -> BenchmarkTypeUnknown name ver - -instance Text BenchmarkType where - parse = stdParse $ \ver name -> case name of - "exitcode-stdio" -> BenchmarkTypeExe ver - _ -> BenchmarkTypeUnknown name ver - - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/BuildInfo/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/BuildInfo/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/BuildInfo/Lens.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/BuildInfo/Lens.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,320 +0,0 @@ -module Distribution.Types.BuildInfo.Lens ( - BuildInfo, - HasBuildInfo (..), - HasBuildInfos (..), - ) where - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.Compat.Lens - -import Distribution.Compiler (CompilerFlavor) -import Distribution.ModuleName (ModuleName) -import Distribution.Types.BuildInfo (BuildInfo) -import Distribution.Types.Dependency (Dependency) -import Distribution.Types.ExeDependency (ExeDependency) -import Distribution.Types.LegacyExeDependency (LegacyExeDependency) -import Distribution.Types.Mixin (Mixin) -import Distribution.Types.PkgconfigDependency (PkgconfigDependency) -import Language.Haskell.Extension (Extension, Language) - -import 
qualified Distribution.Types.BuildInfo as T - --- | Classy lenses for 'BuildInfo'. -class HasBuildInfo a where - buildInfo :: Lens' a BuildInfo - - buildable :: Lens' a Bool - buildable = buildInfo . buildable - {-# INLINE buildable #-} - - buildTools :: Lens' a [LegacyExeDependency] - buildTools = buildInfo . buildTools - {-# INLINE buildTools #-} - - buildToolDepends :: Lens' a [ExeDependency] - buildToolDepends = buildInfo . buildToolDepends - {-# INLINE buildToolDepends #-} - - cppOptions :: Lens' a [String] - cppOptions = buildInfo . cppOptions - {-# INLINE cppOptions #-} - - asmOptions :: Lens' a [String] - asmOptions = buildInfo . asmOptions - {-# INLINE asmOptions #-} - - cmmOptions :: Lens' a [String] - cmmOptions = buildInfo . cmmOptions - {-# INLINE cmmOptions #-} - - ccOptions :: Lens' a [String] - ccOptions = buildInfo . ccOptions - {-# INLINE ccOptions #-} - - cxxOptions :: Lens' a [String] - cxxOptions = buildInfo . cxxOptions - {-# INLINE cxxOptions #-} - - ldOptions :: Lens' a [String] - ldOptions = buildInfo . ldOptions - {-# INLINE ldOptions #-} - - pkgconfigDepends :: Lens' a [PkgconfigDependency] - pkgconfigDepends = buildInfo . pkgconfigDepends - {-# INLINE pkgconfigDepends #-} - - frameworks :: Lens' a [String] - frameworks = buildInfo . frameworks - {-# INLINE frameworks #-} - - extraFrameworkDirs :: Lens' a [String] - extraFrameworkDirs = buildInfo . extraFrameworkDirs - {-# INLINE extraFrameworkDirs #-} - - asmSources :: Lens' a [FilePath] - asmSources = buildInfo . asmSources - {-# INLINE asmSources #-} - - cmmSources :: Lens' a [FilePath] - cmmSources = buildInfo . cmmSources - {-# INLINE cmmSources #-} - - cSources :: Lens' a [FilePath] - cSources = buildInfo . cSources - {-# INLINE cSources #-} - - cxxSources :: Lens' a [FilePath] - cxxSources = buildInfo . cxxSources - {-# INLINE cxxSources #-} - - jsSources :: Lens' a [FilePath] - jsSources = buildInfo . 
jsSources - {-# INLINE jsSources #-} - - hsSourceDirs :: Lens' a [FilePath] - hsSourceDirs = buildInfo . hsSourceDirs - {-# INLINE hsSourceDirs #-} - - otherModules :: Lens' a [ModuleName] - otherModules = buildInfo . otherModules - {-# INLINE otherModules #-} - - virtualModules :: Lens' a [ModuleName] - virtualModules = buildInfo . virtualModules - {-# INLINE virtualModules #-} - - autogenModules :: Lens' a [ModuleName] - autogenModules = buildInfo . autogenModules - {-# INLINE autogenModules #-} - - defaultLanguage :: Lens' a (Maybe Language) - defaultLanguage = buildInfo . defaultLanguage - {-# INLINE defaultLanguage #-} - - otherLanguages :: Lens' a [Language] - otherLanguages = buildInfo . otherLanguages - {-# INLINE otherLanguages #-} - - defaultExtensions :: Lens' a [Extension] - defaultExtensions = buildInfo . defaultExtensions - {-# INLINE defaultExtensions #-} - - otherExtensions :: Lens' a [Extension] - otherExtensions = buildInfo . otherExtensions - {-# INLINE otherExtensions #-} - - oldExtensions :: Lens' a [Extension] - oldExtensions = buildInfo . oldExtensions - {-# INLINE oldExtensions #-} - - extraLibs :: Lens' a [String] - extraLibs = buildInfo . extraLibs - {-# INLINE extraLibs #-} - - extraGHCiLibs :: Lens' a [String] - extraGHCiLibs = buildInfo . extraGHCiLibs - {-# INLINE extraGHCiLibs #-} - - extraBundledLibs :: Lens' a [String] - extraBundledLibs = buildInfo . extraBundledLibs - {-# INLINE extraBundledLibs #-} - - extraLibFlavours :: Lens' a [String] - extraLibFlavours = buildInfo . extraLibFlavours - {-# INLINE extraLibFlavours #-} - - extraLibDirs :: Lens' a [String] - extraLibDirs = buildInfo . extraLibDirs - {-# INLINE extraLibDirs #-} - - includeDirs :: Lens' a [FilePath] - includeDirs = buildInfo . includeDirs - {-# INLINE includeDirs #-} - - includes :: Lens' a [FilePath] - includes = buildInfo . includes - {-# INLINE includes #-} - - installIncludes :: Lens' a [FilePath] - installIncludes = buildInfo . 
installIncludes - {-# INLINE installIncludes #-} - - options :: Lens' a [(CompilerFlavor,[String])] - options = buildInfo . options - {-# INLINE options #-} - - profOptions :: Lens' a [(CompilerFlavor,[String])] - profOptions = buildInfo . profOptions - {-# INLINE profOptions #-} - - sharedOptions :: Lens' a [(CompilerFlavor,[String])] - sharedOptions = buildInfo . sharedOptions - {-# INLINE sharedOptions #-} - - staticOptions :: Lens' a [(CompilerFlavor,[String])] - staticOptions = buildInfo . staticOptions - {-# INLINE staticOptions #-} - - customFieldsBI :: Lens' a [(String,String)] - customFieldsBI = buildInfo . customFieldsBI - {-# INLINE customFieldsBI #-} - - targetBuildDepends :: Lens' a [Dependency] - targetBuildDepends = buildInfo . targetBuildDepends - {-# INLINE targetBuildDepends #-} - - mixins :: Lens' a [Mixin] - mixins = buildInfo . mixins - {-# INLINE mixins #-} - - -instance HasBuildInfo BuildInfo where - buildInfo = id - {-# INLINE buildInfo #-} - - buildable f s = fmap (\x -> s { T.buildable = x }) (f (T.buildable s)) - {-# INLINE buildable #-} - - buildTools f s = fmap (\x -> s { T.buildTools = x }) (f (T.buildTools s)) - {-# INLINE buildTools #-} - - buildToolDepends f s = fmap (\x -> s { T.buildToolDepends = x }) (f (T.buildToolDepends s)) - {-# INLINE buildToolDepends #-} - - cppOptions f s = fmap (\x -> s { T.cppOptions = x }) (f (T.cppOptions s)) - {-# INLINE cppOptions #-} - - asmOptions f s = fmap (\x -> s { T.asmOptions = x }) (f (T.asmOptions s)) - {-# INLINE asmOptions #-} - - cmmOptions f s = fmap (\x -> s { T.cmmOptions = x }) (f (T.cmmOptions s)) - {-# INLINE cmmOptions #-} - - ccOptions f s = fmap (\x -> s { T.ccOptions = x }) (f (T.ccOptions s)) - {-# INLINE ccOptions #-} - - cxxOptions f s = fmap (\x -> s { T.cxxOptions = x }) (f (T.cxxOptions s)) - {-# INLINE cxxOptions #-} - - ldOptions f s = fmap (\x -> s { T.ldOptions = x }) (f (T.ldOptions s)) - {-# INLINE ldOptions #-} - - pkgconfigDepends f s = fmap (\x -> s { 
T.pkgconfigDepends = x }) (f (T.pkgconfigDepends s)) - {-# INLINE pkgconfigDepends #-} - - frameworks f s = fmap (\x -> s { T.frameworks = x }) (f (T.frameworks s)) - {-# INLINE frameworks #-} - - extraFrameworkDirs f s = fmap (\x -> s { T.extraFrameworkDirs = x }) (f (T.extraFrameworkDirs s)) - {-# INLINE extraFrameworkDirs #-} - - asmSources f s = fmap (\x -> s { T.asmSources = x }) (f (T.asmSources s)) - {-# INLINE asmSources #-} - - cmmSources f s = fmap (\x -> s { T.cmmSources = x }) (f (T.cmmSources s)) - {-# INLINE cmmSources #-} - - cSources f s = fmap (\x -> s { T.cSources = x }) (f (T.cSources s)) - {-# INLINE cSources #-} - - cxxSources f s = fmap (\x -> s { T.cSources = x }) (f (T.cxxSources s)) - {-# INLINE cxxSources #-} - - jsSources f s = fmap (\x -> s { T.jsSources = x }) (f (T.jsSources s)) - {-# INLINE jsSources #-} - - hsSourceDirs f s = fmap (\x -> s { T.hsSourceDirs = x }) (f (T.hsSourceDirs s)) - {-# INLINE hsSourceDirs #-} - - otherModules f s = fmap (\x -> s { T.otherModules = x }) (f (T.otherModules s)) - {-# INLINE otherModules #-} - - virtualModules f s = fmap (\x -> s { T.virtualModules = x }) (f (T.virtualModules s)) - {-# INLINE virtualModules #-} - - autogenModules f s = fmap (\x -> s { T.autogenModules = x }) (f (T.autogenModules s)) - {-# INLINE autogenModules #-} - - defaultLanguage f s = fmap (\x -> s { T.defaultLanguage = x }) (f (T.defaultLanguage s)) - {-# INLINE defaultLanguage #-} - - otherLanguages f s = fmap (\x -> s { T.otherLanguages = x }) (f (T.otherLanguages s)) - {-# INLINE otherLanguages #-} - - defaultExtensions f s = fmap (\x -> s { T.defaultExtensions = x }) (f (T.defaultExtensions s)) - {-# INLINE defaultExtensions #-} - - otherExtensions f s = fmap (\x -> s { T.otherExtensions = x }) (f (T.otherExtensions s)) - {-# INLINE otherExtensions #-} - - oldExtensions f s = fmap (\x -> s { T.oldExtensions = x }) (f (T.oldExtensions s)) - {-# INLINE oldExtensions #-} - - extraLibs f s = fmap (\x -> s { T.extraLibs = x }) 
(f (T.extraLibs s)) - {-# INLINE extraLibs #-} - - extraGHCiLibs f s = fmap (\x -> s { T.extraGHCiLibs = x }) (f (T.extraGHCiLibs s)) - {-# INLINE extraGHCiLibs #-} - - extraBundledLibs f s = fmap (\x -> s { T.extraBundledLibs = x }) (f (T.extraBundledLibs s)) - {-# INLINE extraBundledLibs #-} - - extraLibFlavours f s = fmap (\x -> s { T.extraLibFlavours = x }) (f (T.extraLibFlavours s)) - {-# INLINE extraLibFlavours #-} - - extraLibDirs f s = fmap (\x -> s { T.extraLibDirs = x }) (f (T.extraLibDirs s)) - {-# INLINE extraLibDirs #-} - - includeDirs f s = fmap (\x -> s { T.includeDirs = x }) (f (T.includeDirs s)) - {-# INLINE includeDirs #-} - - includes f s = fmap (\x -> s { T.includes = x }) (f (T.includes s)) - {-# INLINE includes #-} - - installIncludes f s = fmap (\x -> s { T.installIncludes = x }) (f (T.installIncludes s)) - {-# INLINE installIncludes #-} - - options f s = fmap (\x -> s { T.options = x }) (f (T.options s)) - {-# INLINE options #-} - - profOptions f s = fmap (\x -> s { T.profOptions = x }) (f (T.profOptions s)) - {-# INLINE profOptions #-} - - sharedOptions f s = fmap (\x -> s { T.sharedOptions = x }) (f (T.sharedOptions s)) - {-# INLINE sharedOptions #-} - - staticOptions f s = fmap (\x -> s { T.staticOptions = x }) (f (T.staticOptions s)) - {-# INLINE staticOptions #-} - - customFieldsBI f s = fmap (\x -> s { T.customFieldsBI = x }) (f (T.customFieldsBI s)) - {-# INLINE customFieldsBI #-} - - targetBuildDepends f s = fmap (\x -> s { T.targetBuildDepends = x }) (f (T.targetBuildDepends s)) - {-# INLINE targetBuildDepends #-} - - mixins f s = fmap (\x -> s { T.mixins = x }) (f (T.mixins s)) - {-# INLINE mixins #-} - -class HasBuildInfos a where - traverseBuildInfos :: Traversal' a BuildInfo diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/BuildInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/BuildInfo.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/BuildInfo.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/BuildInfo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,252 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.BuildInfo ( - BuildInfo(..), - - emptyBuildInfo, - allLanguages, - allExtensions, - usedExtensions, - usesTemplateHaskellOrQQ, - - hcOptions, - hcProfOptions, - hcSharedOptions, - hcStaticOptions, -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.Mixin -import Distribution.Types.Dependency -import Distribution.Types.ExeDependency -import Distribution.Types.LegacyExeDependency -import Distribution.Types.PkgconfigDependency - -import Distribution.ModuleName -import Distribution.Compiler -import Language.Haskell.Extension - --- Consider refactoring into executable and library versions. -data BuildInfo = BuildInfo { - -- | component is buildable here - buildable :: Bool, - -- | Tools needed to build this bit. - -- - -- This is a legacy field that 'buildToolDepends' largely supersedes. - -- - -- Unless use are very sure what you are doing, use the functions in - -- "Distribution.Simple.BuildToolDepends" rather than accessing this - -- field directly. - buildTools :: [LegacyExeDependency], - -- | Haskell tools needed to build this bit - -- - -- This field is better than 'buildTools' because it allows one to - -- precisely specify an executable in a package. - -- - -- Unless use are very sure what you are doing, use the functions in - -- "Distribution.Simple.BuildToolDepends" rather than accessing this - -- field directly. 
- buildToolDepends :: [ExeDependency], - cppOptions :: [String], -- ^ options for pre-processing Haskell code - asmOptions :: [String], -- ^ options for assmebler - cmmOptions :: [String], -- ^ options for C-- compiler - ccOptions :: [String], -- ^ options for C compiler - cxxOptions :: [String], -- ^ options for C++ compiler - ldOptions :: [String], -- ^ options for linker - pkgconfigDepends :: [PkgconfigDependency], -- ^ pkg-config packages that are used - frameworks :: [String], -- ^support frameworks for Mac OS X - extraFrameworkDirs:: [String], -- ^ extra locations to find frameworks. - asmSources :: [FilePath], -- ^ Assembly files. - cmmSources :: [FilePath], -- ^ C-- files. - cSources :: [FilePath], - cxxSources :: [FilePath], - jsSources :: [FilePath], - hsSourceDirs :: [FilePath], -- ^ where to look for the Haskell module hierarchy - otherModules :: [ModuleName], -- ^ non-exposed or non-main modules - virtualModules :: [ModuleName], -- ^ exposed modules that do not have a source file (e.g. @GHC.Prim@ from @ghc-prim@ package) - autogenModules :: [ModuleName], -- ^ not present on sdist, Paths_* or user-generated with a custom Setup.hs - - defaultLanguage :: Maybe Language,-- ^ language used when not explicitly specified - otherLanguages :: [Language], -- ^ other languages used within the package - defaultExtensions :: [Extension], -- ^ language extensions used by all modules - otherExtensions :: [Extension], -- ^ other language extensions used within the package - oldExtensions :: [Extension], -- ^ the old extensions field, treated same as 'defaultExtensions' - - extraLibs :: [String], -- ^ what libraries to link with when compiling a program that uses your package - extraGHCiLibs :: [String], -- ^ if present, overrides extraLibs when package is loaded with GHCi. - extraBundledLibs :: [String], -- ^ if present, adds libs to hs-lirbaries, which become part of the package. - -- Example: the Cffi library shipping with the rts, alognside the HSrts-1.0.a,.o,... 
- -- Example 2: a library that is being built by a foreing tool (e.g. rust) - -- and copied and registered together with this library. The - -- logic on how this library is built will have to be encoded in a - -- custom Setup for now. Oherwise cabal would need to lear how to - -- call arbitary lirbary builders. - extraLibFlavours :: [String], -- ^ Hidden Flag. This set of strings, will be appended to all lirbaries when - -- copying. E.g. [libHS_ | flavour <- extraLibFlavours]. This - -- should only be needed in very specific cases, e.g. the `rts` package, where - -- there are multiple copies of slightly differently built libs. - extraLibDirs :: [String], - includeDirs :: [FilePath], -- ^directories to find .h files - includes :: [FilePath], -- ^ The .h files to be found in includeDirs - installIncludes :: [FilePath], -- ^ .h files to install with the package - options :: [(CompilerFlavor,[String])], - profOptions :: [(CompilerFlavor,[String])], - sharedOptions :: [(CompilerFlavor,[String])], - staticOptions :: [(CompilerFlavor,[String])], - customFieldsBI :: [(String,String)], -- ^Custom fields starting - -- with x-, stored in a - -- simple assoc-list. 
- targetBuildDepends :: [Dependency], -- ^ Dependencies specific to a library or executable target - mixins :: [Mixin] - } - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary BuildInfo - -instance NFData BuildInfo where rnf = genericRnf - -instance Monoid BuildInfo where - mempty = BuildInfo { - buildable = True, - buildTools = [], - buildToolDepends = [], - cppOptions = [], - asmOptions = [], - cmmOptions = [], - ccOptions = [], - cxxOptions = [], - ldOptions = [], - pkgconfigDepends = [], - frameworks = [], - extraFrameworkDirs = [], - asmSources = [], - cmmSources = [], - cSources = [], - cxxSources = [], - jsSources = [], - hsSourceDirs = [], - otherModules = [], - virtualModules = [], - autogenModules = [], - defaultLanguage = Nothing, - otherLanguages = [], - defaultExtensions = [], - otherExtensions = [], - oldExtensions = [], - extraLibs = [], - extraGHCiLibs = [], - extraBundledLibs = [], - extraLibFlavours = [], - extraLibDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - options = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - customFieldsBI = [], - targetBuildDepends = [], - mixins = [] - } - mappend = (<>) - -instance Semigroup BuildInfo where - a <> b = BuildInfo { - buildable = buildable a && buildable b, - buildTools = combine buildTools, - buildToolDepends = combine buildToolDepends, - cppOptions = combine cppOptions, - asmOptions = combine asmOptions, - cmmOptions = combine cmmOptions, - ccOptions = combine ccOptions, - cxxOptions = combine cxxOptions, - ldOptions = combine ldOptions, - pkgconfigDepends = combine pkgconfigDepends, - frameworks = combineNub frameworks, - extraFrameworkDirs = combineNub extraFrameworkDirs, - asmSources = combineNub asmSources, - cmmSources = combineNub cmmSources, - cSources = combineNub cSources, - cxxSources = combineNub cxxSources, - jsSources = combineNub jsSources, - hsSourceDirs = combineNub hsSourceDirs, - otherModules = combineNub 
otherModules, - virtualModules = combineNub virtualModules, - autogenModules = combineNub autogenModules, - defaultLanguage = combineMby defaultLanguage, - otherLanguages = combineNub otherLanguages, - defaultExtensions = combineNub defaultExtensions, - otherExtensions = combineNub otherExtensions, - oldExtensions = combineNub oldExtensions, - extraLibs = combine extraLibs, - extraGHCiLibs = combine extraGHCiLibs, - extraBundledLibs = combine extraBundledLibs, - extraLibFlavours = combine extraLibFlavours, - extraLibDirs = combineNub extraLibDirs, - includeDirs = combineNub includeDirs, - includes = combineNub includes, - installIncludes = combineNub installIncludes, - options = combine options, - profOptions = combine profOptions, - sharedOptions = combine sharedOptions, - staticOptions = combine staticOptions, - customFieldsBI = combine customFieldsBI, - targetBuildDepends = combineNub targetBuildDepends, - mixins = combine mixins - } - where - combine field = field a `mappend` field b - combineNub field = nub (combine field) - combineMby field = field b `mplus` field a - -emptyBuildInfo :: BuildInfo -emptyBuildInfo = mempty - --- | The 'Language's used by this component --- -allLanguages :: BuildInfo -> [Language] -allLanguages bi = maybeToList (defaultLanguage bi) - ++ otherLanguages bi - --- | The 'Extension's that are used somewhere by this component --- -allExtensions :: BuildInfo -> [Extension] -allExtensions bi = usedExtensions bi - ++ otherExtensions bi - --- | The 'Extensions' that are used by all modules in this component --- -usedExtensions :: BuildInfo -> [Extension] -usedExtensions bi = oldExtensions bi - ++ defaultExtensions bi - --- | Whether any modules in this component use Template Haskell or --- Quasi Quotes -usesTemplateHaskellOrQQ :: BuildInfo -> Bool -usesTemplateHaskellOrQQ bi = any p (allExtensions bi) - where - p ex = ex `elem` - [EnableExtension TemplateHaskell, EnableExtension QuasiQuotes] - --- |Select options for a particular Haskell 
compiler. -hcOptions :: CompilerFlavor -> BuildInfo -> [String] -hcOptions = lookupHcOptions options - -hcProfOptions :: CompilerFlavor -> BuildInfo -> [String] -hcProfOptions = lookupHcOptions profOptions - -hcSharedOptions :: CompilerFlavor -> BuildInfo -> [String] -hcSharedOptions = lookupHcOptions sharedOptions - -hcStaticOptions :: CompilerFlavor -> BuildInfo -> [String] -hcStaticOptions = lookupHcOptions staticOptions - -lookupHcOptions :: (BuildInfo -> [(CompilerFlavor,[String])]) - -> CompilerFlavor -> BuildInfo -> [String] -lookupHcOptions f hc bi = [ opt | (hc',opts) <- f bi - , hc' == hc - , opt <- opts ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/BuildType.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/BuildType.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/BuildType.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/BuildType.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,67 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.BuildType ( - BuildType(..), - knownBuildTypes, -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.CabalSpecVersion (CabalSpecVersion (..)) -import Distribution.Pretty -import Distribution.Parsec.Class -import Distribution.Text - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - --- | The type of build system used by this package. -data BuildType - = Simple -- ^ calls @Distribution.Simple.defaultMain@ - | Configure -- ^ calls @Distribution.Simple.defaultMainWithHooks defaultUserHooks@, - -- which invokes @configure@ to generate additional build - -- information used by later phases. 
- | Make -- ^ calls @Distribution.Make.defaultMain@ - | Custom -- ^ uses user-supplied @Setup.hs@ or @Setup.lhs@ (default) - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary BuildType - -instance NFData BuildType where rnf = genericRnf - -knownBuildTypes :: [BuildType] -knownBuildTypes = [Simple, Configure, Make, Custom] - -instance Pretty BuildType where - pretty = Disp.text . show - -instance Parsec BuildType where - parsec = do - name <- P.munch1 isAlphaNum - case name of - "Simple" -> return Simple - "Configure" -> return Configure - "Custom" -> return Custom - "Make" -> return Make - "Default" -> do - v <- askCabalSpecVersion - if v <= CabalSpecOld - then do - parsecWarning PWTBuildTypeDefault "build-type: Default is parsed as Custom for legacy reasons. See https://github.com/haskell/cabal/issues/5020" - return Custom - else fail ("unknown build-type: '" ++ name ++ "'") - _ -> fail ("unknown build-type: '" ++ name ++ "'") - -instance Text BuildType where - parse = do - name <- Parse.munch1 isAlphaNum - case name of - "Simple" -> return Simple - "Configure" -> return Configure - "Custom" -> return Custom - "Make" -> return Make - "Default" -> return Custom - _ -> fail ("unknown build-type: '" ++ name ++ "'") diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Component.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Component.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Component.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Component.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,91 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.Component ( - Component(..), - foldComponent, - componentBuildInfo, - componentBuildable, - componentName, - partitionComponents, -) where - -import Prelude () -import Distribution.Compat.Prelude - 
-import Distribution.Types.Library -import Distribution.Types.ForeignLib -import Distribution.Types.Executable -import Distribution.Types.TestSuite -import Distribution.Types.Benchmark - -import Distribution.Types.ComponentName -import Distribution.Types.BuildInfo - -import qualified Distribution.Types.BuildInfo.Lens as L - -data Component = CLib Library - | CFLib ForeignLib - | CExe Executable - | CTest TestSuite - | CBench Benchmark - deriving (Show, Eq, Read) - -instance Semigroup Component where - CLib l <> CLib l' = CLib (l <> l') - CFLib l <> CFLib l' = CFLib (l <> l') - CExe e <> CExe e' = CExe (e <> e') - CTest t <> CTest t' = CTest (t <> t') - CBench b <> CBench b' = CBench (b <> b') - _ <> _ = error "Cannot merge Component" - -instance L.HasBuildInfo Component where - buildInfo f (CLib l) = CLib <$> L.buildInfo f l - buildInfo f (CFLib l) = CFLib <$> L.buildInfo f l - buildInfo f (CExe e) = CExe <$> L.buildInfo f e - buildInfo f (CTest t) = CTest <$> L.buildInfo f t - buildInfo f (CBench b) = CBench <$> L.buildInfo f b - -foldComponent :: (Library -> a) - -> (ForeignLib -> a) - -> (Executable -> a) - -> (TestSuite -> a) - -> (Benchmark -> a) - -> Component - -> a -foldComponent f _ _ _ _ (CLib lib) = f lib -foldComponent _ f _ _ _ (CFLib flib)= f flib -foldComponent _ _ f _ _ (CExe exe) = f exe -foldComponent _ _ _ f _ (CTest tst) = f tst -foldComponent _ _ _ _ f (CBench bch) = f bch - -componentBuildInfo :: Component -> BuildInfo -componentBuildInfo = - foldComponent libBuildInfo foreignLibBuildInfo buildInfo testBuildInfo benchmarkBuildInfo - --- | Is a component buildable (i.e., not marked with @buildable: False@)? --- See also this note in --- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components". --- --- @since 2.0.0.2 --- -componentBuildable :: Component -> Bool -componentBuildable = buildable . componentBuildInfo - -componentName :: Component -> ComponentName -componentName = - foldComponent (libraryComponentName . 
libName) - (CFLibName . foreignLibName) - (CExeName . exeName) - (CTestName . testName) - (CBenchName . benchmarkName) - -partitionComponents - :: [Component] - -> ([Library], [ForeignLib], [Executable], [TestSuite], [Benchmark]) -partitionComponents = foldr (foldComponent fa fb fc fd fe) ([],[],[],[],[]) - where - fa x ~(a,b,c,d,e) = (x:a,b,c,d,e) - fb x ~(a,b,c,d,e) = (a,x:b,c,d,e) - fc x ~(a,b,c,d,e) = (a,b,x:c,d,e) - fd x ~(a,b,c,d,e) = (a,b,c,x:d,e) - fe x ~(a,b,c,d,e) = (a,b,c,d,x:e) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ComponentId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ComponentId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ComponentId.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ComponentId.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,74 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} - -module Distribution.Types.ComponentId - ( ComponentId, unComponentId, mkComponentId - ) where - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.Utils.ShortText - -import qualified Distribution.Compat.ReadP as Parse -import qualified Distribution.Compat.CharParsing as P -import Distribution.Text -import Distribution.Pretty -import Distribution.Parsec.Class - -import Text.PrettyPrint (text) - --- | A 'ComponentId' uniquely identifies the transitive source --- code closure of a component (i.e. libraries, executables). --- --- For non-Backpack components, this corresponds one to one with --- the 'UnitId', which serves as the basis for install paths, --- linker symbols, etc. --- --- Use 'mkComponentId' and 'unComponentId' to convert from/to a --- 'String'. 
--- --- This type is opaque since @Cabal-2.0@ --- --- @since 2.0.0.2 -newtype ComponentId = ComponentId ShortText - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) - --- | Construct a 'ComponentId' from a 'String' --- --- 'mkComponentId' is the inverse to 'unComponentId' --- --- Note: No validations are performed to ensure that the resulting --- 'ComponentId' is valid --- --- @since 2.0.0.2 -mkComponentId :: String -> ComponentId -mkComponentId = ComponentId . toShortText - --- | Convert 'ComponentId' to 'String' --- --- @since 2.0.0.2 -unComponentId :: ComponentId -> String -unComponentId (ComponentId s) = fromShortText s - --- | 'mkComponentId' --- --- @since 2.0.0.2 -instance IsString ComponentId where - fromString = mkComponentId - -instance Binary ComponentId - -instance Pretty ComponentId where - pretty = text . unComponentId - -instance Parsec ComponentId where - parsec = mkComponentId `fmap` P.munch1 abi_char - where abi_char c = isAlphaNum c || c `elem` "-_." - -instance Text ComponentId where - parse = mkComponentId `fmap` Parse.munch1 abi_char - where abi_char c = isAlphaNum c || c `elem` "-_." - -instance NFData ComponentId where - rnf = rnf . 
unComponentId diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ComponentInclude.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ComponentInclude.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ComponentInclude.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ComponentInclude.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -module Distribution.Types.ComponentInclude ( - ComponentInclude(..), - ci_id, - ci_pkgid, - ci_cname -) where - -import Distribution.Types.PackageId -import Distribution.Types.ComponentName -import Distribution.Types.AnnotatedId - --- Once ci_id is refined to an 'OpenUnitId' or 'DefUnitId', --- the 'includeRequiresRn' is not so useful (because it --- includes the requirements renaming that is no longer --- needed); use 'ci_prov_renaming' instead. -data ComponentInclude id rn = ComponentInclude { - ci_ann_id :: AnnotatedId id, - ci_renaming :: rn, - -- | Did this come from an entry in @mixins@, or - -- was implicitly generated by @build-depends@? - ci_implicit :: Bool - } - -ci_id :: ComponentInclude id rn -> id -ci_id = ann_id . ci_ann_id - -ci_pkgid :: ComponentInclude id rn -> PackageId -ci_pkgid = ann_pid . ci_ann_id - --- | This should always return 'CLibName' or 'CSubLibName' -ci_cname :: ComponentInclude id rn -> ComponentName -ci_cname = ann_cname . 
ci_ann_id diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ComponentLocalBuildInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ComponentLocalBuildInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ComponentLocalBuildInfo.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ComponentLocalBuildInfo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,127 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE TypeFamilies #-} - -module Distribution.Types.ComponentLocalBuildInfo ( - ComponentLocalBuildInfo(..), - componentIsIndefinite, - maybeComponentInstantiatedWith, - ) where - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.ModuleName - -import Distribution.Backpack -import Distribution.Compat.Graph -import Distribution.Types.ComponentId -import Distribution.Types.MungedPackageId -import Distribution.Types.UnitId -import Distribution.Types.ComponentName -import Distribution.Types.MungedPackageName - -import Distribution.PackageDescription -import qualified Distribution.InstalledPackageInfo as Installed - --- | The first five fields are common across all algebraic variants. -data ComponentLocalBuildInfo - = LibComponentLocalBuildInfo { - -- | It would be very convenient to store the literal Library here, - -- but if we do that, it will get serialized (via the Binary) - -- instance twice. So instead we just provide the ComponentName, - -- which can be used to find the Component in the - -- PackageDescription. NB: eventually, this will NOT uniquely - -- identify the ComponentLocalBuildInfo. - componentLocalName :: ComponentName, - -- | The computed 'ComponentId' of this component. - componentComponentId :: ComponentId, - -- | The computed 'UnitId' which uniquely identifies this - -- component. Might be hashed. 
- componentUnitId :: UnitId, - -- | Is this an indefinite component (i.e. has unfilled holes)? - componentIsIndefinite_ :: Bool, - -- | How the component was instantiated - componentInstantiatedWith :: [(ModuleName, OpenModule)], - -- | Resolved internal and external package dependencies for this component. - -- The 'BuildInfo' specifies a set of build dependencies that must be - -- satisfied in terms of version ranges. This field fixes those dependencies - -- to the specific versions available on this machine for this compiler. - componentPackageDeps :: [(UnitId, MungedPackageId)], - -- | The set of packages that are brought into scope during - -- compilation, including a 'ModuleRenaming' which may used - -- to hide or rename modules. This is what gets translated into - -- @-package-id@ arguments. This is a modernized version of - -- 'componentPackageDeps', which is kept around for BC purposes. - componentIncludes :: [(OpenUnitId, ModuleRenaming)], - componentExeDeps :: [UnitId], - -- | The internal dependencies which induce a graph on the - -- 'ComponentLocalBuildInfo' of this package. This does NOT - -- coincide with 'componentPackageDeps' because it ALSO records - -- 'build-tool' dependencies on executables. Maybe one day - -- @cabal-install@ will also handle these correctly too! - componentInternalDeps :: [UnitId], - -- | Compatibility "package key" that we pass to older versions of GHC. - componentCompatPackageKey :: String, - -- | Compatibility "package name" that we register this component as. - componentCompatPackageName :: MungedPackageName, - -- | A list of exposed modules (either defined in this component, - -- or reexported from another component.) - componentExposedModules :: [Installed.ExposedModule], - -- | Convenience field, specifying whether or not this is the - -- "public library" that has the same name as the package. 
- componentIsPublic :: Bool - } - -- TODO: refactor all these duplicates - | FLibComponentLocalBuildInfo { - componentLocalName :: ComponentName, - componentComponentId :: ComponentId, - componentUnitId :: UnitId, - componentPackageDeps :: [(UnitId, MungedPackageId)], - componentIncludes :: [(OpenUnitId, ModuleRenaming)], - componentExeDeps :: [UnitId], - componentInternalDeps :: [UnitId] - } - | ExeComponentLocalBuildInfo { - componentLocalName :: ComponentName, - componentComponentId :: ComponentId, - componentUnitId :: UnitId, - componentPackageDeps :: [(UnitId, MungedPackageId)], - componentIncludes :: [(OpenUnitId, ModuleRenaming)], - componentExeDeps :: [UnitId], - componentInternalDeps :: [UnitId] - } - | TestComponentLocalBuildInfo { - componentLocalName :: ComponentName, - componentComponentId :: ComponentId, - componentUnitId :: UnitId, - componentPackageDeps :: [(UnitId, MungedPackageId)], - componentIncludes :: [(OpenUnitId, ModuleRenaming)], - componentExeDeps :: [UnitId], - componentInternalDeps :: [UnitId] - - } - | BenchComponentLocalBuildInfo { - componentLocalName :: ComponentName, - componentComponentId :: ComponentId, - componentUnitId :: UnitId, - componentPackageDeps :: [(UnitId, MungedPackageId)], - componentIncludes :: [(OpenUnitId, ModuleRenaming)], - componentExeDeps :: [UnitId], - componentInternalDeps :: [UnitId] - } - deriving (Generic, Read, Show) - -instance Binary ComponentLocalBuildInfo - -instance IsNode ComponentLocalBuildInfo where - type Key ComponentLocalBuildInfo = UnitId - nodeKey = componentUnitId - nodeNeighbors = componentInternalDeps - -componentIsIndefinite :: ComponentLocalBuildInfo -> Bool -componentIsIndefinite LibComponentLocalBuildInfo{ componentIsIndefinite_ = b } = b -componentIsIndefinite _ = False - -maybeComponentInstantiatedWith :: ComponentLocalBuildInfo -> Maybe [(ModuleName, OpenModule)] -maybeComponentInstantiatedWith - LibComponentLocalBuildInfo { componentInstantiatedWith = insts } = Just insts 
-maybeComponentInstantiatedWith _ = Nothing diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ComponentName.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ComponentName.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ComponentName.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ComponentName.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,91 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.ComponentName ( - ComponentName(..), - defaultLibName, - libraryComponentName, - showComponentName, - componentNameStanza, - componentNameString, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.ReadP ((<++)) -import Distribution.Types.UnqualComponentName -import Distribution.Pretty -import Distribution.Text - -import Text.PrettyPrint as Disp - --- Libraries live in a separate namespace, so must distinguish -data ComponentName = CLibName - | CSubLibName UnqualComponentName - | CFLibName UnqualComponentName - | CExeName UnqualComponentName - | CTestName UnqualComponentName - | CBenchName UnqualComponentName - deriving (Eq, Generic, Ord, Read, Show, Typeable) - -instance Binary ComponentName - --- Build-target-ish syntax -instance Pretty ComponentName where - pretty CLibName = Disp.text "lib" - pretty (CSubLibName str) = Disp.text "lib:" <<>> pretty str - pretty (CFLibName str) = Disp.text "flib:" <<>> pretty str - pretty (CExeName str) = Disp.text "exe:" <<>> pretty str - pretty (CTestName str) = Disp.text "test:" <<>> pretty str - pretty (CBenchName str) = Disp.text "bench:" <<>> pretty str - -instance Text ComponentName where - parse = parseComposite <++ parseSingle - where - parseSingle = Parse.string "lib" >> return CLibName - 
parseComposite = do - ctor <- Parse.choice [ Parse.string "lib:" >> return CSubLibName - , Parse.string "flib:" >> return CFLibName - , Parse.string "exe:" >> return CExeName - , Parse.string "bench:" >> return CBenchName - , Parse.string "test:" >> return CTestName ] - ctor <$> parse - -defaultLibName :: ComponentName -defaultLibName = CLibName - -showComponentName :: ComponentName -> String -showComponentName CLibName = "library" -showComponentName (CSubLibName name) = "library '" ++ display name ++ "'" -showComponentName (CFLibName name) = "foreign library '" ++ display name ++ "'" -showComponentName (CExeName name) = "executable '" ++ display name ++ "'" -showComponentName (CTestName name) = "test suite '" ++ display name ++ "'" -showComponentName (CBenchName name) = "benchmark '" ++ display name ++ "'" - -componentNameStanza :: ComponentName -> String -componentNameStanza CLibName = "library" -componentNameStanza (CSubLibName name) = "library " ++ display name -componentNameStanza (CFLibName name) = "foreign-library " ++ display name -componentNameStanza (CExeName name) = "executable " ++ display name -componentNameStanza (CTestName name) = "test-suite " ++ display name -componentNameStanza (CBenchName name) = "benchmark " ++ display name - --- | This gets the underlying unqualified component name. In fact, it is --- guaranteed to uniquely identify a component, returning --- @Nothing@ if the 'ComponentName' was for the public --- library. -componentNameString :: ComponentName -> Maybe UnqualComponentName -componentNameString CLibName = Nothing -componentNameString (CSubLibName n) = Just n -componentNameString (CFLibName n) = Just n -componentNameString (CExeName n) = Just n -componentNameString (CTestName n) = Just n -componentNameString (CBenchName n) = Just n - --- | Convert the 'UnqualComponentName' of a library into a --- 'ComponentName'. 
-libraryComponentName :: Maybe UnqualComponentName -> ComponentName -libraryComponentName Nothing = CLibName -libraryComponentName (Just n) = CSubLibName n diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ComponentRequestedSpec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ComponentRequestedSpec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ComponentRequestedSpec.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ComponentRequestedSpec.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,123 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Types.ComponentRequestedSpec ( - -- $buildable_vs_enabled_components - - ComponentRequestedSpec(..), - ComponentDisabledReason(..), - - defaultComponentRequestedSpec, - componentNameRequested, - - componentEnabled, - componentDisabledReason, -) where - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.Text - -import Distribution.Types.Component -- TODO: maybe remove me? -import Distribution.Types.ComponentName - --- $buildable_vs_enabled_components --- #buildable_vs_enabled_components# --- --- = Note: Buildable versus requested versus enabled components --- What's the difference between a buildable component (ala --- 'componentBuildable'), a requested component --- (ala 'componentNameRequested'), and an enabled component (ala --- 'componentEnabled')? --- --- A component is __buildable__ if, after resolving flags and --- conditionals, there is no @buildable: False@ property in it. --- This is a /static/ property that arises from the --- Cabal file and the package description flattening; once we have --- a 'PackageDescription' buildability is known. --- --- A component is __requested__ if a user specified, via a --- the flags and arguments passed to configure, that it should be --- built. 
E.g., @--enable-tests@ or @--enable-benchmarks@ request --- all tests and benchmarks, if they are provided. What is requested --- can be read off directly from 'ComponentRequestedSpec'. A requested --- component is not always buildable; e.g., a user may @--enable-tests@ --- but one of the test suites may have @buildable: False@. --- --- A component is __enabled__ if it is BOTH buildable --- and requested. Once we have a 'LocalBuildInfo', whether or not a --- component is enabled is known. --- --- Generally speaking, most Cabal API code cares if a component --- is enabled. (For example, if you want to run a preprocessor on each --- component prior to building them, you want to run this on each --- /enabled/ component.) --- --- Note that post-configuration, you will generally not see a --- non-buildable 'Component'. This is because 'flattenPD' will drop --- any such components from 'PackageDescription'. See #3858 for --- an example where this causes problems. - --- | Describes what components are enabled by user-interaction. --- See also this note in --- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components". --- --- @since 2.0.0.2 -data ComponentRequestedSpec - = ComponentRequestedSpec { testsRequested :: Bool - , benchmarksRequested :: Bool } - | OneComponentRequestedSpec ComponentName - deriving (Generic, Read, Show, Eq) -instance Binary ComponentRequestedSpec - --- | The default set of enabled components. Historically tests and --- benchmarks are NOT enabled by default. --- --- @since 2.0.0.2 -defaultComponentRequestedSpec :: ComponentRequestedSpec -defaultComponentRequestedSpec = ComponentRequestedSpec False False - --- | Is this component enabled? See also this note in --- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components". --- --- @since 2.0.0.2 -componentEnabled :: ComponentRequestedSpec -> Component -> Bool -componentEnabled enabled = isNothing . 
componentDisabledReason enabled - --- | Is this component name enabled? See also this note in --- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components". --- --- @since 2.0.0.2 -componentNameRequested :: ComponentRequestedSpec -> ComponentName -> Bool -componentNameRequested enabled = isNothing . componentNameNotRequestedReason enabled - --- | Is this component disabled, and if so, why? --- --- @since 2.0.0.2 -componentDisabledReason :: ComponentRequestedSpec -> Component - -> Maybe ComponentDisabledReason -componentDisabledReason enabled comp - | not (componentBuildable comp) = Just DisabledComponent - | otherwise = componentNameNotRequestedReason enabled (componentName comp) - --- | Is this component name disabled, and if so, why? --- --- @since 2.0.0.2 -componentNameNotRequestedReason :: ComponentRequestedSpec -> ComponentName - -> Maybe ComponentDisabledReason -componentNameNotRequestedReason - ComponentRequestedSpec{ testsRequested = False } (CTestName _) - = Just DisabledAllTests -componentNameNotRequestedReason - ComponentRequestedSpec{ benchmarksRequested = False } (CBenchName _) - = Just DisabledAllBenchmarks -componentNameNotRequestedReason ComponentRequestedSpec{} _ = Nothing -componentNameNotRequestedReason (OneComponentRequestedSpec cname) c - | c == cname = Nothing - | otherwise = Just (DisabledAllButOne (display cname)) - --- | A reason explaining why a component is disabled. 
--- --- @since 2.0.0.2 -data ComponentDisabledReason = DisabledComponent - | DisabledAllTests - | DisabledAllBenchmarks - | DisabledAllButOne String diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Condition.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Condition.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Condition.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Condition.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,135 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.Condition ( - Condition(..), - cNot, - cAnd, - cOr, - simplifyCondition, -) where - -import Prelude () -import Distribution.Compat.Prelude - --- | A boolean expression parameterized over the variable type used. -data Condition c = Var c - | Lit Bool - | CNot (Condition c) - | COr (Condition c) (Condition c) - | CAnd (Condition c) (Condition c) - deriving (Show, Eq, Typeable, Data, Generic) - --- | Boolean negation of a 'Condition' value. -cNot :: Condition a -> Condition a -cNot (Lit b) = Lit (not b) -cNot (CNot c) = c -cNot c = CNot c - --- | Boolean AND of two 'Condtion' values. -cAnd :: Condition a -> Condition a -> Condition a -cAnd (Lit False) _ = Lit False -cAnd _ (Lit False) = Lit False -cAnd (Lit True) x = x -cAnd x (Lit True) = x -cAnd x y = CAnd x y - --- | Boolean OR of two 'Condition' values. 
-cOr :: Eq v => Condition v -> Condition v -> Condition v -cOr (Lit True) _ = Lit True -cOr _ (Lit True) = Lit True -cOr (Lit False) x = x -cOr x (Lit False) = x -cOr c (CNot d) - | c == d = Lit True -cOr (CNot c) d - | c == d = Lit True -cOr x y = COr x y - -instance Functor Condition where - f `fmap` Var c = Var (f c) - _ `fmap` Lit c = Lit c - f `fmap` CNot c = CNot (fmap f c) - f `fmap` COr c d = COr (fmap f c) (fmap f d) - f `fmap` CAnd c d = CAnd (fmap f c) (fmap f d) - -instance Foldable Condition where - f `foldMap` Var c = f c - _ `foldMap` Lit _ = mempty - f `foldMap` CNot c = foldMap f c - f `foldMap` COr c d = foldMap f c `mappend` foldMap f d - f `foldMap` CAnd c d = foldMap f c `mappend` foldMap f d - -instance Traversable Condition where - f `traverse` Var c = Var `fmap` f c - _ `traverse` Lit c = pure $ Lit c - f `traverse` CNot c = CNot `fmap` traverse f c - f `traverse` COr c d = COr `fmap` traverse f c <*> traverse f d - f `traverse` CAnd c d = CAnd `fmap` traverse f c <*> traverse f d - -instance Applicative Condition where - pure = Var - (<*>) = ap - -instance Monad Condition where - return = pure - -- Terminating cases - (>>=) (Lit x) _ = Lit x - (>>=) (Var x) f = f x - -- Recursing cases - (>>=) (CNot x ) f = CNot (x >>= f) - (>>=) (COr x y) f = COr (x >>= f) (y >>= f) - (>>=) (CAnd x y) f = CAnd (x >>= f) (y >>= f) - -instance Monoid (Condition a) where - mempty = Lit False - mappend = (<>) - -instance Semigroup (Condition a) where - (<>) = COr - -instance Alternative Condition where - empty = mempty - (<|>) = mappend - -instance MonadPlus Condition where - mzero = mempty - mplus = mappend - -instance Binary c => Binary (Condition c) - -instance NFData c => NFData (Condition c) where rnf = genericRnf - --- | Simplify the condition and return its free variables. -simplifyCondition :: Condition c - -> (c -> Either d Bool) -- ^ (partial) variable assignment - -> (Condition d, [d]) -simplifyCondition cond i = fv . 
walk $ cond - where - walk cnd = case cnd of - Var v -> either Var Lit (i v) - Lit b -> Lit b - CNot c -> case walk c of - Lit True -> Lit False - Lit False -> Lit True - c' -> CNot c' - COr c d -> case (walk c, walk d) of - (Lit False, d') -> d' - (Lit True, _) -> Lit True - (c', Lit False) -> c' - (_, Lit True) -> Lit True - (c',d') -> COr c' d' - CAnd c d -> case (walk c, walk d) of - (Lit False, _) -> Lit False - (Lit True, d') -> d' - (_, Lit False) -> Lit False - (c', Lit True) -> c' - (c',d') -> CAnd c' d' - -- gather free vars - fv c = (c, fv' c) - fv' c = case c of - Var v -> [v] - Lit _ -> [] - CNot c' -> fv' c' - COr c1 c2 -> fv' c1 ++ fv' c2 - CAnd c1 c2 -> fv' c1 ++ fv' c2 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/CondTree.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/CondTree.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/CondTree.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/CondTree.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,179 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveFoldable #-} -{-# LANGUAGE DeriveTraversable #-} - -module Distribution.Types.CondTree ( - CondTree(..), - CondBranch(..), - condIfThen, - condIfThenElse, - mapCondTree, - mapTreeConstrs, - mapTreeConds, - mapTreeData, - traverseCondTreeV, - traverseCondBranchV, - traverseCondTreeC, - traverseCondBranchC, - extractCondition, - simplifyCondTree, - ignoreConditions, -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.Condition - -import qualified Distribution.Compat.Lens as L - - --- | A 'CondTree' is used to represent the conditional structure of --- a Cabal file, reflecting a syntax element subject to constraints, --- and then any number of 
sub-elements which may be enabled subject --- to some condition. Both @a@ and @c@ are usually 'Monoid's. --- --- To be more concrete, consider the following fragment of a @Cabal@ --- file: --- --- @ --- build-depends: base >= 4.0 --- if flag(extra) --- build-depends: base >= 4.2 --- @ --- --- One way to represent this is to have @'CondTree' 'ConfVar' --- ['Dependency'] 'BuildInfo'@. Here, 'condTreeData' represents --- the actual fields which are not behind any conditional, while --- 'condTreeComponents' recursively records any further fields --- which are behind a conditional. 'condTreeConstraints' records --- the constraints (in this case, @base >= 4.0@) which would --- be applied if you use this syntax; in general, this is --- derived off of 'targetBuildInfo' (perhaps a good refactoring --- would be to convert this into an opaque type, with a smart --- constructor that pre-computes the dependencies.) --- -data CondTree v c a = CondNode - { condTreeData :: a - , condTreeConstraints :: c - , condTreeComponents :: [CondBranch v c a] - } - deriving (Show, Eq, Typeable, Data, Generic, Functor, Foldable, Traversable) - -instance (Binary v, Binary c, Binary a) => Binary (CondTree v c a) - -instance (NFData v, NFData c, NFData a) => NFData (CondTree v c a) where rnf = genericRnf - --- | A 'CondBranch' represents a conditional branch, e.g., @if --- flag(foo)@ on some syntax @a@. It also has an optional false --- branch. --- -data CondBranch v c a = CondBranch - { condBranchCondition :: Condition v - , condBranchIfTrue :: CondTree v c a - , condBranchIfFalse :: Maybe (CondTree v c a) - } - deriving (Show, Eq, Typeable, Data, Generic, Functor, Traversable) - --- This instance is written by hand because GHC 8.0.1/8.0.2 infinite --- loops when trying to derive it with optimizations. 
See --- https://ghc.haskell.org/trac/ghc/ticket/13056 -instance Foldable (CondBranch v c) where - foldMap f (CondBranch _ c Nothing) = foldMap f c - foldMap f (CondBranch _ c (Just a)) = foldMap f c `mappend` foldMap f a - -instance (Binary v, Binary c, Binary a) => Binary (CondBranch v c a) - -instance (NFData v, NFData c, NFData a) => NFData (CondBranch v c a) where rnf = genericRnf - -condIfThen :: Condition v -> CondTree v c a -> CondBranch v c a -condIfThen c t = CondBranch c t Nothing - -condIfThenElse :: Condition v -> CondTree v c a -> CondTree v c a -> CondBranch v c a -condIfThenElse c t e = CondBranch c t (Just e) - -mapCondTree :: (a -> b) -> (c -> d) -> (Condition v -> Condition w) - -> CondTree v c a -> CondTree w d b -mapCondTree fa fc fcnd (CondNode a c ifs) = - CondNode (fa a) (fc c) (map g ifs) - where - g (CondBranch cnd t me) - = CondBranch (fcnd cnd) - (mapCondTree fa fc fcnd t) - (fmap (mapCondTree fa fc fcnd) me) - -mapTreeConstrs :: (c -> d) -> CondTree v c a -> CondTree v d a -mapTreeConstrs f = mapCondTree id f id - -mapTreeConds :: (Condition v -> Condition w) -> CondTree v c a -> CondTree w c a -mapTreeConds f = mapCondTree id id f - -mapTreeData :: (a -> b) -> CondTree v c a -> CondTree v c b -mapTreeData f = mapCondTree f id id - --- | @@Traversal@@ for the variables -traverseCondTreeV :: L.Traversal (CondTree v c a) (CondTree w c a) v w -traverseCondTreeV f (CondNode a c ifs) = - CondNode a c <$> traverse (traverseCondBranchV f) ifs - --- | @@Traversal@@ for the variables -traverseCondBranchV :: L.Traversal (CondBranch v c a) (CondBranch w c a) v w -traverseCondBranchV f (CondBranch cnd t me) = CondBranch - <$> traverse f cnd - <*> traverseCondTreeV f t - <*> traverse (traverseCondTreeV f) me - --- | @@Traversal@@ for the aggregated constraints -traverseCondTreeC :: L.Traversal (CondTree v c a) (CondTree v d a) c d -traverseCondTreeC f (CondNode a c ifs) = - CondNode a <$> f c <*> traverse (traverseCondBranchC f) ifs - --- | 
@@Traversal@@ for the aggregated constraints -traverseCondBranchC :: L.Traversal (CondBranch v c a) (CondBranch v d a) c d -traverseCondBranchC f (CondBranch cnd t me) = CondBranch cnd - <$> traverseCondTreeC f t - <*> traverse (traverseCondTreeC f) me - - --- | Extract the condition matched by the given predicate from a cond tree. --- --- We use this mainly for extracting buildable conditions (see the Note above), --- but the function is in fact more general. -extractCondition :: Eq v => (a -> Bool) -> CondTree v c a -> Condition v -extractCondition p = go - where - go (CondNode x _ cs) | not (p x) = Lit False - | otherwise = goList cs - - goList [] = Lit True - goList (CondBranch c t e : cs) = - let - ct = go t - ce = maybe (Lit True) go e - in - ((c `cAnd` ct) `cOr` (CNot c `cAnd` ce)) `cAnd` goList cs - --- | Flattens a CondTree using a partial flag assignment. When a condition --- cannot be evaluated, both branches are ignored. -simplifyCondTree :: (Monoid a, Monoid d) => - (v -> Either v Bool) - -> CondTree v d a - -> (d, a) -simplifyCondTree env (CondNode a d ifs) = - mconcat $ (d, a) : mapMaybe simplifyIf ifs - where - simplifyIf (CondBranch cnd t me) = - case simplifyCondition cnd env of - (Lit True, _) -> Just $ simplifyCondTree env t - (Lit False, _) -> fmap (simplifyCondTree env) me - _ -> Nothing - --- | Flatten a CondTree. This will resolve the CondTree by taking all --- possible paths into account. Note that since branches represent exclusive --- choices this may not result in a \"sane\" result. 
-ignoreConditions :: (Monoid a, Monoid c) => CondTree v c a -> (a, c) -ignoreConditions (CondNode a c ifs) = (a, c) `mappend` mconcat (concatMap f ifs) - where f (CondBranch _ t me) = ignoreConditions t - : maybeToList (fmap ignoreConditions me) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Dependency.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Dependency.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Dependency.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Dependency.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,72 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Types.Dependency - ( Dependency(..) - , depPkgName - , depVerRange - , thisPackageVersion - , notThisPackageVersion - , simplifyDependency - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Version ( VersionRange, thisVersion - , notThisVersion, anyVersion - , simplifyVersionRange ) - -import qualified Distribution.Compat.ReadP as Parse - -import Distribution.Text -import Distribution.Pretty -import Distribution.Parsec.Class -import Distribution.Types.PackageId -import Distribution.Types.PackageName - -import Text.PrettyPrint ((<+>)) - --- | Describes a dependency on a source package (API) --- -data Dependency = Dependency PackageName VersionRange - deriving (Generic, Read, Show, Eq, Typeable, Data) - -depPkgName :: Dependency -> PackageName -depPkgName (Dependency pn _) = pn - -depVerRange :: Dependency -> VersionRange -depVerRange (Dependency _ vr) = vr - -instance Binary Dependency -instance NFData Dependency where rnf = genericRnf - -instance Pretty Dependency where - pretty (Dependency name ver) = pretty name <+> pretty ver - -instance Parsec Dependency where - parsec = do - name <- lexemeParsec - ver <- parsec <|> 
pure anyVersion - return (Dependency name ver) - -instance Text Dependency where - parse = do name <- parse - Parse.skipSpaces - ver <- parse Parse.<++ return anyVersion - Parse.skipSpaces - return (Dependency name ver) - -thisPackageVersion :: PackageIdentifier -> Dependency -thisPackageVersion (PackageIdentifier n v) = - Dependency n (thisVersion v) - -notThisPackageVersion :: PackageIdentifier -> Dependency -notThisPackageVersion (PackageIdentifier n v) = - Dependency n (notThisVersion v) - --- | Simplify the 'VersionRange' expression in a 'Dependency'. --- See 'simplifyVersionRange'. --- -simplifyDependency :: Dependency -> Dependency -simplifyDependency (Dependency name range) = - Dependency name (simplifyVersionRange range) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/DependencyMap.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/DependencyMap.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/DependencyMap.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/DependencyMap.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,51 +0,0 @@ -module Distribution.Types.DependencyMap ( - DependencyMap, - toDepMap, - fromDepMap, - constrainBy, -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.Dependency -import Distribution.Types.PackageName -import Distribution.Version - -import qualified Data.Map.Lazy as Map - --- | A map of dependencies. Newtyped since the default monoid instance is not --- appropriate. The monoid instance uses 'intersectVersionRanges'. 
-newtype DependencyMap = DependencyMap { unDependencyMap :: Map PackageName VersionRange } - deriving (Show, Read) - -instance Monoid DependencyMap where - mempty = DependencyMap Map.empty - mappend = (<>) - -instance Semigroup DependencyMap where - (DependencyMap a) <> (DependencyMap b) = - DependencyMap (Map.unionWith intersectVersionRanges a b) - -toDepMap :: [Dependency] -> DependencyMap -toDepMap ds = - DependencyMap $ Map.fromListWith intersectVersionRanges [ (p,vr) | Dependency p vr <- ds ] - -fromDepMap :: DependencyMap -> [Dependency] -fromDepMap m = [ Dependency p vr | (p,vr) <- Map.toList (unDependencyMap m) ] - --- Apply extra constraints to a dependency map. --- Combines dependencies where the result will only contain keys from the left --- (first) map. If a key also exists in the right map, both constraints will --- be intersected. -constrainBy :: DependencyMap -- ^ Input map - -> DependencyMap -- ^ Extra constraints - -> DependencyMap -constrainBy left extra = - DependencyMap $ - Map.foldrWithKey tightenConstraint (unDependencyMap left) - (unDependencyMap extra) - where tightenConstraint n c l = - case Map.lookup n l of - Nothing -> l - Just vr -> Map.insert n (intersectVersionRanges vr c) l diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Executable/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Executable/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Executable/Lens.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Executable/Lens.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,31 +0,0 @@ -module Distribution.Types.Executable.Lens ( - Executable, - module Distribution.Types.Executable.Lens, - ) where - -import Distribution.Compat.Lens -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Types.BuildInfo (BuildInfo) -import 
Distribution.Types.Executable (Executable) -import Distribution.Types.ExecutableScope (ExecutableScope) -import Distribution.Types.UnqualComponentName (UnqualComponentName) - -import qualified Distribution.Types.Executable as T - -exeName :: Lens' Executable UnqualComponentName -exeName f s = fmap (\x -> s { T.exeName = x }) (f (T.exeName s)) -{-# INLINE exeName #-} - -modulePath :: Lens' Executable String -modulePath f s = fmap (\x -> s { T.modulePath = x }) (f (T.modulePath s)) -{-# INLINE modulePath #-} - -exeScope :: Lens' Executable ExecutableScope -exeScope f s = fmap (\x -> s { T.exeScope = x }) (f (T.exeScope s)) -{-# INLINE exeScope #-} - -exeBuildInfo :: Lens' Executable BuildInfo -exeBuildInfo f s = fmap (\x -> s { T.buildInfo = x }) (f (T.buildInfo s)) -{-# INLINE exeBuildInfo #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Executable.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Executable.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Executable.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Executable.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,65 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.Executable ( - Executable(..), - emptyExecutable, - exeModules, - exeModulesAutogen -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.BuildInfo -import Distribution.Types.UnqualComponentName -import Distribution.Types.ExecutableScope -import Distribution.ModuleName - -import qualified Distribution.Types.BuildInfo.Lens as L - -data Executable = Executable { - exeName :: UnqualComponentName, - modulePath :: FilePath, - exeScope :: ExecutableScope, - buildInfo :: BuildInfo - } - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance L.HasBuildInfo 
Executable where - buildInfo f l = (\x -> l { buildInfo = x }) <$> f (buildInfo l) - -instance Binary Executable - -instance NFData Executable where rnf = genericRnf - -instance Monoid Executable where - mempty = gmempty - mappend = (<>) - -instance Semigroup Executable where - a <> b = Executable{ - exeName = combine' exeName, - modulePath = combine modulePath, - exeScope = combine exeScope, - buildInfo = combine buildInfo - } - where combine field = field a `mappend` field b - combine' field = case ( unUnqualComponentName $ field a - , unUnqualComponentName $ field b) of - ("", _) -> field b - (_, "") -> field a - (x, y) -> error $ "Ambiguous values for executable field: '" - ++ x ++ "' and '" ++ y ++ "'" - -emptyExecutable :: Executable -emptyExecutable = mempty - --- | Get all the module names from an exe -exeModules :: Executable -> [ModuleName] -exeModules exe = otherModules (buildInfo exe) - --- | Get all the auto generated module names from an exe --- This are a subset of 'exeModules'. 
-exeModulesAutogen :: Executable -> [ModuleName] -exeModulesAutogen exe = autogenModules (buildInfo exe) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ExecutableScope.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ExecutableScope.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ExecutableScope.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ExecutableScope.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.ExecutableScope ( - ExecutableScope(..), -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Pretty -import Distribution.Parsec.Class -import Distribution.Text - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - -data ExecutableScope = ExecutablePublic - | ExecutablePrivate - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Pretty ExecutableScope where - pretty ExecutablePublic = Disp.text "public" - pretty ExecutablePrivate = Disp.text "private" - -instance Parsec ExecutableScope where - parsec = P.try pub <|> pri where - pub = ExecutablePublic <$ P.string "public" - pri = ExecutablePrivate <$ P.string "private" - -instance Text ExecutableScope where - parse = Parse.choice - [ Parse.string "public" >> return ExecutablePublic - , Parse.string "private" >> return ExecutablePrivate - ] - -instance Binary ExecutableScope - -instance NFData ExecutableScope where rnf = genericRnf - --- | 'Any' like semigroup, where 'ExecutablePrivate' is 'Any True' -instance Semigroup ExecutableScope where - ExecutablePublic <> x = x - x@ExecutablePrivate <> _ = x - --- | 'mempty' = 'ExecutablePublic' -instance Monoid ExecutableScope where 
- mempty = ExecutablePublic - mappend = (<>) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ExeDependency.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ExeDependency.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ExeDependency.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ExeDependency.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,57 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Types.ExeDependency - ( ExeDependency(..) - , qualifiedExeName - ) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text -import Distribution.Types.ComponentName -import Distribution.Types.PackageName -import Distribution.Types.UnqualComponentName -import Distribution.Version (VersionRange, anyVersion) - -import qualified Distribution.Compat.CharParsing as P -import Distribution.Compat.ReadP ((<++)) -import qualified Distribution.Compat.ReadP as Parse -import Text.PrettyPrint (text, (<+>)) - --- | Describes a dependency on an executable from a package --- -data ExeDependency = ExeDependency - PackageName - UnqualComponentName -- name of executable component of package - VersionRange - deriving (Generic, Read, Show, Eq, Typeable, Data) - -instance Binary ExeDependency -instance NFData ExeDependency where rnf = genericRnf - -instance Pretty ExeDependency where - pretty (ExeDependency name exe ver) = - (pretty name <<>> text ":" <<>> pretty exe) <+> pretty ver - -instance Parsec ExeDependency where - parsec = do - name <- lexemeParsec - _ <- P.char ':' - exe <- lexemeParsec - ver <- parsec <|> pure anyVersion - return (ExeDependency name exe ver) - -instance Text ExeDependency where - parse = do name <- parse - _ <- Parse.char ':' - exe <- parse - 
Parse.skipSpaces - ver <- parse <++ return anyVersion - Parse.skipSpaces - return (ExeDependency name exe ver) - -qualifiedExeName :: ExeDependency -> ComponentName -qualifiedExeName (ExeDependency _ ucn _) = CExeName ucn diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ExposedModule.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ExposedModule.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ExposedModule.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ExposedModule.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,57 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Types.ExposedModule where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Backpack -import Distribution.ModuleName -import Distribution.Parsec.Class -import Distribution.ParseUtils (parseModuleNameQ) -import Distribution.Pretty -import Distribution.Text - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - -data ExposedModule - = ExposedModule { - exposedName :: ModuleName, - exposedReexport :: Maybe OpenModule - } - deriving (Eq, Generic, Read, Show) - -instance Pretty ExposedModule where - pretty (ExposedModule m reexport) = - Disp.hsep [ pretty m - , case reexport of - Just m' -> Disp.hsep [Disp.text "from", disp m'] - Nothing -> Disp.empty - ] - -instance Parsec ExposedModule where - parsec = do - m <- parsecMaybeQuoted parsec - P.spaces - - reexport <- P.optional $ do - _ <- P.string "from" - P.skipSpaces1 - parsec - - return (ExposedModule m reexport) - -instance Text ExposedModule where - parse = do - m <- parseModuleNameQ - Parse.skipSpaces - reexport <- Parse.option Nothing $ do - _ <- Parse.string "from" - Parse.skipSpaces - fmap Just parse - return (ExposedModule m 
reexport) - -instance Binary ExposedModule - -instance NFData ExposedModule where rnf = genericRnf diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ForeignLib/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ForeignLib/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ForeignLib/Lens.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ForeignLib/Lens.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -module Distribution.Types.ForeignLib.Lens ( - ForeignLib, - module Distribution.Types.ForeignLib.Lens, - ) where - -import Distribution.Compat.Lens -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Types.BuildInfo (BuildInfo) -import Distribution.Types.ForeignLib (ForeignLib, LibVersionInfo) -import Distribution.Types.ForeignLibOption (ForeignLibOption) -import Distribution.Types.ForeignLibType (ForeignLibType) -import Distribution.Types.UnqualComponentName (UnqualComponentName) -import Distribution.Version (Version) - -import qualified Distribution.Types.ForeignLib as T - -foreignLibName :: Lens' ForeignLib UnqualComponentName -foreignLibName f s = fmap (\x -> s { T.foreignLibName = x }) (f (T.foreignLibName s)) -{-# INLINE foreignLibName #-} - -foreignLibType :: Lens' ForeignLib ForeignLibType -foreignLibType f s = fmap (\x -> s { T.foreignLibType = x }) (f (T.foreignLibType s)) -{-# INLINE foreignLibType #-} - -foreignLibOptions :: Lens' ForeignLib [ForeignLibOption] -foreignLibOptions f s = fmap (\x -> s { T.foreignLibOptions = x }) (f (T.foreignLibOptions s)) -{-# INLINE foreignLibOptions #-} - -foreignLibBuildInfo :: Lens' ForeignLib BuildInfo -foreignLibBuildInfo f s = fmap (\x -> s { T.foreignLibBuildInfo = x }) (f (T.foreignLibBuildInfo s)) -{-# INLINE foreignLibBuildInfo #-} - -foreignLibVersionInfo :: Lens' ForeignLib (Maybe 
LibVersionInfo) -foreignLibVersionInfo f s = fmap (\x -> s { T.foreignLibVersionInfo = x }) (f (T.foreignLibVersionInfo s)) -{-# INLINE foreignLibVersionInfo #-} - -foreignLibVersionLinux :: Lens' ForeignLib (Maybe Version) -foreignLibVersionLinux f s = fmap (\x -> s { T.foreignLibVersionLinux = x }) (f (T.foreignLibVersionLinux s)) -{-# INLINE foreignLibVersionLinux #-} - -foreignLibModDefFile :: Lens' ForeignLib [FilePath] -foreignLibModDefFile f s = fmap (\x -> s { T.foreignLibModDefFile = x }) (f (T.foreignLibModDefFile s)) -{-# INLINE foreignLibModDefFile #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ForeignLib.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ForeignLib.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ForeignLib.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ForeignLib.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,213 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.ForeignLib( - ForeignLib(..), - emptyForeignLib, - foreignLibModules, - foreignLibIsShared, - foreignLibVersion, - - LibVersionInfo, - mkLibVersionInfo, - libVersionInfoCRA, - libVersionNumber, - libVersionNumberShow, - libVersionMajor -) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.ModuleName -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.System -import Distribution.Text -import Distribution.Types.BuildInfo -import Distribution.Types.ForeignLibOption -import Distribution.Types.ForeignLibType -import Distribution.Types.UnqualComponentName -import Distribution.Version - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp -import qualified Text.Read as Read - 
-import qualified Distribution.Types.BuildInfo.Lens as L - --- | A foreign library stanza is like a library stanza, except that --- the built code is intended for consumption by a non-Haskell client. -data ForeignLib = ForeignLib { - -- | Name of the foreign library - foreignLibName :: UnqualComponentName - -- | What kind of foreign library is this (static or dynamic). - , foreignLibType :: ForeignLibType - -- | What options apply to this foreign library (e.g., are we - -- merging in all foreign dependencies.) - , foreignLibOptions :: [ForeignLibOption] - -- | Build information for this foreign library. - , foreignLibBuildInfo :: BuildInfo - -- | Libtool-style version-info data to compute library version. - -- Refer to the libtool documentation on the - -- current:revision:age versioning scheme. - , foreignLibVersionInfo :: Maybe LibVersionInfo - -- | Linux library version - , foreignLibVersionLinux :: Maybe Version - - -- | (Windows-specific) module definition files - -- - -- This is a list rather than a maybe field so that we can flatten - -- the condition trees (for instance, when creating an sdist) - , foreignLibModDefFile :: [FilePath] - } - deriving (Generic, Show, Read, Eq, Typeable, Data) - -data LibVersionInfo = LibVersionInfo Int Int Int deriving (Data, Eq, Generic, Typeable) - -instance Ord LibVersionInfo where - LibVersionInfo c r _ `compare` LibVersionInfo c' r' _ = - case c `compare` c' of - EQ -> r `compare` r' - e -> e - -instance Show LibVersionInfo where - showsPrec d (LibVersionInfo c r a) = showParen (d > 10) - $ showString "mkLibVersionInfo " - . 
showsPrec 11 (c,r,a) - -instance Read LibVersionInfo where - readPrec = Read.parens $ do - Read.Ident "mkLibVersionInfo" <- Read.lexP - t <- Read.step Read.readPrec - return (mkLibVersionInfo t) - -instance Binary LibVersionInfo - -instance NFData LibVersionInfo where rnf = genericRnf - -instance Pretty LibVersionInfo where - pretty (LibVersionInfo c r a) - = Disp.hcat $ Disp.punctuate (Disp.char ':') $ map Disp.int [c,r,a] - -instance Parsec LibVersionInfo where - parsec = do - c <- P.integral - (r, a) <- P.option (0,0) $ do - _ <- P.char ':' - r <- P.integral - a <- P.option 0 $ do - _ <- P.char ':' - P.integral - return (r,a) - return $ mkLibVersionInfo (c,r,a) - -instance Text LibVersionInfo where - parse = do - c <- parseNat - (r, a) <- Parse.option (0,0) $ do - _ <- Parse.char ':' - r <- parseNat - a <- Parse.option 0 (Parse.char ':' >> parseNat) - return (r, a) - return $ mkLibVersionInfo (c,r,a) - where - parseNat = read `fmap` Parse.munch1 isDigit - --- | Construct 'LibVersionInfo' from @(current, revision, age)@ --- numbers. --- --- For instance, @mkLibVersionInfo (3,0,0)@ constructs a --- 'LibVersionInfo' representing the version-info @3:0:0@. --- --- All version components must be non-negative. -mkLibVersionInfo :: (Int, Int, Int) -> LibVersionInfo -mkLibVersionInfo (c,r,a) = LibVersionInfo c r a - --- | From a given 'LibVersionInfo', extract the @(current, revision, --- age)@ numbers. -libVersionInfoCRA :: LibVersionInfo -> (Int, Int, Int) -libVersionInfoCRA (LibVersionInfo c r a) = (c,r,a) - --- | Given a version-info field, produce a @major.minor.build@ version -libVersionNumber :: LibVersionInfo -> (Int, Int, Int) -libVersionNumber (LibVersionInfo c r a) = (c-a , a , r) - --- | Given a version-info field, return @"major.minor.build"@ as a --- 'String' -libVersionNumberShow :: LibVersionInfo -> String -libVersionNumberShow v = - let (major, minor, build) = libVersionNumber v - in show major ++ "." ++ show minor ++ "." 
++ show build - --- | Return the @major@ version of a version-info field. -libVersionMajor :: LibVersionInfo -> Int -libVersionMajor (LibVersionInfo c _ a) = c-a - -instance L.HasBuildInfo ForeignLib where - buildInfo f l = (\x -> l { foreignLibBuildInfo = x }) <$> f (foreignLibBuildInfo l) - -instance Binary ForeignLib - -instance NFData ForeignLib where rnf = genericRnf - -instance Semigroup ForeignLib where - a <> b = ForeignLib { - foreignLibName = combine' foreignLibName - , foreignLibType = combine foreignLibType - , foreignLibOptions = combine foreignLibOptions - , foreignLibBuildInfo = combine foreignLibBuildInfo - , foreignLibVersionInfo = combine'' foreignLibVersionInfo - , foreignLibVersionLinux = combine'' foreignLibVersionLinux - , foreignLibModDefFile = combine foreignLibModDefFile - } - where combine field = field a `mappend` field b - combine' field = case ( unUnqualComponentName $ field a - , unUnqualComponentName $ field b) of - ("", _) -> field b - (_, "") -> field a - (x, y) -> error $ "Ambiguous values for executable field: '" - ++ x ++ "' and '" ++ y ++ "'" - combine'' field = field b - -instance Monoid ForeignLib where - mempty = ForeignLib { - foreignLibName = mempty - , foreignLibType = ForeignLibTypeUnknown - , foreignLibOptions = [] - , foreignLibBuildInfo = mempty - , foreignLibVersionInfo = Nothing - , foreignLibVersionLinux = Nothing - , foreignLibModDefFile = [] - } - mappend = (<>) - --- | An empty foreign library. -emptyForeignLib :: ForeignLib -emptyForeignLib = mempty - --- | Modules defined by a foreign library. -foreignLibModules :: ForeignLib -> [ModuleName] -foreignLibModules = otherModules . foreignLibBuildInfo - --- | Is the foreign library shared? -foreignLibIsShared :: ForeignLib -> Bool -foreignLibIsShared = foreignLibTypeIsShared . foreignLibType - --- | Get a version number for a foreign library. --- If we're on Linux, and a Linux version is specified, use that. 
--- If we're on Linux, and libtool-style version-info is specified, translate --- that field into appropriate version numbers. --- Otherwise, this feature is unsupported so we don't return any version data. -foreignLibVersion :: ForeignLib -> OS -> [Int] -foreignLibVersion flib Linux = - case foreignLibVersionLinux flib of - Just v -> versionNumbers v - Nothing -> - case foreignLibVersionInfo flib of - Just v' -> - let (major, minor, build) = libVersionNumber v' - in [major, minor, build] - Nothing -> [] -foreignLibVersion _ _ = [] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ForeignLibOption.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ForeignLibOption.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ForeignLibOption.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ForeignLibOption.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.ForeignLibOption( - ForeignLibOption(..) -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Pretty -import Distribution.Parsec.Class -import Distribution.Text - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - -data ForeignLibOption = - -- | Merge in all dependent libraries (i.e., use - -- @ghc -shared -static@ rather than just record - -- the dependencies, ala @ghc -shared -dynamic@). - -- This option is compulsory on Windows and unsupported - -- on other platforms. 
- ForeignLibStandalone - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Pretty ForeignLibOption where - pretty ForeignLibStandalone = Disp.text "standalone" - -instance Parsec ForeignLibOption where - parsec = do - name <- P.munch1 (\c -> isAlphaNum c || c == '-') - case name of - "standalone" -> return ForeignLibStandalone - _ -> fail "unrecognized foreign-library option" - -instance Text ForeignLibOption where - parse = Parse.choice [ - do _ <- Parse.string "standalone" ; return ForeignLibStandalone - ] - -instance Binary ForeignLibOption - -instance NFData ForeignLibOption where rnf = genericRnf diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ForeignLibType.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ForeignLibType.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ForeignLibType.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ForeignLibType.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,76 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.ForeignLibType( - ForeignLibType(..), - knownForeignLibTypes, - foreignLibTypeIsShared, -) where - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.PackageDescription.Utils - -import Distribution.Pretty -import Distribution.Parsec.Class -import Distribution.Text - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - --- | What kind of foreign library is to be built? -data ForeignLibType = - -- | A native shared library (@.so@ on Linux, @.dylib@ on OSX, or - -- @.dll@ on Windows). - ForeignLibNativeShared - -- | A native static library (not currently supported.) - | ForeignLibNativeStatic - -- TODO: Maybe this should record a string? 
- | ForeignLibTypeUnknown - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Pretty ForeignLibType where - pretty ForeignLibNativeShared = Disp.text "native-shared" - pretty ForeignLibNativeStatic = Disp.text "native-static" - pretty ForeignLibTypeUnknown = Disp.text "unknown" - -instance Parsec ForeignLibType where - parsec = do - name <- P.munch1 (\c -> isAlphaNum c || c == '-') - return $ case name of - "native-shared" -> ForeignLibNativeShared - "native-static" -> ForeignLibNativeStatic - _ -> ForeignLibTypeUnknown - -instance Text ForeignLibType where - parse = Parse.choice [ - do _ <- Parse.string "native-shared" ; return ForeignLibNativeShared - , do _ <- Parse.string "native-static" ; return ForeignLibNativeStatic - ] - -instance Binary ForeignLibType - -instance NFData ForeignLibType where rnf = genericRnf - -instance Semigroup ForeignLibType where - ForeignLibTypeUnknown <> b = b - a <> ForeignLibTypeUnknown = a - _ <> _ = error "Ambiguous foreign library type" - -instance Monoid ForeignLibType where - mempty = ForeignLibTypeUnknown - mappend = (<>) - -knownForeignLibTypes :: [ForeignLibType] -knownForeignLibTypes = [ - ForeignLibNativeShared - , ForeignLibNativeStatic - ] - -foreignLibTypeIsShared :: ForeignLibType -> Bool -foreignLibTypeIsShared t = - case t of - ForeignLibNativeShared -> True - ForeignLibNativeStatic -> False - ForeignLibTypeUnknown -> cabalBug "Unknown foreign library type" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/GenericPackageDescription/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/GenericPackageDescription/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/GenericPackageDescription/Lens.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/GenericPackageDescription/Lens.hs 1970-01-01 00:00:00.000000000 +0000 @@ 
-1,124 +0,0 @@ -{-# LANGUAGE Rank2Types #-} -module Distribution.Types.GenericPackageDescription.Lens ( - GenericPackageDescription, - Flag, - FlagName, - ConfVar (..), - module Distribution.Types.GenericPackageDescription.Lens, - ) where - -import Prelude() -import Distribution.Compat.Prelude -import Distribution.Compat.Lens - --- We import types from their packages, so we can remove unused imports --- and have wider inter-module dependency graph -import Distribution.Types.CondTree (CondTree) -import Distribution.Types.Dependency (Dependency) -import Distribution.Types.Executable (Executable) -import Distribution.Types.PackageDescription (PackageDescription) -import Distribution.Types.Benchmark (Benchmark) -import Distribution.Types.ForeignLib (ForeignLib) -import Distribution.Types.GenericPackageDescription - ( GenericPackageDescription(GenericPackageDescription) - , Flag(MkFlag), FlagName, ConfVar (..)) -import Distribution.Types.Library (Library) -import Distribution.Types.TestSuite (TestSuite) -import Distribution.Types.UnqualComponentName (UnqualComponentName) -import Distribution.System (Arch, OS) -import Distribution.Compiler (CompilerFlavor) -import Distribution.Version (VersionRange) - -------------------------------------------------------------------------------- --- GenericPackageDescription -------------------------------------------------------------------------------- - -condBenchmarks :: Lens' GenericPackageDescription [(UnqualComponentName, CondTree ConfVar [Dependency] Benchmark)] -condBenchmarks f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 y1) (f x8) -{-# INLINE condBenchmarks #-} - -condExecutables :: Lens' GenericPackageDescription [(UnqualComponentName, CondTree ConfVar [Dependency] Executable)] -condExecutables f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription x1 x2 x3 x4 x5 y1 x7 x8) (f x6) -{-# INLINE condExecutables #-} - 
-condForeignLibs :: Lens' GenericPackageDescription [(UnqualComponentName, CondTree ConfVar [Dependency] Distribution.Types.ForeignLib.ForeignLib)] -condForeignLibs f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription x1 x2 x3 x4 y1 x6 x7 x8) (f x5) -{-# INLINE condForeignLibs #-} - -condLibrary :: Lens' GenericPackageDescription (Maybe (CondTree ConfVar [Dependency] Library)) -condLibrary f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription x1 x2 y1 x4 x5 x6 x7 x8) (f x3) -{-# INLINE condLibrary #-} - -condSubLibraries :: Lens' GenericPackageDescription [(UnqualComponentName, CondTree ConfVar [Dependency] Library)] -condSubLibraries f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription x1 x2 x3 y1 x5 x6 x7 x8) (f x4) -{-# INLINE condSubLibraries #-} - -condTestSuites :: Lens' GenericPackageDescription [(UnqualComponentName, CondTree ConfVar [Dependency] TestSuite)] -condTestSuites f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription x1 x2 x3 x4 x5 x6 y1 x8) (f x7) -{-# INLINE condTestSuites #-} - -genPackageFlags :: Lens' GenericPackageDescription [Flag] -genPackageFlags f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription x1 y1 x3 x4 x5 x6 x7 x8) (f x2) -{-# INLINE genPackageFlags #-} - -packageDescription :: Lens' GenericPackageDescription PackageDescription -packageDescription f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription y1 x2 x3 x4 x5 x6 x7 x8) (f x1) -{-# INLINE packageDescription #-} - -allCondTrees - :: Applicative f - => (forall a. 
CondTree ConfVar [Dependency] a - -> f (CondTree ConfVar [Dependency] a)) - -> GenericPackageDescription - -> f GenericPackageDescription -allCondTrees f (GenericPackageDescription p a1 x1 x2 x3 x4 x5 x6) = - GenericPackageDescription - <$> pure p - <*> pure a1 - <*> traverse f x1 - <*> (traverse . _2) f x2 - <*> (traverse . _2) f x3 - <*> (traverse . _2) f x4 - <*> (traverse . _2) f x5 - <*> (traverse . _2) f x6 - - -------------------------------------------------------------------------------- --- Flag -------------------------------------------------------------------------------- - -flagName :: Lens' Flag FlagName -flagName f (MkFlag x1 x2 x3 x4) = fmap (\y1 -> MkFlag y1 x2 x3 x4) (f x1) -{-# INLINE flagName #-} - -flagDescription :: Lens' Flag String -flagDescription f (MkFlag x1 x2 x3 x4) = fmap (\y1 -> MkFlag x1 y1 x3 x4) (f x2) -{-# INLINE flagDescription #-} - -flagDefault :: Lens' Flag Bool -flagDefault f (MkFlag x1 x2 x3 x4) = fmap (\y1 -> MkFlag x1 x2 y1 x4) (f x3) -{-# INLINE flagDefault #-} - -flagManual :: Lens' Flag Bool -flagManual f (MkFlag x1 x2 x3 x4) = fmap (\y1 -> MkFlag x1 x2 x3 y1) (f x4) -{-# INLINE flagManual #-} - -------------------------------------------------------------------------------- --- ConfVar -------------------------------------------------------------------------------- - -_OS :: Traversal' ConfVar OS -_OS f (OS os) = OS <$> f os -_OS _ x = pure x - -_Arch :: Traversal' ConfVar Arch -_Arch f (Arch arch) = Arch <$> f arch -_Arch _ x = pure x - -_Flag :: Traversal' ConfVar FlagName -_Flag f (Flag flag) = Flag <$> f flag -_Flag _ x = pure x - -_Impl :: Traversal' ConfVar (CompilerFlavor, VersionRange) -_Impl f (Impl cf vr) = uncurry Impl <$> f (cf, vr) -_Impl _ x = pure x diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/GenericPackageDescription.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/GenericPackageDescription.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/GenericPackageDescription.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/GenericPackageDescription.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,353 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} - -module Distribution.Types.GenericPackageDescription ( - GenericPackageDescription(..), - emptyGenericPackageDescription, - Flag(..), - emptyFlag, - FlagName, - mkFlagName, - unFlagName, - FlagAssignment, - mkFlagAssignment, - unFlagAssignment, - lookupFlagAssignment, - insertFlagAssignment, - diffFlagAssignment, - findDuplicateFlagAssignments, - nullFlagAssignment, - showFlagValue, - dispFlagAssignment, - parseFlagAssignment, - parsecFlagAssignment, - ConfVar(..), -) where - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.Utils.ShortText -import Distribution.Utils.Generic (lowercase) -import qualified Text.PrettyPrint as Disp -import qualified Data.Map as Map -import qualified Distribution.Compat.ReadP as Parse -import qualified Distribution.Compat.CharParsing as P -import Distribution.Compat.ReadP ((+++)) - --- lens -import Distribution.Compat.Lens as L -import qualified Distribution.Types.BuildInfo.Lens as L - -import Distribution.Types.PackageDescription - -import Distribution.Types.Dependency -import Distribution.Types.Library -import Distribution.Types.ForeignLib -import Distribution.Types.Executable -import Distribution.Types.TestSuite -import Distribution.Types.Benchmark -import Distribution.Types.UnqualComponentName -import Distribution.Types.CondTree - -import Distribution.Package -import Distribution.Version -import Distribution.Compiler -import Distribution.System -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text - --- 
--------------------------------------------------------------------------- --- The 'GenericPackageDescription' type - -data GenericPackageDescription = - GenericPackageDescription - { packageDescription :: PackageDescription - , genPackageFlags :: [Flag] - , condLibrary :: Maybe (CondTree ConfVar [Dependency] Library) - , condSubLibraries :: [( UnqualComponentName - , CondTree ConfVar [Dependency] Library )] - , condForeignLibs :: [( UnqualComponentName - , CondTree ConfVar [Dependency] ForeignLib )] - , condExecutables :: [( UnqualComponentName - , CondTree ConfVar [Dependency] Executable )] - , condTestSuites :: [( UnqualComponentName - , CondTree ConfVar [Dependency] TestSuite )] - , condBenchmarks :: [( UnqualComponentName - , CondTree ConfVar [Dependency] Benchmark )] - } - deriving (Show, Eq, Typeable, Data, Generic) - -instance Package GenericPackageDescription where - packageId = packageId . packageDescription - -instance Binary GenericPackageDescription - -instance NFData GenericPackageDescription where rnf = genericRnf - -emptyGenericPackageDescription :: GenericPackageDescription -emptyGenericPackageDescription = GenericPackageDescription emptyPackageDescription [] Nothing [] [] [] [] [] - --- ----------------------------------------------------------------------------- --- Traversal Instances - -instance L.HasBuildInfos GenericPackageDescription where - traverseBuildInfos f (GenericPackageDescription p a1 x1 x2 x3 x4 x5 x6) = - GenericPackageDescription - <$> L.traverseBuildInfos f p - <*> pure a1 - <*> (traverse . traverse . L.buildInfo) f x1 - <*> (traverse . L._2 . traverse . L.buildInfo) f x2 - <*> (traverse . L._2 . traverse . L.buildInfo) f x3 - <*> (traverse . L._2 . traverse . L.buildInfo) f x4 - <*> (traverse . L._2 . traverse . L.buildInfo) f x5 - <*> (traverse . L._2 . traverse . 
L.buildInfo) f x6 - --- ----------------------------------------------------------------------------- --- The Flag' type - --- | A flag can represent a feature to be included, or a way of linking --- a target against its dependencies, or in fact whatever you can think of. -data Flag = MkFlag - { flagName :: FlagName - , flagDescription :: String - , flagDefault :: Bool - , flagManual :: Bool - } - deriving (Show, Eq, Typeable, Data, Generic) - -instance Binary Flag - -instance NFData Flag where rnf = genericRnf - --- | A 'Flag' initialized with default parameters. -emptyFlag :: FlagName -> Flag -emptyFlag name = MkFlag - { flagName = name - , flagDescription = "" - , flagDefault = True - , flagManual = False - } - --- | A 'FlagName' is the name of a user-defined configuration flag --- --- Use 'mkFlagName' and 'unFlagName' to convert from/to a 'String'. --- --- This type is opaque since @Cabal-2.0@ --- --- @since 2.0.0.2 -newtype FlagName = FlagName ShortText - deriving (Eq, Generic, Ord, Show, Read, Typeable, Data, NFData) - --- | Construct a 'FlagName' from a 'String' --- --- 'mkFlagName' is the inverse to 'unFlagName' --- --- Note: No validations are performed to ensure that the resulting --- 'FlagName' is valid --- --- @since 2.0.0.2 -mkFlagName :: String -> FlagName -mkFlagName = FlagName . toShortText - --- | 'mkFlagName' --- --- @since 2.0.0.2 -instance IsString FlagName where - fromString = mkFlagName - --- | Convert 'FlagName' to 'String' --- --- @since 2.0.0.2 -unFlagName :: FlagName -> String -unFlagName (FlagName s) = fromShortText s - -instance Binary FlagName - -instance Pretty FlagName where - pretty = Disp.text . unFlagName - -instance Parsec FlagName where - parsec = mkFlagName . 
lowercase <$> parsec' - where - parsec' = (:) <$> lead <*> rest - lead = P.satisfy (\c -> isAlphaNum c || c == '_') - rest = P.munch (\c -> isAlphaNum c || c == '_' || c == '-') - -instance Text FlagName where - -- Note: we don't check that FlagName doesn't have leading dash, - -- cabal check will do that. - parse = mkFlagName . lowercase <$> parse' - where - parse' = (:) <$> lead <*> rest - lead = Parse.satisfy (\c -> isAlphaNum c || c == '_') - rest = Parse.munch (\c -> isAlphaNum c || c == '_' || c == '-') - --- | A 'FlagAssignment' is a total or partial mapping of 'FlagName's to --- 'Bool' flag values. It represents the flags chosen by the user or --- discovered during configuration. For example @--flags=foo --flags=-bar@ --- becomes @[("foo", True), ("bar", False)]@ --- -newtype FlagAssignment - = FlagAssignment { getFlagAssignment :: Map.Map FlagName (Int, Bool) } - deriving (Binary, Generic, NFData) - -instance Eq FlagAssignment where - (==) (FlagAssignment m1) (FlagAssignment m2) - = fmap snd m1 == fmap snd m2 - -instance Ord FlagAssignment where - compare (FlagAssignment m1) (FlagAssignment m2) - = fmap snd m1 `compare` fmap snd m2 - --- | Combines pairs of values contained in the 'FlagAssignment' Map. --- --- The last flag specified takes precedence, and we record the number --- of times we have seen the flag. --- -combineFlagValues :: (Int, Bool) -> (Int, Bool) -> (Int, Bool) -combineFlagValues (c1, _) (c2, b2) = (c1 + c2, b2) - --- The 'Semigroup' instance currently is right-biased. --- --- If duplicate flags are specified, we want the last flag specified to --- take precedence and we want to know how many times the flag has been --- specified so that we have the option of warning the user about --- supplying duplicate flags. 
-instance Semigroup FlagAssignment where - (<>) (FlagAssignment m1) (FlagAssignment m2) - = FlagAssignment (Map.unionWith combineFlagValues m1 m2) - -instance Monoid FlagAssignment where - mempty = FlagAssignment Map.empty - mappend = (<>) - --- | Construct a 'FlagAssignment' from a list of flag/value pairs. --- --- If duplicate flags occur in the input list, the later entries --- in the list will take precedence. --- --- @since 2.2.0 -mkFlagAssignment :: [(FlagName, Bool)] -> FlagAssignment -mkFlagAssignment = - FlagAssignment . - Map.fromListWith (flip combineFlagValues) . fmap (fmap (\b -> (1, b))) - --- | Deconstruct a 'FlagAssignment' into a list of flag/value pairs. --- --- @ 'null' ('findDuplicateFlagAssignments' fa) ==> ('mkFlagAssignment' . 'unFlagAssignment') fa == fa @ --- --- @since 2.2.0 -unFlagAssignment :: FlagAssignment -> [(FlagName, Bool)] -unFlagAssignment = fmap (fmap snd) . Map.toList . getFlagAssignment - --- | Test whether 'FlagAssignment' is empty. --- --- @since 2.2.0 -nullFlagAssignment :: FlagAssignment -> Bool -nullFlagAssignment = Map.null . getFlagAssignment - --- | Lookup the value for a flag --- --- Returns 'Nothing' if the flag isn't contained in the 'FlagAssignment'. --- --- @since 2.2.0 -lookupFlagAssignment :: FlagName -> FlagAssignment -> Maybe Bool -lookupFlagAssignment fn = fmap snd . Map.lookup fn . getFlagAssignment - --- | Insert or update the boolean value of a flag. --- --- If the flag is already present in the 'FlagAssigment', the --- value will be updated and the fact that multiple values have --- been provided for that flag will be recorded so that a --- warning can be generated later on. 
--- --- @since 2.2.0 -insertFlagAssignment :: FlagName -> Bool -> FlagAssignment -> FlagAssignment --- TODO: this currently just shadows prior values for an existing --- flag; rather than enforcing uniqueness at construction, it's --- verified later on via `D.C.Dependency.configuredPackageProblems` -insertFlagAssignment flag val = - FlagAssignment . - Map.insertWith (flip combineFlagValues) flag (1, val) . getFlagAssignment - --- | Remove all flag-assignments from the first 'FlagAssignment' that --- are contained in the second 'FlagAssignment' --- --- NB/TODO: This currently only removes flag assignments which also --- match the value assignment! We should review the code which uses --- this operation to figure out if this it's not enough to only --- compare the flagnames without the values. --- --- @since 2.2.0 -diffFlagAssignment :: FlagAssignment -> FlagAssignment -> FlagAssignment -diffFlagAssignment fa1 fa2 = FlagAssignment - (Map.difference (getFlagAssignment fa1) (getFlagAssignment fa2)) - --- | Find the 'FlagName's that have been listed more than once. --- --- @since 2.2.0 -findDuplicateFlagAssignments :: FlagAssignment -> [FlagName] -findDuplicateFlagAssignments = - Map.keys . Map.filter ((> 1) . fst) . getFlagAssignment - --- | @since 2.2.0 -instance Read FlagAssignment where - readsPrec p s = [ (FlagAssignment x, rest) | (x,rest) <- readsPrec p s ] - --- | @since 2.2.0 -instance Show FlagAssignment where - showsPrec p (FlagAssignment xs) = showsPrec p xs - --- | String representation of a flag-value pair. -showFlagValue :: (FlagName, Bool) -> String -showFlagValue (f, True) = '+' : unFlagName f -showFlagValue (f, False) = '-' : unFlagName f - --- | Pretty-prints a flag assignment. -dispFlagAssignment :: FlagAssignment -> Disp.Doc -dispFlagAssignment = Disp.hsep . map (Disp.text . showFlagValue) . unFlagAssignment - --- | Parses a flag assignment. 
-parsecFlagAssignment :: ParsecParser FlagAssignment -parsecFlagAssignment = mkFlagAssignment <$> - P.sepBy (onFlag <|> offFlag) P.skipSpaces1 - where - onFlag = do - _ <- P.optional (P.char '+') - f <- parsec - return (f, True) - offFlag = do - _ <- P.char '-' - f <- parsec - return (f, False) - --- | Parses a flag assignment. -parseFlagAssignment :: Parse.ReadP r FlagAssignment -parseFlagAssignment = mkFlagAssignment <$> - Parse.sepBy parseFlagValue Parse.skipSpaces1 - where - parseFlagValue = - (do Parse.optional (Parse.char '+') - f <- parse - return (f, True)) - +++ (do _ <- Parse.char '-' - f <- parse - return (f, False)) --- {-# DEPRECATED parseFlagAssignment "Use parsecFlagAssignment. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} - --- ----------------------------------------------------------------------------- --- The 'CondVar' type - --- | A @ConfVar@ represents the variable type used. -data ConfVar = OS OS - | Arch Arch - | Flag FlagName - | Impl CompilerFlavor VersionRange - deriving (Eq, Show, Typeable, Data, Generic) - -instance Binary ConfVar - -instance NFData ConfVar where rnf = genericRnf diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/HookedBuildInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/HookedBuildInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/HookedBuildInfo.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/HookedBuildInfo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,66 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.HookedBuildInfo ( - HookedBuildInfo, - emptyHookedBuildInfo, - ) where - --- import Distribution.Compat.Prelude -import Distribution.Types.BuildInfo -import Distribution.Types.UnqualComponentName - --- | 'HookedBuildInfo' is mechanism that hooks can 
use to --- override the 'BuildInfo's inside packages. One example --- use-case (which is used in core libraries today) is as --- a way of passing flags which are computed by a configure --- script into Cabal. In this case, the autoconf build type adds --- hooks to read in a textual 'HookedBuildInfo' format prior --- to doing any operations. --- --- Quite honestly, this mechanism is a massive hack since we shouldn't --- be editing the 'PackageDescription' data structure (it's easy --- to assume that this data structure shouldn't change and --- run into bugs, see for example 1c20a6328579af9e37677d507e2e9836ef70ab9d). --- But it's a bit convenient, because there isn't another data --- structure that allows adding extra 'BuildInfo' style things. --- --- In any case, a lot of care has to be taken to make sure the --- 'HookedBuildInfo' is applied to the 'PackageDescription'. In --- general this process occurs in "Distribution.Simple", which is --- responsible for orchestrating the hooks mechanism. The --- general strategy: --- --- 1. We run the pre-hook, which produces a 'HookedBuildInfo' --- (e.g., in the Autoconf case, it reads it out from a file). --- 2. We sanity-check the hooked build info with --- 'sanityCheckHookedBuildInfo'. --- 3. We update our 'PackageDescription' (either freshly read --- or cached from 'LocalBuildInfo') with 'updatePackageDescription'. --- --- In principle, we are also supposed to update the copy of --- the 'PackageDescription' stored in 'LocalBuildInfo' --- at 'localPkgDescr'. Unfortunately, in practice, there --- are lots of Custom setup scripts which fail to update --- 'localPkgDescr' so you really shouldn't rely on it. --- It's not DEPRECATED because there are legitimate uses --- for it, but... yeah. Sharp knife. See --- --- for more information on the issue. --- --- It is not well-specified whether or not a 'HookedBuildInfo' applied --- at configure time is persistent to the 'LocalBuildInfo'. 
The --- fact that 'HookedBuildInfo' is passed to 'confHook' MIGHT SUGGEST --- that the 'HookedBuildInfo' is applied at this time, but actually --- since 9317b67e6122ab14e53f81b573bd0ecb388eca5a it has been ONLY used --- to create a modified package description that we check for problems: --- it is never actually saved to the LBI. Since 'HookedBuildInfo' is --- applied monoidally to the existing build infos (and it is not an --- idempotent monoid), it could break things to save it, since we --- are obligated to apply any new 'HookedBuildInfo' and then we'd --- get the effect twice. But this does mean we have to re-apply --- it every time. Hey, it's more flexibility. -type HookedBuildInfo = (Maybe BuildInfo, [(UnqualComponentName, BuildInfo)]) - -emptyHookedBuildInfo :: HookedBuildInfo -emptyHookedBuildInfo = (Nothing, []) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/IncludeRenaming.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/IncludeRenaming.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/IncludeRenaming.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/IncludeRenaming.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,76 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.IncludeRenaming ( - IncludeRenaming(..), - defaultIncludeRenaming, - isDefaultIncludeRenaming, -) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Types.ModuleRenaming - -import qualified Distribution.Compat.CharParsing as P -import Distribution.Compat.ReadP ((<++)) -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text -import Text.PrettyPrint (text, (<+>)) -import qualified Text.PrettyPrint as Disp - --- 
--------------------------------------------------------------------------- --- Module renaming - --- | A renaming on an include: (provides renaming, requires renaming) -data IncludeRenaming - = IncludeRenaming { - includeProvidesRn :: ModuleRenaming, - includeRequiresRn :: ModuleRenaming - } - deriving (Show, Read, Eq, Ord, Typeable, Data, Generic) - -instance Binary IncludeRenaming - -instance NFData IncludeRenaming where rnf = genericRnf - --- | The 'defaultIncludeRenaming' applied when you only @build-depends@ --- on a package. -defaultIncludeRenaming :: IncludeRenaming -defaultIncludeRenaming = IncludeRenaming defaultRenaming defaultRenaming - --- | Is an 'IncludeRenaming' the default one? -isDefaultIncludeRenaming :: IncludeRenaming -> Bool -isDefaultIncludeRenaming (IncludeRenaming p r) = isDefaultRenaming p && isDefaultRenaming r - -instance Pretty IncludeRenaming where - pretty (IncludeRenaming prov_rn req_rn) = - pretty prov_rn - <+> (if isDefaultRenaming req_rn - then Disp.empty - else text "requires" <+> pretty req_rn) - -instance Parsec IncludeRenaming where - parsec = do - prov_rn <- parsec - req_rn <- P.option defaultRenaming $ P.try $ do - P.spaces - _ <- P.string "requires" - P.spaces - parsec - return (IncludeRenaming prov_rn req_rn) - -instance Text IncludeRenaming where - parse = do - prov_rn <- parse - req_rn <- (Parse.string "requires" >> Parse.skipSpaces >> parse) <++ return defaultRenaming - -- Requirements don't really care if they're mentioned - -- or not (since you can't thin a requirement.) But - -- we have a little hack in Configure to combine - -- the provisions and requirements together before passing - -- them to GHC, and so the most neutral choice for a requirement - -- is for the "with" field to be False, so we correctly - -- thin provisions. 
- return (IncludeRenaming prov_rn req_rn) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/InstalledPackageInfo/FieldGrammar.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/InstalledPackageInfo/FieldGrammar.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/InstalledPackageInfo/FieldGrammar.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/InstalledPackageInfo/FieldGrammar.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,263 +0,0 @@ -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} -module Distribution.Types.InstalledPackageInfo.FieldGrammar ( - ipiFieldGrammar, - ) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Backpack -import Distribution.Compat.Lens (Lens', (&), (.~)) -import Distribution.Compat.Newtype -import Distribution.FieldGrammar -import Distribution.FieldGrammar.FieldDescrs -import Distribution.License -import Distribution.ModuleName -import Distribution.Package -import Distribution.Parsec.Class -import Distribution.Parsec.Newtypes -import Distribution.Pretty -import Distribution.Text -import Distribution.Types.MungedPackageName -import Distribution.Types.UnqualComponentName -import Distribution.Version - -import qualified Data.Char as Char -import qualified Data.Map as Map -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.SPDX as SPDX -import qualified Text.PrettyPrint as Disp - -import Distribution.Types.InstalledPackageInfo - -import qualified Distribution.Types.InstalledPackageInfo.Lens as L -import qualified Distribution.Types.PackageId.Lens as L - --- Note: GHC goes nuts and inlines everything, --- One can see e.g. 
in -ddump-simpl-stats: --- --- 34886 KnownBranch --- 8197 wild1_ixF0 --- --- https://ghc.haskell.org/trac/ghc/ticket/13253 might be the cause. --- --- The workaround is to prevent GHC optimising the code: -infixl 4 <+> -(<+>) :: Applicative f => f (a -> b) -> f a -> f b -f <+> x = f <*> x -{-# NOINLINE (<+>) #-} - -ipiFieldGrammar - :: (FieldGrammar g, Applicative (g InstalledPackageInfo), Applicative (g Basic)) - => g InstalledPackageInfo InstalledPackageInfo -ipiFieldGrammar = mkInstalledPackageInfo - -- Deprecated fields - <$> monoidalFieldAla "hugs-options" (alaList' FSep Token) unitedList - ^^^ deprecatedField' "hugs isn't supported anymore" - -- Very basic fields: name, version, package-name and lib-name - <+> blurFieldGrammar basic basicFieldGrammar - -- Basic fields - <+> optionalFieldDef "id" L.installedUnitId (mkUnitId "") - <+> optionalFieldDefAla "instantiated-with" InstWith L.instantiatedWith [] - <+> optionalFieldDefAla "key" CompatPackageKey L.compatPackageKey "" - <+> optionalFieldDefAla "license" SpecLicenseLenient L.license (Left SPDX.NONE) - <+> optionalFieldDefAla "copyright" FreeText L.copyright "" - <+> optionalFieldDefAla "maintainer" FreeText L.maintainer "" - <+> optionalFieldDefAla "author" FreeText L.author "" - <+> optionalFieldDefAla "stability" FreeText L.stability "" - <+> optionalFieldDefAla "homepage" FreeText L.homepage "" - <+> optionalFieldDefAla "package-url" FreeText L.pkgUrl "" - <+> optionalFieldDefAla "synopsis" FreeText L.synopsis "" - <+> optionalFieldDefAla "description" FreeText L.description "" - <+> optionalFieldDefAla "category" FreeText L.category "" - -- Installed fields - <+> optionalFieldDef "abi" L.abiHash (mkAbiHash "") - <+> booleanFieldDef "indefinite" L.indefinite False - <+> booleanFieldDef "exposed" L.exposed False - <+> monoidalFieldAla "exposed-modules" ExposedModules L.exposedModules - <+> monoidalFieldAla "hidden-modules" (alaList' FSep MQuoted) L.hiddenModules - <+> booleanFieldDef "trusted" L.trusted 
False - <+> monoidalFieldAla "import-dirs" (alaList' FSep FilePathNT) L.importDirs - <+> monoidalFieldAla "library-dirs" (alaList' FSep FilePathNT) L.libraryDirs - <+> monoidalFieldAla "dynamic-library-dirs" (alaList' FSep FilePathNT) L.libraryDynDirs - <+> optionalFieldDefAla "data-dir" FilePathNT L.dataDir "" - <+> monoidalFieldAla "hs-libraries" (alaList' FSep Token) L.hsLibraries - <+> monoidalFieldAla "extra-libraries" (alaList' FSep Token) L.extraLibraries - <+> monoidalFieldAla "extra-ghci-libraries" (alaList' FSep Token) L.extraGHCiLibraries - <+> monoidalFieldAla "include-dirs" (alaList' FSep FilePathNT) L.includeDirs - <+> monoidalFieldAla "includes" (alaList' FSep FilePathNT) L.includes - <+> monoidalFieldAla "depends" (alaList FSep) L.depends - <+> monoidalFieldAla "abi-depends" (alaList FSep) L.abiDepends - <+> monoidalFieldAla "cc-options" (alaList' FSep Token) L.ccOptions - <+> monoidalFieldAla "cxx-options" (alaList' FSep Token) L.cxxOptions - <+> monoidalFieldAla "ld-options" (alaList' FSep Token) L.ldOptions - <+> monoidalFieldAla "framework-dirs" (alaList' FSep FilePathNT) L.frameworkDirs - <+> monoidalFieldAla "frameworks" (alaList' FSep Token) L.frameworks - <+> monoidalFieldAla "haddock-interfaces" (alaList' FSep FilePathNT) L.haddockInterfaces - <+> monoidalFieldAla "haddock-html" (alaList' FSep FilePathNT) L.haddockHTMLs - <+> optionalFieldAla "pkgroot" FilePathNT L.pkgRoot - where - mkInstalledPackageInfo _ Basic {..} = InstalledPackageInfo - -- _basicPkgName is not used - -- setMaybePackageId says it can be no-op. 
- (PackageIdentifier pn _basicVersion) - (mb_uqn <|> _basicLibName) - (mkComponentId "") -- installedComponentId_, not in use - where - (pn, mb_uqn) = decodeCompatPackageName _basicName -{-# SPECIALIZE ipiFieldGrammar :: FieldDescrs InstalledPackageInfo InstalledPackageInfo #-} -{-# SPECIALIZE ipiFieldGrammar :: ParsecFieldGrammar InstalledPackageInfo InstalledPackageInfo #-} -{-# SPECIALIZE ipiFieldGrammar :: PrettyFieldGrammar InstalledPackageInfo InstalledPackageInfo #-} - --- (forall b. [b]) ~ () -unitedList :: Lens' a [b] -unitedList f s = s <$ f [] - -------------------------------------------------------------------------------- --- Helper functions -------------------------------------------------------------------------------- - --- To maintain backwards-compatibility, we accept both comma/non-comma --- separated variants of this field. You SHOULD use the comma syntax if you --- use any new functions, although actually it's unambiguous due to a quirk --- of the fact that modules must start with capital letters. - -showExposedModules :: [ExposedModule] -> Disp.Doc -showExposedModules xs - | all isExposedModule xs = Disp.fsep (map disp xs) - | otherwise = Disp.fsep (Disp.punctuate Disp.comma (map disp xs)) - where isExposedModule (ExposedModule _ Nothing) = True - isExposedModule _ = False - --- | Returns @Just@ if the @name@ field of the IPI record would not contain --- the package name verbatim. This helps us avoid writing @package-name@ --- when it's redundant. -maybePackageName :: InstalledPackageInfo -> Maybe PackageName -maybePackageName ipi = - case sourceLibName ipi of - Nothing -> Nothing - Just _ -> Just (packageName ipi) - --- | Setter for the @package-name@ field. It should be acceptable for this --- to be a no-op. 
-setMaybePackageName :: Maybe PackageName -> InstalledPackageInfo -> InstalledPackageInfo -setMaybePackageName Nothing ipi = ipi -setMaybePackageName (Just pn) ipi = ipi { - sourcePackageId=(sourcePackageId ipi){pkgName=pn} - } - -setMungedPackageName :: MungedPackageName -> InstalledPackageInfo -> InstalledPackageInfo -setMungedPackageName mpn ipi = - let (pn, mb_uqn) = decodeCompatPackageName mpn - in ipi { - sourcePackageId = (sourcePackageId ipi) {pkgName=pn}, - sourceLibName = mb_uqn - } - -------------------------------------------------------------------------------- --- Auxiliary types -------------------------------------------------------------------------------- - -newtype ExposedModules = ExposedModules { getExposedModules :: [ExposedModule] } - -instance Newtype ExposedModules [ExposedModule] where - pack = ExposedModules - unpack = getExposedModules - -instance Parsec ExposedModules where - parsec = ExposedModules <$> parsecOptCommaList parsec - -instance Pretty ExposedModules where - pretty = showExposedModules . getExposedModules - - -newtype CompatPackageKey = CompatPackageKey { getCompatPackageKey :: String } - -instance Newtype CompatPackageKey String where - pack = CompatPackageKey - unpack = getCompatPackageKey - -instance Pretty CompatPackageKey where - pretty = Disp.text . getCompatPackageKey - -instance Parsec CompatPackageKey where - parsec = CompatPackageKey <$> P.munch1 uid_char where - uid_char c = Char.isAlphaNum c || c `elem` ("-_.=[],:<>+" :: String) - - -newtype InstWith = InstWith { getInstWith :: [(ModuleName,OpenModule)] } - -instance Newtype InstWith [(ModuleName, OpenModule)] where - pack = InstWith - unpack = getInstWith - -instance Pretty InstWith where - pretty = dispOpenModuleSubst . Map.fromList . getInstWith - -instance Parsec InstWith where - parsec = InstWith . Map.toList <$> parsecOpenModuleSubst - - --- | SPDX License expression or legacy license. Lenient parser, accepts either. 
-newtype SpecLicenseLenient = SpecLicenseLenient { getSpecLicenseLenient :: Either SPDX.License License } - -instance Newtype SpecLicenseLenient (Either SPDX.License License) where - pack = SpecLicenseLenient - unpack = getSpecLicenseLenient - -instance Parsec SpecLicenseLenient where - parsec = fmap SpecLicenseLenient $ Left <$> P.try parsec <|> Right <$> parsec - -instance Pretty SpecLicenseLenient where - pretty = either pretty pretty . unpack - - -data Basic = Basic - { _basicName :: MungedPackageName - , _basicVersion :: Version - , _basicPkgName :: Maybe PackageName - , _basicLibName :: Maybe UnqualComponentName - } - -basic :: Lens' InstalledPackageInfo Basic -basic f ipi = g <$> f b - where - b = Basic - (mungedPackageName ipi) - (packageVersion ipi) - (maybePackageName ipi) - (sourceLibName ipi) - - g (Basic n v pn ln) = ipi - & setMungedPackageName n - & L.sourcePackageId . L.pkgVersion .~ v - & setMaybePackageName pn - & L.sourceLibName .~ ln - -basicName :: Lens' Basic MungedPackageName -basicName f b = (\x -> b { _basicName = x }) <$> f (_basicName b) -{-# INLINE basicName #-} - -basicVersion :: Lens' Basic Version -basicVersion f b = (\x -> b { _basicVersion = x }) <$> f (_basicVersion b) -{-# INLINE basicVersion #-} - -basicPkgName :: Lens' Basic (Maybe PackageName) -basicPkgName f b = (\x -> b { _basicPkgName = x }) <$> f (_basicPkgName b) -{-# INLINE basicPkgName #-} - -basicLibName :: Lens' Basic (Maybe UnqualComponentName) -basicLibName f b = (\x -> b { _basicLibName = x }) <$> f (_basicLibName b) -{-# INLINE basicLibName #-} - -basicFieldGrammar - :: (FieldGrammar g, Applicative (g Basic)) - => g Basic Basic -basicFieldGrammar = Basic - <$> optionalFieldDefAla "name" MQuoted basicName (mungedPackageName emptyInstalledPackageInfo) - <*> optionalFieldDefAla "version" MQuoted basicVersion nullVersion - <*> optionalField "package-name" basicPkgName - <*> optionalField "lib-name" basicLibName diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/InstalledPackageInfo/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/InstalledPackageInfo/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/InstalledPackageInfo/Lens.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/InstalledPackageInfo/Lens.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,183 +0,0 @@ -module Distribution.Types.InstalledPackageInfo.Lens ( - InstalledPackageInfo, - module Distribution.Types.InstalledPackageInfo.Lens - ) where - -import Distribution.Compat.Lens -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Backpack (OpenModule) -import Distribution.License (License) -import Distribution.ModuleName (ModuleName) -import Distribution.Package (AbiHash, ComponentId, PackageIdentifier, UnitId) -import Distribution.Types.InstalledPackageInfo (AbiDependency, ExposedModule, InstalledPackageInfo) -import Distribution.Types.UnqualComponentName (UnqualComponentName) - -import qualified Distribution.SPDX as SPDX -import qualified Distribution.Types.InstalledPackageInfo as T - -sourcePackageId :: Lens' InstalledPackageInfo PackageIdentifier -sourcePackageId f s = fmap (\x -> s { T.sourcePackageId = x }) (f (T.sourcePackageId s)) -{-# INLINE sourcePackageId #-} - -installedUnitId :: Lens' InstalledPackageInfo UnitId -installedUnitId f s = fmap (\x -> s { T.installedUnitId = x }) (f (T.installedUnitId s)) -{-# INLINE installedUnitId #-} - -installedComponentId_ :: Lens' InstalledPackageInfo ComponentId -installedComponentId_ f s = fmap (\x -> s { T.installedComponentId_ = x }) (f (T.installedComponentId_ s)) -{-# INLINE installedComponentId_ #-} - -instantiatedWith :: Lens' InstalledPackageInfo [(ModuleName,OpenModule)] -instantiatedWith f s = fmap (\x -> s { T.instantiatedWith = x }) (f 
(T.instantiatedWith s)) -{-# INLINE instantiatedWith #-} - -sourceLibName :: Lens' InstalledPackageInfo (Maybe UnqualComponentName) -sourceLibName f s = fmap (\x -> s { T.sourceLibName = x }) (f (T.sourceLibName s)) -{-# INLINE sourceLibName #-} - -compatPackageKey :: Lens' InstalledPackageInfo String -compatPackageKey f s = fmap (\x -> s { T.compatPackageKey = x }) (f (T.compatPackageKey s)) -{-# INLINE compatPackageKey #-} - -license :: Lens' InstalledPackageInfo (Either SPDX.License License) -license f s = fmap (\x -> s { T.license = x }) (f (T.license s)) -{-# INLINE license #-} - -copyright :: Lens' InstalledPackageInfo String -copyright f s = fmap (\x -> s { T.copyright = x }) (f (T.copyright s)) -{-# INLINE copyright #-} - -maintainer :: Lens' InstalledPackageInfo String -maintainer f s = fmap (\x -> s { T.maintainer = x }) (f (T.maintainer s)) -{-# INLINE maintainer #-} - -author :: Lens' InstalledPackageInfo String -author f s = fmap (\x -> s { T.author = x }) (f (T.author s)) -{-# INLINE author #-} - -stability :: Lens' InstalledPackageInfo String -stability f s = fmap (\x -> s { T.stability = x }) (f (T.stability s)) -{-# INLINE stability #-} - -homepage :: Lens' InstalledPackageInfo String -homepage f s = fmap (\x -> s { T.homepage = x }) (f (T.homepage s)) -{-# INLINE homepage #-} - -pkgUrl :: Lens' InstalledPackageInfo String -pkgUrl f s = fmap (\x -> s { T.pkgUrl = x }) (f (T.pkgUrl s)) -{-# INLINE pkgUrl #-} - -synopsis :: Lens' InstalledPackageInfo String -synopsis f s = fmap (\x -> s { T.synopsis = x }) (f (T.synopsis s)) -{-# INLINE synopsis #-} - -description :: Lens' InstalledPackageInfo String -description f s = fmap (\x -> s { T.description = x }) (f (T.description s)) -{-# INLINE description #-} - -category :: Lens' InstalledPackageInfo String -category f s = fmap (\x -> s { T.category = x }) (f (T.category s)) -{-# INLINE category #-} - -abiHash :: Lens' InstalledPackageInfo AbiHash -abiHash f s = fmap (\x -> s { T.abiHash = x }) (f 
(T.abiHash s)) -{-# INLINE abiHash #-} - -indefinite :: Lens' InstalledPackageInfo Bool -indefinite f s = fmap (\x -> s { T.indefinite = x }) (f (T.indefinite s)) -{-# INLINE indefinite #-} - -exposed :: Lens' InstalledPackageInfo Bool -exposed f s = fmap (\x -> s { T.exposed = x }) (f (T.exposed s)) -{-# INLINE exposed #-} - -exposedModules :: Lens' InstalledPackageInfo [ExposedModule] -exposedModules f s = fmap (\x -> s { T.exposedModules = x }) (f (T.exposedModules s)) -{-# INLINE exposedModules #-} - -hiddenModules :: Lens' InstalledPackageInfo [ModuleName] -hiddenModules f s = fmap (\x -> s { T.hiddenModules = x }) (f (T.hiddenModules s)) -{-# INLINE hiddenModules #-} - -trusted :: Lens' InstalledPackageInfo Bool -trusted f s = fmap (\x -> s { T.trusted = x }) (f (T.trusted s)) -{-# INLINE trusted #-} - -importDirs :: Lens' InstalledPackageInfo [FilePath] -importDirs f s = fmap (\x -> s { T.importDirs = x }) (f (T.importDirs s)) -{-# INLINE importDirs #-} - -libraryDirs :: Lens' InstalledPackageInfo [FilePath] -libraryDirs f s = fmap (\x -> s { T.libraryDirs = x }) (f (T.libraryDirs s)) -{-# INLINE libraryDirs #-} - -libraryDynDirs :: Lens' InstalledPackageInfo [FilePath] -libraryDynDirs f s = fmap (\x -> s { T.libraryDynDirs = x }) (f (T.libraryDynDirs s)) -{-# INLINE libraryDynDirs #-} - -dataDir :: Lens' InstalledPackageInfo FilePath -dataDir f s = fmap (\x -> s { T.dataDir = x }) (f (T.dataDir s)) -{-# INLINE dataDir #-} - -hsLibraries :: Lens' InstalledPackageInfo [String] -hsLibraries f s = fmap (\x -> s { T.hsLibraries = x }) (f (T.hsLibraries s)) -{-# INLINE hsLibraries #-} - -extraLibraries :: Lens' InstalledPackageInfo [String] -extraLibraries f s = fmap (\x -> s { T.extraLibraries = x }) (f (T.extraLibraries s)) -{-# INLINE extraLibraries #-} - -extraGHCiLibraries :: Lens' InstalledPackageInfo [String] -extraGHCiLibraries f s = fmap (\x -> s { T.extraGHCiLibraries = x }) (f (T.extraGHCiLibraries s)) -{-# INLINE extraGHCiLibraries #-} - -includeDirs 
:: Lens' InstalledPackageInfo [FilePath] -includeDirs f s = fmap (\x -> s { T.includeDirs = x }) (f (T.includeDirs s)) -{-# INLINE includeDirs #-} - -includes :: Lens' InstalledPackageInfo [String] -includes f s = fmap (\x -> s { T.includes = x }) (f (T.includes s)) -{-# INLINE includes #-} - -depends :: Lens' InstalledPackageInfo [UnitId] -depends f s = fmap (\x -> s { T.depends = x }) (f (T.depends s)) -{-# INLINE depends #-} - -abiDepends :: Lens' InstalledPackageInfo [AbiDependency] -abiDepends f s = fmap (\x -> s { T.abiDepends = x }) (f (T.abiDepends s)) -{-# INLINE abiDepends #-} - -ccOptions :: Lens' InstalledPackageInfo [String] -ccOptions f s = fmap (\x -> s { T.ccOptions = x }) (f (T.ccOptions s)) -{-# INLINE ccOptions #-} - -cxxOptions :: Lens' InstalledPackageInfo [String] -cxxOptions f s = fmap (\x -> s { T.cxxOptions = x }) (f (T.cxxOptions s)) -{-# INLINE cxxOptions #-} - -ldOptions :: Lens' InstalledPackageInfo [String] -ldOptions f s = fmap (\x -> s { T.ldOptions = x }) (f (T.ldOptions s)) -{-# INLINE ldOptions #-} - -frameworkDirs :: Lens' InstalledPackageInfo [FilePath] -frameworkDirs f s = fmap (\x -> s { T.frameworkDirs = x }) (f (T.frameworkDirs s)) -{-# INLINE frameworkDirs #-} - -frameworks :: Lens' InstalledPackageInfo [String] -frameworks f s = fmap (\x -> s { T.frameworks = x }) (f (T.frameworks s)) -{-# INLINE frameworks #-} - -haddockInterfaces :: Lens' InstalledPackageInfo [FilePath] -haddockInterfaces f s = fmap (\x -> s { T.haddockInterfaces = x }) (f (T.haddockInterfaces s)) -{-# INLINE haddockInterfaces #-} - -haddockHTMLs :: Lens' InstalledPackageInfo [FilePath] -haddockHTMLs f s = fmap (\x -> s { T.haddockHTMLs = x }) (f (T.haddockHTMLs s)) -{-# INLINE haddockHTMLs #-} - -pkgRoot :: Lens' InstalledPackageInfo (Maybe FilePath) -pkgRoot f s = fmap (\x -> s { T.pkgRoot = x }) (f (T.pkgRoot s)) -{-# INLINE pkgRoot #-} - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/InstalledPackageInfo.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/InstalledPackageInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/InstalledPackageInfo.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/InstalledPackageInfo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,170 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE TypeFamilies #-} -module Distribution.Types.InstalledPackageInfo ( - InstalledPackageInfo (..), - emptyInstalledPackageInfo, - mungedPackageId, - mungedPackageName, - AbiDependency (..), - ExposedModule (..), - ) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Backpack -import Distribution.Compat.Graph (IsNode (..)) -import Distribution.License -import Distribution.ModuleName -import Distribution.Package hiding (installedUnitId) -import Distribution.Types.AbiDependency -import Distribution.Types.ExposedModule -import Distribution.Types.MungedPackageId -import Distribution.Types.MungedPackageName -import Distribution.Types.UnqualComponentName -import Distribution.Version (nullVersion) - -import qualified Distribution.Package as Package -import qualified Distribution.SPDX as SPDX - --- ----------------------------------------------------------------------------- --- The InstalledPackageInfo type - --- For BC reasons, we continue to name this record an InstalledPackageInfo; --- but it would more accurately be called an InstalledUnitInfo with Backpack -data InstalledPackageInfo - = InstalledPackageInfo { - -- these parts (sourcePackageId, installedUnitId) are - -- exactly the same as PackageDescription - sourcePackageId :: PackageId, - sourceLibName :: Maybe UnqualComponentName, - installedComponentId_ :: ComponentId, - installedUnitId :: UnitId, - -- INVARIANT: if this package is definite, OpenModule's - -- OpenUnitId directly records UnitId. 
If it is - -- indefinite, OpenModule is always an OpenModuleVar - -- with the same ModuleName as the key. - instantiatedWith :: [(ModuleName, OpenModule)], - compatPackageKey :: String, - license :: Either SPDX.License License, - copyright :: String, - maintainer :: String, - author :: String, - stability :: String, - homepage :: String, - pkgUrl :: String, - synopsis :: String, - description :: String, - category :: String, - -- these parts are required by an installed package only: - abiHash :: AbiHash, - indefinite :: Bool, - exposed :: Bool, - -- INVARIANT: if the package is definite, OpenModule's - -- OpenUnitId directly records UnitId. - exposedModules :: [ExposedModule], - hiddenModules :: [ModuleName], - trusted :: Bool, - importDirs :: [FilePath], - libraryDirs :: [FilePath], - libraryDynDirs :: [FilePath], -- ^ overrides 'libraryDirs' - dataDir :: FilePath, - hsLibraries :: [String], - extraLibraries :: [String], - extraGHCiLibraries:: [String], -- overrides extraLibraries for GHCi - includeDirs :: [FilePath], - includes :: [String], - -- INVARIANT: if the package is definite, UnitId is NOT - -- a ComponentId of an indefinite package - depends :: [UnitId], - abiDepends :: [AbiDependency], - ccOptions :: [String], - cxxOptions :: [String], - ldOptions :: [String], - frameworkDirs :: [FilePath], - frameworks :: [String], - haddockInterfaces :: [FilePath], - haddockHTMLs :: [FilePath], - pkgRoot :: Maybe FilePath - } - deriving (Eq, Generic, Typeable, Read, Show) - -instance Binary InstalledPackageInfo - -instance NFData InstalledPackageInfo where rnf = genericRnf - -instance Package.HasMungedPackageId InstalledPackageInfo where - mungedId = mungedPackageId - -instance Package.Package InstalledPackageInfo where - packageId = sourcePackageId - -instance Package.HasUnitId InstalledPackageInfo where - installedUnitId = installedUnitId - -instance Package.PackageInstalled InstalledPackageInfo where - installedDepends = depends - -instance IsNode 
InstalledPackageInfo where - type Key InstalledPackageInfo = UnitId - nodeKey = installedUnitId - nodeNeighbors = depends - -mungedPackageId :: InstalledPackageInfo -> MungedPackageId -mungedPackageId ipi = - MungedPackageId (mungedPackageName ipi) (packageVersion ipi) - --- | Returns the munged package name, which we write into @name@ for --- compatibility with old versions of GHC. -mungedPackageName :: InstalledPackageInfo -> MungedPackageName -mungedPackageName ipi = - computeCompatPackageName - (packageName ipi) - (sourceLibName ipi) - -emptyInstalledPackageInfo :: InstalledPackageInfo -emptyInstalledPackageInfo - = InstalledPackageInfo { - sourcePackageId = PackageIdentifier (mkPackageName "") nullVersion, - sourceLibName = Nothing, - installedComponentId_ = mkComponentId "", - installedUnitId = mkUnitId "", - instantiatedWith = [], - compatPackageKey = "", - license = Left SPDX.NONE, - copyright = "", - maintainer = "", - author = "", - stability = "", - homepage = "", - pkgUrl = "", - synopsis = "", - description = "", - category = "", - abiHash = mkAbiHash "", - indefinite = False, - exposed = False, - exposedModules = [], - hiddenModules = [], - trusted = False, - importDirs = [], - libraryDirs = [], - libraryDynDirs = [], - dataDir = "", - hsLibraries = [], - extraLibraries = [], - extraGHCiLibraries= [], - includeDirs = [], - includes = [], - depends = [], - abiDepends = [], - ccOptions = [], - cxxOptions = [], - ldOptions = [], - frameworkDirs = [], - frameworks = [], - haddockInterfaces = [], - haddockHTMLs = [], - pkgRoot = Nothing - } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/LegacyExeDependency.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/LegacyExeDependency.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/LegacyExeDependency.hs 2018-10-17 15:59:03.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/LegacyExeDependency.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,65 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Types.LegacyExeDependency - ( LegacyExeDependency(..) - ) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Parsec.Class -import Distribution.ParseUtils (parseMaybeQuoted) -import Distribution.Pretty -import Distribution.Text -import Distribution.Version (VersionRange, anyVersion) - -import qualified Distribution.Compat.CharParsing as P -import Distribution.Compat.ReadP ((<++)) -import qualified Distribution.Compat.ReadP as Parse -import Text.PrettyPrint (text, (<+>)) - --- | Describes a legacy `build-tools`-style dependency on an executable --- --- It is "legacy" because we do not know what the build-tool referred to. It --- could refer to a pkg-config executable (PkgconfigName), or an internal --- executable (UnqualComponentName). Thus the name is stringly typed. 
--- --- @since 2.0.0.2 -data LegacyExeDependency = LegacyExeDependency - String - VersionRange - deriving (Generic, Read, Show, Eq, Typeable, Data) - -instance Binary LegacyExeDependency -instance NFData LegacyExeDependency where rnf = genericRnf - -instance Pretty LegacyExeDependency where - pretty (LegacyExeDependency name ver) = - text name <+> pretty ver - -instance Parsec LegacyExeDependency where - parsec = do - name <- parsecMaybeQuoted nameP - P.spaces - verRange <- parsecMaybeQuoted parsec <|> pure anyVersion - pure $ LegacyExeDependency name verRange - where - nameP = intercalate "-" <$> P.sepBy1 component (P.char '-') - component = do - cs <- P.munch1 (\c -> isAlphaNum c || c == '+' || c == '_') - if all isDigit cs then fail "invalid component" else return cs - -instance Text LegacyExeDependency where - parse = do name <- parseMaybeQuoted parseBuildToolName - Parse.skipSpaces - ver <- parse <++ return anyVersion - Parse.skipSpaces - return $ LegacyExeDependency name ver - where - -- like parsePackageName but accepts symbols in components - parseBuildToolName :: Parse.ReadP r String - parseBuildToolName = do ns <- Parse.sepBy1 component (Parse.char '-') - return (intercalate "-" ns) - where component = do - cs <- Parse.munch1 (\c -> isAlphaNum c || c == '+' || c == '_') - if all isDigit cs then Parse.pfail else return cs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Lens.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Lens.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -module Distribution.Types.Lens ( - module Distribution.Types.Benchmark.Lens, - module Distribution.Types.BuildInfo.Lens, - module Distribution.Types.Executable.Lens, - module 
Distribution.Types.ForeignLib.Lens, - module Distribution.Types.GenericPackageDescription.Lens, - module Distribution.Types.Library.Lens, - module Distribution.Types.PackageDescription.Lens, - module Distribution.Types.PackageId.Lens, - module Distribution.Types.SetupBuildInfo.Lens, - module Distribution.Types.SourceRepo.Lens, - module Distribution.Types.TestSuite.Lens, - ) where - -import Distribution.Types.Benchmark.Lens -import Distribution.Types.BuildInfo.Lens -import Distribution.Types.Executable.Lens -import Distribution.Types.ForeignLib.Lens -import Distribution.Types.GenericPackageDescription.Lens -import Distribution.Types.Library.Lens -import Distribution.Types.PackageDescription.Lens -import Distribution.Types.PackageId.Lens -import Distribution.Types.SetupBuildInfo.Lens -import Distribution.Types.SourceRepo.Lens -import Distribution.Types.TestSuite.Lens diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Library/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Library/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Library/Lens.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Library/Lens.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,40 +0,0 @@ -module Distribution.Types.Library.Lens ( - Library, - module Distribution.Types.Library.Lens, - ) where - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.Compat.Lens - -import Distribution.ModuleName (ModuleName) -import Distribution.Types.BuildInfo (BuildInfo) -import Distribution.Types.Library (Library) -import Distribution.Types.ModuleReexport (ModuleReexport) -import Distribution.Types.UnqualComponentName (UnqualComponentName) - -import qualified Distribution.Types.Library as T - -libName :: Lens' Library (Maybe UnqualComponentName) -libName f s = fmap (\x -> s { T.libName = x }) (f 
(T.libName s)) -{-# INLINE libName #-} - -exposedModules :: Lens' Library [ModuleName] -exposedModules f s = fmap (\x -> s { T.exposedModules = x }) (f (T.exposedModules s)) -{-# INLINE exposedModules #-} - -reexportedModules :: Lens' Library [ModuleReexport] -reexportedModules f s = fmap (\x -> s { T.reexportedModules = x }) (f (T.reexportedModules s)) -{-# INLINE reexportedModules #-} - -signatures :: Lens' Library [ModuleName] -signatures f s = fmap (\x -> s { T.signatures = x }) (f (T.signatures s)) -{-# INLINE signatures #-} - -libExposed :: Lens' Library Bool -libExposed f s = fmap (\x -> s { T.libExposed = x }) (f (T.libExposed s)) -{-# INLINE libExposed #-} - -libBuildInfo :: Lens' Library BuildInfo -libBuildInfo f s = fmap (\x -> s { T.libBuildInfo = x }) (f (T.libBuildInfo s)) -{-# INLINE libBuildInfo #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Library.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Library.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Library.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Library.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,87 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.Library ( - Library(..), - emptyLibrary, - explicitLibModules, - libModulesAutogen, - libModules, -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.BuildInfo -import Distribution.Types.ModuleReexport -import Distribution.Types.UnqualComponentName -import Distribution.ModuleName - -import qualified Distribution.Types.BuildInfo.Lens as L - -data Library = Library - { libName :: Maybe UnqualComponentName - , exposedModules :: [ModuleName] - , reexportedModules :: [ModuleReexport] - , signatures :: [ModuleName] -- ^ What sigs need implementations? 
- , libExposed :: Bool -- ^ Is the lib to be exposed by default? - , libBuildInfo :: BuildInfo - } - deriving (Generic, Show, Eq, Read, Typeable, Data) - -instance L.HasBuildInfo Library where - buildInfo f l = (\x -> l { libBuildInfo = x }) <$> f (libBuildInfo l) - -instance Binary Library - -instance NFData Library where rnf = genericRnf - -instance Monoid Library where - mempty = Library { - libName = mempty, - exposedModules = mempty, - reexportedModules = mempty, - signatures = mempty, - libExposed = True, - libBuildInfo = mempty - } - mappend = (<>) - -instance Semigroup Library where - a <> b = Library { - libName = combine libName, - exposedModules = combine exposedModules, - reexportedModules = combine reexportedModules, - signatures = combine signatures, - libExposed = libExposed a && libExposed b, -- so False propagates - libBuildInfo = combine libBuildInfo - } - where combine field = field a `mappend` field b - -emptyLibrary :: Library -emptyLibrary = mempty - --- | Get all the module names from the library (exposed and internal modules) --- which are explicitly listed in the package description which would --- need to be compiled. (This does not include reexports, which --- do not need to be compiled.) This may not include all modules for which --- GHC generated interface files (i.e., implicit modules.) -explicitLibModules :: Library -> [ModuleName] -explicitLibModules lib = exposedModules lib - ++ otherModules (libBuildInfo lib) - ++ signatures lib - --- | Get all the auto generated module names from the library, exposed or not. --- This are a subset of 'libModules'. -libModulesAutogen :: Library -> [ModuleName] -libModulesAutogen lib = autogenModules (libBuildInfo lib) - --- | Backwards-compatibility shim for 'explicitLibModules'. 
In most cases, --- you actually want 'allLibModules', which returns all modules that will --- actually be compiled, as opposed to those which are explicitly listed --- in the package description ('explicitLibModules'); unfortunately, the --- type signature for 'allLibModules' is incompatible since we need a --- 'ComponentLocalBuildInfo'. -{-# DEPRECATED libModules "If you want all modules that are built with a library, use 'allLibModules'. Otherwise, use 'explicitLibModules' for ONLY the modules explicitly mentioned in the package description. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} -libModules :: Library -> [ModuleName] -libModules = explicitLibModules diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/LocalBuildInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/LocalBuildInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/LocalBuildInfo.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/LocalBuildInfo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,336 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - -module Distribution.Types.LocalBuildInfo ( - -- * The type - - LocalBuildInfo(..), - - -- * Convenience accessors - - localComponentId, - localUnitId, - localCompatPackageKey, - localPackage, - - -- * Build targets of the 'LocalBuildInfo'. - - componentNameCLBIs, - - -- NB: the primes mean that they take a 'PackageDescription' - -- which may not match 'localPkgDescr' in 'LocalBuildInfo'. - -- More logical types would drop this argument, but - -- at the moment, this is the ONLY supported function, because - -- 'localPkgDescr' is not guaranteed to match. At some point - -- we will fix it and then we can use the (free) unprimed - -- namespace for the correct commands. 
- -- - -- See https://github.com/haskell/cabal/issues/3606 for more - -- details. - - componentNameTargets', - unitIdTarget', - allTargetsInBuildOrder', - withAllTargetsInBuildOrder', - neededTargetsInBuildOrder', - withNeededTargetsInBuildOrder', - testCoverage, - - -- * Functions you SHOULD NOT USE (yet), but are defined here to - -- prevent someone from accidentally defining them - - componentNameTargets, - unitIdTarget, - allTargetsInBuildOrder, - withAllTargetsInBuildOrder, - neededTargetsInBuildOrder, - withNeededTargetsInBuildOrder, - - -- * Backwards compatibility. - - componentsConfigs, - externalPackageDeps, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.PackageDescription -import Distribution.Types.ComponentLocalBuildInfo -import Distribution.Types.ComponentRequestedSpec -import Distribution.Types.ComponentId -import Distribution.Types.MungedPackageId -import Distribution.Types.PackageId -import Distribution.Types.UnitId -import Distribution.Types.TargetInfo - -import Distribution.Simple.InstallDirs hiding (absoluteInstallDirs, - prefixRelativeInstallDirs, - substPathTemplate, ) -import Distribution.Simple.Program -import Distribution.PackageDescription -import Distribution.Simple.Compiler -import Distribution.Simple.PackageIndex -import Distribution.Simple.Setup -import Distribution.Text -import Distribution.System - -import Distribution.Compat.Graph (Graph) -import qualified Distribution.Compat.Graph as Graph -import qualified Data.Map as Map - --- | Data cached after configuration step. See also --- 'Distribution.Simple.Setup.ConfigFlags'. -data LocalBuildInfo = LocalBuildInfo { - configFlags :: ConfigFlags, - -- ^ Options passed to the configuration step. 
- -- Needed to re-run configuration when .cabal is out of date - flagAssignment :: FlagAssignment, - -- ^ The final set of flags which were picked for this package - componentEnabledSpec :: ComponentRequestedSpec, - -- ^ What components were enabled during configuration, and why. - extraConfigArgs :: [String], - -- ^ Extra args on the command line for the configuration step. - -- Needed to re-run configuration when .cabal is out of date - installDirTemplates :: InstallDirTemplates, - -- ^ The installation directories for the various different - -- kinds of files - --TODO: inplaceDirTemplates :: InstallDirs FilePath - compiler :: Compiler, - -- ^ The compiler we're building with - hostPlatform :: Platform, - -- ^ The platform we're building for - buildDir :: FilePath, - -- ^ Where to build the package. - cabalFilePath :: Maybe FilePath, - -- ^ Path to the cabal file, if given during configuration. - componentGraph :: Graph ComponentLocalBuildInfo, - -- ^ All the components to build, ordered by topological - -- sort, and with their INTERNAL dependencies over the - -- intrapackage dependency graph. - -- TODO: this is assumed to be short; otherwise we want - -- some sort of ordered map. - componentNameMap :: Map ComponentName [ComponentLocalBuildInfo], - -- ^ A map from component name to all matching - -- components. These coincide with 'componentGraph' - installedPkgs :: InstalledPackageIndex, - -- ^ All the info about the installed packages that the - -- current package depends on (directly or indirectly). - -- The copy saved on disk does NOT include internal - -- dependencies (because we just don't have enough - -- information at this point to have an - -- 'InstalledPackageInfo' for an internal dep), but we - -- will often update it with the internal dependencies; - -- see for example 'Distribution.Simple.Build.build'. - -- (This admonition doesn't apply for per-component builds.) 
- pkgDescrFile :: Maybe FilePath, - -- ^ the filename containing the .cabal file, if available - localPkgDescr :: PackageDescription, - -- ^ WARNING WARNING WARNING Be VERY careful about using - -- this function; we haven't deprecated it but using it - -- could introduce subtle bugs related to - -- 'HookedBuildInfo'. - -- - -- In principle, this is supposed to contain the - -- resolved package description, that does not contain - -- any conditionals. However, it MAY NOT contain - -- the description wtih a 'HookedBuildInfo' applied - -- to it; see 'HookedBuildInfo' for the whole sordid saga. - -- As much as possible, Cabal library should avoid using - -- this parameter. - withPrograms :: ProgramDb, -- ^Location and args for all programs - withPackageDB :: PackageDBStack, -- ^What package database to use, global\/user - withVanillaLib:: Bool, -- ^Whether to build normal libs. - withProfLib :: Bool, -- ^Whether to build profiling versions of libs. - withSharedLib :: Bool, -- ^Whether to build shared versions of libs. - withStaticLib :: Bool, -- ^Whether to build static versions of libs (with all other libs rolled in) - withDynExe :: Bool, -- ^Whether to link executables dynamically - withProfExe :: Bool, -- ^Whether to build executables for profiling. - withProfLibDetail :: ProfDetailLevel, -- ^Level of automatic profile detail. - withProfExeDetail :: ProfDetailLevel, -- ^Level of automatic profile detail. - withOptimization :: OptimisationLevel, -- ^Whether to build with optimization (if available). - withDebugInfo :: DebugInfoLevel, -- ^Whether to emit debug info (if available). - withGHCiLib :: Bool, -- ^Whether to build libs suitable for use with GHCi. 
- splitSections :: Bool, -- ^Use -split-sections with GHC, if available - splitObjs :: Bool, -- ^Use -split-objs with GHC, if available - stripExes :: Bool, -- ^Whether to strip executables during install - stripLibs :: Bool, -- ^Whether to strip libraries during install - exeCoverage :: Bool, -- ^Whether to enable executable program coverage - libCoverage :: Bool, -- ^Whether to enable library program coverage - progPrefix :: PathTemplate, -- ^Prefix to be prepended to installed executables - progSuffix :: PathTemplate, -- ^Suffix to be appended to installed executables - relocatable :: Bool -- ^Whether to build a relocatable package - } deriving (Generic, Read, Show) - -instance Binary LocalBuildInfo - -------------------------------------------------------------------------------- --- Accessor functions - --- TODO: Get rid of these functions, as much as possible. They are --- a bit useful in some cases, but you should be very careful! - --- | Extract the 'ComponentId' from the public library component of a --- 'LocalBuildInfo' if it exists, or make a fake component ID based --- on the package ID. -localComponentId :: LocalBuildInfo -> ComponentId -localComponentId lbi = - case componentNameCLBIs lbi CLibName of - [LibComponentLocalBuildInfo { componentComponentId = cid }] - -> cid - _ -> mkComponentId (display (localPackage lbi)) - --- | Extract the 'PackageIdentifier' of a 'LocalBuildInfo'. --- This is a "safe" use of 'localPkgDescr' -localPackage :: LocalBuildInfo -> PackageId -localPackage lbi = package (localPkgDescr lbi) - --- | Extract the 'UnitId' from the library component of a --- 'LocalBuildInfo' if it exists, or make a fake unit ID based on --- the package ID. 
-localUnitId :: LocalBuildInfo -> UnitId -localUnitId lbi = - case componentNameCLBIs lbi CLibName of - [LibComponentLocalBuildInfo { componentUnitId = uid }] - -> uid - _ -> mkLegacyUnitId $ localPackage lbi - --- | Extract the compatibility package key from the public library component of a --- 'LocalBuildInfo' if it exists, or make a fake package key based --- on the package ID. -localCompatPackageKey :: LocalBuildInfo -> String -localCompatPackageKey lbi = - case componentNameCLBIs lbi CLibName of - [LibComponentLocalBuildInfo { componentCompatPackageKey = pk }] - -> pk - _ -> display (localPackage lbi) - --- | Convenience function to generate a default 'TargetInfo' from a --- 'ComponentLocalBuildInfo'. The idea is to call this once, and then --- use 'TargetInfo' everywhere else. Private to this module. -mkTargetInfo :: PackageDescription -> LocalBuildInfo -> ComponentLocalBuildInfo -> TargetInfo -mkTargetInfo pkg_descr _lbi clbi = - TargetInfo { - targetCLBI = clbi, - -- NB: @pkg_descr@, not @localPkgDescr lbi@! - targetComponent = getComponent pkg_descr - (componentLocalName clbi) - } - --- | Return all 'TargetInfo's associated with 'ComponentName'. --- In the presence of Backpack there may be more than one! --- Has a prime because it takes a 'PackageDescription' argument --- which may disagree with 'localPkgDescr' in 'LocalBuildInfo'. -componentNameTargets' :: PackageDescription -> LocalBuildInfo -> ComponentName -> [TargetInfo] -componentNameTargets' pkg_descr lbi cname = - case Map.lookup cname (componentNameMap lbi) of - Just clbis -> map (mkTargetInfo pkg_descr lbi) clbis - Nothing -> [] - -unitIdTarget' :: PackageDescription -> LocalBuildInfo -> UnitId -> Maybe TargetInfo -unitIdTarget' pkg_descr lbi uid = - case Graph.lookup uid (componentGraph lbi) of - Just clbi -> Just (mkTargetInfo pkg_descr lbi clbi) - Nothing -> Nothing - --- | Return all 'ComponentLocalBuildInfo's associated with 'ComponentName'. 
--- In the presence of Backpack there may be more than one! -componentNameCLBIs :: LocalBuildInfo -> ComponentName -> [ComponentLocalBuildInfo] -componentNameCLBIs lbi cname = - case Map.lookup cname (componentNameMap lbi) of - Just clbis -> clbis - Nothing -> [] - --- TODO: Maybe cache topsort (Graph can do this) - --- | Return the list of default 'TargetInfo's associated with a --- configured package, in the order they need to be built. --- Has a prime because it takes a 'PackageDescription' argument --- which may disagree with 'localPkgDescr' in 'LocalBuildInfo'. -allTargetsInBuildOrder' :: PackageDescription -> LocalBuildInfo -> [TargetInfo] -allTargetsInBuildOrder' pkg_descr lbi - = map (mkTargetInfo pkg_descr lbi) (Graph.revTopSort (componentGraph lbi)) - --- | Execute @f@ for every 'TargetInfo' in the package, respecting the --- build dependency order. (TODO: We should use Shake!) --- Has a prime because it takes a 'PackageDescription' argument --- which may disagree with 'localPkgDescr' in 'LocalBuildInfo'. -withAllTargetsInBuildOrder' :: PackageDescription -> LocalBuildInfo -> (TargetInfo -> IO ()) -> IO () -withAllTargetsInBuildOrder' pkg_descr lbi f - = sequence_ [ f target | target <- allTargetsInBuildOrder' pkg_descr lbi ] - --- | Return the list of all targets needed to build the @uids@, in --- the order they need to be built. --- Has a prime because it takes a 'PackageDescription' argument --- which may disagree with 'localPkgDescr' in 'LocalBuildInfo'. -neededTargetsInBuildOrder' :: PackageDescription -> LocalBuildInfo -> [UnitId] -> [TargetInfo] -neededTargetsInBuildOrder' pkg_descr lbi uids = - case Graph.closure (componentGraph lbi) uids of - Nothing -> error $ "localBuildPlan: missing uids " ++ intercalate ", " (map display uids) - Just clos -> map (mkTargetInfo pkg_descr lbi) (Graph.revTopSort (Graph.fromDistinctList clos)) - --- | Execute @f@ for every 'TargetInfo' needed to build @uid@s, respecting --- the build dependency order. 
--- Has a prime because it takes a 'PackageDescription' argument --- which may disagree with 'localPkgDescr' in 'LocalBuildInfo'. -withNeededTargetsInBuildOrder' :: PackageDescription -> LocalBuildInfo -> [UnitId] -> (TargetInfo -> IO ()) -> IO () -withNeededTargetsInBuildOrder' pkg_descr lbi uids f - = sequence_ [ f target | target <- neededTargetsInBuildOrder' pkg_descr lbi uids ] - --- | Is coverage enabled for test suites? In practice, this requires library --- and executable profiling to be enabled. -testCoverage :: LocalBuildInfo -> Bool -testCoverage lbi = exeCoverage lbi && libCoverage lbi - -------------------------------------------------------------------------------- --- Stub functions to prevent someone from accidentally defining them - -{-# WARNING componentNameTargets, unitIdTarget, allTargetsInBuildOrder, withAllTargetsInBuildOrder, neededTargetsInBuildOrder, withNeededTargetsInBuildOrder "By using this function, you may be introducing a bug where you retrieve a 'Component' which does not have 'HookedBuildInfo' applied to it. See the documentation for 'HookedBuildInfo' for an explanation of the issue. If you have a 'PakcageDescription' handy (NOT from the 'LocalBuildInfo'), try using the primed version of the function, which takes it as an extra argument." 
#-} - -componentNameTargets :: LocalBuildInfo -> ComponentName -> [TargetInfo] -componentNameTargets lbi = componentNameTargets' (localPkgDescr lbi) lbi - -unitIdTarget :: LocalBuildInfo -> UnitId -> Maybe TargetInfo -unitIdTarget lbi = unitIdTarget' (localPkgDescr lbi) lbi - -allTargetsInBuildOrder :: LocalBuildInfo -> [TargetInfo] -allTargetsInBuildOrder lbi = allTargetsInBuildOrder' (localPkgDescr lbi) lbi - -withAllTargetsInBuildOrder :: LocalBuildInfo -> (TargetInfo -> IO ()) -> IO () -withAllTargetsInBuildOrder lbi = withAllTargetsInBuildOrder' (localPkgDescr lbi) lbi - -neededTargetsInBuildOrder :: LocalBuildInfo -> [UnitId] -> [TargetInfo] -neededTargetsInBuildOrder lbi = neededTargetsInBuildOrder' (localPkgDescr lbi) lbi - -withNeededTargetsInBuildOrder :: LocalBuildInfo -> [UnitId] -> (TargetInfo -> IO ()) -> IO () -withNeededTargetsInBuildOrder lbi = withNeededTargetsInBuildOrder' (localPkgDescr lbi) lbi - -------------------------------------------------------------------------------- --- Backwards compatibility - -{-# DEPRECATED componentsConfigs "Use 'componentGraph' instead; you can get a list of 'ComponentLocalBuildInfo' with 'Distribution.Compat.Graph.toList'. There's not a good way to get the list of 'ComponentName's the 'ComponentLocalBuildInfo' depends on because this query doesn't make sense; the graph is indexed by 'UnitId' not 'ComponentName'. Given a 'UnitId' you can lookup the 'ComponentLocalBuildInfo' ('getCLBI') and then get the 'ComponentName' ('componentLocalName]). To be removed in Cabal 3.0" #-} -componentsConfigs :: LocalBuildInfo -> [(ComponentName, ComponentLocalBuildInfo, [ComponentName])] -componentsConfigs lbi = - [ (componentLocalName clbi, - clbi, - mapMaybe (fmap componentLocalName . flip Graph.lookup g) - (componentInternalDeps clbi)) - | clbi <- Graph.toList g ] - where - g = componentGraph lbi - --- | External package dependencies for the package as a whole. 
This is the --- union of the individual 'componentPackageDeps', less any internal deps. -{-# DEPRECATED externalPackageDeps "You almost certainly don't want this function, which agglomerates the dependencies of ALL enabled components. If you're using this to write out information on your dependencies, read off the dependencies directly from the actual component in question. To be removed in Cabal 3.0" #-} -externalPackageDeps :: LocalBuildInfo -> [(UnitId, MungedPackageId)] -externalPackageDeps lbi = - -- TODO: what about non-buildable components? - nub [ (ipkgid, pkgid) - | clbi <- Graph.toList (componentGraph lbi) - , (ipkgid, pkgid) <- componentPackageDeps clbi - , not (internal ipkgid) ] - where - -- True if this dependency is an internal one (depends on the library - -- defined in the same package). - internal ipkgid = any ((==ipkgid) . componentUnitId) (Graph.toList (componentGraph lbi)) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Mixin.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Mixin.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Mixin.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Mixin.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.Mixin ( - Mixin(..), -) where - -import Distribution.Compat.Prelude -import Prelude () - -import Text.PrettyPrint ((<+>)) - -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text -import Distribution.Types.IncludeRenaming -import Distribution.Types.PackageName - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse - -data Mixin = Mixin { mixinPackageName :: PackageName - , mixinIncludeRenaming :: IncludeRenaming } - deriving (Show, 
Read, Eq, Ord, Typeable, Data, Generic) - -instance Binary Mixin - -instance NFData Mixin where rnf = genericRnf - -instance Pretty Mixin where - pretty (Mixin pkg_name incl) = pretty pkg_name <+> pretty incl - -instance Parsec Mixin where - parsec = do - mod_name <- parsec - P.spaces - incl <- parsec - return (Mixin mod_name incl) - -instance Text Mixin where - parse = do - pkg_name <- parse - Parse.skipSpaces - incl <- parse - return (Mixin pkg_name incl) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Module.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Module.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Module.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Module.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,54 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} - -module Distribution.Types.Module - ( Module(..) - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import qualified Distribution.Compat.ReadP as Parse -import qualified Distribution.Compat.CharParsing as P -import qualified Text.PrettyPrint as Disp -import Distribution.Pretty -import Distribution.Parsec.Class -import Distribution.Text -import Distribution.Types.UnitId -import Distribution.ModuleName - --- | A module identity uniquely identifies a Haskell module by --- qualifying a 'ModuleName' with the 'UnitId' which defined --- it. This type distinguishes between two packages --- which provide a module with the same name, or a module --- from the same package compiled with different dependencies. --- There are a few cases where Cabal needs to know about --- module identities, e.g., when writing out reexported modules in --- the 'InstalledPackageInfo'. 
-data Module = - Module DefUnitId ModuleName - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) - -instance Binary Module - -instance Pretty Module where - pretty (Module uid mod_name) = - pretty uid <<>> Disp.text ":" <<>> pretty mod_name - -instance Parsec Module where - parsec = do - uid <- parsec - _ <- P.char ':' - mod_name <- parsec - return (Module uid mod_name) - -instance Text Module where - parse = do - uid <- parse - _ <- Parse.char ':' - mod_name <- parse - return (Module uid mod_name) - -instance NFData Module where - rnf (Module uid mod_name) = rnf uid `seq` rnf mod_name diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ModuleReexport.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ModuleReexport.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ModuleReexport.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ModuleReexport.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,67 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.ModuleReexport ( - ModuleReexport(..) 
-) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.ModuleName -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text -import Distribution.Types.PackageName - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse -import Text.PrettyPrint ((<+>)) -import qualified Text.PrettyPrint as Disp - --- ----------------------------------------------------------------------------- --- Module re-exports - -data ModuleReexport = ModuleReexport { - moduleReexportOriginalPackage :: Maybe PackageName, - moduleReexportOriginalName :: ModuleName, - moduleReexportName :: ModuleName - } - deriving (Eq, Generic, Read, Show, Typeable, Data) - -instance Binary ModuleReexport - -instance NFData ModuleReexport where rnf = genericRnf - -instance Pretty ModuleReexport where - pretty (ModuleReexport mpkgname origname newname) = - maybe Disp.empty (\pkgname -> pretty pkgname <<>> Disp.char ':') mpkgname - <<>> pretty origname - <+> if newname == origname - then Disp.empty - else Disp.text "as" <+> pretty newname - -instance Parsec ModuleReexport where - parsec = do - mpkgname <- P.optional (P.try $ parsec <* P.char ':') - origname <- parsec - newname <- P.option origname $ P.try $ do - P.spaces - _ <- P.string "as" - P.spaces - parsec - return (ModuleReexport mpkgname origname newname) - -instance Text ModuleReexport where - parse = do - mpkgname <- Parse.option Nothing $ do - pkgname <- parse - _ <- Parse.char ':' - return (Just pkgname) - origname <- parse - newname <- Parse.option origname $ do - Parse.skipSpaces - _ <- Parse.string "as" - Parse.skipSpaces - parse - return (ModuleReexport mpkgname origname newname) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ModuleRenaming.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ModuleRenaming.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/ModuleRenaming.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/ModuleRenaming.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,140 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.ModuleRenaming ( - ModuleRenaming(..), - interpModuleRenaming, - defaultRenaming, - isDefaultRenaming, -) where - -import Distribution.Compat.Prelude hiding (empty) -import Prelude () - -import Distribution.ModuleName -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text - -import qualified Data.Map as Map -import qualified Data.Set as Set -import qualified Distribution.Compat.CharParsing as P -import Distribution.Compat.ReadP ((<++)) -import qualified Distribution.Compat.ReadP as Parse -import Text.PrettyPrint (hsep, parens, punctuate, text, (<+>), comma) - --- | Renaming applied to the modules provided by a package. --- The boolean indicates whether or not to also include all of the --- original names of modules. Thus, @ModuleRenaming False []@ is --- "don't expose any modules, and @ModuleRenaming True [("Data.Bool", "Bool")]@ --- is, "expose all modules, but also expose @Data.Bool@ as @Bool@". --- If a renaming is omitted you get the 'DefaultRenaming'. --- --- (NB: This is a list not a map so that we can preserve order.) --- -data ModuleRenaming - -- | A module renaming/thinning; e.g., @(A as B, C as C)@ - -- brings @B@ and @C@ into scope. - = ModuleRenaming [(ModuleName, ModuleName)] - -- | The default renaming, bringing all exported modules - -- into scope. - | DefaultRenaming - -- | Hiding renaming, e.g., @hiding (A, B)@, bringing all - -- exported modules into scope except the hidden ones. 
- | HidingRenaming [ModuleName] - deriving (Show, Read, Eq, Ord, Typeable, Data, Generic) - --- | Interpret a 'ModuleRenaming' as a partial map from 'ModuleName' --- to 'ModuleName'. For efficiency, you should partially apply it --- with 'ModuleRenaming' and then reuse it. -interpModuleRenaming :: ModuleRenaming -> ModuleName -> Maybe ModuleName -interpModuleRenaming DefaultRenaming = Just -interpModuleRenaming (ModuleRenaming rns) = - let m = Map.fromList rns - in \k -> Map.lookup k m -interpModuleRenaming (HidingRenaming hs) = - let s = Set.fromList hs - in \k -> if k `Set.member` s then Nothing else Just k - --- | The default renaming, if something is specified in @build-depends@ --- only. -defaultRenaming :: ModuleRenaming -defaultRenaming = DefaultRenaming - --- | Tests if its the default renaming; we can use a more compact syntax --- in 'Distribution.Types.IncludeRenaming.IncludeRenaming' in this case. -isDefaultRenaming :: ModuleRenaming -> Bool -isDefaultRenaming DefaultRenaming = True -isDefaultRenaming _ = False - -instance Binary ModuleRenaming where - -instance NFData ModuleRenaming where rnf = genericRnf - --- NB: parentheses are mandatory, because later we may extend this syntax --- to allow "hiding (A, B)" or other modifier words. -instance Pretty ModuleRenaming where - pretty DefaultRenaming = mempty - pretty (HidingRenaming hides) - = text "hiding" <+> parens (hsep (punctuate comma (map pretty hides))) - pretty (ModuleRenaming rns) - = parens . 
hsep $ punctuate comma (map dispEntry rns) - where dispEntry (orig, new) - | orig == new = pretty orig - | otherwise = pretty orig <+> text "as" <+> pretty new - -instance Parsec ModuleRenaming where - -- NB: try not necessary as the first token is obvious - parsec = P.choice [ parseRename, parseHiding, return DefaultRenaming ] - where - parseRename = do - rns <- P.between (P.char '(') (P.char ')') parseList - P.spaces - return (ModuleRenaming rns) - parseHiding = do - _ <- P.string "hiding" - P.spaces - hides <- P.between (P.char '(') (P.char ')') - (P.sepBy parsec (P.char ',' >> P.spaces)) - return (HidingRenaming hides) - parseList = - P.sepBy parseEntry (P.char ',' >> P.spaces) - parseEntry = do - orig <- parsec - P.spaces - P.option (orig, orig) $ do - _ <- P.string "as" - P.spaces - new <- parsec - P.spaces - return (orig, new) - - - -instance Text ModuleRenaming where - parse = do fmap ModuleRenaming parseRns - <++ parseHidingRenaming - <++ return DefaultRenaming - where parseRns = do - rns <- Parse.between (Parse.char '(') (Parse.char ')') parseList - Parse.skipSpaces - return rns - parseHidingRenaming = do - _ <- Parse.string "hiding" - Parse.skipSpaces - hides <- Parse.between (Parse.char '(') (Parse.char ')') - (Parse.sepBy parse (Parse.char ',' >> Parse.skipSpaces)) - return (HidingRenaming hides) - parseList = - Parse.sepBy parseEntry (Parse.char ',' >> Parse.skipSpaces) - parseEntry :: Parse.ReadP r (ModuleName, ModuleName) - parseEntry = do - orig <- parse - Parse.skipSpaces - (do _ <- Parse.string "as" - Parse.skipSpaces - new <- parse - Parse.skipSpaces - return (orig, new) - <++ - return (orig, orig)) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/MungedPackageId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/MungedPackageId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/MungedPackageId.hs 2018-10-17 15:59:03.000000000 +0000 
+++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/MungedPackageId.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,55 +0,0 @@ -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Types.MungedPackageId - ( MungedPackageId(..) - , computeCompatPackageId - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Version - ( Version, nullVersion ) - -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp -import Distribution.Compat.ReadP -import Distribution.Text -import Distribution.Types.PackageId -import Distribution.Types.UnqualComponentName -import Distribution.Types.MungedPackageName - --- | A simple pair of a 'MungedPackageName' and 'Version'. 'MungedPackageName' is to --- 'MungedPackageId' as 'PackageName' is to 'PackageId'. See 'MungedPackageName' for more --- info. -data MungedPackageId - = MungedPackageId { - -- | The combined package and component name. see documentation for - -- 'MungedPackageName'. - mungedName :: MungedPackageName, - -- | The version of this package / component, eg 1.2 - mungedVersion :: Version - } - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) - -instance Binary MungedPackageId - -instance Text MungedPackageId where - disp (MungedPackageId n v) - | v == nullVersion = disp n -- if no version, don't show version. - | otherwise = disp n <<>> Disp.char '-' <<>> disp v - - parse = do - n <- parse - v <- (Parse.char '-' >> parse) <++ return nullVersion - return (MungedPackageId n v) - -instance NFData MungedPackageId where - rnf (MungedPackageId name version) = rnf name `seq` rnf version - --- | See docs for 'Distribution.Types.MungedPackageName.computeCompatPackageId'. this --- is a thin wrapper around that. 
-computeCompatPackageId :: PackageId -> Maybe UnqualComponentName -> MungedPackageId -computeCompatPackageId (PackageIdentifier pn vr) mb_uqn = MungedPackageId pn' vr - where pn' = computeCompatPackageName pn mb_uqn diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/MungedPackageName.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/MungedPackageName.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/MungedPackageName.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/MungedPackageName.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,141 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Types.MungedPackageName - ( MungedPackageName, unMungedPackageName, mkMungedPackageName - , computeCompatPackageName - , decodeCompatPackageName - ) where - -import Distribution.Compat.Prelude -import Distribution.Utils.ShortText -import Prelude () - -import Distribution.Parsec.Class -import Distribution.ParseUtils -import Distribution.Pretty -import Distribution.Text -import Distribution.Types.PackageName -import Distribution.Types.UnqualComponentName - -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - --- | A combination of a package and component name used in various legacy --- interfaces, chiefly bundled with a version as 'MungedPackageId'. It's generally --- better to use a 'UnitId' to opaquely refer to some compilation/packing unit, --- but that doesn't always work, e.g. where a "name" is needed, in which case --- this can be used as a fallback. --- --- Use 'mkMungedPackageName' and 'unMungedPackageName' to convert from/to a 'String'. 
--- --- @since 2.0.0.2 -newtype MungedPackageName = MungedPackageName ShortText - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) - --- | Convert 'MungedPackageName' to 'String' -unMungedPackageName :: MungedPackageName -> String -unMungedPackageName (MungedPackageName s) = fromShortText s - --- | Construct a 'MungedPackageName' from a 'String' --- --- 'mkMungedPackageName' is the inverse to 'unMungedPackageName' --- --- Note: No validations are performed to ensure that the resulting --- 'MungedPackageName' is valid --- --- @since 2.0.0.2 -mkMungedPackageName :: String -> MungedPackageName -mkMungedPackageName = MungedPackageName . toShortText - --- | 'mkMungedPackageName' --- --- @since 2.0.0.2 -instance IsString MungedPackageName where - fromString = mkMungedPackageName - -instance Binary MungedPackageName - -instance Pretty MungedPackageName where - pretty = Disp.text . unMungedPackageName - -instance Parsec MungedPackageName where - parsec = mkMungedPackageName <$> parsecUnqualComponentName - -instance Text MungedPackageName where - parse = mkMungedPackageName <$> parsePackageName - -instance NFData MungedPackageName where - rnf (MungedPackageName pkg) = rnf pkg - --- | Computes the package name for a library. If this is the public --- library, it will just be the original package name; otherwise, --- it will be a munged package name recording the original package --- name as well as the name of the internal library. --- --- A lot of tooling in the Haskell ecosystem assumes that if something --- is installed to the package database with the package name 'foo', --- then it actually is an entry for the (only public) library in package --- 'foo'. With internal packages, this is not necessarily true: --- a public library as well as arbitrarily many internal libraries may --- come from the same package. 
To prevent tools from getting confused --- in this case, the package name of these internal libraries is munged --- so that they do not conflict the public library proper. A particular --- case where this matters is ghc-pkg: if we don't munge the package --- name, the inplace registration will OVERRIDE a different internal --- library. --- --- We munge into a reserved namespace, "z-", and encode both the --- component name and the package name of an internal library using the --- following format: --- --- compat-pkg-name ::= "z-" package-name "-z-" library-name --- --- where package-name and library-name have "-" ( "z" + ) "-" --- segments encoded by adding an extra "z". --- --- When we have the public library, the compat-pkg-name is just the --- package-name, no surprises there! --- -computeCompatPackageName :: PackageName -> Maybe UnqualComponentName -> MungedPackageName --- First handle the cases where we can just use the original 'PackageName'. --- This is for the PRIMARY library, and it is non-Backpack, or the --- indefinite package for us. 
-computeCompatPackageName pkg_name Nothing - = mkMungedPackageName $ unPackageName pkg_name -computeCompatPackageName pkg_name (Just uqn) - = mkMungedPackageName $ - "z-" ++ zdashcode (unPackageName pkg_name) ++ - "-z-" ++ zdashcode (unUnqualComponentName uqn) - -decodeCompatPackageName :: MungedPackageName -> (PackageName, Maybe UnqualComponentName) -decodeCompatPackageName m = - case unMungedPackageName m of - 'z':'-':rest | [([pn, cn], "")] <- Parse.readP_to_S parseZDashCode rest - -> (mkPackageName pn, Just (mkUnqualComponentName cn)) - s -> (mkPackageName s, Nothing) - -zdashcode :: String -> String -zdashcode s = go s (Nothing :: Maybe Int) [] - where go [] _ r = reverse r - go ('-':z) (Just n) r | n > 0 = go z (Just 0) ('-':'z':r) - go ('-':z) _ r = go z (Just 0) ('-':r) - go ('z':z) (Just n) r = go z (Just (n+1)) ('z':r) - go (c:z) _ r = go z Nothing (c:r) - -parseZDashCode :: Parse.ReadP r [String] -parseZDashCode = do - ns <- Parse.sepBy1 (Parse.many1 (Parse.satisfy (/= '-'))) (Parse.char '-') - Parse.eof - return (go ns) - where - go ns = case break (=="z") ns of - (_, []) -> [paste ns] - (as, "z":bs) -> paste as : go bs - _ -> error "parseZDashCode: go" - unZ :: String -> String - unZ "" = error "parseZDashCode: unZ" - unZ r@('z':zs) | all (=='z') zs = zs - | otherwise = r - unZ r = r - paste :: [String] -> String - paste = intercalate "-" . 
map unZ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/PackageDescription/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/PackageDescription/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/PackageDescription/Lens.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/PackageDescription/Lens.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,228 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RankNTypes #-} -module Distribution.Types.PackageDescription.Lens ( - PackageDescription, - module Distribution.Types.PackageDescription.Lens, - ) where - -import Distribution.Compat.Lens -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Compiler (CompilerFlavor) -import Distribution.License (License) -import Distribution.ModuleName (ModuleName) -import Distribution.Types.Benchmark (Benchmark, benchmarkModules) -import Distribution.Types.Benchmark.Lens (benchmarkName, benchmarkBuildInfo) -import Distribution.Types.BuildInfo (BuildInfo) -import Distribution.Types.BuildType (BuildType) -import Distribution.Types.ComponentName (ComponentName(..)) -import Distribution.Types.Executable (Executable, exeModules) -import Distribution.Types.Executable.Lens (exeName, exeBuildInfo) -import Distribution.Types.ForeignLib (ForeignLib, foreignLibModules) -import Distribution.Types.ForeignLib.Lens (foreignLibName, foreignLibBuildInfo) -import Distribution.Types.Library (Library, explicitLibModules) -import Distribution.Types.Library.Lens (libName, libBuildInfo) -import Distribution.Types.PackageDescription (PackageDescription) -import Distribution.Types.PackageId (PackageIdentifier) -import Distribution.Types.SetupBuildInfo (SetupBuildInfo) -import Distribution.Types.SourceRepo (SourceRepo) -import Distribution.Types.TestSuite (TestSuite, testModules) -import 
Distribution.Types.TestSuite.Lens (testName, testBuildInfo) -import Distribution.Types.UnqualComponentName ( UnqualComponentName ) -import Distribution.Version (Version, VersionRange) - -import qualified Distribution.SPDX as SPDX -import qualified Distribution.Types.PackageDescription as T - -package :: Lens' PackageDescription PackageIdentifier -package f s = fmap (\x -> s { T.package = x }) (f (T.package s)) -{-# INLINE package #-} - -licenseRaw :: Lens' PackageDescription (Either SPDX.License License) -licenseRaw f s = fmap (\x -> s { T.licenseRaw = x }) (f (T.licenseRaw s)) -{-# INLINE licenseRaw #-} - -licenseFiles :: Lens' PackageDescription [String] -licenseFiles f s = fmap (\x -> s { T.licenseFiles = x }) (f (T.licenseFiles s)) -{-# INLINE licenseFiles #-} - -copyright :: Lens' PackageDescription String -copyright f s = fmap (\x -> s { T.copyright = x }) (f (T.copyright s)) -{-# INLINE copyright #-} - -maintainer :: Lens' PackageDescription String -maintainer f s = fmap (\x -> s { T.maintainer = x }) (f (T.maintainer s)) -{-# INLINE maintainer #-} - -author :: Lens' PackageDescription String -author f s = fmap (\x -> s { T.author = x }) (f (T.author s)) -{-# INLINE author #-} - -stability :: Lens' PackageDescription String -stability f s = fmap (\x -> s { T.stability = x }) (f (T.stability s)) -{-# INLINE stability #-} - -testedWith :: Lens' PackageDescription [(CompilerFlavor,VersionRange)] -testedWith f s = fmap (\x -> s { T.testedWith = x }) (f (T.testedWith s)) -{-# INLINE testedWith #-} - -homepage :: Lens' PackageDescription String -homepage f s = fmap (\x -> s { T.homepage = x }) (f (T.homepage s)) -{-# INLINE homepage #-} - -pkgUrl :: Lens' PackageDescription String -pkgUrl f s = fmap (\x -> s { T.pkgUrl = x }) (f (T.pkgUrl s)) -{-# INLINE pkgUrl #-} - -bugReports :: Lens' PackageDescription String -bugReports f s = fmap (\x -> s { T.bugReports = x }) (f (T.bugReports s)) -{-# INLINE bugReports #-} - -sourceRepos :: Lens' PackageDescription 
[SourceRepo] -sourceRepos f s = fmap (\x -> s { T.sourceRepos = x }) (f (T.sourceRepos s)) -{-# INLINE sourceRepos #-} - -synopsis :: Lens' PackageDescription String -synopsis f s = fmap (\x -> s { T.synopsis = x }) (f (T.synopsis s)) -{-# INLINE synopsis #-} - -description :: Lens' PackageDescription String -description f s = fmap (\x -> s { T.description = x }) (f (T.description s)) -{-# INLINE description #-} - -category :: Lens' PackageDescription String -category f s = fmap (\x -> s { T.category = x }) (f (T.category s)) -{-# INLINE category #-} - -customFieldsPD :: Lens' PackageDescription [(String,String)] -customFieldsPD f s = fmap (\x -> s { T.customFieldsPD = x }) (f (T.customFieldsPD s)) -{-# INLINE customFieldsPD #-} - -specVersionRaw :: Lens' PackageDescription (Either Version VersionRange) -specVersionRaw f s = fmap (\x -> s { T.specVersionRaw = x }) (f (T.specVersionRaw s)) -{-# INLINE specVersionRaw #-} - -buildTypeRaw :: Lens' PackageDescription (Maybe BuildType) -buildTypeRaw f s = fmap (\x -> s { T.buildTypeRaw = x }) (f (T.buildTypeRaw s)) -{-# INLINE buildTypeRaw #-} - -setupBuildInfo :: Lens' PackageDescription (Maybe SetupBuildInfo) -setupBuildInfo f s = fmap (\x -> s { T.setupBuildInfo = x }) (f (T.setupBuildInfo s)) -{-# INLINE setupBuildInfo #-} - -library :: Lens' PackageDescription (Maybe Library) -library f s = fmap (\x -> s { T.library = x }) (f (T.library s)) -{-# INLINE library #-} - -subLibraries :: Lens' PackageDescription [Library] -subLibraries f s = fmap (\x -> s { T.subLibraries = x }) (f (T.subLibraries s)) -{-# INLINE subLibraries #-} - -executables :: Lens' PackageDescription [Executable] -executables f s = fmap (\x -> s { T.executables = x }) (f (T.executables s)) -{-# INLINE executables #-} - -foreignLibs :: Lens' PackageDescription [ForeignLib] -foreignLibs f s = fmap (\x -> s { T.foreignLibs = x }) (f (T.foreignLibs s)) -{-# INLINE foreignLibs #-} - -testSuites :: Lens' PackageDescription [TestSuite] -testSuites f s = 
fmap (\x -> s { T.testSuites = x }) (f (T.testSuites s)) -{-# INLINE testSuites #-} - -benchmarks :: Lens' PackageDescription [Benchmark] -benchmarks f s = fmap (\x -> s { T.benchmarks = x }) (f (T.benchmarks s)) -{-# INLINE benchmarks #-} - -dataFiles :: Lens' PackageDescription [FilePath] -dataFiles f s = fmap (\x -> s { T.dataFiles = x }) (f (T.dataFiles s)) -{-# INLINE dataFiles #-} - -dataDir :: Lens' PackageDescription FilePath -dataDir f s = fmap (\x -> s { T.dataDir = x }) (f (T.dataDir s)) -{-# INLINE dataDir #-} - -extraSrcFiles :: Lens' PackageDescription [String] -extraSrcFiles f s = fmap (\x -> s { T.extraSrcFiles = x }) (f (T.extraSrcFiles s)) -{-# INLINE extraSrcFiles #-} - -extraTmpFiles :: Lens' PackageDescription [String] -extraTmpFiles f s = fmap (\x -> s { T.extraTmpFiles = x }) (f (T.extraTmpFiles s)) -{-# INLINE extraTmpFiles #-} - -extraDocFiles :: Lens' PackageDescription [String] -extraDocFiles f s = fmap (\x -> s { T.extraDocFiles = x }) (f (T.extraDocFiles s)) -{-# INLINE extraDocFiles #-} - --- | @since 2.4 -componentModules :: Monoid r => ComponentName -> Getting r PackageDescription [ModuleName] -componentModules cname = case cname of - CLibName -> library . traverse . getting explicitLibModules - CSubLibName name -> - componentModules' name subLibraries (libName . non "") explicitLibModules - CFLibName name -> - componentModules' name foreignLibs foreignLibName foreignLibModules - CExeName name -> - componentModules' name executables exeName exeModules - CTestName name -> - componentModules' name testSuites testName testModules - CBenchName name -> - componentModules' name benchmarks benchmarkName benchmarkModules - where - componentModules' - :: Monoid r - => UnqualComponentName - -> Traversal' PackageDescription [a] - -> Traversal' a UnqualComponentName - -> (a -> [ModuleName]) - -> Getting r PackageDescription [ModuleName] - componentModules' name pdL nameL modules = - pdL - . traverse - . filtered ((== name) . view nameL) - . 
getting modules - - -- This are easily wrongly used, so we have them here locally only. - non :: Eq a => a -> Lens' (Maybe a) a - non x afb s = f <$> afb (fromMaybe x s) - where f y = if x == y then Nothing else Just y - - filtered :: (a -> Bool) -> Traversal' a a - filtered p f s = if p s then f s else pure s - --- | @since 2.4 -componentBuildInfo :: ComponentName -> Traversal' PackageDescription BuildInfo -componentBuildInfo cname = case cname of - CLibName -> - library . traverse . libBuildInfo - CSubLibName name -> - componentBuildInfo' name subLibraries (libName . non "") libBuildInfo - CFLibName name -> - componentBuildInfo' name foreignLibs foreignLibName foreignLibBuildInfo - CExeName name -> - componentBuildInfo' name executables exeName exeBuildInfo - CTestName name -> - componentBuildInfo' name testSuites testName testBuildInfo - CBenchName name -> - componentBuildInfo' name benchmarks benchmarkName benchmarkBuildInfo - where - componentBuildInfo' :: UnqualComponentName - -> Traversal' PackageDescription [a] - -> Traversal' a UnqualComponentName - -> Traversal' a BuildInfo - -> Traversal' PackageDescription BuildInfo - componentBuildInfo' name pdL nameL biL = - pdL - . traverse - . filtered ((== name) . view nameL) - . biL - - -- This are easily wrongly used, so we have them here locally only. - -- We have to repeat these, as everything is exported from this module. 
- non :: Eq a => a -> Lens' (Maybe a) a - non x afb s = f <$> afb (fromMaybe x s) - where f y = if x == y then Nothing else Just y - - filtered :: (a -> Bool) -> Traversal' a a - filtered p f s = if p s then f s else pure s diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/PackageDescription.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/PackageDescription.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/PackageDescription.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/PackageDescription.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,492 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Types.PackageDescription --- Copyright : Isaac Jones 2003-2005 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- This defines the data structure for the @.cabal@ file format. There are --- several parts to this structure. It has top level info and then 'Library', --- 'Executable', 'TestSuite', and 'Benchmark' sections each of which have --- associated 'BuildInfo' data that's used to build the library, exe, test, or --- benchmark. To further complicate things there is both a 'PackageDescription' --- and a 'GenericPackageDescription'. This distinction relates to cabal --- configurations. When we initially read a @.cabal@ file we get a --- 'GenericPackageDescription' which has all the conditional sections. --- Before actually building a package we have to decide --- on each conditional. Once we've done that we get a 'PackageDescription'. --- It was done this way initially to avoid breaking too much stuff when the --- feature was introduced. 
It could probably do with being rationalised at some --- point to make it simpler. - -module Distribution.Types.PackageDescription ( - PackageDescription(..), - specVersion, - specVersion', - license, - license', - descCabalVersion, - buildType, - emptyPackageDescription, - hasPublicLib, - hasLibs, - allLibraries, - withLib, - hasExes, - withExe, - hasTests, - withTest, - hasBenchmarks, - withBenchmark, - hasForeignLibs, - withForeignLib, - allBuildInfo, - enabledBuildInfos, - allBuildDepends, - enabledBuildDepends, - updatePackageDescription, - pkgComponents, - pkgBuildableComponents, - enabledComponents, - lookupComponent, - getComponent, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Control.Monad ((<=<)) - --- lens -import qualified Distribution.Types.BuildInfo.Lens as L -import Distribution.Types.Library -import Distribution.Types.TestSuite -import Distribution.Types.Executable -import Distribution.Types.Benchmark -import Distribution.Types.ForeignLib - -import Distribution.Types.Component -import Distribution.Types.ComponentRequestedSpec -import Distribution.Types.Dependency -import Distribution.Types.PackageId -import Distribution.Types.ComponentName -import Distribution.Types.PackageName -import Distribution.Types.UnqualComponentName -import Distribution.Types.SetupBuildInfo -import Distribution.Types.BuildInfo -import Distribution.Types.BuildType -import Distribution.Types.SourceRepo -import Distribution.Types.HookedBuildInfo - -import Distribution.Compiler -import Distribution.License -import Distribution.Package -import Distribution.Version - -import qualified Distribution.SPDX as SPDX - --- ----------------------------------------------------------------------------- --- The PackageDescription type - --- | This data type is the internal representation of the file @pkg.cabal@. 
--- It contains two kinds of information about the package: information --- which is needed for all packages, such as the package name and version, and --- information which is needed for the simple build system only, such as --- the compiler options and library name. --- -data PackageDescription - = PackageDescription { - -- the following are required by all packages: - - -- | The version of the Cabal spec that this package description uses. - -- For historical reasons this is specified with a version range but - -- only ranges of the form @>= v@ make sense. We are in the process of - -- transitioning to specifying just a single version, not a range. - -- See also 'specVersion'. - specVersionRaw :: Either Version VersionRange, - package :: PackageIdentifier, - licenseRaw :: Either SPDX.License License, - licenseFiles :: [FilePath], - copyright :: String, - maintainer :: String, - author :: String, - stability :: String, - testedWith :: [(CompilerFlavor,VersionRange)], - homepage :: String, - pkgUrl :: String, - bugReports :: String, - sourceRepos :: [SourceRepo], - synopsis :: String, -- ^A one-line summary of this package - description :: String, -- ^A more verbose description of this package - category :: String, - customFieldsPD :: [(String,String)], -- ^Custom fields starting - -- with x-, stored in a - -- simple assoc-list. - - -- | The original @build-type@ value as parsed from the - -- @.cabal@ file without defaulting. See also 'buildType'. 
- -- - -- @since 2.2 - buildTypeRaw :: Maybe BuildType, - setupBuildInfo :: Maybe SetupBuildInfo, - -- components - library :: Maybe Library, - subLibraries :: [Library], - executables :: [Executable], - foreignLibs :: [ForeignLib], - testSuites :: [TestSuite], - benchmarks :: [Benchmark], - -- files - dataFiles :: [FilePath], - dataDir :: FilePath, - extraSrcFiles :: [FilePath], - extraTmpFiles :: [FilePath], - extraDocFiles :: [FilePath] - } - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary PackageDescription - -instance NFData PackageDescription where rnf = genericRnf - -instance Package PackageDescription where - packageId = package - --- | The version of the Cabal spec that this package should be interpreted --- against. --- --- Historically we used a version range but we are switching to using a single --- version. Currently we accept either. This function converts into a single --- version by ignoring upper bounds in the version range. --- -specVersion :: PackageDescription -> Version -specVersion = specVersion' . specVersionRaw - --- | --- --- @since 2.2.0.0 -specVersion' :: Either Version VersionRange -> Version -specVersion' (Left version) = version -specVersion' (Right versionRange) = case asVersionIntervals versionRange of - [] -> mkVersion [0] - ((LowerBound version _, _):_) -> version - --- | The SPDX 'LicenseExpression' of the package. --- --- @since 2.2.0.0 -license :: PackageDescription -> SPDX.License -license = license' . licenseRaw - --- | See 'license'. --- --- @since 2.2.0.0 -license' :: Either SPDX.License License -> SPDX.License -license' = either id licenseToSPDX - --- | The range of versions of the Cabal tools that this package is intended to --- work with. --- --- This function is deprecated and should not be used for new purposes, only to --- support old packages that rely on the old interpretation. 
--- -descCabalVersion :: PackageDescription -> VersionRange -descCabalVersion pkg = case specVersionRaw pkg of - Left version -> orLaterVersion version - Right versionRange -> versionRange -{-# DEPRECATED descCabalVersion "Use specVersion instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} - --- | The effective @build-type@ after applying defaulting rules. --- --- The original @build-type@ value parsed is stored in the --- 'buildTypeRaw' field. However, the @build-type@ field is optional --- and can therefore be empty in which case we need to compute the --- /effective/ @build-type@. This function implements the following --- defaulting rules: --- --- * For @cabal-version:2.0@ and below, default to the @Custom@ --- build-type unconditionally. --- --- * Otherwise, if a @custom-setup@ stanza is defined, default to --- the @Custom@ build-type; else default to @Simple@ build-type. --- --- @since 2.2 -buildType :: PackageDescription -> BuildType -buildType pkg - | specVersion pkg >= mkVersion [2,1] - = fromMaybe newDefault (buildTypeRaw pkg) - | otherwise -- cabal-version < 2.1 - = fromMaybe Custom (buildTypeRaw pkg) - where - newDefault | isNothing (setupBuildInfo pkg) = Simple - | otherwise = Custom - -emptyPackageDescription :: PackageDescription -emptyPackageDescription - = PackageDescription { - package = PackageIdentifier (mkPackageName "") - nullVersion, - licenseRaw = Right UnspecifiedLicense, -- TODO: - licenseFiles = [], - specVersionRaw = Right anyVersion, - buildTypeRaw = Nothing, - copyright = "", - maintainer = "", - author = "", - stability = "", - testedWith = [], - homepage = "", - pkgUrl = "", - bugReports = "", - sourceRepos = [], - synopsis = "", - description = "", - category = "", - customFieldsPD = [], - setupBuildInfo = Nothing, - library = Nothing, - subLibraries = [], - foreignLibs = [], - executables = [], - testSuites = [], - benchmarks = [], - dataFiles = [], - dataDir = "", - extraSrcFiles = [], - extraTmpFiles = [], - 
extraDocFiles = [] - } - --- --------------------------------------------------------------------------- --- The Library type - --- | Does this package have a buildable PUBLIC library? -hasPublicLib :: PackageDescription -> Bool -hasPublicLib p = - case library p of - Just lib -> buildable (libBuildInfo lib) - Nothing -> False - --- | Does this package have any libraries? -hasLibs :: PackageDescription -> Bool -hasLibs p = any (buildable . libBuildInfo) (allLibraries p) - -allLibraries :: PackageDescription -> [Library] -allLibraries p = maybeToList (library p) ++ subLibraries p - --- | If the package description has a buildable library section, --- call the given function with the library build info as argument. --- You probably want 'withLibLBI' if you have a 'LocalBuildInfo', --- see the note in --- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components" --- for more information. -withLib :: PackageDescription -> (Library -> IO ()) -> IO () -withLib pkg_descr f = - sequence_ [f lib | lib <- allLibraries pkg_descr, buildable (libBuildInfo lib)] - --- --------------------------------------------------------------------------- --- The Executable type - --- |does this package have any executables? -hasExes :: PackageDescription -> Bool -hasExes p = any (buildable . buildInfo) (executables p) - --- | Perform the action on each buildable 'Executable' in the package --- description. You probably want 'withExeLBI' if you have a --- 'LocalBuildInfo', see the note in --- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components" --- for more information. -withExe :: PackageDescription -> (Executable -> IO ()) -> IO () -withExe pkg_descr f = - sequence_ [f exe | exe <- executables pkg_descr, buildable (buildInfo exe)] - --- --------------------------------------------------------------------------- --- The TestSuite type - --- | Does this package have any test suites? -hasTests :: PackageDescription -> Bool -hasTests = any (buildable . 
testBuildInfo) . testSuites - --- | Perform an action on each buildable 'TestSuite' in a package. --- You probably want 'withTestLBI' if you have a 'LocalBuildInfo', see the note in --- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components" --- for more information. - -withTest :: PackageDescription -> (TestSuite -> IO ()) -> IO () -withTest pkg_descr f = - sequence_ [ f test | test <- testSuites pkg_descr, buildable (testBuildInfo test) ] - --- --------------------------------------------------------------------------- --- The Benchmark type - --- | Does this package have any benchmarks? -hasBenchmarks :: PackageDescription -> Bool -hasBenchmarks = any (buildable . benchmarkBuildInfo) . benchmarks - --- | Perform an action on each buildable 'Benchmark' in a package. --- You probably want 'withBenchLBI' if you have a 'LocalBuildInfo', see the note in --- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components" --- for more information. - -withBenchmark :: PackageDescription -> (Benchmark -> IO ()) -> IO () -withBenchmark pkg_descr f = - sequence_ [f bench | bench <- benchmarks pkg_descr, buildable (benchmarkBuildInfo bench)] - --- --------------------------------------------------------------------------- --- The ForeignLib type - --- | Does this package have any foreign libraries? -hasForeignLibs :: PackageDescription -> Bool -hasForeignLibs p = any (buildable . foreignLibBuildInfo) (foreignLibs p) - --- | Perform the action on each buildable 'ForeignLib' in the package --- description. -withForeignLib :: PackageDescription -> (ForeignLib -> IO ()) -> IO () -withForeignLib pkg_descr f = - sequence_ [ f flib - | flib <- foreignLibs pkg_descr - , buildable (foreignLibBuildInfo flib) - ] - --- --------------------------------------------------------------------------- --- The BuildInfo type - --- | All 'BuildInfo' in the 'PackageDescription': --- libraries, executables, test-suites and benchmarks. 
--- --- Useful for implementing package checks. -allBuildInfo :: PackageDescription -> [BuildInfo] -allBuildInfo pkg_descr = [ bi | lib <- allLibraries pkg_descr - , let bi = libBuildInfo lib ] - ++ [ bi | flib <- foreignLibs pkg_descr - , let bi = foreignLibBuildInfo flib ] - ++ [ bi | exe <- executables pkg_descr - , let bi = buildInfo exe ] - ++ [ bi | tst <- testSuites pkg_descr - , let bi = testBuildInfo tst ] - ++ [ bi | tst <- benchmarks pkg_descr - , let bi = benchmarkBuildInfo tst ] - --- | Return all of the 'BuildInfo's of enabled components, i.e., all of --- the ones that would be built if you run @./Setup build@. -enabledBuildInfos :: PackageDescription -> ComponentRequestedSpec -> [BuildInfo] -enabledBuildInfos pkg enabled = - [ componentBuildInfo comp - | comp <- enabledComponents pkg enabled ] - - --- ------------------------------------------------------------ --- * Utils --- ------------------------------------------------------------ - --- | Get the combined build-depends entries of all components. -allBuildDepends :: PackageDescription -> [Dependency] -allBuildDepends = targetBuildDepends <=< allBuildInfo - --- | Get the combined build-depends entries of all enabled components, per the --- given request spec. 
-enabledBuildDepends :: PackageDescription -> ComponentRequestedSpec -> [Dependency] -enabledBuildDepends spec pd = targetBuildDepends =<< enabledBuildInfos spec pd - - -updatePackageDescription :: HookedBuildInfo -> PackageDescription -> PackageDescription -updatePackageDescription (mb_lib_bi, exe_bi) p - = p{ executables = updateExecutables exe_bi (executables p) - , library = updateLibrary mb_lib_bi (library p) } - where - updateLibrary :: Maybe BuildInfo -> Maybe Library -> Maybe Library - updateLibrary (Just bi) (Just lib) = Just (lib{libBuildInfo = bi `mappend` libBuildInfo lib}) - updateLibrary Nothing mb_lib = mb_lib - updateLibrary (Just _) Nothing = Nothing - - updateExecutables :: [(UnqualComponentName, BuildInfo)] -- ^[(exeName, new buildinfo)] - -> [Executable] -- ^list of executables to update - -> [Executable] -- ^list with exeNames updated - updateExecutables exe_bi' executables' = foldr updateExecutable executables' exe_bi' - - updateExecutable :: (UnqualComponentName, BuildInfo) -- ^(exeName, new buildinfo) - -> [Executable] -- ^list of executables to update - -> [Executable] -- ^list with exeName updated - updateExecutable _ [] = [] - updateExecutable exe_bi'@(name,bi) (exe:exes) - | exeName exe == name = exe{buildInfo = bi `mappend` buildInfo exe} : exes - | otherwise = exe : updateExecutable exe_bi' exes - --- ----------------------------------------------------------------------------- --- Source-representation of buildable components - --- | All the components in the package. --- -pkgComponents :: PackageDescription -> [Component] -pkgComponents pkg = - [ CLib lib | lib <- allLibraries pkg ] - ++ [ CFLib flib | flib <- foreignLibs pkg ] - ++ [ CExe exe | exe <- executables pkg ] - ++ [ CTest tst | tst <- testSuites pkg ] - ++ [ CBench bm | bm <- benchmarks pkg ] - --- | A list of all components in the package that are buildable, --- i.e., were not marked with @buildable: False@. 
This does NOT --- indicate if we are actually going to build the component, --- see 'enabledComponents' instead. --- --- @since 2.0.0.2 --- -pkgBuildableComponents :: PackageDescription -> [Component] -pkgBuildableComponents = filter componentBuildable . pkgComponents - --- | A list of all components in the package that are enabled. --- --- @since 2.0.0.2 --- -enabledComponents :: PackageDescription -> ComponentRequestedSpec -> [Component] -enabledComponents pkg enabled = filter (componentEnabled enabled) $ pkgBuildableComponents pkg - -lookupComponent :: PackageDescription -> ComponentName -> Maybe Component -lookupComponent pkg CLibName = fmap CLib (library pkg) -lookupComponent pkg (CSubLibName name) = - fmap CLib $ find ((Just name ==) . libName) (subLibraries pkg) -lookupComponent pkg (CFLibName name) = - fmap CFLib $ find ((name ==) . foreignLibName) (foreignLibs pkg) -lookupComponent pkg (CExeName name) = - fmap CExe $ find ((name ==) . exeName) (executables pkg) -lookupComponent pkg (CTestName name) = - fmap CTest $ find ((name ==) . testName) (testSuites pkg) -lookupComponent pkg (CBenchName name) = - fmap CBench $ find ((name ==) . benchmarkName) (benchmarks pkg) - -getComponent :: PackageDescription -> ComponentName -> Component -getComponent pkg cname = - case lookupComponent pkg cname of - Just cpnt -> cpnt - Nothing -> missingComponent - where - missingComponent = - error $ "internal error: the package description contains no " - ++ "component corresponding to " ++ show cname - --- ----------------------------------------------------------------------------- --- Traversal Instances - -instance L.HasBuildInfos PackageDescription where - traverseBuildInfos f (PackageDescription a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 a14 a15 a16 a17 a18 a19 - x1 x2 x3 x4 x5 x6 - a20 a21 a22 a23 a24) = - PackageDescription a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 a14 a15 a16 a17 a18 a19 - <$> (traverse . L.buildInfo) f x1 -- library - <*> (traverse . 
L.buildInfo) f x2 -- sub libraries - <*> (traverse . L.buildInfo) f x3 -- executables - <*> (traverse . L.buildInfo) f x4 -- foreign libs - <*> (traverse . L.buildInfo) f x5 -- test suites - <*> (traverse . L.buildInfo) f x6 -- benchmarks - <*> pure a20 -- data files - <*> pure a21 -- data dir - <*> pure a22 -- exta src files - <*> pure a23 -- extra temp files - <*> pure a24 -- extra doc files diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/PackageId/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/PackageId/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/PackageId/Lens.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/PackageId/Lens.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -module Distribution.Types.PackageId.Lens ( - PackageIdentifier, - module Distribution.Types.PackageId.Lens, - ) where - -import Distribution.Compat.Lens -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Types.PackageId (PackageIdentifier) -import Distribution.Types.PackageName (PackageName) -import Distribution.Version (Version) - -import qualified Distribution.Types.PackageId as T - -pkgName :: Lens' PackageIdentifier PackageName -pkgName f s = fmap (\x -> s { T.pkgName = x }) (f (T.pkgName s)) -{-# INLINE pkgName #-} - -pkgVersion :: Lens' PackageIdentifier Version -pkgVersion f s = fmap (\x -> s { T.pkgVersion = x }) (f (T.pkgVersion s)) -{-# INLINE pkgVersion #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/PackageId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/PackageId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/PackageId.hs 2018-10-17 15:59:03.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/PackageId.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,54 +0,0 @@ -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Types.PackageId - ( PackageIdentifier(..) - , PackageId - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Version - ( Version, nullVersion ) - -import qualified Distribution.Compat.ReadP as Parse -import qualified Distribution.Compat.CharParsing as P -import qualified Text.PrettyPrint as Disp -import Distribution.Compat.ReadP -import Distribution.Text -import Distribution.Parsec.Class - ( Parsec(..) ) -import Distribution.Pretty -import Distribution.Types.PackageName - --- | Type alias so we can use the shorter name PackageId. -type PackageId = PackageIdentifier - --- | The name and version of a package. -data PackageIdentifier - = PackageIdentifier { - pkgName :: PackageName, -- ^The name of this package, eg. foo - pkgVersion :: Version -- ^the version of this package, eg 1.2 - } - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) - -instance Binary PackageIdentifier - -instance Pretty PackageIdentifier where - pretty (PackageIdentifier n v) - | v == nullVersion = pretty n -- if no version, don't show version. 
- | otherwise = pretty n <<>> Disp.char '-' <<>> pretty v - -instance Text PackageIdentifier where - parse = do - n <- parse - v <- (Parse.char '-' >> parse) <++ return nullVersion - return (PackageIdentifier n v) - -instance Parsec PackageIdentifier where - parsec = PackageIdentifier <$> - parsec <*> (P.char '-' *> parsec <|> pure nullVersion) - -instance NFData PackageIdentifier where - rnf (PackageIdentifier name version) = rnf name `seq` rnf version diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/PackageName.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/PackageName.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/PackageName.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/PackageName.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,62 +0,0 @@ -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Types.PackageName - ( PackageName, unPackageName, mkPackageName - ) where - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.Utils.ShortText - -import qualified Text.PrettyPrint as Disp -import Distribution.ParseUtils -import Distribution.Text -import Distribution.Pretty -import Distribution.Parsec.Class - --- | A package name. --- --- Use 'mkPackageName' and 'unPackageName' to convert from/to a --- 'String'. 
--- --- This type is opaque since @Cabal-2.0@ --- --- @since 2.0.0.2 -newtype PackageName = PackageName ShortText - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) - --- | Convert 'PackageName' to 'String' -unPackageName :: PackageName -> String -unPackageName (PackageName s) = fromShortText s - --- | Construct a 'PackageName' from a 'String' --- --- 'mkPackageName' is the inverse to 'unPackageName' --- --- Note: No validations are performed to ensure that the resulting --- 'PackageName' is valid --- --- @since 2.0.0.2 -mkPackageName :: String -> PackageName -mkPackageName = PackageName . toShortText - --- | 'mkPackageName' --- --- @since 2.0.0.2 -instance IsString PackageName where - fromString = mkPackageName - -instance Binary PackageName - -instance Pretty PackageName where - pretty = Disp.text . unPackageName - -instance Parsec PackageName where - parsec = mkPackageName <$> parsecUnqualComponentName - -instance Text PackageName where - parse = mkPackageName <$> parsePackageName - -instance NFData PackageName where - rnf (PackageName pkg) = rnf pkg diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/PkgconfigDependency.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/PkgconfigDependency.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/PkgconfigDependency.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/PkgconfigDependency.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Types.PkgconfigDependency - ( PkgconfigDependency(..) 
- ) where - -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Version (VersionRange, anyVersion) - -import Distribution.Types.PkgconfigName - -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text - -import qualified Distribution.Compat.CharParsing as P -import Distribution.Compat.ReadP ((<++)) -import qualified Distribution.Compat.ReadP as Parse -import Text.PrettyPrint ((<+>)) - --- | Describes a dependency on a pkg-config library --- --- @since 2.0.0.2 -data PkgconfigDependency = PkgconfigDependency - PkgconfigName - VersionRange - deriving (Generic, Read, Show, Eq, Typeable, Data) - -instance Binary PkgconfigDependency -instance NFData PkgconfigDependency where rnf = genericRnf - -instance Pretty PkgconfigDependency where - pretty (PkgconfigDependency name ver) = - pretty name <+> pretty ver - -instance Parsec PkgconfigDependency where - parsec = do - name <- parsec - P.spaces - verRange <- parsec <|> pure anyVersion - pure $ PkgconfigDependency name verRange - -instance Text PkgconfigDependency where - parse = do name <- parse - Parse.skipSpaces - ver <- parse <++ return anyVersion - Parse.skipSpaces - return $ PkgconfigDependency name ver diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/PkgconfigName.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/PkgconfigName.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/PkgconfigName.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/PkgconfigName.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,67 +0,0 @@ -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Types.PkgconfigName - ( PkgconfigName, unPkgconfigName, mkPkgconfigName - ) where - -import Prelude () -import Distribution.Compat.Prelude 
-import Distribution.Utils.ShortText - -import Distribution.Pretty -import Distribution.Parsec.Class -import Distribution.Text - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - --- | A pkg-config library name --- --- This is parsed as any valid argument to the pkg-config utility. --- --- @since 2.0.0.2 -newtype PkgconfigName = PkgconfigName ShortText - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) - --- | Convert 'PkgconfigName' to 'String' --- --- @since 2.0.0.2 -unPkgconfigName :: PkgconfigName -> String -unPkgconfigName (PkgconfigName s) = fromShortText s - --- | Construct a 'PkgconfigName' from a 'String' --- --- 'mkPkgconfigName' is the inverse to 'unPkgconfigName' --- --- Note: No validations are performed to ensure that the resulting --- 'PkgconfigName' is valid --- --- @since 2.0.0.2 -mkPkgconfigName :: String -> PkgconfigName -mkPkgconfigName = PkgconfigName . toShortText - --- | 'mkPkgconfigName' --- --- @since 2.0.0.2 -instance IsString PkgconfigName where - fromString = mkPkgconfigName - -instance Binary PkgconfigName - --- pkg-config allows versions and other letters in package names, eg --- "gtk+-2.0" is a valid pkg-config package _name_. It then has a package --- version number like 2.10.13 -instance Pretty PkgconfigName where - pretty = Disp.text . 
unPkgconfigName - -instance Parsec PkgconfigName where - parsec = mkPkgconfigName <$> P.munch1 (\c -> isAlphaNum c || c `elem` "+-._") - -instance Text PkgconfigName where - parse = mkPkgconfigName - <$> Parse.munch1 (\c -> isAlphaNum c || c `elem` "+-._") - -instance NFData PkgconfigName where - rnf (PkgconfigName pkg) = rnf pkg diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/SetupBuildInfo/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/SetupBuildInfo/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/SetupBuildInfo/Lens.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/SetupBuildInfo/Lens.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -module Distribution.Types.SetupBuildInfo.Lens ( - SetupBuildInfo, - module Distribution.Types.SetupBuildInfo.Lens, - ) where - -import Distribution.Compat.Lens -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Types.Dependency (Dependency) -import Distribution.Types.SetupBuildInfo (SetupBuildInfo) - -import qualified Distribution.Types.SetupBuildInfo as T - -setupDepends :: Lens' SetupBuildInfo [Dependency] -setupDepends f s = fmap (\x -> s { T.setupDepends = x }) (f (T.setupDepends s)) -{-# INLINE setupDepends #-} - -defaultSetupDepends :: Lens' SetupBuildInfo Bool -defaultSetupDepends f s = fmap (\x -> s { T.defaultSetupDepends = x }) (f (T.defaultSetupDepends s)) -{-# INLINE defaultSetupDepends #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/SetupBuildInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/SetupBuildInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/SetupBuildInfo.hs 2018-10-17 15:59:03.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/SetupBuildInfo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,41 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.SetupBuildInfo ( - SetupBuildInfo(..) -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.Dependency - --- --------------------------------------------------------------------------- --- The SetupBuildInfo type - --- One can see this as a very cut-down version of BuildInfo below. --- To keep things simple for tools that compile Setup.hs we limit the --- options authors can specify to just Haskell package dependencies. - -data SetupBuildInfo = SetupBuildInfo - { setupDepends :: [Dependency] - , defaultSetupDepends :: Bool - -- ^ Is this a default 'custom-setup' section added by the cabal-install - -- code (as opposed to user-provided)? This field is only used - -- internally, and doesn't correspond to anything in the .cabal - -- file. See #3199. 
- } - deriving (Generic, Show, Eq, Read, Typeable, Data) - -instance Binary SetupBuildInfo - -instance NFData SetupBuildInfo where rnf = genericRnf - -instance Monoid SetupBuildInfo where - mempty = SetupBuildInfo [] False - mappend = (<>) - -instance Semigroup SetupBuildInfo where - a <> b = SetupBuildInfo - (setupDepends a <> setupDepends b) - (defaultSetupDepends a || defaultSetupDepends b) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/SourceRepo/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/SourceRepo/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/SourceRepo/Lens.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/SourceRepo/Lens.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,39 +0,0 @@ -module Distribution.Types.SourceRepo.Lens ( - T.SourceRepo, - module Distribution.Types.SourceRepo.Lens, - ) where - -import Prelude() -import Distribution.Compat.Prelude -import Distribution.Compat.Lens - -import Distribution.Types.SourceRepo (SourceRepo, RepoKind, RepoType) -import qualified Distribution.Types.SourceRepo as T - -repoKind :: Lens' SourceRepo RepoKind -repoKind f s = fmap (\x -> s { T.repoKind = x }) (f (T.repoKind s)) -{-# INLINE repoKind #-} - -repoType :: Lens' SourceRepo (Maybe RepoType) -repoType f s = fmap (\x -> s { T.repoType = x }) (f (T.repoType s)) -{-# INLINE repoType #-} - -repoLocation :: Lens' SourceRepo (Maybe String) -repoLocation f s = fmap (\x -> s { T.repoLocation = x }) (f (T.repoLocation s)) -{-# INLINE repoLocation #-} - -repoModule :: Lens' SourceRepo (Maybe String) -repoModule f s = fmap (\x -> s { T.repoModule = x }) (f (T.repoModule s)) -{-# INLINE repoModule #-} - -repoBranch :: Lens' SourceRepo (Maybe String) -repoBranch f s = fmap (\x -> s { T.repoBranch = x }) (f (T.repoBranch s)) -{-# INLINE repoBranch #-} - -repoTag :: Lens' 
SourceRepo (Maybe String) -repoTag f s = fmap (\x -> s { T.repoTag = x }) (f (T.repoTag s)) -{-# INLINE repoTag #-} - -repoSubdir :: Lens' SourceRepo (Maybe FilePath) -repoSubdir f s = fmap (\x -> s { T.repoSubdir = x }) (f (T.repoSubdir s)) -{-# INLINE repoSubdir #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/SourceRepo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/SourceRepo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/SourceRepo.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/SourceRepo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,185 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.SourceRepo ( - SourceRepo(..), - RepoKind(..), - RepoType(..), - knownRepoTypes, - emptySourceRepo, - classifyRepoType, - classifyRepoKind, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Utils.Generic (lowercase) - -import Distribution.Pretty -import Distribution.Parsec.Class -import Distribution.Text - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - --- ------------------------------------------------------------ --- * Source repos --- ------------------------------------------------------------ - --- | Information about the source revision control system for a package. --- --- When specifying a repo it is useful to know the meaning or intention of the --- information as doing so enables automation. There are two obvious common --- purposes: one is to find the repo for the latest development version, the --- other is to find the repo for this specific release. The 'ReopKind' --- specifies which one we mean (or another custom one). 
--- --- A package can specify one or the other kind or both. Most will specify just --- a head repo but some may want to specify a repo to reconstruct the sources --- for this package release. --- --- The required information is the 'RepoType' which tells us if it's using --- 'Darcs', 'Git' for example. The 'repoLocation' and other details are --- interpreted according to the repo type. --- -data SourceRepo = SourceRepo { - -- | The kind of repo. This field is required. - repoKind :: RepoKind, - - -- | The type of the source repository system for this repo, eg 'Darcs' or - -- 'Git'. This field is required. - repoType :: Maybe RepoType, - - -- | The location of the repository. For most 'RepoType's this is a URL. - -- This field is required. - repoLocation :: Maybe String, - - -- | 'CVS' can put multiple \"modules\" on one server and requires a - -- module name in addition to the location to identify a particular repo. - -- Logically this is part of the location but unfortunately has to be - -- specified separately. This field is required for the 'CVS' 'RepoType' and - -- should not be given otherwise. - repoModule :: Maybe String, - - -- | The name or identifier of the branch, if any. Many source control - -- systems have the notion of multiple branches in a repo that exist in the - -- same location. For example 'Git' and 'CVS' use this while systems like - -- 'Darcs' use different locations for different branches. This field is - -- optional but should be used if necessary to identify the sources, - -- especially for the 'RepoThis' repo kind. - repoBranch :: Maybe String, - - -- | The tag identify a particular state of the repository. This should be - -- given for the 'RepoThis' repo kind and not for 'RepoHead' kind. - -- - repoTag :: Maybe String, - - -- | Some repositories contain multiple projects in different subdirectories - -- This field specifies the subdirectory where this packages sources can be - -- found, eg the subdirectory containing the @.cabal@ file. 
It is interpreted - -- relative to the root of the repository. This field is optional. If not - -- given the default is \".\" ie no subdirectory. - repoSubdir :: Maybe FilePath -} - deriving (Eq, Ord, Generic, Read, Show, Typeable, Data) - -emptySourceRepo :: RepoKind -> SourceRepo -emptySourceRepo kind = SourceRepo - { repoKind = kind - , repoType = Nothing - , repoLocation = Nothing - , repoModule = Nothing - , repoBranch = Nothing - , repoTag = Nothing - , repoSubdir = Nothing - } - -instance Binary SourceRepo - -instance NFData SourceRepo where rnf = genericRnf - --- | What this repo info is for, what it represents. --- -data RepoKind = - -- | The repository for the \"head\" or development version of the project. - -- This repo is where we should track the latest development activity or - -- the usual repo people should get to contribute patches. - RepoHead - - -- | The repository containing the sources for this exact package version - -- or release. For this kind of repo a tag should be given to give enough - -- information to re-create the exact sources. - | RepoThis - - | RepoKindUnknown String - deriving (Eq, Generic, Ord, Read, Show, Typeable, Data) - -instance Binary RepoKind - -instance NFData RepoKind where rnf = genericRnf - --- | An enumeration of common source control systems. The fields used in the --- 'SourceRepo' depend on the type of repo. The tools and methods used to --- obtain and track the repo depend on the repo type. 
--- -data RepoType = Darcs | Git | SVN | CVS - | Mercurial | GnuArch | Bazaar | Monotone - | OtherRepoType String - deriving (Eq, Generic, Ord, Read, Show, Typeable, Data) - -instance Binary RepoType - -instance NFData RepoType where rnf = genericRnf - -knownRepoTypes :: [RepoType] -knownRepoTypes = [Darcs, Git, SVN, CVS - ,Mercurial, GnuArch, Bazaar, Monotone] - -repoTypeAliases :: RepoType -> [String] -repoTypeAliases Bazaar = ["bzr"] -repoTypeAliases Mercurial = ["hg"] -repoTypeAliases GnuArch = ["arch"] -repoTypeAliases _ = [] - -instance Pretty RepoKind where - pretty RepoHead = Disp.text "head" - pretty RepoThis = Disp.text "this" - pretty (RepoKindUnknown other) = Disp.text other - -instance Parsec RepoKind where - parsec = classifyRepoKind <$> P.munch1 isIdent - -instance Text RepoKind where - parse = fmap classifyRepoKind ident - -classifyRepoKind :: String -> RepoKind -classifyRepoKind name = case lowercase name of - "head" -> RepoHead - "this" -> RepoThis - _ -> RepoKindUnknown name - -instance Pretty RepoType where - pretty (OtherRepoType other) = Disp.text other - pretty other = Disp.text (lowercase (show other)) - -instance Parsec RepoType where - parsec = classifyRepoType <$> P.munch1 isIdent - -instance Text RepoType where - parse = fmap classifyRepoType ident - -classifyRepoType :: String -> RepoType -classifyRepoType s = - fromMaybe (OtherRepoType s) $ lookup (lowercase s) repoTypeMap - where - repoTypeMap = [ (name, repoType') - | repoType' <- knownRepoTypes - , name <- display repoType' : repoTypeAliases repoType' ] - -ident :: Parse.ReadP r String -ident = Parse.munch1 isIdent - -isIdent :: Char -> Bool -isIdent c = isAlphaNum c || c == '_' || c == '-' diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/TargetInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/TargetInfo.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/TargetInfo.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/TargetInfo.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,33 +0,0 @@ -{-# LANGUAGE TypeFamilies #-} -module Distribution.Types.TargetInfo ( - TargetInfo(..) -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.ComponentLocalBuildInfo -import Distribution.Types.Component -import Distribution.Types.UnitId - -import Distribution.Compat.Graph (IsNode(..)) - --- | The 'TargetInfo' contains all the information necessary to build a --- specific target (e.g., component/module/file) in a package. In --- principle, one can get the 'Component' from a --- 'ComponentLocalBuildInfo' and 'LocalBuildInfo', but it is much more --- convenient to have the component in hand. -data TargetInfo = TargetInfo { - targetCLBI :: ComponentLocalBuildInfo, - targetComponent :: Component - -- TODO: BuildTargets supporting parsing these is dumb, - -- we don't have support for compiling single modules or - -- file paths. Accommodating it now is premature - -- generalization. Figure it out later. - -- targetSub :: Maybe (Either ModuleName FilePath) - } - -instance IsNode TargetInfo where - type Key TargetInfo = UnitId - nodeKey = nodeKey . targetCLBI - nodeNeighbors = nodeNeighbors . 
targetCLBI diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/TestSuite/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/TestSuite/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/TestSuite/Lens.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/TestSuite/Lens.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -module Distribution.Types.TestSuite.Lens ( - TestSuite, - module Distribution.Types.TestSuite.Lens, - ) where - -import Distribution.Compat.Lens -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.Types.BuildInfo (BuildInfo) -import Distribution.Types.TestSuite (TestSuite) -import Distribution.Types.TestSuiteInterface (TestSuiteInterface) -import Distribution.Types.UnqualComponentName (UnqualComponentName) - -import qualified Distribution.Types.TestSuite as T - -testName :: Lens' TestSuite UnqualComponentName -testName f s = fmap (\x -> s { T.testName = x }) (f (T.testName s)) -{-# INLINE testName #-} - -testInterface :: Lens' TestSuite TestSuiteInterface -testInterface f s = fmap (\x -> s { T.testInterface = x }) (f (T.testInterface s)) -{-# INLINE testInterface #-} - -testBuildInfo :: Lens' TestSuite BuildInfo -testBuildInfo f s = fmap (\x -> s { T.testBuildInfo = x }) (f (T.testBuildInfo s)) -{-# INLINE testBuildInfo #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/TestSuite.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/TestSuite.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/TestSuite.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/TestSuite.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,82 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} 
-{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.TestSuite ( - TestSuite(..), - emptyTestSuite, - testType, - testModules, - testModulesAutogen -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.BuildInfo -import Distribution.Types.TestType -import Distribution.Types.TestSuiteInterface -import Distribution.Types.UnqualComponentName - -import Distribution.ModuleName - -import qualified Distribution.Types.BuildInfo.Lens as L - --- | A \"test-suite\" stanza in a cabal file. --- -data TestSuite = TestSuite { - testName :: UnqualComponentName, - testInterface :: TestSuiteInterface, - testBuildInfo :: BuildInfo - } - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance L.HasBuildInfo TestSuite where - buildInfo f l = (\x -> l { testBuildInfo = x }) <$> f (testBuildInfo l) - -instance Binary TestSuite - -instance NFData TestSuite where rnf = genericRnf - -instance Monoid TestSuite where - mempty = TestSuite { - testName = mempty, - testInterface = mempty, - testBuildInfo = mempty - } - mappend = (<>) - -instance Semigroup TestSuite where - a <> b = TestSuite { - testName = combine' testName, - testInterface = combine testInterface, - testBuildInfo = combine testBuildInfo - } - where combine field = field a `mappend` field b - combine' field = case ( unUnqualComponentName $ field a - , unUnqualComponentName $ field b) of - ("", _) -> field b - (_, "") -> field a - (x, y) -> error $ "Ambiguous values for test field: '" - ++ x ++ "' and '" ++ y ++ "'" - -emptyTestSuite :: TestSuite -emptyTestSuite = mempty - - -testType :: TestSuite -> TestType -testType test = case testInterface test of - TestSuiteExeV10 ver _ -> TestTypeExe ver - TestSuiteLibV09 ver _ -> TestTypeLib ver - TestSuiteUnsupported testtype -> testtype - --- | Get all the module names from a test suite. 
-testModules :: TestSuite -> [ModuleName] -testModules test = (case testInterface test of - TestSuiteLibV09 _ m -> [m] - _ -> []) - ++ otherModules (testBuildInfo test) - --- | Get all the auto generated module names from a test suite. --- This are a subset of 'testModules'. -testModulesAutogen :: TestSuite -> [ModuleName] -testModulesAutogen test = autogenModules (testBuildInfo test) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/TestSuiteInterface.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/TestSuiteInterface.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/TestSuiteInterface.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/TestSuiteInterface.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,51 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.TestSuiteInterface ( - TestSuiteInterface(..), -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Types.TestType -import Distribution.ModuleName -import Distribution.Version - --- | The test suite interfaces that are currently defined. Each test suite must --- specify which interface it supports. --- --- More interfaces may be defined in future, either new revisions or totally --- new interfaces. --- -data TestSuiteInterface = - - -- | Test interface \"exitcode-stdio-1.0\". The test-suite takes the form - -- of an executable. It returns a zero exit code for success, non-zero for - -- failure. The stdout and stderr channels may be logged. It takes no - -- command line parameters and nothing on stdin. - -- - TestSuiteExeV10 Version FilePath - - -- | Test interface \"detailed-0.9\". The test-suite takes the form of a - -- library containing a designated module that exports \"tests :: [Test]\". 
- -- - | TestSuiteLibV09 Version ModuleName - - -- | A test suite that does not conform to one of the above interfaces for - -- the given reason (e.g. unknown test type). - -- - | TestSuiteUnsupported TestType - deriving (Eq, Generic, Read, Show, Typeable, Data) - -instance Binary TestSuiteInterface - -instance NFData TestSuiteInterface where rnf = genericRnf - -instance Monoid TestSuiteInterface where - mempty = TestSuiteUnsupported (TestTypeUnknown mempty nullVersion) - mappend = (<>) - -instance Semigroup TestSuiteInterface where - a <> (TestSuiteUnsupported _) = a - _ <> b = b diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/TestType.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/TestType.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/TestType.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/TestType.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,48 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Types.TestType ( - TestType(..), - knownTestTypes, -) where - -import Distribution.Compat.Prelude -import Distribution.Version -import Prelude () - -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text -import Text.PrettyPrint (char, text) - --- | The \"test-type\" field in the test suite stanza. --- -data TestType = TestTypeExe Version -- ^ \"type: exitcode-stdio-x.y\" - | TestTypeLib Version -- ^ \"type: detailed-x.y\" - | TestTypeUnknown String Version -- ^ Some unknown test type e.g. 
\"type: foo\" - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary TestType - -instance NFData TestType where rnf = genericRnf - -knownTestTypes :: [TestType] -knownTestTypes = [ TestTypeExe (mkVersion [1,0]) - , TestTypeLib (mkVersion [0,9]) ] - -instance Pretty TestType where - pretty (TestTypeExe ver) = text "exitcode-stdio-" <<>> pretty ver - pretty (TestTypeLib ver) = text "detailed-" <<>> pretty ver - pretty (TestTypeUnknown name ver) = text name <<>> char '-' <<>> pretty ver - -instance Parsec TestType where - parsec = parsecStandard $ \ver name -> case name of - "exitcode-stdio" -> TestTypeExe ver - "detailed" -> TestTypeLib ver - _ -> TestTypeUnknown name ver - -instance Text TestType where - parse = stdParse $ \ver name -> case name of - "exitcode-stdio" -> TestTypeExe ver - "detailed" -> TestTypeLib ver - _ -> TestTypeUnknown name ver diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/UnitId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/UnitId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/UnitId.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/UnitId.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,134 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE FlexibleContexts #-} - -module Distribution.Types.UnitId - ( UnitId, unUnitId, mkUnitId - , DefUnitId - , unsafeMkDefUnitId - , unDefUnitId - , newSimpleUnitId - , mkLegacyUnitId - , getHSLibraryName - , InstalledPackageId -- backwards compat - ) where - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.Utils.ShortText - -import qualified Distribution.Compat.ReadP as Parse -import qualified Distribution.Compat.CharParsing as P -import Distribution.Pretty -import Distribution.Parsec.Class 
-import Distribution.Text -import Distribution.Types.ComponentId -import Distribution.Types.PackageId - -import Text.PrettyPrint (text) - --- | A unit identifier identifies a (possibly instantiated) --- package/component that can be installed the installed package --- database. There are several types of components that can be --- installed: --- --- * A traditional library with no holes, so that 'unitIdHash' --- is @Nothing@. In the absence of Backpack, 'UnitId' --- is the same as a 'ComponentId'. --- --- * An indefinite, Backpack library with holes. In this case, --- 'unitIdHash' is still @Nothing@, but in the install, --- there are only interfaces, no compiled objects. --- --- * An instantiated Backpack library with all the holes --- filled in. 'unitIdHash' is a @Just@ a hash of the --- instantiating mapping. --- --- A unit is a component plus the additional information on how the --- holes are filled in. Thus there is a one to many relationship: for a --- particular component there are many different ways of filling in the --- holes, and each different combination is a unit (and has a separate --- 'UnitId'). --- --- 'UnitId' is distinct from 'OpenUnitId', in that it is always --- installed, whereas 'OpenUnitId' are intermediate unit identities --- that arise during mixin linking, and don't necessarily correspond --- to any actually installed unit. Since the mapping is not actually --- recorded in a 'UnitId', you can't actually substitute over them --- (but you can substitute over 'OpenUnitId'). See also --- "Distribution.Backpack.FullUnitId" for a mechanism for expanding an --- instantiated 'UnitId' to retrieve its mapping. --- --- Backwards compatibility note: if you need to get the string --- representation of a UnitId to pass, e.g., as a @-package-id@ --- flag, use the 'display' function, which will work on all --- versions of Cabal. 
--- -newtype UnitId = UnitId ShortText - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data, NFData) - -{-# DEPRECATED InstalledPackageId "Use UnitId instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} -type InstalledPackageId = UnitId - -instance Binary UnitId - --- | The textual format for 'UnitId' coincides with the format --- GHC accepts for @-package-id@. --- -instance Pretty UnitId where - pretty = text . unUnitId - --- | The textual format for 'UnitId' coincides with the format --- GHC accepts for @-package-id@. --- -instance Parsec UnitId where - parsec = mkUnitId <$> P.munch1 (\c -> isAlphaNum c || c `elem` "-_.+") - -instance Text UnitId where - parse = mkUnitId <$> Parse.munch1 (\c -> isAlphaNum c || c `elem` "-_.+") - --- | If you need backwards compatibility, consider using 'display' --- instead, which is supported by all versions of Cabal. --- -unUnitId :: UnitId -> String -unUnitId (UnitId s) = fromShortText s - -mkUnitId :: String -> UnitId -mkUnitId = UnitId . toShortText - --- | 'mkUnitId' --- --- @since 2.0.0.2 -instance IsString UnitId where - fromString = mkUnitId - --- | Create a unit identity with no associated hash directly --- from a 'ComponentId'. -newSimpleUnitId :: ComponentId -> UnitId -newSimpleUnitId = mkUnitId . unComponentId - --- | Make an old-style UnitId from a package identifier. --- Assumed to be for the public library -mkLegacyUnitId :: PackageId -> UnitId -mkLegacyUnitId = newSimpleUnitId . mkComponentId . display - --- | Returns library name prefixed with HS, suitable for filenames -getHSLibraryName :: UnitId -> String -getHSLibraryName uid = "HS" ++ display uid - --- | A 'UnitId' for a definite package. The 'DefUnitId' invariant says --- that a 'UnitId' identified this way is definite; i.e., it has no --- unfilled holes. 
-newtype DefUnitId = DefUnitId { unDefUnitId :: UnitId } - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data, Binary, NFData, Pretty, Text) - --- Workaround for a GHC 8.0.1 bug, see --- https://github.com/haskell/cabal/issues/4793#issuecomment-334258288 -instance Parsec DefUnitId where - parsec = DefUnitId <$> parsec - --- | Unsafely create a 'DefUnitId' from a 'UnitId'. Your responsibility --- is to ensure that the 'DefUnitId' invariant holds. -unsafeMkDefUnitId :: UnitId -> DefUnitId -unsafeMkDefUnitId = DefUnitId diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/UnqualComponentName.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/UnqualComponentName.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/UnqualComponentName.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/UnqualComponentName.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,89 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -module Distribution.Types.UnqualComponentName - ( UnqualComponentName, unUnqualComponentName, mkUnqualComponentName - , packageNameToUnqualComponentName, unqualComponentNameToPackageName - ) where - -import Distribution.Compat.Prelude -import Distribution.Utils.ShortText -import Prelude () - -import Distribution.Parsec.Class -import Distribution.ParseUtils (parsePackageName) -import Distribution.Pretty -import Distribution.Text -import Distribution.Types.PackageName - --- | An unqualified component name, for any kind of component. --- --- This is distinguished from a 'ComponentName' and 'ComponentId'. The former --- also states which of a library, executable, etc the name refers too. The --- later uniquely identifiers a component and its closure. 
--- --- @since 2.0.0.2 -newtype UnqualComponentName = UnqualComponentName ShortText - deriving (Generic, Read, Show, Eq, Ord, Typeable, Data, - Semigroup, Monoid) -- TODO: bad enabler of bad monoids - --- | Convert 'UnqualComponentName' to 'String' --- --- @since 2.0.0.2 -unUnqualComponentName :: UnqualComponentName -> String -unUnqualComponentName (UnqualComponentName s) = fromShortText s - --- | Construct a 'UnqualComponentName' from a 'String' --- --- 'mkUnqualComponentName' is the inverse to 'unUnqualComponentName' --- --- Note: No validations are performed to ensure that the resulting --- 'UnqualComponentName' is valid --- --- @since 2.0.0.2 -mkUnqualComponentName :: String -> UnqualComponentName -mkUnqualComponentName = UnqualComponentName . toShortText - --- | 'mkUnqualComponentName' --- --- @since 2.0.0.2 -instance IsString UnqualComponentName where - fromString = mkUnqualComponentName - -instance Binary UnqualComponentName - -instance Pretty UnqualComponentName where - pretty = showToken . unUnqualComponentName - -instance Parsec UnqualComponentName where - parsec = mkUnqualComponentName <$> parsecUnqualComponentName - -instance Text UnqualComponentName where - parse = mkUnqualComponentName <$> parsePackageName - -instance NFData UnqualComponentName where - rnf (UnqualComponentName pkg) = rnf pkg - --- TODO avoid String round trip with these PackageName <-> --- UnqualComponentName converters. - --- | Converts a package name to an unqualified component name --- --- Useful in legacy situations where a package name may refer to an internal --- component, if one is defined with that name. --- --- @since 2.0.0.2 -packageNameToUnqualComponentName :: PackageName -> UnqualComponentName -packageNameToUnqualComponentName = mkUnqualComponentName . unPackageName - --- | Converts an unqualified component name to a package name --- --- `packageNameToUnqualComponentName` is the inverse of --- `unqualComponentNameToPackageName`. 
--- --- Useful in legacy situations where a package name may refer to an internal --- component, if one is defined with that name. --- --- @since 2.0.0.2 -unqualComponentNameToPackageName :: UnqualComponentName -> PackageName -unqualComponentNameToPackageName = mkPackageName . unUnqualComponentName diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Version.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Version.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/Version.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/Version.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,255 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Types.Version ( - -- * Package versions - Version, - mkVersion, - mkVersion', - versionNumbers, - nullVersion, - alterVersion, - version0, - - -- ** Backwards compatibility - showVersion, - - -- * Internal - validVersion, - ) where - -import Data.Bits (shiftL, shiftR, (.&.), (.|.)) -import Distribution.Compat.Prelude -import Prelude () - -import Distribution.CabalSpecVersion -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text - -import qualified Data.Version as Base -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp -import qualified Text.Read as Read - --- | A 'Version' represents the version of a software entity. --- --- Instances of 'Eq' and 'Ord' are provided, which gives exact --- equality and lexicographic ordering of the version number --- components (i.e. 2.1 > 2.0, 1.2.3 > 1.2.2, etc.). --- --- This type is opaque and distinct from the 'Base.Version' type in --- "Data.Version" since @Cabal-2.0@. 
The difference extends to the --- 'Binary' instance using a different (and more compact) encoding. --- --- @since 2.0.0.2 -data Version = PV0 {-# UNPACK #-} !Word64 - | PV1 !Int [Int] - -- NOTE: If a version fits into the packed Word64 - -- representation (i.e. at most four version components - -- which all fall into the [0..0xfffe] range), then PV0 - -- MUST be used. This is essential for the 'Eq' instance - -- to work. - deriving (Data,Eq,Generic,Typeable) - -instance Ord Version where - compare (PV0 x) (PV0 y) = compare x y - compare (PV1 x xs) (PV1 y ys) = case compare x y of - EQ -> compare xs ys - c -> c - compare (PV0 w) (PV1 y ys) = case compare x y of - EQ -> compare [x2,x3,x4] ys - c -> c - where - x = fromIntegral ((w `shiftR` 48) .&. 0xffff) - 1 - x2 = fromIntegral ((w `shiftR` 32) .&. 0xffff) - 1 - x3 = fromIntegral ((w `shiftR` 16) .&. 0xffff) - 1 - x4 = fromIntegral (w .&. 0xffff) - 1 - compare (PV1 x xs) (PV0 w) = case compare x y of - EQ -> compare xs [y2,y3,y4] - c -> c - where - y = fromIntegral ((w `shiftR` 48) .&. 0xffff) - 1 - y2 = fromIntegral ((w `shiftR` 32) .&. 0xffff) - 1 - y3 = fromIntegral ((w `shiftR` 16) .&. 0xffff) - 1 - y4 = fromIntegral (w .&. 0xffff) - 1 - -instance Show Version where - showsPrec d v = showParen (d > 10) - $ showString "mkVersion " - . showsPrec 11 (versionNumbers v) - -instance Read Version where - readPrec = Read.parens $ do - Read.Ident "mkVersion" <- Read.lexP - v <- Read.step Read.readPrec - return (mkVersion v) - -instance Binary Version - -instance NFData Version where - rnf (PV0 _) = () - rnf (PV1 _ ns) = rnf ns - -instance Pretty Version where - pretty ver - = Disp.hcat (Disp.punctuate (Disp.char '.') - (map Disp.int $ versionNumbers ver)) - -instance Parsec Version where - parsec = do - digit <- digitParser <$> askCabalSpecVersion - mkVersion <$> P.sepBy1 digit (P.char '.') <* tags - where - digitParser v - | v >= CabalSpecV2_0 = P.integral - | otherwise = (some d >>= toNumber) P. 
"non-leading-zero integral" - where - toNumber :: CabalParsing m => [Int] -> m Int - toNumber [0] = return 0 - toNumber xs@(0:_) = do - parsecWarning PWTVersionLeadingZeros "Version digit with leading zero. Use cabal-version: 2.0 or later to write such versions. For more information see https://github.com/haskell/cabal/issues/5092" - return $ foldl' (\a b -> a * 10 + b) 0 xs - toNumber xs = return $ foldl' (\a b -> a * 10 + b) 0 xs - - d :: P.CharParsing m => m Int - d = f <$> P.satisfyRange '0' '9' - f c = ord c - ord '0' - - tags = do - ts <- many $ P.char '-' *> some (P.satisfy isAlphaNum) - case ts of - [] -> pure () - (_ : _) -> parsecWarning PWTVersionTag "version with tags" - -instance Text Version where - parse = do - branch <- Parse.sepBy1 parseNat (Parse.char '.') - -- allow but ignore tags: - _tags <- Parse.many (Parse.char '-' >> Parse.munch1 isAlphaNum) - return (mkVersion branch) - where - parseNat = read `fmap` Parse.munch1 isDigit - --- | Construct 'Version' from list of version number components. --- --- For instance, @mkVersion [3,2,1]@ constructs a 'Version' --- representing the version @3.2.1@. --- --- All version components must be non-negative. @mkVersion []@ --- currently represents the special /null/ version; see also 'nullVersion'. --- --- @since 2.0.0.2 -mkVersion :: [Int] -> Version --- TODO: add validity check; disallow 'mkVersion []' (we have --- 'nullVersion' for that) -mkVersion [] = nullVersion -mkVersion (v1:[]) - | inWord16VerRep1 v1 = PV0 (mkWord64VerRep1 v1) - | otherwise = PV1 v1 [] - where - inWord16VerRep1 x1 = inWord16 (x1 .|. (x1+1)) - mkWord64VerRep1 y1 = mkWord64VerRep (y1+1) 0 0 0 - -mkVersion (v1:vs@(v2:[])) - | inWord16VerRep2 v1 v2 = PV0 (mkWord64VerRep2 v1 v2) - | otherwise = PV1 v1 vs - where - inWord16VerRep2 x1 x2 = inWord16 (x1 .|. (x1+1) - .|. x2 .|. 
(x2+1)) - mkWord64VerRep2 y1 y2 = mkWord64VerRep (y1+1) (y2+1) 0 0 - -mkVersion (v1:vs@(v2:v3:[])) - | inWord16VerRep3 v1 v2 v3 = PV0 (mkWord64VerRep3 v1 v2 v3) - | otherwise = PV1 v1 vs - where - inWord16VerRep3 x1 x2 x3 = inWord16 (x1 .|. (x1+1) - .|. x2 .|. (x2+1) - .|. x3 .|. (x3+1)) - mkWord64VerRep3 y1 y2 y3 = mkWord64VerRep (y1+1) (y2+1) (y3+1) 0 - -mkVersion (v1:vs@(v2:v3:v4:[])) - | inWord16VerRep4 v1 v2 v3 v4 = PV0 (mkWord64VerRep4 v1 v2 v3 v4) - | otherwise = PV1 v1 vs - where - inWord16VerRep4 x1 x2 x3 x4 = inWord16 (x1 .|. (x1+1) - .|. x2 .|. (x2+1) - .|. x3 .|. (x3+1) - .|. x4 .|. (x4+1)) - mkWord64VerRep4 y1 y2 y3 y4 = mkWord64VerRep (y1+1) (y2+1) (y3+1) (y4+1) - -mkVersion (v1:vs) = PV1 v1 vs - --- | Version 0. A lower bound of 'Version'. --- --- @since 2.2 -version0 :: Version -version0 = mkVersion [0] - -{-# INLINE mkWord64VerRep #-} -mkWord64VerRep :: Int -> Int -> Int -> Int -> Word64 -mkWord64VerRep v1 v2 v3 v4 = - (fromIntegral v1 `shiftL` 48) - .|. (fromIntegral v2 `shiftL` 32) - .|. (fromIntegral v3 `shiftL` 16) - .|. fromIntegral v4 - -{-# INLINE inWord16 #-} -inWord16 :: Int -> Bool -inWord16 x = (fromIntegral x :: Word) <= 0xffff - --- | Variant of 'mkVersion' which converts a "Data.Version" --- 'Base.Version' into Cabal's 'Version' type. --- --- @since 2.0.0.2 -mkVersion' :: Base.Version -> Version -mkVersion' = mkVersion . Base.versionBranch - --- | Unpack 'Version' into list of version number components. --- --- This is the inverse to 'mkVersion', so the following holds: --- --- > (versionNumbers . mkVersion) vs == vs --- --- @since 2.0.0.2 -versionNumbers :: Version -> [Int] -versionNumbers (PV1 n ns) = n:ns -versionNumbers (PV0 w) - | v1 < 0 = [] - | v2 < 0 = [v1] - | v3 < 0 = [v1,v2] - | v4 < 0 = [v1,v2,v3] - | otherwise = [v1,v2,v3,v4] - where - v1 = fromIntegral ((w `shiftR` 48) .&. 0xffff) - 1 - v2 = fromIntegral ((w `shiftR` 32) .&. 0xffff) - 1 - v3 = fromIntegral ((w `shiftR` 16) .&. 0xffff) - 1 - v4 = fromIntegral (w .&. 
0xffff) - 1 - - --- | Constant representing the special /null/ 'Version' --- --- The 'nullVersion' compares (via 'Ord') as less than every proper --- 'Version' value. --- --- @since 2.0.0.2 -nullVersion :: Version --- TODO: at some point, 'mkVersion' may disallow creating /null/ --- 'Version's -nullVersion = PV0 0 - --- | Apply function to list of version number components --- --- > alterVersion f == mkVersion . f . versionNumbers --- --- @since 2.0.0.2 -alterVersion :: ([Int] -> [Int]) -> Version -> Version -alterVersion f = mkVersion . f . versionNumbers - --- internal helper -validVersion :: Version -> Bool -validVersion v = v /= nullVersion && all (>=0) (versionNumbers v) - -showVersion :: Version -> String -showVersion = prettyShow -{-# DEPRECATED showVersion "Use prettyShow. This function will be removed in Cabal-3.0 (estimated Oct 2018)" #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/VersionInterval.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/VersionInterval.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/VersionInterval.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/VersionInterval.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,361 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -module Distribution.Types.VersionInterval ( - -- * Version intervals - VersionIntervals, - toVersionIntervals, - fromVersionIntervals, - withinIntervals, - versionIntervals, - mkVersionIntervals, - unionVersionIntervals, - intersectVersionIntervals, - invertVersionIntervals, - relaxLastInterval, - relaxHeadInterval, - - -- * Version intervals view - asVersionIntervals, - VersionInterval, - LowerBound(..), - UpperBound(..), - Bound(..), - ) where - -import Prelude () -import Distribution.Compat.Prelude -import Control.Exception (assert) - -import Distribution.Types.Version -import 
Distribution.Types.VersionRange - -------------------------------------------------------------------------------- --- VersionRange -------------------------------------------------------------------------------- - --- | View a 'VersionRange' as a union of intervals. --- --- This provides a canonical view of the semantics of a 'VersionRange' as --- opposed to the syntax of the expression used to define it. For the syntactic --- view use 'foldVersionRange'. --- --- Each interval is non-empty. The sequence is in increasing order and no --- intervals overlap or touch. Therefore only the first and last can be --- unbounded. The sequence can be empty if the range is empty --- (e.g. a range expression like @< 1 && > 2@). --- --- Other checks are trivial to implement using this view. For example: --- --- > isNoVersion vr | [] <- asVersionIntervals vr = True --- > | otherwise = False --- --- > isSpecificVersion vr --- > | [(LowerBound v InclusiveBound --- > ,UpperBound v' InclusiveBound)] <- asVersionIntervals vr --- > , v == v' = Just v --- > | otherwise = Nothing --- -asVersionIntervals :: VersionRange -> [VersionInterval] -asVersionIntervals = versionIntervals . toVersionIntervals - - -------------------------------------------------------------------------------- --- VersionInterval -------------------------------------------------------------------------------- - --- | A complementary representation of a 'VersionRange'. Instead of a boolean --- version predicate it uses an increasing sequence of non-overlapping, --- non-empty intervals. --- --- The key point is that this representation gives a canonical representation --- for the semantics of 'VersionRange's. This makes it easier to check things --- like whether a version range is empty, covers all versions, or requires a --- certain minimum or maximum version. It also makes it easy to check equality --- or containment. 
It also makes it easier to identify \'simple\' version --- predicates for translation into foreign packaging systems that do not --- support complex version range expressions. --- -newtype VersionIntervals = VersionIntervals [VersionInterval] - deriving (Eq, Show, Typeable) - --- | Inspect the list of version intervals. --- -versionIntervals :: VersionIntervals -> [VersionInterval] -versionIntervals (VersionIntervals is) = is - -type VersionInterval = (LowerBound, UpperBound) -data LowerBound = LowerBound Version !Bound deriving (Eq, Show) -data UpperBound = NoUpperBound | UpperBound Version !Bound deriving (Eq, Show) -data Bound = ExclusiveBound | InclusiveBound deriving (Eq, Show) - -minLowerBound :: LowerBound -minLowerBound = LowerBound (mkVersion [0]) InclusiveBound - -isVersion0 :: Version -> Bool -isVersion0 = (==) version0 - -instance Ord LowerBound where - LowerBound ver bound <= LowerBound ver' bound' = case compare ver ver' of - LT -> True - EQ -> not (bound == ExclusiveBound && bound' == InclusiveBound) - GT -> False - -instance Ord UpperBound where - _ <= NoUpperBound = True - NoUpperBound <= UpperBound _ _ = False - UpperBound ver bound <= UpperBound ver' bound' = case compare ver ver' of - LT -> True - EQ -> not (bound == InclusiveBound && bound' == ExclusiveBound) - GT -> False - -invariant :: VersionIntervals -> Bool -invariant (VersionIntervals intervals) = all validInterval intervals - && all doesNotTouch' adjacentIntervals - where - doesNotTouch' :: (VersionInterval, VersionInterval) -> Bool - doesNotTouch' ((_,u), (l',_)) = doesNotTouch u l' - - adjacentIntervals :: [(VersionInterval, VersionInterval)] - adjacentIntervals - | null intervals = [] - | otherwise = zip intervals (tail intervals) - -checkInvariant :: VersionIntervals -> VersionIntervals -checkInvariant is = assert (invariant is) is - --- | Directly construct a 'VersionIntervals' from a list of intervals. --- --- In @Cabal-2.2@ the 'Maybe' is dropped from the result type. 
--- -mkVersionIntervals :: [VersionInterval] -> VersionIntervals -mkVersionIntervals intervals - | invariant (VersionIntervals intervals) = VersionIntervals intervals - | otherwise - = checkInvariant - . foldl' (flip insertInterval) (VersionIntervals []) - . filter validInterval - $ intervals - -insertInterval :: VersionInterval -> VersionIntervals -> VersionIntervals -insertInterval i is = unionVersionIntervals (VersionIntervals [i]) is - -validInterval :: (LowerBound, UpperBound) -> Bool -validInterval i@(l, u) = validLower l && validUpper u && nonEmpty i - where - validLower (LowerBound v _) = validVersion v - validUpper NoUpperBound = True - validUpper (UpperBound v _) = validVersion v - --- Check an interval is non-empty --- -nonEmpty :: VersionInterval -> Bool -nonEmpty (_, NoUpperBound ) = True -nonEmpty (LowerBound l lb, UpperBound u ub) = - (l < u) || (l == u && lb == InclusiveBound && ub == InclusiveBound) - --- Check an upper bound does not intersect, or even touch a lower bound: --- --- ---| or ---) but not ---] or ---) or ---] --- |--- (--- (--- [--- [--- --- -doesNotTouch :: UpperBound -> LowerBound -> Bool -doesNotTouch NoUpperBound _ = False -doesNotTouch (UpperBound u ub) (LowerBound l lb) = - u < l - || (u == l && ub == ExclusiveBound && lb == ExclusiveBound) - --- | Check an upper bound does not intersect a lower bound: --- --- ---| or ---) or ---] or ---) but not ---] --- |--- (--- (--- [--- [--- --- -doesNotIntersect :: UpperBound -> LowerBound -> Bool -doesNotIntersect NoUpperBound _ = False -doesNotIntersect (UpperBound u ub) (LowerBound l lb) = - u < l - || (u == l && not (ub == InclusiveBound && lb == InclusiveBound)) - --- | Test if a version falls within the version intervals. --- --- It exists mostly for completeness and testing. 
It satisfies the following --- properties: --- --- > withinIntervals v (toVersionIntervals vr) = withinRange v vr --- > withinIntervals v ivs = withinRange v (fromVersionIntervals ivs) --- -withinIntervals :: Version -> VersionIntervals -> Bool -withinIntervals v (VersionIntervals intervals) = any withinInterval intervals - where - withinInterval (lowerBound, upperBound) = withinLower lowerBound - && withinUpper upperBound - withinLower (LowerBound v' ExclusiveBound) = v' < v - withinLower (LowerBound v' InclusiveBound) = v' <= v - - withinUpper NoUpperBound = True - withinUpper (UpperBound v' ExclusiveBound) = v' > v - withinUpper (UpperBound v' InclusiveBound) = v' >= v - --- | Convert a 'VersionRange' to a sequence of version intervals. --- -toVersionIntervals :: VersionRange -> VersionIntervals -toVersionIntervals = foldVersionRange - ( chkIvl (minLowerBound, NoUpperBound)) - (\v -> chkIvl (LowerBound v InclusiveBound, UpperBound v InclusiveBound)) - (\v -> chkIvl (LowerBound v ExclusiveBound, NoUpperBound)) - (\v -> if isVersion0 v then VersionIntervals [] else - chkIvl (minLowerBound, UpperBound v ExclusiveBound)) - unionVersionIntervals - intersectVersionIntervals - where - chkIvl interval = checkInvariant (VersionIntervals [interval]) - --- | Convert a 'VersionIntervals' value back into a 'VersionRange' expression --- representing the version intervals. 
--- -fromVersionIntervals :: VersionIntervals -> VersionRange -fromVersionIntervals (VersionIntervals []) = noVersion -fromVersionIntervals (VersionIntervals intervals) = - foldr1 unionVersionRanges [ interval l u | (l, u) <- intervals ] - - where - interval (LowerBound v InclusiveBound) - (UpperBound v' InclusiveBound) | v == v' - = thisVersion v - interval (LowerBound v InclusiveBound) - (UpperBound v' ExclusiveBound) | isWildcardRange v v' - = withinVersion v - interval l u = lowerBound l `intersectVersionRanges'` upperBound u - - lowerBound (LowerBound v InclusiveBound) - | isVersion0 v = Nothing - | otherwise = Just (orLaterVersion v) - lowerBound (LowerBound v ExclusiveBound) = Just (laterVersion v) - - upperBound NoUpperBound = Nothing - upperBound (UpperBound v InclusiveBound) = Just (orEarlierVersion v) - upperBound (UpperBound v ExclusiveBound) = Just (earlierVersion v) - - intersectVersionRanges' Nothing Nothing = anyVersion - intersectVersionRanges' (Just vr) Nothing = vr - intersectVersionRanges' Nothing (Just vr) = vr - intersectVersionRanges' (Just vr) (Just vr') = intersectVersionRanges vr vr' - -unionVersionIntervals :: VersionIntervals -> VersionIntervals - -> VersionIntervals -unionVersionIntervals (VersionIntervals is0) (VersionIntervals is'0) = - checkInvariant (VersionIntervals (union is0 is'0)) - where - union is [] = is - union [] is' = is' - union (i:is) (i':is') = case unionInterval i i' of - Left Nothing -> i : union is (i' :is') - Left (Just i'') -> union is (i'':is') - Right Nothing -> i' : union (i :is) is' - Right (Just i'') -> union (i'':is) is' - -unionInterval :: VersionInterval -> VersionInterval - -> Either (Maybe VersionInterval) (Maybe VersionInterval) -unionInterval (lower , upper ) (lower', upper') - - -- Non-intersecting intervals with the left interval ending first - | upper `doesNotTouch` lower' = Left Nothing - - -- Non-intersecting intervals with the right interval first - | upper' `doesNotTouch` lower = Right Nothing - 
- -- Complete or partial overlap, with the left interval ending first - | upper <= upper' = lowerBound `seq` - Left (Just (lowerBound, upper')) - - -- Complete or partial overlap, with the left interval ending first - | otherwise = lowerBound `seq` - Right (Just (lowerBound, upper)) - where - lowerBound = min lower lower' - -intersectVersionIntervals :: VersionIntervals -> VersionIntervals - -> VersionIntervals -intersectVersionIntervals (VersionIntervals is0) (VersionIntervals is'0) = - checkInvariant (VersionIntervals (intersect is0 is'0)) - where - intersect _ [] = [] - intersect [] _ = [] - intersect (i:is) (i':is') = case intersectInterval i i' of - Left Nothing -> intersect is (i':is') - Left (Just i'') -> i'' : intersect is (i':is') - Right Nothing -> intersect (i:is) is' - Right (Just i'') -> i'' : intersect (i:is) is' - -intersectInterval :: VersionInterval -> VersionInterval - -> Either (Maybe VersionInterval) (Maybe VersionInterval) -intersectInterval (lower , upper ) (lower', upper') - - -- Non-intersecting intervals with the left interval ending first - | upper `doesNotIntersect` lower' = Left Nothing - - -- Non-intersecting intervals with the right interval first - | upper' `doesNotIntersect` lower = Right Nothing - - -- Complete or partial overlap, with the left interval ending first - | upper <= upper' = lowerBound `seq` - Left (Just (lowerBound, upper)) - - -- Complete or partial overlap, with the right interval ending first - | otherwise = lowerBound `seq` - Right (Just (lowerBound, upper')) - where - lowerBound = max lower lower' - -invertVersionIntervals :: VersionIntervals - -> VersionIntervals -invertVersionIntervals (VersionIntervals xs) = - case xs of - -- Empty interval set - [] -> VersionIntervals [(noLowerBound, NoUpperBound)] - -- Interval with no lower bound - ((lb, ub) : more) | lb == noLowerBound -> - VersionIntervals $ invertVersionIntervals' ub more - -- Interval with a lower bound - ((lb, ub) : more) -> - VersionIntervals $ 
(noLowerBound, invertLowerBound lb) - : invertVersionIntervals' ub more - where - -- Invert subsequent version intervals given the upper bound of - -- the intervals already inverted. - invertVersionIntervals' :: UpperBound - -> [(LowerBound, UpperBound)] - -> [(LowerBound, UpperBound)] - invertVersionIntervals' NoUpperBound [] = [] - invertVersionIntervals' ub0 [] = [(invertUpperBound ub0, NoUpperBound)] - invertVersionIntervals' ub0 [(lb, NoUpperBound)] = - [(invertUpperBound ub0, invertLowerBound lb)] - invertVersionIntervals' ub0 ((lb, ub1) : more) = - (invertUpperBound ub0, invertLowerBound lb) - : invertVersionIntervals' ub1 more - - invertLowerBound :: LowerBound -> UpperBound - invertLowerBound (LowerBound v b) = UpperBound v (invertBound b) - - invertUpperBound :: UpperBound -> LowerBound - invertUpperBound (UpperBound v b) = LowerBound v (invertBound b) - invertUpperBound NoUpperBound = error "NoUpperBound: unexpected" - - invertBound :: Bound -> Bound - invertBound ExclusiveBound = InclusiveBound - invertBound InclusiveBound = ExclusiveBound - - noLowerBound :: LowerBound - noLowerBound = LowerBound (mkVersion [0]) InclusiveBound - - -relaxLastInterval :: VersionIntervals -> VersionIntervals -relaxLastInterval (VersionIntervals xs) = VersionIntervals (relaxLastInterval' xs) - where - relaxLastInterval' [] = [] - relaxLastInterval' [(l,_)] = [(l, NoUpperBound)] - relaxLastInterval' (i:is) = i : relaxLastInterval' is - -relaxHeadInterval :: VersionIntervals -> VersionIntervals -relaxHeadInterval (VersionIntervals xs) = VersionIntervals (relaxHeadInterval' xs) - where - relaxHeadInterval' [] = [] - relaxHeadInterval' ((_,u):is) = (minLowerBound,u) : is diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/VersionRange.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/VersionRange.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Types/VersionRange.hs 
2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Types/VersionRange.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,586 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveFoldable #-} -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveTraversable #-} -{-# LANGUAGE FlexibleContexts #-} -module Distribution.Types.VersionRange ( - -- * Version ranges - VersionRange(..), - - -- ** Constructing - anyVersion, noVersion, - thisVersion, notThisVersion, - laterVersion, earlierVersion, - orLaterVersion, orEarlierVersion, - unionVersionRanges, intersectVersionRanges, - withinVersion, - majorBoundVersion, - - -- ** Inspection - -- - -- See "Distribution.Version" for more utilities. - withinRange, - foldVersionRange, - normaliseVersionRange, - stripParensVersionRange, - hasUpperBound, - hasLowerBound, - - -- ** Cata & ana - VersionRangeF (..), - cataVersionRange, - anaVersionRange, - hyloVersionRange, - projectVersionRange, - embedVersionRange, - - -- ** Utilities - wildcardUpperBound, - majorUpperBound, - isWildcardRange, - ) where - -import Distribution.Compat.Prelude -import Distribution.Types.Version -import Prelude () - -import Distribution.CabalSpecVersion -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text -import Text.PrettyPrint ((<+>)) - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.DList as DList -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - -data VersionRange - = AnyVersion - | ThisVersion Version -- = version - | LaterVersion Version -- > version (NB. 
not >=) - | OrLaterVersion Version -- >= version - | EarlierVersion Version -- < version - | OrEarlierVersion Version -- <= version - | WildcardVersion Version -- == ver.* (same as >= ver && < ver+1) - | MajorBoundVersion Version -- @^>= ver@ (same as >= ver && < MAJ(ver)+1) - | UnionVersionRanges VersionRange VersionRange - | IntersectVersionRanges VersionRange VersionRange - | VersionRangeParens VersionRange -- just '(exp)' parentheses syntax - deriving (Data, Eq, Generic, Read, Show, Typeable) - -instance Binary VersionRange - -instance NFData VersionRange where rnf = genericRnf - -{-# DeprecateD AnyVersion - "Use 'anyVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} -{-# DEPRECATED ThisVersion - "Use 'thisVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} -{-# DEPRECATED LaterVersion - "Use 'laterVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} -{-# DEPRECATED EarlierVersion - "Use 'earlierVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} -{-# DEPRECATED WildcardVersion - "Use 'anyVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} -{-# DEPRECATED UnionVersionRanges - "Use 'unionVersionRanges', 'foldVersionRange' or 'asVersionIntervals'" #-} -{-# DEPRECATED IntersectVersionRanges - "Use 'intersectVersionRanges', 'foldVersionRange' or 'asVersionIntervals'"#-} - --- | The version range @-any@. That is, a version range containing all --- versions. --- --- > withinRange v anyVersion = True --- -anyVersion :: VersionRange -anyVersion = AnyVersion - --- | The empty version range, that is a version range containing no versions. --- --- This can be constructed using any unsatisfiable version range expression, --- for example @> 1 && < 1@. 
--- --- > withinRange v noVersion = False --- -noVersion :: VersionRange -noVersion = IntersectVersionRanges (LaterVersion v) (EarlierVersion v) - where v = mkVersion [1] - --- | The version range @== v@ --- --- > withinRange v' (thisVersion v) = v' == v --- -thisVersion :: Version -> VersionRange -thisVersion = ThisVersion - --- | The version range @< v || > v@ --- --- > withinRange v' (notThisVersion v) = v' /= v --- -notThisVersion :: Version -> VersionRange -notThisVersion v = UnionVersionRanges (EarlierVersion v) (LaterVersion v) - --- | The version range @> v@ --- --- > withinRange v' (laterVersion v) = v' > v --- -laterVersion :: Version -> VersionRange -laterVersion = LaterVersion - --- | The version range @>= v@ --- --- > withinRange v' (orLaterVersion v) = v' >= v --- -orLaterVersion :: Version -> VersionRange -orLaterVersion = OrLaterVersion - --- | The version range @< v@ --- --- > withinRange v' (earlierVersion v) = v' < v --- -earlierVersion :: Version -> VersionRange -earlierVersion = EarlierVersion - --- | The version range @<= v@ --- --- > withinRange v' (orEarlierVersion v) = v' <= v --- -orEarlierVersion :: Version -> VersionRange -orEarlierVersion = OrEarlierVersion - --- | The version range @vr1 || vr2@ --- --- > withinRange v' (unionVersionRanges vr1 vr2) --- > = withinRange v' vr1 || withinRange v' vr2 --- -unionVersionRanges :: VersionRange -> VersionRange -> VersionRange -unionVersionRanges = UnionVersionRanges - --- | The version range @vr1 && vr2@ --- --- > withinRange v' (intersectVersionRanges vr1 vr2) --- > = withinRange v' vr1 && withinRange v' vr2 --- -intersectVersionRanges :: VersionRange -> VersionRange -> VersionRange -intersectVersionRanges = IntersectVersionRanges - --- | The version range @== v.*@. 
--- --- For example, for version @1.2@, the version range @== 1.2.*@ is the same as --- @>= 1.2 && < 1.3@ --- --- > withinRange v' (laterVersion v) = v' >= v && v' < upper v --- > where --- > upper (Version lower t) = Version (init lower ++ [last lower + 1]) t --- -withinVersion :: Version -> VersionRange -withinVersion = WildcardVersion - --- | The version range @^>= v@. --- --- For example, for version @1.2.3.4@, the version range @^>= 1.2.3.4@ is the same as --- @>= 1.2.3.4 && < 1.3@. --- --- Note that @^>= 1@ is equivalent to @>= 1 && < 1.1@. --- --- @since 2.0.0.2 -majorBoundVersion :: Version -> VersionRange -majorBoundVersion = MajorBoundVersion - --- | F-Algebra of 'VersionRange'. See 'cataVersionRange'. --- --- @since 2.2 -data VersionRangeF a - = AnyVersionF - | ThisVersionF Version -- = version - | LaterVersionF Version -- > version (NB. not >=) - | OrLaterVersionF Version -- >= version - | EarlierVersionF Version -- < version - | OrEarlierVersionF Version -- <= version - | WildcardVersionF Version -- == ver.* (same as >= ver && < ver+1) - | MajorBoundVersionF Version -- @^>= ver@ (same as >= ver && < MAJ(ver)+1) - | UnionVersionRangesF a a - | IntersectVersionRangesF a a - | VersionRangeParensF a - deriving (Data, Eq, Generic, Read, Show, Typeable, Functor, Foldable, Traversable) - --- | @since 2.2 -projectVersionRange :: VersionRange -> VersionRangeF VersionRange -projectVersionRange AnyVersion = AnyVersionF -projectVersionRange (ThisVersion v) = ThisVersionF v -projectVersionRange (LaterVersion v) = LaterVersionF v -projectVersionRange (OrLaterVersion v) = OrLaterVersionF v -projectVersionRange (EarlierVersion v) = EarlierVersionF v -projectVersionRange (OrEarlierVersion v) = OrEarlierVersionF v -projectVersionRange (WildcardVersion v) = WildcardVersionF v -projectVersionRange (MajorBoundVersion v) = MajorBoundVersionF v -projectVersionRange (UnionVersionRanges a b) = UnionVersionRangesF a b -projectVersionRange (IntersectVersionRanges a b) = 
IntersectVersionRangesF a b -projectVersionRange (VersionRangeParens a) = VersionRangeParensF a - --- | Fold 'VersionRange'. --- --- @since 2.2 -cataVersionRange :: (VersionRangeF a -> a) -> VersionRange -> a -cataVersionRange f = c where c = f . fmap c . projectVersionRange - --- | @since 2.2 -embedVersionRange :: VersionRangeF VersionRange -> VersionRange -embedVersionRange AnyVersionF = AnyVersion -embedVersionRange (ThisVersionF v) = ThisVersion v -embedVersionRange (LaterVersionF v) = LaterVersion v -embedVersionRange (OrLaterVersionF v) = OrLaterVersion v -embedVersionRange (EarlierVersionF v) = EarlierVersion v -embedVersionRange (OrEarlierVersionF v) = OrEarlierVersion v -embedVersionRange (WildcardVersionF v) = WildcardVersion v -embedVersionRange (MajorBoundVersionF v) = MajorBoundVersion v -embedVersionRange (UnionVersionRangesF a b) = UnionVersionRanges a b -embedVersionRange (IntersectVersionRangesF a b) = IntersectVersionRanges a b -embedVersionRange (VersionRangeParensF a) = VersionRangeParens a - --- | Unfold 'VersionRange'. --- --- @since 2.2 -anaVersionRange :: (a -> VersionRangeF a) -> a -> VersionRange -anaVersionRange g = a where a = embedVersionRange . fmap a . g - - --- | Fold over the basic syntactic structure of a 'VersionRange'. --- --- This provides a syntactic view of the expression defining the version range. --- The syntactic sugar @\">= v\"@, @\"<= v\"@ and @\"== v.*\"@ is presented --- in terms of the other basic syntax. --- --- For a semantic view use 'asVersionIntervals'. 
--- -foldVersionRange :: a -- ^ @\"-any\"@ version - -> (Version -> a) -- ^ @\"== v\"@ - -> (Version -> a) -- ^ @\"> v\"@ - -> (Version -> a) -- ^ @\"< v\"@ - -> (a -> a -> a) -- ^ @\"_ || _\"@ union - -> (a -> a -> a) -- ^ @\"_ && _\"@ intersection - -> VersionRange -> a -foldVersionRange anyv this later earlier union intersect = fold - where - fold = cataVersionRange alg - - alg AnyVersionF = anyv - alg (ThisVersionF v) = this v - alg (LaterVersionF v) = later v - alg (OrLaterVersionF v) = union (this v) (later v) - alg (EarlierVersionF v) = earlier v - alg (OrEarlierVersionF v) = union (this v) (earlier v) - alg (WildcardVersionF v) = fold (wildcard v) - alg (MajorBoundVersionF v) = fold (majorBound v) - alg (UnionVersionRangesF v1 v2) = union v1 v2 - alg (IntersectVersionRangesF v1 v2) = intersect v1 v2 - alg (VersionRangeParensF v) = v - - wildcard v = intersectVersionRanges - (orLaterVersion v) - (earlierVersion (wildcardUpperBound v)) - - majorBound v = intersectVersionRanges - (orLaterVersion v) - (earlierVersion (majorUpperBound v)) - --- | Refold 'VersionRange' --- --- @since 2.2 -hyloVersionRange :: (VersionRangeF VersionRange -> VersionRange) - -> (VersionRange -> VersionRangeF VersionRange) - -> VersionRange -> VersionRange -hyloVersionRange f g = h where h = f . fmap h . g - --- | Normalise 'VersionRange'. --- --- In particular collapse @(== v || > v)@ into @>= v@, and so on. 
-normaliseVersionRange :: VersionRange -> VersionRange -normaliseVersionRange = hyloVersionRange embed projectVersionRange - where - -- == v || > v, > v || == v ==> >= v - embed (UnionVersionRangesF (ThisVersion v) (LaterVersion v')) | v == v' = - orLaterVersion v - embed (UnionVersionRangesF (LaterVersion v) (ThisVersion v')) | v == v' = - orLaterVersion v - - -- == v || < v, < v || == v ==> <= v - embed (UnionVersionRangesF (ThisVersion v) (EarlierVersion v')) | v == v' = - orEarlierVersion v - embed (UnionVersionRangesF (EarlierVersion v) (ThisVersion v')) | v == v' = - orEarlierVersion v - - -- otherwise embed normally - embed vr = embedVersionRange vr - --- | Remove 'VersionRangeParens' constructors. --- --- @since 2.2 -stripParensVersionRange :: VersionRange -> VersionRange -stripParensVersionRange = hyloVersionRange embed projectVersionRange - where - embed (VersionRangeParensF vr) = vr - embed vr = embedVersionRange vr - --- | Does this version fall within the given range? --- --- This is the evaluation function for the 'VersionRange' type. 
--- -withinRange :: Version -> VersionRange -> Bool -withinRange v = foldVersionRange - True - (\v' -> v == v') - (\v' -> v > v') - (\v' -> v < v') - (||) - (&&) - ----------------------------- --- Wildcard range utilities --- - --- | @since 2.2 -wildcardUpperBound :: Version -> Version -wildcardUpperBound = alterVersion $ - \lowerBound -> init lowerBound ++ [last lowerBound + 1] - -isWildcardRange :: Version -> Version -> Bool -isWildcardRange ver1 ver2 = check (versionNumbers ver1) (versionNumbers ver2) - where check (n:[]) (m:[]) | n+1 == m = True - check (n:ns) (m:ms) | n == m = check ns ms - check _ _ = False - --- | Compute next greater major version to be used as upper bound --- --- Example: @0.4.1@ produces the version @0.5@ which then can be used --- to construct a range @>= 0.4.1 && < 0.5@ --- --- @since 2.2 -majorUpperBound :: Version -> Version -majorUpperBound = alterVersion $ \numbers -> case numbers of - [] -> [0,1] -- should not happen - [m1] -> [m1,1] -- e.g. version '1' - (m1:m2:_) -> [m1,m2+1] - -------------------------------------------------------------------------------- --- Parsec & Pretty -------------------------------------------------------------------------------- - -instance Pretty VersionRange where - pretty = fst . 
cataVersionRange alg - where - alg AnyVersionF = (Disp.text "-any", 0 :: Int) - alg (ThisVersionF v) = (Disp.text "==" <<>> pretty v, 0) - alg (LaterVersionF v) = (Disp.char '>' <<>> pretty v, 0) - alg (OrLaterVersionF v) = (Disp.text ">=" <<>> pretty v, 0) - alg (EarlierVersionF v) = (Disp.char '<' <<>> pretty v, 0) - alg (OrEarlierVersionF v) = (Disp.text "<=" <<>> pretty v, 0) - alg (WildcardVersionF v) = (Disp.text "==" <<>> dispWild v, 0) - alg (MajorBoundVersionF v) = (Disp.text "^>=" <<>> pretty v, 0) - alg (UnionVersionRangesF (r1, p1) (r2, p2)) = - (punct 1 p1 r1 <+> Disp.text "||" <+> punct 2 p2 r2 , 2) - alg (IntersectVersionRangesF (r1, p1) (r2, p2)) = - (punct 0 p1 r1 <+> Disp.text "&&" <+> punct 1 p2 r2 , 1) - alg (VersionRangeParensF (r, _)) = - (Disp.parens r, 0) - - dispWild ver = - Disp.hcat (Disp.punctuate (Disp.char '.') (map Disp.int $ versionNumbers ver)) - <<>> Disp.text ".*" - - punct p p' | p < p' = Disp.parens - | otherwise = id - -instance Parsec VersionRange where - parsec = expr - where - expr = do P.spaces - t <- term - P.spaces - (do _ <- P.string "||" - P.spaces - e <- expr - return (unionVersionRanges t e) - <|> - return t) - term = do f <- factor - P.spaces - (do _ <- P.string "&&" - P.spaces - t <- term - return (intersectVersionRanges f t) - <|> - return f) - factor = parens expr <|> prim - - prim = do - op <- P.munch1 (`elem` "<>=^-") P. 
"operator" - case op of - "-" -> anyVersion <$ P.string "any" <|> P.string "none" *> noVersion' - - "==" -> do - P.spaces - (wild, v) <- verOrWild - pure $ (if wild then withinVersion else thisVersion) v - - _ -> do - P.spaces - (wild, v) <- verOrWild - when wild $ P.unexpected $ - "wild-card version after non-== operator: " ++ show op - case op of - ">=" -> pure $ orLaterVersion v - "<" -> pure $ earlierVersion v - "^>=" -> majorBoundVersion' v - "<=" -> pure $ orEarlierVersion v - ">" -> pure $ laterVersion v - _ -> fail $ "Unknown version operator " ++ show op - - -- Note: There are other features: - -- && and || since 1.8 - -- x.y.* (wildcard) since 1.6 - - -- -none version range is available since 1.22 - noVersion' = do - csv <- askCabalSpecVersion - if csv >= CabalSpecV1_22 - then pure noVersion - else fail $ unwords - [ "-none version range used." - , "To use this syntax the package needs to specify at least 'cabal-version: 1.22'." - , "Alternatively, if broader compatibility is important then use" - , "<0 or other empty range." - ] - - -- ^>= is available since 2.0 - majorBoundVersion' v = do - csv <- askCabalSpecVersion - if csv >= CabalSpecV2_0 - then pure $ majorBoundVersion v - else fail $ unwords - [ "major bounded version syntax (caret, ^>=) used." - , "To use this syntax the package need to specify at least 'cabal-version: 2.0'." 
- , "Alternatively, if broader compatibility is important then use:" - , prettyShow $ eliminateMajorBoundSyntax $ majorBoundVersion v - ] - where - eliminateMajorBoundSyntax = hyloVersionRange embed projectVersionRange - embed (MajorBoundVersionF u) = intersectVersionRanges - (orLaterVersion u) (earlierVersion (majorUpperBound u)) - embed vr = embedVersionRange vr - - -- either wildcard or normal version - verOrWild :: CabalParsing m => m (Bool, Version) - verOrWild = do - x <- P.integral - verLoop (DList.singleton x) - - -- trailing: wildcard (.y.*) or normal version (optional tags) (.y.z-tag) - verLoop :: CabalParsing m => DList.DList Int -> m (Bool, Version) - verLoop acc = verLoop' acc <|> (tags *> pure (False, mkVersion (DList.toList acc))) - - verLoop' :: CabalParsing m => DList.DList Int -> m (Bool, Version) - verLoop' acc = do - _ <- P.char '.' - let digit = P.integral >>= verLoop . DList.snoc acc - let wild = (True, mkVersion (DList.toList acc)) <$ P.char '*' - digit <|> wild - - parens p = P.between - ((P.char '(' P. 
"opening paren") >> P.spaces) - (P.char ')' >> P.spaces) - (do a <- p - P.spaces - return (VersionRangeParens a)) - - tags :: CabalParsing m => m () - tags = do - ts <- many $ P.char '-' *> some (P.satisfy isAlphaNum) - case ts of - [] -> pure () - (_ : _) -> parsecWarning PWTVersionTag "version with tags" - - -instance Text VersionRange where - parse = expr - where - expr = do Parse.skipSpaces - t <- term - Parse.skipSpaces - (do _ <- Parse.string "||" - Parse.skipSpaces - e <- expr - return (UnionVersionRanges t e) - Parse.+++ - return t) - term = do f <- factor - Parse.skipSpaces - (do _ <- Parse.string "&&" - Parse.skipSpaces - t <- term - return (IntersectVersionRanges f t) - Parse.+++ - return f) - factor = Parse.choice $ parens expr - : parseAnyVersion - : parseNoVersion - : parseWildcardRange - : map parseRangeOp rangeOps - parseAnyVersion = Parse.string "-any" >> return AnyVersion - parseNoVersion = Parse.string "-none" >> return noVersion - - parseWildcardRange = do - _ <- Parse.string "==" - Parse.skipSpaces - branch <- Parse.sepBy1 digits (Parse.char '.') - _ <- Parse.char '.' - _ <- Parse.char '*' - return (WildcardVersion (mkVersion branch)) - - parens p = Parse.between (Parse.char '(' >> Parse.skipSpaces) - (Parse.char ')' >> Parse.skipSpaces) - (do a <- p - Parse.skipSpaces - return (VersionRangeParens a)) - - digits = do - firstDigit <- Parse.satisfy isDigit - if firstDigit == '0' - then return 0 - else do rest <- Parse.munch isDigit - return (read (firstDigit : rest)) -- TODO: eradicateNoParse - - parseRangeOp (s,f) = Parse.string s >> Parse.skipSpaces >> fmap f parse - rangeOps = [ ("<", EarlierVersion), - ("<=", orEarlierVersion), - (">", LaterVersion), - (">=", orLaterVersion), - ("^>=", MajorBoundVersion), - ("==", ThisVersion) ] - --- | Does the version range have an upper bound? 
--- --- @since 1.24.0.0 -hasUpperBound :: VersionRange -> Bool -hasUpperBound = foldVersionRange - False - (const True) - (const False) - (const True) - (&&) (||) - --- | Does the version range have an explicit lower bound? --- --- Note: this function only considers the user-specified lower bounds, but not --- the implicit >=0 lower bound. --- --- @since 1.24.0.0 -hasLowerBound :: VersionRange -> Bool -hasLowerBound = foldVersionRange - False - (const True) - (const True) - (const False) - (&&) (||) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/Base62.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/Base62.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/Base62.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/Base62.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ - --- | Implementation of base-62 encoding, which we use when computing hashes --- for fully instantiated unit ids. -module Distribution.Utils.Base62 (hashToBase62) where - -import GHC.Fingerprint ( Fingerprint(..), fingerprintString ) -import Numeric ( showIntAtBase ) -import Data.Char ( chr ) - --- | Hash a string using GHC's fingerprinting algorithm (a 128-bit --- MD5 hash) and then encode the resulting hash in base 62. 
-hashToBase62 :: String -> String -hashToBase62 s = showFingerprint $ fingerprintString s - where - showIntAtBase62 x = showIntAtBase 62 representBase62 x "" - representBase62 x - | x < 10 = chr (48 + x) - | x < 36 = chr (65 + x - 10) - | x < 62 = chr (97 + x - 36) - | otherwise = '@' - showFingerprint (Fingerprint a b) = showIntAtBase62 a ++ showIntAtBase62 b - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/Generic.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/Generic.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/Generic.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/Generic.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,495 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE BangPatterns #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Simple.Utils --- Copyright : Isaac Jones, Simon Marlow 2003-2004 --- License : BSD3 --- portions Copyright (c) 2007, Galois Inc. --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- A large and somewhat miscellaneous collection of utility functions used --- throughout the rest of the Cabal lib and in other tools that use the Cabal --- lib like @cabal-install@. It has a very simple set of logging actions. It --- has low level functions for running programs, a bunch of wrappers for --- various directory and file functions that do extra logging. 
- -module Distribution.Utils.Generic ( - -- * reading and writing files safely - withFileContents, - writeFileAtomic, - - -- * Unicode - - -- ** Conversions - fromUTF8BS, - fromUTF8LBS, - - toUTF8BS, - toUTF8LBS, - - validateUTF8, - - -- ** File I/O - readUTF8File, - withUTF8FileContents, - writeUTF8File, - - -- ** BOM - ignoreBOM, - - -- ** Misc - normaliseLineEndings, - - -- * generic utils - dropWhileEndLE, - takeWhileEndLE, - equating, - comparing, - isInfixOf, - intercalate, - lowercase, - isAscii, - isAsciiAlpha, - isAsciiAlphaNum, - listUnion, - listUnionRight, - ordNub, - ordNubBy, - ordNubRight, - safeTail, - unintersperse, - wrapText, - wrapLine, - unfoldrM, - spanMaybe, - breakMaybe, - - -- * FilePath stuff - isAbsoluteOnAnyPlatform, - isRelativeOnAnyPlatform, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Utils.String - -import Data.Bits ((.&.), (.|.), shiftL) -import Data.List - ( isInfixOf ) -import Data.Ord - ( comparing ) -import qualified Data.ByteString.Lazy as BS -import qualified Data.Set as Set - -import qualified Data.ByteString as SBS - -import System.Directory - ( removeFile, renameFile ) -import System.FilePath - ( (<.>), splitFileName ) -import System.IO - ( withFile, withBinaryFile - , openBinaryTempFileWithDefaultPermissions - , IOMode(ReadMode), hGetContents, hClose ) -import qualified Control.Exception as Exception - --- ----------------------------------------------------------------------------- --- Helper functions - --- | Wraps text to the default line width. Existing newlines are preserved. -wrapText :: String -> String -wrapText = unlines - . map (intercalate "\n" - . map unwords - . wrapLine 79 - . words) - . lines - --- | Wraps a list of words to a list of lines of words of a particular width. 
-wrapLine :: Int -> [String] -> [[String]] -wrapLine width = wrap 0 [] - where wrap :: Int -> [String] -> [String] -> [[String]] - wrap 0 [] (w:ws) - | length w + 1 > width - = wrap (length w) [w] ws - wrap col line (w:ws) - | col + length w + 1 > width - = reverse line : wrap 0 [] (w:ws) - wrap col line (w:ws) - = let col' = col + length w + 1 - in wrap col' (w:line) ws - wrap _ [] [] = [] - wrap _ line [] = [reverse line] - ------------------------------------ --- Safely reading and writing files - --- | Gets the contents of a file, but guarantee that it gets closed. --- --- The file is read lazily but if it is not fully consumed by the action then --- the remaining input is truncated and the file is closed. --- -withFileContents :: FilePath -> (String -> NoCallStackIO a) -> NoCallStackIO a -withFileContents name action = - withFile name ReadMode - (\hnd -> hGetContents hnd >>= action) - --- | Writes a file atomically. --- --- The file is either written successfully or an IO exception is raised and --- the original file is left unchanged. --- --- On windows it is not possible to delete a file that is open by a process. --- This case will give an IO exception but the atomic property is not affected. 
--- -writeFileAtomic :: FilePath -> BS.ByteString -> NoCallStackIO () -writeFileAtomic targetPath content = do - let (targetDir, targetFile) = splitFileName targetPath - Exception.bracketOnError - (openBinaryTempFileWithDefaultPermissions targetDir $ targetFile <.> "tmp") - (\(tmpPath, handle) -> hClose handle >> removeFile tmpPath) - (\(tmpPath, handle) -> do - BS.hPut handle content - hClose handle - renameFile tmpPath targetPath) - --- ------------------------------------------------------------ --- * Unicode stuff --- ------------------------------------------------------------ - --- | Decode 'String' from UTF8-encoded 'BS.ByteString' --- --- Invalid data in the UTF8 stream (this includes code-points @U+D800@ --- through @U+DFFF@) will be decoded as the replacement character (@U+FFFD@). --- -fromUTF8BS :: SBS.ByteString -> String -fromUTF8BS = decodeStringUtf8 . SBS.unpack - --- | Variant of 'fromUTF8BS' for lazy 'BS.ByteString's --- -fromUTF8LBS :: BS.ByteString -> String -fromUTF8LBS = decodeStringUtf8 . BS.unpack - --- | Encode 'String' to to UTF8-encoded 'SBS.ByteString' --- --- Code-points in the @U+D800@-@U+DFFF@ range will be encoded --- as the replacement character (i.e. @U+FFFD@). --- -toUTF8BS :: String -> SBS.ByteString -toUTF8BS = SBS.pack . encodeStringUtf8 - --- | Variant of 'toUTF8BS' for lazy 'BS.ByteString's --- -toUTF8LBS :: String -> BS.ByteString -toUTF8LBS = BS.pack . encodeStringUtf8 - --- | Check that strict 'ByteString' is valid UTF8. Returns 'Just offset' if it's not. -validateUTF8 :: SBS.ByteString -> Maybe Int -validateUTF8 = go 0 where - go off bs = case SBS.uncons bs of - Nothing -> Nothing - Just (c, bs') - | c <= 0x7F -> go (off + 1) bs' - | c <= 0xBF -> Just off - | c <= 0xDF -> twoBytes off c bs' - | c <= 0xEF -> moreBytes off 3 0x800 bs' (fromIntegral $ c .&. 0xF) - | c <= 0xF7 -> moreBytes off 4 0x10000 bs' (fromIntegral $ c .&. 0x7) - | c <= 0xFB -> moreBytes off 5 0x200000 bs' (fromIntegral $ c .&. 
0x3) - | c <= 0xFD -> moreBytes off 6 0x4000000 bs' (fromIntegral $ c .&. 0x1) - | otherwise -> Just off - - twoBytes off c0 bs = case SBS.uncons bs of - Nothing -> Just off - Just (c1, bs') - | c1 .&. 0xC0 == 0x80 -> - if d >= (0x80 :: Int) - then go (off + 2) bs' - else Just off - | otherwise -> Just off - where - d = (fromIntegral (c0 .&. 0x1F) `shiftL` 6) .|. fromIntegral (c1 .&. 0x3F) - - moreBytes :: Int -> Int -> Int -> SBS.ByteString -> Int -> Maybe Int - moreBytes off 1 overlong cs' acc - | overlong <= acc, acc <= 0x10FFFF, acc < 0xD800 || 0xDFFF < acc - = go (off + 1) cs' - - | otherwise - = Just off - - moreBytes off byteCount overlong bs acc = case SBS.uncons bs of - Just (cn, bs') | cn .&. 0xC0 == 0x80 -> - moreBytes (off + 1) (byteCount-1) overlong bs' ((acc `shiftL` 6) .|. fromIntegral cn .&. 0x3F) - _ -> Just off - - --- | Ignore a Unicode byte order mark (BOM) at the beginning of the input --- -ignoreBOM :: String -> String -ignoreBOM ('\xFEFF':string) = string -ignoreBOM string = string - --- | Reads a UTF8 encoded text file as a Unicode String --- --- Reads lazily using ordinary 'readFile'. --- -readUTF8File :: FilePath -> NoCallStackIO String -readUTF8File f = (ignoreBOM . fromUTF8LBS) <$> BS.readFile f - --- | Reads a UTF8 encoded text file as a Unicode String --- --- Same behaviour as 'withFileContents'. --- -withUTF8FileContents :: FilePath -> (String -> IO a) -> IO a -withUTF8FileContents name action = - withBinaryFile name ReadMode - (\hnd -> BS.hGetContents hnd >>= action . ignoreBOM . fromUTF8LBS) - --- | Writes a Unicode String as a UTF8 encoded text file. --- --- Uses 'writeFileAtomic', so provides the same guarantees. --- -writeUTF8File :: FilePath -> String -> NoCallStackIO () -writeUTF8File path = writeFileAtomic path . BS.pack . 
encodeStringUtf8 - --- | Fix different systems silly line ending conventions -normaliseLineEndings :: String -> String -normaliseLineEndings [] = [] -normaliseLineEndings ('\r':'\n':s) = '\n' : normaliseLineEndings s -- windows -normaliseLineEndings ('\r':s) = '\n' : normaliseLineEndings s -- old OS X -normaliseLineEndings ( c :s) = c : normaliseLineEndings s - --- ------------------------------------------------------------ --- * Common utils --- ------------------------------------------------------------ - --- | @dropWhileEndLE p@ is equivalent to @reverse . dropWhile p . reverse@, but --- quite a bit faster. The difference between "Data.List.dropWhileEnd" and this --- version is that the one in "Data.List" is strict in elements, but spine-lazy, --- while this one is spine-strict but lazy in elements. That's what @LE@ stands --- for - "lazy in elements". --- --- Example: --- --- >>> tail $ Data.List.dropWhileEnd (<3) [undefined, 5, 4, 3, 2, 1] --- *** Exception: Prelude.undefined --- ... --- --- >>> tail $ dropWhileEndLE (<3) [undefined, 5, 4, 3, 2, 1] --- [5,4,3] --- --- >>> take 3 $ Data.List.dropWhileEnd (<3) [5, 4, 3, 2, 1, undefined] --- [5,4,3] --- --- >>> take 3 $ dropWhileEndLE (<3) [5, 4, 3, 2, 1, undefined] --- *** Exception: Prelude.undefined --- ... --- -dropWhileEndLE :: (a -> Bool) -> [a] -> [a] -dropWhileEndLE p = foldr (\x r -> if null r && p x then [] else x:r) [] - --- | @takeWhileEndLE p@ is equivalent to @reverse . takeWhile p . reverse@, but --- is usually faster (as well as being easier to read). -takeWhileEndLE :: (a -> Bool) -> [a] -> [a] -takeWhileEndLE p = fst . foldr go ([], False) - where - go x (rest, done) - | not done && p x = (x:rest, False) - | otherwise = (rest, True) - --- | Like 'Data.List.nub', but has @O(n log n)@ complexity instead of --- @O(n^2)@. Code for 'ordNub' and 'listUnion' taken from Niklas Hambüchen's --- package. -ordNub :: Ord a => [a] -> [a] -ordNub = ordNubBy id - --- | Like 'ordNub' and 'Data.List.nubBy'. 
Selects a key for each element and --- takes the nub based on that key. -ordNubBy :: Ord b => (a -> b) -> [a] -> [a] -ordNubBy f l = go Set.empty l - where - go !_ [] = [] - go !s (x:xs) - | y `Set.member` s = go s xs - | otherwise = let !s' = Set.insert y s - in x : go s' xs - where - y = f x - --- | Like "Data.List.union", but has @O(n log n)@ complexity instead of --- @O(n^2)@. -listUnion :: (Ord a) => [a] -> [a] -> [a] -listUnion a b = a ++ ordNub (filter (`Set.notMember` aSet) b) - where - aSet = Set.fromList a - --- | A right-biased version of 'ordNub'. --- --- Example: --- --- >>> ordNub [1,2,1] :: [Int] --- [1,2] --- --- >>> ordNubRight [1,2,1] :: [Int] --- [2,1] --- -ordNubRight :: (Ord a) => [a] -> [a] -ordNubRight = fst . foldr go ([], Set.empty) - where - go x p@(l, s) = if x `Set.member` s then p - else (x:l, Set.insert x s) - --- | A right-biased version of 'listUnion'. --- --- Example: --- --- >>> listUnion [1,2,3,4,3] [2,1,1] --- [1,2,3,4,3] --- --- >>> listUnionRight [1,2,3,4,3] [2,1,1] --- [4,3,2,1,1] --- -listUnionRight :: (Ord a) => [a] -> [a] -> [a] -listUnionRight a b = ordNubRight (filter (`Set.notMember` bSet) a) ++ b - where - bSet = Set.fromList b - --- | A total variant of 'tail'. -safeTail :: [a] -> [a] -safeTail [] = [] -safeTail (_:xs) = xs - -equating :: Eq a => (b -> a) -> b -> b -> Bool -equating p x y = p x == p y - --- | Lower case string --- --- >>> lowercase "Foobar" --- "foobar" -lowercase :: String -> String -lowercase = map toLower - --- | Ascii characters -isAscii :: Char -> Bool -isAscii c = fromEnum c < 0x80 - --- | Ascii letters. -isAsciiAlpha :: Char -> Bool -isAsciiAlpha c = ('a' <= c && c <= 'z') - || ('A' <= c && c <= 'Z') - --- | Ascii letters and digits. 
--- --- >>> isAsciiAlphaNum 'a' --- True --- --- >>> isAsciiAlphaNum 'ä' --- False --- -isAsciiAlphaNum :: Char -> Bool -isAsciiAlphaNum c = isAscii c && isAlphaNum c - -unintersperse :: Char -> String -> [String] -unintersperse mark = unfoldr unintersperse1 where - unintersperse1 str - | null str = Nothing - | otherwise = - let (this, rest) = break (== mark) str in - Just (this, safeTail rest) - --- | Like 'break', but with 'Maybe' predicate --- --- >>> breakMaybe (readMaybe :: String -> Maybe Int) ["foo", "bar", "1", "2", "quu"] --- (["foo","bar"],Just (1,["2","quu"])) --- --- >>> breakMaybe (readMaybe :: String -> Maybe Int) ["foo", "bar"] --- (["foo","bar"],Nothing) --- --- @since 2.2 --- -breakMaybe :: (a -> Maybe b) -> [a] -> ([a], Maybe (b, [a])) -breakMaybe f = go id where - go !acc [] = (acc [], Nothing) - go !acc (x:xs) = case f x of - Nothing -> go (acc . (x:)) xs - Just b -> (acc [], Just (b, xs)) - --- | Like 'span' but with 'Maybe' predicate --- --- >>> spanMaybe listToMaybe [[1,2],[3],[],[4,5],[6,7]] --- ([1,3],[[],[4,5],[6,7]]) --- --- >>> spanMaybe (readMaybe :: String -> Maybe Int) ["1", "2", "foo"] --- ([1,2],["foo"]) --- --- @since 2.2 --- -spanMaybe :: (a -> Maybe b) -> [a] -> ([b],[a]) -spanMaybe _ xs@[] = ([], xs) -spanMaybe p xs@(x:xs') = case p x of - Just y -> let (ys, zs) = spanMaybe p xs' in (y : ys, zs) - Nothing -> ([], xs) - --- | 'unfoldr' with monadic action. 
--- --- >>> take 5 $ unfoldrM (\b r -> Just (r + b, b + 1)) (1 :: Int) 2 --- [3,4,5,6,7] --- --- @since 2.2 --- -unfoldrM :: Monad m => (b -> m (Maybe (a, b))) -> b -> m [a] -unfoldrM f = go where - go b = do - m <- f b - case m of - Nothing -> return [] - Just (a, b') -> liftM (a :) (go b') - --- ------------------------------------------------------------ --- * FilePath stuff --- ------------------------------------------------------------ - --- | 'isAbsoluteOnAnyPlatform' and 'isRelativeOnAnyPlatform' are like --- 'System.FilePath.isAbsolute' and 'System.FilePath.isRelative' but have --- platform independent heuristics. --- The System.FilePath exists in two versions, Windows and Posix. The two --- versions don't agree on what is a relative path and we don't know if we're --- given Windows or Posix paths. --- This results in false positives when running on Posix and inspecting --- Windows paths, like the hackage server does. --- System.FilePath.Posix.isAbsolute \"C:\\hello\" == False --- System.FilePath.Windows.isAbsolute \"/hello\" == False --- This means that we would treat paths that start with \"/\" to be absolute. --- On Posix they are indeed absolute, while on Windows they are not. --- --- The portable versions should be used when we might deal with paths that --- are from another OS than the host OS. For example, the Hackage Server --- deals with both Windows and Posix paths while performing the --- PackageDescription checks. In contrast, when we run 'cabal configure' we --- do expect the paths to be correct for our OS and we should not have to use --- the platform independent heuristics. -isAbsoluteOnAnyPlatform :: FilePath -> Bool --- C:\\directory -isAbsoluteOnAnyPlatform (drive:':':'\\':_) = isAlpha drive --- UNC -isAbsoluteOnAnyPlatform ('\\':'\\':_) = True --- Posix root -isAbsoluteOnAnyPlatform ('/':_) = True -isAbsoluteOnAnyPlatform _ = False - --- | @isRelativeOnAnyPlatform = not . 
'isAbsoluteOnAnyPlatform'@ -isRelativeOnAnyPlatform :: FilePath -> Bool -isRelativeOnAnyPlatform = not . isAbsoluteOnAnyPlatform - --- $setup --- >>> import Data.Maybe --- >>> import Text.Read diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/IOData.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/IOData.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/IOData.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/IOData.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,67 +0,0 @@ --- | @since 2.2.0 -module Distribution.Utils.IOData - ( -- * 'IOData' & 'IODataMode' type - IOData(..) - , IODataMode(..) - , null - , hGetContents - , hPutContents - ) where - -import qualified Data.ByteString.Lazy as BS -import Distribution.Compat.Prelude hiding (null) -import qualified Prelude -import qualified System.IO - --- | Represents either textual or binary data passed via I/O functions --- which support binary/text mode --- --- @since 2.2.0 -data IOData = IODataText String - -- ^ How Text gets encoded is usually locale-dependent. - | IODataBinary BS.ByteString - -- ^ Raw binary which gets read/written in binary mode. - --- | Test whether 'IOData' is empty --- --- @since 2.2.0 -null :: IOData -> Bool -null (IODataText s) = Prelude.null s -null (IODataBinary b) = BS.null b - -instance NFData IOData where - rnf (IODataText s) = rnf s - rnf (IODataBinary bs) = rnf bs - -data IODataMode = IODataModeText | IODataModeBinary - --- | 'IOData' Wrapper for 'System.IO.hGetContents' --- --- __Note__: This operation uses lazy I/O. Use 'NFData' to force all --- data to be read and consequently the internal file handle to be --- closed. 
--- --- @since 2.2.0 -hGetContents :: System.IO.Handle -> IODataMode -> Prelude.IO IOData -hGetContents h IODataModeText = do - System.IO.hSetBinaryMode h False - IODataText <$> System.IO.hGetContents h -hGetContents h IODataModeBinary = do - System.IO.hSetBinaryMode h True - IODataBinary <$> BS.hGetContents h - --- | 'IOData' Wrapper for 'System.IO.hPutStr' and 'System.IO.hClose' --- --- This is the dual operation ot 'ioDataHGetContents', --- and consequently the handle is closed with `hClose`. --- --- @since 2.2.0 -hPutContents :: System.IO.Handle -> IOData -> Prelude.IO () -hPutContents h (IODataText c) = do - System.IO.hSetBinaryMode h False - System.IO.hPutStr h c - System.IO.hClose h -hPutContents h (IODataBinary c) = do - System.IO.hSetBinaryMode h True - BS.hPutStr h c - System.IO.hClose h diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/LogProgress.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/LogProgress.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/LogProgress.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/LogProgress.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,89 +0,0 @@ -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE Rank2Types #-} -{-# LANGUAGE FlexibleContexts #-} -module Distribution.Utils.LogProgress ( - LogProgress, - runLogProgress, - warnProgress, - infoProgress, - dieProgress, - addProgressCtx, -) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Utils.Progress -import Distribution.Verbosity -import Distribution.Simple.Utils -import Text.PrettyPrint - -type CtxMsg = Doc -type LogMsg = Doc -type ErrMsg = Doc - -data LogEnv = LogEnv { - le_verbosity :: Verbosity, - le_context :: [CtxMsg] - } - --- | The 'Progress' monad with specialized logging and --- error messages. 
-newtype LogProgress a = LogProgress { unLogProgress :: LogEnv -> Progress LogMsg ErrMsg a } - -instance Functor LogProgress where - fmap f (LogProgress m) = LogProgress (fmap (fmap f) m) - -instance Applicative LogProgress where - pure x = LogProgress (pure (pure x)) - LogProgress f <*> LogProgress x = LogProgress $ \r -> f r `ap` x r - -instance Monad LogProgress where - return = pure - LogProgress m >>= f = LogProgress $ \r -> m r >>= \x -> unLogProgress (f x) r - --- | Run 'LogProgress', outputting traces according to 'Verbosity', --- 'die' if there is an error. -runLogProgress :: Verbosity -> LogProgress a -> NoCallStackIO a -runLogProgress verbosity (LogProgress m) = - foldProgress step_fn fail_fn return (m env) - where - env = LogEnv { - le_verbosity = verbosity, - le_context = [] - } - step_fn :: LogMsg -> NoCallStackIO a -> NoCallStackIO a - step_fn doc go = do - putStrLn (render doc) - go - fail_fn :: Doc -> NoCallStackIO a - fail_fn doc = do - dieNoWrap verbosity (render doc) - --- | Output a warning trace message in 'LogProgress'. -warnProgress :: Doc -> LogProgress () -warnProgress s = LogProgress $ \env -> - when (le_verbosity env >= normal) $ - stepProgress $ - hang (text "Warning:") 4 (formatMsg (le_context env) s) - --- | Output an informational trace message in 'LogProgress'. -infoProgress :: Doc -> LogProgress () -infoProgress s = LogProgress $ \env -> - when (le_verbosity env >= verbose) $ - stepProgress s - --- | Fail the computation with an error message. -dieProgress :: Doc -> LogProgress a -dieProgress s = LogProgress $ \env -> - failProgress $ - hang (text "Error:") 4 (formatMsg (le_context env) s) - --- | Format a message with context. (Something simple for now.) -formatMsg :: [CtxMsg] -> Doc -> Doc -formatMsg ctx doc = doc $$ vcat ctx - --- | Add a message to the error/warning context. 
-addProgressCtx :: CtxMsg -> LogProgress a -> LogProgress a -addProgressCtx s (LogProgress m) = LogProgress $ \env -> - m env { le_context = s : le_context env } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/MapAccum.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/MapAccum.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/MapAccum.hs 2018-10-17 15:59:01.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/MapAccum.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -{-# LANGUAGE CPP #-} -module Distribution.Utils.MapAccum (mapAccumM) where - -import Distribution.Compat.Prelude -import Prelude () - --- Like StateT but with return tuple swapped -newtype StateM s m a = StateM { runStateM :: s -> m (s, a) } - -instance Functor m => Functor (StateM s m) where - fmap f (StateM x) = StateM $ \s -> fmap (\(s', a) -> (s', f a)) (x s) - -instance -#if __GLASGOW_HASKELL__ < 709 - (Functor m, Monad m) -#else - Monad m -#endif - => Applicative (StateM s m) where - pure x = StateM $ \s -> return (s, x) - StateM f <*> StateM x = StateM $ \s -> do (s', f') <- f s - (s'', x') <- x s' - return (s'', f' x') - --- | Monadic variant of 'mapAccumL'. 
-mapAccumM :: -#if __GLASGOW_HASKELL__ < 709 - (Functor m, Monad m, Traversable t) -#else - (Monad m, Traversable t) -#endif - => (a -> b -> m (a, c)) -> a -> t b -> m (a, t c) -mapAccumM f s t = runStateM (traverse (\x -> StateM (\s' -> f s' x)) t) s - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/NubList.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/NubList.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/NubList.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/NubList.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,104 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -module Distribution.Utils.NubList - ( NubList -- opaque - , toNubList -- smart construtor - , fromNubList - , overNubList - - , NubListR - , toNubListR - , fromNubListR - , overNubListR - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.Simple.Utils - -import qualified Text.Read as R - --- | NubList : A de-duplicated list that maintains the original order. -newtype NubList a = - NubList { fromNubList :: [a] } - deriving (Eq, Typeable) - --- NubList assumes that nub retains the list order while removing duplicate --- elements (keeping the first occurence). Documentation for "Data.List.nub" --- does not specifically state that ordering is maintained so we will add a test --- for that to the test suite. - --- | Smart constructor for the NubList type. -toNubList :: Ord a => [a] -> NubList a -toNubList list = NubList $ ordNub list - --- | Lift a function over lists to a function over NubLists. -overNubList :: Ord a => ([a] -> [a]) -> NubList a -> NubList a -overNubList f (NubList list) = toNubList . f $ list - --- | Monoid operations on NubLists. --- For a valid Monoid instance we need to satistfy the required monoid laws; --- identity, associativity and closure. 
--- --- Identity : by inspection: --- mempty `mappend` NubList xs == NubList xs `mappend` mempty --- --- Associativity : by inspection: --- (NubList xs `mappend` NubList ys) `mappend` NubList zs --- == NubList xs `mappend` (NubList ys `mappend` NubList zs) --- --- Closure : appending two lists of type a and removing duplicates obviously --- does not change the type. - -instance Ord a => Monoid (NubList a) where - mempty = NubList [] - mappend = (<>) - -instance Ord a => Semigroup (NubList a) where - (NubList xs) <> (NubList ys) = NubList $ xs `listUnion` ys - -instance Show a => Show (NubList a) where - show (NubList list) = show list - -instance (Ord a, Read a) => Read (NubList a) where - readPrec = readNubList toNubList - --- | Helper used by NubList/NubListR's Read instances. -readNubList :: (Read a) => ([a] -> l a) -> R.ReadPrec (l a) -readNubList toList = R.parens . R.prec 10 $ fmap toList R.readPrec - --- | Binary instance for 'NubList a' is the same as for '[a]'. For 'put', we --- just pull off constructor and put the list. For 'get', we get the list and --- make a 'NubList' out of it using 'toNubList'. -instance (Ord a, Binary a) => Binary (NubList a) where - put (NubList l) = put l - get = fmap toNubList get - --- | NubListR : A right-biased version of 'NubList'. That is @toNubListR --- ["-XNoFoo", "-XFoo", "-XNoFoo"]@ will result in @["-XFoo", "-XNoFoo"]@, --- unlike the normal 'NubList', which is left-biased. Built on top of --- 'ordNubRight' and 'listUnionRight'. -newtype NubListR a = - NubListR { fromNubListR :: [a] } - deriving Eq - --- | Smart constructor for the NubListR type. -toNubListR :: Ord a => [a] -> NubListR a -toNubListR list = NubListR $ ordNubRight list - --- | Lift a function over lists to a function over NubListRs. -overNubListR :: Ord a => ([a] -> [a]) -> NubListR a -> NubListR a -overNubListR f (NubListR list) = toNubListR . 
f $ list - -instance Ord a => Monoid (NubListR a) where - mempty = NubListR [] - mappend = (<>) - -instance Ord a => Semigroup (NubListR a) where - (NubListR xs) <> (NubListR ys) = NubListR $ xs `listUnionRight` ys - -instance Show a => Show (NubListR a) where - show (NubListR list) = show list - -instance (Ord a, Read a) => Read (NubListR a) where - readPrec = readNubList toNubListR diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/Progress.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/Progress.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/Progress.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/Progress.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,67 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveFunctor #-} --- Note: This module was copied from cabal-install. - --- | A progress monad, which we use to report failure and logging from --- otherwise pure code. -module Distribution.Utils.Progress - ( Progress - , stepProgress - , failProgress - , foldProgress - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import qualified Data.Monoid as Mon - - --- | A type to represent the unfolding of an expensive long running --- calculation that may fail (or maybe not expensive, but complicated!) --- We may get intermediate steps before the final --- result which may be used to indicate progress and\/or logging messages. --- --- TODO: Apply Codensity to avoid left-associativity problem. --- See http://comonad.com/reader/2011/free-monads-for-less/ and --- http://blog.ezyang.com/2012/01/problem-set-the-codensity-transformation/ --- -data Progress step fail done = Step step (Progress step fail done) - | Fail fail - | Done done - deriving (Functor) - --- | Emit a step and then continue. 
--- -stepProgress :: step -> Progress step fail () -stepProgress step = Step step (Done ()) - --- | Fail the computation. -failProgress :: fail -> Progress step fail done -failProgress err = Fail err - --- | Consume a 'Progress' calculation. Much like 'foldr' for lists but with two --- base cases, one for a final result and one for failure. --- --- Eg to convert into a simple 'Either' result use: --- --- > foldProgress (flip const) Left Right --- -foldProgress :: (step -> a -> a) -> (fail -> a) -> (done -> a) - -> Progress step fail done -> a -foldProgress step err done = fold - where fold (Step s p) = step s (fold p) - fold (Fail f) = err f - fold (Done r) = done r - -instance Monad (Progress step fail) where - return = pure - p >>= f = foldProgress Step Fail f p - -instance Applicative (Progress step fail) where - pure a = Done a - p <*> x = foldProgress Step Fail (flip fmap x) p - -instance Monoid fail => Alternative (Progress step fail) where - empty = Fail Mon.mempty - p <|> q = foldProgress Step (const q) Done p diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/ShortText.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/ShortText.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/ShortText.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/ShortText.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,111 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - -module Distribution.Utils.ShortText - ( -- * 'ShortText' type - ShortText - , toShortText - , fromShortText - - -- * internal utilities - , decodeStringUtf8 - , encodeStringUtf8 - ) where - -import Prelude () -import Distribution.Compat.Prelude -import Distribution.Utils.String - -#if defined(MIN_VERSION_bytestring) -# if MIN_VERSION_bytestring(0,10,4) -# define HAVE_SHORTBYTESTRING 1 
-# endif -#endif - --- Hack for GHC bootstrapping --- --- Currently (as of GHC 8.1), GHC bootstraps Cabal by building --- binary and Cabal in one giant ghc --make command. This --- means no MIN_VERSION_binary macro is available. --- --- We could try to cleverly figure something out in this case, --- but there is a better plan: just use the unoptimized version --- of the Binary instance. We're not going to use it for anything --- real in any case. --- --- WARNING: Don't use MIN_VERSION_binary to smooth over a BC-break! --- -#ifndef MIN_VERSION_binary -#define MIN_VERSION_binary(x, y, z) 0 -#endif - -#if HAVE_SHORTBYTESTRING -import qualified Data.ByteString.Short as BS.Short -#endif - --- | Construct 'ShortText' from 'String' -toShortText :: String -> ShortText - --- | Convert 'ShortText' to 'String' -fromShortText :: ShortText -> String - --- | Compact representation of short 'Strings' --- --- The data is stored internally as UTF8 in an --- 'BS.Short.ShortByteString' when compiled against @bytestring >= --- 0.10.4@, and otherwise the fallback is to use plain old non-compat --- '[Char]'. --- --- Note: This type is for internal uses (such as e.g. 'PackageName') --- and shall not be exposed in Cabal's API --- --- @since 2.0.0.2 -#if HAVE_SHORTBYTESTRING -newtype ShortText = ST { unST :: BS.Short.ShortByteString } - deriving (Eq,Ord,Generic,Data,Typeable) - -# if MIN_VERSION_binary(0,8,1) -instance Binary ShortText where - put = put . unST - get = fmap ST get -# else -instance Binary ShortText where - put = put . BS.Short.fromShort . unST - get = fmap (ST . BS.Short.toShort) get -# endif - -toShortText = ST . BS.Short.pack . encodeStringUtf8 - -fromShortText = decodeStringUtf8 . BS.Short.unpack . unST -#else -newtype ShortText = ST { unST :: String } - deriving (Eq,Ord,Generic,Data,Typeable) - -instance Binary ShortText where - put = put . encodeStringUtf8 . unST - get = fmap (ST . 
decodeStringUtf8) get - -toShortText = ST - -fromShortText = unST -#endif - -instance NFData ShortText where - rnf = rnf . unST - -instance Show ShortText where - show = show . fromShortText - -instance Read ShortText where - readsPrec p = map (first toShortText) . readsPrec p - -instance Semigroup ShortText where - ST a <> ST b = ST (mappend a b) - -instance Monoid ShortText where - mempty = ST mempty - mappend = (<>) - -instance IsString ShortText where - fromString = toShortText diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/String.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/String.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/String.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/String.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,93 +0,0 @@ -module Distribution.Utils.String - ( -- * Encode to/from UTF8 - decodeStringUtf8 - , encodeStringUtf8 - ) where - -import Data.Word -import Data.Bits -import Data.Char (chr,ord) - --- | Decode 'String' from UTF8-encoded octets. --- --- Invalid data in the UTF8 stream (this includes code-points @U+D800@ --- through @U+DFFF@) will be decoded as the replacement character (@U+FFFD@). --- --- See also 'encodeStringUtf8' -decodeStringUtf8 :: [Word8] -> String -decodeStringUtf8 = go - where - go :: [Word8] -> String - go [] = [] - go (c : cs) - | c <= 0x7F = chr (fromIntegral c) : go cs - | c <= 0xBF = replacementChar : go cs - | c <= 0xDF = twoBytes c cs - | c <= 0xEF = moreBytes 3 0x800 cs (fromIntegral $ c .&. 0xF) - | c <= 0xF7 = moreBytes 4 0x10000 cs (fromIntegral $ c .&. 0x7) - | c <= 0xFB = moreBytes 5 0x200000 cs (fromIntegral $ c .&. 0x3) - | c <= 0xFD = moreBytes 6 0x4000000 cs (fromIntegral $ c .&. 0x1) - | otherwise = replacementChar : go cs - - twoBytes :: Word8 -> [Word8] -> String - twoBytes c0 (c1:cs') - | c1 .&. 
0xC0 == 0x80 - = let d = (fromIntegral (c0 .&. 0x1F) `shiftL` 6) - .|. fromIntegral (c1 .&. 0x3F) - in if d >= 0x80 - then chr d : go cs' - else replacementChar : go cs' - twoBytes _ cs' = replacementChar : go cs' - - moreBytes :: Int -> Int -> [Word8] -> Int -> [Char] - moreBytes 1 overlong cs' acc - | overlong <= acc, acc <= 0x10FFFF, acc < 0xD800 || 0xDFFF < acc - = chr acc : go cs' - - | otherwise - = replacementChar : go cs' - - moreBytes byteCount overlong (cn:cs') acc - | cn .&. 0xC0 == 0x80 - = moreBytes (byteCount-1) overlong cs' - ((acc `shiftL` 6) .|. fromIntegral cn .&. 0x3F) - - moreBytes _ _ cs' _ - = replacementChar : go cs' - - replacementChar = '\xfffd' - - --- | Encode 'String' to a list of UTF8-encoded octets --- --- Code-points in the @U+D800@-@U+DFFF@ range will be encoded --- as the replacement character (i.e. @U+FFFD@). --- --- See also 'decodeUtf8' -encodeStringUtf8 :: String -> [Word8] -encodeStringUtf8 [] = [] -encodeStringUtf8 (c:cs) - | c <= '\x07F' = w8 - : encodeStringUtf8 cs - | c <= '\x7FF' = (0xC0 .|. w8ShiftR 6 ) - : (0x80 .|. (w8 .&. 0x3F)) - : encodeStringUtf8 cs - | c <= '\xD7FF'= (0xE0 .|. w8ShiftR 12 ) - : (0x80 .|. (w8ShiftR 6 .&. 0x3F)) - : (0x80 .|. (w8 .&. 0x3F)) - : encodeStringUtf8 cs - | c <= '\xDFFF'= 0xEF : 0xBF : 0xBD -- U+FFFD - : encodeStringUtf8 cs - | c <= '\xFFFF'= (0xE0 .|. w8ShiftR 12 ) - : (0x80 .|. (w8ShiftR 6 .&. 0x3F)) - : (0x80 .|. (w8 .&. 0x3F)) - : encodeStringUtf8 cs - | otherwise = (0xf0 .|. w8ShiftR 18 ) - : (0x80 .|. (w8ShiftR 12 .&. 0x3F)) - : (0x80 .|. (w8ShiftR 6 .&. 0x3F)) - : (0x80 .|. (w8 .&. 0x3F)) - : encodeStringUtf8 cs - where - w8 = fromIntegral (ord c) :: Word8 - w8ShiftR :: Int -> Word8 - w8ShiftR = fromIntegral . 
shiftR (ord c) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/UnionFind.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/UnionFind.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Utils/UnionFind.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Utils/UnionFind.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,102 +0,0 @@ -{-# LANGUAGE NondecreasingIndentation #-} --- | A simple mutable union-find data structure. --- --- It is used in a unification algorithm for backpack mix-in linking. --- --- This implementation is based off of the one in \"The Essence of ML Type --- Inference\". (N.B. the union-find package is also based off of this.) --- -module Distribution.Utils.UnionFind ( - Point, - fresh, - find, - union, - equivalent, -) where - -import Data.STRef -import Control.Monad -import Control.Monad.ST - --- | A variable which can be unified; alternately, this can be thought --- of as an equivalence class with a distinguished representative. -newtype Point s a = Point (STRef s (Link s a)) - deriving (Eq) - --- | Mutable write to a 'Point' -writePoint :: Point s a -> Link s a -> ST s () -writePoint (Point v) = writeSTRef v - --- | Read the current value of 'Point'. -readPoint :: Point s a -> ST s (Link s a) -readPoint (Point v) = readSTRef v - --- | The internal data structure for a 'Point', which either records --- the representative element of an equivalence class, or a link to --- the 'Point' that actually stores the representative type. -data Link s a - -- NB: it is too bad we can't say STRef Int#; the weights remain boxed - = Info {-# UNPACK #-} !(STRef s Int) {-# UNPACK #-} !(STRef s a) - | Link {-# UNPACK #-} !(Point s a) - --- | Create a fresh equivalence class with one element. 
-fresh :: a -> ST s (Point s a) -fresh desc = do - weight <- newSTRef 1 - descriptor <- newSTRef desc - Point `fmap` newSTRef (Info weight descriptor) - --- | Flatten any chains of links, returning a 'Point' --- which points directly to the canonical representation. -repr :: Point s a -> ST s (Point s a) -repr point = readPoint point >>= \r -> - case r of - Link point' -> do - point'' <- repr point' - when (point'' /= point') $ do - writePoint point =<< readPoint point' - return point'' - Info _ _ -> return point - --- | Return the canonical element of an equivalence --- class 'Point'. -find :: Point s a -> ST s a -find point = - -- Optimize length 0 and 1 case at expense of - -- general case - readPoint point >>= \r -> - case r of - Info _ d_ref -> readSTRef d_ref - Link point' -> readPoint point' >>= \r' -> - case r' of - Info _ d_ref -> readSTRef d_ref - Link _ -> repr point >>= find - --- | Unify two equivalence classes, so that they share --- a canonical element. Keeps the descriptor of point2. -union :: Point s a -> Point s a -> ST s () -union refpoint1 refpoint2 = do - point1 <- repr refpoint1 - point2 <- repr refpoint2 - when (point1 /= point2) $ do - l1 <- readPoint point1 - l2 <- readPoint point2 - case (l1, l2) of - (Info wref1 dref1, Info wref2 dref2) -> do - weight1 <- readSTRef wref1 - weight2 <- readSTRef wref2 - -- Should be able to optimize the == case separately - if weight1 >= weight2 - then do - writePoint point2 (Link point1) - -- The weight calculation here seems a bit dodgy - writeSTRef wref1 (weight1 + weight2) - writeSTRef dref1 =<< readSTRef dref2 - else do - writePoint point1 (Link point2) - writeSTRef wref2 (weight1 + weight2) - _ -> error "UnionFind.union: repr invariant broken" - --- | Test if two points are in the same equivalence class. 
-equivalent :: Point s a -> Point s a -> ST s Bool -equivalent point1 point2 = liftM2 (==) (repr point1) (repr point2) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Verbosity.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Verbosity.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Verbosity.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Verbosity.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,300 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Verbosity --- Copyright : Ian Lynagh 2007 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- A 'Verbosity' type with associated utilities. --- --- There are 4 standard verbosity levels from 'silent', 'normal', --- 'verbose' up to 'deafening'. This is used for deciding what logging --- messages to print. --- --- Verbosity also is equipped with some internal settings which can be --- used to control at a fine granularity the verbosity of specific --- settings (e.g., so that you can trace only particular things you --- are interested in.) It's important to note that the instances --- for 'Verbosity' assume that this does not exist. - --- Verbosity for Cabal functions. 
- -module Distribution.Verbosity ( - -- * Verbosity - Verbosity, - silent, normal, verbose, deafening, - moreVerbose, lessVerbose, isVerboseQuiet, - intToVerbosity, flagToVerbosity, - showForCabal, showForGHC, - verboseNoFlags, verboseHasFlags, - modifyVerbosity, - - -- * Call stacks - verboseCallSite, verboseCallStack, - isVerboseCallSite, isVerboseCallStack, - - -- * Output markets - verboseMarkOutput, isVerboseMarkOutput, - verboseUnmarkOutput, - - -- * line-wrapping - verboseNoWrap, isVerboseNoWrap, - - -- * timestamps - verboseTimestamp, isVerboseTimestamp, - verboseNoTimestamp, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Distribution.ReadE -import Distribution.Compat.ReadP - -import Data.List (elemIndex) -import Data.Set (Set) -import qualified Data.Set as Set - -data Verbosity = Verbosity { - vLevel :: VerbosityLevel, - vFlags :: Set VerbosityFlag, - vQuiet :: Bool - } deriving (Generic) - -mkVerbosity :: VerbosityLevel -> Verbosity -mkVerbosity l = Verbosity { vLevel = l, vFlags = Set.empty, vQuiet = False } - -instance Show Verbosity where - showsPrec n = showsPrec n . vLevel - -instance Read Verbosity where - readsPrec n s = map (\(x,y) -> (mkVerbosity x,y)) (readsPrec n s) - -instance Eq Verbosity where - x == y = vLevel x == vLevel y - -instance Ord Verbosity where - compare x y = compare (vLevel x) (vLevel y) - -instance Enum Verbosity where - toEnum = mkVerbosity . toEnum - fromEnum = fromEnum . 
vLevel - -instance Bounded Verbosity where - minBound = mkVerbosity minBound - maxBound = mkVerbosity maxBound - -instance Binary Verbosity - -data VerbosityLevel = Silent | Normal | Verbose | Deafening - deriving (Generic, Show, Read, Eq, Ord, Enum, Bounded) - -instance Binary VerbosityLevel - --- We shouldn't print /anything/ unless an error occurs in silent mode -silent :: Verbosity -silent = mkVerbosity Silent - --- Print stuff we want to see by default -normal :: Verbosity -normal = mkVerbosity Normal - --- Be more verbose about what's going on -verbose :: Verbosity -verbose = mkVerbosity Verbose - --- Not only are we verbose ourselves (perhaps even noisier than when --- being "verbose"), but we tell everything we run to be verbose too -deafening :: Verbosity -deafening = mkVerbosity Deafening - -moreVerbose :: Verbosity -> Verbosity -moreVerbose v = - case vLevel v of - Silent -> v -- silent should stay silent - Normal -> v { vLevel = Verbose } - Verbose -> v { vLevel = Deafening } - Deafening -> v - -lessVerbose :: Verbosity -> Verbosity -lessVerbose v = - verboseQuiet $ - case vLevel v of - Deafening -> v -- deafening stays deafening - Verbose -> v { vLevel = Normal } - Normal -> v { vLevel = Silent } - Silent -> v - --- | Combinator for transforming verbosity level while retaining the --- original hidden state. --- --- For instance, the following property holds --- --- prop> isVerboseNoWrap (modifyVerbosity (max verbose) v) == isVerboseNoWrap v --- --- __Note__: you can use @modifyVerbosity (const v1) v0@ to overwrite --- @v1@'s flags with @v0@'s flags. 
--- --- @since 2.0.1.0 -modifyVerbosity :: (Verbosity -> Verbosity) -> Verbosity -> Verbosity -modifyVerbosity f v = v { vLevel = vLevel (f v) } - -intToVerbosity :: Int -> Maybe Verbosity -intToVerbosity 0 = Just (mkVerbosity Silent) -intToVerbosity 1 = Just (mkVerbosity Normal) -intToVerbosity 2 = Just (mkVerbosity Verbose) -intToVerbosity 3 = Just (mkVerbosity Deafening) -intToVerbosity _ = Nothing - -parseVerbosity :: ReadP r (Either Int Verbosity) -parseVerbosity = parseIntVerbosity <++ parseStringVerbosity - where - parseIntVerbosity = fmap Left (readS_to_P reads) - parseStringVerbosity = fmap Right $ do - level <- parseVerbosityLevel - _ <- skipSpaces - extras <- sepBy parseExtra skipSpaces - return (foldr (.) id extras (mkVerbosity level)) - parseVerbosityLevel = choice - [ string "silent" >> return Silent - , string "normal" >> return Normal - , string "verbose" >> return Verbose - , string "debug" >> return Deafening - , string "deafening" >> return Deafening - ] - parseExtra = char '+' >> choice - [ string "callsite" >> return verboseCallSite - , string "callstack" >> return verboseCallStack - , string "nowrap" >> return verboseNoWrap - , string "markoutput" >> return verboseMarkOutput - , string "timestamp" >> return verboseTimestamp - ] - -flagToVerbosity :: ReadE Verbosity -flagToVerbosity = ReadE $ \s -> - case readP_to_S (parseVerbosity >>= \r -> eof >> return r) s of - [(Left i, "")] -> - case intToVerbosity i of - Just v -> Right v - Nothing -> Left ("Bad verbosity: " ++ show i ++ - ". 
Valid values are 0..3") - [(Right v, "")] -> Right v - _ -> Left ("Can't parse verbosity " ++ s) - -showForCabal, showForGHC :: Verbosity -> String - -showForCabal v - | Set.null (vFlags v) - = maybe (error "unknown verbosity") show $ - elemIndex v [silent,normal,verbose,deafening] - | otherwise - = unwords $ (case vLevel v of - Silent -> "silent" - Normal -> "normal" - Verbose -> "verbose" - Deafening -> "debug") - : concatMap showFlag (Set.toList (vFlags v)) - where - showFlag VCallSite = ["+callsite"] - showFlag VCallStack = ["+callstack"] - showFlag VNoWrap = ["+nowrap"] - showFlag VMarkOutput = ["+markoutput"] - showFlag VTimestamp = ["+timestamp"] -showForGHC v = maybe (error "unknown verbosity") show $ - elemIndex v [silent,normal,__,verbose,deafening] - where __ = silent -- this will be always ignored by elemIndex - -data VerbosityFlag - = VCallStack - | VCallSite - | VNoWrap - | VMarkOutput - | VTimestamp - deriving (Generic, Show, Read, Eq, Ord, Enum, Bounded) - -instance Binary VerbosityFlag - --- | Turn on verbose call-site printing when we log. -verboseCallSite :: Verbosity -> Verbosity -verboseCallSite = verboseFlag VCallSite - --- | Turn on verbose call-stack printing when we log. -verboseCallStack :: Verbosity -> Verbosity -verboseCallStack = verboseFlag VCallStack - --- | Turn on @-----BEGIN CABAL OUTPUT-----@ markers for output --- from Cabal (as opposed to GHC, or system dependent). -verboseMarkOutput :: Verbosity -> Verbosity -verboseMarkOutput = verboseFlag VMarkOutput - --- | Turn off marking; useful for suppressing nondeterministic output. -verboseUnmarkOutput :: Verbosity -> Verbosity -verboseUnmarkOutput = verboseNoFlag VMarkOutput - --- | Disable line-wrapping for log messages. -verboseNoWrap :: Verbosity -> Verbosity -verboseNoWrap = verboseFlag VNoWrap - --- | Mark the verbosity as quiet -verboseQuiet :: Verbosity -> Verbosity -verboseQuiet v = v { vQuiet = True } - --- | Turn on timestamps for log messages. 
-verboseTimestamp :: Verbosity -> Verbosity -verboseTimestamp = verboseFlag VTimestamp - --- | Turn off timestamps for log messages. -verboseNoTimestamp :: Verbosity -> Verbosity -verboseNoTimestamp = verboseNoFlag VTimestamp - --- | Helper function for flag enabling functions -verboseFlag :: VerbosityFlag -> (Verbosity -> Verbosity) -verboseFlag flag v = v { vFlags = Set.insert flag (vFlags v) } - --- | Helper function for flag disabling functions -verboseNoFlag :: VerbosityFlag -> (Verbosity -> Verbosity) -verboseNoFlag flag v = v { vFlags = Set.delete flag (vFlags v) } - --- | Turn off all flags -verboseNoFlags :: Verbosity -> Verbosity -verboseNoFlags v = v { vFlags = Set.empty } - -verboseHasFlags :: Verbosity -> Bool -verboseHasFlags = not . Set.null . vFlags - --- | Test if we should output call sites when we log. -isVerboseCallSite :: Verbosity -> Bool -isVerboseCallSite = isVerboseFlag VCallSite - --- | Test if we should output call stacks when we log. -isVerboseCallStack :: Verbosity -> Bool -isVerboseCallStack = isVerboseFlag VCallStack - --- | Test if we should output markets. -isVerboseMarkOutput :: Verbosity -> Bool -isVerboseMarkOutput = isVerboseFlag VMarkOutput - --- | Test if line-wrapping is disabled for log messages. -isVerboseNoWrap :: Verbosity -> Bool -isVerboseNoWrap = isVerboseFlag VNoWrap - --- | Test if we had called 'lessVerbose' on the verbosity -isVerboseQuiet :: Verbosity -> Bool -isVerboseQuiet = vQuiet - --- | Test if if we should output timestamps when we log. -isVerboseTimestamp :: Verbosity -> Bool -isVerboseTimestamp = isVerboseFlag VTimestamp - --- | Helper function for flag testing functions. -isVerboseFlag :: VerbosityFlag -> Verbosity -> Bool -isVerboseFlag flag = (Set.member flag) . 
vFlags - --- $setup --- >>> import Test.QuickCheck (Arbitrary (..), arbitraryBoundedEnum) --- >>> instance Arbitrary VerbosityLevel where arbitrary = arbitraryBoundedEnum --- >>> instance Arbitrary Verbosity where arbitrary = fmap mkVerbosity arbitrary diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Version.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Version.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Distribution/Version.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Distribution/Version.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,262 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Version --- Copyright : Isaac Jones, Simon Marlow 2003-2004 --- Duncan Coutts 2008 --- License : BSD3 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Exports the 'Version' type along with a parser and pretty printer. A version --- is something like @\"1.3.3\"@. It also defines the 'VersionRange' data --- types. Version ranges are like @\">= 1.2 && < 2\"@. 
- -module Distribution.Version ( - -- * Package versions - Version, - version0, - mkVersion, - mkVersion', - versionNumbers, - nullVersion, - alterVersion, - - -- ** Backwards compatibility - showVersion, - - -- * Version ranges - VersionRange(..), - - -- ** Constructing - anyVersion, noVersion, - thisVersion, notThisVersion, - laterVersion, earlierVersion, - orLaterVersion, orEarlierVersion, - unionVersionRanges, intersectVersionRanges, - differenceVersionRanges, - invertVersionRange, - withinVersion, - majorBoundVersion, - betweenVersionsInclusive, - - -- ** Inspection - withinRange, - isAnyVersion, - isNoVersion, - isSpecificVersion, - simplifyVersionRange, - foldVersionRange, - foldVersionRange', - normaliseVersionRange, - stripParensVersionRange, - hasUpperBound, - hasLowerBound, - - -- ** Cata & ana - VersionRangeF (..), - cataVersionRange, - anaVersionRange, - hyloVersionRange, - projectVersionRange, - embedVersionRange, - - -- ** Utilities - wildcardUpperBound, - majorUpperBound, - - -- ** Modification - removeUpperBound, - removeLowerBound, - - -- * Version intervals view - asVersionIntervals, - VersionInterval, - LowerBound(..), - UpperBound(..), - Bound(..), - - -- ** 'VersionIntervals' abstract type - -- | The 'VersionIntervals' type and the accompanying functions are exposed - -- primarily for completeness and testing purposes. In practice - -- 'asVersionIntervals' is the main function to use to - -- view a 'VersionRange' as a bunch of 'VersionInterval's. 
- -- - VersionIntervals, - toVersionIntervals, - fromVersionIntervals, - withinIntervals, - versionIntervals, - mkVersionIntervals, - unionVersionIntervals, - intersectVersionIntervals, - invertVersionIntervals - - ) where - -import Distribution.Types.Version -import Distribution.Types.VersionRange -import Distribution.Types.VersionInterval - -------------------------------------------------------------------------------- --- Utilities on VersionRange requiring VersionInterval -------------------------------------------------------------------------------- - --- | Does this 'VersionRange' place any restriction on the 'Version' or is it --- in fact equivalent to 'AnyVersion'. --- --- Note this is a semantic check, not simply a syntactic check. So for example --- the following is @True@ (for all @v@). --- --- > isAnyVersion (EarlierVersion v `UnionVersionRanges` orLaterVersion v) --- -isAnyVersion :: VersionRange -> Bool -isAnyVersion vr = case asVersionIntervals vr of - [(LowerBound v InclusiveBound, NoUpperBound)] | isVersion0 v -> True - _ -> False - where - isVersion0 :: Version -> Bool - isVersion0 = (== mkVersion [0]) - - --- | This is the converse of 'isAnyVersion'. It check if the version range is --- empty, if there is no possible version that satisfies the version range. --- --- For example this is @True@ (for all @v@): --- --- > isNoVersion (EarlierVersion v `IntersectVersionRanges` LaterVersion v) --- -isNoVersion :: VersionRange -> Bool -isNoVersion vr = case asVersionIntervals vr of - [] -> True - _ -> False - --- | Is this version range in fact just a specific version? --- --- For example the version range @\">= 3 && <= 3\"@ contains only the version --- @3@. --- -isSpecificVersion :: VersionRange -> Maybe Version -isSpecificVersion vr = case asVersionIntervals vr of - [(LowerBound v InclusiveBound - ,UpperBound v' InclusiveBound)] - | v == v' -> Just v - _ -> Nothing - --- | Simplify a 'VersionRange' expression. 
For non-empty version ranges --- this produces a canonical form. Empty or inconsistent version ranges --- are left as-is because that provides more information. --- --- If you need a canonical form use --- @fromVersionIntervals . toVersionIntervals@ --- --- It satisfies the following properties: --- --- > withinRange v (simplifyVersionRange r) = withinRange v r --- --- > withinRange v r = withinRange v r' --- > ==> simplifyVersionRange r = simplifyVersionRange r' --- > || isNoVersion r --- > || isNoVersion r' --- -simplifyVersionRange :: VersionRange -> VersionRange -simplifyVersionRange vr - -- If the version range is inconsistent then we just return the - -- original since that has more information than ">1 && < 1", which - -- is the canonical inconsistent version range. - | null (versionIntervals vi) = vr - | otherwise = fromVersionIntervals vi - where - vi = toVersionIntervals vr - --- | The difference of two version ranges --- --- > withinRange v' (differenceVersionRanges vr1 vr2) --- > = withinRange v' vr1 && not (withinRange v' vr2) --- --- @since 1.24.1.0 -differenceVersionRanges :: VersionRange -> VersionRange -> VersionRange -differenceVersionRanges vr1 vr2 = - intersectVersionRanges vr1 (invertVersionRange vr2) - --- | The inverse of a version range --- --- > withinRange v' (invertVersionRange vr) --- > = not (withinRange v' vr) --- -invertVersionRange :: VersionRange -> VersionRange -invertVersionRange = - fromVersionIntervals . invertVersionIntervals . toVersionIntervals - --- | Given a version range, remove the highest upper bound. Example: @(>= 1 && < --- 3) || (>= 4 && < 5)@ is converted to @(>= 1 && < 3) || (>= 4)@. -removeUpperBound :: VersionRange -> VersionRange -removeUpperBound = fromVersionIntervals . relaxLastInterval . toVersionIntervals - --- | Given a version range, remove the lowest lower bound. --- Example: @(>= 1 && < 3) || (>= 4 && < 5)@ is converted to --- @(>= 0 && < 3) || (>= 4 && < 5)@. 
-removeLowerBound :: VersionRange -> VersionRange -removeLowerBound = fromVersionIntervals . relaxHeadInterval . toVersionIntervals - -------------------------------------------------------------------------------- --- Deprecated -------------------------------------------------------------------------------- - --- In practice this is not very useful because we normally use inclusive lower --- bounds and exclusive upper bounds. --- --- > withinRange v' (laterVersion v) = v' > v --- -betweenVersionsInclusive :: Version -> Version -> VersionRange -betweenVersionsInclusive v1 v2 = - intersectVersionRanges (orLaterVersion v1) (orEarlierVersion v2) - -{-# DEPRECATED betweenVersionsInclusive - "In practice this is not very useful because we normally use inclusive lower bounds and exclusive upper bounds" #-} - - - - --- | An extended variant of 'foldVersionRange' that also provides a view of the --- expression in which the syntactic sugar @\">= v\"@, @\"<= v\"@ and @\"== --- v.*\"@ is presented explicitly rather than in terms of the other basic --- syntax. --- -foldVersionRange' :: a -- ^ @\"-any\"@ version - -> (Version -> a) -- ^ @\"== v\"@ - -> (Version -> a) -- ^ @\"> v\"@ - -> (Version -> a) -- ^ @\"< v\"@ - -> (Version -> a) -- ^ @\">= v\"@ - -> (Version -> a) -- ^ @\"<= v\"@ - -> (Version -> Version -> a) -- ^ @\"== v.*\"@ wildcard. The - -- function is passed the - -- inclusive lower bound and the - -- exclusive upper bounds of the - -- range defined by the wildcard. - -> (Version -> Version -> a) -- ^ @\"^>= v\"@ major upper bound - -- The function is passed the - -- inclusive lower bound and the - -- exclusive major upper bounds - -- of the range defined by this - -- operator. - -> (a -> a -> a) -- ^ @\"_ || _\"@ union - -> (a -> a -> a) -- ^ @\"_ && _\"@ intersection - -> (a -> a) -- ^ @\"(_)\"@ parentheses - -> VersionRange -> a -foldVersionRange' anyv this later earlier orLater orEarlier - wildcard major union intersect parens = - cataVersionRange alg . 
normaliseVersionRange - where - alg AnyVersionF = anyv - alg (ThisVersionF v) = this v - alg (LaterVersionF v) = later v - alg (EarlierVersionF v) = earlier v - alg (OrLaterVersionF v) = orLater v - alg (OrEarlierVersionF v) = orEarlier v - alg (WildcardVersionF v) = wildcard v (wildcardUpperBound v) - alg (MajorBoundVersionF v) = major v (majorUpperBound v) - alg (UnionVersionRangesF v1 v2) = union v1 v2 - alg (IntersectVersionRangesF v1 v2) = intersect v1 v2 - alg (VersionRangeParensF v) = parens v -{-# DEPRECATED foldVersionRange' "Use cataVersionRange & normaliseVersionRange for more principled folding" #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/bugs-and-stability.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/bugs-and-stability.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/bugs-and-stability.rst 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/bugs-and-stability.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -Reporting Bugs and Stability of Cabal Interfaces -================================================ - -.. toctree:: - misc - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/concepts-and-development.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/concepts-and-development.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/concepts-and-development.rst 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/concepts-and-development.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -Package Concepts and Development -================================ - -.. 
toctree:: - :maxdepth: 2 - - developing-packages diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/config-and-install.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/config-and-install.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/config-and-install.rst 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/config-and-install.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -Configuration and Installing Packages -===================================== - -.. toctree:: - installing-packages diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/conf.py cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/conf.py --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/conf.py 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/conf.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,220 +0,0 @@ -# -*- coding: utf-8 -*- -# -# GHC Users Guide documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -import sys -import os -import sphinx_rtd_theme - -# Support for :base-ref:, etc. -sys.path.insert(0, os.path.abspath('.')) -import cabaldomain - -version = "2.4.0.1" - -extensions = ['sphinx.ext.extlinks', 'sphinx.ext.todo'] - -templates_path = ['_templates'] -source_suffix = '.rst' -source_encoding = 'utf-8-sig' -master_doc = 'index' - -# extlinks -- see http://www.sphinx-doc.org/en/stable/ext/extlinks.html -extlinks = { - 'issue': ('https://github.com/haskell/cabal/issues/%s', '#'), - - 'ghc-wiki': ('http://ghc.haskell.org/trac/ghc/wiki/%s', ''), - 'ghc-ticket': ('http://ghc.haskell.org/trac/ghc/ticket/%s', 'GHC #'), - - 'hackage-pkg': ('http://hackage.haskell.org/package/%s', ''), -} - -# General information about the project. 
-project = u'Cabal' -copyright = u'2003-2017, Cabal Team' -# N.B. version comes from ghc_config -release = version # The full version, including alpha/beta/rc tags. - -# Syntax highlighting -highlight_language = 'cabal' -#pygments_style = 'tango' - -primary_domain = 'cabal' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['.build', "*.gen.rst"] - -# -- Options for HTML output --------------------------------------------- - -# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' - -if not on_rtd: # only import and set the theme if we're building docs locally - import sphinx_rtd_theme - html_theme = 'sphinx_rtd_theme' - html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -html_title = "Cabal User's Guide" -html_short_title = "Cabal %s User's Guide" % release -html_logo = 'images/Cabal-dark.png' -html_static_path = ['images'] -# Convert quotes and dashes to typographically correct entities -html_use_smartypants = True -html_show_copyright = True -html_context = { - 'source_url_prefix': "https://github.com/haskell/cabal/tree/master/Cabal/doc/", - "display_github": True, - "github_host": "github.com", - "github_user": "haskell", - "github_repo": 'cabal', - "github_version": "master/", - "conf_py_path": "Cabal/doc/", - "source_suffix": '.rst', -} - - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. 
-htmlhelp_basename = 'CabalUsersGuide' - - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - 'inputenc': '', - 'utf8extra': '', - 'preamble': ''' -\usepackage{fontspec} -\usepackage{makeidx} -\setsansfont{DejaVu Sans} -\setromanfont{DejaVu Serif} -\setmonofont{DejaVu Sans Mono} -''', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ('index', 'users_guide.tex', u'GHC Users Guide Documentation', - u'GHC Team', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -latex_logo = 'images/logo.pdf' - -# If true, show page references after internal links. -latex_show_pagerefs = True - -# http://www.sphinx-doc.org/en/master/usage/extensions/todo.html -todo_include_todos = True - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('cabal', 'cabal', 'The Haskell Cabal', 'The Cabal Team', 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'CabalUsersGuide', u'Cabal Users Guide', - u'Cabal Team', 'CabalUsersGuide', 'The Haskell Cabal.', - 'Compilers'), -] - -from sphinx import addnodes -from docutils import nodes - -def parse_ghci_cmd(env, sig, signode): - name = sig.split(';')[0] - sig = sig.replace(';', '') - signode += addnodes.desc_name(name, sig) - return name - -def parse_flag(env, sig, signode): - import re - names = [] - for i, flag in enumerate(sig.split(',')): - flag = flag.strip() - sep = '=' - parts = flag.split('=') - if len(parts) == 1: - sep=' ' - parts = flag.split() - if len(parts) == 0: continue - - name = parts[0] - names.append(name) - sig = sep + ' '.join(parts[1:]) - sig = re.sub(ur'<([-a-zA-Z ]+)>', ur'⟨\1⟩', sig) - if i > 0: - signode += addnodes.desc_name(', ', ', ') - signode += addnodes.desc_name(name, name) - if len(sig) > 0: - signode += addnodes.desc_addname(sig, sig) - - return names[0] - -def setup(app): - from sphinx.util.docfields import Field, TypedField - - increase_python_stack() - - # the :ghci-cmd: directive used in ghci.rst - app.add_object_type('ghci-cmd', 'ghci-cmd', - parse_node=parse_ghci_cmd, - objname='GHCi command', - indextemplate='pair: %s; GHCi command') - - app.add_object_type('ghc-flag', 'ghc-flag', - objname='GHC command-line option', - parse_node=parse_flag, - indextemplate='pair: %s; GHC option', - doc_field_types=[ - Field('since', label='Introduced in GHC version', names=['since']), - Field('default', label='Default value', names=['default']), - Field('static') - ]) - - app.add_object_type('rts-flag', 'rts-flag', - objname='runtime system command-line option', - parse_node=parse_flag, - indextemplate='pair: %s; RTS option', - doc_field_types=[ - Field('since', label='Introduced in GHC version', names=['since']), - ]) - - cabaldomain.setup(app) - -def increase_python_stack(): - # Workaround sphinx-build 
recursion limit overflow: - # pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL) - # RuntimeError: maximum recursion depth exceeded while pickling an object - # - # Default python allows recursion depth of 1000 calls. - sys.setrecursionlimit(10000) - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/developing-packages.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/developing-packages.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/developing-packages.rst 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/developing-packages.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,3480 +0,0 @@ -Quickstart -========== - -Lets assume we have created a project directory and already have a -Haskell module or two. - -Every project needs a name, we'll call this example "proglet". - -.. highlight:: console - -:: - - $ cd proglet/ - $ ls - Proglet.hs - -It is assumed that (apart from external dependencies) all the files that -make up a package live under a common project root directory. This -simple example has all the project files in one directory, but most -packages will use one or more subdirectories. - -To turn this into a Cabal package we need two extra files in the -project's root directory: - -- ``proglet.cabal``: containing package metadata and build information. - -- ``Setup.hs``: usually containing a few standardized lines of code, - but can be customized if necessary. - -We can create both files manually or we can use ``cabal init`` to create -them for us. - -Using "cabal init" ------------------- - -The ``cabal init`` command is interactive. It asks us a number of -questions starting with the package name and version. - -:: - - $ cabal init - Package name [default "proglet"]? - Package version [default "0.1"]? - ... - -It also asks questions about various other bits of package metadata. 
For -a package that you never intend to distribute to others, these fields -can be left blank. - -One of the important questions is whether the package contains a library -or an executable. Libraries are collections of Haskell modules that can -be re-used by other Haskell libraries and programs, while executables -are standalone programs. - -:: - - What does the package build: - 1) Library - 2) Executable - Your choice? - -For the moment these are the only choices. For more complex packages -(e.g. a library and multiple executables or test suites) the ``.cabal`` -file can be edited afterwards. - -Finally, ``cabal init`` creates the initial ``proglet.cabal`` and -``Setup.hs`` files, and depending on your choice of license, a -``LICENSE`` file as well. - -:: - - Generating LICENSE... - Generating Setup.hs... - Generating proglet.cabal... - - You may want to edit the .cabal file and add a Description field. - -As this stage the ``proglet.cabal`` is not quite complete and before you -are able to build the package you will need to edit the file and add -some build information about the library or executable. - -Editing the .cabal file ------------------------ - -.. highlight:: cabal - -Load up the ``.cabal`` file in a text editor. The first part of the -``.cabal`` file has the package metadata and towards the end of the file -you will find the :pkg-section:`executable` or :pkg-section:`library` section. - -You will see that the fields that have yet to be filled in are commented -out. Cabal files use "``--``" Haskell-style comment syntax. (Note that -comments are only allowed on lines on their own. Trailing comments on -other lines are not allowed because they could be confused with program -options.) 
- -If you selected earlier to create a library package then your ``.cabal`` -file will have a section that looks like this: - -:: - - library - exposed-modules: Proglet - -- other-modules: - -- build-depends: - -Alternatively, if you selected an executable then there will be a -section like: - -:: - - executable proglet - -- main-is: - -- other-modules: - -- build-depends: - -The build information fields listed (but commented out) are just the few -most important and common fields. There are many others that are covered -later in this chapter. - -Most of the build information fields are the same between libraries and -executables. The difference is that libraries have a number of "exposed" -modules that make up the public interface of the library, while -executables have a file containing a ``Main`` module. - -The name of a library always matches the name of the package, so it is -not specified in the library section. Executables often follow the name -of the package too, but this is not required and the name is given -explicitly. - -Modules included in the package -------------------------------- - -For a library, ``cabal init`` looks in the project directory for files -that look like Haskell modules and adds all the modules to the -:pkg-field:`library:exposed-modules` field. For modules that do not form part -of your package's public interface, you can move those modules to the -:pkg-field:`other-modules` field. Either way, all modules in the library need -to be listed. - -For an executable, ``cabal init`` does not try to guess which file -contains your program's ``Main`` module. You will need to fill in the -:pkg-field:`executable:main-is` field with the file name of your program's -``Main`` module (including ``.hs`` or ``.lhs`` extension). Other modules -included in the executable should be listed in the :pkg-field:`other-modules` -field. 
- -Modules imported from other packages ------------------------------------- - -While your library or executable may include a number of modules, it -almost certainly also imports a number of external modules from the -standard libraries or other pre-packaged libraries. (These other -libraries are of course just Cabal packages that contain a library.) - -You have to list all of the library packages that your library or -executable imports modules from. Or to put it another way: you have to -list all the other packages that your package depends on. - -For example, suppose the example ``Proglet`` module imports the module -``Data.Map``. The ``Data.Map`` module comes from the ``containers`` -package, so we must list it: - -:: - - library - exposed-modules: Proglet - other-modules: - build-depends: containers, base == 4.* - -In addition, almost every package also depends on the ``base`` library -package because it exports the standard ``Prelude`` module plus other -basic modules like ``Data.List``. - -You will notice that we have listed ``base == 4.*``. This gives a -constraint on the version of the base package that our package will work -with. The most common kinds of constraints are: - -- ``pkgname >= n`` -- ``pkgname ^>= n`` (since Cabal 2.0) -- ``pkgname >= n && < m`` -- ``pkgname == n.*`` (since Cabal 1.6) - -The last is just shorthand, for example ``base == 4.*`` means exactly -the same thing as ``base >= 4 && < 5``. Please refer to the documentation -on the :pkg-field:`build-depends` field for more information. - -Building the package --------------------- - -For simple packages that's it! We can now try configuring and building -the package: - -.. code-block:: console - - $ cabal configure - $ cabal build - -Assuming those two steps worked then you can also install the package: - -.. code-block:: console - - $ cabal install - -For libraries this makes them available for use in GHCi or to be used by -other packages. 
For executables it installs the program so that you can -run it (though you may first need to adjust your system's ``$PATH``). - -Next steps ----------- - -What we have covered so far should be enough for very simple packages -that you use on your own system. - -The next few sections cover more details needed for more complex -packages and details needed for distributing packages to other people. - -The previous chapter covers building and installing packages -- your own -packages or ones developed by other people. - -Package concepts -================ - -Before diving into the details of writing packages it helps to -understand a bit about packages in the Haskell world and the particular -approach that Cabal takes. - -The point of packages ---------------------- - -Packages are a mechanism for organising and distributing code. Packages -are particularly suited for "programming in the large", that is building -big systems by using and re-using code written by different people at -different times. - -People organise code into packages based on functionality and -dependencies. Social factors are also important: most packages have a -single author, or a relatively small team of authors. - -Packages are also used for distribution: the idea is that a package can -be created in one place and be moved to a different computer and be -usable in that different environment. There are a surprising number of -details that have to be got right for this to work, and a good package -system helps to simply this process and make it reliable. - -Packages come in two main flavours: libraries of reusable code, and -complete programs. Libraries present a code interface, an API, while -programs can be run directly. In the Haskell world, library packages -expose a set of Haskell modules as their public interface. Cabal -packages can contain a library or executables or both. - -Some programming languages have packages as a builtin language concept. 
-For example in Java, a package provides a local namespace for types and -other definitions. In the Haskell world, packages are not a part of the -language itself. Haskell programs consist of a number of modules, and -packages just provide a way to partition the modules into sets of -related functionality. Thus the choice of module names in Haskell is -still important, even when using packages. - -Package names and versions --------------------------- - -All packages have a name, e.g. "HUnit". Package names are assumed to be -unique. Cabal package names may contain letters, numbers and hyphens, -but not spaces and may also not contain a hyphened section consisting of -only numbers. The namespace for Cabal packages is flat, not -hierarchical. - -Packages also have a version, e.g "1.1". This matches the typical way in -which packages are developed. Strictly speaking, each version of a -package is independent, but usually they are very similar. Cabal package -versions follow the conventional numeric style, consisting of a sequence -of digits such as "1.0.1" or "2.0". There are a range of common -conventions for "versioning" packages, that is giving some meaning to -the version number in terms of changes in the package, such as -e.g. `SemVer `__; however, for packages intended to be -distributed via Hackage Haskell's `Package Versioning Policy`_ applies -(see also the `PVP/SemVer FAQ section `__). - -The combination of package name and version is called the *package ID* -and is written with a hyphen to separate the name and version, e.g. -"HUnit-1.1". - -For Cabal packages, the combination of the package name and version -*uniquely* identifies each package. Or to put it another way: two -packages with the same name and version are considered to *be* the same. - -Strictly speaking, the package ID only identifies each Cabal *source* -package; the same Cabal source package can be configured and built in -different ways. 
There is a separate installed package ID that uniquely -identifies each installed package instance. Most of the time however, -users need not be aware of this detail. - -Kinds of package: Cabal vs GHC vs system ----------------------------------------- - -It can be slightly confusing at first because there are various -different notions of package floating around. Fortunately the details -are not very complicated. - -Cabal packages - Cabal packages are really source packages. That is they contain - Haskell (and sometimes C) source code. - - Cabal packages can be compiled to produce GHC packages. They can - also be translated into operating system packages. - -GHC packages - This is GHC's view on packages. GHC only cares about library - packages, not executables. Library packages have to be registered - with GHC for them to be available in GHCi or to be used when - compiling other programs or packages. - - The low-level tool ``ghc-pkg`` is used to register GHC packages and - to get information on what packages are currently registered. - - You never need to make GHC packages manually. When you build and - install a Cabal package containing a library then it gets registered - with GHC automatically. - - Haskell implementations other than GHC have essentially the same - concept of registered packages. For the most part, Cabal hides the - slight differences. - -Operating system packages - On operating systems like Linux and Mac OS X, the system has a - specific notion of a package and there are tools for installing and - managing packages. - - The Cabal package format is designed to allow Cabal packages to be - translated, mostly-automatically, into operating system packages. - They are usually translated 1:1, that is a single Cabal package - becomes a single system package. 
- - It is also possible to make Windows installers from Cabal packages, - though this is typically done for a program together with all of its - library dependencies, rather than packaging each library separately. - -Unit of distribution --------------------- - -The Cabal package is the unit of distribution. What this means is that -each Cabal package can be distributed on its own in source or binary -form. Of course there may dependencies between packages, but there is -usually a degree of flexibility in which versions of packages can work -together so distributing them independently makes sense. - -It is perhaps easiest to see what being "the unit of distribution" -means by contrast to an alternative approach. Many projects are made up -of several interdependent packages and during development these might -all be kept under one common directory tree and be built and tested -together. When it comes to distribution however, rather than -distributing them all together in a single tarball, it is required that -they each be distributed independently in their own tarballs. - -Cabal's approach is to say that if you can specify a dependency on a -package then that package should be able to be distributed -independently. Or to put it the other way round, if you want to -distribute it as a single unit, then it should be a single package. - -Explicit dependencies and automatic package management ------------------------------------------------------- - -Cabal takes the approach that all packages dependencies are specified -explicitly and specified in a declarative way. The point is to enable -automatic package management. This means tools like ``cabal`` can -resolve dependencies and install a package plus all of its dependencies -automatically. Alternatively, it is possible to mechanically (or mostly -mechanically) translate Cabal packages into system packages and let the -system package manager install dependencies automatically. 
- -It is important to track dependencies accurately so that packages can -reliably be moved from one system to another system and still be able to -build it there. Cabal is therefore relatively strict about specifying -dependencies. For example Cabal's default build system will not even let -code build if it tries to import a module from a package that isn't -listed in the ``.cabal`` file, even if that package is actually -installed. This helps to ensure that there are no "untracked -dependencies" that could cause the code to fail to build on some other -system. - -The explicit dependency approach is in contrast to the traditional -"./configure" approach where instead of specifying dependencies -declaratively, the ``./configure`` script checks if the dependencies are -present on the system. Some manual work is required to transform a -``./configure`` based package into a Linux distribution package (or -similar). This conversion work is usually done by people other than the -package author(s). The practical effect of this is that only the most -popular packages will benefit from automatic package management. -Instead, Cabal forces the original author to specify the dependencies -but the advantage is that every package can benefit from automatic -package management. - -The "./configure" approach tends to encourage packages that adapt -themselves to the environment in which they are built, for example by -disabling optional features so that they can continue to work when a -particular dependency is not available. This approach makes sense in a -world where installing additional dependencies is a tiresome manual -process and so minimising dependencies is important. The automatic -package management view is that packages should just declare what they -need and the package manager will take responsibility for ensuring that -all the dependencies are installed. - -Sometimes of course optional features and optional dependencies do make -sense. 
Cabal packages can have optional features and varying -dependencies. These conditional dependencies are still specified in a -declarative way however and remain compatible with automatic package -management. The need to remain compatible with automatic package -management means that Cabal's conditional dependencies system is a bit -less flexible than with the "./configure" approach. - -.. note:: - `GNU autoconf places restrictions on paths, including the - path that the user builds a package from. - `_ - Package authors using ``build-type: configure`` should be aware of - these restrictions; because users may be unexpectedly constrained and - face mysterious errors, it is recommended that ``build-type: configure`` - is only used where strictly necessary. - -Portability ------------ - -One of the purposes of Cabal is to make it easier to build packages on -different platforms (operating systems and CPU architectures), with -different compiler versions and indeed even with different Haskell -implementations. (Yes, there are Haskell implementations other than -GHC!) - -Cabal provides abstractions of features present in different Haskell -implementations and wherever possible it is best to take advantage of -these to increase portability. Where necessary however it is possible to -use specific features of specific implementations. - -For example a package author can list in the package's ``.cabal`` what -language extensions the code uses. This allows Cabal to figure out if -the language extension is supported by the Haskell implementation that -the user picks. Additionally, certain language extensions such as -Template Haskell require special handling from the build system and by -listing the extension it provides the build system with enough -information to do the right thing. - -Another similar example is linking with foreign libraries. 
Rather than -specifying GHC flags directly, the package author can list the libraries -that are needed and the build system will take care of using the right -flags for the compiler. Additionally this makes it easier for tools to -discover what system C libraries a package needs, which is useful for -tracking dependencies on system libraries (e.g. when translating into -Linux distribution packages). - -In fact both of these examples fall into the category of explicitly -specifying dependencies. Not all dependencies are other Cabal packages. -Foreign libraries are clearly another kind of dependency. It's also -possible to think of language extensions as dependencies: the package -depends on a Haskell implementation that supports all those extensions. - -Where compiler-specific options are needed however, there is an "escape -hatch" available. The developer can specify implementation-specific -options and more generally there is a configuration mechanism to -customise many aspects of how a package is built depending on the -Haskell implementation, the operating system, computer architecture and -user-specified configuration flags. - -Developing packages -=================== - -The Cabal package is the unit of distribution. When installed, its -purpose is to make available: - -- One or more Haskell programs. - -- At most one library, exposing a number of Haskell modules. - -However having both a library and executables in a package does not work -very well; if the executables depend on the library, they must -explicitly list all the modules they directly or indirectly import from -that library. Fortunately, starting with Cabal 1.8.0.4, executables can -also declare the package that they are in as a dependency, and Cabal -will treat them as if they were in another package that depended on the -library. 
- -Internally, the package may consist of much more than a bunch of Haskell -modules: it may also have C source code and header files, source code -meant for preprocessing, documentation, test cases, auxiliary tools etc. - -A package is identified by a globally-unique *package name*, which -consists of one or more alphanumeric words separated by hyphens. To -avoid ambiguity, each of these words should contain at least one letter. -Chaos will result if two distinct packages with the same name are -installed on the same system. A particular version of the package is -distinguished by a *version number*, consisting of a sequence of one or -more integers separated by dots. These can be combined to form a single -text string called the *package ID*, using a hyphen to separate the name -from the version, e.g. "``HUnit-1.1``". - -.. Note:: - - Packages are not part of the Haskell language; they simply - populate the hierarchical space of module names. In GHC 6.6 and later a - program may contain multiple modules with the same name if they come - from separate packages; in all other current Haskell systems packages - may not overlap in the modules they provide, including hidden modules. - -Creating a package ------------------- - -Suppose you have a directory hierarchy containing the source files that -make up your package. You will need to add two more files to the root -directory of the package: - -:file:`{package-name}.cabal` - a Unicode UTF-8 text file containing a package description. For - details of the syntax of this file, see the section on - `package descriptions`_. - -:file:`Setup.hs` - a single-module Haskell program to perform various setup tasks (with - the interface described in the section on :ref:`installing-packages`). - This module should import only modules that will be present in all Haskell - implementations, including modules of the Cabal library. 
The content of - this file is determined by the :pkg-field:`build-type` setting in the - ``.cabal`` file. In most cases it will be trivial, calling on the Cabal - library to do most of the work. - -Once you have these, you can create a source bundle of this directory -for distribution. Building of the package is discussed in the section on -:ref:`installing-packages`. - -One of the purposes of Cabal is to make it easier to build a package -with different Haskell implementations. So it provides abstractions of -features present in different Haskell implementations and wherever -possible it is best to take advantage of these to increase portability. -Where necessary however it is possible to use specific features of -specific implementations. For example one of the pieces of information a -package author can put in the package's ``.cabal`` file is what language -extensions the code uses. This is far preferable to specifying flags for -a specific compiler as it allows Cabal to pick the right flags for the -Haskell implementation that the user picks. It also allows Cabal to -figure out if the language extension is even supported by the Haskell -implementation that the user picks. Where compiler-specific options are -needed however, there is an "escape hatch" available. The developer can -specify implementation-specific options and more generally there is a -configuration mechanism to customise many aspects of how a package is -built depending on the Haskell implementation, the Operating system, -computer architecture and user-specified configuration flags. 
- -:: - - name: Foo - version: 1.0 - - library - build-depends: base >= 4 && < 5 - exposed-modules: Foo - extensions: ForeignFunctionInterface - ghc-options: -Wall - if os(windows) - build-depends: Win32 >= 2.1 && < 2.6 - -Example: A package containing a simple library -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The HUnit package contains a file ``HUnit.cabal`` containing: - -:: - - name: HUnit - version: 1.1.1 - synopsis: A unit testing framework for Haskell - homepage: http://hunit.sourceforge.net/ - category: Testing - author: Dean Herington - license: BSD3 - license-file: LICENSE - cabal-version: >= 1.10 - build-type: Simple - - library - build-depends: base >= 2 && < 4 - exposed-modules: Test.HUnit.Base, Test.HUnit.Lang, - Test.HUnit.Terminal, Test.HUnit.Text, Test.HUnit - default-extensions: CPP - -and the following ``Setup.hs``: - -.. code-block:: haskell - - import Distribution.Simple - main = defaultMain - -Example: A package containing executable programs -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -:: - - name: TestPackage - version: 0.0 - synopsis: Small package with two programs - author: Angela Author - license: BSD3 - build-type: Simple - cabal-version: >= 1.8 - - executable program1 - build-depends: HUnit >= 1.1.1 && < 1.2 - main-is: Main.hs - hs-source-dirs: prog1 - - executable program2 - main-is: Main.hs - build-depends: HUnit >= 1.1.1 && < 1.2 - hs-source-dirs: prog2 - other-modules: Utils - -with ``Setup.hs`` the same as above. 
- -Example: A package containing a library and executable programs -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -:: - - name: TestPackage - version: 0.0 - synopsis: Package with library and two programs - license: BSD3 - author: Angela Author - build-type: Simple - cabal-version: >= 1.8 - - library - build-depends: HUnit >= 1.1.1 && < 1.2 - exposed-modules: A, B, C - - executable program1 - main-is: Main.hs - hs-source-dirs: prog1 - other-modules: A, B - - executable program2 - main-is: Main.hs - hs-source-dirs: prog2 - other-modules: A, C, Utils - -with ``Setup.hs`` the same as above. Note that any library modules -required (directly or indirectly) by an executable must be listed again. - -The trivial setup script used in these examples uses the *simple build -infrastructure* provided by the Cabal library (see -`Distribution.Simple <../release/cabal-latest/doc/API/Cabal/Distribution-Simple.html>`__). -The simplicity lies in its interface rather that its implementation. It -automatically handles preprocessing with standard preprocessors, and -builds packages for all the Haskell implementations. - -The simple build infrastructure can also handle packages where building -is governed by system-dependent parameters, if you specify a little more -(see the section on `system-dependent parameters`_). -A few packages require `more elaborate solutions `_. - -Package descriptions --------------------- - -The package description file must have a name ending in "``.cabal``". It -must be a Unicode text file encoded using valid UTF-8. There must be -exactly one such file in the directory. The first part of the name is -usually the package name, and some of the tools that operate on Cabal -packages require this; specifically, Hackage rejects packages which -don't follow this rule. - -In the package description file, lines whose first non-whitespace -characters are "``--``" are treated as comments and ignored. 
- -This file should contain of a number global property descriptions and -several sections. - -- The `package properties`_ describe the package - as a whole, such as name, license, author, etc. - -- Optionally, a number of *configuration flags* can be declared. These - can be used to enable or disable certain features of a package. (see - the section on `configurations`_). - -- The (optional) library section specifies the `library`_ properties and - relevant `build information`_. - -- Following is an arbitrary number of executable sections which describe - an executable program and relevant `build information`_. - -Each section consists of a number of property descriptions in the form -of field/value pairs, with a syntax roughly like mail message headers. - -- Case is not significant in field names, but is significant in field - values. - -- To continue a field value, indent the next line relative to the field - name. - -- Field names may be indented, but all field values in the same section - must use the same indentation. - -- Tabs are *not* allowed as indentation characters due to a missing - standard interpretation of tab width. - -- To get a blank line in a field value, use an indented "``.``" - -The syntax of the value depends on the field. Field types include: - -*token*, *filename*, *directory* - Either a sequence of one or more non-space non-comma characters, or - a quoted string in Haskell 98 lexical syntax. The latter can be used - for escaping whitespace, for example: - ``ghc-options: -Wall "-with-rtsopts=-T -I1"``. Unless otherwise - stated, relative filenames and directories are interpreted from the - package root directory. -*freeform*, *URL*, *address* - An arbitrary, uninterpreted string. -*identifier* - A letter followed by zero or more alphanumerics or underscores. -*compiler* - A compiler flavor (one of: ``GHC``, ``UHC`` or ``LHC``) - followed by a version range. For example, ``GHC ==6.10.3``, or - ``LHC >=0.6 && <0.8``. 
- -Modules and preprocessors -^^^^^^^^^^^^^^^^^^^^^^^^^ - -Haskell module names listed in the :pkg-field:`library:exposed-modules` and -:pkg-field:`library:other-modules` fields may correspond to Haskell source -files, i.e. with names ending in "``.hs``" or "``.lhs``", or to inputs for -various Haskell preprocessors. The simple build infrastructure understands the -extensions: - -- ``.gc`` (:hackage-pkg:`greencard`) -- ``.chs`` (:hackage-pkg:`c2hs`) -- ``.hsc`` (:hackage-pkg:`hsc2hs`) -- ``.y`` and ``.ly`` (happy_) -- ``.x`` (alex_) -- ``.cpphs`` (cpphs_) - -When building, Cabal will automatically run the appropriate preprocessor -and compile the Haskell module it produces. For the ``c2hs`` and -``hsc2hs`` preprocessors, Cabal will also automatically add, compile and -link any C sources generated by the preprocessor (produced by -``hsc2hs``'s ``#def`` feature or ``c2hs``'s auto-generated wrapper -functions). Dependencies on pre-processors are specified via the -:pkg-field:`build-tools` or :pkg-field:`build-tool-depends` fields. - -Some fields take lists of values, which are optionally separated by -commas, except for the :pkg-field:`build-depends` field, where the commas are -mandatory. - -Some fields are marked as required. All others are optional, and unless -otherwise specified have empty default values. - -Package properties -^^^^^^^^^^^^^^^^^^ - -These fields may occur in the first top-level properties section and -describe the package as a whole: - -.. pkg-field:: name: package-name (required) - - The unique name of the package, without the version number. - - As pointed out in the section on `package descriptions`_, some - tools require the package-name specified for this field to match - the package description's file-name :file:`{package-name}.cabal`. - - Package names are case-sensitive and must match the regular expression - (i.e. 
alphanumeric "words" separated by dashes; each alphanumeric - word must contain at least one letter): - ``[[:digit:]]*[[:alpha:]][[:alnum:]]*(-[[:digit:]]*[[:alpha:]][[:alnum:]]*)*``. - - Or, expressed in ABNF_: - - .. code-block:: abnf - - package-name = package-name-part *("-" package-name-part) - package-name-part = *DIGIT UALPHA *UALNUM - - UALNUM = UALPHA / DIGIT - UALPHA = ... ; set of alphabetic Unicode code-points - - .. note:: - - Hackage restricts package names to the ASCII subset. - -.. pkg-field:: version: numbers (required) - - The package version number, usually consisting of a sequence of - natural numbers separated by dots, i.e. as the regular - expression ``[0-9]+([.][0-9]+)*`` or expressed in ABNF_: - - .. code-block:: abnf - - package-version = 1*DIGIT *("." 1*DIGIT) - -.. pkg-field:: cabal-version: >= x.y - - The version of the Cabal specification that this package description - uses. The Cabal specification does slowly evolve, introducing new - features and occasionally changing the meaning of existing features. - By specifying which version of the spec you are using it enables - programs which process the package description to know what syntax - to expect and what each part means. - - For historical reasons this is always expressed using *>=* version - range syntax. No other kinds of version range make sense, in - particular upper bounds do not make sense. In future this field will - specify just a version number, rather than a version range. - - The version number you specify will affect both compatibility and - behaviour. Most tools (including the Cabal library and cabal - program) understand a range of versions of the Cabal specification. - Older tools will of course only work with older versions of the - Cabal specification. Most of the time, tools that are too old will - recognise this fact and produce a suitable error message. - - As for behaviour, new versions of the Cabal spec can change the - meaning of existing syntax. 
This means if you want to take advantage - of the new meaning or behaviour then you must specify the newer - Cabal version. Tools are expected to use the meaning and behaviour - appropriate to the version given in the package description. - - In particular, the syntax of package descriptions changed - significantly with Cabal version 1.2 and the :pkg-field:`cabal-version` - field is now required. Files written in the old syntax are still - recognized, so if you require compatibility with very old Cabal - versions then you may write your package description file using the - old syntax. Please consult the user's guide of an older Cabal - version for a description of that syntax. - -.. pkg-field:: build-type: identifier - - :default: ``Custom`` or ``Simple`` - - The type of build used by this package. Build types are the - constructors of the - `BuildType <../release/cabal-latest/doc/API/Cabal/Distribution-PackageDescription.html#t:BuildType>`__ - type. This field is optional and when missing, its default value - is inferred according to the following rules: - - - When :pkg-field:`cabal-version` is set to ``2.2`` or higher, - the default is ``Simple`` unless a :pkg-section:`custom-setup` - exists, in which case the inferred default is ``Custom``. - - - For lower :pkg-field:`cabal-version` values, the default is - ``Custom`` unconditionally. - - If the build type is anything other than ``Custom``, then the - ``Setup.hs`` file *must* be exactly the standardized content - discussed below. This is because in these cases, ``cabal`` will - ignore the ``Setup.hs`` file completely, whereas other methods of - package management, such as ``runhaskell Setup.hs [CMD]``, still - rely on the ``Setup.hs`` file. - - For build type ``Simple``, the contents of ``Setup.hs`` must be: - - .. 
code-block:: haskell - - import Distribution.Simple - main = defaultMain - - For build type ``Configure`` (see the section on `system-dependent - parameters`_ below), the contents of - ``Setup.hs`` must be: - - .. code-block:: haskell - - import Distribution.Simple - main = defaultMainWithHooks autoconfUserHooks - - For build type ``Make`` (see the section on `more complex packages`_ below), - the contents of ``Setup.hs`` must be: - - .. code-block:: haskell - - import Distribution.Make - main = defaultMain - - For build type ``Custom``, the file ``Setup.hs`` can be customized, - and will be used both by ``cabal`` and other tools. - - For most packages, the build type ``Simple`` is sufficient. - -.. pkg-field:: license: identifier - - :default: ``AllRightsReserved`` - - The type of license under which this package is distributed. License - names are the constants of the - `License <../release/cabal-latest/doc/API/Cabal/Distribution-License.html#t:License>`__ - type. - -.. pkg-field:: license-file: filename -.. pkg-field:: license-files: filename list - - The name of a file(s) containing the precise copyright license for - this package. The license file(s) will be installed with the - package. - - If you have multiple license files then use the :pkg-field:`license-files` - field instead of (or in addition to) the :pkg-field:`license-file` field. - -.. pkg-field:: copyright: freeform - - The content of a copyright notice, typically the name of the holder - of the copyright on the package and the year(s) from which copyright - is claimed. For example:: - - copyright: (c) 2006-2007 Joe Bloggs - -.. pkg-field:: author: freeform - - The original author of the package. - - Remember that ``.cabal`` files are Unicode, using the UTF-8 - encoding. - -.. pkg-field:: maintainer: address - - The current maintainer or maintainers of the package. This is an - e-mail address to which users should send bug reports, feature - requests and patches. - -.. 
pkg-field:: stability: freeform - - The stability level of the package, e.g. ``alpha``, - ``experimental``, ``provisional``, ``stable``. - -.. pkg-field:: homepage: URL - - The package homepage. - -.. pkg-field:: bug-reports: URL - - The URL where users should direct bug reports. This would normally - be either: - - - A ``mailto:`` URL, e.g. for a person or a mailing list. - - - An ``http:`` (or ``https:``) URL for an online bug tracking - system. - - For example Cabal itself uses a web-based bug tracking system - - :: - - bug-reports: https://github.com/haskell/cabal/issues - -.. pkg-field:: package-url: URL - - The location of a source bundle for the package. The distribution - should be a Cabal package. - -.. pkg-field:: synopsis: freeform - - A very short description of the package, for use in a table of - packages. This is your headline, so keep it short (one line) but as - informative as possible. Save space by not including the package - name or saying it's written in Haskell. - -.. pkg-field:: description: freeform - - Description of the package. This may be several paragraphs, and - should be aimed at a Haskell programmer who has never heard of your - package before. - - For library packages, this field is used as prologue text by - :ref:`setup-haddock` and thus may contain the same markup as Haddock_ - documentation comments. - -.. pkg-field:: category: freeform - - A classification category for future use by the package catalogue - Hackage_. These categories have not - yet been specified, but the upper levels of the module hierarchy - make a good start. - -.. pkg-field:: tested-with: compiler list - - A list of compilers and versions against which the package has been - tested (or at least built). - -.. pkg-field:: data-files: filename list - - A list of files to be installed for run-time use by the package. - This is useful for packages that use a large amount of static data, - such as tables of values or code templates. 
Cabal provides a way to - `find these files at run-time <#accessing-data-files-from-package-code>`_. - - A limited form of ``*`` wildcards in file names, for example - ``data-files: images/*.png`` matches all the ``.png`` files in the - ``images`` directory. ``data-files: audio/**/*.mp3`` matches all - the ``.mp3`` files in the ``audio`` directory, including - subdirectories. - - The specific limitations of this wildcard syntax are - - - ``*`` wildcards are only allowed in place of the file name, not - in the directory name or file extension. It must replace the - whole file name (e.g., ``*.html`` is allowed, but - ``chapter-*.html`` is not). If a wildcard is used, it must be - used with an extension, so ``data-files: data/*`` is not - allowed. - - - Prior to Cabal 2.4, when matching a wildcard plus extension, a - file's full extension must match exactly, so ``*.gz`` matches - ``foo.gz`` but not ``foo.tar.gz``. This restriction has been - lifted when ``cabal-version: 2.4`` or greater so that ``*.gz`` - does match ``foo.tar.gz`` - - - ``*`` wildcards will not match if the file name is empty (e.g., - ``*.html`` will not match ``foo/.html``). - - - ``**`` wildcards can only appear as the final path component - before the file name (e.g., ``data/**/images/*.jpg`` is not - allowed). If a ``**`` wildcard is used, then the file name must - include a ``*`` wildcard (e.g., ``data/**/README.rst`` is not - allowed). - - - A wildcard that does not match any files is an error. - - The reason for providing only a very limited form of wildcard is to - concisely express the common case of a large number of related files - of the same file type without making it too easy to accidentally - include unwanted files. - - On efficiency: if you use ``**`` patterns, the directory tree will - be walked starting with the parent directory of the ``**``. If - that's the root of the project, this might include ``.git/``, - ``dist-newstyle/``, or other large directories! 
To avoid this - behaviour, put the files that wildcards will match against in - their own folder. - - ``**`` wildcards are available starting in Cabal 2.4. - -.. pkg-field:: data-dir: directory - - The directory where Cabal looks for data files to install, relative - to the source directory. By default, Cabal will look in the source - directory itself. - -.. pkg-field:: extra-source-files: filename list - - A list of additional files to be included in source distributions - built with :ref:`setup-sdist`. As with :pkg-field:`data-files` it can use - a limited form of ``*`` wildcards in file names. - -.. pkg-field:: extra-doc-files: filename list - - A list of additional files to be included in source distributions, - and also copied to the html directory when Haddock documentation is - generated. As with :pkg-field:`data-files` it can use a limited form of - ``*`` wildcards in file names. - -.. pkg-field:: extra-tmp-files: filename list - - A list of additional files or directories to be removed by - :ref:`setup-clean`. These would typically be additional files created by - additional hooks, such as the scheme described in the section on - `system-dependent parameters`_ - -Library -^^^^^^^ - -.. pkg-section:: library name - :synopsis: Library build information. - - Build information for libraries. - - Currently, there can only be one publicly exposed library in a - package, and its name is the same as package name set by global - :pkg-field:`name` field. In this case, the ``name`` argument to - the :pkg-section:`library` section must be omitted. - - Starting with Cabal 2.0, private internal sub-library components - can be defined by using setting the ``name`` field to a name - different from the current package's name; see section on - :ref:`Internal Libraries ` for more information. - -The library section should contain the following fields: - -.. 
pkg-field:: exposed-modules: identifier list - - :required: if this package contains a library - - A list of modules added by this package. - -.. pkg-field:: virtual-modules: identifier list - :since: 2.2 - - A list of virtual modules provided by this package. Virtual modules - are modules without a source file. See for example the ``GHC.Prim`` - module from the ``ghc-prim`` package. Modules listed here will not be - built, but still end up in the list of ``exposed-modules`` in the - installed package info when the package is registered in the package - database. - -.. pkg-field:: exposed: boolean - - :default: ``True`` - - Some Haskell compilers (notably GHC) support the notion of packages - being "exposed" or "hidden" which means the modules they provide can - be easily imported without always having to specify which package - they come from. However this only works effectively if the modules - provided by all exposed packages do not overlap (otherwise a module - import would be ambiguous). - - Almost all new libraries use hierarchical module names that do not - clash, so it is very uncommon to have to use this field. However it - may be necessary to set ``exposed: False`` for some old libraries - that use a flat module namespace or where it is known that the - exposed modules would clash with other common modules. - -.. pkg-field:: reexported-modules: exportlist - :since: 1.22 - - Supported only in GHC 7.10 and later. A list of modules to - *reexport* from this package. The syntax of this field is - ``orig-pkg:Name as NewName`` to reexport module ``Name`` from - ``orig-pkg`` with the new name ``NewName``. We also support - abbreviated versions of the syntax: if you omit ``as NewName``, - we'll reexport without renaming; if you omit ``orig-pkg``, then we - will automatically figure out which package to reexport from, if - it's unambiguous. 
- - Reexported modules are useful for compatibility shims when a package - has been split into multiple packages, and they have the useful - property that if a package provides a module, and another package - reexports it under the same name, these are not considered a - conflict (as would be the case with a stub module.) They can also be - used to resolve name conflicts. - -.. pkg-field:: signatures: signature list - :since: 2.0 - - Supported only in GHC 8.2 and later. A list of `module signatures `__ required by this package. - - Module signatures are part of the - `Backpack `__ extension to - the Haskell module system. - - Packages that do not export any modules and only export required signatures - are called "signature-only packages", and their signatures are subjected to - `signature thinning - `__. - - - -The library section may also contain build information fields (see the -section on `build information`_). - -.. _sublibs: - -**Internal Libraries** - -Cabal 2.0 and later support "internal libraries", which are extra named -libraries (as opposed to the usual unnamed library section). For -example, suppose that your test suite needs access to some internal -modules in your library, which you do not otherwise want to export. You -could put these modules in an internal library, which the main library -and the test suite :pkg-field:`build-depends` upon. 
Then your Cabal file might -look something like this: - -:: - - cabal-version: 2.0 - name: foo - version: 0.1.0.0 - license: BSD3 - build-type: Simple - - library foo-internal - exposed-modules: Foo.Internal - -- NOTE: no explicit constraints on base needed - -- as they're inherited from the 'library' stanza - build-depends: base - - library - exposed-modules: Foo.Public - build-depends: foo-internal, base >= 4.3 && < 5 - - test-suite test-foo - type: exitcode-stdio-1.0 - main-is: test-foo.hs - -- NOTE: no constraints on 'foo-internal' as same-package - -- dependencies implicitly refer to the same package instance - build-depends: foo-internal, base - -Internal libraries are also useful for packages that define multiple -executables, but do not define a publically accessible library. Internal -libraries are only visible internally in the package (so they can only -be added to the :pkg-field:`build-depends` of same-package libraries, -executables, test suites, etc.) Internal libraries locally shadow any -packages which have the same name; consequently, don't name an internal -library with the same name as an external dependency if you need to be -able to refer to the external dependency in a -:pkg-field:`build-depends` declaration. - -Shadowing can be used to vendor an external dependency into a package -and thus emulate *private dependencies*. 
Below is an example based on -a real-world use case: - -:: - - cabal-version: 2.2 - name: haddock-library - version: 1.6.0 - - library - build-depends: - , base ^>= 4.11.1.0 - , bytestring ^>= 0.10.2.0 - , containers ^>= 0.4.2.1 || ^>= 0.5.0.0 - , transformers ^>= 0.5.0.0 - - hs-source-dirs: src - - -- internal sub-lib - build-depends: attoparsec - - exposed-modules: - Documentation.Haddock - - library attoparsec - build-depends: - , base ^>= 4.11.1.0 - , bytestring ^>= 0.10.2.0 - , deepseq ^>= 1.4.0.0 - - hs-source-dirs: vendor/attoparsec-0.13.1.0 - - -- NB: haddock-library needs only small part of lib:attoparsec - -- internally, so we only bundle that subset here - exposed-modules: - Data.Attoparsec.ByteString - Data.Attoparsec.Combinator - - other-modules: - Data.Attoparsec.Internal - - ghc-options: -funbox-strict-fields -Wall -fwarn-tabs -O2 - - -Opening an interpreter session -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -While developing a package, it is often useful to make its code -available inside an interpreter session. This can be done with the -``repl`` command: - -.. code-block:: console - - $ cabal repl - -The name comes from the acronym -`REPL `__, -which stands for "read-eval-print-loop". By default ``cabal repl`` loads -the first component in a package. If the package contains several named -components, the name can be given as an argument to ``repl``. The name -can be also optionally prefixed with the component's type for -disambiguation purposes. Example: - -.. code-block:: console - - $ cabal repl foo - $ cabal repl exe:foo - $ cabal repl test:bar - $ cabal repl bench:baz - -Freezing dependency versions -"""""""""""""""""""""""""""" - -If a package is built in several different environments, such as a -development environment, a staging environment and a production -environment, it may be necessary or desirable to ensure that the same -dependency versions are selected in each environment. This can be done -with the ``freeze`` command: - -.. 
code-block:: console - - $ cabal freeze - -The command writes the selected version for all dependencies to the -``cabal.config`` file. All environments which share this file will use -the dependency versions specified in it. - -Generating dependency version bounds -"""""""""""""""""""""""""""""""""""" - -Cabal also has the ability to suggest dependency version bounds that -conform to `Package Versioning Policy`_, which is -a recommended versioning system for publicly released Cabal packages. -This is done by running the ``gen-bounds`` command: - -.. code-block:: console - - $ cabal gen-bounds - -For example, given the following dependencies specified in -:pkg-field:`build-depends`: - -:: - - build-depends: - foo == 0.5.2 - bar == 1.1 - -``gen-bounds`` will suggest changing them to the following: - -:: - - build-depends: - foo >= 0.5.2 && < 0.6 - bar >= 1.1 && < 1.2 - -Listing outdated dependency version bounds -"""""""""""""""""""""""""""""""""""""""""" - -Manually updating dependency version bounds in a ``.cabal`` file or a -freeze file can be tedious, especially when there's a lot of -dependencies. The ``cabal outdated`` command is designed to help with -that. It will print a list of packages for which there is a new -version on Hackage that is outside the version bound specified in the -``build-depends`` field. The ``outdated`` command can also be -configured to act on the freeze file (both old- and new-style) and -ignore major (or all) version bumps on Hackage for a subset of -dependencies. - -The following flags are supported by the ``outdated`` command: - -``--freeze-file`` - Read dependency version bounds from the freeze file (``cabal.config``) - instead of the package description file (``$PACKAGENAME.cabal``). - ``--v1-freeze-file`` is an alias for this flag starting in Cabal 2.4. -``--new-freeze-file`` - Read dependency version bounds from the new-style freeze file - (by default, ``cabal.project.freeze``) instead of the package - description file. 
``--v2-freeze-file`` is an alias for this flag - starting in Cabal 2.4. -``--project-file`` *PROJECTFILE* - :since: 2.4 - - Read dependency version bounds from the new-style freeze file - related to the named project file (i.e., ``$PROJECTFILE.freeze``) - instead of the package description file. If multiple ``--project-file`` - flags are provided, only the final one is considered. This flag - must only be passed in when ``--new-freeze-file`` is present. -``--simple-output`` - Print only the names of outdated dependencies, one per line. -``--exit-code`` - Exit with a non-zero exit code when there are outdated dependencies. -``-q, --quiet`` - Don't print any output. Implies ``-v0`` and ``--exit-code``. -``--ignore`` *PACKAGENAMES* - Don't warn about outdated dependency version bounds for the packages in this - list. -``--minor`` *[PACKAGENAMES]* - Ignore major version bumps for these packages. E.g. if there's a version 2.0 - of a package ``pkg`` on Hackage and the freeze file specifies the constraint - ``pkg == 1.9``, ``cabal outdated --freeze --minor=pkg`` will only consider - the ``pkg`` outdated when there's a version of ``pkg`` on Hackage satisfying - ``pkg > 1.9 && < 2.0``. ``--minor`` can also be used without arguments, in - that case major version bumps are ignored for all packages. - -Examples: - -.. code-block:: console - - $ cd /some/package - $ cabal outdated - Outdated dependencies: - haskell-src-exts <1.17 (latest: 1.19.1) - language-javascript <0.6 (latest: 0.6.0.9) - unix ==2.7.2.0 (latest: 2.7.2.1) - - $ cabal outdated --simple-output - haskell-src-exts - language-javascript - unix - - $ cabal outdated --ignore=haskell-src-exts - Outdated dependencies: - language-javascript <0.6 (latest: 0.6.0.9) - unix ==2.7.2.0 (latest: 2.7.2.1) - - $ cabal outdated --ignore=haskell-src-exts,language-javascript,unix - All dependencies are up to date. - - $ cabal outdated --ignore=haskell-src-exts,language-javascript,unix -q - $ echo $? 
- 0 - - $ cd /some/other/package - $ cabal outdated --freeze-file - Outdated dependencies: - HTTP ==4000.3.3 (latest: 4000.3.4) - HUnit ==1.3.1.1 (latest: 1.5.0.0) - - $ cabal outdated --freeze-file --ignore=HTTP --minor=HUnit - Outdated dependencies: - HUnit ==1.3.1.1 (latest: 1.3.1.2) - - -Executables -^^^^^^^^^^^ - -.. pkg-section:: executable name - :synopsis: Executable build info section. - - Executable sections (if present) describe executable programs contained - in the package and must have an argument after the section label, which - defines the name of the executable. This is a freeform argument but may - not contain spaces. - -The executable may be described using the following fields, as well as -build information fields (see the section on `build information`_). - -.. pkg-field:: main-is: filename (required) - - The name of the ``.hs`` or ``.lhs`` file containing the ``Main`` - module. Note that it is the ``.hs`` filename that must be listed, - even if that file is generated using a preprocessor. The source file - must be relative to one of the directories listed in - :pkg-field:`hs-source-dirs`. Further, while the name of the file may - vary, the module itself must be named ``Main``. - -.. pkg-field:: scope: token - :since: 2.0 - - Whether the executable is ``public`` (default) or ``private``, i.e. meant to - be run by other programs rather than the user. Private executables are - installed into `$libexecdir/$libexecsubdir`. - -Running executables -""""""""""""""""""" - -You can have Cabal build and run your executables by using the ``run`` -command: - -.. code-block:: console - - $ cabal run EXECUTABLE [-- EXECUTABLE_FLAGS] - -This command will configure, build and run the executable -``EXECUTABLE``. The double dash separator is required to distinguish -executable flags from ``run``'s own flags. If there is only one -executable defined in the whole package, the executable's name can be -omitted. 
See the output of ``cabal help run`` for a list of options you -can pass to ``cabal run``. - -Test suites -^^^^^^^^^^^ - -.. pkg-section:: test-suite name - :synopsis: Test suite build information. - - Test suite sections (if present) describe package test suites and must - have an argument after the section label, which defines the name of the - test suite. This is a freeform argument, but may not contain spaces. It - should be unique among the names of the package's other test suites, the - package's executables, and the package itself. Using test suite sections - requires at least Cabal version 1.9.2. - -The test suite may be described using the following fields, as well as -build information fields (see the section on `build information`_). - -.. pkg-field:: type: interface (required) - - The interface type and version of the test suite. Cabal supports two - test suite interfaces, called ``exitcode-stdio-1.0`` and - ``detailed-0.9``. Each of these types may require or disallow other - fields as described below. - -Test suites using the ``exitcode-stdio-1.0`` interface are executables -that indicate test failure with a non-zero exit code when run; they may -provide human-readable log information through the standard output and -error channels. The ``exitcode-stdio-1.0`` type requires the ``main-is`` -field. - -.. pkg-field:: main-is: filename - :synopsis: Module containing tests main function. - - :required: ``exitcode-stdio-1.0`` - :disallowed: ``detailed-0.9`` - - The name of the ``.hs`` or ``.lhs`` file containing the ``Main`` - module. Note that it is the ``.hs`` filename that must be listed, - even if that file is generated using a preprocessor. The source file - must be relative to one of the directories listed in - :pkg-field:`hs-source-dirs`. This field is analogous to the ``main-is`` field - of an executable section. - -Test suites using the ``detailed-0.9`` interface are modules exporting -the symbol ``tests :: IO [Test]``. 
The ``Test`` type is exported by the -module ``Distribution.TestSuite`` provided by Cabal. For more details, -see the example below. - -The ``detailed-0.9`` interface allows Cabal and other test agents to -inspect a test suite's results case by case, producing detailed human- -and machine-readable log files. The ``detailed-0.9`` interface requires -the :pkg-field:`test-module` field. - -.. pkg-field:: test-module: identifier - - :required: ``detailed-0.9`` - :disallowed: ``exitcode-stdio-1.0`` - - The module exporting the ``tests`` symbol. - -Example: Package using ``exitcode-stdio-1.0`` interface -""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The example package description and executable source file below -demonstrate the use of the ``exitcode-stdio-1.0`` interface. - -.. code-block:: cabal - :caption: foo.cabal - - Name: foo - Version: 1.0 - License: BSD3 - Cabal-Version: >= 1.9.2 - Build-Type: Simple - - Test-Suite test-foo - type: exitcode-stdio-1.0 - main-is: test-foo.hs - build-depends: base >= 4 && < 5 - -.. code-block:: haskell - :caption: test-foo.hs - - module Main where - - import System.Exit (exitFailure) - - main = do - putStrLn "This test always fails!" - exitFailure - -Example: Package using ``detailed-0.9`` interface -""""""""""""""""""""""""""""""""""""""""""""""""" - -The example package description and test module source file below -demonstrate the use of the ``detailed-0.9`` interface. The test module -also develops a simple implementation of the interface set by -``Distribution.TestSuite``, but in actual usage the implementation would -be provided by the library that provides the testing facility. - -.. code-block:: cabal - :caption: bar.cabal - - Name: bar - Version: 1.0 - License: BSD3 - Cabal-Version: >= 1.9.2 - Build-Type: Simple - - Test-Suite test-bar - type: detailed-0.9 - test-module: Bar - build-depends: base >= 4 && < 5, Cabal >= 1.9.2 && < 2 - - -.. 
code-block:: haskell - :caption: Bar.hs - - module Bar ( tests ) where - - import Distribution.TestSuite - - tests :: IO [Test] - tests = return [ Test succeeds, Test fails ] - where - succeeds = TestInstance - { run = return $ Finished Pass - , name = "succeeds" - , tags = [] - , options = [] - , setOption = \_ _ -> Right succeeds - } - fails = TestInstance - { run = return $ Finished $ Fail "Always fails!" - , name = "fails" - , tags = [] - , options = [] - , setOption = \_ _ -> Right fails - } - -Running test suites -""""""""""""""""""" - -You can have Cabal run your test suites using its built-in test runner: - -:: - - $ cabal configure --enable-tests - $ cabal build - $ cabal test - -See the output of ``cabal help test`` for a list of options you can pass -to ``cabal test``. - -Benchmarks -^^^^^^^^^^ - -.. pkg-section:: benchmark name - :since: 1.9.2 - :synopsis: Benchmark build information. - - Benchmark sections (if present) describe benchmarks contained in the - package and must have an argument after the section label, which defines - the name of the benchmark. This is a freeform argument, but may not - contain spaces. It should be unique among the names of the package's - other benchmarks, the package's test suites, the package's executables, - and the package itself. Using benchmark sections requires at least Cabal - version 1.9.2. - -The benchmark may be described using the following fields, as well as -build information fields (see the section on `build information`_). - -.. pkg-field:: type: interface (required) - - The interface type and version of the benchmark. At the moment Cabal - only support one benchmark interface, called ``exitcode-stdio-1.0``. - -Benchmarks using the ``exitcode-stdio-1.0`` interface are executables -that indicate failure to run the benchmark with a non-zero exit code -when run; they may provide human-readable information through the -standard output and error channels. - -.. 
pkg-field:: main-is: filename - - :required: ``exitcode-stdio-1.0`` - - The name of the ``.hs`` or ``.lhs`` file containing the ``Main`` - module. Note that it is the ``.hs`` filename that must be listed, - even if that file is generated using a preprocessor. The source file - must be relative to one of the directories listed in - :pkg-field:`hs-source-dirs`. This field is analogous to the ``main-is`` - field of an executable section. Further, while the name of the file may - vary, the module itself must be named ``Main``. - -Example: Package using ``exitcode-stdio-1.0`` interface -""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -The example package description and executable source file below -demonstrate the use of the ``exitcode-stdio-1.0`` interface. - -.. code-block:: cabal - :caption: foo.cabal - :name: foo-bench.cabal - - Name: foo - Version: 1.0 - License: BSD3 - Cabal-Version: >= 1.9.2 - Build-Type: Simple - - Benchmark bench-foo - type: exitcode-stdio-1.0 - main-is: bench-foo.hs - build-depends: base >= 4 && < 5, time >= 1.1 && < 1.7 - -.. code-block:: haskell - :caption: bench-foo.hs - - {-# LANGUAGE BangPatterns #-} - module Main where - - import Data.Time.Clock - - fib 0 = 1 - fib 1 = 1 - fib n = fib (n-1) + fib (n-2) - - main = do - start <- getCurrentTime - let !r = fib 20 - end <- getCurrentTime - putStrLn $ "fib 20 took " ++ show (diffUTCTime end start) - -Running benchmarks -"""""""""""""""""" - -You can have Cabal run your benchmark using its built-in benchmark -runner: - -:: - - $ cabal configure --enable-benchmarks - $ cabal build - $ cabal bench - -See the output of ``cabal help bench`` for a list of options you can -pass to ``cabal bench``. - -Foreign libraries -^^^^^^^^^^^^^^^^^ - -Foreign libraries are system libraries intended to be linked against -programs written in C or other "foreign" languages. 
They -come in two primary flavours: dynamic libraries (``.so`` files on Linux, -``.dylib`` files on OSX, ``.dll`` files on Windows, etc.) are linked against -executables when the executable is run (or even lazily during -execution), while static libraries (``.a`` files on Linux/OSX, ``.lib`` -files on Windows) get linked against the executable at compile time. - -Foreign libraries only work with GHC 7.8 and later. - -A typical stanza for a foreign library looks like - -:: - - foreign-library myforeignlib - type: native-shared - lib-version-info: 6:3:2 - - if os(Windows) - options: standalone - mod-def-file: MyForeignLib.def - - other-modules: MyForeignLib.SomeModule - MyForeignLib.SomeOtherModule - build-depends: base >=4.7 && <4.9 - hs-source-dirs: src - c-sources: csrc/MyForeignLibWrapper.c - default-language: Haskell2010 - - -.. pkg-section:: foreign-library name - :since: 2.0 - :synopsis: Foreign library build information. - - Build information for `foreign libraries`_. - -.. pkg-field:: type: foreign library type - - Cabal recognizes ``native-static`` and ``native-shared`` here, although - we currently only support building `native-shared` libraries. - -.. pkg-field:: options: foreign library option list - - Options for building the foreign library, typically specific to the - specified type of foreign library. Currently we only support - ``standalone`` here. A standalone dynamic library is one that does not - have any dependencies on other (Haskell) shared libraries; without - the ``standalone`` option the generated library would have dependencies - on the Haskell runtime library (``libHSrts``), the base library - (``libHSbase``), etc. Currently, ``standalone`` *must* be used on Windows - and *must not* be used on any other platform. - -.. pkg-field:: mod-def-file: filename - - This option can only be used when creating dynamic Windows libraries - (that is, when using ``native-shared`` and the ``os`` is ``Windows``). 
If - used, it must be a path to a *module definition file*. The details of - module definition files are beyond the scope of this document; see the - `GHC `_ - manual for some details and some further pointers. - -.. pkg-field:: lib-version-info: current:revision:age - - This field is currently only used on Linux. - - This field specifies a Libtool-style version-info field that sets - an appropriate ABI version for the foreign library. Note that the - three numbers specified in this field do not directly specify the - actual ABI version: ``6:3:2`` results in library version ``4.2.3``. - - With this field set, the SONAME of the library is set, and symlinks - are installed. - - How you should bump this field on an ABI change depends on the - breakage you introduce: - - - Programs using the previous version may use the new version as - drop-in replacement, and programs using the new version can also - work with the previous one. In other words, no recompiling nor - relinking is needed. In this case, bump ``revision`` only, don't - touch current nor age. - - Programs using the previous version may use the new version as - drop-in replacement, but programs using the new version may use - APIs not present in the previous one. In other words, a program - linking against the new version may fail with "unresolved - symbols" if linking against the old version at runtime: set - revision to 0, bump current and age. - - Programs may need to be changed, recompiled, and relinked in - order to use the new version. Bump current, set revision and age - to 0. - - Also refer to the Libtool documentation on the version-info field. - -.. pkg-field:: lib-version-linux: version - - This field is only used on Linux. - - Specifies the library ABI version directly for foreign libraries - built on Linux: so specifying ``4.2.3`` causes a library - ``libfoo.so.4.2.3`` to be built with SONAME ``libfoo.so.4``, and - appropriate symlinks ``libfoo.so.4`` and ``libfoo.so`` to be - installed. 
- -Note that typically foreign libraries should export a way to initialize -and shutdown the Haskell runtime. In the example above, this is done by -the ``csrc/MyForeignLibWrapper.c`` file, which might look something like - -.. code-block:: c - - #include - #include "HsFFI.h" - - HsBool myForeignLibInit(void){ - int argc = 2; - char *argv[] = { "+RTS", "-A32m", NULL }; - char **pargv = argv; - - // Initialize Haskell runtime - hs_init(&argc, &pargv); - - // do any other initialization here and - // return false if there was a problem - return HS_BOOL_TRUE; - } - - void myForeignLibExit(void){ - hs_exit(); - } - -With modern ghc regular libraries are installed in directories that contain -package keys. This isn't usually a problem because the package gets registered -in ghc's package DB and so we can figure out what the location of the library -is. Foreign libraries however don't get registered, which means that we'd have -to have a way of finding out where a platform library got installed (other than by -searching the ``lib/`` directory). Instead, we install foreign libraries in -``~/.cabal/lib``, much like we install executables in ``~/.cabal/bin``. - -Build information -^^^^^^^^^^^^^^^^^ -.. pkg-section:: None - -The following fields may be optionally present in a library, executable, -test suite or benchmark section, and give information for the building -of the corresponding library or executable. See also the sections on -`system-dependent parameters`_ and `configurations`_ for a way to supply -system-dependent values for these fields. - -.. pkg-field:: build-depends: library list - - Declares the *library* dependencies required to build the current - package component; see :pkg-field:`build-tool-depends` for - declaring build-time *tool* dependencies. External library - dependencies should be annotated with a version constraint. 
- - **Library Names** - - External libraries are identified by the package's name they're - provided by (currently a package can only publicly expose its - main library component; in future, packages with multiple exposed - public library components will be supported and a syntax for - referring to public sub-libraries will be provided). - - In order to specify an intra-package dependency on an internal - library component you can use the unqualified name of the - library component. Note that locally defined sub-library - names shadow external package names of the same name. See section on - :ref:`Internal Libraries ` for examples and more information. - - **Version Constraints** - - Version constraints use the operators ``==, >=, >, <, <=`` and a - version number. Multiple constraints can be combined using ``&&`` or - ``||``. If no version constraint is specified, any version is - assumed to be acceptable. For example: - - :: - - library - build-depends: - base >= 2, - foo >= 1.2.3 && < 1.3, - bar - - Dependencies like ``foo >= 1.2.3 && < 1.3`` turn out to be very - common because it is recommended practice for package versions to - correspond to API versions (see PVP_). - - Since Cabal 1.6, there is a special wildcard syntax to help with - such ranges - - :: - - build-depends: foo ==1.2.* - - It is only syntactic sugar. It is exactly equivalent to - ``foo >= 1.2 && < 1.3``. - - .. Warning:: - - A potential pitfall of the wildcard syntax is that the - constraint ``nats == 1.0.*`` doesn't match the release - ``nats-1`` because the version ``1`` is lexicographically less - than ``1.0``. This is not an issue with the caret-operator - ``^>=`` described below. 
- - Starting with Cabal 2.0, there's a new version operator to express - PVP_-style major upper bounds conveniently, and is inspired by similar - syntactic sugar found in other language ecosystems where it's often - called the "Caret" operator: - - :: - - build-depends: - foo ^>= 1.2.3.4, - bar ^>= 1 - - This allows to assert the positive knowledge that this package is - *known* to be semantically compatible with the releases - ``foo-1.2.3.4`` and ``bar-1`` respectively. The information - encoded via such ``^>=``-assertions is used by the cabal solver to - infer version constraints describing semantically compatible - version ranges according to the PVP_ contract (see below). - - Another way to say this is that ``foo < 1.3`` expresses *negative* - information, i.e. "``foo-1.3`` or ``foo-1.4.2`` will *not* be - compatible"; whereas ``foo ^>= 1.2.3.4`` asserts the *positive* - information that "``foo-1.2.3.4`` is *known* to be compatible" and (in - the absence of additional information) according to the PVP_ - contract we can (positively) infer right away that all versions - satisfying ``foo >= 1.2.3.4 && < 1.3`` will be compatible as well. - - .. Note:: - - More generally, the PVP_ contract implies that we can safely - relax the lower bound to ``>= 1.2``, because if we know that - ``foo-1.2.3.4`` is semantically compatible, then so is - ``foo-1.2`` (if it typechecks). But we'd need to perform - additional static analysis (i.e. perform typechecking) in order - to know if our package in the role of an API consumer will - successfully typecheck against the dependency ``foo-1.2``. But - since we cannot do this analysis during constraint solving and - to keep things simple, we pragmatically use ``foo >= 1.2.3.4`` - as the initially inferred approximation for the lower bound - resulting from the assertion ``foo ^>= 1.2.3.4``. If further - evidence becomes available that e.g. 
``foo-1.2`` typechecks, - one can simply revise the dependency specification to include - the assertion ``foo ^>= 1.2``. - - The subtle but important difference in signaling allows tooling to - treat explicitly expressed ``<``-style constraints and inferred - (``^>=``-style) upper bounds differently. For instance, - :option:`--allow-newer`'s ``^``-modifier allows you to relax only - ``^>=``-style bounds while leaving explicitly stated - ``<``-constraints unaffected. - - Ignoring the signaling intent, the default syntactic desugaring rules are - - - ``^>= x`` == ``>= x && < x.1`` - - ``^>= x.y`` == ``>= x.y && < x.(y+1)`` - - ``^>= x.y.z`` == ``>= x.y.z && < x.(y+1)`` - - ``^>= x.y.z.u`` == ``>= x.y.z.u && < x.(y+1)`` - - etc. - - .. Note:: - - One might expect the desugaring to truncate all version - components below (and including) the patch-level, i.e. - ``^>= x.y.z.u`` == ``>= x.y.z && < x.(y+1)``, - as the major and minor version components alone are supposed to - uniquely identify the API according to the PVP_. However, by - designing ``^>=`` to be closer to the ``>=`` operator, we avoid - the potentially confusing effect of ``^>=`` being more liberal - than ``>=`` in the presence of patch-level versions. - - Consequently, the example declaration above is equivalent to - - :: - - build-depends: - foo >= 1.2.3.4 && < 1.3, - bar >= 1 && < 1.1 - - .. Note:: - - Prior to Cabal 1.8, ``build-depends`` specified in each - section were global to all sections. This was unintentional, but - some packages were written to depend on it, so if you need your - :pkg-field:`build-depends` to be local to each section, you must specify - at least ``Cabal-Version: >= 1.8`` in your ``.cabal`` file. - - .. Note:: - - Cabal 1.20 experimentally supported module thinning and - renaming in ``build-depends``; however, this support has since been - removed and should not be used. - -.. 
pkg-field:: other-modules: identifier list - - A list of modules used by the component but not exposed to users. - For a library component, these would be hidden modules of the - library. For an executable, these would be auxiliary modules to be - linked with the file named in the ``main-is`` field. - - .. Note:: - - Every module in the package *must* be listed in one of - :pkg-field:`other-modules`, :pkg-field:`library:exposed-modules` or - :pkg-field:`executable:main-is` fields. - -.. pkg-field:: hs-source-dirs: directory list - - :default: ``.`` - - Root directories for the module hierarchy. - - For backwards compatibility, the old variant ``hs-source-dir`` is - also recognized. - -.. pkg-field:: default-extensions: identifier list - - A list of Haskell extensions used by every module. These determine - corresponding compiler options enabled for all files. Extension - names are the constructors of the - `Extension <../release/cabal-latest/doc/API/Cabal/Language-Haskell-Extension.html#t:Extension>`__ - type. For example, ``CPP`` specifies that Haskell source files are - to be preprocessed with a C preprocessor. - -.. pkg-field:: other-extensions: identifier list - - A list of Haskell extensions used by some (but not necessarily all) - modules. From GHC version 6.6 onward, these may be specified by - placing a ``LANGUAGE`` pragma in the source files affected e.g. - - .. code-block:: haskell - - {-# LANGUAGE CPP, MultiParamTypeClasses #-} - - In Cabal-1.24 the dependency solver will use this and - :pkg-field:`default-extensions` information. Cabal prior to 1.24 will abort - compilation if the current compiler doesn't provide the extensions. 
- - If you use some extensions conditionally, using CPP or conditional - module lists, it is good to replicate the condition in - :pkg-field:`other-extensions` declarations: - - :: - - other-extensions: CPP - if impl(ghc >= 7.5) - other-extensions: PolyKinds - - You could also omit the conditionally used extensions, as they are - for information only, but it is recommended to replicate them in - :pkg-field:`other-extensions` declarations. - -.. pkg-field:: extensions: identifier list - :deprecated: - - Deprecated in favor of :pkg-field:`default-extensions`. - -.. pkg-field:: build-tool-depends: package:executable list - :since: 2.0 - - A list of Haskell programs needed to build this component. - Each is specified by the package containing the executable and the name of the executable itself, separated by a colon, and optionally followed by a version bound. - It is fine for the package to be the current one, in which case this is termed an *internal*, rather than *external* executable dependency. - - External dependencies can (and should) contain a version bound like conventional :pkg-field:`build-depends` dependencies. - Internal deps should not contain a version bound, as they will be always resolved within the same configuration of the package in the build plan. - Specifically, version bounds that include the package's version will be warned for being extraneous, and version bounds that exclude the package's version will raise an error for being impossible to follow. - - Cabal can make sure that specified programs are built and on the ``PATH`` before building the component in question. - It will always do so for internal dependencies, and also do so for external dependencies when using Nix-style local builds. - - :pkg-field:`build-tool-depends` was added in Cabal 2.0, and it will - be ignored (with a warning) with old versions of Cabal. See - :pkg-field:`build-tools` for more information about backwards - compatibility. - -.. 
pkg-field:: build-tools: program list - :deprecated: - - Deprecated in favor of :pkg-field:`build-tool-depends`, but :ref:`see below for backwards compatibility information `. - - A list of Haskell programs needed to build this component. - Each may be followed by an optional version bound. - Confusingly, each program in the list refers to one of three things: - - 1. Another executable in the same package (supported since Cabal 1.12) - - 2. Tool name contained in Cabal's :ref:`hard-coded set of common tools ` - - 3. A pre-built executable that should already be on the ``PATH`` - (supported since Cabal 2.0) - - These cases are listed in order of priority: - an executable in the package will override any of the hard-coded packages with the same name, - and a hard-coded package will override any executable on the ``PATH``. - - In the first two cases, the list entry is desugared into a :pkg-field:`build-tool-depends` entry. - In the first case, the entry is desugared into a :pkg-field:`build-tool-depends` entry by prefixing with ``$pkg:``. - In the second case, it is desugared by looking up the package and executable name in a hard-coded table. - In either case, the optional version bound is passed through unchanged. - Refer to the documentation for :pkg-field:`build-tool-depends` to understand the desugared field's meaning, along with restrictions on version bounds. - - .. _buildtoolsbc: - - **Backward Compatibility** - - Although this field is deprecated in favor of :pkg-field:`build-tool-depends`, there are some situations where you may prefer to use :pkg-field:`build-tools` in cases (1) and (2), as it is supported by more versions of Cabal. - In case (3), :pkg-field:`build-tool-depends` is better for backwards-compatibility, as it will be ignored by old versions of Cabal; if you add the executable to :pkg-field:`build-tools`, a setup script built against old Cabal will choke. 
- If an old version of Cabal is used, an end-user will have to manually arrange for the requested executable to be in your ``PATH``. - - .. _buildtoolsmap: - - **Set of Known Tool Names** - - Identifiers specified in :pkg-field:`build-tools` are desugared into their respective equivalent :pkg-field:`build-tool-depends` form according to the table below. Consequently, a legacy specification such as:: - - build-tools: alex >= 3.2.1 && < 3.3, happy >= 1.19.5 && < 1.20 - - is simply desugared into the equivalent specification:: - - build-tool-depends: alex:alex >= 3.2.1 && < 3.3, happy:happy >= 1.19.5 && < 1.20 - - +--------------------------+-----------------------------------+-----------------+ - | :pkg-field:`build-tools` | desugared | Note | - | identifier | :pkg-field:`build-tool-depends` | | - | | identifier | | - +==========================+===================================+=================+ - | ``alex`` | ``alex:alex`` | | - +--------------------------+-----------------------------------+-----------------+ - | ``c2hs`` | ``c2hs:c2hs`` | | - +--------------------------+-----------------------------------+-----------------+ - | ``cpphs`` | ``cpphs:cpphs`` | | - +--------------------------+-----------------------------------+-----------------+ - | ``greencard`` | ``greencard:greencard`` | | - +--------------------------+-----------------------------------+-----------------+ - | ``haddock`` | ``haddock:haddock`` | | - +--------------------------+-----------------------------------+-----------------+ - | ``happy`` | ``happy:happy`` | | - +--------------------------+-----------------------------------+-----------------+ - | ``hsc2hs`` | ``hsc2hs:hsc2hs`` | | - +--------------------------+-----------------------------------+-----------------+ - | ``hscolour`` | ``hscolour:hscolour`` | | - +--------------------------+-----------------------------------+-----------------+ - | ``hspec-discover`` | ``hspec-discover:hspec-discover`` | since Cabal 2.0 | - 
+--------------------------+-----------------------------------+-----------------+ - - This built-in set can be programmatically extended via ``Custom`` setup scripts; this, however, is of limited use since the Cabal solver cannot access information injected by ``Custom`` setup scripts. - -.. pkg-field:: buildable: boolean - - :default: ``True`` - - Is the component buildable? Like some of the other fields below, - this field is more useful with the slightly more elaborate form of - the simple build infrastructure described in the section on - `system-dependent parameters`_. - -.. pkg-field:: ghc-options: token list - - Additional options for GHC. You can often achieve the same effect - using the :pkg-field:`extensions` field, which is preferred. - - Options required only by one module may be specified by placing an - ``OPTIONS_GHC`` pragma in the source file affected. - - As with many other fields, whitespace can be escaped by using - Haskell string syntax. Example: - ``ghc-options: -Wcompat "-with-rtsopts=-T -I1" -Wall``. - -.. pkg-field:: ghc-prof-options: token list - - Additional options for GHC when the package is built with profiling - enabled. - - Note that as of Cabal-1.24, the default profiling detail level - defaults to ``exported-functions`` for libraries and - ``toplevel-functions`` for executables. For GHC these correspond to - the flags ``-fprof-auto-exported`` and ``-fprof-auto-top``. Prior to - Cabal-1.24 the level defaulted to ``none``. These levels can be - adjusted by the person building the package with the - ``--profiling-detail`` and ``--library-profiling-detail`` flags. - - It is typically better for the person building the package to pick - the profiling detail level rather than for the package author. So - unless you have special needs it is probably better not to specify - any of the GHC ``-fprof-auto*`` flags here. 
However if you wish to - override the profiling detail level, you can do so using the - :pkg-field:`ghc-prof-options` field: use ``-fno-prof-auto`` or one of the - other ``-fprof-auto*`` flags. - -.. pkg-field:: ghc-shared-options: token list - - Additional options for GHC when the package is built as shared - library. The options specified via this field are combined with the - ones specified via :pkg-field:`ghc-options`, and are passed to GHC during - both the compile and link phases. - -.. pkg-field:: includes: filename list - - A list of header files to be included in any compilations via C. - This field applies to both header files that are already installed - on the system and to those coming with the package to be installed. - The former files should be found in absolute paths, while the latter - files should be found in paths relative to the top of the source - tree or relative to one of the directories listed in - :pkg-field:`include-dirs`. - - These files typically contain function prototypes for foreign - imports used by the package. This is in contrast to - :pkg-field:`install-includes`, which lists header files that are intended - to be exposed to other packages that transitively depend on this - library. - -.. pkg-field:: install-includes: filename list - - A list of header files from this package to be installed into - ``$libdir/includes`` when the package is installed. Files listed in - :pkg-field:`install-includes` should be found in relative to the top of the - source tree or relative to one of the directories listed in - :pkg-field:`include-dirs`. - - :pkg-field:`install-includes` is typically used to name header files that - contain prototypes for foreign imports used in Haskell code in this - package, for which the C implementations are also provided with the - package. 
For example, here is a ``.cabal`` file for a hypothetical - ``bindings-clib`` package that bundles the C source code for ``clib``:: - - include-dirs: cbits - c-sources: clib.c - install-includes: clib.h - - Now any package that depends (directly or transitively) on the - ``bindings-clib`` library can use ``clib.h``. - - Note that in order for files listed in :pkg-field:`install-includes` to be - usable when compiling the package itself, they need to be listed in - the :pkg-field:`includes` field as well. - -.. pkg-field:: include-dirs: directory list - - A list of directories to search for header files, when preprocessing - with ``c2hs``, ``hsc2hs``, ``cpphs`` or the C preprocessor, and also - when compiling via C. Directories can be absolute paths (e.g., for - system directories) or paths that are relative to the top of the - source tree. Cabal looks in these directories when attempting to - locate files listed in :pkg-field:`includes` and - :pkg-field:`install-includes`. - -.. pkg-field:: c-sources: filename list - - A list of C source files to be compiled and linked with the Haskell - files. - -.. pkg-field:: cxx-sources: filename list - :since: 2.2 - - A list of C++ source files to be compiled and linked with the Haskell - files. Useful for segregating C and C++ sources when supplying different - command-line arguments to the compiler via the :pkg-field:`cc-options` - and the :pkg-field:`cxx-options` fields. The files listed in the - :pkg-field:`cxx-sources` can reference files listed in the - :pkg-field:`c-sources` field and vice-versa. The object files will be linked - appropriately. - -.. pkg-field:: asm-sources: filename list - - A list of assembly source files to be compiled and linked with the - Haskell files. - -.. pkg-field:: cmm-sources: filename list - - A list of C-- source files to be compiled and linked with the Haskell - files. - -.. 
pkg-field:: js-sources: filename list - - A list of JavaScript source files to be linked with the Haskell - files (only for JavaScript targets). - -.. pkg-field:: extra-libraries: token list - - A list of extra libraries to link with. - -.. pkg-field:: extra-ghci-libraries: token list - - A list of extra libraries to be used instead of 'extra-libraries' - when the package is loaded with GHCi. - -.. pkg-field:: extra-bundled-libraries: token list - - A list of libraries that are supposed to be copied from the build - directory alongside the produced Haskell libraries. Note that you - are under the obligation to produce those lirbaries in the build - directory (e.g. via a custom setup). Libraries listed here will - be included when ``copy``-ing packages and be listed in the - ``hs-libraries`` of the package configuration. - -.. pkg-field:: extra-lib-dirs: directory list - - A list of directories to search for libraries. - -.. pkg-field:: cc-options: token list - - Command-line arguments to be passed to the C compiler. Since the - arguments are compiler-dependent, this field is more useful with the - setup described in the section on `system-dependent parameters`_. - -.. pkg-field:: cpp-options: token list - - Command-line arguments for pre-processing Haskell code. Applies to - Haskell source and other pre-processed Haskell source like .hsc - .chs. Does not apply to C code, that's what cc-options is for. - -.. pkg-field:: cxx-options: token list - :since: 2.2 - - Command-line arguments to be passed to the compiler when compiling - C++ code. The C++ sources to which these command-line arguments - should be applied can be specified with the :pkg-field:`cxx-sources` - field. 
Command-line options for C and C++ can be passed separately to - the compiler when compiling both C and C++ sources by segregating the C - and C++ sources with the :pkg-field:`c-sources` and - :pkg-field:`cxx-sources` fields respectively, and providing different - command-line arguments with the :pkg-field:`cc-options` and the - :pkg-field:`cxx-options` fields. - -.. pkg-field:: ld-options: token list - - Command-line arguments to be passed to the linker. Since the - arguments are compiler-dependent, this field is more useful with the - setup described in the section on `system-dependent parameters`_. - -.. pkg-field:: pkgconfig-depends: package list - - A list of - `pkg-config `__ - packages, needed to build this package. They can be annotated with - versions, e.g. ``gtk+-2.0 >= 2.10, cairo >= 1.0``. If no version - constraint is specified, any version is assumed to be acceptable. - Cabal uses ``pkg-config`` to find if the packages are available on - the system and to find the extra compilation and linker options - needed to use the packages. - - If you need to bind to a C library that supports ``pkg-config`` (use - ``pkg-config --list-all`` to find out if it is supported) then it is - much preferable to use this field rather than hard code options into - the other fields. - -.. pkg-field:: frameworks: token list - - On Darwin/MacOS X, a list of frameworks to link to. See Apple's - developer documentation for more details on frameworks. This entry - is ignored on all other platforms. - -.. pkg-field:: extra-frameworks-dirs: directory list - - On Darwin/MacOS X, a list of directories to search for frameworks. - This entry is ignored on all other platforms. - -.. pkg-field:: mixins: mixin list - :since: 2.0 - - Supported only in GHC 8.2 and later. A list of packages mentioned in the - :pkg-field:`build-depends` field, each optionally accompanied by a list of - module and module signature renamings. 
- - The simplest mixin syntax is simply the name of a package mentioned in the - :pkg-field:`build-depends` field. For example: - - :: - - library - build-depends: - foo >= 1.2.3 && < 1.3 - mixins: - foo - - But this doesn't have any effect. More interesting is to use the mixin - entry to rename one or more modules from the package, like this: - - :: - - library - mixins: - foo (Foo.Bar as AnotherFoo.Bar, Foo.Baz as AnotherFoo.Baz) - - Note that renaming a module like this will hide all the modules - that are not explicitly named. - - Modules can also be hidden: - - :: - - library: - mixins: - foo hiding (Foo.Bar) - - Hiding modules exposes everything that is not explicitly hidden. - - .. Note:: - - The current version of Cabal suffers from an infelicity in how the - entries of :pkg-field:`mixins` are parsed: an entry will fail to parse - if the provided renaming clause has whitespace after the opening - parenthesis. This will be fixed in future versions of Cabal. - - See issues `#5150 `__, - `#4864 `__, and - `#5293 `__. - - There can be multiple mixin entries for a given package, in effect creating - multiple copies of the dependency: - - :: - - library - mixins: - foo (Foo.Bar as AnotherFoo.Bar, Foo.Baz as AnotherFoo.Baz), - foo (Foo.Bar as YetAnotherFoo.Bar) - - The ``requires`` clause is used to rename the module signatures required by - a package: - - :: - - library - mixins: - foo (Foo.Bar as AnotherFoo.Bar) requires (Foo.SomeSig as AnotherFoo.SomeSig) - - Signature-only packages don't have any modules, so only the signatures can - be renamed, with the following syntax: - - :: - - library - mixins: - sigonly requires (SigOnly.SomeSig as AnotherSigOnly.SomeSig) - - See the :pkg-field:`signatures` field for more details. - - Mixin packages are part of the `Backpack - `__ extension to the - Haskell module system. 
- - The matching of the module signatures required by a - :pkg-field:`build-depends` dependency with the implementation modules - present in another dependency is triggered by a coincidence of names. When - the names of the signature and of the implementation are already the same, - the matching is automatic. But when the names don't coincide, or we want to - instantiate a signature in two different ways, adding mixin entries that - perform renamings becomes necessary. - - .. Warning:: - - Backpack has the limitation that implementation modules that instantiate - signatures required by a :pkg-field:`build-depends` dependency can't - reside in the same component that has the dependency. They must reside - in a different package dependency, or at least in a separate internal - library. - -Configurations -^^^^^^^^^^^^^^ - -Library and executable sections may include conditional blocks, which -test for various system parameters and configuration flags. The flags -mechanism is rather generic, but most of the time a flag represents -certain feature, that can be switched on or off by the package user. -Here is an example package description file using configurations: - -Example: A package containing a library and executable programs -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -:: - - Name: Test1 - Version: 0.0.1 - Cabal-Version: >= 1.8 - License: BSD3 - Author: Jane Doe - Synopsis: Test package to test configurations - Category: Example - Build-Type: Simple - - Flag Debug - Description: Enable debug support - Default: False - Manual: True - - Flag WebFrontend - Description: Include API for web frontend. 
- Default: False - Manual: True - - Flag NewDirectory - description: Whether to build against @directory >= 1.2@ - -- This is an automatic flag which the solver will be - -- assign automatically while searching for a solution - - Library - Build-Depends: base >= 4.2 && < 4.9 - Exposed-Modules: Testing.Test1 - Extensions: CPP - - GHC-Options: -Wall - if flag(Debug) - CPP-Options: -DDEBUG - if !os(windows) - CC-Options: "-DDEBUG" - else - CC-Options: "-DNDEBUG" - - if flag(WebFrontend) - Build-Depends: cgi >= 0.42 && < 0.44 - Other-Modules: Testing.WebStuff - CPP-Options: -DWEBFRONTEND - - if flag(NewDirectory) - build-depends: directory >= 1.2 && < 1.4 - Build-Depends: time >= 1.0 && < 1.9 - else - build-depends: directory == 1.1.* - Build-Depends: old-time >= 1.0 && < 1.2 - - Executable test1 - Main-is: T1.hs - Other-Modules: Testing.Test1 - Build-Depends: base >= 4.2 && < 4.9 - - if flag(debug) - CC-Options: "-DDEBUG" - CPP-Options: -DDEBUG - -Layout -"""""" - -Flags, conditionals, library and executable sections use layout to -indicate structure. This is very similar to the Haskell layout rule. -Entries in a section have to all be indented to the same level which -must be more than the section header. Tabs are not allowed to be used -for indentation. - -As an alternative to using layout you can also use explicit braces -``{}``. In this case the indentation of entries in a section does not -matter, though different fields within a block must be on different -lines. 
Here is a bit of the above example again, using braces: - -Example: Using explicit braces rather than indentation for layout -""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" - -:: - - Name: Test1 - Version: 0.0.1 - Cabal-Version: >= 1.8 - License: BSD3 - Author: Jane Doe - Synopsis: Test package to test configurations - Category: Example - Build-Type: Simple - - Flag Debug { - Description: Enable debug support - Default: False - Manual: True - } - - Library { - Build-Depends: base >= 4.2 && < 4.9 - Exposed-Modules: Testing.Test1 - Extensions: CPP - if flag(debug) { - CPP-Options: -DDEBUG - if !os(windows) { - CC-Options: "-DDEBUG" - } else { - CC-Options: "-DNDEBUG" - } - } - } - -Configuration Flags -""""""""""""""""""" - -.. pkg-section:: flag name - :synopsis: Flag declaration. - - Flag section declares a flag which can be used in `conditional blocks`_. - - Flag names are case-insensitive and must match ``[[:alnum:]_][[:alnum:]_-]*`` - regular expression, or expressed as ABNF_: - - .. code-block:: abnf - - flag-name = (UALNUM / "_") *(UALNUM / "_" / "-") - - UALNUM = UALPHA / DIGIT - UALPHA = ... ; set of alphabetic Unicode code-points - - .. note:: - - Hackage accepts ASCII-only flags, ``[a-zA-Z0-9_][a-zA-Z0-9_-]*`` regexp. - -.. pkg-field:: description: freeform - - The description of this flag. - -.. pkg-field:: default: boolean - - :default: ``True`` - - The default value of this flag. - - .. note:: - - This value may be `overridden in several - ways `__. The - rationale for having flags default to True is that users usually - want new features as soon as they are available. Flags representing - features that are not (yet) recommended for most users (such as - experimental features or debugging support) should therefore - explicitly override the default to False. - -.. 
pkg-field:: manual: boolean - - :default: ``False`` - :since: 1.6 - - By default, Cabal will first try to satisfy dependencies with the - default flag value and then, if that is not possible, with the - negated value. However, if the flag is manual, then the default - value (which can be overridden by commandline flags) will be used. - -Conditional Blocks -^^^^^^^^^^^^^^^^^^ - -Conditional blocks may appear anywhere inside a library or executable -section. They have to follow rather strict formatting rules. Conditional -blocks must always be of the shape - -:: - - if condition - property-descriptions-or-conditionals - -or - -:: - - if condition - property-descriptions-or-conditionals - else - property-descriptions-or-conditionals - -Note that the ``if`` and the condition have to be all on the same line. - -Since Cabal 2.2 conditional blocks support ``elif`` construct. - -:: - - if condition1 - property-descriptions-or-conditionals - elif condition2 - property-descriptions-or-conditionals - else - property-descriptions-or-conditionals - -Conditions -"""""""""" - -Conditions can be formed using boolean tests and the boolean operators -``||`` (disjunction / logical "or"), ``&&`` (conjunction / logical -"and"), or ``!`` (negation / logical "not"). The unary ``!`` takes -highest precedence, ``||`` takes lowest. Precedence levels may be -overridden through the use of parentheses. For example, -``os(darwin) && !arch(i386) || os(freebsd)`` is equivalent to -``(os(darwin) && !(arch(i386))) || os(freebsd)``. - -The following tests are currently supported. - -:samp:`os({name})` - Tests if the current operating system is *name*. The argument is - tested against ``System.Info.os`` on the target system. There is - unfortunately some disagreement between Haskell implementations - about the standard values of ``System.Info.os``. Cabal canonicalises - it so that in particular ``os(windows)`` works on all - implementations. 
If the canonicalised os names match, this test - evaluates to true, otherwise false. The match is case-insensitive. -:samp:`arch({name})` - Tests if the current architecture is *name*. The argument is matched - against ``System.Info.arch`` on the target system. If the arch names - match, this test evaluates to true, otherwise false. The match is - case-insensitive. -:samp:`impl({compiler})` - Tests for the configured Haskell implementation. An optional version - constraint may be specified (for example ``impl(ghc >= 6.6.1)``). If - the configured implementation is of the right type and matches the - version constraint, then this evaluates to true, otherwise false. - The match is case-insensitive. - - Note that including a version constraint in an ``impl`` test causes - it to check for two properties: - - - The current compiler has the specified name, and - - - The compiler's version satisfied the specified version constraint - - As a result, ``!impl(ghc >= x.y.z)`` is not entirely equivalent to - ``impl(ghc < x.y.z)``. The test ``!impl(ghc >= x.y.z)`` checks that: - - - The current compiler is not GHC, or - - - The version of GHC is earlier than version x.y.z. - -:samp:`flag({name})` - Evaluates to the current assignment of the flag of the given name. - Flag names are case insensitive. Testing for flags that have not - been introduced with a flag section is an error. -``true`` - Constant value true. -``false`` - Constant value false. - -Resolution of Conditions and Flags -"""""""""""""""""""""""""""""""""" - -If a package descriptions specifies configuration flags the package user -can `control these in several -ways `__. If the -user does not fix the value of a flag, Cabal will try to find a flag -assignment in the following way. - -- For each flag specified, it will assign its default value, evaluate - all conditions with this flag assignment, and check if all - dependencies can be satisfied. 
If this check succeeded, the package - will be configured with those flag assignments. - -- If dependencies were missing, the last flag (as by the order in which - the flags were introduced in the package description) is tried with - its alternative value and so on. This continues until either an - assignment is found where all dependencies can be satisfied, or all - possible flag assignments have been tried. - -To put it another way, Cabal does a complete backtracking search to find -a satisfiable package configuration. It is only the dependencies -specified in the :pkg-field:`build-depends` field in conditional blocks that -determine if a particular flag assignment is satisfiable -(:pkg-field:`build-tools` are not considered). The order of the declaration and -the default value of the flags determines the search order. Flags -overridden on the command line fix the assignment of that flag, so no -backtracking will be tried for that flag. - -If no suitable flag assignment could be found, the configuration phase -will fail and a list of missing dependencies will be printed. Note that -this resolution process is exponential in the worst case (i.e., in the -case where dependencies cannot be satisfied). There are some -optimizations applied internally, but the overall complexity remains -unchanged. - -Meaning of field values when using conditionals -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -During the configuration phase, a flag assignment is chosen, all -conditionals are evaluated, and the package description is combined into -a flat package descriptions. If the same field both inside a conditional -and outside then they are combined using the following rules. - -- Boolean fields are combined using conjunction (logical "and"). 
- -- List fields are combined by appending the inner items to the outer - items, for example - - :: - - other-extensions: CPP - if impl(ghc) - other-extensions: MultiParamTypeClasses - - when compiled using GHC will be combined to - - :: - - other-extensions: CPP, MultiParamTypeClasses - - Similarly, if two conditional sections appear at the same nesting - level, properties specified in the latter will come after properties - specified in the former. - -- All other fields must not be specified in ambiguous ways. For example - - :: - - Main-is: Main.hs - if flag(useothermain) - Main-is: OtherMain.hs - - will lead to an error. Instead use - - :: - - if flag(useothermain) - Main-is: OtherMain.hs - else - Main-is: Main.hs - -Common stanzas -^^^^^^^^^^^^^^ - -.. pkg-section:: common name - :since: 2.2 - :synopsis: Common build info section - -Starting with Cabal-2.2 it's possible to use common build info stanzas. - -:: - - common deps - build-depends: base ^>= 4.11 - ghc-options: -Wall - - common test-deps - build-depends: tasty - - library - import: deps - exposed-modules: Foo - - test-suite tests - import: deps, test-deps - type: exitcode-stdio-1.0 - main-is: Tests.hs - build-depends: foo - -- You can use `build information`_ fields in common stanzas. - -- Common stanzas must be defined before use. - -- Common stanzas can import other common stanzas. - -- You can import multiple stanzas at once. Stanza names must be separated by commas. - -.. Note:: - - The name `import` was chosen, because there is ``includes`` field. - -Source Repositories -^^^^^^^^^^^^^^^^^^^ - -.. pkg-section:: source-repository - :since: 1.6 - -It is often useful to be able to specify a source revision control -repository for a package. Cabal lets you specifying this information in -a relatively structured form which enables other tools to interpret and -make effective use of the information. For example the information -should be sufficient for an automatic tool to checkout the sources. 
- -Cabal supports specifying different information for various common -source control systems. Obviously not all automated tools will support -all source control systems. - -Cabal supports specifying repositories for different use cases. By -declaring which case we mean automated tools can be more useful. There -are currently two kinds defined: - -- The ``head`` kind refers to the latest development branch of the - package. This may be used for example to track activity of a project - or as an indication to outside developers what sources to get for - making new contributions. - -- The ``this`` kind refers to the branch and tag of a repository that - contains the sources for this version or release of a package. For - most source control systems this involves specifying a tag, id or - hash of some form and perhaps a branch. The purpose is to be able to - reconstruct the sources corresponding to a particular package - version. This might be used to indicate what sources to get if - someone needs to fix a bug in an older branch that is no longer an - active head branch. - -You can specify one kind or the other or both. As an example here are -the repositories for the Cabal library. Note that the ``this`` kind of -repository specifies a tag. - -:: - - source-repository head - type: darcs - location: http://darcs.haskell.org/cabal/ - - source-repository this - type: darcs - location: http://darcs.haskell.org/cabal-branches/cabal-1.6/ - tag: 1.6.1 - -The exact fields are as follows: - -.. pkg-field:: type: token - - The name of the source control system used for this repository. The - currently recognised types are: - - - ``darcs`` - - ``git`` - - ``svn`` - - ``cvs`` - - ``mercurial`` (or alias ``hg``) - - ``bazaar`` (or alias ``bzr``) - - ``arch`` - - ``monotone`` - - This field is required. - -.. pkg-field:: location: URL - - The location of the repository. The exact form of this field depends - on the repository type. 
For example: - - - for darcs: ``http://code.haskell.org/foo/`` - - for git: ``git://github.com/foo/bar.git`` - - for CVS: ``anoncvs@cvs.foo.org:/cvs`` - - This field is required. - -.. pkg-field:: module: token - - CVS requires a named module, as each CVS server can host multiple - named repositories. - - This field is required for the CVS repository type and should not be - used otherwise. - -.. pkg-field:: branch: token - - Many source control systems support the notion of a branch, as a - distinct concept from having repositories in separate locations. For - example CVS, SVN and git use branches while for darcs uses different - locations for different branches. If you need to specify a branch to - identify a your repository then specify it in this field. - - This field is optional. - -.. pkg-field:: tag: token - - A tag identifies a particular state of a source repository. The tag - can be used with a ``this`` repository kind to identify the state of - a repository corresponding to a particular package version or - release. The exact form of the tag depends on the repository type. - - This field is required for the ``this`` repository kind. - -.. pkg-field:: subdir: directory - - Some projects put the sources for multiple packages under a single - source repository. This field lets you specify the relative path - from the root of the repository to the top directory for the - package, i.e. the directory containing the package's ``.cabal`` - file. - - This field is optional. It default to empty which corresponds to the - root directory of the repository. - -Downloading a package's source -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The ``cabal get`` command allows to access a package's source code - -either by unpacking a tarball downloaded from Hackage (the default) or -by checking out a working copy from the package's source repository. 
- -:: - - $ cabal get [FLAGS] PACKAGES - -The ``get`` command supports the following options: - -``-d --destdir`` *PATH* - Where to place the package source, defaults to (a subdirectory of) - the current directory. -``-s --source-repository`` *[head\|this\|...]* - Fork the package's source repository using the appropriate version - control system. The optional argument allows to choose a specific - repository kind. -``--index-state`` *[HEAD\|@\|]* - Use source package index state as it existed at a previous time. Accepts - unix-timestamps (e.g. ``@1474732068``), ISO8601 UTC timestamps (e.g. - ``2016-09-24T17:47:48Z``), or ``HEAD`` (default). - This determines which package versions are available as well as which - ``.cabal`` file revision is selected (unless ``--pristine`` is used). -``--pristine`` - Unpack the original pristine tarball, rather than updating the - ``.cabal`` file with the latest revision from the package archive. - -Custom setup scripts --------------------- - -Since Cabal 1.24, custom ``Setup.hs`` are required to accurately track -their dependencies by declaring them in the ``.cabal`` file rather than -rely on dependencies being implicitly in scope. Please refer -`this article `__ -for more details. - -Declaring a ``custom-setup`` stanza also enables the generation of -``MIN_VERSION_package_(A,B,C)`` CPP macros for the Setup component. - -.. pkg-section:: custom-setup - :synopsis: Custom Setup.hs build information. - :since: 1.24 - - The optional :pkg-section:`custom-setup` stanza contains information needed - for the compilation of custom ``Setup.hs`` scripts, - -:: - - custom-setup - setup-depends: - base >= 4.5 && < 4.11, - Cabal >= 1.14 && < 1.25 - -.. pkg-field:: setup-depends: package list - :since: 1.24 - - The dependencies needed to compile ``Setup.hs``. See the - :pkg-field:`build-depends` field for a description of the syntax expected by - this field. 
- -Backward compatibility and ``custom-setup`` -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Versions prior to Cabal 1.24 don't recognise ``custom-setup`` stanzas, -and will behave agnostic to them (except for warning about an unknown -section). Consequently, versions prior to Cabal 1.24 can't ensure the -declared dependencies ``setup-depends`` are in scope, and instead -whatever is registered in the current package database environment -will become eligible (and resolved by the compiler) for the -``Setup.hs`` module. - -The availability of the -``MIN_VERSION_package_(A,B,C)`` CPP macros -inside ``Setup.hs`` scripts depends on the condition that either - -- a ``custom-setup`` section has been declared (or ``cabal new-build`` is being - used which injects an implicit hard-coded ``custom-setup`` stanza if it's missing), or -- GHC 8.0 or later is used (which natively injects package version CPP macros) - -Consequently, if you need to write backward compatible ``Setup.hs`` -scripts using CPP, you should declare a ``custom-setup`` stanza and -use the pattern below: - -.. code-block:: haskell - - {-# LANGUAGE CPP #-} - import Distribution.Simple - - #if defined(MIN_VERSION_Cabal) - -- version macros are available and can be used as usual - # if MIN_VERSION_Cabal(a,b,c) - -- code specific to lib:Cabal >= a.b.c - # else - -- code specific to lib:Cabal < a.b.c - # endif - #else - # warning Enabling heuristic fall-back. Please upgrade cabal-install to 1.24 or later if Setup.hs fails to compile. - - -- package version macros not available; except for exotic environments, - -- you can heuristically assume that lib:Cabal's version is correlated - -- with __GLASGOW_HASKELL__, and specifically since we can assume that - -- GHC < 8.0, we can assume that lib:Cabal is version 1.22 or older. - #endif - - main = ... - -The simplified (heuristic) CPP pattern shown below is useful if all you need -is to distinguish ``Cabal < 2.0`` from ``Cabal >= 2.0``. - -.. 
code-block:: haskell - - {-# LANGUAGE CPP #-} - import Distribution.Simple - - #if !defined(MIN_VERSION_Cabal) - # define MIN_VERSION_Cabal(a,b,c) 0 - #endif - - #if MIN_VERSION_Cabal(2,0,0) - -- code for lib:Cabal >= 2.0 - #else - -- code for lib:Cabal < 2.0 - #endif - - main = ... - - - -Autogenerated modules ---------------------- - -Modules that are built automatically at setup, created with a custom -setup script, must appear on :pkg-field:`other-modules` for the library, -executable, test-suite or benchmark stanzas or also on -:pkg-field:`library:exposed-modules` for libraries to be used, but are not -really on the package when distributed. This makes commands like sdist fail -because the file is not found. - -These special modules must appear again on the :pkg-field:`autogen-modules` -field of the stanza that is using it, besides :pkg-field:`other-modules` or -:pkg-field:`library:exposed-modules`. With this there is no need to create -complex build hooks for this poweruser case. - -.. pkg-field:: autogen-modules: module list - :since: 2.0 - - .. TODO: document autogen-modules field - -Right now :pkg-field:`executable:main-is` modules are not supported on -:pkg-field:`autogen-modules`. - -:: - - Library - default-language: Haskell2010 - build-depends: base - exposed-modules: - MyLibrary - MyLibHelperModule - other-modules: - MyLibModule - autogen-modules: - MyLibHelperModule - - Executable Exe - default-language: Haskell2010 - main-is: Dummy.hs - build-depends: base - other-modules: - MyExeModule - MyExeHelperModule - autogen-modules: - MyExeHelperModule - -Accessing data files from package code --------------------------------------- - -The placement on the target system of files listed in -the :pkg-field:`data-files` field varies between systems, and in some cases -one can even move packages around after installation (see `prefix -independence `__). 
To -enable packages to find these files in a portable way, Cabal generates a -module called :file:`Paths_{pkgname}` (with any hyphens in *pkgname* -replaced by underscores) during building, so that it may be imported by -modules of the package. This module defines a function - -.. code-block:: haskell - - getDataFileName :: FilePath -> IO FilePath - -If the argument is a filename listed in the :pkg-field:`data-files` field, the -result is the name of the corresponding file on the system on which the -program is running. - -.. Note:: - - If you decide to import the :file:`Paths_{pkgname}` module then it - *must* be listed in the :pkg-field:`other-modules` field just like any other - module in your package and on :pkg-field:`autogen-modules` as the file is - autogenerated. - -The :file:`Paths_{pkgname}` module is not platform independent, as any -other autogenerated module, so it does not get included in the source -tarballs generated by ``sdist``. - -The :file:`Paths_{pkgname}` module also includes some other useful -functions and values, which record the version of the package and some -other directories which the package has been configured to be installed -into (e.g. data files live in ``getDataDir``): - -.. code-block:: haskell - - version :: Version - - getBinDir :: IO FilePath - getLibDir :: IO FilePath - getDynLibDir :: IO FilePath - getDataDir :: IO FilePath - getLibexecDir :: IO FilePath - getSysconfDir :: IO FilePath - -The actual location of all these directories can be individually -overridden at runtime using environment variables of the form -``pkg_name_var``, where ``pkg_name`` is the name of the package with all -hyphens converted into underscores, and ``var`` is either ``bindir``, -``libdir``, ``dynlibdir``, ``datadir``, ``libexedir`` or ``sysconfdir``. For example, -the configured data directory for ``pretty-show`` is controlled with the -``pretty_show_datadir`` environment variable. 
- -Accessing the package version -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The aforementioned auto generated :file:`Paths_{pkgname}` module also -exports the constant ``version ::`` -`Version `__ -which is defined as the version of your package as specified in the -``version`` field. - -System-dependent parameters ---------------------------- - -For some packages, especially those interfacing with C libraries, -implementation details and the build procedure depend on the build -environment. The ``build-type`` ``Configure`` can be used to handle many -such situations. In this case, ``Setup.hs`` should be: - -.. code-block:: haskell - - import Distribution.Simple - main = defaultMainWithHooks autoconfUserHooks - -Most packages, however, would probably do better using the ``Simple`` -build type and `configurations`_. - -The :pkg-field:`build-type` ``Configure`` differs from ``Simple`` in two ways: - -- The package root directory must contain a shell script called - ``configure``. The configure step will run the script. This - ``configure`` script may be produced by - `autoconf `__ or may be - hand-written. The ``configure`` script typically discovers - information about the system and records it for later steps, e.g. by - generating system-dependent header files for inclusion in C source - files and preprocessed Haskell source files. (Clearly this won't work - for Windows without MSYS or Cygwin: other ideas are needed.) - -- If the package root directory contains a file called - *package*\ ``.buildinfo`` after the configuration step, subsequent - steps will read it to obtain additional settings for `build - information`_ fields,to be merged with the ones - given in the ``.cabal`` file. In particular, this file may be - generated by the ``configure`` script mentioned above, allowing these - settings to vary depending on the build environment. 
- -The build information file should have the following structure: - - *buildinfo* - - ``executable:`` *name* *buildinfo* - - ``executable:`` *name* *buildinfo* ... - -where each *buildinfo* consists of settings of fields listed in the -section on `build information`_. The first one (if -present) relates to the library, while each of the others relate to the -named executable. (The names must match the package description, but you -don't have to have entries for all of them.) - -Neither of these files is required. If they are absent, this setup -script is equivalent to ``defaultMain``. - -Example: Using autoconf -^^^^^^^^^^^^^^^^^^^^^^^ - -This example is for people familiar with the -`autoconf `__ tools. - -In the X11 package, the file ``configure.ac`` contains: - -.. code-block:: shell - - AC_INIT([Haskell X11 package], [1.1], [libraries@haskell.org], [X11]) - - # Safety check: Ensure that we are in the correct source directory. - AC_CONFIG_SRCDIR([X11.cabal]) - - # Header file to place defines in - AC_CONFIG_HEADERS([include/HsX11Config.h]) - - # Check for X11 include paths and libraries - AC_PATH_XTRA - AC_TRY_CPP([#include ],,[no_x=yes]) - - # Build the package if we found X11 stuff - if test "$no_x" = yes - then BUILD_PACKAGE_BOOL=False - else BUILD_PACKAGE_BOOL=True - fi - AC_SUBST([BUILD_PACKAGE_BOOL]) - - AC_CONFIG_FILES([X11.buildinfo]) - AC_OUTPUT - -Then the setup script will run the ``configure`` script, which checks -for the presence of the X11 libraries and substitutes for variables in -the file ``X11.buildinfo.in``: - -:: - - buildable: @BUILD_PACKAGE_BOOL@ - cc-options: @X_CFLAGS@ - ld-options: @X_LIBS@ - -This generates a file ``X11.buildinfo`` supplying the parameters needed -by later stages: - -:: - - buildable: True - cc-options: -I/usr/X11R6/include - ld-options: -L/usr/X11R6/lib - -The ``configure`` script also generates a header file -``include/HsX11Config.h`` containing C preprocessor defines recording -the results of various tests. 
This file may be included by C source -files and preprocessed Haskell source files in the package. - -.. Note:: - - Packages using these features will also need to list additional - files such as ``configure``, templates for ``.buildinfo`` files, files - named only in ``.buildinfo`` files, header files and so on in the - :pkg-field:`extra-source-files` field to ensure that they are included in - source distributions. They should also list files and directories generated - by ``configure`` in the :pkg-field:`extra-tmp-files` field to ensure that - they are removed by ``setup clean``. - -Quite often the files generated by ``configure`` need to be listed -somewhere in the package description (for example, in the -:pkg-field:`install-includes` field). However, we usually don't want generated -files to be included in the source tarball. The solution is again -provided by the ``.buildinfo`` file. In the above example, the following -line should be added to ``X11.buildinfo``: - -:: - - install-includes: HsX11Config.h - -In this way, the generated ``HsX11Config.h`` file won't be included in -the source tarball in addition to ``HsX11Config.h.in``, but it will be -copied to the right location during the install process. Packages that -use custom ``Setup.hs`` scripts can update the necessary fields -programmatically instead of using the ``.buildinfo`` file. - -Conditional compilation ------------------------ - -Sometimes you want to write code that works with more than one version -of a dependency. You can specify a range of versions for the dependency -in the :pkg-field:`build-depends`, but how do you then write the code that can -use different versions of the API? - -Haskell lets you preprocess your code using the C preprocessor (either -the real C preprocessor, or ``cpphs``). To enable this, add -``extensions: CPP`` to your package description. 
When using CPP, Cabal -provides some pre-defined macros to let you test the version of -dependent packages; for example, suppose your package works with either -version 3 or version 4 of the ``base`` package, you could select the -available version in your Haskell modules like this: - -.. code-block:: cpp - - #if MIN_VERSION_base(4,0,0) - ... code that works with base-4 ... - #else - ... code that works with base-3 ... - #endif - -In general, Cabal supplies a macro -``MIN_VERSION_``\ *``package``*\ ``_(A,B,C)`` for each package depended -on via :pkg-field:`build-depends`. This macro is true if the actual version of -the package in use is greater than or equal to ``A.B.C`` (using the -conventional ordering on version numbers, which is lexicographic on the -sequence, but numeric on each component, so for example 1.2.0 is greater -than 1.0.3). - -Since version 1.20, the ``MIN_TOOL_VERSION_``\ *``tool``* -family of macros lets you condition on the version of build tools used to -build the program (e.g. ``hsc2hs``). - -Since version 1.24, the macro ``CURRENT_COMPONENT_ID``, which -expands to the string of the component identifier that uniquely -identifies this component. Furthermore, if the package is a library, -the macro ``CURRENT_PACKAGE_KEY`` records the identifier that was passed -to GHC for use in symbols and for type equality. - -Since version 2.0, the macro ``CURRENT_PACKAGE_VERSION`` expands -to the string version number of the current package. - -Cabal places the definitions of these macros into an -automatically-generated header file, which is included when -preprocessing Haskell source code by passing options to the C -preprocessor. - -Cabal also allows to detect when the source code is being used for -generating documentation. The ``__HADDOCK_VERSION__`` macro is defined -only when compiling via Haddock_ -instead of a normal Haskell compiler. 
The value of the -``__HADDOCK_VERSION__`` macro is defined as ``A*1000 + B*10 + C``, where -``A.B.C`` is the Haddock version. This can be useful for working around -bugs in Haddock or generating prettier documentation in some special -cases. - -More complex packages ---------------------- - -For packages that don't fit the simple schemes described above, you have -a few options: - -- By using the :pkg-field:`build-type` ``Custom``, you can supply your own - ``Setup.hs`` file, and customize the simple build infrastructure - using *hooks*. These allow you to perform additional actions before - and after each command is run, and also to specify additional - preprocessors. A typical ``Setup.hs`` may look like this: - - .. code-block:: haskell - - import Distribution.Simple - main = defaultMainWithHooks simpleUserHooks { postHaddock = posthaddock } - - posthaddock args flags desc info = .... - - See ``UserHooks`` in - `Distribution.Simple <../release/cabal-latest/doc/API/Cabal/Distribution-Simple.html>`__ - for the details, but note that this interface is experimental, and - likely to change in future releases. - - If you use a custom ``Setup.hs`` file you should strongly consider - adding a :pkg-section:`custom-setup` stanza with a - :pkg-field:`custom-setup:setup-depends` field to ensure that your setup - script does not break with future dependency versions. - -- You could delegate all the work to ``make``, though this is unlikely - to be very portable. Cabal supports this with the :pkg-field:`build-type` - ``Make`` and a trivial setup library - `Distribution.Make <../release/cabal-latest/doc/API/Cabal/Distribution-Make.html>`__, - which simply parses the command line arguments and invokes ``make``. - Here ``Setup.hs`` should look like this: - - .. 
code-block:: haskell - - import Distribution.Make - main = defaultMain - - The root directory of the package should contain a ``configure`` - script, and, after that has run, a ``Makefile`` with a default target - that builds the package, plus targets ``install``, ``register``, - ``unregister``, ``clean``, ``dist`` and ``docs``. Some options to - commands are passed through as follows: - - - The ``--with-hc-pkg``, ``--prefix``, ``--bindir``, ``--libdir``, - ``--dynlibdir``, ``--datadir``, ``--libexecdir`` and ``--sysconfdir`` options to - the ``configure`` command are passed on to the ``configure`` - script. In addition the value of the ``--with-compiler`` option is - passed in a ``--with-hc`` option and all options specified with - ``--configure-option=`` are passed on. - - - The ``--destdir`` option to the ``copy`` command becomes a setting - of a ``destdir`` variable on the invocation of ``make copy``. The - supplied ``Makefile`` should provide a ``copy`` target, which will - probably look like this: - - .. code-block:: make - - copy : - $(MAKE) install prefix=$(destdir)/$(prefix) \ - bindir=$(destdir)/$(bindir) \ - libdir=$(destdir)/$(libdir) \ - dynlibdir=$(destdir)/$(dynlibdir) \ - datadir=$(destdir)/$(datadir) \ - libexecdir=$(destdir)/$(libexecdir) \ - sysconfdir=$(destdir)/$(sysconfdir) \ - -- Finally, with the :pkg-field:`build-type` ``Custom``, you can also write your - own setup script from scratch. It must conform to the interface - described in the section on `building and installing - packages `__, and you may use the Cabal - library for all or part of the work. One option is to copy the source - of ``Distribution.Simple``, and alter it for your needs. Good luck. - - -.. 
include:: references.inc diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/file-format-changelog.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/file-format-changelog.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/file-format-changelog.rst 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/file-format-changelog.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -Cabal file format changelog -=========================== - -Changes in 2.4 --------------- - -* Wildcard matching has been expanded. All previous wildcard - expressions are still valid; some will match strictly more files - than before. Specifically: - - * Double-star (``**``) wildcards are now accepted for recursive - matching immediately before the final slash; they must be followed - by a filename wildcard (e.g., ``foo/**/*.html`` is valid; - ``foo/**/bar/*.html`` and ``foo/**/**/*.html``, - ``foo/**/bar.html`` are all invalid). As ``**`` was an error in - globs before, this does not affect any existing ``.cabal`` files - that previously worked. - - * Wildcards now match when the pattern's extensions form a suffix of - the candidate file's extension, rather than requiring strict - equality (e.g., previously ``*.html`` did not match - ``foo.en.html``, but now it does). 
- -* License fields use identifiers from SPDX License List version - ``3.2 2018-07-10`` Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/images/Cabal-dark.png and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/images/Cabal-dark.png differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/index.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/index.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/index.rst 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/index.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ - -Welcome to the Cabal User Guide -=============================== - -.. toctree:: - :maxdepth: 2 - :numbered: - - intro - config-and-install - concepts-and-development - bugs-and-stability - nix-local-build-overview - nix-integration - file-format-changelog diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/installing-packages.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/installing-packages.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/installing-packages.rst 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/installing-packages.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,1730 +0,0 @@ -Configuration -============= - -.. highlight:: cabal - -Overview --------- - -The global configuration file for ``cabal-install`` is -``~/.cabal/config``. If you do not have this file, ``cabal`` will create -it for you on the first call to ``cabal update``. Alternatively, you can -explicitly ask ``cabal`` to create it for you using - -.. 
code-block:: console - - $ cabal user-config update - -Most of the options in this configuration file are also available as -command line arguments, and the corresponding documentation can be used -to lookup their meaning. The created configuration file only specifies -values for a handful of options. Most options are left at their default -value, which it documents; for instance, - -:: - - -- executable-stripping: True - -means that the configuration file currently does not specify a value for -the ``executable-stripping`` option (the line is commented out), and -that the default is ``True``; if you wanted to disable stripping of -executables by default, you would change this line to - -:: - - executable-stripping: False - -You can also use ``cabal user-config update`` to migrate configuration -files created by older versions of ``cabal``. - -Repository specification ------------------------- - -An important part of the configuration if the specification of the -repository. When ``cabal`` creates a default config file, it configures -the repository to be the central Hackage server: - -:: - - repository hackage.haskell.org - url: http://hackage.haskell.org/ - -The name of the repository is given on the first line, and can be -anything; packages downloaded from this repository will be cached under -``~/.cabal/packages/hackage.haskell.org`` (or whatever name you specify; -you can change the prefix by changing the value of -``remote-repo-cache``). If you want, you can configure multiple -repositories, and ``cabal`` will combine them and be able to download -packages from any of them. - -Using secure repositories -^^^^^^^^^^^^^^^^^^^^^^^^^ - -For repositories that support the TUF security infrastructure (this -includes Hackage), you can enable secure access to the repository by -specifying: - -:: - - repository hackage.haskell.org - url: http://hackage.haskell.org/ - secure: True - root-keys: - key-threshold: - -The ```` and ```` values are used for -bootstrapping. 
As part of the TUF infrastructure the repository will -contain a file ``root.json`` (for instance, -http://hackage.haskell.org/root.json) which the client needs to do -verification. However, how can ``cabal`` verify the ``root.json`` file -*itself*? This is known as bootstrapping: if you specify a list of root -key IDs and a corresponding threshold, ``cabal`` will verify that the -downloaded ``root.json`` file has been signed with at least -```` keys from your set of ````. - -You can, but are not recommended to, omit these two fields. In that case -``cabal`` will download the ``root.json`` field and use it without -verification. Although this bootstrapping step is then unsafe, all -subsequent access is secure (provided that the downloaded ``root.json`` -was not tempered with). Of course, adding ``root-keys`` and -``key-threshold`` to your repository specification only shifts the -problem, because now you somehow need to make sure that the key IDs you -received were the right ones. How that is done is however outside the -scope of ``cabal`` proper. - -More information about the security infrastructure can be found at -https://github.com/well-typed/hackage-security. - -Legacy repositories -^^^^^^^^^^^^^^^^^^^ - -Currently ``cabal`` supports two kinds of “legacy†repositories. The -first is specified using - -:: - - remote-repo: hackage.haskell.org:http://hackage.haskell.org/packages/archive - -This is just syntactic sugar for - -:: - - repository hackage.haskell.org - url: hackage.haskell.org:http://hackage.haskell.org/packages/archive - -although, in (and only in) the specific case of Hackage, the URL -``http://hackage.haskell.org/packages/archive`` will be silently -translated to ``http://hackage.haskell.org/``. - -The second kind of legacy repositories are so-called “local†-repositories: - -:: - - local-repo: my-local-repo:/path/to/local/repo - -This can be used to access repositories on the local file system. 
-However, the layout of these local repositories is different from the -layout of remote repositories, and usage of these local repositories is -deprecated. - -Secure local repositories -^^^^^^^^^^^^^^^^^^^^^^^^^ - -If you want to use repositories on your local file system, it is -recommended instead to use a *secure* local repository: - -:: - - repository my-local-repo - url: file:/path/to/local/repo - secure: True - root-keys: - key-threshold: - -The layout of these secure local repos matches the layout of remote -repositories exactly; the :hackage-pkg:`hackage-repo-tool` -can be used to create and manage such repositories. - -.. _installing-packages: - -Building and installing packages -================================ - -.. highlight:: console - -After you've unpacked a Cabal package, you can build it by moving into -the root directory of the package and running the ``cabal`` tool there: - -:: - - $ cabal [command] [option...] - -The *command* argument selects a particular step in the build/install -process. - -You can also get a summary of the command syntax with - -:: - - $ cabal help - -Alternatively, you can also use the ``Setup.hs`` or ``Setup.lhs`` -script: - -:: - - $ runhaskell Setup.hs [command] [option...] - -For the summary of the command syntax, run: - -:: - - $ cabal help - -or - -:: - - $ runhaskell Setup.hs --help - -Building and installing a system package ----------------------------------------- - -:: - - $ runhaskell Setup.hs configure --ghc - $ runhaskell Setup.hs build - $ runhaskell Setup.hs install - -The first line readies the system to build the tool using GHC; for -example, it checks that GHC exists on the system. The second line -performs the actual building, while the last both copies the build -results to some permanent place and registers the package with GHC. 
- -Building and installing a user package --------------------------------------- - -:: - - $ runhaskell Setup.hs configure --user - $ runhaskell Setup.hs build - $ runhaskell Setup.hs install - -The package is installed under the user's home directory and is -registered in the user's package database (:option:`setup configure --user`). - -Installing packages from Hackage --------------------------------- - -The ``cabal`` tool also can download, configure, build and install a -Hackage_ package and all of its -dependencies in a single step. To do this, run: - -:: - - $ cabal install [PACKAGE...] - -To browse the list of available packages, visit the -Hackage_ web site. - -Developing with sandboxes -------------------------- - -By default, any dependencies of the package are installed into the -global or user package databases (e.g. using -``cabal install --only-dependencies``). If you're building several -different packages that have incompatible dependencies, this can cause -the build to fail. One way to avoid this problem is to build each -package in an isolated environment ("sandbox"), with a sandbox-local -package database. Because sandboxes are per-project, inconsistent -dependencies can be simply disallowed. - -For more on sandboxes, see also `this -article `__. - -Sandboxes: basic usage -^^^^^^^^^^^^^^^^^^^^^^ - -To initialise a fresh sandbox in the current directory, run -``cabal sandbox init``. All subsequent commands (such as ``build`` and -``install``) from this point will use the sandbox. - -:: - - $ cd /path/to/my/haskell/library - $ cabal sandbox init # Initialise the sandbox - $ cabal install --only-dependencies # Install dependencies into the sandbox - $ cabal build # Build your package inside the sandbox - -It can be useful to make a source package available for installation in -the sandbox - for example, if your package depends on a patched or an -unreleased version of a library. 
This can be done with the -``cabal sandbox add-source`` command - think of it as "local Hackage_". -If an add-source dependency is later modified, it is reinstalled automatically. - -:: - - $ cabal sandbox add-source /my/patched/library # Add a new add-source dependency - $ cabal install --dependencies-only # Install it into the sandbox - $ cabal build # Build the local package - $ $EDITOR /my/patched/library/Source.hs # Modify the add-source dependency - $ cabal build # Modified dependency is automatically reinstalled - -Normally, the sandbox settings (such as optimisation level) are -inherited from the main Cabal config file (``$HOME/cabal/config``). -Sometimes, though, you need to change some settings specifically for a -single sandbox. You can do this by creating a ``cabal.config`` file in -the same directory with your ``cabal.sandbox.config`` (which was created -by ``sandbox init``). This file has the same syntax as the main Cabal -config file. - -:: - - $ cat cabal.config - documentation: True - constraints: foo == 1.0, bar >= 2.0, baz - $ cabal build # Uses settings from the cabal.config file - -When you have decided that you no longer want to build your package -inside a sandbox, just delete it: - -:: - - $ cabal sandbox delete # Built-in command - $ rm -rf .cabal-sandbox cabal.sandbox.config # Alternative manual method - -Sandboxes: advanced usage -^^^^^^^^^^^^^^^^^^^^^^^^^ - -The default behaviour of the ``add-source`` command is to track -modifications done to the added dependency and reinstall the sandbox -copy of the package when needed. Sometimes this is not desirable: in -these cases you can use ``add-source --snapshot``, which disables the -change tracking. In addition to ``add-source``, there are also -``list-sources`` and ``delete-source`` commands. - -Sometimes one wants to share a single sandbox between multiple packages. 
-This can be easily done with the ``--sandbox`` option: - -:: - - $ mkdir -p /path/to/shared-sandbox - $ cd /path/to/shared-sandbox - $ cabal sandbox init --sandbox . - $ cd /path/to/package-a - $ cabal sandbox init --sandbox /path/to/shared-sandbox - $ cd /path/to/package-b - $ cabal sandbox init --sandbox /path/to/shared-sandbox - -Note that ``cabal sandbox init --sandbox .`` puts all sandbox files into -the current directory. By default, ``cabal sandbox init`` initialises a -new sandbox in a newly-created subdirectory of the current working -directory (``./.cabal-sandbox``). - -Using multiple different compiler versions simultaneously is also -supported, via the ``-w`` option: - -:: - - $ cabal sandbox init - $ cabal install --only-dependencies -w /path/to/ghc-1 # Install dependencies for both compilers - $ cabal install --only-dependencies -w /path/to/ghc-2 - $ cabal configure -w /path/to/ghc-1 # Build with the first compiler - $ cabal build - $ cabal configure -w /path/to/ghc-2 # Build with the second compiler - $ cabal build - -It can be occasionally useful to run the compiler-specific package -manager tool (e.g. ``ghc-pkg``) tool on the sandbox package DB directly -(for example, you may need to unregister some packages). The -``cabal sandbox hc-pkg`` command is a convenient wrapper that runs the -compiler-specific package manager tool with the arguments: - -:: - - $ cabal -v sandbox hc-pkg list - Using a sandbox located at /path/to/.cabal-sandbox - 'ghc-pkg' '--global' '--no-user-package-conf' - '--package-conf=/path/to/.cabal-sandbox/i386-linux-ghc-7.4.2-packages.conf.d' - 'list' - [...] - -The ``--require-sandbox`` option makes all sandbox-aware commands -(``install``/``build``/etc.) exit with error if there is no sandbox -present. This makes it harder to accidentally modify the user package -database. The option can be also turned on via the per-user -configuration file (``~/.cabal/config``) or the per-project one -(``$PROJECT_DIR/cabal.config``). 
The error can be squelched with -``--no-require-sandbox``. - -The option ``--sandbox-config-file`` allows to specify the location of -the ``cabal.sandbox.config`` file (by default, ``cabal`` searches for it -in the current directory). This provides the same functionality as -shared sandboxes, but sometimes can be more convenient. Example: - -:: - - $ mkdir my/sandbox - $ cd my/sandbox - $ cabal sandbox init - $ cd /path/to/my/project - $ cabal --sandbox-config-file=/path/to/my/sandbox/cabal.sandbox.config install - # Uses the sandbox located at /path/to/my/sandbox/.cabal-sandbox - $ cd ~ - $ cabal --sandbox-config-file=/path/to/my/sandbox/cabal.sandbox.config install - # Still uses the same sandbox - -The sandbox config file can be also specified via the -``CABAL_SANDBOX_CONFIG`` environment variable. - -Finally, the flag ``--ignore-sandbox`` lets you temporarily ignore an -existing sandbox: - -:: - - $ mkdir my/sandbox - $ cd my/sandbox - $ cabal sandbox init - $ cabal --ignore-sandbox install text - # Installs 'text' in the user package database ('~/.cabal'). - -Creating a binary package -------------------------- - -When creating binary packages (e.g. for Red Hat or Debian) one needs to -create a tarball that can be sent to another system for unpacking in the -root directory: - -:: - - $ runhaskell Setup.hs configure --prefix=/usr - $ runhaskell Setup.hs build - $ runhaskell Setup.hs copy --destdir=/tmp/mypkg - $ tar -czf mypkg.tar.gz /tmp/mypkg/ - -If the package contains a library, you need two additional steps: - -:: - - $ runhaskell Setup.hs register --gen-script - $ runhaskell Setup.hs unregister --gen-script - -This creates shell scripts ``register.sh`` and ``unregister.sh``, which -must also be sent to the target system. After unpacking there, the -package must be registered by running the ``register.sh`` script. The -``unregister.sh`` script would be used in the uninstall procedure of the -package. 
Similar steps may be used for creating binary packages for -Windows. - -The following options are understood by all commands: - -.. program:: setup - -.. option:: --help, -h or -? - - List the available options for the command. - -.. option:: --verbose=n or -v n - - Set the verbosity level (0-3). The normal level is 1; a missing *n* - defaults to 2. - - There is also an extended version of this command which can be - used to fine-tune the verbosity of output. It takes the - form ``[silent|normal|verbose|debug]``\ *flags*, where *flags* - is a list of ``+`` flags which toggle various aspects of - output. At the moment, only ``+callsite`` and ``+callstack`` - are supported, which respectively toggle call site and call - stack printing (these are only supported if Cabal - is built with a sufficiently recent GHC.) - -The various commands and the additional options they support are -described below. In the simple build infrastructure, any other options -will be reported as errors. - -.. _setup-configure: - -setup configure ---------------- - -.. program:: setup configure - -Prepare to build the package. Typically, this step checks that the -target platform is capable of building the package, and discovers -platform-specific features that are needed during the build. - -The user may also adjust the behaviour of later stages using the options -listed in the following subsections. In the simple build infrastructure, -the values supplied via these options are recorded in a private file -read by later stages. - -If a user-supplied ``configure`` script is run (see the section on -`system-dependent -parameters `__ or -on `complex -packages `__), it is -passed the :option:`--with-hc-pkg`, :option:`--prefix`, :option:`--bindir`, -:option:`--libdir`, :option:`--dynlibdir`, :option:`--datadir`, :option:`--libexecdir` and -:option:`--sysconfdir` options. 
In addition the value of the -:option:`--with-compiler` option is passed in a :option:`--with-hc-pkg` option -and all options specified with :option:`--configure-option` are passed on. - -.. note:: - `GNU autoconf places restrictions on paths, including the directory - that the package is built from. - `_ - The errors produced when this happens can be obscure; Cabal attempts to - detect and warn in this situation, but it is not perfect. - -In Cabal 2.0, support for a single positional argument was added to -``setup configure`` This makes Cabal configure a the specific component -to be configured. Specified names can be qualified with ``lib:`` or -``exe:`` in case just a name is ambiguous (as would be the case for a -package named ``p`` which has a library and an executable named ``p``.) -This has the following effects: - -- Subsequent invocations of ``cabal build``, ``register``, etc. operate only - on the configured component. - -- Cabal requires all "internal" dependencies (e.g., an executable - depending on a library defined in the same package) must be found in - the set of databases via :option:`--package-db` (and related flags): these - dependencies are assumed to be up-to-date. A dependency can be - explicitly specified using :option:`--dependency` simply by giving the name - of the internal library; e.g., the dependency for an internal library - named ``foo`` is given as - ``--dependency=pkg-internal=pkg-1.0-internal-abcd``. - -- Only the dependencies needed for the requested component are - required. Similarly, when :option:`--exact-configuration` is specified, - it's only necessary to specify :option:`--dependency` for the component. - (As mentioned previously, you *must* specify internal dependencies as - well.) - -- Internal ``build-tool-depends`` and ``build-tools`` dependencies are expected - to be in the ``PATH`` upon subsequent invocations of ``setup``. - -Full details can be found in the `Componentized Cabal -proposal `__. 
- -Programs used for building -^^^^^^^^^^^^^^^^^^^^^^^^^^ - -The following options govern the programs used to process the source -files of a package: - -.. option:: --ghc or -g, --jhc, --lhc, --uhc - - Specify which Haskell implementation to use to build the package. At - most one of these flags may be given. If none is given, the - implementation under which the setup script was compiled or - interpreted is used. - -.. option:: --with-compiler=path or -w *path* - - Specify the path to a particular compiler. If given, this must match - the implementation selected above. The default is to search for the - usual name of the selected implementation. - - This flag also sets the default value of the :option:`--with-hc-pkg` - option to the package tool for this compiler. Check the output of - ``setup configure -v`` to ensure that it finds the right package - tool (or use :option:`--with-hc-pkg` explicitly). - -.. option:: --with-hc-pkg=path - - Specify the path to the package tool, e.g. ``ghc-pkg``. The package - tool must be compatible with the compiler specified by - :option:`--with-compiler`. If this option is omitted, the default value is - determined from the compiler selected. - -.. option:: --with-prog=path - - Specify the path to the program *prog*. Any program known to Cabal - can be used in place of *prog*. It can either be a fully path or the - name of a program that can be found on the program search path. For - example: ``--with-ghc=ghc-6.6.1`` or - ``--with-cpphs=/usr/local/bin/cpphs``. The full list of accepted - programs is not enumerated in this user guide. Rather, run - ``cabal install --help`` to view the list. - -.. option:: --prog-options=options - - Specify additional options to the program *prog*. Any program known - to Cabal can be used in place of *prog*. For example: - ``--alex-options="--template=mytemplatedir/"``. The *options* is - split into program options based on spaces. 
Any options containing - embedded spaces need to be quoted, for example - ``--foo-options='--bar="C:\Program File\Bar"'``. As an alternative - that takes only one option at a time but avoids the need to quote, - use :option:`--prog-option` instead. - -.. option:: --prog-option=option - - Specify a single additional option to the program *prog*. For - passing an option that contains embedded spaces, such as a file name - with embedded spaces, using this rather than :option:`--prog-options` - means you do not need an additional level of quoting. Of course if you - are using a command shell you may still need to quote, for example - ``--foo-options="--bar=C:\Program File\Bar"``. - -All of the options passed with either :option:`--prog-options` -or :option:`--prog-option` are passed in the order they were -specified on the configure command line. - -Installation paths -^^^^^^^^^^^^^^^^^^ - -The following options govern the location of installed files from a -package: - -.. option:: --prefix=dir - - The root of the installation. For example for a global install you - might use ``/usr/local`` on a Unix system, or ``C:\Program Files`` - on a Windows system. The other installation paths are usually - subdirectories of *prefix*, but they don't have to be. - - In the simple build system, *dir* may contain the following path - variables: ``$pkgid``, ``$pkg``, ``$version``, ``$compiler``, - ``$os``, ``$arch``, ``$abi``, ``$abitag`` - -.. option:: --bindir=dir - - Executables that the user might invoke are installed here. - - In the simple build system, *dir* may contain the following path - variables: ``$prefix``, ``$pkgid``, ``$pkg``, ``$version``, - ``$compiler``, ``$os``, ``$arch``, ``$abi``, ``$abitag`` - -.. option:: --libdir=dir - - Object-code libraries are installed here. 
- - In the simple build system, *dir* may contain the following path - variables: ``$prefix``, ``$bindir``, ``$pkgid``, ``$pkg``, - ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, - ``$abitag`` - -.. option:: --dynlibdir=dir - - Dynamic libraries are installed here. - - By default, this is set to `$libdir/$abi`, which is usually not equal to - `$libdir/$libsubdir`. - - In the simple build system, *dir* may contain the following path - variables: ``$prefix``, ``$bindir``, ``$libdir``, ``$pkgid``, ``$pkg``, - ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, - ``$abitag`` - -.. option:: --libexecdir=dir - - Executables that are not expected to be invoked directly by the user - are installed here. - - In the simple build system, *dir* may contain the following path - variables: ``$prefix``, ``$bindir``, ``$libdir``, ``$libsubdir``, - ``$pkgid``, ``$pkg``, ``$version``, ``$compiler``, ``$os``, - ``$arch``, ``$abi``, ``$abitag`` - -.. option:: --datadir=dir - - Architecture-independent data files are installed here. - - In the simple build system, *dir* may contain the following path - variables: ``$prefix``, ``$bindir``, ``$libdir``, ``$libsubdir``, - ``$pkgid``, ``$pkg``, ``$version``, ``$compiler``, ``$os``, - ``$arch``, ``$abi``, ``$abitag`` - -.. option:: --sysconfdir=dir - - Installation directory for the configuration files. - - In the simple build system, *dir* may contain the following path - variables: ``$prefix``, ``$bindir``, ``$libdir``, ``$libsubdir``, - ``$pkgid``, ``$pkg``, ``$version``, ``$compiler``, ``$os``, - ``$arch``, ``$abi``, ``$abitag`` - -In addition the simple build system supports the following installation -path options: - -.. option:: --libsubdir=dir - - A subdirectory of *libdir* in which libraries are actually installed. For - example, in the simple build system on Unix, the default *libdir* is - ``/usr/local/lib``, and *libsubdir* contains the compiler ABI and package - identifier, - e.g. 
``x86_64-linux-ghc-8.0.2/mypkg-0.1.0-IxQNmCA7qrSEQNkoHSF7A``, so - libraries would be installed in - ``/usr/local/lib/x86_64-linux-ghc-8.0.2/mypkg-0.1.0-IxQNmCA7qrSEQNkoHSF7A/``. - - *dir* may contain the following path variables: ``$pkgid``, - ``$pkg``, ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, - ``$abitag`` - -.. option:: --libexecsubdir=dir - - A subdirectory of *libexecdir* in which private executables are - installed. For example, in the simple build system on Unix, the default - *libexecdir* is ``/usr/local/libexec``, and *libsubdir* is - ``x86_64-linux-ghc-8.0.2/mypkg-0.1.0``, so private executables would be - installed in ``/usr/local/libexec/x86_64-linux-ghc-8.0.2/mypkg-0.1.0/`` - - *dir* may contain the following path variables: ``$pkgid``, - ``$pkg``, ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, - ``$abitag`` - -.. option:: --datasubdir=dir - - A subdirectory of *datadir* in which data files are actually - installed. - - *dir* may contain the following path variables: ``$pkgid``, - ``$pkg``, ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, - ``$abitag`` - -.. option:: --docdir=dir - - Documentation files are installed relative to this directory. - - *dir* may contain the following path variables: ``$prefix``, - ``$bindir``, ``$libdir``, ``$libsubdir``, ``$datadir``, - ``$datasubdir``, ``$pkgid``, ``$pkg``, ``$version``, ``$compiler``, - ``$os``, ``$arch``, ``$abi``, ``$abitag`` - -.. option:: --htmldir=dir - - HTML documentation files are installed relative to this directory. - - *dir* may contain the following path variables: ``$prefix``, - ``$bindir``, ``$libdir``, ``$libsubdir``, ``$datadir``, - ``$datasubdir``, ``$docdir``, ``$pkgid``, ``$pkg``, ``$version``, - ``$compiler``, ``$os``, ``$arch``, ``$abi``, ``$abitag`` - -.. option:: --program-prefix=prefix - - Prepend *prefix* to installed program names. 
- - *prefix* may contain the following path variables: ``$pkgid``, - ``$pkg``, ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, - ``$abitag`` - -.. option:: --program-suffix=suffix - - Append *suffix* to installed program names. The most obvious use for - this is to append the program's version number to make it possible - to install several versions of a program at once: - ``--program-suffix='$version'``. - - *suffix* may contain the following path variables: ``$pkgid``, - ``$pkg``, ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, - ``$abitag`` - -Path variables in the simple build system -""""""""""""""""""""""""""""""""""""""""" - -For the simple build system, there are a number of variables that can be -used when specifying installation paths. The defaults are also specified -in terms of these variables. A number of the variables are actually for -other paths, like ``$prefix``. This allows paths to be specified -relative to each other rather than as absolute paths, which is important -for building relocatable packages (see `prefix -independence <#prefix-independence>`__). - -$prefix - The path variable that stands for the root of the installation. For - an installation to be relocatable, all other installation paths must - be relative to the ``$prefix`` variable. -$bindir - The path variable that expands to the path given by the :option:`--bindir` - configure option (or the default). -$libdir - As above but for :option:`--libdir` -$libsubdir - As above but for :option:`--libsubdir` -$dynlibdir - As above but for :option:`--dynlibdir` -$datadir - As above but for :option:`--datadir` -$datasubdir - As above but for :option:`--datasubdir` -$docdir - As above but for :option:`--docdir` -$pkgid - The name and version of the package, e.g. ``mypkg-0.2`` -$pkg - The name of the package, e.g. ``mypkg`` -$version - The version of the package, e.g. ``0.2`` -$compiler - The compiler being used to build the package, e.g. 
``ghc-6.6.1`` -$os - The operating system of the computer being used to build the - package, e.g. ``linux``, ``windows``, ``osx``, ``freebsd`` or - ``solaris`` -$arch - The architecture of the computer being used to build the package, - e.g. ``i386``, ``x86_64``, ``ppc`` or ``sparc`` -$abitag - An optional tag that a compiler can use for telling incompatible - ABI's on the same architecture apart. GHCJS encodes the underlying - GHC version in the ABI tag. -$abi - A shortcut for getting a path that completely identifies the - platform in terms of binary compatibility. Expands to the same value - as ``$arch-$os-compiler-$abitag`` if the compiler uses an abi tag, - ``$arch-$os-$compiler`` if it doesn't. - -Paths in the simple build system -"""""""""""""""""""""""""""""""" - -For the simple build system, the following defaults apply: - -.. list-table:: Default installation paths - - * - Option - - Unix Default - - Windows Default - * - :option:`--prefix` (global) - - ``/usr/local`` - - ``%PROGRAMFILES%\Haskell`` - * - :option:`--prefix` (per-user) - - ``$HOME/.cabal`` - - ``%APPDATA%\cabal`` - * - :option:`--bindir` - - ``$prefix/bin`` - - ``$prefix\bin`` - * - :option:`--libdir` - - ``$prefix/lib`` - - ``$prefix`` - * - :option:`--libsubdir` (others) - - ``$pkgid/$compiler`` - - ``$pkgid\$compiler`` - * - :option:`--dynlibdir` - - ``$libdir/$abi`` - - ``$libdir\$abi`` - * - :option:`--libexecdir` - - ``$prefix/libexec`` - - ``$prefix\$pkgid`` - * - :option:`--datadir` (executable) - - ``$prefix/share`` - - ``$prefix`` - * - :option:`--datadir` (library) - - ``$prefix/share`` - - ``%PROGRAMFILES%\Haskell`` - * - :option:`--datasubdir` - - ``$pkgid`` - - ``$pkgid`` - * - :option:`--docdir` - - ``$datadir/doc/$pkgid`` - - ``$prefix\doc\$pkgid`` - * - :option:`--sysconfdir` - - ``$prefix/etc`` - - ``$prefix\etc`` - * - :option:`--htmldir` - - ``$docdir/html`` - - ``$docdir\html`` - * - :option:`--program-prefix` - - (empty) - - (empty) - * - :option:`--program-suffix` - - 
(empty) - - (empty) - -Prefix-independence -""""""""""""""""""" - -On Windows it is possible to obtain the pathname of the running program. -This means that we can construct an installable executable package that -is independent of its absolute install location. The executable can find -its auxiliary files by finding its own path and knowing the location of -the other files relative to ``$bindir``. Prefix-independence is -particularly useful: it means the user can choose the install location -(i.e. the value of ``$prefix``) at install-time, rather than having to -bake the path into the binary when it is built. - -In order to achieve this, we require that for an executable on Windows, -all of ``$bindir``, ``$libdir``, ``$dynlibdir``, ``$datadir`` and ``$libexecdir`` begin -with ``$prefix``. If this is not the case then the compiled executable -will have baked-in all absolute paths. - -The application need do nothing special to achieve prefix-independence. -If it finds any files using ``getDataFileName`` and the `other functions -provided for the -purpose `__, -the files will be accessed relative to the location of the current -executable. - -A library cannot (currently) be prefix-independent, because it will be -linked into an executable whose file system location bears no relation -to the library package. - -Controlling Flag Assignments -^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Flag assignments (see the `resolution of conditions and -flags `__) -can be controlled with the following command line options. - -.. option:: -f flagname or -f -flagname - - Force the specified flag to ``true`` or ``false`` (if preceded with - a ``-``). Later specifications for the same flags will override - earlier, i.e., specifying ``-fdebug -f-debug`` is equivalent to - ``-f-debug`` - -.. option:: --flags=flagspecs - - Same as ``-f``, but allows specifying multiple flag assignments at - once. 
The parameter is a space-separated list of flag names (to - force a flag to ``true``), optionally preceded by a ``-`` (to force - a flag to ``false``). For example, - ``--flags="debug -feature1 feature2"`` is equivalent to - ``-fdebug -f-feature1 -ffeature2``. - -Building Test Suites -^^^^^^^^^^^^^^^^^^^^ - -.. option:: --enable-tests - - Build the test suites defined in the package description file during - the ``build`` stage. Check for dependencies required by the test - suites. If the package is configured with this option, it will be - possible to run the test suites with the ``test`` command after the - package is built. - -.. option:: --disable-tests - - (default) Do not build any test suites during the ``build`` stage. - Do not check for dependencies required only by the test suites. It - will not be possible to invoke the ``test`` command without - reconfiguring the package. - -.. option:: --enable-coverage - - Build libraries and executables (including test suites) with Haskell - Program Coverage enabled. Running the test suites will automatically - generate coverage reports with HPC. - -.. option:: --disable-coverage - - (default) Do not enable Haskell Program Coverage. - -Miscellaneous options -^^^^^^^^^^^^^^^^^^^^^ - -.. option:: --user - - Does a per-user installation. This changes the `default installation - prefix <#paths-in-the-simple-build-system>`__. It also allows - dependencies to be satisfied by the user's package database, in - addition to the global database. This also implies a default of - ``--user`` for any subsequent ``install`` command, as packages - registered in the global database should not depend on packages - registered in a user's database. - -.. option:: --global - - (default) Does a global installation. In this case package - dependencies must be satisfied by the global package database. All - packages in the user's package database will be ignored. Typically - the final installation step will require administrative privileges. 
- -.. option:: --package-db=db - - Allows package dependencies to be satisfied from this additional - package database *db* in addition to the global package database. - All packages in the user's package database will be ignored. The - interpretation of *db* is implementation-specific. Typically it will - be a file or directory. Not all implementations support arbitrary - package databases. - - This pushes an extra db onto the db stack. The :option:`--global` and - :option:`--user` mode switches add the respective [Global] and [Global, - User] dbs to the initial stack. There is a compiler-implementation - constraint that the global db must appear first in the stack, and if - the user one appears at all, it must appear immediately after the - global db. - - To reset the stack, use ``--package-db=clear``. - -.. option:: --ipid=ipid - - Specifies the *installed package identifier* of the package to be - built; this identifier is passed on to GHC and serves as the basis - for linker symbols and the ``id`` field in a ``ghc-pkg`` - registration. When a package has multiple components, the actual - component identifiers are derived off of this identifier (e.g., an - internal library ``foo`` from package ``p-0.1-abcd`` will get the - identifier ``p-0.1-abcd-foo``. - -.. option:: --cid=cid - - Specifies the *component identifier* of the component being built; - this is only valid if you are configuring a single component. - -.. option:: --default-user-config=file - - Allows a "default" ``cabal.config`` freeze file to be passed in - manually. This file will only be used if one does not exist in the - project directory already. Typically, this can be set from the - global cabal ``config`` file so as to provide a default set of - partial constraints to be used by projects, providing a way for - users to peg themselves to stable package collections. - -.. option:: --enable-optimization[=n] or -O [n] - - (default) Build with optimization flags (if available). 
This is - appropriate for production use, taking more time to build faster - libraries and programs. - - The optional *n* value is the optimisation level. Some compilers - support multiple optimisation levels. The range is 0 to 2. Level 0 - is equivalent to :option:`--disable-optimization`, level 1 is the - default if no *n* parameter is given. Level 2 is higher optimisation - if the compiler supports it. Level 2 is likely to lead to longer - compile times and bigger generated code. - - When optimizations are enabled, Cabal passes ``-O2`` to the C compiler. - -.. option:: --disable-optimization - - Build without optimization. This is suited for development: building - will be quicker, but the resulting library or programs will be - slower. - -.. option:: --enable-profiling - - Build libraries and executables with profiling enabled (for - compilers that support profiling as a separate mode). For this to - work, all libraries used by this package must also have been built - with profiling support. For libraries this involves building an - additional instance of the library in addition to the normal - non-profiling instance. For executables it changes the single - executable to be built in profiling mode. - - This flag covers both libraries and executables, but can be - overridden by the :option:`--enable-library-profiling` flag. - - See also the :option:`--profiling-detail` flag below. - -.. option:: --disable-profiling - - (default) Do not enable profiling in generated libraries and - executables. - -.. option:: --enable-library-profiling or -p - - As with :option:`--enable-profiling` above, but it applies only for - libraries. So this generates an additional profiling instance of the - library in addition to the normal non-profiling instance. 
- - The :option:`--enable-profiling` flag controls the profiling mode for both - libraries and executables, but if different modes are desired for - libraries versus executables then use :option:`--enable-library-profiling` - as well. - -.. option:: --disable-library-profiling - - (default) Do not generate an additional profiling version of the library. - -.. option:: --profiling-detail[=level] - - Some compilers that support profiling, notably GHC, can allocate - costs to different parts of the program and there are different - levels of granularity or detail with which this can be done. In - particular for GHC this concept is called "cost centers", and GHC - can automatically add cost centers, and can do so in different ways. - - This flag covers both libraries and executables, but can be - overridden by the :option:`--library-profiling-detail` flag. - - Currently this setting is ignored for compilers other than GHC. The - levels that cabal currently supports are: - - default - For GHC this uses ``exported-functions`` for libraries and - ``toplevel-functions`` for executables. - none - No costs will be assigned to any code within this component. - exported-functions - Costs will be assigned at the granularity of all top level - functions exported from each module. In GHC specifically, this - is for non-inline functions. - toplevel-functions - Costs will be assigned at the granularity of all top level - functions in each module, whether they are exported from the - module or not. In GHC specifically, this is for non-inline - functions. - all-functions - Costs will be assigned at the granularity of all functions in - each module, whether top level or local. In GHC specifically, - this is for non-inline toplevel or where-bound functions or - values. - - This flag is new in Cabal-1.24. Prior versions used the equivalent - of ``none`` above. - -.. 
option:: --library-profiling-detail[=level] - - As with :option:`--profiling-detail` above, but it applies only for - libraries. - - The level for both libraries and executables is set by the - :option:`--profiling-detail` flag, but if different levels are desired - for libraries versus executables then use - :option:`--library-profiling-detail` as well. - -.. option:: --enable-library-vanilla - - (default) Build ordinary libraries (as opposed to profiling - libraries). This is independent of the - :option:`--enable-library-profiling` option. If you enable both, you get - both. - -.. option:: --disable-library-vanilla - - Do not build ordinary libraries. This is useful in conjunction with - :option:`--enable-library-profiling` to build only profiling libraries, - rather than profiling and ordinary libraries. - -.. option:: --enable-library-for-ghci - - (default) Build libraries suitable for use with GHCi. - -.. option:: --disable-library-for-ghci - - Not all platforms support GHCi and indeed on some platforms, trying - to build GHCi libs fails. In such cases this flag can be used as a - workaround. - -.. option:: --enable-split-objs - - Use the GHC ``-split-objs`` feature when building the library. This - reduces the final size of the executables that use the library by - allowing them to link with only the bits that they use rather than - the entire library. The downside is that building the library takes - longer and uses considerably more memory. - -.. option:: --disable-split-objs - - (default) Do not use the GHC ``-split-objs`` feature. This makes - building the library quicker but the final executables that use the - library will be larger. - -.. option:: --enable-executable-stripping - - (default) When installing binary executable programs, run the - ``strip`` program on the binary. This can considerably reduce the - size of the executable binary file. It does this by removing - debugging information and symbols. 
While such extra information is - useful for debugging C programs with traditional debuggers it is - rarely helpful for debugging binaries produced by Haskell compilers. - - Not all Haskell implementations generate native binaries. For such - implementations this option has no effect. - -.. option:: --disable-executable-stripping - - Do not strip binary executables during installation. You might want - to use this option if you need to debug a program using gdb, for - example if you want to debug the C parts of a program containing - both Haskell and C code. Another reason is if you are building a - package for a system which has a policy of managing the stripping - itself (such as some Linux distributions). - -.. option:: --enable-shared - - Build shared library. This implies a separate compiler run to - generate position independent code as required on most platforms. - -.. option:: --disable-shared - - (default) Do not build shared library. - -.. option:: --enable-static - - Build a static library. This passes ``-staticlib`` to GHC (available - for iOS, and with 8.4 more platforms). The result is an archive ``.a`` - containing all dependent Haskell libraries combined. - -.. option:: --disable-static - - (default) Do not build a static library. - -.. option:: --enable-executable-dynamic - - Link executables dynamically. The executable's library dependencies - should be built as shared objects. This implies :option:`--enable-shared` - unless :option:`--disable-shared` is explicitly specified. - -.. option:: --disable-executable-dynamic - - (default) Link executables statically. - -.. option:: --configure-option=str - - An extra option to an external ``configure`` script, if one is used - (see the section on `system-dependent - parameters `__). - There can be several of these options. - -.. option:: --extra-include-dirs[=dir] - - An extra directory to search for C header files. You can use this - flag multiple times to get a list of directories. 
- - You might need to use this flag if you have standard system header - files in a non-standard location that is not mentioned in the - package's ``.cabal`` file. Using this option has the same effect as - appending the directory *dir* to the ``include-dirs`` field in each - library and executable in the package's ``.cabal`` file. The - advantage of course is that you do not have to modify the package at - all. These extra directories will be used while building the package - and for libraries it is also saved in the package registration - information and used when compiling modules that use the library. - -.. option:: --extra-lib-dirs[=dir] - - An extra directory to search for system library files. You can use - this flag multiple times to get a list of directories. - -.. option:: --extra-framework-dirs[=dir] - - An extra directory to search for frameworks (OS X only). You can use - this flag multiple times to get a list of directories. - - You might need to use this flag if you have standard system - libraries in a non-standard location that is not mentioned in the - package's ``.cabal`` file. Using this option has the same effect as - appending the directory *dir* to the ``extra-lib-dirs`` field in - each library and executable in the package's ``.cabal`` file. The - advantage of course is that you do not have to modify the package at - all. These extra directories will be used while building the package - and for libraries it is also saved in the package registration - information and used when compiling modules that use the library. - -.. option:: --dependency[=pkgname=ipid] - - Specify that a particular dependency should be used for a particular - package name. In particular, it declares that any reference to - *pkgname* in a ``build-depends`` should be resolved to *ipid*. - -.. 
option:: --exact-configuration - - This changes Cabal to require every dependency be explicitly - specified using :option:`--dependency`, rather than use Cabal's (very - simple) dependency solver. This is useful for programmatic use of - Cabal's API, where you want to error if you didn't specify enough - :option:`--dependency` flags. - -.. option:: --allow-newer[=pkgs], --allow-older[=pkgs] - - Selectively relax upper or lower bounds in dependencies without - editing the package description respectively. - - The following description focuses on upper bounds and the - :option:`--allow-newer` flag, but applies analogously to - :option:`--allow-older` and lower bounds. :option:`--allow-newer` - and :option:`--allow-older` can be used at the same time. - - If you want to install a package A that depends on B >= 1.0 && < - 2.0, but you have the version 2.0 of B installed, you can compile A - against B 2.0 by using ``cabal install --allow-newer=B A``. This - works for the whole package index: if A also depends on C that in - turn depends on B < 2.0, C's dependency on B will be also relaxed. - - Example: - - :: - - $ cd foo - $ cabal configure - Resolving dependencies... - cabal: Could not resolve dependencies: - [...] - $ cabal configure --allow-newer - Resolving dependencies... - Configuring foo... - - Additional examples: - - :: - - # Relax upper bounds in all dependencies. - $ cabal install --allow-newer foo - - # Relax upper bounds only in dependencies on bar, baz and quux. - $ cabal install --allow-newer=bar,baz,quux foo - - # Relax the upper bound on bar and force bar==2.1. - $ cabal install --allow-newer=bar --constraint="bar==2.1" foo - - It's also possible to limit the scope of :option:`--allow-newer` to single - packages with the ``--allow-newer=scope:dep`` syntax. This means - that the dependency on ``dep`` will be relaxed only for the package - ``scope``. 
- - Example: - - :: - - # Relax upper bound in foo's dependency on base; also relax upper bound in - # every package's dependency on lens. - $ cabal install --allow-newer=foo:base,lens - - # Relax upper bounds in foo's dependency on base and bar's dependency - # on time; also relax the upper bound in the dependency on lens specified by - # any package. - $ cabal install --allow-newer=foo:base,lens --allow-newer=bar:time - - Finally, one can enable :option:`--allow-newer` permanently by setting - ``allow-newer: True`` in the ``~/.cabal/config`` file. Enabling - 'allow-newer' selectively is also supported in the config file - (``allow-newer: foo, bar, baz:base``). - -.. option:: --constraint=constraint - - Restrict solutions involving a package to given version - bounds, flag settings, and other properties. For example, to - consider only install plans that use version 2.1 of ``bar`` - or do not use ``bar`` at all, write: - - :: - - $ cabal install --constraint="bar == 2.1" - - Version bounds have the same syntax as ``build-depends``. As - a special case, the following prevents ``bar`` from being - used at all: - - :: - - # Note: this is just syntax sugar for '> 1 && < 1', and is - # supported by build-depends. - $ cabal install --constraint="bar -none" - - You can also specify flag assignments: - - :: - - # Require bar to be installed with the foo flag turned on and - # the baz flag turned off. - $ cabal install --constraint="bar +foo -baz" - - To specify multiple constraints, you may pass the - ``constraint`` option multiple times. - - There are also some more specialized constraints, which most people - don't generally need: - - :: - - # Require that a version of bar be used that is already installed in - # the global package database. - $ cabal install --constraint="bar installed" - - # Require the local source copy of bar to be used. 
- # (Note: By default, if we have a local package we will - # automatically use it, so it will generally not be necessary to - # specify this.) - $ cabal install --constraint="bar source" - - # Require that bar have test suites and benchmarks enabled. - $ cabal install --constraint="bar test" --constraint="bar bench" - - By default, constraints only apply to build dependencies - (``build-depends``), build dependencies of build - dependencies, and so on. Constraints normally do not apply to - dependencies of the ``Setup.hs`` script of any package - (``setup-depends``) nor do they apply to build tools - (``build-tool-depends``) or the dependencies of build - tools. To explicitly apply a constraint to a setup or build - tool dependency, you can add a qualifier to the constraint as - follows: - - :: - - # Example use of the 'any' qualifier. This constraint - # applies to package bar anywhere in the dependency graph. - $ cabal install --constraint="any.bar == 1.0" - - :: - - # Example uses of 'setup' qualifiers. - - # This constraint applies to package bar when it is a - # dependency of any Setup.hs script. - $ cabal install --constraint="setup.bar == 1.0" - - # This constraint applies to package bar when it is a - # dependency of the Setup.hs script of package foo. - $ cabal install --constraint="foo:setup.bar == 1.0" - - .. TODO: Uncomment this example once we decide on a syntax for 'exe'. - .. # Example use of the 'exe' (executable build tool) - # qualifier. This constraint applies to package baz when it - # is a dependency of the build tool bar being used to - # build package foo. - $ cabal install --constraint="foo:bar:exe.baz == 1.0" - -.. option:: --preference=preference - - Specify a soft constraint on versions of a package. The solver will - attempt to satisfy these preferences on a "best-effort" basis. - -.. 
option:: --disable-response-files - - Enable workaround for older versions of programs such as ``ar`` or - ``ld`` that do not support response file arguments (i.e. ``@file`` - arguments). You may want this flag only if you specify a custom ar - executable. For system ``ar`` or the one bundled with ``ghc`` on - Windows the ``cabal`` should do the right thing and hence should - normally not require this flag. - -.. _setup-build: - -setup build ----------- - -Perform any preprocessing or compilation needed to make this package -ready for installation. - -This command takes the following options: - -.. program:: setup build - -.. option:: --prog-options=options, --prog-option=option - - These are mostly the same as the `options configure - step <#setup-configure>`__. Unlike the options specified at the - configure step, any program options specified at the build step are - not persistent but are used for that invocation only. The options - specified at the build step are in addition to, not in replacement of, - any options specified at the configure step. - -.. _setup-haddock: - -setup haddock -------------- - -.. program:: setup haddock - -Build the documentation for the package using Haddock_. -By default, only the documentation for the exposed modules is generated -(but see the :option:`--executables` and :option:`--internal` flags below). - -This command takes the following options: - -.. option:: --hoogle - - Generate a file ``dist/doc/html/``\ *pkgid*\ ``.txt``, which can be - converted by Hoogle_ into a - database for searching. This is equivalent to running Haddock_ - with the ``--hoogle`` flag. - -.. option:: --html-location=url - - Specify a template for the location of HTML documentation for - prerequisite packages. The substitutions (`see - listing <#paths-in-the-simple-build-system>`__) are applied to the - template to obtain a location for each package, which will be used - by hyperlinks in the generated documentation. 
For example, the - following command generates links pointing at Hackage_ pages: - - setup haddock - --html-location='http://hackage.haskell.org/packages/archive/$pkg/latest/doc/html' - - Here the argument is quoted to prevent substitution by the shell. If - this option is omitted, the location for each package is obtained - using the package tool (e.g. ``ghc-pkg``). - -.. option:: --executables - - Also run Haddock_ for the modules of all the executable programs. By default - Haddock_ is run only on the exported modules. - -.. option:: --internal - - Run Haddock_ for the all - modules, including unexposed ones, and make - Haddock_ generate documentation - for unexported symbols as well. - -.. option:: --css=path - - The argument *path* denotes a CSS file, which is passed to - Haddock_ and used to set the - style of the generated documentation. This is only needed to - override the default style that - Haddock_ uses. - -.. option:: --hyperlink-source - - Generate Haddock_ documentation integrated with HsColour_ . First, - HsColour_ is run to generate colourised code. Then Haddock_ is run to - generate HTML documentation. Each entity shown in the documentation is - linked to its definition in the colourised code. - -.. option:: --hscolour-css=path - - The argument *path* denotes a CSS file, which is passed to HsColour_ as in - - runhaskell Setup.hs hscolour --css=*path* - -.. _setup-hscolour: - -setup hscolour --------------- - -Produce colourised code in HTML format using HsColour_. Colourised code for -exported modules is put in ``dist/doc/html/``\ *pkgid*\ ``/src``. - -This command takes the following options: - -.. program:: setup hscolour - -.. option:: --executables - - Also run HsColour_ on the sources of all executable programs. Colourised - code is put in ``dist/doc/html/``\ *pkgid*/*executable*\ ``/src``. - -.. option:: --css=path - - Use the given CSS file for the generated HTML files. The CSS file - defines the colours used to colourise code. 
Note that this copies - the given CSS file to the directory with the generated HTML files - (renamed to ``hscolour.css``) rather than linking to it. - -.. _setup-install: - -setup install -------------- - -.. program:: setup install - -Copy the files into the install locations and (for library packages) -register the package with the compiler, i.e. make the modules it -contains available to programs. - -The `install locations <#installation-paths>`__ are determined by -options to `setup configure`_. - -This command takes the following options: - -.. option:: --global - - Register this package in the system-wide database. (This is the - default, unless the :option:`setup configure --user` option was supplied - to the ``configure`` command.) - -.. option:: --user - - Register this package in the user's local package database. (This is - the default if the :option:`setup configure --user` option was supplied - to the ``configure`` command.) - -.. _setup-copy: - -setup copy ----------- - -Copy the files without registering them. This command is mainly of use -to those creating binary packages. - -This command takes the following option: - -.. program:: setup copy - -.. option:: --destdir=path - - Specify the directory under which to place installed files. If this is - not given, then the root directory is assumed. - -.. _setup-register: - -setup register --------------- - -Register this package with the compiler, i.e. make the modules it -contains available to programs. This only makes sense for library -packages. Note that the ``install`` command incorporates this action. -The main use of this separate command is in the post-installation step -for a binary package. - -This command takes the following options: - -.. program:: setup register - -.. option:: --global - - Register this package in the system-wide database. (This is the - default.) - -.. option:: --user - - Register this package in the user's local package database. - -.. 
option:: --gen-script - - Instead of registering the package, generate a script containing - commands to perform the registration. On Unix, this file is called - ``register.sh``, on Windows, ``register.bat``. This script might be - included in a binary bundle, to be run after the bundle is unpacked - on the target system. - -.. option:: --gen-pkg-config[=path] - - Instead of registering the package, generate a package registration - file (or directory, in some circumstances). This only applies to - compilers that support package registration files which at the - moment is only GHC. The file should be used with the compiler's - mechanism for registering packages. This option is mainly intended - for packaging systems. If possible use the :option:`--gen-script` option - instead since it is more portable across Haskell implementations. - The *path* is optional and can be used to specify a particular - output file to generate. Otherwise, by default the file is the - package name and version with a ``.conf`` extension. - - This option outputs a directory if the package requires multiple - registrations: this can occur if internal/convenience libraries are - used. These configuration file names are sorted so that they can be - registered in order. - -.. option:: --inplace - - Registers the package for use directly from the build tree, without - needing to install it. This can be useful for testing: there's no - need to install the package after modifying it, just recompile and - test. - - This flag does not create a build-tree-local package database. It - still registers the package in one of the user or global databases. - - However, there are some caveats. It only works with GHC (currently). - It only works if your package doesn't depend on having any - supplemental files installed --- plain Haskell libraries should be - fine. - -.. _setup-unregister: - -setup unregister ----------------- - -.. program:: setup unregister - -Deregister this package with the compiler. 
- -This command takes the following options: - -.. option:: --global - - Deregister this package in the system-wide database. (This is the - default.) - -.. option:: --user - - Deregister this package in the user's local package database. - -.. option:: --gen-script - - Instead of deregistering the package, generate a script containing - commands to perform the deregistration. On Unix, this file is called - ``unregister.sh``, on Windows, ``unregister.bat``. This script might - be included in a binary bundle, to be run on the target system. - -.. _setup-clean: - -setup clean ------------ - -Remove any local files created during the ``configure``, ``build``, -``haddock``, ``register`` or ``unregister`` steps, and also any files -and directories listed in the :pkg-field:`extra-tmp-files` field. - -This command takes the following options: - -.. program:: setup clean - -.. option:: --save-configure, -s - - Keeps the configuration information so it is not necessary to run - the configure step again before building. - -setup test ----------- - -Run the test suites specified in the package description file. Aside -from the following flags, Cabal accepts the name of one or more test -suites on the command line after ``test``. When supplied, Cabal will run -only the named test suites, otherwise, Cabal will run all test suites in -the package. - -.. program:: setup test - -.. option:: --builddir=dir - - The directory where Cabal puts generated build files (default: - ``dist``). Test logs will be located in the ``test`` subdirectory. - -.. option:: --human-log=path - - The template used to name human-readable test logs; the path is - relative to ``dist/test``. By default, logs are named according to - the template ``$pkgid-$test-suite.log``, so that each test suite - will be logged to its own human-readable log file. Template - variables allowed are: ``$pkgid``, ``$compiler``, ``$os``, - ``$arch``, ``$abi``, ``$abitag``, ``$test-suite``, and ``$result``. - -.. 
option:: --machine-log=path - - The path to the machine-readable log, relative to ``dist/test``. The - default template is ``$pkgid.log``. Template variables allowed are: - ``$pkgid``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, ``$abitag`` - and ``$result``. - -.. option:: --show-details=filter - - Determines if the results of individual test cases are shown on the - terminal. May be ``always`` (always show), ``never`` (never show), - ``failures`` (show only failed results), or ``streaming`` (show all - results in real time). - -.. option:: --test-options=options - Give extra options to the test executables. - -.. option:: --test-option=option - - give an extra option to the test executables. There is no need to - quote options containing spaces because a single option is assumed, - so options will not be split on spaces. - -.. _setup-sdist: - -setup sdist ------------ - -Create a system- and compiler-independent source distribution in a file -*package*-*version*\ ``.tar.gz`` in the ``dist`` subdirectory, for -distribution to package builders. When unpacked, the commands listed in -this section will be available. - -The files placed in this distribution are the package description file, -the setup script, the sources of the modules named in the package -description file, and files named in the ``license-file``, ``main-is``, -``c-sources``, ``asm-sources``, ``cmm-sources``, ``js-sources``, -``data-files``, ``extra-source-files`` and ``extra-doc-files`` fields. - -This command takes the following option: - -.. program:: setup sdist - -.. option:: --snapshot - - Append today's date (in "YYYYMMDD" format) to the version number for - the generated source package. The original package is unaffected. - - -.. 
include:: references.inc diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/intro.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/intro.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/intro.rst 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/intro.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,200 +0,0 @@ -.. highlight:: console - -Cabal is the standard package system for -Haskell_ software. It helps people to -configure, build and install Haskell software and to distribute it -easily to other users and developers. - -There is a command line tool called ``cabal`` for working with Cabal -packages. It helps with installing existing packages and also helps -people developing their own packages. It can be used to work with local -packages or to install packages from online package archives, including -automatically installing dependencies. By default it is configured to -use Hackage_ which is Haskell's central -package archive that contains thousands of libraries and applications in -the Cabal package format. - -Introduction -============ - -Cabal is a package system for Haskell software. The point of a package -system is to enable software developers and users to easily distribute, -use and reuse software. A package system makes it easier for developers -to get their software into the hands of users. Equally importantly, it -makes it easier for software developers to be able to reuse software -components written by other developers. - -Packaging systems deal with packages and with Cabal we call them *Cabal -packages*. The Cabal package is the unit of distribution. Every Cabal -package has a name and a version number which are used to identify the -package, e.g. ``filepath-1.0``. - -Cabal packages can depend on other Cabal packages. There are tools to -enable automated package management. 
This means it is possible for -developers and users to install a package plus all of the other Cabal -packages that it depends on. It also means that it is practical to make -very modular systems using lots of packages that reuse code written by -many developers. - -Cabal packages are source based and are typically (but not necessarily) -portable to many platforms and Haskell implementations. The Cabal -package format is designed to make it possible to translate into other -formats, including binary packages for various systems. - -When distributed, Cabal packages use the standard compressed tarball -format, with the file extension ``.tar.gz``, e.g. -``filepath-1.0.tar.gz``. - -Note that packages are not part of the Haskell language, rather they are -a feature provided by the combination of Cabal and GHC (and several -other Haskell implementations). - -A tool for working with packages --------------------------------- - -There is a command line tool, called "``cabal``", that users and -developers can use to build and install Cabal packages. It can be used -for both local packages and for packages available remotely over the -network. It can automatically install Cabal packages plus any other -Cabal packages they depend on. - -Developers can use the tool with packages in local directories, e.g. - -:: - - $ cd foo/ - $ cabal install - -While working on a package in a local directory, developers can run the -individual steps to configure and build, and also generate documentation -and run test suites and benchmarks. - -It is also possible to install several local packages at once, e.g. - -:: - - $ cabal install foo/ bar/ - -Developers and users can use the tool to install packages from remote -Cabal package archives. By default, the ``cabal`` tool is configured to -use the central Haskell package archive called -Hackage_ but it is possible to use it -with any other suitable archive. 
- -:: - - $ cabal install xmonad - -This will install the ``xmonad`` package plus all of its dependencies. - -In addition to packages that have been published in an archive, -developers can install packages from local or remote tarball files, for -example - -:: - - $ cabal install foo-1.0.tar.gz - $ cabal install http://example.com/foo-1.0.tar.gz - -Cabal provides a number of ways for a user to customise how and where a -package is installed. They can decide where a package will be installed, -which Haskell implementation to use and whether to build optimised code -or build with the ability to profile code. It is not expected that users -will have to modify any of the information in the ``.cabal`` file. - -For full details, see the section on `building and installing -packages `__. - -Note that ``cabal`` is not the only tool for working with Cabal -packages. Due to the standardised format and a library for reading -``.cabal`` files, there are several other special-purpose tools. - -What's in a package -------------------- - -A Cabal package consists of: - -- Haskell software, including libraries, executables and tests -- metadata about the package in a standard human and machine readable - format (the "``.cabal``" file) -- a standard interface to build the package (the "``Setup.hs``" file) - -The ``.cabal`` file contains information about the package, supplied by -the package author. In particular it lists the other Cabal packages that -the package depends on. - -For full details on what goes in the ``.cabal`` and ``Setup.hs`` files, -and for all the other features provided by the build system, see the -section on `developing packages `__. 
- -Cabal featureset ----------------- - -Cabal and its associated tools and websites covers: - -- a software build system -- software configuration -- packaging for distribution -- automated package management - - - natively using the ``cabal`` command line tool; or - - by translation into native package formats such as RPM or deb - -- web and local Cabal package archives - - - central Hackage website with 1000's of Cabal packages - -Some parts of the system can be used without others. In particular the -built-in build system for simple packages is optional: it is possible to -use custom build systems. - -Similar systems ---------------- - -The Cabal system is roughly comparable with the system of Python Eggs, -Ruby Gems or Perl distributions. Each system has a notion of -distributable packages, and has tools to manage the process of -distributing and installing packages. - -Hackage is an online archive of Cabal packages. It is roughly comparable -to CPAN but with rather fewer packages (around 5,000 vs 28,000). - -Cabal is often compared with autoconf and automake and there is some -overlap in functionality. The most obvious similarity is that the -command line interface for actually configuring and building packages -follows the same steps and has many of the same configuration -parameters. - -:: - - $ ./configure --prefix=... - $ make - $ make install - -compared to - -:: - - $ cabal configure --prefix=... - $ cabal build - $ cabal install - -Cabal's build system for simple packages is considerably less flexible -than make/automake, but has builtin knowledge of how to build Haskell -code and requires very little manual configuration. Cabal's simple build -system is also portable to Windows, without needing a Unix-like -environment such as cygwin/mingwin. - -Compared to autoconf, Cabal takes a somewhat different approach to -package configuration. Cabal's approach is designed for automated -package management. 
Instead of having a configure script that tests for -whether dependencies are available, Cabal packages specify their -dependencies. There is some scope for optional and conditional -dependencies. By having package authors specify dependencies it makes it -possible for tools to install a package and all of its dependencies -automatically. It also makes it possible to translate (in a -mostly-automatically way) into another package format like RPM or deb -which also have automatic dependency resolution. - - -.. include:: references.inc diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/misc.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/misc.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/misc.rst 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/misc.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,103 +0,0 @@ -Reporting bugs and deficiencies -=============================== - -Please report any flaws or feature requests in the `bug -tracker `__. - -For general discussion or queries email the libraries mailing list -libraries@haskell.org. There is also a development mailing list -cabal-devel@haskell.org. - -Stability of Cabal interfaces -============================= - -The Cabal library and related infrastructure is still under active -development. New features are being added and limitations and bugs are -being fixed. This requires internal changes and often user visible -changes as well. We therefore cannot promise complete future-proof -stability, at least not without halting all development work. - -This section documents the aspects of the Cabal interface that we can -promise to keep stable and which bits are subject to change. - -Cabal file format ------------------ - -This is backwards compatible and mostly forwards compatible. New fields -can be added without breaking older versions of Cabal. 
Fields can be -deprecated without breaking older packages. - -Command-line interface ----------------------- - -Very Stable Command-line interfaces -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -- ``./setup configure`` -- ``--prefix`` -- ``--user`` -- ``--ghc``, ``--uhc`` -- ``--verbose`` -- ``--prefix`` - -- ``./setup build`` -- ``./setup install`` -- ``./setup register`` -- ``./setup copy`` - -Stable Command-line interfaces -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Unstable command-line -~~~~~~~~~~~~~~~~~~~~~ - -Functions and Types -------------------- - -The Cabal library follows the `Package Versioning Policy`_. -This means that within a stable major release, for example 1.2.x, there -will be no incompatible API changes. But minor versions increments, for -example 1.2.3, indicate compatible API additions. - -The Package Versioning Policy does not require any API guarantees -between major releases, for example between 1.2.x and 1.4.x. In practise -of course not everything changes between major releases. Some parts of -the API are more prone to change than others. The rest of this section -gives some informal advice on what level of API stability you can expect -between major releases. - -Very Stable API -~~~~~~~~~~~~~~~ - -- ``defaultMain`` - -- ``defaultMainWithHooks defaultUserHooks`` - -But regular ``defaultMainWithHooks`` isn't stable since ``UserHooks`` -changes. - -Semi-stable API -~~~~~~~~~~~~~~~ - -- ``UserHooks`` The hooks API will change in the future - -- ``Distribution.*`` is mostly declarative information about packages - and is somewhat stable. - -Unstable API -~~~~~~~~~~~~ - -Everything under ``Distribution.Simple.*`` has no stability guarantee. - -Hackage -------- - -The index format is a partly stable interface. It consists of a tar.gz -file that contains directories with ``.cabal`` files in. In future it -may contain more kinds of files so do not assume every file is a -``.cabal`` file. 
Incompatible revisions to the format would involve -bumping the name of the index file, i.e., ``00-index.tar.gz``, -``01-index.tar.gz`` etc. - - -.. include:: references.inc diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/nix-local-build-overview.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/nix-local-build-overview.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/nix-local-build-overview.rst 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/nix-local-build-overview.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,43 +0,0 @@ -Nix-style Local Builds -====================== - -Nix-style local builds are a new build system implementation inspired by Nix. -The Nix-style local build system is commonly called "new-build" for short -after the ``cabal new-*`` family of commands that control it. However, those -names are only temporary until Nix-style local builds become the default. -This is expected to happen soon. For those who do not wish to use the new -functionality, the classic project style will not be removed immediately, -but these legacy commands will require the usage of the ``v1-`` prefix as of -Cabal 3.0 and will be removed in a future release. For a future-proof -way to use these commands in a script or tutorial that anticipates the -possibility of another UI paradigm being devised in the future, there -are also ``v2-`` prefixed versions that will reference the same functionality -until such a point as it is completely removed from Cabal. - -Nix-style local builds combine the best of non-sandboxed and sandboxed Cabal: - -1. Like sandboxed Cabal today, we build sets of independent local - packages deterministically and independent of any global state. - new-build will never tell you that it can't build your package - because it would result in a "dangerous reinstall." 
Given a - particular state of the Hackage index, your build is completely - reproducible. For example, you no longer need to compile packages - with profiling ahead of time; just request profiling and new-build - will rebuild all its dependencies with profiling automatically. - -2. Like non-sandboxed Cabal today, builds of external packages are - cached in ``~/.cabal/store``, so that a package can be built once, - and then reused anywhere else it is also used. No need to continually - rebuild dependencies whenever you make a new sandbox: dependencies - which can be shared, are shared. - -Nix-style local builds were first released as beta in cabal-install 1.24. -They currently work with all versions of GHC supported by that release: GHC 7.0 and later. - -Some features described in this manual are not implemented. If you need -them, please give us a shout and we'll prioritize accordingly. - - - -.. toctree:: - nix-local-build diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/nix-local-build.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/nix-local-build.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/nix-local-build.rst 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/nix-local-build.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,2037 +0,0 @@ -.. highlight:: console - -Quickstart -========== - -Suppose that you are in a directory containing a single Cabal package -which you wish to build. 
You can configure and build it using Nix-style -local builds with this command (configuring is not necessary): - -:: - - $ cabal new-build - -To open a GHCi shell with this package, use this command: - -:: - - $ cabal new-repl - -To run an executable defined in this package, use this command: - -:: - - $ cabal new-run [executable args] - -Developing multiple packages ----------------------------- - -Many Cabal projects involve multiple packages which need to be built -together. To build multiple Cabal packages, you need to first create a -``cabal.project`` file which declares where all the local package -directories live. For example, in the Cabal repository, there is a root -directory with a folder per package, e.g., the folders ``Cabal`` and -``cabal-install``. The ``cabal.project`` file specifies each folder as -part of the project: - -.. code-block:: cabal - - packages: Cabal/ - cabal-install/ - -The expectation is that a ``cabal.project`` is checked into your source -control, to be used by all developers of a project. If you need to make -local changes, they can be placed in ``cabal.project.local`` (which -should not be checked in.) - -Then, to build every component of every package, from the top-level -directory, run the command: (using cabal-install-2.0 or greater.) - -:: - - $ cabal new-build - -To build a specific package, you can either run ``new-build`` from the -directory of the package in question: - -:: - - $ cd cabal-install - $ cabal new-build - -or you can pass the name of the package as an argument to -``cabal new-build`` (this works in any subdirectory of the project): - -:: - - $ cabal new-build cabal-install - -You can also specify a specific component of the package to build. For -example, to build a test suite named ``package-tests``, use the command: - -:: - - $ cabal new-build package-tests - -Targets can be qualified with package names. So to request -``package-tests`` *from* the ``Cabal`` package, use -``Cabal:package-tests``. 
- -Unlike sandboxes, there is no need to setup a sandbox or ``add-source`` -projects; just check in ``cabal.project`` to your repository and -``new-build`` will just work. - -Cookbook -======== - -How can I profile my library/application? ------------------------------------------ - -Create or edit your ``cabal.project.local``, adding the following -line:: - - profiling: True - -Now, ``cabal new-build`` will automatically build all libraries and -executables with profiling. You can fine-tune the profiling settings -for each package using :cfg-field:`profiling-detail`:: - - package p - profiling-detail: toplevel-functions - -Alternately, you can call ``cabal new-build --enable-profiling`` to -temporarily build with profiling. - -How it works -============ - -Local versus external packages ------------------------------- - -One of the primary innovations of Nix-style local builds is the -distinction between local packages, which users edit and recompile and -must be built per-project, versus external packages, which can be cached -across projects. To be more precise: - -1. A **local package** is one that is listed explicitly in the - ``packages``, ``optional-packages`` or ``extra-packages`` field of a - project. Usually, these refer to packages whose source code lives - directly in a folder in your project (although, you can list an - arbitrary Hackage package in ``extra-packages`` to force it to be - treated as local). - -Local packages, as well as the external packages (below) which depend on -them, are built **inplace**, meaning that they are always built -specifically for the project and are not installed globally. Inplace -packages are not cached and not given unique hashes, which makes them -suitable for packages which you want to edit and recompile. - -2. An **external package** is any package which is not listed in the - ``packages`` field. The source code for external packages is usually - retrieved from Hackage. 
- -When an external package does not depend on an inplace package, it can -be built and installed to a **global** store, which can be shared across -projects. These build products are identified by a hash that over all of -the inputs which would influence the compilation of a package (flags, -dependency selection, etc.). Just as in Nix, these hashes uniquely -identify the result of a build; if we compute this identifier and we -find that we already have this ID built, we can just use the already -built version. - -The global package store is ``~/.cabal/store`` (configurable via -global `store-dir` option); if you need to clear your store for -whatever reason (e.g., to reclaim disk space or because the global -store is corrupted), deleting this directory is safe (``new-build`` -will just rebuild everything it needs on its next invocation). - -This split motivates some of the UI choices for Nix-style local build -commands. For example, flags passed to ``cabal new-build`` are only -applied to *local* packages, so that adding a flag to -``cabal new-build`` doesn't necessitate a rebuild of *every* transitive -dependency in the global package store. - -In cabal-install 2.0 and above, Nix-style local builds also take advantage of a -new Cabal library feature, `per-component -builds `__, -where each component of a package is configured and built separately. -This can massively speed up rebuilds of packages with lots of components -(e.g., a package that defines multiple executables), as only one -executable needs to be rebuilt. Packages that use Custom setup scripts -are not currently built on a per-component basis. - -Where are my build products? ----------------------------- - -A major deficiency in the current implementation of new-build is that -there is no programmatic way to access the location of build products. 
-The location of the build products is intended to be an internal -implementation detail of new-build, but we also understand that many -unimplemented features can only be reasonably worked around by -accessing build products directly. - -The location where build products can be found varies depending on the -version of cabal-install: - -- In cabal-install-1.24, the dist directory for a package ``p-0.1`` is - stored in ``dist-newstyle/build/p-0.1``. For example, if you built an - executable or test suite named ``pexe``, it would be located at - ``dist-newstyle/build/p-0.1/build/pexe/pexe``. - -- In cabal-install-2.0 and above, the dist directory for a package ``p-0.1`` - defining a library built with GHC 8.0.1 on 64-bit Linux is - ``dist-newstyle/build/x86_64-linux/ghc-8.0.1/p-0.1``. When - per-component builds are enabled (any non-Custom package), a - subcomponent like an executable or test suite named ``pexe`` will be - stored at - ``dist-newstyle/build/x86_64-linux/ghc-8.0.1/p-0.1/c/pexe``; thus, - the full path of the executable is - ``dist-newstyle/build/x86_64-linux/ghc-8.0.1/p-0.1/c/pexe/build/pexe/pexe`` - (you can see why we want this to be an implementation detail!) - -The paths are a bit longer in 2.0 and above but the benefit is that you can -transparently have multiple builds with different versions of GHC. We -plan to add the ability to create aliases for certain build -configurations, and more convenient paths to access particularly useful -build products like executables. - -Caching -------- - -Nix-style local builds sport a robust caching system which help reduce -the time it takes to execute a rebuild cycle. While the details of how -``cabal-install`` does caching are an implementation detail and may -change in the future, knowing what gets cached is helpful for -understanding the performance characteristics of invocations to -``new-build``. 
The cached intermediate results are stored in -``dist-newstyle/cache``; this folder can be safely deleted to clear the -cache. - -The following intermediate results are cached in the following files in -this folder (the most important two are first): - -``solver-plan`` (binary) - The result of calling the dependency solver, assuming that the - Hackage index, local ``cabal.project`` file, and local ``cabal`` - files are unmodified. (Notably, we do NOT have to dependency solve - again if new build products are stored in the global store; the - invocation of the dependency solver is independent of what is - already available in the store.) -``source-hashes`` (binary) - The hashes of all local source files. When all local source files of - a local package are unchanged, ``cabal new-build`` will skip - invoking ``setup build`` entirely (saving us from a possibly - expensive call to ``ghc --make``). The full list of source files - participating in compilation are determined using - ``setup sdist --list-sources`` (thus, if you do not list all your - source files in a Cabal file, you may fail to recompile when you - edit them.) -``config`` (same format as ``cabal.project``) - The full project configuration, merged from ``cabal.project`` (and - friends) as well as the command line arguments. -``compiler`` (binary) - The configuration of the compiler being used to build the project. -``improved-plan`` (binary) - Like ``solver-plan``, but with all non-inplace packages improved - into pre-existing copies from the store. -``plan.json`` (JSON) - A JSON serialization of the computed install plan intended - for integrating ``cabal`` with external tooling. - The `cabal-plan `__ - package provides a library for parsing ``plan.json`` files into a - Haskell data structure as well as an example tool showing possible - applications. - - .. 
``cabal new-configure`` inherits options from ``Cabal``, with the following semantics:
Some examples:
Currently, passing multiple targets to ``new-repl`` is not supported
``vector``, and the ``vector`` package's transitive dependencies even if the user
the versions and flags which are picked by the solver under the
This command will modify the environment file in the ``~/foo`` directory:
- -This command will modify the environment in the "local.env" file in the -current directory: - -:: - - $ cabal new-install --lib Cabal --package-env local.env - -This command will modify the ``myenv`` named global environment: - -:: - - $ cabal new-install --lib Cabal --package-env myenv - -If you wish to create a named environment file in the current directory where -the name does not contain an extension, you must reference it as ``./myenv``. - -You can learn more about how to use these environments in `this section of the -GHC manual `_. - -cabal new-clean ---------------- - -``cabal new-clean [FLAGS]`` cleans up the temporary files and build artifacts -stored in the ``dist-newstyle`` folder. - -By default, it removes the entire folder, but it can also spare the configuration -and caches if the ``--save-config`` option is given, in which case it only removes -the build artefacts (``.hi``, ``.o`` along with any other temporary files generated -by the compiler, along with the build output). - -cabal new-sdist ---------------- - -``cabal new-sdist [FLAGS] [TARGETS]`` takes the crucial files needed to build ``TARGETS`` -and puts them into an archive format ready for upload to Hackage. These archives are stable -and two archives of the same format built from the same source will hash to the same value. - -``cabal new-sdist`` takes the following flags: - -- ``-l``, ``--list-only``: Rather than creating an archive, lists files that would be included. - Output is to ``stdout`` by default. The file paths are relative to the project's root - directory. - -- ``--targz``: Output an archive in ``.tar.gz`` format. - -- ``--zip``: Output an archive in ``.zip`` format. - -- ``-o``, ``--output-dir``: Sets the output dir, if a non-default one is desired. The default is - ``dist-newstyle/sdist/``. ``--output-dir -`` will send output to ``stdout`` - unless multiple archives are being created. - -- ``-z``, ``--null``: Only used with ``--list-only``. 
2. ``cabal.project`` (the project configuration)
:synopsis: Adds external packages as local
Formally, the format is described by the following BNF:
cfg-field:: verbose: nat - --verbose=n, -vn - :synopsis: Build verbosity level. - - :default: 1 - - Control the verbosity of ``cabal`` commands, valid values are from 0 - to 3. - - The command line variant of this field is ``--verbose=2``; a short - form ``-v2`` is also supported. - -.. cfg-field:: jobs: nat or $ncpus - --jobs=n, -jn, --jobs=$ncpus - :synopsis: Number of builds running in parallel. - - :default: 1 - - Run *nat* jobs simultaneously when building. If ``$ncpus`` is - specified, run the number of jobs equal to the number of CPUs. - Package building is often quite parallel, so turning on parallelism - can speed up build times quite a bit! - - The command line variant of this field is ``--jobs=2``; a short form - ``-j2`` is also supported; a bare ``--jobs`` or ``-j`` is equivalent - to ``--jobs=$ncpus``. - -.. cfg-field:: keep-going: boolean - --keep-going - :synopsis: Try to continue building on failure. - - :default: False - - If true, after a build failure, continue to build other unaffected - packages. - - The command line variant of this field is ``--keep-going``. - -.. option:: --builddir=DIR - - Specifies the name of the directory where build products for - build will be stored; defaults to ``dist-newstyle``. If a - relative name is specified, this directory is resolved relative - to the root of the project (i.e., where the ``cabal.project`` - file lives.) - - This option cannot be specified via a ``cabal.project`` file. - -.. _cmdoption-project-file: -.. option:: --project-file=FILE - - Specifies the name of the project file used to specify the - rest of the top-level configuration; defaults to ``cabal.project``. - This name not only specifies the name of the main project file, - but also the auxiliary project files ``cabal.project.freeze`` - and ``cabal.project.local``; for example, if you specify - ``--project-file=my.project``, then the other files that will - be probed are ``my.project.freeze`` and ``my.project.local``. 
:synopsis: Preferred dependency versions.
:synopsis: Lift dependencies' upper bound constraints.
(in the :cfg-field:`constraints` field) forcing the usage of a specific,
This allows changing the source package index state the solver uses
They take the form -``progname-options`` and ``progname-location``, and -can only be set inside package stanzas. (TODO: They are not supported -at top-level, see :issue:`3579`.) - -At the moment, there is no way to specify an option to apply to all -external packages or all inplace packages. Additionally, it is only -possible to specify these options on the command line for all local -packages (there is no per-package command line interface.) - -Some flags were added by more recent versions of the Cabal library. This -means that they are NOT supported by packages which use Custom setup -scripts that require a version of the Cabal library older than when the -feature was added. - -.. cfg-field:: flags: list of +flagname or -flagname (space separated) - --flags="+foo -bar", -ffoo, -f-bar - :synopsis: Enable or disable package flags. - - Force all flags specified as ``+flagname`` to be true, and all flags - specified as ``-flagname`` to be false. For example, to enable the - flag ``foo`` and disable ``bar``, set: - - :: - - flags: +foo -bar - - If there is no leading punctuation, it is assumed that the flag - should be enabled; e.g., this is equivalent: - - :: - - flags: foo -bar - - Flags are *per-package*, so it doesn't make much sense to specify - flags at the top-level, unless you happen to know that *all* of your - local packages support the same named flags. If a flag is not - supported by a package, it is ignored. - - See also the solver configuration field :cfg-field:`constraints`. - - The command line variant of this flag is ``--flags``. There is also - a shortened form ``-ffoo -f-bar``. - - A common mistake is to say ``cabal new-build -fhans``, where - ``hans`` is a flag for a transitive dependency that is not in the - local package; in this case, the flag will be silently ignored. If - ``haskell-tor`` is the package you want this flag to apply to, try - ``--constraint="haskell-tor +hans"`` instead. - -.. 
cfg-field:: with-compiler: executable - --with-compiler=executable - :synopsis: Path to compiler executable. - - Specify the path to a particular compiler to be used. If not an - absolute path, it will be resolved according to the :envvar:`PATH` - environment. The type of the compiler (GHC, GHCJS, etc) must be - consistent with the setting of the :cfg-field:`compiler` field. - - The most common use of this option is to specify a different version - of your compiler to be used; e.g., if you have ``ghc-7.8`` in your - path, you can specify ``with-compiler: ghc-7.8`` to use it. - - This flag also sets the default value of :cfg-field:`with-hc-pkg`, using - the heuristic that it is named ``ghc-pkg-7.8`` (if your executable name - is suffixed with a version number), or is the executable named - ``ghc-pkg`` in the same directory as the ``ghc`` directory. If this - heuristic does not work, set :cfg-field:`with-hc-pkg` explicitly. - - For inplace packages, ``cabal new-build`` maintains a separate build - directory for each version of GHC, so you can maintain multiple - build trees for different versions of GHC without clobbering each - other. - - At the moment, it's not possible to set :cfg-field:`with-compiler` on a - per-package basis, but eventually we plan on relaxing this - restriction. If this is something you need, give us a shout. - - The command line variant of this flag is - ``--with-compiler=ghc-7.8``; there is also a short version - ``-w ghc-7.8``. - -.. cfg-field:: with-hc-pkg: executable - --with-hc-pkg=executable - :synopsis: Specifies package tool. - - Specify the path to the package tool, e.g., ``ghc-pkg``. This - package tool must be compatible with the compiler specified by - :cfg-field:`with-compiler` (generally speaking, it should be precisely - the tool that was distributed with the compiler). If this option is - omitted, the default value is determined from :cfg-field:`with-compiler`. 
- - The command line variant of this flag is - ``--with-hc-pkg=ghc-pkg-7.8``. - -.. cfg-field:: optimization: nat - --enable-optimization - --disable-optimization - :synopsis: Build with optimization. - - :default: ``1`` - - Build with optimization. This is appropriate for production use, - taking more time to build faster libraries and programs. - - The optional *nat* value is the optimisation level. Some compilers - support multiple optimisation levels. The range is 0 to 2. Level 0 - disables optimization, level 1 is the default. Level 2 is higher - optimisation if the compiler supports it. Level 2 is likely to lead - to longer compile times and bigger generated code. If you are not - planning to run code, turning off optimization will lead to better - build times and less code to be rebuilt when a module changes. - - When optimizations are enabled, Cabal passes ``-O2`` to the C compiler. - - We also accept ``True`` (equivalent to 1) and ``False`` (equivalent - to 0). - - Note that as of GHC 8.0, GHC does not recompile when optimization - levels change (see :ghc-ticket:`10923`), so if - you change the optimization level for a local package you may need - to blow away your old build products in order to rebuild with the - new optimization level. - - The command line variant of this flag is ``-O2`` (with ``-O1`` - equivalent to ``-O``). There are also long-form variants - ``--enable-optimization`` and ``--disable-optimization``. - -.. cfg-field:: configure-options: args (space separated) - --configure-option=arg - :synopsis: Options to pass to configure script. - - A list of extra arguments to pass to the external ``./configure`` - script, if one is used. This is only useful for packages which have - the ``Configure`` build type. See also the section on - `system-dependent - parameters `__. - - The command line variant of this flag is ``--configure-option=arg``, - which can be specified multiple times to pass multiple options. - -.. 
cfg-field:: compiler: ghc, ghcjs, jhc, lhc, uhc or haskell-suite - --compiler=compiler - :synopsis: Compiler to build with. - - :default: ``ghc`` - - Specify which compiler toolchain to be used. This is independent of - ``with-compiler``, because the choice of toolchain affects Cabal's - build logic. - - The command line variant of this flag is ``--compiler=ghc``. - -.. cfg-field:: tests: boolean - --enable-tests - --disable-tests - :synopsis: Build tests. - - :default: ``False`` - - Force test suites to be enabled. For most users this should not be - needed, as we always attempt to solve for test suite dependencies, - even when this value is ``False``; furthermore, test suites are - automatically enabled if they are requested as a built target. - - The command line variant of this flag is ``--enable-tests`` and - ``--disable-tests``. - -.. cfg-field:: benchmarks: boolean - --enable-benchmarks - --disable-benchmarks - :synopsis: Build benchmarks. - - :default: ``False`` - - Force benchmarks to be enabled. For most users this should not be - needed, as we always attempt to solve for benchmark dependencies, - even when this value is ``False``; furthermore, benchmarks are - automatically enabled if they are requested as a built target. - - The command line variant of this flag is ``--enable-benchmarks`` and - ``--disable-benchmarks``. - -.. cfg-field:: extra-prog-path: paths (newline or comma separated) - --extra-prog-path=PATH - :synopsis: Add directories to program search path. - :since: 1.18 - - A list of directories to search for extra required programs. Most - users should not need this, as programs like ``happy`` and ``alex`` - will automatically be installed and added to the path. This can be - useful if a ``Custom`` setup script relies on an exotic extra - program. - - The command line variant of this flag is ``--extra-prog-path=PATH``, - which can be specified multiple times. - -.. 
cfg-field:: run-tests: boolean - --run-tests - :synopsis: Run package test suite upon installation. - - :default: ``False`` - - Run the package test suite upon installation. This is useful for - saying "When this package is installed, check that the test suite - passes, terminating the rest of the build if it is broken." - - .. warning:: - - One deficiency: the :cfg-field:`run-tests` setting of a package is NOT - recorded as part of the hash, so if you install something without - :cfg-field:`run-tests` and then turn on ``run-tests``, we won't - subsequently test the package. If this is causing you problems, give - us a shout. - - The command line variant of this flag is ``--run-tests``. - -Object code options -^^^^^^^^^^^^^^^^^^^ - -.. cfg-field:: debug-info: integer - --enable-debug-info= - --disable-debug-info - :synopsis: Build with debug info enabled. - :since: 1.22 - - :default: False - - If the compiler (e.g., GHC 7.10 and later) supports outputing OS - native debug info (e.g., DWARF), setting ``debug-info: True`` will - instruct it to do so. See the GHC wiki page on :ghc-wiki:`DWARF` - for more information about this feature. - - (This field also accepts numeric syntax, but until GHC 8.2 this didn't - do anything.) - - The command line variant of this flag is ``--enable-debug-info`` and - ``--disable-debug-info``. - -.. cfg-field:: split-sections: boolean - --enable-split-sections - --disable-split-sections - :synopsis: Use GHC's split sections feature. - :since: 2.1 - - :default: False - - Use the GHC ``-split-sections`` feature when building the library. This - reduces the final size of the executables that use the library by - allowing them to link with only the bits that they use rather than - the entire library. The downside is that building the library takes - longer and uses a bit more memory. - - This feature is supported by GHC 8.0 and later. - - The command line variant of this flag is ``--enable-split-sections`` and - ``--disable-split-sections``. 
- -.. cfg-field:: split-objs: boolean - --enable-split-objs - --disable-split-objs - :synopsis: Use GHC's split objects feature. - - :default: False - - Use the GHC ``-split-objs`` feature when building the library. This - reduces the final size of the executables that use the library by - allowing them to link with only the bits that they use rather than - the entire library. The downside is that building the library takes - longer and uses considerably more memory. - - It is generally recommend that you use ``split-sections`` instead - of ``split-objs`` where possible. - - The command line variant of this flag is ``--enable-split-objs`` and - ``--disable-split-objs``. - -.. cfg-field:: executable-stripping: boolean - --enable-executable-stripping - --disable-executable-stripping - :synopsis: Strip installed programs. - - :default: True - - When installing binary executable programs, run the ``strip`` - program on the binary. This can considerably reduce the size of the - executable binary file. It does this by removing debugging - information and symbols. - - Not all Haskell implementations generate native binaries. For such - implementations this option has no effect. - - (TODO: Check what happens if you combine this with ``debug-info``.) - - The command line variant of this flag is - ``--enable-executable-stripping`` and - ``--disable-executable-stripping``. - -.. cfg-field:: library-stripping: boolean - --enable-library-stripping - --disable-library-stripping - :synopsis: Strip installed libraries. - :since: 1.19 - - When installing binary libraries, run the ``strip`` program on the - binary, saving space on the file system. See also - ``executable-stripping``. - - The command line variant of this flag is - ``--enable-library-stripping`` and ``--disable-library-stripping``. - -Executable options -^^^^^^^^^^^^^^^^^^ - -.. cfg-field:: program-prefix: prefix - --program-prefix=prefix - :synopsis: Prepend prefix to program names. 
- - [STRIKEOUT:Prepend *prefix* to installed program names.] (Currently - implemented in a silly and not useful way. If you need this to work - give us a shout.) - - *prefix* may contain the following path variables: ``$pkgid``, - ``$pkg``, ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, - ``$abitag`` - - The command line variant of this flag is ``--program-prefix=foo-``. - -.. cfg-field:: program-suffix: suffix - --program-suffix=suffix - :synopsis: Append refix to program names. - - [STRIKEOUT:Append *suffix* to installed program names.] (Currently - implemented in a silly and not useful way. If you need this to work - give us a shout.) - - The most obvious use for this is to append the program's version - number to make it possible to install several versions of a program - at once: ``program-suffix: $version``. - - *suffix* may contain the following path variables: ``$pkgid``, - ``$pkg``, ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, - ``$abitag`` - - The command line variant of this flag is - ``--program-suffix='$version'``. - -Dynamic linking options -^^^^^^^^^^^^^^^^^^^^^^^ - -.. cfg-field:: shared: boolean - --enable-shared - --disable-shared - :synopsis: Build shared library. - - :default: False - - Build shared library. This implies a separate compiler run to - generate position independent code as required on most platforms. - - The command line variant of this flag is ``--enable-shared`` and - ``--disable-shared``. - -.. cfg-field:: executable-dynamic: boolean - --enable-executable-dynamic - --disable-executable-dynamic - :synopsis: Link executables dynamically. - - :default: False - - Link executables dynamically. The executable's library dependencies - should be built as shared objects. This implies ``shared: True`` - unless ``shared: False`` is explicitly specified. - - The command line variant of this flag is - ``--enable-executable-dynamic`` and - ``--disable-executable-dynamic``. - -.. 
cfg-field:: library-for-ghci: boolean - --enable-library-for-ghci - --disable-library-for-ghci - :synopsis: Build libraries suitable for use with GHCi. - - :default: True - - Build libraries suitable for use with GHCi. This involves an extra - linking step after the build. - - Not all platforms support GHCi and indeed on some platforms, trying - to build GHCi libs fails. In such cases, consider setting - ``library-for-ghci: False``. - - The command line variant of this flag is - ``--enable-library-for-ghci`` and ``--disable-library-for-ghci``. - -.. cfg-field:: relocatable: - --relocatable - :synopsis: Build relocatable package. - :since: 1.21 - - :default: False - - [STRIKEOUT:Build a package which is relocatable.] (TODO: It is not - clear what this actually does, or if it works at all.) - - The command line variant of this flag is ``--relocatable``. - -Static linking options -^^^^^^^^^^^^^^^^^^^^^^ - -.. cfg-field:: static: boolean - --enable-static - --disable-static - :synopsis: Build static library. - - - :default: False - - Roll this and all dependent libraries into a combined ``.a`` archive. - This uses GHCs ``-staticlib`` flag, which is avaiable for iOS and with - GHC 8.4 and later for other platforms as well. - -Foreign function interface options -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -.. cfg-field:: extra-include-dirs: directories (comma or newline separated list) - --extra-include-dirs=DIR - :synopsis: Adds C header search path. - - An extra directory to search for C header files. You can use this - flag multiple times to get a list of directories. - - You might need to use this flag if you have standard system header - files in a non-standard location that is not mentioned in the - package's ``.cabal`` file. Using this option has the same affect as - appending the directory *dir* to the :pkg-field:`include-dirs` field in each - library and executable in the package's ``.cabal`` file. 
The - advantage of course is that you do not have to modify the package at - all. These extra directories will be used while building the package - and for libraries it is also saved in the package registration - information and used when compiling modules that use the library. - - The command line variant of this flag is - ``--extra-include-dirs=DIR``, which can be specified multiple times. - -.. cfg-field:: extra-lib-dirs: directories (comma or newline separated list) - --extra-lib-dirs=DIR - :synopsis: Adds library search directory. - - An extra directory to search for system libraries files. - - The command line variant of this flag is ``--extra-lib-dirs=DIR``, - which can be specified multiple times. - -.. cfg-field:: extra-framework-dirs: directories (comma or newline separated list) - --extra-framework-dirs=DIR - :synopsis: Adds framework search directory (OS X only). - - An extra directory to search for frameworks (OS X only). - - You might need to use this flag if you have standard system - libraries in a non-standard location that is not mentioned in the - package's ``.cabal`` file. Using this option has the same affect as - appending the directory *dir* to the :cfg-field:`extra-lib-dirs` field in - each library and executable in the package's ``.cabal`` file. The - advantage of course is that you do not have to modify the package at - all. These extra directories will be used while building the package - and for libraries it is also saved in the package registration - information and used when compiling modules that use the library. - - The command line variant of this flag is - ``--extra-framework-dirs=DIR``, which can be specified multiple - times. - -Profiling options -^^^^^^^^^^^^^^^^^ - -.. cfg-field:: profiling: boolean - --enable-profiling - --disable-profiling - :synopsis: Enable profiling builds. 
- :since: 1.21 - - :default: False - - Build libraries and executables with profiling enabled (for - compilers that support profiling as a separate mode). It is only - necessary to specify :cfg-field:`profiling` for the specific package you - want to profile; ``cabal new-build`` will ensure that all of its - transitive dependencies are built with profiling enabled. - - To enable profiling for only libraries or executables, see - :cfg-field:`library-profiling` and :cfg-field:`executable-profiling`. - - For useful profiling, it can be important to control precisely what - cost centers are allocated; see :cfg-field:`profiling-detail`. - - The command line variant of this flag is ``--enable-profiling`` and - ``--disable-profiling``. - -.. cfg-field:: profiling-detail: level - --profiling-detail=level - :synopsis: Profiling detail level. - :since: 1.23 - - Some compilers that support profiling, notably GHC, can allocate - costs to different parts of the program and there are different - levels of granularity or detail with which this can be done. In - particular for GHC this concept is called "cost centers", and GHC - can automatically add cost centers, and can do so in different ways. - - This flag covers both libraries and executables, but can be - overridden by the ``library-profiling-detail`` field. - - Currently this setting is ignored for compilers other than GHC. The - levels that cabal currently supports are: - - default - For GHC this uses ``exported-functions`` for libraries and - ``toplevel-functions`` for executables. - none - No costs will be assigned to any code within this component. - exported-functions - Costs will be assigned at the granularity of all top level - functions exported from each module. In GHC, this - is for non-inline functions. Corresponds to ``-fprof-auto-exported``. - toplevel-functions - Costs will be assigned at the granularity of all top level - functions in each module, whether they are exported from the - module or not. 
In GHC specifically, this is for non-inline - functions. Corresponds to ``-fprof-auto-top``. - all-functions - Costs will be assigned at the granularity of all functions in - each module, whether top level or local. In GHC specifically, - this is for non-inline toplevel or where-bound functions or - values. Corresponds to ``-fprof-auto``. - - The command line variant of this flag is - ``--profiling-detail=none``. - -.. cfg-field:: library-profiling-detail: level - --library-profiling-detail=level - :synopsis: Libraries profiling detail level. - :since: 1.23 - - Like :cfg-field:`profiling-detail`, but applied only to libraries - - The command line variant of this flag is - ``--library-profiling-detail=none``. - -.. cfg-field:: library-vanilla: boolean - --enable-library-vanilla - --disable-library-vanilla - :synopsis: Build libraries without profiling. - - :default: True - - Build ordinary libraries (as opposed to profiling libraries). - Mostly, you can set this to False to avoid building ordinary - libraries when you are profiling. - - The command line variant of this flag is - ``--enable-library-vanilla`` and ``--disable-library-vanilla``. - -.. cfg-field:: library-profiling: boolean - --enable-library-profiling - --disable-library-profiling - :synopsis: Build libraries with profiling enabled. - :since: 1.21 - - :default: False - - Build libraries with profiling enabled. You probably want - to use :cfg-field:`profiling` instead. - - The command line variant of this flag is - ``--enable-library-profiling`` and ``--disable-library-profiling``. - -.. cfg-field:: executable-profiling: boolean - --enable-executable-profiling - --disable-executable-profiling - :synopsis: Build executables with profiling enabled. - :since: 1.21 - - :default: False - - Build executables with profiling enabled. You probably want - to use :cfg-field:`profiling` instead. - - The command line variant of this flag is - ``--enable-executable-profiling`` and - ``--disable-executable-profiling``. 
- -Coverage options -^^^^^^^^^^^^^^^^ - -.. cfg-field:: coverage: boolean - --enable-coverage - --disable-coverage - :synopsis: Build with coverage enabled. - :since: 1.21 - - :default: False - - Build libraries and executables (including test suites) with Haskell - Program Coverage enabled. Running the test suites will automatically - generate coverage reports with HPC. - - The command line variant of this flag is ``--enable-coverage`` and - ``--disable-coverage``. - -.. cfg-field:: library-coverage: boolean - --enable-library-coverage - --disable-library-coverage - :since: 1.21 - :deprecated: - - :default: False - - Deprecated, use :cfg-field:`coverage`. - - The command line variant of this flag is - ``--enable-library-coverage`` and ``--disable-library-coverage``. - -Haddock options -^^^^^^^^^^^^^^^ - -.. cfg-field:: documentation: boolean - --enable-documentation - --disable-documentation - :synopsis: Enable building of documentation. - - :default: False - - Enables building of Haddock documentation - - The command line variant of this flag is ``--enable-documentation`` - and ``--disable-documentation``. - - `documentation: true` does not imply :cfg-field:`haddock-benchmarks`, - :cfg-field:`haddock-executables`, :cfg-field:`haddock-internal` or - :cfg-field:`haddock-tests`. These need to be enabled separately if - desired. - -.. cfg-field:: doc-index-file: templated path - --doc-index-file=TEMPLATE - :synopsis: Path to haddock templates. - - A central index of Haddock API documentation (template cannot use - ``$pkgid``), which should be updated as documentation is built. - - The command line variant of this flag is - ``--doc-index-file=TEMPLATE`` - -The following commands are equivalent to ones that would be passed when -running ``setup haddock``. (TODO: Where does the documentation get put.) - -.. cfg-field:: haddock-hoogle: boolean - :synopsis: Generate Hoogle file. 
- - :default: False - - Generate a text file which can be converted by Hoogle_ - into a database for searching. This is equivalent to running ``haddock`` - with the ``--hoogle`` flag. - - The command line variant of this flag is ``--hoogle`` (for the - ``haddock`` command). - -.. cfg-field:: haddock-html: boolean - :synopsis: Build HTML documentation. - - :default: True - - Build HTML documentation. - - The command line variant of this flag is ``--html`` (for the - ``haddock`` command). - -.. cfg-field:: haddock-html-location: templated path - :synopsis: Haddock HTML templates location. - - Specify a template for the location of HTML documentation for - prerequisite packages. The substitutions are applied to the template - to obtain a location for each package, which will be used by - hyperlinks in the generated documentation. For example, the - following command generates links pointing at [Hackage] pages: - - :: - - html-location: 'http://hackage.haskell.org/packages/archive/$pkg/latest/doc/html' - - Here the argument is quoted to prevent substitution by the shell. If - this option is omitted, the location for each package is obtained - using the package tool (e.g. ``ghc-pkg``). - - The command line variant of this flag is ``--html-location`` (for - the ``haddock`` subcommand). - -.. cfg-field:: haddock-executables: boolean - :synopsis: Generate documentation for executables. - - :default: False - - Run haddock on all executable programs. - - The command line variant of this flag is ``--executables`` (for the - ``haddock`` subcommand). - -.. cfg-field:: haddock-tests: boolean - :synopsis: Generate documentation for tests. - - :default: False - - Run haddock on all test suites. - - The command line variant of this flag is ``--tests`` (for the - ``haddock`` subcommand). - -.. cfg-field:: haddock-benchmarks: boolean - :synopsis: Generate documentation for benchmarks. - - :default: False - - Run haddock on all benchmarks. 
- - The command line variant of this flag is ``--benchmarks`` (for the - ``haddock`` subcommand). - -.. cfg-field:: haddock-all: boolean - :synopsis: Generate documentation for everything - - :default: False - - Run haddock on all components. - - The command line variant of this flag is ``--all`` (for the - ``haddock`` subcommand). - -.. cfg-field:: haddock-internal: boolean - :synopsis: Generate documentation for internal modules - - :default: False - - Build haddock documentation which includes unexposed modules and - symbols. - - The command line variant of this flag is ``--internal`` (for the - ``haddock`` subcommand). - -.. cfg-field:: haddock-css: path - :synopsis: Location of Haddoc CSS file. - - The CSS file that should be used to style the generated - documentation (overriding haddock's default.) - - The command line variant of this flag is ``--css`` (for the - ``haddock`` subcommand). - -.. cfg-field:: haddock-hyperlink-source: boolean - :synopsis: Generate hyperlinked source code for documentation - - :default: False - - Generated hyperlinked source code using `HsColour`_, and have - Haddock documentation link to it. - - The command line variant of this flag is ``--hyperlink-source`` (for - the ``haddock`` subcommand). - -.. cfg-field:: haddock-hscolour-css: path - :synopsis: Location of CSS file for HsColour - - The CSS file that should be used to style the generated hyperlinked - source code (from `HsColour`_). - - The command line variant of this flag is ``--hscolour-css`` (for the - ``haddock`` subcommand). - -.. cfg-field:: haddock-contents-location: URL - :synopsis: URL for contents page. - - A baked-in URL to be used as the location for the contents page. - - The command line variant of this flag is ``--contents-location`` - (for the ``haddock`` subcommand). - -.. cfg-field:: haddock-keep-temp-files: boolean - :synopsis: Keep temporary Haddock files. - - Keep temporary files. 
- - The command line variant of this flag is ``--keep-temp-files`` (for - the ``haddock`` subcommand). - -Advanced global configuration options -------------------------------------- - -.. cfg-field:: http-transport: curl, wget, powershell, or plain-http - --http-transport=transport - :synopsis: Transport to use with http(s) requests. - - :default: ``curl`` - - Set a transport to be used when making http(s) requests. - - The command line variant of this field is ``--http-transport=curl``. - -.. cfg-field:: ignore-expiry: boolean - --ignore-expiry - :synopsis: Ignore Hackage expiration dates. - - :default: False - - If ``True``, we will ignore expiry dates on metadata from Hackage. - - In general, you should not set this to ``True`` as it will leave you - vulnerable to stale cache attacks. However, it may be temporarily - useful if the main Hackage server is down, and we need to rely on - mirrors which have not been updated for longer than the expiry - period on the timestamp. - - The command line variant of this field is ``--ignore-expiry``. - -.. cfg-field:: remote-repo-cache: directory - --remote-repo-cache=DIR - :synopsis: Location of packages cache. - - :default: ``~/.cabal/packages`` - - [STRIKEOUT:The location where packages downloaded from remote - repositories will be cached.] Not implemented yet. - - The command line variant of this flag is - ``--remote-repo-cache=DIR``. - -.. cfg-field:: logs-dir: directory - --logs-dir=DIR - :synopsis: Directory to store build logs. - - :default: ``~/.cabal/logs`` - - [STRIKEOUT:The location where build logs for packages are stored.] - Not implemented yet. - - The command line variant of this flag is ``--logs-dir=DIR``. - -.. cfg-field:: build-summary: template filepath - --build-summary=TEMPLATE - :synopsis: Build summaries location. - - :default: ``~/.cabal/logs/build.log`` - - [STRIKEOUT:The file to save build summaries. 
Valid variables which - can be used in the path are ``$pkgid``, ``$compiler``, ``$os`` and - ``$arch``.] Not implemented yet. - - The command line variant of this flag is - ``--build-summary=TEMPLATE``. - -.. cfg-field:: local-repo: directory - --local-repo=DIR - :deprecated: - - [STRIKEOUT:The location of a local repository.] Deprecated. See - "Legacy repositories." - - The command line variant of this flag is ``--local-repo=DIR``. - -.. cfg-field:: world-file: path - --world-file=FILE - :deprecated: - - [STRIKEOUT:The location of the world file.] Deprecated. - - The command line variant of this flag is ``--world-file=FILE``. - -Undocumented fields: ``root-cmd``, ``symlink-bindir``, ``build-log``, -``remote-build-reporting``, ``report-planned-failure``, ``one-shot``, -``offline``. - -Advanced solver options -^^^^^^^^^^^^^^^^^^^^^^^ - -Most users generally won't need these. - -.. cfg-field:: solver: modular - --solver=modular - :synopsis: Which solver to use. - - This field is reserved to allow the specification of alternative - dependency solvers. At the moment, the only accepted option is - ``modular``. - - The command line variant of this field is ``--solver=modular``. - -.. cfg-field:: max-backjumps: nat - --max-backjumps=N - :synopsis: Maximum number of solver backjumps. - - :default: 2000 - - Maximum number of backjumps (backtracking multiple steps) allowed - while solving. Set -1 to allow unlimited backtracking, and 0 to - disable backtracking completely. - - The command line variant of this field is ``--max-backjumps=2000``. - -.. cfg-field:: reorder-goals: boolean - --reorder-goals - --no-reorder-goals - :synopsis: Allow solver to reorder goals. - - :default: False - - When enabled, the solver will reorder goals according to certain - heuristics. Slows things down on average, but may make backtracking - faster for some packages. 
It's unlikely to help for small projects, - but for big install plans it may help you find a plan when otherwise - this is not possible. See :issue:`1780` for more commentary. - - The command line variant of this field is ``--(no-)reorder-goals``. - -.. cfg-field:: count-conflicts: boolean - --count-conflicts - --no-count-conflicts - :synopsis: Solver prefers versions with less conflicts. - - :default: True - - Try to speed up solving by preferring goals that are involved in a - lot of conflicts. - - The command line variant of this field is - ``--(no-)count-conflicts``. - -.. cfg-field:: strong-flags: boolean - --strong-flags - --no-strong-flags - :synopsis: Do not defer flag choices when solving. - - :default: False - - Do not defer flag choices. (TODO: Better documentation.) - - The command line variant of this field is ``--(no-)strong-flags``. - -.. cfg-field:: allow-boot-library-installs: boolean - --allow-boot-library-installs - --no-allow-boot-library-installs - :synopsis: Allow cabal to install or upgrade any package. - - :default: False - - By default, the dependency solver doesn't allow ``base``, - ``ghc-prim``, ``integer-simple``, ``integer-gmp``, and - ``template-haskell`` to be installed or upgraded. This flag - removes the restriction. - - The command line variant of this field is - ``--(no-)allow-boot-library-installs``. - -.. cfg-field:: cabal-lib-version: version - --cabal-lib-version=version - :synopsis: Version of Cabal library used to build package. - - This field selects the version of the Cabal library which should be - used to build packages. This option is intended primarily for - internal development use (e.g., forcing a package to build with a - newer version of Cabal, to test a new version of Cabal.) (TODO: - Specify its semantics more clearly.) - - The command line variant of this field is - ``--cabal-lib-version=1.24.0.1``. - -.. 
include:: references.inc diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/README.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/README.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/README.md 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,126 +0,0 @@ -Cabal documentation -=================== - -### Where to read it -These docs will be built and deployed whenever a release is made, -and can be read at: https://www.haskell.org/cabal/users-guide/ - -In addition, the docs are taken directly from git and hosted at: -http://cabal.readthedocs.io/ - - -### How to build it - -* Currently requires python-2 -* `> pip install sphinx` -* `> pip install sphinx_rtd_theme` -* `> cd Cabal` -* `> make clean users-guide` -* if you are missing any other dependencies, install them with `pip` as needed -¯\\\_(ツ)_/¯ -* Python on Mac OS X dislikes `LC_CTYPE=UTF-8`, unset the env var in -terminal preferences and instead set `LC_ALL=en_US.UTF-8` or something -* On archlinux, package `python2-sphinx` is sufficient. - -### Caveats, for newcomers to RST from MD -RST does not allow you to skip section levels when nesting, like MD -does. 
-So, you cannot have - -``` - Section heading - =============== - - Some unimportant block - """""""""""""""""""""" -``` - - instead you need to observe order and either promote your block: - -``` - Section heading - =============== - - Some not quite so important block - --------------------------------- -``` - - or introduce more subsections: - -``` - Section heading - =============== - - Subsection - ---------- - - Subsubsection - ^^^^^^^^^^^^^ - - Some unimportant block - """""""""""""""""""""" -``` - -* RST simply parses a file and interpretes headings to indicate the - start of a new block, - * at the level implied by the header's *adornment*, if the adornment was - previously encountered in this file, - * at one level deeper than the previous block, otherwise. - - This means that a lot of confusion can arise when people use - different adornments to signify the same depth in different files. - - To eliminate this confusion, please stick to the adornment order - recommended by the Sphinx team: - -``` - #### - Part - #### - - ******* - Chapter - ******* - - Section - ======= - - Subsection - ---------- - - Subsubsection - ^^^^^^^^^^^^^ - - Paragraph - """"""""" -``` - -* The Read-The-Docs stylesheet does not support multiple top-level - sections in a file that is linked to from the top-most TOC (in - `index.rst`). It will mess up the sidebar. - E.g. you cannot link to a `cabal.rst` with sections "Introduction", - "Using Cabal", "Epilogue" from `index.rst`. - - One solution is to have a single section, e.g. "All About Cabal", in - `cabal.rst` and make the other blocks subsections of that. - - Another solution is to link via an indirection, e.g. create - `all-about-cabal.rst`, where you include `cabal.rst` using the - `.. toctree::` command and then link to `all-about-cabal.rst` from - `index.rst`. - This will effectively "push down" all blocks by one layer and solve - the problem without having to change `cabal.rst`. 
- - -* We use [`extlinks`](http://www.sphinx-doc.org/en/stable/ext/extlinks.html) - to shorten links to commonly referred resources (wiki, issue trackers). - - E.g. you can use the more convenient short syntax - - :issue:`123` - - which is expanded into a hyperlink - - `#123 `__ - - See `conf.py` for list of currently defined link shorteners. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/references.inc cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/references.inc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/doc/references.inc 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/doc/references.inc 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -.. -*- rst -*- - This file contains commonly used link-references - See also "extlinks" in conf.py - -.. _`Package Versioning Policy`: -.. _PVP: http://pvp.haskell.org/ - -.. _Hackage: http://hackage.haskell.org/ - -.. _Haskell: http://www.haskell.org/ - -.. _Haddock: http://www.haskell.org/haddock/ - -.. _Alex: http://www.haskell.org/alex/ - -.. _Happy: http://www.haskell.org/happy/ - -.. _Hoogle: http://www.haskell.org/hoogle/ - -.. _HsColour: http://www.cs.york.ac.uk/fp/darcs/hscolour/ - -.. _cpphs: http://projects.haskell.org/cpphs/ - -.. 
_ABNF: https://tools.ietf.org/html/rfc5234 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Language/Haskell/Extension.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Language/Haskell/Extension.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Language/Haskell/Extension.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Language/Haskell/Extension.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,909 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Language.Haskell.Extension --- Copyright : Isaac Jones 2003-2004 --- License : BSD3 --- --- Maintainer : libraries@haskell.org --- Portability : portable --- --- Haskell language dialects and extensions - -module Language.Haskell.Extension ( - Language(..), - knownLanguages, - classifyLanguage, - - Extension(..), - KnownExtension(..), - knownExtensions, - deprecatedExtensions, - classifyExtension, - ) where - -import Prelude () -import Distribution.Compat.Prelude - -import Data.Array (Array, accumArray, bounds, Ix(inRange), (!)) - -import Distribution.Parsec.Class -import Distribution.Pretty -import Distribution.Text - -import qualified Distribution.Compat.CharParsing as P -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - --- ------------------------------------------------------------ --- * Language --- ------------------------------------------------------------ - --- | This represents a Haskell language dialect. --- --- Language 'Extension's are interpreted relative to one of these base --- languages. --- -data Language = - - -- | The Haskell 98 language as defined by the Haskell 98 report. - -- - Haskell98 - - -- | The Haskell 2010 language as defined by the Haskell 2010 report. 
- -- - | Haskell2010 - - -- | An unknown language, identified by its name. - | UnknownLanguage String - deriving (Generic, Show, Read, Eq, Typeable, Data) - -instance Binary Language - -instance NFData Language where rnf = genericRnf - -knownLanguages :: [Language] -knownLanguages = [Haskell98, Haskell2010] - -instance Pretty Language where - pretty (UnknownLanguage other) = Disp.text other - pretty other = Disp.text (show other) - -instance Parsec Language where - parsec = classifyLanguage <$> P.munch1 isAlphaNum - -instance Text Language where - parse = do - lang <- Parse.munch1 isAlphaNum - return (classifyLanguage lang) - -classifyLanguage :: String -> Language -classifyLanguage = \str -> case lookup str langTable of - Just lang -> lang - Nothing -> UnknownLanguage str - where - langTable = [ (show lang, lang) - | lang <- knownLanguages ] - --- ------------------------------------------------------------ --- * Extension --- ------------------------------------------------------------ - --- Note: if you add a new 'KnownExtension': --- --- * also add it to the Distribution.Simple.X.languageExtensions lists --- (where X is each compiler: GHC, UHC, HaskellSuite) --- --- | This represents language extensions beyond a base 'Language' definition --- (such as 'Haskell98') that are supported by some implementations, usually --- in some special mode. --- --- Where applicable, references are given to an implementation's --- official documentation. - -data Extension = - -- | Enable a known extension - EnableExtension KnownExtension - - -- | Disable a known extension - | DisableExtension KnownExtension - - -- | An unknown extension, identified by the name of its @LANGUAGE@ - -- pragma. 
- | UnknownExtension String - - deriving (Generic, Show, Read, Eq, Ord, Typeable, Data) - -instance Binary Extension - -instance NFData Extension where rnf = genericRnf - -data KnownExtension = - - -- | Allow overlapping class instances, provided there is a unique - -- most specific instance for each use. - -- - -- * - OverlappingInstances - - -- | Ignore structural rules guaranteeing the termination of class - -- instance resolution. Termination is guaranteed by a fixed-depth - -- recursion stack, and compilation may fail if this depth is - -- exceeded. - -- - -- * - | UndecidableInstances - - -- | Implies 'OverlappingInstances'. Allow the implementation to - -- choose an instance even when it is possible that further - -- instantiation of types will lead to a more specific instance - -- being applicable. - -- - -- * - | IncoherentInstances - - -- | /(deprecated)/ Deprecated in favour of 'RecursiveDo'. - -- - -- Old description: Allow recursive bindings in @do@ blocks, using - -- the @rec@ keyword. See also 'RecursiveDo'. - | DoRec - - -- | Allow recursive bindings in @do@ blocks, using the @rec@ - -- keyword, or @mdo@, a variant of @do@. - -- - -- * - | RecursiveDo - - -- | Provide syntax for writing list comprehensions which iterate - -- over several lists together, like the 'zipWith' family of - -- functions. - -- - -- * - | ParallelListComp - - -- | Allow multiple parameters in a type class. - -- - -- * - | MultiParamTypeClasses - - -- | Enable the dreaded monomorphism restriction. - -- - -- * - | MonomorphismRestriction - - -- | Allow a specification attached to a multi-parameter type class - -- which indicates that some parameters are entirely determined by - -- others. The implementation will check that this property holds - -- for the declared instances, and will use this property to reduce - -- ambiguity in instance resolution. - -- - -- * - | FunctionalDependencies - - -- | /(deprecated)/ A synonym for 'RankNTypes'. 
- -- - -- Old description: Like 'RankNTypes' but does not allow a - -- higher-rank type to itself appear on the left of a function - -- arrow. - -- - -- * - | Rank2Types - - -- | Allow a universally-quantified type to occur on the left of a - -- function arrow. - -- - -- * - | RankNTypes - - -- | /(deprecated)/ A synonym for 'RankNTypes'. - -- - -- Old description: Allow data constructors to have polymorphic - -- arguments. Unlike 'RankNTypes', does not allow this for ordinary - -- functions. - -- - -- * - | PolymorphicComponents - - -- | Allow existentially-quantified data constructors. - -- - -- * - | ExistentialQuantification - - -- | Cause a type variable in a signature, which has an explicit - -- @forall@ quantifier, to scope over the definition of the - -- accompanying value declaration. - -- - -- * - | ScopedTypeVariables - - -- | Deprecated, use 'ScopedTypeVariables' instead. - | PatternSignatures - - -- | Enable implicit function parameters with dynamic scope. - -- - -- * - | ImplicitParams - - -- | Relax some restrictions on the form of the context of a type - -- signature. - -- - -- * - | FlexibleContexts - - -- | Relax some restrictions on the form of the context of an - -- instance declaration. - -- - -- * - | FlexibleInstances - - -- | Allow data type declarations with no constructors. - -- - -- * - | EmptyDataDecls - - -- | Run the C preprocessor on Haskell source code. - -- - -- * - | CPP - - -- | Allow an explicit kind signature giving the kind of types over - -- which a type variable ranges. - -- - -- * - | KindSignatures - - -- | Enable a form of pattern which forces evaluation before an - -- attempted match, and a form of strict @let@/@where@ binding. - -- - -- * - | BangPatterns - - -- | Allow type synonyms in instance heads. - -- - -- * - | TypeSynonymInstances - - -- | Enable Template Haskell, a system for compile-time - -- metaprogramming. - -- - -- * - | TemplateHaskell - - -- | Enable the Foreign Function Interface. 
In GHC, implements the - -- standard Haskell 98 Foreign Function Interface Addendum, plus - -- some GHC-specific extensions. - -- - -- * - | ForeignFunctionInterface - - -- | Enable arrow notation. - -- - -- * - | Arrows - - -- | /(deprecated)/ Enable generic type classes, with default instances defined in - -- terms of the algebraic structure of a type. - -- - -- * - | Generics - - -- | Enable the implicit importing of the module "Prelude". When - -- disabled, when desugaring certain built-in syntax into ordinary - -- identifiers, use whatever is in scope rather than the "Prelude" - -- -- version. - -- - -- * - | ImplicitPrelude - - -- | Enable syntax for implicitly binding local names corresponding - -- to the field names of a record. Puns bind specific names, unlike - -- 'RecordWildCards'. - -- - -- * - | NamedFieldPuns - - -- | Enable a form of guard which matches a pattern and binds - -- variables. - -- - -- * - | PatternGuards - - -- | Allow a type declared with @newtype@ to use @deriving@ for any - -- class with an instance for the underlying type. - -- - -- * - | GeneralizedNewtypeDeriving - - -- | Enable the \"Trex\" extensible records system. - -- - -- * - | ExtensibleRecords - - -- | Enable type synonyms which are transparent in some definitions - -- and opaque elsewhere, as a way of implementing abstract - -- datatypes. - -- - -- * - | RestrictedTypeSynonyms - - -- | Enable an alternate syntax for string literals, - -- with string templating. - -- - -- * - | HereDocuments - - -- | Allow the character @#@ as a postfix modifier on identifiers. - -- Also enables literal syntax for unboxed values. - -- - -- * - | MagicHash - - -- | Allow data types and type synonyms which are indexed by types, - -- i.e. ad-hoc polymorphism for types. - -- - -- * - | TypeFamilies - - -- | Allow a standalone declaration which invokes the type class - -- @deriving@ mechanism. 
- -- - -- * - | StandaloneDeriving - - -- | Allow certain Unicode characters to stand for certain ASCII - -- character sequences, e.g. keywords and punctuation. - -- - -- * - | UnicodeSyntax - - -- | Allow the use of unboxed types as foreign types, e.g. in - -- @foreign import@ and @foreign export@. - -- - -- * - | UnliftedFFITypes - - -- | Enable interruptible FFI. - -- - -- * - | InterruptibleFFI - - -- | Allow use of CAPI FFI calling convention (@foreign import capi@). - -- - -- * - | CApiFFI - - -- | Defer validity checking of types until after expanding type - -- synonyms, relaxing the constraints on how synonyms may be used. - -- - -- * - | LiberalTypeSynonyms - - -- | Allow the name of a type constructor, type class, or type - -- variable to be an infix operator. - -- * - | TypeOperators - - -- | Enable syntax for implicitly binding local names corresponding - -- to the field names of a record. A wildcard binds all unmentioned - -- names, unlike 'NamedFieldPuns'. - -- - -- * - | RecordWildCards - - -- | Deprecated, use 'NamedFieldPuns' instead. - | RecordPuns - - -- | Allow a record field name to be disambiguated by the type of - -- the record it's in. - -- - -- * - | DisambiguateRecordFields - - -- | Enable traditional record syntax (as supported by Haskell 98) - -- - -- * - | TraditionalRecordSyntax - - -- | Enable overloading of string literals using a type class, much - -- like integer literals. - -- - -- * - | OverloadedStrings - - -- | Enable generalized algebraic data types, in which type - -- variables may be instantiated on a per-constructor basis. Implies - -- 'GADTSyntax'. - -- - -- * - | GADTs - - -- | Enable GADT syntax for declaring ordinary algebraic datatypes. - -- - -- * - | GADTSyntax - - -- | /(deprecated)/ Has no effect. - -- - -- Old description: Make pattern bindings monomorphic. - -- - -- * - | MonoPatBinds - - -- | Relax the requirements on mutually-recursive polymorphic - -- functions. 
- -- - -- * - | RelaxedPolyRec - - -- | Allow default instantiation of polymorphic types in more - -- situations. - -- - -- * - | ExtendedDefaultRules - - -- | Enable unboxed tuples. - -- - -- * - | UnboxedTuples - - -- | Enable @deriving@ for classes 'Data.Typeable.Typeable' and - -- 'Data.Generics.Data'. - -- - -- * - | DeriveDataTypeable - - -- | Enable @deriving@ for 'GHC.Generics.Generic' and 'GHC.Generics.Generic1'. - -- - -- * - | DeriveGeneric - - -- | Enable support for default signatures. - -- - -- * - | DefaultSignatures - - -- | Allow type signatures to be specified in instance declarations. - -- - -- * - | InstanceSigs - - -- | Allow a class method's type to place additional constraints on - -- a class type variable. - -- - -- * - | ConstrainedClassMethods - - -- | Allow imports to be qualified by the package name the module is - -- intended to be imported from, e.g. - -- - -- > import "network" Network.Socket - -- - -- * - | PackageImports - - -- | /(deprecated)/ Allow a type variable to be instantiated at a - -- polymorphic type. - -- - -- * - | ImpredicativeTypes - - -- | /(deprecated)/ Change the syntax for qualified infix operators. - -- - -- * - | NewQualifiedOperators - - -- | Relax the interpretation of left operator sections to allow - -- unary postfix operators. - -- - -- * - | PostfixOperators - - -- | Enable quasi-quotation, a mechanism for defining new concrete - -- syntax for expressions and patterns. - -- - -- * - | QuasiQuotes - - -- | Enable generalized list comprehensions, supporting operations - -- such as sorting and grouping. - -- - -- * - | TransformListComp - - -- | Enable monad comprehensions, which generalise the list - -- comprehension syntax to work for any monad. - -- - -- * - | MonadComprehensions - - -- | Enable view patterns, which match a value by applying a - -- function and matching on the result. 
- -- - -- * - | ViewPatterns - - -- | Allow concrete XML syntax to be used in expressions and patterns, - -- as per the Haskell Server Pages extension language: - -- . The ideas behind it are - -- discussed in the paper \"Haskell Server Pages through Dynamic Loading\" - -- by Niklas Broberg, from Haskell Workshop '05. - | XmlSyntax - - -- | Allow regular pattern matching over lists, as discussed in the - -- paper \"Regular Expression Patterns\" by Niklas Broberg, Andreas Farre - -- and Josef Svenningsson, from ICFP '04. - | RegularPatterns - - -- | Enable the use of tuple sections, e.g. @(, True)@ desugars into - -- @\x -> (x, True)@. - -- - -- * - | TupleSections - - -- | Allow GHC primops, written in C--, to be imported into a Haskell - -- file. - | GHCForeignImportPrim - - -- | Support for patterns of the form @n + k@, where @k@ is an - -- integer literal. - -- - -- * - | NPlusKPatterns - - -- | Improve the layout rule when @if@ expressions are used in a @do@ - -- block. - | DoAndIfThenElse - - -- | Enable support for multi-way @if@-expressions. - -- - -- * - | MultiWayIf - - -- | Enable support lambda-@case@ expressions. - -- - -- * - | LambdaCase - - -- | Makes much of the Haskell sugar be desugared into calls to the - -- function with a particular name that is in scope. - -- - -- * - | RebindableSyntax - - -- | Make @forall@ a keyword in types, which can be used to give the - -- generalisation explicitly. - -- - -- * - | ExplicitForAll - - -- | Allow contexts to be put on datatypes, e.g. the @Eq a@ in - -- @data Eq a => Set a = NilSet | ConsSet a (Set a)@. - -- - -- * - | DatatypeContexts - - -- | Local (@let@ and @where@) bindings are monomorphic. - -- - -- * - | MonoLocalBinds - - -- | Enable @deriving@ for the 'Data.Functor.Functor' class. - -- - -- * - | DeriveFunctor - - -- | Enable @deriving@ for the 'Data.Traversable.Traversable' class. - -- - -- * - | DeriveTraversable - - -- | Enable @deriving@ for the 'Data.Foldable.Foldable' class. 
- -- - -- * - | DeriveFoldable - - -- | Enable non-decreasing indentation for @do@ blocks. - -- - -- * - | NondecreasingIndentation - - -- | Allow imports to be qualified with a safe keyword that requires - -- the imported module be trusted as according to the Safe Haskell - -- definition of trust. - -- - -- > import safe Network.Socket - -- - -- * - | SafeImports - - -- | Compile a module in the Safe, Safe Haskell mode -- a restricted - -- form of the Haskell language to ensure type safety. - -- - -- * - | Safe - - -- | Compile a module in the Trustworthy, Safe Haskell mode -- no - -- restrictions apply but the module is marked as trusted as long as - -- the package the module resides in is trusted. - -- - -- * - | Trustworthy - - -- | Compile a module in the Unsafe, Safe Haskell mode so that - -- modules compiled using Safe, Safe Haskell mode can't import it. - -- - -- * - | Unsafe - - -- | Allow type class/implicit parameter/equality constraints to be - -- used as types with the special kind constraint. Also generalise - -- the @(ctxt => ty)@ syntax so that any type of kind constraint can - -- occur before the arrow. - -- - -- * - | ConstraintKinds - - -- | Enable kind polymorphism. - -- - -- * - | PolyKinds - - -- | Enable datatype promotion. - -- - -- * - | DataKinds - - -- | Enable parallel arrays syntax (@[:@, @:]@) for /Data Parallel Haskell/. - -- - -- * - | ParallelArrays - - -- | Enable explicit role annotations, like in (@type role Foo representational representational@). - -- - -- * - | RoleAnnotations - - -- | Enable overloading of list literals, arithmetic sequences and - -- list patterns using the 'IsList' type class. - -- - -- * - | OverloadedLists - - -- | Enable case expressions that have no alternatives. Also applies to lambda-case expressions if they are enabled. - -- - -- * - | EmptyCase - - -- | /(deprecated)/ Deprecated in favour of 'DeriveDataTypeable'. 
- -- - -- Old description: Triggers the generation of derived 'Typeable' - -- instances for every datatype and type class declaration. - -- - -- * - | AutoDeriveTypeable - - -- | Desugars negative literals directly (without using negate). - -- - -- * - | NegativeLiterals - - -- | Allow the use of binary integer literal syntax (e.g. @0b11001001@ to denote @201@). - -- - -- * - | BinaryLiterals - - -- | Allow the use of floating literal syntax for all instances of 'Num', including 'Int' and 'Integer'. - -- - -- * - | NumDecimals - - -- | Enable support for type classes with no type parameter. - -- - -- * - | NullaryTypeClasses - - -- | Enable explicit namespaces in module import/export lists. - -- - -- * - | ExplicitNamespaces - - -- | Allow the user to write ambiguous types, and the type inference engine to infer them. - -- - -- * - | AllowAmbiguousTypes - - -- | Enable @foreign import javascript@. - | JavaScriptFFI - - -- | Allow giving names to and abstracting over patterns. - -- - -- * - | PatternSynonyms - - -- | Allow anonymous placeholders (underscore) inside type signatures. The - -- type inference engine will generate a message describing the type inferred - -- at the hole's location. - -- - -- * - | PartialTypeSignatures - - -- | Allow named placeholders written with a leading underscore inside type - -- signatures. Wildcards with the same name unify to the same type. - -- - -- * - | NamedWildCards - - -- | Enable @deriving@ for any class. - -- - -- * - | DeriveAnyClass - - -- | Enable @deriving@ for the 'Language.Haskell.TH.Syntax.Lift' class. - -- - -- * - | DeriveLift - - -- | Enable support for 'static pointers' (and the @static@ - -- keyword) to refer to globally stable names, even across - -- different programs. - -- - -- * - | StaticPointers - - -- | Switches data type declarations to be strict by default (as if - -- they had a bang using @BangPatterns@), and allow opt-in field - -- laziness using @~@. 
- -- - -- * - | StrictData - - -- | Switches all pattern bindings to be strict by default (as if - -- they had a bang using @BangPatterns@), ordinary patterns are - -- recovered using @~@. Implies @StrictData@. - -- - -- * - | Strict - - -- | Allows @do@-notation for types that are @'Applicative'@ as well - -- as @'Monad'@. When enabled, desugaring @do@ notation tries to use - -- @(<*>)@ and @'fmap'@ and @'join'@ as far as possible. - | ApplicativeDo - - -- | Allow records to use duplicated field labels for accessors. - | DuplicateRecordFields - - -- | Enable explicit type applications with the syntax @id \@Int@. - | TypeApplications - - -- | Dissolve the distinction between types and kinds, allowing the compiler - -- to reason about kind equality and therefore enabling GADTs to be promoted - -- to the type-level. - | TypeInType - - -- | Allow recursive (and therefore undecideable) super-class relationships. - | UndecidableSuperClasses - - -- | A temporary extension to help library authors check if their - -- code will compile with the new planned desugaring of fail. - | MonadFailDesugaring - - -- | A subset of @TemplateHaskell@ including only quoting. - | TemplateHaskellQuotes - - -- | Allows use of the @#label@ syntax. - | OverloadedLabels - - -- | Allow functional dependency annotations on type families to declare them - -- as injective. - | TypeFamilyDependencies - - -- | Allow multiple @deriving@ clauses, each optionally qualified with a - -- /strategy/. - | DerivingStrategies - - -- | Enable the use of unboxed sum syntax. - | UnboxedSums - - -- | Allow use of hexadecimal literal notation for floating-point values. - | HexFloatLiterals - - -- | Allow @do@ blocks etc. in argument position. - | BlockArguments - - -- | Allow use of underscores in numeric literals. - | NumericUnderscores - - -- | Allow @forall@ in constraints. - | QuantifiedConstraints - - -- | Have @*@ refer to @Type@. 
- | StarIsType - - deriving (Generic, Show, Read, Eq, Ord, Enum, Bounded, Typeable, Data) - -instance Binary KnownExtension - -instance NFData KnownExtension where rnf = genericRnf - -{-# DEPRECATED knownExtensions - "KnownExtension is an instance of Enum and Bounded, use those instead. This symbol will be removed in Cabal-3.0 (est. Oct 2018)." #-} -knownExtensions :: [KnownExtension] -knownExtensions = [minBound..maxBound] - --- | Extensions that have been deprecated, possibly paired with another --- extension that replaces it. --- -deprecatedExtensions :: [(Extension, Maybe Extension)] -deprecatedExtensions = - [ (EnableExtension RecordPuns, Just (EnableExtension NamedFieldPuns)) - , (EnableExtension PatternSignatures, Just (EnableExtension ScopedTypeVariables)) - ] --- NOTE: when adding deprecated extensions that have new alternatives --- we must be careful to make sure that the deprecation messages are --- valid. We must not recommend aliases that cannot be used with older --- compilers, perhaps by adding support in Cabal to translate the new --- name to the old one for older compilers. Otherwise we are in danger --- of the scenario in ticket #689. 
- -instance Pretty Extension where - pretty (UnknownExtension other) = Disp.text other - pretty (EnableExtension ke) = Disp.text (show ke) - pretty (DisableExtension ke) = Disp.text ("No" ++ show ke) - -instance Parsec Extension where - parsec = classifyExtension <$> P.munch1 isAlphaNum - -instance Text Extension where - parse = do - extension <- Parse.munch1 isAlphaNum - return (classifyExtension extension) - -instance Pretty KnownExtension where - pretty ke = Disp.text (show ke) - -instance Text KnownExtension where - parse = do - extension <- Parse.munch1 isAlphaNum - case classifyKnownExtension extension of - Just ke -> - return ke - Nothing -> - fail ("Can't parse " ++ show extension ++ " as KnownExtension") - -classifyExtension :: String -> Extension -classifyExtension string - = case classifyKnownExtension string of - Just ext -> EnableExtension ext - Nothing -> - case string of - 'N':'o':string' -> - case classifyKnownExtension string' of - Just ext -> DisableExtension ext - Nothing -> UnknownExtension string - _ -> UnknownExtension string - --- | 'read' for 'KnownExtension's is really really slow so for the Text --- instance --- what we do is make a simple table indexed off the first letter in the --- extension name. The extension names actually cover the range @'A'-'Z'@ --- pretty densely and the biggest bucket is 7 so it's not too bad. We just do --- a linear search within each bucket. --- --- This gives an order of magnitude improvement in parsing speed, and it'll --- also allow us to do case insensitive matches in future if we prefer. --- -classifyKnownExtension :: String -> Maybe KnownExtension -classifyKnownExtension "" = Nothing -classifyKnownExtension string@(c : _) - | inRange (bounds knownExtensionTable) c - = lookup string (knownExtensionTable ! 
c) - | otherwise = Nothing - -knownExtensionTable :: Array Char [(String, KnownExtension)] -knownExtensionTable = - accumArray (flip (:)) [] ('A', 'Z') - [ (head str, (str, extension)) - | extension <- [toEnum 0 ..] - , let str = show extension ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/LICENSE cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/LICENSE --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/LICENSE 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,31 +0,0 @@ -Copyright (c) 2003-2017, Cabal Development Team. -See the AUTHORS file for the full list of copyright holders. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of Isaac Jones nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/README.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/README.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/README.md 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,171 +0,0 @@ -The Cabal library package -========================= - -See the [Cabal web site] for more information. - -If you also want the `cabal` command-line program, you need the -[cabal-install] package in addition to this library. - -[cabal-install]: ../cabal-install/README.md - -Installing the Cabal library -============================ - -If you already have the `cabal` program ---------------------------------------- - -In this case run: - - $ cabal install - -However, if you do not have an existing version of the `cabal` program, -you first must install the Cabal library. To avoid this bootstrapping -problem, you can install the Cabal library directly as described below. - - -Installing as a user (no root or administrator access) ------------------------------------------------------- - - ghc -threaded --make Setup - ./Setup configure --user - ./Setup build - ./Setup install - -Note the use of the `--user` flag at the configure step. - -Compiling 'Setup' rather than using `runghc Setup` is much faster and -works on Windows. 
For all packages other than Cabal itself, it is fine -to use `runghc`. - -This will install into `$HOME/.cabal/` on Unix and into -`Documents and Settings\$User\Application Data\cabal\` on Windows. -If you want to install elsewhere, use the `--prefix=` flag at the -configure step. - - -Installing as root or Administrator ------------------------------------ - - ghc -threaded --make Setup - ./Setup configure - ./Setup build - sudo ./Setup install - -Compiling Setup rather than using `runghc Setup` is much faster and -works on Windows. For all packages other than Cabal itself, it is fine -to use `runghc`. - -This will install into `/usr/local` on Unix, and on Windows it will -install into `$ProgramFiles/Haskell`. If you want to install elsewhere, -use the `--prefix=` flag at the configure step. - - -Using older versions of GHC and Cabal -====================================== - -It is recommended that you leave any pre-existing version of Cabal -installed. In particular, it is *essential* you keep the version that -came with GHC itself, since other installed packages require it (for -instance, the "ghc" API package). - -Prior to GHC 6.4.2, however, GHC did not deal particularly well with -having multiple versions of packages installed at once. So if you are -using GHC 6.4.1 or older and you have an older version of Cabal -installed, you should probably remove it by running: - - $ ghc-pkg unregister Cabal - -or, if you had Cabal installed only for your user account, run: - - $ ghc-pkg unregister Cabal --user - -The `filepath` dependency -========================= - -Cabal uses the [filepath] package, so it must be installed first. -GHC version 6.6.1 and later come with `filepath`, however, earlier -versions do not by default. If you do not already have `filepath`, -you need to install it. You can use any existing version of Cabal to do -that. If you have neither Cabal nor `filepath`, it is slightly -harder but still possible. 
- -Unpack Cabal and `filepath` into separate directories. For example: - - tar -xzf filepath-1.1.0.0.tar.gz - tar -xzf Cabal-1.6.0.0.tar.gz - - # rename to make the following instructions simpler: - mv filepath-1.1.0.0/ filepath/ - mv Cabal-1.6.0.0/ Cabal/ - - cd Cabal - ghc -i../filepath -cpp --make Setup.hs -o ../filepath/setup - cd ../filepath/ - ./setup configure --user - ./setup build - ./setup install - -This installs `filepath` so that you can install Cabal with the normal -method. - -[filepath]: http://hackage.haskell.org/package/filepath - -More information -================ - -Please see the [Cabal web site] for the [user guide] and [API -documentation]. There is additional information available on the -[development wiki]. - -[user guide]: http://www.haskell.org/cabal/users-guide -[API documentation]: http://www.haskell.org/cabal/release/cabal-latest/doc/API/Cabal/Distribution-Simple.html -[development wiki]: https://github.com/haskell/cabal/wiki - - -Bugs -==== - -Please report bugs and feature requests to Cabal's [bug tracker]. - - -Your help ---------- - -To help Cabal's development, it is enormously helpful to know from -Cabal's users what their most pressing problems are with Cabal and -[Hackage]. You may have a favourite Cabal bug or limitation. Look at -Cabal's [bug tracker]. Ensure that the problem is reported there and -adequately described. Comment on the issue to report how much of a -problem the bug is for you. Subscribe to the issues's notifications to -discussed requirements and keep informed on progress. For feature -requests, it is helpful if there is a description of how you would -expect to interact with the new feature. - -[Hackage]: http://hackage.haskell.org - - -Source code -=========== - -You can get the master development branch using: - - $ git clone https://github.com/haskell/cabal.git - - -Credits -======= - -See the `AUTHORS` file. 
- -Authors of the [original Cabal -specification](https://www.haskell.org/cabal/proposal/pkg-spec.pdf): - -- Isaac Jones -- Simon Marlow -- Ross Patterson -- Simon Peyton Jones -- Malcolm Wallace - - -[bug tracker]: https://github.com/haskell/cabal/issues -[Cabal web site]: http://www.haskell.org/cabal/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/Setup.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -import Distribution.Simple -main :: IO () -main = defaultMain - --- Although this looks like the Simple build type, it is in fact vital that --- we use this Setup.hs because it'll get compiled against the local copy --- of the Cabal lib, thus enabling Cabal to bootstrap itself without relying --- on any previous installation. This also means we can use any new features --- immediately because we never have to worry about building Cabal with an --- older version of itself. --- --- NOTE 25/01/2015: Bootstrapping is disabled for now, see --- https://github.com/haskell/cabal/issues/3003. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/CheckTests.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/CheckTests.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/CheckTests.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/CheckTests.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,79 +0,0 @@ -module Main - ( main - ) where - -import Test.Tasty -import Test.Tasty.Golden.Advanced (goldenTest) - -import Data.Algorithm.Diff (Diff (..), getGroupedDiff) -import Distribution.PackageDescription.Check (checkPackage) -import Distribution.PackageDescription.Parsec (parseGenericPackageDescription) -import Distribution.Parsec.Common (showPError, showPWarning) -import Distribution.Parsec.ParseResult (runParseResult) -import Distribution.Utils.Generic (fromUTF8BS, toUTF8BS) -import System.FilePath (replaceExtension, ()) - -import qualified Data.ByteString as BS -import qualified Data.ByteString.Char8 as BS8 - -tests :: TestTree -tests = checkTests - -------------------------------------------------------------------------------- --- Regressions -------------------------------------------------------------------------------- - -checkTests :: TestTree -checkTests = testGroup "regressions" - [ checkTest "nothing-unicode.cabal" - , checkTest "haddock-api-2.18.1-check.cabal" - , checkTest "issue-774.cabal" - , checkTest "MiniAgda.cabal" - , checkTest "extensions-paths-5054.cabal" - , checkTest "pre-1.6-glob.cabal" - , checkTest "pre-2.4-globstar.cabal" - , checkTest "bad-glob-syntax.cabal" - , checkTest "cc-options-with-optimization.cabal" - , checkTest "cxx-options-with-optimization.cabal" - , checkTest "ghc-option-j.cabal" - ] - -checkTest :: FilePath -> TestTree -checkTest fp = cabalGoldenTest fp correct $ do - contents <- BS.readFile input - let res = parseGenericPackageDescription contents - let (ws, x) = runParseResult res - - return $ 
toUTF8BS $ case x of - Right gpd -> - -- Note: parser warnings are reported by `cabal check`, but not by - -- D.PD.Check functionality. - unlines (map (showPWarning fp) ws) ++ - unlines (map show (checkPackage gpd Nothing)) - Left (_, errs) -> unlines $ map (("ERROR: " ++) . showPError fp) errs - where - input = "tests" "ParserTests" "regressions" fp - correct = replaceExtension input "check" - -------------------------------------------------------------------------------- --- Main -------------------------------------------------------------------------------- - -main :: IO () -main = defaultMain tests - -cabalGoldenTest :: TestName -> FilePath -> IO BS.ByteString -> TestTree -cabalGoldenTest name ref act = goldenTest name (BS.readFile ref) act cmp upd - where - upd = BS.writeFile ref - cmp x y | x == y = return Nothing - cmp x y = return $ Just $ unlines $ - concatMap f (getGroupedDiff (BS8.lines x) (BS8.lines y)) - where - f (First xs) = map (cons3 '-' . fromUTF8BS) xs - f (Second ys) = map (cons3 '+' . fromUTF8BS) ys - -- we print unchanged lines too. It shouldn't be a problem while we have - -- reasonably small examples - f (Both xs _) = map (cons3 ' ' . fromUTF8BS) xs - -- we add three characters, so the changed lines are easier to spot - cons3 c cs = c : c : c : ' ' : cs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/custom-setup/CabalDoctestSetup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/custom-setup/CabalDoctestSetup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/custom-setup/CabalDoctestSetup.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/custom-setup/CabalDoctestSetup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,475 +0,0 @@ --- This is Distribution.Extra.Doctest module from cabal-doctest-1.0.4 --- This isn't technically a Custom-Setup script, but it /was/. 
- -{- - -Copyright (c) 2017, Oleg Grenrus - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of Oleg Grenrus nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - --} - -{-# LANGUAGE CPP #-} -{-# LANGUAGE OverloadedStrings #-} --- | The provided 'generateBuildModule' generates 'Build_doctests' module. 
--- That module exports enough configuration, so your doctests could be simply --- --- @ --- module Main where --- --- import Build_doctests (flags, pkgs, module_sources) --- import Data.Foldable (traverse_) --- import Test.Doctest (doctest) --- --- main :: IO () --- main = do --- traverse_ putStrLn args -- optionally print arguments --- doctest args --- where --- args = flags ++ pkgs ++ module_sources --- @ --- --- To use this library in the @Setup.hs@, you should specify a @custom-setup@ --- section in the cabal file, for example: --- --- @ --- custom-setup --- setup-depends: --- base >= 4 && <5, --- cabal-doctest >= 1 && <1.1 --- @ --- --- /Note:/ you don't need to depend on @Cabal@ if you use only --- 'defaultMainWithDoctests' in the @Setup.hs@. --- -module CabalDoctestSetup ( - defaultMainWithDoctests, - defaultMainAutoconfWithDoctests, - addDoctestsUserHook, - doctestsUserHooks, - generateBuildModule, - ) where - --- Hacky way to suppress few deprecation warnings. -#if MIN_VERSION_Cabal(1,24,0) -#define InstalledPackageId UnitId -#endif - -import Control.Monad - (when) -import Data.List - (nub) -import Data.Maybe - (maybeToList, mapMaybe) -import Data.String - (fromString) -import qualified Data.Foldable as F - (for_) -import qualified Data.Traversable as T - (traverse) -import qualified Distribution.ModuleName as ModuleName - (fromString) -import Distribution.ModuleName - (ModuleName) -import Distribution.Package - (InstalledPackageId) -import Distribution.Package - (Package (..), PackageId, packageVersion) -import Distribution.PackageDescription - (BuildInfo (..), Executable (..), Library (..), - PackageDescription (), TestSuite (..)) -import Distribution.Simple - (UserHooks (..), autoconfUserHooks, defaultMainWithHooks, simpleUserHooks) -import Distribution.Simple.BuildPaths - (autogenModulesDir) -import Distribution.Simple.Compiler - (PackageDB (..), showCompilerId) -import Distribution.Simple.LocalBuildInfo - (ComponentLocalBuildInfo 
(componentPackageDeps), LocalBuildInfo (), - compiler, withExeLBI, withLibLBI, withPackageDB, withTestLBI) -import Distribution.Simple.Setup - (BuildFlags (buildDistPref, buildVerbosity), fromFlag) -import Distribution.Simple.Utils - (createDirectoryIfMissingVerbose, findFile, rewriteFile) -import Distribution.Text - (display, simpleParse) -import System.FilePath - ((), (<.>), dropExtension) - -import Data.IORef (newIORef, modifyIORef, readIORef) - -#if MIN_VERSION_Cabal(1,25,0) -import Distribution.Simple.BuildPaths - (autogenComponentModulesDir) -#endif -#if MIN_VERSION_Cabal(2,0,0) -import Distribution.Types.MungedPackageId - (MungedPackageId) -import Distribution.Types.UnqualComponentName - (unUnqualComponentName) -#endif - -#if MIN_VERSION_directory(1,2,2) -import System.Directory - (makeAbsolute) -#else -import System.Directory - (getCurrentDirectory) -import System.FilePath - (isAbsolute) - -makeAbsolute :: FilePath -> IO FilePath -makeAbsolute p | isAbsolute p = return p - | otherwise = do - cwd <- getCurrentDirectory - return $ cwd p -#endif - --- | A default main with doctests: --- --- @ --- import Distribution.Extra.Doctest --- (defaultMainWithDoctests) --- --- main :: IO () --- main = defaultMainWithDoctests "doctests" --- @ -defaultMainWithDoctests - :: String -- ^ doctests test-suite name - -> IO () -defaultMainWithDoctests = defaultMainWithHooks . doctestsUserHooks - --- | Like 'defaultMainWithDoctests', for 'build-type: Configure' packages. --- --- @since 1.0.2 -defaultMainAutoconfWithDoctests - :: String -- ^ doctests test-suite name - -> IO () -defaultMainAutoconfWithDoctests n = - defaultMainWithHooks (addDoctestsUserHook n autoconfUserHooks) - --- | 'simpleUserHooks' with 'generateBuildModule' prepended to the 'buildHook'. 
-doctestsUserHooks - :: String -- ^ doctests test-suite name - -> UserHooks -doctestsUserHooks testsuiteName = - addDoctestsUserHook testsuiteName simpleUserHooks - --- | --- --- @since 1.0.2 -addDoctestsUserHook :: String -> UserHooks -> UserHooks -addDoctestsUserHook testsuiteName uh = uh - { buildHook = \pkg lbi hooks flags -> do - generateBuildModule testsuiteName flags pkg lbi - buildHook uh pkg lbi hooks flags - } - -data Name = NameLib (Maybe String) | NameExe String deriving (Eq, Show) - -nameToString :: Name -> String -nameToString n = case n of - NameLib x -> maybe "" (("_lib_" ++) . map fixchar) x - NameExe x -> "_exe_" ++ map fixchar x - where - -- Taken from Cabal: - -- https://github.com/haskell/cabal/blob/20de0bfea72145ba1c37e3f500cee5258cc18e51/Cabal/Distribution/Simple/Build/Macros.hs#L156-L158 - -- - -- Needed to fix component names with hyphens in them, as hyphens aren't - -- allowed in Haskell identifier names. - fixchar :: Char -> Char - fixchar '-' = '_' - fixchar c = c - -data Component = Component Name [String] [String] [String] - deriving Show - --- | Generate a build module for the test suite. 
--- --- @ --- import Distribution.Simple --- (defaultMainWithHooks, UserHooks(..), simpleUserHooks) --- import Distribution.Extra.Doctest --- (generateBuildModule) --- --- main :: IO () --- main = defaultMainWithHooks simpleUserHooks --- { buildHook = \pkg lbi hooks flags -> do --- generateBuildModule "doctests" flags pkg lbi --- buildHook simpleUserHooks pkg lbi hooks flags --- } --- @ -generateBuildModule - :: String -- ^ doctests test-suite name - -> BuildFlags -> PackageDescription -> LocalBuildInfo -> IO () -generateBuildModule testSuiteName flags pkg lbi = do - let verbosity = fromFlag (buildVerbosity flags) - let distPref = fromFlag (buildDistPref flags) - - -- Package DBs - let dbStack = withPackageDB lbi ++ [ SpecificPackageDB $ distPref "package.conf.inplace" ] - let dbFlags = "-hide-all-packages" : packageDbArgs dbStack - - withTestLBI pkg lbi $ \suite suitecfg -> when (testName suite == fromString testSuiteName) $ do -#if MIN_VERSION_Cabal(1,25,0) - let testAutogenDir = autogenComponentModulesDir lbi suitecfg -#else - let testAutogenDir = autogenModulesDir lbi -#endif - - createDirectoryIfMissingVerbose verbosity True testAutogenDir - - let buildDoctestsFile = testAutogenDir "Build_doctests.hs" - - -- First, we create the autogen'd module Build_doctests. - -- Initially populate Build_doctests with a simple preamble. 
- writeFile buildDoctestsFile $ unlines - [ "module Build_doctests where" - , "" - , "import Prelude" - , "" - , "data Name = NameLib (Maybe String) | NameExe String deriving (Eq, Show)" - , "data Component = Component Name [String] [String] [String] deriving (Eq, Show)" - , "" - ] - - -- we cannot traverse, only traverse_ - -- so we use IORef to collect components - componentsRef <- newIORef [] - - let testBI = testBuildInfo suite - - -- TODO: `words` is not proper parser (no support for quotes) - let additionalFlags = maybe [] words - $ lookup "x-doctest-options" - $ customFieldsBI testBI - - let additionalModules = maybe [] words - $ lookup "x-doctest-modules" - $ customFieldsBI testBI - - let additionalDirs' = maybe [] words - $ lookup "x-doctest-source-dirs" - $ customFieldsBI testBI - - additionalDirs <- mapM (fmap ("-i" ++) . makeAbsolute) additionalDirs' - - -- Next, for each component (library or executable), we get to Build_doctests - -- the sets of flags needed to run doctest on that component. - let getBuildDoctests withCompLBI mbCompName compExposedModules compMainIs compBuildInfo = - withCompLBI pkg lbi $ \comp compCfg -> do - let compBI = compBuildInfo comp - - -- modules - let modules = compExposedModules comp ++ otherModules compBI - -- it seems that doctest is happy to take in module names, not actual files! - let module_sources = modules - - -- We need the directory with the component's cabal_macros.h! -#if MIN_VERSION_Cabal(1,25,0) - let compAutogenDir = autogenComponentModulesDir lbi compCfg -#else - let compAutogenDir = autogenModulesDir lbi -#endif - - -- Lib sources and includes - iArgsNoPrefix - <- mapM makeAbsolute - $ compAutogenDir -- autogenerated files - : (distPref ++ "/build") -- preprocessed files (.hsc -> .hs); "build" is hardcoded in Cabal. - : hsSourceDirs compBI - includeArgs <- mapM (fmap ("-I"++) . makeAbsolute) $ includeDirs compBI - -- We clear all includes, so the CWD isn't used. 
- let iArgs' = map ("-i"++) iArgsNoPrefix - iArgs = "-i" : iArgs' - - -- default-extensions - let extensionArgs = map (("-X"++) . display) $ defaultExtensions compBI - - -- CPP includes, i.e. include cabal_macros.h - let cppFlags = map ("-optP"++) $ - [ "-include", compAutogenDir ++ "/cabal_macros.h" ] - ++ cppOptions compBI - - -- Unlike other modules, the main-is module of an executable is not - -- guaranteed to share a module name with its filepath name. That is, - -- even though the main-is module is named Main, its filepath might - -- actually be Something.hs. To account for this possibility, we simply - -- pass the full path to the main-is module instead. - mainIsPath <- T.traverse (findFile iArgsNoPrefix) (compMainIs comp) - - let all_sources = map display module_sources - ++ additionalModules - ++ maybeToList mainIsPath - - let component = Component - (mbCompName comp) - (formatDeps $ testDeps compCfg suitecfg) - (concat - [ iArgs - , additionalDirs - , includeArgs - , dbFlags - , cppFlags - , extensionArgs - , additionalFlags - ]) - all_sources - - -- modify IORef, append component - modifyIORef componentsRef (\cs -> cs ++ [component]) - - -- For now, we only check for doctests in libraries and executables. - getBuildDoctests withLibLBI mbLibraryName exposedModules (const Nothing) libBuildInfo - getBuildDoctests withExeLBI (NameExe . executableName) (const []) (Just . modulePath) buildInfo - - components <- readIORef componentsRef - F.for_ components $ \(Component name pkgs flags sources) -> do - let compSuffix = nameToString name - pkgs_comp = "pkgs" ++ compSuffix - flags_comp = "flags" ++ compSuffix - module_sources_comp = "module_sources" ++ compSuffix - - -- write autogen'd file - appendFile buildDoctestsFile $ unlines - [ -- -package-id etc. 
flags - pkgs_comp ++ " :: [String]" - , pkgs_comp ++ " = " ++ show pkgs - , "" - , flags_comp ++ " :: [String]" - , flags_comp ++ " = " ++ show flags - , "" - , module_sources_comp ++ " :: [String]" - , module_sources_comp ++ " = " ++ show sources - , "" - ] - - -- write enabled components, i.e. x-doctest-components - -- if none enabled, pick library - let enabledComponents = maybe [NameLib Nothing] (mapMaybe parseComponentName . words) - $ lookup "x-doctest-components" - $ customFieldsBI testBI - - let components' = - filter (\(Component n _ _ _) -> n `elem` enabledComponents) components - appendFile buildDoctestsFile $ unlines - [ "-- " ++ show enabledComponents - , "components :: [Component]" - , "components = " ++ show components' - ] - - where - parseComponentName :: String -> Maybe Name - parseComponentName "lib" = Just (NameLib Nothing) - parseComponentName ('l' : 'i' : 'b' : ':' : x) = Just (NameLib (Just x)) - parseComponentName ('e' : 'x' : 'e' : ':' : x) = Just (NameExe x) - parseComponentName _ = Nothing - - -- we do this check in Setup, as then doctests don't need to depend on Cabal - isOldCompiler = maybe False id $ do - a <- simpleParse $ showCompilerId $ compiler lbi - b <- simpleParse "7.5" - return $ packageVersion (a :: PackageId) < b - - formatDeps = map formatOne - formatOne (installedPkgId, pkgId) - -- The problem is how different cabal executables handle package databases - -- when doctests depend on the library - -- - -- If the pkgId is current package, we don't output the full package-id - -- but only the name - -- - -- Because of MungedPackageId we compare display version of identifiers - -- not the identifiers themfselves. 
- | display (packageId pkg) == display pkgId = "-package=" ++ display pkgId - | otherwise = "-package-id=" ++ display installedPkgId - - -- From Distribution.Simple.Program.GHC - packageDbArgs :: [PackageDB] -> [String] - packageDbArgs | isOldCompiler = packageDbArgsConf - | otherwise = packageDbArgsDb - - -- GHC <7.6 uses '-package-conf' instead of '-package-db'. - packageDbArgsConf :: [PackageDB] -> [String] - packageDbArgsConf dbstack = case dbstack of - (GlobalPackageDB:UserPackageDB:dbs) -> concatMap specific dbs - (GlobalPackageDB:dbs) -> ("-no-user-package-conf") - : concatMap specific dbs - _ -> ierror - where - specific (SpecificPackageDB db) = [ "-package-conf=" ++ db ] - specific _ = ierror - ierror = error $ "internal error: unexpected package db stack: " - ++ show dbstack - - -- GHC >= 7.6 uses the '-package-db' flag. See - -- https://ghc.haskell.org/trac/ghc/ticket/5977. - packageDbArgsDb :: [PackageDB] -> [String] - -- special cases to make arguments prettier in common scenarios - packageDbArgsDb dbstack = case dbstack of - (GlobalPackageDB:UserPackageDB:dbs) - | all isSpecific dbs -> concatMap single dbs - (GlobalPackageDB:dbs) - | all isSpecific dbs -> "-no-user-package-db" - : concatMap single dbs - dbs -> "-clear-package-db" - : concatMap single dbs - where - single (SpecificPackageDB db) = [ "-package-db=" ++ db ] - single GlobalPackageDB = [ "-global-package-db" ] - single UserPackageDB = [ "-user-package-db" ] - isSpecific (SpecificPackageDB _) = True - isSpecific _ = False - - mbLibraryName :: Library -> Name -#if MIN_VERSION_Cabal(2,0,0) - -- Cabal-2.0 introduced internal libraries, which are named. - mbLibraryName = NameLib . fmap unUnqualComponentName . libName -#else - -- Before that, there was only ever at most one library per - -- .cabal file, which has no name. - mbLibraryName _ = NameLib Nothing -#endif - - executableName :: Executable -> String -#if MIN_VERSION_Cabal(2,0,0) - executableName = unUnqualComponentName . 
exeName -#else - executableName = exeName -#endif - --- | In compat settings it's better to omit the type-signature -testDeps :: ComponentLocalBuildInfo -> ComponentLocalBuildInfo -#if MIN_VERSION_Cabal(2,0,0) - -> [(InstalledPackageId, MungedPackageId)] -#else - -> [(InstalledPackageId, PackageId)] -#endif -testDeps xs ys = nub $ componentPackageDeps xs ++ componentPackageDeps ys diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/custom-setup/CustomSetupTests.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/custom-setup/CustomSetupTests.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/custom-setup/CustomSetupTests.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/custom-setup/CustomSetupTests.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ --- This test-suite verifies some custom-setup scripts compile ok --- so we don't break them by accident, i.e. when breakage can be prevented. -module Main (main) where -import CabalDoctestSetup () -import IdrisSetup () - -main :: IO () -main = return () diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/custom-setup/IdrisSetup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/custom-setup/IdrisSetup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/custom-setup/IdrisSetup.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/custom-setup/IdrisSetup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,383 +0,0 @@ --- This is Setup.hs script from idris-1.1.1 - -{- - -Copyright (c) 2011 Edwin Brady - School of Computer Science, University of St Andrews -All rights reserved. - -This code is derived from software written by Edwin Brady -(eb@cs.st-andrews.ac.uk). 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions -are met: -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. -3. None of the names of the copyright holders may be used to endorse - or promote products derived from this software without specific - prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY -EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR -BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, -WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE -OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN -IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -*** End of disclaimer. 
*** - --} - -{-# LANGUAGE CPP #-} -{-# OPTIONS_GHC -fno-warn-deprecations #-} -module IdrisSetup (main) where - -#if !defined(MIN_VERSION_Cabal) -# define MIN_VERSION_Cabal(x,y,z) 0 -#endif - -#if !defined(MIN_VERSION_base) -# define MIN_VERSION_base(x,y,z) 0 -#endif - -import Control.Monad -import Data.IORef -import Control.Exception (SomeException, catch) -import Data.String (fromString) - -import Distribution.Simple -import Distribution.Simple.BuildPaths -import Distribution.Simple.InstallDirs as I -import Distribution.Simple.LocalBuildInfo as L -import qualified Distribution.Simple.Setup as S -import qualified Distribution.Simple.Program as P -import Distribution.Simple.Utils (createDirectoryIfMissingVerbose, rewriteFile, notice, installOrdinaryFiles) -import Distribution.Compiler -import Distribution.PackageDescription -import Distribution.Text - -import System.Environment -import System.Exit -import System.FilePath ((), splitDirectories,isAbsolute) -import System.Directory -import qualified System.FilePath.Posix as Px -import System.Process - --- This is difference from vanilla idris-1.1.1 -configConfigurationsFlags :: S.ConfigFlags -> [(FlagName, Bool)] -#if MIN_VERSION_Cabal(2,1,0) -configConfigurationsFlags = unFlagAssignment . S.configConfigurationsFlags -#else -configConfigurationsFlags = S.configConfigurationsFlags -#endif - -#if !MIN_VERSION_base(4,6,0) -lookupEnv :: String -> IO (Maybe String) -lookupEnv v = lookup v `fmap` getEnvironment -#endif - --- After Idris is built, we need to check and install the prelude and other libs - --- ----------------------------------------------------------------------------- --- Idris Command Path - --- make on mingw32 exepects unix style separators -#ifdef mingw32_HOST_OS -() = (Px.) -idrisCmd local = Px.joinPath $ splitDirectories $ ".." ".." buildDir local "idris" "idris" -#else -idrisCmd local = ".." ".." 
buildDir local "idris" "idris" -#endif - --- ----------------------------------------------------------------------------- --- Make Commands - --- use GNU make on FreeBSD -#if defined(freebsd_HOST_OS) || defined(dragonfly_HOST_OS)\ - || defined(openbsd_HOST_OS) || defined(netbsd_HOST_OS) -mymake = "gmake" -#else -mymake = "make" -#endif -make verbosity = - P.runProgramInvocation verbosity . P.simpleProgramInvocation mymake - -#ifdef mingw32_HOST_OS -windres verbosity = P.runProgramInvocation verbosity . P.simpleProgramInvocation "windres" -#endif --- ----------------------------------------------------------------------------- --- Flags - -usesGMP :: S.ConfigFlags -> Bool -usesGMP flags = - case lookup (mkFlagName "gmp") (configConfigurationsFlags flags) of - Just True -> True - Just False -> False - Nothing -> False - -execOnly :: S.ConfigFlags -> Bool -execOnly flags = - case lookup (mkFlagName "execonly") (configConfigurationsFlags flags) of - Just True -> True - Just False -> False - Nothing -> False - -isRelease :: S.ConfigFlags -> Bool -isRelease flags = - case lookup (mkFlagName "release") (configConfigurationsFlags flags) of - Just True -> True - Just False -> False - Nothing -> False - -isFreestanding :: S.ConfigFlags -> Bool -isFreestanding flags = - case lookup (mkFlagName "freestanding") (configConfigurationsFlags flags) of - Just True -> True - Just False -> False - Nothing -> False - -#if !(MIN_VERSION_Cabal(2,0,0)) -mkFlagName :: String -> FlagName -mkFlagName = FlagName -#endif - --- ----------------------------------------------------------------------------- --- Clean - -idrisClean _ flags _ _ = cleanStdLib - where - verbosity = S.fromFlag $ S.cleanVerbosity flags - - cleanStdLib = makeClean "libs" - - makeClean dir = make verbosity [ "-C", dir, "clean", "IDRIS=idris" ] - --- ----------------------------------------------------------------------------- --- Configure - -gitHash :: IO String -gitHash = do h <- Control.Exception.catch (readProcess 
"git" ["rev-parse", "--short", "HEAD"] "") - (\e -> let e' = (e :: SomeException) in return "PRE") - return $ takeWhile (/= '\n') h - --- Put the Git hash into a module for use in the program --- For release builds, just put the empty string in the module -generateVersionModule verbosity dir release = do - hash <- gitHash - let versionModulePath = dir "Version_idris" Px.<.> "hs" - putStrLn $ "Generating " ++ versionModulePath ++ - if release then " for release" else " for prerelease " ++ hash - createDirectoryIfMissingVerbose verbosity True dir - rewriteFile versionModulePath (versionModuleContents hash) - - where versionModuleContents h = "module Version_idris where\n\n" ++ - "gitHash :: String\n" ++ - if release - then "gitHash = \"\"\n" - else "gitHash = \"git:" ++ h ++ "\"\n" - --- Generate a module that contains the lib path for a freestanding Idris -generateTargetModule verbosity dir targetDir = do - let absPath = isAbsolute targetDir - let targetModulePath = dir "Target_idris" Px.<.> "hs" - putStrLn $ "Generating " ++ targetModulePath - createDirectoryIfMissingVerbose verbosity True dir - rewriteFile targetModulePath (versionModuleContents absPath targetDir) - where versionModuleContents absolute td = "module Target_idris where\n\n" ++ - "import System.FilePath\n" ++ - "import System.Environment\n" ++ - "getDataDir :: IO String\n" ++ - if absolute - then "getDataDir = return \"" ++ td ++ "\"\n" - else "getDataDir = do \n" ++ - " expath <- getExecutablePath\n" ++ - " execDir <- return $ dropFileName expath\n" ++ - " return $ execDir ++ \"" ++ td ++ "\"\n" - ++ "getDataFileName :: FilePath -> IO FilePath\n" - ++ "getDataFileName name = do\n" - ++ " dir <- getDataDir\n" - ++ " return (dir ++ \"/\" ++ name)" - --- a module that has info about existence and location of a bundled toolchain -generateToolchainModule verbosity srcDir toolDir = do - let commonContent = "module Tools_idris where\n\n" - let toolContent = case toolDir of - Just dir -> 
"hasBundledToolchain = True\n" ++ - "getToolchainDir = \"" ++ dir ++ "\"\n" - Nothing -> "hasBundledToolchain = False\n" ++ - "getToolchainDir = \"\"" - let toolPath = srcDir "Tools_idris" Px.<.> "hs" - createDirectoryIfMissingVerbose verbosity True srcDir - rewriteFile toolPath (commonContent ++ toolContent) - -idrisConfigure _ flags pkgdesc local = do - configureRTS - withLibLBI pkgdesc local $ \_ libcfg -> do - let libAutogenDir = autogenComponentModulesDir local libcfg - generateVersionModule verbosity libAutogenDir (isRelease (configFlags local)) - if isFreestanding $ configFlags local - then do - toolDir <- lookupEnv "IDRIS_TOOLCHAIN_DIR" - generateToolchainModule verbosity libAutogenDir toolDir - targetDir <- lookupEnv "IDRIS_LIB_DIR" - case targetDir of - Just d -> generateTargetModule verbosity libAutogenDir d - Nothing -> error $ "Trying to build freestanding without a target directory." - ++ " Set it by defining IDRIS_LIB_DIR." - else - generateToolchainModule verbosity libAutogenDir Nothing - where - verbosity = S.fromFlag $ S.configVerbosity flags - version = pkgVersion . package $ localPkgDescr local - - -- This is a hack. I don't know how to tell cabal that a data file needs - -- installing but shouldn't be in the distribution. And it won't make the - -- distribution if it's not there, so instead I just delete - -- the file after configure. 
- configureRTS = make verbosity ["-C", "rts", "clean"] - -#if !(MIN_VERSION_Cabal(2,0,0)) - autogenComponentModulesDir lbi _ = autogenModulesDir lbi -#endif - -idrisPreSDist args flags = do - let dir = S.fromFlag (S.sDistDirectory flags) - let verb = S.fromFlag (S.sDistVerbosity flags) - generateVersionModule verb "src" True - generateTargetModule verb "src" "./libs" - generateToolchainModule verb "src" Nothing - preSDist simpleUserHooks args flags - -idrisSDist sdist pkgDesc bi hooks flags = do - pkgDesc' <- addGitFiles pkgDesc - sdist pkgDesc' bi hooks flags - where - addGitFiles :: PackageDescription -> IO PackageDescription - addGitFiles pkgDesc = do - files <- gitFiles - return $ pkgDesc { extraSrcFiles = extraSrcFiles pkgDesc ++ files} - gitFiles :: IO [FilePath] - gitFiles = liftM lines (readProcess "git" ["ls-files"] "") - -idrisPostSDist args flags desc lbi = do - Control.Exception.catch (do let file = "src" "Version_idris" Px.<.> "hs" - let targetFile = "src" "Target_idris" Px.<.> "hs" - putStrLn $ "Removing generated modules:\n " - ++ file ++ "\n" ++ targetFile - removeFile file - removeFile targetFile) - (\e -> let e' = (e :: SomeException) in return ()) - postSDist simpleUserHooks args flags desc lbi - --- ----------------------------------------------------------------------------- --- Build - -getVersion :: Args -> S.BuildFlags -> IO HookedBuildInfo -getVersion args flags = do - hash <- gitHash - let buildinfo = (emptyBuildInfo { cppOptions = ["-DVERSION="++hash] }) :: BuildInfo - return (Just buildinfo, []) - -idrisPreBuild args flags = do -#ifdef mingw32_HOST_OS - createDirectoryIfMissingVerbose verbosity True dir - windres verbosity ["icons/idris_icon.rc","-o", dir++"/idris_icon.o"] - return (Nothing, [(fromString "idris", emptyBuildInfo { ldOptions = [dir ++ "/idris_icon.o"] })]) - where - verbosity = S.fromFlag $ S.buildVerbosity flags - dir = S.fromFlagOrDefault "dist" $ S.buildDistPref flags -#else - return (Nothing, []) -#endif - -idrisBuild 
_ flags _ local - = if (execOnly (configFlags local)) then buildRTS - else do buildStdLib - buildRTS - where - verbosity = S.fromFlag $ S.buildVerbosity flags - - buildStdLib = do - putStrLn "Building libraries..." - makeBuild "libs" - where - makeBuild dir = make verbosity [ "-C", dir, "build" , "IDRIS=" ++ idrisCmd local] - - buildRTS = make verbosity (["-C", "rts", "build"] ++ - gmpflag (usesGMP (configFlags local))) - - gmpflag False = [] - gmpflag True = ["GMP=-DIDRIS_GMP"] - --- ----------------------------------------------------------------------------- --- Copy/Install - -idrisInstall verbosity copy pkg local - = if (execOnly (configFlags local)) then installRTS - else do installStdLib - installRTS - installManPage - where - target = datadir $ L.absoluteInstallDirs pkg local copy - - installStdLib = do - let target' = target -- "libs" - putStrLn $ "Installing libraries in " ++ target' - makeInstall "libs" target' - - installRTS = do - let target' = target "rts" - putStrLn $ "Installing run time system in " ++ target' - makeInstall "rts" target' - - installManPage = do - let mandest = mandir (L.absoluteInstallDirs pkg local copy) ++ "/man1" - notice verbosity $ unwords ["Copying man page to", mandest] - installOrdinaryFiles verbosity mandest [("man", "idris.1")] - - makeInstall src target = - make verbosity [ "-C", src, "install" , "TARGET=" ++ target, "IDRIS=" ++ idrisCmd local] - --- ----------------------------------------------------------------------------- --- Test - --- There are two "dataDir" in cabal, and they don't relate to each other. --- When fetching modules, idris uses the second path (in the pkg record), --- which by default is the root folder of the project. --- We want it to be the install directory where we put the idris libraries. 
-fixPkg pkg target = pkg { dataDir = target } - -idrisTestHook args pkg local hooks flags = do - let target = datadir $ L.absoluteInstallDirs pkg local NoCopyDest - testHook simpleUserHooks args (fixPkg pkg target) local hooks flags - --- ----------------------------------------------------------------------------- --- Main - --- Install libraries during both copy and install --- See https://github.com/haskell/cabal/issues/709 -main = defaultMainWithHooks $ simpleUserHooks - { postClean = idrisClean - , postConf = idrisConfigure - , preBuild = idrisPreBuild - , postBuild = idrisBuild - , postCopy = \_ flags pkg local -> - idrisInstall (S.fromFlag $ S.copyVerbosity flags) - (S.fromFlag $ S.copyDest flags) pkg local - , postInst = \_ flags pkg local -> - idrisInstall (S.fromFlag $ S.installVerbosity flags) - NoCopyDest pkg local - , preSDist = idrisPreSDist - , sDistHook = idrisSDist (sDistHook simpleUserHooks) - , postSDist = idrisPostSDist - , testHook = idrisTestHook - } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/hackage/check.sh cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/hackage/check.sh --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/hackage/check.sh 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/hackage/check.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -#!/bin/sh - -base_version=1.4.0.2 -test_version=1.5.6 - -for setup in archive/*/*/Setup.hs archive/*/*/Setup.lhs; do - - pkgname=$(basename ${setup}) - - if test $(wc -w < ${setup}) -gt 21; then - if ghc -package Cabal-${base_version} -S ${setup} -o /dev/null 2> /dev/null; then - - if ghc -package Cabal-${test_version} -S ${setup} -o /dev/null 2> /dev/null; then - echo "OK ${setup}" - else - echo "FAIL ${setup} does not compile with Cabal-${test_version}" - fi - else - echo "OK ${setup} (does not compile with Cabal-${base_version})" - fi - else - echo 
"trivial ${setup}" - fi - -done diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/hackage/download.sh cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/hackage/download.sh --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/hackage/download.sh 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/hackage/download.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -#!/bin/sh - -if test ! -f archive/archive.tar; then - - wget http://hackage.haskell.org/cgi-bin/hackage-scripts/archive.tar - mkdir -p archive - mv archive.tar archive/ - tar -C archive -xf archive/archive.tar - -fi - -if test ! -f archive/00-index.tar.gz; then - - wget http://hackage.haskell.org/packages/archive/00-index.tar.gz - mkdir -p archive - mv 00-index.tar.gz archive/ - tar -C archive -xzf archive/00-index.tar.gz - -fi diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/hackage/unpack.sh cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/hackage/unpack.sh --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/hackage/unpack.sh 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/hackage/unpack.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -#!/bin/sh - -for tarball in archive/*/*/*.tar.gz; do - - pkgdir=$(dirname ${tarball}) - pkgname=$(basename ${tarball} .tar.gz) - - if tar -tzf ${tarball} ${pkgname}/Setup.hs 2> /dev/null; then - tar -xzf ${tarball} ${pkgname}/Setup.hs -O > ${pkgdir}/Setup.hs - elif tar -tzf ${tarball} ${pkgname}/Setup.lhs 2> /dev/null; then - tar -xzf ${tarball} ${pkgname}/Setup.lhs -O > ${pkgdir}/Setup.lhs - else - echo "${pkgname} has no Setup.hs or .lhs at all!!?!" 
- fi - -done diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/HackageTests.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/HackageTests.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/HackageTests.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/HackageTests.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,301 +0,0 @@ -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE CPP #-} -{-# LANGUAGE Rank2Types #-} -{-# LANGUAGE ScopedTypeVariables #-} -#if !MIN_VERSION_deepseq(1,4,0) -{-# OPTIONS_GHC -fno-warn-orphans #-} -#endif -module Main where - -import Distribution.Compat.Semigroup -import Prelude () -import Prelude.Compat - -import Control.Applicative (many, (<**>), (<|>)) -import Control.DeepSeq (NFData (..), force) -import Control.Exception (evaluate) -import Control.Monad (join, unless) -import Data.Foldable (traverse_) -import Data.List (isPrefixOf, isSuffixOf) -import Data.Maybe (mapMaybe) -import Data.Monoid (Sum (..)) -import Distribution.PackageDescription.Check (PackageCheck (..) 
- ,checkPackage) -import Distribution.PackageDescription.PrettyPrint (showGenericPackageDescription) -import Distribution.Simple.Utils (toUTF8BS) -import System.Directory (getAppUserDataDirectory) -import System.Exit (exitFailure) -import System.FilePath (()) - -import Data.Orphans () - -import qualified Codec.Archive.Tar as Tar -import qualified Data.ByteString as B -import qualified Data.ByteString.Char8 as B8 -import qualified Data.ByteString.Lazy as BSL -import qualified Data.Map as Map -import qualified Distribution.PackageDescription.Parsec as Parsec -import qualified Distribution.Parsec.Common as Parsec -import qualified Distribution.Parsec.Parser as Parsec - -import Distribution.Compat.Lens -import qualified Distribution.Types.GenericPackageDescription.Lens as L -import qualified Distribution.Types.PackageDescription.Lens as L -import qualified Options.Applicative as O - -#ifdef MIN_VERSION_tree_diff -import Data.TreeDiff (ansiWlEditExpr, ediff) -import Instances.TreeDiff () -#endif - -parseIndex :: (Monoid a, NFData a) => (FilePath -> Bool) - -> (FilePath -> BSL.ByteString -> IO a) -> IO a -parseIndex predicate action = do - cabalDir <- getAppUserDataDirectory "cabal" - cfg <- B.readFile (cabalDir "config") - cfgFields <- either (fail . show) pure $ Parsec.readFields cfg - let repos = reposFromConfig cfgFields - repoCache = case lookupInConfig "remote-repo-cache" cfgFields of - [] -> cabalDir "packages" -- Default - (rrc : _) -> rrc -- User-specified - tarName repo = repoCache repo "01-index.tar" - mconcat <$> traverse (parseIndex' predicate action . tarName) repos - -parseIndex' :: (Monoid a, NFData a) => (FilePath -> Bool) - -> (FilePath -> BSL.ByteString -> IO a) -> FilePath -> IO a -parseIndex' predicate action path = do - putStrLn $ "Reading index from: " ++ path - contents <- BSL.readFile path - let entries = Tar.read contents - entries' = Tar.foldEntries cons [] (error . 
show) entries - foldIO f entries' - - where - cons entry entries - | predicate (Tar.entryPath entry) = entry : entries - | otherwise = entries - - f entry = case Tar.entryContent entry of - Tar.NormalFile contents _ - | ".cabal" `isSuffixOf` fpath -> - action fpath contents >>= evaluate . force - | otherwise -> - return mempty - Tar.Directory -> return mempty - _ -> putStrLn ("Unknown content in " ++ fpath) - >> return mempty - where - fpath = Tar.entryPath entry - -readFieldTest :: FilePath -> BSL.ByteString -> IO () -readFieldTest fpath bsl = case Parsec.readFields $ BSL.toStrict bsl of - Right _ -> return () - Left err -> putStrLn $ fpath ++ "\n" ++ show err - --- | Map with unionWith monoid -newtype M k v = M (Map.Map k v) - deriving (Show) -instance (Ord k, Monoid v) => Semigroup (M k v) where - M a <> M b = M (Map.unionWith mappend a b) -instance (Ord k, Monoid v) => Monoid (M k v) where - mempty = M Map.empty - mappend = (<>) -instance (NFData k, NFData v) => NFData (M k v) where - rnf (M m) = rnf m - -parseParsecTest :: FilePath -> BSL.ByteString -> IO (Sum Int) -parseParsecTest fpath bsl = do - let bs = BSL.toStrict bsl - let (_warnings, parsec) = Parsec.runParseResult $ - Parsec.parseGenericPackageDescription bs - case parsec of - Right _ -> return (Sum 1) - Left (_, errors) -> do - traverse_ (putStrLn . Parsec.showPError fpath) errors - exitFailure - -parseCheckTest :: FilePath -> BSL.ByteString -> IO CheckResult -parseCheckTest fpath bsl = do - let bs = BSL.toStrict bsl - let (_warnings, parsec) = Parsec.runParseResult $ - Parsec.parseGenericPackageDescription bs - case parsec of - Right gpd -> do - let checks = checkPackage gpd Nothing - -- one for file, many checks - return (CheckResult 1 0 0 0 0 0 <> foldMap toCheckResult checks) - Left (_, errors) -> do - traverse_ (putStrLn . 
Parsec.showPError fpath) errors - exitFailure - -data CheckResult = CheckResult !Int !Int !Int !Int !Int !Int - -instance NFData CheckResult where - rnf !_ = () - -instance Semigroup CheckResult where - CheckResult n a b c d e <> CheckResult n' a' b' c' d' e' = - CheckResult (n + n') (a + a') (b + b') (c + c') (d + d') (e + e') - -instance Monoid CheckResult where - mempty = CheckResult 0 0 0 0 0 0 - mappend = (<>) - -toCheckResult :: PackageCheck -> CheckResult -toCheckResult PackageBuildImpossible {} = CheckResult 0 1 0 0 0 0 -toCheckResult PackageBuildWarning {} = CheckResult 0 0 1 0 0 0 -toCheckResult PackageDistSuspicious {} = CheckResult 0 0 0 1 0 0 -toCheckResult PackageDistSuspiciousWarn {} = CheckResult 0 0 0 0 1 0 -toCheckResult PackageDistInexcusable {} = CheckResult 0 0 0 0 0 1 - -roundtripTest :: FilePath -> BSL.ByteString -> IO (Sum Int) -roundtripTest fpath bsl = do - let bs = BSL.toStrict bsl - x0 <- parse "1st" bs - let bs' = showGenericPackageDescription x0 - y0 <- parse "2nd" (toUTF8BS bs') - - -- we mungled license here - let y1 = y0 - - -- license-files: "" - let stripEmpty = filter (/="") - let x1 = x0 & L.packageDescription . L.licenseFiles %~ stripEmpty - let y2 = y1 & L.packageDescription . L.licenseFiles %~ stripEmpty - - let y = y2 & L.packageDescription . L.description .~ "" - let x = x1 & L.packageDescription . 
L.description .~ "" - - unless (x == y || fpath == "ixset/1.0.4/ixset.cabal") $ do - putStrLn fpath -#ifdef MIN_VERSION_tree_diff - print $ ansiWlEditExpr $ ediff x y -#else - putStrLn "<<<<<<" - print x - putStrLn "======" - print y - putStrLn ">>>>>>" -#endif - putStrLn bs' - exitFailure - - return (Sum 1) - where - parse phase c = do - let (_, x') = Parsec.runParseResult $ - Parsec.parseGenericPackageDescription c - case x' of - Right gpd -> pure gpd - Left (_, errs) -> do - putStrLn $ fpath ++ " " ++ phase - traverse_ print errs - B.putStr c - fail "parse error" - -main :: IO () -main = join (O.execParser opts) - where - opts = O.info (optsP <**> O.helper) $ mconcat - [ O.fullDesc - , O.progDesc "tests using Hackage's index" - ] - - optsP = subparser - [ command "read-fields" readFieldsP - "Parse outer format (to '[Field]', TODO: apply Quirks)" - , command "parsec" parsecP "Parse GPD with parsec" - , command "roundtrip" roundtripP "parse . pretty . parse = parse" - , command "check" checkP "Check GPD" - ] <|> pure defaultA - - defaultA = do - putStrLn "Default action: parsec k" - parsecA (mkPredicate ["k"]) - - readFieldsP = readFieldsA <$> prefixP - readFieldsA pfx = parseIndex pfx readFieldTest - - parsecP = parsecA <$> prefixP - parsecA pfx = do - Sum n <- parseIndex pfx parseParsecTest - putStrLn $ show n ++ " files processed" - - roundtripP = roundtripA <$> prefixP - roundtripA pfx = do - Sum n <- parseIndex pfx roundtripTest - putStrLn $ show n ++ " files processed" - - checkP = checkA <$> prefixP - checkA pfx = do - CheckResult n a b c d e <- parseIndex pfx parseCheckTest - putStrLn $ show n ++ " files processed" - putStrLn $ show a ++ " build impossible" - putStrLn $ show b ++ " build warning" - putStrLn $ show c ++ " build dist suspicious" - putStrLn $ show d ++ " build dist suspicious warning" - putStrLn $ show e ++ " build dist inexcusable" - - prefixP = fmap mkPredicate $ many $ O.strArgument $ mconcat - [ O.metavar "PREFIX" - , O.help "Check only 
files starting with a prefix" - ] - - mkPredicate [] = const True - mkPredicate pfxs = \n -> any (`isPrefixOf` n) pfxs - - command name p desc = O.command name - (O.info (p <**> O.helper) (O.progDesc desc)) - subparser = O.subparser . mconcat - -------------------------------------------------------------------------------- --- Index shuffling -------------------------------------------------------------------------------- - --- TODO: Use 'Cabal' for this? -reposFromConfig :: [Parsec.Field ann] -> [String] -reposFromConfig fields = takeWhile (/= ':') <$> mapMaybe f fields - where - f (Parsec.Field (Parsec.Name _ name) fieldLines) - | B8.unpack name == "remote-repo" = - Just $ fieldLinesToString fieldLines - f (Parsec.Section (Parsec.Name _ name) - [Parsec.SecArgName _ secName] _fieldLines) - | B8.unpack name == "repository" = - Just $ B8.unpack secName - f _ = Nothing - --- | Looks up the given key in the cabal configuration file -lookupInConfig :: String -> [Parsec.Field ann] -> [String] -lookupInConfig key = mapMaybe f - where - f (Parsec.Field (Parsec.Name _ name) fieldLines) - | B8.unpack name == key = - Just $ fieldLinesToString fieldLines - f _ = Nothing - -fieldLinesToString :: [Parsec.FieldLine ann] -> String -fieldLinesToString fieldLines = - B8.unpack $ B.concat $ bsFromFieldLine <$> fieldLines - where - bsFromFieldLine (Parsec.FieldLine _ bs) = bs - -------------------------------------------------------------------------------- --- Utilities -------------------------------------------------------------------------------- - -foldIO :: (Monoid m, NFData m) => (a -> IO m) -> [a] -> IO m -foldIO f = go mempty where - go !acc [] = return acc - go !acc (x : xs) = do - y <- f x - go (mappend acc y) xs - -------------------------------------------------------------------------------- --- Orphans -------------------------------------------------------------------------------- - -#if !MIN_VERSION_deepseq(1,4,0) -instance NFData a => NFData (Sum a) where - rnf 
(Sum a) = rnf a -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/Instances/TreeDiff/Language.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/Instances/TreeDiff/Language.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/Instances/TreeDiff/Language.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/Instances/TreeDiff/Language.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -{-# LANGUAGE CPP #-} -#if __GLASGOW_HASKELL__ >= 800 -{-# OPTIONS_GHC -freduction-depth=0 #-} -#else -{-# OPTIONS_GHC -fcontext-stack=151 #-} -#endif -{-# OPTIONS_GHC -fno-warn-orphans #-} -module Instances.TreeDiff.Language where - -import Data.TreeDiff -import Language.Haskell.Extension (Extension, KnownExtension, Language) - --- This are big enums, so they are in separate file. --- -instance ToExpr Extension -instance ToExpr KnownExtension -instance ToExpr Language diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/Instances/TreeDiff/SPDX.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/Instances/TreeDiff/SPDX.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/Instances/TreeDiff/SPDX.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/Instances/TreeDiff/SPDX.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,28 +0,0 @@ -{-# LANGUAGE CPP #-} -#if __GLASGOW_HASKELL__ >= 800 -{-# OPTIONS_GHC -freduction-depth=0 #-} -#else -{-# OPTIONS_GHC -fcontext-stack=151 #-} -#endif -{-# OPTIONS_GHC -fno-warn-orphans #-} -module Instances.TreeDiff.SPDX where - -import Data.TreeDiff -import Distribution.License (License) - -import Instances.TreeDiff.Version () - -import qualified Distribution.SPDX as SPDX - --- 'License' almost belongs here. 
- -instance ToExpr License - --- Generics instance is too heavy -instance ToExpr SPDX.LicenseId where toExpr = defaultExprViaShow -instance ToExpr SPDX.LicenseExceptionId where toExpr = defaultExprViaShow - -instance ToExpr SPDX.License -instance ToExpr SPDX.LicenseExpression -instance ToExpr SPDX.LicenseRef -instance ToExpr SPDX.SimpleLicenseExpression diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/Instances/TreeDiff/Version.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/Instances/TreeDiff/Version.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/Instances/TreeDiff/Version.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/Instances/TreeDiff/Version.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -{-# LANGUAGE CPP #-} -#if __GLASGOW_HASKELL__ >= 800 -{-# OPTIONS_GHC -freduction-depth=0 #-} -#else -{-# OPTIONS_GHC -fcontext-stack=151 #-} -#endif -{-# OPTIONS_GHC -fno-warn-orphans #-} -module Instances.TreeDiff.Version where - -import Data.TreeDiff -import Distribution.Version (Version, VersionRange) - -instance ToExpr Version where toExpr = defaultExprViaShow -instance ToExpr VersionRange diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/Instances/TreeDiff.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/Instances/TreeDiff.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/Instances/TreeDiff.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/Instances/TreeDiff.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,91 +0,0 @@ -{-# LANGUAGE CPP #-} -#if __GLASGOW_HASKELL__ >= 800 -{-# OPTIONS_GHC -freduction-depth=0 #-} -#else -{-# OPTIONS_GHC -fcontext-stack=151 #-} -#endif -{-# OPTIONS_GHC -fno-warn-orphans #-} -module Instances.TreeDiff where - -import Data.TreeDiff - -import 
Instances.TreeDiff.Language () -import Instances.TreeDiff.SPDX () -import Instances.TreeDiff.Version () - -------------------------------------------------------------------------------- - -import Distribution.Backpack (OpenModule, OpenUnitId) -import Distribution.Compiler (CompilerFlavor) -import Distribution.InstalledPackageInfo (AbiDependency, ExposedModule, InstalledPackageInfo) -import Distribution.ModuleName (ModuleName) -import Distribution.Package (Dependency, PackageIdentifier, PackageName) -import Distribution.PackageDescription -import Distribution.Types.AbiHash (AbiHash) -import Distribution.Types.ComponentId (ComponentId) -import Distribution.Types.CondTree -import Distribution.Types.ExecutableScope -import Distribution.Types.ExeDependency -import Distribution.Types.ForeignLib -import Distribution.Types.ForeignLibOption -import Distribution.Types.ForeignLibType -import Distribution.Types.IncludeRenaming (IncludeRenaming) -import Distribution.Types.LegacyExeDependency -import Distribution.Types.Mixin -import Distribution.Types.PkgconfigDependency -import Distribution.Types.UnitId (DefUnitId, UnitId) -import Distribution.Types.UnqualComponentName - -------------------------------------------------------------------------------- --- instances -------------------------------------------------------------------------------- - -instance (Eq a, Show a) => ToExpr (Condition a) where toExpr = defaultExprViaShow -instance (Show a, ToExpr b, ToExpr c, Show b, Show c, Eq a, Eq c, Eq b) => ToExpr (CondTree a b c) -instance (Show a, ToExpr b, ToExpr c, Show b, Show c, Eq a, Eq c, Eq b) => ToExpr (CondBranch a b c) - -instance ToExpr AbiDependency where toExpr = defaultExprViaShow -instance ToExpr AbiHash where toExpr = defaultExprViaShow -instance ToExpr Benchmark -instance ToExpr BenchmarkInterface -instance ToExpr BenchmarkType -instance ToExpr BuildInfo -instance ToExpr BuildType -instance ToExpr CompilerFlavor -instance ToExpr ComponentId where toExpr = 
defaultExprViaShow -instance ToExpr DefUnitId -instance ToExpr Dependency -instance ToExpr ExeDependency where toExpr = defaultExprViaShow -instance ToExpr Executable -instance ToExpr ExecutableScope where toExpr = defaultExprViaShow -instance ToExpr ExposedModule where toExpr = defaultExprViaShow -instance ToExpr Flag -instance ToExpr FlagName where toExpr = defaultExprViaShow -instance ToExpr ForeignLib -instance ToExpr ForeignLibOption -instance ToExpr ForeignLibType -instance ToExpr GenericPackageDescription -instance ToExpr IncludeRenaming -instance ToExpr InstalledPackageInfo -instance ToExpr LegacyExeDependency where toExpr = defaultExprViaShow -instance ToExpr LibVersionInfo where toExpr = defaultExprViaShow -instance ToExpr Library -instance ToExpr Mixin where toExpr = defaultExprViaShow -instance ToExpr ModuleName where toExpr = defaultExprViaShow -instance ToExpr ModuleReexport -instance ToExpr ModuleRenaming -instance ToExpr OpenModule -instance ToExpr OpenUnitId -instance ToExpr PackageDescription -instance ToExpr PackageIdentifier -instance ToExpr PackageName where toExpr = defaultExprViaShow -instance ToExpr PkgconfigDependency where toExpr = defaultExprViaShow -instance ToExpr RepoKind -instance ToExpr RepoType -instance ToExpr SetupBuildInfo -instance ToExpr SourceRepo -instance ToExpr TestSuite -instance ToExpr TestSuiteInterface -instance ToExpr TestType -instance ToExpr UnitId where toExpr = defaultExprViaShow -instance ToExpr UnqualComponentName where toExpr = defaultExprViaShow diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/misc/ghc-supported-languages.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/misc/ghc-supported-languages.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/misc/ghc-supported-languages.hs 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/misc/ghc-supported-languages.hs 1970-01-01 
00:00:00.000000000 +0000 @@ -1,97 +0,0 @@ --- | A test program to check that ghc has got all of its extensions registered --- -module Main where - -import Language.Haskell.Extension -import Distribution.Text -import Distribution.Simple.Utils -import Distribution.Verbosity - -import Data.List ((\\)) -import Data.Maybe -import Control.Applicative -import Control.Monad -import System.Environment -import System.Exit - --- | A list of GHC extensions that are deliberately not registered, --- e.g. due to being experimental and not ready for public consumption --- -exceptions = map readExtension [] - -checkProblems :: [Extension] -> [String] -checkProblems implemented = - - let unregistered = - [ ext | ext <- implemented -- extensions that ghc knows about - , not (registered ext) -- but that are not registered - , ext `notElem` exceptions ] -- except for the exceptions - - -- check if someone has forgotten to update the exceptions list... - - -- exceptions that are not implemented - badExceptions = exceptions \\ implemented - - -- exceptions that are now registered - badExceptions' = filter registered exceptions - - in catMaybes - [ check unregistered $ unlines - [ "The following extensions are known to GHC but are not in the " - , "extension registry in Language.Haskell.Extension." - , " " ++ intercalate "\n " (map display unregistered) - , "If these extensions are ready for public consumption then they " - , "should be registered. If they are still experimental and you " - , "think they are not ready to be registered then please add them " - , "to the exceptions list in this test program along with an " - , "explanation." - ] - , check badExceptions $ unlines - [ "Error in the extension exception list. The following extensions" - , "are listed as exceptions but are not even implemented by GHC:" - , " " ++ intercalate "\n " (map display badExceptions) - , "Please fix this test program by correcting the list of" - , "exceptions." 
- ] - , check badExceptions' $ unlines - [ "Error in the extension exception list. The following extensions" - , "are listed as exceptions to registration but they are in fact" - , "now registered in Language.Haskell.Extension:" - , " " ++ intercalate "\n " (map display badExceptions') - , "Please fix this test program by correcting the list of" - , "exceptions." - ] - ] - where - registered (UnknownExtension _) = False - registered _ = True - - check [] _ = Nothing - check _ i = Just i - - -main = topHandler $ do - [ghcPath] <- getArgs - exts <- getExtensions ghcPath - let problems = checkProblems exts - putStrLn (intercalate "\n" problems) - if null problems - then exitSuccess - else exitFailure - -getExtensions :: FilePath -> IO [Extension] -getExtensions ghcPath = - map readExtension . lines - <$> rawSystemStdout normal ghcPath ["--supported-languages"] - -readExtension :: String -> Extension -readExtension str = handleNoParse $ do - -- GHC defines extensions in a positive way, Cabal defines them - -- relative to H98 so we try parsing ("No" ++ extName) first - ext <- simpleParse ("No" ++ str) - case ext of - UnknownExtension _ -> simpleParse str - _ -> return ext - where - handleNoParse :: Maybe Extension -> Extension - handleNoParse = fromMaybe (error $ "unparsable extension " ++ show str) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/common1.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/common1.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/common1.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/common1.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -cabal-version: 2.1 -name: common -version: 0 -synopsis: Common-stanza demo demo -build-type: Simple - -source-repository head - Type: git - Location: 
https://github.com/hvr/-.git - -common windows - if os(windows) - build-depends: Win32 - --- Non-existing common stanza -common deps - import: windo - build-depends: - base >=4.10 && <4.11, - containers - -library - import: deps - - default-language: Haskell2010 - exposed-modules: ElseIf - - build-depends: - ghc-prim diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/common1.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/common1.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/common1.errors 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/common1.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -VERSION: Just (mkVersion [2,1]) -common1.cabal:17:3: Undefined common stanza imported: windo diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/common2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/common2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/common2.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/common2.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -cabal-version: 2.1 -name: common -version: 0 -synopsis: Common-stanza demo demo -build-type: Simple - -source-repository head - Type: git - Location: https://github.com/hvr/-.git - --- Used before use -common deps - import: windows - build-depends: - base >=4.10 && <4.11, - containers - -common windows - if os(windows) - build-depends: Win32 - -library - import: deps - - default-language: Haskell2010 - exposed-modules: ElseIf - - build-depends: - ghc-prim diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/common2.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/common2.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/common2.errors 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/common2.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -VERSION: Just (mkVersion [2,1]) -common2.cabal:13:3: Undefined common stanza imported: windows diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/common3.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/common3.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/common3.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/common3.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,31 +0,0 @@ -cabal-version: 2.1 -name: common -version: 0 -synopsis: Common-stanza demo demo -build-type: Simple - -source-repository head - Type: git - Location: https://github.com/hvr/-.git - -common windows - if os(windows) - build-depends: Win32 - -common deps - import: windows - build-depends: - base >=4.10 && <4.11, - containers - --- Duplicate -common deps - -library - import: deps - - default-language: Haskell2010 - exposed-modules: ElseIf - - build-depends: - ghc-prim diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/common3.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/common3.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/common3.errors 2018-10-17 15:59:04.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/common3.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -VERSION: Just (mkVersion [2,1]) -common3.cabal:22:1: Duplicate common stanza: deps diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat2.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat2.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -name: common -version: 0 -synopsis: Common-stanza demo demo -build-type: Simple -cabal-version: 2.1 - -source-repository head - Type: git - Location: https://github.com/hvr/-.git - -library - default-language: Haskell2010 - exposed-modules: ElseIf - - build-depends: - ghc-prim diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat2.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat2.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat2.errors 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat2.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -VERSION: Just (mkVersion [2,1]) -forward-compat2.cabal:5:1: cabal-version should be at the beginning of the file starting with spec version 2.2. 
See https://github.com/haskell/cabal/issues/4899 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat3.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat3.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat3.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat3.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -cabal-version: 99999.99 -name: forward-compat -version: 0 -synopsis: Forward compat, too new cabal-version: we fail. -build-type: Simple - -source-repository head - Type: git - Location: https://github.com/hvr/-.git - -library - default-language: Haskell2010 - exposed-modules: ElseIf - - build-depends: - ghc-prim diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat3.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat3.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat3.errors 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat3.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -VERSION: Just (mkVersion [99999,99]) -forward-compat3.cabal:0:0: Unsupported cabal-version. See https://github.com/haskell/cabal/issues/4899. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -cabal-version: 99999.9 -name: future -============ -Lexically completely changed future diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat.errors 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/forward-compat.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -VERSION: Just (mkVersion [99999,9]) -forward-compat.cabal:3:1: "the input" (line 3, column 1): -unexpected operator "============" -expecting field or section name -forward-compat.cabal:0:0: Unsupported cabal-version. See https://github.com/haskell/cabal/issues/4899. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055-2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055-2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055-2.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055-2.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -name: issue -version: 5055 -synopsis: no type in all branches -description: no type in all branches. -license: BSD3 -category: Test -build-type: Simple -cabal-version: >=2.0 - -executable flag-test-exe - main-is: FirstMain.hs - build-depends: base >= 4.8 && < 5 - default-language: Haskell2010 - -test-suite flag-cabal-test - -- TODO: fix so `type` can be on the top level - build-depends: base >= 4.8 && < 5 - default-language: Haskell2010 - - if os(windows) - main-is: FirstMain.hs - type: exitcode-stdio-1.0 - else: - main-is: SecondMain.hs - type: exitcode-stdio-1.0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055-2.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055-2.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055-2.errors 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055-2.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -VERSION: Just (mkVersion [2,0]) -issue-5055-2.cabal:15:1: Test suite "flag-cabal-test" is missing required field "type" or the field is not present in all conditional branches. 
The available test types are: exitcode-stdio-1.0, detailed-0.9 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -name: issue -version: 5055 -synopsis: no type in all branches -description: no type in all branches. -license: BSD3 -category: Test -build-type: Simple -cabal-version: >=2.0 - -executable flag-test-exe - main-is: FirstMain.hs - build-depends: base >= 4.8 && < 5 - default-language: Haskell2010 - -test-suite flag-cabal-test - build-depends: base >= 4.8 && < 5 - default-language: Haskell2010 - - if os(windows) - main-is: FirstMain.hs - type: exitcode-stdio-1.0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055.errors 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/issue-5055.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -VERSION: Just (mkVersion [2,0]) -issue-5055.cabal:15:1: Test suite "flag-cabal-test" is missing required field "type" or the field is not present in all conditional branches. 
The available test types are: exitcode-stdio-1.0, detailed-0.9 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/leading-comma.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/leading-comma.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/leading-comma.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/leading-comma.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -name: leading-comma -version: 0 -synopsis: leading comma, trailing comma, or ordinary -build-type: Simple --- too small cabal-version -cabal-version: 2.0 - -library - default-language: Haskell2010 - exposed-modules: LeadingComma - - build-depends: base, containers - - build-depends: - deepseq, - transformers, - - build-depends: - , filepath - , directory diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/leading-comma.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/leading-comma.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/leading-comma.errors 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/leading-comma.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -VERSION: Just (mkVersion [2,0]) -leading-comma.cabal:16:18: -unexpected end of input -expecting white space - -deepseq, -transformers, - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion2.cabal 2018-10-17 15:59:04.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion2.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -name: noVersion -version: 0 -synopsis: ^>= in build-depends -build-type: Simple -cabal-version: 1.20 - -library - default-language: Haskell2010 - exposed-modules: ElseIf - build-depends: bad-package ^>= 2.0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion2.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion2.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion2.errors 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion2.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -VERSION: Just (mkVersion [1,20]) -noVersion2.cabal:10:40: -unexpected major bounded version syntax (caret, ^>=) used. To use this syntax the package need to specify at least 'cabal-version: 2.0'. Alternatively, if broader compatibility is important then use: >=2.0 && <2.1 -expecting "." 
or "-" - -bad-package ^>= 2.0 - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -name: noVersion -version: 0 -synopsis: -none in build-depends -build-type: Simple -cabal-version: 1.20 - -library - default-language: Haskell2010 - exposed-modules: ElseIf - build-depends: bad-package -none diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion.errors 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/noVersion.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -VERSION: Just (mkVersion [1,20]) -noVersion.cabal:10:38: -unexpected -none version range used. To use this syntax the package needs to specify at least 'cabal-version: 1.22'. Alternatively, if broader compatibility is important then use <0 or other empty range. 
- -bad-package -none - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/range-ge-wild.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/range-ge-wild.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/range-ge-wild.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/range-ge-wild.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -name: range-ge-wild -version: 0 -synopsis: Wild range after non-== op -build-type: Simple -cabal-version: >=1.10 - -library - build-depends: - -- comment, to check that position is right - base >= 4.* diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/range-ge-wild.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/range-ge-wild.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/range-ge-wild.errors 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/range-ge-wild.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -VERSION: Just (mkVersion [1,10]) -range-ge-wild.cabal:10:16: -unexpected wild-card version after non-== operator: ">=" - -base >= 4.* - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-1.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-1.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-1.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-1.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -cabal-version: 2.2 -name: spdx -version: 0 -synopsis: 
testing positive parsing of spdx identifiers -build-type: Simple -license: BSD3 - -library - default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-1.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-1.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-1.errors 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-1.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -VERSION: Just (mkVersion [2,2]) -spdx-1.cabal:6:26: -unexpected Unknown SPDX license identifier: 'BSD3' Do you mean BSD-3-Clause? - -BSD3 - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-2.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-2.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -cabal-version: 2.4 -name: spdx -version: 0 -synopsis: testing positive parsing of spdx identifiers -build-type: Simple -license: AGPL-1.0 - -library - default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-2.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-2.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-2.errors 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-2.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -VERSION: Just (mkVersion 
[2,4]) -spdx-2.cabal:6:30: -unexpected Unknown SPDX license identifier: 'AGPL-1.0' - -AGPL-1.0 - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-3.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-3.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-3.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-3.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -cabal-version: 2.2 -name: spdx -version: 0 -synopsis: testing positive parsing of spdx identifiers -build-type: Simple -license: AGPL-1.0-only - -library - default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-3.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-3.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-3.errors 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/errors/spdx-3.errors 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -VERSION: Just (mkVersion [2,2]) -spdx-3.cabal:6:35: -unexpected Unknown SPDX license identifier: 'AGPL-1.0-only' - -AGPL-1.0-only - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/Includes2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/Includes2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/Includes2.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/Includes2.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -name: z-Includes2-z-mylib -version: 0.1.0.0 -id: 
Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n -instantiated-with: Database=Includes2-0.1.0.0-inplace-mysql:Database.MySQL -package-name: Includes2 -lib-name: mylib -key: Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n -license: BSD3 -maintainer: ezyang@cs.stanford.edu -author: Edward Z. Yang -exposed: False -indefinite: False -exposed-modules: - Mine -abi: inplace -trusted: False -import-dirs: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n -library-dirs: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n -dynamic-library-dirs: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n -data-dir: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2 -hs-libraries: HSIncludes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n -depends: - base-4.10.1.0 Includes2-0.1.0.0-inplace-mysql -abi-depends: base-4.10.1.0=35a7f6be752ee4f7385cb5bf28677879 - Includes2-0.1.0.0-inplace-mysql=inplace -haddock-interfaces: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/doc/html/Includes2/Includes2.haddock -haddock-html: 
/home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/doc/html/Includes2 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/Includes2.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/Includes2.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/Includes2.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/Includes2.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,51 +0,0 @@ -InstalledPackageInfo - {abiDepends = [`AbiDependency {depUnitId = UnitId "base-4.10.1.0", depAbiHash = AbiHash "35a7f6be752ee4f7385cb5bf28677879"}`, - `AbiDependency {depUnitId = UnitId "Includes2-0.1.0.0-inplace-mysql", depAbiHash = AbiHash "inplace"}`], - abiHash = `AbiHash "inplace"`, - author = "Edward Z. 
Yang", - category = "", - ccOptions = [], - compatPackageKey = "Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n", - copyright = "", - cxxOptions = [], - dataDir = "/home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2", - depends = [`UnitId "base-4.10.1.0"`, - `UnitId "Includes2-0.1.0.0-inplace-mysql"`], - description = "", - exposed = False, - exposedModules = [`ExposedModule {exposedName = ModuleName ["Mine"], exposedReexport = Nothing}`], - extraGHCiLibraries = [], - extraLibraries = [], - frameworkDirs = [], - frameworks = [], - haddockHTMLs = ["/home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/doc/html/Includes2"], - haddockInterfaces = ["/home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/doc/html/Includes2/Includes2.haddock"], - hiddenModules = [], - homepage = "", - hsLibraries = ["HSIncludes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n"], - importDirs = ["/home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n"], - includeDirs = [], - includes = [], - indefinite = False, - installedComponentId_ = `ComponentId ""`, - installedUnitId = `UnitId "Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n"`, - instantiatedWith = [_×_ - `ModuleName ["Database"]` - (OpenModule - (DefiniteUnitId - (DefUnitId `UnitId "Includes2-0.1.0.0-inplace-mysql"`)) - `ModuleName ["Database","MySQL"]`)], - ldOptions = [], - libraryDirs = 
["/home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n"], - libraryDynDirs = ["/home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n"], - license = Right BSD3, - maintainer = "ezyang@cs.stanford.edu", - pkgRoot = Nothing, - pkgUrl = "", - sourceLibName = Just `UnqualComponentName "mylib"`, - sourcePackageId = PackageIdentifier - {pkgName = `PackageName "Includes2"`, - pkgVersion = `mkVersion [0,1,0,0]`}, - stability = "", - synopsis = "", - trusted = False} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/Includes2.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/Includes2.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/Includes2.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/Includes2.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -name: z-Includes2-z-mylib -version: 0.1.0.0 -package-name: Includes2 -lib-name: mylib -id: Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n -instantiated-with: Database=Includes2-0.1.0.0-inplace-mysql:Database.MySQL -key: Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n -license: BSD3 -maintainer: ezyang@cs.stanford.edu -author: Edward Z. 
Yang -abi: inplace -exposed-modules: - Mine -import-dirs: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n -library-dirs: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n -dynamic-library-dirs: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n -data-dir: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2 -hs-libraries: HSIncludes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n -depends: - base-4.10.1.0 Includes2-0.1.0.0-inplace-mysql -abi-depends: base-4.10.1.0=35a7f6be752ee4f7385cb5bf28677879 - Includes2-0.1.0.0-inplace-mysql=inplace -haddock-interfaces: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/doc/html/Includes2/Includes2.haddock -haddock-html: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/doc/html/Includes2 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/internal-preprocessor-test.cabal 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/internal-preprocessor-test.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/internal-preprocessor-test.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/internal-preprocessor-test.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -name: internal-preprocessor-test -version: 0.1.0.0 -id: internal-preprocessor-test-0.1.0.0 -key: internal-preprocessor-test-0.1.0.0 -license: GPL-3 -maintainer: mikhail.glushenkov@gmail.com -synopsis: Internal custom preprocessor example. -description: - See https://github.com/haskell/cabal/issues/1541#issuecomment-30155513 -category: Testing -author: Mikhail Glushenkov -exposed: True -exposed-modules: - A -trusted: False -import-dirs: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build -library-dirs: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build - /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build -data-dir: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess -hs-libraries: HSinternal-preprocessor-test-0.1.0.0 -depends: - base-4.8.2.0-0d6d1084fbc041e1cded9228e80e264d -haddock-interfaces: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/doc/html/internal-preprocessor-test/internal-preprocessor-test.haddock -haddock-html: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/doc/html/internal-preprocessor-test -pkgroot: "/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist" - diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/internal-preprocessor-test.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/internal-preprocessor-test.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/internal-preprocessor-test.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/internal-preprocessor-test.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,46 +0,0 @@ -InstalledPackageInfo - {abiDepends = [], - abiHash = `AbiHash ""`, - author = "Mikhail Glushenkov", - category = "Testing", - ccOptions = [], - compatPackageKey = "internal-preprocessor-test-0.1.0.0", - copyright = "", - cxxOptions = [], - dataDir = "/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess", - depends = [`UnitId "base-4.8.2.0-0d6d1084fbc041e1cded9228e80e264d"`], - description = "See https://github.com/haskell/cabal/issues/1541#issuecomment-30155513", - exposed = True, - exposedModules = [`ExposedModule {exposedName = ModuleName ["A"], exposedReexport = Nothing}`], - extraGHCiLibraries = [], - extraLibraries = [], - frameworkDirs = [], - frameworks = [], - haddockHTMLs = ["/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/doc/html/internal-preprocessor-test"], - haddockInterfaces = ["/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/doc/html/internal-preprocessor-test/internal-preprocessor-test.haddock"], - hiddenModules = [], - homepage = "", - hsLibraries = ["HSinternal-preprocessor-test-0.1.0.0"], - importDirs = ["/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build"], - includeDirs = [], - includes = [], - indefinite = False, - installedComponentId_ = `ComponentId ""`, - installedUnitId = 
`UnitId "internal-preprocessor-test-0.1.0.0"`, - instantiatedWith = [], - ldOptions = [], - libraryDirs = ["/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build", - "/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build"], - libraryDynDirs = [], - license = Right (GPL (Just `mkVersion [3]`)), - maintainer = "mikhail.glushenkov@gmail.com", - pkgRoot = Just - "/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist", - pkgUrl = "", - sourceLibName = Nothing, - sourcePackageId = PackageIdentifier - {pkgName = `PackageName "internal-preprocessor-test"`, - pkgVersion = `mkVersion [0,1,0,0]`}, - stability = "", - synopsis = "Internal custom preprocessor example.", - trusted = False} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/internal-preprocessor-test.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/internal-preprocessor-test.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/internal-preprocessor-test.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/internal-preprocessor-test.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -name: internal-preprocessor-test -version: 0.1.0.0 -id: internal-preprocessor-test-0.1.0.0 -key: internal-preprocessor-test-0.1.0.0 -license: GPL-3 -maintainer: mikhail.glushenkov@gmail.com -author: Mikhail Glushenkov -synopsis: Internal custom preprocessor example. 
-description: - See https://github.com/haskell/cabal/issues/1541#issuecomment-30155513 -category: Testing -exposed: True -exposed-modules: - A -import-dirs: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build -library-dirs: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build - /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build -data-dir: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess -hs-libraries: HSinternal-preprocessor-test-0.1.0.0 -depends: - base-4.8.2.0-0d6d1084fbc041e1cded9228e80e264d -haddock-interfaces: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/doc/html/internal-preprocessor-test/internal-preprocessor-test.haddock -haddock-html: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/doc/html/internal-preprocessor-test diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/issue-2276-ghc-9885.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/issue-2276-ghc-9885.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/issue-2276-ghc-9885.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/issue-2276-ghc-9885.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,175 +0,0 @@ -name: transformers -version: 0.5.2.0 -id: transformers-0.5.2.0 -key: transformers-0.5.2.0 -license: BSD3 -maintainer: Ross Paterson -synopsis: Concrete functor and monad transformers -description: - A portable library of functor and monad transformers, inspired by - the paper \"Functional Programming with Overloading and Higher-Order - Polymorphism\", by Mark P 
Jones, - in /Advanced School of Functional Programming/, 1995 - (). - . - This package contains: - . - * the monad transformer class (in "Control.Monad.Trans.Class") - and IO monad class (in "Control.Monad.IO.Class") - . - * concrete functor and monad transformers, each with associated - operations and functions to lift operations associated with other - transformers. - . - The package can be used on its own in portable Haskell code, in - which case operations need to be manually lifted through transformer - stacks (see "Control.Monad.Trans.Class" for some examples). - Alternatively, it can be used with the non-portable monad classes in - the @mtl@ or @monads-tf@ packages, which automatically lift operations - introduced by monad transformers through other transformers. -category: Control -author: Andy Gill, Ross Paterson -exposed: True -indefinite: False -exposed-modules: - Control.Applicative.Backwards Control.Applicative.Lift - Control.Monad.Signatures Control.Monad.Trans.Class - Control.Monad.Trans.Cont Control.Monad.Trans.Error - Control.Monad.Trans.Except Control.Monad.Trans.Identity - Control.Monad.Trans.List Control.Monad.Trans.Maybe - Control.Monad.Trans.RWS Control.Monad.Trans.RWS.Lazy - Control.Monad.Trans.RWS.Strict Control.Monad.Trans.Reader - Control.Monad.Trans.State Control.Monad.Trans.State.Lazy - Control.Monad.Trans.State.Strict Control.Monad.Trans.Writer - Control.Monad.Trans.Writer.Lazy Control.Monad.Trans.Writer.Strict - Data.Functor.Constant Data.Functor.Reverse -abi: e04579c0363c9229351d1a0b394bf2d5 -trusted: False -import-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 -library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 -dynamic-library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 -data-dir: /opt/ghc/8.2.2/share/x86_64-linux-ghc-8.2.2/transformers-0.5.2.0 -hs-libraries: HStransformers-0.5.2.0 -depends: - base-4.10.1.0 -abi-depends: base-4.10.1.0=35a7f6be752ee4f7385cb5bf28677879 -haddock-interfaces: 
/opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0/transformers.haddock -haddock-html: /opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0 -ld-options: -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - 
-lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm 
-lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm 
-lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm 
-lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/issue-2276-ghc-9885.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/issue-2276-ghc-9885.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/issue-2276-ghc-9885.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/issue-2276-ghc-9885.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,2086 +0,0 @@ -InstalledPackageInfo - {abiDepends = [`AbiDependency {depUnitId = UnitId "base-4.10.1.0", depAbiHash = AbiHash "35a7f6be752ee4f7385cb5bf28677879"}`], - abiHash = `AbiHash "e04579c0363c9229351d1a0b394bf2d5"`, - author = "Andy Gill, Ross Paterson", - category = "Control", - ccOptions = [], - compatPackageKey = "transformers-0.5.2.0", - copyright = "", - cxxOptions = [], - dataDir = "/opt/ghc/8.2.2/share/x86_64-linux-ghc-8.2.2/transformers-0.5.2.0", - depends = [`UnitId "base-4.10.1.0"`], - description = concat - ["A portable library of functor and monad transformers, inspired by\n", - "the paper \\\"Functional Programming with Overloading and Higher-Order\n", - "Polymorphism\\\", by Mark P Jones,\n", - "in /Advanced School of Functional Programming/, 1995\n", - "().\n", - "\n", - "This package contains:\n", - "\n", - "* the monad transformer class (in \"Control.Monad.Trans.Class\")\n", - "and IO monad class (in \"Control.Monad.IO.Class\")\n", - "\n", - "* concrete functor and monad transformers, each with 
associated\n", - "operations and functions to lift operations associated with other\n", - "transformers.\n", - "\n", - "The package can be used on its own in portable Haskell code, in\n", - "which case operations need to be manually lifted through transformer\n", - "stacks (see \"Control.Monad.Trans.Class\" for some examples).\n", - "Alternatively, it can be used with the non-portable monad classes in\n", - "the @mtl@ or @monads-tf@ packages, which automatically lift operations\n", - "introduced by monad transformers through other transformers."], - exposed = True, - exposedModules = [`ExposedModule {exposedName = ModuleName ["Control","Applicative","Backwards"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Applicative","Lift"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Signatures"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Class"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Cont"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Error"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Except"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Identity"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","List"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Maybe"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","RWS"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","RWS","Lazy"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","RWS","Strict"], 
exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Reader"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","State"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","State","Lazy"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","State","Strict"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Writer"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Writer","Lazy"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Writer","Strict"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Data","Functor","Constant"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Data","Functor","Reverse"], exposedReexport = Nothing}`], - extraGHCiLibraries = [], - extraLibraries = [], - frameworkDirs = [], - frameworks = [], - haddockHTMLs = ["/opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0"], - haddockInterfaces = ["/opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0/transformers.haddock"], - hiddenModules = [], - homepage = "", - hsLibraries = ["HStransformers-0.5.2.0"], - importDirs = ["/opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0"], - includeDirs = [], - includes = [], - indefinite = False, - installedComponentId_ = `ComponentId ""`, - installedUnitId = `UnitId "transformers-0.5.2.0"`, - instantiatedWith = [], - ldOptions = ["-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", 
- "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - 
"-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - 
"-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - 
"-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - 
"-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - 
"-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - 
"-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - 
"-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - 
"-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm", - "-lm"], - libraryDirs = ["/opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0"], - libraryDynDirs = ["/opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0"], - license = Right BSD3, - maintainer = "Ross Paterson ", - pkgRoot = Nothing, - pkgUrl = "", - sourceLibName = Nothing, - sourcePackageId = 
PackageIdentifier - {pkgName = `PackageName "transformers"`, - pkgVersion = `mkVersion [0,5,2,0]`}, - stability = "", - synopsis = "Concrete functor and monad transformers", - trusted = False} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/issue-2276-ghc-9885.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/issue-2276-ghc-9885.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/issue-2276-ghc-9885.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/issue-2276-ghc-9885.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,173 +0,0 @@ -name: transformers -version: 0.5.2.0 -id: transformers-0.5.2.0 -key: transformers-0.5.2.0 -license: BSD3 -maintainer: Ross Paterson -author: Andy Gill, Ross Paterson -synopsis: Concrete functor and monad transformers -description: - A portable library of functor and monad transformers, inspired by - the paper \"Functional Programming with Overloading and Higher-Order - Polymorphism\", by Mark P Jones, - in /Advanced School of Functional Programming/, 1995 - (). - . - This package contains: - . - * the monad transformer class (in "Control.Monad.Trans.Class") - and IO monad class (in "Control.Monad.IO.Class") - . - * concrete functor and monad transformers, each with associated - operations and functions to lift operations associated with other - transformers. - . - The package can be used on its own in portable Haskell code, in - which case operations need to be manually lifted through transformer - stacks (see "Control.Monad.Trans.Class" for some examples). - Alternatively, it can be used with the non-portable monad classes in - the @mtl@ or @monads-tf@ packages, which automatically lift operations - introduced by monad transformers through other transformers. 
-category: Control -abi: e04579c0363c9229351d1a0b394bf2d5 -exposed: True -exposed-modules: - Control.Applicative.Backwards Control.Applicative.Lift - Control.Monad.Signatures Control.Monad.Trans.Class - Control.Monad.Trans.Cont Control.Monad.Trans.Error - Control.Monad.Trans.Except Control.Monad.Trans.Identity - Control.Monad.Trans.List Control.Monad.Trans.Maybe - Control.Monad.Trans.RWS Control.Monad.Trans.RWS.Lazy - Control.Monad.Trans.RWS.Strict Control.Monad.Trans.Reader - Control.Monad.Trans.State Control.Monad.Trans.State.Lazy - Control.Monad.Trans.State.Strict Control.Monad.Trans.Writer - Control.Monad.Trans.Writer.Lazy Control.Monad.Trans.Writer.Strict - Data.Functor.Constant Data.Functor.Reverse -import-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 -library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 -dynamic-library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 -data-dir: /opt/ghc/8.2.2/share/x86_64-linux-ghc-8.2.2/transformers-0.5.2.0 -hs-libraries: HStransformers-0.5.2.0 -depends: - base-4.10.1.0 -abi-depends: base-4.10.1.0=35a7f6be752ee4f7385cb5bf28677879 -ld-options: -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm 
-lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm 
-lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm 
-lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm 
-lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm - -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -haddock-interfaces: /opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0/transformers.haddock -haddock-html: /opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/transformers.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/transformers.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/transformers.cabal 2018-10-17 15:59:04.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/transformers.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -name: transformers -version: 0.5.2.0 -id: transformers-0.5.2.0 -key: transformers-0.5.2.0 -license: BSD3 -maintainer: Ross Paterson -synopsis: Concrete functor and monad transformers -description: - A portable library of functor and monad transformers, inspired by - the paper \"Functional Programming with Overloading and Higher-Order - Polymorphism\", by Mark P Jones, - in /Advanced School of Functional Programming/, 1995 - (). - . - This package contains: - . - * the monad transformer class (in "Control.Monad.Trans.Class") - and IO monad class (in "Control.Monad.IO.Class") - . - * concrete functor and monad transformers, each with associated - operations and functions to lift operations associated with other - transformers. - . - The package can be used on its own in portable Haskell code, in - which case operations need to be manually lifted through transformer - stacks (see "Control.Monad.Trans.Class" for some examples). - Alternatively, it can be used with the non-portable monad classes in - the @mtl@ or @monads-tf@ packages, which automatically lift operations - introduced by monad transformers through other transformers. 
-category: Control -author: Andy Gill, Ross Paterson -exposed: True -indefinite: False -exposed-modules: - Control.Applicative.Backwards Control.Applicative.Lift - Control.Monad.Signatures Control.Monad.Trans.Class - Control.Monad.Trans.Cont Control.Monad.Trans.Error - Control.Monad.Trans.Except Control.Monad.Trans.Identity - Control.Monad.Trans.List Control.Monad.Trans.Maybe - Control.Monad.Trans.RWS Control.Monad.Trans.RWS.Lazy - Control.Monad.Trans.RWS.Strict Control.Monad.Trans.Reader - Control.Monad.Trans.State Control.Monad.Trans.State.Lazy - Control.Monad.Trans.State.Strict Control.Monad.Trans.Writer - Control.Monad.Trans.Writer.Lazy Control.Monad.Trans.Writer.Strict - Data.Functor.Constant Data.Functor.Reverse -abi: e04579c0363c9229351d1a0b394bf2d5 -trusted: False -import-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 -library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 -dynamic-library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 -data-dir: /opt/ghc/8.2.2/share/x86_64-linux-ghc-8.2.2/transformers-0.5.2.0 -hs-libraries: HStransformers-0.5.2.0 -depends: - base-4.10.1.0 -abi-depends: base-4.10.1.0=35a7f6be752ee4f7385cb5bf28677879 -haddock-interfaces: /opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0/transformers.haddock -haddock-html: /opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0 -pkgroot: "/opt/ghc/8.2.2/lib/ghc-8.2.2" - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/transformers.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/transformers.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/transformers.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/transformers.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,86 +0,0 @@ -InstalledPackageInfo - {abiDepends = [`AbiDependency 
{depUnitId = UnitId "base-4.10.1.0", depAbiHash = AbiHash "35a7f6be752ee4f7385cb5bf28677879"}`], - abiHash = `AbiHash "e04579c0363c9229351d1a0b394bf2d5"`, - author = "Andy Gill, Ross Paterson", - category = "Control", - ccOptions = [], - cxxOptions = [], - compatPackageKey = "transformers-0.5.2.0", - copyright = "", - dataDir = "/opt/ghc/8.2.2/share/x86_64-linux-ghc-8.2.2/transformers-0.5.2.0", - depends = [`UnitId "base-4.10.1.0"`], - description = concat - ["A portable library of functor and monad transformers, inspired by\n", - "the paper \\\"Functional Programming with Overloading and Higher-Order\n", - "Polymorphism\\\", by Mark P Jones,\n", - "in /Advanced School of Functional Programming/, 1995\n", - "().\n", - "\n", - "This package contains:\n", - "\n", - "* the monad transformer class (in \"Control.Monad.Trans.Class\")\n", - "and IO monad class (in \"Control.Monad.IO.Class\")\n", - "\n", - "* concrete functor and monad transformers, each with associated\n", - "operations and functions to lift operations associated with other\n", - "transformers.\n", - "\n", - "The package can be used on its own in portable Haskell code, in\n", - "which case operations need to be manually lifted through transformer\n", - "stacks (see \"Control.Monad.Trans.Class\" for some examples).\n", - "Alternatively, it can be used with the non-portable monad classes in\n", - "the @mtl@ or @monads-tf@ packages, which automatically lift operations\n", - "introduced by monad transformers through other transformers."], - exposed = True, - exposedModules = [`ExposedModule {exposedName = ModuleName ["Control","Applicative","Backwards"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Applicative","Lift"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Signatures"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Class"], exposedReexport = Nothing}`, - 
`ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Cont"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Error"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Except"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Identity"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","List"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Maybe"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","RWS"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","RWS","Lazy"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","RWS","Strict"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Reader"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","State"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","State","Lazy"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","State","Strict"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Writer"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Writer","Lazy"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Writer","Strict"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Data","Functor","Constant"], exposedReexport = Nothing}`, - `ExposedModule {exposedName = ModuleName ["Data","Functor","Reverse"], exposedReexport = Nothing}`], - 
extraGHCiLibraries = [], - extraLibraries = [], - frameworkDirs = [], - frameworks = [], - haddockHTMLs = ["/opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0"], - haddockInterfaces = ["/opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0/transformers.haddock"], - hiddenModules = [], - homepage = "", - hsLibraries = ["HStransformers-0.5.2.0"], - importDirs = ["/opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0"], - includeDirs = [], - includes = [], - indefinite = False, - installedComponentId_ = `ComponentId ""`, - installedUnitId = `UnitId "transformers-0.5.2.0"`, - instantiatedWith = [], - ldOptions = [], - libraryDirs = ["/opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0"], - libraryDynDirs = ["/opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0"], - license = Right BSD3, - maintainer = "Ross Paterson ", - pkgRoot = Just "/opt/ghc/8.2.2/lib/ghc-8.2.2", - pkgUrl = "", - sourceLibName = Nothing, - sourcePackageId = PackageIdentifier - {pkgName = `PackageName "transformers"`, - pkgVersion = `mkVersion [0,5,2,0]`}, - stability = "", - synopsis = "Concrete functor and monad transformers", - trusted = False} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/transformers.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/transformers.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/ipi/transformers.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/ipi/transformers.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,55 +0,0 @@ -name: transformers -version: 0.5.2.0 -id: transformers-0.5.2.0 -key: transformers-0.5.2.0 -license: BSD3 -maintainer: Ross Paterson -author: Andy Gill, Ross Paterson -synopsis: Concrete functor and monad transformers -description: - A portable library of functor and monad transformers, inspired by - the paper 
\"Functional Programming with Overloading and Higher-Order - Polymorphism\", by Mark P Jones, - in /Advanced School of Functional Programming/, 1995 - (). - . - This package contains: - . - * the monad transformer class (in "Control.Monad.Trans.Class") - and IO monad class (in "Control.Monad.IO.Class") - . - * concrete functor and monad transformers, each with associated - operations and functions to lift operations associated with other - transformers. - . - The package can be used on its own in portable Haskell code, in - which case operations need to be manually lifted through transformer - stacks (see "Control.Monad.Trans.Class" for some examples). - Alternatively, it can be used with the non-portable monad classes in - the @mtl@ or @monads-tf@ packages, which automatically lift operations - introduced by monad transformers through other transformers. -category: Control -abi: e04579c0363c9229351d1a0b394bf2d5 -exposed: True -exposed-modules: - Control.Applicative.Backwards Control.Applicative.Lift - Control.Monad.Signatures Control.Monad.Trans.Class - Control.Monad.Trans.Cont Control.Monad.Trans.Error - Control.Monad.Trans.Except Control.Monad.Trans.Identity - Control.Monad.Trans.List Control.Monad.Trans.Maybe - Control.Monad.Trans.RWS Control.Monad.Trans.RWS.Lazy - Control.Monad.Trans.RWS.Strict Control.Monad.Trans.Reader - Control.Monad.Trans.State Control.Monad.Trans.State.Lazy - Control.Monad.Trans.State.Strict Control.Monad.Trans.Writer - Control.Monad.Trans.Writer.Lazy Control.Monad.Trans.Writer.Strict - Data.Functor.Constant Data.Functor.Reverse -import-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 -library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 -dynamic-library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 -data-dir: /opt/ghc/8.2.2/share/x86_64-linux-ghc-8.2.2/transformers-0.5.2.0 -hs-libraries: HStransformers-0.5.2.0 -depends: - base-4.10.1.0 -abi-depends: base-4.10.1.0=35a7f6be752ee4f7385cb5bf28677879 
-haddock-interfaces: /opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0/transformers.haddock -haddock-html: /opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/bad-glob-syntax.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/bad-glob-syntax.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/bad-glob-syntax.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/bad-glob-syntax.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,16 +0,0 @@ -cabal-version: 2.2 -name: bad-glob-syntax -version: 0 -extra-source-files: - foo/blah-*.hs - foo/*/bar -license: BSD-3-Clause -synopsis: no -description: none -category: Test -maintainer: none - -library - default-language: Haskell2010 - exposed-modules: - Foo diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/bad-glob-syntax.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/bad-glob-syntax.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/bad-glob-syntax.check 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/bad-glob-syntax.check 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -In the 'extra-source-files' field: invalid file glob 'foo/blah-*.hs'. Wildcards '*' may only totally replace the file's base name, not only parts of it. -In the 'extra-source-files' field: invalid file glob 'foo/*/bar'. A wildcard '**' is only allowed as the final parent directory. Stars must not otherwise appear in the parent directories. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cc-options-with-optimization.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cc-options-with-optimization.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cc-options-with-optimization.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cc-options-with-optimization.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -cabal-version: 2.2 -category: test -description: test a build check involving C++-options field -license: BSD-3-Clause -maintainer: me@example.com -name: cxx-options-with-optimization -synopsis: test a build check -version: 1 - -library - build-depends: base >= 4.9 && <4.10 - cc-options: -O2 - default-language: Haskell2010 - exposed-modules: Prelude - hs-source-dirs: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cc-options-with-optimization.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cc-options-with-optimization.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cc-options-with-optimization.check 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cc-options-with-optimization.check 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -'cc-options: -O[n]' is generally not needed. When building with optimisations Cabal automatically adds '-O2' for C code. Setting it yourself interferes with the --disable-optimization flag. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common2.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common2.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,43 +0,0 @@ -cabal-version: 2.1 -name: common -version: 0 -synopsis: Common-stanza demo demo -build-type: Simple - -source-repository head - Type: git - Location: https://github.com/hvr/-.git - -common win-dows - if os(windows) - build-depends: Win32 - -common deps - import: win-dows - buildable: True - build-depends: - base >=4.10 && <4.11, - containers - -library - import: deps - - default-language: Haskell2010 - exposed-modules: ElseIf - - build-depends: - ghc-prim - -test-suite tests - import: deps, win-dows - - -- buildable fields verify that we don't have duplicate field warnings - buildable: True - if os(windows) - buildable: False - - type: exitcode-stdio-1.0 - main-is: Tests.hs - - build-depends: - HUnit diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common2.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common2.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common2.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common2.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,411 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `Var (OS Windows)`, - 
condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "Win32"` - AnyVersion], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "Win32"` - AnyVersion], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}}], - condTreeConstraints = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [4,10]`) - (EarlierVersion `mkVersion [4,11]`)), - Dependency `PackageName "containers"` AnyVersion, - Dependency `PackageName "ghc-prim"` AnyVersion], - condTreeData = Library - {exposedModules = [`ModuleName ["ElseIf"]`], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs 
= [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [4,10]`) - (EarlierVersion - `mkVersion [4,11]`)), - Dependency - `PackageName "containers"` - AnyVersion, - Dependency - `PackageName "ghc-prim"` - AnyVersion], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [_×_ - `UnqualComponentName "tests"` - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `Var (OS Windows)`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "Win32"` - AnyVersion], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - 
sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "Win32"` - AnyVersion], - virtualModules = []}, - testInterface = TestSuiteUnsupported - (TestTypeUnknown - "" - `mkVersion []`), - testName = `UnqualComponentName ""`}}}, - CondBranch - {condBranchCondition = `Var (OS Windows)`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "Win32"` - AnyVersion], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "Win32"` - AnyVersion], - virtualModules = []}, - testInterface = TestSuiteUnsupported - (TestTypeUnknown - "" - `mkVersion []`), - testName = `UnqualComponentName ""`}}}, - CondBranch - {condBranchCondition = `Var (OS Windows)`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = False, - cSources = [], - ccOptions = [], - cmmOptions = [], - 
cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - testInterface = TestSuiteUnsupported - (TestTypeUnknown - "" - `mkVersion []`), - testName = `UnqualComponentName ""`}}}], - condTreeConstraints = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [4,10]`) - (EarlierVersion `mkVersion [4,11]`)), - Dependency `PackageName "containers"` AnyVersion, - Dependency `PackageName "HUnit"` AnyVersion], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - 
`PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [4,10]`) - (EarlierVersion - `mkVersion [4,11]`)), - Dependency - `PackageName "containers"` - AnyVersion, - Dependency - `PackageName "HUnit"` - AnyVersion], - virtualModules = []}, - testInterface = TestSuiteExeV10 - `mkVersion [1,0]` "Tests.hs", - testName = `UnqualComponentName ""`}}], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "", - benchmarks = [], - bugReports = "", - buildTypeRaw = Just Simple, - category = "", - copyright = "", - customFieldsPD = [], - dataDir = "", - dataFiles = [], - description = "", - executables = [], - extraDocFiles = [], - extraSrcFiles = [], - extraTmpFiles = [], - foreignLibs = [], - homepage = "", - library = Nothing, - licenseFiles = [], - licenseRaw = Left NONE, - maintainer = "", - package = PackageIdentifier - {pkgName = `PackageName "common"`, - pkgVersion = `mkVersion [0]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = [SourceRepo - {repoBranch = Nothing, - repoKind = RepoHead, - repoLocation = Just "https://github.com/hvr/-.git", - repoModule = Nothing, - repoSubdir = Nothing, - repoTag = Nothing, - repoType = Just Git}], - specVersionRaw = Left `mkVersion [2,1]`, - stability = "", - subLibraries = [], - synopsis = "Common-stanza demo demo", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common2.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common2.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common2.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common2.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,41 +0,0 @@ -cabal-version: 2.1 -name: common -version: 0 -synopsis: Common-stanza demo demo -build-type: 
Simple - -source-repository head - type: git - location: https://github.com/hvr/-.git - -library - exposed-modules: - ElseIf - default-language: Haskell2010 - build-depends: - base >=4.10 && <4.11, - containers -any, - ghc-prim -any - - if os(windows) - build-depends: - Win32 -any - -test-suite tests - type: exitcode-stdio-1.0 - main-is: Tests.hs - build-depends: - base >=4.10 && <4.11, - containers -any, - HUnit -any - - if os(windows) - build-depends: - Win32 -any - - if os(windows) - build-depends: - Win32 -any - - if os(windows) - buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,32 +0,0 @@ -name: common -version: 0 -synopsis: Common-stanza demo demo -build-type: Simple -cabal-version: >=1.10 - -source-repository head - Type: git - Location: https://github.com/hvr/-.git - -common deps - build-depends: - base >=4.10 && <4.11, - containers - -library - import: deps - - default-language: Haskell2010 - exposed-modules: "ElseIf" - - build-depends: - ghc-prim - -test-suite tests - import: deps - - type: exitcode-stdio-1.0 - main-is: Tests.hs - - build-depends: - HUnit diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common.expr 2018-10-17 15:59:04.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,155 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "ghc-prim"` AnyVersion], - condTreeData = Library - {exposedModules = [`ModuleName ["ElseIf"]`], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "ghc-prim"` - AnyVersion], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [_×_ - `UnqualComponentName "tests"` - CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "HUnit"` AnyVersion], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], 
- cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "HUnit"` - AnyVersion], - virtualModules = []}, - testInterface = TestSuiteExeV10 - `mkVersion [1,0]` "Tests.hs", - testName = `UnqualComponentName ""`}}], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "", - benchmarks = [], - bugReports = "", - buildTypeRaw = Just Simple, - category = "", - copyright = "", - customFieldsPD = [], - dataDir = "", - dataFiles = [], - description = "", - executables = [], - extraDocFiles = [], - extraSrcFiles = [], - extraTmpFiles = [], - foreignLibs = [], - homepage = "", - library = Nothing, - licenseFiles = [], - licenseRaw = Left NONE, - maintainer = "", - package = PackageIdentifier - {pkgName = `PackageName "common"`, - pkgVersion = `mkVersion [0]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = [SourceRepo - {repoBranch = Nothing, - repoKind = RepoHead, - repoLocation = Just "https://github.com/hvr/-.git", - repoModule = Nothing, - repoSubdir = Nothing, - repoTag = Nothing, - repoType = Just Git}], - specVersionRaw = Right (OrLaterVersion `mkVersion [1,10]`), - stability = "", - subLibraries = [], - synopsis = "Common-stanza demo demo", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common.format 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/common.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -common.cabal:26:3: Unknown field: import. You should set cabal-version: 2.2 or larger to use common stanzas -common.cabal:17:3: Unknown field: import. You should set cabal-version: 2.2 or larger to use common stanzas -common.cabal:11:1: Ignoring section: common. You should set cabal-version: 2.2 or larger to use common stanzas. -cabal-version: >=1.10 -name: common -version: 0 -synopsis: Common-stanza demo demo -build-type: Simple - -source-repository head - type: git - location: https://github.com/hvr/-.git - -library - exposed-modules: - ElseIf - default-language: Haskell2010 - build-depends: - ghc-prim -any - -test-suite tests - type: exitcode-stdio-1.0 - main-is: Tests.hs - build-depends: - HUnit -any diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cxx-options-with-optimization.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cxx-options-with-optimization.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cxx-options-with-optimization.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cxx-options-with-optimization.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -cabal-version: 2.2 -category: test -description: test a build check involving C++-options field -license: BSD-3-Clause -maintainer: me@example.com -name: cxx-options-with-optimization -synopsis: test a build check -version: 1 - -library - 
build-depends: base >= 4.9 && <4.10 - cxx-options: -O2 - default-language: Haskell2010 - exposed-modules: Prelude - hs-source-dirs: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cxx-options-with-optimization.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cxx-options-with-optimization.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cxx-options-with-optimization.check 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/cxx-options-with-optimization.check 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -'cxx-options: -O[n]' is generally not needed. When building with optimisations Cabal automatically adds '-O2' for C++ code. Setting it yourself interferes with the --disable-optimization flag. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif2.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif2.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -cabal-version: 2.1 -name: elif -version: 0 -synopsis: The elif demo -build-type: Simple - -source-repository head - Type: git - Location: https://github.com/hvr/-.git - -library - default-language: Haskell2010 - exposed-modules: ElseIf - - if os(linux) - build-depends: unix - elif os(windows) - build-depends: Win32 - else - buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif2.expr 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif2.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif2.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif2.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,315 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `Var (OS Linux)`, - condBranchIfFalse = Just - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `Var (OS Windows)`, - condBranchIfFalse = Just - CondNode - {condTreeComponents = [], - condTreeConstraints = [], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = False, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "Win32"` - AnyVersion], 
- condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "Win32"` - AnyVersion], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}}], - condTreeConstraints = [], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - 
staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "unix"` - AnyVersion], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "unix"` - AnyVersion], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}}], - condTreeConstraints = [], - condTreeData = Library - {exposedModules = [`ModuleName ["ElseIf"]`], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = 
[], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "", - benchmarks = [], - bugReports = "", - buildTypeRaw = Just Simple, - category = "", - copyright = "", - customFieldsPD = [], - dataDir = "", - dataFiles = [], - description = "", - executables = [], - extraDocFiles = [], - extraSrcFiles = [], - extraTmpFiles = [], - foreignLibs = [], - homepage = "", - library = Nothing, - licenseFiles = [], - licenseRaw = Left NONE, - maintainer = "", - package = PackageIdentifier - {pkgName = `PackageName "elif"`, - pkgVersion = `mkVersion [0]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = [SourceRepo - {repoBranch = Nothing, - repoKind = RepoHead, - repoLocation = Just "https://github.com/hvr/-.git", - repoModule = Nothing, - repoSubdir = Nothing, - repoTag = Nothing, - repoType = Just Git}], - specVersionRaw = Left `mkVersion [2,1]`, - stability = "", - subLibraries = [], - synopsis = "The elif demo", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif2.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif2.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif2.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif2.format 1970-01-01 
00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -cabal-version: 2.1 -name: elif -version: 0 -synopsis: The elif demo -build-type: Simple - -source-repository head - type: git - location: https://github.com/hvr/-.git - -library - exposed-modules: - ElseIf - default-language: Haskell2010 - - if os(linux) - build-depends: - unix -any - else - - if os(windows) - build-depends: - Win32 -any - else - buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -name: elif -version: 0 -synopsis: The elif demo -build-type: Simple -cabal-version: >=1.10 - -source-repository head - Type: git - Location: https://github.com/hvr/-.git - -library - default-language: Haskell2010 - exposed-modules: ElseIf - - if os(linux) - build-depends: unix - elif os(windows) - build-depends: Win32 - else - buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,156 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `Var (OS 
Linux)`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "unix"` - AnyVersion], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "unix"` - AnyVersion], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}}], - condTreeConstraints = [], - condTreeData = Library - {exposedModules = [`ModuleName ["ElseIf"]`], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], 
- mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "", - benchmarks = [], - bugReports = "", - buildTypeRaw = Just Simple, - category = "", - copyright = "", - customFieldsPD = [], - dataDir = "", - dataFiles = [], - description = "", - executables = [], - extraDocFiles = [], - extraSrcFiles = [], - extraTmpFiles = [], - foreignLibs = [], - homepage = "", - library = Nothing, - licenseFiles = [], - licenseRaw = Left NONE, - maintainer = "", - package = PackageIdentifier - {pkgName = `PackageName "elif"`, - pkgVersion = `mkVersion [0]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = [SourceRepo - {repoBranch = Nothing, - repoKind = RepoHead, - repoLocation = Just "https://github.com/hvr/-.git", - repoModule = Nothing, - repoSubdir = Nothing, - repoTag = Nothing, - repoType = Just Git}], - specVersionRaw = Right (OrLaterVersion `mkVersion [1,10]`), - stability = "", - subLibraries = [], - synopsis = "The elif demo", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/elif.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -elif.cabal:19:3: invalid subsection "else" -elif.cabal:17:3: invalid 
subsection "elif". You should set cabal-version: 2.2 or larger to use elif-conditionals. -cabal-version: >=1.10 -name: elif -version: 0 -synopsis: The elif demo -build-type: Simple - -source-repository head - type: git - location: https://github.com/hvr/-.git - -library - exposed-modules: - ElseIf - default-language: Haskell2010 - - if os(linux) - build-depends: - unix -any diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/encoding-0.8.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/encoding-0.8.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/encoding-0.8.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/encoding-0.8.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -Name: encoding-wrong -Name: encoding -Version: 0.8 -cabal-version: >=1.12 --- double-dash files -extra-source-files: - -- this is comment - README.md "--" - "--" - -custom-setup - setup-depends: - base < 5, - ghc-prim - -Library - -- version range round trip is better - build-depends: base (> 4.4 || == 4.4) - - Exposed-Modules: - Data.Encoding - - -- options with spaces - GHC-Options: -Wall -O2 -threaded -rtsopts "-with-rtsopts=-N1 -A64m" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/encoding-0.8.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/encoding-0.8.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/encoding-0.8.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/encoding-0.8.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,118 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [], - 
condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "base"` - (VersionRangeParens - (UnionVersionRanges - (LaterVersion `mkVersion [4,4]`) - (ThisVersion `mkVersion [4,4]`)))], - condTreeData = Library - {exposedModules = [`ModuleName ["Data","Encoding"]`], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [_×_ - GHC - ["-Wall", - "-O2", - "-threaded", - "-rtsopts", - "-with-rtsopts=-N1 -A64m"]], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - (VersionRangeParens - (UnionVersionRanges - (LaterVersion - `mkVersion [4,4]`) - (ThisVersion - `mkVersion [4,4]`)))], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "", - benchmarks = [], - bugReports = "", - buildTypeRaw = Nothing, - category = "", - copyright = "", - customFieldsPD = [], - dataDir = "", - dataFiles = [], - description = "", - executables = [], - extraDocFiles = [], - extraSrcFiles = ["README.md", "--", "--"], - extraTmpFiles = [], - foreignLibs = [], 
- homepage = "", - library = Nothing, - licenseFiles = [], - licenseRaw = Left NONE, - maintainer = "", - package = PackageIdentifier - {pkgName = `PackageName "encoding"`, - pkgVersion = `mkVersion [0,8]`}, - pkgUrl = "", - setupBuildInfo = Just - SetupBuildInfo - {defaultSetupDepends = False, - setupDepends = [Dependency - `PackageName "base"` - (EarlierVersion `mkVersion [5]`), - Dependency - `PackageName "ghc-prim"` - AnyVersion]}, - sourceRepos = [], - specVersionRaw = Right (OrLaterVersion `mkVersion [1,12]`), - stability = "", - subLibraries = [], - synopsis = "", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/encoding-0.8.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/encoding-0.8.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/encoding-0.8.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/encoding-0.8.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -encoding-0.8.cabal:1:1: The field "name" is specified more than once at positions 1:1, 2:1 -cabal-version: >=1.12 -name: encoding -version: 0.8 -extra-source-files: - README.md - "--" - "--" - -custom-setup - setup-depends: base <5, - ghc-prim -any - -library - exposed-modules: - Data.Encoding - ghc-options: -Wall -O2 -threaded -rtsopts "-with-rtsopts=-N1 -A64m" - build-depends: - base (>4.4 || ==4.4) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/extensions-paths-5054.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/extensions-paths-5054.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/extensions-paths-5054.cabal 2018-10-17 15:59:04.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/extensions-paths-5054.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,39 +0,0 @@ -name: extensions-paths -version: 5054 -category: Test -maintainer: Oleg Grenrus -license: BSD3 -license-file: LICENSe -synopsis: Paths_pkg module + "bad" extensions + old cabal -description: - Only cabal-version: 2.2 or later will build Paths_pkg ok with - - * RebindableSyntax and - - * OverloadedLists or OverloadedStrings - - `fromList` or `fromString` will be out-of-scope when compiling Paths_ module. - - Other extensions (like NoImplicitPrelude) were handled before -build-type: Simple -cabal-version: 1.12 - -library - default-language: Haskell2010 - exposed-modules: Issue Paths_extensions_paths - default-extensions: - RebindableSyntax - OverloadedStrings - -test-suite tests - default-language: Haskell2010 - main-is: Test.hs - type: exitcode-stdio-1.0 - if os(linux) - other-modules: Paths_extensions_paths - else - buildable: False - - default-extensions: - OverloadedLists - RebindableSyntax diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/extensions-paths-5054.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/extensions-paths-5054.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/extensions-paths-5054.check 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/extensions-paths-5054.check 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -The package uses RebindableSyntax with OverloadedStrings or OverloadedLists in default-extensions, and also Paths_ autogen module. That configuration is known to cause compile failures with Cabal < 2.2. To use these default-extensions with Paths_ autogen module specify at least 'cabal-version: 2.2'. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/generics-sop.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/generics-sop.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/generics-sop.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/generics-sop.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,128 +0,0 @@ -name: generics-sop -version: 0.3.1.0 -synopsis: Generic Programming using True Sums of Products -description: - A library to support the definition of generic functions. - Datatypes are viewed in a uniform, structured way: - the choice between constructors is represented using an n-ary - sum, and the arguments of each constructor are represented using - an n-ary product. - . - The module "Generics.SOP" is the main module of this library and contains - more detailed documentation. - . - Examples of using this library are provided by the following - packages: - . - * @@ basic examples, - . - * @@ generic pretty printing, - . - * @@ generically computed lenses, - . - * @@ generic JSON conversions. - . - A detailed description of the ideas behind this library is provided by - the paper: - . - * Edsko de Vries and Andres Löh. - . - Workshop on Generic Programming (WGP) 2014. - . 
-license: BSD3 -license-file: LICENSE -author: Edsko de Vries , Andres Löh -maintainer: andres@well-typed.com -category: Generics -build-type: Custom -cabal-version: >=1.10 -extra-source-files: CHANGELOG.md -tested-with: GHC == 7.8.4, GHC == 7.10.3, GHC == 8.0.1, GHC == 8.0.2, GHC == 8.2.1, GHC == 8.3.* - -custom-setup - setup-depends: - base, - Cabal, - cabal-doctest >= 1.0.2 && <1.1 - -source-repository head - type: git - location: https://github.com/well-typed/generics-sop - -library - exposed-modules: Generics.SOP - Generics.SOP.GGP - Generics.SOP.TH - Generics.SOP.Dict - Generics.SOP.Type.Metadata - -- exposed via Generics.SOP: - Generics.SOP.BasicFunctors - Generics.SOP.Classes - Generics.SOP.Constraint - Generics.SOP.Instances - Generics.SOP.Metadata - Generics.SOP.NP - Generics.SOP.NS - Generics.SOP.Universe - Generics.SOP.Sing - build-depends: base >= 4.7 && < 5, - template-haskell >= 2.8 && < 2.13, - ghc-prim >= 0.3 && < 0.6, - deepseq >= 1.3 && < 1.5 - if !impl (ghc >= 7.8) - build-depends: tagged >= 0.7 && < 0.9 - if !impl (ghc >= 8.0) - build-depends: transformers-compat >= 0.3 && < 0.6, - transformers >= 0.3 && < 0.6 - - hs-source-dirs: src - default-language: Haskell2010 - ghc-options: -Wall - default-extensions: CPP - ScopedTypeVariables - TypeFamilies - RankNTypes - TypeOperators - GADTs - ConstraintKinds - MultiParamTypeClasses - TypeSynonymInstances - FlexibleInstances - FlexibleContexts - DeriveFunctor - DeriveFoldable - DeriveTraversable - DefaultSignatures - KindSignatures - DataKinds - FunctionalDependencies - if impl (ghc >= 7.8) - default-extensions: AutoDeriveTypeable - other-extensions: OverloadedStrings - PolyKinds - UndecidableInstances - TemplateHaskell - DeriveGeneric - StandaloneDeriving - if impl (ghc < 7.10) - other-extensions: OverlappingInstances - -test-suite doctests - type: exitcode-stdio-1.0 - main-is: doctests.hs - x-doctest-options: --preserve-it - hs-source-dirs: test - default-language: Haskell2010 - build-depends: base, 
- doctest >= 0.13 && <0.14 - ghc-options: -Wall -threaded - -test-suite generics-sop-examples - type: exitcode-stdio-1.0 - main-is: Example.hs - other-modules: HTransExample - hs-source-dirs: test - default-language: Haskell2010 - ghc-options: -Wall - build-depends: base >= 4.6 && < 5, - generics-sop diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/generics-sop.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/generics-sop.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/generics-sop.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/generics-sop.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,637 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `CNot (Var (Impl GHC (OrLaterVersion (mkVersion [7,8]))))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "tagged"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,7]`) - (EarlierVersion - `mkVersion [0,9]`))], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - 
installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "tagged"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,7]`) - (EarlierVersion - `mkVersion [0,9]`))], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}}, - CondBranch - {condBranchCondition = `CNot (Var (Impl GHC (OrLaterVersion (mkVersion [8,0]))))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "transformers-compat"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,3]`) - (EarlierVersion - `mkVersion [0,6]`)), - Dependency - `PackageName "transformers"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,3]`) - (EarlierVersion - `mkVersion [0,6]`))], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - 
`PackageName "transformers-compat"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,3]`) - (EarlierVersion - `mkVersion [0,6]`)), - Dependency - `PackageName "transformers"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,3]`) - (EarlierVersion - `mkVersion [0,6]`))], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}}, - CondBranch - {condBranchCondition = `Var (Impl GHC (OrLaterVersion (mkVersion [7,8])))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [EnableExtension - AutoDeriveTypeable], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}}, - CondBranch - {condBranchCondition = `Var (Impl GHC (EarlierVersion (mkVersion [7,10])))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - 
asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [EnableExtension - OverlappingInstances], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}}], - condTreeConstraints = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [4,7]`) - (EarlierVersion `mkVersion [5]`)), - Dependency - `PackageName "template-haskell"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [2,8]`) - (EarlierVersion `mkVersion [2,13]`)), - Dependency - `PackageName "ghc-prim"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [0,3]`) - (EarlierVersion `mkVersion [0,6]`)), - Dependency - `PackageName "deepseq"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [1,3]`) - (EarlierVersion `mkVersion [1,5]`))], - condTreeData = Library - {exposedModules = [`ModuleName ["Generics","SOP"]`, - `ModuleName ["Generics","SOP","GGP"]`, - `ModuleName ["Generics","SOP","TH"]`, - `ModuleName ["Generics","SOP","Dict"]`, - `ModuleName ["Generics","SOP","Type","Metadata"]`, - `ModuleName ["Generics","SOP","BasicFunctors"]`, - `ModuleName ["Generics","SOP","Classes"]`, - `ModuleName ["Generics","SOP","Constraint"]`, - `ModuleName 
["Generics","SOP","Instances"]`, - `ModuleName ["Generics","SOP","Metadata"]`, - `ModuleName ["Generics","SOP","NP"]`, - `ModuleName ["Generics","SOP","NS"]`, - `ModuleName ["Generics","SOP","Universe"]`, - `ModuleName ["Generics","SOP","Sing"]`], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [EnableExtension CPP, - EnableExtension - ScopedTypeVariables, - EnableExtension - TypeFamilies, - EnableExtension - RankNTypes, - EnableExtension - TypeOperators, - EnableExtension - GADTs, - EnableExtension - ConstraintKinds, - EnableExtension - MultiParamTypeClasses, - EnableExtension - TypeSynonymInstances, - EnableExtension - FlexibleInstances, - EnableExtension - FlexibleContexts, - EnableExtension - DeriveFunctor, - EnableExtension - DeriveFoldable, - EnableExtension - DeriveTraversable, - EnableExtension - DefaultSignatures, - EnableExtension - KindSignatures, - EnableExtension - DataKinds, - EnableExtension - FunctionalDependencies], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = ["src"], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [_×_ GHC ["-Wall"]], - otherExtensions = [EnableExtension - OverloadedStrings, - EnableExtension - PolyKinds, - EnableExtension - UndecidableInstances, - EnableExtension - TemplateHaskell, - EnableExtension - DeriveGeneric, - EnableExtension - StandaloneDeriving], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - 
targetBuildDepends = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [4,7]`) - (EarlierVersion - `mkVersion [5]`)), - Dependency - `PackageName "template-haskell"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [2,8]`) - (EarlierVersion - `mkVersion [2,13]`)), - Dependency - `PackageName "ghc-prim"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,3]`) - (EarlierVersion - `mkVersion [0,6]`)), - Dependency - `PackageName "deepseq"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [1,3]`) - (EarlierVersion - `mkVersion [1,5]`))], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [_×_ - `UnqualComponentName "doctests"` - CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency `PackageName "base"` AnyVersion, - Dependency - `PackageName "doctest"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [0,13]`) - (EarlierVersion `mkVersion [0,14]`))], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [_×_ - "x-doctest-options" - "--preserve-it"], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = ["test"], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [_×_ - GHC - ["-Wall", "-threaded"]], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = 
[], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - AnyVersion, - Dependency - `PackageName "doctest"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,13]`) - (EarlierVersion - `mkVersion [0,14]`))], - virtualModules = []}, - testInterface = TestSuiteExeV10 - `mkVersion [1,0]` "doctests.hs", - testName = `UnqualComponentName ""`}}, - _×_ - `UnqualComponentName "generics-sop-examples"` - CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [4,6]`) - (EarlierVersion `mkVersion [5]`)), - Dependency - `PackageName "generics-sop"` AnyVersion], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = ["test"], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [_×_ GHC ["-Wall"]], - otherExtensions = [], - otherLanguages = [], - otherModules = [`ModuleName ["HTransExample"]`], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [4,6]`) - (EarlierVersion - `mkVersion [5]`)), - Dependency - `PackageName "generics-sop"` - AnyVersion], - virtualModules = []}, - testInterface = TestSuiteExeV10 - `mkVersion [1,0]` "Example.hs", - testName = `UnqualComponentName ""`}}], - genPackageFlags 
= [], - packageDescription = PackageDescription - {author = "Edsko de Vries , Andres L\246h ", - benchmarks = [], - bugReports = "", - buildTypeRaw = Just Custom, - category = "Generics", - copyright = "", - customFieldsPD = [], - dataDir = "", - dataFiles = [], - description = concat - ["A library to support the definition of generic functions.\n", - "Datatypes are viewed in a uniform, structured way:\n", - "the choice between constructors is represented using an n-ary\n", - "sum, and the arguments of each constructor are represented using\n", - "an n-ary product.\n", - "\n", - "The module \"Generics.SOP\" is the main module of this library and contains\n", - "more detailed documentation.\n", - "\n", - "Examples of using this library are provided by the following\n", - "packages:\n", - "\n", - "* @@ basic examples,\n", - "\n", - "* @@ generic pretty printing,\n", - "\n", - "* @@ generically computed lenses,\n", - "\n", - "* @@ generic JSON conversions.\n", - "\n", - "A detailed description of the ideas behind this library is provided by\n", - "the paper:\n", - "\n", - "* Edsko de Vries and Andres L\246h.\n", - ".\n", - "Workshop on Generic Programming (WGP) 2014.\n"], - executables = [], - extraDocFiles = [], - extraSrcFiles = ["CHANGELOG.md"], - extraTmpFiles = [], - foreignLibs = [], - homepage = "", - library = Nothing, - licenseFiles = ["LICENSE"], - licenseRaw = Right BSD3, - maintainer = "andres@well-typed.com", - package = PackageIdentifier - {pkgName = `PackageName "generics-sop"`, - pkgVersion = `mkVersion [0,3,1,0]`}, - pkgUrl = "", - setupBuildInfo = Just - SetupBuildInfo - {defaultSetupDepends = False, - setupDepends = [Dependency - `PackageName "base"` AnyVersion, - Dependency - `PackageName "Cabal"` AnyVersion, - Dependency - `PackageName "cabal-doctest"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [1,0,2]`) - (EarlierVersion - `mkVersion [1,1]`))]}, - sourceRepos = [SourceRepo - {repoBranch = Nothing, - repoKind = RepoHead, - 
repoLocation = Just - "https://github.com/well-typed/generics-sop", - repoModule = Nothing, - repoSubdir = Nothing, - repoTag = Nothing, - repoType = Just Git}], - specVersionRaw = Right (OrLaterVersion `mkVersion [1,10]`), - stability = "", - subLibraries = [], - synopsis = "Generic Programming using True Sums of Products", - testSuites = [], - testedWith = [_×_ GHC (ThisVersion `mkVersion [7,8,4]`), - _×_ GHC (ThisVersion `mkVersion [7,10,3]`), - _×_ GHC (ThisVersion `mkVersion [8,0,1]`), - _×_ GHC (ThisVersion `mkVersion [8,0,2]`), - _×_ GHC (ThisVersion `mkVersion [8,2,1]`), - _×_ GHC (WildcardVersion `mkVersion [8,3]`)]}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/generics-sop.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/generics-sop.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/generics-sop.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/generics-sop.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,121 +0,0 @@ -cabal-version: >=1.10 -name: generics-sop -version: 0.3.1.0 -license: BSD3 -license-file: LICENSE -maintainer: andres@well-typed.com -author: Edsko de Vries , Andres Löh -tested-with: ghc ==7.8.4 ghc ==7.10.3 ghc ==8.0.1 ghc ==8.0.2 - ghc ==8.2.1 ghc ==8.3.* -synopsis: Generic Programming using True Sums of Products -description: - A library to support the definition of generic functions. - Datatypes are viewed in a uniform, structured way: - the choice between constructors is represented using an n-ary - sum, and the arguments of each constructor are represented using - an n-ary product. - . - The module "Generics.SOP" is the main module of this library and contains - more detailed documentation. - . - Examples of using this library are provided by the following - packages: - . 
- * @@ basic examples, - . - * @@ generic pretty printing, - . - * @@ generically computed lenses, - . - * @@ generic JSON conversions. - . - A detailed description of the ideas behind this library is provided by - the paper: - . - * Edsko de Vries and Andres Löh. - . - Workshop on Generic Programming (WGP) 2014. - . -category: Generics -build-type: Custom -extra-source-files: - CHANGELOG.md - -source-repository head - type: git - location: https://github.com/well-typed/generics-sop - -custom-setup - setup-depends: base -any, - Cabal -any, - cabal-doctest >=1.0.2 && <1.1 - -library - exposed-modules: - Generics.SOP - Generics.SOP.GGP - Generics.SOP.TH - Generics.SOP.Dict - Generics.SOP.Type.Metadata - Generics.SOP.BasicFunctors - Generics.SOP.Classes - Generics.SOP.Constraint - Generics.SOP.Instances - Generics.SOP.Metadata - Generics.SOP.NP - Generics.SOP.NS - Generics.SOP.Universe - Generics.SOP.Sing - hs-source-dirs: src - default-language: Haskell2010 - default-extensions: CPP ScopedTypeVariables TypeFamilies RankNTypes - TypeOperators GADTs ConstraintKinds MultiParamTypeClasses - TypeSynonymInstances FlexibleInstances FlexibleContexts - DeriveFunctor DeriveFoldable DeriveTraversable DefaultSignatures - KindSignatures DataKinds FunctionalDependencies - other-extensions: OverloadedStrings PolyKinds UndecidableInstances - TemplateHaskell DeriveGeneric StandaloneDeriving - ghc-options: -Wall - build-depends: - base >=4.7 && <5, - template-haskell >=2.8 && <2.13, - ghc-prim >=0.3 && <0.6, - deepseq >=1.3 && <1.5 - - if !impl(ghc >=7.8) - build-depends: - tagged >=0.7 && <0.9 - - if !impl(ghc >=8.0) - build-depends: - transformers-compat >=0.3 && <0.6, - transformers >=0.3 && <0.6 - - if impl(ghc >=7.8) - default-extensions: AutoDeriveTypeable - - if impl(ghc <7.10) - other-extensions: OverlappingInstances - -test-suite doctests - type: exitcode-stdio-1.0 - main-is: doctests.hs - hs-source-dirs: test - default-language: Haskell2010 - ghc-options: -Wall -threaded - 
x-doctest-options: --preserve-it - build-depends: - base -any, - doctest >=0.13 && <0.14 - -test-suite generics-sop-examples - type: exitcode-stdio-1.0 - main-is: Example.hs - hs-source-dirs: test - other-modules: - HTransExample - default-language: Haskell2010 - ghc-options: -Wall - build-depends: - base >=4.6 && <5, - generics-sop -any diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/ghc-option-j.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/ghc-option-j.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/ghc-option-j.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/ghc-option-j.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -cabal-version: 2.2 -name: ghc-option-j -version: 0 -license: BSD-2-Clause -synopsis: Test -description: Testy test. -maintainer: none -category: none - -library - exposed-modules: Foo - ghc-options: -Wall -j -Wno-all - default-language: Haskell2010 - -executable foo - main-is: Main.hs - ghc-shared-options: -Wall -j2 -Wno-all - default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/ghc-option-j.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/ghc-option-j.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/ghc-option-j.check 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/ghc-option-j.check 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -'ghc-options: -j[N]' can make sense for specific user's setup, but it is not appropriate for a distributed package. 
Alternatively, if you want to use this, make it conditional based on a Cabal configuration flag (with 'manual: True' and 'default: False') and enable that flag during development. -'ghc-options: -j[N]' can make sense for specific user's setup, but it is not appropriate for a distributed package. Alternatively, if you want to use this, make it conditional based on a Cabal configuration flag (with 'manual: True' and 'default: False') and enable that flag during development. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/haddock-api-2.18.1-check.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/haddock-api-2.18.1-check.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/haddock-api-2.18.1-check.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/haddock-api-2.18.1-check.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,147 +0,0 @@ -name: haddock-api -version: 2.18.1 -synopsis: A documentation-generation tool for Haskell libraries -description: Haddock is a documentation-generation tool for Haskell - libraries -license: BSD3 -license-file: LICENSE -author: Simon Marlow, David Waern -maintainer: Alex Biehl , Simon Hengel , Mateusz Kowalczyk -homepage: http://www.haskell.org/haddock/ -bug-reports: https://github.com/haskell/haddock/issues -copyright: (c) Simon Marlow, David Waern -category: Documentation -build-type: Simple -cabal-version: >= 1.10 - -extra-source-files: - CHANGES.md - -data-dir: - resources -data-files: - html/solarized.css - html/haddock-util.js - html/highlight.js - html/Classic.theme/haskell_icon.gif - html/Classic.theme/minus.gif - html/Classic.theme/plus.gif - html/Classic.theme/xhaddock.css - html/Ocean.std-theme/hslogo-16.png - html/Ocean.std-theme/minus.gif - html/Ocean.std-theme/ocean.css - 
html/Ocean.std-theme/plus.gif - html/Ocean.std-theme/synopsis.png - latex/haddock.sty - -library - default-language: Haskell2010 - - -- this package typically supports only single major versions - build-depends: base ^>= 4.10.0 - , Cabal ^>= 2.0.0 - , ghc ^>= 8.2 - , ghc-paths ^>= 0.1.0.9 - , haddock-library == 1.4.4.* - , xhtml ^>= 3000.2.2 - - -- Versions for the dependencies below are transitively pinned by - -- the non-reinstallable `ghc` package and hence need no version - -- bounds - build-depends: array - , bytestring - , containers - , deepseq - , directory - , filepath - , ghc-boot - , transformers - - hs-source-dirs: src - - ghc-options: -funbox-strict-fields -Wall -fwarn-tabs -O2 - ghc-options: -Wall - if impl(ghc >= 8.0) - ghc-options: -Wcompat -Wnoncanonical-monad-instances -Wnoncanonical-monadfail-instances - - exposed-modules: - Documentation.Haddock - - other-modules: - Haddock - Haddock.Interface - Haddock.Interface.Rename - Haddock.Interface.Create - Haddock.Interface.AttachInstances - Haddock.Interface.LexParseRn - Haddock.Interface.ParseModuleHeader - Haddock.Interface.Specialize - Haddock.Parser - Haddock.Utils - Haddock.Backends.Xhtml - Haddock.Backends.Xhtml.Decl - Haddock.Backends.Xhtml.DocMarkup - Haddock.Backends.Xhtml.Layout - Haddock.Backends.Xhtml.Names - Haddock.Backends.Xhtml.Themes - Haddock.Backends.Xhtml.Types - Haddock.Backends.Xhtml.Utils - Haddock.Backends.LaTeX - Haddock.Backends.HaddockDB - Haddock.Backends.Hoogle - Haddock.Backends.Hyperlinker - Haddock.Backends.Hyperlinker.Ast - Haddock.Backends.Hyperlinker.Parser - Haddock.Backends.Hyperlinker.Renderer - Haddock.Backends.Hyperlinker.Types - Haddock.Backends.Hyperlinker.Utils - Haddock.ModuleTree - Haddock.Types - Haddock.Doc - Haddock.Version - Haddock.InterfaceFile - Haddock.Options - Haddock.GhcUtils - Haddock.Syb - Haddock.Convert - Paths_haddock_api - - autogen-modules: - Paths_haddock_api - -test-suite spec - type: exitcode-stdio-1.0 - default-language: Haskell2010 - 
main-is: Spec.hs - ghc-options: -Wall - - hs-source-dirs: - test - , src - - -- NB: We only use a small subset of lib:haddock-api here, which - -- explains why this component has a smaller build-depends set - other-modules: - Haddock.Backends.Hyperlinker.ParserSpec - Haddock.Backends.Hyperlinker.Parser - Haddock.Backends.Hyperlinker.Types - - build-depends: - ghc ^>= 8.2 - , hspec ^>= 2.4.4 - , QuickCheck ^>= 2.10 - - -- Versions for the dependencies below are transitively pinned by - -- the non-reinstallable `ghc` package and hence need no version - -- bounds - build-depends: - base - , containers - - build-tool-depends: - hspec-discover:hspec-discover ^>= 2.4.4 - -source-repository head - type: git - subdir: haddock-api - location: https://github.com/haskell/haddock.git diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/haddock-api-2.18.1-check.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/haddock-api-2.18.1-check.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/haddock-api-2.18.1-check.check 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/haddock-api-2.18.1-check.check 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -ERROR: haddock-api-2.18.1-check.cabal:41:44: -unexpected major bounded version syntax (caret, ^>=) used. To use this syntax the package need to specify at least 'cabal-version: 2.0'. Alternatively, if broader compatibility is important then use: >=4.10.0 && <4.11 -expecting "." 
or "-" - -base ^>= 4.10.0 -, Cabal ^>= 2.0.0 -, ghc ^>= 8.2 -, ghc-paths ^>= 0.1.0.9 -, haddock-library == 1.4.4.* -, xhtml ^>= 3000.2.2 - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-5055.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-5055.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-5055.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-5055.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,25 +0,0 @@ -name: issue -version: 5055 -synopsis: no type in all branches -description: no type in all branches. -license: BSD3 -category: Test -build-type: Simple -cabal-version: >=2.0 - -executable flag-test-exe - main-is: FirstMain.hs - build-depends: base >= 4.8 && < 5 - default-language: Haskell2010 - -test-suite flag-cabal-test - -- TODO: fix so `type` can be on the top level - build-depends: base >= 4.8 && < 5 - default-language: Haskell2010 - - main-is: SecondMain.hs - type: exitcode-stdio-1.0 - - if os(windows) - main-is: FirstMain.hs - -- type: exitcode-stdio-1.0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-5055.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-5055.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-5055.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-5055.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,214 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [_×_ - `UnqualComponentName "flag-test-exe"` - CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName 
"base"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [4,8]`) - (EarlierVersion `mkVersion [5]`))], - condTreeData = Executable - {buildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [4,8]`) - (EarlierVersion - `mkVersion [5]`))], - virtualModules = []}, - exeName = `UnqualComponentName "flag-test-exe"`, - exeScope = ExecutablePublic, - modulePath = "FirstMain.hs"}}], - condForeignLibs = [], - condLibrary = Nothing, - condSubLibraries = [], - condTestSuites = [_×_ - `UnqualComponentName "flag-cabal-test"` - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `Var (OS Windows)`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = 
[], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - testInterface = TestSuiteUnsupported - (TestTypeUnknown - "" - `mkVersion []`), - testName = `UnqualComponentName ""`}}}], - condTreeConstraints = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [4,8]`) - (EarlierVersion `mkVersion [5]`))], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [4,8]`) - (EarlierVersion - `mkVersion [5]`))], - virtualModules = []}, - testInterface = TestSuiteExeV10 - `mkVersion [1,0]` "SecondMain.hs", - 
testName = `UnqualComponentName ""`}}], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "", - benchmarks = [], - bugReports = "", - buildTypeRaw = Just Simple, - category = "Test", - copyright = "", - customFieldsPD = [], - dataDir = "", - dataFiles = [], - description = "no type in all branches.", - executables = [], - extraDocFiles = [], - extraSrcFiles = [], - extraTmpFiles = [], - foreignLibs = [], - homepage = "", - library = Nothing, - licenseFiles = [], - licenseRaw = Right BSD3, - maintainer = "", - package = PackageIdentifier - {pkgName = `PackageName "issue"`, - pkgVersion = `mkVersion [5055]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = [], - specVersionRaw = Right (OrLaterVersion `mkVersion [2,0]`), - stability = "", - subLibraries = [], - synopsis = "no type in all branches", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-5055.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-5055.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-5055.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-5055.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,24 +0,0 @@ -cabal-version: >=2.0 -name: issue -version: 5055 -license: BSD3 -synopsis: no type in all branches -description: - no type in all branches. 
-category: Test -build-type: Simple - -executable flag-test-exe - main-is: FirstMain.hs - default-language: Haskell2010 - build-depends: - base >=4.8 && <5 - -test-suite flag-cabal-test - type: exitcode-stdio-1.0 - main-is: SecondMain.hs - default-language: Haskell2010 - build-depends: - base >=4.8 && <5 - - if os(windows) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -name: issue -version: 744 -synopsis: Package description parser interprets curly braces in the description field -description: Here is some C code: - . - > for(i = 0; i < 100; i++) { - > printf("%d\n",i); - > } - . - What does it look like? -build-type: Simple --- we test that check warns about this -cabal-version: >=1.12 - -library - default-language: Haskell2010 - exposed-modules: Issue - - -- Test for round-trip of ghc-options here too - -- See https://github.com/haskell/cabal/issues/2661 - ghc-options: -Wall -threaded "-with-rtsopts=-N -s -M1G -c" -rtsopts diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.check 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.check 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -No 'category' field. 
-No 'maintainer' field. -The 'license' field is missing or is NONE. -'ghc-options: -threaded' has no effect for libraries. It should only be used for executables. -'ghc-options: -rtsopts' has no effect for libraries. It should only be used for executables. -'ghc-options: -with-rtsopts' has no effect for libraries. It should only be used for executables. -Packages relying on Cabal 1.12 or later should specify a specific version of the Cabal spec of the form 'cabal-version: x.y'. Use 'cabal-version: 1.12'. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,104 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [], - condTreeConstraints = [], - condTreeData = Library - {exposedModules = [`ModuleName ["Issue"]`], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = 
[_×_ - GHC - ["-Wall", - "-threaded", - "-with-rtsopts=-N -s -M1G -c", - "-rtsopts"]], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "", - benchmarks = [], - bugReports = "", - buildTypeRaw = Just Simple, - category = "", - copyright = "", - customFieldsPD = [], - dataDir = "", - dataFiles = [], - description = concat - ["Here is some C code:\n", - "\n", - "> for(i = 0; i < 100; i++) {\n", - "> printf(\"%d\\n\",i);\n", - "> }\n", - "\n", - "What does it look like?"], - executables = [], - extraDocFiles = [], - extraSrcFiles = [], - extraTmpFiles = [], - foreignLibs = [], - homepage = "", - library = Nothing, - licenseFiles = [], - licenseRaw = Left NONE, - maintainer = "", - package = PackageIdentifier - {pkgName = `PackageName "issue"`, - pkgVersion = `mkVersion [744]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = [], - specVersionRaw = Right (OrLaterVersion `mkVersion [1,12]`), - stability = "", - subLibraries = [], - synopsis = "Package description parser interprets curly braces in the description field", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/issue-774.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ 
-cabal-version: >=1.12 -name: issue -version: 744 -synopsis: Package description parser interprets curly braces in the description field -description: - Here is some C code: - . - > for(i = 0; i < 100; i++) { - > printf("%d\n",i); - > } - . - What does it look like? -build-type: Simple - -library - exposed-modules: - Issue - default-language: Haskell2010 - ghc-options: -Wall -threaded "-with-rtsopts=-N -s -M1G -c" -rtsopts diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/leading-comma.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/leading-comma.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/leading-comma.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/leading-comma.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -cabal-version: 2.1 -name: leading-comma -version: 0 -synopsis: leading comma, trailing comma, or ordinary -build-type: Simple - -library - default-language: Haskell2010 - exposed-modules: LeadingComma - - build-depends: base, containers - - build-depends: - deepseq, - transformers, - - build-depends: - , filepath - , directory diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/leading-comma.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/leading-comma.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/leading-comma.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/leading-comma.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,114 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [], - condForeignLibs = [], - condLibrary = Just - CondNode - 
{condTreeComponents = [], - condTreeConstraints = [Dependency `PackageName "base"` AnyVersion, - Dependency `PackageName "containers"` AnyVersion, - Dependency `PackageName "deepseq"` AnyVersion, - Dependency `PackageName "transformers"` AnyVersion, - Dependency `PackageName "filepath"` AnyVersion, - Dependency `PackageName "directory"` AnyVersion], - condTreeData = Library - {exposedModules = [`ModuleName ["LeadingComma"]`], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - AnyVersion, - Dependency - `PackageName "containers"` - AnyVersion, - Dependency - `PackageName "deepseq"` - AnyVersion, - Dependency - `PackageName "transformers"` - AnyVersion, - Dependency - `PackageName "filepath"` - AnyVersion, - Dependency - `PackageName "directory"` - AnyVersion], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "", - benchmarks = [], - bugReports = "", - buildTypeRaw = Just Simple, - category = "", - copyright = "", - customFieldsPD = 
[], - dataDir = "", - dataFiles = [], - description = "", - executables = [], - extraDocFiles = [], - extraSrcFiles = [], - extraTmpFiles = [], - foreignLibs = [], - homepage = "", - library = Nothing, - licenseFiles = [], - licenseRaw = Left NONE, - maintainer = "", - package = PackageIdentifier - {pkgName = `PackageName "leading-comma"`, - pkgVersion = `mkVersion [0]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = [], - specVersionRaw = Left `mkVersion [2,1]`, - stability = "", - subLibraries = [], - synopsis = "leading comma, trailing comma, or ordinary", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/leading-comma.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/leading-comma.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/leading-comma.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/leading-comma.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -cabal-version: 2.1 -name: leading-comma -version: 0 -synopsis: leading comma, trailing comma, or ordinary -build-type: Simple - -library - exposed-modules: - LeadingComma - default-language: Haskell2010 - build-depends: - base -any, - containers -any, - deepseq -any, - transformers -any, - filepath -any, - directory -any diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/MiniAgda.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/MiniAgda.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/MiniAgda.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/MiniAgda.cabal 1970-01-01 
00:00:00.000000000 +0000 @@ -1,89 +0,0 @@ -name: MiniAgda -version: 0.2017.02.18 -build-type: Simple -cabal-version: 1.22 -license: OtherLicense -license-file: LICENSE -author: Andreas Abel and Karl Mehltretter -maintainer: Andreas Abel -homepage: http://www.tcs.ifi.lmu.de/~abel/miniagda/ -bug-reports: https://github.com/andreasabel/miniagda/issues -category: Dependent types -synopsis: A toy dependently typed programming language with type-based termination. -description: - MiniAgda is a tiny dependently-typed programming language in the style - of Agda. It serves as a laboratory to test potential additions to the - language and type system of Agda. MiniAgda's termination checker is a - fusion of sized types and size-change termination and supports - coinduction. Equality incorporates eta-expansion at record and - singleton types. Function arguments can be declared as static; such - arguments are discarded during equality checking and compilation. - - Recent features include bounded size quantification and destructor - patterns for a more general handling of coinduction. 
- -tested-with: GHC == 7.6.3 - GHC == 7.8.4 - GHC == 7.10.3 - GHC == 8.0.1 - -extra-source-files: Makefile - -data-files: test/succeed/Makefile - test/succeed/*.ma - test/fail/Makefile - test/fail/*.ma - test/fail/*.err - test/fail/adm/*.ma - test/fail/adm/*.err - lib/*.ma -source-repository head - type: git - location: https://github.com/andreasabel/miniagda - -executable miniagda - hs-source-dirs: src - build-depends: array >= 0.3 && < 0.6, - base >= 4.6 && < 4.11, - containers >= 0.3 && < 0.6, - haskell-src-exts >= 1.17 && < 1.18, - mtl >= 2.2.0.1 && < 2.3, - pretty >= 1.0 && < 1.2 - build-tools: happy >= 1.15 && < 2, - alex >= 3.0 && < 4 - default-language: Haskell98 - default-extensions: CPP - MultiParamTypeClasses - TypeSynonymInstances - FlexibleInstances - FlexibleContexts - GeneralizedNewtypeDeriving - NoMonomorphismRestriction - PatternGuards - TupleSections - NamedFieldPuns - main-is: Main.hs - other-modules: Abstract - Collection - Concrete - Eval - Extract - HsSyntax - Lexer - Main - Parser - Polarity - PrettyTCM - ScopeChecker - Semiring - SparseMatrix - TCM - Termination - ToHaskell - Tokens - TraceError - TreeShapedOrder - TypeChecker - Util - Value - Warshall diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/MiniAgda.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/MiniAgda.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/MiniAgda.check 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/MiniAgda.check 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -MiniAgda.cabal:0:0: Version digit with leading zero. Use cabal-version: 2.0 or later to write such versions. 
For more information see https://github.com/haskell/cabal/issues/5092 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -name: ç„¡ -version: 0 -synopsis: The canonical non-package ç„¡ -build-type: Simple -cabal-version: >=1.10 -x-ç„¡: ç„¡ - -source-repository head - Type: git - Location: https://github.com/hvr/-.git - -flag ç„¡ - description: ç„¡ - -library - default-language: Haskell2010 - - exposed-modules: Ω - - if !flag(ç„¡) - buildable:False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.check 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.check 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -No 'category' field. -No 'maintainer' field. -No 'description' field. -The 'license' field is missing or is NONE. -Suspicious flag names: ç„¡. To avoid ambiguity in command line interfaces, flag shouldn't start with a dash. Also for better compatibility, flag names shouldn't contain non-ascii characters. -Non ascii custom fields: x-ç„¡. For better compatibility, custom field names shouldn't contain non-ascii characters. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,156 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `CNot (Var (Flag (FlagName "\\28961")))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = False, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}}], - condTreeConstraints = [], - condTreeData = 
Library - {exposedModules = [`ModuleName ["\\937"]`], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [], - genPackageFlags = [MkFlag - {flagDefault = True, - flagDescription = "\28961", - flagManual = False, - flagName = `FlagName "\\28961"`}], - packageDescription = PackageDescription - {author = "", - benchmarks = [], - bugReports = "", - buildTypeRaw = Just Simple, - category = "", - copyright = "", - customFieldsPD = [_×_ "x-\28961" "\28961"], - dataDir = "", - dataFiles = [], - description = "", - executables = [], - extraDocFiles = [], - extraSrcFiles = [], - extraTmpFiles = [], - foreignLibs = [], - homepage = "", - library = Nothing, - licenseFiles = [], - licenseRaw = Left NONE, - maintainer = "", - package = PackageIdentifier - {pkgName = `PackageName "\\28961"`, - pkgVersion = `mkVersion [0]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = [SourceRepo - {repoBranch = Nothing, - repoKind = RepoHead, - repoLocation = Just "https://github.com/hvr/-.git", - repoModule = 
Nothing, - repoSubdir = Nothing, - repoTag = Nothing, - repoType = Just Git}], - specVersionRaw = Right (OrLaterVersion `mkVersion [1,10]`), - stability = "", - subLibraries = [], - synopsis = "The canonical non-package \28961", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/nothing-unicode.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -cabal-version: >=1.10 -name: ç„¡ -version: 0 -synopsis: The canonical non-package ç„¡ -x-ç„¡: ç„¡ -build-type: Simple - -source-repository head - type: git - location: https://github.com/hvr/-.git - -flag ç„¡ - description: - ç„¡ - -library - exposed-modules: - Ω - default-language: Haskell2010 - - if !flag(ç„¡) - buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/noVersion.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/noVersion.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/noVersion.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/noVersion.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: noVersion -version: 0 -synopsis: -none in build-depends -build-type: Simple -cabal-version: 1.22 - -library - default-language: Haskell2010 - exposed-modules: ElseIf - - build-depends: bad-package -none diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/noVersion.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/noVersion.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/noVersion.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/noVersion.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,102 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "bad-package"` - (IntersectVersionRanges - (LaterVersion `mkVersion [1]`) - (EarlierVersion `mkVersion [1]`))], - condTreeData = Library - {exposedModules = [`ModuleName ["ElseIf"]`], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "bad-package"` - (IntersectVersionRanges - (LaterVersion - `mkVersion [1]`) - (EarlierVersion - `mkVersion [1]`))], - virtualModules = []}, - libExposed = True, - libName = 
Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "", - benchmarks = [], - bugReports = "", - buildTypeRaw = Just Simple, - category = "", - copyright = "", - customFieldsPD = [], - dataDir = "", - dataFiles = [], - description = "", - executables = [], - extraDocFiles = [], - extraSrcFiles = [], - extraTmpFiles = [], - foreignLibs = [], - homepage = "", - library = Nothing, - licenseFiles = [], - licenseRaw = Left NONE, - maintainer = "", - package = PackageIdentifier - {pkgName = `PackageName "noVersion"`, - pkgVersion = `mkVersion [0]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = [], - specVersionRaw = Left `mkVersion [1,22]`, - stability = "", - subLibraries = [], - synopsis = "-none in build-depends", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/noVersion.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/noVersion.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/noVersion.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/noVersion.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -cabal-version: 1.22 -name: noVersion -version: 0 -synopsis: -none in build-depends -build-type: Simple - -library - exposed-modules: - ElseIf - default-language: Haskell2010 - build-depends: - bad-package >1 && <1 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/Octree-0.5.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/Octree-0.5.cabal --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/Octree-0.5.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/Octree-0.5.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,44 +0,0 @@ -name: Octree -version: 0.5 -stability: beta -homepage: https://github.com/mgajda/octree -package-url: http://hackage.haskell.org/package/octree -synopsis: Simple unbalanced Octree for storing data about 3D points -description: Octree data structure is relatively shallow data structure for space partitioning. -category: Data -license: BSD3 -license-file: LICENSE - -author: Michal J. Gajda -copyright: Copyright by Michal J. Gajda '2012 -maintainer: mjgajda@googlemail.com -bug-reports: mailto:mjgajda@googlemail.com - - -build-type: Simple -cabal-version: >=1.8 -tested-with: GHC==7.0.4,GHC==7.4.1,GHC==7.4.2,GHC==7.6.0 - -source-repository head - type: git - location: git@github.com:mgajda/octree.git - -Library - build-depends: base>=4.0 && < 4.7, AC-Vector >= 2.3.0, QuickCheck >= 2.4.0 - exposed-modules: Data.Octree - other-modules: Data.Octree.Internal - exposed: True - extensions: ScopedTypeVariables - -Test-suite test_Octree - Type: exitcode-stdio-1.0 - Build-depends: base>=4.0 && < 4.7, AC-Vector >= 2.3.0, QuickCheck >= 2.4.0 - Main-is: tests/test_Octree.hs - -Test-suite readme -  type: exitcode-stdio-1.0 - -- We have a symlink: README.lhs -> README.md -  main-is: README.lhs - Build-depends: base>=4.0 && < 4.7, AC-Vector >= 2.3.0, QuickCheck >= 2.4.0, markdown-unlit -  ghc-options: -pgmL markdown-unlit - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/Octree-0.5.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/Octree-0.5.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/Octree-0.5.expr 2018-10-17 
15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/Octree-0.5.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,285 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [4,0]`) - (EarlierVersion `mkVersion [4,7]`)), - Dependency - `PackageName "AC-Vector"` - (OrLaterVersion `mkVersion [2,3,0]`), - Dependency - `PackageName "QuickCheck"` - (OrLaterVersion `mkVersion [2,4,0]`)], - condTreeData = Library - {exposedModules = [`ModuleName ["Data","Octree"]`], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [EnableExtension - ScopedTypeVariables], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [`ModuleName ["Data","Octree","Internal"]`], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [4,0]`) - (EarlierVersion - `mkVersion [4,7]`)), - Dependency - `PackageName "AC-Vector"` - (OrLaterVersion - `mkVersion [2,3,0]`), - Dependency - `PackageName "QuickCheck"` - (OrLaterVersion - `mkVersion 
[2,4,0]`)], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [_×_ - `UnqualComponentName "test_Octree"` - CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [4,0]`) - (EarlierVersion `mkVersion [4,7]`)), - Dependency - `PackageName "AC-Vector"` - (OrLaterVersion `mkVersion [2,3,0]`), - Dependency - `PackageName "QuickCheck"` - (OrLaterVersion `mkVersion [2,4,0]`)], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [4,0]`) - (EarlierVersion - `mkVersion [4,7]`)), - Dependency - `PackageName "AC-Vector"` - (OrLaterVersion - `mkVersion [2,3,0]`), - Dependency - `PackageName "QuickCheck"` - (OrLaterVersion - `mkVersion [2,4,0]`)], - virtualModules = []}, - testInterface = TestSuiteExeV10 - `mkVersion [1,0]` - "tests/test_Octree.hs", - testName = `UnqualComponentName ""`}}, - _×_ - `UnqualComponentName "readme"` - CondNode - {condTreeComponents 
= [], - condTreeConstraints = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [4,0]`) - (EarlierVersion `mkVersion [4,7]`)), - Dependency - `PackageName "AC-Vector"` - (OrLaterVersion `mkVersion [2,3,0]`), - Dependency - `PackageName "QuickCheck"` - (OrLaterVersion `mkVersion [2,4,0]`), - Dependency - `PackageName "markdown-unlit"` AnyVersion], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [_×_ - GHC - ["-pgmL", - "markdown-unlit"]], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [4,0]`) - (EarlierVersion - `mkVersion [4,7]`)), - Dependency - `PackageName "AC-Vector"` - (OrLaterVersion - `mkVersion [2,3,0]`), - Dependency - `PackageName "QuickCheck"` - (OrLaterVersion - `mkVersion [2,4,0]`), - Dependency - `PackageName "markdown-unlit"` - AnyVersion], - virtualModules = []}, - testInterface = TestSuiteExeV10 - `mkVersion [1,0]` "README.lhs", - testName = `UnqualComponentName ""`}}], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "Michal J. 
Gajda", - benchmarks = [], - bugReports = "mailto:mjgajda@googlemail.com", - buildTypeRaw = Just Simple, - category = "Data", - copyright = "Copyright by Michal J. Gajda '2012", - customFieldsPD = [], - dataDir = "", - dataFiles = [], - description = "Octree data structure is relatively shallow data structure for space partitioning.", - executables = [], - extraDocFiles = [], - extraSrcFiles = [], - extraTmpFiles = [], - foreignLibs = [], - homepage = "https://github.com/mgajda/octree", - library = Nothing, - licenseFiles = ["LICENSE"], - licenseRaw = Right BSD3, - maintainer = "mjgajda@googlemail.com", - package = PackageIdentifier - {pkgName = `PackageName "Octree"`, - pkgVersion = `mkVersion [0,5]`}, - pkgUrl = "http://hackage.haskell.org/package/octree", - setupBuildInfo = Nothing, - sourceRepos = [SourceRepo - {repoBranch = Nothing, - repoKind = RepoHead, - repoLocation = Just "git@github.com:mgajda/octree.git", - repoModule = Nothing, - repoSubdir = Nothing, - repoTag = Nothing, - repoType = Just Git}], - specVersionRaw = Right (OrLaterVersion `mkVersion [1,8]`), - stability = "beta", - subLibraries = [], - synopsis = "Simple unbalanced Octree for storing data about 3D points", - testSuites = [], - testedWith = [_×_ GHC (ThisVersion `mkVersion [7,0,4]`), - _×_ GHC (ThisVersion `mkVersion [7,4,1]`), - _×_ GHC (ThisVersion `mkVersion [7,4,2]`), - _×_ GHC (ThisVersion `mkVersion [7,6,0]`)]}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/Octree-0.5.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/Octree-0.5.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/Octree-0.5.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/Octree-0.5.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,52 +0,0 @@ -Octree-0.5.cabal:39:3: Non 
breaking spaces at 39:3, 41:3, 43:3 -cabal-version: >=1.8 -name: Octree -version: 0.5 -license: BSD3 -license-file: LICENSE -copyright: Copyright by Michal J. Gajda '2012 -maintainer: mjgajda@googlemail.com -author: Michal J. Gajda -stability: beta -tested-with: ghc ==7.0.4 ghc ==7.4.1 ghc ==7.4.2 ghc ==7.6.0 -homepage: https://github.com/mgajda/octree -package-url: http://hackage.haskell.org/package/octree -bug-reports: mailto:mjgajda@googlemail.com -synopsis: Simple unbalanced Octree for storing data about 3D points -description: - Octree data structure is relatively shallow data structure for space partitioning. -category: Data -build-type: Simple - -source-repository head - type: git - location: git@github.com:mgajda/octree.git - -library - exposed-modules: - Data.Octree - other-modules: - Data.Octree.Internal - extensions: ScopedTypeVariables - build-depends: - base >=4.0 && <4.7, - AC-Vector >=2.3.0, - QuickCheck >=2.4.0 - -test-suite test_Octree - type: exitcode-stdio-1.0 - main-is: tests/test_Octree.hs - build-depends: - base >=4.0 && <4.7, - AC-Vector >=2.3.0, - QuickCheck >=2.4.0 - -test-suite readme - type: exitcode-stdio-1.0 - main-is: README.lhs - ghc-options: -pgmL markdown-unlit - build-depends: - base >=4.0 && <4.7, - AC-Vector >=2.3.0, - QuickCheck >=2.4.0, - markdown-unlit -any diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-1.6-glob.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-1.6-glob.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-1.6-glob.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-1.6-glob.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,17 +0,0 @@ -cabal-version: >= 1.4 -name: pre-1dot6-glob -version: 0 -license: BSD3 -license-file: pre-1.6-glob.cabal -synopsis: no 
-description: none -build-type: Simple -category: Test -maintainer: none - -extra-source-files: - foo/*.hs - -library - exposed-modules: - Foo diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-1.6-glob.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-1.6-glob.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-1.6-glob.check 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-1.6-glob.check 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -In the 'extra-source-files' field: invalid file glob 'foo/*.hs'. Using star wildcards requires 'cabal-version: >= 1.6'. Alternatively if you require compatibility with earlier Cabal versions then list all the files explicitly. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-2.4-globstar.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-2.4-globstar.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-2.4-globstar.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-2.4-globstar.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -cabal-version: 2.2 -name: pre-3dot0-globstar -version: 0 -extra-source-files: - foo/**/*.hs -extra-doc-files: - foo/**/*.html -data-files: - foo/**/*.dat -license: BSD-3-Clause -synopsis: no -description: none -category: Test -maintainer: none - -library - default-language: Haskell2010 - exposed-modules: - Foo diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-2.4-globstar.check 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-2.4-globstar.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-2.4-globstar.check 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/pre-2.4-globstar.check 1970-01-01 00:00:00.000000000 +0000 @@ -1,3 +0,0 @@ -In the 'data-files' field: invalid file glob 'foo/**/*.dat'. Using the double-star syntax requires 'cabal-version: 2.4' or greater. Alternatively, for compatibility with earlier Cabal versions, list the included directories explicitly. -In the 'extra-source-files' field: invalid file glob 'foo/**/*.hs'. Using the double-star syntax requires 'cabal-version: 2.4' or greater. Alternatively, for compatibility with earlier Cabal versions, list the included directories explicitly. -In the 'extra-doc-files' field: invalid file glob 'foo/**/*.html'. Using the double-star syntax requires 'cabal-version: 2.4' or greater. Alternatively, for compatibility with earlier Cabal versions, list the included directories explicitly. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/shake.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/shake.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/shake.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/shake.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,402 +0,0 @@ -cabal-version: >= 1.18 -build-type: Simple -name: shake -version: 0.15.11 -license: BSD3 -license-file: LICENSE -category: Development, Shake -author: Neil Mitchell -maintainer: Neil Mitchell -copyright: Neil Mitchell 2011-2017 -synopsis: Build system library, like Make, but more accurate dependencies. 
-description: - Shake is a Haskell library for writing build systems - designed as a - replacement for @make@. See "Development.Shake" for an introduction, - including an example. Further examples are included in the Cabal tarball, - under the @Examples@ directory. The homepage contains links to a user - manual, an academic paper and further information: - - . - To use Shake the user writes a Haskell program - that imports "Development.Shake", defines some build rules, and calls - the 'Development.Shake.shakeArgs' function. Thanks to do notation and infix - operators, a simple Shake build system - is not too dissimilar from a simple Makefile. However, as build systems - get more complex, Shake is able to take advantage of the excellent - abstraction facilities offered by Haskell and easily support much larger - projects. The Shake library provides all the standard features available in other - build systems, including automatic parallelism and minimal rebuilds. - Shake also provides more accurate dependency tracking, including seamless - support for generated files, and dependencies on system information - (e.g. compiler version). 
-homepage: http://shakebuild.com -bug-reports: https://github.com/ndmitchell/shake/issues -tested-with: GHC==8.0.1, GHC==7.10.3, GHC==7.8.4, GHC==7.6.3, GHC==7.4.2 -extra-doc-files: - CHANGES.txt - README.md -extra-source-files: - src/Test/C/constants.c - src/Test/C/constants.h - src/Test/C/main.c - src/Test/MakeTutor/Makefile - src/Test/MakeTutor/hellofunc.c - src/Test/MakeTutor/hellomake.c - src/Test/MakeTutor/hellomake.h - src/Test/Tar/list.txt - src/Test/Ninja/*.ninja - src/Test/Ninja/subdir/*.ninja - src/Test/Ninja/*.output - src/Test/Progress/*.prog - src/Test/Tup/hello.c - src/Test/Tup/root.cfg - src/Test/Tup/newmath/root.cfg - src/Test/Tup/newmath/square.c - src/Test/Tup/newmath/square.h - src/Paths.hs - docs/Manual.md - docs/shake-progress.png - -data-files: - html/viz.js - html/profile.html - html/progress.html - html/shake.js - docs/manual/build.bat - docs/manual/Build.hs - docs/manual/build.sh - docs/manual/constants.c - docs/manual/constants.h - docs/manual/main.c - -source-repository head - type: git - location: https://github.com/ndmitchell/shake.git - -flag portable - default: False - manual: True - description: Obtain FileTime using portable functions - -library - default-language: Haskell2010 - hs-source-dirs: src - build-depends: - base >= 4.5, - directory, - hashable >= 1.1.2.3, - binary, - filepath, - process >= 1.1, - unordered-containers >= 0.2.1, - bytestring, - utf8-string >= 0.3, - time, - random, - js-jquery, - js-flot, - transformers >= 0.2, - extra >= 1.4.8, - deepseq >= 1.1 - - if flag(portable) - cpp-options: -DPORTABLE - if impl(ghc < 7.6) - build-depends: old-time - else - if !os(windows) - build-depends: unix >= 2.5.1 - if !os(windows) - build-depends: unix - - exposed-modules: - Development.Shake - Development.Shake.Classes - Development.Shake.Command - Development.Shake.Config - Development.Shake.FilePath - Development.Shake.Forward - Development.Shake.Rule - Development.Shake.Util - - other-modules: - Development.Ninja.Env - 
Development.Ninja.Lexer - Development.Ninja.Parse - Development.Ninja.Type - Development.Shake.Args - Development.Shake.ByteString - Development.Shake.Core - Development.Shake.CmdOption - Development.Shake.Database - Development.Shake.Demo - Development.Shake.Derived - Development.Shake.Errors - Development.Shake.FileInfo - Development.Shake.FilePattern - Development.Shake.Monad - Development.Shake.Pool - Development.Shake.Profile - Development.Shake.Progress - Development.Shake.Resource - Development.Shake.Rules.Directory - Development.Shake.Rules.File - Development.Shake.Rules.Files - Development.Shake.Rules.Oracle - Development.Shake.Rules.OrderOnly - Development.Shake.Rules.Rerun - Development.Shake.Shake - Development.Shake.Special - Development.Shake.Storage - Development.Shake.Types - Development.Shake.Value - General.Bilist - General.Binary - General.Cleanup - General.Concurrent - General.Extra - General.FileLock - General.Intern - General.Process - General.String - General.Template - General.Timing - Paths_shake - - -executable shake - default-language: Haskell2010 - hs-source-dirs: src - ghc-options: -main-is Run.main - main-is: Run.hs - ghc-options: -rtsopts - -- GHC bug 7646 means -threaded causes errors - if impl(ghc >= 7.8) - ghc-options: -threaded "-with-rtsopts=-I0 -qg -qb" - build-depends: - base == 4.*, - directory, - hashable >= 1.1.2.3, - binary, - filepath, - process >= 1.1, - unordered-containers >= 0.2.1, - bytestring, - utf8-string >= 0.3, - time, - random, - js-jquery, - js-flot, - transformers >= 0.2, - extra >= 1.4.8, - deepseq >= 1.1, - primitive - - if flag(portable) - cpp-options: -DPORTABLE - if impl(ghc < 7.6) - build-depends: old-time - else - if !os(windows) - build-depends: unix >= 2.5.1 - if !os(windows) - build-depends: unix - - other-modules: - Development.Make.All - Development.Make.Env - Development.Make.Parse - Development.Make.Rules - Development.Make.Type - Development.Ninja.All - Development.Ninja.Env - 
Development.Ninja.Lexer - Development.Ninja.Parse - Development.Ninja.Type - Development.Shake - Development.Shake.Args - Development.Shake.ByteString - Development.Shake.Classes - Development.Shake.CmdOption - Development.Shake.Command - Development.Shake.Core - Development.Shake.Database - Development.Shake.Demo - Development.Shake.Derived - Development.Shake.Errors - Development.Shake.FileInfo - Development.Shake.FilePath - Development.Shake.FilePattern - Development.Shake.Forward - Development.Shake.Monad - Development.Shake.Pool - Development.Shake.Profile - Development.Shake.Progress - Development.Shake.Resource - Development.Shake.Rule - Development.Shake.Rules.Directory - Development.Shake.Rules.File - Development.Shake.Rules.Files - Development.Shake.Rules.Oracle - Development.Shake.Rules.OrderOnly - Development.Shake.Rules.Rerun - Development.Shake.Shake - Development.Shake.Special - Development.Shake.Storage - Development.Shake.Types - Development.Shake.Value - General.Bilist - General.Binary - General.Cleanup - General.Concurrent - General.Extra - General.FileLock - General.Intern - General.Process - General.String - General.Template - General.Timing - Paths_shake - Run - - -test-suite shake-test - default-language: Haskell2010 - type: exitcode-stdio-1.0 - main-is: Test.hs - hs-source-dirs: src - - ghc-options: -main-is Test.main -rtsopts - if impl(ghc >= 7.6) - -- space leak introduced by -O1 in 7.4, see #445 - ghc-options: -with-rtsopts=-K1K - if impl(ghc >= 7.8) - -- GHC bug 7646 (fixed in 7.8) means -threaded causes errors - ghc-options: -threaded - - build-depends: - base == 4.*, - directory, - hashable >= 1.1.2.3, - binary, - filepath, - process >= 1.1, - unordered-containers >= 0.2.1, - bytestring, - utf8-string >= 0.3, - time, - random, - js-jquery, - js-flot, - transformers >= 0.2, - deepseq >= 1.1, - extra >= 1.4.8, - QuickCheck >= 2.0 - - if flag(portable) - cpp-options: -DPORTABLE - if impl(ghc < 7.6) - build-depends: old-time - else - if 
!os(windows) - build-depends: unix >= 2.5.1 - if !os(windows) - build-depends: unix - - other-modules: - Development.Make.All - Development.Make.Env - Development.Make.Parse - Development.Make.Rules - Development.Make.Type - Development.Ninja.All - Development.Ninja.Env - Development.Ninja.Lexer - Development.Ninja.Parse - Development.Ninja.Type - Development.Shake - Development.Shake.Args - Development.Shake.ByteString - Development.Shake.Classes - Development.Shake.CmdOption - Development.Shake.Command - Development.Shake.Config - Development.Shake.Core - Development.Shake.Database - Development.Shake.Demo - Development.Shake.Derived - Development.Shake.Errors - Development.Shake.FileInfo - Development.Shake.FilePath - Development.Shake.FilePattern - Development.Shake.Forward - Development.Shake.Monad - Development.Shake.Pool - Development.Shake.Profile - Development.Shake.Progress - Development.Shake.Resource - Development.Shake.Rule - Development.Shake.Rules.Directory - Development.Shake.Rules.File - Development.Shake.Rules.Files - Development.Shake.Rules.Oracle - Development.Shake.Rules.OrderOnly - Development.Shake.Rules.Rerun - Development.Shake.Shake - Development.Shake.Special - Development.Shake.Storage - Development.Shake.Types - Development.Shake.Util - Development.Shake.Value - General.Bilist - General.Binary - General.Cleanup - General.Concurrent - General.Extra - General.FileLock - General.Intern - General.Process - General.String - General.Template - General.Timing - Paths_shake - Run - Test.Assume - Test.Basic - Test.Benchmark - Test.C - Test.Cache - Test.Command - Test.Config - Test.Digest - Test.Directory - Test.Docs - Test.Errors - Test.FileLock - Test.FilePath - Test.FilePattern - Test.Files - Test.Forward - Test.Journal - Test.Lint - Test.Live - Test.Makefile - Test.Manual - Test.Match - Test.Monad - Test.Ninja - Test.Oracle - Test.OrderOnly - Test.Parallel - Test.Pool - Test.Progress - Test.Random - Test.Resources - Test.Self - Test.Tar - 
Test.Tup - Test.Type - Test.Unicode - Test.Util - Test.Verbosity - Test.Version diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/shake.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/shake.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/shake.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/shake.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,1720 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [_×_ - `UnqualComponentName "shake"` - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `Var (Impl GHC (OrLaterVersion (mkVersion [7,8])))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [], - condTreeData = Executable - {buildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [_×_ - GHC - ["-threaded", - "-with-rtsopts=-I0 -qg -qb"]], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - exeName = `UnqualComponentName "shake"`, - exeScope = 
ExecutablePublic, - modulePath = ""}}}, - CondBranch - {condBranchCondition = `Var (Flag (FlagName "portable"))`, - condBranchIfFalse = Just - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `CNot (Var (OS Windows))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "unix"` - (OrLaterVersion - `mkVersion [2,5,1]`)], - condTreeData = Executable - {buildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "unix"` - (OrLaterVersion - `mkVersion [2,5,1]`)], - virtualModules = []}, - exeName = `UnqualComponentName "shake"`, - exeScope = ExecutablePublic, - modulePath = ""}}}], - condTreeConstraints = [], - condTreeData = Executable - {buildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - 
extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - exeName = `UnqualComponentName "shake"`, - exeScope = ExecutablePublic, - modulePath = ""}}, - condBranchIfTrue = CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `Var (Impl GHC (EarlierVersion (mkVersion [7,6])))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "old-time"` - AnyVersion], - condTreeData = Executable - {buildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "old-time"` - AnyVersion], - virtualModules = []}, - exeName = `UnqualComponentName "shake"`, - exeScope = ExecutablePublic, - modulePath = ""}}}], - condTreeConstraints = [], - 
condTreeData = Executable - {buildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = ["-DPORTABLE"], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - exeName = `UnqualComponentName "shake"`, - exeScope = ExecutablePublic, - modulePath = ""}}}, - CondBranch - {condBranchCondition = `CNot (Var (OS Windows))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "unix"` - AnyVersion], - condTreeData = Executable - {buildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - 
otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "unix"` - AnyVersion], - virtualModules = []}, - exeName = `UnqualComponentName "shake"`, - exeScope = ExecutablePublic, - modulePath = ""}}}], - condTreeConstraints = [Dependency - `PackageName "base"` - (WildcardVersion `mkVersion [4]`), - Dependency `PackageName "directory"` AnyVersion, - Dependency - `PackageName "hashable"` - (OrLaterVersion `mkVersion [1,1,2,3]`), - Dependency `PackageName "binary"` AnyVersion, - Dependency `PackageName "filepath"` AnyVersion, - Dependency - `PackageName "process"` - (OrLaterVersion `mkVersion [1,1]`), - Dependency - `PackageName "unordered-containers"` - (OrLaterVersion `mkVersion [0,2,1]`), - Dependency `PackageName "bytestring"` AnyVersion, - Dependency - `PackageName "utf8-string"` - (OrLaterVersion `mkVersion [0,3]`), - Dependency `PackageName "time"` AnyVersion, - Dependency `PackageName "random"` AnyVersion, - Dependency `PackageName "js-jquery"` AnyVersion, - Dependency `PackageName "js-flot"` AnyVersion, - Dependency - `PackageName "transformers"` - (OrLaterVersion `mkVersion [0,2]`), - Dependency - `PackageName "extra"` - (OrLaterVersion `mkVersion [1,4,8]`), - Dependency - `PackageName "deepseq"` - (OrLaterVersion `mkVersion [1,1]`), - Dependency `PackageName "primitive"` AnyVersion], - condTreeData = Executable - {buildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - 
hsSourceDirs = ["src"], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [_×_ - GHC - ["-main-is", - "Run.main", - "-rtsopts"]], - otherExtensions = [], - otherLanguages = [], - otherModules = [`ModuleName ["Development","Make","All"]`, - `ModuleName ["Development","Make","Env"]`, - `ModuleName ["Development","Make","Parse"]`, - `ModuleName ["Development","Make","Rules"]`, - `ModuleName ["Development","Make","Type"]`, - `ModuleName ["Development","Ninja","All"]`, - `ModuleName ["Development","Ninja","Env"]`, - `ModuleName ["Development","Ninja","Lexer"]`, - `ModuleName ["Development","Ninja","Parse"]`, - `ModuleName ["Development","Ninja","Type"]`, - `ModuleName ["Development","Shake"]`, - `ModuleName ["Development","Shake","Args"]`, - `ModuleName ["Development","Shake","ByteString"]`, - `ModuleName ["Development","Shake","Classes"]`, - `ModuleName ["Development","Shake","CmdOption"]`, - `ModuleName ["Development","Shake","Command"]`, - `ModuleName ["Development","Shake","Core"]`, - `ModuleName ["Development","Shake","Database"]`, - `ModuleName ["Development","Shake","Demo"]`, - `ModuleName ["Development","Shake","Derived"]`, - `ModuleName ["Development","Shake","Errors"]`, - `ModuleName ["Development","Shake","FileInfo"]`, - `ModuleName ["Development","Shake","FilePath"]`, - `ModuleName ["Development","Shake","FilePattern"]`, - `ModuleName ["Development","Shake","Forward"]`, - `ModuleName ["Development","Shake","Monad"]`, - `ModuleName ["Development","Shake","Pool"]`, - `ModuleName ["Development","Shake","Profile"]`, - `ModuleName ["Development","Shake","Progress"]`, - `ModuleName ["Development","Shake","Resource"]`, - `ModuleName ["Development","Shake","Rule"]`, - `ModuleName ["Development","Shake","Rules","Directory"]`, - `ModuleName ["Development","Shake","Rules","File"]`, - `ModuleName ["Development","Shake","Rules","Files"]`, - `ModuleName 
["Development","Shake","Rules","Oracle"]`, - `ModuleName ["Development","Shake","Rules","OrderOnly"]`, - `ModuleName ["Development","Shake","Rules","Rerun"]`, - `ModuleName ["Development","Shake","Shake"]`, - `ModuleName ["Development","Shake","Special"]`, - `ModuleName ["Development","Shake","Storage"]`, - `ModuleName ["Development","Shake","Types"]`, - `ModuleName ["Development","Shake","Value"]`, - `ModuleName ["General","Bilist"]`, - `ModuleName ["General","Binary"]`, - `ModuleName ["General","Cleanup"]`, - `ModuleName ["General","Concurrent"]`, - `ModuleName ["General","Extra"]`, - `ModuleName ["General","FileLock"]`, - `ModuleName ["General","Intern"]`, - `ModuleName ["General","Process"]`, - `ModuleName ["General","String"]`, - `ModuleName ["General","Template"]`, - `ModuleName ["General","Timing"]`, - `ModuleName ["Paths_shake"]`, - `ModuleName ["Run"]`], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - (WildcardVersion - `mkVersion [4]`), - Dependency - `PackageName "directory"` - AnyVersion, - Dependency - `PackageName "hashable"` - (OrLaterVersion - `mkVersion [1,1,2,3]`), - Dependency - `PackageName "binary"` - AnyVersion, - Dependency - `PackageName "filepath"` - AnyVersion, - Dependency - `PackageName "process"` - (OrLaterVersion - `mkVersion [1,1]`), - Dependency - `PackageName "unordered-containers"` - (OrLaterVersion - `mkVersion [0,2,1]`), - Dependency - `PackageName "bytestring"` - AnyVersion, - Dependency - `PackageName "utf8-string"` - (OrLaterVersion - `mkVersion [0,3]`), - Dependency - `PackageName "time"` - AnyVersion, - Dependency - `PackageName "random"` - AnyVersion, - Dependency - `PackageName "js-jquery"` - AnyVersion, - Dependency - `PackageName "js-flot"` - AnyVersion, - Dependency - `PackageName "transformers"` - (OrLaterVersion - `mkVersion [0,2]`), - Dependency - `PackageName "extra"` - (OrLaterVersion - `mkVersion [1,4,8]`), - 
Dependency - `PackageName "deepseq"` - (OrLaterVersion - `mkVersion [1,1]`), - Dependency - `PackageName "primitive"` - AnyVersion], - virtualModules = []}, - exeName = `UnqualComponentName "shake"`, - exeScope = ExecutablePublic, - modulePath = "Run.hs"}}], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `Var (Flag (FlagName "portable"))`, - condBranchIfFalse = Just - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `CNot (Var (OS Windows))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "unix"` - (OrLaterVersion - `mkVersion [2,5,1]`)], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "unix"` - (OrLaterVersion - `mkVersion [2,5,1]`)], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}}], - condTreeConstraints = [], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules 
= [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condBranchIfTrue = CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `Var (Impl GHC (EarlierVersion (mkVersion [7,6])))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "old-time"` - AnyVersion], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], 
- otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "old-time"` - AnyVersion], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}}], - condTreeConstraints = [], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = ["-DPORTABLE"], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}}, - CondBranch - {condBranchCondition = `CNot (Var (OS Windows))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "unix"` - AnyVersion], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - 
defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "unix"` - AnyVersion], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}}], - condTreeConstraints = [Dependency - `PackageName "base"` - (OrLaterVersion `mkVersion [4,5]`), - Dependency `PackageName "directory"` AnyVersion, - Dependency - `PackageName "hashable"` - (OrLaterVersion `mkVersion [1,1,2,3]`), - Dependency `PackageName "binary"` AnyVersion, - Dependency `PackageName "filepath"` AnyVersion, - Dependency - `PackageName "process"` - (OrLaterVersion `mkVersion [1,1]`), - Dependency - `PackageName "unordered-containers"` - (OrLaterVersion `mkVersion [0,2,1]`), - Dependency `PackageName "bytestring"` AnyVersion, - Dependency - `PackageName "utf8-string"` - (OrLaterVersion `mkVersion [0,3]`), - Dependency `PackageName "time"` AnyVersion, - Dependency `PackageName "random"` AnyVersion, - Dependency `PackageName "js-jquery"` AnyVersion, - Dependency `PackageName "js-flot"` AnyVersion, - Dependency - `PackageName "transformers"` - (OrLaterVersion `mkVersion [0,2]`), - Dependency - `PackageName "extra"` - (OrLaterVersion `mkVersion [1,4,8]`), - Dependency - `PackageName "deepseq"` - (OrLaterVersion `mkVersion [1,1]`)], - condTreeData = Library - {exposedModules = [`ModuleName ["Development","Shake"]`, - `ModuleName ["Development","Shake","Classes"]`, - `ModuleName ["Development","Shake","Command"]`, - `ModuleName ["Development","Shake","Config"]`, - 
`ModuleName ["Development","Shake","FilePath"]`, - `ModuleName ["Development","Shake","Forward"]`, - `ModuleName ["Development","Shake","Rule"]`, - `ModuleName ["Development","Shake","Util"]`], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = ["src"], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [`ModuleName ["Development","Ninja","Env"]`, - `ModuleName ["Development","Ninja","Lexer"]`, - `ModuleName ["Development","Ninja","Parse"]`, - `ModuleName ["Development","Ninja","Type"]`, - `ModuleName ["Development","Shake","Args"]`, - `ModuleName ["Development","Shake","ByteString"]`, - `ModuleName ["Development","Shake","Core"]`, - `ModuleName ["Development","Shake","CmdOption"]`, - `ModuleName ["Development","Shake","Database"]`, - `ModuleName ["Development","Shake","Demo"]`, - `ModuleName ["Development","Shake","Derived"]`, - `ModuleName ["Development","Shake","Errors"]`, - `ModuleName ["Development","Shake","FileInfo"]`, - `ModuleName ["Development","Shake","FilePattern"]`, - `ModuleName ["Development","Shake","Monad"]`, - `ModuleName ["Development","Shake","Pool"]`, - `ModuleName ["Development","Shake","Profile"]`, - `ModuleName ["Development","Shake","Progress"]`, - `ModuleName ["Development","Shake","Resource"]`, - `ModuleName ["Development","Shake","Rules","Directory"]`, - `ModuleName 
["Development","Shake","Rules","File"]`, - `ModuleName ["Development","Shake","Rules","Files"]`, - `ModuleName ["Development","Shake","Rules","Oracle"]`, - `ModuleName ["Development","Shake","Rules","OrderOnly"]`, - `ModuleName ["Development","Shake","Rules","Rerun"]`, - `ModuleName ["Development","Shake","Shake"]`, - `ModuleName ["Development","Shake","Special"]`, - `ModuleName ["Development","Shake","Storage"]`, - `ModuleName ["Development","Shake","Types"]`, - `ModuleName ["Development","Shake","Value"]`, - `ModuleName ["General","Bilist"]`, - `ModuleName ["General","Binary"]`, - `ModuleName ["General","Cleanup"]`, - `ModuleName ["General","Concurrent"]`, - `ModuleName ["General","Extra"]`, - `ModuleName ["General","FileLock"]`, - `ModuleName ["General","Intern"]`, - `ModuleName ["General","Process"]`, - `ModuleName ["General","String"]`, - `ModuleName ["General","Template"]`, - `ModuleName ["General","Timing"]`, - `ModuleName ["Paths_shake"]`], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - (OrLaterVersion - `mkVersion [4,5]`), - Dependency - `PackageName "directory"` - AnyVersion, - Dependency - `PackageName "hashable"` - (OrLaterVersion - `mkVersion [1,1,2,3]`), - Dependency - `PackageName "binary"` - AnyVersion, - Dependency - `PackageName "filepath"` - AnyVersion, - Dependency - `PackageName "process"` - (OrLaterVersion - `mkVersion [1,1]`), - Dependency - `PackageName "unordered-containers"` - (OrLaterVersion - `mkVersion [0,2,1]`), - Dependency - `PackageName "bytestring"` - AnyVersion, - Dependency - `PackageName "utf8-string"` - (OrLaterVersion - `mkVersion [0,3]`), - Dependency - `PackageName "time"` - AnyVersion, - Dependency - `PackageName "random"` - AnyVersion, - Dependency - `PackageName "js-jquery"` - AnyVersion, - Dependency - `PackageName "js-flot"` - AnyVersion, - Dependency - `PackageName "transformers"` - (OrLaterVersion - `mkVersion 
[0,2]`), - Dependency - `PackageName "extra"` - (OrLaterVersion - `mkVersion [1,4,8]`), - Dependency - `PackageName "deepseq"` - (OrLaterVersion - `mkVersion [1,1]`)], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [_×_ - `UnqualComponentName "shake-test"` - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `Var (Impl GHC (OrLaterVersion (mkVersion [7,6])))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [_×_ - GHC - ["-with-rtsopts=-K1K"]], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - testInterface = TestSuiteUnsupported - (TestTypeUnknown - "" - `mkVersion []`), - testName = `UnqualComponentName ""`}}}, - CondBranch - {condBranchCondition = `Var (Impl GHC (OrLaterVersion (mkVersion [7,8])))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - 
autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [_×_ - GHC - ["-threaded"]], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - testInterface = TestSuiteUnsupported - (TestTypeUnknown - "" - `mkVersion []`), - testName = `UnqualComponentName ""`}}}, - CondBranch - {condBranchCondition = `Var (Flag (FlagName "portable"))`, - condBranchIfFalse = Just - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `CNot (Var (OS Windows))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "unix"` - (OrLaterVersion - `mkVersion [2,5,1]`)], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - 
jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "unix"` - (OrLaterVersion - `mkVersion [2,5,1]`)], - virtualModules = []}, - testInterface = TestSuiteUnsupported - (TestTypeUnknown - "" - `mkVersion []`), - testName = `UnqualComponentName ""`}}}], - condTreeConstraints = [], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - testInterface = TestSuiteUnsupported - (TestTypeUnknown - "" - `mkVersion []`), - testName = `UnqualComponentName ""`}}, - condBranchIfTrue = CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `Var (Impl GHC (EarlierVersion (mkVersion [7,6])))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "old-time"` - AnyVersion], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], 
- buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "old-time"` - AnyVersion], - virtualModules = []}, - testInterface = TestSuiteUnsupported - (TestTypeUnknown - "" - `mkVersion []`), - testName = `UnqualComponentName ""`}}}], - condTreeConstraints = [], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = ["-DPORTABLE"], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - testInterface = TestSuiteUnsupported - 
(TestTypeUnknown - "" - `mkVersion []`), - testName = `UnqualComponentName ""`}}}, - CondBranch - {condBranchCondition = `CNot (Var (OS Windows))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "unix"` - AnyVersion], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "unix"` - AnyVersion], - virtualModules = []}, - testInterface = TestSuiteUnsupported - (TestTypeUnknown - "" - `mkVersion []`), - testName = `UnqualComponentName ""`}}}], - condTreeConstraints = [Dependency - `PackageName "base"` - (WildcardVersion `mkVersion [4]`), - Dependency `PackageName "directory"` AnyVersion, - Dependency - `PackageName "hashable"` - (OrLaterVersion `mkVersion [1,1,2,3]`), - Dependency `PackageName "binary"` AnyVersion, - Dependency `PackageName "filepath"` AnyVersion, - Dependency - `PackageName "process"` - (OrLaterVersion `mkVersion [1,1]`), - Dependency - `PackageName "unordered-containers"` - (OrLaterVersion `mkVersion [0,2,1]`), - Dependency `PackageName "bytestring"` AnyVersion, - Dependency - `PackageName "utf8-string"` 
- (OrLaterVersion `mkVersion [0,3]`), - Dependency `PackageName "time"` AnyVersion, - Dependency `PackageName "random"` AnyVersion, - Dependency `PackageName "js-jquery"` AnyVersion, - Dependency `PackageName "js-flot"` AnyVersion, - Dependency - `PackageName "transformers"` - (OrLaterVersion `mkVersion [0,2]`), - Dependency - `PackageName "deepseq"` - (OrLaterVersion `mkVersion [1,1]`), - Dependency - `PackageName "extra"` - (OrLaterVersion `mkVersion [1,4,8]`), - Dependency - `PackageName "QuickCheck"` - (OrLaterVersion `mkVersion [2,0]`)], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = ["src"], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [_×_ - GHC - ["-main-is", - "Test.main", - "-rtsopts"]], - otherExtensions = [], - otherLanguages = [], - otherModules = [`ModuleName ["Development","Make","All"]`, - `ModuleName ["Development","Make","Env"]`, - `ModuleName ["Development","Make","Parse"]`, - `ModuleName ["Development","Make","Rules"]`, - `ModuleName ["Development","Make","Type"]`, - `ModuleName ["Development","Ninja","All"]`, - `ModuleName ["Development","Ninja","Env"]`, - `ModuleName ["Development","Ninja","Lexer"]`, - `ModuleName ["Development","Ninja","Parse"]`, - `ModuleName ["Development","Ninja","Type"]`, - `ModuleName ["Development","Shake"]`, - `ModuleName ["Development","Shake","Args"]`, - `ModuleName 
["Development","Shake","ByteString"]`, - `ModuleName ["Development","Shake","Classes"]`, - `ModuleName ["Development","Shake","CmdOption"]`, - `ModuleName ["Development","Shake","Command"]`, - `ModuleName ["Development","Shake","Config"]`, - `ModuleName ["Development","Shake","Core"]`, - `ModuleName ["Development","Shake","Database"]`, - `ModuleName ["Development","Shake","Demo"]`, - `ModuleName ["Development","Shake","Derived"]`, - `ModuleName ["Development","Shake","Errors"]`, - `ModuleName ["Development","Shake","FileInfo"]`, - `ModuleName ["Development","Shake","FilePath"]`, - `ModuleName ["Development","Shake","FilePattern"]`, - `ModuleName ["Development","Shake","Forward"]`, - `ModuleName ["Development","Shake","Monad"]`, - `ModuleName ["Development","Shake","Pool"]`, - `ModuleName ["Development","Shake","Profile"]`, - `ModuleName ["Development","Shake","Progress"]`, - `ModuleName ["Development","Shake","Resource"]`, - `ModuleName ["Development","Shake","Rule"]`, - `ModuleName ["Development","Shake","Rules","Directory"]`, - `ModuleName ["Development","Shake","Rules","File"]`, - `ModuleName ["Development","Shake","Rules","Files"]`, - `ModuleName ["Development","Shake","Rules","Oracle"]`, - `ModuleName ["Development","Shake","Rules","OrderOnly"]`, - `ModuleName ["Development","Shake","Rules","Rerun"]`, - `ModuleName ["Development","Shake","Shake"]`, - `ModuleName ["Development","Shake","Special"]`, - `ModuleName ["Development","Shake","Storage"]`, - `ModuleName ["Development","Shake","Types"]`, - `ModuleName ["Development","Shake","Util"]`, - `ModuleName ["Development","Shake","Value"]`, - `ModuleName ["General","Bilist"]`, - `ModuleName ["General","Binary"]`, - `ModuleName ["General","Cleanup"]`, - `ModuleName ["General","Concurrent"]`, - `ModuleName ["General","Extra"]`, - `ModuleName ["General","FileLock"]`, - `ModuleName ["General","Intern"]`, - `ModuleName ["General","Process"]`, - `ModuleName ["General","String"]`, - `ModuleName ["General","Template"]`, - 
`ModuleName ["General","Timing"]`, - `ModuleName ["Paths_shake"]`, - `ModuleName ["Run"]`, - `ModuleName ["Test","Assume"]`, - `ModuleName ["Test","Basic"]`, - `ModuleName ["Test","Benchmark"]`, - `ModuleName ["Test","C"]`, - `ModuleName ["Test","Cache"]`, - `ModuleName ["Test","Command"]`, - `ModuleName ["Test","Config"]`, - `ModuleName ["Test","Digest"]`, - `ModuleName ["Test","Directory"]`, - `ModuleName ["Test","Docs"]`, - `ModuleName ["Test","Errors"]`, - `ModuleName ["Test","FileLock"]`, - `ModuleName ["Test","FilePath"]`, - `ModuleName ["Test","FilePattern"]`, - `ModuleName ["Test","Files"]`, - `ModuleName ["Test","Forward"]`, - `ModuleName ["Test","Journal"]`, - `ModuleName ["Test","Lint"]`, - `ModuleName ["Test","Live"]`, - `ModuleName ["Test","Makefile"]`, - `ModuleName ["Test","Manual"]`, - `ModuleName ["Test","Match"]`, - `ModuleName ["Test","Monad"]`, - `ModuleName ["Test","Ninja"]`, - `ModuleName ["Test","Oracle"]`, - `ModuleName ["Test","OrderOnly"]`, - `ModuleName ["Test","Parallel"]`, - `ModuleName ["Test","Pool"]`, - `ModuleName ["Test","Progress"]`, - `ModuleName ["Test","Random"]`, - `ModuleName ["Test","Resources"]`, - `ModuleName ["Test","Self"]`, - `ModuleName ["Test","Tar"]`, - `ModuleName ["Test","Tup"]`, - `ModuleName ["Test","Type"]`, - `ModuleName ["Test","Unicode"]`, - `ModuleName ["Test","Util"]`, - `ModuleName ["Test","Verbosity"]`, - `ModuleName ["Test","Version"]`], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - (WildcardVersion - `mkVersion [4]`), - Dependency - `PackageName "directory"` - AnyVersion, - Dependency - `PackageName "hashable"` - (OrLaterVersion - `mkVersion [1,1,2,3]`), - Dependency - `PackageName "binary"` - AnyVersion, - Dependency - `PackageName "filepath"` - AnyVersion, - Dependency - `PackageName "process"` - (OrLaterVersion - `mkVersion [1,1]`), - Dependency - `PackageName "unordered-containers"` - 
(OrLaterVersion - `mkVersion [0,2,1]`), - Dependency - `PackageName "bytestring"` - AnyVersion, - Dependency - `PackageName "utf8-string"` - (OrLaterVersion - `mkVersion [0,3]`), - Dependency - `PackageName "time"` - AnyVersion, - Dependency - `PackageName "random"` - AnyVersion, - Dependency - `PackageName "js-jquery"` - AnyVersion, - Dependency - `PackageName "js-flot"` - AnyVersion, - Dependency - `PackageName "transformers"` - (OrLaterVersion - `mkVersion [0,2]`), - Dependency - `PackageName "deepseq"` - (OrLaterVersion - `mkVersion [1,1]`), - Dependency - `PackageName "extra"` - (OrLaterVersion - `mkVersion [1,4,8]`), - Dependency - `PackageName "QuickCheck"` - (OrLaterVersion - `mkVersion [2,0]`)], - virtualModules = []}, - testInterface = TestSuiteExeV10 - `mkVersion [1,0]` "Test.hs", - testName = `UnqualComponentName ""`}}], - genPackageFlags = [MkFlag - {flagDefault = False, - flagDescription = "Obtain FileTime using portable functions", - flagManual = True, - flagName = `FlagName "portable"`}], - packageDescription = PackageDescription - {author = "Neil Mitchell ", - benchmarks = [], - bugReports = "https://github.com/ndmitchell/shake/issues", - buildTypeRaw = Just Simple, - category = "Development, Shake", - copyright = "Neil Mitchell 2011-2017", - customFieldsPD = [], - dataDir = "", - dataFiles = ["html/viz.js", - "html/profile.html", - "html/progress.html", - "html/shake.js", - "docs/manual/build.bat", - "docs/manual/Build.hs", - "docs/manual/build.sh", - "docs/manual/constants.c", - "docs/manual/constants.h", - "docs/manual/main.c"], - description = concat - ["Shake is a Haskell library for writing build systems - designed as a\n", - "replacement for @make@. See \"Development.Shake\" for an introduction,\n", - "including an example. Further examples are included in the Cabal tarball,\n", - "under the @Examples@ directory. 
The homepage contains links to a user\n", - "manual, an academic paper and further information:\n", - "\n", - "\n", - "To use Shake the user writes a Haskell program\n", - "that imports \"Development.Shake\", defines some build rules, and calls\n", - "the 'Development.Shake.shakeArgs' function. Thanks to do notation and infix\n", - "operators, a simple Shake build system\n", - "is not too dissimilar from a simple Makefile. However, as build systems\n", - "get more complex, Shake is able to take advantage of the excellent\n", - "abstraction facilities offered by Haskell and easily support much larger\n", - "projects. The Shake library provides all the standard features available in other\n", - "build systems, including automatic parallelism and minimal rebuilds.\n", - "Shake also provides more accurate dependency tracking, including seamless\n", - "support for generated files, and dependencies on system information\n", - "(e.g. compiler version)."], - executables = [], - extraDocFiles = ["CHANGES.txt", "README.md"], - extraSrcFiles = ["src/Test/C/constants.c", - "src/Test/C/constants.h", - "src/Test/C/main.c", - "src/Test/MakeTutor/Makefile", - "src/Test/MakeTutor/hellofunc.c", - "src/Test/MakeTutor/hellomake.c", - "src/Test/MakeTutor/hellomake.h", - "src/Test/Tar/list.txt", - "src/Test/Ninja/*.ninja", - "src/Test/Ninja/subdir/*.ninja", - "src/Test/Ninja/*.output", - "src/Test/Progress/*.prog", - "src/Test/Tup/hello.c", - "src/Test/Tup/root.cfg", - "src/Test/Tup/newmath/root.cfg", - "src/Test/Tup/newmath/square.c", - "src/Test/Tup/newmath/square.h", - "src/Paths.hs", - "docs/Manual.md", - "docs/shake-progress.png"], - extraTmpFiles = [], - foreignLibs = [], - homepage = "http://shakebuild.com", - library = Nothing, - licenseFiles = ["LICENSE"], - licenseRaw = Right BSD3, - maintainer = "Neil Mitchell ", - package = PackageIdentifier - {pkgName = `PackageName "shake"`, - pkgVersion = `mkVersion [0,15,11]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = 
[SourceRepo - {repoBranch = Nothing, - repoKind = RepoHead, - repoLocation = Just - "https://github.com/ndmitchell/shake.git", - repoModule = Nothing, - repoSubdir = Nothing, - repoTag = Nothing, - repoType = Just Git}], - specVersionRaw = Right (OrLaterVersion `mkVersion [1,18]`), - stability = "", - subLibraries = [], - synopsis = "Build system library, like Make, but more accurate dependencies.", - testSuites = [], - testedWith = [_×_ GHC (ThisVersion `mkVersion [8,0,1]`), - _×_ GHC (ThisVersion `mkVersion [7,10,3]`), - _×_ GHC (ThisVersion `mkVersion [7,8,4]`), - _×_ GHC (ThisVersion `mkVersion [7,6,3]`), - _×_ GHC (ThisVersion `mkVersion [7,4,2]`)]}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/shake.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/shake.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/shake.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/shake.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,411 +0,0 @@ -cabal-version: >=1.18 -name: shake -version: 0.15.11 -license: BSD3 -license-file: LICENSE -copyright: Neil Mitchell 2011-2017 -maintainer: Neil Mitchell -author: Neil Mitchell -tested-with: ghc ==8.0.1 ghc ==7.10.3 ghc ==7.8.4 ghc ==7.6.3 - ghc ==7.4.2 -homepage: http://shakebuild.com -bug-reports: https://github.com/ndmitchell/shake/issues -synopsis: Build system library, like Make, but more accurate dependencies. -description: - Shake is a Haskell library for writing build systems - designed as a - replacement for @make@. See "Development.Shake" for an introduction, - including an example. Further examples are included in the Cabal tarball, - under the @Examples@ directory. The homepage contains links to a user - manual, an academic paper and further information: - - . 
- To use Shake the user writes a Haskell program - that imports "Development.Shake", defines some build rules, and calls - the 'Development.Shake.shakeArgs' function. Thanks to do notation and infix - operators, a simple Shake build system - is not too dissimilar from a simple Makefile. However, as build systems - get more complex, Shake is able to take advantage of the excellent - abstraction facilities offered by Haskell and easily support much larger - projects. The Shake library provides all the standard features available in other - build systems, including automatic parallelism and minimal rebuilds. - Shake also provides more accurate dependency tracking, including seamless - support for generated files, and dependencies on system information - (e.g. compiler version). -category: Development, Shake -build-type: Simple -data-files: - html/viz.js - html/profile.html - html/progress.html - html/shake.js - docs/manual/build.bat - docs/manual/Build.hs - docs/manual/build.sh - docs/manual/constants.c - docs/manual/constants.h - docs/manual/main.c -extra-source-files: - src/Test/C/constants.c - src/Test/C/constants.h - src/Test/C/main.c - src/Test/MakeTutor/Makefile - src/Test/MakeTutor/hellofunc.c - src/Test/MakeTutor/hellomake.c - src/Test/MakeTutor/hellomake.h - src/Test/Tar/list.txt - src/Test/Ninja/*.ninja - src/Test/Ninja/subdir/*.ninja - src/Test/Ninja/*.output - src/Test/Progress/*.prog - src/Test/Tup/hello.c - src/Test/Tup/root.cfg - src/Test/Tup/newmath/root.cfg - src/Test/Tup/newmath/square.c - src/Test/Tup/newmath/square.h - src/Paths.hs - docs/Manual.md - docs/shake-progress.png -extra-doc-files: CHANGES.txt - README.md - -source-repository head - type: git - location: https://github.com/ndmitchell/shake.git - -flag portable - description: - Obtain FileTime using portable functions - default: False - manual: True - -library - exposed-modules: - Development.Shake - Development.Shake.Classes - Development.Shake.Command - Development.Shake.Config - 
Development.Shake.FilePath - Development.Shake.Forward - Development.Shake.Rule - Development.Shake.Util - hs-source-dirs: src - other-modules: - Development.Ninja.Env - Development.Ninja.Lexer - Development.Ninja.Parse - Development.Ninja.Type - Development.Shake.Args - Development.Shake.ByteString - Development.Shake.Core - Development.Shake.CmdOption - Development.Shake.Database - Development.Shake.Demo - Development.Shake.Derived - Development.Shake.Errors - Development.Shake.FileInfo - Development.Shake.FilePattern - Development.Shake.Monad - Development.Shake.Pool - Development.Shake.Profile - Development.Shake.Progress - Development.Shake.Resource - Development.Shake.Rules.Directory - Development.Shake.Rules.File - Development.Shake.Rules.Files - Development.Shake.Rules.Oracle - Development.Shake.Rules.OrderOnly - Development.Shake.Rules.Rerun - Development.Shake.Shake - Development.Shake.Special - Development.Shake.Storage - Development.Shake.Types - Development.Shake.Value - General.Bilist - General.Binary - General.Cleanup - General.Concurrent - General.Extra - General.FileLock - General.Intern - General.Process - General.String - General.Template - General.Timing - Paths_shake - default-language: Haskell2010 - build-depends: - base >=4.5, - directory -any, - hashable >=1.1.2.3, - binary -any, - filepath -any, - process >=1.1, - unordered-containers >=0.2.1, - bytestring -any, - utf8-string >=0.3, - time -any, - random -any, - js-jquery -any, - js-flot -any, - transformers >=0.2, - extra >=1.4.8, - deepseq >=1.1 - - if flag(portable) - cpp-options: -DPORTABLE - - if impl(ghc <7.6) - build-depends: - old-time -any - else - - if !os(windows) - build-depends: - unix >=2.5.1 - - if !os(windows) - build-depends: - unix -any - -executable shake - main-is: Run.hs - hs-source-dirs: src - other-modules: - Development.Make.All - Development.Make.Env - Development.Make.Parse - Development.Make.Rules - Development.Make.Type - Development.Ninja.All - 
Development.Ninja.Env - Development.Ninja.Lexer - Development.Ninja.Parse - Development.Ninja.Type - Development.Shake - Development.Shake.Args - Development.Shake.ByteString - Development.Shake.Classes - Development.Shake.CmdOption - Development.Shake.Command - Development.Shake.Core - Development.Shake.Database - Development.Shake.Demo - Development.Shake.Derived - Development.Shake.Errors - Development.Shake.FileInfo - Development.Shake.FilePath - Development.Shake.FilePattern - Development.Shake.Forward - Development.Shake.Monad - Development.Shake.Pool - Development.Shake.Profile - Development.Shake.Progress - Development.Shake.Resource - Development.Shake.Rule - Development.Shake.Rules.Directory - Development.Shake.Rules.File - Development.Shake.Rules.Files - Development.Shake.Rules.Oracle - Development.Shake.Rules.OrderOnly - Development.Shake.Rules.Rerun - Development.Shake.Shake - Development.Shake.Special - Development.Shake.Storage - Development.Shake.Types - Development.Shake.Value - General.Bilist - General.Binary - General.Cleanup - General.Concurrent - General.Extra - General.FileLock - General.Intern - General.Process - General.String - General.Template - General.Timing - Paths_shake - Run - default-language: Haskell2010 - ghc-options: -main-is Run.main -rtsopts - build-depends: - base ==4.*, - directory -any, - hashable >=1.1.2.3, - binary -any, - filepath -any, - process >=1.1, - unordered-containers >=0.2.1, - bytestring -any, - utf8-string >=0.3, - time -any, - random -any, - js-jquery -any, - js-flot -any, - transformers >=0.2, - extra >=1.4.8, - deepseq >=1.1, - primitive -any - - if impl(ghc >=7.8) - ghc-options: -threaded "-with-rtsopts=-I0 -qg -qb" - - if flag(portable) - cpp-options: -DPORTABLE - - if impl(ghc <7.6) - build-depends: - old-time -any - else - - if !os(windows) - build-depends: - unix >=2.5.1 - - if !os(windows) - build-depends: - unix -any - -test-suite shake-test - type: exitcode-stdio-1.0 - main-is: Test.hs - 
hs-source-dirs: src - other-modules: - Development.Make.All - Development.Make.Env - Development.Make.Parse - Development.Make.Rules - Development.Make.Type - Development.Ninja.All - Development.Ninja.Env - Development.Ninja.Lexer - Development.Ninja.Parse - Development.Ninja.Type - Development.Shake - Development.Shake.Args - Development.Shake.ByteString - Development.Shake.Classes - Development.Shake.CmdOption - Development.Shake.Command - Development.Shake.Config - Development.Shake.Core - Development.Shake.Database - Development.Shake.Demo - Development.Shake.Derived - Development.Shake.Errors - Development.Shake.FileInfo - Development.Shake.FilePath - Development.Shake.FilePattern - Development.Shake.Forward - Development.Shake.Monad - Development.Shake.Pool - Development.Shake.Profile - Development.Shake.Progress - Development.Shake.Resource - Development.Shake.Rule - Development.Shake.Rules.Directory - Development.Shake.Rules.File - Development.Shake.Rules.Files - Development.Shake.Rules.Oracle - Development.Shake.Rules.OrderOnly - Development.Shake.Rules.Rerun - Development.Shake.Shake - Development.Shake.Special - Development.Shake.Storage - Development.Shake.Types - Development.Shake.Util - Development.Shake.Value - General.Bilist - General.Binary - General.Cleanup - General.Concurrent - General.Extra - General.FileLock - General.Intern - General.Process - General.String - General.Template - General.Timing - Paths_shake - Run - Test.Assume - Test.Basic - Test.Benchmark - Test.C - Test.Cache - Test.Command - Test.Config - Test.Digest - Test.Directory - Test.Docs - Test.Errors - Test.FileLock - Test.FilePath - Test.FilePattern - Test.Files - Test.Forward - Test.Journal - Test.Lint - Test.Live - Test.Makefile - Test.Manual - Test.Match - Test.Monad - Test.Ninja - Test.Oracle - Test.OrderOnly - Test.Parallel - Test.Pool - Test.Progress - Test.Random - Test.Resources - Test.Self - Test.Tar - Test.Tup - Test.Type - Test.Unicode - Test.Util - Test.Verbosity - 
Test.Version - default-language: Haskell2010 - ghc-options: -main-is Test.main -rtsopts - build-depends: - base ==4.*, - directory -any, - hashable >=1.1.2.3, - binary -any, - filepath -any, - process >=1.1, - unordered-containers >=0.2.1, - bytestring -any, - utf8-string >=0.3, - time -any, - random -any, - js-jquery -any, - js-flot -any, - transformers >=0.2, - deepseq >=1.1, - extra >=1.4.8, - QuickCheck >=2.0 - - if impl(ghc >=7.6) - ghc-options: -with-rtsopts=-K1K - - if impl(ghc >=7.8) - ghc-options: -threaded - - if flag(portable) - cpp-options: -DPORTABLE - - if impl(ghc <7.6) - build-depends: - old-time -any - else - - if !os(windows) - build-depends: - unix >=2.5.1 - - if !os(windows) - build-depends: - unix -any diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-1.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-1.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-1.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-1.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -cabal-version: 2.0 -name: spdx -version: 0 -synopsis: testing positive parsing of spdx identifiers -build-type: Simple -license: BSD3 - -library - default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-1.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-1.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-1.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-1.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,92 +0,0 @@ -GenericPackageDescription - 
{condBenchmarks = [], - condExecutables = [], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [], - condTreeConstraints = [], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "", - benchmarks = [], - bugReports = "", - buildTypeRaw = Just Simple, - category = "", - copyright = "", - customFieldsPD = [], - dataDir = "", - dataFiles = [], - description = "", - executables = [], - extraDocFiles = [], - extraSrcFiles = [], - extraTmpFiles = [], - foreignLibs = [], - homepage = "", - library = Nothing, - licenseFiles = [], - licenseRaw = Right BSD3, - maintainer = "", - package = PackageIdentifier - {pkgName = `PackageName "spdx"`, - pkgVersion = `mkVersion [0]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = [], - specVersionRaw = Left `mkVersion [2,0]`, - stability = "", - subLibraries = [], - synopsis = "testing positive 
parsing of spdx identifiers", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-1.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-1.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-1.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-1.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -cabal-version: 2.0 -name: spdx -version: 0 -license: BSD3 -synopsis: testing positive parsing of spdx identifiers -build-type: Simple - -library - default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-2.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-2.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -cabal-version: 2.2 -name: spdx -version: 0 -synopsis: testing positive parsing of spdx identifiers -build-type: Simple -license: AGPL-1.0 - -library - default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-2.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-2.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-2.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-2.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,93 
+0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [], - condTreeConstraints = [], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "", - benchmarks = [], - bugReports = "", - buildTypeRaw = Just Simple, - category = "", - copyright = "", - customFieldsPD = [], - dataDir = "", - dataFiles = [], - description = "", - executables = [], - extraDocFiles = [], - extraSrcFiles = [], - extraTmpFiles = [], - foreignLibs = [], - homepage = "", - library = Nothing, - licenseFiles = [], - licenseRaw = Left - (License (ELicense (ELicenseId AGPL_1_0) Nothing)), - maintainer = "", - package = PackageIdentifier - {pkgName = `PackageName "spdx"`, - pkgVersion = `mkVersion [0]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = [], - specVersionRaw = Left 
`mkVersion [2,2]`, - stability = "", - subLibraries = [], - synopsis = "testing positive parsing of spdx identifiers", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-2.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-2.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-2.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-2.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -cabal-version: 2.2 -name: spdx -version: 0 -license: AGPL-1.0 -synopsis: testing positive parsing of spdx identifiers -build-type: Simple - -library - default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-3.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-3.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-3.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-3.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -cabal-version: 2.4 -name: spdx -version: 0 -synopsis: testing positive parsing of spdx identifiers -build-type: Simple -license: AGPL-1.0-only - -library - default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-3.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-3.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-3.expr 2018-10-17 15:59:04.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-3.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,93 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [], - condTreeConstraints = [], - condTreeData = Library - {exposedModules = [], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "", - benchmarks = [], - bugReports = "", - buildTypeRaw = Just Simple, - category = "", - copyright = "", - customFieldsPD = [], - dataDir = "", - dataFiles = [], - description = "", - executables = [], - extraDocFiles = [], - extraSrcFiles = [], - extraTmpFiles = [], - foreignLibs = [], - homepage = "", - library = Nothing, - licenseFiles = [], - licenseRaw = Left - (License (ELicense (ELicenseId AGPL_1_0_only) Nothing)), - maintainer = "", - package = PackageIdentifier - 
{pkgName = `PackageName "spdx"`, - pkgVersion = `mkVersion [0]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = [], - specVersionRaw = Left `mkVersion [2,4]`, - stability = "", - subLibraries = [], - synopsis = "testing positive parsing of spdx identifiers", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-3.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-3.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-3.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/spdx-3.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -cabal-version: 2.4 -name: spdx -version: 0 -license: AGPL-1.0-only -synopsis: testing positive parsing of spdx identifiers -build-type: Simple - -library - default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/th-lift-instances.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/th-lift-instances.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/th-lift-instances.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/th-lift-instances.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,76 +0,0 @@ -name: th-lift-instances -version: 0.1.4 -x-revision: 1 -license: BSD3 -cabal-version: >= 1.10 -license-file: LICENSE -author: Benno Fünfstück -maintainer: Benno Fünfstück -stability: experimental -homepage: http://github.com/bennofs/th-lift-instances/ -bug-reports: http://github.com/bennofs/th-lift-instances/issues -copyright: Copyright (C) 2013-2014 Benno Fünfstück -synopsis: Lift instances for 
template-haskell for common data types. -description: Most data types in haskell platform do not have Lift instances. This package provides orphan instances - for containers, text, bytestring and vector. -build-type: Custom -category: Template Haskell - -extra-source-files: - .ghci - .gitignore - .travis.yml - .vim.custom - README.md - -source-repository head - type: git - location: https://github.com/bennofs/th-lift-instances.git - -library - hs-source-dirs: src - default-language: Haskell2010 - ghc-options: -Wall -fwarn-tabs - build-depends: - base >= 4.4 && < 5 - , template-haskell < 2.10 - , th-lift - , containers >= 0.4 && < 0.6 - , vector >= 0.9 && < 0.11 - , text >= 0.11 && < 1.3 - , bytestring >= 0.9 && < 0.11 - exposed-modules: - Instances.TH.Lift - other-extensions: TemplateHaskell - -test-suite tests - type: exitcode-stdio-1.0 - main-is: Main.hs - other-modules: - Data - default-language: Haskell2010 - build-depends: - base - , template-haskell <2.10 - , containers >= 0.4 && < 0.6 - , vector >= 0.9 && < 0.11 - , text >= 0.11 && < 1.2 - , bytestring >= 0.9 && < 0.11 - , th-lift-instances - , QuickCheck >= 2.6 && < 2.8 - hs-source-dirs: tests - other-extensions: TemplateHaskell - -test-suite doctests - type: exitcode-stdio-1.0 - main-is: doctests.hs - default-language: Haskell2010 - build-depends: - base - , directory >= 1.0 - , doctest >= 0.9.1 - , filepath - ghc-options: -Wall -threaded - if impl(ghc<7.6.1) - ghc-options: -Werror - hs-source-dirs: tests diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/th-lift-instances.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/th-lift-instances.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/th-lift-instances.expr 2018-10-17 15:59:04.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/th-lift-instances.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,431 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [4,4]`) - (EarlierVersion `mkVersion [5]`)), - Dependency - `PackageName "template-haskell"` - (EarlierVersion `mkVersion [2,10]`), - Dependency `PackageName "th-lift"` AnyVersion, - Dependency - `PackageName "containers"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [0,4]`) - (EarlierVersion `mkVersion [0,6]`)), - Dependency - `PackageName "vector"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [0,9]`) - (EarlierVersion `mkVersion [0,11]`)), - Dependency - `PackageName "text"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [0,11]`) - (EarlierVersion `mkVersion [1,3]`)), - Dependency - `PackageName "bytestring"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [0,9]`) - (EarlierVersion `mkVersion [0,11]`))], - condTreeData = Library - {exposedModules = [`ModuleName ["Instances","TH","Lift"]`], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = ["src"], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [_×_ - GHC - 
["-Wall", "-fwarn-tabs"]], - otherExtensions = [EnableExtension - TemplateHaskell], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [4,4]`) - (EarlierVersion - `mkVersion [5]`)), - Dependency - `PackageName "template-haskell"` - (EarlierVersion - `mkVersion [2,10]`), - Dependency - `PackageName "th-lift"` - AnyVersion, - Dependency - `PackageName "containers"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,4]`) - (EarlierVersion - `mkVersion [0,6]`)), - Dependency - `PackageName "vector"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,9]`) - (EarlierVersion - `mkVersion [0,11]`)), - Dependency - `PackageName "text"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,11]`) - (EarlierVersion - `mkVersion [1,3]`)), - Dependency - `PackageName "bytestring"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,9]`) - (EarlierVersion - `mkVersion [0,11]`))], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [_×_ - `UnqualComponentName "tests"` - CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency `PackageName "base"` AnyVersion, - Dependency - `PackageName "template-haskell"` - (EarlierVersion `mkVersion [2,10]`), - Dependency - `PackageName "containers"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [0,4]`) - (EarlierVersion `mkVersion [0,6]`)), - Dependency - `PackageName "vector"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [0,9]`) - (EarlierVersion `mkVersion [0,11]`)), - Dependency - `PackageName "text"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [0,11]`) - (EarlierVersion `mkVersion [1,2]`)), - Dependency - `PackageName "bytestring"` - (IntersectVersionRanges - 
(OrLaterVersion `mkVersion [0,9]`) - (EarlierVersion `mkVersion [0,11]`)), - Dependency - `PackageName "th-lift-instances"` AnyVersion, - Dependency - `PackageName "QuickCheck"` - (IntersectVersionRanges - (OrLaterVersion `mkVersion [2,6]`) - (EarlierVersion `mkVersion [2,8]`))], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = ["tests"], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [EnableExtension - TemplateHaskell], - otherLanguages = [], - otherModules = [`ModuleName ["Data"]`], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - AnyVersion, - Dependency - `PackageName "template-haskell"` - (EarlierVersion - `mkVersion [2,10]`), - Dependency - `PackageName "containers"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,4]`) - (EarlierVersion - `mkVersion [0,6]`)), - Dependency - `PackageName "vector"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,9]`) - (EarlierVersion - `mkVersion [0,11]`)), - Dependency - `PackageName "text"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,11]`) - (EarlierVersion - `mkVersion [1,2]`)), - Dependency - `PackageName "bytestring"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [0,9]`) - (EarlierVersion - `mkVersion [0,11]`)), - Dependency - `PackageName 
"th-lift-instances"` - AnyVersion, - Dependency - `PackageName "QuickCheck"` - (IntersectVersionRanges - (OrLaterVersion - `mkVersion [2,6]`) - (EarlierVersion - `mkVersion [2,8]`))], - virtualModules = []}, - testInterface = TestSuiteExeV10 - `mkVersion [1,0]` "Main.hs", - testName = `UnqualComponentName ""`}}, - _×_ - `UnqualComponentName "doctests"` - CondNode - {condTreeComponents = [CondBranch - {condBranchCondition = `Var (Impl GHC (EarlierVersion (mkVersion [7,6,1])))`, - condBranchIfFalse = Nothing, - condBranchIfTrue = CondNode - {condTreeComponents = [], - condTreeConstraints = [], - condTreeData = TestSuite - {testBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [_×_ - GHC - ["-Werror"]], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [], - virtualModules = []}, - testInterface = TestSuiteUnsupported - (TestTypeUnknown - "" - `mkVersion []`), - testName = `UnqualComponentName ""`}}}], - condTreeConstraints = [Dependency `PackageName "base"` AnyVersion, - Dependency - `PackageName "directory"` - (OrLaterVersion `mkVersion [1,0]`), - Dependency - `PackageName "doctest"` - (OrLaterVersion `mkVersion [0,9,1]`), - Dependency `PackageName "filepath"` AnyVersion], - condTreeData = TestSuite - {testBuildInfo = 
BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Just Haskell2010, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = ["tests"], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [_×_ - GHC - ["-Wall", "-threaded"]], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - AnyVersion, - Dependency - `PackageName "directory"` - (OrLaterVersion - `mkVersion [1,0]`), - Dependency - `PackageName "doctest"` - (OrLaterVersion - `mkVersion [0,9,1]`), - Dependency - `PackageName "filepath"` - AnyVersion], - virtualModules = []}, - testInterface = TestSuiteExeV10 - `mkVersion [1,0]` "doctests.hs", - testName = `UnqualComponentName ""`}}], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "Benno F\252nfst\252ck", - benchmarks = [], - bugReports = "http://github.com/bennofs/th-lift-instances/issues", - buildTypeRaw = Just Custom, - category = "Template Haskell", - copyright = "Copyright (C) 2013-2014 Benno F\252nfst\252ck", - customFieldsPD = [_×_ "x-revision" "1"], - dataDir = "", - dataFiles = [], - description = concat - ["Most data types in haskell platform do not have Lift instances. 
This package provides orphan instances\n", - "for containers, text, bytestring and vector."], - executables = [], - extraDocFiles = [], - extraSrcFiles = [".ghci", - ".gitignore", - ".travis.yml", - ".vim.custom", - "README.md"], - extraTmpFiles = [], - foreignLibs = [], - homepage = "http://github.com/bennofs/th-lift-instances/", - library = Nothing, - licenseFiles = ["LICENSE"], - licenseRaw = Right BSD3, - maintainer = "Benno F\252nfst\252ck ", - package = PackageIdentifier - {pkgName = `PackageName "th-lift-instances"`, - pkgVersion = `mkVersion [0,1,4]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = [SourceRepo - {repoBranch = Nothing, - repoKind = RepoHead, - repoLocation = Just - "https://github.com/bennofs/th-lift-instances.git", - repoModule = Nothing, - repoSubdir = Nothing, - repoTag = Nothing, - repoType = Just Git}], - specVersionRaw = Right (OrLaterVersion `mkVersion [1,10]`), - stability = "experimental", - subLibraries = [], - synopsis = "Lift instances for template-haskell for common data types.", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/th-lift-instances.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/th-lift-instances.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/th-lift-instances.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/th-lift-instances.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,78 +0,0 @@ -th-lift-instances.cabal:15:9: Tabs used as indentation at 15:9 -cabal-version: >=1.10 -name: th-lift-instances -version: 0.1.4 -license: BSD3 -license-file: LICENSE -copyright: Copyright (C) 2013-2014 Benno Fünfstück -maintainer: Benno Fünfstück -author: Benno Fünfstück -stability: experimental -homepage: 
http://github.com/bennofs/th-lift-instances/ -bug-reports: http://github.com/bennofs/th-lift-instances/issues -synopsis: Lift instances for template-haskell for common data types. -description: - Most data types in haskell platform do not have Lift instances. This package provides orphan instances - for containers, text, bytestring and vector. -category: Template Haskell -x-revision: 1 -build-type: Custom -extra-source-files: - .ghci - .gitignore - .travis.yml - .vim.custom - README.md - -source-repository head - type: git - location: https://github.com/bennofs/th-lift-instances.git - -library - exposed-modules: - Instances.TH.Lift - hs-source-dirs: src - default-language: Haskell2010 - other-extensions: TemplateHaskell - ghc-options: -Wall -fwarn-tabs - build-depends: - base >=4.4 && <5, - template-haskell <2.10, - th-lift -any, - containers >=0.4 && <0.6, - vector >=0.9 && <0.11, - text >=0.11 && <1.3, - bytestring >=0.9 && <0.11 - -test-suite tests - type: exitcode-stdio-1.0 - main-is: Main.hs - hs-source-dirs: tests - other-modules: - Data - default-language: Haskell2010 - other-extensions: TemplateHaskell - build-depends: - base -any, - template-haskell <2.10, - containers >=0.4 && <0.6, - vector >=0.9 && <0.11, - text >=0.11 && <1.2, - bytestring >=0.9 && <0.11, - th-lift-instances -any, - QuickCheck >=2.6 && <2.8 - -test-suite doctests - type: exitcode-stdio-1.0 - main-is: doctests.hs - hs-source-dirs: tests - default-language: Haskell2010 - ghc-options: -Wall -threaded - build-depends: - base -any, - directory >=1.0, - doctest >=0.9.1, - filepath -any - - if impl(ghc <7.6.1) - ghc-options: -Werror diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/wl-pprint-indef.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/wl-pprint-indef.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/wl-pprint-indef.cabal 2018-10-17 
15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/wl-pprint-indef.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -Name: wl-pprint-indef -Version: 1.2 -Cabal-Version: >=1.6 -Synopsis: The Wadler/Leijen Pretty Printer -Category: Text -Description: - This is a pretty printing library based on Wadler's paper "A Prettier - Printer". See the haddocks for full info. This version allows the - library user to declare overlapping instances of the 'Pretty' class. -License: BSD3 -License-file: LICENSE -Author: Daan Leijen -Maintainer: Noam Lewis -Build-Type: Simple - -Executable wl-pprint-string-example - Main-is: Main.hs - Hs-Source-Dirs: example-string - Other-Modules: StringImpl - Build-Depends: base < 5, - str-string >= 0.1.0.0, - wl-pprint-indef - Mixins: wl-pprint-indef requires (Text.PrettyPrint.Leijen.Str as StringImpl) - -Library - Exposed-Modules: Text.PrettyPrint.Leijen - Signatures: Text.PrettyPrint.Leijen.Str - Mixins: str-sig requires (Str as Text.PrettyPrint.Leijen.Str) - Build-Depends: base < 5, - str-sig >= 0.1.0.0 - -source-repository head - type: git - location: git@github.com:danidiaz/wl-pprint-indef.git diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/wl-pprint-indef.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/wl-pprint-indef.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/wl-pprint-indef.expr 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/wl-pprint-indef.expr 1970-01-01 00:00:00.000000000 +0000 @@ -1,182 +0,0 @@ -GenericPackageDescription - {condBenchmarks = [], - condExecutables = [_×_ - `UnqualComponentName "wl-pprint-string-example"` - CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "base"` - 
(EarlierVersion `mkVersion [5]`), - Dependency - `PackageName "str-string"` - (OrLaterVersion `mkVersion [0,1,0,0]`), - Dependency - `PackageName "wl-pprint-indef"` AnyVersion], - condTreeData = Executable - {buildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = ["example-string"], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [`ModuleName ["StringImpl"]`], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - (EarlierVersion - `mkVersion [5]`), - Dependency - `PackageName "str-string"` - (OrLaterVersion - `mkVersion [0,1,0,0]`), - Dependency - `PackageName "wl-pprint-indef"` - AnyVersion], - virtualModules = []}, - exeName = `UnqualComponentName "wl-pprint-string-example"`, - exeScope = ExecutablePublic, - modulePath = "Main.hs"}}], - condForeignLibs = [], - condLibrary = Just - CondNode - {condTreeComponents = [], - condTreeConstraints = [Dependency - `PackageName "base"` - (EarlierVersion `mkVersion [5]`), - Dependency - `PackageName "str-sig"` - (OrLaterVersion `mkVersion [0,1,0,0]`)], - condTreeData = Library - {exposedModules = [`ModuleName ["Text","PrettyPrint","Leijen"]`], - libBuildInfo = BuildInfo - {asmOptions = [], - asmSources = [], - autogenModules = [], - buildToolDepends = [], - buildTools = [], - buildable = True, - 
cSources = [], - ccOptions = [], - cmmOptions = [], - cmmSources = [], - cppOptions = [], - customFieldsBI = [], - cxxOptions = [], - cxxSources = [], - defaultExtensions = [], - defaultLanguage = Nothing, - extraBundledLibs = [], - extraFrameworkDirs = [], - extraGHCiLibs = [], - extraLibDirs = [], - extraLibFlavours = [], - extraLibs = [], - frameworks = [], - hsSourceDirs = [], - includeDirs = [], - includes = [], - installIncludes = [], - jsSources = [], - ldOptions = [], - mixins = [], - oldExtensions = [], - options = [], - otherExtensions = [], - otherLanguages = [], - otherModules = [], - pkgconfigDepends = [], - profOptions = [], - sharedOptions = [], - staticOptions = [], - targetBuildDepends = [Dependency - `PackageName "base"` - (EarlierVersion - `mkVersion [5]`), - Dependency - `PackageName "str-sig"` - (OrLaterVersion - `mkVersion [0,1,0,0]`)], - virtualModules = []}, - libExposed = True, - libName = Nothing, - reexportedModules = [], - signatures = []}}, - condSubLibraries = [], - condTestSuites = [], - genPackageFlags = [], - packageDescription = PackageDescription - {author = "Daan Leijen", - benchmarks = [], - bugReports = "", - buildTypeRaw = Just Simple, - category = "Text", - copyright = "", - customFieldsPD = [], - dataDir = "", - dataFiles = [], - description = concat - ["This is a pretty printing library based on Wadler's paper \"A Prettier\n", - "Printer\". See the haddocks for full info. 
This version allows the\n", - "library user to declare overlapping instances of the 'Pretty' class."], - executables = [], - extraDocFiles = [], - extraSrcFiles = [], - extraTmpFiles = [], - foreignLibs = [], - homepage = "", - library = Nothing, - licenseFiles = ["LICENSE"], - licenseRaw = Right BSD3, - maintainer = "Noam Lewis ", - package = PackageIdentifier - {pkgName = `PackageName "wl-pprint-indef"`, - pkgVersion = `mkVersion [1,2]`}, - pkgUrl = "", - setupBuildInfo = Nothing, - sourceRepos = [SourceRepo - {repoBranch = Nothing, - repoKind = RepoHead, - repoLocation = Just - "git@github.com:danidiaz/wl-pprint-indef.git", - repoModule = Nothing, - repoSubdir = Nothing, - repoTag = Nothing, - repoType = Just Git}], - specVersionRaw = Right (OrLaterVersion `mkVersion [1,6]`), - stability = "", - subLibraries = [], - synopsis = "The Wadler/Leijen Pretty Printer", - testSuites = [], - testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/wl-pprint-indef.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/wl-pprint-indef.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/regressions/wl-pprint-indef.format 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/regressions/wl-pprint-indef.format 1970-01-01 00:00:00.000000000 +0000 @@ -1,38 +0,0 @@ -wl-pprint-indef.cabal:28:3: The field "mixins" is available since Cabal [2,0] -wl-pprint-indef.cabal:27:3: The field "signatures" is available since Cabal [2,0] -wl-pprint-indef.cabal:23:3: The field "mixins" is available since Cabal [2,0] -cabal-version: >=1.6 -name: wl-pprint-indef -version: 1.2 -license: BSD3 -license-file: LICENSE -maintainer: Noam Lewis -author: Daan Leijen -synopsis: The Wadler/Leijen Pretty Printer -description: - This is a pretty printing library based on Wadler's paper "A Prettier - 
Printer". See the haddocks for full info. This version allows the - library user to declare overlapping instances of the 'Pretty' class. -category: Text -build-type: Simple - -source-repository head - type: git - location: git@github.com:danidiaz/wl-pprint-indef.git - -library - exposed-modules: - Text.PrettyPrint.Leijen - build-depends: - base <5, - str-sig >=0.1.0.0 - -executable wl-pprint-string-example - main-is: Main.hs - hs-source-dirs: example-string - other-modules: - StringImpl - build-depends: - base <5, - str-string >=0.1.0.0, - wl-pprint-indef -any diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/bom.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/bom.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/bom.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/bom.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -name: bom -version: 1 -cabal-version: >= 1.6 - -library - build-depends: base >= 4.9 && <4.10 - hs-source-dirs: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/bool.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/bool.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/bool.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/bool.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -name: bool -version: 1 -cabal-version: >= 1.6 - -flag foo - manual: true - -library - build-depends: base >= 4.9 && <4.10 - if flag(foo) - build-depends: containers - hs-source-dirs: . 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/deprecatedfield.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/deprecatedfield.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/deprecatedfield.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/deprecatedfield.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -name: deprecatedfield -version: 1 -cabal-version: >= 1.6 - -library - build-depends: base >= 4.9 && <4.10 - hs-source-dir: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/doubledash.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/doubledash.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/doubledash.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/doubledash.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: bool -version: 1 -cabal-version: >= 1.6 -extra-source-files: - README.md -- we include it - -library - build-depends: base >= 4.9 && <4.10 - hs-source-dirs: . 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/extratestmodule.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/extratestmodule.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/extratestmodule.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/extratestmodule.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: extramainis -version: 1 -cabal-version: >= 1.6 - -library - build-depends: base >= 4.9 && <4.10 - hs-source-dirs: . - -test-suite tests - type: exitcode-stdio-1.0 - test-module: Tests diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/gluedop.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/gluedop.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/gluedop.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/gluedop.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: gluedop -version: 1 -cabal-version: >= 1.6 - -library - build-depends: base >= 4.9 && <4.10 - if os(windows) &&!impl(ghc) - build-depends: containers - hs-source-dirs: . 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/multiplesingular.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/multiplesingular.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/multiplesingular.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/multiplesingular.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -name: multiplesingular -name: multiplesingular2 -version: 1 -cabal-version: >= 1.6 - -library - build-depends: base >= 4.9 && <4.10 - hs-source-dirs: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/nbsp.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/nbsp.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/nbsp.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/nbsp.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -name: nbsp -version: 1 -cabal-version: >= 1.6 - -library -  build-depends: base >= 4.9 && <4.10 - hs-source-dirs: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/newsyntax.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/newsyntax.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/newsyntax.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/newsyntax.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -name: newsyntax -version: 1 - -library - build-depends: base >= 4.9 && <4.10 - hs-source-dirs: . 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/oldsyntax.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/oldsyntax.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/oldsyntax.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/oldsyntax.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -name: oldsyntax -version: 1 -cabal-version: >= 1.6 - -build-depends: base >= 4.9 && <4.10 -hs-source-dirs: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/subsection.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/subsection.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/subsection.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/subsection.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: subsection -version: 1 -cabal-version: >= 1.6 - -library - build-depends: base >= 4.9 && <4.10 - hs-source-dirs: . - iff os(windows) - build-depends: containers diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/tab.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/tab.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/tab.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/tab.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -name: tab -version: 1 -cabal-version: >= 1.6 - -library - build-depends: { base >= 4.9 && <4.10 } - hs-source-dirs: . 
- -test-suite tests { - type: exitcode-stdio-1.0 - main-is: Main.hs -} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/trailingfield.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/trailingfield.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/trailingfield.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/trailingfield.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: trailingfield -version: 1 -cabal-version: >= 1.6 - -library - build-depends: base >= 4.9 && <4.10 - hs-source-dirs: . - -description: No fields after sections diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/unknownfield.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/unknownfield.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/unknownfield.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/unknownfield.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -name: unknownfield -version: 1 -cabal-version: >= 1.6 - -library - build-depends: base >= 4.9 && <4.10 - hs-source-dirs: . 
- xfield: x diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/unknownsection.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/unknownsection.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/unknownsection.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/unknownsection.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -name: unknownsection -version: 1 -cabal-version: >= 1.6 - -library - build-depends: base >= 4.9 && <4.10 - hs-source-dirs: . - -z - z-field: z diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/utf8.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/utf8.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/utf8.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/utf8.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -name: utf8 -author: Oleg Grönroos -version: 1 -cabal-version: >= 1.6 - -library - build-depends: base >= 4.9 && <4.10 - hs-source-dirs: . 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/versiontag.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/versiontag.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests/warnings/versiontag.cabal 2018-10-17 15:59:04.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests/warnings/versiontag.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -name: versiontag -version: 1 -cabal-version: >= 1.6 - -library - build-depends: base >= 4.9 && <4.10-rc1 - hs-source-dirs: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/ParserTests.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/ParserTests.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,308 +0,0 @@ -{-# LANGUAGE CPP #-} -module Main - ( main - ) where - -import Prelude () -import Prelude.Compat - -import Test.Tasty -import Test.Tasty.Golden.Advanced (goldenTest) -import Test.Tasty.HUnit - -import Control.Monad (void) -import Data.Algorithm.Diff (Diff (..), getGroupedDiff) -import Data.Maybe (isNothing) -import Distribution.PackageDescription (GenericPackageDescription) -import Distribution.PackageDescription.Parsec (parseGenericPackageDescription) -import Distribution.PackageDescription.PrettyPrint (showGenericPackageDescription) -import Distribution.Parsec.Common - (PWarnType (..), PWarning (..), showPError, showPWarning) -import Distribution.Parsec.ParseResult (runParseResult) -import Distribution.Utils.Generic (fromUTF8BS, toUTF8BS) -import System.FilePath (replaceExtension, ()) - -import qualified Data.ByteString as BS -import qualified Data.ByteString.Char8 as BS8 - -import 
qualified Distribution.InstalledPackageInfo as IPI -import qualified Distribution.ParseUtils as ReadP - -#ifdef MIN_VERSION_tree_diff -import Data.TreeDiff (toExpr) -import Data.TreeDiff.Golden (ediffGolden) -import Instances.TreeDiff () -#endif - -tests :: TestTree -tests = testGroup "parsec tests" - [ regressionTests - , warningTests - , errorTests - , ipiTests - ] - -------------------------------------------------------------------------------- --- Warnings -------------------------------------------------------------------------------- - --- Verify that we trigger warnings -warningTests :: TestTree -warningTests = testGroup "warnings triggered" - [ warningTest PWTLexBOM "bom.cabal" - , warningTest PWTLexNBSP "nbsp.cabal" - , warningTest PWTLexTab "tab.cabal" - , warningTest PWTUTF "utf8.cabal" - , warningTest PWTBoolCase "bool.cabal" - , warningTest PWTVersionTag "versiontag.cabal" - , warningTest PWTNewSyntax "newsyntax.cabal" - , warningTest PWTOldSyntax "oldsyntax.cabal" - , warningTest PWTDeprecatedField "deprecatedfield.cabal" - , warningTest PWTInvalidSubsection "subsection.cabal" - , warningTest PWTUnknownField "unknownfield.cabal" - , warningTest PWTUnknownSection "unknownsection.cabal" - , warningTest PWTTrailingFields "trailingfield.cabal" - , warningTest PWTDoubleDash "doubledash.cabal" - , warningTest PWTMultipleSingularField "multiplesingular.cabal" - -- TODO: not implemented yet - -- , warningTest PWTExtraTestModule "extratestmodule.cabal" - ] - -warningTest :: PWarnType -> FilePath -> TestTree -warningTest wt fp = testCase (show wt) $ do - contents <- BS.readFile $ "tests" "ParserTests" "warnings" fp - - let res = parseGenericPackageDescription contents - let (warns, x) = runParseResult res - - assertBool ("should parse successfully: " ++ show x) $ isRight x - - case warns of - [PWarning wt' _ _] -> assertEqual "warning type" wt wt' - [] -> assertFailure "got no warnings" - _ -> assertFailure $ "got multiple warnings: " ++ show warns - where - 
isRight (Right _) = True - isRight _ = False - -------------------------------------------------------------------------------- --- Errors -------------------------------------------------------------------------------- - -errorTests :: TestTree -errorTests = testGroup "errors" - [ errorTest "common1.cabal" - , errorTest "common2.cabal" - , errorTest "common3.cabal" - , errorTest "leading-comma.cabal" - , errorTest "range-ge-wild.cabal" - , errorTest "forward-compat.cabal" - , errorTest "forward-compat2.cabal" - , errorTest "forward-compat3.cabal" - , errorTest "issue-5055.cabal" - , errorTest "issue-5055-2.cabal" - , errorTest "noVersion.cabal" - , errorTest "noVersion2.cabal" - , errorTest "spdx-1.cabal" - , errorTest "spdx-2.cabal" - , errorTest "spdx-3.cabal" - ] - -errorTest :: FilePath -> TestTree -errorTest fp = cabalGoldenTest fp correct $ do - contents <- BS.readFile input - let res = parseGenericPackageDescription contents - let (_, x) = runParseResult res - - return $ toUTF8BS $ case x of - Right gpd -> - "UNXPECTED SUCCESS\n" ++ - showGenericPackageDescription gpd - Left (v, errs) -> - unlines $ ("VERSION: " ++ show v) : map (showPError fp) errs - where - input = "tests" "ParserTests" "errors" fp - correct = replaceExtension input "errors" - -------------------------------------------------------------------------------- --- Regressions -------------------------------------------------------------------------------- - -regressionTests :: TestTree -regressionTests = testGroup "regressions" - [ regressionTest "encoding-0.8.cabal" - , regressionTest "Octree-0.5.cabal" - , regressionTest "nothing-unicode.cabal" - , regressionTest "issue-774.cabal" - , regressionTest "generics-sop.cabal" - , regressionTest "elif.cabal" - , regressionTest "elif2.cabal" - , regressionTest "shake.cabal" - , regressionTest "common.cabal" - , regressionTest "common2.cabal" - , regressionTest "leading-comma.cabal" - , regressionTest "wl-pprint-indef.cabal" - , regressionTest 
"th-lift-instances.cabal" - , regressionTest "issue-5055.cabal" - , regressionTest "noVersion.cabal" - , regressionTest "spdx-1.cabal" - , regressionTest "spdx-2.cabal" - , regressionTest "spdx-3.cabal" - ] - -regressionTest :: FilePath -> TestTree -regressionTest fp = testGroup fp - [ formatGoldenTest fp - , formatRoundTripTest fp -#ifdef MIN_VERSION_tree_diff - , treeDiffGoldenTest fp -#endif - ] - -formatGoldenTest :: FilePath -> TestTree -formatGoldenTest fp = cabalGoldenTest "format" correct $ do - contents <- BS.readFile input - let res = parseGenericPackageDescription contents - let (warns, x) = runParseResult res - - return $ toUTF8BS $ case x of - Right gpd -> - unlines (map (showPWarning fp) warns) - ++ showGenericPackageDescription gpd - Left (_, errs) -> - unlines $ "ERROR" : map (showPError fp) errs - where - input = "tests" "ParserTests" "regressions" fp - correct = replaceExtension input "format" - -#ifdef MIN_VERSION_tree_diff -treeDiffGoldenTest :: FilePath -> TestTree -treeDiffGoldenTest fp = ediffGolden goldenTest "expr" exprFile $ do - contents <- BS.readFile input - let res = parseGenericPackageDescription contents - let (_, x) = runParseResult res - case x of - Right gpd -> pure (toExpr gpd) - Left (_, errs) -> fail $ unlines $ "ERROR" : map (showPError fp) errs - where - input = "tests" "ParserTests" "regressions" fp - exprFile = replaceExtension input "expr" -#endif - -formatRoundTripTest :: FilePath -> TestTree -formatRoundTripTest fp = testCase "roundtrip" $ do - contents <- BS.readFile input - x <- parse contents - let contents' = showGenericPackageDescription x - y <- parse (toUTF8BS contents') - -- previously we mangled licenses a bit - let y' = y - assertEqual "re-parsed doesn't match" x y' - where - parse :: BS.ByteString -> IO GenericPackageDescription - parse c = do - let (_, x') = runParseResult $ parseGenericPackageDescription c - case x' of - Right gpd -> pure gpd - Left (_, errs) -> do - void $ assertFailure $ unlines (map 
(showPError fp) errs) - fail "failure" - input = "tests" "ParserTests" "regressions" fp - -------------------------------------------------------------------------------- --- InstalledPackageInfo regressions -------------------------------------------------------------------------------- - -ipiTests :: TestTree -ipiTests = testGroup "ipis" - [ ipiTest "transformers.cabal" - , ipiTest "Includes2.cabal" - , ipiTest "issue-2276-ghc-9885.cabal" - , ipiTest "internal-preprocessor-test.cabal" - ] - -ipiTest :: FilePath -> TestTree -ipiTest fp = testGroup fp $ -#ifdef MIN_VERSION_tree_diff - [ ipiTreeDiffGoldenTest fp ] ++ -#endif - [ ipiFormatGoldenTest fp - , ipiFormatRoundTripTest fp - ] - -ipiFormatGoldenTest :: FilePath -> TestTree -ipiFormatGoldenTest fp = cabalGoldenTest "format" correct $ do - contents <- readFile input - let res = IPI.parseInstalledPackageInfo contents - return $ toUTF8BS $ case res of - ReadP.ParseFailed err -> "ERROR " ++ show err - ReadP.ParseOk ws ipi -> - unlines (map (ReadP.showPWarning fp) ws) - ++ IPI.showInstalledPackageInfo ipi - where - input = "tests" "ParserTests" "ipi" fp - correct = replaceExtension input "format" - -#ifdef MIN_VERSION_tree_diff -ipiTreeDiffGoldenTest :: FilePath -> TestTree -ipiTreeDiffGoldenTest fp = ediffGolden goldenTest "expr" exprFile $ do - contents <- readFile input - let res = IPI.parseInstalledPackageInfo contents - case res of - ReadP.ParseFailed err -> fail $ "ERROR " ++ show err - ReadP.ParseOk _ws ipi -> pure (toExpr ipi) - where - input = "tests" "ParserTests" "ipi" fp - exprFile = replaceExtension input "expr" -#endif - -ipiFormatRoundTripTest :: FilePath -> TestTree -ipiFormatRoundTripTest fp = testCase "roundtrip" $ do - contents <- readFile input - x <- parse contents - let contents' = IPI.showInstalledPackageInfo x - y <- parse contents' - - -- ghc-pkg prints pkgroot itself, based on cli arguments! 
- let x' = x { IPI.pkgRoot = Nothing } - let y' = y - assertBool "pkgRoot isn't shown" (isNothing (IPI.pkgRoot y)) - assertEqual "re-parsed doesn't match" x' y' - - -- Complete round-trip - let contents2 = IPI.showFullInstalledPackageInfo x - z <- parse contents2 - assertEqual "re-parsed doesn't match" x z - - where - parse :: String -> IO IPI.InstalledPackageInfo - parse c = do - case IPI.parseInstalledPackageInfo c of - ReadP.ParseOk _ ipi -> return ipi - ReadP.ParseFailed err -> do - void $ assertFailure $ show err - fail "failure" - input = "tests" "ParserTests" "ipi" fp - -------------------------------------------------------------------------------- --- Main -------------------------------------------------------------------------------- - -main :: IO () -main = defaultMain tests - -cabalGoldenTest :: TestName -> FilePath -> IO BS.ByteString -> TestTree -cabalGoldenTest name ref act = goldenTest name (BS.readFile ref) act cmp upd - where - upd = BS.writeFile ref - cmp x y | x == y = return Nothing - cmp x y = return $ Just $ unlines $ - concatMap f (getGroupedDiff (BS8.lines x) (BS8.lines y)) - where - f (First xs) = map (cons3 '-' . fromUTF8BS) xs - f (Second ys) = map (cons3 '+' . fromUTF8BS) ys - -- we print unchanged lines too. It shouldn't be a problem while we have - -- reasonably small examples - f (Both xs _) = map (cons3 ' ' . fromUTF8BS) xs - -- we add three characters, so the changed lines are easier to spot - cons3 c cs = c : c : c : ' ' : cs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/README.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/README.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/README.md 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -Unit tests -========== - -Ordinary unit tests. 
If you're looking for the package tests, -they live in cabal-testsuite now. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/Test/Laws.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/Test/Laws.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/Test/Laws.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/Test/Laws.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,79 +0,0 @@ -{-# OPTIONS_GHC -fno-warn-missing-signatures #-} -module Test.Laws where - -import Prelude hiding (Num((+), (*))) -import Data.Monoid (Monoid(..), Endo(..)) -import qualified Data.Foldable as Foldable - -idempotent_unary f x = f fx == fx where fx = f x - --- Basic laws on binary operators - -idempotent_binary (+) x = x + x == x - -commutative (+) x y = x + y == y + x - -associative (+) x y z = (x + y) + z == x + (y + z) - -distributive_left (*) (+) x y z = x * (y + z) == (x * y) + (x * z) - -distributive_right (*) (+) x y z = (y + z) * x == (y * x) + (z * x) - - --- | The first 'fmap' law --- --- > fmap id == id --- -fmap_1 :: (Eq (f a), Functor f) => f a -> Bool -fmap_1 x = fmap id x == x - --- | The second 'fmap' law --- --- > fmap (f . g) == fmap f . fmap g --- -fmap_2 :: (Eq (f c), Functor f) => (b -> c) -> (a -> b) -> f a -> Bool -fmap_2 f g x = fmap (f . g) x == (fmap f . fmap g) x - - --- | The monoid identity law, 'mempty' is a left and right identity of --- 'mappend': --- --- > mempty `mappend` x = x --- > x `mappend` mempty = x --- -monoid_1 :: (Eq a, Data.Monoid.Monoid a) => a -> Bool -monoid_1 x = mempty `mappend` x == x - && x `mappend` mempty == x - --- | The monoid associativity law, 'mappend' must be associative. 
--- --- > (x `mappend` y) `mappend` z = x `mappend` (y `mappend` z) --- -monoid_2 :: (Eq a, Data.Monoid.Monoid a) => a -> a -> a -> Bool -monoid_2 x y z = (x `mappend` y) `mappend` z - == x `mappend` (y `mappend` z) - --- | The 'mconcat' definition. It can be overidden for the sake of effeciency --- but it must still satisfy the property given by the default definition: --- --- > mconcat = foldr mappend mempty --- -monoid_3 :: (Eq a, Data.Monoid.Monoid a) => [a] -> Bool -monoid_3 xs = mconcat xs == foldr mappend mempty xs - - --- | First 'Foldable' law --- --- > Foldable.fold = Foldable.foldr mappend mempty --- -foldable_1 :: (Foldable.Foldable t, Monoid m, Eq m) => t m -> Bool -foldable_1 x = Foldable.fold x == Foldable.foldr mappend mempty x - --- | Second 'Foldable' law --- --- > foldr f z t = appEndo (foldMap (Endo . f) t) z --- -foldable_2 :: (Foldable.Foldable t, Eq b) - => (a -> b -> b) -> b -> t a -> Bool -foldable_2 f z t = Foldable.foldr f z t - == appEndo (Foldable.foldMap (Endo . f) t) z diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/Test/QuickCheck/Utils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/Test/QuickCheck/Utils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/Test/QuickCheck/Utils.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/Test/QuickCheck/Utils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -module Test.QuickCheck.Utils where - -import Test.QuickCheck.Gen - - --- | Adjust the size of the generated value. --- --- In general the size gets bigger and bigger linearly. For some types --- it is not appropriate to generate ever bigger values but instead --- to generate lots of intermediate sized values. You could do that using: --- --- > adjustSize (\n -> min n 5) --- --- Similarly, for some types the linear size growth may mean getting too big --- too quickly relative to other values. 
So you may want to adjust how --- quickly the size grows. For example dividing by a constant, or even --- something like the integer square root or log. --- --- > adjustSize (\n -> n `div` 2) --- --- Putting this together we can make for example a relatively short list: --- --- > adjustSize (\n -> min 5 (n `div` 3)) (listOf1 arbitrary) --- --- Not only do we put a limit on the length but we also scale the growth to --- prevent it from hitting the maximum size quite so early. --- -adjustSize :: (Int -> Int) -> Gen a -> Gen a -adjustSize adjust gen = sized (\n -> resize (adjust n) gen) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/CreatePipe.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/CreatePipe.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/CreatePipe.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/CreatePipe.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -module UnitTests.Distribution.Compat.CreatePipe (tests) where - -import Distribution.Compat.CreatePipe -import System.IO (hClose, hGetContents, hPutStr, hSetEncoding, localeEncoding) -import Test.Tasty -import Test.Tasty.HUnit - -tests :: [TestTree] -tests = [testCase "Locale Encoding" case_Locale_Encoding] - -case_Locale_Encoding :: Assertion -case_Locale_Encoding = do - let str = "\0252" - (r, w) <- createPipe - hSetEncoding w localeEncoding - out <- hGetContents r - hPutStr w str - hClose w - assertEqual "createPipe should support Unicode roundtripping" str out diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/Graph.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/Graph.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/Graph.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/Graph.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,91 +0,0 @@ -{-# LANGUAGE PatternGuards #-} -{-# LANGUAGE FlexibleInstances #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} -module UnitTests.Distribution.Compat.Graph - ( tests - , arbitraryGraph - ) where - -import Distribution.Compat.Graph - -import qualified Prelude -import Prelude hiding (null) -import Test.Tasty -import Test.Tasty.QuickCheck -import qualified Data.Set as Set -import Control.Monad -import qualified Data.Graph as G -import Data.Array ((!)) -import Data.Maybe -import Data.List (sort) - -tests :: [TestTree] -tests = - [ testProperty "arbitrary unbroken" (prop_arbitrary_unbroken :: Graph (Node Int ()) -> Bool) - , testProperty "nodes consistent" (prop_nodes_consistent :: Graph (Node Int ()) -> Bool) - , testProperty "edges consistent" (prop_edges_consistent :: Graph (Node Int ()) -> Property) - , testProperty "closure consistent" (prop_closure_consistent :: Graph (Node Int ()) -> Property) - ] - --- Our arbitrary instance does not generate broken graphs -prop_arbitrary_unbroken :: Graph a -> Bool -prop_arbitrary_unbroken g = Prelude.null (broken g) - --- Every node from 'toList' maps to a vertex which --- is present in the constructed graph, and maps back --- to a node correctly. -prop_nodes_consistent :: (Eq a, IsNode a) => Graph a -> Bool -prop_nodes_consistent g = all p (toList g) - where - (_, vtn, ktv) = toGraph g - p n = case ktv (nodeKey n) of - Just v -> vtn v == n - Nothing -> False - --- A non-broken graph has the 'nodeNeighbors' of each node --- equal the recorded adjacent edges in the node graph. 
-prop_edges_consistent :: IsNode a => Graph a -> Property -prop_edges_consistent g = Prelude.null (broken g) ==> all p (toList g) - where - (gr, vtn, ktv) = toGraph g - p n = sort (nodeNeighbors n) - == sort (map (nodeKey . vtn) (gr ! fromJust (ktv (nodeKey n)))) - --- Closure is consistent with reachable -prop_closure_consistent :: (Show a, IsNode a) => Graph a -> Property -prop_closure_consistent g = - not (null g) ==> - forAll (elements (toList g)) $ \n -> - Set.fromList (map nodeKey (fromJust (closure g [nodeKey n]))) - == Set.fromList (map (nodeKey . vtn) (G.reachable gr (fromJust (ktv (nodeKey n))))) - where - (gr, vtn, ktv) = toGraph g - -hasNoDups :: Ord a => [a] -> Bool -hasNoDups = loop Set.empty - where - loop _ [] = True - loop s (x:xs) | s' <- Set.insert x s, Set.size s' > Set.size s - = loop s' xs - | otherwise - = False - --- | Produces a graph of size @len@. We sample with 'suchThat'; if we --- dropped duplicate entries our size could be smaller. -arbitraryGraph :: (Ord k, Show k, Arbitrary k, Arbitrary a) - => Int -> Gen (Graph (Node k a)) -arbitraryGraph len = do - -- Careful! Assume k is much larger than size. - ks <- vectorOf len arbitrary `suchThat` hasNoDups - ns <- forM ks $ \k -> do - a <- arbitrary - ns <- listOf (elements ks) - -- Allow duplicates! 
- return (N a k ns) - return (fromDistinctList ns) - -instance (Ord k, Show k, Arbitrary k, Arbitrary a) - => Arbitrary (Graph (Node k a)) where - arbitrary = sized $ \n -> do - len <- choose (0, n) - arbitraryGraph len diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/ReadP.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/ReadP.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/ReadP.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/ReadP.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,153 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Compat.ReadP --- Copyright : (c) The University of Glasgow 2002 --- License : BSD-style (see the file libraries/base/LICENSE) --- --- Maintainer : libraries@haskell.org --- Portability : portable --- --- This code was originally in Distribution.Compat.ReadP. Please see that file --- for provenance. The tests have been integrated into the test framework. --- Some properties cannot be tested, as they hold over arbitrary ReadP values, --- and we don't have a good Arbitrary instance (nor Show instance) for ReadP. 
--- -module UnitTests.Distribution.Compat.ReadP - ( tests - -- * Properties - -- $properties - ) where - -import Data.List -import Distribution.Compat.ReadP -import Test.Tasty -import Test.Tasty.QuickCheck - -tests :: [TestTree] -tests = - [ testProperty "Get Nil" prop_Get_Nil - , testProperty "Get Cons" prop_Get_Cons - , testProperty "Look" prop_Look - , testProperty "Fail" prop_Fail - , testProperty "Return" prop_Return - --, testProperty "Bind" prop_Bind - --, testProperty "Plus" prop_Plus - --, testProperty "LeftPlus" prop_LeftPlus - --, testProperty "Gather" prop_Gather - , testProperty "String Yes" prop_String_Yes - , testProperty "String Maybe" prop_String_Maybe - , testProperty "Munch" (prop_Munch evenChar) - , testProperty "Munch1" (prop_Munch1 evenChar) - --, testProperty "Choice" prop_Choice - --, testProperty "ReadS" prop_ReadS - ] - --- --------------------------------------------------------------------------- --- QuickCheck properties that hold for the combinators - -{- $properties -The following are QuickCheck specifications of what the combinators do. -These can be seen as formal specifications of the behavior of the -combinators. - -We use bags to give semantics to the combinators. --} - -type Bag a = [a] - --- Equality on bags does not care about the order of elements. - -(=~) :: Ord a => Bag a -> Bag a -> Bool -xs =~ ys = sort xs == sort ys - --- A special equality operator to avoid unresolved overloading --- when testing the properties. - -(=~.) :: Bag (Int,String) -> Bag (Int,String) -> Bool -(=~.) = (=~) - --- Here follow the properties: - -prop_Get_Nil :: Bool -prop_Get_Nil = - readP_to_S get [] =~ [] - -prop_Get_Cons :: Char -> [Char] -> Bool -prop_Get_Cons c s = - readP_to_S get (c:s) =~ [(c,s)] - -prop_Look :: String -> Bool -prop_Look s = - readP_to_S look s =~ [(s,s)] - -prop_Fail :: String -> Bool -prop_Fail s = - readP_to_S pfail s =~. [] - -prop_Return :: Int -> String -> Bool -prop_Return x s = - readP_to_S (return x) s =~. 
[(x,s)] - -{- -prop_Bind p k s = - readP_to_S (p >>= k) s =~. - [ ys'' - | (x,s') <- readP_to_S p s - , ys'' <- readP_to_S (k (x::Int)) s' - ] - -prop_Plus :: ReadP Int Int -> ReadP Int Int -> String -> Bool -prop_Plus p q s = - readP_to_S (p +++ q) s =~. - (readP_to_S p s ++ readP_to_S q s) - -prop_LeftPlus :: ReadP Int Int -> ReadP Int Int -> String -> Bool -prop_LeftPlus p q s = - readP_to_S (p <++ q) s =~. - (readP_to_S p s +<+ readP_to_S q s) - where - [] +<+ ys = ys - xs +<+ _ = xs - -prop_Gather s = - forAll readPWithoutReadS $ \p -> - readP_to_S (gather p) s =~ - [ ((pre,x::Int),s') - | (x,s') <- readP_to_S p s - , let pre = take (length s - length s') s - ] --} - -prop_String_Yes :: String -> [Char] -> Bool -prop_String_Yes this s = - readP_to_S (string this) (this ++ s) =~ - [(this,s)] - -prop_String_Maybe :: String -> String -> Bool -prop_String_Maybe this s = - readP_to_S (string this) s =~ - [(this, drop (length this) s) | this `isPrefixOf` s] - -prop_Munch :: (Char -> Bool) -> String -> Bool -prop_Munch p s = - readP_to_S (munch p) s =~ - [(takeWhile p s, dropWhile p s)] - -prop_Munch1 :: (Char -> Bool) -> String -> Bool -prop_Munch1 p s = - readP_to_S (munch1 p) s =~ - [(res,s') | let (res,s') = (takeWhile p s, dropWhile p s), not (null res)] - -{- -prop_Choice :: [ReadP Int Int] -> String -> Bool -prop_Choice ps s = - readP_to_S (choice ps) s =~. - readP_to_S (foldr (+++) pfail ps) s - -prop_ReadS :: ReadS Int -> String -> Bool -prop_ReadS r s = - readP_to_S (readS_to_P r) s =~. r s --} - -evenChar :: Char -> Bool -evenChar = even . 
fromEnum diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/Time.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/Time.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/Time.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Compat/Time.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,49 +0,0 @@ -module UnitTests.Distribution.Compat.Time (tests) where - -import Control.Concurrent (threadDelay) -import System.FilePath - -import Distribution.Simple.Utils (withTempDirectory) -import Distribution.Verbosity - -import Distribution.Compat.Time - -import Test.Tasty -import Test.Tasty.HUnit - -tests :: Int -> [TestTree] -tests mtimeChange = - [ testCase "getModTime has sub-second resolution" $ getModTimeTest mtimeChange - , testCase "getCurTime works as expected" $ getCurTimeTest mtimeChange - ] - -getModTimeTest :: Int -> Assertion -getModTimeTest mtimeChange = - withTempDirectory silent "." "getmodtime-" $ \dir -> do - let fileName = dir "foo" - writeFile fileName "bar" - t0 <- getModTime fileName - threadDelay mtimeChange - writeFile fileName "baz" - t1 <- getModTime fileName - assertBool "expected different file mtimes" (t1 > t0) - - -getCurTimeTest :: Int -> Assertion -getCurTimeTest mtimeChange = - withTempDirectory silent "." 
"getmodtime-" $ \dir -> do - let fileName = dir "foo" - writeFile fileName "bar" - t0 <- getModTime fileName - threadDelay mtimeChange - t1 <- getCurTime - assertBool("expected file mtime (" ++ show t0 - ++ ") to be earlier than current time (" ++ show t1 ++ ")") - (t0 < t1) - - threadDelay mtimeChange - writeFile fileName "baz" - t2 <- getModTime fileName - assertBool ("expected current time (" ++ show t1 - ++ ") to be earlier than file mtime (" ++ show t2 ++ ")") - (t1 < t2) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Simple/Glob.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Simple/Glob.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Simple/Glob.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Simple/Glob.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,164 +0,0 @@ -module UnitTests.Distribution.Simple.Glob - ( tests - ) where - -import Control.Monad -import Data.Foldable (for_) -import Data.Function (on) -import Data.List (sort) -import Data.Maybe (mapMaybe) -import Distribution.Simple.Glob -import qualified Distribution.Verbosity as Verbosity -import Distribution.Version -import System.Directory (createDirectoryIfMissing) -import System.FilePath ((), splitFileName, normalise) -import System.IO.Temp (withSystemTempDirectory) -import Test.Tasty -import Test.Tasty.HUnit - -sampleFileNames :: [FilePath] -sampleFileNames = - [ "a" - , "a.html" - , "b.html" - , "b.html.gz" - , "foo/.blah.html" - , "foo/.html" - , "foo/a" - , "foo/a.html" - , "foo/a.html.gz" - , "foo/a.tex" - , "foo/a.tex.gz" - , "foo/b.html" - , "foo/b.html.gz" - , "foo/x.gz" - , "foo/bar/.html" - , "foo/bar/a.html" - , "foo/bar/a.html.gz" - , "foo/bar/a.tex" - , "foo/bar/a.tex.gz" - , "foo/bar/b.html" - , "foo/bar/b.html.gz" - , "foo/c.html/blah" - , "xyz/foo/a.html" 
- ] - -makeSampleFiles :: FilePath -> IO () -makeSampleFiles dir = for_ sampleFileNames $ \filename -> do - let (dir', name) = splitFileName filename - createDirectoryIfMissing True (dir dir') - writeFile (dir dir' name) $ "This is " ++ filename - -compatibilityTests :: Version -> [TestTree] -compatibilityTests version = - [ testCase "literal match" $ - testMatches "foo/a" [GlobMatch "foo/a"] - , testCase "literal no match on prefix" $ - testMatches "foo/c.html" [] - , testCase "literal no match on suffix" $ - testMatches "foo/a.html" [GlobMatch "foo/a.html"] - , testCase "literal no prefix" $ - testMatches "a" [GlobMatch "a"] - , testCase "literal multiple prefix" $ - testMatches "foo/bar/a.html" [GlobMatch "foo/bar/a.html"] - , testCase "glob" $ - testMatches "*.html" [GlobMatch "a.html", GlobMatch "b.html"] - , testCase "glob in subdir" $ - testMatches "foo/*.html" [GlobMatch "foo/a.html", GlobMatch "foo/b.html"] - , testCase "glob multiple extensions" $ - testMatches "foo/*.html.gz" [GlobMatch "foo/a.html.gz", GlobMatch "foo/b.html.gz"] - , testCase "glob in deep subdir" $ - testMatches "foo/bar/*.tex" [GlobMatch "foo/bar/a.tex"] - , testCase "star in directory" $ - testFailParse "blah/*/foo" StarInDirectory - , testCase "star plus text in segment" $ - testFailParse "xyz*/foo" StarInDirectory - , testCase "star in filename plus text" $ - testFailParse "foo*.bar" StarInFileName - , testCase "no extension on star" $ - testFailParse "foo/*" NoExtensionOnStar - , testCase "star in extension" $ - testFailParse "foo.*.gz" StarInExtension - ] - where - testMatches = testMatchesVersion version - testFailParse = testFailParseVersion version - --- For efficiency reasons, matchDirFileGlob isn't a simple call to --- getDirectoryContentsRecursive and then a filter with --- fileGlobMatches. So test both that naive approach and the actual --- approach to make sure they are both correct. 
--- --- TODO: Work out how to construct the sample tree once for all tests, --- rather than once for each test. -testMatchesVersion :: Version -> FilePath -> [GlobResult FilePath] -> Assertion -testMatchesVersion version pat expected = do - globPat <- case parseFileGlob version pat of - Left _ -> assertFailure "Couldn't compile the pattern." - Right globPat -> return globPat - checkPure globPat - checkIO globPat - where - isEqual = (==) `on` (sort . fmap (fmap normalise)) - checkPure globPat = do - let actual = mapMaybe (fileGlobMatches globPat) sampleFileNames - unless (sort expected == sort actual) $ - assertFailure $ "Unexpected result (pure matcher): " ++ show actual - checkIO globPat = - withSystemTempDirectory "globstar-sample" $ \tmpdir -> do - makeSampleFiles tmpdir - actual <- runDirFileGlob Verbosity.normal tmpdir globPat - unless (isEqual actual expected) $ - assertFailure $ "Unexpected result (impure matcher): " ++ show actual - -testFailParseVersion :: Version -> FilePath -> GlobSyntaxError -> Assertion -testFailParseVersion version pat expected = - case parseFileGlob version pat of - Left err -> unless (expected == err) $ - assertFailure $ "Unexpected error: " ++ show err - Right _ -> assertFailure "Unexpected success in parsing." 
- -globstarTests :: [TestTree] -globstarTests = - [ testCase "fails to parse on early spec version" $ - testFailParseVersion (mkVersion [2,2]) "**/*.html" VersionDoesNotSupportGlobStar - , testCase "out-of-place double star" $ - testFailParse "blah/**/blah/*.foo" StarInDirectory - , testCase "multiple double star" $ - testFailParse "blah/**/**/*.foo" StarInDirectory - , testCase "fails with literal filename" $ - testFailParse "**/a.html" LiteralFileNameGlobStar - , testCase "with glob filename" $ - testMatches "**/*.html" [GlobMatch "a.html", GlobMatch "b.html", GlobMatch "foo/a.html", GlobMatch "foo/b.html", GlobMatch "foo/bar/a.html", GlobMatch "foo/bar/b.html", GlobMatch "xyz/foo/a.html"] - , testCase "glob with prefix" $ - testMatches "foo/**/*.html" [GlobMatch "foo/a.html", GlobMatch "foo/b.html", GlobMatch "foo/bar/a.html", GlobMatch "foo/bar/b.html"] - ] - where - testFailParse = testFailParseVersion (mkVersion [2,4]) - testMatches = testMatchesVersion (mkVersion [2,4]) - -multiDotTests :: [TestTree] -multiDotTests = - [ testCase "pre-2.4 single extension not matching multiple" $ - testMatchesVersion (mkVersion [2,2]) "foo/*.gz" [GlobWarnMultiDot "foo/a.html.gz", GlobWarnMultiDot "foo/a.tex.gz", GlobWarnMultiDot "foo/b.html.gz", GlobMatch "foo/x.gz"] - , testCase "doesn't match literal" $ - testMatches "foo/a.tex" [GlobMatch "foo/a.tex"] - , testCase "works" $ - testMatches "foo/*.gz" [GlobMatch "foo/a.html.gz", GlobMatch "foo/a.tex.gz", GlobMatch "foo/b.html.gz", GlobMatch "foo/x.gz"] - , testCase "works with globstar" $ - testMatches "foo/**/*.gz" [GlobMatch "foo/a.html.gz", GlobMatch "foo/a.tex.gz", GlobMatch "foo/b.html.gz", GlobMatch "foo/x.gz", GlobMatch "foo/bar/a.html.gz", GlobMatch "foo/bar/a.tex.gz", GlobMatch "foo/bar/b.html.gz"] - ] - where - testMatches = testMatchesVersion (mkVersion [2,4]) - -tests :: [TestTree] -tests = - [ testGroup "pre-2.4 compatibility" $ - compatibilityTests (mkVersion [2,2]) - , testGroup "post-2.4 compatibility" $ - 
compatibilityTests (mkVersion [2,4]) - , testGroup "globstar" globstarTests - , testCase "pre-1.6 rejects globbing" $ - testFailParseVersion (mkVersion [1,4]) "foo/*.bar" VersionDoesNotSupportGlob - , testGroup "multi-dot globbing" multiDotTests - ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Simple/Program/Internal.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Simple/Program/Internal.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Simple/Program/Internal.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Simple/Program/Internal.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,36 +0,0 @@ -module UnitTests.Distribution.Simple.Program.Internal - ( tests - ) where - -import Distribution.Simple.Program.Internal ( stripExtractVersion ) - -import Test.Tasty -import Test.Tasty.HUnit - -v :: String -v = "GNU strip (GNU Binutils; openSUSE 13.2) 2.24.0.20140403-6.1\nCopyright 2013\ - \ Free Software Foundation, Inc.\nThis program is free software; you may\ - \ redistribute it under the terms of\nthe GNU General Public License version 3\ - \ or (at your option) any later version.\nThis program has absolutely no\ - \ warranty.\n" - -v' :: String -v' = "GNU strip 2.17.50.0.6-26.el5 20061020" - -v'' :: String -v'' = "GNU strip (openSUSE-13.2) 2.23.50.0.6-26.el5 20061020" - -v''' :: String -v''' = "GNU strip (GNU (Binutils for) Ubuntu 12.04 ) 2.22" - -tests :: [TestTree] -tests = - [ testCase "Handles parentheses" $ - (stripExtractVersion v) @=? "2.24" - , testCase "Handles dashes and alphabetic characters" $ - (stripExtractVersion v') @=? "2.17" - , testCase "Handles single-word parenthetical expressions" $ - (stripExtractVersion v'') @=? "2.23" - , testCase "Handles nested parentheses" $ - (stripExtractVersion v''') @=? 
"2.22" - ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Simple/Utils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Simple/Utils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Simple/Utils.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Simple/Utils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,101 +0,0 @@ -module UnitTests.Distribution.Simple.Utils - ( tests - ) where - -import Distribution.Simple.Utils -import Distribution.Verbosity - -import Data.IORef -import System.Directory ( doesDirectoryExist, doesFileExist - , getTemporaryDirectory - , removeDirectoryRecursive, removeFile ) -import System.IO (hClose, localeEncoding, hPutStrLn) -import System.IO.Error -import qualified Control.Exception as Exception - -import Test.Tasty -import Test.Tasty.HUnit - -withTempFileTest :: Assertion -withTempFileTest = do - fileName <- newIORef "" - tempDir <- getTemporaryDirectory - withTempFile tempDir ".foo" $ \fileName' _handle -> do - writeIORef fileName fileName' - fileExists <- readIORef fileName >>= doesFileExist - assertBool "Temporary file not deleted by 'withTempFile'!" (not fileExists) - -withTempFileRemovedTest :: Assertion -withTempFileRemovedTest = do - tempDir <- getTemporaryDirectory - withTempFile tempDir ".foo" $ \fileName handle -> do - hClose handle - removeFile fileName - -withTempDirTest :: Assertion -withTempDirTest = do - dirName <- newIORef "" - tempDir <- getTemporaryDirectory - withTempDirectory normal tempDir "foo" $ \dirName' -> do - writeIORef dirName dirName' - dirExists <- readIORef dirName >>= doesDirectoryExist - assertBool "Temporary directory not deleted by 'withTempDirectory'!" 
- (not dirExists) - -withTempDirRemovedTest :: Assertion -withTempDirRemovedTest = do - tempDir <- getTemporaryDirectory - withTempDirectory normal tempDir "foo" $ \dirPath -> do - removeDirectoryRecursive dirPath - -rawSystemStdInOutTextDecodingTest :: Assertion -rawSystemStdInOutTextDecodingTest - -- We can only get this exception when the locale encoding is UTF-8 - -- so skip the test if it's not. - | show localeEncoding /= "UTF-8" = return () - | otherwise = do - tempDir <- getTemporaryDirectory - res <- withTempFile tempDir ".hs" $ \filenameHs handleHs -> do - withTempFile tempDir ".exe" $ \filenameExe handleExe -> do - -- Small program printing not utf8 - hPutStrLn handleHs "import Data.ByteString" - hPutStrLn handleHs "main = Data.ByteString.putStr (Data.ByteString.pack [32, 32, 255])" - hClose handleHs - - -- We need to close exe handle as well, otherwise compilation (writing) may fail - hClose handleExe - - -- Compile - (IODataText resOutput, resErrors, resExitCode) <- rawSystemStdInOut normal - "ghc" ["-o", filenameExe, filenameHs] - Nothing Nothing Nothing - IODataModeText - print (resOutput, resErrors, resExitCode) - - -- Execute - Exception.try $ do - rawSystemStdInOut normal - filenameExe [] - Nothing Nothing Nothing - IODataModeText -- not binary mode output, ie utf8 text mode so try to decode - case res of - Right (IODataText x1, x2, x3) -> assertFailure $ "expected IO decoding exception: " ++ show (x1,x2,x3) - Right (IODataBinary _, _, _) -> assertFailure "internal error" - Left err | isDoesNotExistError err -> Exception.throwIO err -- no ghc! 
- | otherwise -> return () - - - -tests :: [TestTree] -tests = - [ testCase "withTempFile works as expected" $ - withTempFileTest - , testCase "withTempFile can handle removed files" $ - withTempFileRemovedTest - , testCase "withTempDirectory works as expected" $ - withTempDirTest - , testCase "withTempDirectory can handle removed directories" $ - withTempDirRemovedTest - , testCase "rawSystemStdInOut reports text decoding errors" $ - rawSystemStdInOutTextDecodingTest - ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/SPDX.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/SPDX.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/SPDX.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/SPDX.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,158 +0,0 @@ -{-# OPTIONS_GHC -fno-warn-orphans -fno-warn-deprecations #-} -module UnitTests.Distribution.SPDX (spdxTests) where - -import Distribution.Compat.Prelude.Internal -import Prelude () - -import Distribution.SPDX -import Distribution.Parsec.Class (eitherParsec) -import Distribution.Pretty (prettyShow) - -import Test.Tasty -import Test.Tasty.QuickCheck - -spdxTests :: [TestTree] -spdxTests = - [ testProperty "LicenseId roundtrip" licenseIdRoundtrip - , testProperty "LicenseExceptionId roundtrip" licenseExceptionIdRoundtrip - , testProperty "LicenseRef roundtrip" licenseRefRoundtrip - , testProperty "SimpleLicenseExpression roundtrip" simpleLicenseExpressionRoundtrip - , testProperty "LicenseExpression roundtrip" licenseExpressionRoundtrip - , testProperty "isAcceptableLicense l = True" shouldAcceptProp - , testProperty "isAcceptableLicense l = False" shouldRejectProp - ] - -licenseIdRoundtrip :: LicenseId -> Property -licenseIdRoundtrip x = - counterexample (prettyShow x) $ - Right x === eitherParsec 
(prettyShow x) - -licenseExceptionIdRoundtrip :: LicenseExceptionId -> Property -licenseExceptionIdRoundtrip x = - counterexample (prettyShow x) $ - Right x === eitherParsec (prettyShow x) - -licenseRefRoundtrip :: LicenseRef -> Property -licenseRefRoundtrip x = - counterexample (prettyShow x) $ - Right x === eitherParsec (prettyShow x) - -simpleLicenseExpressionRoundtrip :: SimpleLicenseExpression -> Property -simpleLicenseExpressionRoundtrip x = - counterexample (prettyShow x) $ - Right x === eitherParsec (prettyShow x) - -licenseExpressionRoundtrip :: LicenseExpression -> Property -licenseExpressionRoundtrip x = - counterexample (prettyShow x) $ - Right (reassoc x) === eitherParsec (prettyShow x) - --- Parser produces right biased trees of and/or expressions -reassoc :: LicenseExpression -> LicenseExpression -reassoc (EOr a b) = case reassoc a of - EOr x y -> EOr x (reassoc (EOr y b)) - x -> EOr x (reassoc b) -reassoc (EAnd a b) = case reassoc a of - EAnd x y -> EAnd x (reassoc (EAnd y b)) - x -> EAnd x (reassoc b) -reassoc l = l - -------------------------------------------------------------------------------- --- isAcceptableLicence -------------------------------------------------------------------------------- - -shouldAccept :: [License] -shouldAccept = map License - [ simpleLicenseExpression GPL_2_0_only - , simpleLicenseExpression GPL_2_0_or_later - , simpleLicenseExpression BSD_2_Clause - , simpleLicenseExpression BSD_3_Clause - , simpleLicenseExpression MIT - , simpleLicenseExpression ISC - , simpleLicenseExpression MPL_2_0 - , simpleLicenseExpression Apache_2_0 - , simpleLicenseExpression CC0_1_0 - , simpleLicenseExpression BSD_4_Clause `EOr` simpleLicenseExpression MIT - ] - -shouldReject :: [License] -shouldReject = map License - [ simpleLicenseExpression BSD_4_Clause - , simpleLicenseExpression BSD_4_Clause `EAnd` simpleLicenseExpression MIT - ] - --- | A sketch of what Hackage could accept --- --- * NONE is rejected --- --- * "or later" syntax (+ 
postfix) is rejected --- --- * "WITH exc" exceptions are rejected --- --- * There should be a way to interpert license as (conjunction of) --- OSI-accepted licenses or CC0 --- -isAcceptableLicense :: License -> Bool -isAcceptableLicense NONE = False -isAcceptableLicense (License expr) = goExpr expr - where - goExpr (EAnd a b) = goExpr a && goExpr b - goExpr (EOr a b) = goExpr a || goExpr b - goExpr (ELicense _ (Just _)) = False -- Don't allow exceptions - goExpr (ELicense s Nothing) = goSimple s - - goSimple (ELicenseRef _) = False -- don't allow referenced licenses - goSimple (ELicenseIdPlus _) = False -- don't allow + licenses (use GPL-3.0-or-later e.g.) - goSimple (ELicenseId CC0_1_0) = True -- CC0 isn't OSI approved, but we allow it as "PublicDomain", this is eg. PublicDomain in http://hackage.haskell.org/package/string-qq-0.0.2/src/LICENSE - goSimple (ELicenseId lid) = licenseIsOsiApproved lid -- allow only OSI approved licenses. - -shouldAcceptProp :: Property -shouldAcceptProp = conjoin $ - map (\l -> counterexample (prettyShow l) (isAcceptableLicense l)) shouldAccept - -shouldRejectProp :: Property -shouldRejectProp = conjoin $ - map (\l -> counterexample (prettyShow l) (not $ isAcceptableLicense l)) shouldReject - -------------------------------------------------------------------------------- --- Instances -------------------------------------------------------------------------------- - -instance Arbitrary LicenseId where - arbitrary = elements $ licenseIdList LicenseListVersion_3_2 - -instance Arbitrary LicenseExceptionId where - arbitrary = elements $ licenseExceptionIdList LicenseListVersion_3_2 - -instance Arbitrary LicenseRef where - arbitrary = mkLicenseRef' <$> ids' <*> ids - where - ids = listOf1 $ elements $ ['a'..'z'] ++ ['A' .. 
'Z'] ++ ['0'..'9'] ++ "_-" - ids' = oneof [ pure Nothing, Just <$> ids ] - -instance Arbitrary SimpleLicenseExpression where - arbitrary = oneof - [ ELicenseId <$> arbitrary - , ELicenseIdPlus <$> arbitrary - , ELicenseRef <$> arbitrary - ] - -instance Arbitrary LicenseExpression where - arbitrary = sized arb - where - arb n - | n <= 0 = ELicense <$> arbitrary <*> pure Nothing - | otherwise = oneof - [ ELicense <$> arbitrary <*> arbitrary - , EAnd <$> arbA <*> arbB - , EOr <$> arbA <*> arbB - ] - where - m = n `div` 2 - arbA = arb m - arbB = arb (n - m) - - shrink (EAnd a b) = a : b : map (uncurry EAnd) (shrink (a, b)) - shrink (EOr a b) = a : b : map (uncurry EOr) (shrink (a, b)) - shrink _ = [] - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/System.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/System.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/System.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/System.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -{-# OPTIONS_GHC -fno-warn-orphans #-} -module UnitTests.Distribution.System - ( tests - ) where - -import Control.Monad (liftM2) -import Distribution.Text (Text(..), display, simpleParse) -import Distribution.System -import Test.Tasty -import Test.Tasty.QuickCheck - -textRoundtrip :: (Show a, Eq a, Text a) => a -> Property -textRoundtrip x = simpleParse (display x) === Just x - -tests :: [TestTree] -tests = - [ testProperty "Text OS round trip" (textRoundtrip :: OS -> Property) - , testProperty "Text Arch round trip" (textRoundtrip :: Arch -> Property) - , testProperty "Text Platform round trip" (textRoundtrip :: Platform -> Property) - ] - -instance Arbitrary OS where - arbitrary = elements knownOSs - -instance Arbitrary Arch where - arbitrary = elements knownArches - 
-instance Arbitrary Platform where - arbitrary = liftM2 Platform arbitrary arbitrary diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Types/GenericPackageDescription.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Types/GenericPackageDescription.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Types/GenericPackageDescription.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Types/GenericPackageDescription.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,37 +0,0 @@ -{-# OPTIONS_GHC -fno-warn-deprecations #-} -- for importing "Distribution.Compat.Prelude.Internal" - -module UnitTests.Distribution.Types.GenericPackageDescription where - -import Prelude () -import Distribution.Compat.Prelude.Internal -import Distribution.Types.GenericPackageDescription - -import Test.Tasty -import Test.Tasty.HUnit -import qualified Control.Exception as C - -tests :: [TestTree] -tests = - [ testCase "GenericPackageDescription deepseq" gpdDeepseq - ] - -gpdFields :: [(String, GenericPackageDescription -> GenericPackageDescription)] -gpdFields = - [ ("packageDescription", \gpd -> gpd { packageDescription = undefined }) - , ("genPackageFlags", \gpd -> gpd { genPackageFlags = undefined }) - , ("condLibrary", \gpd -> gpd { condLibrary = undefined }) - , ("condSubLibraries", \gpd -> gpd { condSubLibraries = undefined }) - , ("condForeignLibs", \gpd -> gpd { condForeignLibs = undefined }) - , ("condExecutables", \gpd -> gpd { condExecutables = undefined }) - , ("condTestSuites", \gpd -> gpd { condTestSuites = undefined }) - , ("condBenchmarks", \gpd -> gpd { condBenchmarks = undefined }) - ] - -gpdDeepseq :: Assertion -gpdDeepseq = sequence_ - [ throwsUndefined msg (f emptyGenericPackageDescription) | (msg, f) <- gpdFields ] - -throwsUndefined :: NFData a => String -> a 
-> Assertion -throwsUndefined field a = - C.catch (C.evaluate (rnf a) >> assertFailure ("Deepseq failed to evaluate " ++ show field)) - (\(C.ErrorCall _) -> return ()) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Utils/Generic.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Utils/Generic.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Utils/Generic.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Utils/Generic.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,71 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} - --- to suppress WARNING in "Distribution.Compat.Prelude.Internal" -{-# OPTIONS_GHC -fno-warn-deprecations #-} - -module UnitTests.Distribution.Utils.Generic ( tests ) where - -import Prelude () -import Distribution.Compat.Prelude.Internal - -import Distribution.Utils.Generic - -import qualified Data.ByteString.Char8 as BS -import qualified Data.Text as T -import qualified Data.Text.Encoding as T - -import Test.Tasty -import Test.Tasty.HUnit -import Test.Tasty.QuickCheck - -tests :: [TestTree] -tests = - [ -- fromUTF8BS / toUTF8BS - testCase "fromUTF8BS mempty" testFromUTF8BSEmpty - , testCase "toUTF8BS mempty" testToUTF8BSEmpty - , testCase "toUTF8BS [U+D800..U+DFFF]" testToUTF8BSSurr - , testCase "toUTF8BS [U+0000..U+7F]" testToUTF8BSAscii - , testCase "toUTF8BS [U+0000..U+10FFFF]" testToUTF8BSText - , testCase "fromUTF8BS.toUTF8BS [U+0000..U+10FFFF]" testToFromUTF8BS - - , testProperty "fromUTF8BS.toUTF8BS == id" prop_toFromUTF8BS - , testProperty "toUTF8BS == encodeUtf8" prop_toUTF8BS - - , testProperty "Nothing = validateUtf8 (encodeUtf8 x)" prop_validateUtf8 - ] - -testFromUTF8BSEmpty :: Assertion -testFromUTF8BSEmpty = mempty @=? fromUTF8BS mempty - -testToUTF8BSEmpty :: Assertion -testToUTF8BSEmpty = mempty @=? 
toUTF8BS mempty - -testToUTF8BSSurr :: Assertion -testToUTF8BSSurr = BS.concat (replicate 2048 u_fffd) @=? toUTF8BS surrogates - where - surrogates = ['\xD800'..'\xDFFF'] - u_fffd = "\xEF\xBF\xBD" - -testToUTF8BSText :: Assertion -testToUTF8BSText = T.encodeUtf8 (T.pack txt) @=? toUTF8BS txt - where - txt = ['\x00'..'\x10FFFF'] - -testToUTF8BSAscii :: Assertion -testToUTF8BSAscii = BS.pack txt @=? toUTF8BS txt - where - txt = ['\x00'..'\x7F'] - -testToFromUTF8BS :: Assertion -testToFromUTF8BS = txt @=? (fromUTF8BS . toUTF8BS) txt - where - txt = ['\x0000'..'\xD7FF'] ++ ['\xE000'..'\x10FFFF'] - -prop_toFromUTF8BS :: [Char] -> Property -prop_toFromUTF8BS txt = txt === (fromUTF8BS . toUTF8BS) txt - -prop_toUTF8BS :: [Char] -> Property -prop_toUTF8BS txt = T.encodeUtf8 (T.pack txt) === toUTF8BS txt - -prop_validateUtf8 :: [Char] -> Property -prop_validateUtf8 txt = Nothing === validateUTF8 (toUTF8BS txt) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Utils/NubList.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Utils/NubList.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Utils/NubList.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Utils/NubList.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,76 +0,0 @@ --- to suppress WARNING in "Distribution.Compat.Prelude.Internal" -{-# OPTIONS_GHC -fno-warn-deprecations #-} -module UnitTests.Distribution.Utils.NubList - ( tests - ) where - -import Prelude () -import Distribution.Compat.Prelude.Internal - -import Distribution.Utils.NubList -import Test.Tasty -import Test.Tasty.HUnit -import Test.Tasty.QuickCheck - -tests :: [TestTree] -tests = - [ testCase "NubList retains ordering example" testOrdering - , testCase "NubList removes duplicates example" testDeDupe - , testProperty "NubList retains 
ordering" prop_Ordering - , testProperty "NubList removes duplicates" prop_DeDupe - , testProperty "fromNubList . toNubList = nub" prop_Nub - , testProperty "Monoid NubList Identity" prop_Identity - , testProperty "Monoid NubList Associativity" prop_Associativity - -- NubListR - , testProperty "NubListR removes duplicates from the right" prop_DeDupeR - ] - -someIntList :: [Int] --- This list must not have duplicate entries. -someIntList = [ 1, 3, 4, 2, 0, 7, 6, 5, 9, -1 ] - -testOrdering :: Assertion -testOrdering = - assertBool "Maintains element ordering:" $ - fromNubList (toNubList someIntList) == someIntList - -testDeDupe :: Assertion -testDeDupe = - assertBool "De-duplicates a list:" $ - fromNubList (toNubList (someIntList ++ someIntList)) == someIntList - --- --------------------------------------------------------------------------- --- QuickCheck properties for NubList - -prop_Ordering :: [Int] -> Property -prop_Ordering xs = - mempty <> toNubList xs' === toNubList xs' <> mempty - where - xs' = nub xs - -prop_DeDupe :: [Int] -> Property -prop_DeDupe xs = - fromNubList (toNubList (xs' ++ xs)) === xs' -- Note, we append primeless xs - where - xs' = nub xs - -prop_DeDupeR :: [Int] -> Property -prop_DeDupeR xs = - fromNubListR (toNubListR (xs ++ xs')) === xs' -- Note, we prepend primeless xs - where - xs' = nub xs - -prop_Nub :: [Int] -> Property -prop_Nub xs = rhs === lhs - where - rhs = fromNubList (toNubList xs) - lhs = nub xs - -prop_Identity :: [Int] -> Bool -prop_Identity xs = - mempty `mappend` toNubList xs == toNubList xs `mappend` mempty - -prop_Associativity :: [Int] -> [Int] -> [Int] -> Bool -prop_Associativity xs ys zs = - (toNubList xs `mappend` toNubList ys) `mappend` toNubList zs - == toNubList xs `mappend` (toNubList ys `mappend` toNubList zs) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Utils/ShortText.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Utils/ShortText.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Utils/ShortText.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Utils/ShortText.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,33 +0,0 @@ -module UnitTests.Distribution.Utils.ShortText - ( tests - ) where - -import Data.Monoid as Mon -import Test.Tasty -import Test.Tasty.QuickCheck - -import Distribution.Compat.Binary (encode, decode) - -import Distribution.Utils.ShortText - -prop_ShortTextOrd :: String -> String -> Bool -prop_ShortTextOrd a b = compare a b == compare (toShortText a) (toShortText b) - -prop_ShortTextMonoid :: String -> String -> Bool -prop_ShortTextMonoid a b = Mon.mappend a b == fromShortText (mappend (toShortText a) (toShortText b)) - -prop_ShortTextId :: String -> Bool -prop_ShortTextId a = (fromShortText . toShortText) a == a - -prop_ShortTextBinaryId :: String -> Bool -prop_ShortTextBinaryId a = (decode . 
encode) a' == a' - where - a' = toShortText a - -tests :: [TestTree] -tests = - [ testProperty "ShortText Id" prop_ShortTextId - , testProperty "ShortText Ord" prop_ShortTextOrd - , testProperty "ShortText Monoid" prop_ShortTextMonoid - , testProperty "ShortText BinaryId" prop_ShortTextBinaryId - ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Version.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Version.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Version.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests/Distribution/Version.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,782 +0,0 @@ -{-# LANGUAGE StandaloneDeriving, DeriveDataTypeable #-} -{-# OPTIONS_GHC -fno-warn-orphans - -fno-warn-incomplete-patterns - -fno-warn-deprecations - -fno-warn-unused-binds #-} --FIXME -module UnitTests.Distribution.Version (versionTests) where - -import Distribution.Compat.Prelude.Internal -import Prelude () - -import Distribution.Version -import Distribution.Text -import Distribution.Parsec.Class (simpleParsec) - -import Data.Typeable (typeOf) -import Math.NumberTheory.Logarithms (intLog2) -import Text.PrettyPrint as Disp (text, render, parens, hcat - ,punctuate, int, char, (<+>)) -import Test.Tasty -import Test.Tasty.QuickCheck -import qualified Test.Laws as Laws - -import Test.QuickCheck.Utils - -import Data.Maybe (fromJust) -import Data.Function (on) -import Text.Read (readMaybe) - -versionTests :: [TestTree] -versionTests = - -- test 'Version' type - [ tp "versionNumbers . mkVersion = id @[NonNegative Int]" prop_VersionId - , tp "versionNumbers . 
mkVersion = id @Base.Version" prop_VersionId2 - , tp "(==) = (==) `on` versionNumbers" prop_VersionEq - , tp "(==) = (==) `on` mkVersion" prop_VersionEq2 - , tp "compare = compare `on` versionNumbers" prop_VersionOrd - , tp "compare = compare `on` mkVersion" prop_VersionOrd2 - - , tp "readMaybe . show = Just" prop_ShowRead - , tp "read example" prop_ShowRead_example - - , tp "normaliseVersionRange involutive" prop_normalise_inv - , tp "parse . display involutive" prop_parse_disp_inv - , tp "parsec . display involutive" prop_parsec_disp_inv - - , tp "simpleParsec . display = Just" prop_parse_disp - ] - - ++ - zipWith - (\n (rep, p) -> testProperty ("Range Property " ++ show n ++ " (" ++ show rep ++ ")") p) - [1::Int ..] - -- properties to validate the test framework - [ typProperty prop_nonNull - , typProperty prop_gen_intervals1 - , typProperty prop_gen_intervals2 - --, typProperty prop_equivalentVersionRange --FIXME: runs out of test cases - , typProperty prop_intermediateVersion - - , typProperty prop_anyVersion - , typProperty prop_noVersion - , typProperty prop_thisVersion - , typProperty prop_notThisVersion - , typProperty prop_laterVersion - , typProperty prop_orLaterVersion - , typProperty prop_earlierVersion - , typProperty prop_orEarlierVersion - , typProperty prop_unionVersionRanges - , typProperty prop_intersectVersionRanges - , typProperty prop_differenceVersionRanges - , typProperty prop_invertVersionRange - , typProperty prop_withinVersion - , typProperty prop_foldVersionRange - , typProperty prop_foldVersionRange' - - -- the semantic query functions - --, typProperty prop_isAnyVersion1 --FIXME: runs out of test cases - --, typProperty prop_isAnyVersion2 --FIXME: runs out of test cases - --, typProperty prop_isNoVersion --FIXME: runs out of test cases - --, typProperty prop_isSpecificVersion1 --FIXME: runs out of test cases - --, typProperty prop_isSpecificVersion2 --FIXME: runs out of test cases - , typProperty prop_simplifyVersionRange1 - , 
typProperty prop_simplifyVersionRange1' - --, typProperty prop_simplifyVersionRange2 --FIXME: runs out of test cases - --, typProperty prop_simplifyVersionRange2' --FIXME: runs out of test cases - --, typProperty prop_simplifyVersionRange2'' --FIXME: actually wrong - - -- converting between version ranges and version intervals - , typProperty prop_to_intervals - --, typProperty prop_to_intervals_canonical --FIXME: runs out of test cases - --, typProperty prop_to_intervals_canonical' --FIXME: runs out of test cases - , typProperty prop_from_intervals - , typProperty prop_to_from_intervals - , typProperty prop_from_to_intervals - , typProperty prop_from_to_intervals' - - -- union and intersection of version intervals - , typProperty prop_unionVersionIntervals - , typProperty prop_unionVersionIntervals_idempotent - , typProperty prop_unionVersionIntervals_commutative - , typProperty prop_unionVersionIntervals_associative - , typProperty prop_intersectVersionIntervals - , typProperty prop_intersectVersionIntervals_idempotent - , typProperty prop_intersectVersionIntervals_commutative - , typProperty prop_intersectVersionIntervals_associative - , typProperty prop_union_intersect_distributive - , typProperty prop_intersect_union_distributive - - -- inversion of version intervals - , typProperty prop_invertVersionIntervals - , typProperty prop_invertVersionIntervalsTwice - ] - where - tp :: Testable p => String -> p -> TestTree - tp = testProperty - - typProperty p = (typeOf p, property p) - - --- parseTests :: [TestTree] --- parseTests = --- zipWith (\n p -> testProperty ("Parse Property " ++ show n) p) [1::Int ..] 
--- -- parsing and pretty printing --- [ -- property prop_parse_disp1 --FIXME: actually wrong - --- -- These are also wrong, see --- -- https://github.com/haskell/cabal/issues/3037#issuecomment-177671011 - --- -- property prop_parse_disp2 --- -- , property prop_parse_disp3 --- -- , property prop_parse_disp4 --- -- , property prop_parse_disp5 --- ] - -instance Arbitrary Version where - arbitrary = do - branch <- smallListOf1 $ - frequency [(3, return 0) - ,(3, return 1) - ,(2, return 2) - ,(2, return 3) - ,(1, return 0xfffd) - ,(1, return 0xfffe) -- max fitting into packed W64 - ,(1, return 0xffff) - ,(1, return 0x10000)] - return (mkVersion branch) - where - smallListOf1 = adjustSize (\n -> min 6 (n `div` 3)) . listOf1 - - shrink ver = [ mkVersion ns | ns <- shrink (versionNumbers ver) - , not (null ns) ] - -newtype VersionArb = VersionArb [Int] - deriving (Eq,Ord,Show) - --- | 'Version' instance as used by QC 2.9 -instance Arbitrary VersionArb where - arbitrary = sized $ \n -> - do k <- choose (0, log2 n) - xs <- vectorOf (k+1) arbitrarySizedNatural - return (VersionArb xs) - where - log2 :: Int -> Int - log2 n | n <= 1 = 0 - | otherwise = 1 + log2 (n `div` 2) - - shrink (VersionArb xs) = - [ VersionArb xs' - | xs' <- shrink xs - , length xs' > 0 - , all (>=0) xs' - ] - -instance Arbitrary VersionRange where - arbitrary = sized verRangeExp - where - verRangeExp n = frequency $ - [ (2, return anyVersion) - , (1, liftM thisVersion arbitrary) - , (1, liftM laterVersion arbitrary) - , (1, liftM orLaterVersion arbitrary) - , (1, liftM orLaterVersion' arbitrary) - , (1, liftM earlierVersion arbitrary) - , (1, liftM orEarlierVersion arbitrary) - , (1, liftM orEarlierVersion' arbitrary) - , (1, liftM withinVersion arbitrary) - , (1, liftM majorBoundVersion arbitrary) - , (2, liftM VersionRangeParens arbitrary) - ] ++ if n == 0 then [] else - [ (2, liftM2 unionVersionRanges verRangeExp2 verRangeExp2) - , (2, liftM2 intersectVersionRanges verRangeExp2 verRangeExp2) - ] - 
where - verRangeExp2 = verRangeExp (n `div` 2) - - orLaterVersion' v = - unionVersionRanges (LaterVersion v) (ThisVersion v) - orEarlierVersion' v = - unionVersionRanges (EarlierVersion v) (ThisVersion v) - - shrink AnyVersion = [] - shrink (ThisVersion v) = map ThisVersion (shrink v) - shrink (LaterVersion v) = map LaterVersion (shrink v) - shrink (EarlierVersion v) = map EarlierVersion (shrink v) - shrink (OrLaterVersion v) = LaterVersion v : map OrLaterVersion (shrink v) - shrink (OrEarlierVersion v) = EarlierVersion v : map OrEarlierVersion (shrink v) - shrink (WildcardVersion v) = map WildcardVersion ( shrink v) - shrink (MajorBoundVersion v) = map MajorBoundVersion (shrink v) - shrink (VersionRangeParens vr) = vr : map VersionRangeParens (shrink vr) - shrink (UnionVersionRanges a b) = a : b : map (uncurry UnionVersionRanges) (shrink (a, b)) - shrink (IntersectVersionRanges a b) = a : b : map (uncurry IntersectVersionRanges) (shrink (a, b)) - ---------------------- --- Version properties --- - -prop_VersionId :: [NonNegative Int] -> Bool -prop_VersionId lst0 = - (versionNumbers . mkVersion) lst == lst - where - lst = map getNonNegative lst0 - -prop_VersionId2 :: VersionArb -> Bool -prop_VersionId2 (VersionArb lst) = - (versionNumbers . 
mkVersion) lst == lst - -prop_VersionEq :: Version -> Version -> Bool -prop_VersionEq v1 v2 = (==) v1 v2 == ((==) `on` versionNumbers) v1 v2 - -prop_VersionEq2 :: VersionArb -> VersionArb -> Bool -prop_VersionEq2 (VersionArb v1) (VersionArb v2) = - (==) v1 v2 == ((==) `on` mkVersion) v1 v2 - -prop_VersionOrd :: Version -> Version -> Bool -prop_VersionOrd v1 v2 = - compare v1 v2 == (compare `on` versionNumbers) v1 v2 - -prop_VersionOrd2 :: VersionArb -> VersionArb -> Bool -prop_VersionOrd2 (VersionArb v1) (VersionArb v2) = - (==) v1 v2 == ((==) `on` mkVersion) v1 v2 - -prop_ShowRead :: Version -> Property -prop_ShowRead v = Just v === readMaybe (show v) - -prop_ShowRead_example :: Bool -prop_ShowRead_example = show (mkVersion [1,2,3]) == "mkVersion [1,2,3]" - ---------------------------- --- VersionRange properties --- - -prop_normalise_inv :: VersionRange -> Property -prop_normalise_inv vr = - normaliseVersionRange vr === normaliseVersionRange (normaliseVersionRange vr) - -prop_nonNull :: Version -> Bool -prop_nonNull = (/= nullVersion) - -prop_anyVersion :: Version -> Bool -prop_anyVersion v' = - withinRange v' anyVersion - -prop_noVersion :: Version -> Bool -prop_noVersion v' = - withinRange v' noVersion == False - -prop_thisVersion :: Version -> Version -> Bool -prop_thisVersion v v' = - withinRange v' (thisVersion v) - == (v' == v) - -prop_notThisVersion :: Version -> Version -> Bool -prop_notThisVersion v v' = - withinRange v' (notThisVersion v) - == (v' /= v) - -prop_laterVersion :: Version -> Version -> Bool -prop_laterVersion v v' = - withinRange v' (laterVersion v) - == (v' > v) - -prop_orLaterVersion :: Version -> Version -> Bool -prop_orLaterVersion v v' = - withinRange v' (orLaterVersion v) - == (v' >= v) - -prop_earlierVersion :: Version -> Version -> Bool -prop_earlierVersion v v' = - withinRange v' (earlierVersion v) - == (v' < v) - -prop_orEarlierVersion :: Version -> Version -> Bool -prop_orEarlierVersion v v' = - withinRange v' (orEarlierVersion 
v) - == (v' <= v) - -prop_unionVersionRanges :: VersionRange -> VersionRange -> Version -> Bool -prop_unionVersionRanges vr1 vr2 v' = - withinRange v' (unionVersionRanges vr1 vr2) - == (withinRange v' vr1 || withinRange v' vr2) - -prop_intersectVersionRanges :: VersionRange -> VersionRange -> Version -> Bool -prop_intersectVersionRanges vr1 vr2 v' = - withinRange v' (intersectVersionRanges vr1 vr2) - == (withinRange v' vr1 && withinRange v' vr2) - -prop_differenceVersionRanges :: VersionRange -> VersionRange -> Version -> Bool -prop_differenceVersionRanges vr1 vr2 v' = - withinRange v' (differenceVersionRanges vr1 vr2) - == (withinRange v' vr1 && not (withinRange v' vr2)) - -prop_invertVersionRange :: VersionRange -> Version -> Bool -prop_invertVersionRange vr v' = - withinRange v' (invertVersionRange vr) - == not (withinRange v' vr) - -prop_withinVersion :: Version -> Version -> Bool -prop_withinVersion v v' = - withinRange v' (withinVersion v) - == (v' >= v && v' < upper v) - where - upper = alterVersion $ \numbers -> init numbers ++ [last numbers + 1] - -prop_foldVersionRange :: VersionRange -> Property -prop_foldVersionRange range = - expandVR range - === foldVersionRange anyVersion thisVersion - laterVersion earlierVersion - unionVersionRanges intersectVersionRanges - range - where - expandVR (WildcardVersion v) = - intersectVersionRanges (expandVR (orLaterVersion v)) (earlierVersion (wildcardUpperBound v)) - expandVR (MajorBoundVersion v) = - intersectVersionRanges (expandVR (orLaterVersion v)) (earlierVersion (majorUpperBound v)) - expandVR (OrEarlierVersion v) = - unionVersionRanges (thisVersion v) (earlierVersion v) - expandVR (OrLaterVersion v) = - unionVersionRanges (thisVersion v) (laterVersion v) - expandVR (UnionVersionRanges v1 v2) = - UnionVersionRanges (expandVR v1) (expandVR v2) - expandVR (IntersectVersionRanges v1 v2) = - IntersectVersionRanges (expandVR v1) (expandVR v2) - expandVR (VersionRangeParens v) = expandVR v - expandVR v = v - - upper 
= alterVersion $ \numbers -> init numbers ++ [last numbers + 1] - -prop_foldVersionRange' :: VersionRange -> Property -prop_foldVersionRange' range = - normaliseVersionRange srange - === foldVersionRange' anyVersion thisVersion - laterVersion earlierVersion - orLaterVersion orEarlierVersion - (\v _ -> withinVersion v) - (\v _ -> majorBoundVersion v) - unionVersionRanges intersectVersionRanges id - srange - where - srange = stripParensVersionRange range - -prop_isAnyVersion1 :: VersionRange -> Version -> Property -prop_isAnyVersion1 range version = - isAnyVersion range ==> withinRange version range - -prop_isAnyVersion2 :: VersionRange -> Property -prop_isAnyVersion2 range = - isAnyVersion range ==> - foldVersionRange True (\_ -> False) (\_ -> False) (\_ -> False) - (\_ _ -> False) (\_ _ -> False) - (simplifyVersionRange range) - -prop_isNoVersion :: VersionRange -> Version -> Property -prop_isNoVersion range version = - isNoVersion range ==> not (withinRange version range) - -prop_isSpecificVersion1 :: VersionRange -> NonEmptyList Version -> Property -prop_isSpecificVersion1 range (NonEmpty versions) = - isJust version && not (null versions') ==> - allEqual (fromJust version : versions') - where - version = isSpecificVersion range - versions' = filter (`withinRange` range) versions - allEqual xs = and (zipWith (==) xs (tail xs)) - -prop_isSpecificVersion2 :: VersionRange -> Property -prop_isSpecificVersion2 range = - isJust version ==> - foldVersionRange Nothing Just (\_ -> Nothing) (\_ -> Nothing) - (\_ _ -> Nothing) (\_ _ -> Nothing) - (simplifyVersionRange range) - == version - - where - version = isSpecificVersion range - --- | 'simplifyVersionRange' is a semantic identity on 'VersionRange'. 
--- -prop_simplifyVersionRange1 :: VersionRange -> Version -> Bool -prop_simplifyVersionRange1 range version = - withinRange version range == withinRange version (simplifyVersionRange range) - -prop_simplifyVersionRange1' :: VersionRange -> Bool -prop_simplifyVersionRange1' range = - range `equivalentVersionRange` (simplifyVersionRange range) - --- | 'simplifyVersionRange' produces a canonical form for ranges with --- equivalent semantics. --- -prop_simplifyVersionRange2 :: VersionRange -> VersionRange -> Version -> Property -prop_simplifyVersionRange2 r r' v = - r /= r' && simplifyVersionRange r == simplifyVersionRange r' ==> - withinRange v r == withinRange v r' - -prop_simplifyVersionRange2' :: VersionRange -> VersionRange -> Property -prop_simplifyVersionRange2' r r' = - r /= r' && simplifyVersionRange r == simplifyVersionRange r' ==> - r `equivalentVersionRange` r' - ---FIXME: see equivalentVersionRange for details -prop_simplifyVersionRange2'' :: VersionRange -> VersionRange -> Property -prop_simplifyVersionRange2'' r r' = - r /= r' && r `equivalentVersionRange` r' ==> - simplifyVersionRange r == simplifyVersionRange r' - || isNoVersion r - || isNoVersion r' - --------------------- --- VersionIntervals --- - --- | Generating VersionIntervals --- --- This is a tad tricky as VersionIntervals is an abstract type, so we first --- make a local type for generating the internal representation. Then we check --- that this lets us construct valid 'VersionIntervals'. --- - -instance Arbitrary VersionIntervals where - arbitrary = fmap mkVersionIntervals' arbitrary - where - mkVersionIntervals' :: [(Version, Bound)] -> VersionIntervals - mkVersionIntervals' = mkVersionIntervals . 
go version0 - where - go :: Version -> [(Version, Bound)] -> [VersionInterval] - go _ [] = [] - go v [(lv, lb)] = - [(LowerBound (addVersion lv v) lb, NoUpperBound)] - go v ((lv, lb) : (uv, ub) : rest) = - (LowerBound lv' lb, UpperBound uv' ub) : go uv' rest - where - lv' = addVersion v lv - uv' = addVersion lv' uv - - addVersion :: Version -> Version -> Version - addVersion xs ys = mkVersion $ z (versionNumbers xs) (versionNumbers ys) - where - z [] ys' = ys' - z xs' [] = xs' - z (x : xs') (y : ys') = x + y : z xs' ys' - -instance Arbitrary Bound where - arbitrary = elements [ExclusiveBound, InclusiveBound] - --- | Check that our VersionIntervals' arbitrary instance generates intervals --- that satisfies the invariant. --- -prop_gen_intervals1 :: VersionIntervals -> Property -prop_gen_intervals1 i - = label ("length i ≈ 2 ^ " ++ show metric ++ " - 1") - $ xs === ys - where - metric = intLog2 (length xs + 1) - - xs = versionIntervals i - ys = versionIntervals (mkVersionIntervals xs) --- | Check that constructing our intervals type and converting it to a --- 'VersionRange' and then into the true intervals type gives us back --- the exact same sequence of intervals. This tells us that our arbitrary --- instance for 'VersionIntervals'' is ok. --- -prop_gen_intervals2 :: VersionIntervals -> Property -prop_gen_intervals2 intervals = - toVersionIntervals (fromVersionIntervals intervals) === intervals - --- | Check that 'VersionIntervals' models 'VersionRange' via --- 'toVersionIntervals'. --- -prop_to_intervals :: VersionRange -> Version -> Bool -prop_to_intervals range version = - withinRange version range == withinIntervals version intervals - where - intervals = toVersionIntervals range - --- | Check that semantic equality on 'VersionRange's is the same as converting --- to 'VersionIntervals' and doing syntactic equality. 
--- -prop_to_intervals_canonical :: VersionRange -> VersionRange -> Property -prop_to_intervals_canonical r r' = - r /= r' && r `equivalentVersionRange` r' ==> - toVersionIntervals r == toVersionIntervals r' - -prop_to_intervals_canonical' :: VersionRange -> VersionRange -> Property -prop_to_intervals_canonical' r r' = - r /= r' && toVersionIntervals r == toVersionIntervals r' ==> - r `equivalentVersionRange` r' - --- | Check that 'VersionIntervals' models 'VersionRange' via --- 'fromVersionIntervals'. --- -prop_from_intervals :: VersionIntervals -> Version -> Bool -prop_from_intervals intervals version = - withinRange version range == withinIntervals version intervals - where - range = fromVersionIntervals intervals - --- | @'toVersionIntervals' . 'fromVersionIntervals'@ is an exact identity on --- 'VersionIntervals'. --- -prop_to_from_intervals :: VersionIntervals -> Bool -prop_to_from_intervals intervals = - toVersionIntervals (fromVersionIntervals intervals) == intervals - --- | @'fromVersionIntervals' . 'toVersionIntervals'@ is a semantic identity on --- 'VersionRange', though not necessarily a syntactic identity. --- -prop_from_to_intervals :: VersionRange -> Bool -prop_from_to_intervals range = - range' `equivalentVersionRange` range - where - range' = fromVersionIntervals (toVersionIntervals range) - --- | Equivalent of 'prop_from_to_intervals' --- -prop_from_to_intervals' :: VersionRange -> Version -> Bool -prop_from_to_intervals' range version = - withinRange version range' == withinRange version range - where - range' = fromVersionIntervals (toVersionIntervals range) - --- | The semantics of 'unionVersionIntervals' is (||). 
--- -prop_unionVersionIntervals :: VersionIntervals -> VersionIntervals - -> Version -> Bool -prop_unionVersionIntervals is1 is2 v = - withinIntervals v (unionVersionIntervals is1 is2) - == (withinIntervals v is1 || withinIntervals v is2) - --- | 'unionVersionIntervals' is idempotent --- -prop_unionVersionIntervals_idempotent :: VersionIntervals -> Bool -prop_unionVersionIntervals_idempotent = - Laws.idempotent_binary unionVersionIntervals - --- | 'unionVersionIntervals' is commutative --- -prop_unionVersionIntervals_commutative :: VersionIntervals - -> VersionIntervals -> Bool -prop_unionVersionIntervals_commutative = - Laws.commutative unionVersionIntervals - --- | 'unionVersionIntervals' is associative --- -prop_unionVersionIntervals_associative :: VersionIntervals - -> VersionIntervals - -> VersionIntervals -> Bool -prop_unionVersionIntervals_associative = - Laws.associative unionVersionIntervals - --- | The semantics of 'intersectVersionIntervals' is (&&). --- -prop_intersectVersionIntervals :: VersionIntervals -> VersionIntervals - -> Version -> Bool -prop_intersectVersionIntervals is1 is2 v = - withinIntervals v (intersectVersionIntervals is1 is2) - == (withinIntervals v is1 && withinIntervals v is2) - --- | 'intersectVersionIntervals' is idempotent --- -prop_intersectVersionIntervals_idempotent :: VersionIntervals -> Bool -prop_intersectVersionIntervals_idempotent = - Laws.idempotent_binary intersectVersionIntervals - --- | 'intersectVersionIntervals' is commutative --- -prop_intersectVersionIntervals_commutative :: VersionIntervals - -> VersionIntervals -> Bool -prop_intersectVersionIntervals_commutative = - Laws.commutative intersectVersionIntervals - --- | 'intersectVersionIntervals' is associative --- -prop_intersectVersionIntervals_associative :: VersionIntervals - -> VersionIntervals - -> VersionIntervals -> Bool -prop_intersectVersionIntervals_associative = - Laws.associative intersectVersionIntervals - --- | 'unionVersionIntervals' distributes over 
'intersectVersionIntervals' --- -prop_union_intersect_distributive :: Property -prop_union_intersect_distributive = - Laws.distributive_left unionVersionIntervals intersectVersionIntervals - .&. Laws.distributive_right unionVersionIntervals intersectVersionIntervals - --- | 'intersectVersionIntervals' distributes over 'unionVersionIntervals' --- -prop_intersect_union_distributive :: Property -prop_intersect_union_distributive = - Laws.distributive_left intersectVersionIntervals unionVersionIntervals - .&. Laws.distributive_right intersectVersionIntervals unionVersionIntervals - --- | The semantics of 'invertVersionIntervals' is 'not'. --- -prop_invertVersionIntervals :: VersionIntervals - -> Version -> Bool -prop_invertVersionIntervals vi v = - withinIntervals v (invertVersionIntervals vi) - == not (withinIntervals v vi) - --- | Double application of 'invertVersionIntervals' is the identity function -prop_invertVersionIntervalsTwice :: VersionIntervals -> Bool -prop_invertVersionIntervalsTwice vi = - invertVersionIntervals (invertVersionIntervals vi) == vi - - - --------------------------------- --- equivalentVersionRange helper - -prop_equivalentVersionRange :: VersionRange -> VersionRange - -> Version -> Property -prop_equivalentVersionRange range range' version = - equivalentVersionRange range range' && range /= range' ==> - withinRange version range == withinRange version range' - ---FIXME: this is wrong. consider version ranges "<=1" and "<1.0" --- this algorithm cannot distinguish them because there is no version --- that is included by one that is excluded by the other. --- Alternatively we must reconsider the semantics of '<' and '<=' --- in version ranges / version intervals. Perhaps the canonical --- representation should use just < v and interpret "<= v" as "< v.0". 
-equivalentVersionRange :: VersionRange -> VersionRange -> Bool -equivalentVersionRange vr1 vr2 = - let allVersionsUsed = nub (sort (versionsUsed vr1 ++ versionsUsed vr2)) - minPoint = mkVersion [0] - maxPoint | null allVersionsUsed = minPoint - | otherwise = alterVersion (++[1]) (maximum allVersionsUsed) - probeVersions = minPoint : maxPoint - : intermediateVersions allVersionsUsed - - in all (\v -> withinRange v vr1 == withinRange v vr2) probeVersions - - where - versionsUsed = foldVersionRange [] (\x->[x]) (\x->[x]) (\x->[x]) (++) (++) - intermediateVersions (v1:v2:vs) = v1 : intermediateVersion v1 v2 - : intermediateVersions (v2:vs) - intermediateVersions vs = vs - -intermediateVersion :: Version -> Version -> Version -intermediateVersion v1 v2 | v1 >= v2 = error "intermediateVersion: v1 >= v2" -intermediateVersion v1 v2 = - mkVersion (intermediateList (versionNumbers v1) (versionNumbers v2)) - where - intermediateList :: [Int] -> [Int] -> [Int] - intermediateList [] (_:_) = [0] - intermediateList (x:xs) (y:ys) - | x < y = x : xs ++ [0] - | otherwise = x : intermediateList xs ys - -prop_intermediateVersion :: Version -> Version -> Property -prop_intermediateVersion v1 v2 = - (v1 /= v2) && not (adjacentVersions v1 v2) ==> - if v1 < v2 - then let v = intermediateVersion v1 v2 - in (v1 < v && v < v2) - else let v = intermediateVersion v2 v1 - in v1 > v && v > v2 - -adjacentVersions :: Version -> Version -> Bool -adjacentVersions ver1 ver2 = v1 ++ [0] == v2 || v2 ++ [0] == v1 - where - v1 = versionNumbers ver1 - v2 = versionNumbers ver2 - --------------------------------- --- Parsing and pretty printing --- - -prop_parse_disp_inv :: VersionRange -> Property -prop_parse_disp_inv vr = - parseDisp vr === (parseDisp vr >>= parseDisp) - where - parseDisp = simpleParse . display - -prop_parsec_disp_inv :: VersionRange -> Property -prop_parsec_disp_inv vr = - parseDisp vr === (parseDisp vr >>= parseDisp) - where - parseDisp = simpleParsec . 
display - -prop_parse_disp :: VersionRange -> Property -prop_parse_disp vr = counterexample (show (display vr')) $ - fmap s (simpleParse (display vr')) === Just vr' - .&&. - fmap s (simpleParsec (display vr')) === Just vr' - where - -- we have to strip parens, because arbitrary 'VersionRange' may have - -- too little parens constructors. - s = stripParensVersionRange - vr' = s vr - -prop_parse_disp1 :: VersionRange -> Bool -prop_parse_disp1 vr = - fmap stripParens (simpleParse (display vr)) == Just (normaliseVersionRange vr) - where - stripParens :: VersionRange -> VersionRange - stripParens (VersionRangeParens v) = stripParens v - stripParens (UnionVersionRanges v1 v2) = - UnionVersionRanges (stripParens v1) (stripParens v2) - stripParens (IntersectVersionRanges v1 v2) = - IntersectVersionRanges (stripParens v1) (stripParens v2) - stripParens v = v - -prop_parse_disp2 :: VersionRange -> Property -prop_parse_disp2 vr = - let b = fmap (display :: VersionRange -> String) (simpleParse (display vr)) - a = Just (display vr) - in - counterexample ("Expected: " ++ show a) $ - counterexample ("But got: " ++ show b) $ - b == a - -prop_parse_disp3 :: VersionRange -> Property -prop_parse_disp3 vr = - let a = Just (display vr) - b = fmap displayRaw (simpleParse (display vr)) - in - counterexample ("Expected: " ++ show a) $ - counterexample ("But got: " ++ show b) $ - b == a - -prop_parse_disp4 :: VersionRange -> Property -prop_parse_disp4 vr = - let a = Just vr - b = (simpleParse (display vr)) - in - counterexample ("Expected: " ++ show a) $ - counterexample ("But got: " ++ show b) $ - b == a - -prop_parse_disp5 :: VersionRange -> Property -prop_parse_disp5 vr = - let a = Just vr - b = simpleParse (displayRaw vr) - in - counterexample ("Expected: " ++ show a) $ - counterexample ("But got: " ++ show b) $ - b == a - -displayRaw :: VersionRange -> String -displayRaw = - Disp.render - . 
foldVersionRange' -- precedence: - -- All the same as the usual pretty printer, except for the parens - ( Disp.text "-any") - (\v -> Disp.text "==" <<>> disp v) - (\v -> Disp.char '>' <<>> disp v) - (\v -> Disp.char '<' <<>> disp v) - (\v -> Disp.text ">=" <<>> disp v) - (\v -> Disp.text "<=" <<>> disp v) - (\v _ -> Disp.text "==" <<>> dispWild v) - (\v _ -> Disp.text "^>=" <<>> disp v) - (\r1 r2 -> r1 <+> Disp.text "||" <+> r2) - (\r1 r2 -> r1 <+> Disp.text "&&" <+> r2) - (\r -> Disp.parens r) -- parens - - where - dispWild v = - Disp.hcat (Disp.punctuate (Disp.char '.') - (map Disp.int (versionNumbers v))) - <<>> Disp.text ".*" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.0.1/tests/UnitTests.hs 2018-10-17 15:59:03.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.0.1/tests/UnitTests.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,96 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -module Main - ( main - ) where - -import Test.Tasty -import Test.Tasty.Options - -import Data.Proxy -import Data.Typeable - -import Distribution.Simple.Utils -import Distribution.Verbosity -import Distribution.Compat.Time - -import qualified UnitTests.Distribution.Compat.CreatePipe -import qualified UnitTests.Distribution.Compat.ReadP -import qualified UnitTests.Distribution.Compat.Time -import qualified UnitTests.Distribution.Compat.Graph -import qualified UnitTests.Distribution.Simple.Glob -import qualified UnitTests.Distribution.Simple.Program.Internal -import qualified UnitTests.Distribution.Simple.Utils -import qualified UnitTests.Distribution.System -import qualified UnitTests.Distribution.Utils.Generic -import qualified UnitTests.Distribution.Utils.NubList -import qualified UnitTests.Distribution.Utils.ShortText -import qualified UnitTests.Distribution.Version 
(versionTests) -import qualified UnitTests.Distribution.SPDX (spdxTests) -import qualified UnitTests.Distribution.Types.GenericPackageDescription - -tests :: Int -> TestTree -tests mtimeChangeCalibrated = - askOption $ \(OptionMtimeChangeDelay mtimeChangeProvided) -> - let mtimeChange = if mtimeChangeProvided /= 0 - then mtimeChangeProvided - else mtimeChangeCalibrated - in - testGroup "Unit Tests" $ - [ testGroup "Distribution.Compat.CreatePipe" - UnitTests.Distribution.Compat.CreatePipe.tests - , testGroup "Distribution.Compat.ReadP" - UnitTests.Distribution.Compat.ReadP.tests - , testGroup "Distribution.Compat.Time" - (UnitTests.Distribution.Compat.Time.tests mtimeChange) - , testGroup "Distribution.Compat.Graph" - UnitTests.Distribution.Compat.Graph.tests - , testGroup "Distribution.Simple.Glob" - UnitTests.Distribution.Simple.Glob.tests - , testGroup "Distribution.Simple.Program.Internal" - UnitTests.Distribution.Simple.Program.Internal.tests - , testGroup "Distribution.Simple.Utils" - UnitTests.Distribution.Simple.Utils.tests - , testGroup "Distribution.Utils.Generic" - UnitTests.Distribution.Utils.Generic.tests - , testGroup "Distribution.Utils.NubList" - UnitTests.Distribution.Utils.NubList.tests - , testGroup "Distribution.Utils.ShortText" - UnitTests.Distribution.Utils.ShortText.tests - , testGroup "Distribution.System" - UnitTests.Distribution.System.tests - , testGroup "Distribution.Types.GenericPackageDescription" - UnitTests.Distribution.Types.GenericPackageDescription.tests - , testGroup "Distribution.Version" - UnitTests.Distribution.Version.versionTests - , testGroup "Distribution.SPDX" - UnitTests.Distribution.SPDX.spdxTests - ] - -extraOptions :: [OptionDescription] -extraOptions = - [ Option (Proxy :: Proxy OptionMtimeChangeDelay) - ] - -newtype OptionMtimeChangeDelay = OptionMtimeChangeDelay Int - deriving Typeable - -instance IsOption OptionMtimeChangeDelay where - defaultValue = OptionMtimeChangeDelay 0 - parseValue = fmap 
OptionMtimeChangeDelay . safeRead - optionName = return "mtime-change-delay" - optionHelp = return $ "How long to wait before attempting to detect" - ++ "file modification, in microseconds" - -main :: IO () -main = do - (mtimeChange, mtimeChange') <- calibrateMtimeChangeDelay - let toMillis :: Int -> Double - toMillis x = fromIntegral x / 1000.0 - notice normal $ "File modification time resolution calibration completed, " - ++ "maximum delay observed: " - ++ (show . toMillis $ mtimeChange ) ++ " ms. " - ++ "Will be using delay of " ++ (show . toMillis $ mtimeChange') - ++ " for test runs." - defaultMainWithIngredients - (includingOptions extraOptions : defaultIngredients) - (tests mtimeChange') diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Cabal.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Cabal.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Cabal.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Cabal.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,619 @@ +name: Cabal +version: 2.4.1.0 +copyright: 2003-2018, Cabal Development Team (see AUTHORS file) +license: BSD3 +license-file: LICENSE +author: Cabal Development Team +maintainer: cabal-devel@haskell.org +homepage: http://www.haskell.org/cabal/ +bug-reports: https://github.com/haskell/cabal/issues +synopsis: A framework for packaging Haskell software +description: + The Haskell Common Architecture for Building Applications and + Libraries: a framework defining a common interface for authors to more + easily build their Haskell applications in a portable way. + . + The Haskell Cabal is part of a larger infrastructure for distributing, + organizing, and cataloging Haskell libraries and tools. +category: Distribution +cabal-version: >=1.10 +build-type: Simple +-- If we use a new Cabal feature, this needs to be changed to Custom so +-- we can bootstrap. 
+ +extra-source-files: + README.md tests/README.md ChangeLog.md + doc/bugs-and-stability.rst doc/concepts-and-development.rst + doc/conf.py doc/config-and-install.rst doc/developing-packages.rst + doc/images/Cabal-dark.png doc/index.rst doc/installing-packages.rst + doc/intro.rst doc/misc.rst doc/nix-local-build-overview.rst + doc/nix-local-build.rst doc/file-format-changelog.rst doc/README.md + doc/references.inc + + -- Generated with 'make gen-extra-source-files' + -- Do NOT edit this section manually; instead, run the script. + -- BEGIN gen-extra-source-files + tests/ParserTests/errors/common1.cabal + tests/ParserTests/errors/common1.errors + tests/ParserTests/errors/common2.cabal + tests/ParserTests/errors/common2.errors + tests/ParserTests/errors/common3.cabal + tests/ParserTests/errors/common3.errors + tests/ParserTests/errors/forward-compat.cabal + tests/ParserTests/errors/forward-compat.errors + tests/ParserTests/errors/forward-compat2.cabal + tests/ParserTests/errors/forward-compat2.errors + tests/ParserTests/errors/forward-compat3.cabal + tests/ParserTests/errors/forward-compat3.errors + tests/ParserTests/errors/issue-5055-2.cabal + tests/ParserTests/errors/issue-5055-2.errors + tests/ParserTests/errors/issue-5055.cabal + tests/ParserTests/errors/issue-5055.errors + tests/ParserTests/errors/leading-comma.cabal + tests/ParserTests/errors/leading-comma.errors + tests/ParserTests/errors/noVersion.cabal + tests/ParserTests/errors/noVersion.errors + tests/ParserTests/errors/noVersion2.cabal + tests/ParserTests/errors/noVersion2.errors + tests/ParserTests/errors/range-ge-wild.cabal + tests/ParserTests/errors/range-ge-wild.errors + tests/ParserTests/errors/spdx-1.cabal + tests/ParserTests/errors/spdx-1.errors + tests/ParserTests/errors/spdx-2.cabal + tests/ParserTests/errors/spdx-2.errors + tests/ParserTests/errors/spdx-3.cabal + tests/ParserTests/errors/spdx-3.errors + tests/ParserTests/ipi/Includes2.cabal + tests/ParserTests/ipi/Includes2.expr + 
tests/ParserTests/ipi/Includes2.format + tests/ParserTests/ipi/internal-preprocessor-test.cabal + tests/ParserTests/ipi/internal-preprocessor-test.expr + tests/ParserTests/ipi/internal-preprocessor-test.format + tests/ParserTests/ipi/issue-2276-ghc-9885.cabal + tests/ParserTests/ipi/issue-2276-ghc-9885.expr + tests/ParserTests/ipi/issue-2276-ghc-9885.format + tests/ParserTests/ipi/transformers.cabal + tests/ParserTests/ipi/transformers.expr + tests/ParserTests/ipi/transformers.format + tests/ParserTests/regressions/MiniAgda.cabal + tests/ParserTests/regressions/MiniAgda.check + tests/ParserTests/regressions/Octree-0.5.cabal + tests/ParserTests/regressions/Octree-0.5.expr + tests/ParserTests/regressions/Octree-0.5.format + tests/ParserTests/regressions/bad-glob-syntax.cabal + tests/ParserTests/regressions/bad-glob-syntax.check + tests/ParserTests/regressions/cc-options-with-optimization.cabal + tests/ParserTests/regressions/cc-options-with-optimization.check + tests/ParserTests/regressions/common.cabal + tests/ParserTests/regressions/common.expr + tests/ParserTests/regressions/common.format + tests/ParserTests/regressions/common2.cabal + tests/ParserTests/regressions/common2.expr + tests/ParserTests/regressions/common2.format + tests/ParserTests/regressions/cxx-options-with-optimization.cabal + tests/ParserTests/regressions/cxx-options-with-optimization.check + tests/ParserTests/regressions/elif.cabal + tests/ParserTests/regressions/elif.expr + tests/ParserTests/regressions/elif.format + tests/ParserTests/regressions/elif2.cabal + tests/ParserTests/regressions/elif2.expr + tests/ParserTests/regressions/elif2.format + tests/ParserTests/regressions/encoding-0.8.cabal + tests/ParserTests/regressions/encoding-0.8.expr + tests/ParserTests/regressions/encoding-0.8.format + tests/ParserTests/regressions/extensions-paths-5054.cabal + tests/ParserTests/regressions/extensions-paths-5054.check + tests/ParserTests/regressions/generics-sop.cabal + 
tests/ParserTests/regressions/generics-sop.expr + tests/ParserTests/regressions/generics-sop.format + tests/ParserTests/regressions/ghc-option-j.cabal + tests/ParserTests/regressions/ghc-option-j.check + tests/ParserTests/regressions/haddock-api-2.18.1-check.cabal + tests/ParserTests/regressions/haddock-api-2.18.1-check.check + tests/ParserTests/regressions/issue-5055.cabal + tests/ParserTests/regressions/issue-5055.expr + tests/ParserTests/regressions/issue-5055.format + tests/ParserTests/regressions/issue-774.cabal + tests/ParserTests/regressions/issue-774.check + tests/ParserTests/regressions/issue-774.expr + tests/ParserTests/regressions/issue-774.format + tests/ParserTests/regressions/leading-comma.cabal + tests/ParserTests/regressions/leading-comma.expr + tests/ParserTests/regressions/leading-comma.format + tests/ParserTests/regressions/noVersion.cabal + tests/ParserTests/regressions/noVersion.expr + tests/ParserTests/regressions/noVersion.format + tests/ParserTests/regressions/nothing-unicode.cabal + tests/ParserTests/regressions/nothing-unicode.check + tests/ParserTests/regressions/nothing-unicode.expr + tests/ParserTests/regressions/nothing-unicode.format + tests/ParserTests/regressions/pre-1.6-glob.cabal + tests/ParserTests/regressions/pre-1.6-glob.check + tests/ParserTests/regressions/pre-2.4-globstar.cabal + tests/ParserTests/regressions/pre-2.4-globstar.check + tests/ParserTests/regressions/shake.cabal + tests/ParserTests/regressions/shake.expr + tests/ParserTests/regressions/shake.format + tests/ParserTests/regressions/spdx-1.cabal + tests/ParserTests/regressions/spdx-1.expr + tests/ParserTests/regressions/spdx-1.format + tests/ParserTests/regressions/spdx-2.cabal + tests/ParserTests/regressions/spdx-2.expr + tests/ParserTests/regressions/spdx-2.format + tests/ParserTests/regressions/spdx-3.cabal + tests/ParserTests/regressions/spdx-3.expr + tests/ParserTests/regressions/spdx-3.format + tests/ParserTests/regressions/th-lift-instances.cabal + 
tests/ParserTests/regressions/th-lift-instances.expr + tests/ParserTests/regressions/th-lift-instances.format + tests/ParserTests/regressions/wl-pprint-indef.cabal + tests/ParserTests/regressions/wl-pprint-indef.expr + tests/ParserTests/regressions/wl-pprint-indef.format + tests/ParserTests/warnings/bom.cabal + tests/ParserTests/warnings/bool.cabal + tests/ParserTests/warnings/deprecatedfield.cabal + tests/ParserTests/warnings/doubledash.cabal + tests/ParserTests/warnings/extratestmodule.cabal + tests/ParserTests/warnings/gluedop.cabal + tests/ParserTests/warnings/multiplesingular.cabal + tests/ParserTests/warnings/nbsp.cabal + tests/ParserTests/warnings/newsyntax.cabal + tests/ParserTests/warnings/oldsyntax.cabal + tests/ParserTests/warnings/subsection.cabal + tests/ParserTests/warnings/tab.cabal + tests/ParserTests/warnings/trailingfield.cabal + tests/ParserTests/warnings/unknownfield.cabal + tests/ParserTests/warnings/unknownsection.cabal + tests/ParserTests/warnings/utf8.cabal + tests/ParserTests/warnings/versiontag.cabal + tests/hackage/check.sh + tests/hackage/download.sh + tests/hackage/unpack.sh + tests/misc/ghc-supported-languages.hs + -- END gen-extra-source-files + +source-repository head + type: git + location: https://github.com/haskell/cabal/ + subdir: Cabal + +flag bundled-binary-generic + default: False + +library + build-depends: + array >= 0.4.0.1 && < 0.6, + base >= 4.6 && < 5, + bytestring >= 0.10.0.0 && < 0.11, + containers >= 0.5.0.0 && < 0.7, + deepseq >= 1.3.0.1 && < 1.5, + directory >= 1.2 && < 1.4, + filepath >= 1.3.0.1 && < 1.5, + pretty >= 1.1.1 && < 1.2, + process >= 1.1.0.2 && < 1.7, + time >= 1.4.0.1 && < 1.10 + + if flag(bundled-binary-generic) + build-depends: binary >= 0.5.1.1 && < 0.7 + else + build-depends: binary >= 0.7 && < 0.9 + + if os(windows) + build-depends: Win32 >= 2.3.0.0 && < 2.9 + else + build-depends: unix >= 2.6.0.0 && < 2.8 + + ghc-options: -Wall -fno-ignore-asserts -fwarn-tabs + if impl(ghc >= 8.0) + ghc-options: 
-Wcompat -Wnoncanonical-monad-instances + -Wnoncanonical-monadfail-instances + + exposed-modules: + Distribution.Backpack + Distribution.Backpack.Configure + Distribution.Backpack.ComponentsGraph + Distribution.Backpack.ConfiguredComponent + Distribution.Backpack.DescribeUnitId + Distribution.Backpack.FullUnitId + Distribution.Backpack.LinkedComponent + Distribution.Backpack.ModSubst + Distribution.Backpack.ModuleShape + Distribution.Backpack.PreModuleShape + Distribution.CabalSpecVersion + Distribution.Utils.IOData + Distribution.Utils.LogProgress + Distribution.Utils.MapAccum + Distribution.Compat.CreatePipe + Distribution.Compat.Directory + Distribution.Compat.Environment + Distribution.Compat.Exception + Distribution.Compat.Graph + Distribution.Compat.Internal.TempFile + Distribution.Compat.Newtype + Distribution.Compat.Prelude.Internal + Distribution.Compat.ReadP + Distribution.Compat.Semigroup + Distribution.Compat.Stack + Distribution.Compat.Time + Distribution.Compat.DList + Distribution.Compiler + Distribution.InstalledPackageInfo + Distribution.Types.AbiDependency + Distribution.Types.ExposedModule + Distribution.Types.InstalledPackageInfo + Distribution.Types.InstalledPackageInfo.FieldGrammar + Distribution.License + Distribution.Make + Distribution.ModuleName + Distribution.Package + Distribution.PackageDescription + Distribution.PackageDescription.Check + Distribution.PackageDescription.Configuration + Distribution.PackageDescription.PrettyPrint + Distribution.PackageDescription.Utils + Distribution.ParseUtils + Distribution.PrettyUtils + Distribution.ReadE + Distribution.Simple + Distribution.Simple.Bench + Distribution.Simple.Build + Distribution.Simple.Build.Macros + Distribution.Simple.Build.PathsModule + Distribution.Simple.BuildPaths + Distribution.Simple.BuildTarget + Distribution.Simple.BuildToolDepends + Distribution.Simple.CCompiler + Distribution.Simple.Command + Distribution.Simple.Compiler + Distribution.Simple.Configure + 
Distribution.Simple.Flag + Distribution.Simple.GHC + Distribution.Simple.GHCJS + Distribution.Simple.Haddock + Distribution.Simple.Doctest + Distribution.Simple.Glob + Distribution.Simple.HaskellSuite + Distribution.Simple.Hpc + Distribution.Simple.Install + Distribution.Simple.InstallDirs + Distribution.Simple.LocalBuildInfo + Distribution.Simple.PackageIndex + Distribution.Simple.PreProcess + Distribution.Simple.PreProcess.Unlit + Distribution.Simple.Program + Distribution.Simple.Program.Ar + Distribution.Simple.Program.Builtin + Distribution.Simple.Program.Db + Distribution.Simple.Program.Find + Distribution.Simple.Program.GHC + Distribution.Simple.Program.HcPkg + Distribution.Simple.Program.Hpc + Distribution.Simple.Program.Internal + Distribution.Simple.Program.Ld + Distribution.Simple.Program.ResponseFile + Distribution.Simple.Program.Run + Distribution.Simple.Program.Script + Distribution.Simple.Program.Strip + Distribution.Simple.Program.Types + Distribution.Simple.Register + Distribution.Simple.Setup + Distribution.Simple.SrcDist + Distribution.Simple.Test + Distribution.Simple.Test.ExeV10 + Distribution.Simple.Test.LibV09 + Distribution.Simple.Test.Log + Distribution.Simple.UHC + Distribution.Simple.UserHooks + Distribution.Simple.Utils + Distribution.SPDX + Distribution.SPDX.License + Distribution.SPDX.LicenseId + Distribution.SPDX.LicenseExceptionId + Distribution.SPDX.LicenseExpression + Distribution.SPDX.LicenseListVersion + Distribution.SPDX.LicenseReference + Distribution.System + Distribution.TestSuite + Distribution.Text + Distribution.Pretty + Distribution.Types.AbiHash + Distribution.Types.AnnotatedId + Distribution.Types.Benchmark + Distribution.Types.BenchmarkInterface + Distribution.Types.BenchmarkType + Distribution.Types.BuildInfo + Distribution.Types.BuildType + Distribution.Types.ComponentInclude + Distribution.Types.Dependency + Distribution.Types.ExeDependency + Distribution.Types.LegacyExeDependency + 
Distribution.Types.PkgconfigDependency + Distribution.Types.DependencyMap + Distribution.Types.ComponentId + Distribution.Types.MungedPackageId + Distribution.Types.PackageId + Distribution.Types.UnitId + Distribution.Types.Executable + Distribution.Types.ExecutableScope + Distribution.Types.Library + Distribution.Types.ForeignLib + Distribution.Types.ForeignLibType + Distribution.Types.ForeignLibOption + Distribution.Types.Module + Distribution.Types.ModuleReexport + Distribution.Types.ModuleRenaming + Distribution.Types.ComponentName + Distribution.Types.MungedPackageName + Distribution.Types.PackageName + Distribution.Types.PkgconfigName + Distribution.Types.UnqualComponentName + Distribution.Types.IncludeRenaming + Distribution.Types.Mixin + Distribution.Types.SetupBuildInfo + Distribution.Types.TestSuite + Distribution.Types.TestSuiteInterface + Distribution.Types.TestType + Distribution.Types.GenericPackageDescription + Distribution.Types.Condition + Distribution.Types.CondTree + Distribution.Types.HookedBuildInfo + Distribution.Types.PackageDescription + Distribution.Types.SourceRepo + Distribution.Types.Component + Distribution.Types.ComponentLocalBuildInfo + Distribution.Types.LocalBuildInfo + Distribution.Types.ComponentRequestedSpec + Distribution.Types.TargetInfo + Distribution.Types.Version + Distribution.Types.VersionRange + Distribution.Types.VersionInterval + Distribution.Utils.Generic + Distribution.Utils.NubList + Distribution.Utils.ShortText + Distribution.Utils.Progress + Distribution.Verbosity + Distribution.Version + Language.Haskell.Extension + Distribution.Compat.Binary + + -- Parsec parser-related modules + build-depends: + -- transformers-0.4.0.0 doesn't have record syntax e.g. 
for Identity + -- See also https://github.com/ekmett/transformers-compat/issues/35 + transformers (>= 0.3 && < 0.4) || (>=0.4.1.0 && <0.6), + mtl >= 2.1 && < 2.3, + text >= 1.2.3.0 && < 1.3, + parsec >= 3.1.13.0 && < 3.2 + exposed-modules: + Distribution.Compat.Parsing + Distribution.Compat.CharParsing + Distribution.FieldGrammar + Distribution.FieldGrammar.Class + Distribution.FieldGrammar.FieldDescrs + Distribution.FieldGrammar.Parsec + Distribution.FieldGrammar.Pretty + Distribution.PackageDescription.FieldGrammar + Distribution.PackageDescription.Parsec + Distribution.PackageDescription.Quirks + Distribution.Parsec.Class + Distribution.Parsec.Common + Distribution.Parsec.ConfVar + Distribution.Parsec.Field + Distribution.Parsec.FieldLineStream + Distribution.Parsec.Lexer + Distribution.Parsec.LexerMonad + Distribution.Parsec.Newtypes + Distribution.Parsec.ParseResult + Distribution.Parsec.Parser + + -- Lens functionality + exposed-modules: + Distribution.Compat.Lens + Distribution.Types.Lens + Distribution.Types.Benchmark.Lens + Distribution.Types.BuildInfo.Lens + Distribution.Types.Executable.Lens + Distribution.Types.ForeignLib.Lens + Distribution.Types.GenericPackageDescription.Lens + Distribution.Types.InstalledPackageInfo.Lens + Distribution.Types.Library.Lens + Distribution.Types.PackageDescription.Lens + Distribution.Types.PackageId.Lens + Distribution.Types.SetupBuildInfo.Lens + Distribution.Types.SourceRepo.Lens + Distribution.Types.TestSuite.Lens + + other-modules: + Distribution.Backpack.PreExistingComponent + Distribution.Backpack.ReadyComponent + Distribution.Backpack.MixLink + Distribution.Backpack.ModuleScope + Distribution.Backpack.UnifyM + Distribution.Backpack.Id + Distribution.Utils.UnionFind + Distribution.Utils.Base62 + Distribution.Compat.CopyFile + Distribution.Compat.GetShortPathName + Distribution.Compat.MonadFail + Distribution.Compat.Prelude + Distribution.Compat.SnocList + Distribution.GetOpt + Distribution.Lex + 
Distribution.Utils.String + Distribution.Simple.GHC.EnvironmentParser + Distribution.Simple.GHC.Internal + Distribution.Simple.GHC.ImplInfo + Paths_Cabal + + if flag(bundled-binary-generic) + other-modules: + Distribution.Compat.Binary.Class + Distribution.Compat.Binary.Generic + + default-language: Haskell2010 + other-extensions: + BangPatterns + CPP + DefaultSignatures + DeriveDataTypeable + DeriveFoldable + DeriveFunctor + DeriveGeneric + DeriveTraversable + ExistentialQuantification + FlexibleContexts + FlexibleInstances + GeneralizedNewtypeDeriving + ImplicitParams + KindSignatures + NondecreasingIndentation + OverloadedStrings + RankNTypes + RecordWildCards + ScopedTypeVariables + StandaloneDeriving + Trustworthy + TypeFamilies + TypeOperators + TypeSynonymInstances + UndecidableInstances + + if impl(ghc >= 7.11) + other-extensions: PatternSynonyms + +-- Small, fast running tests. +test-suite unit-tests + type: exitcode-stdio-1.0 + hs-source-dirs: tests + other-modules: + Test.Laws + Test.QuickCheck.Utils + UnitTests.Distribution.Compat.CreatePipe + UnitTests.Distribution.Compat.ReadP + UnitTests.Distribution.Compat.Time + UnitTests.Distribution.Compat.Graph + UnitTests.Distribution.Simple.Glob + UnitTests.Distribution.Simple.Program.Internal + UnitTests.Distribution.Simple.Utils + UnitTests.Distribution.SPDX + UnitTests.Distribution.System + UnitTests.Distribution.Types.GenericPackageDescription + UnitTests.Distribution.Utils.Generic + UnitTests.Distribution.Utils.NubList + UnitTests.Distribution.Utils.ShortText + UnitTests.Distribution.Version + main-is: UnitTests.hs + build-depends: + array, + base, + bytestring, + containers, + directory, + filepath, + integer-logarithms >= 1.0.2 && <1.1, + tasty >= 1.1.0.3 && < 1.2, + tasty-hunit, + tasty-quickcheck, + tagged, + temporary, + text, + pretty, + QuickCheck >= 2.11.3 && < 2.12, + Cabal + ghc-options: -Wall + default-language: Haskell2010 + +test-suite parser-tests + type: exitcode-stdio-1.0 + hs-source-dirs: 
tests + main-is: ParserTests.hs + build-depends: + base, + base-compat >=0.10.4 && <0.11, + bytestring, + filepath, + tasty >= 1.1.0.3 && < 1.2, + tasty-hunit, + tasty-quickcheck, + tasty-golden >=2.3.1.1 && <2.4, + Diff >=0.3.4 && <0.4, + Cabal + ghc-options: -Wall + default-language: Haskell2010 + + if impl(ghc >= 7.8) + build-depends: + tree-diff >= 0.0.1 && <0.1 + other-modules: + Instances.TreeDiff + Instances.TreeDiff.Language + Instances.TreeDiff.SPDX + Instances.TreeDiff.Version + +test-suite check-tests + type: exitcode-stdio-1.0 + hs-source-dirs: tests + main-is: CheckTests.hs + build-depends: + base, + bytestring, + filepath, + tasty >= 1.1.0.3 && < 1.2, + tasty-golden >=2.3.1.1 && <2.4, + Diff >=0.3.4 && <0.4, + Cabal + ghc-options: -Wall + default-language: Haskell2010 + +test-suite custom-setup-tests + type: exitcode-stdio-1.0 + hs-source-dirs: tests/custom-setup + main-is: CustomSetupTests.hs + other-modules: + CabalDoctestSetup + IdrisSetup + build-depends: + Cabal, + base, + directory, + filepath, + process + default-language: Haskell2010 + +test-suite hackage-tests + type: exitcode-stdio-1.0 + main-is: HackageTests.hs + + -- TODO: need to get 01-index.tar on appveyor + if os(windows) + buildable: False + + hs-source-dirs: tests + + build-depends: + base, + Cabal, + bytestring, + deepseq, + containers, + directory, + filepath + + build-depends: + base-compat >=0.10.4 && <0.11, + base-orphans >=0.6 && <0.9, + optparse-applicative >=0.13.2.0 && <0.15, + tar >=0.5.0.3 && <0.6 + + if impl(ghc >= 7.8) + build-depends: + tree-diff >= 0.0.1 && <0.1 + other-modules: + Instances.TreeDiff + Instances.TreeDiff.Language + Instances.TreeDiff.SPDX + Instances.TreeDiff.Version + + ghc-options: -Wall -rtsopts -threaded + default-extensions: CPP + default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/ChangeLog.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/ChangeLog.md --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/ChangeLog.md 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/ChangeLog.md 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,888 @@ +### 2.4.1.0 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) November 2018 + + * Warnings in autogenerated files are now silenced + ([#5678](https://github.com/haskell/cabal/pulls/5678)). + * Improved recompilation avoidance, especially when using GHC 8.6 + ([#5589](https://github.com/haskell/cabal/pulls/5589)). + * Do not error on empty packagedbs in `getInstalledPackages` + ([#5516](https://github.com/haskell/cabal/issues/5516)). + + +### 2.4.0.1 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) September 2018 + + * Allow arguments to be passed to `Setup.hs haddock` for `build-type:configure` + ([#5503](https://github.com/haskell/cabal/issues/5503)). + +# 2.4.0.0 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) September 2018 + + * Due to [#5119](https://github.com/haskell/cabal/issues/5119), the + `cabal check` warning for bounds on internal libraries has been + disabled. + * `Distribution.Simple.Haddock` now checks to ensure that it + does not erroneously call Haddock with no target modules. + ([#5232](https://github.com/haskell/cabal/issues/5232), + [#5459](https://github.com/haskell/cabal/issues/5459)). + * Add `getting` (less general than `to`) Lens combinator, + `non`) and an optics to access the modules in a component + of a `PackageDescription` by the `ComponentName`: + `componentBuildInfo` and `componentModules` + * Add `readGhcEnvironmentFile` to parse GHC environment files. + * Drop support for GHC 7.4, since it is out of our support window + (and has been for over a year!) + * Deprecate `preSDist`, `sDistHook`, and `postSDist` in service of + `new-sdist`, since they violate key invariants of the new-build + ecosystem. Use `autogen-modules` and `build-tool-depends` instead. 
+ ([#5389](https://github.com/haskell/cabal/pull/5389)). + * Added `--repl-options` flag to `Setup repl` used to pass flags to the + underlying repl without affecting the `LocalBuildInfo` + ([#4247](https://github.com/haskell/cabal/issues/4247), + [#5287](https://github.com/haskell/cabal/pull/5287)) + * `KnownExtension`: added new extensions `BlockArguments` + ([#5101](https://github.com/haskell/cabal/issues/5101)), + `NumericUnderscores` + ([#5130]((https://github.com/haskell/cabal/issues/5130)), + `QuantifiedConstraints`, and `StarIsType`. + * `buildDepends` is removed from `PackageDescription`. It had long been + uselessly hanging about as top-level build-depends already got put + into per-component condition trees anyway. Now it's finally been put + out of its misery + ([#4383](https://github.com/haskell/cabal/issues/4283)). + * Added `Eta` to `CompilerFlavor` and to known compilers. + * `cabal haddock` now generates per-component documentation + ([#5226](https://github.com/haskell/cabal/issues/5226)). + * Wildcard improvements: + * Allow `**` wildcards in `data-files`, `extra-source-files` and + `extra-doc-files`. These allow a limited form of recursive + matching, and require `cabal-version: 2.4`. + ([#5284](https://github.com/haskell/cabal/issues/5284), + [#3178](https://github.com/haskell/cabal/issues/3178), et al.) + * With `cabal-version: 2.4`, when matching a wildcard, the + requirement for the full extension to match exactly has been + loosened. Instead, if the wildcard's extension is a suffix of the + file's extension, the file will be selected. For example, + previously `foo.en.html` would not match `*.html`, and + `foo.solaris.tar.gz` would not match `*.tar.gz`, but now both + do. 
This may lead to files unexpectedly being included by `sdist`; + please audit your package descriptions if you rely on this + behaviour to keep sensitive data out of distributed packages + ([#5372](https://github.com/haskell/cabal/pull/5372), + [#784](https://github.com/haskell/cabal/issues/784), + [#5057](https://github.com/haskell/cabal/issues/5057)). + * Wildcard syntax errors (misplaced `*`, etc), wildcards that + refer to missing directoies, and wildcards that do not match + anything are now all detected by `cabal check`. + * Wildcard ('globbing') functions have been moved from + `Distribution.Simple.Utils` to `Distribution.Simple.Glob` and + have been refactored. + * Fixed `cxx-options` and `cxx-sources` buildinfo fields for + separate compilation of C++ source files to correctly build and link + non-library components ([#5309](https://github.com/haskell/cabal/issues/5309)). + * Reduced warnings generated by hsc2hs and c2hs when `cxx-options` field + is present in a component. + * `cabal check` now warns if `-j` is used in `ghc-options` in a Cabal + file. ([#5277](https://github.com/haskell/cabal/issues/5277)) + * `install-includes` now works as expected with foreign libraries + ([#5302](https://github.com/haskell/cabal/issues/5299)). + * Removed support for JHC. + * Options listed in `ghc-options`, `cc-options`, `ld-options`, + `cxx-options`, `cpp-options` are not deduplicated anymore + ([#4449](https://github.com/haskell/cabal/issues/4449)). + * Deprecated `cabal hscolour` in favour of `cabal haddock --hyperlink-source` ([#5236](https://github.com/haskell/cabal/pull/5236/)). + * Recognize `powerpc64le` as architecture PPC64. + * Cabal now deduplicates more `-I` and `-L` and flags to avoid `E2BIG` + ([#5356](https://github.com/haskell/cabal/issues/5356)). 
+ * With `build-type: configure`, avoid using backslashes to delimit + path components on Windows and warn about other unsafe characters + in the path to the source directory on all platforms + ([#5386](https://github.com/haskell/cabal/issues/5386)). + * `Distribution.PackageDescription.Check.checkPackageFiles` now + accepts a `Verbosity` argument. + * Added a parameter to + `Distribution.Backpack.ConfiguredComponent.toConfiguredComponent` in order to fix + [#5409](https://github.com/haskell/cabal/issues/5409). + * Partially silence `abi-depends` warnings + ([#5465](https://github.com/haskell/cabal/issues/5465)). + * Foreign libraries are now linked against the threaded RTS when the + 'ghc-options: -threaded' flag is used + ([#5431](https://github.com/haskell/cabal/pull/5431)). + * Pass command line arguments to `hsc2hs` using response files when possible + ([#3122](https://github.com/haskell/cabal/issues/3122)). + +---- + +## 2.2.0.1 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) March 2018 + + * Fix `checkPackageFiles` for relative directories ([#5206](https://github.com/haskell/cabal/issues/5206)) + + +# 2.2.0.0 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) March 2018 + + * The 2.2 migration guide gives advice on adapting Custom setup + scripts to backwards-incompatible changes in this release: + https://github.com/haskell/cabal/wiki/2.2-migration-guide. + * New Parsec-based parser for `.cabal` files is now the + default. This brings memory consumption and speed improvements, as + well as making new syntax extensions easier to implement. + * Support for common stanzas (#4751). + * Added elif-conditionals to `.cabal` syntax (#4750). + * The package license information can now be specified using the + SPDX syntax. This requires setting `cabal-version` to 2.2+ (#2547, + #5050). + * Support for GHC's numeric -g debug levels (#4673). 
+ * Compilation with section splitting is now supported via the + `--enable-split-sections` flag (#4819) + * Fields with mandatory commas (e.g. build-depends) may now have a + leading or a trailing comma (either one, not both) (#4953) + * Added `virtual-modules` field, to allow modules that are not built + but registered (#4875). + * Use better defaulting for `build-type`; rename `PackageDescription`'s + `buildType` field to `buildTypeRaw` and introduce new `buildType` + function (#4958) + * `D.T.PackageDescription.allBuildInfo` now returns all build infos, not + only for buildable components (#5087). + * Removed `UnknownBuildType` constructor from `BuildType` (#5003). + * Added `HexFloatLiterals` to `KnownExtension`. + * Cabal will no longer try to build an empty set of `inputModules` + (#4890). + * `copyComponent` and `installIncludeFiles` will now look for + include headers in the build directory (`dist/build/...` by + default) as well (#4866). + * Added `cxx-options` and `cxx-sources` buildinfo fields for + separate compilation of C++ source files (#3700). + * Removed unused `--allow-newer`/`--allow-older` support from + `Setup configure` (#4527). + * Changed `FlagAssignment` to be an opaque `newtype` (#4849). + * Changed `rawSystemStdInOut` to use proper type to represent + binary and textual data; new `Distribution.Utils.IOData` module; + removed obsolete `startsWithBOM`, `fileHasBOM`, `fromUTF8`, + and `toUTF8` functions; add new `toUTF8BS`/`toUTF8LBS` + encoding functions. (#4666) + * Added a `cabal check` warning when the `.cabal` file name does + not match package name (#4592). + * The `ar` program now receives its arguments via a response file + (`@file`). Old behaviour can be restored with + `--disable-response-files` argument to `configure` or + `install` (#4596). + * Added `.Lens` modules, with optics for package description data + types (#4701). + * Support for building with Win32 version 2.6 (#4835). 
+ * Change `compilerExtensions` and `ghcOptExtensionMap` to contain + `Maybe Flag`s, since a supported extention can lack a flag (#4443). + * Pretty-printing of `.cabal` files is slightly different due to + parser changes. For an example, see + https://mail.haskell.org/pipermail/cabal-devel/2017-December/010414.html. + * `--hyperlink-source` now uses Haddock's hyperlinker backend when + Haddock is new enough, falling back to HsColour otherwise. + * `D.S.defaultHookedPackageDesc` has been deprecated in favour of + `D.S.findHookedPackageDesc` (#4874). + * `D.S.getHookedBuildInfo` now takes an additional parameter + specifying the build directory path (#4874). + * Emit warning when encountering unknown GHC versions (#415). + +### 2.0.1.1 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) December 2017 + + * Don't pass `other-modules` to stub executable for detailed-0.9 + (#4918). + * Hpc: Use relative .mix search paths (#4917). + +## 2.0.1.0 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) November 2017 + + * Support for GHC's numeric -g debug levels (#4673). + * Added a new `Distribution.Verbosity.modifyVerbosity` combinator + (#4724). + * Added a new `cabal check` warning about unused, undeclared or + non-Unicode flags. Also, it warns about leading dash, which is + unusable but accepted if it's unused in conditionals. (#4687) + * Modify `allBuildInfo` to include foreign library info (#4763). + * Documentation fixes. + +### 2.0.0.2 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) July 2017 + + * See http://coldwa.st/e/blog/2017-09-09-Cabal-2-0.html + for more detailed release notes. + * The 2.0 migration guide gives advice on adapting Custom setup + scripts to backwards-incompatible changes in this release: + https://github.com/haskell/cabal/wiki/2.0-migration-guide + * Add CURRENT_PACKAGE_VERSION to cabal_macros.h (#4319) + * Dropped support for versions of GHC earlier than 6.12 (#3111). 
+ * GHC compatibility window for the Cabal library has been extended + to five years (#3838). + * Convenience/internal libraries are now supported (#269). + An internal library is declared using the stanza `library + 'libname'`. Packages which use internal libraries can + result in multiple registrations; thus `--gen-pkg-config` + can now output a directory of registration scripts rather than + a single file. + * Backwards incompatible change to preprocessor interface: + the function in `PPSuffixHandler` now takes an additional + `ComponentLocalBuildInfo` specifying the build information + of the component being preprocessed. + * Backwards incompatible change to `cabal_macros.h` (#1893): we now + generate a macro file for each component which contains only + information about the direct dependencies of that component. + Consequently, `dist/build/autogen/cabal_macros.h` contains + only the macros for the library, and is not generated if a + package has no library; to find the macros for an executable + named `foobar`, look in `dist/build/foobar/autogen/cabal_macros.h`. + Similarly, if you used `autogenModulesDir` you should now + use `autogenComponentModulesDir`, which now requires a + `ComponentLocalBuildInfo` argument as well in order to + disambiguate which component the autogenerated files are for. + * Backwards incompatible change to `Component`: `TestSuite` and + `Benchmark` no longer have `testEnabled` and + `benchmarkEnabled`. If you used + `enabledTests` or `enabledBenchmarks`, please instead use + `enabledTestLBIs` and `enabledBenchLBIs` + (you will need a `LocalBuildInfo` for these functions.) + Additionally, the semantics of `withTest` and `withBench` + have changed: they now iterate over all buildable + such components, regardless of whether or not they have + been enabled; if you only want enabled components, + use `withTestLBI` and `withBenchLBI`. 
+ `finalizePackageDescription` is deprecated: + its replacement `finalizePD` now takes an extra argument + `ComponentRequestedSpec` which specifies what components + are to be enabled: use this instead of modifying the + `Component` in a `GenericPackageDescription`. (As + it's not possible now, `finalizePackageDescription` + will assume tests/benchmarks are disabled.) + If you only need to test if a component is buildable + (i.e., it is marked buildable in the Cabal file) + use the new function `componentBuildable`. + * Backwards incompatible change to `PackageName` (#3896): + `PackageName` is now opaque; conversion to/from `String` now works + via (old) `unPackageName` and (new) `mkPackageName` functions. + * Backwards incompatible change to `ComponentId` (#3917): + `ComponentId` is now opaque; conversion to/from `String` now works + via `unComponentId` and `mkComponentId` functions. + * Backwards incompatible change to `AbiHash` (#3921): + `AbiHash` is now opaque; conversion to/from `String` now works + via `unAbiHash` and `mkAbiHash` functions. + * Backwards incompatible change to `FlagName` (#4062): + `FlagName` is now opaque; conversion to/from `String` now works + via `unFlagName` and `mkFlagName` functions. + * Backwards incompatible change to `Version` (#3905): + Version is now opaque; conversion to/from `[Int]` now works + via `versionNumbers` and `mkVersion` functions. + * Add support for `--allow-older` (dual to `--allow-newer`) (#3466) + * Improved an error message for process output decoding errors + (#3408). + * `getComponentLocalBuildInfo`, `withComponentsInBuildOrder` + and `componentsInBuildOrder` are deprecated in favor of a + new interface in `Distribution.Types.LocalBuildInfo`. + * New `autogen-modules` field. 
Modules that are built automatically at + setup, like Paths_PACKAGENAME or others created with a build-type + custom, appear on `other-modules` for the Library, Executable, + Test-Suite or Benchmark stanzas or also on `exposed-modules` for + libraries but are not really on the package when distributed. This + makes commands like sdist fail because the file is not found, so with + this new field modules that appear there are treated the same way as + Paths_PACKAGENAME was and there is no need to create complex build + hooks. Just add the module names on `other-modules` and + `exposed-modules` as always and on the new `autogen-modules` besides. + (#3656). + * New `./Setup configure` flag `--cabal-file`, allowing multiple + `.cabal` files in a single directory (#3553). Primarily intended for + internal use. + * Macros in `cabal_macros.h` are now ifndef'd, so that they + don't cause an error if the macro is already defined. (#3041) + * `./Setup configure` now accepts a single argument specifying + the component to be configured. The semantics of this mode + of operation are described in + + * Internal `build-tools` dependencies are now added to PATH + upon invocation of GHC, so that they can be conveniently + used via `-pgmF`. (#1541) + * Add support for new caret-style version range operator `^>=` (#3705) + * Verbosity `-v` now takes an extended format which allows + specifying exactly what you want to be logged. The format is + `[silent|normal|verbose|debug] flags`, where flags is a space + separated list of flags. At the moment, only the flags + +callsite and +callstack are supported; these report the + call site/stack of a logging output respectively (these + are only supported if Cabal is built with GHC 8.0/7.10.2 + or greater, respectively). 
+ * New `Distribution.Utils.ShortText.ShortText` type for representing + short text strings compactly (#3898) + * Cabal no longer supports using a version bound to disambiguate + between an internal and external package (#4020). This should + not affect many people, as this mode of use already did not + work with the dependency solver. + * Support for "foreign libraries" (#2540), which are Haskell + libraries intended to be used by foreign languages like C. + Foreign libraries only work with GHC 7.8 and later. + * Added a technical preview version of integrated doctest support (#4480). + * Added a new `scope` field to the executable stanza. Executables + with `scope: private` get installed into + $libexecdir/$libexecsubdir. Additionally $libexecdir now has a + subdir structure similar to $lib(sub)dir to allow installing + private executables of different packages and package versions + alongside one another. Private executables are those that are + expected to be run by other programs rather than users. (#3461) + +## 1.24.2.0 [Mikhail Glushenkov](mailto:mikhail.glushenkov@gmail.com) December 2016 + * Fixed a bug in the handling of non-buildable components (#4094). + * Reverted a PVP-noncompliant API change in 1.24.1.0 (#4123). + * Bumped the directory upper bound to < 1.4 (#4158). + +## 1.24.1.0 [Ryan Thomas](mailto:ryan@ryant.org) October 2016 + * API addition: `differenceVersionRanges` (#3519). + * Fixed reexported-modules display mangling (#3928). + * Check that the correct cabal-version is specified when the + extra-doc-files field is present (#3825). + * Fixed an incorrect invocation of GetShortPathName that was + causing build failures on Windows (#3649). + * Linker flags are now set correctly on GHC >= 7.8 (#3443). + +# 1.24.0.0 [Ryan Thomas](mailto:ryan@ryant.org) March 2016 + * Support GHC 8. + * Deal with extra C sources from preprocessors (#238). + * Include cabal_macros.h when running c2hs (#2600). + * Don't recompile C sources unless needed (#2601). 
+ * Read `builddir` option from `CABAL_BUILDDIR` environment variable. + * Add `--profiling-detail=$level` flag with a default for libraries + and executables of `exported-functions` and `toplevel-functions` + respectively (GHC's `-fprof-auto-{exported,top}` flags) (#193). + * New `custom-setup` stanza to specify setup deps. Setup is also built + with the cabal_macros.h style macros, for conditional compilation. + * Support Haddock response files (#2746). + * Fixed a bug in the Text instance for Platform (#2862). + * New `setup haddock` option: `--for-hackage` (#2852). + * New `--show-detail=direct`; like streaming, but allows the test + program to detect that is connected to a terminal, and works + reliable with a non-threaded runtime (#2911, and serves as a + work-around for #2398) + * Library support for multi-instance package DBs (#2948). + * Improved the `./Setup configure` solver (#3082, #3076). + * The `--allow-newer` option can be now used with `./Setup + configure` (#3163). + * Added a way to specify extra locations to find OS X frameworks + in (`extra-framework-dirs`). Can be used both in `.cabal` files and + as an argument to `./Setup configure` (#3158). + * Macros `VERSION_$pkgname` and `MIN_VERSION_$pkgname` are now + also generated for the current package. (#3235). + * Backpack is supported! Two new fields supported in Cabal + files: signatures and mixins; and a new flag + to setup scripts, `--instantiate-with`. See + https://github.com/ezyang/ghc-proposals/blob/backpack/proposals/0000-backpack.rst + for more details. + +---- + +## 1.22.8.0 [Ryan Thomas](mailto:ryan@ryant.org) March 2016 + * Distribution.Simple.Setup: remove job cap. Fixes #3191. + * Check all object file suffixes for recompilation. Fixes #3128. + * Move source files under `src/`. Fixes #3003. 
+ +## 1.22.7.0 [Ryan Thomas](mailto:ryan@ryant.org) January 2016 + * Backport #3012 to the 1.22 branch + * Cabal.cabal: change build-type to Simple + * Add foldl' import + * The Cabal part for fully gcc-like response files + +## 1.22.6.0 [Ryan Thomas](mailto:ryan@ryant.org) December 2015 + * Relax upper bound to allow upcoming binary-0.8 + +## 1.22.5.0 [Ryan Thomas](mailto:ryan@ryant.org) November 2015 + * Don't recompile C sources unless needed (#2601). (Luke Iannini) + * Support Haddock response files. + * Add frameworks when linking a dynamic library. + +## 1.22.4.0 [Ryan Thomas](mailto:ryan@ryant.org) June 2015 + * Add libname install-dirs variable, use it by default. Fixes #2437. (Edward Z. Yang) + * Reduce temporary directory name length, fixes #2502. (Edward Z. Yang) + * Workaround for #2527. (Mikhail Glushenkov) + +## 1.22.3.0 [Ryan Thomas](mailto:ryan@ryant.org) April 2015 + * Fix for the ghcjs-pkg version number handling (Luite Stegeman) + * filterConfigureFlags: filter more flags (Mikhail Glushenkov) + * Cabal check will fail on -fprof-auto passed as a ghc-option - Fixes #2479 (John Chee) + +## 1.22.2.0 [Ryan Thomas](mailto:ryan@ryant.org) March 2015 + * Don't pass `--{en,dis}able-profiling` to old setup. + * Add -Wall police + * Fix dependencies on `old-time` + * Fix test interface detailed-0.9 with GHC 7.10 + * Fix HPC tests with GHC 7.10 + * Make sure to pass the package key to ghc + * Use `--package-{name|version}` when available for Haddock when available + * Put full package name and version in library names + * Fully specify package key format, so external tools can generate it. + +# 1.22.0.0 [Johan Tibell](mailto:johan.tibell@gmail.com) January 2015 + * Support GHC 7.10. + * Experimental support for emitting DWARF debug info. + * Preliminary support for relocatable packages. + * Allow cabal to be used inside cabal exec enviroments. + * hpc: support mutliple "ways" (e.g. profiling and vanilla). + * Support GHCJS. 
+ * Improved command line documentation. + * Add `-none` constraint syntax for version ranges (#2093). + * Make the default doc index file path compiler/arch/os-dependent + (#2136). + * Warn instead of dying when generating documentation and hscolour + isn't installed (455f51622fa38347db62197a04bb0fa5b928ff17). + * Support the new BinaryLiterals extension + (1f25ab3c5eff311ada73c6c987061b80e9bbebd9). + * Warn about `ghc-prof-options: -auto-all` in `cabal check` (#2162). + * Add preliminary support for multiple instances of the same package + version installed side-by-side (#2002). + * New binary build config format - faster build times (#2076). + * Support module thinning and renaming (#2038). + * Add a new license type: UnspecifiedLicense (#2141). + * Remove support for Hugs and nhc98 (#2168). + * Invoke `tar` with `--formar ustar` if possible in `sdist` (#1903). + * Replace `--enable-library-coverage` with `--enable-coverage`, which + enables program coverage for all components (#1945). + * Suggest that `ExitFailure 9` is probably due to memory + exhaustion (#1522). + * Drop support for Haddock < 2.0 (#1808, #1718). + * Make `cabal test`/`cabal bench` build only what's needed for + running tests/benchmarks (#1821). + * Build shared libraries by default when linking executables dynamically. + * Build profiled libraries by default when profiling executables. + +---- + +### 1.20.0.4 [Ryan Thomas](mailto:ryan@ryant.org) January 2016 + * Cabal.cabal: change build-type to Simple. + +### 1.20.0.1 [Johan Tibell](mailto:johan.tibell@gmail.com) May 2014 + * Fix streaming test output. 
+ +# 1.20.0.0 [Johan Tibell](mailto:johan.tibell@gmail.com) April 2014 + * Rewrite user guide + * Fix repl Ctrl+C handling + * Add haskell-suite compiler support + * Add __HADDOCK_VERSION__ define + * Allow specifying exact dependency version using hash + * Rename extra-html-files to extra-doc-files + * Add parallel build support for GHC 7.8 and later + * Don't call ranlib on OS X + * Avoid re-linking executables, test suites, and benchmarks + unnecessarily, shortening build times + * Add `--allow-newer` which allows upper version bounds to be + ignored + * Add `--enable-library-stripping` + * Add command for freezing dependencies + * Allow repl to be used outside Cabal packages + * Add `--require-sandbox` + * Don't use `--strip-unneeded` on OS X or iOS + * Add new license-files field got additional licenses + * Fix if(solaris) on some Solaris versions + * Don't use -dylib-install-name on OS X with GHC > 7.8 + * Add DragonFly as a known OS + * Improve pretty-printing of Cabal files + * Add test flag `--show-details=streaming` for real-time test output + * Add exec command + +---- + +## 1.10.2.0 [Duncan Coutts](mailto:duncan@community.haskell.org) June 2011 + * Include test suites in cabal sdist + * Fix for conditionals in test suite stanzas in `.cabal` files + * Fix permissions of directories created during install + * Fix for global builds when $HOME env var is not set + +## 1.10.1.0 [Duncan Coutts](mailto:duncan@community.haskell.org) February 2011 + * Improved error messages when test suites are not enabled + * Template parameters allowed in test `--test-option(s)` flag + * Improved documentation of the test feature + * Relaxed QA check on cabal-version when using test-suite sections + * `haddock` command now allows both `--hoogle` and `--html` at the same time + * Find ghc-version-specific instances of the hsc2hs program + * Preserve file executable permissions in sdist tarballs + * Pass gcc location and flags to ./configure scripts + * Get default gcc flags 
from ghc + +# 1.10.0.0 [Duncan Coutts](mailto:duncan@haskell.org) November 2010 + * New cabal test feature + * Initial support for UHC + * New default-language and other-languages fields (e.g. Haskell98/2010) + * New default-extensions and other-extensions fields + * Deprecated extensions field (for packages using cabal-version >=1.10) + * Cabal-version field must now only be of the form `>= x.y` + * Removed deprecated `--copy-prefix=` feature + * Auto-reconfigure when `.cabal` file changes + * Workaround for haddock overwriting .hi and .o files when using TH + * Extra cpp flags used with hsc2hs and c2hs (-D${os}_BUILD_OS etc) + * New cpp define VERSION_ gives string version of dependencies + * User guide source now in markdown format for easier editing + * Improved checks and error messages for C libraries and headers + * Removed BSD4 from the list of suggested licenses + * Updated list of known language extensions + * Fix for include paths to allow C code to import FFI stub.h files + * Fix for intra-package dependencies on OSX + * Stricter checks on various bits of `.cabal` file syntax + * Minor fixes for c2hs + +---- + +### 1.8.0.6 [Duncan Coutts](mailto:duncan@haskell.org) June 2010 + * Fix `register --global/--user` + +### 1.8.0.4 [Duncan Coutts](mailto:duncan@haskell.org) March 2010 + * Set dylib-install-name for dynalic libs on OSX + * Stricter configure check that compiler supports a package's extensions + * More configure-time warnings + * Hugs can compile Cabal lib again + * Default datadir now follows prefix on Windows + * Support for finding installed packages for hugs + * Cabal version macros now have proper parenthesis + * Reverted change to filter out deps of non-buildable components + * Fix for registering implace when using a specific package db + * Fix mismatch between $os and $arch path template variables + * Fix for finding ar.exe on Windows, always pick ghc's version + * Fix for intra-package dependencies with ghc-6.12 + +# 1.8.0.2 [Duncan 
Coutts](mailto:duncan@haskell.org) December 2009 + * Support for GHC-6.12 + * New unique installed package IDs which use a package hash + * Allow executables to depend on the lib within the same package + * Dependencies for each component apply only to that component + (previously applied to all the other components too) + * Added new known license MIT and versioned GPL and LGPL + * More liberal package version range syntax + * Package registration files are now UTF8 + * Support for LHC and JHC-0.7.2 + * Deprecated RecordPuns extension in favour of NamedFieldPuns + * Deprecated PatternSignatures extension in favor of ScopedTypeVariables + * New VersionRange semantic view as a sequence of intervals + * Improved package quality checks + * Minor simplification in a couple `Setup.hs` hooks + * Beginnings of a unit level testsuite using QuickCheck + * Various bug fixes + * Various internal cleanups + +---- + +### 1.6.0.2 [Duncan Coutts](mailto:duncan@haskell.org) February 2009 + * New configure-time check for C headers and libraries + * Added language extensions present in ghc-6.10 + * Added support for NamedFieldPuns extension in ghc-6.8 + * Fix in configure step for ghc-6.6 on Windows + * Fix warnings in `Path_pkgname.hs` module on Windows + * Fix for exotic flags in ld-options field + * Fix for using pkg-config in a package with a lib and an executable + * Fix for building haddock docs for exes that use the Paths module + * Fix for installing header files in subdirectories + * Fix for the case of building profiling libs but not ordinary libs + * Fix read-only attribute of installed files on Windows + * Ignore ghc -threaded flag when profiling in ghc-6.8 and older + +### 1.6.0.1 [Duncan Coutts](mailto:duncan@haskell.org) October 2008 + * Export a compat function to help alex and happy + +# 1.6.0.0 [Duncan Coutts](mailto:duncan@haskell.org) October 2008 + * Support for ghc-6.10 + * Source control repositories can now be specified in `.cabal` files + * Bug report URLs 
can be now specified in `.cabal` files + * Wildcards now allowed in data-files and extra-source-files fields + * New syntactic sugar for dependencies `build-depends: foo ==1.2.*` + * New cabal_macros.h provides macros to test versions of dependencies + * Relocatable bindists now possible on unix via env vars + * New `exposed` field allows packages to be not exposed by default + * Install dir flags can now use $os and $arch variables + * New `--builddir` flag allows multiple builds from a single sources dir + * cc-options now only apply to .c files, not for -fvia-C + * cc-options are not longer propagated to dependent packages + * The cpp/cc/ld-options fields no longer use `,` as a separator + * hsc2hs is now called using gcc instead of using ghc as gcc + * New api for manipulating sets and graphs of packages + * Internal api improvements and code cleanups + * Minor improvements to the user guide + * Miscellaneous minor bug fixes + +---- + +### 1.4.0.2 [Duncan Coutts](mailto:duncan@haskell.org) August 2008 + * Fix executable stripping default + * Fix striping exes on OSX that export dynamic symbols (like ghc) + * Correct the order of arguments given by `--prog-options=` + * Fix corner case with overlapping user and global packages + * Fix for modules that use pre-processing and `.hs-boot` files + * Clarify some points in the user guide and readme text + * Fix verbosity flags passed to sub-command like haddock + * Fix `sdist --snapshot` + * Allow meta-packages that contain no modules or C code + * Make the generated Paths module -Wall clean on Windows + +### 1.4.0.1 [Duncan Coutts](mailto:duncan@haskell.org) June 2008 + * Fix a bug which caused `.` to always be in the sources search path + * Haddock-2.2 and later do now support the `--hoogle` flag + +# 1.4.0.0 [Duncan Coutts](mailto:duncan@haskell.org) June 2008 + * Rewritten command line handling support + * Command line completion with bash + * Better support for Haddock 2 + * Improved support for nhc98 + * Removed 
support for ghc-6.2 + * Haddock markup in `.lhs` files now supported + * Default colour scheme for highlighted source code + * Default prefix for `--user` installs is now `$HOME/.cabal` + * All `.cabal` files are treaded as UTF-8 and must be valid + * Many checks added for common mistakes + * New `--package-db=` option for specific package databases + * Many internal changes to support cabal-install + * Stricter parsing for version strings, eg dissalows "1.05" + * Improved user guide introduction + * Programatica support removed + * New options `--program-prefix/suffix` allows eg versioned programs + * Support packages that use `.hs-boot` files + * Fix sdist for Main modules that require preprocessing + * New configure -O flag with optimisation level 0--2 + * Provide access to "`x-`" extension fields through the Cabal api + * Added check for broken installed packages + * Added warning about using inconsistent versions of dependencies + * Strip binary executable files by default with an option to disable + * New options to add site-specific include and library search paths + * Lift the restriction that libraries must have exposed-modules + * Many bugs fixed. 
+ * Many internal structural improvements and code cleanups + +---- + +## 1.2.4.0 [Duncan Coutts](mailto:duncan@haskell.org) June 2008 + * Released with GHC 6.8.3 + * Backported several fixes and minor improvements from Cabal-1.4 + * Use a default colour scheme for sources with hscolour >=1.9 + * Support `--hyperlink-source` for Haddock >= 2.0 + * Fix for running in a non-writable directory + * Add OSX -framework arguments when linking executables + * Updates to the user guide + * Allow build-tools names to include + and _ + * Export autoconfUserHooks and simpleUserHooks + * Export ccLdOptionsBuildInfo for `Setup.hs` scripts + * Export unionBuildInfo and make BuildInfo an instance of Monoid + * Fix to allow the `main-is` module to use a pre-processor + +## 1.2.3.0 [Duncan Coutts](mailto:duncan@haskell.org) Nov 2007 + * Released with GHC 6.8.2 + * Includes full list of GHC language extensions + * Fix infamous `dist/conftest.c` bug + * Fix `configure --interfacedir=` + * Find ld.exe on Windows correctly + * Export PreProcessor constructor and mkSimplePreProcessor + * Fix minor bug in unlit code + * Fix some markup in the haddock docs + +## 1.2.2.0 [Duncan Coutts](mailto:duncan@haskell.org) Nov 2007 + * Released with GHC 6.8.1 + * Support haddock-2.0 + * Support building DSOs with GHC + * Require reconfiguring if the `.cabal` file has changed + * Fix os(windows) configuration test + * Fix building documentation + * Fix building packages on Solaris + * Other minor bug fixes + +## 1.2.1 [Duncan Coutts](mailto:duncan@haskell.org) Oct 2007 + * To be included in GHC 6.8.1 + * New field `cpp-options` used when preprocessing Haskell modules + * Fixes for hsc2hs when using ghc + * C source code gets compiled with -O2 by default + * OS aliases, to allow os(windows) rather than requiring os(mingw32) + * Fix cleaning of `stub` files + * Fix cabal-setup, command line ui that replaces `runhaskell Setup.hs` + * Build docs even when dependent packages docs are missing + * Allow the 
`--html-dir` to be specified at configure time + * Fix building with ghc-6.2 + * Other minor bug fixes and build fixes + +# 1.2.0 [Duncan Coutts](mailto:duncan.coutts@worc.ox.ac.uk) Sept 2007 + * To be included in GHC 6.8.x + * New configurations feature + * Can make haddock docs link to hilighted sources (with hscolour) + * New flag to allow linking to haddock docs on the web + * Supports pkg-config + * New field `build-tools` for tool dependencies + * Improved c2hs support + * Preprocessor output no longer clutters source dirs + * Separate `includes` and `install-includes` fields + * Makefile command to generate makefiles for building libs with GHC + * New `--docdir` configure flag + * Generic `--with-prog` `--prog-args` configure flags + * Better default installation paths on Windows + * Install paths can be specified relative to each other + * License files now installed + * Initial support for NHC (incomplete) + * Consistent treatment of verbosity + * Reduced verbosity of configure step by default + * Improved helpfulness of output messages + * Help output now clearer and fits in 80 columns + * New setup register `--gen-pkg-config` flag for distros + * Major internal refactoring, hooks api has changed + * Dozens of bug fixes + +---- + +### 1.1.6.2 [Duncan Coutts](mailto:duncan.coutts@worc.ox.ac.uk) May 2007 + + * Released with GHC 6.6.1 + * Handle windows text file encoding for `.cabal` files + * Fix compiling a executable for profiling that uses Template Haskell + * Other minor bug fixes and user guide clarifications + +### 1.1.6.1 [Duncan Coutts](mailto:duncan.coutts@worc.ox.ac.uk) Oct 2006 + + * fix unlit code + * fix escaping in register.sh + +## 1.1.6 [Duncan Coutts](mailto:duncan.coutts@worc.ox.ac.uk) Oct 2006 + + * Released with GHC 6.6 + * Added support for hoogle + * Allow profiling and normal builds of libs to be chosen indepentantly + * Default installation directories on Win32 changed + * Register haddock docs with ghc-pkg + * Get haddock to make 
hyperlinks to dependent package docs + * Added BangPatterns language extension + * Various bug fixes + +## 1.1.4 [Duncan Coutts](mailto:duncan.coutts@worc.ox.ac.uk) May 2006 + + * Released with GHC 6.4.2 + * Better support for packages that need to install header files + * cabal-setup added, but not installed by default yet + * Implemented `setup register --inplace` + * Have packages exposed by default with ghc-6.2 + * It is no longer necessary to run `configure` before `clean` or `sdist` + * Added support for ghc's `-split-objs` + * Initial support for JHC + * Ignore extension fields in `.cabal` files (fields begining with "`x-`") + * Some changes to command hooks API to improve consistency + * Hugs support improvements + * Added GeneralisedNewtypeDeriving language extension + * Added cabal-version field + * Support hidden modules with haddock + * Internal code refactoring + * More bug fixes + +## 1.1.3 [Isaac Jones](mailto:ijones@syntaxpolice.org) Sept 2005 + + * WARNING: Interfaces not documented in the user's guide may + change in future releases. + * Move building of GHCi .o libs to the build phase rather than + register phase. (from Duncan Coutts) + * Use .tar.gz for source package extension + * Uses GHC instead of cpphs if the latter is not available + * Added experimental "command hooks" which completely override the + default behavior of a command. + * Some bugfixes + +# 1.1.1 [Isaac Jones](mailto:ijones@syntaxpolice.org) July 2005 + + * WARNING: Interfaces not documented in the user's guide may + change in future releases. + * Handles recursive modules for GHC 6.2 and GHC 6.4. 
+ * Added `setup test` command (Used with UserHook) + * implemented handling of _stub.{c,h,o} files + * Added support for profiling + * Changed install prefix of libraries (pref/pkgname-version + to prefix/pkgname-version/compname-version) + * Added pattern guards as a language extension + * Moved some functionality to Language.Haskell.Extension + * Register / unregister .bat files for windows + * Exposed more of the API + * Added support for the hide-all-packages flag in GHC > 6.4 + * Several bug fixes + +---- + +# 1.0 [Isaac Jones](mailto:ijones@syntaxpolice.org) March 11 2005 + + * Released with GHC 6.4, Hugs March 2005, and nhc98 1.18 + * Some sanity checking + +---- + +# 0.5 [Isaac Jones](mailto:ijones@syntaxpolice.org) Wed Feb 19 2005 + + * __WARNING__: this is a pre-release and the interfaces are + still likely to change until we reach a 1.0 release. + * Hooks interfaces changed + * Added preprocessors to user hooks + * No more executable-modules or hidden-modules. Use + `other-modules` instead. + * Certain fields moved into BuildInfo, much refactoring + * `extra-libs` -> `extra-libraries` + * Added `--gen-script` to configure and unconfigure. + * `modules-ghc` (etc) now `ghc-modules` (etc) + * added new fields including `synopsis` + * Lots of bug fixes + * spaces can sometimes be used instead of commas + * A user manual has appeared (Thanks, ross!) + * for ghc 6.4, configures versionsed depends properly + * more features to `./setup haddock` + +---- + +# 0.4 [Isaac Jones](mailto:ijones@syntaxpolice.org) Sun Jan 16 2005 + + * Much thanks to all the awesome fptools hackers who have been + working hard to build the Haskell Cabal! + + * __Interface Changes__: + + * __WARNING__: this is a pre-release and the interfaces are still + likely to change until we reach a 1.0 release. + + * Instead of Package.description, you should name your + description files .cabal. In particular, we suggest + that you name it .cabal, but this is not enforced + (yet). 
Multiple `.cabal` files in the same directory is an error, + at least for now. + + * `./setup install --install-prefix` is gone. Use `./setup copy` + `--copy-prefix` instead. + + * The `Modules` field is gone. Use `hidden-modules`, + `exposed-modules`, and `executable-modules`. + + * `Build-depends` is now a package-only field, and can't go into + executable stanzas. Build-depends is a package-to-package + relationship. + + * Some new fields. Use the Source. + + * __New Features__ + + * Cabal is now included as a package in the CVS version of + fptools. That means it'll be released as `-package Cabal` in + future versions of the compilers, and if you are a bleeding-edge + user, you can grab it from the CVS repository with the compilers. + + * Hugs compatibility and NHC98 compatibility should both be + improved. + + * Hooks Interface / Autoconf compatibility: Most of the hooks + interface is hidden for now, because it's not finalized. I have + exposed only `defaultMainWithHooks` and `defaultUserHooks`. This + allows you to use a ./configure script to preprocess + `foo.buildinfo`, which gets merged with `foo.cabal`. In future + releases, we'll expose UserHooks, but we're definitely going to + change the interface to those. The interface to the two functions + I've exposed should stay the same, though. + + * ./setup haddock is a baby feature which pre-processes the + source code with hscpp and runs haddock on it. This is brand new + and hardly tested, so you get to knock it around and see what you + think. + + * Some commands now actually implement verbosity. + + * The preprocessors have been tested a bit more, and seem to work + OK. Please give feedback if you use these. + +---- + +# 0.3 [Isaac Jones](mailto:ijones@syntaxpolice.org) Sun Jan 16 2005 + + * Unstable snapshot release + * From now on, stable releases are even. + +---- + +# 0.2 [Isaac Jones](mailto:ijones@syntaxpolice.org) + + * Adds more HUGS support and preprocessor support. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/ComponentsGraph.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/ComponentsGraph.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/ComponentsGraph.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/ComponentsGraph.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,97 @@ +-- | See +module Distribution.Backpack.ComponentsGraph ( + ComponentsGraph, + ComponentsWithDeps, + mkComponentsGraph, + componentsGraphToList, + dispComponentsWithDeps, + componentCycleMsg +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Package +import Distribution.PackageDescription as PD hiding (Flag) +import Distribution.Simple.BuildToolDepends +import Distribution.Simple.LocalBuildInfo +import Distribution.Types.ComponentRequestedSpec +import Distribution.Types.UnqualComponentName +import Distribution.Compat.Graph (Graph, Node(..)) +import qualified Distribution.Compat.Graph as Graph + +import Distribution.Text + ( Text(disp) ) +import Text.PrettyPrint + +------------------------------------------------------------------------------ +-- Components graph +------------------------------------------------------------------------------ + +-- | A graph of source-level components by their source-level +-- dependencies +-- +type ComponentsGraph = Graph (Node ComponentName Component) + +-- | A list of components associated with the source level +-- dependencies between them. +-- +type ComponentsWithDeps = [(Component, [ComponentName])] + +-- | Pretty-print 'ComponentsWithDeps'. 
+-- +dispComponentsWithDeps :: ComponentsWithDeps -> Doc +dispComponentsWithDeps graph = + vcat [ hang (text "component" <+> disp (componentName c)) 4 + (vcat [ text "dependency" <+> disp cdep | cdep <- cdeps ]) + | (c, cdeps) <- graph ] + +-- | Create a 'Graph' of 'Component', or report a cycle if there is a +-- problem. +-- +mkComponentsGraph :: ComponentRequestedSpec + -> PackageDescription + -> Either [ComponentName] ComponentsGraph +mkComponentsGraph enabled pkg_descr = + let g = Graph.fromDistinctList + [ N c (componentName c) (componentDeps c) + | c <- pkgBuildableComponents pkg_descr + , componentEnabled enabled c ] + in case Graph.cycles g of + [] -> Right g + ccycles -> Left [ componentName c | N c _ _ <- concat ccycles ] + where + -- The dependencies for the given component + componentDeps component = + (CExeName <$> getAllInternalToolDependencies pkg_descr bi) + + ++ [ if pkgname == packageName pkg_descr + then CLibName + else CSubLibName toolname + | Dependency pkgname _ <- targetBuildDepends bi + , let toolname = packageNameToUnqualComponentName pkgname + , toolname `elem` internalPkgDeps ] + where + bi = componentBuildInfo component + internalPkgDeps = map (conv . libName) (allLibraries pkg_descr) + conv Nothing = packageNameToUnqualComponentName $ packageName pkg_descr + conv (Just s) = s + +-- | Given the package description and a 'PackageDescription' (used +-- to determine if a package name is internal or not), sort the +-- components in dependency order (fewest dependencies first). This is +-- NOT necessarily the build order (although it is in the absence of +-- Backpack.) +-- +componentsGraphToList :: ComponentsGraph + -> ComponentsWithDeps +componentsGraphToList = + map (\(N c _ cs) -> (c, cs)) . Graph.revTopSort + +-- | Error message when there is a cycle; takes the SCC of components. 
+componentCycleMsg :: [ComponentName] -> Doc +componentCycleMsg cnames = + text $ "Components in the package depend on each other in a cyclic way:\n " + ++ intercalate " depends on " + [ "'" ++ showComponentName cname ++ "'" + | cname <- cnames ++ [head cnames] ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/ConfiguredComponent.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/ConfiguredComponent.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/ConfiguredComponent.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/ConfiguredComponent.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,303 @@ +{-# LANGUAGE PatternGuards #-} +-- | See +module Distribution.Backpack.ConfiguredComponent ( + ConfiguredComponent(..), + cc_name, + cc_cid, + cc_pkgid, + toConfiguredComponent, + toConfiguredComponents, + dispConfiguredComponent, + + ConfiguredComponentMap, + extendConfiguredComponentMap, + + -- TODO: Should go somewhere else + newPackageDepsBehaviour +) where + +import Prelude () +import Distribution.Compat.Prelude hiding ((<>)) + +import Distribution.Backpack.Id + +import Distribution.Types.AnnotatedId +import Distribution.Types.Dependency +import Distribution.Types.ExeDependency +import Distribution.Types.IncludeRenaming +import Distribution.Types.ComponentId +import Distribution.Types.PackageId +import Distribution.Types.PackageName +import Distribution.Types.Mixin +import Distribution.Types.ComponentName +import Distribution.Types.UnqualComponentName +import Distribution.Types.ComponentInclude +import Distribution.Package +import Distribution.PackageDescription as PD hiding (Flag) +import Distribution.Simple.BuildToolDepends +import Distribution.Simple.Setup as Setup +import Distribution.Simple.LocalBuildInfo +import Distribution.Version +import 
Distribution.Utils.LogProgress +import Distribution.Utils.MapAccum +import Distribution.Utils.Generic + +import Control.Monad +import qualified Data.Set as Set +import qualified Data.Map as Map +import Distribution.Text +import Text.PrettyPrint + +-- | A configured component, we know exactly what its 'ComponentId' is, +-- and the 'ComponentId's of the things it depends on. +data ConfiguredComponent + = ConfiguredComponent { + -- | Unique identifier of component, plus extra useful info. + cc_ann_id :: AnnotatedId ComponentId, + -- | The fragment of syntax from the Cabal file describing this + -- component. + cc_component :: Component, + -- | Is this the public library component of the package? + -- (If we invoke Setup with an instantiation, this is the + -- component the instantiation applies to.) + -- Note that in one-component configure mode, this is + -- always True, because any component is the "public" one.) + cc_public :: Bool, + -- | Dependencies on executables from @build-tools@ and + -- @build-tool-depends@. + cc_exe_deps :: [AnnotatedId ComponentId], + -- | The mixins of this package, including both explicit (from + -- the @mixins@ field) and implicit (from @build-depends@). Not + -- mix-in linked yet; component configuration only looks at + -- 'ComponentId's. + cc_includes :: [ComponentInclude ComponentId IncludeRenaming] + } + + +-- | Uniquely identifies a configured component. +cc_cid :: ConfiguredComponent -> ComponentId +cc_cid = ann_id . cc_ann_id + +-- | The package this component came from. +cc_pkgid :: ConfiguredComponent -> PackageId +cc_pkgid = ann_pid . cc_ann_id + +-- | The 'ComponentName' of a component; this uniquely identifies +-- a fragment of syntax within a specified Cabal file describing the +-- component. +cc_name :: ConfiguredComponent -> ComponentName +cc_name = ann_cname . cc_ann_id + +-- | Pretty-print a 'ConfiguredComponent'. 
+dispConfiguredComponent :: ConfiguredComponent -> Doc +dispConfiguredComponent cc = + hang (text "component" <+> disp (cc_cid cc)) 4 + (vcat [ hsep $ [ text "include", disp (ci_id incl), disp (ci_renaming incl) ] + | incl <- cc_includes cc + ]) + +-- | Construct a 'ConfiguredComponent', given that the 'ComponentId' +-- and library/executable dependencies are known. The primary +-- work this does is handling implicit @backpack-include@ fields. +mkConfiguredComponent + :: PackageDescription + -> ComponentId + -> [AnnotatedId ComponentId] -- lib deps + -> [AnnotatedId ComponentId] -- exe deps + -> Component + -> LogProgress ConfiguredComponent +mkConfiguredComponent pkg_descr this_cid lib_deps exe_deps component = do + -- Resolve each @mixins@ into the actual dependency + -- from @lib_deps@. + explicit_includes <- forM (mixins bi) $ \(Mixin name rns) -> do + let keys = fixFakePkgName pkg_descr name + aid <- case Map.lookup keys deps_map of + Nothing -> + dieProgress $ + text "Mix-in refers to non-existent package" <+> + quotes (disp name) $$ + text "(did you forget to add the package to build-depends?)" + Just r -> return r + return ComponentInclude { + ci_ann_id = aid, + ci_renaming = rns, + ci_implicit = False + } + + -- Any @build-depends@ which is not explicitly mentioned in + -- @backpack-include@ is converted into an "implicit" include. + let used_explicitly = Set.fromList (map ci_id explicit_includes) + implicit_includes + = map (\aid -> ComponentInclude { + ci_ann_id = aid, + ci_renaming = defaultIncludeRenaming, + ci_implicit = True + }) + $ filter (flip Set.notMember used_explicitly . 
ann_id) lib_deps + + return ConfiguredComponent { + cc_ann_id = AnnotatedId { + ann_id = this_cid, + ann_pid = package pkg_descr, + ann_cname = componentName component + }, + cc_component = component, + cc_public = is_public, + cc_exe_deps = exe_deps, + cc_includes = explicit_includes ++ implicit_includes + } + where + bi = componentBuildInfo component + deps_map = Map.fromList [ ((packageName dep, ann_cname dep), dep) + | dep <- lib_deps ] + is_public = componentName component == CLibName + +type ConfiguredComponentMap = + Map PackageName (Map ComponentName (AnnotatedId ComponentId)) + +toConfiguredComponent + :: PackageDescription + -> ComponentId + -> ConfiguredComponentMap + -> ConfiguredComponentMap + -> Component + -> LogProgress ConfiguredComponent +toConfiguredComponent pkg_descr this_cid lib_dep_map exe_dep_map component = do + lib_deps <- + if newPackageDepsBehaviour pkg_descr + then forM (targetBuildDepends bi) $ \(Dependency name _) -> do + let (pn, cn) = fixFakePkgName pkg_descr name + value <- case Map.lookup cn =<< Map.lookup pn lib_dep_map of + Nothing -> + dieProgress $ + text "Dependency on unbuildable (i.e. 'buildable: False')" <+> + text (showComponentName cn) <+> + text "from" <+> disp pn + Just v -> return v + return value + else return old_style_lib_deps + mkConfiguredComponent + pkg_descr this_cid + lib_deps exe_deps component + where + bi = componentBuildInfo component + -- lib_dep_map contains a mix of internal and external deps. + -- We want all the public libraries (dep_cn == CLibName) + -- of all external deps (dep /= pn). Note that this + -- excludes the public library of the current package: + -- this is not supported by old-style deps behavior + -- because it would imply a cyclic dependency for the + -- library itself. 
+ old_style_lib_deps = [ e + | (pn, comp_map) <- Map.toList lib_dep_map + , pn /= packageName pkg_descr + , (cn, e) <- Map.toList comp_map + , cn == CLibName ] + -- We have to nub here, because 'getAllToolDependencies' may return + -- duplicates (see #4986). (NB: This is not needed for lib_deps, + -- since those elaborate into includes, for which there explicitly + -- may be multiple instances of a package) + exe_deps = ordNub $ + [ exe + | ExeDependency pn cn _ <- getAllToolDependencies pkg_descr bi + -- The error suppression here is important, because in general + -- we won't know about external dependencies (e.g., 'happy') + -- which the package is attempting to use (those deps are only + -- fed in when cabal-install uses this codepath.) + -- TODO: Let cabal-install request errors here + , Just exe <- [Map.lookup (CExeName cn) =<< Map.lookup pn exe_dep_map] + ] + +-- | Also computes the 'ComponentId', and sets cc_public if necessary. +-- This is Cabal-only; cabal-install won't use this. +toConfiguredComponent' + :: Bool -- use_external_internal_deps + -> FlagAssignment + -> PackageDescription + -> Bool -- deterministic + -> Flag String -- configIPID (todo: remove me) + -> Flag ComponentId -- configCID + -> ConfiguredComponentMap + -> Component + -> LogProgress ConfiguredComponent +toConfiguredComponent' use_external_internal_deps flags + pkg_descr deterministic ipid_flag cid_flag + dep_map component = do + cc <- toConfiguredComponent + pkg_descr this_cid + dep_map dep_map component + return $ if use_external_internal_deps + then cc { cc_public = True } + else cc + where + -- TODO: pass component names to it too! 
+ this_cid = computeComponentId deterministic ipid_flag cid_flag (package pkg_descr) + (componentName component) (Just (deps, flags)) + deps = [ ann_id aid | m <- Map.elems dep_map + , aid <- Map.elems m ] + +extendConfiguredComponentMap + :: ConfiguredComponent + -> ConfiguredComponentMap + -> ConfiguredComponentMap +extendConfiguredComponentMap cc = + Map.insertWith Map.union + (pkgName (cc_pkgid cc)) + (Map.singleton (cc_name cc) (cc_ann_id cc)) + +-- Compute the 'ComponentId's for a graph of 'Component's. The +-- list of internal components must be topologically sorted +-- based on internal package dependencies, so that any internal +-- dependency points to an entry earlier in the list. +-- +-- TODO: This function currently restricts the input configured components to +-- one version per package, by using the type ConfiguredComponentMap. It cannot +-- be used to configure a component that depends on one version of a package for +-- a library and another version for a build-tool. +toConfiguredComponents + :: Bool -- use_external_internal_deps + -> FlagAssignment + -> Bool -- deterministic + -> Flag String -- configIPID + -> Flag ComponentId -- configCID + -> PackageDescription + -> ConfiguredComponentMap + -> [Component] + -> LogProgress [ConfiguredComponent] +toConfiguredComponents + use_external_internal_deps flags deterministic ipid_flag cid_flag pkg_descr + dep_map comps + = fmap snd (mapAccumM go dep_map comps) + where + go m component = do + cc <- toConfiguredComponent' + use_external_internal_deps flags pkg_descr + deterministic ipid_flag cid_flag + m component + return (extendConfiguredComponentMap cc m, cc) + +newPackageDepsBehaviourMinVersion :: Version +newPackageDepsBehaviourMinVersion = mkVersion [1,7,1] + + +-- In older cabal versions, there was only one set of package dependencies for +-- the whole package. 
In this version, we can have separate dependencies per +-- target, but we only enable this behaviour if the minimum cabal version +-- specified is >= a certain minimum. Otherwise, for compatibility we use the +-- old behaviour. +newPackageDepsBehaviour :: PackageDescription -> Bool +newPackageDepsBehaviour pkg = + specVersion pkg >= newPackageDepsBehaviourMinVersion + +-- | 'build-depends:' stanzas are currently ambiguous as the external packages +-- and internal libraries are specified the same. For now, we assume internal +-- libraries shadow, and this function disambiguates accordingly, but soon the +-- underlying ambiguity will be addressed. +fixFakePkgName :: PackageDescription -> PackageName -> (PackageName, ComponentName) +fixFakePkgName pkg_descr pn = + if subLibName `elem` internalLibraries + then (packageName pkg_descr, CSubLibName subLibName) + else (pn, CLibName) + where + subLibName = packageNameToUnqualComponentName pn + internalLibraries = mapMaybe libName (allLibraries pkg_descr) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/Configure.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/Configure.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/Configure.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/Configure.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,357 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE PatternGuards #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE NoMonoLocalBinds #-} +{-# LANGUAGE NondecreasingIndentation #-} + +-- | See +-- +-- WARNING: The contents of this module are HIGHLY experimental. +-- We may refactor it under you. 
+module Distribution.Backpack.Configure ( + configureComponentLocalBuildInfos, +) where + +import Prelude () +import Distribution.Compat.Prelude hiding ((<>)) + +import Distribution.Backpack +import Distribution.Backpack.FullUnitId +import Distribution.Backpack.PreExistingComponent +import Distribution.Backpack.ConfiguredComponent +import Distribution.Backpack.LinkedComponent +import Distribution.Backpack.ReadyComponent +import Distribution.Backpack.ComponentsGraph +import Distribution.Backpack.Id + +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Package +import qualified Distribution.InstalledPackageInfo as Installed +import Distribution.InstalledPackageInfo (InstalledPackageInfo + ,emptyInstalledPackageInfo) +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import Distribution.PackageDescription as PD hiding (Flag) +import Distribution.ModuleName +import Distribution.Simple.Setup as Setup +import Distribution.Simple.LocalBuildInfo +import Distribution.Types.AnnotatedId +import Distribution.Types.ComponentRequestedSpec +import Distribution.Types.ComponentInclude +import Distribution.Verbosity +import qualified Distribution.Compat.Graph as Graph +import Distribution.Compat.Graph (Graph, IsNode(..)) +import Distribution.Utils.LogProgress + +import Data.Either + ( lefts ) +import qualified Data.Set as Set +import qualified Data.Map as Map +import Distribution.Text +import Text.PrettyPrint + +------------------------------------------------------------------------------ +-- Pipeline +------------------------------------------------------------------------------ + +configureComponentLocalBuildInfos + :: Verbosity + -> Bool -- use_external_internal_deps + -> ComponentRequestedSpec + -> Bool -- deterministic + -> Flag String -- configIPID + -> Flag ComponentId -- configCID + -> PackageDescription + -> [PreExistingComponent] + -> FlagAssignment -- 
configConfigurationsFlags + -> [(ModuleName, Module)] -- configInstantiateWith + -> InstalledPackageIndex + -> Compiler + -> LogProgress ([ComponentLocalBuildInfo], InstalledPackageIndex) +configureComponentLocalBuildInfos + verbosity use_external_internal_deps enabled deterministic ipid_flag cid_flag pkg_descr + prePkgDeps flagAssignment instantiate_with installedPackageSet comp = do + -- NB: In single component mode, this returns a *single* component. + -- In this graph, the graph is NOT closed. + graph0 <- case mkComponentsGraph enabled pkg_descr of + Left ccycle -> dieProgress (componentCycleMsg ccycle) + Right g -> return (componentsGraphToList g) + infoProgress $ hang (text "Source component graph:") 4 + (dispComponentsWithDeps graph0) + + let conf_pkg_map = Map.fromListWith Map.union + [(pc_pkgname pkg, + Map.singleton (pc_compname pkg) + (AnnotatedId { + ann_id = pc_cid pkg, + ann_pid = packageId pkg, + ann_cname = pc_compname pkg + })) + | pkg <- prePkgDeps] + graph1 <- toConfiguredComponents use_external_internal_deps + flagAssignment + deterministic ipid_flag cid_flag pkg_descr + conf_pkg_map (map fst graph0) + infoProgress $ hang (text "Configured component graph:") 4 + (vcat (map dispConfiguredComponent graph1)) + + let shape_pkg_map = Map.fromList + [ (pc_cid pkg, (pc_open_uid pkg, pc_shape pkg)) + | pkg <- prePkgDeps] + uid_lookup def_uid + | Just pkg <- PackageIndex.lookupUnitId installedPackageSet uid + = FullUnitId (Installed.installedComponentId pkg) + (Map.fromList (Installed.instantiatedWith pkg)) + | otherwise = error ("uid_lookup: " ++ display uid) + where uid = unDefUnitId def_uid + graph2 <- toLinkedComponents verbosity uid_lookup + (package pkg_descr) shape_pkg_map graph1 + + infoProgress $ + hang (text "Linked component graph:") 4 + (vcat (map dispLinkedComponent graph2)) + + let pid_map = Map.fromList $ + [ (pc_uid pkg, pc_munged_id pkg) + | pkg <- prePkgDeps] ++ + [ (Installed.installedUnitId pkg, mungedId pkg) + | (_, Module uid _) <- 
instantiate_with + , Just pkg <- [PackageIndex.lookupUnitId + installedPackageSet (unDefUnitId uid)] ] + subst = Map.fromList instantiate_with + graph3 = toReadyComponents pid_map subst graph2 + graph4 = Graph.revTopSort (Graph.fromDistinctList graph3) + + infoProgress $ hang (text "Ready component graph:") 4 + (vcat (map dispReadyComponent graph4)) + + toComponentLocalBuildInfos comp installedPackageSet pkg_descr prePkgDeps graph4 + +------------------------------------------------------------------------------ +-- ComponentLocalBuildInfo +------------------------------------------------------------------------------ + +toComponentLocalBuildInfos + :: Compiler + -> InstalledPackageIndex -- FULL set + -> PackageDescription + -> [PreExistingComponent] -- external package deps + -> [ReadyComponent] + -> LogProgress ([ComponentLocalBuildInfo], + InstalledPackageIndex) -- only relevant packages +toComponentLocalBuildInfos + comp installedPackageSet pkg_descr externalPkgDeps graph = do + -- Check and make sure that every instantiated component exists. + -- We have to do this now, because prior to linking/instantiating + -- we don't actually know what the full set of 'UnitId's we need + -- are. + let -- TODO: This is actually a bit questionable performance-wise, + -- since we will pay for the ALL installed packages even if + -- they are not related to what we are building. This was true + -- in the old configure code. + external_graph :: Graph (Either InstalledPackageInfo ReadyComponent) + external_graph = Graph.fromDistinctList + . map Left + $ PackageIndex.allPackages installedPackageSet + internal_graph :: Graph (Either InstalledPackageInfo ReadyComponent) + internal_graph = Graph.fromDistinctList + . 
map Right + $ graph + combined_graph = Graph.unionRight external_graph internal_graph + Just local_graph = Graph.closure combined_graph (map nodeKey graph) + -- The database of transitively reachable installed packages that the + -- external components the package (as a whole) depends on. This will be + -- used in several ways: + -- + -- * We'll use it to do a consistency check so we're not depending + -- on multiple versions of the same package (TODO: someday relax + -- this for private dependencies.) See right below. + -- + -- * We'll pass it on in the LocalBuildInfo, where preprocessors + -- and other things will incorrectly use it to determine what + -- the include paths and everything should be. + -- + packageDependsIndex = PackageIndex.fromList (lefts local_graph) + fullIndex = Graph.fromDistinctList local_graph + case Graph.broken fullIndex of + [] -> return () + broken -> + -- TODO: ppr this + dieProgress . text $ + "The following packages are broken because other" + ++ " packages they depend on are missing. These broken " + ++ "packages must be rebuilt before they can be used.\n" + -- TODO: Undupe. + ++ unlines [ "installed package " + ++ display (packageId pkg) + ++ " is broken due to missing package " + ++ intercalate ", " (map display deps) + | (Left pkg, deps) <- broken ] + ++ unlines [ "planned package " + ++ display (packageId pkg) + ++ " is broken due to missing package " + ++ intercalate ", " (map display deps) + | (Right pkg, deps) <- broken ] + + -- In this section, we'd like to look at the 'packageDependsIndex' + -- and see if we've picked multiple versions of the same + -- installed package (this is bad, because it means you might + -- get an error could not match foo-0.1:Type with foo-0.2:Type). + -- + -- What is pseudoTopPkg for? I have no idea. It was used + -- in the very original commit which introduced checking for + -- inconsistencies 5115bb2be4e13841ea07dc9166b9d9afa5f0d012, + -- and then moved out of PackageIndex and put here later. 
+ -- TODO: Try this code without it... + -- + -- TODO: Move this into a helper function + -- + -- TODO: This is probably wrong for Backpack + let pseudoTopPkg :: InstalledPackageInfo + pseudoTopPkg = emptyInstalledPackageInfo { + Installed.installedUnitId = mkLegacyUnitId (packageId pkg_descr), + Installed.sourcePackageId = packageId pkg_descr, + Installed.depends = map pc_uid externalPkgDeps + } + case PackageIndex.dependencyInconsistencies + . PackageIndex.insert pseudoTopPkg + $ packageDependsIndex of + [] -> return () + inconsistencies -> + warnProgress $ + hang (text "This package indirectly depends on multiple versions of the same" <+> + text "package. This is very likely to cause a compile failure.") 2 + (vcat [ text "package" <+> disp (packageName user) <+> + parens (disp (installedUnitId user)) <+> text "requires" <+> + disp inst + | (_dep_key, insts) <- inconsistencies + , (inst, users) <- insts + , user <- users ]) + let clbis = mkLinkedComponentsLocalBuildInfo comp graph + -- forM clbis $ \(clbi,deps) -> info verbosity $ "UNIT" ++ hashUnitId (componentUnitId clbi) ++ "\n" ++ intercalate "\n" (map hashUnitId deps) + return (clbis, packageDependsIndex) + +-- Build ComponentLocalBuildInfo for each component we are going +-- to build. 
+-- +-- This conversion is lossy; we lose some invariants from ReadyComponent +mkLinkedComponentsLocalBuildInfo + :: Compiler + -> [ReadyComponent] + -> [ComponentLocalBuildInfo] +mkLinkedComponentsLocalBuildInfo comp rcs = map go rcs + where + internalUnits = Set.fromList (map rc_uid rcs) + isInternal x = Set.member x internalUnits + go rc = + case rc_component rc of + CLib lib -> + let convModuleExport (modname', (Module uid modname)) + | this_uid == unDefUnitId uid + , modname' == modname + = Installed.ExposedModule modname' Nothing + | otherwise + = Installed.ExposedModule modname' + (Just (OpenModule (DefiniteUnitId uid) modname)) + convOpenModuleExport (modname', modu@(OpenModule uid modname)) + | uid == this_open_uid + , modname' == modname + = Installed.ExposedModule modname' Nothing + | otherwise + = Installed.ExposedModule modname' (Just modu) + convOpenModuleExport (_, OpenModuleVar _) + = error "convOpenModuleExport: top-level modvar" + exports = + -- Loses invariants + case rc_i rc of + Left indefc -> map convOpenModuleExport + $ Map.toList (indefc_provides indefc) + Right instc -> map convModuleExport + $ Map.toList (instc_provides instc) + insts = + case rc_i rc of + Left indefc -> [ (m, OpenModuleVar m) | m <- indefc_requires indefc ] + Right instc -> [ (m, OpenModule (DefiniteUnitId uid') m') + | (m, Module uid' m') <- instc_insts instc ] + + compat_name = computeCompatPackageName (packageName rc) (libName lib) + compat_key = computeCompatPackageKey comp compat_name (packageVersion rc) this_uid + + in LibComponentLocalBuildInfo { + componentPackageDeps = cpds, + componentUnitId = this_uid, + componentComponentId = this_cid, + componentInstantiatedWith = insts, + componentIsIndefinite_ = is_indefinite, + componentLocalName = cname, + componentInternalDeps = internal_deps, + componentExeDeps = exe_deps, + componentIncludes = includes, + componentExposedModules = exports, + componentIsPublic = rc_public rc, + componentCompatPackageKey = compat_key, + 
componentCompatPackageName = compat_name + } + CFLib _ -> + FLibComponentLocalBuildInfo { + componentUnitId = this_uid, + componentComponentId = this_cid, + componentLocalName = cname, + componentPackageDeps = cpds, + componentExeDeps = exe_deps, + componentInternalDeps = internal_deps, + componentIncludes = includes + } + CExe _ -> + ExeComponentLocalBuildInfo { + componentUnitId = this_uid, + componentComponentId = this_cid, + componentLocalName = cname, + componentPackageDeps = cpds, + componentExeDeps = exe_deps, + componentInternalDeps = internal_deps, + componentIncludes = includes + } + CTest _ -> + TestComponentLocalBuildInfo { + componentUnitId = this_uid, + componentComponentId = this_cid, + componentLocalName = cname, + componentPackageDeps = cpds, + componentExeDeps = exe_deps, + componentInternalDeps = internal_deps, + componentIncludes = includes + } + CBench _ -> + BenchComponentLocalBuildInfo { + componentUnitId = this_uid, + componentComponentId = this_cid, + componentLocalName = cname, + componentPackageDeps = cpds, + componentExeDeps = exe_deps, + componentInternalDeps = internal_deps, + componentIncludes = includes + } + where + this_uid = rc_uid rc + this_open_uid = rc_open_uid rc + this_cid = rc_cid rc + cname = componentName (rc_component rc) + cpds = rc_depends rc + exe_deps = map ann_id $ rc_exe_deps rc + is_indefinite = + case rc_i rc of + Left _ -> True + Right _ -> False + includes = + map (\ci -> (ci_id ci, ci_renaming ci)) $ + case rc_i rc of + Left indefc -> + indefc_includes indefc + Right instc -> + map (\ci -> ci { ci_ann_id = fmap DefiniteUnitId (ci_ann_id ci) }) + (instc_includes instc) + internal_deps = filter isInternal (nodeNeighbors rc) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/DescribeUnitId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/DescribeUnitId.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/DescribeUnitId.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/DescribeUnitId.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,61 @@ +{-# LANGUAGE Rank2Types #-} +{-# LANGUAGE FlexibleContexts #-} +module Distribution.Backpack.DescribeUnitId where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.PackageId +import Distribution.Types.ComponentName +import Distribution.Compat.Stack +import Distribution.Verbosity +import Distribution.ModuleName +import Distribution.Text +import Distribution.Simple.Utils + +import Text.PrettyPrint + +-- Unit identifiers have a well defined, machine-readable format, +-- but this format isn't very user-friendly for users. This +-- module defines some functions for solving common rendering +-- problems one has for displaying these. +-- +-- There are three basic problems we tackle: +-- +-- - Users don't want to see pkg-0.5-inplace-libname, +-- they want to see "library 'libname' from 'pkg-0.5'" +-- +-- - Users don't want to see the raw component identifier, which +-- usually contains a wordy hash that doesn't matter. +-- +-- - Users don't want to see a hash of the instantiation: they +-- want to see the actual instantiation, and they want it in +-- interpretable form. +-- + +-- | Print a Setup message stating (1) what operation we are doing, +-- for (2) which component (with enough details to uniquely identify +-- the build in question.) +-- +setupMessage' :: Text a => Verbosity + -> String -- ^ Operation being done (capitalized), on: + -> PackageIdentifier -- ^ Package + -> ComponentName -- ^ Component name + -> Maybe [(ModuleName, a)] -- ^ Instantiation, if available. 
+ -- Polymorphic to take + -- 'OpenModule' or 'Module' + -> IO () +setupMessage' verbosity msg pkgid cname mb_insts = withFrozenCallStack $ do + noticeDoc verbosity $ + case mb_insts of + Just insts | not (null insts) -> + hang (msg_doc <+> text "instantiated with") 2 + (vcat [ disp k <+> text "=" <+> disp v + | (k,v) <- insts ]) $$ + for_doc + _ -> + msg_doc <+> for_doc + + where + msg_doc = text msg <+> text (showComponentName cname) + for_doc = text "for" <+> disp pkgid <<>> text ".." diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/FullUnitId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/FullUnitId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/FullUnitId.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/FullUnitId.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,26 @@ +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Backpack.FullUnitId ( + FullUnitId(..), + FullDb, + expandOpenUnitId, + expandUnitId +) where + +import Distribution.Backpack +import Distribution.Types.ComponentId +import Distribution.Compat.Prelude + +-- Unlike OpenUnitId, which could direct to a UnitId. 
+data FullUnitId = FullUnitId ComponentId OpenModuleSubst + deriving (Show, Generic) + +type FullDb = DefUnitId -> FullUnitId + +expandOpenUnitId :: FullDb -> OpenUnitId -> FullUnitId +expandOpenUnitId _db (IndefFullUnitId cid subst) + = FullUnitId cid subst +expandOpenUnitId db (DefiniteUnitId uid) + = expandUnitId db uid + +expandUnitId :: FullDb -> DefUnitId -> FullUnitId +expandUnitId db uid = db uid diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/Id.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/Id.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/Id.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/Id.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,146 @@ +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE PatternGuards #-} +-- | See +module Distribution.Backpack.Id( + computeComponentId, + computeCompatPackageKey, + computeCompatPackageName, +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.UnqualComponentName +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.PackageDescription as PD hiding (Flag) +import Distribution.Simple.Setup as Setup +import qualified Distribution.Simple.InstallDirs as InstallDirs +import Distribution.Simple.LocalBuildInfo +import Distribution.Types.ComponentId +import Distribution.Types.PackageId +import Distribution.Types.UnitId +import Distribution.Types.MungedPackageName +import Distribution.Utils.Base62 +import Distribution.Version + +import Distribution.Text + ( display, simpleParse ) + +-- | This method computes a default, "good enough" 'ComponentId' +-- for a package. The intent is that cabal-install (or the user) will +-- specify a more detailed IPID via the @--ipid@ flag if necessary. 
+computeComponentId + :: Bool -- deterministic mode + -> Flag String + -> Flag ComponentId + -> PackageIdentifier + -> ComponentName + -- This is used by cabal-install's legacy codepath + -> Maybe ([ComponentId], FlagAssignment) + -> ComponentId +computeComponentId deterministic mb_ipid mb_cid pid cname mb_details = + -- show is found to be faster than intercalate and then replacement of + -- special character used in intercalating. We cannot simply hash by + -- doubly concating list, as it just flatten out the nested list, so + -- different sources can produce same hash + let hash_suffix + | Just (dep_ipids, flags) <- mb_details + = "-" ++ hashToBase62 + -- For safety, include the package + version here + -- for GHC 7.10, where just the hash is used as + -- the package key + ( display pid + ++ show dep_ipids + ++ show flags ) + | otherwise = "" + generated_base = display pid ++ hash_suffix + explicit_base cid0 = fromPathTemplate (InstallDirs.substPathTemplate env + (toPathTemplate cid0)) + -- Hack to reuse install dirs machinery + -- NB: no real IPID available at this point + where env = packageTemplateEnv pid (mkUnitId "") + actual_base = case mb_ipid of + Flag ipid0 -> explicit_base ipid0 + NoFlag | deterministic -> display pid + | otherwise -> generated_base + in case mb_cid of + Flag cid -> cid + NoFlag -> mkComponentId $ actual_base + ++ (case componentNameString cname of + Nothing -> "" + Just s -> "-" ++ unUnqualComponentName s) + +-- | In GHC 8.0, the string we pass to GHC to use for symbol +-- names for a package can be an arbitrary, IPID-compatible string. +-- However, prior to GHC 8.0 there are some restrictions on what +-- format this string can be (due to how ghc-pkg parsed the key): +-- +-- 1. In GHC 7.10, the string had either be of the form +-- foo_ABCD, where foo is a non-semantic alphanumeric/hyphenated +-- prefix and ABCD is two base-64 encoded 64-bit integers, +-- or a GHC 7.8 style identifier. +-- +-- 2. 
In GHC 7.8, the string had to be a valid package identifier +-- like foo-0.1. +-- +-- So, the problem is that Cabal, in general, has a general IPID, +-- but needs to figure out a package key / package ID that the +-- old ghc-pkg will actually accept. But there's an EVERY WORSE +-- problem: if ghc-pkg decides to parse an identifier foo-0.1-xxx +-- as if it were a package identifier, which means it will SILENTLY +-- DROP the "xxx" (because it's a tag, and Cabal does not allow tags.) +-- So we must CONNIVE to ensure that we don't pick something that +-- looks like this. +-- +-- So this function attempts to define a mapping into the old formats. +-- +-- The mapping for GHC 7.8 and before: +-- +-- * We use the *compatibility* package name and version. For +-- public libraries this is just the package identifier; for +-- internal libraries, it's something like "z-pkgname-z-libname-0.1". +-- See 'computeCompatPackageName' for more details. +-- +-- The mapping for GHC 7.10: +-- +-- * For CLibName: +-- If the IPID is of the form foo-0.1-ABCDEF where foo_ABCDEF would +-- validly parse as a package key, we pass "ABCDEF". (NB: not +-- all hashes parse this way, because GHC 7.10 mandated that +-- these hashes be two base-62 encoded 64 bit integers), +-- but hashes that Cabal generated using 'computeComponentId' +-- are guaranteed to have this form. +-- +-- If it is not of this form, we rehash the IPID into the +-- correct form and pass that. +-- +-- * For sub-components, we rehash the IPID into the correct format +-- and pass that. +-- +computeCompatPackageKey + :: Compiler + -> MungedPackageName + -> Version + -> UnitId + -> String +computeCompatPackageKey comp pkg_name pkg_version uid + | not (packageKeySupported comp) = + display pkg_name ++ "-" ++ display pkg_version + | not (unifiedIPIDRequired comp) = + let str = unUnitId uid -- assume no Backpack support + mb_verbatim_key + = case simpleParse str :: Maybe PackageId of + -- Something like 'foo-0.1', use it verbatim. 
+ -- (NB: hash tags look like tags, so they are parsed, + -- so the extra equality check tests if a tag was dropped.) + Just pid0 | display pid0 == str -> Just str + _ -> Nothing + mb_truncated_key + = let cand = reverse (takeWhile isAlphaNum (reverse str)) + in if length cand == 22 && all isAlphaNum cand + then Just cand + else Nothing + rehashed_key = hashToBase62 str + in fromMaybe rehashed_key (mb_verbatim_key `mplus` mb_truncated_key) + | otherwise = display uid diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/LinkedComponent.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/LinkedComponent.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/LinkedComponent.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/LinkedComponent.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,399 @@ +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeFamilies #-} +-- | See +module Distribution.Backpack.LinkedComponent ( + LinkedComponent(..), + lc_insts, + lc_uid, + lc_cid, + lc_pkgid, + toLinkedComponent, + toLinkedComponents, + dispLinkedComponent, + LinkedComponentMap, + extendLinkedComponentMap, +) where + +import Prelude () +import Distribution.Compat.Prelude hiding ((<>)) + +import Distribution.Backpack +import Distribution.Backpack.FullUnitId +import Distribution.Backpack.ConfiguredComponent +import Distribution.Backpack.ModuleShape +import Distribution.Backpack.PreModuleShape +import Distribution.Backpack.ModuleScope +import Distribution.Backpack.UnifyM +import Distribution.Backpack.MixLink +import Distribution.Utils.MapAccum + +import Distribution.Types.AnnotatedId +import Distribution.Types.ComponentName +import Distribution.Types.ModuleRenaming +import Distribution.Types.IncludeRenaming +import Distribution.Types.ComponentInclude +import Distribution.Types.ComponentId 
+import Distribution.Types.PackageId +import Distribution.Package +import Distribution.PackageDescription as PD hiding (Flag) +import Distribution.ModuleName +import Distribution.Simple.LocalBuildInfo +import Distribution.Verbosity +import Distribution.Utils.LogProgress + +import qualified Data.Set as Set +import qualified Data.Map as Map +import Data.Traversable + ( mapM ) +import Distribution.Text + ( Text(disp) ) +import Text.PrettyPrint +import Data.Either + +-- | A linked component is a component that has been mix-in linked, at +-- which point we have determined how all the dependencies of the +-- component are explicitly instantiated (in the form of an OpenUnitId). +-- 'ConfiguredComponent' is mix-in linked into 'LinkedComponent', which +-- is then instantiated into 'ReadyComponent'. +data LinkedComponent + = LinkedComponent { + -- | Uniquely identifies linked component + lc_ann_id :: AnnotatedId ComponentId, + -- | Corresponds to 'cc_component'. + lc_component :: Component, + -- | @build-tools@ and @build-tool-depends@ dependencies. + -- Corresponds to 'cc_exe_deps'. + lc_exe_deps :: [AnnotatedId OpenUnitId], + -- | Is this the public library of a package? Corresponds to + -- 'cc_public'. + lc_public :: Bool, + -- | Corresponds to 'cc_includes', but (1) this does not contain + -- includes of signature packages (packages with no exports), + -- and (2) the 'ModuleRenaming' for requirements (stored in + -- 'IncludeRenaming') has been removed, as it is reflected in + -- 'OpenUnitId'.) + lc_includes :: [ComponentInclude OpenUnitId ModuleRenaming], + -- | Like 'lc_includes', but this specifies includes on + -- signature packages which have no exports. + lc_sig_includes :: [ComponentInclude OpenUnitId ModuleRenaming], + -- | The module shape computed by mix-in linking. This is + -- newly computed from 'ConfiguredComponent' + lc_shape :: ModuleShape + } + +-- | Uniquely identifies a 'LinkedComponent'. Corresponds to +-- 'cc_cid'. 
+lc_cid :: LinkedComponent -> ComponentId +lc_cid = ann_id . lc_ann_id + +-- | Corresponds to 'cc_pkgid'. +lc_pkgid :: LinkedComponent -> PackageId +lc_pkgid = ann_pid . lc_ann_id + +-- | The 'OpenUnitId' of this component in the "default" instantiation. +-- See also 'lc_insts'. 'LinkedComponent's cannot be instantiated +-- (e.g., there is no 'ModSubst' instance for them). +lc_uid :: LinkedComponent -> OpenUnitId +lc_uid lc = IndefFullUnitId (lc_cid lc) . Map.fromList $ lc_insts lc + +-- | The instantiation of 'lc_uid'; this always has the invariant +-- that it is a mapping from a module name @A@ to @@ (the hole A). +lc_insts :: LinkedComponent -> [(ModuleName, OpenModule)] +lc_insts lc = [ (req, OpenModuleVar req) + | req <- Set.toList (modShapeRequires (lc_shape lc)) ] + +dispLinkedComponent :: LinkedComponent -> Doc +dispLinkedComponent lc = + hang (text "unit" <+> disp (lc_uid lc)) 4 $ + vcat [ text "include" <+> disp (ci_id incl) <+> disp (ci_renaming incl) + | incl <- lc_includes lc ] + $+$ + vcat [ text "signature include" <+> disp (ci_id incl) + | incl <- lc_sig_includes lc ] + $+$ dispOpenModuleSubst (modShapeProvides (lc_shape lc)) + +instance Package LinkedComponent where + packageId = lc_pkgid + +toLinkedComponent + :: Verbosity + -> FullDb + -> PackageId + -> LinkedComponentMap + -> ConfiguredComponent + -> LogProgress LinkedComponent +toLinkedComponent verbosity db this_pid pkg_map ConfiguredComponent { + cc_ann_id = aid@AnnotatedId { ann_id = this_cid }, + cc_component = component, + cc_exe_deps = exe_deps, + cc_public = is_public, + cc_includes = cid_includes + } = do + let + -- The explicitly specified requirements, provisions and + -- reexports from the Cabal file. These are only non-empty for + -- libraries; everything else is trivial. 
+ (src_reqs :: [ModuleName], + src_provs :: [ModuleName], + src_reexports :: [ModuleReexport]) = + case component of + CLib lib -> (signatures lib, + exposedModules lib, + reexportedModules lib) + _ -> ([], [], []) + src_hidden = otherModules (componentBuildInfo component) + + -- Take each included ComponentId and resolve it into an + -- *unlinked* unit identity. We will use unification (relying + -- on the ModuleShape) to resolve these into linked identities. + unlinked_includes :: [ComponentInclude (OpenUnitId, ModuleShape) IncludeRenaming] + unlinked_includes = [ ComponentInclude (fmap lookupUid dep_aid) rns i + | ComponentInclude dep_aid rns i <- cid_includes ] + + lookupUid :: ComponentId -> (OpenUnitId, ModuleShape) + lookupUid cid = fromMaybe (error "linkComponent: lookupUid") + (Map.lookup cid pkg_map) + + let orErr (Right x) = return x + orErr (Left [err]) = dieProgress err + orErr (Left errs) = do + dieProgress (vcat (intersperse (text "") -- double newline! + [ hang (text "-") 2 err | err <- errs])) + + -- Pre-shaping + let pre_shape = mixLinkPreModuleShape $ + PreModuleShape { + preModShapeProvides = Set.fromList (src_provs ++ src_hidden), + preModShapeRequires = Set.fromList src_reqs + } : [ renamePreModuleShape (toPreModuleShape sh) rns + | ComponentInclude (AnnotatedId { ann_id = (_, sh) }) rns _ <- unlinked_includes ] + reqs = preModShapeRequires pre_shape + insts = [ (req, OpenModuleVar req) + | req <- Set.toList reqs ] + this_uid = IndefFullUnitId this_cid . Map.fromList $ insts + + -- OK, actually do unification + -- TODO: the unification monad might return errors, in which + -- case we have to deal. Use monadic bind for now. + (linked_shape0 :: ModuleScope, + linked_includes0 :: [ComponentInclude OpenUnitId ModuleRenaming], + linked_sig_includes0 :: [ComponentInclude OpenUnitId ModuleRenaming]) + <- orErr $ runUnifyM verbosity this_cid db $ do + -- The unification monad is implemented using mutable + -- references. 
Thus, we must convert our *pure* data + -- structures into mutable ones to perform unification. + + let convertMod :: (ModuleName -> ModuleSource) -> ModuleName -> UnifyM s (ModuleScopeU s) + convertMod from m = do + m_u <- convertModule (OpenModule this_uid m) + return (Map.singleton m [WithSource (from m) m_u], Map.empty) + -- Handle 'exposed-modules' + exposed_mod_shapes_u <- mapM (convertMod FromExposedModules) src_provs + -- Handle 'other-modules' + other_mod_shapes_u <- mapM (convertMod FromOtherModules) src_hidden + + -- Handle 'signatures' + let convertReq :: ModuleName -> UnifyM s (ModuleScopeU s) + convertReq req = do + req_u <- convertModule (OpenModuleVar req) + return (Map.empty, Map.singleton req [WithSource (FromSignatures req) req_u]) + req_shapes_u <- mapM convertReq src_reqs + + -- Handle 'mixins' + (incl_shapes_u, all_includes_u) <- fmap unzip (mapM convertInclude unlinked_includes) + + failIfErrs -- Prevent error cascade + -- Mix-in link everything! mixLink is the real workhorse. + shape_u <- mixLink $ exposed_mod_shapes_u + ++ other_mod_shapes_u + ++ req_shapes_u + ++ incl_shapes_u + + -- src_reqs_u <- mapM convertReq src_reqs + -- Read out all the final results by converting back + -- into a pure representation. 
+ let convertIncludeU (ComponentInclude dep_aid rns i) = do + uid <- convertUnitIdU (ann_id dep_aid) + return (ComponentInclude { + ci_ann_id = dep_aid { ann_id = uid }, + ci_renaming = rns, + ci_implicit = i + }) + shape <- convertModuleScopeU shape_u + let (includes_u, sig_includes_u) = partitionEithers all_includes_u + incls <- mapM convertIncludeU includes_u + sig_incls <- mapM convertIncludeU sig_includes_u + return (shape, incls, sig_incls) + + let isNotLib (CLib _) = False + isNotLib _ = True + when (not (Set.null reqs) && isNotLib component) $ + dieProgress $ + hang (text "Non-library component has unfilled requirements:") + 4 (vcat [disp req | req <- Set.toList reqs]) + + -- NB: do NOT include hidden modules here: GHC 7.10's ghc-pkg + -- won't allow it (since someone could directly synthesize + -- an 'InstalledPackageInfo' that violates abstraction.) + -- Though, maybe it should be relaxed? + let src_hidden_set = Set.fromList src_hidden + linked_shape = linked_shape0 { + modScopeProvides = + -- Would rather use withoutKeys but need BC + Map.filterWithKey + (\k _ -> not (k `Set.member` src_hidden_set)) + (modScopeProvides linked_shape0) + } + + -- OK, compute the reexports + -- TODO: This code reports the errors for reexports one reexport at + -- a time. Better to collect them all up and report them all at + -- once. + let hdl :: [Either Doc a] -> LogProgress [a] + hdl es = + case partitionEithers es of + ([], rs) -> return rs + (ls, _) -> + dieProgress $ + hang (text "Problem with module re-exports:") 2 + (vcat [hang (text "-") 2 l | l <- ls]) + reexports_list <- hdl . 
(flip map) src_reexports $ \reex@(ModuleReexport mb_pn from to) -> do + case Map.lookup from (modScopeProvides linked_shape) of + Just cands@(x0:xs0) -> do + -- Make sure there is at least one candidate + (x, xs) <- + case mb_pn of + Just pn -> + let matches_pn (FromMixins pn' _ _) = pn == pn' + matches_pn (FromBuildDepends pn' _) = pn == pn' + matches_pn (FromExposedModules _) = pn == packageName this_pid + matches_pn (FromOtherModules _) = pn == packageName this_pid + matches_pn (FromSignatures _) = pn == packageName this_pid + in case filter (matches_pn . getSource) cands of + (x1:xs1) -> return (x1, xs1) + _ -> Left (brokenReexportMsg reex) + Nothing -> return (x0, xs0) + -- Test that all the candidates are consistent + case filter (\x' -> unWithSource x /= unWithSource x') xs of + [] -> return () + _ -> Left $ ambiguousReexportMsg reex x xs + return (to, unWithSource x) + _ -> + Left (brokenReexportMsg reex) + + -- TODO: maybe check this earlier; it's syntactically obvious. + let build_reexports m (k, v) + | Map.member k m = + dieProgress $ hsep + [ text "Module name ", disp k, text " is exported multiple times." ] + | otherwise = return (Map.insert k v m) + provs <- foldM build_reexports Map.empty $ + -- TODO: doublecheck we have checked for + -- src_provs duplicates already! 
+ [ (mod_name, OpenModule this_uid mod_name) | mod_name <- src_provs ] ++ + reexports_list + + let final_linked_shape = ModuleShape provs (Map.keysSet (modScopeRequires linked_shape)) + + -- See Note Note [Signature package special case] + let (linked_includes, linked_sig_includes) + | Set.null reqs = (linked_includes0 ++ linked_sig_includes0, []) + | otherwise = (linked_includes0, linked_sig_includes0) + + return $ LinkedComponent { + lc_ann_id = aid, + lc_component = component, + lc_public = is_public, + -- These must be executables + lc_exe_deps = map (fmap (\cid -> IndefFullUnitId cid Map.empty)) exe_deps, + lc_shape = final_linked_shape, + lc_includes = linked_includes, + lc_sig_includes = linked_sig_includes + } + +-- Note [Signature package special case] +-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +-- Suppose we have p-indef, which depends on str-sig and inherits +-- the hole from that signature package. When we instantiate p-indef, +-- it's a bit pointless to also go ahead and build str-sig, because +-- str-sig cannot possibly have contributed any code to the package +-- in question. Furthermore, because the signature was inherited to +-- p-indef, if we test matching against p-indef, we also have tested +-- matching against p-sig. In fact, skipping p-sig is *mandatory*, +-- because p-indef may have thinned it (so that an implementation may +-- match p-indef but not p-sig.) +-- +-- However, suppose that we have a package which mixes together str-sig +-- and str-bytestring, with the intent of *checking* that str-sig is +-- implemented by str-bytestring. Here, it's quite important to +-- build an instantiated str-sig, since that is the only way we will +-- actually end up testing if the matching works. 
Note that this +-- admonition only applies if the package has NO requirements; if it +-- has any requirements, we will typecheck it as an indefinite +-- package, at which point the signature includes will be passed to +-- GHC who will in turn actually do the checking to make sure they +-- are instantiated correctly. + +-- Handle mix-in linking for components. In the absence of Backpack, +-- every ComponentId gets converted into a UnitId by way of SimpleUnitId. +toLinkedComponents + :: Verbosity + -> FullDb + -> PackageId + -> LinkedComponentMap + -> [ConfiguredComponent] + -> LogProgress [LinkedComponent] +toLinkedComponents verbosity db this_pid lc_map0 comps + = fmap snd (mapAccumM go lc_map0 comps) + where + go :: Map ComponentId (OpenUnitId, ModuleShape) + -> ConfiguredComponent + -> LogProgress (Map ComponentId (OpenUnitId, ModuleShape), LinkedComponent) + go lc_map cc = do + lc <- addProgressCtx (text "In the stanza" <+> text (componentNameStanza (cc_name cc))) $ + toLinkedComponent verbosity db this_pid lc_map cc + return (extendLinkedComponentMap lc lc_map, lc) + +type LinkedComponentMap = Map ComponentId (OpenUnitId, ModuleShape) + +extendLinkedComponentMap :: LinkedComponent + -> LinkedComponentMap + -> LinkedComponentMap +extendLinkedComponentMap lc m = + Map.insert (lc_cid lc) (lc_uid lc, lc_shape lc) m + +brokenReexportMsg :: ModuleReexport -> Doc +brokenReexportMsg (ModuleReexport (Just pn) from _to) = + vcat [ text "The package" <+> quotes (disp pn) + , text "does not export a module" <+> quotes (disp from) ] +brokenReexportMsg (ModuleReexport Nothing from _to) = + vcat [ text "The module" <+> quotes (disp from) + , text "is not exported by any suitable package." + , text "It occurs in neither the 'exposed-modules' of this package," + , text "nor any of its 'build-depends' dependencies." 
] + +ambiguousReexportMsg :: ModuleReexport -> ModuleWithSource -> [ModuleWithSource] -> Doc +ambiguousReexportMsg (ModuleReexport mb_pn from _to) y1 ys = + vcat [ text "Ambiguous reexport" <+> quotes (disp from) + , hang (text "It could refer to either:") 2 + (vcat (msg : msgs)) + , help_msg mb_pn ] + where + msg = text " " <+> displayModuleWithSource y1 + msgs = [text "or" <+> displayModuleWithSource y | y <- ys] + help_msg Nothing = + -- TODO: This advice doesn't help if the ambiguous exports + -- come from a package named the same thing + vcat [ text "The ambiguity can be resolved by qualifying the" + , text "re-export with a package name." + , text "The syntax is 'packagename:ModuleName [as NewName]'." ] + -- Qualifying won't help that much. + help_msg (Just _) = + vcat [ text "The ambiguity can be resolved by using the" + , text "mixins field to rename one of the module" + , text "names differently." ] + displayModuleWithSource y + = vcat [ quotes (disp (unWithSource y)) + , text "brought into scope by" <+> + dispModuleSource (getSource y) + ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/MixLink.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/MixLink.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/MixLink.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/MixLink.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,185 @@ +{-# LANGUAGE NondecreasingIndentation #-} +-- | See +module Distribution.Backpack.MixLink ( + mixLink, +) where + +import Prelude () +import Distribution.Compat.Prelude hiding (mod) + +import Distribution.Backpack +import Distribution.Backpack.UnifyM +import Distribution.Backpack.FullUnitId +import Distribution.Backpack.ModuleScope + +import qualified Distribution.Utils.UnionFind as UnionFind +import Distribution.ModuleName +import 
Distribution.Text +import Distribution.Types.ComponentId + +import Text.PrettyPrint +import Control.Monad +import qualified Data.Map as Map +import qualified Data.Foldable as F + +----------------------------------------------------------------------- +-- Linking + +-- | Given to scopes of provisions and requirements, link them together. +mixLink :: [ModuleScopeU s] -> UnifyM s (ModuleScopeU s) +mixLink scopes = do + let provs = Map.unionsWith (++) (map fst scopes) + -- Invariant: any identically named holes refer to same mutable cell + reqs = Map.unionsWith (++) (map snd scopes) + filled = Map.intersectionWithKey linkProvision provs reqs + F.sequenceA_ filled + let remaining = Map.difference reqs filled + return (provs, remaining) + +-- | Link a list of possibly provided modules to a single +-- requirement. This applies a side-condition that all +-- of the provided modules at the same name are *actually* +-- the same module. +linkProvision :: ModuleName + -> [ModuleWithSourceU s] -- provs + -> [ModuleWithSourceU s] -- reqs + -> UnifyM s [ModuleWithSourceU s] +linkProvision mod_name ret@(prov:provs) (req:reqs) = do + -- TODO: coalesce all the non-unifying modules together + forM_ provs $ \prov' -> do + -- Careful: read it out BEFORE unifying, because the + -- unification algorithm preemptively unifies modules + mod <- convertModuleU (unWithSource prov) + mod' <- convertModuleU (unWithSource prov') + r <- unify prov prov' + case r of + Just () -> return () + Nothing -> do + addErr $ + text "Ambiguous module" <+> quotes (disp mod_name) $$ + text "It could refer to" <+> + ( text " " <+> (quotes (disp mod) $$ in_scope_by (getSource prov)) $$ + text "or" <+> (quotes (disp mod') $$ in_scope_by (getSource prov')) ) $$ + link_doc + mod <- convertModuleU (unWithSource prov) + req_mod <- convertModuleU (unWithSource req) + self_cid <- fmap unify_self_cid getUnifEnv + case mod of + OpenModule (IndefFullUnitId cid _) _ + | cid == self_cid -> addErr $ + text "Cannot instantiate 
requirement" <+> quotes (disp mod_name) <+> + in_scope_by (getSource req) $$ + text "with locally defined module" <+> in_scope_by (getSource prov) $$ + text "as this would create a cyclic dependency, which GHC does not support." $$ + text "Try moving this module to a separate library, e.g.," $$ + text "create a new stanza: library 'sublib'." + _ -> return () + r <- unify prov req + case r of + Just () -> return () + Nothing -> do + -- TODO: Record and report WHERE the bad constraint came from + addErr $ text "Could not instantiate requirement" <+> quotes (disp mod_name) $$ + nest 4 (text "Expected:" <+> disp mod $$ + text "Actual: " <+> disp req_mod) $$ + parens (text "This can occur if an exposed module of" <+> + text "a libraries shares a name with another module.") $$ + link_doc + return ret + where + unify s1 s2 = tryM $ addErrContext short_link_doc + $ unifyModule (unWithSource s1) (unWithSource s2) + in_scope_by s = text "brought into scope by" <+> dispModuleSource s + short_link_doc = text "While filling requirement" <+> quotes (disp mod_name) + link_doc = text "While filling requirements of" <+> reqs_doc + reqs_doc + | null reqs = dispModuleSource (getSource req) + | otherwise = ( text " " <+> dispModuleSource (getSource req) $$ + vcat [ text "and" <+> dispModuleSource (getSource r) | r <- reqs]) +linkProvision _ _ _ = error "linkProvision" + + + +----------------------------------------------------------------------- +-- The unification algorithm + +-- This is based off of https://gist.github.com/amnn/559551517d020dbb6588 +-- which is a translation from Huet's thesis. 
+ +unifyUnitId :: UnitIdU s -> UnitIdU s -> UnifyM s () +unifyUnitId uid1_u uid2_u + | uid1_u == uid2_u = return () + | otherwise = do + xuid1 <- liftST $ UnionFind.find uid1_u + xuid2 <- liftST $ UnionFind.find uid2_u + case (xuid1, xuid2) of + (UnitIdThunkU u1, UnitIdThunkU u2) + | u1 == u2 -> return () + | otherwise -> + failWith $ hang (text "Couldn't match unit IDs:") 4 + (text " " <+> disp u1 $$ + text "and" <+> disp u2) + (UnitIdThunkU uid1, UnitIdU _ cid2 insts2) + -> unifyThunkWith cid2 insts2 uid2_u uid1 uid1_u + (UnitIdU _ cid1 insts1, UnitIdThunkU uid2) + -> unifyThunkWith cid1 insts1 uid1_u uid2 uid2_u + (UnitIdU _ cid1 insts1, UnitIdU _ cid2 insts2) + -> unifyInner cid1 insts1 uid1_u cid2 insts2 uid2_u + +unifyThunkWith :: ComponentId + -> Map ModuleName (ModuleU s) + -> UnitIdU s + -> DefUnitId + -> UnitIdU s + -> UnifyM s () +unifyThunkWith cid1 insts1 uid1_u uid2 uid2_u = do + db <- fmap unify_db getUnifEnv + let FullUnitId cid2 insts2' = expandUnitId db uid2 + insts2 <- convertModuleSubst insts2' + unifyInner cid1 insts1 uid1_u cid2 insts2 uid2_u + +unifyInner :: ComponentId + -> Map ModuleName (ModuleU s) + -> UnitIdU s + -> ComponentId + -> Map ModuleName (ModuleU s) + -> UnitIdU s + -> UnifyM s () +unifyInner cid1 insts1 uid1_u cid2 insts2 uid2_u = do + when (cid1 /= cid2) $ + -- TODO: if we had a package identifier, could be an + -- easier to understand error message. + failWith $ + hang (text "Couldn't match component IDs:") 4 + (text " " <+> disp cid1 $$ + text "and" <+> disp cid2) + -- The KEY STEP which makes this a Huet-style unification + -- algorithm. (Also a payoff of using union-find.) + -- We can build infinite unit IDs this way, which is necessary + -- for support mutual recursion. NB: union keeps the SECOND + -- descriptor, so we always arrange for a UnitIdThunkU to live + -- there. + liftST $ UnionFind.union uid1_u uid2_u + F.sequenceA_ $ Map.intersectionWith unifyModule insts1 insts2 + +-- | Imperatively unify two modules. 
+unifyModule :: ModuleU s -> ModuleU s -> UnifyM s () +unifyModule mod1_u mod2_u + | mod1_u == mod2_u = return () + | otherwise = do + mod1 <- liftST $ UnionFind.find mod1_u + mod2 <- liftST $ UnionFind.find mod2_u + case (mod1, mod2) of + (ModuleVarU _, _) -> liftST $ UnionFind.union mod1_u mod2_u + (_, ModuleVarU _) -> liftST $ UnionFind.union mod2_u mod1_u + (ModuleU uid1 mod_name1, ModuleU uid2 mod_name2) -> do + when (mod_name1 /= mod_name2) $ + failWith $ + hang (text "Cannot match module names") 4 $ + text " " <+> disp mod_name1 $$ + text "and" <+> disp mod_name2 + -- NB: this is not actually necessary (because we'll + -- detect loops eventually in 'unifyUnitId'), but it + -- seems harmless enough + liftST $ UnionFind.union mod1_u mod2_u + unifyUnitId uid1 uid2 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/ModSubst.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/ModSubst.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/ModSubst.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/ModSubst.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,54 @@ +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE PatternGuards #-} + +-- | A type class 'ModSubst' for objects which can have 'ModuleSubst' +-- applied to them. +-- +-- See also + +module Distribution.Backpack.ModSubst ( + ModSubst(..), +) where + +import Prelude () +import Distribution.Compat.Prelude hiding (mod) + +import Distribution.ModuleName + +import Distribution.Backpack + +import qualified Data.Map as Map +import Data.Set (Set) +import qualified Data.Set as Set + +-- | Applying module substitutions to semantic objects. 
+class ModSubst a where + -- In notation, substitution is postfix, which implies + -- putting it on the right hand side, but for partial + -- application it's more convenient to have it on the left + -- hand side. + modSubst :: OpenModuleSubst -> a -> a + +instance ModSubst OpenModule where + modSubst subst (OpenModule cid mod_name) = OpenModule (modSubst subst cid) mod_name + modSubst subst mod@(OpenModuleVar mod_name) + | Just mod' <- Map.lookup mod_name subst = mod' + | otherwise = mod + +instance ModSubst OpenUnitId where + modSubst subst (IndefFullUnitId cid insts) = IndefFullUnitId cid (modSubst subst insts) + modSubst _subst uid = uid + +instance ModSubst (Set ModuleName) where + modSubst subst reqs + = Set.union (Set.difference reqs (Map.keysSet subst)) + (openModuleSubstFreeHoles subst) + +-- Substitutions are functorial. NB: this means that +-- there is an @instance 'ModSubst' 'ModuleSubst'@! +instance ModSubst a => ModSubst (Map k a) where + modSubst subst = fmap (modSubst subst) +instance ModSubst a => ModSubst [a] where + modSubst subst = fmap (modSubst subst) +instance ModSubst a => ModSubst (k, a) where + modSubst subst (x,y) = (x, modSubst subst y) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/ModuleScope.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/ModuleScope.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/ModuleScope.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/ModuleScope.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,131 @@ +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE DeriveFoldable #-} +-- | See +module Distribution.Backpack.ModuleScope ( + -- * Module scopes + ModuleScope(..), + ModuleProvides, + ModuleRequires, + ModuleSource(..), + dispModuleSource, + WithSource(..), + unWithSource, + 
getSource, + ModuleWithSource, + emptyModuleScope, +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.ModuleName +import Distribution.Types.IncludeRenaming +import Distribution.Types.PackageName +import Distribution.Types.ComponentName + +import Distribution.Backpack +import Distribution.Backpack.ModSubst +import Distribution.Text + +import qualified Data.Map as Map +import Text.PrettyPrint + + +----------------------------------------------------------------------- +-- Module scopes + +-- Why is ModuleProvides so complicated? The basic problem is that +-- we want to support this: +-- +-- package p where +-- include q (A) +-- include r (A) +-- module B where +-- import "q" A +-- import "r" A +-- +-- Specifically, in Cabal today it is NOT an error have two modules in +-- scope with the same identifier. So we need to preserve this for +-- Backpack. The modification is that an ambiguous module name is +-- OK... as long as it is NOT used to fill a requirement! +-- +-- So as a first try, we might try deferring unifying provisions that +-- are being glommed together, and check for equality after the fact. +-- But this doesn't work, because what if a multi-module provision +-- is used to fill a requirement?! So you do the equality test +-- IMMEDIATELY before a requirement fill happens... or never at all. +-- +-- Alternate strategy: go ahead and unify, and then if it is revealed +-- that some requirements got filled "out-of-thin-air", error. + + +-- | A 'ModuleScope' describes the modules and requirements that +-- are in-scope as we are processing a Cabal package. Unlike +-- a 'ModuleShape', there may be multiple modules in scope at +-- the same 'ModuleName'; this is only an error if we attempt +-- to use those modules to fill a requirement. A 'ModuleScope' +-- can influence the 'ModuleShape' via a reexport. 
+data ModuleScope = ModuleScope { + modScopeProvides :: ModuleProvides, + modScopeRequires :: ModuleRequires + } + +-- | An empty 'ModuleScope'. +emptyModuleScope :: ModuleScope +emptyModuleScope = ModuleScope Map.empty Map.empty + +-- | Every 'Module' in scope at a 'ModuleName' is annotated with +-- the 'PackageName' it comes from. +type ModuleProvides = Map ModuleName [ModuleWithSource] +-- | INVARIANT: entries for ModuleName m, have msrc_module is OpenModuleVar m +type ModuleRequires = Map ModuleName [ModuleWithSource] +-- TODO: consider newtping the two types above. + +-- | Description of where a module participating in mixin linking came +-- from. +data ModuleSource + = FromMixins PackageName ComponentName IncludeRenaming + | FromBuildDepends PackageName ComponentName + | FromExposedModules ModuleName + | FromOtherModules ModuleName + | FromSignatures ModuleName +-- We don't have line numbers, but if we did, we'd want to record that +-- too + +-- TODO: Deduplicate this with Distribution.Backpack.UnifyM.ci_msg +dispModuleSource :: ModuleSource -> Doc +dispModuleSource (FromMixins pn cn incls) + = text "mixins:" <+> dispComponent pn cn <+> disp incls +dispModuleSource (FromBuildDepends pn cn) + = text "build-depends:" <+> dispComponent pn cn +dispModuleSource (FromExposedModules m) + = text "exposed-modules:" <+> disp m +dispModuleSource (FromOtherModules m) + = text "other-modules:" <+> disp m +dispModuleSource (FromSignatures m) + = text "signatures:" <+> disp m + +-- Dependency +dispComponent :: PackageName -> ComponentName -> Doc +dispComponent pn cn = + -- NB: This syntax isn't quite the source syntax, but it + -- should be clear enough. To do source syntax, we'd + -- need to know what the package we're linking is. + case cn of + CLibName -> disp pn + CSubLibName ucn -> disp pn <<>> colon <<>> disp ucn + -- Case below shouldn't happen + _ -> disp pn <+> parens (disp cn) + +-- | An 'OpenModule', annotated with where it came from in a Cabal file. 
+data WithSource a = WithSource ModuleSource a + deriving (Functor, Foldable, Traversable) +unWithSource :: WithSource a -> a +unWithSource (WithSource _ x) = x +getSource :: WithSource a -> ModuleSource +getSource (WithSource s _) = s +type ModuleWithSource = WithSource OpenModule + +instance ModSubst a => ModSubst (WithSource a) where + modSubst subst (WithSource s m) = WithSource s (modSubst subst m) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/ModuleShape.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/ModuleShape.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/ModuleShape.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/ModuleShape.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,83 @@ +{-# LANGUAGE DeriveGeneric #-} +-- | See +module Distribution.Backpack.ModuleShape ( + -- * Module shapes + ModuleShape(..), + emptyModuleShape, + shapeInstalledPackage, +) where + +import Prelude () +import Distribution.Compat.Prelude hiding (mod) + +import Distribution.ModuleName +import Distribution.InstalledPackageInfo as IPI + +import Distribution.Backpack.ModSubst +import Distribution.Backpack + +import qualified Data.Map as Map +import Data.Set (Set) +import qualified Data.Set as Set + +----------------------------------------------------------------------- +-- Module shapes + +-- | A 'ModuleShape' describes the provisions and requirements of +-- a library. We can extract a 'ModuleShape' from an +-- 'InstalledPackageInfo'. 
+data ModuleShape = ModuleShape { + modShapeProvides :: OpenModuleSubst, + modShapeRequires :: Set ModuleName + } + deriving (Eq, Show, Generic) + +instance Binary ModuleShape + +instance ModSubst ModuleShape where + modSubst subst (ModuleShape provs reqs) + = ModuleShape (modSubst subst provs) (modSubst subst reqs) + +-- | The default module shape, with no provisions and no requirements. +emptyModuleShape :: ModuleShape +emptyModuleShape = ModuleShape Map.empty Set.empty + +-- Food for thought: suppose we apply the Merkel tree optimization. +-- Imagine this situation: +-- +-- component p +-- signature H +-- module P +-- component h +-- module H +-- component a +-- signature P +-- module A +-- component q(P) +-- include p +-- include h +-- component r +-- include q (P) +-- include p (P) requires (H) +-- include h (H) +-- include a (A) requires (P) +-- +-- Component r should not have any conflicts, since after mix-in linking +-- the two P imports will end up being the same, so we can properly +-- instantiate it. But to know that q's P is p:P instantiated with h:H, +-- we have to be able to expand its unit id. Maybe we can expand it +-- lazily but in some cases it will need to be expanded. +-- +-- FWIW, the way that GHC handles this is by improving unit IDs as +-- soon as it sees an improved one in the package database. This +-- is a bit disgusting. 
+shapeInstalledPackage :: IPI.InstalledPackageInfo -> ModuleShape +shapeInstalledPackage ipi = ModuleShape (Map.fromList provs) reqs + where + uid = installedOpenUnitId ipi + provs = map shapeExposedModule (IPI.exposedModules ipi) + reqs = requiredSignatures ipi + shapeExposedModule (IPI.ExposedModule mod_name Nothing) + = (mod_name, OpenModule uid mod_name) + shapeExposedModule (IPI.ExposedModule mod_name (Just mod)) + = (mod_name, mod) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/PreExistingComponent.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/PreExistingComponent.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/PreExistingComponent.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/PreExistingComponent.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,67 @@ +-- | See +module Distribution.Backpack.PreExistingComponent ( + PreExistingComponent(..), + ipiToPreExistingComponent, +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Backpack.ModuleShape +import Distribution.Backpack +import Distribution.Types.ComponentId +import Distribution.Types.MungedPackageId +import Distribution.Types.PackageId +import Distribution.Types.UnitId +import Distribution.Types.ComponentName +import Distribution.Types.PackageName +import Distribution.Package + +import qualified Data.Map as Map +import qualified Distribution.InstalledPackageInfo as Installed +import Distribution.InstalledPackageInfo (InstalledPackageInfo) + +-- | Stripped down version of 'LinkedComponent' for things +-- we don't need to know how to build. +data PreExistingComponent + = PreExistingComponent { + -- | The actual name of the package. 
This may DISAGREE with 'pc_pkgid' + -- for internal dependencies: e.g., an internal component @lib@ may be + -- munged to @z-pkg-z-lib@, but we still want to use it when we see + -- @lib@ in @build-depends@ + pc_pkgname :: PackageName, + -- | The actual name of the component. + pc_compname :: ComponentName, + pc_munged_id :: MungedPackageId, + pc_uid :: UnitId, + pc_cid :: ComponentId, + pc_open_uid :: OpenUnitId, + pc_shape :: ModuleShape + } + +-- | Convert an 'InstalledPackageInfo' into a 'PreExistingComponent', +-- which was brought into scope under the 'PackageName' (important for +-- a package qualified reference.) +ipiToPreExistingComponent :: InstalledPackageInfo -> PreExistingComponent +ipiToPreExistingComponent ipi = + PreExistingComponent { + pc_pkgname = packageName ipi, + pc_compname = libraryComponentName $ Installed.sourceLibName ipi, + pc_munged_id = mungedId ipi, + pc_uid = Installed.installedUnitId ipi, + pc_cid = Installed.installedComponentId ipi, + pc_open_uid = + IndefFullUnitId (Installed.installedComponentId ipi) + (Map.fromList (Installed.instantiatedWith ipi)), + pc_shape = shapeInstalledPackage ipi + } + +instance HasMungedPackageId PreExistingComponent where + mungedId = pc_munged_id + +instance Package PreExistingComponent where + packageId pec = PackageIdentifier (pc_pkgname pec) v + where MungedPackageId _ v = pc_munged_id pec + +instance HasUnitId PreExistingComponent where + installedUnitId = pc_uid diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/PreModuleShape.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/PreModuleShape.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/PreModuleShape.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/PreModuleShape.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,45 @@ +{-# LANGUAGE 
DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Backpack.PreModuleShape ( + PreModuleShape(..), + toPreModuleShape, + renamePreModuleShape, + mixLinkPreModuleShape, +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Data.Set (Set) +import qualified Data.Set as Set +import qualified Data.Map as Map + +import Distribution.Backpack.ModuleShape +import Distribution.Types.IncludeRenaming +import Distribution.Types.ModuleRenaming +import Distribution.ModuleName + +data PreModuleShape = PreModuleShape { + preModShapeProvides :: Set ModuleName, + preModShapeRequires :: Set ModuleName + } + deriving (Eq, Show, Generic) + +toPreModuleShape :: ModuleShape -> PreModuleShape +toPreModuleShape (ModuleShape provs reqs) = PreModuleShape (Map.keysSet provs) reqs + +renamePreModuleShape :: PreModuleShape -> IncludeRenaming -> PreModuleShape +renamePreModuleShape (PreModuleShape provs reqs) (IncludeRenaming prov_rn req_rn) = + PreModuleShape + (Set.fromList (mapMaybe prov_fn (Set.toList provs))) + (Set.map req_fn reqs) + where + prov_fn = interpModuleRenaming prov_rn + req_fn k = fromMaybe k (interpModuleRenaming req_rn k) + +mixLinkPreModuleShape :: [PreModuleShape] -> PreModuleShape +mixLinkPreModuleShape shapes = PreModuleShape provs (Set.difference reqs provs) + where + provs = Set.unions (map preModShapeProvides shapes) + reqs = Set.unions (map preModShapeRequires shapes) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/ReadyComponent.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/ReadyComponent.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/ReadyComponent.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/ReadyComponent.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,368 @@ +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE 
PatternGuards #-} +-- | See +module Distribution.Backpack.ReadyComponent ( + ReadyComponent(..), + InstantiatedComponent(..), + IndefiniteComponent(..), + rc_depends, + rc_uid, + rc_pkgid, + dispReadyComponent, + toReadyComponents, +) where + +import Prelude () +import Distribution.Compat.Prelude hiding ((<>)) + +import Distribution.Backpack +import Distribution.Backpack.LinkedComponent +import Distribution.Backpack.ModuleShape + +import Distribution.Types.AnnotatedId +import Distribution.Types.ModuleRenaming +import Distribution.Types.Component +import Distribution.Types.ComponentInclude +import Distribution.Types.ComponentId +import Distribution.Types.ComponentName +import Distribution.Types.PackageId +import Distribution.Types.UnitId +import Distribution.Compat.Graph (IsNode(..)) +import Distribution.Types.Module +import Distribution.Types.MungedPackageId +import Distribution.Types.MungedPackageName +import Distribution.Types.Library + +import Distribution.ModuleName +import Distribution.Package +import Distribution.Simple.Utils + +import qualified Control.Applicative as A +import qualified Data.Traversable as T + +import Control.Monad +import Text.PrettyPrint +import qualified Data.Map as Map + +import Distribution.Version +import Distribution.Text + +-- | A 'ReadyComponent' is one that we can actually generate build +-- products for. We have a ready component for the typecheck-only +-- products of every indefinite package, as well as a ready component +-- for every way these packages can be fully instantiated. +-- +data ReadyComponent + = ReadyComponent { + rc_ann_id :: AnnotatedId UnitId, + -- | The 'OpenUnitId' for this package. At the moment, this + -- is used in only one case, which is to determine if an + -- export is of a module from this library (indefinite + -- libraries record these exports as 'OpenModule'); + -- 'rc_open_uid' can be conveniently used to test for + -- equality, whereas 'UnitId' cannot always be used in this + -- case. 
+ rc_open_uid :: OpenUnitId, + -- | Corresponds to 'lc_cid'. Invariant: if 'rc_open_uid' + -- records a 'ComponentId', it coincides with this one. + rc_cid :: ComponentId, + -- | Corresponds to 'lc_component'. + rc_component :: Component, + -- | Corresponds to 'lc_exe_deps'. + -- Build-tools don't participate in mix-in linking. + -- (but what if they could?) + rc_exe_deps :: [AnnotatedId UnitId], + -- | Corresponds to 'lc_public'. + rc_public :: Bool, + -- | Extra metadata depending on whether or not this is an + -- indefinite library (typechecked only) or an instantiated + -- component (can be compiled). + rc_i :: Either IndefiniteComponent InstantiatedComponent + } + +-- | The final, string 'UnitId' that will uniquely identify +-- the compilation products of this component. +rc_uid :: ReadyComponent -> UnitId +rc_uid = ann_id . rc_ann_id + +-- | Corresponds to 'lc_pkgid'. +rc_pkgid :: ReadyComponent -> PackageId +rc_pkgid = ann_pid . rc_ann_id + +-- | An 'InstantiatedComponent' is a library which is fully instantiated +-- (or, possibly, has no requirements at all.) +data InstantiatedComponent + = InstantiatedComponent { + -- | How this library was instantiated. + instc_insts :: [(ModuleName, Module)], + -- | Dependencies induced by 'instc_insts'. These are recorded + -- here because there isn't a convenient way otherwise to get + -- the 'PackageId' we need to fill 'componentPackageDeps' as needed. + instc_insts_deps :: [(UnitId, MungedPackageId)], + -- | The modules exported/reexported by this library. + instc_provides :: Map ModuleName Module, + -- | The dependencies which need to be passed to the compiler + -- to bring modules into scope. These always refer to installed + -- fully instantiated libraries. + instc_includes :: [ComponentInclude DefUnitId ModuleRenaming] + } + +-- | An 'IndefiniteComponent' is a library with requirements +-- which we will typecheck only. +data IndefiniteComponent + = IndefiniteComponent { + -- | The requirements of the library. 
+ indefc_requires :: [ModuleName], + -- | The modules exported/reexported by this library. + indefc_provides :: Map ModuleName OpenModule, + -- | The dependencies which need to be passed to the compiler + -- to bring modules into scope. These are 'OpenUnitId' because + -- these may refer to partially instantiated libraries. + indefc_includes :: [ComponentInclude OpenUnitId ModuleRenaming] + } + +-- | Compute the dependencies of a 'ReadyComponent' that should +-- be recorded in the @depends@ field of 'InstalledPackageInfo'. +rc_depends :: ReadyComponent -> [(UnitId, MungedPackageId)] +rc_depends rc = ordNub $ + case rc_i rc of + Left indefc -> + map (\ci -> (abstractUnitId $ ci_id ci, toMungedPackageId ci)) + (indefc_includes indefc) + Right instc -> + map (\ci -> (unDefUnitId $ ci_id ci, toMungedPackageId ci)) + (instc_includes instc) + ++ instc_insts_deps instc + where + toMungedPackageId :: Text id => ComponentInclude id rn -> MungedPackageId + toMungedPackageId ci = + computeCompatPackageId + (ci_pkgid ci) + (case ci_cname ci of + CLibName -> Nothing + CSubLibName uqn -> Just uqn + _ -> error $ display (rc_cid rc) ++ + " depends on non-library " ++ display (ci_id ci)) + +-- | Get the 'MungedPackageId' of a 'ReadyComponent' IF it is +-- a library. 
+rc_munged_id :: ReadyComponent -> MungedPackageId +rc_munged_id rc = + computeCompatPackageId + (rc_pkgid rc) + (case rc_component rc of + CLib lib -> libName lib + _ -> error "rc_munged_id: not library") + +instance Package ReadyComponent where + packageId = rc_pkgid + +instance HasUnitId ReadyComponent where + installedUnitId = rc_uid + +instance IsNode ReadyComponent where + type Key ReadyComponent = UnitId + nodeKey = rc_uid + nodeNeighbors rc = + (case rc_i rc of + Right inst | [] <- instc_insts inst + -> [] + | otherwise + -> [newSimpleUnitId (rc_cid rc)] + _ -> []) ++ + ordNub (map fst (rc_depends rc)) ++ + map ann_id (rc_exe_deps rc) + +dispReadyComponent :: ReadyComponent -> Doc +dispReadyComponent rc = + hang (text (case rc_i rc of + Left _ -> "indefinite" + Right _ -> "definite") + <+> disp (nodeKey rc) + {- <+> dispModSubst (Map.fromList (lc_insts lc)) -} ) 4 $ + vcat [ text "depends" <+> disp uid + | uid <- nodeNeighbors rc ] + +-- | The state of 'InstM'; a mapping from 'UnitId's to their +-- ready component, or @Nothing@ if its an external +-- component which we don't know how to build. +type InstS = Map UnitId (Maybe ReadyComponent) + +-- | A state monad for doing instantiations (can't use actual +-- State because that would be an extra dependency.) +newtype InstM a = InstM { runInstM :: InstS -> (a, InstS) } + +instance Functor InstM where + fmap f (InstM m) = InstM $ \s -> let (x, s') = m s + in (f x, s') + +instance A.Applicative InstM where + pure a = InstM $ \s -> (a, s) + InstM f <*> InstM x = InstM $ \s -> let (f', s') = f s + (x', s'') = x s' + in (f' x', s'') + +instance Monad InstM where + return = A.pure + InstM m >>= f = InstM $ \s -> let (x, s') = m s + in runInstM (f x) s' + +-- | Given a list of 'LinkedComponent's, expand the module graph +-- so that we have an instantiated graph containing all of the +-- instantiated components we need to build. 
+-- +-- Instantiation intuitively follows the following algorithm: +-- +-- instantiate a definite unit id p[S]: +-- recursively instantiate each module M in S +-- recursively instantiate modules exported by this unit +-- recursively instantiate dependencies substituted by S +-- +-- The implementation is a bit more involved to memoize instantiation +-- if we have done it already. +-- +-- We also call 'improveUnitId' during this process, so that fully +-- instantiated components are given 'HashedUnitId'. +-- +toReadyComponents + :: Map UnitId MungedPackageId + -> Map ModuleName Module -- subst for the public component + -> [LinkedComponent] + -> [ReadyComponent] +toReadyComponents pid_map subst0 comps + = catMaybes (Map.elems ready_map) + where + cmap = Map.fromList [ (lc_cid lc, lc) | lc <- comps ] + + instantiateUnitId :: ComponentId -> Map ModuleName Module + -> InstM DefUnitId + instantiateUnitId cid insts = InstM $ \s -> + case Map.lookup uid s of + Nothing -> + -- Knot tied + let (r, s') = runInstM (instantiateComponent uid cid insts) + (Map.insert uid r s) + in (def_uid, Map.insert uid r s') + Just _ -> (def_uid, s) + where + -- The mkDefUnitId here indicates that we assume + -- that Cabal handles unit id hash allocation. + -- Good thing about hashing here: map is only on string. + -- Bad thing: have to repeatedly hash. 
+ def_uid = mkDefUnitId cid insts + uid = unDefUnitId def_uid + + instantiateComponent + :: UnitId -> ComponentId -> Map ModuleName Module + -> InstM (Maybe ReadyComponent) + instantiateComponent uid cid insts + | Just lc <- Map.lookup cid cmap = do + provides <- T.mapM (substModule insts) (modShapeProvides (lc_shape lc)) + -- NB: lc_sig_includes is omitted here, because we don't + -- need them to build + includes <- forM (lc_includes lc) $ \ci -> do + uid' <- substUnitId insts (ci_id ci) + return ci { ci_ann_id = fmap (const uid') (ci_ann_id ci) } + exe_deps <- mapM (substExeDep insts) (lc_exe_deps lc) + s <- InstM $ \s -> (s, s) + let getDep (Module dep_def_uid _) + | let dep_uid = unDefUnitId dep_def_uid + -- Lose DefUnitId invariant for rc_depends + = [(dep_uid, + fromMaybe err_pid $ + Map.lookup dep_uid pid_map A.<|> + fmap rc_munged_id (join (Map.lookup dep_uid s)))] + where + err_pid = MungedPackageId + (mkMungedPackageName "nonexistent-package-this-is-a-cabal-bug") + (mkVersion [0]) + instc = InstantiatedComponent { + instc_insts = Map.toList insts, + instc_insts_deps = concatMap getDep (Map.elems insts), + instc_provides = provides, + instc_includes = includes + -- NB: there is no dependency on the + -- indefinite version of this instantiated package here, + -- as (1) it doesn't go in depends in the + -- IPI: it's not a run time dep, and (2) + -- we don't have to tell GHC about it, it + -- will match up the ComponentId + -- automatically + } + return $ Just ReadyComponent { + rc_ann_id = (lc_ann_id lc) { ann_id = uid }, + rc_open_uid = DefiniteUnitId (unsafeMkDefUnitId uid), + rc_cid = lc_cid lc, + rc_component = lc_component lc, + rc_exe_deps = exe_deps, + rc_public = lc_public lc, + rc_i = Right instc + } + | otherwise = return Nothing + + substUnitId :: Map ModuleName Module -> OpenUnitId -> InstM DefUnitId + substUnitId _ (DefiniteUnitId uid) = + return uid + substUnitId subst (IndefFullUnitId cid insts) = do + insts' <- substSubst subst insts + 
instantiateUnitId cid insts' + + -- NB: NOT composition + substSubst :: Map ModuleName Module + -> Map ModuleName OpenModule + -> InstM (Map ModuleName Module) + substSubst subst insts = T.mapM (substModule subst) insts + + substModule :: Map ModuleName Module -> OpenModule -> InstM Module + substModule subst (OpenModuleVar mod_name) + | Just m <- Map.lookup mod_name subst = return m + | otherwise = error "substModule: non-closing substitution" + substModule subst (OpenModule uid mod_name) = do + uid' <- substUnitId subst uid + return (Module uid' mod_name) + + substExeDep :: Map ModuleName Module + -> AnnotatedId OpenUnitId -> InstM (AnnotatedId UnitId) + substExeDep insts exe_aid = do + exe_uid' <- substUnitId insts (ann_id exe_aid) + return exe_aid { ann_id = unDefUnitId exe_uid' } + + indefiniteUnitId :: ComponentId -> InstM UnitId + indefiniteUnitId cid = do + let uid = newSimpleUnitId cid + r <- indefiniteComponent uid cid + InstM $ \s -> (uid, Map.insert uid r s) + + indefiniteComponent :: UnitId -> ComponentId -> InstM (Maybe ReadyComponent) + indefiniteComponent uid cid + | Just lc <- Map.lookup cid cmap = do + exe_deps <- mapM (substExeDep Map.empty) (lc_exe_deps lc) + let indefc = IndefiniteComponent { + indefc_requires = map fst (lc_insts lc), + indefc_provides = modShapeProvides (lc_shape lc), + indefc_includes = lc_includes lc ++ lc_sig_includes lc + } + return $ Just ReadyComponent { + rc_ann_id = (lc_ann_id lc) { ann_id = uid }, + rc_cid = lc_cid lc, + rc_open_uid = lc_uid lc, + rc_component = lc_component lc, + -- It's always fully built + rc_exe_deps = exe_deps, + rc_public = lc_public lc, + rc_i = Left indefc + } + | otherwise = return Nothing + + ready_map = snd $ runInstM work Map.empty + + work + | not (Map.null subst0) + , [lc] <- filter lc_public (Map.elems cmap) + = do _ <- instantiateUnitId (lc_cid lc) subst0 + return () + | otherwise + = forM_ (Map.elems cmap) $ \lc -> + if null (lc_insts lc) + then instantiateUnitId (lc_cid lc) Map.empty 
>> return () + else indefiniteUnitId (lc_cid lc) >> return () diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/UnifyM.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/UnifyM.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack/UnifyM.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack/UnifyM.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,628 @@ +{-# LANGUAGE Rank2Types #-} +{-# LANGUAGE ScopedTypeVariables #-} +-- | See +module Distribution.Backpack.UnifyM ( + -- * Unification monad + UnifyM, + runUnifyM, + failWith, + addErr, + failIfErrs, + tryM, + addErrContext, + addErrContextM, + liftST, + + UnifEnv(..), + getUnifEnv, + + -- * Modules and unit IDs + ModuleU, + ModuleU'(..), + convertModule, + convertModuleU, + + UnitIdU, + UnitIdU'(..), + convertUnitId, + convertUnitIdU, + + ModuleSubstU, + convertModuleSubstU, + convertModuleSubst, + + ModuleScopeU, + emptyModuleScopeU, + convertModuleScopeU, + + ModuleWithSourceU, + + convertInclude, + convertModuleProvides, + convertModuleProvidesU, + +) where + +import Prelude () +import Distribution.Compat.Prelude hiding (mod) + +import Distribution.Backpack.ModuleShape +import Distribution.Backpack.ModuleScope +import Distribution.Backpack.ModSubst +import Distribution.Backpack.FullUnitId +import Distribution.Backpack + +import qualified Distribution.Utils.UnionFind as UnionFind +import Distribution.ModuleName +import Distribution.Package +import Distribution.PackageDescription +import Distribution.Text +import Distribution.Types.IncludeRenaming +import Distribution.Types.ComponentInclude +import Distribution.Types.AnnotatedId +import Distribution.Types.ComponentName +import Distribution.Verbosity + +import Data.STRef +import Data.Traversable +import Control.Monad.ST +import qualified Data.Map as Map +import qualified 
Data.Set as Set +import Data.IntMap (IntMap) +import qualified Data.IntMap as IntMap +import qualified Data.Traversable as T +import Text.PrettyPrint + +-- TODO: more detailed trace output on high verbosity would probably +-- be appreciated by users debugging unification errors. Collect +-- some good examples! + +data ErrMsg = ErrMsg { + err_msg :: Doc, + err_ctx :: [Doc] + } +type MsgDoc = Doc + +renderErrMsg :: ErrMsg -> MsgDoc +renderErrMsg ErrMsg { err_msg = msg, err_ctx = ctx } = + msg $$ vcat ctx + +-- | The unification monad, this monad encapsulates imperative +-- unification. +newtype UnifyM s a = UnifyM { unUnifyM :: UnifEnv s -> ST s (Maybe a) } + +-- | Run a computation in the unification monad. +runUnifyM :: Verbosity -> ComponentId -> FullDb -> (forall s. UnifyM s a) -> Either [MsgDoc] a +runUnifyM verbosity self_cid db m + = runST $ do i <- newSTRef 0 + hmap <- newSTRef Map.empty + errs <- newSTRef [] + mb_r <- unUnifyM m UnifEnv { + unify_uniq = i, + unify_reqs = hmap, + unify_self_cid = self_cid, + unify_verbosity = verbosity, + unify_ctx = [], + unify_db = db, + unify_errs = errs } + final_errs <- readSTRef errs + case mb_r of + Just x | null final_errs -> return (Right x) + _ -> return (Left (map renderErrMsg (reverse final_errs))) +-- NB: GHC 7.6 throws a hissy fit if you pattern match on 'm'. + +type ErrCtx s = MsgDoc + +-- | The unification environment. +data UnifEnv s = UnifEnv { + -- | A supply of unique integers to label 'UnitIdU' + -- cells. This is used to determine loops in unit + -- identifiers (which can happen with mutual recursion.) + unify_uniq :: UnifRef s UnitIdUnique, + -- | The set of requirements in scope. When + -- a provision is brought into scope, we unify with + -- the requirement at the same module name to fill it. + -- This mapping grows monotonically. + unify_reqs :: UnifRef s (Map ModuleName (ModuleU s)), + -- | Component id of the unit we're linking. 
We use this + -- to detect if we fill a requirement with a local module, + -- which in principle should be OK but is not currently + -- supported by GHC. + unify_self_cid :: ComponentId, + -- | How verbose the error message should be + unify_verbosity :: Verbosity, + -- | The error reporting context + unify_ctx :: [ErrCtx s], + -- | The package index for expanding unit identifiers + unify_db :: FullDb, + -- | Accumulated errors + unify_errs :: UnifRef s [ErrMsg] + } + +instance Functor (UnifyM s) where + fmap f (UnifyM m) = UnifyM (fmap (fmap (fmap f)) m) + +instance Applicative (UnifyM s) where + pure = UnifyM . pure . pure . pure + UnifyM f <*> UnifyM x = UnifyM $ \r -> do + f' <- f r + case f' of + Nothing -> return Nothing + Just f'' -> do + x' <- x r + case x' of + Nothing -> return Nothing + Just x'' -> return (Just (f'' x'')) + +instance Monad (UnifyM s) where + return = pure + UnifyM m >>= f = UnifyM $ \r -> do + x <- m r + case x of + Nothing -> return Nothing + Just x' -> unUnifyM (f x') r + +-- | Lift a computation from 'ST' monad to 'UnifyM' monad. +-- Internal use only. 
+liftST :: ST s a -> UnifyM s a +liftST m = UnifyM $ \_ -> fmap Just m + +addErr :: MsgDoc -> UnifyM s () +addErr msg = do + env <- getUnifEnv + let err = ErrMsg { + err_msg = msg, + err_ctx = unify_ctx env + } + liftST $ modifySTRef (unify_errs env) (\errs -> err:errs) + +failWith :: MsgDoc -> UnifyM s a +failWith msg = do + addErr msg + failM + +failM :: UnifyM s a +failM = UnifyM $ \_ -> return Nothing + +failIfErrs :: UnifyM s () +failIfErrs = do + env <- getUnifEnv + errs <- liftST $ readSTRef (unify_errs env) + when (not (null errs)) failM + +tryM :: UnifyM s a -> UnifyM s (Maybe a) +tryM m = + UnifyM (\env -> do + mb_r <- unUnifyM m env + return (Just mb_r)) + +{- +otherFail :: ErrMsg -> UnifyM s a +otherFail s = UnifyM $ \_ -> return (Left s) + +unifyFail :: ErrMsg -> UnifyM s a +unifyFail err = do + env <- getUnifEnv + msg <- case unify_ctx env of + Nothing -> return (text "Unspecified unification error:" <+> err) + Just (ctx, mod1, mod2) + | unify_verbosity env > normal + -> do mod1' <- convertModuleU mod1 + mod2' <- convertModuleU mod2 + let extra = " (was unifying " ++ display mod1' + ++ " and " ++ display mod2' ++ ")" + return (ctx ++ err ++ extra) + | otherwise + -> return (ctx ++ err ++ " (for more information, pass -v flag)") + UnifyM $ \_ -> return (Left msg) +-} + +-- | A convenient alias for mutable references in the unification monad. +type UnifRef s a = STRef s a + +-- | Imperatively read a 'UnifRef'. +readUnifRef :: UnifRef s a -> UnifyM s a +readUnifRef = liftST . readSTRef + +-- | Imperatively write a 'UnifRef'. +writeUnifRef :: UnifRef s a -> a -> UnifyM s () +writeUnifRef x = liftST . writeSTRef x + +-- | Get the current unification environment. +getUnifEnv :: UnifyM s (UnifEnv s) +getUnifEnv = UnifyM $ \r -> return (return r) + +-- | Add a fixed message to the error context. +addErrContext :: Doc -> UnifyM s a -> UnifyM s a +addErrContext ctx m = addErrContextM ctx m + +-- | Add a message to the error context. It may make monadic queries. 
+addErrContextM :: ErrCtx s -> UnifyM s a -> UnifyM s a +addErrContextM ctx m = + UnifyM $ \r -> unUnifyM m r { unify_ctx = ctx : unify_ctx r } + + +----------------------------------------------------------------------- +-- The "unifiable" variants of the data types +-- +-- In order to properly do unification over infinite trees, we +-- need to union find over 'Module's and 'UnitId's. The pure +-- representation is ill-equipped to do this, so we convert +-- from the pure representation into one which is indirected +-- through union-find. 'ModuleU' handles hole variables; +-- 'UnitIdU' handles mu-binders. + +-- | Contents of a mutable 'ModuleU' reference. +data ModuleU' s + = ModuleU (UnitIdU s) ModuleName + | ModuleVarU ModuleName + +-- | Contents of a mutable 'UnitIdU' reference. +data UnitIdU' s + = UnitIdU UnitIdUnique ComponentId (Map ModuleName (ModuleU s)) + | UnitIdThunkU DefUnitId + +-- | A mutable version of 'Module' which can be imperatively unified. +type ModuleU s = UnionFind.Point s (ModuleU' s) + +-- | A mutable version of 'UnitId' which can be imperatively unified. +type UnitIdU s = UnionFind.Point s (UnitIdU' s) + +-- | An integer for uniquely labeling 'UnitIdU' nodes. We need +-- these labels in order to efficiently serialize 'UnitIdU's into +-- 'UnitId's (we use the label to check if any parent is the +-- node in question, and if so insert a deBruijn index instead.) +-- These labels must be unique across all 'UnitId's/'Module's which +-- participate in unification! +type UnitIdUnique = Int + + +----------------------------------------------------------------------- +-- Conversion to the unifiable data types + +-- An environment for tracking the mu-bindings in scope. +-- The invariant for a state @(m, i)@ is that [0..i] are +-- keys of @m@; in fact, the @i-k@th entry is the @k@th +-- de Bruijn index (this saves us from having to shift as +-- we enter mu-binders.) 
+type MuEnv s = (IntMap (UnitIdU s), Int) + +extendMuEnv :: MuEnv s -> UnitIdU s -> MuEnv s +extendMuEnv (m, i) x = + (IntMap.insert (i + 1) x m, i + 1) + +{- +lookupMuEnv :: MuEnv s -> Int {- de Bruijn index -} -> UnitIdU s +lookupMuEnv (m, i) k = + case IntMap.lookup (i - k) m of + -- Technically a user can trigger this by giving us a + -- bad 'UnitId', so handle this better. + Nothing -> error "lookupMuEnv: out of bounds (malformed de Bruijn index)" + Just v -> v +-} + +emptyMuEnv :: MuEnv s +emptyMuEnv = (IntMap.empty, -1) + +-- The workhorse functions. These share an environment: +-- * @UnifRef s UnitIdUnique@ - the unique label supply for 'UnitIdU' nodes +-- * @UnifRef s (Map ModuleName moduleU)@ - the (lazily initialized) +-- environment containing the implicitly universally quantified +-- @hole:A@ binders. +-- * @MuEnv@ - the environment for mu-binders. + +convertUnitId' :: MuEnv s + -> OpenUnitId + -> UnifyM s (UnitIdU s) +-- TODO: this could be more lazy if we know there are no internal +-- references +convertUnitId' _ (DefiniteUnitId uid) = + liftST $ UnionFind.fresh (UnitIdThunkU uid) +convertUnitId' stk (IndefFullUnitId cid insts) = do + fs <- fmap unify_uniq getUnifEnv + x <- liftST $ UnionFind.fresh (error "convertUnitId") -- tie the knot later + insts_u <- T.forM insts $ convertModule' (extendMuEnv stk x) + u <- readUnifRef fs + writeUnifRef fs (u+1) + y <- liftST $ UnionFind.fresh (UnitIdU u cid insts_u) + liftST $ UnionFind.union x y + return y +-- convertUnitId' stk (UnitIdVar i) = return (lookupMuEnv stk i) + +convertModule' :: MuEnv s + -> OpenModule -> UnifyM s (ModuleU s) +convertModule' _stk (OpenModuleVar mod_name) = do + hmap <- fmap unify_reqs getUnifEnv + hm <- readUnifRef hmap + case Map.lookup mod_name hm of + Nothing -> do mod <- liftST $ UnionFind.fresh (ModuleVarU mod_name) + writeUnifRef hmap (Map.insert mod_name mod hm) + return mod + Just mod -> return mod +convertModule' stk (OpenModule uid mod_name) = do + uid_u <- 
convertUnitId' stk uid + liftST $ UnionFind.fresh (ModuleU uid_u mod_name) + +convertUnitId :: OpenUnitId -> UnifyM s (UnitIdU s) +convertUnitId = convertUnitId' emptyMuEnv + +convertModule :: OpenModule -> UnifyM s (ModuleU s) +convertModule = convertModule' emptyMuEnv + + + +----------------------------------------------------------------------- +-- Substitutions + +-- | The mutable counterpart of a 'ModuleSubst' (not defined here). +type ModuleSubstU s = Map ModuleName (ModuleU s) + +-- | Conversion of 'ModuleSubst' to 'ModuleSubstU' +convertModuleSubst :: Map ModuleName OpenModule -> UnifyM s (Map ModuleName (ModuleU s)) +convertModuleSubst = T.mapM convertModule + +-- | Conversion of 'ModuleSubstU' to 'ModuleSubst' +convertModuleSubstU :: ModuleSubstU s -> UnifyM s OpenModuleSubst +convertModuleSubstU = T.mapM convertModuleU + +----------------------------------------------------------------------- +-- Conversion from the unifiable data types + +-- An environment for tracking candidates for adding a mu-binding. +-- The invariant for a state @(m, i)@, is that if we encounter a node +-- labeled @k@ such that @m[k -> v]@, then we can replace this +-- node with the de Bruijn index @i-v@ referring to an enclosing +-- mu-binder; furthermore, @range(m) = [0..i]@. 
+type MooEnv = (IntMap Int, Int) + +emptyMooEnv :: MooEnv +emptyMooEnv = (IntMap.empty, -1) + +extendMooEnv :: MooEnv -> UnitIdUnique -> MooEnv +extendMooEnv (m, i) k = (IntMap.insert k (i + 1) m, i + 1) + +lookupMooEnv :: MooEnv -> UnitIdUnique -> Maybe Int +lookupMooEnv (m, i) k = + case IntMap.lookup k m of + Nothing -> Nothing + Just v -> Just (i-v) -- de Bruijn indexize + +-- The workhorse functions + +convertUnitIdU' :: MooEnv -> UnitIdU s -> UnifyM s OpenUnitId +convertUnitIdU' stk uid_u = do + x <- liftST $ UnionFind.find uid_u + case x of + UnitIdThunkU uid -> return (DefiniteUnitId uid) + UnitIdU u cid insts_u -> + case lookupMooEnv stk u of + Just _i -> + failWith (text "Unsupported mutually recursive unit identifier") + -- return (UnitIdVar i) + Nothing -> do + insts <- T.forM insts_u $ convertModuleU' (extendMooEnv stk u) + return (IndefFullUnitId cid insts) + +convertModuleU' :: MooEnv -> ModuleU s -> UnifyM s OpenModule +convertModuleU' stk mod_u = do + mod <- liftST $ UnionFind.find mod_u + case mod of + ModuleVarU mod_name -> return (OpenModuleVar mod_name) + ModuleU uid_u mod_name -> do + uid <- convertUnitIdU' stk uid_u + return (OpenModule uid mod_name) + +-- Helper functions + +convertUnitIdU :: UnitIdU s -> UnifyM s OpenUnitId +convertUnitIdU = convertUnitIdU' emptyMooEnv + +convertModuleU :: ModuleU s -> UnifyM s OpenModule +convertModuleU = convertModuleU' emptyMooEnv + +-- | An empty 'ModuleScopeU'. +emptyModuleScopeU :: ModuleScopeU s +emptyModuleScopeU = (Map.empty, Map.empty) + + +-- | The mutable counterpart of 'ModuleScope'. 
+type ModuleScopeU s = (ModuleProvidesU s, ModuleRequiresU s) +-- | The mutable counterpart of 'ModuleProvides' +type ModuleProvidesU s = Map ModuleName [ModuleWithSourceU s] +type ModuleRequiresU s = ModuleProvidesU s +type ModuleWithSourceU s = WithSource (ModuleU s) + +-- TODO: Deduplicate this with Distribution.Backpack.MixLink.dispSource +ci_msg :: ComponentInclude (OpenUnitId, ModuleShape) IncludeRenaming -> Doc +ci_msg ci + | ci_implicit ci = text "build-depends:" <+> pp_pn + | otherwise = text "mixins:" <+> pp_pn <+> disp (ci_renaming ci) + where + pn = pkgName (ci_pkgid ci) + pp_pn = + case ci_cname ci of + CLibName -> disp pn + CSubLibName cn -> disp pn <<>> colon <<>> disp cn + -- Shouldn't happen + cn -> disp pn <+> parens (disp cn) + +-- | Convert a 'ModuleShape' into a 'ModuleScopeU', so we can do +-- unification on it. +convertInclude + :: ComponentInclude (OpenUnitId, ModuleShape) IncludeRenaming + -> UnifyM s (ModuleScopeU s, + Either (ComponentInclude (UnitIdU s) ModuleRenaming) {- normal -} + (ComponentInclude (UnitIdU s) ModuleRenaming) {- sig -}) +convertInclude ci@(ComponentInclude { + ci_ann_id = AnnotatedId { + ann_id = (uid, ModuleShape provs reqs), + ann_pid = pid, + ann_cname = compname + }, + ci_renaming = incl@(IncludeRenaming prov_rns req_rns), + ci_implicit = implicit + }) = addErrContext (text "In" <+> ci_msg ci) $ do + let pn = packageName pid + the_source | implicit + = FromBuildDepends pn compname + | otherwise + = FromMixins pn compname incl + source = WithSource the_source + + -- Suppose our package has two requirements A and B, and + -- we include it with @requires (A as X)@ + -- There are three closely related things we compute based + -- off of @reqs@ and @reqs_rns@: + -- + -- 1. The requirement renaming (A -> X) + -- 2. The requirement substitution (A -> , B -> ) + + -- Requirement renaming. 
This is read straight off the syntax: + -- + -- [nothing] ==> [empty] + -- requires (B as Y) ==> B -> Y + -- + -- Requirement renamings are NOT injective: if two requirements + -- are mapped to the same name, the intent is to merge them + -- together. But they are *functions*, so @B as X, B as Y@ is + -- illegal. + + req_rename_list <- + case req_rns of + DefaultRenaming -> return [] + HidingRenaming _ -> do + -- Not valid here for requires! + addErr $ text "Unsupported syntax" <+> + quotes (text "requires hiding (...)") + return [] + ModuleRenaming rns -> return rns + + let req_rename_listmap :: Map ModuleName [ModuleName] + req_rename_listmap = + Map.fromListWith (++) [ (k,[v]) | (k,v) <- req_rename_list ] + req_rename <- sequenceA . flip Map.mapWithKey req_rename_listmap $ \k vs0 -> + case vs0 of + [] -> error "req_rename" + [v] -> return v + v:vs -> do addErr $ + text "Conflicting renamings of requirement" <+> quotes (disp k) $$ + text "Renamed to: " <+> vcat (map disp (v:vs)) + return v + + let req_rename_fn k = case Map.lookup k req_rename of + Nothing -> k + Just v -> v + + -- Requirement substitution. + -- + -- A -> X ==> A -> + let req_subst = fmap OpenModuleVar req_rename + + uid_u <- convertUnitId (modSubst req_subst uid) + + -- Requirement mapping. This is just taking the range of the + -- requirement substitution, and making a mapping so that it is + -- convenient to merge things together. It INCLUDES the implicit + -- mappings. + -- + -- A -> X ==> X -> , B -> + reqs_u <- convertModuleRequires . 
Map.fromList $ + [ (k, [source (OpenModuleVar k)]) + | k <- map req_rename_fn (Set.toList reqs) + ] + + -- Report errors if there were unused renamings + let leftover = Map.keysSet req_rename `Set.difference` reqs + unless (Set.null leftover) $ + addErr $ + hang (text "The" <+> text (showComponentName compname) <+> + text "from package" <+> quotes (disp pid) + <+> text "does not require:") 4 + (vcat (map disp (Set.toList leftover))) + + -- Provision computation is more complex. + -- For example, if we have: + -- + -- include p (A as X) requires (B as Y) + -- where A -> q[B=]:A + -- + -- Then we need: + -- + -- X -> [("p", q[B=]:A)] + -- + -- There are a bunch of clever ways to present the algorithm + -- but here is the simple one: + -- + -- 1. If we have a default renaming, apply req_subst + -- to provs and use that. + -- + -- 2. Otherwise, build a map by successively looking + -- up the referenced modules in the renaming in provs. + -- + -- Importantly, overlapping rename targets get accumulated + -- together. It's not an (immediate) error. + (pre_prov_scope, prov_rns') <- + case prov_rns of + DefaultRenaming -> return (Map.toList provs, prov_rns) + HidingRenaming hides -> + let hides_set = Set.fromList hides + in let r = [ (k,v) + | (k,v) <- Map.toList provs + , not (k `Set.member` hides_set) ] + -- GHC doesn't understand hiding, so expand it out! 
+ in return (r, ModuleRenaming (map ((\x -> (x,x)).fst) r)) + ModuleRenaming rns -> do + r <- sequence + [ case Map.lookup from provs of + Just m -> return (to, m) + Nothing -> failWith $ + text "Package" <+> quotes (disp pid) <+> + text "does not expose the module" <+> quotes (disp from) + | (from, to) <- rns ] + return (r, prov_rns) + let prov_scope = modSubst req_subst + $ Map.fromListWith (++) + [ (k, [source v]) + | (k, v) <- pre_prov_scope ] + + provs_u <- convertModuleProvides prov_scope + + -- TODO: Assert that provs_u is empty if provs was empty + return ((provs_u, reqs_u), + -- NB: We test that requirements is not null so that + -- users can create packages with zero module exports + -- that cause some C library to linked in, etc. + (if Map.null provs && not (Set.null reqs) + then Right -- is sig + else Left) (ComponentInclude { + ci_ann_id = AnnotatedId { + ann_id = uid_u, + ann_pid = pid, + ann_cname = compname + }, + ci_renaming = prov_rns', + ci_implicit = ci_implicit ci + })) + +-- | Convert a 'ModuleScopeU' to a 'ModuleScope'. +convertModuleScopeU :: ModuleScopeU s -> UnifyM s ModuleScope +convertModuleScopeU (provs_u, reqs_u) = do + provs <- convertModuleProvidesU provs_u + reqs <- convertModuleRequiresU reqs_u + -- TODO: Test that the requirements are still free. If they + -- are not, they got unified, and that's dodgy at best. 
+ return (ModuleScope provs reqs) + +-- | Convert a 'ModuleProvides' to a 'ModuleProvidesU' +convertModuleProvides :: ModuleProvides -> UnifyM s (ModuleProvidesU s) +convertModuleProvides = T.mapM (mapM (T.mapM convertModule)) + +-- | Convert a 'ModuleProvidesU' to a 'ModuleProvides' +convertModuleProvidesU :: ModuleProvidesU s -> UnifyM s ModuleProvides +convertModuleProvidesU = T.mapM (mapM (T.mapM convertModuleU)) + +convertModuleRequires :: ModuleRequires -> UnifyM s (ModuleRequiresU s) +convertModuleRequires = convertModuleProvides + +convertModuleRequiresU :: ModuleRequiresU s -> UnifyM s ModuleRequires +convertModuleRequiresU = convertModuleProvidesU diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Backpack.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Backpack.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,311 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE PatternGuards #-} +{-# LANGUAGE RankNTypes #-} + +-- | This module defines the core data types for Backpack. 
For more +-- details, see: +-- +-- + +module Distribution.Backpack ( + -- * OpenUnitId + OpenUnitId(..), + openUnitIdFreeHoles, + mkOpenUnitId, + + -- * DefUnitId + DefUnitId, + unDefUnitId, + mkDefUnitId, + + -- * OpenModule + OpenModule(..), + openModuleFreeHoles, + + -- * OpenModuleSubst + OpenModuleSubst, + dispOpenModuleSubst, + dispOpenModuleSubstEntry, + parseOpenModuleSubst, + parseOpenModuleSubstEntry, + parsecOpenModuleSubst, + parsecOpenModuleSubstEntry, + openModuleSubstFreeHoles, + + -- * Conversions to 'UnitId' + abstractUnitId, + hashModuleSubst, +) where + +import Distribution.Compat.Prelude hiding (mod) +import Distribution.Compat.ReadP ((<++)) +import Distribution.Parsec.Class +import Distribution.Pretty +import Prelude () +import Text.PrettyPrint (hcat) + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +import Distribution.ModuleName +import Distribution.Text +import Distribution.Types.ComponentId +import Distribution.Types.Module +import Distribution.Types.UnitId +import Distribution.Utils.Base62 + +import qualified Data.Map as Map +import Data.Set (Set) +import qualified Data.Set as Set + +----------------------------------------------------------------------- +-- OpenUnitId + +-- | An 'OpenUnitId' describes a (possibly partially) instantiated +-- Backpack component, with a description of how the holes are filled +-- in. Unlike 'OpenUnitId', the 'ModuleSubst' is kept in a structured +-- form that allows for substitution (which fills in holes.) This form +-- of unit cannot be installed. It must first be converted to a +-- 'UnitId'. +-- +-- In the absence of Backpack, there are no holes to fill, so any such +-- component always has an empty module substitution; thus we can lossly +-- represent it as an 'OpenUnitId uid'. 
+-- +-- For a source component using Backpack, however, there is more +-- structure as components may be parametrized over some signatures, and +-- these \"holes\" may be partially or wholly filled. +-- +-- OpenUnitId plays an important role when we are mix-in linking, +-- and is recorded to the installed packaged database for indefinite +-- packages; however, for compiled packages that are fully instantiated, +-- we instantiate 'OpenUnitId' into 'UnitId'. +-- +-- For more details see the Backpack spec +-- +-- + +data OpenUnitId + -- | Identifies a component which may have some unfilled holes; + -- specifying its 'ComponentId' and its 'OpenModuleSubst'. + -- TODO: Invariant that 'OpenModuleSubst' is non-empty? + -- See also the Text instance. + = IndefFullUnitId ComponentId OpenModuleSubst + -- | Identifies a fully instantiated component, which has + -- been compiled and abbreviated as a hash. The embedded 'UnitId' + -- MUST NOT be for an indefinite component; an 'OpenUnitId' + -- is guaranteed not to have any holes. + | DefiniteUnitId DefUnitId + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) +-- TODO: cache holes? 
+ +instance Binary OpenUnitId + +instance NFData OpenUnitId where + rnf (IndefFullUnitId cid subst) = rnf cid `seq` rnf subst + rnf (DefiniteUnitId uid) = rnf uid + +instance Pretty OpenUnitId where + pretty (IndefFullUnitId cid insts) + -- TODO: arguably a smart constructor to enforce invariant would be + -- better + | Map.null insts = pretty cid + | otherwise = pretty cid <<>> Disp.brackets (dispOpenModuleSubst insts) + pretty (DefiniteUnitId uid) = pretty uid + +-- | +-- +-- >>> eitherParsec "foobar" :: Either String OpenUnitId +--Right (DefiniteUnitId (DefUnitId {unDefUnitId = UnitId "foobar"})) +-- +-- >>> eitherParsec "foo[Str=text-1.2.3:Data.Text.Text]" :: Either String OpenUnitId +-- Right (IndefFullUnitId (ComponentId "foo") (fromList [(ModuleName ["Str"],OpenModule (DefiniteUnitId (DefUnitId {unDefUnitId = UnitId "text-1.2.3"})) (ModuleName ["Data","Text","Text"]))])) +-- +instance Parsec OpenUnitId where + parsec = P.try parseOpenUnitId <|> fmap DefiniteUnitId parsec + where + parseOpenUnitId = do + cid <- parsec + insts <- P.between (P.char '[') (P.char ']') + parsecOpenModuleSubst + return (IndefFullUnitId cid insts) + +instance Text OpenUnitId where + parse = parseOpenUnitId <++ fmap DefiniteUnitId parse + where + parseOpenUnitId = do + cid <- parse + insts <- Parse.between (Parse.char '[') (Parse.char ']') + parseOpenModuleSubst + return (IndefFullUnitId cid insts) + +-- | Get the set of holes ('ModuleVar') embedded in a 'UnitId'. +openUnitIdFreeHoles :: OpenUnitId -> Set ModuleName +openUnitIdFreeHoles (IndefFullUnitId _ insts) = openModuleSubstFreeHoles insts +openUnitIdFreeHoles _ = Set.empty + +-- | Safe constructor from a UnitId. The only way to do this safely +-- is if the instantiation is provided. +mkOpenUnitId :: UnitId -> ComponentId -> OpenModuleSubst -> OpenUnitId +mkOpenUnitId uid cid insts = + if Set.null (openModuleSubstFreeHoles insts) + then DefiniteUnitId (unsafeMkDefUnitId uid) -- invariant holds! 
+ else IndefFullUnitId cid insts + +----------------------------------------------------------------------- +-- DefUnitId + +-- | Create a 'DefUnitId' from a 'ComponentId' and an instantiation +-- with no holes. +mkDefUnitId :: ComponentId -> Map ModuleName Module -> DefUnitId +mkDefUnitId cid insts = + unsafeMkDefUnitId (mkUnitId + (unComponentId cid ++ maybe "" ("+"++) (hashModuleSubst insts))) + -- impose invariant! + +----------------------------------------------------------------------- +-- OpenModule + +-- | Unlike a 'Module', an 'OpenModule' is either an ordinary +-- module from some unit, OR an 'OpenModuleVar', representing a +-- hole that needs to be filled in. Substitutions are over +-- module variables. +data OpenModule + = OpenModule OpenUnitId ModuleName + | OpenModuleVar ModuleName + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) + +instance Binary OpenModule + +instance NFData OpenModule where + rnf (OpenModule uid mod_name) = rnf uid `seq` rnf mod_name + rnf (OpenModuleVar mod_name) = rnf mod_name + +instance Pretty OpenModule where + pretty (OpenModule uid mod_name) = + hcat [pretty uid, Disp.text ":", pretty mod_name] + pretty (OpenModuleVar mod_name) = + hcat [Disp.char '<', pretty mod_name, Disp.char '>'] + +-- | +-- +-- >>> eitherParsec "Includes2-0.1.0.0-inplace-mysql:Database.MySQL" :: Either String OpenModule +-- Right (OpenModule (DefiniteUnitId (DefUnitId {unDefUnitId = UnitId "Includes2-0.1.0.0-inplace-mysql"})) (ModuleName ["Database","MySQL"])) +-- +instance Parsec OpenModule where + parsec = parsecModuleVar <|> parsecOpenModule + where + parsecOpenModule = do + uid <- parsec + _ <- P.char ':' + mod_name <- parsec + return (OpenModule uid mod_name) + + parsecModuleVar = do + _ <- P.char '<' + mod_name <- parsec + _ <- P.char '>' + return (OpenModuleVar mod_name) + +instance Text OpenModule where + parse = parseModuleVar <++ parseOpenModule + where + parseOpenModule = do + uid <- parse + _ <- Parse.char ':' + mod_name <- parse 
+ return (OpenModule uid mod_name) + parseModuleVar = do + _ <- Parse.char '<' + mod_name <- parse + _ <- Parse.char '>' + return (OpenModuleVar mod_name) + +-- | Get the set of holes ('ModuleVar') embedded in a 'Module'. +openModuleFreeHoles :: OpenModule -> Set ModuleName +openModuleFreeHoles (OpenModuleVar mod_name) = Set.singleton mod_name +openModuleFreeHoles (OpenModule uid _n) = openUnitIdFreeHoles uid + +----------------------------------------------------------------------- +-- OpenModuleSubst + +-- | An explicit substitution on modules. +-- +-- NB: These substitutions are NOT idempotent, for example, a +-- valid substitution is (A -> B, B -> A). +type OpenModuleSubst = Map ModuleName OpenModule + +-- | Pretty-print the entries of a module substitution, suitable +-- for embedding into a 'OpenUnitId' or passing to GHC via @--instantiate-with@. +dispOpenModuleSubst :: OpenModuleSubst -> Disp.Doc +dispOpenModuleSubst subst + = Disp.hcat + . Disp.punctuate Disp.comma + $ map dispOpenModuleSubstEntry (Map.toAscList subst) + +-- | Pretty-print a single entry of a module substitution. +dispOpenModuleSubstEntry :: (ModuleName, OpenModule) -> Disp.Doc +dispOpenModuleSubstEntry (k, v) = disp k <<>> Disp.char '=' <<>> disp v + +-- | Inverse to 'dispModSubst'. +parseOpenModuleSubst :: Parse.ReadP r OpenModuleSubst +parseOpenModuleSubst = fmap Map.fromList + . flip Parse.sepBy (Parse.char ',') + $ parseOpenModuleSubstEntry + +-- | Inverse to 'dispModSubstEntry'. +parseOpenModuleSubstEntry :: Parse.ReadP r (ModuleName, OpenModule) +parseOpenModuleSubstEntry = + do k <- parse + _ <- Parse.char '=' + v <- parse + return (k, v) + +-- | Inverse to 'dispModSubst'. +-- +-- @since 2.2 +parsecOpenModuleSubst :: CabalParsing m => m OpenModuleSubst +parsecOpenModuleSubst = fmap Map.fromList + . flip P.sepBy (P.char ',') + $ parsecOpenModuleSubstEntry + +-- | Inverse to 'dispModSubstEntry'. 
+-- +-- @since 2.2 +parsecOpenModuleSubstEntry :: CabalParsing m => m (ModuleName, OpenModule) +parsecOpenModuleSubstEntry = + do k <- parsec + _ <- P.char '=' + v <- parsec + return (k, v) + +-- | Get the set of holes ('ModuleVar') embedded in a 'OpenModuleSubst'. +-- This is NOT the domain of the substitution. +openModuleSubstFreeHoles :: OpenModuleSubst -> Set ModuleName +openModuleSubstFreeHoles insts = Set.unions (map openModuleFreeHoles (Map.elems insts)) + +----------------------------------------------------------------------- +-- Conversions to UnitId + +-- | When typechecking, we don't demand that a freshly instantiated +-- 'IndefFullUnitId' be compiled; instead, we just depend on the +-- installed indefinite unit installed at the 'ComponentId'. +abstractUnitId :: OpenUnitId -> UnitId +abstractUnitId (DefiniteUnitId def_uid) = unDefUnitId def_uid +abstractUnitId (IndefFullUnitId cid _) = newSimpleUnitId cid + +-- | Take a module substitution and hash it into a string suitable for +-- 'UnitId'. Note that since this takes 'Module', not 'OpenModule', +-- you are responsible for recursively converting 'OpenModule' +-- into 'Module'. See also "Distribution.Backpack.ReadyComponent". +hashModuleSubst :: Map ModuleName Module -> Maybe String +hashModuleSubst subst + | Map.null subst = Nothing + | otherwise = + Just . 
hashToBase62 $ + concat [ display mod_name ++ "=" ++ display m ++ "\n" + | (mod_name, m) <- Map.toList subst] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/CabalSpecVersion.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/CabalSpecVersion.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/CabalSpecVersion.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/CabalSpecVersion.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,80 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.CabalSpecVersion where + +import Prelude () +import Distribution.Compat.Prelude +import qualified Data.Set as Set + +-- | Different Cabal-the-spec versions. +-- +-- We branch based on this at least in the parser. +-- +data CabalSpecVersion + = CabalSpecOld + | CabalSpecV1_22 + | CabalSpecV1_24 + | CabalSpecV2_0 + | CabalSpecV2_2 + | CabalSpecV2_4 + deriving (Eq, Ord, Show, Read, Enum, Bounded, Typeable, Data, Generic) + +cabalSpecLatest :: CabalSpecVersion +cabalSpecLatest = CabalSpecV2_4 + +cabalSpecFeatures :: CabalSpecVersion -> Set.Set CabalFeature +cabalSpecFeatures CabalSpecOld = Set.empty +cabalSpecFeatures CabalSpecV1_22 = Set.empty +cabalSpecFeatures CabalSpecV1_24 = Set.empty +cabalSpecFeatures CabalSpecV2_0 = Set.empty +cabalSpecFeatures CabalSpecV2_2 = Set.fromList + [ Elif + , CommonStanzas + ] +cabalSpecFeatures CabalSpecV2_4 = Set.fromList + [ Elif + , CommonStanzas + , Globstar + ] + +cabalSpecSupports :: CabalSpecVersion -> [Int] -> Bool +cabalSpecSupports CabalSpecOld v = v < [1,21] +cabalSpecSupports CabalSpecV1_22 v = v < [1,23] +cabalSpecSupports CabalSpecV1_24 v = v < [1,25] +cabalSpecSupports CabalSpecV2_0 v = v < [2,1] +cabalSpecSupports CabalSpecV2_2 v = v < [2,3] +cabalSpecSupports CabalSpecV2_4 _ = True + +specHasCommonStanzas :: CabalSpecVersion -> 
HasCommonStanzas +specHasCommonStanzas CabalSpecV2_2 = HasCommonStanzas +specHasCommonStanzas CabalSpecV2_4 = HasCommonStanzas +specHasCommonStanzas _ = NoCommonStanzas + +specHasElif :: CabalSpecVersion -> HasElif +specHasElif CabalSpecV2_2 = HasElif +specHasElif CabalSpecV2_4 = HasElif +specHasElif _ = NoElif + +------------------------------------------------------------------------------- +-- Features +------------------------------------------------------------------------------- + +data CabalFeature + = Elif + | CommonStanzas + | Globstar + -- ^ Implemented in #5284. Not actually a change to the parser, + -- as filename patterns are opaque to it currently. + deriving (Eq, Ord, Show, Read, Enum, Bounded, Typeable, Data, Generic) + +------------------------------------------------------------------------------- +-- Booleans +------------------------------------------------------------------------------- + +data HasElif = HasElif | NoElif + deriving (Eq, Show) + +data HasCommonStanzas = HasCommonStanzas | NoCommonStanzas + deriving (Eq, Show) + +data HasGlobstar = HasGlobstar | NoGlobstar diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Binary/Class.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Binary/Class.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Binary/Class.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Binary/Class.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,519 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE Trustworthy #-} +{-# LANGUAGE DefaultSignatures #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.Binary.Class +-- Copyright : Lennart Kolmodin +-- License : BSD3-style (see LICENSE) +-- +-- Maintainer : Lennart Kolmodin +-- Stability : unstable +-- Portability : 
portable to Hugs and GHC. Requires the FFI and some flexible instances +-- +-- Typeclass and instances for binary serialization. +-- +----------------------------------------------------------------------------- + +module Distribution.Compat.Binary.Class ( + + -- * The Binary class + Binary(..) + + -- * Support for generics + , GBinary(..) + + ) where + +import Data.Word + +import Data.Binary.Put +import Data.Binary.Get + +import Control.Applicative ((<$>), (<*>), (*>)) +import Foreign + +import Data.ByteString.Lazy (ByteString) +import qualified Data.ByteString.Lazy as L + +import Data.Char (chr,ord) +import Data.List (unfoldr) +import Data.Foldable (traverse_) + +-- And needed for the instances: +import qualified Data.ByteString as B +import qualified Data.Map as Map +import qualified Data.Set as Set +import qualified Data.IntMap as IntMap +import qualified Data.IntSet as IntSet +import qualified Data.Ratio as R + +import qualified Data.Tree as T + +import Data.Array.Unboxed + +import GHC.Generics + +import qualified Data.Sequence as Seq +import qualified Data.Foldable as Fold + +------------------------------------------------------------------------ + +class GBinary f where + gput :: f t -> Put + gget :: Get (f t) + +-- | The 'Binary' class provides 'put' and 'get', methods to encode and +-- decode a Haskell value to a lazy 'ByteString'. It mirrors the 'Read' and +-- 'Show' classes for textual representation of Haskell types, and is +-- suitable for serialising Haskell values to disk, over the network. +-- +-- For decoding and generating simple external binary formats (e.g. C +-- structures), Binary may be used, but in general is not suitable +-- for complex protocols. Instead use the 'Put' and 'Get' primitives +-- directly. +-- +-- Instances of Binary should satisfy the following property: +-- +-- > decode . encode == id +-- +-- That is, the 'get' and 'put' methods should be the inverse of each +-- other. 
A range of instances are provided for basic Haskell types. +-- +class Binary t where + -- | Encode a value in the Put monad. + put :: t -> Put + -- | Decode a value in the Get monad + get :: Get t + + default put :: (Generic t, GBinary (Rep t)) => t -> Put + put = gput . from + + default get :: (Generic t, GBinary (Rep t)) => Get t + get = to `fmap` gget + +------------------------------------------------------------------------ +-- Simple instances + +-- The () type need never be written to disk: values of singleton type +-- can be reconstructed from the type alone +instance Binary () where + put () = return () + get = return () + +-- Bools are encoded as a byte in the range 0 .. 1 +instance Binary Bool where + put = putWord8 . fromIntegral . fromEnum + get = fmap (toEnum . fromIntegral) getWord8 + +-- Values of type 'Ordering' are encoded as a byte in the range 0 .. 2 +instance Binary Ordering where + put = putWord8 . fromIntegral . fromEnum + get = fmap (toEnum . fromIntegral) getWord8 + +------------------------------------------------------------------------ +-- Words and Ints + +-- Words8s are written as bytes +instance Binary Word8 where + put = putWord8 + get = getWord8 + +-- Words16s are written as 2 bytes in big-endian (network) order +instance Binary Word16 where + put = putWord16be + get = getWord16be + +-- Words32s are written as 4 bytes in big-endian (network) order +instance Binary Word32 where + put = putWord32be + get = getWord32be + +-- Words64s are written as 8 bytes in big-endian (network) order +instance Binary Word64 where + put = putWord64be + get = getWord64be + +-- Int8s are written as a single byte. 
+instance Binary Int8 where + put i = put (fromIntegral i :: Word8) + get = fmap fromIntegral (get :: Get Word8) + +-- Int16s are written as a 2 bytes in big endian format +instance Binary Int16 where + put i = put (fromIntegral i :: Word16) + get = fmap fromIntegral (get :: Get Word16) + +-- Int32s are written as a 4 bytes in big endian format +instance Binary Int32 where + put i = put (fromIntegral i :: Word32) + get = fmap fromIntegral (get :: Get Word32) + +-- Int64s are written as a 4 bytes in big endian format +instance Binary Int64 where + put i = put (fromIntegral i :: Word64) + get = fmap fromIntegral (get :: Get Word64) + +------------------------------------------------------------------------ + +-- Words are are written as Word64s, that is, 8 bytes in big endian format +instance Binary Word where + put i = put (fromIntegral i :: Word64) + get = fmap fromIntegral (get :: Get Word64) + +-- Ints are are written as Int64s, that is, 8 bytes in big endian format +instance Binary Int where + put i = put (fromIntegral i :: Int64) + get = fmap fromIntegral (get :: Get Int64) + +------------------------------------------------------------------------ +-- +-- Portable, and pretty efficient, serialisation of Integer +-- + +-- Fixed-size type for a subset of Integer +type SmallInt = Int32 + +-- Integers are encoded in two ways: if they fit inside a SmallInt, +-- they're written as a byte tag, and that value. If the Integer value +-- is too large to fit in a SmallInt, it is written as a byte array, +-- along with a sign and length field. 
+ +instance Binary Integer where + + {-# INLINE put #-} + put n | n >= lo && n <= hi = do + putWord8 0 + put (fromIntegral n :: SmallInt) -- fast path + where + lo = fromIntegral (minBound :: SmallInt) :: Integer + hi = fromIntegral (maxBound :: SmallInt) :: Integer + + put n = do + putWord8 1 + put sign + put (unroll (abs n)) -- unroll the bytes + where + sign = fromIntegral (signum n) :: Word8 + + {-# INLINE get #-} + get = do + tag <- get :: Get Word8 + case tag of + 0 -> fmap fromIntegral (get :: Get SmallInt) + _ -> do sign <- get + bytes <- get + let v = roll bytes + return $! if sign == (1 :: Word8) then v else - v + +-- +-- Fold and unfold an Integer to and from a list of its bytes +-- +unroll :: Integer -> [Word8] +unroll = unfoldr step + where + step 0 = Nothing + step i = Just (fromIntegral i, i `shiftR` 8) + +roll :: [Word8] -> Integer +roll = foldr unstep 0 + where + unstep b a = a `shiftL` 8 .|. fromIntegral b + +{- + +-- +-- An efficient, raw serialisation for Integer (GHC only) +-- + +-- TODO This instance is not architecture portable. GMP stores numbers as +-- arrays of machine sized words, so the byte format is not portable across +-- architectures with different endianness and word size. + +import Data.ByteString.Base (toForeignPtr,unsafePackAddress, memcpy) +import GHC.Base hiding (ord, chr) +import GHC.Prim +import GHC.Ptr (Ptr(..)) +import GHC.IOBase (IO(..)) + +instance Binary Integer where + put (S# i) = putWord8 0 *> put (I# i) + put (J# s ba) = do + putWord8 1 + put (I# s) + put (BA ba) + + get = do + b <- getWord8 + case b of + 0 -> do (I# i#) <- get + return (S# i#) + _ -> do (I# s#) <- get + (BA a#) <- get + return (J# s# a#) + +instance Binary ByteArray where + + -- Pretty safe. + put (BA ba) = + let sz = sizeofByteArray# ba -- (primitive) in *bytes* + addr = byteArrayContents# ba + bs = unsafePackAddress (I# sz) addr + in put bs -- write as a ByteString. easy, yay! + + -- Pretty scary. 
Should be quick though + get = do + (fp, off, n@(I# sz)) <- fmap toForeignPtr get -- so decode a ByteString + assert (off == 0) $ return $ unsafePerformIO $ do + (MBA arr) <- newByteArray sz -- and copy it into a ByteArray# + let to = byteArrayContents# (unsafeCoerce# arr) -- urk, is this safe? + withForeignPtr fp $ \from -> memcpy (Ptr to) from (fromIntegral n) + freezeByteArray arr + +-- wrapper for ByteArray# +data ByteArray = BA {-# UNPACK #-} !ByteArray# +data MBA = MBA {-# UNPACK #-} !(MutableByteArray# RealWorld) + +newByteArray :: Int# -> IO MBA +newByteArray sz = IO $ \s -> + case newPinnedByteArray# sz s of { (# s', arr #) -> + (# s', MBA arr #) } + +freezeByteArray :: MutableByteArray# RealWorld -> IO ByteArray +freezeByteArray arr = IO $ \s -> + case unsafeFreezeByteArray# arr s of { (# s', arr' #) -> + (# s', BA arr' #) } + +-} + +instance (Binary a,Integral a) => Binary (R.Ratio a) where + put r = put (R.numerator r) *> put (R.denominator r) + get = (R.%) <$> get <*> get + +------------------------------------------------------------------------ + +-- Char is serialised as UTF-8 +instance Binary Char where + put a | c <= 0x7f = put (fromIntegral c :: Word8) + | c <= 0x7ff = do put (0xc0 .|. y) + put (0x80 .|. z) + | c <= 0xffff = do put (0xe0 .|. x) + put (0x80 .|. y) + put (0x80 .|. z) + | c <= 0x10ffff = do put (0xf0 .|. w) + put (0x80 .|. x) + put (0x80 .|. y) + put (0x80 .|. z) + | otherwise = error "Not a valid Unicode code point" + where + c = ord a + z, y, x, w :: Word8 + z = fromIntegral (c .&. 0x3f) + y = fromIntegral (shiftR c 6 .&. 0x3f) + x = fromIntegral (shiftR c 12 .&. 0x3f) + w = fromIntegral (shiftR c 18 .&. 0x7) + + get = do + let getByte = fmap (fromIntegral :: Word8 -> Int) get + shiftL6 = flip shiftL 6 :: Int -> Int + w <- getByte + r <- case () of + _ | w < 0x80 -> return w + | w < 0xe0 -> do + x <- fmap (xor 0x80) getByte + return (x .|. 
shiftL6 (xor 0xc0 w)) + | w < 0xf0 -> do + x <- fmap (xor 0x80) getByte + y <- fmap (xor 0x80) getByte + return (y .|. shiftL6 (x .|. shiftL6 + (xor 0xe0 w))) + | otherwise -> do + x <- fmap (xor 0x80) getByte + y <- fmap (xor 0x80) getByte + z <- fmap (xor 0x80) getByte + return (z .|. shiftL6 (y .|. shiftL6 + (x .|. shiftL6 (xor 0xf0 w)))) + return $! chr r + +------------------------------------------------------------------------ +-- Instances for the first few tuples + +instance (Binary a, Binary b) => Binary (a,b) where + put (a,b) = put a *> put b + get = (,) <$> get <*> get + +instance (Binary a, Binary b, Binary c) => Binary (a,b,c) where + put (a,b,c) = put a *> put b *> put c + get = (,,) <$> get <*> get <*> get + +instance (Binary a, Binary b, Binary c, Binary d) => Binary (a,b,c,d) where + put (a,b,c,d) = put a *> put b *> put c *> put d + get = (,,,) <$> get <*> get <*> get <*> get + +instance (Binary a, Binary b, Binary c, Binary d, Binary e) => Binary (a,b,c,d,e) where + put (a,b,c,d,e) = put a *> put b *> put c *> put d *> put e + get = (,,,,) <$> get <*> get <*> get <*> get <*> get + +-- +-- and now just recurse: +-- + +instance (Binary a, Binary b, Binary c, Binary d, Binary e, Binary f) + => Binary (a,b,c,d,e,f) where + put (a,b,c,d,e,f) = put (a,(b,c,d,e,f)) + get = do (a,(b,c,d,e,f)) <- get ; return (a,b,c,d,e,f) + +instance (Binary a, Binary b, Binary c, Binary d, Binary e, Binary f, Binary g) + => Binary (a,b,c,d,e,f,g) where + put (a,b,c,d,e,f,g) = put (a,(b,c,d,e,f,g)) + get = do (a,(b,c,d,e,f,g)) <- get ; return (a,b,c,d,e,f,g) + +instance (Binary a, Binary b, Binary c, Binary d, Binary e, + Binary f, Binary g, Binary h) + => Binary (a,b,c,d,e,f,g,h) where + put (a,b,c,d,e,f,g,h) = put (a,(b,c,d,e,f,g,h)) + get = do (a,(b,c,d,e,f,g,h)) <- get ; return (a,b,c,d,e,f,g,h) + +instance (Binary a, Binary b, Binary c, Binary d, Binary e, + Binary f, Binary g, Binary h, Binary i) + => Binary (a,b,c,d,e,f,g,h,i) where + put (a,b,c,d,e,f,g,h,i) = 
put (a,(b,c,d,e,f,g,h,i)) + get = do (a,(b,c,d,e,f,g,h,i)) <- get ; return (a,b,c,d,e,f,g,h,i) + +instance (Binary a, Binary b, Binary c, Binary d, Binary e, + Binary f, Binary g, Binary h, Binary i, Binary j) + => Binary (a,b,c,d,e,f,g,h,i,j) where + put (a,b,c,d,e,f,g,h,i,j) = put (a,(b,c,d,e,f,g,h,i,j)) + get = do (a,(b,c,d,e,f,g,h,i,j)) <- get ; return (a,b,c,d,e,f,g,h,i,j) + +------------------------------------------------------------------------ +-- Container types + +instance Binary a => Binary [a] where + put l = put (length l) *> traverse_ put l + get = do n <- get :: Get Int + getMany n + +-- | 'getMany n' get 'n' elements in order, without blowing the stack. +getMany :: Binary a => Int -> Get [a] +getMany n = go [] n + where + go xs 0 = return $! reverse xs + go xs i = do x <- get + -- we must seq x to avoid stack overflows due to laziness in + -- (>>=) + x `seq` go (x:xs) (i-1) +{-# INLINE getMany #-} + +instance (Binary a) => Binary (Maybe a) where + put Nothing = putWord8 0 + put (Just x) = putWord8 1 *> put x + get = do + w <- getWord8 + case w of + 0 -> return Nothing + _ -> fmap Just get + +instance (Binary a, Binary b) => Binary (Either a b) where + put (Left a) = putWord8 0 *> put a + put (Right b) = putWord8 1 *> put b + get = do + w <- getWord8 + case w of + 0 -> fmap Left get + _ -> fmap Right get + +------------------------------------------------------------------------ +-- ByteStrings (have specially efficient instances) + +instance Binary B.ByteString where + put bs = do put (B.length bs) + putByteString bs + get = get >>= getByteString + +-- +-- Using old versions of fps, this is a type synonym, and non portable +-- +-- Requires 'flexible instances' +-- +instance Binary ByteString where + put bs = do put (fromIntegral (L.length bs) :: Int) + putLazyByteString bs + get = get >>= getLazyByteString + +------------------------------------------------------------------------ +-- Maps and Sets + +instance (Binary a) => Binary (Set.Set a) where 
+ put s = put (Set.size s) *> traverse_ put (Set.toAscList s) + get = fmap Set.fromDistinctAscList get + +instance (Binary k, Binary e) => Binary (Map.Map k e) where + put m = put (Map.size m) *> traverse_ put (Map.toAscList m) + get = fmap Map.fromDistinctAscList get + +instance Binary IntSet.IntSet where + put s = put (IntSet.size s) *> traverse_ put (IntSet.toAscList s) + get = fmap IntSet.fromDistinctAscList get + +instance (Binary e) => Binary (IntMap.IntMap e) where + put m = put (IntMap.size m) *> traverse_ put (IntMap.toAscList m) + get = fmap IntMap.fromDistinctAscList get + +------------------------------------------------------------------------ +-- Queues and Sequences + +instance (Binary e) => Binary (Seq.Seq e) where + put s = put (Seq.length s) *> Fold.traverse_ put s + get = do n <- get :: Get Int + rep Seq.empty n get + where rep xs 0 _ = return $! xs + rep xs n g = xs `seq` n `seq` do + x <- g + rep (xs Seq.|> x) (n-1) g + +------------------------------------------------------------------------ +-- Floating point + +instance Binary Double where + put d = put (decodeFloat d) + get = encodeFloat <$> get <*> get + +instance Binary Float where + put f = put (decodeFloat f) + get = encodeFloat <$> get <*> get + +------------------------------------------------------------------------ +-- Trees + +instance (Binary e) => Binary (T.Tree e) where + put (T.Node r s) = put r *> put s + get = T.Node <$> get <*> get + +------------------------------------------------------------------------ +-- Arrays + +instance (Binary i, Ix i, Binary e) => Binary (Array i e) where + put a = do + put (bounds a) + put (rangeSize $ bounds a) -- write the length + traverse_ put (elems a) -- now the elems. + get = do + bs <- get + n <- get -- read the length + xs <- getMany n -- now the elems. + return (listArray bs xs) + +-- +-- The IArray UArray e constraint is non portable. 
Requires flexible instances +-- +instance (Binary i, Ix i, Binary e, IArray UArray e) => Binary (UArray i e) where + put a = do + put (bounds a) + put (rangeSize $ bounds a) -- now write the length + traverse_ put (elems a) + get = do + bs <- get + n <- get + xs <- getMany n + return (listArray bs xs) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Binary/Generic.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Binary/Generic.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Binary/Generic.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Binary/Generic.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,128 @@ +{-# LANGUAGE BangPatterns, CPP, FlexibleInstances, KindSignatures, + ScopedTypeVariables, Trustworthy, TypeOperators, TypeSynonymInstances #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.Binary.Generic +-- Copyright : Bryan O'Sullivan +-- License : BSD3-style (see LICENSE) +-- +-- Maintainer : Bryan O'Sullivan +-- Stability : unstable +-- Portability : Only works with GHC 7.2 and newer +-- +-- Instances for supporting GHC generics. 
+-- +----------------------------------------------------------------------------- +module Distribution.Compat.Binary.Generic + ( + ) where + +import Control.Applicative +import Distribution.Compat.Binary.Class +import Data.Binary.Get +import Data.Binary.Put +import Data.Bits +import Data.Word +import GHC.Generics + +-- Type without constructors +instance GBinary V1 where + gput _ = return () + gget = return undefined + +-- Constructor without arguments +instance GBinary U1 where + gput U1 = return () + gget = return U1 + +-- Product: constructor with parameters +instance (GBinary a, GBinary b) => GBinary (a :*: b) where + gput (x :*: y) = gput x >> gput y + gget = (:*:) <$> gget <*> gget + +-- Metadata (constructor name, etc) +instance GBinary a => GBinary (M1 i c a) where + gput = gput . unM1 + gget = M1 <$> gget + +-- Constants, additional parameters, and rank-1 recursion +instance Binary a => GBinary (K1 i a) where + gput = put . unK1 + gget = K1 <$> get + +-- Borrowed from the cereal package. + +-- The following GBinary instance for sums has support for serializing +-- types with up to 2^64-1 constructors. It will use the minimal +-- number of bytes needed to encode the constructor. For example when +-- a type has 2^8 constructors or less it will use a single byte to +-- encode the constructor. If it has 2^16 constructors or less it will +-- use two bytes, and so on till 2^64-1. 
+ +#define GUARD(WORD) (size - 1) <= fromIntegral (maxBound :: WORD) +#define PUTSUM(WORD) GUARD(WORD) = putSum (0 :: WORD) (fromIntegral size) +#define GETSUM(WORD) GUARD(WORD) = (get :: Get WORD) >>= checkGetSum (fromIntegral size) + +instance ( GSum a, GSum b + , GBinary a, GBinary b + , SumSize a, SumSize b) => GBinary (a :+: b) where + gput | PUTSUM(Word8) | PUTSUM(Word16) | PUTSUM(Word32) | PUTSUM(Word64) + | otherwise = sizeError "encode" size + where + size = unTagged (sumSize :: Tagged (a :+: b) Word64) + + gget | GETSUM(Word8) | GETSUM(Word16) | GETSUM(Word32) | GETSUM(Word64) + | otherwise = sizeError "decode" size + where + size = unTagged (sumSize :: Tagged (a :+: b) Word64) + +sizeError :: Show size => String -> size -> error +sizeError s size = + error $ "Can't " ++ s ++ " a type with " ++ show size ++ " constructors" + +------------------------------------------------------------------------ + +checkGetSum :: (Ord word, Num word, Bits word, GSum f) + => word -> word -> Get (f a) +checkGetSum size code | code < size = getSum code size + | otherwise = fail "Unknown encoding for constructor" +{-# INLINE checkGetSum #-} + +class GSum f where + getSum :: (Ord word, Num word, Bits word) => word -> word -> Get (f a) + putSum :: (Num w, Bits w, Binary w) => w -> w -> f a -> Put + +instance (GSum a, GSum b, GBinary a, GBinary b) => GSum (a :+: b) where + getSum !code !size | code < sizeL = L1 <$> getSum code sizeL + | otherwise = R1 <$> getSum (code - sizeL) sizeR + where + sizeL = size `shiftR` 1 + sizeR = size - sizeL + + putSum !code !size s = case s of + L1 x -> putSum code sizeL x + R1 x -> putSum (code + sizeL) sizeR x + where + sizeL = size `shiftR` 1 + sizeR = size - sizeL + +instance GBinary a => GSum (C1 c a) where + getSum _ _ = gget + + putSum !code _ x = put code *> gput x + +------------------------------------------------------------------------ + +class SumSize f where + sumSize :: Tagged f Word64 + +newtype Tagged (s :: * -> *) b = Tagged 
{unTagged :: b} + +instance (SumSize a, SumSize b) => SumSize (a :+: b) where + sumSize = Tagged $ unTagged (sumSize :: Tagged a Word64) + + unTagged (sumSize :: Tagged b Word64) + +instance SumSize (C1 c a) where + sumSize = Tagged 1 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Binary.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Binary.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Binary.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Binary.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,71 @@ +{-# LANGUAGE CPP #-} +#if __GLASGOW_HASKELL__ >= 711 +{-# LANGUAGE PatternSynonyms #-} +#endif + +#ifndef MIN_VERSION_binary +#define MIN_VERSION_binary(x, y, z) 0 +#endif + +module Distribution.Compat.Binary + ( decodeOrFailIO + , decodeFileOrFail' +#if __GLASGOW_HASKELL__ >= 708 || MIN_VERSION_binary(0,7,0) + , module Data.Binary +#else + , Binary(..) + , decode, encode, encodeFile +#endif + ) where + +import Control.Exception (catch, evaluate) +#if __GLASGOW_HASKELL__ >= 711 +import Control.Exception (pattern ErrorCall) +#else +import Control.Exception (ErrorCall(..)) +#endif +import Data.ByteString.Lazy (ByteString) + +#if __GLASGOW_HASKELL__ >= 708 || MIN_VERSION_binary(0,7,0) + +import Data.Binary + +-- | Lazily reconstruct a value previously written to a file. +decodeFileOrFail' :: Binary a => FilePath -> IO (Either String a) +decodeFileOrFail' f = either (Left . snd) Right `fmap` decodeFileOrFail f + +#else + +import Data.Binary.Get +import Data.Binary.Put +import qualified Data.ByteString.Lazy as BSL + +import Distribution.Compat.Binary.Class +import Distribution.Compat.Binary.Generic () + +-- | Decode a value from a lazy ByteString, reconstructing the +-- original structure. 
+-- +decode :: Binary a => ByteString -> a +decode = runGet get + +-- | Encode a value using binary serialisation to a lazy ByteString. +-- +encode :: Binary a => a -> ByteString +encode = runPut . put +{-# INLINE encode #-} + +-- | Lazily reconstruct a value previously written to a file. +decodeFileOrFail' :: Binary a => FilePath -> IO (Either String a) +decodeFileOrFail' f = decodeOrFailIO =<< BSL.readFile f + +-- | Lazily serialise a value to a file +encodeFile :: Binary a => FilePath -> a -> IO () +encodeFile f = BSL.writeFile f . encode + +#endif + +decodeOrFailIO :: Binary a => ByteString -> IO (Either String a) +decodeOrFailIO bs = + catch (evaluate (decode bs) >>= return . Right) + $ \(ErrorCall str) -> return $ Left str diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/CharParsing.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/CharParsing.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/CharParsing.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/CharParsing.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,356 @@ +{-# LANGUAGE GADTs #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# OPTIONS_GHC -fspec-constr -fspec-constr-count=8 #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.CharParsing +-- Copyright : (c) Edward Kmett 2011 +-- License : BSD3 +-- +-- Maintainer : ekmett@gmail.com +-- Stability : experimental +-- Portability : non-portable +-- +-- Parsers for character streams +-- +-- Originally in @parsers@ package. 
+-- +----------------------------------------------------------------------------- +module Distribution.Compat.CharParsing + ( + -- * Combinators + oneOf -- :: CharParsing m => [Char] -> m Char + , noneOf -- :: CharParsing m => [Char] -> m Char + , spaces -- :: CharParsing m => m () + , space -- :: CharParsing m => m Char + , newline -- :: CharParsing m => m Char + , tab -- :: CharParsing m => m Char + , upper -- :: CharParsing m => m Char + , lower -- :: CharParsing m => m Char + , alphaNum -- :: CharParsing m => m Char + , letter -- :: CharParsing m => m Char + , digit -- :: CharParsing m => m Char + , hexDigit -- :: CharParsing m => m Char + , octDigit -- :: CharParsing m => m Char + , satisfyRange -- :: CharParsing m => Char -> Char -> m Char + -- * Class + , CharParsing(..) + -- * Cabal additions + , integral + , munch1 + , munch + , skipSpaces1 + , module Distribution.Compat.Parsing + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.State.Lazy as Lazy +import Control.Monad.Trans.State.Strict as Strict +import Control.Monad.Trans.Writer.Lazy as Lazy +import Control.Monad.Trans.Writer.Strict as Strict +import Control.Monad.Trans.RWS.Lazy as Lazy +import Control.Monad.Trans.RWS.Strict as Strict +import Control.Monad.Trans.Reader (ReaderT (..)) +import Control.Monad.Trans.Identity (IdentityT (..)) +import Data.Char +import Data.Text (Text, unpack) + +import qualified Text.Parsec as Parsec +import qualified Distribution.Compat.ReadP as ReadP + +import Distribution.Compat.Parsing + +-- | @oneOf cs@ succeeds if the current character is in the supplied +-- list of characters @cs@. Returns the parsed character. See also +-- 'satisfy'. 
+-- +-- > vowel = oneOf "aeiou" +oneOf :: CharParsing m => [Char] -> m Char +oneOf xs = satisfy (\c -> c `elem` xs) +{-# INLINE oneOf #-} + +-- | As the dual of 'oneOf', @noneOf cs@ succeeds if the current +-- character is /not/ in the supplied list of characters @cs@. Returns the +-- parsed character. +-- +-- > consonant = noneOf "aeiou" +noneOf :: CharParsing m => [Char] -> m Char +noneOf xs = satisfy (\c -> c `notElem` xs) +{-# INLINE noneOf #-} + +-- | Skips /zero/ or more white space characters. See also 'skipMany'. +spaces :: CharParsing m => m () +spaces = skipMany space "white space" +{-# INLINE spaces #-} + +-- | Parses a white space character (any character which satisfies 'isSpace') +-- Returns the parsed character. +space :: CharParsing m => m Char +space = satisfy isSpace "space" +{-# INLINE space #-} + +-- | Parses a newline character (\'\\n\'). Returns a newline character. +newline :: CharParsing m => m Char +newline = char '\n' "new-line" +{-# INLINE newline #-} + +-- | Parses a tab character (\'\\t\'). Returns a tab character. +tab :: CharParsing m => m Char +tab = char '\t' "tab" +{-# INLINE tab #-} + +-- | Parses an upper case letter. Returns the parsed character. +upper :: CharParsing m => m Char +upper = satisfy isUpper "uppercase letter" +{-# INLINE upper #-} + +-- | Parses a lower case character. Returns the parsed character. +lower :: CharParsing m => m Char +lower = satisfy isLower "lowercase letter" +{-# INLINE lower #-} + +-- | Parses a letter or digit. Returns the parsed character. +alphaNum :: CharParsing m => m Char +alphaNum = satisfy isAlphaNum "letter or digit" +{-# INLINE alphaNum #-} + +-- | Parses a letter (an upper case or lower case character). Returns the +-- parsed character. +letter :: CharParsing m => m Char +letter = satisfy isAlpha "letter" +{-# INLINE letter #-} + +-- | Parses a digit. Returns the parsed character. 
+digit :: CharParsing m => m Char +digit = satisfy isDigit "digit" +{-# INLINE digit #-} + +-- | Parses a hexadecimal digit (a digit or a letter between \'a\' and +-- \'f\' or \'A\' and \'F\'). Returns the parsed character. +hexDigit :: CharParsing m => m Char +hexDigit = satisfy isHexDigit "hexadecimal digit" +{-# INLINE hexDigit #-} + +-- | Parses an octal digit (a character between \'0\' and \'7\'). Returns +-- the parsed character. +octDigit :: CharParsing m => m Char +octDigit = satisfy isOctDigit "octal digit" +{-# INLINE octDigit #-} + +satisfyRange :: CharParsing m => Char -> Char -> m Char +satisfyRange a z = satisfy (\c -> c >= a && c <= z) +{-# INLINE satisfyRange #-} + +-- | Additional functionality needed to parse character streams. +class Parsing m => CharParsing m where + -- | Parse a single character of the input, with UTF-8 decoding + satisfy :: (Char -> Bool) -> m Char + + -- | @char c@ parses a single character @c@. Returns the parsed + -- character (i.e. @c@). + -- + -- /e.g./ + -- + -- @semiColon = 'char' ';'@ + char :: Char -> m Char + char c = satisfy (c ==) show [c] + {-# INLINE char #-} + + -- | @notChar c@ parses any single character other than @c@. Returns the parsed + -- character. + notChar :: Char -> m Char + notChar c = satisfy (c /=) + {-# INLINE notChar #-} + + -- | This parser succeeds for any character. Returns the parsed character. + anyChar :: m Char + anyChar = satisfy (const True) + {-# INLINE anyChar #-} + + -- | @string s@ parses a sequence of characters given by @s@. Returns + -- the parsed string (i.e. @s@). + -- + -- > divOrMod = string "div" + -- > <|> string "mod" + string :: String -> m String + string s = s <$ try (traverse_ char s) show s + {-# INLINE string #-} + + -- | @text t@ parses a sequence of characters determined by the text @t@ Returns + -- the parsed text fragment (i.e. @t@). 
+ -- + -- Using @OverloadedStrings@: + -- + -- > divOrMod = text "div" + -- > <|> text "mod" + text :: Text -> m Text + text t = t <$ string (unpack t) + {-# INLINE text #-} + +instance (CharParsing m, MonadPlus m) => CharParsing (Lazy.StateT s m) where + satisfy = lift . satisfy + {-# INLINE satisfy #-} + char = lift . char + {-# INLINE char #-} + notChar = lift . notChar + {-# INLINE notChar #-} + anyChar = lift anyChar + {-# INLINE anyChar #-} + string = lift . string + {-# INLINE string #-} + text = lift . text + {-# INLINE text #-} + +instance (CharParsing m, MonadPlus m) => CharParsing (Strict.StateT s m) where + satisfy = lift . satisfy + {-# INLINE satisfy #-} + char = lift . char + {-# INLINE char #-} + notChar = lift . notChar + {-# INLINE notChar #-} + anyChar = lift anyChar + {-# INLINE anyChar #-} + string = lift . string + {-# INLINE string #-} + text = lift . text + {-# INLINE text #-} + +instance (CharParsing m, MonadPlus m) => CharParsing (ReaderT e m) where + satisfy = lift . satisfy + {-# INLINE satisfy #-} + char = lift . char + {-# INLINE char #-} + notChar = lift . notChar + {-# INLINE notChar #-} + anyChar = lift anyChar + {-# INLINE anyChar #-} + string = lift . string + {-# INLINE string #-} + text = lift . text + {-# INLINE text #-} + +instance (CharParsing m, MonadPlus m, Monoid w) => CharParsing (Strict.WriterT w m) where + satisfy = lift . satisfy + {-# INLINE satisfy #-} + char = lift . char + {-# INLINE char #-} + notChar = lift . notChar + {-# INLINE notChar #-} + anyChar = lift anyChar + {-# INLINE anyChar #-} + string = lift . string + {-# INLINE string #-} + text = lift . text + {-# INLINE text #-} + +instance (CharParsing m, MonadPlus m, Monoid w) => CharParsing (Lazy.WriterT w m) where + satisfy = lift . satisfy + {-# INLINE satisfy #-} + char = lift . char + {-# INLINE char #-} + notChar = lift . notChar + {-# INLINE notChar #-} + anyChar = lift anyChar + {-# INLINE anyChar #-} + string = lift . 
string + {-# INLINE string #-} + text = lift . text + {-# INLINE text #-} + +instance (CharParsing m, MonadPlus m, Monoid w) => CharParsing (Lazy.RWST r w s m) where + satisfy = lift . satisfy + {-# INLINE satisfy #-} + char = lift . char + {-# INLINE char #-} + notChar = lift . notChar + {-# INLINE notChar #-} + anyChar = lift anyChar + {-# INLINE anyChar #-} + string = lift . string + {-# INLINE string #-} + text = lift . text + {-# INLINE text #-} + +instance (CharParsing m, MonadPlus m, Monoid w) => CharParsing (Strict.RWST r w s m) where + satisfy = lift . satisfy + {-# INLINE satisfy #-} + char = lift . char + {-# INLINE char #-} + notChar = lift . notChar + {-# INLINE notChar #-} + anyChar = lift anyChar + {-# INLINE anyChar #-} + string = lift . string + {-# INLINE string #-} + text = lift . text + {-# INLINE text #-} + +instance (CharParsing m, MonadPlus m) => CharParsing (IdentityT m) where + satisfy = lift . satisfy + {-# INLINE satisfy #-} + char = lift . char + {-# INLINE char #-} + notChar = lift . notChar + {-# INLINE notChar #-} + anyChar = lift anyChar + {-# INLINE anyChar #-} + string = lift . string + {-# INLINE string #-} + text = lift . 
text + {-# INLINE text #-} + +instance Parsec.Stream s m Char => CharParsing (Parsec.ParsecT s u m) where + satisfy = Parsec.satisfy + char = Parsec.char + notChar c = Parsec.satisfy (/= c) + anyChar = Parsec.anyChar + string = Parsec.string + +instance t ~ Char => CharParsing (ReadP.Parser r t) where + satisfy = ReadP.satisfy + char = ReadP.char + notChar c = ReadP.satisfy (/= c) + anyChar = ReadP.get + string = ReadP.string + +------------------------------------------------------------------------------- +-- Our additions +------------------------------------------------------------------------------- + +integral :: (CharParsing m, Integral a) => m a +integral = toNumber <$> some d "integral" + where + toNumber = foldl' (\a b -> a * 10 + b) 0 + d = f <$> satisfyRange '0' '9' + f '0' = 0 + f '1' = 1 + f '2' = 2 + f '3' = 3 + f '4' = 4 + f '5' = 5 + f '6' = 6 + f '7' = 7 + f '8' = 8 + f '9' = 9 + f _ = error "panic! integral" +{-# INLINE integral #-} + +-- | Greedily munch characters while predicate holds. +-- Require at least one character. +munch1 :: CharParsing m => (Char -> Bool) -> m String +munch1 = some . satisfy +{-# INLINE munch1 #-} + +-- | Greedely munch characters while predicate holds. +-- Always succeeds. +munch :: CharParsing m => (Char -> Bool) -> m String +munch = many . 
satisfy +{-# INLINE munch #-} + +skipSpaces1 :: CharParsing m => m () +skipSpaces1 = skipSome space +{-# INLINE skipSpaces1 #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/CopyFile.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/CopyFile.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/CopyFile.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/CopyFile.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,153 @@ +{-# LANGUAGE CPP #-} +{-# OPTIONS_HADDOCK hide #-} +module Distribution.Compat.CopyFile ( + copyFile, + copyFileChanged, + filesEqual, + copyOrdinaryFile, + copyExecutableFile, + setFileOrdinary, + setFileExecutable, + setDirOrdinary, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Compat.Exception + +#ifndef mingw32_HOST_OS +import Distribution.Compat.Internal.TempFile + +import Control.Exception + ( bracketOnError, throwIO ) +import qualified Data.ByteString.Lazy as BSL +import System.IO.Error + ( ioeSetLocation ) +import System.Directory + ( doesFileExist, renameFile, removeFile ) +import System.FilePath + ( takeDirectory ) +import System.IO + ( IOMode(ReadMode), hClose, hGetBuf, hPutBuf, hFileSize + , withBinaryFile ) +import Foreign + ( allocaBytes ) + +import System.Posix.Types + ( FileMode ) +import System.Posix.Internals + ( c_chmod, withFilePath ) +import Foreign.C + ( throwErrnoPathIfMinus1_ ) + +#else /* else mingw32_HOST_OS */ + +import Control.Exception + ( throwIO ) +import qualified Data.ByteString.Lazy as BSL +import System.IO.Error + ( ioeSetLocation ) +import System.Directory + ( doesFileExist ) +import System.FilePath + ( isRelative, normalise ) +import System.IO + ( IOMode(ReadMode), hFileSize + , withBinaryFile ) + +import qualified System.Win32.File as Win32 ( copyFile ) +#endif /* mingw32_HOST_OS */ 
+ +copyOrdinaryFile, copyExecutableFile :: FilePath -> FilePath -> NoCallStackIO () +copyOrdinaryFile src dest = copyFile src dest >> setFileOrdinary dest +copyExecutableFile src dest = copyFile src dest >> setFileExecutable dest + +setFileOrdinary, setFileExecutable, setDirOrdinary :: FilePath -> NoCallStackIO () +#ifndef mingw32_HOST_OS +setFileOrdinary path = setFileMode path 0o644 -- file perms -rw-r--r-- +setFileExecutable path = setFileMode path 0o755 -- file perms -rwxr-xr-x + +setFileMode :: FilePath -> FileMode -> NoCallStackIO () +setFileMode name m = + withFilePath name $ \s -> do + throwErrnoPathIfMinus1_ "setFileMode" name (c_chmod s m) +#else +setFileOrdinary _ = return () +setFileExecutable _ = return () +#endif +-- This happens to be true on Unix and currently on Windows too: +setDirOrdinary = setFileExecutable + +-- | Copies a file to a new destination. +-- Often you should use `copyFileChanged` instead. +copyFile :: FilePath -> FilePath -> NoCallStackIO () +copyFile fromFPath toFPath = + copy + `catchIO` (\ioe -> throwIO (ioeSetLocation ioe "copyFile")) + where +#ifndef mingw32_HOST_OS + copy = withBinaryFile fromFPath ReadMode $ \hFrom -> + bracketOnError openTmp cleanTmp $ \(tmpFPath, hTmp) -> + do allocaBytes bufferSize $ copyContents hFrom hTmp + hClose hTmp + renameFile tmpFPath toFPath + openTmp = openBinaryTempFile (takeDirectory toFPath) ".copyFile.tmp" + cleanTmp (tmpFPath, hTmp) = do + hClose hTmp `catchIO` \_ -> return () + removeFile tmpFPath `catchIO` \_ -> return () + bufferSize = 4096 + + copyContents hFrom hTo buffer = do + count <- hGetBuf hFrom buffer bufferSize + when (count > 0) $ do + hPutBuf hTo buffer count + copyContents hFrom hTo buffer +#else + copy = Win32.copyFile (toExtendedLengthPath fromFPath) + (toExtendedLengthPath toFPath) + False + +-- NOTE: Shamelessly lifted from System.Directory.Internal.Windows + +-- | Add the @"\\\\?\\"@ prefix if necessary or possible. 
The path remains +-- unchanged if the prefix is not added. This function can sometimes be used +-- to bypass the @MAX_PATH@ length restriction in Windows API calls. +toExtendedLengthPath :: FilePath -> FilePath +toExtendedLengthPath path + | isRelative path = path + | otherwise = + case normalise path of + '\\' : '?' : '?' : '\\' : _ -> path + '\\' : '\\' : '?' : '\\' : _ -> path + '\\' : '\\' : '.' : '\\' : _ -> path + '\\' : subpath@('\\' : _) -> "\\\\?\\UNC" <> subpath + normalisedPath -> "\\\\?\\" <> normalisedPath +#endif /* mingw32_HOST_OS */ + +-- | Like `copyFile`, but does not touch the target if source and destination +-- are already byte-identical. This is recommended as it is useful for +-- time-stamp based recompilation avoidance. +copyFileChanged :: FilePath -> FilePath -> NoCallStackIO () +copyFileChanged src dest = do + equal <- filesEqual src dest + unless equal $ copyFile src dest + +-- | Checks if two files are byte-identical. +-- Returns False if either of the files do not exist or if files +-- are of different size. +filesEqual :: FilePath -> FilePath -> NoCallStackIO Bool +filesEqual f1 f2 = do + ex1 <- doesFileExist f1 + ex2 <- doesFileExist f2 + if not (ex1 && ex2) then return False else + withBinaryFile f1 ReadMode $ \h1 -> + withBinaryFile f2 ReadMode $ \h2 -> do + s1 <- hFileSize h1 + s2 <- hFileSize h2 + if s1 /= s2 + then return False + else do + c1 <- BSL.hGetContents h1 + c2 <- BSL.hGetContents h2 + return $! 
c1 == c2 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/CreatePipe.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/CreatePipe.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/CreatePipe.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/CreatePipe.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,77 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +module Distribution.Compat.CreatePipe (createPipe) where + +import System.IO (Handle, hSetEncoding, localeEncoding) + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.Compat.Stack + +-- The mingw32_HOST_OS CPP macro is GHC-specific +#ifdef mingw32_HOST_OS +import qualified Prelude +import Control.Exception (onException) +import Foreign.C.Error (throwErrnoIfMinus1_) +import Foreign.C.Types (CInt(..), CUInt(..)) +import Foreign.Ptr (Ptr) +import Foreign.Marshal.Array (allocaArray) +import Foreign.Storable (peek, peekElemOff) +import GHC.IO.FD (mkFD) +import GHC.IO.Device (IODeviceType(Stream)) +import GHC.IO.Handle.FD (mkHandleFromFD) +import System.IO (IOMode(ReadMode, WriteMode)) +#elif defined ghcjs_HOST_OS +#else +import System.Posix.IO (fdToHandle) +import qualified System.Posix.IO as Posix +#endif + +createPipe :: IO (Handle, Handle) +-- The mingw32_HOST_OS CPP macro is GHC-specific +#ifdef mingw32_HOST_OS +createPipe = do + (readfd, writefd) <- allocaArray 2 $ \ pfds -> do + throwErrnoIfMinus1_ "_pipe" $ c__pipe pfds 2 ({- _O_BINARY -} 32768) + readfd <- peek pfds + writefd <- peekElemOff pfds 1 + return (readfd, writefd) + (do readh <- fdToHandle readfd ReadMode + writeh <- fdToHandle writefd WriteMode + hSetEncoding readh localeEncoding + hSetEncoding writeh localeEncoding + return (readh, writeh)) `onException` (close readfd >> close writefd) + where + 
fdToHandle :: CInt -> IOMode -> NoCallStackIO Handle + fdToHandle fd mode = do + (fd', deviceType) <- mkFD fd mode (Just (Stream, 0, 0)) False False + mkHandleFromFD fd' deviceType "" mode False Nothing + + close :: CInt -> IO () + close = throwErrnoIfMinus1_ "_close" . c__close + where _ = callStack -- TODO: attach call stack to exception + + _ = callStack -- TODO: attach call stack to exceptions + +foreign import ccall "io.h _pipe" c__pipe :: + Ptr CInt -> CUInt -> CInt -> Prelude.IO CInt + +foreign import ccall "io.h _close" c__close :: + CInt -> Prelude.IO CInt +#elif defined ghcjs_HOST_OS +createPipe = error "createPipe" + where + _ = callStack +#else +createPipe = do + (readfd, writefd) <- Posix.createPipe + readh <- fdToHandle readfd + writeh <- fdToHandle writefd + hSetEncoding readh localeEncoding + hSetEncoding writeh localeEncoding + return (readh, writeh) + where + _ = callStack +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Directory.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Directory.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Directory.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Directory.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,44 @@ +{-# LANGUAGE CPP #-} + +module Distribution.Compat.Directory +( listDirectory +, makeAbsolute +, doesPathExist +) where + +#if MIN_VERSION_directory(1,2,7) +import System.Directory as Dir hiding (doesPathExist) +import System.Directory (doesPathExist) +#else +import System.Directory as Dir +#endif +#if !MIN_VERSION_directory(1,2,2) +import System.FilePath as Path +#endif + +#if !MIN_VERSION_directory(1,2,5) + +listDirectory :: FilePath -> IO [FilePath] +listDirectory path = + filter f `fmap` Dir.getDirectoryContents path + where f filename = filename /= "." && filename /= ".." 
+ +#endif + +#if !MIN_VERSION_directory(1,2,2) + +makeAbsolute :: FilePath -> IO FilePath +makeAbsolute p | Path.isAbsolute p = return p + | otherwise = do + cwd <- Dir.getCurrentDirectory + return $ cwd p + +#endif + +#if !MIN_VERSION_directory(1,2,7) + +doesPathExist :: FilePath -> IO Bool +doesPathExist path = (||) <$> doesDirectoryExist path <*> doesFileExist path + +#endif + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/DList.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/DList.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/DList.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/DList.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,48 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.DList +-- Copyright : (c) Ben Gamari 2015-2019 +-- License : BSD3 +-- +-- Maintainer : cabal-dev@haskell.org +-- Stability : experimental +-- Portability : portable +-- +-- A very simple difference list. +module Distribution.Compat.DList ( + DList, + runDList, + singleton, + fromList, + toList, + snoc, +) where + +import Prelude () +import Distribution.Compat.Prelude + +-- | Difference list. +newtype DList a = DList ([a] -> [a]) + +runDList :: DList a -> [a] +runDList (DList run) = run [] + +-- | Make 'DList' with containing single element. +singleton :: a -> DList a +singleton a = DList (a:) + +fromList :: [a] -> DList a +fromList as = DList (as ++) + +toList :: DList a -> [a] +toList = runDList + +snoc :: DList a -> a -> DList a +snoc xs x = xs <> singleton x + +instance Monoid (DList a) where + mempty = DList id + mappend = (<>) + +instance Semigroup (DList a) where + DList a <> DList b = DList (a . 
b) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Environment.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Environment.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Environment.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Environment.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,134 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +{-# OPTIONS_HADDOCK hide #-} + +module Distribution.Compat.Environment + ( getEnvironment, lookupEnv, setEnv, unsetEnv ) + where + +import Prelude () +import qualified Prelude +import Distribution.Compat.Prelude + +#ifndef mingw32_HOST_OS +#if __GLASGOW_HASKELL__ < 708 +import Foreign.C.Error (throwErrnoIf_) +#endif +#endif + +import qualified System.Environment as System +import System.Environment (lookupEnv) +#if __GLASGOW_HASKELL__ >= 708 +import System.Environment (unsetEnv) +#endif + +import Distribution.Compat.Stack + +#ifdef mingw32_HOST_OS +import Foreign.C +#if __GLASGOW_HASKELL__ < 708 +import Foreign.Ptr (nullPtr) +#endif +import GHC.Windows +#else +import Foreign.C.Types +import Foreign.C.String +import Foreign.C.Error (throwErrnoIfMinus1_) +import System.Posix.Internals ( withFilePath ) +#endif /* mingw32_HOST_OS */ + +getEnvironment :: NoCallStackIO [(String, String)] +#ifdef mingw32_HOST_OS +-- On Windows, the names of environment variables are case-insensitive, but are +-- often given in mixed-case (e.g. "PATH" is "Path"), so we have to normalise +-- them. +getEnvironment = fmap upcaseVars System.getEnvironment + where + upcaseVars = map upcaseVar + upcaseVar (var, val) = (map toUpper var, val) +#else +getEnvironment = System.getEnvironment +#endif + +-- | @setEnv name value@ sets the specified environment variable to @value@. 
+-- +-- Throws `Control.Exception.IOException` if either @name@ or @value@ is the +-- empty string or contains an equals sign. +setEnv :: String -> String -> IO () +setEnv key value_ = setEnv_ key value + where + -- NOTE: Anything that follows NUL is ignored on both POSIX and Windows. We + -- still strip it manually so that the null check above succeeds if a value + -- starts with NUL. + value = takeWhile (/= '\NUL') value_ + +setEnv_ :: String -> String -> IO () + +#ifdef mingw32_HOST_OS + +setEnv_ key value = withCWString key $ \k -> withCWString value $ \v -> do + success <- c_SetEnvironmentVariable k v + unless success (throwGetLastError "setEnv") + where + _ = callStack -- TODO: attach CallStack to exception + +# if defined(i386_HOST_ARCH) +# define WINDOWS_CCONV stdcall +# elif defined(x86_64_HOST_ARCH) +# define WINDOWS_CCONV ccall +# else +# error Unknown mingw32 arch +# endif /* i386_HOST_ARCH */ + +foreign import WINDOWS_CCONV unsafe "windows.h SetEnvironmentVariableW" + c_SetEnvironmentVariable :: LPTSTR -> LPTSTR -> Prelude.IO Bool +#else +setEnv_ key value = do + withFilePath key $ \ keyP -> + withFilePath value $ \ valueP -> + throwErrnoIfMinus1_ "setenv" $ + c_setenv keyP valueP (fromIntegral (fromEnum True)) + where + _ = callStack -- TODO: attach CallStack to exception + +foreign import ccall unsafe "setenv" + c_setenv :: CString -> CString -> CInt -> Prelude.IO CInt +#endif /* mingw32_HOST_OS */ + +#if __GLASGOW_HASKELL__ < 708 + +-- | @unsetEnv name@ removes the specified environment variable from the +-- environment of the current process. +-- +-- Throws `Control.Exception.IOException` if @name@ is the empty string or +-- contains an equals sign. 
+-- +-- @since 4.7.0.0 +unsetEnv :: String -> IO () +#ifdef mingw32_HOST_OS +unsetEnv key = withCWString key $ \k -> do + success <- c_SetEnvironmentVariable k nullPtr + unless success $ do + -- We consider unsetting an environment variable that does not exist not as + -- an error, hence we ignore eRROR_ENVVAR_NOT_FOUND. + err <- c_GetLastError + unless (err == eRROR_ENVVAR_NOT_FOUND) $ do + throwGetLastError "unsetEnv" + +eRROR_ENVVAR_NOT_FOUND :: DWORD +eRROR_ENVVAR_NOT_FOUND = 203 + +foreign import WINDOWS_CCONV unsafe "windows.h GetLastError" + c_GetLastError:: IO DWORD +#else +unsetEnv key = withFilePath key (throwErrnoIf_ (/= 0) "unsetEnv" . c_unsetenv) +#if __GLASGOW_HASKELL__ > 706 +foreign import ccall unsafe "__hsbase_unsetenv" c_unsetenv :: CString -> Prelude.IO CInt +#else +-- HACK: We hope very hard that !UNSETENV_RETURNS_VOID +foreign import ccall unsafe "unsetenv" c_unsetenv :: CString -> Prelude.IO CInt +#endif +#endif + +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Exception.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Exception.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Exception.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Exception.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,27 @@ +{-# LANGUAGE CPP #-} +module Distribution.Compat.Exception ( + catchIO, + catchExit, + tryIO, + displayException, + ) where + +import System.Exit +import qualified Control.Exception as Exception +#if __GLASGOW_HASKELL__ >= 710 +import Control.Exception (displayException) +#endif + +tryIO :: IO a -> IO (Either Exception.IOException a) +tryIO = Exception.try + +catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a +catchIO = Exception.catch + +catchExit :: IO a -> (ExitCode -> IO a) -> IO a +catchExit = Exception.catch + +#if 
__GLASGOW_HASKELL__ < 710 +displayException :: Exception.Exception e => e -> String +displayException = show +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/GetShortPathName.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/GetShortPathName.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/GetShortPathName.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/GetShortPathName.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,59 @@ +{-# LANGUAGE CPP #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.GetShortPathName +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : Windows-only +-- +-- Win32 API 'GetShortPathName' function. + +module Distribution.Compat.GetShortPathName ( getShortPathName ) + where + +import Prelude () +import Distribution.Compat.Prelude + +#ifdef mingw32_HOST_OS + +import qualified Prelude +import qualified System.Win32 as Win32 +import System.Win32 (LPCTSTR, LPTSTR, DWORD) +import Foreign.Marshal.Array (allocaArray) + +#ifdef x86_64_HOST_ARCH +#define WINAPI ccall +#else +#define WINAPI stdcall +#endif + +foreign import WINAPI unsafe "windows.h GetShortPathNameW" + c_GetShortPathName :: LPCTSTR -> LPTSTR -> DWORD -> Prelude.IO DWORD + +-- | On Windows, retrieves the short path form of the specified path. On +-- non-Windows, does nothing. See https://github.com/haskell/cabal/issues/3185. +-- +-- From MS's GetShortPathName docs: +-- +-- Passing NULL for [the second] parameter and zero for cchBuffer +-- will always return the required buffer size for a +-- specified lpszLongPath. +-- +getShortPathName :: FilePath -> NoCallStackIO FilePath +getShortPathName path = + Win32.withTString path $ \c_path -> do + c_len <- Win32.failIfZero "GetShortPathName #1 failed!" 
$ + c_GetShortPathName c_path Win32.nullPtr 0 + let arr_len = fromIntegral c_len + allocaArray arr_len $ \c_out -> do + void $ Win32.failIfZero "GetShortPathName #2 failed!" $ + c_GetShortPathName c_path c_out c_len + Win32.peekTString c_out + +#else + +getShortPathName :: FilePath -> NoCallStackIO FilePath +getShortPathName path = return path + +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Graph.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Graph.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Graph.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Graph.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,404 @@ +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE BangPatterns #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.Graph +-- Copyright : (c) Edward Z. Yang 2016 +-- License : BSD3 +-- +-- Maintainer : cabal-dev@haskell.org +-- Stability : experimental +-- Portability : portable +-- +-- A data type representing directed graphs, backed by "Data.Graph". +-- It is strict in the node type. +-- +-- This is an alternative interface to "Data.Graph". In this interface, +-- nodes (identified by the 'IsNode' type class) are associated with a +-- key and record the keys of their neighbors. This interface is more +-- convenient than 'Data.Graph.Graph', which requires vertices to be +-- explicitly handled by integer indexes. +-- +-- The current implementation has somewhat peculiar performance +-- characteristics. The asymptotics of all map-like operations mirror +-- their counterparts in "Data.Map". 
However, to perform a graph +-- operation, we first must build the "Data.Graph" representation, an +-- operation that takes /O(V + E log V)/. However, this operation can +-- be amortized across all queries on that particular graph. +-- +-- Some nodes may be broken, i.e., refer to neighbors which are not +-- stored in the graph. In our graph algorithms, we transparently +-- ignore such edges; however, you can easily query for the broken +-- vertices of a graph using 'broken' (and should, e.g., to ensure that +-- a closure of a graph is well-formed.) It's possible to take a closed +-- subset of a broken graph and get a well-formed graph. +-- +----------------------------------------------------------------------------- + +module Distribution.Compat.Graph ( + -- * Graph type + Graph, + IsNode(..), + -- * Query + null, + size, + member, + lookup, + -- * Construction + empty, + insert, + deleteKey, + deleteLookup, + -- * Combine + unionLeft, + unionRight, + -- * Graph algorithms + stronglyConnComp, + SCC(..), + cycles, + broken, + neighbors, + revNeighbors, + closure, + revClosure, + topSort, + revTopSort, + -- * Conversions + -- ** Maps + toMap, + -- ** Lists + fromDistinctList, + toList, + keys, + -- ** Sets + keysSet, + -- ** Graphs + toGraph, + -- * Node type + Node(..), + nodeValue, +) where + +import Prelude () +import qualified Distribution.Compat.Prelude as Prelude +import Distribution.Compat.Prelude hiding (lookup, null, empty) + +import Data.Graph (SCC(..)) +import qualified Data.Graph as G + +import qualified Data.Map.Strict as Map +import qualified Data.Set as Set +import qualified Data.Array as Array +import Data.Array ((!)) +import qualified Data.Tree as Tree +import Data.Either (partitionEithers) +import qualified Data.Foldable as Foldable + +-- | A graph of nodes @a@. The nodes are expected to have instance +-- of class 'IsNode'. 
+data Graph a + = Graph { + graphMap :: !(Map (Key a) a), + -- Lazily cached graph representation + graphForward :: G.Graph, + graphAdjoint :: G.Graph, + graphVertexToNode :: G.Vertex -> a, + graphKeyToVertex :: Key a -> Maybe G.Vertex, + graphBroken :: [(a, [Key a])] + } + deriving (Typeable) + +-- NB: Not a Functor! (or Traversable), because you need +-- to restrict Key a ~ Key b. We provide our own mapping +-- functions. + +-- General strategy is most operations are deferred to the +-- Map representation. + +instance Show a => Show (Graph a) where + show = show . toList + +instance (IsNode a, Read a, Show (Key a)) => Read (Graph a) where + readsPrec d s = map (\(a,r) -> (fromDistinctList a, r)) (readsPrec d s) + +instance (IsNode a, Binary a, Show (Key a)) => Binary (Graph a) where + put x = put (toList x) + get = fmap fromDistinctList get + +instance (Eq (Key a), Eq a) => Eq (Graph a) where + g1 == g2 = graphMap g1 == graphMap g2 + +instance Foldable.Foldable Graph where + fold = Foldable.fold . graphMap + foldr f z = Foldable.foldr f z . graphMap + foldl f z = Foldable.foldl f z . graphMap + foldMap f = Foldable.foldMap f . graphMap + foldl' f z = Foldable.foldl' f z . graphMap + foldr' f z = Foldable.foldr' f z . graphMap +#ifdef MIN_VERSION_base +#if MIN_VERSION_base(4,8,0) + length = Foldable.length . graphMap + null = Foldable.null . graphMap + toList = Foldable.toList . graphMap + elem x = Foldable.elem x . graphMap + maximum = Foldable.maximum . graphMap + minimum = Foldable.minimum . graphMap + sum = Foldable.sum . graphMap + product = Foldable.product . graphMap +#endif +#endif + +instance (NFData a, NFData (Key a)) => NFData (Graph a) where + rnf Graph { + graphMap = m, + graphForward = gf, + graphAdjoint = ga, + graphVertexToNode = vtn, + graphKeyToVertex = ktv, + graphBroken = b + } = gf `seq` ga `seq` vtn `seq` ktv `seq` b `seq` rnf m + +-- TODO: Data instance? 
+ +-- | The 'IsNode' class is used for datatypes which represent directed +-- graph nodes. A node of type @a@ is associated with some unique key of +-- type @'Key' a@; given a node we can determine its key ('nodeKey') +-- and the keys of its neighbors ('nodeNeighbors'). +class Ord (Key a) => IsNode a where + type Key a + nodeKey :: a -> Key a + nodeNeighbors :: a -> [Key a] + +instance (IsNode a, IsNode b, Key a ~ Key b) => IsNode (Either a b) where + type Key (Either a b) = Key a + nodeKey (Left x) = nodeKey x + nodeKey (Right x) = nodeKey x + nodeNeighbors (Left x) = nodeNeighbors x + nodeNeighbors (Right x) = nodeNeighbors x + +-- | A simple, trivial data type which admits an 'IsNode' instance. +data Node k a = N a k [k] + deriving (Show, Eq) + +-- | Get the value from a 'Node'. +nodeValue :: Node k a -> a +nodeValue (N a _ _) = a + +instance Functor (Node k) where + fmap f (N a k ks) = N (f a) k ks + +instance Ord k => IsNode (Node k a) where + type Key (Node k a) = k + nodeKey (N _ k _) = k + nodeNeighbors (N _ _ ks) = ks + +-- TODO: Maybe introduce a typeclass for items which just +-- keys (so, Key associated type, and nodeKey method). But +-- I didn't need it here, so I didn't introduce it. + +-- Query + +-- | /O(1)/. Is the graph empty? +null :: Graph a -> Bool +null = Map.null . toMap + +-- | /O(1)/. The number of nodes in the graph. +size :: Graph a -> Int +size = Map.size . toMap + +-- | /O(log V)/. Check if the key is in the graph. +member :: IsNode a => Key a -> Graph a -> Bool +member k g = Map.member k (toMap g) + +-- | /O(log V)/. Lookup the node at a key in the graph. +lookup :: IsNode a => Key a -> Graph a -> Maybe a +lookup k g = Map.lookup k (toMap g) + +-- Construction + +-- | /O(1)/. The empty graph. +empty :: IsNode a => Graph a +empty = fromMap Map.empty + +-- | /O(log V)/. Insert a node into a graph. +insert :: IsNode a => a -> Graph a -> Graph a +insert !n g = fromMap (Map.insert (nodeKey n) n (toMap g)) + +-- | /O(log V)/. 
Delete the node at a key from the graph. +deleteKey :: IsNode a => Key a -> Graph a -> Graph a +deleteKey k g = fromMap (Map.delete k (toMap g)) + +-- | /O(log V)/. Lookup and delete. This function returns the deleted +-- value if it existed. +deleteLookup :: IsNode a => Key a -> Graph a -> (Maybe a, Graph a) +deleteLookup k g = + let (r, m') = Map.updateLookupWithKey (\_ _ -> Nothing) k (toMap g) + in (r, fromMap m') + +-- Combining + +-- | /O(V + V')/. Right-biased union, preferring entries +-- from the second map when conflicts occur. +-- @'nodeKey' x = 'nodeKey' (f x)@. +unionRight :: IsNode a => Graph a -> Graph a -> Graph a +unionRight g g' = fromMap (Map.union (toMap g') (toMap g)) + +-- | /O(V + V')/. Left-biased union, preferring entries from +-- the first map when conflicts occur. +unionLeft :: IsNode a => Graph a -> Graph a -> Graph a +unionLeft = flip unionRight + +-- Graph-like operations + +-- | /Ω(V + E)/. Compute the strongly connected components of a graph. +-- Requires amortized construction of graph. +stronglyConnComp :: Graph a -> [SCC a] +stronglyConnComp g = map decode forest + where + forest = G.scc (graphForward g) + decode (Tree.Node v []) + | mentions_itself v = CyclicSCC [graphVertexToNode g v] + | otherwise = AcyclicSCC (graphVertexToNode g v) + decode other = CyclicSCC (dec other []) + where dec (Tree.Node v ts) vs + = graphVertexToNode g v : foldr dec vs ts + mentions_itself v = v `elem` (graphForward g ! v) +-- Implementation copied from 'stronglyConnCompR' in 'Data.Graph'. + +-- | /Ω(V + E)/. Compute the cycles of a graph. +-- Requires amortized construction of graph. +cycles :: Graph a -> [[a]] +cycles g = [ vs | CyclicSCC vs <- stronglyConnComp g ] + +-- | /O(1)/. Return a list of nodes paired with their broken +-- neighbors (i.e., neighbor keys which are not in the graph). +-- Requires amortized construction of graph. 
+broken :: Graph a -> [(a, [Key a])] +broken g = graphBroken g + +-- | Lookup the immediate neighbors from a key in the graph. +-- Requires amortized construction of graph. +neighbors :: Graph a -> Key a -> Maybe [a] +neighbors g k = do + v <- graphKeyToVertex g k + return (map (graphVertexToNode g) (graphForward g ! v)) + +-- | Lookup the immediate reverse neighbors from a key in the graph. +-- Requires amortized construction of graph. +revNeighbors :: Graph a -> Key a -> Maybe [a] +revNeighbors g k = do + v <- graphKeyToVertex g k + return (map (graphVertexToNode g) (graphAdjoint g ! v)) + +-- | Compute the subgraph which is the closure of some set of keys. +-- Returns @Nothing@ if one (or more) keys are not present in +-- the graph. +-- Requires amortized construction of graph. +closure :: Graph a -> [Key a] -> Maybe [a] +closure g ks = do + vs <- traverse (graphKeyToVertex g) ks + return (decodeVertexForest g (G.dfs (graphForward g) vs)) + +-- | Compute the reverse closure of a graph from some set +-- of keys. Returns @Nothing@ if one (or more) keys are not present in +-- the graph. +-- Requires amortized construction of graph. +revClosure :: Graph a -> [Key a] -> Maybe [a] +revClosure g ks = do + vs <- traverse (graphKeyToVertex g) ks + return (decodeVertexForest g (G.dfs (graphAdjoint g) vs)) + +flattenForest :: Tree.Forest a -> [a] +flattenForest = concatMap Tree.flatten + +decodeVertexForest :: Graph a -> Tree.Forest G.Vertex -> [a] +decodeVertexForest g = map (graphVertexToNode g) . flattenForest + +-- | Topologically sort the nodes of a graph. +-- Requires amortized construction of graph. +topSort :: Graph a -> [a] +topSort g = map (graphVertexToNode g) $ G.topSort (graphForward g) + +-- | Reverse topologically sort the nodes of a graph. +-- Requires amortized construction of graph. +revTopSort :: Graph a -> [a] +revTopSort g = map (graphVertexToNode g) $ G.topSort (graphAdjoint g) + +-- Conversions + +-- | /O(1)/. 
Convert a map from keys to nodes into a graph. +-- The map must satisfy the invariant that +-- @'fromMap' m == 'fromList' ('Data.Map.elems' m)@; +-- if you can't fulfill this invariant use @'fromList' ('Data.Map.elems' m)@ +-- instead. The values of the map are assumed to already +-- be in WHNF. +fromMap :: IsNode a => Map (Key a) a -> Graph a +fromMap m + = Graph { graphMap = m + -- These are lazily computed! + , graphForward = g + , graphAdjoint = G.transposeG g + , graphVertexToNode = vertex_to_node + , graphKeyToVertex = key_to_vertex + , graphBroken = broke + } + where + try_key_to_vertex k = maybe (Left k) Right (key_to_vertex k) + + (brokenEdges, edges) + = unzip + $ [ partitionEithers (map try_key_to_vertex (nodeNeighbors n)) + | n <- ns ] + broke = filter (not . Prelude.null . snd) (zip ns brokenEdges) + + g = Array.listArray bounds edges + + ns = Map.elems m -- sorted ascending + vertices = zip (map nodeKey ns) [0..] + vertex_map = Map.fromAscList vertices + key_to_vertex k = Map.lookup k vertex_map + + vertex_to_node vertex = nodeTable ! vertex + + nodeTable = Array.listArray bounds ns + bounds = (0, Map.size m - 1) + +-- | /O(V log V)/. Convert a list of nodes (with distinct keys) into a graph. +fromDistinctList :: (IsNode a, Show (Key a)) => [a] -> Graph a +fromDistinctList = fromMap + . Map.fromListWith (\_ -> duplicateError) + . map (\n -> n `seq` (nodeKey n, n)) + where + duplicateError n = error $ "Graph.fromDistinctList: duplicate key: " + ++ show (nodeKey n) + +-- Map-like operations + +-- | /O(V)/. Convert a graph into a list of nodes. +toList :: Graph a -> [a] +toList g = Map.elems (toMap g) + +-- | /O(V)/. Convert a graph into a list of keys. +keys :: Graph a -> [Key a] +keys g = Map.keys (toMap g) + +-- | /O(V)/. Convert a graph into a set of keys. +keysSet :: Graph a -> Set.Set (Key a) +keysSet g = Map.keysSet (toMap g) + +-- | /O(1)/. Convert a graph into a map from keys to nodes. 
+-- The resulting map @m@ is guaranteed to have the property that +-- @'Prelude.all' (\(k,n) -> k == 'nodeKey' n) ('Data.Map.toList' m)@. +toMap :: Graph a -> Map (Key a) a +toMap = graphMap + +-- Graph-like operations + +-- | /O(1)/. Convert a graph into a 'Data.Graph.Graph'. +-- Requires amortized construction of graph. +toGraph :: Graph a -> (G.Graph, G.Vertex -> a, Key a -> Maybe G.Vertex) +toGraph g = (graphForward g, graphVertexToNode g, graphKeyToVertex g) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Internal/TempFile.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Internal/TempFile.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Internal/TempFile.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Internal/TempFile.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,123 @@ +{-# LANGUAGE CPP #-} +{-# OPTIONS_HADDOCK hide #-} +module Distribution.Compat.Internal.TempFile ( + openTempFile, + openBinaryTempFile, + openNewBinaryFile, + createTempDirectory, + ) where + +import Distribution.Compat.Exception + +import System.FilePath (()) +import Foreign.C (CInt, eEXIST, getErrno, errnoToIOError) + +import System.IO (Handle, openTempFile, openBinaryTempFile) +import Data.Bits ((.|.)) +import System.Posix.Internals (c_open, c_close, o_CREAT, o_EXCL, o_RDWR, + o_BINARY, o_NONBLOCK, o_NOCTTY, + withFilePath, c_getpid) +import System.IO.Error (isAlreadyExistsError) +import GHC.IO.Handle.FD (fdToHandle) +import Control.Exception (onException) + +#if defined(mingw32_HOST_OS) || defined(ghcjs_HOST_OS) +import System.Directory ( createDirectory ) +#else +import qualified System.Posix +#endif + +-- ------------------------------------------------------------ +-- * temporary files +-- ------------------------------------------------------------ + +-- This is here for Haskell 
implementations that do not come with +-- System.IO.openTempFile. This includes nhc-1.20, hugs-2006.9. +-- TODO: This file should probably be removed. + +-- This is a copy/paste of the openBinaryTempFile definition, but +-- if uses 666 rather than 600 for the permissions. The base library +-- needs to be changed to make this better. +openNewBinaryFile :: FilePath -> String -> IO (FilePath, Handle) +openNewBinaryFile dir template = do + pid <- c_getpid + findTempName pid + where + -- We split off the last extension, so we can use .foo.ext files + -- for temporary files (hidden on Unix OSes). Unfortunately we're + -- below file path in the hierarchy here. + (prefix,suffix) = + case break (== '.') $ reverse template of + -- First case: template contains no '.'s. Just re-reverse it. + (rev_suffix, "") -> (reverse rev_suffix, "") + -- Second case: template contains at least one '.'. Strip the + -- dot from the prefix and prepend it to the suffix (if we don't + -- do this, the unique number will get added after the '.' and + -- thus be part of the extension, which is wrong.) + (rev_suffix, '.':rest) -> (reverse rest, '.':reverse rev_suffix) + -- Otherwise, something is wrong, because (break (== '.')) should + -- always return a pair with either the empty string or a string + -- beginning with '.' as the second component. + _ -> error "bug in System.IO.openTempFile" + + oflags = rw_flags .|. o_EXCL .|. 
o_BINARY + + findTempName x = do + fd <- withFilePath filepath $ \ f -> + c_open f oflags 0o666 + if fd < 0 + then do + errno <- getErrno + if errno == eEXIST + then findTempName (x+1) + else ioError (errnoToIOError "openNewBinaryFile" errno Nothing (Just dir)) + else do + -- TODO: We want to tell fdToHandle what the file path is, + -- as any exceptions etc will only be able to report the + -- FD currently + h <- fdToHandle fd `onException` c_close fd + return (filepath, h) + where + filename = prefix ++ show x ++ suffix + filepath = dir `combine` filename + + -- FIXME: bits copied from System.FilePath + combine a b + | null b = a + | null a = b + | last a == pathSeparator = a ++ b + | otherwise = a ++ [pathSeparator] ++ b + +-- FIXME: Should use System.FilePath library +pathSeparator :: Char +#ifdef mingw32_HOST_OS +pathSeparator = '\\' +#else +pathSeparator = '/' +#endif + +-- FIXME: Copied from GHC.Handle +std_flags, output_flags, rw_flags :: CInt +std_flags = o_NONBLOCK .|. o_NOCTTY +output_flags = std_flags .|. o_CREAT +rw_flags = output_flags .|. 
o_RDWR + +createTempDirectory :: FilePath -> String -> IO FilePath +createTempDirectory dir template = do + pid <- c_getpid + findTempName pid + where + findTempName x = do + let dirpath = dir template ++ "-" ++ show x + r <- tryIO $ mkPrivateDir dirpath + case r of + Right _ -> return dirpath + Left e | isAlreadyExistsError e -> findTempName (x+1) + | otherwise -> ioError e + +mkPrivateDir :: String -> IO () +#if defined(mingw32_HOST_OS) || defined(ghcjs_HOST_OS) +mkPrivateDir s = createDirectory s +#else +mkPrivateDir s = System.Posix.createDirectory s 0o700 +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Lens.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Lens.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,264 @@ +{-# LANGUAGE RankNTypes #-} +-- | This module provides very basic lens functionality, without extra dependencies. +-- +-- For the documentation of the combinators see package. +-- This module uses the same vocabulary. 
+module Distribution.Compat.Lens ( + -- * Types + Lens, + Lens', + Traversal, + Traversal', + -- ** LensLike + LensLike, + LensLike', + -- ** rank-1 types + Getting, + AGetter, + ASetter, + ALens, + ALens', + -- * Getter + view, + use, + getting, + -- * Setter + set, + over, + -- * Fold + toDListOf, + toListOf, + toSetOf, + -- * Lens + cloneLens, + aview, + -- * Common lenses + _1, _2, + -- * Operators + (&), + (^.), + (.~), (?~), (%~), + (.=), (?=), (%=), + (^#), + (#~), (#%~), + -- * Internal Comonads + Pretext (..), + -- * Cabal developer info + -- $development + ) where + +import Prelude() +import Distribution.Compat.Prelude + +import Control.Applicative (Const (..)) +import Data.Functor.Identity (Identity (..)) +import Control.Monad.State.Class (MonadState (..), gets, modify) + +import qualified Distribution.Compat.DList as DList +import qualified Data.Set as Set + +------------------------------------------------------------------------------- +-- Types +------------------------------------------------------------------------------- + +type LensLike f s t a b = (a -> f b) -> s -> f t +type LensLike' f s a = (a -> f a) -> s -> f s + +type Lens s t a b = forall f. Functor f => LensLike f s t a b +type Traversal s t a b = forall f. 
Applicative f => LensLike f s t a b + +type Lens' s a = Lens s s a a +type Traversal' s a = Traversal s s a a + +type Getting r s a = LensLike (Const r) s s a a + +type AGetter s a = LensLike (Const a) s s a a -- this doens't exist in 'lens' +type ASetter s t a b = LensLike Identity s t a b +type ALens s t a b = LensLike (Pretext a b) s t a b + +type ALens' s a = ALens s s a a + +------------------------------------------------------------------------------- +-- Getter +------------------------------------------------------------------------------- + +view :: Getting a s a -> s -> a +view l s = getConst (l Const s) +{-# INLINE view #-} + +use :: MonadState s m => Getting a s a -> m a +use l = gets (view l) +{-# INLINE use #-} + +-- | @since 2.4 +-- +-- >>> (3 :: Int) ^. getting (+2) . getting show +-- "5" +getting :: (s -> a) -> Getting r s a +getting k f = Const . getConst . f . k +{-# INLINE getting #-} + +------------------------------------------------------------------------------- +-- Setter +------------------------------------------------------------------------------- + +set :: ASetter s t a b -> b -> s -> t +set l x = over l (const x) + +over :: ASetter s t a b -> (a -> b) -> s -> t +over l f s = runIdentity (l (\x -> Identity (f x)) s) + +------------------------------------------------------------------------------- +-- Fold +------------------------------------------------------------------------------- + +toDListOf :: Getting (DList.DList a) s a -> s -> DList.DList a +toDListOf l s = getConst (l (\x -> Const (DList.singleton x)) s) + +toListOf :: Getting (DList.DList a) s a -> s -> [a] +toListOf l = DList.runDList . 
toDListOf l + +toSetOf :: Getting (Set.Set a) s a -> s -> Set.Set a +toSetOf l s = getConst (l (\x -> Const (Set.singleton x)) s) + +------------------------------------------------------------------------------- +-- Lens +------------------------------------------------------------------------------- + +aview :: ALens s t a b -> s -> a +aview l = pretextPos . l pretextSell +{-# INLINE aview #-} + +{- +lens :: (s -> a) -> (s -> a -> s) -> Lens' s a +lens sa sbt afb s = sbt s <$> afb (sa s) +-} + +------------------------------------------------------------------------------- +-- Common +------------------------------------------------------------------------------- + +_1 :: Lens (a, c) (b, c) a b +_1 f (a, c) = flip (,) c <$> f a + +_2 :: Lens (c, a) (c, b) a b +_2 f (c, a) = (,) c <$> f a + +------------------------------------------------------------------------------- +-- Operators +------------------------------------------------------------------------------- + +-- | '&' is a reverse application operator +(&) :: a -> (a -> b) -> b +(&) = flip ($) +{-# INLINE (&) #-} +infixl 1 & + +infixl 8 ^., ^# +infixr 4 .~, %~, ?~ +infixr 4 #~, #%~ +infixr 4 .=, %=, ?= + +(^.) :: s -> Getting a s a -> a +s ^. l = getConst (l Const s) +{-# INLINE (^.) 
#-} + +(.~) :: ASetter s t a b -> b -> s -> t +(.~) = set +{-# INLINE (.~) #-} + +(?~) :: ASetter s t a (Maybe b) -> b -> s -> t +l ?~ b = set l (Just b) +{-# INLINE (?~) #-} + +(%~) :: ASetter s t a b -> (a -> b) -> s -> t +(%~) = over +{-# INLINE (%~) #-} + +(.=) :: MonadState s m => ASetter s s a b -> b -> m () +l .= b = modify (l .~ b) +{-# INLINE (.=) #-} + +(?=) :: MonadState s m => ASetter s s a (Maybe b) -> b -> m () +l ?= b = modify (l ?~ b) +{-# INLINE (?=) #-} + +(%=) :: MonadState s m => ASetter s s a b -> (a -> b) -> m () +l %= f = modify (l %~ f) +{-# INLINE (%=) #-} + +(^#) :: s -> ALens s t a b -> a +s ^# l = aview l s + +(#~) :: ALens s t a b -> b -> s -> t +(#~) l b s = pretextPeek b (l pretextSell s) +{-# INLINE (#~) #-} + +(#%~) :: ALens s t a b -> (a -> b) -> s -> t +(#%~) l f s = pretextPeeks f (l pretextSell s) +{-# INLINE (#%~) #-} + +pretextSell :: a -> Pretext a b b +pretextSell a = Pretext (\afb -> afb a) +{-# INLINE pretextSell #-} + +pretextPeeks :: (a -> b) -> Pretext a b t -> t +pretextPeeks f (Pretext m) = runIdentity $ m (\x -> Identity (f x)) +{-# INLINE pretextPeeks #-} + +pretextPeek :: b -> Pretext a b t -> t +pretextPeek b (Pretext m) = runIdentity $ m (\_ -> Identity b) +{-# INLINE pretextPeek #-} + +pretextPos :: Pretext a b t -> a +pretextPos (Pretext m) = getConst (m Const) +{-# INLINE pretextPos #-} + +cloneLens :: Functor f => ALens s t a b -> LensLike f s t a b +cloneLens l f s = runPretext (l pretextSell s) f +{-# INLINE cloneLens #-} + +------------------------------------------------------------------------------- +-- Comonads +------------------------------------------------------------------------------- + +-- | @lens@ variant is also parametrised by profunctor. +data Pretext a b t = Pretext { runPretext :: forall f. 
Functor f => (a -> f b) -> f t } + +instance Functor (Pretext a b) where + fmap f (Pretext pretext) = Pretext (\afb -> fmap f (pretext afb)) + +------------------------------------------------------------------------------- +-- Documentation +------------------------------------------------------------------------------- + +-- $development +-- +-- We cannot depend on @template-haskell@, because Cabal is a boot library. +-- This fact makes defining optics a manual task. Here is a small recipe to +-- make the process less tedious. +-- +-- First start a repl +-- +-- > cabal new-repl Cabal:hackage-tests +-- +-- Because @--extra-package@ isn't yet implemented, we use a test-suite +-- with @generics-sop@ dependency. +-- +-- In the repl, we load a helper script: +-- +-- > :l ../generics-sop-lens.hs +-- +-- Now we are set up to derive lenses! +-- +-- > :m +Distribution.Types.SourceRepo +-- > putStr $ genericLenses (Proxy :: Proxy SourceRepo) +-- +-- @ +-- repoKind :: Lens' SourceRepo RepoKind +-- repoKind f s = fmap (\\x -> s { T.repoKind = x }) (f (T.repoKind s)) +-- \{-# INLINE repoKind #-\} +-- ... +-- @ +-- +-- /Note:/ You may need to adjust type-aliases, e.g. `String` to `FilePath`. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/MonadFail.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/MonadFail.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/MonadFail.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/MonadFail.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,36 @@ +{-# LANGUAGE CPP #-} + +-- | Compatibility layer for "Control.Monad.Fail" +module Distribution.Compat.MonadFail ( MonadFail(fail) ) where +#if __GLASGOW_HASKELL__ >= 800 +-- provided by base-4.9.0.0 and later +import Control.Monad.Fail (MonadFail(fail)) +#else +-- the following code corresponds to +-- http://hackage.haskell.org/package/fail-4.9.0.0 +import qualified Prelude as P +import Distribution.Compat.Prelude hiding (fail) + +import Text.ParserCombinators.ReadP +import Text.ParserCombinators.ReadPrec + +class Monad m => MonadFail m where + fail :: String -> m a + +-- instances provided by base-4.9 + +instance MonadFail Maybe where + fail _ = Nothing + +instance MonadFail [] where + fail _ = [] + +instance MonadFail P.IO where + fail = P.fail + +instance MonadFail ReadPrec where + fail = P.fail -- = P (\_ -> fail s) + +instance MonadFail ReadP where + fail = P.fail +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Newtype.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Newtype.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Newtype.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Newtype.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,70 @@ +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE FunctionalDependencies #-} +-- | Per Conor McBride, the 'Newtype' typeclass represents the 
packing and +-- unpacking of a newtype, and allows you to operatate under that newtype with +-- functions such as 'ala'. +module Distribution.Compat.Newtype ( + Newtype (..), + ala, + alaf, + pack', + unpack', + ) where + +import Data.Functor.Identity (Identity (..)) +import Data.Monoid (Sum (..), Product (..), Endo (..)) + +-- | The @FunctionalDependencies@ version of 'Newtype' type-class. +-- +-- /Note:/ for actual newtypes the implementation can be +-- @pack = coerce; unpack = coerce@. We don't have default implementation, +-- because @Cabal@ have to support older than @base >= 4.7@ compilers. +-- Also, 'Newtype' could witness a non-structural isomorphism. +class Newtype n o | n -> o where + pack :: o -> n + unpack :: n -> o + +instance Newtype (Identity a) a where + pack = Identity + unpack = runIdentity + +instance Newtype (Sum a) a where + pack = Sum + unpack = getSum + +instance Newtype (Product a) a where + pack = Product + unpack = getProduct + +instance Newtype (Endo a) (a -> a) where + pack = Endo + unpack = appEndo + +-- | +-- +-- >>> ala Sum foldMap [1, 2, 3, 4 :: Int] +-- 10 +-- +-- /Note:/ the user supplied function for the newtype is /ignored/. +-- +-- >>> ala (Sum . (+1)) foldMap [1, 2, 3, 4 :: Int] +-- 10 +ala :: (Newtype n o, Newtype n' o') => (o -> n) -> ((o -> n) -> b -> n') -> (b -> o') +ala pa hof = alaf pa hof id + +-- | +-- +-- >>> alaf Sum foldMap length ["cabal", "install"] +-- 12 +-- +-- /Note:/ as with 'ala', the user supplied function for the newtype is /ignored/. +alaf :: (Newtype n o, Newtype n' o') => (o -> n) -> ((a -> n) -> b -> n') -> (a -> o) -> (b -> o') +alaf _ hof f = unpack . hof (pack . f) + +-- | Variant of 'pack', which takes a phantom type. +pack' :: Newtype n o => (o -> n) -> o -> n +pack' _ = pack + +-- | Variant of 'pack', which takes a phantom type. 
+unpack' :: Newtype n o => (o -> n) -> n -> o +unpack' _ = unpack diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Parsing.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Parsing.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Parsing.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Parsing.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,403 @@ +{-# LANGUAGE GADTs, UndecidableInstances #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.Parsing +-- Copyright : (c) Edward Kmett 2011-2012 +-- License : BSD3 +-- +-- Maintainer : ekmett@gmail.com +-- Stability : experimental +-- Portability : non-portable +-- +-- Alternative parser combinators. +-- +-- Originally in @parsers@ package. +-- +----------------------------------------------------------------------------- +module Distribution.Compat.Parsing + ( + -- * Parsing Combinators + choice + , option + , optional -- from Control.Applicative, parsec optionMaybe + , skipOptional -- parsec optional + , between + , some -- from Control.Applicative, parsec many1 + , many -- from Control.Applicative + , sepBy + , sepBy1 + -- , sepByNonEmpty + , sepEndBy1 + -- , sepEndByNonEmpty + , sepEndBy + , endBy1 + -- , endByNonEmpty + , endBy + , count + , chainl + , chainr + , chainl1 + , chainr1 + , manyTill + -- * Parsing Class + , Parsing(..) 
+ ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Control.Applicative ((<**>), optional) +import Control.Monad.Trans.Class (lift) +import Control.Monad.Trans.State.Lazy as Lazy +import Control.Monad.Trans.State.Strict as Strict +import Control.Monad.Trans.Writer.Lazy as Lazy +import Control.Monad.Trans.Writer.Strict as Strict +import Control.Monad.Trans.RWS.Lazy as Lazy +import Control.Monad.Trans.RWS.Strict as Strict +import Control.Monad.Trans.Reader (ReaderT (..)) +import Control.Monad.Trans.Identity (IdentityT (..)) +import Data.Foldable (asum) + +import qualified Text.Parsec as Parsec +import qualified Distribution.Compat.ReadP as ReadP + +-- | @choice ps@ tries to apply the parsers in the list @ps@ in order, +-- until one of them succeeds. Returns the value of the succeeding +-- parser. +choice :: Alternative m => [m a] -> m a +choice = asum +{-# INLINE choice #-} + +-- | @option x p@ tries to apply parser @p@. If @p@ fails without +-- consuming input, it returns the value @x@, otherwise the value +-- returned by @p@. +-- +-- > priority = option 0 (digitToInt <$> digit) +option :: Alternative m => a -> m a -> m a +option x p = p <|> pure x +{-# INLINE option #-} + +-- | @skipOptional p@ tries to apply parser @p@. It will parse @p@ or nothing. +-- It only fails if @p@ fails after consuming input. It discards the result +-- of @p@. (Plays the role of parsec's optional, which conflicts with Applicative's optional) +skipOptional :: Alternative m => m a -> m () +skipOptional p = (() <$ p) <|> pure () +{-# INLINE skipOptional #-} + +-- | @between open close p@ parses @open@, followed by @p@ and @close@. +-- Returns the value returned by @p@. +-- +-- > braces = between (symbol "{") (symbol "}") +between :: Applicative m => m bra -> m ket -> m a -> m a +between bra ket p = bra *> p <* ket +{-# INLINE between #-} + +-- | @sepBy p sep@ parses /zero/ or more occurrences of @p@, separated +-- by @sep@. Returns a list of values returned by @p@. 
+-- +-- > commaSep p = p `sepBy` (symbol ",") +sepBy :: Alternative m => m a -> m sep -> m [a] +sepBy p sep = sepBy1 p sep <|> pure [] +{-# INLINE sepBy #-} + +-- | @sepBy1 p sep@ parses /one/ or more occurrences of @p@, separated +-- by @sep@. Returns a list of values returned by @p@. +sepBy1 :: Alternative m => m a -> m sep -> m [a] +sepBy1 p sep = (:) <$> p <*> many (sep *> p) +-- toList <$> sepByNonEmpty p sep +{-# INLINE sepBy1 #-} + +{- +-- | @sepByNonEmpty p sep@ parses /one/ or more occurrences of @p@, separated +-- by @sep@. Returns a non-empty list of values returned by @p@. +sepByNonEmpty :: Alternative m => m a -> m sep -> m (NonEmpty a) +sepByNonEmpty p sep = (:|) <$> p <*> many (sep *> p) +{-# INLINE sepByNonEmpty #-} +-} + +-- | @sepEndBy1 p sep@ parses /one/ or more occurrences of @p@, +-- separated and optionally ended by @sep@. Returns a list of values +-- returned by @p@. +sepEndBy1 :: Alternative m => m a -> m sep -> m [a] +sepEndBy1 p sep = (:) <$> p <*> ((sep *> sepEndBy p sep) <|> pure []) +-- toList <$> sepEndByNonEmpty p sep + +{- +-- | @sepEndByNonEmpty p sep@ parses /one/ or more occurrences of @p@, +-- separated and optionally ended by @sep@. Returns a non-empty list of values +-- returned by @p@. +sepEndByNonEmpty :: Alternative m => m a -> m sep -> m (NonEmpty a) +sepEndByNonEmpty p sep = (:|) <$> p <*> ((sep *> sepEndBy p sep) <|> pure []) +-} + +-- | @sepEndBy p sep@ parses /zero/ or more occurrences of @p@, +-- separated and optionally ended by @sep@, ie. haskell style +-- statements. Returns a list of values returned by @p@. +-- +-- > haskellStatements = haskellStatement `sepEndBy` semi +sepEndBy :: Alternative m => m a -> m sep -> m [a] +sepEndBy p sep = sepEndBy1 p sep <|> pure [] +{-# INLINE sepEndBy #-} + +-- | @endBy1 p sep@ parses /one/ or more occurrences of @p@, separated +-- and ended by @sep@. Returns a list of values returned by @p@. 
+endBy1 :: Alternative m => m a -> m sep -> m [a] +endBy1 p sep = some (p <* sep) +{-# INLINE endBy1 #-} + +{- +-- | @endByNonEmpty p sep@ parses /one/ or more occurrences of @p@, separated +-- and ended by @sep@. Returns a non-empty list of values returned by @p@. +endByNonEmpty :: Alternative m => m a -> m sep -> m (NonEmpty a) +endByNonEmpty p sep = some1 (p <* sep) +{-# INLINE endByNonEmpty #-} +-} + +-- | @endBy p sep@ parses /zero/ or more occurrences of @p@, separated +-- and ended by @sep@. Returns a list of values returned by @p@. +-- +-- > cStatements = cStatement `endBy` semi +endBy :: Alternative m => m a -> m sep -> m [a] +endBy p sep = many (p <* sep) +{-# INLINE endBy #-} + +-- | @count n p@ parses @n@ occurrences of @p@. If @n@ is smaller or +-- equal to zero, the parser equals to @return []@. Returns a list of +-- @n@ values returned by @p@. +count :: Applicative m => Int -> m a -> m [a] +count n p | n <= 0 = pure [] + | otherwise = sequenceA (replicate n p) +{-# INLINE count #-} + +-- | @chainr p op x@ parses /zero/ or more occurrences of @p@, +-- separated by @op@ Returns a value obtained by a /right/ associative +-- application of all functions returned by @op@ to the values returned +-- by @p@. If there are no occurrences of @p@, the value @x@ is +-- returned. +chainr :: Alternative m => m a -> m (a -> a -> a) -> a -> m a +chainr p op x = chainr1 p op <|> pure x +{-# INLINE chainr #-} + +-- | @chainl p op x@ parses /zero/ or more occurrences of @p@, +-- separated by @op@. Returns a value obtained by a /left/ associative +-- application of all functions returned by @op@ to the values returned +-- by @p@. If there are zero occurrences of @p@, the value @x@ is +-- returned. 
+chainl :: Alternative m => m a -> m (a -> a -> a) -> a -> m a +chainl p op x = chainl1 p op <|> pure x +{-# INLINE chainl #-} + +-- | @chainl1 p op x@ parses /one/ or more occurrences of @p@, +-- separated by @op@ Returns a value obtained by a /left/ associative +-- application of all functions returned by @op@ to the values returned +-- by @p@. . This parser can for example be used to eliminate left +-- recursion which typically occurs in expression grammars. +-- +-- > expr = term `chainl1` addop +-- > term = factor `chainl1` mulop +-- > factor = parens expr <|> integer +-- > +-- > mulop = (*) <$ symbol "*" +-- > <|> div <$ symbol "/" +-- > +-- > addop = (+) <$ symbol "+" +-- > <|> (-) <$ symbol "-" +chainl1 :: Alternative m => m a -> m (a -> a -> a) -> m a +chainl1 p op = scan where + scan = p <**> rst + rst = (\f y g x -> g (f x y)) <$> op <*> p <*> rst <|> pure id +{-# INLINE chainl1 #-} + +-- | @chainr1 p op x@ parses /one/ or more occurrences of @p@, +-- separated by @op@ Returns a value obtained by a /right/ associative +-- application of all functions returned by @op@ to the values returned +-- by @p@. +chainr1 :: Alternative m => m a -> m (a -> a -> a) -> m a +chainr1 p op = scan where + scan = p <**> rst + rst = (flip <$> op <*> scan) <|> pure id +{-# INLINE chainr1 #-} + +-- | @manyTill p end@ applies parser @p@ /zero/ or more times until +-- parser @end@ succeeds. Returns the list of values returned by @p@. +-- This parser can be used to scan comments: +-- +-- > simpleComment = do{ string "")) +-- > } +-- +-- Note the overlapping parsers @anyChar@ and @string \"-->\"@, and +-- therefore the use of the 'try' combinator. +manyTill :: Alternative m => m a -> m end -> m [a] +manyTill p end = go where go = ([] <$ end) <|> ((:) <$> p <*> go) +{-# INLINE manyTill #-} + +infixr 0 + +-- | Additional functionality needed to describe parsers independent of input type. 
+class Alternative m => Parsing m where + -- | Take a parser that may consume input, and on failure, go back to + -- where we started and fail as if we didn't consume input. + try :: m a -> m a + + -- | Give a parser a name + () :: m a -> String -> m a + + -- | A version of many that discards its input. Specialized because it + -- can often be implemented more cheaply. + skipMany :: m a -> m () + skipMany p = () <$ many p + {-# INLINE skipMany #-} + + -- | @skipSome p@ applies the parser @p@ /one/ or more times, skipping + -- its result. (aka skipMany1 in parsec) + skipSome :: m a -> m () + skipSome p = p *> skipMany p + {-# INLINE skipSome #-} + + -- | Used to emit an error on an unexpected token + unexpected :: String -> m a + + -- | This parser only succeeds at the end of the input. This is not a + -- primitive parser but it is defined using 'notFollowedBy'. + -- + -- > eof = notFollowedBy anyChar "end of input" + eof :: m () + + -- | @notFollowedBy p@ only succeeds when parser @p@ fails. This parser + -- does not consume any input. This parser can be used to implement the + -- \'longest match\' rule. For example, when recognizing keywords (for + -- example @let@), we want to make sure that a keyword is not followed + -- by a legal identifier character, in which case the keyword is + -- actually an identifier (for example @lets@). We can program this + -- behaviour as follows: + -- + -- > keywordLet = try $ string "let" <* notFollowedBy alphaNum + notFollowedBy :: Show a => m a -> m () + +instance (Parsing m, MonadPlus m) => Parsing (Lazy.StateT s m) where + try (Lazy.StateT m) = Lazy.StateT $ try . m + {-# INLINE try #-} + Lazy.StateT m l = Lazy.StateT $ \s -> m s l + {-# INLINE () #-} + unexpected = lift . 
unexpected + {-# INLINE unexpected #-} + eof = lift eof + {-# INLINE eof #-} + notFollowedBy (Lazy.StateT m) = Lazy.StateT + $ \s -> notFollowedBy (fst <$> m s) >> return ((),s) + {-# INLINE notFollowedBy #-} + +instance (Parsing m, MonadPlus m) => Parsing (Strict.StateT s m) where + try (Strict.StateT m) = Strict.StateT $ try . m + {-# INLINE try #-} + Strict.StateT m l = Strict.StateT $ \s -> m s l + {-# INLINE () #-} + unexpected = lift . unexpected + {-# INLINE unexpected #-} + eof = lift eof + {-# INLINE eof #-} + notFollowedBy (Strict.StateT m) = Strict.StateT + $ \s -> notFollowedBy (fst <$> m s) >> return ((),s) + {-# INLINE notFollowedBy #-} + +instance (Parsing m, MonadPlus m) => Parsing (ReaderT e m) where + try (ReaderT m) = ReaderT $ try . m + {-# INLINE try #-} + ReaderT m l = ReaderT $ \e -> m e l + {-# INLINE () #-} + skipMany (ReaderT m) = ReaderT $ skipMany . m + {-# INLINE skipMany #-} + unexpected = lift . unexpected + {-# INLINE unexpected #-} + eof = lift eof + {-# INLINE eof #-} + notFollowedBy (ReaderT m) = ReaderT $ notFollowedBy . m + {-# INLINE notFollowedBy #-} + +instance (Parsing m, MonadPlus m, Monoid w) => Parsing (Strict.WriterT w m) where + try (Strict.WriterT m) = Strict.WriterT $ try m + {-# INLINE try #-} + Strict.WriterT m l = Strict.WriterT (m l) + {-# INLINE () #-} + unexpected = lift . unexpected + {-# INLINE unexpected #-} + eof = lift eof + {-# INLINE eof #-} + notFollowedBy (Strict.WriterT m) = Strict.WriterT + $ notFollowedBy (fst <$> m) >>= \x -> return (x, mempty) + {-# INLINE notFollowedBy #-} + +instance (Parsing m, MonadPlus m, Monoid w) => Parsing (Lazy.WriterT w m) where + try (Lazy.WriterT m) = Lazy.WriterT $ try m + {-# INLINE try #-} + Lazy.WriterT m l = Lazy.WriterT (m l) + {-# INLINE () #-} + unexpected = lift . 
unexpected + {-# INLINE unexpected #-} + eof = lift eof + {-# INLINE eof #-} + notFollowedBy (Lazy.WriterT m) = Lazy.WriterT + $ notFollowedBy (fst <$> m) >>= \x -> return (x, mempty) + {-# INLINE notFollowedBy #-} + +instance (Parsing m, MonadPlus m, Monoid w) => Parsing (Lazy.RWST r w s m) where + try (Lazy.RWST m) = Lazy.RWST $ \r s -> try (m r s) + {-# INLINE try #-} + Lazy.RWST m l = Lazy.RWST $ \r s -> m r s l + {-# INLINE () #-} + unexpected = lift . unexpected + {-# INLINE unexpected #-} + eof = lift eof + {-# INLINE eof #-} + notFollowedBy (Lazy.RWST m) = Lazy.RWST + $ \r s -> notFollowedBy ((\(a,_,_) -> a) <$> m r s) >>= \x -> return (x, s, mempty) + {-# INLINE notFollowedBy #-} + +instance (Parsing m, MonadPlus m, Monoid w) => Parsing (Strict.RWST r w s m) where + try (Strict.RWST m) = Strict.RWST $ \r s -> try (m r s) + {-# INLINE try #-} + Strict.RWST m l = Strict.RWST $ \r s -> m r s l + {-# INLINE () #-} + unexpected = lift . unexpected + {-# INLINE unexpected #-} + eof = lift eof + {-# INLINE eof #-} + notFollowedBy (Strict.RWST m) = Strict.RWST + $ \r s -> notFollowedBy ((\(a,_,_) -> a) <$> m r s) >>= \x -> return (x, s, mempty) + {-# INLINE notFollowedBy #-} + +instance (Parsing m, Monad m) => Parsing (IdentityT m) where + try = IdentityT . try . runIdentityT + {-# INLINE try #-} + IdentityT m l = IdentityT (m l) + {-# INLINE () #-} + skipMany = IdentityT . skipMany . runIdentityT + {-# INLINE skipMany #-} + unexpected = lift . unexpected + {-# INLINE unexpected #-} + eof = lift eof + {-# INLINE eof #-} + notFollowedBy (IdentityT m) = IdentityT $ notFollowedBy m + {-# INLINE notFollowedBy #-} + +instance (Parsec.Stream s m t, Show t) => Parsing (Parsec.ParsecT s u m) where + try = Parsec.try + () = (Parsec.) 
+ skipMany = Parsec.skipMany + skipSome = Parsec.skipMany1 + unexpected = Parsec.unexpected + eof = Parsec.eof + notFollowedBy = Parsec.notFollowedBy + +instance t ~ Char => Parsing (ReadP.Parser r t) where + try = id + () = const + skipMany = ReadP.skipMany + skipSome = ReadP.skipMany1 + unexpected = const ReadP.pfail + eof = ReadP.eof + + -- TODO: we would like to have <++ here + notFollowedBy p = ((Just <$> p) ReadP.+++ pure Nothing) + >>= maybe (pure ()) (unexpected . show) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Prelude/Internal.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Prelude/Internal.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Prelude/Internal.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Prelude/Internal.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,14 @@ +-- | This module re-exports the non-exposed +-- "Distribution.Compat.Prelude" module for +-- reuse by @cabal-install@'s +-- "Distribution.Client.Compat.Prelude" module. +-- +-- It is highly discouraged to rely on this module +-- for @Setup.hs@ scripts since its API is /not/ +-- stable. +module Distribution.Compat.Prelude.Internal + {-# WARNING "This modules' API is not stable. Use at your own risk, or better yet, use @base-compat@!" 
#-} + ( module Distribution.Compat.Prelude + ) where + +import Distribution.Compat.Prelude diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Prelude.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Prelude.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Prelude.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Prelude.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,204 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE TypeOperators #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE FlexibleContexts #-} + +#ifdef MIN_VERSION_base +#define MINVER_base_48 MIN_VERSION_base(4,8,0) +#define MINVER_base_47 MIN_VERSION_base(4,7,0) +#else +#define MINVER_base_48 (__GLASGOW_HASKELL__ >= 710) +#define MINVER_base_47 (__GLASGOW_HASKELL__ >= 708) +#endif + +-- | This module does two things: +-- +-- * Acts as a compatiblity layer, like @base-compat@. +-- +-- * Provides commonly used imports. 
+module Distribution.Compat.Prelude ( + -- * Prelude + -- + -- Prelude is re-exported, following is hidden: + module BasePrelude, + +#if !MINVER_base_48 + -- * base 4.8 shim + Applicative(..), (<$), (<$>), + Monoid(..), +#endif + + -- * Common type-classes + Semigroup (..), + gmappend, gmempty, + Typeable, + Data, + Generic, + NFData (..), genericRnf, + Binary (..), + Alternative (..), + MonadPlus (..), + IsString (..), + + -- * Some types + IO, NoCallStackIO, + Map, + + -- * Data.Maybe + catMaybes, mapMaybe, + fromMaybe, + maybeToList, listToMaybe, + isNothing, isJust, + + -- * Data.List + unfoldr, + isPrefixOf, isSuffixOf, + intercalate, intersperse, + sort, sortBy, + nub, nubBy, + + -- * Data.Foldable + Foldable, foldMap, foldr, + null, length, + find, foldl', + traverse_, for_, + any, all, + + -- * Data.Traversable + Traversable, traverse, sequenceA, + for, + + -- * Control.Arrow + first, + + -- * Control.Monad + liftM, liftM2, + unless, when, + ap, void, + foldM, filterM, + + -- * Data.Char + isSpace, isDigit, isUpper, isAlpha, isAlphaNum, + chr, ord, + toLower, toUpper, + + -- * Data.Word & Data.Int + Word, + Word8, Word16, Word32, Word64, + Int8, Int16, Int32, Int64, + + -- * Text.PrettyPrint + (<<>>), + ) where + +-- We also could hide few partial function +import Prelude as BasePrelude hiding + ( IO, mapM, mapM_, sequence, null, length, foldr, any, all +#if MINVER_base_48 + , Word + -- We hide them, as we import only some members + , Traversable, traverse, sequenceA + , Foldable, foldMap +#endif + ) + +#if !MINVER_base_48 +import Control.Applicative (Applicative (..), (<$), (<$>)) +import Distribution.Compat.Semigroup (Monoid (..)) +#else +import Data.Foldable (length, null) +#endif + +import Data.Foldable (Foldable (foldMap, foldr), find, foldl', for_, traverse_, any, all) +import Data.Traversable (Traversable (traverse, sequenceA), for) + +import Control.Applicative (Alternative (..)) +import Control.DeepSeq (NFData (..)) +import Data.Data (Data) +import 
Data.Typeable (Typeable) +import Distribution.Compat.Binary (Binary (..)) +import Distribution.Compat.Semigroup (Semigroup (..), gmappend, gmempty) +import GHC.Generics (Generic, Rep(..), + V1, U1(U1), K1(unK1), M1(unM1), + (:*:)((:*:)), (:+:)(L1,R1)) + +import Data.Map (Map) + +import Control.Arrow (first) +import Control.Monad hiding (mapM) +import Data.Char +import Data.List (intercalate, intersperse, isPrefixOf, + isSuffixOf, nub, nubBy, sort, sortBy, + unfoldr) +import Data.Maybe +import Data.String (IsString (..)) +import Data.Int +import Data.Word + +import qualified Text.PrettyPrint as Disp + +import qualified Prelude as OrigPrelude +import Distribution.Compat.Stack + +type IO a = WithCallStack (OrigPrelude.IO a) +type NoCallStackIO a = OrigPrelude.IO a + +-- | New name for 'Text.PrettyPrint.<>' +(<<>>) :: Disp.Doc -> Disp.Doc -> Disp.Doc +(<<>>) = (Disp.<>) + +#if !MINVER_base_48 +-- | Test whether the structure is empty. The default implementation is +-- optimized for structures that are similar to cons-lists, because there +-- is no general way to do better. +null :: Foldable t => t a -> Bool +null = foldr (\_ _ -> False) True + +-- | Returns the size/length of a finite structure as an 'Int'. The +-- default implementation is optimized for structures that are similar to +-- cons-lists, because there is no general way to do better. +length :: Foldable t => t a -> Int +length = foldl' (\c _ -> c+1) 0 +#endif + + +-- | "GHC.Generics"-based 'rnf' implementation +-- +-- This is needed in order to support @deepseq < 1.4@ which didn't +-- have a 'Generic'-based default 'rnf' implementation yet. +-- +-- In order to define instances, use e.g. +-- +-- > instance NFData MyType where rnf = genericRnf +-- +-- The implementation has been taken from @deepseq-1.4.2@'s default +-- 'rnf' implementation. +genericRnf :: (Generic a, GNFData (Rep a)) => a -> () +genericRnf = grnf . 
from + +-- | Hidden internal type-class +class GNFData f where + grnf :: f a -> () + +instance GNFData V1 where + grnf = error "Control.DeepSeq.rnf: uninhabited type" + +instance GNFData U1 where + grnf U1 = () + +instance NFData a => GNFData (K1 i a) where + grnf = rnf . unK1 + {-# INLINEABLE grnf #-} + +instance GNFData a => GNFData (M1 i c a) where + grnf = grnf . unM1 + {-# INLINEABLE grnf #-} + +instance (GNFData a, GNFData b) => GNFData (a :*: b) where + grnf (x :*: y) = grnf x `seq` grnf y + {-# INLINEABLE grnf #-} + +instance (GNFData a, GNFData b) => GNFData (a :+: b) where + grnf (L1 x) = grnf x + grnf (R1 x) = grnf x + {-# INLINEABLE grnf #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/ReadP.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/ReadP.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/ReadP.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/ReadP.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,424 @@ +{-# LANGUAGE GADTs #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.ReadP +-- Copyright : (c) The University of Glasgow 2002 +-- License : BSD-style (see the file libraries/base/LICENSE) +-- +-- Maintainer : libraries@haskell.org +-- Portability : portable +-- +-- This is a library of parser combinators, originally written by Koen Claessen. +-- It parses all alternatives in parallel, so it never keeps hold of +-- the beginning of the input string, a common source of space leaks with +-- other parsers. The '(+++)' choice combinator is genuinely commutative; +-- it makes no difference which branch is \"shorter\". +-- +-- See also Koen's paper /Parallel Parsing Processes/ +-- (). 
+-- +-- This version of ReadP has been locally hacked to make it H98, by +-- Martin Sjögren +-- +-- The unit tests have been moved to UnitTest.Distribution.Compat.ReadP, by +-- Mark Lentczner +----------------------------------------------------------------------------- + +module Distribution.Compat.ReadP + ( + -- * The 'ReadP' type + ReadP, -- :: * -> *; instance Functor, Monad, MonadPlus + + -- * Primitive operations + get, -- :: ReadP Char + look, -- :: ReadP String + (+++), -- :: ReadP a -> ReadP a -> ReadP a + (<++), -- :: ReadP a -> ReadP a -> ReadP a + gather, -- :: ReadP a -> ReadP (String, a) + + -- * Other operations + pfail, -- :: ReadP a + eof, -- :: ReadP () + satisfy, -- :: (Char -> Bool) -> ReadP Char + char, -- :: Char -> ReadP Char + string, -- :: String -> ReadP String + munch, -- :: (Char -> Bool) -> ReadP String + munch1, -- :: (Char -> Bool) -> ReadP String + skipSpaces, -- :: ReadP () + skipSpaces1,-- :: ReadP () + choice, -- :: [ReadP a] -> ReadP a + count, -- :: Int -> ReadP a -> ReadP [a] + between, -- :: ReadP open -> ReadP close -> ReadP a -> ReadP a + option, -- :: a -> ReadP a -> ReadP a + optional, -- :: ReadP a -> ReadP () + many, -- :: ReadP a -> ReadP [a] + many1, -- :: ReadP a -> ReadP [a] + skipMany, -- :: ReadP a -> ReadP () + skipMany1, -- :: ReadP a -> ReadP () + sepBy, -- :: ReadP a -> ReadP sep -> ReadP [a] + sepBy1, -- :: ReadP a -> ReadP sep -> ReadP [a] + endBy, -- :: ReadP a -> ReadP sep -> ReadP [a] + endBy1, -- :: ReadP a -> ReadP sep -> ReadP [a] + chainr, -- :: ReadP a -> ReadP (a -> a -> a) -> a -> ReadP a + chainl, -- :: ReadP a -> ReadP (a -> a -> a) -> a -> ReadP a + chainl1, -- :: ReadP a -> ReadP (a -> a -> a) -> ReadP a + chainr1, -- :: ReadP a -> ReadP (a -> a -> a) -> ReadP a + manyTill, -- :: ReadP a -> ReadP end -> ReadP [a] + + -- * Running a parser + ReadS, -- :: *; = String -> [(a,String)] + readP_to_S, -- :: ReadP a -> ReadS a + readS_to_P, -- :: ReadS a -> ReadP a + + -- ** Internal + Parser, + ) + 
where + +import Prelude () +import Distribution.Compat.Prelude hiding (many, get) + +import qualified Distribution.Compat.MonadFail as Fail + +import Control.Monad( replicateM, (>=>) ) + +infixr 5 +++, <++ + +-- --------------------------------------------------------------------------- +-- The P type +-- is representation type -- should be kept abstract + +data P s a + = Get (s -> P s a) + | Look ([s] -> P s a) + | Fail + | Result a (P s a) + | Final [(a,[s])] -- invariant: list is non-empty! + +-- Monad, MonadPlus + +instance Functor (P s) where + fmap = liftM + +instance Applicative (P s) where + pure x = Result x Fail + (<*>) = ap + +instance Monad (P s) where + return = pure + + (Get f) >>= k = Get (f >=> k) + (Look f) >>= k = Look (f >=> k) + Fail >>= _ = Fail + (Result x p) >>= k = k x `mplus` (p >>= k) + (Final r) >>= k = final [ys' | (x,s) <- r, ys' <- run (k x) s] + + fail = Fail.fail + +instance Fail.MonadFail (P s) where + fail _ = Fail + +instance Alternative (P s) where + empty = mzero + (<|>) = mplus + +instance MonadPlus (P s) where + mzero = Fail + + -- most common case: two gets are combined + Get f1 `mplus` Get f2 = Get (\c -> f1 c `mplus` f2 c) + + -- results are delivered as soon as possible + Result x p `mplus` q = Result x (p `mplus` q) + p `mplus` Result x q = Result x (p `mplus` q) + + -- fail disappears + Fail `mplus` p = p + p `mplus` Fail = p + + -- two finals are combined + -- final + look becomes one look and one final (=optimization) + -- final + sthg else becomes one look and one final + Final r `mplus` Final t = Final (r ++ t) + Final r `mplus` Look f = Look (\s -> Final (r ++ run (f s) s)) + Final r `mplus` p = Look (\s -> Final (r ++ run p s)) + Look f `mplus` Final r = Look (\s -> Final (run (f s) s ++ r)) + p `mplus` Final r = Look (\s -> Final (run p s ++ r)) + + -- two looks are combined (=optimization) + -- look + sthg else floats upwards + Look f `mplus` Look g = Look (\s -> f s `mplus` g s) + Look f `mplus` p = Look (\s -> 
f s `mplus` p) + p `mplus` Look f = Look (\s -> p `mplus` f s) + +-- --------------------------------------------------------------------------- +-- The ReadP type + +newtype Parser r s a = R ((a -> P s r) -> P s r) +type ReadP r a = Parser r Char a + +-- Functor, Monad, MonadPlus + +instance Functor (Parser r s) where + fmap h (R f) = R (\k -> f (k . h)) + +instance Applicative (Parser r s) where + pure x = R (\k -> k x) + (<*>) = ap + +instance s ~ Char => Alternative (Parser r s) where + empty = pfail + (<|>) = (+++) + +instance Monad (Parser r s) where + return = pure + fail = Fail.fail + R m >>= f = R (\k -> m (\a -> let R m' = f a in m' k)) + +instance Fail.MonadFail (Parser r s) where + fail _ = R (const Fail) + +instance s ~ Char => MonadPlus (Parser r s) where + mzero = pfail + mplus = (+++) + +-- --------------------------------------------------------------------------- +-- Operations over P + +final :: [(a,[s])] -> P s a +-- Maintains invariant for Final constructor +final [] = Fail +final r = Final r + +run :: P c a -> ([c] -> [(a, [c])]) +run (Get f) (c:s) = run (f c) s +run (Look f) s = run (f s) s +run (Result x p) s = (x,s) : run p s +run (Final r) _ = r +run _ _ = [] + +-- --------------------------------------------------------------------------- +-- Operations over ReadP + +get :: ReadP r Char +-- ^ Consumes and returns the next character. +-- Fails if there is no input left. +get = R Get + +look :: ReadP r String +-- ^ Look-ahead: returns the part of the input that is left, without +-- consuming it. +look = R Look + +pfail :: ReadP r a +-- ^ Always fails. +pfail = R (const Fail) + +eof :: ReadP r () +-- ^ Succeeds iff we are at the end of input +eof = do { s <- look + ; if null s then return () + else pfail } + +(+++) :: ReadP r a -> ReadP r a -> ReadP r a +-- ^ Symmetric choice. 
+R f1 +++ R f2 = R (\k -> f1 k `mplus` f2 k) + +(<++) :: ReadP a a -> ReadP r a -> ReadP r a +-- ^ Local, exclusive, left-biased choice: If left parser +-- locally produces any result at all, then right parser is +-- not used. +R f <++ q = + do s <- look + probe (f return) s 0 + where + probe (Get f') (c:s) n = probe (f' c) s (n+1 :: Int) + probe (Look f') s n = probe (f' s) s n + probe p@(Result _ _) _ n = discard n >> R (p >>=) + probe (Final r) _ _ = R (Final r >>=) + probe _ _ _ = q + + discard 0 = return () + discard n = get >> discard (n-1 :: Int) + +gather :: ReadP (String -> P Char r) a -> ReadP r (String, a) +-- ^ Transforms a parser into one that does the same, but +-- in addition returns the exact characters read. +-- IMPORTANT NOTE: 'gather' gives a runtime error if its first argument +-- is built using any occurrences of readS_to_P. +gather (R m) = + R (\k -> gath id (m (\a -> return (\s -> k (s,a))))) + where + gath l (Get f) = Get (\c -> gath (l.(c:)) (f c)) + gath _ Fail = Fail + gath l (Look f) = Look (gath l . f) + gath l (Result k p) = k (l []) `mplus` gath l p + gath _ (Final _) = error "do not use readS_to_P in gather!" + +-- --------------------------------------------------------------------------- +-- Derived operations + +satisfy :: (Char -> Bool) -> ReadP r Char +-- ^ Consumes and returns the next character, if it satisfies the +-- specified predicate. +satisfy p = do c <- get; if p c then return c else pfail + +char :: Char -> ReadP r Char +-- ^ Parses and returns the specified character. +char c = satisfy (c ==) + +string :: String -> ReadP r String +-- ^ Parses and returns the specified string. +string this = do s <- look; scan this s + where + scan [] _ = return this + scan (x:xs) (y:ys) | x == y = get >> scan xs ys + scan _ _ = pfail + +munch :: (Char -> Bool) -> ReadP r String +-- ^ Parses the first zero or more characters satisfying the predicate. 
+munch p = + do s <- look + scan s + where + scan (c:cs) | p c = do _ <- get; s <- scan cs; return (c:s) + scan _ = do return "" + +munch1 :: (Char -> Bool) -> ReadP r String +-- ^ Parses the first one or more characters satisfying the predicate. +munch1 p = + do c <- get + if p c then do s <- munch p; return (c:s) + else pfail + +choice :: [ReadP r a] -> ReadP r a +-- ^ Combines all parsers in the specified list. +choice [] = pfail +choice [p] = p +choice (p:ps) = p +++ choice ps + +skipSpaces :: ReadP r () +-- ^ Skips all whitespace. +skipSpaces = + do s <- look + skip s + where + skip (c:s) | isSpace c = do _ <- get; skip s + skip _ = do return () + +skipSpaces1 :: ReadP r () +-- ^ Like 'skipSpaces' but succeeds only if there is at least one +-- whitespace character to skip. +skipSpaces1 = satisfy isSpace >> skipSpaces + +count :: Int -> ReadP r a -> ReadP r [a] +-- ^ @ count n p @ parses @n@ occurrences of @p@ in sequence. A list of +-- results is returned. +count n p = replicateM n p + +between :: ReadP r open -> ReadP r close -> ReadP r a -> ReadP r a +-- ^ @ between open close p @ parses @open@, followed by @p@ and finally +-- @close@. Only the value of @p@ is returned. +between open close p = do _ <- open + x <- p + _ <- close + return x + +option :: a -> ReadP r a -> ReadP r a +-- ^ @option x p@ will either parse @p@ or return @x@ without consuming +-- any input. +option x p = p +++ return x + +optional :: ReadP r a -> ReadP r () +-- ^ @optional p@ optionally parses @p@ and always returns @()@. +optional p = (p >> return ()) +++ return () + +many :: ReadP r a -> ReadP r [a] +-- ^ Parses zero or more occurrences of the given parser. +many p = return [] +++ many1 p + +many1 :: ReadP r a -> ReadP r [a] +-- ^ Parses one or more occurrences of the given parser. +many1 p = liftM2 (:) p (many p) + +skipMany :: ReadP r a -> ReadP r () +-- ^ Like 'many', but discards the result. 
+skipMany p = many p >> return ()
+
+skipMany1 :: ReadP r a -> ReadP r ()
+-- ^ Like 'many1', but discards the result.
+skipMany1 p = p >> skipMany p
+
+sepBy :: ReadP r a -> ReadP r sep -> ReadP r [a]
+-- ^ @sepBy p sep@ parses zero or more occurrences of @p@, separated by @sep@.
+-- Returns a list of values returned by @p@.
+sepBy p sep = sepBy1 p sep +++ return []
+
+sepBy1 :: ReadP r a -> ReadP r sep -> ReadP r [a]
+-- ^ @sepBy1 p sep@ parses one or more occurrences of @p@, separated by @sep@.
+-- Returns a list of values returned by @p@.
+sepBy1 p sep = liftM2 (:) p (many (sep >> p))
+
+endBy :: ReadP r a -> ReadP r sep -> ReadP r [a]
+-- ^ @endBy p sep@ parses zero or more occurrences of @p@, separated and ended
+-- by @sep@.
+endBy p sep = many (do x <- p ; _ <- sep ; return x)
+
+endBy1 :: ReadP r a -> ReadP r sep -> ReadP r [a]
+-- ^ @endBy1 p sep@ parses one or more occurrences of @p@, separated and ended
+-- by @sep@.
+endBy1 p sep = many1 (do x <- p ; _ <- sep ; return x)
+
+chainr :: ReadP r a -> ReadP r (a -> a -> a) -> a -> ReadP r a
+-- ^ @chainr p op x@ parses zero or more occurrences of @p@, separated by @op@.
+-- Returns a value produced by a /right/ associative application of all
+-- functions returned by @op@. If there are no occurrences of @p@, @x@ is
+-- returned.
+chainr p op x = chainr1 p op +++ return x
+
+chainl :: ReadP r a -> ReadP r (a -> a -> a) -> a -> ReadP r a
+-- ^ @chainl p op x@ parses zero or more occurrences of @p@, separated by @op@.
+-- Returns a value produced by a /left/ associative application of all
+-- functions returned by @op@. If there are no occurrences of @p@, @x@ is
+-- returned.
+chainl p op x = chainl1 p op +++ return x
+
+chainr1 :: ReadP r a -> ReadP r (a -> a -> a) -> ReadP r a
+-- ^ Like 'chainr', but parses one or more occurrences of @p@.
+chainr1 p op = scan + where scan = p >>= rest + rest x = do f <- op + y <- scan + return (f x y) + +++ return x + +chainl1 :: ReadP r a -> ReadP r (a -> a -> a) -> ReadP r a +-- ^ Like 'chainl', but parses one or more occurrences of @p@. +chainl1 p op = p >>= rest + where rest x = do f <- op + y <- p + rest (f x y) + +++ return x + +manyTill :: ReadP r a -> ReadP [a] end -> ReadP r [a] +-- ^ @manyTill p end@ parses zero or more occurrences of @p@, until @end@ +-- succeeds. Returns a list of values returned by @p@. +manyTill p end = scan + where scan = (end >> return []) <++ (liftM2 (:) p scan) + +-- --------------------------------------------------------------------------- +-- Converting between ReadP and Read + +readP_to_S :: ReadP a a -> ReadS a +-- ^ Converts a parser into a Haskell ReadS-style function. +-- This is the main way in which you can \"run\" a 'ReadP' parser: +-- the expanded type is +-- @ readP_to_S :: ReadP a -> String -> [(a,String)] @ +readP_to_S (R f) = run (f return) + +readS_to_P :: ReadS a -> ReadP r a +-- ^ Converts a Haskell ReadS-style function into a parser. +-- Warning: This introduces local backtracking in the resulting +-- parser, and therefore a possible inefficiency. 
+readS_to_P r = + R (\k -> Look (\s -> final [bs'' | (a,s') <- r s, bs'' <- run (k a) s'])) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Semigroup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Semigroup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Semigroup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Semigroup.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,171 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE TypeOperators #-} + +-- | Compatibility layer for "Data.Semigroup" +module Distribution.Compat.Semigroup + ( Semigroup((<>)) + , Mon.Monoid(..) + , All(..) + , Any(..) + + , Last'(..) + + , gmappend + , gmempty + ) where + +import Distribution.Compat.Binary (Binary) + +import Control.Applicative as App +import GHC.Generics +#if __GLASGOW_HASKELL__ >= 711 +-- Data.Semigroup is available since GHC 8.0/base-4.9 +import Data.Semigroup +import qualified Data.Monoid as Mon +#else +-- provide internal simplified non-exposed class for older GHCs +import Data.Monoid as Mon (Monoid(..), All(..), Any(..), Dual(..)) +-- containers +import Data.Set (Set) +import Data.IntSet (IntSet) +import Data.Map (Map) +import Data.IntMap (IntMap) + + +class Semigroup a where + (<>) :: a -> a -> a + +-- several primitive instances +instance Semigroup () where + _ <> _ = () + +instance Semigroup [a] where + (<>) = (++) + +instance Semigroup a => Semigroup (Dual a) where + Dual a <> Dual b = Dual (b <> a) + +instance Semigroup a => Semigroup (Maybe a) where + Nothing <> b = b + a <> Nothing = a + Just a <> Just b = Just (a <> b) + +instance Semigroup (Either a b) where + Left _ <> b = b + a <> _ = a + +instance Semigroup Ordering where + LT <> _ = LT + EQ <> y = y + GT <> _ = GT 
+ +instance Semigroup b => Semigroup (a -> b) where + f <> g = \a -> f a <> g a + +instance Semigroup All where + All a <> All b = All (a && b) + +instance Semigroup Any where + Any a <> Any b = Any (a || b) + +instance (Semigroup a, Semigroup b) => Semigroup (a, b) where + (a,b) <> (a',b') = (a<>a',b<>b') + +instance (Semigroup a, Semigroup b, Semigroup c) + => Semigroup (a, b, c) where + (a,b,c) <> (a',b',c') = (a<>a',b<>b',c<>c') + +instance (Semigroup a, Semigroup b, Semigroup c, Semigroup d) + => Semigroup (a, b, c, d) where + (a,b,c,d) <> (a',b',c',d') = (a<>a',b<>b',c<>c',d<>d') + +instance (Semigroup a, Semigroup b, Semigroup c, Semigroup d, Semigroup e) + => Semigroup (a, b, c, d, e) where + (a,b,c,d,e) <> (a',b',c',d',e') = (a<>a',b<>b',c<>c',d<>d',e<>e') + +-- containers instances +instance Semigroup IntSet where + (<>) = mappend + +instance Ord a => Semigroup (Set a) where + (<>) = mappend + +instance Semigroup (IntMap v) where + (<>) = mappend + +instance Ord k => Semigroup (Map k v) where + (<>) = mappend +#endif + +-- | Cabal's own 'Data.Monoid.Last' copy to avoid requiring an orphan +-- 'Binary' instance. +-- +-- Once the oldest `binary` version we support provides a 'Binary' +-- instance for 'Data.Monoid.Last' we can remove this one here. +-- +-- NB: 'Data.Semigroup.Last' is defined differently and not a 'Monoid' +newtype Last' a = Last' { getLast' :: Maybe a } + deriving (Eq, Ord, Read, Show, Binary, + Functor, App.Applicative, Generic) + +instance Semigroup (Last' a) where + x <> Last' Nothing = x + _ <> x = x + +instance Monoid (Last' a) where + mempty = Last' Nothing + mappend = (<>) + +------------------------------------------------------------------------------- +------------------------------------------------------------------------------- +-- Stolen from Edward Kmett's BSD3-licensed `semigroups` package + +-- | Generically generate a 'Semigroup' ('<>') operation for any type +-- implementing 'Generic'. 
This operation will append two values +-- by point-wise appending their component fields. It is only defined +-- for product types. +-- +-- @ +-- 'gmappend' a ('gmappend' b c) = 'gmappend' ('gmappend' a b) c +-- @ +gmappend :: (Generic a, GSemigroup (Rep a)) => a -> a -> a +gmappend x y = to (gmappend' (from x) (from y)) + +class GSemigroup f where + gmappend' :: f p -> f p -> f p + +instance Semigroup a => GSemigroup (K1 i a) where + gmappend' (K1 x) (K1 y) = K1 (x <> y) + +instance GSemigroup f => GSemigroup (M1 i c f) where + gmappend' (M1 x) (M1 y) = M1 (gmappend' x y) + +instance (GSemigroup f, GSemigroup g) => GSemigroup (f :*: g) where + gmappend' (x1 :*: x2) (y1 :*: y2) = gmappend' x1 y1 :*: gmappend' x2 y2 + +-- | Generically generate a 'Monoid' 'mempty' for any product-like type +-- implementing 'Generic'. +-- +-- It is only defined for product types. +-- +-- @ +-- 'gmappend' 'gmempty' a = a = 'gmappend' a 'gmempty' +-- @ + +gmempty :: (Generic a, GMonoid (Rep a)) => a +gmempty = to gmempty' + +class GSemigroup f => GMonoid f where + gmempty' :: f p + +instance (Semigroup a, Monoid a) => GMonoid (K1 i a) where + gmempty' = K1 mempty + +instance GMonoid f => GMonoid (M1 i c f) where + gmempty' = M1 gmempty' + +instance (GMonoid f, GMonoid g) => GMonoid (f :*: g) where + gmempty' = gmempty' :*: gmempty' diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/SnocList.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/SnocList.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/SnocList.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/SnocList.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,33 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.SnocList +-- License : BSD3 +-- +-- Maintainer : 
cabal-dev@haskell.org +-- Stability : experimental +-- Portability : portable +-- +-- A very reversed list. Has efficient `snoc` +module Distribution.Compat.SnocList ( + SnocList, + runSnocList, + snoc, +) where + +import Prelude () +import Distribution.Compat.Prelude + +newtype SnocList a = SnocList [a] + +snoc :: SnocList a -> a -> SnocList a +snoc (SnocList xs) x = SnocList (x : xs) + +runSnocList :: SnocList a -> [a] +runSnocList (SnocList xs) = reverse xs + +instance Semigroup (SnocList a) where + SnocList xs <> SnocList ys = SnocList (ys <> xs) + +instance Monoid (SnocList a) where + mempty = SnocList [] + mappend = (<>) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Stack.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Stack.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Stack.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Stack.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,113 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE ImplicitParams #-} +module Distribution.Compat.Stack ( + WithCallStack, + CallStack, + annotateCallStackIO, + withFrozenCallStack, + withLexicalCallStack, + callStack, + prettyCallStack, + parentSrcLocPrefix +) where + +import System.IO.Error + +#ifdef MIN_VERSION_base +#if MIN_VERSION_base(4,8,1) +#define GHC_STACK_SUPPORTED 1 +#endif +#endif + +#ifdef GHC_STACK_SUPPORTED +import GHC.Stack +#endif + +#ifdef GHC_STACK_SUPPORTED + +#if MIN_VERSION_base(4,9,0) +type WithCallStack a = HasCallStack => a +#elif MIN_VERSION_base(4,8,1) +type WithCallStack a = (?callStack :: CallStack) => a +#endif + +#if !MIN_VERSION_base(4,9,0) +-- NB: Can't say WithCallStack (WithCallStack a -> a); +-- Haskell doesn't support this kind of implicit parameter! 
+-- See https://mail.haskell.org/pipermail/ghc-devs/2016-January/011096.html +-- Since this function doesn't do anything, it's OK to +-- give it a less good type. +withFrozenCallStack :: WithCallStack (a -> a) +withFrozenCallStack x = x + +callStack :: (?callStack :: CallStack) => CallStack +callStack = ?callStack + +prettyCallStack :: CallStack -> String +prettyCallStack = showCallStack +#endif + +-- | Give the *parent* of the person who invoked this; +-- so it's most suitable for being called from a utility function. +-- You probably want to call this using 'withFrozenCallStack'; otherwise +-- it's not very useful. We didn't implement this for base-4.8.1 +-- because we cannot rely on freezing to have taken place. +-- +parentSrcLocPrefix :: WithCallStack String +#if MIN_VERSION_base(4,9,0) +parentSrcLocPrefix = + case getCallStack callStack of + (_:(_, loc):_) -> showLoc loc + [(_, loc)] -> showLoc loc + [] -> error "parentSrcLocPrefix: empty call stack" + where + showLoc loc = + srcLocFile loc ++ ":" ++ show (srcLocStartLine loc) ++ ": " +#else +parentSrcLocPrefix = "Call sites not available with base < 4.9.0.0 (GHC 8.0): " +#endif + +-- Yeah, this uses skivvy implementation details. 
+withLexicalCallStack :: (a -> WithCallStack (IO b)) -> WithCallStack (a -> IO b) +withLexicalCallStack f = + let stk = ?callStack + in \x -> let ?callStack = stk in f x + +#else + +data CallStack = CallStack + deriving (Eq, Show) + +type WithCallStack a = a + +withFrozenCallStack :: a -> a +withFrozenCallStack x = x + +callStack :: CallStack +callStack = CallStack + +prettyCallStack :: CallStack -> String +prettyCallStack _ = "Call stacks not available with base < 4.8.1.0 (GHC 7.10)" + +parentSrcLocPrefix :: String +parentSrcLocPrefix = "Call sites not available with base < 4.9.0.0 (GHC 8.0): " + +withLexicalCallStack :: (a -> IO b) -> a -> IO b +withLexicalCallStack f = f + +#endif + +-- | This function is for when you *really* want to add a call +-- stack to raised IO, but you don't have a +-- 'Distribution.Verbosity.Verbosity' so you can't use +-- 'Distribution.Simple.Utils.annotateIO'. If you have a 'Verbosity', +-- please use that function instead. +annotateCallStackIO :: WithCallStack (IO a -> IO a) +annotateCallStackIO = modifyIOError f + where + f ioe = ioeSetErrorString ioe + . wrapCallStack + $ ioeGetErrorString ioe + wrapCallStack s = + prettyCallStack callStack ++ "\n" ++ s diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Time.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Time.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compat/Time.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compat/Time.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,190 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE ScopedTypeVariables #-} + +module Distribution.Compat.Time + ( ModTime(..) 
-- Needed for testing + , getModTime, getFileAge, getCurTime + , posixSecondsToModTime + , calibrateMtimeChangeDelay ) + where + +import Prelude () +import Distribution.Compat.Prelude + +import System.Directory ( getModificationTime ) + +import Distribution.Simple.Utils ( withTempDirectory ) +import Distribution.Verbosity ( silent ) + +import System.FilePath + +import Data.Time.Clock.POSIX ( POSIXTime, getPOSIXTime ) +import Data.Time ( diffUTCTime, getCurrentTime ) +import Data.Time.Clock.POSIX ( posixDayLength ) + + +#if defined mingw32_HOST_OS + +import qualified Prelude +import Data.Bits ((.|.), unsafeShiftL) +#if MIN_VERSION_base(4,7,0) +import Data.Bits (finiteBitSize) +#else +import Data.Bits (bitSize) +#endif + +import Foreign ( allocaBytes, peekByteOff ) +import System.IO.Error ( mkIOError, doesNotExistErrorType ) +import System.Win32.Types ( BOOL, DWORD, LPCTSTR, LPVOID, withTString ) + +#else + +import System.Posix.Files ( FileStatus, getFileStatus ) + +#if MIN_VERSION_unix(2,6,0) +import System.Posix.Files ( modificationTimeHiRes ) +#else +import System.Posix.Files ( modificationTime ) +#endif + +#endif + +-- | An opaque type representing a file's modification time, represented +-- internally as a 64-bit unsigned integer in the Windows UTC format. +newtype ModTime = ModTime Word64 + deriving (Binary, Bounded, Eq, Ord) + +instance Show ModTime where + show (ModTime x) = show x + +instance Read ModTime where + readsPrec p str = map (first ModTime) (readsPrec p str) + +-- | Return modification time of the given file. Works around the low clock +-- resolution problem that 'getModificationTime' has on GHC < 7.8. +-- +-- This is a modified version of the code originally written for Shake by Neil +-- Mitchell. See module Development.Shake.FileInfo. +getModTime :: FilePath -> NoCallStackIO ModTime + +#if defined mingw32_HOST_OS + +-- Directly against the Win32 API. 
+getModTime path = allocaBytes size_WIN32_FILE_ATTRIBUTE_DATA $ \info -> do + res <- getFileAttributesEx path info + if not res + then do + let err = mkIOError doesNotExistErrorType + "Distribution.Compat.Time.getModTime" + Nothing (Just path) + ioError err + else do + dwLow <- peekByteOff info + index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime + dwHigh <- peekByteOff info + index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime +#if MIN_VERSION_base(4,7,0) + let qwTime = + (fromIntegral (dwHigh :: DWORD) `unsafeShiftL` finiteBitSize dwHigh) + .|. (fromIntegral (dwLow :: DWORD)) +#else + let qwTime = + (fromIntegral (dwHigh :: DWORD) `unsafeShiftL` bitSize dwHigh) + .|. (fromIntegral (dwLow :: DWORD)) +#endif + return $! ModTime (qwTime :: Word64) + +#ifdef x86_64_HOST_ARCH +#define CALLCONV ccall +#else +#define CALLCONV stdcall +#endif + +foreign import CALLCONV "windows.h GetFileAttributesExW" + c_getFileAttributesEx :: LPCTSTR -> Int32 -> LPVOID -> Prelude.IO BOOL + +getFileAttributesEx :: String -> LPVOID -> NoCallStackIO BOOL +getFileAttributesEx path lpFileInformation = + withTString path $ \c_path -> + c_getFileAttributesEx c_path getFileExInfoStandard lpFileInformation + +getFileExInfoStandard :: Int32 +getFileExInfoStandard = 0 + +size_WIN32_FILE_ATTRIBUTE_DATA :: Int +size_WIN32_FILE_ATTRIBUTE_DATA = 36 + +index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime :: Int +index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwLowDateTime = 20 + +index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime :: Int +index_WIN32_FILE_ATTRIBUTE_DATA_ftLastWriteTime_dwHighDateTime = 24 + +#else + +-- Directly against the unix library. +getModTime path = do + st <- getFileStatus path + return $! 
(extractFileTime st) + +extractFileTime :: FileStatus -> ModTime +extractFileTime x = posixTimeToModTime (modificationTimeHiRes x) + +#endif + +windowsTick, secToUnixEpoch :: Word64 +windowsTick = 10000000 +secToUnixEpoch = 11644473600 + +-- | Convert POSIX seconds to ModTime. +posixSecondsToModTime :: Int64 -> ModTime +posixSecondsToModTime s = + ModTime $ ((fromIntegral s :: Word64) + secToUnixEpoch) * windowsTick + +-- | Convert 'POSIXTime' to 'ModTime'. +posixTimeToModTime :: POSIXTime -> ModTime +posixTimeToModTime p = ModTime $ (ceiling $ p * 1e7) -- 100 ns precision + + (secToUnixEpoch * windowsTick) + +-- | Return age of given file in days. +getFileAge :: FilePath -> NoCallStackIO Double +getFileAge file = do + t0 <- getModificationTime file + t1 <- getCurrentTime + return $ realToFrac (t1 `diffUTCTime` t0) / realToFrac posixDayLength + +-- | Return the current time as 'ModTime'. +getCurTime :: NoCallStackIO ModTime +getCurTime = posixTimeToModTime `fmap` getPOSIXTime -- Uses 'gettimeofday'. + +-- | Based on code written by Neil Mitchell for Shake. See +-- 'sleepFileTimeCalibrate' in 'Test.Type'. Returns a pair +-- of microsecond values: first, the maximum delay seen, and the +-- recommended delay to use before testing for file modification change. +-- The returned delay is never smaller +-- than 10 ms, but never larger than 1 second. +calibrateMtimeChangeDelay :: IO (Int, Int) +calibrateMtimeChangeDelay = + withTempDirectory silent "." 
"calibration-" $ \dir -> do + let fileName = dir "probe" + mtimes <- for [1..25] $ \(i::Int) -> time $ do + writeFile fileName $ show i + t0 <- getModTime fileName + let spin j = do + writeFile fileName $ show (i,j) + t1 <- getModTime fileName + unless (t0 < t1) (spin $ j + 1) + spin (0::Int) + let mtimeChange = maximum mtimes + mtimeChange' = min 1000000 $ (max 10000 mtimeChange) * 2 + return (mtimeChange, mtimeChange') + where + time :: IO () -> IO Int + time act = do + t0 <- getCurrentTime + act + t1 <- getCurrentTime + return . ceiling $! (t1 `diffUTCTime` t0) * 1e6 -- microseconds diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compiler.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compiler.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Compiler.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Compiler.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,220 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compiler +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This has an enumeration of the various compilers that Cabal knows about. It +-- also specifies the default compiler. Sadly you'll often see code that does +-- case analysis on this compiler flavour enumeration like: +-- +-- > case compilerFlavor comp of +-- > GHC -> GHC.getInstalledPackages verbosity packageDb progdb +-- +-- Obviously it would be better to use the proper 'Compiler' abstraction +-- because that would keep all the compiler-specific code together. 
+-- Unfortunately we cannot make this change yet without breaking the +-- 'UserHooks' api, which would break all custom @Setup.hs@ files, so for the +-- moment we just have to live with this deficiency. If you're interested, see +-- ticket #57. + +module Distribution.Compiler ( + -- * Compiler flavor + CompilerFlavor(..), + buildCompilerId, + buildCompilerFlavor, + defaultCompilerFlavor, + parseCompilerFlavorCompat, + classifyCompilerFlavor, + knownCompilerFlavors, + + -- * Compiler id + CompilerId(..), + + -- * Compiler info + CompilerInfo(..), + unknownCompilerInfo, + AbiTag(..), abiTagString + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Language.Haskell.Extension + +import Distribution.Version (Version, mkVersion', nullVersion) + +import qualified System.Info (compilerName, compilerVersion) +import Distribution.Parsec.Class (Parsec (..)) +import Distribution.Pretty (Pretty (..)) +import Distribution.Text (Text(..), display) +import qualified Distribution.Compat.ReadP as Parse +import qualified Distribution.Compat.CharParsing as P +import qualified Text.PrettyPrint as Disp + +data CompilerFlavor = + GHC | GHCJS | NHC | YHC | Hugs | HBC | Helium | JHC | LHC | UHC | Eta + | HaskellSuite String -- string is the id of the actual compiler + | OtherCompiler String + deriving (Generic, Show, Read, Eq, Ord, Typeable, Data) + +instance Binary CompilerFlavor + +instance NFData CompilerFlavor where rnf = genericRnf + +knownCompilerFlavors :: [CompilerFlavor] +knownCompilerFlavors = + [GHC, GHCJS, NHC, YHC, Hugs, HBC, Helium, JHC, LHC, UHC, Eta] + +instance Pretty CompilerFlavor where + pretty (OtherCompiler name) = Disp.text name + pretty (HaskellSuite name) = Disp.text name + pretty NHC = Disp.text "nhc98" + pretty other = Disp.text (lowercase (show other)) + +instance Parsec CompilerFlavor where + parsec = classifyCompilerFlavor <$> component + where + component = do + cs <- P.munch1 isAlphaNum + if all isDigit cs then fail "all digits 
compiler name" else return cs + +instance Text CompilerFlavor where + parse = do + comp <- Parse.munch1 isAlphaNum + when (all isDigit comp) Parse.pfail + return (classifyCompilerFlavor comp) + +classifyCompilerFlavor :: String -> CompilerFlavor +classifyCompilerFlavor s = + fromMaybe (OtherCompiler s) $ lookup (lowercase s) compilerMap + where + compilerMap = [ (lowercase (display compiler), compiler) + | compiler <- knownCompilerFlavors ] + + +--TODO: In some future release, remove 'parseCompilerFlavorCompat' and use +-- ordinary 'parse'. Also add ("nhc", NHC) to the above 'compilerMap'. + +-- | Like 'classifyCompilerFlavor' but compatible with the old ReadS parser. +-- +-- It is compatible in the sense that it accepts only the same strings, +-- eg "GHC" but not "ghc". However other strings get mapped to 'OtherCompiler'. +-- The point of this is that we do not allow extra valid values that would +-- upset older Cabal versions that had a stricter parser however we cope with +-- new values more gracefully so that we'll be able to introduce new value in +-- future without breaking things so much. +-- +parseCompilerFlavorCompat :: Parse.ReadP r CompilerFlavor +parseCompilerFlavorCompat = do + comp <- Parse.munch1 isAlphaNum + when (all isDigit comp) Parse.pfail + case lookup comp compilerMap of + Just compiler -> return compiler + Nothing -> return (OtherCompiler comp) + where + compilerMap = [ (show compiler, compiler) + | compiler <- knownCompilerFlavors + , compiler /= YHC ] + +buildCompilerFlavor :: CompilerFlavor +buildCompilerFlavor = classifyCompilerFlavor System.Info.compilerName + +buildCompilerVersion :: Version +buildCompilerVersion = mkVersion' System.Info.compilerVersion + +buildCompilerId :: CompilerId +buildCompilerId = CompilerId buildCompilerFlavor buildCompilerVersion + +-- | The default compiler flavour to pick when compiling stuff. This defaults +-- to the compiler used to build the Cabal lib. 
+-- +-- However if it's not a recognised compiler then it's 'Nothing' and the user +-- will have to specify which compiler they want. +-- +defaultCompilerFlavor :: Maybe CompilerFlavor +defaultCompilerFlavor = case buildCompilerFlavor of + OtherCompiler _ -> Nothing + _ -> Just buildCompilerFlavor + +-- ------------------------------------------------------------ +-- * Compiler Id +-- ------------------------------------------------------------ + +data CompilerId = CompilerId CompilerFlavor Version + deriving (Eq, Generic, Ord, Read, Show) + +instance Binary CompilerId + +instance NFData CompilerId where rnf = genericRnf + +instance Text CompilerId where + disp (CompilerId f v) + | v == nullVersion = disp f + | otherwise = disp f <<>> Disp.char '-' <<>> disp v + + parse = do + flavour <- parse + version <- (Parse.char '-' >> parse) Parse.<++ return nullVersion + return (CompilerId flavour version) + +lowercase :: String -> String +lowercase = map toLower + +-- ------------------------------------------------------------ +-- * Compiler Info +-- ------------------------------------------------------------ + +-- | Compiler information used for resolving configurations. Some +-- fields can be set to Nothing to indicate that the information is +-- unknown. + +data CompilerInfo = CompilerInfo { + compilerInfoId :: CompilerId, + -- ^ Compiler flavour and version. + compilerInfoAbiTag :: AbiTag, + -- ^ Tag for distinguishing incompatible ABI's on the same + -- architecture/os. + compilerInfoCompat :: Maybe [CompilerId], + -- ^ Other implementations that this compiler claims to be + -- compatible with, if known. + compilerInfoLanguages :: Maybe [Language], + -- ^ Supported language standards, if known. + compilerInfoExtensions :: Maybe [Extension] + -- ^ Supported extensions, if known. 
+ } + deriving (Generic, Show, Read) + +instance Binary CompilerInfo + +data AbiTag + = NoAbiTag + | AbiTag String + deriving (Eq, Generic, Show, Read) + +instance Binary AbiTag + +instance Text AbiTag where + disp NoAbiTag = Disp.empty + disp (AbiTag tag) = Disp.text tag + + parse = do + tag <- Parse.munch (\c -> isAlphaNum c || c == '_') + if null tag then return NoAbiTag else return (AbiTag tag) + +abiTagString :: AbiTag -> String +abiTagString NoAbiTag = "" +abiTagString (AbiTag tag) = tag + +-- | Make a CompilerInfo of which only the known information is its CompilerId, +-- its AbiTag and that it does not claim to be compatible with other +-- compiler id's. +unknownCompilerInfo :: CompilerId -> AbiTag -> CompilerInfo +unknownCompilerInfo compilerId abiTag = + CompilerInfo compilerId abiTag (Just []) Nothing Nothing diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/FieldGrammar/Class.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/FieldGrammar/Class.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/FieldGrammar/Class.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/FieldGrammar/Class.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,147 @@ +module Distribution.FieldGrammar.Class ( + FieldGrammar (..), + uniqueField, + optionalField, + optionalFieldDef, + monoidalField, + deprecatedField', + ) where + +import Distribution.Compat.Lens +import Distribution.Compat.Prelude +import Prelude () + +import Data.Functor.Identity (Identity (..)) + +import Distribution.Compat.Newtype (Newtype) +import Distribution.Parsec.Class (Parsec) +import Distribution.Parsec.Field +import Distribution.Pretty (Pretty) + +-- | 'FieldGrammar' is parametrised by +-- +-- * @s@ which is a structure we are parsing. We need this to provide prettyprinter +-- functionality +-- +-- * @a@ type of the field. 
+-- +-- /Note:/ We'd like to have @forall s. Applicative (f s)@ context. +-- +class FieldGrammar g where + -- | Unfocus, zoom out, /blur/ 'FieldGrammar'. + blurFieldGrammar :: ALens' a b -> g b c -> g a c + + -- | Field which should be defined, exactly once. + uniqueFieldAla + :: (Parsec b, Pretty b, Newtype b a) + => FieldName -- ^ field name + -> (a -> b) -- ^ 'Newtype' pack + -> ALens' s a -- ^ lens into the field + -> g s a + + -- | Boolean field with a default value. + booleanFieldDef + :: FieldName -- ^ field name + -> ALens' s Bool -- ^ lens into the field + -> Bool -- ^ default + -> g s Bool + + -- | Optional field. + optionalFieldAla + :: (Parsec b, Pretty b, Newtype b a) + => FieldName -- ^ field name + -> (a -> b) -- ^ 'pack' + -> ALens' s (Maybe a) -- ^ lens into the field + -> g s (Maybe a) + + -- | Optional field with default value. + optionalFieldDefAla + :: (Parsec b, Pretty b, Newtype b a, Eq a) + => FieldName -- ^ field name + -> (a -> b) -- ^ 'Newtype' pack + -> ALens' s a -- ^ @'Lens'' s a@: lens into the field + -> a -- ^ default value + -> g s a + + -- | Monoidal field. + -- + -- Values are combined with 'mappend'. + -- + -- /Note:/ 'optionalFieldAla' is a @monoidalField@ with 'Last' monoid. + -- + monoidalFieldAla + :: (Parsec b, Pretty b, Monoid a, Newtype b a) + => FieldName -- ^ field name + -> (a -> b) -- ^ 'pack' + -> ALens' s a -- ^ lens into the field + -> g s a + + -- | Parser matching all fields with a name starting with a prefix. + prefixedFields + :: FieldName -- ^ field name prefix + -> ALens' s [(String, String)] -- ^ lens into the field + -> g s [(String, String)] + + -- | Known field, which we don't parse, neither pretty print. + knownField :: FieldName -> g s () + + -- | Field which is parsed but not pretty printed. + hiddenField :: g s a -> g s a + + -- | Deprecated since + deprecatedSince + :: [Int] -- ^ version + -> String -- ^ deprecation message + -> g s a + -> g s a + + -- | Annotate field with since spec-version. 
+ availableSince + :: [Int] -- ^ spec version + -> a -- ^ default value + -> g s a + -> g s a + +-- | Field which can be defined at most once. +uniqueField + :: (FieldGrammar g, Parsec a, Pretty a) + => FieldName -- ^ field name + -> ALens' s a -- ^ lens into the field + -> g s a +uniqueField fn = uniqueFieldAla fn Identity + +-- | Field which can be defined at most once. +optionalField + :: (FieldGrammar g, Parsec a, Pretty a) + => FieldName -- ^ field name + -> ALens' s (Maybe a) -- ^ lens into the field + -> g s (Maybe a) +optionalField fn = optionalFieldAla fn Identity + +-- | Optional field with default value. +optionalFieldDef + :: (FieldGrammar g, Functor (g s), Parsec a, Pretty a, Eq a) + => FieldName -- ^ field name + -> ALens' s a -- ^ @'Lens'' s a@: lens into the field + -> a -- ^ default value + -> g s a +optionalFieldDef fn = optionalFieldDefAla fn Identity + +-- | Field which can be define multiple times, and the results are @mappend@ed. +monoidalField + :: (FieldGrammar g, Parsec a, Pretty a, Monoid a) + => FieldName -- ^ field name + -> ALens' s a -- ^ lens into the field + -> g s a +monoidalField fn = monoidalFieldAla fn Identity + +-- | Deprecated field. If found, warning is issued. +-- +-- /Note:/ also it's not pretty printed! 
+-- +deprecatedField' + :: FieldGrammar g + => String -- ^ deprecation message + -> g s a + -> g s a +deprecatedField' = deprecatedSince [] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/FieldGrammar/FieldDescrs.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/FieldGrammar/FieldDescrs.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/FieldGrammar/FieldDescrs.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/FieldGrammar/FieldDescrs.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,86 @@ +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE RankNTypes #-} +module Distribution.FieldGrammar.FieldDescrs ( + FieldDescrs, + fieldDescrPretty, + fieldDescrParse, + fieldDescrsToList, + ) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Compat.Lens (aview, cloneLens) +import Distribution.Compat.Newtype +import Distribution.FieldGrammar +import Distribution.Pretty (pretty) +import Distribution.Utils.Generic (fromUTF8BS) + +import qualified Data.Map as Map +import qualified Distribution.Parsec.Class as P +import qualified Distribution.Parsec.Field as P +import qualified Text.PrettyPrint as Disp + +-- strict pair +data SP s = SP + { pPretty :: !(s -> Disp.Doc) + , pParse :: !(forall m. P.CabalParsing m => s -> m s) + } + +-- | A collection field parsers and pretty-printers. +newtype FieldDescrs s a = F { runF :: Map String (SP s) } + deriving (Functor) + +instance Applicative (FieldDescrs s) where + pure _ = F mempty + f <*> x = F (mappend (runF f) (runF x)) + +singletonF :: P.FieldName -> (s -> Disp.Doc) -> (forall m. P.CabalParsing m => s -> m s) -> FieldDescrs s a +singletonF fn f g = F $ Map.singleton (fromUTF8BS fn) (SP f g) + +-- | Lookup a field value pretty-printer. 
+fieldDescrPretty :: FieldDescrs s a -> String -> Maybe (s -> Disp.Doc) +fieldDescrPretty (F m) fn = pPretty <$> Map.lookup fn m + +-- | Lookup a field value parser. +fieldDescrParse :: P.CabalParsing m => FieldDescrs s a -> String -> Maybe (s -> m s) +fieldDescrParse (F m) fn = pParse <$> Map.lookup fn m + +fieldDescrsToList + :: P.CabalParsing m + => FieldDescrs s a + -> [(String, s -> Disp.Doc, s -> m s)] +fieldDescrsToList = map mk . Map.toList . runF where + mk (name, SP ppr parse) = (name, ppr, parse) + +-- | /Note:/ default values are printed. +instance FieldGrammar FieldDescrs where + blurFieldGrammar l (F m) = F (fmap blur m) where + blur (SP f g) = SP (f . aview l) (cloneLens l g) + + booleanFieldDef fn l _def = singletonF fn f g where + f s = Disp.text (show (aview l s)) + g s = cloneLens l (const P.parsec) s + -- Note: eta expansion is needed for RankNTypes type-checking to work. + + uniqueFieldAla fn _pack l = singletonF fn f g where + f s = pretty (pack' _pack (aview l s)) + g s = cloneLens l (const (unpack' _pack <$> P.parsec)) s + + optionalFieldAla fn _pack l = singletonF fn f g where + f s = maybe mempty (pretty . pack' _pack) (aview l s) + g s = cloneLens l (const (Just . unpack' _pack <$> P.parsec)) s + + optionalFieldDefAla fn _pack l _def = singletonF fn f g where + f s = pretty (pack' _pack (aview l s)) + g s = cloneLens l (const (unpack' _pack <$> P.parsec)) s + + monoidalFieldAla fn _pack l = singletonF fn f g where + f s = pretty (pack' _pack (aview l s)) + g s = cloneLens l (\x -> mappend x . 
unpack' _pack <$> P.parsec) s + + prefixedFields _fnPfx _l = F mempty + knownField _ = pure () + deprecatedSince _ _ x = x + availableSince _ _ = id + hiddenField _ = F mempty diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/FieldGrammar/Parsec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/FieldGrammar/Parsec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/FieldGrammar/Parsec.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/FieldGrammar/Parsec.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,297 @@ +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} +-- | This module provides a 'FieldGrammarParser', one way to parse +-- @.cabal@ -like files. +-- +-- Fields can be specified multiple times in the .cabal files. The order of +-- such entries is important, but the mutual ordering of different fields is +-- not.Also conditional sections are considered after non-conditional data. +-- The example of this silent-commutation quirk is the fact that +-- +-- @ +-- buildable: True +-- if os(linux) +-- buildable: False +-- @ +-- +-- and +-- +-- @ +-- if os(linux) +-- buildable: False +-- buildable: True +-- @ +-- +-- behave the same! This is the limitation of 'GeneralPackageDescription' +-- structure. +-- +-- So we transform the list of fields @['Field' ann]@ into +-- a map of grouped ordinary fields and a list of lists of sections: +-- @'Fields' ann = 'Map' 'FieldName' ['NamelessField' ann]@ and @[['Section' ann]]@. +-- +-- We need list of list of sections, because we need to distinguish situations +-- where there are fields in between. 
For example +-- +-- @ +-- if flag(bytestring-lt-0_10_4) +-- build-depends: bytestring < 0.10.4 +-- +-- default-language: Haskell2020 +-- +-- else +-- build-depends: bytestring >= 0.10.4 +-- +-- @ +-- +-- is obviously invalid specification. +-- +-- We can parse 'Fields' like we parse @aeson@ objects, yet we use +-- slighly higher-level API, so we can process unspecified fields, +-- to report unknown fields and save custom @x-fields@. +-- +module Distribution.FieldGrammar.Parsec ( + ParsecFieldGrammar, + parseFieldGrammar, + fieldGrammarKnownFieldList, + -- * Auxiliary + Fields, + NamelessField (..), + namelessFieldAnn, + Section (..), + runFieldParser, + runFieldParser', + ) where + +import Data.List (dropWhileEnd) +import Data.Ord (comparing) +import Data.Set (Set) +import Distribution.Compat.Newtype +import Distribution.Compat.Prelude +import Distribution.Simple.Utils (fromUTF8BS) +import Prelude () + +import qualified Data.ByteString as BS +import qualified Data.Set as Set +import qualified Data.Map.Strict as Map +import qualified Text.Parsec as P +import qualified Text.Parsec.Error as P + +import Distribution.CabalSpecVersion +import Distribution.FieldGrammar.Class +import Distribution.Parsec.Class +import Distribution.Parsec.Common +import Distribution.Parsec.Field +import Distribution.Parsec.FieldLineStream +import Distribution.Parsec.ParseResult + +------------------------------------------------------------------------------- +-- Auxiliary types +------------------------------------------------------------------------------- + +type Fields ann = Map FieldName [NamelessField ann] + +-- | Single field, without name, but with its annotation. +data NamelessField ann = MkNamelessField !ann [FieldLine ann] + deriving (Eq, Show, Functor) + +namelessFieldAnn :: NamelessField ann -> ann +namelessFieldAnn (MkNamelessField ann _) = ann + +-- | The 'Section' constructor of 'Field'. 
+data Section ann = MkSection !(Name ann) [SectionArg ann] [Field ann] + deriving (Eq, Show, Functor) + +------------------------------------------------------------------------------- +-- ParsecFieldGrammar +------------------------------------------------------------------------------- + +data ParsecFieldGrammar s a = ParsecFG + { fieldGrammarKnownFields :: !(Set FieldName) + , fieldGrammarKnownPrefixes :: !(Set FieldName) + , fieldGrammarParser :: !(CabalSpecVersion -> Fields Position -> ParseResult a) + } + deriving (Functor) + +parseFieldGrammar :: CabalSpecVersion -> Fields Position -> ParsecFieldGrammar s a -> ParseResult a +parseFieldGrammar v fields grammar = do + for_ (Map.toList (Map.filterWithKey isUnknownField fields)) $ \(name, nfields) -> + for_ nfields $ \(MkNamelessField pos _) -> + parseWarning pos PWTUnknownField $ "Unknown field: " ++ show name + -- TODO: fields allowed in this section + + -- parse + fieldGrammarParser grammar v fields + + where + isUnknownField k _ = not $ + k `Set.member` fieldGrammarKnownFields grammar + || any (`BS.isPrefixOf` k) (fieldGrammarKnownPrefixes grammar) + +fieldGrammarKnownFieldList :: ParsecFieldGrammar s a -> [FieldName] +fieldGrammarKnownFieldList = Set.toList . 
fieldGrammarKnownFields + +instance Applicative (ParsecFieldGrammar s) where + pure x = ParsecFG mempty mempty (\_ _ -> pure x) + {-# INLINE pure #-} + + ParsecFG f f' f'' <*> ParsecFG x x' x'' = ParsecFG + (mappend f x) + (mappend f' x') + (\v fields -> f'' v fields <*> x'' v fields) + {-# INLINE (<*>) #-} + +warnMultipleSingularFields :: FieldName -> [NamelessField Position] -> ParseResult () +warnMultipleSingularFields _ [] = pure () +warnMultipleSingularFields fn (x : xs) = do + let pos = namelessFieldAnn x + poss = map namelessFieldAnn xs + parseWarning pos PWTMultipleSingularField $ + "The field " <> show fn <> " is specified more than once at positions " ++ intercalate ", " (map showPos (pos : poss)) + +instance FieldGrammar ParsecFieldGrammar where + blurFieldGrammar _ (ParsecFG s s' parser) = ParsecFG s s' parser + + uniqueFieldAla fn _pack _extract = ParsecFG (Set.singleton fn) Set.empty parser + where + parser v fields = case Map.lookup fn fields of + Nothing -> parseFatalFailure zeroPos $ show fn ++ " field missing" + Just [] -> parseFatalFailure zeroPos $ show fn ++ " field missing" + Just [x] -> parseOne v x + Just xs -> do + warnMultipleSingularFields fn xs + last <$> traverse (parseOne v) xs + + parseOne v (MkNamelessField pos fls) = + unpack' _pack <$> runFieldParser pos parsec v fls + + booleanFieldDef fn _extract def = ParsecFG (Set.singleton fn) Set.empty parser + where + parser v fields = case Map.lookup fn fields of + Nothing -> pure def + Just [] -> pure def + Just [x] -> parseOne v x + Just xs -> do + warnMultipleSingularFields fn xs + last <$> traverse (parseOne v) xs + + parseOne v (MkNamelessField pos fls) = runFieldParser pos parsec v fls + + optionalFieldAla fn _pack _extract = ParsecFG (Set.singleton fn) Set.empty parser + where + parser v fields = case Map.lookup fn fields of + Nothing -> pure Nothing + Just [] -> pure Nothing + Just [x] -> parseOne v x + Just xs -> do + warnMultipleSingularFields fn xs + last <$> traverse (parseOne 
v) xs + + parseOne v (MkNamelessField pos fls) + | null fls = pure Nothing + | otherwise = Just . unpack' _pack <$> runFieldParser pos parsec v fls + + optionalFieldDefAla fn _pack _extract def = ParsecFG (Set.singleton fn) Set.empty parser + where + parser v fields = case Map.lookup fn fields of + Nothing -> pure def + Just [] -> pure def + Just [x] -> parseOne v x + Just xs -> do + warnMultipleSingularFields fn xs + last <$> traverse (parseOne v) xs + + parseOne v (MkNamelessField pos fls) + | null fls = pure def + | otherwise = unpack' _pack <$> runFieldParser pos parsec v fls + + monoidalFieldAla fn _pack _extract = ParsecFG (Set.singleton fn) Set.empty parser + where + parser v fields = case Map.lookup fn fields of + Nothing -> pure mempty + Just xs -> foldMap (unpack' _pack) <$> traverse (parseOne v) xs + + parseOne v (MkNamelessField pos fls) = runFieldParser pos parsec v fls + + prefixedFields fnPfx _extract = ParsecFG mempty (Set.singleton fnPfx) (\_ fs -> pure (parser fs)) + where + parser :: Fields Position -> [(String, String)] + parser values = reorder $ concatMap convert $ filter match $ Map.toList values + + match (fn, _) = fnPfx `BS.isPrefixOf` fn + convert (fn, fields) = + [ (pos, (fromUTF8BS fn, trim $ fromUTF8BS $ fieldlinesToBS fls)) + | MkNamelessField pos fls <- fields + ] + -- hack: recover the order of prefixed fields + reorder = map snd . sortBy (comparing fst) + trim :: String -> String + trim = dropWhile isSpace . 
dropWhileEnd isSpace + + availableSince vs def (ParsecFG names prefixes parser) = ParsecFG names prefixes parser' + where + parser' v values + | cabalSpecSupports v vs = parser v values + | otherwise = do + let unknownFields = Map.intersection values $ Map.fromSet (const ()) names + for_ (Map.toList unknownFields) $ \(name, fields) -> + for_ fields $ \(MkNamelessField pos _) -> + parseWarning pos PWTUnknownField $ + "The field " <> show name <> " is available since Cabal " ++ show vs + + pure def + + -- todo we know about this field + deprecatedSince (_ : _) _ grammar = grammar -- pass on non-empty version + deprecatedSince _ msg (ParsecFG names prefixes parser) = ParsecFG names prefixes parser' + where + parser' v values = do + let deprecatedFields = Map.intersection values $ Map.fromSet (const ()) names + for_ (Map.toList deprecatedFields) $ \(name, fields) -> + for_ fields $ \(MkNamelessField pos _) -> + parseWarning pos PWTDeprecatedField $ + "The field " <> show name <> " is deprecated. " ++ msg + + parser v values + + knownField fn = ParsecFG (Set.singleton fn) Set.empty (\_ _ -> pure ()) + + hiddenField = id + +------------------------------------------------------------------------------- +-- Parsec +------------------------------------------------------------------------------- + +runFieldParser' :: Position -> ParsecParser a -> CabalSpecVersion -> FieldLineStream -> ParseResult a +runFieldParser' (Position row col) p v str = case P.runParser p' [] "" str of + Right (pok, ws) -> do + -- TODO: map pos + traverse_ (\(PWarning t pos w) -> parseWarning pos t w) ws + pure pok + Left err -> do + let ppos = P.errorPos err + -- Positions start from 1:1, not 0:0 + let epos = Position (row - 1 + P.sourceLine ppos) (col - 1 + P.sourceColumn ppos) + let msg = P.showErrorMessages + "or" "unknown parse error" "expecting" "unexpected" "end of input" + (P.errorMessages err) + let str' = unlines (filter (not . 
all isSpace) (fieldLineStreamToLines str)) + + parseFatalFailure epos $ msg ++ "\n" ++ "\n" ++ str' + where + p' = (,) <$ P.spaces <*> unPP p v <* P.spaces <* P.eof <*> P.getState + +fieldLineStreamToLines :: FieldLineStream -> [String] +fieldLineStreamToLines (FLSLast bs) = [ fromUTF8BS bs ] +fieldLineStreamToLines (FLSCons bs s) = fromUTF8BS bs : fieldLineStreamToLines s + +runFieldParser :: Position -> ParsecParser a -> CabalSpecVersion -> [FieldLine Position] -> ParseResult a +runFieldParser pp p v ls = runFieldParser' pos p v (fieldLinesToStream ls) + where + -- TODO: make per line lookup + pos = case ls of + [] -> pp + (FieldLine pos' _ : _) -> pos' + +fieldlinesToBS :: [FieldLine ann] -> BS.ByteString +fieldlinesToBS = BS.intercalate "\n" . map (\(FieldLine _ bs) -> bs) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/FieldGrammar/Pretty.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/FieldGrammar/Pretty.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/FieldGrammar/Pretty.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/FieldGrammar/Pretty.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,80 @@ +{-# LANGUAGE DeriveFunctor #-} +module Distribution.FieldGrammar.Pretty ( + PrettyFieldGrammar, + prettyFieldGrammar, + ) where + +import Distribution.Compat.Lens +import Distribution.Compat.Newtype +import Distribution.Compat.Prelude +import Distribution.Pretty (Pretty (..)) +import Distribution.Simple.Utils (fromUTF8BS) +import Prelude () +import Text.PrettyPrint (Doc) +import qualified Text.PrettyPrint as PP + +import Distribution.FieldGrammar.Class +import Distribution.ParseUtils (ppField) + +newtype PrettyFieldGrammar s a = PrettyFG + { fieldGrammarPretty :: s -> Doc + } + deriving (Functor) + +instance Applicative (PrettyFieldGrammar s) where + pure _ = PrettyFG (\_ -> mempty) + PrettyFG f <*> 
PrettyFG x = PrettyFG (\s -> f s PP.$$ x s) + +-- | We can use 'PrettyFieldGrammar' to pp print the @s@. +-- +-- /Note:/ there is not trailing @($+$ text "")@. +prettyFieldGrammar :: PrettyFieldGrammar s a -> s -> Doc +prettyFieldGrammar = fieldGrammarPretty + +instance FieldGrammar PrettyFieldGrammar where + blurFieldGrammar f (PrettyFG pp) = PrettyFG (pp . aview f) + + uniqueFieldAla fn _pack l = PrettyFG $ \s -> + ppField (fromUTF8BS fn) (pretty (pack' _pack (aview l s))) + + booleanFieldDef fn l def = PrettyFG pp + where + pp s + | b == def = mempty + | otherwise = ppField (fromUTF8BS fn) (PP.text (show b)) + where + b = aview l s + + optionalFieldAla fn _pack l = PrettyFG pp + where + pp s = case aview l s of + Nothing -> mempty + Just a -> ppField (fromUTF8BS fn) (pretty (pack' _pack a)) + + optionalFieldDefAla fn _pack l def = PrettyFG pp + where + pp s + | x == def = mempty + | otherwise = ppField (fromUTF8BS fn) (pretty (pack' _pack x)) + where + x = aview l s + + monoidalFieldAla fn _pack l = PrettyFG pp + where + pp s = ppField (fromUTF8BS fn) (pretty (pack' _pack (aview l s))) + + prefixedFields _fnPfx l = PrettyFG (pp . aview l) + where + pp xs = PP.vcat + -- always print the field, even its Doc is empty + -- i.e. 
don't use ppField + [ PP.text n <<>> PP.colon PP.<+> (PP.vcat $ map PP.text $ lines s) + | (n, s) <- xs + -- fnPfx `isPrefixOf` n + ] + + knownField _ = pure () + deprecatedSince [] _ _ = PrettyFG (\_ -> mempty) + deprecatedSince _ _ x = x + availableSince _ _ = id + hiddenField _ = PrettyFG (\_ -> mempty) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/FieldGrammar.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/FieldGrammar.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/FieldGrammar.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/FieldGrammar.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,85 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE ScopedTypeVariables #-} +-- | This module provides a way to specify a grammar of @.cabal@ -like files. +module Distribution.FieldGrammar ( + -- * Field grammar type + FieldGrammar (..), + uniqueField, + optionalField, + optionalFieldDef, + monoidalField, + deprecatedField', + -- * Concrete grammar implementations + ParsecFieldGrammar, + ParsecFieldGrammar', + parseFieldGrammar, + fieldGrammarKnownFieldList, + PrettyFieldGrammar, + PrettyFieldGrammar', + prettyFieldGrammar, + -- * Auxlilary + (^^^), + Section(..), + Fields, + partitionFields, + takeFields, + runFieldParser, + runFieldParser', + ) where + +import Distribution.Compat.Prelude +import Prelude () + +import qualified Data.Map.Strict as Map + +import Distribution.FieldGrammar.Class +import Distribution.FieldGrammar.Parsec +import Distribution.FieldGrammar.Pretty +import Distribution.Parsec.Field +import Distribution.Utils.Generic (spanMaybe) + +type ParsecFieldGrammar' a = ParsecFieldGrammar a a +type PrettyFieldGrammar' a = PrettyFieldGrammar a a + +infixl 5 ^^^ + +-- | Reverse function application which binds tighter than '<$>' and '<*>'. 
+-- Useful for refining grammar specification. +-- +-- @ +-- \<*\> 'monoidalFieldAla' "extensions" (alaList' FSep MQuoted) oldExtensions +-- ^^^ 'deprecatedSince' [1,12] "Please use 'default-extensions' or 'other-extensions' fields." +-- @ +(^^^) :: a -> (a -> b) -> b +x ^^^ f = f x + +-- | Partitioning state +data PS ann = PS (Fields ann) [Section ann] [[Section ann]] + +-- | Partition field list into field map and groups of sections. +partitionFields :: [Field ann] -> (Fields ann, [[Section ann]]) +partitionFields = finalize . foldl' f (PS mempty mempty mempty) + where + finalize :: PS ann -> (Fields ann, [[Section ann]]) + finalize (PS fs s ss) + | null s = (fs, reverse ss) + | otherwise = (fs, reverse (reverse s : ss)) + + f :: PS ann -> Field ann -> PS ann + f (PS fs s ss) (Field (Name ann name) fss) = + PS (Map.insertWith (flip (++)) name [MkNamelessField ann fss] fs) [] ss' + where + ss' | null s = ss + | otherwise = reverse s : ss + f (PS fs s ss) (Section name sargs sfields) = + PS fs (MkSection name sargs sfields : s) ss + +-- | Take all fields from the front. +takeFields :: [Field ann] -> (Fields ann, [Field ann]) +takeFields = finalize . 
spanMaybe match + where + finalize (fs, rest) = (Map.fromListWith (flip (++)) fs, rest) + + match (Field (Name ann name) fs) = Just (name, [MkNamelessField ann fs]) + match _ = Nothing diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/GetOpt.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/GetOpt.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/GetOpt.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/GetOpt.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,209 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.GetOpt +-- Copyright : (c) Sven Panne 2002-2005 +-- License : BSD-style (see the file libraries/base/LICENSE) +-- +-- Maintainer : libraries@haskell.org +-- Portability : portable +-- +-- This is a fork of "System.Console.GetOpt" with the following changes: +-- +-- * Treat "cabal --flag command" as "cabal command --flag" e.g. +-- "cabal -v configure" to mean "cabal configure -v" For flags that are +-- not recognised as global flags, pass them on to the sub-command. See +-- the difference in 'shortOpt'. +-- +-- * Line wrapping in the 'usageInfo' output, plus a more compact +-- rendering of short options, and slightly less padding. +-- +-- If you want to take on the challenge of merging this with the GetOpt +-- from the base package then go for it! +-- +module Distribution.GetOpt ( + -- * GetOpt + getOpt, getOpt', + usageInfo, + ArgOrder(..), + OptDescr(..), + ArgDescr(..), + + -- * Example + -- | See "System.Console.GetOpt" for examples +) where + +import Prelude () +import Distribution.Compat.Prelude +import System.Console.GetOpt + ( ArgOrder(..), OptDescr(..), ArgDescr(..) 
) + +data OptKind a -- kind of cmd line arg (internal use only): + = Opt a -- an option + | UnreqOpt String -- an un-recognized option + | NonOpt String -- a non-option + | EndOfOpts -- end-of-options marker (i.e. "--") + | OptErr String -- something went wrong... + +-- | Return a string describing the usage of a command, derived from +-- the header (first argument) and the options described by the +-- second argument. +usageInfo :: String -- header + -> [OptDescr a] -- option descriptors + -> String -- nicely formatted decription of options +usageInfo header optDescr = unlines (header:table) + where (ss,ls,ds) = unzip3 [ (intercalate ", " (map (fmtShort ad) sos) + ,concatMap (fmtLong ad) (take 1 los) + ,d) + | Option sos los ad d <- optDescr ] + ssWidth = (maximum . map length) ss + lsWidth = (maximum . map length) ls + dsWidth = 30 `max` (80 - (ssWidth + lsWidth + 3)) + table = [ " " ++ padTo ssWidth so' ++ + " " ++ padTo lsWidth lo' ++ + " " ++ d' + | (so,lo,d) <- zip3 ss ls ds + , (so',lo',d') <- fmtOpt dsWidth so lo d ] + padTo n x = take n (x ++ repeat ' ') + +fmtOpt :: Int -> String -> String -> String -> [(String, String, String)] +fmtOpt descrWidth so lo descr = + case wrapText descrWidth descr of + [] -> [(so,lo,"")] + (d:ds) -> (so,lo,d) : [ ("","",d') | d' <- ds ] + +fmtShort :: ArgDescr a -> Char -> String +fmtShort (NoArg _ ) so = "-" ++ [so] +fmtShort (ReqArg _ _) so = "-" ++ [so] +fmtShort (OptArg _ _) so = "-" ++ [so] + -- unlike upstream GetOpt we omit the arg name for short options + +fmtLong :: ArgDescr a -> String -> String +fmtLong (NoArg _ ) lo = "--" ++ lo +fmtLong (ReqArg _ ad) lo = "--" ++ lo ++ "=" ++ ad +fmtLong (OptArg _ ad) lo = "--" ++ lo ++ "[=" ++ ad ++ "]" + +wrapText :: Int -> String -> [String] +wrapText width = map unwords . wrap 0 [] . 
words + where wrap :: Int -> [String] -> [String] -> [[String]] + wrap 0 [] (w:ws) + | length w + 1 > width + = wrap (length w) [w] ws + wrap col line (w:ws) + | col + length w + 1 > width + = reverse line : wrap 0 [] (w:ws) + wrap col line (w:ws) + = let col' = col + length w + 1 + in wrap col' (w:line) ws + wrap _ [] [] = [] + wrap _ line [] = [reverse line] + +{-| +Process the command-line, and return the list of values that matched +(and those that didn\'t). The arguments are: + +* The order requirements (see 'ArgOrder') + +* The option descriptions (see 'OptDescr') + +* The actual command line arguments (presumably got from + 'System.Environment.getArgs'). + +'getOpt' returns a triple consisting of the option arguments, a list +of non-options, and a list of error messages. +-} +getOpt :: ArgOrder a -- non-option handling + -> [OptDescr a] -- option descriptors + -> [String] -- the command-line arguments + -> ([a],[String],[String]) -- (options,non-options,error messages) +getOpt ordering optDescr args = (os,xs,es ++ map errUnrec us) + where (os,xs,us,es) = getOpt' ordering optDescr args + +{-| +This is almost the same as 'getOpt', but returns a quadruple +consisting of the option arguments, a list of non-options, a list of +unrecognized options, and a list of error messages. 
+-} +getOpt' :: ArgOrder a -- non-option handling + -> [OptDescr a] -- option descriptors + -> [String] -- the command-line arguments + -> ([a],[String], [String] ,[String]) -- (options,non-options,unrecognized,error messages) +getOpt' _ _ [] = ([],[],[],[]) +getOpt' ordering optDescr (arg:args) = procNextOpt opt ordering + where procNextOpt (Opt o) _ = (o:os,xs,us,es) + procNextOpt (UnreqOpt u) _ = (os,xs,u:us,es) + procNextOpt (NonOpt x) RequireOrder = ([],x:rest,[],[]) + procNextOpt (NonOpt x) Permute = (os,x:xs,us,es) + procNextOpt (NonOpt x) (ReturnInOrder f) = (f x :os, xs,us,es) + procNextOpt EndOfOpts RequireOrder = ([],rest,[],[]) + procNextOpt EndOfOpts Permute = ([],rest,[],[]) + procNextOpt EndOfOpts (ReturnInOrder f) = (map f rest,[],[],[]) + procNextOpt (OptErr e) _ = (os,xs,us,e:es) + + (opt,rest) = getNext arg args optDescr + (os,xs,us,es) = getOpt' ordering optDescr rest + +-- take a look at the next cmd line arg and decide what to do with it +getNext :: String -> [String] -> [OptDescr a] -> (OptKind a,[String]) +getNext ('-':'-':[]) rest _ = (EndOfOpts,rest) +getNext ('-':'-':xs) rest optDescr = longOpt xs rest optDescr +getNext ('-': x :xs) rest optDescr = shortOpt x xs rest optDescr +getNext a rest _ = (NonOpt a,rest) + +-- handle long option +longOpt :: String -> [String] -> [OptDescr a] -> (OptKind a,[String]) +longOpt ls rs optDescr = long ads arg rs + where (opt,arg) = break (=='=') ls + getWith p = [ o | o@(Option _ xs _ _) <- optDescr + , isJust (find (p opt) xs)] + exact = getWith (==) + options = if null exact then getWith isPrefixOf else exact + ads = [ ad | Option _ _ ad _ <- options ] + optStr = "--" ++ opt + + long (_:_:_) _ rest = (errAmbig options optStr,rest) + long [NoArg a ] [] rest = (Opt a,rest) + long [NoArg _ ] ('=':_) rest = (errNoArg optStr,rest) + long [ReqArg _ d] [] [] = (errReq d optStr,[]) + long [ReqArg f _] [] (r:rest) = (Opt (f r),rest) + long [ReqArg f _] ('=':xs) rest = (Opt (f xs),rest) + long [OptArg f _] [] 
rest = (Opt (f Nothing),rest) + long [OptArg f _] ('=':xs) rest = (Opt (f (Just xs)),rest) + long _ _ rest = (UnreqOpt ("--"++ls),rest) + +-- handle short option +shortOpt :: Char -> String -> [String] -> [OptDescr a] -> (OptKind a,[String]) +shortOpt y ys rs optDescr = short ads ys rs + where options = [ o | o@(Option ss _ _ _) <- optDescr, s <- ss, y == s ] + ads = [ ad | Option _ _ ad _ <- options ] + optStr = '-':[y] + + short (_:_:_) _ rest = (errAmbig options optStr,rest) + short (NoArg a :_) [] rest = (Opt a,rest) + short (NoArg a :_) xs rest = (Opt a,('-':xs):rest) + short (ReqArg _ d:_) [] [] = (errReq d optStr,[]) + short (ReqArg f _:_) [] (r:rest) = (Opt (f r),rest) + short (ReqArg f _:_) xs rest = (Opt (f xs),rest) + short (OptArg f _:_) [] rest = (Opt (f Nothing),rest) + short (OptArg f _:_) xs rest = (Opt (f (Just xs)),rest) + short [] [] rest = (UnreqOpt optStr,rest) + short [] xs rest = (UnreqOpt (optStr++xs),rest) + -- This is different vs upstream = (UnreqOpt optStr,('-':xs):rest) + -- Apparently this was part of the change so that flags that are + -- not recognised as global flags are passed on to the sub-command. + -- But why was no equivalent change required for longOpt? So could + -- this change go upstream? 
+ +-- miscellaneous error formatting + +errAmbig :: [OptDescr a] -> String -> OptKind a +errAmbig ods optStr = OptErr (usageInfo header ods) + where header = "option `" ++ optStr ++ "' is ambiguous; could be one of:" + +errReq :: String -> String -> OptKind a +errReq d optStr = OptErr ("option `" ++ optStr ++ "' requires an argument " ++ d ++ "\n") + +errUnrec :: String -> String +errUnrec optStr = "unrecognized option `" ++ optStr ++ "'\n" + +errNoArg :: String -> OptKind a +errNoArg optStr = OptErr ("option `" ++ optStr ++ "' doesn't allow an argument\n") diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/InstalledPackageInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/InstalledPackageInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/InstalledPackageInfo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/InstalledPackageInfo.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,150 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.InstalledPackageInfo +-- Copyright : (c) The University of Glasgow 2004 +-- +-- Maintainer : libraries@haskell.org +-- Portability : portable +-- +-- This is the information about an /installed/ package that +-- is communicated to the @ghc-pkg@ program in order to register +-- a package. @ghc-pkg@ now consumes this package format (as of version +-- 6.4). This is specific to GHC at the moment. +-- +-- The @.cabal@ file format is for describing a package that is not yet +-- installed. It has a lot of flexibility, like conditionals and dependency +-- ranges. As such, that format is not at all suitable for describing a package +-- that has already been built and installed. 
By the time we get to that stage, +-- we have resolved all conditionals and resolved dependency version +-- constraints to exact versions of dependent packages. So, this module defines +-- the 'InstalledPackageInfo' data structure that contains all the info we keep +-- about an installed package. There is a parser and pretty printer. The +-- textual format is rather simpler than the @.cabal@ format: there are no +-- sections, for example. + +-- This module is meant to be local-only to Distribution... + +module Distribution.InstalledPackageInfo ( + InstalledPackageInfo(..), + installedPackageId, + installedComponentId, + installedOpenUnitId, + sourceComponentName, + requiredSignatures, + ExposedModule(..), + AbiDependency(..), + ParseResult(..), PError(..), PWarning, + emptyInstalledPackageInfo, + parseInstalledPackageInfo, + showInstalledPackageInfo, + showFullInstalledPackageInfo, + showInstalledPackageInfoField, + showSimpleInstalledPackageInfoField, + ) where + +import Distribution.Compat.Prelude +import Prelude () + +import Data.Set (Set) +import Distribution.Backpack +import Distribution.CabalSpecVersion (cabalSpecLatest) +import Distribution.FieldGrammar +import Distribution.FieldGrammar.FieldDescrs +import Distribution.ModuleName +import Distribution.Package hiding (installedPackageId, installedUnitId) +import Distribution.ParseUtils +import Distribution.Types.ComponentName +import Distribution.Utils.Generic (toUTF8BS) + +import qualified Data.Map as Map +import qualified Distribution.Parsec.Common as P +import qualified Distribution.Parsec.Parser as P +import qualified Distribution.Parsec.ParseResult as P +import qualified Text.Parsec.Error as Parsec +import qualified Text.Parsec.Pos as Parsec +import qualified Text.PrettyPrint as Disp + +import Distribution.Types.InstalledPackageInfo +import Distribution.Types.InstalledPackageInfo.FieldGrammar + + + +installedComponentId :: InstalledPackageInfo -> ComponentId +installedComponentId ipi = + case 
unComponentId (installedComponentId_ ipi) of + "" -> mkComponentId (unUnitId (installedUnitId ipi)) + _ -> installedComponentId_ ipi + +-- | Get the indefinite unit identity representing this package. +-- This IS NOT guaranteed to give you a substitution; for +-- instantiated packages you will get @DefiniteUnitId (installedUnitId ipi)@. +-- For indefinite libraries, however, you will correctly get +-- an @OpenUnitId@ with the appropriate 'OpenModuleSubst'. +installedOpenUnitId :: InstalledPackageInfo -> OpenUnitId +installedOpenUnitId ipi + = mkOpenUnitId (installedUnitId ipi) (installedComponentId ipi) (Map.fromList (instantiatedWith ipi)) + +-- | Returns the set of module names which need to be filled for +-- an indefinite package, or the empty set if the package is definite. +requiredSignatures :: InstalledPackageInfo -> Set ModuleName +requiredSignatures ipi = openModuleSubstFreeHoles (Map.fromList (instantiatedWith ipi)) + +{-# DEPRECATED installedPackageId "Use installedUnitId instead" #-} +-- | Backwards compatibility with Cabal pre-1.24. +-- +-- This type synonym is slightly awful because in cabal-install +-- we define an 'InstalledPackageId' but it's a ComponentId, +-- not a UnitId! 
+installedPackageId :: InstalledPackageInfo -> UnitId +installedPackageId = installedUnitId + +-- ----------------------------------------------------------------------------- +-- Munging + +sourceComponentName :: InstalledPackageInfo -> ComponentName +sourceComponentName ipi = + case sourceLibName ipi of + Nothing -> CLibName + Just qn -> CSubLibName qn + +-- ----------------------------------------------------------------------------- +-- Parsing + +parseInstalledPackageInfo :: String -> ParseResult InstalledPackageInfo +parseInstalledPackageInfo s = case P.readFields (toUTF8BS s) of + Left err -> ParseFailed (NoParse (show err) $ Parsec.sourceLine $ Parsec.errorPos err) + Right fs -> case partitionFields fs of + (fs', _) -> case P.runParseResult $ parseFieldGrammar cabalSpecLatest fs' ipiFieldGrammar of + (ws, Right x) -> ParseOk ws' x where + ws' = map (PWarning . P.showPWarning "") ws + (_, Left (_, errs)) -> ParseFailed (NoParse errs' 0) where + errs' = intercalate "; " $ map (\(P.PError _ msg) -> msg) errs + +-- ----------------------------------------------------------------------------- +-- Pretty-printing + +-- | Pretty print 'InstalledPackageInfo'. +-- +-- @pkgRoot@ isn't printed, as ghc-pkg prints it manually (as GHC-8.4). +showInstalledPackageInfo :: InstalledPackageInfo -> String +showInstalledPackageInfo ipi = + showFullInstalledPackageInfo ipi { pkgRoot = Nothing } + +-- | The variant of 'showInstalledPackageInfo' which outputs @pkgroot@ field too. +showFullInstalledPackageInfo :: InstalledPackageInfo -> String +showFullInstalledPackageInfo = Disp.render . (Disp.$+$ Disp.text "") . prettyFieldGrammar ipiFieldGrammar + +-- | +-- +-- >>> let ipi = emptyInstalledPackageInfo { maintainer = "Tester" } +-- >>> fmap ($ ipi) $ showInstalledPackageInfoField "maintainer" +-- Just "maintainer: Tester" +showInstalledPackageInfoField :: String -> Maybe (InstalledPackageInfo -> String) +showInstalledPackageInfoField fn = + fmap (\g -> Disp.render . ppField fn . 
g) $ fieldDescrPretty ipiFieldGrammar fn + +showSimpleInstalledPackageInfoField :: String -> Maybe (InstalledPackageInfo -> String) +showSimpleInstalledPackageInfoField fn = + fmap (Disp.renderStyle myStyle .) $ fieldDescrPretty ipiFieldGrammar fn + where + myStyle = Disp.style { Disp.mode = Disp.LeftMode } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Lex.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Lex.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Lex.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Lex.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,40 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Lex +-- Copyright : Ben Gamari 2015-2019 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module contains a simple lexer supporting quoted strings + +module Distribution.Lex ( + tokenizeQuotedWords + ) where + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.Compat.DList + +tokenizeQuotedWords :: String -> [String] +tokenizeQuotedWords = filter (not . null) . 
go False mempty + where + go :: Bool -- ^ in quoted region + -> DList Char -- ^ accumulator + -> String -- ^ string to be parsed + -> [String] -- ^ parse result + go _ accum [] + | [] <- accum' = [] + | otherwise = [accum'] + where accum' = runDList accum + + go False accum (c:cs) + | isSpace c = runDList accum : go False mempty cs + | c == '"' = go True accum cs + + go True accum (c:cs) + | c == '"' = go False accum cs + + go quoted accum (c:cs) + = go quoted (accum `mappend` singleton c) cs + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/License.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/License.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/License.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/License.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,275 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.License +-- Description : The License data type. +-- Copyright : Isaac Jones 2003-2005 +-- Duncan Coutts 2008 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Package descriptions contain fields for specifying the name of a software +-- license and the name of the file containing the text of that license. While +-- package authors may choose any license they like, Cabal provides an +-- enumeration of a small set of common free and open source software licenses. +-- This is done so that Hackage can recognise licenses, so that tools can detect +-- , +-- and to deter +-- . +-- +-- It is recommended that all package authors use the @license-file@ or +-- @license-files@ fields in their package descriptions. Further information +-- about these fields can be found in the +-- . 
+-- +-- = Additional resources +-- +-- The following websites provide information about free and open source +-- software licenses: +-- +-- * +-- +-- * +-- +-- = Disclaimer +-- +-- The descriptions of software licenses provided by this documentation are +-- intended for informational purposes only and in no way constitute legal +-- advice. Please read the text of the licenses and consult a lawyer for any +-- advice regarding software licensing. + +module Distribution.License ( + License(..), + knownLicenses, + licenseToSPDX, + licenseFromSPDX, + ) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text +import Distribution.Version + +import qualified Distribution.Compat.CharParsing as P +import qualified Data.Map.Strict as Map +import qualified Distribution.Compat.ReadP as Parse +import qualified Distribution.SPDX as SPDX +import qualified Text.PrettyPrint as Disp + +-- | Indicates the license under which a package's source code is released. +-- Versions of the licenses not listed here will be rejected by Hackage and +-- cause @cabal check@ to issue a warning. +data License = + -- TODO: * remove BSD4 + + -- | GNU General Public License, + -- or + -- . + GPL (Maybe Version) + + -- | . + | AGPL (Maybe Version) + + -- | GNU Lesser General Public License, + -- or + -- . + | LGPL (Maybe Version) + + -- | . + | BSD2 + + -- | . + | BSD3 + + -- | . + -- This license has not been approved by the OSI and is incompatible with + -- the GNU GPL. It is provided for historical reasons and should be avoided. + | BSD4 + + -- | . + | MIT + + -- | + | ISC + + -- | . + | MPL Version + + -- | . + | Apache (Maybe Version) + + -- | The author of a package disclaims any copyright to its source code and + -- dedicates it to the public domain. This is not a software license. 
Please + -- note that it is not possible to dedicate works to the public domain in + -- every jurisdiction, nor is a work that is in the public domain in one + -- jurisdiction necessarily in the public domain elsewhere. + | PublicDomain + + -- | Explicitly 'All Rights Reserved', eg for proprietary software. The + -- package may not be legally modified or redistributed by anyone but the + -- rightsholder. + | AllRightsReserved + + -- | No license specified which legally defaults to 'All Rights Reserved'. + -- The package may not be legally modified or redistributed by anyone but + -- the rightsholder. + | UnspecifiedLicense + + -- | Any other software license. + | OtherLicense + + -- | Indicates an erroneous license name. + | UnknownLicense String + deriving (Generic, Read, Show, Eq, Typeable, Data) + +instance Binary License + +instance NFData License where rnf = genericRnf + +-- | The list of all currently recognised licenses. +knownLicenses :: [License] +knownLicenses = [ GPL unversioned, GPL (version [2]), GPL (version [3]) + , LGPL unversioned, LGPL (version [2, 1]), LGPL (version [3]) + , AGPL unversioned, AGPL (version [3]) + , BSD2, BSD3, MIT, ISC + , MPL (mkVersion [2, 0]) + , Apache unversioned, Apache (version [2, 0]) + , PublicDomain, AllRightsReserved, OtherLicense] + where + unversioned = Nothing + version = Just . mkVersion + +-- | Convert old 'License' to SPDX 'SPDX.License'. +-- Non-SPDX licenses are converted to 'SPDX.LicenseRef'. 
+-- +-- @since 2.2.0.0 +licenseToSPDX :: License -> SPDX.License +licenseToSPDX l = case l of + GPL v | v == version [2] -> spdx SPDX.GPL_2_0_only + GPL v | v == version [3] -> spdx SPDX.GPL_3_0_only + LGPL v | v == version [2,1] -> spdx SPDX.LGPL_2_1_only + LGPL v | v == version [3] -> spdx SPDX.LGPL_3_0_only + AGPL v | v == version [3] -> spdx SPDX.AGPL_3_0_only + BSD2 -> spdx SPDX.BSD_2_Clause + BSD3 -> spdx SPDX.BSD_3_Clause + BSD4 -> spdx SPDX.BSD_4_Clause + MIT -> spdx SPDX.MIT + ISC -> spdx SPDX.ISC + MPL v | v == mkVersion [2,0] -> spdx SPDX.MPL_2_0 + Apache v | v == version [2,0] -> spdx SPDX.Apache_2_0 + AllRightsReserved -> SPDX.NONE + UnspecifiedLicense -> SPDX.NONE + OtherLicense -> ref (SPDX.mkLicenseRef' Nothing "OtherLicense") + PublicDomain -> ref (SPDX.mkLicenseRef' Nothing "PublicDomain") + UnknownLicense str -> ref (SPDX.mkLicenseRef' Nothing str) + _ -> ref (SPDX.mkLicenseRef' Nothing $ prettyShow l) + where + version = Just . mkVersion + spdx = SPDX.License . SPDX.simpleLicenseExpression + ref r = SPDX.License $ SPDX.ELicense (SPDX.ELicenseRef r) Nothing + +-- | Convert 'SPDX.License' to 'License', +-- +-- This is lossy conversion. We try our best. +-- +-- >>> licenseFromSPDX . licenseToSPDX $ BSD3 +-- BSD3 +-- +-- >>> licenseFromSPDX . licenseToSPDX $ GPL (Just (mkVersion [3])) +-- GPL (Just (mkVersion [3])) +-- +-- >>> licenseFromSPDX . licenseToSPDX $ PublicDomain +-- UnknownLicense "LicenseRefPublicDomain" +-- +-- >>> licenseFromSPDX $ SPDX.License $ SPDX.simpleLicenseExpression SPDX.EUPL_1_1 +-- UnknownLicense "EUPL-1.1" +-- +-- >>> licenseFromSPDX . 
licenseToSPDX $ AllRightsReserved +-- AllRightsReserved +-- +-- >>> licenseFromSPDX <$> simpleParsec "BSD-3-Clause OR GPL-3.0-only" +-- Just (UnknownLicense "BSD3ClauseORGPL30only") +-- +-- @since 2.2.0.0 +licenseFromSPDX :: SPDX.License -> License +licenseFromSPDX SPDX.NONE = AllRightsReserved +licenseFromSPDX l = + fromMaybe (mungle $ prettyShow l) $ Map.lookup l m + where + m :: Map.Map SPDX.License License + m = Map.fromList $ filter (isSimple . fst ) $ + map (\x -> (licenseToSPDX x, x)) knownLicenses + + isSimple (SPDX.License (SPDX.ELicense (SPDX.ELicenseId _) Nothing)) = True + isSimple _ = False + + mungle name = fromMaybe (UnknownLicense (mapMaybe mangle name)) (simpleParsec name) + + mangle c + | isAlphaNum c = Just c + | otherwise = Nothing + +instance Pretty License where + pretty (GPL version) = Disp.text "GPL" <<>> dispOptVersion version + pretty (LGPL version) = Disp.text "LGPL" <<>> dispOptVersion version + pretty (AGPL version) = Disp.text "AGPL" <<>> dispOptVersion version + pretty (MPL version) = Disp.text "MPL" <<>> dispVersion version + pretty (Apache version) = Disp.text "Apache" <<>> dispOptVersion version + pretty (UnknownLicense other) = Disp.text other + pretty other = Disp.text (show other) + +instance Parsec License where + parsec = do + name <- P.munch1 isAlphaNum + version <- P.optional (P.char '-' *> parsec) + return $! case (name, version :: Maybe Version) of + ("GPL", _ ) -> GPL version + ("LGPL", _ ) -> LGPL version + ("AGPL", _ ) -> AGPL version + ("BSD2", Nothing) -> BSD2 + ("BSD3", Nothing) -> BSD3 + ("BSD4", Nothing) -> BSD4 + ("ISC", Nothing) -> ISC + ("MIT", Nothing) -> MIT + ("MPL", Just version') -> MPL version' + ("Apache", _ ) -> Apache version + ("PublicDomain", Nothing) -> PublicDomain + ("AllRightsReserved", Nothing) -> AllRightsReserved + ("OtherLicense", Nothing) -> OtherLicense + _ -> UnknownLicense $ name ++ + maybe "" (('-':) . 
display) version + +instance Text License where + parse = do + name <- Parse.munch1 (\c -> isAlphaNum c && c /= '-') + version <- Parse.option Nothing (Parse.char '-' >> fmap Just parse) + return $! case (name, version :: Maybe Version) of + ("GPL", _ ) -> GPL version + ("LGPL", _ ) -> LGPL version + ("AGPL", _ ) -> AGPL version + ("BSD2", Nothing) -> BSD2 + ("BSD3", Nothing) -> BSD3 + ("BSD4", Nothing) -> BSD4 + ("ISC", Nothing) -> ISC + ("MIT", Nothing) -> MIT + ("MPL", Just version') -> MPL version' + ("Apache", _ ) -> Apache version + ("PublicDomain", Nothing) -> PublicDomain + ("AllRightsReserved", Nothing) -> AllRightsReserved + ("OtherLicense", Nothing) -> OtherLicense + _ -> UnknownLicense $ name ++ + maybe "" (('-':) . display) version + +dispOptVersion :: Maybe Version -> Disp.Doc +dispOptVersion Nothing = Disp.empty +dispOptVersion (Just v) = dispVersion v + +dispVersion :: Version -> Disp.Doc +dispVersion v = Disp.char '-' <<>> disp v diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Make.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Make.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Make.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Make.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,189 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Make +-- Copyright : Martin Sjögren 2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is an alternative build system that delegates everything to the @make@ +-- program. All the commands just end up calling @make@ with appropriate +-- arguments. The intention was to allow preexisting packages that used +-- makefiles to be wrapped into Cabal packages. 
In practice essentially all +-- such packages were converted over to the \"Simple\" build system instead. +-- Consequently this module is not used much and it certainly only sees cursory +-- maintenance and no testing. Perhaps at some point we should stop pretending +-- that it works. +-- +-- Uses the parsed command-line from "Distribution.Simple.Setup" in order to build +-- Haskell tools using a back-end build system based on make. Obviously we +-- assume that there is a configure script, and that after the ConfigCmd has +-- been run, there is a Makefile. Further assumptions: +-- +-- [ConfigCmd] We assume the configure script accepts +-- @--with-hc@, +-- @--with-hc-pkg@, +-- @--prefix@, +-- @--bindir@, +-- @--libdir@, +-- @--libexecdir@, +-- @--datadir@. +-- +-- [BuildCmd] We assume that the default Makefile target will build everything. +-- +-- [InstallCmd] We assume there is an @install@ target. Note that we assume that +-- this does *not* register the package! +-- +-- [CopyCmd] We assume there is a @copy@ target, and a variable @$(destdir)@. +-- The @copy@ target should probably just invoke @make install@ +-- recursively (e.g. @$(MAKE) install prefix=$(destdir)\/$(prefix) +-- bindir=$(destdir)\/$(bindir)@. The reason we can\'t invoke @make +-- install@ directly here is that we don\'t know the value of @$(prefix)@. +-- +-- [SDistCmd] We assume there is a @dist@ target. +-- +-- [RegisterCmd] We assume there is a @register@ target and a variable @$(user)@. +-- +-- [UnregisterCmd] We assume there is an @unregister@ target. +-- +-- [HaddockCmd] We assume there is a @docs@ or @doc@ target. 
+ + +-- copy : +-- $(MAKE) install prefix=$(destdir)/$(prefix) \ +-- bindir=$(destdir)/$(bindir) \ + +module Distribution.Make ( + module Distribution.Package, + License(..), Version, + defaultMain, defaultMainArgs, defaultMainNoRead + ) where + +import Prelude () +import Distribution.Compat.Prelude + +-- local +import Distribution.Compat.Exception +import Distribution.Package +import Distribution.Simple.Program +import Distribution.PackageDescription +import Distribution.Simple.Setup +import Distribution.Simple.Command + +import Distribution.Simple.Utils + +import Distribution.License +import Distribution.Version +import Distribution.Text + +import System.Environment (getArgs, getProgName) +import System.Exit + +defaultMain :: IO () +defaultMain = getArgs >>= defaultMainArgs + +defaultMainArgs :: [String] -> IO () +defaultMainArgs = defaultMainHelper + +{-# DEPRECATED defaultMainNoRead "it ignores its PackageDescription arg" #-} +defaultMainNoRead :: PackageDescription -> IO () +defaultMainNoRead = const defaultMain + +defaultMainHelper :: [String] -> IO () +defaultMainHelper args = + case commandsRun (globalCommand commands) commands args of + CommandHelp help -> printHelp help + CommandList opts -> printOptionsList opts + CommandErrors errs -> printErrors errs + CommandReadyToGo (flags, commandParse) -> + case commandParse of + _ | fromFlag (globalVersion flags) -> printVersion + | fromFlag (globalNumericVersion flags) -> printNumericVersion + CommandHelp help -> printHelp help + CommandList opts -> printOptionsList opts + CommandErrors errs -> printErrors errs + CommandReadyToGo action -> action + + where + printHelp help = getProgName >>= putStr . help + printOptionsList = putStr . 
unlines + printErrors errs = do + putStr (intercalate "\n" errs) + exitWith (ExitFailure 1) + printNumericVersion = putStrLn $ display cabalVersion + printVersion = putStrLn $ "Cabal library version " + ++ display cabalVersion + + progs = defaultProgramDb + commands = + [configureCommand progs `commandAddAction` configureAction + ,buildCommand progs `commandAddAction` buildAction + ,installCommand `commandAddAction` installAction + ,copyCommand `commandAddAction` copyAction + ,haddockCommand `commandAddAction` haddockAction + ,cleanCommand `commandAddAction` cleanAction + ,sdistCommand `commandAddAction` sdistAction + ,registerCommand `commandAddAction` registerAction + ,unregisterCommand `commandAddAction` unregisterAction + ] + +configureAction :: ConfigFlags -> [String] -> IO () +configureAction flags args = do + noExtraFlags args + let verbosity = fromFlag (configVerbosity flags) + rawSystemExit verbosity "sh" $ + "configure" + : configureArgs backwardsCompatHack flags + where backwardsCompatHack = True + +copyAction :: CopyFlags -> [String] -> IO () +copyAction flags args = do + noExtraFlags args + let destArgs = case fromFlag $ copyDest flags of + NoCopyDest -> ["install"] + CopyTo path -> ["copy", "destdir=" ++ path] + CopyToDb _ -> error "CopyToDb not supported via Make" + + rawSystemExit (fromFlag $ copyVerbosity flags) "make" destArgs + +installAction :: InstallFlags -> [String] -> IO () +installAction flags args = do + noExtraFlags args + rawSystemExit (fromFlag $ installVerbosity flags) "make" ["install"] + rawSystemExit (fromFlag $ installVerbosity flags) "make" ["register"] + +haddockAction :: HaddockFlags -> [String] -> IO () +haddockAction flags args = do + noExtraFlags args + rawSystemExit (fromFlag $ haddockVerbosity flags) "make" ["docs"] + `catchIO` \_ -> + rawSystemExit (fromFlag $ haddockVerbosity flags) "make" ["doc"] + +buildAction :: BuildFlags -> [String] -> IO () +buildAction flags args = do + noExtraFlags args + rawSystemExit (fromFlag $ 
buildVerbosity flags) "make" [] + +cleanAction :: CleanFlags -> [String] -> IO () +cleanAction flags args = do + noExtraFlags args + rawSystemExit (fromFlag $ cleanVerbosity flags) "make" ["clean"] + +sdistAction :: SDistFlags -> [String] -> IO () +sdistAction flags args = do + noExtraFlags args + rawSystemExit (fromFlag $ sDistVerbosity flags) "make" ["dist"] + +registerAction :: RegisterFlags -> [String] -> IO () +registerAction flags args = do + noExtraFlags args + rawSystemExit (fromFlag $ regVerbosity flags) "make" ["register"] + +unregisterAction :: RegisterFlags -> [String] -> IO () +unregisterAction flags args = do + noExtraFlags args + rawSystemExit (fromFlag $ regVerbosity flags) "make" ["unregister"] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/ModuleName.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/ModuleName.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/ModuleName.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/ModuleName.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,164 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.ModuleName +-- Copyright : Duncan Coutts 2008 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Data type for Haskell module names. 
+ +module Distribution.ModuleName ( + ModuleName (..), -- TODO: move Parsec instance here, don't export constructor + fromString, + fromComponents, + components, + toFilePath, + main, + simple, + -- * Internal + validModuleComponent, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Utils.ShortText +import System.FilePath ( pathSeparator ) + +import Distribution.Pretty +import Distribution.Parsec.Class +import Distribution.Text + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +-- | A valid Haskell module name. +-- +newtype ModuleName = ModuleName ShortTextLst + deriving (Eq, Generic, Ord, Read, Show, Typeable, Data) + +instance Binary ModuleName + +instance NFData ModuleName where + rnf (ModuleName ms) = rnf ms + +instance Pretty ModuleName where + pretty (ModuleName ms) = + Disp.hcat (intersperse (Disp.char '.') (map Disp.text $ stlToStrings ms)) + +instance Parsec ModuleName where + parsec = fromComponents <$> P.sepBy1 component (P.char '.') + where + component = do + c <- P.satisfy isUpper + cs <- P.munch validModuleChar + return (c:cs) + +instance Text ModuleName where + parse = do + ms <- Parse.sepBy1 component (Parse.char '.') + return (ModuleName $ stlFromStrings ms) + + where + component = do + c <- Parse.satisfy isUpper + cs <- Parse.munch validModuleChar + return (c:cs) + +validModuleChar :: Char -> Bool +validModuleChar c = isAlphaNum c || c == '_' || c == '\'' + +validModuleComponent :: String -> Bool +validModuleComponent [] = False +validModuleComponent (c:cs) = isUpper c + && all validModuleChar cs + +{-# DEPRECATED simple "use ModuleName.fromString instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +simple :: String -> ModuleName +simple str = ModuleName (stlFromStrings [str]) + +-- | Construct a 'ModuleName' from a valid module name 'String'. 
+-- +-- This is just a convenience function intended for valid module strings. It is +-- an error if it is used with a string that is not a valid module name. If you +-- are parsing user input then use 'Distribution.Text.simpleParse' instead. +-- +instance IsString ModuleName where + fromString string = fromComponents (split string) + where + split cs = case break (=='.') cs of + (chunk,[]) -> chunk : [] + (chunk,_:rest) -> chunk : split rest + +-- | Construct a 'ModuleName' from valid module components, i.e. parts +-- separated by dots. +fromComponents :: [String] -> ModuleName +fromComponents components' + | null components' = error zeroComponents + | all validModuleComponent components' = ModuleName (stlFromStrings components') + | otherwise = error badName + where + zeroComponents = "ModuleName.fromComponents: zero components" + badName = "ModuleName.fromComponents: invalid components " ++ show components' + +-- | The module name @Main@. +-- +main :: ModuleName +main = ModuleName (stlFromStrings ["Main"]) + +-- | The individual components of a hierarchical module name. For example +-- +-- > components (fromString "A.B.C") = ["A", "B", "C"] +-- +components :: ModuleName -> [String] +components (ModuleName ms) = stlToStrings ms + +-- | Convert a module name to a file path, but without any file extension. +-- For example: +-- +-- > toFilePath (fromString "A.B.C") = "A/B/C" +-- +toFilePath :: ModuleName -> FilePath +toFilePath = intercalate [pathSeparator] . components + +---------------------------------------------------------------------------- +-- internal helper + +-- | Strict/unpacked representation of @[ShortText]@ +data ShortTextLst = STLNil + | STLCons !ShortText !ShortTextLst + deriving (Eq, Generic, Ord, Typeable, Data) + +instance NFData ShortTextLst where + rnf = flip seq () + +instance Show ShortTextLst where + showsPrec p = showsPrec p . stlToList + + +instance Read ShortTextLst where + readsPrec p = map (first stlFromList) . 
readsPrec p + +instance Binary ShortTextLst where + put = put . stlToList + get = stlFromList <$> get + +stlToList :: ShortTextLst -> [ShortText] +stlToList STLNil = [] +stlToList (STLCons st next) = st : stlToList next + +stlToStrings :: ShortTextLst -> [String] +stlToStrings = map fromShortText . stlToList + +stlFromList :: [ShortText] -> ShortTextLst +stlFromList [] = STLNil +stlFromList (x:xs) = STLCons x (stlFromList xs) + +stlFromStrings :: [String] -> ShortTextLst +stlFromStrings = stlFromList . map toShortText diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription/Check.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription/Check.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription/Check.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription/Check.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,2244 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.PackageDescription.Check +-- Copyright : Lennart Kolmodin 2008 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This has code for checking for various problems in packages. There is one +-- set of checks that just looks at a 'PackageDescription' in isolation and +-- another set of checks that also looks at files in the package. Some of the +-- checks are basic sanity checks, others are portability standards that we'd +-- like to encourage. There is a 'PackageCheck' type that distinguishes the +-- different kinds of check so we can see which ones are appropriate to report +-- in different situations. This code gets uses when configuring a package when +-- we consider only basic problems. 
The higher standard is uses when when +-- preparing a source tarball and by Hackage when uploading new packages. The +-- reason for this is that we want to hold packages that are expected to be +-- distributed to a higher standard than packages that are only ever expected +-- to be used on the author's own environment. + +module Distribution.PackageDescription.Check ( + -- * Package Checking + PackageCheck(..), + checkPackage, + checkConfiguredPackage, + + -- ** Checking package contents + checkPackageFiles, + checkPackageContent, + CheckPackageContentOps(..), + checkPackageFileNames, + ) where + +import Distribution.Compat.Prelude +import Prelude () + +import Control.Monad (mapM) +import Data.List (group) +import Distribution.Compat.Lens +import Distribution.Compiler +import Distribution.License +import Distribution.Package +import Distribution.PackageDescription +import Distribution.PackageDescription.Configuration +import Distribution.Pretty (prettyShow) +import Distribution.Simple.BuildPaths (autogenPathsModuleName) +import Distribution.Simple.BuildToolDepends +import Distribution.Simple.CCompiler +import Distribution.Simple.Glob +import Distribution.Simple.Utils hiding (findPackageDesc, notice) +import Distribution.System +import Distribution.Text +import Distribution.Types.ComponentRequestedSpec +import Distribution.Types.CondTree +import Distribution.Types.ExeDependency +import Distribution.Types.UnqualComponentName +import Distribution.Utils.Generic (isAscii) +import Distribution.Verbosity +import Distribution.Version +import Language.Haskell.Extension +import System.FilePath + (splitDirectories, splitExtension, splitPath, takeExtension, takeFileName, (<.>), ()) + +import qualified Data.ByteString.Lazy as BS +import qualified Data.Map as Map +import qualified Distribution.Compat.DList as DList +import qualified Distribution.SPDX as SPDX +import qualified System.Directory as System + +import qualified System.Directory (getDirectoryContents) +import qualified 
System.FilePath.Windows as FilePath.Windows (isValid) + +import qualified Data.Set as Set + +import qualified Distribution.Types.BuildInfo.Lens as L +import qualified Distribution.Types.GenericPackageDescription.Lens as L +import qualified Distribution.Types.PackageDescription.Lens as L + +-- | Results of some kind of failed package check. +-- +-- There are a range of severities, from merely dubious to totally insane. +-- All of them come with a human readable explanation. In future we may augment +-- them with more machine readable explanations, for example to help an IDE +-- suggest automatic corrections. +-- +data PackageCheck = + + -- | This package description is no good. There's no way it's going to + -- build sensibly. This should give an error at configure time. + PackageBuildImpossible { explanation :: String } + + -- | A problem that is likely to affect building the package, or an + -- issue that we'd like every package author to be aware of, even if + -- the package is never distributed. + | PackageBuildWarning { explanation :: String } + + -- | An issue that might not be a problem for the package author but + -- might be annoying or detrimental when the package is distributed to + -- users. We should encourage distributed packages to be free from these + -- issues, but occasionally there are justifiable reasons so we cannot + -- ban them entirely. + | PackageDistSuspicious { explanation :: String } + + -- | Like PackageDistSuspicious but will only display warnings + -- rather than causing abnormal exit when you run 'cabal check'. + | PackageDistSuspiciousWarn { explanation :: String } + + -- | An issue that is OK in the author's environment but is almost + -- certain to be a portability problem for other environments. We can + -- quite legitimately refuse to publicly distribute packages with these + -- problems. 
+ | PackageDistInexcusable { explanation :: String } + deriving (Eq, Ord) + +instance Show PackageCheck where + show notice = explanation notice + +check :: Bool -> PackageCheck -> Maybe PackageCheck +check False _ = Nothing +check True pc = Just pc + +checkSpecVersion :: PackageDescription -> [Int] -> Bool -> PackageCheck + -> Maybe PackageCheck +checkSpecVersion pkg specver cond pc + | specVersion pkg >= mkVersion specver = Nothing + | otherwise = check cond pc + +-- ------------------------------------------------------------ +-- * Standard checks +-- ------------------------------------------------------------ + +-- | Check for common mistakes and problems in package descriptions. +-- +-- This is the standard collection of checks covering all aspects except +-- for checks that require looking at files within the package. For those +-- see 'checkPackageFiles'. +-- +-- It requires the 'GenericPackageDescription' and optionally a particular +-- configuration of that package. If you pass 'Nothing' then we just check +-- a version of the generic description using 'flattenPackageDescription'. 
+-- +checkPackage :: GenericPackageDescription + -> Maybe PackageDescription + -> [PackageCheck] +checkPackage gpkg mpkg = + checkConfiguredPackage pkg + ++ checkConditionals gpkg + ++ checkPackageVersions gpkg + ++ checkDevelopmentOnlyFlags gpkg + ++ checkFlagNames gpkg + ++ checkUnusedFlags gpkg + ++ checkUnicodeXFields gpkg + ++ checkPathsModuleExtensions pkg + where + pkg = fromMaybe (flattenPackageDescription gpkg) mpkg + +--TODO: make this variant go away +-- we should always know the GenericPackageDescription +checkConfiguredPackage :: PackageDescription -> [PackageCheck] +checkConfiguredPackage pkg = + checkSanity pkg + ++ checkFields pkg + ++ checkLicense pkg + ++ checkSourceRepos pkg + ++ checkGhcOptions pkg + ++ checkCCOptions pkg + ++ checkCxxOptions pkg + ++ checkCPPOptions pkg + ++ checkPaths pkg + ++ checkCabalVersion pkg + + +-- ------------------------------------------------------------ +-- * Basic sanity checks +-- ------------------------------------------------------------ + +-- | Check that this package description is sane. +-- +checkSanity :: PackageDescription -> [PackageCheck] +checkSanity pkg = + catMaybes [ + + check (null . unPackageName . packageName $ pkg) $ + PackageBuildImpossible "No 'name' field." + + , check (nullVersion == packageVersion pkg) $ + PackageBuildImpossible "No 'version' field." + + , check (all ($ pkg) [ null . executables + , null . testSuites + , null . benchmarks + , null . allLibraries + , null . foreignLibs ]) $ + PackageBuildImpossible + "No executables, libraries, tests, or benchmarks found. Nothing to do." + + , check (any isNothing (map libName $ subLibraries pkg)) $ + PackageBuildImpossible $ "Found one or more unnamed internal libraries. " + ++ "Only the non-internal library can have the same name as the package." + + , check (not (null duplicateNames)) $ + PackageBuildImpossible $ "Duplicate sections: " + ++ commaSep (map unUnqualComponentName duplicateNames) + ++ ". 
The name of every library, executable, test suite," + ++ " and benchmark section in" + ++ " the package must be unique." + + -- NB: but it's OK for executables to have the same name! + -- TODO shouldn't need to compare on the string level + , check (any (== display (packageName pkg)) (display <$> subLibNames)) $ + PackageBuildImpossible $ "Illegal internal library name " + ++ display (packageName pkg) + ++ ". Internal libraries cannot have the same name as the package." + ++ " Maybe you wanted a non-internal library?" + ++ " If so, rewrite the section stanza" + ++ " from 'library: '" ++ display (packageName pkg) ++ "' to 'library'." + ] + --TODO: check for name clashes case insensitively: windows file systems cannot + --cope. + + ++ concatMap (checkLibrary pkg) (allLibraries pkg) + ++ concatMap (checkExecutable pkg) (executables pkg) + ++ concatMap (checkTestSuite pkg) (testSuites pkg) + ++ concatMap (checkBenchmark pkg) (benchmarks pkg) + + ++ catMaybes [ + + check (specVersion pkg > cabalVersion) $ + PackageBuildImpossible $ + "This package description follows version " + ++ display (specVersion pkg) ++ " of the Cabal specification. This " + ++ "tool only supports up to version " ++ display cabalVersion ++ "." + ] + where + -- The public 'library' gets special dispensation, because it + -- is common practice to export a library and name the executable + -- the same as the package. + subLibNames = catMaybes . 
map libName $ subLibraries pkg + exeNames = map exeName $ executables pkg + testNames = map testName $ testSuites pkg + bmNames = map benchmarkName $ benchmarks pkg + duplicateNames = dups $ subLibNames ++ exeNames ++ testNames ++ bmNames + +checkLibrary :: PackageDescription -> Library -> [PackageCheck] +checkLibrary pkg lib = + catMaybes [ + + check (not (null moduleDuplicates)) $ + PackageBuildImpossible $ + "Duplicate modules in library: " + ++ commaSep (map display moduleDuplicates) + + -- TODO: This check is bogus if a required-signature was passed through + , check (null (explicitLibModules lib) && null (reexportedModules lib)) $ + PackageDistSuspiciousWarn $ + "Library " ++ (case libName lib of + Nothing -> "" + Just n -> display n + ) ++ "does not expose any modules" + + -- check use of signatures sections + , checkVersion [1,25] (not (null (signatures lib))) $ + PackageDistInexcusable $ + "To use the 'signatures' field the package needs to specify " + ++ "at least 'cabal-version: 2.0'." + + -- check that all autogen-modules appear on other-modules or exposed-modules + , check + (not $ and $ map (flip elem (explicitLibModules lib)) (libModulesAutogen lib)) $ + PackageBuildImpossible $ + "An 'autogen-module' is neither on 'exposed-modules' or " + ++ "'other-modules'." 
+ + ] + + where + checkVersion :: [Int] -> Bool -> PackageCheck -> Maybe PackageCheck + checkVersion ver cond pc + | specVersion pkg >= mkVersion ver = Nothing + | otherwise = check cond pc + + -- TODO: not sure if this check is always right in Backpack + moduleDuplicates = dups (explicitLibModules lib ++ + map moduleReexportName (reexportedModules lib)) + +checkExecutable :: PackageDescription -> Executable -> [PackageCheck] +checkExecutable pkg exe = + catMaybes [ + + check (null (modulePath exe)) $ + PackageBuildImpossible $ + "No 'main-is' field found for executable " ++ display (exeName exe) + + , check (not (null (modulePath exe)) + && (not $ fileExtensionSupportedLanguage $ modulePath exe)) $ + PackageBuildImpossible $ + "The 'main-is' field must specify a '.hs' or '.lhs' file " + ++ "(even if it is generated by a preprocessor), " + ++ "or it may specify a C/C++/obj-C source file." + + , checkSpecVersion pkg [1,17] + (fileExtensionSupportedLanguage (modulePath exe) + && takeExtension (modulePath exe) `notElem` [".hs", ".lhs"]) $ + PackageDistInexcusable $ + "The package uses a C/C++/obj-C source file for the 'main-is' field. " + ++ "To use this feature you must specify 'cabal-version: >= 1.18'." 
+ + , check (not (null moduleDuplicates)) $ + PackageBuildImpossible $ + "Duplicate modules in executable '" ++ display (exeName exe) ++ "': " + ++ commaSep (map display moduleDuplicates) + + -- check that all autogen-modules appear on other-modules + , check + (not $ and $ map (flip elem (exeModules exe)) (exeModulesAutogen exe)) $ + PackageBuildImpossible $ + "On executable '" ++ display (exeName exe) ++ "' an 'autogen-module' is not " + ++ "on 'other-modules'" + ] + where + moduleDuplicates = dups (exeModules exe) + +checkTestSuite :: PackageDescription -> TestSuite -> [PackageCheck] +checkTestSuite pkg test = + catMaybes [ + + case testInterface test of + TestSuiteUnsupported tt@(TestTypeUnknown _ _) -> Just $ + PackageBuildWarning $ + quote (display tt) ++ " is not a known type of test suite. " + ++ "The known test suite types are: " + ++ commaSep (map display knownTestTypes) + + TestSuiteUnsupported tt -> Just $ + PackageBuildWarning $ + quote (display tt) ++ " is not a supported test suite version. " + ++ "The known test suite types are: " + ++ commaSep (map display knownTestTypes) + _ -> Nothing + + , check (not $ null moduleDuplicates) $ + PackageBuildImpossible $ + "Duplicate modules in test suite '" ++ display (testName test) ++ "': " + ++ commaSep (map display moduleDuplicates) + + , check mainIsWrongExt $ + PackageBuildImpossible $ + "The 'main-is' field must specify a '.hs' or '.lhs' file " + ++ "(even if it is generated by a preprocessor), " + ++ "or it may specify a C/C++/obj-C source file." + + , checkSpecVersion pkg [1,17] (mainIsNotHsExt && not mainIsWrongExt) $ + PackageDistInexcusable $ + "The package uses a C/C++/obj-C source file for the 'main-is' field. " + ++ "To use this feature you must specify 'cabal-version: >= 1.18'." 
+ + -- check that all autogen-modules appear on other-modules + , check + (not $ and $ map + (flip elem (testModules test)) + (testModulesAutogen test) + ) $ + PackageBuildImpossible $ + "On test suite '" ++ display (testName test) ++ "' an 'autogen-module' is not " + ++ "on 'other-modules'" + ] + where + moduleDuplicates = dups $ testModules test + + mainIsWrongExt = case testInterface test of + TestSuiteExeV10 _ f -> not $ fileExtensionSupportedLanguage f + _ -> False + + mainIsNotHsExt = case testInterface test of + TestSuiteExeV10 _ f -> takeExtension f `notElem` [".hs", ".lhs"] + _ -> False + +checkBenchmark :: PackageDescription -> Benchmark -> [PackageCheck] +checkBenchmark _pkg bm = + catMaybes [ + + case benchmarkInterface bm of + BenchmarkUnsupported tt@(BenchmarkTypeUnknown _ _) -> Just $ + PackageBuildWarning $ + quote (display tt) ++ " is not a known type of benchmark. " + ++ "The known benchmark types are: " + ++ commaSep (map display knownBenchmarkTypes) + + BenchmarkUnsupported tt -> Just $ + PackageBuildWarning $ + quote (display tt) ++ " is not a supported benchmark version. " + ++ "The known benchmark types are: " + ++ commaSep (map display knownBenchmarkTypes) + _ -> Nothing + + , check (not $ null moduleDuplicates) $ + PackageBuildImpossible $ + "Duplicate modules in benchmark '" ++ display (benchmarkName bm) ++ "': " + ++ commaSep (map display moduleDuplicates) + + , check mainIsWrongExt $ + PackageBuildImpossible $ + "The 'main-is' field must specify a '.hs' or '.lhs' file " + ++ "(even if it is generated by a preprocessor)." 
+ + -- check that all autogen-modules appear on other-modules + , check + (not $ and $ map + (flip elem (benchmarkModules bm)) + (benchmarkModulesAutogen bm) + ) $ + PackageBuildImpossible $ + "On benchmark '" ++ display (benchmarkName bm) ++ "' an 'autogen-module' is " + ++ "not on 'other-modules'" + ] + where + moduleDuplicates = dups $ benchmarkModules bm + + mainIsWrongExt = case benchmarkInterface bm of + BenchmarkExeV10 _ f -> takeExtension f `notElem` [".hs", ".lhs"] + _ -> False + +-- ------------------------------------------------------------ +-- * Additional pure checks +-- ------------------------------------------------------------ + +checkFields :: PackageDescription -> [PackageCheck] +checkFields pkg = + catMaybes [ + + check (not . FilePath.Windows.isValid . display . packageName $ pkg) $ + PackageDistInexcusable $ + "Unfortunately, the package name '" ++ display (packageName pkg) + ++ "' is one of the reserved system file names on Windows. Many tools " + ++ "need to convert package names to file names so using this name " + ++ "would cause problems." + + , check ((isPrefixOf "z-") . display . packageName $ pkg) $ + PackageDistInexcusable $ + "Package names with the prefix 'z-' are reserved by Cabal and " + ++ "cannot be used." + + , check (isNothing (buildTypeRaw pkg) && specVersion pkg < mkVersion [2,1]) $ + PackageBuildWarning $ + "No 'build-type' specified. If you do not need a custom Setup.hs or " + ++ "./configure script then use 'build-type: Simple'." + + , check (isJust (setupBuildInfo pkg) && buildType pkg /= Custom) $ + PackageBuildWarning $ + "Ignoring the 'custom-setup' section because the 'build-type' is " + ++ "not 'Custom'. Use 'build-type: Custom' if you need to use a " + ++ "custom Setup.hs script." + + , check (not (null unknownCompilers)) $ + PackageBuildWarning $ + "Unknown compiler " ++ commaSep (map quote unknownCompilers) + ++ " in 'tested-with' field." 
+ + , check (not (null unknownLanguages)) $ + PackageBuildWarning $ + "Unknown languages: " ++ commaSep unknownLanguages + + , check (not (null unknownExtensions)) $ + PackageBuildWarning $ + "Unknown extensions: " ++ commaSep unknownExtensions + + , check (not (null languagesUsedAsExtensions)) $ + PackageBuildWarning $ + "Languages listed as extensions: " + ++ commaSep languagesUsedAsExtensions + ++ ". Languages must be specified in either the 'default-language' " + ++ " or the 'other-languages' field." + + , check (not (null ourDeprecatedExtensions)) $ + PackageDistSuspicious $ + "Deprecated extensions: " + ++ commaSep (map (quote . display . fst) ourDeprecatedExtensions) + ++ ". " ++ unwords + [ "Instead of '" ++ display ext + ++ "' use '" ++ display replacement ++ "'." + | (ext, Just replacement) <- ourDeprecatedExtensions ] + + , check (null (category pkg)) $ + PackageDistSuspicious "No 'category' field." + + , check (null (maintainer pkg)) $ + PackageDistSuspicious "No 'maintainer' field." + + , check (null (synopsis pkg) && null (description pkg)) $ + PackageDistInexcusable "No 'synopsis' or 'description' field." + + , check (null (description pkg) && not (null (synopsis pkg))) $ + PackageDistSuspicious "No 'description' field." + + , check (null (synopsis pkg) && not (null (description pkg))) $ + PackageDistSuspicious "No 'synopsis' field." + + --TODO: recommend the bug reports URL, author and homepage fields + --TODO: recommend not using the stability field + --TODO: recommend specifying a source repo + + , check (length (synopsis pkg) >= 80) $ + PackageDistSuspicious + "The 'synopsis' field is rather long (max 80 chars is recommended)." + + -- See also https://github.com/haskell/cabal/pull/3479 + , check (not (null (description pkg)) + && length (description pkg) <= length (synopsis pkg)) $ + PackageDistSuspicious $ + "The 'description' field should be longer than the 'synopsis' " + ++ "field. 
" + ++ "It's useful to provide an informative 'description' to allow " + ++ "Haskell programmers who have never heard about your package to " + ++ "understand the purpose of your package. " + ++ "The 'description' field content is typically shown by tooling " + ++ "(e.g. 'cabal info', Haddock, Hackage) below the 'synopsis' which " + ++ "serves as a headline. " + ++ "Please refer to " + ++ " for more details." + + -- check use of impossible constraints "tested-with: GHC== 6.10 && ==6.12" + , check (not (null testedWithImpossibleRanges)) $ + PackageDistInexcusable $ + "Invalid 'tested-with' version range: " + ++ commaSep (map display testedWithImpossibleRanges) + ++ ". To indicate that you have tested a package with multiple " + ++ "different versions of the same compiler use multiple entries, " + ++ "for example 'tested-with: GHC==6.10.4, GHC==6.12.3' and not " + ++ "'tested-with: GHC==6.10.4 && ==6.12.3'." + + -- Disabled due to #5119: we generate loads of spurious instances of + -- this warning. Re-enabling this check should be part of the fix to + -- #5119. + , check (False && not (null depInternalLibraryWithExtraVersion)) $ + PackageBuildWarning $ + "The package has an extraneous version range for a dependency on an " + ++ "internal library: " + ++ commaSep (map display depInternalLibraryWithExtraVersion) + ++ ". This version range includes the current package but isn't needed " + ++ "as the current package's library will always be used." + + , check (not (null depInternalLibraryWithImpossibleVersion)) $ + PackageBuildImpossible $ + "The package has an impossible version range for a dependency on an " + ++ "internal library: " + ++ commaSep (map display depInternalLibraryWithImpossibleVersion) + ++ ". This version range does not include the current package, and must " + ++ "be removed as the current package's library will always be used." 
+ + , check (not (null depInternalExecutableWithExtraVersion)) $ + PackageBuildWarning $ + "The package has an extraneous version range for a dependency on an " + ++ "internal executable: " + ++ commaSep (map display depInternalExecutableWithExtraVersion) + ++ ". This version range includes the current package but isn't needed " + ++ "as the current package's executable will always be used." + + , check (not (null depInternalExecutableWithImpossibleVersion)) $ + PackageBuildImpossible $ + "The package has an impossible version range for a dependency on an " + ++ "internal executable: " + ++ commaSep (map display depInternalExecutableWithImpossibleVersion) + ++ ". This version range does not include the current package, and must " + ++ "be removed as the current package's executable will always be used." + + , check (not (null depMissingInternalExecutable)) $ + PackageBuildImpossible $ + "The package depends on a missing internal executable: " + ++ commaSep (map display depInternalExecutableWithImpossibleVersion) + ] + where + unknownCompilers = [ name | (OtherCompiler name, _) <- testedWith pkg ] + unknownLanguages = [ name | bi <- allBuildInfo pkg + , UnknownLanguage name <- allLanguages bi ] + unknownExtensions = [ name | bi <- allBuildInfo pkg + , UnknownExtension name <- allExtensions bi + , name `notElem` map display knownLanguages ] + ourDeprecatedExtensions = nub $ catMaybes + [ find ((==ext) . fst) deprecatedExtensions + | bi <- allBuildInfo pkg + , ext <- allExtensions bi ] + languagesUsedAsExtensions = + [ name | bi <- allBuildInfo pkg + , UnknownExtension name <- allExtensions bi + , name `elem` map display knownLanguages ] + + testedWithImpossibleRanges = + [ Dependency (mkPackageName (display compiler)) vr + | (compiler, vr) <- testedWith pkg + , isNoVersion vr ] + + internalLibraries = + map (maybe (packageName pkg) (unqualComponentNameToPackageName) . 
libName) + (allLibraries pkg) + + internalExecutables = map exeName $ executables pkg + + internalLibDeps = + [ dep + | bi <- allBuildInfo pkg + , dep@(Dependency name _) <- targetBuildDepends bi + , name `elem` internalLibraries + ] + + internalExeDeps = + [ dep + | bi <- allBuildInfo pkg + , dep <- getAllToolDependencies pkg bi + , isInternal pkg dep + ] + + depInternalLibraryWithExtraVersion = + [ dep + | dep@(Dependency _ versionRange) <- internalLibDeps + , not $ isAnyVersion versionRange + , packageVersion pkg `withinRange` versionRange + ] + + depInternalLibraryWithImpossibleVersion = + [ dep + | dep@(Dependency _ versionRange) <- internalLibDeps + , not $ packageVersion pkg `withinRange` versionRange + ] + + depInternalExecutableWithExtraVersion = + [ dep + | dep@(ExeDependency _ _ versionRange) <- internalExeDeps + , not $ isAnyVersion versionRange + , packageVersion pkg `withinRange` versionRange + ] + + depInternalExecutableWithImpossibleVersion = + [ dep + | dep@(ExeDependency _ _ versionRange) <- internalExeDeps + , not $ packageVersion pkg `withinRange` versionRange + ] + + depMissingInternalExecutable = + [ dep + | dep@(ExeDependency _ eName _) <- internalExeDeps + , not $ eName `elem` internalExecutables + ] + + +checkLicense :: PackageDescription -> [PackageCheck] +checkLicense pkg = case licenseRaw pkg of + Right l -> checkOldLicense pkg l + Left l -> checkNewLicense pkg l + +checkNewLicense :: PackageDescription -> SPDX.License -> [PackageCheck] +checkNewLicense _pkg lic = catMaybes + [ check (lic == SPDX.NONE) $ + PackageDistInexcusable + "The 'license' field is missing or is NONE." + ] + +checkOldLicense :: PackageDescription -> License -> [PackageCheck] +checkOldLicense pkg lic = catMaybes + [ check (lic == UnspecifiedLicense) $ + PackageDistInexcusable + "The 'license' field is missing." + + , check (lic == AllRightsReserved) $ + PackageDistSuspicious + "The 'license' is AllRightsReserved. Is that really what you want?" 
+ + , checkVersion [1,4] (lic `notElem` compatLicenses) $ + PackageDistInexcusable $ + "Unfortunately the license " ++ quote (prettyShow (license pkg)) + ++ " messes up the parser in earlier Cabal versions so you need to " + ++ "specify 'cabal-version: >= 1.4'. Alternatively if you require " + ++ "compatibility with earlier Cabal versions then use 'OtherLicense'." + + , case lic of + UnknownLicense l -> Just $ + PackageBuildWarning $ + quote ("license: " ++ l) ++ " is not a recognised license. The " + ++ "known licenses are: " + ++ commaSep (map display knownLicenses) + _ -> Nothing + + , check (lic == BSD4) $ + PackageDistSuspicious $ + "Using 'license: BSD4' is almost always a misunderstanding. 'BSD4' " + ++ "refers to the old 4-clause BSD license with the advertising " + ++ "clause. 'BSD3' refers the new 3-clause BSD license." + + , case unknownLicenseVersion (lic) of + Just knownVersions -> Just $ + PackageDistSuspicious $ + "'license: " ++ display (lic) ++ "' is not a known " + ++ "version of that license. The known versions are " + ++ commaSep (map display knownVersions) + ++ ". If this is not a mistake and you think it should be a known " + ++ "version then please file a ticket." + _ -> Nothing + + , check (lic `notElem` [ AllRightsReserved + , UnspecifiedLicense, PublicDomain] + -- AllRightsReserved and PublicDomain are not strictly + -- licenses so don't need license files. + && null (licenseFiles pkg)) $ + PackageDistSuspicious "A 'license-file' is not specified." 
+ ] + where + unknownLicenseVersion (GPL (Just v)) + | v `notElem` knownVersions = Just knownVersions + where knownVersions = [ v' | GPL (Just v') <- knownLicenses ] + unknownLicenseVersion (LGPL (Just v)) + | v `notElem` knownVersions = Just knownVersions + where knownVersions = [ v' | LGPL (Just v') <- knownLicenses ] + unknownLicenseVersion (AGPL (Just v)) + | v `notElem` knownVersions = Just knownVersions + where knownVersions = [ v' | AGPL (Just v') <- knownLicenses ] + unknownLicenseVersion (Apache (Just v)) + | v `notElem` knownVersions = Just knownVersions + where knownVersions = [ v' | Apache (Just v') <- knownLicenses ] + unknownLicenseVersion _ = Nothing + + checkVersion :: [Int] -> Bool -> PackageCheck -> Maybe PackageCheck + checkVersion ver cond pc + | specVersion pkg >= mkVersion ver = Nothing + | otherwise = check cond pc + + compatLicenses = [ GPL Nothing, LGPL Nothing, AGPL Nothing, BSD3, BSD4 + , PublicDomain, AllRightsReserved + , UnspecifiedLicense, OtherLicense ] + +checkSourceRepos :: PackageDescription -> [PackageCheck] +checkSourceRepos pkg = + catMaybes $ concat [[ + + case repoKind repo of + RepoKindUnknown kind -> Just $ PackageDistInexcusable $ + quote kind ++ " is not a recognised kind of source-repository. " + ++ "The repo kind is usually 'head' or 'this'" + _ -> Nothing + + , check (isNothing (repoType repo)) $ + PackageDistInexcusable + "The source-repository 'type' is a required field." + + , check (isNothing (repoLocation repo)) $ + PackageDistInexcusable + "The source-repository 'location' is a required field." + + , check (repoType repo == Just CVS && isNothing (repoModule repo)) $ + PackageDistInexcusable + "For a CVS source-repository, the 'module' is a required field." + + , check (repoKind repo == RepoThis && isNothing (repoTag repo)) $ + PackageDistInexcusable $ + "For the 'this' kind of source-repository, the 'tag' is a required " + ++ "field. 
It should specify the tag corresponding to this version " + ++ "or release of the package." + + , check (maybe False isAbsoluteOnAnyPlatform (repoSubdir repo)) $ + PackageDistInexcusable + "The 'subdir' field of a source-repository must be a relative path." + ] + | repo <- sourceRepos pkg ] + +--TODO: check location looks like a URL for some repo types. + +checkGhcOptions :: PackageDescription -> [PackageCheck] +checkGhcOptions pkg = + catMaybes [ + + checkFlags ["-fasm"] $ + PackageDistInexcusable $ + "'ghc-options: -fasm' is unnecessary and will not work on CPU " + ++ "architectures other than x86, x86-64, ppc or sparc." + + , checkFlags ["-fvia-C"] $ + PackageDistSuspicious $ + "'ghc-options: -fvia-C' is usually unnecessary. If your package " + ++ "needs -via-C for correctness rather than performance then it " + ++ "is using the FFI incorrectly and will probably not work with GHC " + ++ "6.10 or later." + + , checkFlags ["-fhpc"] $ + PackageDistInexcusable $ + "'ghc-options: -fhpc' is not not necessary. Use the configure flag " + ++ " --enable-coverage instead." + + , checkFlags ["-prof"] $ + PackageBuildWarning $ + "'ghc-options: -prof' is not necessary and will lead to problems " + ++ "when used on a library. Use the configure flag " + ++ "--enable-library-profiling and/or --enable-profiling." + + , checkFlags ["-o"] $ + PackageBuildWarning $ + "'ghc-options: -o' is not needed. " + ++ "The output files are named automatically." + + , checkFlags ["-hide-package"] $ + PackageBuildWarning $ + "'ghc-options: -hide-package' is never needed. " + ++ "Cabal hides all packages." + + , checkFlags ["--make"] $ + PackageBuildWarning $ + "'ghc-options: --make' is never needed. Cabal uses this automatically." + + , checkFlags ["-main-is"] $ + PackageDistSuspicious $ + "'ghc-options: -main-is' is not portable." + + , checkNonTestAndBenchmarkFlags ["-O0", "-Onot"] $ + PackageDistSuspicious $ + "'ghc-options: -O0' is not needed. 
" + ++ "Use the --disable-optimization configure flag." + + , checkTestAndBenchmarkFlags ["-O0", "-Onot"] $ + PackageDistSuspiciousWarn $ + "'ghc-options: -O0' is not needed. " + ++ "Use the --disable-optimization configure flag." + + , checkFlags [ "-O", "-O1"] $ + PackageDistInexcusable $ + "'ghc-options: -O' is not needed. " + ++ "Cabal automatically adds the '-O' flag. " + ++ "Setting it yourself interferes with the --disable-optimization flag." + + , checkFlags ["-O2"] $ + PackageDistSuspiciousWarn $ + "'ghc-options: -O2' is rarely needed. " + ++ "Check that it is giving a real benefit " + ++ "and not just imposing longer compile times on your users." + + , checkFlags ["-split-sections"] $ + PackageBuildWarning $ + "'ghc-options: -split-sections' is not needed. " + ++ "Use the --enable-split-sections configure flag." + + , checkFlags ["-split-objs"] $ + PackageBuildWarning $ + "'ghc-options: -split-objs' is not needed. " + ++ "Use the --enable-split-objs configure flag." + + , checkFlags ["-optl-Wl,-s", "-optl-s"] $ + PackageDistInexcusable $ + "'ghc-options: -optl-Wl,-s' is not needed and is not portable to all" + ++ " operating systems. Cabal 1.4 and later automatically strip" + ++ " executables. Cabal also has a flag --disable-executable-stripping" + ++ " which is necessary when building packages for some Linux" + ++ " distributions and using '-optl-Wl,-s' prevents that from working." + + , checkFlags ["-fglasgow-exts"] $ + PackageDistSuspicious $ + "Instead of 'ghc-options: -fglasgow-exts' it is preferable to use " + ++ "the 'extensions' field." + + , check ("-threaded" `elem` lib_ghc_options) $ + PackageBuildWarning $ + "'ghc-options: -threaded' has no effect for libraries. It should " + ++ "only be used for executables." + + , check ("-rtsopts" `elem` lib_ghc_options) $ + PackageBuildWarning $ + "'ghc-options: -rtsopts' has no effect for libraries. It should " + ++ "only be used for executables." 
+ + , check (any (\opt -> "-with-rtsopts" `isPrefixOf` opt) lib_ghc_options) $ + PackageBuildWarning $ + "'ghc-options: -with-rtsopts' has no effect for libraries. It " + ++ "should only be used for executables." + + , checkAlternatives "ghc-options" "extensions" + [ (flag, display extension) | flag <- all_ghc_options + , Just extension <- [ghcExtension flag] ] + + , checkAlternatives "ghc-options" "extensions" + [ (flag, extension) | flag@('-':'X':extension) <- all_ghc_options ] + + , checkAlternatives "ghc-options" "cpp-options" $ + [ (flag, flag) | flag@('-':'D':_) <- all_ghc_options ] + ++ [ (flag, flag) | flag@('-':'U':_) <- all_ghc_options ] + + , checkAlternatives "ghc-options" "include-dirs" + [ (flag, dir) | flag@('-':'I':dir) <- all_ghc_options ] + + , checkAlternatives "ghc-options" "extra-libraries" + [ (flag, lib) | flag@('-':'l':lib) <- all_ghc_options ] + + , checkAlternatives "ghc-options" "extra-lib-dirs" + [ (flag, dir) | flag@('-':'L':dir) <- all_ghc_options ] + + , checkAlternatives "ghc-options" "frameworks" + [ (flag, fmwk) | (flag@"-framework", fmwk) <- + zip all_ghc_options (safeTail all_ghc_options) ] + + , checkAlternatives "ghc-options" "extra-framework-dirs" + [ (flag, dir) | (flag@"-framework-path", dir) <- + zip all_ghc_options (safeTail all_ghc_options) ] + ] + + where + all_ghc_options = concatMap get_ghc_options (allBuildInfo pkg) + lib_ghc_options = concatMap (get_ghc_options . libBuildInfo) + (allLibraries pkg) + get_ghc_options bi = hcOptions GHC bi ++ hcProfOptions GHC bi + ++ hcSharedOptions GHC bi + + test_ghc_options = concatMap (get_ghc_options . testBuildInfo) + (testSuites pkg) + benchmark_ghc_options = concatMap (get_ghc_options . 
benchmarkBuildInfo) + (benchmarks pkg) + test_and_benchmark_ghc_options = test_ghc_options ++ + benchmark_ghc_options + non_test_and_benchmark_ghc_options = concatMap get_ghc_options + (allBuildInfo (pkg { testSuites = [] + , benchmarks = [] + })) + + checkFlags :: [String] -> PackageCheck -> Maybe PackageCheck + checkFlags flags = check (any (`elem` flags) all_ghc_options) + + checkTestAndBenchmarkFlags :: [String] -> PackageCheck -> Maybe PackageCheck + checkTestAndBenchmarkFlags flags = check (any (`elem` flags) test_and_benchmark_ghc_options) + + checkNonTestAndBenchmarkFlags :: [String] -> PackageCheck -> Maybe PackageCheck + checkNonTestAndBenchmarkFlags flags = check (any (`elem` flags) non_test_and_benchmark_ghc_options) + + ghcExtension ('-':'f':name) = case name of + "allow-overlapping-instances" -> enable OverlappingInstances + "no-allow-overlapping-instances" -> disable OverlappingInstances + "th" -> enable TemplateHaskell + "no-th" -> disable TemplateHaskell + "ffi" -> enable ForeignFunctionInterface + "no-ffi" -> disable ForeignFunctionInterface + "fi" -> enable ForeignFunctionInterface + "no-fi" -> disable ForeignFunctionInterface + "monomorphism-restriction" -> enable MonomorphismRestriction + "no-monomorphism-restriction" -> disable MonomorphismRestriction + "mono-pat-binds" -> enable MonoPatBinds + "no-mono-pat-binds" -> disable MonoPatBinds + "allow-undecidable-instances" -> enable UndecidableInstances + "no-allow-undecidable-instances" -> disable UndecidableInstances + "allow-incoherent-instances" -> enable IncoherentInstances + "no-allow-incoherent-instances" -> disable IncoherentInstances + "arrows" -> enable Arrows + "no-arrows" -> disable Arrows + "generics" -> enable Generics + "no-generics" -> disable Generics + "implicit-prelude" -> enable ImplicitPrelude + "no-implicit-prelude" -> disable ImplicitPrelude + "implicit-params" -> enable ImplicitParams + "no-implicit-params" -> disable ImplicitParams + "bang-patterns" -> enable BangPatterns 
+ "no-bang-patterns" -> disable BangPatterns + "scoped-type-variables" -> enable ScopedTypeVariables + "no-scoped-type-variables" -> disable ScopedTypeVariables + "extended-default-rules" -> enable ExtendedDefaultRules + "no-extended-default-rules" -> disable ExtendedDefaultRules + _ -> Nothing + ghcExtension "-cpp" = enable CPP + ghcExtension _ = Nothing + + enable e = Just (EnableExtension e) + disable e = Just (DisableExtension e) + +checkCCOptions :: PackageDescription -> [PackageCheck] +checkCCOptions = checkCLikeOptions "C" "cc-options" ccOptions + +checkCxxOptions :: PackageDescription -> [PackageCheck] +checkCxxOptions = checkCLikeOptions "C++" "cxx-options" cxxOptions + +checkCLikeOptions :: String -> String -> (BuildInfo -> [String]) -> PackageDescription -> [PackageCheck] +checkCLikeOptions label prefix accessor pkg = + catMaybes [ + + checkAlternatives prefix "include-dirs" + [ (flag, dir) | flag@('-':'I':dir) <- all_cLikeOptions ] + + , checkAlternatives prefix "extra-libraries" + [ (flag, lib) | flag@('-':'l':lib) <- all_cLikeOptions ] + + , checkAlternatives prefix "extra-lib-dirs" + [ (flag, dir) | flag@('-':'L':dir) <- all_cLikeOptions ] + + , checkAlternatives "ld-options" "extra-libraries" + [ (flag, lib) | flag@('-':'l':lib) <- all_ldOptions ] + + , checkAlternatives "ld-options" "extra-lib-dirs" + [ (flag, dir) | flag@('-':'L':dir) <- all_ldOptions ] + + , checkCCFlags [ "-O", "-Os", "-O0", "-O1", "-O2", "-O3" ] $ + PackageDistSuspicious $ + "'"++prefix++": -O[n]' is generally not needed. When building with " + ++ " optimisations Cabal automatically adds '-O2' for "++label++" code. " + ++ "Setting it yourself interferes with the --disable-optimization flag." 
+ ] + + where all_cLikeOptions = [ opts | bi <- allBuildInfo pkg + , opts <- accessor bi ] + all_ldOptions = [ opts | bi <- allBuildInfo pkg + , opts <- ldOptions bi ] + + checkCCFlags :: [String] -> PackageCheck -> Maybe PackageCheck + checkCCFlags flags = check (any (`elem` flags) all_cLikeOptions) + +checkCPPOptions :: PackageDescription -> [PackageCheck] +checkCPPOptions pkg = + catMaybes [ + checkAlternatives "cpp-options" "include-dirs" + [ (flag, dir) | flag@('-':'I':dir) <- all_cppOptions] + ] + where all_cppOptions = [ opts | bi <- allBuildInfo pkg + , opts <- cppOptions bi ] + +checkAlternatives :: String -> String -> [(String, String)] + -> Maybe PackageCheck +checkAlternatives badField goodField flags = + check (not (null badFlags)) $ + PackageBuildWarning $ + "Instead of " ++ quote (badField ++ ": " ++ unwords badFlags) + ++ " use " ++ quote (goodField ++ ": " ++ unwords goodFlags) + + where (badFlags, goodFlags) = unzip flags + +checkPaths :: PackageDescription -> [PackageCheck] +checkPaths pkg = + [ PackageBuildWarning $ + quote (kind ++ ": " ++ path) + ++ " is a relative path outside of the source tree. " + ++ "This will not work when generating a tarball with 'sdist'." + | (path, kind) <- relPaths ++ absPaths + , isOutsideTree path ] + ++ + [ PackageDistInexcusable $ + quote (kind ++ ": " ++ path) ++ " is an absolute path." + | (path, kind) <- relPaths + , isAbsoluteOnAnyPlatform path ] + ++ + [ PackageDistInexcusable $ + quote (kind ++ ": " ++ path) ++ " points inside the 'dist' " + ++ "directory. This is not reliable because the location of this " + ++ "directory is configurable by the user (or package manager). In " + ++ "addition the layout of the 'dist' directory is subject to change " + ++ "in future versions of Cabal." + | (path, kind) <- relPaths ++ absPaths + , isInsideDist path ] + ++ + [ PackageDistInexcusable $ + "The 'ghc-options' contains the path '" ++ path ++ "' which points " + ++ "inside the 'dist' directory. 
This is not reliable because the " + ++ "location of this directory is configurable by the user (or package " + ++ "manager). In addition the layout of the 'dist' directory is subject " + ++ "to change in future versions of Cabal." + | bi <- allBuildInfo pkg + , (GHC, flags) <- options bi + , path <- flags + , isInsideDist path ] + ++ + [ PackageDistInexcusable $ + "In the 'data-files' field: " ++ explainGlobSyntaxError pat err + | pat <- dataFiles pkg + , Left err <- [parseFileGlob (specVersion pkg) pat] + ] + ++ + [ PackageDistInexcusable $ + "In the 'extra-source-files' field: " ++ explainGlobSyntaxError pat err + | pat <- extraSrcFiles pkg + , Left err <- [parseFileGlob (specVersion pkg) pat] + ] + ++ + [ PackageDistInexcusable $ + "In the 'extra-doc-files' field: " ++ explainGlobSyntaxError pat err + | pat <- extraDocFiles pkg + , Left err <- [parseFileGlob (specVersion pkg) pat] + ] + where + isOutsideTree path = case splitDirectories path of + "..":_ -> True + ".":"..":_ -> True + _ -> False + isInsideDist path = case map lowercase (splitDirectories path) of + "dist" :_ -> True + ".":"dist":_ -> True + _ -> False + -- paths that must be relative + relPaths = + [ (path, "extra-source-files") | path <- extraSrcFiles pkg ] + ++ [ (path, "extra-tmp-files") | path <- extraTmpFiles pkg ] + ++ [ (path, "extra-doc-files") | path <- extraDocFiles pkg ] + ++ [ (path, "data-files") | path <- dataFiles pkg ] + ++ [ (path, "data-dir") | path <- [dataDir pkg]] + ++ [ (path, "license-file") | path <- licenseFiles pkg ] + ++ concat + [ [ (path, "asm-sources") | path <- asmSources bi ] + ++ [ (path, "cmm-sources") | path <- cmmSources bi ] + ++ [ (path, "c-sources") | path <- cSources bi ] + ++ [ (path, "cxx-sources") | path <- cxxSources bi ] + ++ [ (path, "js-sources") | path <- jsSources bi ] + ++ [ (path, "install-includes") | path <- installIncludes bi ] + ++ [ (path, "hs-source-dirs") | path <- hsSourceDirs bi ] + | bi <- allBuildInfo pkg ] + -- paths that are allowed 
to be absolute + absPaths = concat + [ [ (path, "includes") | path <- includes bi ] + ++ [ (path, "include-dirs") | path <- includeDirs bi ] + ++ [ (path, "extra-lib-dirs") | path <- extraLibDirs bi ] + | bi <- allBuildInfo pkg ] + +--TODO: check sets of paths that would be interpreted differently between Unix +-- and windows, ie case-sensitive or insensitive. Things that might clash, or +-- conversely be distinguished. + +--TODO: use the tar path checks on all the above paths + +-- | Check that the package declares the version in the @\"cabal-version\"@ +-- field correctly. +-- +checkCabalVersion :: PackageDescription -> [PackageCheck] +checkCabalVersion pkg = + catMaybes [ + + -- check syntax of cabal-version field + check (specVersion pkg >= mkVersion [1,10] + && not simpleSpecVersionRangeSyntax) $ + PackageBuildWarning $ + "Packages relying on Cabal 1.10 or later must only specify a " + ++ "version range of the form 'cabal-version: >= x.y'. Use " + ++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'." + + -- check syntax of cabal-version field + , check (specVersion pkg < mkVersion [1,9] + && not simpleSpecVersionRangeSyntax) $ + PackageDistSuspicious $ + "It is recommended that the 'cabal-version' field only specify a " + ++ "version range of the form '>= x.y'. Use " + ++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'. " + ++ "Tools based on Cabal 1.10 and later will ignore upper bounds." + + -- check syntax of cabal-version field + , checkVersion [1,12] simpleSpecVersionSyntax $ + PackageBuildWarning $ + "With Cabal 1.10 or earlier, the 'cabal-version' field must use " + ++ "range syntax rather than a simple version number. Use " + ++ "'cabal-version: >= " ++ display (specVersion pkg) ++ "'." 
+ + -- check syntax of cabal-version field + , check (specVersion pkg >= mkVersion [1,12] + && not simpleSpecVersionSyntax) $ + (if specVersion pkg >= mkVersion [2,0] then PackageDistSuspicious else PackageDistSuspiciousWarn) $ + "Packages relying on Cabal 1.12 or later should specify a " + ++ "specific version of the Cabal spec of the form " + ++ "'cabal-version: x.y'. " + ++ "Use 'cabal-version: " ++ display (specVersion pkg) ++ "'." + + -- check use of test suite sections + , checkVersion [1,8] (not (null $ testSuites pkg)) $ + PackageDistInexcusable $ + "The 'test-suite' section is new in Cabal 1.10. " + ++ "Unfortunately it messes up the parser in older Cabal versions " + ++ "so you must specify at least 'cabal-version: >= 1.8', but note " + ++ "that only Cabal 1.10 and later can actually run such test suites." + + -- check use of default-language field + -- note that we do not need to do an equivalent check for the + -- other-language field since that one does not change behaviour + , checkVersion [1,10] (any isJust (buildInfoField defaultLanguage)) $ + PackageBuildWarning $ + "To use the 'default-language' field the package needs to specify " + ++ "at least 'cabal-version: >= 1.10'." + + , check (specVersion pkg >= mkVersion [1,10] + && (any isNothing (buildInfoField defaultLanguage))) $ + PackageBuildWarning $ + "Packages using 'cabal-version: >= 1.10' must specify the " + ++ "'default-language' field for each component (e.g. Haskell98 or " + ++ "Haskell2010). If a component uses different languages in " + ++ "different modules then list the other ones in the " + ++ "'other-languages' field." + + , checkVersion [1,18] + (not . null $ extraDocFiles pkg) $ + PackageDistInexcusable $ + "To use the 'extra-doc-files' field the package needs to specify " + ++ "at least 'cabal-version: >= 1.18'." 
+ + , checkVersion [2,0] + (not (null (subLibraries pkg))) $ + PackageDistInexcusable $ + "To use multiple 'library' sections or a named library section " + ++ "the package needs to specify at least 'cabal-version: 2.0'." + + -- check use of reexported-modules sections + , checkVersion [1,21] + (any (not.null.reexportedModules) (allLibraries pkg)) $ + PackageDistInexcusable $ + "To use the 'reexported-module' field the package needs to specify " + ++ "at least 'cabal-version: >= 1.22'." + + -- check use of thinning and renaming + , checkVersion [1,25] usesBackpackIncludes $ + PackageDistInexcusable $ + "To use the 'mixins' field the package needs to specify " + ++ "at least 'cabal-version: 2.0'." + + -- check use of 'extra-framework-dirs' field + , checkVersion [1,23] (any (not . null) (buildInfoField extraFrameworkDirs)) $ + -- Just a warning, because this won't break on old Cabal versions. + PackageDistSuspiciousWarn $ + "To use the 'extra-framework-dirs' field the package needs to specify" + ++ " at least 'cabal-version: >= 1.24'." + + -- check use of default-extensions field + -- don't need to do the equivalent check for other-extensions + , checkVersion [1,10] (any (not . null) (buildInfoField defaultExtensions)) $ + PackageBuildWarning $ + "To use the 'default-extensions' field the package needs to specify " + ++ "at least 'cabal-version: >= 1.10'." + + -- check use of extensions field + , check (specVersion pkg >= mkVersion [1,10] + && (any (not . null) (buildInfoField oldExtensions))) $ + PackageBuildWarning $ + "For packages using 'cabal-version: >= 1.10' the 'extensions' " + ++ "field is deprecated. The new 'default-extensions' field lists " + ++ "extensions that are used in all modules in the component, while " + ++ "the 'other-extensions' field lists extensions that are used in " + ++ "some modules, e.g. via the {-# LANGUAGE #-} pragma." 
+ + -- check use of "foo (>= 1.0 && < 1.4) || >=1.8 " version-range syntax + , checkVersion [1,8] (not (null versionRangeExpressions)) $ + PackageDistInexcusable $ + "The package uses full version-range expressions " + ++ "in a 'build-depends' field: " + ++ commaSep (map displayRawDependency versionRangeExpressions) + ++ ". To use this new syntax the package needs to specify at least " + ++ "'cabal-version: >= 1.8'. Alternatively, if broader compatibility " + ++ "is important, then convert to conjunctive normal form, and use " + ++ "multiple 'build-depends:' lines, one conjunct per line." + + -- check use of "build-depends: foo == 1.*" syntax + , checkVersion [1,6] (not (null depsUsingWildcardSyntax)) $ + PackageDistInexcusable $ + "The package uses wildcard syntax in the 'build-depends' field: " + ++ commaSep (map display depsUsingWildcardSyntax) + ++ ". To use this new syntax the package need to specify at least " + ++ "'cabal-version: >= 1.6'. Alternatively, if broader compatibility " + ++ "is important then use: " ++ commaSep + [ display (Dependency name (eliminateWildcardSyntax versionRange)) + | Dependency name versionRange <- depsUsingWildcardSyntax ] + + -- check use of "build-depends: foo ^>= 1.2.3" syntax + , checkVersion [2,0] (not (null depsUsingMajorBoundSyntax)) $ + PackageDistInexcusable $ + "The package uses major bounded version syntax in the " + ++ "'build-depends' field: " + ++ commaSep (map display depsUsingMajorBoundSyntax) + ++ ". To use this new syntax the package need to specify at least " + ++ "'cabal-version: 2.0'. Alternatively, if broader compatibility " + ++ "is important then use: " ++ commaSep + [ display (Dependency name (eliminateMajorBoundSyntax versionRange)) + | Dependency name versionRange <- depsUsingMajorBoundSyntax ] + + , checkVersion [2,1] (any (not . 
null) + (concatMap buildInfoField + [ asmSources + , cmmSources + , extraBundledLibs + , extraLibFlavours ])) $ + PackageDistInexcusable $ + "The use of 'asm-sources', 'cmm-sources', 'extra-bundled-libraries' " + ++ " and 'extra-library-flavours' requires the package " + ++ " to specify at least 'cabal-version: >= 2.1'." + + , checkVersion [2,1] (any (not . null) + (buildInfoField virtualModules)) $ + PackageDistInexcusable $ + "The use of 'virtual-modules' requires the package " + ++ " to specify at least 'cabal-version: >= 2.1'." + + -- check use of "tested-with: GHC (>= 1.0 && < 1.4) || >=1.8 " syntax + , checkVersion [1,8] (not (null testedWithVersionRangeExpressions)) $ + PackageDistInexcusable $ + "The package uses full version-range expressions " + ++ "in a 'tested-with' field: " + ++ commaSep (map displayRawDependency testedWithVersionRangeExpressions) + ++ ". To use this new syntax the package needs to specify at least " + ++ "'cabal-version: >= 1.8'." + + -- check use of "tested-with: GHC == 6.12.*" syntax + , checkVersion [1,6] (not (null testedWithUsingWildcardSyntax)) $ + PackageDistInexcusable $ + "The package uses wildcard syntax in the 'tested-with' field: " + ++ commaSep (map display testedWithUsingWildcardSyntax) + ++ ". To use this new syntax the package need to specify at least " + ++ "'cabal-version: >= 1.6'. Alternatively, if broader compatibility " + ++ "is important then use: " ++ commaSep + [ display (Dependency name (eliminateWildcardSyntax versionRange)) + | Dependency name versionRange <- testedWithUsingWildcardSyntax ] + + -- check use of "source-repository" section + , checkVersion [1,6] (not (null (sourceRepos pkg))) $ + PackageDistInexcusable $ + "The 'source-repository' section is new in Cabal 1.6. " + ++ "Unfortunately it messes up the parser in earlier Cabal versions " + ++ "so you need to specify 'cabal-version: >= 1.6'." 
+ + -- check for new language extensions + , checkVersion [1,2,3] (not (null mentionedExtensionsThatNeedCabal12)) $ + PackageDistInexcusable $ + "Unfortunately the language extensions " + ++ commaSep (map (quote . display) mentionedExtensionsThatNeedCabal12) + ++ " break the parser in earlier Cabal versions so you need to " + ++ "specify 'cabal-version: >= 1.2.3'. Alternatively if you require " + ++ "compatibility with earlier Cabal versions then you may be able to " + ++ "use an equivalent compiler-specific flag." + + , checkVersion [1,4] (not (null mentionedExtensionsThatNeedCabal14)) $ + PackageDistInexcusable $ + "Unfortunately the language extensions " + ++ commaSep (map (quote . display) mentionedExtensionsThatNeedCabal14) + ++ " break the parser in earlier Cabal versions so you need to " + ++ "specify 'cabal-version: >= 1.4'. Alternatively if you require " + ++ "compatibility with earlier Cabal versions then you may be able to " + ++ "use an equivalent compiler-specific flag." + + , check (specVersion pkg >= mkVersion [1,23] + && isNothing (setupBuildInfo pkg) + && buildType pkg == Custom) $ + PackageBuildWarning $ + "Packages using 'cabal-version: >= 1.24' with 'build-type: Custom' " + ++ "must use a 'custom-setup' section with a 'setup-depends' field " + ++ "that specifies the dependencies of the Setup.hs script itself. " + ++ "The 'setup-depends' field uses the same syntax as 'build-depends', " + ++ "so a simple example would be 'setup-depends: base, Cabal'." + + , check (specVersion pkg < mkVersion [1,23] + && isNothing (setupBuildInfo pkg) + && buildType pkg == Custom) $ + PackageDistSuspiciousWarn $ + "From version 1.24 cabal supports specifiying explicit dependencies " + ++ "for Custom setup scripts. Consider using cabal-version >= 1.24 and " + ++ "adding a 'custom-setup' section with a 'setup-depends' field " + ++ "that specifies the dependencies of the Setup.hs script itself. 
" + ++ "The 'setup-depends' field uses the same syntax as 'build-depends', " + ++ "so a simple example would be 'setup-depends: base, Cabal'." + + , check (specVersion pkg >= mkVersion [1,25] + && elem (autogenPathsModuleName pkg) allModuleNames + && not (elem (autogenPathsModuleName pkg) allModuleNamesAutogen) ) $ + PackageDistInexcusable $ + "Packages using 'cabal-version: 2.0' and the autogenerated " + ++ "module Paths_* must include it also on the 'autogen-modules' field " + ++ "besides 'exposed-modules' and 'other-modules'. This specifies that " + ++ "the module does not come with the package and is generated on " + ++ "setup. Modules built with a custom Setup.hs script also go here " + ++ "to ensure that commands like sdist don't fail." + + ] + where + -- Perform a check on packages that use a version of the spec less than + -- the version given. This is for cases where a new Cabal version adds + -- a new feature and we want to check that it is not used prior to that + -- version. + checkVersion :: [Int] -> Bool -> PackageCheck -> Maybe PackageCheck + checkVersion ver cond pc + | specVersion pkg >= mkVersion ver = Nothing + | otherwise = check cond pc + + buildInfoField field = map field (allBuildInfo pkg) + + versionRangeExpressions = + [ dep | dep@(Dependency _ vr) <- allBuildDepends pkg + , usesNewVersionRangeSyntax vr ] + + testedWithVersionRangeExpressions = + [ Dependency (mkPackageName (display compiler)) vr + | (compiler, vr) <- testedWith pkg + , usesNewVersionRangeSyntax vr ] + + simpleSpecVersionRangeSyntax = + either (const True) (cataVersionRange alg) (specVersionRaw pkg) + where + alg (OrLaterVersionF _) = True + alg _ = False + + -- is the cabal-version field a simple version number, rather than a range + simpleSpecVersionSyntax = + either (const True) (const False) (specVersionRaw pkg) + + usesNewVersionRangeSyntax :: VersionRange -> Bool + usesNewVersionRangeSyntax + = (> 2) -- uses the new syntax if depth is more than 2 + . 
cataVersionRange alg + where + alg (UnionVersionRangesF a b) = a + b + alg (IntersectVersionRangesF a b) = a + b + alg (VersionRangeParensF _) = 3 + alg _ = 1 :: Int + + depsUsingWildcardSyntax = [ dep | dep@(Dependency _ vr) <- allBuildDepends pkg + , usesWildcardSyntax vr ] + + depsUsingMajorBoundSyntax = [ dep | dep@(Dependency _ vr) <- allBuildDepends pkg + , usesMajorBoundSyntax vr ] + + usesBackpackIncludes = any (not . null . mixins) (allBuildInfo pkg) + + testedWithUsingWildcardSyntax = + [ Dependency (mkPackageName (display compiler)) vr + | (compiler, vr) <- testedWith pkg + , usesWildcardSyntax vr ] + + usesWildcardSyntax :: VersionRange -> Bool + usesWildcardSyntax = cataVersionRange alg + where + alg (WildcardVersionF _) = True + alg (UnionVersionRangesF a b) = a || b + alg (IntersectVersionRangesF a b) = a || b + alg (VersionRangeParensF a) = a + alg _ = False + + -- NB: this eliminates both, WildcardVersion and MajorBoundVersion + -- because when WildcardVersion is not support, neither is MajorBoundVersion + eliminateWildcardSyntax = hyloVersionRange embed projectVersionRange + where + embed (WildcardVersionF v) = intersectVersionRanges + (orLaterVersion v) (earlierVersion (wildcardUpperBound v)) + embed (MajorBoundVersionF v) = intersectVersionRanges + (orLaterVersion v) (earlierVersion (majorUpperBound v)) + embed vr = embedVersionRange vr + + usesMajorBoundSyntax :: VersionRange -> Bool + usesMajorBoundSyntax = cataVersionRange alg + where + alg (MajorBoundVersionF _) = True + alg (UnionVersionRangesF a b) = a || b + alg (IntersectVersionRangesF a b) = a || b + alg (VersionRangeParensF a) = a + alg _ = False + + eliminateMajorBoundSyntax = hyloVersionRange embed projectVersionRange + where + embed (MajorBoundVersionF v) = intersectVersionRanges + (orLaterVersion v) (earlierVersion (majorUpperBound v)) + embed vr = embedVersionRange vr + + mentionedExtensions = [ ext | bi <- allBuildInfo pkg + , ext <- allExtensions bi ] + 
mentionedExtensionsThatNeedCabal12 = + nub (filter (`elem` compatExtensionsExtra) mentionedExtensions) + + -- As of Cabal-1.4 we can add new extensions without worrying about + -- breaking old versions of cabal. + mentionedExtensionsThatNeedCabal14 = + nub (filter (`notElem` compatExtensions) mentionedExtensions) + + -- The known extensions in Cabal-1.2.3 + compatExtensions = + map EnableExtension + [ OverlappingInstances, UndecidableInstances, IncoherentInstances + , RecursiveDo, ParallelListComp, MultiParamTypeClasses + , FunctionalDependencies, Rank2Types + , RankNTypes, PolymorphicComponents, ExistentialQuantification + , ScopedTypeVariables, ImplicitParams, FlexibleContexts + , FlexibleInstances, EmptyDataDecls, CPP, BangPatterns + , TypeSynonymInstances, TemplateHaskell, ForeignFunctionInterface + , Arrows, Generics, NamedFieldPuns, PatternGuards + , GeneralizedNewtypeDeriving, ExtensibleRecords, RestrictedTypeSynonyms + , HereDocuments] ++ + map DisableExtension + [MonomorphismRestriction, ImplicitPrelude] ++ + compatExtensionsExtra + + -- The extra known extensions in Cabal-1.2.3 vs Cabal-1.1.6 + -- (Cabal-1.1.6 came with ghc-6.6. 
Cabal-1.2 came with ghc-6.8) + compatExtensionsExtra = + map EnableExtension + [ KindSignatures, MagicHash, TypeFamilies, StandaloneDeriving + , UnicodeSyntax, PatternSignatures, UnliftedFFITypes, LiberalTypeSynonyms + , TypeOperators, RecordWildCards, RecordPuns, DisambiguateRecordFields + , OverloadedStrings, GADTs, RelaxedPolyRec + , ExtendedDefaultRules, UnboxedTuples, DeriveDataTypeable + , ConstrainedClassMethods + ] ++ + map DisableExtension + [MonoPatBinds] + + allModuleNames = + (case library pkg of + Nothing -> [] + (Just lib) -> explicitLibModules lib + ) + ++ concatMap otherModules (allBuildInfo pkg) + + allModuleNamesAutogen = concatMap autogenModules (allBuildInfo pkg) + +displayRawDependency :: Dependency -> String +displayRawDependency (Dependency pkg vr) = + display pkg ++ " " ++ display vr + + +-- ------------------------------------------------------------ +-- * Checks on the GenericPackageDescription +-- ------------------------------------------------------------ + +-- | Check the build-depends fields for any weirdness or bad practise. +-- +checkPackageVersions :: GenericPackageDescription -> [PackageCheck] +checkPackageVersions pkg = + catMaybes [ + + -- Check that the version of base is bounded above. + -- For example this bans "build-depends: base >= 3". + -- It should probably be "build-depends: base >= 3 && < 4" + -- which is the same as "build-depends: base == 3.*" + check (not (boundedAbove baseDependency)) $ + PackageDistInexcusable $ + "The dependency 'build-depends: base' does not specify an upper " + ++ "bound on the version number. Each major release of the 'base' " + ++ "package changes the API in various ways and most packages will " + ++ "need some changes to compile with it. The recommended practise " + ++ "is to specify an upper bound on the version of the 'base' " + ++ "package. This ensures your package will continue to build when a " + ++ "new major version of the 'base' package is released. 
If you are " + ++ "not sure what upper bound to use then use the next major " + ++ "version. For example if you have tested your package with 'base' " + ++ "version 4.5 and 4.6 then use 'build-depends: base >= 4.5 && < 4.7'." + + ] + where + -- TODO: What we really want to do is test if there exists any + -- configuration in which the base version is unbounded above. + -- However that's a bit tricky because there are many possible + -- configurations. As a cheap easy and safe approximation we will + -- pick a single "typical" configuration and check if that has an + -- open upper bound. To get a typical configuration we finalise + -- using no package index and the current platform. + finalised = finalizePD + mempty defaultComponentRequestedSpec (const True) + buildPlatform + (unknownCompilerInfo + (CompilerId buildCompilerFlavor nullVersion) + NoAbiTag) + [] pkg + baseDependency = case finalised of + Right (pkg', _) | not (null baseDeps) -> + foldr intersectVersionRanges anyVersion baseDeps + where + baseDeps = + [ vr | Dependency pname vr <- allBuildDepends pkg' + , pname == mkPackageName "base" ] + + -- Just in case finalizePD fails for any reason, + -- or if the package doesn't depend on the base package at all, + -- then we will just skip the check, since boundedAbove noVersion = True + _ -> noVersion + + boundedAbove :: VersionRange -> Bool + boundedAbove vr = case asVersionIntervals vr of + [] -> True -- this is the inconsistent version range. 
+ intervals -> case last intervals of + (_, UpperBound _ _) -> True + (_, NoUpperBound ) -> False + + +checkConditionals :: GenericPackageDescription -> [PackageCheck] +checkConditionals pkg = + catMaybes [ + + check (not $ null unknownOSs) $ + PackageDistInexcusable $ + "Unknown operating system name " + ++ commaSep (map quote unknownOSs) + + , check (not $ null unknownArches) $ + PackageDistInexcusable $ + "Unknown architecture name " + ++ commaSep (map quote unknownArches) + + , check (not $ null unknownImpls) $ + PackageDistInexcusable $ + "Unknown compiler name " + ++ commaSep (map quote unknownImpls) + ] + where + unknownOSs = [ os | OS (OtherOS os) <- conditions ] + unknownArches = [ arch | Arch (OtherArch arch) <- conditions ] + unknownImpls = [ impl | Impl (OtherCompiler impl) _ <- conditions ] + conditions = concatMap fvs (maybeToList (condLibrary pkg)) + ++ concatMap (fvs . snd) (condSubLibraries pkg) + ++ concatMap (fvs . snd) (condForeignLibs pkg) + ++ concatMap (fvs . snd) (condExecutables pkg) + ++ concatMap (fvs . snd) (condTestSuites pkg) + ++ concatMap (fvs . snd) (condBenchmarks pkg) + fvs (CondNode _ _ ifs) = concatMap compfv ifs -- free variables + compfv (CondBranch c ct mct) = condfv c ++ fvs ct ++ maybe [] fvs mct + condfv c = case c of + Var v -> [v] + Lit _ -> [] + CNot c1 -> condfv c1 + COr c1 c2 -> condfv c1 ++ condfv c2 + CAnd c1 c2 -> condfv c1 ++ condfv c2 + +checkFlagNames :: GenericPackageDescription -> [PackageCheck] +checkFlagNames gpd + | null invalidFlagNames = [] + | otherwise = [ PackageDistInexcusable + $ "Suspicious flag names: " ++ unwords invalidFlagNames ++ ". " + ++ "To avoid ambiguity in command line interfaces, flag shouldn't " + ++ "start with a dash. Also for better compatibility, flag names " + ++ "shouldn't contain non-ascii characters." 
+ ] + where + invalidFlagNames = + [ fn + | flag <- genPackageFlags gpd + , let fn = unFlagName (flagName flag) + , invalidFlagName fn + ] + -- starts with dash + invalidFlagName ('-':_) = True + -- mon ascii letter + invalidFlagName cs = any (not . isAscii) cs + +checkUnusedFlags :: GenericPackageDescription -> [PackageCheck] +checkUnusedFlags gpd + | declared == used = [] + | otherwise = [ PackageDistSuspicious + $ "Declared and used flag sets differ: " + ++ s declared ++ " /= " ++ s used ++ ". " + ] + where + s :: Set.Set FlagName -> String + s = commaSep . map unFlagName . Set.toList + + declared :: Set.Set FlagName + declared = toSetOf (L.genPackageFlags . traverse . L.flagName) gpd + + used :: Set.Set FlagName + used = mconcat + [ toSetOf (L.condLibrary . traverse . traverseCondTreeV . L._Flag) gpd + , toSetOf (L.condSubLibraries . traverse . _2 . traverseCondTreeV . L._Flag) gpd + , toSetOf (L.condForeignLibs . traverse . _2 . traverseCondTreeV . L._Flag) gpd + , toSetOf (L.condExecutables . traverse . _2 . traverseCondTreeV . L._Flag) gpd + , toSetOf (L.condTestSuites . traverse . _2 . traverseCondTreeV . L._Flag) gpd + , toSetOf (L.condBenchmarks . traverse . _2 . traverseCondTreeV . L._Flag) gpd + ] + +checkUnicodeXFields :: GenericPackageDescription -> [PackageCheck] +checkUnicodeXFields gpd + | null nonAsciiXFields = [] + | otherwise = [ PackageDistInexcusable + $ "Non ascii custom fields: " ++ unwords nonAsciiXFields ++ ". " + ++ "For better compatibility, custom field names " + ++ "shouldn't contain non-ascii characters." + ] + where + nonAsciiXFields :: [String] + nonAsciiXFields = [ n | (n, _) <- xfields, any (not . isAscii) n ] + + xfields :: [(String,String)] + xfields = DList.runDList $ mconcat + [ toDListOf (L.packageDescription . L.customFieldsPD . traverse) gpd + , toDListOf (L.traverseBuildInfos . L.customFieldsBI . traverse) gpd + ] + +-- | cabal-version <2.2 + Paths_module + default-extensions: doesn't build. 
+checkPathsModuleExtensions :: PackageDescription -> [PackageCheck] +checkPathsModuleExtensions pd + | specVersion pd >= mkVersion [2,1] = [] + | any checkBI (allBuildInfo pd) || any checkLib (allLibraries pd) + = return $ PackageBuildImpossible $ unwords + [ "The package uses RebindableSyntax with OverloadedStrings or OverloadedLists" + , "in default-extensions, and also Paths_ autogen module." + , "That configuration is known to cause compile failures with Cabal < 2.2." + , "To use these default-extensions with Paths_ autogen module" + , "specify at least 'cabal-version: 2.2'." + ] + | otherwise = [] + where + mn = autogenPathsModuleName pd + + checkLib :: Library -> Bool + checkLib l = mn `elem` exposedModules l && checkExts (l ^. L.defaultExtensions) + + checkBI :: BuildInfo -> Bool + checkBI bi = + (mn `elem` otherModules bi || mn `elem` autogenModules bi) && + checkExts (bi ^. L.defaultExtensions) + + checkExts exts = rebind `elem` exts && (strings `elem` exts || lists `elem` exts) + where + rebind = EnableExtension RebindableSyntax + strings = EnableExtension OverloadedStrings + lists = EnableExtension OverloadedLists + +checkDevelopmentOnlyFlagsBuildInfo :: BuildInfo -> [PackageCheck] +checkDevelopmentOnlyFlagsBuildInfo bi = + catMaybes [ + + check has_WerrorWall $ + PackageDistInexcusable $ + "'ghc-options: -Wall -Werror' makes the package very easy to " + ++ "break with future GHC versions because new GHC versions often " + ++ "add new warnings. Use just 'ghc-options: -Wall' instead." + ++ extraExplanation + + , check (not has_WerrorWall && has_Werror) $ + PackageDistInexcusable $ + "'ghc-options: -Werror' makes the package easy to " + ++ "break with future GHC versions because new GHC versions often " + ++ "add new warnings. " + ++ extraExplanation + + , check (has_J) $ + PackageDistInexcusable $ + "'ghc-options: -j[N]' can make sense for specific user's setup," + ++ " but it is not appropriate for a distributed package." 
+ ++ extraExplanation + + , checkFlags ["-fdefer-type-errors"] $ + PackageDistInexcusable $ + "'ghc-options: -fdefer-type-errors' is fine during development but " + ++ "is not appropriate for a distributed package. " + ++ extraExplanation + + -- -dynamic is not a debug flag + , check (any (\opt -> "-d" `isPrefixOf` opt && opt /= "-dynamic") + ghc_options) $ + PackageDistInexcusable $ + "'ghc-options: -d*' debug flags are not appropriate " + ++ "for a distributed package. " + ++ extraExplanation + + , checkFlags ["-fprof-auto", "-fprof-auto-top", "-fprof-auto-calls", + "-fprof-cafs", "-fno-prof-count-entries", + "-auto-all", "-auto", "-caf-all"] $ + PackageDistSuspicious $ + "'ghc-options/ghc-prof-options: -fprof*' profiling flags are typically not " + ++ "appropriate for a distributed library package. These flags are " + ++ "useful to profile this package, but when profiling other packages " + ++ "that use this one these flags clutter the profile output with " + ++ "excessive detail. If you think other packages really want to see " + ++ "cost centres from this package then use '-fprof-auto-exported' " + ++ "which puts cost centres only on exported functions. " + ++ extraExplanation + ] + where + extraExplanation = + " Alternatively, if you want to use this, make it conditional based " + ++ "on a Cabal configuration flag (with 'manual: True' and 'default: " + ++ "False') and enable that flag during development." 
+ + has_WerrorWall = has_Werror && ( has_Wall || has_W ) + has_Werror = "-Werror" `elem` ghc_options + has_Wall = "-Wall" `elem` ghc_options + has_W = "-W" `elem` ghc_options + has_J = any + (\o -> case o of + "-j" -> True + ('-' : 'j' : d : _) -> isDigit d + _ -> False + ) + ghc_options + ghc_options = hcOptions GHC bi ++ hcProfOptions GHC bi + ++ hcSharedOptions GHC bi + + checkFlags :: [String] -> PackageCheck -> Maybe PackageCheck + checkFlags flags = check (any (`elem` flags) ghc_options) + +checkDevelopmentOnlyFlags :: GenericPackageDescription -> [PackageCheck] +checkDevelopmentOnlyFlags pkg = + concatMap checkDevelopmentOnlyFlagsBuildInfo + [ bi + | (conditions, bi) <- allConditionalBuildInfo + , not (any guardedByManualFlag conditions) ] + where + guardedByManualFlag = definitelyFalse + + -- We've basically got three-values logic here: True, False or unknown + -- hence this pattern to propagate the unknown cases properly. + definitelyFalse (Var (Flag n)) = maybe False not (Map.lookup n manualFlags) + definitelyFalse (Var _) = False + definitelyFalse (Lit b) = not b + definitelyFalse (CNot c) = definitelyTrue c + definitelyFalse (COr c1 c2) = definitelyFalse c1 && definitelyFalse c2 + definitelyFalse (CAnd c1 c2) = definitelyFalse c1 || definitelyFalse c2 + + definitelyTrue (Var (Flag n)) = fromMaybe False (Map.lookup n manualFlags) + definitelyTrue (Var _) = False + definitelyTrue (Lit b) = b + definitelyTrue (CNot c) = definitelyFalse c + definitelyTrue (COr c1 c2) = definitelyTrue c1 || definitelyTrue c2 + definitelyTrue (CAnd c1 c2) = definitelyTrue c1 && definitelyTrue c2 + + manualFlags = Map.fromList + [ (flagName flag, flagDefault flag) + | flag <- genPackageFlags pkg + , flagManual flag ] + + allConditionalBuildInfo :: [([Condition ConfVar], BuildInfo)] + allConditionalBuildInfo = + concatMap (collectCondTreePaths libBuildInfo) + (maybeToList (condLibrary pkg)) + + ++ concatMap (collectCondTreePaths libBuildInfo . 
snd) + (condSubLibraries pkg) + + ++ concatMap (collectCondTreePaths buildInfo . snd) + (condExecutables pkg) + + ++ concatMap (collectCondTreePaths testBuildInfo . snd) + (condTestSuites pkg) + + ++ concatMap (collectCondTreePaths benchmarkBuildInfo . snd) + (condBenchmarks pkg) + + -- get all the leaf BuildInfo, paired up with the path (in the tree sense) + -- of if-conditions that guard it + collectCondTreePaths :: (a -> b) + -> CondTree v c a + -> [([Condition v], b)] + collectCondTreePaths mapData = go [] + where + go conditions condNode = + -- the data at this level in the tree: + (reverse conditions, mapData (condTreeData condNode)) + + : concat + [ go (condition:conditions) ifThen + | (CondBranch condition ifThen _) <- condTreeComponents condNode ] + + ++ concat + [ go (condition:conditions) elseThen + | (CondBranch condition _ (Just elseThen)) <- condTreeComponents condNode ] + + +-- ------------------------------------------------------------ +-- * Checks involving files in the package +-- ------------------------------------------------------------ + +-- | Sanity check things that requires IO. It looks at the files in the +-- package and expects to find the package unpacked in at the given file path. +-- +checkPackageFiles :: Verbosity -> PackageDescription -> FilePath -> NoCallStackIO [PackageCheck] +checkPackageFiles verbosity pkg root = do + contentChecks <- checkPackageContent checkFilesIO pkg + preDistributionChecks <- checkPackageFilesPreDistribution verbosity pkg root + -- Sort because different platforms will provide files from + -- `getDirectoryContents` in different orders, and we'd like to be + -- stable for test output. + return (sort contentChecks ++ sort preDistributionChecks) + where + checkFilesIO = CheckPackageContentOps { + doesFileExist = System.doesFileExist . relative, + doesDirectoryExist = System.doesDirectoryExist . relative, + getDirectoryContents = System.Directory.getDirectoryContents . 
relative, + getFileContents = BS.readFile . relative + } + relative path = root path + +-- | A record of operations needed to check the contents of packages. +-- Used by 'checkPackageContent'. +-- +data CheckPackageContentOps m = CheckPackageContentOps { + doesFileExist :: FilePath -> m Bool, + doesDirectoryExist :: FilePath -> m Bool, + getDirectoryContents :: FilePath -> m [FilePath], + getFileContents :: FilePath -> m BS.ByteString + } + +-- | Sanity check things that requires looking at files in the package. +-- This is a generalised version of 'checkPackageFiles' that can work in any +-- monad for which you can provide 'CheckPackageContentOps' operations. +-- +-- The point of this extra generality is to allow doing checks in some virtual +-- file system, for example a tarball in memory. +-- +checkPackageContent :: Monad m => CheckPackageContentOps m + -> PackageDescription + -> m [PackageCheck] +checkPackageContent ops pkg = do + cabalBomError <- checkCabalFileBOM ops + cabalNameError <- checkCabalFileName ops pkg + licenseErrors <- checkLicensesExist ops pkg + setupError <- checkSetupExists ops pkg + configureError <- checkConfigureExists ops pkg + localPathErrors <- checkLocalPathsExist ops pkg + vcsLocation <- checkMissingVcsInfo ops pkg + + return $ licenseErrors + ++ catMaybes [cabalBomError, cabalNameError, setupError, configureError] + ++ localPathErrors + ++ vcsLocation + +checkCabalFileBOM :: Monad m => CheckPackageContentOps m + -> m (Maybe PackageCheck) +checkCabalFileBOM ops = do + epdfile <- findPackageDesc ops + case epdfile of + -- MASSIVE HACK. If the Cabal file doesn't exist, that is + -- a very strange situation to be in, because the driver code + -- in 'Distribution.Setup' ought to have noticed already! + -- But this can be an issue, see #3552 and also when + -- --cabal-file is specified. So if you can't find the file, + -- just don't bother with this check. + Left _ -> return $ Nothing + Right pdfile -> (flip check pc . 
BS.isPrefixOf bomUtf8) + `liftM` (getFileContents ops pdfile) + where pc = PackageDistInexcusable $ + pdfile ++ " starts with an Unicode byte order mark (BOM)." + ++ " This may cause problems with older cabal versions." + + where + bomUtf8 :: BS.ByteString + bomUtf8 = BS.pack [0xef,0xbb,0xbf] -- U+FEFF encoded as UTF8 + +checkCabalFileName :: Monad m => CheckPackageContentOps m + -> PackageDescription + -> m (Maybe PackageCheck) +checkCabalFileName ops pkg = do + -- findPackageDesc already takes care to detect missing/multiple + -- .cabal files; we don't include this check in 'findPackageDesc' in + -- order not to short-cut other checks which call 'findPackageDesc' + epdfile <- findPackageDesc ops + case epdfile of + -- see "MASSIVE HACK" note in 'checkCabalFileBOM' + Left _ -> return Nothing + Right pdfile + | takeFileName pdfile == expectedCabalname -> return Nothing + | otherwise -> return $ Just $ PackageDistInexcusable $ + "The filename " ++ pdfile ++ " does not match package name " ++ + "(expected: " ++ expectedCabalname ++ ")" + where + pkgname = unPackageName . packageName $ pkg + expectedCabalname = pkgname <.> "cabal" + + +-- |Find a package description file in the given directory. Looks for +-- @.cabal@ files. Like 'Distribution.Simple.Utils.findPackageDesc', +-- but generalized over monads. +findPackageDesc :: Monad m => CheckPackageContentOps m + -> m (Either PackageCheck FilePath) -- ^.cabal +findPackageDesc ops + = do let dir = "." 
+ files <- getDirectoryContents ops dir + -- to make sure we do not mistake a ~/.cabal/ dir for a .cabal + -- file we filter to exclude dirs and null base file names: + cabalFiles <- filterM (doesFileExist ops) + [ dir file + | file <- files + , let (name, ext) = splitExtension file + , not (null name) && ext == ".cabal" ] + case cabalFiles of + [] -> return (Left $ PackageBuildImpossible noDesc) + [cabalFile] -> return (Right cabalFile) + multiple -> return (Left $ PackageBuildImpossible + $ multiDesc multiple) + + where + noDesc :: String + noDesc = "No cabal file found.\n" + ++ "Please create a package description file .cabal" + + multiDesc :: [String] -> String + multiDesc l = "Multiple cabal files found while checking.\n" + ++ "Please use only one of: " + ++ intercalate ", " l + +checkLicensesExist :: Monad m => CheckPackageContentOps m + -> PackageDescription + -> m [PackageCheck] +checkLicensesExist ops pkg = do + exists <- mapM (doesFileExist ops) (licenseFiles pkg) + return + [ PackageBuildWarning $ + "The '" ++ fieldname ++ "' field refers to the file " + ++ quote file ++ " which does not exist." + | (file, False) <- zip (licenseFiles pkg) exists ] + where + fieldname | length (licenseFiles pkg) == 1 = "license-file" + | otherwise = "license-files" + +checkSetupExists :: Monad m => CheckPackageContentOps m + -> PackageDescription + -> m (Maybe PackageCheck) +checkSetupExists ops pkg = do + let simpleBuild = buildType pkg == Simple + hsexists <- doesFileExist ops "Setup.hs" + lhsexists <- doesFileExist ops "Setup.lhs" + return $ check (not simpleBuild && not hsexists && not lhsexists) $ + PackageDistInexcusable $ + "The package is missing a Setup.hs or Setup.lhs script." 
+ +checkConfigureExists :: Monad m => CheckPackageContentOps m + -> PackageDescription + -> m (Maybe PackageCheck) +checkConfigureExists ops pd + | buildType pd == Configure = do + exists <- doesFileExist ops "configure" + return $ check (not exists) $ + PackageBuildWarning $ + "The 'build-type' is 'Configure' but there is no 'configure' script. " + ++ "You probably need to run 'autoreconf -i' to generate it." + | otherwise = return Nothing + +checkLocalPathsExist :: Monad m => CheckPackageContentOps m + -> PackageDescription + -> m [PackageCheck] +checkLocalPathsExist ops pkg = do + let dirs = [ (dir, kind) + | bi <- allBuildInfo pkg + , (dir, kind) <- + [ (dir, "extra-lib-dirs") | dir <- extraLibDirs bi ] + ++ [ (dir, "extra-framework-dirs") + | dir <- extraFrameworkDirs bi ] + ++ [ (dir, "include-dirs") | dir <- includeDirs bi ] + ++ [ (dir, "hs-source-dirs") | dir <- hsSourceDirs bi ] + , isRelativeOnAnyPlatform dir ] + missing <- filterM (liftM not . doesDirectoryExist ops . fst) dirs + return [ PackageBuildWarning { + explanation = quote (kind ++ ": " ++ dir) + ++ " directory does not exist." + } + | (dir, kind) <- missing ] + +checkMissingVcsInfo :: Monad m => CheckPackageContentOps m + -> PackageDescription + -> m [PackageCheck] +checkMissingVcsInfo ops pkg | null (sourceRepos pkg) = do + vcsInUse <- liftM or $ mapM (doesDirectoryExist ops) repoDirnames + if vcsInUse + then return [ PackageDistSuspicious message ] + else return [] + where + repoDirnames = [ dirname | repo <- knownRepoTypes + , dirname <- repoTypeDirname repo ] + message = "When distributing packages it is encouraged to specify source " + ++ "control information in the .cabal file using one or more " + ++ "'source-repository' sections. See the Cabal user guide for " + ++ "details." 
+ +checkMissingVcsInfo _ _ = return [] + +repoTypeDirname :: RepoType -> [FilePath] +repoTypeDirname Darcs = ["_darcs"] +repoTypeDirname Git = [".git"] +repoTypeDirname SVN = [".svn"] +repoTypeDirname CVS = ["CVS"] +repoTypeDirname Mercurial = [".hg"] +repoTypeDirname GnuArch = [".arch-params"] +repoTypeDirname Bazaar = [".bzr"] +repoTypeDirname Monotone = ["_MTN"] +repoTypeDirname _ = [] + + +-- ------------------------------------------------------------ +-- * Checks involving files in the package +-- ------------------------------------------------------------ + +-- | Check the names of all files in a package for portability problems. This +-- should be done for example when creating or validating a package tarball. +-- +checkPackageFileNames :: [FilePath] -> [PackageCheck] +checkPackageFileNames files = + (take 1 . mapMaybe checkWindowsPath $ files) + ++ (take 1 . mapMaybe checkTarPath $ files) + -- If we get any of these checks triggering then we're likely to get + -- many, and that's probably not helpful, so return at most one. + +checkWindowsPath :: FilePath -> Maybe PackageCheck +checkWindowsPath path = + check (not $ FilePath.Windows.isValid path') $ + PackageDistInexcusable $ + "Unfortunately, the file " ++ quote path ++ " is not a valid file " + ++ "name on Windows which would cause portability problems for this " + ++ "package. Windows file names cannot contain any of the characters " + ++ "\":*?<>|\" and there are a few reserved names including \"aux\", " + ++ "\"nul\", \"con\", \"prn\", \"com1-9\", \"lpt1-9\" and \"clock$\"." + where + path' = ".\\" ++ path + -- force a relative name to catch invalid file names like "f:oo" which + -- otherwise parse as file "oo" in the current directory on the 'f' drive. + +-- | Check a file name is valid for the portable POSIX tar format. +-- +-- The POSIX tar format has a restriction on the length of file names. It is +-- unfortunately not a simple restriction like a maximum length. 
The exact +-- restriction is that either the whole path be 100 characters or less, or it +-- be possible to split the path on a directory separator such that the first +-- part is 155 characters or less and the second part 100 characters or less. +-- +checkTarPath :: FilePath -> Maybe PackageCheck +checkTarPath path + | length path > 255 = Just longPath + | otherwise = case pack nameMax (reverse (splitPath path)) of + Left err -> Just err + Right [] -> Nothing + Right (h:rest) -> case pack prefixMax remainder of + Left err -> Just err + Right [] -> Nothing + Right (_:_) -> Just noSplit + where + -- drop the '/' between the name and prefix: + remainder = init h : rest + + where + nameMax, prefixMax :: Int + nameMax = 100 + prefixMax = 155 + + pack _ [] = Left emptyName + pack maxLen (c:cs) + | n > maxLen = Left longName + | otherwise = Right (pack' maxLen n cs) + where n = length c + + pack' maxLen n (c:cs) + | n' <= maxLen = pack' maxLen n' cs + where n' = n + length c + pack' _ _ cs = cs + + longPath = PackageDistInexcusable $ + "The following file name is too long to store in a portable POSIX " + ++ "format tar archive. The maximum length is 255 ASCII characters.\n" + ++ "The file in question is:\n " ++ path + longName = PackageDistInexcusable $ + "The following file name is too long to store in a portable POSIX " + ++ "format tar archive. The maximum length for the name part (including " + ++ "extension) is 100 ASCII characters. The maximum length for any " + ++ "individual directory component is 155.\n" + ++ "The file in question is:\n " ++ path + noSplit = PackageDistInexcusable $ + "The following file name is too long to store in a portable POSIX " + ++ "format tar archive. While the total length is less than 255 ASCII " + ++ "characters, there are unfortunately further restrictions. 
It has to " + ++ "be possible to split the file path on a directory separator into " + ++ "two parts such that the first part fits in 155 characters or less " + ++ "and the second part fits in 100 characters or less. Basically you " + ++ "have to make the file name or directory names shorter, or you could " + ++ "split a long directory name into nested subdirectories with shorter " + ++ "names.\nThe file in question is:\n " ++ path + emptyName = PackageDistInexcusable $ + "Encountered a file with an empty name, something is very wrong! " + ++ "Files with an empty name cannot be stored in a tar archive or in " + ++ "standard file systems." + +-- -------------------------------------------------------------- +-- * Checks for missing content and other pre-distribution checks +-- -------------------------------------------------------------- + +-- | Similar to 'checkPackageContent', 'checkPackageFilesPreDistribution' +-- inspects the files included in the package, but is primarily looking for +-- files in the working tree that may have been missed or other similar +-- problems that can only be detected pre-distribution. +-- +-- Because Hackage necessarily checks the uploaded tarball, it is too late to +-- check these on the server; these checks only make sense in the development +-- and package-creation environment. Hence we can use IO, rather than needing +-- to pass a 'CheckPackageContentOps' dictionary around. +checkPackageFilesPreDistribution :: Verbosity -> PackageDescription -> FilePath -> NoCallStackIO [PackageCheck] +-- Note: this really shouldn't return any 'Inexcusable' warnings, +-- because that will make us say that Hackage would reject the package. +-- But, because Hackage doesn't run these tests, that will be a lie! +checkPackageFilesPreDistribution = checkGlobFiles + +-- | Discover problems with the package's wildcards. 
+checkGlobFiles :: Verbosity + -> PackageDescription + -> FilePath + -> NoCallStackIO [PackageCheck] +checkGlobFiles verbosity pkg root = + fmap concat $ for allGlobs $ \(field, dir, glob) -> + -- Note: we just skip over parse errors here; they're reported elsewhere. + case parseFileGlob (specVersion pkg) glob of + Left _ -> return [] + Right parsedGlob -> do + results <- runDirFileGlob verbosity (root dir) parsedGlob + let individualWarnings = results >>= getWarning field glob + noMatchesWarning = + [ PackageDistSuspiciousWarn $ + "In '" ++ field ++ "': the pattern '" ++ glob ++ "' does not" + ++ " match any files." + | all (not . suppressesNoMatchesWarning) results + ] + return (noMatchesWarning ++ individualWarnings) + where + adjustedDataDir = if null (dataDir pkg) then "." else dataDir pkg + allGlobs = concat + [ (,,) "extra-source-files" "." <$> extraSrcFiles pkg + , (,,) "extra-doc-files" "." <$> extraDocFiles pkg + , (,,) "data-files" adjustedDataDir <$> dataFiles pkg + ] + + -- If there's a missing directory in play, since our globs don't + -- (currently) support disjunction, that will always mean there are no + -- matches. The no matches error in this case is strictly less informative + -- than the missing directory error, so sit on it. + suppressesNoMatchesWarning (GlobMatch _) = True + suppressesNoMatchesWarning (GlobWarnMultiDot _) = False + suppressesNoMatchesWarning (GlobMissingDirectory _) = True + + getWarning :: String -> FilePath -> GlobResult FilePath -> [PackageCheck] + getWarning _ _ (GlobMatch _) = + [] + -- Before Cabal 2.4, the extensions of globs had to match the file + -- exactly. This has been relaxed in 2.4 to allow matching only the + -- suffix. This warning detects when pre-2.4 package descriptions are + -- omitting files purely because of the stricter check. 
+ getWarning field glob (GlobWarnMultiDot file) = + [ PackageDistSuspiciousWarn $ + "In '" ++ field ++ "': the pattern '" ++ glob ++ "' does not" + ++ " match the file '" ++ file ++ "' because the extensions do not" + ++ " exactly match (e.g., foo.en.html does not exactly match *.html)." + ++ " To enable looser suffix-only matching, set 'cabal-version: 2.4' or higher." + ] + getWarning field glob (GlobMissingDirectory dir) = + [ PackageDistSuspiciousWarn $ + "In '" ++ field ++ "': the pattern '" ++ glob ++ "' attempts to" + ++ " match files in the directory '" ++ dir ++ "', but there is no" + ++ " directory by that name." + ] + +-- ------------------------------------------------------------ +-- * Utils +-- ------------------------------------------------------------ + +quote :: String -> String +quote s = "'" ++ s ++ "'" + +commaSep :: [String] -> String +commaSep = intercalate ", " + +dups :: Ord a => [a] -> [a] +dups xs = [ x | (x:_:_) <- group (sort xs) ] + +fileExtensionSupportedLanguage :: FilePath -> Bool +fileExtensionSupportedLanguage path = + isHaskell || isC + where + extension = takeExtension path + isHaskell = extension `elem` [".hs", ".lhs"] + isC = isJust (filenameCDialect extension) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription/Configuration.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription/Configuration.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription/Configuration.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription/Configuration.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,599 @@ +-- -fno-warn-deprecations for use of Map.foldWithKey +{-# OPTIONS_GHC -fno-warn-deprecations #-} +----------------------------------------------------------------------------- +-- | +-- Module : 
Distribution.PackageDescription.Configuration +-- Copyright : Thomas Schilling, 2007 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is about the cabal configurations feature. It exports +-- 'finalizePD' and 'flattenPackageDescription' which are +-- functions for converting 'GenericPackageDescription's down to +-- 'PackageDescription's. It has code for working with the tree of conditions +-- and resolving or flattening conditions. + +module Distribution.PackageDescription.Configuration ( + finalizePD, + finalizePackageDescription, + flattenPackageDescription, + + -- Utils + parseCondition, + freeVars, + extractCondition, + extractConditions, + addBuildableCondition, + mapCondTree, + mapTreeData, + mapTreeConds, + mapTreeConstrs, + transformAllBuildInfos, + transformAllBuildDepends, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +-- lens +import qualified Distribution.Types.BuildInfo.Lens as L +import qualified Distribution.Types.GenericPackageDescription.Lens as L +import qualified Distribution.Types.PackageDescription.Lens as L +import qualified Distribution.Types.SetupBuildInfo.Lens as L + +import Distribution.PackageDescription +import Distribution.PackageDescription.Utils +import Distribution.Version +import Distribution.Compiler +import Distribution.System +import Distribution.Simple.Utils +import Distribution.Text +import Distribution.Compat.Lens +import Distribution.Compat.ReadP as ReadP hiding ( char ) +import qualified Distribution.Compat.ReadP as ReadP ( char ) +import Distribution.Types.ComponentRequestedSpec +import Distribution.Types.ForeignLib +import Distribution.Types.Component +import Distribution.Types.Dependency +import Distribution.Types.PackageName +import Distribution.Types.UnqualComponentName +import Distribution.Types.CondTree +import Distribution.Types.Condition +import Distribution.Types.DependencyMap + +import qualified Data.Map.Strict as Map.Strict +import 
qualified Data.Map.Lazy as Map +import Data.Tree ( Tree(Node) ) + +------------------------------------------------------------------------------ + +-- | Simplify a configuration condition using the OS and arch names. Returns +-- the names of all the flags occurring in the condition. +simplifyWithSysParams :: OS -> Arch -> CompilerInfo -> Condition ConfVar + -> (Condition FlagName, [FlagName]) +simplifyWithSysParams os arch cinfo cond = (cond', flags) + where + (cond', flags) = simplifyCondition cond interp + interp (OS os') = Right $ os' == os + interp (Arch arch') = Right $ arch' == arch + interp (Impl comp vr) + | matchImpl (compilerInfoId cinfo) = Right True + | otherwise = case compilerInfoCompat cinfo of + -- fixme: treat Nothing as unknown, rather than empty list once we + -- support partial resolution of system parameters + Nothing -> Right False + Just compat -> Right (any matchImpl compat) + where + matchImpl (CompilerId c v) = comp == c && v `withinRange` vr + interp (Flag f) = Left f + +-- TODO: Add instances and check +-- +-- prop_sC_idempotent cond a o = cond' == cond'' +-- where +-- cond' = simplifyCondition cond a o +-- cond'' = simplifyCondition cond' a o +-- +-- prop_sC_noLits cond a o = isLit res || not (hasLits res) +-- where +-- res = simplifyCondition cond a o +-- hasLits (Lit _) = True +-- hasLits (CNot c) = hasLits c +-- hasLits (COr l r) = hasLits l || hasLits r +-- hasLits (CAnd l r) = hasLits l || hasLits r +-- hasLits _ = False +-- + +-- | Parse a configuration condition from a string. +parseCondition :: ReadP r (Condition ConfVar) +parseCondition = condOr + where + condOr = sepBy1 condAnd (oper "||") >>= return . foldl1 COr + condAnd = sepBy1 cond (oper "&&")>>= return . foldl1 CAnd + cond = sp >> (boolLiteral +++ inparens condOr +++ notCond +++ osCond + +++ archCond +++ flagCond +++ implCond ) + inparens = between (ReadP.char '(' >> sp) (sp >> ReadP.char ')' >> sp) + notCond = ReadP.char '!' >> sp >> cond >>= return . 
CNot + osCond = string "os" >> sp >> inparens osIdent >>= return . Var + archCond = string "arch" >> sp >> inparens archIdent >>= return . Var + flagCond = string "flag" >> sp >> inparens flagIdent >>= return . Var + implCond = string "impl" >> sp >> inparens implIdent >>= return . Var + boolLiteral = fmap Lit parse + archIdent = fmap Arch parse + osIdent = fmap OS parse + flagIdent = fmap (Flag . mkFlagName . lowercase) (munch1 isIdentChar) + isIdentChar c = isAlphaNum c || c == '_' || c == '-' + oper s = sp >> string s >> sp + sp = skipSpaces + implIdent = do i <- parse + vr <- sp >> option anyVersion parse + return $ Impl i vr + +------------------------------------------------------------------------------ + +-- | Result of dependency test. Isomorphic to @Maybe d@ but renamed for +-- clarity. +data DepTestRslt d = DepOk | MissingDeps d + +instance Semigroup d => Monoid (DepTestRslt d) where + mempty = DepOk + mappend = (<>) + +instance Semigroup d => Semigroup (DepTestRslt d) where + DepOk <> x = x + x <> DepOk = x + (MissingDeps d) <> (MissingDeps d') = MissingDeps (d <> d') + + +-- | Try to find a flag assignment that satisfies the constraints of all trees. +-- +-- Returns either the missing dependencies, or a tuple containing the +-- resulting data, the associated dependencies, and the chosen flag +-- assignments. +-- +-- In case of failure, the union of the dependencies that led to backtracking +-- on all branches is returned. +-- [TODO: Could also be specified with a function argument.] +-- +-- TODO: The current algorithm is rather naive. A better approach would be to: +-- +-- * Rule out possible paths, by taking a look at the associated dependencies. +-- +-- * Infer the required values for the conditions of these paths, and +-- calculate the required domains for the variables used in these +-- conditions. Then picking a flag assignment would be linear (I guess). 
+-- +-- This would require some sort of SAT solving, though, thus it's not +-- implemented unless we really need it. +-- +resolveWithFlags :: + [(FlagName,[Bool])] + -- ^ Domain for each flag name, will be tested in order. + -> ComponentRequestedSpec + -> OS -- ^ OS as returned by Distribution.System.buildOS + -> Arch -- ^ Arch as returned by Distribution.System.buildArch + -> CompilerInfo -- ^ Compiler information + -> [Dependency] -- ^ Additional constraints + -> [CondTree ConfVar [Dependency] PDTagged] + -> ([Dependency] -> DepTestRslt [Dependency]) -- ^ Dependency test function. + -> Either [Dependency] (TargetSet PDTagged, FlagAssignment) + -- ^ Either the missing dependencies (error case), or a pair of + -- (set of build targets with dependencies, chosen flag assignments) +resolveWithFlags dom enabled os arch impl constrs trees checkDeps = + either (Left . fromDepMapUnion) Right $ explore (build mempty dom) + where + extraConstrs = toDepMap constrs + + -- simplify trees by (partially) evaluating all conditions and converting + -- dependencies to dependency maps. + simplifiedTrees :: [CondTree FlagName DependencyMap PDTagged] + simplifiedTrees = map ( mapTreeConstrs toDepMap -- convert to maps + . addBuildableConditionPDTagged + . mapTreeConds (fst . simplifyWithSysParams os arch impl)) + trees + + -- @explore@ searches a tree of assignments, backtracking whenever a flag + -- introduces a dependency that cannot be satisfied. If there is no + -- solution, @explore@ returns the union of all dependencies that caused + -- it to backtrack. Since the tree is constructed lazily, we avoid some + -- computation overhead in the successful case. + explore :: Tree FlagAssignment + -> Either DepMapUnion (TargetSet PDTagged, FlagAssignment) + explore (Node flags ts) = + let targetSet = TargetSet $ flip map simplifiedTrees $ + -- apply additional constraints to all dependencies + first (`constrainBy` extraConstrs) . 
+ simplifyCondTree (env flags) + deps = overallDependencies enabled targetSet + in case checkDeps (fromDepMap deps) of + DepOk | null ts -> Right (targetSet, flags) + | otherwise -> tryAll $ map explore ts + MissingDeps mds -> Left (toDepMapUnion mds) + + -- Builds a tree of all possible flag assignments. Internal nodes + -- have only partial assignments. + build :: FlagAssignment -> [(FlagName, [Bool])] -> Tree FlagAssignment + build assigned [] = Node assigned [] + build assigned ((fn, vals) : unassigned) = + Node assigned $ map (\v -> build (insertFlagAssignment fn v assigned) unassigned) vals + + tryAll :: [Either DepMapUnion a] -> Either DepMapUnion a + tryAll = foldr mp mz + + -- special version of `mplus' for our local purposes + mp :: Either DepMapUnion a -> Either DepMapUnion a -> Either DepMapUnion a + mp m@(Right _) _ = m + mp _ m@(Right _) = m + mp (Left xs) (Left ys) = + let union = Map.foldrWithKey (Map.Strict.insertWith combine) + (unDepMapUnion xs) (unDepMapUnion ys) + combine x y = simplifyVersionRange $ unionVersionRanges x y + in union `seq` Left (DepMapUnion union) + + -- `mzero' + mz :: Either DepMapUnion a + mz = Left (DepMapUnion Map.empty) + + env :: FlagAssignment -> FlagName -> Either FlagName Bool + env flags flag = (maybe (Left flag) Right . lookupFlagAssignment flag) flags + +-- | Transforms a 'CondTree' by putting the input under the "then" branch of a +-- conditional that is True when Buildable is True. If 'addBuildableCondition' +-- can determine that Buildable is always True, it returns the input unchanged. +-- If Buildable is always False, it returns the empty 'CondTree'. +addBuildableCondition :: (Eq v, Monoid a, Monoid c) => (a -> BuildInfo) + -> CondTree v c a + -> CondTree v c a +addBuildableCondition getInfo t = + case extractCondition (buildable . 
getInfo) t of + Lit True -> t + Lit False -> CondNode mempty mempty [] + c -> CondNode mempty mempty [condIfThen c t] + +-- | This is a special version of 'addBuildableCondition' for the 'PDTagged' +-- type. +-- +-- It is not simply a specialisation. It is more complicated than it +-- ought to be because of the way the 'PDTagged' monoid instance works. The +-- @mempty = 'PDNull'@ forgets the component type, which has the effect of +-- completely deleting components that are not buildable. +-- +-- See for more details. +-- +addBuildableConditionPDTagged :: (Eq v, Monoid c) => + CondTree v c PDTagged + -> CondTree v c PDTagged +addBuildableConditionPDTagged t = + case extractCondition (buildable . getInfo) t of + Lit True -> t + Lit False -> deleteConstraints t + c -> CondNode mempty mempty [condIfThenElse c t (deleteConstraints t)] + where + deleteConstraints = mapTreeConstrs (const mempty) + + getInfo :: PDTagged -> BuildInfo + getInfo (Lib l) = libBuildInfo l + getInfo (SubComp _ c) = componentBuildInfo c + getInfo PDNull = mempty + + +-- Note: extracting buildable conditions. +-- -------------------------------------- +-- +-- If the conditions in a cond tree lead to Buildable being set to False, then +-- none of the dependencies for this cond tree should actually be taken into +-- account. On the other hand, some of the flags may only be decided in the +-- solver, so we cannot necessarily make the decision whether a component is +-- Buildable or not prior to solving. +-- +-- What we are doing here is to partially evaluate a condition tree in order to +-- extract the condition under which Buildable is True. The predicate determines +-- whether data under a 'CondTree' is buildable. + +-- | Extract conditions matched by the given predicate from all cond trees in a +-- 'GenericPackageDescription'. +extractConditions :: (BuildInfo -> Bool) -> GenericPackageDescription + -> [Condition ConfVar] +extractConditions f gpkg = + concat [ + extractCondition (f . 
libBuildInfo) <$> maybeToList (condLibrary gpkg) + , extractCondition (f . libBuildInfo) . snd <$> condSubLibraries gpkg + , extractCondition (f . buildInfo) . snd <$> condExecutables gpkg + , extractCondition (f . testBuildInfo) . snd <$> condTestSuites gpkg + , extractCondition (f . benchmarkBuildInfo) . snd <$> condBenchmarks gpkg + ] + + +-- | A map of dependencies that combines version ranges using 'unionVersionRanges'. +newtype DepMapUnion = DepMapUnion { unDepMapUnion :: Map PackageName VersionRange } + +toDepMapUnion :: [Dependency] -> DepMapUnion +toDepMapUnion ds = + DepMapUnion $ Map.fromListWith unionVersionRanges [ (p,vr) | Dependency p vr <- ds ] + +fromDepMapUnion :: DepMapUnion -> [Dependency] +fromDepMapUnion m = [ Dependency p vr | (p,vr) <- Map.toList (unDepMapUnion m) ] + +freeVars :: CondTree ConfVar c a -> [FlagName] +freeVars t = [ f | Flag f <- freeVars' t ] + where + freeVars' (CondNode _ _ ifs) = concatMap compfv ifs + compfv (CondBranch c ct mct) = condfv c ++ freeVars' ct ++ maybe [] freeVars' mct + condfv c = case c of + Var v -> [v] + Lit _ -> [] + CNot c' -> condfv c' + COr c1 c2 -> condfv c1 ++ condfv c2 + CAnd c1 c2 -> condfv c1 ++ condfv c2 + + +------------------------------------------------------------------------------ + +-- | A set of targets with their package dependencies +newtype TargetSet a = TargetSet [(DependencyMap, a)] + +-- | Combine the target-specific dependencies in a TargetSet to give the +-- dependencies for the package as a whole. +overallDependencies :: ComponentRequestedSpec -> TargetSet PDTagged -> DependencyMap +overallDependencies enabled (TargetSet targets) = mconcat depss + where + (depss, _) = unzip $ filter (removeDisabledSections . snd) targets + removeDisabledSections :: PDTagged -> Bool + -- UGH. The embedded componentName in the 'Component's here is + -- BLANK. I don't know whose fault this is but I'll use the tag + -- instead. 
-- ezyang + removeDisabledSections (Lib _) = componentNameRequested enabled CLibName + removeDisabledSections (SubComp t c) + -- Do NOT use componentName + = componentNameRequested enabled + $ case c of + CLib _ -> CSubLibName t + CFLib _ -> CFLibName t + CExe _ -> CExeName t + CTest _ -> CTestName t + CBench _ -> CBenchName t + removeDisabledSections PDNull = True + +-- | Collect up the targets in a TargetSet of tagged targets, storing the +-- dependencies as we go. +flattenTaggedTargets :: TargetSet PDTagged -> (Maybe Library, [(UnqualComponentName, Component)]) +flattenTaggedTargets (TargetSet targets) = foldr untag (Nothing, []) targets where + untag (depMap, pdTagged) accum = case (pdTagged, accum) of + (Lib _, (Just _, _)) -> userBug "Only one library expected" + (Lib l, (Nothing, comps)) -> (Just $ redoBD l, comps) + (SubComp n c, (mb_lib, comps)) + | any ((== n) . fst) comps -> + userBug $ "There exist several components with the same name: '" ++ display n ++ "'" + | otherwise -> (mb_lib, (n, redoBD c) : comps) + (PDNull, x) -> x -- actually this should not happen, but let's be liberal + where + redoBD :: L.HasBuildInfo a => a -> a + redoBD = set L.targetBuildDepends $ fromDepMap depMap + +------------------------------------------------------------------------------ +-- Convert GenericPackageDescription to PackageDescription +-- + +data PDTagged = Lib Library + | SubComp UnqualComponentName Component + | PDNull + deriving Show + +instance Monoid PDTagged where + mempty = PDNull + mappend = (<>) + +instance Semigroup PDTagged where + PDNull <> x = x + x <> PDNull = x + Lib l <> Lib l' = Lib (l <> l') + SubComp n x <> SubComp n' x' | n == n' = SubComp n (x <> x') + _ <> _ = cabalBug "Cannot combine incompatible tags" + +-- | Create a package description with all configurations resolved. 
+-- +-- This function takes a `GenericPackageDescription` and several environment +-- parameters and tries to generate `PackageDescription` by finding a flag +-- assignment that result in satisfiable dependencies. +-- +-- It takes as inputs a not necessarily complete specifications of flags +-- assignments, an optional package index as well as platform parameters. If +-- some flags are not assigned explicitly, this function will try to pick an +-- assignment that causes this function to succeed. The package index is +-- optional since on some platforms we cannot determine which packages have +-- been installed before. When no package index is supplied, every dependency +-- is assumed to be satisfiable, therefore all not explicitly assigned flags +-- will get their default values. +-- +-- This function will fail if it cannot find a flag assignment that leads to +-- satisfiable dependencies. (It will not try alternative assignments for +-- explicitly specified flags.) In case of failure it will return the missing +-- dependencies that it encountered when trying different flag assignments. +-- On success, it will return the package description and the full flag +-- assignment chosen. +-- +-- Note that this drops any stanzas which have @buildable: False@. While +-- this is arguably the right thing to do, it means we give bad error +-- messages in some situations, see #3858. +-- +finalizePD :: + FlagAssignment -- ^ Explicitly specified flag assignments + -> ComponentRequestedSpec + -> (Dependency -> Bool) -- ^ Is a given dependency satisfiable from the set of + -- available packages? If this is unknown then use + -- True. + -> Platform -- ^ The 'Arch' and 'OS' + -> CompilerInfo -- ^ Compiler information + -> [Dependency] -- ^ Additional constraints + -> GenericPackageDescription + -> Either [Dependency] + (PackageDescription, FlagAssignment) + -- ^ Either missing dependencies or the resolved package + -- description along with the flag assignments chosen. 
+finalizePD userflags enabled satisfyDep + (Platform arch os) impl constraints + (GenericPackageDescription pkg flags mb_lib0 sub_libs0 flibs0 exes0 tests0 bms0) = do + (targetSet, flagVals) <- + resolveWithFlags flagChoices enabled os arch impl constraints condTrees check + let + (mb_lib, comps) = flattenTaggedTargets targetSet + mb_lib' = fmap libFillInDefaults mb_lib + comps' = flip map comps $ \(n,c) -> foldComponent + (\l -> CLib (libFillInDefaults l) { libName = Just n + , libExposed = False }) + (\l -> CFLib (flibFillInDefaults l) { foreignLibName = n }) + (\e -> CExe (exeFillInDefaults e) { exeName = n }) + (\t -> CTest (testFillInDefaults t) { testName = n }) + (\b -> CBench (benchFillInDefaults b) { benchmarkName = n }) + c + (sub_libs', flibs', exes', tests', bms') = partitionComponents comps' + return ( pkg { library = mb_lib' + , subLibraries = sub_libs' + , foreignLibs = flibs' + , executables = exes' + , testSuites = tests' + , benchmarks = bms' + } + , flagVals ) + where + -- Combine lib, exes, and tests into one list of @CondTree@s with tagged data + condTrees = maybeToList (fmap (mapTreeData Lib) mb_lib0) + ++ map (\(name,tree) -> mapTreeData (SubComp name . CLib) tree) sub_libs0 + ++ map (\(name,tree) -> mapTreeData (SubComp name . CFLib) tree) flibs0 + ++ map (\(name,tree) -> mapTreeData (SubComp name . CExe) tree) exes0 + ++ map (\(name,tree) -> mapTreeData (SubComp name . CTest) tree) tests0 + ++ map (\(name,tree) -> mapTreeData (SubComp name . CBench) tree) bms0 + + flagChoices = map (\(MkFlag n _ d manual) -> (n, d2c manual n d)) flags + d2c manual n b = case lookupFlagAssignment n userflags of + Just val -> [val] + Nothing + | manual -> [b] + | otherwise -> [b, not b] + --flagDefaults = map (\(n,x:_) -> (n,x)) flagChoices + check ds = let missingDeps = filter (not . 
satisfyDep) ds + in if null missingDeps + then DepOk + else MissingDeps missingDeps + +{-# DEPRECATED finalizePackageDescription "This function now always assumes tests and benchmarks are disabled; use finalizePD with ComponentRequestedSpec to specify something more specific. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +finalizePackageDescription :: + FlagAssignment -- ^ Explicitly specified flag assignments + -> (Dependency -> Bool) -- ^ Is a given dependency satisfiable from the set of + -- available packages? If this is unknown then use + -- True. + -> Platform -- ^ The 'Arch' and 'OS' + -> CompilerInfo -- ^ Compiler information + -> [Dependency] -- ^ Additional constraints + -> GenericPackageDescription + -> Either [Dependency] + (PackageDescription, FlagAssignment) +finalizePackageDescription flags = finalizePD flags defaultComponentRequestedSpec + +{- +let tst_p = (CondNode [1::Int] [Distribution.Package.Dependency "a" AnyVersion] []) +let tst_p2 = (CondNode [1::Int] [Distribution.Package.Dependency "a" (EarlierVersion (Version [1,0] [])), Distribution.Package.Dependency "a" (LaterVersion (Version [2,0] []))] []) + +let p_index = Distribution.Simple.PackageIndex.fromList [Distribution.Package.PackageIdentifier "a" (Version [0,5] []), Distribution.Package.PackageIdentifier "a" (Version [2,5] [])] +let look = not . null . Distribution.Simple.PackageIndex.lookupDependency p_index +let looks ds = mconcat $ map (\d -> if look d then DepOk else MissingDeps [d]) ds +resolveWithFlags [] Distribution.System.Linux Distribution.System.I386 (Distribution.Compiler.GHC,Version [6,8,2] []) [tst_p] looks ===> Right ... +resolveWithFlags [] Distribution.System.Linux Distribution.System.I386 (Distribution.Compiler.GHC,Version [6,8,2] []) [tst_p2] looks ===> Left ... +-} + +-- | Flatten a generic package description by ignoring all conditions and just +-- join the field descriptors into on package description. 
Note, however, +-- that this may lead to inconsistent field values, since all values are +-- joined into one field, which may not be possible in the original package +-- description, due to the use of exclusive choices (if ... else ...). +-- +-- TODO: One particularly tricky case is defaulting. In the original package +-- description, e.g., the source directory might either be the default or a +-- certain, explicitly set path. Since defaults are filled in only after the +-- package has been resolved and when no explicit value has been set, the +-- default path will be missing from the package description returned by this +-- function. +flattenPackageDescription :: GenericPackageDescription -> PackageDescription +flattenPackageDescription + (GenericPackageDescription pkg _ mlib0 sub_libs0 flibs0 exes0 tests0 bms0) = + pkg { library = mlib + , subLibraries = reverse sub_libs + , foreignLibs = reverse flibs + , executables = reverse exes + , testSuites = reverse tests + , benchmarks = reverse bms + } + where + mlib = f <$> mlib0 + where f lib = (libFillInDefaults . fst . ignoreConditions $ lib) { libName = Nothing } + sub_libs = flattenLib <$> sub_libs0 + flibs = flattenFLib <$> flibs0 + exes = flattenExe <$> exes0 + tests = flattenTst <$> tests0 + bms = flattenBm <$> bms0 + flattenLib (n, t) = libFillInDefaults $ (fst $ ignoreConditions t) + { libName = Just n, libExposed = False } + flattenFLib (n, t) = flibFillInDefaults $ (fst $ ignoreConditions t) + { foreignLibName = n } + flattenExe (n, t) = exeFillInDefaults $ (fst $ ignoreConditions t) + { exeName = n } + flattenTst (n, t) = testFillInDefaults $ (fst $ ignoreConditions t) + { testName = n } + flattenBm (n, t) = benchFillInDefaults $ (fst $ ignoreConditions t) + { benchmarkName = n } + +-- This is in fact rather a hack. The original version just overrode the +-- default values, however, when adding conditions we had to switch to a +-- modifier-based approach. 
There, nothing is ever overwritten, but only +-- joined together. +-- +-- This is the cleanest way i could think of, that doesn't require +-- changing all field parsing functions to return modifiers instead. +libFillInDefaults :: Library -> Library +libFillInDefaults lib@(Library { libBuildInfo = bi }) = + lib { libBuildInfo = biFillInDefaults bi } + +flibFillInDefaults :: ForeignLib -> ForeignLib +flibFillInDefaults flib@(ForeignLib { foreignLibBuildInfo = bi }) = + flib { foreignLibBuildInfo = biFillInDefaults bi } + +exeFillInDefaults :: Executable -> Executable +exeFillInDefaults exe@(Executable { buildInfo = bi }) = + exe { buildInfo = biFillInDefaults bi } + +testFillInDefaults :: TestSuite -> TestSuite +testFillInDefaults tst@(TestSuite { testBuildInfo = bi }) = + tst { testBuildInfo = biFillInDefaults bi } + +benchFillInDefaults :: Benchmark -> Benchmark +benchFillInDefaults bm@(Benchmark { benchmarkBuildInfo = bi }) = + bm { benchmarkBuildInfo = biFillInDefaults bi } + +biFillInDefaults :: BuildInfo -> BuildInfo +biFillInDefaults bi = + if null (hsSourceDirs bi) + then bi { hsSourceDirs = [currentDir] } + else bi + +-- Walk a 'GenericPackageDescription' and apply @onBuildInfo@/@onSetupBuildInfo@ +-- to all nested 'BuildInfo'/'SetupBuildInfo' values. +transformAllBuildInfos :: (BuildInfo -> BuildInfo) + -> (SetupBuildInfo -> SetupBuildInfo) + -> GenericPackageDescription + -> GenericPackageDescription +transformAllBuildInfos onBuildInfo onSetupBuildInfo = + over L.traverseBuildInfos onBuildInfo + . over (L.packageDescription . L.setupBuildInfo . traverse) onSetupBuildInfo + +-- | Walk a 'GenericPackageDescription' and apply @f@ to all nested +-- @build-depends@ fields. +transformAllBuildDepends :: (Dependency -> Dependency) + -> GenericPackageDescription + -> GenericPackageDescription +transformAllBuildDepends f = + over (L.traverseBuildInfos . L.targetBuildDepends . traverse) f + . over (L.packageDescription . L.setupBuildInfo . traverse . L.setupDepends . 
traverse) f + -- cannot be point-free as normal because of higher rank + . over (\f' -> L.allCondTrees $ traverseCondTreeC f') (map f) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription/FieldGrammar.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription/FieldGrammar.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription/FieldGrammar.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription/FieldGrammar.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,532 @@ +{-# LANGUAGE OverloadedStrings #-} +-- | 'GenericPackageDescription' Field descriptions +module Distribution.PackageDescription.FieldGrammar ( + -- * Package description + packageDescriptionFieldGrammar, + -- * Library + libraryFieldGrammar, + -- * Foreign library + foreignLibFieldGrammar, + -- * Executable + executableFieldGrammar, + -- * Test suite + TestSuiteStanza (..), + testSuiteFieldGrammar, + validateTestSuite, + unvalidateTestSuite, + -- ** Lenses + testStanzaTestType, + testStanzaMainIs, + testStanzaTestModule, + testStanzaBuildInfo, + -- * Benchmark + BenchmarkStanza (..), + benchmarkFieldGrammar, + validateBenchmark, + unvalidateBenchmark, + -- ** Lenses + benchmarkStanzaBenchmarkType, + benchmarkStanzaMainIs, + benchmarkStanzaBenchmarkModule, + benchmarkStanzaBuildInfo, + -- * Flag + flagFieldGrammar, + -- * Source repository + sourceRepoFieldGrammar, + -- * Setup build info + setupBInfoFieldGrammar, + -- * Component build info + buildInfoFieldGrammar, + ) where + +import Distribution.Compat.Lens +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Compiler (CompilerFlavor (..)) +import Distribution.FieldGrammar +import Distribution.ModuleName (ModuleName) +import Distribution.Package +import Distribution.PackageDescription +import 
Distribution.Parsec.Common +import Distribution.Parsec.Newtypes +import Distribution.Parsec.ParseResult +import Distribution.Text (display) +import Distribution.Types.ExecutableScope +import Distribution.Types.ForeignLib +import Distribution.Types.ForeignLibType +import Distribution.Types.UnqualComponentName +import Distribution.Version (anyVersion) + +import qualified Distribution.SPDX as SPDX + +import qualified Distribution.Types.Lens as L + +------------------------------------------------------------------------------- +-- PackageDescription +------------------------------------------------------------------------------- + +packageDescriptionFieldGrammar + :: (FieldGrammar g, Applicative (g PackageDescription), Applicative (g PackageIdentifier)) + => g PackageDescription PackageDescription +packageDescriptionFieldGrammar = PackageDescription + <$> optionalFieldDefAla "cabal-version" SpecVersion L.specVersionRaw (Right anyVersion) + <*> blurFieldGrammar L.package packageIdentifierGrammar + <*> optionalFieldDefAla "license" SpecLicense L.licenseRaw (Left SPDX.NONE) + <*> licenseFilesGrammar + <*> optionalFieldDefAla "copyright" FreeText L.copyright "" + <*> optionalFieldDefAla "maintainer" FreeText L.maintainer "" + <*> optionalFieldDefAla "author" FreeText L.author "" + <*> optionalFieldDefAla "stability" FreeText L.stability "" + <*> monoidalFieldAla "tested-with" (alaList' FSep TestedWith) L.testedWith + <*> optionalFieldDefAla "homepage" FreeText L.homepage "" + <*> optionalFieldDefAla "package-url" FreeText L.pkgUrl "" + <*> optionalFieldDefAla "bug-reports" FreeText L.bugReports "" + <*> pure [] -- source-repos are stanza + <*> optionalFieldDefAla "synopsis" FreeText L.synopsis "" + <*> optionalFieldDefAla "description" FreeText L.description "" + <*> optionalFieldDefAla "category" FreeText L.category "" + <*> prefixedFields "x-" L.customFieldsPD + <*> optionalField "build-type" L.buildTypeRaw + <*> pure Nothing -- custom-setup + -- components + <*> pure 
Nothing -- lib + <*> pure [] -- sub libs + <*> pure [] -- executables + <*> pure [] -- foreign libs + <*> pure [] -- test suites + <*> pure [] -- benchmarks + -- * Files + <*> monoidalFieldAla "data-files" (alaList' VCat FilePathNT) L.dataFiles + <*> optionalFieldDefAla "data-dir" FilePathNT L.dataDir "" + <*> monoidalFieldAla "extra-source-files" (alaList' VCat FilePathNT) L.extraSrcFiles + <*> monoidalFieldAla "extra-tmp-files" (alaList' VCat FilePathNT) L.extraTmpFiles + <*> monoidalFieldAla "extra-doc-files" (alaList' VCat FilePathNT) L.extraDocFiles + where + packageIdentifierGrammar = PackageIdentifier + <$> uniqueField "name" L.pkgName + <*> uniqueField "version" L.pkgVersion + + licenseFilesGrammar = (++) + -- TODO: neither field is deprecated + -- should we pretty print license-file if there's single license file + -- and license-files when more + <$> monoidalFieldAla "license-file" (alaList' FSep FilePathNT) L.licenseFiles + <*> monoidalFieldAla "license-files" (alaList' FSep FilePathNT) L.licenseFiles + ^^^ hiddenField + +------------------------------------------------------------------------------- +-- Library +------------------------------------------------------------------------------- + +libraryFieldGrammar + :: (FieldGrammar g, Applicative (g Library), Applicative (g BuildInfo)) + => Maybe UnqualComponentName -> g Library Library +libraryFieldGrammar n = Library n + <$> monoidalFieldAla "exposed-modules" (alaList' VCat MQuoted) L.exposedModules + <*> monoidalFieldAla "reexported-modules" (alaList CommaVCat) L.reexportedModules + <*> monoidalFieldAla "signatures" (alaList' VCat MQuoted) L.signatures + ^^^ availableSince [2,0] [] + <*> booleanFieldDef "exposed" L.libExposed True + <*> blurFieldGrammar L.libBuildInfo buildInfoFieldGrammar +{-# SPECIALIZE libraryFieldGrammar :: Maybe UnqualComponentName -> ParsecFieldGrammar' Library #-} +{-# SPECIALIZE libraryFieldGrammar :: Maybe UnqualComponentName -> PrettyFieldGrammar' Library #-} + 
+------------------------------------------------------------------------------- +-- Foreign library +------------------------------------------------------------------------------- + +foreignLibFieldGrammar + :: (FieldGrammar g, Applicative (g ForeignLib), Applicative (g BuildInfo)) + => UnqualComponentName -> g ForeignLib ForeignLib +foreignLibFieldGrammar n = ForeignLib n + <$> optionalFieldDef "type" L.foreignLibType ForeignLibTypeUnknown + <*> monoidalFieldAla "options" (alaList FSep) L.foreignLibOptions + <*> blurFieldGrammar L.foreignLibBuildInfo buildInfoFieldGrammar + <*> optionalField "lib-version-info" L.foreignLibVersionInfo + <*> optionalField "lib-version-linux" L.foreignLibVersionLinux + <*> monoidalFieldAla "mod-def-file" (alaList' FSep FilePathNT) L.foreignLibModDefFile +{-# SPECIALIZE foreignLibFieldGrammar :: UnqualComponentName -> ParsecFieldGrammar' ForeignLib #-} +{-# SPECIALIZE foreignLibFieldGrammar :: UnqualComponentName -> PrettyFieldGrammar' ForeignLib #-} + +------------------------------------------------------------------------------- +-- Executable +------------------------------------------------------------------------------- + +executableFieldGrammar + :: (FieldGrammar g, Applicative (g Executable), Applicative (g BuildInfo)) + => UnqualComponentName -> g Executable Executable +executableFieldGrammar n = Executable n + -- main-is is optional as conditional blocks don't have it + <$> optionalFieldDefAla "main-is" FilePathNT L.modulePath "" + <*> optionalFieldDef "scope" L.exeScope ExecutablePublic + ^^^ availableSince [2,0] ExecutablePublic + <*> blurFieldGrammar L.buildInfo buildInfoFieldGrammar +{-# SPECIALIZE executableFieldGrammar :: UnqualComponentName -> ParsecFieldGrammar' Executable #-} +{-# SPECIALIZE executableFieldGrammar :: UnqualComponentName -> PrettyFieldGrammar' Executable #-} + +------------------------------------------------------------------------------- +-- TestSuite 
+------------------------------------------------------------------------------- + +-- | An intermediate type just used for parsing the test-suite stanza. +-- After validation it is converted into the proper 'TestSuite' type. +data TestSuiteStanza = TestSuiteStanza + { _testStanzaTestType :: Maybe TestType + , _testStanzaMainIs :: Maybe FilePath + , _testStanzaTestModule :: Maybe ModuleName + , _testStanzaBuildInfo :: BuildInfo + } + +instance L.HasBuildInfo TestSuiteStanza where + buildInfo = testStanzaBuildInfo + +testStanzaTestType :: Lens' TestSuiteStanza (Maybe TestType) +testStanzaTestType f s = fmap (\x -> s { _testStanzaTestType = x }) (f (_testStanzaTestType s)) +{-# INLINE testStanzaTestType #-} + +testStanzaMainIs :: Lens' TestSuiteStanza (Maybe FilePath) +testStanzaMainIs f s = fmap (\x -> s { _testStanzaMainIs = x }) (f (_testStanzaMainIs s)) +{-# INLINE testStanzaMainIs #-} + +testStanzaTestModule :: Lens' TestSuiteStanza (Maybe ModuleName) +testStanzaTestModule f s = fmap (\x -> s { _testStanzaTestModule = x }) (f (_testStanzaTestModule s)) +{-# INLINE testStanzaTestModule #-} + +testStanzaBuildInfo :: Lens' TestSuiteStanza BuildInfo +testStanzaBuildInfo f s = fmap (\x -> s { _testStanzaBuildInfo = x }) (f (_testStanzaBuildInfo s)) +{-# INLINE testStanzaBuildInfo #-} + +testSuiteFieldGrammar + :: (FieldGrammar g, Applicative (g TestSuiteStanza), Applicative (g BuildInfo)) + => g TestSuiteStanza TestSuiteStanza +testSuiteFieldGrammar = TestSuiteStanza + <$> optionalField "type" testStanzaTestType + <*> optionalFieldAla "main-is" FilePathNT testStanzaMainIs + <*> optionalField "test-module" testStanzaTestModule + <*> blurFieldGrammar testStanzaBuildInfo buildInfoFieldGrammar + +validateTestSuite :: Position -> TestSuiteStanza -> ParseResult TestSuite +validateTestSuite pos stanza = case _testStanzaTestType stanza of + Nothing -> return $ + emptyTestSuite { testBuildInfo = _testStanzaBuildInfo stanza } + + Just tt@(TestTypeUnknown _ _) -> + pure 
emptyTestSuite + { testInterface = TestSuiteUnsupported tt + , testBuildInfo = _testStanzaBuildInfo stanza + } + + Just tt | tt `notElem` knownTestTypes -> + pure emptyTestSuite + { testInterface = TestSuiteUnsupported tt + , testBuildInfo = _testStanzaBuildInfo stanza + } + + Just tt@(TestTypeExe ver) -> case _testStanzaMainIs stanza of + Nothing -> do + parseFailure pos (missingField "main-is" tt) + pure emptyTestSuite + Just file -> do + when (isJust (_testStanzaTestModule stanza)) $ + parseWarning pos PWTExtraBenchmarkModule (extraField "test-module" tt) + pure emptyTestSuite + { testInterface = TestSuiteExeV10 ver file + , testBuildInfo = _testStanzaBuildInfo stanza + } + + Just tt@(TestTypeLib ver) -> case _testStanzaTestModule stanza of + Nothing -> do + parseFailure pos (missingField "test-module" tt) + pure emptyTestSuite + Just module_ -> do + when (isJust (_testStanzaMainIs stanza)) $ + parseWarning pos PWTExtraMainIs (extraField "main-is" tt) + pure emptyTestSuite + { testInterface = TestSuiteLibV09 ver module_ + , testBuildInfo = _testStanzaBuildInfo stanza + } + + where + missingField name tt = "The '" ++ name ++ "' field is required for the " + ++ display tt ++ " test suite type." + + extraField name tt = "The '" ++ name ++ "' field is not used for the '" + ++ display tt ++ "' test suite type." 
+ +unvalidateTestSuite :: TestSuite -> TestSuiteStanza +unvalidateTestSuite t = TestSuiteStanza + { _testStanzaTestType = ty + , _testStanzaMainIs = ma + , _testStanzaTestModule = mo + , _testStanzaBuildInfo = testBuildInfo t + } + where + (ty, ma, mo) = case testInterface t of + TestSuiteExeV10 ver file -> (Just $ TestTypeExe ver, Just file, Nothing) + TestSuiteLibV09 ver modu -> (Just $ TestTypeLib ver, Nothing, Just modu) + _ -> (Nothing, Nothing, Nothing) + +------------------------------------------------------------------------------- +-- Benchmark +------------------------------------------------------------------------------- + +-- | An intermediate type just used for parsing the benchmark stanza. +-- After validation it is converted into the proper 'Benchmark' type. +data BenchmarkStanza = BenchmarkStanza + { _benchmarkStanzaBenchmarkType :: Maybe BenchmarkType + , _benchmarkStanzaMainIs :: Maybe FilePath + , _benchmarkStanzaBenchmarkModule :: Maybe ModuleName + , _benchmarkStanzaBuildInfo :: BuildInfo + } + +instance L.HasBuildInfo BenchmarkStanza where + buildInfo = benchmarkStanzaBuildInfo + +benchmarkStanzaBenchmarkType :: Lens' BenchmarkStanza (Maybe BenchmarkType) +benchmarkStanzaBenchmarkType f s = fmap (\x -> s { _benchmarkStanzaBenchmarkType = x }) (f (_benchmarkStanzaBenchmarkType s)) +{-# INLINE benchmarkStanzaBenchmarkType #-} + +benchmarkStanzaMainIs :: Lens' BenchmarkStanza (Maybe FilePath) +benchmarkStanzaMainIs f s = fmap (\x -> s { _benchmarkStanzaMainIs = x }) (f (_benchmarkStanzaMainIs s)) +{-# INLINE benchmarkStanzaMainIs #-} + +benchmarkStanzaBenchmarkModule :: Lens' BenchmarkStanza (Maybe ModuleName) +benchmarkStanzaBenchmarkModule f s = fmap (\x -> s { _benchmarkStanzaBenchmarkModule = x }) (f (_benchmarkStanzaBenchmarkModule s)) +{-# INLINE benchmarkStanzaBenchmarkModule #-} + +benchmarkStanzaBuildInfo :: Lens' BenchmarkStanza BuildInfo +benchmarkStanzaBuildInfo f s = fmap (\x -> s { _benchmarkStanzaBuildInfo = x }) (f 
(_benchmarkStanzaBuildInfo s)) +{-# INLINE benchmarkStanzaBuildInfo #-} + +benchmarkFieldGrammar + :: (FieldGrammar g, Applicative (g BenchmarkStanza), Applicative (g BuildInfo)) + => g BenchmarkStanza BenchmarkStanza +benchmarkFieldGrammar = BenchmarkStanza + <$> optionalField "type" benchmarkStanzaBenchmarkType + <*> optionalFieldAla "main-is" FilePathNT benchmarkStanzaMainIs + <*> optionalField "benchmark-module" benchmarkStanzaBenchmarkModule + <*> blurFieldGrammar benchmarkStanzaBuildInfo buildInfoFieldGrammar + +validateBenchmark :: Position -> BenchmarkStanza -> ParseResult Benchmark +validateBenchmark pos stanza = case _benchmarkStanzaBenchmarkType stanza of + Nothing -> pure emptyBenchmark + { benchmarkBuildInfo = _benchmarkStanzaBuildInfo stanza } + + Just tt@(BenchmarkTypeUnknown _ _) -> pure emptyBenchmark + { benchmarkInterface = BenchmarkUnsupported tt + , benchmarkBuildInfo = _benchmarkStanzaBuildInfo stanza + } + + Just tt | tt `notElem` knownBenchmarkTypes -> pure emptyBenchmark + { benchmarkInterface = BenchmarkUnsupported tt + , benchmarkBuildInfo = _benchmarkStanzaBuildInfo stanza + } + + Just tt@(BenchmarkTypeExe ver) -> case _benchmarkStanzaMainIs stanza of + Nothing -> do + parseFailure pos (missingField "main-is" tt) + pure emptyBenchmark + Just file -> do + when (isJust (_benchmarkStanzaBenchmarkModule stanza)) $ + parseWarning pos PWTExtraBenchmarkModule (extraField "benchmark-module" tt) + pure emptyBenchmark + { benchmarkInterface = BenchmarkExeV10 ver file + , benchmarkBuildInfo = _benchmarkStanzaBuildInfo stanza + } + + where + missingField name tt = "The '" ++ name ++ "' field is required for the " + ++ display tt ++ " benchmark type." + + extraField name tt = "The '" ++ name ++ "' field is not used for the '" + ++ display tt ++ "' benchmark type." 
+ +unvalidateBenchmark :: Benchmark -> BenchmarkStanza +unvalidateBenchmark b = BenchmarkStanza + { _benchmarkStanzaBenchmarkType = ty + , _benchmarkStanzaMainIs = ma + , _benchmarkStanzaBenchmarkModule = mo + , _benchmarkStanzaBuildInfo = benchmarkBuildInfo b + } + where + (ty, ma, mo) = case benchmarkInterface b of + BenchmarkExeV10 ver "" -> (Just $ BenchmarkTypeExe ver, Nothing, Nothing) + BenchmarkExeV10 ver ma' -> (Just $ BenchmarkTypeExe ver, Just ma', Nothing) + _ -> (Nothing, Nothing, Nothing) + +------------------------------------------------------------------------------- +-- Build info +------------------------------------------------------------------------------- + +buildInfoFieldGrammar + :: (FieldGrammar g, Applicative (g BuildInfo)) + => g BuildInfo BuildInfo +buildInfoFieldGrammar = BuildInfo + <$> booleanFieldDef "buildable" L.buildable True + <*> monoidalFieldAla "build-tools" (alaList CommaFSep) L.buildTools + ^^^ deprecatedSince [2,0] "Please use 'build-tool-depends' field" + <*> monoidalFieldAla "build-tool-depends" (alaList CommaFSep) L.buildToolDepends + -- {- ^^^ availableSince [2,0] [] -} + -- here, we explicitly want to recognise build-tool-depends for all Cabal files + -- as otherwise cabal new-build cannot really work. + -- + -- I.e. 
we don't want trigger unknown field warning + <*> monoidalFieldAla "cpp-options" (alaList' NoCommaFSep Token') L.cppOptions + <*> monoidalFieldAla "asm-options" (alaList' NoCommaFSep Token') L.asmOptions + <*> monoidalFieldAla "cmm-options" (alaList' NoCommaFSep Token') L.cmmOptions + <*> monoidalFieldAla "cc-options" (alaList' NoCommaFSep Token') L.ccOptions + <*> monoidalFieldAla "cxx-options" (alaList' NoCommaFSep Token') L.cxxOptions + ^^^ availableSince [2,1] [] -- TODO change to 2,2 when version is bumped + <*> monoidalFieldAla "ld-options" (alaList' NoCommaFSep Token') L.ldOptions + <*> monoidalFieldAla "pkgconfig-depends" (alaList CommaFSep) L.pkgconfigDepends + <*> monoidalFieldAla "frameworks" (alaList' FSep Token) L.frameworks + <*> monoidalFieldAla "extra-framework-dirs" (alaList' FSep FilePathNT) L.extraFrameworkDirs + <*> monoidalFieldAla "asm-sources" (alaList' VCat FilePathNT) L.asmSources + <*> monoidalFieldAla "cmm-sources" (alaList' VCat FilePathNT) L.cmmSources + <*> monoidalFieldAla "c-sources" (alaList' VCat FilePathNT) L.cSources + <*> monoidalFieldAla "cxx-sources" (alaList' VCat FilePathNT) L.cxxSources + ^^^ availableSince [2,1] [] -- TODO change to 2,2 when version is bumped + <*> monoidalFieldAla "js-sources" (alaList' VCat FilePathNT) L.jsSources + <*> hsSourceDirsGrammar + <*> monoidalFieldAla "other-modules" (alaList' VCat MQuoted) L.otherModules + <*> monoidalFieldAla "virtual-modules" (alaList' VCat MQuoted) L.virtualModules + ^^^ availableSince [2,1] [] -- TODO change to 2,2 when version is bumped + <*> monoidalFieldAla "autogen-modules" (alaList' VCat MQuoted) L.autogenModules + <*> optionalFieldAla "default-language" MQuoted L.defaultLanguage + <*> monoidalFieldAla "other-languages" (alaList' FSep MQuoted) L.otherLanguages + <*> monoidalFieldAla "default-extensions" (alaList' FSep MQuoted) L.defaultExtensions + <*> monoidalFieldAla "other-extensions" (alaList' FSep MQuoted) L.otherExtensions + <*> monoidalFieldAla "extensions" 
(alaList' FSep MQuoted) L.oldExtensions + ^^^ deprecatedSince [1,12] "Please use 'default-extensions' or 'other-extensions' fields." + <*> monoidalFieldAla "extra-libraries" (alaList' VCat Token) L.extraLibs + <*> monoidalFieldAla "extra-ghci-libraries" (alaList' VCat Token) L.extraGHCiLibs + <*> monoidalFieldAla "extra-bundled-libraries" (alaList' VCat Token) L.extraBundledLibs + <*> monoidalFieldAla "extra-library-flavours" (alaList' VCat Token) L.extraLibFlavours + <*> monoidalFieldAla "extra-lib-dirs" (alaList' FSep FilePathNT) L.extraLibDirs + <*> monoidalFieldAla "include-dirs" (alaList' FSep FilePathNT) L.includeDirs + <*> monoidalFieldAla "includes" (alaList' FSep FilePathNT) L.includes + <*> monoidalFieldAla "install-includes" (alaList' FSep FilePathNT) L.installIncludes + <*> optionsFieldGrammar + <*> profOptionsFieldGrammar + <*> sharedOptionsFieldGrammar + <*> pure [] -- static-options ??? + <*> prefixedFields "x-" L.customFieldsBI + <*> monoidalFieldAla "build-depends" (alaList CommaVCat) L.targetBuildDepends + <*> monoidalFieldAla "mixins" (alaList CommaVCat) L.mixins + ^^^ availableSince [2,0] [] +{-# SPECIALIZE buildInfoFieldGrammar :: ParsecFieldGrammar' BuildInfo #-} +{-# SPECIALIZE buildInfoFieldGrammar :: PrettyFieldGrammar' BuildInfo #-} + +hsSourceDirsGrammar + :: (FieldGrammar g, Applicative (g BuildInfo)) + => g BuildInfo [FilePath] +hsSourceDirsGrammar = (++) + <$> monoidalFieldAla "hs-source-dirs" (alaList' FSep FilePathNT) L.hsSourceDirs + <*> monoidalFieldAla "hs-source-dir" (alaList' FSep FilePathNT) L.hsSourceDirs + ^^^ deprecatedField' "Please use 'hs-source-dirs'" + +optionsFieldGrammar + :: (FieldGrammar g, Applicative (g BuildInfo)) + => g BuildInfo [(CompilerFlavor, [String])] +optionsFieldGrammar = combine + <$> monoidalFieldAla "ghc-options" (alaList' NoCommaFSep Token') (extract GHC) + <*> monoidalFieldAla "ghcjs-options" (alaList' NoCommaFSep Token') (extract GHCJS) + -- NOTE: Hugs, NHC and JHC are not supported anymore, but 
these + -- fields are kept around so that we can still parse legacy .cabal + -- files that have them. + <* knownField "jhc-options" + <* knownField "hugs-options" + <* knownField "nhc98-options" + where + extract :: CompilerFlavor -> ALens' BuildInfo [String] + extract flavor = L.options . lookupLens flavor + + combine ghc ghcjs = + f GHC ghc ++ f GHCJS ghcjs + where + f _flavor [] = [] + f flavor opts = [(flavor, opts)] + +profOptionsFieldGrammar + :: (FieldGrammar g, Applicative (g BuildInfo)) + => g BuildInfo [(CompilerFlavor, [String])] +profOptionsFieldGrammar = combine + <$> monoidalFieldAla "ghc-prof-options" (alaList' NoCommaFSep Token') (extract GHC) + <*> monoidalFieldAla "ghcjs-prof-options" (alaList' NoCommaFSep Token') (extract GHCJS) + where + extract :: CompilerFlavor -> ALens' BuildInfo [String] + extract flavor = L.profOptions . lookupLens flavor + + combine ghc ghcjs = f GHC ghc ++ f GHCJS ghcjs + where + f _flavor [] = [] + f flavor opts = [(flavor, opts)] + +sharedOptionsFieldGrammar + :: (FieldGrammar g, Applicative (g BuildInfo)) + => g BuildInfo [(CompilerFlavor, [String])] +sharedOptionsFieldGrammar = combine + <$> monoidalFieldAla "ghc-shared-options" (alaList' NoCommaFSep Token') (extract GHC) + <*> monoidalFieldAla "ghcjs-shared-options" (alaList' NoCommaFSep Token') (extract GHCJS) + where + extract :: CompilerFlavor -> ALens' BuildInfo [String] + extract flavor = L.sharedOptions . lookupLens flavor + + combine ghc ghcjs = f GHC ghc ++ f GHCJS ghcjs + where + f _flavor [] = [] + f flavor opts = [(flavor, opts)] + +lookupLens :: (Functor f, Ord k) => k -> LensLike' f [(k, [v])] [v] +lookupLens k f kvs = str kvs <$> f (gtr kvs) + where + gtr = fromMaybe [] . 
lookup k + + str [] v = [(k, v)] + str (x@(k',_):xs) v + | k == k' = (k, v) : xs + | otherwise = x : str xs v + +------------------------------------------------------------------------------- +-- Flag +------------------------------------------------------------------------------- + +flagFieldGrammar + :: (FieldGrammar g, Applicative (g Flag)) + => FlagName -> g Flag Flag +flagFieldGrammar name = MkFlag name + <$> optionalFieldDefAla "description" FreeText L.flagDescription "" + <*> booleanFieldDef "default" L.flagDefault True + <*> booleanFieldDef "manual" L.flagManual False +{-# SPECIALIZE flagFieldGrammar :: FlagName -> ParsecFieldGrammar' Flag #-} +{-# SPECIALIZE flagFieldGrammar :: FlagName -> PrettyFieldGrammar' Flag #-} + +------------------------------------------------------------------------------- +-- SourceRepo +------------------------------------------------------------------------------- + +sourceRepoFieldGrammar + :: (FieldGrammar g, Applicative (g SourceRepo)) + => RepoKind -> g SourceRepo SourceRepo +sourceRepoFieldGrammar kind = SourceRepo kind + <$> optionalField "type" L.repoType + <*> optionalFieldAla "location" FreeText L.repoLocation + <*> optionalFieldAla "module" Token L.repoModule + <*> optionalFieldAla "branch" Token L.repoBranch + <*> optionalFieldAla "tag" Token L.repoTag + <*> optionalFieldAla "subdir" FilePathNT L.repoSubdir +{-# SPECIALIZE sourceRepoFieldGrammar :: RepoKind -> ParsecFieldGrammar' SourceRepo #-} +{-# SPECIALIZE sourceRepoFieldGrammar :: RepoKind ->PrettyFieldGrammar' SourceRepo #-} + +------------------------------------------------------------------------------- +-- SetupBuildInfo +------------------------------------------------------------------------------- + +setupBInfoFieldGrammar + :: (FieldGrammar g, Functor (g SetupBuildInfo)) + => Bool -> g SetupBuildInfo SetupBuildInfo +setupBInfoFieldGrammar def = flip SetupBuildInfo def + <$> monoidalFieldAla "setup-depends" (alaList CommaVCat) L.setupDepends +{-# 
SPECIALIZE setupBInfoFieldGrammar :: Bool -> ParsecFieldGrammar' SetupBuildInfo #-} +{-# SPECIALIZE setupBInfoFieldGrammar :: Bool ->PrettyFieldGrammar' SetupBuildInfo #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription/Parsec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription/Parsec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription/Parsec.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription/Parsec.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,806 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE Rank2Types #-} +{-# LANGUAGE ScopedTypeVariables #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.PackageDescription.Parsec +-- Copyright : Isaac Jones 2003-2005 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This defined parsers and partial pretty printers for the @.cabal@ format. 
+ +module Distribution.PackageDescription.Parsec ( + -- * Package descriptions + readGenericPackageDescription, + parseGenericPackageDescription, + parseGenericPackageDescriptionMaybe, + + -- ** Parsing + ParseResult, + runParseResult, + + -- * New-style spec-version + scanSpecVersion, + + -- ** Supplementary build information + readHookedBuildInfo, + parseHookedBuildInfo, + ) where + +import Distribution.Compat.Prelude +import Prelude () + +import Control.Monad (guard) +import Control.Monad.State.Strict (StateT, execStateT) +import Control.Monad.Trans.Class (lift) +import Data.List (partition) +import Distribution.CabalSpecVersion +import Distribution.Compat.Lens +import Distribution.FieldGrammar +import Distribution.FieldGrammar.Parsec (NamelessField (..)) +import Distribution.PackageDescription +import Distribution.PackageDescription.FieldGrammar +import Distribution.PackageDescription.Quirks (patchQuirks) +import Distribution.Parsec.Class (parsec, simpleParsec) +import Distribution.Parsec.Common +import Distribution.Parsec.ConfVar (parseConditionConfVar) +import Distribution.Parsec.Field (FieldName, getName) +import Distribution.Parsec.FieldLineStream (fieldLineStreamFromBS) +import Distribution.Parsec.LexerMonad (LexWarning, toPWarnings) +import Distribution.Parsec.Newtypes (CommaFSep, List, SpecVersion (..), Token) +import Distribution.Parsec.Parser +import Distribution.Parsec.ParseResult +import Distribution.Pretty (prettyShow) +import Distribution.Simple.Utils (fromUTF8BS) +import Distribution.Text (display) +import Distribution.Types.CondTree +import Distribution.Types.Dependency (Dependency) +import Distribution.Types.ForeignLib +import Distribution.Types.ForeignLibType (knownForeignLibTypes) +import Distribution.Types.GenericPackageDescription (emptyGenericPackageDescription) +import Distribution.Types.PackageDescription (specVersion') +import Distribution.Types.UnqualComponentName (UnqualComponentName, mkUnqualComponentName) +import 
Distribution.Utils.Generic (breakMaybe, unfoldrM, validateUTF8) +import Distribution.Verbosity (Verbosity) +import Distribution.Version + (LowerBound (..), Version, asVersionIntervals, mkVersion, orLaterVersion, version0, + versionNumbers) + +import qualified Data.ByteString as BS +import qualified Data.ByteString.Char8 as BS8 +import qualified Data.Map.Strict as Map +import qualified Distribution.Compat.Newtype as Newtype +import qualified Distribution.Types.BuildInfo.Lens as L +import qualified Distribution.Types.GenericPackageDescription.Lens as L +import qualified Distribution.Types.PackageDescription.Lens as L +import qualified Text.Parsec as P + +-- --------------------------------------------------------------- +-- Parsing +-- --------------------------------------------------------------- + +-- | Parse the given package file. +readGenericPackageDescription :: Verbosity -> FilePath -> IO GenericPackageDescription +readGenericPackageDescription = readAndParseFile parseGenericPackageDescription + +------------------------------------------------------------------------------ +-- | Parses the given file into a 'GenericPackageDescription'. +-- +-- In Cabal 1.2 the syntax for package descriptions was changed to a format +-- with sections and possibly indented property descriptions. +-- +parseGenericPackageDescription :: BS.ByteString -> ParseResult GenericPackageDescription +parseGenericPackageDescription bs = do + -- set scanned version + setCabalSpecVersion ver + -- if we get too new version, fail right away + case ver of + Just v | v > mkVersion [2,4] -> parseFailure zeroPos + "Unsupported cabal-version. See https://github.com/haskell/cabal/issues/4899." + _ -> pure () + + case readFields' bs' of + Right (fs, lexWarnings) -> do + when patched $ + parseWarning zeroPos PWTQuirkyCabalFile "Legacy cabal file" + -- UTF8 is validated in a prepass step, afterwards parsing is lenient. 
+ parseGenericPackageDescription' ver lexWarnings (validateUTF8 bs') fs + -- TODO: better marshalling of errors + Left perr -> parseFatalFailure pos (show perr) where + ppos = P.errorPos perr + pos = Position (P.sourceLine ppos) (P.sourceColumn ppos) + where + (patched, bs') = patchQuirks bs + ver = scanSpecVersion bs' + +-- | 'Maybe' variant of 'parseGenericPackageDescription' +parseGenericPackageDescriptionMaybe :: BS.ByteString -> Maybe GenericPackageDescription +parseGenericPackageDescriptionMaybe = + either (const Nothing) Just . snd . runParseResult . parseGenericPackageDescription + +fieldlinesToBS :: [FieldLine ann] -> BS.ByteString +fieldlinesToBS = BS.intercalate "\n" . map (\(FieldLine _ bs) -> bs) + +-- Monad in which sections are parsed +type SectionParser = StateT SectionS ParseResult + +-- | State of section parser +data SectionS = SectionS + { _stateGpd :: !GenericPackageDescription + , _stateCommonStanzas :: !(Map String CondTreeBuildInfo) + } + +stateGpd :: Lens' SectionS GenericPackageDescription +stateGpd f (SectionS gpd cs) = (\x -> SectionS x cs) <$> f gpd +{-# INLINE stateGpd #-} + +stateCommonStanzas :: Lens' SectionS (Map String CondTreeBuildInfo) +stateCommonStanzas f (SectionS gpd cs) = SectionS gpd <$> f cs +{-# INLINE stateCommonStanzas #-} + +-- Note [Accumulating parser] +-- +-- This parser has two "states": +-- * first we parse fields of PackageDescription +-- * then we parse sections (libraries, executables, etc) +parseGenericPackageDescription' + :: Maybe Version + -> [LexWarning] + -> Maybe Int + -> [Field Position] + -> ParseResult GenericPackageDescription +parseGenericPackageDescription' cabalVerM lexWarnings utf8WarnPos fs = do + parseWarnings (toPWarnings lexWarnings) + for_ utf8WarnPos $ \pos -> + parseWarning zeroPos PWTUTF $ "UTF8 encoding problem at byte offset " ++ show pos + let (syntax, fs') = sectionizeFields fs + let (fields, sectionFields) = takeFields fs' + + -- cabal-version + cabalVer <- case cabalVerM of + Just 
v -> return v + Nothing -> case Map.lookup "cabal-version" fields >>= safeLast of + Nothing -> return version0 + Just (MkNamelessField pos fls) -> do + v <- specVersion' . Newtype.unpack' SpecVersion <$> runFieldParser pos parsec cabalSpecLatest fls + when (v >= mkVersion [2,1]) $ parseFailure pos $ + "cabal-version should be at the beginning of the file starting with spec version 2.2. " ++ + "See https://github.com/haskell/cabal/issues/4899" + + return v + + let specVer + | cabalVer >= mkVersion [2,3] = CabalSpecV2_4 + | cabalVer >= mkVersion [2,1] = CabalSpecV2_2 + | cabalVer >= mkVersion [1,25] = CabalSpecV2_0 + | cabalVer >= mkVersion [1,23] = CabalSpecV1_24 + | cabalVer >= mkVersion [1,21] = CabalSpecV1_22 + | otherwise = CabalSpecOld + + -- reset cabal version + setCabalSpecVersion (Just cabalVer) + + -- Package description + pd <- parseFieldGrammar specVer fields packageDescriptionFieldGrammar + + -- Check that scanned and parsed versions match. + unless (cabalVer == specVersion pd) $ parseFailure zeroPos $ + "Scanned and parsed cabal-versions don't match " ++ + prettyShow cabalVer ++ " /= " ++ prettyShow (specVersion pd) + + maybeWarnCabalVersion syntax pd + + -- Sections + let gpd = emptyGenericPackageDescription & L.packageDescription .~ pd + + view stateGpd <$> execStateT (goSections specVer sectionFields) (SectionS gpd Map.empty) + where + safeLast :: [a] -> Maybe a + safeLast = listToMaybe . reverse + + newSyntaxVersion :: Version + newSyntaxVersion = mkVersion [1, 2] + + maybeWarnCabalVersion :: Syntax -> PackageDescription -> ParseResult () + maybeWarnCabalVersion syntax pkg + | syntax == NewSyntax && specVersion pkg < newSyntaxVersion + = parseWarning zeroPos PWTNewSyntax $ + "A package using section syntax must specify at least\n" + ++ "'cabal-version: >= 1.2'." 
+ + maybeWarnCabalVersion syntax pkg + | syntax == OldSyntax && specVersion pkg >= newSyntaxVersion + = parseWarning zeroPos PWTOldSyntax $ + "A package using 'cabal-version: " + ++ displaySpecVersion (specVersionRaw pkg) + ++ "' must use section syntax. See the Cabal user guide for details." + where + displaySpecVersion (Left version) = display version + displaySpecVersion (Right versionRange) = + case asVersionIntervals versionRange of + [] {- impossible -} -> display versionRange + ((LowerBound version _, _):_) -> display (orLaterVersion version) + + maybeWarnCabalVersion _ _ = return () + +goSections :: CabalSpecVersion -> [Field Position] -> SectionParser () +goSections specVer = traverse_ process + where + process (Field (Name pos name) _) = + lift $ parseWarning pos PWTTrailingFields $ + "Ignoring trailing fields after sections: " ++ show name + process (Section name args secFields) = + parseSection name args secFields + + snoc x xs = xs ++ [x] + + hasCommonStanzas = specHasCommonStanzas specVer + + -- we need signature, because this is polymorphic, but not-closed + parseCondTree' + :: FromBuildInfo a + => ParsecFieldGrammar' a -- ^ grammar + -> Map String CondTreeBuildInfo -- ^ common stanzas + -> [Field Position] + -> ParseResult (CondTree ConfVar [Dependency] a) + parseCondTree' = parseCondTreeWithCommonStanzas specVer + + parseSection :: Name Position -> [SectionArg Position] -> [Field Position] -> SectionParser () + parseSection (Name pos name) args fields + | hasCommonStanzas == NoCommonStanzas, name == "common" = lift $ do + parseWarning pos PWTUnknownSection $ "Ignoring section: common. You should set cabal-version: 2.2 or larger to use common stanzas." 
+ + | name == "common" = do + commonStanzas <- use stateCommonStanzas + name' <- lift $ parseCommonName pos args + biTree <- lift $ parseCondTree' buildInfoFieldGrammar commonStanzas fields + + case Map.lookup name' commonStanzas of + Nothing -> stateCommonStanzas .= Map.insert name' biTree commonStanzas + Just _ -> lift $ parseFailure pos $ + "Duplicate common stanza: " ++ name' + + | name == "library" && null args = do + commonStanzas <- use stateCommonStanzas + lib <- lift $ parseCondTree' (libraryFieldGrammar Nothing) commonStanzas fields + -- TODO: check that library is defined once + stateGpd . L.condLibrary ?= lib + + -- Sublibraries + -- TODO: check cabal-version + | name == "library" = do + commonStanzas <- use stateCommonStanzas + name' <- parseUnqualComponentName pos args + lib <- lift $ parseCondTree' (libraryFieldGrammar $ Just name') commonStanzas fields + -- TODO check duplicate name here? + stateGpd . L.condSubLibraries %= snoc (name', lib) + + -- TODO: check cabal-version + | name == "foreign-library" = do + commonStanzas <- use stateCommonStanzas + name' <- parseUnqualComponentName pos args + flib <- lift $ parseCondTree' (foreignLibFieldGrammar name') commonStanzas fields + + let hasType ts = foreignLibType ts /= foreignLibType mempty + unless (onAllBranches hasType flib) $ lift $ parseFailure pos $ concat + [ "Foreign library " ++ show (display name') + , " is missing required field \"type\" or the field " + , "is not present in all conditional branches. The " + , "available test types are: " + , intercalate ", " (map display knownForeignLibTypes) + ] + + -- TODO check duplicate name here? + stateGpd . L.condForeignLibs %= snoc (name', flib) + + | name == "executable" = do + commonStanzas <- use stateCommonStanzas + name' <- parseUnqualComponentName pos args + exe <- lift $ parseCondTree' (executableFieldGrammar name') commonStanzas fields + -- TODO check duplicate name here? + stateGpd . 
L.condExecutables %= snoc (name', exe) + + | name == "test-suite" = do + commonStanzas <- use stateCommonStanzas + name' <- parseUnqualComponentName pos args + testStanza <- lift $ parseCondTree' testSuiteFieldGrammar commonStanzas fields + testSuite <- lift $ traverse (validateTestSuite pos) testStanza + + let hasType ts = testInterface ts /= testInterface mempty + unless (onAllBranches hasType testSuite) $ lift $ parseFailure pos $ concat + [ "Test suite " ++ show (display name') + , " is missing required field \"type\" or the field " + , "is not present in all conditional branches. The " + , "available test types are: " + , intercalate ", " (map display knownTestTypes) + ] + + -- TODO check duplicate name here? + stateGpd . L.condTestSuites %= snoc (name', testSuite) + + | name == "benchmark" = do + commonStanzas <- use stateCommonStanzas + name' <- parseUnqualComponentName pos args + benchStanza <- lift $ parseCondTree' benchmarkFieldGrammar commonStanzas fields + bench <- lift $ traverse (validateBenchmark pos) benchStanza + + let hasType ts = benchmarkInterface ts /= benchmarkInterface mempty + unless (onAllBranches hasType bench) $ lift $ parseFailure pos $ concat + [ "Benchmark " ++ show (display name') + , " is missing required field \"type\" or the field " + , "is not present in all conditional branches. The " + , "available benchmark types are: " + , intercalate ", " (map display knownBenchmarkTypes) + ] + + -- TODO check duplicate name here? + stateGpd . L.condBenchmarks %= snoc (name', bench) + + | name == "flag" = do + name' <- parseNameBS pos args + name'' <- lift $ runFieldParser' pos parsec specVer (fieldLineStreamFromBS name') `recoverWith` mkFlagName "" + flag <- lift $ parseFields specVer fields (flagFieldGrammar name'') + -- Check default flag + stateGpd . L.genPackageFlags %= snoc flag + + | name == "custom-setup" && null args = do + sbi <- lift $ parseFields specVer fields (setupBInfoFieldGrammar False) + stateGpd . L.packageDescription . 
L.setupBuildInfo ?= sbi + + | name == "source-repository" = do + kind <- lift $ case args of + [SecArgName spos secName] -> + runFieldParser' spos parsec specVer (fieldLineStreamFromBS secName) `recoverWith` RepoHead + [] -> do + parseFailure pos "'source-repository' requires exactly one argument" + pure RepoHead + _ -> do + parseFailure pos $ "Invalid source-repository kind " ++ show args + pure RepoHead + + sr <- lift $ parseFields specVer fields (sourceRepoFieldGrammar kind) + stateGpd . L.packageDescription . L.sourceRepos %= snoc sr + + | otherwise = lift $ + parseWarning pos PWTUnknownSection $ "Ignoring section: " ++ show name + +parseName :: Position -> [SectionArg Position] -> SectionParser String +parseName pos args = fromUTF8BS <$> parseNameBS pos args + +parseNameBS :: Position -> [SectionArg Position] -> SectionParser BS.ByteString +-- TODO: use strict parser +parseNameBS pos args = case args of + [SecArgName _pos secName] -> + pure secName + [SecArgStr _pos secName] -> + pure secName + [] -> do + lift $ parseFailure pos "name required" + pure "" + _ -> do + -- TODO: pretty print args + lift $ parseFailure pos $ "Invalid name " ++ show args + pure "" + +parseCommonName :: Position -> [SectionArg Position] -> ParseResult String +parseCommonName pos args = case args of + [SecArgName _pos secName] -> + pure $ fromUTF8BS secName + [SecArgStr _pos secName] -> + pure $ fromUTF8BS secName + [] -> do + parseFailure pos $ "name required" + pure "" + _ -> do + -- TODO: pretty print args + parseFailure pos $ "Invalid name " ++ show args + pure "" + +-- TODO: avoid conversion to 'String'. +parseUnqualComponentName :: Position -> [SectionArg Position] -> SectionParser UnqualComponentName +parseUnqualComponentName pos args = mkUnqualComponentName <$> parseName pos args + +-- | Parse a non-recursive list of fields. 
+parseFields + :: CabalSpecVersion + -> [Field Position] -- ^ fields to be parsed + -> ParsecFieldGrammar' a + -> ParseResult a +parseFields v fields grammar = do + let (fs0, ss) = partitionFields fields + traverse_ (traverse_ warnInvalidSubsection) ss + parseFieldGrammar v fs0 grammar + +warnInvalidSubsection :: Section Position -> ParseResult () +warnInvalidSubsection (MkSection (Name pos name) _ _) = + void (parseFailure pos $ "invalid subsection " ++ show name) + +parseCondTree + :: forall a c. + CabalSpecVersion + -> HasElif -- ^ accept @elif@ + -> ParsecFieldGrammar' a -- ^ grammar + -> (a -> c) -- ^ condition extractor + -> [Field Position] + -> ParseResult (CondTree ConfVar c a) +parseCondTree v hasElif grammar cond = go + where + go fields = do + let (fs, ss) = partitionFields fields + x <- parseFieldGrammar v fs grammar + branches <- concat <$> traverse parseIfs ss + return (CondNode x (cond x) branches) -- TODO: branches + + parseIfs :: [Section Position] -> ParseResult [CondBranch ConfVar c a] + parseIfs [] = return [] + parseIfs (MkSection (Name _ name) test fields : sections) | name == "if" = do + test' <- parseConditionConfVar test + fields' <- go fields + -- TODO: else + (elseFields, sections') <- parseElseIfs sections + return (CondBranch test' fields' elseFields : sections') + parseIfs (MkSection (Name pos name) _ _ : sections) = do + parseWarning pos PWTInvalidSubsection $ "invalid subsection " ++ show name + parseIfs sections + + parseElseIfs + :: [Section Position] + -> ParseResult (Maybe (CondTree ConfVar c a), [CondBranch ConfVar c a]) + parseElseIfs [] = return (Nothing, []) + parseElseIfs (MkSection (Name pos name) args fields : sections) | name == "else" = do + unless (null args) $ + parseFailure pos $ "`else` section has section arguments " ++ show args + elseFields <- go fields + sections' <- parseIfs sections + return (Just elseFields, sections') + + + + parseElseIfs (MkSection (Name _ name) test fields : sections) | hasElif == HasElif, 
name == "elif" = do + -- TODO: check cabal-version + test' <- parseConditionConfVar test + fields' <- go fields + (elseFields, sections') <- parseElseIfs sections + -- we parse an empty 'Fields', to get empty value for a node + a <- parseFieldGrammar v mempty grammar + return (Just $ CondNode a (cond a) [CondBranch test' fields' elseFields], sections') + + parseElseIfs (MkSection (Name pos name) _ _ : sections) | name == "elif" = do + parseWarning pos PWTInvalidSubsection $ "invalid subsection \"elif\". You should set cabal-version: 2.2 or larger to use elif-conditionals." + (,) Nothing <$> parseIfs sections + + parseElseIfs sections = (,) Nothing <$> parseIfs sections + +{- Note [Accumulating parser] + +Note: Outdated a bit + +In there parser, @'FieldDescr' a@ is transformed into @Map FieldName (a -> +FieldParser a)@. The weird value is used because we accumulate structure of +@a@ by folding over the fields. There are various reasons for that: + +* Almost all fields are optional + +* This is simple approach so declarative bi-directional format (parsing and +printing) of structure could be specified (list of @'FieldDescr' a@) + +* There are surface syntax fields corresponding to single field in the file: + @license-file@ and @license-files@ + +* This is quite safe approach. + +When/if we re-implement the parser to support formatting preservging roundtrip +with new AST, this all need to be rewritten. +-} + +------------------------------------------------------------------------------- +-- Common stanzas +------------------------------------------------------------------------------- + +-- $commonStanzas +-- +-- [Note: Common stanzas] +-- +-- In Cabal 2.2 we support simple common stanzas: +-- +-- * Commons stanzas define 'BuildInfo' +-- +-- * import "fields" can only occur at top of other stanzas (think: imports) +-- +-- In particular __there aren't__ +-- +-- * implicit stanzas +-- +-- * More specific common stanzas (executable, test-suite). 
+-- +-- +-- The approach uses the fact that 'BuildInfo' is a 'Monoid': +-- +-- @ +-- mergeCommonStanza' :: HasBuildInfo comp => BuildInfo -> comp -> comp +-- mergeCommonStanza' bi = over L.BuildInfo (bi <>) +-- @ +-- +-- Real 'mergeCommonStanza' is more complicated as we have to deal with +-- conditional trees. +-- +-- The approach is simple, and have good properties: +-- +-- * Common stanzas are parsed exactly once, even if not-used. Thus we report errors in them. +-- +type CondTreeBuildInfo = CondTree ConfVar [Dependency] BuildInfo + +-- | Create @a@ from 'BuildInfo'. +-- +-- Law: @view buildInfo . fromBuildInfo = id@ +class L.HasBuildInfo a => FromBuildInfo a where + fromBuildInfo :: BuildInfo -> a + +instance FromBuildInfo BuildInfo where fromBuildInfo = id +instance FromBuildInfo Library where fromBuildInfo bi = set L.buildInfo bi emptyLibrary +instance FromBuildInfo ForeignLib where fromBuildInfo bi = set L.buildInfo bi emptyForeignLib +instance FromBuildInfo Executable where fromBuildInfo bi = set L.buildInfo bi emptyExecutable + +instance FromBuildInfo TestSuiteStanza where + fromBuildInfo = TestSuiteStanza Nothing Nothing Nothing + +instance FromBuildInfo BenchmarkStanza where + fromBuildInfo = BenchmarkStanza Nothing Nothing Nothing + +parseCondTreeWithCommonStanzas + :: forall a. FromBuildInfo a + => CabalSpecVersion + -> ParsecFieldGrammar' a -- ^ grammar + -> Map String CondTreeBuildInfo -- ^ common stanzas + -> [Field Position] + -> ParseResult (CondTree ConfVar [Dependency] a) +parseCondTreeWithCommonStanzas v grammar commonStanzas = goImports [] + where + hasElif = specHasElif v + hasCommonStanzas = specHasCommonStanzas v + + getList' :: List CommaFSep Token String -> [String] + getList' = Newtype.unpack + + -- parse leading imports + -- not supported: + goImports acc (Field (Name pos name) _ : fields) | name == "import", hasCommonStanzas == NoCommonStanzas = do + parseWarning pos PWTUnknownField "Unknown field: import. 
You should set cabal-version: 2.2 or larger to use common stanzas" + goImports acc fields + -- supported: + goImports acc (Field (Name pos name) fls : fields) | name == "import" = do + names <- getList' <$> runFieldParser pos parsec v fls + names' <- for names $ \commonName -> + case Map.lookup commonName commonStanzas of + Nothing -> do + parseFailure pos $ "Undefined common stanza imported: " ++ commonName + pure Nothing + Just commonTree -> + pure (Just commonTree) + + goImports (acc ++ catMaybes names') fields + + -- Go to parsing condTree after first non-import 'Field'. + goImports acc fields = go acc fields + + -- parse actual CondTree + go :: [CondTreeBuildInfo] -> [Field Position] -> ParseResult (CondTree ConfVar [Dependency] a) + go bis fields = do + x <- parseCondTree v hasElif grammar (view L.targetBuildDepends) fields + pure $ foldr mergeCommonStanza x bis + +mergeCommonStanza + :: forall a. FromBuildInfo a + => CondTree ConfVar [Dependency] BuildInfo + -> CondTree ConfVar [Dependency] a + -> CondTree ConfVar [Dependency] a +mergeCommonStanza (CondNode bi _ bis) (CondNode x _ cs) = + CondNode x' (x' ^. L.targetBuildDepends) cs' + where + -- new value is old value with buildInfo field _prepended_. + x' = x & L.buildInfo %~ (bi <>) + + -- tree components are appended together. + cs' = map (fmap fromBuildInfo) bis ++ cs + +------------------------------------------------------------------------------- +-- Branches +------------------------------------------------------------------------------- + +-- Check that a property holds on all branches of a condition tree +onAllBranches :: forall v c a. Monoid a => (a -> Bool) -> CondTree v c a -> Bool +onAllBranches p = go mempty + where + -- If the current level of the tree satisfies the property, then we are + -- done. If not, then one of the conditional branches below the current node + -- must satisfy it. 
Each node may have multiple immediate children; we only + -- one need one to satisfy the property because the configure step uses + -- 'mappend' to join together the results of flag resolution. + go :: a -> CondTree v c a -> Bool + go acc ct = let acc' = acc `mappend` condTreeData ct + in p acc' || any (goBranch acc') (condTreeComponents ct) + + -- Both the 'true' and the 'false' block must satisfy the property. + goBranch :: a -> CondBranch v c a -> Bool + goBranch _ (CondBranch _ _ Nothing) = False + goBranch acc (CondBranch _ t (Just e)) = go acc t && go acc e + +------------------------------------------------------------------------------- +-- Old syntax +------------------------------------------------------------------------------- + +-- TODO: move to own module + +-- | "Sectionize" an old-style Cabal file. A sectionized file has: +-- +-- * all global fields at the beginning, followed by +-- +-- * all flag declarations, followed by +-- +-- * an optional library section, and an arbitrary number of executable +-- sections (in any order). +-- +-- The current implementation just gathers all library-specific fields +-- in a library section and wraps all executable stanzas in an executable +-- section. +sectionizeFields :: [Field ann] -> (Syntax, [Field ann]) +sectionizeFields fs = case classifyFields fs of + Just fields -> (OldSyntax, convert fields) + Nothing -> (NewSyntax, fs) + where + -- return 'Just' if all fields are simple fields + classifyFields :: [Field ann] -> Maybe [(Name ann, [FieldLine ann])] + classifyFields = traverse f + where + f (Field name fieldlines) = Just (name, fieldlines) + f _ = Nothing + + trim = BS.dropWhile isSpace' . BS.reverse . BS.dropWhile isSpace' . BS.reverse + isSpace' = (== 32) + + convert :: [(Name ann, [FieldLine ann])] -> [Field ann] + convert fields = + let + toField (name, ls) = Field name ls + -- "build-depends" is a local field now. 
To be backwards + -- compatible, we still allow it as a global field in old-style + -- package description files and translate it to a local field by + -- adding it to every non-empty section + (hdr0, exes0) = break ((=="executable") . getName . fst) fields + (hdr, libfs0) = partition (not . (`elem` libFieldNames) . getName . fst) hdr0 + + (deps, libfs) = partition ((== "build-depends") . getName . fst) + libfs0 + + exes = unfoldr toExe exes0 + toExe [] = Nothing + toExe ((Name pos n, ls) : r) + | n == "executable" = + let (efs, r') = break ((== "executable") . getName . fst) r + in Just (Section (Name pos "executable") [SecArgName pos $ trim $ fieldlinesToBS ls] (map toField $ deps ++ efs), r') + toExe _ = error "unexpected input to 'toExe'" + + lib = case libfs of + [] -> [] + ((Name pos _, _) : _) -> + [Section (Name pos "library") [] (map toField $ deps ++ libfs)] + + in map toField hdr ++ lib ++ exes + +-- | See 'sectionizeFields'. +data Syntax = OldSyntax | NewSyntax + deriving (Eq, Show) + +-- TODO: +libFieldNames :: [FieldName] +libFieldNames = fieldGrammarKnownFieldList (libraryFieldGrammar Nothing) + +------------------------------------------------------------------------------- +-- Suplementary build information +------------------------------------------------------------------------------- + +readHookedBuildInfo :: Verbosity -> FilePath -> IO HookedBuildInfo +readHookedBuildInfo = readAndParseFile parseHookedBuildInfo + +parseHookedBuildInfo :: BS.ByteString -> ParseResult HookedBuildInfo +parseHookedBuildInfo bs = case readFields' bs of + Right (fs, lexWarnings) -> do + parseHookedBuildInfo' lexWarnings fs + -- TODO: better marshalling of errors + Left perr -> parseFatalFailure zeroPos (show perr) + +parseHookedBuildInfo' + :: [LexWarning] + -> [Field Position] + -> ParseResult HookedBuildInfo +parseHookedBuildInfo' lexWarnings fs = do + parseWarnings (toPWarnings lexWarnings) + (mLibFields, exes) <- stanzas fs + mLib <- parseLib mLibFields + biExes 
<- traverse parseExe exes + return (mLib, biExes) + where + parseLib :: Fields Position -> ParseResult (Maybe BuildInfo) + parseLib fields + | Map.null fields = pure Nothing + | otherwise = Just <$> parseFieldGrammar cabalSpecLatest fields buildInfoFieldGrammar + + parseExe :: (UnqualComponentName, Fields Position) -> ParseResult (UnqualComponentName, BuildInfo) + parseExe (n, fields) = do + bi <- parseFieldGrammar cabalSpecLatest fields buildInfoFieldGrammar + pure (n, bi) + + stanzas :: [Field Position] -> ParseResult (Fields Position, [(UnqualComponentName, Fields Position)]) + stanzas fields = do + let (hdr0, exes0) = breakMaybe isExecutableField fields + hdr <- toFields hdr0 + exes <- unfoldrM (traverse toExe) exes0 + pure (hdr, exes) + + toFields :: [Field Position] -> ParseResult (Fields Position) + toFields fields = do + let (fields', ss) = partitionFields fields + traverse_ (traverse_ warnInvalidSubsection) ss + pure fields' + + toExe + :: ([FieldLine Position], [Field Position]) + -> ParseResult ((UnqualComponentName, Fields Position), Maybe ([FieldLine Position], [Field Position])) + toExe (fss, fields) = do + name <- runFieldParser zeroPos parsec cabalSpecLatest fss + let (hdr0, rest) = breakMaybe isExecutableField fields + hdr <- toFields hdr0 + pure ((name, hdr), rest) + + isExecutableField (Field (Name _ name) fss) + | name == "executable" = Just fss + | otherwise = Nothing + isExecutableField _ = Nothing + +-- | Quickly scan new-style spec-version +-- +-- A new-style spec-version declaration begins the .cabal file and +-- follow the following case-insensitive grammar (expressed in +-- RFC5234 ABNF): +-- +-- @ +-- newstyle-spec-version-decl = "cabal-version" *WS ":" *WS newstyle-pec-version *WS +-- +-- spec-version = NUM "." NUM [ "." 
NUM ] +-- +-- NUM = DIGIT0 / DIGITP 1*DIGIT0 +-- DIGIT0 = %x30-39 +-- DIGITP = %x31-39 +-- WS = %20 +-- @ +-- +scanSpecVersion :: BS.ByteString -> Maybe Version +scanSpecVersion bs = do + fstline':_ <- pure (BS8.lines bs) + + -- parse + -- normalise: remove all whitespace, convert to lower-case + let fstline = BS.map toLowerW8 $ BS.filter (/= 0x20) fstline' + ["cabal-version",vers] <- pure (BS8.split ':' fstline) + + -- parse + -- + -- This is currently more tolerant regarding leading 0 digits. + -- + ver <- simpleParsec (BS8.unpack vers) + guard $ case versionNumbers ver of + [_,_] -> True + [_,_,_] -> True + _ -> False + + pure ver + where + -- | Translate ['A'..'Z'] to ['a'..'z'] + toLowerW8 :: Word8 -> Word8 + toLowerW8 w | 0x40 < w && w < 0x5b = w+0x20 + | otherwise = w diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription/PrettyPrint.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription/PrettyPrint.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription/PrettyPrint.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription/PrettyPrint.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,247 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.PackageDescription.PrettyPrint +-- Copyright : Jürgen Nicklisch-Franken 2010 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Pretty printing for cabal files +-- +----------------------------------------------------------------------------- + +module Distribution.PackageDescription.PrettyPrint ( + -- * Generic package descriptions + writeGenericPackageDescription, + showGenericPackageDescription, + + -- * Package descriptions + writePackageDescription, + 
showPackageDescription, + + -- ** Supplementary build information + writeHookedBuildInfo, + showHookedBuildInfo, +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.Dependency +import Distribution.Types.ForeignLib (ForeignLib (foreignLibName)) +import Distribution.Types.UnqualComponentName +import Distribution.Types.CondTree + +import Distribution.PackageDescription +import Distribution.Simple.Utils +import Distribution.ParseUtils +import Distribution.Text + +import Distribution.FieldGrammar (PrettyFieldGrammar', prettyFieldGrammar) +import Distribution.PackageDescription.FieldGrammar + (packageDescriptionFieldGrammar, buildInfoFieldGrammar, + flagFieldGrammar, foreignLibFieldGrammar, libraryFieldGrammar, + benchmarkFieldGrammar, testSuiteFieldGrammar, + setupBInfoFieldGrammar, sourceRepoFieldGrammar, executableFieldGrammar) + +import qualified Distribution.PackageDescription.FieldGrammar as FG + +import Text.PrettyPrint + (hsep, space, parens, char, nest, ($$), (<+>), + text, vcat, ($+$), Doc, render) + +import qualified Data.ByteString.Lazy.Char8 as BS.Char8 + +-- | Writes a .cabal file from a generic package description +writeGenericPackageDescription :: FilePath -> GenericPackageDescription -> NoCallStackIO () +writeGenericPackageDescription fpath pkg = writeUTF8File fpath (showGenericPackageDescription pkg) + +-- | Writes a generic package description to a string +showGenericPackageDescription :: GenericPackageDescription -> String +showGenericPackageDescription = render . ($+$ text "") . 
ppGenericPackageDescription + +ppGenericPackageDescription :: GenericPackageDescription -> Doc +ppGenericPackageDescription gpd = + ppPackageDescription (packageDescription gpd) + $+$ ppSetupBInfo (setupBuildInfo (packageDescription gpd)) + $+$ ppGenPackageFlags (genPackageFlags gpd) + $+$ ppCondLibrary (condLibrary gpd) + $+$ ppCondSubLibraries (condSubLibraries gpd) + $+$ ppCondForeignLibs (condForeignLibs gpd) + $+$ ppCondExecutables (condExecutables gpd) + $+$ ppCondTestSuites (condTestSuites gpd) + $+$ ppCondBenchmarks (condBenchmarks gpd) + +ppPackageDescription :: PackageDescription -> Doc +ppPackageDescription pd = + prettyFieldGrammar packageDescriptionFieldGrammar pd + $+$ ppSourceRepos (sourceRepos pd) + +ppSourceRepos :: [SourceRepo] -> Doc +ppSourceRepos [] = mempty +ppSourceRepos (hd:tl) = ppSourceRepo hd $+$ ppSourceRepos tl + +ppSourceRepo :: SourceRepo -> Doc +ppSourceRepo repo = + emptyLine $ text "source-repository" <+> disp kind $+$ + nest indentWith (prettyFieldGrammar (sourceRepoFieldGrammar kind) repo) + where + kind = repoKind repo + +ppSetupBInfo :: Maybe SetupBuildInfo -> Doc +ppSetupBInfo Nothing = mempty +ppSetupBInfo (Just sbi) + | defaultSetupDepends sbi = mempty + | otherwise = + emptyLine $ text "custom-setup" $+$ + nest indentWith (prettyFieldGrammar (setupBInfoFieldGrammar False) sbi) + +ppGenPackageFlags :: [Flag] -> Doc +ppGenPackageFlags flds = vcat [ppFlag f | f <- flds] + +ppFlag :: Flag -> Doc +ppFlag flag@(MkFlag name _ _ _) = + emptyLine $ text "flag" <+> ppFlagName name $+$ + nest indentWith (prettyFieldGrammar (flagFieldGrammar name) flag) + +ppCondTree2 :: PrettyFieldGrammar' s -> CondTree ConfVar [Dependency] s -> Doc +ppCondTree2 grammar = go + where + -- TODO: recognise elif opportunities + go (CondNode it _ ifs) = + prettyFieldGrammar grammar it + $+$ vcat (map ppIf ifs) + + ppIf (CondBranch c thenTree Nothing) +-- | isEmpty thenDoc = mempty + | otherwise = ppIfCondition c $$ nest indentWith thenDoc + where + thenDoc 
= go thenTree + + ppIf (CondBranch c thenTree (Just elseTree)) = + case (False, False) of + -- case (isEmpty thenDoc, isEmpty elseDoc) of + (True, True) -> mempty + (False, True) -> ppIfCondition c $$ nest indentWith thenDoc + (True, False) -> ppIfCondition (cNot c) $$ nest indentWith elseDoc + (False, False) -> (ppIfCondition c $$ nest indentWith thenDoc) + $+$ (text "else" $$ nest indentWith elseDoc) + where + thenDoc = go thenTree + elseDoc = go elseTree + +ppCondLibrary :: Maybe (CondTree ConfVar [Dependency] Library) -> Doc +ppCondLibrary Nothing = mempty +ppCondLibrary (Just condTree) = + emptyLine $ text "library" $+$ + nest indentWith (ppCondTree2 (libraryFieldGrammar Nothing) condTree) + +ppCondSubLibraries :: [(UnqualComponentName, CondTree ConfVar [Dependency] Library)] -> Doc +ppCondSubLibraries libs = vcat + [ emptyLine $ (text "library" <+> disp n) $+$ + nest indentWith (ppCondTree2 (libraryFieldGrammar $ Just n) condTree) + | (n, condTree) <- libs + ] + +ppCondForeignLibs :: [(UnqualComponentName, CondTree ConfVar [Dependency] ForeignLib)] -> Doc +ppCondForeignLibs flibs = vcat + [ emptyLine $ (text "foreign-library" <+> disp n) $+$ + nest indentWith (ppCondTree2 (foreignLibFieldGrammar n) condTree) + | (n, condTree) <- flibs + ] + +ppCondExecutables :: [(UnqualComponentName, CondTree ConfVar [Dependency] Executable)] -> Doc +ppCondExecutables exes = vcat + [ emptyLine $ (text "executable" <+> disp n) $+$ + nest indentWith (ppCondTree2 (executableFieldGrammar n) condTree) + | (n, condTree) <- exes + ] + +ppCondTestSuites :: [(UnqualComponentName, CondTree ConfVar [Dependency] TestSuite)] -> Doc +ppCondTestSuites suites = vcat + [ emptyLine $ (text "test-suite" <+> disp n) $+$ + nest indentWith (ppCondTree2 testSuiteFieldGrammar (fmap FG.unvalidateTestSuite condTree)) + | (n, condTree) <- suites + ] + +ppCondBenchmarks :: [(UnqualComponentName, CondTree ConfVar [Dependency] Benchmark)] -> Doc +ppCondBenchmarks suites = vcat + [ emptyLine $ (text 
"benchmark" <+> disp n) $+$ + nest indentWith (ppCondTree2 benchmarkFieldGrammar (fmap FG.unvalidateBenchmark condTree)) + | (n, condTree) <- suites + ] + +ppCondition :: Condition ConfVar -> Doc +ppCondition (Var x) = ppConfVar x +ppCondition (Lit b) = text (show b) +ppCondition (CNot c) = char '!' <<>> (ppCondition c) +ppCondition (COr c1 c2) = parens (hsep [ppCondition c1, text "||" + <+> ppCondition c2]) +ppCondition (CAnd c1 c2) = parens (hsep [ppCondition c1, text "&&" + <+> ppCondition c2]) +ppConfVar :: ConfVar -> Doc +ppConfVar (OS os) = text "os" <<>> parens (disp os) +ppConfVar (Arch arch) = text "arch" <<>> parens (disp arch) +ppConfVar (Flag name) = text "flag" <<>> parens (ppFlagName name) +ppConfVar (Impl c v) = text "impl" <<>> parens (disp c <+> disp v) + +ppFlagName :: FlagName -> Doc +ppFlagName = text . unFlagName + +ppIfCondition :: (Condition ConfVar) -> Doc +ppIfCondition c = (emptyLine $ text "if" <+> ppCondition c) + +emptyLine :: Doc -> Doc +emptyLine d = text "" $+$ d + +-- | @since 2.0.0.2 +writePackageDescription :: FilePath -> PackageDescription -> NoCallStackIO () +writePackageDescription fpath pkg = writeUTF8File fpath (showPackageDescription pkg) + +--TODO: make this use section syntax +-- add equivalent for GenericPackageDescription + +-- | @since 2.0.0.2 +showPackageDescription :: PackageDescription -> String +showPackageDescription = showGenericPackageDescription . 
pdToGpd + +pdToGpd :: PackageDescription -> GenericPackageDescription +pdToGpd pd = GenericPackageDescription + { packageDescription = pd + , genPackageFlags = [] + , condLibrary = mkCondTree <$> library pd + , condSubLibraries = mkCondTreeL <$> subLibraries pd + , condForeignLibs = mkCondTree' foreignLibName <$> foreignLibs pd + , condExecutables = mkCondTree' exeName <$> executables pd + , condTestSuites = mkCondTree' testName <$> testSuites pd + , condBenchmarks = mkCondTree' benchmarkName <$> benchmarks pd + } + where + -- We set CondTree's [Dependency] to an empty list, as it + -- is not pretty printed anyway. + mkCondTree x = CondNode x [] [] + mkCondTreeL l = (fromMaybe (mkUnqualComponentName "") (libName l), CondNode l [] []) + + mkCondTree' + :: (a -> UnqualComponentName) + -> a -> (UnqualComponentName, CondTree ConfVar [Dependency] a) + mkCondTree' f x = (f x, CondNode x [] []) + +-- | @since 2.0.0.2 +writeHookedBuildInfo :: FilePath -> HookedBuildInfo -> NoCallStackIO () +writeHookedBuildInfo fpath = writeFileAtomic fpath . BS.Char8.pack + . 
showHookedBuildInfo + +-- | @since 2.0.0.2 +showHookedBuildInfo :: HookedBuildInfo -> String +showHookedBuildInfo (mb_lib_bi, ex_bis) = render $ + maybe mempty (prettyFieldGrammar buildInfoFieldGrammar) mb_lib_bi + $$ vcat + [ space + $$ (text "executable:" <+> disp name) + $$ prettyFieldGrammar buildInfoFieldGrammar bi + | (name, bi) <- ex_bis + ] + $+$ text "" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription/Quirks.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription/Quirks.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription/Quirks.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription/Quirks.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,203 @@ +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE OverloadedStrings #-} +-- | +-- +-- @since 2.2.0.0 +module Distribution.PackageDescription.Quirks (patchQuirks) where + +import Prelude () +import Distribution.Compat.Prelude +import GHC.Fingerprint (Fingerprint (..), fingerprintData) +import Foreign.Ptr (castPtr) +import System.IO.Unsafe (unsafeDupablePerformIO) + +import qualified Data.ByteString as BS +import qualified Data.ByteString.Unsafe as BS +import qualified Data.Map as Map + +-- | Patch legacy @.cabal@ file contents to allow parsec parser to accept +-- all of Hackage. +-- +-- Bool part of the result tells whether the output is modified. 
+-- +-- @since 2.2.0.0 +patchQuirks :: BS.ByteString -> (Bool, BS.ByteString) +patchQuirks bs = case Map.lookup (BS.take 256 bs, md5 bs) patches of + Nothing -> (False, bs) + Just (post, f) + | post /= md5 output -> (False, bs) + | otherwise -> (True, output) + where + output = f bs + +md5 :: BS.ByteString -> Fingerprint +md5 bs = unsafeDupablePerformIO $ BS.unsafeUseAsCStringLen bs $ \(ptr, len) -> + fingerprintData (castPtr ptr) len + +-- | 'patches' contains first 256 bytes, pre- and post-fingerprints and a patch function. +-- +-- +patches :: Map.Map (BS.ByteString, Fingerprint) (Fingerprint, BS.ByteString -> BS.ByteString) +patches = Map.fromList + -- http://hackage.haskell.org/package/unicode-transforms-0.3.3 + -- other-modules: . + -- ReadP assumed dot is empty line + [ mk "-- This file has been generated from package.yaml by hpack version 0.17.0.\n--\n-- see: https://github.com/sol/hpack\n\nname: unicode-transforms\nversion: 0.3.3\nsynopsis: Unicode normalization\ndescription: Fast Unic" + (Fingerprint 15958160436627155571 10318709190730872881) + (Fingerprint 11008465475756725834 13815629925116264363) + (bsRemove " other-modules:\n .\n") -- TODO: remove traling \n to test structural-diff + -- http://hackage.haskell.org/package/DSTM-0.1.2 + -- http://hackage.haskell.org/package/DSTM-0.1.1 + -- http://hackage.haskell.org/package/DSTM-0.1 + -- Other Modules: no dash + -- ReadP parsed as section + , mk "Name: DSTM\nVersion: 0.1.2\nCopyright: (c) 2010, Frank Kupke\nLicense: LGPL\nLicense-File: LICENSE\nAuthor: Frank Kupke\nMaintainer: frk@informatik.uni-kiel.de\nCabal-Version: >= 1.2.3\nStability: provisional\nSynopsis: A framework for using STM within distributed " + (Fingerprint 6919263071548559054 9050746360708965827) + (Fingerprint 17015177514298962556 11943164891661867280) + (bsReplace "Other modules:" "-- ") + , mk "Name: DSTM\nVersion: 0.1.1\nCopyright: (c) 2010, Frank Kupke\nLicense: LGPL\nLicense-File: LICENSE\nAuthor: Frank Kupke\nMaintainer: 
frk@informatik.uni-kiel.de\nCabal-Version: >= 1.2.3\nStability: provisional\nSynopsis: A framework for using STM within distributed " + (Fingerprint 17313105789069667153 9610429408495338584) + (Fingerprint 17250946493484671738 17629939328766863497) + (bsReplace "Other modules:" "-- ") + , mk "Name: DSTM\nVersion: 0.1\nCopyright: (c) 2010, Frank Kupke\nLicense: LGPL\nLicense-File: LICENSE\nAuthor: Frank Kupke\nMaintainer: frk@informatik.uni-kiel.de\nCabal-Version: >= 1.2.3\nStability: provisional\nSynopsis: A framework for using STM within distributed sy" + (Fingerprint 10502599650530614586 16424112934471063115) + (Fingerprint 13562014713536696107 17899511905611879358) + (bsReplace "Other modules:" "-- ") + -- http://hackage.haskell.org/package/control-monad-exception-mtl-0.10.3 + , mk "name: control-monad-exception-mtl\nversion: 0.10.3\nCabal-Version: >= 1.10\nbuild-type: Simple\nlicense: PublicDomain\nauthor: Pepe Iborra\nmaintainer: pepeiborra@gmail.com\nhomepage: http://pepeiborra.github.com/control-monad-exception\nsynopsis: MTL instances f" + (Fingerprint 18274748422558568404 4043538769550834851) + (Fingerprint 11395257416101232635 4303318131190196308) + (bsReplace " default- extensions:" "unknown-section") + -- http://hackage.haskell.org/package/vacuum-opengl-0.0 + -- \DEL character + , mk "Name: vacuum-opengl\nVersion: 0.0\nSynopsis: Visualize live Haskell data structures using vacuum, graphviz and OpenGL.\nDescription: \DELVisualize live Haskell data structures using vacuum, graphviz and OpenGL.\n " + (Fingerprint 5946760521961682577 16933361639326309422) + (Fingerprint 14034745101467101555 14024175957788447824) + (bsRemove "\DEL") + , mk "Name: vacuum-opengl\nVersion: 0.0.1\nSynopsis: Visualize live Haskell data structures using vacuum, graphviz and OpenGL.\nDescription: \DELVisualize live Haskell data structures using vacuum, graphviz and OpenGL.\n " + (Fingerprint 10790950110330119503 1309560249972452700) + (Fingerprint 1565743557025952928 
13645502325715033593) + (bsRemove "\DEL") + -- http://hackage.haskell.org/package/ixset-1.0.4 + -- {- comments -} + , mk "Name: ixset\nVersion: 1.0.4\nSynopsis: Efficient relational queries on Haskell sets.\nDescription:\n Create and query sets that are indexed by multiple indices.\nLicense: BSD3\nLicense-file: COPYING\nAut" + (Fingerprint 11886092342440414185 4150518943472101551) + (Fingerprint 5731367240051983879 17473925006273577821) + (bsRemoveStarting "{-") + -- : after section + -- http://hackage.haskell.org/package/ds-kanren + , mk "name: ds-kanren\nversion: 0.2.0.0\nsynopsis: A subset of the miniKanren language\ndescription:\n ds-kanren is an implementation of the language.\n .\n == What's in ds-kanren?\n .\n ['dis" + (Fingerprint 2804006762382336875 9677726932108735838) + (Fingerprint 9830506174094917897 12812107316777006473) + (bsReplace "Test-Suite test-unify:" "Test-Suite \"test-unify:\"" . bsReplace "Test-Suite test-list-ops:" "Test-Suite \"test-list-ops:\"") + , mk "name: ds-kanren\nversion: 0.2.0.1\nsynopsis: A subset of the miniKanren language\ndescription:\n ds-kanren is an implementation of the language.\n\nlicense: MIT\nlicense-file: " + (Fingerprint 9130259649220396193 2155671144384738932) + (Fingerprint 1847988234352024240 4597789823227580457) + (bsReplace "Test-Suite test-unify:" "Test-Suite \"test-unify:\"" . 
bsReplace "Test-Suite test-list-ops:" "Test-Suite \"test-list-ops:\"") + , mk "name: metric\nversion: 0.1.4\nsynopsis: Metric spaces.\nlicense: MIT\nlicense-file: LICENSE\nauthor: Vikram Verma\nmaintainer: me@vikramverma.com\ncategory: Data\nbuild-type:" + (Fingerprint 6150019278861565482 3066802658031228162) + (Fingerprint 9124826020564520548 15629704249829132420) + (bsReplace "test-suite metric-tests:" "test-suite \"metric-tests:\"") + , mk "name: metric\nversion: 0.2.0\nsynopsis: Metric spaces.\nlicense: MIT\nlicense-file: LICENSE\nauthor: Vikram Verma\nmaintainer: me@vikramverma.com\ncategory: Data\nbuild-type:" + (Fingerprint 4639805967994715694 7859317050376284551) + (Fingerprint 5566222290622325231 873197212916959151) + (bsReplace "test-suite metric-tests:" "test-suite \"metric-tests:\"") + , mk "name: phasechange\ncategory: Data\nversion: 0.1\nauthor: G\195\161bor Lehel\nmaintainer: G\195\161bor Lehel \nhomepage: http://github.com/glehel/phasechange\ncopyright: Copyright (C) 2012 G\195\161bor Lehel\nlicense: " + (Fingerprint 10546509771395401582 245508422312751943) + (Fingerprint 5169853482576003304 7247091607933993833) + (bsReplace "impl(ghc >= 7.4):" "erroneous-section" . 
bsReplace "impl(ghc >= 7.6):" "erroneous-section") + , mk "Name: smartword\nSynopsis: Web based flash card for Word Smart I and II vocabularies\nVersion: 0.0.0.5\nHomepage: http://kyagrd.dyndns.org/~kyagrd/project/smartword/\nCategory: Web,Education\nLicense: " + (Fingerprint 7803544783533485151 10807347873998191750) + (Fingerprint 1665635316718752601 16212378357991151549) + (bsReplace "build depends:" "--") + , mk "name: shelltestrunner\n-- sync with README.md, ANNOUNCE:\nversion: 1.3\ncategory: Testing\nsynopsis: A tool for testing command-line programs.\ndescription:\n shelltestrunner is a cross-platform tool for testing command-line\n program" + (Fingerprint 4403237110790078829 15392625961066653722) + (Fingerprint 10218887328390239431 4644205837817510221) + (bsReplace "other modules:" "--") + -- &&! + -- http://hackage.haskell.org/package/hblas-0.3.0.0 + , mk "-- Initial hblas.cabal generated by cabal init. For further \n-- documentation, see http://haskell.org/cabal/users-guide/\n\n-- The name of the package.\nname: hblas\n\n-- The package version. See the Haskell package versioning policy (PVP) \n-- " + (Fingerprint 8570120150072467041 18315524331351505945) + (Fingerprint 10838007242302656005 16026440017674974175) + (bsReplace "&&!" "&& !") + , mk "-- Initial hblas.cabal generated by cabal init. For further \n-- documentation, see http://haskell.org/cabal/users-guide/\n\n-- The name of the package.\nname: hblas\n\n-- The package version. See the Haskell package versioning policy (PVP) \n-- " + (Fingerprint 5262875856214215155 10846626274067555320) + (Fingerprint 3022954285783401045 13395975869915955260) + (bsReplace "&&!" "&& !") + , mk "-- Initial hblas.cabal generated by cabal init. For further \n-- documentation, see http://haskell.org/cabal/users-guide/\n\n-- The name of the package.\nname: hblas\n\n-- The package version. 
See the Haskell package versioning policy (PVP) \n-- " + (Fingerprint 54222628930951453 5526514916844166577) + (Fingerprint 1749630806887010665 8607076506606977549) + (bsReplace "&&!" "&& !") + , mk "-- Initial hblas.cabal generated by cabal init. For further\n-- documentation, see http://haskell.org/cabal/users-guide/\n\n-- The name of the package.\nname: hblas\n\n-- The package version. See the Haskell package versioning policy (PVP)\n-- fo" + (Fingerprint 6817250511240350300 15278852712000783849) + (Fingerprint 15757717081429529536 15542551865099640223) + (bsReplace "&&!" "&& !") + , mk "-- Initial hblas.cabal generated by cabal init. For further\n-- documentation, see http://haskell.org/cabal/users-guide/\n\n-- The name of the package.\nname: hblas\n\n-- The package version. See the Haskell package versioning policy (PVP)\n-- fo" + (Fingerprint 8310050400349211976 201317952074418615) + (Fingerprint 10283381191257209624 4231947623042413334) + (bsReplace "&&!" "&& !") + , mk "-- Initial hblas.cabal generated by cabal init. For further\n-- documentation, see http://haskell.org/cabal/users-guide/\n\n-- The name of the package.\nname: hblas\n\n-- The package version. See the Haskell package versioning policy (PVP)\n-- fo" + (Fingerprint 7010988292906098371 11591884496857936132) + (Fingerprint 6158672440010710301 6419743768695725095) + (bsReplace "&&!" "&& !") + , mk "-- Initial hblas.cabal generated by cabal init. For further\r\n-- documentation, see http://haskell.org/cabal/users-guide/\r\n\r\n-- The name of the package.\r\nname: hblas\r\n\r\n-- The package version. See the Haskell package versioning policy (PVP)" + (Fingerprint 2076850805659055833 16615160726215879467) + (Fingerprint 10634706281258477722 5285812379517916984) + (bsReplace "&&!" "&& !") + , mk "-- Initial hblas.cabal generated by cabal init. For further\r\n-- documentation, see http://haskell.org/cabal/users-guide/\r\n\r\n-- The name of the package.\r\nname: hblas\r\n\r\n-- The package version. 
See the Haskell package versioning policy (PVP)" + (Fingerprint 11850020631622781099 11956481969231030830) + (Fingerprint 13702868780337762025 13383526367149067158) + (bsReplace "&&!" "&& !") + , mk "-- Initial hblas.cabal generated by cabal init. For further\n-- documentation, see http://haskell.org/cabal/users-guide/\n\n-- The name of the package.\nname: hblas\n\n-- The package version. See the Haskell package versioning policy (PVP)\n-- fo" + (Fingerprint 13690322768477779172 19704059263540994) + (Fingerprint 11189374824645442376 8363528115442591078) + (bsReplace "&&!" "&& !") + ] + where + mk a b c d = ((a, b), (c, d)) + +-- | Helper to create entries in patches +_makePatchKey :: FilePath -> (BS.ByteString -> BS.ByteString) -> NoCallStackIO () +_makePatchKey fp transform = do + contents <- BS.readFile fp + let output = transform contents + let Fingerprint hi lo = md5 contents + let Fingerprint hi' lo' = md5 output + putStrLn + $ showString " , mk " + . shows (BS.take 256 contents) + . showString "\n (Fingerprint " + . shows hi + . showString " " + . shows lo + . showString ")\n (Fingerprint " + . shows hi' + . showString " " + . shows lo' + . 
showString ")" + $ "" + +------------------------------------------------------------------------------- +-- Patch helpers +------------------------------------------------------------------------------- + +bsRemove + :: BS.ByteString -- ^ needle + -> BS.ByteString -> BS.ByteString +bsRemove needle haystack = case BS.breakSubstring needle haystack of + (h, t) -> BS.append h (BS.drop (BS.length needle) t) + +bsReplace + :: BS.ByteString -- ^ needle + -> BS.ByteString -- ^ replacement + -> BS.ByteString -> BS.ByteString +bsReplace needle repl haystack = case BS.breakSubstring needle haystack of + (h, t) + | not (BS.null t) -> BS.append h (BS.append repl (BS.drop (BS.length needle) t)) + | otherwise -> haystack + +bsRemoveStarting + :: BS.ByteString -- ^ needle + -> BS.ByteString -> BS.ByteString +bsRemoveStarting needle haystack = case BS.breakSubstring needle haystack of + (h, _) -> h diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription/Utils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription/Utils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription/Utils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription/Utils.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,23 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.PackageDescription.Utils +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Common utils used by modules under Distribution.PackageDescription.*. + +module Distribution.PackageDescription.Utils ( + cabalBug, userBug + ) where + +-- ---------------------------------------------------------------------------- +-- Exception and logging utils + +userBug :: String -> a +userBug msg = error $ msg ++ ". This is a bug in your .cabal file." 
+ +cabalBug :: String -> a +cabalBug msg = error $ msg ++ ". This is possibly a bug in Cabal.\n" + ++ "Please report it to the developers: " + ++ "https://github.com/haskell/cabal/issues/new" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PackageDescription.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PackageDescription.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,139 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.PackageDescription +-- Copyright : Isaac Jones 2003-2005 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Backwards compatibility reexport of everything you need to know +-- about @.cabal@ files. 
+ +module Distribution.PackageDescription ( + -- * Package descriptions + PackageDescription(..), + emptyPackageDescription, + specVersion, + buildType, + license, + descCabalVersion, + BuildType(..), + knownBuildTypes, + allLibraries, + + -- ** Renaming (syntactic) + ModuleRenaming(..), + defaultRenaming, + + -- ** Libraries + Library(..), + ModuleReexport(..), + emptyLibrary, + withLib, + hasPublicLib, + hasLibs, + explicitLibModules, + libModulesAutogen, + libModules, + + -- ** Executables + Executable(..), + emptyExecutable, + withExe, + hasExes, + exeModules, + exeModulesAutogen, + + -- * Tests + TestSuite(..), + TestSuiteInterface(..), + TestType(..), + testType, + knownTestTypes, + emptyTestSuite, + hasTests, + withTest, + testModules, + testModulesAutogen, + + -- * Benchmarks + Benchmark(..), + BenchmarkInterface(..), + BenchmarkType(..), + benchmarkType, + knownBenchmarkTypes, + emptyBenchmark, + hasBenchmarks, + withBenchmark, + benchmarkModules, + benchmarkModulesAutogen, + + -- * Build information + BuildInfo(..), + emptyBuildInfo, + allBuildInfo, + allLanguages, + allExtensions, + usedExtensions, + usesTemplateHaskellOrQQ, + hcOptions, + hcProfOptions, + hcSharedOptions, + hcStaticOptions, + + -- ** Supplementary build information + allBuildDepends, + enabledBuildDepends, + ComponentName(..), + defaultLibName, + HookedBuildInfo, + emptyHookedBuildInfo, + updatePackageDescription, + + -- * package configuration + GenericPackageDescription(..), + Flag(..), emptyFlag, + FlagName, mkFlagName, unFlagName, + FlagAssignment, mkFlagAssignment, unFlagAssignment, + nullFlagAssignment, showFlagValue, + diffFlagAssignment, lookupFlagAssignment, insertFlagAssignment, + dispFlagAssignment, parseFlagAssignment, parsecFlagAssignment, + findDuplicateFlagAssignments, + CondTree(..), ConfVar(..), Condition(..), + cNot, cAnd, cOr, + + -- * Source repositories + SourceRepo(..), + RepoKind(..), + RepoType(..), + knownRepoTypes, + emptySourceRepo, + + -- * Custom setup build 
information + SetupBuildInfo(..), + ) where + +import Prelude () +--import Distribution.Compat.Prelude + +import Distribution.Types.Library +import Distribution.Types.TestSuite +import Distribution.Types.Executable +import Distribution.Types.Benchmark +import Distribution.Types.TestType +import Distribution.Types.TestSuiteInterface +import Distribution.Types.BenchmarkType +import Distribution.Types.BenchmarkInterface +import Distribution.Types.ModuleRenaming +import Distribution.Types.ModuleReexport +import Distribution.Types.BuildInfo +import Distribution.Types.SetupBuildInfo +import Distribution.Types.BuildType +import Distribution.Types.GenericPackageDescription +import Distribution.Types.CondTree +import Distribution.Types.Condition +import Distribution.Types.PackageDescription +import Distribution.Types.ComponentName +import Distribution.Types.HookedBuildInfo +import Distribution.Types.SourceRepo diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Package.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Package.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Package.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Package.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,102 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Package +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Defines a package identifier along with a parser and pretty printer for it. +-- 'PackageIdentifier's consist of a name and an exact version. It also defines +-- a 'Dependency' data type. 
A dependency is a package name and a version +-- range, like @\"foo >= 1.2 && < 2\"@. + +module Distribution.Package + ( module Distribution.Types.AbiHash + , module Distribution.Types.ComponentId + , module Distribution.Types.PackageId + , module Distribution.Types.UnitId + , module Distribution.Types.Module + , module Distribution.Types.PackageName + , module Distribution.Types.PkgconfigName + , module Distribution.Types.Dependency + , Package(..), packageName, packageVersion + , HasMungedPackageId(..), mungedName', mungedVersion' + , HasUnitId(..) + , installedPackageId + , PackageInstalled(..) + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Version + ( Version ) + +import Distribution.Types.AbiHash +import Distribution.Types.ComponentId +import Distribution.Types.Dependency +import Distribution.Types.MungedPackageId +import Distribution.Types.PackageId +import Distribution.Types.UnitId +import Distribution.Types.Module +import Distribution.Types.MungedPackageName +import Distribution.Types.PackageName +import Distribution.Types.PkgconfigName + +-- | Class of things that have a 'PackageIdentifier' +-- +-- Types in this class are all notions of a package. This allows us to have +-- different types for the different phases that packages go though, from +-- simple name\/id, package description, configured or installed packages. +-- +-- Not all kinds of packages can be uniquely identified by a +-- 'PackageIdentifier'. In particular, installed packages cannot, there may be +-- many installed instances of the same source package. +-- +class Package pkg where + packageId :: pkg -> PackageIdentifier + +mungedName' :: HasMungedPackageId pkg => pkg -> MungedPackageName +mungedName' = mungedName . mungedId + +mungedVersion' :: HasMungedPackageId munged => munged -> Version +mungedVersion' = mungedVersion . 
mungedId + +class HasMungedPackageId pkg where + mungedId :: pkg -> MungedPackageId + +instance Package PackageIdentifier where + packageId = id + +packageName :: Package pkg => pkg -> PackageName +packageName = pkgName . packageId + +packageVersion :: Package pkg => pkg -> Version +packageVersion = pkgVersion . packageId + +instance HasMungedPackageId MungedPackageId where + mungedId = id + +-- | Packages that have an installed unit ID +class Package pkg => HasUnitId pkg where + installedUnitId :: pkg -> UnitId + +{-# DEPRECATED installedPackageId "Use installedUnitId instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +-- | Compatibility wrapper for Cabal pre-1.24. +installedPackageId :: HasUnitId pkg => pkg -> UnitId +installedPackageId = installedUnitId + +-- | Class of installed packages. +-- +-- The primary data type which is an instance of this package is +-- 'InstalledPackageInfo', but when we are doing install plans in Cabal install +-- we may have other, installed package-like things which contain more metadata. +-- Installed packages have exact dependencies 'installedDepends'. 
+class (HasUnitId pkg) => PackageInstalled pkg where + installedDepends :: pkg -> [UnitId] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/Class.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/Class.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/Class.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/Class.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,353 @@ +{-# LANGUAGE GADTs #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE ScopedTypeVariables #-} +module Distribution.Parsec.Class ( + Parsec(..), + ParsecParser (..), + runParsecParser, + simpleParsec, + lexemeParsec, + eitherParsec, + explicitEitherParsec, + -- * CabalParsing & warnings + CabalParsing (..), + PWarnType (..), + -- * Utilities + parsecToken, + parsecToken', + parsecFilePath, + parsecQuoted, + parsecMaybeQuoted, + parsecCommaList, + parsecLeadingCommaList, + parsecOptCommaList, + parsecStandard, + parsecUnqualComponentName, + ) where + +import Data.Char (digitToInt, intToDigit) +import Data.Functor.Identity (Identity (..)) +import Data.List (transpose) +import Distribution.CabalSpecVersion +import Distribution.Compat.Prelude +import Distribution.Parsec.FieldLineStream +import Distribution.Parsec.Common (PWarnType (..), PWarning (..), Position (..)) +import Numeric (showIntAtBase) +import Prelude () + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.MonadFail as Fail +import qualified Distribution.Compat.ReadP as ReadP +import qualified Text.Parsec as Parsec + +------------------------------------------------------------------------------- +-- Class +------------------------------------------------------------------------------- + +-- | Class for parsing with @parsec@. Mainly used for @.cabal@ file fields. 
+class Parsec a where + parsec :: CabalParsing m => m a + +-- | Parsing class which +-- +-- * can report Cabal parser warnings. +-- +-- * knows @cabal-version@ we work with +-- +class (P.CharParsing m, MonadPlus m) => CabalParsing m where + parsecWarning :: PWarnType -> String -> m () + + parsecHaskellString :: m String + parsecHaskellString = stringLiteral + + askCabalSpecVersion :: m CabalSpecVersion + +instance t ~ Char => CabalParsing (ReadP.Parser r t) where + parsecWarning _ _ = pure () + askCabalSpecVersion = pure cabalSpecLatest + +-- | 'parsec' /could/ consume trailing spaces, this function /will/ consume. +lexemeParsec :: (CabalParsing m, Parsec a) => m a +lexemeParsec = parsec <* P.spaces + +newtype ParsecParser a = PP { unPP + :: CabalSpecVersion -> Parsec.Parsec FieldLineStream [PWarning] a + } + +liftParsec :: Parsec.Parsec FieldLineStream [PWarning] a -> ParsecParser a +liftParsec p = PP $ \_ -> p + +instance Functor ParsecParser where + fmap f p = PP $ \v -> fmap f (unPP p v) + {-# INLINE fmap #-} + + x <$ p = PP $ \v -> x <$ unPP p v + {-# INLINE (<$) #-} + +instance Applicative ParsecParser where + pure = liftParsec . 
pure + {-# INLINE pure #-} + + f <*> x = PP $ \v -> unPP f v <*> unPP x v + {-# INLINE (<*>) #-} + f *> x = PP $ \v -> unPP f v *> unPP x v + {-# INLINE (*>) #-} + f <* x = PP $ \v -> unPP f v <* unPP x v + {-# INLINE (<*) #-} + +instance Alternative ParsecParser where + empty = liftParsec empty + + a <|> b = PP $ \v -> unPP a v <|> unPP b v + {-# INLINE (<|>) #-} + + many p = PP $ \v -> many (unPP p v) + {-# INLINE many #-} + + some p = PP $ \v -> some (unPP p v) + {-# INLINE some #-} + +instance Monad ParsecParser where + return = pure + + m >>= k = PP $ \v -> unPP m v >>= \x -> unPP (k x) v + {-# INLINE (>>=) #-} + (>>) = (*>) + {-# INLINE (>>) #-} + + fail = Fail.fail + +instance MonadPlus ParsecParser where + mzero = empty + mplus = (<|>) + +instance Fail.MonadFail ParsecParser where + fail = P.unexpected + +instance P.Parsing ParsecParser where + try p = PP $ \v -> P.try (unPP p v) + p d = PP $ \v -> unPP p v P. d + skipMany p = PP $ \v -> P.skipMany (unPP p v) + skipSome p = PP $ \v -> P.skipSome (unPP p v) + unexpected = liftParsec . P.unexpected + eof = liftParsec P.eof + notFollowedBy p = PP $ \v -> P.notFollowedBy (unPP p v) + +instance P.CharParsing ParsecParser where + satisfy = liftParsec . P.satisfy + char = liftParsec . P.char + notChar = liftParsec . P.notChar + anyChar = liftParsec P.anyChar + string = liftParsec . P.string + +instance CabalParsing ParsecParser where + parsecWarning t w = liftParsec $ Parsec.modifyState (PWarning t (Position 0 0) w :) + askCabalSpecVersion = PP pure + +-- | Parse a 'String' with 'lexemeParsec'. +simpleParsec :: Parsec a => String -> Maybe a +simpleParsec + = either (const Nothing) Just + . runParsecParser lexemeParsec "" + . fieldLineStreamFromString + +-- | Parse a 'String' with 'lexemeParsec'. +eitherParsec :: Parsec a => String -> Either String a +eitherParsec = explicitEitherParsec parsec + +-- | Parse a 'String' with given 'ParsecParser'. Trailing whitespace is accepted. 
+explicitEitherParsec :: ParsecParser a -> String -> Either String a +explicitEitherParsec parser + = either (Left . show) Right + . runParsecParser (parser <* P.spaces) "" + . fieldLineStreamFromString + +-- | Run 'ParsecParser' with 'cabalSpecLatest'. +runParsecParser :: ParsecParser a -> FilePath -> FieldLineStream -> Either Parsec.ParseError a +runParsecParser p n = Parsec.runParser (unPP p cabalSpecLatest <* P.eof) [] n + +instance Parsec a => Parsec (Identity a) where + parsec = Identity <$> parsec + +instance Parsec Bool where + parsec = P.munch1 isAlpha >>= postprocess + where + postprocess str + | str == "True" = pure True + | str == "False" = pure False + | lstr == "true" = parsecWarning PWTBoolCase caseWarning *> pure True + | lstr == "false" = parsecWarning PWTBoolCase caseWarning *> pure False + | otherwise = fail $ "Not a boolean: " ++ str + where + lstr = map toLower str + caseWarning = + "Boolean values are case sensitive, use 'True' or 'False'." + +-- | @[^ ,]@ +parsecToken :: CabalParsing m => m String +parsecToken = parsecHaskellString <|> ((P.munch1 (\x -> not (isSpace x) && x /= ',') P. "identifier" ) >>= checkNotDoubleDash) + +-- | @[^ ]@ +parsecToken' :: CabalParsing m => m String +parsecToken' = parsecHaskellString <|> ((P.munch1 (not . isSpace) P. "token") >>= checkNotDoubleDash) + +checkNotDoubleDash :: CabalParsing m => String -> m String +checkNotDoubleDash s = do + when (s == "--") $ parsecWarning PWTDoubleDash $ unwords + [ "Double-dash token found." + , "Note: there are no end-of-line comments in .cabal files, only whole line comments." + , "Use \"--\" (quoted double dash) to silence this warning, if you actually want -- token" + ] + + return s + +parsecFilePath :: CabalParsing m => m FilePath +parsecFilePath = parsecToken + +-- | Parse a benchmark/test-suite types. 
+parsecStandard :: (CabalParsing m, Parsec ver) => (ver -> String -> a) -> m a +parsecStandard f = do + cs <- some $ P.try (component <* P.char '-') + ver <- parsec + let name = map toLower (intercalate "-" cs) + return $! f ver name + where + component = do + cs <- P.munch1 isAlphaNum + if all isDigit cs then fail "all digit component" else return cs + -- each component must contain an alphabetic character, to avoid + -- ambiguity in identifiers like foo-1 (the 1 is the version number). + +parsecCommaList :: CabalParsing m => m a -> m [a] +parsecCommaList p = P.sepBy (p <* P.spaces) (P.char ',' *> P.spaces P. "comma") + +-- | Like 'parsecCommaList' but accept leading or trailing comma. +-- +-- @ +-- p (comma p)* -- p `sepBy` comma +-- (comma p)* -- leading comma +-- (p comma)* -- trailing comma +-- @ +parsecLeadingCommaList :: CabalParsing m => m a -> m [a] +parsecLeadingCommaList p = do + c <- P.optional comma + case c of + Nothing -> P.sepEndBy1 lp comma <|> pure [] + Just _ -> P.sepBy1 lp comma + where + lp = p <* P.spaces + comma = P.char ',' *> P.spaces P. "comma" + +parsecOptCommaList :: CabalParsing m => m a -> m [a] +parsecOptCommaList p = P.sepBy (p <* P.spaces) (P.optional comma) + where + comma = P.char ',' *> P.spaces + +-- | Content isn't unquoted +parsecQuoted :: CabalParsing m => m a -> m a +parsecQuoted = P.between (P.char '"') (P.char '"') + +-- | @parsecMaybeQuoted p = 'parsecQuoted' p <|> p@. +parsecMaybeQuoted :: CabalParsing m => m a -> m a +parsecMaybeQuoted p = parsecQuoted p <|> p + +parsecUnqualComponentName :: CabalParsing m => m String +parsecUnqualComponentName = intercalate "-" <$> P.sepBy1 component (P.char '-') + where + component :: CabalParsing m => m String + component = do + cs <- P.munch1 isAlphaNum + if all isDigit cs + then fail "all digits in portion of unqualified component name" + else return cs + +stringLiteral :: forall m. 
P.CharParsing m => m String +stringLiteral = lit where + lit :: m String + lit = foldr (maybe id (:)) "" + <$> P.between (P.char '"') (P.char '"' P. "end of string") (many stringChar) + P. "string" + + stringChar :: m (Maybe Char) + stringChar = Just <$> stringLetter + <|> stringEscape + P. "string character" + + stringLetter :: m Char + stringLetter = P.satisfy (\c -> (c /= '"') && (c /= '\\') && (c > '\026')) + + stringEscape :: m (Maybe Char) + stringEscape = P.char '\\' *> esc where + esc :: m (Maybe Char) + esc = Nothing <$ escapeGap + <|> Nothing <$ escapeEmpty + <|> Just <$> escapeCode + + escapeEmpty, escapeGap :: m Char + escapeEmpty = P.char '&' + escapeGap = P.skipSpaces1 *> (P.char '\\' P. "end of string gap") + +escapeCode :: forall m. P.CharParsing m => m Char +escapeCode = (charEsc <|> charNum <|> charAscii <|> charControl) P. "escape code" + where + charControl, charNum :: m Char + charControl = (\c -> toEnum (fromEnum c - fromEnum '@')) <$> (P.char '^' *> (P.upper <|> P.char '@')) + charNum = toEnum <$> num + where + num :: m Int + num = bounded 10 maxchar + <|> (P.char 'o' *> bounded 8 maxchar) + <|> (P.char 'x' *> bounded 16 maxchar) + maxchar = fromEnum (maxBound :: Char) + + bounded :: Int -> Int -> m Int + bounded base bnd = foldl' (\x d -> base * x + digitToInt d) 0 + <$> bounded' (take base thedigits) (map digitToInt $ showIntAtBase base intToDigit bnd "") + where + thedigits :: [m Char] + thedigits = map P.char ['0'..'9'] ++ map P.oneOf (transpose [['A'..'F'],['a'..'f']]) + + toomuch :: m a + toomuch = P.unexpected "out-of-range numeric escape sequence" + + bounded', bounded'' :: [m Char] -> [Int] -> m [Char] + bounded' dps@(zero:_) bds = P.skipSome zero *> ([] <$ P.notFollowedBy (P.choice dps) <|> bounded'' dps bds) + <|> bounded'' dps bds + bounded' [] _ = error "bounded called with base 0" + bounded'' dps [] = [] <$ P.notFollowedBy (P.choice dps) <|> toomuch + bounded'' dps (bd : bds) = let anyd :: m Char + anyd = P.choice dps + + nomore 
:: m () + nomore = P.notFollowedBy anyd <|> toomuch + + (low, ex : high) = splitAt bd dps + in ((:) <$> P.choice low <*> atMost (length bds) anyd) <* nomore + <|> ((:) <$> ex <*> ([] <$ nomore <|> bounded'' dps bds)) + <|> if not (null bds) + then (:) <$> P.choice high <*> atMost (length bds - 1) anyd <* nomore + else empty + atMost n p | n <= 0 = pure [] + | otherwise = ((:) <$> p <*> atMost (n - 1) p) <|> pure [] + + charEsc :: m Char + charEsc = P.choice $ parseEsc <$> escMap + + parseEsc (c,code) = code <$ P.char c + escMap = zip "abfnrtv\\\"\'" "\a\b\f\n\r\t\v\\\"\'" + + charAscii :: m Char + charAscii = P.choice $ parseAscii <$> asciiMap + + parseAscii (asc,code) = P.try $ code <$ P.string asc + asciiMap = zip (ascii3codes ++ ascii2codes) (ascii3 ++ ascii2) + ascii2codes, ascii3codes :: [String] + ascii2codes = [ "BS","HT","LF","VT","FF","CR","SO" + , "SI","EM","FS","GS","RS","US","SP"] + ascii3codes = ["NUL","SOH","STX","ETX","EOT","ENQ","ACK" + ,"BEL","DLE","DC1","DC2","DC3","DC4","NAK" + ,"SYN","ETB","CAN","SUB","ESC","DEL"] + ascii2, ascii3 :: String + ascii2 = "\BS\HT\LF\VT\FF\CR\SO\SI\EM\FS\GS\RS\US\SP" + ascii3 = "\NUL\SOH\STX\ETX\EOT\ENQ\ACK\BEL\DLE\DC1\DC2\DC3\DC4\NAK\SYN\ETB\CAN\SUB\ESC\DEL" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/Common.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/Common.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/Common.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/Common.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,100 @@ +{-# LANGUAGE DeriveGeneric #-} +-- | Module containing small types +module Distribution.Parsec.Common ( + -- * Diagnostics + PError (..), + showPError, + PWarning (..), + PWarnType (..), + showPWarning, + -- * Position + Position (..), + incPos, + retPos, + showPos, + zeroPos, + ) where + 
+import Distribution.Compat.Prelude +import Prelude () +import System.FilePath (normalise) + +-- | Parser error. +data PError = PError Position String + deriving (Show, Generic) + +instance Binary PError +instance NFData PError where rnf = genericRnf + +-- | Type of parser warning. We do classify warnings. +-- +-- Different application may decide not to show some, or have fatal behaviour on others +data PWarnType + = PWTOther -- ^ Unclassified warning + | PWTUTF -- ^ Invalid UTF encoding + | PWTBoolCase -- ^ @true@ or @false@, not @True@ or @False@ + | PWTVersionTag -- ^ there are version with tags + | PWTNewSyntax -- ^ New syntax used, but no @cabal-version: >= 1.2@ specified + | PWTOldSyntax -- ^ Old syntax used, and @cabal-version >= 1.2@ specified + | PWTDeprecatedField + | PWTInvalidSubsection + | PWTUnknownField + | PWTUnknownSection + | PWTTrailingFields + | PWTExtraMainIs -- ^ extra main-is field + | PWTExtraTestModule -- ^ extra test-module field + | PWTExtraBenchmarkModule -- ^ extra benchmark-module field + | PWTLexNBSP + | PWTLexBOM + | PWTLexTab + | PWTQuirkyCabalFile -- ^ legacy cabal file that we know how to patch + | PWTDoubleDash -- ^ Double dash token, most likely it's a mistake - it's not a comment + | PWTMultipleSingularField -- ^ e.g. name or version should be specified only once. + | PWTBuildTypeDefault -- ^ Workaround for derive-package having build-type: Default. See . + | PWTVersionLeadingZeros -- ^ See https://github.com/haskell-infra/hackage-trustees/issues/128 + deriving (Eq, Ord, Show, Enum, Bounded, Generic) + +instance Binary PWarnType +instance NFData PWarnType where rnf = genericRnf + +-- | Parser warning. 
+data PWarning = PWarning !PWarnType !Position String + deriving (Show, Generic) + +instance Binary PWarning +instance NFData PWarning where rnf = genericRnf + +showPWarning :: FilePath -> PWarning -> String +showPWarning fpath (PWarning _ pos msg) = + normalise fpath ++ ":" ++ showPos pos ++ ": " ++ msg + +showPError :: FilePath -> PError -> String +showPError fpath (PError pos msg) = + normalise fpath ++ ":" ++ showPos pos ++ ": " ++ msg + +------------------------------------------------------------------------------- +-- Position +------------------------------------------------------------------------------- + +-- | 1-indexed row and column positions in a file. +data Position = Position + {-# UNPACK #-} !Int -- row + {-# UNPACK #-} !Int -- column + deriving (Eq, Ord, Show, Generic) + +instance Binary Position +instance NFData Position where rnf = genericRnf + +-- | Shift position by n columns to the right. +incPos :: Int -> Position -> Position +incPos n (Position row col) = Position row (col + n) + +-- | Shift position to beginning of next row. 
+retPos :: Position -> Position +retPos (Position row _col) = Position (row + 1) 1 + +showPos :: Position -> String +showPos (Position row col) = show row ++ ":" ++ show col + +zeroPos :: Position +zeroPos = Position 0 0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/ConfVar.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/ConfVar.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/ConfVar.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/ConfVar.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,124 @@ +{-# LANGUAGE OverloadedStrings #-} +module Distribution.Parsec.ConfVar (parseConditionConfVar) where + +import Distribution.Compat.CharParsing (char, integral) +import Distribution.Compat.Prelude +import Distribution.Parsec.Class (Parsec (..), runParsecParser) +import Distribution.Parsec.Common +import Distribution.Parsec.FieldLineStream +import Distribution.Parsec.Field (SectionArg (..)) +import Distribution.Parsec.ParseResult +import Distribution.Types.Condition +import Distribution.Types.GenericPackageDescription (ConfVar (..)) +import Distribution.Version + (anyVersion, earlierVersion, intersectVersionRanges, laterVersion, majorBoundVersion, + mkVersion, noVersion, orEarlierVersion, orLaterVersion, thisVersion, unionVersionRanges, + withinVersion) +import Prelude () + +import qualified Text.Parsec as P +import qualified Text.Parsec.Error as P + +-- | Parse @'Condition' 'ConfVar'@ from section arguments provided by parsec +-- based outline parser. +parseConditionConfVar :: [SectionArg Position] -> ParseResult (Condition ConfVar) +parseConditionConfVar args = + -- The name of the input file is irrelevant, as we reformat the error message. 
+ case P.runParser (parser <* P.eof) () "" args of + Right x -> pure x + Left err -> do + -- Mangle the position to the actual one + let ppos = P.errorPos err + let epos = Position (P.sourceLine ppos) (P.sourceColumn ppos) + let msg = P.showErrorMessages + "or" "unknown parse error" "expecting" "unexpected" "end of input" + (P.errorMessages err) + parseFailure epos msg + pure $ Lit True + +type Parser = P.Parsec [SectionArg Position] () + +parser :: Parser (Condition ConfVar) +parser = condOr + where + condOr = P.sepBy1 condAnd (oper "||") >>= return . foldl1 COr + condAnd = P.sepBy1 cond (oper "&&") >>= return . foldl1 CAnd + cond = P.choice + [ boolLiteral, parens condOr, notCond, osCond, archCond, flagCond, implCond ] + + notCond = CNot <$ oper "!" <*> cond + + boolLiteral = Lit <$> boolLiteral' + osCond = Var . OS <$ string "os" <*> parens fromParsec + flagCond = Var . Flag <$ string "flag" <*> parens fromParsec + archCond = Var . Arch <$ string "arch" <*> parens fromParsec + implCond = Var <$ string "impl" <*> parens implCond' + + implCond' = Impl + <$> fromParsec + <*> P.option anyVersion versionRange + + version = fromParsec + versionStar = mkVersion <$> fromParsec' versionStar' <* oper "*" + versionStar' = some (integral <* char '.') + + versionRange = expr + where + expr = foldl1 unionVersionRanges <$> P.sepBy1 term (oper "||") + term = foldl1 intersectVersionRanges <$> P.sepBy1 factor (oper "&&") + + factor = P.choice + $ parens expr + : parseAnyVersion + : parseNoVersion + : parseWildcardRange + : map parseRangeOp rangeOps + + parseAnyVersion = anyVersion <$ string "-any" + parseNoVersion = noVersion <$ string "-none" + + parseWildcardRange = P.try $ withinVersion <$ oper "==" <*> versionStar + + parseRangeOp (s,f) = P.try (f <$ oper s <*> version) + rangeOps = [ ("<", earlierVersion), + ("<=", orEarlierVersion), + (">", laterVersion), + (">=", orLaterVersion), + ("^>=", majorBoundVersion), + ("==", thisVersion) ] + + -- Number token can have many dots 
in it: SecArgNum (Position 65 15) "7.6.1" + identBS = tokenPrim $ \t -> case t of + SecArgName _ s -> Just s + _ -> Nothing + + boolLiteral' = tokenPrim $ \t -> case t of + SecArgName _ s + | s == "True" -> Just True + | s == "true" -> Just True + | s == "False" -> Just False + | s == "false" -> Just False + _ -> Nothing + + string s = tokenPrim $ \t -> case t of + SecArgName _ s' | s == s' -> Just () + _ -> Nothing + + oper o = tokenPrim $ \t -> case t of + SecArgOther _ o' | o == o' -> Just () + _ -> Nothing + + parens = P.between (oper "(") (oper ")") + + tokenPrim = P.tokenPrim prettySectionArg updatePosition + -- TODO: check where the errors are reported + updatePosition x _ _ = x + prettySectionArg = show + + fromParsec :: Parsec a => Parser a + fromParsec = fromParsec' parsec + + fromParsec' p = do + bs <- identBS + let fls = fieldLineStreamFromBS bs + either (fail . show) pure (runParsecParser p "" fls) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/Field.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/Field.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/Field.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/Field.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,96 @@ +{-# LANGUAGE DeriveFunctor #-} +-- | Cabal-like file AST types: 'Field', 'Section' etc +-- +-- These types are parametrized by an annotation. 
+module Distribution.Parsec.Field ( + -- * Cabal file + Field (..), + fieldName, + fieldAnn, + fieldUniverse, + FieldLine (..), + SectionArg (..), + sectionArgAnn, + -- * Name + FieldName, + Name (..), + mkName, + getName, + nameAnn, + ) where + +import Prelude () +import Distribution.Compat.Prelude +import Data.ByteString (ByteString) +import qualified Data.ByteString.Char8 as B +import qualified Data.Char as Char + +------------------------------------------------------------------------------- +-- Cabal file +------------------------------------------------------------------------------- + +-- | A Cabal-like file consists of a series of fields (@foo: bar@) and sections (@library ...@). +data Field ann + = Field !(Name ann) [FieldLine ann] + | Section !(Name ann) [SectionArg ann] [Field ann] + deriving (Eq, Show, Functor) + +-- | Section of field name +fieldName :: Field ann -> Name ann +fieldName (Field n _ ) = n +fieldName (Section n _ _) = n + +fieldAnn :: Field ann -> ann +fieldAnn = nameAnn . fieldName + +-- | All transitive descendands of 'Field', including itself. +-- +-- /Note:/ the resulting list is never empty. +-- +fieldUniverse :: Field ann -> [Field ann] +fieldUniverse f@(Section _ _ fs) = f : concatMap fieldUniverse fs +fieldUniverse f@(Field _ _) = [f] + +-- | A line of text representing the value of a field from a Cabal file. +-- A field may contain multiple lines. +-- +-- /Invariant:/ 'ByteString' has no newlines. +data FieldLine ann = FieldLine !ann !ByteString + deriving (Eq, Show, Functor) + +-- | Section arguments, e.g. name of the library +data SectionArg ann + = SecArgName !ann !ByteString + -- ^ identifier, or omething which loos like number. Also many dot numbers, i.e. "7.6.3" + | SecArgStr !ann !ByteString + -- ^ quoted string + | SecArgOther !ann !ByteString + -- ^ everything else, mm. operators (e.g. in if-section conditionals) + deriving (Eq, Show, Functor) + +-- | Extract annotation from 'SectionArg'. 
+sectionArgAnn :: SectionArg ann -> ann +sectionArgAnn (SecArgName ann _) = ann +sectionArgAnn (SecArgStr ann _) = ann +sectionArgAnn (SecArgOther ann _) = ann + +------------------------------------------------------------------------------- +-- Name +------------------------------------------------------------------------------- + +type FieldName = ByteString + +-- | A field name. +-- +-- /Invariant/: 'ByteString' is lower-case ASCII. +data Name ann = Name !ann !FieldName + deriving (Eq, Show, Functor) + +mkName :: ann -> FieldName -> Name ann +mkName ann bs = Name ann (B.map Char.toLower bs) + +getName :: Name ann -> FieldName +getName (Name _ bs) = bs + +nameAnn :: Name ann -> ann +nameAnn (Name ann _) = ann diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/FieldLineStream.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/FieldLineStream.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/FieldLineStream.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/FieldLineStream.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,96 @@ +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings , ScopedTypeVariables #-} +{-# OPTIONS_GHC -Wall -Werror #-} +module Distribution.Parsec.FieldLineStream ( + FieldLineStream (..), + fieldLinesToStream, + fieldLineStreamFromString, + fieldLineStreamFromBS, + ) where + +import Data.Bits +import Data.ByteString (ByteString) +import Distribution.Compat.Prelude +import Distribution.Parsec.Field (FieldLine (..)) +import Distribution.Utils.Generic (toUTF8BS) +import Prelude () + +import qualified Data.ByteString as BS +import qualified Text.Parsec as Parsec + +-- | This is essentially a lazy bytestring, but chunks are glued with newline '\n'. 
+data FieldLineStream + = FLSLast !ByteString + | FLSCons {-# UNPACK #-} !ByteString FieldLineStream + deriving Show + +fieldLinesToStream :: [FieldLine ann] -> FieldLineStream +fieldLinesToStream [] = end +fieldLinesToStream [FieldLine _ bs] = FLSLast bs +fieldLinesToStream (FieldLine _ bs : fs) = FLSCons bs (fieldLinesToStream fs) + +end :: FieldLineStream +end = FLSLast "" + +-- | Convert 'String' to 'FieldLineStream'. +-- +-- /Note:/ inefficient! +fieldLineStreamFromString :: String -> FieldLineStream +fieldLineStreamFromString = FLSLast . toUTF8BS + +fieldLineStreamFromBS :: ByteString -> FieldLineStream +fieldLineStreamFromBS = FLSLast + +instance Monad m => Parsec.Stream FieldLineStream m Char where + uncons (FLSLast bs) = return $ case BS.uncons bs of + Nothing -> Nothing + Just (c, bs') -> Just (unconsChar c bs' (\bs'' -> FLSLast bs'') end) + + uncons (FLSCons bs s) = return $ case BS.uncons bs of + -- as lines are glued with '\n', we return '\n' here! + Nothing -> Just ('\n', s) + Just (c, bs') -> Just (unconsChar c bs' (\bs'' -> FLSCons bs'' s) s) + +-- Bssed on implementation 'decodeStringUtf8' +unconsChar :: forall a. Word8 -> ByteString -> (ByteString -> a) -> a -> (Char, a) +unconsChar c0 bs0 f next + | c0 <= 0x7F = (chr (fromIntegral c0), f bs0) + | c0 <= 0xBF = (replacementChar, f bs0) + | c0 <= 0xDF = twoBytes + | c0 <= 0xEF = moreBytes 3 0x800 bs0 (fromIntegral $ c0 .&. 0xF) + | c0 <= 0xF7 = moreBytes 4 0x10000 bs0 (fromIntegral $ c0 .&. 0x7) + | c0 <= 0xFB = moreBytes 5 0x200000 bs0 (fromIntegral $ c0 .&. 0x3) + | c0 <= 0xFD = moreBytes 6 0x4000000 bs0 (fromIntegral $ c0 .&. 0x1) + | otherwise = error $ "not implemented " ++ show c0 + where + twoBytes = case BS.uncons bs0 of + Nothing -> (replacementChar, next) + Just (c1, bs1) + | c1 .&. 0xC0 == 0x80 -> + if d >= 0x80 + then (chr d, f bs1) + else (replacementChar, f bs1) + | otherwise -> (replacementChar, f bs1) + where + d = (fromIntegral (c0 .&. 0x1F) `shiftL` 6) .|. fromIntegral (c1 .&. 
0x3F) + + moreBytes :: Int -> Int -> ByteString -> Int -> (Char, a) + moreBytes 1 overlong bs' acc + | overlong <= acc, acc <= 0x10FFFF, acc < 0xD800 || 0xDFFF < acc + = (chr acc, f bs') + | otherwise + = (replacementChar, f bs') + + moreBytes byteCount overlong bs' acc = case BS.uncons bs' of + Nothing -> (replacementChar, f bs') + Just (cn, bs1) + | cn .&. 0xC0 == 0x80 -> moreBytes + (byteCount-1) + overlong + bs1 + ((acc `shiftL` 6) .|. fromIntegral cn .&. 0x3F) + | otherwise -> (replacementChar, f bs1) + +replacementChar :: Char +replacementChar = '\xfffd' diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/Lexer.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/Lexer.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/Lexer.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/Lexer.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,422 @@ +{-# OPTIONS_GHC -fno-warn-unused-binds -fno-warn-missing-signatures #-} +{-# LANGUAGE CPP,MagicHash #-} +{-# LINE 1 "boot/Lexer.x" #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Parsec.Lexer +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Lexer for the cabal files. +{-# LANGUAGE CPP #-} +{-# LANGUAGE BangPatterns #-} +#ifdef CABAL_PARSEC_DEBUG +{-# LANGUAGE PatternGuards #-} +#endif +{-# OPTIONS_GHC -fno-warn-unused-imports #-} +module Distribution.Parsec.Lexer + (ltest, lexToken, Token(..), LToken(..) + ,bol_section, in_section, in_field_layout, in_field_braces + ,mkLexState) where + +-- [Note: boostrapping parsec parser] +-- +-- We manually produce the `Lexer.hs` file from `boot/Lexer.x` (make lexer) +-- because boostrapping cabal-install would be otherwise tricky. 
+-- Alex is (atm) tricky package to build, cabal-install has some magic +-- to move bundled generated files in place, so rather we don't depend +-- on it before we can build it ourselves. +-- Therefore there is one thing less to worry in bootstrap.sh, which is a win. +-- +-- See also https://github.com/haskell/cabal/issues/4633 +-- + +import Prelude () +import qualified Prelude as Prelude +import Distribution.Compat.Prelude + +import Distribution.Parsec.LexerMonad +import Distribution.Parsec.Common (Position (..), incPos, retPos) +import Data.ByteString (ByteString) +import qualified Data.ByteString as B +import qualified Data.ByteString.Char8 as B.Char8 +import qualified Data.Word as Word + +#ifdef CABAL_PARSEC_DEBUG +import Debug.Trace +import qualified Data.Vector as V +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.Text.Encoding.Error as T +#endif + +#if __GLASGOW_HASKELL__ >= 603 +#include "ghcconfig.h" +#elif defined(__GLASGOW_HASKELL__) +#include "config.h" +#endif +#if __GLASGOW_HASKELL__ >= 503 +import Data.Array +import Data.Array.Base (unsafeAt) +#else +import Array +#endif +#if __GLASGOW_HASKELL__ >= 503 +import GHC.Exts +#else +import GlaExts +#endif +alex_tab_size :: Int +alex_tab_size = 8 +alex_base :: AlexAddr +alex_base = AlexA# 
"\x12\xff\xff\xff\xf9\xff\xff\xff\xfb\xff\xff\xff\x01\x00\x00\x00\x2f\x00\x00\x00\x50\x00\x00\x00\xd0\x00\x00\x00\x48\xff\xff\xff\xdc\xff\xff\xff\x51\xff\xff\xff\x6d\xff\xff\xff\x6f\xff\xff\xff\x50\x01\x00\x00\x74\x01\x00\x00\x70\xff\xff\xff\x68\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\xa3\x01\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x6a\x00\x00\x00\xd1\x01\x00\x00\xfb\x01\x00\x00\x7b\x02\x00\x00\xfb\x02\x00\x00\x00\x00\x00\x00\x7b\x03\x00\x00\x7d\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0d\x00\x00\x00\x6d\x00\x00\x00\x6b\x00\x00\x00\xfc\x03\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x6f\x00\x00\x00\x1c\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x12\x00\x00\x00"# + +alex_table :: AlexAddr +alex_table = AlexA# "\x00\x00\x09\x00\x0f\x00\x11\x00\x02\x00\x11\x00\x12\x00\x00\x00\x12\x00\x13\x00\x03\x00\x11\x00\x07\x00\x10\x00\x12\x00\x25\x00\x14\x00\x11\x00\x10\x00\x11\x00\x14\x00\x11\x00\x12\x00\x23\x00\x12\x00\x0f\x00\x28\x00\x02\x00\x2e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\x08\x00\x10\x00\x00\x00\x14\x00\x00\x00\x00\x00\x08\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2a\x00\x2e\x00\xff\xff\xff\xff\x2f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2a\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x26\x00\x28\x00\xff\xff\xff\xff\x29\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x26\x00\x0f\x00\x11\x00\x17\x00\x26\x00\x12\x00\x25\x00\x11\x00\x2a\x00\x00\x00\x12\x00\x00\x00\x15\x00\x00\x00\x16\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x17\x00\x26\x00\x00\x00\x25\x00\x00\x00\x2a\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x2c\x00\x00\x00\x2d\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00\x0e\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x17\x00\x23\x00\xff\xff\xff\xff\x24\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x17\x00\x1e\x00\x0d\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\x1f\x00\x1f\x00\x1e\x00\x1e\x00\x1e\x00\x19\x00\x1a\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x20\x00\x00\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0a\x00\x1f\x00\x1e\x00\x1f\x00\x1e\x00\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x21\x00\x1e\x00\x22\x00\x1e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x1d\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff
\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x1c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x0c\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x0c\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x1e\x00\xff\xff\x1e\x00\x1e\x00\x1e\x00\x1e\x00\xff\xff\xff\xff\xff\xff\x1e\x00\x1e\x00\x1e\x00\x18\x00\x1a\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x1e\x00\xff\xff\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x1e\x00\xff\xff\x1e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff
\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x1e\x00\xff\xff\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\xff\xff\xff\xff\x1e\x00\x1e\x00\x1e\x00\x1a\x00\x1a\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x1e\x00\xff\xff\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x1e\x00\xff\xff\x1e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff
\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x1c\x00\x1e\x00\x00\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c\x00\x00\x00\x1e\x00\x00\x00\x1e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1e\x00\xff\xff\x1e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff
\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"# + +alex_check :: AlexAddr +alex_check = AlexA# 
"\xff\xff\xef\x00\x09\x00\x0a\x00\x09\x00\x0a\x00\x0d\x00\xbf\x00\x0d\x00\x2d\x00\x09\x00\x0a\x00\xbb\x00\xa0\x00\x0d\x00\xa0\x00\xa0\x00\x0a\x00\x09\x00\x0a\x00\x09\x00\x0a\x00\x0d\x00\x0a\x00\x0d\x00\x20\x00\x0a\x00\x20\x00\x0a\x00\xff\xff\xff\xff\xff\xff\xff\xff\x20\x00\xff\xff\xff\xff\xff\xff\xff\xff\x2d\x00\xff\xff\x2d\x00\x20\x00\xff\xff\x20\x00\xff\xff\xff\xff\x2d\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\x20\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\x20\x00\x09\x00\x0a\x00\x09\x00\x09\x00\x0d\x00\x09\x00\x0a\x00\x09\x00\xff\xff\x0d\x00\xff\xff\x7b\x00\xff\xff\x7d\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x20\x00\xff\xff\x20\x00\x20\x00\xff\xff\x20\x00\xff\xff\x20\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2d\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7b\x00\xff\xff\x7d\x00\xff\xff\x7f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc2\x00\xff\xff\xc2\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc2\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc2\x00\xff\xff\xc2\x00\xff\xff\x7f\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\x20\x00\x21\x00\x22\x00\x23\x00\x24\x00\x25\x00\x26\x00\xff\xff\x28\x00\x29\x0
0\x2a\x00\x2b\x00\x2c\x00\x2d\x00\x2e\x00\x2f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3a\x00\xff\xff\x3c\x00\x3d\x00\x3e\x00\x3f\x00\x40\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc2\x00\x5b\x00\x5c\x00\x5d\x00\x5e\x00\xc2\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7b\x00\x7c\x00\x7d\x00\x7e\x00\x7f\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\xff\xff\xff\xff\x22\x00\xff\xff\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\xff\xff\xff\xff\x22\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x5c\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7f\x00\x5c\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\xff\xff\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\xff\xff\xff\xff\x7f\x0
0\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\x20\x00\x21\x00\x22\x00\x23\x00\x24\x00\x25\x00\x26\x00\x7f\x00\x28\x00\x29\x00\x2a\x00\x2b\x00\x2c\x00\x2d\x00\x2e\x00\x2f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3a\x00\xff\xff\x3c\x00\x3d\x00\x3e\x00\x3f\x00\x40\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x5b\x00\x5c\x00\x5d\x00\x5e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7b\x00\x7c\x00\x7d\x00\x7e\x00\x7f\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\x20\x00\x21\x00\x22\x00\x23\x00\x24\x00\x25\x00\x26\x00\xff\xff\x28\x00\x29\x00\x2a\x00\x2b\x00\x2c\x00\x2d\x00\x2e\x00\x2f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3a\x00\xff\xff\x3c\x00\x3d\x00\x3e\x00\x3f\x00\x40\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x5b\x00\x5c\x00\x5d\x00\x5e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf
f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7b\x00\x7c\x00\x7d\x00\x7e\x00\x7f\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\x20\x00\x21\x00\x22\x00\x23\x00\x24\x00\x25\x00\x26\x00\xff\xff\x28\x00\x29\x00\x2a\x00\x2b\x00\x2c\x00\xff\xff\xff\xff\x2f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3a\x00\xff\xff\x3c\x00\x3d\x00\x3e\x00\x3f\x00\x40\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x5b\x00\x5c\x00\x5d\x00\x5e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7b\x00\x7c\x00\x7d\x00\x7e\x00\x7f\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\x09\x00\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\xff\xff\xff\xff\x22\x00\x21\x00\xff\xff\x23\x00\x24\x00\x25\x00\x26\x00\xff\xff\xff\xff\xff\xff\x2a\x00\x2b\x00\x2c\x00\x2d\x00\x2e\x00\x2f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3c\x00\x3d\x00\x3e\x00\x3f\x00\x40\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x5c\x00\xff\xff\x5c\x00\xff\xff\x5e\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf
f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7c\x00\x7f\x00\x7e\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\xff\xff\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\x00\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x00\x07\x00\x08\x00\xff\xff\x0a\x00\x0b\x00\x0c\x00\x0d\x00\x0e\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x14\x00\x15\x00\x16\x00\x17\x00\x18\x00\x19\x00\x1a\x00\x1b\x00\x1c\x00\x1d\x00\x1e\x00\x1f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7b\x00\xff\xff\x7d\x00\xff\xff\x7f\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf
f\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"# + +alex_deflt :: AlexAddr +alex_deflt = AlexA# "\xff\xff\xff\xff\xff\xff\xff\xff\x2b\x00\x27\x00\x1b\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x0d\x00\x0d\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x13\x00\xff\xff\xff\xff\xff\xff\xff\xff\x18\x00\x1b\x00\x1b\x00\x1b\x00\xff\xff\x0d\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x27\x00\xff\xff\xff\xff\xff\xff\x2b\x00\xff\xff\xff\xff\xff\xff\xff\xff"# + +alex_accept = listArray (0::Int,47) [AlexAcc (alex_action_0),AlexAcc (alex_action_20),AlexAcc (alex_action_16),AlexAcc (alex_action_3),AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAccNone,AlexAcc (alex_action_1),AlexAcc (alex_action_1),AlexAccSkip,AlexAcc (alex_action_3),AlexAcc (alex_action_4),AlexAcc (alex_action_5),AlexAccSkip,AlexAccSkip,AlexAcc (alex_action_8),AlexAcc (alex_action_8),AlexAcc (alex_action_8),AlexAcc (alex_action_9),AlexAcc (alex_action_9),AlexAcc (alex_action_10),AlexAcc (alex_action_11),AlexAcc (alex_action_12),AlexAcc (alex_action_13),AlexAcc (alex_action_14),AlexAcc (alex_action_15),AlexAcc (alex_action_15),AlexAcc (alex_action_16),AlexAccSkip,AlexAcc (alex_action_18),AlexAcc (alex_action_19),AlexAcc (alex_action_19),AlexAccSkip,AlexAcc (alex_action_22),AlexAcc (alex_action_23),AlexAcc (alex_action_24),AlexAcc (alex_action_25),AlexAcc (alex_action_25)] +{-# LINE 151 "boot/Lexer.x" #-} + +-- | Tokens of outer cabal file structure. 
Field values are treated opaquely. +data Token = TokSym !ByteString -- ^ Haskell-like identifier, number or operator + | TokStr !ByteString -- ^ String in quotes + | TokOther !ByteString -- ^ Operators and parens + | Indent !Int -- ^ Indentation token + | TokFieldLine !ByteString -- ^ Lines after @:@ + | Colon + | OpenBrace + | CloseBrace + | EOF + | LexicalError InputStream --TODO: add separate string lexical error + deriving Show + +data LToken = L !Position !Token + deriving Show + +toki :: (ByteString -> Token) -> Position -> Int -> ByteString -> Lex LToken +toki t pos len input = return $! L pos (t (B.take len input)) + +tok :: Token -> Position -> Int -> ByteString -> Lex LToken +tok t pos _len _input = return $! L pos t + +checkLeadingWhitespace :: Int -> ByteString -> Lex Int +checkLeadingWhitespace len bs + | B.any (== 9) (B.take len bs) = do + addWarning LexWarningTab + checkWhitespace len bs + | otherwise = checkWhitespace len bs + +checkWhitespace :: Int -> ByteString -> Lex Int +checkWhitespace len bs + | B.any (== 194) (B.take len bs) = do + addWarning LexWarningNBSP + return $ len - B.count 194 (B.take len bs) + | otherwise = return len + +-- ----------------------------------------------------------------------------- +-- The input type + +type AlexInput = InputStream + +alexInputPrevChar :: AlexInput -> Char +alexInputPrevChar _ = error "alexInputPrevChar not used" + +alexGetByte :: AlexInput -> Maybe (Word.Word8,AlexInput) +alexGetByte = B.uncons + +lexicalError :: Position -> InputStream -> Lex LToken +lexicalError pos inp = do + setInput B.empty + return $! 
L pos (LexicalError inp) + +lexToken :: Lex LToken +lexToken = do + pos <- getPos + inp <- getInput + st <- getStartCode + case alexScan inp st of + AlexEOF -> return (L pos EOF) + AlexError inp' -> + let !len_bytes = B.length inp - B.length inp' in + --FIXME: we want len_chars here really + -- need to decode utf8 up to this point + lexicalError (incPos len_bytes pos) inp' + AlexSkip inp' len_chars -> do + checkPosition pos inp inp' len_chars + adjustPos (incPos len_chars) + setInput inp' + lexToken + AlexToken inp' len_chars action -> do + checkPosition pos inp inp' len_chars + adjustPos (incPos len_chars) + setInput inp' + let !len_bytes = B.length inp - B.length inp' + t <- action pos len_bytes inp + --traceShow t $ return tok + return t + +checkPosition :: Position -> ByteString -> ByteString -> Int -> Lex () +#ifdef CABAL_PARSEC_DEBUG +checkPosition pos@(Position lineno colno) inp inp' len_chars = do + text_lines <- getDbgText + let len_bytes = B.length inp - B.length inp' + pos_txt | lineno-1 < V.length text_lines = T.take len_chars (T.drop (colno-1) (text_lines V.! (lineno-1))) + | otherwise = T.empty + real_txt = B.take len_bytes inp + when (pos_txt /= T.decodeUtf8 real_txt) $ + traceShow (pos, pos_txt, T.decodeUtf8 real_txt) $ + traceShow (take 3 (V.toList text_lines)) $ return () + where + getDbgText = Lex $ \s@LexState{ dbgText = txt } -> LexResult s txt +#else +checkPosition _ _ _ _ = return () +#endif + +lexAll :: Lex [LToken] +lexAll = do + t <- lexToken + case t of + L _ EOF -> return [t] + _ -> do ts <- lexAll + return (t : ts) + +ltest :: Int -> String -> Prelude.IO () +ltest code s = + let (ws, xs) = execLexer (setStartCode code >> lexAll) (B.Char8.pack s) + in traverse_ print ws >> traverse_ print xs + +mkLexState :: ByteString -> LexState +mkLexState input = LexState + { curPos = Position 1 1 + , curInput = input + , curCode = 0 + , warnings = [] +#ifdef CABAL_PARSEC_DEBUG + , dbgText = V.fromList . lines' . 
T.decodeUtf8With T.lenientDecode $ input +#endif + } + +#ifdef CABAL_PARSEC_DEBUG +lines' :: T.Text -> [T.Text] +lines' s1 + | T.null s1 = [] + | otherwise = case T.break (\c -> c == '\r' || c == '\n') s1 of + (l, s2) | Just (c,s3) <- T.uncons s2 + -> case T.uncons s3 of + Just ('\n', s4) | c == '\r' -> l `T.snoc` '\r' `T.snoc` '\n' : lines' s4 + _ -> l `T.snoc` c : lines' s3 + + | otherwise + -> [l] +#endif + +bol_field_braces,bol_field_layout,bol_section,in_field_braces,in_field_layout,in_section :: Int +bol_field_braces = 1 +bol_field_layout = 2 +bol_section = 3 +in_field_braces = 4 +in_field_layout = 5 +in_section = 6 +alex_action_0 = \_ len _ -> do + when (len /= 0) $ addWarning LexWarningBOM + setStartCode bol_section + lexToken + +alex_action_1 = \_pos len inp -> checkWhitespace len inp >> adjustPos retPos >> lexToken +alex_action_3 = \pos len inp -> checkLeadingWhitespace len inp >> + if B.length inp == len + then return (L pos EOF) + else setStartCode in_section + >> return (L pos (Indent len)) +alex_action_4 = tok OpenBrace +alex_action_5 = tok CloseBrace +alex_action_8 = toki TokSym +alex_action_9 = \pos len inp -> return $! 
L pos (TokStr (B.take (len - 2) (B.tail inp))) +alex_action_10 = toki TokOther +alex_action_11 = toki TokOther +alex_action_12 = tok Colon +alex_action_13 = tok OpenBrace +alex_action_14 = tok CloseBrace +alex_action_15 = \_ _ _ -> adjustPos retPos >> setStartCode bol_section >> lexToken +alex_action_16 = \pos len inp -> checkLeadingWhitespace len inp >>= \len' -> + if B.length inp == len + then return (L pos EOF) + else setStartCode in_field_layout + >> return (L pos (Indent len')) +alex_action_18 = toki TokFieldLine +alex_action_19 = \_ _ _ -> adjustPos retPos >> setStartCode bol_field_layout >> lexToken +alex_action_20 = \_ _ _ -> setStartCode in_field_braces >> lexToken +alex_action_22 = toki TokFieldLine +alex_action_23 = tok OpenBrace +alex_action_24 = tok CloseBrace +alex_action_25 = \_ _ _ -> adjustPos retPos >> setStartCode bol_field_braces >> lexToken +{-# LINE 1 "templates/GenericTemplate.hs" #-} +{-# LINE 1 "templates/GenericTemplate.hs" #-} +{-# LINE 1 "" #-} +{-# LINE 1 "" #-} +{-# LINE 10 "" #-} +# 1 "/usr/include/stdc-predef.h" 1 3 4 + +# 17 "/usr/include/stdc-predef.h" 3 4 + +{-# LINE 10 "" #-} +{-# LINE 1 "/opt/ghc/7.10.3/lib/ghc-7.10.3/include/ghcversion.h" #-} + +{-# LINE 10 "" #-} +{-# LINE 1 "templates/GenericTemplate.hs" #-} +-- ----------------------------------------------------------------------------- +-- ALEX TEMPLATE +-- +-- This code is in the PUBLIC DOMAIN; you may copy it freely and use +-- it for any purpose whatsoever. + +-- ----------------------------------------------------------------------------- +-- INTERNALS and main scanner engine + +{-# LINE 21 "templates/GenericTemplate.hs" #-} + +-- Do not remove this comment. Required to fix CPP parsing when using GCC and a clang-compiled alex. 
+#if __GLASGOW_HASKELL__ > 706 +#define GTE(n,m) (tagToEnum# (n >=# m)) +#define EQ(n,m) (tagToEnum# (n ==# m)) +#else +#define GTE(n,m) (n >=# m) +#define EQ(n,m) (n ==# m) +#endif +{-# LINE 51 "templates/GenericTemplate.hs" #-} + +data AlexAddr = AlexA# Addr# +-- Do not remove this comment. Required to fix CPP parsing when using GCC and a clang-compiled alex. +#if __GLASGOW_HASKELL__ < 503 +uncheckedShiftL# = shiftL# +#endif + +{-# INLINE alexIndexInt16OffAddr #-} +alexIndexInt16OffAddr (AlexA# arr) off = +#ifdef WORDS_BIGENDIAN + narrow16Int# i + where + i = word2Int# ((high `uncheckedShiftL#` 8#) `or#` low) + high = int2Word# (ord# (indexCharOffAddr# arr (off' +# 1#))) + low = int2Word# (ord# (indexCharOffAddr# arr off')) + off' = off *# 2# +#else + indexInt16OffAddr# arr off +#endif + +{-# INLINE alexIndexInt32OffAddr #-} +alexIndexInt32OffAddr (AlexA# arr) off = +#ifdef WORDS_BIGENDIAN + narrow32Int# i + where + i = word2Int# ((b3 `uncheckedShiftL#` 24#) `or#` + (b2 `uncheckedShiftL#` 16#) `or#` + (b1 `uncheckedShiftL#` 8#) `or#` b0) + b3 = int2Word# (ord# (indexCharOffAddr# arr (off' +# 3#))) + b2 = int2Word# (ord# (indexCharOffAddr# arr (off' +# 2#))) + b1 = int2Word# (ord# (indexCharOffAddr# arr (off' +# 1#))) + b0 = int2Word# (ord# (indexCharOffAddr# arr off')) + off' = off *# 4# +#else + indexInt32OffAddr# arr off +#endif + +#if __GLASGOW_HASKELL__ < 503 +quickIndex arr i = arr ! i +#else +-- GHC >= 503, unsafeAt is available from Data.Array.Base. 
+quickIndex = unsafeAt +#endif + +-- ----------------------------------------------------------------------------- +-- Main lexing routines + +data AlexReturn a + = AlexEOF + | AlexError !AlexInput + | AlexSkip !AlexInput !Int + | AlexToken !AlexInput !Int a + +-- alexScan :: AlexInput -> StartCode -> AlexReturn a +alexScan input (I# (sc)) + = alexScanUser undefined input (I# (sc)) + +alexScanUser user input (I# (sc)) + = case alex_scan_tkn user input 0# input sc AlexNone of + (AlexNone, input') -> + case alexGetByte input of + Nothing -> + + AlexEOF + Just _ -> + + AlexError input' + + (AlexLastSkip input'' len, _) -> + + AlexSkip input'' len + + (AlexLastAcc k input''' len, _) -> + + AlexToken input''' len k + +-- Push the input through the DFA, remembering the most recent accepting +-- state it encountered. + +alex_scan_tkn user orig_input len input s last_acc = + input `seq` -- strict in the input + let + new_acc = (check_accs (alex_accept `quickIndex` (I# (s)))) + in + new_acc `seq` + case alexGetByte input of + Nothing -> (new_acc, input) + Just (c, new_input) -> + + case fromIntegral c of { (I# (ord_c)) -> + let + base = alexIndexInt32OffAddr alex_base s + offset = (base +# ord_c) + check = alexIndexInt16OffAddr alex_check offset + + new_s = if GTE(offset,0#) && EQ(check,ord_c) + then alexIndexInt16OffAddr alex_table offset + else alexIndexInt16OffAddr alex_deflt s + in + case new_s of + -1# -> (new_acc, input) + -- on an error, we want to keep the input *before* the + -- character that failed, not after. 
+ _ -> alex_scan_tkn user orig_input (if c < 0x80 || c >= 0xC0 then (len +# 1#) else len) + -- note that the length is increased ONLY if this is the 1st byte in a char encoding) + new_input new_s new_acc + } + where + check_accs (AlexAccNone) = last_acc + check_accs (AlexAcc a ) = AlexLastAcc a input (I# (len)) + check_accs (AlexAccSkip) = AlexLastSkip input (I# (len)) +{-# LINE 198 "templates/GenericTemplate.hs" #-} + +data AlexLastAcc a + = AlexNone + | AlexLastAcc a !AlexInput !Int + | AlexLastSkip !AlexInput !Int + +instance Functor AlexLastAcc where + fmap _ AlexNone = AlexNone + fmap f (AlexLastAcc x y z) = AlexLastAcc (f x) y z + fmap _ (AlexLastSkip x y) = AlexLastSkip x y + +data AlexAcc a user + = AlexAccNone + | AlexAcc a + | AlexAccSkip diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/LexerMonad.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/LexerMonad.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/LexerMonad.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/LexerMonad.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,153 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Parsec.LexerMonad +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +module Distribution.Parsec.LexerMonad ( + InputStream, + LexState(..), + LexResult(..), + + Lex(..), + execLexer, + + getPos, + setPos, + adjustPos, + + getInput, + setInput, + + getStartCode, + setStartCode, + + LexWarning(..), + LexWarningType(..), + addWarning, + toPWarnings, + + ) where + +import qualified Data.ByteString as B +import Distribution.Compat.Prelude +import Distribution.Parsec.Common (PWarnType (..), PWarning (..), Position (..), showPos) +import Prelude () + +import qualified 
Data.Map.Strict as Map + +#ifdef CABAL_PARSEC_DEBUG +-- testing only: +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.Vector as V +#endif + +-- simple state monad +newtype Lex a = Lex { unLex :: LexState -> LexResult a } + +instance Functor Lex where + fmap = liftM + +instance Applicative Lex where + pure = returnLex + (<*>) = ap + +instance Monad Lex where + return = pure + (>>=) = thenLex + +data LexResult a = LexResult {-# UNPACK #-} !LexState a + +data LexWarningType + = LexWarningNBSP -- ^ Encountered non breaking space + | LexWarningBOM -- ^ BOM at the start of the cabal file + | LexWarningTab -- ^ Leading tags + deriving (Eq, Ord, Show) + +data LexWarning = LexWarning !LexWarningType + {-# UNPACK #-} !Position + deriving (Show) + +toPWarnings :: [LexWarning] -> [PWarning] +toPWarnings + = map (uncurry toWarning) + . Map.toList + . Map.fromListWith (++) + . map (\(LexWarning t p) -> (t, [p])) + where + toWarning LexWarningBOM poss = + PWarning PWTLexBOM (head poss) "Byte-order mark found at the beginning of the file" + toWarning LexWarningNBSP poss = + PWarning PWTLexNBSP (head poss) $ "Non breaking spaces at " ++ intercalate ", " (map showPos poss) + toWarning LexWarningTab poss = + PWarning PWTLexTab (head poss) $ "Tabs used as indentation at " ++ intercalate ", " (map showPos poss) + +data LexState = LexState { + curPos :: {-# UNPACK #-} !Position, -- ^ position at current input location + curInput :: {-# UNPACK #-} !InputStream, -- ^ the current input + curCode :: {-# UNPACK #-} !StartCode, -- ^ lexer code + warnings :: [LexWarning] +#ifdef CABAL_PARSEC_DEBUG + , dbgText :: V.Vector T.Text -- ^ input lines, to print pretty debug info +#endif + } --TODO: check if we should cache the first token + -- since it looks like parsec's uncons can be called many times on the same input + +type StartCode = Int -- ^ An @alex@ lexer start code +type InputStream = B.ByteString + + + +-- | Execute the given lexer on the 
supplied input stream. +execLexer :: Lex a -> InputStream -> ([LexWarning], a) +execLexer (Lex lexer) input = + case lexer initialState of + LexResult LexState{ warnings = ws } result -> (ws, result) + where + initialState = LexState + -- TODO: add 'startPosition' + { curPos = Position 1 1 + , curInput = input + , curCode = 0 + , warnings = [] +#ifdef CABAL_PARSEC_DEBUG + , dbgText = V.fromList . T.lines . T.decodeUtf8 $ input +#endif + } + +{-# INLINE returnLex #-} +returnLex :: a -> Lex a +returnLex a = Lex $ \s -> LexResult s a + +{-# INLINE thenLex #-} +thenLex :: Lex a -> (a -> Lex b) -> Lex b +(Lex m) `thenLex` k = Lex $ \s -> case m s of LexResult s' a -> (unLex (k a)) s' + +setPos :: Position -> Lex () +setPos pos = Lex $ \s -> LexResult s{ curPos = pos } () + +getPos :: Lex Position +getPos = Lex $ \s@LexState{ curPos = pos } -> LexResult s pos + +adjustPos :: (Position -> Position) -> Lex () +adjustPos f = Lex $ \s@LexState{ curPos = pos } -> LexResult s{ curPos = f pos } () + +getInput :: Lex InputStream +getInput = Lex $ \s@LexState{ curInput = i } -> LexResult s i + +setInput :: InputStream -> Lex () +setInput i = Lex $ \s -> LexResult s{ curInput = i } () + +getStartCode :: Lex Int +getStartCode = Lex $ \s@LexState{ curCode = c } -> LexResult s c + +setStartCode :: Int -> Lex () +setStartCode c = Lex $ \s -> LexResult s{ curCode = c } () + +-- | Add warning at the current position +addWarning :: LexWarningType -> Lex () +addWarning wt = Lex $ \s@LexState{ curPos = pos, warnings = ws } -> + LexResult s{ warnings = LexWarning wt pos : ws } () diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/Newtypes.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/Newtypes.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/Newtypes.hs 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/Newtypes.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,278 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE FunctionalDependencies #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeSynonymInstances #-} +-- | This module provides @newtype@ wrappers to be used with "Distribution.FieldGrammar". +module Distribution.Parsec.Newtypes ( + -- * List + alaList, + alaList', + -- ** Modifiers + CommaVCat (..), + CommaFSep (..), + VCat (..), + FSep (..), + NoCommaFSep (..), + -- ** Type + List, + -- * Version & License + SpecVersion (..), + TestedWith (..), + SpecLicense (..), + -- * Identifiers + Token (..), + Token' (..), + MQuoted (..), + FreeText (..), + FilePathNT (..), + ) where + +import Distribution.Compat.Newtype +import Distribution.Compat.Prelude +import Prelude () + +import Data.Functor.Identity (Identity (..)) +import Data.List (dropWhileEnd) +import Distribution.CabalSpecVersion +import Distribution.Compiler (CompilerFlavor) +import Distribution.License (License) +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Version + (LowerBound (..), Version, VersionRange, anyVersion, asVersionIntervals, mkVersion) +import Text.PrettyPrint (Doc, comma, fsep, punctuate, vcat, (<+>)) + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.SPDX as SPDX + +-- | Vertical list with commas. Displayed with 'vcat' +data CommaVCat = CommaVCat + +-- | Paragraph fill list with commas. Displayed with 'fsep' +data CommaFSep = CommaFSep + +-- | Vertical list with optional commas. Displayed with 'vcat'. +data VCat = VCat + +-- | Paragraph fill list with optional commas. Displayed with 'fsep'. +data FSep = FSep + +-- | Paragraph fill list without commas. Displayed with 'fsep'. +data NoCommaFSep = NoCommaFSep + +-- | Proxy, internal to this module. 
+data P sep = P + +class Sep sep where + prettySep :: P sep -> [Doc] -> Doc + + parseSep :: CabalParsing m => P sep -> m a -> m [a] + +instance Sep CommaVCat where + prettySep _ = vcat . punctuate comma + parseSep _ p = do + v <- askCabalSpecVersion + if v >= CabalSpecV2_2 then parsecLeadingCommaList p else parsecCommaList p +instance Sep CommaFSep where + prettySep _ = fsep . punctuate comma + parseSep _ p = do + v <- askCabalSpecVersion + if v >= CabalSpecV2_2 then parsecLeadingCommaList p else parsecCommaList p +instance Sep VCat where + prettySep _ = vcat + parseSep _ = parsecOptCommaList +instance Sep FSep where + prettySep _ = fsep + parseSep _ = parsecOptCommaList +instance Sep NoCommaFSep where + prettySep _ = fsep + parseSep _ p = many (p <* P.spaces) + +-- | List separated with optional commas. Displayed with @sep@, arguments of +-- type @a@ are parsed and pretty-printed as @b@. +newtype List sep b a = List { getList :: [a] } + +-- | 'alaList' and 'alaList'' are simply 'List', with additional phantom +-- arguments to constraint the resulting type +-- +-- >>> :t alaList VCat +-- alaList VCat :: [a] -> List VCat (Identity a) a +-- +-- >>> :t alaList' FSep Token +-- alaList' FSep Token :: [String] -> List FSep Token String +-- +alaList :: sep -> [a] -> List sep (Identity a) a +alaList _ = List + +-- | More general version of 'alaList'. +alaList' :: sep -> (a -> b) -> [a] -> List sep b a +alaList' _ _ = List + +instance Newtype (List sep wrapper a) [a] where + pack = List + unpack = getList + +instance (Newtype b a, Sep sep, Parsec b) => Parsec (List sep b a) where + parsec = pack . map (unpack :: b -> a) <$> parseSep (P :: P sep) parsec + +instance (Newtype b a, Sep sep, Pretty b) => Pretty (List sep b a) where + pretty = prettySep (P :: P sep) . map (pretty . (pack :: a -> b)) . 
unpack + +-- | Haskell string or @[^ ,]+@ +newtype Token = Token { getToken :: String } + +instance Newtype Token String where + pack = Token + unpack = getToken + +instance Parsec Token where + parsec = pack <$> parsecToken + +instance Pretty Token where + pretty = showToken . unpack + +-- | Haskell string or @[^ ]+@ +newtype Token' = Token' { getToken' :: String } + +instance Newtype Token' String where + pack = Token' + unpack = getToken' + +instance Parsec Token' where + parsec = pack <$> parsecToken' + +instance Pretty Token' where + pretty = showToken . unpack + +-- | Either @"quoted"@ or @un-quoted@. +newtype MQuoted a = MQuoted { getMQuoted :: a } + +instance Newtype (MQuoted a) a where + pack = MQuoted + unpack = getMQuoted + +instance Parsec a => Parsec (MQuoted a) where + parsec = pack <$> parsecMaybeQuoted parsec + +instance Pretty a => Pretty (MQuoted a) where + pretty = pretty . unpack + +-- | Version range or just version, i.e. @cabal-version@ field. +-- +-- There are few things to consider: +-- +-- * Starting with 2.2 the cabal-version field should be the first field in the +-- file and only exact version is accepted. Therefore if we get e.g. +-- @>= 2.2@, we fail. +-- See +-- +newtype SpecVersion = SpecVersion { getSpecVersion :: Either Version VersionRange } + +instance Newtype SpecVersion (Either Version VersionRange) where + pack = SpecVersion + unpack = getSpecVersion + +instance Parsec SpecVersion where + parsec = pack <$> parsecSpecVersion + where + parsecSpecVersion = Left <$> parsec <|> Right <$> range + range = do + vr <- parsec + if specVersionFromRange vr >= mkVersion [2,1] + then fail "cabal-version higher than 2.2 cannot be specified as a range. See https://github.com/haskell/cabal/issues/4899" + else return vr + +instance Pretty SpecVersion where + pretty = either pretty pretty . 
unpack + +specVersionFromRange :: VersionRange -> Version +specVersionFromRange versionRange = case asVersionIntervals versionRange of + [] -> mkVersion [0] + ((LowerBound version _, _):_) -> version + +-- | SPDX License expression or legacy license +newtype SpecLicense = SpecLicense { getSpecLicense :: Either SPDX.License License } + +instance Newtype SpecLicense (Either SPDX.License License) where + pack = SpecLicense + unpack = getSpecLicense + +instance Parsec SpecLicense where + parsec = do + v <- askCabalSpecVersion + if v >= CabalSpecV2_2 + then SpecLicense . Left <$> parsec + else SpecLicense . Right <$> parsec + +instance Pretty SpecLicense where + pretty = either pretty pretty . unpack + +-- | Version range or just version +newtype TestedWith = TestedWith { getTestedWith :: (CompilerFlavor, VersionRange) } + +instance Newtype TestedWith (CompilerFlavor, VersionRange) where + pack = TestedWith + unpack = getTestedWith + +instance Parsec TestedWith where + parsec = pack <$> parsecTestedWith + +instance Pretty TestedWith where + pretty x = case unpack x of + (compiler, vr) -> pretty compiler <+> pretty vr + +-- | This is /almost/ @'many' 'Distribution.Compat.P.anyChar'@, but it +-- +-- * trims whitespace from ends of the lines, +-- +-- * converts lines with only single dot into empty line. +-- +newtype FreeText = FreeText { getFreeText :: String } + +instance Newtype FreeText String where + pack = FreeText + unpack = getFreeText + +instance Parsec FreeText where + parsec = pack . dropDotLines <$ P.spaces <*> many P.anyChar + where + -- Example package with dot lines + -- http://hackage.haskell.org/package/copilot-cbmc-0.1/copilot-cbmc.cabal + dropDotLines "." = "." + dropDotLines x = intercalate "\n" . map dotToEmpty . lines $ x + dotToEmpty x | trim' x == "." = "" + dotToEmpty x = trim x + + trim' :: String -> String + trim' = dropWhileEnd (`elem` (" \t" :: String)) + + trim :: String -> String + trim = dropWhile isSpace . 
dropWhileEnd isSpace + +instance Pretty FreeText where + pretty = showFreeText . unpack + +-- | Filepath are parsed as 'Token'. +newtype FilePathNT = FilePathNT { getFilePathNT :: String } + +instance Newtype FilePathNT String where + pack = FilePathNT + unpack = getFilePathNT + +instance Parsec FilePathNT where + parsec = pack <$> parsecToken + +instance Pretty FilePathNT where + pretty = showFilePath . unpack + +------------------------------------------------------------------------------- +-- Internal +------------------------------------------------------------------------------- + +parsecTestedWith :: CabalParsing m => m (CompilerFlavor, VersionRange) +parsecTestedWith = do + name <- lexemeParsec + ver <- parsec <|> pure anyVersion + return (name, ver) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/ParseResult.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/ParseResult.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/ParseResult.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/ParseResult.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,184 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +-- | A parse result type for parsers from AST to Haskell types. 
+module Distribution.Parsec.ParseResult ( + ParseResult, + runParseResult, + recoverWith, + parseWarning, + parseWarnings, + parseFailure, + parseFatalFailure, + parseFatalFailure', + getCabalSpecVersion, + setCabalSpecVersion, + readAndParseFile, + parseString + ) where + +import qualified Data.ByteString.Char8 as BS +import Distribution.Compat.Prelude +import Distribution.Parsec.Common + ( PError (..), PWarnType (..), PWarning (..), Position (..), zeroPos + , showPWarning, showPError) +import Distribution.Simple.Utils (die', warn) +import Distribution.Verbosity (Verbosity) +import Distribution.Version (Version) +import Prelude () +import System.Directory (doesFileExist) + +#if MIN_VERSION_base(4,10,0) +import Control.Applicative (Applicative (..)) +#endif + +-- | A monad with failure and accumulating errors and warnings. +newtype ParseResult a = PR + { unPR + :: forall r. PRState + -> (PRState -> r) -- failure, but we were able to recover a new-style spec-version declaration + -> (PRState -> a -> r) -- success + -> r + } + +data PRState = PRState ![PWarning] ![PError] !(Maybe Version) + +emptyPRState :: PRState +emptyPRState = PRState [] [] Nothing + +-- | Destruct a 'ParseResult' into the emitted warnings and either +-- a successful value or +-- list of errors and possibly recovered a spec-version declaration. 
+runParseResult :: ParseResult a -> ([PWarning], Either (Maybe Version, [PError]) a) +runParseResult pr = unPR pr emptyPRState failure success + where + failure (PRState warns errs v) = (warns, Left (v, errs)) + success (PRState warns [] _) x = (warns, Right x) + -- If there are any errors, don't return the result + success (PRState warns errs v) _ = (warns, Left (v, errs)) + +instance Functor ParseResult where + fmap f (PR pr) = PR $ \ !s failure success -> + pr s failure $ \ !s' a -> + success s' (f a) + {-# INLINE fmap #-} + +instance Applicative ParseResult where + pure x = PR $ \ !s _ success -> success s x + {-# INLINE pure #-} + + f <*> x = PR $ \ !s0 failure success -> + unPR f s0 failure $ \ !s1 f' -> + unPR x s1 failure $ \ !s2 x' -> + success s2 (f' x') + {-# INLINE (<*>) #-} + + x *> y = PR $ \ !s0 failure success -> + unPR x s0 failure $ \ !s1 _ -> + unPR y s1 failure success + {-# INLINE (*>) #-} + + x <* y = PR $ \ !s0 failure success -> + unPR x s0 failure $ \ !s1 x' -> + unPR y s1 failure $ \ !s2 _ -> + success s2 x' + {-# INLINE (<*) #-} + +#if MIN_VERSION_base(4,10,0) + liftA2 f x y = PR $ \ !s0 failure success -> + unPR x s0 failure $ \ !s1 x' -> + unPR y s1 failure $ \ !s2 y' -> + success s2 (f x' y') + {-# INLINE liftA2 #-} +#endif + +instance Monad ParseResult where + return = pure + (>>) = (*>) + + m >>= k = PR $ \ !s failure success -> + unPR m s failure $ \ !s' a -> + unPR (k a) s' failure success + {-# INLINE (>>=) #-} + +-- | "Recover" the parse result, so we can proceed parsing. +-- 'runParseResult' will still result in 'Nothing', if there are recorded errors. +recoverWith :: ParseResult a -> a -> ParseResult a +recoverWith (PR pr) x = PR $ \ !s _failure success -> + pr s (\ !s' -> success s' x) success + +-- | Set cabal spec version. +setCabalSpecVersion :: Maybe Version -> ParseResult () +setCabalSpecVersion v = PR $ \(PRState warns errs _) _failure success -> + success (PRState warns errs v) () + +-- | Get cabal spec version. 
+getCabalSpecVersion :: ParseResult (Maybe Version) +getCabalSpecVersion = PR $ \s@(PRState _ _ v) _failure success -> + success s v + +-- | Add a warning. This doesn't fail the parsing process. +parseWarning :: Position -> PWarnType -> String -> ParseResult () +parseWarning pos t msg = PR $ \(PRState warns errs v) _failure success -> + success (PRState (PWarning t pos msg : warns) errs v) () + +-- | Add multiple warnings at once. +parseWarnings :: [PWarning] -> ParseResult () +parseWarnings newWarns = PR $ \(PRState warns errs v) _failure success -> + success (PRState (newWarns ++ warns) errs v) () + +-- | Add an error, but not fail the parser yet. +-- +-- For fatal failure use 'parseFatalFailure' +parseFailure :: Position -> String -> ParseResult () +parseFailure pos msg = PR $ \(PRState warns errs v) _failure success -> + success (PRState warns (PError pos msg : errs) v) () + +-- | Add an fatal error. +parseFatalFailure :: Position -> String -> ParseResult a +parseFatalFailure pos msg = PR $ \(PRState warns errs v) failure _success -> + failure (PRState warns (PError pos msg : errs) v) + +-- | A 'mzero'. +parseFatalFailure' :: ParseResult a +parseFatalFailure' = PR pr + where + pr (PRState warns [] v) failure _success = failure (PRState warns [err] v) + pr s failure _success = failure s + + err = PError zeroPos "Unknown fatal error" + +-- | Helper combinator to do parsing plumbing for files. +-- +-- Given a parser and a filename, return the parse of the file, +-- after checking if the file exists. +-- +-- Argument order is chosen to encourage partial application. +readAndParseFile + :: (BS.ByteString -> ParseResult a) -- ^ File contents to final value parser + -> Verbosity -- ^ Verbosity level + -> FilePath -- ^ File to read + -> IO a +readAndParseFile parser verbosity fpath = do + exists <- doesFileExist fpath + unless exists $ + die' verbosity $ + "Error Parsing: file \"" ++ fpath ++ "\" doesn't exist. Cannot continue." 
+ bs <- BS.readFile fpath + parseString parser verbosity fpath bs + +parseString + :: (BS.ByteString -> ParseResult a) -- ^ File contents to final value parser + -> Verbosity -- ^ Verbosity level + -> String -- ^ File name + -> BS.ByteString + -> IO a +parseString parser verbosity name bs = do + let (warnings, result) = runParseResult (parser bs) + traverse_ (warn verbosity . showPWarning name) warnings + case result of + Right x -> return x + Left (_, errors) -> do + traverse_ (warn verbosity . showPError name) errors + die' verbosity $ "Failed parsing \"" ++ name ++ "\"." diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/Parser.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/Parser.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Parsec/Parser.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Parsec/Parser.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,378 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PatternGuards #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Parsec.Parser +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +module Distribution.Parsec.Parser ( + -- * Types + Field(..), + Name(..), + FieldLine(..), + SectionArg(..), + -- * Grammar and parsing + -- $grammar + readFields, + readFields', +#ifdef CABAL_PARSEC_DEBUG + -- * Internal + parseFile, + parseStr, + parseBS, +#endif + ) where + +import Control.Monad (guard) +import qualified Data.ByteString.Char8 as B8 +import Data.Functor.Identity +import Distribution.Compat.Prelude +import Distribution.Parsec.Common +import Distribution.Parsec.Field +import Distribution.Parsec.Lexer +import Distribution.Parsec.LexerMonad + (LexResult (..), LexState (..), 
LexWarning (..), unLex) +import Prelude () +import Text.Parsec.Combinator hiding (eof, notFollowedBy) +import Text.Parsec.Error +import Text.Parsec.Pos +import Text.Parsec.Prim hiding (many, (<|>)) + +#ifdef CABAL_PARSEC_DEBUG +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.Text.Encoding.Error as T +#endif + +-- | The 'LexState'' (with a prime) is an instance of parsec's 'Stream' +-- wrapped around lexer's 'LexState' (without a prime) +data LexState' = LexState' !LexState (LToken, LexState') + +mkLexState' :: LexState -> LexState' +mkLexState' st = LexState' st + (case unLex lexToken st of LexResult st' tok -> (tok, mkLexState' st')) + +type Parser a = ParsecT LexState' () Identity a + +instance Stream LexState' Identity LToken where + uncons (LexState' _ (tok, st')) = + case tok of + L _ EOF -> return Nothing + _ -> return (Just (tok, st')) + +-- | Get lexer warnings accumulated so far +getLexerWarnings :: Parser [LexWarning] +getLexerWarnings = do + LexState' (LexState { warnings = ws }) _ <- getInput + return ws + +-- | Set Alex code i.e. the mode "state" lexer is in. +setLexerMode :: Int -> Parser () +setLexerMode code = do + LexState' ls _ <- getInput + setInput $! 
mkLexState' ls { curCode = code } + +getToken :: (Token -> Maybe a) -> Parser a +getToken getTok = getTokenWithPos (\(L _ t) -> getTok t) + +getTokenWithPos :: (LToken -> Maybe a) -> Parser a +getTokenWithPos getTok = tokenPrim (\(L _ t) -> describeToken t) updatePos getTok + where + updatePos :: SourcePos -> LToken -> LexState' -> SourcePos + updatePos pos (L (Position col line) _) _ = newPos (sourceName pos) col line + +describeToken :: Token -> String +describeToken t = case t of + TokSym s -> "symbol " ++ show s + TokStr s -> "string " ++ show s + TokOther s -> "operator " ++ show s + Indent _ -> "new line" + TokFieldLine _ -> "field content" + Colon -> "\":\"" + OpenBrace -> "\"{\"" + CloseBrace -> "\"}\"" +-- SemiColon -> "\";\"" + EOF -> "end of file" + LexicalError is -> "character in input " ++ show (B8.head is) + +tokSym :: Parser (Name Position) +tokSym', tokStr, tokOther :: Parser (SectionArg Position) +tokIndent :: Parser Int +tokColon, tokOpenBrace, tokCloseBrace :: Parser () +tokFieldLine :: Parser (FieldLine Position) + +tokSym = getTokenWithPos $ \t -> case t of L pos (TokSym x) -> Just (mkName pos x); _ -> Nothing +tokSym' = getTokenWithPos $ \t -> case t of L pos (TokSym x) -> Just (SecArgName pos x); _ -> Nothing +tokStr = getTokenWithPos $ \t -> case t of L pos (TokStr x) -> Just (SecArgStr pos x); _ -> Nothing +tokOther = getTokenWithPos $ \t -> case t of L pos (TokOther x) -> Just (SecArgOther pos x); _ -> Nothing +tokIndent = getToken $ \t -> case t of Indent x -> Just x; _ -> Nothing +tokColon = getToken $ \t -> case t of Colon -> Just (); _ -> Nothing +tokOpenBrace = getToken $ \t -> case t of OpenBrace -> Just (); _ -> Nothing +tokCloseBrace = getToken $ \t -> case t of CloseBrace -> Just (); _ -> Nothing +tokFieldLine = getTokenWithPos $ \t -> case t of L pos (TokFieldLine s) -> Just (FieldLine pos s); _ -> Nothing + +colon, openBrace, closeBrace :: Parser () + +sectionArg :: Parser (SectionArg Position) +sectionArg = tokSym' <|> tokStr 
<|> tokOther "section parameter" + +fieldSecName :: Parser (Name Position) +fieldSecName = tokSym "field or section name" + +colon = tokColon "\":\"" +openBrace = tokOpenBrace "\"{\"" +closeBrace = tokCloseBrace "\"}\"" + +fieldContent :: Parser (FieldLine Position) +fieldContent = tokFieldLine "field contents" + +newtype IndentLevel = IndentLevel Int + +zeroIndentLevel :: IndentLevel +zeroIndentLevel = IndentLevel 0 + +incIndentLevel :: IndentLevel -> IndentLevel +incIndentLevel (IndentLevel i) = IndentLevel (succ i) + +indentOfAtLeast :: IndentLevel -> Parser IndentLevel +indentOfAtLeast (IndentLevel i) = try $ do + j <- tokIndent + guard (j >= i) "indentation of at least " ++ show i + return (IndentLevel j) + + +newtype LexerMode = LexerMode Int + +inLexerMode :: LexerMode -> Parser p -> Parser p +inLexerMode (LexerMode mode) p = + do setLexerMode mode; x <- p; setLexerMode in_section; return x + + +----------------------- +-- Cabal file grammar +-- + +-- $grammar +-- +-- @ +-- CabalStyleFile ::= SecElems +-- +-- SecElems ::= SecElem* '\n'? +-- SecElem ::= '\n' SecElemLayout | SecElemBraces +-- SecElemLayout ::= FieldLayout | FieldBraces | SectionLayout | SectionBraces +-- SecElemBraces ::= FieldInline | FieldBraces | SectionBraces +-- FieldLayout ::= name ':' line? ('\n' line)* +-- FieldBraces ::= name ':' '\n'? '{' content '}' +-- FieldInline ::= name ':' content +-- SectionLayout ::= name arg* SecElems +-- SectionBraces ::= name arg* '\n'? '{' SecElems '}' +-- @ +-- +-- and the same thing but left factored... +-- +-- @ +-- SecElems ::= SecElem* +-- SecElem ::= '\n' name SecElemLayout +-- | name SecElemBraces +-- SecElemLayout ::= ':' FieldLayoutOrBraces +-- | arg* SectionLayoutOrBraces +-- FieldLayoutOrBraces ::= '\n'? '{' content '}' +-- | line? ('\n' line)* +-- SectionLayoutOrBraces ::= '\n'? '{' SecElems '\n'? '}' +-- | SecElems +-- SecElemBraces ::= ':' FieldInlineOrBraces +-- | arg* '\n'? '{' SecElems '\n'? '}' +-- FieldInlineOrBraces ::= '\n'? 
'{' content '}' +-- | content +-- @ +-- +-- Note how we have several productions with the sequence: +-- +-- > '\n'? '{' +-- +-- That is, an optional newline (and indent) followed by a @{@ token. +-- In the @SectionLayoutOrBraces@ case you can see that this makes it +-- not fully left factored (because @SecElems@ can start with a @\n@). +-- Fully left factoring here would be ugly, and though we could use a +-- lookahead of two tokens to resolve the alternatives, we can't +-- conveniently use Parsec's 'try' here to get a lookahead of only two. +-- So instead we deal with this case in the lexer by making a line +-- where the first non-space is @{@ lex as just the @{@ token, without +-- the usual indent token. Then in the parser we can resolve everything +-- with just one token of lookahead and so without using 'try'. + +-- Top level of a file using cabal syntax +-- +cabalStyleFile :: Parser [Field Position] +cabalStyleFile = do es <- elements zeroIndentLevel + eof + return es + +-- Elements that live at the top level or inside a section, ie fields +-- and sectionscontent +-- +-- elements ::= element* +elements :: IndentLevel -> Parser [Field Position] +elements ilevel = many (element ilevel) + +-- An individual element, ie a field or a section. These can either use +-- layout style or braces style. For layout style then it must start on +-- a line on its own (so that we know its indentation level). +-- +-- element ::= '\n' name elementInLayoutContext +-- | name elementInNonLayoutContext +element :: IndentLevel -> Parser (Field Position) +element ilevel = + (do ilevel' <- indentOfAtLeast ilevel + name <- fieldSecName + elementInLayoutContext (incIndentLevel ilevel') name) + <|> (do name <- fieldSecName + elementInNonLayoutContext name) + +-- An element (field or section) that is valid in a layout context. +-- In a layout context we can have fields and sections that themselves +-- either use layout style or that use braces style. 
+-- +-- elementInLayoutContext ::= ':' fieldLayoutOrBraces +-- | arg* sectionLayoutOrBraces +elementInLayoutContext :: IndentLevel -> Name Position -> Parser (Field Position) +elementInLayoutContext ilevel name = + (do colon; fieldLayoutOrBraces ilevel name) + <|> (do args <- many sectionArg + elems <- sectionLayoutOrBraces ilevel + return (Section name args elems)) + +-- An element (field or section) that is valid in a non-layout context. +-- In a non-layout context we can have only have fields and sections that +-- themselves use braces style, or inline style fields. +-- +-- elementInNonLayoutContext ::= ':' FieldInlineOrBraces +-- | arg* '\n'? '{' elements '\n'? '}' +elementInNonLayoutContext :: Name Position -> Parser (Field Position) +elementInNonLayoutContext name = + (do colon; fieldInlineOrBraces name) + <|> (do args <- many sectionArg + openBrace + elems <- elements zeroIndentLevel + optional tokIndent + closeBrace + return (Section name args elems)) + +-- The body of a field, using either layout style or braces style. +-- +-- fieldLayoutOrBraces ::= '\n'? '{' content '}' +-- | line? ('\n' line)* +fieldLayoutOrBraces :: IndentLevel -> Name Position -> Parser (Field Position) +fieldLayoutOrBraces ilevel name = braces <|> fieldLayout + where + braces = do + openBrace + ls <- inLexerMode (LexerMode in_field_braces) (many fieldContent) + closeBrace + return (Field name ls) + fieldLayout = inLexerMode (LexerMode in_field_layout) $ do + l <- optionMaybe fieldContent + ls <- many (do _ <- indentOfAtLeast ilevel; fieldContent) + return $ case l of + Nothing -> Field name ls + Just l' -> Field name (l' : ls) + +-- The body of a section, using either layout style or braces style. +-- +-- sectionLayoutOrBraces ::= '\n'? '{' elements \n? 
'}' +-- | elements +sectionLayoutOrBraces :: IndentLevel -> Parser [Field Position] +sectionLayoutOrBraces ilevel = + (do openBrace + elems <- elements zeroIndentLevel + optional tokIndent + closeBrace + return elems) + <|> (elements ilevel) + +-- The body of a field, using either inline style or braces. +-- +-- fieldInlineOrBraces ::= '\n'? '{' content '}' +-- | content +fieldInlineOrBraces :: Name Position -> Parser (Field Position) +fieldInlineOrBraces name = + (do openBrace + ls <- inLexerMode (LexerMode in_field_braces) (many fieldContent) + closeBrace + return (Field name ls)) + <|> (do ls <- inLexerMode (LexerMode in_field_braces) (option [] (fmap (\l -> [l]) fieldContent)) + return (Field name ls)) + + +-- | Parse cabal style 'B8.ByteString' into list of 'Field's, i.e. the cabal AST. +readFields :: B8.ByteString -> Either ParseError [Field Position] +readFields s = fmap fst (readFields' s) + +-- | Like 'readFields' but also return lexer warnings +readFields' :: B8.ByteString -> Either ParseError ([Field Position], [LexWarning]) +readFields' s = do + parse parser "the input" lexSt + where + parser = do + fields <- cabalStyleFile + ws <- getLexerWarnings + pure (fields, ws) + + lexSt = mkLexState' (mkLexState s) + +#ifdef CABAL_PARSEC_DEBUG +parseTest' :: Show a => Parsec LexState' () a -> SourceName -> B8.ByteString -> IO () +parseTest' p fname s = + case parse p fname (lexSt s) of + Left err -> putStrLn (formatError s err) + + Right x -> print x + where + lexSt = mkLexState' . mkLexState + +parseFile :: Show a => Parser a -> FilePath -> IO () +parseFile p f = B8.readFile f >>= \s -> parseTest' p f s + +parseStr :: Show a => Parser a -> String -> IO () +parseStr p = parseBS p . 
B8.pack + +parseBS :: Show a => Parser a -> B8.ByteString -> IO () +parseBS p = parseTest' p "" + +formatError :: B8.ByteString -> ParseError -> String +formatError input perr = + unlines + [ "Parse error "++ show (errorPos perr) ++ ":" + , errLine + , indicator ++ errmsg ] + where + pos = errorPos perr + ls = lines' (T.decodeUtf8With T.lenientDecode input) + errLine = T.unpack (ls !! (sourceLine pos - 1)) + indicator = replicate (sourceColumn pos) ' ' ++ "^" + errmsg = showErrorMessages "or" "unknown parse error" + "expecting" "unexpected" "end of file" + (errorMessages perr) + +-- | Handles windows/osx/unix line breaks uniformly +lines' :: T.Text -> [T.Text] +lines' s1 + | T.null s1 = [] + | otherwise = case T.break (\c -> c == '\r' || c == '\n') s1 of + (l, s2) | Just (c,s3) <- T.uncons s2 + -> case T.uncons s3 of + Just ('\n', s4) | c == '\r' -> l : lines' s4 + _ -> l : lines' s3 + | otherwise -> [l] +#endif + +eof :: Parser () +eof = notFollowedBy anyToken "end of file" + where + notFollowedBy :: Parser LToken -> Parser () + notFollowedBy p = try ( (do L _ t <- try p; unexpected (describeToken t)) + <|> return ()) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/ParseUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/ParseUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/ParseUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/ParseUtils.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,715 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.ParseUtils +-- Copyright : (c) The University of Glasgow 2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Utilities for parsing 'PackageDescription' and 'InstalledPackageInfo'. 
+-- +-- The @.cabal@ file format is not trivial, especially with the introduction +-- of configurations and the section syntax that goes with that. This module +-- has a bunch of parsing functions that is used by the @.cabal@ parser and a +-- couple others. It has the parsing framework code and also little parsers for +-- many of the formats we get in various @.cabal@ file fields, like module +-- names, comma separated lists etc. + +-- This module is meant to be local-only to Distribution... + +{-# OPTIONS_HADDOCK hide #-} +{-# LANGUAGE Rank2Types #-} +module Distribution.ParseUtils ( + LineNo, PError(..), PWarning(..), locatedErrorMsg, syntaxError, warning, + runP, runE, ParseResult(..), catchParseError, parseFail, showPWarning, + Field(..), fName, lineNo, + FieldDescr(..), ppField, ppFields, readFields, readFieldsFlat, + showFields, showSingleNamedField, showSimpleSingleNamedField, + parseFields, parseFieldsFlat, + parseFilePathQ, parseTokenQ, parseTokenQ', + parseModuleNameQ, + parseOptVersion, parsePackageName, + parseTestedWithQ, parseLicenseQ, parseLanguageQ, parseExtensionQ, + parseSepList, parseCommaList, parseOptCommaList, + showFilePath, showToken, showTestedWith, showFreeText, parseFreeText, + field, simpleField, listField, listFieldWithSep, spaceListField, + commaListField, commaListFieldWithSep, commaNewLineListField, + optsField, liftField, boolField, parseQuoted, parseMaybeQuoted, indentWith, + readPToMaybe, + + UnrecFieldParser, warnUnrec, ignoreUnrec, + ) where + +import Prelude () +import Distribution.Compat.Prelude hiding (get) + +import Distribution.Compiler +import Distribution.License +import Distribution.Version +import Distribution.ModuleName +import qualified Distribution.Compat.MonadFail as Fail +import Distribution.Compat.ReadP as ReadP hiding (get) +import Distribution.ReadE +import Distribution.Compat.Newtype +import Distribution.Parsec.Newtypes (TestedWith (..)) +import Distribution.Text +import Distribution.Utils.Generic +import 
Distribution.Pretty +import Language.Haskell.Extension + +import Text.PrettyPrint + ( Doc, render, style, renderStyle + , text, colon, nest, punctuate, comma, sep + , fsep, hsep, isEmpty, vcat, mode, Mode (..) + , ($+$), (<+>) + ) +import Data.Tree as Tree (Tree(..), flatten) +import qualified Data.Map as Map +import System.FilePath (normalise) + +-- ----------------------------------------------------------------------------- + +type LineNo = Int + +data PError = AmbiguousParse String LineNo + | NoParse String LineNo + | TabsError LineNo + | FromString String (Maybe LineNo) + deriving (Eq, Show) + +data PWarning = PWarning String + | UTFWarning LineNo String + deriving (Eq, Show) + +showPWarning :: FilePath -> PWarning -> String +showPWarning fpath (PWarning msg) = + normalise fpath ++ ": " ++ msg +showPWarning fpath (UTFWarning line fname) = + normalise fpath ++ ":" ++ show line + ++ ": Invalid UTF-8 text in the '" ++ fname ++ "' field." + +data ParseResult a = ParseFailed PError | ParseOk [PWarning] a + deriving Show + +instance Functor ParseResult where + fmap _ (ParseFailed err) = ParseFailed err + fmap f (ParseOk ws x) = ParseOk ws $ f x + +instance Applicative ParseResult where + pure = ParseOk [] + (<*>) = ap + + +instance Monad ParseResult where + return = pure + ParseFailed err >>= _ = ParseFailed err + ParseOk ws x >>= f = case f x of + ParseFailed err -> ParseFailed err + ParseOk ws' x' -> ParseOk (ws'++ws) x' + fail = Fail.fail + +instance Fail.MonadFail ParseResult where + fail s = ParseFailed (FromString s Nothing) + +catchParseError :: ParseResult a -> (PError -> ParseResult a) + -> ParseResult a +p@(ParseOk _ _) `catchParseError` _ = p +ParseFailed e `catchParseError` k = k e + +parseFail :: PError -> ParseResult a +parseFail = ParseFailed + +runP :: LineNo -> String -> ReadP a a -> String -> ParseResult a +runP line fieldname p s = + case [ x | (x,"") <- results ] of + [a] -> ParseOk (utf8Warnings line fieldname s) a + --TODO: what is this double 
parse thing all about? + -- Can't we just do the all isSpace test the first time? + [] -> case [ x | (x,ys) <- results, all isSpace ys ] of + [a] -> ParseOk (utf8Warnings line fieldname s) a + [] -> ParseFailed (NoParse fieldname line) + _ -> ParseFailed (AmbiguousParse fieldname line) + _ -> ParseFailed (AmbiguousParse fieldname line) + where results = readP_to_S p s + +runE :: LineNo -> String -> ReadE a -> String -> ParseResult a +runE line fieldname p s = + case runReadE p s of + Right a -> ParseOk (utf8Warnings line fieldname s) a + Left e -> syntaxError line $ + "Parse of field '" ++ fieldname ++ "' failed (" ++ e ++ "): " ++ s + +utf8Warnings :: LineNo -> String -> String -> [PWarning] +utf8Warnings line fieldname s = + take 1 [ UTFWarning n fieldname + | (n,l) <- zip [line..] (lines s) + , '\xfffd' `elem` l ] + +locatedErrorMsg :: PError -> (Maybe LineNo, String) +locatedErrorMsg (AmbiguousParse f n) = (Just n, + "Ambiguous parse in field '"++f++"'.") +locatedErrorMsg (NoParse f n) = (Just n, + "Parse of field '"++f++"' failed.") +locatedErrorMsg (TabsError n) = (Just n, "Tab used as indentation.") +locatedErrorMsg (FromString s n) = (n, s) + +syntaxError :: LineNo -> String -> ParseResult a +syntaxError n s = ParseFailed $ FromString s (Just n) + +tabsError :: LineNo -> ParseResult a +tabsError ln = ParseFailed $ TabsError ln + +warning :: String -> ParseResult () +warning s = ParseOk [PWarning s] () + +-- | Field descriptor. The parameter @a@ parameterizes over where the field's +-- value is stored in. +data FieldDescr a + = FieldDescr + { fieldName :: String + , fieldGet :: a -> Doc + , fieldSet :: LineNo -> String -> a -> ParseResult a + -- ^ @fieldSet n str x@ Parses the field value from the given input + -- string @str@ and stores the result in @x@ if the parse was + -- successful. Otherwise, reports an error on line number @n@. 
+ } + +field :: String -> (a -> Doc) -> ReadP a a -> FieldDescr a +field name showF readF = + FieldDescr name showF (\line val _st -> runP line name readF val) + +-- Lift a field descriptor storing into an 'a' to a field descriptor storing +-- into a 'b'. +liftField :: (b -> a) -> (a -> b -> b) -> FieldDescr a -> FieldDescr b +liftField get set (FieldDescr name showF parseF) + = FieldDescr name (showF . get) + (\line str b -> do + a <- parseF line str (get b) + return (set a b)) + +-- Parser combinator for simple fields. Takes a field name, a pretty printer, +-- a parser function, an accessor, and a setter, returns a FieldDescr over the +-- compoid structure. +simpleField :: String -> (a -> Doc) -> ReadP a a + -> (b -> a) -> (a -> b -> b) -> FieldDescr b +simpleField name showF readF get set + = liftField get set $ field name showF readF + +commaListFieldWithSep :: Separator -> String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +commaListFieldWithSep separator name showF readF get set = + liftField get set' $ + field name showF' (parseCommaList readF) + where + set' xs b = set (get b ++ xs) b + showF' = separator . punctuate comma . map showF + +commaListField :: String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +commaListField = commaListFieldWithSep fsep + +commaNewLineListField :: String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +commaNewLineListField = commaListFieldWithSep sep + +spaceListField :: String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +spaceListField name showF readF get set = + liftField get set' $ + field name showF' (parseSpaceList readF) + where + set' xs b = set (get b ++ xs) b + showF' = fsep . 
map showF + +listFieldWithSep :: Separator -> String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +listFieldWithSep separator name showF readF get set = + liftField get set' $ + field name showF' (parseOptCommaList readF) + where + set' xs b = set (get b ++ xs) b + showF' = separator . map showF + +listField :: String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +listField = listFieldWithSep fsep + +optsField :: String -> CompilerFlavor -> (b -> [(CompilerFlavor,[String])]) + -> ([(CompilerFlavor,[String])] -> b -> b) -> FieldDescr b +optsField name flavor get set = + liftField (fromMaybe [] . lookup flavor . get) + (\opts b -> set (reorder (update flavor opts (get b))) b) $ + field name showF (sepBy parseTokenQ' (munch1 isSpace)) + where + update _ opts l | all null opts = l --empty opts as if no opts + update f opts [] = [(f,opts)] + update f opts ((f',opts'):rest) + | f == f' = (f, opts' ++ opts) : rest + | otherwise = (f',opts') : update f opts rest + reorder = sortBy (comparing fst) + showF = hsep . map text + +-- TODO: this is a bit smelly hack. It's because we want to parse bool fields +-- liberally but not accept new parses. We cannot do that with ReadP +-- because it does not support warnings. We need a new parser framework! +boolField :: String -> (b -> Bool) -> (Bool -> b -> b) -> FieldDescr b +boolField name get set = liftField get set (FieldDescr name showF readF) + where + showF = text . show + readF line str _ + | str == "True" = ParseOk [] True + | str == "False" = ParseOk [] False + | lstr == "true" = ParseOk [caseWarning] True + | lstr == "false" = ParseOk [caseWarning] False + | otherwise = ParseFailed (NoParse name line) + where + lstr = lowercase str + caseWarning = PWarning $ + "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'." 
+ +ppFields :: [FieldDescr a] -> a -> Doc +ppFields fields x = + vcat [ ppField name (getter x) | FieldDescr name getter _ <- fields ] + +ppField :: String -> Doc -> Doc +ppField name fielddoc + | isEmpty fielddoc = mempty + | name `elem` nestedFields = text name <<>> colon $+$ nest indentWith fielddoc + | otherwise = text name <<>> colon <+> fielddoc + where + nestedFields = + [ "description" + , "build-depends" + , "data-files" + , "extra-source-files" + , "extra-tmp-files" + , "exposed-modules" + , "asm-sources" + , "cmm-sources" + , "c-sources" + , "js-sources" + , "extra-libraries" + , "includes" + , "install-includes" + , "other-modules" + , "autogen-modules" + , "depends" + ] + +showFields :: [FieldDescr a] -> a -> String +showFields fields = render . ($+$ text "") . ppFields fields + +showSingleNamedField :: [FieldDescr a] -> String -> Maybe (a -> String) +showSingleNamedField fields f = + case [ get | (FieldDescr f' get _) <- fields, f' == f ] of + [] -> Nothing + (get:_) -> Just (render . ppField f . get) + +showSimpleSingleNamedField :: [FieldDescr a] -> String -> Maybe (a -> String) +showSimpleSingleNamedField fields f = + case [ get | (FieldDescr f' get _) <- fields, f' == f ] of + [] -> Nothing + (get:_) -> Just (renderStyle myStyle . 
get) + where myStyle = style { mode = LeftMode } + +parseFields :: [FieldDescr a] -> a -> String -> ParseResult a +parseFields fields initial str = + readFields str >>= accumFields fields initial + +parseFieldsFlat :: [FieldDescr a] -> a -> String -> ParseResult a +parseFieldsFlat fields initial str = + readFieldsFlat str >>= accumFields fields initial + +accumFields :: [FieldDescr a] -> a -> [Field] -> ParseResult a +accumFields fields = foldM setField + where + fieldMap = Map.fromList + [ (name, f) | f@(FieldDescr name _ _) <- fields ] + setField accum (F line name value) = case Map.lookup name fieldMap of + Just (FieldDescr _ _ set) -> set line value accum + Nothing -> do + warning ("Unrecognized field " ++ name ++ " on line " ++ show line) + return accum + setField accum f = do + warning ("Unrecognized stanza on line " ++ show (lineNo f)) + return accum + +-- | The type of a function which, given a name-value pair of an +-- unrecognized field, and the current structure being built, +-- decides whether to incorporate the unrecognized field +-- (by returning Just x, where x is a possibly modified version +-- of the structure being built), or not (by returning Nothing). +type UnrecFieldParser a = (String,String) -> a -> Maybe a + +-- | A default unrecognized field parser which simply returns Nothing, +-- i.e. ignores all unrecognized fields, so warnings will be generated. +warnUnrec :: UnrecFieldParser a +warnUnrec _ _ = Nothing + +-- | A default unrecognized field parser which silently (i.e. no +-- warnings will be generated) ignores unrecognized fields, by +-- returning the structure being built unmodified. +ignoreUnrec :: UnrecFieldParser a +ignoreUnrec _ = Just + +------------------------------------------------------------------------------ + +-- The data type for our three syntactic categories +data Field + = F LineNo String String + -- ^ A regular @: @ field + | Section LineNo String String [Field] + -- ^ A section with a name and possible parameter. 
The syntactic + -- structure is: + -- + -- @ + -- { + -- * + -- } + -- @ + | IfBlock LineNo String [Field] [Field] + -- ^ A conditional block with an optional else branch: + -- + -- @ + -- if { + -- * + -- } else { + -- * + -- } + -- @ + deriving (Show + ,Eq) -- for testing + +lineNo :: Field -> LineNo +lineNo (F n _ _) = n +lineNo (Section n _ _ _) = n +lineNo (IfBlock n _ _ _) = n + +fName :: Field -> String +fName (F _ n _) = n +fName (Section _ n _ _) = n +fName _ = error "fname: not a field or section" + +readFields :: String -> ParseResult [Field] +readFields input = ifelse + =<< traverse (mkField 0) + =<< mkTree tokens + + where ls = (lines . normaliseLineEndings) input + tokens = (concatMap tokeniseLine . trimLines) ls + +readFieldsFlat :: String -> ParseResult [Field] +readFieldsFlat input = traverse (mkField 0) + =<< mkTree tokens + where ls = (lines . normaliseLineEndings) input + tokens = (concatMap tokeniseLineFlat . trimLines) ls + +-- attach line number and determine indentation +trimLines :: [String] -> [(LineNo, Indent, HasTabs, String)] +trimLines ls = [ (lineno, indent, hastabs, trimTrailing l') + | (lineno, l) <- zip [1..] ls + , let (sps, l') = span isSpace l + indent = length sps + hastabs = '\t' `elem` sps + , validLine l' ] + where validLine ('-':'-':_) = False -- Comment + validLine [] = False -- blank line + validLine _ = True + +-- | We parse generically based on indent level and braces '{' '}'. To do that +-- we split into lines and then '{' '}' tokens and other spans within a line. +data Token = + -- | The 'Line' token is for bits that /start/ a line, eg: + -- + -- > "\n blah blah { blah" + -- + -- tokenises to: + -- + -- > [Line n 2 False "blah blah", OpenBracket, Span n "blah"] + -- + -- so lines are the only ones that can have nested layout, since they + -- have a known indentation level. + -- + -- eg: we can't have this: + -- + -- > if ... 
{ + -- > } else + -- > other + -- + -- because other cannot nest under else, since else doesn't start a line + -- so cannot have nested layout. It'd have to be: + -- + -- > if ... { + -- > } + -- > else + -- > other + -- + -- but that's not so common, people would normally use layout or + -- brackets not both in a single @if else@ construct. + -- + -- > if ... { foo : bar } + -- > else + -- > other + -- + -- this is OK + Line LineNo Indent HasTabs String + | Span LineNo String -- ^ span in a line, following brackets + | OpenBracket LineNo | CloseBracket LineNo + +type Indent = Int +type HasTabs = Bool + +-- | Tokenise a single line, splitting on '{' '}' and the spans in between. +-- Also trims leading & trailing space on those spans within the line. +tokeniseLine :: (LineNo, Indent, HasTabs, String) -> [Token] +tokeniseLine (n0, i, t, l) = case split n0 l of + (Span _ l':ss) -> Line n0 i t l' :ss + cs -> cs + where split _ "" = [] + split n s = case span (\c -> c /='}' && c /= '{') s of + ("", '{' : s') -> OpenBracket n : split n s' + (w , '{' : s') -> mkspan n w (OpenBracket n : split n s') + ("", '}' : s') -> CloseBracket n : split n s' + (w , '}' : s') -> mkspan n w (CloseBracket n : split n s') + (w , _) -> mkspan n w [] + + mkspan n s ss | null s' = ss + | otherwise = Span n s' : ss + where s' = trimTrailing (trimLeading s) + +tokeniseLineFlat :: (LineNo, Indent, HasTabs, String) -> [Token] +tokeniseLineFlat (n0, i, t, l) + | null l' = [] + | otherwise = [Line n0 i t l'] + where + l' = trimTrailing (trimLeading l) + +trimLeading, trimTrailing :: String -> String +trimLeading = dropWhile isSpace +trimTrailing = dropWhileEndLE isSpace + + +type SyntaxTree = Tree (LineNo, HasTabs, String) + +-- | Parse the stream of tokens into a tree of them, based on indent \/ layout +mkTree :: [Token] -> ParseResult [SyntaxTree] +mkTree toks = + layout 0 [] toks >>= \(trees, trailing) -> case trailing of + [] -> return trees + OpenBracket n:_ -> syntaxError n "mismatched 
brackets, unexpected {" + CloseBracket n:_ -> syntaxError n "mismatched brackets, unexpected }" + -- the following two should never happen: + Span n l :_ -> syntaxError n $ "unexpected span: " ++ show l + Line n _ _ l :_ -> syntaxError n $ "unexpected line: " ++ show l + + +-- | Parse the stream of tokens into a tree of them, based on indent +-- This parse state expect to be in a layout context, though possibly +-- nested within a braces context so we may still encounter closing braces. +layout :: Indent -- ^ indent level of the parent\/previous line + -> [SyntaxTree] -- ^ accumulating param, trees in this level + -> [Token] -- ^ remaining tokens + -> ParseResult ([SyntaxTree], [Token]) + -- ^ collected trees on this level and trailing tokens +layout _ a [] = return (reverse a, []) +layout i a (s@(Line _ i' _ _):ss) | i' < i = return (reverse a, s:ss) +layout i a (Line n _ t l:OpenBracket n':ss) = do + (sub, ss') <- braces n' [] ss + layout i (Node (n,t,l) sub:a) ss' + +layout i a (Span n l:OpenBracket n':ss) = do + (sub, ss') <- braces n' [] ss + layout i (Node (n,False,l) sub:a) ss' + +-- look ahead to see if following lines are more indented, giving a sub-tree +layout i a (Line n i' t l:ss) = do + lookahead <- layout (i'+1) [] ss + case lookahead of + ([], _) -> layout i (Node (n,t,l) [] :a) ss + (ts, ss') -> layout i (Node (n,t,l) ts :a) ss' + +layout _ _ ( OpenBracket n :_) = syntaxError n "unexpected '{'" +layout _ a (s@(CloseBracket _):ss) = return (reverse a, s:ss) +layout _ _ ( Span n l : _) = syntaxError n $ "unexpected span: " + ++ show l + +-- | Parse the stream of tokens into a tree of them, based on explicit braces +-- This parse state expects to find a closing bracket. 
+braces :: LineNo -- ^ line of the '{', used for error messages + -> [SyntaxTree] -- ^ accumulating param, trees in this level + -> [Token] -- ^ remaining tokens + -> ParseResult ([SyntaxTree],[Token]) + -- ^ collected trees on this level and trailing tokens +braces m a (Line n _ t l:OpenBracket n':ss) = do + (sub, ss') <- braces n' [] ss + braces m (Node (n,t,l) sub:a) ss' + +braces m a (Span n l:OpenBracket n':ss) = do + (sub, ss') <- braces n' [] ss + braces m (Node (n,False,l) sub:a) ss' + +braces m a (Line n i t l:ss) = do + lookahead <- layout (i+1) [] ss + case lookahead of + ([], _) -> braces m (Node (n,t,l) [] :a) ss + (ts, ss') -> braces m (Node (n,t,l) ts :a) ss' + +braces m a (Span n l:ss) = braces m (Node (n,False,l) []:a) ss +braces _ a (CloseBracket _:ss) = return (reverse a, ss) +braces n _ [] = syntaxError n $ "opening brace '{'" + ++ "has no matching closing brace '}'" +braces _ _ (OpenBracket n:_) = syntaxError n "unexpected '{'" + +-- | Convert the parse tree into the Field AST +-- Also check for dodgy uses of tabs in indentation. +mkField :: Int -> SyntaxTree -> ParseResult Field +mkField d (Node (n,t,_) _) | d >= 1 && t = tabsError n +mkField d (Node (n,_,l) ts) = case span (\c -> isAlphaNum c || c == '-') l of + ([], _) -> syntaxError n $ "unrecognised field or section: " ++ show l + (name, rest) -> case trimLeading rest of + (':':rest') -> do let followingLines = concatMap Tree.flatten ts + tabs = not (null [()| (_,True,_) <- followingLines ]) + if tabs && d >= 1 + then tabsError n + else return $ F n (map toLower name) + (fieldValue rest' followingLines) + rest' -> do ts' <- traverse (mkField (d+1)) ts + return (Section n (map toLower name) rest' ts') + where fieldValue firstLine followingLines = + let firstLine' = trimLeading firstLine + followingLines' = map (\(_,_,s) -> stripDot s) followingLines + allLines | null firstLine' = followingLines' + | otherwise = firstLine' : followingLines' + in intercalate "\n" allLines + stripDot "." 
= "" + stripDot s = s + +-- | Convert if/then/else 'Section's to 'IfBlock's +ifelse :: [Field] -> ParseResult [Field] +ifelse [] = return [] +ifelse (Section n "if" cond thenpart + :Section _ "else" as elsepart:fs) + | null cond = syntaxError n "'if' with missing condition" + | null thenpart = syntaxError n "'then' branch of 'if' is empty" + | not (null as) = syntaxError n "'else' takes no arguments" + | null elsepart = syntaxError n "'else' branch of 'if' is empty" + | otherwise = do tp <- ifelse thenpart + ep <- ifelse elsepart + fs' <- ifelse fs + return (IfBlock n cond tp ep:fs') +ifelse (Section n "if" cond thenpart:fs) + | null cond = syntaxError n "'if' with missing condition" + | null thenpart = syntaxError n "'then' branch of 'if' is empty" + | otherwise = do tp <- ifelse thenpart + fs' <- ifelse fs + return (IfBlock n cond tp []:fs') +ifelse (Section n "else" _ _:_) = syntaxError n + "stray 'else' with no preceding 'if'" +ifelse (Section n s a fs':fs) = do fs'' <- ifelse fs' + fs''' <- ifelse fs + return (Section n s a fs'' : fs''') +ifelse (f:fs) = do fs' <- ifelse fs + return (f : fs') + +------------------------------------------------------------------------------ + +-- |parse a module name +parseModuleNameQ :: ReadP r ModuleName +parseModuleNameQ = parseMaybeQuoted parse + +parseFilePathQ :: ReadP r FilePath +parseFilePathQ = parseTokenQ + -- removed until normalise is no longer broken, was: + -- liftM normalise parseTokenQ + +betweenSpaces :: ReadP r a -> ReadP r a +betweenSpaces act = do skipSpaces + res <- act + skipSpaces + return res + +parsePackageName :: ReadP r String +parsePackageName = do + ns <- sepBy1 component (char '-') + return $ intercalate "-" ns + where + component = do + cs <- munch1 isAlphaNum + if all isDigit cs then pfail else return cs + -- each component must contain an alphabetic character, to avoid + -- ambiguity in identifiers like foo-1 (the 1 is the version number). 
+ +parseOptVersion :: ReadP r Version +parseOptVersion = parseMaybeQuoted ver + where ver :: ReadP r Version + ver = parse <++ return nullVersion + +parseTestedWithQ :: ReadP r (CompilerFlavor,VersionRange) +parseTestedWithQ = parseMaybeQuoted tw + where + tw :: ReadP r (CompilerFlavor,VersionRange) + tw = do compiler <- parseCompilerFlavorCompat + version <- betweenSpaces $ parse <++ return anyVersion + return (compiler,version) + +parseLicenseQ :: ReadP r License +parseLicenseQ = parseMaybeQuoted parse + +-- urgh, we can't define optQuotes :: ReadP r a -> ReadP r a +-- because the "compat" version of ReadP isn't quite powerful enough. In +-- particular, the type of <++ is ReadP r r -> ReadP r a -> ReadP r a +-- Hence the trick above to make 'lic' polymorphic. + +parseLanguageQ :: ReadP r Language +parseLanguageQ = parseMaybeQuoted parse + +parseExtensionQ :: ReadP r Extension +parseExtensionQ = parseMaybeQuoted parse + +parseHaskellString :: ReadP r String +parseHaskellString = readS_to_P reads + +parseTokenQ :: ReadP r String +parseTokenQ = parseHaskellString <++ munch1 (\x -> not (isSpace x) && x /= ',') + +parseTokenQ' :: ReadP r String +parseTokenQ' = parseHaskellString <++ munch1 (not . isSpace) + +parseSepList :: ReadP r b + -> ReadP r a -- ^The parser for the stuff between commas + -> ReadP r [a] +parseSepList sepr p = sepBy p separator + where separator = betweenSpaces sepr + +parseSpaceList :: ReadP r a -- ^The parser for the stuff between commas + -> ReadP r [a] +parseSpaceList p = sepBy p skipSpaces + +parseCommaList :: ReadP r a -- ^The parser for the stuff between commas + -> ReadP r [a] +parseCommaList = parseSepList (ReadP.char ',') + +parseOptCommaList :: ReadP r a -- ^The parser for the stuff between commas + -> ReadP r [a] +parseOptCommaList = parseSepList (optional (ReadP.char ',')) + +parseQuoted :: ReadP r a -> ReadP r a +parseQuoted = between (ReadP.char '"') (ReadP.char '"') + +parseMaybeQuoted :: (forall r. 
ReadP r a) -> ReadP r' a +parseMaybeQuoted p = parseQuoted p <++ p + +parseFreeText :: ReadP.ReadP s String +parseFreeText = ReadP.munch (const True) + +readPToMaybe :: ReadP a a -> String -> Maybe a +readPToMaybe p str = listToMaybe [ r | (r,s) <- readP_to_S p str + , all isSpace s ] + +------------------------------------------------------------------------------- +-- Internal +------------------------------------------------------------------------------- + +showTestedWith :: (CompilerFlavor, VersionRange) -> Doc +showTestedWith = pretty . pack' TestedWith diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Pretty.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Pretty.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Pretty.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Pretty.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,95 @@ +module Distribution.Pretty ( + Pretty (..), + prettyShow, + defaultStyle, + flatStyle, + -- * Utilities + showFilePath, + showToken, + showFreeText, + indentWith, + -- * Deprecated + Separator, + ) where + +import Prelude () +import Distribution.Compat.Prelude +import Data.Functor.Identity (Identity (..)) + +import qualified Text.PrettyPrint as PP + +class Pretty a where + pretty :: a -> PP.Doc + +instance Pretty Bool where + pretty = PP.text . show + +instance Pretty Int where + pretty = PP.text . show + +instance Pretty a => Pretty (Identity a) where + pretty = pretty . runIdentity + +prettyShow :: Pretty a => a -> String +prettyShow = PP.renderStyle defaultStyle . pretty + +-- | The default rendering style used in Cabal for console +-- output. It has a fixed page width and adds line breaks +-- automatically. 
+defaultStyle :: PP.Style +defaultStyle = PP.Style { PP.mode = PP.PageMode + , PP.lineLength = 79 + , PP.ribbonsPerLine = 1.0 + } + +-- | A style for rendering all on one line. +flatStyle :: PP.Style +flatStyle = PP.Style { PP.mode = PP.LeftMode + , PP.lineLength = err "lineLength" + , PP.ribbonsPerLine = err "ribbonsPerLine" + } + where + err x = error ("flatStyle: tried to access " ++ x ++ " in LeftMode. " ++ + "This should never happen and indicates a bug in Cabal.") + +------------------------------------------------------------------------------- +-- Utilities +------------------------------------------------------------------------------- + +-- TODO: remove when ReadP parser is gone. +type Separator = [PP.Doc] -> PP.Doc + +showFilePath :: FilePath -> PP.Doc +showFilePath = showToken + +showToken :: String -> PP.Doc +showToken str + -- if token looks like a comment (starts with --), print it in quotes + | "--" `isPrefixOf` str = PP.text (show str) + -- also if token ends with a colon (e.g. executable name), print it in quotes + | ":" `isSuffixOf` str = PP.text (show str) + | not (any dodgy str) && not (null str) = PP.text str + | otherwise = PP.text (show str) + where + dodgy c = isSpace c || c == ',' + + +-- | Pretty-print free-format text, ensuring that it is vertically aligned, +-- and with blank lines replaced by dots for correct re-parsing. +showFreeText :: String -> PP.Doc +showFreeText "" = mempty +showFreeText s = PP.vcat [ PP.text (if null l then "." else l) | l <- lines_ s ] + +-- | 'lines_' breaks a string up into a list of strings at newline +-- characters. The resulting strings do not contain newlines. 
+lines_ :: String -> [String] +lines_ [] = [""] +lines_ s = + let (l, s') = break (== '\n') s + in l : case s' of + [] -> [] + (_:s'') -> lines_ s'' + +-- | the indentation used for pretty printing +indentWith :: Int +indentWith = 4 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PrettyUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PrettyUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/PrettyUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/PrettyUtils.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,23 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.PrettyUtils +-- Copyright : (c) The University of Glasgow 2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Utilities for pretty printing. +{-# OPTIONS_HADDOCK hide #-} +module Distribution.PrettyUtils {-# DEPRECATED "Use Distribution.Pretty. This module will be removed in Cabal-3.0 (est. Mar 2019)." 
#-} ( + Separator, + -- * Internal + showFilePath, + showToken, + showTestedWith, + showFreeText, + indentWith, + ) where + +import Distribution.Pretty +import Distribution.ParseUtils diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/ReadE.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/ReadE.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/ReadE.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/ReadE.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,64 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.ReadE +-- Copyright : Jose Iborra 2008 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Simple parsing with failure + +module Distribution.ReadE ( + -- * ReadE + ReadE(..), succeedReadE, failReadE, + -- * Projections + parseReadE, readEOrFail, + readP_to_E, + parsecToReadE, + ) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Compat.ReadP +import Distribution.Parsec.Class +import Distribution.Parsec.FieldLineStream + +-- | Parser with simple error reporting +newtype ReadE a = ReadE {runReadE :: String -> Either ErrorMsg a} +type ErrorMsg = String + +instance Functor ReadE where + fmap f (ReadE p) = ReadE $ \txt -> case p txt of + Right a -> Right (f a) + Left err -> Left err + +succeedReadE :: (String -> a) -> ReadE a +succeedReadE f = ReadE (Right . f) + +failReadE :: ErrorMsg -> ReadE a +failReadE = ReadE . const . Left + +parseReadE :: ReadE a -> ReadP r a +parseReadE (ReadE p) = do + txt <- look + either fail return (p txt) + +readEOrFail :: ReadE a -> String -> a +readEOrFail r = either error id . runReadE r + +-- {-# DEPRECATED readP_to_E "Use parsecToReadE. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." 
#-} +readP_to_E :: (String -> ErrorMsg) -> ReadP a a -> ReadE a +readP_to_E err r = + ReadE $ \txt -> case [ p | (p, s) <- readP_to_S r txt + , all isSpace s ] + of [] -> Left (err txt) + (p:_) -> Right p + +parsecToReadE :: (String -> ErrorMsg) -> ParsecParser a -> ReadE a +parsecToReadE err p = ReadE $ \txt -> + case runParsecParser p "" (fieldLineStreamFromString txt) of + Right x -> Right x + Left _e -> Left (err txt) +-- TODO: use parsec error to make 'ErrorMsg'. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Bench.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Bench.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Bench.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Bench.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,126 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Bench +-- Copyright : Johan Tibell 2011 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is the entry point into running the benchmarks in a built +-- package. It performs the \"@.\/setup bench@\" action. It runs +-- benchmarks designated in the package description. 
+ +module Distribution.Simple.Bench + ( bench + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.UnqualComponentName +import qualified Distribution.PackageDescription as PD +import Distribution.Simple.BuildPaths +import Distribution.Simple.Compiler +import Distribution.Simple.InstallDirs +import qualified Distribution.Simple.LocalBuildInfo as LBI +import Distribution.Simple.Setup +import Distribution.Simple.UserHooks +import Distribution.Simple.Utils +import Distribution.Text + +import System.Exit ( ExitCode(..), exitFailure, exitSuccess ) +import System.Directory ( doesFileExist ) +import System.FilePath ( (), (<.>) ) + +-- | Perform the \"@.\/setup bench@\" action. +bench :: Args -- ^positional command-line arguments + -> PD.PackageDescription -- ^information from the .cabal file + -> LBI.LocalBuildInfo -- ^information from the configure step + -> BenchmarkFlags -- ^flags sent to benchmark + -> IO () +bench args pkg_descr lbi flags = do + let verbosity = fromFlag $ benchmarkVerbosity flags + benchmarkNames = args + pkgBenchmarks = PD.benchmarks pkg_descr + enabledBenchmarks = map fst (LBI.enabledBenchLBIs pkg_descr lbi) + + -- Run the benchmark + doBench :: PD.Benchmark -> IO ExitCode + doBench bm = + case PD.benchmarkInterface bm of + PD.BenchmarkExeV10 _ _ -> do + let cmd = LBI.buildDir lbi name name <.> exeExtension (LBI.hostPlatform lbi) + options = map (benchOption pkg_descr lbi bm) $ + benchmarkOptions flags + -- Check that the benchmark executable exists. + exists <- doesFileExist cmd + unless exists $ die' verbosity $ + "Error: Could not find benchmark program \"" + ++ cmd ++ "\". Did you build the package first?" + + notice verbosity $ startMessage name + -- This will redirect the child process + -- stdout/stderr to the parent process. 
+ exitcode <- rawSystemExitCode verbosity cmd options + notice verbosity $ finishMessage name exitcode + return exitcode + + _ -> do + notice verbosity $ "No support for running " + ++ "benchmark " ++ name ++ " of type: " + ++ display (PD.benchmarkType bm) + exitFailure + where name = unUnqualComponentName $ PD.benchmarkName bm + + unless (PD.hasBenchmarks pkg_descr) $ do + notice verbosity "Package has no benchmarks." + exitSuccess + + when (PD.hasBenchmarks pkg_descr && null enabledBenchmarks) $ + die' verbosity $ "No benchmarks enabled. Did you remember to configure with " + ++ "\'--enable-benchmarks\'?" + + bmsToRun <- case benchmarkNames of + [] -> return enabledBenchmarks + names -> for names $ \bmName -> + let benchmarkMap = zip enabledNames enabledBenchmarks + enabledNames = map PD.benchmarkName enabledBenchmarks + allNames = map PD.benchmarkName pkgBenchmarks + in case lookup (mkUnqualComponentName bmName) benchmarkMap of + Just t -> return t + _ | mkUnqualComponentName bmName `elem` allNames -> + die' verbosity $ "Package configured with benchmark " + ++ bmName ++ " disabled." + | otherwise -> die' verbosity $ "no such benchmark: " ++ bmName + + let totalBenchmarks = length bmsToRun + notice verbosity $ "Running " ++ show totalBenchmarks ++ " benchmarks..." + exitcodes <- traverse doBench bmsToRun + let allOk = totalBenchmarks == length (filter (== ExitSuccess) exitcodes) + unless allOk exitFailure + where + startMessage name = "Benchmark " ++ name ++ ": RUNNING...\n" + finishMessage name exitcode = "Benchmark " ++ name ++ ": " + ++ (case exitcode of + ExitSuccess -> "FINISH" + ExitFailure _ -> "ERROR") + + +-- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't +-- necessarily a path. 
+benchOption :: PD.PackageDescription + -> LBI.LocalBuildInfo + -> PD.Benchmark + -> PathTemplate + -> String +benchOption pkg_descr lbi bm template = + fromPathTemplate $ substPathTemplate env template + where + env = initialPathTemplateEnv + (PD.package pkg_descr) (LBI.localUnitId lbi) + (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++ + [(BenchmarkNameVar, toPathTemplate $ unUnqualComponentName $ PD.benchmarkName bm)] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Build/Macros.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Build/Macros.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Build/Macros.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Build/Macros.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,158 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Build.Macros +-- Copyright : Simon Marlow 2008 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Generate cabal_macros.h - CPP macros for package version testing +-- +-- When using CPP you get +-- +-- > VERSION_ +-- > MIN_VERSION_(A,B,C) +-- +-- for each /package/ in @build-depends@, which is true if the version of +-- /package/ in use is @>= A.B.C@, using the normal ordering on version +-- numbers. +-- +-- TODO Figure out what to do about backpack and internal libraries. 
It is very +-- suspecious that this stuff works with munged package identifiers +module Distribution.Simple.Build.Macros ( + generate, + generatePackageVersionMacros, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Version +import Distribution.PackageDescription +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.Program.Db +import Distribution.Simple.Program.Types +import Distribution.Types.MungedPackageId +import Distribution.Types.MungedPackageName +import Distribution.Types.PackageId +import Distribution.Text + +-- ------------------------------------------------------------ +-- * Generate cabal_macros.h +-- ------------------------------------------------------------ + +-- Invariant: HeaderLines always has a trailing newline +type HeaderLines = String + +line :: String -> HeaderLines +line str = str ++ "\n" + +ifndef :: String -> HeaderLines -> HeaderLines +ifndef macro body = + line ("#ifndef " ++ macro) ++ + body ++ + line ("#endif /* " ++ macro ++ " */") + +define :: String -> Maybe [String] -> String -> HeaderLines +define macro params val = + line ("#define " ++ macro ++ f params ++ " " ++ val) + where + f Nothing = "" + f (Just xs) = "(" ++ intercalate "," xs ++ ")" + +defineStr :: String -> String -> HeaderLines +defineStr macro str = define macro Nothing (show str) + +ifndefDefine :: String -> Maybe [String] -> String -> HeaderLines +ifndefDefine macro params str = + ifndef macro (define macro params str) + +ifndefDefineStr :: String -> String -> HeaderLines +ifndefDefineStr macro str = + ifndef macro (defineStr macro str) + +-- | The contents of the @cabal_macros.h@ for the given configured package. 
+-- +generate :: PackageDescription -> LocalBuildInfo -> ComponentLocalBuildInfo -> String +generate pkg_descr lbi clbi = + "/* DO NOT EDIT: This file is automatically generated by Cabal */\n\n" ++ + generatePackageVersionMacros + (package pkg_descr : map getPid (componentPackageDeps clbi)) ++ + generateToolVersionMacros (configuredPrograms . withPrograms $ lbi) ++ + generateComponentIdMacro lbi clbi ++ + generateCurrentPackageVersion pkg_descr + where + getPid (_, MungedPackageId mpn v) = + PackageIdentifier pn v + where + -- NB: Drop the component name! We're just reporting package versions. + -- This would have to be revisited if you are allowed to depend + -- on different versions of the same package + pn = fst (decodeCompatPackageName mpn) + +-- | Helper function that generates just the @VERSION_pkg@ and @MIN_VERSION_pkg@ +-- macros for a list of package ids (usually used with the specific deps of +-- a configured package). +-- +generatePackageVersionMacros :: [PackageId] -> String +generatePackageVersionMacros pkgids = concat + [ line ("/* package " ++ display pkgid ++ " */") + ++ generateMacros "" pkgname version + | pkgid@(PackageIdentifier name version) <- pkgids + , let pkgname = map fixchar (display name) + ] + +-- | Helper function that generates just the @TOOL_VERSION_pkg@ and +-- @MIN_TOOL_VERSION_pkg@ macros for a list of configured programs. +-- +generateToolVersionMacros :: [ConfiguredProgram] -> String +generateToolVersionMacros progs = concat + [ line ("/* tool " ++ progid ++ " */") + ++ generateMacros "TOOL_" progname version + | prog <- progs + , isJust . programVersion $ prog + , let progid = programId prog ++ "-" ++ display version + progname = map fixchar (programId prog) + Just version = programVersion prog + ] + +-- | Common implementation of 'generatePackageVersionMacros' and +-- 'generateToolVersionMacros'. 
+-- +generateMacros :: String -> String -> Version -> String +generateMacros macro_prefix name version = + concat + [ifndefDefineStr (macro_prefix ++ "VERSION_" ++ name) (display version) + ,ifndefDefine ("MIN_" ++ macro_prefix ++ "VERSION_" ++ name) + (Just ["major1","major2","minor"]) + $ concat [ + "(\\\n" + ," (major1) < ",major1," || \\\n" + ," (major1) == ",major1," && (major2) < ",major2," || \\\n" + ," (major1) == ",major1," && (major2) == ",major2," && (minor) <= ",minor,")" + ] + ,"\n"] + where + (major1:major2:minor:_) = map show (versionNumbers version ++ repeat 0) + +-- | Generate the @CURRENT_COMPONENT_ID@ definition for the component ID +-- of the current package. +generateComponentIdMacro :: LocalBuildInfo -> ComponentLocalBuildInfo -> String +generateComponentIdMacro _lbi clbi = + concat $ + [case clbi of + LibComponentLocalBuildInfo{} -> + ifndefDefineStr "CURRENT_PACKAGE_KEY" (componentCompatPackageKey clbi) + _ -> "" + ,ifndefDefineStr "CURRENT_COMPONENT_ID" (display (componentComponentId clbi)) + ] + +-- | Generate the @CURRENT_PACKAGE_VERSION@ definition for the declared version +-- of the current package. 
+generateCurrentPackageVersion :: PackageDescription -> String +generateCurrentPackageVersion pd = + ifndefDefineStr "CURRENT_PACKAGE_VERSION" (display (pkgVersion (package pd))) + +fixchar :: Char -> Char +fixchar '-' = '_' +fixchar c = c diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Build/PathsModule.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Build/PathsModule.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Build/PathsModule.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Build/PathsModule.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,353 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Build.Macros +-- Copyright : Isaac Jones 2003-2005, +-- Ross Paterson 2006, +-- Duncan Coutts 2007-2008 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Generating the Paths_pkgname module. +-- +-- This is a module that Cabal generates for the benefit of packages. It +-- enables them to find their version number and find any installed data files +-- at runtime. This code should probably be split off into another module. 
+-- +module Distribution.Simple.Build.PathsModule ( + generate, pkgPathEnvVar + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.System +import Distribution.Simple.Compiler +import Distribution.Package +import Distribution.PackageDescription +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.BuildPaths +import Distribution.Simple.Utils +import Distribution.Text +import Distribution.Version + +import System.FilePath ( pathSeparator ) + +-- ------------------------------------------------------------ +-- * Building Paths_.hs +-- ------------------------------------------------------------ + +generate :: PackageDescription -> LocalBuildInfo -> ComponentLocalBuildInfo -> String +generate pkg_descr lbi clbi = + let pragmas = + cpp_pragma + ++ no_rebindable_syntax_pragma + ++ ffi_pragmas + ++ warning_pragmas + + cpp_pragma + | supports_cpp = "{-# LANGUAGE CPP #-}\n" + | otherwise = "" + + -- -XRebindableSyntax is problematic because when paired with + -- -XOverloadedLists, 'fromListN' is not in scope, + -- or -XOverloadedStrings 'fromString' is not in scope, + -- so we disable 'RebindableSyntax'. 
+ no_rebindable_syntax_pragma + | supports_rebindable_syntax = "{-# LANGUAGE NoRebindableSyntax #-}\n" + | otherwise = "" + + ffi_pragmas + | absolute = "" + | supports_language_pragma = + "{-# LANGUAGE ForeignFunctionInterface #-}\n" + | otherwise = + "{-# OPTIONS_GHC -fffi #-}\n" + + warning_pragmas = + "{-# OPTIONS_GHC -fno-warn-missing-import-lists #-}\n" + + foreign_imports + | absolute = "" + | otherwise = + "import Foreign\n"++ + "import Foreign.C\n" + + reloc_imports + | reloc = + "import System.Environment (getExecutablePath)\n" + | otherwise = "" + + header = + pragmas++ + "module " ++ display paths_modulename ++ " (\n"++ + " version,\n"++ + " getBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir,\n"++ + " getDataFileName, getSysconfDir\n"++ + " ) where\n"++ + "\n"++ + foreign_imports++ + "import qualified Control.Exception as Exception\n"++ + "import Data.Version (Version(..))\n"++ + "import System.Environment (getEnv)\n"++ + reloc_imports ++ + "import Prelude\n"++ + "\n"++ + (if supports_cpp + then + ("#if defined(VERSION_base)\n"++ + "\n"++ + "#if MIN_VERSION_base(4,0,0)\n"++ + "catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a\n"++ + "#else\n"++ + "catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a\n"++ + "#endif\n"++ + "\n"++ + "#else\n"++ + "catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a\n"++ + "#endif\n") + else + "catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a\n")++ + "catchIO = Exception.catch\n" ++ + "\n"++ + "version :: Version"++ + "\nversion = Version " ++ show branch ++ " []" + where branch = versionNumbers $ packageVersion pkg_descr + + body + | reloc = + "\n\nbindirrel :: FilePath\n" ++ + "bindirrel = " ++ show flat_bindirreloc ++ + "\n"++ + "\ngetBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath\n"++ + "getBinDir = "++mkGetEnvOrReloc "bindir" flat_bindirreloc++"\n"++ + "getLibDir = "++mkGetEnvOrReloc "libdir" flat_libdirreloc++"\n"++ + "getDynLibDir = 
"++mkGetEnvOrReloc "libdir" flat_dynlibdirreloc++"\n"++ + "getDataDir = "++mkGetEnvOrReloc "datadir" flat_datadirreloc++"\n"++ + "getLibexecDir = "++mkGetEnvOrReloc "libexecdir" flat_libexecdirreloc++"\n"++ + "getSysconfDir = "++mkGetEnvOrReloc "sysconfdir" flat_sysconfdirreloc++"\n"++ + "\n"++ + "getDataFileName :: FilePath -> IO FilePath\n"++ + "getDataFileName name = do\n"++ + " dir <- getDataDir\n"++ + " return (dir `joinFileName` name)\n"++ + "\n"++ + get_prefix_reloc_stuff++ + "\n"++ + filename_stuff + | absolute = + "\nbindir, libdir, dynlibdir, datadir, libexecdir, sysconfdir :: FilePath\n"++ + "\nbindir = " ++ show flat_bindir ++ + "\nlibdir = " ++ show flat_libdir ++ + "\ndynlibdir = " ++ show flat_dynlibdir ++ + "\ndatadir = " ++ show flat_datadir ++ + "\nlibexecdir = " ++ show flat_libexecdir ++ + "\nsysconfdir = " ++ show flat_sysconfdir ++ + "\n"++ + "\ngetBinDir, getLibDir, getDynLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath\n"++ + "getBinDir = "++mkGetEnvOr "bindir" "return bindir"++"\n"++ + "getLibDir = "++mkGetEnvOr "libdir" "return libdir"++"\n"++ + "getDynLibDir = "++mkGetEnvOr "dynlibdir" "return dynlibdir"++"\n"++ + "getDataDir = "++mkGetEnvOr "datadir" "return datadir"++"\n"++ + "getLibexecDir = "++mkGetEnvOr "libexecdir" "return libexecdir"++"\n"++ + "getSysconfDir = "++mkGetEnvOr "sysconfdir" "return sysconfdir"++"\n"++ + "\n"++ + "getDataFileName :: FilePath -> IO FilePath\n"++ + "getDataFileName name = do\n"++ + " dir <- getDataDir\n"++ + " return (dir ++ "++path_sep++" ++ name)\n" + | otherwise = + "\nprefix, bindirrel :: FilePath" ++ + "\nprefix = " ++ show flat_prefix ++ + "\nbindirrel = " ++ show (fromMaybe (error "PathsModule.generate") flat_bindirrel) ++ + "\n\n"++ + "getBinDir :: IO FilePath\n"++ + "getBinDir = getPrefixDirRel bindirrel\n\n"++ + "getLibDir :: IO FilePath\n"++ + "getLibDir = "++mkGetDir flat_libdir flat_libdirrel++"\n\n"++ + "getDynLibDir :: IO FilePath\n"++ + "getDynLibDir = "++mkGetDir 
flat_dynlibdir flat_dynlibdirrel++"\n\n"++ + "getDataDir :: IO FilePath\n"++ + "getDataDir = "++ mkGetEnvOr "datadir" + (mkGetDir flat_datadir flat_datadirrel)++"\n\n"++ + "getLibexecDir :: IO FilePath\n"++ + "getLibexecDir = "++mkGetDir flat_libexecdir flat_libexecdirrel++"\n\n"++ + "getSysconfDir :: IO FilePath\n"++ + "getSysconfDir = "++mkGetDir flat_sysconfdir flat_sysconfdirrel++"\n\n"++ + "getDataFileName :: FilePath -> IO FilePath\n"++ + "getDataFileName name = do\n"++ + " dir <- getDataDir\n"++ + " return (dir `joinFileName` name)\n"++ + "\n"++ + get_prefix_stuff++ + "\n"++ + filename_stuff + in header++body + + where + cid = componentUnitId clbi + + InstallDirs { + prefix = flat_prefix, + bindir = flat_bindir, + libdir = flat_libdir, + dynlibdir = flat_dynlibdir, + datadir = flat_datadir, + libexecdir = flat_libexecdir, + sysconfdir = flat_sysconfdir + } = absoluteComponentInstallDirs pkg_descr lbi cid NoCopyDest + InstallDirs { + bindir = flat_bindirrel, + libdir = flat_libdirrel, + dynlibdir = flat_dynlibdirrel, + datadir = flat_datadirrel, + libexecdir = flat_libexecdirrel, + sysconfdir = flat_sysconfdirrel + } = prefixRelativeComponentInstallDirs (packageId pkg_descr) lbi cid + + flat_bindirreloc = shortRelativePath flat_prefix flat_bindir + flat_libdirreloc = shortRelativePath flat_prefix flat_libdir + flat_dynlibdirreloc = shortRelativePath flat_prefix flat_dynlibdir + flat_datadirreloc = shortRelativePath flat_prefix flat_datadir + flat_libexecdirreloc = shortRelativePath flat_prefix flat_libexecdir + flat_sysconfdirreloc = shortRelativePath flat_prefix flat_sysconfdir + + mkGetDir _ (Just dirrel) = "getPrefixDirRel " ++ show dirrel + mkGetDir dir Nothing = "return " ++ show dir + + mkGetEnvOrReloc var dirrel = "catchIO (getEnv \""++var'++"\")" ++ + " (\\_ -> getPrefixDirReloc \"" ++ dirrel ++ + "\")" + where var' = pkgPathEnvVar pkg_descr var + + mkGetEnvOr var expr = "catchIO (getEnv \""++var'++"\")"++ + " (\\_ -> "++expr++")" + where var' = 
pkgPathEnvVar pkg_descr var + + -- In several cases we cannot make relocatable installations + absolute = + hasLibs pkg_descr -- we can only make progs relocatable + || isNothing flat_bindirrel -- if the bin dir is an absolute path + || not (supportsRelocatableProgs (compilerFlavor (compiler lbi))) + + reloc = relocatable lbi + + supportsRelocatableProgs GHC = case buildOS of + Windows -> True + _ -> False + supportsRelocatableProgs GHCJS = case buildOS of + Windows -> True + _ -> False + supportsRelocatableProgs _ = False + + paths_modulename = autogenPathsModuleName pkg_descr + + get_prefix_stuff = get_prefix_win32 supports_cpp buildArch + + path_sep = show [pathSeparator] + + supports_cpp = supports_language_pragma + supports_rebindable_syntax= ghc_newer_than (mkVersion [7,0,1]) + supports_language_pragma = ghc_newer_than (mkVersion [6,6,1]) + + ghc_newer_than minVersion = + case compilerCompatVersion GHC (compiler lbi) of + Nothing -> False + Just version -> version `withinRange` orLaterVersion minVersion + +-- | Generates the name of the environment variable controlling the path +-- component of interest. +-- +-- Note: The format of these strings is part of Cabal's public API; +-- changing this function constitutes a *backwards-compatibility* break. +pkgPathEnvVar :: PackageDescription + -> String -- ^ path component; one of \"bindir\", \"libdir\", + -- \"datadir\", \"libexecdir\", or \"sysconfdir\" + -> String -- ^ environment variable name +pkgPathEnvVar pkg_descr var = + showPkgName (packageName pkg_descr) ++ "_" ++ var + where + showPkgName = map fixchar . 
display + fixchar '-' = '_' + fixchar c = c + +get_prefix_reloc_stuff :: String +get_prefix_reloc_stuff = + "getPrefixDirReloc :: FilePath -> IO FilePath\n"++ + "getPrefixDirReloc dirRel = do\n"++ + " exePath <- getExecutablePath\n"++ + " let (bindir,_) = splitFileName exePath\n"++ + " return ((bindir `minusFileName` bindirrel) `joinFileName` dirRel)\n" + +get_prefix_win32 :: Bool -> Arch -> String +get_prefix_win32 supports_cpp arch = + "getPrefixDirRel :: FilePath -> IO FilePath\n"++ + "getPrefixDirRel dirRel = try_size 2048 -- plenty, PATH_MAX is 512 under Win32.\n"++ + " where\n"++ + " try_size size = allocaArray (fromIntegral size) $ \\buf -> do\n"++ + " ret <- c_GetModuleFileName nullPtr buf size\n"++ + " case ret of\n"++ + " 0 -> return (prefix `joinFileName` dirRel)\n"++ + " _ | ret < size -> do\n"++ + " exePath <- peekCWString buf\n"++ + " let (bindir,_) = splitFileName exePath\n"++ + " return ((bindir `minusFileName` bindirrel) `joinFileName` dirRel)\n"++ + " | otherwise -> try_size (size * 2)\n"++ + "\n"++ + (case supports_cpp of + False -> "" + True -> "#if defined(i386_HOST_ARCH)\n"++ + "# define WINDOWS_CCONV stdcall\n"++ + "#elif defined(x86_64_HOST_ARCH)\n"++ + "# define WINDOWS_CCONV ccall\n"++ + "#else\n"++ + "# error Unknown mingw32 arch\n"++ + "#endif\n")++ + "foreign import " ++ cconv ++ " unsafe \"windows.h GetModuleFileNameW\"\n"++ + " c_GetModuleFileName :: Ptr () -> CWString -> Int32 -> IO Int32\n" + where cconv = if supports_cpp + then "WINDOWS_CCONV" + else case arch of + I386 -> "stdcall" + X86_64 -> "ccall" + _ -> error "win32 supported only with I386, X86_64" + +filename_stuff :: String +filename_stuff = + "minusFileName :: FilePath -> String -> FilePath\n"++ + "minusFileName dir \"\" = dir\n"++ + "minusFileName dir \".\" = dir\n"++ + "minusFileName dir suffix =\n"++ + " minusFileName (fst (splitFileName dir)) (fst (splitFileName suffix))\n"++ + "\n"++ + "joinFileName :: String -> String -> FilePath\n"++ + "joinFileName \"\" fname = 
fname\n"++ + "joinFileName \".\" fname = fname\n"++ + "joinFileName dir \"\" = dir\n"++ + "joinFileName dir fname\n"++ + " | isPathSeparator (last dir) = dir++fname\n"++ + " | otherwise = dir++pathSeparator:fname\n"++ + "\n"++ + "splitFileName :: FilePath -> (String, String)\n"++ + "splitFileName p = (reverse (path2++drive), reverse fname)\n"++ + " where\n"++ + " (path,drive) = case p of\n"++ + " (c:':':p') -> (reverse p',[':',c])\n"++ + " _ -> (reverse p ,\"\")\n"++ + " (fname,path1) = break isPathSeparator path\n"++ + " path2 = case path1 of\n"++ + " [] -> \".\"\n"++ + " [_] -> path1 -- don't remove the trailing slash if \n"++ + " -- there is only one character\n"++ + " (c:path') | isPathSeparator c -> path'\n"++ + " _ -> path1\n"++ + "\n"++ + "pathSeparator :: Char\n"++ + (case buildOS of + Windows -> "pathSeparator = '\\\\'\n" + _ -> "pathSeparator = '/'\n") ++ + "\n"++ + "isPathSeparator :: Char -> Bool\n"++ + (case buildOS of + Windows -> "isPathSeparator c = c == '/' || c == '\\\\'\n" + _ -> "isPathSeparator c = c == '/'\n") diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Build.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Build.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Build.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Build.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,697 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Build +-- Copyright : Isaac Jones 2003-2005, +-- Ross Paterson 2006, +-- Duncan Coutts 2007-2008, 2012 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is the entry point to actually building the modules in a package. 
It +-- doesn't actually do much itself, most of the work is delegated to +-- compiler-specific actions. It does do some non-compiler specific bits like +-- running pre-processors. +-- + +module Distribution.Simple.Build ( + build, repl, + startInterpreter, + + initialBuildSteps, + createInternalPackageDB, + componentInitialBuildSteps, + writeAutogenFiles, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.Dependency +import Distribution.Types.LocalBuildInfo +import Distribution.Types.TargetInfo +import Distribution.Types.ComponentRequestedSpec +import Distribution.Types.ForeignLib +import Distribution.Types.MungedPackageId +import Distribution.Types.MungedPackageName +import Distribution.Types.UnqualComponentName +import Distribution.Types.ComponentLocalBuildInfo +import Distribution.Types.ExecutableScope + +import Distribution.Package +import Distribution.Backpack +import Distribution.Backpack.DescribeUnitId +import qualified Distribution.Simple.GHC as GHC +import qualified Distribution.Simple.GHCJS as GHCJS +import qualified Distribution.Simple.UHC as UHC +import qualified Distribution.Simple.HaskellSuite as HaskellSuite +import qualified Distribution.Simple.PackageIndex as Index + +import qualified Distribution.Simple.Build.Macros as Build.Macros +import qualified Distribution.Simple.Build.PathsModule as Build.PathsModule +import qualified Distribution.Simple.Program.HcPkg as HcPkg + +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.PackageDescription hiding (Flag) +import qualified Distribution.InstalledPackageInfo as IPI +import Distribution.InstalledPackageInfo (InstalledPackageInfo) +import qualified Distribution.ModuleName as ModuleName + +import Distribution.Simple.Setup +import Distribution.Simple.BuildTarget +import Distribution.Simple.BuildToolDepends +import Distribution.Simple.PreProcess +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.Program.Types +import 
Distribution.Simple.Program.Db +import Distribution.Simple.BuildPaths +import Distribution.Simple.Configure +import Distribution.Simple.Register +import Distribution.Simple.Test.LibV09 +import Distribution.Simple.Utils + +import Distribution.System +import Distribution.Text +import Distribution.Verbosity + +import Distribution.Compat.Graph (IsNode(..)) + +import Control.Monad +import qualified Data.Set as Set +import System.FilePath ( (), (<.>), takeDirectory ) +import System.Directory ( getCurrentDirectory ) + +-- ----------------------------------------------------------------------------- +-- |Build the libraries and executables in this package. + +build :: PackageDescription -- ^ Mostly information from the .cabal file + -> LocalBuildInfo -- ^ Configuration information + -> BuildFlags -- ^ Flags that the user passed to build + -> [ PPSuffixHandler ] -- ^ preprocessors to run before compiling + -> IO () +build pkg_descr lbi flags suffixes = do + targets <- readTargetInfos verbosity pkg_descr lbi (buildArgs flags) + let componentsToBuild = neededTargetsInBuildOrder' pkg_descr lbi (map nodeKey targets) + info verbosity $ "Component build order: " + ++ intercalate ", " + (map (showComponentName . componentLocalName . 
targetCLBI) + componentsToBuild) + + when (null targets) $ + -- Only bother with this message if we're building the whole package + setupMessage verbosity "Building" (packageId pkg_descr) + + internalPackageDB <- createInternalPackageDB verbosity lbi distPref + + (\f -> foldM_ f (installedPkgs lbi) componentsToBuild) $ \index target -> do + let comp = targetComponent target + clbi = targetCLBI target + componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity + let bi = componentBuildInfo comp + progs' = addInternalBuildTools pkg_descr lbi bi (withPrograms lbi) + lbi' = lbi { + withPrograms = progs', + withPackageDB = withPackageDB lbi ++ [internalPackageDB], + installedPkgs = index + } + mb_ipi <- buildComponent verbosity (buildNumJobs flags) pkg_descr + lbi' suffixes comp clbi distPref + return (maybe index (Index.insert `flip` index) mb_ipi) + return () + where + distPref = fromFlag (buildDistPref flags) + verbosity = fromFlag (buildVerbosity flags) + + +repl :: PackageDescription -- ^ Mostly information from the .cabal file + -> LocalBuildInfo -- ^ Configuration information + -> ReplFlags -- ^ Flags that the user passed to build + -> [ PPSuffixHandler ] -- ^ preprocessors to run before compiling + -> [String] + -> IO () +repl pkg_descr lbi flags suffixes args = do + let distPref = fromFlag (replDistPref flags) + verbosity = fromFlag (replVerbosity flags) + + target <- readTargetInfos verbosity pkg_descr lbi args >>= \r -> case r of + -- This seems DEEPLY questionable. + [] -> return (head (allTargetsInBuildOrder' pkg_descr lbi)) + [target] -> return target + _ -> die' verbosity $ "The 'repl' command does not support multiple targets at once." + let componentsToBuild = neededTargetsInBuildOrder' pkg_descr lbi [nodeKey target] + debug verbosity $ "Component build order: " + ++ intercalate ", " + (map (showComponentName . componentLocalName . 
targetCLBI) + componentsToBuild) + + internalPackageDB <- createInternalPackageDB verbosity lbi distPref + + let lbiForComponent comp lbi' = + lbi' { + withPackageDB = withPackageDB lbi ++ [internalPackageDB], + withPrograms = addInternalBuildTools pkg_descr lbi' + (componentBuildInfo comp) (withPrograms lbi') + } + + -- build any dependent components + sequence_ + [ do let clbi = targetCLBI subtarget + comp = targetComponent subtarget + lbi' = lbiForComponent comp lbi + componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity + buildComponent verbosity NoFlag + pkg_descr lbi' suffixes comp clbi distPref + | subtarget <- init componentsToBuild ] + + -- REPL for target components + let clbi = targetCLBI target + comp = targetComponent target + lbi' = lbiForComponent comp lbi + replFlags = replReplOptions flags + componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity + replComponent replFlags verbosity pkg_descr lbi' suffixes comp clbi distPref + + +-- | Start an interpreter without loading any package files. +startInterpreter :: Verbosity -> ProgramDb -> Compiler -> Platform + -> PackageDBStack -> IO () +startInterpreter verbosity programDb comp platform packageDBs = + case compilerFlavor comp of + GHC -> GHC.startInterpreter verbosity programDb comp platform packageDBs + GHCJS -> GHCJS.startInterpreter verbosity programDb comp platform packageDBs + _ -> die' verbosity "A REPL is not supported with this compiler." 
+ +buildComponent :: Verbosity + -> Flag (Maybe Int) + -> PackageDescription + -> LocalBuildInfo + -> [PPSuffixHandler] + -> Component + -> ComponentLocalBuildInfo + -> FilePath + -> IO (Maybe InstalledPackageInfo) +buildComponent verbosity numJobs pkg_descr lbi suffixes + comp@(CLib lib) clbi distPref = do + preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes + extras <- preprocessExtras verbosity comp lbi + setupMessage' verbosity "Building" (packageId pkg_descr) + (componentLocalName clbi) (maybeComponentInstantiatedWith clbi) + let libbi = libBuildInfo lib + lib' = lib { libBuildInfo = addExtraCxxSources (addExtraCSources libbi extras) extras } + buildLib verbosity numJobs pkg_descr lbi lib' clbi + + let oneComponentRequested (OneComponentRequestedSpec _) = True + oneComponentRequested _ = False + -- Don't register inplace if we're only building a single component; + -- it's not necessary because there won't be any subsequent builds + -- that need to tag us + if (not (oneComponentRequested (componentEnabledSpec lbi))) + then do + -- Register the library in-place, so exes can depend + -- on internally defined libraries. + pwd <- getCurrentDirectory + let -- The in place registration uses the "-inplace" suffix, not an ABI hash + installedPkgInfo = inplaceInstalledPackageInfo pwd distPref pkg_descr + -- NB: Use a fake ABI hash to avoid + -- needing to recompute it every build. 
+ (mkAbiHash "inplace") lib' lbi clbi + + debug verbosity $ "Registering inplace:\n" ++ (IPI.showInstalledPackageInfo installedPkgInfo) + registerPackage verbosity (compiler lbi) (withPrograms lbi) + (withPackageDB lbi) installedPkgInfo + HcPkg.defaultRegisterOptions { + HcPkg.registerMultiInstance = True + } + return (Just installedPkgInfo) + else return Nothing + +buildComponent verbosity numJobs pkg_descr lbi suffixes + comp@(CFLib flib) clbi _distPref = do + preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes + setupMessage' verbosity "Building" (packageId pkg_descr) + (componentLocalName clbi) (maybeComponentInstantiatedWith clbi) + buildFLib verbosity numJobs pkg_descr lbi flib clbi + return Nothing + +buildComponent verbosity numJobs pkg_descr lbi suffixes + comp@(CExe exe) clbi _ = do + preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes + extras <- preprocessExtras verbosity comp lbi + setupMessage' verbosity "Building" (packageId pkg_descr) + (componentLocalName clbi) (maybeComponentInstantiatedWith clbi) + let ebi = buildInfo exe + exe' = exe { buildInfo = addExtraCSources ebi extras } + buildExe verbosity numJobs pkg_descr lbi exe' clbi + return Nothing + + +buildComponent verbosity numJobs pkg_descr lbi suffixes + comp@(CTest test@TestSuite { testInterface = TestSuiteExeV10{} }) + clbi _distPref = do + let exe = testSuiteExeV10AsExe test + preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes + extras <- preprocessExtras verbosity comp lbi + setupMessage' verbosity "Building" (packageId pkg_descr) + (componentLocalName clbi) (maybeComponentInstantiatedWith clbi) + let ebi = buildInfo exe + exe' = exe { buildInfo = addExtraCSources ebi extras } + buildExe verbosity numJobs pkg_descr lbi exe' clbi + return Nothing + + +buildComponent verbosity numJobs pkg_descr lbi0 suffixes + comp@(CTest + test@TestSuite { testInterface = TestSuiteLibV09{} }) + clbi -- This ComponentLocalBuildInfo corresponds to a 
detailed + -- test suite and not a real component. It should not + -- be used, except to construct the CLBIs for the + -- library and stub executable that will actually be + -- built. + distPref = do + pwd <- getCurrentDirectory + let (pkg, lib, libClbi, lbi, ipi, exe, exeClbi) = + testSuiteLibV09AsLibAndExe pkg_descr test clbi lbi0 distPref pwd + preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes + extras <- preprocessExtras verbosity comp lbi + setupMessage' verbosity "Building" (packageId pkg_descr) + (componentLocalName clbi) (maybeComponentInstantiatedWith clbi) + buildLib verbosity numJobs pkg lbi lib libClbi + -- NB: need to enable multiple instances here, because on 7.10+ + -- the package name is the same as the library, and we still + -- want the registration to go through. + registerPackage verbosity (compiler lbi) (withPrograms lbi) + (withPackageDB lbi) ipi + HcPkg.defaultRegisterOptions { + HcPkg.registerMultiInstance = True + } + let ebi = buildInfo exe + -- NB: The stub executable is linked against the test-library + -- which already contains all `other-modules`, so we need + -- to remove those from the stub-exe's build-info + exe' = exe { buildInfo = (addExtraCSources ebi extras) { otherModules = [] } } + buildExe verbosity numJobs pkg_descr lbi exe' exeClbi + return Nothing -- Can't depend on test suite + + +buildComponent verbosity _ _ _ _ + (CTest TestSuite { testInterface = TestSuiteUnsupported tt }) + _ _ = + die' verbosity $ "No support for building test suite type " ++ display tt + + +buildComponent verbosity numJobs pkg_descr lbi suffixes + comp@(CBench bm@Benchmark { benchmarkInterface = BenchmarkExeV10 {} }) + clbi _ = do + let (exe, exeClbi) = benchmarkExeV10asExe bm clbi + preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes + extras <- preprocessExtras verbosity comp lbi + setupMessage' verbosity "Building" (packageId pkg_descr) + (componentLocalName clbi) (maybeComponentInstantiatedWith clbi) + let ebi 
= buildInfo exe + exe' = exe { buildInfo = addExtraCSources ebi extras } + buildExe verbosity numJobs pkg_descr lbi exe' exeClbi + return Nothing + + +buildComponent verbosity _ _ _ _ + (CBench Benchmark { benchmarkInterface = BenchmarkUnsupported tt }) + _ _ = + die' verbosity $ "No support for building benchmark type " ++ display tt + + +-- | Add extra C sources generated by preprocessing to build +-- information. +addExtraCSources :: BuildInfo -> [FilePath] -> BuildInfo +addExtraCSources bi extras = bi { cSources = new } + where new = Set.toList $ old `Set.union` exs + old = Set.fromList $ cSources bi + exs = Set.fromList extras + + +-- | Add extra C++ sources generated by preprocessing to build +-- information. +addExtraCxxSources :: BuildInfo -> [FilePath] -> BuildInfo +addExtraCxxSources bi extras = bi { cxxSources = new } + where new = Set.toList $ old `Set.union` exs + old = Set.fromList $ cxxSources bi + exs = Set.fromList extras + + +replComponent :: [String] + -> Verbosity + -> PackageDescription + -> LocalBuildInfo + -> [PPSuffixHandler] + -> Component + -> ComponentLocalBuildInfo + -> FilePath + -> IO () +replComponent replFlags verbosity pkg_descr lbi suffixes + comp@(CLib lib) clbi _ = do + preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes + extras <- preprocessExtras verbosity comp lbi + let libbi = libBuildInfo lib + lib' = lib { libBuildInfo = libbi { cSources = cSources libbi ++ extras } } + replLib replFlags verbosity pkg_descr lbi lib' clbi + +replComponent replFlags verbosity pkg_descr lbi suffixes + comp@(CFLib flib) clbi _ = do + preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes + replFLib replFlags verbosity pkg_descr lbi flib clbi + +replComponent replFlags verbosity pkg_descr lbi suffixes + comp@(CExe exe) clbi _ = do + preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes + extras <- preprocessExtras verbosity comp lbi + let ebi = buildInfo exe + exe' = exe { buildInfo = ebi { cSources 
= cSources ebi ++ extras } } + replExe replFlags verbosity pkg_descr lbi exe' clbi + + +replComponent replFlags verbosity pkg_descr lbi suffixes + comp@(CTest test@TestSuite { testInterface = TestSuiteExeV10{} }) + clbi _distPref = do + let exe = testSuiteExeV10AsExe test + preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes + extras <- preprocessExtras verbosity comp lbi + let ebi = buildInfo exe + exe' = exe { buildInfo = ebi { cSources = cSources ebi ++ extras } } + replExe replFlags verbosity pkg_descr lbi exe' clbi + + +replComponent replFlags verbosity pkg_descr lbi0 suffixes + comp@(CTest + test@TestSuite { testInterface = TestSuiteLibV09{} }) + clbi distPref = do + pwd <- getCurrentDirectory + let (pkg, lib, libClbi, lbi, _, _, _) = + testSuiteLibV09AsLibAndExe pkg_descr test clbi lbi0 distPref pwd + preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes + extras <- preprocessExtras verbosity comp lbi + let libbi = libBuildInfo lib + lib' = lib { libBuildInfo = libbi { cSources = cSources libbi ++ extras } } + replLib replFlags verbosity pkg lbi lib' libClbi + + +replComponent _ verbosity _ _ _ + (CTest TestSuite { testInterface = TestSuiteUnsupported tt }) + _ _ = + die' verbosity $ "No support for building test suite type " ++ display tt + + +replComponent replFlags verbosity pkg_descr lbi suffixes + comp@(CBench bm@Benchmark { benchmarkInterface = BenchmarkExeV10 {} }) + clbi _ = do + let (exe, exeClbi) = benchmarkExeV10asExe bm clbi + preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes + extras <- preprocessExtras verbosity comp lbi + let ebi = buildInfo exe + exe' = exe { buildInfo = ebi { cSources = cSources ebi ++ extras } } + replExe replFlags verbosity pkg_descr lbi exe' exeClbi + + +replComponent _ verbosity _ _ _ + (CBench Benchmark { benchmarkInterface = BenchmarkUnsupported tt }) + _ _ = + die' verbosity $ "No support for building benchmark type " ++ display tt + 
+---------------------------------------------------- +-- Shared code for buildComponent and replComponent +-- + +-- | Translate a exe-style 'TestSuite' component into an exe for building +testSuiteExeV10AsExe :: TestSuite -> Executable +testSuiteExeV10AsExe test@TestSuite { testInterface = TestSuiteExeV10 _ mainFile } = + Executable { + exeName = testName test, + modulePath = mainFile, + exeScope = ExecutablePublic, + buildInfo = testBuildInfo test + } +testSuiteExeV10AsExe TestSuite{} = error "testSuiteExeV10AsExe: wrong kind" + +-- | Translate a lib-style 'TestSuite' component into a lib + exe for building +testSuiteLibV09AsLibAndExe :: PackageDescription + -> TestSuite + -> ComponentLocalBuildInfo + -> LocalBuildInfo + -> FilePath + -> FilePath + -> (PackageDescription, + Library, ComponentLocalBuildInfo, + LocalBuildInfo, + IPI.InstalledPackageInfo, + Executable, ComponentLocalBuildInfo) +testSuiteLibV09AsLibAndExe pkg_descr + test@TestSuite { testInterface = TestSuiteLibV09 _ m } + clbi lbi distPref pwd = + (pkg, lib, libClbi, lbi, ipi, exe, exeClbi) + where + bi = testBuildInfo test + lib = Library { + libName = Nothing, + exposedModules = [ m ], + reexportedModules = [], + signatures = [], + libExposed = True, + libBuildInfo = bi + } + -- This is, like, the one place where we use a CTestName for a library. + -- Should NOT use library name, since that could conflict! 
+ PackageIdentifier pkg_name pkg_ver = package pkg_descr + compat_name = computeCompatPackageName pkg_name (Just (testName test)) + compat_key = computeCompatPackageKey (compiler lbi) compat_name pkg_ver (componentUnitId clbi) + libClbi = LibComponentLocalBuildInfo + { componentPackageDeps = componentPackageDeps clbi + , componentInternalDeps = componentInternalDeps clbi + , componentIsIndefinite_ = False + , componentExeDeps = componentExeDeps clbi + , componentLocalName = CSubLibName (testName test) + , componentIsPublic = False + , componentIncludes = componentIncludes clbi + , componentUnitId = componentUnitId clbi + , componentComponentId = componentComponentId clbi + , componentInstantiatedWith = [] + , componentCompatPackageName = compat_name + , componentCompatPackageKey = compat_key + , componentExposedModules = [IPI.ExposedModule m Nothing] + } + pkg = pkg_descr { + package = (package pkg_descr) { pkgName = mkPackageName $ unMungedPackageName compat_name } + , executables = [] + , testSuites = [] + , subLibraries = [lib] + } + ipi = inplaceInstalledPackageInfo pwd distPref pkg (mkAbiHash "") lib lbi libClbi + testDir = buildDir lbi stubName test + stubName test ++ "-tmp" + testLibDep = thisPackageVersion $ package pkg + exe = Executable { + exeName = mkUnqualComponentName $ stubName test, + modulePath = stubFilePath test, + exeScope = ExecutablePublic, + buildInfo = (testBuildInfo test) { + hsSourceDirs = [ testDir ], + targetBuildDepends = testLibDep + : (targetBuildDepends $ testBuildInfo test) + } + } + -- | The stub executable needs a new 'ComponentLocalBuildInfo' + -- that exposes the relevant test suite library. 
+ deps = (IPI.installedUnitId ipi, mungedId ipi) + : (filter (\(_, x) -> let name = unMungedPackageName $ mungedName x + in name == "Cabal" || name == "base") + (componentPackageDeps clbi)) + exeClbi = ExeComponentLocalBuildInfo { + -- TODO: this is a hack, but as long as this is unique + -- (doesn't clobber something) we won't run into trouble + componentUnitId = mkUnitId (stubName test), + componentComponentId = mkComponentId (stubName test), + componentInternalDeps = [componentUnitId clbi], + componentExeDeps = [], + componentLocalName = CExeName $ mkUnqualComponentName $ stubName test, + componentPackageDeps = deps, + -- Assert DefUnitId invariant! + -- Executable can't be indefinite, so dependencies must + -- be definite packages. + componentIncludes = zip (map (DefiniteUnitId . unsafeMkDefUnitId . fst) deps) + (repeat defaultRenaming) + } +testSuiteLibV09AsLibAndExe _ TestSuite{} _ _ _ _ = error "testSuiteLibV09AsLibAndExe: wrong kind" + + +-- | Translate a exe-style 'Benchmark' component into an exe for building +benchmarkExeV10asExe :: Benchmark -> ComponentLocalBuildInfo + -> (Executable, ComponentLocalBuildInfo) +benchmarkExeV10asExe bm@Benchmark { benchmarkInterface = BenchmarkExeV10 _ f } + clbi = + (exe, exeClbi) + where + exe = Executable { + exeName = benchmarkName bm, + modulePath = f, + exeScope = ExecutablePublic, + buildInfo = benchmarkBuildInfo bm + } + exeClbi = ExeComponentLocalBuildInfo { + componentUnitId = componentUnitId clbi, + componentComponentId = componentComponentId clbi, + componentLocalName = CExeName (benchmarkName bm), + componentInternalDeps = componentInternalDeps clbi, + componentExeDeps = componentExeDeps clbi, + componentPackageDeps = componentPackageDeps clbi, + componentIncludes = componentIncludes clbi + } +benchmarkExeV10asExe Benchmark{} _ = error "benchmarkExeV10asExe: wrong kind" + +-- | Initialize a new package db file for libraries defined +-- internally to the package. 
+createInternalPackageDB :: Verbosity -> LocalBuildInfo -> FilePath + -> IO PackageDB +createInternalPackageDB verbosity lbi distPref = do + existsAlready <- doesPackageDBExist dbPath + when existsAlready $ deletePackageDB dbPath + createPackageDB verbosity (compiler lbi) (withPrograms lbi) False dbPath + return (SpecificPackageDB dbPath) + where + dbPath = internalPackageDBPath lbi distPref + +addInternalBuildTools :: PackageDescription -> LocalBuildInfo -> BuildInfo + -> ProgramDb -> ProgramDb +addInternalBuildTools pkg lbi bi progs = + foldr updateProgram progs internalBuildTools + where + internalBuildTools = + [ simpleConfiguredProgram toolName' (FoundOnSystem toolLocation) + | toolName <- getAllInternalToolDependencies pkg bi + , let toolName' = unUnqualComponentName toolName + , let toolLocation = buildDir lbi toolName' toolName' <.> exeExtension (hostPlatform lbi) ] + + +-- TODO: build separate libs in separate dirs so that we can build +-- multiple libs, e.g. for 'LibTest' library-style test suites +buildLib :: Verbosity -> Flag (Maybe Int) + -> PackageDescription -> LocalBuildInfo + -> Library -> ComponentLocalBuildInfo -> IO () +buildLib verbosity numJobs pkg_descr lbi lib clbi = + case compilerFlavor (compiler lbi) of + GHC -> GHC.buildLib verbosity numJobs pkg_descr lbi lib clbi + GHCJS -> GHCJS.buildLib verbosity numJobs pkg_descr lbi lib clbi + UHC -> UHC.buildLib verbosity pkg_descr lbi lib clbi + HaskellSuite {} -> HaskellSuite.buildLib verbosity pkg_descr lbi lib clbi + _ -> die' verbosity "Building is not supported with this compiler." + +-- | Build a foreign library +-- +-- NOTE: We assume that we already checked that we can actually build the +-- foreign library in configure. 
+buildFLib :: Verbosity -> Flag (Maybe Int) + -> PackageDescription -> LocalBuildInfo + -> ForeignLib -> ComponentLocalBuildInfo -> IO () +buildFLib verbosity numJobs pkg_descr lbi flib clbi = + case compilerFlavor (compiler lbi) of + GHC -> GHC.buildFLib verbosity numJobs pkg_descr lbi flib clbi + _ -> die' verbosity "Building is not supported with this compiler." + +buildExe :: Verbosity -> Flag (Maybe Int) + -> PackageDescription -> LocalBuildInfo + -> Executable -> ComponentLocalBuildInfo -> IO () +buildExe verbosity numJobs pkg_descr lbi exe clbi = + case compilerFlavor (compiler lbi) of + GHC -> GHC.buildExe verbosity numJobs pkg_descr lbi exe clbi + GHCJS -> GHCJS.buildExe verbosity numJobs pkg_descr lbi exe clbi + UHC -> UHC.buildExe verbosity pkg_descr lbi exe clbi + _ -> die' verbosity "Building is not supported with this compiler." + +replLib :: [String] -> Verbosity -> PackageDescription + -> LocalBuildInfo -> Library -> ComponentLocalBuildInfo + -> IO () +replLib replFlags verbosity pkg_descr lbi lib clbi = + case compilerFlavor (compiler lbi) of + -- 'cabal repl' doesn't need to support 'ghc --make -j', so we just pass + -- NoFlag as the numJobs parameter. + GHC -> GHC.replLib replFlags verbosity NoFlag pkg_descr lbi lib clbi + GHCJS -> GHCJS.replLib replFlags verbosity NoFlag pkg_descr lbi lib clbi + _ -> die' verbosity "A REPL is not supported for this compiler." + +replExe :: [String] -> Verbosity -> PackageDescription + -> LocalBuildInfo -> Executable -> ComponentLocalBuildInfo + -> IO () +replExe replFlags verbosity pkg_descr lbi exe clbi = + case compilerFlavor (compiler lbi) of + GHC -> GHC.replExe replFlags verbosity NoFlag pkg_descr lbi exe clbi + GHCJS -> GHCJS.replExe replFlags verbosity NoFlag pkg_descr lbi exe clbi + _ -> die' verbosity "A REPL is not supported for this compiler." 
+ +replFLib :: [String] -> Verbosity -> PackageDescription + -> LocalBuildInfo -> ForeignLib -> ComponentLocalBuildInfo + -> IO () +replFLib replFlags verbosity pkg_descr lbi exe clbi = + case compilerFlavor (compiler lbi) of + GHC -> GHC.replFLib replFlags verbosity NoFlag pkg_descr lbi exe clbi + _ -> die' verbosity "A REPL is not supported for this compiler." + +-- | Runs 'componentInitialBuildSteps' on every configured component. +initialBuildSteps :: FilePath -- ^"dist" prefix + -> PackageDescription -- ^mostly information from the .cabal file + -> LocalBuildInfo -- ^Configuration information + -> Verbosity -- ^The verbosity to use + -> IO () +initialBuildSteps distPref pkg_descr lbi verbosity = + withAllComponentsInBuildOrder pkg_descr lbi $ \_comp clbi -> + componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity + +-- | Creates the autogenerated files for a particular configured component. +componentInitialBuildSteps :: FilePath -- ^"dist" prefix + -> PackageDescription -- ^mostly information from the .cabal file + -> LocalBuildInfo -- ^Configuration information + -> ComponentLocalBuildInfo + -> Verbosity -- ^The verbosity to use + -> IO () +componentInitialBuildSteps _distPref pkg_descr lbi clbi verbosity = do + createDirectoryIfMissingVerbose verbosity True (componentBuildDir lbi clbi) + + writeAutogenFiles verbosity pkg_descr lbi clbi + +-- | Generate and write out the Paths_.hs and cabal_macros.h files +-- +writeAutogenFiles :: Verbosity + -> PackageDescription + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> IO () +writeAutogenFiles verbosity pkg lbi clbi = do + createDirectoryIfMissingVerbose verbosity True (autogenComponentModulesDir lbi clbi) + + let pathsModulePath = autogenComponentModulesDir lbi clbi + ModuleName.toFilePath (autogenPathsModuleName pkg) <.> "hs" + pathsModuleDir = takeDirectory pathsModulePath + -- Ensure that the directory exists! 
+ createDirectoryIfMissingVerbose verbosity True pathsModuleDir + rewriteFileEx verbosity pathsModulePath (Build.PathsModule.generate pkg lbi clbi) + + --TODO: document what we're doing here, and move it to its own function + case clbi of + LibComponentLocalBuildInfo { componentInstantiatedWith = insts } -> + -- Write out empty hsig files for all requirements, so that GHC + -- has a source file to look at it when it needs to typecheck + -- a signature. It's harmless to write these out even when + -- there is a real hsig file written by the user, since + -- include path ordering ensures that the real hsig file + -- will always be picked up before the autogenerated one. + for_ (map fst insts) $ \mod_name -> do + let sigPath = autogenComponentModulesDir lbi clbi + ModuleName.toFilePath mod_name <.> "hsig" + createDirectoryIfMissingVerbose verbosity True (takeDirectory sigPath) + rewriteFileEx verbosity sigPath $ + "{-# OPTIONS_GHC -w #-}\n" ++ + "{-# LANGUAGE NoImplicitPrelude #-}\n" ++ + "signature " ++ display mod_name ++ " where" + _ -> return () + + let cppHeaderPath = autogenComponentModulesDir lbi clbi cppHeaderName + rewriteFileEx verbosity cppHeaderPath (Build.Macros.generate pkg lbi clbi) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/BuildPaths.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/BuildPaths.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/BuildPaths.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/BuildPaths.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,251 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.BuildPaths +-- Copyright : Isaac Jones 2003-2004, +-- Duncan Coutts 2008 +-- License : BSD3 +-- +-- Maintainer 
: cabal-devel@haskell.org +-- Portability : portable +-- +-- A bunch of dirs, paths and file names used for intermediate build steps. +-- + +module Distribution.Simple.BuildPaths ( + defaultDistPref, srcPref, + haddockDirName, hscolourPref, haddockPref, + autogenModulesDir, + autogenPackageModulesDir, + autogenComponentModulesDir, + + autogenModuleName, + autogenPathsModuleName, + cppHeaderName, + haddockName, + + mkGenericStaticLibName, + mkLibName, + mkProfLibName, + mkGenericSharedLibName, + mkSharedLibName, + mkStaticLibName, + + exeExtension, + objExtension, + dllExtension, + staticLibExtension, + -- * Source files & build directories + getSourceFiles, getLibSourceFiles, getExeSourceFiles, + getFLibSourceFiles, exeBuildDir, flibBuildDir, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.ForeignLib +import Distribution.Types.UnqualComponentName (unUnqualComponentName) +import Distribution.Package +import Distribution.ModuleName as ModuleName +import Distribution.Compiler +import Distribution.PackageDescription +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.Setup +import Distribution.Text +import Distribution.System +import Distribution.Verbosity +import Distribution.Simple.Utils + +import System.FilePath ((), (<.>), normalise) + +-- --------------------------------------------------------------------------- +-- Build directories and files + +srcPref :: FilePath -> FilePath +srcPref distPref = distPref "src" + +hscolourPref :: HaddockTarget -> FilePath -> PackageDescription -> FilePath +hscolourPref = haddockPref + +-- | This is the name of the directory in which the generated haddocks +-- should be stored. It does not include the @/doc/html@ prefix. +haddockDirName :: HaddockTarget -> PackageDescription -> FilePath +haddockDirName ForDevelopment = display . packageName +haddockDirName ForHackage = (++ "-docs") . display . 
packageId + +-- | The directory to which generated haddock documentation should be written. +haddockPref :: HaddockTarget -> FilePath -> PackageDescription -> FilePath +haddockPref haddockTarget distPref pkg_descr + = distPref "doc" "html" haddockDirName haddockTarget pkg_descr + +-- | The directory in which we put auto-generated modules for EVERY +-- component in the package. See deprecation notice. +{-# DEPRECATED autogenModulesDir "If you can, use 'autogenComponentModulesDir' instead, but if you really wanted package-global generated modules, use 'autogenPackageModulesDir'. In Cabal 2.0, we avoid using autogenerated files which apply to all components, because the information you often want in these files, e.g., dependency information, is best specified per component, so that reconfiguring a different component (e.g., enabling tests) doesn't force the entire to be rebuilt. 'autogenPackageModulesDir' still provides a place to put files you want to apply to the entire package, but most users of 'autogenModulesDir' should seriously consider 'autogenComponentModulesDir' if you really wanted the module to apply to one component." #-} +autogenModulesDir :: LocalBuildInfo -> String +autogenModulesDir = autogenPackageModulesDir + +-- | The directory in which we put auto-generated modules for EVERY +-- component in the package. +autogenPackageModulesDir :: LocalBuildInfo -> String +autogenPackageModulesDir lbi = buildDir lbi "global-autogen" + +-- | The directory in which we put auto-generated modules for a +-- particular component. +autogenComponentModulesDir :: LocalBuildInfo -> ComponentLocalBuildInfo -> String +autogenComponentModulesDir lbi clbi = componentBuildDir lbi clbi "autogen" +-- NB: Look at 'checkForeignDeps' for where a simplified version of this +-- has been copy-pasted. 
+ +cppHeaderName :: String +cppHeaderName = "cabal_macros.h" + +{-# DEPRECATED autogenModuleName "Use autogenPathsModuleName instead" #-} +-- |The name of the auto-generated module associated with a package +autogenModuleName :: PackageDescription -> ModuleName +autogenModuleName = autogenPathsModuleName + +-- | The name of the auto-generated Paths_* module associated with a package +autogenPathsModuleName :: PackageDescription -> ModuleName +autogenPathsModuleName pkg_descr = + ModuleName.fromString $ + "Paths_" ++ map fixchar (display (packageName pkg_descr)) + where fixchar '-' = '_' + fixchar c = c + +haddockName :: PackageDescription -> FilePath +haddockName pkg_descr = display (packageName pkg_descr) <.> "haddock" + +-- ----------------------------------------------------------------------------- +-- Source File helper + +getLibSourceFiles :: Verbosity + -> LocalBuildInfo + -> Library + -> ComponentLocalBuildInfo + -> IO [(ModuleName.ModuleName, FilePath)] +getLibSourceFiles verbosity lbi lib clbi = getSourceFiles verbosity searchpaths modules + where + bi = libBuildInfo lib + modules = allLibModules lib clbi + searchpaths = componentBuildDir lbi clbi : hsSourceDirs bi ++ + [ autogenComponentModulesDir lbi clbi + , autogenPackageModulesDir lbi ] + +getExeSourceFiles :: Verbosity + -> LocalBuildInfo + -> Executable + -> ComponentLocalBuildInfo + -> IO [(ModuleName.ModuleName, FilePath)] +getExeSourceFiles verbosity lbi exe clbi = do + moduleFiles <- getSourceFiles verbosity searchpaths modules + srcMainPath <- findFile (hsSourceDirs bi) (modulePath exe) + return ((ModuleName.main, srcMainPath) : moduleFiles) + where + bi = buildInfo exe + modules = otherModules bi + searchpaths = autogenComponentModulesDir lbi clbi + : autogenPackageModulesDir lbi + : exeBuildDir lbi exe : hsSourceDirs bi + +getFLibSourceFiles :: Verbosity + -> LocalBuildInfo + -> ForeignLib + -> ComponentLocalBuildInfo + -> IO [(ModuleName.ModuleName, FilePath)] +getFLibSourceFiles verbosity 
lbi flib clbi = getSourceFiles verbosity searchpaths modules + where + bi = foreignLibBuildInfo flib + modules = otherModules bi + searchpaths = autogenComponentModulesDir lbi clbi + : autogenPackageModulesDir lbi + : flibBuildDir lbi flib : hsSourceDirs bi + +getSourceFiles :: Verbosity -> [FilePath] + -> [ModuleName.ModuleName] + -> IO [(ModuleName.ModuleName, FilePath)] +getSourceFiles verbosity dirs modules = flip traverse modules $ \m -> fmap ((,) m) $ + findFileWithExtension ["hs", "lhs", "hsig", "lhsig"] dirs (ModuleName.toFilePath m) + >>= maybe (notFound m) (return . normalise) + where + notFound module_ = die' verbosity $ "can't find source for module " ++ display module_ + +-- | The directory where we put build results for an executable +exeBuildDir :: LocalBuildInfo -> Executable -> FilePath +exeBuildDir lbi exe = buildDir lbi nm nm ++ "-tmp" + where + nm = unUnqualComponentName $ exeName exe + +-- | The directory where we put build results for a foreign library +flibBuildDir :: LocalBuildInfo -> ForeignLib -> FilePath +flibBuildDir lbi flib = buildDir lbi nm nm ++ "-tmp" + where + nm = unUnqualComponentName $ foreignLibName flib + +-- --------------------------------------------------------------------------- +-- Library file names + +-- | Create a library name for a static library from a given name. +-- Prepends 'lib' and appends the static library extension ('.a'). +mkGenericStaticLibName :: String -> String +mkGenericStaticLibName lib = "lib" ++ lib <.> "a" + +mkLibName :: UnitId -> String +mkLibName lib = mkGenericStaticLibName (getHSLibraryName lib) + +mkProfLibName :: UnitId -> String +mkProfLibName lib = mkGenericStaticLibName (getHSLibraryName lib ++ "_p") + +-- | Create a library name for a shared lirbary from a given name. +-- Prepends 'lib' and appends the '-' +-- as well as the shared library extension. 
+mkGenericSharedLibName :: Platform -> CompilerId -> String -> String +mkGenericSharedLibName platform (CompilerId compilerFlavor compilerVersion) lib + = mconcat [ "lib", lib, "-", comp <.> dllExtension platform ] + where comp = display compilerFlavor ++ display compilerVersion + +-- Implement proper name mangling for dynamical shared objects +-- libHS- +-- e.g. libHSbase-2.1-ghc6.6.1.so +mkSharedLibName :: Platform -> CompilerId -> UnitId -> String +mkSharedLibName platform comp lib + = mkGenericSharedLibName platform comp (getHSLibraryName lib) + +-- Static libs are named the same as shared libraries, only with +-- a different extension. +mkStaticLibName :: Platform -> CompilerId -> UnitId -> String +mkStaticLibName platform (CompilerId compilerFlavor compilerVersion) lib + = "lib" ++ getHSLibraryName lib ++ "-" ++ comp <.> staticLibExtension platform + where comp = display compilerFlavor ++ display compilerVersion + +-- ------------------------------------------------------------ +-- * Platform file extensions +-- ------------------------------------------------------------ + +-- | Default extension for executable files on the current platform. +-- (typically @\"\"@ on Unix and @\"exe\"@ on Windows or OS\/2) +exeExtension :: Platform -> String +exeExtension (Platform _arch os) = case os of + Windows -> "exe" + _ -> "" + +-- | Extension for object files. For GHC the extension is @\"o\"@. +objExtension :: String +objExtension = "o" + +-- | Extension for dynamically linked (or shared) libraries +-- (typically @\"so\"@ on Unix and @\"dll\"@ on Windows) +dllExtension :: Platform -> String +dllExtension (Platform _arch os)= case os of + Windows -> "dll" + OSX -> "dylib" + _ -> "so" + +-- | Extension for static libraries +-- +-- TODO: Here, as well as in dllExtension, it's really the target OS that we're +-- interested in, not the build OS. 
+staticLibExtension :: Platform -> String +staticLibExtension (Platform _arch os) = case os of + Windows -> "lib" + _ -> "a" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/BuildTarget.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/BuildTarget.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/BuildTarget.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/BuildTarget.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,1038 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.BuildTargets +-- Copyright : (c) Duncan Coutts 2012 +-- License : BSD-like +-- +-- Maintainer : duncan@community.haskell.org +-- +-- Handling for user-specified build targets +----------------------------------------------------------------------------- +module Distribution.Simple.BuildTarget ( + -- * Main interface + readTargetInfos, + readBuildTargets, -- in case you don't have LocalBuildInfo + + -- * Build targets + BuildTarget(..), + showBuildTarget, + QualLevel(..), + buildTargetComponentName, + + -- * Parsing user build targets + UserBuildTarget, + readUserBuildTargets, + showUserBuildTarget, + UserBuildTargetProblem(..), + reportUserBuildTargetProblems, + + -- * Resolving build targets + resolveBuildTargets, + BuildTargetProblem(..), + reportBuildTargetProblems, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.TargetInfo +import Distribution.Types.LocalBuildInfo +import Distribution.Types.ComponentRequestedSpec +import Distribution.Types.ForeignLib +import Distribution.Types.UnqualComponentName + +import Distribution.Package +import Distribution.PackageDescription +import 
Distribution.ModuleName +import Distribution.Simple.LocalBuildInfo +import Distribution.Text +import Distribution.Simple.Utils +import Distribution.Verbosity + +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.ReadP ( (+++), (<++) ) +import Distribution.ParseUtils ( readPToMaybe ) + +import Control.Monad ( msum ) +import Data.List ( stripPrefix, groupBy, partition ) +import Data.Either ( partitionEithers ) +import System.FilePath as FilePath + ( dropExtension, normalise, splitDirectories, joinPath, splitPath + , hasTrailingPathSeparator ) +import System.Directory ( doesFileExist, doesDirectoryExist ) +import qualified Data.Map as Map + +-- | Take a list of 'String' build targets, and parse and validate them +-- into actual 'TargetInfo's to be built/registered/whatever. +readTargetInfos :: Verbosity -> PackageDescription -> LocalBuildInfo -> [String] -> IO [TargetInfo] +readTargetInfos verbosity pkg_descr lbi args = do + build_targets <- readBuildTargets verbosity pkg_descr args + checkBuildTargets verbosity pkg_descr lbi build_targets + +-- ------------------------------------------------------------ +-- * User build targets +-- ------------------------------------------------------------ + +-- | Various ways that a user may specify a build target. +-- +data UserBuildTarget = + + -- | A target specified by a single name. This could be a component + -- module or file. + -- + -- > cabal build foo + -- > cabal build Data.Foo + -- > cabal build Data/Foo.hs Data/Foo.hsc + -- + UserBuildTargetSingle String + + -- | A target specified by a qualifier and name. This could be a component + -- name qualified by the component namespace kind, or a module or file + -- qualified by the component name. 
+ -- + -- > cabal build lib:foo exe:foo + -- > cabal build foo:Data.Foo + -- > cabal build foo:Data/Foo.hs + -- + | UserBuildTargetDouble String String + + -- | A fully qualified target, either a module or file qualified by a + -- component name with the component namespace kind. + -- + -- > cabal build lib:foo:Data/Foo.hs exe:foo:Data/Foo.hs + -- > cabal build lib:foo:Data.Foo exe:foo:Data.Foo + -- + | UserBuildTargetTriple String String String + deriving (Show, Eq, Ord) + + +-- ------------------------------------------------------------ +-- * Resolved build targets +-- ------------------------------------------------------------ + +-- | A fully resolved build target. +-- +data BuildTarget = + + -- | A specific component + -- + BuildTargetComponent ComponentName + + -- | A specific module within a specific component. + -- + | BuildTargetModule ComponentName ModuleName + + -- | A specific file within a specific component. + -- + | BuildTargetFile ComponentName FilePath + deriving (Eq, Show, Generic) + +instance Binary BuildTarget + +buildTargetComponentName :: BuildTarget -> ComponentName +buildTargetComponentName (BuildTargetComponent cn) = cn +buildTargetComponentName (BuildTargetModule cn _) = cn +buildTargetComponentName (BuildTargetFile cn _) = cn + +-- | Read a list of user-supplied build target strings and resolve them to +-- 'BuildTarget's according to a 'PackageDescription'. If there are problems +-- with any of the targets e.g. they don't exist or are misformatted, throw an +-- 'IOException'. 
+readBuildTargets :: Verbosity -> PackageDescription -> [String] -> IO [BuildTarget] +readBuildTargets verbosity pkg targetStrs = do + let (uproblems, utargets) = readUserBuildTargets targetStrs + reportUserBuildTargetProblems verbosity uproblems + + utargets' <- traverse checkTargetExistsAsFile utargets + + let (bproblems, btargets) = resolveBuildTargets pkg utargets' + reportBuildTargetProblems verbosity bproblems + + return btargets + +checkTargetExistsAsFile :: UserBuildTarget -> NoCallStackIO (UserBuildTarget, Bool) +checkTargetExistsAsFile t = do + fexists <- existsAsFile (fileComponentOfTarget t) + return (t, fexists) + + where + existsAsFile f = do + exists <- doesFileExist f + case splitPath f of + (d:_) | hasTrailingPathSeparator d -> doesDirectoryExist d + (d:_:_) | not exists -> doesDirectoryExist d + _ -> return exists + + fileComponentOfTarget (UserBuildTargetSingle s1) = s1 + fileComponentOfTarget (UserBuildTargetDouble _ s2) = s2 + fileComponentOfTarget (UserBuildTargetTriple _ _ s3) = s3 + + +-- ------------------------------------------------------------ +-- * Parsing user targets +-- ------------------------------------------------------------ + +readUserBuildTargets :: [String] -> ([UserBuildTargetProblem] + ,[UserBuildTarget]) +readUserBuildTargets = partitionEithers . 
map readUserBuildTarget + +readUserBuildTarget :: String -> Either UserBuildTargetProblem + UserBuildTarget +readUserBuildTarget targetstr = + case readPToMaybe parseTargetApprox targetstr of + Nothing -> Left (UserBuildTargetUnrecognised targetstr) + Just tgt -> Right tgt + + where + parseTargetApprox :: Parse.ReadP r UserBuildTarget + parseTargetApprox = + (do a <- tokenQ + return (UserBuildTargetSingle a)) + +++ (do a <- token + _ <- Parse.char ':' + b <- tokenQ + return (UserBuildTargetDouble a b)) + +++ (do a <- token + _ <- Parse.char ':' + b <- token + _ <- Parse.char ':' + c <- tokenQ + return (UserBuildTargetTriple a b c)) + + token = Parse.munch1 (\x -> not (isSpace x) && x /= ':') + tokenQ = parseHaskellString <++ token + parseHaskellString :: Parse.ReadP r String + parseHaskellString = Parse.readS_to_P reads + +data UserBuildTargetProblem + = UserBuildTargetUnrecognised String + deriving Show + +reportUserBuildTargetProblems :: Verbosity -> [UserBuildTargetProblem] -> IO () +reportUserBuildTargetProblems verbosity problems = do + case [ target | UserBuildTargetUnrecognised target <- problems ] of + [] -> return () + target -> + die' verbosity $ unlines + [ "Unrecognised build target '" ++ name ++ "'." + | name <- target ] + ++ "Examples:\n" + ++ " - build foo -- component name " + ++ "(library, executable, test-suite or benchmark)\n" + ++ " - build Data.Foo -- module name\n" + ++ " - build Data/Foo.hsc -- file name\n" + ++ " - build lib:foo exe:foo -- component qualified by kind\n" + ++ " - build foo:Data.Foo -- module qualified by component\n" + ++ " - build foo:Data/Foo.hsc -- file qualified by component" + +showUserBuildTarget :: UserBuildTarget -> String +showUserBuildTarget = intercalate ":" . 
getComponents + where + getComponents (UserBuildTargetSingle s1) = [s1] + getComponents (UserBuildTargetDouble s1 s2) = [s1,s2] + getComponents (UserBuildTargetTriple s1 s2 s3) = [s1,s2,s3] + +-- | Unless you use 'QL1', this function is PARTIAL; +-- use 'showBuildTarget' instead. +showBuildTarget' :: QualLevel -> PackageId -> BuildTarget -> String +showBuildTarget' ql pkgid bt = + showUserBuildTarget (renderBuildTarget ql bt pkgid) + +-- | Unambiguously render a 'BuildTarget', so that it can +-- be parsed in all situations. +showBuildTarget :: PackageId -> BuildTarget -> String +showBuildTarget pkgid t = + showBuildTarget' (qlBuildTarget t) pkgid t + where + qlBuildTarget BuildTargetComponent{} = QL2 + qlBuildTarget _ = QL3 + + +-- ------------------------------------------------------------ +-- * Resolving user targets to build targets +-- ------------------------------------------------------------ + +{- +stargets = + [ BuildTargetComponent (CExeName "foo") + , BuildTargetModule (CExeName "foo") (mkMn "Foo") + , BuildTargetModule (CExeName "tst") (mkMn "Foo") + ] + where + mkMn :: String -> ModuleName + mkMn = fromJust . simpleParse + +ex_pkgid :: PackageIdentifier +Just ex_pkgid = simpleParse "thelib" +-} + +-- | Given a bunch of user-specified targets, try to resolve what it is they +-- refer to. +-- +resolveBuildTargets :: PackageDescription + -> [(UserBuildTarget, Bool)] + -> ([BuildTargetProblem], [BuildTarget]) +resolveBuildTargets pkg = partitionEithers + . 
map (uncurry (resolveBuildTarget pkg)) + +resolveBuildTarget :: PackageDescription -> UserBuildTarget -> Bool + -> Either BuildTargetProblem BuildTarget +resolveBuildTarget pkg userTarget fexists = + case findMatch (matchBuildTarget pkg userTarget fexists) of + Unambiguous target -> Right target + Ambiguous targets -> Left (BuildTargetAmbiguous userTarget targets') + where targets' = disambiguateBuildTargets + (packageId pkg) + userTarget + targets + None errs -> Left (classifyMatchErrors errs) + + where + classifyMatchErrors errs + | not (null expected) = let (things, got:_) = unzip expected in + BuildTargetExpected userTarget things got + | not (null nosuch) = BuildTargetNoSuch userTarget nosuch + | otherwise = error $ "resolveBuildTarget: internal error in matching" + where + expected = [ (thing, got) | MatchErrorExpected thing got <- errs ] + nosuch = [ (thing, got) | MatchErrorNoSuch thing got <- errs ] + + +data BuildTargetProblem + = BuildTargetExpected UserBuildTarget [String] String + -- ^ [expected thing] (actually got) + | BuildTargetNoSuch UserBuildTarget [(String, String)] + -- ^ [(no such thing, actually got)] + | BuildTargetAmbiguous UserBuildTarget [(UserBuildTarget, BuildTarget)] + deriving Show + + +disambiguateBuildTargets :: PackageId -> UserBuildTarget -> [BuildTarget] + -> [(UserBuildTarget, BuildTarget)] +disambiguateBuildTargets pkgid original = + disambiguate (userTargetQualLevel original) + where + disambiguate ql ts + | null amb = unamb + | otherwise = unamb ++ disambiguate (succ ql) amb + where + (amb, unamb) = step ql ts + + userTargetQualLevel (UserBuildTargetSingle _ ) = QL1 + userTargetQualLevel (UserBuildTargetDouble _ _ ) = QL2 + userTargetQualLevel (UserBuildTargetTriple _ _ _) = QL3 + + step :: QualLevel -> [BuildTarget] + -> ([BuildTarget], [(UserBuildTarget, BuildTarget)]) + step ql = (\(amb, unamb) -> (map snd $ concat amb, concat unamb)) + . partition (\g -> length g > 1) + . groupBy (equating fst) + . 
sortBy (comparing fst) + . map (\t -> (renderBuildTarget ql t pkgid, t)) + +data QualLevel = QL1 | QL2 | QL3 + deriving (Enum, Show) + +renderBuildTarget :: QualLevel -> BuildTarget -> PackageId -> UserBuildTarget +renderBuildTarget ql target pkgid = + case ql of + QL1 -> UserBuildTargetSingle s1 where s1 = single target + QL2 -> UserBuildTargetDouble s1 s2 where (s1, s2) = double target + QL3 -> UserBuildTargetTriple s1 s2 s3 where (s1, s2, s3) = triple target + + where + single (BuildTargetComponent cn ) = dispCName cn + single (BuildTargetModule _ m) = display m + single (BuildTargetFile _ f) = f + + double (BuildTargetComponent cn ) = (dispKind cn, dispCName cn) + double (BuildTargetModule cn m) = (dispCName cn, display m) + double (BuildTargetFile cn f) = (dispCName cn, f) + + triple (BuildTargetComponent _ ) = error "triple BuildTargetComponent" + triple (BuildTargetModule cn m) = (dispKind cn, dispCName cn, display m) + triple (BuildTargetFile cn f) = (dispKind cn, dispCName cn, f) + + dispCName = componentStringName pkgid + dispKind = showComponentKindShort . componentKind + +reportBuildTargetProblems :: Verbosity -> [BuildTargetProblem] -> IO () +reportBuildTargetProblems verbosity problems = do + + case [ (t, e, g) | BuildTargetExpected t e g <- problems ] of + [] -> return () + targets -> + die' verbosity $ unlines + [ "Unrecognised build target '" ++ showUserBuildTarget target + ++ "'.\n" + ++ "Expected a " ++ intercalate " or " expected + ++ ", rather than '" ++ got ++ "'." + | (target, expected, got) <- targets ] + + case [ (t, e) | BuildTargetNoSuch t e <- problems ] of + [] -> return () + targets -> + die' verbosity $ unlines + [ "Unknown build target '" ++ showUserBuildTarget target + ++ "'.\nThere is no " + ++ intercalate " or " [ mungeThing thing ++ " '" ++ got ++ "'" + | (thing, got) <- nosuch ] ++ "." 
+ | (target, nosuch) <- targets ] + where + mungeThing "file" = "file target" + mungeThing thing = thing + + case [ (t, ts) | BuildTargetAmbiguous t ts <- problems ] of + [] -> return () + targets -> + die' verbosity $ unlines + [ "Ambiguous build target '" ++ showUserBuildTarget target + ++ "'. It could be:\n " + ++ unlines [ " "++ showUserBuildTarget ut ++ + " (" ++ showBuildTargetKind bt ++ ")" + | (ut, bt) <- amb ] + | (target, amb) <- targets ] + + where + showBuildTargetKind (BuildTargetComponent _ ) = "component" + showBuildTargetKind (BuildTargetModule _ _) = "module" + showBuildTargetKind (BuildTargetFile _ _) = "file" + + +---------------------------------- +-- Top level BuildTarget matcher +-- + +matchBuildTarget :: PackageDescription + -> UserBuildTarget -> Bool -> Match BuildTarget +matchBuildTarget pkg = \utarget fexists -> + case utarget of + UserBuildTargetSingle str1 -> + matchBuildTarget1 cinfo str1 fexists + + UserBuildTargetDouble str1 str2 -> + matchBuildTarget2 cinfo str1 str2 fexists + + UserBuildTargetTriple str1 str2 str3 -> + matchBuildTarget3 cinfo str1 str2 str3 fexists + where + cinfo = pkgComponentInfo pkg + +matchBuildTarget1 :: [ComponentInfo] -> String -> Bool -> Match BuildTarget +matchBuildTarget1 cinfo str1 fexists = + matchComponent1 cinfo str1 + `matchPlusShadowing` matchModule1 cinfo str1 + `matchPlusShadowing` matchFile1 cinfo str1 fexists + + +matchBuildTarget2 :: [ComponentInfo] -> String -> String -> Bool + -> Match BuildTarget +matchBuildTarget2 cinfo str1 str2 fexists = + matchComponent2 cinfo str1 str2 + `matchPlusShadowing` matchModule2 cinfo str1 str2 + `matchPlusShadowing` matchFile2 cinfo str1 str2 fexists + + +matchBuildTarget3 :: [ComponentInfo] -> String -> String -> String -> Bool + -> Match BuildTarget +matchBuildTarget3 cinfo str1 str2 str3 fexists = + matchModule3 cinfo str1 str2 str3 + `matchPlusShadowing` matchFile3 cinfo str1 str2 str3 fexists + + +data ComponentInfo = ComponentInfo { + cinfoName :: 
ComponentName, + cinfoStrName :: ComponentStringName, + cinfoSrcDirs :: [FilePath], + cinfoModules :: [ModuleName], + cinfoHsFiles :: [FilePath], -- other hs files (like main.hs) + cinfoAsmFiles:: [FilePath], + cinfoCmmFiles:: [FilePath], + cinfoCFiles :: [FilePath], + cinfoCxxFiles:: [FilePath], + cinfoJsFiles :: [FilePath] + } + +type ComponentStringName = String + +pkgComponentInfo :: PackageDescription -> [ComponentInfo] +pkgComponentInfo pkg = + [ ComponentInfo { + cinfoName = componentName c, + cinfoStrName = componentStringName pkg (componentName c), + cinfoSrcDirs = hsSourceDirs bi, + cinfoModules = componentModules c, + cinfoHsFiles = componentHsFiles c, + cinfoAsmFiles= asmSources bi, + cinfoCmmFiles= cmmSources bi, + cinfoCFiles = cSources bi, + cinfoCxxFiles= cxxSources bi, + cinfoJsFiles = jsSources bi + } + | c <- pkgComponents pkg + , let bi = componentBuildInfo c ] + +componentStringName :: Package pkg => pkg -> ComponentName -> ComponentStringName +componentStringName pkg CLibName = display (packageName pkg) +componentStringName _ (CSubLibName name) = unUnqualComponentName name +componentStringName _ (CFLibName name) = unUnqualComponentName name +componentStringName _ (CExeName name) = unUnqualComponentName name +componentStringName _ (CTestName name) = unUnqualComponentName name +componentStringName _ (CBenchName name) = unUnqualComponentName name + +componentModules :: Component -> [ModuleName] +-- TODO: Use of 'explicitLibModules' here is a bit wrong: +-- a user could very well ask to build a specific signature +-- that was inherited from other packages. To fix this +-- we have to plumb 'LocalBuildInfo' through this code. +-- Fortunately, this is only used by 'pkgComponentInfo' +-- Please don't export this function unless you plan on fixing +-- this. 
+componentModules (CLib lib) = explicitLibModules lib +componentModules (CFLib flib) = foreignLibModules flib +componentModules (CExe exe) = exeModules exe +componentModules (CTest test) = testModules test +componentModules (CBench bench) = benchmarkModules bench + +componentHsFiles :: Component -> [FilePath] +componentHsFiles (CExe exe) = [modulePath exe] +componentHsFiles (CTest TestSuite { + testInterface = TestSuiteExeV10 _ mainfile + }) = [mainfile] +componentHsFiles (CBench Benchmark { + benchmarkInterface = BenchmarkExeV10 _ mainfile + }) = [mainfile] +componentHsFiles _ = [] + +{- +ex_cs :: [ComponentInfo] +ex_cs = + [ (mkC (CExeName "foo") ["src1", "src1/src2"] ["Foo", "Src2.Bar", "Bar"]) + , (mkC (CExeName "tst") ["src1", "test"] ["Foo"]) + ] + where + mkC n ds ms = ComponentInfo n (componentStringName pkgid n) ds (map mkMn ms) + mkMn :: String -> ModuleName + mkMn = fromJust . simpleParse + pkgid :: PackageIdentifier + Just pkgid = simpleParse "thelib" +-} + +------------------------------ +-- Matching component kinds +-- + +data ComponentKind = LibKind | FLibKind | ExeKind | TestKind | BenchKind + deriving (Eq, Ord, Show) + +componentKind :: ComponentName -> ComponentKind +componentKind CLibName = LibKind +componentKind (CSubLibName _) = LibKind +componentKind (CFLibName _) = FLibKind +componentKind (CExeName _) = ExeKind +componentKind (CTestName _) = TestKind +componentKind (CBenchName _) = BenchKind + +cinfoKind :: ComponentInfo -> ComponentKind +cinfoKind = componentKind . 
cinfoName + +matchComponentKind :: String -> Match ComponentKind +matchComponentKind s + | s `elem` ["lib", "library"] = return' LibKind + | s `elem` ["flib", "foreign-lib", "foreign-library"] = return' FLibKind + | s `elem` ["exe", "executable"] = return' ExeKind + | s `elem` ["tst", "test", "test-suite"] = return' TestKind + | s `elem` ["bench", "benchmark"] = return' BenchKind + | otherwise = matchErrorExpected "component kind" s + where + return' ck = increaseConfidence >> return ck + +showComponentKind :: ComponentKind -> String +showComponentKind LibKind = "library" +showComponentKind FLibKind = "foreign-library" +showComponentKind ExeKind = "executable" +showComponentKind TestKind = "test-suite" +showComponentKind BenchKind = "benchmark" + +showComponentKindShort :: ComponentKind -> String +showComponentKindShort LibKind = "lib" +showComponentKindShort FLibKind = "flib" +showComponentKindShort ExeKind = "exe" +showComponentKindShort TestKind = "test" +showComponentKindShort BenchKind = "bench" + +------------------------------ +-- Matching component targets +-- + +matchComponent1 :: [ComponentInfo] -> String -> Match BuildTarget +matchComponent1 cs = \str1 -> do + guardComponentName str1 + c <- matchComponentName cs str1 + return (BuildTargetComponent (cinfoName c)) + +matchComponent2 :: [ComponentInfo] -> String -> String -> Match BuildTarget +matchComponent2 cs = \str1 str2 -> do + ckind <- matchComponentKind str1 + guardComponentName str2 + c <- matchComponentKindAndName cs ckind str2 + return (BuildTargetComponent (cinfoName c)) + +-- utils: + +guardComponentName :: String -> Match () +guardComponentName s + | all validComponentChar s + && not (null s) = increaseConfidence + | otherwise = matchErrorExpected "component name" s + where + validComponentChar c = isAlphaNum c || c == '.' 
+ || c == '_' || c == '-' || c == '\'' + +matchComponentName :: [ComponentInfo] -> String -> Match ComponentInfo +matchComponentName cs str = + orNoSuchThing "component" str + $ increaseConfidenceFor + $ matchInexactly caseFold + [ (cinfoStrName c, c) | c <- cs ] + str + +matchComponentKindAndName :: [ComponentInfo] -> ComponentKind -> String + -> Match ComponentInfo +matchComponentKindAndName cs ckind str = + orNoSuchThing (showComponentKind ckind ++ " component") str + $ increaseConfidenceFor + $ matchInexactly (\(ck, cn) -> (ck, caseFold cn)) + [ ((cinfoKind c, cinfoStrName c), c) | c <- cs ] + (ckind, str) + + +------------------------------ +-- Matching module targets +-- + +matchModule1 :: [ComponentInfo] -> String -> Match BuildTarget +matchModule1 cs = \str1 -> do + guardModuleName str1 + nubMatchErrors $ do + c <- tryEach cs + let ms = cinfoModules c + m <- matchModuleName ms str1 + return (BuildTargetModule (cinfoName c) m) + +matchModule2 :: [ComponentInfo] -> String -> String -> Match BuildTarget +matchModule2 cs = \str1 str2 -> do + guardComponentName str1 + guardModuleName str2 + c <- matchComponentName cs str1 + let ms = cinfoModules c + m <- matchModuleName ms str2 + return (BuildTargetModule (cinfoName c) m) + +matchModule3 :: [ComponentInfo] -> String -> String -> String + -> Match BuildTarget +matchModule3 cs str1 str2 str3 = do + ckind <- matchComponentKind str1 + guardComponentName str2 + c <- matchComponentKindAndName cs ckind str2 + guardModuleName str3 + let ms = cinfoModules c + m <- matchModuleName ms str3 + return (BuildTargetModule (cinfoName c) m) + +-- utils: + +guardModuleName :: String -> Match () +guardModuleName s + | all validModuleChar s + && not (null s) = increaseConfidence + | otherwise = matchErrorExpected "module name" s + where + validModuleChar c = isAlphaNum c || c == '.' 
|| c == '_' || c == '\'' + +matchModuleName :: [ModuleName] -> String -> Match ModuleName +matchModuleName ms str = + orNoSuchThing "module" str + $ increaseConfidenceFor + $ matchInexactly caseFold + [ (display m, m) + | m <- ms ] + str + + +------------------------------ +-- Matching file targets +-- + +matchFile1 :: [ComponentInfo] -> String -> Bool -> Match BuildTarget +matchFile1 cs str1 exists = + nubMatchErrors $ do + c <- tryEach cs + filepath <- matchComponentFile c str1 exists + return (BuildTargetFile (cinfoName c) filepath) + + +matchFile2 :: [ComponentInfo] -> String -> String -> Bool -> Match BuildTarget +matchFile2 cs str1 str2 exists = do + guardComponentName str1 + c <- matchComponentName cs str1 + filepath <- matchComponentFile c str2 exists + return (BuildTargetFile (cinfoName c) filepath) + + +matchFile3 :: [ComponentInfo] -> String -> String -> String -> Bool + -> Match BuildTarget +matchFile3 cs str1 str2 str3 exists = do + ckind <- matchComponentKind str1 + guardComponentName str2 + c <- matchComponentKindAndName cs ckind str2 + filepath <- matchComponentFile c str3 exists + return (BuildTargetFile (cinfoName c) filepath) + + +matchComponentFile :: ComponentInfo -> String -> Bool -> Match FilePath +matchComponentFile c str fexists = + expecting "file" str $ + matchPlus + (matchFileExists str fexists) + (matchPlusShadowing + (msum [ matchModuleFileRooted dirs ms str + , matchOtherFileRooted dirs hsFiles str ]) + (msum [ matchModuleFileUnrooted ms str + , matchOtherFileUnrooted hsFiles str + , matchOtherFileUnrooted cFiles str + , matchOtherFileUnrooted jsFiles str ])) + where + dirs = cinfoSrcDirs c + ms = cinfoModules c + hsFiles = cinfoHsFiles c + cFiles = cinfoCFiles c + jsFiles = cinfoJsFiles c + + +-- utils + +matchFileExists :: FilePath -> Bool -> Match a +matchFileExists _ False = mzero +matchFileExists fname True = do increaseConfidence + matchErrorNoSuch "file" fname + +matchModuleFileUnrooted :: [ModuleName] -> String -> Match 
FilePath +matchModuleFileUnrooted ms str = do + let filepath = normalise str + _ <- matchModuleFileStem ms filepath + return filepath + +matchModuleFileRooted :: [FilePath] -> [ModuleName] -> String -> Match FilePath +matchModuleFileRooted dirs ms str = nubMatches $ do + let filepath = normalise str + filepath' <- matchDirectoryPrefix dirs filepath + _ <- matchModuleFileStem ms filepath' + return filepath + +matchModuleFileStem :: [ModuleName] -> FilePath -> Match ModuleName +matchModuleFileStem ms = + increaseConfidenceFor + . matchInexactly caseFold + [ (toFilePath m, m) | m <- ms ] + . dropExtension + +matchOtherFileRooted :: [FilePath] -> [FilePath] -> FilePath -> Match FilePath +matchOtherFileRooted dirs fs str = do + let filepath = normalise str + filepath' <- matchDirectoryPrefix dirs filepath + _ <- matchFile fs filepath' + return filepath + +matchOtherFileUnrooted :: [FilePath] -> FilePath -> Match FilePath +matchOtherFileUnrooted fs str = do + let filepath = normalise str + _ <- matchFile fs filepath + return filepath + +matchFile :: [FilePath] -> FilePath -> Match FilePath +matchFile fs = increaseConfidenceFor + . matchInexactly caseFold [ (f, f) | f <- fs ] + +matchDirectoryPrefix :: [FilePath] -> FilePath -> Match FilePath +matchDirectoryPrefix dirs filepath = + exactMatches $ + catMaybes + [ stripDirectory (normalise dir) filepath | dir <- dirs ] + where + stripDirectory :: FilePath -> FilePath -> Maybe FilePath + stripDirectory dir fp = + joinPath `fmap` stripPrefix (splitDirectories dir) (splitDirectories fp) + + +------------------------------ +-- Matching monad +-- + +-- | A matcher embodies a way to match some input as being some recognised +-- value. In particular it deals with multiple and ambiguous matches. +-- +-- There are various matcher primitives ('matchExactly', 'matchInexactly'), +-- ways to combine matchers ('ambiguousWith', 'shadows') and finally we can +-- run a matcher against an input using 'findMatch'. 
+-- + +data Match a = NoMatch Confidence [MatchError] + | ExactMatch Confidence [a] + | InexactMatch Confidence [a] + deriving Show + +type Confidence = Int + +data MatchError = MatchErrorExpected String String + | MatchErrorNoSuch String String + deriving (Show, Eq) + + +instance Alternative Match where + empty = mzero + (<|>) = mplus + +instance MonadPlus Match where + mzero = matchZero + mplus = matchPlus + +matchZero :: Match a +matchZero = NoMatch 0 [] + +-- | Combine two matchers. Exact matches are used over inexact matches +-- but if we have multiple exact, or inexact then the we collect all the +-- ambiguous matches. +-- +matchPlus :: Match a -> Match a -> Match a +matchPlus (ExactMatch d1 xs) (ExactMatch d2 xs') = + ExactMatch (max d1 d2) (xs ++ xs') +matchPlus a@(ExactMatch _ _ ) (InexactMatch _ _ ) = a +matchPlus a@(ExactMatch _ _ ) (NoMatch _ _ ) = a +matchPlus (InexactMatch _ _ ) b@(ExactMatch _ _ ) = b +matchPlus (InexactMatch d1 xs) (InexactMatch d2 xs') = + InexactMatch (max d1 d2) (xs ++ xs') +matchPlus a@(InexactMatch _ _ ) (NoMatch _ _ ) = a +matchPlus (NoMatch _ _ ) b@(ExactMatch _ _ ) = b +matchPlus (NoMatch _ _ ) b@(InexactMatch _ _ ) = b +matchPlus a@(NoMatch d1 ms) b@(NoMatch d2 ms') + | d1 > d2 = a + | d1 < d2 = b + | otherwise = NoMatch d1 (ms ++ ms') + +-- | Combine two matchers. This is similar to 'ambiguousWith' with the +-- difference that an exact match from the left matcher shadows any exact +-- match on the right. Inexact matches are still collected however. 
+-- +matchPlusShadowing :: Match a -> Match a -> Match a +matchPlusShadowing a@(ExactMatch _ _) (ExactMatch _ _) = a +matchPlusShadowing a b = matchPlus a b + +instance Functor Match where + fmap _ (NoMatch d ms) = NoMatch d ms + fmap f (ExactMatch d xs) = ExactMatch d (fmap f xs) + fmap f (InexactMatch d xs) = InexactMatch d (fmap f xs) + +instance Applicative Match where + pure a = ExactMatch 0 [a] + (<*>) = ap + +instance Monad Match where + return = pure + + NoMatch d ms >>= _ = NoMatch d ms + ExactMatch d xs >>= f = addDepth d + $ foldr matchPlus matchZero (map f xs) + InexactMatch d xs >>= f = addDepth d . forceInexact + $ foldr matchPlus matchZero (map f xs) + +addDepth :: Confidence -> Match a -> Match a +addDepth d' (NoMatch d msgs) = NoMatch (d'+d) msgs +addDepth d' (ExactMatch d xs) = ExactMatch (d'+d) xs +addDepth d' (InexactMatch d xs) = InexactMatch (d'+d) xs + +forceInexact :: Match a -> Match a +forceInexact (ExactMatch d ys) = InexactMatch d ys +forceInexact m = m + +------------------------------ +-- Various match primitives +-- + +matchErrorExpected, matchErrorNoSuch :: String -> String -> Match a +matchErrorExpected thing got = NoMatch 0 [MatchErrorExpected thing got] +matchErrorNoSuch thing got = NoMatch 0 [MatchErrorNoSuch thing got] + +expecting :: String -> String -> Match a -> Match a +expecting thing got (NoMatch 0 _) = matchErrorExpected thing got +expecting _ _ m = m + +orNoSuchThing :: String -> String -> Match a -> Match a +orNoSuchThing thing got (NoMatch 0 _) = matchErrorNoSuch thing got +orNoSuchThing _ _ m = m + +increaseConfidence :: Match () +increaseConfidence = ExactMatch 1 [()] + +increaseConfidenceFor :: Match a -> Match a +increaseConfidenceFor m = m >>= \r -> increaseConfidence >> return r + +nubMatches :: Eq a => Match a -> Match a +nubMatches (NoMatch d msgs) = NoMatch d msgs +nubMatches (ExactMatch d xs) = ExactMatch d (nub xs) +nubMatches (InexactMatch d xs) = InexactMatch d (nub xs) + +nubMatchErrors :: Match a -> 
Match a +nubMatchErrors (NoMatch d msgs) = NoMatch d (nub msgs) +nubMatchErrors (ExactMatch d xs) = ExactMatch d xs +nubMatchErrors (InexactMatch d xs) = InexactMatch d xs + +-- | Lift a list of matches to an exact match. +-- +exactMatches, inexactMatches :: [a] -> Match a + +exactMatches [] = matchZero +exactMatches xs = ExactMatch 0 xs + +inexactMatches [] = matchZero +inexactMatches xs = InexactMatch 0 xs + +tryEach :: [a] -> Match a +tryEach = exactMatches + + +------------------------------ +-- Top level match runner +-- + +-- | Given a matcher and a key to look up, use the matcher to find all the +-- possible matches. There may be 'None', a single 'Unambiguous' match or +-- you may have an 'Ambiguous' match with several possibilities. +-- +findMatch :: Eq b => Match b -> MaybeAmbiguous b +findMatch match = + case match of + NoMatch _ msgs -> None (nub msgs) + ExactMatch _ xs -> checkAmbiguous xs + InexactMatch _ xs -> checkAmbiguous xs + where + checkAmbiguous xs = case nub xs of + [x] -> Unambiguous x + xs' -> Ambiguous xs' + +data MaybeAmbiguous a = None [MatchError] | Unambiguous a | Ambiguous [a] + deriving Show + + +------------------------------ +-- Basic matchers +-- + +{- +-- | A primitive matcher that looks up a value in a finite 'Map'. The +-- value must match exactly. +-- +matchExactly :: forall a b. Ord a => [(a, b)] -> (a -> Match b) +matchExactly xs = + \x -> case Map.lookup x m of + Nothing -> matchZero + Just ys -> ExactMatch 0 ys + where + m :: Ord a => Map a [b] + m = Map.fromListWith (++) [ (k,[x]) | (k,x) <- xs ] +-} + +-- | A primitive matcher that looks up a value in a finite 'Map'. It checks +-- for an exact or inexact match. We get an inexact match if the match +-- is not exact, but the canonical forms match. It takes a canonicalisation +-- function for this purpose. 
+-- +-- So for example if we used string case fold as the canonicalisation +-- function, then we would get case insensitive matching (but it will still +-- report an exact match when the case matches too). +-- +matchInexactly :: (Ord a, Ord a') => + (a -> a') -> + [(a, b)] -> (a -> Match b) +matchInexactly cannonicalise xs = + \x -> case Map.lookup x m of + Just ys -> exactMatches ys + Nothing -> case Map.lookup (cannonicalise x) m' of + Just ys -> inexactMatches ys + Nothing -> matchZero + where + m = Map.fromListWith (++) [ (k,[x]) | (k,x) <- xs ] + + -- the map of canonicalised keys to groups of inexact matches + m' = Map.mapKeysWith (++) cannonicalise m + + + +------------------------------ +-- Utils +-- + +caseFold :: String -> String +caseFold = lowercase + + +-- | Check that the given build targets are valid in the current context. +-- +-- Also swizzle into a more convenient form. +-- +checkBuildTargets :: Verbosity -> PackageDescription -> LocalBuildInfo -> [BuildTarget] + -> IO [TargetInfo] +checkBuildTargets _ pkg_descr lbi [] = + return (allTargetsInBuildOrder' pkg_descr lbi) + +checkBuildTargets verbosity pkg_descr lbi targets = do + + let (enabled, disabled) = + partitionEithers + [ case componentDisabledReason (componentEnabledSpec lbi) comp of + Nothing -> Left target' + Just reason -> Right (cname, reason) + | target <- targets + , let target'@(cname,_) = swizzleTarget target + , let comp = getComponent pkg_descr cname ] + + case disabled of + [] -> return () + ((cname,reason):_) -> die' verbosity $ formatReason (showComponentName cname) reason + + for_ [ (c, t) | (c, Just t) <- enabled ] $ \(c, t) -> + warn verbosity $ "Ignoring '" ++ either display id t ++ ". The whole " + ++ showComponentName c ++ " will be processed. 
(Support for " + ++ "module and file targets has not been implemented yet.)" + + -- Pick out the actual CLBIs for each of these cnames + enabled' <- for enabled $ \(cname, _) -> do + case componentNameTargets' pkg_descr lbi cname of + [] -> error "checkBuildTargets: nothing enabled" + [target] -> return target + _targets -> error "checkBuildTargets: multiple copies enabled" + + return enabled' + + where + swizzleTarget (BuildTargetComponent c) = (c, Nothing) + swizzleTarget (BuildTargetModule c m) = (c, Just (Left m)) + swizzleTarget (BuildTargetFile c f) = (c, Just (Right f)) + + formatReason cn DisabledComponent = + "Cannot process the " ++ cn ++ " because the component is marked " + ++ "as disabled in the .cabal file." + formatReason cn DisabledAllTests = + "Cannot process the " ++ cn ++ " because test suites are not " + ++ "enabled. Run configure with the flag --enable-tests" + formatReason cn DisabledAllBenchmarks = + "Cannot process the " ++ cn ++ " because benchmarks are not " + ++ "enabled. Re-run configure with the flag --enable-benchmarks" + formatReason cn (DisabledAllButOne cn') = + "Cannot process the " ++ cn ++ " because this package was " + ++ "configured only to build " ++ cn' ++ ". Re-run configure " + ++ "with the argument " ++ cn diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/BuildToolDepends.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/BuildToolDepends.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/BuildToolDepends.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/BuildToolDepends.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,96 @@ +-- | +-- +-- This modules provides functions for working with both the legacy +-- "build-tools" field, and its replacement, "build-tool-depends". 
Prefer using +-- the functions contained to access those fields directly. +module Distribution.Simple.BuildToolDepends where + +import Prelude () +import Distribution.Compat.Prelude + +import qualified Data.Map as Map + +import Distribution.Package +import Distribution.PackageDescription +import Distribution.Types.ExeDependency +import Distribution.Types.LegacyExeDependency +import Distribution.Types.UnqualComponentName + +-- | Desugar a "build-tools" entry into proper a executable dependency if +-- possible. +-- +-- An entry can be so desguared in two cases: +-- +-- 1. The name in build-tools matches a locally defined executable. The +-- executable dependency produced is on that exe in the current package. +-- +-- 2. The name in build-tools matches a hard-coded set of known tools. For now, +-- the executable dependency produced is one an executable in a package of +-- the same, but the hard-coding could just as well be per-key. +-- +-- The first cases matches first. +desugarBuildTool :: PackageDescription + -> LegacyExeDependency + -> Maybe ExeDependency +desugarBuildTool pkg led = + if foundLocal + then Just $ ExeDependency (packageName pkg) toolName reqVer + else Map.lookup name whiteMap + where + LegacyExeDependency name reqVer = led + toolName = mkUnqualComponentName name + foundLocal = toolName `elem` map exeName (executables pkg) + whitelist = [ "hscolour", "haddock", "happy", "alex", "hsc2hs", "c2hs" + , "cpphs", "greencard", "hspec-discover" + ] + whiteMap = Map.fromList $ flip map whitelist $ \n -> + (n, ExeDependency (mkPackageName n) (mkUnqualComponentName n) reqVer) + +-- | Get everything from "build-tool-depends", along with entries from +-- "build-tools" that we know how to desugar. +-- +-- This should almost always be used instead of just accessing the +-- `buildToolDepends` field directly. 
+getAllToolDependencies :: PackageDescription + -> BuildInfo + -> [ExeDependency] +getAllToolDependencies pkg bi = + buildToolDepends bi ++ mapMaybe (desugarBuildTool pkg) (buildTools bi) + +-- | Does the given executable dependency map to this current package? +-- +-- This is a tiny function, but used in a number of places. +-- +-- This function is only sound to call on `BuildInfo`s from the given package +-- description. This is because it just filters the package names of each +-- dependency, and does not check whether version bounds in fact exclude the +-- current package, or the referenced components in fact exist in the current +-- package. +-- +-- This is OK because when a package is loaded, it is checked (in +-- `Distribution.Package.Check`) that dependencies matching internal components +-- do indeed have version bounds accepting the current package, and any +-- depended-on component in the current package actually exists. In fact this +-- check is performed by gathering the internal tool dependencies of each +-- component of the package according to this module, and ensuring those +-- properties on each so-gathered dependency. +-- +-- version bounds and components of the package are unchecked. This is because +-- we sanitize exe deps so that the matching name implies these other +-- conditions. +isInternal :: PackageDescription -> ExeDependency -> Bool +isInternal pkg (ExeDependency n _ _) = n == packageName pkg + + +-- | Get internal "build-tool-depends", along with internal "build-tools" +-- +-- This is a tiny function, but used in a number of places. The same +-- restrictions that apply to `isInternal` also apply to this function. 
+getAllInternalToolDependencies :: PackageDescription + -> BuildInfo + -> [UnqualComponentName] +getAllInternalToolDependencies pkg bi = + [ toolname + | dep@(ExeDependency _ toolname _) <- getAllToolDependencies pkg bi + , isInternal pkg dep + ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/CCompiler.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/CCompiler.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/CCompiler.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/CCompiler.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,123 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.CCompiler +-- Copyright : 2011, Dan Knapp +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This simple package provides types and functions for interacting with +-- C compilers. Currently it's just a type enumerating extant C-like +-- languages, which we call dialects. + +{- +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of Isaac Jones nor the names of other + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -} + +module Distribution.Simple.CCompiler ( + CDialect(..), + cSourceExtensions, + cDialectFilenameExtension, + filenameCDialect + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import System.FilePath + ( takeExtension ) + + +-- | Represents a dialect of C. The Monoid instance expresses backward +-- compatibility, in the sense that 'mappend a b' is the least inclusive +-- dialect which both 'a' and 'b' can be correctly interpreted as. +data CDialect = C + | ObjectiveC + | CPlusPlus + | ObjectiveCPlusPlus + deriving (Eq, Show) + +instance Monoid CDialect where + mempty = C + mappend = (<>) + +instance Semigroup CDialect where + C <> anything = anything + ObjectiveC <> CPlusPlus = ObjectiveCPlusPlus + CPlusPlus <> ObjectiveC = ObjectiveCPlusPlus + _ <> ObjectiveCPlusPlus = ObjectiveCPlusPlus + ObjectiveC <> _ = ObjectiveC + CPlusPlus <> _ = CPlusPlus + ObjectiveCPlusPlus <> _ = ObjectiveCPlusPlus + +-- | A list of all file extensions which are recognized as possibly containing +-- some dialect of C code. Note that this list is only for source files, +-- not for header files. 
+cSourceExtensions :: [String] +cSourceExtensions = ["c", "i", "ii", "m", "mi", "mm", "M", "mii", "cc", "cp", + "cxx", "cpp", "CPP", "c++", "C"] + + +-- | Takes a dialect of C and whether code is intended to be passed through +-- the preprocessor, and returns a filename extension for containing that +-- code. +cDialectFilenameExtension :: CDialect -> Bool -> String +cDialectFilenameExtension C True = "c" +cDialectFilenameExtension C False = "i" +cDialectFilenameExtension ObjectiveC True = "m" +cDialectFilenameExtension ObjectiveC False = "mi" +cDialectFilenameExtension CPlusPlus True = "cpp" +cDialectFilenameExtension CPlusPlus False = "ii" +cDialectFilenameExtension ObjectiveCPlusPlus True = "mm" +cDialectFilenameExtension ObjectiveCPlusPlus False = "mii" + + +-- | Infers from a filename's extension the dialect of C which it contains, +-- and whether it is intended to be passed through the preprocessor. +filenameCDialect :: String -> Maybe (CDialect, Bool) +filenameCDialect filename = do + extension <- case takeExtension filename of + '.':ext -> Just ext + _ -> Nothing + case extension of + "c" -> return (C, True) + "i" -> return (C, False) + "ii" -> return (CPlusPlus, False) + "m" -> return (ObjectiveC, True) + "mi" -> return (ObjectiveC, False) + "mm" -> return (ObjectiveCPlusPlus, True) + "M" -> return (ObjectiveCPlusPlus, True) + "mii" -> return (ObjectiveCPlusPlus, False) + "cc" -> return (CPlusPlus, True) + "cp" -> return (CPlusPlus, True) + "cxx" -> return (CPlusPlus, True) + "cpp" -> return (CPlusPlus, True) + "CPP" -> return (CPlusPlus, True) + "c++" -> return (CPlusPlus, True) + "C" -> return (CPlusPlus, True) + _ -> Nothing diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Command.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Command.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Command.hs 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Command.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,621 @@ +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Command +-- Copyright : Duncan Coutts 2007 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : non-portable (ExistentialQuantification) +-- +-- This is to do with command line handling. The Cabal command line is +-- organised into a number of named sub-commands (much like darcs). The +-- 'CommandUI' abstraction represents one of these sub-commands, with a name, +-- description, a set of flags. Commands can be associated with actions and +-- run. It handles some common stuff automatically, like the @--help@ and +-- command line completion flags. It is designed to allow other tools make +-- derived commands. This feature is used heavily in @cabal-install@. 
+ +module Distribution.Simple.Command ( + + -- * Command interface + CommandUI(..), + commandShowOptions, + CommandParse(..), + commandParseArgs, + getNormalCommandDescriptions, + helpCommandUI, + + -- ** Constructing commands + ShowOrParseArgs(..), + usageDefault, + usageAlternatives, + mkCommandUI, + hiddenCommand, + + -- ** Associating actions with commands + Command, + commandAddAction, + noExtraFlags, + + -- ** Building lists of commands + CommandType(..), + CommandSpec(..), + commandFromSpec, + + -- ** Running commands + commandsRun, + +-- * Option Fields + OptionField(..), Name, + +-- ** Constructing Option Fields + option, multiOption, + +-- ** Liftings & Projections + liftOption, viewAsFieldDescr, + +-- * Option Descriptions + OptDescr(..), Description, SFlags, LFlags, OptFlags, ArgPlaceHolder, + +-- ** OptDescr 'smart' constructors + MkOptDescr, + reqArg, reqArg', optArg, optArg', noArg, + boolOpt, boolOpt', choiceOpt, choiceOptFromEnum + + ) where + +import Prelude () +import Distribution.Compat.Prelude hiding (get) + +import qualified Distribution.GetOpt as GetOpt +import Distribution.Text +import Distribution.ParseUtils +import Distribution.ReadE +import Distribution.Simple.Utils + +import Text.PrettyPrint ( punctuate, cat, comma, text ) +import Text.PrettyPrint as PP ( empty ) + +data CommandUI flags = CommandUI { + -- | The name of the command as it would be entered on the command line. + -- For example @\"build\"@. + commandName :: String, + -- | A short, one line description of the command to use in help texts. + commandSynopsis :: String, + -- | A function that maps a program name to a usage summary for this + -- command. + commandUsage :: String -> String, + -- | Additional explanation of the command to use in help texts. 
+ commandDescription :: Maybe (String -> String), + -- | Post-Usage notes and examples in help texts + commandNotes :: Maybe (String -> String), + -- | Initial \/ empty flags + commandDefaultFlags :: flags, + -- | All the Option fields for this command + commandOptions :: ShowOrParseArgs -> [OptionField flags] + } + +data ShowOrParseArgs = ShowArgs | ParseArgs +type Name = String +type Description = String + +-- | We usually have a data type for storing configuration values, where +-- every field stores a configuration option, and the user sets +-- the value either via command line flags or a configuration file. +-- An individual OptionField models such a field, and we usually +-- build a list of options associated to a configuration data type. +data OptionField a = OptionField { + optionName :: Name, + optionDescr :: [OptDescr a] } + +-- | An OptionField takes one or more OptDescrs, describing the command line +-- interface for the field. +data OptDescr a = ReqArg Description OptFlags ArgPlaceHolder + (ReadE (a->a)) (a -> [String]) + + | OptArg Description OptFlags ArgPlaceHolder + (ReadE (a->a)) (a->a) (a -> [Maybe String]) + + | ChoiceOpt [(Description, OptFlags, a->a, a -> Bool)] + + | BoolOpt Description OptFlags{-True-} OptFlags{-False-} + (Bool -> a -> a) (a-> Maybe Bool) + +-- | Short command line option strings +type SFlags = [Char] +-- | Long command line option strings +type LFlags = [String] +type OptFlags = (SFlags,LFlags) +type ArgPlaceHolder = String + + +-- | Create an option taking a single OptDescr. +-- No explicit Name is given for the Option, the name is the first LFlag given. +option :: SFlags -> LFlags -> Description -> get -> set -> MkOptDescr get set a + -> OptionField a +option sf lf@(n:_) d get set arg = OptionField n [arg sf lf d get set] +option _ _ _ _ _ _ = error $ "Distribution.command.option: " + ++ "An OptionField must have at least one LFlag" + +-- | Create an option taking several OptDescrs. 
+-- You will have to give the flags and description individually to the +-- OptDescr constructor. +multiOption :: Name -> get -> set + -> [get -> set -> OptDescr a] -- ^MkOptDescr constructors partially + -- applied to flags and description. + -> OptionField a +multiOption n get set args = OptionField n [arg get set | arg <- args] + +type MkOptDescr get set a = SFlags -> LFlags -> Description -> get -> set + -> OptDescr a + +-- | Create a string-valued command line interface. +reqArg :: Monoid b => ArgPlaceHolder -> ReadE b -> (b -> [String]) + -> MkOptDescr (a -> b) (b -> a -> a) a +reqArg ad mkflag showflag sf lf d get set = + ReqArg d (sf,lf) ad (fmap (\a b -> set (get b `mappend` a) b) mkflag) + (showflag . get) + +-- | Create a string-valued command line interface with a default value. +optArg :: Monoid b => ArgPlaceHolder -> ReadE b -> b -> (b -> [Maybe String]) + -> MkOptDescr (a -> b) (b -> a -> a) a +optArg ad mkflag def showflag sf lf d get set = + OptArg d (sf,lf) ad (fmap (\a b -> set (get b `mappend` a) b) mkflag) + (\b -> set (get b `mappend` def) b) + (showflag . get) + +-- | (String -> a) variant of "reqArg" +reqArg' :: Monoid b => ArgPlaceHolder -> (String -> b) -> (b -> [String]) + -> MkOptDescr (a -> b) (b -> a -> a) a +reqArg' ad mkflag showflag = + reqArg ad (succeedReadE mkflag) showflag + +-- | (String -> a) variant of "optArg" +optArg' :: Monoid b => ArgPlaceHolder -> (Maybe String -> b) + -> (b -> [Maybe String]) + -> MkOptDescr (a -> b) (b -> a -> a) a +optArg' ad mkflag showflag = + optArg ad (succeedReadE (mkflag . 
Just)) def showflag + where def = mkflag Nothing + +noArg :: (Eq b) => b -> MkOptDescr (a -> b) (b -> a -> a) a +noArg flag sf lf d = choiceOpt [(flag, (sf,lf), d)] sf lf d + +boolOpt :: (b -> Maybe Bool) -> (Bool -> b) -> SFlags -> SFlags + -> MkOptDescr (a -> b) (b -> a -> a) a +boolOpt g s sfT sfF _sf _lf@(n:_) d get set = + BoolOpt d (sfT, ["enable-"++n]) (sfF, ["disable-"++n]) (set.s) (g.get) +boolOpt _ _ _ _ _ _ _ _ _ = error + "Distribution.Simple.Setup.boolOpt: unreachable" + +boolOpt' :: (b -> Maybe Bool) -> (Bool -> b) -> OptFlags -> OptFlags + -> MkOptDescr (a -> b) (b -> a -> a) a +boolOpt' g s ffT ffF _sf _lf d get set = BoolOpt d ffT ffF (set.s) (g . get) + +-- | create a Choice option +choiceOpt :: Eq b => [(b,OptFlags,Description)] + -> MkOptDescr (a -> b) (b -> a -> a) a +choiceOpt aa_ff _sf _lf _d get set = ChoiceOpt alts + where alts = [(d,flags, set alt, (==alt) . get) | (alt,flags,d) <- aa_ff] + +-- | create a Choice option out of an enumeration type. +-- As long flags, the Show output is used. As short flags, the first character +-- which does not conflict with a previous one is used. +choiceOptFromEnum :: (Bounded b, Enum b, Show b, Eq b) => + MkOptDescr (a -> b) (b -> a -> a) a +choiceOptFromEnum _sf _lf d get = + choiceOpt [ (x, (sf, [map toLower $ show x]), d') + | (x, sf) <- sflags' + , let d' = d ++ show x] + _sf _lf d get + where sflags' = foldl f [] [firstOne..] 
+ f prev x = let prevflags = concatMap snd prev in + prev ++ take 1 [(x, [toLower sf]) + | sf <- show x, isAlpha sf + , toLower sf `notElem` prevflags] + firstOne = minBound `asTypeOf` get undefined + +commandGetOpts :: ShowOrParseArgs -> CommandUI flags + -> [GetOpt.OptDescr (flags -> flags)] +commandGetOpts showOrParse command = + concatMap viewAsGetOpt (commandOptions command showOrParse) + +viewAsGetOpt :: OptionField a -> [GetOpt.OptDescr (a->a)] +viewAsGetOpt (OptionField _n aa) = concatMap optDescrToGetOpt aa + where + optDescrToGetOpt (ReqArg d (cs,ss) arg_desc set _) = + [GetOpt.Option cs ss (GetOpt.ReqArg set' arg_desc) d] + where set' = readEOrFail set + optDescrToGetOpt (OptArg d (cs,ss) arg_desc set def _) = + [GetOpt.Option cs ss (GetOpt.OptArg set' arg_desc) d] + where set' Nothing = def + set' (Just txt) = readEOrFail set txt + optDescrToGetOpt (ChoiceOpt alts) = + [GetOpt.Option sf lf (GetOpt.NoArg set) d | (d,(sf,lf),set,_) <- alts ] + optDescrToGetOpt (BoolOpt d (sfT, lfT) ([], []) set _) = + [ GetOpt.Option sfT lfT (GetOpt.NoArg (set True)) d ] + optDescrToGetOpt (BoolOpt d ([], []) (sfF, lfF) set _) = + [ GetOpt.Option sfF lfF (GetOpt.NoArg (set False)) d ] + optDescrToGetOpt (BoolOpt d (sfT,lfT) (sfF, lfF) set _) = + [ GetOpt.Option sfT lfT (GetOpt.NoArg (set True)) ("Enable " ++ d) + , GetOpt.Option sfF lfF (GetOpt.NoArg (set False)) ("Disable " ++ d) ] + +-- | to view as a FieldDescr, we sort the list of interfaces (Req > Bool > +-- Choice > Opt) and consider only the first one. 
+viewAsFieldDescr :: OptionField a -> FieldDescr a +viewAsFieldDescr (OptionField _n []) = + error "Distribution.command.viewAsFieldDescr: unexpected" +viewAsFieldDescr (OptionField n dd) = FieldDescr n get set + where + optDescr = head $ sortBy cmp dd + + cmp :: OptDescr a -> OptDescr a -> Ordering + ReqArg{} `cmp` ReqArg{} = EQ + ReqArg{} `cmp` _ = GT + BoolOpt{} `cmp` ReqArg{} = LT + BoolOpt{} `cmp` BoolOpt{} = EQ + BoolOpt{} `cmp` _ = GT + ChoiceOpt{} `cmp` ReqArg{} = LT + ChoiceOpt{} `cmp` BoolOpt{} = LT + ChoiceOpt{} `cmp` ChoiceOpt{} = EQ + ChoiceOpt{} `cmp` _ = GT + OptArg{} `cmp` OptArg{} = EQ + OptArg{} `cmp` _ = LT + +-- get :: a -> Doc + get t = case optDescr of + ReqArg _ _ _ _ ppr -> + (cat . punctuate comma . map text . ppr) t + + OptArg _ _ _ _ _ ppr -> + case ppr t of [] -> PP.empty + (Nothing : _) -> text "True" + (Just a : _) -> text a + + ChoiceOpt alts -> + fromMaybe PP.empty $ listToMaybe + [ text lf | (_,(_,lf:_), _,enabled) <- alts, enabled t] + + BoolOpt _ _ _ _ enabled -> (maybe PP.empty disp . enabled) t + +-- set :: LineNo -> String -> a -> ParseResult a + set line val a = + case optDescr of + ReqArg _ _ _ readE _ -> ($ a) `liftM` runE line n readE val + -- We parse for a single value instead of a + -- list, as one can't really implement + -- parseList :: ReadE a -> ReadE [a] with + -- the current ReadE definition + ChoiceOpt{} -> + case getChoiceByLongFlag optDescr val of + Just f -> return (f a) + _ -> syntaxError line val + + BoolOpt _ _ _ setV _ -> (`setV` a) `liftM` runP line n parse val + + OptArg _ _ _ readE _ _ -> ($ a) `liftM` runE line n readE val + -- Optional arguments are parsed just like + -- required arguments here; we don't + -- provide a method to set an OptArg field + -- to the default value. 
+ +getChoiceByLongFlag :: OptDescr b -> String -> Maybe (b->b) +getChoiceByLongFlag (ChoiceOpt alts) val = listToMaybe + [ set | (_,(_sf,lf:_), set, _) <- alts + , lf == val] + +getChoiceByLongFlag _ _ = + error "Distribution.command.getChoiceByLongFlag: expected a choice option" + +getCurrentChoice :: OptDescr a -> a -> [String] +getCurrentChoice (ChoiceOpt alts) a = + [ lf | (_,(_sf,lf:_), _, currentChoice) <- alts, currentChoice a] + +getCurrentChoice _ _ = error "Command.getChoice: expected a Choice OptDescr" + + +liftOption :: (b -> a) -> (a -> (b -> b)) -> OptionField a -> OptionField b +liftOption get' set' opt = + opt { optionDescr = liftOptDescr get' set' `map` optionDescr opt} + + +liftOptDescr :: (b -> a) -> (a -> (b -> b)) -> OptDescr a -> OptDescr b +liftOptDescr get' set' (ChoiceOpt opts) = + ChoiceOpt [ (d, ff, liftSet get' set' set , (get . get')) + | (d, ff, set, get) <- opts] + +liftOptDescr get' set' (OptArg d ff ad set def get) = + OptArg d ff ad (liftSet get' set' `fmap` set) + (liftSet get' set' def) (get . get') + +liftOptDescr get' set' (ReqArg d ff ad set get) = + ReqArg d ff ad (liftSet get' set' `fmap` set) (get . get') + +liftOptDescr get' set' (BoolOpt d ffT ffF set get) = + BoolOpt d ffT ffF (liftSet get' set' . set) (get . 
get') + +liftSet :: (b -> a) -> (a -> (b -> b)) -> (a -> a) -> b -> b +liftSet get' set' set x = set' (set $ get' x) x + +-- | Show flags in the standard long option command line format +commandShowOptions :: CommandUI flags -> flags -> [String] +commandShowOptions command v = concat + [ showOptDescr v od | o <- commandOptions command ParseArgs + , od <- optionDescr o] + where + maybePrefix [] = [] + maybePrefix (lOpt:_) = ["--" ++ lOpt] + + showOptDescr :: a -> OptDescr a -> [String] + showOptDescr x (BoolOpt _ (_,lfTs) (_,lfFs) _ enabled) + = case enabled x of + Nothing -> [] + Just True -> maybePrefix lfTs + Just False -> maybePrefix lfFs + showOptDescr x c@ChoiceOpt{} + = ["--" ++ val | val <- getCurrentChoice c x] + showOptDescr x (ReqArg _ (_ssff,lf:_) _ _ showflag) + = [ "--"++lf++"="++flag + | flag <- showflag x ] + showOptDescr x (OptArg _ (_ssff,lf:_) _ _ _ showflag) + = [ case flag of + Just s -> "--"++lf++"="++s + Nothing -> "--"++lf + | flag <- showflag x ] + showOptDescr _ _ + = error "Distribution.Simple.Command.showOptDescr: unreachable" + + +commandListOptions :: CommandUI flags -> [String] +commandListOptions command = + concatMap listOption $ + addCommonFlags ShowArgs $ -- This is a slight hack, we don't want + -- "--list-options" showing up in the + -- list options output, so use ShowArgs + commandGetOpts ShowArgs command + where + listOption (GetOpt.Option shortNames longNames _ _) = + [ "-" ++ [name] | name <- shortNames ] + ++ [ "--" ++ name | name <- longNames ] + +-- | The help text for this command with descriptions of all the options. +commandHelp :: CommandUI flags -> String -> String +commandHelp command pname = + commandSynopsis command + ++ "\n\n" + ++ commandUsage command pname + ++ ( case commandDescription command of + Nothing -> "" + Just desc -> '\n': desc pname) + ++ "\n" + ++ ( if cname == "" + then "Global flags:" + else "Flags for " ++ cname ++ ":" ) + ++ ( GetOpt.usageInfo "" + . 
addCommonFlags ShowArgs + $ commandGetOpts ShowArgs command ) + ++ ( case commandNotes command of + Nothing -> "" + Just notes -> '\n': notes pname) + where cname = commandName command + +-- | Default "usage" documentation text for commands. +usageDefault :: String -> String -> String +usageDefault name pname = + "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n\n" + ++ "Flags for " ++ name ++ ":" + +-- | Create "usage" documentation from a list of parameter +-- configurations. +usageAlternatives :: String -> [String] -> String -> String +usageAlternatives name strs pname = unlines + [ start ++ pname ++ " " ++ name ++ " " ++ s + | let starts = "Usage: " : repeat " or: " + , (start, s) <- zip starts strs + ] + +-- | Make a Command from standard 'GetOpt' options. +mkCommandUI :: String -- ^ name + -> String -- ^ synopsis + -> [String] -- ^ usage alternatives + -> flags -- ^ initial\/empty flags + -> (ShowOrParseArgs -> [OptionField flags]) -- ^ options + -> CommandUI flags +mkCommandUI name synopsis usages flags options = CommandUI + { commandName = name + , commandSynopsis = synopsis + , commandDescription = Nothing + , commandNotes = Nothing + , commandUsage = usageAlternatives name usages + , commandDefaultFlags = flags + , commandOptions = options + } + +-- | Common flags that apply to every command +data CommonFlag = HelpFlag | ListOptionsFlag + +commonFlags :: ShowOrParseArgs -> [GetOpt.OptDescr CommonFlag] +commonFlags showOrParseArgs = case showOrParseArgs of + ShowArgs -> [help] + ParseArgs -> [help, list] + where + help = GetOpt.Option helpShortFlags ["help"] (GetOpt.NoArg HelpFlag) + "Show this help text" + helpShortFlags = case showOrParseArgs of + ShowArgs -> ['h'] + ParseArgs -> ['h', '?'] + list = GetOpt.Option [] ["list-options"] (GetOpt.NoArg ListOptionsFlag) + "Print a list of command line flags" + +addCommonFlags :: ShowOrParseArgs + -> [GetOpt.OptDescr a] + -> [GetOpt.OptDescr (Either CommonFlag a)] +addCommonFlags showOrParseArgs options = + map 
(fmapOptDesc Left) (commonFlags showOrParseArgs) + ++ map (fmapOptDesc Right) options + where fmapOptDesc f (GetOpt.Option s l d m) = + GetOpt.Option s l (fmapArgDesc f d) m + fmapArgDesc f (GetOpt.NoArg a) = GetOpt.NoArg (f a) + fmapArgDesc f (GetOpt.ReqArg s d) = GetOpt.ReqArg (f . s) d + fmapArgDesc f (GetOpt.OptArg s d) = GetOpt.OptArg (f . s) d + +-- | Parse a bunch of command line arguments +-- +commandParseArgs :: CommandUI flags + -> Bool -- ^ Is the command a global or subcommand? + -> [String] + -> CommandParse (flags -> flags, [String]) +commandParseArgs command global args = + let options = addCommonFlags ParseArgs + $ commandGetOpts ParseArgs command + order | global = GetOpt.RequireOrder + | otherwise = GetOpt.Permute + in case GetOpt.getOpt' order options args of + (flags, _, _, _) + | any listFlag flags -> CommandList (commandListOptions command) + | any helpFlag flags -> CommandHelp (commandHelp command) + where listFlag (Left ListOptionsFlag) = True; listFlag _ = False + helpFlag (Left HelpFlag) = True; helpFlag _ = False + (flags, opts, opts', []) + | global || null opts' -> CommandReadyToGo (accum flags, mix opts opts') + | otherwise -> CommandErrors (unrecognised opts') + (_, _, _, errs) -> CommandErrors errs + + where -- Note: It is crucial to use reverse function composition here or to + -- reverse the flags here as we want to process the flags left to right + -- but data flow in function composition is right to left. + accum flags = foldr (flip (.)) id [ f | Right f <- flags ] + unrecognised opts = [ "unrecognized " + ++ "'" ++ (commandName command) ++ "'" + ++ " option `" ++ opt ++ "'\n" + | opt <- opts ] + -- For unrecognised global flags we put them in the position just after + -- the command, if there is one. This gives us a chance to parse them + -- as sub-command rather than global flags. 
+ mix [] ys = ys + mix (x:xs) ys = x:ys++xs + +data CommandParse flags = CommandHelp (String -> String) + | CommandList [String] + | CommandErrors [String] + | CommandReadyToGo flags +instance Functor CommandParse where + fmap _ (CommandHelp help) = CommandHelp help + fmap _ (CommandList opts) = CommandList opts + fmap _ (CommandErrors errs) = CommandErrors errs + fmap f (CommandReadyToGo flags) = CommandReadyToGo (f flags) + + +data CommandType = NormalCommand | HiddenCommand +data Command action = + Command String String ([String] -> CommandParse action) CommandType + +-- | Mark command as hidden. Hidden commands don't show up in the 'progname +-- help' or 'progname --help' output. +hiddenCommand :: Command action -> Command action +hiddenCommand (Command name synopsys f _cmdType) = + Command name synopsys f HiddenCommand + +commandAddAction :: CommandUI flags + -> (flags -> [String] -> action) + -> Command action +commandAddAction command action = + Command (commandName command) + (commandSynopsis command) + (fmap (uncurry applyDefaultArgs) . 
commandParseArgs command False) + NormalCommand + + where applyDefaultArgs mkflags args = + let flags = mkflags (commandDefaultFlags command) + in action flags args + +commandsRun :: CommandUI a + -> [Command action] + -> [String] + -> CommandParse (a, CommandParse action) +commandsRun globalCommand commands args = + case commandParseArgs globalCommand True args of + CommandHelp help -> CommandHelp help + CommandList opts -> CommandList (opts ++ commandNames) + CommandErrors errs -> CommandErrors errs + CommandReadyToGo (mkflags, args') -> case args' of + ("help":cmdArgs) -> handleHelpCommand cmdArgs + (name:cmdArgs) -> case lookupCommand name of + [Command _ _ action _] + -> CommandReadyToGo (flags, action cmdArgs) + _ -> CommandReadyToGo (flags, badCommand name) + [] -> CommandReadyToGo (flags, noCommand) + where flags = mkflags (commandDefaultFlags globalCommand) + + where + lookupCommand cname = [ cmd | cmd@(Command cname' _ _ _) <- commands' + , cname' == cname ] + noCommand = CommandErrors ["no command given (try --help)\n"] + badCommand cname = CommandErrors ["unrecognised command: " ++ cname + ++ " (try --help)\n"] + commands' = commands ++ [commandAddAction helpCommandUI undefined] + commandNames = [ name | (Command name _ _ NormalCommand) <- commands' ] + + -- A bit of a hack: support "prog help" as a synonym of "prog --help" + -- furthermore, support "prog help command" as "prog command --help" + handleHelpCommand cmdArgs = + case commandParseArgs helpCommandUI True cmdArgs of + CommandHelp help -> CommandHelp help + CommandList list -> CommandList (list ++ commandNames) + CommandErrors _ -> CommandHelp globalHelp + CommandReadyToGo (_,[]) -> CommandHelp globalHelp + CommandReadyToGo (_,(name:cmdArgs')) -> + case lookupCommand name of + [Command _ _ action _] -> + case action ("--help":cmdArgs') of + CommandHelp help -> CommandHelp help + CommandList _ -> CommandList [] + _ -> CommandHelp globalHelp + _ -> badCommand name + + where globalHelp = 
commandHelp globalCommand + +-- | Utility function, many commands do not accept additional flags. This +-- action fails with a helpful error message if the user supplies any extra. +-- +noExtraFlags :: [String] -> IO () +noExtraFlags [] = return () +noExtraFlags extraFlags = + dieNoVerbosity $ "Unrecognised flags: " ++ intercalate ", " extraFlags +--TODO: eliminate this function and turn it into a variant on commandAddAction +-- instead like commandAddActionNoArgs that doesn't supply the [String] + +-- | Helper function for creating globalCommand description +getNormalCommandDescriptions :: [Command action] -> [(String, String)] +getNormalCommandDescriptions cmds = + [ (name, description) + | Command name description _ NormalCommand <- cmds ] + +helpCommandUI :: CommandUI () +helpCommandUI = + (mkCommandUI + "help" + "Help about commands." + ["[FLAGS]", "COMMAND [FLAGS]"] + () + (const [])) + { + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " help help\n" + ++ " Oh, appararently you already know this.\n" + } + +-- | wraps a @CommandUI@ together with a function that turns it into a @Command@. +-- By hiding the type of flags for the UI allows construction of a list of all UIs at the +-- top level of the program. That list can then be used for generation of manual page +-- as well as for executing the selected command. +data CommandSpec action + = forall flags. 
CommandSpec (CommandUI flags) (CommandUI flags -> Command action) CommandType + +commandFromSpec :: CommandSpec a -> Command a +commandFromSpec (CommandSpec ui action _) = action ui diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Compiler.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Compiler.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Compiler.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Compiler.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,438 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveDataTypeable #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Compiler +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This should be a much more sophisticated abstraction than it is. Currently +-- it's just a bit of data about the compiler, like its flavour and name and +-- version. The reason it's just data is because currently it has to be in +-- 'Read' and 'Show' so it can be saved along with the 'LocalBuildInfo'. The +-- only interesting bit of info it contains is a mapping between language +-- extensions and compiler command line flags. This module also defines a +-- 'PackageDB' type which is used to refer to package databases. Most compilers +-- only know about a single global package collection but GHC has a global and +-- per-user one and it lets you create arbitrary other package databases. We do +-- not yet fully support this latter feature. 
+ +module Distribution.Simple.Compiler ( + -- * Haskell implementations + module Distribution.Compiler, + Compiler(..), + showCompilerId, showCompilerIdWithAbi, + compilerFlavor, compilerVersion, + compilerCompatFlavor, + compilerCompatVersion, + compilerInfo, + + -- * Support for package databases + PackageDB(..), + PackageDBStack, + registrationPackageDB, + absolutePackageDBPaths, + absolutePackageDBPath, + + -- * Support for optimisation levels + OptimisationLevel(..), + flagToOptimisationLevel, + + -- * Support for debug info levels + DebugInfoLevel(..), + flagToDebugInfoLevel, + + -- * Support for language extensions + Flag, + languageToFlags, + unsupportedLanguages, + extensionsToFlags, + unsupportedExtensions, + parmakeSupported, + reexportedModulesSupported, + renamingPackageFlagsSupported, + unifiedIPIDRequired, + packageKeySupported, + unitIdSupported, + coverageSupported, + profilingSupported, + backpackSupported, + arResponseFilesSupported, + libraryDynDirSupported, + + -- * Support for profiling detail levels + ProfDetailLevel(..), + knownProfDetailLevels, + flagToProfDetailLevel, + showProfDetailLevel, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Compiler +import Distribution.Version +import Distribution.Text +import Language.Haskell.Extension +import Distribution.Simple.Utils + +import Control.Monad (join) +import qualified Data.Map as Map (lookup) +import System.Directory (canonicalizePath) + +data Compiler = Compiler { + compilerId :: CompilerId, + -- ^ Compiler flavour and version. + compilerAbiTag :: AbiTag, + -- ^ Tag for distinguishing incompatible ABI's on the same + -- architecture/os. + compilerCompat :: [CompilerId], + -- ^ Other implementations that this compiler claims to be + -- compatible with. + compilerLanguages :: [(Language, Flag)], + -- ^ Supported language standards. + compilerExtensions :: [(Extension, Maybe Flag)], + -- ^ Supported extensions. 
+ compilerProperties :: Map String String + -- ^ A key-value map for properties not covered by the above fields. + } + deriving (Eq, Generic, Typeable, Show, Read) + +instance Binary Compiler + +showCompilerId :: Compiler -> String +showCompilerId = display . compilerId + +showCompilerIdWithAbi :: Compiler -> String +showCompilerIdWithAbi comp = + display (compilerId comp) ++ + case compilerAbiTag comp of + NoAbiTag -> [] + AbiTag xs -> '-':xs + +compilerFlavor :: Compiler -> CompilerFlavor +compilerFlavor = (\(CompilerId f _) -> f) . compilerId + +compilerVersion :: Compiler -> Version +compilerVersion = (\(CompilerId _ v) -> v) . compilerId + + +-- | Is this compiler compatible with the compiler flavour we're interested in? +-- +-- For example this checks if the compiler is actually GHC or is another +-- compiler that claims to be compatible with some version of GHC, e.g. GHCJS. +-- +-- > if compilerCompatFlavor GHC compiler then ... else ... +-- +compilerCompatFlavor :: CompilerFlavor -> Compiler -> Bool +compilerCompatFlavor flavor comp = + flavor == compilerFlavor comp + || flavor `elem` [ flavor' | CompilerId flavor' _ <- compilerCompat comp ] + + +-- | Is this compiler compatible with the compiler flavour we're interested in, +-- and if so what version does it claim to be compatible with. +-- +-- For example this checks if the compiler is actually GHC-7.x or is another +-- compiler that claims to be compatible with some GHC-7.x version. +-- +-- > case compilerCompatVersion GHC compiler of +-- > Just (Version (7:_)) -> ... +-- > _ -> ... +-- +compilerCompatVersion :: CompilerFlavor -> Compiler -> Maybe Version +compilerCompatVersion flavor comp + | compilerFlavor comp == flavor = Just (compilerVersion comp) + | otherwise = + listToMaybe [ v | CompilerId fl v <- compilerCompat comp, fl == flavor ] + +compilerInfo :: Compiler -> CompilerInfo +compilerInfo c = CompilerInfo (compilerId c) + (compilerAbiTag c) + (Just . compilerCompat $ c) + (Just . map fst . 
compilerLanguages $ c) + (Just . map fst . compilerExtensions $ c) + +-- ------------------------------------------------------------ +-- * Package databases +-- ------------------------------------------------------------ + +-- |Some compilers have a notion of a database of available packages. +-- For some there is just one global db of packages, other compilers +-- support a per-user or an arbitrary db specified at some location in +-- the file system. This can be used to build isloated environments of +-- packages, for example to build a collection of related packages +-- without installing them globally. +-- +data PackageDB = GlobalPackageDB + | UserPackageDB + | SpecificPackageDB FilePath + deriving (Eq, Generic, Ord, Show, Read) + +instance Binary PackageDB + +-- | We typically get packages from several databases, and stack them +-- together. This type lets us be explicit about that stacking. For example +-- typical stacks include: +-- +-- > [GlobalPackageDB] +-- > [GlobalPackageDB, UserPackageDB] +-- > [GlobalPackageDB, SpecificPackageDB "package.conf.inplace"] +-- +-- Note that the 'GlobalPackageDB' is invariably at the bottom since it +-- contains the rts, base and other special compiler-specific packages. +-- +-- We are not restricted to using just the above combinations. In particular +-- we can use several custom package dbs and the user package db together. +-- +-- When it comes to writing, the top most (last) package is used. +-- +type PackageDBStack = [PackageDB] + +-- | Return the package that we should register into. This is the package db at +-- the top of the stack. 
+-- +registrationPackageDB :: PackageDBStack -> PackageDB +registrationPackageDB [] = error "internal error: empty package db set" +registrationPackageDB dbs = last dbs + +-- | Make package paths absolute + + +absolutePackageDBPaths :: PackageDBStack -> NoCallStackIO PackageDBStack +absolutePackageDBPaths = traverse absolutePackageDBPath + +absolutePackageDBPath :: PackageDB -> NoCallStackIO PackageDB +absolutePackageDBPath GlobalPackageDB = return GlobalPackageDB +absolutePackageDBPath UserPackageDB = return UserPackageDB +absolutePackageDBPath (SpecificPackageDB db) = + SpecificPackageDB `liftM` canonicalizePath db + +-- ------------------------------------------------------------ +-- * Optimisation levels +-- ------------------------------------------------------------ + +-- | Some compilers support optimising. Some have different levels. +-- For compilers that do not the level is just capped to the level +-- they do support. +-- +data OptimisationLevel = NoOptimisation + | NormalOptimisation + | MaximumOptimisation + deriving (Bounded, Enum, Eq, Generic, Read, Show) + +instance Binary OptimisationLevel + +flagToOptimisationLevel :: Maybe String -> OptimisationLevel +flagToOptimisationLevel Nothing = NormalOptimisation +flagToOptimisationLevel (Just s) = case reads s of + [(i, "")] + | i >= fromEnum (minBound :: OptimisationLevel) + && i <= fromEnum (maxBound :: OptimisationLevel) + -> toEnum i + | otherwise -> error $ "Bad optimisation level: " ++ show i + ++ ". Valid values are 0..2" + _ -> error $ "Can't parse optimisation level " ++ s + +-- ------------------------------------------------------------ +-- * Debug info levels +-- ------------------------------------------------------------ + +-- | Some compilers support emitting debug info. Some have different +-- levels. For compilers that do not the level is just capped to the +-- level they do support. 
+-- +data DebugInfoLevel = NoDebugInfo + | MinimalDebugInfo + | NormalDebugInfo + | MaximalDebugInfo + deriving (Bounded, Enum, Eq, Generic, Read, Show) + +instance Binary DebugInfoLevel + +flagToDebugInfoLevel :: Maybe String -> DebugInfoLevel +flagToDebugInfoLevel Nothing = NormalDebugInfo +flagToDebugInfoLevel (Just s) = case reads s of + [(i, "")] + | i >= fromEnum (minBound :: DebugInfoLevel) + && i <= fromEnum (maxBound :: DebugInfoLevel) + -> toEnum i + | otherwise -> error $ "Bad debug info level: " ++ show i + ++ ". Valid values are 0..3" + _ -> error $ "Can't parse debug info level " ++ s + +-- ------------------------------------------------------------ +-- * Languages and Extensions +-- ------------------------------------------------------------ + +unsupportedLanguages :: Compiler -> [Language] -> [Language] +unsupportedLanguages comp langs = + [ lang | lang <- langs + , isNothing (languageToFlag comp lang) ] + +languageToFlags :: Compiler -> Maybe Language -> [Flag] +languageToFlags comp = filter (not . null) + . catMaybes . map (languageToFlag comp) + . maybe [Haskell98] (\x->[x]) + +languageToFlag :: Compiler -> Language -> Maybe Flag +languageToFlag comp ext = lookup ext (compilerLanguages comp) + + +-- |For the given compiler, return the extensions it does not support. +unsupportedExtensions :: Compiler -> [Extension] -> [Extension] +unsupportedExtensions comp exts = + [ ext | ext <- exts + , isNothing (extensionToFlag' comp ext) ] + +type Flag = String + +-- |For the given compiler, return the flags for the supported extensions. +extensionsToFlags :: Compiler -> [Extension] -> [Flag] +extensionsToFlags comp = nub . filter (not . null) + . catMaybes . map (extensionToFlag comp) + +-- | Looks up the flag for a given extension, for a given compiler. +-- Ignores the subtlety of extensions which lack associated flags. 
+extensionToFlag :: Compiler -> Extension -> Maybe Flag +extensionToFlag comp ext = join (extensionToFlag' comp ext) + +-- | Looks up the flag for a given extension, for a given compiler. +-- However, the extension may be valid for the compiler but not have a flag. +-- For example, NondecreasingIndentation is enabled by default on GHC 7.0.4, +-- hence it is considered a supported extension but not an accepted flag. +-- +-- The outer layer of Maybe indicates whether the extensions is supported, while +-- the inner layer indicates whether it has a flag. +-- When building strings, it is often more convenient to use 'extensionToFlag', +-- which ignores the difference. +extensionToFlag' :: Compiler -> Extension -> Maybe (Maybe Flag) +extensionToFlag' comp ext = lookup ext (compilerExtensions comp) + +-- | Does this compiler support parallel --make mode? +parmakeSupported :: Compiler -> Bool +parmakeSupported = ghcSupported "Support parallel --make" + +-- | Does this compiler support reexported-modules? +reexportedModulesSupported :: Compiler -> Bool +reexportedModulesSupported = ghcSupported "Support reexported-modules" + +-- | Does this compiler support thinning/renaming on package flags? +renamingPackageFlagsSupported :: Compiler -> Bool +renamingPackageFlagsSupported = ghcSupported + "Support thinning and renaming package flags" + +-- | Does this compiler have unified IPIDs (so no package keys) +unifiedIPIDRequired :: Compiler -> Bool +unifiedIPIDRequired = ghcSupported "Requires unified installed package IDs" + +-- | Does this compiler support package keys? +packageKeySupported :: Compiler -> Bool +packageKeySupported = ghcSupported "Uses package keys" + +-- | Does this compiler support unit IDs? +unitIdSupported :: Compiler -> Bool +unitIdSupported = ghcSupported "Uses unit IDs" + +-- | Does this compiler support Backpack? 
+backpackSupported :: Compiler -> Bool +backpackSupported = ghcSupported "Support Backpack" + +-- | Does this compiler support a package database entry with: +-- "dynamic-library-dirs"? +libraryDynDirSupported :: Compiler -> Bool +libraryDynDirSupported comp = case compilerFlavor comp of + GHC -> + -- Not just v >= mkVersion [8,0,1,20161022], as there + -- are many GHC 8.1 nightlies which don't support this. + ((v >= mkVersion [8,0,1,20161022] && v < mkVersion [8,1]) || + v >= mkVersion [8,1,20161021]) + _ -> False + where + v = compilerVersion comp + +-- | Does this compiler's "ar" command supports response file +-- arguments (i.e. @file-style arguments). +arResponseFilesSupported :: Compiler -> Bool +arResponseFilesSupported = ghcSupported "ar supports at file" + +-- | Does this compiler support Haskell program coverage? +coverageSupported :: Compiler -> Bool +coverageSupported comp = + case compilerFlavor comp of + GHC -> True + GHCJS -> True + _ -> False + +-- | Does this compiler support profiling? +profilingSupported :: Compiler -> Bool +profilingSupported comp = + case compilerFlavor comp of + GHC -> True + GHCJS -> True + _ -> False + +-- | Utility function for GHC only features +ghcSupported :: String -> Compiler -> Bool +ghcSupported key comp = + case compilerFlavor comp of + GHC -> checkProp + GHCJS -> checkProp + _ -> False + where checkProp = + case Map.lookup key (compilerProperties comp) of + Just "YES" -> True + _ -> False + +-- ------------------------------------------------------------ +-- * Profiling detail level +-- ------------------------------------------------------------ + +-- | Some compilers (notably GHC) support profiling and can instrument +-- programs so the system can account costs to different functions. There are +-- different levels of detail that can be used for this accounting. 
+-- For compilers that do not support this notion or the particular detail +-- levels, this is either ignored or just capped to some similar level +-- they do support. +-- +data ProfDetailLevel = ProfDetailNone + | ProfDetailDefault + | ProfDetailExportedFunctions + | ProfDetailToplevelFunctions + | ProfDetailAllFunctions + | ProfDetailOther String + deriving (Eq, Generic, Read, Show) + +instance Binary ProfDetailLevel + +flagToProfDetailLevel :: String -> ProfDetailLevel +flagToProfDetailLevel "" = ProfDetailDefault +flagToProfDetailLevel s = + case lookup (lowercase s) + [ (name, value) + | (primary, aliases, value) <- knownProfDetailLevels + , name <- primary : aliases ] + of Just value -> value + Nothing -> ProfDetailOther s + +knownProfDetailLevels :: [(String, [String], ProfDetailLevel)] +knownProfDetailLevels = + [ ("default", [], ProfDetailDefault) + , ("none", [], ProfDetailNone) + , ("exported-functions", ["exported"], ProfDetailExportedFunctions) + , ("toplevel-functions", ["toplevel", "top"], ProfDetailToplevelFunctions) + , ("all-functions", ["all"], ProfDetailAllFunctions) + ] + +showProfDetailLevel :: ProfDetailLevel -> String +showProfDetailLevel dl = case dl of + ProfDetailNone -> "none" + ProfDetailDefault -> "default" + ProfDetailExportedFunctions -> "exported-functions" + ProfDetailToplevelFunctions -> "toplevel-functions" + ProfDetailAllFunctions -> "all-functions" + ProfDetailOther other -> other diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Configure.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Configure.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Configure.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Configure.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,2032 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE FlexibleContexts 
#-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Configure +-- Copyright : Isaac Jones 2003-2005 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This deals with the /configure/ phase. It provides the 'configure' action +-- which is given the package description and configure flags. It then tries +-- to: configure the compiler; resolves any conditionals in the package +-- description; resolve the package dependencies; check if all the extensions +-- used by this package are supported by the compiler; check that all the build +-- tools are available (including version checks if appropriate); checks for +-- any required @pkg-config@ packages (updating the 'BuildInfo' with the +-- results) +-- +-- Then based on all this it saves the info in the 'LocalBuildInfo' and writes +-- it out to the @dist\/setup-config@ file. It also displays various details to +-- the user, the amount of information displayed depending on the verbosity +-- level. 
+ +module Distribution.Simple.Configure (configure, + writePersistBuildConfig, + getConfigStateFile, + getPersistBuildConfig, + checkPersistBuildConfigOutdated, + tryGetPersistBuildConfig, + maybeGetPersistBuildConfig, + findDistPref, findDistPrefOrDefault, + getInternalPackages, + computeComponentId, + computeCompatPackageKey, + computeCompatPackageName, + localBuildInfoFile, + getInstalledPackages, + getInstalledPackagesMonitorFiles, + getPackageDBContents, + configCompiler, configCompilerAux, + configCompilerEx, configCompilerAuxEx, + computeEffectiveProfiling, + ccLdOptionsBuildInfo, + checkForeignDeps, + interpretPackageDbFlags, + ConfigStateFileError(..), + tryGetConfigStateFile, + platformDefines, + ) + where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Compiler +import Distribution.Types.IncludeRenaming +import Distribution.Utils.NubList +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Simple.PreProcess +import Distribution.Package +import qualified Distribution.InstalledPackageInfo as Installed +import Distribution.InstalledPackageInfo (InstalledPackageInfo) +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import Distribution.PackageDescription as PD hiding (Flag) +import Distribution.Types.PackageDescription as PD +import Distribution.PackageDescription.PrettyPrint +import Distribution.PackageDescription.Configuration +import Distribution.PackageDescription.Check hiding (doesFileExist) +import Distribution.Simple.BuildToolDepends +import Distribution.Simple.Program +import Distribution.Simple.Setup as Setup +import Distribution.Simple.BuildTarget +import Distribution.Simple.LocalBuildInfo +import Distribution.Types.ExeDependency +import Distribution.Types.LegacyExeDependency +import Distribution.Types.PkgconfigDependency +import Distribution.Types.MungedPackageName +import Distribution.Types.LocalBuildInfo +import 
Distribution.Types.ComponentRequestedSpec +import Distribution.Types.ForeignLib +import Distribution.Types.ForeignLibType +import Distribution.Types.ForeignLibOption +import Distribution.Types.Mixin +import Distribution.Types.UnqualComponentName +import Distribution.Simple.Utils +import Distribution.System +import Distribution.Version +import Distribution.Verbosity +import qualified Distribution.Compat.Graph as Graph +import Distribution.Compat.Stack +import Distribution.Backpack.Configure +import Distribution.Backpack.DescribeUnitId +import Distribution.Backpack.PreExistingComponent +import Distribution.Backpack.ConfiguredComponent (newPackageDepsBehaviour) +import Distribution.Backpack.Id +import Distribution.Utils.LogProgress + +import qualified Distribution.Simple.GHC as GHC +import qualified Distribution.Simple.GHCJS as GHCJS +import qualified Distribution.Simple.UHC as UHC +import qualified Distribution.Simple.HaskellSuite as HaskellSuite + +import Control.Exception + ( ErrorCall, Exception, evaluate, throw, throwIO, try ) +import Control.Monad ( forM, forM_ ) +import Distribution.Compat.Binary ( decodeOrFailIO, encode ) +import Distribution.Compat.Directory ( listDirectory ) +import Data.ByteString.Lazy ( ByteString ) +import qualified Data.ByteString as BS +import qualified Data.ByteString.Lazy.Char8 as BLC8 +import Data.List + ( (\\), partition, inits, stripPrefix, intersect ) +import Data.Either + ( partitionEithers ) +import qualified Data.Map as Map +import System.Directory + ( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory + , removeFile) +import System.FilePath + ( (), isAbsolute, takeDirectory ) +import Distribution.Compat.Directory + ( doesPathExist ) +import qualified System.Info + ( compilerName, compilerVersion ) +import System.IO + ( hPutStrLn, hClose ) +import Distribution.Text + ( Text(disp), defaultStyle, display, simpleParse ) +import Text.PrettyPrint + ( Doc, (<+>), ($+$), char, comma, hsep, nest + , punctuate, quotes, 
render, renderStyle, sep, text ) +import Distribution.Compat.Environment ( lookupEnv ) +import Distribution.Compat.Exception ( catchExit, catchIO ) + + +type UseExternalInternalDeps = Bool + +-- | The errors that can be thrown when reading the @setup-config@ file. +data ConfigStateFileError + = ConfigStateFileNoHeader -- ^ No header found. + | ConfigStateFileBadHeader -- ^ Incorrect header. + | ConfigStateFileNoParse -- ^ Cannot parse file contents. + | ConfigStateFileMissing -- ^ No file! + | ConfigStateFileBadVersion PackageIdentifier PackageIdentifier + (Either ConfigStateFileError LocalBuildInfo) -- ^ Mismatched version. + deriving (Typeable) + +-- | Format a 'ConfigStateFileError' as a user-facing error message. +dispConfigStateFileError :: ConfigStateFileError -> Doc +dispConfigStateFileError ConfigStateFileNoHeader = + text "Saved package config file header is missing." + <+> text "Re-run the 'configure' command." +dispConfigStateFileError ConfigStateFileBadHeader = + text "Saved package config file header is corrupt." + <+> text "Re-run the 'configure' command." +dispConfigStateFileError ConfigStateFileNoParse = + text "Saved package config file is corrupt." + <+> text "Re-run the 'configure' command." +dispConfigStateFileError ConfigStateFileMissing = + text "Run the 'configure' command first." +dispConfigStateFileError (ConfigStateFileBadVersion oldCabal oldCompiler _) = + text "Saved package config file is outdated:" + $+$ badCabal $+$ badCompiler + $+$ text "Re-run the 'configure' command." + where + badCabal = + text "• the Cabal version changed from" + <+> disp oldCabal <+> "to" <+> disp currentCabalId + badCompiler + | oldCompiler == currentCompilerId = mempty + | otherwise = + text "• the compiler changed from" + <+> disp oldCompiler <+> "to" <+> disp currentCompilerId + +instance Show ConfigStateFileError where + show = renderStyle defaultStyle . 
dispConfigStateFileError + +instance Exception ConfigStateFileError + +-- | Read the 'localBuildInfoFile'. Throw an exception if the file is +-- missing, if the file cannot be read, or if the file was created by an older +-- version of Cabal. +getConfigStateFile :: FilePath -- ^ The file path of the @setup-config@ file. + -> IO LocalBuildInfo +getConfigStateFile filename = do + exists <- doesFileExist filename + unless exists $ throwIO ConfigStateFileMissing + -- Read the config file into a strict ByteString to avoid problems with + -- lazy I/O, then convert to lazy because the binary package needs that. + contents <- BS.readFile filename + let (header, body) = BLC8.span (/='\n') (BLC8.fromChunks [contents]) + + headerParseResult <- try $ evaluate $ parseHeader header + let (cabalId, compId) = + case headerParseResult of + Left (_ :: ErrorCall) -> throw ConfigStateFileBadHeader + Right x -> x + + let getStoredValue = do + result <- decodeOrFailIO (BLC8.tail body) + case result of + Left _ -> throw ConfigStateFileNoParse + Right x -> return x + deferErrorIfBadVersion act + | cabalId /= currentCabalId = do + eResult <- try act + throw $ ConfigStateFileBadVersion cabalId compId eResult + | otherwise = act + deferErrorIfBadVersion getStoredValue + where + _ = callStack -- TODO: attach call stack to exception + +-- | Read the 'localBuildInfoFile', returning either an error or the local build +-- info. +tryGetConfigStateFile :: FilePath -- ^ The file path of the @setup-config@ file. + -> IO (Either ConfigStateFileError LocalBuildInfo) +tryGetConfigStateFile = try . getConfigStateFile + +-- | Try to read the 'localBuildInfoFile'. +tryGetPersistBuildConfig :: FilePath -- ^ The @dist@ directory path. + -> IO (Either ConfigStateFileError LocalBuildInfo) +tryGetPersistBuildConfig = try . getPersistBuildConfig + +-- | Read the 'localBuildInfoFile'. 
Throw an exception if the file is +-- missing, if the file cannot be read, or if the file was created by an older +-- version of Cabal. +getPersistBuildConfig :: FilePath -- ^ The @dist@ directory path. + -> IO LocalBuildInfo +getPersistBuildConfig = getConfigStateFile . localBuildInfoFile + +-- | Try to read the 'localBuildInfoFile'. +maybeGetPersistBuildConfig :: FilePath -- ^ The @dist@ directory path. + -> IO (Maybe LocalBuildInfo) +maybeGetPersistBuildConfig = + liftM (either (const Nothing) Just) . tryGetPersistBuildConfig + +-- | After running configure, output the 'LocalBuildInfo' to the +-- 'localBuildInfoFile'. +writePersistBuildConfig :: FilePath -- ^ The @dist@ directory path. + -> LocalBuildInfo -- ^ The 'LocalBuildInfo' to write. + -> NoCallStackIO () +writePersistBuildConfig distPref lbi = do + createDirectoryIfMissing False distPref + writeFileAtomic (localBuildInfoFile distPref) $ + BLC8.unlines [showHeader pkgId, encode lbi] + where + pkgId = localPackage lbi + +-- | Identifier of the current Cabal package. +currentCabalId :: PackageIdentifier +currentCabalId = PackageIdentifier (mkPackageName "Cabal") cabalVersion + +-- | Identifier of the current compiler package. +currentCompilerId :: PackageIdentifier +currentCompilerId = PackageIdentifier (mkPackageName System.Info.compilerName) + (mkVersion' System.Info.compilerVersion) + +-- | Parse the @setup-config@ file header, returning the package identifiers +-- for Cabal and the compiler. +parseHeader :: ByteString -- ^ The file contents. 
+ -> (PackageIdentifier, PackageIdentifier) +parseHeader header = case BLC8.words header of + ["Saved", "package", "config", "for", pkgId, "written", "by", cabalId, + "using", compId] -> + fromMaybe (throw ConfigStateFileBadHeader) $ do + _ <- simpleParse (BLC8.unpack pkgId) :: Maybe PackageIdentifier + cabalId' <- simpleParse (BLC8.unpack cabalId) + compId' <- simpleParse (BLC8.unpack compId) + return (cabalId', compId') + _ -> throw ConfigStateFileNoHeader + +-- | Generate the @setup-config@ file header. +showHeader :: PackageIdentifier -- ^ The processed package. + -> ByteString +showHeader pkgId = BLC8.unwords + [ "Saved", "package", "config", "for" + , BLC8.pack $ display pkgId + , "written", "by" + , BLC8.pack $ display currentCabalId + , "using" + , BLC8.pack $ display currentCompilerId + ] + +-- | Check that localBuildInfoFile is up-to-date with respect to the +-- .cabal file. +checkPersistBuildConfigOutdated :: FilePath -> FilePath -> NoCallStackIO Bool +checkPersistBuildConfigOutdated distPref pkg_descr_file = + pkg_descr_file `moreRecentFile` localBuildInfoFile distPref + +-- | Get the path of @dist\/setup-config@. +localBuildInfoFile :: FilePath -- ^ The @dist@ directory path. + -> FilePath +localBuildInfoFile distPref = distPref "setup-config" + +-- ----------------------------------------------------------------------------- +-- * Configuration +-- ----------------------------------------------------------------------------- + +-- | Return the \"dist/\" prefix, or the default prefix. The prefix is taken +-- from (in order of highest to lowest preference) the override prefix, the +-- \"CABAL_BUILDDIR\" environment variable, or the default prefix. 
+findDistPref :: FilePath -- ^ default \"dist\" prefix + -> Setup.Flag FilePath -- ^ override \"dist\" prefix + -> NoCallStackIO FilePath +findDistPref defDistPref overrideDistPref = do + envDistPref <- liftM parseEnvDistPref (lookupEnv "CABAL_BUILDDIR") + return $ fromFlagOrDefault defDistPref (mappend envDistPref overrideDistPref) + where + parseEnvDistPref env = + case env of + Just distPref | not (null distPref) -> toFlag distPref + _ -> NoFlag + +-- | Return the \"dist/\" prefix, or the default prefix. The prefix is taken +-- from (in order of highest to lowest preference) the override prefix, the +-- \"CABAL_BUILDDIR\" environment variable, or 'defaultDistPref' is used. Call +-- this function to resolve a @*DistPref@ flag whenever it is not known to be +-- set. (The @*DistPref@ flags are always set to a definite value before +-- invoking 'UserHooks'.) +findDistPrefOrDefault :: Setup.Flag FilePath -- ^ override \"dist\" prefix + -> NoCallStackIO FilePath +findDistPrefOrDefault = findDistPref defaultDistPref + +-- |Perform the \"@.\/setup configure@\" action. +-- Returns the @.setup-config@ file. +configure :: (GenericPackageDescription, HookedBuildInfo) + -> ConfigFlags -> IO LocalBuildInfo +configure (pkg_descr0, pbi) cfg = do + -- Determine the component we are configuring, if a user specified + -- one on the command line. We use a fake, flattened version of + -- the package since at this point, we're not really sure what + -- components we *can* configure. @Nothing@ means that we should + -- configure everything (the old behavior). 
+ (mb_cname :: Maybe ComponentName) <- do + let flat_pkg_descr = flattenPackageDescription pkg_descr0 + targets <- readBuildTargets verbosity flat_pkg_descr (configArgs cfg) + -- TODO: bleat if you use the module/file syntax + let targets' = [ cname | BuildTargetComponent cname <- targets ] + case targets' of + _ | null (configArgs cfg) -> return Nothing + [cname] -> return (Just cname) + [] -> die' verbosity "No valid component targets found" + _ -> die' verbosity "Can only configure either single component or all of them" + + let use_external_internal_deps = isJust mb_cname + case mb_cname of + Nothing -> setupMessage verbosity "Configuring" (packageId pkg_descr0) + Just cname -> setupMessage' verbosity "Configuring" (packageId pkg_descr0) + cname (Just (configInstantiateWith cfg)) + + -- configCID is only valid for per-component configure + when (isJust (flagToMaybe (configCID cfg)) && isNothing mb_cname) $ + die' verbosity "--cid is only supported for per-component configure" + + checkDeprecatedFlags verbosity cfg + checkExactConfiguration verbosity pkg_descr0 cfg + + -- Where to build the package + let buildDir :: FilePath -- e.g. 
dist/build + -- fromFlag OK due to Distribution.Simple calling + -- findDistPrefOrDefault to fill it in + buildDir = fromFlag (configDistPref cfg) "build" + createDirectoryIfMissingVerbose (lessVerbose verbosity) True buildDir + + -- What package database(s) to use + let packageDbs :: PackageDBStack + packageDbs + = interpretPackageDbFlags + (fromFlag (configUserInstall cfg)) + (configPackageDBs cfg) + + -- comp: the compiler we're building with + -- compPlatform: the platform we're building for + -- programDb: location and args of all programs we're + -- building with + (comp :: Compiler, + compPlatform :: Platform, + programDb :: ProgramDb) + <- configCompilerEx + (flagToMaybe (configHcFlavor cfg)) + (flagToMaybe (configHcPath cfg)) + (flagToMaybe (configHcPkg cfg)) + (mkProgramDb cfg (configPrograms cfg)) + (lessVerbose verbosity) + + -- The InstalledPackageIndex of all installed packages + installedPackageSet :: InstalledPackageIndex + <- getInstalledPackages (lessVerbose verbosity) comp + packageDbs programDb + + -- The set of package names which are "shadowed" by internal + -- packages, and which component they map to + let internalPackageSet :: Map PackageName (Maybe UnqualComponentName) + internalPackageSet = getInternalPackages pkg_descr0 + + -- Make a data structure describing what components are enabled. + let enabled :: ComponentRequestedSpec + enabled = case mb_cname of + Just cname -> OneComponentRequestedSpec cname + Nothing -> ComponentRequestedSpec + -- The flag name (@--enable-tests@) is a + -- little bit of a misnomer, because + -- just passing this flag won't + -- "enable", in our internal + -- nomenclature; it's just a request; a + -- @buildable: False@ might make it + -- not possible to enable. + { testsRequested = fromFlag (configTests cfg) + , benchmarksRequested = + fromFlag (configBenchmarks cfg) } + -- Some sanity checks related to enabling components. 
+ when (isJust mb_cname + && (fromFlag (configTests cfg) || fromFlag (configBenchmarks cfg))) $ + die' verbosity $ "--enable-tests/--enable-benchmarks are incompatible with" ++ + " explicitly specifying a component to configure." + + -- allConstraints: The set of all 'Dependency's we have. Used ONLY + -- to 'configureFinalizedPackage'. + -- requiredDepsMap: A map from 'PackageName' to the specifically + -- required 'InstalledPackageInfo', due to --dependency + -- + -- NB: These constraints are to be applied to ALL components of + -- a package. Thus, it's not an error if allConstraints contains + -- more constraints than is necessary for a component (another + -- component might need it.) + -- + -- NB: The fact that we bundle all the constraints together means + -- that is not possible to configure a test-suite to use one + -- version of a dependency, and the executable to use another. + (allConstraints :: [Dependency], + requiredDepsMap :: Map PackageName InstalledPackageInfo) + <- either (die' verbosity) return $ + combinedConstraints (configConstraints cfg) + (configDependencies cfg) + installedPackageSet + + -- pkg_descr: The resolved package description, that does not contain any + -- conditionals, because we have have an assignment for + -- every flag, either picking them ourselves using a + -- simple naive algorithm, or having them be passed to + -- us by 'configConfigurationsFlags') + -- flags: The 'FlagAssignment' that the conditionals were + -- resolved with. + -- + -- NB: Why doesn't finalizing a package also tell us what the + -- dependencies are (e.g. when we run the naive algorithm, + -- we are checking if dependencies are satisfiable)? The + -- primary reason is that we may NOT have done any solving: + -- if the flags are all chosen for us, this step is a simple + -- matter of flattening according to that assignment. It's + -- cleaner to then configure the dependencies afterwards. 
+ (pkg_descr :: PackageDescription, + flags :: FlagAssignment) + <- configureFinalizedPackage verbosity cfg enabled + allConstraints + (dependencySatisfiable + use_external_internal_deps + (fromFlagOrDefault False (configExactConfiguration cfg)) + (packageName pkg_descr0) + installedPackageSet + internalPackageSet + requiredDepsMap) + comp + compPlatform + pkg_descr0 + + debug verbosity $ "Finalized package description:\n" + ++ showPackageDescription pkg_descr + + let cabalFileDir = maybe "." takeDirectory $ + flagToMaybe (configCabalFilePath cfg) + checkCompilerProblems verbosity comp pkg_descr enabled + checkPackageProblems verbosity cabalFileDir pkg_descr0 + (updatePackageDescription pbi pkg_descr) + + -- The list of 'InstalledPackageInfo' recording the selected + -- dependencies on external packages. + -- + -- Invariant: For any package name, there is at most one package + -- in externalPackageDeps which has that name. + -- + -- NB: The dependency selection is global over ALL components + -- in the package (similar to how allConstraints and + -- requiredDepsMap are global over all components). In particular, + -- if *any* component (post-flag resolution) has an unsatisfiable + -- dependency, we will fail. This can sometimes be undesirable + -- for users, see #1786 (benchmark conflicts with executable), + -- + -- In the presence of Backpack, these package dependencies are + -- NOT complete: they only ever include the INDEFINITE + -- dependencies. After we apply an instantiation, we'll get + -- definite references which constitute extra dependencies. + -- (Why not have cabal-install pass these in explicitly? + -- For one it's deterministic; for two, we need to associate + -- them with renamings which would require a far more complicated + -- input scheme than what we have today.) 
+ externalPkgDeps :: [PreExistingComponent] + <- configureDependencies + verbosity + use_external_internal_deps + internalPackageSet + installedPackageSet + requiredDepsMap + pkg_descr + enabled + + -- Compute installation directory templates, based on user + -- configuration. + -- + -- TODO: Move this into a helper function. + defaultDirs :: InstallDirTemplates + <- defaultInstallDirs' use_external_internal_deps + (compilerFlavor comp) + (fromFlag (configUserInstall cfg)) + (hasLibs pkg_descr) + let installDirs :: InstallDirTemplates + installDirs = combineInstallDirs fromFlagOrDefault + defaultDirs (configInstallDirs cfg) + + -- Check languages and extensions + -- TODO: Move this into a helper function. + let langlist = nub $ catMaybes $ map defaultLanguage + (enabledBuildInfos pkg_descr enabled) + let langs = unsupportedLanguages comp langlist + when (not (null langs)) $ + die' verbosity $ "The package " ++ display (packageId pkg_descr0) + ++ " requires the following languages which are not " + ++ "supported by " ++ display (compilerId comp) ++ ": " + ++ intercalate ", " (map display langs) + let extlist = nub $ concatMap allExtensions (enabledBuildInfos pkg_descr enabled) + let exts = unsupportedExtensions comp extlist + when (not (null exts)) $ + die' verbosity $ "The package " ++ display (packageId pkg_descr0) + ++ " requires the following language extensions which are not " + ++ "supported by " ++ display (compilerId comp) ++ ": " + ++ intercalate ", " (map display exts) + + -- Check foreign library build requirements + let flibs = [flib | CFLib flib <- enabledComponents pkg_descr enabled] + let unsupportedFLibs = unsupportedForeignLibs comp compPlatform flibs + when (not (null unsupportedFLibs)) $ + die' verbosity $ "Cannot build some foreign libraries: " + ++ intercalate "," unsupportedFLibs + + -- Configure certain external build tools, see below for which ones. 
+ let requiredBuildTools = do + bi <- enabledBuildInfos pkg_descr enabled + -- First, we collect any tool dep that we know is external. This is, + -- in practice: + -- + -- 1. `build-tools` entries on the whitelist + -- + -- 2. `build-tool-depends` that aren't from the current package. + let externBuildToolDeps = + [ LegacyExeDependency (unUnqualComponentName eName) versionRange + | buildTool@(ExeDependency _ eName versionRange) + <- getAllToolDependencies pkg_descr bi + , not $ isInternal pkg_descr buildTool ] + -- Second, we collect any build-tools entry we don't know how to + -- desugar. We'll never have any idea how to build them, so we just + -- hope they are already on the PATH. + let unknownBuildTools = + [ buildTool + | buildTool <- buildTools bi + , Nothing == desugarBuildTool pkg_descr buildTool ] + externBuildToolDeps ++ unknownBuildTools + + programDb' <- + configureAllKnownPrograms (lessVerbose verbosity) programDb + >>= configureRequiredPrograms verbosity requiredBuildTools + + (pkg_descr', programDb'') <- + configurePkgconfigPackages verbosity pkg_descr programDb' enabled + + -- Compute internal component graph + -- + -- The general idea is that we take a look at all the source level + -- components (which may build-depends on each other) and form a graph. + -- From there, we build a ComponentLocalBuildInfo for each of the + -- components, which lets us actually build each component. + -- internalPackageSet + -- use_external_internal_deps + (buildComponents :: [ComponentLocalBuildInfo], + packageDependsIndex :: InstalledPackageIndex) <- + runLogProgress verbosity $ configureComponentLocalBuildInfos + verbosity + use_external_internal_deps + enabled + (fromFlagOrDefault False (configDeterministic cfg)) + (configIPID cfg) + (configCID cfg) + pkg_descr + externalPkgDeps + (configConfigurationsFlags cfg) + (configInstantiateWith cfg) + installedPackageSet + comp + + -- Decide if we're going to compile with split sections. 
+ split_sections :: Bool <- + if not (fromFlag $ configSplitSections cfg) + then return False + else case compilerFlavor comp of + GHC | compilerVersion comp >= mkVersion [8,0] + -> return True + GHCJS + -> return True + _ -> do warn verbosity + ("this compiler does not support " ++ + "--enable-split-sections; ignoring") + return False + + -- Decide if we're going to compile with split objects. + split_objs :: Bool <- + if not (fromFlag $ configSplitObjs cfg) + then return False + else case compilerFlavor comp of + _ | split_sections + -> do warn verbosity + ("--enable-split-sections and " ++ + "--enable-split-objs are mutually" ++ + "exclusive; ignoring the latter") + return False + GHC + -> return True + GHCJS + -> return True + _ -> do warn verbosity + ("this compiler does not support " ++ + "--enable-split-objs; ignoring") + return False + + let ghciLibByDefault = + case compilerId comp of + CompilerId GHC _ -> + -- If ghc is non-dynamic, then ghci needs object files, + -- so we build one by default. + -- + -- Technically, archive files should be sufficient for ghci, + -- but because of GHC bug #8942, it has never been safe to + -- rely on them. By the time that bug was fixed, ghci had + -- been changed to read shared libraries instead of archive + -- files (see next code block). + not (GHC.isDynamic comp) + CompilerId GHCJS _ -> + not (GHCJS.isDynamic comp) + _ -> False + + let sharedLibsByDefault + | fromFlag (configDynExe cfg) = + -- build a shared library if dynamically-linked + -- executables are requested + True + | otherwise = case compilerId comp of + CompilerId GHC _ -> + -- if ghc is dynamic, then ghci needs a shared + -- library, so we build one by default. + GHC.isDynamic comp + CompilerId GHCJS _ -> + GHCJS.isDynamic comp + _ -> False + withSharedLib_ = + -- build shared libraries if required by GHC or by the + -- executable linking mode, but allow the user to force + -- building only static library archives with + -- --disable-shared. 
+ fromFlagOrDefault sharedLibsByDefault $ configSharedLib cfg + + withStaticLib_ = + -- build a static library (all dependent libraries rolled + -- into a huge .a archive) via GHCs -staticlib flag. + fromFlagOrDefault False $ configStaticLib cfg + + withDynExe_ = fromFlag $ configDynExe cfg + when (withDynExe_ && not withSharedLib_) $ warn verbosity $ + "Executables will use dynamic linking, but a shared library " + ++ "is not being built. Linking will fail if any executables " + ++ "depend on the library." + + setProfLBI <- configureProfiling verbosity cfg comp + + setCoverageLBI <- configureCoverage verbosity cfg comp + + let reloc = fromFlagOrDefault False $ configRelocatable cfg + + let buildComponentsMap = + foldl' (\m clbi -> Map.insertWith (++) + (componentLocalName clbi) [clbi] m) + Map.empty buildComponents + + let lbi = (setCoverageLBI . setProfLBI) + LocalBuildInfo { + configFlags = cfg, + flagAssignment = flags, + componentEnabledSpec = enabled, + extraConfigArgs = [], -- Currently configure does not + -- take extra args, but if it + -- did they would go here. 
+ installDirTemplates = installDirs, + compiler = comp, + hostPlatform = compPlatform, + buildDir = buildDir, + cabalFilePath = flagToMaybe (configCabalFilePath cfg), + componentGraph = Graph.fromDistinctList buildComponents, + componentNameMap = buildComponentsMap, + installedPkgs = packageDependsIndex, + pkgDescrFile = Nothing, + localPkgDescr = pkg_descr', + withPrograms = programDb'', + withVanillaLib = fromFlag $ configVanillaLib cfg, + withSharedLib = withSharedLib_, + withStaticLib = withStaticLib_, + withDynExe = withDynExe_, + withProfLib = False, + withProfLibDetail = ProfDetailNone, + withProfExe = False, + withProfExeDetail = ProfDetailNone, + withOptimization = fromFlag $ configOptimization cfg, + withDebugInfo = fromFlag $ configDebugInfo cfg, + withGHCiLib = fromFlagOrDefault ghciLibByDefault $ + configGHCiLib cfg, + splitSections = split_sections, + splitObjs = split_objs, + stripExes = fromFlag $ configStripExes cfg, + stripLibs = fromFlag $ configStripLibs cfg, + exeCoverage = False, + libCoverage = False, + withPackageDB = packageDbs, + progPrefix = fromFlag $ configProgPrefix cfg, + progSuffix = fromFlag $ configProgSuffix cfg, + relocatable = reloc + } + + when reloc (checkRelocatable verbosity pkg_descr lbi) + + -- TODO: This is not entirely correct, because the dirs may vary + -- across libraries/executables + let dirs = absoluteInstallDirs pkg_descr lbi NoCopyDest + relative = prefixRelativeInstallDirs (packageId pkg_descr) lbi + + -- PKGROOT: allowing ${pkgroot} to be passed as --prefix to + -- cabal configure, is only a hidden option. It allows packages + -- to be relocatable with their package database. This however + -- breaks when the Paths_* or other includes are used that + -- contain hard coded paths. This is still an open TODO. + -- + -- Allowing ${pkgroot} here, however requires less custom hooks + -- in scripts that *really* want ${pkgroot}. 
" will not work if you rely on the Paths_* module "
setProgramSearchPath searchpath + $ initialProgramDb + searchpath = getProgramSearchPath initialProgramDb + ++ map ProgramSearchPathDir + (fromNubList $ configProgramPathExtra cfg) + +-- ----------------------------------------------------------------------------- +-- Helper functions for configure + +-- | Check if the user used any deprecated flags. +checkDeprecatedFlags :: Verbosity -> ConfigFlags -> IO () +checkDeprecatedFlags verbosity cfg = do + unless (configProfExe cfg == NoFlag) $ do + let enable | fromFlag (configProfExe cfg) = "enable" + | otherwise = "disable" + warn verbosity + ("The flag --" ++ enable ++ "-executable-profiling is deprecated. " + ++ "Please use --" ++ enable ++ "-profiling instead.") + + unless (configLibCoverage cfg == NoFlag) $ do + let enable | fromFlag (configLibCoverage cfg) = "enable" + | otherwise = "disable" + warn verbosity + ("The flag --" ++ enable ++ "-library-coverage is deprecated. " + ++ "Please use --" ++ enable ++ "-coverage instead.") + +-- | Sanity check: if '--exact-configuration' was given, ensure that the +-- complete flag assignment was specified on the command line. +checkExactConfiguration :: Verbosity -> GenericPackageDescription -> ConfigFlags -> IO () +checkExactConfiguration verbosity pkg_descr0 cfg = + when (fromFlagOrDefault False (configExactConfiguration cfg)) $ do + let cmdlineFlags = map fst (unFlagAssignment (configConfigurationsFlags cfg)) + allFlags = map flagName . genPackageFlags $ pkg_descr0 + diffFlags = allFlags \\ cmdlineFlags + when (not . null $ diffFlags) $ + die' verbosity $ "'--exact-configuration' was given, " + ++ "but the following flags were not specified: " + ++ intercalate ", " (map show diffFlags) + +-- | Create a PackageIndex that makes *any libraries that might be* +-- defined internally to this package look like installed packages, in +-- case an executable should refer to any of them as dependencies. 
+-- +-- It must be *any libraries that might be* defined rather than the +-- actual definitions, because these depend on conditionals in the .cabal +-- file, and we haven't resolved them yet. finalizePD +-- does the resolution of conditionals, and it takes internalPackageSet +-- as part of its input. +getInternalPackages :: GenericPackageDescription + -> Map PackageName (Maybe UnqualComponentName) +getInternalPackages pkg_descr0 = + -- TODO: some day, executables will be fair game here too! + let pkg_descr = flattenPackageDescription pkg_descr0 + f lib = case libName lib of + Nothing -> (packageName pkg_descr, Nothing) + Just n' -> (unqualComponentNameToPackageName n', Just n') + in Map.fromList (map f (allLibraries pkg_descr)) + +-- | Returns true if a dependency is satisfiable. This function may +-- report a dependency satisfiable even when it is not, but not vice +-- versa. This is to be passed to finalizePD. +dependencySatisfiable + :: Bool -- ^ use external internal deps? + -> Bool -- ^ exact configuration? + -> PackageName + -> InstalledPackageIndex -- ^ installed set + -> Map PackageName (Maybe UnqualComponentName) -- ^ internal set + -> Map PackageName InstalledPackageInfo -- ^ required dependencies + -> (Dependency -> Bool) +dependencySatisfiable + use_external_internal_deps + exact_config pn installedPackageSet internalPackageSet requiredDepsMap + d@(Dependency depName vr) + + | exact_config + -- When we're given '--exact-configuration', we assume that all + -- dependencies and flags are exactly specified on the command + -- line. Thus we only consult the 'requiredDepsMap'. Note that + -- we're not doing the version range check, so if there's some + -- dependency that wasn't specified on the command line, + -- 'finalizePD' will fail. + -- TODO: mention '--exact-configuration' in the error message + -- when this fails? 
+ = if isInternalDep && not use_external_internal_deps + -- Except for internal deps, when we're NOT per-component mode; + -- those are just True. + then True + else depName `Map.member` requiredDepsMap + + | isInternalDep + = if use_external_internal_deps + -- When we are doing per-component configure, we now need to + -- test if the internal dependency is in the index. This has + -- DIFFERENT semantics from normal dependency satisfiability. + then internalDepSatisfiable + -- If a 'PackageName' is defined by an internal component, the dep is + -- satisfiable (we're going to build it ourselves) + else True + + | otherwise + = depSatisfiable + + where + isInternalDep = Map.member depName internalPackageSet + + depSatisfiable = + not . null $ PackageIndex.lookupDependency installedPackageSet d + + internalDepSatisfiable = + not . null $ PackageIndex.lookupInternalDependency + installedPackageSet (Dependency pn vr) cn + where + cn | pn == depName + = Nothing + | otherwise + -- Reinterpret the "package name" as an unqualified component + -- name + = Just (mkUnqualComponentName (unPackageName depName)) + +-- | Finalize a generic package description. The workhorse is +-- 'finalizePD' but there's a bit of other nattering +-- about necessary. +-- +-- TODO: what exactly is the business with @flaggedTests@ and +-- @flaggedBenchmarks@? +configureFinalizedPackage + :: Verbosity + -> ConfigFlags + -> ComponentRequestedSpec + -> [Dependency] + -> (Dependency -> Bool) -- ^ tests if a dependency is satisfiable. + -- Might say it's satisfiable even when not. 
+ -> Compiler + -> Platform + -> GenericPackageDescription + -> IO (PackageDescription, FlagAssignment) +configureFinalizedPackage verbosity cfg enabled + allConstraints satisfies comp compPlatform pkg_descr0 = do + + (pkg_descr0', flags) <- + case finalizePD + (configConfigurationsFlags cfg) + enabled + satisfies + compPlatform + (compilerInfo comp) + allConstraints + pkg_descr0 + of Right r -> return r + Left missing -> + die' verbosity $ "Encountered missing dependencies:\n" + ++ (render . nest 4 . sep . punctuate comma + . map (disp . simplifyDependency) + $ missing) + + -- add extra include/lib dirs as specified in cfg + -- we do it here so that those get checked too + let pkg_descr = addExtraIncludeLibDirs pkg_descr0' + + unless (nullFlagAssignment flags) $ + info verbosity $ "Flags chosen: " + ++ intercalate ", " [ unFlagName fn ++ "=" ++ display value + | (fn, value) <- unFlagAssignment flags ] + + return (pkg_descr, flags) + where + addExtraIncludeLibDirs pkg_descr = + let extraBi = mempty { extraLibDirs = configExtraLibDirs cfg + , extraFrameworkDirs = configExtraFrameworkDirs cfg + , PD.includeDirs = configExtraIncludeDirs cfg} + modifyLib l = l{ libBuildInfo = libBuildInfo l + `mappend` extraBi } + modifyExecutable e = e{ buildInfo = buildInfo e + `mappend` extraBi} + modifyForeignLib f = f{ foreignLibBuildInfo = foreignLibBuildInfo f + `mappend` extraBi} + modifyTestsuite t = t{ testBuildInfo = testBuildInfo t + `mappend` extraBi} + modifyBenchmark b = b{ benchmarkBuildInfo = benchmarkBuildInfo b + `mappend` extraBi} + in pkg_descr + { library = modifyLib `fmap` library pkg_descr + , subLibraries = modifyLib `map` subLibraries pkg_descr + , executables = modifyExecutable `map` executables pkg_descr + , foreignLibs = modifyForeignLib `map` foreignLibs pkg_descr + , testSuites = modifyTestsuite `map` testSuites pkg_descr + , benchmarks = modifyBenchmark `map` benchmarks pkg_descr + } + +-- | Check for use of Cabal features which require compiler support 
+checkCompilerProblems :: Verbosity -> Compiler -> PackageDescription -> ComponentRequestedSpec -> IO () +checkCompilerProblems verbosity comp pkg_descr enabled = do + unless (renamingPackageFlagsSupported comp || + all (all (isDefaultIncludeRenaming . mixinIncludeRenaming) . mixins) + (enabledBuildInfos pkg_descr enabled)) $ + die' verbosity $ "Your compiler does not support thinning and renaming on " + ++ "package flags. To use this feature you must use " + ++ "GHC 7.9 or later." + + when (any (not.null.PD.reexportedModules) (PD.allLibraries pkg_descr) + && not (reexportedModulesSupported comp)) $ + die' verbosity $ "Your compiler does not support module re-exports. To use " + ++ "this feature you must use GHC 7.9 or later." + + when (any (not.null.PD.signatures) (PD.allLibraries pkg_descr) + && not (backpackSupported comp)) $ + die' verbosity $ "Your compiler does not support Backpack. To use " + ++ "this feature you must use GHC 8.1 or later." + +-- | Select dependencies for the package. 
+configureDependencies + :: Verbosity + -> UseExternalInternalDeps + -> Map PackageName (Maybe UnqualComponentName) -- ^ internal packages + -> InstalledPackageIndex -- ^ installed packages + -> Map PackageName InstalledPackageInfo -- ^ required deps + -> PackageDescription + -> ComponentRequestedSpec + -> IO [PreExistingComponent] +configureDependencies verbosity use_external_internal_deps + internalPackageSet installedPackageSet requiredDepsMap pkg_descr enableSpec = do + let failedDeps :: [FailedDependency] + allPkgDeps :: [ResolvedDependency] + (failedDeps, allPkgDeps) = partitionEithers + [ (\s -> (dep, s)) <$> status + | dep <- enabledBuildDepends pkg_descr enableSpec + , let status = selectDependency (package pkg_descr) + internalPackageSet installedPackageSet + requiredDepsMap use_external_internal_deps dep ] + + internalPkgDeps = [ pkgid + | (_, InternalDependency pkgid) <- allPkgDeps ] + -- NB: we have to SAVE the package name, because this is the only + -- way we can be able to resolve package names in the package + -- description. + externalPkgDeps = [ pec + | (_, ExternalDependency pec) <- allPkgDeps ] + + when (not (null internalPkgDeps) + && not (newPackageDepsBehaviour pkg_descr)) $ + die' verbosity $ "The field 'build-depends: " + ++ intercalate ", " (map (display . packageName) internalPkgDeps) + ++ "' refers to a library which is defined within the same " + ++ "package. To use this feature the package must specify at " + ++ "least 'cabal-version: >= 1.8'." + + reportFailedDependencies verbosity failedDeps + reportSelectedDependencies verbosity allPkgDeps + + return externalPkgDeps + +-- | Select and apply coverage settings for the build based on the +-- 'ConfigFlags' and 'Compiler'. 
-- but can be overridden by --library-profiling, or the old deprecated
+ -- + -- The --profiling-detail and --library-profiling-detail flags behave + -- similarly + let tryExeProfiling = fromFlagOrDefault False + (mappend (configProf cfg) (configProfExe cfg)) + tryLibProfiling = fromFlagOrDefault tryExeProfiling + (mappend (configProf cfg) (configProfLib cfg)) + in (tryLibProfiling, tryExeProfiling) + +-- | Select and apply profiling settings for the build based on the +-- 'ConfigFlags' and 'Compiler'. +configureProfiling :: Verbosity -> ConfigFlags -> Compiler + -> IO (LocalBuildInfo -> LocalBuildInfo) +configureProfiling verbosity cfg comp = do + let (tryLibProfiling, tryExeProfiling) = computeEffectiveProfiling cfg + + tryExeProfileLevel = fromFlagOrDefault ProfDetailDefault + (configProfDetail cfg) + tryLibProfileLevel = fromFlagOrDefault ProfDetailDefault + (mappend + (configProfDetail cfg) + (configProfLibDetail cfg)) + + checkProfileLevel (ProfDetailOther other) = do + warn verbosity + ("Unknown profiling detail level '" ++ other + ++ "', using default.\nThe profiling detail levels are: " + ++ intercalate ", " + [ name | (name, _, _) <- knownProfDetailLevels ]) + return ProfDetailDefault + checkProfileLevel other = return other + + (exeProfWithoutLibProf, applyProfiling) <- + if profilingSupported comp + then do + exeLevel <- checkProfileLevel tryExeProfileLevel + libLevel <- checkProfileLevel tryLibProfileLevel + let apply lbi = lbi { withProfLib = tryLibProfiling + , withProfLibDetail = libLevel + , withProfExe = tryExeProfiling + , withProfExeDetail = exeLevel + } + return (tryExeProfiling && not tryLibProfiling, apply) + else do + let apply lbi = lbi { withProfLib = False + , withProfLibDetail = ProfDetailNone + , withProfExe = False + , withProfExeDetail = ProfDetailNone + } + when (tryExeProfiling || tryLibProfiling) $ warn verbosity + ("The compiler " ++ showCompilerId comp ++ " does not support " + ++ "profiling. 
Profiling has been disabled.") + return (False, apply) + + when exeProfWithoutLibProf $ warn verbosity + ("Executables will be built with profiling, but library " + ++ "profiling is disabled. Linking will fail if any executables " + ++ "depend on the library.") + + return applyProfiling + +-- ----------------------------------------------------------------------------- +-- Configuring package dependencies + +reportProgram :: Verbosity -> Program -> Maybe ConfiguredProgram -> IO () +reportProgram verbosity prog Nothing + = info verbosity $ "No " ++ programName prog ++ " found" +reportProgram verbosity prog (Just configuredProg) + = info verbosity $ "Using " ++ programName prog ++ version ++ location + where location = case programLocation configuredProg of + FoundOnSystem p -> " found on system at: " ++ p + UserSpecified p -> " given by user at: " ++ p + version = case programVersion configuredProg of + Nothing -> "" + Just v -> " version " ++ display v + +hackageUrl :: String +hackageUrl = "http://hackage.haskell.org/package/" + +type ResolvedDependency = (Dependency, DependencyResolution) + +data DependencyResolution + -- | An external dependency from the package database, OR an + -- internal dependency which we are getting from the package + -- database. + = ExternalDependency PreExistingComponent + -- | An internal dependency ('PackageId' should be a library name) + -- which we are going to have to build. (The + -- 'PackageId' here is a hack to get a modest amount of + -- polymorphism out of the 'Package' typeclass.) + | InternalDependency PackageId + +data FailedDependency = DependencyNotExists PackageName + | DependencyMissingInternal PackageName PackageName + | DependencyNoVersion Dependency + +-- | Test for a package dependency and record the version we have installed. 
+selectDependency :: PackageId -- ^ Package id of current package + -> Map PackageName (Maybe UnqualComponentName) + -> InstalledPackageIndex -- ^ Installed packages + -> Map PackageName InstalledPackageInfo + -- ^ Packages for which we have been given specific deps to + -- use + -> UseExternalInternalDeps -- ^ Are we configuring a + -- single component? + -> Dependency + -> Either FailedDependency DependencyResolution +selectDependency pkgid internalIndex installedIndex requiredDepsMap + use_external_internal_deps + dep@(Dependency dep_pkgname vr) = + -- If the dependency specification matches anything in the internal package + -- index, then we prefer that match to anything in the second. + -- For example: + -- + -- Name: MyLibrary + -- Version: 0.1 + -- Library + -- .. + -- Executable my-exec + -- build-depends: MyLibrary + -- + -- We want "build-depends: MyLibrary" always to match the internal library + -- even if there is a newer installed library "MyLibrary-0.2". + case Map.lookup dep_pkgname internalIndex of + Just cname -> if use_external_internal_deps + then do_external (Just cname) + else do_internal + _ -> do_external Nothing + where + + -- It's an internal library, and we're not per-component build + do_internal = Right $ InternalDependency + $ PackageIdentifier dep_pkgname $ packageVersion pkgid + + -- We have to look it up externally + do_external is_internal = do + ipi <- case Map.lookup dep_pkgname requiredDepsMap of + -- If we know the exact pkg to use, then use it. + Just pkginstance -> Right pkginstance + -- Otherwise we just pick an arbitrary instance of the latest version. 
+ Nothing -> + case is_internal of + Nothing -> do_external_external + Just mb_uqn -> do_external_internal mb_uqn + return $ ExternalDependency $ ipiToPreExistingComponent ipi + + -- It's an external package, normal situation + do_external_external = + case PackageIndex.lookupDependency installedIndex dep of + [] -> Left (DependencyNotExists dep_pkgname) + pkgs -> Right $ head $ snd $ last pkgs + + -- It's an internal library, being looked up externally + do_external_internal mb_uqn = + case PackageIndex.lookupInternalDependency installedIndex + (Dependency (packageName pkgid) vr) mb_uqn of + [] -> Left (DependencyMissingInternal dep_pkgname (packageName pkgid)) + pkgs -> Right $ head $ snd $ last pkgs + +reportSelectedDependencies :: Verbosity + -> [ResolvedDependency] -> IO () +reportSelectedDependencies verbosity deps = + info verbosity $ unlines + [ "Dependency " ++ display (simplifyDependency dep) + ++ ": using " ++ display pkgid + | (dep, resolution) <- deps + , let pkgid = case resolution of + ExternalDependency pkg' -> packageId pkg' + InternalDependency pkgid' -> pkgid' ] + +reportFailedDependencies :: Verbosity -> [FailedDependency] -> IO () +reportFailedDependencies _ [] = return () +reportFailedDependencies verbosity failed = + die' verbosity (intercalate "\n\n" (map reportFailedDependency failed)) + + where + reportFailedDependency (DependencyNotExists pkgname) = + "there is no version of " ++ display pkgname ++ " installed.\n" + ++ "Perhaps you need to download and install it from\n" + ++ hackageUrl ++ display pkgname ++ "?" 
+ + reportFailedDependency (DependencyMissingInternal pkgname real_pkgname) = + "internal dependency " ++ display pkgname ++ " not installed.\n" + ++ "Perhaps you need to configure and install it first?\n" + ++ "(This library was defined by " ++ display real_pkgname ++ ")" + + reportFailedDependency (DependencyNoVersion dep) = + "cannot satisfy dependency " ++ display (simplifyDependency dep) ++ "\n" + +-- | List all installed packages in the given package databases. +-- Non-existent package databases do not cause errors, they just get skipped +-- with a warning and treated as empty ones, since technically they do not +-- contain any package. +getInstalledPackages :: Verbosity -> Compiler + -> PackageDBStack -- ^ The stack of package databases. + -> ProgramDb + -> IO InstalledPackageIndex +getInstalledPackages verbosity comp packageDBs progdb = do + when (null packageDBs) $ + die' verbosity $ "No package databases have been specified. If you use " + ++ "--package-db=clear, you must follow it with --package-db= " + ++ "with 'global', 'user' or a specific file." + + info verbosity "Reading installed packages..." + -- do not check empty packagedbs (ghc-pkg would error out) + packageDBs' <- filterM packageDBExists packageDBs + case compilerFlavor comp of + GHC -> GHC.getInstalledPackages verbosity comp packageDBs' progdb + GHCJS -> GHCJS.getInstalledPackages verbosity packageDBs' progdb + UHC -> UHC.getInstalledPackages verbosity comp packageDBs' progdb + HaskellSuite {} -> + HaskellSuite.getInstalledPackages verbosity packageDBs' progdb + flv -> die' verbosity $ "don't know how to find the installed packages for " + ++ display flv + where + packageDBExists (SpecificPackageDB path) = do + exists <- doesPathExist path + unless exists $ + warn verbosity $ "Package db " <> path <> " does not exist yet" + return exists + -- Checking the user and global package dbs is more complicated and needs + -- way more data. 
Also ghc-pkg won't error out unless the user/global + -- pkgdb is overridden with an empty one, so we just don't check for them. + packageDBExists UserPackageDB = pure True + packageDBExists GlobalPackageDB = pure True + +-- | Like 'getInstalledPackages', but for a single package DB. +-- +-- NB: Why isn't this always a fall through to 'getInstalledPackages'? +-- That is because 'getInstalledPackages' performs some sanity checks +-- on the package database stack in question. However, when sandboxes +-- are involved these sanity checks are not desirable. +getPackageDBContents :: Verbosity -> Compiler + -> PackageDB -> ProgramDb + -> IO InstalledPackageIndex +getPackageDBContents verbosity comp packageDB progdb = do + info verbosity "Reading installed packages..." + case compilerFlavor comp of + GHC -> GHC.getPackageDBContents verbosity packageDB progdb + GHCJS -> GHCJS.getPackageDBContents verbosity packageDB progdb + -- For other compilers, try to fall back on 'getInstalledPackages'. + _ -> getInstalledPackages verbosity comp [packageDB] progdb + + +-- | A set of files (or directories) that can be monitored to detect when +-- there might have been a change in the installed packages. +-- +getInstalledPackagesMonitorFiles :: Verbosity -> Compiler + -> PackageDBStack + -> ProgramDb -> Platform + -> IO [FilePath] +getInstalledPackagesMonitorFiles verbosity comp packageDBs progdb platform = + case compilerFlavor comp of + GHC -> GHC.getInstalledPackagesMonitorFiles + verbosity platform progdb packageDBs + other -> do + warn verbosity $ "don't know how to find change monitoring files for " + ++ "the installed package databases for " ++ display other + return [] + +-- | The user interface specifies the package dbs to use with a combination of +-- @--global@, @--user@ and @--package-db=global|user|clear|$file@. +-- This function combines the global/user flag and interprets the package-db +-- flag into a single package db stack. 
+-- +interpretPackageDbFlags :: Bool -> [Maybe PackageDB] -> PackageDBStack +interpretPackageDbFlags userInstall specificDBs = + extra initialStack specificDBs + where + initialStack | userInstall = [GlobalPackageDB, UserPackageDB] + | otherwise = [GlobalPackageDB] + + extra dbs' [] = dbs' + extra _ (Nothing:dbs) = extra [] dbs + extra dbs' (Just db:dbs) = extra (dbs' ++ [db]) dbs + +-- We are given both --constraint="foo < 2.0" style constraints and also +-- specific packages to pick via --dependency="foo=foo-2.0-177d5cdf20962d0581". +-- +-- When finalising the package we have to take into account the specific +-- installed deps we've been given, and the finalise function expects +-- constraints, so we have to translate these deps into version constraints. +-- +-- But after finalising we then have to make sure we pick the right specific +-- deps in the end. So we still need to remember which installed packages to +-- pick. +combinedConstraints :: [Dependency] -> + [(PackageName, ComponentId)] -> + InstalledPackageIndex -> + Either String ([Dependency], + Map PackageName InstalledPackageInfo) +combinedConstraints constraints dependencies installedPackages = do + + when (not (null badComponentIds)) $ + Left $ render $ text "The following package dependencies were requested" + $+$ nest 4 (dispDependencies badComponentIds) + $+$ text "however the given installed package instance does not exist." + + --TODO: we don't check that all dependencies are used! + + return (allConstraints, idConstraintMap) + + where + allConstraints :: [Dependency] + allConstraints = constraints + ++ [ thisPackageVersion (packageId pkg) + | (_, _, Just pkg) <- dependenciesPkgInfo ] + + idConstraintMap :: Map PackageName InstalledPackageInfo + idConstraintMap = Map.fromList + -- NB: do NOT use the packageName from + -- dependenciesPkgInfo! 
+ [ (pn, pkg) + | (pn, _, Just pkg) <- dependenciesPkgInfo ] + + -- The dependencies along with the installed package info, if it exists + dependenciesPkgInfo :: [(PackageName, ComponentId, + Maybe InstalledPackageInfo)] + dependenciesPkgInfo = + [ (pkgname, cid, mpkg) + | (pkgname, cid) <- dependencies + , let mpkg = PackageIndex.lookupComponentId + installedPackages cid + ] + + -- If we looked up a package specified by an installed package id + -- (i.e. someone has written a hash) and didn't find it then it's + -- an error. + badComponentIds = + [ (pkgname, cid) + | (pkgname, cid, Nothing) <- dependenciesPkgInfo ] + + dispDependencies deps = + hsep [ text "--dependency=" + <<>> quotes (disp pkgname <<>> char '=' <<>> disp cid) + | (pkgname, cid) <- deps ] + +-- ----------------------------------------------------------------------------- +-- Configuring program dependencies + +configureRequiredPrograms :: Verbosity -> [LegacyExeDependency] -> ProgramDb + -> IO ProgramDb +configureRequiredPrograms verbosity deps progdb = + foldM (configureRequiredProgram verbosity) progdb deps + +-- | Configure a required program, ensuring that it exists in the PATH +-- (or where the user has specified the program must live) and making it +-- available for use via the 'ProgramDb' interface. If the program is +-- known (exists in the input 'ProgramDb'), we will make sure that the +-- program matches the required version; otherwise we will accept +-- any version of the program and assume that it is a simpleProgram. +configureRequiredProgram :: Verbosity -> ProgramDb -> LegacyExeDependency + -> IO ProgramDb +configureRequiredProgram verbosity progdb + (LegacyExeDependency progName verRange) = + case lookupKnownProgram progName progdb of + Nothing -> + -- Try to configure it as a 'simpleProgram' automatically + -- + -- There's a bit of a story behind this line. In old versions + -- of Cabal, there were only internal build-tools dependencies. 
So the + -- behavior in this case was: + -- + -- - If a build-tool dependency was internal, don't do + -- any checking. + -- + -- - If it was external, call 'configureRequiredProgram' to + -- "configure" the executable. In particular, if + -- the program was not "known" (present in 'ProgramDb'), + -- then we would just error. This was fine, because + -- the only way a program could be executed from 'ProgramDb' + -- is if some library code from Cabal actually called it, + -- and the pre-existing Cabal code only calls known + -- programs from 'defaultProgramDb', and so if it + -- is calling something else, you have a Custom setup + -- script, and in that case you are expected to register + -- the program you want to call in the ProgramDb. + -- + -- OK, so that was fine, until I (ezyang, in 2016) refactored + -- Cabal to support per-component builds. In this case, what + -- was previously an internal build-tool dependency now became + -- an external one, and now previously "internal" dependencies + -- are now external. But these are permitted to exist even + -- when they are not previously configured (something that + -- can only occur by a Custom script.) + -- + -- So, I decided, "Fine, let's just accept these in any + -- case." Thus this line. The alternative would have been to + -- somehow detect when a build-tools dependency was "internal" (by + -- looking at the unflattened package description) but this + -- would also be incompatible with future work to support + -- external executable dependencies: we definitely cannot + -- assume they will be preinitialized in the 'ProgramDb'. + configureProgram verbosity (simpleProgram progName) progdb + Just prog + -- requireProgramVersion always requires the program have a version + -- but if the user says "build-depends: foo" ie no version constraint + -- then we should not fail if we cannot discover the program version. 
+ | verRange == anyVersion -> do + (_, progdb') <- requireProgram verbosity prog progdb + return progdb' + | otherwise -> do + (_, _, progdb') <- requireProgramVersion verbosity prog verRange progdb + return progdb' + +-- ----------------------------------------------------------------------------- +-- Configuring pkg-config package dependencies + +configurePkgconfigPackages :: Verbosity -> PackageDescription + -> ProgramDb -> ComponentRequestedSpec + -> IO (PackageDescription, ProgramDb) +configurePkgconfigPackages verbosity pkg_descr progdb enabled + | null allpkgs = return (pkg_descr, progdb) + | otherwise = do + (_, _, progdb') <- requireProgramVersion + (lessVerbose verbosity) pkgConfigProgram + (orLaterVersion $ mkVersion [0,9,0]) progdb + traverse_ requirePkg allpkgs + mlib' <- traverse addPkgConfigBILib (library pkg_descr) + libs' <- traverse addPkgConfigBILib (subLibraries pkg_descr) + exes' <- traverse addPkgConfigBIExe (executables pkg_descr) + tests' <- traverse addPkgConfigBITest (testSuites pkg_descr) + benches' <- traverse addPkgConfigBIBench (benchmarks pkg_descr) + let pkg_descr' = pkg_descr { library = mlib', + subLibraries = libs', executables = exes', + testSuites = tests', benchmarks = benches' } + return (pkg_descr', progdb') + + where + allpkgs = concatMap pkgconfigDepends (enabledBuildInfos pkg_descr enabled) + pkgconfig = getDbProgramOutput (lessVerbose verbosity) + pkgConfigProgram progdb + + requirePkg dep@(PkgconfigDependency pkgn range) = do + version <- pkgconfig ["--modversion", pkg] + `catchIO` (\_ -> die' verbosity notFound) + `catchExit` (\_ -> die' verbosity notFound) + case simpleParse version of + Nothing -> die' verbosity "parsing output of pkg-config --modversion failed" + Just v | not (withinRange v range) -> die' verbosity (badVersion v) + | otherwise -> info verbosity (depSatisfied v) + where + notFound = "The pkg-config package '" ++ pkg ++ "'" + ++ versionRequirement + ++ " is required but it could not be found." 
+ badVersion v = "The pkg-config package '" ++ pkg ++ "'" + ++ versionRequirement + ++ " is required but the version installed on the" + ++ " system is version " ++ display v + depSatisfied v = "Dependency " ++ display dep + ++ ": using version " ++ display v + + versionRequirement + | isAnyVersion range = "" + | otherwise = " version " ++ display range + + pkg = unPkgconfigName pkgn + + -- Adds pkgconfig dependencies to the build info for a component + addPkgConfigBI compBI setCompBI comp = do + bi <- pkgconfigBuildInfo (pkgconfigDepends (compBI comp)) + return $ setCompBI comp (compBI comp `mappend` bi) + + -- Adds pkgconfig dependencies to the build info for a library + addPkgConfigBILib = addPkgConfigBI libBuildInfo $ + \lib bi -> lib { libBuildInfo = bi } + + -- Adds pkgconfig dependencies to the build info for an executable + addPkgConfigBIExe = addPkgConfigBI buildInfo $ + \exe bi -> exe { buildInfo = bi } + + -- Adds pkgconfig dependencies to the build info for a test suite + addPkgConfigBITest = addPkgConfigBI testBuildInfo $ + \test bi -> test { testBuildInfo = bi } + + -- Adds pkgconfig dependencies to the build info for a benchmark + addPkgConfigBIBench = addPkgConfigBI benchmarkBuildInfo $ + \bench bi -> bench { benchmarkBuildInfo = bi } + + pkgconfigBuildInfo :: [PkgconfigDependency] -> NoCallStackIO BuildInfo + pkgconfigBuildInfo [] = return mempty + pkgconfigBuildInfo pkgdeps = do + let pkgs = nub [ display pkg | PkgconfigDependency pkg _ <- pkgdeps ] + ccflags <- pkgconfig ("--cflags" : pkgs) + ldflags <- pkgconfig ("--libs" : pkgs) + return (ccLdOptionsBuildInfo (words ccflags) (words ldflags)) + +-- | Makes a 'BuildInfo' from C compiler and linker flags. +-- +-- This can be used with the output from configuration programs like pkg-config +-- and similar package-specific programs like mysql-config, freealut-config etc. 
+-- For example: +-- +-- > ccflags <- getDbProgramOutput verbosity prog progdb ["--cflags"] +-- > ldflags <- getDbProgramOutput verbosity prog progdb ["--libs"] +-- > return (ccldOptionsBuildInfo (words ccflags) (words ldflags)) +-- +ccLdOptionsBuildInfo :: [String] -> [String] -> BuildInfo +ccLdOptionsBuildInfo cflags ldflags = + let (includeDirs', cflags') = partition ("-I" `isPrefixOf`) cflags + (extraLibs', ldflags') = partition ("-l" `isPrefixOf`) ldflags + (extraLibDirs', ldflags'') = partition ("-L" `isPrefixOf`) ldflags' + in mempty { + PD.includeDirs = map (drop 2) includeDirs', + PD.extraLibs = map (drop 2) extraLibs', + PD.extraLibDirs = map (drop 2) extraLibDirs', + PD.ccOptions = cflags', + PD.ldOptions = ldflags'' + } + +-- ----------------------------------------------------------------------------- +-- Determining the compiler details + +configCompilerAuxEx :: ConfigFlags + -> IO (Compiler, Platform, ProgramDb) +configCompilerAuxEx cfg = configCompilerEx (flagToMaybe $ configHcFlavor cfg) + (flagToMaybe $ configHcPath cfg) + (flagToMaybe $ configHcPkg cfg) + programDb + (fromFlag (configVerbosity cfg)) + where + programDb = mkProgramDb cfg defaultProgramDb + +configCompilerEx :: Maybe CompilerFlavor -> Maybe FilePath -> Maybe FilePath + -> ProgramDb -> Verbosity + -> IO (Compiler, Platform, ProgramDb) +configCompilerEx Nothing _ _ _ verbosity = die' verbosity "Unknown compiler" +configCompilerEx (Just hcFlavor) hcPath hcPkg progdb verbosity = do + (comp, maybePlatform, programDb) <- case hcFlavor of + GHC -> GHC.configure verbosity hcPath hcPkg progdb + GHCJS -> GHCJS.configure verbosity hcPath hcPkg progdb + UHC -> UHC.configure verbosity hcPath hcPkg progdb + HaskellSuite {} -> HaskellSuite.configure verbosity hcPath hcPkg progdb + _ -> die' verbosity "Unknown compiler" + return (comp, fromMaybe buildPlatform maybePlatform, programDb) + +-- Ideally we would like to not have separate configCompiler* and +-- configCompiler*Ex sets of functions, but 
there are many custom setup scripts +-- in the wild that are using them, so the versions with old types are kept for +-- backwards compatibility. Platform was added to the return triple in 1.18. + +{-# DEPRECATED configCompiler + "'configCompiler' is deprecated. Use 'configCompilerEx' instead." #-} +configCompiler :: Maybe CompilerFlavor -> Maybe FilePath -> Maybe FilePath + -> ProgramDb -> Verbosity + -> IO (Compiler, ProgramDb) +configCompiler mFlavor hcPath hcPkg progdb verbosity = + fmap (\(a,_,b) -> (a,b)) $ configCompilerEx mFlavor hcPath hcPkg progdb verbosity + +{-# DEPRECATED configCompilerAux + "configCompilerAux is deprecated. Use 'configCompilerAuxEx' instead." #-} +configCompilerAux :: ConfigFlags + -> IO (Compiler, ProgramDb) +configCompilerAux = fmap (\(a,_,b) -> (a,b)) . configCompilerAuxEx + +-- ----------------------------------------------------------------------------- +-- Testing C lib and header dependencies + +-- Try to build a test C program which includes every header and links every +-- lib. If that fails, try to narrow it down by preprocessing (only) and linking +-- with individual headers and libs. If none is the obvious culprit then give a +-- generic error message. +-- TODO: produce a log file from the compiler errors, if any. +checkForeignDeps :: PackageDescription -> LocalBuildInfo -> Verbosity -> IO () +checkForeignDeps pkg lbi verbosity = + ifBuildsWith allHeaders (commonCcArgs ++ makeLdArgs allLibs) -- I'm feeling + -- lucky + (return ()) + (do missingLibs <- findMissingLibs + missingHdr <- findOffendingHdr + explainErrors missingHdr missingLibs) + where + allHeaders = collectField PD.includes + allLibs = collectField PD.extraLibs + + ifBuildsWith headers args success failure = do + checkDuplicateHeaders + ok <- builds (makeProgram headers) args + if ok then success else failure + + -- Ensure that there is only one header with a given name + -- in either the generated (most likely by `configure`) + -- build directory (e.g. 
`dist/build`) or in the source directory. + -- + -- If it exists in both, we'll remove the one in the source + -- directory, as the generated should take precedence. + -- + -- C compilers like to prefer source local relative includes, + -- so the search paths provided to the compiler via -I are + -- ignored if the included file can be found relative to the + -- including file. As such we need to take drastic measures + -- and delete the offending file in the source directory. + checkDuplicateHeaders = do + let relIncDirs = filter (not . isAbsolute) (collectField PD.includeDirs) + isHeader = isSuffixOf ".h" + genHeaders <- forM relIncDirs $ \dir -> + fmap (dir ) . filter isHeader <$> listDirectory (buildDir lbi dir) + `catchIO` (\_ -> return []) + srcHeaders <- forM relIncDirs $ \dir -> + fmap (dir ) . filter isHeader <$> listDirectory (baseDir lbi dir) + `catchIO` (\_ -> return []) + let commonHeaders = concat genHeaders `intersect` concat srcHeaders + forM_ commonHeaders $ \hdr -> do + warn verbosity $ "Duplicate header found in " + ++ (buildDir lbi hdr) + ++ " and " + ++ (baseDir lbi hdr) + ++ "; removing " + ++ (baseDir lbi hdr) + removeFile (baseDir lbi hdr) + + findOffendingHdr = + ifBuildsWith allHeaders ccArgs + (return Nothing) + (go . tail . inits $ allHeaders) + where + go [] = return Nothing -- cannot happen + go (hdrs:hdrsInits) = + -- Try just preprocessing first + ifBuildsWith hdrs cppArgs + -- If that works, try compiling too + (ifBuildsWith hdrs ccArgs + (go hdrsInits) + (return . Just . Right . last $ hdrs)) + (return . Just . Left . last $ hdrs) + + cppArgs = "-E":commonCppArgs -- preprocess only + ccArgs = "-c":commonCcArgs -- don't try to link + + findMissingLibs = ifBuildsWith [] (makeLdArgs allLibs) + (return []) + (filterM (fmap not . libExists) allLibs) + + libExists lib = builds (makeProgram []) (makeLdArgs [lib]) + + baseDir lbi' = fromMaybe "." 
(takeDirectory <$> cabalFilePath lbi') + + commonCppArgs = platformDefines lbi + -- TODO: This is a massive hack, to work around the + -- fact that the test performed here should be + -- PER-component (c.f. the "I'm Feeling Lucky"; we + -- should NOT be glomming everything together.) + ++ [ "-I" ++ buildDir lbi "autogen" ] + -- `configure' may generate headers in the build directory + ++ [ "-I" ++ buildDir lbi dir | dir <- ordNub (collectField PD.includeDirs) + , not (isAbsolute dir)] + -- we might also reference headers from the packages directory. + ++ [ "-I" ++ baseDir lbi dir | dir <- ordNub (collectField PD.includeDirs) + , not (isAbsolute dir)] + ++ [ "-I" ++ dir | dir <- ordNub (collectField PD.includeDirs) + , isAbsolute dir] + ++ ["-I" ++ baseDir lbi] + ++ collectField PD.cppOptions + ++ collectField PD.ccOptions + ++ [ "-I" ++ dir + | dir <- ordNub [ dir + | dep <- deps + , dir <- Installed.includeDirs dep ] + -- dedupe include dirs of dependencies + -- to prevent quadratic blow-up + ] + ++ [ opt + | dep <- deps + , opt <- Installed.ccOptions dep ] + + commonCcArgs = commonCppArgs + ++ collectField PD.ccOptions + ++ [ opt + | dep <- deps + , opt <- Installed.ccOptions dep ] + + commonLdArgs = [ "-L" ++ dir | dir <- ordNub (collectField PD.extraLibDirs) ] + ++ collectField PD.ldOptions + ++ [ "-L" ++ dir + | dir <- ordNub [ dir + | dep <- deps + , dir <- Installed.libraryDirs dep ] + ] + --TODO: do we also need dependent packages' ld options? 
+ makeLdArgs libs = [ "-l"++lib | lib <- libs ] ++ commonLdArgs + + makeProgram hdrs = unlines $ + [ "#include \"" ++ hdr ++ "\"" | hdr <- hdrs ] ++ + ["int main(int argc, char** argv) { return 0; }"] + + collectField f = concatMap f allBi + allBi = enabledBuildInfos pkg (componentEnabledSpec lbi) + deps = PackageIndex.topologicalOrder (installedPkgs lbi) + + builds program args = do + tempDir <- getTemporaryDirectory + withTempFile tempDir ".c" $ \cName cHnd -> + withTempFile tempDir "" $ \oNname oHnd -> do + hPutStrLn cHnd program + hClose cHnd + hClose oHnd + _ <- getDbProgramOutput verbosity + gccProgram (withPrograms lbi) (cName:"-o":oNname:args) + return True + `catchIO` (\_ -> return False) + `catchExit` (\_ -> return False) + + explainErrors Nothing [] = return () -- should be impossible! + explainErrors _ _ + | isNothing . lookupProgram gccProgram . withPrograms $ lbi + + = die' verbosity $ unlines + [ "No working gcc", + "This package depends on foreign library but we cannot " + ++ "find a working C compiler. If you have it in a " + ++ "non-standard location you can use the --with-gcc " + ++ "flag to specify it." ] + + explainErrors hdr libs = die' verbosity $ unlines $ + [ if plural + then "Missing dependencies on foreign libraries:" + else "Missing dependency on a foreign library:" + | missing ] + ++ case hdr of + Just (Left h) -> ["* Missing (or bad) header file: " ++ h ] + _ -> [] + ++ case libs of + [] -> [] + [lib] -> ["* Missing (or bad) C library: " ++ lib] + _ -> ["* Missing (or bad) C libraries: " ++ intercalate ", " libs] + ++ [if plural then messagePlural else messageSingular | missing] + ++ case hdr of + Just (Left _) -> [ headerCppMessage ] + Just (Right h) -> [ (if missing then "* " else "") + ++ "Bad header file: " ++ h + , headerCcMessage ] + _ -> [] + + where + plural = length libs >= 2 + -- Is there something missing? 
(as opposed to broken) + missing = not (null libs) + || case hdr of Just (Left _) -> True; _ -> False + + messageSingular = + "This problem can usually be solved by installing the system " + ++ "package that provides this library (you may need the " + ++ "\"-dev\" version). If the library is already installed " + ++ "but in a non-standard location then you can use the flags " + ++ "--extra-include-dirs= and --extra-lib-dirs= to specify " + ++ "where it is." + ++ "If the library file does exist, it may contain errors that " + ++ "are caught by the C compiler at the preprocessing stage. " + ++ "In this case you can re-run configure with the verbosity " + ++ "flag -v3 to see the error messages." + messagePlural = + "This problem can usually be solved by installing the system " + ++ "packages that provide these libraries (you may need the " + ++ "\"-dev\" versions). If the libraries are already installed " + ++ "but in a non-standard location then you can use the flags " + ++ "--extra-include-dirs= and --extra-lib-dirs= to specify " + ++ "where they are." + ++ "If the library files do exist, it may contain errors that " + ++ "are caught by the C compiler at the preprocessing stage. " + ++ "In this case you can re-run configure with the verbosity " + ++ "flag -v3 to see the error messages." + headerCppMessage = + "If the header file does exist, it may contain errors that " + ++ "are caught by the C compiler at the preprocessing stage. " + ++ "In this case you can re-run configure with the verbosity " + ++ "flag -v3 to see the error messages." + headerCcMessage = + "The header file contains a compile error. " + ++ "You can re-run configure with the verbosity flag " + ++ "-v3 to see the error messages from the C compiler." + +-- | Output package check warnings and errors. Exit if any errors. 
+checkPackageProblems :: Verbosity + -> FilePath + -- ^ Path to the @.cabal@ file's directory + -> GenericPackageDescription + -> PackageDescription + -> IO () +checkPackageProblems verbosity dir gpkg pkg = do + ioChecks <- checkPackageFiles verbosity pkg dir + let pureChecks = checkPackage gpkg (Just pkg) + errors = [ e | PackageBuildImpossible e <- pureChecks ++ ioChecks ] + warnings = [ w | PackageBuildWarning w <- pureChecks ++ ioChecks ] + if null errors + then traverse_ (warn verbosity) warnings + else die' verbosity (intercalate "\n\n" errors) + +-- | Preform checks if a relocatable build is allowed +checkRelocatable :: Verbosity + -> PackageDescription + -> LocalBuildInfo + -> IO () +checkRelocatable verbosity pkg lbi + = sequence_ [ checkOS + , checkCompiler + , packagePrefixRelative + , depsPrefixRelative + ] + where + -- Check if the OS support relocatable builds. + -- + -- If you add new OS' to this list, and your OS supports dynamic libraries + -- and RPATH, make sure you add your OS to RPATH-support list of: + -- Distribution.Simple.GHC.getRPaths + checkOS + = unless (os `elem` [ OSX, Linux ]) + $ die' verbosity $ "Operating system: " ++ display os ++ + ", does not support relocatable builds" + where + (Platform _ os) = hostPlatform lbi + + -- Check if the Compiler support relocatable builds + checkCompiler + = unless (compilerFlavor comp `elem` [ GHC ]) + $ die' verbosity $ "Compiler: " ++ show comp ++ + ", does not support relocatable builds" + where + comp = compiler lbi + + -- Check if all the install dirs are relative to same prefix + packagePrefixRelative + = unless (relativeInstallDirs installDirs) + $ die' verbosity $ "Installation directories are not prefix_relative:\n" ++ + show installDirs + where + -- NB: should be good enough to check this against the default + -- component ID, but if we wanted to be strictly correct we'd + -- check for each ComponentId. 
+ installDirs = absoluteInstallDirs pkg lbi NoCopyDest + p = prefix installDirs + relativeInstallDirs (InstallDirs {..}) = + all isJust + (fmap (stripPrefix p) + [ bindir, libdir, dynlibdir, libexecdir, includedir, datadir + , docdir, mandir, htmldir, haddockdir, sysconfdir] ) + + -- Check if the library dirs of the dependencies that are in the package + -- database to which the package is installed are relative to the + -- prefix of the package + depsPrefixRelative = do + pkgr <- GHC.pkgRoot verbosity lbi (last (withPackageDB lbi)) + traverse_ (doCheck pkgr) ipkgs + where + doCheck pkgr ipkg + | maybe False (== pkgr) (Installed.pkgRoot ipkg) + = traverse_ (\l -> when (isNothing $ stripPrefix p l) (die' verbosity (msg l))) + (Installed.libraryDirs ipkg) + | otherwise + = return () + -- NB: should be good enough to check this against the default + -- component ID, but if we wanted to be strictly correct we'd + -- check for each ComponentId. + installDirs = absoluteInstallDirs pkg lbi NoCopyDest + p = prefix installDirs + ipkgs = PackageIndex.allPackages (installedPkgs lbi) + msg l = "Library directory of a dependency: " ++ show l ++ + "\nis not relative to the installation prefix:\n" ++ + show p + +-- ----------------------------------------------------------------------------- +-- Testing foreign library requirements + +unsupportedForeignLibs :: Compiler -> Platform -> [ForeignLib] -> [String] +unsupportedForeignLibs comp platform = + mapMaybe (checkForeignLibSupported comp platform) + +checkForeignLibSupported :: Compiler -> Platform -> ForeignLib -> Maybe String +checkForeignLibSupported comp platform flib = go (compilerFlavor comp) + where + go :: CompilerFlavor -> Maybe String + go GHC + | compilerVersion comp < mkVersion [7,8] = unsupported [ + "Building foreign libraires is only supported with GHC >= 7.8" + ] + | otherwise = goGhcPlatform platform + go _ = unsupported [ + "Building foreign libraries is currently only supported with ghc" + ] + + goGhcPlatform 
:: Platform -> Maybe String + goGhcPlatform (Platform X86_64 OSX ) = goGhcOsx (foreignLibType flib) + goGhcPlatform (Platform I386 Linux ) = goGhcLinux (foreignLibType flib) + goGhcPlatform (Platform X86_64 Linux ) = goGhcLinux (foreignLibType flib) + goGhcPlatform (Platform I386 Windows) = goGhcWindows (foreignLibType flib) + goGhcPlatform (Platform X86_64 Windows) = goGhcWindows (foreignLibType flib) + goGhcPlatform _ = unsupported [ + "Building foreign libraries is currently only supported on OSX, " + , "Linux and Windows" + ] + + goGhcOsx :: ForeignLibType -> Maybe String + goGhcOsx ForeignLibNativeShared + | not (null (foreignLibModDefFile flib)) = unsupported [ + "Module definition file not supported on OSX" + ] + | not (null (foreignLibVersionInfo flib)) = unsupported [ + "Foreign library versioning not currently supported on OSX" + ] + | otherwise = + Nothing + goGhcOsx _ = unsupported [ + "We can currently only build shared foreign libraries on OSX" + ] + + goGhcLinux :: ForeignLibType -> Maybe String + goGhcLinux ForeignLibNativeShared + | not (null (foreignLibModDefFile flib)) = unsupported [ + "Module definition file not supported on Linux" + ] + | not (null (foreignLibVersionInfo flib)) + && not (null (foreignLibVersionLinux flib)) = unsupported [ + "You must not specify both lib-version-info and lib-version-linux" + ] + | otherwise = + Nothing + goGhcLinux _ = unsupported [ + "We can currently only build shared foreign libraries on Linux" + ] + + goGhcWindows :: ForeignLibType -> Maybe String + goGhcWindows ForeignLibNativeShared + | not standalone = unsupported [ + "We can currently only build standalone libraries on Windows. Use\n" + , " if os(Windows)\n" + , " options: standalone\n" + , "in your foreign-library stanza." + ] + | not (null (foreignLibVersionInfo flib)) = unsupported [ + "Foreign library versioning not currently supported on Windows.\n" + , "You can specify module definition files in the mod-def-file field." 
+ ] + | otherwise = + Nothing + goGhcWindows _ = unsupported [ + "We can currently only build shared foreign libraries on Windows" + ] + + standalone :: Bool + standalone = ForeignLibStandalone `elem` foreignLibOptions flib + + unsupported :: [String] -> Maybe String + unsupported = Just . concat diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Doctest.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Doctest.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Doctest.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Doctest.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,187 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Doctest +-- Copyright : Moritz Angermann 2017 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module deals with the @doctest@ command. 
+ +-- Note: this module is modelled after Distribution.Simple.Haddock + +module Distribution.Simple.Doctest ( + doctest + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import qualified Distribution.Simple.GHC as GHC +import qualified Distribution.Simple.GHCJS as GHCJS + +-- local +import Distribution.PackageDescription as PD hiding (Flag) +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Simple.Program.GHC +import Distribution.Simple.Program +import Distribution.Simple.PreProcess +import Distribution.Simple.Setup +import Distribution.Simple.Build +import Distribution.Simple.LocalBuildInfo hiding (substPathTemplate) +import Distribution.Simple.Register (internalPackageDBPath) +import Distribution.Simple.BuildPaths +import Distribution.Simple.Utils +import Distribution.System +import Distribution.Version +import Distribution.Verbosity + +-- ----------------------------------------------------------------------------- +-- Types + +-- | A record that represents the arguments to the doctest executable. 
+data DoctestArgs = DoctestArgs { + argTargets :: [FilePath] + -- ^ Modules to process + , argGhcOptions :: Flag (GhcOptions, Version) +} deriving (Show, Generic) + +-- ----------------------------------------------------------------------------- +-- Doctest support + +doctest :: PackageDescription + -> LocalBuildInfo + -> [PPSuffixHandler] + -> DoctestFlags + -> IO () +doctest pkg_descr lbi suffixes doctestFlags = do + let verbosity = flag doctestVerbosity + distPref = flag doctestDistPref + flag f = fromFlag $ f doctestFlags + tmpFileOpts = defaultTempFileOptions + lbi' = lbi { withPackageDB = withPackageDB lbi + ++ [SpecificPackageDB (internalPackageDBPath lbi distPref)] } + + (doctestProg, _version, _) <- + requireProgramVersion verbosity doctestProgram + (orLaterVersion (mkVersion [0,11,3])) (withPrograms lbi) + + withAllComponentsInBuildOrder pkg_descr lbi $ \component clbi -> do + componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity + preprocessComponent pkg_descr component lbi clbi False verbosity suffixes + + case component of + CLib lib -> do + withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi) "tmp" $ + \tmp -> do + inFiles <- map snd <$> getLibSourceFiles verbosity lbi lib clbi + args <- mkDoctestArgs verbosity tmp lbi' clbi inFiles (libBuildInfo lib) + runDoctest verbosity (compiler lbi) (hostPlatform lbi) doctestProg args + CExe exe -> do + withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi) "tmp" $ + \tmp -> do + inFiles <- map snd <$> getExeSourceFiles verbosity lbi exe clbi + args <- mkDoctestArgs verbosity tmp lbi' clbi inFiles (buildInfo exe) + runDoctest verbosity (compiler lbi) (hostPlatform lbi) doctestProg args + CFLib _ -> return () -- do not doctest foreign libs + CTest _ -> return () -- do not doctest tests + CBench _ -> return () -- do not doctest benchmarks + +-- ----------------------------------------------------------------------------- +-- Contributions to DoctestArgs (see also Haddock.hs for very similar 
code). + +componentGhcOptions :: Verbosity -> LocalBuildInfo + -> BuildInfo -> ComponentLocalBuildInfo -> FilePath + -> GhcOptions +componentGhcOptions verbosity lbi bi clbi odir = + let f = case compilerFlavor (compiler lbi) of + GHC -> GHC.componentGhcOptions + GHCJS -> GHCJS.componentGhcOptions + _ -> error $ + "Distribution.Simple.Doctest.componentGhcOptions:" ++ + "doctest only supports GHC and GHCJS" + in f verbosity lbi bi clbi odir + +mkDoctestArgs :: Verbosity + -> FilePath + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> [FilePath] + -> BuildInfo + -> IO DoctestArgs +mkDoctestArgs verbosity tmp lbi clbi inFiles bi = do + let vanillaOpts = (componentGhcOptions normal lbi bi clbi (buildDir lbi)) + { ghcOptOptimisation = mempty -- no optimizations when runnign doctest + -- disable -Wmissing-home-modules + , ghcOptWarnMissingHomeModules = mempty + -- clear out ghc-options: these are likely not meant for doctest. + -- If so, should be explicitly specified via doctest-ghc-options: again. + , ghcOptExtra = mempty + , ghcOptCabal = toFlag False + + , ghcOptObjDir = toFlag tmp + , ghcOptHiDir = toFlag tmp + , ghcOptStubDir = toFlag tmp } + sharedOpts = vanillaOpts + { ghcOptDynLinkMode = toFlag GhcDynamicOnly + , ghcOptFPic = toFlag True + , ghcOptHiSuffix = toFlag "dyn_hi" + , ghcOptObjSuffix = toFlag "dyn_o" + , ghcOptExtra = hcSharedOptions GHC bi} + opts <- if withVanillaLib lbi + then return vanillaOpts + else if withSharedLib lbi + then return sharedOpts + else die' verbosity $ "Must have vanilla or shared lirbaries " + ++ "enabled in order to run doctest" + ghcVersion <- maybe (die' verbosity "Compiler has no GHC version") + return + (compilerCompatVersion GHC (compiler lbi)) + return $ DoctestArgs + { argTargets = inFiles + , argGhcOptions = toFlag (opts, ghcVersion) + } + + +-- ----------------------------------------------------------------------------- +-- Call doctest with the specified arguments. 
+runDoctest :: Verbosity + -> Compiler + -> Platform + -> ConfiguredProgram + -> DoctestArgs + -> IO () +runDoctest verbosity comp platform doctestProg args = do + renderArgs verbosity comp platform args $ + \(flags, files) -> do + runProgram verbosity doctestProg (flags <> files) + +renderArgs :: Verbosity + -> Compiler + -> Platform + -> DoctestArgs + -> (([String],[FilePath]) -> IO a) + -> IO a +renderArgs _verbosity comp platform args k = do + k (flags, argTargets args) + where + flags :: [String] + flags = mconcat + [ pure "--no-magic" -- disable doctests automagic discovery heuristics + , pure "-fdiagnostics-color=never" -- disable ghc's color diagnostics. + , [ opt | (opts, _ghcVer) <- flagToList (argGhcOptions args) + , opt <- renderGhcOptions comp platform opts ] + ] + +-- ------------------------------------------------------------------------------ +-- Boilerplate Monoid instance. +instance Monoid DoctestArgs where + mempty = gmempty + mappend = (<>) + +instance Semigroup DoctestArgs where + (<>) = gmappend diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Flag.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Flag.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Flag.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Flag.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,124 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Flag +-- Copyright : Isaac Jones 2003-2004 +-- Duncan Coutts 2007 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Defines the 'Flag' type and it's 'Monoid' instance, see +-- +-- for an explanation. 
+-- +-- Split off from "Distribution.Simple.Setup" to break import cycles. +module Distribution.Simple.Flag ( + Flag(..), + allFlags, + toFlag, + fromFlag, + fromFlagOrDefault, + flagToMaybe, + flagToList, + maybeToFlag, + BooleanFlag(..) ) where + +import Prelude () +import Distribution.Compat.Prelude hiding (get) +import Distribution.Compat.Stack + +-- ------------------------------------------------------------ +-- * Flag type +-- ------------------------------------------------------------ + +-- | All flags are monoids, they come in two flavours: +-- +-- 1. list flags eg +-- +-- > --ghc-option=foo --ghc-option=bar +-- +-- gives us all the values ["foo", "bar"] +-- +-- 2. singular value flags, eg: +-- +-- > --enable-foo --disable-foo +-- +-- gives us Just False +-- So this Flag type is for the latter singular kind of flag. +-- Its monoid instance gives us the behaviour where it starts out as +-- 'NoFlag' and later flags override earlier ones. +-- +data Flag a = Flag a | NoFlag deriving (Eq, Generic, Show, Read) + +instance Binary a => Binary (Flag a) + +instance Functor Flag where + fmap f (Flag x) = Flag (f x) + fmap _ NoFlag = NoFlag + +instance Applicative Flag where + (Flag x) <*> y = x <$> y + NoFlag <*> _ = NoFlag + pure = Flag + +instance Monoid (Flag a) where + mempty = NoFlag + mappend = (<>) + +instance Semigroup (Flag a) where + _ <> f@(Flag _) = f + f <> NoFlag = f + +instance Bounded a => Bounded (Flag a) where + minBound = toFlag minBound + maxBound = toFlag maxBound + +instance Enum a => Enum (Flag a) where + fromEnum = fromEnum . fromFlag + toEnum = toFlag . toEnum + enumFrom (Flag a) = map toFlag . 
enumFrom $ a + enumFrom _ = [] + enumFromThen (Flag a) (Flag b) = toFlag `map` enumFromThen a b + enumFromThen _ _ = [] + enumFromTo (Flag a) (Flag b) = toFlag `map` enumFromTo a b + enumFromTo _ _ = [] + enumFromThenTo (Flag a) (Flag b) (Flag c) = toFlag `map` enumFromThenTo a b c + enumFromThenTo _ _ _ = [] + +toFlag :: a -> Flag a +toFlag = Flag + +fromFlag :: WithCallStack (Flag a -> a) +fromFlag (Flag x) = x +fromFlag NoFlag = error "fromFlag NoFlag. Use fromFlagOrDefault" + +fromFlagOrDefault :: a -> Flag a -> a +fromFlagOrDefault _ (Flag x) = x +fromFlagOrDefault def NoFlag = def + +flagToMaybe :: Flag a -> Maybe a +flagToMaybe (Flag x) = Just x +flagToMaybe NoFlag = Nothing + +flagToList :: Flag a -> [a] +flagToList (Flag x) = [x] +flagToList NoFlag = [] + +allFlags :: [Flag Bool] -> Flag Bool +allFlags flags = if all (\f -> fromFlagOrDefault False f) flags + then Flag True + else NoFlag + +maybeToFlag :: Maybe a -> Flag a +maybeToFlag Nothing = NoFlag +maybeToFlag (Just x) = Flag x + +-- | Types that represent boolean flags. 
+class BooleanFlag a where + asBool :: a -> Bool + +instance BooleanFlag Bool where + asBool = id diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/GHC/EnvironmentParser.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/GHC/EnvironmentParser.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/GHC/EnvironmentParser.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/GHC/EnvironmentParser.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,51 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} +module Distribution.Simple.GHC.EnvironmentParser + ( parseGhcEnvironmentFile, readGhcEnvironmentFile, ParseErrorExc(..) ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Simple.Compiler + ( PackageDB(..) ) +import Distribution.Simple.GHC.Internal + ( GhcEnvironmentFileEntry(..) 
) +import Distribution.Types.UnitId + ( mkUnitId ) + +import Control.Exception + ( Exception, throwIO ) +import qualified Text.Parsec as P +import Text.Parsec.String + ( Parser, parseFromFile ) + +parseEnvironmentFileLine :: Parser GhcEnvironmentFileEntry +parseEnvironmentFileLine = GhcEnvFileComment <$> comment + <|> GhcEnvFilePackageId <$> unitId + <|> GhcEnvFilePackageDb <$> packageDb + <|> pure GhcEnvFileClearPackageDbStack <* clearDb + where + comment = P.string "--" *> P.many (P.noneOf "\r\n") + unitId = P.try $ P.string "package-id" *> P.spaces *> + (mkUnitId <$> P.many1 (P.satisfy $ \c -> isAlphaNum c || c `elem` "-_.+")) + packageDb = (P.string "global-package-db" *> pure GlobalPackageDB) + <|> (P.string "user-package-db" *> pure UserPackageDB) + <|> (P.string "package-db" *> P.spaces *> (SpecificPackageDB <$> P.many1 (P.noneOf "\r\n") <* P.lookAhead P.endOfLine)) + clearDb = P.string "clear-package-db" + +newtype ParseErrorExc = ParseErrorExc P.ParseError + deriving (Show, Typeable) + +instance Exception ParseErrorExc + +parseGhcEnvironmentFile :: Parser [GhcEnvironmentFileEntry] +parseGhcEnvironmentFile = parseEnvironmentFileLine `P.sepEndBy` P.endOfLine <* P.eof + +readGhcEnvironmentFile :: FilePath -> IO [GhcEnvironmentFileEntry] +readGhcEnvironmentFile path = + either (throwIO . 
ParseErrorExc) return =<< + parseFromFile parseGhcEnvironmentFile path diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/GHC/ImplInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/GHC/ImplInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/GHC/ImplInfo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/GHC/ImplInfo.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,93 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.GHC.ImplInfo +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module contains the data structure describing invocation +-- details for a GHC or GHC-derived compiler, such as supported flags +-- and workarounds for bugs. + +module Distribution.Simple.GHC.ImplInfo ( + GhcImplInfo(..), getImplInfo, + ghcVersionImplInfo, ghcjsVersionImplInfo + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Simple.Compiler +import Distribution.Version + +{- | + Information about features and quirks of a GHC-based implementation. + + Compiler flavors based on GHC behave similarly enough that some of + the support code for them is shared. Every implementation has its + own peculiarities, that may or may not be a direct result of the + underlying GHC version. This record keeps track of these differences. + + All shared code (i.e. everything not in the Distribution.Simple.FLAVOR + module) should use implementation info rather than version numbers + to test for supported features. 
+-} + +data GhcImplInfo = GhcImplInfo + { supportsHaskell2010 :: Bool -- ^ -XHaskell2010 and -XHaskell98 flags + , reportsNoExt :: Bool -- ^ --supported-languages gives Ext and NoExt + , alwaysNondecIndent :: Bool -- ^ NondecreasingIndentation is always on + , flagGhciScript :: Bool -- ^ -ghci-script flag supported + , flagProfAuto :: Bool -- ^ new style -fprof-auto* flags + , flagPackageConf :: Bool -- ^ use package-conf instead of package-db + , flagDebugInfo :: Bool -- ^ -g flag supported + , supportsDebugLevels :: Bool -- ^ supports numeric @-g@ levels + , supportsPkgEnvFiles :: Bool -- ^ picks up @.ghc.environment@ files + , flagWarnMissingHomeModules :: Bool -- ^ -Wmissing-home-modules is supported + } + +getImplInfo :: Compiler -> GhcImplInfo +getImplInfo comp = + case compilerFlavor comp of + GHC -> ghcVersionImplInfo (compilerVersion comp) + GHCJS -> case compilerCompatVersion GHC comp of + Just ghcVer -> ghcjsVersionImplInfo (compilerVersion comp) ghcVer + _ -> error ("Distribution.Simple.GHC.Props.getImplProps: " ++ + "could not find GHC version for GHCJS compiler") + x -> error ("Distribution.Simple.GHC.Props.getImplProps only works" ++ + "for GHC-like compilers (GHC, GHCJS)" ++ + ", but found " ++ show x) + +ghcVersionImplInfo :: Version -> GhcImplInfo +ghcVersionImplInfo ver = GhcImplInfo + { supportsHaskell2010 = v >= [7] + , reportsNoExt = v >= [7] + , alwaysNondecIndent = v < [7,1] + , flagGhciScript = v >= [7,2] + , flagProfAuto = v >= [7,4] + , flagPackageConf = v < [7,5] + , flagDebugInfo = v >= [7,10] + , supportsDebugLevels = v >= [8,0] + , supportsPkgEnvFiles = v >= [8,0,1,20160901] -- broken in 8.0.1, fixed in 8.0.2 + , flagWarnMissingHomeModules = v >= [8,2] + } + where + v = versionNumbers ver + +ghcjsVersionImplInfo :: Version -- ^ The GHCJS version + -> Version -- ^ The GHC version + -> GhcImplInfo +ghcjsVersionImplInfo _ghcjsver ghcver = GhcImplInfo + { supportsHaskell2010 = True + , reportsNoExt = True + , alwaysNondecIndent = False + 
, flagGhciScript = True + , flagProfAuto = True + , flagPackageConf = False + , flagDebugInfo = False + , supportsDebugLevels = ghcv >= [8,0] + , supportsPkgEnvFiles = ghcv >= [8,0,2] --TODO: check this works in ghcjs + , flagWarnMissingHomeModules = ghcv >= [8,2] + } + where + ghcv = versionNumbers ghcver diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/GHC/Internal.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/GHC/Internal.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/GHC/Internal.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/GHC/Internal.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,613 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.GHC.Internal +-- Copyright : Isaac Jones 2003-2007 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module contains functions shared by GHC (Distribution.Simple.GHC) +-- and GHC-derived compilers. 
+ +module Distribution.Simple.GHC.Internal ( + configureToolchain, + getLanguages, + getExtensions, + targetPlatform, + getGhcInfo, + componentCcGhcOptions, + componentCxxGhcOptions, + componentGhcOptions, + mkGHCiLibName, + mkGHCiProfLibName, + filterGhciFlags, + ghcLookupProperty, + getHaskellObjects, + mkGhcOptPackages, + substTopDir, + checkPackageDbEnvVar, + profDetailLevelFlag, + -- * GHC platform and version strings + ghcArchString, + ghcOsString, + ghcPlatformAndVersionString, + -- * Constructing GHC environment files + GhcEnvironmentFileEntry(..), + writeGhcEnvironmentFile, + simpleGhcEnvironmentFile, + ghcEnvironmentFileName, + renderGhcEnvironmentFile, + renderGhcEnvironmentFileEntry, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Simple.GHC.ImplInfo +import Distribution.Types.ComponentLocalBuildInfo +import Distribution.Backpack +import Distribution.InstalledPackageInfo +import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo +import Distribution.PackageDescription as PD hiding (Flag) +import Distribution.Compat.Exception +import Distribution.Lex +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Simple.Program.GHC +import Distribution.Simple.Setup +import qualified Distribution.ModuleName as ModuleName +import Distribution.Simple.Program +import Distribution.Simple.LocalBuildInfo +import Distribution.Types.UnitId +import Distribution.Types.LocalBuildInfo +import Distribution.Types.TargetInfo +import Distribution.Simple.Utils +import Distribution.Simple.BuildPaths +import Distribution.System +import Distribution.Text ( display, simpleParse ) +import Distribution.Utils.NubList ( toNubListR ) +import Distribution.Verbosity +import Distribution.Compat.Stack +import Distribution.Version (Version) +import Language.Haskell.Extension + +import qualified Data.Map as Map +import qualified Data.ByteString.Lazy.Char8 as BS +import System.Directory ( getDirectoryContents, 
getTemporaryDirectory ) +import System.Environment ( getEnv ) +import System.FilePath ( (), (<.>), takeExtension + , takeDirectory, takeFileName) +import System.IO ( hClose, hPutStrLn ) + +targetPlatform :: [(String, String)] -> Maybe Platform +targetPlatform ghcInfo = platformFromTriple =<< lookup "Target platform" ghcInfo + +-- | Adjust the way we find and configure gcc and ld +-- +configureToolchain :: GhcImplInfo + -> ConfiguredProgram + -> Map String String + -> ProgramDb + -> ProgramDb +configureToolchain _implInfo ghcProg ghcInfo = + addKnownProgram gccProgram { + programFindLocation = findProg gccProgramName extraGccPath, + programPostConf = configureGcc + } + . addKnownProgram ldProgram { + programFindLocation = findProg ldProgramName extraLdPath, + programPostConf = configureLd + } + . addKnownProgram arProgram { + programFindLocation = findProg arProgramName extraArPath + } + . addKnownProgram stripProgram { + programFindLocation = findProg stripProgramName extraStripPath + } + where + compilerDir = takeDirectory (programPath ghcProg) + base_dir = takeDirectory compilerDir + mingwBinDir = base_dir "mingw" "bin" + isWindows = case buildOS of Windows -> True; _ -> False + binPrefix = "" + + maybeName :: Program -> Maybe FilePath -> String + maybeName prog = maybe (programName prog) (dropExeExtension . takeFileName) + + gccProgramName = maybeName gccProgram mbGccLocation + ldProgramName = maybeName ldProgram mbLdLocation + arProgramName = maybeName arProgram mbArLocation + stripProgramName = maybeName stripProgram mbStripLocation + + mkExtraPath :: Maybe FilePath -> FilePath -> [FilePath] + mkExtraPath mbPath mingwPath | isWindows = mbDir ++ [mingwPath] + | otherwise = mbDir + where + mbDir = maybeToList . 
fmap takeDirectory $ mbPath + + extraGccPath = mkExtraPath mbGccLocation windowsExtraGccDir + extraLdPath = mkExtraPath mbLdLocation windowsExtraLdDir + extraArPath = mkExtraPath mbArLocation windowsExtraArDir + extraStripPath = mkExtraPath mbStripLocation windowsExtraStripDir + + -- on Windows finding and configuring ghc's gcc & binutils is a bit special + (windowsExtraGccDir, windowsExtraLdDir, + windowsExtraArDir, windowsExtraStripDir) = + let b = mingwBinDir binPrefix + in (b, b, b, b) + + findProg :: String -> [FilePath] + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) + findProg progName extraPath v searchpath = + findProgramOnSearchPath v searchpath' progName + where + searchpath' = (map ProgramSearchPathDir extraPath) ++ searchpath + + -- Read tool locations from the 'ghc --info' output. Useful when + -- cross-compiling. + mbGccLocation = Map.lookup "C compiler command" ghcInfo + mbLdLocation = Map.lookup "ld command" ghcInfo + mbArLocation = Map.lookup "ar command" ghcInfo + mbStripLocation = Map.lookup "strip command" ghcInfo + + ccFlags = getFlags "C compiler flags" + -- GHC 7.8 renamed "Gcc Linker flags" to "C compiler link flags" + -- and "Ld Linker flags" to "ld flags" (GHC #4862). + gccLinkerFlags = getFlags "Gcc Linker flags" ++ getFlags "C compiler link flags" + ldLinkerFlags = getFlags "Ld Linker flags" ++ getFlags "ld flags" + + -- It appears that GHC 7.6 and earlier encode the tokenized flags as a + -- [String] in these settings whereas later versions just encode the flags as + -- String. + -- + -- We first try to parse as a [String] and if this fails then tokenize the + -- flags ourself. 
+ getFlags :: String -> [String] + getFlags key = + case Map.lookup key ghcInfo of + Nothing -> [] + Just flags + | (flags', ""):_ <- reads flags -> flags' + | otherwise -> tokenizeQuotedWords flags + + configureGcc :: Verbosity -> ConfiguredProgram -> NoCallStackIO ConfiguredProgram + configureGcc _v gccProg = do + return gccProg { + programDefaultArgs = programDefaultArgs gccProg + ++ ccFlags ++ gccLinkerFlags + } + + configureLd :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram + configureLd v ldProg = do + ldProg' <- configureLd' v ldProg + return ldProg' { + programDefaultArgs = programDefaultArgs ldProg' ++ ldLinkerFlags + } + + -- we need to find out if ld supports the -x flag + configureLd' :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram + configureLd' verbosity ldProg = do + tempDir <- getTemporaryDirectory + ldx <- withTempFile tempDir ".c" $ \testcfile testchnd -> + withTempFile tempDir ".o" $ \testofile testohnd -> do + hPutStrLn testchnd "int foo() { return 0; }" + hClose testchnd; hClose testohnd + runProgram verbosity ghcProg + [ "-hide-all-packages" + , "-c", testcfile + , "-o", testofile + ] + withTempFile tempDir ".o" $ \testofile' testohnd' -> + do + hClose testohnd' + _ <- getProgramOutput verbosity ldProg + ["-x", "-r", testofile, "-o", testofile'] + return True + `catchIO` (\_ -> return False) + `catchExit` (\_ -> return False) + if ldx + then return ldProg { programDefaultArgs = ["-x"] } + else return ldProg + +getLanguages :: Verbosity -> GhcImplInfo -> ConfiguredProgram + -> NoCallStackIO [(Language, String)] +getLanguages _ implInfo _ + -- TODO: should be using --supported-languages rather than hard coding + | supportsHaskell2010 implInfo = return [(Haskell98, "-XHaskell98") + ,(Haskell2010, "-XHaskell2010")] + | otherwise = return [(Haskell98, "")] + +getGhcInfo :: Verbosity -> GhcImplInfo -> ConfiguredProgram + -> IO [(String, String)] +getGhcInfo verbosity _implInfo ghcProg = do + xs <- getProgramOutput verbosity 
(suppressOverrideArgs ghcProg) + ["--info"] + case reads xs of + [(i, ss)] + | all isSpace ss -> + return i + _ -> + die' verbosity "Can't parse --info output of GHC" + +getExtensions :: Verbosity -> GhcImplInfo -> ConfiguredProgram + -> IO [(Extension, Maybe String)] +getExtensions verbosity implInfo ghcProg = do + str <- getProgramOutput verbosity (suppressOverrideArgs ghcProg) + ["--supported-languages"] + let extStrs = if reportsNoExt implInfo + then lines str + else -- Older GHCs only gave us either Foo or NoFoo, + -- so we have to work out the other one ourselves + [ extStr'' + | extStr <- lines str + , let extStr' = case extStr of + 'N' : 'o' : xs -> xs + _ -> "No" ++ extStr + , extStr'' <- [extStr, extStr'] + ] + let extensions0 = [ (ext, Just $ "-X" ++ display ext) + | Just ext <- map simpleParse extStrs ] + extensions1 = if alwaysNondecIndent implInfo + then -- ghc-7.2 split NondecreasingIndentation off + -- into a proper extension. Before that it + -- was always on. + -- Since it was not a proper extension, it could + -- not be turned off, hence we omit a + -- DisableExtension entry here. + (EnableExtension NondecreasingIndentation, Nothing) : + extensions0 + else extensions0 + return extensions1 + +componentCcGhcOptions :: Verbosity -> GhcImplInfo -> LocalBuildInfo + -> BuildInfo -> ComponentLocalBuildInfo + -> FilePath -> FilePath + -> GhcOptions +componentCcGhcOptions verbosity _implInfo lbi bi clbi odir filename = + mempty { + -- Respect -v0, but don't crank up verbosity on GHC if + -- Cabal verbosity is requested. For that, use --ghc-option=-v instead! 
+ ghcOptVerbosity = toFlag (min verbosity normal), + ghcOptMode = toFlag GhcModeCompile, + ghcOptInputFiles = toNubListR [filename], + + ghcOptCppIncludePath = toNubListR $ [autogenComponentModulesDir lbi clbi + ,autogenPackageModulesDir lbi + ,odir] + -- includes relative to the package + ++ PD.includeDirs bi + -- potential includes generated by `configure' + -- in the build directory + ++ [buildDir lbi dir | dir <- PD.includeDirs bi], + ghcOptHideAllPackages= toFlag True, + ghcOptPackageDBs = withPackageDB lbi, + ghcOptPackages = toNubListR $ mkGhcOptPackages clbi, + ghcOptCcOptions = (case withOptimization lbi of + NoOptimisation -> [] + _ -> ["-O2"]) ++ + (case withDebugInfo lbi of + NoDebugInfo -> [] + MinimalDebugInfo -> ["-g1"] + NormalDebugInfo -> ["-g"] + MaximalDebugInfo -> ["-g3"]) ++ + PD.ccOptions bi, + ghcOptObjDir = toFlag odir + } + + +componentCxxGhcOptions :: Verbosity -> GhcImplInfo -> LocalBuildInfo + -> BuildInfo -> ComponentLocalBuildInfo + -> FilePath -> FilePath + -> GhcOptions +componentCxxGhcOptions verbosity _implInfo lbi bi clbi odir filename = + mempty { + -- Respect -v0, but don't crank up verbosity on GHC if + -- Cabal verbosity is requested. For that, use --ghc-option=-v instead! 
+ ghcOptVerbosity = toFlag (min verbosity normal), + ghcOptMode = toFlag GhcModeCompile, + ghcOptInputFiles = toNubListR [filename], + + ghcOptCppIncludePath = toNubListR $ [autogenComponentModulesDir lbi clbi + ,autogenPackageModulesDir lbi + ,odir] + -- includes relative to the package + ++ PD.includeDirs bi + -- potential includes generated by `configure' + -- in the build directory + ++ [buildDir lbi dir | dir <- PD.includeDirs bi], + ghcOptHideAllPackages= toFlag True, + ghcOptPackageDBs = withPackageDB lbi, + ghcOptPackages = toNubListR $ mkGhcOptPackages clbi, + ghcOptCxxOptions = (case withOptimization lbi of + NoOptimisation -> [] + _ -> ["-O2"]) ++ + (case withDebugInfo lbi of + NoDebugInfo -> [] + MinimalDebugInfo -> ["-g1"] + NormalDebugInfo -> ["-g"] + MaximalDebugInfo -> ["-g3"]) ++ + PD.cxxOptions bi, + ghcOptObjDir = toFlag odir + } + + +componentGhcOptions :: Verbosity -> GhcImplInfo -> LocalBuildInfo + -> BuildInfo -> ComponentLocalBuildInfo -> FilePath + -> GhcOptions +componentGhcOptions verbosity implInfo lbi bi clbi odir = + mempty { + -- Respect -v0, but don't crank up verbosity on GHC if + -- Cabal verbosity is requested. For that, use --ghc-option=-v instead! 
+ ghcOptVerbosity = toFlag (min verbosity normal), + ghcOptCabal = toFlag True, + ghcOptThisUnitId = case clbi of + LibComponentLocalBuildInfo { componentCompatPackageKey = pk } + -> toFlag pk + _ -> mempty, + ghcOptThisComponentId = case clbi of + LibComponentLocalBuildInfo { componentComponentId = cid + , componentInstantiatedWith = insts } -> + if null insts + then mempty + else toFlag cid + _ -> mempty, + ghcOptInstantiatedWith = case clbi of + LibComponentLocalBuildInfo { componentInstantiatedWith = insts } + -> insts + _ -> [], + ghcOptNoCode = toFlag $ componentIsIndefinite clbi, + ghcOptHideAllPackages = toFlag True, + ghcOptWarnMissingHomeModules = toFlag $ flagWarnMissingHomeModules implInfo, + ghcOptPackageDBs = withPackageDB lbi, + ghcOptPackages = toNubListR $ mkGhcOptPackages clbi, + ghcOptSplitSections = toFlag (splitSections lbi), + ghcOptSplitObjs = toFlag (splitObjs lbi), + ghcOptSourcePathClear = toFlag True, + ghcOptSourcePath = toNubListR $ [odir] ++ (hsSourceDirs bi) + ++ [autogenComponentModulesDir lbi clbi] + ++ [autogenPackageModulesDir lbi], + ghcOptCppIncludePath = toNubListR $ [autogenComponentModulesDir lbi clbi + ,autogenPackageModulesDir lbi + ,odir] + -- includes relative to the package + ++ PD.includeDirs bi + -- potential includes generated by `configure' + -- in the build directory + ++ [buildDir lbi dir | dir <- PD.includeDirs bi], + ghcOptCppOptions = cppOptions bi, + ghcOptCppIncludes = toNubListR $ + [autogenComponentModulesDir lbi clbi cppHeaderName], + ghcOptFfiIncludes = toNubListR $ PD.includes bi, + ghcOptObjDir = toFlag odir, + ghcOptHiDir = toFlag odir, + ghcOptStubDir = toFlag odir, + ghcOptOutputDir = toFlag odir, + ghcOptOptimisation = toGhcOptimisation (withOptimization lbi), + ghcOptDebugInfo = toFlag (withDebugInfo lbi), + ghcOptExtra = hcOptions GHC bi, + ghcOptExtraPath = toNubListR $ exe_paths, + ghcOptLanguage = toFlag (fromMaybe Haskell98 (defaultLanguage bi)), + -- Unsupported extensions have already been 
checked by configure + ghcOptExtensions = toNubListR $ usedExtensions bi, + ghcOptExtensionMap = Map.fromList . compilerExtensions $ (compiler lbi) + } + where + toGhcOptimisation NoOptimisation = mempty --TODO perhaps override? + toGhcOptimisation NormalOptimisation = toFlag GhcNormalOptimisation + toGhcOptimisation MaximumOptimisation = toFlag GhcMaximumOptimisation + + exe_paths = [ componentBuildDir lbi (targetCLBI exe_tgt) + | uid <- componentExeDeps clbi + -- TODO: Ugh, localPkgDescr + , Just exe_tgt <- [unitIdTarget' (localPkgDescr lbi) lbi uid] ] + +-- | Strip out flags that are not supported in ghci +filterGhciFlags :: [String] -> [String] +filterGhciFlags = filter supported + where + supported ('-':'O':_) = False + supported "-debug" = False + supported "-threaded" = False + supported "-ticky" = False + supported "-eventlog" = False + supported "-prof" = False + supported "-unreg" = False + supported _ = True + +mkGHCiLibName :: UnitId -> String +mkGHCiLibName lib = getHSLibraryName lib <.> "o" + +mkGHCiProfLibName :: UnitId -> String +mkGHCiProfLibName lib = getHSLibraryName lib <.> "p_o" + +ghcLookupProperty :: String -> Compiler -> Bool +ghcLookupProperty prop comp = + case Map.lookup prop (compilerProperties comp) of + Just "YES" -> True + _ -> False + +-- when using -split-objs, we need to search for object files in the +-- Module_split directory for each module. 
+getHaskellObjects :: GhcImplInfo -> Library -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> FilePath -> String -> Bool -> NoCallStackIO [FilePath] +getHaskellObjects _implInfo lib lbi clbi pref wanted_obj_ext allow_split_objs + | splitObjs lbi && allow_split_objs = do + let splitSuffix = "_" ++ wanted_obj_ext ++ "_split" + dirs = [ pref (ModuleName.toFilePath x ++ splitSuffix) + | x <- allLibModules lib clbi ] + objss <- traverse getDirectoryContents dirs + let objs = [ dir obj + | (objs',dir) <- zip objss dirs, obj <- objs', + let obj_ext = takeExtension obj, + '.':wanted_obj_ext == obj_ext ] + return objs + | otherwise = + return [ pref ModuleName.toFilePath x <.> wanted_obj_ext + | x <- allLibModules lib clbi ] + +mkGhcOptPackages :: ComponentLocalBuildInfo + -> [(OpenUnitId, ModuleRenaming)] +mkGhcOptPackages = componentIncludes + +substTopDir :: FilePath -> InstalledPackageInfo -> InstalledPackageInfo +substTopDir topDir ipo + = ipo { + InstalledPackageInfo.importDirs + = map f (InstalledPackageInfo.importDirs ipo), + InstalledPackageInfo.libraryDirs + = map f (InstalledPackageInfo.libraryDirs ipo), + InstalledPackageInfo.includeDirs + = map f (InstalledPackageInfo.includeDirs ipo), + InstalledPackageInfo.frameworkDirs + = map f (InstalledPackageInfo.frameworkDirs ipo), + InstalledPackageInfo.haddockInterfaces + = map f (InstalledPackageInfo.haddockInterfaces ipo), + InstalledPackageInfo.haddockHTMLs + = map f (InstalledPackageInfo.haddockHTMLs ipo) + } + where f ('$':'t':'o':'p':'d':'i':'r':rest) = topDir ++ rest + f x = x + +-- Cabal does not use the environment variable GHC{,JS}_PACKAGE_PATH; let +-- users know that this is the case. See ticket #335. Simply ignoring it is +-- not a good idea, since then ghc and cabal are looking at different sets +-- of package DBs and chaos is likely to ensue. +-- +-- An exception to this is when running cabal from within a `cabal exec` +-- environment. 
In this case, `cabal exec` will set the +-- CABAL_SANDBOX_PACKAGE_PATH to the same value that it set +-- GHC{,JS}_PACKAGE_PATH to. If that is the case it is OK to allow +-- GHC{,JS}_PACKAGE_PATH. +checkPackageDbEnvVar :: Verbosity -> String -> String -> IO () +checkPackageDbEnvVar verbosity compilerName packagePathEnvVar = do + mPP <- lookupEnv packagePathEnvVar + when (isJust mPP) $ do + mcsPP <- lookupEnv "CABAL_SANDBOX_PACKAGE_PATH" + unless (mPP == mcsPP) abort + where + lookupEnv :: String -> NoCallStackIO (Maybe String) + lookupEnv name = (Just `fmap` getEnv name) + `catchIO` const (return Nothing) + abort = + die' verbosity $ "Use of " ++ compilerName ++ "'s environment variable " + ++ packagePathEnvVar ++ " is incompatible with Cabal. Use the " + ++ "flag --package-db to specify a package database (it can be " + ++ "used multiple times)." + + _ = callStack -- TODO: output stack when erroring + +profDetailLevelFlag :: Bool -> ProfDetailLevel -> Flag GhcProfAuto +profDetailLevelFlag forLib mpl = + case mpl of + ProfDetailNone -> mempty + ProfDetailDefault | forLib -> toFlag GhcProfAutoExported + | otherwise -> toFlag GhcProfAutoToplevel + ProfDetailExportedFunctions -> toFlag GhcProfAutoExported + ProfDetailToplevelFunctions -> toFlag GhcProfAutoToplevel + ProfDetailAllFunctions -> toFlag GhcProfAutoAll + ProfDetailOther _ -> mempty + + +-- ----------------------------------------------------------------------------- +-- GHC platform and version strings + +-- | GHC's rendering of its host or target 'Arch' as used in its platform +-- strings and certain file locations (such as user package db location). +-- +ghcArchString :: Arch -> String +ghcArchString PPC = "powerpc" +ghcArchString PPC64 = "powerpc64" +ghcArchString other = display other + +-- | GHC's rendering of its host or target 'OS' as used in its platform +-- strings and certain file locations (such as user package db location). 
+-- +ghcOsString :: OS -> String +ghcOsString Windows = "mingw32" +ghcOsString OSX = "darwin" +ghcOsString Solaris = "solaris2" +ghcOsString other = display other + +-- | GHC's rendering of its platform and compiler version string as used in +-- certain file locations (such as user package db location). +-- For example @x86_64-linux-7.10.4@ +-- +ghcPlatformAndVersionString :: Platform -> Version -> String +ghcPlatformAndVersionString (Platform arch os) version = + intercalate "-" [ ghcArchString arch, ghcOsString os, display version ] + + +-- ----------------------------------------------------------------------------- +-- Constructing GHC environment files + +-- | The kinds of entries we can stick in a @.ghc.environment@ file. +-- +data GhcEnvironmentFileEntry = + GhcEnvFileComment String -- ^ @-- a comment@ + | GhcEnvFilePackageId UnitId -- ^ @package-id foo-1.0-4fe301a...@ + | GhcEnvFilePackageDb PackageDB -- ^ @global-package-db@, + -- @user-package-db@ or + -- @package-db blah/package.conf.d/@ + | GhcEnvFileClearPackageDbStack -- ^ @clear-package-db@ + deriving (Eq, Ord, Show) + +-- | Make entries for a GHC environment file based on a 'PackageDBStack' and +-- a bunch of package (unit) ids. +-- +-- If you need to do anything more complicated then either use this as a basis +-- and add more entries, or just make all the entries directly. +-- +simpleGhcEnvironmentFile :: PackageDBStack + -> [UnitId] + -> [GhcEnvironmentFileEntry] +simpleGhcEnvironmentFile packageDBs pkgids = + GhcEnvFileClearPackageDbStack + : map GhcEnvFilePackageDb packageDBs + ++ map GhcEnvFilePackageId pkgids + +-- | Write a @.ghc.environment-$arch-$os-$ver@ file in the given directory. +-- +-- The 'Platform' and GHC 'Version' are needed as part of the file name. +-- +-- Returns the name of the file written. 
+writeGhcEnvironmentFile :: FilePath -- ^ directory in which to put it + -> Platform -- ^ the GHC target platform + -> Version -- ^ the GHC version + -> [GhcEnvironmentFileEntry] -- ^ the content + -> NoCallStackIO FilePath +writeGhcEnvironmentFile directory platform ghcversion entries = do + writeFileAtomic envfile . BS.pack . renderGhcEnvironmentFile $ entries + return envfile + where + envfile = directory ghcEnvironmentFileName platform ghcversion + +-- | The @.ghc.environment-$arch-$os-$ver@ file name +-- +ghcEnvironmentFileName :: Platform -> Version -> FilePath +ghcEnvironmentFileName platform ghcversion = + ".ghc.environment." ++ ghcPlatformAndVersionString platform ghcversion + +-- | Render a bunch of GHC environment file entries +-- +renderGhcEnvironmentFile :: [GhcEnvironmentFileEntry] -> String +renderGhcEnvironmentFile = + unlines . map renderGhcEnvironmentFileEntry + +-- | Render an individual GHC environment file entry +-- +renderGhcEnvironmentFileEntry :: GhcEnvironmentFileEntry -> String +renderGhcEnvironmentFileEntry entry = case entry of + GhcEnvFileComment comment -> format comment + where format = intercalate "\n" . map ("-- " ++) . 
lines + GhcEnvFilePackageId pkgid -> "package-id " ++ display pkgid + GhcEnvFilePackageDb pkgdb -> + case pkgdb of + GlobalPackageDB -> "global-package-db" + UserPackageDB -> "user-package-db" + SpecificPackageDB dbfile -> "package-db " ++ dbfile + GhcEnvFileClearPackageDbStack -> "clear-package-db" + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/GHC.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/GHC.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/GHC.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/GHC.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,1945 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE TupleSections #-} +{-# LANGUAGE CPP #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.GHC +-- Copyright : Isaac Jones 2003-2007 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is a fairly large module. It contains most of the GHC-specific code for +-- configuring, building and installing packages. It also exports a function +-- for finding out what packages are already installed. Configuring involves +-- finding the @ghc@ and @ghc-pkg@ programs, finding what language extensions +-- this version of ghc supports and returning a 'Compiler' value. +-- +-- 'getInstalledPackages' involves calling the @ghc-pkg@ program to find out +-- what packages are installed. +-- +-- Building is somewhat complex as there is quite a bit of information to take +-- into account. We have to build libs and programs, possibly for profiling and +-- shared libs. We have to support building libraries that will be usable by +-- GHCi and also ghc's @-split-objs@ feature. We have to compile any C files +-- using ghc. 
Linking, especially for @split-objs@ is remarkably complex, +-- partly because there tend to be 1,000's of @.o@ files and this can often be +-- more than we can pass to the @ld@ or @ar@ programs in one go. +-- +-- Installing for libs and exes involves finding the right files and copying +-- them to the right places. One of the more tricky things about this module is +-- remembering the layout of files in the build directory (which is not +-- explicitly documented) and thus what search dirs are used for various kinds +-- of files. + +module Distribution.Simple.GHC ( + getGhcInfo, + configure, + getInstalledPackages, + getInstalledPackagesMonitorFiles, + getPackageDBContents, + buildLib, buildFLib, buildExe, + replLib, replFLib, replExe, + startInterpreter, + installLib, installFLib, installExe, + libAbiHash, + hcPkgInfo, + registerPackage, + componentGhcOptions, + componentCcGhcOptions, + getLibDir, + isDynamic, + getGlobalPackageDB, + pkgRoot, + -- * Constructing and deconstructing GHC environment files + Internal.GhcEnvironmentFileEntry(..), + Internal.simpleGhcEnvironmentFile, + Internal.renderGhcEnvironmentFile, + Internal.writeGhcEnvironmentFile, + Internal.ghcPlatformAndVersionString, + readGhcEnvironmentFile, + parseGhcEnvironmentFile, + ParseErrorExc(..), + -- * Version-specific implementation quirks + getImplInfo, + GhcImplInfo(..) 
+ ) where + +import Prelude () +import Distribution.Compat.Prelude + +import qualified Distribution.Simple.GHC.Internal as Internal +import Distribution.Simple.GHC.ImplInfo +import Distribution.Simple.GHC.EnvironmentParser +import Distribution.PackageDescription.Utils (cabalBug) +import Distribution.PackageDescription as PD +import Distribution.InstalledPackageInfo (InstalledPackageInfo) +import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.LocalBuildInfo +import Distribution.Types.ComponentLocalBuildInfo +import qualified Distribution.Simple.Hpc as Hpc +import Distribution.Simple.BuildPaths +import Distribution.Simple.Utils +import Distribution.Package +import qualified Distribution.ModuleName as ModuleName +import Distribution.ModuleName (ModuleName) +import Distribution.Simple.Program +import Distribution.Simple.Program.Builtin (runghcProgram) +import qualified Distribution.Simple.Program.HcPkg as HcPkg +import qualified Distribution.Simple.Program.Ar as Ar +import qualified Distribution.Simple.Program.Ld as Ld +import qualified Distribution.Simple.Program.Strip as Strip +import Distribution.Simple.Program.GHC +import Distribution.Simple.Setup +import qualified Distribution.Simple.Setup as Cabal +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Version +import Distribution.System +import Distribution.Verbosity +import Distribution.Text +import Distribution.Types.ForeignLib +import Distribution.Types.ForeignLibType +import Distribution.Types.ForeignLibOption +import Distribution.Types.UnqualComponentName +import Distribution.Utils.NubList +import Language.Haskell.Extension + +import Control.Monad (msum) +import Data.Char (isLower) +import qualified Data.Map as Map +import System.Directory + ( doesFileExist, getAppUserDataDirectory, createDirectoryIfMissing + , 
canonicalizePath, removeFile, renameFile ) +import System.FilePath ( (), (<.>), takeExtension + , takeDirectory, replaceExtension + ,isRelative ) +import qualified System.Info +#ifndef mingw32_HOST_OS +import System.Posix (createSymbolicLink) +#endif /* mingw32_HOST_OS */ + +-- ----------------------------------------------------------------------------- +-- Configuring + +configure :: Verbosity -> Maybe FilePath -> Maybe FilePath + -> ProgramDb + -> IO (Compiler, Maybe Platform, ProgramDb) +configure verbosity hcPath hcPkgPath conf0 = do + + (ghcProg, ghcVersion, progdb1) <- + requireProgramVersion verbosity ghcProgram + (orLaterVersion (mkVersion [7,0,1])) + (userMaybeSpecifyPath "ghc" hcPath conf0) + let implInfo = ghcVersionImplInfo ghcVersion + + -- Cabal currently supports ghc >= 7.0.1 && < 8.7 + unless (ghcVersion < mkVersion [8,7]) $ + warn verbosity $ + "Unknown/unsupported 'ghc' version detected " + ++ "(Cabal " ++ display cabalVersion ++ " supports 'ghc' version < 8.7): " + ++ programPath ghcProg ++ " is version " ++ display ghcVersion + + -- This is slightly tricky, we have to configure ghc first, then we use the + -- location of ghc to help find ghc-pkg in the case that the user did not + -- specify the location of ghc-pkg directly: + (ghcPkgProg, ghcPkgVersion, progdb2) <- + requireProgramVersion verbosity ghcPkgProgram { + programFindLocation = guessGhcPkgFromGhcPath ghcProg + } + anyVersion (userMaybeSpecifyPath "ghc-pkg" hcPkgPath progdb1) + + when (ghcVersion /= ghcPkgVersion) $ die' verbosity $ + "Version mismatch between ghc and ghc-pkg: " + ++ programPath ghcProg ++ " is version " ++ display ghcVersion ++ " " + ++ programPath ghcPkgProg ++ " is version " ++ display ghcPkgVersion + + -- Likewise we try to find the matching hsc2hs and haddock programs. 
+ let hsc2hsProgram' = hsc2hsProgram { + programFindLocation = guessHsc2hsFromGhcPath ghcProg + } + haddockProgram' = haddockProgram { + programFindLocation = guessHaddockFromGhcPath ghcProg + } + hpcProgram' = hpcProgram { + programFindLocation = guessHpcFromGhcPath ghcProg + } + runghcProgram' = runghcProgram { + programFindLocation = guessRunghcFromGhcPath ghcProg + } + progdb3 = addKnownProgram haddockProgram' $ + addKnownProgram hsc2hsProgram' $ + addKnownProgram hpcProgram' $ + addKnownProgram runghcProgram' progdb2 + + languages <- Internal.getLanguages verbosity implInfo ghcProg + extensions0 <- Internal.getExtensions verbosity implInfo ghcProg + + ghcInfo <- Internal.getGhcInfo verbosity implInfo ghcProg + let ghcInfoMap = Map.fromList ghcInfo + extensions = -- workaround https://ghc.haskell.org/ticket/11214 + filterExt JavaScriptFFI $ + -- see 'filterExtTH' comment below + filterExtTH $ extensions0 + + -- starting with GHC 8.0, `TemplateHaskell` will be omitted from + -- `--supported-extensions` when it's not available. + -- for older GHCs we can use the "Have interpreter" property to + -- filter out `TemplateHaskell` + filterExtTH | ghcVersion < mkVersion [8] + , Just "NO" <- Map.lookup "Have interpreter" ghcInfoMap + = filterExt TemplateHaskell + | otherwise = id + + filterExt ext = filter ((/= EnableExtension ext) . fst) + + let comp = Compiler { + compilerId = CompilerId GHC ghcVersion, + compilerAbiTag = NoAbiTag, + compilerCompat = [], + compilerLanguages = languages, + compilerExtensions = extensions, + compilerProperties = ghcInfoMap + } + compPlatform = Internal.targetPlatform ghcInfo + -- configure gcc and ld + progdb4 = Internal.configureToolchain implInfo ghcProg ghcInfoMap progdb3 + return (comp, compPlatform, progdb4) + +-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find +-- the corresponding tool; e.g. 
if the tool is ghc-pkg, we try looking +-- for a versioned or unversioned ghc-pkg in the same dir, that is: +-- +-- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe) +-- > /usr/local/bin/ghc-pkg-6.6.1(.exe) +-- > /usr/local/bin/ghc-pkg(.exe) +-- +guessToolFromGhcPath :: Program -> ConfiguredProgram + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) +guessToolFromGhcPath tool ghcProg verbosity searchpath + = do let toolname = programName tool + given_path = programPath ghcProg + given_dir = takeDirectory given_path + real_path <- canonicalizePath given_path + let real_dir = takeDirectory real_path + versionSuffix path = takeVersionSuffix (dropExeExtension path) + given_suf = versionSuffix given_path + real_suf = versionSuffix real_path + guessNormal dir = dir toolname <.> exeExtension buildPlatform + guessGhcVersioned dir suf = dir (toolname ++ "-ghc" ++ suf) + <.> exeExtension buildPlatform + guessVersioned dir suf = dir (toolname ++ suf) + <.> exeExtension buildPlatform + mkGuesses dir suf | null suf = [guessNormal dir] + | otherwise = [guessGhcVersioned dir suf, + guessVersioned dir suf, + guessNormal dir] + guesses = mkGuesses given_dir given_suf ++ + if real_path == given_path + then [] + else mkGuesses real_dir real_suf + info verbosity $ "looking for tool " ++ toolname + ++ " near compiler in " ++ given_dir + debug verbosity $ "candidate locations: " ++ show guesses + exists <- traverse doesFileExist guesses + case [ file | (file, True) <- zip guesses exists ] of + -- If we can't find it near ghc, fall back to the usual + -- method. + [] -> programFindLocation tool verbosity searchpath + (fp:_) -> do info verbosity $ "found " ++ toolname ++ " in " ++ fp + let lookedAt = map fst + . 
takeWhile (\(_file, exist) -> not exist) + $ zip guesses exists + return (Just (fp, lookedAt)) + + where takeVersionSuffix :: FilePath -> String + takeVersionSuffix = takeWhileEndLE isSuffixChar + + isSuffixChar :: Char -> Bool + isSuffixChar c = isDigit c || c == '.' || c == '-' + +-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a +-- corresponding ghc-pkg, we try looking for both a versioned and unversioned +-- ghc-pkg in the same dir, that is: +-- +-- > /usr/local/bin/ghc-pkg-ghc-6.6.1(.exe) +-- > /usr/local/bin/ghc-pkg-6.6.1(.exe) +-- > /usr/local/bin/ghc-pkg(.exe) +-- +guessGhcPkgFromGhcPath :: ConfiguredProgram + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) +guessGhcPkgFromGhcPath = guessToolFromGhcPath ghcPkgProgram + +-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a +-- corresponding hsc2hs, we try looking for both a versioned and unversioned +-- hsc2hs in the same dir, that is: +-- +-- > /usr/local/bin/hsc2hs-ghc-6.6.1(.exe) +-- > /usr/local/bin/hsc2hs-6.6.1(.exe) +-- > /usr/local/bin/hsc2hs(.exe) +-- +guessHsc2hsFromGhcPath :: ConfiguredProgram + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) +guessHsc2hsFromGhcPath = guessToolFromGhcPath hsc2hsProgram + +-- | Given something like /usr/local/bin/ghc-6.6.1(.exe) we try and find a +-- corresponding haddock, we try looking for both a versioned and unversioned +-- haddock in the same dir, that is: +-- +-- > /usr/local/bin/haddock-ghc-6.6.1(.exe) +-- > /usr/local/bin/haddock-6.6.1(.exe) +-- > /usr/local/bin/haddock(.exe) +-- +guessHaddockFromGhcPath :: ConfiguredProgram + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) +guessHaddockFromGhcPath = guessToolFromGhcPath haddockProgram + +guessHpcFromGhcPath :: ConfiguredProgram + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) +guessHpcFromGhcPath = guessToolFromGhcPath hpcProgram + +guessRunghcFromGhcPath :: 
ConfiguredProgram + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) +guessRunghcFromGhcPath = guessToolFromGhcPath runghcProgram + + +getGhcInfo :: Verbosity -> ConfiguredProgram -> IO [(String, String)] +getGhcInfo verbosity ghcProg = Internal.getGhcInfo verbosity implInfo ghcProg + where + Just version = programVersion ghcProg + implInfo = ghcVersionImplInfo version + +-- | Given a single package DB, return all installed packages. +getPackageDBContents :: Verbosity -> PackageDB -> ProgramDb + -> IO InstalledPackageIndex +getPackageDBContents verbosity packagedb progdb = do + pkgss <- getInstalledPackages' verbosity [packagedb] progdb + toPackageIndex verbosity pkgss progdb + +-- | Given a package DB stack, return all installed packages. +getInstalledPackages :: Verbosity -> Compiler -> PackageDBStack + -> ProgramDb + -> IO InstalledPackageIndex +getInstalledPackages verbosity comp packagedbs progdb = do + checkPackageDbEnvVar verbosity + checkPackageDbStack verbosity comp packagedbs + pkgss <- getInstalledPackages' verbosity packagedbs progdb + index <- toPackageIndex verbosity pkgss progdb + return $! hackRtsPackage index + + where + hackRtsPackage index = + case PackageIndex.lookupPackageName index (mkPackageName "rts") of + [(_,[rts])] + -> PackageIndex.insert (removeMingwIncludeDir rts) index + _ -> index -- No (or multiple) ghc rts package is registered!! + -- Feh, whatever, the ghc test suite does some crazy stuff. + +-- | Given a list of @(PackageDB, InstalledPackageInfo)@ pairs, produce a +-- @PackageIndex@. Helper function used by 'getPackageDBContents' and +-- 'getInstalledPackages'. +toPackageIndex :: Verbosity + -> [(PackageDB, [InstalledPackageInfo])] + -> ProgramDb + -> IO InstalledPackageIndex +toPackageIndex verbosity pkgss progdb = do + -- On Windows, various fields have $topdir/foo rather than full + -- paths. 
We need to substitute the right value in so that when + -- we, for example, call gcc, we have proper paths to give it. + topDir <- getLibDir' verbosity ghcProg + let indices = [ PackageIndex.fromList (map (Internal.substTopDir topDir) pkgs) + | (_, pkgs) <- pkgss ] + return $! mconcat indices + + where + Just ghcProg = lookupProgram ghcProgram progdb + +getLibDir :: Verbosity -> LocalBuildInfo -> IO FilePath +getLibDir verbosity lbi = + dropWhileEndLE isSpace `fmap` + getDbProgramOutput verbosity ghcProgram + (withPrograms lbi) ["--print-libdir"] + +getLibDir' :: Verbosity -> ConfiguredProgram -> IO FilePath +getLibDir' verbosity ghcProg = + dropWhileEndLE isSpace `fmap` + getProgramOutput verbosity ghcProg ["--print-libdir"] + + +-- | Return the 'FilePath' to the global GHC package database. +getGlobalPackageDB :: Verbosity -> ConfiguredProgram -> IO FilePath +getGlobalPackageDB verbosity ghcProg = + dropWhileEndLE isSpace `fmap` + getProgramOutput verbosity ghcProg ["--print-global-package-db"] + +-- | Return the 'FilePath' to the per-user GHC package database. +getUserPackageDB :: Verbosity -> ConfiguredProgram -> Platform -> NoCallStackIO FilePath +getUserPackageDB _verbosity ghcProg platform = do + -- It's rather annoying that we have to reconstruct this, because ghc + -- hides this information from us otherwise. But for certain use cases + -- like change monitoring it really can't remain hidden. 
+ appdir <- getAppUserDataDirectory "ghc" + return (appdir platformAndVersion packageConfFileName) + where + platformAndVersion = Internal.ghcPlatformAndVersionString + platform ghcVersion + packageConfFileName = "package.conf.d" + Just ghcVersion = programVersion ghcProg + +checkPackageDbEnvVar :: Verbosity -> IO () +checkPackageDbEnvVar verbosity = + Internal.checkPackageDbEnvVar verbosity "GHC" "GHC_PACKAGE_PATH" + +checkPackageDbStack :: Verbosity -> Compiler -> PackageDBStack -> IO () +checkPackageDbStack verbosity comp = + if flagPackageConf implInfo + then checkPackageDbStackPre76 verbosity + else checkPackageDbStackPost76 verbosity + where implInfo = ghcVersionImplInfo (compilerVersion comp) + +checkPackageDbStackPost76 :: Verbosity -> PackageDBStack -> IO () +checkPackageDbStackPost76 _ (GlobalPackageDB:rest) + | GlobalPackageDB `notElem` rest = return () +checkPackageDbStackPost76 verbosity rest + | GlobalPackageDB `elem` rest = + die' verbosity $ "If the global package db is specified, it must be " + ++ "specified first and cannot be specified multiple times" +checkPackageDbStackPost76 _ _ = return () + +checkPackageDbStackPre76 :: Verbosity -> PackageDBStack -> IO () +checkPackageDbStackPre76 _ (GlobalPackageDB:rest) + | GlobalPackageDB `notElem` rest = return () +checkPackageDbStackPre76 verbosity rest + | GlobalPackageDB `notElem` rest = + die' verbosity $ "With current ghc versions the global package db is always used " + ++ "and must be listed first. This ghc limitation is lifted in GHC 7.6," + ++ "see http://hackage.haskell.org/trac/ghc/ticket/5977" +checkPackageDbStackPre76 verbosity _ = + die' verbosity $ "If the global package db is specified, it must be " + ++ "specified first and cannot be specified multiple times" + +-- GHC < 6.10 put "$topdir/include/mingw" in rts's installDirs. This +-- breaks when you want to use a different gcc, so we need to filter +-- it out. 
+removeMingwIncludeDir :: InstalledPackageInfo -> InstalledPackageInfo +removeMingwIncludeDir pkg = + let ids = InstalledPackageInfo.includeDirs pkg + ids' = filter (not . ("mingw" `isSuffixOf`)) ids + in pkg { InstalledPackageInfo.includeDirs = ids' } + +-- | Get the packages from specific PackageDBs, not cumulative. +-- +getInstalledPackages' :: Verbosity -> [PackageDB] -> ProgramDb + -> IO [(PackageDB, [InstalledPackageInfo])] +getInstalledPackages' verbosity packagedbs progdb = + sequenceA + [ do pkgs <- HcPkg.dump (hcPkgInfo progdb) verbosity packagedb + return (packagedb, pkgs) + | packagedb <- packagedbs ] + +getInstalledPackagesMonitorFiles :: Verbosity -> Platform + -> ProgramDb + -> [PackageDB] + -> IO [FilePath] +getInstalledPackagesMonitorFiles verbosity platform progdb = + traverse getPackageDBPath + where + getPackageDBPath :: PackageDB -> IO FilePath + getPackageDBPath GlobalPackageDB = + selectMonitorFile =<< getGlobalPackageDB verbosity ghcProg + + getPackageDBPath UserPackageDB = + selectMonitorFile =<< getUserPackageDB verbosity ghcProg platform + + getPackageDBPath (SpecificPackageDB path) = selectMonitorFile path + + -- GHC has old style file dbs, and new style directory dbs. + -- Note that for dir style dbs, we only need to monitor the cache file, not + -- the whole directory. The ghc program itself only reads the cache file + -- so it's safe to only monitor this one file. 
+ selectMonitorFile path = do + isFileStyle <- doesFileExist path + if isFileStyle then return path + else return (path "package.cache") + + Just ghcProg = lookupProgram ghcProgram progdb + + +-- ----------------------------------------------------------------------------- +-- Building a library + +buildLib :: Verbosity -> Cabal.Flag (Maybe Int) + -> PackageDescription -> LocalBuildInfo + -> Library -> ComponentLocalBuildInfo -> IO () +buildLib = buildOrReplLib Nothing + +replLib :: [String] -> Verbosity + -> Cabal.Flag (Maybe Int) -> PackageDescription + -> LocalBuildInfo -> Library + -> ComponentLocalBuildInfo -> IO () +replLib = buildOrReplLib . Just + +buildOrReplLib :: Maybe [String] -> Verbosity + -> Cabal.Flag (Maybe Int) -> PackageDescription + -> LocalBuildInfo -> Library + -> ComponentLocalBuildInfo -> IO () +buildOrReplLib mReplFlags verbosity numJobs pkg_descr lbi lib clbi = do + let uid = componentUnitId clbi + libTargetDir = componentBuildDir lbi clbi + whenVanillaLib forceVanilla = + when (forceVanilla || withVanillaLib lbi) + whenProfLib = when (withProfLib lbi) + whenSharedLib forceShared = + when (forceShared || withSharedLib lbi) + whenStaticLib forceStatic = + when (forceStatic || withStaticLib lbi) + whenGHCiLib = when (withGHCiLib lbi) + forRepl = maybe False (const True) mReplFlags + ifReplLib = when forRepl + replFlags = fromMaybe mempty mReplFlags + comp = compiler lbi + ghcVersion = compilerVersion comp + implInfo = getImplInfo comp + platform@(Platform _hostArch hostOS) = hostPlatform lbi + has_code = not (componentIsIndefinite clbi) + + (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi) + let runGhcProg = runGHC verbosity ghcProg comp platform + + let libBi = libBuildInfo lib + + let isGhcDynamic = isDynamic comp + dynamicTooSupported = supportsDynamicToo comp + doingTH = usesTemplateHaskellOrQQ libBi + forceVanillaLib = doingTH && not isGhcDynamic + forceSharedLib = doingTH && isGhcDynamic + -- TH always needs 
default libs, even when building for profiling + + -- Determine if program coverage should be enabled and if so, what + -- '-hpcdir' should be. + let isCoverageEnabled = libCoverage lbi + -- TODO: Historically HPC files have been put into a directory which + -- has the package name. I'm going to avoid changing this for + -- now, but it would probably be better for this to be the + -- component ID instead... + pkg_name = display (PD.package pkg_descr) + distPref = fromFlag $ configDistPref $ configFlags lbi + hpcdir way + | forRepl = mempty -- HPC is not supported in ghci + | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way pkg_name + | otherwise = mempty + + createDirectoryIfMissingVerbose verbosity True libTargetDir + -- TODO: do we need to put hs-boot files into place for mutually recursive + -- modules? + let cLikeFiles = fromNubListR $ toNubListR (cSources libBi) <> toNubListR (cxxSources libBi) + cObjs = map (`replaceExtension` objExtension) cLikeFiles + baseOpts = componentGhcOptions verbosity lbi libBi clbi libTargetDir + vanillaOpts = baseOpts `mappend` mempty { + ghcOptMode = toFlag GhcModeMake, + ghcOptNumJobs = numJobs, + ghcOptInputModules = toNubListR $ allLibModules lib clbi, + ghcOptHPCDir = hpcdir Hpc.Vanilla + } + + profOpts = vanillaOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptProfilingAuto = Internal.profDetailLevelFlag True + (withProfLibDetail lbi), + ghcOptHiSuffix = toFlag "p_hi", + ghcOptObjSuffix = toFlag "p_o", + ghcOptExtra = hcProfOptions GHC libBi, + ghcOptHPCDir = hpcdir Hpc.Prof + } + + sharedOpts = vanillaOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptFPic = toFlag True, + ghcOptHiSuffix = toFlag "dyn_hi", + ghcOptObjSuffix = toFlag "dyn_o", + ghcOptExtra = hcSharedOptions GHC libBi, + ghcOptHPCDir = hpcdir Hpc.Dyn + } + linkerOpts = mempty { + ghcOptLinkOptions = PD.ldOptions libBi, + ghcOptLinkLibs = extraLibs libBi, + ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi, + 
ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi, + ghcOptLinkFrameworkDirs = toNubListR $ PD.extraFrameworkDirs libBi, + ghcOptInputFiles = toNubListR + [libTargetDir x | x <- cObjs] + } + replOpts = vanillaOpts { + ghcOptExtra = Internal.filterGhciFlags + (ghcOptExtra vanillaOpts) + <> replFlags, + ghcOptNumJobs = mempty + } + `mappend` linkerOpts + `mappend` mempty { + ghcOptMode = toFlag GhcModeInteractive, + ghcOptOptimisation = toFlag GhcNoOptimisation + } + + vanillaSharedOpts = vanillaOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, + ghcOptDynHiSuffix = toFlag "dyn_hi", + ghcOptDynObjSuffix = toFlag "dyn_o", + ghcOptHPCDir = hpcdir Hpc.Dyn + } + + unless (forRepl || null (allLibModules lib clbi)) $ + do let vanilla = whenVanillaLib forceVanillaLib (runGhcProg vanillaOpts) + shared = whenSharedLib forceSharedLib (runGhcProg sharedOpts) + useDynToo = dynamicTooSupported && + (forceVanillaLib || withVanillaLib lbi) && + (forceSharedLib || withSharedLib lbi) && + null (hcSharedOptions GHC libBi) + if not has_code + then vanilla + else + if useDynToo + then do + runGhcProg vanillaSharedOpts + case (hpcdir Hpc.Dyn, hpcdir Hpc.Vanilla) of + (Cabal.Flag dynDir, Cabal.Flag vanillaDir) -> + -- When the vanilla and shared library builds are done + -- in one pass, only one set of HPC module interfaces + -- are generated. This set should suffice for both + -- static and dynamically linked executables. We copy + -- the modules interfaces so they are available under + -- both ways. + copyDirectoryRecursive verbosity dynDir vanillaDir + _ -> return () + else if isGhcDynamic + then do shared; vanilla + else do vanilla; shared + whenProfLib (runGhcProg profOpts) + + -- Build any C++ sources separately. + unless (not has_code || null (cxxSources libBi)) $ do + info verbosity "Building C++ Sources..." 
+ sequence_ + [ do let baseCxxOpts = Internal.componentCxxGhcOptions verbosity implInfo + lbi libBi clbi libTargetDir filename + vanillaCxxOpts = if isGhcDynamic + then baseCxxOpts { ghcOptFPic = toFlag True } + else baseCxxOpts + profCxxOpts = vanillaCxxOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptObjSuffix = toFlag "p_o" + } + sharedCxxOpts = vanillaCxxOpts `mappend` mempty { + ghcOptFPic = toFlag True, + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptObjSuffix = toFlag "dyn_o" + } + odir = fromFlag (ghcOptObjDir vanillaCxxOpts) + createDirectoryIfMissingVerbose verbosity True odir + let runGhcProgIfNeeded cxxOpts = do + needsRecomp <- checkNeedsRecompilation filename cxxOpts + when needsRecomp $ runGhcProg cxxOpts + runGhcProgIfNeeded vanillaCxxOpts + unless forRepl $ + whenSharedLib forceSharedLib (runGhcProgIfNeeded sharedCxxOpts) + unless forRepl $ whenProfLib (runGhcProgIfNeeded profCxxOpts) + | filename <- cxxSources libBi] + + ifReplLib $ do + when (null (allLibModules lib clbi)) $ warn verbosity "No exposed modules" + ifReplLib (runGhcProg replOpts) + + -- build any C sources + -- TODO: Add support for S and CMM files. + unless (not has_code || null (cSources libBi)) $ do + info verbosity "Building C Sources..." + sequence_ + [ do let baseCcOpts = Internal.componentCcGhcOptions verbosity implInfo + lbi libBi clbi libTargetDir filename + vanillaCcOpts = if isGhcDynamic + -- Dynamic GHC requires C sources to be built + -- with -fPIC for REPL to work. See #2207. 
+ then baseCcOpts { ghcOptFPic = toFlag True } + else baseCcOpts + profCcOpts = vanillaCcOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptObjSuffix = toFlag "p_o" + } + sharedCcOpts = vanillaCcOpts `mappend` mempty { + ghcOptFPic = toFlag True, + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptObjSuffix = toFlag "dyn_o" + } + odir = fromFlag (ghcOptObjDir vanillaCcOpts) + createDirectoryIfMissingVerbose verbosity True odir + let runGhcProgIfNeeded ccOpts = do + needsRecomp <- checkNeedsRecompilation filename ccOpts + when needsRecomp $ runGhcProg ccOpts + runGhcProgIfNeeded vanillaCcOpts + unless forRepl $ + whenSharedLib forceSharedLib (runGhcProgIfNeeded sharedCcOpts) + unless forRepl $ whenProfLib (runGhcProgIfNeeded profCcOpts) + | filename <- cSources libBi] + + -- TODO: problem here is we need the .c files built first, so we can load them + -- with ghci, but .c files can depend on .h files generated by ghc by ffi + -- exports. + + -- link: + when has_code . unless forRepl $ do + info verbosity "Linking..." 
+ let cProfObjs = map (`replaceExtension` ("p_" ++ objExtension)) + (cSources libBi ++ cxxSources libBi) + cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension)) + (cSources libBi ++ cxxSources libBi) + compiler_id = compilerId (compiler lbi) + vanillaLibFilePath = libTargetDir mkLibName uid + profileLibFilePath = libTargetDir mkProfLibName uid + sharedLibFilePath = libTargetDir mkSharedLibName (hostPlatform lbi) compiler_id uid + staticLibFilePath = libTargetDir mkStaticLibName (hostPlatform lbi) compiler_id uid + ghciLibFilePath = libTargetDir Internal.mkGHCiLibName uid + ghciProfLibFilePath = libTargetDir Internal.mkGHCiProfLibName uid + libInstallPath = libdir $ absoluteComponentInstallDirs pkg_descr lbi uid NoCopyDest + sharedLibInstallPath = libInstallPath mkSharedLibName (hostPlatform lbi) compiler_id uid + + stubObjs <- catMaybes <$> sequenceA + [ findFileWithExtension [objExtension] [libTargetDir] + (ModuleName.toFilePath x ++"_stub") + | ghcVersion < mkVersion [7,2] -- ghc-7.2+ does not make _stub.o files + , x <- allLibModules lib clbi ] + stubProfObjs <- catMaybes <$> sequenceA + [ findFileWithExtension ["p_" ++ objExtension] [libTargetDir] + (ModuleName.toFilePath x ++"_stub") + | ghcVersion < mkVersion [7,2] -- ghc-7.2+ does not make _stub.o files + , x <- allLibModules lib clbi ] + stubSharedObjs <- catMaybes <$> sequenceA + [ findFileWithExtension ["dyn_" ++ objExtension] [libTargetDir] + (ModuleName.toFilePath x ++"_stub") + | ghcVersion < mkVersion [7,2] -- ghc-7.2+ does not make _stub.o files + , x <- allLibModules lib clbi ] + + hObjs <- Internal.getHaskellObjects implInfo lib lbi clbi + libTargetDir objExtension True + hProfObjs <- + if withProfLib lbi + then Internal.getHaskellObjects implInfo lib lbi clbi + libTargetDir ("p_" ++ objExtension) True + else return [] + hSharedObjs <- + if withSharedLib lbi + then Internal.getHaskellObjects implInfo lib lbi clbi + libTargetDir ("dyn_" ++ objExtension) False + else return [] + + unless 
(null hObjs && null cObjs && null stubObjs) $ do + rpaths <- getRPaths lbi clbi + + let staticObjectFiles = + hObjs + ++ map (libTargetDir ) cObjs + ++ stubObjs + profObjectFiles = + hProfObjs + ++ map (libTargetDir ) cProfObjs + ++ stubProfObjs + dynamicObjectFiles = + hSharedObjs + ++ map (libTargetDir ) cSharedObjs + ++ stubSharedObjs + -- After the relocation lib is created we invoke ghc -shared + -- with the dependencies spelled out as -package arguments + -- and ghc invokes the linker with the proper library paths + ghcSharedLinkArgs = + mempty { + ghcOptShared = toFlag True, + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptInputFiles = toNubListR dynamicObjectFiles, + ghcOptOutputFile = toFlag sharedLibFilePath, + ghcOptExtra = hcSharedOptions GHC libBi, + -- For dynamic libs, Mac OS/X needs to know the install location + -- at build time. This only applies to GHC < 7.8 - see the + -- discussion in #1660. + ghcOptDylibName = if hostOS == OSX + && ghcVersion < mkVersion [7,8] + then toFlag sharedLibInstallPath + else mempty, + ghcOptHideAllPackages = toFlag True, + ghcOptNoAutoLinkPackages = toFlag True, + ghcOptPackageDBs = withPackageDB lbi, + ghcOptThisUnitId = case clbi of + LibComponentLocalBuildInfo { componentCompatPackageKey = pk } + -> toFlag pk + _ -> mempty, + ghcOptThisComponentId = case clbi of + LibComponentLocalBuildInfo { componentInstantiatedWith = insts } -> + if null insts + then mempty + else toFlag (componentComponentId clbi) + _ -> mempty, + ghcOptInstantiatedWith = case clbi of + LibComponentLocalBuildInfo { componentInstantiatedWith = insts } + -> insts + _ -> [], + ghcOptPackages = toNubListR $ + Internal.mkGhcOptPackages clbi , + ghcOptLinkLibs = extraLibs libBi, + ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi, + ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi, + ghcOptLinkFrameworkDirs = + toNubListR $ PD.extraFrameworkDirs libBi, + ghcOptRPaths = rpaths + } + ghcStaticLinkArgs = + mempty { + ghcOptStaticLib = 
toFlag True, + ghcOptInputFiles = toNubListR staticObjectFiles, + ghcOptOutputFile = toFlag staticLibFilePath, + ghcOptExtra = hcStaticOptions GHC libBi, + ghcOptHideAllPackages = toFlag True, + ghcOptNoAutoLinkPackages = toFlag True, + ghcOptPackageDBs = withPackageDB lbi, + ghcOptThisUnitId = case clbi of + LibComponentLocalBuildInfo { componentCompatPackageKey = pk } + -> toFlag pk + _ -> mempty, + ghcOptThisComponentId = case clbi of + LibComponentLocalBuildInfo { componentInstantiatedWith = insts } -> + if null insts + then mempty + else toFlag (componentComponentId clbi) + _ -> mempty, + ghcOptInstantiatedWith = case clbi of + LibComponentLocalBuildInfo { componentInstantiatedWith = insts } + -> insts + _ -> [], + ghcOptPackages = toNubListR $ + Internal.mkGhcOptPackages clbi , + ghcOptLinkLibs = extraLibs libBi, + ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi + } + + info verbosity (show (ghcOptPackages ghcSharedLinkArgs)) + + whenVanillaLib False $ do + Ar.createArLibArchive verbosity lbi vanillaLibFilePath staticObjectFiles + whenGHCiLib $ do + (ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi) + Ld.combineObjectFiles verbosity lbi ldProg + ghciLibFilePath staticObjectFiles + + whenProfLib $ do + Ar.createArLibArchive verbosity lbi profileLibFilePath profObjectFiles + whenGHCiLib $ do + (ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi) + Ld.combineObjectFiles verbosity lbi ldProg + ghciProfLibFilePath profObjectFiles + + whenSharedLib False $ + runGhcProg ghcSharedLinkArgs + + whenStaticLib False $ + runGhcProg ghcStaticLinkArgs + +-- | Start a REPL without loading any source files. 
+startInterpreter :: Verbosity -> ProgramDb -> Compiler -> Platform + -> PackageDBStack -> IO () +startInterpreter verbosity progdb comp platform packageDBs = do + let replOpts = mempty { + ghcOptMode = toFlag GhcModeInteractive, + ghcOptPackageDBs = packageDBs + } + checkPackageDbStack verbosity comp packageDBs + (ghcProg, _) <- requireProgram verbosity ghcProgram progdb + runGHC verbosity ghcProg comp platform replOpts + +-- ----------------------------------------------------------------------------- +-- Building an executable or foreign library + +-- | Build a foreign library +buildFLib + :: Verbosity -> Cabal.Flag (Maybe Int) + -> PackageDescription -> LocalBuildInfo + -> ForeignLib -> ComponentLocalBuildInfo -> IO () +buildFLib v njobs pkg lbi = gbuild v njobs pkg lbi . GBuildFLib + +replFLib + :: [String] -> Verbosity + -> Cabal.Flag (Maybe Int) -> PackageDescription + -> LocalBuildInfo -> ForeignLib + -> ComponentLocalBuildInfo -> IO () +replFLib replFlags v njobs pkg lbi = + gbuild v njobs pkg lbi . GReplFLib replFlags + +-- | Build an executable with GHC. +-- +buildExe + :: Verbosity -> Cabal.Flag (Maybe Int) + -> PackageDescription -> LocalBuildInfo + -> Executable -> ComponentLocalBuildInfo -> IO () +buildExe v njobs pkg lbi = gbuild v njobs pkg lbi . GBuildExe + +replExe + :: [String] -> Verbosity + -> Cabal.Flag (Maybe Int) -> PackageDescription + -> LocalBuildInfo -> Executable + -> ComponentLocalBuildInfo -> IO () +replExe replFlags v njobs pkg lbi = + gbuild v njobs pkg lbi . GReplExe replFlags + +-- | Building an executable, starting the REPL, and building foreign +-- libraries are all very similar and implemented in 'gbuild'. The +-- 'GBuildMode' distinguishes between the various kinds of operation. 
+data GBuildMode = + GBuildExe Executable + | GReplExe [String] Executable + | GBuildFLib ForeignLib + | GReplFLib [String] ForeignLib + +gbuildInfo :: GBuildMode -> BuildInfo +gbuildInfo (GBuildExe exe) = buildInfo exe +gbuildInfo (GReplExe _ exe) = buildInfo exe +gbuildInfo (GBuildFLib flib) = foreignLibBuildInfo flib +gbuildInfo (GReplFLib _ flib) = foreignLibBuildInfo flib + +gbuildName :: GBuildMode -> String +gbuildName (GBuildExe exe) = unUnqualComponentName $ exeName exe +gbuildName (GReplExe _ exe) = unUnqualComponentName $ exeName exe +gbuildName (GBuildFLib flib) = unUnqualComponentName $ foreignLibName flib +gbuildName (GReplFLib _ flib) = unUnqualComponentName $ foreignLibName flib + +gbuildTargetName :: LocalBuildInfo -> GBuildMode -> String +gbuildTargetName lbi (GBuildExe exe) = exeTargetName (hostPlatform lbi) exe +gbuildTargetName lbi (GReplExe _ exe) = exeTargetName (hostPlatform lbi) exe +gbuildTargetName lbi (GBuildFLib flib) = flibTargetName lbi flib +gbuildTargetName lbi (GReplFLib _ flib) = flibTargetName lbi flib + +exeTargetName :: Platform -> Executable -> String +exeTargetName platform exe = unUnqualComponentName (exeName exe) `withExt` exeExtension platform + +-- | Target name for a foreign library (the actual file name) +-- +-- We do not use mkLibName and co here because the naming for foreign libraries +-- is slightly different (we don't use "_p" or compiler version suffices, and we +-- don't want the "lib" prefix on Windows). +-- +-- TODO: We do use `dllExtension` and co here, but really that's wrong: they +-- use the OS used to build cabal to determine which extension to use, rather +-- than the target OS (but this is wrong elsewhere in Cabal as well). 
+flibTargetName :: LocalBuildInfo -> ForeignLib -> String +flibTargetName lbi flib = + case (os, foreignLibType flib) of + (Windows, ForeignLibNativeShared) -> nm <.> "dll" + (Windows, ForeignLibNativeStatic) -> nm <.> "lib" + (Linux, ForeignLibNativeShared) -> "lib" ++ nm <.> versionedExt + (_other, ForeignLibNativeShared) -> "lib" ++ nm <.> dllExtension (hostPlatform lbi) + (_other, ForeignLibNativeStatic) -> "lib" ++ nm <.> staticLibExtension (hostPlatform lbi) + (_any, ForeignLibTypeUnknown) -> cabalBug "unknown foreign lib type" + where + nm :: String + nm = unUnqualComponentName $ foreignLibName flib + + os :: OS + os = let (Platform _ os') = hostPlatform lbi + in os' + + -- If a foreign lib foo has lib-version-info 5:1:2 or + -- lib-version-linux 3.2.1, it should be built as libfoo.so.3.2.1 + -- Libtool's version-info data is translated into library versions in a + -- nontrivial way: so refer to libtool documentation. + versionedExt :: String + versionedExt = + let nums = foreignLibVersion flib os + in foldl (<.>) "so" (map show nums) + +-- | Name for the library when building. +-- +-- If the `lib-version-info` field or the `lib-version-linux` field of +-- a foreign library target is set, we need to incorporate that +-- version into the SONAME field. +-- +-- If a foreign library foo has lib-version-info 5:1:2, it should be +-- built as libfoo.so.3.2.1. We want it to get soname libfoo.so.3. +-- However, GHC does not allow overriding soname by setting linker +-- options, as it sets a soname of its own (namely the output +-- filename), after the user-supplied linker options. Hence, we have +-- to compile the library with the soname as its filename. We rename +-- the compiled binary afterwards. +-- +-- This method allows to adjust the name of the library at build time +-- such that the correct soname can be set. 
+flibBuildName :: LocalBuildInfo -> ForeignLib -> String +flibBuildName lbi flib + -- On linux, if a foreign-library has version data, the first digit is used + -- to produce the SONAME. + | (os, foreignLibType flib) == + (Linux, ForeignLibNativeShared) + = let nums = foreignLibVersion flib os + in "lib" ++ nm <.> foldl (<.>) "so" (map show (take 1 nums)) + | otherwise = flibTargetName lbi flib + where + os :: OS + os = let (Platform _ os') = hostPlatform lbi + in os' + + nm :: String + nm = unUnqualComponentName $ foreignLibName flib + +gbuildIsRepl :: GBuildMode -> Bool +gbuildIsRepl (GBuildExe _) = False +gbuildIsRepl (GReplExe _ _) = True +gbuildIsRepl (GBuildFLib _) = False +gbuildIsRepl (GReplFLib _ _) = True + +gbuildNeedDynamic :: LocalBuildInfo -> GBuildMode -> Bool +gbuildNeedDynamic lbi bm = + case bm of + GBuildExe _ -> withDynExe lbi + GReplExe _ _ -> withDynExe lbi + GBuildFLib flib -> withDynFLib flib + GReplFLib _ flib -> withDynFLib flib + where + withDynFLib flib = + case foreignLibType flib of + ForeignLibNativeShared -> + ForeignLibStandalone `notElem` foreignLibOptions flib + ForeignLibNativeStatic -> + False + ForeignLibTypeUnknown -> + cabalBug "unknown foreign lib type" + +gbuildModDefFiles :: GBuildMode -> [FilePath] +gbuildModDefFiles (GBuildExe _) = [] +gbuildModDefFiles (GReplExe _ _) = [] +gbuildModDefFiles (GBuildFLib flib) = foreignLibModDefFile flib +gbuildModDefFiles (GReplFLib _ flib) = foreignLibModDefFile flib + +-- | "Main" module name when overridden by @ghc-options: -main-is ...@ +-- or 'Nothing' if no @-main-is@ flag could be found. +-- +-- In case of 'Nothing', 'Distribution.ModuleName.main' can be assumed. 
+exeMainModuleName :: Executable -> Maybe ModuleName +exeMainModuleName Executable{buildInfo = bnfo} = + -- GHC honors the last occurence of a module name updated via -main-is + -- + -- Moreover, -main-is when parsed left-to-right can update either + -- the "Main" module name, or the "main" function name, or both, + -- see also 'decodeMainIsArg'. + msum $ reverse $ map decodeMainIsArg $ findIsMainArgs ghcopts + where + ghcopts = hcOptions GHC bnfo + + findIsMainArgs [] = [] + findIsMainArgs ("-main-is":arg:rest) = arg : findIsMainArgs rest + findIsMainArgs (_:rest) = findIsMainArgs rest + +-- | Decode argument to '-main-is' +-- +-- Returns 'Nothing' if argument set only the function name. +-- +-- This code has been stolen/refactored from GHC's DynFlags.setMainIs +-- function. The logic here is deliberately imperfect as it is +-- intended to be bug-compatible with GHC's parser. See discussion in +-- https://github.com/haskell/cabal/pull/4539#discussion_r118981753. +decodeMainIsArg :: String -> Maybe ModuleName +decodeMainIsArg arg + | not (null main_fn) && isLower (head main_fn) + -- The arg looked like "Foo.Bar.baz" + = Just (ModuleName.fromString main_mod) + | isUpper (head arg) -- The arg looked like "Foo" or "Foo.Bar" + = Just (ModuleName.fromString arg) + | otherwise -- The arg looked like "baz" + = Nothing + where + (main_mod, main_fn) = splitLongestPrefix arg (== '.') + + splitLongestPrefix :: String -> (Char -> Bool) -> (String,String) + splitLongestPrefix str pred' + | null r_pre = (str, []) + | otherwise = (reverse (tail r_pre), reverse r_suf) + -- 'tail' drops the char satisfying 'pred' + where (r_suf, r_pre) = break pred' (reverse str) + + +-- | A collection of: +-- * C input files +-- * C++ input files +-- * GHC input files +-- * GHC input modules +-- +-- Used to correctly build and link sources. 
+data BuildSources = BuildSources { + cSourcesFiles :: [FilePath], + cxxSourceFiles :: [FilePath], + inputSourceFiles :: [FilePath], + inputSourceModules :: [ModuleName] + } + +-- | Locate and return the 'BuildSources' required to build and link. +gbuildSources :: Verbosity + -> Version -- ^ specVersion + -> FilePath + -> GBuildMode + -> IO BuildSources +gbuildSources verbosity specVer tmpDir bm = + case bm of + GBuildExe exe -> exeSources exe + GReplExe _ exe -> exeSources exe + GBuildFLib flib -> return $ flibSources flib + GReplFLib _ flib -> return $ flibSources flib + where + exeSources :: Executable -> IO BuildSources + exeSources exe@Executable{buildInfo = bnfo, modulePath = modPath} = do + main <- findFile (tmpDir : hsSourceDirs bnfo) modPath + let mainModName = fromMaybe ModuleName.main $ exeMainModuleName exe + otherModNames = exeModules exe + + if isHaskell main + then + if specVer < mkVersion [2] && (mainModName `elem` otherModNames) + then do + -- The cabal manual clearly states that `other-modules` is + -- intended for non-main modules. However, there's at least one + -- important package on Hackage (happy-1.19.5) which + -- violates this. We workaround this here so that we don't + -- invoke GHC with e.g. 'ghc --make Main src/Main.hs' which + -- would result in GHC complaining about duplicate Main + -- modules. + -- + -- Finally, we only enable this workaround for + -- specVersion < 2, as 'cabal-version:>=2.0' cabal files + -- have no excuse anymore to keep doing it wrong... ;-) + warn verbosity $ "Enabling workaround for Main module '" + ++ display mainModName + ++ "' listed in 'other-modules' illegally!" 
+ + return BuildSources { + cSourcesFiles = cSources bnfo, + cxxSourceFiles = cxxSources bnfo, + inputSourceFiles = [main], + inputSourceModules = filter (/= mainModName) $ exeModules exe + } + + else return BuildSources { + cSourcesFiles = cSources bnfo, + cxxSourceFiles = cxxSources bnfo, + inputSourceFiles = [main], + inputSourceModules = exeModules exe + } + else let (csf, cxxsf) + | isCxx main = ( cSources bnfo, main : cxxSources bnfo) + -- if main is not a Haskell source + -- and main is not a C++ source + -- then we assume that it is a C source + | otherwise = (main : cSources bnfo, cxxSources bnfo) + + in return BuildSources { + cSourcesFiles = csf, + cxxSourceFiles = cxxsf, + inputSourceFiles = [], + inputSourceModules = exeModules exe + } + + flibSources :: ForeignLib -> BuildSources + flibSources flib@ForeignLib{foreignLibBuildInfo = bnfo} = + BuildSources { + cSourcesFiles = cSources bnfo, + cxxSourceFiles = cxxSources bnfo, + inputSourceFiles = [], + inputSourceModules = foreignLibModules flib + } + + isHaskell :: FilePath -> Bool + isHaskell fp = elem (takeExtension fp) [".hs", ".lhs"] + + isCxx :: FilePath -> Bool + isCxx fp = elem (takeExtension fp) [".cpp", ".cxx", ".c++"] + +-- | Generic build function. See comment for 'GBuildMode'. 
+gbuild :: Verbosity -> Cabal.Flag (Maybe Int)
+ -> PackageDescription -> LocalBuildInfo
+ -> GBuildMode -> ComponentLocalBuildInfo -> IO ()
+gbuild verbosity numJobs pkg_descr lbi bm clbi = do
+ (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi)
+ let replFlags = case bm of
+ GReplExe flags _ -> flags
+ GReplFLib flags _ -> flags
+ GBuildExe{} -> mempty
+ GBuildFLib{} -> mempty
+ comp = compiler lbi
+ platform = hostPlatform lbi
+ implInfo = getImplInfo comp
+ runGhcProg = runGHC verbosity ghcProg comp platform
+
+ let (bnfo, threaded) = case bm of
+ GBuildFLib _ -> popThreadedFlag (gbuildInfo bm)
+ _ -> (gbuildInfo bm, False)
+
+ -- the name that GHC really uses (e.g., with .exe on Windows for executables)
+ let targetName = gbuildTargetName lbi bm
+ let targetDir = buildDir lbi </> (gbuildName bm)
+ let tmpDir = targetDir </> (gbuildName bm ++ "-tmp")
+ createDirectoryIfMissingVerbose verbosity True targetDir
+ createDirectoryIfMissingVerbose verbosity True tmpDir
+
+ -- TODO: do we need to put hs-boot files into place for mutually recursive
+ -- modules? FIX: what about exeName.hi-boot?
+
+ -- Determine if program coverage should be enabled and if so, what
+ -- '-hpcdir' should be.
+ let isCoverageEnabled = exeCoverage lbi + distPref = fromFlag $ configDistPref $ configFlags lbi + hpcdir way + | gbuildIsRepl bm = mempty -- HPC is not supported in ghci + | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way (gbuildName bm) + | otherwise = mempty + + rpaths <- getRPaths lbi clbi + buildSources <- gbuildSources verbosity (specVersion pkg_descr) tmpDir bm + + let cSrcs = cSourcesFiles buildSources + cxxSrcs = cxxSourceFiles buildSources + inputFiles = inputSourceFiles buildSources + inputModules = inputSourceModules buildSources + isGhcDynamic = isDynamic comp + dynamicTooSupported = supportsDynamicToo comp + cObjs = map (`replaceExtension` objExtension) cSrcs + cxxObjs = map (`replaceExtension` objExtension) cxxSrcs + needDynamic = gbuildNeedDynamic lbi bm + needProfiling = withProfExe lbi + + -- build executables + baseOpts = (componentGhcOptions verbosity lbi bnfo clbi tmpDir) + `mappend` mempty { + ghcOptMode = toFlag GhcModeMake, + ghcOptInputFiles = toNubListR inputFiles, + ghcOptInputModules = toNubListR inputModules + } + staticOpts = baseOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcStaticOnly, + ghcOptHPCDir = hpcdir Hpc.Vanilla + } + profOpts = baseOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptProfilingAuto = Internal.profDetailLevelFlag False + (withProfExeDetail lbi), + ghcOptHiSuffix = toFlag "p_hi", + ghcOptObjSuffix = toFlag "p_o", + ghcOptExtra = hcProfOptions GHC bnfo, + ghcOptHPCDir = hpcdir Hpc.Prof + } + dynOpts = baseOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + -- TODO: Does it hurt to set -fPIC for executables? 
+ ghcOptFPic = toFlag True,
+ ghcOptHiSuffix = toFlag "dyn_hi",
+ ghcOptObjSuffix = toFlag "dyn_o",
+ ghcOptExtra = hcSharedOptions GHC bnfo,
+ ghcOptHPCDir = hpcdir Hpc.Dyn
+ }
+ dynTooOpts = staticOpts `mappend` mempty {
+ ghcOptDynLinkMode = toFlag GhcStaticAndDynamic,
+ ghcOptDynHiSuffix = toFlag "dyn_hi",
+ ghcOptDynObjSuffix = toFlag "dyn_o",
+ ghcOptHPCDir = hpcdir Hpc.Dyn
+ }
+ linkerOpts = mempty {
+ ghcOptLinkOptions = PD.ldOptions bnfo,
+ ghcOptLinkLibs = extraLibs bnfo,
+ ghcOptLinkLibPath = toNubListR $ extraLibDirs bnfo,
+ ghcOptLinkFrameworks = toNubListR $
+ PD.frameworks bnfo,
+ ghcOptLinkFrameworkDirs = toNubListR $
+ PD.extraFrameworkDirs bnfo,
+ ghcOptInputFiles = toNubListR
+ [tmpDir </> x | x <- cObjs ++ cxxObjs]
+ }
+ dynLinkerOpts = mempty {
+ ghcOptRPaths = rpaths
+ }
+ replOpts = baseOpts {
+ ghcOptExtra = Internal.filterGhciFlags
+ (ghcOptExtra baseOpts)
+ <> replFlags
+ }
+ -- For a normal compile we do separate invocations of ghc for
+ -- compiling as for linking. But for repl we have to do just
+ -- the one invocation, so that one has to include all the
+ -- linker stuff too, like -l flags and any .o files from C
+ -- files etc.
+ `mappend` linkerOpts
+ `mappend` mempty {
+ ghcOptMode = toFlag GhcModeInteractive,
+ ghcOptOptimisation = toFlag GhcNoOptimisation
+ }
+ commonOpts | needProfiling = profOpts
+ | needDynamic = dynOpts
+ | otherwise = staticOpts
+ compileOpts | useDynToo = dynTooOpts
+ | otherwise = commonOpts
+ withStaticExe = not needProfiling && not needDynamic
+
+ -- For building exe's that use TH with -prof or -dynamic we actually have
+ -- to build twice, once without -prof/-dynamic and then again with
+ -- -prof/-dynamic. This is because the code that TH needs to run at
+ -- compile time needs to be the vanilla ABI so it can be loaded up and run
+ -- by the compiler.
+ -- With dynamic-by-default GHC the TH object files loaded at compile-time
+ -- need to be .dyn_o instead of .o.
+ doingTH = usesTemplateHaskellOrQQ bnfo + -- Should we use -dynamic-too instead of compiling twice? + useDynToo = dynamicTooSupported && isGhcDynamic + && doingTH && withStaticExe + && null (hcSharedOptions GHC bnfo) + compileTHOpts | isGhcDynamic = dynOpts + | otherwise = staticOpts + compileForTH + | gbuildIsRepl bm = False + | useDynToo = False + | isGhcDynamic = doingTH && (needProfiling || withStaticExe) + | otherwise = doingTH && (needProfiling || needDynamic) + + -- Build static/dynamic object files for TH, if needed. + when compileForTH $ + runGhcProg compileTHOpts { ghcOptNoLink = toFlag True + , ghcOptNumJobs = numJobs } + + -- Do not try to build anything if there are no input files. + -- This can happen if the cabal file ends up with only cSrcs + -- but no Haskell modules. + unless ((null inputFiles && null inputModules) + || gbuildIsRepl bm) $ + runGhcProg compileOpts { ghcOptNoLink = toFlag True + , ghcOptNumJobs = numJobs } + + -- build any C++ sources + unless (null cxxSrcs) $ do + info verbosity "Building C++ Sources..." + sequence_ + [ do let baseCxxOpts = Internal.componentCxxGhcOptions verbosity implInfo + lbi bnfo clbi tmpDir filename + vanillaCxxOpts = if isGhcDynamic + -- Dynamic GHC requires C++ sources to be built + -- with -fPIC for REPL to work. See #2207. + then baseCxxOpts { ghcOptFPic = toFlag True } + else baseCxxOpts + profCxxOpts = vanillaCxxOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True + } + sharedCxxOpts = vanillaCxxOpts `mappend` mempty { + ghcOptFPic = toFlag True, + ghcOptDynLinkMode = toFlag GhcDynamicOnly + } + opts | needProfiling = profCxxOpts + | needDynamic = sharedCxxOpts + | otherwise = vanillaCxxOpts + -- TODO: Placing all Haskell, C, & C++ objects in a single directory + -- Has the potential for file collisions. In general we would + -- consider this a user error. However, we should strive to + -- add a warning if this occurs. 
+ odir = fromFlag (ghcOptObjDir opts) + createDirectoryIfMissingVerbose verbosity True odir + needsRecomp <- checkNeedsRecompilation filename opts + when needsRecomp $ + runGhcProg opts + | filename <- cxxSrcs ] + + -- build any C sources + unless (null cSrcs) $ do + info verbosity "Building C Sources..." + sequence_ + [ do let baseCcOpts = Internal.componentCcGhcOptions verbosity implInfo + lbi bnfo clbi tmpDir filename + vanillaCcOpts = if isGhcDynamic + -- Dynamic GHC requires C sources to be built + -- with -fPIC for REPL to work. See #2207. + then baseCcOpts { ghcOptFPic = toFlag True } + else baseCcOpts + profCcOpts = vanillaCcOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True + } + sharedCcOpts = vanillaCcOpts `mappend` mempty { + ghcOptFPic = toFlag True, + ghcOptDynLinkMode = toFlag GhcDynamicOnly + } + opts | needProfiling = profCcOpts + | needDynamic = sharedCcOpts + | otherwise = vanillaCcOpts + odir = fromFlag (ghcOptObjDir opts) + createDirectoryIfMissingVerbose verbosity True odir + needsRecomp <- checkNeedsRecompilation filename opts + when needsRecomp $ + runGhcProg opts + | filename <- cSrcs ] + + -- TODO: problem here is we need the .c files built first, so we can load them + -- with ghci, but .c files can depend on .h files generated by ghc by ffi + -- exports. + case bm of + GReplExe _ _ -> runGhcProg replOpts + GReplFLib _ _ -> runGhcProg replOpts + GBuildExe _ -> do + let linkOpts = commonOpts + `mappend` linkerOpts + `mappend` mempty { + ghcOptLinkNoHsMain = toFlag (null inputFiles) + } + `mappend` (if withDynExe lbi then dynLinkerOpts else mempty) + + info verbosity "Linking..." 
+ -- Work around old GHCs not relinking in this
+ -- situation, see #3294
+ let target = targetDir </> targetName
+ when (compilerVersion comp < mkVersion [7,7]) $ do
+ e <- doesFileExist target
+ when e (removeFile target)
+ runGhcProg linkOpts { ghcOptOutputFile = toFlag target }
+ GBuildFLib flib -> do
+ let rtsInfo = extractRtsInfo lbi
+ rtsOptLinkLibs = [
+ if needDynamic
+ then if threaded
+ then dynRtsThreadedLib (rtsDynamicInfo rtsInfo)
+ else dynRtsVanillaLib (rtsDynamicInfo rtsInfo)
+ else if threaded
+ then statRtsThreadedLib (rtsStaticInfo rtsInfo)
+ else statRtsVanillaLib (rtsStaticInfo rtsInfo)
+ ]
+ linkOpts = case foreignLibType flib of
+ ForeignLibNativeShared ->
+ commonOpts
+ `mappend` linkerOpts
+ `mappend` dynLinkerOpts
+ `mappend` mempty {
+ ghcOptLinkNoHsMain = toFlag True,
+ ghcOptShared = toFlag True,
+ ghcOptLinkLibs = rtsOptLinkLibs,
+ ghcOptLinkLibPath = toNubListR $ rtsLibPaths rtsInfo,
+ ghcOptFPic = toFlag True,
+ ghcOptLinkModDefFiles = toNubListR $ gbuildModDefFiles bm
+ }
+ -- See Note [RPATH]
+ `mappend` ifNeedsRPathWorkaround lbi mempty {
+ ghcOptLinkOptions = ["-Wl,--no-as-needed"]
+ , ghcOptLinkLibs = ["ffi"]
+ }
+ ForeignLibNativeStatic ->
+ -- this should be caught by buildFLib
+ -- (and if we do implement this, we probably don't even want to call
+ -- ghc here, but rather Ar.createArLibArchive or something)
+ cabalBug "static libraries not yet implemented"
+ ForeignLibTypeUnknown ->
+ cabalBug "unknown foreign lib type"
+ -- We build under a (potentially) different filename to set a
+ -- soname on supported platforms. See also the note for
+ -- @flibBuildName@.
+ info verbosity "Linking..."
+ let buildName = flibBuildName lbi flib
+ runGhcProg linkOpts { ghcOptOutputFile = toFlag (targetDir </> buildName) }
+ renameFile (targetDir </> buildName) (targetDir </> targetName)
+
+{-
+Note [RPATH]
+~~~~~~~~~~~~
+
+Suppose that the dynamic library depends on `base`, but not (directly) on
+`integer-gmp` (which, however, is a dependency of `base`).
We will link the +library as + + gcc ... -lHSbase-4.7.0.2-ghc7.8.4 -lHSinteger-gmp-0.5.1.0-ghc7.8.4 ... + +However, on systems (like Ubuntu) where the linker gets called with `-as-needed` +by default, the linker will notice that `integer-gmp` isn't actually a direct +dependency and hence omit the link. + +Then when we attempt to link a C program against this dynamic library, the +_static_ linker will attempt to verify that all symbols can be resolved. The +dynamic library itself does not require any symbols from `integer-gmp`, but +`base` does. In order to verify that the symbols used by `base` can be +resolved, the static linker needs to be able to _find_ integer-gmp. + +Finding the `base` dependency is simple, because the dynamic elf header +(`readelf -d`) for the library that we have created looks something like + + (NEEDED) Shared library: [libHSbase-4.7.0.2-ghc7.8.4.so] + (RPATH) Library rpath: [/path/to/base-4.7.0.2:...] + +However, when it comes to resolving the dependency on `integer-gmp`, it needs +to look at the dynamic header for `base`. On modern ghc (7.8 and higher) this +looks something like + + (NEEDED) Shared library: [libHSinteger-gmp-0.5.1.0-ghc7.8.4.so] + (RPATH) Library rpath: [$ORIGIN/../integer-gmp-0.5.1.0:...] + +This specifies the location of `integer-gmp` _in terms of_ the location of base +(using the `$ORIGIN`) variable. But here's the crux: when the static linker +attempts to verify that all symbols can be resolved, [**IT DOES NOT RESOLVE +`$ORIGIN`**](http://stackoverflow.com/questions/6323603/ld-using-rpath-origin-inside-a-shared-library-recursive). +As a consequence, it will not be able to resolve the symbols and report the +missing symbols as errors, _even though the dynamic linker **would** be able to +resolve these symbols_. 
We can tell the static linker not to report these +errors by using `--unresolved-symbols=ignore-all` and all will be fine when we +run the program ([(indeed, this is what the gold linker +does)](https://sourceware.org/ml/binutils/2013-05/msg00038.html), but it makes +the resulting library more difficult to use. + +Instead what we can do is make sure that the generated dynamic library has +explicit top-level dependencies on these libraries. This means that the static +linker knows where to find them, and when we have transitive dependencies on +the same libraries the linker will only load them once, so we avoid needing to +look at the `RPATH` of our dependencies. We can do this by passing +`--no-as-needed` to the linker, so that it doesn't omit any libraries. + +Note that on older ghc (7.6 and before) the Haskell libraries don't have an +RPATH set at all, which makes it even more important that we make these +top-level dependencies. + +Finally, we have to explicitly link against `libffi` for the same reason. For +newer ghc this _happens_ to be unnecessary on many systems because `libffi` is +a library which is not specific to GHC, and when the static linker verifies +that all symbols can be resolved it will find the `libffi` that is globally +installed (completely independent from ghc). Of course, this may well be the +_wrong_ version of `libffi`, but it's quite possible that symbol resolution +happens to work. This is of course the wrong approach, which is why we link +explicitly against `libffi` so that we will find the _right_ version of +`libffi`. +-} + +-- | Do we need the RPATH workaround? +-- +-- See Note [RPATH]. 
+ifNeedsRPathWorkaround :: Monoid a => LocalBuildInfo -> a -> a +ifNeedsRPathWorkaround lbi a = + case hostPlatform lbi of + Platform _ Linux -> a + _otherwise -> mempty + +data DynamicRtsInfo = DynamicRtsInfo { + dynRtsVanillaLib :: FilePath + , dynRtsThreadedLib :: FilePath + , dynRtsDebugLib :: FilePath + , dynRtsEventlogLib :: FilePath + , dynRtsThreadedDebugLib :: FilePath + , dynRtsThreadedEventlogLib :: FilePath + } + +data StaticRtsInfo = StaticRtsInfo { + statRtsVanillaLib :: FilePath + , statRtsThreadedLib :: FilePath + , statRtsDebugLib :: FilePath + , statRtsEventlogLib :: FilePath + , statRtsThreadedDebugLib :: FilePath + , statRtsThreadedEventlogLib :: FilePath + , statRtsProfilingLib :: FilePath + , statRtsThreadedProfilingLib :: FilePath + } + +data RtsInfo = RtsInfo { + rtsDynamicInfo :: DynamicRtsInfo + , rtsStaticInfo :: StaticRtsInfo + , rtsLibPaths :: [FilePath] + } + +-- | Extract (and compute) information about the RTS library +-- +-- TODO: This hardcodes the name as @HSrts-ghc@. I don't know if we can +-- find this information somewhere. We can lookup the 'hsLibraries' field of +-- 'InstalledPackageInfo' but it will tell us @["HSrts", "Cffi"]@, which +-- doesn't really help. 
+extractRtsInfo :: LocalBuildInfo -> RtsInfo
+extractRtsInfo lbi =
+ case PackageIndex.lookupPackageName (installedPkgs lbi) (mkPackageName "rts") of
+ [(_, [rts])] -> aux rts
+ _otherwise -> error "No (or multiple) ghc rts package is registered"
+ where
+ aux :: InstalledPackageInfo -> RtsInfo
+ aux rts = RtsInfo {
+ rtsDynamicInfo = DynamicRtsInfo {
+ dynRtsVanillaLib = withGhcVersion "HSrts"
+ , dynRtsThreadedLib = withGhcVersion "HSrts_thr"
+ , dynRtsDebugLib = withGhcVersion "HSrts_debug"
+ , dynRtsEventlogLib = withGhcVersion "HSrts_l"
+ , dynRtsThreadedDebugLib = withGhcVersion "HSrts_thr_debug"
+ , dynRtsThreadedEventlogLib = withGhcVersion "HSrts_thr_l"
+ }
+ , rtsStaticInfo = StaticRtsInfo {
+ statRtsVanillaLib = "HSrts"
+ , statRtsThreadedLib = "HSrts_thr"
+ , statRtsDebugLib = "HSrts_debug"
+ , statRtsEventlogLib = "HSrts_l"
+ , statRtsThreadedDebugLib = "HSrts_thr_debug"
+ , statRtsThreadedEventlogLib = "HSrts_thr_l"
+ , statRtsProfilingLib = "HSrts_p"
+ , statRtsThreadedProfilingLib = "HSrts_thr_p"
+ }
+ , rtsLibPaths = InstalledPackageInfo.libraryDirs rts
+ }
+ withGhcVersion = (++ ("-ghc" ++ display (compilerVersion (compiler lbi))))
+
+-- | Returns True if the modification date of the given source file is newer than
+-- the object file we last compiled for it, or if no object file exists yet.
+checkNeedsRecompilation :: FilePath -> GhcOptions -> NoCallStackIO Bool
+checkNeedsRecompilation filename opts = filename `moreRecentFile` oname
+ where oname = getObjectFileName filename opts
+
+-- | Finds the object file name of the given source file
+getObjectFileName :: FilePath -> GhcOptions -> FilePath
+getObjectFileName filename opts = oname
+ where odir = fromFlag (ghcOptObjDir opts)
+ oext = fromFlagOrDefault "o" (ghcOptObjSuffix opts)
+ oname = odir </> replaceExtension filename oext
+
+-- | Calculate the RPATHs for the component we are building.
+--
+-- Calculates relative RPATHs when 'relocatable' is set.
+getRPaths :: LocalBuildInfo
+ -> ComponentLocalBuildInfo -- ^ Component we are building
+ -> NoCallStackIO (NubListR FilePath)
+getRPaths lbi clbi | supportRPaths hostOS = do
+ libraryPaths <- depLibraryPaths False (relocatable lbi) lbi clbi
+ let hostPref = case hostOS of
+ OSX -> "@loader_path"
+ _ -> "$ORIGIN"
+ relPath p = if isRelative p then hostPref </> p else p
+ rpaths = toNubListR (map relPath libraryPaths)
+ return rpaths
+ where
+ (Platform _ hostOS) = hostPlatform lbi
+ compid = compilerId . compiler $ lbi
+
+ -- The list of RPath-supported operating systems below reflects the
+ -- platforms on which Cabal's RPATH handling is tested. It does _NOT_
+ -- reflect whether the OS supports RPATH.
+
+ -- E.g. when this comment was written, the *BSD operating systems were
+ -- untested with regards to Cabal RPATH handling, and were hence set to
+ -- 'False', while those operating systems themselves do support RPATH.
+ supportRPaths Linux = True
+ supportRPaths Windows = False
+ supportRPaths OSX = True
+ supportRPaths FreeBSD =
+ case compid of
+ CompilerId GHC ver | ver >= mkVersion [7,10,2] -> True
+ _ -> False
+ supportRPaths OpenBSD = False
+ supportRPaths NetBSD = False
+ supportRPaths DragonFly = False
+ supportRPaths Solaris = False
+ supportRPaths AIX = False
+ supportRPaths HPUX = False
+ supportRPaths IRIX = False
+ supportRPaths HaLVM = False
+ supportRPaths IOS = False
+ supportRPaths Android = False
+ supportRPaths Ghcjs = False
+ supportRPaths Hurd = False
+ supportRPaths (OtherOS _) = False
+ -- Do _not_ add a default case so that we get a warning here when a new OS
+ -- is added.
+
+getRPaths _ _ = return mempty
+
+-- | Remove the "-threaded" flag when building a foreign library, as it has no
+-- effect when used with "-shared". Returns the updated 'BuildInfo', along
+-- with whether or not the flag was present, so we can use it to link against
+-- the appropriate RTS on our own.
+popThreadedFlag :: BuildInfo -> (BuildInfo, Bool) +popThreadedFlag bi = + ( bi { options = filterHcOptions (/= "-threaded") (options bi) } + , hasThreaded (options bi)) + + where + filterHcOptions :: (String -> Bool) + -> [(CompilerFlavor, [String])] + -> [(CompilerFlavor, [String])] + filterHcOptions p hcoptss = + [ (hc, if hc == GHC then filter p opts else opts) + | (hc, opts) <- hcoptss ] + + hasThreaded :: [(CompilerFlavor, [String])] -> Bool + hasThreaded hcoptss = + or [ if hc == GHC then elem "-threaded" opts else False + | (hc, opts) <- hcoptss ] + +-- | Extracts a String representing a hash of the ABI of a built +-- library. It can fail if the library has not yet been built. +-- +libAbiHash :: Verbosity -> PackageDescription -> LocalBuildInfo + -> Library -> ComponentLocalBuildInfo -> IO String +libAbiHash verbosity _pkg_descr lbi lib clbi = do + let + libBi = libBuildInfo lib + comp = compiler lbi + platform = hostPlatform lbi + vanillaArgs0 = + (componentGhcOptions verbosity lbi libBi clbi (componentBuildDir lbi clbi)) + `mappend` mempty { + ghcOptMode = toFlag GhcModeAbiHash, + ghcOptInputModules = toNubListR $ exposedModules lib + } + vanillaArgs = + -- Package DBs unnecessary, and break ghc-cabal. See #3633 + -- BUT, put at least the global database so that 7.4 doesn't + -- break. 
+ vanillaArgs0 { ghcOptPackageDBs = [GlobalPackageDB] + , ghcOptPackages = mempty } + sharedArgs = vanillaArgs `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptFPic = toFlag True, + ghcOptHiSuffix = toFlag "dyn_hi", + ghcOptObjSuffix = toFlag "dyn_o", + ghcOptExtra = hcSharedOptions GHC libBi + } + profArgs = vanillaArgs `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptProfilingAuto = Internal.profDetailLevelFlag True + (withProfLibDetail lbi), + ghcOptHiSuffix = toFlag "p_hi", + ghcOptObjSuffix = toFlag "p_o", + ghcOptExtra = hcProfOptions GHC libBi + } + ghcArgs + | withVanillaLib lbi = vanillaArgs + | withSharedLib lbi = sharedArgs + | withProfLib lbi = profArgs + | otherwise = error "libAbiHash: Can't find an enabled library way" + + (ghcProg, _) <- requireProgram verbosity ghcProgram (withPrograms lbi) + hash <- getProgramInvocationOutput verbosity + (ghcInvocation ghcProg comp platform ghcArgs) + return (takeWhile (not . isSpace) hash) + +componentGhcOptions :: Verbosity -> LocalBuildInfo + -> BuildInfo -> ComponentLocalBuildInfo -> FilePath + -> GhcOptions +componentGhcOptions verbosity lbi = + Internal.componentGhcOptions verbosity implInfo lbi + where + comp = compiler lbi + implInfo = getImplInfo comp + +componentCcGhcOptions :: Verbosity -> LocalBuildInfo + -> BuildInfo -> ComponentLocalBuildInfo + -> FilePath -> FilePath + -> GhcOptions +componentCcGhcOptions verbosity lbi = + Internal.componentCcGhcOptions verbosity implInfo lbi + where + comp = compiler lbi + implInfo = getImplInfo comp + +-- ----------------------------------------------------------------------------- +-- Installing + +-- |Install executables for GHC. 
+installExe :: Verbosity
+ -> LocalBuildInfo
+ -> FilePath -- ^Where to copy the files to
+ -> FilePath -- ^Build location
+ -> (FilePath, FilePath) -- ^Executable (prefix,suffix)
+ -> PackageDescription
+ -> Executable
+ -> IO ()
+installExe verbosity lbi binDir buildPref
+ (progprefix, progsuffix) _pkg exe = do
+ createDirectoryIfMissingVerbose verbosity True binDir
+ let exeName' = unUnqualComponentName $ exeName exe
+ exeFileName = exeTargetName (hostPlatform lbi) exe
+ fixedExeBaseName = progprefix ++ exeName' ++ progsuffix
+ installBinary dest = do
+ installExecutableFile verbosity
+ (buildPref </> exeName' </> exeFileName)
+ (dest <.> exeExtension (hostPlatform lbi))
+ when (stripExes lbi) $
+ Strip.stripExe verbosity (hostPlatform lbi) (withPrograms lbi)
+ (dest <.> exeExtension (hostPlatform lbi))
+ installBinary (binDir </> fixedExeBaseName)
+
+-- |Install foreign library for GHC.
+installFLib :: Verbosity
+ -> LocalBuildInfo
+ -> FilePath -- ^install location
+ -> FilePath -- ^Build location
+ -> PackageDescription
+ -> ForeignLib
+ -> IO ()
+installFLib verbosity lbi targetDir builtDir _pkg flib =
+ install (foreignLibIsShared flib)
+ builtDir
+ targetDir
+ (flibTargetName lbi flib)
+ where
+ install isShared srcDir dstDir name = do
+ let src = srcDir </> name
+ dst = dstDir </> name
+ createDirectoryIfMissingVerbose verbosity True targetDir
+ -- TODO: Should we strip? (stripLibs lbi)
+ if isShared
+ then installExecutableFile verbosity src dst
+ else installOrdinaryFile verbosity src dst
+ -- Now install appropriate symlinks if library is versioned
+ let (Platform _ os) = hostPlatform lbi
+ when (not (null (foreignLibVersion flib os))) $ do
+ when (os /= Linux) $ die' verbosity
+ -- It should be impossible to get here.
+ "Can't install foreign-library symlink on non-Linux OS"
+#ifndef mingw32_HOST_OS
+ -- 'createSymbolicLink file1 file2' creates a symbolic link
+ -- named 'file2' which points to the file 'file1'.
+ -- Note that we do want a symlink to 'name' rather than
+ -- 'dst', because the symlink will be relative to the
+ -- directory it's created in.
+ -- Finally, we first create the symlinks in a temporary
+ -- directory and then rename to simulate 'ln --force'.
+ withTempDirectory verbosity dstDir nm $ \tmpDir -> do
+ let link1 = flibBuildName lbi flib
+ link2 = "lib" ++ nm <.> "so"
+ createSymbolicLink name (tmpDir </> link1)
+ renameFile (tmpDir </> link1) (dstDir </> link1)
+ createSymbolicLink name (tmpDir </> link2)
+ renameFile (tmpDir </> link2) (dstDir </> link2)
+ where
+ nm :: String
+ nm = unUnqualComponentName $ foreignLibName flib
+#endif /* mingw32_HOST_OS */
+
+
+-- |Install for ghc, .hi, .a and, if --with-ghci given, .o
+installLib :: Verbosity
+ -> LocalBuildInfo
+ -> FilePath -- ^install location
+ -> FilePath -- ^install location for dynamic libraries
+ -> FilePath -- ^Build location
+ -> PackageDescription
+ -> Library
+ -> ComponentLocalBuildInfo
+ -> IO ()
+installLib verbosity lbi targetDir dynlibTargetDir _builtDir _pkg lib clbi = do
+ -- copy .hi files over:
+ whenVanilla $ copyModuleFiles "hi"
+ whenProf $ copyModuleFiles "p_hi"
+ whenShared $ copyModuleFiles "dyn_hi"
+
+ -- copy the built library files over:
+ whenHasCode $ do
+ whenVanilla $ do
+ sequence_ [ installOrdinary builtDir targetDir (mkGenericStaticLibName (l ++ f))
+ | l <- getHSLibraryName (componentUnitId clbi):(extraBundledLibs (libBuildInfo lib))
+ , f <- "":extraLibFlavours (libBuildInfo lib)
+ ]
+ whenGHCi $ installOrdinary builtDir targetDir ghciLibName
+ whenProf $ do
+ installOrdinary builtDir targetDir profileLibName
+ whenGHCi $ installOrdinary builtDir targetDir ghciProfLibName
+ whenShared $ installShared builtDir dynlibTargetDir sharedLibName
+
+ where
+ builtDir = componentBuildDir lbi clbi
+
+ install isShared srcDir dstDir name = do
+ let src = srcDir </> name
+ dst = dstDir </> name
+
+ createDirectoryIfMissingVerbose verbosity True dstDir
+
+ if isShared
+ then installExecutableFile
verbosity src dst + else installOrdinaryFile verbosity src dst + + when (stripLibs lbi) $ Strip.stripLib verbosity + (hostPlatform lbi) (withPrograms lbi) dst + + installOrdinary = install False + installShared = install True + + copyModuleFiles ext = + findModuleFiles [builtDir] [ext] (allLibModules lib clbi) + >>= installOrdinaryFiles verbosity targetDir + + compiler_id = compilerId (compiler lbi) + uid = componentUnitId clbi + profileLibName = mkProfLibName uid + ghciLibName = Internal.mkGHCiLibName uid + ghciProfLibName = Internal.mkGHCiProfLibName uid + sharedLibName = (mkSharedLibName (hostPlatform lbi) compiler_id) uid + + hasLib = not $ null (allLibModules lib clbi) + && null (cSources (libBuildInfo lib)) + && null (cxxSources (libBuildInfo lib)) + has_code = not (componentIsIndefinite clbi) + whenHasCode = when has_code + whenVanilla = when (hasLib && withVanillaLib lbi) + whenProf = when (hasLib && withProfLib lbi && has_code) + whenGHCi = when (hasLib && withGHCiLib lbi && has_code) + whenShared = when (hasLib && withSharedLib lbi && has_code) + +-- ----------------------------------------------------------------------------- +-- Registering + +hcPkgInfo :: ProgramDb -> HcPkg.HcPkgInfo +hcPkgInfo progdb = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = ghcPkgProg + , HcPkg.noPkgDbStack = v < [6,9] + , HcPkg.noVerboseFlag = v < [6,11] + , HcPkg.flagPackageConf = v < [7,5] + , HcPkg.supportsDirDbs = v >= [6,8] + , HcPkg.requiresDirDbs = v >= [7,10] + , HcPkg.nativeMultiInstance = v >= [7,10] + , HcPkg.recacheMultiInstance = v >= [6,12] + , HcPkg.suppressFilesCheck = v >= [6,6] + } + where + v = versionNumbers ver + Just ghcPkgProg = lookupProgram ghcPkgProgram progdb + Just ver = programVersion ghcPkgProg + +registerPackage + :: Verbosity + -> ProgramDb + -> PackageDBStack + -> InstalledPackageInfo + -> HcPkg.RegisterOptions + -> IO () +registerPackage verbosity progdb packageDbs installedPkgInfo registerOptions = + HcPkg.register (hcPkgInfo progdb) verbosity 
packageDbs + installedPkgInfo registerOptions + +pkgRoot :: Verbosity -> LocalBuildInfo -> PackageDB -> IO FilePath +pkgRoot verbosity lbi = pkgRoot' + where + pkgRoot' GlobalPackageDB = + let Just ghcProg = lookupProgram ghcProgram (withPrograms lbi) + in fmap takeDirectory (getGlobalPackageDB verbosity ghcProg) + pkgRoot' UserPackageDB = do + appDir <- getAppUserDataDirectory "ghc" + let ver = compilerVersion (compiler lbi) + subdir = System.Info.arch ++ '-':System.Info.os + ++ '-':display ver + rootDir = appDir subdir + -- We must create the root directory for the user package database if it + -- does not yet exists. Otherwise '${pkgroot}' will resolve to a + -- directory at the time of 'ghc-pkg register', and registration will + -- fail. + createDirectoryIfMissing True rootDir + return rootDir + pkgRoot' (SpecificPackageDB fp) = return (takeDirectory fp) + +-- ----------------------------------------------------------------------------- +-- Utils + +isDynamic :: Compiler -> Bool +isDynamic = Internal.ghcLookupProperty "GHC Dynamic" + +supportsDynamicToo :: Compiler -> Bool +supportsDynamicToo = Internal.ghcLookupProperty "Support dynamic-too" + +withExt :: FilePath -> String -> FilePath +withExt fp ext = fp <.> if takeExtension fp /= ('.':ext) then ext else "" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/GHCJS.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/GHCJS.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/GHCJS.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/GHCJS.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,895 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +module Distribution.Simple.GHCJS ( + configure, getInstalledPackages, getPackageDBContents, + buildLib, buildExe, + replLib, replExe, + startInterpreter, + installLib, 
installExe, + libAbiHash, + hcPkgInfo, + registerPackage, + componentGhcOptions, + getLibDir, + isDynamic, + getGlobalPackageDB, + runCmd + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.UnqualComponentName +import Distribution.Simple.GHC.ImplInfo +import qualified Distribution.Simple.GHC.Internal as Internal +import Distribution.PackageDescription as PD +import Distribution.InstalledPackageInfo +import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.LocalBuildInfo +import qualified Distribution.Simple.Hpc as Hpc +import Distribution.Simple.BuildPaths +import Distribution.Simple.Utils +import Distribution.Simple.Program +import qualified Distribution.Simple.Program.HcPkg as HcPkg +import qualified Distribution.Simple.Program.Ar as Ar +import qualified Distribution.Simple.Program.Ld as Ld +import qualified Distribution.Simple.Program.Strip as Strip +import Distribution.Simple.Program.GHC +import Distribution.Simple.Setup hiding ( Flag ) +import qualified Distribution.Simple.Setup as Cabal +import Distribution.Simple.Compiler hiding ( Flag ) +import Distribution.Version +import Distribution.System +import Distribution.Verbosity +import Distribution.Utils.NubList +import Distribution.Text +import Distribution.Types.UnitId + +import qualified Data.Map as Map +import System.Directory ( doesFileExist ) +import System.FilePath ( (), (<.>), takeExtension + , takeDirectory, replaceExtension ) + +configure :: Verbosity -> Maybe FilePath -> Maybe FilePath + -> ProgramDb + -> IO (Compiler, Maybe Platform, ProgramDb) +configure verbosity hcPath hcPkgPath progdb0 = do + (ghcjsProg, ghcjsVersion, progdb1) <- + requireProgramVersion verbosity ghcjsProgram + (orLaterVersion (mkVersion [0,1])) + (userMaybeSpecifyPath "ghcjs" hcPath progdb0) + Just ghcjsGhcVersion <- findGhcjsGhcVersion verbosity (programPath ghcjsProg) + let 
implInfo = ghcjsVersionImplInfo ghcjsVersion ghcjsGhcVersion + + -- This is slightly tricky, we have to configure ghcjs first, then we use the + -- location of ghcjs to help find ghcjs-pkg in the case that the user did not + -- specify the location of ghc-pkg directly: + (ghcjsPkgProg, ghcjsPkgVersion, progdb2) <- + requireProgramVersion verbosity ghcjsPkgProgram { + programFindLocation = guessGhcjsPkgFromGhcjsPath ghcjsProg + } + anyVersion (userMaybeSpecifyPath "ghcjs-pkg" hcPkgPath progdb1) + + Just ghcjsPkgGhcjsVersion <- findGhcjsPkgGhcjsVersion + verbosity (programPath ghcjsPkgProg) + + when (ghcjsVersion /= ghcjsPkgGhcjsVersion) $ die' verbosity $ + "Version mismatch between ghcjs and ghcjs-pkg: " + ++ programPath ghcjsProg ++ " is version " ++ display ghcjsVersion ++ " " + ++ programPath ghcjsPkgProg ++ " is version " ++ display ghcjsPkgGhcjsVersion + + when (ghcjsGhcVersion /= ghcjsPkgVersion) $ die' verbosity $ + "Version mismatch between ghcjs and ghcjs-pkg: " + ++ programPath ghcjsProg + ++ " was built with GHC version " ++ display ghcjsGhcVersion ++ " " + ++ programPath ghcjsPkgProg + ++ " was built with GHC version " ++ display ghcjsPkgVersion + + -- be sure to use our versions of hsc2hs, c2hs, haddock and ghc + let hsc2hsProgram' = + hsc2hsProgram { programFindLocation = + guessHsc2hsFromGhcjsPath ghcjsProg } + c2hsProgram' = + c2hsProgram { programFindLocation = + guessC2hsFromGhcjsPath ghcjsProg } + + haddockProgram' = + haddockProgram { programFindLocation = + guessHaddockFromGhcjsPath ghcjsProg } + progdb3 = addKnownPrograms [ hsc2hsProgram', c2hsProgram', haddockProgram' ] progdb2 + + languages <- Internal.getLanguages verbosity implInfo ghcjsProg + extensions <- Internal.getExtensions verbosity implInfo ghcjsProg + + ghcInfo <- Internal.getGhcInfo verbosity implInfo ghcjsProg + let ghcInfoMap = Map.fromList ghcInfo + + let comp = Compiler { + compilerId = CompilerId GHCJS ghcjsVersion, + compilerAbiTag = AbiTag $ + "ghc" ++ intercalate "_" (map 
show . versionNumbers $ ghcjsGhcVersion), + compilerCompat = [CompilerId GHC ghcjsGhcVersion], + compilerLanguages = languages, + compilerExtensions = extensions, + compilerProperties = ghcInfoMap + } + compPlatform = Internal.targetPlatform ghcInfo + -- configure gcc and ld + let progdb4 = if ghcjsNativeToo comp + then Internal.configureToolchain implInfo + ghcjsProg ghcInfoMap progdb3 + else progdb3 + return (comp, compPlatform, progdb4) + +ghcjsNativeToo :: Compiler -> Bool +ghcjsNativeToo = Internal.ghcLookupProperty "Native Too" + +guessGhcjsPkgFromGhcjsPath :: ConfiguredProgram -> Verbosity + -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) +guessGhcjsPkgFromGhcjsPath = guessToolFromGhcjsPath ghcjsPkgProgram + +guessHsc2hsFromGhcjsPath :: ConfiguredProgram -> Verbosity + -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) +guessHsc2hsFromGhcjsPath = guessToolFromGhcjsPath hsc2hsProgram + +guessC2hsFromGhcjsPath :: ConfiguredProgram -> Verbosity + -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) +guessC2hsFromGhcjsPath = guessToolFromGhcjsPath c2hsProgram + +guessHaddockFromGhcjsPath :: ConfiguredProgram -> Verbosity + -> ProgramSearchPath -> IO (Maybe (FilePath, [FilePath])) +guessHaddockFromGhcjsPath = guessToolFromGhcjsPath haddockProgram + +guessToolFromGhcjsPath :: Program -> ConfiguredProgram + -> Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])) +guessToolFromGhcjsPath tool ghcjsProg verbosity searchpath + = do let toolname = programName tool + path = programPath ghcjsProg + dir = takeDirectory path + versionSuffix = takeVersionSuffix (dropExeExtension path) + guessNormal = dir toolname <.> exeExtension buildPlatform + guessGhcjsVersioned = dir (toolname ++ "-ghcjs" ++ versionSuffix) + <.> exeExtension buildPlatform + guessGhcjs = dir (toolname ++ "-ghcjs") + <.> exeExtension buildPlatform + guessVersioned = dir (toolname ++ versionSuffix) <.> exeExtension buildPlatform + guesses | null versionSuffix = 
[guessGhcjs, guessNormal] + | otherwise = [guessGhcjsVersioned, + guessGhcjs, + guessVersioned, + guessNormal] + info verbosity $ "looking for tool " ++ toolname + ++ " near compiler in " ++ dir + exists <- traverse doesFileExist guesses + case [ file | (file, True) <- zip guesses exists ] of + -- If we can't find it near ghc, fall back to the usual + -- method. + [] -> programFindLocation tool verbosity searchpath + (fp:_) -> do info verbosity $ "found " ++ toolname ++ " in " ++ fp + let lookedAt = map fst + . takeWhile (\(_file, exist) -> not exist) + $ zip guesses exists + return (Just (fp, lookedAt)) + + where takeVersionSuffix :: FilePath -> String + takeVersionSuffix = reverse . takeWhile (`elem ` "0123456789.-") . + reverse + +-- | Given a single package DB, return all installed packages. +getPackageDBContents :: Verbosity -> PackageDB -> ProgramDb + -> IO InstalledPackageIndex +getPackageDBContents verbosity packagedb progdb = do + pkgss <- getInstalledPackages' verbosity [packagedb] progdb + toPackageIndex verbosity pkgss progdb + +-- | Given a package DB stack, return all installed packages. +getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramDb + -> IO InstalledPackageIndex +getInstalledPackages verbosity packagedbs progdb = do + checkPackageDbEnvVar verbosity + checkPackageDbStack verbosity packagedbs + pkgss <- getInstalledPackages' verbosity packagedbs progdb + index <- toPackageIndex verbosity pkgss progdb + return $! index + +toPackageIndex :: Verbosity + -> [(PackageDB, [InstalledPackageInfo])] + -> ProgramDb + -> IO InstalledPackageIndex +toPackageIndex verbosity pkgss progdb = do + -- On Windows, various fields have $topdir/foo rather than full + -- paths. We need to substitute the right value in so that when + -- we, for example, call gcc, we have proper paths to give it. + topDir <- getLibDir' verbosity ghcjsProg + let indices = [ PackageIndex.fromList (map (Internal.substTopDir topDir) pkgs) + | (_, pkgs) <- pkgss ] + return $! 
(mconcat indices) + + where + Just ghcjsProg = lookupProgram ghcjsProgram progdb + +checkPackageDbEnvVar :: Verbosity -> IO () +checkPackageDbEnvVar verbosity = + Internal.checkPackageDbEnvVar verbosity "GHCJS" "GHCJS_PACKAGE_PATH" + +checkPackageDbStack :: Verbosity -> PackageDBStack -> IO () +checkPackageDbStack _ (GlobalPackageDB:rest) + | GlobalPackageDB `notElem` rest = return () +checkPackageDbStack verbosity rest + | GlobalPackageDB `notElem` rest = + die' verbosity $ "With current ghc versions the global package db is always used " + ++ "and must be listed first. This ghc limitation may be lifted in " + ++ "future, see http://hackage.haskell.org/trac/ghc/ticket/5977" +checkPackageDbStack verbosity _ = + die' verbosity $ "If the global package db is specified, it must be " + ++ "specified first and cannot be specified multiple times" + +getInstalledPackages' :: Verbosity -> [PackageDB] -> ProgramDb + -> IO [(PackageDB, [InstalledPackageInfo])] +getInstalledPackages' verbosity packagedbs progdb = + sequenceA + [ do pkgs <- HcPkg.dump (hcPkgInfo progdb) verbosity packagedb + return (packagedb, pkgs) + | packagedb <- packagedbs ] + +getLibDir :: Verbosity -> LocalBuildInfo -> IO FilePath +getLibDir verbosity lbi = + (reverse . dropWhile isSpace . reverse) `fmap` + getDbProgramOutput verbosity ghcjsProgram + (withPrograms lbi) ["--print-libdir"] + +getLibDir' :: Verbosity -> ConfiguredProgram -> IO FilePath +getLibDir' verbosity ghcjsProg = + (reverse . dropWhile isSpace . reverse) `fmap` + getProgramOutput verbosity ghcjsProg ["--print-libdir"] + +-- | Return the 'FilePath' to the global GHC package database. +getGlobalPackageDB :: Verbosity -> ConfiguredProgram -> IO FilePath +getGlobalPackageDB verbosity ghcjsProg = + (reverse . dropWhile isSpace . 
reverse) `fmap` + getProgramOutput verbosity ghcjsProg ["--print-global-package-db"] + +toJSLibName :: String -> String +toJSLibName lib + | takeExtension lib `elem` [".dll",".dylib",".so"] + = replaceExtension lib "js_so" + | takeExtension lib == ".a" = replaceExtension lib "js_a" + | otherwise = lib <.> "js_a" + +buildLib :: Verbosity -> Cabal.Flag (Maybe Int) -> PackageDescription + -> LocalBuildInfo -> Library -> ComponentLocalBuildInfo + -> IO () +buildLib = buildOrReplLib Nothing + +replLib :: [String] -> Verbosity + -> Cabal.Flag (Maybe Int) -> PackageDescription + -> LocalBuildInfo -> Library + -> ComponentLocalBuildInfo -> IO () +replLib = buildOrReplLib . Just + +buildOrReplLib :: Maybe [String] -> Verbosity + -> Cabal.Flag (Maybe Int) -> PackageDescription + -> LocalBuildInfo -> Library + -> ComponentLocalBuildInfo -> IO () +buildOrReplLib mReplFlags verbosity numJobs pkg_descr lbi lib clbi = do + let uid = componentUnitId clbi + libTargetDir = buildDir lbi + whenVanillaLib forceVanilla = + when (not forRepl && (forceVanilla || withVanillaLib lbi)) + whenProfLib = when (not forRepl && withProfLib lbi) + whenSharedLib forceShared = + when (not forRepl && (forceShared || withSharedLib lbi)) + whenGHCiLib = when (not forRepl && withGHCiLib lbi) + forRepl = maybe False (const True) mReplFlags + ifReplLib = when forRepl + replFlags = fromMaybe mempty mReplFlags + comp = compiler lbi + platform = hostPlatform lbi + implInfo = getImplInfo comp + nativeToo = ghcjsNativeToo comp + + (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) + let runGhcjsProg = runGHC verbosity ghcjsProg comp platform + libBi = libBuildInfo lib + isGhcjsDynamic = isDynamic comp + dynamicTooSupported = supportsDynamicToo comp + doingTH = usesTemplateHaskellOrQQ libBi + forceVanillaLib = doingTH && not isGhcjsDynamic + forceSharedLib = doingTH && isGhcjsDynamic + -- TH always needs default libs, even when building for profiling + + -- Determine if program coverage 
should be enabled and if so, what + -- '-hpcdir' should be. + let isCoverageEnabled = libCoverage lbi + pkg_name = display $ PD.package pkg_descr + distPref = fromFlag $ configDistPref $ configFlags lbi + hpcdir way + | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way pkg_name + | otherwise = mempty + + createDirectoryIfMissingVerbose verbosity True libTargetDir + -- TODO: do we need to put hs-boot files into place for mutually recursive + -- modules? + let cObjs = map (`replaceExtension` objExtension) (cSources libBi) + jsSrcs = jsSources libBi + baseOpts = componentGhcOptions verbosity lbi libBi clbi libTargetDir + linkJsLibOpts = mempty { + ghcOptExtra = + [ "-link-js-lib" , getHSLibraryName uid + , "-js-lib-outputdir", libTargetDir ] ++ + jsSrcs + } + vanillaOptsNoJsLib = baseOpts `mappend` mempty { + ghcOptMode = toFlag GhcModeMake, + ghcOptNumJobs = numJobs, + ghcOptInputModules = toNubListR $ allLibModules lib clbi, + ghcOptHPCDir = hpcdir Hpc.Vanilla + } + vanillaOpts = vanillaOptsNoJsLib `mappend` linkJsLibOpts + + profOpts = adjustExts "p_hi" "p_o" vanillaOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptExtra = ghcjsProfOptions libBi, + ghcOptHPCDir = hpcdir Hpc.Prof + } + sharedOpts = adjustExts "dyn_hi" "dyn_o" vanillaOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptFPic = toFlag True, + ghcOptExtra = ghcjsSharedOptions libBi, + ghcOptHPCDir = hpcdir Hpc.Dyn + } + linkerOpts = mempty { + ghcOptLinkOptions = PD.ldOptions libBi, + ghcOptLinkLibs = extraLibs libBi, + ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi, + ghcOptLinkFrameworks = toNubListR $ PD.frameworks libBi, + ghcOptInputFiles = + toNubListR $ [libTargetDir x | x <- cObjs] ++ jsSrcs + } + replOpts = vanillaOptsNoJsLib { + ghcOptExtra = Internal.filterGhciFlags + (ghcOptExtra vanillaOpts) + <> replFlags, + ghcOptNumJobs = mempty + } + `mappend` linkerOpts + `mappend` mempty { + ghcOptMode = toFlag GhcModeInteractive, + ghcOptOptimisation 
= toFlag GhcNoOptimisation + } + + vanillaSharedOpts = vanillaOpts `mappend` + mempty { + ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, + ghcOptDynHiSuffix = toFlag "dyn_hi", + ghcOptDynObjSuffix = toFlag "dyn_o", + ghcOptHPCDir = hpcdir Hpc.Dyn + } + + unless (forRepl || (null (allLibModules lib clbi) && null jsSrcs && null cObjs)) $ + do let vanilla = whenVanillaLib forceVanillaLib (runGhcjsProg vanillaOpts) + shared = whenSharedLib forceSharedLib (runGhcjsProg sharedOpts) + useDynToo = dynamicTooSupported && + (forceVanillaLib || withVanillaLib lbi) && + (forceSharedLib || withSharedLib lbi) && + null (ghcjsSharedOptions libBi) + if useDynToo + then do + runGhcjsProg vanillaSharedOpts + case (hpcdir Hpc.Dyn, hpcdir Hpc.Vanilla) of + (Cabal.Flag dynDir, Cabal.Flag vanillaDir) -> do + -- When the vanilla and shared library builds are done + -- in one pass, only one set of HPC module interfaces + -- are generated. This set should suffice for both + -- static and dynamically linked executables. We copy + -- the modules interfaces so they are available under + -- both ways. + copyDirectoryRecursive verbosity dynDir vanillaDir + _ -> return () + else if isGhcjsDynamic + then do shared; vanilla + else do vanilla; shared + whenProfLib (runGhcjsProg profOpts) + + -- build any C sources + unless (null (cSources libBi) || not nativeToo) $ do + info verbosity "Building C Sources..." 
+ sequence_ + [ do let vanillaCcOpts = + (Internal.componentCcGhcOptions verbosity implInfo + lbi libBi clbi libTargetDir filename) + profCcOpts = vanillaCcOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptObjSuffix = toFlag "p_o" + } + sharedCcOpts = vanillaCcOpts `mappend` mempty { + ghcOptFPic = toFlag True, + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptObjSuffix = toFlag "dyn_o" + } + odir = fromFlag (ghcOptObjDir vanillaCcOpts) + createDirectoryIfMissingVerbose verbosity True odir + runGhcjsProg vanillaCcOpts + whenSharedLib forceSharedLib (runGhcjsProg sharedCcOpts) + whenProfLib (runGhcjsProg profCcOpts) + | filename <- cSources libBi] + + -- TODO: problem here is we need the .c files built first, so we can load them + -- with ghci, but .c files can depend on .h files generated by ghc by ffi + -- exports. + unless (null (allLibModules lib clbi)) $ + ifReplLib (runGhcjsProg replOpts) + + -- link: + when (nativeToo && not forRepl) $ do + info verbosity "Linking..." 
+ let cProfObjs = map (`replaceExtension` ("p_" ++ objExtension)) + (cSources libBi) + cSharedObjs = map (`replaceExtension` ("dyn_" ++ objExtension)) + (cSources libBi) + compiler_id = compilerId (compiler lbi) + vanillaLibFilePath = libTargetDir mkLibName uid + profileLibFilePath = libTargetDir mkProfLibName uid + sharedLibFilePath = libTargetDir mkSharedLibName (hostPlatform lbi) compiler_id uid + ghciLibFilePath = libTargetDir Internal.mkGHCiLibName uid + ghciProfLibFilePath = libTargetDir Internal.mkGHCiProfLibName uid + + hObjs <- Internal.getHaskellObjects implInfo lib lbi clbi + libTargetDir objExtension True + hProfObjs <- + if (withProfLib lbi) + then Internal.getHaskellObjects implInfo lib lbi clbi + libTargetDir ("p_" ++ objExtension) True + else return [] + hSharedObjs <- + if (withSharedLib lbi) + then Internal.getHaskellObjects implInfo lib lbi clbi + libTargetDir ("dyn_" ++ objExtension) False + else return [] + + unless (null hObjs && null cObjs) $ do + + let staticObjectFiles = + hObjs + ++ map (libTargetDir ) cObjs + profObjectFiles = + hProfObjs + ++ map (libTargetDir ) cProfObjs + dynamicObjectFiles = + hSharedObjs + ++ map (libTargetDir ) cSharedObjs + -- After the relocation lib is created we invoke ghc -shared + -- with the dependencies spelled out as -package arguments + -- and ghc invokes the linker with the proper library paths + ghcSharedLinkArgs = + mempty { + ghcOptShared = toFlag True, + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptInputFiles = toNubListR dynamicObjectFiles, + ghcOptOutputFile = toFlag sharedLibFilePath, + ghcOptExtra = ghcjsSharedOptions libBi, + ghcOptNoAutoLinkPackages = toFlag True, + ghcOptPackageDBs = withPackageDB lbi, + ghcOptPackages = toNubListR $ + Internal.mkGhcOptPackages clbi, + ghcOptLinkLibs = extraLibs libBi, + ghcOptLinkLibPath = toNubListR $ extraLibDirs libBi + } + + whenVanillaLib False $ do + Ar.createArLibArchive verbosity lbi vanillaLibFilePath staticObjectFiles + whenGHCiLib $ do + 
(ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi) + Ld.combineObjectFiles verbosity lbi ldProg + ghciLibFilePath staticObjectFiles + + whenProfLib $ do + Ar.createArLibArchive verbosity lbi profileLibFilePath profObjectFiles + whenGHCiLib $ do + (ldProg, _) <- requireProgram verbosity ldProgram (withPrograms lbi) + Ld.combineObjectFiles verbosity lbi ldProg + ghciProfLibFilePath profObjectFiles + + whenSharedLib False $ + runGhcjsProg ghcSharedLinkArgs + +-- | Start a REPL without loading any source files. +startInterpreter :: Verbosity -> ProgramDb -> Compiler -> Platform + -> PackageDBStack -> IO () +startInterpreter verbosity progdb comp platform packageDBs = do + let replOpts = mempty { + ghcOptMode = toFlag GhcModeInteractive, + ghcOptPackageDBs = packageDBs + } + checkPackageDbStack verbosity packageDBs + (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram progdb + runGHC verbosity ghcjsProg comp platform replOpts + +buildExe :: Verbosity -> Cabal.Flag (Maybe Int) + -> PackageDescription -> LocalBuildInfo + -> Executable -> ComponentLocalBuildInfo -> IO () +buildExe = buildOrReplExe Nothing + +replExe :: [String] -> Verbosity + -> Cabal.Flag (Maybe Int) -> PackageDescription + -> LocalBuildInfo -> Executable + -> ComponentLocalBuildInfo -> IO () +replExe = buildOrReplExe . 
Just + +buildOrReplExe :: Maybe [String] -> Verbosity + -> Cabal.Flag (Maybe Int) -> PackageDescription + -> LocalBuildInfo -> Executable + -> ComponentLocalBuildInfo -> IO () +buildOrReplExe mReplFlags verbosity numJobs _pkg_descr lbi + exe@Executable { exeName = exeName', modulePath = modPath } clbi = do + + (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) + let forRepl = maybe False (const True) mReplFlags + replFlags = fromMaybe mempty mReplFlags + comp = compiler lbi + platform = hostPlatform lbi + implInfo = getImplInfo comp + runGhcjsProg = runGHC verbosity ghcjsProg comp platform + exeBi = buildInfo exe + + let exeName'' = unUnqualComponentName exeName' + -- exeNameReal, the name that GHC really uses (with .exe on Windows) + let exeNameReal = exeName'' <.> + (if takeExtension exeName'' /= ('.':exeExtension buildPlatform) + then exeExtension buildPlatform + else "") + + let targetDir = (buildDir lbi) exeName'' + let exeDir = targetDir (exeName'' ++ "-tmp") + createDirectoryIfMissingVerbose verbosity True targetDir + createDirectoryIfMissingVerbose verbosity True exeDir + -- TODO: do we need to put hs-boot files into place for mutually recursive + -- modules? FIX: what about exeName.hi-boot? + + -- Determine if program coverage should be enabled and if so, what + -- '-hpcdir' should be. 
+ let isCoverageEnabled = exeCoverage lbi + distPref = fromFlag $ configDistPref $ configFlags lbi + hpcdir way + | isCoverageEnabled = toFlag $ Hpc.mixDir distPref way exeName'' + | otherwise = mempty + + -- build executables + + srcMainFile <- findFile (exeDir : hsSourceDirs exeBi) modPath + let isGhcjsDynamic = isDynamic comp + dynamicTooSupported = supportsDynamicToo comp + buildRunner = case clbi of + ExeComponentLocalBuildInfo {} -> False + _ -> True + isHaskellMain = elem (takeExtension srcMainFile) [".hs", ".lhs"] + jsSrcs = jsSources exeBi + cSrcs = cSources exeBi ++ [srcMainFile | not isHaskellMain] + cObjs = map (`replaceExtension` objExtension) cSrcs + nativeToo = ghcjsNativeToo comp + baseOpts = (componentGhcOptions verbosity lbi exeBi clbi exeDir) + `mappend` mempty { + ghcOptMode = toFlag GhcModeMake, + ghcOptInputFiles = toNubListR $ + [ srcMainFile | isHaskellMain], + ghcOptInputModules = toNubListR $ + [ m | not isHaskellMain, m <- exeModules exe], + ghcOptExtra = + if buildRunner then ["-build-runner"] + else mempty + } + staticOpts = baseOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcStaticOnly, + ghcOptHPCDir = hpcdir Hpc.Vanilla + } + profOpts = adjustExts "p_hi" "p_o" baseOpts `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptExtra = ghcjsProfOptions exeBi, + ghcOptHPCDir = hpcdir Hpc.Prof + } + dynOpts = adjustExts "dyn_hi" "dyn_o" baseOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptExtra = ghcjsSharedOptions exeBi, + ghcOptHPCDir = hpcdir Hpc.Dyn + } + dynTooOpts = adjustExts "dyn_hi" "dyn_o" staticOpts `mappend` mempty { + ghcOptDynLinkMode = toFlag GhcStaticAndDynamic, + ghcOptHPCDir = hpcdir Hpc.Dyn + } + linkerOpts = mempty { + ghcOptLinkOptions = PD.ldOptions exeBi, + ghcOptLinkLibs = extraLibs exeBi, + ghcOptLinkLibPath = toNubListR $ extraLibDirs exeBi, + ghcOptLinkFrameworks = toNubListR $ PD.frameworks exeBi, + ghcOptInputFiles = toNubListR $ + [exeDir x | x <- cObjs] ++ jsSrcs + 
} + replOpts = baseOpts { + ghcOptExtra = Internal.filterGhciFlags + (ghcOptExtra baseOpts) + <> replFlags + } + -- For a normal compile we do separate invocations of ghc for + -- compiling as for linking. But for repl we have to do just + -- the one invocation, so that one has to include all the + -- linker stuff too, like -l flags and any .o files from C + -- files etc. + `mappend` linkerOpts + `mappend` mempty { + ghcOptMode = toFlag GhcModeInteractive, + ghcOptOptimisation = toFlag GhcNoOptimisation + } + commonOpts | withProfExe lbi = profOpts + | withDynExe lbi = dynOpts + | otherwise = staticOpts + compileOpts | useDynToo = dynTooOpts + | otherwise = commonOpts + withStaticExe = (not $ withProfExe lbi) && (not $ withDynExe lbi) + + -- For building exe's that use TH with -prof or -dynamic we actually have + -- to build twice, once without -prof/-dynamic and then again with + -- -prof/-dynamic. This is because the code that TH needs to run at + -- compile time needs to be the vanilla ABI so it can be loaded up and run + -- by the compiler. + -- With dynamic-by-default GHC the TH object files loaded at compile-time + -- need to be .dyn_o instead of .o. + doingTH = usesTemplateHaskellOrQQ exeBi + -- Should we use -dynamic-too instead of compiling twice? + useDynToo = dynamicTooSupported && isGhcjsDynamic + && doingTH && withStaticExe && null (ghcjsSharedOptions exeBi) + compileTHOpts | isGhcjsDynamic = dynOpts + | otherwise = staticOpts + compileForTH + | forRepl = False + | useDynToo = False + | isGhcjsDynamic = doingTH && (withProfExe lbi || withStaticExe) + | otherwise = doingTH && (withProfExe lbi || withDynExe lbi) + + linkOpts = commonOpts `mappend` + linkerOpts `mappend` mempty { + ghcOptLinkNoHsMain = toFlag (not isHaskellMain) + } + + -- Build static/dynamic object files for TH, if needed. 
+ when compileForTH $ + runGhcjsProg compileTHOpts { ghcOptNoLink = toFlag True + , ghcOptNumJobs = numJobs } + + unless forRepl $ + runGhcjsProg compileOpts { ghcOptNoLink = toFlag True + , ghcOptNumJobs = numJobs } + + -- build any C sources + unless (null cSrcs || not nativeToo) $ do + info verbosity "Building C Sources..." + sequence_ + [ do let opts = (Internal.componentCcGhcOptions verbosity implInfo lbi exeBi + clbi exeDir filename) `mappend` mempty { + ghcOptDynLinkMode = toFlag (if withDynExe lbi + then GhcDynamicOnly + else GhcStaticOnly), + ghcOptProfilingMode = toFlag (withProfExe lbi) + } + odir = fromFlag (ghcOptObjDir opts) + createDirectoryIfMissingVerbose verbosity True odir + runGhcjsProg opts + | filename <- cSrcs ] + + -- TODO: problem here is we need the .c files built first, so we can load them + -- with ghci, but .c files can depend on .h files generated by ghc by ffi + -- exports. + when forRepl $ runGhcjsProg replOpts + + -- link: + unless forRepl $ do + info verbosity "Linking..." 
+ runGhcjsProg linkOpts { ghcOptOutputFile = toFlag (targetDir exeNameReal) } + +-- |Install for ghc, .hi, .a and, if --with-ghci given, .o +installLib :: Verbosity + -> LocalBuildInfo + -> FilePath -- ^install location + -> FilePath -- ^install location for dynamic libraries + -> FilePath -- ^Build location + -> PackageDescription + -> Library + -> ComponentLocalBuildInfo + -> IO () +installLib verbosity lbi targetDir dynlibTargetDir builtDir _pkg lib clbi = do + whenVanilla $ copyModuleFiles "js_hi" + whenProf $ copyModuleFiles "js_p_hi" + whenShared $ copyModuleFiles "js_dyn_hi" + + whenVanilla $ installOrdinary builtDir targetDir $ toJSLibName vanillaLibName + whenProf $ installOrdinary builtDir targetDir $ toJSLibName profileLibName + whenShared $ installShared builtDir dynlibTargetDir $ toJSLibName sharedLibName + + when (ghcjsNativeToo $ compiler lbi) $ do + -- copy .hi files over: + whenVanilla $ copyModuleFiles "hi" + whenProf $ copyModuleFiles "p_hi" + whenShared $ copyModuleFiles "dyn_hi" + + -- copy the built library files over: + whenVanilla $ do + installOrdinaryNative builtDir targetDir vanillaLibName + whenGHCi $ installOrdinaryNative builtDir targetDir ghciLibName + whenProf $ do + installOrdinaryNative builtDir targetDir profileLibName + whenGHCi $ installOrdinaryNative builtDir targetDir ghciProfLibName + whenShared $ installSharedNative builtDir dynlibTargetDir sharedLibName + + where + install isShared isJS srcDir dstDir name = do + let src = srcDir name + dst = dstDir name + createDirectoryIfMissingVerbose verbosity True dstDir + + if isShared + then installExecutableFile verbosity src dst + else installOrdinaryFile verbosity src dst + + when (stripLibs lbi && not isJS) $ + Strip.stripLib verbosity + (hostPlatform lbi) (withPrograms lbi) dst + + installOrdinary = install False True + installShared = install True True + + installOrdinaryNative = install False False + installSharedNative = install True False + + copyModuleFiles ext = + 
findModuleFiles [builtDir] [ext] (allLibModules lib clbi) + >>= installOrdinaryFiles verbosity targetDir + + compiler_id = compilerId (compiler lbi) + uid = componentUnitId clbi + vanillaLibName = mkLibName uid + profileLibName = mkProfLibName uid + ghciLibName = Internal.mkGHCiLibName uid + ghciProfLibName = Internal.mkGHCiProfLibName uid + sharedLibName = (mkSharedLibName (hostPlatform lbi) compiler_id) uid + + hasLib = not $ null (allLibModules lib clbi) + && null (cSources (libBuildInfo lib)) + whenVanilla = when (hasLib && withVanillaLib lbi) + whenProf = when (hasLib && withProfLib lbi) + whenGHCi = when (hasLib && withGHCiLib lbi) + whenShared = when (hasLib && withSharedLib lbi) + +installExe :: Verbosity + -> LocalBuildInfo + -> FilePath -- ^Where to copy the files to + -> FilePath -- ^Build location + -> (FilePath, FilePath) -- ^Executable (prefix,suffix) + -> PackageDescription + -> Executable + -> IO () +installExe verbosity lbi binDir buildPref + (progprefix, progsuffix) _pkg exe = do + createDirectoryIfMissingVerbose verbosity True binDir + let exeName' = unUnqualComponentName $ exeName exe + exeFileName = exeName' + fixedExeBaseName = progprefix ++ exeName' ++ progsuffix + installBinary dest = do + runDbProgram verbosity ghcjsProgram (withPrograms lbi) $ + [ "--install-executable" + , buildPref exeName' exeFileName + , "-o", dest + ] ++ + case (stripExes lbi, lookupProgram stripProgram $ withPrograms lbi) of + (True, Just strip) -> ["-strip-program", programPath strip] + _ -> [] + installBinary (binDir fixedExeBaseName) + +libAbiHash :: Verbosity -> PackageDescription -> LocalBuildInfo + -> Library -> ComponentLocalBuildInfo -> IO String +libAbiHash verbosity _pkg_descr lbi lib clbi = do + let + libBi = libBuildInfo lib + comp = compiler lbi + platform = hostPlatform lbi + vanillaArgs = + (componentGhcOptions verbosity lbi libBi clbi (buildDir lbi)) + `mappend` mempty { + ghcOptMode = toFlag GhcModeAbiHash, + ghcOptInputModules = toNubListR $ 
PD.exposedModules lib + } + profArgs = adjustExts "js_p_hi" "js_p_o" vanillaArgs `mappend` mempty { + ghcOptProfilingMode = toFlag True, + ghcOptExtra = ghcjsProfOptions libBi + } + ghcArgs | withVanillaLib lbi = vanillaArgs + | withProfLib lbi = profArgs + | otherwise = error "libAbiHash: Can't find an enabled library way" + -- + (ghcjsProg, _) <- requireProgram verbosity ghcjsProgram (withPrograms lbi) + hash <- getProgramInvocationOutput verbosity + (ghcInvocation ghcjsProg comp platform ghcArgs) + return (takeWhile (not . isSpace) hash) + +adjustExts :: String -> String -> GhcOptions -> GhcOptions +adjustExts hiSuf objSuf opts = + opts `mappend` mempty { + ghcOptHiSuffix = toFlag hiSuf, + ghcOptObjSuffix = toFlag objSuf + } + +registerPackage :: Verbosity + -> ProgramDb + -> PackageDBStack + -> InstalledPackageInfo + -> HcPkg.RegisterOptions + -> IO () +registerPackage verbosity progdb packageDbs installedPkgInfo registerOptions = + HcPkg.register (hcPkgInfo progdb) verbosity packageDbs + installedPkgInfo registerOptions + +componentGhcOptions :: Verbosity -> LocalBuildInfo + -> BuildInfo -> ComponentLocalBuildInfo -> FilePath + -> GhcOptions +componentGhcOptions verbosity lbi bi clbi odir = + let opts = Internal.componentGhcOptions verbosity implInfo lbi bi clbi odir + comp = compiler lbi + implInfo = getImplInfo comp + in opts { ghcOptExtra = ghcOptExtra opts `mappend` hcOptions GHCJS bi + } + +ghcjsProfOptions :: BuildInfo -> [String] +ghcjsProfOptions bi = + hcProfOptions GHC bi `mappend` hcProfOptions GHCJS bi + +ghcjsSharedOptions :: BuildInfo -> [String] +ghcjsSharedOptions bi = + hcSharedOptions GHC bi `mappend` hcSharedOptions GHCJS bi + +isDynamic :: Compiler -> Bool +isDynamic = Internal.ghcLookupProperty "GHC Dynamic" + +supportsDynamicToo :: Compiler -> Bool +supportsDynamicToo = Internal.ghcLookupProperty "Support dynamic-too" + +findGhcjsGhcVersion :: Verbosity -> FilePath -> IO (Maybe Version) +findGhcjsGhcVersion verbosity pgm = + 
findProgramVersion "--numeric-ghc-version" id verbosity pgm + +findGhcjsPkgGhcjsVersion :: Verbosity -> FilePath -> IO (Maybe Version) +findGhcjsPkgGhcjsVersion verbosity pgm = + findProgramVersion "--numeric-ghcjs-version" id verbosity pgm + +-- ----------------------------------------------------------------------------- +-- Registering + +hcPkgInfo :: ProgramDb -> HcPkg.HcPkgInfo +hcPkgInfo progdb = HcPkg.HcPkgInfo { HcPkg.hcPkgProgram = ghcjsPkgProg + , HcPkg.noPkgDbStack = False + , HcPkg.noVerboseFlag = False + , HcPkg.flagPackageConf = False + , HcPkg.supportsDirDbs = True + , HcPkg.requiresDirDbs = ver >= v7_10 + , HcPkg.nativeMultiInstance = ver >= v7_10 + , HcPkg.recacheMultiInstance = True + , HcPkg.suppressFilesCheck = True + } + where + v7_10 = mkVersion [7,10] + Just ghcjsPkgProg = lookupProgram ghcjsPkgProgram progdb + Just ver = programVersion ghcjsPkgProg + +-- | Get the JavaScript file name and command and arguments to run a +-- program compiled by GHCJS +-- the exe should be the base program name without exe extension +runCmd :: ProgramDb -> FilePath + -> (FilePath, FilePath, [String]) +runCmd progdb exe = + ( script + , programPath ghcjsProg + , programDefaultArgs ghcjsProg ++ programOverrideArgs ghcjsProg ++ ["--run"] + ) + where + script = exe <.> "jsexe" "all" <.> "js" + Just ghcjsProg = lookupProgram ghcjsProgram progdb diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Glob.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Glob.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Glob.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Glob.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,295 @@ +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + 
+----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Glob +-- Copyright : Isaac Jones, Simon Marlow 2003-2004 +-- License : BSD3 +-- portions Copyright (c) 2007, Galois Inc. +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Simple file globbing. + +module Distribution.Simple.Glob ( + GlobSyntaxError(..), + GlobResult(..), + matchDirFileGlob, + runDirFileGlob, + fileGlobMatches, + parseFileGlob, + explainGlobSyntaxError, + Glob, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Control.Monad (guard) + +import Distribution.Simple.Utils +import Distribution.Verbosity +import Distribution.Version + +import System.Directory (getDirectoryContents, doesDirectoryExist, doesFileExist) +import System.FilePath (joinPath, splitExtensions, splitDirectories, takeFileName, (), (<.>)) + +-- Note throughout that we use splitDirectories, not splitPath. On +-- Posix, this makes no difference, but, because Windows accepts both +-- slash and backslash as its path separators, if we left in the +-- separators from the glob we might not end up properly normalised. + +data GlobResult a + = GlobMatch a + -- ^ The glob matched the value supplied. + | GlobWarnMultiDot a + -- ^ The glob did not match the value supplied because the + -- cabal-version is too low and the extensions on the file did + -- not precisely match the glob's extensions, but rather the + -- glob was a proper suffix of the file's extensions; i.e., if + -- not for the low cabal-version, it would have matched. + | GlobMissingDirectory FilePath + -- ^ The glob couldn't match because the directory named doesn't + -- exist. The directory will be as it appears in the glob (i.e., + -- relative to the directory passed to 'matchDirFileGlob', and, + -- for 'data-files', relative to 'data-dir'). + deriving (Show, Eq, Ord, Functor) + +-- | Extract the matches from a list of 'GlobResult's. 
+-- +-- Note: throws away the 'GlobMissingDirectory' results; chances are +-- that you want to check for these and error out if any are present. +globMatches :: [GlobResult a] -> [a] +globMatches input = [ a | GlobMatch a <- input ] + +data GlobSyntaxError + = StarInDirectory + | StarInFileName + | StarInExtension + | NoExtensionOnStar + | EmptyGlob + | LiteralFileNameGlobStar + | VersionDoesNotSupportGlobStar + | VersionDoesNotSupportGlob + deriving (Eq, Show) + +explainGlobSyntaxError :: FilePath -> GlobSyntaxError -> String +explainGlobSyntaxError filepath StarInDirectory = + "invalid file glob '" ++ filepath + ++ "'. A wildcard '**' is only allowed as the final parent" + ++ " directory. Stars must not otherwise appear in the parent" + ++ " directories." +explainGlobSyntaxError filepath StarInExtension = + "invalid file glob '" ++ filepath + ++ "'. Wildcards '*' are only allowed as the" + ++ " file's base name, not in the file extension." +explainGlobSyntaxError filepath StarInFileName = + "invalid file glob '" ++ filepath + ++ "'. Wildcards '*' may only totally replace the" + ++ " file's base name, not only parts of it." +explainGlobSyntaxError filepath NoExtensionOnStar = + "invalid file glob '" ++ filepath + ++ "'. If a wildcard '*' is used it must be with an file extension." +explainGlobSyntaxError filepath LiteralFileNameGlobStar = + "invalid file glob '" ++ filepath + ++ "'. If a wildcard '**' is used as a parent directory, the" + ++ " file's base name must be a wildcard '*'." +explainGlobSyntaxError _ EmptyGlob = + "invalid file glob. A glob cannot be the empty string." +explainGlobSyntaxError filepath VersionDoesNotSupportGlobStar = + "invalid file glob '" ++ filepath + ++ "'. Using the double-star syntax requires 'cabal-version: 2.4'" + ++ " or greater. Alternatively, for compatibility with earlier Cabal" + ++ " versions, list the included directories explicitly." 
+explainGlobSyntaxError filepath VersionDoesNotSupportGlob = + "invalid file glob '" ++ filepath + ++ "'. Using star wildcards requires 'cabal-version: >= 1.6'. " + ++ "Alternatively if you require compatibility with earlier Cabal " + ++ "versions then list all the files explicitly." + +data IsRecursive = Recursive | NonRecursive + +data MultiDot = MultiDotDisabled | MultiDotEnabled + +data Glob + = GlobStem FilePath Glob + -- ^ A single subdirectory component + remainder. + | GlobFinal GlobFinal + +data GlobFinal + = FinalMatch IsRecursive MultiDot String + -- ^ First argument: Is this a @**/*.ext@ pattern? + -- Second argument: should we match against the exact extensions, or accept a suffix? + -- Third argument: the extensions to accept. + | FinalLit FilePath + -- ^ Literal file name. + +reconstructGlob :: Glob -> FilePath +reconstructGlob (GlobStem dir glob) = + dir reconstructGlob glob +reconstructGlob (GlobFinal final) = case final of + FinalMatch Recursive _ exts -> "**" "*" <.> exts + FinalMatch NonRecursive _ exts -> "*" <.> exts + FinalLit path -> path + +-- | Returns 'Nothing' if the glob didn't match at all, or 'Just' the +-- result if the glob matched (or would have matched with a higher +-- cabal-version). 
+fileGlobMatches :: Glob -> FilePath -> Maybe (GlobResult FilePath) +fileGlobMatches pat candidate = do + match <- fileGlobMatchesSegments pat (splitDirectories candidate) + return (candidate <$ match) + +fileGlobMatchesSegments :: Glob -> [FilePath] -> Maybe (GlobResult ()) +fileGlobMatchesSegments _ [] = Nothing +fileGlobMatchesSegments pat (seg : segs) = case pat of + GlobStem dir pat' -> do + guard (dir == seg) + fileGlobMatchesSegments pat' segs + GlobFinal final -> case final of + FinalMatch Recursive multidot ext -> do + let (candidateBase, candidateExts) = splitExtensions (last $ seg:segs) + guard (not (null candidateBase)) + checkExt multidot ext candidateExts + FinalMatch NonRecursive multidot ext -> do + let (candidateBase, candidateExts) = splitExtensions seg + guard (null segs && not (null candidateBase)) + checkExt multidot ext candidateExts + FinalLit filename -> do + guard (null segs && filename == seg) + return (GlobMatch ()) + +checkExt + :: MultiDot + -> String -- ^ The pattern's extension + -> String -- ^ The candidate file's extension + -> Maybe (GlobResult ()) +checkExt multidot ext candidate + | ext == candidate = Just (GlobMatch ()) + | ext `isSuffixOf` candidate = case multidot of + MultiDotDisabled -> Just (GlobWarnMultiDot ()) + MultiDotEnabled -> Just (GlobMatch ()) + | otherwise = Nothing + +parseFileGlob :: Version -> FilePath -> Either GlobSyntaxError Glob +parseFileGlob version filepath = case reverse (splitDirectories filepath) of + [] -> + Left EmptyGlob + (filename : "**" : segments) + | allowGlobStar -> do + ext <- case splitExtensions filename of + ("*", ext) | '*' `elem` ext -> Left StarInExtension + | null ext -> Left NoExtensionOnStar + | otherwise -> Right ext + _ -> Left LiteralFileNameGlobStar + foldM addStem (GlobFinal $ FinalMatch Recursive multidot ext) segments + | otherwise -> Left VersionDoesNotSupportGlobStar + (filename : segments) -> do + pat <- case splitExtensions filename of + ("*", ext) | not allowGlob -> Left 
VersionDoesNotSupportGlob + | '*' `elem` ext -> Left StarInExtension + | null ext -> Left NoExtensionOnStar + | otherwise -> Right (FinalMatch NonRecursive multidot ext) + (_, ext) | '*' `elem` ext -> Left StarInExtension + | '*' `elem` filename -> Left StarInFileName + | otherwise -> Right (FinalLit filename) + foldM addStem (GlobFinal pat) segments + where + allowGlob = version >= mkVersion [1,6] + allowGlobStar = version >= mkVersion [2,4] + addStem pat seg + | '*' `elem` seg = Left StarInDirectory + | otherwise = Right (GlobStem seg pat) + multidot + | version >= mkVersion [2,4] = MultiDotEnabled + | otherwise = MultiDotDisabled + +-- | This will 'die'' when the glob matches no files, or if the glob +-- refers to a missing directory, or if the glob fails to parse. +-- +-- The returned values do not include the supplied @dir@ prefix, which +-- must itself be a valid directory (hence, it can't be the empty +-- string). +matchDirFileGlob :: Verbosity -> Version -> FilePath -> FilePath -> IO [FilePath] +matchDirFileGlob verbosity version dir filepath = case parseFileGlob version filepath of + Left err -> die' verbosity $ explainGlobSyntaxError filepath err + Right glob -> do + results <- runDirFileGlob verbosity dir glob + let missingDirectories = + [ missingDir | GlobMissingDirectory missingDir <- results ] + matches = globMatches results + -- Check for missing directories first, since we'll obviously have + -- no matches in that case. + for_ missingDirectories $ \ missingDir -> + die' verbosity $ + "filepath wildcard '" ++ filepath ++ "' refers to the directory" + ++ " '" ++ missingDir ++ "', which does not exist or is not a directory." + when (null matches) $ die' verbosity $ + "filepath wildcard '" ++ filepath + ++ "' does not match any files." + return matches + +-- | Match files against a pre-parsed glob, starting in a directory. 
+-- +-- The returned values do not include the supplied @dir@ prefix, which +-- must itself be a valid directory (hence, it can't be the empty +-- string). +runDirFileGlob :: Verbosity -> FilePath -> Glob -> IO [GlobResult FilePath] +runDirFileGlob verbosity rawDir pat = do + -- The default data-dir is null. Our callers -should- be + -- converting that to '.' themselves, but it's a certainty that + -- some future call-site will forget and trigger a really + -- hard-to-debug failure if we don't check for that here. + when (null rawDir) $ + warn verbosity $ + "Null dir passed to runDirFileGlob; interpreting it " + ++ "as '.'. This is probably an internal error." + let dir = if null rawDir then "." else rawDir + debug verbosity $ "Expanding glob '" ++ reconstructGlob pat ++ "' in directory '" ++ dir ++ "'." + -- This function might be called from the project root with dir as + -- ".". Walking the tree starting there involves going into .git/ + -- and dist-newstyle/, which is a lot of work for no reward, so + -- extract the constant prefix from the pattern and start walking + -- there, and only walk as much as we need to: recursively if **, + -- the whole directory if *, and just the specific file if it's a + -- literal. + let (prefixSegments, final) = splitConstantPrefix pat + joinedPrefix = joinPath prefixSegments + case final of + FinalMatch recursive multidot exts -> do + let prefix = dir joinedPrefix + directoryExists <- doesDirectoryExist prefix + if directoryExists + then do + candidates <- case recursive of + Recursive -> getDirectoryContentsRecursive prefix + NonRecursive -> filterM (doesFileExist . 
(prefix )) =<< getDirectoryContents prefix + let checkName candidate = do + let (candidateBase, candidateExts) = splitExtensions $ takeFileName candidate + guard (not (null candidateBase)) + match <- checkExt multidot exts candidateExts + return (joinedPrefix candidate <$ match) + return $ mapMaybe checkName candidates + else + return [ GlobMissingDirectory joinedPrefix ] + FinalLit fn -> do + exists <- doesFileExist (dir joinedPrefix fn) + return [ GlobMatch (joinedPrefix fn) | exists ] + +unfoldr' :: (a -> Either r (b, a)) -> a -> ([b], r) +unfoldr' f a = case f a of + Left r -> ([], r) + Right (b, a') -> case unfoldr' f a' of + (bs, r) -> (b : bs, r) + +-- | Extract the (possibly null) constant prefix from the pattern. +-- This has the property that, if @(pref, final) = splitConstantPrefix pat@, +-- then @pat === foldr GlobStem (GlobFinal final) pref@. +splitConstantPrefix :: Glob -> ([FilePath], GlobFinal) +splitConstantPrefix = unfoldr' step + where + step (GlobStem seg pat) = Right (seg, pat) + step (GlobFinal pat) = Left pat diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Haddock.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Haddock.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Haddock.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Haddock.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,900 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Haddock +-- Copyright : Isaac Jones 2003-2005 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module deals with the @haddock@ and @hscolour@ commands. 
+-- It uses information about installed packages (from @ghc-pkg@) to find the +-- locations of documentation for dependent packages, so it can create links. +-- +-- The @hscolour@ support allows generating HTML versions of the original +-- source, with coloured syntax highlighting. + +module Distribution.Simple.Haddock ( + haddock, hscolour, + + haddockPackagePaths + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import qualified Distribution.Simple.GHC as GHC +import qualified Distribution.Simple.GHCJS as GHCJS + +-- local +import Distribution.Backpack.DescribeUnitId +import Distribution.Types.ForeignLib +import Distribution.Types.UnqualComponentName +import Distribution.Types.ComponentLocalBuildInfo +import Distribution.Types.ExecutableScope +import Distribution.Types.LocalBuildInfo +import Distribution.Types.TargetInfo +import Distribution.Package +import qualified Distribution.ModuleName as ModuleName +import Distribution.PackageDescription as PD hiding (Flag) +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Simple.Glob +import Distribution.Simple.Program.GHC +import Distribution.Simple.Program.ResponseFile +import Distribution.Simple.Program +import Distribution.Simple.PreProcess +import Distribution.Simple.Setup +import Distribution.Simple.Build +import Distribution.Simple.BuildTarget +import Distribution.Simple.InstallDirs +import Distribution.Simple.LocalBuildInfo hiding (substPathTemplate) +import Distribution.Simple.BuildPaths +import Distribution.Simple.Register +import qualified Distribution.Simple.Program.HcPkg as HcPkg +import qualified Distribution.Simple.PackageIndex as PackageIndex +import qualified Distribution.InstalledPackageInfo as InstalledPackageInfo +import Distribution.InstalledPackageInfo ( InstalledPackageInfo ) +import Distribution.Simple.Utils +import Distribution.System +import Distribution.Text +import Distribution.Utils.NubList +import Distribution.Version +import Distribution.Verbosity 
+import Language.Haskell.Extension + +import Distribution.Compat.Semigroup (All (..), Any (..)) + +import Control.Monad +import Data.Either ( rights ) + +import System.Directory (getCurrentDirectory, doesDirectoryExist, doesFileExist) +import System.FilePath ( (), (<.>), normalise, isAbsolute ) +import System.IO (hClose, hPutStrLn, hSetEncoding, utf8) + +-- ------------------------------------------------------------------------------ +-- Types + +-- | A record that represents the arguments to the haddock executable, a product +-- monoid. +data HaddockArgs = HaddockArgs { + argInterfaceFile :: Flag FilePath, + -- ^ Path to the interface file, relative to argOutputDir, required. + argPackageName :: Flag PackageIdentifier, + -- ^ Package name, required. + argHideModules :: (All,[ModuleName.ModuleName]), + -- ^ (Hide modules ?, modules to hide) + argIgnoreExports :: Any, + -- ^ Ignore export lists in modules? + argLinkSource :: Flag (Template,Template,Template), + -- ^ (Template for modules, template for symbols, template for lines). + argLinkedSource :: Flag Bool, + -- ^ Generate hyperlinked sources + argQuickJump :: Flag Bool, + -- ^ Generate quickjump index + argCssFile :: Flag FilePath, + -- ^ Optional custom CSS file. + argContents :: Flag String, + -- ^ Optional URL to contents page. + argVerbose :: Any, + argOutput :: Flag [Output], + -- ^ HTML or Hoogle doc or both? Required. + argInterfaces :: [(FilePath, Maybe String, Maybe String)], + -- ^ [(Interface file, URL to the HTML docs and hyperlinked-source for links)]. + argOutputDir :: Directory, + -- ^ Where to generate the documentation. + argTitle :: Flag String, + -- ^ Page title, required. + argPrologue :: Flag String, + -- ^ Prologue text, required. + argGhcOptions :: GhcOptions, + -- ^ Additional flags to pass to GHC. + argGhcLibDir :: Flag FilePath, + -- ^ To find the correct GHC, required. + argTargets :: [FilePath] + -- ^ Modules to process. 
+} deriving Generic + +-- | The FilePath of a directory, it's a monoid under '()'. +newtype Directory = Dir { unDir' :: FilePath } deriving (Read,Show,Eq,Ord) + +unDir :: Directory -> FilePath +unDir = normalise . unDir' + +type Template = String + +data Output = Html | Hoogle + +-- ------------------------------------------------------------------------------ +-- Haddock support + +haddock :: PackageDescription + -> LocalBuildInfo + -> [PPSuffixHandler] + -> HaddockFlags + -> IO () +haddock pkg_descr _ _ haddockFlags + | not (hasLibs pkg_descr) + && not (fromFlag $ haddockExecutables haddockFlags) + && not (fromFlag $ haddockTestSuites haddockFlags) + && not (fromFlag $ haddockBenchmarks haddockFlags) + && not (fromFlag $ haddockForeignLibs haddockFlags) + = + warn (fromFlag $ haddockVerbosity haddockFlags) $ + "No documentation was generated as this package does not contain " + ++ "a library. Perhaps you want to use the --executables, --tests," + ++ " --benchmarks or --foreign-libraries flags." + +haddock pkg_descr lbi suffixes flags' = do + let verbosity = flag haddockVerbosity + comp = compiler lbi + platform = hostPlatform lbi + + quickJmpFlag = haddockQuickJump flags' + flags = case haddockTarget of + ForDevelopment -> flags' + ForHackage -> flags' + { haddockHoogle = Flag True + , haddockHtml = Flag True + , haddockHtmlLocation = Flag (pkg_url ++ "/docs") + , haddockContents = Flag (toPathTemplate pkg_url) + , haddockLinkedSource = Flag True + , haddockQuickJump = Flag True + } + pkg_url = "/package/$pkg-$version" + flag f = fromFlag $ f flags + + tmpFileOpts = defaultTempFileOptions + { optKeepTempFiles = flag haddockKeepTempFiles } + htmlTemplate = fmap toPathTemplate . flagToMaybe . 
haddockHtmlLocation + $ flags + haddockTarget = + fromFlagOrDefault ForDevelopment (haddockForHackage flags') + + (haddockProg, version, _) <- + requireProgramVersion verbosity haddockProgram + (orLaterVersion (mkVersion [2,0])) (withPrograms lbi) + + -- various sanity checks + when (flag haddockHoogle && version < mkVersion [2,2]) $ + die' verbosity "Haddock 2.0 and 2.1 do not support the --hoogle flag." + + + when (flag haddockQuickJump && version < mkVersion [2,19]) $ do + let msg = "Haddock prior to 2.19 does not support the --quickjump flag." + alt = "The generated documentation won't have the QuickJump feature." + if Flag True == quickJmpFlag + then die' verbosity msg + else warn verbosity (msg ++ "\n" ++ alt) + + haddockGhcVersionStr <- getProgramOutput verbosity haddockProg + ["--ghc-version"] + case (simpleParse haddockGhcVersionStr, compilerCompatVersion GHC comp) of + (Nothing, _) -> die' verbosity "Could not get GHC version from Haddock" + (_, Nothing) -> die' verbosity "Could not get GHC version from compiler" + (Just haddockGhcVersion, Just ghcVersion) + | haddockGhcVersion == ghcVersion -> return () + | otherwise -> die' verbosity $ + "Haddock's internal GHC version must match the configured " + ++ "GHC version.\n" + ++ "The GHC version is " ++ display ghcVersion ++ " but " + ++ "haddock is using GHC version " ++ display haddockGhcVersion + + -- the tools match the requests, we can proceed + + -- We fall back to using HsColour only for versions of Haddock which don't + -- support '--hyperlinked-sources'. 
+ when (flag haddockLinkedSource && version < mkVersion [2,17]) $ + hscolour' (warn verbosity) haddockTarget pkg_descr lbi suffixes + (defaultHscolourFlags `mappend` haddockToHscolour flags) + + libdirArgs <- getGhcLibDir verbosity lbi + let commonArgs = mconcat + [ libdirArgs + , fromFlags (haddockTemplateEnv lbi (packageId pkg_descr)) flags + , fromPackageDescription haddockTarget pkg_descr ] + + targets <- readTargetInfos verbosity pkg_descr lbi (haddockArgs flags) + + let + targets' = + case targets of + [] -> allTargetsInBuildOrder' pkg_descr lbi + _ -> targets + + internalPackageDB <- + createInternalPackageDB verbosity lbi (flag haddockDistPref) + + (\f -> foldM_ f (installedPkgs lbi) targets') $ \index target -> do + + let component = targetComponent target + clbi = targetCLBI target + + componentInitialBuildSteps (flag haddockDistPref) pkg_descr lbi clbi verbosity + + let + lbi' = lbi { + withPackageDB = withPackageDB lbi ++ [internalPackageDB], + installedPkgs = index + } + + preprocessComponent pkg_descr component lbi' clbi False verbosity suffixes + let + doExe com = case (compToExe com) of + Just exe -> do + withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi') "tmp" $ + \tmp -> do + exeArgs <- fromExecutable verbosity tmp lbi' clbi htmlTemplate + version exe + let exeArgs' = commonArgs `mappend` exeArgs + runHaddock verbosity tmpFileOpts comp platform + haddockProg exeArgs' + Nothing -> do + warn (fromFlag $ haddockVerbosity flags) + "Unsupported component, skipping..." 
+ return () + -- We define 'smsg' once and then reuse it inside the case, so that + -- we don't say we are running Haddock when we actually aren't + -- (e.g., Haddock is not run on non-libraries) + smsg :: IO () + smsg = setupMessage' verbosity "Running Haddock on" (packageId pkg_descr) + (componentLocalName clbi) (maybeComponentInstantiatedWith clbi) + case component of + CLib lib -> do + withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi) "tmp" $ + \tmp -> do + smsg + libArgs <- fromLibrary verbosity tmp lbi' clbi htmlTemplate + version lib + let libArgs' = commonArgs `mappend` libArgs + runHaddock verbosity tmpFileOpts comp platform haddockProg libArgs' + + case libName lib of + Just _ -> do + pwd <- getCurrentDirectory + + let + ipi = inplaceInstalledPackageInfo + pwd (flag haddockDistPref) pkg_descr + (mkAbiHash "inplace") lib lbi' clbi + + debug verbosity $ "Registering inplace:\n" + ++ (InstalledPackageInfo.showInstalledPackageInfo ipi) + + registerPackage verbosity (compiler lbi') (withPrograms lbi') + (withPackageDB lbi') ipi + HcPkg.defaultRegisterOptions { + HcPkg.registerMultiInstance = True + } + + return $ PackageIndex.insert ipi index + Nothing -> + pure index + + CFLib flib -> (when (flag haddockForeignLibs) $ do + withTempDirectoryEx verbosity tmpFileOpts (buildDir lbi') "tmp" $ + \tmp -> do + smsg + flibArgs <- fromForeignLib verbosity tmp lbi' clbi htmlTemplate + version flib + let libArgs' = commonArgs `mappend` flibArgs + runHaddock verbosity tmpFileOpts comp platform haddockProg libArgs') + + >> return index + + CExe _ -> (when (flag haddockExecutables) $ smsg >> doExe component) >> return index + CTest _ -> (when (flag haddockTestSuites) $ smsg >> doExe component) >> return index + CBench _ -> (when (flag haddockBenchmarks) $ smsg >> doExe component) >> return index + + for_ (extraDocFiles pkg_descr) $ \ fpath -> do + files <- matchDirFileGlob verbosity (specVersion pkg_descr) "." 
fpath + for_ files $ copyFileTo verbosity (unDir $ argOutputDir commonArgs) + +-- ------------------------------------------------------------------------------ +-- Contributions to HaddockArgs (see also Doctest.hs for very similar code). + +fromFlags :: PathTemplateEnv -> HaddockFlags -> HaddockArgs +fromFlags env flags = + mempty { + argHideModules = (maybe mempty (All . not) + $ flagToMaybe (haddockInternal flags), mempty), + argLinkSource = if fromFlag (haddockLinkedSource flags) + then Flag ("src/%{MODULE/./-}.html" + ,"src/%{MODULE/./-}.html#%{NAME}" + ,"src/%{MODULE/./-}.html#line-%{LINE}") + else NoFlag, + argLinkedSource = haddockLinkedSource flags, + argQuickJump = haddockQuickJump flags, + argCssFile = haddockCss flags, + argContents = fmap (fromPathTemplate . substPathTemplate env) + (haddockContents flags), + argVerbose = maybe mempty (Any . (>= deafening)) + . flagToMaybe $ haddockVerbosity flags, + argOutput = + Flag $ case [ Html | Flag True <- [haddockHtml flags] ] ++ + [ Hoogle | Flag True <- [haddockHoogle flags] ] + of [] -> [ Html ] + os -> os, + argOutputDir = maybe mempty Dir . flagToMaybe $ haddockDistPref flags, + + argGhcOptions = mempty { ghcOptExtra = ghcArgs } + } + where + ghcArgs = fromMaybe [] . lookup "ghc" . 
haddockProgramArgs $ flags + +fromPackageDescription :: HaddockTarget -> PackageDescription -> HaddockArgs +fromPackageDescription haddockTarget pkg_descr = + mempty { argInterfaceFile = Flag $ haddockName pkg_descr, + argPackageName = Flag $ packageId $ pkg_descr, + argOutputDir = Dir $ + "doc" "html" haddockDirName haddockTarget pkg_descr, + argPrologue = Flag $ if null desc then synopsis pkg_descr + else desc, + argTitle = Flag $ showPkg ++ subtitle + } + where + desc = PD.description pkg_descr + showPkg = display (packageId pkg_descr) + subtitle | null (synopsis pkg_descr) = "" + | otherwise = ": " ++ synopsis pkg_descr + +componentGhcOptions :: Verbosity -> LocalBuildInfo + -> BuildInfo -> ComponentLocalBuildInfo -> FilePath + -> GhcOptions +componentGhcOptions verbosity lbi bi clbi odir = + let f = case compilerFlavor (compiler lbi) of + GHC -> GHC.componentGhcOptions + GHCJS -> GHCJS.componentGhcOptions + _ -> error $ + "Distribution.Simple.Haddock.componentGhcOptions:" ++ + "haddock only supports GHC and GHCJS" + in f verbosity lbi bi clbi odir + +mkHaddockArgs :: Verbosity + -> FilePath + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> Maybe PathTemplate -- ^ template for HTML location + -> Version + -> [FilePath] + -> BuildInfo + -> IO HaddockArgs +mkHaddockArgs verbosity tmp lbi clbi htmlTemplate haddockVersion inFiles bi = do + ifaceArgs <- getInterfaces verbosity lbi clbi htmlTemplate + let vanillaOpts = (componentGhcOptions normal lbi bi clbi (buildDir lbi)) { + -- Noooooooooo!!!!!111 + -- haddock stomps on our precious .hi + -- and .o files. Workaround by telling + -- haddock to write them elsewhere. 
+ ghcOptObjDir = toFlag tmp, + ghcOptHiDir = toFlag tmp, + ghcOptStubDir = toFlag tmp + } `mappend` getGhcCppOpts haddockVersion bi + sharedOpts = vanillaOpts { + ghcOptDynLinkMode = toFlag GhcDynamicOnly, + ghcOptFPic = toFlag True, + ghcOptHiSuffix = toFlag "dyn_hi", + ghcOptObjSuffix = toFlag "dyn_o", + ghcOptExtra = hcSharedOptions GHC bi + + } + opts <- if withVanillaLib lbi + then return vanillaOpts + else if withSharedLib lbi + then return sharedOpts + else die' verbosity $ "Must have vanilla or shared libraries " + ++ "enabled in order to run haddock" + + return ifaceArgs { + argGhcOptions = opts, + argTargets = inFiles + } + +fromLibrary :: Verbosity + -> FilePath + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> Maybe PathTemplate -- ^ template for HTML location + -> Version + -> Library + -> IO HaddockArgs +fromLibrary verbosity tmp lbi clbi htmlTemplate haddockVersion lib = do + inFiles <- map snd `fmap` getLibSourceFiles verbosity lbi lib clbi + args <- mkHaddockArgs verbosity tmp lbi clbi htmlTemplate haddockVersion + inFiles (libBuildInfo lib) + return args { + argHideModules = (mempty, otherModules (libBuildInfo lib)) + } + +fromExecutable :: Verbosity + -> FilePath + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> Maybe PathTemplate -- ^ template for HTML location + -> Version + -> Executable + -> IO HaddockArgs +fromExecutable verbosity tmp lbi clbi htmlTemplate haddockVersion exe = do + inFiles <- map snd `fmap` getExeSourceFiles verbosity lbi exe clbi + args <- mkHaddockArgs verbosity tmp lbi clbi htmlTemplate + haddockVersion inFiles (buildInfo exe) + return args { + argOutputDir = Dir $ unUnqualComponentName $ exeName exe, + argTitle = Flag $ unUnqualComponentName $ exeName exe + } + +fromForeignLib :: Verbosity + -> FilePath + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> Maybe PathTemplate -- ^ template for HTML location + -> Version + -> ForeignLib + -> IO HaddockArgs +fromForeignLib verbosity tmp lbi clbi htmlTemplate 
haddockVersion flib = do + inFiles <- map snd `fmap` getFLibSourceFiles verbosity lbi flib clbi + args <- mkHaddockArgs verbosity tmp lbi clbi htmlTemplate + haddockVersion inFiles (foreignLibBuildInfo flib) + return args { + argOutputDir = Dir $ unUnqualComponentName $ foreignLibName flib, + argTitle = Flag $ unUnqualComponentName $ foreignLibName flib + } + +compToExe :: Component -> Maybe Executable +compToExe comp = + case comp of + CTest test@TestSuite { testInterface = TestSuiteExeV10 _ f } -> + Just Executable { + exeName = testName test, + modulePath = f, + exeScope = ExecutablePublic, + buildInfo = testBuildInfo test + } + CBench bench@Benchmark { benchmarkInterface = BenchmarkExeV10 _ f } -> + Just Executable { + exeName = benchmarkName bench, + modulePath = f, + exeScope = ExecutablePublic, + buildInfo = benchmarkBuildInfo bench + } + CExe exe -> Just exe + _ -> Nothing + +getInterfaces :: Verbosity + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> Maybe PathTemplate -- ^ template for HTML location + -> IO HaddockArgs +getInterfaces verbosity lbi clbi htmlTemplate = do + (packageFlags, warnings) <- haddockPackageFlags verbosity lbi clbi htmlTemplate + traverse_ (warn (verboseUnmarkOutput verbosity)) warnings + return $ mempty { + argInterfaces = packageFlags + } + +getGhcCppOpts :: Version + -> BuildInfo + -> GhcOptions +getGhcCppOpts haddockVersion bi = + mempty { + ghcOptExtensions = toNubListR [EnableExtension CPP | needsCpp], + ghcOptCppOptions = defines + } + where + needsCpp = EnableExtension CPP `elem` usedExtensions bi + defines = [haddockVersionMacro] + haddockVersionMacro = "-D__HADDOCK_VERSION__=" + ++ show (v1 * 1000 + v2 * 10 + v3) + where + [v1, v2, v3] = take 3 $ versionNumbers haddockVersion ++ [0,0] + +getGhcLibDir :: Verbosity -> LocalBuildInfo + -> IO HaddockArgs +getGhcLibDir verbosity lbi = do + l <- case compilerFlavor (compiler lbi) of + GHC -> GHC.getLibDir verbosity lbi + GHCJS -> GHCJS.getLibDir verbosity lbi + _ -> error 
"haddock only supports GHC and GHCJS" + return $ mempty { argGhcLibDir = Flag l } + +-- ------------------------------------------------------------------------------ +-- | Call haddock with the specified arguments. +runHaddock :: Verbosity + -> TempFileOptions + -> Compiler + -> Platform + -> ConfiguredProgram + -> HaddockArgs + -> IO () +runHaddock verbosity tmpFileOpts comp platform haddockProg args + | null (argTargets args) = warn verbosity $ + "Haddocks are being requested, but there aren't any modules given " + ++ "to create documentation for." + | otherwise = do + let haddockVersion = fromMaybe (error "unable to determine haddock version") + (programVersion haddockProg) + renderArgs verbosity tmpFileOpts haddockVersion comp platform args $ + \(flags,result)-> do + + runProgram verbosity haddockProg flags + + notice verbosity $ "Documentation created: " ++ result + + +renderArgs :: Verbosity + -> TempFileOptions + -> Version + -> Compiler + -> Platform + -> HaddockArgs + -> (([String], FilePath) -> IO a) + -> IO a +renderArgs verbosity tmpFileOpts version comp platform args k = do + let haddockSupportsUTF8 = version >= mkVersion [2,14,4] + haddockSupportsResponseFiles = version > mkVersion [2,16,2] + createDirectoryIfMissingVerbose verbosity True outputDir + withTempFileEx tmpFileOpts outputDir "haddock-prologue.txt" $ + \prologueFileName h -> do + do + when haddockSupportsUTF8 (hSetEncoding h utf8) + hPutStrLn h $ fromFlag $ argPrologue args + hClose h + let pflag = "--prologue=" ++ prologueFileName + renderedArgs = pflag : renderPureArgs version comp platform args + if haddockSupportsResponseFiles + then + withResponseFile + verbosity + tmpFileOpts + outputDir + "haddock-response.txt" + (if haddockSupportsUTF8 then Just utf8 else Nothing) + renderedArgs + (\responseFileName -> k (["@" ++ responseFileName], result)) + else + k (renderedArgs, result) + where + outputDir = (unDir $ argOutputDir args) + result = intercalate ", " + . 
map (\o -> outputDir + case o of + Html -> "index.html" + Hoogle -> pkgstr <.> "txt") + $ arg argOutput + where + pkgstr = display $ packageName pkgid + pkgid = arg argPackageName + arg f = fromFlag $ f args + +renderPureArgs :: Version -> Compiler -> Platform -> HaddockArgs -> [String] +renderPureArgs version comp platform args = concat + [ (:[]) . (\f -> "--dump-interface="++ unDir (argOutputDir args) f) + . fromFlag . argInterfaceFile $ args + + , if isVersion 2 16 + then (\pkg -> [ "--package-name=" ++ display (pkgName pkg) + , "--package-version="++display (pkgVersion pkg) + ]) + . fromFlag . argPackageName $ args + else [] + + , [ "--since-qual=external" | isVersion 2 20 ] + + , [ "--quickjump" | isVersion 2 19 + , fromFlag . argQuickJump $ args ] + + , [ "--hyperlinked-source" | isVersion 2 17 + , fromFlag . argLinkedSource $ args ] + + , (\(All b,xs) -> bool (map (("--hide=" ++). display) xs) [] b) + . argHideModules $ args + + , bool ["--ignore-all-exports"] [] . getAny . argIgnoreExports $ args + + , maybe [] (\(m,e,l) -> + ["--source-module=" ++ m + ,"--source-entity=" ++ e] + ++ if isVersion 2 14 then ["--source-entity-line=" ++ l] + else [] + ) . flagToMaybe . argLinkSource $ args + + , maybe [] ((:[]) . ("--css="++)) . flagToMaybe . argCssFile $ args + + , maybe [] ((:[]) . ("--use-contents="++)) . flagToMaybe . argContents $ args + + , bool [] [verbosityFlag] . getAny . argVerbose $ args + + , map (\o -> case o of Hoogle -> "--hoogle"; Html -> "--html") + . fromFlag . argOutput $ args + + , renderInterfaces . argInterfaces $ args + + , (:[]) . ("--odir="++) . unDir . argOutputDir $ args + + , (:[]) . ("--title="++) + . (bool (++" (internal documentation)") + id (getAny $ argIgnoreExports args)) + . fromFlag . argTitle $ args + + , [ "--optghc=" ++ opt | let opts = argGhcOptions args + , opt <- renderGhcOptions comp platform opts ] + + , maybe [] (\l -> ["-B"++l]) $ + flagToMaybe (argGhcLibDir args) -- error if Nothing? 
+ + , argTargets $ args + ] + where + renderInterfaces = map renderInterface + + renderInterface :: (FilePath, Maybe FilePath, Maybe FilePath) -> String + renderInterface (i, html, hypsrc) = "--read-interface=" ++ + (intercalate "," $ concat [ [ x | Just x <- [html] ] + , [ x | Just _ <- [html] + -- only render hypsrc path if html path + -- is given and hyperlinked-source is + -- enabled + , Just x <- [hypsrc] + , isVersion 2 17 + , fromFlag . argLinkedSource $ args + ] + , [ i ] + ]) + + bool a b c = if c then a else b + isVersion major minor = version >= mkVersion [major,minor] + verbosityFlag + | isVersion 2 5 = "--verbosity=1" + | otherwise = "--verbose" + +--------------------------------------------------------------------------------- + +-- | Given a list of 'InstalledPackageInfo's, return a list of interfaces and +-- HTML paths, and an optional warning for packages with missing documentation. +haddockPackagePaths :: [InstalledPackageInfo] + -> Maybe (InstalledPackageInfo -> FilePath) + -> NoCallStackIO ([( FilePath -- path to interface + -- file + + , Maybe FilePath -- url to html + -- documentation + + , Maybe FilePath -- url to hyperlinked + -- source + )] + , Maybe String -- warning about + -- missing documentation + ) +haddockPackagePaths ipkgs mkHtmlPath = do + interfaces <- sequenceA + [ case interfaceAndHtmlPath ipkg of + Nothing -> return (Left (packageId ipkg)) + Just (interface, html) -> do + + (html', hypsrc') <- + case html of + Just htmlPath -> do + let hypSrcPath = htmlPath defaultHyperlinkedSourceDirectory + hypSrcExists <- doesDirectoryExist hypSrcPath + return $ ( Just (fixFileUrl htmlPath) + , if hypSrcExists + then Just (fixFileUrl hypSrcPath) + else Nothing + ) + Nothing -> return (Nothing, Nothing) + + exists <- doesFileExist interface + if exists + then return (Right (interface, html', hypsrc')) + else return (Left pkgid) + | ipkg <- ipkgs, let pkgid = packageId ipkg + , pkgName pkgid `notElem` noHaddockWhitelist + ] + + let missing = 
[ pkgid | Left pkgid <- interfaces ] + warning = "The documentation for the following packages are not " + ++ "installed. No links will be generated to these packages: " + ++ intercalate ", " (map display missing) + flags = rights interfaces + + return (flags, if null missing then Nothing else Just warning) + + where + -- Don't warn about missing documentation for these packages. See #1231. + noHaddockWhitelist = map mkPackageName [ "rts" ] + + -- Actually extract interface and HTML paths from an 'InstalledPackageInfo'. + interfaceAndHtmlPath :: InstalledPackageInfo + -> Maybe (FilePath, Maybe FilePath) + interfaceAndHtmlPath pkg = do + interface <- listToMaybe (InstalledPackageInfo.haddockInterfaces pkg) + html <- case mkHtmlPath of + Nothing -> listToMaybe (InstalledPackageInfo.haddockHTMLs pkg) + Just mkPath -> Just (mkPath pkg) + return (interface, if null html then Nothing else Just html) + + -- The 'haddock-html' field in the hc-pkg output is often set as a + -- native path, but we need it as a URL. See #1064. Also don't "fix" + -- the path if it is an interpolated one. + fixFileUrl f | Nothing <- mkHtmlPath + , isAbsolute f = "file://" ++ f + | otherwise = f + + -- 'src' is the default hyperlinked source directory ever since. It is + -- not possible to configure that directory in any way in haddock. 
+ defaultHyperlinkedSourceDirectory = "src" + + +haddockPackageFlags :: Verbosity + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> Maybe PathTemplate + -> IO ([( FilePath -- path to interface + -- file + + , Maybe FilePath -- url to html + -- documentation + + , Maybe FilePath -- url to hyperlinked + -- source + )] + , Maybe String -- warning about + -- missing documentation + ) +haddockPackageFlags verbosity lbi clbi htmlTemplate = do + let allPkgs = installedPkgs lbi + directDeps = map fst (componentPackageDeps clbi) + transitiveDeps <- case PackageIndex.dependencyClosure allPkgs directDeps of + Left x -> return x + Right inf -> die' verbosity $ "internal error when calculating transitive " + ++ "package dependencies.\nDebug info: " ++ show inf + haddockPackagePaths (PackageIndex.allPackages transitiveDeps) mkHtmlPath + where + mkHtmlPath = fmap expandTemplateVars htmlTemplate + expandTemplateVars tmpl pkg = + fromPathTemplate . substPathTemplate (env pkg) $ tmpl + env pkg = haddockTemplateEnv lbi (packageId pkg) + + +haddockTemplateEnv :: LocalBuildInfo -> PackageIdentifier -> PathTemplateEnv +haddockTemplateEnv lbi pkg_id = + (PrefixVar, prefix (installDirTemplates lbi)) + -- We want the legacy unit ID here, because it gives us nice paths + -- (Haddock people don't care about the dependencies) + : initialPathTemplateEnv + pkg_id + (mkLegacyUnitId pkg_id) + (compilerInfo (compiler lbi)) + (hostPlatform lbi) + +-- ------------------------------------------------------------------------------ +-- hscolour support. + +hscolour :: PackageDescription + -> LocalBuildInfo + -> [PPSuffixHandler] + -> HscolourFlags + -> IO () +hscolour = hscolour' dieNoVerbosity ForDevelopment + +hscolour' :: (String -> IO ()) -- ^ Called when the 'hscolour' exe is not found. 
+ -> HaddockTarget + -> PackageDescription + -> LocalBuildInfo + -> [PPSuffixHandler] + -> HscolourFlags + -> IO () +hscolour' onNoHsColour haddockTarget pkg_descr lbi suffixes flags = + either onNoHsColour (\(hscolourProg, _, _) -> go hscolourProg) =<< + lookupProgramVersion verbosity hscolourProgram + (orLaterVersion (mkVersion [1,8])) (withPrograms lbi) + where + go :: ConfiguredProgram -> IO () + go hscolourProg = do + warn verbosity $ + "the 'cabal hscolour' command is deprecated in favour of 'cabal " ++ + "haddock --hyperlink-source' and will be removed in the next major " ++ + "release." + + setupMessage verbosity "Running hscolour for" (packageId pkg_descr) + createDirectoryIfMissingVerbose verbosity True $ + hscolourPref haddockTarget distPref pkg_descr + + withAllComponentsInBuildOrder pkg_descr lbi $ \comp clbi -> do + componentInitialBuildSteps distPref pkg_descr lbi clbi verbosity + preprocessComponent pkg_descr comp lbi clbi False verbosity suffixes + let + doExe com = case (compToExe com) of + Just exe -> do + let outputDir = hscolourPref haddockTarget distPref pkg_descr + unUnqualComponentName (exeName exe) "src" + runHsColour hscolourProg outputDir =<< getExeSourceFiles verbosity lbi exe clbi + Nothing -> do + warn (fromFlag $ hscolourVerbosity flags) + "Unsupported component, skipping..." 
+ return () + case comp of + CLib lib -> do + let outputDir = hscolourPref haddockTarget distPref pkg_descr "src" + runHsColour hscolourProg outputDir =<< getLibSourceFiles verbosity lbi lib clbi + CFLib flib -> do + let outputDir = hscolourPref haddockTarget distPref pkg_descr + unUnqualComponentName (foreignLibName flib) "src" + runHsColour hscolourProg outputDir =<< getFLibSourceFiles verbosity lbi flib clbi + CExe _ -> when (fromFlag (hscolourExecutables flags)) $ doExe comp + CTest _ -> when (fromFlag (hscolourTestSuites flags)) $ doExe comp + CBench _ -> when (fromFlag (hscolourBenchmarks flags)) $ doExe comp + + stylesheet = flagToMaybe (hscolourCSS flags) + + verbosity = fromFlag (hscolourVerbosity flags) + distPref = fromFlag (hscolourDistPref flags) + + runHsColour prog outputDir moduleFiles = do + createDirectoryIfMissingVerbose verbosity True outputDir + + case stylesheet of -- copy the CSS file + Nothing | programVersion prog >= Just (mkVersion [1,9]) -> + runProgram verbosity prog + ["-print-css", "-o" ++ outputDir "hscolour.css"] + | otherwise -> return () + Just s -> copyFileVerbose verbosity s (outputDir "hscolour.css") + + for_ moduleFiles $ \(m, inFile) -> + runProgram verbosity prog + ["-css", "-anchor", "-o" ++ outFile m, inFile] + where + outFile m = outputDir + intercalate "-" (ModuleName.components m) <.> "html" + +haddockToHscolour :: HaddockFlags -> HscolourFlags +haddockToHscolour flags = + HscolourFlags { + hscolourCSS = haddockHscolourCss flags, + hscolourExecutables = haddockExecutables flags, + hscolourTestSuites = haddockTestSuites flags, + hscolourBenchmarks = haddockBenchmarks flags, + hscolourForeignLibs = haddockForeignLibs flags, + hscolourVerbosity = haddockVerbosity flags, + hscolourDistPref = haddockDistPref flags, + hscolourCabalFilePath = haddockCabalFilePath flags + } + +-- ------------------------------------------------------------------------------ +-- Boilerplate Monoid instance. 
+instance Monoid HaddockArgs where + mempty = gmempty + mappend = (<>) + +instance Semigroup HaddockArgs where + (<>) = gmappend + +instance Monoid Directory where + mempty = Dir "." + mappend = (<>) + +instance Semigroup Directory where + Dir m <> Dir n = Dir $ m n diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/HaskellSuite.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/HaskellSuite.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/HaskellSuite.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/HaskellSuite.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,228 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +module Distribution.Simple.HaskellSuite where + +import Prelude () +import Distribution.Compat.Prelude + +import qualified Data.Map as Map (empty) + +import Distribution.Simple.Program +import Distribution.Simple.Compiler as Compiler +import Distribution.Simple.Utils +import Distribution.Simple.BuildPaths +import Distribution.Verbosity +import Distribution.Version +import Distribution.Text +import Distribution.Package +import Distribution.InstalledPackageInfo hiding (includeDirs) +import Distribution.Simple.PackageIndex as PackageIndex +import Distribution.PackageDescription +import Distribution.Simple.LocalBuildInfo +import Distribution.System (Platform) +import Distribution.Compat.Exception +import Language.Haskell.Extension +import Distribution.Simple.Program.Builtin + +configure + :: Verbosity -> Maybe FilePath -> Maybe FilePath + -> ProgramDb -> IO (Compiler, Maybe Platform, ProgramDb) +configure verbosity mbHcPath hcPkgPath progdb0 = do + + -- We have no idea how a haskell-suite tool is named, so we require at + -- least some information from the user. 
+ hcPath <- + let msg = "You have to provide name or path of a haskell-suite tool (-w PATH)" + in maybe (die' verbosity msg) return mbHcPath + + when (isJust hcPkgPath) $ + warn verbosity "--with-hc-pkg option is ignored for haskell-suite" + + (comp, confdCompiler, progdb1) <- configureCompiler hcPath progdb0 + + -- Update our pkg tool. It uses the same executable as the compiler, but + -- all command start with "pkg" + (confdPkg, _) <- requireProgram verbosity haskellSuitePkgProgram progdb1 + let progdb2 = + updateProgram + confdPkg + { programLocation = programLocation confdCompiler + , programDefaultArgs = ["pkg"] + } + progdb1 + + return (comp, Nothing, progdb2) + + where + configureCompiler hcPath progdb0' = do + let + haskellSuiteProgram' = + haskellSuiteProgram + { programFindLocation = \v p -> findProgramOnSearchPath v p hcPath } + + -- NB: cannot call requireProgram right away — it'd think that + -- the program is already configured and won't reconfigure it again. + -- Instead, call configureProgram directly first. + progdb1 <- configureProgram verbosity haskellSuiteProgram' progdb0' + (confdCompiler, progdb2) <- requireProgram verbosity haskellSuiteProgram' progdb1 + + extensions <- getExtensions verbosity confdCompiler + languages <- getLanguages verbosity confdCompiler + (compName, compVersion) <- + getCompilerVersion verbosity confdCompiler + + let + comp = Compiler { + compilerId = CompilerId (HaskellSuite compName) compVersion, + compilerAbiTag = Compiler.NoAbiTag, + compilerCompat = [], + compilerLanguages = languages, + compilerExtensions = extensions, + compilerProperties = Map.empty + } + + return (comp, confdCompiler, progdb2) + +hstoolVersion :: Verbosity -> FilePath -> IO (Maybe Version) +hstoolVersion = findProgramVersion "--hspkg-version" id + +numericVersion :: Verbosity -> FilePath -> IO (Maybe Version) +numericVersion = findProgramVersion "--compiler-version" (last . 
words) + +getCompilerVersion :: Verbosity -> ConfiguredProgram -> IO (String, Version) +getCompilerVersion verbosity prog = do + output <- rawSystemStdout verbosity (programPath prog) ["--compiler-version"] + let + parts = words output + name = concat $ init parts -- there shouldn't be any spaces in the name anyway + versionStr = last parts + version <- + maybe (die' verbosity "haskell-suite: couldn't determine compiler version") return $ + simpleParse versionStr + return (name, version) + +getExtensions :: Verbosity -> ConfiguredProgram -> IO [(Extension, Maybe Compiler.Flag)] +getExtensions verbosity prog = do + extStrs <- + lines `fmap` + rawSystemStdout verbosity (programPath prog) ["--supported-extensions"] + return + [ (ext, Just $ "-X" ++ display ext) | Just ext <- map simpleParse extStrs ] + +getLanguages :: Verbosity -> ConfiguredProgram -> IO [(Language, Compiler.Flag)] +getLanguages verbosity prog = do + langStrs <- + lines `fmap` + rawSystemStdout verbosity (programPath prog) ["--supported-languages"] + return + [ (ext, "-G" ++ display ext) | Just ext <- map simpleParse langStrs ] + +-- Other compilers do some kind of a packagedb stack check here. Not sure +-- if we need something like that as well. +getInstalledPackages :: Verbosity -> PackageDBStack -> ProgramDb + -> IO InstalledPackageIndex +getInstalledPackages verbosity packagedbs progdb = + liftM (PackageIndex.fromList . 
concat) $ for packagedbs $ \packagedb -> + do str <- + getDbProgramOutput verbosity haskellSuitePkgProgram progdb + ["dump", packageDbOpt packagedb] + `catchExit` \_ -> die' verbosity $ "pkg dump failed" + case parsePackages str of + Right ok -> return ok + _ -> die' verbosity "failed to parse output of 'pkg dump'" + + where + parsePackages str = + let parsed = map parseInstalledPackageInfo (splitPkgs str) + in case [ msg | ParseFailed msg <- parsed ] of + [] -> Right [ pkg | ParseOk _ pkg <- parsed ] + msgs -> Left msgs + + splitPkgs :: String -> [String] + splitPkgs = map unlines . splitWith ("---" ==) . lines + where + splitWith :: (a -> Bool) -> [a] -> [[a]] + splitWith p xs = ys : case zs of + [] -> [] + _:ws -> splitWith p ws + where (ys,zs) = break p xs + +buildLib + :: Verbosity -> PackageDescription -> LocalBuildInfo + -> Library -> ComponentLocalBuildInfo -> IO () +buildLib verbosity pkg_descr lbi lib clbi = do + -- In future, there should be a mechanism for the compiler to request any + -- number of the above parameters (or their parts) — in particular, + -- pieces of PackageDescription. + -- + -- For now, we only pass those that we know are used. 
+ + let odir = buildDir lbi + bi = libBuildInfo lib + srcDirs = hsSourceDirs bi ++ [odir] + dbStack = withPackageDB lbi + language = fromMaybe Haskell98 (defaultLanguage bi) + progdb = withPrograms lbi + pkgid = packageId pkg_descr + + runDbProgram verbosity haskellSuiteProgram progdb $ + [ "compile", "--build-dir", odir ] ++ + concat [ ["-i", d] | d <- srcDirs ] ++ + concat [ ["-I", d] | d <- [autogenComponentModulesDir lbi clbi + ,autogenPackageModulesDir lbi + ,odir] ++ includeDirs bi ] ++ + [ packageDbOpt pkgDb | pkgDb <- dbStack ] ++ + [ "--package-name", display pkgid ] ++ + concat [ ["--package-id", display ipkgid ] + | (ipkgid, _) <- componentPackageDeps clbi ] ++ + ["-G", display language] ++ + concat [ ["-X", display ex] | ex <- usedExtensions bi ] ++ + cppOptions (libBuildInfo lib) ++ + [ display modu | modu <- allLibModules lib clbi ] + + + +installLib + :: Verbosity + -> LocalBuildInfo + -> FilePath -- ^install location + -> FilePath -- ^install location for dynamic libraries + -> FilePath -- ^Build location + -> PackageDescription + -> Library + -> ComponentLocalBuildInfo + -> IO () +installLib verbosity lbi targetDir dynlibTargetDir builtDir pkg lib clbi = do + let progdb = withPrograms lbi + runDbProgram verbosity haskellSuitePkgProgram progdb $ + [ "install-library" + , "--build-dir", builtDir + , "--target-dir", targetDir + , "--dynlib-target-dir", dynlibTargetDir + , "--package-id", display $ packageId pkg + ] ++ map display (allLibModules lib clbi) + +registerPackage + :: Verbosity + -> ProgramDb + -> PackageDBStack + -> InstalledPackageInfo + -> IO () +registerPackage verbosity progdb packageDbs installedPkgInfo = do + (hspkg, _) <- requireProgram verbosity haskellSuitePkgProgram progdb + + runProgramInvocation verbosity $ + (programInvocation hspkg + ["update", packageDbOpt $ last packageDbs]) + { progInvokeInput = Just $ showInstalledPackageInfo installedPkgInfo } + +initPackageDB :: Verbosity -> ProgramDb -> FilePath -> IO () +initPackageDB 
verbosity progdb dbPath = + runDbProgram verbosity haskellSuitePkgProgram progdb + ["init", dbPath] + +packageDbOpt :: PackageDB -> String +packageDbOpt GlobalPackageDB = "--global" +packageDbOpt UserPackageDB = "--user" +packageDbOpt (SpecificPackageDB db) = "--package-db=" ++ db diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Hpc.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Hpc.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Hpc.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Hpc.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,149 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Hpc +-- Copyright : Thomas Tuegel 2011 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module provides functions for locating various HPC-related paths and +-- a function for adding the necessary options to a PackageDescription to +-- build test suites with HPC enabled. + +module Distribution.Simple.Hpc + ( Way(..), guessWay + , htmlDir + , mixDir + , tixDir + , tixFilePath + , markupPackage + , markupTest + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.UnqualComponentName +import Distribution.ModuleName ( main ) +import Distribution.PackageDescription + ( TestSuite(..) + , testModules + ) +import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo(..) 
) +import Distribution.Simple.Program + ( hpcProgram + , requireProgramVersion + ) +import Distribution.Simple.Program.Hpc ( markup, union ) +import Distribution.Simple.Utils ( notice ) +import Distribution.Version ( anyVersion ) +import Distribution.Verbosity ( Verbosity() ) +import System.Directory ( createDirectoryIfMissing, doesFileExist ) +import System.FilePath + +-- ------------------------------------------------------------------------- +-- Haskell Program Coverage + +data Way = Vanilla | Prof | Dyn + deriving (Bounded, Enum, Eq, Read, Show) + +hpcDir :: FilePath -- ^ \"dist/\" prefix + -> Way + -> FilePath -- ^ Directory containing component's HPC .mix files +hpcDir distPref way = distPref "hpc" wayDir + where + wayDir = case way of + Vanilla -> "vanilla" + Prof -> "prof" + Dyn -> "dyn" + +mixDir :: FilePath -- ^ \"dist/\" prefix + -> Way + -> FilePath -- ^ Component name + -> FilePath -- ^ Directory containing test suite's .mix files +mixDir distPref way name = hpcDir distPref way "mix" name + +tixDir :: FilePath -- ^ \"dist/\" prefix + -> Way + -> FilePath -- ^ Component name + -> FilePath -- ^ Directory containing test suite's .tix files +tixDir distPref way name = hpcDir distPref way "tix" name + +-- | Path to the .tix file containing a test suite's sum statistics. +tixFilePath :: FilePath -- ^ \"dist/\" prefix + -> Way + -> FilePath -- ^ Component name + -> FilePath -- ^ Path to test suite's .tix file +tixFilePath distPref way name = tixDir distPref way name name <.> "tix" + +htmlDir :: FilePath -- ^ \"dist/\" prefix + -> Way + -> FilePath -- ^ Component name + -> FilePath -- ^ Path to test suite's HTML markup directory +htmlDir distPref way name = hpcDir distPref way "html" name + +-- | Attempt to guess the way the test suites in this package were compiled +-- and linked with the library so the correct module interfaces are found. 
+guessWay :: LocalBuildInfo -> Way +guessWay lbi + | withProfExe lbi = Prof + | withDynExe lbi = Dyn + | otherwise = Vanilla + +-- | Generate the HTML markup for a test suite. +markupTest :: Verbosity + -> LocalBuildInfo + -> FilePath -- ^ \"dist/\" prefix + -> String -- ^ Library name + -> TestSuite + -> IO () +markupTest verbosity lbi distPref libName suite = do + tixFileExists <- doesFileExist $ tixFilePath distPref way $ testName' + when tixFileExists $ do + -- behaviour of 'markup' depends on version, so we need *a* version + -- but no particular one + (hpc, hpcVer, _) <- requireProgramVersion verbosity + hpcProgram anyVersion (withPrograms lbi) + let htmlDir_ = htmlDir distPref way testName' + markup hpc hpcVer verbosity + (tixFilePath distPref way testName') mixDirs + htmlDir_ + (testModules suite ++ [ main ]) + notice verbosity $ "Test coverage report written to " + ++ htmlDir_ "hpc_index" <.> "html" + where + way = guessWay lbi + testName' = unUnqualComponentName $ testName suite + mixDirs = map (mixDir distPref way) [ testName', libName ] + +-- | Generate the HTML markup for all of a package's test suites. 
+markupPackage :: Verbosity + -> LocalBuildInfo + -> FilePath -- ^ \"dist/\" prefix + -> String -- ^ Library name + -> [TestSuite] + -> IO () +markupPackage verbosity lbi distPref libName suites = do + let tixFiles = map (tixFilePath distPref way) testNames + tixFilesExist <- traverse doesFileExist tixFiles + when (and tixFilesExist) $ do + -- behaviour of 'markup' depends on version, so we need *a* version + -- but no particular one + (hpc, hpcVer, _) <- requireProgramVersion verbosity + hpcProgram anyVersion (withPrograms lbi) + let outFile = tixFilePath distPref way libName + htmlDir' = htmlDir distPref way libName + excluded = concatMap testModules suites ++ [ main ] + createDirectoryIfMissing True $ takeDirectory outFile + union hpc verbosity tixFiles outFile excluded + markup hpc hpcVer verbosity outFile mixDirs htmlDir' excluded + notice verbosity $ "Package coverage report written to " + ++ htmlDir' "hpc_index.html" + where + way = guessWay lbi + testNames = fmap (unUnqualComponentName . 
testName) suites + mixDirs = map (mixDir distPref way) $ libName : testNames diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/InstallDirs.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/InstallDirs.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/InstallDirs.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/InstallDirs.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,614 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.InstallDirs +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This manages everything to do with where files get installed (though does +-- not get involved with actually doing any installation). It provides an +-- 'InstallDirs' type which is a set of directories for where to install +-- things. It also handles the fact that we use templates in these install +-- dirs. For example most install dirs are relative to some @$prefix@ and by +-- changing the prefix all other dirs still end up changed appropriately. So it +-- provides a 'PathTemplate' type and functions for substituting for these +-- templates. 
+ +module Distribution.Simple.InstallDirs ( + InstallDirs(..), + InstallDirTemplates, + defaultInstallDirs, + defaultInstallDirs', + combineInstallDirs, + absoluteInstallDirs, + CopyDest(..), + prefixRelativeInstallDirs, + substituteInstallDirTemplates, + + PathTemplate, + PathTemplateVariable(..), + PathTemplateEnv, + toPathTemplate, + fromPathTemplate, + combinePathTemplate, + substPathTemplate, + initialPathTemplateEnv, + platformTemplateEnv, + compilerTemplateEnv, + packageTemplateEnv, + abiTemplateEnv, + installDirsTemplateEnv, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Compat.Environment (lookupEnv) +import Distribution.Package +import Distribution.System +import Distribution.Compiler +import Distribution.Text + +import System.Directory (getAppUserDataDirectory) +import System.FilePath + ( (), isPathSeparator + , pathSeparator, dropDrive + , takeDirectory ) + +#ifdef mingw32_HOST_OS +import qualified Prelude +import Foreign +import Foreign.C +#endif + +-- --------------------------------------------------------------------------- +-- Installation directories + + +-- | The directories where we will install files for packages. +-- +-- We have several different directories for different types of files since +-- many systems have conventions whereby different types of files in a package +-- are installed in different directories. This is particularly the case on +-- Unix style systems. 
+-- +data InstallDirs dir = InstallDirs { + prefix :: dir, + bindir :: dir, + libdir :: dir, + libsubdir :: dir, + dynlibdir :: dir, + flibdir :: dir, -- ^ foreign libraries + libexecdir :: dir, + libexecsubdir:: dir, + includedir :: dir, + datadir :: dir, + datasubdir :: dir, + docdir :: dir, + mandir :: dir, + htmldir :: dir, + haddockdir :: dir, + sysconfdir :: dir + } deriving (Eq, Read, Show, Functor, Generic) + +instance Binary dir => Binary (InstallDirs dir) + +instance (Semigroup dir, Monoid dir) => Monoid (InstallDirs dir) where + mempty = gmempty + mappend = (<>) + +instance Semigroup dir => Semigroup (InstallDirs dir) where + (<>) = gmappend + +combineInstallDirs :: (a -> b -> c) + -> InstallDirs a + -> InstallDirs b + -> InstallDirs c +combineInstallDirs combine a b = InstallDirs { + prefix = prefix a `combine` prefix b, + bindir = bindir a `combine` bindir b, + libdir = libdir a `combine` libdir b, + libsubdir = libsubdir a `combine` libsubdir b, + dynlibdir = dynlibdir a `combine` dynlibdir b, + flibdir = flibdir a `combine` flibdir b, + libexecdir = libexecdir a `combine` libexecdir b, + libexecsubdir= libexecsubdir a `combine` libexecsubdir b, + includedir = includedir a `combine` includedir b, + datadir = datadir a `combine` datadir b, + datasubdir = datasubdir a `combine` datasubdir b, + docdir = docdir a `combine` docdir b, + mandir = mandir a `combine` mandir b, + htmldir = htmldir a `combine` htmldir b, + haddockdir = haddockdir a `combine` haddockdir b, + sysconfdir = sysconfdir a `combine` sysconfdir b + } + +appendSubdirs :: (a -> a -> a) -> InstallDirs a -> InstallDirs a +appendSubdirs append dirs = dirs { + libdir = libdir dirs `append` libsubdir dirs, + libexecdir = libexecdir dirs `append` libexecsubdir dirs, + datadir = datadir dirs `append` datasubdir dirs, + libsubdir = error "internal error InstallDirs.libsubdir", + libexecsubdir = error "internal error InstallDirs.libexecsubdir", + datasubdir = error "internal error 
InstallDirs.datasubdir" + } + +-- | The installation directories in terms of 'PathTemplate's that contain +-- variables. +-- +-- The defaults for most of the directories are relative to each other, in +-- particular they are all relative to a single prefix. This makes it +-- convenient for the user to override the default installation directory +-- by only having to specify --prefix=... rather than overriding each +-- individually. This is done by allowing $-style variables in the dirs. +-- These are expanded by textual substitution (see 'substPathTemplate'). +-- +-- A few of these installation directories are split into two components, the +-- dir and subdir. The full installation path is formed by combining the two +-- together with @\/@. The reason for this is compatibility with other Unix +-- build systems which also support @--libdir@ and @--datadir@. We would like +-- users to be able to configure @--libdir=\/usr\/lib64@ for example but +-- because by default we want to support installing multiple versions of +-- packages and building the same package for multiple compilers we append the +-- libsubdir to get: @\/usr\/lib64\/$libname\/$compiler@. +-- +-- An additional complication is the need to support relocatable packages on +-- systems which support such things, like Windows. 
+-- +type InstallDirTemplates = InstallDirs PathTemplate + +-- --------------------------------------------------------------------------- +-- Default installation directories + +defaultInstallDirs :: CompilerFlavor -> Bool -> Bool -> IO InstallDirTemplates +defaultInstallDirs = defaultInstallDirs' False + +defaultInstallDirs' :: Bool {- use external internal deps -} + -> CompilerFlavor -> Bool -> Bool -> IO InstallDirTemplates +defaultInstallDirs' True comp userInstall hasLibs = do + dflt <- defaultInstallDirs' False comp userInstall hasLibs + -- Be a bit more hermetic about per-component installs + return dflt { datasubdir = toPathTemplate $ "$abi" "$libname", + docdir = toPathTemplate $ "$datadir" "doc" "$abi" "$libname" + } +defaultInstallDirs' False comp userInstall _hasLibs = do + installPrefix <- + if userInstall + then do + mDir <- lookupEnv "CABAL_DIR" + case mDir of + Nothing -> getAppUserDataDirectory "cabal" + Just dir -> return dir + else case buildOS of + Windows -> do windowsProgramFilesDir <- getWindowsProgramFilesDir + return (windowsProgramFilesDir "Haskell") + _ -> return "/usr/local" + installLibDir <- + case buildOS of + Windows -> return "$prefix" + _ -> return ("$prefix" "lib") + return $ fmap toPathTemplate $ InstallDirs { + prefix = installPrefix, + bindir = "$prefix" "bin", + libdir = installLibDir, + libsubdir = case comp of + UHC -> "$pkgid" + _other -> "$abi" "$libname", + dynlibdir = "$libdir" case comp of + UHC -> "$pkgid" + _other -> "$abi", + libexecsubdir= "$abi" "$pkgid", + flibdir = "$libdir", + libexecdir = case buildOS of + Windows -> "$prefix" "$libname" + _other -> "$prefix" "libexec", + includedir = "$libdir" "$libsubdir" "include", + datadir = case buildOS of + Windows -> "$prefix" + _other -> "$prefix" "share", + datasubdir = "$abi" "$pkgid", + docdir = "$datadir" "doc" "$abi" "$pkgid", + mandir = "$datadir" "man", + htmldir = "$docdir" "html", + haddockdir = "$htmldir", + sysconfdir = "$prefix" "etc" + } + +-- 
--------------------------------------------------------------------------- +-- Converting directories, absolute or prefix-relative + +-- | Substitute the install dir templates into each other. +-- +-- To prevent cyclic substitutions, only some variables are allowed in +-- particular dir templates. If out of scope vars are present, they are not +-- substituted for. Checking for any remaining unsubstituted vars can be done +-- as a subsequent operation. +-- +-- The reason it is done this way is so that in 'prefixRelativeInstallDirs' we +-- can replace 'prefix' with the 'PrefixVar' and get resulting +-- 'PathTemplate's that still have the 'PrefixVar' in them. Doing this makes it +-- each to check which paths are relative to the $prefix. +-- +substituteInstallDirTemplates :: PathTemplateEnv + -> InstallDirTemplates -> InstallDirTemplates +substituteInstallDirTemplates env dirs = dirs' + where + dirs' = InstallDirs { + -- So this specifies exactly which vars are allowed in each template + prefix = subst prefix [], + bindir = subst bindir [prefixVar], + libdir = subst libdir [prefixVar, bindirVar], + libsubdir = subst libsubdir [], + dynlibdir = subst dynlibdir [prefixVar, bindirVar, libdirVar], + flibdir = subst flibdir [prefixVar, bindirVar, libdirVar], + libexecdir = subst libexecdir prefixBinLibVars, + libexecsubdir = subst libexecsubdir [], + includedir = subst includedir prefixBinLibVars, + datadir = subst datadir prefixBinLibVars, + datasubdir = subst datasubdir [], + docdir = subst docdir prefixBinLibDataVars, + mandir = subst mandir (prefixBinLibDataVars ++ [docdirVar]), + htmldir = subst htmldir (prefixBinLibDataVars ++ [docdirVar]), + haddockdir = subst haddockdir (prefixBinLibDataVars ++ + [docdirVar, htmldirVar]), + sysconfdir = subst sysconfdir prefixBinLibVars + } + subst dir env' = substPathTemplate (env'++env) (dir dirs) + + prefixVar = (PrefixVar, prefix dirs') + bindirVar = (BindirVar, bindir dirs') + libdirVar = (LibdirVar, libdir dirs') + 
libsubdirVar = (LibsubdirVar, libsubdir dirs') + datadirVar = (DatadirVar, datadir dirs') + datasubdirVar = (DatasubdirVar, datasubdir dirs') + docdirVar = (DocdirVar, docdir dirs') + htmldirVar = (HtmldirVar, htmldir dirs') + prefixBinLibVars = [prefixVar, bindirVar, libdirVar, libsubdirVar] + prefixBinLibDataVars = prefixBinLibVars ++ [datadirVar, datasubdirVar] + +-- | Convert from abstract install directories to actual absolute ones by +-- substituting for all the variables in the abstract paths, to get real +-- absolute path. +absoluteInstallDirs :: PackageIdentifier + -> UnitId + -> CompilerInfo + -> CopyDest + -> Platform + -> InstallDirs PathTemplate + -> InstallDirs FilePath +absoluteInstallDirs pkgId libname compilerId copydest platform dirs = + (case copydest of + CopyTo destdir -> fmap ((destdir ) . dropDrive) + CopyToDb dbdir -> fmap (substPrefix "${pkgroot}" (takeDirectory dbdir)) + _ -> id) + . appendSubdirs () + . fmap fromPathTemplate + $ substituteInstallDirTemplates env dirs + where + env = initialPathTemplateEnv pkgId libname compilerId platform + substPrefix pre root path + | pre `isPrefixOf` path = root ++ drop (length pre) path + | otherwise = path + + +-- |The location prefix for the /copy/ command. +data CopyDest + = NoCopyDest + | CopyTo FilePath + | CopyToDb FilePath + -- ^ when using the ${pkgroot} as prefix. The CopyToDb will + -- adjust the paths to be relative to the provided package + -- database when copying / installing. + deriving (Eq, Show, Generic) + +instance Binary CopyDest + +-- | Check which of the paths are relative to the installation $prefix. +-- +-- If any of the paths are not relative, ie they are absolute paths, then it +-- prevents us from making a relocatable package (also known as a \"prefix +-- independent\" package). 
+-- +prefixRelativeInstallDirs :: PackageIdentifier + -> UnitId + -> CompilerInfo + -> Platform + -> InstallDirTemplates + -> InstallDirs (Maybe FilePath) +prefixRelativeInstallDirs pkgId libname compilerId platform dirs = + fmap relative + . appendSubdirs combinePathTemplate + $ -- substitute the path template into each other, except that we map + -- \$prefix back to $prefix. We're trying to end up with templates that + -- mention no vars except $prefix. + substituteInstallDirTemplates env dirs { + prefix = PathTemplate [Variable PrefixVar] + } + where + env = initialPathTemplateEnv pkgId libname compilerId platform + + -- If it starts with $prefix then it's relative and produce the relative + -- path by stripping off $prefix/ or $prefix + relative dir = case dir of + PathTemplate cs -> fmap (fromPathTemplate . PathTemplate) (relative' cs) + relative' (Variable PrefixVar : Ordinary (s:rest) : rest') + | isPathSeparator s = Just (Ordinary rest : rest') + relative' (Variable PrefixVar : rest) = Just rest + relative' _ = Nothing + +-- --------------------------------------------------------------------------- +-- Path templates + +-- | An abstract path, possibly containing variables that need to be +-- substituted for to get a real 'FilePath'. 
+-- +newtype PathTemplate = PathTemplate [PathComponent] + deriving (Eq, Ord, Generic) + +instance Binary PathTemplate + +data PathComponent = + Ordinary FilePath + | Variable PathTemplateVariable + deriving (Eq, Ord, Generic) + +instance Binary PathComponent + +data PathTemplateVariable = + PrefixVar -- ^ The @$prefix@ path variable + | BindirVar -- ^ The @$bindir@ path variable + | LibdirVar -- ^ The @$libdir@ path variable + | LibsubdirVar -- ^ The @$libsubdir@ path variable + | DynlibdirVar -- ^ The @$dynlibdir@ path variable + | DatadirVar -- ^ The @$datadir@ path variable + | DatasubdirVar -- ^ The @$datasubdir@ path variable + | DocdirVar -- ^ The @$docdir@ path variable + | HtmldirVar -- ^ The @$htmldir@ path variable + | PkgNameVar -- ^ The @$pkg@ package name path variable + | PkgVerVar -- ^ The @$version@ package version path variable + | PkgIdVar -- ^ The @$pkgid@ package Id path variable, eg @foo-1.0@ + | LibNameVar -- ^ The @$libname@ path variable + | CompilerVar -- ^ The compiler name and version, eg @ghc-6.6.1@ + | OSVar -- ^ The operating system name, eg @windows@ or @linux@ + | ArchVar -- ^ The CPU architecture name, eg @i386@ or @x86_64@ + | AbiVar -- ^ The Compiler's ABI identifier, $arch-$os-$compiler-$abitag + | AbiTagVar -- ^ The optional ABI tag for the compiler + | ExecutableNameVar -- ^ The executable name; used in shell wrappers + | TestSuiteNameVar -- ^ The name of the test suite being run + | TestSuiteResultVar -- ^ The result of the test suite being run, eg + -- @pass@, @fail@, or @error@. + | BenchmarkNameVar -- ^ The name of the benchmark being run + deriving (Eq, Ord, Generic) + +instance Binary PathTemplateVariable + +type PathTemplateEnv = [(PathTemplateVariable, PathTemplate)] + +-- | Convert a 'FilePath' to a 'PathTemplate' including any template vars. +-- +toPathTemplate :: FilePath -> PathTemplate +toPathTemplate = PathTemplate . 
read -- TODO: eradicateNoParse + +-- | Convert back to a path, any remaining vars are included +-- +fromPathTemplate :: PathTemplate -> FilePath +fromPathTemplate (PathTemplate template) = show template + +combinePathTemplate :: PathTemplate -> PathTemplate -> PathTemplate +combinePathTemplate (PathTemplate t1) (PathTemplate t2) = + PathTemplate (t1 ++ [Ordinary [pathSeparator]] ++ t2) + +substPathTemplate :: PathTemplateEnv -> PathTemplate -> PathTemplate +substPathTemplate environment (PathTemplate template) = + PathTemplate (concatMap subst template) + + where subst component@(Ordinary _) = [component] + subst component@(Variable variable) = + case lookup variable environment of + Just (PathTemplate components) -> components + Nothing -> [component] + +-- | The initial environment has all the static stuff but no paths +initialPathTemplateEnv :: PackageIdentifier + -> UnitId + -> CompilerInfo + -> Platform + -> PathTemplateEnv +initialPathTemplateEnv pkgId libname compiler platform = + packageTemplateEnv pkgId libname + ++ compilerTemplateEnv compiler + ++ platformTemplateEnv platform + ++ abiTemplateEnv compiler platform + +packageTemplateEnv :: PackageIdentifier -> UnitId -> PathTemplateEnv +packageTemplateEnv pkgId uid = + [(PkgNameVar, PathTemplate [Ordinary $ display (packageName pkgId)]) + ,(PkgVerVar, PathTemplate [Ordinary $ display (packageVersion pkgId)]) + -- Invariant: uid is actually a HashedUnitId. Hard to enforce because + -- it's an API change. 
+ ,(LibNameVar, PathTemplate [Ordinary $ display uid]) + ,(PkgIdVar, PathTemplate [Ordinary $ display pkgId]) + ] + +compilerTemplateEnv :: CompilerInfo -> PathTemplateEnv +compilerTemplateEnv compiler = + [(CompilerVar, PathTemplate [Ordinary $ display (compilerInfoId compiler)]) + ] + +platformTemplateEnv :: Platform -> PathTemplateEnv +platformTemplateEnv (Platform arch os) = + [(OSVar, PathTemplate [Ordinary $ display os]) + ,(ArchVar, PathTemplate [Ordinary $ display arch]) + ] + +abiTemplateEnv :: CompilerInfo -> Platform -> PathTemplateEnv +abiTemplateEnv compiler (Platform arch os) = + [(AbiVar, PathTemplate [Ordinary $ display arch ++ '-':display os ++ + '-':display (compilerInfoId compiler) ++ + case compilerInfoAbiTag compiler of + NoAbiTag -> "" + AbiTag tag -> '-':tag]) + ,(AbiTagVar, PathTemplate [Ordinary $ abiTagString (compilerInfoAbiTag compiler)]) + ] + +installDirsTemplateEnv :: InstallDirs PathTemplate -> PathTemplateEnv +installDirsTemplateEnv dirs = + [(PrefixVar, prefix dirs) + ,(BindirVar, bindir dirs) + ,(LibdirVar, libdir dirs) + ,(LibsubdirVar, libsubdir dirs) + ,(DynlibdirVar, dynlibdir dirs) + ,(DatadirVar, datadir dirs) + ,(DatasubdirVar, datasubdir dirs) + ,(DocdirVar, docdir dirs) + ,(HtmldirVar, htmldir dirs) + ] + + +-- --------------------------------------------------------------------------- +-- Parsing and showing path templates: + +-- The textual format is that of an ordinary Haskell String, eg +-- "$prefix/bin" +-- and this gets parsed to the internal representation as a sequence of path +-- spans which are either strings or variables, eg: +-- PathTemplate [Variable PrefixVar, Ordinary "/bin" ] + +instance Show PathTemplateVariable where + show PrefixVar = "prefix" + show LibNameVar = "libname" + show BindirVar = "bindir" + show LibdirVar = "libdir" + show LibsubdirVar = "libsubdir" + show DynlibdirVar = "dynlibdir" + show DatadirVar = "datadir" + show DatasubdirVar = "datasubdir" + show DocdirVar = "docdir" + show 
HtmldirVar = "htmldir" + show PkgNameVar = "pkg" + show PkgVerVar = "version" + show PkgIdVar = "pkgid" + show CompilerVar = "compiler" + show OSVar = "os" + show ArchVar = "arch" + show AbiTagVar = "abitag" + show AbiVar = "abi" + show ExecutableNameVar = "executablename" + show TestSuiteNameVar = "test-suite" + show TestSuiteResultVar = "result" + show BenchmarkNameVar = "benchmark" + +instance Read PathTemplateVariable where + readsPrec _ s = + take 1 + [ (var, drop (length varStr) s) + | (varStr, var) <- vars + , varStr `isPrefixOf` s ] + -- NB: order matters! Longer strings first + where vars = [("prefix", PrefixVar) + ,("bindir", BindirVar) + ,("libdir", LibdirVar) + ,("libsubdir", LibsubdirVar) + ,("dynlibdir", DynlibdirVar) + ,("datadir", DatadirVar) + ,("datasubdir", DatasubdirVar) + ,("docdir", DocdirVar) + ,("htmldir", HtmldirVar) + ,("pkgid", PkgIdVar) + ,("libname", LibNameVar) + ,("pkgkey", LibNameVar) -- backwards compatibility + ,("pkg", PkgNameVar) + ,("version", PkgVerVar) + ,("compiler", CompilerVar) + ,("os", OSVar) + ,("arch", ArchVar) + ,("abitag", AbiTagVar) + ,("abi", AbiVar) + ,("executablename", ExecutableNameVar) + ,("test-suite", TestSuiteNameVar) + ,("result", TestSuiteResultVar) + ,("benchmark", BenchmarkNameVar)] + +instance Show PathComponent where + show (Ordinary path) = path + show (Variable var) = '$':show var + showList = foldr (\x -> (shows x .)) id + +instance Read PathComponent where + -- for some reason we collapse multiple $ symbols here + readsPrec _ = lex0 + where lex0 [] = [] + lex0 ('$':'$':s') = lex0 ('$':s') + lex0 ('$':s') = case [ (Variable var, s'') + | (var, s'') <- reads s' ] of + [] -> lex1 "$" s' + ok -> ok + lex0 s' = lex1 [] s' + lex1 "" "" = [] + lex1 acc "" = [(Ordinary (reverse acc), "")] + lex1 acc ('$':'$':s) = lex1 acc ('$':s) + lex1 acc ('$':s) = [(Ordinary (reverse acc), '$':s)] + lex1 acc (c:s) = lex1 (c:acc) s + readList [] = [([],"")] + readList s = [ (component:components, s'') + | (component, s') 
<- reads s + , (components, s'') <- readList s' ] + +instance Show PathTemplate where + show (PathTemplate template) = show (show template) + +instance Read PathTemplate where + readsPrec p s = [ (PathTemplate template, s') + | (path, s') <- readsPrec p s + , (template, "") <- reads path ] + +-- --------------------------------------------------------------------------- +-- Internal utilities + +getWindowsProgramFilesDir :: NoCallStackIO FilePath +getWindowsProgramFilesDir = do +#ifdef mingw32_HOST_OS + m <- shGetFolderPath csidl_PROGRAM_FILES +#else + let m = Nothing +#endif + return (fromMaybe "C:\\Program Files" m) + +#ifdef mingw32_HOST_OS +shGetFolderPath :: CInt -> NoCallStackIO (Maybe FilePath) +shGetFolderPath n = + allocaArray long_path_size $ \pPath -> do + r <- c_SHGetFolderPath nullPtr n nullPtr 0 pPath + if (r /= 0) + then return Nothing + else do s <- peekCWString pPath; return (Just s) + where + long_path_size = 1024 -- MAX_PATH is 260, this should be plenty + +csidl_PROGRAM_FILES :: CInt +csidl_PROGRAM_FILES = 0x0026 +-- csidl_PROGRAM_FILES_COMMON :: CInt +-- csidl_PROGRAM_FILES_COMMON = 0x002b + +#ifdef x86_64_HOST_ARCH +#define CALLCONV ccall +#else +#define CALLCONV stdcall +#endif + +foreign import CALLCONV unsafe "shlobj.h SHGetFolderPathW" + c_SHGetFolderPath :: Ptr () + -> CInt + -> Ptr () + -> CInt + -> CWString + -> Prelude.IO CInt +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Install.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Install.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Install.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Install.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,268 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + 
+----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Install +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is the entry point into installing a built package. Performs the +-- \"@.\/setup install@\" and \"@.\/setup copy@\" actions. It moves files into +-- place based on the prefix argument. It does the generic bits and then calls +-- compiler-specific functions to do the rest. + +module Distribution.Simple.Install ( + install, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.TargetInfo +import Distribution.Types.LocalBuildInfo +import Distribution.Types.ForeignLib +import Distribution.Types.PackageDescription +import Distribution.Types.UnqualComponentName +import Distribution.Types.ExecutableScope + +import Distribution.Package +import Distribution.PackageDescription +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.BuildPaths (haddockName, haddockPref) +import Distribution.Simple.Glob (matchDirFileGlob) +import Distribution.Simple.Utils + ( createDirectoryIfMissingVerbose + , installDirectoryContents, installOrdinaryFile, isInSearchPath + , die', info, noticeNoWrap, warn ) +import Distribution.Simple.Compiler + ( CompilerFlavor(..), compilerFlavor ) +import Distribution.Simple.Setup + ( CopyFlags(..), fromFlag, HaddockTarget(ForDevelopment) ) +import Distribution.Simple.BuildTarget + +import qualified Distribution.Simple.GHC as GHC +import qualified Distribution.Simple.GHCJS as GHCJS +import qualified Distribution.Simple.UHC as UHC +import qualified Distribution.Simple.HaskellSuite as HaskellSuite +import Distribution.Compat.Graph (IsNode(..)) + +import System.Directory + ( doesDirectoryExist, doesFileExist ) +import System.FilePath + ( takeFileName, takeDirectory, (), isRelative ) + +import Distribution.Verbosity +import Distribution.Text 
+ ( display ) + +-- |Perform the \"@.\/setup install@\" and \"@.\/setup copy@\" +-- actions. Move files into place based on the prefix argument. +-- +-- This does NOT register libraries, you should call 'register' +-- to do that. + +install :: PackageDescription -- ^information from the .cabal file + -> LocalBuildInfo -- ^information from the configure step + -> CopyFlags -- ^flags sent to copy or install + -> IO () +install pkg_descr lbi flags = do + checkHasLibsOrExes + targets <- readTargetInfos verbosity pkg_descr lbi (copyArgs flags) + + copyPackage verbosity pkg_descr lbi distPref copydest + + -- It's not necessary to do these in build-order, but it's harmless + withNeededTargetsInBuildOrder' pkg_descr lbi (map nodeKey targets) $ \target -> + let comp = targetComponent target + clbi = targetCLBI target + in copyComponent verbosity pkg_descr lbi comp clbi copydest + where + distPref = fromFlag (copyDistPref flags) + verbosity = fromFlag (copyVerbosity flags) + copydest = fromFlag (copyDest flags) + + checkHasLibsOrExes = + unless (hasLibs pkg_descr || hasForeignLibs pkg_descr || hasExes pkg_descr) $ + die' verbosity "No executables and no library found. Nothing to do." + +-- | Copy package global files. +copyPackage :: Verbosity -> PackageDescription + -> LocalBuildInfo -> FilePath -> CopyDest -> IO () +copyPackage verbosity pkg_descr lbi distPref copydest = do + let -- This is a bit of a hack, to handle files which are not + -- per-component (data files and Haddock files.) + InstallDirs { + datadir = dataPref, + -- NB: The situation with Haddock is a bit delicate. On the + -- one hand, the easiest to understand Haddock documentation + -- path is pkgname-0.1, which means it's per-package (not + -- per-component). But this means that it's impossible to + -- install Haddock documentation for internal libraries. We'll + -- keep this constraint for now; this means you can't use + -- Cabal to Haddock internal libraries. This does not seem + -- like a big problem. 
+ docdir = docPref, + htmldir = htmlPref, + haddockdir = interfacePref} + -- Notice use of 'absoluteInstallDirs' (not the + -- per-component variant). This means for non-library + -- packages we'll just pick a nondescriptive foo-0.1 + = absoluteInstallDirs pkg_descr lbi copydest + + -- Install (package-global) data files + installDataFiles verbosity pkg_descr dataPref + + -- Install (package-global) Haddock files + -- TODO: these should be done per-library + docExists <- doesDirectoryExist $ haddockPref ForDevelopment distPref pkg_descr + info verbosity ("directory " ++ haddockPref ForDevelopment distPref pkg_descr ++ + " does exist: " ++ show docExists) + + -- TODO: this is a bit questionable, Haddock files really should + -- be per library (when there are convenience libraries.) + when docExists $ do + createDirectoryIfMissingVerbose verbosity True htmlPref + installDirectoryContents verbosity + (haddockPref ForDevelopment distPref pkg_descr) htmlPref + -- setPermissionsRecursive [Read] htmlPref + -- The haddock interface file actually already got installed + -- in the recursive copy, but now we install it where we actually + -- want it to be (normally the same place). We could remove the + -- copy in htmlPref first. 
+ let haddockInterfaceFileSrc = haddockPref ForDevelopment distPref pkg_descr + haddockName pkg_descr + haddockInterfaceFileDest = interfacePref haddockName pkg_descr + -- We only generate the haddock interface file for libs, So if the + -- package consists only of executables there will not be one: + exists <- doesFileExist haddockInterfaceFileSrc + when exists $ do + createDirectoryIfMissingVerbose verbosity True interfacePref + installOrdinaryFile verbosity haddockInterfaceFileSrc + haddockInterfaceFileDest + + let lfiles = licenseFiles pkg_descr + unless (null lfiles) $ do + createDirectoryIfMissingVerbose verbosity True docPref + sequence_ + [ installOrdinaryFile verbosity lfile (docPref takeFileName lfile) + | lfile <- lfiles ] + +-- | Copy files associated with a component. +copyComponent :: Verbosity -> PackageDescription + -> LocalBuildInfo -> Component -> ComponentLocalBuildInfo + -> CopyDest + -> IO () +copyComponent verbosity pkg_descr lbi (CLib lib) clbi copydest = do + let InstallDirs{ + libdir = libPref, + dynlibdir = dynlibPref, + includedir = incPref + } = absoluteComponentInstallDirs pkg_descr lbi (componentUnitId clbi) copydest + buildPref = componentBuildDir lbi clbi + + case libName lib of + Nothing -> noticeNoWrap verbosity ("Installing library in " ++ libPref) + Just n -> noticeNoWrap verbosity ("Installing internal library " ++ display n ++ " in " ++ libPref) + + -- install include files for all compilers - they may be needed to compile + -- haskell files (using the CPP extension) + installIncludeFiles verbosity (libBuildInfo lib) lbi buildPref incPref + + case compilerFlavor (compiler lbi) of + GHC -> GHC.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr lib clbi + GHCJS -> GHCJS.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr lib clbi + UHC -> UHC.installLib verbosity lbi libPref dynlibPref buildPref pkg_descr lib clbi + HaskellSuite _ -> HaskellSuite.installLib + verbosity lbi libPref dynlibPref buildPref 
pkg_descr lib clbi + _ -> die' verbosity $ "installing with " + ++ display (compilerFlavor (compiler lbi)) + ++ " is not implemented" + +copyComponent verbosity pkg_descr lbi (CFLib flib) clbi copydest = do + let InstallDirs{ + flibdir = flibPref, + includedir = incPref + } = absoluteComponentInstallDirs pkg_descr lbi (componentUnitId clbi) copydest + buildPref = componentBuildDir lbi clbi + + noticeNoWrap verbosity ("Installing foreign library " ++ unUnqualComponentName (foreignLibName flib) ++ " in " ++ flibPref) + installIncludeFiles verbosity (foreignLibBuildInfo flib) lbi buildPref incPref + + case compilerFlavor (compiler lbi) of + GHC -> GHC.installFLib verbosity lbi flibPref buildPref pkg_descr flib + _ -> die' verbosity $ "installing foreign lib with " + ++ display (compilerFlavor (compiler lbi)) + ++ " is not implemented" + +copyComponent verbosity pkg_descr lbi (CExe exe) clbi copydest = do + let installDirs = absoluteComponentInstallDirs pkg_descr lbi (componentUnitId clbi) copydest + -- the installers know how to find the actual location of the + -- binaries + buildPref = buildDir lbi + uid = componentUnitId clbi + pkgid = packageId pkg_descr + binPref | ExecutablePrivate <- exeScope exe = libexecdir installDirs + | otherwise = bindir installDirs + progPrefixPref = substPathTemplate pkgid lbi uid (progPrefix lbi) + progSuffixPref = substPathTemplate pkgid lbi uid (progSuffix lbi) + progFix = (progPrefixPref, progSuffixPref) + noticeNoWrap verbosity ("Installing executable " ++ display (exeName exe) + ++ " in " ++ binPref) + inPath <- isInSearchPath binPref + when (not inPath) $ + warn verbosity ("The directory " ++ binPref + ++ " is not in the system search path.") + case compilerFlavor (compiler lbi) of + GHC -> GHC.installExe verbosity lbi binPref buildPref progFix pkg_descr exe + GHCJS -> GHCJS.installExe verbosity lbi binPref buildPref progFix pkg_descr exe + UHC -> return () + HaskellSuite {} -> return () + _ -> die' verbosity $ "installing with " 
+ ++ display (compilerFlavor (compiler lbi)) + ++ " is not implemented" + +-- Nothing to do for benchmark/testsuite +copyComponent _ _ _ (CBench _) _ _ = return () +copyComponent _ _ _ (CTest _) _ _ = return () + +-- | Install the files listed in data-files +-- +installDataFiles :: Verbosity -> PackageDescription -> FilePath -> IO () +installDataFiles verbosity pkg_descr destDataDir = + flip traverse_ (dataFiles pkg_descr) $ \ file -> do + let srcDataDirRaw = dataDir pkg_descr + srcDataDir = if null srcDataDirRaw + then "." + else srcDataDirRaw + files <- matchDirFileGlob verbosity (specVersion pkg_descr) srcDataDir file + let dir = takeDirectory file + createDirectoryIfMissingVerbose verbosity True (destDataDir dir) + sequence_ [ installOrdinaryFile verbosity (srcDataDir file') + (destDataDir file') + | file' <- files ] + +-- | Install the files listed in install-includes for a library +-- +installIncludeFiles :: Verbosity -> BuildInfo -> LocalBuildInfo -> FilePath -> FilePath -> IO () +installIncludeFiles verbosity libBi lbi buildPref destIncludeDir = do + let relincdirs = "." 
: filter isRelative (includeDirs libBi) + incdirs = [ baseDir lbi dir | dir <- relincdirs ] + ++ [ buildPref dir | dir <- relincdirs ] + incs <- traverse (findInc incdirs) (installIncludes libBi) + sequence_ + [ do createDirectoryIfMissingVerbose verbosity True destDir + installOrdinaryFile verbosity srcFile destFile + | (relFile, srcFile) <- incs + , let destFile = destIncludeDir relFile + destDir = takeDirectory destFile ] + where + baseDir lbi' = fromMaybe "" (takeDirectory <$> cabalFilePath lbi') + findInc [] file = die' verbosity ("can't find include file " ++ file) + findInc (dir:dirs) file = do + let path = dir file + exists <- doesFileExist path + if exists then return (file, path) else findInc dirs file diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/LocalBuildInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/LocalBuildInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/LocalBuildInfo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/LocalBuildInfo.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,386 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.LocalBuildInfo +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Once a package has been configured we have resolved conditionals and +-- dependencies, configured the compiler and other needed external programs. +-- The 'LocalBuildInfo' is used to hold all this information. It holds the +-- install dirs, the compiler, the exact package dependencies, the configured +-- programs, the package database to use and a bunch of miscellaneous configure +-- flags. 
It gets saved and reloaded from a file (@dist\/setup-config@). It gets +-- passed in to very many subsequent build actions. + +module Distribution.Simple.LocalBuildInfo ( + LocalBuildInfo(..), + externalPackageDeps, + localComponentId, + localUnitId, + localCompatPackageKey, + + -- * Buildable package components + Component(..), + ComponentName(..), + defaultLibName, + showComponentName, + componentNameString, + ComponentLocalBuildInfo(..), + componentBuildDir, + foldComponent, + componentName, + componentBuildInfo, + componentBuildable, + pkgComponents, + pkgBuildableComponents, + lookupComponent, + getComponent, + getComponentLocalBuildInfo, + allComponentsInBuildOrder, + componentsInBuildOrder, + depLibraryPaths, + allLibModules, + + withAllComponentsInBuildOrder, + withComponentsInBuildOrder, + withComponentsLBI, + withLibLBI, + withExeLBI, + withBenchLBI, + withTestLBI, + enabledTestLBIs, + enabledBenchLBIs, + + -- * Installation directories + module Distribution.Simple.InstallDirs, + absoluteInstallDirs, prefixRelativeInstallDirs, + absoluteComponentInstallDirs, prefixRelativeComponentInstallDirs, + substPathTemplate, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.Component +import Distribution.Types.PackageId +import Distribution.Types.UnitId +import Distribution.Types.ComponentName +import Distribution.Types.UnqualComponentName +import Distribution.Types.PackageDescription +import Distribution.Types.ComponentLocalBuildInfo +import Distribution.Types.LocalBuildInfo +import Distribution.Types.TargetInfo + +import Distribution.Simple.InstallDirs hiding (absoluteInstallDirs, + prefixRelativeInstallDirs, + substPathTemplate, ) +import qualified Distribution.Simple.InstallDirs as InstallDirs +import Distribution.PackageDescription +import qualified Distribution.InstalledPackageInfo as Installed +import Distribution.Package +import Distribution.ModuleName +import Distribution.Simple.Compiler +import 
Distribution.Simple.PackageIndex +import Distribution.Simple.Utils +import Distribution.Text +import qualified Distribution.Compat.Graph as Graph + +import Data.List (stripPrefix) +import System.FilePath +import qualified Data.Map as Map + +import System.Directory (doesDirectoryExist, canonicalizePath) + +-- ----------------------------------------------------------------------------- +-- Configuration information of buildable components + +componentBuildDir :: LocalBuildInfo -> ComponentLocalBuildInfo -> FilePath +-- For now, we assume that libraries/executables/test-suites/benchmarks +-- are only ever built once. With Backpack, we need a special case for +-- libraries so that we can handle building them multiple times. +componentBuildDir lbi clbi + = buildDir lbi + case componentLocalName clbi of + CLibName -> + if display (componentUnitId clbi) == display (componentComponentId clbi) + then "" + else display (componentUnitId clbi) + CSubLibName s -> + if display (componentUnitId clbi) == display (componentComponentId clbi) + then unUnqualComponentName s + else display (componentUnitId clbi) + CFLibName s -> unUnqualComponentName s + CExeName s -> unUnqualComponentName s + CTestName s -> unUnqualComponentName s + CBenchName s -> unUnqualComponentName s + +{-# DEPRECATED getComponentLocalBuildInfo "This function is not well-defined, because a 'ComponentName' does not uniquely identify a 'ComponentLocalBuildInfo'. If you have a 'TargetInfo', you should use 'targetCLBI' to get the 'ComponentLocalBuildInfo'. Otherwise, use 'componentNameTargets' to get all possible 'ComponentLocalBuildInfo's. This will be removed in Cabal 2.2." 
#-} +getComponentLocalBuildInfo :: LocalBuildInfo -> ComponentName -> ComponentLocalBuildInfo +getComponentLocalBuildInfo lbi cname = + case componentNameCLBIs lbi cname of + [clbi] -> clbi + [] -> + error $ "internal error: there is no configuration data " + ++ "for component " ++ show cname + clbis -> + error $ "internal error: the component name " ++ show cname + ++ "is ambiguous. Refers to: " + ++ intercalate ", " (map (display . componentUnitId) clbis) + +-- | Perform the action on each enabled 'library' in the package +-- description with the 'ComponentLocalBuildInfo'. +withLibLBI :: PackageDescription -> LocalBuildInfo + -> (Library -> ComponentLocalBuildInfo -> IO ()) -> IO () +withLibLBI pkg lbi f = + withAllTargetsInBuildOrder' pkg lbi $ \target -> + case targetComponent target of + CLib lib -> f lib (targetCLBI target) + _ -> return () + +-- | Perform the action on each enabled 'Executable' in the package +-- description. Extended version of 'withExe' that also gives corresponding +-- build info. +withExeLBI :: PackageDescription -> LocalBuildInfo + -> (Executable -> ComponentLocalBuildInfo -> IO ()) -> IO () +withExeLBI pkg lbi f = + withAllTargetsInBuildOrder' pkg lbi $ \target -> + case targetComponent target of + CExe exe -> f exe (targetCLBI target) + _ -> return () + +-- | Perform the action on each enabled 'Benchmark' in the package +-- description. 
+withBenchLBI :: PackageDescription -> LocalBuildInfo + -> (Benchmark -> ComponentLocalBuildInfo -> IO ()) -> IO () +withBenchLBI pkg lbi f = + sequence_ [ f test clbi | (test, clbi) <- enabledBenchLBIs pkg lbi ] + +withTestLBI :: PackageDescription -> LocalBuildInfo + -> (TestSuite -> ComponentLocalBuildInfo -> IO ()) -> IO () +withTestLBI pkg lbi f = + sequence_ [ f test clbi | (test, clbi) <- enabledTestLBIs pkg lbi ] + +enabledTestLBIs :: PackageDescription -> LocalBuildInfo + -> [(TestSuite, ComponentLocalBuildInfo)] +enabledTestLBIs pkg lbi = + [ (test, targetCLBI target) + | target <- allTargetsInBuildOrder' pkg lbi + , CTest test <- [targetComponent target] ] + +enabledBenchLBIs :: PackageDescription -> LocalBuildInfo + -> [(Benchmark, ComponentLocalBuildInfo)] +enabledBenchLBIs pkg lbi = + [ (bench, targetCLBI target) + | target <- allTargetsInBuildOrder' pkg lbi + , CBench bench <- [targetComponent target] ] + +{-# DEPRECATED withComponentsLBI "Use withAllComponentsInBuildOrder" #-} +withComponentsLBI :: PackageDescription -> LocalBuildInfo + -> (Component -> ComponentLocalBuildInfo -> IO ()) + -> IO () +withComponentsLBI = withAllComponentsInBuildOrder + +-- | Perform the action on each buildable 'Library' or 'Executable' (Component) +-- in the PackageDescription, subject to the build order specified by the +-- 'compBuildOrder' field of the given 'LocalBuildInfo' +withAllComponentsInBuildOrder :: PackageDescription -> LocalBuildInfo + -> (Component -> ComponentLocalBuildInfo -> IO ()) + -> IO () +withAllComponentsInBuildOrder pkg lbi f = + withAllTargetsInBuildOrder' pkg lbi $ \target -> + f (targetComponent target) (targetCLBI target) + +{-# DEPRECATED withComponentsInBuildOrder "You have got a 'TargetInfo' right? Use 'withNeededTargetsInBuildOrder' on the 'UnitId's you can 'nodeKey' out." 
#-} +withComponentsInBuildOrder :: PackageDescription -> LocalBuildInfo + -> [ComponentName] + -> (Component -> ComponentLocalBuildInfo -> IO ()) + -> IO () +withComponentsInBuildOrder pkg lbi cnames f = + withNeededTargetsInBuildOrder' pkg lbi uids $ \target -> + f (targetComponent target) (targetCLBI target) + where uids = concatMap (componentNameToUnitIds lbi) cnames + +allComponentsInBuildOrder :: LocalBuildInfo + -> [ComponentLocalBuildInfo] +allComponentsInBuildOrder lbi = + Graph.topSort (componentGraph lbi) + +-- | Private helper function for some of the deprecated implementations. +componentNameToUnitIds :: LocalBuildInfo -> ComponentName -> [UnitId] +componentNameToUnitIds lbi cname = + case Map.lookup cname (componentNameMap lbi) of + Just clbis -> map componentUnitId clbis + Nothing -> error $ "componentNameToUnitIds " ++ display cname + +{-# DEPRECATED componentsInBuildOrder "You've got 'TargetInfo' right? Use 'neededTargetsInBuildOrder' on the 'UnitId's you can 'nodeKey' out." #-} +componentsInBuildOrder :: LocalBuildInfo -> [ComponentName] + -> [ComponentLocalBuildInfo] +componentsInBuildOrder lbi cnames + -- NB: use of localPkgDescr here is safe because we throw out the + -- result immediately afterwards + = map targetCLBI (neededTargetsInBuildOrder' (localPkgDescr lbi) lbi uids) + where uids = concatMap (componentNameToUnitIds lbi) cnames + +-- ----------------------------------------------------------------------------- +-- A random function that has no business in this module + +-- | Determine the directories containing the dynamic libraries of the +-- transitive dependencies of the component we are building. +-- +-- When wanted, and possible, returns paths relative to the installDirs 'prefix' +depLibraryPaths :: Bool -- ^ Building for inplace? 
+ -> Bool -- ^ Generate prefix-relative library paths + -> LocalBuildInfo + -> ComponentLocalBuildInfo -- ^ Component that is being built + -> NoCallStackIO [FilePath] +depLibraryPaths inplace relative lbi clbi = do + let pkgDescr = localPkgDescr lbi + installDirs = absoluteComponentInstallDirs pkgDescr lbi (componentUnitId clbi) NoCopyDest + executable = case clbi of + ExeComponentLocalBuildInfo {} -> True + _ -> False + relDir | executable = bindir installDirs + | otherwise = libdir installDirs + + let -- TODO: this is kind of inefficient + internalDeps = [ uid + | (uid, _) <- componentPackageDeps clbi + -- Test that it's internal + , sub_target <- allTargetsInBuildOrder' pkgDescr lbi + , componentUnitId (targetCLBI (sub_target)) == uid ] + internalLibs = [ getLibDir (targetCLBI sub_target) + | sub_target <- neededTargetsInBuildOrder' + pkgDescr lbi internalDeps ] + {- + -- This is better, but it doesn't work, because we may be passed a + -- CLBI which doesn't actually exist, and was faked up when we + -- were building a test suite/benchmark. See #3599 for proposal + -- to fix this. + let internalCLBIs = filter ((/= componentUnitId clbi) . componentUnitId) + . map targetCLBI + $ neededTargetsInBuildOrder lbi [componentUnitId clbi] + internalLibs = map getLibDir internalCLBIs + -} + getLibDir sub_clbi + | inplace = componentBuildDir lbi sub_clbi + | otherwise = dynlibdir (absoluteComponentInstallDirs pkgDescr lbi (componentUnitId sub_clbi) NoCopyDest) + + -- Why do we go through all the trouble of a hand-crafting + -- internalLibs, when 'installedPkgs' actually contains the + -- internal libraries? The trouble is that 'installedPkgs' + -- may contain *inplace* entries, which we must NOT use for + -- not inplace 'depLibraryPaths' (e.g., for RPATH calculation). + -- See #4025 for more details. 
This is all horrible but it + -- is a moot point if you are using a per-component build, + -- because you never have any internal libraries in this case; + -- they're all external. + let external_ipkgs = filter is_external (allPackages (installedPkgs lbi)) + is_external ipkg = not (installedUnitId ipkg `elem` internalDeps) + -- First look for dynamic libraries in `dynamic-library-dirs`, and use + -- `library-dirs` as a fall back. + getDynDir pkg = case Installed.libraryDynDirs pkg of + [] -> Installed.libraryDirs pkg + d -> d + allDepLibDirs = concatMap getDynDir external_ipkgs + + allDepLibDirs' = internalLibs ++ allDepLibDirs + allDepLibDirsC <- traverse canonicalizePathNoFail allDepLibDirs' + + let p = prefix installDirs + prefixRelative l = isJust (stripPrefix p l) + libPaths + | relative && + prefixRelative relDir = map (\l -> + if prefixRelative l + then shortRelativePath relDir l + else l + ) allDepLibDirsC + | otherwise = allDepLibDirsC + + return libPaths + where + -- 'canonicalizePath' fails on UNIX when the directory does not exists. + -- So just don't canonicalize when it doesn't exist. + canonicalizePathNoFail p = do + exists <- doesDirectoryExist p + if exists + then canonicalizePath p + else return p + +-- | Get all module names that needed to be built by GHC; i.e., all +-- of these 'ModuleName's have interface files associated with them +-- that need to be installed. +allLibModules :: Library -> ComponentLocalBuildInfo -> [ModuleName] +allLibModules lib clbi = + ordNub $ + explicitLibModules lib ++ + case clbi of + LibComponentLocalBuildInfo { componentInstantiatedWith = insts } -> map fst insts + _ -> [] + +-- ----------------------------------------------------------------------------- +-- Wrappers for a couple functions from InstallDirs + +-- | Backwards compatibility function which computes the InstallDirs +-- assuming that @$libname@ points to the public library (or some fake +-- package identifier if there is no public library.) 
IF AT ALL +-- POSSIBLE, please use 'absoluteComponentInstallDirs' instead. +absoluteInstallDirs :: PackageDescription -> LocalBuildInfo + -> CopyDest + -> InstallDirs FilePath +absoluteInstallDirs pkg lbi copydest = + absoluteComponentInstallDirs pkg lbi (localUnitId lbi) copydest + +-- | See 'InstallDirs.absoluteInstallDirs'. +absoluteComponentInstallDirs :: PackageDescription -> LocalBuildInfo + -> UnitId + -> CopyDest + -> InstallDirs FilePath +absoluteComponentInstallDirs pkg lbi uid copydest = + InstallDirs.absoluteInstallDirs + (packageId pkg) + uid + (compilerInfo (compiler lbi)) + copydest + (hostPlatform lbi) + (installDirTemplates lbi) + +-- | Backwards compatibility function which computes the InstallDirs +-- assuming that @$libname@ points to the public library (or some fake +-- package identifier if there is no public library.) IF AT ALL +-- POSSIBLE, please use 'prefixRelativeComponentInstallDirs' instead. +prefixRelativeInstallDirs :: PackageId -> LocalBuildInfo + -> InstallDirs (Maybe FilePath) +prefixRelativeInstallDirs pkg_descr lbi = + prefixRelativeComponentInstallDirs pkg_descr lbi (localUnitId lbi) + +-- |See 'InstallDirs.prefixRelativeInstallDirs' +prefixRelativeComponentInstallDirs :: PackageId -> LocalBuildInfo + -> UnitId + -> InstallDirs (Maybe FilePath) +prefixRelativeComponentInstallDirs pkg_descr lbi uid = + InstallDirs.prefixRelativeInstallDirs + (packageId pkg_descr) + uid + (compilerInfo (compiler lbi)) + (hostPlatform lbi) + (installDirTemplates lbi) + +substPathTemplate :: PackageId -> LocalBuildInfo + -> UnitId + -> PathTemplate -> FilePath +substPathTemplate pkgid lbi uid = fromPathTemplate + . 
( InstallDirs.substPathTemplate env ) + where env = initialPathTemplateEnv + pkgid + uid + (compilerInfo (compiler lbi)) + (hostPlatform lbi) + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/PackageIndex.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/PackageIndex.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/PackageIndex.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/PackageIndex.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,723 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.PackageIndex +-- Copyright : (c) David Himmelstrup 2005, +-- Bjorn Bringert 2007, +-- Duncan Coutts 2008-2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- An index of packages whose primary key is 'UnitId'. Public libraries +-- are additionally indexed by 'PackageName' and 'Version'. +-- Technically, these are an index of *units* (so we should eventually +-- rename it to 'UnitIndex'); but in the absence of internal libraries +-- or Backpack each unit is equivalent to a package. +-- +-- While 'PackageIndex' is parametric over what it actually records, +-- it is in fact only ever instantiated with a single element: +-- The 'InstalledPackageIndex' (defined here) contains a graph of +-- 'InstalledPackageInfo's representing the packages in a +-- package database stack. It is used in a variety of ways: +-- +-- * The primary use to let Cabal access the same installed +-- package database which is used by GHC during compilation. +-- For example, this data structure is used by 'ghc-pkg' +-- and 'Cabal' to do consistency checks on the database +-- (are the references closed). 
+-- +-- * Given a set of dependencies, we can compute the transitive +-- closure of dependencies. This is to check if the versions +-- of packages are consistent, and also needed by multiple +-- tools (Haddock must be explicitly told about the every +-- transitive package to do cross-package linking; +-- preprocessors must know about the include paths of all +-- transitive dependencies.) +-- +-- This 'PackageIndex' is NOT to be confused with +-- 'Distribution.Client.PackageIndex', which indexes packages only by +-- 'PackageName' (this makes it suitable for indexing source packages, +-- for which we don't know 'UnitId's.) +-- +module Distribution.Simple.PackageIndex ( + -- * Package index data type + InstalledPackageIndex, + PackageIndex, + + -- * Creating an index + fromList, + + -- * Updates + merge, + + insert, + + deleteUnitId, + deleteSourcePackageId, + deletePackageName, +-- deleteDependency, + + -- * Queries + + -- ** Precise lookups + lookupUnitId, + lookupComponentId, + lookupSourcePackageId, + lookupPackageId, + lookupPackageName, + lookupDependency, + lookupInternalDependency, + + -- ** Case-insensitive searches + searchByName, + SearchResult(..), + searchByNameSubstring, + + -- ** Bulk queries + allPackages, + allPackagesByName, + allPackagesBySourcePackageId, + allPackagesBySourcePackageIdAndLibName, + + -- ** Special queries + brokenPackages, + dependencyClosure, + reverseDependencyClosure, + topologicalOrder, + reverseTopologicalOrder, + dependencyInconsistencies, + dependencyCycles, + dependencyGraph, + moduleNameIndex, + + -- * Backwards compatibility + deleteInstalledPackageId, + lookupInstalledPackageId, + ) where + +import Prelude () +import Distribution.Compat.Prelude hiding (lookup) +import qualified Data.Map.Strict as Map + +import Distribution.Package +import Distribution.Backpack +import Distribution.ModuleName +import qualified Distribution.InstalledPackageInfo as IPI +import Distribution.Version +import Distribution.Simple.Utils +import 
Distribution.Types.UnqualComponentName + +import Control.Exception (assert) +import Data.Array ((!)) +import qualified Data.Array as Array +import qualified Data.Graph as Graph +import Data.List as List ( groupBy, deleteBy, deleteFirstsBy ) +import qualified Data.Tree as Tree +import Control.Monad +import Distribution.Compat.Stack + +-- | The collection of information about packages from one or more 'PackageDB's. +-- These packages generally should have an instance of 'PackageInstalled' +-- +-- Packages are uniquely identified in by their 'UnitId', they can +-- also be efficiently looked up by package name or by name and version. +-- +data PackageIndex a = PackageIndex { + -- The primary index. Each InstalledPackageInfo record is uniquely identified + -- by its UnitId. + -- + unitIdIndex :: !(Map UnitId a), + + -- This auxiliary index maps package names (case-sensitively) to all the + -- versions and instances of that package. This allows us to find all + -- versions satisfying a dependency. + -- + -- It is a three-level index. The first level is the package name, + -- the second is the package version and the final level is instances + -- of the same package version. These are unique by UnitId + -- and are kept in preference order. + -- + -- FIXME: Clarify what "preference order" means. Check that this invariant is + -- preserved. See #1463 for discussion. + packageIdIndex :: !(Map (PackageName, Maybe UnqualComponentName) (Map Version [a])) + + } deriving (Eq, Generic, Show, Read) + +instance Binary a => Binary (PackageIndex a) + +-- | The default package index which contains 'InstalledPackageInfo'. Normally +-- use this. 
+type InstalledPackageIndex = PackageIndex IPI.InstalledPackageInfo + +instance Monoid (PackageIndex IPI.InstalledPackageInfo) where + mempty = PackageIndex Map.empty Map.empty + mappend = (<>) + --save one mappend with empty in the common case: + mconcat [] = mempty + mconcat xs = foldr1 mappend xs + +instance Semigroup (PackageIndex IPI.InstalledPackageInfo) where + (<>) = merge + +{-# NOINLINE invariant #-} +invariant :: WithCallStack (InstalledPackageIndex -> Bool) +invariant (PackageIndex pids pnames) = + -- trace (show pids' ++ "\n" ++ show pnames') $ + pids' == pnames' + where + pids' = map installedUnitId (Map.elems pids) + pnames' = sort + [ assert pinstOk (installedUnitId pinst) + | ((pname, plib), pvers) <- Map.toList pnames + , let pversOk = not (Map.null pvers) + , (pver, pinsts) <- assert pversOk $ Map.toList pvers + , let pinsts' = sortBy (comparing installedUnitId) pinsts + pinstsOk = all (\g -> length g == 1) + (groupBy (equating installedUnitId) pinsts') + , pinst <- assert pinstsOk $ pinsts' + , let pinstOk = packageName pinst == pname + && packageVersion pinst == pver + && IPI.sourceLibName pinst == plib + ] + -- If you see this invariant failing (ie the assert in mkPackageIndex below) + -- then one thing to check is if it is happening in fromList. Check if the + -- second list above (the sort [...] bit) is ending up with duplicates. This + -- has been observed in practice once due to a messed up ghc-pkg db. How/why + -- it became messed up was not discovered. + + +-- +-- * Internal helpers +-- + +mkPackageIndex :: WithCallStack (Map UnitId IPI.InstalledPackageInfo + -> Map (PackageName, Maybe UnqualComponentName) + (Map Version [IPI.InstalledPackageInfo]) + -> InstalledPackageIndex) +mkPackageIndex pids pnames = assert (invariant index) index + where index = PackageIndex pids pnames + + +-- +-- * Construction +-- + +-- | Build an index out of a bunch of packages. +-- +-- If there are duplicates by 'UnitId' then later ones mask earlier +-- ones. 
+-- +fromList :: [IPI.InstalledPackageInfo] -> InstalledPackageIndex +fromList pkgs = mkPackageIndex pids pnames + where + pids = Map.fromList [ (installedUnitId pkg, pkg) | pkg <- pkgs ] + pnames = + Map.fromList + [ (liftM2 (,) packageName IPI.sourceLibName (head pkgsN), pvers) + | pkgsN <- groupBy (equating (liftM2 (,) packageName IPI.sourceLibName)) + . sortBy (comparing (liftM3 (,,) packageName IPI.sourceLibName packageVersion)) + $ pkgs + , let pvers = + Map.fromList + [ (packageVersion (head pkgsNV), + nubBy (equating installedUnitId) (reverse pkgsNV)) + | pkgsNV <- groupBy (equating packageVersion) pkgsN + ] + ] + +-- +-- * Updates +-- + +-- | Merge two indexes. +-- +-- Packages from the second mask packages from the first if they have the exact +-- same 'UnitId'. +-- +-- For packages with the same source 'PackageId', packages from the second are +-- \"preferred\" over those from the first. Being preferred means they are top +-- result when we do a lookup by source 'PackageId'. This is the mechanism we +-- use to prefer user packages over global packages. +-- +merge :: InstalledPackageIndex -> InstalledPackageIndex + -> InstalledPackageIndex +merge (PackageIndex pids1 pnames1) (PackageIndex pids2 pnames2) = + mkPackageIndex (Map.unionWith (\_ y -> y) pids1 pids2) + (Map.unionWith (Map.unionWith mergeBuckets) pnames1 pnames2) + where + -- Packages in the second list mask those in the first, however preferred + -- packages go first in the list. + mergeBuckets xs ys = ys ++ (xs \\ ys) + (\\) = deleteFirstsBy (equating installedUnitId) + + +-- | Inserts a single package into the index. +-- +-- This is equivalent to (but slightly quicker than) using 'mappend' or +-- 'merge' with a singleton index. 
+-- +insert :: IPI.InstalledPackageInfo -> InstalledPackageIndex -> InstalledPackageIndex +insert pkg (PackageIndex pids pnames) = + mkPackageIndex pids' pnames' + + where + pids' = Map.insert (installedUnitId pkg) pkg pids + pnames' = insertPackageName pnames + insertPackageName = + Map.insertWith (\_ -> insertPackageVersion) + (packageName pkg, IPI.sourceLibName pkg) + (Map.singleton (packageVersion pkg) [pkg]) + + insertPackageVersion = + Map.insertWith (\_ -> insertPackageInstance) + (packageVersion pkg) [pkg] + + insertPackageInstance pkgs = + pkg : deleteBy (equating installedUnitId) pkg pkgs + + +-- | Removes a single installed package from the index. +-- +deleteUnitId :: UnitId -> InstalledPackageIndex + -> InstalledPackageIndex +deleteUnitId ipkgid original@(PackageIndex pids pnames) = + case Map.updateLookupWithKey (\_ _ -> Nothing) ipkgid pids of + (Nothing, _) -> original + (Just spkgid, pids') -> mkPackageIndex pids' + (deletePkgName spkgid pnames) + + where + deletePkgName spkgid = + Map.update (deletePkgVersion spkgid) (packageName spkgid, IPI.sourceLibName spkgid) + + deletePkgVersion spkgid = + (\m -> if Map.null m then Nothing else Just m) + . Map.update deletePkgInstance (packageVersion spkgid) + + deletePkgInstance = + (\xs -> if null xs then Nothing else Just xs) + . List.deleteBy (\_ pkg -> installedUnitId pkg == ipkgid) undefined + +-- | Backwards compatibility wrapper for Cabal pre-1.24. +{-# DEPRECATED deleteInstalledPackageId "Use deleteUnitId instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +deleteInstalledPackageId :: UnitId -> InstalledPackageIndex + -> InstalledPackageIndex +deleteInstalledPackageId = deleteUnitId + +-- | Removes all packages with this source 'PackageId' from the index. 
+-- +deleteSourcePackageId :: PackageId -> InstalledPackageIndex + -> InstalledPackageIndex +deleteSourcePackageId pkgid original@(PackageIndex pids pnames) = + -- NB: Doesn't delete internal packages + case Map.lookup (packageName pkgid, Nothing) pnames of + Nothing -> original + Just pvers -> case Map.lookup (packageVersion pkgid) pvers of + Nothing -> original + Just pkgs -> mkPackageIndex + (foldl' (flip (Map.delete . installedUnitId)) pids pkgs) + (deletePkgName pnames) + where + deletePkgName = + Map.update deletePkgVersion (packageName pkgid, Nothing) + + deletePkgVersion = + (\m -> if Map.null m then Nothing else Just m) + . Map.delete (packageVersion pkgid) + + +-- | Removes all packages with this (case-sensitive) name from the index. +-- +-- NB: Does NOT delete internal libraries from this package. +-- +deletePackageName :: PackageName -> InstalledPackageIndex + -> InstalledPackageIndex +deletePackageName name original@(PackageIndex pids pnames) = + case Map.lookup (name, Nothing) pnames of + Nothing -> original + Just pvers -> mkPackageIndex + (foldl' (flip (Map.delete . installedUnitId)) pids + (concat (Map.elems pvers))) + (Map.delete (name, Nothing) pnames) + +{- +-- | Removes all packages satisfying this dependency from the index. +-- +deleteDependency :: Dependency -> PackageIndex -> PackageIndex +deleteDependency (Dependency name verstionRange) = + delete' name (\pkg -> packageVersion pkg `withinRange` verstionRange) +-} + +-- +-- * Bulk queries +-- + +-- | Get all the packages from the index. +-- +allPackages :: PackageIndex a -> [a] +allPackages = Map.elems . unitIdIndex + +-- | Get all the packages from the index. +-- +-- They are grouped by package name (case-sensitively). +-- +-- (Doesn't include private libraries.) 
+-- +allPackagesByName :: PackageIndex a -> [(PackageName, [a])] +allPackagesByName index = + [ (pkgname, concat (Map.elems pvers)) + | ((pkgname, Nothing), pvers) <- Map.toList (packageIdIndex index) ] + +-- | Get all the packages from the index. +-- +-- They are grouped by source package id (package name and version). +-- +-- (Doesn't include private libraries) +-- +allPackagesBySourcePackageId :: HasUnitId a => PackageIndex a + -> [(PackageId, [a])] +allPackagesBySourcePackageId index = + [ (packageId ipkg, ipkgs) + | ((_, Nothing), pvers) <- Map.toList (packageIdIndex index) + , ipkgs@(ipkg:_) <- Map.elems pvers ] + +-- | Get all the packages from the index. +-- +-- They are grouped by source package id and library name. +-- +-- This DOES include internal libraries. +allPackagesBySourcePackageIdAndLibName :: HasUnitId a => PackageIndex a + -> [((PackageId, Maybe UnqualComponentName), [a])] +allPackagesBySourcePackageIdAndLibName index = + [ ((packageId ipkg, ln), ipkgs) + | ((_, ln), pvers) <- Map.toList (packageIdIndex index) + , ipkgs@(ipkg:_) <- Map.elems pvers ] + +-- +-- * Lookups +-- + +-- | Does a lookup by unit identifier. +-- +-- Since multiple package DBs mask each other by 'UnitId', +-- then we get back at most one package. +-- +lookupUnitId :: PackageIndex a -> UnitId + -> Maybe a +lookupUnitId index uid = Map.lookup uid (unitIdIndex index) + +-- | Does a lookup by component identifier. In the absence +-- of Backpack, this is just a 'lookupUnitId'. +-- +lookupComponentId :: PackageIndex a -> ComponentId + -> Maybe a +lookupComponentId index cid = + Map.lookup (newSimpleUnitId cid) (unitIdIndex index) + +-- | Backwards compatibility for Cabal pre-1.24. +{-# DEPRECATED lookupInstalledPackageId "Use lookupUnitId instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +lookupInstalledPackageId :: PackageIndex a -> UnitId + -> Maybe a +lookupInstalledPackageId = lookupUnitId + + +-- | Does a lookup by source package id (name & version). 
+-- +-- There can be multiple installed packages with the same source 'PackageId' +-- but different 'UnitId'. They are returned in order of +-- preference, with the most preferred first. +-- +lookupSourcePackageId :: PackageIndex a -> PackageId -> [a] +lookupSourcePackageId index pkgid = + -- Do not lookup internal libraries + case Map.lookup (packageName pkgid, Nothing) (packageIdIndex index) of + Nothing -> [] + Just pvers -> case Map.lookup (packageVersion pkgid) pvers of + Nothing -> [] + Just pkgs -> pkgs -- in preference order + +-- | Convenient alias of 'lookupSourcePackageId', but assuming only +-- one package per package ID. +lookupPackageId :: PackageIndex a -> PackageId -> Maybe a +lookupPackageId index pkgid = case lookupSourcePackageId index pkgid of + [] -> Nothing + [pkg] -> Just pkg + _ -> error "Distribution.Simple.PackageIndex: multiple matches found" + +-- | Does a lookup by source package name. +-- +lookupPackageName :: PackageIndex a -> PackageName + -> [(Version, [a])] +lookupPackageName index name = + -- Do not match internal libraries + case Map.lookup (name, Nothing) (packageIdIndex index) of + Nothing -> [] + Just pvers -> Map.toList pvers + + +-- | Does a lookup by source package name and a range of versions. +-- +-- We get back any number of versions of the specified package name, all +-- satisfying the version range constraint. +-- +-- This does NOT work for internal dependencies, DO NOT use this +-- function on those; use 'lookupInternalDependency' instead. +-- +-- INVARIANT: List of eligible 'IPI.InstalledPackageInfo' is non-empty. +-- +lookupDependency :: InstalledPackageIndex -> Dependency + -> [(Version, [IPI.InstalledPackageInfo])] +lookupDependency index dep = + -- Yes, a little bit of a misnomer here! + lookupInternalDependency index dep Nothing + +-- | Does a lookup by source package name and a range of versions. 
+-- +-- We get back any number of versions of the specified package name, all +-- satisfying the version range constraint. +-- +-- INVARIANT: List of eligible 'IPI.InstalledPackageInfo' is non-empty. +-- +lookupInternalDependency :: InstalledPackageIndex -> Dependency + -> Maybe UnqualComponentName + -> [(Version, [IPI.InstalledPackageInfo])] +lookupInternalDependency index (Dependency name versionRange) libn = + case Map.lookup (name, libn) (packageIdIndex index) of + Nothing -> [] + Just pvers -> [ (ver, pkgs') + | (ver, pkgs) <- Map.toList pvers + , ver `withinRange` versionRange + , let pkgs' = filter eligible pkgs + -- Enforce the invariant + , not (null pkgs') + ] + where + -- When we select for dependencies, we ONLY want to pick up indefinite + -- packages, or packages with no instantiations. We'll do mix-in + -- linking to improve any such package into an instantiated one + -- later. + eligible pkg = IPI.indefinite pkg || null (IPI.instantiatedWith pkg) + + +-- +-- * Case insensitive name lookups +-- + +-- | Does a case-insensitive search by package name. +-- +-- If there is only one package that compares case-insensitively to this name +-- then the search is unambiguous and we get back all versions of that package. +-- If several match case-insensitively but one matches exactly then it is also +-- unambiguous. +-- +-- If however several match case-insensitively and none match exactly then we +-- have an ambiguous result, and we get back all the versions of all the +-- packages. The list of ambiguous results is split by exact package name. So +-- it is a non-empty list of non-empty lists. +-- +searchByName :: PackageIndex a -> String -> SearchResult [a] +searchByName index name = + -- Don't match internal packages + case [ pkgs | pkgs@((pname, Nothing),_) <- Map.toList (packageIdIndex index) + , lowercase (unPackageName pname) == lname ] of + [] -> None + [(_,pvers)] -> Unambiguous (concat (Map.elems pvers)) + pkgss -> case find ((mkPackageName name ==) . 
fst . fst) pkgss of + Just (_,pvers) -> Unambiguous (concat (Map.elems pvers)) + Nothing -> Ambiguous (map (concat . Map.elems . snd) pkgss) + where lname = lowercase name + +data SearchResult a = None | Unambiguous a | Ambiguous [a] + +-- | Does a case-insensitive substring search by package name. +-- +-- That is, all packages that contain the given string in their name. +-- +searchByNameSubstring :: PackageIndex a -> String -> [a] +searchByNameSubstring index searchterm = + [ pkg + -- Don't match internal packages + | ((pname, Nothing), pvers) <- Map.toList (packageIdIndex index) + , lsearchterm `isInfixOf` lowercase (unPackageName pname) + , pkgs <- Map.elems pvers + , pkg <- pkgs ] + where lsearchterm = lowercase searchterm + + +-- +-- * Special queries +-- + +-- None of the stuff below depends on the internal representation of the index. +-- + +-- | Find if there are any cycles in the dependency graph. If there are no +-- cycles the result is @[]@. +-- +-- This actually computes the strongly connected components. So it gives us a +-- list of groups of packages where within each group they all depend on each +-- other, directly or indirectly. +-- +dependencyCycles :: PackageInstalled a => PackageIndex a -> [[a]] +dependencyCycles index = + [ vs | Graph.CyclicSCC vs <- Graph.stronglyConnComp adjacencyList ] + where + adjacencyList = [ (pkg, installedUnitId pkg, installedDepends pkg) + | pkg <- allPackages index ] + + +-- | All packages that have immediate dependencies that are not in the index. +-- +-- Returns such packages along with the dependencies that they're missing. +-- +brokenPackages :: PackageInstalled a => PackageIndex a + -> [(a, [UnitId])] +brokenPackages index = + [ (pkg, missing) + | pkg <- allPackages index + , let missing = [ pkg' | pkg' <- installedDepends pkg + , isNothing (lookupUnitId index pkg') ] + , not (null missing) ] + +-- | Tries to take the transitive closure of the package dependencies. 
+-- +-- If the transitive closure is complete then it returns that subset of the +-- index. Otherwise it returns the broken packages as in 'brokenPackages'. +-- +-- * Note that if the result is @Right []@ it is because at least one of +-- the original given 'PackageId's do not occur in the index. +-- +dependencyClosure :: InstalledPackageIndex + -> [UnitId] + -> Either (InstalledPackageIndex) + [(IPI.InstalledPackageInfo, [UnitId])] +dependencyClosure index pkgids0 = case closure mempty [] pkgids0 of + (completed, []) -> Left completed + (completed, _) -> Right (brokenPackages completed) + where + closure completed failed [] = (completed, failed) + closure completed failed (pkgid:pkgids) = case lookupUnitId index pkgid of + Nothing -> closure completed (pkgid:failed) pkgids + Just pkg -> case lookupUnitId completed (installedUnitId pkg) of + Just _ -> closure completed failed pkgids + Nothing -> closure completed' failed pkgids' + where completed' = insert pkg completed + pkgids' = installedDepends pkg ++ pkgids + +-- | Takes the transitive closure of the packages reverse dependencies. +-- +-- * The given 'PackageId's must be in the index. +-- +reverseDependencyClosure :: PackageInstalled a => PackageIndex a + -> [UnitId] + -> [a] +reverseDependencyClosure index = + map vertexToPkg + . concatMap Tree.flatten + . Graph.dfs reverseDepGraph + . map (fromMaybe noSuchPkgId . pkgIdToVertex) + + where + (depGraph, vertexToPkg, pkgIdToVertex) = dependencyGraph index + reverseDepGraph = Graph.transposeG depGraph + noSuchPkgId = error "reverseDependencyClosure: package is not in the graph" + +topologicalOrder :: PackageInstalled a => PackageIndex a -> [a] +topologicalOrder index = map toPkgId + . Graph.topSort + $ graph + where (graph, toPkgId, _) = dependencyGraph index + +reverseTopologicalOrder :: PackageInstalled a => PackageIndex a -> [a] +reverseTopologicalOrder index = map toPkgId + . Graph.topSort + . 
Graph.transposeG + $ graph + where (graph, toPkgId, _) = dependencyGraph index + +-- | Builds a graph of the package dependencies. +-- +-- Dependencies on other packages that are not in the index are discarded. +-- You can check if there are any such dependencies with 'brokenPackages'. +-- +dependencyGraph :: PackageInstalled a => PackageIndex a + -> (Graph.Graph, + Graph.Vertex -> a, + UnitId -> Maybe Graph.Vertex) +dependencyGraph index = (graph, vertex_to_pkg, id_to_vertex) + where + graph = Array.listArray bounds + [ [ v | Just v <- map id_to_vertex (installedDepends pkg) ] + | pkg <- pkgs ] + + pkgs = sortBy (comparing packageId) (allPackages index) + vertices = zip (map installedUnitId pkgs) [0..] + vertex_map = Map.fromList vertices + id_to_vertex pid = Map.lookup pid vertex_map + + vertex_to_pkg vertex = pkgTable ! vertex + + pkgTable = Array.listArray bounds pkgs + topBound = length pkgs - 1 + bounds = (0, topBound) + +-- | We maintain the invariant that, for any 'DepUniqueKey', there +-- is only one instance of the package in our database. +type DepUniqueKey = (PackageName, Maybe UnqualComponentName, Map ModuleName OpenModule) + +-- | Given a package index where we assume we want to use all the packages +-- (use 'dependencyClosure' if you need to get such a index subset) find out +-- if the dependencies within it use consistent versions of each package. +-- Return all cases where multiple packages depend on different versions of +-- some other package. +-- +-- Each element in the result is a package name along with the packages that +-- depend on it and the versions they require. These are guaranteed to be +-- distinct. +-- +dependencyInconsistencies :: InstalledPackageIndex + -- At DepUniqueKey... + -> [(DepUniqueKey, + -- There were multiple packages (BAD!) 
+ [(UnitId, + -- And here are the packages which + -- immediately depended on it + [IPI.InstalledPackageInfo])])] +dependencyInconsistencies index = do + (dep_key, insts_map) <- Map.toList inverseIndex + let insts = Map.toList insts_map + guard (length insts >= 2) + return (dep_key, insts) + where + inverseIndex :: Map DepUniqueKey (Map UnitId [IPI.InstalledPackageInfo]) + inverseIndex = Map.fromListWith (Map.unionWith (++)) $ do + pkg <- allPackages index + dep_ipid <- installedDepends pkg + Just dep <- [lookupUnitId index dep_ipid] + let dep_key = (packageName dep, IPI.sourceLibName dep, + Map.fromList (IPI.instantiatedWith dep)) + return (dep_key, Map.singleton dep_ipid [pkg]) + +-- | A rough approximation of GHC's module finder, takes a +-- 'InstalledPackageIndex' and turns it into a map from module names to their +-- source packages. It's used to initialize the @build-deps@ field in @cabal +-- init@. +moduleNameIndex :: InstalledPackageIndex -> Map ModuleName [IPI.InstalledPackageInfo] +moduleNameIndex index = + Map.fromListWith (++) $ do + pkg <- allPackages index + IPI.ExposedModule m reexport <- IPI.exposedModules pkg + case reexport of + Nothing -> return (m, [pkg]) + Just (OpenModuleVar _) -> [] + Just (OpenModule _ m') | m == m' -> [] + | otherwise -> return (m', [pkg]) + -- The heuristic is this: we want to prefer the original package + -- which originally exported a module. However, if a reexport + -- also *renamed* the module (m /= m'), then we have to use the + -- downstream package, since the upstream package has the wrong + -- module name! 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/PreProcess/Unlit.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/PreProcess/Unlit.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/PreProcess/Unlit.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/PreProcess/Unlit.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,167 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.PreProcess.Unlit +-- Copyright : ... +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Remove the \"literal\" markups from a Haskell source file, including +-- \"@>@\", \"@\\begin{code}@\", \"@\\end{code}@\", and \"@#@\" + +-- This version is interesting because instead of striping comment lines, it +-- turns them into "-- " style comments. This allows using haddock markup +-- in literate scripts without having to use "> --" prefix. + +module Distribution.Simple.PreProcess.Unlit (unlit,plain) where + +import Prelude () +import Distribution.Compat.Prelude + +import Data.List (mapAccumL) + +data Classified = BirdTrack String | Blank String | Ordinary String + | Line !Int String | CPP String + | BeginCode | EndCode + -- output only: + | Error String | Comment String + +-- | No unliteration. 
+plain :: String -> String -> String +plain _ hs = hs + +classify :: String -> Classified +classify ('>':s) = BirdTrack s +classify ('#':s) = case tokens s of + (line:file:_) | all isDigit line + && length file >= 2 + && head file == '"' + && last file == '"' + -> Line (read line) (tail (init file)) -- TODO:eradicateNoParse + _ -> CPP s + where tokens = unfoldr $ \str -> case lex str of + (t@(_:_), str'):_ -> Just (t, str') + _ -> Nothing +classify ('\\':s) + | "begin{code}" `isPrefixOf` s = BeginCode + | "end{code}" `isPrefixOf` s = EndCode +classify s | all isSpace s = Blank s +classify s = Ordinary s + +-- So the weird exception for comment indenting is to make things work with +-- haddock, see classifyAndCheckForBirdTracks below. +unclassify :: Bool -> Classified -> String +unclassify _ (BirdTrack s) = ' ':s +unclassify _ (Blank s) = s +unclassify _ (Ordinary s) = s +unclassify _ (Line n file) = "# " ++ show n ++ " " ++ show file +unclassify _ (CPP s) = '#':s +unclassify True (Comment "") = " --" +unclassify True (Comment s) = " -- " ++ s +unclassify False (Comment "") = "--" +unclassify False (Comment s) = "-- " ++ s +unclassify _ _ = internalError + +-- | 'unlit' takes a filename (for error reports), and transforms the +-- given string, to eliminate the literate comments from the program text. +unlit :: FilePath -> String -> Either String String +unlit file input = + let (usesBirdTracks, classified) = classifyAndCheckForBirdTracks + . inlines + $ input + in either (Left . unlines . map (unclassify usesBirdTracks)) + Right + . checkErrors + . reclassify + $ classified + + where + -- So haddock requires comments and code to align, since it treats comments + -- as following the layout rule. This is a pain for us since bird track + -- style literate code typically gets indented by two since ">" is replaced + -- by " " and people usually use one additional space of indent ie + -- "> then the code". 
On the other hand we cannot just go and indent all + -- the comments by two since that does not work for latex style literate + -- code. So the hacky solution we use here is that if we see any bird track + -- style code then we'll indent all comments by two, otherwise by none. + -- Of course this will not work for mixed latex/bird track .lhs files but + -- nobody does that, it's silly and specifically recommended against in the + -- H98 unlit spec. + -- + classifyAndCheckForBirdTracks = + flip mapAccumL False $ \seenBirdTrack line -> + let classification = classify line + in (seenBirdTrack || isBirdTrack classification, classification) + + isBirdTrack (BirdTrack _) = True + isBirdTrack _ = False + + checkErrors ls = case [ e | Error e <- ls ] of + [] -> Left ls + (message:_) -> Right (f ++ ":" ++ show n ++ ": " ++ message) + where (f, n) = errorPos file 1 ls + errorPos f n [] = (f, n) + errorPos f n (Error _:_) = (f, n) + errorPos _ _ (Line n' f':ls) = errorPos f' n' ls + errorPos f n (_ :ls) = errorPos f (n+1) ls + +-- Here we model a state machine, with each state represented by +-- a local function. We only have four states (well, five, +-- if you count the error state), but the rules +-- to transition between then are not so simple. +-- Would it be simpler to have more states? +-- +-- Each state represents the type of line that was last read +-- i.e. are we in a comment section, or a latex-code section, +-- or a bird-code section, etc? 
+reclassify :: [Classified] -> [Classified] +reclassify = blank -- begin in blank state + where + latex [] = [] + latex (EndCode :ls) = Blank "" : comment ls + latex (BeginCode :_ ) = [Error "\\begin{code} in code section"] + latex (BirdTrack l:ls) = Ordinary ('>':l) : latex ls + latex ( l:ls) = l : latex ls + + blank [] = [] + blank (EndCode :_ ) = [Error "\\end{code} without \\begin{code}"] + blank (BeginCode :ls) = Blank "" : latex ls + blank (BirdTrack l:ls) = BirdTrack l : bird ls + blank (Ordinary l:ls) = Comment l : comment ls + blank ( l:ls) = l : blank ls + + bird [] = [] + bird (EndCode :_ ) = [Error "\\end{code} without \\begin{code}"] + bird (BeginCode :ls) = Blank "" : latex ls + bird (Blank l :ls) = Blank l : blank ls + bird (Ordinary _:_ ) = [Error "program line before comment line"] + bird ( l:ls) = l : bird ls + + comment [] = [] + comment (EndCode :_ ) = [Error "\\end{code} without \\begin{code}"] + comment (BeginCode :ls) = Blank "" : latex ls + comment (CPP l :ls) = CPP l : comment ls + comment (BirdTrack _:_ ) = [Error "comment line before program line"] + -- a blank line and another ordinary line following a comment + -- will be treated as continuing the comment. Otherwise it's + -- then end of the comment, with a blank line. + comment (Blank l:ls@(Ordinary _:_)) = Comment l : comment ls + comment (Blank l:ls) = Blank l : blank ls + comment (Line n f :ls) = Line n f : comment ls + comment (Ordinary l:ls) = Comment l : comment ls + comment (Comment _: _) = internalError + comment (Error _: _) = internalError + +-- Re-implementation of 'lines', for better efficiency (but decreased laziness). +-- Also, importantly, accepts non-standard DOS and Mac line ending characters. 
+inlines :: String -> [String] +inlines xs = lines' xs id + where + lines' [] acc = [acc []] + lines' ('\^M':'\n':s) acc = acc [] : lines' s id -- DOS + lines' ('\^M':s) acc = acc [] : lines' s id -- MacOS + lines' ('\n':s) acc = acc [] : lines' s id -- Unix + lines' (c:s) acc = lines' s (acc . (c:)) + +internalError :: a +internalError = error "unlit: internal error" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/PreProcess.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/PreProcess.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/PreProcess.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/PreProcess.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,752 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.PreProcess +-- Copyright : (c) 2003-2005, Isaac Jones, Malcolm Wallace +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This defines a 'PreProcessor' abstraction which represents a pre-processor +-- that can transform one kind of file into another. There is also a +-- 'PPSuffixHandler' which is a combination of a file extension and a function +-- for configuring a 'PreProcessor'. It defines a bunch of known built-in +-- preprocessors like @cpp@, @cpphs@, @c2hs@, @hsc2hs@, @happy@, @alex@ etc and +-- lists them in 'knownSuffixHandlers'. On top of this it provides a function +-- for actually preprocessing some sources given a bunch of known suffix +-- handlers. This module is not as good as it could be, it could really do with +-- a rewrite to address some of the problems we have with pre-processors. 
+ +module Distribution.Simple.PreProcess (preprocessComponent, preprocessExtras, + knownSuffixHandlers, ppSuffixes, + PPSuffixHandler, PreProcessor(..), + mkSimplePreProcessor, runSimplePreProcessor, + ppCpp, ppCpp', ppGreenCard, ppC2hs, ppHsc2hs, + ppHappy, ppAlex, ppUnlit, platformDefines + ) + where + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.Compat.Stack + +import Distribution.Simple.PreProcess.Unlit +import Distribution.Backpack.DescribeUnitId +import Distribution.Package +import qualified Distribution.ModuleName as ModuleName +import Distribution.ModuleName (ModuleName) +import Distribution.PackageDescription as PD +import qualified Distribution.InstalledPackageInfo as Installed +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.CCompiler +import Distribution.Simple.Compiler +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.BuildPaths +import Distribution.Simple.Utils +import Distribution.Simple.Program +import Distribution.Simple.Program.ResponseFile +import Distribution.Simple.Test.LibV09 +import Distribution.System +import Distribution.Text +import Distribution.Version +import Distribution.Verbosity +import Distribution.Types.ForeignLib +import Distribution.Types.UnqualComponentName + +import System.Directory (doesFileExist) +import System.Info (os, arch) +import System.FilePath (splitExtension, dropExtensions, (), (<.>), + takeDirectory, normalise, replaceExtension, + takeExtensions) + +-- |The interface to a preprocessor, which may be implemented using an +-- external program, but need not be. The arguments are the name of +-- the input file, the name of the output file and a verbosity level. 
+-- Here is a simple example that merely prepends a comment to the given +-- source file: +-- +-- > ppTestHandler :: PreProcessor +-- > ppTestHandler = +-- > PreProcessor { +-- > platformIndependent = True, +-- > runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> +-- > do info verbosity (inFile++" has been preprocessed to "++outFile) +-- > stuff <- readFile inFile +-- > writeFile outFile ("-- preprocessed as a test\n\n" ++ stuff) +-- > return ExitSuccess +-- +-- We split the input and output file names into a base directory and the +-- rest of the file name. The input base dir is the path in the list of search +-- dirs that this file was found in. The output base dir is the build dir where +-- all the generated source files are put. +-- +-- The reason for splitting it up this way is that some pre-processors don't +-- simply generate one output .hs file from one input file but have +-- dependencies on other generated files (notably c2hs, where building one +-- .hs file may require reading other .chi files, and then compiling the .hs +-- file may require reading a generated .h file). In these cases the generated +-- files need to embed relative path names to each other (eg the generated .hs +-- file mentions the .h file in the FFI imports). This path must be relative to +-- the base directory where the generated files are located, it cannot be +-- relative to the top level of the build tree because the compilers do not +-- look for .h files relative to there, ie we do not use \"-I .\", instead we +-- use \"-I dist\/build\" (or whatever dist dir has been set by the user) +-- +-- Most pre-processors do not care of course, so mkSimplePreProcessor and +-- runSimplePreProcessor functions handle the simple case. +-- +data PreProcessor = PreProcessor { + + -- Is the output of the pre-processor platform independent? eg happy output + -- is portable haskell but c2hs's output is platform dependent. 
+ -- This matters since only platform independent generated code can be + -- included into a source tarball. + platformIndependent :: Bool, + + -- TODO: deal with pre-processors that have implementation dependent output + -- eg alex and happy have --ghc flags. However we can't really include + -- ghc-specific code into supposedly portable source tarballs. + + runPreProcessor :: (FilePath, FilePath) -- Location of the source file relative to a base dir + -> (FilePath, FilePath) -- Output file name, relative to an output base dir + -> Verbosity -- verbosity + -> IO () -- Should exit if the preprocessor fails + } + +-- | Function to determine paths to possible extra C sources for a +-- preprocessor: just takes the path to the build directory and uses +-- this to search for C sources with names that match the +-- preprocessor's output name format. +type PreProcessorExtras = FilePath -> IO [FilePath] + + +mkSimplePreProcessor :: (FilePath -> FilePath -> Verbosity -> IO ()) + -> (FilePath, FilePath) + -> (FilePath, FilePath) -> Verbosity -> IO () +mkSimplePreProcessor simplePP + (inBaseDir, inRelativeFile) + (outBaseDir, outRelativeFile) verbosity = simplePP inFile outFile verbosity + where inFile = normalise (inBaseDir inRelativeFile) + outFile = normalise (outBaseDir outRelativeFile) + +runSimplePreProcessor :: PreProcessor -> FilePath -> FilePath -> Verbosity + -> IO () +runSimplePreProcessor pp inFile outFile verbosity = + runPreProcessor pp (".", inFile) (".", outFile) verbosity + +-- |A preprocessor for turning non-Haskell files with the given extension +-- into plain Haskell source files. +type PPSuffixHandler + = (String, BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor) + +-- | Apply preprocessors to the sources from 'hsSourceDirs' for a given +-- component (lib, exe, or test suite). 
+preprocessComponent :: PackageDescription + -> Component + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> Bool + -> Verbosity + -> [PPSuffixHandler] + -> IO () +preprocessComponent pd comp lbi clbi isSrcDist verbosity handlers = do + -- NB: never report instantiation here; we'll report it properly when + -- building. + setupMessage' verbosity "Preprocessing" (packageId pd) + (componentLocalName clbi) (Nothing :: Maybe [(ModuleName, Module)]) + case comp of + (CLib lib@Library{ libBuildInfo = bi }) -> do + let dirs = hsSourceDirs bi ++ [autogenComponentModulesDir lbi clbi + ,autogenPackageModulesDir lbi] + for_ (map ModuleName.toFilePath $ allLibModules lib clbi) $ + pre dirs (componentBuildDir lbi clbi) (localHandlers bi) + (CFLib flib@ForeignLib { foreignLibBuildInfo = bi, foreignLibName = nm }) -> do + let nm' = unUnqualComponentName nm + let flibDir = buildDir lbi nm' nm' ++ "-tmp" + dirs = hsSourceDirs bi ++ [autogenComponentModulesDir lbi clbi + ,autogenPackageModulesDir lbi] + for_ (map ModuleName.toFilePath $ foreignLibModules flib) $ + pre dirs flibDir (localHandlers bi) + (CExe exe@Executable { buildInfo = bi, exeName = nm }) -> do + let nm' = unUnqualComponentName nm + let exeDir = buildDir lbi nm' nm' ++ "-tmp" + dirs = hsSourceDirs bi ++ [autogenComponentModulesDir lbi clbi + ,autogenPackageModulesDir lbi] + for_ (map ModuleName.toFilePath $ otherModules bi) $ + pre dirs exeDir (localHandlers bi) + pre (hsSourceDirs bi) exeDir (localHandlers bi) $ + dropExtensions (modulePath exe) + CTest test@TestSuite{ testName = nm } -> do + let nm' = unUnqualComponentName nm + case testInterface test of + TestSuiteExeV10 _ f -> + preProcessTest test f $ buildDir lbi nm' nm' ++ "-tmp" + TestSuiteLibV09 _ _ -> do + let testDir = buildDir lbi stubName test + stubName test ++ "-tmp" + writeSimpleTestStub test testDir + preProcessTest test (stubFilePath test) testDir + TestSuiteUnsupported tt -> + die' verbosity $ "No support for preprocessing test " + ++ "suite 
type " ++ display tt + CBench bm@Benchmark{ benchmarkName = nm } -> do + let nm' = unUnqualComponentName nm + case benchmarkInterface bm of + BenchmarkExeV10 _ f -> + preProcessBench bm f $ buildDir lbi nm' nm' ++ "-tmp" + BenchmarkUnsupported tt -> + die' verbosity $ "No support for preprocessing benchmark " + ++ "type " ++ display tt + where + builtinHaskellSuffixes = ["hs", "lhs", "hsig", "lhsig"] + builtinCSuffixes = cSourceExtensions + builtinSuffixes = builtinHaskellSuffixes ++ builtinCSuffixes + localHandlers bi = [(ext, h bi lbi clbi) | (ext, h) <- handlers] + pre dirs dir lhndlrs fp = + preprocessFile dirs dir isSrcDist fp verbosity builtinSuffixes lhndlrs + preProcessTest test = preProcessComponent (testBuildInfo test) + (testModules test) + preProcessBench bm = preProcessComponent (benchmarkBuildInfo bm) + (benchmarkModules bm) + preProcessComponent bi modules exePath dir = do + let biHandlers = localHandlers bi + sourceDirs = hsSourceDirs bi ++ [ autogenComponentModulesDir lbi clbi + , autogenPackageModulesDir lbi ] + sequence_ [ preprocessFile sourceDirs dir isSrcDist + (ModuleName.toFilePath modu) verbosity builtinSuffixes + biHandlers + | modu <- modules ] + preprocessFile (dir : (hsSourceDirs bi)) dir isSrcDist + (dropExtensions $ exePath) verbosity + builtinSuffixes biHandlers + +--TODO: try to list all the modules that could not be found +-- not just the first one. It's annoying and slow due to the need +-- to reconfigure after editing the .cabal file each time. + +-- |Find the first extension of the file that exists, and preprocess it +-- if required. 
+preprocessFile + :: [FilePath] -- ^source directories + -> FilePath -- ^build directory + -> Bool -- ^preprocess for sdist + -> FilePath -- ^module file name + -> Verbosity -- ^verbosity + -> [String] -- ^builtin suffixes + -> [(String, PreProcessor)] -- ^possible preprocessors + -> IO () +preprocessFile searchLoc buildLoc forSDist baseFile verbosity builtinSuffixes handlers = do + -- look for files in the various source dirs with this module name + -- and a file extension of a known preprocessor + psrcFiles <- findFileWithExtension' (map fst handlers) searchLoc baseFile + case psrcFiles of + -- no preprocessor file exists, look for an ordinary source file + -- just to make sure one actually exists at all for this module. + -- Note: by looking in the target/output build dir too, we allow + -- source files to appear magically in the target build dir without + -- any corresponding "real" source file. This lets custom Setup.hs + -- files generate source modules directly into the build dir without + -- the rest of the build system being aware of it (somewhat dodgy) + Nothing -> do + bsrcFiles <- findFileWithExtension builtinSuffixes (buildLoc : searchLoc) baseFile + case bsrcFiles of + Nothing -> + die' verbosity $ "can't find source for " ++ baseFile + ++ " in " ++ intercalate ", " searchLoc + _ -> return () + -- found a pre-processable file in one of the source dirs + Just (psrcLoc, psrcRelFile) -> do + let (srcStem, ext) = splitExtension psrcRelFile + psrcFile = psrcLoc psrcRelFile + pp = fromMaybe (error "Distribution.Simple.PreProcess: Just expected") + (lookup (tailNotNull ext) handlers) + -- Preprocessing files for 'sdist' is different from preprocessing + -- for 'build'. When preprocessing for sdist we preprocess to + -- avoid that the user has to have the preprocessors available. 
+ -- ATM, we don't have a way to specify which files are to be + -- preprocessed and which not, so for sdist we only process + -- platform independent files and put them into the 'buildLoc' + -- (which we assume is set to the temp. directory that will become + -- the tarball). + --TODO: eliminate sdist variant, just supply different handlers + when (not forSDist || forSDist && platformIndependent pp) $ do + -- look for existing pre-processed source file in the dest dir to + -- see if we really have to re-run the preprocessor. + ppsrcFiles <- findFileWithExtension builtinSuffixes [buildLoc] baseFile + recomp <- case ppsrcFiles of + Nothing -> return True + Just ppsrcFile -> + psrcFile `moreRecentFile` ppsrcFile + when recomp $ do + let destDir = buildLoc dirName srcStem + createDirectoryIfMissingVerbose verbosity True destDir + runPreProcessorWithHsBootHack pp + (psrcLoc, psrcRelFile) + (buildLoc, srcStem <.> "hs") + + where + dirName = takeDirectory + tailNotNull [] = [] + tailNotNull x = tail x + + -- FIXME: This is a somewhat nasty hack. GHC requires that hs-boot files + -- be in the same place as the hs files, so if we put the hs file in dist/ + -- then we need to copy the hs-boot file there too. This should probably be + -- done another way. Possibly we should also be looking for .lhs-boot + -- files, but I think that preprocessors only produce .hs files. 
+ runPreProcessorWithHsBootHack pp + (inBaseDir, inRelativeFile) + (outBaseDir, outRelativeFile) = do + runPreProcessor pp + (inBaseDir, inRelativeFile) + (outBaseDir, outRelativeFile) verbosity + + exists <- doesFileExist inBoot + when exists $ copyFileVerbose verbosity inBoot outBoot + + where + inBoot = replaceExtension inFile "hs-boot" + outBoot = replaceExtension outFile "hs-boot" + + inFile = normalise (inBaseDir inRelativeFile) + outFile = normalise (outBaseDir outRelativeFile) + +-- ------------------------------------------------------------ +-- * known preprocessors +-- ------------------------------------------------------------ + +ppGreenCard :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor +ppGreenCard _ lbi _ + = PreProcessor { + platformIndependent = False, + runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> + runDbProgram verbosity greencardProgram (withPrograms lbi) + (["-tffi", "-o" ++ outFile, inFile]) + } + +-- This one is useful for preprocessors that can't handle literate source. +-- We also need a way to chain preprocessors. 
+ppUnlit :: PreProcessor +ppUnlit = + PreProcessor { + platformIndependent = True, + runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> + withUTF8FileContents inFile $ \contents -> + either (writeUTF8File outFile) (die' verbosity) (unlit inFile contents) + } + +ppCpp :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor +ppCpp = ppCpp' [] + +ppCpp' :: [String] -> BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor +ppCpp' extraArgs bi lbi clbi = + case compilerFlavor (compiler lbi) of + GHC -> ppGhcCpp ghcProgram (const True) args bi lbi clbi + GHCJS -> ppGhcCpp ghcjsProgram (const True) args bi lbi clbi + _ -> ppCpphs args bi lbi clbi + where cppArgs = getCppOptions bi lbi + args = cppArgs ++ extraArgs + +ppGhcCpp :: Program -> (Version -> Bool) + -> [String] -> BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor +ppGhcCpp program xHs extraArgs _bi lbi clbi = + PreProcessor { + platformIndependent = False, + runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do + (prog, version, _) <- requireProgramVersion verbosity + program anyVersion (withPrograms lbi) + runProgram verbosity prog $ + ["-E", "-cpp"] + -- This is a bit of an ugly hack. We're going to + -- unlit the file ourselves later on if appropriate, + -- so we need GHC not to unlit it now or it'll get + -- double-unlitted. In the future we might switch to + -- using cpphs --unlit instead. 
+ ++ (if xHs version then ["-x", "hs"] else []) + ++ [ "-optP-include", "-optP"++ (autogenComponentModulesDir lbi clbi cppHeaderName) ] + ++ ["-o", outFile, inFile] + ++ extraArgs + } + +ppCpphs :: [String] -> BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor +ppCpphs extraArgs _bi lbi clbi = + PreProcessor { + platformIndependent = False, + runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do + (cpphsProg, cpphsVersion, _) <- requireProgramVersion verbosity + cpphsProgram anyVersion (withPrograms lbi) + runProgram verbosity cpphsProg $ + ("-O" ++ outFile) : inFile + : "--noline" : "--strip" + : (if cpphsVersion >= mkVersion [1,6] + then ["--include="++ (autogenComponentModulesDir lbi clbi cppHeaderName)] + else []) + ++ extraArgs + } + +ppHsc2hs :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor +ppHsc2hs bi lbi clbi = + PreProcessor { + platformIndependent = False, + runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> do + (gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi) + (hsc2hsProg, hsc2hsVersion, _) <- requireProgramVersion verbosity + hsc2hsProgram anyVersion (withPrograms lbi) + -- See Trac #13896 and https://github.com/haskell/cabal/issues/3122. + let hsc2hsSupportsResponseFiles = hsc2hsVersion >= mkVersion [0,68,4] + pureArgs = genPureArgs gccProg inFile outFile + if hsc2hsSupportsResponseFiles + then withResponseFile + verbosity + defaultTempFileOptions + (takeDirectory outFile) + "hsc2hs-response.txt" + Nothing + pureArgs + (\responseFileName -> + runProgram verbosity hsc2hsProg ["@"++ responseFileName]) + else runProgram verbosity hsc2hsProg pureArgs + } + where + -- Returns a list of command line arguments that can either be passed + -- directly, or via a response file. 
+ genPureArgs :: ConfiguredProgram -> String -> String -> [String] + genPureArgs gccProg inFile outFile = + [ "--cc=" ++ programPath gccProg + , "--ld=" ++ programPath gccProg ] + + -- Additional gcc options + ++ [ "--cflag=" ++ opt | opt <- programDefaultArgs gccProg + ++ programOverrideArgs gccProg ] + ++ [ "--lflag=" ++ opt | opt <- programDefaultArgs gccProg + ++ programOverrideArgs gccProg ] + + -- OSX frameworks: + ++ [ what ++ "=-F" ++ opt + | isOSX + , opt <- nub (concatMap Installed.frameworkDirs pkgs) + , what <- ["--cflag", "--lflag"] ] + ++ [ "--lflag=" ++ arg + | isOSX + , opt <- PD.frameworks bi ++ concatMap Installed.frameworks pkgs + , arg <- ["-framework", opt] ] + + -- Note that on ELF systems, wherever we use -L, we must also use -R + -- because presumably that -L dir is not on the normal path for the + -- system's dynamic linker. This is needed because hsc2hs works by + -- compiling a C program and then running it. + + ++ [ "--cflag=" ++ opt | opt <- platformDefines lbi ] + + -- Options from the current package: + ++ [ "--cflag=-I" ++ dir | dir <- PD.includeDirs bi ] + ++ [ "--cflag=-I" ++ buildDir lbi dir | dir <- PD.includeDirs bi ] + ++ [ "--cflag=" ++ opt | opt <- PD.ccOptions bi + ++ PD.cppOptions bi + -- hsc2hs uses the C ABI + -- We assume that there are only C sources + -- and C++ functions are exported via a C + -- interface and wrapped in a C source file. + -- Therefore we do not supply C++ flags + -- because there will not be C++ sources. + -- + -- DO NOT add PD.cxxOptions unless this changes! 
+ ] + ++ [ "--cflag=" ++ opt | opt <- + [ "-I" ++ autogenComponentModulesDir lbi clbi, + "-I" ++ autogenPackageModulesDir lbi, + "-include", autogenComponentModulesDir lbi clbi cppHeaderName ] ] + ++ [ "--lflag=-L" ++ opt | opt <- PD.extraLibDirs bi ] + ++ [ "--lflag=-Wl,-R," ++ opt | isELF + , opt <- PD.extraLibDirs bi ] + ++ [ "--lflag=-l" ++ opt | opt <- PD.extraLibs bi ] + ++ [ "--lflag=" ++ opt | opt <- PD.ldOptions bi ] + + -- Options from dependent packages + ++ [ "--cflag=" ++ opt + | pkg <- pkgs + , opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ] + ++ [ opt | opt <- Installed.ccOptions pkg ] ] + ++ [ "--lflag=" ++ opt + | pkg <- pkgs + , opt <- [ "-L" ++ opt | opt <- Installed.libraryDirs pkg ] + ++ [ "-Wl,-R," ++ opt | isELF + , opt <- Installed.libraryDirs pkg ] + ++ [ "-l" ++ opt | opt <- Installed.extraLibraries pkg ] + ++ [ opt | opt <- Installed.ldOptions pkg ] ] + ++ ["-o", outFile, inFile] + + hacked_index = packageHacks (installedPkgs lbi) + -- Look only at the dependencies of the current component + -- being built! This relies on 'installedPkgs' maintaining + -- 'InstalledPackageInfo' for internal deps too; see #2971. + pkgs = PackageIndex.topologicalOrder $ + case PackageIndex.dependencyClosure hacked_index + (map fst (componentPackageDeps clbi)) of + Left index' -> index' + Right inf -> + error ("ppHsc2hs: broken closure: " ++ show inf) + isOSX = case buildOS of OSX -> True; _ -> False + isELF = case buildOS of OSX -> False; Windows -> False; AIX -> False; _ -> True; + packageHacks = case compilerFlavor (compiler lbi) of + GHC -> hackRtsPackage + GHCJS -> hackRtsPackage + _ -> id + -- We don't link in the actual Haskell libraries of our dependencies, so + -- the -u flags in the ldOptions of the rts package mean linking fails on + -- OS X (its ld is a tad stricter than gnu ld). 
Thus we remove the + -- ldOptions for GHC's rts package: + hackRtsPackage index = + case PackageIndex.lookupPackageName index (mkPackageName "rts") of + [(_, [rts])] + -> PackageIndex.insert rts { Installed.ldOptions = [] } index + _ -> error "No (or multiple) ghc rts package is registered!!" + +ppHsc2hsExtras :: PreProcessorExtras +ppHsc2hsExtras buildBaseDir = filter ("_hsc.c" `isSuffixOf`) `fmap` + getDirectoryContentsRecursive buildBaseDir + +ppC2hs :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor +ppC2hs bi lbi clbi = + PreProcessor { + platformIndependent = False, + runPreProcessor = \(inBaseDir, inRelativeFile) + (outBaseDir, outRelativeFile) verbosity -> do + (c2hsProg, _, _) <- requireProgramVersion verbosity + c2hsProgram (orLaterVersion (mkVersion [0,15])) + (withPrograms lbi) + (gccProg, _) <- requireProgram verbosity gccProgram (withPrograms lbi) + runProgram verbosity c2hsProg $ + + -- Options from the current package: + [ "--cpp=" ++ programPath gccProg, "--cppopts=-E" ] + ++ [ "--cppopts=" ++ opt | opt <- getCppOptions bi lbi ] + ++ [ "--cppopts=-include" ++ (autogenComponentModulesDir lbi clbi cppHeaderName) ] + ++ [ "--include=" ++ outBaseDir ] + + -- Options from dependent packages + ++ [ "--cppopts=" ++ opt + | pkg <- pkgs + , opt <- [ "-I" ++ opt | opt <- Installed.includeDirs pkg ] + ++ [ opt | opt@('-':c:_) <- Installed.ccOptions pkg + -- c2hs uses the C ABI + -- We assume that there are only C sources + -- and C++ functions are exported via a C + -- interface and wrapped in a C source file. + -- Therefore we do not supply C++ flags + -- because there will not be C++ sources. + -- + -- + -- DO NOT add Installed.cxxOptions unless this changes! 
+ , c `elem` "DIU" ] ] + --TODO: install .chi files for packages, so we can --include + -- those dirs here, for the dependencies + + -- input and output files + ++ [ "--output-dir=" ++ outBaseDir + , "--output=" ++ outRelativeFile + , inBaseDir inRelativeFile ] + } + where + pkgs = PackageIndex.topologicalOrder (installedPkgs lbi) + +ppC2hsExtras :: PreProcessorExtras +ppC2hsExtras d = filter (\p -> takeExtensions p == ".chs.c") `fmap` + getDirectoryContentsRecursive d + +--TODO: perhaps use this with hsc2hs too +--TODO: remove cc-options from cpphs for cabal-version: >= 1.10 +--TODO: Refactor and add separate getCppOptionsForHs, getCppOptionsForCxx, & getCppOptionsForC +-- instead of combining all these cases in a single function. This blind combination can +-- potentially lead to compilation inconsistencies. +getCppOptions :: BuildInfo -> LocalBuildInfo -> [String] +getCppOptions bi lbi + = platformDefines lbi + ++ cppOptions bi + ++ ["-I" ++ dir | dir <- PD.includeDirs bi] + ++ [opt | opt@('-':c:_) <- PD.ccOptions bi ++ PD.cxxOptions bi, c `elem` "DIU"] + +platformDefines :: LocalBuildInfo -> [String] +platformDefines lbi = + case compilerFlavor comp of + GHC -> + ["-D__GLASGOW_HASKELL__=" ++ versionInt version] ++ + ["-D" ++ os ++ "_BUILD_OS=1"] ++ + ["-D" ++ arch ++ "_BUILD_ARCH=1"] ++ + map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++ + map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr + GHCJS -> + compatGlasgowHaskell ++ + ["-D__GHCJS__=" ++ versionInt version] ++ + ["-D" ++ os ++ "_BUILD_OS=1"] ++ + ["-D" ++ arch ++ "_BUILD_ARCH=1"] ++ + map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++ + map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr + HaskellSuite {} -> + ["-D__HASKELL_SUITE__"] ++ + map (\os' -> "-D" ++ os' ++ "_HOST_OS=1") osStr ++ + map (\arch' -> "-D" ++ arch' ++ "_HOST_ARCH=1") archStr + _ -> [] + where + comp = compiler lbi + Platform hostArch hostOS = hostPlatform lbi + version = compilerVersion comp + compatGlasgowHaskell = + 
maybe [] (\v -> ["-D__GLASGOW_HASKELL__=" ++ versionInt v]) + (compilerCompatVersion GHC comp) + -- TODO: move this into the compiler abstraction + -- FIXME: this forces GHC's crazy 4.8.2 -> 408 convention on all + -- the other compilers. Check if that's really what they want. + versionInt :: Version -> String + versionInt v = case versionNumbers v of + [] -> "1" + [n] -> show n + n1:n2:_ -> + -- 6.8.x -> 608 + -- 6.10.x -> 610 + let s1 = show n1 + s2 = show n2 + middle = case s2 of + _ : _ : _ -> "" + _ -> "0" + in s1 ++ middle ++ s2 + + osStr = case hostOS of + Linux -> ["linux"] + Windows -> ["mingw32"] + OSX -> ["darwin"] + FreeBSD -> ["freebsd"] + OpenBSD -> ["openbsd"] + NetBSD -> ["netbsd"] + DragonFly -> ["dragonfly"] + Solaris -> ["solaris2"] + AIX -> ["aix"] + HPUX -> ["hpux"] + IRIX -> ["irix"] + HaLVM -> [] + IOS -> ["ios"] + Android -> ["android"] + Ghcjs -> ["ghcjs"] + Hurd -> ["hurd"] + OtherOS _ -> [] + archStr = case hostArch of + I386 -> ["i386"] + X86_64 -> ["x86_64"] + PPC -> ["powerpc"] + PPC64 -> ["powerpc64"] + Sparc -> ["sparc"] + Arm -> ["arm"] + AArch64 -> ["aarch64"] + Mips -> ["mips"] + SH -> [] + IA64 -> ["ia64"] + S390 -> ["s390"] + Alpha -> ["alpha"] + Hppa -> ["hppa"] + Rs6000 -> ["rs6000"] + M68k -> ["m68k"] + Vax -> ["vax"] + JavaScript -> ["javascript"] + OtherArch _ -> [] + +ppHappy :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor +ppHappy _ lbi _ = pp { platformIndependent = True } + where pp = standardPP lbi happyProgram (hcFlags hc) + hc = compilerFlavor (compiler lbi) + hcFlags GHC = ["-agc"] + hcFlags GHCJS = ["-agc"] + hcFlags _ = [] + +ppAlex :: BuildInfo -> LocalBuildInfo -> ComponentLocalBuildInfo -> PreProcessor +ppAlex _ lbi _ = pp { platformIndependent = True } + where pp = standardPP lbi alexProgram (hcFlags hc) + hc = compilerFlavor (compiler lbi) + hcFlags GHC = ["-g"] + hcFlags GHCJS = ["-g"] + hcFlags _ = [] + +standardPP :: LocalBuildInfo -> Program -> [String] -> PreProcessor 
+standardPP lbi prog args = + PreProcessor { + platformIndependent = False, + runPreProcessor = mkSimplePreProcessor $ \inFile outFile verbosity -> + runDbProgram verbosity prog (withPrograms lbi) + (args ++ ["-o", outFile, inFile]) + } + +-- |Convenience function; get the suffixes of these preprocessors. +ppSuffixes :: [ PPSuffixHandler ] -> [String] +ppSuffixes = map fst + +-- |Standard preprocessors: GreenCard, c2hs, hsc2hs, happy, alex and cpphs. +knownSuffixHandlers :: [ PPSuffixHandler ] +knownSuffixHandlers = + [ ("gc", ppGreenCard) + , ("chs", ppC2hs) + , ("hsc", ppHsc2hs) + , ("x", ppAlex) + , ("y", ppHappy) + , ("ly", ppHappy) + , ("cpphs", ppCpp) + ] + +-- |Standard preprocessors with possible extra C sources: c2hs, hsc2hs. +knownExtrasHandlers :: [ PreProcessorExtras ] +knownExtrasHandlers = [ ppC2hsExtras, ppHsc2hsExtras ] + +-- | Find any extra C sources generated by preprocessing that need to +-- be added to the component (addresses issue #238). +preprocessExtras :: Verbosity + -> Component + -> LocalBuildInfo + -> IO [FilePath] +preprocessExtras verbosity comp lbi = case comp of + CLib _ -> pp $ buildDir lbi + (CExe Executable { exeName = nm }) -> do + let nm' = unUnqualComponentName nm + pp $ buildDir lbi nm' nm' ++ "-tmp" + (CFLib ForeignLib { foreignLibName = nm }) -> do + let nm' = unUnqualComponentName nm + pp $ buildDir lbi nm' nm' ++ "-tmp" + CTest test -> do + let nm' = unUnqualComponentName $ testName test + case testInterface test of + TestSuiteExeV10 _ _ -> + pp $ buildDir lbi nm' nm' ++ "-tmp" + TestSuiteLibV09 _ _ -> + pp $ buildDir lbi stubName test stubName test ++ "-tmp" + TestSuiteUnsupported tt -> die' verbosity $ "No support for preprocessing test " + ++ "suite type " ++ display tt + CBench bm -> do + let nm' = unUnqualComponentName $ benchmarkName bm + case benchmarkInterface bm of + BenchmarkExeV10 _ _ -> + pp $ buildDir lbi nm' nm' ++ "-tmp" + BenchmarkUnsupported tt -> + die' verbosity $ "No support for preprocessing benchmark 
" + ++ "type " ++ display tt + where + pp :: FilePath -> IO [FilePath] + pp dir = (map (dir ) . filter not_sub . concat) + <$> for knownExtrasHandlers + (withLexicalCallStack (\f -> f dir)) + -- TODO: This is a terrible hack to work around #3545 while we don't + -- reorganize the directory layout. Basically, for the main + -- library, we might accidentally pick up autogenerated sources for + -- our subcomponents, because they are all stored as subdirectories + -- in dist/build. This is a cheap and cheerful check to prevent + -- this from happening. It is not particularly correct; for example + -- if a user has a test suite named foobar and puts their C file in + -- foobar/foo.c, this test will incorrectly exclude it. But I + -- didn't want to break BC... + not_sub p = and [ not (pre `isPrefixOf` p) | pre <- component_dirs ] + component_dirs = component_names (localPkgDescr lbi) + -- TODO: libify me + component_names pkg_descr = fmap unUnqualComponentName $ + mapMaybe libName (subLibraries pkg_descr) ++ + map exeName (executables pkg_descr) ++ + map testName (testSuites pkg_descr) ++ + map benchmarkName (benchmarks pkg_descr) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Ar.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Ar.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Ar.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Ar.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,191 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE NondecreasingIndentation #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Ar +-- Copyright : Duncan Coutts 2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- 
Portability : portable +-- +-- This module provides an library interface to the @ar@ program. + +module Distribution.Simple.Program.Ar ( + createArLibArchive, + multiStageProgramInvocation + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import qualified Data.ByteString as BS +import qualified Data.ByteString.Char8 as BS8 +import Distribution.Compat.CopyFile (filesEqual) +import Distribution.Simple.Compiler (arResponseFilesSupported) +import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(..)) +import Distribution.Simple.Program + ( ProgramInvocation, arProgram, requireProgram ) +import Distribution.Simple.Program.ResponseFile + ( withResponseFile ) +import Distribution.Simple.Program.Run + ( programInvocation, multiStageProgramInvocation + , runProgramInvocation ) +import Distribution.Simple.Setup + ( fromFlagOrDefault, configUseResponseFiles ) +import Distribution.Simple.Utils + ( defaultTempFileOptions, dieWithLocation', withTempDirectory ) +import Distribution.System + ( Arch(..), OS(..), Platform(..) ) +import Distribution.Verbosity + ( Verbosity, deafening, verbose ) +import System.Directory (doesFileExist, renameFile) +import System.FilePath ((), splitFileName) +import System.IO + ( Handle, IOMode(ReadWriteMode), SeekMode(AbsoluteSeek) + , hFileSize, hSeek, withBinaryFile ) + +-- | Call @ar@ to create a library archive from a bunch of object files. +-- +createArLibArchive :: Verbosity -> LocalBuildInfo + -> FilePath -> [FilePath] -> IO () +createArLibArchive verbosity lbi targetPath files = do + (ar, _) <- requireProgram verbosity arProgram progDb + + let (targetDir, targetName) = splitFileName targetPath + withTempDirectory verbosity targetDir "objs" $ \ tmpDir -> do + let tmpPath = tmpDir targetName + + -- The args to use with "ar" are actually rather subtle and system-dependent. + -- In particular we have the following issues: + -- + -- -- On OS X, "ar q" does not make an archive index. Archives with no + -- index cannot be used. 
+ -- + -- -- GNU "ar r" will not let us add duplicate objects, only "ar q" lets us + -- do that. We have duplicates because of modules like "A.M" and "B.M" + -- both make an object file "M.o" and ar does not consider the directory. + -- + -- Our solution is to use "ar r" in the simple case when one call is enough. + -- When we need to call ar multiple times we use "ar q" and for the last + -- call on OSX we use "ar qs" so that it'll make the index. + + let simpleArgs = case hostOS of + OSX -> ["-r", "-s"] + _ -> ["-r"] + + initialArgs = ["-q"] + finalArgs = case hostOS of + OSX -> ["-q", "-s"] + _ -> ["-q"] + + extraArgs = verbosityOpts verbosity ++ [tmpPath] + + simple = programInvocation ar (simpleArgs ++ extraArgs) + initial = programInvocation ar (initialArgs ++ extraArgs) + middle = initial + final = programInvocation ar (finalArgs ++ extraArgs) + + oldVersionManualOverride = + fromFlagOrDefault False $ configUseResponseFiles $ configFlags lbi + responseArgumentsNotSupported = + not (arResponseFilesSupported (compiler lbi)) + + invokeWithResponesFile :: FilePath -> ProgramInvocation + invokeWithResponesFile atFile = + programInvocation ar $ + simpleArgs ++ extraArgs ++ ['@' : atFile] + + if oldVersionManualOverride || responseArgumentsNotSupported + then + sequence_ + [ runProgramInvocation verbosity inv + | inv <- multiStageProgramInvocation + simple (initial, middle, final) files ] + else + withResponseFile verbosity defaultTempFileOptions tmpDir "ar.rsp" Nothing files $ + \path -> runProgramInvocation verbosity $ invokeWithResponesFile path + + unless (hostArch == Arm -- See #1537 + || hostOS == AIX) $ -- AIX uses its own "ar" format variant + wipeMetadata verbosity tmpPath + equal <- filesEqual tmpPath targetPath + unless equal $ renameFile tmpPath targetPath + + where + progDb = withPrograms lbi + Platform hostArch hostOS = hostPlatform lbi + verbosityOpts v + | v >= deafening = ["-v"] + | v >= verbose = [] + | otherwise = ["-c"] -- Do not warn if library 
had to be created. + +-- | @ar@ by default includes various metadata for each object file in their +-- respective headers, so the output can differ for the same inputs, making +-- it difficult to avoid re-linking. GNU @ar@(1) has a deterministic mode +-- (@-D@) flag that always writes zero for the mtime, UID and GID, and 0644 +-- for the file mode. However detecting whether @-D@ is supported seems +-- rather harder than just re-implementing this feature. +wipeMetadata :: Verbosity -> FilePath -> IO () +wipeMetadata verbosity path = do + -- Check for existence first (ReadWriteMode would create one otherwise) + exists <- doesFileExist path + unless exists $ wipeError "Temporary file disappeared" + withBinaryFile path ReadWriteMode $ \ h -> hFileSize h >>= wipeArchive h + + where + wipeError msg = dieWithLocation' verbosity path Nothing $ + "Distribution.Simple.Program.Ar.wipeMetadata: " ++ msg + archLF = "!\x0a" -- global magic, 8 bytes + x60LF = "\x60\x0a" -- header magic, 2 bytes + metadata = BS.concat + [ "0 " -- mtime, 12 bytes + , "0 " -- UID, 6 bytes + , "0 " -- GID, 6 bytes + , "0644 " -- mode, 8 bytes + ] + headerSize :: Int + headerSize = 60 + + -- http://en.wikipedia.org/wiki/Ar_(Unix)#File_format_details + wipeArchive :: Handle -> Integer -> IO () + wipeArchive h archiveSize = do + global <- BS.hGet h (BS.length archLF) + unless (global == archLF) $ wipeError "Bad global header" + wipeHeader (toInteger $ BS.length archLF) + + where + wipeHeader :: Integer -> IO () + wipeHeader offset = case compare offset archiveSize of + EQ -> return () + GT -> wipeError (atOffset "Archive truncated") + LT -> do + header <- BS.hGet h headerSize + unless (BS.length header == headerSize) $ + wipeError (atOffset "Short header") + let magic = BS.drop 58 header + unless (magic == x60LF) . wipeError . 
atOffset $ + "Bad magic " ++ show magic ++ " in header" + + let name = BS.take 16 header + let size = BS.take 10 $ BS.drop 48 header + objSize <- case reads (BS8.unpack size) of + [(n, s)] | all isSpace s -> return n + _ -> wipeError (atOffset "Bad file size in header") + + let replacement = BS.concat [ name, metadata, size, magic ] + unless (BS.length replacement == headerSize) $ + wipeError (atOffset "Something has gone terribly wrong") + hSeek h AbsoluteSeek offset + BS.hPut h replacement + + let nextHeader = offset + toInteger headerSize + + -- Odd objects are padded with an extra '\x0a' + if odd objSize then objSize + 1 else objSize + hSeek h AbsoluteSeek nextHeader + wipeHeader nextHeader + + where + atOffset msg = msg ++ " at offset " ++ show offset diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Builtin.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Builtin.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Builtin.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Builtin.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,349 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Builtin +-- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- The module defines all the known built-in 'Program's. +-- +-- Where possible we try to find their version numbers. 
+-- +module Distribution.Simple.Program.Builtin ( + + -- * The collection of unconfigured and configured programs + builtinPrograms, + + -- * Programs that Cabal knows about + ghcProgram, + ghcPkgProgram, + runghcProgram, + ghcjsProgram, + ghcjsPkgProgram, + hmakeProgram, + jhcProgram, + haskellSuiteProgram, + haskellSuitePkgProgram, + uhcProgram, + gccProgram, + arProgram, + stripProgram, + happyProgram, + alexProgram, + hsc2hsProgram, + c2hsProgram, + cpphsProgram, + hscolourProgram, + doctestProgram, + haddockProgram, + greencardProgram, + ldProgram, + tarProgram, + cppProgram, + pkgConfigProgram, + hpcProgram, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Simple.Program.GHC +import Distribution.Simple.Program.Find +import Distribution.Simple.Program.Internal +import Distribution.Simple.Program.Run +import Distribution.Simple.Program.Types +import Distribution.Simple.Utils +import Distribution.Compat.Exception +import Distribution.Verbosity +import Distribution.Version + +import qualified Data.Map as Map + +-- ------------------------------------------------------------ +-- * Known programs +-- ------------------------------------------------------------ + +-- | The default list of programs. +-- These programs are typically used internally to Cabal. 
+builtinPrograms :: [Program] +builtinPrograms = + [ + -- compilers and related progs + ghcProgram + , runghcProgram + , ghcPkgProgram + , ghcjsProgram + , ghcjsPkgProgram + , haskellSuiteProgram + , haskellSuitePkgProgram + , hmakeProgram + , jhcProgram + , uhcProgram + , hpcProgram + -- preprocessors + , hscolourProgram + , doctestProgram + , haddockProgram + , happyProgram + , alexProgram + , hsc2hsProgram + , c2hsProgram + , cpphsProgram + , greencardProgram + -- platform toolchain + , gccProgram + , arProgram + , stripProgram + , ldProgram + , tarProgram + -- configuration tools + , pkgConfigProgram + ] + +ghcProgram :: Program +ghcProgram = (simpleProgram "ghc") { + programFindVersion = findProgramVersion "--numeric-version" id, + + -- Workaround for https://ghc.haskell.org/trac/ghc/ticket/8825 + -- (spurious warning on non-english locales) + programPostConf = \_verbosity ghcProg -> + do let ghcProg' = ghcProg { + programOverrideEnv = ("LANGUAGE", Just "en") + : programOverrideEnv ghcProg + } + -- Only the 7.8 branch seems to be affected. Fixed in 7.8.4. 
+ affectedVersionRange = intersectVersionRanges + (laterVersion $ mkVersion [7,8,0]) + (earlierVersion $ mkVersion [7,8,4]) + return $ maybe ghcProg + (\v -> if withinRange v affectedVersionRange + then ghcProg' else ghcProg) + (programVersion ghcProg), + + programNormaliseArgs = normaliseGhcArgs + } + +runghcProgram :: Program +runghcProgram = (simpleProgram "runghc") { + programFindVersion = findProgramVersion "--version" $ \str -> + case words str of + -- "runghc 7.10.3" + (_:ver:_) -> ver + _ -> "" + } + +ghcPkgProgram :: Program +ghcPkgProgram = (simpleProgram "ghc-pkg") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "ghc-pkg --version" gives a string like + -- "GHC package manager version 6.4.1" + case words str of + (_:_:_:_:ver:_) -> ver + _ -> "" + } + +ghcjsProgram :: Program +ghcjsProgram = (simpleProgram "ghcjs") { + programFindVersion = findProgramVersion "--numeric-ghcjs-version" id + } + +-- note: version is the version number of the GHC version that ghcjs-pkg was built with +ghcjsPkgProgram :: Program +ghcjsPkgProgram = (simpleProgram "ghcjs-pkg") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "ghcjs-pkg --version" gives a string like + -- "GHCJS package manager version 6.4.1" + case words str of + (_:_:_:_:ver:_) -> ver + _ -> "" + } + +hmakeProgram :: Program +hmakeProgram = (simpleProgram "hmake") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "hmake --version" gives a string line + -- "/usr/local/bin/hmake: 3.13 (2006-11-01)" + case words str of + (_:ver:_) -> ver + _ -> "" + } + +jhcProgram :: Program +jhcProgram = (simpleProgram "jhc") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- invoking "jhc --version" gives a string like + -- "jhc 0.3.20080208 (wubgipkamcep-2) + -- compiled by ghc-6.8 on a x86_64 running linux" + case words str of + (_:ver:_) -> ver + _ -> "" + } + +uhcProgram :: Program +uhcProgram = (simpleProgram 
"uhc") { + programFindVersion = findProgramVersion "--version-dotted" id + } + +hpcProgram :: Program +hpcProgram = (simpleProgram "hpc") + { + programFindVersion = findProgramVersion "version" $ \str -> + case words str of + (_ : _ : _ : ver : _) -> ver + _ -> "" + } + +-- This represents a haskell-suite compiler. Of course, the compiler +-- itself probably is not called "haskell-suite", so this is not a real +-- program. (But we don't know statically the name of the actual compiler, +-- so this is the best we can do.) +-- +-- Having this Program value serves two purposes: +-- +-- 1. We can accept options for the compiler in the form of +-- +-- --haskell-suite-option(s)=... +-- +-- 2. We can find a program later using this static id (with +-- requireProgram). +-- +-- The path to the real compiler is found and recorded in the ProgramDb +-- during the configure phase. +haskellSuiteProgram :: Program +haskellSuiteProgram = (simpleProgram "haskell-suite") { + -- pretend that the program exists, otherwise it won't be in the + -- "configured" state + programFindLocation = \_verbosity _searchPath -> + return $ Just ("haskell-suite-dummy-location", []) + } + +-- This represent a haskell-suite package manager. See the comments for +-- haskellSuiteProgram. +haskellSuitePkgProgram :: Program +haskellSuitePkgProgram = (simpleProgram "haskell-suite-pkg") { + programFindLocation = \_verbosity _searchPath -> + return $ Just ("haskell-suite-pkg-dummy-location", []) + } + + +happyProgram :: Program +happyProgram = (simpleProgram "happy") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "happy --version" gives a string like + -- "Happy Version 1.16 Copyright (c) ...." 
+ case words str of + (_:_:ver:_) -> ver + _ -> "" + } + +alexProgram :: Program +alexProgram = (simpleProgram "alex") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "alex --version" gives a string like + -- "Alex version 2.1.0, (c) 2003 Chris Dornan and Simon Marlow" + case words str of + (_:_:ver:_) -> takeWhile (\x -> isDigit x || x == '.') ver + _ -> "" + } + +gccProgram :: Program +gccProgram = (simpleProgram "gcc") { + programFindVersion = findProgramVersion "-dumpversion" id + } + +arProgram :: Program +arProgram = simpleProgram "ar" + +stripProgram :: Program +stripProgram = (simpleProgram "strip") { + programFindVersion = \verbosity -> + findProgramVersion "--version" stripExtractVersion (lessVerbose verbosity) + } + +hsc2hsProgram :: Program +hsc2hsProgram = (simpleProgram "hsc2hs") { + programFindVersion = + findProgramVersion "--version" $ \str -> + -- Invoking "hsc2hs --version" gives a string like "hsc2hs version 0.66" + case words str of + (_:_:ver:_) -> ver + _ -> "" + } + +c2hsProgram :: Program +c2hsProgram = (simpleProgram "c2hs") { + programFindVersion = findProgramVersion "--numeric-version" id + } + +cpphsProgram :: Program +cpphsProgram = (simpleProgram "cpphs") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "cpphs --version" gives a string like "cpphs 1.3" + case words str of + (_:ver:_) -> ver + _ -> "" + } + +hscolourProgram :: Program +hscolourProgram = (simpleProgram "hscolour") { + programFindLocation = \v p -> findProgramOnSearchPath v p "HsColour", + programFindVersion = findProgramVersion "-version" $ \str -> + -- Invoking "HsColour -version" gives a string like "HsColour 1.7" + case words str of + (_:ver:_) -> ver + _ -> "" + } + +-- TODO: Ensure that doctest is built against the same GHC as the one +-- that's being used. Same for haddock. @phadej pointed this out. 
+doctestProgram :: Program +doctestProgram = (simpleProgram "doctest") { + programFindLocation = \v p -> findProgramOnSearchPath v p "doctest" + , programFindVersion = findProgramVersion "--version" $ \str -> + -- "doctest version 0.11.2" + case words str of + (_:_:ver:_) -> ver + _ -> "" + } + +haddockProgram :: Program +haddockProgram = (simpleProgram "haddock") { + programFindVersion = findProgramVersion "--version" $ \str -> + -- Invoking "haddock --version" gives a string like + -- "Haddock version 0.8, (c) Simon Marlow 2006" + case words str of + (_:_:ver:_) -> takeWhile (`elem` ('.':['0'..'9'])) ver + _ -> "", + + programNormaliseArgs = \_ _ args -> args + } + +greencardProgram :: Program +greencardProgram = simpleProgram "greencard" + +ldProgram :: Program +ldProgram = simpleProgram "ld" + +tarProgram :: Program +tarProgram = (simpleProgram "tar") { + -- See #1901. Some versions of 'tar' (OpenBSD, NetBSD, ...) don't support the + -- '--format' option. + programPostConf = \verbosity tarProg -> do + tarHelpOutput <- getProgramInvocationOutput + verbosity (programInvocation tarProg ["--help"]) + -- Some versions of tar don't support '--help'. 
+ `catchIO` (\_ -> return "") + let k = "Supports --format" + v = if ("--format" `isInfixOf` tarHelpOutput) then "YES" else "NO" + m = Map.insert k v (programProperties tarProg) + return $ tarProg { programProperties = m } + } + +cppProgram :: Program +cppProgram = simpleProgram "cpp" + +pkgConfigProgram :: Program +pkgConfigProgram = (simpleProgram "pkg-config") { + programFindVersion = findProgramVersion "--version" id + } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Db.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Db.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Db.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Db.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,485 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE DeriveDataTypeable #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Db +-- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This provides a 'ProgramDb' type which holds configured and not-yet +-- configured programs. It is the parameter to lots of actions elsewhere in +-- Cabal that need to look up and run programs. If we had a Cabal monad, +-- the 'ProgramDb' would probably be a reader or state component of it. +-- +-- One nice thing about using it is that any program that is +-- registered with Cabal will get some \"configure\" and \".cabal\" +-- helpers like --with-foo-args --foo-path= and extra-foo-args. +-- +-- There's also a hook for adding programs in a Setup.lhs script. See +-- hookedPrograms in 'Distribution.Simple.UserHooks'. 
This gives a +-- hook user the ability to get the above flags and such so that they +-- don't have to write all the PATH logic inside Setup.lhs. + +module Distribution.Simple.Program.Db ( + -- * The collection of configured programs we can run + ProgramDb, + emptyProgramDb, + defaultProgramDb, + restoreProgramDb, + + -- ** Query and manipulate the program db + addKnownProgram, + addKnownPrograms, + lookupKnownProgram, + knownPrograms, + getProgramSearchPath, + setProgramSearchPath, + modifyProgramSearchPath, + userSpecifyPath, + userSpecifyPaths, + userMaybeSpecifyPath, + userSpecifyArgs, + userSpecifyArgss, + userSpecifiedArgs, + lookupProgram, + updateProgram, + configuredPrograms, + + -- ** Query and manipulate the program db + configureProgram, + configureAllKnownPrograms, + unconfigureProgram, + lookupProgramVersion, + reconfigurePrograms, + requireProgram, + requireProgramVersion, + + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Simple.Program.Types +import Distribution.Simple.Program.Find +import Distribution.Simple.Program.Builtin +import Distribution.Simple.Utils +import Distribution.Version +import Distribution.Text +import Distribution.Verbosity + +import Control.Monad (join) +import Data.Tuple (swap) +import qualified Data.Map as Map + +-- ------------------------------------------------------------ +-- * Programs database +-- ------------------------------------------------------------ + +-- | The configuration is a collection of information about programs. It +-- contains information both about configured programs and also about programs +-- that we are yet to configure. +-- +-- The idea is that we start from a collection of unconfigured programs and one +-- by one we try to configure them at which point we move them into the +-- configured collection. For unconfigured programs we record not just the +-- 'Program' but also any user-provided arguments and location for the program. 
+data ProgramDb = ProgramDb { + unconfiguredProgs :: UnconfiguredProgs, + progSearchPath :: ProgramSearchPath, + configuredProgs :: ConfiguredProgs + } + deriving (Typeable) + +type UnconfiguredProgram = (Program, Maybe FilePath, [ProgArg]) +type UnconfiguredProgs = Map.Map String UnconfiguredProgram +type ConfiguredProgs = Map.Map String ConfiguredProgram + + +emptyProgramDb :: ProgramDb +emptyProgramDb = ProgramDb Map.empty defaultProgramSearchPath Map.empty + +defaultProgramDb :: ProgramDb +defaultProgramDb = restoreProgramDb builtinPrograms emptyProgramDb + + +-- internal helpers: +updateUnconfiguredProgs :: (UnconfiguredProgs -> UnconfiguredProgs) + -> ProgramDb -> ProgramDb +updateUnconfiguredProgs update progdb = + progdb { unconfiguredProgs = update (unconfiguredProgs progdb) } + +updateConfiguredProgs :: (ConfiguredProgs -> ConfiguredProgs) + -> ProgramDb -> ProgramDb +updateConfiguredProgs update progdb = + progdb { configuredProgs = update (configuredProgs progdb) } + + +-- Read & Show instances are based on listToFM + +-- | Note that this instance does not preserve the known 'Program's. +-- See 'restoreProgramDb' for details. +-- +instance Show ProgramDb where + show = show . Map.toAscList . configuredProgs + +-- | Note that this instance does not preserve the known 'Program's. +-- See 'restoreProgramDb' for details. +-- +instance Read ProgramDb where + readsPrec p s = + [ (emptyProgramDb { configuredProgs = Map.fromList s' }, r) + | (s', r) <- readsPrec p s ] + +-- | Note that this instance does not preserve the known 'Program's. +-- See 'restoreProgramDb' for details. +-- +instance Binary ProgramDb where + put db = do + put (progSearchPath db) + put (configuredProgs db) + + get = do + searchpath <- get + progs <- get + return $! 
emptyProgramDb { + progSearchPath = searchpath, + configuredProgs = progs + } + + +-- | The 'Read'\/'Show' and 'Binary' instances do not preserve all the +-- unconfigured 'Programs' because 'Program' is not in 'Read'\/'Show' because +-- it contains functions. So to fully restore a deserialised 'ProgramDb' use +-- this function to add back all the known 'Program's. +-- +-- * It does not add the default programs, but you probably want them, use +-- 'builtinPrograms' in addition to any extra you might need. +-- +restoreProgramDb :: [Program] -> ProgramDb -> ProgramDb +restoreProgramDb = addKnownPrograms + + +-- ------------------------------- +-- Managing unconfigured programs + +-- | Add a known program that we may configure later +-- +addKnownProgram :: Program -> ProgramDb -> ProgramDb +addKnownProgram prog = updateUnconfiguredProgs $ + Map.insertWith combine (programName prog) (prog, Nothing, []) + where combine _ (_, path, args) = (prog, path, args) + + +addKnownPrograms :: [Program] -> ProgramDb -> ProgramDb +addKnownPrograms progs progdb = foldl' (flip addKnownProgram) progdb progs + + +lookupKnownProgram :: String -> ProgramDb -> Maybe Program +lookupKnownProgram name = + fmap (\(p,_,_)->p) . Map.lookup name . unconfiguredProgs + + +knownPrograms :: ProgramDb -> [(Program, Maybe ConfiguredProgram)] +knownPrograms progdb = + [ (p,p') | (p,_,_) <- Map.elems (unconfiguredProgs progdb) + , let p' = Map.lookup (programName p) (configuredProgs progdb) ] + +-- | Get the current 'ProgramSearchPath' used by the 'ProgramDb'. +-- This is the default list of locations where programs are looked for when +-- configuring them. This can be overridden for specific programs (with +-- 'userSpecifyPath'), and specific known programs can modify or ignore this +-- search path in their own configuration code. +-- +getProgramSearchPath :: ProgramDb -> ProgramSearchPath +getProgramSearchPath = progSearchPath + +-- | Change the current 'ProgramSearchPath' used by the 'ProgramDb'. 
+-- This will affect programs that are configured from here on, so you +-- should usually set it before configuring any programs. +-- +setProgramSearchPath :: ProgramSearchPath -> ProgramDb -> ProgramDb +setProgramSearchPath searchpath db = db { progSearchPath = searchpath } + +-- | Modify the current 'ProgramSearchPath' used by the 'ProgramDb'. +-- This will affect programs that are configured from here on, so you +-- should usually modify it before configuring any programs. +-- +modifyProgramSearchPath :: (ProgramSearchPath -> ProgramSearchPath) + -> ProgramDb + -> ProgramDb +modifyProgramSearchPath f db = + setProgramSearchPath (f $ getProgramSearchPath db) db + +-- |User-specify this path. Basically override any path information +-- for this program in the configuration. If it's not a known +-- program ignore it. +-- +userSpecifyPath :: String -- ^Program name + -> FilePath -- ^user-specified path to the program + -> ProgramDb -> ProgramDb +userSpecifyPath name path = updateUnconfiguredProgs $ + flip Map.update name $ \(prog, _, args) -> Just (prog, Just path, args) + + +userMaybeSpecifyPath :: String -> Maybe FilePath + -> ProgramDb -> ProgramDb +userMaybeSpecifyPath _ Nothing progdb = progdb +userMaybeSpecifyPath name (Just path) progdb = userSpecifyPath name path progdb + + +-- |User-specify the arguments for this program. Basically override +-- any args information for this program in the configuration. If it's +-- not a known program, ignore it.. +userSpecifyArgs :: String -- ^Program name + -> [ProgArg] -- ^user-specified args + -> ProgramDb + -> ProgramDb +userSpecifyArgs name args' = + updateUnconfiguredProgs + (flip Map.update name $ + \(prog, path, args) -> Just (prog, path, args ++ args')) + . updateConfiguredProgs + (flip Map.update name $ + \prog -> Just prog { programOverrideArgs = programOverrideArgs prog + ++ args' }) + + +-- | Like 'userSpecifyPath' but for a list of progs and their paths. 
+-- +userSpecifyPaths :: [(String, FilePath)] + -> ProgramDb + -> ProgramDb +userSpecifyPaths paths progdb = + foldl' (\progdb' (prog, path) -> userSpecifyPath prog path progdb') progdb paths + + +-- | Like 'userSpecifyPath' but for a list of progs and their args. +-- +userSpecifyArgss :: [(String, [ProgArg])] + -> ProgramDb + -> ProgramDb +userSpecifyArgss argss progdb = + foldl' (\progdb' (prog, args) -> userSpecifyArgs prog args progdb') progdb argss + + +-- | Get the path that has been previously specified for a program, if any. +-- +userSpecifiedPath :: Program -> ProgramDb -> Maybe FilePath +userSpecifiedPath prog = + join . fmap (\(_,p,_)->p) . Map.lookup (programName prog) . unconfiguredProgs + + +-- | Get any extra args that have been previously specified for a program. +-- +userSpecifiedArgs :: Program -> ProgramDb -> [ProgArg] +userSpecifiedArgs prog = + maybe [] (\(_,_,as)->as) . Map.lookup (programName prog) . unconfiguredProgs + + +-- ----------------------------- +-- Managing configured programs + +-- | Try to find a configured program +lookupProgram :: Program -> ProgramDb -> Maybe ConfiguredProgram +lookupProgram prog = Map.lookup (programName prog) . configuredProgs + + +-- | Update a configured program in the database. +updateProgram :: ConfiguredProgram -> ProgramDb + -> ProgramDb +updateProgram prog = updateConfiguredProgs $ + Map.insert (programId prog) prog + + +-- | List all configured programs. +configuredPrograms :: ProgramDb -> [ConfiguredProgram] +configuredPrograms = Map.elems . configuredProgs + +-- --------------------------- +-- Configuring known programs + +-- | Try to configure a specific program. If the program is already included in +-- the collection of unconfigured programs then we use any user-supplied +-- location and arguments. If the program gets configured successfully it gets +-- added to the configured collection. +-- +-- Note that it is not a failure if the program cannot be configured. 
It's only +-- a failure if the user supplied a location and the program could not be found +-- at that location. +-- +-- The reason for it not being a failure at this stage is that we don't know up +-- front all the programs we will need, so we try to configure them all. +-- To verify that a program was actually successfully configured use +-- 'requireProgram'. +-- +configureProgram :: Verbosity + -> Program + -> ProgramDb + -> IO ProgramDb +configureProgram verbosity prog progdb = do + let name = programName prog + maybeLocation <- case userSpecifiedPath prog progdb of + Nothing -> + programFindLocation prog verbosity (progSearchPath progdb) + >>= return . fmap (swap . fmap FoundOnSystem . swap) + Just path -> do + absolute <- doesExecutableExist path + if absolute + then return (Just (UserSpecified path, [])) + else findProgramOnSearchPath verbosity (progSearchPath progdb) path + >>= maybe (die' verbosity notFound) + (return . Just . swap . fmap UserSpecified . swap) + where notFound = "Cannot find the program '" ++ name + ++ "'. User-specified path '" + ++ path ++ "' does not refer to an executable and " + ++ "the program is not on the system path." + case maybeLocation of + Nothing -> return progdb + Just (location, triedLocations) -> do + version <- programFindVersion prog verbosity (locationPath location) + newPath <- programSearchPathAsPATHVar (progSearchPath progdb) + let configuredProg = ConfiguredProgram { + programId = name, + programVersion = version, + programDefaultArgs = [], + programOverrideArgs = userSpecifiedArgs prog progdb, + programOverrideEnv = [("PATH", Just newPath)], + programProperties = Map.empty, + programLocation = location, + programMonitorFiles = triedLocations + } + configuredProg' <- programPostConf prog verbosity configuredProg + return (updateConfiguredProgs (Map.insert name configuredProg') progdb) + + +-- | Configure a bunch of programs using 'configureProgram'. Just a 'foldM'. 
+-- +configurePrograms :: Verbosity + -> [Program] + -> ProgramDb + -> IO ProgramDb +configurePrograms verbosity progs progdb = + foldM (flip (configureProgram verbosity)) progdb progs + + +-- | Unconfigure a program. This is basically a hack and you shouldn't +-- use it, but it can be handy for making sure a 'requireProgram' +-- actually reconfigures. +unconfigureProgram :: String -> ProgramDb -> ProgramDb +unconfigureProgram progname = + updateConfiguredProgs $ Map.delete progname + +-- | Try to configure all the known programs that have not yet been configured. +-- +configureAllKnownPrograms :: Verbosity + -> ProgramDb + -> IO ProgramDb +configureAllKnownPrograms verbosity progdb = + configurePrograms verbosity + [ prog | (prog,_,_) <- Map.elems notYetConfigured ] progdb + where + notYetConfigured = unconfiguredProgs progdb + `Map.difference` configuredProgs progdb + + +-- | reconfigure a bunch of programs given new user-specified args. It takes +-- the same inputs as 'userSpecifyPath' and 'userSpecifyArgs' and for all progs +-- with a new path it calls 'configureProgram'. +-- +reconfigurePrograms :: Verbosity + -> [(String, FilePath)] + -> [(String, [ProgArg])] + -> ProgramDb + -> IO ProgramDb +reconfigurePrograms verbosity paths argss progdb = do + configurePrograms verbosity progs + . userSpecifyPaths paths + . userSpecifyArgss argss + $ progdb + + where + progs = catMaybes [ lookupKnownProgram name progdb | (name,_) <- paths ] + + +-- | Check that a program is configured and available to be run. +-- +-- It raises an exception if the program could not be configured, otherwise +-- it returns the configured program. 
+-- +requireProgram :: Verbosity -> Program -> ProgramDb + -> IO (ConfiguredProgram, ProgramDb) +requireProgram verbosity prog progdb = do + + -- If it's not already been configured, try to configure it now + progdb' <- case lookupProgram prog progdb of + Nothing -> configureProgram verbosity prog progdb + Just _ -> return progdb + + case lookupProgram prog progdb' of + Nothing -> die' verbosity notFound + Just configuredProg -> return (configuredProg, progdb') + + where notFound = "The program '" ++ programName prog + ++ "' is required but it could not be found." + + +-- | Check that a program is configured and available to be run. +-- +-- Additionally check that the program version number is suitable and return +-- it. For example you could require 'AnyVersion' or @'orLaterVersion' +-- ('Version' [1,0] [])@ +-- +-- It returns the configured program, its version number and a possibly updated +-- 'ProgramDb'. If the program could not be configured or the version is +-- unsuitable, it returns an error value. +-- +lookupProgramVersion + :: Verbosity -> Program -> VersionRange -> ProgramDb + -> IO (Either String (ConfiguredProgram, Version, ProgramDb)) +lookupProgramVersion verbosity prog range programDb = do + + -- If it's not already been configured, try to configure it now + programDb' <- case lookupProgram prog programDb of + Nothing -> configureProgram verbosity prog programDb + Just _ -> return programDb + + case lookupProgram prog programDb' of + Nothing -> return $! Left notFound + Just configuredProg@ConfiguredProgram { programLocation = location } -> + case programVersion configuredProg of + Just version + | withinRange version range -> + return $! Right (configuredProg, version ,programDb') + | otherwise -> + return $! Left (badVersion version location) + Nothing -> + return $! Left (unknownVersion location) + + where notFound = "The program '" + ++ programName prog ++ "'" ++ versionRequirement + ++ " is required but it could not be found." 
+ badVersion v l = "The program '" + ++ programName prog ++ "'" ++ versionRequirement + ++ " is required but the version found at " + ++ locationPath l ++ " is version " ++ display v + unknownVersion l = "The program '" + ++ programName prog ++ "'" ++ versionRequirement + ++ " is required but the version of " + ++ locationPath l ++ " could not be determined." + versionRequirement + | isAnyVersion range = "" + | otherwise = " version " ++ display range + +-- | Like 'lookupProgramVersion', but raises an exception in case of error +-- instead of returning 'Left errMsg'. +-- +requireProgramVersion :: Verbosity -> Program -> VersionRange + -> ProgramDb + -> IO (ConfiguredProgram, Version, ProgramDb) +requireProgramVersion verbosity prog range programDb = + join $ either (die' verbosity) return `fmap` + lookupProgramVersion verbosity prog range programDb diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Find.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Find.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Find.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Find.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,187 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Find +-- Copyright : Duncan Coutts 2013 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- A somewhat extended notion of the normal program search path concept. +-- +-- Usually when finding executables we just want to look in the usual places +-- using the OS's usual method for doing so. 
In Haskell the normal OS-specific +-- method is captured by 'findExecutable'. On all common OSs that makes use of +-- a @PATH@ environment variable, (though on Windows it is not just the @PATH@). +-- +-- However it is sometimes useful to be able to look in additional locations +-- without having to change the process-global @PATH@ environment variable. +-- So we need an extension of the usual 'findExecutable' that can look in +-- additional locations, either before, after or instead of the normal OS +-- locations. +-- +module Distribution.Simple.Program.Find ( + -- * Program search path + ProgramSearchPath, + ProgramSearchPathEntry(..), + defaultProgramSearchPath, + findProgramOnSearchPath, + programSearchPathAsPATHVar, + getSystemSearchPath, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Verbosity +import Distribution.Simple.Utils +import Distribution.System +import Distribution.Compat.Environment + +import qualified System.Directory as Directory + ( findExecutable ) +import System.FilePath as FilePath + ( (), (<.>), splitSearchPath, searchPathSeparator, getSearchPath + , takeDirectory ) +#if defined(mingw32_HOST_OS) +import qualified System.Win32 as Win32 +#endif + +-- | A search path to use when locating executables. This is analogous +-- to the unix @$PATH@ or win32 @%PATH%@ but with the ability to use +-- the system default method for finding executables ('findExecutable' which +-- on unix is simply looking on the @$PATH@ but on win32 is a bit more +-- complicated). +-- +-- The default to use is @[ProgSearchPathDefault]@ but you can add extra dirs +-- either before, after or instead of the default, e.g. here we add an extra +-- dir to search after the usual ones. 
+-- +-- > ['ProgramSearchPathDefault', 'ProgramSearchPathDir' dir] +-- +type ProgramSearchPath = [ProgramSearchPathEntry] +data ProgramSearchPathEntry = + ProgramSearchPathDir FilePath -- ^ A specific dir + | ProgramSearchPathDefault -- ^ The system default + deriving (Eq, Generic) + +instance Binary ProgramSearchPathEntry + +defaultProgramSearchPath :: ProgramSearchPath +defaultProgramSearchPath = [ProgramSearchPathDefault] + +findProgramOnSearchPath :: Verbosity -> ProgramSearchPath + -> FilePath -> IO (Maybe (FilePath, [FilePath])) +findProgramOnSearchPath verbosity searchpath prog = do + debug verbosity $ "Searching for " ++ prog ++ " in path." + res <- tryPathElems [] searchpath + case res of + Nothing -> debug verbosity ("Cannot find " ++ prog ++ " on the path") + Just (path, _) -> debug verbosity ("Found " ++ prog ++ " at "++ path) + return res + where + tryPathElems :: [[FilePath]] -> [ProgramSearchPathEntry] + -> IO (Maybe (FilePath, [FilePath])) + tryPathElems _ [] = return Nothing + tryPathElems tried (pe:pes) = do + res <- tryPathElem pe + case res of + (Nothing, notfoundat) -> tryPathElems (notfoundat : tried) pes + (Just foundat, notfoundat) -> return (Just (foundat, alltried)) + where + alltried = concat (reverse (notfoundat : tried)) + + tryPathElem :: ProgramSearchPathEntry -> NoCallStackIO (Maybe FilePath, [FilePath]) + tryPathElem (ProgramSearchPathDir dir) = + findFirstExe [ dir prog <.> ext | ext <- exeExtensions ] + + -- On windows, getSystemSearchPath is not guaranteed 100% correct so we + -- use findExecutable and then approximate the not-found-at locations. 
+ tryPathElem ProgramSearchPathDefault | buildOS == Windows = do + mExe <- findExecutable prog + syspath <- getSystemSearchPath + case mExe of + Nothing -> + let notfoundat = [ dir prog | dir <- syspath ] in + return (Nothing, notfoundat) + + Just foundat -> do + let founddir = takeDirectory foundat + notfoundat = [ dir prog + | dir <- takeWhile (/= founddir) syspath ] + return (Just foundat, notfoundat) + + -- On other OSs we can just do the simple thing + tryPathElem ProgramSearchPathDefault = do + dirs <- getSystemSearchPath + findFirstExe [ dir prog <.> ext | dir <- dirs, ext <- exeExtensions ] + + findFirstExe :: [FilePath] -> NoCallStackIO (Maybe FilePath, [FilePath]) + findFirstExe = go [] + where + go fs' [] = return (Nothing, reverse fs') + go fs' (f:fs) = do + isExe <- doesExecutableExist f + if isExe + then return (Just f, reverse fs') + else go (f:fs') fs + +-- | Interpret a 'ProgramSearchPath' to construct a new @$PATH@ env var. +-- Note that this is close but not perfect because on Windows the search +-- algorithm looks at more than just the @%PATH%@. +programSearchPathAsPATHVar :: ProgramSearchPath -> NoCallStackIO String +programSearchPathAsPATHVar searchpath = do + ess <- traverse getEntries searchpath + return (intercalate [searchPathSeparator] (concat ess)) + where + getEntries (ProgramSearchPathDir dir) = return [dir] + getEntries ProgramSearchPathDefault = do + env <- getEnvironment + return (maybe [] splitSearchPath (lookup "PATH" env)) + +-- | Get the system search path. On Unix systems this is just the @$PATH@ env +-- var, but on windows it's a bit more complicated. 
+-- +getSystemSearchPath :: NoCallStackIO [FilePath] +getSystemSearchPath = fmap nub $ do +#if defined(mingw32_HOST_OS) + processdir <- takeDirectory `fmap` Win32.getModuleFileName Win32.nullHANDLE + currentdir <- Win32.getCurrentDirectory + systemdir <- Win32.getSystemDirectory + windowsdir <- Win32.getWindowsDirectory + pathdirs <- FilePath.getSearchPath + let path = processdir : currentdir + : systemdir : windowsdir + : pathdirs + return path +#else + FilePath.getSearchPath +#endif + +#ifdef MIN_VERSION_directory +#if MIN_VERSION_directory(1,2,1) +#define HAVE_directory_121 +#endif +#endif + +findExecutable :: FilePath -> NoCallStackIO (Maybe FilePath) +#ifdef HAVE_directory_121 +findExecutable = Directory.findExecutable +#else +findExecutable prog = do + -- With directory < 1.2.1 'findExecutable' doesn't check that the path + -- really refers to an executable. + mExe <- Directory.findExecutable prog + case mExe of + Just exe -> do + exeExists <- doesExecutableExist exe + if exeExists + then return mExe + else return Nothing + _ -> return mExe +#endif + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/GHC.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/GHC.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/GHC.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/GHC.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,828 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} + +module Distribution.Simple.Program.GHC ( + GhcOptions(..), + GhcMode(..), + GhcOptimisation(..), + GhcDynLinkMode(..), + GhcProfAuto(..), + + ghcInvocation, + renderGhcOptions, + + runGHC, + + packageDbArgsDb, + normaliseGhcArgs + + ) where + +import Prelude 
() +import Distribution.Compat.Prelude + +import Distribution.Backpack +import Distribution.Simple.GHC.ImplInfo +import Distribution.PackageDescription hiding (Flag) +import Distribution.ModuleName +import Distribution.Simple.Compiler hiding (Flag) +import qualified Distribution.Simple.Compiler as Compiler (Flag) +import Distribution.Simple.Flag +import Distribution.Simple.Program.Types +import Distribution.Simple.Program.Run +import Distribution.System +import Distribution.Text +import Distribution.Types.ComponentId +import Distribution.Verbosity +import Distribution.Version +import Distribution.Utils.NubList +import Language.Haskell.Extension + +import Data.List (stripPrefix) +import qualified Data.Map as Map +import Data.Monoid (All(..), Any(..), Endo(..), First(..), Last(..)) +import Data.Set (Set) +import qualified Data.Set as Set + +normaliseGhcArgs :: Maybe Version -> PackageDescription -> [String] -> [String] +normaliseGhcArgs (Just ghcVersion) PackageDescription{..} ghcArgs + | ghcVersion `withinRange` supportedGHCVersions + = argumentFilters . filter simpleFilters . filterRtsOpts $ ghcArgs + where + supportedGHCVersions :: VersionRange + supportedGHCVersions = intersectVersionRanges + (orLaterVersion (mkVersion [8,0])) + (earlierVersion (mkVersion [8,7])) + + from :: Monoid m => [Int] -> m -> m + from version flags + | ghcVersion `withinRange` orLaterVersion (mkVersion version) = flags + | otherwise = mempty + + to :: Monoid m => [Int] -> m -> m + to version flags + | ghcVersion `withinRange` earlierVersion (mkVersion version) = flags + | otherwise = mempty + + checkGhcFlags :: forall m . 
Monoid m => ([String] -> m) -> m + checkGhcFlags fun = mconcat + [ fun ghcArgs + , checkComponentFlags libBuildInfo pkgLibs + , checkComponentFlags buildInfo executables + , checkComponentFlags testBuildInfo testSuites + , checkComponentFlags benchmarkBuildInfo benchmarks + ] + where + pkgLibs = maybeToList library ++ subLibraries + + checkComponentFlags :: (a -> BuildInfo) -> [a] -> m + checkComponentFlags getInfo = foldMap (checkComponent . getInfo) + where + checkComponent :: BuildInfo -> m + checkComponent = foldMap fun . filterGhcOptions . allGhcOptions + + allGhcOptions :: BuildInfo -> [(CompilerFlavor, [String])] + allGhcOptions = + mconcat [options, profOptions, sharedOptions, staticOptions] + + filterGhcOptions :: [(CompilerFlavor, [String])] -> [[String]] + filterGhcOptions l = [opts | (GHC, opts) <- l] + + safeToFilterWarnings :: Bool + safeToFilterWarnings = getAll $ checkGhcFlags checkWarnings + where + checkWarnings :: [String] -> All + checkWarnings = All . Set.null . foldr alter Set.empty + + alter :: String -> Set String -> Set String + alter flag = appEndo $ mconcat + [ \s -> Endo $ if s == "-Werror" then Set.insert s else id + , \s -> Endo $ if s == "-Wwarn" then const Set.empty else id + , \s -> from [8,6] . Endo $ + if s == "-Werror=compat" + then Set.union compatWarningSet else id + , \s -> from [8,6] . Endo $ + if s == "-Wno-error=compat" + then (`Set.difference` compatWarningSet) else id + , \s -> from [8,6] . 
Endo $ + if s == "-Wwarn=compat" + then (`Set.difference` compatWarningSet) else id + , from [8,4] $ markFlag "-Werror=" Set.insert + , from [8,4] $ markFlag "-Wwarn=" Set.delete + , from [8,4] $ markFlag "-Wno-error=" Set.delete + ] flag + + markFlag + :: String + -> (String -> Set String -> Set String) + -> String + -> Endo (Set String) + markFlag name update flag = Endo $ case stripPrefix name flag of + Just rest | not (null rest) && rest /= "compat" -> update rest + _ -> id + + flagArgumentFilter :: [String] -> [String] -> [String] + flagArgumentFilter flags = go + where + makeFilter :: String -> String -> First ([String] -> [String]) + makeFilter flag arg = First $ filterRest <$> stripPrefix flag arg + where + filterRest leftOver = case dropEq leftOver of + [] -> drop 1 + _ -> id + + checkFilter :: String -> Maybe ([String] -> [String]) + checkFilter = getFirst . mconcat (map makeFilter flags) + + go :: [String] -> [String] + go [] = [] + go (arg:args) = case checkFilter arg of + Just f -> go (f args) + Nothing -> arg : go args + + argumentFilters :: [String] -> [String] + argumentFilters = flagArgumentFilter + ["-ghci-script", "-H", "-interactive-print"] + + filterRtsOpts :: [String] -> [String] + filterRtsOpts = go False + where + go :: Bool -> [String] -> [String] + go _ [] = [] + go _ ("+RTS":opts) = go True opts + go _ ("-RTS":opts) = go False opts + go isRTSopts (opt:opts) = addOpt $ go isRTSopts opts + where + addOpt | isRTSopts = id + | otherwise = (opt:) + + simpleFilters :: String -> Bool + simpleFilters = not . getAny . mconcat + [ flagIn simpleFlags + , Any . isPrefixOf "-ddump-" + , Any . isPrefixOf "-dsuppress-" + , Any . isPrefixOf "-dno-suppress-" + , flagIn $ invertibleFlagSet "-" ["ignore-dot-ghci"] + , flagIn . invertibleFlagSet "-f" . 
mconcat $ + [ [ "reverse-errors", "warn-unused-binds", "break-on-error" + , "break-on-exception", "print-bind-result" + , "print-bind-contents", "print-evld-with-show" + , "implicit-import-qualified", "error-spans" + ] + , from [8,2] + [ "diagnostics-show-caret", "local-ghci-history" + , "show-warning-groups", "hide-source-paths" + , "show-hole-constraints" + ] + , from [8,4] ["show-loaded-modules"] + , from [8,6] [ "ghci-leak-check", "no-it" ] + ] + , flagIn . invertibleFlagSet "-d" $ [ "ppr-case-as-let", "ppr-ticks" ] + , isOptIntFlag + , isIntFlag + , if safeToFilterWarnings + then isWarning <> (Any . ("-w"==)) + else mempty + , from [8,6] $ + if safeToFilterHoles + then isTypedHoleFlag + else mempty + ] + + flagIn :: Set String -> String -> Any + flagIn set flag = Any $ Set.member flag set + + isWarning :: String -> Any + isWarning = mconcat $ map ((Any .) . isPrefixOf) + ["-fwarn-", "-fno-warn-", "-W", "-Wno-"] + + simpleFlags :: Set String + simpleFlags = Set.fromList . mconcat $ + [ [ "-n", "-#include", "-Rghc-timing", "-dsuppress-all", "-dstg-stats" + , "-dth-dec-file", "-dsource-stats", "-dverbose-core2core" + , "-dverbose-stg2stg", "-dcore-lint", "-dstg-lint", "-dcmm-lint" + , "-dasm-lint", "-dannot-lint", "-dshow-passes", "-dfaststring-stats" + , "-fno-max-relevant-binds", "-recomp", "-no-recomp", "-fforce-recomp" + , "-fno-force-recomp" + ] + + , from [8,2] + [ "-fno-max-errors", "-fdiagnostics-color=auto" + , "-fdiagnostics-color=always", "-fdiagnostics-color=never" + , "-dppr-debug", "-dno-debug-output" + ] + + , from [8,4] [ "-ddebug-output" ] + , from [8,4] $ to [8,6] [ "-fno-max-valid-substitutions" ] + , from [8,6] [ "-dhex-word-literals" ] + ] + + isOptIntFlag :: String -> Any + isOptIntFlag = mconcat . map (dropIntFlag True) $ ["-v", "-j"] + + isIntFlag :: String -> Any + isIntFlag = mconcat . map (dropIntFlag False) . 
mconcat $ + [ [ "-fmax-relevant-binds", "-ddpr-user-length", "-ddpr-cols" + , "-dtrace-level", "-fghci-hist-size" ] + , from [8,2] ["-fmax-uncovered-patterns", "-fmax-errors"] + , from [8,4] $ to [8,6] ["-fmax-valid-substitutions"] + ] + + dropIntFlag :: Bool -> String -> String -> Any + dropIntFlag isOpt flag input = Any $ case stripPrefix flag input of + Nothing -> False + Just rest | isOpt && null rest -> True + | otherwise -> case parseInt rest of + Just _ -> True + Nothing -> False + where + parseInt :: String -> Maybe Int + parseInt = readMaybe . dropEq + + readMaybe :: Read a => String -> Maybe a + readMaybe s = case reads s of + [(x, "")] -> Just x + _ -> Nothing + + dropEq :: String -> String + dropEq ('=':s) = s + dropEq s = s + + invertibleFlagSet :: String -> [String] -> Set String + invertibleFlagSet prefix flagNames = + Set.fromList $ (++) <$> [prefix, prefix ++ "no-"] <*> flagNames + + compatWarningSet :: Set String + compatWarningSet = Set.fromList $ mconcat + [ from [8,6] + [ "missing-monadfail-instances", "semigroup" + , "noncanonical-monoid-instances", "implicit-kind-vars" ] + ] + + safeToFilterHoles :: Bool + safeToFilterHoles = getAll . checkGhcFlags $ fromLast . foldMap notDeferred + where + fromLast :: Last All -> All + fromLast = fromMaybe (All True) . getLast + + notDeferred :: String -> Last All + notDeferred "-fdefer-typed-holes" = Last . Just . All $ False + notDeferred "-fno-defer-typed-holes" = Last . Just . All $ True + notDeferred _ = Last Nothing + + isTypedHoleFlag :: String -> Any + isTypedHoleFlag = mconcat + [ flagIn . 
invertibleFlagSet "-f" $ + [ "show-hole-constraints", "show-valid-substitutions" + , "show-valid-hole-fits", "sort-valid-hole-fits" + , "sort-by-size-hole-fits", "sort-by-subsumption-hole-fits" + , "abstract-refinement-hole-fits", "show-provenance-of-hole-fits" + , "show-hole-matches-of-hole-fits", "show-type-of-hole-fits" + , "show-type-app-of-hole-fits", "show-type-app-vars-of-hole-fits" + , "unclutter-valid-hole-fits" + ] + , flagIn . Set.fromList $ + [ "-fno-max-valid-hole-fits", "-fno-max-refinement-hole-fits" + , "-fno-refinement-level-hole-fits" ] + , mconcat . map (dropIntFlag False) $ + [ "-fmax-valid-hole-fits", "-fmax-refinement-hole-fits" + , "-frefinement-level-hole-fits" ] + ] + +normaliseGhcArgs _ _ args = args + +-- | A structured set of GHC options/flags +-- +-- Note that options containing lists fall into two categories: +-- +-- * options that can be safely deduplicated, e.g. input modules or +-- enabled extensions; +-- * options that cannot be deduplicated in general without changing +-- semantics, e.g. extra ghc options or linking options. +data GhcOptions = GhcOptions { + + -- | The major mode for the ghc invocation. + ghcOptMode :: Flag GhcMode, + + -- | Any extra options to pass directly to ghc. These go at the end and hence + -- override other stuff. + ghcOptExtra :: [String], + + -- | Extra default flags to pass directly to ghc. These go at the beginning + -- and so can be overridden by other stuff. + ghcOptExtraDefault :: [String], + + ----------------------- + -- Inputs and outputs + + -- | The main input files; could be .hs, .hi, .c, .o, depending on mode. + ghcOptInputFiles :: NubListR FilePath, + + -- | The names of input Haskell modules, mainly for @--make@ mode. + ghcOptInputModules :: NubListR ModuleName, + + -- | Location for output file; the @ghc -o@ flag. + ghcOptOutputFile :: Flag FilePath, + + -- | Location for dynamic output file in 'GhcStaticAndDynamic' mode; + -- the @ghc -dyno@ flag. 
+ ghcOptOutputDynFile :: Flag FilePath, + + -- | Start with an empty search path for Haskell source files; + -- the @ghc -i@ flag (@-i@ on its own with no path argument). + ghcOptSourcePathClear :: Flag Bool, + + -- | Search path for Haskell source files; the @ghc -i@ flag. + ghcOptSourcePath :: NubListR FilePath, + + ------------- + -- Packages + + -- | The unit ID the modules will belong to; the @ghc -this-unit-id@ + -- flag (or @-this-package-key@ or @-package-name@ on older + -- versions of GHC). This is a 'String' because we assume you've + -- already figured out what the correct format for this string is + -- (we need to handle backwards compatibility.) + ghcOptThisUnitId :: Flag String, + + -- | GHC doesn't make any assumptions about the format of + -- definite unit ids, so when we are instantiating a package it + -- needs to be told explicitly what the component being instantiated + -- is. This only gets set when 'ghcOptInstantiatedWith' is non-empty + ghcOptThisComponentId :: Flag ComponentId, + + -- | How the requirements of the package being compiled are to + -- be filled. When typechecking an indefinite package, the 'OpenModule' + -- is always a 'OpenModuleVar'; otherwise, it specifies the installed module + -- that instantiates a package. + ghcOptInstantiatedWith :: [(ModuleName, OpenModule)], + + -- | No code? (But we turn on interface writing + ghcOptNoCode :: Flag Bool, + + -- | GHC package databases to use, the @ghc -package-conf@ flag. + ghcOptPackageDBs :: PackageDBStack, + + -- | The GHC packages to bring into scope when compiling, + -- the @ghc -package-id@ flags. + ghcOptPackages :: + NubListR (OpenUnitId, ModuleRenaming), + + -- | Start with a clean package set; the @ghc -hide-all-packages@ flag + ghcOptHideAllPackages :: Flag Bool, + + -- | Warn about modules, not listed in command line + ghcOptWarnMissingHomeModules :: Flag Bool, + + -- | Don't automatically link in Haskell98 etc; the @ghc + -- -no-auto-link-packages@ flag. 
+ ghcOptNoAutoLinkPackages :: Flag Bool, + + ----------------- + -- Linker stuff + + -- | Names of libraries to link in; the @ghc -l@ flag. + ghcOptLinkLibs :: [FilePath], + + -- | Search path for libraries to link in; the @ghc -L@ flag. + ghcOptLinkLibPath :: NubListR FilePath, + + -- | Options to pass through to the linker; the @ghc -optl@ flag. + ghcOptLinkOptions :: [String], + + -- | OSX only: frameworks to link in; the @ghc -framework@ flag. + ghcOptLinkFrameworks :: NubListR String, + + -- | OSX only: Search path for frameworks to link in; the + -- @ghc -framework-path@ flag. + ghcOptLinkFrameworkDirs :: NubListR String, + + -- | Don't do the link step, useful in make mode; the @ghc -no-link@ flag. + ghcOptNoLink :: Flag Bool, + + -- | Don't link in the normal RTS @main@ entry point; the @ghc -no-hs-main@ + -- flag. + ghcOptLinkNoHsMain :: Flag Bool, + + -- | Module definition files (Windows specific) + ghcOptLinkModDefFiles :: NubListR FilePath, + + -------------------- + -- C and CPP stuff + + -- | Options to pass through to the C compiler; the @ghc -optc@ flag. + ghcOptCcOptions :: [String], + + -- | Options to pass through to the C++ compiler. + ghcOptCxxOptions :: [String], + + -- | Options to pass through to CPP; the @ghc -optP@ flag. + ghcOptCppOptions :: [String], + + -- | Search path for CPP includes like header files; the @ghc -I@ flag. + ghcOptCppIncludePath :: NubListR FilePath, + + -- | Extra header files to include at CPP stage; the @ghc -optP-include@ flag. + ghcOptCppIncludes :: NubListR FilePath, + + -- | Extra header files to include for old-style FFI; the @ghc -#include@ flag. + ghcOptFfiIncludes :: NubListR FilePath, + + ---------------------------- + -- Language and extensions + + -- | The base language; the @ghc -XHaskell98@ or @-XHaskell2010@ flag. + ghcOptLanguage :: Flag Language, + + -- | The language extensions; the @ghc -X@ flag. 
+ ghcOptExtensions :: NubListR Extension, + + -- | A GHC version-dependent mapping of extensions to flags. This must be + -- set to be able to make use of the 'ghcOptExtensions'. + ghcOptExtensionMap :: Map Extension (Maybe Compiler.Flag), + + ---------------- + -- Compilation + + -- | What optimisation level to use; the @ghc -O@ flag. + ghcOptOptimisation :: Flag GhcOptimisation, + + -- | Emit debug info; the @ghc -g@ flag. + ghcOptDebugInfo :: Flag DebugInfoLevel, + + -- | Compile in profiling mode; the @ghc -prof@ flag. + ghcOptProfilingMode :: Flag Bool, + + -- | Automatically add profiling cost centers; the @ghc -fprof-auto*@ flags. + ghcOptProfilingAuto :: Flag GhcProfAuto, + + -- | Use the \"split sections\" feature; the @ghc -split-sections@ flag. + ghcOptSplitSections :: Flag Bool, + + -- | Use the \"split object files\" feature; the @ghc -split-objs@ flag. + ghcOptSplitObjs :: Flag Bool, + + -- | Run N jobs simultaneously (if possible). + ghcOptNumJobs :: Flag (Maybe Int), + + -- | Enable coverage analysis; the @ghc -fhpc -hpcdir@ flags. 
+ ghcOptHPCDir :: Flag FilePath, + + ---------------- + -- GHCi + + -- | Extra GHCi startup scripts; the @-ghci-script@ flag + ghcOptGHCiScripts :: [FilePath], + + ------------------------ + -- Redirecting outputs + + ghcOptHiSuffix :: Flag String, + ghcOptObjSuffix :: Flag String, + ghcOptDynHiSuffix :: Flag String, -- ^ only in 'GhcStaticAndDynamic' mode + ghcOptDynObjSuffix :: Flag String, -- ^ only in 'GhcStaticAndDynamic' mode + ghcOptHiDir :: Flag FilePath, + ghcOptObjDir :: Flag FilePath, + ghcOptOutputDir :: Flag FilePath, + ghcOptStubDir :: Flag FilePath, + + -------------------- + -- Creating libraries + + ghcOptDynLinkMode :: Flag GhcDynLinkMode, + ghcOptStaticLib :: Flag Bool, + ghcOptShared :: Flag Bool, + ghcOptFPic :: Flag Bool, + ghcOptDylibName :: Flag String, + ghcOptRPaths :: NubListR FilePath, + + --------------- + -- Misc flags + + -- | Get GHC to be quiet or verbose with what it's doing; the @ghc -v@ flag. + ghcOptVerbosity :: Flag Verbosity, + + -- | Put the extra folders in the PATH environment variable we invoke + -- GHC with + ghcOptExtraPath :: NubListR FilePath, + + -- | Let GHC know that it is Cabal that's calling it. + -- Modifies some of the GHC error messages. + ghcOptCabal :: Flag Bool + +} deriving (Show, Generic) + + +data GhcMode = GhcModeCompile -- ^ @ghc -c@ + | GhcModeLink -- ^ @ghc@ + | GhcModeMake -- ^ @ghc --make@ + | GhcModeInteractive -- ^ @ghci@ \/ @ghc --interactive@ + | GhcModeAbiHash -- ^ @ghc --abi-hash@ +-- | GhcModeDepAnalysis -- ^ @ghc -M@ +-- | GhcModeEvaluate -- ^ @ghc -e@ + deriving (Show, Eq) + +data GhcOptimisation = GhcNoOptimisation -- ^ @-O0@ + | GhcNormalOptimisation -- ^ @-O@ + | GhcMaximumOptimisation -- ^ @-O2@ + | GhcSpecialOptimisation String -- ^ e.g. 
@-Odph@ + deriving (Show, Eq) + +data GhcDynLinkMode = GhcStaticOnly -- ^ @-static@ + | GhcDynamicOnly -- ^ @-dynamic@ + | GhcStaticAndDynamic -- ^ @-static -dynamic-too@ + deriving (Show, Eq) + +data GhcProfAuto = GhcProfAutoAll -- ^ @-fprof-auto@ + | GhcProfAutoToplevel -- ^ @-fprof-auto-top@ + | GhcProfAutoExported -- ^ @-fprof-auto-exported@ + deriving (Show, Eq) + +runGHC :: Verbosity -> ConfiguredProgram -> Compiler -> Platform -> GhcOptions + -> IO () +runGHC verbosity ghcProg comp platform opts = do + runProgramInvocation verbosity (ghcInvocation ghcProg comp platform opts) + + +ghcInvocation :: ConfiguredProgram -> Compiler -> Platform -> GhcOptions + -> ProgramInvocation +ghcInvocation prog comp platform opts = + (programInvocation prog (renderGhcOptions comp platform opts)) { + progInvokePathEnv = fromNubListR (ghcOptExtraPath opts) + } + +renderGhcOptions :: Compiler -> Platform -> GhcOptions -> [String] +renderGhcOptions comp _platform@(Platform _arch os) opts + | compilerFlavor comp `notElem` [GHC, GHCJS] = + error $ "Distribution.Simple.Program.GHC.renderGhcOptions: " + ++ "compiler flavor must be 'GHC' or 'GHCJS'!" 
+ | otherwise = + concat + [ case flagToMaybe (ghcOptMode opts) of + Nothing -> [] + Just GhcModeCompile -> ["-c"] + Just GhcModeLink -> [] + Just GhcModeMake -> ["--make"] + Just GhcModeInteractive -> ["--interactive"] + Just GhcModeAbiHash -> ["--abi-hash"] +-- Just GhcModeDepAnalysis -> ["-M"] +-- Just GhcModeEvaluate -> ["-e", expr] + + , ghcOptExtraDefault opts + + , [ "-no-link" | flagBool ghcOptNoLink ] + + --------------- + -- Misc flags + + , maybe [] verbosityOpts (flagToMaybe (ghcOptVerbosity opts)) + + , [ "-fbuilding-cabal-package" | flagBool ghcOptCabal ] + + ---------------- + -- Compilation + + , case flagToMaybe (ghcOptOptimisation opts) of + Nothing -> [] + Just GhcNoOptimisation -> ["-O0"] + Just GhcNormalOptimisation -> ["-O"] + Just GhcMaximumOptimisation -> ["-O2"] + Just (GhcSpecialOptimisation s) -> ["-O" ++ s] -- eg -Odph + + , case flagToMaybe (ghcOptDebugInfo opts) of + Nothing -> [] + Just NoDebugInfo -> [] + Just MinimalDebugInfo -> ["-g1"] + Just NormalDebugInfo -> ["-g2"] + Just MaximalDebugInfo -> ["-g3"] + + , [ "-prof" | flagBool ghcOptProfilingMode ] + + , case flagToMaybe (ghcOptProfilingAuto opts) of + _ | not (flagBool ghcOptProfilingMode) + -> [] + Nothing -> [] + Just GhcProfAutoAll + | flagProfAuto implInfo -> ["-fprof-auto"] + | otherwise -> ["-auto-all"] -- not the same, but close + Just GhcProfAutoToplevel + | flagProfAuto implInfo -> ["-fprof-auto-top"] + | otherwise -> ["-auto-all"] + Just GhcProfAutoExported + | flagProfAuto implInfo -> ["-fprof-auto-exported"] + | otherwise -> ["-auto"] + + , [ "-split-sections" | flagBool ghcOptSplitSections ] + , [ "-split-objs" | flagBool ghcOptSplitObjs ] + + , case flagToMaybe (ghcOptHPCDir opts) of + Nothing -> [] + Just hpcdir -> ["-fhpc", "-hpcdir", hpcdir] + + , if parmakeSupported comp + then case ghcOptNumJobs opts of + NoFlag -> [] + Flag n -> ["-j" ++ maybe "" show n] + else [] + + -------------------- + -- Creating libraries + + , [ "-staticlib" | flagBool 
ghcOptStaticLib ] + , [ "-shared" | flagBool ghcOptShared ] + , case flagToMaybe (ghcOptDynLinkMode opts) of + Nothing -> [] + Just GhcStaticOnly -> ["-static"] + Just GhcDynamicOnly -> ["-dynamic"] + Just GhcStaticAndDynamic -> ["-static", "-dynamic-too"] + , [ "-fPIC" | flagBool ghcOptFPic ] + + , concat [ ["-dylib-install-name", libname] | libname <- flag ghcOptDylibName ] + + ------------------------ + -- Redirecting outputs + + , concat [ ["-osuf", suf] | suf <- flag ghcOptObjSuffix ] + , concat [ ["-hisuf", suf] | suf <- flag ghcOptHiSuffix ] + , concat [ ["-dynosuf", suf] | suf <- flag ghcOptDynObjSuffix ] + , concat [ ["-dynhisuf",suf] | suf <- flag ghcOptDynHiSuffix ] + , concat [ ["-outputdir", dir] | dir <- flag ghcOptOutputDir ] + , concat [ ["-odir", dir] | dir <- flag ghcOptObjDir ] + , concat [ ["-hidir", dir] | dir <- flag ghcOptHiDir ] + , concat [ ["-stubdir", dir] | dir <- flag ghcOptStubDir ] + + ----------------------- + -- Source search path + + , [ "-i" | flagBool ghcOptSourcePathClear ] + , [ "-i" ++ dir | dir <- flags ghcOptSourcePath ] + + -------------------- + + -------------------- + -- CPP, C, and C++ stuff + + , [ "-I" ++ dir | dir <- flags ghcOptCppIncludePath ] + , [ "-optP" ++ opt | opt <- ghcOptCppOptions opts] + , concat [ [ "-optP-include", "-optP" ++ inc] + | inc <- flags ghcOptCppIncludes ] + , [ "-optc" ++ opt | opt <- ghcOptCcOptions opts] + , [ "-optc" ++ opt | opt <- ghcOptCxxOptions opts] + + ----------------- + -- Linker stuff + + , [ "-optl" ++ opt | opt <- ghcOptLinkOptions opts] + , ["-l" ++ lib | lib <- ghcOptLinkLibs opts] + , ["-L" ++ dir | dir <- flags ghcOptLinkLibPath ] + , if isOSX + then concat [ ["-framework", fmwk] + | fmwk <- flags ghcOptLinkFrameworks ] + else [] + , if isOSX + then concat [ ["-framework-path", path] + | path <- flags ghcOptLinkFrameworkDirs ] + else [] + , [ "-no-hs-main" | flagBool ghcOptLinkNoHsMain ] + , [ "-dynload deploy" | not (null (flags ghcOptRPaths)) ] + , concat [ [ 
"-optl-Wl,-rpath," ++ dir] + | dir <- flags ghcOptRPaths ] + , [ modDefFile | modDefFile <- flags ghcOptLinkModDefFiles ] + + ------------- + -- Packages + + , concat [ [ case () of + _ | unitIdSupported comp -> "-this-unit-id" + | packageKeySupported comp -> "-this-package-key" + | otherwise -> "-package-name" + , this_arg ] + | this_arg <- flag ghcOptThisUnitId ] + + , concat [ ["-this-component-id", display this_cid ] + | this_cid <- flag ghcOptThisComponentId ] + + , if null (ghcOptInstantiatedWith opts) + then [] + else "-instantiated-with" + : intercalate "," (map (\(n,m) -> display n ++ "=" + ++ display m) + (ghcOptInstantiatedWith opts)) + : [] + + , concat [ ["-fno-code", "-fwrite-interface"] | flagBool ghcOptNoCode ] + + , [ "-hide-all-packages" | flagBool ghcOptHideAllPackages ] + , [ "-Wmissing-home-modules" | flagBool ghcOptWarnMissingHomeModules ] + , [ "-no-auto-link-packages" | flagBool ghcOptNoAutoLinkPackages ] + + , packageDbArgs implInfo (ghcOptPackageDBs opts) + + , concat $ let space "" = "" + space xs = ' ' : xs + in [ ["-package-id", display ipkgid ++ space (display rns)] + | (ipkgid,rns) <- flags ghcOptPackages ] + + ---------------------------- + -- Language and extensions + + , if supportsHaskell2010 implInfo + then [ "-X" ++ display lang | lang <- flag ghcOptLanguage ] + else [] + + , [ ext' + | ext <- flags ghcOptExtensions + , ext' <- case Map.lookup ext (ghcOptExtensionMap opts) of + Just (Just arg) -> [arg] + Just Nothing -> [] + Nothing -> + error $ "Distribution.Simple.Program.GHC.renderGhcOptions: " + ++ display ext ++ " not present in ghcOptExtensionMap." 
+ ] + + ---------------- + -- GHCi + + , concat [ [ "-ghci-script", script ] | script <- ghcOptGHCiScripts opts + , flagGhciScript implInfo ] + + --------------- + -- Inputs + + , [ display modu | modu <- flags ghcOptInputModules ] + , flags ghcOptInputFiles + + , concat [ [ "-o", out] | out <- flag ghcOptOutputFile ] + , concat [ [ "-dyno", out] | out <- flag ghcOptOutputDynFile ] + + --------------- + -- Extra + + , ghcOptExtra opts + + ] + + + where + implInfo = getImplInfo comp + isOSX = os == OSX + flag flg = flagToList (flg opts) + flags flg = fromNubListR . flg $ opts + flagBool flg = fromFlagOrDefault False (flg opts) + +verbosityOpts :: Verbosity -> [String] +verbosityOpts verbosity + | verbosity >= deafening = ["-v"] + | verbosity >= normal = [] + | otherwise = ["-w", "-v0"] + + +-- | GHC <7.6 uses '-package-conf' instead of '-package-db'. +packageDbArgsConf :: PackageDBStack -> [String] +packageDbArgsConf dbstack = case dbstack of + (GlobalPackageDB:UserPackageDB:dbs) -> concatMap specific dbs + (GlobalPackageDB:dbs) -> ("-no-user-package-conf") + : concatMap specific dbs + _ -> ierror + where + specific (SpecificPackageDB db) = [ "-package-conf", db ] + specific _ = ierror + ierror = error $ "internal error: unexpected package db stack: " + ++ show dbstack + +-- | GHC >= 7.6 uses the '-package-db' flag. See +-- https://ghc.haskell.org/trac/ghc/ticket/5977. 
+packageDbArgsDb :: PackageDBStack -> [String] +-- special cases to make arguments prettier in common scenarios +packageDbArgsDb dbstack = case dbstack of + (GlobalPackageDB:UserPackageDB:dbs) + | all isSpecific dbs -> concatMap single dbs + (GlobalPackageDB:dbs) + | all isSpecific dbs -> "-no-user-package-db" + : concatMap single dbs + dbs -> "-clear-package-db" + : concatMap single dbs + where + single (SpecificPackageDB db) = [ "-package-db", db ] + single GlobalPackageDB = [ "-global-package-db" ] + single UserPackageDB = [ "-user-package-db" ] + isSpecific (SpecificPackageDB _) = True + isSpecific _ = False + +packageDbArgs :: GhcImplInfo -> PackageDBStack -> [String] +packageDbArgs implInfo + | flagPackageConf implInfo = packageDbArgsConf + | otherwise = packageDbArgsDb + +-- ----------------------------------------------------------------------------- +-- Boilerplate Monoid instance for GhcOptions + +instance Monoid GhcOptions where + mempty = gmempty + mappend = (<>) + +instance Semigroup GhcOptions where + (<>) = gmappend diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/HcPkg.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/HcPkg.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/HcPkg.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/HcPkg.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,495 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.HcPkg +-- Copyright : Duncan Coutts 2009, 2013 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module provides an library interface to the @hc-pkg@ program. +-- Currently only GHC and GHCJS have hc-pkg programs. 
+ +module Distribution.Simple.Program.HcPkg ( + -- * Types + HcPkgInfo(..), + RegisterOptions(..), + defaultRegisterOptions, + + -- * Actions + init, + invoke, + register, + unregister, + recache, + expose, + hide, + dump, + describe, + list, + + -- * Program invocations + initInvocation, + registerInvocation, + unregisterInvocation, + recacheInvocation, + exposeInvocation, + hideInvocation, + dumpInvocation, + describeInvocation, + listInvocation, + ) where + +import Prelude () +import Distribution.Compat.Prelude hiding (init) + +import Distribution.InstalledPackageInfo +import Distribution.Simple.Compiler +import Distribution.Simple.Program.Types +import Distribution.Simple.Program.Run +import Distribution.Simple.Utils +import Distribution.Text +import Distribution.Types.ComponentId +import Distribution.Types.PackageId +import Distribution.Types.UnitId +import Distribution.Verbosity +import Distribution.Compat.Exception + +import Data.List + ( stripPrefix ) +import System.FilePath as FilePath + ( (), (<.>) + , splitPath, splitDirectories, joinPath, isPathSeparator ) +import qualified System.FilePath.Posix as FilePath.Posix + +-- | Information about the features and capabilities of an @hc-pkg@ +-- program. +-- +data HcPkgInfo = HcPkgInfo + { hcPkgProgram :: ConfiguredProgram + , noPkgDbStack :: Bool -- ^ no package DB stack supported + , noVerboseFlag :: Bool -- ^ hc-pkg does not support verbosity flags + , flagPackageConf :: Bool -- ^ use package-conf option instead of package-db + , supportsDirDbs :: Bool -- ^ supports directory style package databases + , requiresDirDbs :: Bool -- ^ requires directory style package databases + , nativeMultiInstance :: Bool -- ^ supports --enable-multi-instance flag + , recacheMultiInstance :: Bool -- ^ supports multi-instance via recache + , suppressFilesCheck :: Bool -- ^ supports --force-files or equivalent + } + + +-- | Call @hc-pkg@ to initialise a package database at the location {path}. 
+-- +-- > hc-pkg init {path} +-- +init :: HcPkgInfo -> Verbosity -> Bool -> FilePath -> IO () +init hpi verbosity preferCompat path + | not (supportsDirDbs hpi) + || (not (requiresDirDbs hpi) && preferCompat) + = writeFile path "[]" + + | otherwise + = runProgramInvocation verbosity (initInvocation hpi verbosity path) + +-- | Run @hc-pkg@ using a given package DB stack, directly forwarding the +-- provided command-line arguments to it. +invoke :: HcPkgInfo -> Verbosity -> PackageDBStack -> [String] -> IO () +invoke hpi verbosity dbStack extraArgs = + runProgramInvocation verbosity invocation + where + args = packageDbStackOpts hpi dbStack ++ extraArgs + invocation = programInvocation (hcPkgProgram hpi) args + +-- | Additional variations in the behaviour for 'register'. +data RegisterOptions = RegisterOptions { + -- | Allows re-registering \/ overwriting an existing package + registerAllowOverwrite :: Bool, + + -- | Insist on the ability to register multiple instances of a + -- single version of a single package. This will fail if the @hc-pkg@ + -- does not support it, see 'nativeMultiInstance' and + -- 'recacheMultiInstance'. + registerMultiInstance :: Bool, + + -- | Require that no checks are performed on the existence of package + -- files mentioned in the registration info. This must be used if + -- registering prior to putting the files in their final place. This will + -- fail if the @hc-pkg@ does not support it, see 'suppressFilesCheck'. + registerSuppressFilesCheck :: Bool + } + +-- | Defaults are @True@, @False@ and @False@ +defaultRegisterOptions :: RegisterOptions +defaultRegisterOptions = RegisterOptions { + registerAllowOverwrite = True, + registerMultiInstance = False, + registerSuppressFilesCheck = False + } + +-- | Call @hc-pkg@ to register a package. 
+-- +-- > hc-pkg register {filename | -} [--user | --global | --package-db] +-- +register :: HcPkgInfo -> Verbosity -> PackageDBStack + -> InstalledPackageInfo + -> RegisterOptions + -> IO () +register hpi verbosity packagedbs pkgInfo registerOptions + | registerMultiInstance registerOptions + , not (nativeMultiInstance hpi || recacheMultiInstance hpi) + = die' verbosity $ "HcPkg.register: the compiler does not support " + ++ "registering multiple instances of packages." + + | registerSuppressFilesCheck registerOptions + , not (suppressFilesCheck hpi) + = die' verbosity $ "HcPkg.register: the compiler does not support " + ++ "suppressing checks on files." + + -- This is a trick. Older versions of GHC do not support the + -- --enable-multi-instance flag for ghc-pkg register but it turns out that + -- the same ability is available by using ghc-pkg recache. The recache + -- command is there to support distro package managers that like to work + -- by just installing files and running update commands, rather than + -- special add/remove commands. So the way to register by this method is + -- to write the package registration file directly into the package db and + -- then call hc-pkg recache. 
+ -- + | registerMultiInstance registerOptions + , recacheMultiInstance hpi + = do let pkgdb = last packagedbs + writeRegistrationFileDirectly verbosity hpi pkgdb pkgInfo + recache hpi verbosity pkgdb + + | otherwise + = runProgramInvocation verbosity + (registerInvocation hpi verbosity packagedbs pkgInfo registerOptions) + +writeRegistrationFileDirectly :: Verbosity + -> HcPkgInfo + -> PackageDB + -> InstalledPackageInfo + -> IO () +writeRegistrationFileDirectly verbosity hpi (SpecificPackageDB dir) pkgInfo + | supportsDirDbs hpi + = do let pkgfile = dir display (installedUnitId pkgInfo) <.> "conf" + writeUTF8File pkgfile (showInstalledPackageInfo pkgInfo) + + | otherwise + = die' verbosity $ "HcPkg.writeRegistrationFileDirectly: compiler does not support dir style package dbs" + +writeRegistrationFileDirectly verbosity _ _ _ = + -- We don't know here what the dir for the global or user dbs are, + -- if that's needed it'll require a bit more plumbing to support. + die' verbosity $ "HcPkg.writeRegistrationFileDirectly: only supports SpecificPackageDB for now" + + +-- | Call @hc-pkg@ to unregister a package +-- +-- > hc-pkg unregister [pkgid] [--user | --global | --package-db] +-- +unregister :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO () +unregister hpi verbosity packagedb pkgid = + runProgramInvocation verbosity + (unregisterInvocation hpi verbosity packagedb pkgid) + + +-- | Call @hc-pkg@ to recache the registered packages. +-- +-- > hc-pkg recache [--user | --global | --package-db] +-- +recache :: HcPkgInfo -> Verbosity -> PackageDB -> IO () +recache hpi verbosity packagedb = + runProgramInvocation verbosity + (recacheInvocation hpi verbosity packagedb) + + +-- | Call @hc-pkg@ to expose a package. 
+-- +-- > hc-pkg expose [pkgid] [--user | --global | --package-db] +-- +expose :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO () +expose hpi verbosity packagedb pkgid = + runProgramInvocation verbosity + (exposeInvocation hpi verbosity packagedb pkgid) + +-- | Call @hc-pkg@ to retrieve a specific package +-- +-- > hc-pkg describe [pkgid] [--user | --global | --package-db] +-- +describe :: HcPkgInfo -> Verbosity -> PackageDBStack -> PackageId -> IO [InstalledPackageInfo] +describe hpi verbosity packagedb pid = do + + output <- getProgramInvocationOutput verbosity + (describeInvocation hpi verbosity packagedb pid) + `catchIO` \_ -> return "" + + case parsePackages output of + Left ok -> return ok + _ -> die' verbosity $ "failed to parse output of '" + ++ programId (hcPkgProgram hpi) ++ " describe " ++ display pid ++ "'" + +-- | Call @hc-pkg@ to hide a package. +-- +-- > hc-pkg hide [pkgid] [--user | --global | --package-db] +-- +hide :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId -> IO () +hide hpi verbosity packagedb pkgid = + runProgramInvocation verbosity + (hideInvocation hpi verbosity packagedb pkgid) + + +-- | Call @hc-pkg@ to get all the details of all the packages in the given +-- package database. +-- +dump :: HcPkgInfo -> Verbosity -> PackageDB -> IO [InstalledPackageInfo] +dump hpi verbosity packagedb = do + + output <- getProgramInvocationOutput verbosity + (dumpInvocation hpi verbosity packagedb) + `catchIO` \e -> die' verbosity $ programId (hcPkgProgram hpi) ++ " dump failed: " + ++ displayException e + + case parsePackages output of + Left ok -> return ok + _ -> die' verbosity $ "failed to parse output of '" + ++ programId (hcPkgProgram hpi) ++ " dump'" + +parsePackages :: String -> Either [InstalledPackageInfo] [PError] +parsePackages str = + let parsed = map parseInstalledPackageInfo (splitPkgs str) + in case [ msg | ParseFailed msg <- parsed ] of + [] -> Left [ setUnitId + . 
maybe id mungePackagePaths (pkgRoot pkg) + $ pkg + | ParseOk _ pkg <- parsed ] + msgs -> Right msgs + +--TODO: this could be a lot faster. We're doing normaliseLineEndings twice +-- and converting back and forth with lines/unlines. +splitPkgs :: String -> [String] +splitPkgs = checkEmpty . map unlines . splitWith ("---" ==) . lines + where + -- Handle the case of there being no packages at all. + checkEmpty [s] | all isSpace s = [] + checkEmpty ss = ss + + splitWith :: (a -> Bool) -> [a] -> [[a]] + splitWith p xs = ys : case zs of + [] -> [] + _:ws -> splitWith p ws + where (ys,zs) = break p xs + +mungePackagePaths :: FilePath -> InstalledPackageInfo -> InstalledPackageInfo +-- Perform path/URL variable substitution as per the Cabal ${pkgroot} spec +-- (http://www.haskell.org/pipermail/libraries/2009-May/011772.html) +-- Paths/URLs can be relative to ${pkgroot} or ${pkgrooturl}. +-- The "pkgroot" is the directory containing the package database. +mungePackagePaths pkgroot pkginfo = + pkginfo { + importDirs = mungePaths (importDirs pkginfo), + includeDirs = mungePaths (includeDirs pkginfo), + libraryDirs = mungePaths (libraryDirs pkginfo), + libraryDynDirs = mungePaths (libraryDynDirs pkginfo), + frameworkDirs = mungePaths (frameworkDirs pkginfo), + haddockInterfaces = mungePaths (haddockInterfaces pkginfo), + haddockHTMLs = mungeUrls (haddockHTMLs pkginfo) + } + where + mungePaths = map mungePath + mungeUrls = map mungeUrl + + mungePath p = case stripVarPrefix "${pkgroot}" p of + Just p' -> pkgroot p' + Nothing -> p + + mungeUrl p = case stripVarPrefix "${pkgrooturl}" p of + Just p' -> toUrlPath pkgroot p' + Nothing -> p + + toUrlPath r p = "file:///" + -- URLs always use posix style '/' separators: + ++ FilePath.Posix.joinPath (r : FilePath.splitDirectories p) + + stripVarPrefix var p = + case splitPath p of + (root:path') -> case stripPrefix var root of + Just [sep] | isPathSeparator sep -> Just (joinPath path') + _ -> Nothing + _ -> Nothing + + +-- Older 
installed package info files did not have the installedUnitId +-- field, so if it is missing then we fill it as the source package ID. +-- NB: Internal libraries not supported. +setUnitId :: InstalledPackageInfo -> InstalledPackageInfo +setUnitId pkginfo@InstalledPackageInfo { + installedUnitId = uid, + sourcePackageId = pid + } | unUnitId uid == "" + = pkginfo { + installedUnitId = mkLegacyUnitId pid, + installedComponentId_ = mkComponentId (display pid) + } +setUnitId pkginfo = pkginfo + + +-- | Call @hc-pkg@ to get the source package Id of all the packages in the +-- given package database. +-- +-- This is much less information than with 'dump', but also rather quicker. +-- Note in particular that it does not include the 'UnitId', just +-- the source 'PackageId' which is not necessarily unique in any package db. +-- +list :: HcPkgInfo -> Verbosity -> PackageDB + -> IO [PackageId] +list hpi verbosity packagedb = do + + output <- getProgramInvocationOutput verbosity + (listInvocation hpi verbosity packagedb) + `catchIO` \_ -> die' verbosity $ programId (hcPkgProgram hpi) ++ " list failed" + + case parsePackageIds output of + Just ok -> return ok + _ -> die' verbosity $ "failed to parse output of '" + ++ programId (hcPkgProgram hpi) ++ " list'" + + where + parsePackageIds = traverse simpleParse . 
words + +-------------------------- +-- The program invocations +-- + +initInvocation :: HcPkgInfo -> Verbosity -> FilePath -> ProgramInvocation +initInvocation hpi verbosity path = + programInvocation (hcPkgProgram hpi) args + where + args = ["init", path] + ++ verbosityOpts hpi verbosity + +registerInvocation + :: HcPkgInfo -> Verbosity -> PackageDBStack + -> InstalledPackageInfo + -> RegisterOptions + -> ProgramInvocation +registerInvocation hpi verbosity packagedbs pkgInfo registerOptions = + (programInvocation (hcPkgProgram hpi) (args "-")) { + progInvokeInput = Just (showInstalledPackageInfo pkgInfo), + progInvokeInputEncoding = IOEncodingUTF8 + } + where + cmdname + | registerAllowOverwrite registerOptions = "update" + | registerMultiInstance registerOptions = "update" + | otherwise = "register" + + args file = [cmdname, file] + ++ (if noPkgDbStack hpi + then [packageDbOpts hpi (last packagedbs)] + else packageDbStackOpts hpi packagedbs) + ++ [ "--enable-multi-instance" + | registerMultiInstance registerOptions ] + ++ [ "--force-files" + | registerSuppressFilesCheck registerOptions ] + ++ verbosityOpts hpi verbosity + +unregisterInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId + -> ProgramInvocation +unregisterInvocation hpi verbosity packagedb pkgid = + programInvocation (hcPkgProgram hpi) $ + ["unregister", packageDbOpts hpi packagedb, display pkgid] + ++ verbosityOpts hpi verbosity + + +recacheInvocation :: HcPkgInfo -> Verbosity -> PackageDB + -> ProgramInvocation +recacheInvocation hpi verbosity packagedb = + programInvocation (hcPkgProgram hpi) $ + ["recache", packageDbOpts hpi packagedb] + ++ verbosityOpts hpi verbosity + + +exposeInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId + -> ProgramInvocation +exposeInvocation hpi verbosity packagedb pkgid = + programInvocation (hcPkgProgram hpi) $ + ["expose", packageDbOpts hpi packagedb, display pkgid] + ++ verbosityOpts hpi verbosity + +describeInvocation :: HcPkgInfo -> 
Verbosity -> PackageDBStack -> PackageId + -> ProgramInvocation +describeInvocation hpi verbosity packagedbs pkgid = + programInvocation (hcPkgProgram hpi) $ + ["describe", display pkgid] + ++ (if noPkgDbStack hpi + then [packageDbOpts hpi (last packagedbs)] + else packageDbStackOpts hpi packagedbs) + ++ verbosityOpts hpi verbosity + +hideInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> PackageId + -> ProgramInvocation +hideInvocation hpi verbosity packagedb pkgid = + programInvocation (hcPkgProgram hpi) $ + ["hide", packageDbOpts hpi packagedb, display pkgid] + ++ verbosityOpts hpi verbosity + + +dumpInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> ProgramInvocation +dumpInvocation hpi _verbosity packagedb = + (programInvocation (hcPkgProgram hpi) args) { + progInvokeOutputEncoding = IOEncodingUTF8 + } + where + args = ["dump", packageDbOpts hpi packagedb] + ++ verbosityOpts hpi silent + -- We use verbosity level 'silent' because it is important that we + -- do not contaminate the output with info/debug messages. + +listInvocation :: HcPkgInfo -> Verbosity -> PackageDB -> ProgramInvocation +listInvocation hpi _verbosity packagedb = + (programInvocation (hcPkgProgram hpi) args) { + progInvokeOutputEncoding = IOEncodingUTF8 + } + where + args = ["list", "--simple-output", packageDbOpts hpi packagedb] + ++ verbosityOpts hpi silent + -- We use verbosity level 'silent' because it is important that we + -- do not contaminate the output with info/debug messages. 
+ + +packageDbStackOpts :: HcPkgInfo -> PackageDBStack -> [String] +packageDbStackOpts hpi dbstack = case dbstack of + (GlobalPackageDB:UserPackageDB:dbs) -> "--global" + : "--user" + : map specific dbs + (GlobalPackageDB:dbs) -> "--global" + : ("--no-user-" ++ packageDbFlag hpi) + : map specific dbs + _ -> ierror + where + specific (SpecificPackageDB db) = "--" ++ packageDbFlag hpi ++ "=" ++ db + specific _ = ierror + ierror :: a + ierror = error ("internal error: unexpected package db stack: " ++ show dbstack) + +packageDbFlag :: HcPkgInfo -> String +packageDbFlag hpi + | flagPackageConf hpi + = "package-conf" + | otherwise + = "package-db" + +packageDbOpts :: HcPkgInfo -> PackageDB -> String +packageDbOpts _ GlobalPackageDB = "--global" +packageDbOpts _ UserPackageDB = "--user" +packageDbOpts hpi (SpecificPackageDB db) = "--" ++ packageDbFlag hpi ++ "=" ++ db + +verbosityOpts :: HcPkgInfo -> Verbosity -> [String] +verbosityOpts hpi v + | noVerboseFlag hpi + = [] + | v >= deafening = ["-v2"] + | v == silent = ["-v0"] + | otherwise = [] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Hpc.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Hpc.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Hpc.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Hpc.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,111 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Hpc +-- Copyright : Thomas Tuegel 2011 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module provides an library interface to the @hpc@ program. 
+ +module Distribution.Simple.Program.Hpc + ( markup + , union + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Control.Monad (mapM) +import System.Directory (makeRelativeToCurrentDirectory) + +import Distribution.ModuleName +import Distribution.Simple.Program.Run +import Distribution.Simple.Program.Types +import Distribution.Text +import Distribution.Simple.Utils +import Distribution.Verbosity +import Distribution.Version + +-- | Invoke hpc with the given parameters. +-- +-- Prior to HPC version 0.7 (packaged with GHC 7.8), hpc did not handle +-- multiple .mix paths correctly, so we print a warning, and only pass it the +-- first path in the list. This means that e.g. test suites that import their +-- library as a dependency can still work, but those that include the library +-- modules directly (in other-modules) don't. +markup :: ConfiguredProgram + -> Version + -> Verbosity + -> FilePath -- ^ Path to .tix file + -> [FilePath] -- ^ Paths to .mix file directories + -> FilePath -- ^ Path where html output should be located + -> [ModuleName] -- ^ List of modules to exclude from report + -> IO () +markup hpc hpcVer verbosity tixFile hpcDirs destDir excluded = do + hpcDirs' <- if withinRange hpcVer (orLaterVersion version07) + then return hpcDirs + else do + warn verbosity $ "Your version of HPC (" ++ display hpcVer + ++ ") does not properly handle multiple search paths. " + ++ "Coverage report generation may fail unexpectedly. These " + ++ "issues are addressed in version 0.7 or later (GHC 7.8 or " + ++ "later)." + ++ if null droppedDirs + then "" + else " The following search paths have been abandoned: " + ++ show droppedDirs + return passedDirs + + -- Prior to GHC 8.0, hpc assumes all .mix paths are relative. 
+ hpcDirs'' <- mapM makeRelativeToCurrentDirectory hpcDirs' + + runProgramInvocation verbosity + (markupInvocation hpc tixFile hpcDirs'' destDir excluded) + where + version07 = mkVersion [0, 7] + (passedDirs, droppedDirs) = splitAt 1 hpcDirs + +markupInvocation :: ConfiguredProgram + -> FilePath -- ^ Path to .tix file + -> [FilePath] -- ^ Paths to .mix file directories + -> FilePath -- ^ Path where html output should be + -- located + -> [ModuleName] -- ^ List of modules to exclude from + -- report + -> ProgramInvocation +markupInvocation hpc tixFile hpcDirs destDir excluded = + let args = [ "markup", tixFile + , "--destdir=" ++ destDir + ] + ++ map ("--hpcdir=" ++) hpcDirs + ++ ["--exclude=" ++ display moduleName + | moduleName <- excluded ] + in programInvocation hpc args + +union :: ConfiguredProgram + -> Verbosity + -> [FilePath] -- ^ Paths to .tix files + -> FilePath -- ^ Path to resultant .tix file + -> [ModuleName] -- ^ List of modules to exclude from union + -> IO () +union hpc verbosity tixFiles outFile excluded = + runProgramInvocation verbosity + (unionInvocation hpc tixFiles outFile excluded) + +unionInvocation :: ConfiguredProgram + -> [FilePath] -- ^ Paths to .tix files + -> FilePath -- ^ Path to resultant .tix file + -> [ModuleName] -- ^ List of modules to exclude from union + -> ProgramInvocation +unionInvocation hpc tixFiles outFile excluded = + programInvocation hpc $ concat + [ ["sum", "--union"] + , tixFiles + , ["--output=" ++ outFile] + , ["--exclude=" ++ display moduleName + | moduleName <- excluded ] + ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Internal.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Internal.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Internal.hs 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Internal.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,46 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Internal +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Internal utilities used by Distribution.Simple.Program.*. + +module Distribution.Simple.Program.Internal ( + stripExtractVersion, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +-- | Extract the version number from the output of 'strip --version'. +-- +-- Invoking "strip --version" gives very inconsistent results. We ignore +-- everything in parentheses (see #2497), look for the first word that starts +-- with a number, and try parsing out the first two components of it. Non-GNU +-- 'strip' doesn't appear to have a version flag. +stripExtractVersion :: String -> String +stripExtractVersion str = + let numeric "" = False + numeric (x:_) = isDigit x + + -- Filter out everything in parentheses. + filterPar' :: Int -> [String] -> [String] + filterPar' _ [] = [] + filterPar' n (x:xs) + | n >= 0 && "(" `isPrefixOf` x = filterPar' (n+1) ((tail x):xs) + | n > 0 && ")" `isSuffixOf` x = filterPar' (n-1) xs + | n > 0 = filterPar' n xs + | otherwise = x:filterPar' n xs + + filterPar = filterPar' 0 + + in case dropWhile (not . numeric) (filterPar . words $ str) of + (ver:_) -> + -- take the first two version components + let isDot = (== '.') + (major, rest) = break isDot ver + minor = takeWhile isDigit (dropWhile isDot rest) + in major ++ "." 
++ minor + _ -> "" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Ld.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Ld.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Ld.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Ld.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,93 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Ld +-- Copyright : Duncan Coutts 2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module provides an library interface to the @ld@ linker program. + +module Distribution.Simple.Program.Ld ( + combineObjectFiles, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Simple.Compiler (arResponseFilesSupported) +import Distribution.Simple.LocalBuildInfo (LocalBuildInfo(..)) +import Distribution.Simple.Program.ResponseFile + ( withResponseFile ) +import Distribution.Simple.Program.Run + ( ProgramInvocation, programInvocation, multiStageProgramInvocation + , runProgramInvocation ) +import Distribution.Simple.Program.Types + ( ConfiguredProgram(..) ) +import Distribution.Simple.Setup + ( fromFlagOrDefault, configUseResponseFiles ) +import Distribution.Simple.Utils + ( defaultTempFileOptions ) +import Distribution.Verbosity + ( Verbosity ) + +import System.Directory + ( renameFile ) +import System.FilePath + ( (<.>), takeDirectory ) + +-- | Call @ld -r@ to link a bunch of object files together. 
+-- +combineObjectFiles :: Verbosity -> LocalBuildInfo -> ConfiguredProgram + -> FilePath -> [FilePath] -> IO () +combineObjectFiles verbosity lbi ld target files = do + + -- Unlike "ar", the "ld" tool is not designed to be used with xargs. That is, + -- if we have more object files than fit on a single command line then we + -- have a slight problem. What we have to do is link files in batches into + -- a temp object file and then include that one in the next batch. + + let simpleArgs = ["-r", "-o", target] + + initialArgs = ["-r", "-o", target] + middleArgs = ["-r", "-o", target, tmpfile] + finalArgs = middleArgs + + simple = programInvocation ld simpleArgs + initial = programInvocation ld initialArgs + middle = programInvocation ld middleArgs + final = programInvocation ld finalArgs + + targetDir = takeDirectory target + + invokeWithResponesFile :: FilePath -> ProgramInvocation + invokeWithResponesFile atFile = + programInvocation ld $ simpleArgs ++ ['@' : atFile] + + oldVersionManualOverride = + fromFlagOrDefault False $ configUseResponseFiles $ configFlags lbi + -- Whether ghc's ar supports response files is a good proxy for + -- whether ghc's ld supports them as well. 
+ responseArgumentsNotSupported = + not (arResponseFilesSupported (compiler lbi)) + + if oldVersionManualOverride || responseArgumentsNotSupported + then + run $ multiStageProgramInvocation simple (initial, middle, final) files + else + withResponseFile verbosity defaultTempFileOptions targetDir "ld.rsp" Nothing files $ + \path -> runProgramInvocation verbosity $ invokeWithResponesFile path + + where + tmpfile = target <.> "tmp" -- perhaps should use a proper temp file + + run :: [ProgramInvocation] -> IO () + run [] = return () + run [inv] = runProgramInvocation verbosity inv + run (inv:invs) = do runProgramInvocation verbosity inv + renameFile target tmpfile + run invs + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/ResponseFile.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/ResponseFile.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/ResponseFile.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/ResponseFile.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,60 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +---------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.ResponseFile +-- Copyright : (c) Sergey Vinokurov 2017 +-- License : BSD3-style +-- +-- Maintainer : cabal-devel@haskell.org +-- Created : 23 July 2017 +---------------------------------------------------------------------------- + +module Distribution.Simple.Program.ResponseFile (withResponseFile) where + +import Prelude () +import System.IO (TextEncoding, hSetEncoding, hPutStr, hClose) + +import Distribution.Compat.Prelude +import Distribution.Simple.Utils (TempFileOptions, withTempFileEx, debug) +import Distribution.Verbosity + +withResponseFile + :: Verbosity + -> TempFileOptions + -> 
FilePath -- ^ Working directory to create response file in. + -> FilePath -- ^ Template for response file name. + -> Maybe TextEncoding -- ^ Encoding to use for response file contents. + -> [String] -- ^ Arguments to put into response file. + -> (FilePath -> IO a) + -> IO a +withResponseFile verbosity tmpFileOpts workDir fileNameTemplate encoding arguments f = + withTempFileEx tmpFileOpts workDir fileNameTemplate $ \responseFileName hf -> do + traverse_ (hSetEncoding hf) encoding + let responseContents = unlines $ map escapeResponseFileArg arguments + hPutStr hf responseContents + hClose hf + debug verbosity $ responseFileName ++ " contents: <<<" + debug verbosity responseContents + debug verbosity $ ">>> " ++ responseFileName + f responseFileName + +-- Support a gcc-like response file syntax. Each separate +-- argument and its possible parameter(s), will be separated in the +-- response file by an actual newline; all other whitespace, +-- single quotes, double quotes, and the character used for escaping +-- (backslash) are escaped. The called program will need to do a similar +-- inverse operation to de-escape and re-constitute the argument list. +escapeResponseFileArg :: String -> String +escapeResponseFileArg = reverse . 
foldl' escape [] + where + escape :: String -> Char -> String + escape cs c = + case c of + '\\' -> c:'\\':cs + '\'' -> c:'\\':cs + '"' -> c:'\\':cs + _ | isSpace c -> c:'\\':cs + | otherwise -> c:cs + + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Run.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Run.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Run.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Run.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,283 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Run +-- Copyright : Duncan Coutts 2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module provides a data type for program invocations and functions to +-- run them. + +module Distribution.Simple.Program.Run ( + ProgramInvocation(..), + IOEncoding(..), + emptyProgramInvocation, + simpleProgramInvocation, + programInvocation, + multiStageProgramInvocation, + + runProgramInvocation, + getProgramInvocationOutput, + getProgramInvocationOutputAndErrors, + + getEffectiveEnvironment, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Simple.Program.Types +import Distribution.Simple.Utils +import Distribution.Verbosity +import Distribution.Compat.Environment + +import qualified Data.Map as Map +import System.FilePath +import System.Exit + ( ExitCode(..), exitWith ) + +-- | Represents a specific invocation of a specific program. +-- +-- This is used as an intermediate type between deciding how to call a program +-- and actually doing it. 
This provides the opportunity to the caller to +-- adjust how the program will be called. These invocations can either be run +-- directly or turned into shell or batch scripts. +-- +data ProgramInvocation = ProgramInvocation { + progInvokePath :: FilePath, + progInvokeArgs :: [String], + progInvokeEnv :: [(String, Maybe String)], + -- Extra paths to add to PATH + progInvokePathEnv :: [FilePath], + progInvokeCwd :: Maybe FilePath, + progInvokeInput :: Maybe String, + progInvokeInputEncoding :: IOEncoding, + progInvokeOutputEncoding :: IOEncoding + } + +data IOEncoding = IOEncodingText -- locale mode text + | IOEncodingUTF8 -- always utf8 + +encodeToIOData :: IOEncoding -> String -> IOData +encodeToIOData IOEncodingText = IODataText +encodeToIOData IOEncodingUTF8 = IODataBinary . toUTF8LBS + +emptyProgramInvocation :: ProgramInvocation +emptyProgramInvocation = + ProgramInvocation { + progInvokePath = "", + progInvokeArgs = [], + progInvokeEnv = [], + progInvokePathEnv = [], + progInvokeCwd = Nothing, + progInvokeInput = Nothing, + progInvokeInputEncoding = IOEncodingText, + progInvokeOutputEncoding = IOEncodingText + } + +simpleProgramInvocation :: FilePath -> [String] -> ProgramInvocation +simpleProgramInvocation path args = + emptyProgramInvocation { + progInvokePath = path, + progInvokeArgs = args + } + +programInvocation :: ConfiguredProgram -> [String] -> ProgramInvocation +programInvocation prog args = + emptyProgramInvocation { + progInvokePath = programPath prog, + progInvokeArgs = programDefaultArgs prog + ++ args + ++ programOverrideArgs prog, + progInvokeEnv = programOverrideEnv prog + } + + +runProgramInvocation :: Verbosity -> ProgramInvocation -> IO () +runProgramInvocation verbosity + ProgramInvocation { + progInvokePath = path, + progInvokeArgs = args, + progInvokeEnv = [], + progInvokePathEnv = [], + progInvokeCwd = Nothing, + progInvokeInput = Nothing + } = + rawSystemExit verbosity path args + +runProgramInvocation verbosity + ProgramInvocation { 
+ progInvokePath = path, + progInvokeArgs = args, + progInvokeEnv = envOverrides, + progInvokePathEnv = extraPath, + progInvokeCwd = mcwd, + progInvokeInput = Nothing + } = do + pathOverride <- getExtraPathEnv envOverrides extraPath + menv <- getEffectiveEnvironment (envOverrides ++ pathOverride) + exitCode <- rawSystemIOWithEnv verbosity + path args + mcwd menv + Nothing Nothing Nothing + when (exitCode /= ExitSuccess) $ + exitWith exitCode + +runProgramInvocation verbosity + ProgramInvocation { + progInvokePath = path, + progInvokeArgs = args, + progInvokeEnv = envOverrides, + progInvokePathEnv = extraPath, + progInvokeCwd = mcwd, + progInvokeInput = Just inputStr, + progInvokeInputEncoding = encoding + } = do + pathOverride <- getExtraPathEnv envOverrides extraPath + menv <- getEffectiveEnvironment (envOverrides ++ pathOverride) + (_, errors, exitCode) <- rawSystemStdInOut verbosity + path args + mcwd menv + (Just input) IODataModeBinary + when (exitCode /= ExitSuccess) $ + die' verbosity $ "'" ++ path ++ "' exited with an error:\n" ++ errors + where + input = encodeToIOData encoding inputStr + +getProgramInvocationOutput :: Verbosity -> ProgramInvocation -> IO String +getProgramInvocationOutput verbosity inv = do + (output, errors, exitCode) <- getProgramInvocationOutputAndErrors verbosity inv + when (exitCode /= ExitSuccess) $ + die' verbosity $ "'" ++ progInvokePath inv ++ "' exited with an error:\n" ++ errors + return output + + +getProgramInvocationOutputAndErrors :: Verbosity -> ProgramInvocation + -> IO (String, String, ExitCode) +getProgramInvocationOutputAndErrors verbosity + ProgramInvocation { + progInvokePath = path, + progInvokeArgs = args, + progInvokeEnv = envOverrides, + progInvokePathEnv = extraPath, + progInvokeCwd = mcwd, + progInvokeInput = minputStr, + progInvokeOutputEncoding = encoding + } = do + let mode = case encoding of IOEncodingUTF8 -> IODataModeBinary + IOEncodingText -> IODataModeText + + decode (IODataBinary b) = 
normaliseLineEndings (fromUTF8LBS b) + decode (IODataText s) = s + + pathOverride <- getExtraPathEnv envOverrides extraPath + menv <- getEffectiveEnvironment (envOverrides ++ pathOverride) + (output, errors, exitCode) <- rawSystemStdInOut verbosity + path args + mcwd menv + input mode + return (decode output, errors, exitCode) + where + input = encodeToIOData encoding <$> minputStr + +getExtraPathEnv :: [(String, Maybe String)] -> [FilePath] -> NoCallStackIO [(String, Maybe String)] +getExtraPathEnv _ [] = return [] +getExtraPathEnv env extras = do + mb_path <- case lookup "PATH" env of + Just x -> return x + Nothing -> lookupEnv "PATH" + let extra = intercalate [searchPathSeparator] extras + path' = case mb_path of + Nothing -> extra + Just path -> extra ++ searchPathSeparator : path + return [("PATH", Just path')] + +-- | Return the current environment extended with the given overrides. +-- If an entry is specified twice in @overrides@, the second entry takes +-- precedence. +-- +getEffectiveEnvironment :: [(String, Maybe String)] + -> NoCallStackIO (Maybe [(String, String)]) +getEffectiveEnvironment [] = return Nothing +getEffectiveEnvironment overrides = + fmap (Just . Map.toList . apply overrides . Map.fromList) getEnvironment + where + apply os env = foldl' (flip update) env os + update (var, Nothing) = Map.delete var + update (var, Just val) = Map.insert var val + +-- | Like the unix xargs program. Useful for when we've got very long command +-- lines that might overflow an OS limit on command line length and so you +-- need to invoke a command multiple times to get all the args in. +-- +-- It takes four template invocations corresponding to the simple, initial, +-- middle and last invocations. 
If the number of args given is small enough +-- that we can get away with just a single invocation then the simple one is +-- used: +-- +-- > $ simple args +-- +-- If the number of args given means that we need to use multiple invocations +-- then the templates for the initial, middle and last invocations are used: +-- +-- > $ initial args_0 +-- > $ middle args_1 +-- > $ middle args_2 +-- > ... +-- > $ final args_n +-- +multiStageProgramInvocation + :: ProgramInvocation + -> (ProgramInvocation, ProgramInvocation, ProgramInvocation) + -> [String] + -> [ProgramInvocation] +multiStageProgramInvocation simple (initial, middle, final) args = + + let argSize inv = length (progInvokePath inv) + + foldl' (\s a -> length a + 1 + s) 1 (progInvokeArgs inv) + fixedArgSize = maximum (map argSize [simple, initial, middle, final]) + chunkSize = maxCommandLineSize - fixedArgSize + + in case splitChunks chunkSize args of + [] -> [ simple ] + + [c] -> [ simple `appendArgs` c ] + + (c:cs) -> [ initial `appendArgs` c ] + ++ [ middle `appendArgs` c'| c' <- init cs ] + ++ [ final `appendArgs` c'| let c' = last cs ] + + where + appendArgs :: ProgramInvocation -> [String] -> ProgramInvocation + inv `appendArgs` as = inv { progInvokeArgs = progInvokeArgs inv ++ as } + + splitChunks :: Int -> [[a]] -> [[[a]]] + splitChunks len = unfoldr $ \s -> + if null s then Nothing + else Just (chunk len s) + + chunk :: Int -> [[a]] -> ([[a]], [[a]]) + chunk len (s:_) | length s >= len = error toolong + chunk len ss = chunk' [] len ss + + chunk' :: [[a]] -> Int -> [[a]] -> ([[a]], [[a]]) + chunk' acc len (s:ss) + | len' < len = chunk' (s:acc) (len-len'-1) ss + where len' = length s + chunk' acc _ ss = (reverse acc, ss) + + toolong = "multiStageProgramInvocation: a single program arg is larger " + ++ "than the maximum command line length!" 
+ + +--FIXME: discover this at configure time or runtime on unix +-- The value is 32k on Windows and posix specifies a minimum of 4k +-- but all sensible unixes use more than 4k. +-- we could use getSysVar ArgumentLimit but that's in the unix lib +-- +maxCommandLineSize :: Int +maxCommandLineSize = 30 * 1024 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Script.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Script.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Script.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Script.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,108 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Script +-- Copyright : Duncan Coutts 2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module provides an library interface to the @hc-pkg@ program. +-- Currently only GHC and LHC have hc-pkg programs. + +module Distribution.Simple.Program.Script ( + + invocationAsSystemScript, + invocationAsShellScript, + invocationAsBatchFile, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Simple.Program.Run +import Distribution.System + +-- | Generate a system script, either POSIX shell script or Windows batch file +-- as appropriate for the given system. +-- +invocationAsSystemScript :: OS -> ProgramInvocation -> String +invocationAsSystemScript Windows = invocationAsBatchFile +invocationAsSystemScript _ = invocationAsShellScript + + +-- | Generate a POSIX shell script that invokes a program. 
+-- +invocationAsShellScript :: ProgramInvocation -> String +invocationAsShellScript + ProgramInvocation { + progInvokePath = path, + progInvokeArgs = args, + progInvokeEnv = envExtra, + progInvokeCwd = mcwd, + progInvokeInput = minput + } = unlines $ + [ "#!/bin/sh" ] + ++ concatMap setEnv envExtra + ++ [ "cd " ++ quote cwd | cwd <- maybeToList mcwd ] + ++ [ (case minput of + Nothing -> "" + Just input -> "echo " ++ quote input ++ " | ") + ++ unwords (map quote $ path : args) ++ " \"$@\""] + + where + setEnv (var, Nothing) = ["unset " ++ var, "export " ++ var] + setEnv (var, Just val) = ["export " ++ var ++ "=" ++ quote val] + + quote :: String -> String + quote s = "'" ++ escape s ++ "'" + + escape [] = [] + escape ('\'':cs) = "'\\''" ++ escape cs + escape (c :cs) = c : escape cs + + +-- | Generate a Windows batch file that invokes a program. +-- +invocationAsBatchFile :: ProgramInvocation -> String +invocationAsBatchFile + ProgramInvocation { + progInvokePath = path, + progInvokeArgs = args, + progInvokeEnv = envExtra, + progInvokeCwd = mcwd, + progInvokeInput = minput + } = unlines $ + [ "@echo off" ] + ++ map setEnv envExtra + ++ [ "cd \"" ++ cwd ++ "\"" | cwd <- maybeToList mcwd ] + ++ case minput of + Nothing -> + [ path ++ concatMap (' ':) args ] + + Just input -> + [ "(" ] + ++ [ "echo " ++ escape line | line <- lines input ] + ++ [ ") | " + ++ "\"" ++ path ++ "\"" + ++ concatMap (\arg -> ' ':quote arg) args ] + + where + setEnv (var, Nothing) = "set " ++ var ++ "=" + setEnv (var, Just val) = "set " ++ var ++ "=" ++ escape val + + quote :: String -> String + quote s = "\"" ++ escapeQ s ++ "\"" + + escapeQ [] = [] + escapeQ ('"':cs) = "\"\"\"" ++ escapeQ cs + escapeQ (c :cs) = c : escapeQ cs + + escape [] = [] + escape ('|':cs) = "^|" ++ escape cs + escape ('<':cs) = "^<" ++ escape cs + escape ('>':cs) = "^>" ++ escape cs + escape ('&':cs) = "^&" ++ escape cs + escape ('(':cs) = "^(" ++ escape cs + escape (')':cs) = "^)" ++ escape cs + escape ('^':cs) = 
"^^" ++ escape cs + escape (c :cs) = c : escape cs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Strip.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Strip.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Strip.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Strip.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,75 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Strip +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module provides an library interface to the @strip@ program. + +module Distribution.Simple.Program.Strip (stripLib, stripExe) + where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Simple.Program +import Distribution.Simple.Utils +import Distribution.System +import Distribution.Verbosity +import Distribution.Version + +import System.FilePath (takeBaseName) + +runStrip :: Verbosity -> ProgramDb -> FilePath -> [String] -> IO () +runStrip verbosity progDb path args = + case lookupProgram stripProgram progDb of + Just strip -> runProgram verbosity strip (args ++ [path]) + Nothing -> unless (buildOS == Windows) $ + -- Don't bother warning on windows, we don't expect them to + -- have the strip program anyway. 
+ warn verbosity $ "Unable to strip executable or library '" + ++ (takeBaseName path) + ++ "' (missing the 'strip' program)" + +stripExe :: Verbosity -> Platform -> ProgramDb -> FilePath -> IO () +stripExe verbosity (Platform _arch os) progdb path = + runStrip verbosity progdb path args + where + args = case os of + OSX -> ["-x"] -- By default, stripping the ghc binary on at least + -- some OS X installations causes: + -- HSbase-3.0.o: unknown symbol `_environ'" + -- The -x flag fixes that. + _ -> [] + +stripLib :: Verbosity -> Platform -> ProgramDb -> FilePath -> IO () +stripLib verbosity (Platform arch os) progdb path = do + case os of + OSX -> -- '--strip-unneeded' is not supported on OS X, iOS, AIX, or + -- Solaris. See #1630. + return () + IOS -> return () + AIX -> return () + Solaris -> return () + Windows -> -- Stripping triggers a bug in 'strip.exe' for + -- libraries with lots identically named modules. See + -- #1784. + return() + Linux | arch == I386 -> + -- Versions of 'strip' on 32-bit Linux older than 2.18 are + -- broken. See #2339. 
+ let okVersion = orLaterVersion (mkVersion [2,18]) + in case programVersion =<< lookupProgram stripProgram progdb of + Just v | withinRange v okVersion -> + runStrip verbosity progdb path args + _ -> warn verbosity $ "Unable to strip library '" + ++ (takeBaseName path) + ++ "' (version of 'strip' too old; " + ++ "requires >= 2.18 on 32-bit Linux)" + _ -> runStrip verbosity progdb path args + where + args = ["--strip-unneeded"] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program/Types.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,187 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE DeriveDataTypeable #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program.Types +-- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This provides an abstraction which deals with configuring and running +-- programs. A 'Program' is a static notion of a known program. A +-- 'ConfiguredProgram' is a 'Program' that has been found on the current +-- machine and is ready to be run (possibly with some user-supplied default +-- args). Configuring a program involves finding its location and if necessary +-- finding its version. There's reasonable default behavior for trying to find +-- \"foo\" in PATH, being able to override its location, etc. 
+-- +module Distribution.Simple.Program.Types ( + -- * Program and functions for constructing them + Program(..), + ProgramSearchPath, + ProgramSearchPathEntry(..), + simpleProgram, + + -- * Configured program and related functions + ConfiguredProgram(..), + programPath, + suppressOverrideArgs, + ProgArg, + ProgramLocation(..), + simpleConfiguredProgram, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.PackageDescription +import Distribution.Simple.Program.Find +import Distribution.Version +import Distribution.Verbosity + +import qualified Data.Map as Map + +-- | Represents a program which can be configured. +-- +-- Note: rather than constructing this directly, start with 'simpleProgram' and +-- override any extra fields. +-- +data Program = Program { + -- | The simple name of the program, eg. ghc + programName :: String, + + -- | A function to search for the program if its location was not + -- specified by the user. Usually this will just be a call to + -- 'findProgramOnSearchPath'. + -- + -- It is supplied with the prevailing search path which will typically + -- just be used as-is, but can be extended or ignored as needed. + -- + -- For the purpose of change monitoring, in addition to the location + -- where the program was found, it returns all the other places that + -- were tried. + -- + programFindLocation :: Verbosity -> ProgramSearchPath + -> IO (Maybe (FilePath, [FilePath])), + + -- | Try to find the version of the program. For many programs this is + -- not possible or is not necessary so it's OK to return Nothing. + programFindVersion :: Verbosity -> FilePath -> IO (Maybe Version), + + -- | A function to do any additional configuration after we have + -- located the program (and perhaps identified its version). For example + -- it could add args, or environment vars. 
+ programPostConf :: Verbosity -> ConfiguredProgram -> IO ConfiguredProgram, + -- | A function that filters any arguments that don't impact the output + -- from a commandline. Used to limit the volatility of dependency hashes + -- when using new-build. + programNormaliseArgs :: Maybe Version -> PackageDescription -> [String] -> [String] + } +instance Show Program where + show (Program name _ _ _ _) = "Program: " ++ name + +type ProgArg = String + +-- | Represents a program which has been configured and is thus ready to be run. +-- +-- These are usually made by configuring a 'Program', but if you have to +-- construct one directly then start with 'simpleConfiguredProgram' and +-- override any extra fields. +-- +data ConfiguredProgram = ConfiguredProgram { + -- | Just the name again + programId :: String, + + -- | The version of this program, if it is known. + programVersion :: Maybe Version, + + -- | Default command-line args for this program. + -- These flags will appear first on the command line, so they can be + -- overridden by subsequent flags. + programDefaultArgs :: [String], + + -- | Override command-line args for this program. + -- These flags will appear last on the command line, so they override + -- all earlier flags. + programOverrideArgs :: [String], + + -- | Override environment variables for this program. + -- These env vars will extend\/override the prevailing environment of + -- the current to form the environment for the new process. + programOverrideEnv :: [(String, Maybe String)], + + -- | A key-value map listing various properties of the program, useful + -- for feature detection. Populated during the configuration step, key + -- names depend on the specific program. + programProperties :: Map.Map String String, + + -- | Location of the program. eg. @\/usr\/bin\/ghc-6.4@ + programLocation :: ProgramLocation, + + -- | In addition to the 'programLocation' where the program was found, + -- these are additional locations that were looked at. 
The combination + -- of ths found location and these not-found locations can be used to + -- monitor to detect when the re-configuring the program might give a + -- different result (e.g. found in a different location). + -- + programMonitorFiles :: [FilePath] + } + deriving (Eq, Generic, Read, Show, Typeable) + +instance Binary ConfiguredProgram + +-- | Where a program was found. Also tells us whether it's specified by user or +-- not. This includes not just the path, but the program as well. +data ProgramLocation + = UserSpecified { locationPath :: FilePath } + -- ^The user gave the path to this program, + -- eg. --ghc-path=\/usr\/bin\/ghc-6.6 + | FoundOnSystem { locationPath :: FilePath } + -- ^The program was found automatically. + deriving (Eq, Generic, Read, Show) + +instance Binary ProgramLocation + +-- | The full path of a configured program. +programPath :: ConfiguredProgram -> FilePath +programPath = locationPath . programLocation + +-- | Suppress any extra arguments added by the user. +suppressOverrideArgs :: ConfiguredProgram -> ConfiguredProgram +suppressOverrideArgs prog = prog { programOverrideArgs = [] } + +-- | Make a simple named program. +-- +-- By default we'll just search for it in the path and not try to find the +-- version name. You can override these behaviours if necessary, eg: +-- +-- > (simpleProgram "foo") { programFindLocation = ... , programFindVersion ... } +-- +simpleProgram :: String -> Program +simpleProgram name = Program { + programName = name, + programFindLocation = \v p -> findProgramOnSearchPath v p name, + programFindVersion = \_ _ -> return Nothing, + programPostConf = \_ p -> return p, + programNormaliseArgs = \_ _ -> id + } + +-- | Make a simple 'ConfiguredProgram'. 
+-- +-- > simpleConfiguredProgram "foo" (FoundOnSystem path) +-- +simpleConfiguredProgram :: String -> ProgramLocation -> ConfiguredProgram +simpleConfiguredProgram name loc = ConfiguredProgram { + programId = name, + programVersion = Nothing, + programDefaultArgs = [], + programOverrideArgs = [], + programOverrideEnv = [], + programProperties = Map.empty, + programLocation = loc, + programMonitorFiles = [] -- did not look in any other locations + } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Program.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Program.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,239 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Program +-- Copyright : Isaac Jones 2006, Duncan Coutts 2007-2009 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This provides an abstraction which deals with configuring and running +-- programs. A 'Program' is a static notion of a known program. A +-- 'ConfiguredProgram' is a 'Program' that has been found on the current +-- machine and is ready to be run (possibly with some user-supplied default +-- args). Configuring a program involves finding its location and if necessary +-- finding its version. There is also a 'ProgramDb' type which holds +-- configured and not-yet configured programs. It is the parameter to lots of +-- actions elsewhere in Cabal that need to look up and run programs. If we had +-- a Cabal monad, the 'ProgramDb' would probably be a reader or +-- state component of it. 
+-- +-- The module also defines all the known built-in 'Program's and the +-- 'defaultProgramDb' which contains them all. +-- +-- One nice thing about using it is that any program that is +-- registered with Cabal will get some \"configure\" and \".cabal\" +-- helpers like --with-foo-args --foo-path= and extra-foo-args. +-- +-- There's also good default behavior for trying to find \"foo\" in +-- PATH, being able to override its location, etc. +-- +-- There's also a hook for adding programs in a Setup.lhs script. See +-- hookedPrograms in 'Distribution.Simple.UserHooks'. This gives a +-- hook user the ability to get the above flags and such so that they +-- don't have to write all the PATH logic inside Setup.lhs. + +module Distribution.Simple.Program ( + -- * Program and functions for constructing them + Program(..) + , ProgramSearchPath + , ProgramSearchPathEntry(..) + , simpleProgram + , findProgramOnSearchPath + , defaultProgramSearchPath + , findProgramVersion + + -- * Configured program and related functions + , ConfiguredProgram(..) + , programPath + , ProgArg + , ProgramLocation(..) + , runProgram + , getProgramOutput + , suppressOverrideArgs + + -- * Program invocations + , ProgramInvocation(..) 
+ , emptyProgramInvocation + , simpleProgramInvocation + , programInvocation + , runProgramInvocation + , getProgramInvocationOutput + + -- * The collection of unconfigured and configured programs + , builtinPrograms + + -- * The collection of configured programs we can run + , ProgramDb + , defaultProgramDb + , emptyProgramDb + , restoreProgramDb + , addKnownProgram + , addKnownPrograms + , lookupKnownProgram + , knownPrograms + , getProgramSearchPath + , setProgramSearchPath + , userSpecifyPath + , userSpecifyPaths + , userMaybeSpecifyPath + , userSpecifyArgs + , userSpecifyArgss + , userSpecifiedArgs + , lookupProgram + , lookupProgramVersion + , updateProgram + , configureProgram + , configureAllKnownPrograms + , reconfigurePrograms + , requireProgram + , requireProgramVersion + , runDbProgram + , getDbProgramOutput + + -- * Programs that Cabal knows about + , ghcProgram + , ghcPkgProgram + , ghcjsProgram + , ghcjsPkgProgram + , hmakeProgram + , jhcProgram + , uhcProgram + , gccProgram + , arProgram + , stripProgram + , happyProgram + , alexProgram + , hsc2hsProgram + , c2hsProgram + , cpphsProgram + , hscolourProgram + , doctestProgram + , haddockProgram + , greencardProgram + , ldProgram + , tarProgram + , cppProgram + , pkgConfigProgram + , hpcProgram + + -- * deprecated + , ProgramConfiguration + , emptyProgramConfiguration + , defaultProgramConfiguration + , restoreProgramConfiguration + , rawSystemProgram + , rawSystemProgramStdout + , rawSystemProgramConf + , rawSystemProgramStdoutConf + , findProgramOnPath + , findProgramLocation + + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Simple.Program.Types +import Distribution.Simple.Program.Run +import Distribution.Simple.Program.Db +import Distribution.Simple.Program.Builtin +import Distribution.Simple.Program.Find +import Distribution.Simple.Utils +import Distribution.Verbosity + +-- | Runs the given configured program. 
+runProgram :: Verbosity -- ^Verbosity + -> ConfiguredProgram -- ^The program to run + -> [ProgArg] -- ^Any /extra/ arguments to add + -> IO () +runProgram verbosity prog args = + runProgramInvocation verbosity (programInvocation prog args) + + +-- | Runs the given configured program and gets the output. +-- +getProgramOutput :: Verbosity -- ^Verbosity + -> ConfiguredProgram -- ^The program to run + -> [ProgArg] -- ^Any /extra/ arguments to add + -> IO String +getProgramOutput verbosity prog args = + getProgramInvocationOutput verbosity (programInvocation prog args) + + +-- | Looks up the given program in the program database and runs it. +-- +runDbProgram :: Verbosity -- ^verbosity + -> Program -- ^The program to run + -> ProgramDb -- ^look up the program here + -> [ProgArg] -- ^Any /extra/ arguments to add + -> IO () +runDbProgram verbosity prog programDb args = + case lookupProgram prog programDb of + Nothing -> die' verbosity notFound + Just configuredProg -> runProgram verbosity configuredProg args + where + notFound = "The program '" ++ programName prog + ++ "' is required but it could not be found" + +-- | Looks up the given program in the program database and runs it. +-- +getDbProgramOutput :: Verbosity -- ^verbosity + -> Program -- ^The program to run + -> ProgramDb -- ^look up the program here + -> [ProgArg] -- ^Any /extra/ arguments to add + -> IO String +getDbProgramOutput verbosity prog programDb args = + case lookupProgram prog programDb of + Nothing -> die' verbosity notFound + Just configuredProg -> getProgramOutput verbosity configuredProg args + where + notFound = "The program '" ++ programName prog + ++ "' is required but it could not be found" + + +--------------------- +-- Deprecated aliases +-- + +{-# DEPRECATED rawSystemProgram "use runProgram instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." 
#-} +rawSystemProgram :: Verbosity -> ConfiguredProgram + -> [ProgArg] -> IO () +rawSystemProgram = runProgram + +{-# DEPRECATED rawSystemProgramStdout "use getProgramOutput instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +rawSystemProgramStdout :: Verbosity -> ConfiguredProgram + -> [ProgArg] -> IO String +rawSystemProgramStdout = getProgramOutput + +{-# DEPRECATED rawSystemProgramConf "use runDbProgram instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +rawSystemProgramConf :: Verbosity -> Program -> ProgramConfiguration + -> [ProgArg] -> IO () +rawSystemProgramConf = runDbProgram + +{-# DEPRECATED rawSystemProgramStdoutConf "use getDbProgramOutput instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +rawSystemProgramStdoutConf :: Verbosity -> Program -> ProgramConfiguration + -> [ProgArg] -> IO String +rawSystemProgramStdoutConf = getDbProgramOutput + +{-# DEPRECATED ProgramConfiguration "use ProgramDb instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +type ProgramConfiguration = ProgramDb + +{-# DEPRECATED emptyProgramConfiguration "use emptyProgramDb instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +{-# DEPRECATED defaultProgramConfiguration "use defaultProgramDb instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +emptyProgramConfiguration, defaultProgramConfiguration :: ProgramConfiguration +emptyProgramConfiguration = emptyProgramDb +defaultProgramConfiguration = defaultProgramDb + +{-# DEPRECATED restoreProgramConfiguration "use restoreProgramDb instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +restoreProgramConfiguration :: [Program] -> ProgramConfiguration + -> ProgramConfiguration +restoreProgramConfiguration = restoreProgramDb + +{-# DEPRECATED findProgramOnPath "use findProgramOnSearchPath instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." 
#-} +findProgramOnPath :: String -> Verbosity -> IO (Maybe FilePath) +findProgramOnPath name verbosity = + fmap (fmap fst) $ + findProgramOnSearchPath verbosity defaultProgramSearchPath name diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Register.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Register.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Register.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Register.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,593 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Register +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module deals with registering and unregistering packages. There are a +-- couple ways it can do this, one is to do it directly. Another is to generate +-- a script that can be run later to do it. The idea here being that the user +-- is shielded from the details of what command to use for package registration +-- for a particular compiler. In practice this aspect was not especially +-- popular so we also provide a way to simply generate the package registration +-- file which then must be manually passed to @ghc-pkg@. It is possible to +-- generate registration information for where the package is to be installed, +-- or alternatively to register the package in place in the build tree. The +-- latter is occasionally handy, and will become more important when we try to +-- build multi-package systems. +-- +-- This module does not delegate anything to the per-compiler modules but just +-- mixes it all in in this module, which is rather unsatisfactory. 
The script +-- generation and the unregister feature are not well used or tested. + +module Distribution.Simple.Register ( + register, + unregister, + + internalPackageDBPath, + + initPackageDB, + doesPackageDBExist, + createPackageDB, + deletePackageDB, + + abiHash, + invokeHcPkg, + registerPackage, + HcPkg.RegisterOptions(..), + HcPkg.defaultRegisterOptions, + generateRegistrationInfo, + inplaceInstalledPackageInfo, + absoluteInstalledPackageInfo, + generalInstalledPackageInfo, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.TargetInfo +import Distribution.Types.LocalBuildInfo +import Distribution.Types.ComponentLocalBuildInfo + +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.BuildPaths +import Distribution.Simple.BuildTarget + +import qualified Distribution.Simple.GHC as GHC +import qualified Distribution.Simple.GHCJS as GHCJS +import qualified Distribution.Simple.UHC as UHC +import qualified Distribution.Simple.HaskellSuite as HaskellSuite +import qualified Distribution.Simple.PackageIndex as Index + +import Distribution.Backpack.DescribeUnitId +import Distribution.Simple.Compiler +import Distribution.Simple.Program +import Distribution.Simple.Program.Script +import qualified Distribution.Simple.Program.HcPkg as HcPkg +import Distribution.Simple.Setup +import Distribution.PackageDescription +import Distribution.Package +import Distribution.License (licenseToSPDX, licenseFromSPDX) +import qualified Distribution.InstalledPackageInfo as IPI +import Distribution.InstalledPackageInfo (InstalledPackageInfo) +import Distribution.Simple.Utils +import Distribution.Utils.MapAccum +import Distribution.System +import Distribution.Text +import Distribution.Types.ComponentName +import Distribution.Verbosity as Verbosity +import Distribution.Version +import Distribution.Compat.Graph (IsNode(nodeKey)) + +import System.FilePath ((), (<.>), isAbsolute) +import System.Directory + +import Data.List (partition) 
+import qualified Data.ByteString.Lazy.Char8 as BS.Char8 + +-- ----------------------------------------------------------------------------- +-- Registration + +register :: PackageDescription -> LocalBuildInfo + -> RegisterFlags -- ^Install in the user's database?; verbose + -> IO () +register pkg_descr lbi0 flags = + -- Duncan originally asked for us to not register/install files + -- when there was no public library. But with per-component + -- configure, we legitimately need to install internal libraries + -- so that we can get them. So just unconditionally install. + doRegister + where + doRegister = do + targets <- readTargetInfos verbosity pkg_descr lbi0 (regArgs flags) + + -- It's important to register in build order, because ghc-pkg + -- will complain if a dependency is not registered. + let componentsToRegister + = neededTargetsInBuildOrder' pkg_descr lbi0 (map nodeKey targets) + + (_, ipi_mbs) <- + mapAccumM `flip` installedPkgs lbi0 `flip` componentsToRegister $ \index tgt -> + case targetComponent tgt of + CLib lib -> do + let clbi = targetCLBI tgt + lbi = lbi0 { installedPkgs = index } + ipi <- generateOne pkg_descr lib lbi clbi flags + return (Index.insert ipi index, Just ipi) + _ -> return (index, Nothing) + + registerAll pkg_descr lbi0 flags (catMaybes ipi_mbs) + where + verbosity = fromFlag (regVerbosity flags) + +generateOne :: PackageDescription -> Library -> LocalBuildInfo -> ComponentLocalBuildInfo + -> RegisterFlags + -> IO InstalledPackageInfo +generateOne pkg lib lbi clbi regFlags + = do + absPackageDBs <- absolutePackageDBPaths packageDbs + installedPkgInfo <- generateRegistrationInfo + verbosity pkg lib lbi clbi inplace reloc distPref + (registrationPackageDB absPackageDBs) + info verbosity (IPI.showInstalledPackageInfo installedPkgInfo) + return installedPkgInfo + where + inplace = fromFlag (regInPlace regFlags) + reloc = relocatable lbi + -- FIXME: there's really no guarantee this will work. 
+ -- registering into a totally different db stack can + -- fail if dependencies cannot be satisfied. + packageDbs = nub $ withPackageDB lbi + ++ maybeToList (flagToMaybe (regPackageDB regFlags)) + distPref = fromFlag (regDistPref regFlags) + verbosity = fromFlag (regVerbosity regFlags) + +registerAll :: PackageDescription -> LocalBuildInfo -> RegisterFlags + -> [InstalledPackageInfo] + -> IO () +registerAll pkg lbi regFlags ipis + = do + when (fromFlag (regPrintId regFlags)) $ do + for_ ipis $ \installedPkgInfo -> + -- Only print the public library's IPI + when (packageId installedPkgInfo == packageId pkg + && IPI.sourceLibName installedPkgInfo == Nothing) $ + putStrLn (display (IPI.installedUnitId installedPkgInfo)) + + -- Three different modes: + case () of + _ | modeGenerateRegFile -> writeRegistrationFileOrDirectory + | modeGenerateRegScript -> writeRegisterScript + | otherwise -> do + for_ ipis $ \ipi -> do + setupMessage' verbosity "Registering" (packageId pkg) + (libraryComponentName (IPI.sourceLibName ipi)) + (Just (IPI.instantiatedWith ipi)) + registerPackage verbosity (compiler lbi) (withPrograms lbi) + packageDbs ipi HcPkg.defaultRegisterOptions + + where + modeGenerateRegFile = isJust (flagToMaybe (regGenPkgConf regFlags)) + regFile = fromMaybe (display (packageId pkg) <.> "conf") + (fromFlag (regGenPkgConf regFlags)) + + modeGenerateRegScript = fromFlag (regGenScript regFlags) + + -- FIXME: there's really no guarantee this will work. + -- registering into a totally different db stack can + -- fail if dependencies cannot be satisfied. 
+ packageDbs = nub $ withPackageDB lbi + ++ maybeToList (flagToMaybe (regPackageDB regFlags)) + verbosity = fromFlag (regVerbosity regFlags) + + writeRegistrationFileOrDirectory = do + -- Handles overwriting both directory and file + deletePackageDB regFile + case ipis of + [installedPkgInfo] -> do + info verbosity ("Creating package registration file: " ++ regFile) + writeUTF8File regFile (IPI.showInstalledPackageInfo installedPkgInfo) + _ -> do + info verbosity ("Creating package registration directory: " ++ regFile) + createDirectory regFile + let num_ipis = length ipis + lpad m xs = replicate (m - length ys) '0' ++ ys + where ys = take m xs + number i = lpad (length (show num_ipis)) (show i) + for_ (zip ([1..] :: [Int]) ipis) $ \(i, installedPkgInfo) -> + writeUTF8File (regFile (number i ++ "-" ++ display (IPI.installedUnitId installedPkgInfo))) + (IPI.showInstalledPackageInfo installedPkgInfo) + + writeRegisterScript = + case compilerFlavor (compiler lbi) of + UHC -> notice verbosity "Registration scripts not needed for uhc" + _ -> withHcPkg verbosity + "Registration scripts are not implemented for this compiler" + (compiler lbi) (withPrograms lbi) + (writeHcPkgRegisterScript verbosity ipis packageDbs) + + +generateRegistrationInfo :: Verbosity + -> PackageDescription + -> Library + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> Bool + -> Bool + -> FilePath + -> PackageDB + -> IO InstalledPackageInfo +generateRegistrationInfo verbosity pkg lib lbi clbi inplace reloc distPref packageDb = do + --TODO: eliminate pwd! + pwd <- getCurrentDirectory + + installedPkgInfo <- + if inplace + -- NB: With an inplace installation, the user may run './Setup + -- build' to update the library files, without reregistering. + -- In this case, it is critical that the ABI hash not flip. 
+ then return (inplaceInstalledPackageInfo pwd distPref + pkg (mkAbiHash "inplace") lib lbi clbi) + else do + abi_hash <- abiHash verbosity pkg distPref lbi lib clbi + if reloc + then relocRegistrationInfo verbosity + pkg lib lbi clbi abi_hash packageDb + else return (absoluteInstalledPackageInfo + pkg abi_hash lib lbi clbi) + + + return installedPkgInfo + +-- | Compute the 'AbiHash' of a library that we built inplace. +abiHash :: Verbosity + -> PackageDescription + -> FilePath + -> LocalBuildInfo + -> Library + -> ComponentLocalBuildInfo + -> IO AbiHash +abiHash verbosity pkg distPref lbi lib clbi = + case compilerFlavor comp of + GHC -> do + fmap mkAbiHash $ GHC.libAbiHash verbosity pkg lbi' lib clbi + GHCJS -> do + fmap mkAbiHash $ GHCJS.libAbiHash verbosity pkg lbi' lib clbi + _ -> return (mkAbiHash "") + where + comp = compiler lbi + lbi' = lbi { + withPackageDB = withPackageDB lbi + ++ [SpecificPackageDB (internalPackageDBPath lbi distPref)] + } + +relocRegistrationInfo :: Verbosity + -> PackageDescription + -> Library + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> AbiHash + -> PackageDB + -> IO InstalledPackageInfo +relocRegistrationInfo verbosity pkg lib lbi clbi abi_hash packageDb = + case (compilerFlavor (compiler lbi)) of + GHC -> do fs <- GHC.pkgRoot verbosity lbi packageDb + return (relocatableInstalledPackageInfo + pkg abi_hash lib lbi clbi fs) + _ -> die' verbosity + "Distribution.Simple.Register.relocRegistrationInfo: \ + \not implemented for this compiler" + +initPackageDB :: Verbosity -> Compiler -> ProgramDb -> FilePath -> IO () +initPackageDB verbosity comp progdb dbPath = + createPackageDB verbosity comp progdb False dbPath + +-- | Create an empty package DB at the specified location. 
+createPackageDB :: Verbosity -> Compiler -> ProgramDb -> Bool + -> FilePath -> IO () +createPackageDB verbosity comp progdb preferCompat dbPath = + case compilerFlavor comp of + GHC -> HcPkg.init (GHC.hcPkgInfo progdb) verbosity preferCompat dbPath + GHCJS -> HcPkg.init (GHCJS.hcPkgInfo progdb) verbosity False dbPath + UHC -> return () + HaskellSuite _ -> HaskellSuite.initPackageDB verbosity progdb dbPath + _ -> die' verbosity $ + "Distribution.Simple.Register.createPackageDB: " + ++ "not implemented for this compiler" + +doesPackageDBExist :: FilePath -> NoCallStackIO Bool +doesPackageDBExist dbPath = do + -- currently one impl for all compiler flavours, but could change if needed + dir_exists <- doesDirectoryExist dbPath + if dir_exists + then return True + else doesFileExist dbPath + +deletePackageDB :: FilePath -> NoCallStackIO () +deletePackageDB dbPath = do + -- currently one impl for all compiler flavours, but could change if needed + dir_exists <- doesDirectoryExist dbPath + if dir_exists + then removeDirectoryRecursive dbPath + else do file_exists <- doesFileExist dbPath + when file_exists $ removeFile dbPath + +-- | Run @hc-pkg@ using a given package DB stack, directly forwarding the +-- provided command-line arguments to it. +invokeHcPkg :: Verbosity -> Compiler -> ProgramDb -> PackageDBStack + -> [String] -> IO () +invokeHcPkg verbosity comp progdb dbStack extraArgs = + withHcPkg verbosity "invokeHcPkg" comp progdb + (\hpi -> HcPkg.invoke hpi verbosity dbStack extraArgs) + +withHcPkg :: Verbosity -> String -> Compiler -> ProgramDb + -> (HcPkg.HcPkgInfo -> IO a) -> IO a +withHcPkg verbosity name comp progdb f = + case compilerFlavor comp of + GHC -> f (GHC.hcPkgInfo progdb) + GHCJS -> f (GHCJS.hcPkgInfo progdb) + _ -> die' verbosity ("Distribution.Simple.Register." 
++ name ++ ":\ + \not implemented for this compiler") + +registerPackage :: Verbosity + -> Compiler + -> ProgramDb + -> PackageDBStack + -> InstalledPackageInfo + -> HcPkg.RegisterOptions + -> IO () +registerPackage verbosity comp progdb packageDbs installedPkgInfo registerOptions = + case compilerFlavor comp of + GHC -> GHC.registerPackage verbosity progdb packageDbs installedPkgInfo registerOptions + GHCJS -> GHCJS.registerPackage verbosity progdb packageDbs installedPkgInfo registerOptions + HaskellSuite {} -> + HaskellSuite.registerPackage verbosity progdb packageDbs installedPkgInfo + _ | HcPkg.registerMultiInstance registerOptions + -> die' verbosity "Registering multiple package instances is not yet supported for this compiler" + UHC -> UHC.registerPackage verbosity comp progdb packageDbs installedPkgInfo + _ -> die' verbosity "Registering is not implemented for this compiler" + +writeHcPkgRegisterScript :: Verbosity + -> [InstalledPackageInfo] + -> PackageDBStack + -> HcPkg.HcPkgInfo + -> IO () +writeHcPkgRegisterScript verbosity ipis packageDbs hpi = do + let genScript installedPkgInfo = + let invocation = HcPkg.registerInvocation hpi Verbosity.normal + packageDbs installedPkgInfo + HcPkg.defaultRegisterOptions + in invocationAsSystemScript buildOS invocation + scripts = map genScript ipis + -- TODO: Do something more robust here + regScript = unlines scripts + + info verbosity ("Creating package registration script: " ++ regScriptFileName) + writeUTF8File regScriptFileName regScript + setFileExecutable regScriptFileName + +regScriptFileName :: FilePath +regScriptFileName = case buildOS of + Windows -> "register.bat" + _ -> "register.sh" + + +-- ----------------------------------------------------------------------------- +-- Making the InstalledPackageInfo + +-- | Construct 'InstalledPackageInfo' for a library in a package, given a set +-- of installation directories. 
+-- +generalInstalledPackageInfo + :: ([FilePath] -> [FilePath]) -- ^ Translate relative include dir paths to + -- absolute paths. + -> PackageDescription + -> AbiHash + -> Library + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> InstallDirs FilePath + -> InstalledPackageInfo +generalInstalledPackageInfo adjustRelIncDirs pkg abi_hash lib lbi clbi installDirs = + IPI.InstalledPackageInfo { + IPI.sourcePackageId = packageId pkg, + IPI.installedUnitId = componentUnitId clbi, + IPI.installedComponentId_ = componentComponentId clbi, + IPI.instantiatedWith = componentInstantiatedWith clbi, + IPI.sourceLibName = libName lib, + IPI.compatPackageKey = componentCompatPackageKey clbi, + -- If GHC >= 8.4 we register with SDPX, otherwise with legacy license + IPI.license = + if ghc84 + then Left $ either id licenseToSPDX $ licenseRaw pkg + else Right $ either licenseFromSPDX id $ licenseRaw pkg, + IPI.copyright = copyright pkg, + IPI.maintainer = maintainer pkg, + IPI.author = author pkg, + IPI.stability = stability pkg, + IPI.homepage = homepage pkg, + IPI.pkgUrl = pkgUrl pkg, + IPI.synopsis = synopsis pkg, + IPI.description = description pkg, + IPI.category = category pkg, + IPI.abiHash = abi_hash, + IPI.indefinite = componentIsIndefinite clbi, + IPI.exposed = libExposed lib, + IPI.exposedModules = componentExposedModules clbi + -- add virtual modules into the list of exposed modules for the + -- package database as well. 
+ ++ map (\name -> IPI.ExposedModule name Nothing) (virtualModules bi), + IPI.hiddenModules = otherModules bi, + IPI.trusted = IPI.trusted IPI.emptyInstalledPackageInfo, + IPI.importDirs = [ libdir installDirs | hasModules ], + IPI.libraryDirs = libdirs, + IPI.libraryDynDirs = dynlibdirs, + IPI.dataDir = datadir installDirs, + IPI.hsLibraries = (if hasLibrary + then [getHSLibraryName (componentUnitId clbi)] + else []) ++ extraBundledLibs bi, + IPI.extraLibraries = extraLibs bi, + IPI.extraGHCiLibraries = extraGHCiLibs bi, + IPI.includeDirs = absinc ++ adjustRelIncDirs relinc, + IPI.includes = includes bi, + IPI.depends = depends, + IPI.abiDepends = [], -- due to #5465 + IPI.ccOptions = [], -- Note. NOT ccOptions bi! + -- We don't want cc-options to be propagated + -- to C compilations in other packages. + IPI.cxxOptions = [], -- Also. NOT cxxOptions bi! + IPI.ldOptions = ldOptions bi, + IPI.frameworks = frameworks bi, + IPI.frameworkDirs = extraFrameworkDirs bi, + IPI.haddockInterfaces = [haddockdir installDirs haddockName pkg], + IPI.haddockHTMLs = [htmldir installDirs], + IPI.pkgRoot = Nothing + } + where + ghc84 = case compilerId $ compiler lbi of + CompilerId GHC v -> v >= mkVersion [8, 4] + _ -> False + + bi = libBuildInfo lib + --TODO: unclear what the root cause of the + -- duplication is, but we nub it here for now: + depends = ordNub $ map fst (componentPackageDeps clbi) + (absinc, relinc) = partition isAbsolute (includeDirs bi) + hasModules = not $ null (allLibModules lib clbi) + comp = compiler lbi + hasLibrary = (hasModules || not (null (cSources bi)) + || not (null (asmSources bi)) + || not (null (cmmSources bi)) + || not (null (cxxSources bi)) + || (not (null (jsSources bi)) && + compilerFlavor comp == GHCJS)) + && not (componentIsIndefinite clbi) + (libdirs, dynlibdirs) + | not hasLibrary + = (extraLibDirs bi, []) + -- the dynamic-library-dirs defaults to the library-dirs if not specified, + -- so this works whether the dynamic-library-dirs field is 
supported or not + + | libraryDynDirSupported comp + = (libdir installDirs : extraLibDirs bi, + dynlibdir installDirs : extraLibDirs bi) + + | otherwise + = (libdir installDirs : dynlibdir installDirs : extraLibDirs bi, []) + -- the compiler doesn't understand the dynamic-library-dirs field so we + -- add the dyn directory to the "normal" list in the library-dirs field + +-- | Construct 'InstalledPackageInfo' for a library that is in place in the +-- build tree. +-- +-- This function knows about the layout of in place packages. +-- +inplaceInstalledPackageInfo :: FilePath -- ^ top of the build tree + -> FilePath -- ^ location of the dist tree + -> PackageDescription + -> AbiHash + -> Library + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> InstalledPackageInfo +inplaceInstalledPackageInfo inplaceDir distPref pkg abi_hash lib lbi clbi = + generalInstalledPackageInfo adjustRelativeIncludeDirs + pkg abi_hash lib lbi clbi installDirs + where + adjustRelativeIncludeDirs = map (inplaceDir ) + libTargetDir = componentBuildDir lbi clbi + installDirs = + (absoluteComponentInstallDirs pkg lbi (componentUnitId clbi) NoCopyDest) { + libdir = inplaceDir libTargetDir, + dynlibdir = inplaceDir libTargetDir, + datadir = inplaceDir dataDir pkg, + docdir = inplaceDocdir, + htmldir = inplaceHtmldir, + haddockdir = inplaceHtmldir + } + inplaceDocdir = inplaceDir distPref "doc" + inplaceHtmldir = inplaceDocdir "html" display (packageName pkg) + + +-- | Construct 'InstalledPackageInfo' for the final install location of a +-- library package. +-- +-- This function knows about the layout of installed packages. 
+-- +absoluteInstalledPackageInfo :: PackageDescription + -> AbiHash + -> Library + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> InstalledPackageInfo +absoluteInstalledPackageInfo pkg abi_hash lib lbi clbi = + generalInstalledPackageInfo adjustReativeIncludeDirs + pkg abi_hash lib lbi clbi installDirs + where + -- For installed packages we install all include files into one dir, + -- whereas in the build tree they may live in multiple local dirs. + adjustReativeIncludeDirs _ + | null (installIncludes bi) = [] + | otherwise = [includedir installDirs] + bi = libBuildInfo lib + installDirs = absoluteComponentInstallDirs pkg lbi (componentUnitId clbi) NoCopyDest + + +relocatableInstalledPackageInfo :: PackageDescription + -> AbiHash + -> Library + -> LocalBuildInfo + -> ComponentLocalBuildInfo + -> FilePath + -> InstalledPackageInfo +relocatableInstalledPackageInfo pkg abi_hash lib lbi clbi pkgroot = + generalInstalledPackageInfo adjustReativeIncludeDirs + pkg abi_hash lib lbi clbi installDirs + where + -- For installed packages we install all include files into one dir, + -- whereas in the build tree they may live in multiple local dirs. + adjustReativeIncludeDirs _ + | null (installIncludes bi) = [] + | otherwise = [includedir installDirs] + bi = libBuildInfo lib + + installDirs = fmap (("${pkgroot}" ) . 
shortRelativePath pkgroot) + $ absoluteComponentInstallDirs pkg lbi (componentUnitId clbi) NoCopyDest + +-- ----------------------------------------------------------------------------- +-- Unregistration + +unregister :: PackageDescription -> LocalBuildInfo -> RegisterFlags -> IO () +unregister pkg lbi regFlags = do + let pkgid = packageId pkg + genScript = fromFlag (regGenScript regFlags) + verbosity = fromFlag (regVerbosity regFlags) + packageDb = fromFlagOrDefault (registrationPackageDB (withPackageDB lbi)) + (regPackageDB regFlags) + unreg hpi = + let invocation = HcPkg.unregisterInvocation + hpi Verbosity.normal packageDb pkgid + in if genScript + then writeFileAtomic unregScriptFileName + (BS.Char8.pack $ invocationAsSystemScript buildOS invocation) + else runProgramInvocation verbosity invocation + setupMessage verbosity "Unregistering" pkgid + withHcPkg verbosity "unregistering is only implemented for GHC and GHCJS" + (compiler lbi) (withPrograms lbi) unreg + +unregScriptFileName :: FilePath +unregScriptFileName = case buildOS of + Windows -> "unregister.bat" + _ -> "unregister.sh" + +internalPackageDBPath :: LocalBuildInfo -> FilePath -> FilePath +internalPackageDBPath lbi distPref = + case compilerFlavor (compiler lbi) of + UHC -> UHC.inplacePackageDbPath lbi + _ -> distPref "package.conf.inplace" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Setup.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,2316 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + 
+----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Setup +-- Copyright : Isaac Jones 2003-2004 +-- Duncan Coutts 2007 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is a big module, but not very complicated. The code is very regular +-- and repetitive. It defines the command line interface for all the Cabal +-- commands. For each command (like @configure@, @build@ etc) it defines a type +-- that holds all the flags, the default set of flags and a 'CommandUI' that +-- maps command line flags to and from the corresponding flags type. +-- +-- All the flags types are instances of 'Monoid', see +-- +-- for an explanation. +-- +-- The types defined here get used in the front end and especially in +-- @cabal-install@ which has to do quite a bit of manipulating sets of command +-- line flags. +-- +-- This is actually relatively nice, it works quite well. The main change it +-- needs is to unify it with the code for managing sets of fields that can be +-- read and written from files. This would allow us to save configure flags in +-- config files. 
+ +module Distribution.Simple.Setup ( + + GlobalFlags(..), emptyGlobalFlags, defaultGlobalFlags, globalCommand, + ConfigFlags(..), emptyConfigFlags, defaultConfigFlags, configureCommand, + configPrograms, + configAbsolutePaths, readPackageDbList, showPackageDbList, + CopyFlags(..), emptyCopyFlags, defaultCopyFlags, copyCommand, + InstallFlags(..), emptyInstallFlags, defaultInstallFlags, installCommand, + DoctestFlags(..), emptyDoctestFlags, defaultDoctestFlags, doctestCommand, + HaddockTarget(..), + HaddockFlags(..), emptyHaddockFlags, defaultHaddockFlags, haddockCommand, + HscolourFlags(..), emptyHscolourFlags, defaultHscolourFlags, hscolourCommand, + BuildFlags(..), emptyBuildFlags, defaultBuildFlags, buildCommand, + buildVerbose, + ReplFlags(..), defaultReplFlags, replCommand, + CleanFlags(..), emptyCleanFlags, defaultCleanFlags, cleanCommand, + RegisterFlags(..), emptyRegisterFlags, defaultRegisterFlags, registerCommand, + unregisterCommand, + SDistFlags(..), emptySDistFlags, defaultSDistFlags, sdistCommand, + TestFlags(..), emptyTestFlags, defaultTestFlags, testCommand, + TestShowDetails(..), + BenchmarkFlags(..), emptyBenchmarkFlags, + defaultBenchmarkFlags, benchmarkCommand, + CopyDest(..), + configureArgs, configureOptions, configureCCompiler, configureLinker, + buildOptions, haddockOptions, installDirsOptions, + programDbOptions, programDbPaths', + programConfigurationOptions, programConfigurationPaths', + programFlagsDescription, + replOptions, + splitArgs, + + defaultDistPref, optionDistPref, + + Flag(..), + toFlag, + fromFlag, + fromFlagOrDefault, + flagToMaybe, + flagToList, + maybeToFlag, + BooleanFlag(..), + boolOpt, boolOpt', trueArg, falseArg, + optionVerbosity, optionNumJobs, readPToMaybe ) where + +import Prelude () +import Distribution.Compat.Prelude hiding (get) + +import Distribution.Compiler +import Distribution.ReadE +import Distribution.Text +import Distribution.Parsec.Class +import Distribution.Pretty +import qualified 
Distribution.Compat.ReadP as Parse +import qualified Distribution.Compat.CharParsing as P +import Distribution.ParseUtils (readPToMaybe) +import qualified Text.PrettyPrint as Disp +import Distribution.ModuleName +import Distribution.PackageDescription hiding (Flag) +import Distribution.Simple.Command hiding (boolOpt, boolOpt') +import qualified Distribution.Simple.Command as Command +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Simple.Flag +import Distribution.Simple.Utils +import Distribution.Simple.Program +import Distribution.Simple.InstallDirs +import Distribution.Verbosity +import Distribution.Utils.NubList +import Distribution.Types.Dependency +import Distribution.Types.ComponentId +import Distribution.Types.Module +import Distribution.Types.PackageName + +import Distribution.Compat.Stack +import Distribution.Compat.Semigroup (Last' (..)) + +import Data.Function (on) + +-- FIXME Not sure where this should live +defaultDistPref :: FilePath +defaultDistPref = "dist" + +-- ------------------------------------------------------------ +-- * Global flags +-- ------------------------------------------------------------ + +-- In fact since individual flags types are monoids and these are just sets of +-- flags then they are also monoids pointwise. This turns out to be really +-- useful. The mempty is the set of empty flags and mappend allows us to +-- override specific flags. For example we can start with default flags and +-- override with the ones we get from a file or the command line, or both. + +-- | Flags that apply at the top level, not to any sub-command. 
+data GlobalFlags = GlobalFlags { + globalVersion :: Flag Bool, + globalNumericVersion :: Flag Bool + } deriving (Generic) + +defaultGlobalFlags :: GlobalFlags +defaultGlobalFlags = GlobalFlags { + globalVersion = Flag False, + globalNumericVersion = Flag False + } + +globalCommand :: [Command action] -> CommandUI GlobalFlags +globalCommand commands = CommandUI + { commandName = "" + , commandSynopsis = "" + , commandUsage = \pname -> + "This Setup program uses the Haskell Cabal Infrastructure.\n" + ++ "See http://www.haskell.org/cabal/ for more information.\n" + ++ "\n" + ++ "Usage: " ++ pname ++ " [GLOBAL FLAGS] [COMMAND [FLAGS]]\n" + , commandDescription = Just $ \pname -> + let + commands' = commands ++ [commandAddAction helpCommandUI undefined] + cmdDescs = getNormalCommandDescriptions commands' + maxlen = maximum $ [length name | (name, _) <- cmdDescs] + align str = str ++ replicate (maxlen - length str) ' ' + in + "Commands:\n" + ++ unlines [ " " ++ align name ++ " " ++ descr + | (name, descr) <- cmdDescs ] + ++ "\n" + ++ "For more information about a command use\n" + ++ " " ++ pname ++ " COMMAND --help\n\n" + ++ "Typical steps for installing Cabal packages:\n" + ++ concat [ " " ++ pname ++ " " ++ x ++ "\n" + | x <- ["configure", "build", "install"]] + , commandNotes = Nothing + , commandDefaultFlags = defaultGlobalFlags + , commandOptions = \_ -> + [option ['V'] ["version"] + "Print version information" + globalVersion (\v flags -> flags { globalVersion = v }) + trueArg + ,option [] ["numeric-version"] + "Print just the version number" + globalNumericVersion (\v flags -> flags { globalNumericVersion = v }) + trueArg + ] + } + +emptyGlobalFlags :: GlobalFlags +emptyGlobalFlags = mempty + +instance Monoid GlobalFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup GlobalFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Config flags +-- 
------------------------------------------------------------ + +-- | Flags to @configure@ command. +-- +-- IMPORTANT: every time a new flag is added, 'D.C.Setup.filterConfigureFlags' +-- should be updated. +-- IMPORTANT: every time a new flag is added, it should be added to the Eq instance +data ConfigFlags = ConfigFlags { + -- This is the same hack as in 'buildArgs' and 'copyArgs'. + -- TODO: Stop using this eventually when 'UserHooks' gets changed + configArgs :: [String], + + --FIXME: the configPrograms is only here to pass info through to configure + -- because the type of configure is constrained by the UserHooks. + -- when we change UserHooks next we should pass the initial + -- ProgramDb directly and not via ConfigFlags + configPrograms_ :: Last' ProgramDb, -- ^All programs that + -- @cabal@ may run + + configProgramPaths :: [(String, FilePath)], -- ^user specified programs paths + configProgramArgs :: [(String, [String])], -- ^user specified programs args + configProgramPathExtra :: NubList FilePath, -- ^Extend the $PATH + configHcFlavor :: Flag CompilerFlavor, -- ^The \"flavor\" of the + -- compiler, e.g. GHC. + configHcPath :: Flag FilePath, -- ^given compiler location + configHcPkg :: Flag FilePath, -- ^given hc-pkg location + configVanillaLib :: Flag Bool, -- ^Enable vanilla library + configProfLib :: Flag Bool, -- ^Enable profiling in the library + configSharedLib :: Flag Bool, -- ^Build shared library + configStaticLib :: Flag Bool, -- ^Build static library + configDynExe :: Flag Bool, -- ^Enable dynamic linking of the + -- executables. + configProfExe :: Flag Bool, -- ^Enable profiling in the + -- executables. + configProf :: Flag Bool, -- ^Enable profiling in the library + -- and executables. + configProfDetail :: Flag ProfDetailLevel, -- ^Profiling detail level + -- in the library and executables. 
+ configProfLibDetail :: Flag ProfDetailLevel, -- ^Profiling detail level + -- in the library + configConfigureArgs :: [String], -- ^Extra arguments to @configure@ + configOptimization :: Flag OptimisationLevel, -- ^Enable optimization. + configProgPrefix :: Flag PathTemplate, -- ^Installed executable prefix. + configProgSuffix :: Flag PathTemplate, -- ^Installed executable suffix. + configInstallDirs :: InstallDirs (Flag PathTemplate), -- ^Installation + -- paths + configScratchDir :: Flag FilePath, + configExtraLibDirs :: [FilePath], -- ^ path to search for extra libraries + configExtraFrameworkDirs :: [FilePath], -- ^ path to search for extra + -- frameworks (OS X only) + configExtraIncludeDirs :: [FilePath], -- ^ path to search for header files + configIPID :: Flag String, -- ^ explicit IPID to be used + configCID :: Flag ComponentId, -- ^ explicit CID to be used + configDeterministic :: Flag Bool, -- ^ be as deterministic as possible + -- (e.g., invariant over GHC, database, + -- etc). Used by the test suite + + configDistPref :: Flag FilePath, -- ^"dist" prefix + configCabalFilePath :: Flag FilePath, -- ^ Cabal file to use + configVerbosity :: Flag Verbosity, -- ^verbosity level + configUserInstall :: Flag Bool, -- ^The --user\/--global flag + configPackageDBs :: [Maybe PackageDB], -- ^Which package DBs to use + configGHCiLib :: Flag Bool, -- ^Enable compiling library for GHCi + configSplitSections :: Flag Bool, -- ^Enable -split-sections with GHC + configSplitObjs :: Flag Bool, -- ^Enable -split-objs with GHC + configStripExes :: Flag Bool, -- ^Enable executable stripping + configStripLibs :: Flag Bool, -- ^Enable library stripping + configConstraints :: [Dependency], -- ^Additional constraints for + -- dependencies. + configDependencies :: [(PackageName, ComponentId)], + -- ^The packages depended on. + configInstantiateWith :: [(ModuleName, Module)], + -- ^ The requested Backpack instantiation. 
If empty, either this + -- package does not use Backpack, or we just want to typecheck + -- the indefinite package. + configConfigurationsFlags :: FlagAssignment, + configTests :: Flag Bool, -- ^Enable test suite compilation + configBenchmarks :: Flag Bool, -- ^Enable benchmark compilation + configCoverage :: Flag Bool, -- ^Enable program coverage + configLibCoverage :: Flag Bool, -- ^Enable program coverage (deprecated) + configExactConfiguration :: Flag Bool, + -- ^All direct dependencies and flags are provided on the command line by + -- the user via the '--dependency' and '--flags' options. + configFlagError :: Flag String, + -- ^Halt and show an error message indicating an error in flag assignment + configRelocatable :: Flag Bool, -- ^ Enable relocatable package built + configDebugInfo :: Flag DebugInfoLevel, -- ^ Emit debug info. + configUseResponseFiles :: Flag Bool + -- ^ Whether to use response files at all. They're used for such tools + -- as haddock, or or ld. + } + deriving (Generic, Read, Show) + +instance Binary ConfigFlags + +-- | More convenient version of 'configPrograms'. Results in an +-- 'error' if internal invariant is violated. +configPrograms :: WithCallStack (ConfigFlags -> ProgramDb) +configPrograms = maybe (error "FIXME: remove configPrograms") id . getLast' . 
configPrograms_ + +instance Eq ConfigFlags where + (==) a b = + -- configPrograms skipped: not user specified, has no Eq instance + equal configProgramPaths + && equal configProgramArgs + && equal configProgramPathExtra + && equal configHcFlavor + && equal configHcPath + && equal configHcPkg + && equal configVanillaLib + && equal configProfLib + && equal configSharedLib + && equal configStaticLib + && equal configDynExe + && equal configProfExe + && equal configProf + && equal configProfDetail + && equal configProfLibDetail + && equal configConfigureArgs + && equal configOptimization + && equal configProgPrefix + && equal configProgSuffix + && equal configInstallDirs + && equal configScratchDir + && equal configExtraLibDirs + && equal configExtraIncludeDirs + && equal configIPID + && equal configDeterministic + && equal configDistPref + && equal configVerbosity + && equal configUserInstall + && equal configPackageDBs + && equal configGHCiLib + && equal configSplitSections + && equal configSplitObjs + && equal configStripExes + && equal configStripLibs + && equal configConstraints + && equal configDependencies + && equal configConfigurationsFlags + && equal configTests + && equal configBenchmarks + && equal configCoverage + && equal configLibCoverage + && equal configExactConfiguration + && equal configFlagError + && equal configRelocatable + && equal configDebugInfo + && equal configUseResponseFiles + where + equal f = on (==) f a b + +configAbsolutePaths :: ConfigFlags -> NoCallStackIO ConfigFlags +configAbsolutePaths f = + (\v -> f { configPackageDBs = v }) + `liftM` traverse (maybe (return Nothing) (liftM Just . 
absolutePackageDBPath)) + (configPackageDBs f) + +defaultConfigFlags :: ProgramDb -> ConfigFlags +defaultConfigFlags progDb = emptyConfigFlags { + configArgs = [], + configPrograms_ = pure progDb, + configHcFlavor = maybe NoFlag Flag defaultCompilerFlavor, + configVanillaLib = Flag True, + configProfLib = NoFlag, + configSharedLib = NoFlag, + configStaticLib = NoFlag, + configDynExe = Flag False, + configProfExe = NoFlag, + configProf = NoFlag, + configProfDetail = NoFlag, + configProfLibDetail= NoFlag, + configOptimization = Flag NormalOptimisation, + configProgPrefix = Flag (toPathTemplate ""), + configProgSuffix = Flag (toPathTemplate ""), + configDistPref = NoFlag, + configCabalFilePath = NoFlag, + configVerbosity = Flag normal, + configUserInstall = Flag False, --TODO: reverse this +#if defined(mingw32_HOST_OS) + -- See #1589. + configGHCiLib = Flag True, +#else + configGHCiLib = NoFlag, +#endif + configSplitSections = Flag False, + configSplitObjs = Flag False, -- takes longer, so turn off by default + configStripExes = Flag True, + configStripLibs = Flag True, + configTests = Flag False, + configBenchmarks = Flag False, + configCoverage = Flag False, + configLibCoverage = NoFlag, + configExactConfiguration = Flag False, + configFlagError = NoFlag, + configRelocatable = Flag False, + configDebugInfo = Flag NoDebugInfo, + configUseResponseFiles = NoFlag + } + +configureCommand :: ProgramDb -> CommandUI ConfigFlags +configureCommand progDb = CommandUI + { commandName = "configure" + , commandSynopsis = "Prepare to build the package." 
+ , commandDescription = Just $ \_ -> wrapText $ + "Configure how the package is built by setting " + ++ "package (and other) flags.\n" + ++ "\n" + ++ "The configuration affects several other commands, " + ++ "including build, test, bench, run, repl.\n" + , commandNotes = Just $ \_pname -> programFlagsDescription progDb + , commandUsage = \pname -> + "Usage: " ++ pname ++ " configure [FLAGS]\n" + , commandDefaultFlags = defaultConfigFlags progDb + , commandOptions = \showOrParseArgs -> + configureOptions showOrParseArgs + ++ programDbPaths progDb showOrParseArgs + configProgramPaths (\v fs -> fs { configProgramPaths = v }) + ++ programDbOption progDb showOrParseArgs + configProgramArgs (\v fs -> fs { configProgramArgs = v }) + ++ programDbOptions progDb showOrParseArgs + configProgramArgs (\v fs -> fs { configProgramArgs = v }) + } + +-- | Inverse to 'dispModSubstEntry'. +parsecModSubstEntry :: ParsecParser (ModuleName, Module) +parsecModSubstEntry = do + k <- parsec + _ <- P.char '=' + v <- parsec + return (k, v) + +-- | Pretty-print a single entry of a module substitution. 
+dispModSubstEntry :: (ModuleName, Module) -> Disp.Doc +dispModSubstEntry (k, v) = disp k <<>> Disp.char '=' <<>> disp v + +configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags] +configureOptions showOrParseArgs = + [optionVerbosity configVerbosity + (\v flags -> flags { configVerbosity = v }) + ,optionDistPref + configDistPref (\d flags -> flags { configDistPref = d }) + showOrParseArgs + + ,option [] ["compiler"] "compiler" + configHcFlavor (\v flags -> flags { configHcFlavor = v }) + (choiceOpt [ (Flag GHC, ("g", ["ghc"]), "compile with GHC") + , (Flag GHCJS, ([] , ["ghcjs"]), "compile with GHCJS") + , (Flag UHC, ([] , ["uhc"]), "compile with UHC") + -- "haskell-suite" compiler id string will be replaced + -- by a more specific one during the configure stage + , (Flag (HaskellSuite "haskell-suite"), ([] , ["haskell-suite"]), + "compile with a haskell-suite compiler")]) + + ,option "" ["cabal-file"] + "use this Cabal file" + configCabalFilePath (\v flags -> flags { configCabalFilePath = v }) + (reqArgFlag "PATH") + + ,option "w" ["with-compiler"] + "give the path to a particular compiler" + configHcPath (\v flags -> flags { configHcPath = v }) + (reqArgFlag "PATH") + + ,option "" ["with-hc-pkg"] + "give the path to the package tool" + configHcPkg (\v flags -> flags { configHcPkg = v }) + (reqArgFlag "PATH") + ] + ++ map liftInstallDirs installDirsOptions + ++ [option "" ["program-prefix"] + "prefix to be applied to installed executables" + configProgPrefix + (\v flags -> flags { configProgPrefix = v }) + (reqPathTemplateArgFlag "PREFIX") + + ,option "" ["program-suffix"] + "suffix to be applied to installed executables" + configProgSuffix (\v flags -> flags { configProgSuffix = v } ) + (reqPathTemplateArgFlag "SUFFIX") + + ,option "" ["library-vanilla"] + "Vanilla libraries" + configVanillaLib (\v flags -> flags { configVanillaLib = v }) + (boolOpt [] []) + + ,option "p" ["library-profiling"] + "Library profiling" + configProfLib (\v flags -> flags { 
configProfLib = v }) + (boolOpt "p" []) + + ,option "" ["shared"] + "Shared library" + configSharedLib (\v flags -> flags { configSharedLib = v }) + (boolOpt [] []) + + ,option "" ["static"] + "Static library" + configStaticLib (\v flags -> flags { configStaticLib = v }) + (boolOpt [] []) + + ,option "" ["executable-dynamic"] + "Executable dynamic linking" + configDynExe (\v flags -> flags { configDynExe = v }) + (boolOpt [] []) + + ,option "" ["profiling"] + "Executable and library profiling" + configProf (\v flags -> flags { configProf = v }) + (boolOpt [] []) + + ,option "" ["executable-profiling"] + "Executable profiling (DEPRECATED)" + configProfExe (\v flags -> flags { configProfExe = v }) + (boolOpt [] []) + + ,option "" ["profiling-detail"] + ("Profiling detail level for executable and library (default, " ++ + "none, exported-functions, toplevel-functions, all-functions).") + configProfDetail (\v flags -> flags { configProfDetail = v }) + (reqArg' "level" (Flag . flagToProfDetailLevel) + showProfDetailLevelFlag) + + ,option "" ["library-profiling-detail"] + "Profiling detail level for libraries only." + configProfLibDetail (\v flags -> flags { configProfLibDetail = v }) + (reqArg' "level" (Flag . flagToProfDetailLevel) + showProfDetailLevelFlag) + + ,multiOption "optimization" + configOptimization (\v flags -> flags { configOptimization = v }) + [optArg' "n" (Flag . flagToOptimisationLevel) + (\f -> case f of + Flag NoOptimisation -> [] + Flag NormalOptimisation -> [Nothing] + Flag MaximumOptimisation -> [Just "2"] + _ -> []) + "O" ["enable-optimization","enable-optimisation"] + "Build with optimization (n is 0--2, default is 1)", + noArg (Flag NoOptimisation) [] + ["disable-optimization","disable-optimisation"] + "Build without optimization" + ] + + ,multiOption "debug-info" + configDebugInfo (\v flags -> flags { configDebugInfo = v }) + [optArg' "n" (Flag . 
flagToDebugInfoLevel) + (\f -> case f of + Flag NoDebugInfo -> [] + Flag MinimalDebugInfo -> [Just "1"] + Flag NormalDebugInfo -> [Nothing] + Flag MaximalDebugInfo -> [Just "3"] + _ -> []) + "" ["enable-debug-info"] + "Emit debug info (n is 0--3, default is 0)", + noArg (Flag NoDebugInfo) [] + ["disable-debug-info"] + "Don't emit debug info" + ] + + ,option "" ["library-for-ghci"] + "compile library for use with GHCi" + configGHCiLib (\v flags -> flags { configGHCiLib = v }) + (boolOpt [] []) + + ,option "" ["split-sections"] + "compile library code such that unneeded definitions can be dropped from the final executable (GHC 7.8+)" + configSplitSections (\v flags -> flags { configSplitSections = v }) + (boolOpt [] []) + + ,option "" ["split-objs"] + "split library into smaller objects to reduce binary sizes (GHC 6.6+)" + configSplitObjs (\v flags -> flags { configSplitObjs = v }) + (boolOpt [] []) + + ,option "" ["executable-stripping"] + "strip executables upon installation to reduce binary sizes" + configStripExes (\v flags -> flags { configStripExes = v }) + (boolOpt [] []) + + ,option "" ["library-stripping"] + "strip libraries upon installation to reduce binary sizes" + configStripLibs (\v flags -> flags { configStripLibs = v }) + (boolOpt [] []) + + ,option "" ["configure-option"] + "Extra option for configure" + configConfigureArgs (\v flags -> flags { configConfigureArgs = v }) + (reqArg' "OPT" (\x -> [x]) id) + + ,option "" ["user-install"] + "doing a per-user installation" + configUserInstall (\v flags -> flags { configUserInstall = v }) + (boolOpt' ([],["user"]) ([], ["global"])) + + ,option "" ["package-db"] + ( "Append the given package database to the list of package" + ++ " databases used (to satisfy dependencies and register into)." + ++ " May be a specific file, 'global' or 'user'. The initial list" + ++ " is ['global'], ['global', 'user'], or ['global', $sandbox]," + ++ " depending on context. Use 'clear' to reset the list to empty." 
+ ++ " See the user guide for details.") + configPackageDBs (\v flags -> flags { configPackageDBs = v }) + (reqArg' "DB" readPackageDbList showPackageDbList) + + ,option "f" ["flags"] + "Force values for the given flags in Cabal conditionals in the .cabal file. E.g., --flags=\"debug -usebytestrings\" forces the flag \"debug\" to true and \"usebytestrings\" to false." + configConfigurationsFlags (\v flags -> flags { configConfigurationsFlags = v }) + (reqArg "FLAGS" + (parsecToReadE (\err -> "Invalid flag assignment: " ++ err) parsecFlagAssignment) + showFlagAssignment) + + ,option "" ["extra-include-dirs"] + "A list of directories to search for header files" + configExtraIncludeDirs (\v flags -> flags {configExtraIncludeDirs = v}) + (reqArg' "PATH" (\x -> [x]) id) + + ,option "" ["deterministic"] + "Try to be as deterministic as possible (used by the test suite)" + configDeterministic (\v flags -> flags {configDeterministic = v}) + (boolOpt [] []) + + ,option "" ["ipid"] + "Installed package ID to compile this package as" + configIPID (\v flags -> flags {configIPID = v}) + (reqArgFlag "IPID") + + ,option "" ["cid"] + "Installed component ID to compile this component as" + (fmap display . 
configCID) (\v flags -> flags {configCID = fmap mkComponentId v}) + (reqArgFlag "CID") + + ,option "" ["extra-lib-dirs"] + "A list of directories to search for external libraries" + configExtraLibDirs (\v flags -> flags {configExtraLibDirs = v}) + (reqArg' "PATH" (\x -> [x]) id) + + ,option "" ["extra-framework-dirs"] + "A list of directories to search for external frameworks (OS X only)" + configExtraFrameworkDirs + (\v flags -> flags {configExtraFrameworkDirs = v}) + (reqArg' "PATH" (\x -> [x]) id) + + ,option "" ["extra-prog-path"] + "A list of directories to search for required programs (in addition to the normal search locations)" + configProgramPathExtra (\v flags -> flags {configProgramPathExtra = v}) + (reqArg' "PATH" (\x -> toNubList [x]) fromNubList) + + ,option "" ["constraint"] + "A list of additional constraints on the dependencies." + configConstraints (\v flags -> flags { configConstraints = v}) + (reqArg "DEPENDENCY" + (parsecToReadE (const "dependency expected") ((\x -> [x]) `fmap` parsec)) + (map display)) + + ,option "" ["dependency"] + "A list of exact dependencies. E.g., --dependency=\"void=void-0.5.8-177d5cdf20962d0581fe2e4932a6c309\"" + configDependencies (\v flags -> flags { configDependencies = v}) + (reqArg "NAME=CID" + (parsecToReadE (const "dependency expected") ((\x -> [x]) `fmap` parsecDependency)) + (map (\x -> display (fst x) ++ "=" ++ display (snd x)))) + + ,option "" ["instantiate-with"] + "A mapping of signature names to concrete module instantiations." + configInstantiateWith (\v flags -> flags { configInstantiateWith = v }) + (reqArg "NAME=MOD" + (parsecToReadE ("Cannot parse module substitution: " ++) (fmap (:[]) parsecModSubstEntry)) + (map (Disp.renderStyle defaultStyle . dispModSubstEntry))) + + ,option "" ["tests"] + "dependency checking and compilation for test suites listed in the package description file." 
+ configTests (\v flags -> flags { configTests = v }) + (boolOpt [] []) + + ,option "" ["coverage"] + "build package with Haskell Program Coverage. (GHC only)" + configCoverage (\v flags -> flags { configCoverage = v }) + (boolOpt [] []) + + ,option "" ["library-coverage"] + "build package with Haskell Program Coverage. (GHC only) (DEPRECATED)" + configLibCoverage (\v flags -> flags { configLibCoverage = v }) + (boolOpt [] []) + + ,option "" ["exact-configuration"] + "All direct dependencies and flags are provided on the command line." + configExactConfiguration + (\v flags -> flags { configExactConfiguration = v }) + trueArg + + ,option "" ["benchmarks"] + "dependency checking and compilation for benchmarks listed in the package description file." + configBenchmarks (\v flags -> flags { configBenchmarks = v }) + (boolOpt [] []) + + ,option "" ["relocatable"] + "building a package that is relocatable. (GHC only)" + configRelocatable (\v flags -> flags { configRelocatable = v}) + (boolOpt [] []) + + ,option "" ["response-files"] + "enable workaround for old versions of programs like \"ar\" that do not support @file arguments" + configUseResponseFiles + (\v flags -> flags { configUseResponseFiles = v }) + (boolOpt' ([], ["disable-response-files"]) ([], [])) + ] + where + liftInstallDirs = + liftOption configInstallDirs (\v flags -> flags { configInstallDirs = v }) + + reqPathTemplateArgFlag title _sf _lf d get set = + reqArgFlag title _sf _lf d + (fmap fromPathTemplate . get) (set . fmap toPathTemplate) + +showFlagAssignment :: FlagAssignment -> [String] +showFlagAssignment = map showFlagValue' . unFlagAssignment + where + -- We can't use 'showFlagValue' because legacy custom-setups don't + -- support the '+' prefix in --flags; so we omit the (redundant) + prefix; + -- NB: we assume that we never have to set/enable '-'-prefixed flags here. 
+ showFlagValue' :: (FlagName, Bool) -> String + showFlagValue' (f, True) = unFlagName f + showFlagValue' (f, False) = '-' : unFlagName f + +readPackageDbList :: String -> [Maybe PackageDB] +readPackageDbList "clear" = [Nothing] +readPackageDbList "global" = [Just GlobalPackageDB] +readPackageDbList "user" = [Just UserPackageDB] +readPackageDbList other = [Just (SpecificPackageDB other)] + +showPackageDbList :: [Maybe PackageDB] -> [String] +showPackageDbList = map showPackageDb + where + showPackageDb Nothing = "clear" + showPackageDb (Just GlobalPackageDB) = "global" + showPackageDb (Just UserPackageDB) = "user" + showPackageDb (Just (SpecificPackageDB db)) = db + +showProfDetailLevelFlag :: Flag ProfDetailLevel -> [String] +showProfDetailLevelFlag NoFlag = [] +showProfDetailLevelFlag (Flag dl) = [showProfDetailLevel dl] + +parsecDependency :: ParsecParser (PackageName, ComponentId) +parsecDependency = do + x <- parsec + _ <- P.char '=' + y <- parsec + return (x, y) + +installDirsOptions :: [OptionField (InstallDirs (Flag PathTemplate))] +installDirsOptions = + [ option "" ["prefix"] + "bake this prefix in preparation of installation" + prefix (\v flags -> flags { prefix = v }) + installDirArg + + , option "" ["bindir"] + "installation directory for executables" + bindir (\v flags -> flags { bindir = v }) + installDirArg + + , option "" ["libdir"] + "installation directory for libraries" + libdir (\v flags -> flags { libdir = v }) + installDirArg + + , option "" ["libsubdir"] + "subdirectory of libdir in which libs are installed" + libsubdir (\v flags -> flags { libsubdir = v }) + installDirArg + + , option "" ["dynlibdir"] + "installation directory for dynamic libraries" + dynlibdir (\v flags -> flags { dynlibdir = v }) + installDirArg + + , option "" ["libexecdir"] + "installation directory for program executables" + libexecdir (\v flags -> flags { libexecdir = v }) + installDirArg + + , option "" ["libexecsubdir"] + "subdirectory of libexecdir in which private 
executables are installed" + libexecsubdir (\v flags -> flags { libexecsubdir = v }) + installDirArg + + , option "" ["datadir"] + "installation directory for read-only data" + datadir (\v flags -> flags { datadir = v }) + installDirArg + + , option "" ["datasubdir"] + "subdirectory of datadir in which data files are installed" + datasubdir (\v flags -> flags { datasubdir = v }) + installDirArg + + , option "" ["docdir"] + "installation directory for documentation" + docdir (\v flags -> flags { docdir = v }) + installDirArg + + , option "" ["htmldir"] + "installation directory for HTML documentation" + htmldir (\v flags -> flags { htmldir = v }) + installDirArg + + , option "" ["haddockdir"] + "installation directory for haddock interfaces" + haddockdir (\v flags -> flags { haddockdir = v }) + installDirArg + + , option "" ["sysconfdir"] + "installation directory for configuration files" + sysconfdir (\v flags -> flags { sysconfdir = v }) + installDirArg + ] + where + installDirArg _sf _lf d get set = + reqArgFlag "DIR" _sf _lf d + (fmap fromPathTemplate . get) (set . fmap toPathTemplate) + +emptyConfigFlags :: ConfigFlags +emptyConfigFlags = mempty + +instance Monoid ConfigFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup ConfigFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Copy flags +-- ------------------------------------------------------------ + +-- | Flags to @copy@: (destdir, copy-prefix (backwards compat), verbosity) +data CopyFlags = CopyFlags { + copyDest :: Flag CopyDest, + copyDistPref :: Flag FilePath, + copyVerbosity :: Flag Verbosity, + -- This is the same hack as in 'buildArgs'. 
But I (ezyang) don't + -- think it's a hack, it's the right way to make hooks more robust + -- TODO: Stop using this eventually when 'UserHooks' gets changed + copyArgs :: [String], + copyCabalFilePath :: Flag FilePath + } + deriving (Show, Generic) + +defaultCopyFlags :: CopyFlags +defaultCopyFlags = CopyFlags { + copyDest = Flag NoCopyDest, + copyDistPref = NoFlag, + copyVerbosity = Flag normal, + copyArgs = [], + copyCabalFilePath = mempty + } + +copyCommand :: CommandUI CopyFlags +copyCommand = CommandUI + { commandName = "copy" + , commandSynopsis = "Copy the files of all/specific components to install locations." + , commandDescription = Just $ \_ -> wrapText $ + "Components encompass executables and libraries. " + ++ "Does not call register, and allows a prefix at install time. " + ++ "Without the --destdir flag, configure determines location.\n" + , commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " copy " + ++ " All the components in the package\n" + ++ " " ++ pname ++ " copy foo " + ++ " A component (i.e. lib, exe, test suite)" + , commandUsage = usageAlternatives "copy" $ + [ "[FLAGS]" + , "COMPONENTS [FLAGS]" + ] + , commandDefaultFlags = defaultCopyFlags + , commandOptions = \showOrParseArgs -> case showOrParseArgs of + ShowArgs -> filter ((`notElem` ["target-package-db"]) + . optionName) $ copyOptions ShowArgs + ParseArgs -> copyOptions ParseArgs +} + +copyOptions :: ShowOrParseArgs -> [OptionField CopyFlags] +copyOptions showOrParseArgs = + [optionVerbosity copyVerbosity (\v flags -> flags { copyVerbosity = v }) + + ,optionDistPref + copyDistPref (\d flags -> flags { copyDistPref = d }) + showOrParseArgs + + ,option "" ["destdir"] + "directory to copy files to, prepended to installation directories" + copyDest (\v flags -> case copyDest flags of + Flag (CopyToDb _) -> error "Use either 'destdir' or 'target-package-db'." + _ -> flags { copyDest = v }) + (reqArg "DIR" (succeedReadE (Flag . 
CopyTo)) + (\f -> case f of Flag (CopyTo p) -> [p]; _ -> [])) + + ,option "" ["target-package-db"] + "package database to copy files into. Required when using ${pkgroot} prefix." + copyDest (\v flags -> case copyDest flags of + NoFlag -> flags { copyDest = v } + Flag NoCopyDest -> flags { copyDest = v } + _ -> error "Use either 'destdir' or 'target-package-db'.") + (reqArg "DATABASE" (succeedReadE (Flag . CopyToDb)) + (\f -> case f of Flag (CopyToDb p) -> [p]; _ -> [])) + ] + +emptyCopyFlags :: CopyFlags +emptyCopyFlags = mempty + +instance Monoid CopyFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup CopyFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Install flags +-- ------------------------------------------------------------ + +-- | Flags to @install@: (package db, verbosity) +data InstallFlags = InstallFlags { + installPackageDB :: Flag PackageDB, + installDest :: Flag CopyDest, + installDistPref :: Flag FilePath, + installUseWrapper :: Flag Bool, + installInPlace :: Flag Bool, + installVerbosity :: Flag Verbosity, + -- this is only here, because we can not + -- change the hooks API. + installCabalFilePath :: Flag FilePath + } + deriving (Show, Generic) + +defaultInstallFlags :: InstallFlags +defaultInstallFlags = InstallFlags { + installPackageDB = NoFlag, + installDest = Flag NoCopyDest, + installDistPref = NoFlag, + installUseWrapper = Flag False, + installInPlace = Flag False, + installVerbosity = Flag normal, + installCabalFilePath = mempty + } + +installCommand :: CommandUI InstallFlags +installCommand = CommandUI + { commandName = "install" + , commandSynopsis = + "Copy the files into the install locations. Run register." + , commandDescription = Just $ \_ -> wrapText $ + "Unlike the copy command, install calls the register command." 
+ ++ "If you want to install into a location that is not what was" + ++ "specified in the configure step, use the copy command.\n" + , commandNotes = Nothing + , commandUsage = \pname -> + "Usage: " ++ pname ++ " install [FLAGS]\n" + , commandDefaultFlags = defaultInstallFlags + , commandOptions = \showOrParseArgs -> case showOrParseArgs of + ShowArgs -> filter ((`notElem` ["target-package-db"]) + . optionName) $ installOptions ShowArgs + ParseArgs -> installOptions ParseArgs + } + +installOptions :: ShowOrParseArgs -> [OptionField InstallFlags] +installOptions showOrParseArgs = + [optionVerbosity installVerbosity (\v flags -> flags { installVerbosity = v }) + ,optionDistPref + installDistPref (\d flags -> flags { installDistPref = d }) + showOrParseArgs + + ,option "" ["inplace"] + "install the package in the install subdirectory of the dist prefix, so it can be used without being installed" + installInPlace (\v flags -> flags { installInPlace = v }) + trueArg + + ,option "" ["shell-wrappers"] + "using shell script wrappers around executables" + installUseWrapper (\v flags -> flags { installUseWrapper = v }) + (boolOpt [] []) + + ,option "" ["package-db"] "" + installPackageDB (\v flags -> flags { installPackageDB = v }) + (choiceOpt [ (Flag UserPackageDB, ([],["user"]), + "upon configuration register this package in the user's local package database") + , (Flag GlobalPackageDB, ([],["global"]), + "(default) upon configuration register this package in the system-wide package database")]) + ,option "" ["target-package-db"] + "package database to install into. Required when using ${pkgroot} prefix." + installDest (\v flags -> flags { installDest = v }) + (reqArg "DATABASE" (succeedReadE (Flag . 
CopyToDb)) + (\f -> case f of Flag (CopyToDb p) -> [p]; _ -> [])) + ] + +emptyInstallFlags :: InstallFlags +emptyInstallFlags = mempty + +instance Monoid InstallFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup InstallFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * SDist flags +-- ------------------------------------------------------------ + +-- | Flags to @sdist@: (snapshot, verbosity) +data SDistFlags = SDistFlags { + sDistSnapshot :: Flag Bool, + sDistDirectory :: Flag FilePath, + sDistDistPref :: Flag FilePath, + sDistListSources :: Flag FilePath, + sDistVerbosity :: Flag Verbosity + } + deriving (Show, Generic) + +defaultSDistFlags :: SDistFlags +defaultSDistFlags = SDistFlags { + sDistSnapshot = Flag False, + sDistDirectory = mempty, + sDistDistPref = NoFlag, + sDistListSources = mempty, + sDistVerbosity = Flag normal + } + +sdistCommand :: CommandUI SDistFlags +sdistCommand = CommandUI + { commandName = "sdist" + , commandSynopsis = + "Generate a source distribution file (.tar.gz)." 
+ , commandDescription = Nothing + , commandNotes = Nothing + , commandUsage = \pname -> + "Usage: " ++ pname ++ " sdist [FLAGS]\n" + , commandDefaultFlags = defaultSDistFlags + , commandOptions = \showOrParseArgs -> + [optionVerbosity sDistVerbosity (\v flags -> flags { sDistVerbosity = v }) + ,optionDistPref + sDistDistPref (\d flags -> flags { sDistDistPref = d }) + showOrParseArgs + + ,option "" ["list-sources"] + "Just write a list of the package's sources to a file" + sDistListSources (\v flags -> flags { sDistListSources = v }) + (reqArgFlag "FILE") + + ,option "" ["snapshot"] + "Produce a snapshot source distribution" + sDistSnapshot (\v flags -> flags { sDistSnapshot = v }) + trueArg + + ,option "" ["output-directory"] + ("Generate a source distribution in the given directory, " + ++ "without creating a tarball") + sDistDirectory (\v flags -> flags { sDistDirectory = v }) + (reqArgFlag "DIR") + ] + } + +emptySDistFlags :: SDistFlags +emptySDistFlags = mempty + +instance Monoid SDistFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup SDistFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Register flags +-- ------------------------------------------------------------ + +-- | Flags to @register@ and @unregister@: (user package, gen-script, +-- in-place, verbosity) +data RegisterFlags = RegisterFlags { + regPackageDB :: Flag PackageDB, + regGenScript :: Flag Bool, + regGenPkgConf :: Flag (Maybe FilePath), + regInPlace :: Flag Bool, + regDistPref :: Flag FilePath, + regPrintId :: Flag Bool, + regVerbosity :: Flag Verbosity, + -- Same as in 'buildArgs' and 'copyArgs' + regArgs :: [String], + regCabalFilePath :: Flag FilePath + } + deriving (Show, Generic) + +defaultRegisterFlags :: RegisterFlags +defaultRegisterFlags = RegisterFlags { + regPackageDB = NoFlag, + regGenScript = Flag False, + regGenPkgConf = NoFlag, + regInPlace = Flag False, + regDistPref = NoFlag, + regPrintId = Flag False, + 
regArgs = [], + regCabalFilePath = mempty, + regVerbosity = Flag normal + } + +registerCommand :: CommandUI RegisterFlags +registerCommand = CommandUI + { commandName = "register" + , commandSynopsis = + "Register this package with the compiler." + , commandDescription = Nothing + , commandNotes = Nothing + , commandUsage = \pname -> + "Usage: " ++ pname ++ " register [FLAGS]\n" + , commandDefaultFlags = defaultRegisterFlags + , commandOptions = \showOrParseArgs -> + [optionVerbosity regVerbosity (\v flags -> flags { regVerbosity = v }) + ,optionDistPref + regDistPref (\d flags -> flags { regDistPref = d }) + showOrParseArgs + + ,option "" ["packageDB"] "" + regPackageDB (\v flags -> flags { regPackageDB = v }) + (choiceOpt [ (Flag UserPackageDB, ([],["user"]), + "upon registration, register this package in the user's local package database") + , (Flag GlobalPackageDB, ([],["global"]), + "(default)upon registration, register this package in the system-wide package database")]) + + ,option "" ["inplace"] + "register the package in the build location, so it can be used without being installed" + regInPlace (\v flags -> flags { regInPlace = v }) + trueArg + + ,option "" ["gen-script"] + "instead of registering, generate a script to register later" + regGenScript (\v flags -> flags { regGenScript = v }) + trueArg + + ,option "" ["gen-pkg-config"] + "instead of registering, generate a package registration file/directory" + regGenPkgConf (\v flags -> flags { regGenPkgConf = v }) + (optArg' "PKG" Flag flagToList) + + ,option "" ["print-ipid"] + "print the installed package ID calculated for this package" + regPrintId (\v flags -> flags { regPrintId = v }) + trueArg + ] + } + +unregisterCommand :: CommandUI RegisterFlags +unregisterCommand = CommandUI + { commandName = "unregister" + , commandSynopsis = + "Unregister this package with the compiler." 
+ , commandDescription = Nothing + , commandNotes = Nothing + , commandUsage = \pname -> + "Usage: " ++ pname ++ " unregister [FLAGS]\n" + , commandDefaultFlags = defaultRegisterFlags + , commandOptions = \showOrParseArgs -> + [optionVerbosity regVerbosity (\v flags -> flags { regVerbosity = v }) + ,optionDistPref + regDistPref (\d flags -> flags { regDistPref = d }) + showOrParseArgs + + ,option "" ["user"] "" + regPackageDB (\v flags -> flags { regPackageDB = v }) + (choiceOpt [ (Flag UserPackageDB, ([],["user"]), + "unregister this package in the user's local package database") + , (Flag GlobalPackageDB, ([],["global"]), + "(default) unregister this package in the system-wide package database")]) + + ,option "" ["gen-script"] + "Instead of performing the unregister command, generate a script to unregister later" + regGenScript (\v flags -> flags { regGenScript = v }) + trueArg + ] + } + +emptyRegisterFlags :: RegisterFlags +emptyRegisterFlags = mempty + +instance Monoid RegisterFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup RegisterFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * HsColour flags +-- ------------------------------------------------------------ + +data HscolourFlags = HscolourFlags { + hscolourCSS :: Flag FilePath, + hscolourExecutables :: Flag Bool, + hscolourTestSuites :: Flag Bool, + hscolourBenchmarks :: Flag Bool, + hscolourForeignLibs :: Flag Bool, + hscolourDistPref :: Flag FilePath, + hscolourVerbosity :: Flag Verbosity, + hscolourCabalFilePath :: Flag FilePath + } + deriving (Show, Generic) + +emptyHscolourFlags :: HscolourFlags +emptyHscolourFlags = mempty + +defaultHscolourFlags :: HscolourFlags +defaultHscolourFlags = HscolourFlags { + hscolourCSS = NoFlag, + hscolourExecutables = Flag False, + hscolourTestSuites = Flag False, + hscolourBenchmarks = Flag False, + hscolourDistPref = NoFlag, + hscolourForeignLibs = Flag False, + hscolourVerbosity = Flag normal, + 
hscolourCabalFilePath = mempty + } + +instance Monoid HscolourFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup HscolourFlags where + (<>) = gmappend + +hscolourCommand :: CommandUI HscolourFlags +hscolourCommand = CommandUI + { commandName = "hscolour" + , commandSynopsis = + "Generate HsColour colourised code, in HTML format." + , commandDescription = Just (\_ -> "Requires the hscolour program.\n") + , commandNotes = Just $ \_ -> + "Deprecated in favour of 'cabal haddock --hyperlink-source'." + , commandUsage = \pname -> + "Usage: " ++ pname ++ " hscolour [FLAGS]\n" + , commandDefaultFlags = defaultHscolourFlags + , commandOptions = \showOrParseArgs -> + [optionVerbosity hscolourVerbosity + (\v flags -> flags { hscolourVerbosity = v }) + ,optionDistPref + hscolourDistPref (\d flags -> flags { hscolourDistPref = d }) + showOrParseArgs + + ,option "" ["executables"] + "Run hscolour for Executables targets" + hscolourExecutables (\v flags -> flags { hscolourExecutables = v }) + trueArg + + ,option "" ["tests"] + "Run hscolour for Test Suite targets" + hscolourTestSuites (\v flags -> flags { hscolourTestSuites = v }) + trueArg + + ,option "" ["benchmarks"] + "Run hscolour for Benchmark targets" + hscolourBenchmarks (\v flags -> flags { hscolourBenchmarks = v }) + trueArg + + ,option "" ["foreign-libraries"] + "Run hscolour for Foreign Library targets" + hscolourForeignLibs (\v flags -> flags { hscolourForeignLibs = v }) + trueArg + + ,option "" ["all"] + "Run hscolour for all targets" + (\f -> allFlags [ hscolourExecutables f + , hscolourTestSuites f + , hscolourBenchmarks f + , hscolourForeignLibs f + ]) + (\v flags -> flags { hscolourExecutables = v + , hscolourTestSuites = v + , hscolourBenchmarks = v + , hscolourForeignLibs = v + }) + trueArg + + ,option "" ["css"] + "Use a cascading style sheet" + hscolourCSS (\v flags -> flags { hscolourCSS = v }) + (reqArgFlag "PATH") + ] + } + +-- 
------------------------------------------------------------ +-- * Doctest flags +-- ------------------------------------------------------------ + +data DoctestFlags = DoctestFlags { + doctestProgramPaths :: [(String, FilePath)], + doctestProgramArgs :: [(String, [String])], + doctestDistPref :: Flag FilePath, + doctestVerbosity :: Flag Verbosity + } + deriving (Show, Generic) + +defaultDoctestFlags :: DoctestFlags +defaultDoctestFlags = DoctestFlags { + doctestProgramPaths = mempty, + doctestProgramArgs = [], + doctestDistPref = NoFlag, + doctestVerbosity = Flag normal + } + +doctestCommand :: CommandUI DoctestFlags +doctestCommand = CommandUI + { commandName = "doctest" + , commandSynopsis = "Run doctest tests." + , commandDescription = Just $ \_ -> + "Requires the program doctest, version 0.12.\n" + , commandNotes = Nothing + , commandUsage = \pname -> + "Usage: " ++ pname ++ " doctest [FLAGS]\n" + , commandDefaultFlags = defaultDoctestFlags + , commandOptions = \showOrParseArgs -> + doctestOptions showOrParseArgs + ++ programDbPaths progDb ParseArgs + doctestProgramPaths (\v flags -> flags { doctestProgramPaths = v }) + ++ programDbOption progDb showOrParseArgs + doctestProgramArgs (\v fs -> fs { doctestProgramArgs = v }) + ++ programDbOptions progDb ParseArgs + doctestProgramArgs (\v flags -> flags { doctestProgramArgs = v }) + } + where + progDb = addKnownProgram doctestProgram + emptyProgramDb + +doctestOptions :: ShowOrParseArgs -> [OptionField DoctestFlags] +doctestOptions showOrParseArgs = + [optionVerbosity doctestVerbosity + (\v flags -> flags { doctestVerbosity = v }) + ,optionDistPref + doctestDistPref (\d flags -> flags { doctestDistPref = d }) + showOrParseArgs + ] + +emptyDoctestFlags :: DoctestFlags +emptyDoctestFlags = mempty + +instance Monoid DoctestFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup DoctestFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Haddock flags +-- 
------------------------------------------------------------ + + +-- | When we build haddock documentation, there are two cases: +-- +-- 1. We build haddocks only for the current development version, +-- intended for local use and not for distribution. In this case, +-- we store the generated documentation in @/doc/html/@. +-- +-- 2. We build haddocks for intended for uploading them to hackage. +-- In this case, we need to follow the layout that hackage expects +-- from documentation tarballs, and we might also want to use different +-- flags than for development builds, so in this case we store the generated +-- documentation in @/doc/html/-docs@. +data HaddockTarget = ForHackage | ForDevelopment deriving (Eq, Show, Generic) + +instance Binary HaddockTarget + +instance Text HaddockTarget where + disp ForHackage = Disp.text "for-hackage" + disp ForDevelopment = Disp.text "for-development" + + parse = Parse.choice [ Parse.string "for-hackage" >> return ForHackage + , Parse.string "for-development" >> return ForDevelopment] + +data HaddockFlags = HaddockFlags { + haddockProgramPaths :: [(String, FilePath)], + haddockProgramArgs :: [(String, [String])], + haddockHoogle :: Flag Bool, + haddockHtml :: Flag Bool, + haddockHtmlLocation :: Flag String, + haddockForHackage :: Flag HaddockTarget, + haddockExecutables :: Flag Bool, + haddockTestSuites :: Flag Bool, + haddockBenchmarks :: Flag Bool, + haddockForeignLibs :: Flag Bool, + haddockInternal :: Flag Bool, + haddockCss :: Flag FilePath, + haddockLinkedSource :: Flag Bool, + haddockQuickJump :: Flag Bool, + haddockHscolourCss :: Flag FilePath, + haddockContents :: Flag PathTemplate, + haddockDistPref :: Flag FilePath, + haddockKeepTempFiles:: Flag Bool, + haddockVerbosity :: Flag Verbosity, + haddockCabalFilePath :: Flag FilePath, + haddockArgs :: [String] + } + deriving (Show, Generic) + +defaultHaddockFlags :: HaddockFlags +defaultHaddockFlags = HaddockFlags { + haddockProgramPaths = mempty, + haddockProgramArgs = 
[], + haddockHoogle = Flag False, + haddockHtml = Flag False, + haddockHtmlLocation = NoFlag, + haddockForHackage = NoFlag, + haddockExecutables = Flag False, + haddockTestSuites = Flag False, + haddockBenchmarks = Flag False, + haddockForeignLibs = Flag False, + haddockInternal = Flag False, + haddockCss = NoFlag, + haddockLinkedSource = Flag False, + haddockQuickJump = Flag False, + haddockHscolourCss = NoFlag, + haddockContents = NoFlag, + haddockDistPref = NoFlag, + haddockKeepTempFiles= Flag False, + haddockVerbosity = Flag normal, + haddockCabalFilePath = mempty, + haddockArgs = mempty + } + +haddockCommand :: CommandUI HaddockFlags +haddockCommand = CommandUI + { commandName = "haddock" + , commandSynopsis = "Generate Haddock HTML documentation." + , commandDescription = Just $ \_ -> + "Requires the program haddock, version 2.x.\n" + , commandNotes = Nothing + , commandUsage = usageAlternatives "haddock" $ + [ "[FLAGS]" + , "COMPONENTS [FLAGS]" + ] + , commandDefaultFlags = defaultHaddockFlags + , commandOptions = \showOrParseArgs -> + haddockOptions showOrParseArgs + ++ programDbPaths progDb ParseArgs + haddockProgramPaths (\v flags -> flags { haddockProgramPaths = v}) + ++ programDbOption progDb showOrParseArgs + haddockProgramArgs (\v fs -> fs { haddockProgramArgs = v }) + ++ programDbOptions progDb ParseArgs + haddockProgramArgs (\v flags -> flags { haddockProgramArgs = v}) + } + where + progDb = addKnownProgram haddockProgram + $ addKnownProgram ghcProgram + $ emptyProgramDb + +haddockOptions :: ShowOrParseArgs -> [OptionField HaddockFlags] +haddockOptions showOrParseArgs = + [optionVerbosity haddockVerbosity + (\v flags -> flags { haddockVerbosity = v }) + ,optionDistPref + haddockDistPref (\d flags -> flags { haddockDistPref = d }) + showOrParseArgs + + ,option "" ["keep-temp-files"] + "Keep temporary files" + haddockKeepTempFiles (\b flags -> flags { haddockKeepTempFiles = b }) + trueArg + + ,option "" ["hoogle"] + "Generate a hoogle database" + 
haddockHoogle (\v flags -> flags { haddockHoogle = v }) + trueArg + + ,option "" ["html"] + "Generate HTML documentation (the default)" + haddockHtml (\v flags -> flags { haddockHtml = v }) + trueArg + + ,option "" ["html-location"] + "Location of HTML documentation for pre-requisite packages" + haddockHtmlLocation (\v flags -> flags { haddockHtmlLocation = v }) + (reqArgFlag "URL") + + ,option "" ["for-hackage"] + "Collection of flags to generate documentation suitable for upload to hackage" + haddockForHackage (\v flags -> flags { haddockForHackage = v }) + (noArg (Flag ForHackage)) + + ,option "" ["executables"] + "Run haddock for Executables targets" + haddockExecutables (\v flags -> flags { haddockExecutables = v }) + trueArg + + ,option "" ["tests"] + "Run haddock for Test Suite targets" + haddockTestSuites (\v flags -> flags { haddockTestSuites = v }) + trueArg + + ,option "" ["benchmarks"] + "Run haddock for Benchmark targets" + haddockBenchmarks (\v flags -> flags { haddockBenchmarks = v }) + trueArg + + ,option "" ["foreign-libraries"] + "Run haddock for Foreign Library targets" + haddockForeignLibs (\v flags -> flags { haddockForeignLibs = v }) + trueArg + + ,option "" ["all"] + "Run haddock for all targets" + (\f -> allFlags [ haddockExecutables f + , haddockTestSuites f + , haddockBenchmarks f + , haddockForeignLibs f + ]) + (\v flags -> flags { haddockExecutables = v + , haddockTestSuites = v + , haddockBenchmarks = v + , haddockForeignLibs = v + }) + trueArg + + ,option "" ["internal"] + "Run haddock for internal modules and include all symbols" + haddockInternal (\v flags -> flags { haddockInternal = v }) + trueArg + + ,option "" ["css"] + "Use PATH as the haddock stylesheet" + haddockCss (\v flags -> flags { haddockCss = v }) + (reqArgFlag "PATH") + + ,option "" ["hyperlink-source","hyperlink-sources","hyperlinked-source"] + "Hyperlink the documentation to the source code" + haddockLinkedSource (\v flags -> flags { haddockLinkedSource = v }) + 
trueArg + + ,option "" ["quickjump"] + "Generate an index for interactive documentation navigation" + haddockQuickJump (\v flags -> flags { haddockQuickJump = v }) + trueArg + + ,option "" ["hscolour-css"] + "Use PATH as the HsColour stylesheet" + haddockHscolourCss (\v flags -> flags { haddockHscolourCss = v }) + (reqArgFlag "PATH") + + ,option "" ["contents-location"] + "Bake URL in as the location for the contents page" + haddockContents (\v flags -> flags { haddockContents = v }) + (reqArg' "URL" + (toFlag . toPathTemplate) + (flagToList . fmap fromPathTemplate)) + ] + +emptyHaddockFlags :: HaddockFlags +emptyHaddockFlags = mempty + +instance Monoid HaddockFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup HaddockFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Clean flags +-- ------------------------------------------------------------ + +data CleanFlags = CleanFlags { + cleanSaveConf :: Flag Bool, + cleanDistPref :: Flag FilePath, + cleanVerbosity :: Flag Verbosity, + cleanCabalFilePath :: Flag FilePath + } + deriving (Show, Generic) + +defaultCleanFlags :: CleanFlags +defaultCleanFlags = CleanFlags { + cleanSaveConf = Flag False, + cleanDistPref = NoFlag, + cleanVerbosity = Flag normal, + cleanCabalFilePath = mempty + } + +cleanCommand :: CommandUI CleanFlags +cleanCommand = CommandUI + { commandName = "clean" + , commandSynopsis = "Clean up after a build." 
+ , commandDescription = Just $ \_ -> + "Removes .hi, .o, preprocessed sources, etc.\n" + , commandNotes = Nothing + , commandUsage = \pname -> + "Usage: " ++ pname ++ " clean [FLAGS]\n" + , commandDefaultFlags = defaultCleanFlags + , commandOptions = \showOrParseArgs -> + [optionVerbosity cleanVerbosity (\v flags -> flags { cleanVerbosity = v }) + ,optionDistPref + cleanDistPref (\d flags -> flags { cleanDistPref = d }) + showOrParseArgs + + ,option "s" ["save-configure"] + "Do not remove the configuration file (dist/setup-config) during cleaning. Saves need to reconfigure." + cleanSaveConf (\v flags -> flags { cleanSaveConf = v }) + trueArg + ] + } + +emptyCleanFlags :: CleanFlags +emptyCleanFlags = mempty + +instance Monoid CleanFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup CleanFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Build flags +-- ------------------------------------------------------------ + +data BuildFlags = BuildFlags { + buildProgramPaths :: [(String, FilePath)], + buildProgramArgs :: [(String, [String])], + buildDistPref :: Flag FilePath, + buildVerbosity :: Flag Verbosity, + buildNumJobs :: Flag (Maybe Int), + -- TODO: this one should not be here, it's just that the silly + -- UserHooks stop us from passing extra info in other ways + buildArgs :: [String], + buildCabalFilePath :: Flag FilePath + } + deriving (Read, Show, Generic) + +{-# DEPRECATED buildVerbose "Use buildVerbosity instead" #-} +buildVerbose :: BuildFlags -> Verbosity +buildVerbose = fromFlagOrDefault normal . 
buildVerbosity + +defaultBuildFlags :: BuildFlags +defaultBuildFlags = BuildFlags { + buildProgramPaths = mempty, + buildProgramArgs = [], + buildDistPref = mempty, + buildVerbosity = Flag normal, + buildNumJobs = mempty, + buildArgs = [], + buildCabalFilePath = mempty + } + +buildCommand :: ProgramDb -> CommandUI BuildFlags +buildCommand progDb = CommandUI + { commandName = "build" + , commandSynopsis = "Compile all/specific components." + , commandDescription = Just $ \_ -> wrapText $ + "Components encompass executables, tests, and benchmarks.\n" + ++ "\n" + ++ "Affected by configuration options, see `configure`.\n" + , commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " build " + ++ " All the components in the package\n" + ++ " " ++ pname ++ " build foo " + ++ " A component (i.e. lib, exe, test suite)\n\n" + ++ programFlagsDescription progDb +--TODO: re-enable once we have support for module/file targets +-- ++ " " ++ pname ++ " build Foo.Bar " +-- ++ " A module\n" +-- ++ " " ++ pname ++ " build Foo/Bar.hs" +-- ++ " A file\n\n" +-- ++ "If a target is ambiguous it can be qualified with the component " +-- ++ "name, e.g.\n" +-- ++ " " ++ pname ++ " build foo:Foo.Bar\n" +-- ++ " " ++ pname ++ " build testsuite1:Foo/Bar.hs\n" + , commandUsage = usageAlternatives "build" $ + [ "[FLAGS]" + , "COMPONENTS [FLAGS]" + ] + , commandDefaultFlags = defaultBuildFlags + , commandOptions = \showOrParseArgs -> + [ optionVerbosity + buildVerbosity (\v flags -> flags { buildVerbosity = v }) + + , optionDistPref + buildDistPref (\d flags -> flags { buildDistPref = d }) showOrParseArgs + ] + ++ buildOptions progDb showOrParseArgs + } + +buildOptions :: ProgramDb -> ShowOrParseArgs + -> [OptionField BuildFlags] +buildOptions progDb showOrParseArgs = + [ optionNumJobs + buildNumJobs (\v flags -> flags { buildNumJobs = v }) + ] + + ++ programDbPaths progDb showOrParseArgs + buildProgramPaths (\v flags -> flags { buildProgramPaths = v}) + + ++ programDbOption progDb 
showOrParseArgs + buildProgramArgs (\v fs -> fs { buildProgramArgs = v }) + + ++ programDbOptions progDb showOrParseArgs + buildProgramArgs (\v flags -> flags { buildProgramArgs = v}) + +emptyBuildFlags :: BuildFlags +emptyBuildFlags = mempty + +instance Monoid BuildFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup BuildFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * REPL Flags +-- ------------------------------------------------------------ + +data ReplFlags = ReplFlags { + replProgramPaths :: [(String, FilePath)], + replProgramArgs :: [(String, [String])], + replDistPref :: Flag FilePath, + replVerbosity :: Flag Verbosity, + replReload :: Flag Bool, + replReplOptions :: [String] + } + deriving (Show, Generic) + +defaultReplFlags :: ReplFlags +defaultReplFlags = ReplFlags { + replProgramPaths = mempty, + replProgramArgs = [], + replDistPref = NoFlag, + replVerbosity = Flag normal, + replReload = Flag False, + replReplOptions = [] + } + +instance Monoid ReplFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup ReplFlags where + (<>) = gmappend + +replCommand :: ProgramDb -> CommandUI ReplFlags +replCommand progDb = CommandUI + { commandName = "repl" + , commandSynopsis = + "Open an interpreter session for the given component." + , commandDescription = Just $ \pname -> wrapText $ + "If the current directory contains no package, ignores COMPONENT " + ++ "parameters and opens an interactive interpreter session; if a " + ++ "sandbox is present, its package database will be used.\n" + ++ "\n" + ++ "Otherwise, (re)configures with the given or default flags, and " + ++ "loads the interpreter with the relevant modules. 
For executables, " + ++ "tests and benchmarks, loads the main module (and its " + ++ "dependencies); for libraries all exposed/other modules.\n" + ++ "\n" + ++ "The default component is the library itself, or the executable " + ++ "if that is the only component.\n" + ++ "\n" + ++ "Support for loading specific modules is planned but not " + ++ "implemented yet. For certain scenarios, `" ++ pname + ++ " exec -- ghci :l Foo` may be used instead. Note that `exec` will " + ++ "not (re)configure and you will have to specify the location of " + ++ "other modules, if required.\n" + + , commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " repl " + ++ " The first component in the package\n" + ++ " " ++ pname ++ " repl foo " + ++ " A named component (i.e. lib, exe, test suite)\n" + ++ " " ++ pname ++ " repl --repl-options=\"-lstdc++\"" + ++ " Specifying flags for interpreter\n" +--TODO: re-enable once we have support for module/file targets +-- ++ " " ++ pname ++ " repl Foo.Bar " +-- ++ " A module\n" +-- ++ " " ++ pname ++ " repl Foo/Bar.hs" +-- ++ " A file\n\n" +-- ++ "If a target is ambiguous it can be qualified with the component " +-- ++ "name, e.g.\n" +-- ++ " " ++ pname ++ " repl foo:Foo.Bar\n" +-- ++ " " ++ pname ++ " repl testsuite1:Foo/Bar.hs\n" + , commandUsage = \pname -> "Usage: " ++ pname ++ " repl [COMPONENT] [FLAGS]\n" + , commandDefaultFlags = defaultReplFlags + , commandOptions = \showOrParseArgs -> + optionVerbosity replVerbosity (\v flags -> flags { replVerbosity = v }) + : optionDistPref + replDistPref (\d flags -> flags { replDistPref = d }) + showOrParseArgs + + : programDbPaths progDb showOrParseArgs + replProgramPaths (\v flags -> flags { replProgramPaths = v}) + + ++ programDbOption progDb showOrParseArgs + replProgramArgs (\v flags -> flags { replProgramArgs = v}) + + ++ programDbOptions progDb showOrParseArgs + replProgramArgs (\v flags -> flags { replProgramArgs = v}) + + ++ case showOrParseArgs of + ParseArgs -> + [ option "" 
["reload"] + "Used from within an interpreter to update files." + replReload (\v flags -> flags { replReload = v }) + trueArg + ] + _ -> [] + ++ map liftReplOption (replOptions showOrParseArgs) + } + where + liftReplOption = liftOption replReplOptions (\v flags -> flags { replReplOptions = v }) + +replOptions :: ShowOrParseArgs -> [OptionField [String]] +replOptions _ = [ option [] ["repl-options"] "use this option for the repl" id + const (reqArg "FLAG" (succeedReadE (:[])) id) ] + +-- ------------------------------------------------------------ +-- * Test flags +-- ------------------------------------------------------------ + +data TestShowDetails = Never | Failures | Always | Streaming | Direct + deriving (Eq, Ord, Enum, Bounded, Show) + +knownTestShowDetails :: [TestShowDetails] +knownTestShowDetails = [minBound..maxBound] + +instance Pretty TestShowDetails where + pretty = Disp.text . lowercase . show + +instance Parsec TestShowDetails where + parsec = maybe (fail "invalid TestShowDetails") return . classify =<< ident + where + ident = P.munch1 (\c -> isAlpha c || c == '_' || c == '-') + classify str = lookup (lowercase str) enumMap + enumMap :: [(String, TestShowDetails)] + enumMap = [ (display x, x) + | x <- knownTestShowDetails ] + +instance Text TestShowDetails where + parse = maybe Parse.pfail return . classify =<< ident + where + ident = Parse.munch1 (\c -> isAlpha c || c == '_' || c == '-') + classify str = lookup (lowercase str) enumMap + enumMap :: [(String, TestShowDetails)] + enumMap = [ (display x, x) + | x <- knownTestShowDetails ] + +--TODO: do we need this instance? 
+instance Monoid TestShowDetails where + mempty = Never + mappend = (<>) + +instance Semigroup TestShowDetails where + a <> b = if a < b then b else a + +data TestFlags = TestFlags { + testDistPref :: Flag FilePath, + testVerbosity :: Flag Verbosity, + testHumanLog :: Flag PathTemplate, + testMachineLog :: Flag PathTemplate, + testShowDetails :: Flag TestShowDetails, + testKeepTix :: Flag Bool, + -- TODO: think about if/how options are passed to test exes + testOptions :: [PathTemplate] + } deriving (Generic) + +defaultTestFlags :: TestFlags +defaultTestFlags = TestFlags { + testDistPref = NoFlag, + testVerbosity = Flag normal, + testHumanLog = toFlag $ toPathTemplate $ "$pkgid-$test-suite.log", + testMachineLog = toFlag $ toPathTemplate $ "$pkgid.log", + testShowDetails = toFlag Failures, + testKeepTix = toFlag False, + testOptions = [] + } + +testCommand :: CommandUI TestFlags +testCommand = CommandUI + { commandName = "test" + , commandSynopsis = + "Run all/specific tests in the test suite." + , commandDescription = Just $ \pname -> wrapText $ + "If necessary (re)configures with `--enable-tests` flag and builds" + ++ " the test suite.\n" + ++ "\n" + ++ "Remember that the tests' dependencies must be installed if there" + ++ " are additional ones; e.g. 
with `" ++ pname + ++ " install --only-dependencies --enable-tests`.\n" + ++ "\n" + ++ "By defining UserHooks in a custom Setup.hs, the package can" + ++ " define actions to be executed before and after running tests.\n" + , commandNotes = Nothing + , commandUsage = usageAlternatives "test" + [ "[FLAGS]" + , "TESTCOMPONENTS [FLAGS]" + ] + , commandDefaultFlags = defaultTestFlags + , commandOptions = \showOrParseArgs -> + [ optionVerbosity testVerbosity (\v flags -> flags { testVerbosity = v }) + , optionDistPref + testDistPref (\d flags -> flags { testDistPref = d }) + showOrParseArgs + , option [] ["log"] + ("Log all test suite results to file (name template can use " + ++ "$pkgid, $compiler, $os, $arch, $test-suite, $result)") + testHumanLog (\v flags -> flags { testHumanLog = v }) + (reqArg' "TEMPLATE" + (toFlag . toPathTemplate) + (flagToList . fmap fromPathTemplate)) + , option [] ["machine-log"] + ("Produce a machine-readable log file (name template can use " + ++ "$pkgid, $compiler, $os, $arch, $result)") + testMachineLog (\v flags -> flags { testMachineLog = v }) + (reqArg' "TEMPLATE" + (toFlag . toPathTemplate) + (flagToList . fmap fromPathTemplate)) + , option [] ["show-details"] + ("'always': always show results of individual test cases. " + ++ "'never': never show results of individual test cases. " + ++ "'failures': show results of failing test cases. " + ++ "'streaming': show results of test cases in real time." + ++ "'direct': send results of test cases in real time; no log file.") + testShowDetails (\v flags -> flags { testShowDetails = v }) + (reqArg "FILTER" + (parsecToReadE (\_ -> "--show-details flag expects one of " + ++ intercalate ", " + (map display knownTestShowDetails)) + (fmap toFlag parsec)) + (flagToList . 
fmap display)) + , option [] ["keep-tix-files"] + "keep .tix files for HPC between test runs" + testKeepTix (\v flags -> flags { testKeepTix = v}) + trueArg + , option [] ["test-options"] + ("give extra options to test executables " + ++ "(name templates can use $pkgid, $compiler, " + ++ "$os, $arch, $test-suite)") + testOptions (\v flags -> flags { testOptions = v }) + (reqArg' "TEMPLATES" (map toPathTemplate . splitArgs) + (const [])) + , option [] ["test-option"] + ("give extra option to test executables " + ++ "(no need to quote options containing spaces, " + ++ "name template can use $pkgid, $compiler, " + ++ "$os, $arch, $test-suite)") + testOptions (\v flags -> flags { testOptions = v }) + (reqArg' "TEMPLATE" (\x -> [toPathTemplate x]) + (map fromPathTemplate)) + ] + } + +emptyTestFlags :: TestFlags +emptyTestFlags = mempty + +instance Monoid TestFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup TestFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Benchmark flags +-- ------------------------------------------------------------ + +data BenchmarkFlags = BenchmarkFlags { + benchmarkDistPref :: Flag FilePath, + benchmarkVerbosity :: Flag Verbosity, + benchmarkOptions :: [PathTemplate] + } deriving (Generic) + +defaultBenchmarkFlags :: BenchmarkFlags +defaultBenchmarkFlags = BenchmarkFlags { + benchmarkDistPref = NoFlag, + benchmarkVerbosity = Flag normal, + benchmarkOptions = [] + } + +benchmarkCommand :: CommandUI BenchmarkFlags +benchmarkCommand = CommandUI + { commandName = "bench" + , commandSynopsis = + "Run all/specific benchmarks." + , commandDescription = Just $ \pname -> wrapText $ + "If necessary (re)configures with `--enable-benchmarks` flag and" + ++ " builds the benchmarks.\n" + ++ "\n" + ++ "Remember that the benchmarks' dependencies must be installed if" + ++ " there are additional ones; e.g. 
with `" ++ pname + ++ " install --only-dependencies --enable-benchmarks`.\n" + ++ "\n" + ++ "By defining UserHooks in a custom Setup.hs, the package can" + ++ " define actions to be executed before and after running" + ++ " benchmarks.\n" + , commandNotes = Nothing + , commandUsage = usageAlternatives "bench" + [ "[FLAGS]" + , "BENCHCOMPONENTS [FLAGS]" + ] + , commandDefaultFlags = defaultBenchmarkFlags + , commandOptions = \showOrParseArgs -> + [ optionVerbosity benchmarkVerbosity + (\v flags -> flags { benchmarkVerbosity = v }) + , optionDistPref + benchmarkDistPref (\d flags -> flags { benchmarkDistPref = d }) + showOrParseArgs + , option [] ["benchmark-options"] + ("give extra options to benchmark executables " + ++ "(name templates can use $pkgid, $compiler, " + ++ "$os, $arch, $benchmark)") + benchmarkOptions (\v flags -> flags { benchmarkOptions = v }) + (reqArg' "TEMPLATES" (map toPathTemplate . splitArgs) + (const [])) + , option [] ["benchmark-option"] + ("give extra option to benchmark executables " + ++ "(no need to quote options containing spaces, " + ++ "name template can use $pkgid, $compiler, " + ++ "$os, $arch, $benchmark)") + benchmarkOptions (\v flags -> flags { benchmarkOptions = v }) + (reqArg' "TEMPLATE" (\x -> [toPathTemplate x]) + (map fromPathTemplate)) + ] + } + +emptyBenchmarkFlags :: BenchmarkFlags +emptyBenchmarkFlags = mempty + +instance Monoid BenchmarkFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup BenchmarkFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Shared options utils +-- ------------------------------------------------------------ + +programFlagsDescription :: ProgramDb -> String +programFlagsDescription progDb = + "The flags --with-PROG and --PROG-option(s) can be used with" + ++ " the following programs:" + ++ (concatMap (\line -> "\n " ++ unwords line) . wrapLine 77 . 
sort) + [ programName prog | (prog, _) <- knownPrograms progDb ] + ++ "\n" + +-- | For each known program @PROG@ in 'progDb', produce a @with-PROG@ +-- 'OptionField'. +programDbPaths + :: ProgramDb + -> ShowOrParseArgs + -> (flags -> [(String, FilePath)]) + -> ([(String, FilePath)] -> (flags -> flags)) + -> [OptionField flags] +programDbPaths progDb showOrParseArgs get set = + programDbPaths' ("with-" ++) progDb showOrParseArgs get set + +{-# DEPRECATED programConfigurationPaths' "Use programDbPaths' instead" #-} + +-- | Like 'programDbPaths', but allows to customise the option name. +programDbPaths', programConfigurationPaths' + :: (String -> String) + -> ProgramDb + -> ShowOrParseArgs + -> (flags -> [(String, FilePath)]) + -> ([(String, FilePath)] -> (flags -> flags)) + -> [OptionField flags] + +programConfigurationPaths' = programDbPaths' + +programDbPaths' mkName progDb showOrParseArgs get set = + case showOrParseArgs of + -- we don't want a verbose help text list so we just show a generic one: + ShowArgs -> [withProgramPath "PROG"] + ParseArgs -> map (withProgramPath . programName . fst) + (knownPrograms progDb) + where + withProgramPath prog = + option "" [mkName prog] + ("give the path to " ++ prog) + get set + (reqArg' "PATH" (\path -> [(prog, path)]) + (\progPaths -> [ path | (prog', path) <- progPaths, prog==prog' ])) + +-- | For each known program @PROG@ in 'progDb', produce a @PROG-option@ +-- 'OptionField'. +programDbOption + :: ProgramDb + -> ShowOrParseArgs + -> (flags -> [(String, [String])]) + -> ([(String, [String])] -> (flags -> flags)) + -> [OptionField flags] +programDbOption progDb showOrParseArgs get set = + case showOrParseArgs of + -- we don't want a verbose help text list so we just show a generic one: + ShowArgs -> [programOption "PROG"] + ParseArgs -> map (programOption . programName . 
fst) + (knownPrograms progDb) + where + programOption prog = + option "" [prog ++ "-option"] + ("give an extra option to " ++ prog ++ + " (no need to quote options containing spaces)") + get set + (reqArg' "OPT" (\arg -> [(prog, [arg])]) + (\progArgs -> concat [ args + | (prog', args) <- progArgs, prog==prog' ])) + +{-# DEPRECATED programConfigurationOptions "Use programDbOptions instead" #-} + +-- | For each known program @PROG@ in 'progDb', produce a @PROG-options@ +-- 'OptionField'. +programDbOptions, programConfigurationOptions + :: ProgramDb + -> ShowOrParseArgs + -> (flags -> [(String, [String])]) + -> ([(String, [String])] -> (flags -> flags)) + -> [OptionField flags] + +programConfigurationOptions = programDbOptions + +programDbOptions progDb showOrParseArgs get set = + case showOrParseArgs of + -- we don't want a verbose help text list so we just show a generic one: + ShowArgs -> [programOptions "PROG"] + ParseArgs -> map (programOptions . programName . fst) + (knownPrograms progDb) + where + programOptions prog = + option "" [prog ++ "-options"] + ("give extra options to " ++ prog) + get set + (reqArg' "OPTS" (\args -> [(prog, splitArgs args)]) (const [])) + +-- ------------------------------------------------------------ +-- * GetOpt Utils +-- ------------------------------------------------------------ + +boolOpt :: SFlags -> SFlags + -> MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a +boolOpt = Command.boolOpt flagToMaybe Flag + +boolOpt' :: OptFlags -> OptFlags + -> MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a +boolOpt' = Command.boolOpt' flagToMaybe Flag + +trueArg, falseArg :: MkOptDescr (a -> Flag Bool) (Flag Bool -> a -> a) a +trueArg sfT lfT = boolOpt' (sfT, lfT) ([], []) sfT lfT +falseArg sfF lfF = boolOpt' ([], []) (sfF, lfF) sfF lfF + +reqArgFlag :: ArgPlaceHolder -> SFlags -> LFlags -> Description -> + (b -> Flag String) -> (Flag String -> b -> b) -> OptDescr b +reqArgFlag ad = reqArg ad (succeedReadE Flag) flagToList + 
+optionDistPref :: (flags -> Flag FilePath) + -> (Flag FilePath -> flags -> flags) + -> ShowOrParseArgs + -> OptionField flags +optionDistPref get set = \showOrParseArgs -> + option "" (distPrefFlagName showOrParseArgs) + ( "The directory where Cabal puts generated build files " + ++ "(default " ++ defaultDistPref ++ ")") + get set + (reqArgFlag "DIR") + where + distPrefFlagName ShowArgs = ["builddir"] + distPrefFlagName ParseArgs = ["builddir", "distdir", "distpref"] + +optionVerbosity :: (flags -> Flag Verbosity) + -> (Flag Verbosity -> flags -> flags) + -> OptionField flags +optionVerbosity get set = + option "v" ["verbose"] + "Control verbosity (n is 0--3, default verbosity level is 1)" + get set + (optArg "n" (fmap Flag flagToVerbosity) + (Flag verbose) -- default Value if no n is given + (fmap (Just . showForCabal) . flagToList)) + +optionNumJobs :: (flags -> Flag (Maybe Int)) + -> (Flag (Maybe Int) -> flags -> flags) + -> OptionField flags +optionNumJobs get set = + option "j" ["jobs"] + "Run NUM jobs simultaneously (or '$ncpus' if no NUM is given)." + get set + (optArg "NUM" (fmap Flag numJobsParser) + (Flag Nothing) + (map (Just . maybe "$ncpus" show) . flagToList)) + where + numJobsParser :: ReadE (Maybe Int) + numJobsParser = ReadE $ \s -> + case s of + "$ncpus" -> Right Nothing + _ -> case reads s of + [(n, "")] + | n < 1 -> Left "The number of jobs should be 1 or more." + | otherwise -> Right (Just n) + _ -> Left "The jobs value should be a number or '$ncpus'" + +-- ------------------------------------------------------------ +-- * Other Utils +-- ------------------------------------------------------------ + +-- | Arguments to pass to a @configure@ script, e.g. generated by +-- @autoconf@. 
+configureArgs :: Bool -> ConfigFlags -> [String] +configureArgs bcHack flags + = hc_flag + ++ optFlag "with-hc-pkg" configHcPkg + ++ optFlag' "prefix" prefix + ++ optFlag' "bindir" bindir + ++ optFlag' "libdir" libdir + ++ optFlag' "libexecdir" libexecdir + ++ optFlag' "datadir" datadir + ++ optFlag' "sysconfdir" sysconfdir + ++ configConfigureArgs flags + where + hc_flag = case (configHcFlavor flags, configHcPath flags) of + (_, Flag hc_path) -> [hc_flag_name ++ hc_path] + (Flag hc, NoFlag) -> [hc_flag_name ++ display hc] + (NoFlag,NoFlag) -> [] + hc_flag_name + --TODO kill off thic bc hack when defaultUserHooks is removed. + | bcHack = "--with-hc=" + | otherwise = "--with-compiler=" + optFlag name config_field = case config_field flags of + Flag p -> ["--" ++ name ++ "=" ++ p] + NoFlag -> [] + optFlag' name config_field = optFlag name (fmap fromPathTemplate + . config_field + . configInstallDirs) + +configureCCompiler :: Verbosity -> ProgramDb + -> IO (FilePath, [String]) +configureCCompiler verbosity progdb = configureProg verbosity progdb gccProgram + +configureLinker :: Verbosity -> ProgramDb -> IO (FilePath, [String]) +configureLinker verbosity progdb = configureProg verbosity progdb ldProgram + +configureProg :: Verbosity -> ProgramDb -> Program + -> IO (FilePath, [String]) +configureProg verbosity programDb prog = do + (p, _) <- requireProgram verbosity prog programDb + let pInv = programInvocation p [] + return (progInvokePath pInv, progInvokeArgs pInv) + +-- | Helper function to split a string into a list of arguments. 
+-- It's supposed to handle quoted things sensibly, eg: +-- +-- > splitArgs "--foo=\"C:/Program Files/Bar/" --baz" +-- > = ["--foo=C:/Program Files/Bar", "--baz"] +-- +-- > splitArgs "\"-DMSGSTR=\\\"foo bar\\\"\" --baz" +-- > = ["-DMSGSTR=\"foo bar\"","--baz"] +-- +splitArgs :: String -> [String] +splitArgs = space [] + where + space :: String -> String -> [String] + space w [] = word w [] + space w ( c :s) + | isSpace c = word w (space [] s) + space w ('"':s) = string w s + space w s = nonstring w s + + string :: String -> String -> [String] + string w [] = word w [] + string w ('"':s) = space w s + string w ('\\':'"':s) = string ('"':w) s + string w ( c :s) = string (c:w) s + + nonstring :: String -> String -> [String] + nonstring w [] = word w [] + nonstring w ('"':s) = string w s + nonstring w ( c :s) = space (c:w) s + + word [] s = s + word w s = reverse w : s + +-- The test cases kinda have to be rewritten from the ground up... :/ +--hunitTests :: [Test] +--hunitTests = +-- let m = [("ghc", GHC), ("nhc98", NHC), ("hugs", Hugs)] +-- (flags, commands', unkFlags, ers) +-- = getOpt Permute options ["configure", "foobar", "--prefix=/foo", "--ghc", "--nhc98", "--hugs", "--with-compiler=/comp", "--unknown1", "--unknown2", "--install-prefix=/foo", "--user", "--global"] +-- in [TestLabel "very basic option parsing" $ TestList [ +-- "getOpt flags" ~: "failed" ~: +-- [Prefix "/foo", GhcFlag, NhcFlag, HugsFlag, +-- WithCompiler "/comp", InstPrefix "/foo", UserFlag, GlobalFlag] +-- ~=? flags, +-- "getOpt commands" ~: "failed" ~: ["configure", "foobar"] ~=? commands', +-- "getOpt unknown opts" ~: "failed" ~: +-- ["--unknown1", "--unknown2"] ~=? unkFlags, +-- "getOpt errors" ~: "failed" ~: [] ~=? ers], +-- +-- TestLabel "test location of various compilers" $ TestList +-- ["configure parsing for prefix and compiler flag" ~: "failed" ~: +-- (Right (ConfigCmd (Just comp, Nothing, Just "/usr/local"), [])) +-- ~=? 
(parseArgs ["--prefix=/usr/local", "--"++name, "configure"]) +-- | (name, comp) <- m], +-- +-- TestLabel "find the package tool" $ TestList +-- ["configure parsing for prefix comp flag, withcompiler" ~: "failed" ~: +-- (Right (ConfigCmd (Just comp, Just "/foo/comp", Just "/usr/local"), [])) +-- ~=? (parseArgs ["--prefix=/usr/local", "--"++name, +-- "--with-compiler=/foo/comp", "configure"]) +-- | (name, comp) <- m], +-- +-- TestLabel "simpler commands" $ TestList +-- [flag ~: "failed" ~: (Right (flagCmd, [])) ~=? (parseArgs [flag]) +-- | (flag, flagCmd) <- [("build", BuildCmd), +-- ("install", InstallCmd Nothing False), +-- ("sdist", SDistCmd), +-- ("register", RegisterCmd False)] +-- ] +-- ] + +{- Testing ideas: + * IO to look for hugs and hugs-pkg (which hugs, etc) + * quickCheck to test permutations of arguments + * what other options can we over-ride with a command-line flag? +-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/SrcDist.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/SrcDist.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/SrcDist.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/SrcDist.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,510 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.SrcDist +-- Copyright : Simon Marlow 2004 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This handles the @sdist@ command. The module exports an 'sdist' action but +-- also some of the phases that make it up so that other tools can use just the +-- bits they need. 
In particular the preparation of the tree of files to go +-- into the source tarball is separated from actually building the source +-- tarball. +-- +-- The 'createArchive' action uses the external @tar@ program and assumes that +-- it accepts the @-z@ flag. Neither of these assumptions are valid on Windows. +-- The 'sdist' action now also does some distribution QA checks. + +-- NOTE: FIX: we don't have a great way of testing this module, since +-- we can't easily look inside a tarball once its created. + +module Distribution.Simple.SrcDist ( + -- * The top level action + sdist, + + -- ** Parts of 'sdist' + printPackageProblems, + prepareTree, + createArchive, + + -- ** Snapshots + prepareSnapshotTree, + snapshotPackage, + snapshotVersion, + dateToSnapshotNumber, + + -- * Extracting the source files + listPackageSources + + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.PackageDescription hiding (Flag) +import Distribution.PackageDescription.Check hiding (doesFileExist) +import Distribution.Package +import Distribution.ModuleName +import qualified Distribution.ModuleName as ModuleName +import Distribution.Version +import Distribution.Simple.Glob +import Distribution.Simple.Utils +import Distribution.Simple.Setup +import Distribution.Simple.PreProcess +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.BuildPaths +import Distribution.Simple.Program +import Distribution.Text +import Distribution.Types.ForeignLib +import Distribution.Verbosity + +import Data.List (partition) +import qualified Data.Map as Map +import Data.Time (UTCTime, getCurrentTime, toGregorian, utctDay) +import System.Directory ( doesFileExist ) +import System.IO (IOMode(WriteMode), hPutStrLn, withFile) +import System.FilePath ((), (<.>), dropExtension, isRelative) +import Control.Monad + +-- |Create a source distribution. 
+sdist :: PackageDescription -- ^information from the tarball + -> Maybe LocalBuildInfo -- ^Information from configure + -> SDistFlags -- ^verbosity & snapshot + -> (FilePath -> FilePath) -- ^build prefix (temp dir) + -> [PPSuffixHandler] -- ^ extra preprocessors (includes suffixes) + -> IO () +sdist pkg mb_lbi flags mkTmpDir pps = + + -- When given --list-sources, just output the list of sources to a file. + case (sDistListSources flags) of + Flag path -> withFile path WriteMode $ \outHandle -> do + (ordinary, maybeExecutable) <- listPackageSources verbosity pkg pps + traverse_ (hPutStrLn outHandle) ordinary + traverse_ (hPutStrLn outHandle) maybeExecutable + notice verbosity $ "List of package sources written to file '" + ++ path ++ "'" + NoFlag -> do + -- do some QA + printPackageProblems verbosity pkg + + when (isNothing mb_lbi) $ + warn verbosity "Cannot run preprocessors. Run 'configure' command first." + + date <- getCurrentTime + let pkg' | snapshot = snapshotPackage date pkg + | otherwise = pkg + + case flagToMaybe (sDistDirectory flags) of + Just targetDir -> do + generateSourceDir targetDir pkg' + info verbosity $ "Source directory created: " ++ targetDir + + Nothing -> do + createDirectoryIfMissingVerbose verbosity True tmpTargetDir + withTempDirectory verbosity tmpTargetDir "sdist." 
$ \tmpDir -> do + let targetDir = tmpDir tarBallName pkg' + generateSourceDir targetDir pkg' + targzFile <- createArchive verbosity pkg' mb_lbi tmpDir targetPref + notice verbosity $ "Source tarball created: " ++ targzFile + + where + generateSourceDir targetDir pkg' = do + + setupMessage verbosity "Building source dist for" (packageId pkg') + prepareTree verbosity pkg' mb_lbi targetDir pps + when snapshot $ + overwriteSnapshotPackageDesc verbosity pkg' targetDir + + verbosity = fromFlag (sDistVerbosity flags) + snapshot = fromFlag (sDistSnapshot flags) + + distPref = fromFlag $ sDistDistPref flags + targetPref = distPref + tmpTargetDir = mkTmpDir distPref + +-- | List all source files of a package. Returns a tuple of lists: first +-- component is a list of ordinary files, second one is a list of those files +-- that may be executable. +listPackageSources :: Verbosity -- ^ verbosity + -> PackageDescription -- ^ info from the cabal file + -> [PPSuffixHandler] -- ^ extra preprocessors (include + -- suffixes) + -> IO ([FilePath], [FilePath]) +listPackageSources verbosity pkg_descr0 pps = do + -- Call helpers that actually do all work. + ordinary <- listPackageSourcesOrdinary verbosity pkg_descr pps + maybeExecutable <- listPackageSourcesMaybeExecutable verbosity pkg_descr + return (ordinary, maybeExecutable) + where + pkg_descr = filterAutogenModules pkg_descr0 + +-- | List those source files that may be executable (e.g. the configure script). +listPackageSourcesMaybeExecutable :: Verbosity -> PackageDescription -> IO [FilePath] +listPackageSourcesMaybeExecutable verbosity pkg_descr = + -- Extra source files. + fmap concat . for (extraSrcFiles pkg_descr) $ \fpath -> + matchDirFileGlob verbosity (specVersion pkg_descr) "." fpath + +-- | List those source files that should be copied with ordinary permissions. 
+listPackageSourcesOrdinary :: Verbosity + -> PackageDescription + -> [PPSuffixHandler] + -> IO [FilePath] +listPackageSourcesOrdinary verbosity pkg_descr pps = + fmap concat . sequenceA $ + [ + -- Library sources. + fmap concat + . withAllLib $ \Library { + exposedModules = modules, + signatures = sigs, + libBuildInfo = libBi + } -> + allSourcesBuildInfo verbosity libBi pps (modules ++ sigs) + + -- Executables sources. + , fmap concat + . withAllExe $ \Executable { modulePath = mainPath, buildInfo = exeBi } -> do + biSrcs <- allSourcesBuildInfo verbosity exeBi pps [] + mainSrc <- findMainExeFile exeBi pps mainPath + return (mainSrc:biSrcs) + + -- Foreign library sources + , fmap concat + . withAllFLib $ \flib@(ForeignLib { foreignLibBuildInfo = flibBi }) -> do + biSrcs <- allSourcesBuildInfo verbosity flibBi pps [] + defFiles <- mapM (findModDefFile flibBi pps) (foreignLibModDefFile flib) + return (defFiles ++ biSrcs) + + -- Test suites sources. + , fmap concat + . withAllTest $ \t -> do + let bi = testBuildInfo t + case testInterface t of + TestSuiteExeV10 _ mainPath -> do + biSrcs <- allSourcesBuildInfo verbosity bi pps [] + srcMainFile <- findMainExeFile bi pps mainPath + return (srcMainFile:biSrcs) + TestSuiteLibV09 _ m -> + allSourcesBuildInfo verbosity bi pps [m] + TestSuiteUnsupported tp -> die' verbosity $ "Unsupported test suite type: " + ++ show tp + + -- Benchmarks sources. + , fmap concat + . withAllBenchmark $ \bm -> do + let bi = benchmarkBuildInfo bm + case benchmarkInterface bm of + BenchmarkExeV10 _ mainPath -> do + biSrcs <- allSourcesBuildInfo verbosity bi pps [] + srcMainFile <- findMainExeFile bi pps mainPath + return (srcMainFile:biSrcs) + BenchmarkUnsupported tp -> die' verbosity $ "Unsupported benchmark type: " + ++ show tp + + -- Data files. + , fmap concat + . for (dataFiles pkg_descr) $ \filename -> + let srcDataDirRaw = dataDir pkg_descr + srcDataDir = if null srcDataDirRaw + then "." 
+ else srcDataDirRaw + in fmap (fmap (srcDataDir )) $ + matchDirFileGlob verbosity (specVersion pkg_descr) srcDataDir filename + + -- Extra doc files. + , fmap concat + . for (extraDocFiles pkg_descr) $ \ filename -> + matchDirFileGlob verbosity (specVersion pkg_descr) "." filename + + -- License file(s). + , return (licenseFiles pkg_descr) + + -- Install-include files. + , fmap concat + . withAllLib $ \ l -> do + let lbi = libBuildInfo l + relincdirs = "." : filter isRelative (includeDirs lbi) + traverse (fmap snd . findIncludeFile verbosity relincdirs) (installIncludes lbi) + + -- Setup script, if it exists. + , fmap (maybe [] (\f -> [f])) $ findSetupFile "" + + -- The .cabal file itself. + , fmap (\d -> [d]) (defaultPackageDesc verbosity) + + ] + where + -- We have to deal with all libs and executables, so we have local + -- versions of these functions that ignore the 'buildable' attribute: + withAllLib action = traverse action (allLibraries pkg_descr) + withAllFLib action = traverse action (foreignLibs pkg_descr) + withAllExe action = traverse action (executables pkg_descr) + withAllTest action = traverse action (testSuites pkg_descr) + withAllBenchmark action = traverse action (benchmarks pkg_descr) + + +-- |Prepare a directory tree of source files. +prepareTree :: Verbosity -- ^verbosity + -> PackageDescription -- ^info from the cabal file + -> Maybe LocalBuildInfo + -> FilePath -- ^source tree to populate + -> [PPSuffixHandler] -- ^extra preprocessors (includes suffixes) + -> IO () +prepareTree verbosity pkg_descr0 mb_lbi targetDir pps = do + -- If the package was configured then we can run platform-independent + -- pre-processors and include those generated files. 
+ case mb_lbi of + Just lbi | not (null pps) -> do + let lbi' = lbi{ buildDir = targetDir buildDir lbi } + withAllComponentsInBuildOrder pkg_descr lbi' $ \c clbi -> + preprocessComponent pkg_descr c lbi' clbi True verbosity pps + _ -> return () + + (ordinary, mExecutable) <- listPackageSources verbosity pkg_descr0 pps + installOrdinaryFiles verbosity targetDir (zip (repeat []) ordinary) + installMaybeExecutableFiles verbosity targetDir (zip (repeat []) mExecutable) + maybeCreateDefaultSetupScript targetDir + + where + pkg_descr = filterAutogenModules pkg_descr0 + +-- | Find the setup script file, if it exists. +findSetupFile :: FilePath -> NoCallStackIO (Maybe FilePath) +findSetupFile targetDir = do + hsExists <- doesFileExist setupHs + lhsExists <- doesFileExist setupLhs + if hsExists + then return (Just setupHs) + else if lhsExists + then return (Just setupLhs) + else return Nothing + where + setupHs = targetDir "Setup.hs" + setupLhs = targetDir "Setup.lhs" + +-- | Create a default setup script in the target directory, if it doesn't exist. +maybeCreateDefaultSetupScript :: FilePath -> NoCallStackIO () +maybeCreateDefaultSetupScript targetDir = do + mSetupFile <- findSetupFile targetDir + case mSetupFile of + Just _setupFile -> return () + Nothing -> do + writeUTF8File (targetDir "Setup.hs") $ unlines [ + "import Distribution.Simple", + "main = defaultMain"] + +-- | Find the main executable file. 
+findMainExeFile :: BuildInfo -> [PPSuffixHandler] -> FilePath -> IO FilePath +findMainExeFile exeBi pps mainPath = do + ppFile <- findFileWithExtension (ppSuffixes pps) (hsSourceDirs exeBi) + (dropExtension mainPath) + case ppFile of + Nothing -> findFile (hsSourceDirs exeBi) mainPath + Just pp -> return pp + +-- | Find a module definition file +-- +-- TODO: I don't know if this is right +findModDefFile :: BuildInfo -> [PPSuffixHandler] -> FilePath -> IO FilePath +findModDefFile flibBi _pps modDefPath = + findFile (".":hsSourceDirs flibBi) modDefPath + +-- | Given a list of include paths, try to find the include file named +-- @f@. Return the name of the file and the full path, or exit with error if +-- there's no such file. +findIncludeFile :: Verbosity -> [FilePath] -> String -> IO (String, FilePath) +findIncludeFile verbosity [] f = die' verbosity ("can't find include file " ++ f) +findIncludeFile verbosity (d:ds) f = do + let path = (d f) + b <- doesFileExist path + if b then return (f,path) else findIncludeFile verbosity ds f + +-- | Remove the auto-generated modules (like 'Paths_*') from 'exposed-modules' +-- and 'other-modules'. +filterAutogenModules :: PackageDescription -> PackageDescription +filterAutogenModules pkg_descr0 = mapLib filterAutogenModuleLib $ + mapAllBuildInfo filterAutogenModuleBI pkg_descr0 + where + mapLib f pkg = pkg { library = fmap f (library pkg) + , subLibraries = map f (subLibraries pkg) } + filterAutogenModuleLib lib = lib { + exposedModules = filter (filterFunction (libBuildInfo lib)) (exposedModules lib) + } + filterAutogenModuleBI bi = bi { + otherModules = filter (filterFunction bi) (otherModules bi) + } + pathsModule = autogenPathsModuleName pkg_descr0 + filterFunction bi = \mn -> + mn /= pathsModule + && not (mn `elem` autogenModules bi) + +-- | Prepare a directory tree of source files for a snapshot version. 
+-- It is expected that the appropriate snapshot version has already been set +-- in the package description, eg using 'snapshotPackage' or 'snapshotVersion'. +-- +prepareSnapshotTree :: Verbosity -- ^verbosity + -> PackageDescription -- ^info from the cabal file + -> Maybe LocalBuildInfo + -> FilePath -- ^source tree to populate + -> [PPSuffixHandler] -- ^extra preprocessors (includes + -- suffixes) + -> IO () +prepareSnapshotTree verbosity pkg mb_lbi targetDir pps = do + prepareTree verbosity pkg mb_lbi targetDir pps + overwriteSnapshotPackageDesc verbosity pkg targetDir + +overwriteSnapshotPackageDesc :: Verbosity -- ^verbosity + -> PackageDescription -- ^info from the cabal file + -> FilePath -- ^source tree + -> IO () +overwriteSnapshotPackageDesc verbosity pkg targetDir = do + -- We could just writePackageDescription targetDescFile pkg_descr, + -- but that would lose comments and formatting. + descFile <- defaultPackageDesc verbosity + withUTF8FileContents descFile $ + writeUTF8File (targetDir descFile) + . unlines . map (replaceVersion (packageVersion pkg)) . lines + + where + replaceVersion :: Version -> String -> String + replaceVersion version line + | "version:" `isPrefixOf` map toLower line + = "version: " ++ display version + | otherwise = line + +-- | Modifies a 'PackageDescription' by appending a snapshot number +-- corresponding to the given date. +-- +snapshotPackage :: UTCTime -> PackageDescription -> PackageDescription +snapshotPackage date pkg = + pkg { + package = pkgid { pkgVersion = snapshotVersion date (pkgVersion pkgid) } + } + where pkgid = packageId pkg + +-- | Modifies a 'Version' by appending a snapshot number corresponding +-- to the given date. +-- +snapshotVersion :: UTCTime -> Version -> Version +snapshotVersion date = alterVersion (++ [dateToSnapshotNumber date]) + +-- | Given a date produce a corresponding integer representation. +-- For example given a date @18/03/2008@ produce the number @20080318@. 
+-- +dateToSnapshotNumber :: UTCTime -> Int +dateToSnapshotNumber date = case toGregorian (utctDay date) of + (year, month, day) -> + fromIntegral year * 10000 + + month * 100 + + day + +-- | Callback type for use by sdistWith. +type CreateArchiveFun = Verbosity -- ^verbosity + -> PackageDescription -- ^info from cabal file + -> Maybe LocalBuildInfo -- ^info from configure + -> FilePath -- ^source tree to archive + -> FilePath -- ^name of archive to create + -> IO FilePath + +-- | Create an archive from a tree of source files, and clean up the tree. +createArchive :: CreateArchiveFun +createArchive verbosity pkg_descr mb_lbi tmpDir targetPref = do + let tarBallFilePath = targetPref tarBallName pkg_descr <.> "tar.gz" + + (tarProg, _) <- requireProgram verbosity tarProgram + (maybe defaultProgramDb withPrograms mb_lbi) + let formatOptSupported = maybe False (== "YES") $ + Map.lookup "Supports --format" + (programProperties tarProg) + runProgram verbosity tarProg $ + -- Hmm: I could well be skating on thinner ice here by using the -C option + -- (=> seems to be supported at least by GNU and *BSD tar) [The + -- prev. solution used pipes and sub-command sequences to set up the paths + -- correctly, which is problematic in a Windows setting.] + ["-czf", tarBallFilePath, "-C", tmpDir] + ++ (if formatOptSupported then ["--format", "ustar"] else []) + ++ [tarBallName pkg_descr] + return tarBallFilePath + +-- | Given a buildinfo, return the names of all source files. +allSourcesBuildInfo :: Verbosity + -> BuildInfo + -> [PPSuffixHandler] -- ^ Extra preprocessors + -> [ModuleName] -- ^ Exposed modules + -> IO [FilePath] +allSourcesBuildInfo verbosity bi pps modules = do + let searchDirs = hsSourceDirs bi + sources <- fmap concat $ sequenceA $ + [ let file = ModuleName.toFilePath module_ + -- NB: *Not* findFileWithExtension, because the same source + -- file may show up in multiple paths due to a conditional; + -- we need to package all of them. See #367. 
+ in findAllFilesWithExtension suffixes searchDirs file + >>= nonEmpty (notFound module_) return + | module_ <- modules ++ otherModules bi ] + bootFiles <- sequenceA + [ let file = ModuleName.toFilePath module_ + fileExts = ["hs-boot", "lhs-boot"] + in findFileWithExtension fileExts (hsSourceDirs bi) file + | module_ <- modules ++ otherModules bi ] + + return $ sources ++ catMaybes bootFiles ++ cSources bi ++ cxxSources bi ++ jsSources bi + + where + nonEmpty x _ [] = x + nonEmpty _ f xs = f xs + suffixes = ppSuffixes pps ++ ["hs", "lhs", "hsig", "lhsig"] + notFound m = die' verbosity $ "Error: Could not find module: " ++ display m + ++ " with any suffix: " ++ show suffixes ++ ". If the module " + ++ "is autogenerated it should be added to 'autogen-modules'." + + +-- | Note: must be called with the CWD set to the directory containing +-- the '.cabal' file. +printPackageProblems :: Verbosity -> PackageDescription -> IO () +printPackageProblems verbosity pkg_descr = do + ioChecks <- checkPackageFiles verbosity pkg_descr "." + let pureChecks = checkConfiguredPackage pkg_descr + isDistError (PackageDistSuspicious _) = False + isDistError (PackageDistSuspiciousWarn _) = False + isDistError _ = True + (errors, warnings) = partition isDistError (pureChecks ++ ioChecks) + unless (null errors) $ + notice verbosity $ "Distribution quality errors:\n" + ++ unlines (map explanation errors) + unless (null warnings) $ + notice verbosity $ "Distribution quality warnings:\n" + ++ unlines (map explanation warnings) + unless (null errors) $ + notice verbosity + "Note: the public hackage server would reject this package." + +------------------------------------------------------------ + +-- | The name of the tarball without extension +-- +tarBallName :: PackageDescription -> String +tarBallName = display . 
packageId + +mapAllBuildInfo :: (BuildInfo -> BuildInfo) + -> (PackageDescription -> PackageDescription) +mapAllBuildInfo f pkg = pkg { + library = fmap mapLibBi (library pkg), + subLibraries = fmap mapLibBi (subLibraries pkg), + foreignLibs = fmap mapFLibBi (foreignLibs pkg), + executables = fmap mapExeBi (executables pkg), + testSuites = fmap mapTestBi (testSuites pkg), + benchmarks = fmap mapBenchBi (benchmarks pkg) + } + where + mapLibBi lib = lib { libBuildInfo = f (libBuildInfo lib) } + mapFLibBi flib = flib { foreignLibBuildInfo = f (foreignLibBuildInfo flib) } + mapExeBi exe = exe { buildInfo = f (buildInfo exe) } + mapTestBi tst = tst { testBuildInfo = f (testBuildInfo tst) } + mapBenchBi bm = bm { benchmarkBuildInfo = f (benchmarkBuildInfo bm) } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Test/ExeV10.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Test/ExeV10.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Test/ExeV10.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Test/ExeV10.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,175 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +module Distribution.Simple.Test.ExeV10 + ( runTest + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.UnqualComponentName +import Distribution.Compat.CreatePipe +import Distribution.Compat.Environment +import qualified Distribution.PackageDescription as PD +import Distribution.Simple.Build.PathsModule +import Distribution.Simple.BuildPaths +import Distribution.Simple.Compiler +import Distribution.Simple.Hpc +import Distribution.Simple.InstallDirs +import qualified Distribution.Simple.LocalBuildInfo as LBI +import qualified Distribution.Types.LocalBuildInfo as LBI +import Distribution.Simple.Setup +import 
Distribution.Simple.Test.Log +import Distribution.Simple.Utils +import Distribution.System +import Distribution.TestSuite +import Distribution.Text +import Distribution.Verbosity + +import Control.Concurrent (forkIO) +import System.Directory + ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist + , getCurrentDirectory, removeDirectoryRecursive ) +import System.Exit ( ExitCode(..) ) +import System.FilePath ( (), (<.>) ) +import System.IO ( hGetContents, stdout, stderr ) + +runTest :: PD.PackageDescription + -> LBI.LocalBuildInfo + -> LBI.ComponentLocalBuildInfo + -> TestFlags + -> PD.TestSuite + -> IO TestSuiteLog +runTest pkg_descr lbi clbi flags suite = do + let isCoverageEnabled = LBI.testCoverage lbi + way = guessWay lbi + tixDir_ = tixDir distPref way testName' + + pwd <- getCurrentDirectory + existingEnv <- getEnvironment + + let cmd = LBI.buildDir lbi testName' + testName' <.> exeExtension (LBI.hostPlatform lbi) + -- Check that the test executable exists. + exists <- doesFileExist cmd + unless exists $ die' verbosity $ "Error: Could not find test program \"" ++ cmd + ++ "\". Did you build the package first?" + + -- Remove old .tix files if appropriate. + unless (fromFlag $ testKeepTix flags) $ do + exists' <- doesDirectoryExist tixDir_ + when exists' $ removeDirectoryRecursive tixDir_ + + -- Create directory for HPC files. 
+ createDirectoryIfMissing True tixDir_ + + -- Write summary notices indicating start of test suite + notice verbosity $ summarizeSuiteStart $ testName' + + (wOut, wErr, logText) <- case details of + Direct -> return (stdout, stderr, "") + _ -> do + (rOut, wOut) <- createPipe + + -- Read test executable's output lazily (returns immediately) + logText <- hGetContents rOut + -- Force the IO manager to drain the test output pipe + void $ forkIO $ length logText `seq` return () + + -- '--show-details=streaming': print the log output in another thread + when (details == Streaming) $ void $ forkIO $ putStr logText + + return (wOut, wOut, logText) + + -- Run the test executable + let opts = map (testOption pkg_descr lbi suite) + (testOptions flags) + dataDirPath = pwd PD.dataDir pkg_descr + tixFile = pwd tixFilePath distPref way (testName') + pkgPathEnv = (pkgPathEnvVar pkg_descr "datadir", dataDirPath) + : existingEnv + shellEnv = [("HPCTIXFILE", tixFile) | isCoverageEnabled] ++ pkgPathEnv + + -- Add (DY)LD_LIBRARY_PATH if needed + shellEnv' <- if LBI.withDynExe lbi + then do let (Platform _ os) = LBI.hostPlatform lbi + paths <- LBI.depLibraryPaths True False lbi clbi + return (addLibraryPath os paths shellEnv) + else return shellEnv + + exit <- rawSystemIOWithEnv verbosity cmd opts Nothing (Just shellEnv') + -- these handles are automatically closed + Nothing (Just wOut) (Just wErr) + + -- Generate TestSuiteLog from executable exit code and a machine- + -- readable test log. 
+ let suiteLog = buildLog exit + + -- Write summary notice to log file indicating start of test suite + appendFile (logFile suiteLog) $ summarizeSuiteStart $ testName' + + -- Append contents of temporary log file to the final human- + -- readable log file + appendFile (logFile suiteLog) logText + + -- Write end-of-suite summary notice to log file + appendFile (logFile suiteLog) $ summarizeSuiteFinish suiteLog + + -- Show the contents of the human-readable log file on the terminal + -- if there is a failure and/or detailed output is requested + let whenPrinting = when $ + ( details == Always || + details == Failures && not (suitePassed $ testLogs suiteLog)) + -- verbosity overrides show-details + && verbosity >= normal + whenPrinting $ putStr $ unlines $ lines logText + + -- Write summary notice to terminal indicating end of test suite + notice verbosity $ summarizeSuiteFinish suiteLog + + when isCoverageEnabled $ + markupTest verbosity lbi distPref (display $ PD.package pkg_descr) suite + + return suiteLog + where + testName' = unUnqualComponentName $ PD.testName suite + + distPref = fromFlag $ testDistPref flags + verbosity = fromFlag $ testVerbosity flags + details = fromFlag $ testShowDetails flags + testLogDir = distPref "test" + + buildLog exit = + let r = case exit of + ExitSuccess -> Pass + ExitFailure c -> Fail $ "exit code: " ++ show c + --n = unUnqualComponentName $ PD.testName suite + l = TestLog + { testName = testName' + , testOptionsReturned = [] + , testResult = r + } + in TestSuiteLog + { testSuiteName = PD.testName suite + , testLogs = l + , logFile = + testLogDir + testSuiteLogPath (fromFlag $ testHumanLog flags) + pkg_descr lbi testName' l + } + +-- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't +-- necessarily a path. 
+testOption :: PD.PackageDescription + -> LBI.LocalBuildInfo + -> PD.TestSuite + -> PathTemplate + -> String +testOption pkg_descr lbi suite template = + fromPathTemplate $ substPathTemplate env template + where + env = initialPathTemplateEnv + (PD.package pkg_descr) (LBI.localUnitId lbi) + (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++ + [(TestSuiteNameVar, toPathTemplate $ unUnqualComponentName $ PD.testName suite)] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Test/LibV09.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Test/LibV09.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Test/LibV09.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Test/LibV09.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,275 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +module Distribution.Simple.Test.LibV09 + ( runTest + -- Test stub + , simpleTestStub + , stubFilePath, stubMain, stubName, stubWriteLog + , writeSimpleTestStub + ) where + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.Types.UnqualComponentName + +import Distribution.Compat.CreatePipe +import Distribution.Compat.Environment +import Distribution.Compat.Internal.TempFile +import Distribution.ModuleName +import qualified Distribution.PackageDescription as PD +import Distribution.Simple.Build.PathsModule +import Distribution.Simple.BuildPaths +import Distribution.Simple.Compiler +import Distribution.Simple.Hpc +import Distribution.Simple.InstallDirs +import qualified Distribution.Simple.LocalBuildInfo as LBI +import qualified Distribution.Types.LocalBuildInfo as LBI +import Distribution.Simple.Setup +import Distribution.Simple.Test.Log +import Distribution.Simple.Utils +import Distribution.System +import Distribution.TestSuite +import Distribution.Text +import 
Distribution.Verbosity + +import qualified Control.Exception as CE +import System.Directory + ( createDirectoryIfMissing, canonicalizePath + , doesDirectoryExist, doesFileExist + , getCurrentDirectory, removeDirectoryRecursive, removeFile + , setCurrentDirectory ) +import System.Exit ( exitSuccess, exitWith, ExitCode(..) ) +import System.FilePath ( (), (<.>) ) +import System.IO ( hClose, hGetContents, hPutStr ) +import System.Process (StdStream(..), waitForProcess) + +runTest :: PD.PackageDescription + -> LBI.LocalBuildInfo + -> LBI.ComponentLocalBuildInfo + -> TestFlags + -> PD.TestSuite + -> IO TestSuiteLog +runTest pkg_descr lbi clbi flags suite = do + let isCoverageEnabled = LBI.testCoverage lbi + way = guessWay lbi + + pwd <- getCurrentDirectory + existingEnv <- getEnvironment + + let cmd = LBI.buildDir lbi stubName suite + stubName suite <.> exeExtension (LBI.hostPlatform lbi) + -- Check that the test executable exists. + exists <- doesFileExist cmd + unless exists $ + die' verbosity $ "Error: Could not find test program \"" ++ cmd + ++ "\". Did you build the package first?" + + -- Remove old .tix files if appropriate. + unless (fromFlag $ testKeepTix flags) $ do + let tDir = tixDir distPref way testName' + exists' <- doesDirectoryExist tDir + when exists' $ removeDirectoryRecursive tDir + + -- Create directory for HPC files. 
+ createDirectoryIfMissing True $ tixDir distPref way testName' + + -- Write summary notices indicating start of test suite + notice verbosity $ summarizeSuiteStart testName' + + suiteLog <- CE.bracket openCabalTemp deleteIfExists $ \tempLog -> do + + (rOut, wOut) <- createPipe + + -- Run test executable + (Just wIn, _, _, process) <- do + let opts = map (testOption pkg_descr lbi suite) $ testOptions flags + dataDirPath = pwd PD.dataDir pkg_descr + tixFile = pwd tixFilePath distPref way testName' + pkgPathEnv = (pkgPathEnvVar pkg_descr "datadir", dataDirPath) + : existingEnv + shellEnv = [("HPCTIXFILE", tixFile) | isCoverageEnabled] + ++ pkgPathEnv + -- Add (DY)LD_LIBRARY_PATH if needed + shellEnv' <- + if LBI.withDynExe lbi + then do + let (Platform _ os) = LBI.hostPlatform lbi + paths <- LBI.depLibraryPaths True False lbi clbi + cpath <- canonicalizePath $ LBI.componentBuildDir lbi clbi + return (addLibraryPath os (cpath : paths) shellEnv) + else return shellEnv + createProcessWithEnv verbosity cmd opts Nothing (Just shellEnv') + -- these handles are closed automatically + CreatePipe (UseHandle wOut) (UseHandle wOut) + + hPutStr wIn $ show (tempLog, PD.testName suite) + hClose wIn + + -- Append contents of temporary log file to the final human- + -- readable log file + logText <- hGetContents rOut + -- Force the IO manager to drain the test output pipe + length logText `seq` return () + + exitcode <- waitForProcess process + unless (exitcode == ExitSuccess) $ do + debug verbosity $ cmd ++ " returned " ++ show exitcode + + -- Generate final log file name + let finalLogName l = testLogDir + testSuiteLogPath + (fromFlag $ testHumanLog flags) pkg_descr lbi + (unUnqualComponentName $ testSuiteName l) (testLogs l) + -- Generate TestSuiteLog from executable exit code and a machine- + -- readable test log + suiteLog <- fmap ((\l -> l { logFile = finalLogName l }) . 
read) -- TODO: eradicateNoParse + $ readFile tempLog + + -- Write summary notice to log file indicating start of test suite + appendFile (logFile suiteLog) $ summarizeSuiteStart testName' + + appendFile (logFile suiteLog) logText + + -- Write end-of-suite summary notice to log file + appendFile (logFile suiteLog) $ summarizeSuiteFinish suiteLog + + -- Show the contents of the human-readable log file on the terminal + -- if there is a failure and/or detailed output is requested + let details = fromFlag $ testShowDetails flags + whenPrinting = when $ (details > Never) + && (not (suitePassed $ testLogs suiteLog) || details == Always) + && verbosity >= normal + whenPrinting $ putStr $ unlines $ lines logText + + return suiteLog + + -- Write summary notice to terminal indicating end of test suite + notice verbosity $ summarizeSuiteFinish suiteLog + + when isCoverageEnabled $ + markupTest verbosity lbi distPref (display $ PD.package pkg_descr) suite + + return suiteLog + where + testName' = unUnqualComponentName $ PD.testName suite + + deleteIfExists file = do + exists <- doesFileExist file + when exists $ removeFile file + + testLogDir = distPref "test" + openCabalTemp = do + (f, h) <- openTempFile testLogDir $ "cabal-test-" <.> "log" + hClose h >> return f + + distPref = fromFlag $ testDistPref flags + verbosity = fromFlag $ testVerbosity flags + +-- TODO: This is abusing the notion of a 'PathTemplate'. The result isn't +-- necessarily a path. 
+testOption :: PD.PackageDescription + -> LBI.LocalBuildInfo + -> PD.TestSuite + -> PathTemplate + -> String +testOption pkg_descr lbi suite template = + fromPathTemplate $ substPathTemplate env template + where + env = initialPathTemplateEnv + (PD.package pkg_descr) (LBI.localUnitId lbi) + (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) ++ + [(TestSuiteNameVar, toPathTemplate $ unUnqualComponentName $ PD.testName suite)] + +-- Test stub ---------- + +-- | The name of the stub executable associated with a library 'TestSuite'. +stubName :: PD.TestSuite -> FilePath +stubName t = unUnqualComponentName (PD.testName t) ++ "Stub" + +-- | The filename of the source file for the stub executable associated with a +-- library 'TestSuite'. +stubFilePath :: PD.TestSuite -> FilePath +stubFilePath t = stubName t <.> "hs" + +-- | Write the source file for a library 'TestSuite' stub executable. +writeSimpleTestStub :: PD.TestSuite -- ^ library 'TestSuite' for which a stub + -- is being created + -> FilePath -- ^ path to directory where stub source + -- should be located + -> NoCallStackIO () +writeSimpleTestStub t dir = do + createDirectoryIfMissing True dir + let filename = dir stubFilePath t + PD.TestSuiteLibV09 _ m = PD.testInterface t + writeFile filename $ simpleTestStub m + +-- | Source code for library test suite stub executable +simpleTestStub :: ModuleName -> String +simpleTestStub m = unlines + [ "module Main ( main ) where" + , "import Distribution.Simple.Test.LibV09 ( stubMain )" + , "import " ++ show (disp m) ++ " ( tests )" + , "main :: IO ()" + , "main = stubMain tests" + ] + +-- | Main function for test stubs. Once, it was written directly into the stub, +-- but minimizing the amount of code actually in the stub maximizes the number +-- of detectable errors when Cabal is compiled. 
+stubMain :: IO [Test] -> IO () +stubMain tests = do + (f, n) <- fmap read getContents -- TODO: eradicateNoParse + dir <- getCurrentDirectory + results <- (tests >>= stubRunTests) `CE.catch` errHandler + setCurrentDirectory dir + stubWriteLog f n results + where + errHandler :: CE.SomeException -> NoCallStackIO TestLogs + errHandler e = case CE.fromException e of + Just CE.UserInterrupt -> CE.throwIO e + _ -> return $ TestLog { testName = "Cabal test suite exception", + testOptionsReturned = [], + testResult = Error $ show e } + +-- | The test runner used in library "TestSuite" stub executables. Runs a list +-- of 'Test's. An executable calling this function is meant to be invoked as +-- the child of a Cabal process during @.\/setup test@. A 'TestSuiteLog', +-- provided by Cabal, is read from the standard input; it supplies the name of +-- the test suite and the location of the machine-readable test suite log file. +-- Human-readable log information is written to the standard output for capture +-- by the calling Cabal process. +stubRunTests :: [Test] -> IO TestLogs +stubRunTests tests = do + logs <- traverse stubRunTests' tests + return $ GroupLogs "Default" logs + where + stubRunTests' (Test t) = do + l <- run t >>= finish + summarizeTest normal Always l + return l + where + finish (Finished result) = + return TestLog + { testName = name t + , testOptionsReturned = defaultOptions t + , testResult = result + } + finish (Progress _ next) = next >>= finish + stubRunTests' g@(Group {}) = do + logs <- traverse stubRunTests' $ groupTests g + return $ GroupLogs (groupName g) logs + stubRunTests' (ExtraOptions _ t) = stubRunTests' t + maybeDefaultOption opt = + maybe Nothing (\d -> Just (optionName opt, d)) $ optionDefault opt + defaultOptions testInst = mapMaybe maybeDefaultOption $ options testInst + +-- | From a test stub, write the 'TestSuiteLog' to temporary file for the calling +-- Cabal process to read. 
+stubWriteLog :: FilePath -> UnqualComponentName -> TestLogs -> NoCallStackIO () +stubWriteLog f n logs = do + let testLog = TestSuiteLog { testSuiteName = n, testLogs = logs, logFile = f } + writeFile (logFile testLog) $ show testLog + when (suiteError logs) $ exitWith $ ExitFailure 2 + when (suiteFailed logs) $ exitWith $ ExitFailure 1 + exitSuccess diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Test/Log.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Test/Log.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Test/Log.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Test/Log.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,164 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +module Distribution.Simple.Test.Log + ( PackageLog(..) + , TestLogs(..) + , TestSuiteLog(..) + , countTestResults + , localPackageLog + , summarizePackage + , summarizeSuiteFinish, summarizeSuiteStart + , summarizeTest + , suiteError, suiteFailed, suitePassed + , testSuiteLogPath + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Package +import Distribution.Types.UnqualComponentName +import qualified Distribution.PackageDescription as PD +import Distribution.Simple.Compiler +import Distribution.Simple.InstallDirs +import qualified Distribution.Simple.LocalBuildInfo as LBI +import Distribution.Simple.Setup +import Distribution.Simple.Utils +import Distribution.System +import Distribution.TestSuite +import Distribution.Verbosity +import Distribution.Text + +-- | Logs all test results for a package, broken down first by test suite and +-- then by test case. 
+data PackageLog = PackageLog + { package :: PackageId + , compiler :: CompilerId + , platform :: Platform + , testSuites :: [TestSuiteLog] + } + deriving (Read, Show, Eq) + +-- | A 'PackageLog' with package and platform information specified. +localPackageLog :: PD.PackageDescription -> LBI.LocalBuildInfo -> PackageLog +localPackageLog pkg_descr lbi = PackageLog + { package = PD.package pkg_descr + , compiler = compilerId $ LBI.compiler lbi + , platform = LBI.hostPlatform lbi + , testSuites = [] + } + +-- | Logs test suite results, itemized by test case. +data TestSuiteLog = TestSuiteLog + { testSuiteName :: UnqualComponentName + , testLogs :: TestLogs + , logFile :: FilePath -- path to human-readable log file + } + deriving (Read, Show, Eq) + +data TestLogs + = TestLog + { testName :: String + , testOptionsReturned :: Options + , testResult :: Result + } + | GroupLogs String [TestLogs] + deriving (Read, Show, Eq) + +-- | Count the number of pass, fail, and error test results in a 'TestLogs' +-- tree. +countTestResults :: TestLogs + -> (Int, Int, Int) -- ^ Passes, fails, and errors, + -- respectively. +countTestResults = go (0, 0, 0) + where + go (p, f, e) (TestLog { testResult = r }) = + case r of + Pass -> (p + 1, f, e) + Fail _ -> (p, f + 1, e) + Error _ -> (p, f, e + 1) + go (p, f, e) (GroupLogs _ ts) = foldl go (p, f, e) ts + +-- | From a 'TestSuiteLog', determine if the test suite passed. +suitePassed :: TestLogs -> Bool +suitePassed l = + case countTestResults l of + (_, 0, 0) -> True + _ -> False + +-- | From a 'TestSuiteLog', determine if the test suite failed. +suiteFailed :: TestLogs -> Bool +suiteFailed l = + case countTestResults l of + (_, 0, _) -> False + _ -> True + +-- | From a 'TestSuiteLog', determine if the test suite encountered errors. 
+suiteError :: TestLogs -> Bool +suiteError l = + case countTestResults l of + (_, _, 0) -> False + _ -> True + +resultString :: TestLogs -> String +resultString l | suiteError l = "error" + | suiteFailed l = "fail" + | otherwise = "pass" + +testSuiteLogPath :: PathTemplate + -> PD.PackageDescription + -> LBI.LocalBuildInfo + -> String -- ^ test suite name + -> TestLogs -- ^ test suite results + -> FilePath +testSuiteLogPath template pkg_descr lbi test_name result = + fromPathTemplate $ substPathTemplate env template + where + env = initialPathTemplateEnv + (PD.package pkg_descr) (LBI.localUnitId lbi) + (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) + ++ [ (TestSuiteNameVar, toPathTemplate test_name) + , (TestSuiteResultVar, toPathTemplate $ resultString result) + ] + +-- | Print a summary to the console after all test suites have been run +-- indicating the number of successful test suites and cases. Returns 'True' if +-- all test suites passed and 'False' otherwise. +summarizePackage :: Verbosity -> PackageLog -> IO Bool +summarizePackage verbosity packageLog = do + let counts = map (countTestResults . testLogs) $ testSuites packageLog + (passed, failed, errors) = foldl1 addTriple counts + totalCases = passed + failed + errors + passedSuites = length + $ filter (suitePassed . testLogs) + $ testSuites packageLog + totalSuites = length $ testSuites packageLog + notice verbosity $ show passedSuites ++ " of " ++ show totalSuites + ++ " test suites (" ++ show passed ++ " of " + ++ show totalCases ++ " test cases) passed." + return $! passedSuites == totalSuites + where + addTriple (p1, f1, e1) (p2, f2, e2) = (p1 + p2, f1 + f2, e1 + e2) + +-- | Print a summary of a single test case's result to the console, supressing +-- output for certain verbosity or test filter levels. 
+summarizeTest :: Verbosity -> TestShowDetails -> TestLogs -> IO () +summarizeTest _ _ (GroupLogs {}) = return () +summarizeTest verbosity details t = + when shouldPrint $ notice verbosity $ "Test case " ++ testName t + ++ ": " ++ show (testResult t) + where shouldPrint = (details > Never) && (notPassed || details == Always) + notPassed = testResult t /= Pass + +-- | Print a summary of the test suite's results on the console, suppressing +-- output for certain verbosity or test filter levels. +summarizeSuiteFinish :: TestSuiteLog -> String +summarizeSuiteFinish testLog = unlines + [ "Test suite " ++ display (testSuiteName testLog) ++ ": " ++ resStr + , "Test suite logged to: " ++ logFile testLog + ] + where resStr = map toUpper (resultString $ testLogs testLog) + +summarizeSuiteStart :: String -> String +summarizeSuiteStart n = "Test suite " ++ n ++ ": RUNNING...\n" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Test.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Test.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Test.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Test.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,137 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Test +-- Copyright : Thomas Tuegel 2010 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is the entry point into testing a built package. It performs the +-- \"@.\/setup test@\" action. It runs test suites designated in the package +-- description and reports on the results. 
+ +module Distribution.Simple.Test + ( test + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.UnqualComponentName +import qualified Distribution.PackageDescription as PD +import Distribution.Simple.Compiler +import Distribution.Simple.Hpc +import Distribution.Simple.InstallDirs +import qualified Distribution.Simple.LocalBuildInfo as LBI +import qualified Distribution.Types.LocalBuildInfo as LBI +import Distribution.Simple.Setup +import Distribution.Simple.UserHooks +import qualified Distribution.Simple.Test.ExeV10 as ExeV10 +import qualified Distribution.Simple.Test.LibV09 as LibV09 +import Distribution.Simple.Test.Log +import Distribution.Simple.Utils +import Distribution.TestSuite +import Distribution.Text + +import System.Directory + ( createDirectoryIfMissing, doesFileExist, getDirectoryContents + , removeFile ) +import System.Exit ( exitFailure, exitSuccess ) +import System.FilePath ( () ) + +-- |Perform the \"@.\/setup test@\" action. +test :: Args -- ^positional command-line arguments + -> PD.PackageDescription -- ^information from the .cabal file + -> LBI.LocalBuildInfo -- ^information from the configure step + -> TestFlags -- ^flags sent to test + -> IO () +test args pkg_descr lbi flags = do + let verbosity = fromFlag $ testVerbosity flags + machineTemplate = fromFlag $ testMachineLog flags + distPref = fromFlag $ testDistPref flags + testLogDir = distPref "test" + testNames = args + pkgTests = PD.testSuites pkg_descr + enabledTests = LBI.enabledTestLBIs pkg_descr lbi + + doTest :: ((PD.TestSuite, LBI.ComponentLocalBuildInfo), + Maybe TestSuiteLog) -> IO TestSuiteLog + doTest ((suite, clbi), _) = + case PD.testInterface suite of + PD.TestSuiteExeV10 _ _ -> + ExeV10.runTest pkg_descr lbi clbi flags suite + + PD.TestSuiteLibV09 _ _ -> + LibV09.runTest pkg_descr lbi clbi flags suite + + _ -> return TestSuiteLog + { testSuiteName = PD.testName suite + , testLogs = TestLog + { testName = unUnqualComponentName $ 
PD.testName suite + , testOptionsReturned = [] + , testResult = + Error $ "No support for running test suite type: " + ++ show (disp $ PD.testType suite) + } + , logFile = "" + } + + unless (PD.hasTests pkg_descr) $ do + notice verbosity "Package has no test suites." + exitSuccess + + when (PD.hasTests pkg_descr && null enabledTests) $ + die' verbosity $ + "No test suites enabled. Did you remember to configure with " + ++ "\'--enable-tests\'?" + + testsToRun <- case testNames of + [] -> return $ zip enabledTests $ repeat Nothing + names -> for names $ \tName -> + let testMap = zip enabledNames enabledTests + enabledNames = map (PD.testName . fst) enabledTests + allNames = map PD.testName pkgTests + tCompName = mkUnqualComponentName tName + in case lookup tCompName testMap of + Just t -> return (t, Nothing) + _ | tCompName `elem` allNames -> + die' verbosity $ "Package configured with test suite " + ++ tName ++ " disabled." + | otherwise -> die' verbosity $ "no such test: " ++ tName + + createDirectoryIfMissing True testLogDir + + -- Delete ordinary files from test log directory. + getDirectoryContents testLogDir + >>= filterM doesFileExist . map (testLogDir ) + >>= traverse_ removeFile + + let totalSuites = length testsToRun + notice verbosity $ "Running " ++ show totalSuites ++ " test suites..." + suites <- traverse doTest testsToRun + let packageLog = (localPackageLog pkg_descr lbi) { testSuites = suites } + packageLogFile = () testLogDir + $ packageLogPath machineTemplate pkg_descr lbi + allOk <- summarizePackage verbosity packageLog + writeFile packageLogFile $ show packageLog + + when (LBI.testCoverage lbi) $ + markupPackage verbosity lbi distPref (display $ PD.package pkg_descr) $ + map (fst . 
fst) testsToRun + + unless allOk exitFailure + +packageLogPath :: PathTemplate + -> PD.PackageDescription + -> LBI.LocalBuildInfo + -> FilePath +packageLogPath template pkg_descr lbi = + fromPathTemplate $ substPathTemplate env template + where + env = initialPathTemplateEnv + (PD.package pkg_descr) (LBI.localUnitId lbi) + (compilerInfo $ LBI.compiler lbi) (LBI.hostPlatform lbi) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/UHC.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/UHC.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/UHC.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/UHC.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,293 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.UHC +-- Copyright : Andres Loeh 2009 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module contains most of the UHC-specific code for configuring, building +-- and installing packages. +-- +-- Thanks to the authors of the other implementation-specific files, in +-- particular to Isaac Jones, Duncan Coutts and Henning Thielemann, for +-- inspiration on how to design this module. 
+ +module Distribution.Simple.UHC ( + configure, getInstalledPackages, + buildLib, buildExe, installLib, registerPackage, inplacePackageDbPath + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Compat.ReadP +import Distribution.InstalledPackageInfo +import Distribution.Package hiding (installedUnitId) +import Distribution.PackageDescription +import Distribution.Simple.BuildPaths +import Distribution.Simple.Compiler as C +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.PackageIndex +import Distribution.Simple.Program +import Distribution.Simple.Utils +import Distribution.Text +import Distribution.Types.MungedPackageId +import Distribution.Verbosity +import Distribution.Version +import Distribution.System +import Language.Haskell.Extension + +import qualified Data.Map as Map ( empty ) +import System.Directory +import System.FilePath + +-- ----------------------------------------------------------------------------- +-- Configuring + +configure :: Verbosity -> Maybe FilePath -> Maybe FilePath + -> ProgramDb -> IO (Compiler, Maybe Platform, ProgramDb) +configure verbosity hcPath _hcPkgPath progdb = do + + (_uhcProg, uhcVersion, progdb') <- + requireProgramVersion verbosity uhcProgram + (orLaterVersion (mkVersion [1,0,2])) + (userMaybeSpecifyPath "uhc" hcPath progdb) + + let comp = Compiler { + compilerId = CompilerId UHC uhcVersion, + compilerAbiTag = C.NoAbiTag, + compilerCompat = [], + compilerLanguages = uhcLanguages, + compilerExtensions = uhcLanguageExtensions, + compilerProperties = Map.empty + } + compPlatform = Nothing + return (comp, compPlatform, progdb') + +uhcLanguages :: [(Language, C.Flag)] +uhcLanguages = [(Haskell98, "")] + +-- | The flags for the supported extensions. 
+uhcLanguageExtensions :: [(Extension, Maybe C.Flag)] +uhcLanguageExtensions = + let doFlag (f, (enable, disable)) = [(EnableExtension f, enable), + (DisableExtension f, disable)] + alwaysOn = (Nothing, Nothing{- wrong -}) + in concatMap doFlag + [(CPP, (Just "--cpp", Nothing{- wrong -})), + (PolymorphicComponents, alwaysOn), + (ExistentialQuantification, alwaysOn), + (ForeignFunctionInterface, alwaysOn), + (UndecidableInstances, alwaysOn), + (MultiParamTypeClasses, alwaysOn), + (Rank2Types, alwaysOn), + (PatternSignatures, alwaysOn), + (EmptyDataDecls, alwaysOn), + (ImplicitPrelude, (Nothing, Just "--no-prelude"{- wrong -})), + (TypeOperators, alwaysOn), + (OverlappingInstances, alwaysOn), + (FlexibleInstances, alwaysOn)] + +getInstalledPackages :: Verbosity -> Compiler -> PackageDBStack -> ProgramDb + -> IO InstalledPackageIndex +getInstalledPackages verbosity comp packagedbs progdb = do + let compilerid = compilerId comp + systemPkgDir <- getGlobalPackageDir verbosity progdb + userPkgDir <- getUserPackageDir + let pkgDirs = nub (concatMap (packageDbPaths userPkgDir systemPkgDir) packagedbs) + -- putStrLn $ "pkgdirs: " ++ show pkgDirs + pkgs <- liftM (map addBuiltinVersions . 
concat) $ + traverse (\ d -> getDirectoryContents d >>= filterM (isPkgDir (display compilerid) d)) + pkgDirs + -- putStrLn $ "pkgs: " ++ show pkgs + let iPkgs = + map mkInstalledPackageInfo $ + concatMap parsePackage $ + pkgs + -- putStrLn $ "installed pkgs: " ++ show iPkgs + return (fromList iPkgs) + +getGlobalPackageDir :: Verbosity -> ProgramDb -> IO FilePath +getGlobalPackageDir verbosity progdb = do + output <- getDbProgramOutput verbosity + uhcProgram progdb ["--meta-pkgdir-system"] + -- call to "lines" necessary, because pkgdir contains an extra newline at the end + let [pkgdir] = lines output + return pkgdir + +getUserPackageDir :: NoCallStackIO FilePath +getUserPackageDir = do + homeDir <- getHomeDirectory + return $ homeDir ".cabal" "lib" -- TODO: determine in some other way + +packageDbPaths :: FilePath -> FilePath -> PackageDB -> [FilePath] +packageDbPaths user system db = + case db of + GlobalPackageDB -> [ system ] + UserPackageDB -> [ user ] + SpecificPackageDB path -> [ path ] + +-- | Hack to add version numbers to UHC-built-in packages. This should sooner or +-- later be fixed on the UHC side. +addBuiltinVersions :: String -> String +{- +addBuiltinVersions "uhcbase" = "uhcbase-1.0" +addBuiltinVersions "base" = "base-3.0" +addBuiltinVersions "array" = "array-0.2" +-} +addBuiltinVersions xs = xs + +-- | Name of the installed package config file. +installedPkgConfig :: String +installedPkgConfig = "installed-pkg-config" + +-- | Check if a certain dir contains a valid package. Currently, we are +-- looking only for the presence of an installed package configuration. +-- TODO: Actually make use of the information provided in the file. +isPkgDir :: String -> String -> String -> NoCallStackIO Bool +isPkgDir _ _ ('.' : _) = return False -- ignore files starting with a . 
+isPkgDir c dir xs = do + let candidate = dir uhcPackageDir xs c + -- putStrLn $ "trying: " ++ candidate + doesFileExist (candidate installedPkgConfig) + +parsePackage :: String -> [PackageId] +parsePackage x = map fst (filter (\ (_,y) -> null y) (readP_to_S parse x)) + +-- | Create a trivial package info from a directory name. +mkInstalledPackageInfo :: PackageId -> InstalledPackageInfo +mkInstalledPackageInfo p = emptyInstalledPackageInfo + { installedUnitId = mkLegacyUnitId p, + sourcePackageId = p } + + +-- ----------------------------------------------------------------------------- +-- Building + +buildLib :: Verbosity -> PackageDescription -> LocalBuildInfo + -> Library -> ComponentLocalBuildInfo -> IO () +buildLib verbosity pkg_descr lbi lib clbi = do + + systemPkgDir <- getGlobalPackageDir verbosity (withPrograms lbi) + userPkgDir <- getUserPackageDir + let runUhcProg = runDbProgram verbosity uhcProgram (withPrograms lbi) + let uhcArgs = -- set package name + ["--pkg-build=" ++ display (packageId pkg_descr)] + -- common flags lib/exe + ++ constructUHCCmdLine userPkgDir systemPkgDir + lbi (libBuildInfo lib) clbi + (buildDir lbi) verbosity + -- source files + -- suboptimal: UHC does not understand module names, so + -- we replace periods by path separators + ++ map (map (\ c -> if c == '.' 
then pathSeparator else c)) + (map display (allLibModules lib clbi)) + + runUhcProg uhcArgs + + return () + +buildExe :: Verbosity -> PackageDescription -> LocalBuildInfo + -> Executable -> ComponentLocalBuildInfo -> IO () +buildExe verbosity _pkg_descr lbi exe clbi = do + systemPkgDir <- getGlobalPackageDir verbosity (withPrograms lbi) + userPkgDir <- getUserPackageDir + let runUhcProg = runDbProgram verbosity uhcProgram (withPrograms lbi) + let uhcArgs = -- common flags lib/exe + constructUHCCmdLine userPkgDir systemPkgDir + lbi (buildInfo exe) clbi + (buildDir lbi) verbosity + -- output file + ++ ["--output", buildDir lbi display (exeName exe)] + -- main source module + ++ [modulePath exe] + runUhcProg uhcArgs + +constructUHCCmdLine :: FilePath -> FilePath + -> LocalBuildInfo -> BuildInfo -> ComponentLocalBuildInfo + -> FilePath -> Verbosity -> [String] +constructUHCCmdLine user system lbi bi clbi odir verbosity = + -- verbosity + (if verbosity >= deafening then ["-v4"] + else if verbosity >= normal then [] + else ["-v0"]) + ++ hcOptions UHC bi + -- flags for language extensions + ++ languageToFlags (compiler lbi) (defaultLanguage bi) + ++ extensionsToFlags (compiler lbi) (usedExtensions bi) + -- packages + ++ ["--hide-all-packages"] + ++ uhcPackageDbOptions user system (withPackageDB lbi) + ++ ["--package=uhcbase"] + ++ ["--package=" ++ display (mungedName pkgid) | (_, pkgid) <- componentPackageDeps clbi ] + -- search paths + ++ ["-i" ++ odir] + ++ ["-i" ++ l | l <- nub (hsSourceDirs bi)] + ++ ["-i" ++ autogenComponentModulesDir lbi clbi] + ++ ["-i" ++ autogenPackageModulesDir lbi] + -- cpp options + ++ ["--optP=" ++ opt | opt <- cppOptions bi] + -- output path + ++ ["--odir=" ++ odir] + -- optimization + ++ (case withOptimization lbi of + NoOptimisation -> ["-O0"] + NormalOptimisation -> ["-O1"] + MaximumOptimisation -> ["-O2"]) + +uhcPackageDbOptions :: FilePath -> FilePath -> PackageDBStack -> [String] +uhcPackageDbOptions user system db = map (\ x -> 
"--pkg-searchpath=" ++ x) + (concatMap (packageDbPaths user system) db) + +-- ----------------------------------------------------------------------------- +-- Installation + +installLib :: Verbosity -> LocalBuildInfo + -> FilePath -> FilePath -> FilePath + -> PackageDescription -> Library -> ComponentLocalBuildInfo -> IO () +installLib verbosity _lbi targetDir _dynlibTargetDir builtDir pkg _library _clbi = do + -- putStrLn $ "dest: " ++ targetDir + -- putStrLn $ "built: " ++ builtDir + installDirectoryContents verbosity (builtDir display (packageId pkg)) targetDir + +-- currently hard-coded UHC code generator and variant to use +uhcTarget, uhcTargetVariant :: String +uhcTarget = "bc" +uhcTargetVariant = "plain" + +-- root directory for a package in UHC +uhcPackageDir :: String -> String -> FilePath +uhcPackageSubDir :: String -> FilePath +uhcPackageDir pkgid compilerid = pkgid uhcPackageSubDir compilerid +uhcPackageSubDir compilerid = compilerid uhcTarget uhcTargetVariant + +-- ----------------------------------------------------------------------------- +-- Registering + +registerPackage + :: Verbosity + -> Compiler + -> ProgramDb + -> PackageDBStack + -> InstalledPackageInfo + -> IO () +registerPackage verbosity comp progdb packageDbs installedPkgInfo = do + dbdir <- case last packageDbs of + GlobalPackageDB -> getGlobalPackageDir verbosity progdb + UserPackageDB -> getUserPackageDir + SpecificPackageDB dir -> return dir + let pkgdir = dbdir uhcPackageDir (display pkgid) (display compilerid) + createDirectoryIfMissingVerbose verbosity True pkgdir + writeUTF8File (pkgdir installedPkgConfig) + (showInstalledPackageInfo installedPkgInfo) + where + pkgid = sourcePackageId installedPkgInfo + compilerid = compilerId comp + +inplacePackageDbPath :: LocalBuildInfo -> FilePath +inplacePackageDbPath lbi = buildDir lbi diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/UserHooks.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/UserHooks.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/UserHooks.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/UserHooks.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,225 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.UserHooks +-- Copyright : Isaac Jones 2003-2005 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This defines the API that @Setup.hs@ scripts can use to customise the way +-- the build works. This module just defines the 'UserHooks' type. The +-- predefined sets of hooks that implement the @Simple@, @Make@ and @Configure@ +-- build systems are defined in "Distribution.Simple". The 'UserHooks' is a big +-- record of functions. There are 3 for each action, a pre, post and the action +-- itself. There are few other miscellaneous hooks, ones to extend the set of +-- programs and preprocessors and one to override the function used to read the +-- @.cabal@ file. +-- +-- This hooks type is widely agreed to not be the right solution. Partly this +-- is because changes to it usually break custom @Setup.hs@ files and yet many +-- internal code changes do require changes to the hooks. For example we cannot +-- pass any extra parameters to most of the functions that implement the +-- various phases because it would involve changing the types of the +-- corresponding hook. At some point it will have to be replaced. 
+ +module Distribution.Simple.UserHooks ( + UserHooks(..), Args, + emptyUserHooks, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.PackageDescription +import Distribution.Simple.Program +import Distribution.Simple.Command +import Distribution.Simple.PreProcess +import Distribution.Simple.Setup +import Distribution.Simple.LocalBuildInfo + +type Args = [String] + +-- | Hooks allow authors to add specific functionality before and after a +-- command is run, and also to specify additional preprocessors. +-- +-- * WARNING: The hooks interface is under rather constant flux as we try to +-- understand users needs. Setup files that depend on this interface may +-- break in future releases. +data UserHooks = UserHooks { + + -- | Used for @.\/setup test@ + runTests :: Args -> Bool -> PackageDescription -> LocalBuildInfo -> IO (), + -- | Read the description file + readDesc :: IO (Maybe GenericPackageDescription), + -- | Custom preprocessors in addition to and overriding 'knownSuffixHandlers'. + hookedPreProcessors :: [ PPSuffixHandler ], + -- | These programs are detected at configure time. Arguments for them are + -- added to the configure command. + hookedPrograms :: [Program], + + -- |Hook to run before configure command + preConf :: Args -> ConfigFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during configure. + confHook :: (GenericPackageDescription, HookedBuildInfo) + -> ConfigFlags -> IO LocalBuildInfo, + -- |Hook to run after configure command + postConf :: Args -> ConfigFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before build command. Second arg indicates verbosity level. + preBuild :: Args -> BuildFlags -> IO HookedBuildInfo, + + -- |Over-ride this hook to get different behavior during build. + buildHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> BuildFlags -> IO (), + -- |Hook to run after build command. Second arg indicates verbosity level. 
+ postBuild :: Args -> BuildFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before repl command. Second arg indicates verbosity level. + preRepl :: Args -> ReplFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during interpretation. + replHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> ReplFlags -> [String] -> IO (), + -- |Hook to run after repl command. Second arg indicates verbosity level. + postRepl :: Args -> ReplFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before clean command. Second arg indicates verbosity level. + preClean :: Args -> CleanFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during clean. + cleanHook :: PackageDescription -> () -> UserHooks -> CleanFlags -> IO (), + -- |Hook to run after clean command. Second arg indicates verbosity level. + postClean :: Args -> CleanFlags -> PackageDescription -> () -> IO (), + + -- |Hook to run before copy command + preCopy :: Args -> CopyFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during copy. + copyHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> CopyFlags -> IO (), + -- |Hook to run after copy command + postCopy :: Args -> CopyFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before install command + preInst :: Args -> InstallFlags -> IO HookedBuildInfo, + + -- |Over-ride this hook to get different behavior during install. + instHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> InstallFlags -> IO (), + -- |Hook to run after install command. postInst should be run + -- on the target, not on the build machine. + postInst :: Args -> InstallFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before sdist command. Second arg indicates verbosity level. + preSDist :: Args -> SDistFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during sdist. 
+ sDistHook :: PackageDescription -> Maybe LocalBuildInfo -> UserHooks -> SDistFlags -> IO (), + -- |Hook to run after sdist command. Second arg indicates verbosity level. + postSDist :: Args -> SDistFlags -> PackageDescription -> Maybe LocalBuildInfo -> IO (), + + -- |Hook to run before register command + preReg :: Args -> RegisterFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during registration. + regHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> RegisterFlags -> IO (), + -- |Hook to run after register command + postReg :: Args -> RegisterFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before unregister command + preUnreg :: Args -> RegisterFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during unregistration. + unregHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> RegisterFlags -> IO (), + -- |Hook to run after unregister command + postUnreg :: Args -> RegisterFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before hscolour command. Second arg indicates verbosity level. + preHscolour :: Args -> HscolourFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during hscolour. + hscolourHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> HscolourFlags -> IO (), + -- |Hook to run after hscolour command. Second arg indicates verbosity level. + postHscolour :: Args -> HscolourFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before doctest command. Second arg indicates verbosity level. + preDoctest :: Args -> DoctestFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during doctest. + doctestHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> DoctestFlags -> IO (), + -- |Hook to run after doctest command. Second arg indicates verbosity level. 
+ postDoctest :: Args -> DoctestFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before haddock command. Second arg indicates verbosity level. + preHaddock :: Args -> HaddockFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during haddock. + haddockHook :: PackageDescription -> LocalBuildInfo -> UserHooks -> HaddockFlags -> IO (), + -- |Hook to run after haddock command. Second arg indicates verbosity level. + postHaddock :: Args -> HaddockFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before test command. + preTest :: Args -> TestFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during test. + testHook :: Args -> PackageDescription -> LocalBuildInfo -> UserHooks -> TestFlags -> IO (), + -- |Hook to run after test command. + postTest :: Args -> TestFlags -> PackageDescription -> LocalBuildInfo -> IO (), + + -- |Hook to run before bench command. + preBench :: Args -> BenchmarkFlags -> IO HookedBuildInfo, + -- |Over-ride this hook to get different behavior during bench. + benchHook :: Args -> PackageDescription -> LocalBuildInfo -> UserHooks -> BenchmarkFlags -> IO (), + -- |Hook to run after bench command. + postBench :: Args -> BenchmarkFlags -> PackageDescription -> LocalBuildInfo -> IO () + } + +{-# DEPRECATED runTests "Please use the new testing interface instead!" #-} +{-# DEPRECATED preSDist "SDist hooks violate the invariants of new-sdist. Use 'autogen-modules' and 'build-tool-depends' instead." #-} +{-# DEPRECATED sDistHook "SDist hooks violate the invariants of new-sdist. Use 'autogen-modules' and 'build-tool-depends' instead." #-} +{-# DEPRECATED postSDist "SDist hooks violate the invariants of new-sdist. Use 'autogen-modules' and 'build-tool-depends' instead." #-} + +-- |Empty 'UserHooks' which do nothing. 
+emptyUserHooks :: UserHooks +emptyUserHooks + = UserHooks { + runTests = ru, + readDesc = return Nothing, + hookedPreProcessors = [], + hookedPrograms = [], + preConf = rn', + confHook = (\_ _ -> return (error "No local build info generated during configure. Over-ride empty configure hook.")), + postConf = ru, + preBuild = rn', + buildHook = ru, + postBuild = ru, + preRepl = \_ _ -> return emptyHookedBuildInfo, + replHook = \_ _ _ _ _ -> return (), + postRepl = ru, + preClean = rn, + cleanHook = ru, + postClean = ru, + preCopy = rn', + copyHook = ru, + postCopy = ru, + preInst = rn, + instHook = ru, + postInst = ru, + preSDist = rn, + sDistHook = ru, + postSDist = ru, + preReg = rn', + regHook = ru, + postReg = ru, + preUnreg = rn, + unregHook = ru, + postUnreg = ru, + preHscolour = rn, + hscolourHook = ru, + postHscolour = ru, + preDoctest = rn, + doctestHook = ru, + postDoctest = ru, + preHaddock = rn', + haddockHook = ru, + postHaddock = ru, + preTest = rn', + testHook = \_ -> ru, + postTest = ru, + preBench = rn', + benchHook = \_ -> ru, + postBench = ru + } + where rn args _ = noExtraFlags args >> return emptyHookedBuildInfo + rn' _ _ = return emptyHookedBuildInfo + ru _ _ _ _ = return () diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Utils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Utils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple/Utils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple/Utils.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,1515 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE BangPatterns #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Utils 
+-- Copyright : Isaac Jones, Simon Marlow 2003-2004 +-- License : BSD3 +-- portions Copyright (c) 2007, Galois Inc. +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- A large and somewhat miscellaneous collection of utility functions used +-- throughout the rest of the Cabal lib and in other tools that use the Cabal +-- lib like @cabal-install@. It has a very simple set of logging actions. It +-- has low level functions for running programs, a bunch of wrappers for +-- various directory and file functions that do extra logging. + +module Distribution.Simple.Utils ( + cabalVersion, + + -- * logging and errors + -- Old style + die, dieWithLocation, + -- New style + dieNoVerbosity, + die', dieWithLocation', + dieNoWrap, + topHandler, topHandlerWith, + warn, + notice, noticeNoWrap, noticeDoc, + setupMessage, + info, infoNoWrap, + debug, debugNoWrap, + chattyTry, + annotateIO, + printRawCommandAndArgs, printRawCommandAndArgsAndEnv, + withOutputMarker, + + -- * exceptions + handleDoesNotExist, + + -- * running programs + rawSystemExit, + rawSystemExitCode, + rawSystemExitWithEnv, + rawSystemStdout, + rawSystemStdInOut, + rawSystemIOWithEnv, + createProcessWithEnv, + maybeExit, + xargs, + findProgramLocation, + findProgramVersion, + + -- ** 'IOData' re-export + -- + -- These types are re-exported from + -- "Distribution.Utils.IOData" for convience as they're + -- exposed in the API of 'rawSystemStdInOut' + IOData(..), + IODataMode(..), + + -- * copying files + smartCopySources, + createDirectoryIfMissingVerbose, + copyFileVerbose, + copyDirectoryRecursiveVerbose, + copyFiles, + copyFileTo, + + -- * installing files + installOrdinaryFile, + installExecutableFile, + installMaybeExecutableFile, + installOrdinaryFiles, + installExecutableFiles, + installMaybeExecutableFiles, + installDirectoryContents, + copyDirectoryRecursive, + + -- * File permissions + doesExecutableExist, + setFileOrdinary, + setFileExecutable, + + -- * file names + 
currentDir, + shortRelativePath, + dropExeExtension, + exeExtensions, + + -- * finding files + findFile, + findFirstFile, + findFileWithExtension, + findFileWithExtension', + findAllFilesWithExtension, + findModuleFile, + findModuleFiles, + getDirectoryContentsRecursive, + + -- * environment variables + isInSearchPath, + addLibraryPath, + + -- * modification time + moreRecentFile, + existsAndIsMoreRecentThan, + + -- * temp files and dirs + TempFileOptions(..), defaultTempFileOptions, + withTempFile, withTempFileEx, + withTempDirectory, withTempDirectoryEx, + createTempDirectory, + + -- * .cabal and .buildinfo files + defaultPackageDesc, + findPackageDesc, + tryFindPackageDesc, + defaultHookedPackageDesc, + findHookedPackageDesc, + + -- * reading and writing files safely + withFileContents, + writeFileAtomic, + rewriteFile, + rewriteFileEx, + + -- * Unicode + fromUTF8BS, + fromUTF8LBS, + toUTF8BS, + toUTF8LBS, + readUTF8File, + withUTF8FileContents, + writeUTF8File, + normaliseLineEndings, + + -- * BOM + ignoreBOM, + + -- * generic utils + dropWhileEndLE, + takeWhileEndLE, + equating, + comparing, + isInfixOf, + intercalate, + lowercase, + listUnion, + listUnionRight, + ordNub, + ordNubBy, + ordNubRight, + safeTail, + unintersperse, + wrapText, + wrapLine, + + -- * FilePath stuff + isAbsoluteOnAnyPlatform, + isRelativeOnAnyPlatform, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Text +import Distribution.Utils.Generic +import Distribution.Utils.IOData (IOData(..), IODataMode(..)) +import qualified Distribution.Utils.IOData as IOData +import Distribution.ModuleName as ModuleName +import Distribution.System +import Distribution.Version +import Distribution.Compat.CopyFile +import Distribution.Compat.Internal.TempFile +import Distribution.Compat.Exception +import Distribution.Compat.Stack +import Distribution.Verbosity +import Distribution.Types.PackageId + +#if __GLASGOW_HASKELL__ < 711 +#ifdef VERSION_base +#define 
BOOTSTRAPPED_CABAL 1 +#endif +#else +#ifdef CURRENT_PACKAGE_KEY +#define BOOTSTRAPPED_CABAL 1 +#endif +#endif + +#ifdef BOOTSTRAPPED_CABAL +import qualified Paths_Cabal (version) +#endif + +import Control.Concurrent.MVar + ( newEmptyMVar, putMVar, takeMVar ) +import Data.Typeable + ( cast ) +import qualified Data.ByteString.Lazy.Char8 as BS.Char8 + +import System.Directory + ( Permissions(executable), getDirectoryContents, getPermissions + , doesDirectoryExist, doesFileExist, removeFile, findExecutable + , getModificationTime, createDirectory, removeDirectoryRecursive ) +import System.Environment + ( getProgName ) +import System.Exit + ( exitWith, ExitCode(..) ) +import System.FilePath + ( normalise, (), (<.>) + , getSearchPath, joinPath, takeDirectory, splitExtension + , splitDirectories, searchPathSeparator ) +import System.IO + ( Handle, hSetBinaryMode, hGetContents, stderr, stdout, hPutStr, hFlush + , hClose, hSetBuffering, BufferMode(..) ) +import System.IO.Error +import System.IO.Unsafe + ( unsafeInterleaveIO ) +import qualified Control.Exception as Exception + +import Data.Time.Clock.POSIX (getPOSIXTime, POSIXTime) +import Control.Exception (IOException, evaluate, throwIO) +import Control.Concurrent (forkIO) +import Numeric (showFFloat) +import qualified System.Process as Process + ( CreateProcess(..), StdStream(..), proc) +import System.Process + ( ProcessHandle, createProcess, rawSystem, runInteractiveProcess + , showCommandForUser, waitForProcess) + +import qualified Text.PrettyPrint as Disp + +-- We only get our own version number when we're building with ourselves +cabalVersion :: Version +#if defined(BOOTSTRAPPED_CABAL) +cabalVersion = mkVersion' Paths_Cabal.version +#elif defined(CABAL_VERSION) +cabalVersion = mkVersion [CABAL_VERSION] +#else +cabalVersion = mkVersion [1,9999] --used when bootstrapping +#endif + +-- ---------------------------------------------------------------------------- +-- Exception and logging utils + +-- Cabal's logging 
infrastructure has a few constraints: +-- +-- * We must make all logging formatting and emissions decisions based +-- on the 'Verbosity' parameter, which is the only parameter that is +-- plumbed to enough call-sites to actually be used for this matter. +-- (One of Cabal's "big mistakes" is to have never have defined a +-- monad of its own.) +-- +-- * When we 'die', we must raise an IOError. This a backwards +-- compatibility consideration, because that's what we've raised +-- previously, and if we change to any other exception type, +-- exception handlers which match on IOError will no longer work. +-- One case where it is known we rely on IOError being catchable +-- is 'readPkgConfigDb' in cabal-install; there may be other +-- user code that also assumes this. +-- +-- * The 'topHandler' does not know what 'Verbosity' is, because +-- it gets called before we've done command line parsing (where +-- the 'Verbosity' parameter would come from). +-- +-- This leads to two big architectural choices: +-- +-- * Although naively we might imagine 'Verbosity' to be a simple +-- enumeration type, actually it is a full-on abstract data type +-- that may contain arbitrarily complex information. At the +-- moment, it is fully representable as a string, but we might +-- eventually also use verbosity to let users register their +-- own logging handler. +-- +-- * When we call 'die', we perform all the formatting and addition +-- of extra information we need, and then ship this in the IOError +-- to the top-level handler. Here are alternate designs that +-- don't work: +-- +-- a) Ship the unformatted info to the handler. This doesn't +-- work because at the point the handler gets the message, +-- we've lost call stacks, and even if we did, we don't have access +-- to 'Verbosity' to decide whether or not to render it. +-- +-- b) Print the information at the 'die' site, then raise an +-- error. 
This means that if the exception is subsequently +-- caught by a handler, we will still have emitted the output, +-- which is not the correct behavior. +-- +-- For the top-level handler to "know" that an error message +-- contains one of these fully formatted packets, we set a sentinel +-- in one of IOError's extra fields. This is handled by +-- 'ioeSetVerbatim' and 'ioeGetVerbatim'. +-- + +{-# DEPRECATED dieWithLocation "Messages thrown with dieWithLocation can't be controlled with Verbosity; use dieWithLocation' instead" #-} +dieWithLocation :: FilePath -> Maybe Int -> String -> IO a +dieWithLocation filename lineno msg = + ioError . setLocation lineno + . flip ioeSetFileName (normalise filename) + $ userError msg + where + setLocation Nothing err = err + setLocation (Just n) err = ioeSetLocation err (show n) + _ = callStack -- TODO: Attach CallStack to exception + +{-# DEPRECATED die "Messages thrown with die can't be controlled with Verbosity; use die' instead, or dieNoVerbosity if Verbosity truly is not available" #-} +die :: String -> IO a +die = dieNoVerbosity + +dieNoVerbosity :: String -> IO a +dieNoVerbosity msg + = ioError (userError msg) + where + _ = callStack -- TODO: Attach CallStack to exception + +-- | Tag an 'IOError' whose error string should be output to the screen +-- verbatim. +ioeSetVerbatim :: IOError -> IOError +ioeSetVerbatim e = ioeSetLocation e "dieVerbatim" + +-- | Check if an 'IOError' should be output verbatim to screen. +ioeGetVerbatim :: IOError -> Bool +ioeGetVerbatim e = ioeGetLocation e == "dieVerbatim" + +-- | Create a 'userError' whose error text will be output verbatim +verbatimUserError :: String -> IOError +verbatimUserError = ioeSetVerbatim . userError + +dieWithLocation' :: Verbosity -> FilePath -> Maybe Int -> String -> IO a +dieWithLocation' verbosity filename mb_lineno msg = withFrozenCallStack $ do + ts <- getPOSIXTime + pname <- getProgName + ioError . verbatimUserError + . 
withMetadata ts AlwaysMark VerboseTrace verbosity + . wrapTextVerbosity verbosity + $ pname ++ ": " ++ + filename ++ (case mb_lineno of + Just lineno -> ":" ++ show lineno + Nothing -> "") ++ + ": " ++ msg + +die' :: Verbosity -> String -> IO a +die' verbosity msg = withFrozenCallStack $ do + ts <- getPOSIXTime + pname <- getProgName + ioError . verbatimUserError + . withMetadata ts AlwaysMark VerboseTrace verbosity + . wrapTextVerbosity verbosity + $ pname ++ ": " ++ msg + +dieNoWrap :: Verbosity -> String -> IO a +dieNoWrap verbosity msg = withFrozenCallStack $ do + -- TODO: should this have program name or not? + ts <- getPOSIXTime + ioError . verbatimUserError + . withMetadata ts AlwaysMark VerboseTrace verbosity + $ msg + +-- | Given a block of IO code that may raise an exception, annotate +-- it with the metadata from the current scope. Use this as close +-- to external code that raises IO exceptions as possible, since +-- this function unconditionally wraps the error message with a trace +-- (so it is NOT idempotent.) +annotateIO :: Verbosity -> IO a -> IO a +annotateIO verbosity act = do + ts <- getPOSIXTime + modifyIOError (f ts) act + where + f ts ioe = ioeSetErrorString ioe + . withMetadata ts NeverMark VerboseTrace verbosity + $ ioeGetErrorString ioe + + +{-# NOINLINE topHandlerWith #-} +topHandlerWith :: forall a. (Exception.SomeException -> IO a) -> IO a -> IO a +topHandlerWith cont prog = do + -- By default, stderr to a terminal device is NoBuffering. 
But this + -- is *really slow* + hSetBuffering stderr LineBuffering + Exception.catches prog [ + Exception.Handler rethrowAsyncExceptions + , Exception.Handler rethrowExitStatus + , Exception.Handler handle + ] + where + -- Let async exceptions rise to the top for the default top-handler + rethrowAsyncExceptions :: Exception.AsyncException -> NoCallStackIO a + rethrowAsyncExceptions a = throwIO a + + -- ExitCode gets thrown asynchronously too, and we don't want to print it + rethrowExitStatus :: ExitCode -> NoCallStackIO a + rethrowExitStatus = throwIO + + -- Print all other exceptions + handle :: Exception.SomeException -> NoCallStackIO a + handle se = do + hFlush stdout + pname <- getProgName + hPutStr stderr (message pname se) + cont se + + message :: String -> Exception.SomeException -> String + message pname (Exception.SomeException se) = + case cast se :: Maybe Exception.IOException of + Just ioe + | ioeGetVerbatim ioe -> + -- Use the message verbatim + ioeGetErrorString ioe ++ "\n" + | isUserError ioe -> + let file = case ioeGetFileName ioe of + Nothing -> "" + Just path -> path ++ location ++ ": " + location = case ioeGetLocation ioe of + l@(n:_) | isDigit n -> ':' : l + _ -> "" + detail = ioeGetErrorString ioe + in wrapText (pname ++ ": " ++ file ++ detail) + _ -> + displaySomeException se ++ "\n" + +-- | BC wrapper around 'Exception.displayException'. +displaySomeException :: Exception.Exception e => e -> String +displaySomeException se = +#if __GLASGOW_HASKELL__ < 710 + show se +#else + Exception.displayException se +#endif + +topHandler :: IO a -> IO a +topHandler prog = topHandlerWith (const $ exitWith (ExitFailure 1)) prog + +-- | Non fatal conditions that may be indicative of an error or problem. +-- +-- We display these at the 'normal' verbosity level. +-- +warn :: Verbosity -> String -> IO () +warn verbosity msg = withFrozenCallStack $ do + when (verbosity >= normal) $ do + ts <- getPOSIXTime + hFlush stdout + hPutStr stderr . 
withMetadata ts NormalMark FlagTrace verbosity + . wrapTextVerbosity verbosity + $ "Warning: " ++ msg + +-- | Useful status messages. +-- +-- We display these at the 'normal' verbosity level. +-- +-- This is for the ordinary helpful status messages that users see. Just +-- enough information to know that things are working but not floods of detail. +-- +notice :: Verbosity -> String -> IO () +notice verbosity msg = withFrozenCallStack $ do + when (verbosity >= normal) $ do + ts <- getPOSIXTime + hPutStr stdout . withMetadata ts NormalMark FlagTrace verbosity + . wrapTextVerbosity verbosity + $ msg + +-- | Display a message at 'normal' verbosity level, but without +-- wrapping. +-- +noticeNoWrap :: Verbosity -> String -> IO () +noticeNoWrap verbosity msg = withFrozenCallStack $ do + when (verbosity >= normal) $ do + ts <- getPOSIXTime + hPutStr stdout . withMetadata ts NormalMark FlagTrace verbosity $ msg + +-- | Pretty-print a 'Disp.Doc' status message at 'normal' verbosity +-- level. Use this if you need fancy formatting. +-- +noticeDoc :: Verbosity -> Disp.Doc -> IO () +noticeDoc verbosity msg = withFrozenCallStack $ do + when (verbosity >= normal) $ do + ts <- getPOSIXTime + hPutStr stdout . withMetadata ts NormalMark FlagTrace verbosity + . Disp.renderStyle defaultStyle $ msg + +-- | Display a "setup status message". Prefer using setupMessage' +-- if possible. +-- +setupMessage :: Verbosity -> String -> PackageIdentifier -> IO () +setupMessage verbosity msg pkgid = withFrozenCallStack $ do + noticeNoWrap verbosity (msg ++ ' ': display pkgid ++ "...") + +-- | More detail on the operation of some action. +-- +-- We display these messages when the verbosity level is 'verbose' +-- +info :: Verbosity -> String -> IO () +info verbosity msg = withFrozenCallStack $ + when (verbosity >= verbose) $ do + ts <- getPOSIXTime + hPutStr stdout . withMetadata ts NeverMark FlagTrace verbosity + . 
wrapTextVerbosity verbosity + $ msg + +infoNoWrap :: Verbosity -> String -> IO () +infoNoWrap verbosity msg = withFrozenCallStack $ + when (verbosity >= verbose) $ do + ts <- getPOSIXTime + hPutStr stdout . withMetadata ts NeverMark FlagTrace verbosity + $ msg + +-- | Detailed internal debugging information +-- +-- We display these messages when the verbosity level is 'deafening' +-- +debug :: Verbosity -> String -> IO () +debug verbosity msg = withFrozenCallStack $ + when (verbosity >= deafening) $ do + ts <- getPOSIXTime + hPutStr stdout . withMetadata ts NeverMark FlagTrace verbosity + . wrapTextVerbosity verbosity + $ msg + -- ensure that we don't lose output if we segfault/infinite loop + hFlush stdout + +-- | A variant of 'debug' that doesn't perform the automatic line +-- wrapping. Produces better output in some cases. +debugNoWrap :: Verbosity -> String -> IO () +debugNoWrap verbosity msg = withFrozenCallStack $ + when (verbosity >= deafening) $ do + ts <- getPOSIXTime + hPutStr stdout . withMetadata ts NeverMark FlagTrace verbosity + $ msg + -- ensure that we don't lose output if we segfault/infinite loop + hFlush stdout + +-- | Perform an IO action, catching any IO exceptions and printing an error +-- if one occurs. +chattyTry :: String -- ^ a description of the action we were attempting + -> IO () -- ^ the action itself + -> IO () +chattyTry desc action = + catchIO action $ \exception -> + putStrLn $ "Error while " ++ desc ++ ": " ++ show exception + +-- | Run an IO computation, returning @e@ if it raises a "file +-- does not exist" error. 
+handleDoesNotExist :: a -> NoCallStackIO a -> NoCallStackIO a +handleDoesNotExist e = + Exception.handleJust + (\ioe -> if isDoesNotExistError ioe then Just ioe else Nothing) + (\_ -> return e) + +-- ----------------------------------------------------------------------------- +-- Helper functions + +-- | Wraps text unless the @+nowrap@ verbosity flag is active +wrapTextVerbosity :: Verbosity -> String -> String +wrapTextVerbosity verb + | isVerboseNoWrap verb = withTrailingNewline + | otherwise = withTrailingNewline . wrapText + + +-- | Prepends a timestamp if @+timestamp@ verbosity flag is set +-- +-- This is used by 'withMetadata' +-- +withTimestamp :: Verbosity -> POSIXTime -> String -> String +withTimestamp v ts msg + | isVerboseTimestamp v = msg' + | otherwise = msg -- no-op + where + msg' = case lines msg of + [] -> tsstr "\n" + l1:rest -> unlines (tsstr (' ':l1) : map (contpfx++) rest) + + -- format timestamp to be prepended to first line with msec precision + tsstr = showFFloat (Just 3) (realToFrac ts :: Double) + + -- continuation prefix for subsequent lines of msg + contpfx = replicate (length (tsstr " ")) ' ' + +-- | Wrap output with a marker if @+markoutput@ verbosity flag is set. +-- +-- NB: Why is markoutput done with start/end markers, and not prefixes? +-- Markers are more convenient to add (if we want to add prefixes, +-- we have to 'lines' and then 'map'; here's it's just some +-- concatenates). Note that even in the prefix case, we can't +-- guarantee that the markers are unambiguous, because some of +-- Cabal's output comes straight from external programs, where +-- we don't have the ability to interpose on the output. 
+-- +-- This is used by 'withMetadata' +-- +withOutputMarker :: Verbosity -> String -> String +withOutputMarker v xs | not (isVerboseMarkOutput v) = xs +withOutputMarker _ "" = "" -- Minor optimization, don't mark uselessly +withOutputMarker _ xs = + "-----BEGIN CABAL OUTPUT-----\n" ++ + withTrailingNewline xs ++ + "-----END CABAL OUTPUT-----\n" + +-- | Append a trailing newline to a string if it does not +-- already have a trailing newline. +-- +withTrailingNewline :: String -> String +withTrailingNewline "" = "" +withTrailingNewline (x:xs) = x : go x xs + where + go _ (c:cs) = c : go c cs + go '\n' "" = "" + go _ "" = "\n" + +-- | Prepend a call-site and/or call-stack based on Verbosity +-- +withCallStackPrefix :: WithCallStack (TraceWhen -> Verbosity -> String -> String) +withCallStackPrefix tracer verbosity s = withFrozenCallStack $ + (if isVerboseCallSite verbosity + then parentSrcLocPrefix ++ + -- Hack: need a newline before starting output marker :( + if isVerboseMarkOutput verbosity + then "\n" + else "" + else "") ++ + (case traceWhen verbosity tracer of + Just pre -> pre ++ prettyCallStack callStack ++ "\n" + Nothing -> "") ++ + s + +-- | When should we emit the call stack? We always emit +-- for internal errors, emit the trace for errors when we +-- are in verbose mode, and otherwise only emit it if +-- explicitly asked for using the @+callstack@ verbosity +-- flag. (At the moment, 'AlwaysTrace' is not used. +-- +data TraceWhen + = AlwaysTrace + | VerboseTrace + | FlagTrace + deriving (Eq) + +-- | Determine if we should emit a call stack. +-- If we trace, it also emits any prefix we should append. +traceWhen :: Verbosity -> TraceWhen -> Maybe String +traceWhen _ AlwaysTrace = Just "" +traceWhen v VerboseTrace | v >= verbose = Just "" +traceWhen v FlagTrace | isVerboseCallStack v = Just "----\n" +traceWhen _ _ = Nothing + +-- | When should we output the marker? 
Things like 'die' +-- always get marked, but a 'NormalMark' will only be +-- output if we're not a quiet verbosity. +-- +data MarkWhen = AlwaysMark | NormalMark | NeverMark + +-- | Add all necessary metadata to a logging message +-- +withMetadata :: WithCallStack (POSIXTime -> MarkWhen -> TraceWhen -> Verbosity -> String -> String) +withMetadata ts marker tracer verbosity x = withFrozenCallStack $ + -- NB: order matters. Output marker first because we + -- don't want to capture call stacks. + withTrailingNewline + . withCallStackPrefix tracer verbosity + . (case marker of + AlwaysMark -> withOutputMarker verbosity + NormalMark | not (isVerboseQuiet verbosity) + -> withOutputMarker verbosity + | otherwise + -> id + NeverMark -> id) + -- Clear out any existing markers + . clearMarkers + . withTimestamp verbosity ts + $ x + +clearMarkers :: String -> String +clearMarkers s = unlines . filter isMarker $ lines s + where + isMarker "-----BEGIN CABAL OUTPUT-----" = False + isMarker "-----END CABAL OUTPUT-----" = False + isMarker _ = True + +-- ----------------------------------------------------------------------------- +-- rawSystem variants +maybeExit :: IO ExitCode -> IO () +maybeExit cmd = do + res <- cmd + unless (res == ExitSuccess) $ exitWith res + +printRawCommandAndArgs :: Verbosity -> FilePath -> [String] -> IO () +printRawCommandAndArgs verbosity path args = withFrozenCallStack $ + printRawCommandAndArgsAndEnv verbosity path args Nothing Nothing + +printRawCommandAndArgsAndEnv :: Verbosity + -> FilePath + -> [String] + -> Maybe FilePath + -> Maybe [(String, String)] + -> IO () +printRawCommandAndArgsAndEnv verbosity path args mcwd menv = do + case menv of + Just env -> debugNoWrap verbosity ("Environment: " ++ show env) + Nothing -> return () + case mcwd of + Just cwd -> debugNoWrap verbosity ("Working directory: " ++ show cwd) + Nothing -> return () + infoNoWrap verbosity (showCommandForUser path args) + +-- Exit with the same exit code if the subcommand fails 
+rawSystemExit :: Verbosity -> FilePath -> [String] -> IO () +rawSystemExit verbosity path args = withFrozenCallStack $ do + printRawCommandAndArgs verbosity path args + hFlush stdout + exitcode <- rawSystem path args + unless (exitcode == ExitSuccess) $ do + debug verbosity $ path ++ " returned " ++ show exitcode + exitWith exitcode + +rawSystemExitCode :: Verbosity -> FilePath -> [String] -> IO ExitCode +rawSystemExitCode verbosity path args = withFrozenCallStack $ do + printRawCommandAndArgs verbosity path args + hFlush stdout + exitcode <- rawSystem path args + unless (exitcode == ExitSuccess) $ do + debug verbosity $ path ++ " returned " ++ show exitcode + return exitcode + +rawSystemExitWithEnv :: Verbosity + -> FilePath + -> [String] + -> [(String, String)] + -> IO () +rawSystemExitWithEnv verbosity path args env = withFrozenCallStack $ do + printRawCommandAndArgsAndEnv verbosity path args Nothing (Just env) + hFlush stdout + (_,_,_,ph) <- createProcess $ + (Process.proc path args) { Process.env = (Just env) +#ifdef MIN_VERSION_process +#if MIN_VERSION_process(1,2,0) +-- delegate_ctlc has been added in process 1.2, and we still want to be able to +-- bootstrap GHC on systems not having that version + , Process.delegate_ctlc = True +#endif +#endif + } + exitcode <- waitForProcess ph + unless (exitcode == ExitSuccess) $ do + debug verbosity $ path ++ " returned " ++ show exitcode + exitWith exitcode + +-- Closes the passed in handles before returning. 
+rawSystemIOWithEnv :: Verbosity + -> FilePath + -> [String] + -> Maybe FilePath -- ^ New working dir or inherit + -> Maybe [(String, String)] -- ^ New environment or inherit + -> Maybe Handle -- ^ stdin + -> Maybe Handle -- ^ stdout + -> Maybe Handle -- ^ stderr + -> IO ExitCode +rawSystemIOWithEnv verbosity path args mcwd menv inp out err = withFrozenCallStack $ do + (_,_,_,ph) <- createProcessWithEnv verbosity path args mcwd menv + (mbToStd inp) (mbToStd out) (mbToStd err) + exitcode <- waitForProcess ph + unless (exitcode == ExitSuccess) $ do + debug verbosity $ path ++ " returned " ++ show exitcode + return exitcode + where + mbToStd :: Maybe Handle -> Process.StdStream + mbToStd = maybe Process.Inherit Process.UseHandle + +createProcessWithEnv :: + Verbosity + -> FilePath + -> [String] + -> Maybe FilePath -- ^ New working dir or inherit + -> Maybe [(String, String)] -- ^ New environment or inherit + -> Process.StdStream -- ^ stdin + -> Process.StdStream -- ^ stdout + -> Process.StdStream -- ^ stderr + -> IO (Maybe Handle, Maybe Handle, Maybe Handle,ProcessHandle) + -- ^ Any handles created for stdin, stdout, or stderr + -- with 'CreateProcess', and a handle to the process. +createProcessWithEnv verbosity path args mcwd menv inp out err = withFrozenCallStack $ do + printRawCommandAndArgsAndEnv verbosity path args mcwd menv + hFlush stdout + (inp', out', err', ph) <- createProcess $ + (Process.proc path args) { + Process.cwd = mcwd + , Process.env = menv + , Process.std_in = inp + , Process.std_out = out + , Process.std_err = err +#ifdef MIN_VERSION_process +#if MIN_VERSION_process(1,2,0) +-- delegate_ctlc has been added in process 1.2, and we still want to be able to +-- bootstrap GHC on systems not having that version + , Process.delegate_ctlc = True +#endif +#endif + } + return (inp', out', err', ph) + +-- | Run a command and return its output. +-- +-- The output is assumed to be text in the locale encoding. 
+-- +rawSystemStdout :: Verbosity -> FilePath -> [String] -> IO String +rawSystemStdout verbosity path args = withFrozenCallStack $ do + (IODataText output, errors, exitCode) <- rawSystemStdInOut verbosity path args + Nothing Nothing + Nothing IODataModeText + when (exitCode /= ExitSuccess) $ + die errors + return output + +-- | Run a command and return its output, errors and exit status. Optionally +-- also supply some input. Also provides control over whether the binary/text +-- mode of the input and output. +-- +rawSystemStdInOut :: Verbosity + -> FilePath -- ^ Program location + -> [String] -- ^ Arguments + -> Maybe FilePath -- ^ New working dir or inherit + -> Maybe [(String, String)] -- ^ New environment or inherit + -> Maybe IOData -- ^ input text and binary mode + -> IODataMode -- ^ output in binary mode + -> IO (IOData, String, ExitCode) -- ^ output, errors, exit +rawSystemStdInOut verbosity path args mcwd menv input outputMode = withFrozenCallStack $ do + printRawCommandAndArgs verbosity path args + + Exception.bracket + (runInteractiveProcess path args mcwd menv) + (\(inh,outh,errh,_) -> hClose inh >> hClose outh >> hClose errh) + $ \(inh,outh,errh,pid) -> do + + -- output mode depends on what the caller wants + -- but the errors are always assumed to be text (in the current locale) + hSetBinaryMode errh False + + -- fork off a couple threads to pull on the stderr and stdout + -- so if the process writes to stderr we do not block. 
+ + err <- hGetContents errh + + out <- IOData.hGetContents outh outputMode + + mv <- newEmptyMVar + let force str = do + mberr <- Exception.try (evaluate (rnf str) >> return ()) + putMVar mv (mberr :: Either IOError ()) + _ <- forkIO $ force out + _ <- forkIO $ force err + + -- push all the input, if any + case input of + Nothing -> return () + Just inputData -> do + -- input mode depends on what the caller wants + IOData.hPutContents inh inputData + --TODO: this probably fails if the process refuses to consume + -- or if it closes stdin (eg if it exits) + + -- wait for both to finish, in either order + mberr1 <- takeMVar mv + mberr2 <- takeMVar mv + + -- wait for the program to terminate + exitcode <- waitForProcess pid + unless (exitcode == ExitSuccess) $ + debug verbosity $ path ++ " returned " ++ show exitcode + ++ if null err then "" else + " with error message:\n" ++ err + ++ case input of + Nothing -> "" + Just d | IOData.null d -> "" + Just (IODataText inp) -> "\nstdin input:\n" ++ inp + Just (IODataBinary inp) -> "\nstdin input (binary):\n" ++ show inp + + -- Check if we we hit an exception while consuming the output + -- (e.g. a text decoding error) + reportOutputIOError mberr1 + reportOutputIOError mberr2 + + return (out, err, exitcode) + where + reportOutputIOError :: Either IOError () -> NoCallStackIO () + reportOutputIOError = + either (\e -> throwIO (ioeSetFileName e ("output of " ++ path))) + return + + +{-# DEPRECATED findProgramLocation + "No longer used within Cabal, try findProgramOnSearchPath" #-} +-- | Look for a program on the path. +findProgramLocation :: Verbosity -> FilePath -> IO (Maybe FilePath) +findProgramLocation verbosity prog = withFrozenCallStack $ do + debug verbosity $ "searching for " ++ prog ++ " in path." 
+ res <- findExecutable prog + case res of + Nothing -> debug verbosity ("Cannot find " ++ prog ++ " on the path") + Just path -> debug verbosity ("found " ++ prog ++ " at "++ path) + return res + + +-- | Look for a program and try to find it's version number. It can accept +-- either an absolute path or the name of a program binary, in which case we +-- will look for the program on the path. +-- +findProgramVersion :: String -- ^ version args + -> (String -> String) -- ^ function to select version + -- number from program output + -> Verbosity + -> FilePath -- ^ location + -> IO (Maybe Version) +findProgramVersion versionArg selectVersion verbosity path = withFrozenCallStack $ do + str <- rawSystemStdout verbosity path [versionArg] + `catchIO` (\_ -> return "") + `catchExit` (\_ -> return "") + let version :: Maybe Version + version = simpleParse (selectVersion str) + case version of + Nothing -> warn verbosity $ "cannot determine version of " ++ path + ++ " :\n" ++ show str + Just v -> debug verbosity $ path ++ " is version " ++ display v + return version + + +-- | Like the Unix xargs program. Useful for when we've got very long command +-- lines that might overflow an OS limit on command line length and so you +-- need to invoke a command multiple times to get all the args in. +-- +-- Use it with either of the rawSystem variants above. For example: +-- +-- > xargs (32*1024) (rawSystemExit verbosity) prog fixedArgs bigArgs +-- +xargs :: Int -> ([String] -> IO ()) + -> [String] -> [String] -> IO () +xargs maxSize rawSystemFun fixedArgs bigArgs = + let fixedArgSize = sum (map length fixedArgs) + length fixedArgs + chunkSize = maxSize - fixedArgSize + in traverse_ (rawSystemFun . 
(fixedArgs ++)) (chunks chunkSize bigArgs) + + where chunks len = unfoldr $ \s -> + if null s then Nothing + else Just (chunk [] len s) + + chunk acc _ [] = (reverse acc,[]) + chunk acc len (s:ss) + | len' < len = chunk (s:acc) (len-len'-1) ss + | otherwise = (reverse acc, s:ss) + where len' = length s + +-- ------------------------------------------------------------ +-- * File Utilities +-- ------------------------------------------------------------ + +---------------- +-- Finding files + +-- | Find a file by looking in a search path. The file path must match exactly. +-- +findFile :: [FilePath] -- ^search locations + -> FilePath -- ^File Name + -> IO FilePath +findFile searchPath fileName = + findFirstFile id + [ path fileName + | path <- nub searchPath] + >>= maybe (die $ fileName ++ " doesn't exist") return + +-- | Find a file by looking in a search path with one of a list of possible +-- file extensions. The file base name should be given and it will be tried +-- with each of the extensions in each element of the search path. +-- +findFileWithExtension :: [String] + -> [FilePath] + -> FilePath + -> NoCallStackIO (Maybe FilePath) +findFileWithExtension extensions searchPath baseName = + findFirstFile id + [ path baseName <.> ext + | path <- nub searchPath + , ext <- nub extensions ] + +findAllFilesWithExtension :: [String] + -> [FilePath] + -> FilePath + -> NoCallStackIO [FilePath] +findAllFilesWithExtension extensions searchPath basename = + findAllFiles id + [ path basename <.> ext + | path <- nub searchPath + , ext <- nub extensions ] + +-- | Like 'findFileWithExtension' but returns which element of the search path +-- the file was found in, and the file path relative to that base directory. 
+-- +findFileWithExtension' :: [String] + -> [FilePath] + -> FilePath + -> NoCallStackIO (Maybe (FilePath, FilePath)) +findFileWithExtension' extensions searchPath baseName = + findFirstFile (uncurry ()) + [ (path, baseName <.> ext) + | path <- nub searchPath + , ext <- nub extensions ] + +findFirstFile :: (a -> FilePath) -> [a] -> NoCallStackIO (Maybe a) +findFirstFile file = findFirst + where findFirst [] = return Nothing + findFirst (x:xs) = do exists <- doesFileExist (file x) + if exists + then return (Just x) + else findFirst xs + +findAllFiles :: (a -> FilePath) -> [a] -> NoCallStackIO [a] +findAllFiles file = filterM (doesFileExist . file) + +-- | Finds the files corresponding to a list of Haskell module names. +-- +-- As 'findModuleFile' but for a list of module names. +-- +findModuleFiles :: [FilePath] -- ^ build prefix (location of objects) + -> [String] -- ^ search suffixes + -> [ModuleName] -- ^ modules + -> IO [(FilePath, FilePath)] +findModuleFiles searchPath extensions moduleNames = + traverse (findModuleFile searchPath extensions) moduleNames + +-- | Find the file corresponding to a Haskell module name. +-- +-- This is similar to 'findFileWithExtension'' but specialised to a module +-- name. The function fails if the file corresponding to the module is missing. +-- +findModuleFile :: [FilePath] -- ^ build prefix (location of objects) + -> [String] -- ^ search suffixes + -> ModuleName -- ^ module + -> IO (FilePath, FilePath) +findModuleFile searchPath extensions mod_name = + maybe notFound return + =<< findFileWithExtension' extensions searchPath + (ModuleName.toFilePath mod_name) + where + notFound = die $ "Error: Could not find module: " ++ display mod_name + ++ " with any suffix: " ++ show extensions + ++ " in the search path: " ++ show searchPath + +-- | List all the files in a directory and all subdirectories. +-- +-- The order places files in sub-directories after all the files in their +-- parent directories. 
The list is generated lazily so is not well defined if +-- the source directory structure changes before the list is used. +-- +getDirectoryContentsRecursive :: FilePath -> IO [FilePath] +getDirectoryContentsRecursive topdir = recurseDirectories [""] + where + recurseDirectories :: [FilePath] -> IO [FilePath] + recurseDirectories [] = return [] + recurseDirectories (dir:dirs) = unsafeInterleaveIO $ do + (files, dirs') <- collect [] [] =<< getDirectoryContents (topdir dir) + files' <- recurseDirectories (dirs' ++ dirs) + return (files ++ files') + + where + collect files dirs' [] = return (reverse files + ,reverse dirs') + collect files dirs' (entry:entries) | ignore entry + = collect files dirs' entries + collect files dirs' (entry:entries) = do + let dirEntry = dir entry + isDirectory <- doesDirectoryExist (topdir dirEntry) + if isDirectory + then collect files (dirEntry:dirs') entries + else collect (dirEntry:files) dirs' entries + + ignore ['.'] = True + ignore ['.', '.'] = True + ignore _ = False + +------------------------ +-- Environment variables + +-- | Is this directory in the system search path? +isInSearchPath :: FilePath -> NoCallStackIO Bool +isInSearchPath path = fmap (elem path) getSearchPath + +addLibraryPath :: OS + -> [FilePath] + -> [(String,String)] + -> [(String,String)] +addLibraryPath os paths = addEnv + where + pathsString = intercalate [searchPathSeparator] paths + ldPath = case os of + OSX -> "DYLD_LIBRARY_PATH" + _ -> "LD_LIBRARY_PATH" + + addEnv [] = [(ldPath,pathsString)] + addEnv ((key,value):xs) + | key == ldPath = + if null value + then (key,pathsString):xs + else (key,value ++ (searchPathSeparator:pathsString)):xs + | otherwise = (key,value):addEnv xs + +-------------------- +-- Modification time + +-- | Compare the modification times of two files to see if the first is newer +-- than the second. The first file must exist but the second need not. +-- The expected use case is when the second file is generated using the first. 
+-- In this use case, if the result is True then the second file is out of date. +-- +moreRecentFile :: FilePath -> FilePath -> NoCallStackIO Bool +moreRecentFile a b = do + exists <- doesFileExist b + if not exists + then return True + else do tb <- getModificationTime b + ta <- getModificationTime a + return (ta > tb) + +-- | Like 'moreRecentFile', but also checks that the first file exists. +existsAndIsMoreRecentThan :: FilePath -> FilePath -> NoCallStackIO Bool +existsAndIsMoreRecentThan a b = do + exists <- doesFileExist a + if not exists + then return False + else a `moreRecentFile` b + +---------------------------------------- +-- Copying and installing files and dirs + +-- | Same as 'createDirectoryIfMissing' but logs at higher verbosity levels. +-- +createDirectoryIfMissingVerbose :: Verbosity + -> Bool -- ^ Create its parents too? + -> FilePath + -> IO () +createDirectoryIfMissingVerbose verbosity create_parents path0 + | create_parents = withFrozenCallStack $ createDirs (parents path0) + | otherwise = withFrozenCallStack $ createDirs (take 1 (parents path0)) + where + parents = reverse . scanl1 () . splitDirectories . normalise + + createDirs [] = return () + createDirs (dir:[]) = createDir dir throwIO + createDirs (dir:dirs) = + createDir dir $ \_ -> do + createDirs dirs + createDir dir throwIO + + createDir :: FilePath -> (IOException -> IO ()) -> IO () + createDir dir notExistHandler = do + r <- tryIO $ createDirectoryVerbose verbosity dir + case (r :: Either IOException ()) of + Right () -> return () + Left e + | isDoesNotExistError e -> notExistHandler e + -- createDirectory (and indeed POSIX mkdir) does not distinguish + -- between a dir already existing and a file already existing. So we + -- check for it here. Unfortunately there is a slight race condition + -- here, but we think it is benign. 
It could report an exception in + -- the case that the dir did exist but another process deletes the + -- directory and creates a file in its place before we can check + -- that the directory did indeed exist. + | isAlreadyExistsError e -> (do + isDir <- doesDirectoryExist dir + unless isDir $ throwIO e + ) `catchIO` ((\_ -> return ()) :: IOException -> IO ()) + | otherwise -> throwIO e + +createDirectoryVerbose :: Verbosity -> FilePath -> IO () +createDirectoryVerbose verbosity dir = withFrozenCallStack $ do + info verbosity $ "creating " ++ dir + createDirectory dir + setDirOrdinary dir + +-- | Copies a file without copying file permissions. The target file is created +-- with default permissions. Any existing target file is replaced. +-- +-- At higher verbosity levels it logs an info message. +-- +copyFileVerbose :: Verbosity -> FilePath -> FilePath -> IO () +copyFileVerbose verbosity src dest = withFrozenCallStack $ do + info verbosity ("copy " ++ src ++ " to " ++ dest) + copyFile src dest + +-- | Install an ordinary file. This is like a file copy but the permissions +-- are set appropriately for an installed file. On Unix it is \"-rw-r--r--\" +-- while on Windows it uses the default permissions for the target directory. +-- +installOrdinaryFile :: Verbosity -> FilePath -> FilePath -> IO () +installOrdinaryFile verbosity src dest = withFrozenCallStack $ do + info verbosity ("Installing " ++ src ++ " to " ++ dest) + copyOrdinaryFile src dest + +-- | Install an executable file. This is like a file copy but the permissions +-- are set appropriately for an installed file. On Unix it is \"-rwxr-xr-x\" +-- while on Windows it uses the default permissions for the target directory. 
+-- +installExecutableFile :: Verbosity -> FilePath -> FilePath -> IO () +installExecutableFile verbosity src dest = withFrozenCallStack $ do + info verbosity ("Installing executable " ++ src ++ " to " ++ dest) + copyExecutableFile src dest + +-- | Install a file that may or not be executable, preserving permissions. +installMaybeExecutableFile :: Verbosity -> FilePath -> FilePath -> IO () +installMaybeExecutableFile verbosity src dest = withFrozenCallStack $ do + perms <- getPermissions src + if (executable perms) --only checks user x bit + then installExecutableFile verbosity src dest + else installOrdinaryFile verbosity src dest + +-- | Given a relative path to a file, copy it to the given directory, preserving +-- the relative path and creating the parent directories if needed. +copyFileTo :: Verbosity -> FilePath -> FilePath -> IO () +copyFileTo verbosity dir file = withFrozenCallStack $ do + let targetFile = dir file + createDirectoryIfMissingVerbose verbosity True (takeDirectory targetFile) + installOrdinaryFile verbosity file targetFile + +-- | Common implementation of 'copyFiles', 'installOrdinaryFiles', +-- 'installExecutableFiles' and 'installMaybeExecutableFiles'. +copyFilesWith :: (Verbosity -> FilePath -> FilePath -> IO ()) + -> Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO () +copyFilesWith doCopy verbosity targetDir srcFiles = withFrozenCallStack $ do + + -- Create parent directories for everything + let dirs = map (targetDir ) . nub . map (takeDirectory . snd) $ srcFiles + traverse_ (createDirectoryIfMissingVerbose verbosity True) dirs + + -- Copy all the files + sequence_ [ let src = srcBase srcFile + dest = targetDir srcFile + in doCopy verbosity src dest + | (srcBase, srcFile) <- srcFiles ] + +-- | Copies a bunch of files to a target directory, preserving the directory +-- structure in the target location. The target directories are created if they +-- do not exist. 
+-- +-- The files are identified by a pair of base directory and a path relative to +-- that base. It is only the relative part that is preserved in the +-- destination. +-- +-- For example: +-- +-- > copyFiles normal "dist/src" +-- > [("", "src/Foo.hs"), ("dist/build/", "src/Bar.hs")] +-- +-- This would copy \"src\/Foo.hs\" to \"dist\/src\/src\/Foo.hs\" and +-- copy \"dist\/build\/src\/Bar.hs\" to \"dist\/src\/src\/Bar.hs\". +-- +-- This operation is not atomic. Any IO failure during the copy (including any +-- missing source files) leaves the target in an unknown state so it is best to +-- use it with a freshly created directory so that it can be simply deleted if +-- anything goes wrong. +-- +copyFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO () +copyFiles v fp fs = withFrozenCallStack (copyFilesWith copyFileVerbose v fp fs) + +-- | This is like 'copyFiles' but uses 'installOrdinaryFile'. +-- +installOrdinaryFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] -> IO () +installOrdinaryFiles v fp fs = withFrozenCallStack (copyFilesWith installOrdinaryFile v fp fs) + +-- | This is like 'copyFiles' but uses 'installExecutableFile'. +-- +installExecutableFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] + -> IO () +installExecutableFiles v fp fs = withFrozenCallStack (copyFilesWith installExecutableFile v fp fs) + +-- | This is like 'copyFiles' but uses 'installMaybeExecutableFile'. +-- +installMaybeExecutableFiles :: Verbosity -> FilePath -> [(FilePath, FilePath)] + -> IO () +installMaybeExecutableFiles v fp fs = withFrozenCallStack (copyFilesWith installMaybeExecutableFile v fp fs) + +-- | This installs all the files in a directory to a target location, +-- preserving the directory layout. All the files are assumed to be ordinary +-- rather than executable files. 
+-- +installDirectoryContents :: Verbosity -> FilePath -> FilePath -> IO () +installDirectoryContents verbosity srcDir destDir = withFrozenCallStack $ do + info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.") + srcFiles <- getDirectoryContentsRecursive srcDir + installOrdinaryFiles verbosity destDir [ (srcDir, f) | f <- srcFiles ] + +-- | Recursively copy the contents of one directory to another path. +copyDirectoryRecursive :: Verbosity -> FilePath -> FilePath -> IO () +copyDirectoryRecursive verbosity srcDir destDir = withFrozenCallStack $ do + info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.") + srcFiles <- getDirectoryContentsRecursive srcDir + copyFilesWith (const copyFile) verbosity destDir [ (srcDir, f) + | f <- srcFiles ] + +------------------- +-- File permissions + +-- | Like 'doesFileExist', but also checks that the file is executable. +doesExecutableExist :: FilePath -> NoCallStackIO Bool +doesExecutableExist f = do + exists <- doesFileExist f + if exists + then do perms <- getPermissions f + return (executable perms) + else return False + +--------------------------------- +-- Deprecated file copy functions + +{-# DEPRECATED smartCopySources + "Use findModuleFiles and copyFiles or installOrdinaryFiles" #-} +smartCopySources :: Verbosity -> [FilePath] -> FilePath + -> [ModuleName] -> [String] -> IO () +smartCopySources verbosity searchPath targetDir moduleNames extensions = withFrozenCallStack $ + findModuleFiles searchPath extensions moduleNames + >>= copyFiles verbosity targetDir + +{-# DEPRECATED copyDirectoryRecursiveVerbose + "You probably want installDirectoryContents instead" #-} +copyDirectoryRecursiveVerbose :: Verbosity -> FilePath -> FilePath -> IO () +copyDirectoryRecursiveVerbose verbosity srcDir destDir = withFrozenCallStack $ do + info verbosity ("copy directory '" ++ srcDir ++ "' to '" ++ destDir ++ "'.") + srcFiles <- getDirectoryContentsRecursive srcDir + copyFiles verbosity destDir [ 
(srcDir, f) | f <- srcFiles ] + +--------------------------- +-- Temporary files and dirs + +-- | Advanced options for 'withTempFile' and 'withTempDirectory'. +data TempFileOptions = TempFileOptions { + optKeepTempFiles :: Bool -- ^ Keep temporary files? + } + +defaultTempFileOptions :: TempFileOptions +defaultTempFileOptions = TempFileOptions { optKeepTempFiles = False } + +-- | Use a temporary filename that doesn't already exist. +-- +withTempFile :: FilePath -- ^ Temp dir to create the file in + -> String -- ^ File name template. See 'openTempFile'. + -> (FilePath -> Handle -> IO a) -> IO a +withTempFile tmpDir template action = + withTempFileEx defaultTempFileOptions tmpDir template action + +-- | A version of 'withTempFile' that additionally takes a 'TempFileOptions' +-- argument. +withTempFileEx :: TempFileOptions + -> FilePath -- ^ Temp dir to create the file in + -> String -- ^ File name template. See 'openTempFile'. + -> (FilePath -> Handle -> IO a) -> IO a +withTempFileEx opts tmpDir template action = + Exception.bracket + (openTempFile tmpDir template) + (\(name, handle) -> do hClose handle + unless (optKeepTempFiles opts) $ + handleDoesNotExist () . removeFile $ name) + (withLexicalCallStack (uncurry action)) + +-- | Create and use a temporary directory. +-- +-- Creates a new temporary directory inside the given directory, making use +-- of the template. The temp directory is deleted after use. For example: +-- +-- > withTempDirectory verbosity "src" "sdist." $ \tmpDir -> do ... +-- +-- The @tmpDir@ will be a new subdirectory of the given directory, e.g. +-- @src/sdist.342@. +-- +withTempDirectory :: Verbosity -> FilePath -> String -> (FilePath -> IO a) -> IO a +withTempDirectory verbosity targetDir template f = withFrozenCallStack $ + withTempDirectoryEx verbosity defaultTempFileOptions targetDir template + (withLexicalCallStack f) + +-- | A version of 'withTempDirectory' that additionally takes a +-- 'TempFileOptions' argument. 
+withTempDirectoryEx :: Verbosity -> TempFileOptions + -> FilePath -> String -> (FilePath -> IO a) -> IO a +withTempDirectoryEx _verbosity opts targetDir template f = withFrozenCallStack $ + Exception.bracket + (createTempDirectory targetDir template) + (unless (optKeepTempFiles opts) + . handleDoesNotExist () . removeDirectoryRecursive) + (withLexicalCallStack f) + +----------------------------------- +-- Safely reading and writing files + +{-# DEPRECATED rewriteFile "Use rewriteFileEx so that Verbosity is respected" #-} +rewriteFile :: FilePath -> String -> IO () +rewriteFile = rewriteFileEx normal + +-- | Write a file but only if it would have new content. If we would be writing +-- the same as the existing content then leave the file as is so that we do not +-- update the file's modification time. +-- +-- NB: the file is assumed to be ASCII-encoded. +rewriteFileEx :: Verbosity -> FilePath -> String -> IO () +rewriteFileEx verbosity path newContent = + flip catchIO mightNotExist $ do + existingContent <- annotateIO verbosity $ readFile path + _ <- evaluate (length existingContent) + unless (existingContent == newContent) $ + annotateIO verbosity $ + writeFileAtomic path (BS.Char8.pack newContent) + where + mightNotExist e | isDoesNotExistError e + = annotateIO verbosity $ writeFileAtomic path + (BS.Char8.pack newContent) + | otherwise + = ioError e + +-- | The path name that represents the current directory. +-- In Unix, it's @\".\"@, but this is system-specific. +-- (E.g. AmigaOS uses the empty string @\"\"@ for the current directory.) +currentDir :: FilePath +currentDir = "." 
+ +shortRelativePath :: FilePath -> FilePath -> FilePath +shortRelativePath from to = + case dropCommonPrefix (splitDirectories from) (splitDirectories to) of + (stuff, path) -> joinPath (map (const "..") stuff ++ path) + where + dropCommonPrefix :: Eq a => [a] -> [a] -> ([a],[a]) + dropCommonPrefix (x:xs) (y:ys) + | x == y = dropCommonPrefix xs ys + dropCommonPrefix xs ys = (xs,ys) + +-- | Drop the extension if it's one of 'exeExtensions', or return the path +-- unchanged. +dropExeExtension :: FilePath -> FilePath +dropExeExtension filepath = + case splitExtension filepath of + (filepath', extension) | extension `elem` exeExtensions -> filepath' + | otherwise -> filepath + +-- | List of possible executable file extensions on the current platform. +exeExtensions :: [String] +exeExtensions = case buildOS of + -- Possible improvement: on Windows, read the list of extensions from the + -- PATHEXT environment variable. By default PATHEXT is ".com; .exe; .bat; + -- .cmd". + Windows -> ["", "exe"] + Ghcjs -> ["", "exe"] + _ -> [""] + +-- ------------------------------------------------------------ +-- * Finding the description file +-- ------------------------------------------------------------ + +-- |Package description file (/pkgname/@.cabal@) +defaultPackageDesc :: Verbosity -> IO FilePath +defaultPackageDesc _verbosity = tryFindPackageDesc currentDir + +-- |Find a package description file in the given directory. Looks for +-- @.cabal@ files. 
+findPackageDesc :: FilePath -- ^Where to look + -> NoCallStackIO (Either String FilePath) -- ^.cabal +findPackageDesc dir + = do files <- getDirectoryContents dir + -- to make sure we do not mistake a ~/.cabal/ dir for a .cabal + -- file we filter to exclude dirs and null base file names: + cabalFiles <- filterM doesFileExist + [ dir file + | file <- files + , let (name, ext) = splitExtension file + , not (null name) && ext == ".cabal" ] + case cabalFiles of + [] -> return (Left noDesc) + [cabalFile] -> return (Right cabalFile) + multiple -> return (Left $ multiDesc multiple) + + where + noDesc :: String + noDesc = "No cabal file found.\n" + ++ "Please create a package description file .cabal" + + multiDesc :: [String] -> String + multiDesc l = "Multiple cabal files found.\n" + ++ "Please use only one of: " + ++ intercalate ", " l + +-- |Like 'findPackageDesc', but calls 'die' in case of error. +tryFindPackageDesc :: FilePath -> IO FilePath +tryFindPackageDesc dir = either die return =<< findPackageDesc dir + +{-# DEPRECATED defaultHookedPackageDesc "Use findHookedPackageDesc with the proper base directory instead" #-} +-- |Optional auxiliary package information file (/pkgname/@.buildinfo@) +defaultHookedPackageDesc :: IO (Maybe FilePath) +defaultHookedPackageDesc = findHookedPackageDesc currentDir + +-- |Find auxiliary package information in the given directory. +-- Looks for @.buildinfo@ files. 
+findHookedPackageDesc + :: FilePath -- ^Directory to search + -> IO (Maybe FilePath) -- ^/dir/@\/@/pkgname/@.buildinfo@, if present +findHookedPackageDesc dir = do + files <- getDirectoryContents dir + buildInfoFiles <- filterM doesFileExist + [ dir file + | file <- files + , let (name, ext) = splitExtension file + , not (null name) && ext == buildInfoExt ] + case buildInfoFiles of + [] -> return Nothing + [f] -> return (Just f) + _ -> die ("Multiple files with extension " ++ buildInfoExt) + +buildInfoExt :: String +buildInfoExt = ".buildinfo" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Simple.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Simple.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,855 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple +-- Copyright : Isaac Jones 2003-2005 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This is the command line front end to the Simple build system. When given +-- the parsed command-line args and package information, is able to perform +-- basic commands like configure, build, install, register, etc. +-- +-- This module exports the main functions that Setup.hs scripts use. It +-- re-exports the 'UserHooks' type, the standard entry points like +-- 'defaultMain' and 'defaultMainWithHooks' and the predefined sets of +-- 'UserHooks' that custom @Setup.hs@ scripts can extend to add their own +-- behaviour. +-- +-- This module isn't called \"Simple\" because it's simple. Far from +-- it. 
It's called \"Simple\" because it does complicated things to +-- simple software. +-- +-- The original idea was that there could be different build systems that all +-- presented the same compatible command line interfaces. There is still a +-- "Distribution.Make" system but in practice no packages use it. + +{- +Work around this warning: +libraries/Cabal/Distribution/Simple.hs:78:0: + Warning: In the use of `runTests' + (imported from Distribution.Simple.UserHooks): + Deprecated: "Please use the new testing interface instead!" +-} +{-# OPTIONS_GHC -fno-warn-deprecations #-} + +module Distribution.Simple ( + module Distribution.Package, + module Distribution.Version, + module Distribution.License, + module Distribution.Simple.Compiler, + module Language.Haskell.Extension, + -- * Simple interface + defaultMain, defaultMainNoRead, defaultMainArgs, + -- * Customization + UserHooks(..), Args, + defaultMainWithHooks, defaultMainWithHooksArgs, + defaultMainWithHooksNoRead, defaultMainWithHooksNoReadArgs, + -- ** Standard sets of hooks + simpleUserHooks, + autoconfUserHooks, + defaultUserHooks, emptyUserHooks, + -- ** Utils + defaultHookedPackageDesc + ) where + +import Prelude () +import Control.Exception (try) +import Distribution.Compat.Prelude + +-- local +import Distribution.Simple.Compiler hiding (Flag) +import Distribution.Simple.UserHooks +import Distribution.Package +import Distribution.PackageDescription hiding (Flag) +import Distribution.PackageDescription.Configuration +import Distribution.Simple.Program +import Distribution.Simple.Program.Db +import Distribution.Simple.PreProcess +import Distribution.Simple.Setup +import Distribution.Simple.Command + +import Distribution.Simple.Build +import Distribution.Simple.SrcDist +import Distribution.Simple.Register + +import Distribution.Simple.Configure + +import Distribution.Simple.LocalBuildInfo +import Distribution.Simple.Bench +import Distribution.Simple.BuildPaths +import Distribution.Simple.Test +import 
Distribution.Simple.Install +import Distribution.Simple.Haddock +import Distribution.Simple.Doctest +import Distribution.Simple.Utils +import Distribution.Utils.NubList +import Distribution.Verbosity +import Language.Haskell.Extension +import Distribution.Version +import Distribution.License +import Distribution.Text + +-- Base +import System.Environment (getArgs, getProgName) +import System.Directory (removeFile, doesFileExist + ,doesDirectoryExist, removeDirectoryRecursive) +import System.Exit (exitWith,ExitCode(..)) +import System.FilePath (searchPathSeparator, takeDirectory, (), splitDirectories, dropDrive) +import Distribution.Compat.Directory (makeAbsolute) +import Distribution.Compat.Environment (getEnvironment) +import Distribution.Compat.GetShortPathName (getShortPathName) + +import Data.List (unionBy, (\\)) + +import Distribution.PackageDescription.Parsec + +-- | A simple implementation of @main@ for a Cabal setup script. +-- It reads the package description file using IO, and performs the +-- action specified on the command line. +defaultMain :: IO () +defaultMain = getArgs >>= defaultMainHelper simpleUserHooks + +-- | A version of 'defaultMain' that is passed the command line +-- arguments, rather than getting them from the environment. +defaultMainArgs :: [String] -> IO () +defaultMainArgs = defaultMainHelper simpleUserHooks + +-- | A customizable version of 'defaultMain'. +defaultMainWithHooks :: UserHooks -> IO () +defaultMainWithHooks hooks = getArgs >>= defaultMainHelper hooks + +-- | A customizable version of 'defaultMain' that also takes the command +-- line arguments. +defaultMainWithHooksArgs :: UserHooks -> [String] -> IO () +defaultMainWithHooksArgs = defaultMainHelper + +-- | Like 'defaultMain', but accepts the package description as input +-- rather than using IO to read it. 
+defaultMainNoRead :: GenericPackageDescription -> IO () +defaultMainNoRead = defaultMainWithHooksNoRead simpleUserHooks + +-- | A customizable version of 'defaultMainNoRead'. +defaultMainWithHooksNoRead :: UserHooks -> GenericPackageDescription -> IO () +defaultMainWithHooksNoRead hooks pkg_descr = + getArgs >>= + defaultMainHelper hooks { readDesc = return (Just pkg_descr) } + +-- | A customizable version of 'defaultMainNoRead' that also takes the +-- command line arguments. +-- +-- @since 2.2.0.0 +defaultMainWithHooksNoReadArgs :: UserHooks -> GenericPackageDescription -> [String] -> IO () +defaultMainWithHooksNoReadArgs hooks pkg_descr = + defaultMainHelper hooks { readDesc = return (Just pkg_descr) } + +defaultMainHelper :: UserHooks -> Args -> IO () +defaultMainHelper hooks args = topHandler $ + case commandsRun (globalCommand commands) commands args of + CommandHelp help -> printHelp help + CommandList opts -> printOptionsList opts + CommandErrors errs -> printErrors errs + CommandReadyToGo (flags, commandParse) -> + case commandParse of + _ | fromFlag (globalVersion flags) -> printVersion + | fromFlag (globalNumericVersion flags) -> printNumericVersion + CommandHelp help -> printHelp help + CommandList opts -> printOptionsList opts + CommandErrors errs -> printErrors errs + CommandReadyToGo action -> action + + where + printHelp help = getProgName >>= putStr . help + printOptionsList = putStr . 
unlines + printErrors errs = do + putStr (intercalate "\n" errs) + exitWith (ExitFailure 1) + printNumericVersion = putStrLn $ display cabalVersion + printVersion = putStrLn $ "Cabal library version " + ++ display cabalVersion + + progs = addKnownPrograms (hookedPrograms hooks) defaultProgramDb + commands = + [configureCommand progs `commandAddAction` + \fs as -> configureAction hooks fs as >> return () + ,buildCommand progs `commandAddAction` buildAction hooks + ,replCommand progs `commandAddAction` replAction hooks + ,installCommand `commandAddAction` installAction hooks + ,copyCommand `commandAddAction` copyAction hooks + ,doctestCommand `commandAddAction` doctestAction hooks + ,haddockCommand `commandAddAction` haddockAction hooks + ,cleanCommand `commandAddAction` cleanAction hooks + ,sdistCommand `commandAddAction` sdistAction hooks + ,hscolourCommand `commandAddAction` hscolourAction hooks + ,registerCommand `commandAddAction` registerAction hooks + ,unregisterCommand `commandAddAction` unregisterAction hooks + ,testCommand `commandAddAction` testAction hooks + ,benchmarkCommand `commandAddAction` benchAction hooks + ] + +-- | Combine the preprocessors in the given hooks with the +-- preprocessors built into cabal. 
+allSuffixHandlers :: UserHooks + -> [PPSuffixHandler] +allSuffixHandlers hooks + = overridesPP (hookedPreProcessors hooks) knownSuffixHandlers + where + overridesPP :: [PPSuffixHandler] -> [PPSuffixHandler] -> [PPSuffixHandler] + overridesPP = unionBy (\x y -> fst x == fst y) + +configureAction :: UserHooks -> ConfigFlags -> Args -> IO LocalBuildInfo +configureAction hooks flags args = do + distPref <- findDistPrefOrDefault (configDistPref flags) + let flags' = flags { configDistPref = toFlag distPref + , configArgs = args } + + -- See docs for 'HookedBuildInfo' + pbi <- preConf hooks args flags' + + (mb_pd_file, pkg_descr0) <- confPkgDescr hooks verbosity + (flagToMaybe (configCabalFilePath flags)) + + let epkg_descr = (pkg_descr0, pbi) + + localbuildinfo0 <- confHook hooks epkg_descr flags' + + -- remember the .cabal filename if we know it + -- and all the extra command line args + let localbuildinfo = localbuildinfo0 { + pkgDescrFile = mb_pd_file, + extraConfigArgs = args + } + writePersistBuildConfig distPref localbuildinfo + + let pkg_descr = localPkgDescr localbuildinfo + postConf hooks args flags' pkg_descr localbuildinfo + return localbuildinfo + where + verbosity = fromFlag (configVerbosity flags) + +confPkgDescr :: UserHooks -> Verbosity -> Maybe FilePath + -> IO (Maybe FilePath, GenericPackageDescription) +confPkgDescr hooks verbosity mb_path = do + mdescr <- readDesc hooks + case mdescr of + Just descr -> return (Nothing, descr) + Nothing -> do + pdfile <- case mb_path of + Nothing -> defaultPackageDesc verbosity + Just path -> return path + info verbosity "Using Parsec parser" + descr <- readGenericPackageDescription verbosity pdfile + return (Just pdfile, descr) + +buildAction :: UserHooks -> BuildFlags -> Args -> IO () +buildAction hooks flags args = do + distPref <- findDistPrefOrDefault (buildDistPref flags) + let verbosity = fromFlag $ buildVerbosity flags + lbi <- getBuildConfig hooks verbosity distPref + let flags' = flags { buildDistPref = 
toFlag distPref + , buildCabalFilePath = maybeToFlag (cabalFilePath lbi)} + + progs <- reconfigurePrograms verbosity + (buildProgramPaths flags') + (buildProgramArgs flags') + (withPrograms lbi) + + hookedAction preBuild buildHook postBuild + (return lbi { withPrograms = progs }) + hooks flags' { buildArgs = args } args + +replAction :: UserHooks -> ReplFlags -> Args -> IO () +replAction hooks flags args = do + distPref <- findDistPrefOrDefault (replDistPref flags) + let verbosity = fromFlag $ replVerbosity flags + flags' = flags { replDistPref = toFlag distPref } + + lbi <- getBuildConfig hooks verbosity distPref + progs <- reconfigurePrograms verbosity + (replProgramPaths flags') + (replProgramArgs flags') + (withPrograms lbi) + + -- As far as I can tell, the only reason this doesn't use + -- 'hookedActionWithArgs' is because the arguments of 'replHook' + -- takes the args explicitly. UGH. -- ezyang + pbi <- preRepl hooks args flags' + let pkg_descr0 = localPkgDescr lbi + sanityCheckHookedBuildInfo pkg_descr0 pbi + let pkg_descr = updatePackageDescription pbi pkg_descr0 + lbi' = lbi { withPrograms = progs + , localPkgDescr = pkg_descr } + replHook hooks pkg_descr lbi' hooks flags' args + postRepl hooks args flags' pkg_descr lbi' + +hscolourAction :: UserHooks -> HscolourFlags -> Args -> IO () +hscolourAction hooks flags args = do + distPref <- findDistPrefOrDefault (hscolourDistPref flags) + let verbosity = fromFlag $ hscolourVerbosity flags + lbi <- getBuildConfig hooks verbosity distPref + let flags' = flags { hscolourDistPref = toFlag distPref + , hscolourCabalFilePath = maybeToFlag (cabalFilePath lbi)} + + hookedAction preHscolour hscolourHook postHscolour + (getBuildConfig hooks verbosity distPref) + hooks flags' args + +doctestAction :: UserHooks -> DoctestFlags -> Args -> IO () +doctestAction hooks flags args = do + distPref <- findDistPrefOrDefault (doctestDistPref flags) + let verbosity = fromFlag $ doctestVerbosity flags + flags' = flags { 
doctestDistPref = toFlag distPref } + + lbi <- getBuildConfig hooks verbosity distPref + progs <- reconfigurePrograms verbosity + (doctestProgramPaths flags') + (doctestProgramArgs flags') + (withPrograms lbi) + + hookedAction preDoctest doctestHook postDoctest + (return lbi { withPrograms = progs }) + hooks flags' args + +haddockAction :: UserHooks -> HaddockFlags -> Args -> IO () +haddockAction hooks flags args = do + distPref <- findDistPrefOrDefault (haddockDistPref flags) + let verbosity = fromFlag $ haddockVerbosity flags + lbi <- getBuildConfig hooks verbosity distPref + let flags' = flags { haddockDistPref = toFlag distPref + , haddockCabalFilePath = maybeToFlag (cabalFilePath lbi)} + + progs <- reconfigurePrograms verbosity + (haddockProgramPaths flags') + (haddockProgramArgs flags') + (withPrograms lbi) + + hookedAction preHaddock haddockHook postHaddock + (return lbi { withPrograms = progs }) + hooks flags' { haddockArgs = args } args + +cleanAction :: UserHooks -> CleanFlags -> Args -> IO () +cleanAction hooks flags args = do + distPref <- findDistPrefOrDefault (cleanDistPref flags) + + elbi <- tryGetBuildConfig hooks verbosity distPref + let flags' = flags { cleanDistPref = toFlag distPref + , cleanCabalFilePath = case elbi of + Left _ -> mempty + Right lbi -> maybeToFlag (cabalFilePath lbi)} + + pbi <- preClean hooks args flags' + + (_, ppd) <- confPkgDescr hooks verbosity Nothing + -- It might seem like we are doing something clever here + -- but we're really not: if you look at the implementation + -- of 'clean' in the end all the package description is + -- used for is to clear out @extra-tmp-files@. IMO, + -- the configure script goo should go into @dist@ too! 
+ -- -- ezyang + let pkg_descr0 = flattenPackageDescription ppd + -- We don't sanity check for clean as an error + -- here would prevent cleaning: + --sanityCheckHookedBuildInfo pkg_descr0 pbi + let pkg_descr = updatePackageDescription pbi pkg_descr0 + + cleanHook hooks pkg_descr () hooks flags' + postClean hooks args flags' pkg_descr () + where + verbosity = fromFlag (cleanVerbosity flags) + +copyAction :: UserHooks -> CopyFlags -> Args -> IO () +copyAction hooks flags args = do + distPref <- findDistPrefOrDefault (copyDistPref flags) + let verbosity = fromFlag $ copyVerbosity flags + lbi <- getBuildConfig hooks verbosity distPref + let flags' = flags { copyDistPref = toFlag distPref + , copyCabalFilePath = maybeToFlag (cabalFilePath lbi)} + hookedAction preCopy copyHook postCopy + (getBuildConfig hooks verbosity distPref) + hooks flags' { copyArgs = args } args + +installAction :: UserHooks -> InstallFlags -> Args -> IO () +installAction hooks flags args = do + distPref <- findDistPrefOrDefault (installDistPref flags) + let verbosity = fromFlag $ installVerbosity flags + lbi <- getBuildConfig hooks verbosity distPref + let flags' = flags { installDistPref = toFlag distPref + , installCabalFilePath = maybeToFlag (cabalFilePath lbi)} + hookedAction preInst instHook postInst + (getBuildConfig hooks verbosity distPref) + hooks flags' args + +sdistAction :: UserHooks -> SDistFlags -> Args -> IO () +sdistAction hooks flags args = do + distPref <- findDistPrefOrDefault (sDistDistPref flags) + let flags' = flags { sDistDistPref = toFlag distPref } + pbi <- preSDist hooks args flags' + + mlbi <- maybeGetPersistBuildConfig distPref + + -- NB: It would be TOTALLY WRONG to use the 'PackageDescription' + -- store in the 'LocalBuildInfo' for the rest of @sdist@, because + -- that would result in only the files that would be built + -- according to the user's configure being packaged up. 
+ -- In fact, it is not obvious why we need to read the + -- 'LocalBuildInfo' in the first place, except that we want + -- to do some architecture-independent preprocessing which + -- needs to be configured. This is totally awful, see + -- GH#130. + + (_, ppd) <- confPkgDescr hooks verbosity Nothing + + let pkg_descr0 = flattenPackageDescription ppd + sanityCheckHookedBuildInfo pkg_descr0 pbi + let pkg_descr = updatePackageDescription pbi pkg_descr0 + mlbi' = fmap (\lbi -> lbi { localPkgDescr = pkg_descr }) mlbi + + sDistHook hooks pkg_descr mlbi' hooks flags' + postSDist hooks args flags' pkg_descr mlbi' + where + verbosity = fromFlag (sDistVerbosity flags) + +testAction :: UserHooks -> TestFlags -> Args -> IO () +testAction hooks flags args = do + distPref <- findDistPrefOrDefault (testDistPref flags) + let verbosity = fromFlag $ testVerbosity flags + flags' = flags { testDistPref = toFlag distPref } + + localBuildInfo <- getBuildConfig hooks verbosity distPref + let pkg_descr = localPkgDescr localBuildInfo + -- It is safe to do 'runTests' before the new test handler because the + -- default action is a no-op and if the package uses the old test interface + -- the new handler will find no tests. 
+ runTests hooks args False pkg_descr localBuildInfo + hookedActionWithArgs preTest testHook postTest + (getBuildConfig hooks verbosity distPref) + hooks flags' args + +benchAction :: UserHooks -> BenchmarkFlags -> Args -> IO () +benchAction hooks flags args = do + distPref <- findDistPrefOrDefault (benchmarkDistPref flags) + let verbosity = fromFlag $ benchmarkVerbosity flags + flags' = flags { benchmarkDistPref = toFlag distPref } + hookedActionWithArgs preBench benchHook postBench + (getBuildConfig hooks verbosity distPref) + hooks flags' args + +registerAction :: UserHooks -> RegisterFlags -> Args -> IO () +registerAction hooks flags args = do + distPref <- findDistPrefOrDefault (regDistPref flags) + let verbosity = fromFlag $ regVerbosity flags + lbi <- getBuildConfig hooks verbosity distPref + let flags' = flags { regDistPref = toFlag distPref + , regCabalFilePath = maybeToFlag (cabalFilePath lbi)} + hookedAction preReg regHook postReg + (getBuildConfig hooks verbosity distPref) + hooks flags' { regArgs = args } args + +unregisterAction :: UserHooks -> RegisterFlags -> Args -> IO () +unregisterAction hooks flags args = do + distPref <- findDistPrefOrDefault (regDistPref flags) + let verbosity = fromFlag $ regVerbosity flags + lbi <- getBuildConfig hooks verbosity distPref + let flags' = flags { regDistPref = toFlag distPref + , regCabalFilePath = maybeToFlag (cabalFilePath lbi)} + hookedAction preUnreg unregHook postUnreg + (getBuildConfig hooks verbosity distPref) + hooks flags' args + +hookedAction :: (UserHooks -> Args -> flags -> IO HookedBuildInfo) + -> (UserHooks -> PackageDescription -> LocalBuildInfo + -> UserHooks -> flags -> IO ()) + -> (UserHooks -> Args -> flags -> PackageDescription + -> LocalBuildInfo -> IO ()) + -> IO LocalBuildInfo + -> UserHooks -> flags -> Args -> IO () +hookedAction pre_hook cmd_hook = + hookedActionWithArgs pre_hook (\h _ pd lbi uh flags -> + cmd_hook h pd lbi uh flags) + +hookedActionWithArgs :: (UserHooks -> Args -> 
flags -> IO HookedBuildInfo) + -> (UserHooks -> Args -> PackageDescription -> LocalBuildInfo + -> UserHooks -> flags -> IO ()) + -> (UserHooks -> Args -> flags -> PackageDescription + -> LocalBuildInfo -> IO ()) + -> IO LocalBuildInfo + -> UserHooks -> flags -> Args -> IO () +hookedActionWithArgs pre_hook cmd_hook post_hook + get_build_config hooks flags args = do + pbi <- pre_hook hooks args flags + lbi0 <- get_build_config + let pkg_descr0 = localPkgDescr lbi0 + sanityCheckHookedBuildInfo pkg_descr0 pbi + let pkg_descr = updatePackageDescription pbi pkg_descr0 + lbi = lbi0 { localPkgDescr = pkg_descr } + cmd_hook hooks args pkg_descr lbi hooks flags + post_hook hooks args flags pkg_descr lbi + +sanityCheckHookedBuildInfo :: PackageDescription -> HookedBuildInfo -> IO () +sanityCheckHookedBuildInfo PackageDescription { library = Nothing } (Just _,_) + = die $ "The buildinfo contains info for a library, " + ++ "but the package does not have a library." + +sanityCheckHookedBuildInfo pkg_descr (_, hookExes) + | not (null nonExistant) + = die $ "The buildinfo contains info for an executable called '" + ++ display (head nonExistant) ++ "' but the package does not have a " + ++ "executable with that name." + where + pkgExeNames = nub (map exeName (executables pkg_descr)) + hookExeNames = nub (map fst hookExes) + nonExistant = hookExeNames \\ pkgExeNames + +sanityCheckHookedBuildInfo _ _ = return () + +-- | Try to read the 'localBuildInfoFile' +tryGetBuildConfig :: UserHooks -> Verbosity -> FilePath + -> IO (Either ConfigStateFileError LocalBuildInfo) +tryGetBuildConfig u v = try . getBuildConfig u v + + +-- | Read the 'localBuildInfoFile' or throw an exception. 
+getBuildConfig :: UserHooks -> Verbosity -> FilePath -> IO LocalBuildInfo +getBuildConfig hooks verbosity distPref = do + lbi_wo_programs <- getPersistBuildConfig distPref + -- Restore info about unconfigured programs, since it is not serialized + let lbi = lbi_wo_programs { + withPrograms = restoreProgramDb + (builtinPrograms ++ hookedPrograms hooks) + (withPrograms lbi_wo_programs) + } + + case pkgDescrFile lbi of + Nothing -> return lbi + Just pkg_descr_file -> do + outdated <- checkPersistBuildConfigOutdated distPref pkg_descr_file + if outdated + then reconfigure pkg_descr_file lbi + else return lbi + + where + reconfigure :: FilePath -> LocalBuildInfo -> IO LocalBuildInfo + reconfigure pkg_descr_file lbi = do + notice verbosity $ pkg_descr_file ++ " has been changed. " + ++ "Re-configuring with most recently used options. " + ++ "If this fails, please run configure manually.\n" + let cFlags = configFlags lbi + let cFlags' = cFlags { + -- Since the list of unconfigured programs is not serialized, + -- restore it to the same value as normally used at the beginning + -- of a configure run: + configPrograms_ = restoreProgramDb + (builtinPrograms ++ hookedPrograms hooks) + `fmap` configPrograms_ cFlags, + + -- Use the current, not saved verbosity level: + configVerbosity = Flag verbosity + } + configureAction hooks cFlags' (extraConfigArgs lbi) + + +-- -------------------------------------------------------------------------- +-- Cleaning + +clean :: PackageDescription -> CleanFlags -> IO () +clean pkg_descr flags = do + let distPref = fromFlagOrDefault defaultDistPref $ cleanDistPref flags + notice verbosity "cleaning..." + + maybeConfig <- if fromFlag (cleanSaveConf flags) + then maybeGetPersistBuildConfig distPref + else return Nothing + + -- remove the whole dist/ directory rather than tracking exactly what files + -- we created in there. 
+ chattyTry "removing dist/" $ do + exists <- doesDirectoryExist distPref + when exists (removeDirectoryRecursive distPref) + + -- Any extra files the user wants to remove + traverse_ removeFileOrDirectory (extraTmpFiles pkg_descr) + + -- If the user wanted to save the config, write it back + traverse_ (writePersistBuildConfig distPref) maybeConfig + + where + removeFileOrDirectory :: FilePath -> NoCallStackIO () + removeFileOrDirectory fname = do + isDir <- doesDirectoryExist fname + isFile <- doesFileExist fname + if isDir then removeDirectoryRecursive fname + else when isFile $ removeFile fname + verbosity = fromFlag (cleanVerbosity flags) + +-- -------------------------------------------------------------------------- +-- Default hooks + +-- | Hooks that correspond to a plain instantiation of the +-- \"simple\" build system +simpleUserHooks :: UserHooks +simpleUserHooks = + emptyUserHooks { + confHook = configure, + postConf = finalChecks, + buildHook = defaultBuildHook, + replHook = defaultReplHook, + copyHook = \desc lbi _ f -> install desc lbi f, + -- 'install' has correct 'copy' behavior with params + testHook = defaultTestHook, + benchHook = defaultBenchHook, + instHook = defaultInstallHook, + sDistHook = \p l h f -> sdist p l f srcPref (allSuffixHandlers h), + cleanHook = \p _ _ f -> clean p f, + hscolourHook = \p l h f -> hscolour p l (allSuffixHandlers h) f, + haddockHook = \p l h f -> haddock p l (allSuffixHandlers h) f, + doctestHook = \p l h f -> doctest p l (allSuffixHandlers h) f, + regHook = defaultRegHook, + unregHook = \p l _ f -> unregister p l f + } + where + finalChecks _args flags pkg_descr lbi = + checkForeignDeps pkg_descr lbi (lessVerbose verbosity) + where + verbosity = fromFlag (configVerbosity flags) + +-- | Basic autoconf 'UserHooks': +-- +-- * 'postConf' runs @.\/configure@, if present. 
+-- +-- * the pre-hooks 'preBuild', 'preClean', 'preCopy', 'preInst', +-- 'preReg' and 'preUnreg' read additional build information from +-- /package/@.buildinfo@, if present. +-- +-- Thus @configure@ can use local system information to generate +-- /package/@.buildinfo@ and possibly other files. + +{-# DEPRECATED defaultUserHooks + "Use simpleUserHooks or autoconfUserHooks, unless you need Cabal-1.2\n compatibility in which case you must stick with defaultUserHooks" #-} +defaultUserHooks :: UserHooks +defaultUserHooks = autoconfUserHooks { + confHook = \pkg flags -> do + let verbosity = fromFlag (configVerbosity flags) + warn verbosity + "defaultUserHooks in Setup script is deprecated." + confHook autoconfUserHooks pkg flags, + postConf = oldCompatPostConf + } + -- This is the annoying old version that only runs configure if it exists. + -- It's here for compatibility with existing Setup.hs scripts. See: + -- https://github.com/haskell/cabal/issues/158 + where oldCompatPostConf args flags pkg_descr lbi + = do let verbosity = fromFlag (configVerbosity flags) + baseDir lbi' = fromMaybe "" (takeDirectory <$> cabalFilePath lbi') + + confExists <- doesFileExist $ (baseDir lbi) "configure" + when confExists $ + runConfigureScript verbosity + backwardsCompatHack flags lbi + + pbi <- getHookedBuildInfo (buildDir lbi) verbosity + sanityCheckHookedBuildInfo pkg_descr pbi + let pkg_descr' = updatePackageDescription pbi pkg_descr + lbi' = lbi { localPkgDescr = pkg_descr' } + postConf simpleUserHooks args flags pkg_descr' lbi' + + backwardsCompatHack = True + +autoconfUserHooks :: UserHooks +autoconfUserHooks + = simpleUserHooks + { + postConf = defaultPostConf, + preBuild = readHookWithArgs buildVerbosity buildDistPref, -- buildCabalFilePath, + preCopy = readHookWithArgs copyVerbosity copyDistPref, + preClean = readHook cleanVerbosity cleanDistPref, + preInst = readHook installVerbosity installDistPref, + preHscolour = readHook hscolourVerbosity hscolourDistPref, + preHaddock 
= readHookWithArgs haddockVerbosity haddockDistPref, + preReg = readHook regVerbosity regDistPref, + preUnreg = readHook regVerbosity regDistPref + } + where defaultPostConf :: Args -> ConfigFlags -> PackageDescription + -> LocalBuildInfo -> IO () + defaultPostConf args flags pkg_descr lbi + = do let verbosity = fromFlag (configVerbosity flags) + baseDir lbi' = fromMaybe "" (takeDirectory <$> cabalFilePath lbi') + confExists <- doesFileExist $ (baseDir lbi) "configure" + if confExists + then runConfigureScript verbosity + backwardsCompatHack flags lbi + else die "configure script not found." + + pbi <- getHookedBuildInfo (buildDir lbi) verbosity + sanityCheckHookedBuildInfo pkg_descr pbi + let pkg_descr' = updatePackageDescription pbi pkg_descr + lbi' = lbi { localPkgDescr = pkg_descr' } + postConf simpleUserHooks args flags pkg_descr' lbi' + + backwardsCompatHack = False + + readHookWithArgs :: (a -> Flag Verbosity) + -> (a -> Flag FilePath) + -> Args -> a + -> IO HookedBuildInfo + readHookWithArgs get_verbosity get_dist_pref _ flags = do + dist_dir <- findDistPrefOrDefault (get_dist_pref flags) + getHookedBuildInfo (dist_dir "build") verbosity + where + verbosity = fromFlag (get_verbosity flags) + + readHook :: (a -> Flag Verbosity) + -> (a -> Flag FilePath) + -> Args -> a -> IO HookedBuildInfo + readHook get_verbosity get_dist_pref a flags = do + noExtraFlags a + dist_dir <- findDistPrefOrDefault (get_dist_pref flags) + getHookedBuildInfo (dist_dir "build") verbosity + where + verbosity = fromFlag (get_verbosity flags) + +runConfigureScript :: Verbosity -> Bool -> ConfigFlags -> LocalBuildInfo + -> IO () +runConfigureScript verbosity backwardsCompatHack flags lbi = do + env <- getEnvironment + let programDb = withPrograms lbi + (ccProg, ccFlags) <- configureCCompiler verbosity programDb + ccProgShort <- getShortPathName ccProg + -- The C compiler's compilation and linker flags (e.g. 
+ -- "C compiler flags" and "Gcc Linker flags" from GHC) have already + -- been merged into ccFlags, so we set both CFLAGS and LDFLAGS + -- to ccFlags + -- We don't try and tell configure which ld to use, as we don't have + -- a way to pass its flags too + configureFile <- makeAbsolute $ + fromMaybe "." (takeDirectory <$> cabalFilePath lbi) "configure" + -- autoconf is fussy about filenames, and has a set of forbidden + -- characters that can't appear in the build directory, etc: + -- https://www.gnu.org/software/autoconf/manual/autoconf.html#File-System-Conventions + -- + -- This has caused hard-to-debug failures in the past (#5368), so we + -- detect some cases early and warn with a clear message. Windows's + -- use of backslashes is problematic here, so we'll switch to + -- slashes, but we do still want to fail on backslashes in POSIX + -- paths. + -- + -- TODO: We don't check for colons, tildes or leading dashes. We + -- also should check the builddir's path, destdir, and all other + -- paths as well. + let configureFile' = intercalate "/" $ splitDirectories configureFile + for_ badAutoconfCharacters $ \(c, cname) -> + when (c `elem` dropDrive configureFile') $ + warn verbosity $ + "The path to the './configure' script, '" ++ configureFile' + ++ "', contains the character '" ++ [c] ++ "' (" ++ cname ++ ")." + ++ " This may cause the script to fail with an obscure error, or for" + ++ " building the package to fail later." 
+ let extraPath = fromNubList $ configProgramPathExtra flags + let cflagsEnv = maybe (unwords ccFlags) (++ (" " ++ unwords ccFlags)) + $ lookup "CFLAGS" env + spSep = [searchPathSeparator] + pathEnv = maybe (intercalate spSep extraPath) + ((intercalate spSep extraPath ++ spSep)++) $ lookup "PATH" env + overEnv = ("CFLAGS", Just cflagsEnv) : + [("PATH", Just pathEnv) | not (null extraPath)] + args' = configureFile':args ++ ["CC=" ++ ccProgShort] + shProg = simpleProgram "sh" + progDb = modifyProgramSearchPath + (\p -> map ProgramSearchPathDir extraPath ++ p) emptyProgramDb + shConfiguredProg <- lookupProgram shProg + `fmap` configureProgram verbosity shProg progDb + case shConfiguredProg of + Just sh -> runProgramInvocation verbosity $ + (programInvocation (sh {programOverrideEnv = overEnv}) args') + { progInvokeCwd = Just (buildDir lbi) } + Nothing -> die notFoundMsg + + where + args = configureArgs backwardsCompatHack flags + + badAutoconfCharacters = + [ (' ', "space") + , ('\t', "tab") + , ('\n', "newline") + , ('\0', "null") + , ('"', "double quote") + , ('#', "hash") + , ('$', "dollar sign") + , ('&', "ampersand") + , ('\'', "single quote") + , ('(', "left bracket") + , (')', "right bracket") + , ('*', "star") + , (';', "semicolon") + , ('<', "less-than sign") + , ('=', "equals sign") + , ('>', "greater-than sign") + , ('?', "question mark") + , ('[', "left square bracket") + , ('\\', "backslash") + , ('`', "backtick") + , ('|', "pipe") + ] + + notFoundMsg = "The package has a './configure' script. " + ++ "If you are on Windows, This requires a " + ++ "Unix compatibility toolchain such as MinGW+MSYS or Cygwin. " + ++ "If you are not on Windows, ensure that an 'sh' command " + ++ "is discoverable in your path." 
+ +getHookedBuildInfo :: FilePath -> Verbosity -> IO HookedBuildInfo +getHookedBuildInfo build_dir verbosity = do + maybe_infoFile <- findHookedPackageDesc build_dir + case maybe_infoFile of + Nothing -> return emptyHookedBuildInfo + Just infoFile -> do + info verbosity $ "Reading parameters from " ++ infoFile + readHookedBuildInfo verbosity infoFile + +defaultTestHook :: Args -> PackageDescription -> LocalBuildInfo + -> UserHooks -> TestFlags -> IO () +defaultTestHook args pkg_descr localbuildinfo _ flags = + test args pkg_descr localbuildinfo flags + +defaultBenchHook :: Args -> PackageDescription -> LocalBuildInfo + -> UserHooks -> BenchmarkFlags -> IO () +defaultBenchHook args pkg_descr localbuildinfo _ flags = + bench args pkg_descr localbuildinfo flags + +defaultInstallHook :: PackageDescription -> LocalBuildInfo + -> UserHooks -> InstallFlags -> IO () +defaultInstallHook pkg_descr localbuildinfo _ flags = do + let copyFlags = defaultCopyFlags { + copyDistPref = installDistPref flags, + copyDest = installDest flags, + copyVerbosity = installVerbosity flags + } + install pkg_descr localbuildinfo copyFlags + let registerFlags = defaultRegisterFlags { + regDistPref = installDistPref flags, + regInPlace = installInPlace flags, + regPackageDB = installPackageDB flags, + regVerbosity = installVerbosity flags + } + when (hasLibs pkg_descr) $ register pkg_descr localbuildinfo registerFlags + +defaultBuildHook :: PackageDescription -> LocalBuildInfo + -> UserHooks -> BuildFlags -> IO () +defaultBuildHook pkg_descr localbuildinfo hooks flags = + build pkg_descr localbuildinfo flags (allSuffixHandlers hooks) + +defaultReplHook :: PackageDescription -> LocalBuildInfo + -> UserHooks -> ReplFlags -> [String] -> IO () +defaultReplHook pkg_descr localbuildinfo hooks flags args = + repl pkg_descr localbuildinfo flags (allSuffixHandlers hooks) args + +defaultRegHook :: PackageDescription -> LocalBuildInfo + -> UserHooks -> RegisterFlags -> IO () +defaultRegHook pkg_descr 
localbuildinfo _ flags = + if hasLibs pkg_descr + then register pkg_descr localbuildinfo flags + else setupMessage (fromFlag (regVerbosity flags)) + "Package contains no library to register:" (packageId pkg_descr) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseExceptionId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseExceptionId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseExceptionId.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseExceptionId.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,213 @@ +-- This file is generated. See Makefile's spdx rule +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.SPDX.LicenseExceptionId ( + LicenseExceptionId (..), + licenseExceptionId, + licenseExceptionName, + mkLicenseExceptionId, + licenseExceptionIdList, + ) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Pretty +import Distribution.Parsec.Class +import Distribution.Utils.Generic (isAsciiAlphaNum) +import Distribution.SPDX.LicenseListVersion + +import qualified Data.Map.Strict as Map +import qualified Distribution.Compat.CharParsing as P +import qualified Text.PrettyPrint as Disp + +------------------------------------------------------------------------------- +-- LicenseExceptionId +------------------------------------------------------------------------------- + +-- | SPDX License identifier +data LicenseExceptionId + = DS389_exception -- ^ @389-exception@, 389 Directory Server Exception + | Autoconf_exception_2_0 -- ^ @Autoconf-exception-2.0@, Autoconf exception 2.0 + | Autoconf_exception_3_0 -- ^ @Autoconf-exception-3.0@, Autoconf exception 3.0 + | Bison_exception_2_2 -- ^ @Bison-exception-2.2@, Bison exception 2.2 + | Bootloader_exception -- ^ @Bootloader-exception@, 
Bootloader Distribution Exception + | Classpath_exception_2_0 -- ^ @Classpath-exception-2.0@, Classpath exception 2.0 + | CLISP_exception_2_0 -- ^ @CLISP-exception-2.0@, CLISP exception 2.0 + | DigiRule_FOSS_exception -- ^ @DigiRule-FOSS-exception@, DigiRule FOSS License Exception + | ECos_exception_2_0 -- ^ @eCos-exception-2.0@, eCos exception 2.0 + | Fawkes_Runtime_exception -- ^ @Fawkes-Runtime-exception@, Fawkes Runtime Exception + | FLTK_exception -- ^ @FLTK-exception@, FLTK exception + | Font_exception_2_0 -- ^ @Font-exception-2.0@, Font exception 2.0 + | Freertos_exception_2_0 -- ^ @freertos-exception-2.0@, FreeRTOS Exception 2.0 + | GCC_exception_2_0 -- ^ @GCC-exception-2.0@, GCC Runtime Library exception 2.0 + | GCC_exception_3_1 -- ^ @GCC-exception-3.1@, GCC Runtime Library exception 3.1 + | Gnu_javamail_exception -- ^ @gnu-javamail-exception@, GNU JavaMail exception + | I2p_gpl_java_exception -- ^ @i2p-gpl-java-exception@, i2p GPL+Java Exception + | Libtool_exception -- ^ @Libtool-exception@, Libtool Exception + | Linux_syscall_note -- ^ @Linux-syscall-note@, Linux Syscall Note + | LLVM_exception -- ^ @LLVM-exception@, LLVM Exception, SPDX License List 3.2 + | LZMA_exception -- ^ @LZMA-exception@, LZMA exception + | Mif_exception -- ^ @mif-exception@, Macros and Inline Functions Exception + | Nokia_Qt_exception_1_1 -- ^ @Nokia-Qt-exception-1.1@, Nokia Qt LGPL exception 1.1 + | OCCT_exception_1_0 -- ^ @OCCT-exception-1.0@, Open CASCADE Exception 1.0 + | OpenJDK_assembly_exception_1_0 -- ^ @OpenJDK-assembly-exception-1.0@, OpenJDK Assembly exception 1.0, SPDX License List 3.2 + | Openvpn_openssl_exception -- ^ @openvpn-openssl-exception@, OpenVPN OpenSSL Exception + | PS_or_PDF_font_exception_20170817 -- ^ @PS-or-PDF-font-exception-20170817@, PS/PDF font exception (2017-08-17), SPDX License List 3.2 + | Qt_GPL_exception_1_0 -- ^ @Qt-GPL-exception-1.0@, Qt GPL exception 1.0, SPDX License List 3.2 + | Qt_LGPL_exception_1_1 -- ^ @Qt-LGPL-exception-1.1@, Qt 
LGPL exception 1.1, SPDX License List 3.2 + | Qwt_exception_1_0 -- ^ @Qwt-exception-1.0@, Qwt exception 1.0 + | U_boot_exception_2_0 -- ^ @u-boot-exception-2.0@, U-Boot exception 2.0 + | WxWindows_exception_3_1 -- ^ @WxWindows-exception-3.1@, WxWindows Library Exception 3.1 + deriving (Eq, Ord, Enum, Bounded, Show, Read, Typeable, Data, Generic) + +instance Binary LicenseExceptionId + +instance Pretty LicenseExceptionId where + pretty = Disp.text . licenseExceptionId + +instance Parsec LicenseExceptionId where + parsec = do + n <- some $ P.satisfy $ \c -> isAsciiAlphaNum c || c == '-' || c == '.' + v <- askCabalSpecVersion + maybe (fail $ "Unknown SPDX license exception identifier: " ++ n) return $ + mkLicenseExceptionId (cabalSpecVersionToSPDXListVersion v) n + +instance NFData LicenseExceptionId where + rnf l = l `seq` () + +------------------------------------------------------------------------------- +-- License Data +------------------------------------------------------------------------------- + +-- | License SPDX identifier, e.g. @"BSD-3-Clause"@. 
+licenseExceptionId :: LicenseExceptionId -> String +licenseExceptionId DS389_exception = "389-exception" +licenseExceptionId Autoconf_exception_2_0 = "Autoconf-exception-2.0" +licenseExceptionId Autoconf_exception_3_0 = "Autoconf-exception-3.0" +licenseExceptionId Bison_exception_2_2 = "Bison-exception-2.2" +licenseExceptionId Bootloader_exception = "Bootloader-exception" +licenseExceptionId Classpath_exception_2_0 = "Classpath-exception-2.0" +licenseExceptionId CLISP_exception_2_0 = "CLISP-exception-2.0" +licenseExceptionId DigiRule_FOSS_exception = "DigiRule-FOSS-exception" +licenseExceptionId ECos_exception_2_0 = "eCos-exception-2.0" +licenseExceptionId Fawkes_Runtime_exception = "Fawkes-Runtime-exception" +licenseExceptionId FLTK_exception = "FLTK-exception" +licenseExceptionId Font_exception_2_0 = "Font-exception-2.0" +licenseExceptionId Freertos_exception_2_0 = "freertos-exception-2.0" +licenseExceptionId GCC_exception_2_0 = "GCC-exception-2.0" +licenseExceptionId GCC_exception_3_1 = "GCC-exception-3.1" +licenseExceptionId Gnu_javamail_exception = "gnu-javamail-exception" +licenseExceptionId I2p_gpl_java_exception = "i2p-gpl-java-exception" +licenseExceptionId Libtool_exception = "Libtool-exception" +licenseExceptionId Linux_syscall_note = "Linux-syscall-note" +licenseExceptionId LLVM_exception = "LLVM-exception" +licenseExceptionId LZMA_exception = "LZMA-exception" +licenseExceptionId Mif_exception = "mif-exception" +licenseExceptionId Nokia_Qt_exception_1_1 = "Nokia-Qt-exception-1.1" +licenseExceptionId OCCT_exception_1_0 = "OCCT-exception-1.0" +licenseExceptionId OpenJDK_assembly_exception_1_0 = "OpenJDK-assembly-exception-1.0" +licenseExceptionId Openvpn_openssl_exception = "openvpn-openssl-exception" +licenseExceptionId PS_or_PDF_font_exception_20170817 = "PS-or-PDF-font-exception-20170817" +licenseExceptionId Qt_GPL_exception_1_0 = "Qt-GPL-exception-1.0" +licenseExceptionId Qt_LGPL_exception_1_1 = "Qt-LGPL-exception-1.1" +licenseExceptionId 
Qwt_exception_1_0 = "Qwt-exception-1.0" +licenseExceptionId U_boot_exception_2_0 = "u-boot-exception-2.0" +licenseExceptionId WxWindows_exception_3_1 = "WxWindows-exception-3.1" + +-- | License name, e.g. @"GNU General Public License v2.0 only"@ +licenseExceptionName :: LicenseExceptionId -> String +licenseExceptionName DS389_exception = "389 Directory Server Exception" +licenseExceptionName Autoconf_exception_2_0 = "Autoconf exception 2.0" +licenseExceptionName Autoconf_exception_3_0 = "Autoconf exception 3.0" +licenseExceptionName Bison_exception_2_2 = "Bison exception 2.2" +licenseExceptionName Bootloader_exception = "Bootloader Distribution Exception" +licenseExceptionName Classpath_exception_2_0 = "Classpath exception 2.0" +licenseExceptionName CLISP_exception_2_0 = "CLISP exception 2.0" +licenseExceptionName DigiRule_FOSS_exception = "DigiRule FOSS License Exception" +licenseExceptionName ECos_exception_2_0 = "eCos exception 2.0" +licenseExceptionName Fawkes_Runtime_exception = "Fawkes Runtime Exception" +licenseExceptionName FLTK_exception = "FLTK exception" +licenseExceptionName Font_exception_2_0 = "Font exception 2.0" +licenseExceptionName Freertos_exception_2_0 = "FreeRTOS Exception 2.0" +licenseExceptionName GCC_exception_2_0 = "GCC Runtime Library exception 2.0" +licenseExceptionName GCC_exception_3_1 = "GCC Runtime Library exception 3.1" +licenseExceptionName Gnu_javamail_exception = "GNU JavaMail exception" +licenseExceptionName I2p_gpl_java_exception = "i2p GPL+Java Exception" +licenseExceptionName Libtool_exception = "Libtool Exception" +licenseExceptionName Linux_syscall_note = "Linux Syscall Note" +licenseExceptionName LLVM_exception = "LLVM Exception" +licenseExceptionName LZMA_exception = "LZMA exception" +licenseExceptionName Mif_exception = "Macros and Inline Functions Exception" +licenseExceptionName Nokia_Qt_exception_1_1 = "Nokia Qt LGPL exception 1.1" +licenseExceptionName OCCT_exception_1_0 = "Open CASCADE Exception 1.0" 
+licenseExceptionName OpenJDK_assembly_exception_1_0 = "OpenJDK Assembly exception 1.0" +licenseExceptionName Openvpn_openssl_exception = "OpenVPN OpenSSL Exception" +licenseExceptionName PS_or_PDF_font_exception_20170817 = "PS/PDF font exception (2017-08-17)" +licenseExceptionName Qt_GPL_exception_1_0 = "Qt GPL exception 1.0" +licenseExceptionName Qt_LGPL_exception_1_1 = "Qt LGPL exception 1.1" +licenseExceptionName Qwt_exception_1_0 = "Qwt exception 1.0" +licenseExceptionName U_boot_exception_2_0 = "U-Boot exception 2.0" +licenseExceptionName WxWindows_exception_3_1 = "WxWindows Library Exception 3.1" + +------------------------------------------------------------------------------- +-- Creation +------------------------------------------------------------------------------- + +licenseExceptionIdList :: LicenseListVersion -> [LicenseExceptionId] +licenseExceptionIdList LicenseListVersion_3_0 = + [] + ++ bulkOfLicenses +licenseExceptionIdList LicenseListVersion_3_2 = + [ LLVM_exception + , OpenJDK_assembly_exception_1_0 + , PS_or_PDF_font_exception_20170817 + , Qt_GPL_exception_1_0 + , Qt_LGPL_exception_1_1 + ] + ++ bulkOfLicenses + +-- | Create a 'LicenseExceptionId' from a 'String'. 
+mkLicenseExceptionId :: LicenseListVersion -> String -> Maybe LicenseExceptionId +mkLicenseExceptionId LicenseListVersion_3_0 s = Map.lookup s stringLookup_3_0 +mkLicenseExceptionId LicenseListVersion_3_2 s = Map.lookup s stringLookup_3_2 + +stringLookup_3_0 :: Map String LicenseExceptionId +stringLookup_3_0 = Map.fromList $ map (\i -> (licenseExceptionId i, i)) $ + licenseExceptionIdList LicenseListVersion_3_0 + +stringLookup_3_2 :: Map String LicenseExceptionId +stringLookup_3_2 = Map.fromList $ map (\i -> (licenseExceptionId i, i)) $ + licenseExceptionIdList LicenseListVersion_3_2 + +-- | License exceptions in all SPDX License lists +bulkOfLicenses :: [LicenseExceptionId] +bulkOfLicenses = + [ DS389_exception + , Autoconf_exception_2_0 + , Autoconf_exception_3_0 + , Bison_exception_2_2 + , Bootloader_exception + , Classpath_exception_2_0 + , CLISP_exception_2_0 + , DigiRule_FOSS_exception + , ECos_exception_2_0 + , Fawkes_Runtime_exception + , FLTK_exception + , Font_exception_2_0 + , Freertos_exception_2_0 + , GCC_exception_2_0 + , GCC_exception_3_1 + , Gnu_javamail_exception + , I2p_gpl_java_exception + , Libtool_exception + , Linux_syscall_note + , LZMA_exception + , Mif_exception + , Nokia_Qt_exception_1_1 + , OCCT_exception_1_0 + , Openvpn_openssl_exception + , Qwt_exception_1_0 + , U_boot_exception_2_0 + , WxWindows_exception_3_1 + ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseExpression.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseExpression.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseExpression.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseExpression.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,161 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.SPDX.LicenseExpression ( + 
LicenseExpression (..), + SimpleLicenseExpression (..), + simpleLicenseExpression, + ) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.SPDX.LicenseExceptionId +import Distribution.SPDX.LicenseId +import Distribution.SPDX.LicenseListVersion +import Distribution.SPDX.LicenseReference +import Distribution.Utils.Generic (isAsciiAlphaNum) +import Text.PrettyPrint ((<+>)) + +import qualified Distribution.Compat.CharParsing as P +import qualified Text.PrettyPrint as Disp + +-- | SPDX License Expression. +-- +-- @ +-- idstring = 1*(ALPHA \/ DIGIT \/ "-" \/ "." ) +-- license id = \ +-- license exception id = \ +-- license ref = [\"DocumentRef-"1*(idstring)":"]\"LicenseRef-"1*(idstring) +-- +-- simple expression = license id \/ license id"+" \/ license ref +-- +-- compound expression = 1*1(simple expression \/ +-- simple expression \"WITH" license exception id \/ +-- compound expression \"AND" compound expression \/ +-- compound expression \"OR" compound expression ) \/ +-- "(" compound expression ")" ) +-- +-- license expression = 1*1(simple expression / compound expression) +-- @ +data LicenseExpression + = ELicense !SimpleLicenseExpression !(Maybe LicenseExceptionId) + | EAnd !LicenseExpression !LicenseExpression + | EOr !LicenseExpression !LicenseExpression + deriving (Show, Read, Eq, Ord, Typeable, Data, Generic) + +-- | Simple License Expressions. +data SimpleLicenseExpression + = ELicenseId LicenseId + -- ^ An SPDX License List Short Form Identifier. For example: @GPL-2.0-only@ + | ELicenseIdPlus LicenseId + -- ^ An SPDX License List Short Form Identifier with a unary"+" operator suffix to represent the current version of the license or any later version. 
For example: @GPL-2.0+@ + | ELicenseRef LicenseRef + -- ^ A SPDX user defined license reference: For example: @LicenseRef-23@, @LicenseRef-MIT-Style-1@, or @DocumentRef-spdx-tool-1.2:LicenseRef-MIT-Style-2@ + deriving (Show, Read, Eq, Ord, Typeable, Data, Generic) + +simpleLicenseExpression :: LicenseId -> LicenseExpression +simpleLicenseExpression i = ELicense (ELicenseId i) Nothing + +instance Binary LicenseExpression +instance Binary SimpleLicenseExpression + +instance Pretty LicenseExpression where + pretty = go 0 + where + go :: Int -> LicenseExpression -> Disp.Doc + go _ (ELicense lic exc) = + let doc = pretty lic + in maybe id (\e d -> d <+> Disp.text "WITH" <+> pretty e) exc doc + go d (EAnd e1 e2) = parens (d < 0) $ go 0 e1 <+> Disp.text "AND" <+> go 0 e2 + go d (EOr e1 e2) = parens (d < 1) $ go 1 e1 <+> Disp.text "OR" <+> go 1 e2 + + + parens False doc = doc + parens True doc = Disp.parens doc + +instance Pretty SimpleLicenseExpression where + pretty (ELicenseId i) = pretty i + pretty (ELicenseIdPlus i) = pretty i <<>> Disp.char '+' + pretty (ELicenseRef r) = pretty r + +instance Parsec SimpleLicenseExpression where + parsec = idstring >>= simple where + simple n + | Just l <- "LicenseRef-" `isPrefixOfMaybe` n = + maybe (fail $ "Incorrect LicenseRef format: " ++ n) (return . ELicenseRef) $ mkLicenseRef Nothing l + | Just d <- "DocumentRef-" `isPrefixOfMaybe` n = do + _ <- P.string ":LicenseRef-" + l <- idstring + maybe (fail $ "Incorrect LicenseRef format:" ++ n) (return . ELicenseRef) $ mkLicenseRef (Just d) l + | otherwise = do + v <- askCabalSpecVersion + l <- maybe (fail $ "Unknown SPDX license identifier: '" ++ n ++ "' " ++ licenseIdMigrationMessage n) return $ + mkLicenseId (cabalSpecVersionToSPDXListVersion v) n + orLater <- isJust <$> P.optional (P.char '+') + if orLater + then return (ELicenseIdPlus l) + else return (ELicenseId l) + +idstring :: P.CharParsing m => m String +idstring = P.munch1 $ \c -> isAsciiAlphaNum c || c == '-' || c == '.' 
+ +-- returns suffix part +isPrefixOfMaybe :: Eq a => [a] -> [a] -> Maybe [a] +isPrefixOfMaybe pfx s + | pfx `isPrefixOf` s = Just (drop (length pfx) s) + | otherwise = Nothing + +instance Parsec LicenseExpression where + parsec = expr + where + expr = compoundOr + + simple = do + s <- parsec + exc <- exception + return $ ELicense s exc + + exception = P.optional $ P.try (spaces1 *> P.string "WITH" *> spaces1) *> parsec + + compoundOr = do + x <- compoundAnd + l <- P.optional $ P.try (spaces1 *> P.string "OR" *> spaces1) *> compoundOr + return $ maybe id (flip EOr) l x + + compoundAnd = do + x <- compound + l <- P.optional $ P.try (spaces1 *> P.string "AND" *> spaces1) *> compoundAnd + return $ maybe id (flip EAnd) l x + + compound = braces <|> simple + + -- NOTE: we require that there's a space around AND & OR operators, + -- i.e. @(MIT)AND(MIT)@ will cause parse-error. + braces = do + _ <- P.char '(' + _ <- P.spaces + x <- expr + _ <- P.char ')' + return x + + spaces1 = P.space *> P.spaces + +-- notes: +-- +-- There MUST NOT be whitespace between a license­id and any following "+".  This supports easy parsing and +-- backwards compatibility.  There MUST be whitespace on either side of the operator "WITH".  There MUST be +-- whitespace and/or parentheses on either side of the operators "AND" and "OR". +-- +-- We handle that by having greedy 'idstring' parser, so MITAND would parse as invalid license identifier. 
+ +instance NFData LicenseExpression where + rnf (ELicense s e) = rnf s `seq` rnf e + rnf (EAnd x y) = rnf x `seq` rnf y + rnf (EOr x y) = rnf x `seq` rnf y + +instance NFData SimpleLicenseExpression where + rnf (ELicenseId i) = rnf i + rnf (ELicenseIdPlus i) = rnf i + rnf (ELicenseRef r) = rnf r diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/SPDX/License.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/SPDX/License.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/SPDX/License.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/SPDX/License.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,64 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.SPDX.License ( + License (..), + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Pretty +import Distribution.Parsec.Class +import Distribution.SPDX.LicenseExpression + +import qualified Distribution.Compat.CharParsing as P +import qualified Text.PrettyPrint as Disp + +-- | Declared license. +-- See [section 3.15 of SPDX Specification 2.1](https://spdx.org/spdx-specification-21-web-version#h.1hmsyys) +-- +-- /Note:/ the NOASSERTION case is omitted. +-- +-- Old 'License' can be migrated using following rules: +-- +-- * @AllRightsReserved@ and @UnspecifiedLicense@ to 'NONE'. +-- No license specified which legally defaults to /All Rights Reserved/. +-- The package may not be legally modified or redistributed by anyone but +-- the rightsholder. +-- +-- * @OtherLicense@ can be converted to 'LicenseRef' pointing to the file +-- in the package. +-- +-- * @UnknownLicense@ i.e. other licenses of the form @name-x.y@, should be +-- covered by SPDX license list, otherwise use 'LicenseRef'. +-- +-- * @PublicDomain@ isn't covered. Consider using CC0. +-- See +-- for more information. 
+-- +data License + = NONE + -- ^ if the package contains no license information whatsoever; or + | License LicenseExpression + -- ^ A valid SPDX License Expression as defined in Appendix IV. + deriving (Show, Read, Eq, Ord, Typeable, Data, Generic) + +instance Binary License + +instance NFData License where + rnf NONE = () + rnf (License l) = rnf l + +instance Pretty License where + pretty NONE = Disp.text "NONE" + pretty (License l) = pretty l + +-- | +-- >>> eitherParsec "BSD-3-Clause AND MIT" :: Either String License +-- Right (License (EAnd (ELicense (ELicenseId BSD_3_Clause) Nothing) (ELicense (ELicenseId MIT) Nothing))) +-- +-- >>> eitherParsec "NONE" :: Either String License +-- Right NONE +-- +instance Parsec License where + parsec = NONE <$ P.try (P.string "NONE") <|> License <$> parsec diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseId.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseId.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,1885 @@ +-- This file is generated. 
See Makefile's spdx rule +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.SPDX.LicenseId ( + LicenseId (..), + licenseId, + licenseName, + licenseIsOsiApproved, + mkLicenseId, + licenseIdList, + -- * Helpers + licenseIdMigrationMessage, + ) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Pretty +import Distribution.Parsec.Class +import Distribution.Utils.Generic (isAsciiAlphaNum) +import Distribution.SPDX.LicenseListVersion + +import qualified Data.Map.Strict as Map +import qualified Distribution.Compat.CharParsing as P +import qualified Text.PrettyPrint as Disp + +------------------------------------------------------------------------------- +-- LicenseId +------------------------------------------------------------------------------- + +-- | SPDX License identifier +data LicenseId + = NullBSD -- ^ @0BSD@, BSD Zero Clause License + | AAL -- ^ @AAL@, Attribution Assurance License + | Abstyles -- ^ @Abstyles@, Abstyles License + | Adobe_2006 -- ^ @Adobe-2006@, Adobe Systems Incorporated Source Code License Agreement + | Adobe_Glyph -- ^ @Adobe-Glyph@, Adobe Glyph List License + | ADSL -- ^ @ADSL@, Amazon Digital Services License + | AFL_1_1 -- ^ @AFL-1.1@, Academic Free License v1.1 + | AFL_1_2 -- ^ @AFL-1.2@, Academic Free License v1.2 + | AFL_2_0 -- ^ @AFL-2.0@, Academic Free License v2.0 + | AFL_2_1 -- ^ @AFL-2.1@, Academic Free License v2.1 + | AFL_3_0 -- ^ @AFL-3.0@, Academic Free License v3.0 + | Afmparse -- ^ @Afmparse@, Afmparse License + | AGPL_1_0 -- ^ @AGPL-1.0@, Affero General Public License v1.0, SPDX License List 3.0 + | AGPL_1_0_only -- ^ @AGPL-1.0-only@, Affero General Public License v1.0 only, SPDX License List 3.2 + | AGPL_1_0_or_later -- ^ @AGPL-1.0-or-later@, Affero General Public License v1.0 or later, SPDX License List 3.2 + | AGPL_3_0_only -- ^ @AGPL-3.0-only@, GNU Affero General Public License v3.0 only + | AGPL_3_0_or_later -- ^ @AGPL-3.0-or-later@, GNU Affero 
General Public License v3.0 or later + | Aladdin -- ^ @Aladdin@, Aladdin Free Public License + | AMDPLPA -- ^ @AMDPLPA@, AMD's plpa_map.c License + | AML -- ^ @AML@, Apple MIT License + | AMPAS -- ^ @AMPAS@, Academy of Motion Picture Arts and Sciences BSD + | ANTLR_PD -- ^ @ANTLR-PD@, ANTLR Software Rights Notice + | Apache_1_0 -- ^ @Apache-1.0@, Apache License 1.0 + | Apache_1_1 -- ^ @Apache-1.1@, Apache License 1.1 + | Apache_2_0 -- ^ @Apache-2.0@, Apache License 2.0 + | APAFML -- ^ @APAFML@, Adobe Postscript AFM License + | APL_1_0 -- ^ @APL-1.0@, Adaptive Public License 1.0 + | APSL_1_0 -- ^ @APSL-1.0@, Apple Public Source License 1.0 + | APSL_1_1 -- ^ @APSL-1.1@, Apple Public Source License 1.1 + | APSL_1_2 -- ^ @APSL-1.2@, Apple Public Source License 1.2 + | APSL_2_0 -- ^ @APSL-2.0@, Apple Public Source License 2.0 + | Artistic_1_0_cl8 -- ^ @Artistic-1.0-cl8@, Artistic License 1.0 w/clause 8 + | Artistic_1_0_Perl -- ^ @Artistic-1.0-Perl@, Artistic License 1.0 (Perl) + | Artistic_1_0 -- ^ @Artistic-1.0@, Artistic License 1.0 + | Artistic_2_0 -- ^ @Artistic-2.0@, Artistic License 2.0 + | Bahyph -- ^ @Bahyph@, Bahyph License + | Barr -- ^ @Barr@, Barr License + | Beerware -- ^ @Beerware@, Beerware License + | BitTorrent_1_0 -- ^ @BitTorrent-1.0@, BitTorrent Open Source License v1.0 + | BitTorrent_1_1 -- ^ @BitTorrent-1.1@, BitTorrent Open Source License v1.1 + | Borceux -- ^ @Borceux@, Borceux license + | BSD_1_Clause -- ^ @BSD-1-Clause@, BSD 1-Clause License + | BSD_2_Clause_FreeBSD -- ^ @BSD-2-Clause-FreeBSD@, BSD 2-Clause FreeBSD License + | BSD_2_Clause_NetBSD -- ^ @BSD-2-Clause-NetBSD@, BSD 2-Clause NetBSD License + | BSD_2_Clause_Patent -- ^ @BSD-2-Clause-Patent@, BSD-2-Clause Plus Patent License + | BSD_2_Clause -- ^ @BSD-2-Clause@, BSD 2-Clause "Simplified" License + | BSD_3_Clause_Attribution -- ^ @BSD-3-Clause-Attribution@, BSD with attribution + | BSD_3_Clause_Clear -- ^ @BSD-3-Clause-Clear@, BSD 3-Clause Clear License + | BSD_3_Clause_LBNL -- ^ 
@BSD-3-Clause-LBNL@, Lawrence Berkeley National Labs BSD variant license + | BSD_3_Clause_No_Nuclear_License_2014 -- ^ @BSD-3-Clause-No-Nuclear-License-2014@, BSD 3-Clause No Nuclear License 2014 + | BSD_3_Clause_No_Nuclear_License -- ^ @BSD-3-Clause-No-Nuclear-License@, BSD 3-Clause No Nuclear License + | BSD_3_Clause_No_Nuclear_Warranty -- ^ @BSD-3-Clause-No-Nuclear-Warranty@, BSD 3-Clause No Nuclear Warranty + | BSD_3_Clause -- ^ @BSD-3-Clause@, BSD 3-Clause "New" or "Revised" License + | BSD_4_Clause_UC -- ^ @BSD-4-Clause-UC@, BSD-4-Clause (University of California-Specific) + | BSD_4_Clause -- ^ @BSD-4-Clause@, BSD 4-Clause "Original" or "Old" License + | BSD_Protection -- ^ @BSD-Protection@, BSD Protection License + | BSD_Source_Code -- ^ @BSD-Source-Code@, BSD Source Code Attribution + | BSL_1_0 -- ^ @BSL-1.0@, Boost Software License 1.0 + | Bzip2_1_0_5 -- ^ @bzip2-1.0.5@, bzip2 and libbzip2 License v1.0.5 + | Bzip2_1_0_6 -- ^ @bzip2-1.0.6@, bzip2 and libbzip2 License v1.0.6 + | Caldera -- ^ @Caldera@, Caldera License + | CATOSL_1_1 -- ^ @CATOSL-1.1@, Computer Associates Trusted Open Source License 1.1 + | CC_BY_1_0 -- ^ @CC-BY-1.0@, Creative Commons Attribution 1.0 Generic + | CC_BY_2_0 -- ^ @CC-BY-2.0@, Creative Commons Attribution 2.0 Generic + | CC_BY_2_5 -- ^ @CC-BY-2.5@, Creative Commons Attribution 2.5 Generic + | CC_BY_3_0 -- ^ @CC-BY-3.0@, Creative Commons Attribution 3.0 Unported + | CC_BY_4_0 -- ^ @CC-BY-4.0@, Creative Commons Attribution 4.0 International + | CC_BY_NC_1_0 -- ^ @CC-BY-NC-1.0@, Creative Commons Attribution Non Commercial 1.0 Generic + | CC_BY_NC_2_0 -- ^ @CC-BY-NC-2.0@, Creative Commons Attribution Non Commercial 2.0 Generic + | CC_BY_NC_2_5 -- ^ @CC-BY-NC-2.5@, Creative Commons Attribution Non Commercial 2.5 Generic + | CC_BY_NC_3_0 -- ^ @CC-BY-NC-3.0@, Creative Commons Attribution Non Commercial 3.0 Unported + | CC_BY_NC_4_0 -- ^ @CC-BY-NC-4.0@, Creative Commons Attribution Non Commercial 4.0 International + | CC_BY_NC_ND_1_0 -- 
^ @CC-BY-NC-ND-1.0@, Creative Commons Attribution Non Commercial No Derivatives 1.0 Generic + | CC_BY_NC_ND_2_0 -- ^ @CC-BY-NC-ND-2.0@, Creative Commons Attribution Non Commercial No Derivatives 2.0 Generic + | CC_BY_NC_ND_2_5 -- ^ @CC-BY-NC-ND-2.5@, Creative Commons Attribution Non Commercial No Derivatives 2.5 Generic + | CC_BY_NC_ND_3_0 -- ^ @CC-BY-NC-ND-3.0@, Creative Commons Attribution Non Commercial No Derivatives 3.0 Unported + | CC_BY_NC_ND_4_0 -- ^ @CC-BY-NC-ND-4.0@, Creative Commons Attribution Non Commercial No Derivatives 4.0 International + | CC_BY_NC_SA_1_0 -- ^ @CC-BY-NC-SA-1.0@, Creative Commons Attribution Non Commercial Share Alike 1.0 Generic + | CC_BY_NC_SA_2_0 -- ^ @CC-BY-NC-SA-2.0@, Creative Commons Attribution Non Commercial Share Alike 2.0 Generic + | CC_BY_NC_SA_2_5 -- ^ @CC-BY-NC-SA-2.5@, Creative Commons Attribution Non Commercial Share Alike 2.5 Generic + | CC_BY_NC_SA_3_0 -- ^ @CC-BY-NC-SA-3.0@, Creative Commons Attribution Non Commercial Share Alike 3.0 Unported + | CC_BY_NC_SA_4_0 -- ^ @CC-BY-NC-SA-4.0@, Creative Commons Attribution Non Commercial Share Alike 4.0 International + | CC_BY_ND_1_0 -- ^ @CC-BY-ND-1.0@, Creative Commons Attribution No Derivatives 1.0 Generic + | CC_BY_ND_2_0 -- ^ @CC-BY-ND-2.0@, Creative Commons Attribution No Derivatives 2.0 Generic + | CC_BY_ND_2_5 -- ^ @CC-BY-ND-2.5@, Creative Commons Attribution No Derivatives 2.5 Generic + | CC_BY_ND_3_0 -- ^ @CC-BY-ND-3.0@, Creative Commons Attribution No Derivatives 3.0 Unported + | CC_BY_ND_4_0 -- ^ @CC-BY-ND-4.0@, Creative Commons Attribution No Derivatives 4.0 International + | CC_BY_SA_1_0 -- ^ @CC-BY-SA-1.0@, Creative Commons Attribution Share Alike 1.0 Generic + | CC_BY_SA_2_0 -- ^ @CC-BY-SA-2.0@, Creative Commons Attribution Share Alike 2.0 Generic + | CC_BY_SA_2_5 -- ^ @CC-BY-SA-2.5@, Creative Commons Attribution Share Alike 2.5 Generic + | CC_BY_SA_3_0 -- ^ @CC-BY-SA-3.0@, Creative Commons Attribution Share Alike 3.0 Unported + | CC_BY_SA_4_0 -- ^ 
@CC-BY-SA-4.0@, Creative Commons Attribution Share Alike 4.0 International + | CC0_1_0 -- ^ @CC0-1.0@, Creative Commons Zero v1.0 Universal + | CDDL_1_0 -- ^ @CDDL-1.0@, Common Development and Distribution License 1.0 + | CDDL_1_1 -- ^ @CDDL-1.1@, Common Development and Distribution License 1.1 + | CDLA_Permissive_1_0 -- ^ @CDLA-Permissive-1.0@, Community Data License Agreement Permissive 1.0 + | CDLA_Sharing_1_0 -- ^ @CDLA-Sharing-1.0@, Community Data License Agreement Sharing 1.0 + | CECILL_1_0 -- ^ @CECILL-1.0@, CeCILL Free Software License Agreement v1.0 + | CECILL_1_1 -- ^ @CECILL-1.1@, CeCILL Free Software License Agreement v1.1 + | CECILL_2_0 -- ^ @CECILL-2.0@, CeCILL Free Software License Agreement v2.0 + | CECILL_2_1 -- ^ @CECILL-2.1@, CeCILL Free Software License Agreement v2.1 + | CECILL_B -- ^ @CECILL-B@, CeCILL-B Free Software License Agreement + | CECILL_C -- ^ @CECILL-C@, CeCILL-C Free Software License Agreement + | ClArtistic -- ^ @ClArtistic@, Clarified Artistic License + | CNRI_Jython -- ^ @CNRI-Jython@, CNRI Jython License + | CNRI_Python_GPL_Compatible -- ^ @CNRI-Python-GPL-Compatible@, CNRI Python Open Source GPL Compatible License Agreement + | CNRI_Python -- ^ @CNRI-Python@, CNRI Python License + | Condor_1_1 -- ^ @Condor-1.1@, Condor Public License v1.1 + | CPAL_1_0 -- ^ @CPAL-1.0@, Common Public Attribution License 1.0 + | CPL_1_0 -- ^ @CPL-1.0@, Common Public License 1.0 + | CPOL_1_02 -- ^ @CPOL-1.02@, Code Project Open License 1.02 + | Crossword -- ^ @Crossword@, Crossword License + | CrystalStacker -- ^ @CrystalStacker@, CrystalStacker License + | CUA_OPL_1_0 -- ^ @CUA-OPL-1.0@, CUA Office Public License v1.0 + | Cube -- ^ @Cube@, Cube License + | Curl -- ^ @curl@, curl License + | D_FSL_1_0 -- ^ @D-FSL-1.0@, Deutsche Freie Software Lizenz + | Diffmark -- ^ @diffmark@, diffmark license + | DOC -- ^ @DOC@, DOC License + | Dotseqn -- ^ @Dotseqn@, Dotseqn License + | DSDP -- ^ @DSDP@, DSDP License + | Dvipdfm -- ^ @dvipdfm@, dvipdfm License 
+ | ECL_1_0 -- ^ @ECL-1.0@, Educational Community License v1.0 + | ECL_2_0 -- ^ @ECL-2.0@, Educational Community License v2.0 + | EFL_1_0 -- ^ @EFL-1.0@, Eiffel Forum License v1.0 + | EFL_2_0 -- ^ @EFL-2.0@, Eiffel Forum License v2.0 + | EGenix -- ^ @eGenix@, eGenix.com Public License 1.1.0 + | Entessa -- ^ @Entessa@, Entessa Public License v1.0 + | EPL_1_0 -- ^ @EPL-1.0@, Eclipse Public License 1.0 + | EPL_2_0 -- ^ @EPL-2.0@, Eclipse Public License 2.0 + | ErlPL_1_1 -- ^ @ErlPL-1.1@, Erlang Public License v1.1 + | EUDatagrid -- ^ @EUDatagrid@, EU DataGrid Software License + | EUPL_1_0 -- ^ @EUPL-1.0@, European Union Public License 1.0 + | EUPL_1_1 -- ^ @EUPL-1.1@, European Union Public License 1.1 + | EUPL_1_2 -- ^ @EUPL-1.2@, European Union Public License 1.2 + | Eurosym -- ^ @Eurosym@, Eurosym License + | Fair -- ^ @Fair@, Fair License + | Frameworx_1_0 -- ^ @Frameworx-1.0@, Frameworx Open License 1.0 + | FreeImage -- ^ @FreeImage@, FreeImage Public License v1.0 + | FSFAP -- ^ @FSFAP@, FSF All Permissive License + | FSFUL -- ^ @FSFUL@, FSF Unlimited License + | FSFULLR -- ^ @FSFULLR@, FSF Unlimited License (with License Retention) + | FTL -- ^ @FTL@, Freetype Project License + | GFDL_1_1_only -- ^ @GFDL-1.1-only@, GNU Free Documentation License v1.1 only + | GFDL_1_1_or_later -- ^ @GFDL-1.1-or-later@, GNU Free Documentation License v1.1 or later + | GFDL_1_2_only -- ^ @GFDL-1.2-only@, GNU Free Documentation License v1.2 only + | GFDL_1_2_or_later -- ^ @GFDL-1.2-or-later@, GNU Free Documentation License v1.2 or later + | GFDL_1_3_only -- ^ @GFDL-1.3-only@, GNU Free Documentation License v1.3 only + | GFDL_1_3_or_later -- ^ @GFDL-1.3-or-later@, GNU Free Documentation License v1.3 or later + | Giftware -- ^ @Giftware@, Giftware License + | GL2PS -- ^ @GL2PS@, GL2PS License + | Glide -- ^ @Glide@, 3dfx Glide License + | Glulxe -- ^ @Glulxe@, Glulxe License + | Gnuplot -- ^ @gnuplot@, gnuplot License + | GPL_1_0_only -- ^ @GPL-1.0-only@, GNU General Public License 
v1.0 only + | GPL_1_0_or_later -- ^ @GPL-1.0-or-later@, GNU General Public License v1.0 or later + | GPL_2_0_only -- ^ @GPL-2.0-only@, GNU General Public License v2.0 only + | GPL_2_0_or_later -- ^ @GPL-2.0-or-later@, GNU General Public License v2.0 or later + | GPL_3_0_only -- ^ @GPL-3.0-only@, GNU General Public License v3.0 only + | GPL_3_0_or_later -- ^ @GPL-3.0-or-later@, GNU General Public License v3.0 or later + | GSOAP_1_3b -- ^ @gSOAP-1.3b@, gSOAP Public License v1.3b + | HaskellReport -- ^ @HaskellReport@, Haskell Language Report License + | HPND -- ^ @HPND@, Historical Permission Notice and Disclaimer + | IBM_pibs -- ^ @IBM-pibs@, IBM PowerPC Initialization and Boot Software + | ICU -- ^ @ICU@, ICU License + | IJG -- ^ @IJG@, Independent JPEG Group License + | ImageMagick -- ^ @ImageMagick@, ImageMagick License + | IMatix -- ^ @iMatix@, iMatix Standard Function Library Agreement + | Imlib2 -- ^ @Imlib2@, Imlib2 License + | Info_ZIP -- ^ @Info-ZIP@, Info-ZIP License + | Intel_ACPI -- ^ @Intel-ACPI@, Intel ACPI Software License Agreement + | Intel -- ^ @Intel@, Intel Open Source License + | Interbase_1_0 -- ^ @Interbase-1.0@, Interbase Public License v1.0 + | IPA -- ^ @IPA@, IPA Font License + | IPL_1_0 -- ^ @IPL-1.0@, IBM Public License v1.0 + | ISC -- ^ @ISC@, ISC License + | JasPer_2_0 -- ^ @JasPer-2.0@, JasPer License + | JSON -- ^ @JSON@, JSON License + | LAL_1_2 -- ^ @LAL-1.2@, Licence Art Libre 1.2 + | LAL_1_3 -- ^ @LAL-1.3@, Licence Art Libre 1.3 + | Latex2e -- ^ @Latex2e@, Latex2e License + | Leptonica -- ^ @Leptonica@, Leptonica License + | LGPL_2_0_only -- ^ @LGPL-2.0-only@, GNU Library General Public License v2 only + | LGPL_2_0_or_later -- ^ @LGPL-2.0-or-later@, GNU Library General Public License v2 or later + | LGPL_2_1_only -- ^ @LGPL-2.1-only@, GNU Lesser General Public License v2.1 only + | LGPL_2_1_or_later -- ^ @LGPL-2.1-or-later@, GNU Lesser General Public License v2.1 or later + | LGPL_3_0_only -- ^ @LGPL-3.0-only@, GNU Lesser General 
Public License v3.0 only + | LGPL_3_0_or_later -- ^ @LGPL-3.0-or-later@, GNU Lesser General Public License v3.0 or later + | LGPLLR -- ^ @LGPLLR@, Lesser General Public License For Linguistic Resources + | Libpng -- ^ @Libpng@, libpng License + | Libtiff -- ^ @libtiff@, libtiff License + | LiLiQ_P_1_1 -- ^ @LiLiQ-P-1.1@, Licence Libre du Québec – Permissive version 1.1 + | LiLiQ_R_1_1 -- ^ @LiLiQ-R-1.1@, Licence Libre du Québec – Réciprocité version 1.1 + | LiLiQ_Rplus_1_1 -- ^ @LiLiQ-Rplus-1.1@, Licence Libre du Québec – Réciprocité forte version 1.1 + | Linux_OpenIB -- ^ @Linux-OpenIB@, Linux Kernel Variant of OpenIB.org license, SPDX License List 3.2 + | LPL_1_0 -- ^ @LPL-1.0@, Lucent Public License Version 1.0 + | LPL_1_02 -- ^ @LPL-1.02@, Lucent Public License v1.02 + | LPPL_1_0 -- ^ @LPPL-1.0@, LaTeX Project Public License v1.0 + | LPPL_1_1 -- ^ @LPPL-1.1@, LaTeX Project Public License v1.1 + | LPPL_1_2 -- ^ @LPPL-1.2@, LaTeX Project Public License v1.2 + | LPPL_1_3a -- ^ @LPPL-1.3a@, LaTeX Project Public License v1.3a + | LPPL_1_3c -- ^ @LPPL-1.3c@, LaTeX Project Public License v1.3c + | MakeIndex -- ^ @MakeIndex@, MakeIndex License + | MirOS -- ^ @MirOS@, MirOS License + | MIT_0 -- ^ @MIT-0@, MIT No Attribution, SPDX License List 3.2 + | MIT_advertising -- ^ @MIT-advertising@, Enlightenment License (e16) + | MIT_CMU -- ^ @MIT-CMU@, CMU License + | MIT_enna -- ^ @MIT-enna@, enna License + | MIT_feh -- ^ @MIT-feh@, feh License + | MIT -- ^ @MIT@, MIT License + | MITNFA -- ^ @MITNFA@, MIT +no-false-attribs license + | Motosoto -- ^ @Motosoto@, Motosoto License + | Mpich2 -- ^ @mpich2@, mpich2 License + | MPL_1_0 -- ^ @MPL-1.0@, Mozilla Public License 1.0 + | MPL_1_1 -- ^ @MPL-1.1@, Mozilla Public License 1.1 + | MPL_2_0_no_copyleft_exception -- ^ @MPL-2.0-no-copyleft-exception@, Mozilla Public License 2.0 (no copyleft exception) + | MPL_2_0 -- ^ @MPL-2.0@, Mozilla Public License 2.0 + | MS_PL -- ^ @MS-PL@, Microsoft Public License + | MS_RL -- ^ @MS-RL@, 
Microsoft Reciprocal License + | MTLL -- ^ @MTLL@, Matrix Template Library License + | Multics -- ^ @Multics@, Multics License + | Mup -- ^ @Mup@, Mup License + | NASA_1_3 -- ^ @NASA-1.3@, NASA Open Source Agreement 1.3 + | Naumen -- ^ @Naumen@, Naumen Public License + | NBPL_1_0 -- ^ @NBPL-1.0@, Net Boolean Public License v1 + | NCSA -- ^ @NCSA@, University of Illinois/NCSA Open Source License + | Net_SNMP -- ^ @Net-SNMP@, Net-SNMP License + | NetCDF -- ^ @NetCDF@, NetCDF license + | Newsletr -- ^ @Newsletr@, Newsletr License + | NGPL -- ^ @NGPL@, Nethack General Public License + | NLOD_1_0 -- ^ @NLOD-1.0@, Norwegian Licence for Open Government Data + | NLPL -- ^ @NLPL@, No Limit Public License + | Nokia -- ^ @Nokia@, Nokia Open Source License + | NOSL -- ^ @NOSL@, Netizen Open Source License + | Noweb -- ^ @Noweb@, Noweb License + | NPL_1_0 -- ^ @NPL-1.0@, Netscape Public License v1.0 + | NPL_1_1 -- ^ @NPL-1.1@, Netscape Public License v1.1 + | NPOSL_3_0 -- ^ @NPOSL-3.0@, Non-Profit Open Software License 3.0 + | NRL -- ^ @NRL@, NRL License + | NTP -- ^ @NTP@, NTP License + | OCCT_PL -- ^ @OCCT-PL@, Open CASCADE Technology Public License + | OCLC_2_0 -- ^ @OCLC-2.0@, OCLC Research Public License 2.0 + | ODbL_1_0 -- ^ @ODbL-1.0@, ODC Open Database License v1.0 + | ODC_By_1_0 -- ^ @ODC-By-1.0@, Open Data Commons Attribution License v1.0, SPDX License List 3.2 + | OFL_1_0 -- ^ @OFL-1.0@, SIL Open Font License 1.0 + | OFL_1_1 -- ^ @OFL-1.1@, SIL Open Font License 1.1 + | OGTSL -- ^ @OGTSL@, Open Group Test Suite License + | OLDAP_1_1 -- ^ @OLDAP-1.1@, Open LDAP Public License v1.1 + | OLDAP_1_2 -- ^ @OLDAP-1.2@, Open LDAP Public License v1.2 + | OLDAP_1_3 -- ^ @OLDAP-1.3@, Open LDAP Public License v1.3 + | OLDAP_1_4 -- ^ @OLDAP-1.4@, Open LDAP Public License v1.4 + | OLDAP_2_0_1 -- ^ @OLDAP-2.0.1@, Open LDAP Public License v2.0.1 + | OLDAP_2_0 -- ^ @OLDAP-2.0@, Open LDAP Public License v2.0 (or possibly 2.0A and 2.0B) + | OLDAP_2_1 -- ^ @OLDAP-2.1@, Open LDAP Public 
License v2.1 + | OLDAP_2_2_1 -- ^ @OLDAP-2.2.1@, Open LDAP Public License v2.2.1 + | OLDAP_2_2_2 -- ^ @OLDAP-2.2.2@, Open LDAP Public License 2.2.2 + | OLDAP_2_2 -- ^ @OLDAP-2.2@, Open LDAP Public License v2.2 + | OLDAP_2_3 -- ^ @OLDAP-2.3@, Open LDAP Public License v2.3 + | OLDAP_2_4 -- ^ @OLDAP-2.4@, Open LDAP Public License v2.4 + | OLDAP_2_5 -- ^ @OLDAP-2.5@, Open LDAP Public License v2.5 + | OLDAP_2_6 -- ^ @OLDAP-2.6@, Open LDAP Public License v2.6 + | OLDAP_2_7 -- ^ @OLDAP-2.7@, Open LDAP Public License v2.7 + | OLDAP_2_8 -- ^ @OLDAP-2.8@, Open LDAP Public License v2.8 + | OML -- ^ @OML@, Open Market License + | OpenSSL -- ^ @OpenSSL@, OpenSSL License + | OPL_1_0 -- ^ @OPL-1.0@, Open Public License v1.0 + | OSET_PL_2_1 -- ^ @OSET-PL-2.1@, OSET Public License version 2.1 + | OSL_1_0 -- ^ @OSL-1.0@, Open Software License 1.0 + | OSL_1_1 -- ^ @OSL-1.1@, Open Software License 1.1 + | OSL_2_0 -- ^ @OSL-2.0@, Open Software License 2.0 + | OSL_2_1 -- ^ @OSL-2.1@, Open Software License 2.1 + | OSL_3_0 -- ^ @OSL-3.0@, Open Software License 3.0 + | PDDL_1_0 -- ^ @PDDL-1.0@, ODC Public Domain Dedication & License 1.0 + | PHP_3_0 -- ^ @PHP-3.0@, PHP License v3.0 + | PHP_3_01 -- ^ @PHP-3.01@, PHP License v3.01 + | Plexus -- ^ @Plexus@, Plexus Classworlds License + | PostgreSQL -- ^ @PostgreSQL@, PostgreSQL License + | Psfrag -- ^ @psfrag@, psfrag License + | Psutils -- ^ @psutils@, psutils License + | Python_2_0 -- ^ @Python-2.0@, Python License 2.0 + | Qhull -- ^ @Qhull@, Qhull License + | QPL_1_0 -- ^ @QPL-1.0@, Q Public License 1.0 + | Rdisc -- ^ @Rdisc@, Rdisc License + | RHeCos_1_1 -- ^ @RHeCos-1.1@, Red Hat eCos Public License v1.1 + | RPL_1_1 -- ^ @RPL-1.1@, Reciprocal Public License 1.1 + | RPL_1_5 -- ^ @RPL-1.5@, Reciprocal Public License 1.5 + | RPSL_1_0 -- ^ @RPSL-1.0@, RealNetworks Public Source License v1.0 + | RSA_MD -- ^ @RSA-MD@, RSA Message-Digest License + | RSCPL -- ^ @RSCPL@, Ricoh Source Code Public License + | Ruby -- ^ @Ruby@, Ruby License + | 
SAX_PD -- ^ @SAX-PD@, Sax Public Domain Notice + | Saxpath -- ^ @Saxpath@, Saxpath License + | SCEA -- ^ @SCEA@, SCEA Shared Source License + | Sendmail -- ^ @Sendmail@, Sendmail License + | SGI_B_1_0 -- ^ @SGI-B-1.0@, SGI Free Software License B v1.0 + | SGI_B_1_1 -- ^ @SGI-B-1.1@, SGI Free Software License B v1.1 + | SGI_B_2_0 -- ^ @SGI-B-2.0@, SGI Free Software License B v2.0 + | SimPL_2_0 -- ^ @SimPL-2.0@, Simple Public License 2.0 + | SISSL_1_2 -- ^ @SISSL-1.2@, Sun Industry Standards Source License v1.2 + | SISSL -- ^ @SISSL@, Sun Industry Standards Source License v1.1 + | Sleepycat -- ^ @Sleepycat@, Sleepycat License + | SMLNJ -- ^ @SMLNJ@, Standard ML of New Jersey License + | SMPPL -- ^ @SMPPL@, Secure Messaging Protocol Public License + | SNIA -- ^ @SNIA@, SNIA Public License 1.1 + | Spencer_86 -- ^ @Spencer-86@, Spencer License 86 + | Spencer_94 -- ^ @Spencer-94@, Spencer License 94 + | Spencer_99 -- ^ @Spencer-99@, Spencer License 99 + | SPL_1_0 -- ^ @SPL-1.0@, Sun Public License v1.0 + | SugarCRM_1_1_3 -- ^ @SugarCRM-1.1.3@, SugarCRM Public License v1.1.3 + | SWL -- ^ @SWL@, Scheme Widget Library (SWL) Software License Agreement + | TCL -- ^ @TCL@, TCL/TK License + | TCP_wrappers -- ^ @TCP-wrappers@, TCP Wrappers License + | TMate -- ^ @TMate@, TMate Open Source License + | TORQUE_1_1 -- ^ @TORQUE-1.1@, TORQUE v2.5+ Software License v1.1 + | TOSL -- ^ @TOSL@, Trusster Open Source License + | TU_Berlin_1_0 -- ^ @TU-Berlin-1.0@, Technische Universitaet Berlin License 1.0, SPDX License List 3.2 + | TU_Berlin_2_0 -- ^ @TU-Berlin-2.0@, Technische Universitaet Berlin License 2.0, SPDX License List 3.2 + | Unicode_DFS_2015 -- ^ @Unicode-DFS-2015@, Unicode License Agreement - Data Files and Software (2015) + | Unicode_DFS_2016 -- ^ @Unicode-DFS-2016@, Unicode License Agreement - Data Files and Software (2016) + | Unicode_TOU -- ^ @Unicode-TOU@, Unicode Terms of Use + | Unlicense -- ^ @Unlicense@, The Unlicense + | UPL_1_0 -- ^ @UPL-1.0@, Universal Permissive 
License v1.0 + | Vim -- ^ @Vim@, Vim License + | VOSTROM -- ^ @VOSTROM@, VOSTROM Public License for Open Source + | VSL_1_0 -- ^ @VSL-1.0@, Vovida Software License v1.0 + | W3C_19980720 -- ^ @W3C-19980720@, W3C Software Notice and License (1998-07-20) + | W3C_20150513 -- ^ @W3C-20150513@, W3C Software Notice and Document License (2015-05-13) + | W3C -- ^ @W3C@, W3C Software Notice and License (2002-12-31) + | Watcom_1_0 -- ^ @Watcom-1.0@, Sybase Open Watcom Public License 1.0 + | Wsuipa -- ^ @Wsuipa@, Wsuipa License + | WTFPL -- ^ @WTFPL@, Do What The F*ck You Want To Public License + | X11 -- ^ @X11@, X11 License + | Xerox -- ^ @Xerox@, Xerox License + | XFree86_1_1 -- ^ @XFree86-1.1@, XFree86 License 1.1 + | Xinetd -- ^ @xinetd@, xinetd License + | Xnet -- ^ @Xnet@, X.Net License + | Xpp -- ^ @xpp@, XPP License + | XSkat -- ^ @XSkat@, XSkat License + | YPL_1_0 -- ^ @YPL-1.0@, Yahoo! Public License v1.0 + | YPL_1_1 -- ^ @YPL-1.1@, Yahoo! Public License v1.1 + | Zed -- ^ @Zed@, Zed License + | Zend_2_0 -- ^ @Zend-2.0@, Zend License v2.0 + | Zimbra_1_3 -- ^ @Zimbra-1.3@, Zimbra Public License v1.3 + | Zimbra_1_4 -- ^ @Zimbra-1.4@, Zimbra Public License v1.4 + | Zlib_acknowledgement -- ^ @zlib-acknowledgement@, zlib/libpng License with Acknowledgement + | Zlib -- ^ @Zlib@, zlib License + | ZPL_1_1 -- ^ @ZPL-1.1@, Zope Public License 1.1 + | ZPL_2_0 -- ^ @ZPL-2.0@, Zope Public License 2.0 + | ZPL_2_1 -- ^ @ZPL-2.1@, Zope Public License 2.1 + deriving (Eq, Ord, Enum, Bounded, Show, Read, Typeable, Data, Generic) + +instance Binary LicenseId + +instance Pretty LicenseId where + pretty = Disp.text . licenseId + +-- | +-- >>> eitherParsec "BSD-3-Clause" :: Either String LicenseId +-- Right BSD_3_Clause +-- +-- >>> eitherParsec "BSD3" :: Either String LicenseId +-- Left "...Unknown SPDX license identifier: 'BSD3' Do you mean BSD-3-Clause?" +-- +instance Parsec LicenseId where + parsec = do + n <- some $ P.satisfy $ \c -> isAsciiAlphaNum c || c == '-' || c == '.' 
+ v <- askCabalSpecVersion + maybe (fail $ "Unknown SPDX license identifier: '" ++ n ++ "' " ++ licenseIdMigrationMessage n) return $ + mkLicenseId (cabalSpecVersionToSPDXListVersion v) n + +instance NFData LicenseId where + rnf l = l `seq` () + +-- | Help message for migrating from non-SPDX license identifiers. +-- +-- Old 'License' is almost SPDX, except for 'BSD2', 'BSD3'. This function +-- suggests SPDX variant: +-- +-- >>> licenseIdMigrationMessage "BSD3" +-- "Do you mean BSD-3-Clause?" +-- +-- Also 'OtherLicense', 'AllRightsReserved', and 'PublicDomain' aren't +-- valid SPDX identifiers +-- +-- >>> traverse_ (print . licenseIdMigrationMessage) [ "OtherLicense", "AllRightsReserved", "PublicDomain" ] +-- "SPDX license list contains plenty of licenses. See https://spdx.org/licenses/. Also they can be combined into complex expressions with AND and OR." +-- "You can use NONE as a value of license field." +-- "Public Domain is a complex matter. See https://wiki.spdx.org/view/Legal_Team/Decisions/Dealing_with_Public_Domain_within_SPDX_Files. Consider using a proper license." +-- +-- SPDX License list version 3.0 introduced "-only" and "-or-later" variants for GNU family of licenses. +-- See +-- >>> licenseIdMigrationMessage "GPL-2.0" +-- "SPDX license list 3.0 deprecated suffixless variants of GNU family of licenses. Use GPL-2.0-only or GPL-2.0-or-later." +-- +-- For other common licenses their old license format coincides with the SPDX identifiers: +-- +-- >>> traverse eitherParsec ["GPL-2.0-only", "GPL-3.0-only", "LGPL-2.1-only", "MIT", "ISC", "MPL-2.0", "Apache-2.0"] :: Either String [LicenseId] +-- Right [GPL_2_0_only,GPL_3_0_only,LGPL_2_1_only,MIT,ISC,MPL_2_0,Apache_2_0] +-- +licenseIdMigrationMessage :: String -> String +licenseIdMigrationMessage = go where + go l | gnuVariant l = "SPDX license list 3.0 deprecated suffixless variants of GNU family of licenses. Use " ++ l ++ "-only or " ++ l ++ "-or-later." + go "BSD3" = "Do you mean BSD-3-Clause?" 
+ go "BSD2" = "Do you mean BSD-2-Clause?" + go "AllRightsReserved" = "You can use NONE as a value of license field." + go "OtherLicense" = "SPDX license list contains plenty of licenses. See https://spdx.org/licenses/. Also they can be combined into complex expressions with AND and OR." + go "PublicDomain" = "Public Domain is a complex matter. See https://wiki.spdx.org/view/Legal_Team/Decisions/Dealing_with_Public_Domain_within_SPDX_Files. Consider using a proper license." + + -- otherwise, we don't know + go _ = "" + + gnuVariant = flip elem ["GPL-2.0", "GPL-3.0", "LGPL-2.1", "LGPL-3.0", "AGPL-3.0" ] + +------------------------------------------------------------------------------- +-- License Data +------------------------------------------------------------------------------- + +-- | License SPDX identifier, e.g. @"BSD-3-Clause"@. +licenseId :: LicenseId -> String +licenseId NullBSD = "0BSD" +licenseId AAL = "AAL" +licenseId Abstyles = "Abstyles" +licenseId Adobe_2006 = "Adobe-2006" +licenseId Adobe_Glyph = "Adobe-Glyph" +licenseId ADSL = "ADSL" +licenseId AFL_1_1 = "AFL-1.1" +licenseId AFL_1_2 = "AFL-1.2" +licenseId AFL_2_0 = "AFL-2.0" +licenseId AFL_2_1 = "AFL-2.1" +licenseId AFL_3_0 = "AFL-3.0" +licenseId Afmparse = "Afmparse" +licenseId AGPL_1_0 = "AGPL-1.0" +licenseId AGPL_1_0_only = "AGPL-1.0-only" +licenseId AGPL_1_0_or_later = "AGPL-1.0-or-later" +licenseId AGPL_3_0_only = "AGPL-3.0-only" +licenseId AGPL_3_0_or_later = "AGPL-3.0-or-later" +licenseId Aladdin = "Aladdin" +licenseId AMDPLPA = "AMDPLPA" +licenseId AML = "AML" +licenseId AMPAS = "AMPAS" +licenseId ANTLR_PD = "ANTLR-PD" +licenseId Apache_1_0 = "Apache-1.0" +licenseId Apache_1_1 = "Apache-1.1" +licenseId Apache_2_0 = "Apache-2.0" +licenseId APAFML = "APAFML" +licenseId APL_1_0 = "APL-1.0" +licenseId APSL_1_0 = "APSL-1.0" +licenseId APSL_1_1 = "APSL-1.1" +licenseId APSL_1_2 = "APSL-1.2" +licenseId APSL_2_0 = "APSL-2.0" +licenseId Artistic_1_0_cl8 = "Artistic-1.0-cl8" +licenseId Artistic_1_0_Perl 
= "Artistic-1.0-Perl" +licenseId Artistic_1_0 = "Artistic-1.0" +licenseId Artistic_2_0 = "Artistic-2.0" +licenseId Bahyph = "Bahyph" +licenseId Barr = "Barr" +licenseId Beerware = "Beerware" +licenseId BitTorrent_1_0 = "BitTorrent-1.0" +licenseId BitTorrent_1_1 = "BitTorrent-1.1" +licenseId Borceux = "Borceux" +licenseId BSD_1_Clause = "BSD-1-Clause" +licenseId BSD_2_Clause_FreeBSD = "BSD-2-Clause-FreeBSD" +licenseId BSD_2_Clause_NetBSD = "BSD-2-Clause-NetBSD" +licenseId BSD_2_Clause_Patent = "BSD-2-Clause-Patent" +licenseId BSD_2_Clause = "BSD-2-Clause" +licenseId BSD_3_Clause_Attribution = "BSD-3-Clause-Attribution" +licenseId BSD_3_Clause_Clear = "BSD-3-Clause-Clear" +licenseId BSD_3_Clause_LBNL = "BSD-3-Clause-LBNL" +licenseId BSD_3_Clause_No_Nuclear_License_2014 = "BSD-3-Clause-No-Nuclear-License-2014" +licenseId BSD_3_Clause_No_Nuclear_License = "BSD-3-Clause-No-Nuclear-License" +licenseId BSD_3_Clause_No_Nuclear_Warranty = "BSD-3-Clause-No-Nuclear-Warranty" +licenseId BSD_3_Clause = "BSD-3-Clause" +licenseId BSD_4_Clause_UC = "BSD-4-Clause-UC" +licenseId BSD_4_Clause = "BSD-4-Clause" +licenseId BSD_Protection = "BSD-Protection" +licenseId BSD_Source_Code = "BSD-Source-Code" +licenseId BSL_1_0 = "BSL-1.0" +licenseId Bzip2_1_0_5 = "bzip2-1.0.5" +licenseId Bzip2_1_0_6 = "bzip2-1.0.6" +licenseId Caldera = "Caldera" +licenseId CATOSL_1_1 = "CATOSL-1.1" +licenseId CC_BY_1_0 = "CC-BY-1.0" +licenseId CC_BY_2_0 = "CC-BY-2.0" +licenseId CC_BY_2_5 = "CC-BY-2.5" +licenseId CC_BY_3_0 = "CC-BY-3.0" +licenseId CC_BY_4_0 = "CC-BY-4.0" +licenseId CC_BY_NC_1_0 = "CC-BY-NC-1.0" +licenseId CC_BY_NC_2_0 = "CC-BY-NC-2.0" +licenseId CC_BY_NC_2_5 = "CC-BY-NC-2.5" +licenseId CC_BY_NC_3_0 = "CC-BY-NC-3.0" +licenseId CC_BY_NC_4_0 = "CC-BY-NC-4.0" +licenseId CC_BY_NC_ND_1_0 = "CC-BY-NC-ND-1.0" +licenseId CC_BY_NC_ND_2_0 = "CC-BY-NC-ND-2.0" +licenseId CC_BY_NC_ND_2_5 = "CC-BY-NC-ND-2.5" +licenseId CC_BY_NC_ND_3_0 = "CC-BY-NC-ND-3.0" +licenseId CC_BY_NC_ND_4_0 = "CC-BY-NC-ND-4.0" 
+licenseId CC_BY_NC_SA_1_0 = "CC-BY-NC-SA-1.0" +licenseId CC_BY_NC_SA_2_0 = "CC-BY-NC-SA-2.0" +licenseId CC_BY_NC_SA_2_5 = "CC-BY-NC-SA-2.5" +licenseId CC_BY_NC_SA_3_0 = "CC-BY-NC-SA-3.0" +licenseId CC_BY_NC_SA_4_0 = "CC-BY-NC-SA-4.0" +licenseId CC_BY_ND_1_0 = "CC-BY-ND-1.0" +licenseId CC_BY_ND_2_0 = "CC-BY-ND-2.0" +licenseId CC_BY_ND_2_5 = "CC-BY-ND-2.5" +licenseId CC_BY_ND_3_0 = "CC-BY-ND-3.0" +licenseId CC_BY_ND_4_0 = "CC-BY-ND-4.0" +licenseId CC_BY_SA_1_0 = "CC-BY-SA-1.0" +licenseId CC_BY_SA_2_0 = "CC-BY-SA-2.0" +licenseId CC_BY_SA_2_5 = "CC-BY-SA-2.5" +licenseId CC_BY_SA_3_0 = "CC-BY-SA-3.0" +licenseId CC_BY_SA_4_0 = "CC-BY-SA-4.0" +licenseId CC0_1_0 = "CC0-1.0" +licenseId CDDL_1_0 = "CDDL-1.0" +licenseId CDDL_1_1 = "CDDL-1.1" +licenseId CDLA_Permissive_1_0 = "CDLA-Permissive-1.0" +licenseId CDLA_Sharing_1_0 = "CDLA-Sharing-1.0" +licenseId CECILL_1_0 = "CECILL-1.0" +licenseId CECILL_1_1 = "CECILL-1.1" +licenseId CECILL_2_0 = "CECILL-2.0" +licenseId CECILL_2_1 = "CECILL-2.1" +licenseId CECILL_B = "CECILL-B" +licenseId CECILL_C = "CECILL-C" +licenseId ClArtistic = "ClArtistic" +licenseId CNRI_Jython = "CNRI-Jython" +licenseId CNRI_Python_GPL_Compatible = "CNRI-Python-GPL-Compatible" +licenseId CNRI_Python = "CNRI-Python" +licenseId Condor_1_1 = "Condor-1.1" +licenseId CPAL_1_0 = "CPAL-1.0" +licenseId CPL_1_0 = "CPL-1.0" +licenseId CPOL_1_02 = "CPOL-1.02" +licenseId Crossword = "Crossword" +licenseId CrystalStacker = "CrystalStacker" +licenseId CUA_OPL_1_0 = "CUA-OPL-1.0" +licenseId Cube = "Cube" +licenseId Curl = "curl" +licenseId D_FSL_1_0 = "D-FSL-1.0" +licenseId Diffmark = "diffmark" +licenseId DOC = "DOC" +licenseId Dotseqn = "Dotseqn" +licenseId DSDP = "DSDP" +licenseId Dvipdfm = "dvipdfm" +licenseId ECL_1_0 = "ECL-1.0" +licenseId ECL_2_0 = "ECL-2.0" +licenseId EFL_1_0 = "EFL-1.0" +licenseId EFL_2_0 = "EFL-2.0" +licenseId EGenix = "eGenix" +licenseId Entessa = "Entessa" +licenseId EPL_1_0 = "EPL-1.0" +licenseId EPL_2_0 = "EPL-2.0" +licenseId ErlPL_1_1 = 
"ErlPL-1.1" +licenseId EUDatagrid = "EUDatagrid" +licenseId EUPL_1_0 = "EUPL-1.0" +licenseId EUPL_1_1 = "EUPL-1.1" +licenseId EUPL_1_2 = "EUPL-1.2" +licenseId Eurosym = "Eurosym" +licenseId Fair = "Fair" +licenseId Frameworx_1_0 = "Frameworx-1.0" +licenseId FreeImage = "FreeImage" +licenseId FSFAP = "FSFAP" +licenseId FSFUL = "FSFUL" +licenseId FSFULLR = "FSFULLR" +licenseId FTL = "FTL" +licenseId GFDL_1_1_only = "GFDL-1.1-only" +licenseId GFDL_1_1_or_later = "GFDL-1.1-or-later" +licenseId GFDL_1_2_only = "GFDL-1.2-only" +licenseId GFDL_1_2_or_later = "GFDL-1.2-or-later" +licenseId GFDL_1_3_only = "GFDL-1.3-only" +licenseId GFDL_1_3_or_later = "GFDL-1.3-or-later" +licenseId Giftware = "Giftware" +licenseId GL2PS = "GL2PS" +licenseId Glide = "Glide" +licenseId Glulxe = "Glulxe" +licenseId Gnuplot = "gnuplot" +licenseId GPL_1_0_only = "GPL-1.0-only" +licenseId GPL_1_0_or_later = "GPL-1.0-or-later" +licenseId GPL_2_0_only = "GPL-2.0-only" +licenseId GPL_2_0_or_later = "GPL-2.0-or-later" +licenseId GPL_3_0_only = "GPL-3.0-only" +licenseId GPL_3_0_or_later = "GPL-3.0-or-later" +licenseId GSOAP_1_3b = "gSOAP-1.3b" +licenseId HaskellReport = "HaskellReport" +licenseId HPND = "HPND" +licenseId IBM_pibs = "IBM-pibs" +licenseId ICU = "ICU" +licenseId IJG = "IJG" +licenseId ImageMagick = "ImageMagick" +licenseId IMatix = "iMatix" +licenseId Imlib2 = "Imlib2" +licenseId Info_ZIP = "Info-ZIP" +licenseId Intel_ACPI = "Intel-ACPI" +licenseId Intel = "Intel" +licenseId Interbase_1_0 = "Interbase-1.0" +licenseId IPA = "IPA" +licenseId IPL_1_0 = "IPL-1.0" +licenseId ISC = "ISC" +licenseId JasPer_2_0 = "JasPer-2.0" +licenseId JSON = "JSON" +licenseId LAL_1_2 = "LAL-1.2" +licenseId LAL_1_3 = "LAL-1.3" +licenseId Latex2e = "Latex2e" +licenseId Leptonica = "Leptonica" +licenseId LGPL_2_0_only = "LGPL-2.0-only" +licenseId LGPL_2_0_or_later = "LGPL-2.0-or-later" +licenseId LGPL_2_1_only = "LGPL-2.1-only" +licenseId LGPL_2_1_or_later = "LGPL-2.1-or-later" +licenseId LGPL_3_0_only = 
"LGPL-3.0-only" +licenseId LGPL_3_0_or_later = "LGPL-3.0-or-later" +licenseId LGPLLR = "LGPLLR" +licenseId Libpng = "Libpng" +licenseId Libtiff = "libtiff" +licenseId LiLiQ_P_1_1 = "LiLiQ-P-1.1" +licenseId LiLiQ_R_1_1 = "LiLiQ-R-1.1" +licenseId LiLiQ_Rplus_1_1 = "LiLiQ-Rplus-1.1" +licenseId Linux_OpenIB = "Linux-OpenIB" +licenseId LPL_1_0 = "LPL-1.0" +licenseId LPL_1_02 = "LPL-1.02" +licenseId LPPL_1_0 = "LPPL-1.0" +licenseId LPPL_1_1 = "LPPL-1.1" +licenseId LPPL_1_2 = "LPPL-1.2" +licenseId LPPL_1_3a = "LPPL-1.3a" +licenseId LPPL_1_3c = "LPPL-1.3c" +licenseId MakeIndex = "MakeIndex" +licenseId MirOS = "MirOS" +licenseId MIT_0 = "MIT-0" +licenseId MIT_advertising = "MIT-advertising" +licenseId MIT_CMU = "MIT-CMU" +licenseId MIT_enna = "MIT-enna" +licenseId MIT_feh = "MIT-feh" +licenseId MIT = "MIT" +licenseId MITNFA = "MITNFA" +licenseId Motosoto = "Motosoto" +licenseId Mpich2 = "mpich2" +licenseId MPL_1_0 = "MPL-1.0" +licenseId MPL_1_1 = "MPL-1.1" +licenseId MPL_2_0_no_copyleft_exception = "MPL-2.0-no-copyleft-exception" +licenseId MPL_2_0 = "MPL-2.0" +licenseId MS_PL = "MS-PL" +licenseId MS_RL = "MS-RL" +licenseId MTLL = "MTLL" +licenseId Multics = "Multics" +licenseId Mup = "Mup" +licenseId NASA_1_3 = "NASA-1.3" +licenseId Naumen = "Naumen" +licenseId NBPL_1_0 = "NBPL-1.0" +licenseId NCSA = "NCSA" +licenseId Net_SNMP = "Net-SNMP" +licenseId NetCDF = "NetCDF" +licenseId Newsletr = "Newsletr" +licenseId NGPL = "NGPL" +licenseId NLOD_1_0 = "NLOD-1.0" +licenseId NLPL = "NLPL" +licenseId Nokia = "Nokia" +licenseId NOSL = "NOSL" +licenseId Noweb = "Noweb" +licenseId NPL_1_0 = "NPL-1.0" +licenseId NPL_1_1 = "NPL-1.1" +licenseId NPOSL_3_0 = "NPOSL-3.0" +licenseId NRL = "NRL" +licenseId NTP = "NTP" +licenseId OCCT_PL = "OCCT-PL" +licenseId OCLC_2_0 = "OCLC-2.0" +licenseId ODbL_1_0 = "ODbL-1.0" +licenseId ODC_By_1_0 = "ODC-By-1.0" +licenseId OFL_1_0 = "OFL-1.0" +licenseId OFL_1_1 = "OFL-1.1" +licenseId OGTSL = "OGTSL" +licenseId OLDAP_1_1 = "OLDAP-1.1" +licenseId OLDAP_1_2 
= "OLDAP-1.2" +licenseId OLDAP_1_3 = "OLDAP-1.3" +licenseId OLDAP_1_4 = "OLDAP-1.4" +licenseId OLDAP_2_0_1 = "OLDAP-2.0.1" +licenseId OLDAP_2_0 = "OLDAP-2.0" +licenseId OLDAP_2_1 = "OLDAP-2.1" +licenseId OLDAP_2_2_1 = "OLDAP-2.2.1" +licenseId OLDAP_2_2_2 = "OLDAP-2.2.2" +licenseId OLDAP_2_2 = "OLDAP-2.2" +licenseId OLDAP_2_3 = "OLDAP-2.3" +licenseId OLDAP_2_4 = "OLDAP-2.4" +licenseId OLDAP_2_5 = "OLDAP-2.5" +licenseId OLDAP_2_6 = "OLDAP-2.6" +licenseId OLDAP_2_7 = "OLDAP-2.7" +licenseId OLDAP_2_8 = "OLDAP-2.8" +licenseId OML = "OML" +licenseId OpenSSL = "OpenSSL" +licenseId OPL_1_0 = "OPL-1.0" +licenseId OSET_PL_2_1 = "OSET-PL-2.1" +licenseId OSL_1_0 = "OSL-1.0" +licenseId OSL_1_1 = "OSL-1.1" +licenseId OSL_2_0 = "OSL-2.0" +licenseId OSL_2_1 = "OSL-2.1" +licenseId OSL_3_0 = "OSL-3.0" +licenseId PDDL_1_0 = "PDDL-1.0" +licenseId PHP_3_0 = "PHP-3.0" +licenseId PHP_3_01 = "PHP-3.01" +licenseId Plexus = "Plexus" +licenseId PostgreSQL = "PostgreSQL" +licenseId Psfrag = "psfrag" +licenseId Psutils = "psutils" +licenseId Python_2_0 = "Python-2.0" +licenseId Qhull = "Qhull" +licenseId QPL_1_0 = "QPL-1.0" +licenseId Rdisc = "Rdisc" +licenseId RHeCos_1_1 = "RHeCos-1.1" +licenseId RPL_1_1 = "RPL-1.1" +licenseId RPL_1_5 = "RPL-1.5" +licenseId RPSL_1_0 = "RPSL-1.0" +licenseId RSA_MD = "RSA-MD" +licenseId RSCPL = "RSCPL" +licenseId Ruby = "Ruby" +licenseId SAX_PD = "SAX-PD" +licenseId Saxpath = "Saxpath" +licenseId SCEA = "SCEA" +licenseId Sendmail = "Sendmail" +licenseId SGI_B_1_0 = "SGI-B-1.0" +licenseId SGI_B_1_1 = "SGI-B-1.1" +licenseId SGI_B_2_0 = "SGI-B-2.0" +licenseId SimPL_2_0 = "SimPL-2.0" +licenseId SISSL_1_2 = "SISSL-1.2" +licenseId SISSL = "SISSL" +licenseId Sleepycat = "Sleepycat" +licenseId SMLNJ = "SMLNJ" +licenseId SMPPL = "SMPPL" +licenseId SNIA = "SNIA" +licenseId Spencer_86 = "Spencer-86" +licenseId Spencer_94 = "Spencer-94" +licenseId Spencer_99 = "Spencer-99" +licenseId SPL_1_0 = "SPL-1.0" +licenseId SugarCRM_1_1_3 = "SugarCRM-1.1.3" +licenseId SWL = "SWL" 
+licenseId TCL = "TCL" +licenseId TCP_wrappers = "TCP-wrappers" +licenseId TMate = "TMate" +licenseId TORQUE_1_1 = "TORQUE-1.1" +licenseId TOSL = "TOSL" +licenseId TU_Berlin_1_0 = "TU-Berlin-1.0" +licenseId TU_Berlin_2_0 = "TU-Berlin-2.0" +licenseId Unicode_DFS_2015 = "Unicode-DFS-2015" +licenseId Unicode_DFS_2016 = "Unicode-DFS-2016" +licenseId Unicode_TOU = "Unicode-TOU" +licenseId Unlicense = "Unlicense" +licenseId UPL_1_0 = "UPL-1.0" +licenseId Vim = "Vim" +licenseId VOSTROM = "VOSTROM" +licenseId VSL_1_0 = "VSL-1.0" +licenseId W3C_19980720 = "W3C-19980720" +licenseId W3C_20150513 = "W3C-20150513" +licenseId W3C = "W3C" +licenseId Watcom_1_0 = "Watcom-1.0" +licenseId Wsuipa = "Wsuipa" +licenseId WTFPL = "WTFPL" +licenseId X11 = "X11" +licenseId Xerox = "Xerox" +licenseId XFree86_1_1 = "XFree86-1.1" +licenseId Xinetd = "xinetd" +licenseId Xnet = "Xnet" +licenseId Xpp = "xpp" +licenseId XSkat = "XSkat" +licenseId YPL_1_0 = "YPL-1.0" +licenseId YPL_1_1 = "YPL-1.1" +licenseId Zed = "Zed" +licenseId Zend_2_0 = "Zend-2.0" +licenseId Zimbra_1_3 = "Zimbra-1.3" +licenseId Zimbra_1_4 = "Zimbra-1.4" +licenseId Zlib_acknowledgement = "zlib-acknowledgement" +licenseId Zlib = "Zlib" +licenseId ZPL_1_1 = "ZPL-1.1" +licenseId ZPL_2_0 = "ZPL-2.0" +licenseId ZPL_2_1 = "ZPL-2.1" + +-- | License name, e.g. 
@"GNU General Public License v2.0 only"@ +licenseName :: LicenseId -> String +licenseName NullBSD = "BSD Zero Clause License" +licenseName AAL = "Attribution Assurance License" +licenseName Abstyles = "Abstyles License" +licenseName Adobe_2006 = "Adobe Systems Incorporated Source Code License Agreement" +licenseName Adobe_Glyph = "Adobe Glyph List License" +licenseName ADSL = "Amazon Digital Services License" +licenseName AFL_1_1 = "Academic Free License v1.1" +licenseName AFL_1_2 = "Academic Free License v1.2" +licenseName AFL_2_0 = "Academic Free License v2.0" +licenseName AFL_2_1 = "Academic Free License v2.1" +licenseName AFL_3_0 = "Academic Free License v3.0" +licenseName Afmparse = "Afmparse License" +licenseName AGPL_1_0 = "Affero General Public License v1.0" +licenseName AGPL_1_0_only = "Affero General Public License v1.0 only" +licenseName AGPL_1_0_or_later = "Affero General Public License v1.0 or later" +licenseName AGPL_3_0_only = "GNU Affero General Public License v3.0 only" +licenseName AGPL_3_0_or_later = "GNU Affero General Public License v3.0 or later" +licenseName Aladdin = "Aladdin Free Public License" +licenseName AMDPLPA = "AMD's plpa_map.c License" +licenseName AML = "Apple MIT License" +licenseName AMPAS = "Academy of Motion Picture Arts and Sciences BSD" +licenseName ANTLR_PD = "ANTLR Software Rights Notice" +licenseName Apache_1_0 = "Apache License 1.0" +licenseName Apache_1_1 = "Apache License 1.1" +licenseName Apache_2_0 = "Apache License 2.0" +licenseName APAFML = "Adobe Postscript AFM License" +licenseName APL_1_0 = "Adaptive Public License 1.0" +licenseName APSL_1_0 = "Apple Public Source License 1.0" +licenseName APSL_1_1 = "Apple Public Source License 1.1" +licenseName APSL_1_2 = "Apple Public Source License 1.2" +licenseName APSL_2_0 = "Apple Public Source License 2.0" +licenseName Artistic_1_0_cl8 = "Artistic License 1.0 w/clause 8" +licenseName Artistic_1_0_Perl = "Artistic License 1.0 (Perl)" +licenseName Artistic_1_0 = "Artistic 
License 1.0" +licenseName Artistic_2_0 = "Artistic License 2.0" +licenseName Bahyph = "Bahyph License" +licenseName Barr = "Barr License" +licenseName Beerware = "Beerware License" +licenseName BitTorrent_1_0 = "BitTorrent Open Source License v1.0" +licenseName BitTorrent_1_1 = "BitTorrent Open Source License v1.1" +licenseName Borceux = "Borceux license" +licenseName BSD_1_Clause = "BSD 1-Clause License" +licenseName BSD_2_Clause_FreeBSD = "BSD 2-Clause FreeBSD License" +licenseName BSD_2_Clause_NetBSD = "BSD 2-Clause NetBSD License" +licenseName BSD_2_Clause_Patent = "BSD-2-Clause Plus Patent License" +licenseName BSD_2_Clause = "BSD 2-Clause \"Simplified\" License" +licenseName BSD_3_Clause_Attribution = "BSD with attribution" +licenseName BSD_3_Clause_Clear = "BSD 3-Clause Clear License" +licenseName BSD_3_Clause_LBNL = "Lawrence Berkeley National Labs BSD variant license" +licenseName BSD_3_Clause_No_Nuclear_License_2014 = "BSD 3-Clause No Nuclear License 2014" +licenseName BSD_3_Clause_No_Nuclear_License = "BSD 3-Clause No Nuclear License" +licenseName BSD_3_Clause_No_Nuclear_Warranty = "BSD 3-Clause No Nuclear Warranty" +licenseName BSD_3_Clause = "BSD 3-Clause \"New\" or \"Revised\" License" +licenseName BSD_4_Clause_UC = "BSD-4-Clause (University of California-Specific)" +licenseName BSD_4_Clause = "BSD 4-Clause \"Original\" or \"Old\" License" +licenseName BSD_Protection = "BSD Protection License" +licenseName BSD_Source_Code = "BSD Source Code Attribution" +licenseName BSL_1_0 = "Boost Software License 1.0" +licenseName Bzip2_1_0_5 = "bzip2 and libbzip2 License v1.0.5" +licenseName Bzip2_1_0_6 = "bzip2 and libbzip2 License v1.0.6" +licenseName Caldera = "Caldera License" +licenseName CATOSL_1_1 = "Computer Associates Trusted Open Source License 1.1" +licenseName CC_BY_1_0 = "Creative Commons Attribution 1.0 Generic" +licenseName CC_BY_2_0 = "Creative Commons Attribution 2.0 Generic" +licenseName CC_BY_2_5 = "Creative Commons Attribution 2.5 Generic" 
+licenseName CC_BY_3_0 = "Creative Commons Attribution 3.0 Unported" +licenseName CC_BY_4_0 = "Creative Commons Attribution 4.0 International" +licenseName CC_BY_NC_1_0 = "Creative Commons Attribution Non Commercial 1.0 Generic" +licenseName CC_BY_NC_2_0 = "Creative Commons Attribution Non Commercial 2.0 Generic" +licenseName CC_BY_NC_2_5 = "Creative Commons Attribution Non Commercial 2.5 Generic" +licenseName CC_BY_NC_3_0 = "Creative Commons Attribution Non Commercial 3.0 Unported" +licenseName CC_BY_NC_4_0 = "Creative Commons Attribution Non Commercial 4.0 International" +licenseName CC_BY_NC_ND_1_0 = "Creative Commons Attribution Non Commercial No Derivatives 1.0 Generic" +licenseName CC_BY_NC_ND_2_0 = "Creative Commons Attribution Non Commercial No Derivatives 2.0 Generic" +licenseName CC_BY_NC_ND_2_5 = "Creative Commons Attribution Non Commercial No Derivatives 2.5 Generic" +licenseName CC_BY_NC_ND_3_0 = "Creative Commons Attribution Non Commercial No Derivatives 3.0 Unported" +licenseName CC_BY_NC_ND_4_0 = "Creative Commons Attribution Non Commercial No Derivatives 4.0 International" +licenseName CC_BY_NC_SA_1_0 = "Creative Commons Attribution Non Commercial Share Alike 1.0 Generic" +licenseName CC_BY_NC_SA_2_0 = "Creative Commons Attribution Non Commercial Share Alike 2.0 Generic" +licenseName CC_BY_NC_SA_2_5 = "Creative Commons Attribution Non Commercial Share Alike 2.5 Generic" +licenseName CC_BY_NC_SA_3_0 = "Creative Commons Attribution Non Commercial Share Alike 3.0 Unported" +licenseName CC_BY_NC_SA_4_0 = "Creative Commons Attribution Non Commercial Share Alike 4.0 International" +licenseName CC_BY_ND_1_0 = "Creative Commons Attribution No Derivatives 1.0 Generic" +licenseName CC_BY_ND_2_0 = "Creative Commons Attribution No Derivatives 2.0 Generic" +licenseName CC_BY_ND_2_5 = "Creative Commons Attribution No Derivatives 2.5 Generic" +licenseName CC_BY_ND_3_0 = "Creative Commons Attribution No Derivatives 3.0 Unported" +licenseName CC_BY_ND_4_0 = 
"Creative Commons Attribution No Derivatives 4.0 International" +licenseName CC_BY_SA_1_0 = "Creative Commons Attribution Share Alike 1.0 Generic" +licenseName CC_BY_SA_2_0 = "Creative Commons Attribution Share Alike 2.0 Generic" +licenseName CC_BY_SA_2_5 = "Creative Commons Attribution Share Alike 2.5 Generic" +licenseName CC_BY_SA_3_0 = "Creative Commons Attribution Share Alike 3.0 Unported" +licenseName CC_BY_SA_4_0 = "Creative Commons Attribution Share Alike 4.0 International" +licenseName CC0_1_0 = "Creative Commons Zero v1.0 Universal" +licenseName CDDL_1_0 = "Common Development and Distribution License 1.0" +licenseName CDDL_1_1 = "Common Development and Distribution License 1.1" +licenseName CDLA_Permissive_1_0 = "Community Data License Agreement Permissive 1.0" +licenseName CDLA_Sharing_1_0 = "Community Data License Agreement Sharing 1.0" +licenseName CECILL_1_0 = "CeCILL Free Software License Agreement v1.0" +licenseName CECILL_1_1 = "CeCILL Free Software License Agreement v1.1" +licenseName CECILL_2_0 = "CeCILL Free Software License Agreement v2.0" +licenseName CECILL_2_1 = "CeCILL Free Software License Agreement v2.1" +licenseName CECILL_B = "CeCILL-B Free Software License Agreement" +licenseName CECILL_C = "CeCILL-C Free Software License Agreement" +licenseName ClArtistic = "Clarified Artistic License" +licenseName CNRI_Jython = "CNRI Jython License" +licenseName CNRI_Python_GPL_Compatible = "CNRI Python Open Source GPL Compatible License Agreement" +licenseName CNRI_Python = "CNRI Python License" +licenseName Condor_1_1 = "Condor Public License v1.1" +licenseName CPAL_1_0 = "Common Public Attribution License 1.0" +licenseName CPL_1_0 = "Common Public License 1.0" +licenseName CPOL_1_02 = "Code Project Open License 1.02" +licenseName Crossword = "Crossword License" +licenseName CrystalStacker = "CrystalStacker License" +licenseName CUA_OPL_1_0 = "CUA Office Public License v1.0" +licenseName Cube = "Cube License" +licenseName Curl = "curl License" 
+licenseName D_FSL_1_0 = "Deutsche Freie Software Lizenz" +licenseName Diffmark = "diffmark license" +licenseName DOC = "DOC License" +licenseName Dotseqn = "Dotseqn License" +licenseName DSDP = "DSDP License" +licenseName Dvipdfm = "dvipdfm License" +licenseName ECL_1_0 = "Educational Community License v1.0" +licenseName ECL_2_0 = "Educational Community License v2.0" +licenseName EFL_1_0 = "Eiffel Forum License v1.0" +licenseName EFL_2_0 = "Eiffel Forum License v2.0" +licenseName EGenix = "eGenix.com Public License 1.1.0" +licenseName Entessa = "Entessa Public License v1.0" +licenseName EPL_1_0 = "Eclipse Public License 1.0" +licenseName EPL_2_0 = "Eclipse Public License 2.0" +licenseName ErlPL_1_1 = "Erlang Public License v1.1" +licenseName EUDatagrid = "EU DataGrid Software License" +licenseName EUPL_1_0 = "European Union Public License 1.0" +licenseName EUPL_1_1 = "European Union Public License 1.1" +licenseName EUPL_1_2 = "European Union Public License 1.2" +licenseName Eurosym = "Eurosym License" +licenseName Fair = "Fair License" +licenseName Frameworx_1_0 = "Frameworx Open License 1.0" +licenseName FreeImage = "FreeImage Public License v1.0" +licenseName FSFAP = "FSF All Permissive License" +licenseName FSFUL = "FSF Unlimited License" +licenseName FSFULLR = "FSF Unlimited License (with License Retention)" +licenseName FTL = "Freetype Project License" +licenseName GFDL_1_1_only = "GNU Free Documentation License v1.1 only" +licenseName GFDL_1_1_or_later = "GNU Free Documentation License v1.1 or later" +licenseName GFDL_1_2_only = "GNU Free Documentation License v1.2 only" +licenseName GFDL_1_2_or_later = "GNU Free Documentation License v1.2 or later" +licenseName GFDL_1_3_only = "GNU Free Documentation License v1.3 only" +licenseName GFDL_1_3_or_later = "GNU Free Documentation License v1.3 or later" +licenseName Giftware = "Giftware License" +licenseName GL2PS = "GL2PS License" +licenseName Glide = "3dfx Glide License" +licenseName Glulxe = "Glulxe License" 
+licenseName Gnuplot = "gnuplot License" +licenseName GPL_1_0_only = "GNU General Public License v1.0 only" +licenseName GPL_1_0_or_later = "GNU General Public License v1.0 or later" +licenseName GPL_2_0_only = "GNU General Public License v2.0 only" +licenseName GPL_2_0_or_later = "GNU General Public License v2.0 or later" +licenseName GPL_3_0_only = "GNU General Public License v3.0 only" +licenseName GPL_3_0_or_later = "GNU General Public License v3.0 or later" +licenseName GSOAP_1_3b = "gSOAP Public License v1.3b" +licenseName HaskellReport = "Haskell Language Report License" +licenseName HPND = "Historical Permission Notice and Disclaimer" +licenseName IBM_pibs = "IBM PowerPC Initialization and Boot Software" +licenseName ICU = "ICU License" +licenseName IJG = "Independent JPEG Group License" +licenseName ImageMagick = "ImageMagick License" +licenseName IMatix = "iMatix Standard Function Library Agreement" +licenseName Imlib2 = "Imlib2 License" +licenseName Info_ZIP = "Info-ZIP License" +licenseName Intel_ACPI = "Intel ACPI Software License Agreement" +licenseName Intel = "Intel Open Source License" +licenseName Interbase_1_0 = "Interbase Public License v1.0" +licenseName IPA = "IPA Font License" +licenseName IPL_1_0 = "IBM Public License v1.0" +licenseName ISC = "ISC License" +licenseName JasPer_2_0 = "JasPer License" +licenseName JSON = "JSON License" +licenseName LAL_1_2 = "Licence Art Libre 1.2" +licenseName LAL_1_3 = "Licence Art Libre 1.3" +licenseName Latex2e = "Latex2e License" +licenseName Leptonica = "Leptonica License" +licenseName LGPL_2_0_only = "GNU Library General Public License v2 only" +licenseName LGPL_2_0_or_later = "GNU Library General Public License v2 or later" +licenseName LGPL_2_1_only = "GNU Lesser General Public License v2.1 only" +licenseName LGPL_2_1_or_later = "GNU Lesser General Public License v2.1 or later" +licenseName LGPL_3_0_only = "GNU Lesser General Public License v3.0 only" +licenseName LGPL_3_0_or_later = "GNU Lesser 
General Public License v3.0 or later" +licenseName LGPLLR = "Lesser General Public License For Linguistic Resources" +licenseName Libpng = "libpng License" +licenseName Libtiff = "libtiff License" +licenseName LiLiQ_P_1_1 = "Licence Libre du Qu\233bec \8211 Permissive version 1.1" +licenseName LiLiQ_R_1_1 = "Licence Libre du Qu\233bec \8211 R\233ciprocit\233 version 1.1" +licenseName LiLiQ_Rplus_1_1 = "Licence Libre du Qu\233bec \8211 R\233ciprocit\233 forte version 1.1" +licenseName Linux_OpenIB = "Linux Kernel Variant of OpenIB.org license" +licenseName LPL_1_0 = "Lucent Public License Version 1.0" +licenseName LPL_1_02 = "Lucent Public License v1.02" +licenseName LPPL_1_0 = "LaTeX Project Public License v1.0" +licenseName LPPL_1_1 = "LaTeX Project Public License v1.1" +licenseName LPPL_1_2 = "LaTeX Project Public License v1.2" +licenseName LPPL_1_3a = "LaTeX Project Public License v1.3a" +licenseName LPPL_1_3c = "LaTeX Project Public License v1.3c" +licenseName MakeIndex = "MakeIndex License" +licenseName MirOS = "MirOS License" +licenseName MIT_0 = "MIT No Attribution" +licenseName MIT_advertising = "Enlightenment License (e16)" +licenseName MIT_CMU = "CMU License" +licenseName MIT_enna = "enna License" +licenseName MIT_feh = "feh License" +licenseName MIT = "MIT License" +licenseName MITNFA = "MIT +no-false-attribs license" +licenseName Motosoto = "Motosoto License" +licenseName Mpich2 = "mpich2 License" +licenseName MPL_1_0 = "Mozilla Public License 1.0" +licenseName MPL_1_1 = "Mozilla Public License 1.1" +licenseName MPL_2_0_no_copyleft_exception = "Mozilla Public License 2.0 (no copyleft exception)" +licenseName MPL_2_0 = "Mozilla Public License 2.0" +licenseName MS_PL = "Microsoft Public License" +licenseName MS_RL = "Microsoft Reciprocal License" +licenseName MTLL = "Matrix Template Library License" +licenseName Multics = "Multics License" +licenseName Mup = "Mup License" +licenseName NASA_1_3 = "NASA Open Source Agreement 1.3" +licenseName Naumen = 
"Naumen Public License" +licenseName NBPL_1_0 = "Net Boolean Public License v1" +licenseName NCSA = "University of Illinois/NCSA Open Source License" +licenseName Net_SNMP = "Net-SNMP License" +licenseName NetCDF = "NetCDF license" +licenseName Newsletr = "Newsletr License" +licenseName NGPL = "Nethack General Public License" +licenseName NLOD_1_0 = "Norwegian Licence for Open Government Data" +licenseName NLPL = "No Limit Public License" +licenseName Nokia = "Nokia Open Source License" +licenseName NOSL = "Netizen Open Source License" +licenseName Noweb = "Noweb License" +licenseName NPL_1_0 = "Netscape Public License v1.0" +licenseName NPL_1_1 = "Netscape Public License v1.1" +licenseName NPOSL_3_0 = "Non-Profit Open Software License 3.0" +licenseName NRL = "NRL License" +licenseName NTP = "NTP License" +licenseName OCCT_PL = "Open CASCADE Technology Public License" +licenseName OCLC_2_0 = "OCLC Research Public License 2.0" +licenseName ODbL_1_0 = "ODC Open Database License v1.0" +licenseName ODC_By_1_0 = "Open Data Commons Attribution License v1.0" +licenseName OFL_1_0 = "SIL Open Font License 1.0" +licenseName OFL_1_1 = "SIL Open Font License 1.1" +licenseName OGTSL = "Open Group Test Suite License" +licenseName OLDAP_1_1 = "Open LDAP Public License v1.1" +licenseName OLDAP_1_2 = "Open LDAP Public License v1.2" +licenseName OLDAP_1_3 = "Open LDAP Public License v1.3" +licenseName OLDAP_1_4 = "Open LDAP Public License v1.4" +licenseName OLDAP_2_0_1 = "Open LDAP Public License v2.0.1" +licenseName OLDAP_2_0 = "Open LDAP Public License v2.0 (or possibly 2.0A and 2.0B)" +licenseName OLDAP_2_1 = "Open LDAP Public License v2.1" +licenseName OLDAP_2_2_1 = "Open LDAP Public License v2.2.1" +licenseName OLDAP_2_2_2 = "Open LDAP Public License 2.2.2" +licenseName OLDAP_2_2 = "Open LDAP Public License v2.2" +licenseName OLDAP_2_3 = "Open LDAP Public License v2.3" +licenseName OLDAP_2_4 = "Open LDAP Public License v2.4" +licenseName OLDAP_2_5 = "Open LDAP Public License 
v2.5" +licenseName OLDAP_2_6 = "Open LDAP Public License v2.6" +licenseName OLDAP_2_7 = "Open LDAP Public License v2.7" +licenseName OLDAP_2_8 = "Open LDAP Public License v2.8" +licenseName OML = "Open Market License" +licenseName OpenSSL = "OpenSSL License" +licenseName OPL_1_0 = "Open Public License v1.0" +licenseName OSET_PL_2_1 = "OSET Public License version 2.1" +licenseName OSL_1_0 = "Open Software License 1.0" +licenseName OSL_1_1 = "Open Software License 1.1" +licenseName OSL_2_0 = "Open Software License 2.0" +licenseName OSL_2_1 = "Open Software License 2.1" +licenseName OSL_3_0 = "Open Software License 3.0" +licenseName PDDL_1_0 = "ODC Public Domain Dedication & License 1.0" +licenseName PHP_3_0 = "PHP License v3.0" +licenseName PHP_3_01 = "PHP License v3.01" +licenseName Plexus = "Plexus Classworlds License" +licenseName PostgreSQL = "PostgreSQL License" +licenseName Psfrag = "psfrag License" +licenseName Psutils = "psutils License" +licenseName Python_2_0 = "Python License 2.0" +licenseName Qhull = "Qhull License" +licenseName QPL_1_0 = "Q Public License 1.0" +licenseName Rdisc = "Rdisc License" +licenseName RHeCos_1_1 = "Red Hat eCos Public License v1.1" +licenseName RPL_1_1 = "Reciprocal Public License 1.1" +licenseName RPL_1_5 = "Reciprocal Public License 1.5" +licenseName RPSL_1_0 = "RealNetworks Public Source License v1.0" +licenseName RSA_MD = "RSA Message-Digest License " +licenseName RSCPL = "Ricoh Source Code Public License" +licenseName Ruby = "Ruby License" +licenseName SAX_PD = "Sax Public Domain Notice" +licenseName Saxpath = "Saxpath License" +licenseName SCEA = "SCEA Shared Source License" +licenseName Sendmail = "Sendmail License" +licenseName SGI_B_1_0 = "SGI Free Software License B v1.0" +licenseName SGI_B_1_1 = "SGI Free Software License B v1.1" +licenseName SGI_B_2_0 = "SGI Free Software License B v2.0" +licenseName SimPL_2_0 = "Simple Public License 2.0" +licenseName SISSL_1_2 = "Sun Industry Standards Source License v1.2" 
+licenseName SISSL = "Sun Industry Standards Source License v1.1" +licenseName Sleepycat = "Sleepycat License" +licenseName SMLNJ = "Standard ML of New Jersey License" +licenseName SMPPL = "Secure Messaging Protocol Public License" +licenseName SNIA = "SNIA Public License 1.1" +licenseName Spencer_86 = "Spencer License 86" +licenseName Spencer_94 = "Spencer License 94" +licenseName Spencer_99 = "Spencer License 99" +licenseName SPL_1_0 = "Sun Public License v1.0" +licenseName SugarCRM_1_1_3 = "SugarCRM Public License v1.1.3" +licenseName SWL = "Scheme Widget Library (SWL) Software License Agreement" +licenseName TCL = "TCL/TK License" +licenseName TCP_wrappers = "TCP Wrappers License" +licenseName TMate = "TMate Open Source License" +licenseName TORQUE_1_1 = "TORQUE v2.5+ Software License v1.1" +licenseName TOSL = "Trusster Open Source License" +licenseName TU_Berlin_1_0 = "Technische Universitaet Berlin License 1.0" +licenseName TU_Berlin_2_0 = "Technische Universitaet Berlin License 2.0" +licenseName Unicode_DFS_2015 = "Unicode License Agreement - Data Files and Software (2015)" +licenseName Unicode_DFS_2016 = "Unicode License Agreement - Data Files and Software (2016)" +licenseName Unicode_TOU = "Unicode Terms of Use" +licenseName Unlicense = "The Unlicense" +licenseName UPL_1_0 = "Universal Permissive License v1.0" +licenseName Vim = "Vim License" +licenseName VOSTROM = "VOSTROM Public License for Open Source" +licenseName VSL_1_0 = "Vovida Software License v1.0" +licenseName W3C_19980720 = "W3C Software Notice and License (1998-07-20)" +licenseName W3C_20150513 = "W3C Software Notice and Document License (2015-05-13)" +licenseName W3C = "W3C Software Notice and License (2002-12-31)" +licenseName Watcom_1_0 = "Sybase Open Watcom Public License 1.0" +licenseName Wsuipa = "Wsuipa License" +licenseName WTFPL = "Do What The F*ck You Want To Public License" +licenseName X11 = "X11 License" +licenseName Xerox = "Xerox License" +licenseName XFree86_1_1 = "XFree86 
License 1.1" +licenseName Xinetd = "xinetd License" +licenseName Xnet = "X.Net License" +licenseName Xpp = "XPP License" +licenseName XSkat = "XSkat License" +licenseName YPL_1_0 = "Yahoo! Public License v1.0" +licenseName YPL_1_1 = "Yahoo! Public License v1.1" +licenseName Zed = "Zed License" +licenseName Zend_2_0 = "Zend License v2.0" +licenseName Zimbra_1_3 = "Zimbra Public License v1.3" +licenseName Zimbra_1_4 = "Zimbra Public License v1.4" +licenseName Zlib_acknowledgement = "zlib/libpng License with Acknowledgement" +licenseName Zlib = "zlib License" +licenseName ZPL_1_1 = "Zope Public License 1.1" +licenseName ZPL_2_0 = "Zope Public License 2.0" +licenseName ZPL_2_1 = "Zope Public License 2.1" + +-- | Whether the license is approved by Open Source Initiative (OSI). +-- +-- See . +licenseIsOsiApproved :: LicenseId -> Bool +licenseIsOsiApproved NullBSD = False +licenseIsOsiApproved AAL = True +licenseIsOsiApproved Abstyles = False +licenseIsOsiApproved Adobe_2006 = False +licenseIsOsiApproved Adobe_Glyph = False +licenseIsOsiApproved ADSL = False +licenseIsOsiApproved AFL_1_1 = True +licenseIsOsiApproved AFL_1_2 = True +licenseIsOsiApproved AFL_2_0 = True +licenseIsOsiApproved AFL_2_1 = True +licenseIsOsiApproved AFL_3_0 = True +licenseIsOsiApproved Afmparse = False +licenseIsOsiApproved AGPL_1_0 = False +licenseIsOsiApproved AGPL_1_0_only = False +licenseIsOsiApproved AGPL_1_0_or_later = False +licenseIsOsiApproved AGPL_3_0_only = True +licenseIsOsiApproved AGPL_3_0_or_later = True +licenseIsOsiApproved Aladdin = False +licenseIsOsiApproved AMDPLPA = False +licenseIsOsiApproved AML = False +licenseIsOsiApproved AMPAS = False +licenseIsOsiApproved ANTLR_PD = False +licenseIsOsiApproved Apache_1_0 = False +licenseIsOsiApproved Apache_1_1 = True +licenseIsOsiApproved Apache_2_0 = True +licenseIsOsiApproved APAFML = False +licenseIsOsiApproved APL_1_0 = True +licenseIsOsiApproved APSL_1_0 = True +licenseIsOsiApproved APSL_1_1 = True +licenseIsOsiApproved APSL_1_2 
= True +licenseIsOsiApproved APSL_2_0 = True +licenseIsOsiApproved Artistic_1_0_cl8 = True +licenseIsOsiApproved Artistic_1_0_Perl = True +licenseIsOsiApproved Artistic_1_0 = True +licenseIsOsiApproved Artistic_2_0 = True +licenseIsOsiApproved Bahyph = False +licenseIsOsiApproved Barr = False +licenseIsOsiApproved Beerware = False +licenseIsOsiApproved BitTorrent_1_0 = False +licenseIsOsiApproved BitTorrent_1_1 = False +licenseIsOsiApproved Borceux = False +licenseIsOsiApproved BSD_1_Clause = False +licenseIsOsiApproved BSD_2_Clause_FreeBSD = False +licenseIsOsiApproved BSD_2_Clause_NetBSD = False +licenseIsOsiApproved BSD_2_Clause_Patent = True +licenseIsOsiApproved BSD_2_Clause = True +licenseIsOsiApproved BSD_3_Clause_Attribution = False +licenseIsOsiApproved BSD_3_Clause_Clear = False +licenseIsOsiApproved BSD_3_Clause_LBNL = False +licenseIsOsiApproved BSD_3_Clause_No_Nuclear_License_2014 = False +licenseIsOsiApproved BSD_3_Clause_No_Nuclear_License = False +licenseIsOsiApproved BSD_3_Clause_No_Nuclear_Warranty = False +licenseIsOsiApproved BSD_3_Clause = True +licenseIsOsiApproved BSD_4_Clause_UC = False +licenseIsOsiApproved BSD_4_Clause = False +licenseIsOsiApproved BSD_Protection = False +licenseIsOsiApproved BSD_Source_Code = False +licenseIsOsiApproved BSL_1_0 = True +licenseIsOsiApproved Bzip2_1_0_5 = False +licenseIsOsiApproved Bzip2_1_0_6 = False +licenseIsOsiApproved Caldera = False +licenseIsOsiApproved CATOSL_1_1 = True +licenseIsOsiApproved CC_BY_1_0 = False +licenseIsOsiApproved CC_BY_2_0 = False +licenseIsOsiApproved CC_BY_2_5 = False +licenseIsOsiApproved CC_BY_3_0 = False +licenseIsOsiApproved CC_BY_4_0 = False +licenseIsOsiApproved CC_BY_NC_1_0 = False +licenseIsOsiApproved CC_BY_NC_2_0 = False +licenseIsOsiApproved CC_BY_NC_2_5 = False +licenseIsOsiApproved CC_BY_NC_3_0 = False +licenseIsOsiApproved CC_BY_NC_4_0 = False +licenseIsOsiApproved CC_BY_NC_ND_1_0 = False +licenseIsOsiApproved CC_BY_NC_ND_2_0 = False +licenseIsOsiApproved 
CC_BY_NC_ND_2_5 = False +licenseIsOsiApproved CC_BY_NC_ND_3_0 = False +licenseIsOsiApproved CC_BY_NC_ND_4_0 = False +licenseIsOsiApproved CC_BY_NC_SA_1_0 = False +licenseIsOsiApproved CC_BY_NC_SA_2_0 = False +licenseIsOsiApproved CC_BY_NC_SA_2_5 = False +licenseIsOsiApproved CC_BY_NC_SA_3_0 = False +licenseIsOsiApproved CC_BY_NC_SA_4_0 = False +licenseIsOsiApproved CC_BY_ND_1_0 = False +licenseIsOsiApproved CC_BY_ND_2_0 = False +licenseIsOsiApproved CC_BY_ND_2_5 = False +licenseIsOsiApproved CC_BY_ND_3_0 = False +licenseIsOsiApproved CC_BY_ND_4_0 = False +licenseIsOsiApproved CC_BY_SA_1_0 = False +licenseIsOsiApproved CC_BY_SA_2_0 = False +licenseIsOsiApproved CC_BY_SA_2_5 = False +licenseIsOsiApproved CC_BY_SA_3_0 = False +licenseIsOsiApproved CC_BY_SA_4_0 = False +licenseIsOsiApproved CC0_1_0 = False +licenseIsOsiApproved CDDL_1_0 = True +licenseIsOsiApproved CDDL_1_1 = False +licenseIsOsiApproved CDLA_Permissive_1_0 = False +licenseIsOsiApproved CDLA_Sharing_1_0 = False +licenseIsOsiApproved CECILL_1_0 = False +licenseIsOsiApproved CECILL_1_1 = False +licenseIsOsiApproved CECILL_2_0 = False +licenseIsOsiApproved CECILL_2_1 = True +licenseIsOsiApproved CECILL_B = False +licenseIsOsiApproved CECILL_C = False +licenseIsOsiApproved ClArtistic = False +licenseIsOsiApproved CNRI_Jython = False +licenseIsOsiApproved CNRI_Python_GPL_Compatible = False +licenseIsOsiApproved CNRI_Python = True +licenseIsOsiApproved Condor_1_1 = False +licenseIsOsiApproved CPAL_1_0 = True +licenseIsOsiApproved CPL_1_0 = True +licenseIsOsiApproved CPOL_1_02 = False +licenseIsOsiApproved Crossword = False +licenseIsOsiApproved CrystalStacker = False +licenseIsOsiApproved CUA_OPL_1_0 = True +licenseIsOsiApproved Cube = False +licenseIsOsiApproved Curl = False +licenseIsOsiApproved D_FSL_1_0 = False +licenseIsOsiApproved Diffmark = False +licenseIsOsiApproved DOC = False +licenseIsOsiApproved Dotseqn = False +licenseIsOsiApproved DSDP = False +licenseIsOsiApproved Dvipdfm = False 
+licenseIsOsiApproved ECL_1_0 = True +licenseIsOsiApproved ECL_2_0 = True +licenseIsOsiApproved EFL_1_0 = True +licenseIsOsiApproved EFL_2_0 = True +licenseIsOsiApproved EGenix = False +licenseIsOsiApproved Entessa = True +licenseIsOsiApproved EPL_1_0 = True +licenseIsOsiApproved EPL_2_0 = True +licenseIsOsiApproved ErlPL_1_1 = False +licenseIsOsiApproved EUDatagrid = True +licenseIsOsiApproved EUPL_1_0 = False +licenseIsOsiApproved EUPL_1_1 = True +licenseIsOsiApproved EUPL_1_2 = True +licenseIsOsiApproved Eurosym = False +licenseIsOsiApproved Fair = True +licenseIsOsiApproved Frameworx_1_0 = True +licenseIsOsiApproved FreeImage = False +licenseIsOsiApproved FSFAP = False +licenseIsOsiApproved FSFUL = False +licenseIsOsiApproved FSFULLR = False +licenseIsOsiApproved FTL = False +licenseIsOsiApproved GFDL_1_1_only = False +licenseIsOsiApproved GFDL_1_1_or_later = False +licenseIsOsiApproved GFDL_1_2_only = False +licenseIsOsiApproved GFDL_1_2_or_later = False +licenseIsOsiApproved GFDL_1_3_only = False +licenseIsOsiApproved GFDL_1_3_or_later = False +licenseIsOsiApproved Giftware = False +licenseIsOsiApproved GL2PS = False +licenseIsOsiApproved Glide = False +licenseIsOsiApproved Glulxe = False +licenseIsOsiApproved Gnuplot = False +licenseIsOsiApproved GPL_1_0_only = False +licenseIsOsiApproved GPL_1_0_or_later = False +licenseIsOsiApproved GPL_2_0_only = True +licenseIsOsiApproved GPL_2_0_or_later = True +licenseIsOsiApproved GPL_3_0_only = True +licenseIsOsiApproved GPL_3_0_or_later = True +licenseIsOsiApproved GSOAP_1_3b = False +licenseIsOsiApproved HaskellReport = False +licenseIsOsiApproved HPND = True +licenseIsOsiApproved IBM_pibs = False +licenseIsOsiApproved ICU = False +licenseIsOsiApproved IJG = False +licenseIsOsiApproved ImageMagick = False +licenseIsOsiApproved IMatix = False +licenseIsOsiApproved Imlib2 = False +licenseIsOsiApproved Info_ZIP = False +licenseIsOsiApproved Intel_ACPI = False +licenseIsOsiApproved Intel = True +licenseIsOsiApproved 
Interbase_1_0 = False +licenseIsOsiApproved IPA = True +licenseIsOsiApproved IPL_1_0 = True +licenseIsOsiApproved ISC = True +licenseIsOsiApproved JasPer_2_0 = False +licenseIsOsiApproved JSON = False +licenseIsOsiApproved LAL_1_2 = False +licenseIsOsiApproved LAL_1_3 = False +licenseIsOsiApproved Latex2e = False +licenseIsOsiApproved Leptonica = False +licenseIsOsiApproved LGPL_2_0_only = True +licenseIsOsiApproved LGPL_2_0_or_later = True +licenseIsOsiApproved LGPL_2_1_only = True +licenseIsOsiApproved LGPL_2_1_or_later = True +licenseIsOsiApproved LGPL_3_0_only = True +licenseIsOsiApproved LGPL_3_0_or_later = True +licenseIsOsiApproved LGPLLR = False +licenseIsOsiApproved Libpng = False +licenseIsOsiApproved Libtiff = False +licenseIsOsiApproved LiLiQ_P_1_1 = True +licenseIsOsiApproved LiLiQ_R_1_1 = True +licenseIsOsiApproved LiLiQ_Rplus_1_1 = True +licenseIsOsiApproved Linux_OpenIB = False +licenseIsOsiApproved LPL_1_0 = True +licenseIsOsiApproved LPL_1_02 = True +licenseIsOsiApproved LPPL_1_0 = False +licenseIsOsiApproved LPPL_1_1 = False +licenseIsOsiApproved LPPL_1_2 = False +licenseIsOsiApproved LPPL_1_3a = False +licenseIsOsiApproved LPPL_1_3c = True +licenseIsOsiApproved MakeIndex = False +licenseIsOsiApproved MirOS = True +licenseIsOsiApproved MIT_0 = True +licenseIsOsiApproved MIT_advertising = False +licenseIsOsiApproved MIT_CMU = False +licenseIsOsiApproved MIT_enna = False +licenseIsOsiApproved MIT_feh = False +licenseIsOsiApproved MIT = True +licenseIsOsiApproved MITNFA = False +licenseIsOsiApproved Motosoto = True +licenseIsOsiApproved Mpich2 = False +licenseIsOsiApproved MPL_1_0 = True +licenseIsOsiApproved MPL_1_1 = True +licenseIsOsiApproved MPL_2_0_no_copyleft_exception = True +licenseIsOsiApproved MPL_2_0 = True +licenseIsOsiApproved MS_PL = True +licenseIsOsiApproved MS_RL = True +licenseIsOsiApproved MTLL = False +licenseIsOsiApproved Multics = True +licenseIsOsiApproved Mup = False +licenseIsOsiApproved NASA_1_3 = True +licenseIsOsiApproved 
Naumen = True +licenseIsOsiApproved NBPL_1_0 = False +licenseIsOsiApproved NCSA = True +licenseIsOsiApproved Net_SNMP = False +licenseIsOsiApproved NetCDF = False +licenseIsOsiApproved Newsletr = False +licenseIsOsiApproved NGPL = True +licenseIsOsiApproved NLOD_1_0 = False +licenseIsOsiApproved NLPL = False +licenseIsOsiApproved Nokia = True +licenseIsOsiApproved NOSL = False +licenseIsOsiApproved Noweb = False +licenseIsOsiApproved NPL_1_0 = False +licenseIsOsiApproved NPL_1_1 = False +licenseIsOsiApproved NPOSL_3_0 = True +licenseIsOsiApproved NRL = False +licenseIsOsiApproved NTP = True +licenseIsOsiApproved OCCT_PL = False +licenseIsOsiApproved OCLC_2_0 = True +licenseIsOsiApproved ODbL_1_0 = False +licenseIsOsiApproved ODC_By_1_0 = False +licenseIsOsiApproved OFL_1_0 = False +licenseIsOsiApproved OFL_1_1 = True +licenseIsOsiApproved OGTSL = True +licenseIsOsiApproved OLDAP_1_1 = False +licenseIsOsiApproved OLDAP_1_2 = False +licenseIsOsiApproved OLDAP_1_3 = False +licenseIsOsiApproved OLDAP_1_4 = False +licenseIsOsiApproved OLDAP_2_0_1 = False +licenseIsOsiApproved OLDAP_2_0 = False +licenseIsOsiApproved OLDAP_2_1 = False +licenseIsOsiApproved OLDAP_2_2_1 = False +licenseIsOsiApproved OLDAP_2_2_2 = False +licenseIsOsiApproved OLDAP_2_2 = False +licenseIsOsiApproved OLDAP_2_3 = False +licenseIsOsiApproved OLDAP_2_4 = False +licenseIsOsiApproved OLDAP_2_5 = False +licenseIsOsiApproved OLDAP_2_6 = False +licenseIsOsiApproved OLDAP_2_7 = False +licenseIsOsiApproved OLDAP_2_8 = False +licenseIsOsiApproved OML = False +licenseIsOsiApproved OpenSSL = False +licenseIsOsiApproved OPL_1_0 = False +licenseIsOsiApproved OSET_PL_2_1 = True +licenseIsOsiApproved OSL_1_0 = True +licenseIsOsiApproved OSL_1_1 = False +licenseIsOsiApproved OSL_2_0 = True +licenseIsOsiApproved OSL_2_1 = True +licenseIsOsiApproved OSL_3_0 = True +licenseIsOsiApproved PDDL_1_0 = False +licenseIsOsiApproved PHP_3_0 = True +licenseIsOsiApproved PHP_3_01 = False +licenseIsOsiApproved Plexus = False 
+licenseIsOsiApproved PostgreSQL = True +licenseIsOsiApproved Psfrag = False +licenseIsOsiApproved Psutils = False +licenseIsOsiApproved Python_2_0 = True +licenseIsOsiApproved Qhull = False +licenseIsOsiApproved QPL_1_0 = True +licenseIsOsiApproved Rdisc = False +licenseIsOsiApproved RHeCos_1_1 = False +licenseIsOsiApproved RPL_1_1 = True +licenseIsOsiApproved RPL_1_5 = True +licenseIsOsiApproved RPSL_1_0 = True +licenseIsOsiApproved RSA_MD = False +licenseIsOsiApproved RSCPL = True +licenseIsOsiApproved Ruby = False +licenseIsOsiApproved SAX_PD = False +licenseIsOsiApproved Saxpath = False +licenseIsOsiApproved SCEA = False +licenseIsOsiApproved Sendmail = False +licenseIsOsiApproved SGI_B_1_0 = False +licenseIsOsiApproved SGI_B_1_1 = False +licenseIsOsiApproved SGI_B_2_0 = False +licenseIsOsiApproved SimPL_2_0 = True +licenseIsOsiApproved SISSL_1_2 = False +licenseIsOsiApproved SISSL = True +licenseIsOsiApproved Sleepycat = True +licenseIsOsiApproved SMLNJ = False +licenseIsOsiApproved SMPPL = False +licenseIsOsiApproved SNIA = False +licenseIsOsiApproved Spencer_86 = False +licenseIsOsiApproved Spencer_94 = False +licenseIsOsiApproved Spencer_99 = False +licenseIsOsiApproved SPL_1_0 = True +licenseIsOsiApproved SugarCRM_1_1_3 = False +licenseIsOsiApproved SWL = False +licenseIsOsiApproved TCL = False +licenseIsOsiApproved TCP_wrappers = False +licenseIsOsiApproved TMate = False +licenseIsOsiApproved TORQUE_1_1 = False +licenseIsOsiApproved TOSL = False +licenseIsOsiApproved TU_Berlin_1_0 = False +licenseIsOsiApproved TU_Berlin_2_0 = False +licenseIsOsiApproved Unicode_DFS_2015 = False +licenseIsOsiApproved Unicode_DFS_2016 = False +licenseIsOsiApproved Unicode_TOU = False +licenseIsOsiApproved Unlicense = False +licenseIsOsiApproved UPL_1_0 = True +licenseIsOsiApproved Vim = False +licenseIsOsiApproved VOSTROM = False +licenseIsOsiApproved VSL_1_0 = True +licenseIsOsiApproved W3C_19980720 = False +licenseIsOsiApproved W3C_20150513 = False +licenseIsOsiApproved 
W3C = True +licenseIsOsiApproved Watcom_1_0 = True +licenseIsOsiApproved Wsuipa = False +licenseIsOsiApproved WTFPL = False +licenseIsOsiApproved X11 = False +licenseIsOsiApproved Xerox = False +licenseIsOsiApproved XFree86_1_1 = False +licenseIsOsiApproved Xinetd = False +licenseIsOsiApproved Xnet = True +licenseIsOsiApproved Xpp = False +licenseIsOsiApproved XSkat = False +licenseIsOsiApproved YPL_1_0 = False +licenseIsOsiApproved YPL_1_1 = False +licenseIsOsiApproved Zed = False +licenseIsOsiApproved Zend_2_0 = False +licenseIsOsiApproved Zimbra_1_3 = False +licenseIsOsiApproved Zimbra_1_4 = False +licenseIsOsiApproved Zlib_acknowledgement = False +licenseIsOsiApproved Zlib = True +licenseIsOsiApproved ZPL_1_1 = False +licenseIsOsiApproved ZPL_2_0 = True +licenseIsOsiApproved ZPL_2_1 = False + +------------------------------------------------------------------------------- +-- Creation +------------------------------------------------------------------------------- + +licenseIdList :: LicenseListVersion -> [LicenseId] +licenseIdList LicenseListVersion_3_0 = + [ AGPL_1_0 + ] + ++ bulkOfLicenses +licenseIdList LicenseListVersion_3_2 = + [ AGPL_1_0_only + , AGPL_1_0_or_later + , Linux_OpenIB + , MIT_0 + , ODC_By_1_0 + , TU_Berlin_1_0 + , TU_Berlin_2_0 + ] + ++ bulkOfLicenses + +-- | Create a 'LicenseId' from a 'String'. 
+mkLicenseId :: LicenseListVersion -> String -> Maybe LicenseId +mkLicenseId LicenseListVersion_3_0 s = Map.lookup s stringLookup_3_0 +mkLicenseId LicenseListVersion_3_2 s = Map.lookup s stringLookup_3_2 + +stringLookup_3_0 :: Map String LicenseId +stringLookup_3_0 = Map.fromList $ map (\i -> (licenseId i, i)) $ + licenseIdList LicenseListVersion_3_0 + +stringLookup_3_2 :: Map String LicenseId +stringLookup_3_2 = Map.fromList $ map (\i -> (licenseId i, i)) $ + licenseIdList LicenseListVersion_3_2 + +-- | Licenses in all SPDX License lists +bulkOfLicenses :: [LicenseId] +bulkOfLicenses = + [ NullBSD + , AAL + , Abstyles + , Adobe_2006 + , Adobe_Glyph + , ADSL + , AFL_1_1 + , AFL_1_2 + , AFL_2_0 + , AFL_2_1 + , AFL_3_0 + , Afmparse + , AGPL_3_0_only + , AGPL_3_0_or_later + , Aladdin + , AMDPLPA + , AML + , AMPAS + , ANTLR_PD + , Apache_1_0 + , Apache_1_1 + , Apache_2_0 + , APAFML + , APL_1_0 + , APSL_1_0 + , APSL_1_1 + , APSL_1_2 + , APSL_2_0 + , Artistic_1_0_cl8 + , Artistic_1_0_Perl + , Artistic_1_0 + , Artistic_2_0 + , Bahyph + , Barr + , Beerware + , BitTorrent_1_0 + , BitTorrent_1_1 + , Borceux + , BSD_1_Clause + , BSD_2_Clause_FreeBSD + , BSD_2_Clause_NetBSD + , BSD_2_Clause_Patent + , BSD_2_Clause + , BSD_3_Clause_Attribution + , BSD_3_Clause_Clear + , BSD_3_Clause_LBNL + , BSD_3_Clause_No_Nuclear_License_2014 + , BSD_3_Clause_No_Nuclear_License + , BSD_3_Clause_No_Nuclear_Warranty + , BSD_3_Clause + , BSD_4_Clause_UC + , BSD_4_Clause + , BSD_Protection + , BSD_Source_Code + , BSL_1_0 + , Bzip2_1_0_5 + , Bzip2_1_0_6 + , Caldera + , CATOSL_1_1 + , CC_BY_1_0 + , CC_BY_2_0 + , CC_BY_2_5 + , CC_BY_3_0 + , CC_BY_4_0 + , CC_BY_NC_1_0 + , CC_BY_NC_2_0 + , CC_BY_NC_2_5 + , CC_BY_NC_3_0 + , CC_BY_NC_4_0 + , CC_BY_NC_ND_1_0 + , CC_BY_NC_ND_2_0 + , CC_BY_NC_ND_2_5 + , CC_BY_NC_ND_3_0 + , CC_BY_NC_ND_4_0 + , CC_BY_NC_SA_1_0 + , CC_BY_NC_SA_2_0 + , CC_BY_NC_SA_2_5 + , CC_BY_NC_SA_3_0 + , CC_BY_NC_SA_4_0 + , CC_BY_ND_1_0 + , CC_BY_ND_2_0 + , CC_BY_ND_2_5 + , CC_BY_ND_3_0 + 
, CC_BY_ND_4_0 + , CC_BY_SA_1_0 + , CC_BY_SA_2_0 + , CC_BY_SA_2_5 + , CC_BY_SA_3_0 + , CC_BY_SA_4_0 + , CC0_1_0 + , CDDL_1_0 + , CDDL_1_1 + , CDLA_Permissive_1_0 + , CDLA_Sharing_1_0 + , CECILL_1_0 + , CECILL_1_1 + , CECILL_2_0 + , CECILL_2_1 + , CECILL_B + , CECILL_C + , ClArtistic + , CNRI_Jython + , CNRI_Python_GPL_Compatible + , CNRI_Python + , Condor_1_1 + , CPAL_1_0 + , CPL_1_0 + , CPOL_1_02 + , Crossword + , CrystalStacker + , CUA_OPL_1_0 + , Cube + , Curl + , D_FSL_1_0 + , Diffmark + , DOC + , Dotseqn + , DSDP + , Dvipdfm + , ECL_1_0 + , ECL_2_0 + , EFL_1_0 + , EFL_2_0 + , EGenix + , Entessa + , EPL_1_0 + , EPL_2_0 + , ErlPL_1_1 + , EUDatagrid + , EUPL_1_0 + , EUPL_1_1 + , EUPL_1_2 + , Eurosym + , Fair + , Frameworx_1_0 + , FreeImage + , FSFAP + , FSFUL + , FSFULLR + , FTL + , GFDL_1_1_only + , GFDL_1_1_or_later + , GFDL_1_2_only + , GFDL_1_2_or_later + , GFDL_1_3_only + , GFDL_1_3_or_later + , Giftware + , GL2PS + , Glide + , Glulxe + , Gnuplot + , GPL_1_0_only + , GPL_1_0_or_later + , GPL_2_0_only + , GPL_2_0_or_later + , GPL_3_0_only + , GPL_3_0_or_later + , GSOAP_1_3b + , HaskellReport + , HPND + , IBM_pibs + , ICU + , IJG + , ImageMagick + , IMatix + , Imlib2 + , Info_ZIP + , Intel_ACPI + , Intel + , Interbase_1_0 + , IPA + , IPL_1_0 + , ISC + , JasPer_2_0 + , JSON + , LAL_1_2 + , LAL_1_3 + , Latex2e + , Leptonica + , LGPL_2_0_only + , LGPL_2_0_or_later + , LGPL_2_1_only + , LGPL_2_1_or_later + , LGPL_3_0_only + , LGPL_3_0_or_later + , LGPLLR + , Libpng + , Libtiff + , LiLiQ_P_1_1 + , LiLiQ_R_1_1 + , LiLiQ_Rplus_1_1 + , LPL_1_0 + , LPL_1_02 + , LPPL_1_0 + , LPPL_1_1 + , LPPL_1_2 + , LPPL_1_3a + , LPPL_1_3c + , MakeIndex + , MirOS + , MIT_advertising + , MIT_CMU + , MIT_enna + , MIT_feh + , MIT + , MITNFA + , Motosoto + , Mpich2 + , MPL_1_0 + , MPL_1_1 + , MPL_2_0_no_copyleft_exception + , MPL_2_0 + , MS_PL + , MS_RL + , MTLL + , Multics + , Mup + , NASA_1_3 + , Naumen + , NBPL_1_0 + , NCSA + , Net_SNMP + , NetCDF + , Newsletr + , NGPL + , NLOD_1_0 + , 
NLPL + , Nokia + , NOSL + , Noweb + , NPL_1_0 + , NPL_1_1 + , NPOSL_3_0 + , NRL + , NTP + , OCCT_PL + , OCLC_2_0 + , ODbL_1_0 + , OFL_1_0 + , OFL_1_1 + , OGTSL + , OLDAP_1_1 + , OLDAP_1_2 + , OLDAP_1_3 + , OLDAP_1_4 + , OLDAP_2_0_1 + , OLDAP_2_0 + , OLDAP_2_1 + , OLDAP_2_2_1 + , OLDAP_2_2_2 + , OLDAP_2_2 + , OLDAP_2_3 + , OLDAP_2_4 + , OLDAP_2_5 + , OLDAP_2_6 + , OLDAP_2_7 + , OLDAP_2_8 + , OML + , OpenSSL + , OPL_1_0 + , OSET_PL_2_1 + , OSL_1_0 + , OSL_1_1 + , OSL_2_0 + , OSL_2_1 + , OSL_3_0 + , PDDL_1_0 + , PHP_3_0 + , PHP_3_01 + , Plexus + , PostgreSQL + , Psfrag + , Psutils + , Python_2_0 + , Qhull + , QPL_1_0 + , Rdisc + , RHeCos_1_1 + , RPL_1_1 + , RPL_1_5 + , RPSL_1_0 + , RSA_MD + , RSCPL + , Ruby + , SAX_PD + , Saxpath + , SCEA + , Sendmail + , SGI_B_1_0 + , SGI_B_1_1 + , SGI_B_2_0 + , SimPL_2_0 + , SISSL_1_2 + , SISSL + , Sleepycat + , SMLNJ + , SMPPL + , SNIA + , Spencer_86 + , Spencer_94 + , Spencer_99 + , SPL_1_0 + , SugarCRM_1_1_3 + , SWL + , TCL + , TCP_wrappers + , TMate + , TORQUE_1_1 + , TOSL + , Unicode_DFS_2015 + , Unicode_DFS_2016 + , Unicode_TOU + , Unlicense + , UPL_1_0 + , Vim + , VOSTROM + , VSL_1_0 + , W3C_19980720 + , W3C_20150513 + , W3C + , Watcom_1_0 + , Wsuipa + , WTFPL + , X11 + , Xerox + , XFree86_1_1 + , Xinetd + , Xnet + , Xpp + , XSkat + , YPL_1_0 + , YPL_1_1 + , Zed + , Zend_2_0 + , Zimbra_1_3 + , Zimbra_1_4 + , Zlib_acknowledgement + , Zlib + , ZPL_1_1 + , ZPL_2_0 + , ZPL_2_1 + ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseListVersion.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseListVersion.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseListVersion.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseListVersion.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,16 @@ +module Distribution.SPDX.LicenseListVersion 
( + LicenseListVersion (..), + cabalSpecVersionToSPDXListVersion, + ) where + +import Distribution.CabalSpecVersion + +-- | SPDX License List version @Cabal@ is aware of. +data LicenseListVersion + = LicenseListVersion_3_0 + | LicenseListVersion_3_2 + deriving (Eq, Ord, Show, Enum, Bounded) + +cabalSpecVersionToSPDXListVersion :: CabalSpecVersion -> LicenseListVersion +cabalSpecVersionToSPDXListVersion CabalSpecV2_4 = LicenseListVersion_3_2 +cabalSpecVersionToSPDXListVersion _ = LicenseListVersion_3_0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseReference.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseReference.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseReference.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/SPDX/LicenseReference.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,79 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.SPDX.LicenseReference ( + LicenseRef, + licenseRef, + licenseDocumentRef, + mkLicenseRef, + mkLicenseRef', + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Utils.Generic (isAsciiAlphaNum) +import Distribution.Pretty +import Distribution.Parsec.Class + +import qualified Distribution.Compat.CharParsing as P +import qualified Text.PrettyPrint as Disp + +-- | A user defined license reference denoted by @LicenseRef-[idstring]@ (for a license not on the SPDX License List); +data LicenseRef = LicenseRef + { _lrDocument :: !(Maybe String) + , _lrLicense :: !String + } + deriving (Show, Read, Eq, Ord, Typeable, Data, Generic) + +-- | License reference. +licenseRef :: LicenseRef -> String +licenseRef = _lrLicense + +-- | Document reference. 
+licenseDocumentRef :: LicenseRef -> Maybe String +licenseDocumentRef = _lrDocument + +instance Binary LicenseRef + +instance NFData LicenseRef where + rnf (LicenseRef d l) = rnf d `seq` rnf l + +instance Pretty LicenseRef where + pretty (LicenseRef Nothing l) = Disp.text "LicenseRef-" <<>> Disp.text l + pretty (LicenseRef (Just d) l) = + Disp.text "DocumentRef-" <<>> Disp.text d <<>> Disp.char ':' <<>> Disp.text "LicenseRef-" <<>> Disp.text l + +instance Parsec LicenseRef where + parsec = name <|> doc + where + name = do + _ <- P.string "LicenseRef-" + n <- some $ P.satisfy $ \c -> isAsciiAlphaNum c || c == '-' || c == '.' + pure (LicenseRef Nothing n) + + doc = do + _ <- P.string "DocumentRef-" + d <- some $ P.satisfy $ \c -> isAsciiAlphaNum c || c == '-' || c == '.' + _ <- P.char ':' + _ <- P.string "LicenseRef-" + n <- some $ P.satisfy $ \c -> isAsciiAlphaNum c || c == '-' || c == '.' + pure (LicenseRef (Just d) n) + +-- | Create 'LicenseRef' from optional document ref and name. +mkLicenseRef :: Maybe String -> String -> Maybe LicenseRef +mkLicenseRef d l = do + d' <- traverse checkIdString d + l' <- checkIdString l + pure (LicenseRef d' l') + where + checkIdString s + | all (\c -> isAsciiAlphaNum c || c == '-' || c == '.') s = Just s + | otherwise = Nothing + +-- | Like 'mkLicenseRef' but convert invalid characters into @-@. +mkLicenseRef' :: Maybe String -> String -> LicenseRef +mkLicenseRef' d l = LicenseRef (fmap f d) (f l) + where + f = map g + g c | isAsciiAlphaNum c || c == '-' || c == '.' 
= c + | otherwise = '-' diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/SPDX.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/SPDX.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/SPDX.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/SPDX.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,40 @@ +-- | This module implements SPDX specification version 2.1 with a version 3.0 license list. +-- +-- Specification is available on +module Distribution.SPDX ( + -- * License + License (..), + -- * License expression + LicenseExpression (..), + SimpleLicenseExpression (..), + simpleLicenseExpression, + -- * License identifier + LicenseId (..), + licenseId, + licenseName, + licenseIsOsiApproved, + mkLicenseId, + licenseIdList, + -- * License exception + LicenseExceptionId (..), + licenseExceptionId, + licenseExceptionName, + mkLicenseExceptionId, + licenseExceptionIdList, + -- * License reference + LicenseRef, + licenseRef, + licenseDocumentRef, + mkLicenseRef, + mkLicenseRef', + -- * License list version + LicenseListVersion (..), + cabalSpecVersionToSPDXListVersion, + ) where + +import Distribution.SPDX.LicenseExceptionId +import Distribution.SPDX.License +import Distribution.SPDX.LicenseId +import Distribution.SPDX.LicenseExpression +import Distribution.SPDX.LicenseReference +import Distribution.SPDX.LicenseListVersion diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/System.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/System.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/System.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/System.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,300 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE 
DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.System +-- Copyright : Duncan Coutts 2007-2008 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Cabal often needs to do slightly different things on specific platforms. You +-- probably know about the 'System.Info.os' however using that is very +-- inconvenient because it is a string and different Haskell implementations +-- do not agree on using the same strings for the same platforms! (In +-- particular see the controversy over \"windows\" vs \"mingw32\"). So to make it +-- more consistent and easy to use we have an 'OS' enumeration. +-- +module Distribution.System ( + -- * Operating System + OS(..), + buildOS, + + -- * Machine Architecture + Arch(..), + buildArch, + + -- * Platform is a pair of arch and OS + Platform(..), + buildPlatform, + platformFromTriple, + + -- * Internal + knownOSs, + knownArches, + + -- * Classification + ClassificationStrictness (..), + classifyOS, + classifyArch, + ) where + +import Prelude () +import Distribution.Compat.Prelude +import Control.Applicative (liftA2) + +import qualified System.Info (os, arch) +import Distribution.Utils.Generic (lowercase) + +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text + +import qualified Distribution.Compat.ReadP as Parse +import qualified Distribution.Compat.CharParsing as P +import qualified Text.PrettyPrint as Disp + +-- | How strict to be when classifying strings into the 'OS' and 'Arch' enums. +-- +-- The reason we have multiple ways to do the classification is because there +-- are two situations where we need to do it. +-- +-- For parsing OS and arch names in .cabal files we really want everyone to be +-- referring to the same or or arch by the same name. Variety is not a virtue +-- in this case. We don't mind about case though. 
+-- +-- For the System.Info.os\/arch different Haskell implementations use different +-- names for the same or\/arch. Also they tend to distinguish versions of an +-- OS\/arch which we just don't care about. +-- +-- The 'Compat' classification allows us to recognise aliases that are already +-- in common use but it allows us to distinguish them from the canonical name +-- which enables us to warn about such deprecated aliases. +-- +data ClassificationStrictness = Permissive | Compat | Strict + +-- ------------------------------------------------------------ +-- * Operating System +-- ------------------------------------------------------------ + +-- | These are the known OS names: Linux, Windows, OSX +-- ,FreeBSD, OpenBSD, NetBSD, DragonFly +-- ,Solaris, AIX, HPUX, IRIX +-- ,HaLVM ,Hurd ,IOS, Android,Ghcjs +-- +-- The following aliases can also be used:, +-- * Windows aliases: mingw32, win32, cygwin32 +-- * OSX alias: darwin +-- * Hurd alias: gnu +-- * FreeBSD alias: kfreebsdgnu +-- * Solaris alias: solaris2 +-- +data OS = Linux | Windows | OSX -- tier 1 desktop OSs + | FreeBSD | OpenBSD | NetBSD -- other free Unix OSs + | DragonFly + | Solaris | AIX | HPUX | IRIX -- ageing Unix OSs + | HaLVM -- bare metal / VMs / hypervisors + | Hurd -- GNU's microkernel + | IOS | Android -- mobile OSs + | Ghcjs + | OtherOS String + deriving (Eq, Generic, Ord, Show, Read, Typeable, Data) + +instance Binary OS + +instance NFData OS where rnf = genericRnf + +knownOSs :: [OS] +knownOSs = [Linux, Windows, OSX + ,FreeBSD, OpenBSD, NetBSD, DragonFly + ,Solaris, AIX, HPUX, IRIX + ,HaLVM + ,Hurd + ,IOS, Android + ,Ghcjs] + +osAliases :: ClassificationStrictness -> OS -> [String] +osAliases Permissive Windows = ["mingw32", "win32", "cygwin32"] +osAliases Compat Windows = ["mingw32", "win32"] +osAliases _ OSX = ["darwin"] +osAliases _ Hurd = ["gnu"] +osAliases Permissive FreeBSD = ["kfreebsdgnu"] +osAliases Compat FreeBSD = ["kfreebsdgnu"] +osAliases Permissive Solaris = ["solaris2"] 
+osAliases Compat Solaris = ["solaris2"] +osAliases _ Android = ["linux-android"] +osAliases _ _ = [] + +instance Pretty OS where + pretty (OtherOS name) = Disp.text name + pretty other = Disp.text (lowercase (show other)) + +instance Parsec OS where + parsec = classifyOS Compat <$> parsecIdent + +instance Text OS where + parse = fmap (classifyOS Compat) ident + +classifyOS :: ClassificationStrictness -> String -> OS +classifyOS strictness s = + fromMaybe (OtherOS s) $ lookup (lowercase s) osMap + where + osMap = [ (name, os) + | os <- knownOSs + , name <- display os : osAliases strictness os ] + +buildOS :: OS +buildOS = classifyOS Permissive System.Info.os + +-- ------------------------------------------------------------ +-- * Machine Architecture +-- ------------------------------------------------------------ + +-- | These are the known Arches: I386, X86_64, PPC, PPC64, Sparc, +-- Arm, AArch64, Mips, SH, IA64, S39, Alpha, Hppa, Rs6000, M68k, +-- Vax, and JavaScript. +-- +-- The following aliases can also be used: +-- * PPC alias: powerpc +-- * PPC64 alias : powerpc64, powerpc64le +-- * Sparc aliases: sparc64, sun4 +-- * Mips aliases: mipsel, mipseb +-- * Arm aliases: armeb, armel +-- * AArch64 aliases: arm64 +-- +data Arch = I386 | X86_64 | PPC | PPC64 | Sparc + | Arm | AArch64 | Mips | SH + | IA64 | S390 + | Alpha | Hppa | Rs6000 + | M68k | Vax + | JavaScript + | OtherArch String + deriving (Eq, Generic, Ord, Show, Read, Typeable, Data) + +instance Binary Arch + +instance NFData Arch where rnf = genericRnf + +knownArches :: [Arch] +knownArches = [I386, X86_64, PPC, PPC64, Sparc + ,Arm, AArch64, Mips, SH + ,IA64, S390 + ,Alpha, Hppa, Rs6000 + ,M68k, Vax + ,JavaScript] + +archAliases :: ClassificationStrictness -> Arch -> [String] +archAliases Strict _ = [] +archAliases Compat _ = [] +archAliases _ PPC = ["powerpc"] +archAliases _ PPC64 = ["powerpc64", "powerpc64le"] +archAliases _ Sparc = ["sparc64", "sun4"] +archAliases _ Mips = ["mipsel", "mipseb"] 
+archAliases _ Arm = ["armeb", "armel"] +archAliases _ AArch64 = ["arm64"] +archAliases _ _ = [] + +instance Pretty Arch where + pretty (OtherArch name) = Disp.text name + pretty other = Disp.text (lowercase (show other)) + +instance Parsec Arch where + parsec = classifyArch Strict <$> parsecIdent + +instance Text Arch where + parse = fmap (classifyArch Strict) ident + +classifyArch :: ClassificationStrictness -> String -> Arch +classifyArch strictness s = + fromMaybe (OtherArch s) $ lookup (lowercase s) archMap + where + archMap = [ (name, arch) + | arch <- knownArches + , name <- display arch : archAliases strictness arch ] + +buildArch :: Arch +buildArch = classifyArch Permissive System.Info.arch + +-- ------------------------------------------------------------ +-- * Platform +-- ------------------------------------------------------------ + +data Platform = Platform Arch OS + deriving (Eq, Generic, Ord, Show, Read, Typeable, Data) + +instance Binary Platform + +instance NFData Platform where rnf = genericRnf + +instance Pretty Platform where + pretty (Platform arch os) = pretty arch <<>> Disp.char '-' <<>> pretty os + +instance Parsec Platform where + parsec = do + arch <- parsecDashlessArch + _ <- P.char '-' + os <- parsec + return (Platform arch os) + where + parsecDashlessArch = classifyArch Strict <$> dashlessIdent + + dashlessIdent = liftA2 (:) firstChar rest + where + firstChar = P.satisfy isAlpha + rest = P.munch (\c -> isAlphaNum c || c == '_') + +instance Text Platform where + -- TODO: there are ambigious platforms like: `arch-word-os` + -- which could be parsed as + -- * Platform "arch-word" "os" + -- * Platform "arch" "word-os" + -- We could support that preferring variants 'OtherOS' or 'OtherArch' + -- + -- For now we split into arch and os parts on the first dash. 
+ parse = do + arch <- parseDashlessArch + _ <- Parse.char '-' + os <- parse + return (Platform arch os) + where + parseDashlessArch :: Parse.ReadP r Arch + parseDashlessArch = fmap (classifyArch Strict) dashlessIdent + + dashlessIdent :: Parse.ReadP r String + dashlessIdent = liftM2 (:) firstChar rest + where firstChar = Parse.satisfy isAlpha + rest = Parse.munch (\c -> isAlphaNum c || c == '_') + +-- | The platform Cabal was compiled on. In most cases, +-- @LocalBuildInfo.hostPlatform@ should be used instead (the platform we're +-- targeting). +buildPlatform :: Platform +buildPlatform = Platform buildArch buildOS + +-- Utils: + +ident :: Parse.ReadP r String +ident = liftM2 (:) firstChar rest + where firstChar = Parse.satisfy isAlpha + rest = Parse.munch (\c -> isAlphaNum c || c == '_' || c == '-') + +parsecIdent :: CabalParsing m => m String +parsecIdent = (:) <$> firstChar <*> rest + where + firstChar = P.satisfy isAlpha + rest = P.munch (\c -> isAlphaNum c || c == '_' || c == '-') + +platformFromTriple :: String -> Maybe Platform +platformFromTriple triple = + fmap fst (listToMaybe $ Parse.readP_to_S parseTriple triple) + where parseWord = Parse.munch1 (\c -> isAlphaNum c || c == '_') + parseTriple = do + arch <- fmap (classifyArch Permissive) parseWord + _ <- Parse.char '-' + _ <- parseWord -- Skip vendor + _ <- Parse.char '-' + os <- fmap (classifyOS Permissive) ident -- OS may have hyphens, like + -- 'nto-qnx' + return $ Platform arch os diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/TestSuite.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/TestSuite.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/TestSuite.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/TestSuite.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,102 @@ +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + 
+----------------------------------------------------------------------------- +-- | +-- Module : Distribution.TestSuite +-- Copyright : Thomas Tuegel 2010 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This module defines the detailed test suite interface which makes it +-- possible to expose individual tests to Cabal or other test agents. + +module Distribution.TestSuite + ( TestInstance(..) + , OptionDescr(..) + , OptionType(..) + , Test(..) + , Options + , Progress(..) + , Result(..) + , testGroup + ) where + +import Prelude () +import Distribution.Compat.Prelude + +data TestInstance = TestInstance + { run :: IO Progress -- ^ Perform the test. + , name :: String -- ^ A name for the test, unique within a + -- test suite. + , tags :: [String] -- ^ Users can select groups of tests by + -- their tags. + , options :: [OptionDescr] -- ^ Descriptions of the options recognized + -- by this test. + , setOption :: String -> String -> Either String TestInstance + -- ^ Try to set the named option to the given value. Returns an error + -- message if the option is not supported or the value could not be + -- correctly parsed; otherwise, a 'TestInstance' with the option set to + -- the given value is returned. + } + +data OptionDescr = OptionDescr + { optionName :: String + , optionDescription :: String -- ^ A human-readable description of the + -- option to guide the user setting it. 
+ , optionType :: OptionType + , optionDefault :: Maybe String + } + deriving (Eq, Read, Show) + +data OptionType + = OptionFile + { optionFileMustExist :: Bool + , optionFileIsDir :: Bool + , optionFileExtensions :: [String] + } + | OptionString + { optionStringMultiline :: Bool + } + | OptionNumber + { optionNumberIsInt :: Bool + , optionNumberBounds :: (Maybe String, Maybe String) + } + | OptionBool + | OptionEnum [String] + | OptionSet [String] + | OptionRngSeed + deriving (Eq, Read, Show) + +data Test + = Test TestInstance + | Group + { groupName :: String + , concurrently :: Bool + -- ^ If true, then children of this group may be run in parallel. + -- Note that this setting is not inherited by children. In + -- particular, consider a group F with "concurrently = False" that + -- has some children, including a group T with "concurrently = + -- True". The children of group T may be run concurrently with each + -- other, as long as none are run at the same time as any of the + -- direct children of group F. + , groupTests :: [Test] + } + | ExtraOptions [OptionDescr] Test + +type Options = [(String, String)] + +data Progress = Finished Result + | Progress String (IO Progress) + +data Result = Pass + | Fail String + | Error String + deriving (Eq, Read, Show) + +-- | Create a named group of tests, which are assumed to be safe to run in +-- parallel. 
+testGroup :: String -> [Test] -> Test +testGroup n ts = Group { groupName = n, concurrently = True, groupTests = ts } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Text.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Text.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Text.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Text.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,102 @@ +{-# LANGUAGE DefaultSignatures #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Text +-- Copyright : Duncan Coutts 2007 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This defines a 'Text' class which is a bit like the 'Read' and 'Show' +-- classes. The difference is that it uses a modern pretty printer and parser +-- system and the format is not expected to be Haskell concrete syntax but +-- rather the external human readable representation used by Cabal. +-- +module Distribution.Text ( + Text(..), + defaultStyle, + display, + flatStyle, + simpleParse, + stdParse, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Data.Functor.Identity (Identity (..)) +import Distribution.Pretty +import Distribution.Parsec.Class +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +import Data.Version (Version(Version)) + +-- | /Note:/ this class will soon be deprecated. +-- It's not yet, so that we are @-Wall@ clean. +class Text a where + disp :: a -> Disp.Doc + default disp :: Pretty a => a -> Disp.Doc + disp = pretty + + parse :: Parse.ReadP r a + default parse :: Parsec a => Parse.ReadP r a + parse = parsec + +-- | Pretty-prints with the default style. +display :: Text a => a -> String +display = Disp.renderStyle defaultStyle . 
disp + +simpleParse :: Text a => String -> Maybe a +simpleParse str = case [ p | (p, s) <- Parse.readP_to_S parse str + , all isSpace s ] of + [] -> Nothing + (p:_) -> Just p + +stdParse :: Text ver => (ver -> String -> res) -> Parse.ReadP r res +stdParse f = do + cs <- Parse.sepBy1 component (Parse.char '-') + _ <- Parse.char '-' + ver <- parse + let name = intercalate "-" cs + return $! f ver (lowercase name) + where + component = do + cs <- Parse.munch1 isAlphaNum + if all isDigit cs then Parse.pfail else return cs + -- each component must contain an alphabetic character, to avoid + -- ambiguity in identifiers like foo-1 (the 1 is the version number). + +lowercase :: String -> String +lowercase = map toLower + +-- ----------------------------------------------------------------------------- +-- Instances for types from the base package + +instance Text Bool where + parse = Parse.choice [ (Parse.string "True" Parse.+++ + Parse.string "true") >> return True + , (Parse.string "False" Parse.+++ + Parse.string "false") >> return False ] + +instance Text Int where + parse = fmap negate (Parse.char '-' >> parseNat) Parse.+++ parseNat + +instance Text a => Text (Identity a) where + disp = disp . runIdentity + parse = fmap Identity parse + +-- | Parser for non-negative integers. +parseNat :: Parse.ReadP r Int +parseNat = read `fmap` Parse.munch1 isDigit -- TODO: eradicateNoParse + + +instance Text Version where + disp (Version branch _tags) -- Death to version tags!! 
+ = Disp.hcat (Disp.punctuate (Disp.char '.') (map Disp.int branch)) + + parse = do + branch <- Parse.sepBy1 parseNat (Parse.char '.') + -- allow but ignore tags: + _tags <- Parse.many (Parse.char '-' >> Parse.munch1 isAlphaNum) + return (Version branch []) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/AbiDependency.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/AbiDependency.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/AbiDependency.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/AbiDependency.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,52 @@ +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Types.AbiDependency where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse +import qualified Distribution.Package as Package +import qualified Text.PrettyPrint as Disp + +-- | An ABI dependency is a dependency on a library which also +-- records the ABI hash ('abiHash') of the library it depends +-- on. +-- +-- The primary utility of this is to enable an extra sanity when +-- GHC loads libraries: it can check if the dependency has a matching +-- ABI and if not, refuse to load this library. This information +-- is critical if we are shadowing libraries; differences in the +-- ABI hash let us know what packages get shadowed by the new version +-- of a package. 
+data AbiDependency = AbiDependency { + depUnitId :: Package.UnitId, + depAbiHash :: Package.AbiHash + } + deriving (Eq, Generic, Read, Show) + +instance Pretty AbiDependency where + pretty (AbiDependency uid abi) = + disp uid <<>> Disp.char '=' <<>> disp abi + +instance Parsec AbiDependency where + parsec = do + uid <- parsec + _ <- P.char '=' + abi <- parsec + return (AbiDependency uid abi) + +instance Text AbiDependency where + parse = do + uid <- parse + _ <- Parse.char '=' + abi <- parse + return (AbiDependency uid abi) + +instance Binary AbiDependency + +instance NFData AbiDependency where rnf = genericRnf diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/AbiHash.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/AbiHash.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/AbiHash.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/AbiHash.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,66 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} + +module Distribution.Types.AbiHash + ( AbiHash, unAbiHash, mkAbiHash + ) where + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.Utils.ShortText + +import qualified Distribution.Compat.ReadP as Parse +import qualified Distribution.Compat.CharParsing as P +import Distribution.Text +import Distribution.Pretty +import Distribution.Parsec.Class + +import Text.PrettyPrint (text) + +-- | ABI Hashes +-- +-- Use 'mkAbiHash' and 'unAbiHash' to convert from/to a +-- 'String'. 
+-- +-- This type is opaque since @Cabal-2.0@ +-- +-- @since 2.0.0.2 +newtype AbiHash = AbiHash ShortText + deriving (Eq, Show, Read, Generic) + +-- | Convert 'AbiHash' to 'String' +-- +-- @since 2.0.0.2 +unAbiHash :: AbiHash -> String +unAbiHash (AbiHash h) = fromShortText h + +-- | Construct a 'AbiHash' from a 'String' +-- +-- 'mkAbiHash' is the inverse to 'unAbiHash' +-- +-- Note: No validations are performed to ensure that the resulting +-- 'AbiHash' is valid +-- +-- @since 2.0.0.2 +mkAbiHash :: String -> AbiHash +mkAbiHash = AbiHash . toShortText + +-- | 'mkAbiHash' +-- +-- @since 2.0.0.2 +instance IsString AbiHash where + fromString = mkAbiHash + +instance Binary AbiHash + +instance NFData AbiHash where rnf = genericRnf + +instance Pretty AbiHash where + pretty = text . unAbiHash + +instance Parsec AbiHash where + parsec = fmap mkAbiHash (P.munch isAlphaNum) + +instance Text AbiHash where + parse = fmap mkAbiHash (Parse.munch isAlphaNum) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/AnnotatedId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/AnnotatedId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/AnnotatedId.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/AnnotatedId.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,34 @@ +module Distribution.Types.AnnotatedId ( + AnnotatedId(..) +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Package +import Distribution.Types.ComponentName + +-- | An 'AnnotatedId' is a 'ComponentId', 'UnitId', etc. +-- which is annotated with some other useful information +-- that is useful for printing to users, etc. 
+-- +-- Invariant: if ann_id x == ann_id y, then ann_pid x == ann_pid y +-- and ann_cname x == ann_cname y +data AnnotatedId id = AnnotatedId { + ann_pid :: PackageId, + ann_cname :: ComponentName, + ann_id :: id + } + deriving (Show) + +instance Eq id => Eq (AnnotatedId id) where + x == y = ann_id x == ann_id y + +instance Ord id => Ord (AnnotatedId id) where + compare x y = compare (ann_id x) (ann_id y) + +instance Package (AnnotatedId id) where + packageId = ann_pid + +instance Functor AnnotatedId where + fmap f (AnnotatedId pid cn x) = AnnotatedId pid cn (f x) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Benchmark/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Benchmark/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Benchmark/Lens.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Benchmark/Lens.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,27 @@ +module Distribution.Types.Benchmark.Lens ( + Benchmark, + module Distribution.Types.Benchmark.Lens, + ) where + +import Distribution.Compat.Lens +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Types.Benchmark (Benchmark) +import Distribution.Types.BenchmarkInterface (BenchmarkInterface) +import Distribution.Types.BuildInfo (BuildInfo) +import Distribution.Types.UnqualComponentName (UnqualComponentName) + +import qualified Distribution.Types.Benchmark as T + +benchmarkName :: Lens' Benchmark UnqualComponentName +benchmarkName f s = fmap (\x -> s { T.benchmarkName = x }) (f (T.benchmarkName s)) +{-# INLINE benchmarkName #-} + +benchmarkInterface :: Lens' Benchmark BenchmarkInterface +benchmarkInterface f s = fmap (\x -> s { T.benchmarkInterface = x }) (f (T.benchmarkInterface s)) +{-# INLINE benchmarkInterface #-} + +benchmarkBuildInfo :: Lens' Benchmark BuildInfo +benchmarkBuildInfo 
f s = fmap (\x -> s { T.benchmarkBuildInfo = x }) (f (T.benchmarkBuildInfo s)) +{-# INLINE benchmarkBuildInfo #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Benchmark.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Benchmark.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Benchmark.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Benchmark.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,77 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.Benchmark ( + Benchmark(..), + emptyBenchmark, + benchmarkType, + benchmarkModules, + benchmarkModulesAutogen +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.BuildInfo +import Distribution.Types.BenchmarkType +import Distribution.Types.BenchmarkInterface +import Distribution.Types.UnqualComponentName + +import Distribution.ModuleName + +import qualified Distribution.Types.BuildInfo.Lens as L + +-- | A \"benchmark\" stanza in a cabal file. 
+-- +data Benchmark = Benchmark { + benchmarkName :: UnqualComponentName, + benchmarkInterface :: BenchmarkInterface, + benchmarkBuildInfo :: BuildInfo + } + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Binary Benchmark + +instance NFData Benchmark where rnf = genericRnf + +instance L.HasBuildInfo Benchmark where + buildInfo f (Benchmark x1 x2 x3) = fmap (\y1 -> Benchmark x1 x2 y1) (f x3) + +instance Monoid Benchmark where + mempty = Benchmark { + benchmarkName = mempty, + benchmarkInterface = mempty, + benchmarkBuildInfo = mempty + } + mappend = (<>) + +instance Semigroup Benchmark where + a <> b = Benchmark { + benchmarkName = combine' benchmarkName, + benchmarkInterface = combine benchmarkInterface, + benchmarkBuildInfo = combine benchmarkBuildInfo + } + where combine field = field a `mappend` field b + combine' field = case ( unUnqualComponentName $ field a + , unUnqualComponentName $ field b) of + ("", _) -> field b + (_, "") -> field a + (x, y) -> error $ "Ambiguous values for benchmark field: '" + ++ x ++ "' and '" ++ y ++ "'" + +emptyBenchmark :: Benchmark +emptyBenchmark = mempty + +benchmarkType :: Benchmark -> BenchmarkType +benchmarkType benchmark = case benchmarkInterface benchmark of + BenchmarkExeV10 ver _ -> BenchmarkTypeExe ver + BenchmarkUnsupported benchmarktype -> benchmarktype + +-- | Get all the module names from a benchmark. +benchmarkModules :: Benchmark -> [ModuleName] +benchmarkModules benchmark = otherModules (benchmarkBuildInfo benchmark) + +-- | Get all the auto generated module names from a benchmark. +-- These are a subset of 'benchmarkModules'. 
+benchmarkModulesAutogen :: Benchmark -> [ModuleName] +benchmarkModulesAutogen benchmark = autogenModules (benchmarkBuildInfo benchmark) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/BenchmarkInterface.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/BenchmarkInterface.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/BenchmarkInterface.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/BenchmarkInterface.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,46 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.BenchmarkInterface ( + BenchmarkInterface(..), +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.BenchmarkType +import Distribution.Version + +-- | The benchmark interfaces that are currently defined. Each +-- benchmark must specify which interface it supports. +-- +-- More interfaces may be defined in future, either new revisions or +-- totally new interfaces. +-- +data BenchmarkInterface = + + -- | Benchmark interface \"exitcode-stdio-1.0\". The benchmark + -- takes the form of an executable. It returns a zero exit code + -- for success, non-zero for failure. The stdout and stderr + -- channels may be logged. It takes no command line parameters + -- and nothing on stdin. + -- + BenchmarkExeV10 Version FilePath + + -- | A benchmark that does not conform to one of the above + -- interfaces for the given reason (e.g. unknown benchmark type). 
+ -- + | BenchmarkUnsupported BenchmarkType + deriving (Eq, Generic, Read, Show, Typeable, Data) + +instance Binary BenchmarkInterface + +instance NFData BenchmarkInterface where rnf = genericRnf + +instance Monoid BenchmarkInterface where + mempty = BenchmarkUnsupported (BenchmarkTypeUnknown mempty nullVersion) + mappend = (<>) + +instance Semigroup BenchmarkInterface where + a <> (BenchmarkUnsupported _) = a + _ <> b = b diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/BenchmarkType.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/BenchmarkType.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/BenchmarkType.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/BenchmarkType.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,47 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.BenchmarkType ( + BenchmarkType(..), + knownBenchmarkTypes, +) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text +import Distribution.Version +import Text.PrettyPrint (char, text) + +-- | The \"benchmark-type\" field in the benchmark stanza. +-- +data BenchmarkType = BenchmarkTypeExe Version + -- ^ \"type: exitcode-stdio-x.y\" + | BenchmarkTypeUnknown String Version + -- ^ Some unknown benchmark type e.g. 
\"type: foo\" + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Binary BenchmarkType + +instance NFData BenchmarkType where rnf = genericRnf + +knownBenchmarkTypes :: [BenchmarkType] +knownBenchmarkTypes = [ BenchmarkTypeExe (mkVersion [1,0]) ] + +instance Pretty BenchmarkType where + pretty (BenchmarkTypeExe ver) = text "exitcode-stdio-" <<>> pretty ver + pretty (BenchmarkTypeUnknown name ver) = text name <<>> char '-' <<>> pretty ver + +instance Parsec BenchmarkType where + parsec = parsecStandard $ \ver name -> case name of + "exitcode-stdio" -> BenchmarkTypeExe ver + _ -> BenchmarkTypeUnknown name ver + +instance Text BenchmarkType where + parse = stdParse $ \ver name -> case name of + "exitcode-stdio" -> BenchmarkTypeExe ver + _ -> BenchmarkTypeUnknown name ver + + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/BuildInfo/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/BuildInfo/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/BuildInfo/Lens.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/BuildInfo/Lens.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,320 @@ +module Distribution.Types.BuildInfo.Lens ( + BuildInfo, + HasBuildInfo (..), + HasBuildInfos (..), + ) where + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.Compat.Lens + +import Distribution.Compiler (CompilerFlavor) +import Distribution.ModuleName (ModuleName) +import Distribution.Types.BuildInfo (BuildInfo) +import Distribution.Types.Dependency (Dependency) +import Distribution.Types.ExeDependency (ExeDependency) +import Distribution.Types.LegacyExeDependency (LegacyExeDependency) +import Distribution.Types.Mixin (Mixin) +import Distribution.Types.PkgconfigDependency (PkgconfigDependency) +import Language.Haskell.Extension (Extension, Language) + +import 
qualified Distribution.Types.BuildInfo as T + +-- | Classy lenses for 'BuildInfo'. +class HasBuildInfo a where + buildInfo :: Lens' a BuildInfo + + buildable :: Lens' a Bool + buildable = buildInfo . buildable + {-# INLINE buildable #-} + + buildTools :: Lens' a [LegacyExeDependency] + buildTools = buildInfo . buildTools + {-# INLINE buildTools #-} + + buildToolDepends :: Lens' a [ExeDependency] + buildToolDepends = buildInfo . buildToolDepends + {-# INLINE buildToolDepends #-} + + cppOptions :: Lens' a [String] + cppOptions = buildInfo . cppOptions + {-# INLINE cppOptions #-} + + asmOptions :: Lens' a [String] + asmOptions = buildInfo . asmOptions + {-# INLINE asmOptions #-} + + cmmOptions :: Lens' a [String] + cmmOptions = buildInfo . cmmOptions + {-# INLINE cmmOptions #-} + + ccOptions :: Lens' a [String] + ccOptions = buildInfo . ccOptions + {-# INLINE ccOptions #-} + + cxxOptions :: Lens' a [String] + cxxOptions = buildInfo . cxxOptions + {-# INLINE cxxOptions #-} + + ldOptions :: Lens' a [String] + ldOptions = buildInfo . ldOptions + {-# INLINE ldOptions #-} + + pkgconfigDepends :: Lens' a [PkgconfigDependency] + pkgconfigDepends = buildInfo . pkgconfigDepends + {-# INLINE pkgconfigDepends #-} + + frameworks :: Lens' a [String] + frameworks = buildInfo . frameworks + {-# INLINE frameworks #-} + + extraFrameworkDirs :: Lens' a [String] + extraFrameworkDirs = buildInfo . extraFrameworkDirs + {-# INLINE extraFrameworkDirs #-} + + asmSources :: Lens' a [FilePath] + asmSources = buildInfo . asmSources + {-# INLINE asmSources #-} + + cmmSources :: Lens' a [FilePath] + cmmSources = buildInfo . cmmSources + {-# INLINE cmmSources #-} + + cSources :: Lens' a [FilePath] + cSources = buildInfo . cSources + {-# INLINE cSources #-} + + cxxSources :: Lens' a [FilePath] + cxxSources = buildInfo . cxxSources + {-# INLINE cxxSources #-} + + jsSources :: Lens' a [FilePath] + jsSources = buildInfo . 
jsSources + {-# INLINE jsSources #-} + + hsSourceDirs :: Lens' a [FilePath] + hsSourceDirs = buildInfo . hsSourceDirs + {-# INLINE hsSourceDirs #-} + + otherModules :: Lens' a [ModuleName] + otherModules = buildInfo . otherModules + {-# INLINE otherModules #-} + + virtualModules :: Lens' a [ModuleName] + virtualModules = buildInfo . virtualModules + {-# INLINE virtualModules #-} + + autogenModules :: Lens' a [ModuleName] + autogenModules = buildInfo . autogenModules + {-# INLINE autogenModules #-} + + defaultLanguage :: Lens' a (Maybe Language) + defaultLanguage = buildInfo . defaultLanguage + {-# INLINE defaultLanguage #-} + + otherLanguages :: Lens' a [Language] + otherLanguages = buildInfo . otherLanguages + {-# INLINE otherLanguages #-} + + defaultExtensions :: Lens' a [Extension] + defaultExtensions = buildInfo . defaultExtensions + {-# INLINE defaultExtensions #-} + + otherExtensions :: Lens' a [Extension] + otherExtensions = buildInfo . otherExtensions + {-# INLINE otherExtensions #-} + + oldExtensions :: Lens' a [Extension] + oldExtensions = buildInfo . oldExtensions + {-# INLINE oldExtensions #-} + + extraLibs :: Lens' a [String] + extraLibs = buildInfo . extraLibs + {-# INLINE extraLibs #-} + + extraGHCiLibs :: Lens' a [String] + extraGHCiLibs = buildInfo . extraGHCiLibs + {-# INLINE extraGHCiLibs #-} + + extraBundledLibs :: Lens' a [String] + extraBundledLibs = buildInfo . extraBundledLibs + {-# INLINE extraBundledLibs #-} + + extraLibFlavours :: Lens' a [String] + extraLibFlavours = buildInfo . extraLibFlavours + {-# INLINE extraLibFlavours #-} + + extraLibDirs :: Lens' a [String] + extraLibDirs = buildInfo . extraLibDirs + {-# INLINE extraLibDirs #-} + + includeDirs :: Lens' a [FilePath] + includeDirs = buildInfo . includeDirs + {-# INLINE includeDirs #-} + + includes :: Lens' a [FilePath] + includes = buildInfo . includes + {-# INLINE includes #-} + + installIncludes :: Lens' a [FilePath] + installIncludes = buildInfo . 
installIncludes + {-# INLINE installIncludes #-} + + options :: Lens' a [(CompilerFlavor,[String])] + options = buildInfo . options + {-# INLINE options #-} + + profOptions :: Lens' a [(CompilerFlavor,[String])] + profOptions = buildInfo . profOptions + {-# INLINE profOptions #-} + + sharedOptions :: Lens' a [(CompilerFlavor,[String])] + sharedOptions = buildInfo . sharedOptions + {-# INLINE sharedOptions #-} + + staticOptions :: Lens' a [(CompilerFlavor,[String])] + staticOptions = buildInfo . staticOptions + {-# INLINE staticOptions #-} + + customFieldsBI :: Lens' a [(String,String)] + customFieldsBI = buildInfo . customFieldsBI + {-# INLINE customFieldsBI #-} + + targetBuildDepends :: Lens' a [Dependency] + targetBuildDepends = buildInfo . targetBuildDepends + {-# INLINE targetBuildDepends #-} + + mixins :: Lens' a [Mixin] + mixins = buildInfo . mixins + {-# INLINE mixins #-} + + +instance HasBuildInfo BuildInfo where + buildInfo = id + {-# INLINE buildInfo #-} + + buildable f s = fmap (\x -> s { T.buildable = x }) (f (T.buildable s)) + {-# INLINE buildable #-} + + buildTools f s = fmap (\x -> s { T.buildTools = x }) (f (T.buildTools s)) + {-# INLINE buildTools #-} + + buildToolDepends f s = fmap (\x -> s { T.buildToolDepends = x }) (f (T.buildToolDepends s)) + {-# INLINE buildToolDepends #-} + + cppOptions f s = fmap (\x -> s { T.cppOptions = x }) (f (T.cppOptions s)) + {-# INLINE cppOptions #-} + + asmOptions f s = fmap (\x -> s { T.asmOptions = x }) (f (T.asmOptions s)) + {-# INLINE asmOptions #-} + + cmmOptions f s = fmap (\x -> s { T.cmmOptions = x }) (f (T.cmmOptions s)) + {-# INLINE cmmOptions #-} + + ccOptions f s = fmap (\x -> s { T.ccOptions = x }) (f (T.ccOptions s)) + {-# INLINE ccOptions #-} + + cxxOptions f s = fmap (\x -> s { T.cxxOptions = x }) (f (T.cxxOptions s)) + {-# INLINE cxxOptions #-} + + ldOptions f s = fmap (\x -> s { T.ldOptions = x }) (f (T.ldOptions s)) + {-# INLINE ldOptions #-} + + pkgconfigDepends f s = fmap (\x -> s { 
T.pkgconfigDepends = x }) (f (T.pkgconfigDepends s)) + {-# INLINE pkgconfigDepends #-} + + frameworks f s = fmap (\x -> s { T.frameworks = x }) (f (T.frameworks s)) + {-# INLINE frameworks #-} + + extraFrameworkDirs f s = fmap (\x -> s { T.extraFrameworkDirs = x }) (f (T.extraFrameworkDirs s)) + {-# INLINE extraFrameworkDirs #-} + + asmSources f s = fmap (\x -> s { T.asmSources = x }) (f (T.asmSources s)) + {-# INLINE asmSources #-} + + cmmSources f s = fmap (\x -> s { T.cmmSources = x }) (f (T.cmmSources s)) + {-# INLINE cmmSources #-} + + cSources f s = fmap (\x -> s { T.cSources = x }) (f (T.cSources s)) + {-# INLINE cSources #-} + + cxxSources f s = fmap (\x -> s { T.cxxSources = x }) (f (T.cxxSources s)) + {-# INLINE cxxSources #-} + + jsSources f s = fmap (\x -> s { T.jsSources = x }) (f (T.jsSources s)) + {-# INLINE jsSources #-} + + hsSourceDirs f s = fmap (\x -> s { T.hsSourceDirs = x }) (f (T.hsSourceDirs s)) + {-# INLINE hsSourceDirs #-} + + otherModules f s = fmap (\x -> s { T.otherModules = x }) (f (T.otherModules s)) + {-# INLINE otherModules #-} + + virtualModules f s = fmap (\x -> s { T.virtualModules = x }) (f (T.virtualModules s)) + {-# INLINE virtualModules #-} + + autogenModules f s = fmap (\x -> s { T.autogenModules = x }) (f (T.autogenModules s)) + {-# INLINE autogenModules #-} + + defaultLanguage f s = fmap (\x -> s { T.defaultLanguage = x }) (f (T.defaultLanguage s)) + {-# INLINE defaultLanguage #-} + + otherLanguages f s = fmap (\x -> s { T.otherLanguages = x }) (f (T.otherLanguages s)) + {-# INLINE otherLanguages #-} + + defaultExtensions f s = fmap (\x -> s { T.defaultExtensions = x }) (f (T.defaultExtensions s)) + {-# INLINE defaultExtensions #-} + + otherExtensions f s = fmap (\x -> s { T.otherExtensions = x }) (f (T.otherExtensions s)) + {-# INLINE otherExtensions #-} + + oldExtensions f s = fmap (\x -> s { T.oldExtensions = x }) (f (T.oldExtensions s)) + {-# INLINE oldExtensions #-} + + extraLibs f s = fmap (\x -> s { T.extraLibs = x }) 
(f (T.extraLibs s)) + {-# INLINE extraLibs #-} + + extraGHCiLibs f s = fmap (\x -> s { T.extraGHCiLibs = x }) (f (T.extraGHCiLibs s)) + {-# INLINE extraGHCiLibs #-} + + extraBundledLibs f s = fmap (\x -> s { T.extraBundledLibs = x }) (f (T.extraBundledLibs s)) + {-# INLINE extraBundledLibs #-} + + extraLibFlavours f s = fmap (\x -> s { T.extraLibFlavours = x }) (f (T.extraLibFlavours s)) + {-# INLINE extraLibFlavours #-} + + extraLibDirs f s = fmap (\x -> s { T.extraLibDirs = x }) (f (T.extraLibDirs s)) + {-# INLINE extraLibDirs #-} + + includeDirs f s = fmap (\x -> s { T.includeDirs = x }) (f (T.includeDirs s)) + {-# INLINE includeDirs #-} + + includes f s = fmap (\x -> s { T.includes = x }) (f (T.includes s)) + {-# INLINE includes #-} + + installIncludes f s = fmap (\x -> s { T.installIncludes = x }) (f (T.installIncludes s)) + {-# INLINE installIncludes #-} + + options f s = fmap (\x -> s { T.options = x }) (f (T.options s)) + {-# INLINE options #-} + + profOptions f s = fmap (\x -> s { T.profOptions = x }) (f (T.profOptions s)) + {-# INLINE profOptions #-} + + sharedOptions f s = fmap (\x -> s { T.sharedOptions = x }) (f (T.sharedOptions s)) + {-# INLINE sharedOptions #-} + + staticOptions f s = fmap (\x -> s { T.staticOptions = x }) (f (T.staticOptions s)) + {-# INLINE staticOptions #-} + + customFieldsBI f s = fmap (\x -> s { T.customFieldsBI = x }) (f (T.customFieldsBI s)) + {-# INLINE customFieldsBI #-} + + targetBuildDepends f s = fmap (\x -> s { T.targetBuildDepends = x }) (f (T.targetBuildDepends s)) + {-# INLINE targetBuildDepends #-} + + mixins f s = fmap (\x -> s { T.mixins = x }) (f (T.mixins s)) + {-# INLINE mixins #-} + +class HasBuildInfos a where + traverseBuildInfos :: Traversal' a BuildInfo diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/BuildInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/BuildInfo.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/BuildInfo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/BuildInfo.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,252 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.BuildInfo ( + BuildInfo(..), + + emptyBuildInfo, + allLanguages, + allExtensions, + usedExtensions, + usesTemplateHaskellOrQQ, + + hcOptions, + hcProfOptions, + hcSharedOptions, + hcStaticOptions, +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.Mixin +import Distribution.Types.Dependency +import Distribution.Types.ExeDependency +import Distribution.Types.LegacyExeDependency +import Distribution.Types.PkgconfigDependency + +import Distribution.ModuleName +import Distribution.Compiler +import Language.Haskell.Extension + +-- Consider refactoring into executable and library versions. +data BuildInfo = BuildInfo { + -- | component is buildable here + buildable :: Bool, + -- | Tools needed to build this bit. + -- + -- This is a legacy field that 'buildToolDepends' largely supersedes. + -- + -- Unless use are very sure what you are doing, use the functions in + -- "Distribution.Simple.BuildToolDepends" rather than accessing this + -- field directly. + buildTools :: [LegacyExeDependency], + -- | Haskell tools needed to build this bit + -- + -- This field is better than 'buildTools' because it allows one to + -- precisely specify an executable in a package. + -- + -- Unless use are very sure what you are doing, use the functions in + -- "Distribution.Simple.BuildToolDepends" rather than accessing this + -- field directly. 
+ buildToolDepends :: [ExeDependency], + cppOptions :: [String], -- ^ options for pre-processing Haskell code + asmOptions :: [String], -- ^ options for assmebler + cmmOptions :: [String], -- ^ options for C-- compiler + ccOptions :: [String], -- ^ options for C compiler + cxxOptions :: [String], -- ^ options for C++ compiler + ldOptions :: [String], -- ^ options for linker + pkgconfigDepends :: [PkgconfigDependency], -- ^ pkg-config packages that are used + frameworks :: [String], -- ^support frameworks for Mac OS X + extraFrameworkDirs:: [String], -- ^ extra locations to find frameworks. + asmSources :: [FilePath], -- ^ Assembly files. + cmmSources :: [FilePath], -- ^ C-- files. + cSources :: [FilePath], + cxxSources :: [FilePath], + jsSources :: [FilePath], + hsSourceDirs :: [FilePath], -- ^ where to look for the Haskell module hierarchy + otherModules :: [ModuleName], -- ^ non-exposed or non-main modules + virtualModules :: [ModuleName], -- ^ exposed modules that do not have a source file (e.g. @GHC.Prim@ from @ghc-prim@ package) + autogenModules :: [ModuleName], -- ^ not present on sdist, Paths_* or user-generated with a custom Setup.hs + + defaultLanguage :: Maybe Language,-- ^ language used when not explicitly specified + otherLanguages :: [Language], -- ^ other languages used within the package + defaultExtensions :: [Extension], -- ^ language extensions used by all modules + otherExtensions :: [Extension], -- ^ other language extensions used within the package + oldExtensions :: [Extension], -- ^ the old extensions field, treated same as 'defaultExtensions' + + extraLibs :: [String], -- ^ what libraries to link with when compiling a program that uses your package + extraGHCiLibs :: [String], -- ^ if present, overrides extraLibs when package is loaded with GHCi. + extraBundledLibs :: [String], -- ^ if present, adds libs to hs-lirbaries, which become part of the package. + -- Example: the Cffi library shipping with the rts, alognside the HSrts-1.0.a,.o,... 
+ -- Example 2: a library that is being built by a foreign tool (e.g. rust) + -- and copied and registered together with this library. The + -- logic on how this library is built will have to be encoded in a + -- custom Setup for now. Otherwise cabal would need to learn how to + -- call arbitrary library builders. + extraLibFlavours :: [String], -- ^ Hidden Flag. This set of strings will be appended to all libraries when + -- copying. E.g. [libHS_ | flavour <- extraLibFlavours]. This + -- should only be needed in very specific cases, e.g. the `rts` package, where + -- there are multiple copies of slightly differently built libs. + extraLibDirs :: [String], + includeDirs :: [FilePath], -- ^directories to find .h files + includes :: [FilePath], -- ^ The .h files to be found in includeDirs + installIncludes :: [FilePath], -- ^ .h files to install with the package + options :: [(CompilerFlavor,[String])], + profOptions :: [(CompilerFlavor,[String])], + sharedOptions :: [(CompilerFlavor,[String])], + staticOptions :: [(CompilerFlavor,[String])], + customFieldsBI :: [(String,String)], -- ^Custom fields starting + -- with x-, stored in a + -- simple assoc-list. 
+ targetBuildDepends :: [Dependency], -- ^ Dependencies specific to a library or executable target + mixins :: [Mixin] + } + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Binary BuildInfo + +instance NFData BuildInfo where rnf = genericRnf + +instance Monoid BuildInfo where + mempty = BuildInfo { + buildable = True, + buildTools = [], + buildToolDepends = [], + cppOptions = [], + asmOptions = [], + cmmOptions = [], + ccOptions = [], + cxxOptions = [], + ldOptions = [], + pkgconfigDepends = [], + frameworks = [], + extraFrameworkDirs = [], + asmSources = [], + cmmSources = [], + cSources = [], + cxxSources = [], + jsSources = [], + hsSourceDirs = [], + otherModules = [], + virtualModules = [], + autogenModules = [], + defaultLanguage = Nothing, + otherLanguages = [], + defaultExtensions = [], + otherExtensions = [], + oldExtensions = [], + extraLibs = [], + extraGHCiLibs = [], + extraBundledLibs = [], + extraLibFlavours = [], + extraLibDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + options = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + customFieldsBI = [], + targetBuildDepends = [], + mixins = [] + } + mappend = (<>) + +instance Semigroup BuildInfo where + a <> b = BuildInfo { + buildable = buildable a && buildable b, + buildTools = combine buildTools, + buildToolDepends = combine buildToolDepends, + cppOptions = combine cppOptions, + asmOptions = combine asmOptions, + cmmOptions = combine cmmOptions, + ccOptions = combine ccOptions, + cxxOptions = combine cxxOptions, + ldOptions = combine ldOptions, + pkgconfigDepends = combine pkgconfigDepends, + frameworks = combineNub frameworks, + extraFrameworkDirs = combineNub extraFrameworkDirs, + asmSources = combineNub asmSources, + cmmSources = combineNub cmmSources, + cSources = combineNub cSources, + cxxSources = combineNub cxxSources, + jsSources = combineNub jsSources, + hsSourceDirs = combineNub hsSourceDirs, + otherModules = combineNub 
otherModules, + virtualModules = combineNub virtualModules, + autogenModules = combineNub autogenModules, + defaultLanguage = combineMby defaultLanguage, + otherLanguages = combineNub otherLanguages, + defaultExtensions = combineNub defaultExtensions, + otherExtensions = combineNub otherExtensions, + oldExtensions = combineNub oldExtensions, + extraLibs = combine extraLibs, + extraGHCiLibs = combine extraGHCiLibs, + extraBundledLibs = combine extraBundledLibs, + extraLibFlavours = combine extraLibFlavours, + extraLibDirs = combineNub extraLibDirs, + includeDirs = combineNub includeDirs, + includes = combineNub includes, + installIncludes = combineNub installIncludes, + options = combine options, + profOptions = combine profOptions, + sharedOptions = combine sharedOptions, + staticOptions = combine staticOptions, + customFieldsBI = combine customFieldsBI, + targetBuildDepends = combineNub targetBuildDepends, + mixins = combine mixins + } + where + combine field = field a `mappend` field b + combineNub field = nub (combine field) + combineMby field = field b `mplus` field a + +emptyBuildInfo :: BuildInfo +emptyBuildInfo = mempty + +-- | The 'Language's used by this component +-- +allLanguages :: BuildInfo -> [Language] +allLanguages bi = maybeToList (defaultLanguage bi) + ++ otherLanguages bi + +-- | The 'Extension's that are used somewhere by this component +-- +allExtensions :: BuildInfo -> [Extension] +allExtensions bi = usedExtensions bi + ++ otherExtensions bi + +-- | The 'Extensions' that are used by all modules in this component +-- +usedExtensions :: BuildInfo -> [Extension] +usedExtensions bi = oldExtensions bi + ++ defaultExtensions bi + +-- | Whether any modules in this component use Template Haskell or +-- Quasi Quotes +usesTemplateHaskellOrQQ :: BuildInfo -> Bool +usesTemplateHaskellOrQQ bi = any p (allExtensions bi) + where + p ex = ex `elem` + [EnableExtension TemplateHaskell, EnableExtension QuasiQuotes] + +-- |Select options for a particular Haskell 
compiler. +hcOptions :: CompilerFlavor -> BuildInfo -> [String] +hcOptions = lookupHcOptions options + +hcProfOptions :: CompilerFlavor -> BuildInfo -> [String] +hcProfOptions = lookupHcOptions profOptions + +hcSharedOptions :: CompilerFlavor -> BuildInfo -> [String] +hcSharedOptions = lookupHcOptions sharedOptions + +hcStaticOptions :: CompilerFlavor -> BuildInfo -> [String] +hcStaticOptions = lookupHcOptions staticOptions + +lookupHcOptions :: (BuildInfo -> [(CompilerFlavor,[String])]) + -> CompilerFlavor -> BuildInfo -> [String] +lookupHcOptions f hc bi = [ opt | (hc',opts) <- f bi + , hc' == hc + , opt <- opts ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/BuildType.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/BuildType.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/BuildType.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/BuildType.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,67 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.BuildType ( + BuildType(..), + knownBuildTypes, +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.CabalSpecVersion (CabalSpecVersion (..)) +import Distribution.Pretty +import Distribution.Parsec.Class +import Distribution.Text + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +-- | The type of build system used by this package. +data BuildType + = Simple -- ^ calls @Distribution.Simple.defaultMain@ + | Configure -- ^ calls @Distribution.Simple.defaultMainWithHooks defaultUserHooks@, + -- which invokes @configure@ to generate additional build + -- information used by later phases. 
+ | Make -- ^ calls @Distribution.Make.defaultMain@ + | Custom -- ^ uses user-supplied @Setup.hs@ or @Setup.lhs@ (default) + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Binary BuildType + +instance NFData BuildType where rnf = genericRnf + +knownBuildTypes :: [BuildType] +knownBuildTypes = [Simple, Configure, Make, Custom] + +instance Pretty BuildType where + pretty = Disp.text . show + +instance Parsec BuildType where + parsec = do + name <- P.munch1 isAlphaNum + case name of + "Simple" -> return Simple + "Configure" -> return Configure + "Custom" -> return Custom + "Make" -> return Make + "Default" -> do + v <- askCabalSpecVersion + if v <= CabalSpecOld + then do + parsecWarning PWTBuildTypeDefault "build-type: Default is parsed as Custom for legacy reasons. See https://github.com/haskell/cabal/issues/5020" + return Custom + else fail ("unknown build-type: '" ++ name ++ "'") + _ -> fail ("unknown build-type: '" ++ name ++ "'") + +instance Text BuildType where + parse = do + name <- Parse.munch1 isAlphaNum + case name of + "Simple" -> return Simple + "Configure" -> return Configure + "Custom" -> return Custom + "Make" -> return Make + "Default" -> return Custom + _ -> fail ("unknown build-type: '" ++ name ++ "'") diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Component.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Component.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Component.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Component.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,91 @@ +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.Component ( + Component(..), + foldComponent, + componentBuildInfo, + componentBuildable, + componentName, + partitionComponents, +) where + +import Prelude () +import Distribution.Compat.Prelude + 
+import Distribution.Types.Library +import Distribution.Types.ForeignLib +import Distribution.Types.Executable +import Distribution.Types.TestSuite +import Distribution.Types.Benchmark + +import Distribution.Types.ComponentName +import Distribution.Types.BuildInfo + +import qualified Distribution.Types.BuildInfo.Lens as L + +data Component = CLib Library + | CFLib ForeignLib + | CExe Executable + | CTest TestSuite + | CBench Benchmark + deriving (Show, Eq, Read) + +instance Semigroup Component where + CLib l <> CLib l' = CLib (l <> l') + CFLib l <> CFLib l' = CFLib (l <> l') + CExe e <> CExe e' = CExe (e <> e') + CTest t <> CTest t' = CTest (t <> t') + CBench b <> CBench b' = CBench (b <> b') + _ <> _ = error "Cannot merge Component" + +instance L.HasBuildInfo Component where + buildInfo f (CLib l) = CLib <$> L.buildInfo f l + buildInfo f (CFLib l) = CFLib <$> L.buildInfo f l + buildInfo f (CExe e) = CExe <$> L.buildInfo f e + buildInfo f (CTest t) = CTest <$> L.buildInfo f t + buildInfo f (CBench b) = CBench <$> L.buildInfo f b + +foldComponent :: (Library -> a) + -> (ForeignLib -> a) + -> (Executable -> a) + -> (TestSuite -> a) + -> (Benchmark -> a) + -> Component + -> a +foldComponent f _ _ _ _ (CLib lib) = f lib +foldComponent _ f _ _ _ (CFLib flib)= f flib +foldComponent _ _ f _ _ (CExe exe) = f exe +foldComponent _ _ _ f _ (CTest tst) = f tst +foldComponent _ _ _ _ f (CBench bch) = f bch + +componentBuildInfo :: Component -> BuildInfo +componentBuildInfo = + foldComponent libBuildInfo foreignLibBuildInfo buildInfo testBuildInfo benchmarkBuildInfo + +-- | Is a component buildable (i.e., not marked with @buildable: False@)? +-- See also this note in +-- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components". +-- +-- @since 2.0.0.2 +-- +componentBuildable :: Component -> Bool +componentBuildable = buildable . componentBuildInfo + +componentName :: Component -> ComponentName +componentName = + foldComponent (libraryComponentName . 
libName) + (CFLibName . foreignLibName) + (CExeName . exeName) + (CTestName . testName) + (CBenchName . benchmarkName) + +partitionComponents + :: [Component] + -> ([Library], [ForeignLib], [Executable], [TestSuite], [Benchmark]) +partitionComponents = foldr (foldComponent fa fb fc fd fe) ([],[],[],[],[]) + where + fa x ~(a,b,c,d,e) = (x:a,b,c,d,e) + fb x ~(a,b,c,d,e) = (a,x:b,c,d,e) + fc x ~(a,b,c,d,e) = (a,b,x:c,d,e) + fd x ~(a,b,c,d,e) = (a,b,c,x:d,e) + fe x ~(a,b,c,d,e) = (a,b,c,d,x:e) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ComponentId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ComponentId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ComponentId.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ComponentId.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,74 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} + +module Distribution.Types.ComponentId + ( ComponentId, unComponentId, mkComponentId + ) where + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.Utils.ShortText + +import qualified Distribution.Compat.ReadP as Parse +import qualified Distribution.Compat.CharParsing as P +import Distribution.Text +import Distribution.Pretty +import Distribution.Parsec.Class + +import Text.PrettyPrint (text) + +-- | A 'ComponentId' uniquely identifies the transitive source +-- code closure of a component (i.e. libraries, executables). +-- +-- For non-Backpack components, this corresponds one to one with +-- the 'UnitId', which serves as the basis for install paths, +-- linker symbols, etc. +-- +-- Use 'mkComponentId' and 'unComponentId' to convert from/to a +-- 'String'. 
+-- +-- This type is opaque since @Cabal-2.0@ +-- +-- @since 2.0.0.2 +newtype ComponentId = ComponentId ShortText + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) + +-- | Construct a 'ComponentId' from a 'String' +-- +-- 'mkComponentId' is the inverse to 'unComponentId' +-- +-- Note: No validations are performed to ensure that the resulting +-- 'ComponentId' is valid +-- +-- @since 2.0.0.2 +mkComponentId :: String -> ComponentId +mkComponentId = ComponentId . toShortText + +-- | Convert 'ComponentId' to 'String' +-- +-- @since 2.0.0.2 +unComponentId :: ComponentId -> String +unComponentId (ComponentId s) = fromShortText s + +-- | 'mkComponentId' +-- +-- @since 2.0.0.2 +instance IsString ComponentId where + fromString = mkComponentId + +instance Binary ComponentId + +instance Pretty ComponentId where + pretty = text . unComponentId + +instance Parsec ComponentId where + parsec = mkComponentId `fmap` P.munch1 abi_char + where abi_char c = isAlphaNum c || c `elem` "-_." + +instance Text ComponentId where + parse = mkComponentId `fmap` Parse.munch1 abi_char + where abi_char c = isAlphaNum c || c `elem` "-_." + +instance NFData ComponentId where + rnf = rnf . 
unComponentId diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ComponentInclude.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ComponentInclude.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ComponentInclude.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ComponentInclude.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,32 @@ +module Distribution.Types.ComponentInclude ( + ComponentInclude(..), + ci_id, + ci_pkgid, + ci_cname +) where + +import Distribution.Types.PackageId +import Distribution.Types.ComponentName +import Distribution.Types.AnnotatedId + +-- Once ci_id is refined to an 'OpenUnitId' or 'DefUnitId', +-- the 'includeRequiresRn' is not so useful (because it +-- includes the requirements renaming that is no longer +-- needed); use 'ci_prov_renaming' instead. +data ComponentInclude id rn = ComponentInclude { + ci_ann_id :: AnnotatedId id, + ci_renaming :: rn, + -- | Did this come from an entry in @mixins@, or + -- was implicitly generated by @build-depends@? + ci_implicit :: Bool + } + +ci_id :: ComponentInclude id rn -> id +ci_id = ann_id . ci_ann_id + +ci_pkgid :: ComponentInclude id rn -> PackageId +ci_pkgid = ann_pid . ci_ann_id + +-- | This should always return 'CLibName' or 'CSubLibName' +ci_cname :: ComponentInclude id rn -> ComponentName +ci_cname = ann_cname . 
ci_ann_id diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ComponentLocalBuildInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ComponentLocalBuildInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ComponentLocalBuildInfo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ComponentLocalBuildInfo.hs 2018-11-26 08:42:52.000000000 +0000 @@ -0,0 +1,127 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE TypeFamilies #-} + +module Distribution.Types.ComponentLocalBuildInfo ( + ComponentLocalBuildInfo(..), + componentIsIndefinite, + maybeComponentInstantiatedWith, + ) where + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.ModuleName + +import Distribution.Backpack +import Distribution.Compat.Graph +import Distribution.Types.ComponentId +import Distribution.Types.MungedPackageId +import Distribution.Types.UnitId +import Distribution.Types.ComponentName +import Distribution.Types.MungedPackageName + +import Distribution.PackageDescription +import qualified Distribution.InstalledPackageInfo as Installed + +-- | The first five fields are common across all algebraic variants. +data ComponentLocalBuildInfo + = LibComponentLocalBuildInfo { + -- | It would be very convenient to store the literal Library here, + -- but if we do that, it will get serialized (via the Binary) + -- instance twice. So instead we just provide the ComponentName, + -- which can be used to find the Component in the + -- PackageDescription. NB: eventually, this will NOT uniquely + -- identify the ComponentLocalBuildInfo. + componentLocalName :: ComponentName, + -- | The computed 'ComponentId' of this component. + componentComponentId :: ComponentId, + -- | The computed 'UnitId' which uniquely identifies this + -- component. Might be hashed. 
+ componentUnitId :: UnitId, + -- | Is this an indefinite component (i.e. has unfilled holes)? + componentIsIndefinite_ :: Bool, + -- | How the component was instantiated + componentInstantiatedWith :: [(ModuleName, OpenModule)], + -- | Resolved internal and external package dependencies for this component. + -- The 'BuildInfo' specifies a set of build dependencies that must be + -- satisfied in terms of version ranges. This field fixes those dependencies + -- to the specific versions available on this machine for this compiler. + componentPackageDeps :: [(UnitId, MungedPackageId)], + -- | The set of packages that are brought into scope during + -- compilation, including a 'ModuleRenaming' which may used + -- to hide or rename modules. This is what gets translated into + -- @-package-id@ arguments. This is a modernized version of + -- 'componentPackageDeps', which is kept around for BC purposes. + componentIncludes :: [(OpenUnitId, ModuleRenaming)], + componentExeDeps :: [UnitId], + -- | The internal dependencies which induce a graph on the + -- 'ComponentLocalBuildInfo' of this package. This does NOT + -- coincide with 'componentPackageDeps' because it ALSO records + -- 'build-tool' dependencies on executables. Maybe one day + -- @cabal-install@ will also handle these correctly too! + componentInternalDeps :: [UnitId], + -- | Compatibility "package key" that we pass to older versions of GHC. + componentCompatPackageKey :: String, + -- | Compatibility "package name" that we register this component as. + componentCompatPackageName :: MungedPackageName, + -- | A list of exposed modules (either defined in this component, + -- or reexported from another component.) + componentExposedModules :: [Installed.ExposedModule], + -- | Convenience field, specifying whether or not this is the + -- "public library" that has the same name as the package. 
+ componentIsPublic :: Bool + } + -- TODO: refactor all these duplicates + | FLibComponentLocalBuildInfo { + componentLocalName :: ComponentName, + componentComponentId :: ComponentId, + componentUnitId :: UnitId, + componentPackageDeps :: [(UnitId, MungedPackageId)], + componentIncludes :: [(OpenUnitId, ModuleRenaming)], + componentExeDeps :: [UnitId], + componentInternalDeps :: [UnitId] + } + | ExeComponentLocalBuildInfo { + componentLocalName :: ComponentName, + componentComponentId :: ComponentId, + componentUnitId :: UnitId, + componentPackageDeps :: [(UnitId, MungedPackageId)], + componentIncludes :: [(OpenUnitId, ModuleRenaming)], + componentExeDeps :: [UnitId], + componentInternalDeps :: [UnitId] + } + | TestComponentLocalBuildInfo { + componentLocalName :: ComponentName, + componentComponentId :: ComponentId, + componentUnitId :: UnitId, + componentPackageDeps :: [(UnitId, MungedPackageId)], + componentIncludes :: [(OpenUnitId, ModuleRenaming)], + componentExeDeps :: [UnitId], + componentInternalDeps :: [UnitId] + + } + | BenchComponentLocalBuildInfo { + componentLocalName :: ComponentName, + componentComponentId :: ComponentId, + componentUnitId :: UnitId, + componentPackageDeps :: [(UnitId, MungedPackageId)], + componentIncludes :: [(OpenUnitId, ModuleRenaming)], + componentExeDeps :: [UnitId], + componentInternalDeps :: [UnitId] + } + deriving (Generic, Read, Show) + +instance Binary ComponentLocalBuildInfo + +instance IsNode ComponentLocalBuildInfo where + type Key ComponentLocalBuildInfo = UnitId + nodeKey = componentUnitId + nodeNeighbors = componentInternalDeps + +componentIsIndefinite :: ComponentLocalBuildInfo -> Bool +componentIsIndefinite LibComponentLocalBuildInfo{ componentIsIndefinite_ = b } = b +componentIsIndefinite _ = False + +maybeComponentInstantiatedWith :: ComponentLocalBuildInfo -> Maybe [(ModuleName, OpenModule)] +maybeComponentInstantiatedWith + LibComponentLocalBuildInfo { componentInstantiatedWith = insts } = Just insts 
+maybeComponentInstantiatedWith _ = Nothing diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ComponentName.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ComponentName.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ComponentName.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ComponentName.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,91 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.ComponentName ( + ComponentName(..), + defaultLibName, + libraryComponentName, + showComponentName, + componentNameStanza, + componentNameString, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.ReadP ((<++)) +import Distribution.Types.UnqualComponentName +import Distribution.Pretty +import Distribution.Text + +import Text.PrettyPrint as Disp + +-- Libraries live in a separate namespace, so must distinguish +data ComponentName = CLibName + | CSubLibName UnqualComponentName + | CFLibName UnqualComponentName + | CExeName UnqualComponentName + | CTestName UnqualComponentName + | CBenchName UnqualComponentName + deriving (Eq, Generic, Ord, Read, Show, Typeable) + +instance Binary ComponentName + +-- Build-target-ish syntax +instance Pretty ComponentName where + pretty CLibName = Disp.text "lib" + pretty (CSubLibName str) = Disp.text "lib:" <<>> pretty str + pretty (CFLibName str) = Disp.text "flib:" <<>> pretty str + pretty (CExeName str) = Disp.text "exe:" <<>> pretty str + pretty (CTestName str) = Disp.text "test:" <<>> pretty str + pretty (CBenchName str) = Disp.text "bench:" <<>> pretty str + +instance Text ComponentName where + parse = parseComposite <++ parseSingle + where + parseSingle = Parse.string "lib" >> return CLibName + 
parseComposite = do + ctor <- Parse.choice [ Parse.string "lib:" >> return CSubLibName + , Parse.string "flib:" >> return CFLibName + , Parse.string "exe:" >> return CExeName + , Parse.string "bench:" >> return CBenchName + , Parse.string "test:" >> return CTestName ] + ctor <$> parse + +defaultLibName :: ComponentName +defaultLibName = CLibName + +showComponentName :: ComponentName -> String +showComponentName CLibName = "library" +showComponentName (CSubLibName name) = "library '" ++ display name ++ "'" +showComponentName (CFLibName name) = "foreign library '" ++ display name ++ "'" +showComponentName (CExeName name) = "executable '" ++ display name ++ "'" +showComponentName (CTestName name) = "test suite '" ++ display name ++ "'" +showComponentName (CBenchName name) = "benchmark '" ++ display name ++ "'" + +componentNameStanza :: ComponentName -> String +componentNameStanza CLibName = "library" +componentNameStanza (CSubLibName name) = "library " ++ display name +componentNameStanza (CFLibName name) = "foreign-library " ++ display name +componentNameStanza (CExeName name) = "executable " ++ display name +componentNameStanza (CTestName name) = "test-suite " ++ display name +componentNameStanza (CBenchName name) = "benchmark " ++ display name + +-- | This gets the underlying unqualified component name. In fact, it is +-- guaranteed to uniquely identify a component, returning +-- @Nothing@ if the 'ComponentName' was for the public +-- library. +componentNameString :: ComponentName -> Maybe UnqualComponentName +componentNameString CLibName = Nothing +componentNameString (CSubLibName n) = Just n +componentNameString (CFLibName n) = Just n +componentNameString (CExeName n) = Just n +componentNameString (CTestName n) = Just n +componentNameString (CBenchName n) = Just n + +-- | Convert the 'UnqualComponentName' of a library into a +-- 'ComponentName'. 
+libraryComponentName :: Maybe UnqualComponentName -> ComponentName +libraryComponentName Nothing = CLibName +libraryComponentName (Just n) = CSubLibName n diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ComponentRequestedSpec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ComponentRequestedSpec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ComponentRequestedSpec.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ComponentRequestedSpec.hs 2018-11-26 08:42:52.000000000 +0000 @@ -0,0 +1,123 @@ +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Types.ComponentRequestedSpec ( + -- $buildable_vs_enabled_components + + ComponentRequestedSpec(..), + ComponentDisabledReason(..), + + defaultComponentRequestedSpec, + componentNameRequested, + + componentEnabled, + componentDisabledReason, +) where + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.Text + +import Distribution.Types.Component -- TODO: maybe remove me? +import Distribution.Types.ComponentName + +-- $buildable_vs_enabled_components +-- #buildable_vs_enabled_components# +-- +-- = Note: Buildable versus requested versus enabled components +-- What's the difference between a buildable component (ala +-- 'componentBuildable'), a requested component +-- (ala 'componentNameRequested'), and an enabled component (ala +-- 'componentEnabled')? +-- +-- A component is __buildable__ if, after resolving flags and +-- conditionals, there is no @buildable: False@ property in it. +-- This is a /static/ property that arises from the +-- Cabal file and the package description flattening; once we have +-- a 'PackageDescription' buildability is known. +-- +-- A component is __requested__ if a user specified, via a +-- the flags and arguments passed to configure, that it should be +-- built. 
E.g., @--enable-tests@ or @--enable-benchmarks@ request +-- all tests and benchmarks, if they are provided. What is requested +-- can be read off directly from 'ComponentRequestedSpec'. A requested +-- component is not always buildable; e.g., a user may @--enable-tests@ +-- but one of the test suites may have @buildable: False@. +-- +-- A component is __enabled__ if it is BOTH buildable +-- and requested. Once we have a 'LocalBuildInfo', whether or not a +-- component is enabled is known. +-- +-- Generally speaking, most Cabal API code cares if a component +-- is enabled. (For example, if you want to run a preprocessor on each +-- component prior to building them, you want to run this on each +-- /enabled/ component.) +-- +-- Note that post-configuration, you will generally not see a +-- non-buildable 'Component'. This is because 'flattenPD' will drop +-- any such components from 'PackageDescription'. See #3858 for +-- an example where this causes problems. + +-- | Describes what components are enabled by user-interaction. +-- See also this note in +-- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components". +-- +-- @since 2.0.0.2 +data ComponentRequestedSpec + = ComponentRequestedSpec { testsRequested :: Bool + , benchmarksRequested :: Bool } + | OneComponentRequestedSpec ComponentName + deriving (Generic, Read, Show, Eq) +instance Binary ComponentRequestedSpec + +-- | The default set of enabled components. Historically tests and +-- benchmarks are NOT enabled by default. +-- +-- @since 2.0.0.2 +defaultComponentRequestedSpec :: ComponentRequestedSpec +defaultComponentRequestedSpec = ComponentRequestedSpec False False + +-- | Is this component enabled? See also this note in +-- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components". +-- +-- @since 2.0.0.2 +componentEnabled :: ComponentRequestedSpec -> Component -> Bool +componentEnabled enabled = isNothing . 
componentDisabledReason enabled + +-- | Is this component name enabled? See also this note in +-- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components". +-- +-- @since 2.0.0.2 +componentNameRequested :: ComponentRequestedSpec -> ComponentName -> Bool +componentNameRequested enabled = isNothing . componentNameNotRequestedReason enabled + +-- | Is this component disabled, and if so, why? +-- +-- @since 2.0.0.2 +componentDisabledReason :: ComponentRequestedSpec -> Component + -> Maybe ComponentDisabledReason +componentDisabledReason enabled comp + | not (componentBuildable comp) = Just DisabledComponent + | otherwise = componentNameNotRequestedReason enabled (componentName comp) + +-- | Is this component name disabled, and if so, why? +-- +-- @since 2.0.0.2 +componentNameNotRequestedReason :: ComponentRequestedSpec -> ComponentName + -> Maybe ComponentDisabledReason +componentNameNotRequestedReason + ComponentRequestedSpec{ testsRequested = False } (CTestName _) + = Just DisabledAllTests +componentNameNotRequestedReason + ComponentRequestedSpec{ benchmarksRequested = False } (CBenchName _) + = Just DisabledAllBenchmarks +componentNameNotRequestedReason ComponentRequestedSpec{} _ = Nothing +componentNameNotRequestedReason (OneComponentRequestedSpec cname) c + | c == cname = Nothing + | otherwise = Just (DisabledAllButOne (display cname)) + +-- | A reason explaining why a component is disabled. 
+-- +-- @since 2.0.0.2 +data ComponentDisabledReason = DisabledComponent + | DisabledAllTests + | DisabledAllBenchmarks + | DisabledAllButOne String diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Condition.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Condition.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Condition.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Condition.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,135 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.Condition ( + Condition(..), + cNot, + cAnd, + cOr, + simplifyCondition, +) where + +import Prelude () +import Distribution.Compat.Prelude + +-- | A boolean expression parameterized over the variable type used. +data Condition c = Var c + | Lit Bool + | CNot (Condition c) + | COr (Condition c) (Condition c) + | CAnd (Condition c) (Condition c) + deriving (Show, Eq, Typeable, Data, Generic) + +-- | Boolean negation of a 'Condition' value. +cNot :: Condition a -> Condition a +cNot (Lit b) = Lit (not b) +cNot (CNot c) = c +cNot c = CNot c + +-- | Boolean AND of two 'Condtion' values. +cAnd :: Condition a -> Condition a -> Condition a +cAnd (Lit False) _ = Lit False +cAnd _ (Lit False) = Lit False +cAnd (Lit True) x = x +cAnd x (Lit True) = x +cAnd x y = CAnd x y + +-- | Boolean OR of two 'Condition' values. 
+cOr :: Eq v => Condition v -> Condition v -> Condition v +cOr (Lit True) _ = Lit True +cOr _ (Lit True) = Lit True +cOr (Lit False) x = x +cOr x (Lit False) = x +cOr c (CNot d) + | c == d = Lit True +cOr (CNot c) d + | c == d = Lit True +cOr x y = COr x y + +instance Functor Condition where + f `fmap` Var c = Var (f c) + _ `fmap` Lit c = Lit c + f `fmap` CNot c = CNot (fmap f c) + f `fmap` COr c d = COr (fmap f c) (fmap f d) + f `fmap` CAnd c d = CAnd (fmap f c) (fmap f d) + +instance Foldable Condition where + f `foldMap` Var c = f c + _ `foldMap` Lit _ = mempty + f `foldMap` CNot c = foldMap f c + f `foldMap` COr c d = foldMap f c `mappend` foldMap f d + f `foldMap` CAnd c d = foldMap f c `mappend` foldMap f d + +instance Traversable Condition where + f `traverse` Var c = Var `fmap` f c + _ `traverse` Lit c = pure $ Lit c + f `traverse` CNot c = CNot `fmap` traverse f c + f `traverse` COr c d = COr `fmap` traverse f c <*> traverse f d + f `traverse` CAnd c d = CAnd `fmap` traverse f c <*> traverse f d + +instance Applicative Condition where + pure = Var + (<*>) = ap + +instance Monad Condition where + return = pure + -- Terminating cases + (>>=) (Lit x) _ = Lit x + (>>=) (Var x) f = f x + -- Recursing cases + (>>=) (CNot x ) f = CNot (x >>= f) + (>>=) (COr x y) f = COr (x >>= f) (y >>= f) + (>>=) (CAnd x y) f = CAnd (x >>= f) (y >>= f) + +instance Monoid (Condition a) where + mempty = Lit False + mappend = (<>) + +instance Semigroup (Condition a) where + (<>) = COr + +instance Alternative Condition where + empty = mempty + (<|>) = mappend + +instance MonadPlus Condition where + mzero = mempty + mplus = mappend + +instance Binary c => Binary (Condition c) + +instance NFData c => NFData (Condition c) where rnf = genericRnf + +-- | Simplify the condition and return its free variables. +simplifyCondition :: Condition c + -> (c -> Either d Bool) -- ^ (partial) variable assignment + -> (Condition d, [d]) +simplifyCondition cond i = fv . 
walk $ cond + where + walk cnd = case cnd of + Var v -> either Var Lit (i v) + Lit b -> Lit b + CNot c -> case walk c of + Lit True -> Lit False + Lit False -> Lit True + c' -> CNot c' + COr c d -> case (walk c, walk d) of + (Lit False, d') -> d' + (Lit True, _) -> Lit True + (c', Lit False) -> c' + (_, Lit True) -> Lit True + (c',d') -> COr c' d' + CAnd c d -> case (walk c, walk d) of + (Lit False, _) -> Lit False + (Lit True, d') -> d' + (_, Lit False) -> Lit False + (c', Lit True) -> c' + (c',d') -> CAnd c' d' + -- gather free vars + fv c = (c, fv' c) + fv' c = case c of + Var v -> [v] + Lit _ -> [] + CNot c' -> fv' c' + COr c1 c2 -> fv' c1 ++ fv' c2 + CAnd c1 c2 -> fv' c1 ++ fv' c2 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/CondTree.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/CondTree.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/CondTree.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/CondTree.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,179 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveFoldable #-} +{-# LANGUAGE DeriveTraversable #-} + +module Distribution.Types.CondTree ( + CondTree(..), + CondBranch(..), + condIfThen, + condIfThenElse, + mapCondTree, + mapTreeConstrs, + mapTreeConds, + mapTreeData, + traverseCondTreeV, + traverseCondBranchV, + traverseCondTreeC, + traverseCondBranchC, + extractCondition, + simplifyCondTree, + ignoreConditions, +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.Condition + +import qualified Distribution.Compat.Lens as L + + +-- | A 'CondTree' is used to represent the conditional structure of +-- a Cabal file, reflecting a syntax element subject to constraints, +-- and then any number of 
sub-elements which may be enabled subject +-- to some condition. Both @a@ and @c@ are usually 'Monoid's. +-- +-- To be more concrete, consider the following fragment of a @Cabal@ +-- file: +-- +-- @ +-- build-depends: base >= 4.0 +-- if flag(extra) +-- build-depends: base >= 4.2 +-- @ +-- +-- One way to represent this is to have @'CondTree' 'ConfVar' +-- ['Dependency'] 'BuildInfo'@. Here, 'condTreeData' represents +-- the actual fields which are not behind any conditional, while +-- 'condTreeComponents' recursively records any further fields +-- which are behind a conditional. 'condTreeConstraints' records +-- the constraints (in this case, @base >= 4.0@) which would +-- be applied if you use this syntax; in general, this is +-- derived off of 'targetBuildInfo' (perhaps a good refactoring +-- would be to convert this into an opaque type, with a smart +-- constructor that pre-computes the dependencies.) +-- +data CondTree v c a = CondNode + { condTreeData :: a + , condTreeConstraints :: c + , condTreeComponents :: [CondBranch v c a] + } + deriving (Show, Eq, Typeable, Data, Generic, Functor, Foldable, Traversable) + +instance (Binary v, Binary c, Binary a) => Binary (CondTree v c a) + +instance (NFData v, NFData c, NFData a) => NFData (CondTree v c a) where rnf = genericRnf + +-- | A 'CondBranch' represents a conditional branch, e.g., @if +-- flag(foo)@ on some syntax @a@. It also has an optional false +-- branch. +-- +data CondBranch v c a = CondBranch + { condBranchCondition :: Condition v + , condBranchIfTrue :: CondTree v c a + , condBranchIfFalse :: Maybe (CondTree v c a) + } + deriving (Show, Eq, Typeable, Data, Generic, Functor, Traversable) + +-- This instance is written by hand because GHC 8.0.1/8.0.2 infinite +-- loops when trying to derive it with optimizations. 
See +-- https://ghc.haskell.org/trac/ghc/ticket/13056 +instance Foldable (CondBranch v c) where + foldMap f (CondBranch _ c Nothing) = foldMap f c + foldMap f (CondBranch _ c (Just a)) = foldMap f c `mappend` foldMap f a + +instance (Binary v, Binary c, Binary a) => Binary (CondBranch v c a) + +instance (NFData v, NFData c, NFData a) => NFData (CondBranch v c a) where rnf = genericRnf + +condIfThen :: Condition v -> CondTree v c a -> CondBranch v c a +condIfThen c t = CondBranch c t Nothing + +condIfThenElse :: Condition v -> CondTree v c a -> CondTree v c a -> CondBranch v c a +condIfThenElse c t e = CondBranch c t (Just e) + +mapCondTree :: (a -> b) -> (c -> d) -> (Condition v -> Condition w) + -> CondTree v c a -> CondTree w d b +mapCondTree fa fc fcnd (CondNode a c ifs) = + CondNode (fa a) (fc c) (map g ifs) + where + g (CondBranch cnd t me) + = CondBranch (fcnd cnd) + (mapCondTree fa fc fcnd t) + (fmap (mapCondTree fa fc fcnd) me) + +mapTreeConstrs :: (c -> d) -> CondTree v c a -> CondTree v d a +mapTreeConstrs f = mapCondTree id f id + +mapTreeConds :: (Condition v -> Condition w) -> CondTree v c a -> CondTree w c a +mapTreeConds f = mapCondTree id id f + +mapTreeData :: (a -> b) -> CondTree v c a -> CondTree v c b +mapTreeData f = mapCondTree f id id + +-- | @@Traversal@@ for the variables +traverseCondTreeV :: L.Traversal (CondTree v c a) (CondTree w c a) v w +traverseCondTreeV f (CondNode a c ifs) = + CondNode a c <$> traverse (traverseCondBranchV f) ifs + +-- | @@Traversal@@ for the variables +traverseCondBranchV :: L.Traversal (CondBranch v c a) (CondBranch w c a) v w +traverseCondBranchV f (CondBranch cnd t me) = CondBranch + <$> traverse f cnd + <*> traverseCondTreeV f t + <*> traverse (traverseCondTreeV f) me + +-- | @@Traversal@@ for the aggregated constraints +traverseCondTreeC :: L.Traversal (CondTree v c a) (CondTree v d a) c d +traverseCondTreeC f (CondNode a c ifs) = + CondNode a <$> f c <*> traverse (traverseCondBranchC f) ifs + +-- | 
@@Traversal@@ for the aggregated constraints +traverseCondBranchC :: L.Traversal (CondBranch v c a) (CondBranch v d a) c d +traverseCondBranchC f (CondBranch cnd t me) = CondBranch cnd + <$> traverseCondTreeC f t + <*> traverse (traverseCondTreeC f) me + + +-- | Extract the condition matched by the given predicate from a cond tree. +-- +-- We use this mainly for extracting buildable conditions (see the Note above), +-- but the function is in fact more general. +extractCondition :: Eq v => (a -> Bool) -> CondTree v c a -> Condition v +extractCondition p = go + where + go (CondNode x _ cs) | not (p x) = Lit False + | otherwise = goList cs + + goList [] = Lit True + goList (CondBranch c t e : cs) = + let + ct = go t + ce = maybe (Lit True) go e + in + ((c `cAnd` ct) `cOr` (CNot c `cAnd` ce)) `cAnd` goList cs + +-- | Flattens a CondTree using a partial flag assignment. When a condition +-- cannot be evaluated, both branches are ignored. +simplifyCondTree :: (Monoid a, Monoid d) => + (v -> Either v Bool) + -> CondTree v d a + -> (d, a) +simplifyCondTree env (CondNode a d ifs) = + mconcat $ (d, a) : mapMaybe simplifyIf ifs + where + simplifyIf (CondBranch cnd t me) = + case simplifyCondition cnd env of + (Lit True, _) -> Just $ simplifyCondTree env t + (Lit False, _) -> fmap (simplifyCondTree env) me + _ -> Nothing + +-- | Flatten a CondTree. This will resolve the CondTree by taking all +-- possible paths into account. Note that since branches represent exclusive +-- choices this may not result in a \"sane\" result. 
+ignoreConditions :: (Monoid a, Monoid c) => CondTree v c a -> (a, c) +ignoreConditions (CondNode a c ifs) = (a, c) `mappend` mconcat (concatMap f ifs) + where f (CondBranch _ t me) = ignoreConditions t + : maybeToList (fmap ignoreConditions me) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Dependency.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Dependency.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Dependency.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Dependency.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,72 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Types.Dependency + ( Dependency(..) + , depPkgName + , depVerRange + , thisPackageVersion + , notThisPackageVersion + , simplifyDependency + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Version ( VersionRange, thisVersion + , notThisVersion, anyVersion + , simplifyVersionRange ) + +import qualified Distribution.Compat.ReadP as Parse + +import Distribution.Text +import Distribution.Pretty +import Distribution.Parsec.Class +import Distribution.Types.PackageId +import Distribution.Types.PackageName + +import Text.PrettyPrint ((<+>)) + +-- | Describes a dependency on a source package (API) +-- +data Dependency = Dependency PackageName VersionRange + deriving (Generic, Read, Show, Eq, Typeable, Data) + +depPkgName :: Dependency -> PackageName +depPkgName (Dependency pn _) = pn + +depVerRange :: Dependency -> VersionRange +depVerRange (Dependency _ vr) = vr + +instance Binary Dependency +instance NFData Dependency where rnf = genericRnf + +instance Pretty Dependency where + pretty (Dependency name ver) = pretty name <+> pretty ver + +instance Parsec Dependency where + parsec = do + name <- lexemeParsec + ver <- parsec <|> 
pure anyVersion + return (Dependency name ver) + +instance Text Dependency where + parse = do name <- parse + Parse.skipSpaces + ver <- parse Parse.<++ return anyVersion + Parse.skipSpaces + return (Dependency name ver) + +thisPackageVersion :: PackageIdentifier -> Dependency +thisPackageVersion (PackageIdentifier n v) = + Dependency n (thisVersion v) + +notThisPackageVersion :: PackageIdentifier -> Dependency +notThisPackageVersion (PackageIdentifier n v) = + Dependency n (notThisVersion v) + +-- | Simplify the 'VersionRange' expression in a 'Dependency'. +-- See 'simplifyVersionRange'. +-- +simplifyDependency :: Dependency -> Dependency +simplifyDependency (Dependency name range) = + Dependency name (simplifyVersionRange range) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/DependencyMap.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/DependencyMap.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/DependencyMap.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/DependencyMap.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,51 @@ +module Distribution.Types.DependencyMap ( + DependencyMap, + toDepMap, + fromDepMap, + constrainBy, +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.Dependency +import Distribution.Types.PackageName +import Distribution.Version + +import qualified Data.Map.Lazy as Map + +-- | A map of dependencies. Newtyped since the default monoid instance is not +-- appropriate. The monoid instance uses 'intersectVersionRanges'. 
+newtype DependencyMap = DependencyMap { unDependencyMap :: Map PackageName VersionRange } + deriving (Show, Read) + +instance Monoid DependencyMap where + mempty = DependencyMap Map.empty + mappend = (<>) + +instance Semigroup DependencyMap where + (DependencyMap a) <> (DependencyMap b) = + DependencyMap (Map.unionWith intersectVersionRanges a b) + +toDepMap :: [Dependency] -> DependencyMap +toDepMap ds = + DependencyMap $ Map.fromListWith intersectVersionRanges [ (p,vr) | Dependency p vr <- ds ] + +fromDepMap :: DependencyMap -> [Dependency] +fromDepMap m = [ Dependency p vr | (p,vr) <- Map.toList (unDependencyMap m) ] + +-- Apply extra constraints to a dependency map. +-- Combines dependencies where the result will only contain keys from the left +-- (first) map. If a key also exists in the right map, both constraints will +-- be intersected. +constrainBy :: DependencyMap -- ^ Input map + -> DependencyMap -- ^ Extra constraints + -> DependencyMap +constrainBy left extra = + DependencyMap $ + Map.foldrWithKey tightenConstraint (unDependencyMap left) + (unDependencyMap extra) + where tightenConstraint n c l = + case Map.lookup n l of + Nothing -> l + Just vr -> Map.insert n (intersectVersionRanges vr c) l diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Executable/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Executable/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Executable/Lens.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Executable/Lens.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,31 @@ +module Distribution.Types.Executable.Lens ( + Executable, + module Distribution.Types.Executable.Lens, + ) where + +import Distribution.Compat.Lens +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Types.BuildInfo (BuildInfo) +import 
Distribution.Types.Executable (Executable) +import Distribution.Types.ExecutableScope (ExecutableScope) +import Distribution.Types.UnqualComponentName (UnqualComponentName) + +import qualified Distribution.Types.Executable as T + +exeName :: Lens' Executable UnqualComponentName +exeName f s = fmap (\x -> s { T.exeName = x }) (f (T.exeName s)) +{-# INLINE exeName #-} + +modulePath :: Lens' Executable String +modulePath f s = fmap (\x -> s { T.modulePath = x }) (f (T.modulePath s)) +{-# INLINE modulePath #-} + +exeScope :: Lens' Executable ExecutableScope +exeScope f s = fmap (\x -> s { T.exeScope = x }) (f (T.exeScope s)) +{-# INLINE exeScope #-} + +exeBuildInfo :: Lens' Executable BuildInfo +exeBuildInfo f s = fmap (\x -> s { T.buildInfo = x }) (f (T.buildInfo s)) +{-# INLINE exeBuildInfo #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Executable.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Executable.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Executable.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Executable.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,65 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.Executable ( + Executable(..), + emptyExecutable, + exeModules, + exeModulesAutogen +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.BuildInfo +import Distribution.Types.UnqualComponentName +import Distribution.Types.ExecutableScope +import Distribution.ModuleName + +import qualified Distribution.Types.BuildInfo.Lens as L + +data Executable = Executable { + exeName :: UnqualComponentName, + modulePath :: FilePath, + exeScope :: ExecutableScope, + buildInfo :: BuildInfo + } + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance L.HasBuildInfo 
Executable where + buildInfo f l = (\x -> l { buildInfo = x }) <$> f (buildInfo l) + +instance Binary Executable + +instance NFData Executable where rnf = genericRnf + +instance Monoid Executable where + mempty = gmempty + mappend = (<>) + +instance Semigroup Executable where + a <> b = Executable{ + exeName = combine' exeName, + modulePath = combine modulePath, + exeScope = combine exeScope, + buildInfo = combine buildInfo + } + where combine field = field a `mappend` field b + combine' field = case ( unUnqualComponentName $ field a + , unUnqualComponentName $ field b) of + ("", _) -> field b + (_, "") -> field a + (x, y) -> error $ "Ambiguous values for executable field: '" + ++ x ++ "' and '" ++ y ++ "'" + +emptyExecutable :: Executable +emptyExecutable = mempty + +-- | Get all the module names from an exe +exeModules :: Executable -> [ModuleName] +exeModules exe = otherModules (buildInfo exe) + +-- | Get all the auto generated module names from an exe +-- This are a subset of 'exeModules'. 
+exeModulesAutogen :: Executable -> [ModuleName] +exeModulesAutogen exe = autogenModules (buildInfo exe) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ExecutableScope.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ExecutableScope.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ExecutableScope.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ExecutableScope.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,50 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.ExecutableScope ( + ExecutableScope(..), +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Pretty +import Distribution.Parsec.Class +import Distribution.Text + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +data ExecutableScope = ExecutablePublic + | ExecutablePrivate + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Pretty ExecutableScope where + pretty ExecutablePublic = Disp.text "public" + pretty ExecutablePrivate = Disp.text "private" + +instance Parsec ExecutableScope where + parsec = P.try pub <|> pri where + pub = ExecutablePublic <$ P.string "public" + pri = ExecutablePrivate <$ P.string "private" + +instance Text ExecutableScope where + parse = Parse.choice + [ Parse.string "public" >> return ExecutablePublic + , Parse.string "private" >> return ExecutablePrivate + ] + +instance Binary ExecutableScope + +instance NFData ExecutableScope where rnf = genericRnf + +-- | 'Any' like semigroup, where 'ExecutablePrivate' is 'Any True' +instance Semigroup ExecutableScope where + ExecutablePublic <> x = x + x@ExecutablePrivate <> _ = x + +-- | 'mempty' = 'ExecutablePublic' +instance Monoid ExecutableScope where 
+ mempty = ExecutablePublic + mappend = (<>) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ExeDependency.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ExeDependency.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ExeDependency.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ExeDependency.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,57 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Types.ExeDependency + ( ExeDependency(..) + , qualifiedExeName + ) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text +import Distribution.Types.ComponentName +import Distribution.Types.PackageName +import Distribution.Types.UnqualComponentName +import Distribution.Version (VersionRange, anyVersion) + +import qualified Distribution.Compat.CharParsing as P +import Distribution.Compat.ReadP ((<++)) +import qualified Distribution.Compat.ReadP as Parse +import Text.PrettyPrint (text, (<+>)) + +-- | Describes a dependency on an executable from a package +-- +data ExeDependency = ExeDependency + PackageName + UnqualComponentName -- name of executable component of package + VersionRange + deriving (Generic, Read, Show, Eq, Typeable, Data) + +instance Binary ExeDependency +instance NFData ExeDependency where rnf = genericRnf + +instance Pretty ExeDependency where + pretty (ExeDependency name exe ver) = + (pretty name <<>> text ":" <<>> pretty exe) <+> pretty ver + +instance Parsec ExeDependency where + parsec = do + name <- lexemeParsec + _ <- P.char ':' + exe <- lexemeParsec + ver <- parsec <|> pure anyVersion + return (ExeDependency name exe ver) + +instance Text ExeDependency where + parse = do name <- parse + _ <- Parse.char ':' + exe <- parse + 
Parse.skipSpaces + ver <- parse <++ return anyVersion + Parse.skipSpaces + return (ExeDependency name exe ver) + +qualifiedExeName :: ExeDependency -> ComponentName +qualifiedExeName (ExeDependency _ ucn _) = CExeName ucn diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ExposedModule.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ExposedModule.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ExposedModule.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ExposedModule.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,57 @@ +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Types.ExposedModule where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Backpack +import Distribution.ModuleName +import Distribution.Parsec.Class +import Distribution.ParseUtils (parseModuleNameQ) +import Distribution.Pretty +import Distribution.Text + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +data ExposedModule + = ExposedModule { + exposedName :: ModuleName, + exposedReexport :: Maybe OpenModule + } + deriving (Eq, Generic, Read, Show) + +instance Pretty ExposedModule where + pretty (ExposedModule m reexport) = + Disp.hsep [ pretty m + , case reexport of + Just m' -> Disp.hsep [Disp.text "from", disp m'] + Nothing -> Disp.empty + ] + +instance Parsec ExposedModule where + parsec = do + m <- parsecMaybeQuoted parsec + P.spaces + + reexport <- P.optional $ do + _ <- P.string "from" + P.skipSpaces1 + parsec + + return (ExposedModule m reexport) + +instance Text ExposedModule where + parse = do + m <- parseModuleNameQ + Parse.skipSpaces + reexport <- Parse.option Nothing $ do + _ <- Parse.string "from" + Parse.skipSpaces + fmap Just parse + return (ExposedModule m 
reexport) + +instance Binary ExposedModule + +instance NFData ExposedModule where rnf = genericRnf diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ForeignLib/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ForeignLib/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ForeignLib/Lens.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ForeignLib/Lens.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,45 @@ +module Distribution.Types.ForeignLib.Lens ( + ForeignLib, + module Distribution.Types.ForeignLib.Lens, + ) where + +import Distribution.Compat.Lens +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Types.BuildInfo (BuildInfo) +import Distribution.Types.ForeignLib (ForeignLib, LibVersionInfo) +import Distribution.Types.ForeignLibOption (ForeignLibOption) +import Distribution.Types.ForeignLibType (ForeignLibType) +import Distribution.Types.UnqualComponentName (UnqualComponentName) +import Distribution.Version (Version) + +import qualified Distribution.Types.ForeignLib as T + +foreignLibName :: Lens' ForeignLib UnqualComponentName +foreignLibName f s = fmap (\x -> s { T.foreignLibName = x }) (f (T.foreignLibName s)) +{-# INLINE foreignLibName #-} + +foreignLibType :: Lens' ForeignLib ForeignLibType +foreignLibType f s = fmap (\x -> s { T.foreignLibType = x }) (f (T.foreignLibType s)) +{-# INLINE foreignLibType #-} + +foreignLibOptions :: Lens' ForeignLib [ForeignLibOption] +foreignLibOptions f s = fmap (\x -> s { T.foreignLibOptions = x }) (f (T.foreignLibOptions s)) +{-# INLINE foreignLibOptions #-} + +foreignLibBuildInfo :: Lens' ForeignLib BuildInfo +foreignLibBuildInfo f s = fmap (\x -> s { T.foreignLibBuildInfo = x }) (f (T.foreignLibBuildInfo s)) +{-# INLINE foreignLibBuildInfo #-} + +foreignLibVersionInfo :: Lens' ForeignLib (Maybe 
LibVersionInfo) +foreignLibVersionInfo f s = fmap (\x -> s { T.foreignLibVersionInfo = x }) (f (T.foreignLibVersionInfo s)) +{-# INLINE foreignLibVersionInfo #-} + +foreignLibVersionLinux :: Lens' ForeignLib (Maybe Version) +foreignLibVersionLinux f s = fmap (\x -> s { T.foreignLibVersionLinux = x }) (f (T.foreignLibVersionLinux s)) +{-# INLINE foreignLibVersionLinux #-} + +foreignLibModDefFile :: Lens' ForeignLib [FilePath] +foreignLibModDefFile f s = fmap (\x -> s { T.foreignLibModDefFile = x }) (f (T.foreignLibModDefFile s)) +{-# INLINE foreignLibModDefFile #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ForeignLib.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ForeignLib.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ForeignLib.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ForeignLib.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,213 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.ForeignLib( + ForeignLib(..), + emptyForeignLib, + foreignLibModules, + foreignLibIsShared, + foreignLibVersion, + + LibVersionInfo, + mkLibVersionInfo, + libVersionInfoCRA, + libVersionNumber, + libVersionNumberShow, + libVersionMajor +) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.ModuleName +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.System +import Distribution.Text +import Distribution.Types.BuildInfo +import Distribution.Types.ForeignLibOption +import Distribution.Types.ForeignLibType +import Distribution.Types.UnqualComponentName +import Distribution.Version + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp +import qualified Text.Read as Read + 
+import qualified Distribution.Types.BuildInfo.Lens as L + +-- | A foreign library stanza is like a library stanza, except that +-- the built code is intended for consumption by a non-Haskell client. +data ForeignLib = ForeignLib { + -- | Name of the foreign library + foreignLibName :: UnqualComponentName + -- | What kind of foreign library is this (static or dynamic). + , foreignLibType :: ForeignLibType + -- | What options apply to this foreign library (e.g., are we + -- merging in all foreign dependencies.) + , foreignLibOptions :: [ForeignLibOption] + -- | Build information for this foreign library. + , foreignLibBuildInfo :: BuildInfo + -- | Libtool-style version-info data to compute library version. + -- Refer to the libtool documentation on the + -- current:revision:age versioning scheme. + , foreignLibVersionInfo :: Maybe LibVersionInfo + -- | Linux library version + , foreignLibVersionLinux :: Maybe Version + + -- | (Windows-specific) module definition files + -- + -- This is a list rather than a maybe field so that we can flatten + -- the condition trees (for instance, when creating an sdist) + , foreignLibModDefFile :: [FilePath] + } + deriving (Generic, Show, Read, Eq, Typeable, Data) + +data LibVersionInfo = LibVersionInfo Int Int Int deriving (Data, Eq, Generic, Typeable) + +instance Ord LibVersionInfo where + LibVersionInfo c r _ `compare` LibVersionInfo c' r' _ = + case c `compare` c' of + EQ -> r `compare` r' + e -> e + +instance Show LibVersionInfo where + showsPrec d (LibVersionInfo c r a) = showParen (d > 10) + $ showString "mkLibVersionInfo " + . 
showsPrec 11 (c,r,a) + +instance Read LibVersionInfo where + readPrec = Read.parens $ do + Read.Ident "mkLibVersionInfo" <- Read.lexP + t <- Read.step Read.readPrec + return (mkLibVersionInfo t) + +instance Binary LibVersionInfo + +instance NFData LibVersionInfo where rnf = genericRnf + +instance Pretty LibVersionInfo where + pretty (LibVersionInfo c r a) + = Disp.hcat $ Disp.punctuate (Disp.char ':') $ map Disp.int [c,r,a] + +instance Parsec LibVersionInfo where + parsec = do + c <- P.integral + (r, a) <- P.option (0,0) $ do + _ <- P.char ':' + r <- P.integral + a <- P.option 0 $ do + _ <- P.char ':' + P.integral + return (r,a) + return $ mkLibVersionInfo (c,r,a) + +instance Text LibVersionInfo where + parse = do + c <- parseNat + (r, a) <- Parse.option (0,0) $ do + _ <- Parse.char ':' + r <- parseNat + a <- Parse.option 0 (Parse.char ':' >> parseNat) + return (r, a) + return $ mkLibVersionInfo (c,r,a) + where + parseNat = read `fmap` Parse.munch1 isDigit + +-- | Construct 'LibVersionInfo' from @(current, revision, age)@ +-- numbers. +-- +-- For instance, @mkLibVersionInfo (3,0,0)@ constructs a +-- 'LibVersionInfo' representing the version-info @3:0:0@. +-- +-- All version components must be non-negative. +mkLibVersionInfo :: (Int, Int, Int) -> LibVersionInfo +mkLibVersionInfo (c,r,a) = LibVersionInfo c r a + +-- | From a given 'LibVersionInfo', extract the @(current, revision, +-- age)@ numbers. +libVersionInfoCRA :: LibVersionInfo -> (Int, Int, Int) +libVersionInfoCRA (LibVersionInfo c r a) = (c,r,a) + +-- | Given a version-info field, produce a @major.minor.build@ version +libVersionNumber :: LibVersionInfo -> (Int, Int, Int) +libVersionNumber (LibVersionInfo c r a) = (c-a , a , r) + +-- | Given a version-info field, return @"major.minor.build"@ as a +-- 'String' +libVersionNumberShow :: LibVersionInfo -> String +libVersionNumberShow v = + let (major, minor, build) = libVersionNumber v + in show major ++ "." ++ show minor ++ "." 
++ show build + +-- | Return the @major@ version of a version-info field. +libVersionMajor :: LibVersionInfo -> Int +libVersionMajor (LibVersionInfo c _ a) = c-a + +instance L.HasBuildInfo ForeignLib where + buildInfo f l = (\x -> l { foreignLibBuildInfo = x }) <$> f (foreignLibBuildInfo l) + +instance Binary ForeignLib + +instance NFData ForeignLib where rnf = genericRnf + +instance Semigroup ForeignLib where + a <> b = ForeignLib { + foreignLibName = combine' foreignLibName + , foreignLibType = combine foreignLibType + , foreignLibOptions = combine foreignLibOptions + , foreignLibBuildInfo = combine foreignLibBuildInfo + , foreignLibVersionInfo = combine'' foreignLibVersionInfo + , foreignLibVersionLinux = combine'' foreignLibVersionLinux + , foreignLibModDefFile = combine foreignLibModDefFile + } + where combine field = field a `mappend` field b + combine' field = case ( unUnqualComponentName $ field a + , unUnqualComponentName $ field b) of + ("", _) -> field b + (_, "") -> field a + (x, y) -> error $ "Ambiguous values for executable field: '" + ++ x ++ "' and '" ++ y ++ "'" + combine'' field = field b + +instance Monoid ForeignLib where + mempty = ForeignLib { + foreignLibName = mempty + , foreignLibType = ForeignLibTypeUnknown + , foreignLibOptions = [] + , foreignLibBuildInfo = mempty + , foreignLibVersionInfo = Nothing + , foreignLibVersionLinux = Nothing + , foreignLibModDefFile = [] + } + mappend = (<>) + +-- | An empty foreign library. +emptyForeignLib :: ForeignLib +emptyForeignLib = mempty + +-- | Modules defined by a foreign library. +foreignLibModules :: ForeignLib -> [ModuleName] +foreignLibModules = otherModules . foreignLibBuildInfo + +-- | Is the foreign library shared? +foreignLibIsShared :: ForeignLib -> Bool +foreignLibIsShared = foreignLibTypeIsShared . foreignLibType + +-- | Get a version number for a foreign library. +-- If we're on Linux, and a Linux version is specified, use that. 
+-- If we're on Linux, and libtool-style version-info is specified, translate +-- that field into appropriate version numbers. +-- Otherwise, this feature is unsupported so we don't return any version data. +foreignLibVersion :: ForeignLib -> OS -> [Int] +foreignLibVersion flib Linux = + case foreignLibVersionLinux flib of + Just v -> versionNumbers v + Nothing -> + case foreignLibVersionInfo flib of + Just v' -> + let (major, minor, build) = libVersionNumber v' + in [major, minor, build] + Nothing -> [] +foreignLibVersion _ _ = [] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ForeignLibOption.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ForeignLibOption.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ForeignLibOption.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ForeignLibOption.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,45 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.ForeignLibOption( + ForeignLibOption(..) +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Pretty +import Distribution.Parsec.Class +import Distribution.Text + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +data ForeignLibOption = + -- | Merge in all dependent libraries (i.e., use + -- @ghc -shared -static@ rather than just record + -- the dependencies, ala @ghc -shared -dynamic@). + -- This option is compulsory on Windows and unsupported + -- on other platforms. 
+ ForeignLibStandalone + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Pretty ForeignLibOption where + pretty ForeignLibStandalone = Disp.text "standalone" + +instance Parsec ForeignLibOption where + parsec = do + name <- P.munch1 (\c -> isAlphaNum c || c == '-') + case name of + "standalone" -> return ForeignLibStandalone + _ -> fail "unrecognized foreign-library option" + +instance Text ForeignLibOption where + parse = Parse.choice [ + do _ <- Parse.string "standalone" ; return ForeignLibStandalone + ] + +instance Binary ForeignLibOption + +instance NFData ForeignLibOption where rnf = genericRnf diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ForeignLibType.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ForeignLibType.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ForeignLibType.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ForeignLibType.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,76 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.ForeignLibType( + ForeignLibType(..), + knownForeignLibTypes, + foreignLibTypeIsShared, +) where + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.PackageDescription.Utils + +import Distribution.Pretty +import Distribution.Parsec.Class +import Distribution.Text + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +-- | What kind of foreign library is to be built? +data ForeignLibType = + -- | A native shared library (@.so@ on Linux, @.dylib@ on OSX, or + -- @.dll@ on Windows). + ForeignLibNativeShared + -- | A native static library (not currently supported.) + | ForeignLibNativeStatic + -- TODO: Maybe this should record a string? 
+ | ForeignLibTypeUnknown + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Pretty ForeignLibType where + pretty ForeignLibNativeShared = Disp.text "native-shared" + pretty ForeignLibNativeStatic = Disp.text "native-static" + pretty ForeignLibTypeUnknown = Disp.text "unknown" + +instance Parsec ForeignLibType where + parsec = do + name <- P.munch1 (\c -> isAlphaNum c || c == '-') + return $ case name of + "native-shared" -> ForeignLibNativeShared + "native-static" -> ForeignLibNativeStatic + _ -> ForeignLibTypeUnknown + +instance Text ForeignLibType where + parse = Parse.choice [ + do _ <- Parse.string "native-shared" ; return ForeignLibNativeShared + , do _ <- Parse.string "native-static" ; return ForeignLibNativeStatic + ] + +instance Binary ForeignLibType + +instance NFData ForeignLibType where rnf = genericRnf + +instance Semigroup ForeignLibType where + ForeignLibTypeUnknown <> b = b + a <> ForeignLibTypeUnknown = a + _ <> _ = error "Ambiguous foreign library type" + +instance Monoid ForeignLibType where + mempty = ForeignLibTypeUnknown + mappend = (<>) + +knownForeignLibTypes :: [ForeignLibType] +knownForeignLibTypes = [ + ForeignLibNativeShared + , ForeignLibNativeStatic + ] + +foreignLibTypeIsShared :: ForeignLibType -> Bool +foreignLibTypeIsShared t = + case t of + ForeignLibNativeShared -> True + ForeignLibNativeStatic -> False + ForeignLibTypeUnknown -> cabalBug "Unknown foreign library type" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/GenericPackageDescription/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/GenericPackageDescription/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/GenericPackageDescription/Lens.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/GenericPackageDescription/Lens.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 
+1,124 @@ +{-# LANGUAGE Rank2Types #-} +module Distribution.Types.GenericPackageDescription.Lens ( + GenericPackageDescription, + Flag, + FlagName, + ConfVar (..), + module Distribution.Types.GenericPackageDescription.Lens, + ) where + +import Prelude() +import Distribution.Compat.Prelude +import Distribution.Compat.Lens + +-- We import types from their packages, so we can remove unused imports +-- and have wider inter-module dependency graph +import Distribution.Types.CondTree (CondTree) +import Distribution.Types.Dependency (Dependency) +import Distribution.Types.Executable (Executable) +import Distribution.Types.PackageDescription (PackageDescription) +import Distribution.Types.Benchmark (Benchmark) +import Distribution.Types.ForeignLib (ForeignLib) +import Distribution.Types.GenericPackageDescription + ( GenericPackageDescription(GenericPackageDescription) + , Flag(MkFlag), FlagName, ConfVar (..)) +import Distribution.Types.Library (Library) +import Distribution.Types.TestSuite (TestSuite) +import Distribution.Types.UnqualComponentName (UnqualComponentName) +import Distribution.System (Arch, OS) +import Distribution.Compiler (CompilerFlavor) +import Distribution.Version (VersionRange) + +------------------------------------------------------------------------------- +-- GenericPackageDescription +------------------------------------------------------------------------------- + +condBenchmarks :: Lens' GenericPackageDescription [(UnqualComponentName, CondTree ConfVar [Dependency] Benchmark)] +condBenchmarks f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 y1) (f x8) +{-# INLINE condBenchmarks #-} + +condExecutables :: Lens' GenericPackageDescription [(UnqualComponentName, CondTree ConfVar [Dependency] Executable)] +condExecutables f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription x1 x2 x3 x4 x5 y1 x7 x8) (f x6) +{-# INLINE condExecutables #-} + 
+condForeignLibs :: Lens' GenericPackageDescription [(UnqualComponentName, CondTree ConfVar [Dependency] Distribution.Types.ForeignLib.ForeignLib)] +condForeignLibs f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription x1 x2 x3 x4 y1 x6 x7 x8) (f x5) +{-# INLINE condForeignLibs #-} + +condLibrary :: Lens' GenericPackageDescription (Maybe (CondTree ConfVar [Dependency] Library)) +condLibrary f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription x1 x2 y1 x4 x5 x6 x7 x8) (f x3) +{-# INLINE condLibrary #-} + +condSubLibraries :: Lens' GenericPackageDescription [(UnqualComponentName, CondTree ConfVar [Dependency] Library)] +condSubLibraries f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription x1 x2 x3 y1 x5 x6 x7 x8) (f x4) +{-# INLINE condSubLibraries #-} + +condTestSuites :: Lens' GenericPackageDescription [(UnqualComponentName, CondTree ConfVar [Dependency] TestSuite)] +condTestSuites f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription x1 x2 x3 x4 x5 x6 y1 x8) (f x7) +{-# INLINE condTestSuites #-} + +genPackageFlags :: Lens' GenericPackageDescription [Flag] +genPackageFlags f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription x1 y1 x3 x4 x5 x6 x7 x8) (f x2) +{-# INLINE genPackageFlags #-} + +packageDescription :: Lens' GenericPackageDescription PackageDescription +packageDescription f (GenericPackageDescription x1 x2 x3 x4 x5 x6 x7 x8) = fmap (\y1 -> GenericPackageDescription y1 x2 x3 x4 x5 x6 x7 x8) (f x1) +{-# INLINE packageDescription #-} + +allCondTrees + :: Applicative f + => (forall a. 
CondTree ConfVar [Dependency] a + -> f (CondTree ConfVar [Dependency] a)) + -> GenericPackageDescription + -> f GenericPackageDescription +allCondTrees f (GenericPackageDescription p a1 x1 x2 x3 x4 x5 x6) = + GenericPackageDescription + <$> pure p + <*> pure a1 + <*> traverse f x1 + <*> (traverse . _2) f x2 + <*> (traverse . _2) f x3 + <*> (traverse . _2) f x4 + <*> (traverse . _2) f x5 + <*> (traverse . _2) f x6 + + +------------------------------------------------------------------------------- +-- Flag +------------------------------------------------------------------------------- + +flagName :: Lens' Flag FlagName +flagName f (MkFlag x1 x2 x3 x4) = fmap (\y1 -> MkFlag y1 x2 x3 x4) (f x1) +{-# INLINE flagName #-} + +flagDescription :: Lens' Flag String +flagDescription f (MkFlag x1 x2 x3 x4) = fmap (\y1 -> MkFlag x1 y1 x3 x4) (f x2) +{-# INLINE flagDescription #-} + +flagDefault :: Lens' Flag Bool +flagDefault f (MkFlag x1 x2 x3 x4) = fmap (\y1 -> MkFlag x1 x2 y1 x4) (f x3) +{-# INLINE flagDefault #-} + +flagManual :: Lens' Flag Bool +flagManual f (MkFlag x1 x2 x3 x4) = fmap (\y1 -> MkFlag x1 x2 x3 y1) (f x4) +{-# INLINE flagManual #-} + +------------------------------------------------------------------------------- +-- ConfVar +------------------------------------------------------------------------------- + +_OS :: Traversal' ConfVar OS +_OS f (OS os) = OS <$> f os +_OS _ x = pure x + +_Arch :: Traversal' ConfVar Arch +_Arch f (Arch arch) = Arch <$> f arch +_Arch _ x = pure x + +_Flag :: Traversal' ConfVar FlagName +_Flag f (Flag flag) = Flag <$> f flag +_Flag _ x = pure x + +_Impl :: Traversal' ConfVar (CompilerFlavor, VersionRange) +_Impl f (Impl cf vr) = uncurry Impl <$> f (cf, vr) +_Impl _ x = pure x diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/GenericPackageDescription.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/GenericPackageDescription.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/GenericPackageDescription.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/GenericPackageDescription.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,353 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} + +module Distribution.Types.GenericPackageDescription ( + GenericPackageDescription(..), + emptyGenericPackageDescription, + Flag(..), + emptyFlag, + FlagName, + mkFlagName, + unFlagName, + FlagAssignment, + mkFlagAssignment, + unFlagAssignment, + lookupFlagAssignment, + insertFlagAssignment, + diffFlagAssignment, + findDuplicateFlagAssignments, + nullFlagAssignment, + showFlagValue, + dispFlagAssignment, + parseFlagAssignment, + parsecFlagAssignment, + ConfVar(..), +) where + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.Utils.ShortText +import Distribution.Utils.Generic (lowercase) +import qualified Text.PrettyPrint as Disp +import qualified Data.Map as Map +import qualified Distribution.Compat.ReadP as Parse +import qualified Distribution.Compat.CharParsing as P +import Distribution.Compat.ReadP ((+++)) + +-- lens +import Distribution.Compat.Lens as L +import qualified Distribution.Types.BuildInfo.Lens as L + +import Distribution.Types.PackageDescription + +import Distribution.Types.Dependency +import Distribution.Types.Library +import Distribution.Types.ForeignLib +import Distribution.Types.Executable +import Distribution.Types.TestSuite +import Distribution.Types.Benchmark +import Distribution.Types.UnqualComponentName +import Distribution.Types.CondTree + +import Distribution.Package +import Distribution.Version +import Distribution.Compiler +import Distribution.System +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text + +-- 
--------------------------------------------------------------------------- +-- The 'GenericPackageDescription' type + +data GenericPackageDescription = + GenericPackageDescription + { packageDescription :: PackageDescription + , genPackageFlags :: [Flag] + , condLibrary :: Maybe (CondTree ConfVar [Dependency] Library) + , condSubLibraries :: [( UnqualComponentName + , CondTree ConfVar [Dependency] Library )] + , condForeignLibs :: [( UnqualComponentName + , CondTree ConfVar [Dependency] ForeignLib )] + , condExecutables :: [( UnqualComponentName + , CondTree ConfVar [Dependency] Executable )] + , condTestSuites :: [( UnqualComponentName + , CondTree ConfVar [Dependency] TestSuite )] + , condBenchmarks :: [( UnqualComponentName + , CondTree ConfVar [Dependency] Benchmark )] + } + deriving (Show, Eq, Typeable, Data, Generic) + +instance Package GenericPackageDescription where + packageId = packageId . packageDescription + +instance Binary GenericPackageDescription + +instance NFData GenericPackageDescription where rnf = genericRnf + +emptyGenericPackageDescription :: GenericPackageDescription +emptyGenericPackageDescription = GenericPackageDescription emptyPackageDescription [] Nothing [] [] [] [] [] + +-- ----------------------------------------------------------------------------- +-- Traversal Instances + +instance L.HasBuildInfos GenericPackageDescription where + traverseBuildInfos f (GenericPackageDescription p a1 x1 x2 x3 x4 x5 x6) = + GenericPackageDescription + <$> L.traverseBuildInfos f p + <*> pure a1 + <*> (traverse . traverse . L.buildInfo) f x1 + <*> (traverse . L._2 . traverse . L.buildInfo) f x2 + <*> (traverse . L._2 . traverse . L.buildInfo) f x3 + <*> (traverse . L._2 . traverse . L.buildInfo) f x4 + <*> (traverse . L._2 . traverse . L.buildInfo) f x5 + <*> (traverse . L._2 . traverse . 
L.buildInfo) f x6 + +-- ----------------------------------------------------------------------------- +-- The Flag' type + +-- | A flag can represent a feature to be included, or a way of linking +-- a target against its dependencies, or in fact whatever you can think of. +data Flag = MkFlag + { flagName :: FlagName + , flagDescription :: String + , flagDefault :: Bool + , flagManual :: Bool + } + deriving (Show, Eq, Typeable, Data, Generic) + +instance Binary Flag + +instance NFData Flag where rnf = genericRnf + +-- | A 'Flag' initialized with default parameters. +emptyFlag :: FlagName -> Flag +emptyFlag name = MkFlag + { flagName = name + , flagDescription = "" + , flagDefault = True + , flagManual = False + } + +-- | A 'FlagName' is the name of a user-defined configuration flag +-- +-- Use 'mkFlagName' and 'unFlagName' to convert from/to a 'String'. +-- +-- This type is opaque since @Cabal-2.0@ +-- +-- @since 2.0.0.2 +newtype FlagName = FlagName ShortText + deriving (Eq, Generic, Ord, Show, Read, Typeable, Data, NFData) + +-- | Construct a 'FlagName' from a 'String' +-- +-- 'mkFlagName' is the inverse to 'unFlagName' +-- +-- Note: No validations are performed to ensure that the resulting +-- 'FlagName' is valid +-- +-- @since 2.0.0.2 +mkFlagName :: String -> FlagName +mkFlagName = FlagName . toShortText + +-- | 'mkFlagName' +-- +-- @since 2.0.0.2 +instance IsString FlagName where + fromString = mkFlagName + +-- | Convert 'FlagName' to 'String' +-- +-- @since 2.0.0.2 +unFlagName :: FlagName -> String +unFlagName (FlagName s) = fromShortText s + +instance Binary FlagName + +instance Pretty FlagName where + pretty = Disp.text . unFlagName + +instance Parsec FlagName where + parsec = mkFlagName . 
lowercase <$> parsec' + where + parsec' = (:) <$> lead <*> rest + lead = P.satisfy (\c -> isAlphaNum c || c == '_') + rest = P.munch (\c -> isAlphaNum c || c == '_' || c == '-') + +instance Text FlagName where + -- Note: we don't check that FlagName doesn't have leading dash, + -- cabal check will do that. + parse = mkFlagName . lowercase <$> parse' + where + parse' = (:) <$> lead <*> rest + lead = Parse.satisfy (\c -> isAlphaNum c || c == '_') + rest = Parse.munch (\c -> isAlphaNum c || c == '_' || c == '-') + +-- | A 'FlagAssignment' is a total or partial mapping of 'FlagName's to +-- 'Bool' flag values. It represents the flags chosen by the user or +-- discovered during configuration. For example @--flags=foo --flags=-bar@ +-- becomes @[("foo", True), ("bar", False)]@ +-- +newtype FlagAssignment + = FlagAssignment { getFlagAssignment :: Map.Map FlagName (Int, Bool) } + deriving (Binary, Generic, NFData) + +instance Eq FlagAssignment where + (==) (FlagAssignment m1) (FlagAssignment m2) + = fmap snd m1 == fmap snd m2 + +instance Ord FlagAssignment where + compare (FlagAssignment m1) (FlagAssignment m2) + = fmap snd m1 `compare` fmap snd m2 + +-- | Combines pairs of values contained in the 'FlagAssignment' Map. +-- +-- The last flag specified takes precedence, and we record the number +-- of times we have seen the flag. +-- +combineFlagValues :: (Int, Bool) -> (Int, Bool) -> (Int, Bool) +combineFlagValues (c1, _) (c2, b2) = (c1 + c2, b2) + +-- The 'Semigroup' instance currently is right-biased. +-- +-- If duplicate flags are specified, we want the last flag specified to +-- take precedence and we want to know how many times the flag has been +-- specified so that we have the option of warning the user about +-- supplying duplicate flags. 
+instance Semigroup FlagAssignment where + (<>) (FlagAssignment m1) (FlagAssignment m2) + = FlagAssignment (Map.unionWith combineFlagValues m1 m2) + +instance Monoid FlagAssignment where + mempty = FlagAssignment Map.empty + mappend = (<>) + +-- | Construct a 'FlagAssignment' from a list of flag/value pairs. +-- +-- If duplicate flags occur in the input list, the later entries +-- in the list will take precedence. +-- +-- @since 2.2.0 +mkFlagAssignment :: [(FlagName, Bool)] -> FlagAssignment +mkFlagAssignment = + FlagAssignment . + Map.fromListWith (flip combineFlagValues) . fmap (fmap (\b -> (1, b))) + +-- | Deconstruct a 'FlagAssignment' into a list of flag/value pairs. +-- +-- @ 'null' ('findDuplicateFlagAssignments' fa) ==> ('mkFlagAssignment' . 'unFlagAssignment') fa == fa @ +-- +-- @since 2.2.0 +unFlagAssignment :: FlagAssignment -> [(FlagName, Bool)] +unFlagAssignment = fmap (fmap snd) . Map.toList . getFlagAssignment + +-- | Test whether 'FlagAssignment' is empty. +-- +-- @since 2.2.0 +nullFlagAssignment :: FlagAssignment -> Bool +nullFlagAssignment = Map.null . getFlagAssignment + +-- | Lookup the value for a flag +-- +-- Returns 'Nothing' if the flag isn't contained in the 'FlagAssignment'. +-- +-- @since 2.2.0 +lookupFlagAssignment :: FlagName -> FlagAssignment -> Maybe Bool +lookupFlagAssignment fn = fmap snd . Map.lookup fn . getFlagAssignment + +-- | Insert or update the boolean value of a flag. +-- +-- If the flag is already present in the 'FlagAssigment', the +-- value will be updated and the fact that multiple values have +-- been provided for that flag will be recorded so that a +-- warning can be generated later on. 
+-- +-- @since 2.2.0 +insertFlagAssignment :: FlagName -> Bool -> FlagAssignment -> FlagAssignment +-- TODO: this currently just shadows prior values for an existing +-- flag; rather than enforcing uniqueness at construction, it's +-- verified later on via `D.C.Dependency.configuredPackageProblems` +insertFlagAssignment flag val = + FlagAssignment . + Map.insertWith (flip combineFlagValues) flag (1, val) . getFlagAssignment + +-- | Remove all flag-assignments from the first 'FlagAssignment' that +-- are contained in the second 'FlagAssignment' +-- +-- NB/TODO: This currently only removes flag assignments which also +-- match the value assignment! We should review the code which uses +-- this operation to figure out if this it's not enough to only +-- compare the flagnames without the values. +-- +-- @since 2.2.0 +diffFlagAssignment :: FlagAssignment -> FlagAssignment -> FlagAssignment +diffFlagAssignment fa1 fa2 = FlagAssignment + (Map.difference (getFlagAssignment fa1) (getFlagAssignment fa2)) + +-- | Find the 'FlagName's that have been listed more than once. +-- +-- @since 2.2.0 +findDuplicateFlagAssignments :: FlagAssignment -> [FlagName] +findDuplicateFlagAssignments = + Map.keys . Map.filter ((> 1) . fst) . getFlagAssignment + +-- | @since 2.2.0 +instance Read FlagAssignment where + readsPrec p s = [ (FlagAssignment x, rest) | (x,rest) <- readsPrec p s ] + +-- | @since 2.2.0 +instance Show FlagAssignment where + showsPrec p (FlagAssignment xs) = showsPrec p xs + +-- | String representation of a flag-value pair. +showFlagValue :: (FlagName, Bool) -> String +showFlagValue (f, True) = '+' : unFlagName f +showFlagValue (f, False) = '-' : unFlagName f + +-- | Pretty-prints a flag assignment. +dispFlagAssignment :: FlagAssignment -> Disp.Doc +dispFlagAssignment = Disp.hsep . map (Disp.text . showFlagValue) . unFlagAssignment + +-- | Parses a flag assignment. 
+parsecFlagAssignment :: ParsecParser FlagAssignment +parsecFlagAssignment = mkFlagAssignment <$> + P.sepBy (onFlag <|> offFlag) P.skipSpaces1 + where + onFlag = do + _ <- P.optional (P.char '+') + f <- parsec + return (f, True) + offFlag = do + _ <- P.char '-' + f <- parsec + return (f, False) + +-- | Parses a flag assignment. +parseFlagAssignment :: Parse.ReadP r FlagAssignment +parseFlagAssignment = mkFlagAssignment <$> + Parse.sepBy parseFlagValue Parse.skipSpaces1 + where + parseFlagValue = + (do Parse.optional (Parse.char '+') + f <- parse + return (f, True)) + +++ (do _ <- Parse.char '-' + f <- parse + return (f, False)) +-- {-# DEPRECATED parseFlagAssignment "Use parsecFlagAssignment. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} + +-- ----------------------------------------------------------------------------- +-- The 'CondVar' type + +-- | A @ConfVar@ represents the variable type used. +data ConfVar = OS OS + | Arch Arch + | Flag FlagName + | Impl CompilerFlavor VersionRange + deriving (Eq, Show, Typeable, Data, Generic) + +instance Binary ConfVar + +instance NFData ConfVar where rnf = genericRnf diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/HookedBuildInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/HookedBuildInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/HookedBuildInfo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/HookedBuildInfo.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,66 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.HookedBuildInfo ( + HookedBuildInfo, + emptyHookedBuildInfo, + ) where + +-- import Distribution.Compat.Prelude +import Distribution.Types.BuildInfo +import Distribution.Types.UnqualComponentName + +-- | 'HookedBuildInfo' is mechanism that hooks can 
use to +-- override the 'BuildInfo's inside packages. One example +-- use-case (which is used in core libraries today) is as +-- a way of passing flags which are computed by a configure +-- script into Cabal. In this case, the autoconf build type adds +-- hooks to read in a textual 'HookedBuildInfo' format prior +-- to doing any operations. +-- +-- Quite honestly, this mechanism is a massive hack since we shouldn't +-- be editing the 'PackageDescription' data structure (it's easy +-- to assume that this data structure shouldn't change and +-- run into bugs, see for example 1c20a6328579af9e37677d507e2e9836ef70ab9d). +-- But it's a bit convenient, because there isn't another data +-- structure that allows adding extra 'BuildInfo' style things. +-- +-- In any case, a lot of care has to be taken to make sure the +-- 'HookedBuildInfo' is applied to the 'PackageDescription'. In +-- general this process occurs in "Distribution.Simple", which is +-- responsible for orchestrating the hooks mechanism. The +-- general strategy: +-- +-- 1. We run the pre-hook, which produces a 'HookedBuildInfo' +-- (e.g., in the Autoconf case, it reads it out from a file). +-- 2. We sanity-check the hooked build info with +-- 'sanityCheckHookedBuildInfo'. +-- 3. We update our 'PackageDescription' (either freshly read +-- or cached from 'LocalBuildInfo') with 'updatePackageDescription'. +-- +-- In principle, we are also supposed to update the copy of +-- the 'PackageDescription' stored in 'LocalBuildInfo' +-- at 'localPkgDescr'. Unfortunately, in practice, there +-- are lots of Custom setup scripts which fail to update +-- 'localPkgDescr' so you really shouldn't rely on it. +-- It's not DEPRECATED because there are legitimate uses +-- for it, but... yeah. Sharp knife. See +-- +-- for more information on the issue. +-- +-- It is not well-specified whether or not a 'HookedBuildInfo' applied +-- at configure time is persistent to the 'LocalBuildInfo'. 
The +-- fact that 'HookedBuildInfo' is passed to 'confHook' MIGHT SUGGEST +-- that the 'HookedBuildInfo' is applied at this time, but actually +-- since 9317b67e6122ab14e53f81b573bd0ecb388eca5a it has been ONLY used +-- to create a modified package description that we check for problems: +-- it is never actually saved to the LBI. Since 'HookedBuildInfo' is +-- applied monoidally to the existing build infos (and it is not an +-- idempotent monoid), it could break things to save it, since we +-- are obligated to apply any new 'HookedBuildInfo' and then we'd +-- get the effect twice. But this does mean we have to re-apply +-- it every time. Hey, it's more flexibility. +type HookedBuildInfo = (Maybe BuildInfo, [(UnqualComponentName, BuildInfo)]) + +emptyHookedBuildInfo :: HookedBuildInfo +emptyHookedBuildInfo = (Nothing, []) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/IncludeRenaming.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/IncludeRenaming.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/IncludeRenaming.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/IncludeRenaming.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,76 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.IncludeRenaming ( + IncludeRenaming(..), + defaultIncludeRenaming, + isDefaultIncludeRenaming, +) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Types.ModuleRenaming + +import qualified Distribution.Compat.CharParsing as P +import Distribution.Compat.ReadP ((<++)) +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text +import Text.PrettyPrint (text, (<+>)) +import qualified Text.PrettyPrint as Disp + +-- 
--------------------------------------------------------------------------- +-- Module renaming + +-- | A renaming on an include: (provides renaming, requires renaming) +data IncludeRenaming + = IncludeRenaming { + includeProvidesRn :: ModuleRenaming, + includeRequiresRn :: ModuleRenaming + } + deriving (Show, Read, Eq, Ord, Typeable, Data, Generic) + +instance Binary IncludeRenaming + +instance NFData IncludeRenaming where rnf = genericRnf + +-- | The 'defaultIncludeRenaming' applied when you only @build-depends@ +-- on a package. +defaultIncludeRenaming :: IncludeRenaming +defaultIncludeRenaming = IncludeRenaming defaultRenaming defaultRenaming + +-- | Is an 'IncludeRenaming' the default one? +isDefaultIncludeRenaming :: IncludeRenaming -> Bool +isDefaultIncludeRenaming (IncludeRenaming p r) = isDefaultRenaming p && isDefaultRenaming r + +instance Pretty IncludeRenaming where + pretty (IncludeRenaming prov_rn req_rn) = + pretty prov_rn + <+> (if isDefaultRenaming req_rn + then Disp.empty + else text "requires" <+> pretty req_rn) + +instance Parsec IncludeRenaming where + parsec = do + prov_rn <- parsec + req_rn <- P.option defaultRenaming $ P.try $ do + P.spaces + _ <- P.string "requires" + P.spaces + parsec + return (IncludeRenaming prov_rn req_rn) + +instance Text IncludeRenaming where + parse = do + prov_rn <- parse + req_rn <- (Parse.string "requires" >> Parse.skipSpaces >> parse) <++ return defaultRenaming + -- Requirements don't really care if they're mentioned + -- or not (since you can't thin a requirement.) But + -- we have a little hack in Configure to combine + -- the provisions and requirements together before passing + -- them to GHC, and so the most neutral choice for a requirement + -- is for the "with" field to be False, so we correctly + -- thin provisions. 
+ return (IncludeRenaming prov_rn req_rn) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/InstalledPackageInfo/FieldGrammar.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/InstalledPackageInfo/FieldGrammar.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/InstalledPackageInfo/FieldGrammar.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/InstalledPackageInfo/FieldGrammar.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,263 @@ +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +module Distribution.Types.InstalledPackageInfo.FieldGrammar ( + ipiFieldGrammar, + ) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Backpack +import Distribution.Compat.Lens (Lens', (&), (.~)) +import Distribution.Compat.Newtype +import Distribution.FieldGrammar +import Distribution.FieldGrammar.FieldDescrs +import Distribution.License +import Distribution.ModuleName +import Distribution.Package +import Distribution.Parsec.Class +import Distribution.Parsec.Newtypes +import Distribution.Pretty +import Distribution.Text +import Distribution.Types.MungedPackageName +import Distribution.Types.UnqualComponentName +import Distribution.Version + +import qualified Data.Char as Char +import qualified Data.Map as Map +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.SPDX as SPDX +import qualified Text.PrettyPrint as Disp + +import Distribution.Types.InstalledPackageInfo + +import qualified Distribution.Types.InstalledPackageInfo.Lens as L +import qualified Distribution.Types.PackageId.Lens as L + +-- Note: GHC goes nuts and inlines everything, +-- One can see e.g. 
in -ddump-simpl-stats: +-- +-- 34886 KnownBranch +-- 8197 wild1_ixF0 +-- +-- https://ghc.haskell.org/trac/ghc/ticket/13253 might be the cause. +-- +-- The workaround is to prevent GHC optimising the code: +infixl 4 <+> +(<+>) :: Applicative f => f (a -> b) -> f a -> f b +f <+> x = f <*> x +{-# NOINLINE (<+>) #-} + +ipiFieldGrammar + :: (FieldGrammar g, Applicative (g InstalledPackageInfo), Applicative (g Basic)) + => g InstalledPackageInfo InstalledPackageInfo +ipiFieldGrammar = mkInstalledPackageInfo + -- Deprecated fields + <$> monoidalFieldAla "hugs-options" (alaList' FSep Token) unitedList + ^^^ deprecatedField' "hugs isn't supported anymore" + -- Very basic fields: name, version, package-name and lib-name + <+> blurFieldGrammar basic basicFieldGrammar + -- Basic fields + <+> optionalFieldDef "id" L.installedUnitId (mkUnitId "") + <+> optionalFieldDefAla "instantiated-with" InstWith L.instantiatedWith [] + <+> optionalFieldDefAla "key" CompatPackageKey L.compatPackageKey "" + <+> optionalFieldDefAla "license" SpecLicenseLenient L.license (Left SPDX.NONE) + <+> optionalFieldDefAla "copyright" FreeText L.copyright "" + <+> optionalFieldDefAla "maintainer" FreeText L.maintainer "" + <+> optionalFieldDefAla "author" FreeText L.author "" + <+> optionalFieldDefAla "stability" FreeText L.stability "" + <+> optionalFieldDefAla "homepage" FreeText L.homepage "" + <+> optionalFieldDefAla "package-url" FreeText L.pkgUrl "" + <+> optionalFieldDefAla "synopsis" FreeText L.synopsis "" + <+> optionalFieldDefAla "description" FreeText L.description "" + <+> optionalFieldDefAla "category" FreeText L.category "" + -- Installed fields + <+> optionalFieldDef "abi" L.abiHash (mkAbiHash "") + <+> booleanFieldDef "indefinite" L.indefinite False + <+> booleanFieldDef "exposed" L.exposed False + <+> monoidalFieldAla "exposed-modules" ExposedModules L.exposedModules + <+> monoidalFieldAla "hidden-modules" (alaList' FSep MQuoted) L.hiddenModules + <+> booleanFieldDef "trusted" L.trusted 
False + <+> monoidalFieldAla "import-dirs" (alaList' FSep FilePathNT) L.importDirs + <+> monoidalFieldAla "library-dirs" (alaList' FSep FilePathNT) L.libraryDirs + <+> monoidalFieldAla "dynamic-library-dirs" (alaList' FSep FilePathNT) L.libraryDynDirs + <+> optionalFieldDefAla "data-dir" FilePathNT L.dataDir "" + <+> monoidalFieldAla "hs-libraries" (alaList' FSep Token) L.hsLibraries + <+> monoidalFieldAla "extra-libraries" (alaList' FSep Token) L.extraLibraries + <+> monoidalFieldAla "extra-ghci-libraries" (alaList' FSep Token) L.extraGHCiLibraries + <+> monoidalFieldAla "include-dirs" (alaList' FSep FilePathNT) L.includeDirs + <+> monoidalFieldAla "includes" (alaList' FSep FilePathNT) L.includes + <+> monoidalFieldAla "depends" (alaList FSep) L.depends + <+> monoidalFieldAla "abi-depends" (alaList FSep) L.abiDepends + <+> monoidalFieldAla "cc-options" (alaList' FSep Token) L.ccOptions + <+> monoidalFieldAla "cxx-options" (alaList' FSep Token) L.cxxOptions + <+> monoidalFieldAla "ld-options" (alaList' FSep Token) L.ldOptions + <+> monoidalFieldAla "framework-dirs" (alaList' FSep FilePathNT) L.frameworkDirs + <+> monoidalFieldAla "frameworks" (alaList' FSep Token) L.frameworks + <+> monoidalFieldAla "haddock-interfaces" (alaList' FSep FilePathNT) L.haddockInterfaces + <+> monoidalFieldAla "haddock-html" (alaList' FSep FilePathNT) L.haddockHTMLs + <+> optionalFieldAla "pkgroot" FilePathNT L.pkgRoot + where + mkInstalledPackageInfo _ Basic {..} = InstalledPackageInfo + -- _basicPkgName is not used + -- setMaybePackageId says it can be no-op. 
+ (PackageIdentifier pn _basicVersion) + (mb_uqn <|> _basicLibName) + (mkComponentId "") -- installedComponentId_, not in use + where + (pn, mb_uqn) = decodeCompatPackageName _basicName +{-# SPECIALIZE ipiFieldGrammar :: FieldDescrs InstalledPackageInfo InstalledPackageInfo #-} +{-# SPECIALIZE ipiFieldGrammar :: ParsecFieldGrammar InstalledPackageInfo InstalledPackageInfo #-} +{-# SPECIALIZE ipiFieldGrammar :: PrettyFieldGrammar InstalledPackageInfo InstalledPackageInfo #-} + +-- (forall b. [b]) ~ () +unitedList :: Lens' a [b] +unitedList f s = s <$ f [] + +------------------------------------------------------------------------------- +-- Helper functions +------------------------------------------------------------------------------- + +-- To maintain backwards-compatibility, we accept both comma/non-comma +-- separated variants of this field. You SHOULD use the comma syntax if you +-- use any new functions, although actually it's unambiguous due to a quirk +-- of the fact that modules must start with capital letters. + +showExposedModules :: [ExposedModule] -> Disp.Doc +showExposedModules xs + | all isExposedModule xs = Disp.fsep (map disp xs) + | otherwise = Disp.fsep (Disp.punctuate Disp.comma (map disp xs)) + where isExposedModule (ExposedModule _ Nothing) = True + isExposedModule _ = False + +-- | Returns @Just@ if the @name@ field of the IPI record would not contain +-- the package name verbatim. This helps us avoid writing @package-name@ +-- when it's redundant. +maybePackageName :: InstalledPackageInfo -> Maybe PackageName +maybePackageName ipi = + case sourceLibName ipi of + Nothing -> Nothing + Just _ -> Just (packageName ipi) + +-- | Setter for the @package-name@ field. It should be acceptable for this +-- to be a no-op. 
+setMaybePackageName :: Maybe PackageName -> InstalledPackageInfo -> InstalledPackageInfo +setMaybePackageName Nothing ipi = ipi +setMaybePackageName (Just pn) ipi = ipi { + sourcePackageId=(sourcePackageId ipi){pkgName=pn} + } + +setMungedPackageName :: MungedPackageName -> InstalledPackageInfo -> InstalledPackageInfo +setMungedPackageName mpn ipi = + let (pn, mb_uqn) = decodeCompatPackageName mpn + in ipi { + sourcePackageId = (sourcePackageId ipi) {pkgName=pn}, + sourceLibName = mb_uqn + } + +------------------------------------------------------------------------------- +-- Auxiliary types +------------------------------------------------------------------------------- + +newtype ExposedModules = ExposedModules { getExposedModules :: [ExposedModule] } + +instance Newtype ExposedModules [ExposedModule] where + pack = ExposedModules + unpack = getExposedModules + +instance Parsec ExposedModules where + parsec = ExposedModules <$> parsecOptCommaList parsec + +instance Pretty ExposedModules where + pretty = showExposedModules . getExposedModules + + +newtype CompatPackageKey = CompatPackageKey { getCompatPackageKey :: String } + +instance Newtype CompatPackageKey String where + pack = CompatPackageKey + unpack = getCompatPackageKey + +instance Pretty CompatPackageKey where + pretty = Disp.text . getCompatPackageKey + +instance Parsec CompatPackageKey where + parsec = CompatPackageKey <$> P.munch1 uid_char where + uid_char c = Char.isAlphaNum c || c `elem` ("-_.=[],:<>+" :: String) + + +newtype InstWith = InstWith { getInstWith :: [(ModuleName,OpenModule)] } + +instance Newtype InstWith [(ModuleName, OpenModule)] where + pack = InstWith + unpack = getInstWith + +instance Pretty InstWith where + pretty = dispOpenModuleSubst . Map.fromList . getInstWith + +instance Parsec InstWith where + parsec = InstWith . Map.toList <$> parsecOpenModuleSubst + + +-- | SPDX License expression or legacy license. Lenient parser, accepts either. 
+newtype SpecLicenseLenient = SpecLicenseLenient { getSpecLicenseLenient :: Either SPDX.License License } + +instance Newtype SpecLicenseLenient (Either SPDX.License License) where + pack = SpecLicenseLenient + unpack = getSpecLicenseLenient + +instance Parsec SpecLicenseLenient where + parsec = fmap SpecLicenseLenient $ Left <$> P.try parsec <|> Right <$> parsec + +instance Pretty SpecLicenseLenient where + pretty = either pretty pretty . unpack + + +data Basic = Basic + { _basicName :: MungedPackageName + , _basicVersion :: Version + , _basicPkgName :: Maybe PackageName + , _basicLibName :: Maybe UnqualComponentName + } + +basic :: Lens' InstalledPackageInfo Basic +basic f ipi = g <$> f b + where + b = Basic + (mungedPackageName ipi) + (packageVersion ipi) + (maybePackageName ipi) + (sourceLibName ipi) + + g (Basic n v pn ln) = ipi + & setMungedPackageName n + & L.sourcePackageId . L.pkgVersion .~ v + & setMaybePackageName pn + & L.sourceLibName .~ ln + +basicName :: Lens' Basic MungedPackageName +basicName f b = (\x -> b { _basicName = x }) <$> f (_basicName b) +{-# INLINE basicName #-} + +basicVersion :: Lens' Basic Version +basicVersion f b = (\x -> b { _basicVersion = x }) <$> f (_basicVersion b) +{-# INLINE basicVersion #-} + +basicPkgName :: Lens' Basic (Maybe PackageName) +basicPkgName f b = (\x -> b { _basicPkgName = x }) <$> f (_basicPkgName b) +{-# INLINE basicPkgName #-} + +basicLibName :: Lens' Basic (Maybe UnqualComponentName) +basicLibName f b = (\x -> b { _basicLibName = x }) <$> f (_basicLibName b) +{-# INLINE basicLibName #-} + +basicFieldGrammar + :: (FieldGrammar g, Applicative (g Basic)) + => g Basic Basic +basicFieldGrammar = Basic + <$> optionalFieldDefAla "name" MQuoted basicName (mungedPackageName emptyInstalledPackageInfo) + <*> optionalFieldDefAla "version" MQuoted basicVersion nullVersion + <*> optionalField "package-name" basicPkgName + <*> optionalField "lib-name" basicLibName diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/InstalledPackageInfo/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/InstalledPackageInfo/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/InstalledPackageInfo/Lens.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/InstalledPackageInfo/Lens.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,183 @@ +module Distribution.Types.InstalledPackageInfo.Lens ( + InstalledPackageInfo, + module Distribution.Types.InstalledPackageInfo.Lens + ) where + +import Distribution.Compat.Lens +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Backpack (OpenModule) +import Distribution.License (License) +import Distribution.ModuleName (ModuleName) +import Distribution.Package (AbiHash, ComponentId, PackageIdentifier, UnitId) +import Distribution.Types.InstalledPackageInfo (AbiDependency, ExposedModule, InstalledPackageInfo) +import Distribution.Types.UnqualComponentName (UnqualComponentName) + +import qualified Distribution.SPDX as SPDX +import qualified Distribution.Types.InstalledPackageInfo as T + +sourcePackageId :: Lens' InstalledPackageInfo PackageIdentifier +sourcePackageId f s = fmap (\x -> s { T.sourcePackageId = x }) (f (T.sourcePackageId s)) +{-# INLINE sourcePackageId #-} + +installedUnitId :: Lens' InstalledPackageInfo UnitId +installedUnitId f s = fmap (\x -> s { T.installedUnitId = x }) (f (T.installedUnitId s)) +{-# INLINE installedUnitId #-} + +installedComponentId_ :: Lens' InstalledPackageInfo ComponentId +installedComponentId_ f s = fmap (\x -> s { T.installedComponentId_ = x }) (f (T.installedComponentId_ s)) +{-# INLINE installedComponentId_ #-} + +instantiatedWith :: Lens' InstalledPackageInfo [(ModuleName,OpenModule)] +instantiatedWith f s = fmap (\x -> s { T.instantiatedWith = x }) (f 
(T.instantiatedWith s)) +{-# INLINE instantiatedWith #-} + +sourceLibName :: Lens' InstalledPackageInfo (Maybe UnqualComponentName) +sourceLibName f s = fmap (\x -> s { T.sourceLibName = x }) (f (T.sourceLibName s)) +{-# INLINE sourceLibName #-} + +compatPackageKey :: Lens' InstalledPackageInfo String +compatPackageKey f s = fmap (\x -> s { T.compatPackageKey = x }) (f (T.compatPackageKey s)) +{-# INLINE compatPackageKey #-} + +license :: Lens' InstalledPackageInfo (Either SPDX.License License) +license f s = fmap (\x -> s { T.license = x }) (f (T.license s)) +{-# INLINE license #-} + +copyright :: Lens' InstalledPackageInfo String +copyright f s = fmap (\x -> s { T.copyright = x }) (f (T.copyright s)) +{-# INLINE copyright #-} + +maintainer :: Lens' InstalledPackageInfo String +maintainer f s = fmap (\x -> s { T.maintainer = x }) (f (T.maintainer s)) +{-# INLINE maintainer #-} + +author :: Lens' InstalledPackageInfo String +author f s = fmap (\x -> s { T.author = x }) (f (T.author s)) +{-# INLINE author #-} + +stability :: Lens' InstalledPackageInfo String +stability f s = fmap (\x -> s { T.stability = x }) (f (T.stability s)) +{-# INLINE stability #-} + +homepage :: Lens' InstalledPackageInfo String +homepage f s = fmap (\x -> s { T.homepage = x }) (f (T.homepage s)) +{-# INLINE homepage #-} + +pkgUrl :: Lens' InstalledPackageInfo String +pkgUrl f s = fmap (\x -> s { T.pkgUrl = x }) (f (T.pkgUrl s)) +{-# INLINE pkgUrl #-} + +synopsis :: Lens' InstalledPackageInfo String +synopsis f s = fmap (\x -> s { T.synopsis = x }) (f (T.synopsis s)) +{-# INLINE synopsis #-} + +description :: Lens' InstalledPackageInfo String +description f s = fmap (\x -> s { T.description = x }) (f (T.description s)) +{-# INLINE description #-} + +category :: Lens' InstalledPackageInfo String +category f s = fmap (\x -> s { T.category = x }) (f (T.category s)) +{-# INLINE category #-} + +abiHash :: Lens' InstalledPackageInfo AbiHash +abiHash f s = fmap (\x -> s { T.abiHash = x }) (f 
(T.abiHash s)) +{-# INLINE abiHash #-} + +indefinite :: Lens' InstalledPackageInfo Bool +indefinite f s = fmap (\x -> s { T.indefinite = x }) (f (T.indefinite s)) +{-# INLINE indefinite #-} + +exposed :: Lens' InstalledPackageInfo Bool +exposed f s = fmap (\x -> s { T.exposed = x }) (f (T.exposed s)) +{-# INLINE exposed #-} + +exposedModules :: Lens' InstalledPackageInfo [ExposedModule] +exposedModules f s = fmap (\x -> s { T.exposedModules = x }) (f (T.exposedModules s)) +{-# INLINE exposedModules #-} + +hiddenModules :: Lens' InstalledPackageInfo [ModuleName] +hiddenModules f s = fmap (\x -> s { T.hiddenModules = x }) (f (T.hiddenModules s)) +{-# INLINE hiddenModules #-} + +trusted :: Lens' InstalledPackageInfo Bool +trusted f s = fmap (\x -> s { T.trusted = x }) (f (T.trusted s)) +{-# INLINE trusted #-} + +importDirs :: Lens' InstalledPackageInfo [FilePath] +importDirs f s = fmap (\x -> s { T.importDirs = x }) (f (T.importDirs s)) +{-# INLINE importDirs #-} + +libraryDirs :: Lens' InstalledPackageInfo [FilePath] +libraryDirs f s = fmap (\x -> s { T.libraryDirs = x }) (f (T.libraryDirs s)) +{-# INLINE libraryDirs #-} + +libraryDynDirs :: Lens' InstalledPackageInfo [FilePath] +libraryDynDirs f s = fmap (\x -> s { T.libraryDynDirs = x }) (f (T.libraryDynDirs s)) +{-# INLINE libraryDynDirs #-} + +dataDir :: Lens' InstalledPackageInfo FilePath +dataDir f s = fmap (\x -> s { T.dataDir = x }) (f (T.dataDir s)) +{-# INLINE dataDir #-} + +hsLibraries :: Lens' InstalledPackageInfo [String] +hsLibraries f s = fmap (\x -> s { T.hsLibraries = x }) (f (T.hsLibraries s)) +{-# INLINE hsLibraries #-} + +extraLibraries :: Lens' InstalledPackageInfo [String] +extraLibraries f s = fmap (\x -> s { T.extraLibraries = x }) (f (T.extraLibraries s)) +{-# INLINE extraLibraries #-} + +extraGHCiLibraries :: Lens' InstalledPackageInfo [String] +extraGHCiLibraries f s = fmap (\x -> s { T.extraGHCiLibraries = x }) (f (T.extraGHCiLibraries s)) +{-# INLINE extraGHCiLibraries #-} + +includeDirs 
:: Lens' InstalledPackageInfo [FilePath] +includeDirs f s = fmap (\x -> s { T.includeDirs = x }) (f (T.includeDirs s)) +{-# INLINE includeDirs #-} + +includes :: Lens' InstalledPackageInfo [String] +includes f s = fmap (\x -> s { T.includes = x }) (f (T.includes s)) +{-# INLINE includes #-} + +depends :: Lens' InstalledPackageInfo [UnitId] +depends f s = fmap (\x -> s { T.depends = x }) (f (T.depends s)) +{-# INLINE depends #-} + +abiDepends :: Lens' InstalledPackageInfo [AbiDependency] +abiDepends f s = fmap (\x -> s { T.abiDepends = x }) (f (T.abiDepends s)) +{-# INLINE abiDepends #-} + +ccOptions :: Lens' InstalledPackageInfo [String] +ccOptions f s = fmap (\x -> s { T.ccOptions = x }) (f (T.ccOptions s)) +{-# INLINE ccOptions #-} + +cxxOptions :: Lens' InstalledPackageInfo [String] +cxxOptions f s = fmap (\x -> s { T.cxxOptions = x }) (f (T.cxxOptions s)) +{-# INLINE cxxOptions #-} + +ldOptions :: Lens' InstalledPackageInfo [String] +ldOptions f s = fmap (\x -> s { T.ldOptions = x }) (f (T.ldOptions s)) +{-# INLINE ldOptions #-} + +frameworkDirs :: Lens' InstalledPackageInfo [FilePath] +frameworkDirs f s = fmap (\x -> s { T.frameworkDirs = x }) (f (T.frameworkDirs s)) +{-# INLINE frameworkDirs #-} + +frameworks :: Lens' InstalledPackageInfo [String] +frameworks f s = fmap (\x -> s { T.frameworks = x }) (f (T.frameworks s)) +{-# INLINE frameworks #-} + +haddockInterfaces :: Lens' InstalledPackageInfo [FilePath] +haddockInterfaces f s = fmap (\x -> s { T.haddockInterfaces = x }) (f (T.haddockInterfaces s)) +{-# INLINE haddockInterfaces #-} + +haddockHTMLs :: Lens' InstalledPackageInfo [FilePath] +haddockHTMLs f s = fmap (\x -> s { T.haddockHTMLs = x }) (f (T.haddockHTMLs s)) +{-# INLINE haddockHTMLs #-} + +pkgRoot :: Lens' InstalledPackageInfo (Maybe FilePath) +pkgRoot f s = fmap (\x -> s { T.pkgRoot = x }) (f (T.pkgRoot s)) +{-# INLINE pkgRoot #-} + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/InstalledPackageInfo.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/InstalledPackageInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/InstalledPackageInfo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/InstalledPackageInfo.hs 2018-11-26 08:42:50.000000000 +0000 @@ -0,0 +1,170 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE TypeFamilies #-} +module Distribution.Types.InstalledPackageInfo ( + InstalledPackageInfo (..), + emptyInstalledPackageInfo, + mungedPackageId, + mungedPackageName, + AbiDependency (..), + ExposedModule (..), + ) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Backpack +import Distribution.Compat.Graph (IsNode (..)) +import Distribution.License +import Distribution.ModuleName +import Distribution.Package hiding (installedUnitId) +import Distribution.Types.AbiDependency +import Distribution.Types.ExposedModule +import Distribution.Types.MungedPackageId +import Distribution.Types.MungedPackageName +import Distribution.Types.UnqualComponentName +import Distribution.Version (nullVersion) + +import qualified Distribution.Package as Package +import qualified Distribution.SPDX as SPDX + +-- ----------------------------------------------------------------------------- +-- The InstalledPackageInfo type + +-- For BC reasons, we continue to name this record an InstalledPackageInfo; +-- but it would more accurately be called an InstalledUnitInfo with Backpack +data InstalledPackageInfo + = InstalledPackageInfo { + -- these parts (sourcePackageId, installedUnitId) are + -- exactly the same as PackageDescription + sourcePackageId :: PackageId, + sourceLibName :: Maybe UnqualComponentName, + installedComponentId_ :: ComponentId, + installedUnitId :: UnitId, + -- INVARIANT: if this package is definite, OpenModule's + -- OpenUnitId directly records UnitId. 
If it is + -- indefinite, OpenModule is always an OpenModuleVar + -- with the same ModuleName as the key. + instantiatedWith :: [(ModuleName, OpenModule)], + compatPackageKey :: String, + license :: Either SPDX.License License, + copyright :: String, + maintainer :: String, + author :: String, + stability :: String, + homepage :: String, + pkgUrl :: String, + synopsis :: String, + description :: String, + category :: String, + -- these parts are required by an installed package only: + abiHash :: AbiHash, + indefinite :: Bool, + exposed :: Bool, + -- INVARIANT: if the package is definite, OpenModule's + -- OpenUnitId directly records UnitId. + exposedModules :: [ExposedModule], + hiddenModules :: [ModuleName], + trusted :: Bool, + importDirs :: [FilePath], + libraryDirs :: [FilePath], + libraryDynDirs :: [FilePath], -- ^ overrides 'libraryDirs' + dataDir :: FilePath, + hsLibraries :: [String], + extraLibraries :: [String], + extraGHCiLibraries:: [String], -- overrides extraLibraries for GHCi + includeDirs :: [FilePath], + includes :: [String], + -- INVARIANT: if the package is definite, UnitId is NOT + -- a ComponentId of an indefinite package + depends :: [UnitId], + abiDepends :: [AbiDependency], + ccOptions :: [String], + cxxOptions :: [String], + ldOptions :: [String], + frameworkDirs :: [FilePath], + frameworks :: [String], + haddockInterfaces :: [FilePath], + haddockHTMLs :: [FilePath], + pkgRoot :: Maybe FilePath + } + deriving (Eq, Generic, Typeable, Read, Show) + +instance Binary InstalledPackageInfo + +instance NFData InstalledPackageInfo where rnf = genericRnf + +instance Package.HasMungedPackageId InstalledPackageInfo where + mungedId = mungedPackageId + +instance Package.Package InstalledPackageInfo where + packageId = sourcePackageId + +instance Package.HasUnitId InstalledPackageInfo where + installedUnitId = installedUnitId + +instance Package.PackageInstalled InstalledPackageInfo where + installedDepends = depends + +instance IsNode 
InstalledPackageInfo where + type Key InstalledPackageInfo = UnitId + nodeKey = installedUnitId + nodeNeighbors = depends + +mungedPackageId :: InstalledPackageInfo -> MungedPackageId +mungedPackageId ipi = + MungedPackageId (mungedPackageName ipi) (packageVersion ipi) + +-- | Returns the munged package name, which we write into @name@ for +-- compatibility with old versions of GHC. +mungedPackageName :: InstalledPackageInfo -> MungedPackageName +mungedPackageName ipi = + computeCompatPackageName + (packageName ipi) + (sourceLibName ipi) + +emptyInstalledPackageInfo :: InstalledPackageInfo +emptyInstalledPackageInfo + = InstalledPackageInfo { + sourcePackageId = PackageIdentifier (mkPackageName "") nullVersion, + sourceLibName = Nothing, + installedComponentId_ = mkComponentId "", + installedUnitId = mkUnitId "", + instantiatedWith = [], + compatPackageKey = "", + license = Left SPDX.NONE, + copyright = "", + maintainer = "", + author = "", + stability = "", + homepage = "", + pkgUrl = "", + synopsis = "", + description = "", + category = "", + abiHash = mkAbiHash "", + indefinite = False, + exposed = False, + exposedModules = [], + hiddenModules = [], + trusted = False, + importDirs = [], + libraryDirs = [], + libraryDynDirs = [], + dataDir = "", + hsLibraries = [], + extraLibraries = [], + extraGHCiLibraries= [], + includeDirs = [], + includes = [], + depends = [], + abiDepends = [], + ccOptions = [], + cxxOptions = [], + ldOptions = [], + frameworkDirs = [], + frameworks = [], + haddockInterfaces = [], + haddockHTMLs = [], + pkgRoot = Nothing + } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/LegacyExeDependency.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/LegacyExeDependency.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/LegacyExeDependency.hs 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/LegacyExeDependency.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,65 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Types.LegacyExeDependency + ( LegacyExeDependency(..) + ) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Parsec.Class +import Distribution.ParseUtils (parseMaybeQuoted) +import Distribution.Pretty +import Distribution.Text +import Distribution.Version (VersionRange, anyVersion) + +import qualified Distribution.Compat.CharParsing as P +import Distribution.Compat.ReadP ((<++)) +import qualified Distribution.Compat.ReadP as Parse +import Text.PrettyPrint (text, (<+>)) + +-- | Describes a legacy `build-tools`-style dependency on an executable +-- +-- It is "legacy" because we do not know what the build-tool referred to. It +-- could refer to a pkg-config executable (PkgconfigName), or an internal +-- executable (UnqualComponentName). Thus the name is stringly typed. 
+-- +-- @since 2.0.0.2 +data LegacyExeDependency = LegacyExeDependency + String + VersionRange + deriving (Generic, Read, Show, Eq, Typeable, Data) + +instance Binary LegacyExeDependency +instance NFData LegacyExeDependency where rnf = genericRnf + +instance Pretty LegacyExeDependency where + pretty (LegacyExeDependency name ver) = + text name <+> pretty ver + +instance Parsec LegacyExeDependency where + parsec = do + name <- parsecMaybeQuoted nameP + P.spaces + verRange <- parsecMaybeQuoted parsec <|> pure anyVersion + pure $ LegacyExeDependency name verRange + where + nameP = intercalate "-" <$> P.sepBy1 component (P.char '-') + component = do + cs <- P.munch1 (\c -> isAlphaNum c || c == '+' || c == '_') + if all isDigit cs then fail "invalid component" else return cs + +instance Text LegacyExeDependency where + parse = do name <- parseMaybeQuoted parseBuildToolName + Parse.skipSpaces + ver <- parse <++ return anyVersion + Parse.skipSpaces + return $ LegacyExeDependency name ver + where + -- like parsePackageName but accepts symbols in components + parseBuildToolName :: Parse.ReadP r String + parseBuildToolName = do ns <- Parse.sepBy1 component (Parse.char '-') + return (intercalate "-" ns) + where component = do + cs <- Parse.munch1 (\c -> isAlphaNum c || c == '+' || c == '_') + if all isDigit cs then Parse.pfail else return cs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Lens.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Lens.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,25 @@ +module Distribution.Types.Lens ( + module Distribution.Types.Benchmark.Lens, + module Distribution.Types.BuildInfo.Lens, + module Distribution.Types.Executable.Lens, + module 
Distribution.Types.ForeignLib.Lens, + module Distribution.Types.GenericPackageDescription.Lens, + module Distribution.Types.Library.Lens, + module Distribution.Types.PackageDescription.Lens, + module Distribution.Types.PackageId.Lens, + module Distribution.Types.SetupBuildInfo.Lens, + module Distribution.Types.SourceRepo.Lens, + module Distribution.Types.TestSuite.Lens, + ) where + +import Distribution.Types.Benchmark.Lens +import Distribution.Types.BuildInfo.Lens +import Distribution.Types.Executable.Lens +import Distribution.Types.ForeignLib.Lens +import Distribution.Types.GenericPackageDescription.Lens +import Distribution.Types.Library.Lens +import Distribution.Types.PackageDescription.Lens +import Distribution.Types.PackageId.Lens +import Distribution.Types.SetupBuildInfo.Lens +import Distribution.Types.SourceRepo.Lens +import Distribution.Types.TestSuite.Lens diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Library/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Library/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Library/Lens.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Library/Lens.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,40 @@ +module Distribution.Types.Library.Lens ( + Library, + module Distribution.Types.Library.Lens, + ) where + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.Compat.Lens + +import Distribution.ModuleName (ModuleName) +import Distribution.Types.BuildInfo (BuildInfo) +import Distribution.Types.Library (Library) +import Distribution.Types.ModuleReexport (ModuleReexport) +import Distribution.Types.UnqualComponentName (UnqualComponentName) + +import qualified Distribution.Types.Library as T + +libName :: Lens' Library (Maybe UnqualComponentName) +libName f s = fmap (\x -> s { T.libName = x }) (f 
(T.libName s)) +{-# INLINE libName #-} + +exposedModules :: Lens' Library [ModuleName] +exposedModules f s = fmap (\x -> s { T.exposedModules = x }) (f (T.exposedModules s)) +{-# INLINE exposedModules #-} + +reexportedModules :: Lens' Library [ModuleReexport] +reexportedModules f s = fmap (\x -> s { T.reexportedModules = x }) (f (T.reexportedModules s)) +{-# INLINE reexportedModules #-} + +signatures :: Lens' Library [ModuleName] +signatures f s = fmap (\x -> s { T.signatures = x }) (f (T.signatures s)) +{-# INLINE signatures #-} + +libExposed :: Lens' Library Bool +libExposed f s = fmap (\x -> s { T.libExposed = x }) (f (T.libExposed s)) +{-# INLINE libExposed #-} + +libBuildInfo :: Lens' Library BuildInfo +libBuildInfo f s = fmap (\x -> s { T.libBuildInfo = x }) (f (T.libBuildInfo s)) +{-# INLINE libBuildInfo #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Library.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Library.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Library.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Library.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,87 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.Library ( + Library(..), + emptyLibrary, + explicitLibModules, + libModulesAutogen, + libModules, +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.BuildInfo +import Distribution.Types.ModuleReexport +import Distribution.Types.UnqualComponentName +import Distribution.ModuleName + +import qualified Distribution.Types.BuildInfo.Lens as L + +data Library = Library + { libName :: Maybe UnqualComponentName + , exposedModules :: [ModuleName] + , reexportedModules :: [ModuleReexport] + , signatures :: [ModuleName] -- ^ What sigs need implementations? 
+ , libExposed :: Bool -- ^ Is the lib to be exposed by default? + , libBuildInfo :: BuildInfo + } + deriving (Generic, Show, Eq, Read, Typeable, Data) + +instance L.HasBuildInfo Library where + buildInfo f l = (\x -> l { libBuildInfo = x }) <$> f (libBuildInfo l) + +instance Binary Library + +instance NFData Library where rnf = genericRnf + +instance Monoid Library where + mempty = Library { + libName = mempty, + exposedModules = mempty, + reexportedModules = mempty, + signatures = mempty, + libExposed = True, + libBuildInfo = mempty + } + mappend = (<>) + +instance Semigroup Library where + a <> b = Library { + libName = combine libName, + exposedModules = combine exposedModules, + reexportedModules = combine reexportedModules, + signatures = combine signatures, + libExposed = libExposed a && libExposed b, -- so False propagates + libBuildInfo = combine libBuildInfo + } + where combine field = field a `mappend` field b + +emptyLibrary :: Library +emptyLibrary = mempty + +-- | Get all the module names from the library (exposed and internal modules) +-- which are explicitly listed in the package description which would +-- need to be compiled. (This does not include reexports, which +-- do not need to be compiled.) This may not include all modules for which +-- GHC generated interface files (i.e., implicit modules.) +explicitLibModules :: Library -> [ModuleName] +explicitLibModules lib = exposedModules lib + ++ otherModules (libBuildInfo lib) + ++ signatures lib + +-- | Get all the auto generated module names from the library, exposed or not. +-- This are a subset of 'libModules'. +libModulesAutogen :: Library -> [ModuleName] +libModulesAutogen lib = autogenModules (libBuildInfo lib) + +-- | Backwards-compatibility shim for 'explicitLibModules'. 
In most cases, +-- you actually want 'allLibModules', which returns all modules that will +-- actually be compiled, as opposed to those which are explicitly listed +-- in the package description ('explicitLibModules'); unfortunately, the +-- type signature for 'allLibModules' is incompatible since we need a +-- 'ComponentLocalBuildInfo'. +{-# DEPRECATED libModules "If you want all modules that are built with a library, use 'allLibModules'. Otherwise, use 'explicitLibModules' for ONLY the modules explicitly mentioned in the package description. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +libModules :: Library -> [ModuleName] +libModules = explicitLibModules diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/LocalBuildInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/LocalBuildInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/LocalBuildInfo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/LocalBuildInfo.hs 2018-11-26 08:42:52.000000000 +0000 @@ -0,0 +1,336 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +module Distribution.Types.LocalBuildInfo ( + -- * The type + + LocalBuildInfo(..), + + -- * Convenience accessors + + localComponentId, + localUnitId, + localCompatPackageKey, + localPackage, + + -- * Build targets of the 'LocalBuildInfo'. + + componentNameCLBIs, + + -- NB: the primes mean that they take a 'PackageDescription' + -- which may not match 'localPkgDescr' in 'LocalBuildInfo'. + -- More logical types would drop this argument, but + -- at the moment, this is the ONLY supported function, because + -- 'localPkgDescr' is not guaranteed to match. At some point + -- we will fix it and then we can use the (free) unprimed + -- namespace for the correct commands. 
+ -- + -- See https://github.com/haskell/cabal/issues/3606 for more + -- details. + + componentNameTargets', + unitIdTarget', + allTargetsInBuildOrder', + withAllTargetsInBuildOrder', + neededTargetsInBuildOrder', + withNeededTargetsInBuildOrder', + testCoverage, + + -- * Functions you SHOULD NOT USE (yet), but are defined here to + -- prevent someone from accidentally defining them + + componentNameTargets, + unitIdTarget, + allTargetsInBuildOrder, + withAllTargetsInBuildOrder, + neededTargetsInBuildOrder, + withNeededTargetsInBuildOrder, + + -- * Backwards compatibility. + + componentsConfigs, + externalPackageDeps, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.PackageDescription +import Distribution.Types.ComponentLocalBuildInfo +import Distribution.Types.ComponentRequestedSpec +import Distribution.Types.ComponentId +import Distribution.Types.MungedPackageId +import Distribution.Types.PackageId +import Distribution.Types.UnitId +import Distribution.Types.TargetInfo + +import Distribution.Simple.InstallDirs hiding (absoluteInstallDirs, + prefixRelativeInstallDirs, + substPathTemplate, ) +import Distribution.Simple.Program +import Distribution.PackageDescription +import Distribution.Simple.Compiler +import Distribution.Simple.PackageIndex +import Distribution.Simple.Setup +import Distribution.Text +import Distribution.System + +import Distribution.Compat.Graph (Graph) +import qualified Distribution.Compat.Graph as Graph +import qualified Data.Map as Map + +-- | Data cached after configuration step. See also +-- 'Distribution.Simple.Setup.ConfigFlags'. +data LocalBuildInfo = LocalBuildInfo { + configFlags :: ConfigFlags, + -- ^ Options passed to the configuration step. 
+ -- Needed to re-run configuration when .cabal is out of date + flagAssignment :: FlagAssignment, + -- ^ The final set of flags which were picked for this package + componentEnabledSpec :: ComponentRequestedSpec, + -- ^ What components were enabled during configuration, and why. + extraConfigArgs :: [String], + -- ^ Extra args on the command line for the configuration step. + -- Needed to re-run configuration when .cabal is out of date + installDirTemplates :: InstallDirTemplates, + -- ^ The installation directories for the various different + -- kinds of files + --TODO: inplaceDirTemplates :: InstallDirs FilePath + compiler :: Compiler, + -- ^ The compiler we're building with + hostPlatform :: Platform, + -- ^ The platform we're building for + buildDir :: FilePath, + -- ^ Where to build the package. + cabalFilePath :: Maybe FilePath, + -- ^ Path to the cabal file, if given during configuration. + componentGraph :: Graph ComponentLocalBuildInfo, + -- ^ All the components to build, ordered by topological + -- sort, and with their INTERNAL dependencies over the + -- intrapackage dependency graph. + -- TODO: this is assumed to be short; otherwise we want + -- some sort of ordered map. + componentNameMap :: Map ComponentName [ComponentLocalBuildInfo], + -- ^ A map from component name to all matching + -- components. These coincide with 'componentGraph' + installedPkgs :: InstalledPackageIndex, + -- ^ All the info about the installed packages that the + -- current package depends on (directly or indirectly). + -- The copy saved on disk does NOT include internal + -- dependencies (because we just don't have enough + -- information at this point to have an + -- 'InstalledPackageInfo' for an internal dep), but we + -- will often update it with the internal dependencies; + -- see for example 'Distribution.Simple.Build.build'. + -- (This admonition doesn't apply for per-component builds.) 
+ pkgDescrFile :: Maybe FilePath, + -- ^ the filename containing the .cabal file, if available + localPkgDescr :: PackageDescription, + -- ^ WARNING WARNING WARNING Be VERY careful about using + -- this function; we haven't deprecated it but using it + -- could introduce subtle bugs related to + -- 'HookedBuildInfo'. + -- + -- In principle, this is supposed to contain the + -- resolved package description, that does not contain + -- any conditionals. However, it MAY NOT contain + -- the description wtih a 'HookedBuildInfo' applied + -- to it; see 'HookedBuildInfo' for the whole sordid saga. + -- As much as possible, Cabal library should avoid using + -- this parameter. + withPrograms :: ProgramDb, -- ^Location and args for all programs + withPackageDB :: PackageDBStack, -- ^What package database to use, global\/user + withVanillaLib:: Bool, -- ^Whether to build normal libs. + withProfLib :: Bool, -- ^Whether to build profiling versions of libs. + withSharedLib :: Bool, -- ^Whether to build shared versions of libs. + withStaticLib :: Bool, -- ^Whether to build static versions of libs (with all other libs rolled in) + withDynExe :: Bool, -- ^Whether to link executables dynamically + withProfExe :: Bool, -- ^Whether to build executables for profiling. + withProfLibDetail :: ProfDetailLevel, -- ^Level of automatic profile detail. + withProfExeDetail :: ProfDetailLevel, -- ^Level of automatic profile detail. + withOptimization :: OptimisationLevel, -- ^Whether to build with optimization (if available). + withDebugInfo :: DebugInfoLevel, -- ^Whether to emit debug info (if available). + withGHCiLib :: Bool, -- ^Whether to build libs suitable for use with GHCi. 
+ splitSections :: Bool, -- ^Use -split-sections with GHC, if available + splitObjs :: Bool, -- ^Use -split-objs with GHC, if available + stripExes :: Bool, -- ^Whether to strip executables during install + stripLibs :: Bool, -- ^Whether to strip libraries during install + exeCoverage :: Bool, -- ^Whether to enable executable program coverage + libCoverage :: Bool, -- ^Whether to enable library program coverage + progPrefix :: PathTemplate, -- ^Prefix to be prepended to installed executables + progSuffix :: PathTemplate, -- ^Suffix to be appended to installed executables + relocatable :: Bool -- ^Whether to build a relocatable package + } deriving (Generic, Read, Show) + +instance Binary LocalBuildInfo + +------------------------------------------------------------------------------- +-- Accessor functions + +-- TODO: Get rid of these functions, as much as possible. They are +-- a bit useful in some cases, but you should be very careful! + +-- | Extract the 'ComponentId' from the public library component of a +-- 'LocalBuildInfo' if it exists, or make a fake component ID based +-- on the package ID. +localComponentId :: LocalBuildInfo -> ComponentId +localComponentId lbi = + case componentNameCLBIs lbi CLibName of + [LibComponentLocalBuildInfo { componentComponentId = cid }] + -> cid + _ -> mkComponentId (display (localPackage lbi)) + +-- | Extract the 'PackageIdentifier' of a 'LocalBuildInfo'. +-- This is a "safe" use of 'localPkgDescr' +localPackage :: LocalBuildInfo -> PackageId +localPackage lbi = package (localPkgDescr lbi) + +-- | Extract the 'UnitId' from the library component of a +-- 'LocalBuildInfo' if it exists, or make a fake unit ID based on +-- the package ID. 
+localUnitId :: LocalBuildInfo -> UnitId +localUnitId lbi = + case componentNameCLBIs lbi CLibName of + [LibComponentLocalBuildInfo { componentUnitId = uid }] + -> uid + _ -> mkLegacyUnitId $ localPackage lbi + +-- | Extract the compatibility package key from the public library component of a +-- 'LocalBuildInfo' if it exists, or make a fake package key based +-- on the package ID. +localCompatPackageKey :: LocalBuildInfo -> String +localCompatPackageKey lbi = + case componentNameCLBIs lbi CLibName of + [LibComponentLocalBuildInfo { componentCompatPackageKey = pk }] + -> pk + _ -> display (localPackage lbi) + +-- | Convenience function to generate a default 'TargetInfo' from a +-- 'ComponentLocalBuildInfo'. The idea is to call this once, and then +-- use 'TargetInfo' everywhere else. Private to this module. +mkTargetInfo :: PackageDescription -> LocalBuildInfo -> ComponentLocalBuildInfo -> TargetInfo +mkTargetInfo pkg_descr _lbi clbi = + TargetInfo { + targetCLBI = clbi, + -- NB: @pkg_descr@, not @localPkgDescr lbi@! + targetComponent = getComponent pkg_descr + (componentLocalName clbi) + } + +-- | Return all 'TargetInfo's associated with 'ComponentName'. +-- In the presence of Backpack there may be more than one! +-- Has a prime because it takes a 'PackageDescription' argument +-- which may disagree with 'localPkgDescr' in 'LocalBuildInfo'. +componentNameTargets' :: PackageDescription -> LocalBuildInfo -> ComponentName -> [TargetInfo] +componentNameTargets' pkg_descr lbi cname = + case Map.lookup cname (componentNameMap lbi) of + Just clbis -> map (mkTargetInfo pkg_descr lbi) clbis + Nothing -> [] + +unitIdTarget' :: PackageDescription -> LocalBuildInfo -> UnitId -> Maybe TargetInfo +unitIdTarget' pkg_descr lbi uid = + case Graph.lookup uid (componentGraph lbi) of + Just clbi -> Just (mkTargetInfo pkg_descr lbi clbi) + Nothing -> Nothing + +-- | Return all 'ComponentLocalBuildInfo's associated with 'ComponentName'. 
+-- In the presence of Backpack there may be more than one! +componentNameCLBIs :: LocalBuildInfo -> ComponentName -> [ComponentLocalBuildInfo] +componentNameCLBIs lbi cname = + case Map.lookup cname (componentNameMap lbi) of + Just clbis -> clbis + Nothing -> [] + +-- TODO: Maybe cache topsort (Graph can do this) + +-- | Return the list of default 'TargetInfo's associated with a +-- configured package, in the order they need to be built. +-- Has a prime because it takes a 'PackageDescription' argument +-- which may disagree with 'localPkgDescr' in 'LocalBuildInfo'. +allTargetsInBuildOrder' :: PackageDescription -> LocalBuildInfo -> [TargetInfo] +allTargetsInBuildOrder' pkg_descr lbi + = map (mkTargetInfo pkg_descr lbi) (Graph.revTopSort (componentGraph lbi)) + +-- | Execute @f@ for every 'TargetInfo' in the package, respecting the +-- build dependency order. (TODO: We should use Shake!) +-- Has a prime because it takes a 'PackageDescription' argument +-- which may disagree with 'localPkgDescr' in 'LocalBuildInfo'. +withAllTargetsInBuildOrder' :: PackageDescription -> LocalBuildInfo -> (TargetInfo -> IO ()) -> IO () +withAllTargetsInBuildOrder' pkg_descr lbi f + = sequence_ [ f target | target <- allTargetsInBuildOrder' pkg_descr lbi ] + +-- | Return the list of all targets needed to build the @uids@, in +-- the order they need to be built. +-- Has a prime because it takes a 'PackageDescription' argument +-- which may disagree with 'localPkgDescr' in 'LocalBuildInfo'. +neededTargetsInBuildOrder' :: PackageDescription -> LocalBuildInfo -> [UnitId] -> [TargetInfo] +neededTargetsInBuildOrder' pkg_descr lbi uids = + case Graph.closure (componentGraph lbi) uids of + Nothing -> error $ "localBuildPlan: missing uids " ++ intercalate ", " (map display uids) + Just clos -> map (mkTargetInfo pkg_descr lbi) (Graph.revTopSort (Graph.fromDistinctList clos)) + +-- | Execute @f@ for every 'TargetInfo' needed to build @uid@s, respecting +-- the build dependency order. 
+-- Has a prime because it takes a 'PackageDescription' argument +-- which may disagree with 'localPkgDescr' in 'LocalBuildInfo'. +withNeededTargetsInBuildOrder' :: PackageDescription -> LocalBuildInfo -> [UnitId] -> (TargetInfo -> IO ()) -> IO () +withNeededTargetsInBuildOrder' pkg_descr lbi uids f + = sequence_ [ f target | target <- neededTargetsInBuildOrder' pkg_descr lbi uids ] + +-- | Is coverage enabled for test suites? In practice, this requires library +-- and executable profiling to be enabled. +testCoverage :: LocalBuildInfo -> Bool +testCoverage lbi = exeCoverage lbi && libCoverage lbi + +------------------------------------------------------------------------------- +-- Stub functions to prevent someone from accidentally defining them + +{-# WARNING componentNameTargets, unitIdTarget, allTargetsInBuildOrder, withAllTargetsInBuildOrder, neededTargetsInBuildOrder, withNeededTargetsInBuildOrder "By using this function, you may be introducing a bug where you retrieve a 'Component' which does not have 'HookedBuildInfo' applied to it. See the documentation for 'HookedBuildInfo' for an explanation of the issue. If you have a 'PakcageDescription' handy (NOT from the 'LocalBuildInfo'), try using the primed version of the function, which takes it as an extra argument." 
#-} + +componentNameTargets :: LocalBuildInfo -> ComponentName -> [TargetInfo] +componentNameTargets lbi = componentNameTargets' (localPkgDescr lbi) lbi + +unitIdTarget :: LocalBuildInfo -> UnitId -> Maybe TargetInfo +unitIdTarget lbi = unitIdTarget' (localPkgDescr lbi) lbi + +allTargetsInBuildOrder :: LocalBuildInfo -> [TargetInfo] +allTargetsInBuildOrder lbi = allTargetsInBuildOrder' (localPkgDescr lbi) lbi + +withAllTargetsInBuildOrder :: LocalBuildInfo -> (TargetInfo -> IO ()) -> IO () +withAllTargetsInBuildOrder lbi = withAllTargetsInBuildOrder' (localPkgDescr lbi) lbi + +neededTargetsInBuildOrder :: LocalBuildInfo -> [UnitId] -> [TargetInfo] +neededTargetsInBuildOrder lbi = neededTargetsInBuildOrder' (localPkgDescr lbi) lbi + +withNeededTargetsInBuildOrder :: LocalBuildInfo -> [UnitId] -> (TargetInfo -> IO ()) -> IO () +withNeededTargetsInBuildOrder lbi = withNeededTargetsInBuildOrder' (localPkgDescr lbi) lbi + +------------------------------------------------------------------------------- +-- Backwards compatibility + +{-# DEPRECATED componentsConfigs "Use 'componentGraph' instead; you can get a list of 'ComponentLocalBuildInfo' with 'Distribution.Compat.Graph.toList'. There's not a good way to get the list of 'ComponentName's the 'ComponentLocalBuildInfo' depends on because this query doesn't make sense; the graph is indexed by 'UnitId' not 'ComponentName'. Given a 'UnitId' you can lookup the 'ComponentLocalBuildInfo' ('getCLBI') and then get the 'ComponentName' ('componentLocalName]). To be removed in Cabal 3.0" #-} +componentsConfigs :: LocalBuildInfo -> [(ComponentName, ComponentLocalBuildInfo, [ComponentName])] +componentsConfigs lbi = + [ (componentLocalName clbi, + clbi, + mapMaybe (fmap componentLocalName . flip Graph.lookup g) + (componentInternalDeps clbi)) + | clbi <- Graph.toList g ] + where + g = componentGraph lbi + +-- | External package dependencies for the package as a whole. 
This is the +-- union of the individual 'componentPackageDeps', less any internal deps. +{-# DEPRECATED externalPackageDeps "You almost certainly don't want this function, which agglomerates the dependencies of ALL enabled components. If you're using this to write out information on your dependencies, read off the dependencies directly from the actual component in question. To be removed in Cabal 3.0" #-} +externalPackageDeps :: LocalBuildInfo -> [(UnitId, MungedPackageId)] +externalPackageDeps lbi = + -- TODO: what about non-buildable components? + nub [ (ipkgid, pkgid) + | clbi <- Graph.toList (componentGraph lbi) + , (ipkgid, pkgid) <- componentPackageDeps clbi + , not (internal ipkgid) ] + where + -- True if this dependency is an internal one (depends on the library + -- defined in the same package). + internal ipkgid = any ((==ipkgid) . componentUnitId) (Graph.toList (componentGraph lbi)) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Mixin.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Mixin.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Mixin.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Mixin.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,45 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.Mixin ( + Mixin(..), +) where + +import Distribution.Compat.Prelude +import Prelude () + +import Text.PrettyPrint ((<+>)) + +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text +import Distribution.Types.IncludeRenaming +import Distribution.Types.PackageName + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse + +data Mixin = Mixin { mixinPackageName :: PackageName + , mixinIncludeRenaming :: IncludeRenaming } + deriving (Show, 
Read, Eq, Ord, Typeable, Data, Generic) + +instance Binary Mixin + +instance NFData Mixin where rnf = genericRnf + +instance Pretty Mixin where + pretty (Mixin pkg_name incl) = pretty pkg_name <+> pretty incl + +instance Parsec Mixin where + parsec = do + mod_name <- parsec + P.spaces + incl <- parsec + return (Mixin mod_name incl) + +instance Text Mixin where + parse = do + pkg_name <- parse + Parse.skipSpaces + incl <- parse + return (Mixin pkg_name incl) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Module.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Module.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Module.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Module.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,54 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} + +module Distribution.Types.Module + ( Module(..) + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import qualified Distribution.Compat.ReadP as Parse +import qualified Distribution.Compat.CharParsing as P +import qualified Text.PrettyPrint as Disp +import Distribution.Pretty +import Distribution.Parsec.Class +import Distribution.Text +import Distribution.Types.UnitId +import Distribution.ModuleName + +-- | A module identity uniquely identifies a Haskell module by +-- qualifying a 'ModuleName' with the 'UnitId' which defined +-- it. This type distinguishes between two packages +-- which provide a module with the same name, or a module +-- from the same package compiled with different dependencies. +-- There are a few cases where Cabal needs to know about +-- module identities, e.g., when writing out reexported modules in +-- the 'InstalledPackageInfo'. 
+data Module = + Module DefUnitId ModuleName + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) + +instance Binary Module + +instance Pretty Module where + pretty (Module uid mod_name) = + pretty uid <<>> Disp.text ":" <<>> pretty mod_name + +instance Parsec Module where + parsec = do + uid <- parsec + _ <- P.char ':' + mod_name <- parsec + return (Module uid mod_name) + +instance Text Module where + parse = do + uid <- parse + _ <- Parse.char ':' + mod_name <- parse + return (Module uid mod_name) + +instance NFData Module where + rnf (Module uid mod_name) = rnf uid `seq` rnf mod_name diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ModuleReexport.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ModuleReexport.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ModuleReexport.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ModuleReexport.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,67 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.ModuleReexport ( + ModuleReexport(..) 
+) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.ModuleName +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text +import Distribution.Types.PackageName + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse +import Text.PrettyPrint ((<+>)) +import qualified Text.PrettyPrint as Disp + +-- ----------------------------------------------------------------------------- +-- Module re-exports + +data ModuleReexport = ModuleReexport { + moduleReexportOriginalPackage :: Maybe PackageName, + moduleReexportOriginalName :: ModuleName, + moduleReexportName :: ModuleName + } + deriving (Eq, Generic, Read, Show, Typeable, Data) + +instance Binary ModuleReexport + +instance NFData ModuleReexport where rnf = genericRnf + +instance Pretty ModuleReexport where + pretty (ModuleReexport mpkgname origname newname) = + maybe Disp.empty (\pkgname -> pretty pkgname <<>> Disp.char ':') mpkgname + <<>> pretty origname + <+> if newname == origname + then Disp.empty + else Disp.text "as" <+> pretty newname + +instance Parsec ModuleReexport where + parsec = do + mpkgname <- P.optional (P.try $ parsec <* P.char ':') + origname <- parsec + newname <- P.option origname $ P.try $ do + P.spaces + _ <- P.string "as" + P.spaces + parsec + return (ModuleReexport mpkgname origname newname) + +instance Text ModuleReexport where + parse = do + mpkgname <- Parse.option Nothing $ do + pkgname <- parse + _ <- Parse.char ':' + return (Just pkgname) + origname <- parse + newname <- Parse.option origname $ do + Parse.skipSpaces + _ <- Parse.string "as" + Parse.skipSpaces + parse + return (ModuleReexport mpkgname origname newname) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ModuleRenaming.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ModuleRenaming.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/ModuleRenaming.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/ModuleRenaming.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,140 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.ModuleRenaming ( + ModuleRenaming(..), + interpModuleRenaming, + defaultRenaming, + isDefaultRenaming, +) where + +import Distribution.Compat.Prelude hiding (empty) +import Prelude () + +import Distribution.ModuleName +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text + +import qualified Data.Map as Map +import qualified Data.Set as Set +import qualified Distribution.Compat.CharParsing as P +import Distribution.Compat.ReadP ((<++)) +import qualified Distribution.Compat.ReadP as Parse +import Text.PrettyPrint (hsep, parens, punctuate, text, (<+>), comma) + +-- | Renaming applied to the modules provided by a package. +-- The boolean indicates whether or not to also include all of the +-- original names of modules. Thus, @ModuleRenaming False []@ is +-- "don't expose any modules, and @ModuleRenaming True [("Data.Bool", "Bool")]@ +-- is, "expose all modules, but also expose @Data.Bool@ as @Bool@". +-- If a renaming is omitted you get the 'DefaultRenaming'. +-- +-- (NB: This is a list not a map so that we can preserve order.) +-- +data ModuleRenaming + -- | A module renaming/thinning; e.g., @(A as B, C as C)@ + -- brings @B@ and @C@ into scope. + = ModuleRenaming [(ModuleName, ModuleName)] + -- | The default renaming, bringing all exported modules + -- into scope. + | DefaultRenaming + -- | Hiding renaming, e.g., @hiding (A, B)@, bringing all + -- exported modules into scope except the hidden ones. 
+ | HidingRenaming [ModuleName] + deriving (Show, Read, Eq, Ord, Typeable, Data, Generic) + +-- | Interpret a 'ModuleRenaming' as a partial map from 'ModuleName' +-- to 'ModuleName'. For efficiency, you should partially apply it +-- with 'ModuleRenaming' and then reuse it. +interpModuleRenaming :: ModuleRenaming -> ModuleName -> Maybe ModuleName +interpModuleRenaming DefaultRenaming = Just +interpModuleRenaming (ModuleRenaming rns) = + let m = Map.fromList rns + in \k -> Map.lookup k m +interpModuleRenaming (HidingRenaming hs) = + let s = Set.fromList hs + in \k -> if k `Set.member` s then Nothing else Just k + +-- | The default renaming, if something is specified in @build-depends@ +-- only. +defaultRenaming :: ModuleRenaming +defaultRenaming = DefaultRenaming + +-- | Tests if its the default renaming; we can use a more compact syntax +-- in 'Distribution.Types.IncludeRenaming.IncludeRenaming' in this case. +isDefaultRenaming :: ModuleRenaming -> Bool +isDefaultRenaming DefaultRenaming = True +isDefaultRenaming _ = False + +instance Binary ModuleRenaming where + +instance NFData ModuleRenaming where rnf = genericRnf + +-- NB: parentheses are mandatory, because later we may extend this syntax +-- to allow "hiding (A, B)" or other modifier words. +instance Pretty ModuleRenaming where + pretty DefaultRenaming = mempty + pretty (HidingRenaming hides) + = text "hiding" <+> parens (hsep (punctuate comma (map pretty hides))) + pretty (ModuleRenaming rns) + = parens . 
hsep $ punctuate comma (map dispEntry rns) + where dispEntry (orig, new) + | orig == new = pretty orig + | otherwise = pretty orig <+> text "as" <+> pretty new + +instance Parsec ModuleRenaming where + -- NB: try not necessary as the first token is obvious + parsec = P.choice [ parseRename, parseHiding, return DefaultRenaming ] + where + parseRename = do + rns <- P.between (P.char '(') (P.char ')') parseList + P.spaces + return (ModuleRenaming rns) + parseHiding = do + _ <- P.string "hiding" + P.spaces + hides <- P.between (P.char '(') (P.char ')') + (P.sepBy parsec (P.char ',' >> P.spaces)) + return (HidingRenaming hides) + parseList = + P.sepBy parseEntry (P.char ',' >> P.spaces) + parseEntry = do + orig <- parsec + P.spaces + P.option (orig, orig) $ do + _ <- P.string "as" + P.spaces + new <- parsec + P.spaces + return (orig, new) + + + +instance Text ModuleRenaming where + parse = fmap ModuleRenaming parseRns + <++ parseHidingRenaming + <++ return DefaultRenaming + where parseRns = do + rns <- Parse.between (Parse.char '(') (Parse.char ')') parseList + Parse.skipSpaces + return rns + parseHidingRenaming = do + _ <- Parse.string "hiding" + Parse.skipSpaces + hides <- Parse.between (Parse.char '(') (Parse.char ')') + (Parse.sepBy parse (Parse.char ',' >> Parse.skipSpaces)) + return (HidingRenaming hides) + parseList = + Parse.sepBy parseEntry (Parse.char ',' >> Parse.skipSpaces) + parseEntry :: Parse.ReadP r (ModuleName, ModuleName) + parseEntry = do + orig <- parse + Parse.skipSpaces + (do _ <- Parse.string "as" + Parse.skipSpaces + new <- parse + Parse.skipSpaces + return (orig, new) + <++ + return (orig, orig)) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/MungedPackageId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/MungedPackageId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/MungedPackageId.hs 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/MungedPackageId.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,55 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Types.MungedPackageId + ( MungedPackageId(..) + , computeCompatPackageId + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Version + ( Version, nullVersion ) + +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp +import Distribution.Compat.ReadP +import Distribution.Text +import Distribution.Types.PackageId +import Distribution.Types.UnqualComponentName +import Distribution.Types.MungedPackageName + +-- | A simple pair of a 'MungedPackageName' and 'Version'. 'MungedPackageName' is to +-- 'MungedPackageId' as 'PackageName' is to 'PackageId'. See 'MungedPackageName' for more +-- info. +data MungedPackageId + = MungedPackageId { + -- | The combined package and component name. see documentation for + -- 'MungedPackageName'. + mungedName :: MungedPackageName, + -- | The version of this package / component, eg 1.2 + mungedVersion :: Version + } + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) + +instance Binary MungedPackageId + +instance Text MungedPackageId where + disp (MungedPackageId n v) + | v == nullVersion = disp n -- if no version, don't show version. + | otherwise = disp n <<>> Disp.char '-' <<>> disp v + + parse = do + n <- parse + v <- (Parse.char '-' >> parse) <++ return nullVersion + return (MungedPackageId n v) + +instance NFData MungedPackageId where + rnf (MungedPackageId name version) = rnf name `seq` rnf version + +-- | See docs for 'Distribution.Types.MungedPackageName.computeCompatPackageId'. this +-- is a thin wrapper around that. 
+computeCompatPackageId :: PackageId -> Maybe UnqualComponentName -> MungedPackageId +computeCompatPackageId (PackageIdentifier pn vr) mb_uqn = MungedPackageId pn' vr + where pn' = computeCompatPackageName pn mb_uqn diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/MungedPackageName.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/MungedPackageName.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/MungedPackageName.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/MungedPackageName.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,141 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Types.MungedPackageName + ( MungedPackageName, unMungedPackageName, mkMungedPackageName + , computeCompatPackageName + , decodeCompatPackageName + ) where + +import Distribution.Compat.Prelude +import Distribution.Utils.ShortText +import Prelude () + +import Distribution.Parsec.Class +import Distribution.ParseUtils +import Distribution.Pretty +import Distribution.Text +import Distribution.Types.PackageName +import Distribution.Types.UnqualComponentName + +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +-- | A combination of a package and component name used in various legacy +-- interfaces, chiefly bundled with a version as 'MungedPackageId'. It's generally +-- better to use a 'UnitId' to opaquely refer to some compilation/packing unit, +-- but that doesn't always work, e.g. where a "name" is needed, in which case +-- this can be used as a fallback. +-- +-- Use 'mkMungedPackageName' and 'unMungedPackageName' to convert from/to a 'String'. 
+-- +-- @since 2.0.0.2 +newtype MungedPackageName = MungedPackageName ShortText + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) + +-- | Convert 'MungedPackageName' to 'String' +unMungedPackageName :: MungedPackageName -> String +unMungedPackageName (MungedPackageName s) = fromShortText s + +-- | Construct a 'MungedPackageName' from a 'String' +-- +-- 'mkMungedPackageName' is the inverse to 'unMungedPackageName' +-- +-- Note: No validations are performed to ensure that the resulting +-- 'MungedPackageName' is valid +-- +-- @since 2.0.0.2 +mkMungedPackageName :: String -> MungedPackageName +mkMungedPackageName = MungedPackageName . toShortText + +-- | 'mkMungedPackageName' +-- +-- @since 2.0.0.2 +instance IsString MungedPackageName where + fromString = mkMungedPackageName + +instance Binary MungedPackageName + +instance Pretty MungedPackageName where + pretty = Disp.text . unMungedPackageName + +instance Parsec MungedPackageName where + parsec = mkMungedPackageName <$> parsecUnqualComponentName + +instance Text MungedPackageName where + parse = mkMungedPackageName <$> parsePackageName + +instance NFData MungedPackageName where + rnf (MungedPackageName pkg) = rnf pkg + +-- | Computes the package name for a library. If this is the public +-- library, it will just be the original package name; otherwise, +-- it will be a munged package name recording the original package +-- name as well as the name of the internal library. +-- +-- A lot of tooling in the Haskell ecosystem assumes that if something +-- is installed to the package database with the package name 'foo', +-- then it actually is an entry for the (only public) library in package +-- 'foo'. With internal packages, this is not necessarily true: +-- a public library as well as arbitrarily many internal libraries may +-- come from the same package. 
To prevent tools from getting confused +-- in this case, the package name of these internal libraries is munged +-- so that they do not conflict the public library proper. A particular +-- case where this matters is ghc-pkg: if we don't munge the package +-- name, the inplace registration will OVERRIDE a different internal +-- library. +-- +-- We munge into a reserved namespace, "z-", and encode both the +-- component name and the package name of an internal library using the +-- following format: +-- +-- compat-pkg-name ::= "z-" package-name "-z-" library-name +-- +-- where package-name and library-name have "-" ( "z" + ) "-" +-- segments encoded by adding an extra "z". +-- +-- When we have the public library, the compat-pkg-name is just the +-- package-name, no surprises there! +-- +computeCompatPackageName :: PackageName -> Maybe UnqualComponentName -> MungedPackageName +-- First handle the cases where we can just use the original 'PackageName'. +-- This is for the PRIMARY library, and it is non-Backpack, or the +-- indefinite package for us. 
+computeCompatPackageName pkg_name Nothing + = mkMungedPackageName $ unPackageName pkg_name +computeCompatPackageName pkg_name (Just uqn) + = mkMungedPackageName $ + "z-" ++ zdashcode (unPackageName pkg_name) ++ + "-z-" ++ zdashcode (unUnqualComponentName uqn) + +decodeCompatPackageName :: MungedPackageName -> (PackageName, Maybe UnqualComponentName) +decodeCompatPackageName m = + case unMungedPackageName m of + 'z':'-':rest | [([pn, cn], "")] <- Parse.readP_to_S parseZDashCode rest + -> (mkPackageName pn, Just (mkUnqualComponentName cn)) + s -> (mkPackageName s, Nothing) + +zdashcode :: String -> String +zdashcode s = go s (Nothing :: Maybe Int) [] + where go [] _ r = reverse r + go ('-':z) (Just n) r | n > 0 = go z (Just 0) ('-':'z':r) + go ('-':z) _ r = go z (Just 0) ('-':r) + go ('z':z) (Just n) r = go z (Just (n+1)) ('z':r) + go (c:z) _ r = go z Nothing (c:r) + +parseZDashCode :: Parse.ReadP r [String] +parseZDashCode = do + ns <- Parse.sepBy1 (Parse.many1 (Parse.satisfy (/= '-'))) (Parse.char '-') + Parse.eof + return (go ns) + where + go ns = case break (=="z") ns of + (_, []) -> [paste ns] + (as, "z":bs) -> paste as : go bs + _ -> error "parseZDashCode: go" + unZ :: String -> String + unZ "" = error "parseZDashCode: unZ" + unZ r@('z':zs) | all (=='z') zs = zs + | otherwise = r + unZ r = r + paste :: [String] -> String + paste = intercalate "-" . 
map unZ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/PackageDescription/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/PackageDescription/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/PackageDescription/Lens.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/PackageDescription/Lens.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,228 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RankNTypes #-} +module Distribution.Types.PackageDescription.Lens ( + PackageDescription, + module Distribution.Types.PackageDescription.Lens, + ) where + +import Distribution.Compat.Lens +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Compiler (CompilerFlavor) +import Distribution.License (License) +import Distribution.ModuleName (ModuleName) +import Distribution.Types.Benchmark (Benchmark, benchmarkModules) +import Distribution.Types.Benchmark.Lens (benchmarkName, benchmarkBuildInfo) +import Distribution.Types.BuildInfo (BuildInfo) +import Distribution.Types.BuildType (BuildType) +import Distribution.Types.ComponentName (ComponentName(..)) +import Distribution.Types.Executable (Executable, exeModules) +import Distribution.Types.Executable.Lens (exeName, exeBuildInfo) +import Distribution.Types.ForeignLib (ForeignLib, foreignLibModules) +import Distribution.Types.ForeignLib.Lens (foreignLibName, foreignLibBuildInfo) +import Distribution.Types.Library (Library, explicitLibModules) +import Distribution.Types.Library.Lens (libName, libBuildInfo) +import Distribution.Types.PackageDescription (PackageDescription) +import Distribution.Types.PackageId (PackageIdentifier) +import Distribution.Types.SetupBuildInfo (SetupBuildInfo) +import Distribution.Types.SourceRepo (SourceRepo) +import Distribution.Types.TestSuite (TestSuite, testModules) +import 
Distribution.Types.TestSuite.Lens (testName, testBuildInfo) +import Distribution.Types.UnqualComponentName ( UnqualComponentName ) +import Distribution.Version (Version, VersionRange) + +import qualified Distribution.SPDX as SPDX +import qualified Distribution.Types.PackageDescription as T + +package :: Lens' PackageDescription PackageIdentifier +package f s = fmap (\x -> s { T.package = x }) (f (T.package s)) +{-# INLINE package #-} + +licenseRaw :: Lens' PackageDescription (Either SPDX.License License) +licenseRaw f s = fmap (\x -> s { T.licenseRaw = x }) (f (T.licenseRaw s)) +{-# INLINE licenseRaw #-} + +licenseFiles :: Lens' PackageDescription [String] +licenseFiles f s = fmap (\x -> s { T.licenseFiles = x }) (f (T.licenseFiles s)) +{-# INLINE licenseFiles #-} + +copyright :: Lens' PackageDescription String +copyright f s = fmap (\x -> s { T.copyright = x }) (f (T.copyright s)) +{-# INLINE copyright #-} + +maintainer :: Lens' PackageDescription String +maintainer f s = fmap (\x -> s { T.maintainer = x }) (f (T.maintainer s)) +{-# INLINE maintainer #-} + +author :: Lens' PackageDescription String +author f s = fmap (\x -> s { T.author = x }) (f (T.author s)) +{-# INLINE author #-} + +stability :: Lens' PackageDescription String +stability f s = fmap (\x -> s { T.stability = x }) (f (T.stability s)) +{-# INLINE stability #-} + +testedWith :: Lens' PackageDescription [(CompilerFlavor,VersionRange)] +testedWith f s = fmap (\x -> s { T.testedWith = x }) (f (T.testedWith s)) +{-# INLINE testedWith #-} + +homepage :: Lens' PackageDescription String +homepage f s = fmap (\x -> s { T.homepage = x }) (f (T.homepage s)) +{-# INLINE homepage #-} + +pkgUrl :: Lens' PackageDescription String +pkgUrl f s = fmap (\x -> s { T.pkgUrl = x }) (f (T.pkgUrl s)) +{-# INLINE pkgUrl #-} + +bugReports :: Lens' PackageDescription String +bugReports f s = fmap (\x -> s { T.bugReports = x }) (f (T.bugReports s)) +{-# INLINE bugReports #-} + +sourceRepos :: Lens' PackageDescription 
[SourceRepo] +sourceRepos f s = fmap (\x -> s { T.sourceRepos = x }) (f (T.sourceRepos s)) +{-# INLINE sourceRepos #-} + +synopsis :: Lens' PackageDescription String +synopsis f s = fmap (\x -> s { T.synopsis = x }) (f (T.synopsis s)) +{-# INLINE synopsis #-} + +description :: Lens' PackageDescription String +description f s = fmap (\x -> s { T.description = x }) (f (T.description s)) +{-# INLINE description #-} + +category :: Lens' PackageDescription String +category f s = fmap (\x -> s { T.category = x }) (f (T.category s)) +{-# INLINE category #-} + +customFieldsPD :: Lens' PackageDescription [(String,String)] +customFieldsPD f s = fmap (\x -> s { T.customFieldsPD = x }) (f (T.customFieldsPD s)) +{-# INLINE customFieldsPD #-} + +specVersionRaw :: Lens' PackageDescription (Either Version VersionRange) +specVersionRaw f s = fmap (\x -> s { T.specVersionRaw = x }) (f (T.specVersionRaw s)) +{-# INLINE specVersionRaw #-} + +buildTypeRaw :: Lens' PackageDescription (Maybe BuildType) +buildTypeRaw f s = fmap (\x -> s { T.buildTypeRaw = x }) (f (T.buildTypeRaw s)) +{-# INLINE buildTypeRaw #-} + +setupBuildInfo :: Lens' PackageDescription (Maybe SetupBuildInfo) +setupBuildInfo f s = fmap (\x -> s { T.setupBuildInfo = x }) (f (T.setupBuildInfo s)) +{-# INLINE setupBuildInfo #-} + +library :: Lens' PackageDescription (Maybe Library) +library f s = fmap (\x -> s { T.library = x }) (f (T.library s)) +{-# INLINE library #-} + +subLibraries :: Lens' PackageDescription [Library] +subLibraries f s = fmap (\x -> s { T.subLibraries = x }) (f (T.subLibraries s)) +{-# INLINE subLibraries #-} + +executables :: Lens' PackageDescription [Executable] +executables f s = fmap (\x -> s { T.executables = x }) (f (T.executables s)) +{-# INLINE executables #-} + +foreignLibs :: Lens' PackageDescription [ForeignLib] +foreignLibs f s = fmap (\x -> s { T.foreignLibs = x }) (f (T.foreignLibs s)) +{-# INLINE foreignLibs #-} + +testSuites :: Lens' PackageDescription [TestSuite] +testSuites f s = 
fmap (\x -> s { T.testSuites = x }) (f (T.testSuites s)) +{-# INLINE testSuites #-} + +benchmarks :: Lens' PackageDescription [Benchmark] +benchmarks f s = fmap (\x -> s { T.benchmarks = x }) (f (T.benchmarks s)) +{-# INLINE benchmarks #-} + +dataFiles :: Lens' PackageDescription [FilePath] +dataFiles f s = fmap (\x -> s { T.dataFiles = x }) (f (T.dataFiles s)) +{-# INLINE dataFiles #-} + +dataDir :: Lens' PackageDescription FilePath +dataDir f s = fmap (\x -> s { T.dataDir = x }) (f (T.dataDir s)) +{-# INLINE dataDir #-} + +extraSrcFiles :: Lens' PackageDescription [String] +extraSrcFiles f s = fmap (\x -> s { T.extraSrcFiles = x }) (f (T.extraSrcFiles s)) +{-# INLINE extraSrcFiles #-} + +extraTmpFiles :: Lens' PackageDescription [String] +extraTmpFiles f s = fmap (\x -> s { T.extraTmpFiles = x }) (f (T.extraTmpFiles s)) +{-# INLINE extraTmpFiles #-} + +extraDocFiles :: Lens' PackageDescription [String] +extraDocFiles f s = fmap (\x -> s { T.extraDocFiles = x }) (f (T.extraDocFiles s)) +{-# INLINE extraDocFiles #-} + +-- | @since 2.4 +componentModules :: Monoid r => ComponentName -> Getting r PackageDescription [ModuleName] +componentModules cname = case cname of + CLibName -> library . traverse . getting explicitLibModules + CSubLibName name -> + componentModules' name subLibraries (libName . non "") explicitLibModules + CFLibName name -> + componentModules' name foreignLibs foreignLibName foreignLibModules + CExeName name -> + componentModules' name executables exeName exeModules + CTestName name -> + componentModules' name testSuites testName testModules + CBenchName name -> + componentModules' name benchmarks benchmarkName benchmarkModules + where + componentModules' + :: Monoid r + => UnqualComponentName + -> Traversal' PackageDescription [a] + -> Traversal' a UnqualComponentName + -> (a -> [ModuleName]) + -> Getting r PackageDescription [ModuleName] + componentModules' name pdL nameL modules = + pdL + . traverse + . filtered ((== name) . view nameL) + . 
getting modules + + -- This are easily wrongly used, so we have them here locally only. + non :: Eq a => a -> Lens' (Maybe a) a + non x afb s = f <$> afb (fromMaybe x s) + where f y = if x == y then Nothing else Just y + + filtered :: (a -> Bool) -> Traversal' a a + filtered p f s = if p s then f s else pure s + +-- | @since 2.4 +componentBuildInfo :: ComponentName -> Traversal' PackageDescription BuildInfo +componentBuildInfo cname = case cname of + CLibName -> + library . traverse . libBuildInfo + CSubLibName name -> + componentBuildInfo' name subLibraries (libName . non "") libBuildInfo + CFLibName name -> + componentBuildInfo' name foreignLibs foreignLibName foreignLibBuildInfo + CExeName name -> + componentBuildInfo' name executables exeName exeBuildInfo + CTestName name -> + componentBuildInfo' name testSuites testName testBuildInfo + CBenchName name -> + componentBuildInfo' name benchmarks benchmarkName benchmarkBuildInfo + where + componentBuildInfo' :: UnqualComponentName + -> Traversal' PackageDescription [a] + -> Traversal' a UnqualComponentName + -> Traversal' a BuildInfo + -> Traversal' PackageDescription BuildInfo + componentBuildInfo' name pdL nameL biL = + pdL + . traverse + . filtered ((== name) . view nameL) + . biL + + -- This are easily wrongly used, so we have them here locally only. + -- We have to repeat these, as everything is exported from this module. 
+ non :: Eq a => a -> Lens' (Maybe a) a + non x afb s = f <$> afb (fromMaybe x s) + where f y = if x == y then Nothing else Just y + + filtered :: (a -> Bool) -> Traversal' a a + filtered p f s = if p s then f s else pure s diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/PackageDescription.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/PackageDescription.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/PackageDescription.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/PackageDescription.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,492 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Types.PackageDescription +-- Copyright : Isaac Jones 2003-2005 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- This defines the data structure for the @.cabal@ file format. There are +-- several parts to this structure. It has top level info and then 'Library', +-- 'Executable', 'TestSuite', and 'Benchmark' sections each of which have +-- associated 'BuildInfo' data that's used to build the library, exe, test, or +-- benchmark. To further complicate things there is both a 'PackageDescription' +-- and a 'GenericPackageDescription'. This distinction relates to cabal +-- configurations. When we initially read a @.cabal@ file we get a +-- 'GenericPackageDescription' which has all the conditional sections. +-- Before actually building a package we have to decide +-- on each conditional. Once we've done that we get a 'PackageDescription'. +-- It was done this way initially to avoid breaking too much stuff when the +-- feature was introduced. 
It could probably do with being rationalised at some +-- point to make it simpler. + +module Distribution.Types.PackageDescription ( + PackageDescription(..), + specVersion, + specVersion', + license, + license', + descCabalVersion, + buildType, + emptyPackageDescription, + hasPublicLib, + hasLibs, + allLibraries, + withLib, + hasExes, + withExe, + hasTests, + withTest, + hasBenchmarks, + withBenchmark, + hasForeignLibs, + withForeignLib, + allBuildInfo, + enabledBuildInfos, + allBuildDepends, + enabledBuildDepends, + updatePackageDescription, + pkgComponents, + pkgBuildableComponents, + enabledComponents, + lookupComponent, + getComponent, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Control.Monad ((<=<)) + +-- lens +import qualified Distribution.Types.BuildInfo.Lens as L +import Distribution.Types.Library +import Distribution.Types.TestSuite +import Distribution.Types.Executable +import Distribution.Types.Benchmark +import Distribution.Types.ForeignLib + +import Distribution.Types.Component +import Distribution.Types.ComponentRequestedSpec +import Distribution.Types.Dependency +import Distribution.Types.PackageId +import Distribution.Types.ComponentName +import Distribution.Types.PackageName +import Distribution.Types.UnqualComponentName +import Distribution.Types.SetupBuildInfo +import Distribution.Types.BuildInfo +import Distribution.Types.BuildType +import Distribution.Types.SourceRepo +import Distribution.Types.HookedBuildInfo + +import Distribution.Compiler +import Distribution.License +import Distribution.Package +import Distribution.Version + +import qualified Distribution.SPDX as SPDX + +-- ----------------------------------------------------------------------------- +-- The PackageDescription type + +-- | This data type is the internal representation of the file @pkg.cabal@. 
+-- It contains two kinds of information about the package: information +-- which is needed for all packages, such as the package name and version, and +-- information which is needed for the simple build system only, such as +-- the compiler options and library name. +-- +data PackageDescription + = PackageDescription { + -- the following are required by all packages: + + -- | The version of the Cabal spec that this package description uses. + -- For historical reasons this is specified with a version range but + -- only ranges of the form @>= v@ make sense. We are in the process of + -- transitioning to specifying just a single version, not a range. + -- See also 'specVersion'. + specVersionRaw :: Either Version VersionRange, + package :: PackageIdentifier, + licenseRaw :: Either SPDX.License License, + licenseFiles :: [FilePath], + copyright :: String, + maintainer :: String, + author :: String, + stability :: String, + testedWith :: [(CompilerFlavor,VersionRange)], + homepage :: String, + pkgUrl :: String, + bugReports :: String, + sourceRepos :: [SourceRepo], + synopsis :: String, -- ^A one-line summary of this package + description :: String, -- ^A more verbose description of this package + category :: String, + customFieldsPD :: [(String,String)], -- ^Custom fields starting + -- with x-, stored in a + -- simple assoc-list. + + -- | The original @build-type@ value as parsed from the + -- @.cabal@ file without defaulting. See also 'buildType'. 
+ -- + -- @since 2.2 + buildTypeRaw :: Maybe BuildType, + setupBuildInfo :: Maybe SetupBuildInfo, + -- components + library :: Maybe Library, + subLibraries :: [Library], + executables :: [Executable], + foreignLibs :: [ForeignLib], + testSuites :: [TestSuite], + benchmarks :: [Benchmark], + -- files + dataFiles :: [FilePath], + dataDir :: FilePath, + extraSrcFiles :: [FilePath], + extraTmpFiles :: [FilePath], + extraDocFiles :: [FilePath] + } + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Binary PackageDescription + +instance NFData PackageDescription where rnf = genericRnf + +instance Package PackageDescription where + packageId = package + +-- | The version of the Cabal spec that this package should be interpreted +-- against. +-- +-- Historically we used a version range but we are switching to using a single +-- version. Currently we accept either. This function converts into a single +-- version by ignoring upper bounds in the version range. +-- +specVersion :: PackageDescription -> Version +specVersion = specVersion' . specVersionRaw + +-- | +-- +-- @since 2.2.0.0 +specVersion' :: Either Version VersionRange -> Version +specVersion' (Left version) = version +specVersion' (Right versionRange) = case asVersionIntervals versionRange of + [] -> mkVersion [0] + ((LowerBound version _, _):_) -> version + +-- | The SPDX 'LicenseExpression' of the package. +-- +-- @since 2.2.0.0 +license :: PackageDescription -> SPDX.License +license = license' . licenseRaw + +-- | See 'license'. +-- +-- @since 2.2.0.0 +license' :: Either SPDX.License License -> SPDX.License +license' = either id licenseToSPDX + +-- | The range of versions of the Cabal tools that this package is intended to +-- work with. +-- +-- This function is deprecated and should not be used for new purposes, only to +-- support old packages that rely on the old interpretation. 
+-- +descCabalVersion :: PackageDescription -> VersionRange +descCabalVersion pkg = case specVersionRaw pkg of + Left version -> orLaterVersion version + Right versionRange -> versionRange +{-# DEPRECATED descCabalVersion "Use specVersion instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} + +-- | The effective @build-type@ after applying defaulting rules. +-- +-- The original @build-type@ value parsed is stored in the +-- 'buildTypeRaw' field. However, the @build-type@ field is optional +-- and can therefore be empty in which case we need to compute the +-- /effective/ @build-type@. This function implements the following +-- defaulting rules: +-- +-- * For @cabal-version:2.0@ and below, default to the @Custom@ +-- build-type unconditionally. +-- +-- * Otherwise, if a @custom-setup@ stanza is defined, default to +-- the @Custom@ build-type; else default to @Simple@ build-type. +-- +-- @since 2.2 +buildType :: PackageDescription -> BuildType +buildType pkg + | specVersion pkg >= mkVersion [2,1] + = fromMaybe newDefault (buildTypeRaw pkg) + | otherwise -- cabal-version < 2.1 + = fromMaybe Custom (buildTypeRaw pkg) + where + newDefault | isNothing (setupBuildInfo pkg) = Simple + | otherwise = Custom + +emptyPackageDescription :: PackageDescription +emptyPackageDescription + = PackageDescription { + package = PackageIdentifier (mkPackageName "") + nullVersion, + licenseRaw = Right UnspecifiedLicense, -- TODO: + licenseFiles = [], + specVersionRaw = Right anyVersion, + buildTypeRaw = Nothing, + copyright = "", + maintainer = "", + author = "", + stability = "", + testedWith = [], + homepage = "", + pkgUrl = "", + bugReports = "", + sourceRepos = [], + synopsis = "", + description = "", + category = "", + customFieldsPD = [], + setupBuildInfo = Nothing, + library = Nothing, + subLibraries = [], + foreignLibs = [], + executables = [], + testSuites = [], + benchmarks = [], + dataFiles = [], + dataDir = "", + extraSrcFiles = [], + extraTmpFiles = [], + 
extraDocFiles = [] + } + +-- --------------------------------------------------------------------------- +-- The Library type + +-- | Does this package have a buildable PUBLIC library? +hasPublicLib :: PackageDescription -> Bool +hasPublicLib p = + case library p of + Just lib -> buildable (libBuildInfo lib) + Nothing -> False + +-- | Does this package have any libraries? +hasLibs :: PackageDescription -> Bool +hasLibs p = any (buildable . libBuildInfo) (allLibraries p) + +allLibraries :: PackageDescription -> [Library] +allLibraries p = maybeToList (library p) ++ subLibraries p + +-- | If the package description has a buildable library section, +-- call the given function with the library build info as argument. +-- You probably want 'withLibLBI' if you have a 'LocalBuildInfo', +-- see the note in +-- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components" +-- for more information. +withLib :: PackageDescription -> (Library -> IO ()) -> IO () +withLib pkg_descr f = + sequence_ [f lib | lib <- allLibraries pkg_descr, buildable (libBuildInfo lib)] + +-- --------------------------------------------------------------------------- +-- The Executable type + +-- |does this package have any executables? +hasExes :: PackageDescription -> Bool +hasExes p = any (buildable . buildInfo) (executables p) + +-- | Perform the action on each buildable 'Executable' in the package +-- description. You probably want 'withExeLBI' if you have a +-- 'LocalBuildInfo', see the note in +-- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components" +-- for more information. +withExe :: PackageDescription -> (Executable -> IO ()) -> IO () +withExe pkg_descr f = + sequence_ [f exe | exe <- executables pkg_descr, buildable (buildInfo exe)] + +-- --------------------------------------------------------------------------- +-- The TestSuite type + +-- | Does this package have any test suites? +hasTests :: PackageDescription -> Bool +hasTests = any (buildable . 
testBuildInfo) . testSuites + +-- | Perform an action on each buildable 'TestSuite' in a package. +-- You probably want 'withTestLBI' if you have a 'LocalBuildInfo', see the note in +-- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components" +-- for more information. + +withTest :: PackageDescription -> (TestSuite -> IO ()) -> IO () +withTest pkg_descr f = + sequence_ [ f test | test <- testSuites pkg_descr, buildable (testBuildInfo test) ] + +-- --------------------------------------------------------------------------- +-- The Benchmark type + +-- | Does this package have any benchmarks? +hasBenchmarks :: PackageDescription -> Bool +hasBenchmarks = any (buildable . benchmarkBuildInfo) . benchmarks + +-- | Perform an action on each buildable 'Benchmark' in a package. +-- You probably want 'withBenchLBI' if you have a 'LocalBuildInfo', see the note in +-- "Distribution.Types.ComponentRequestedSpec#buildable_vs_enabled_components" +-- for more information. + +withBenchmark :: PackageDescription -> (Benchmark -> IO ()) -> IO () +withBenchmark pkg_descr f = + sequence_ [f bench | bench <- benchmarks pkg_descr, buildable (benchmarkBuildInfo bench)] + +-- --------------------------------------------------------------------------- +-- The ForeignLib type + +-- | Does this package have any foreign libraries? +hasForeignLibs :: PackageDescription -> Bool +hasForeignLibs p = any (buildable . foreignLibBuildInfo) (foreignLibs p) + +-- | Perform the action on each buildable 'ForeignLib' in the package +-- description. +withForeignLib :: PackageDescription -> (ForeignLib -> IO ()) -> IO () +withForeignLib pkg_descr f = + sequence_ [ f flib + | flib <- foreignLibs pkg_descr + , buildable (foreignLibBuildInfo flib) + ] + +-- --------------------------------------------------------------------------- +-- The BuildInfo type + +-- | All 'BuildInfo' in the 'PackageDescription': +-- libraries, executables, test-suites and benchmarks. 
+-- +-- Useful for implementing package checks. +allBuildInfo :: PackageDescription -> [BuildInfo] +allBuildInfo pkg_descr = [ bi | lib <- allLibraries pkg_descr + , let bi = libBuildInfo lib ] + ++ [ bi | flib <- foreignLibs pkg_descr + , let bi = foreignLibBuildInfo flib ] + ++ [ bi | exe <- executables pkg_descr + , let bi = buildInfo exe ] + ++ [ bi | tst <- testSuites pkg_descr + , let bi = testBuildInfo tst ] + ++ [ bi | tst <- benchmarks pkg_descr + , let bi = benchmarkBuildInfo tst ] + +-- | Return all of the 'BuildInfo's of enabled components, i.e., all of +-- the ones that would be built if you run @./Setup build@. +enabledBuildInfos :: PackageDescription -> ComponentRequestedSpec -> [BuildInfo] +enabledBuildInfos pkg enabled = + [ componentBuildInfo comp + | comp <- enabledComponents pkg enabled ] + + +-- ------------------------------------------------------------ +-- * Utils +-- ------------------------------------------------------------ + +-- | Get the combined build-depends entries of all components. +allBuildDepends :: PackageDescription -> [Dependency] +allBuildDepends = targetBuildDepends <=< allBuildInfo + +-- | Get the combined build-depends entries of all enabled components, per the +-- given request spec. 
+enabledBuildDepends :: PackageDescription -> ComponentRequestedSpec -> [Dependency] +enabledBuildDepends spec pd = targetBuildDepends =<< enabledBuildInfos spec pd + + +updatePackageDescription :: HookedBuildInfo -> PackageDescription -> PackageDescription +updatePackageDescription (mb_lib_bi, exe_bi) p + = p{ executables = updateExecutables exe_bi (executables p) + , library = updateLibrary mb_lib_bi (library p) } + where + updateLibrary :: Maybe BuildInfo -> Maybe Library -> Maybe Library + updateLibrary (Just bi) (Just lib) = Just (lib{libBuildInfo = bi `mappend` libBuildInfo lib}) + updateLibrary Nothing mb_lib = mb_lib + updateLibrary (Just _) Nothing = Nothing + + updateExecutables :: [(UnqualComponentName, BuildInfo)] -- ^[(exeName, new buildinfo)] + -> [Executable] -- ^list of executables to update + -> [Executable] -- ^list with exeNames updated + updateExecutables exe_bi' executables' = foldr updateExecutable executables' exe_bi' + + updateExecutable :: (UnqualComponentName, BuildInfo) -- ^(exeName, new buildinfo) + -> [Executable] -- ^list of executables to update + -> [Executable] -- ^list with exeName updated + updateExecutable _ [] = [] + updateExecutable exe_bi'@(name,bi) (exe:exes) + | exeName exe == name = exe{buildInfo = bi `mappend` buildInfo exe} : exes + | otherwise = exe : updateExecutable exe_bi' exes + +-- ----------------------------------------------------------------------------- +-- Source-representation of buildable components + +-- | All the components in the package. +-- +pkgComponents :: PackageDescription -> [Component] +pkgComponents pkg = + [ CLib lib | lib <- allLibraries pkg ] + ++ [ CFLib flib | flib <- foreignLibs pkg ] + ++ [ CExe exe | exe <- executables pkg ] + ++ [ CTest tst | tst <- testSuites pkg ] + ++ [ CBench bm | bm <- benchmarks pkg ] + +-- | A list of all components in the package that are buildable, +-- i.e., were not marked with @buildable: False@. 
This does NOT +-- indicate if we are actually going to build the component, +-- see 'enabledComponents' instead. +-- +-- @since 2.0.0.2 +-- +pkgBuildableComponents :: PackageDescription -> [Component] +pkgBuildableComponents = filter componentBuildable . pkgComponents + +-- | A list of all components in the package that are enabled. +-- +-- @since 2.0.0.2 +-- +enabledComponents :: PackageDescription -> ComponentRequestedSpec -> [Component] +enabledComponents pkg enabled = filter (componentEnabled enabled) $ pkgBuildableComponents pkg + +lookupComponent :: PackageDescription -> ComponentName -> Maybe Component +lookupComponent pkg CLibName = fmap CLib (library pkg) +lookupComponent pkg (CSubLibName name) = + fmap CLib $ find ((Just name ==) . libName) (subLibraries pkg) +lookupComponent pkg (CFLibName name) = + fmap CFLib $ find ((name ==) . foreignLibName) (foreignLibs pkg) +lookupComponent pkg (CExeName name) = + fmap CExe $ find ((name ==) . exeName) (executables pkg) +lookupComponent pkg (CTestName name) = + fmap CTest $ find ((name ==) . testName) (testSuites pkg) +lookupComponent pkg (CBenchName name) = + fmap CBench $ find ((name ==) . benchmarkName) (benchmarks pkg) + +getComponent :: PackageDescription -> ComponentName -> Component +getComponent pkg cname = + case lookupComponent pkg cname of + Just cpnt -> cpnt + Nothing -> missingComponent + where + missingComponent = + error $ "internal error: the package description contains no " + ++ "component corresponding to " ++ show cname + +-- ----------------------------------------------------------------------------- +-- Traversal Instances + +instance L.HasBuildInfos PackageDescription where + traverseBuildInfos f (PackageDescription a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 a14 a15 a16 a17 a18 a19 + x1 x2 x3 x4 x5 x6 + a20 a21 a22 a23 a24) = + PackageDescription a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 a14 a15 a16 a17 a18 a19 + <$> (traverse . L.buildInfo) f x1 -- library + <*> (traverse . 
L.buildInfo) f x2 -- sub libraries + <*> (traverse . L.buildInfo) f x3 -- executables + <*> (traverse . L.buildInfo) f x4 -- foreign libs + <*> (traverse . L.buildInfo) f x5 -- test suites + <*> (traverse . L.buildInfo) f x6 -- benchmarks + <*> pure a20 -- data files + <*> pure a21 -- data dir + <*> pure a22 -- exta src files + <*> pure a23 -- extra temp files + <*> pure a24 -- extra doc files diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/PackageId/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/PackageId/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/PackageId/Lens.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/PackageId/Lens.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,22 @@ +module Distribution.Types.PackageId.Lens ( + PackageIdentifier, + module Distribution.Types.PackageId.Lens, + ) where + +import Distribution.Compat.Lens +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Types.PackageId (PackageIdentifier) +import Distribution.Types.PackageName (PackageName) +import Distribution.Version (Version) + +import qualified Distribution.Types.PackageId as T + +pkgName :: Lens' PackageIdentifier PackageName +pkgName f s = fmap (\x -> s { T.pkgName = x }) (f (T.pkgName s)) +{-# INLINE pkgName #-} + +pkgVersion :: Lens' PackageIdentifier Version +pkgVersion f s = fmap (\x -> s { T.pkgVersion = x }) (f (T.pkgVersion s)) +{-# INLINE pkgVersion #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/PackageId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/PackageId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/PackageId.hs 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/PackageId.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,54 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Types.PackageId + ( PackageIdentifier(..) + , PackageId + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Version + ( Version, nullVersion ) + +import qualified Distribution.Compat.ReadP as Parse +import qualified Distribution.Compat.CharParsing as P +import qualified Text.PrettyPrint as Disp +import Distribution.Compat.ReadP +import Distribution.Text +import Distribution.Parsec.Class + ( Parsec(..) ) +import Distribution.Pretty +import Distribution.Types.PackageName + +-- | Type alias so we can use the shorter name PackageId. +type PackageId = PackageIdentifier + +-- | The name and version of a package. +data PackageIdentifier + = PackageIdentifier { + pkgName :: PackageName, -- ^The name of this package, eg. foo + pkgVersion :: Version -- ^the version of this package, eg 1.2 + } + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) + +instance Binary PackageIdentifier + +instance Pretty PackageIdentifier where + pretty (PackageIdentifier n v) + | v == nullVersion = pretty n -- if no version, don't show version. 
+ | otherwise = pretty n <<>> Disp.char '-' <<>> pretty v + +instance Text PackageIdentifier where + parse = do + n <- parse + v <- (Parse.char '-' >> parse) <++ return nullVersion + return (PackageIdentifier n v) + +instance Parsec PackageIdentifier where + parsec = PackageIdentifier <$> + parsec <*> (P.char '-' *> parsec <|> pure nullVersion) + +instance NFData PackageIdentifier where + rnf (PackageIdentifier name version) = rnf name `seq` rnf version diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/PackageName.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/PackageName.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/PackageName.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/PackageName.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,62 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Types.PackageName + ( PackageName, unPackageName, mkPackageName + ) where + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.Utils.ShortText + +import qualified Text.PrettyPrint as Disp +import Distribution.ParseUtils +import Distribution.Text +import Distribution.Pretty +import Distribution.Parsec.Class + +-- | A package name. +-- +-- Use 'mkPackageName' and 'unPackageName' to convert from/to a +-- 'String'. 
+-- +-- This type is opaque since @Cabal-2.0@ +-- +-- @since 2.0.0.2 +newtype PackageName = PackageName ShortText + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) + +-- | Convert 'PackageName' to 'String' +unPackageName :: PackageName -> String +unPackageName (PackageName s) = fromShortText s + +-- | Construct a 'PackageName' from a 'String' +-- +-- 'mkPackageName' is the inverse to 'unPackageName' +-- +-- Note: No validations are performed to ensure that the resulting +-- 'PackageName' is valid +-- +-- @since 2.0.0.2 +mkPackageName :: String -> PackageName +mkPackageName = PackageName . toShortText + +-- | 'mkPackageName' +-- +-- @since 2.0.0.2 +instance IsString PackageName where + fromString = mkPackageName + +instance Binary PackageName + +instance Pretty PackageName where + pretty = Disp.text . unPackageName + +instance Parsec PackageName where + parsec = mkPackageName <$> parsecUnqualComponentName + +instance Text PackageName where + parse = mkPackageName <$> parsePackageName + +instance NFData PackageName where + rnf (PackageName pkg) = rnf pkg diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/PkgconfigDependency.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/PkgconfigDependency.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/PkgconfigDependency.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/PkgconfigDependency.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,50 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Types.PkgconfigDependency + ( PkgconfigDependency(..) 
+ ) where + +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Version (VersionRange, anyVersion) + +import Distribution.Types.PkgconfigName + +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text + +import qualified Distribution.Compat.CharParsing as P +import Distribution.Compat.ReadP ((<++)) +import qualified Distribution.Compat.ReadP as Parse +import Text.PrettyPrint ((<+>)) + +-- | Describes a dependency on a pkg-config library +-- +-- @since 2.0.0.2 +data PkgconfigDependency = PkgconfigDependency + PkgconfigName + VersionRange + deriving (Generic, Read, Show, Eq, Typeable, Data) + +instance Binary PkgconfigDependency +instance NFData PkgconfigDependency where rnf = genericRnf + +instance Pretty PkgconfigDependency where + pretty (PkgconfigDependency name ver) = + pretty name <+> pretty ver + +instance Parsec PkgconfigDependency where + parsec = do + name <- parsec + P.spaces + verRange <- parsec <|> pure anyVersion + pure $ PkgconfigDependency name verRange + +instance Text PkgconfigDependency where + parse = do name <- parse + Parse.skipSpaces + ver <- parse <++ return anyVersion + Parse.skipSpaces + return $ PkgconfigDependency name ver diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/PkgconfigName.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/PkgconfigName.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/PkgconfigName.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/PkgconfigName.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,67 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Types.PkgconfigName + ( PkgconfigName, unPkgconfigName, mkPkgconfigName + ) where + +import Prelude () +import Distribution.Compat.Prelude 
+import Distribution.Utils.ShortText + +import Distribution.Pretty +import Distribution.Parsec.Class +import Distribution.Text + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +-- | A pkg-config library name +-- +-- This is parsed as any valid argument to the pkg-config utility. +-- +-- @since 2.0.0.2 +newtype PkgconfigName = PkgconfigName ShortText + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data) + +-- | Convert 'PkgconfigName' to 'String' +-- +-- @since 2.0.0.2 +unPkgconfigName :: PkgconfigName -> String +unPkgconfigName (PkgconfigName s) = fromShortText s + +-- | Construct a 'PkgconfigName' from a 'String' +-- +-- 'mkPkgconfigName' is the inverse to 'unPkgconfigName' +-- +-- Note: No validations are performed to ensure that the resulting +-- 'PkgconfigName' is valid +-- +-- @since 2.0.0.2 +mkPkgconfigName :: String -> PkgconfigName +mkPkgconfigName = PkgconfigName . toShortText + +-- | 'mkPkgconfigName' +-- +-- @since 2.0.0.2 +instance IsString PkgconfigName where + fromString = mkPkgconfigName + +instance Binary PkgconfigName + +-- pkg-config allows versions and other letters in package names, eg +-- "gtk+-2.0" is a valid pkg-config package _name_. It then has a package +-- version number like 2.10.13 +instance Pretty PkgconfigName where + pretty = Disp.text . 
unPkgconfigName + +instance Parsec PkgconfigName where + parsec = mkPkgconfigName <$> P.munch1 (\c -> isAlphaNum c || c `elem` "+-._") + +instance Text PkgconfigName where + parse = mkPkgconfigName + <$> Parse.munch1 (\c -> isAlphaNum c || c `elem` "+-._") + +instance NFData PkgconfigName where + rnf (PkgconfigName pkg) = rnf pkg diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/SetupBuildInfo/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/SetupBuildInfo/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/SetupBuildInfo/Lens.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/SetupBuildInfo/Lens.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,21 @@ +module Distribution.Types.SetupBuildInfo.Lens ( + SetupBuildInfo, + module Distribution.Types.SetupBuildInfo.Lens, + ) where + +import Distribution.Compat.Lens +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Types.Dependency (Dependency) +import Distribution.Types.SetupBuildInfo (SetupBuildInfo) + +import qualified Distribution.Types.SetupBuildInfo as T + +setupDepends :: Lens' SetupBuildInfo [Dependency] +setupDepends f s = fmap (\x -> s { T.setupDepends = x }) (f (T.setupDepends s)) +{-# INLINE setupDepends #-} + +defaultSetupDepends :: Lens' SetupBuildInfo Bool +defaultSetupDepends f s = fmap (\x -> s { T.defaultSetupDepends = x }) (f (T.defaultSetupDepends s)) +{-# INLINE defaultSetupDepends #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/SetupBuildInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/SetupBuildInfo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/SetupBuildInfo.hs 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/SetupBuildInfo.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,41 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.SetupBuildInfo ( + SetupBuildInfo(..) +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.Dependency + +-- --------------------------------------------------------------------------- +-- The SetupBuildInfo type + +-- One can see this as a very cut-down version of BuildInfo below. +-- To keep things simple for tools that compile Setup.hs we limit the +-- options authors can specify to just Haskell package dependencies. + +data SetupBuildInfo = SetupBuildInfo + { setupDepends :: [Dependency] + , defaultSetupDepends :: Bool + -- ^ Is this a default 'custom-setup' section added by the cabal-install + -- code (as opposed to user-provided)? This field is only used + -- internally, and doesn't correspond to anything in the .cabal + -- file. See #3199. 
+ } + deriving (Generic, Show, Eq, Read, Typeable, Data) + +instance Binary SetupBuildInfo + +instance NFData SetupBuildInfo where rnf = genericRnf + +instance Monoid SetupBuildInfo where + mempty = SetupBuildInfo [] False + mappend = (<>) + +instance Semigroup SetupBuildInfo where + a <> b = SetupBuildInfo + (setupDepends a <> setupDepends b) + (defaultSetupDepends a || defaultSetupDepends b) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/SourceRepo/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/SourceRepo/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/SourceRepo/Lens.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/SourceRepo/Lens.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,39 @@ +module Distribution.Types.SourceRepo.Lens ( + T.SourceRepo, + module Distribution.Types.SourceRepo.Lens, + ) where + +import Prelude() +import Distribution.Compat.Prelude +import Distribution.Compat.Lens + +import Distribution.Types.SourceRepo (SourceRepo, RepoKind, RepoType) +import qualified Distribution.Types.SourceRepo as T + +repoKind :: Lens' SourceRepo RepoKind +repoKind f s = fmap (\x -> s { T.repoKind = x }) (f (T.repoKind s)) +{-# INLINE repoKind #-} + +repoType :: Lens' SourceRepo (Maybe RepoType) +repoType f s = fmap (\x -> s { T.repoType = x }) (f (T.repoType s)) +{-# INLINE repoType #-} + +repoLocation :: Lens' SourceRepo (Maybe String) +repoLocation f s = fmap (\x -> s { T.repoLocation = x }) (f (T.repoLocation s)) +{-# INLINE repoLocation #-} + +repoModule :: Lens' SourceRepo (Maybe String) +repoModule f s = fmap (\x -> s { T.repoModule = x }) (f (T.repoModule s)) +{-# INLINE repoModule #-} + +repoBranch :: Lens' SourceRepo (Maybe String) +repoBranch f s = fmap (\x -> s { T.repoBranch = x }) (f (T.repoBranch s)) +{-# INLINE repoBranch #-} + +repoTag :: Lens' 
SourceRepo (Maybe String) +repoTag f s = fmap (\x -> s { T.repoTag = x }) (f (T.repoTag s)) +{-# INLINE repoTag #-} + +repoSubdir :: Lens' SourceRepo (Maybe FilePath) +repoSubdir f s = fmap (\x -> s { T.repoSubdir = x }) (f (T.repoSubdir s)) +{-# INLINE repoSubdir #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/SourceRepo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/SourceRepo.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/SourceRepo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/SourceRepo.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,185 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.SourceRepo ( + SourceRepo(..), + RepoKind(..), + RepoType(..), + knownRepoTypes, + emptySourceRepo, + classifyRepoType, + classifyRepoKind, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Utils.Generic (lowercase) + +import Distribution.Pretty +import Distribution.Parsec.Class +import Distribution.Text + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +-- ------------------------------------------------------------ +-- * Source repos +-- ------------------------------------------------------------ + +-- | Information about the source revision control system for a package. +-- +-- When specifying a repo it is useful to know the meaning or intention of the +-- information as doing so enables automation. There are two obvious common +-- purposes: one is to find the repo for the latest development version, the +-- other is to find the repo for this specific release. The 'ReopKind' +-- specifies which one we mean (or another custom one). 
+-- +-- A package can specify one or the other kind or both. Most will specify just +-- a head repo but some may want to specify a repo to reconstruct the sources +-- for this package release. +-- +-- The required information is the 'RepoType' which tells us if it's using +-- 'Darcs', 'Git' for example. The 'repoLocation' and other details are +-- interpreted according to the repo type. +-- +data SourceRepo = SourceRepo { + -- | The kind of repo. This field is required. + repoKind :: RepoKind, + + -- | The type of the source repository system for this repo, eg 'Darcs' or + -- 'Git'. This field is required. + repoType :: Maybe RepoType, + + -- | The location of the repository. For most 'RepoType's this is a URL. + -- This field is required. + repoLocation :: Maybe String, + + -- | 'CVS' can put multiple \"modules\" on one server and requires a + -- module name in addition to the location to identify a particular repo. + -- Logically this is part of the location but unfortunately has to be + -- specified separately. This field is required for the 'CVS' 'RepoType' and + -- should not be given otherwise. + repoModule :: Maybe String, + + -- | The name or identifier of the branch, if any. Many source control + -- systems have the notion of multiple branches in a repo that exist in the + -- same location. For example 'Git' and 'CVS' use this while systems like + -- 'Darcs' use different locations for different branches. This field is + -- optional but should be used if necessary to identify the sources, + -- especially for the 'RepoThis' repo kind. + repoBranch :: Maybe String, + + -- | The tag identify a particular state of the repository. This should be + -- given for the 'RepoThis' repo kind and not for 'RepoHead' kind. + -- + repoTag :: Maybe String, + + -- | Some repositories contain multiple projects in different subdirectories + -- This field specifies the subdirectory where this packages sources can be + -- found, eg the subdirectory containing the @.cabal@ file. 
It is interpreted + -- relative to the root of the repository. This field is optional. If not + -- given the default is \".\" ie no subdirectory. + repoSubdir :: Maybe FilePath +} + deriving (Eq, Ord, Generic, Read, Show, Typeable, Data) + +emptySourceRepo :: RepoKind -> SourceRepo +emptySourceRepo kind = SourceRepo + { repoKind = kind + , repoType = Nothing + , repoLocation = Nothing + , repoModule = Nothing + , repoBranch = Nothing + , repoTag = Nothing + , repoSubdir = Nothing + } + +instance Binary SourceRepo + +instance NFData SourceRepo where rnf = genericRnf + +-- | What this repo info is for, what it represents. +-- +data RepoKind = + -- | The repository for the \"head\" or development version of the project. + -- This repo is where we should track the latest development activity or + -- the usual repo people should get to contribute patches. + RepoHead + + -- | The repository containing the sources for this exact package version + -- or release. For this kind of repo a tag should be given to give enough + -- information to re-create the exact sources. + | RepoThis + + | RepoKindUnknown String + deriving (Eq, Generic, Ord, Read, Show, Typeable, Data) + +instance Binary RepoKind + +instance NFData RepoKind where rnf = genericRnf + +-- | An enumeration of common source control systems. The fields used in the +-- 'SourceRepo' depend on the type of repo. The tools and methods used to +-- obtain and track the repo depend on the repo type. 
+-- +data RepoType = Darcs | Git | SVN | CVS + | Mercurial | GnuArch | Bazaar | Monotone + | OtherRepoType String + deriving (Eq, Generic, Ord, Read, Show, Typeable, Data) + +instance Binary RepoType + +instance NFData RepoType where rnf = genericRnf + +knownRepoTypes :: [RepoType] +knownRepoTypes = [Darcs, Git, SVN, CVS + ,Mercurial, GnuArch, Bazaar, Monotone] + +repoTypeAliases :: RepoType -> [String] +repoTypeAliases Bazaar = ["bzr"] +repoTypeAliases Mercurial = ["hg"] +repoTypeAliases GnuArch = ["arch"] +repoTypeAliases _ = [] + +instance Pretty RepoKind where + pretty RepoHead = Disp.text "head" + pretty RepoThis = Disp.text "this" + pretty (RepoKindUnknown other) = Disp.text other + +instance Parsec RepoKind where + parsec = classifyRepoKind <$> P.munch1 isIdent + +instance Text RepoKind where + parse = fmap classifyRepoKind ident + +classifyRepoKind :: String -> RepoKind +classifyRepoKind name = case lowercase name of + "head" -> RepoHead + "this" -> RepoThis + _ -> RepoKindUnknown name + +instance Pretty RepoType where + pretty (OtherRepoType other) = Disp.text other + pretty other = Disp.text (lowercase (show other)) + +instance Parsec RepoType where + parsec = classifyRepoType <$> P.munch1 isIdent + +instance Text RepoType where + parse = fmap classifyRepoType ident + +classifyRepoType :: String -> RepoType +classifyRepoType s = + fromMaybe (OtherRepoType s) $ lookup (lowercase s) repoTypeMap + where + repoTypeMap = [ (name, repoType') + | repoType' <- knownRepoTypes + , name <- display repoType' : repoTypeAliases repoType' ] + +ident :: Parse.ReadP r String +ident = Parse.munch1 isIdent + +isIdent :: Char -> Bool +isIdent c = isAlphaNum c || c == '_' || c == '-' diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/TargetInfo.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/TargetInfo.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/TargetInfo.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/TargetInfo.hs 2018-11-26 08:42:52.000000000 +0000 @@ -0,0 +1,33 @@ +{-# LANGUAGE TypeFamilies #-} +module Distribution.Types.TargetInfo ( + TargetInfo(..) +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.ComponentLocalBuildInfo +import Distribution.Types.Component +import Distribution.Types.UnitId + +import Distribution.Compat.Graph (IsNode(..)) + +-- | The 'TargetInfo' contains all the information necessary to build a +-- specific target (e.g., component/module/file) in a package. In +-- principle, one can get the 'Component' from a +-- 'ComponentLocalBuildInfo' and 'LocalBuildInfo', but it is much more +-- convenient to have the component in hand. +data TargetInfo = TargetInfo { + targetCLBI :: ComponentLocalBuildInfo, + targetComponent :: Component + -- TODO: BuildTargets supporting parsing these is dumb, + -- we don't have support for compiling single modules or + -- file paths. Accommodating it now is premature + -- generalization. Figure it out later. + -- targetSub :: Maybe (Either ModuleName FilePath) + } + +instance IsNode TargetInfo where + type Key TargetInfo = UnitId + nodeKey = nodeKey . targetCLBI + nodeNeighbors = nodeNeighbors . 
targetCLBI diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/TestSuite/Lens.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/TestSuite/Lens.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/TestSuite/Lens.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/TestSuite/Lens.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,27 @@ +module Distribution.Types.TestSuite.Lens ( + TestSuite, + module Distribution.Types.TestSuite.Lens, + ) where + +import Distribution.Compat.Lens +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.Types.BuildInfo (BuildInfo) +import Distribution.Types.TestSuite (TestSuite) +import Distribution.Types.TestSuiteInterface (TestSuiteInterface) +import Distribution.Types.UnqualComponentName (UnqualComponentName) + +import qualified Distribution.Types.TestSuite as T + +testName :: Lens' TestSuite UnqualComponentName +testName f s = fmap (\x -> s { T.testName = x }) (f (T.testName s)) +{-# INLINE testName #-} + +testInterface :: Lens' TestSuite TestSuiteInterface +testInterface f s = fmap (\x -> s { T.testInterface = x }) (f (T.testInterface s)) +{-# INLINE testInterface #-} + +testBuildInfo :: Lens' TestSuite BuildInfo +testBuildInfo f s = fmap (\x -> s { T.testBuildInfo = x }) (f (T.testBuildInfo s)) +{-# INLINE testBuildInfo #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/TestSuite.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/TestSuite.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/TestSuite.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/TestSuite.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,82 @@ +{-# LANGUAGE DeriveDataTypeable #-} 
+{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.TestSuite ( + TestSuite(..), + emptyTestSuite, + testType, + testModules, + testModulesAutogen +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.BuildInfo +import Distribution.Types.TestType +import Distribution.Types.TestSuiteInterface +import Distribution.Types.UnqualComponentName + +import Distribution.ModuleName + +import qualified Distribution.Types.BuildInfo.Lens as L + +-- | A \"test-suite\" stanza in a cabal file. +-- +data TestSuite = TestSuite { + testName :: UnqualComponentName, + testInterface :: TestSuiteInterface, + testBuildInfo :: BuildInfo + } + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance L.HasBuildInfo TestSuite where + buildInfo f l = (\x -> l { testBuildInfo = x }) <$> f (testBuildInfo l) + +instance Binary TestSuite + +instance NFData TestSuite where rnf = genericRnf + +instance Monoid TestSuite where + mempty = TestSuite { + testName = mempty, + testInterface = mempty, + testBuildInfo = mempty + } + mappend = (<>) + +instance Semigroup TestSuite where + a <> b = TestSuite { + testName = combine' testName, + testInterface = combine testInterface, + testBuildInfo = combine testBuildInfo + } + where combine field = field a `mappend` field b + combine' field = case ( unUnqualComponentName $ field a + , unUnqualComponentName $ field b) of + ("", _) -> field b + (_, "") -> field a + (x, y) -> error $ "Ambiguous values for test field: '" + ++ x ++ "' and '" ++ y ++ "'" + +emptyTestSuite :: TestSuite +emptyTestSuite = mempty + + +testType :: TestSuite -> TestType +testType test = case testInterface test of + TestSuiteExeV10 ver _ -> TestTypeExe ver + TestSuiteLibV09 ver _ -> TestTypeLib ver + TestSuiteUnsupported testtype -> testtype + +-- | Get all the module names from a test suite. 
+testModules :: TestSuite -> [ModuleName] +testModules test = (case testInterface test of + TestSuiteLibV09 _ m -> [m] + _ -> []) + ++ otherModules (testBuildInfo test) + +-- | Get all the auto generated module names from a test suite. +-- This are a subset of 'testModules'. +testModulesAutogen :: TestSuite -> [ModuleName] +testModulesAutogen test = autogenModules (testBuildInfo test) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/TestSuiteInterface.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/TestSuiteInterface.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/TestSuiteInterface.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/TestSuiteInterface.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,51 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.TestSuiteInterface ( + TestSuiteInterface(..), +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Types.TestType +import Distribution.ModuleName +import Distribution.Version + +-- | The test suite interfaces that are currently defined. Each test suite must +-- specify which interface it supports. +-- +-- More interfaces may be defined in future, either new revisions or totally +-- new interfaces. +-- +data TestSuiteInterface = + + -- | Test interface \"exitcode-stdio-1.0\". The test-suite takes the form + -- of an executable. It returns a zero exit code for success, non-zero for + -- failure. The stdout and stderr channels may be logged. It takes no + -- command line parameters and nothing on stdin. + -- + TestSuiteExeV10 Version FilePath + + -- | Test interface \"detailed-0.9\". The test-suite takes the form of a + -- library containing a designated module that exports \"tests :: [Test]\". 
+ -- + | TestSuiteLibV09 Version ModuleName + + -- | A test suite that does not conform to one of the above interfaces for + -- the given reason (e.g. unknown test type). + -- + | TestSuiteUnsupported TestType + deriving (Eq, Generic, Read, Show, Typeable, Data) + +instance Binary TestSuiteInterface + +instance NFData TestSuiteInterface where rnf = genericRnf + +instance Monoid TestSuiteInterface where + mempty = TestSuiteUnsupported (TestTypeUnknown mempty nullVersion) + mappend = (<>) + +instance Semigroup TestSuiteInterface where + a <> (TestSuiteUnsupported _) = a + _ <> b = b diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/TestType.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/TestType.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/TestType.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/TestType.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,48 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Types.TestType ( + TestType(..), + knownTestTypes, +) where + +import Distribution.Compat.Prelude +import Distribution.Version +import Prelude () + +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text +import Text.PrettyPrint (char, text) + +-- | The \"test-type\" field in the test suite stanza. +-- +data TestType = TestTypeExe Version -- ^ \"type: exitcode-stdio-x.y\" + | TestTypeLib Version -- ^ \"type: detailed-x.y\" + | TestTypeUnknown String Version -- ^ Some unknown test type e.g. 
\"type: foo\" + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Binary TestType + +instance NFData TestType where rnf = genericRnf + +knownTestTypes :: [TestType] +knownTestTypes = [ TestTypeExe (mkVersion [1,0]) + , TestTypeLib (mkVersion [0,9]) ] + +instance Pretty TestType where + pretty (TestTypeExe ver) = text "exitcode-stdio-" <<>> pretty ver + pretty (TestTypeLib ver) = text "detailed-" <<>> pretty ver + pretty (TestTypeUnknown name ver) = text name <<>> char '-' <<>> pretty ver + +instance Parsec TestType where + parsec = parsecStandard $ \ver name -> case name of + "exitcode-stdio" -> TestTypeExe ver + "detailed" -> TestTypeLib ver + _ -> TestTypeUnknown name ver + +instance Text TestType where + parse = stdParse $ \ver name -> case name of + "exitcode-stdio" -> TestTypeExe ver + "detailed" -> TestTypeLib ver + _ -> TestTypeUnknown name ver diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/UnitId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/UnitId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/UnitId.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/UnitId.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,134 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE FlexibleContexts #-} + +module Distribution.Types.UnitId + ( UnitId, unUnitId, mkUnitId + , DefUnitId + , unsafeMkDefUnitId + , unDefUnitId + , newSimpleUnitId + , mkLegacyUnitId + , getHSLibraryName + , InstalledPackageId -- backwards compat + ) where + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.Utils.ShortText + +import qualified Distribution.Compat.ReadP as Parse +import qualified Distribution.Compat.CharParsing as P +import Distribution.Pretty +import Distribution.Parsec.Class 
+import Distribution.Text +import Distribution.Types.ComponentId +import Distribution.Types.PackageId + +import Text.PrettyPrint (text) + +-- | A unit identifier identifies a (possibly instantiated) +-- package/component that can be installed the installed package +-- database. There are several types of components that can be +-- installed: +-- +-- * A traditional library with no holes, so that 'unitIdHash' +-- is @Nothing@. In the absence of Backpack, 'UnitId' +-- is the same as a 'ComponentId'. +-- +-- * An indefinite, Backpack library with holes. In this case, +-- 'unitIdHash' is still @Nothing@, but in the install, +-- there are only interfaces, no compiled objects. +-- +-- * An instantiated Backpack library with all the holes +-- filled in. 'unitIdHash' is a @Just@ a hash of the +-- instantiating mapping. +-- +-- A unit is a component plus the additional information on how the +-- holes are filled in. Thus there is a one to many relationship: for a +-- particular component there are many different ways of filling in the +-- holes, and each different combination is a unit (and has a separate +-- 'UnitId'). +-- +-- 'UnitId' is distinct from 'OpenUnitId', in that it is always +-- installed, whereas 'OpenUnitId' are intermediate unit identities +-- that arise during mixin linking, and don't necessarily correspond +-- to any actually installed unit. Since the mapping is not actually +-- recorded in a 'UnitId', you can't actually substitute over them +-- (but you can substitute over 'OpenUnitId'). See also +-- "Distribution.Backpack.FullUnitId" for a mechanism for expanding an +-- instantiated 'UnitId' to retrieve its mapping. +-- +-- Backwards compatibility note: if you need to get the string +-- representation of a UnitId to pass, e.g., as a @-package-id@ +-- flag, use the 'display' function, which will work on all +-- versions of Cabal. 
+-- +newtype UnitId = UnitId ShortText + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data, NFData) + +{-# DEPRECATED InstalledPackageId "Use UnitId instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +type InstalledPackageId = UnitId + +instance Binary UnitId + +-- | The textual format for 'UnitId' coincides with the format +-- GHC accepts for @-package-id@. +-- +instance Pretty UnitId where + pretty = text . unUnitId + +-- | The textual format for 'UnitId' coincides with the format +-- GHC accepts for @-package-id@. +-- +instance Parsec UnitId where + parsec = mkUnitId <$> P.munch1 (\c -> isAlphaNum c || c `elem` "-_.+") + +instance Text UnitId where + parse = mkUnitId <$> Parse.munch1 (\c -> isAlphaNum c || c `elem` "-_.+") + +-- | If you need backwards compatibility, consider using 'display' +-- instead, which is supported by all versions of Cabal. +-- +unUnitId :: UnitId -> String +unUnitId (UnitId s) = fromShortText s + +mkUnitId :: String -> UnitId +mkUnitId = UnitId . toShortText + +-- | 'mkUnitId' +-- +-- @since 2.0.0.2 +instance IsString UnitId where + fromString = mkUnitId + +-- | Create a unit identity with no associated hash directly +-- from a 'ComponentId'. +newSimpleUnitId :: ComponentId -> UnitId +newSimpleUnitId = mkUnitId . unComponentId + +-- | Make an old-style UnitId from a package identifier. +-- Assumed to be for the public library +mkLegacyUnitId :: PackageId -> UnitId +mkLegacyUnitId = newSimpleUnitId . mkComponentId . display + +-- | Returns library name prefixed with HS, suitable for filenames +getHSLibraryName :: UnitId -> String +getHSLibraryName uid = "HS" ++ display uid + +-- | A 'UnitId' for a definite package. The 'DefUnitId' invariant says +-- that a 'UnitId' identified this way is definite; i.e., it has no +-- unfilled holes. 
+newtype DefUnitId = DefUnitId { unDefUnitId :: UnitId } + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data, Binary, NFData, Pretty, Text) + +-- Workaround for a GHC 8.0.1 bug, see +-- https://github.com/haskell/cabal/issues/4793#issuecomment-334258288 +instance Parsec DefUnitId where + parsec = DefUnitId <$> parsec + +-- | Unsafely create a 'DefUnitId' from a 'UnitId'. Your responsibility +-- is to ensure that the 'DefUnitId' invariant holds. +unsafeMkDefUnitId :: UnitId -> DefUnitId +unsafeMkDefUnitId = DefUnitId diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/UnqualComponentName.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/UnqualComponentName.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/UnqualComponentName.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/UnqualComponentName.hs 2018-11-26 08:42:51.000000000 +0000 @@ -0,0 +1,89 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +module Distribution.Types.UnqualComponentName + ( UnqualComponentName, unUnqualComponentName, mkUnqualComponentName + , packageNameToUnqualComponentName, unqualComponentNameToPackageName + ) where + +import Distribution.Compat.Prelude +import Distribution.Utils.ShortText +import Prelude () + +import Distribution.Parsec.Class +import Distribution.ParseUtils (parsePackageName) +import Distribution.Pretty +import Distribution.Text +import Distribution.Types.PackageName + +-- | An unqualified component name, for any kind of component. +-- +-- This is distinguished from a 'ComponentName' and 'ComponentId'. The former +-- also states which of a library, executable, etc the name refers too. The +-- later uniquely identifiers a component and its closure. 
+-- +-- @since 2.0.0.2 +newtype UnqualComponentName = UnqualComponentName ShortText + deriving (Generic, Read, Show, Eq, Ord, Typeable, Data, + Semigroup, Monoid) -- TODO: bad enabler of bad monoids + +-- | Convert 'UnqualComponentName' to 'String' +-- +-- @since 2.0.0.2 +unUnqualComponentName :: UnqualComponentName -> String +unUnqualComponentName (UnqualComponentName s) = fromShortText s + +-- | Construct a 'UnqualComponentName' from a 'String' +-- +-- 'mkUnqualComponentName' is the inverse to 'unUnqualComponentName' +-- +-- Note: No validations are performed to ensure that the resulting +-- 'UnqualComponentName' is valid +-- +-- @since 2.0.0.2 +mkUnqualComponentName :: String -> UnqualComponentName +mkUnqualComponentName = UnqualComponentName . toShortText + +-- | 'mkUnqualComponentName' +-- +-- @since 2.0.0.2 +instance IsString UnqualComponentName where + fromString = mkUnqualComponentName + +instance Binary UnqualComponentName + +instance Pretty UnqualComponentName where + pretty = showToken . unUnqualComponentName + +instance Parsec UnqualComponentName where + parsec = mkUnqualComponentName <$> parsecUnqualComponentName + +instance Text UnqualComponentName where + parse = mkUnqualComponentName <$> parsePackageName + +instance NFData UnqualComponentName where + rnf (UnqualComponentName pkg) = rnf pkg + +-- TODO avoid String round trip with these PackageName <-> +-- UnqualComponentName converters. + +-- | Converts a package name to an unqualified component name +-- +-- Useful in legacy situations where a package name may refer to an internal +-- component, if one is defined with that name. +-- +-- @since 2.0.0.2 +packageNameToUnqualComponentName :: PackageName -> UnqualComponentName +packageNameToUnqualComponentName = mkUnqualComponentName . unPackageName + +-- | Converts an unqualified component name to a package name +-- +-- `packageNameToUnqualComponentName` is the inverse of +-- `unqualComponentNameToPackageName`. 
+-- +-- Useful in legacy situations where a package name may refer to an internal +-- component, if one is defined with that name. +-- +-- @since 2.0.0.2 +unqualComponentNameToPackageName :: UnqualComponentName -> PackageName +unqualComponentNameToPackageName = mkPackageName . unUnqualComponentName diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Version.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Version.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/Version.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/Version.hs 2018-11-26 08:42:52.000000000 +0000 @@ -0,0 +1,255 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Types.Version ( + -- * Package versions + Version, + mkVersion, + mkVersion', + versionNumbers, + nullVersion, + alterVersion, + version0, + + -- ** Backwards compatibility + showVersion, + + -- * Internal + validVersion, + ) where + +import Data.Bits (shiftL, shiftR, (.&.), (.|.)) +import Distribution.Compat.Prelude +import Prelude () + +import Distribution.CabalSpecVersion +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text + +import qualified Data.Version as Base +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp +import qualified Text.Read as Read + +-- | A 'Version' represents the version of a software entity. +-- +-- Instances of 'Eq' and 'Ord' are provided, which gives exact +-- equality and lexicographic ordering of the version number +-- components (i.e. 2.1 > 2.0, 1.2.3 > 1.2.2, etc.). +-- +-- This type is opaque and distinct from the 'Base.Version' type in +-- "Data.Version" since @Cabal-2.0@. 
The difference extends to the +-- 'Binary' instance using a different (and more compact) encoding. +-- +-- @since 2.0.0.2 +data Version = PV0 {-# UNPACK #-} !Word64 + | PV1 !Int [Int] + -- NOTE: If a version fits into the packed Word64 + -- representation (i.e. at most four version components + -- which all fall into the [0..0xfffe] range), then PV0 + -- MUST be used. This is essential for the 'Eq' instance + -- to work. + deriving (Data,Eq,Generic,Typeable) + +instance Ord Version where + compare (PV0 x) (PV0 y) = compare x y + compare (PV1 x xs) (PV1 y ys) = case compare x y of + EQ -> compare xs ys + c -> c + compare (PV0 w) (PV1 y ys) = case compare x y of + EQ -> compare [x2,x3,x4] ys + c -> c + where + x = fromIntegral ((w `shiftR` 48) .&. 0xffff) - 1 + x2 = fromIntegral ((w `shiftR` 32) .&. 0xffff) - 1 + x3 = fromIntegral ((w `shiftR` 16) .&. 0xffff) - 1 + x4 = fromIntegral (w .&. 0xffff) - 1 + compare (PV1 x xs) (PV0 w) = case compare x y of + EQ -> compare xs [y2,y3,y4] + c -> c + where + y = fromIntegral ((w `shiftR` 48) .&. 0xffff) - 1 + y2 = fromIntegral ((w `shiftR` 32) .&. 0xffff) - 1 + y3 = fromIntegral ((w `shiftR` 16) .&. 0xffff) - 1 + y4 = fromIntegral (w .&. 0xffff) - 1 + +instance Show Version where + showsPrec d v = showParen (d > 10) + $ showString "mkVersion " + . showsPrec 11 (versionNumbers v) + +instance Read Version where + readPrec = Read.parens $ do + Read.Ident "mkVersion" <- Read.lexP + v <- Read.step Read.readPrec + return (mkVersion v) + +instance Binary Version + +instance NFData Version where + rnf (PV0 _) = () + rnf (PV1 _ ns) = rnf ns + +instance Pretty Version where + pretty ver + = Disp.hcat (Disp.punctuate (Disp.char '.') + (map Disp.int $ versionNumbers ver)) + +instance Parsec Version where + parsec = do + digit <- digitParser <$> askCabalSpecVersion + mkVersion <$> P.sepBy1 digit (P.char '.') <* tags + where + digitParser v + | v >= CabalSpecV2_0 = P.integral + | otherwise = (some d >>= toNumber) P. 
"non-leading-zero integral" + where + toNumber :: CabalParsing m => [Int] -> m Int + toNumber [0] = return 0 + toNumber xs@(0:_) = do + parsecWarning PWTVersionLeadingZeros "Version digit with leading zero. Use cabal-version: 2.0 or later to write such versions. For more information see https://github.com/haskell/cabal/issues/5092" + return $ foldl' (\a b -> a * 10 + b) 0 xs + toNumber xs = return $ foldl' (\a b -> a * 10 + b) 0 xs + + d :: P.CharParsing m => m Int + d = f <$> P.satisfyRange '0' '9' + f c = ord c - ord '0' + + tags = do + ts <- many $ P.char '-' *> some (P.satisfy isAlphaNum) + case ts of + [] -> pure () + (_ : _) -> parsecWarning PWTVersionTag "version with tags" + +instance Text Version where + parse = do + branch <- Parse.sepBy1 parseNat (Parse.char '.') + -- allow but ignore tags: + _tags <- Parse.many (Parse.char '-' >> Parse.munch1 isAlphaNum) + return (mkVersion branch) + where + parseNat = read `fmap` Parse.munch1 isDigit + +-- | Construct 'Version' from list of version number components. +-- +-- For instance, @mkVersion [3,2,1]@ constructs a 'Version' +-- representing the version @3.2.1@. +-- +-- All version components must be non-negative. @mkVersion []@ +-- currently represents the special /null/ version; see also 'nullVersion'. +-- +-- @since 2.0.0.2 +mkVersion :: [Int] -> Version +-- TODO: add validity check; disallow 'mkVersion []' (we have +-- 'nullVersion' for that) +mkVersion [] = nullVersion +mkVersion (v1:[]) + | inWord16VerRep1 v1 = PV0 (mkWord64VerRep1 v1) + | otherwise = PV1 v1 [] + where + inWord16VerRep1 x1 = inWord16 (x1 .|. (x1+1)) + mkWord64VerRep1 y1 = mkWord64VerRep (y1+1) 0 0 0 + +mkVersion (v1:vs@(v2:[])) + | inWord16VerRep2 v1 v2 = PV0 (mkWord64VerRep2 v1 v2) + | otherwise = PV1 v1 vs + where + inWord16VerRep2 x1 x2 = inWord16 (x1 .|. (x1+1) + .|. x2 .|. 
(x2+1)) + mkWord64VerRep2 y1 y2 = mkWord64VerRep (y1+1) (y2+1) 0 0 + +mkVersion (v1:vs@(v2:v3:[])) + | inWord16VerRep3 v1 v2 v3 = PV0 (mkWord64VerRep3 v1 v2 v3) + | otherwise = PV1 v1 vs + where + inWord16VerRep3 x1 x2 x3 = inWord16 (x1 .|. (x1+1) + .|. x2 .|. (x2+1) + .|. x3 .|. (x3+1)) + mkWord64VerRep3 y1 y2 y3 = mkWord64VerRep (y1+1) (y2+1) (y3+1) 0 + +mkVersion (v1:vs@(v2:v3:v4:[])) + | inWord16VerRep4 v1 v2 v3 v4 = PV0 (mkWord64VerRep4 v1 v2 v3 v4) + | otherwise = PV1 v1 vs + where + inWord16VerRep4 x1 x2 x3 x4 = inWord16 (x1 .|. (x1+1) + .|. x2 .|. (x2+1) + .|. x3 .|. (x3+1) + .|. x4 .|. (x4+1)) + mkWord64VerRep4 y1 y2 y3 y4 = mkWord64VerRep (y1+1) (y2+1) (y3+1) (y4+1) + +mkVersion (v1:vs) = PV1 v1 vs + +-- | Version 0. A lower bound of 'Version'. +-- +-- @since 2.2 +version0 :: Version +version0 = mkVersion [0] + +{-# INLINE mkWord64VerRep #-} +mkWord64VerRep :: Int -> Int -> Int -> Int -> Word64 +mkWord64VerRep v1 v2 v3 v4 = + (fromIntegral v1 `shiftL` 48) + .|. (fromIntegral v2 `shiftL` 32) + .|. (fromIntegral v3 `shiftL` 16) + .|. fromIntegral v4 + +{-# INLINE inWord16 #-} +inWord16 :: Int -> Bool +inWord16 x = (fromIntegral x :: Word) <= 0xffff + +-- | Variant of 'mkVersion' which converts a "Data.Version" +-- 'Base.Version' into Cabal's 'Version' type. +-- +-- @since 2.0.0.2 +mkVersion' :: Base.Version -> Version +mkVersion' = mkVersion . Base.versionBranch + +-- | Unpack 'Version' into list of version number components. +-- +-- This is the inverse to 'mkVersion', so the following holds: +-- +-- > (versionNumbers . mkVersion) vs == vs +-- +-- @since 2.0.0.2 +versionNumbers :: Version -> [Int] +versionNumbers (PV1 n ns) = n:ns +versionNumbers (PV0 w) + | v1 < 0 = [] + | v2 < 0 = [v1] + | v3 < 0 = [v1,v2] + | v4 < 0 = [v1,v2,v3] + | otherwise = [v1,v2,v3,v4] + where + v1 = fromIntegral ((w `shiftR` 48) .&. 0xffff) - 1 + v2 = fromIntegral ((w `shiftR` 32) .&. 0xffff) - 1 + v3 = fromIntegral ((w `shiftR` 16) .&. 0xffff) - 1 + v4 = fromIntegral (w .&. 
0xffff) - 1 + + +-- | Constant representing the special /null/ 'Version' +-- +-- The 'nullVersion' compares (via 'Ord') as less than every proper +-- 'Version' value. +-- +-- @since 2.0.0.2 +nullVersion :: Version +-- TODO: at some point, 'mkVersion' may disallow creating /null/ +-- 'Version's +nullVersion = PV0 0 + +-- | Apply function to list of version number components +-- +-- > alterVersion f == mkVersion . f . versionNumbers +-- +-- @since 2.0.0.2 +alterVersion :: ([Int] -> [Int]) -> Version -> Version +alterVersion f = mkVersion . f . versionNumbers + +-- internal helper +validVersion :: Version -> Bool +validVersion v = v /= nullVersion && all (>=0) (versionNumbers v) + +showVersion :: Version -> String +showVersion = prettyShow +{-# DEPRECATED showVersion "Use prettyShow. This function will be removed in Cabal-3.0 (estimated Mar 2019)" #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/VersionInterval.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/VersionInterval.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/VersionInterval.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/VersionInterval.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,361 @@ +{-# LANGUAGE DeriveDataTypeable #-} +module Distribution.Types.VersionInterval ( + -- * Version intervals + VersionIntervals, + toVersionIntervals, + fromVersionIntervals, + withinIntervals, + versionIntervals, + mkVersionIntervals, + unionVersionIntervals, + intersectVersionIntervals, + invertVersionIntervals, + relaxLastInterval, + relaxHeadInterval, + + -- * Version intervals view + asVersionIntervals, + VersionInterval, + LowerBound(..), + UpperBound(..), + Bound(..), + ) where + +import Prelude () +import Distribution.Compat.Prelude +import Control.Exception (assert) + +import Distribution.Types.Version +import 
Distribution.Types.VersionRange + +------------------------------------------------------------------------------- +-- VersionRange +------------------------------------------------------------------------------- + +-- | View a 'VersionRange' as a union of intervals. +-- +-- This provides a canonical view of the semantics of a 'VersionRange' as +-- opposed to the syntax of the expression used to define it. For the syntactic +-- view use 'foldVersionRange'. +-- +-- Each interval is non-empty. The sequence is in increasing order and no +-- intervals overlap or touch. Therefore only the first and last can be +-- unbounded. The sequence can be empty if the range is empty +-- (e.g. a range expression like @< 1 && > 2@). +-- +-- Other checks are trivial to implement using this view. For example: +-- +-- > isNoVersion vr | [] <- asVersionIntervals vr = True +-- > | otherwise = False +-- +-- > isSpecificVersion vr +-- > | [(LowerBound v InclusiveBound +-- > ,UpperBound v' InclusiveBound)] <- asVersionIntervals vr +-- > , v == v' = Just v +-- > | otherwise = Nothing +-- +asVersionIntervals :: VersionRange -> [VersionInterval] +asVersionIntervals = versionIntervals . toVersionIntervals + + +------------------------------------------------------------------------------- +-- VersionInterval +------------------------------------------------------------------------------- + +-- | A complementary representation of a 'VersionRange'. Instead of a boolean +-- version predicate it uses an increasing sequence of non-overlapping, +-- non-empty intervals. +-- +-- The key point is that this representation gives a canonical representation +-- for the semantics of 'VersionRange's. This makes it easier to check things +-- like whether a version range is empty, covers all versions, or requires a +-- certain minimum or maximum version. It also makes it easy to check equality +-- or containment. 
It also makes it easier to identify \'simple\' version +-- predicates for translation into foreign packaging systems that do not +-- support complex version range expressions. +-- +newtype VersionIntervals = VersionIntervals [VersionInterval] + deriving (Eq, Show, Typeable) + +-- | Inspect the list of version intervals. +-- +versionIntervals :: VersionIntervals -> [VersionInterval] +versionIntervals (VersionIntervals is) = is + +type VersionInterval = (LowerBound, UpperBound) +data LowerBound = LowerBound Version !Bound deriving (Eq, Show) +data UpperBound = NoUpperBound | UpperBound Version !Bound deriving (Eq, Show) +data Bound = ExclusiveBound | InclusiveBound deriving (Eq, Show) + +minLowerBound :: LowerBound +minLowerBound = LowerBound (mkVersion [0]) InclusiveBound + +isVersion0 :: Version -> Bool +isVersion0 = (==) version0 + +instance Ord LowerBound where + LowerBound ver bound <= LowerBound ver' bound' = case compare ver ver' of + LT -> True + EQ -> not (bound == ExclusiveBound && bound' == InclusiveBound) + GT -> False + +instance Ord UpperBound where + _ <= NoUpperBound = True + NoUpperBound <= UpperBound _ _ = False + UpperBound ver bound <= UpperBound ver' bound' = case compare ver ver' of + LT -> True + EQ -> not (bound == InclusiveBound && bound' == ExclusiveBound) + GT -> False + +invariant :: VersionIntervals -> Bool +invariant (VersionIntervals intervals) = all validInterval intervals + && all doesNotTouch' adjacentIntervals + where + doesNotTouch' :: (VersionInterval, VersionInterval) -> Bool + doesNotTouch' ((_,u), (l',_)) = doesNotTouch u l' + + adjacentIntervals :: [(VersionInterval, VersionInterval)] + adjacentIntervals + | null intervals = [] + | otherwise = zip intervals (tail intervals) + +checkInvariant :: VersionIntervals -> VersionIntervals +checkInvariant is = assert (invariant is) is + +-- | Directly construct a 'VersionIntervals' from a list of intervals. +-- +-- In @Cabal-2.2@ the 'Maybe' is dropped from the result type. 
+-- +mkVersionIntervals :: [VersionInterval] -> VersionIntervals +mkVersionIntervals intervals + | invariant (VersionIntervals intervals) = VersionIntervals intervals + | otherwise + = checkInvariant + . foldl' (flip insertInterval) (VersionIntervals []) + . filter validInterval + $ intervals + +insertInterval :: VersionInterval -> VersionIntervals -> VersionIntervals +insertInterval i is = unionVersionIntervals (VersionIntervals [i]) is + +validInterval :: (LowerBound, UpperBound) -> Bool +validInterval i@(l, u) = validLower l && validUpper u && nonEmpty i + where + validLower (LowerBound v _) = validVersion v + validUpper NoUpperBound = True + validUpper (UpperBound v _) = validVersion v + +-- Check an interval is non-empty +-- +nonEmpty :: VersionInterval -> Bool +nonEmpty (_, NoUpperBound ) = True +nonEmpty (LowerBound l lb, UpperBound u ub) = + (l < u) || (l == u && lb == InclusiveBound && ub == InclusiveBound) + +-- Check an upper bound does not intersect, or even touch a lower bound: +-- +-- ---| or ---) but not ---] or ---) or ---] +-- |--- (--- (--- [--- [--- +-- +doesNotTouch :: UpperBound -> LowerBound -> Bool +doesNotTouch NoUpperBound _ = False +doesNotTouch (UpperBound u ub) (LowerBound l lb) = + u < l + || (u == l && ub == ExclusiveBound && lb == ExclusiveBound) + +-- | Check an upper bound does not intersect a lower bound: +-- +-- ---| or ---) or ---] or ---) but not ---] +-- |--- (--- (--- [--- [--- +-- +doesNotIntersect :: UpperBound -> LowerBound -> Bool +doesNotIntersect NoUpperBound _ = False +doesNotIntersect (UpperBound u ub) (LowerBound l lb) = + u < l + || (u == l && not (ub == InclusiveBound && lb == InclusiveBound)) + +-- | Test if a version falls within the version intervals. +-- +-- It exists mostly for completeness and testing. 
It satisfies the following +-- properties: +-- +-- > withinIntervals v (toVersionIntervals vr) = withinRange v vr +-- > withinIntervals v ivs = withinRange v (fromVersionIntervals ivs) +-- +withinIntervals :: Version -> VersionIntervals -> Bool +withinIntervals v (VersionIntervals intervals) = any withinInterval intervals + where + withinInterval (lowerBound, upperBound) = withinLower lowerBound + && withinUpper upperBound + withinLower (LowerBound v' ExclusiveBound) = v' < v + withinLower (LowerBound v' InclusiveBound) = v' <= v + + withinUpper NoUpperBound = True + withinUpper (UpperBound v' ExclusiveBound) = v' > v + withinUpper (UpperBound v' InclusiveBound) = v' >= v + +-- | Convert a 'VersionRange' to a sequence of version intervals. +-- +toVersionIntervals :: VersionRange -> VersionIntervals +toVersionIntervals = foldVersionRange + ( chkIvl (minLowerBound, NoUpperBound)) + (\v -> chkIvl (LowerBound v InclusiveBound, UpperBound v InclusiveBound)) + (\v -> chkIvl (LowerBound v ExclusiveBound, NoUpperBound)) + (\v -> if isVersion0 v then VersionIntervals [] else + chkIvl (minLowerBound, UpperBound v ExclusiveBound)) + unionVersionIntervals + intersectVersionIntervals + where + chkIvl interval = checkInvariant (VersionIntervals [interval]) + +-- | Convert a 'VersionIntervals' value back into a 'VersionRange' expression +-- representing the version intervals. 
+-- +fromVersionIntervals :: VersionIntervals -> VersionRange +fromVersionIntervals (VersionIntervals []) = noVersion +fromVersionIntervals (VersionIntervals intervals) = + foldr1 unionVersionRanges [ interval l u | (l, u) <- intervals ] + + where + interval (LowerBound v InclusiveBound) + (UpperBound v' InclusiveBound) | v == v' + = thisVersion v + interval (LowerBound v InclusiveBound) + (UpperBound v' ExclusiveBound) | isWildcardRange v v' + = withinVersion v + interval l u = lowerBound l `intersectVersionRanges'` upperBound u + + lowerBound (LowerBound v InclusiveBound) + | isVersion0 v = Nothing + | otherwise = Just (orLaterVersion v) + lowerBound (LowerBound v ExclusiveBound) = Just (laterVersion v) + + upperBound NoUpperBound = Nothing + upperBound (UpperBound v InclusiveBound) = Just (orEarlierVersion v) + upperBound (UpperBound v ExclusiveBound) = Just (earlierVersion v) + + intersectVersionRanges' Nothing Nothing = anyVersion + intersectVersionRanges' (Just vr) Nothing = vr + intersectVersionRanges' Nothing (Just vr) = vr + intersectVersionRanges' (Just vr) (Just vr') = intersectVersionRanges vr vr' + +unionVersionIntervals :: VersionIntervals -> VersionIntervals + -> VersionIntervals +unionVersionIntervals (VersionIntervals is0) (VersionIntervals is'0) = + checkInvariant (VersionIntervals (union is0 is'0)) + where + union is [] = is + union [] is' = is' + union (i:is) (i':is') = case unionInterval i i' of + Left Nothing -> i : union is (i' :is') + Left (Just i'') -> union is (i'':is') + Right Nothing -> i' : union (i :is) is' + Right (Just i'') -> union (i'':is) is' + +unionInterval :: VersionInterval -> VersionInterval + -> Either (Maybe VersionInterval) (Maybe VersionInterval) +unionInterval (lower , upper ) (lower', upper') + + -- Non-intersecting intervals with the left interval ending first + | upper `doesNotTouch` lower' = Left Nothing + + -- Non-intersecting intervals with the right interval first + | upper' `doesNotTouch` lower = Right Nothing + 
+ -- Complete or partial overlap, with the left interval ending first + | upper <= upper' = lowerBound `seq` + Left (Just (lowerBound, upper')) + + -- Complete or partial overlap, with the left interval ending first + | otherwise = lowerBound `seq` + Right (Just (lowerBound, upper)) + where + lowerBound = min lower lower' + +intersectVersionIntervals :: VersionIntervals -> VersionIntervals + -> VersionIntervals +intersectVersionIntervals (VersionIntervals is0) (VersionIntervals is'0) = + checkInvariant (VersionIntervals (intersect is0 is'0)) + where + intersect _ [] = [] + intersect [] _ = [] + intersect (i:is) (i':is') = case intersectInterval i i' of + Left Nothing -> intersect is (i':is') + Left (Just i'') -> i'' : intersect is (i':is') + Right Nothing -> intersect (i:is) is' + Right (Just i'') -> i'' : intersect (i:is) is' + +intersectInterval :: VersionInterval -> VersionInterval + -> Either (Maybe VersionInterval) (Maybe VersionInterval) +intersectInterval (lower , upper ) (lower', upper') + + -- Non-intersecting intervals with the left interval ending first + | upper `doesNotIntersect` lower' = Left Nothing + + -- Non-intersecting intervals with the right interval first + | upper' `doesNotIntersect` lower = Right Nothing + + -- Complete or partial overlap, with the left interval ending first + | upper <= upper' = lowerBound `seq` + Left (Just (lowerBound, upper)) + + -- Complete or partial overlap, with the right interval ending first + | otherwise = lowerBound `seq` + Right (Just (lowerBound, upper')) + where + lowerBound = max lower lower' + +invertVersionIntervals :: VersionIntervals + -> VersionIntervals +invertVersionIntervals (VersionIntervals xs) = + case xs of + -- Empty interval set + [] -> VersionIntervals [(noLowerBound, NoUpperBound)] + -- Interval with no lower bound + ((lb, ub) : more) | lb == noLowerBound -> + VersionIntervals $ invertVersionIntervals' ub more + -- Interval with a lower bound + ((lb, ub) : more) -> + VersionIntervals $ 
(noLowerBound, invertLowerBound lb) + : invertVersionIntervals' ub more + where + -- Invert subsequent version intervals given the upper bound of + -- the intervals already inverted. + invertVersionIntervals' :: UpperBound + -> [(LowerBound, UpperBound)] + -> [(LowerBound, UpperBound)] + invertVersionIntervals' NoUpperBound [] = [] + invertVersionIntervals' ub0 [] = [(invertUpperBound ub0, NoUpperBound)] + invertVersionIntervals' ub0 [(lb, NoUpperBound)] = + [(invertUpperBound ub0, invertLowerBound lb)] + invertVersionIntervals' ub0 ((lb, ub1) : more) = + (invertUpperBound ub0, invertLowerBound lb) + : invertVersionIntervals' ub1 more + + invertLowerBound :: LowerBound -> UpperBound + invertLowerBound (LowerBound v b) = UpperBound v (invertBound b) + + invertUpperBound :: UpperBound -> LowerBound + invertUpperBound (UpperBound v b) = LowerBound v (invertBound b) + invertUpperBound NoUpperBound = error "NoUpperBound: unexpected" + + invertBound :: Bound -> Bound + invertBound ExclusiveBound = InclusiveBound + invertBound InclusiveBound = ExclusiveBound + + noLowerBound :: LowerBound + noLowerBound = LowerBound (mkVersion [0]) InclusiveBound + + +relaxLastInterval :: VersionIntervals -> VersionIntervals +relaxLastInterval (VersionIntervals xs) = VersionIntervals (relaxLastInterval' xs) + where + relaxLastInterval' [] = [] + relaxLastInterval' [(l,_)] = [(l, NoUpperBound)] + relaxLastInterval' (i:is) = i : relaxLastInterval' is + +relaxHeadInterval :: VersionIntervals -> VersionIntervals +relaxHeadInterval (VersionIntervals xs) = VersionIntervals (relaxHeadInterval' xs) + where + relaxHeadInterval' [] = [] + relaxHeadInterval' ((_,u):is) = (minLowerBound,u) : is diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/VersionRange.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/VersionRange.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Types/VersionRange.hs 
1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Types/VersionRange.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,586 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveFoldable #-} +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE FlexibleContexts #-} +module Distribution.Types.VersionRange ( + -- * Version ranges + VersionRange(..), + + -- ** Constructing + anyVersion, noVersion, + thisVersion, notThisVersion, + laterVersion, earlierVersion, + orLaterVersion, orEarlierVersion, + unionVersionRanges, intersectVersionRanges, + withinVersion, + majorBoundVersion, + + -- ** Inspection + -- + -- See "Distribution.Version" for more utilities. + withinRange, + foldVersionRange, + normaliseVersionRange, + stripParensVersionRange, + hasUpperBound, + hasLowerBound, + + -- ** Cata & ana + VersionRangeF (..), + cataVersionRange, + anaVersionRange, + hyloVersionRange, + projectVersionRange, + embedVersionRange, + + -- ** Utilities + wildcardUpperBound, + majorUpperBound, + isWildcardRange, + ) where + +import Distribution.Compat.Prelude +import Distribution.Types.Version +import Prelude () + +import Distribution.CabalSpecVersion +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text +import Text.PrettyPrint ((<+>)) + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.DList as DList +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +data VersionRange + = AnyVersion + | ThisVersion Version -- = version + | LaterVersion Version -- > version (NB. 
not >=) + | OrLaterVersion Version -- >= version + | EarlierVersion Version -- < version + | OrEarlierVersion Version -- <= version + | WildcardVersion Version -- == ver.* (same as >= ver && < ver+1) + | MajorBoundVersion Version -- @^>= ver@ (same as >= ver && < MAJ(ver)+1) + | UnionVersionRanges VersionRange VersionRange + | IntersectVersionRanges VersionRange VersionRange + | VersionRangeParens VersionRange -- just '(exp)' parentheses syntax + deriving (Data, Eq, Generic, Read, Show, Typeable) + +instance Binary VersionRange + +instance NFData VersionRange where rnf = genericRnf + +{-# DeprecateD AnyVersion + "Use 'anyVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} +{-# DEPRECATED ThisVersion + "Use 'thisVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} +{-# DEPRECATED LaterVersion + "Use 'laterVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} +{-# DEPRECATED EarlierVersion + "Use 'earlierVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} +{-# DEPRECATED WildcardVersion + "Use 'anyVersion', 'foldVersionRange' or 'asVersionIntervals'" #-} +{-# DEPRECATED UnionVersionRanges + "Use 'unionVersionRanges', 'foldVersionRange' or 'asVersionIntervals'" #-} +{-# DEPRECATED IntersectVersionRanges + "Use 'intersectVersionRanges', 'foldVersionRange' or 'asVersionIntervals'"#-} + +-- | The version range @-any@. That is, a version range containing all +-- versions. +-- +-- > withinRange v anyVersion = True +-- +anyVersion :: VersionRange +anyVersion = AnyVersion + +-- | The empty version range, that is a version range containing no versions. +-- +-- This can be constructed using any unsatisfiable version range expression, +-- for example @> 1 && < 1@. 
+-- +-- > withinRange v noVersion = False +-- +noVersion :: VersionRange +noVersion = IntersectVersionRanges (LaterVersion v) (EarlierVersion v) + where v = mkVersion [1] + +-- | The version range @== v@ +-- +-- > withinRange v' (thisVersion v) = v' == v +-- +thisVersion :: Version -> VersionRange +thisVersion = ThisVersion + +-- | The version range @< v || > v@ +-- +-- > withinRange v' (notThisVersion v) = v' /= v +-- +notThisVersion :: Version -> VersionRange +notThisVersion v = UnionVersionRanges (EarlierVersion v) (LaterVersion v) + +-- | The version range @> v@ +-- +-- > withinRange v' (laterVersion v) = v' > v +-- +laterVersion :: Version -> VersionRange +laterVersion = LaterVersion + +-- | The version range @>= v@ +-- +-- > withinRange v' (orLaterVersion v) = v' >= v +-- +orLaterVersion :: Version -> VersionRange +orLaterVersion = OrLaterVersion + +-- | The version range @< v@ +-- +-- > withinRange v' (earlierVersion v) = v' < v +-- +earlierVersion :: Version -> VersionRange +earlierVersion = EarlierVersion + +-- | The version range @<= v@ +-- +-- > withinRange v' (orEarlierVersion v) = v' <= v +-- +orEarlierVersion :: Version -> VersionRange +orEarlierVersion = OrEarlierVersion + +-- | The version range @vr1 || vr2@ +-- +-- > withinRange v' (unionVersionRanges vr1 vr2) +-- > = withinRange v' vr1 || withinRange v' vr2 +-- +unionVersionRanges :: VersionRange -> VersionRange -> VersionRange +unionVersionRanges = UnionVersionRanges + +-- | The version range @vr1 && vr2@ +-- +-- > withinRange v' (intersectVersionRanges vr1 vr2) +-- > = withinRange v' vr1 && withinRange v' vr2 +-- +intersectVersionRanges :: VersionRange -> VersionRange -> VersionRange +intersectVersionRanges = IntersectVersionRanges + +-- | The version range @== v.*@. 
+-- +-- For example, for version @1.2@, the version range @== 1.2.*@ is the same as +-- @>= 1.2 && < 1.3@ +-- +-- > withinRange v' (laterVersion v) = v' >= v && v' < upper v +-- > where +-- > upper (Version lower t) = Version (init lower ++ [last lower + 1]) t +-- +withinVersion :: Version -> VersionRange +withinVersion = WildcardVersion + +-- | The version range @^>= v@. +-- +-- For example, for version @1.2.3.4@, the version range @^>= 1.2.3.4@ is the same as +-- @>= 1.2.3.4 && < 1.3@. +-- +-- Note that @^>= 1@ is equivalent to @>= 1 && < 1.1@. +-- +-- @since 2.0.0.2 +majorBoundVersion :: Version -> VersionRange +majorBoundVersion = MajorBoundVersion + +-- | F-Algebra of 'VersionRange'. See 'cataVersionRange'. +-- +-- @since 2.2 +data VersionRangeF a + = AnyVersionF + | ThisVersionF Version -- = version + | LaterVersionF Version -- > version (NB. not >=) + | OrLaterVersionF Version -- >= version + | EarlierVersionF Version -- < version + | OrEarlierVersionF Version -- <= version + | WildcardVersionF Version -- == ver.* (same as >= ver && < ver+1) + | MajorBoundVersionF Version -- @^>= ver@ (same as >= ver && < MAJ(ver)+1) + | UnionVersionRangesF a a + | IntersectVersionRangesF a a + | VersionRangeParensF a + deriving (Data, Eq, Generic, Read, Show, Typeable, Functor, Foldable, Traversable) + +-- | @since 2.2 +projectVersionRange :: VersionRange -> VersionRangeF VersionRange +projectVersionRange AnyVersion = AnyVersionF +projectVersionRange (ThisVersion v) = ThisVersionF v +projectVersionRange (LaterVersion v) = LaterVersionF v +projectVersionRange (OrLaterVersion v) = OrLaterVersionF v +projectVersionRange (EarlierVersion v) = EarlierVersionF v +projectVersionRange (OrEarlierVersion v) = OrEarlierVersionF v +projectVersionRange (WildcardVersion v) = WildcardVersionF v +projectVersionRange (MajorBoundVersion v) = MajorBoundVersionF v +projectVersionRange (UnionVersionRanges a b) = UnionVersionRangesF a b +projectVersionRange (IntersectVersionRanges a b) = 
IntersectVersionRangesF a b +projectVersionRange (VersionRangeParens a) = VersionRangeParensF a + +-- | Fold 'VersionRange'. +-- +-- @since 2.2 +cataVersionRange :: (VersionRangeF a -> a) -> VersionRange -> a +cataVersionRange f = c where c = f . fmap c . projectVersionRange + +-- | @since 2.2 +embedVersionRange :: VersionRangeF VersionRange -> VersionRange +embedVersionRange AnyVersionF = AnyVersion +embedVersionRange (ThisVersionF v) = ThisVersion v +embedVersionRange (LaterVersionF v) = LaterVersion v +embedVersionRange (OrLaterVersionF v) = OrLaterVersion v +embedVersionRange (EarlierVersionF v) = EarlierVersion v +embedVersionRange (OrEarlierVersionF v) = OrEarlierVersion v +embedVersionRange (WildcardVersionF v) = WildcardVersion v +embedVersionRange (MajorBoundVersionF v) = MajorBoundVersion v +embedVersionRange (UnionVersionRangesF a b) = UnionVersionRanges a b +embedVersionRange (IntersectVersionRangesF a b) = IntersectVersionRanges a b +embedVersionRange (VersionRangeParensF a) = VersionRangeParens a + +-- | Unfold 'VersionRange'. +-- +-- @since 2.2 +anaVersionRange :: (a -> VersionRangeF a) -> a -> VersionRange +anaVersionRange g = a where a = embedVersionRange . fmap a . g + + +-- | Fold over the basic syntactic structure of a 'VersionRange'. +-- +-- This provides a syntactic view of the expression defining the version range. +-- The syntactic sugar @\">= v\"@, @\"<= v\"@ and @\"== v.*\"@ is presented +-- in terms of the other basic syntax. +-- +-- For a semantic view use 'asVersionIntervals'. 
+-- +foldVersionRange :: a -- ^ @\"-any\"@ version + -> (Version -> a) -- ^ @\"== v\"@ + -> (Version -> a) -- ^ @\"> v\"@ + -> (Version -> a) -- ^ @\"< v\"@ + -> (a -> a -> a) -- ^ @\"_ || _\"@ union + -> (a -> a -> a) -- ^ @\"_ && _\"@ intersection + -> VersionRange -> a +foldVersionRange anyv this later earlier union intersect = fold + where + fold = cataVersionRange alg + + alg AnyVersionF = anyv + alg (ThisVersionF v) = this v + alg (LaterVersionF v) = later v + alg (OrLaterVersionF v) = union (this v) (later v) + alg (EarlierVersionF v) = earlier v + alg (OrEarlierVersionF v) = union (this v) (earlier v) + alg (WildcardVersionF v) = fold (wildcard v) + alg (MajorBoundVersionF v) = fold (majorBound v) + alg (UnionVersionRangesF v1 v2) = union v1 v2 + alg (IntersectVersionRangesF v1 v2) = intersect v1 v2 + alg (VersionRangeParensF v) = v + + wildcard v = intersectVersionRanges + (orLaterVersion v) + (earlierVersion (wildcardUpperBound v)) + + majorBound v = intersectVersionRanges + (orLaterVersion v) + (earlierVersion (majorUpperBound v)) + +-- | Refold 'VersionRange' +-- +-- @since 2.2 +hyloVersionRange :: (VersionRangeF VersionRange -> VersionRange) + -> (VersionRange -> VersionRangeF VersionRange) + -> VersionRange -> VersionRange +hyloVersionRange f g = h where h = f . fmap h . g + +-- | Normalise 'VersionRange'. +-- +-- In particular collapse @(== v || > v)@ into @>= v@, and so on. 
+normaliseVersionRange :: VersionRange -> VersionRange +normaliseVersionRange = hyloVersionRange embed projectVersionRange + where + -- == v || > v, > v || == v ==> >= v + embed (UnionVersionRangesF (ThisVersion v) (LaterVersion v')) | v == v' = + orLaterVersion v + embed (UnionVersionRangesF (LaterVersion v) (ThisVersion v')) | v == v' = + orLaterVersion v + + -- == v || < v, < v || == v ==> <= v + embed (UnionVersionRangesF (ThisVersion v) (EarlierVersion v')) | v == v' = + orEarlierVersion v + embed (UnionVersionRangesF (EarlierVersion v) (ThisVersion v')) | v == v' = + orEarlierVersion v + + -- otherwise embed normally + embed vr = embedVersionRange vr + +-- | Remove 'VersionRangeParens' constructors. +-- +-- @since 2.2 +stripParensVersionRange :: VersionRange -> VersionRange +stripParensVersionRange = hyloVersionRange embed projectVersionRange + where + embed (VersionRangeParensF vr) = vr + embed vr = embedVersionRange vr + +-- | Does this version fall within the given range? +-- +-- This is the evaluation function for the 'VersionRange' type. 
+-- +withinRange :: Version -> VersionRange -> Bool +withinRange v = foldVersionRange + True + (\v' -> v == v') + (\v' -> v > v') + (\v' -> v < v') + (||) + (&&) + +---------------------------- +-- Wildcard range utilities +-- + +-- | @since 2.2 +wildcardUpperBound :: Version -> Version +wildcardUpperBound = alterVersion $ + \lowerBound -> init lowerBound ++ [last lowerBound + 1] + +isWildcardRange :: Version -> Version -> Bool +isWildcardRange ver1 ver2 = check (versionNumbers ver1) (versionNumbers ver2) + where check (n:[]) (m:[]) | n+1 == m = True + check (n:ns) (m:ms) | n == m = check ns ms + check _ _ = False + +-- | Compute next greater major version to be used as upper bound +-- +-- Example: @0.4.1@ produces the version @0.5@ which then can be used +-- to construct a range @>= 0.4.1 && < 0.5@ +-- +-- @since 2.2 +majorUpperBound :: Version -> Version +majorUpperBound = alterVersion $ \numbers -> case numbers of + [] -> [0,1] -- should not happen + [m1] -> [m1,1] -- e.g. version '1' + (m1:m2:_) -> [m1,m2+1] + +------------------------------------------------------------------------------- +-- Parsec & Pretty +------------------------------------------------------------------------------- + +instance Pretty VersionRange where + pretty = fst . 
cataVersionRange alg + where + alg AnyVersionF = (Disp.text "-any", 0 :: Int) + alg (ThisVersionF v) = (Disp.text "==" <<>> pretty v, 0) + alg (LaterVersionF v) = (Disp.char '>' <<>> pretty v, 0) + alg (OrLaterVersionF v) = (Disp.text ">=" <<>> pretty v, 0) + alg (EarlierVersionF v) = (Disp.char '<' <<>> pretty v, 0) + alg (OrEarlierVersionF v) = (Disp.text "<=" <<>> pretty v, 0) + alg (WildcardVersionF v) = (Disp.text "==" <<>> dispWild v, 0) + alg (MajorBoundVersionF v) = (Disp.text "^>=" <<>> pretty v, 0) + alg (UnionVersionRangesF (r1, p1) (r2, p2)) = + (punct 1 p1 r1 <+> Disp.text "||" <+> punct 2 p2 r2 , 2) + alg (IntersectVersionRangesF (r1, p1) (r2, p2)) = + (punct 0 p1 r1 <+> Disp.text "&&" <+> punct 1 p2 r2 , 1) + alg (VersionRangeParensF (r, _)) = + (Disp.parens r, 0) + + dispWild ver = + Disp.hcat (Disp.punctuate (Disp.char '.') (map Disp.int $ versionNumbers ver)) + <<>> Disp.text ".*" + + punct p p' | p < p' = Disp.parens + | otherwise = id + +instance Parsec VersionRange where + parsec = expr + where + expr = do P.spaces + t <- term + P.spaces + (do _ <- P.string "||" + P.spaces + e <- expr + return (unionVersionRanges t e) + <|> + return t) + term = do f <- factor + P.spaces + (do _ <- P.string "&&" + P.spaces + t <- term + return (intersectVersionRanges f t) + <|> + return f) + factor = parens expr <|> prim + + prim = do + op <- P.munch1 (`elem` "<>=^-") P. 
"operator" + case op of + "-" -> anyVersion <$ P.string "any" <|> P.string "none" *> noVersion' + + "==" -> do + P.spaces + (wild, v) <- verOrWild + pure $ (if wild then withinVersion else thisVersion) v + + _ -> do + P.spaces + (wild, v) <- verOrWild + when wild $ P.unexpected $ + "wild-card version after non-== operator: " ++ show op + case op of + ">=" -> pure $ orLaterVersion v + "<" -> pure $ earlierVersion v + "^>=" -> majorBoundVersion' v + "<=" -> pure $ orEarlierVersion v + ">" -> pure $ laterVersion v + _ -> fail $ "Unknown version operator " ++ show op + + -- Note: There are other features: + -- && and || since 1.8 + -- x.y.* (wildcard) since 1.6 + + -- -none version range is available since 1.22 + noVersion' = do + csv <- askCabalSpecVersion + if csv >= CabalSpecV1_22 + then pure noVersion + else fail $ unwords + [ "-none version range used." + , "To use this syntax the package needs to specify at least 'cabal-version: 1.22'." + , "Alternatively, if broader compatibility is important then use" + , "<0 or other empty range." + ] + + -- ^>= is available since 2.0 + majorBoundVersion' v = do + csv <- askCabalSpecVersion + if csv >= CabalSpecV2_0 + then pure $ majorBoundVersion v + else fail $ unwords + [ "major bounded version syntax (caret, ^>=) used." + , "To use this syntax the package need to specify at least 'cabal-version: 2.0'." 
+ , "Alternatively, if broader compatibility is important then use:" + , prettyShow $ eliminateMajorBoundSyntax $ majorBoundVersion v + ] + where + eliminateMajorBoundSyntax = hyloVersionRange embed projectVersionRange + embed (MajorBoundVersionF u) = intersectVersionRanges + (orLaterVersion u) (earlierVersion (majorUpperBound u)) + embed vr = embedVersionRange vr + + -- either wildcard or normal version + verOrWild :: CabalParsing m => m (Bool, Version) + verOrWild = do + x <- P.integral + verLoop (DList.singleton x) + + -- trailing: wildcard (.y.*) or normal version (optional tags) (.y.z-tag) + verLoop :: CabalParsing m => DList.DList Int -> m (Bool, Version) + verLoop acc = verLoop' acc <|> (tags *> pure (False, mkVersion (DList.toList acc))) + + verLoop' :: CabalParsing m => DList.DList Int -> m (Bool, Version) + verLoop' acc = do + _ <- P.char '.' + let digit = P.integral >>= verLoop . DList.snoc acc + let wild = (True, mkVersion (DList.toList acc)) <$ P.char '*' + digit <|> wild + + parens p = P.between + ((P.char '(' P. 
"opening paren") >> P.spaces) + (P.char ')' >> P.spaces) + (do a <- p + P.spaces + return (VersionRangeParens a)) + + tags :: CabalParsing m => m () + tags = do + ts <- many $ P.char '-' *> some (P.satisfy isAlphaNum) + case ts of + [] -> pure () + (_ : _) -> parsecWarning PWTVersionTag "version with tags" + + +instance Text VersionRange where + parse = expr + where + expr = do Parse.skipSpaces + t <- term + Parse.skipSpaces + (do _ <- Parse.string "||" + Parse.skipSpaces + e <- expr + return (UnionVersionRanges t e) + Parse.+++ + return t) + term = do f <- factor + Parse.skipSpaces + (do _ <- Parse.string "&&" + Parse.skipSpaces + t <- term + return (IntersectVersionRanges f t) + Parse.+++ + return f) + factor = Parse.choice $ parens expr + : parseAnyVersion + : parseNoVersion + : parseWildcardRange + : map parseRangeOp rangeOps + parseAnyVersion = Parse.string "-any" >> return AnyVersion + parseNoVersion = Parse.string "-none" >> return noVersion + + parseWildcardRange = do + _ <- Parse.string "==" + Parse.skipSpaces + branch <- Parse.sepBy1 digits (Parse.char '.') + _ <- Parse.char '.' + _ <- Parse.char '*' + return (WildcardVersion (mkVersion branch)) + + parens p = Parse.between (Parse.char '(' >> Parse.skipSpaces) + (Parse.char ')' >> Parse.skipSpaces) + (do a <- p + Parse.skipSpaces + return (VersionRangeParens a)) + + digits = do + firstDigit <- Parse.satisfy isDigit + if firstDigit == '0' + then return 0 + else do rest <- Parse.munch isDigit + return (read (firstDigit : rest)) -- TODO: eradicateNoParse + + parseRangeOp (s,f) = Parse.string s >> Parse.skipSpaces >> fmap f parse + rangeOps = [ ("<", EarlierVersion), + ("<=", orEarlierVersion), + (">", LaterVersion), + (">=", orLaterVersion), + ("^>=", MajorBoundVersion), + ("==", ThisVersion) ] + +-- | Does the version range have an upper bound? 
+-- +-- @since 1.24.0.0 +hasUpperBound :: VersionRange -> Bool +hasUpperBound = foldVersionRange + False + (const True) + (const False) + (const True) + (&&) (||) + +-- | Does the version range have an explicit lower bound? +-- +-- Note: this function only considers the user-specified lower bounds, but not +-- the implicit >=0 lower bound. +-- +-- @since 1.24.0.0 +hasLowerBound :: VersionRange -> Bool +hasLowerBound = foldVersionRange + False + (const True) + (const True) + (const False) + (&&) (||) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/Base62.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/Base62.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/Base62.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/Base62.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,22 @@ + +-- | Implementation of base-62 encoding, which we use when computing hashes +-- for fully instantiated unit ids. +module Distribution.Utils.Base62 (hashToBase62) where + +import GHC.Fingerprint ( Fingerprint(..), fingerprintString ) +import Numeric ( showIntAtBase ) +import Data.Char ( chr ) + +-- | Hash a string using GHC's fingerprinting algorithm (a 128-bit +-- MD5 hash) and then encode the resulting hash in base 62. 
+hashToBase62 :: String -> String +hashToBase62 s = showFingerprint $ fingerprintString s + where + showIntAtBase62 x = showIntAtBase 62 representBase62 x "" + representBase62 x + | x < 10 = chr (48 + x) + | x < 36 = chr (65 + x - 10) + | x < 62 = chr (97 + x - 36) + | otherwise = '@' + showFingerprint (Fingerprint a b) = showIntAtBase62 a ++ showIntAtBase62 b + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/Generic.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/Generic.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/Generic.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/Generic.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,495 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE BangPatterns #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Simple.Utils +-- Copyright : Isaac Jones, Simon Marlow 2003-2004 +-- License : BSD3 +-- portions Copyright (c) 2007, Galois Inc. +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- A large and somewhat miscellaneous collection of utility functions used +-- throughout the rest of the Cabal lib and in other tools that use the Cabal +-- lib like @cabal-install@. It has a very simple set of logging actions. It +-- has low level functions for running programs, a bunch of wrappers for +-- various directory and file functions that do extra logging. 
+ +module Distribution.Utils.Generic ( + -- * reading and writing files safely + withFileContents, + writeFileAtomic, + + -- * Unicode + + -- ** Conversions + fromUTF8BS, + fromUTF8LBS, + + toUTF8BS, + toUTF8LBS, + + validateUTF8, + + -- ** File I/O + readUTF8File, + withUTF8FileContents, + writeUTF8File, + + -- ** BOM + ignoreBOM, + + -- ** Misc + normaliseLineEndings, + + -- * generic utils + dropWhileEndLE, + takeWhileEndLE, + equating, + comparing, + isInfixOf, + intercalate, + lowercase, + isAscii, + isAsciiAlpha, + isAsciiAlphaNum, + listUnion, + listUnionRight, + ordNub, + ordNubBy, + ordNubRight, + safeTail, + unintersperse, + wrapText, + wrapLine, + unfoldrM, + spanMaybe, + breakMaybe, + + -- * FilePath stuff + isAbsoluteOnAnyPlatform, + isRelativeOnAnyPlatform, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Utils.String + +import Data.Bits ((.&.), (.|.), shiftL) +import Data.List + ( isInfixOf ) +import Data.Ord + ( comparing ) +import qualified Data.ByteString.Lazy as BS +import qualified Data.Set as Set + +import qualified Data.ByteString as SBS + +import System.Directory + ( removeFile, renameFile ) +import System.FilePath + ( (<.>), splitFileName ) +import System.IO + ( withFile, withBinaryFile + , openBinaryTempFileWithDefaultPermissions + , IOMode(ReadMode), hGetContents, hClose ) +import qualified Control.Exception as Exception + +-- ----------------------------------------------------------------------------- +-- Helper functions + +-- | Wraps text to the default line width. Existing newlines are preserved. +wrapText :: String -> String +wrapText = unlines + . map (intercalate "\n" + . map unwords + . wrapLine 79 + . words) + . lines + +-- | Wraps a list of words to a list of lines of words of a particular width. 
+wrapLine :: Int -> [String] -> [[String]] +wrapLine width = wrap 0 [] + where wrap :: Int -> [String] -> [String] -> [[String]] + wrap 0 [] (w:ws) + | length w + 1 > width + = wrap (length w) [w] ws + wrap col line (w:ws) + | col + length w + 1 > width + = reverse line : wrap 0 [] (w:ws) + wrap col line (w:ws) + = let col' = col + length w + 1 + in wrap col' (w:line) ws + wrap _ [] [] = [] + wrap _ line [] = [reverse line] + +----------------------------------- +-- Safely reading and writing files + +-- | Gets the contents of a file, but guarantee that it gets closed. +-- +-- The file is read lazily but if it is not fully consumed by the action then +-- the remaining input is truncated and the file is closed. +-- +withFileContents :: FilePath -> (String -> NoCallStackIO a) -> NoCallStackIO a +withFileContents name action = + withFile name ReadMode + (\hnd -> hGetContents hnd >>= action) + +-- | Writes a file atomically. +-- +-- The file is either written successfully or an IO exception is raised and +-- the original file is left unchanged. +-- +-- On windows it is not possible to delete a file that is open by a process. +-- This case will give an IO exception but the atomic property is not affected. 
+-- +writeFileAtomic :: FilePath -> BS.ByteString -> NoCallStackIO () +writeFileAtomic targetPath content = do + let (targetDir, targetFile) = splitFileName targetPath + Exception.bracketOnError + (openBinaryTempFileWithDefaultPermissions targetDir $ targetFile <.> "tmp") + (\(tmpPath, handle) -> hClose handle >> removeFile tmpPath) + (\(tmpPath, handle) -> do + BS.hPut handle content + hClose handle + renameFile tmpPath targetPath) + +-- ------------------------------------------------------------ +-- * Unicode stuff +-- ------------------------------------------------------------ + +-- | Decode 'String' from UTF8-encoded 'BS.ByteString' +-- +-- Invalid data in the UTF8 stream (this includes code-points @U+D800@ +-- through @U+DFFF@) will be decoded as the replacement character (@U+FFFD@). +-- +fromUTF8BS :: SBS.ByteString -> String +fromUTF8BS = decodeStringUtf8 . SBS.unpack + +-- | Variant of 'fromUTF8BS' for lazy 'BS.ByteString's +-- +fromUTF8LBS :: BS.ByteString -> String +fromUTF8LBS = decodeStringUtf8 . BS.unpack + +-- | Encode 'String' to to UTF8-encoded 'SBS.ByteString' +-- +-- Code-points in the @U+D800@-@U+DFFF@ range will be encoded +-- as the replacement character (i.e. @U+FFFD@). +-- +toUTF8BS :: String -> SBS.ByteString +toUTF8BS = SBS.pack . encodeStringUtf8 + +-- | Variant of 'toUTF8BS' for lazy 'BS.ByteString's +-- +toUTF8LBS :: String -> BS.ByteString +toUTF8LBS = BS.pack . encodeStringUtf8 + +-- | Check that strict 'ByteString' is valid UTF8. Returns 'Just offset' if it's not. +validateUTF8 :: SBS.ByteString -> Maybe Int +validateUTF8 = go 0 where + go off bs = case SBS.uncons bs of + Nothing -> Nothing + Just (c, bs') + | c <= 0x7F -> go (off + 1) bs' + | c <= 0xBF -> Just off + | c <= 0xDF -> twoBytes off c bs' + | c <= 0xEF -> moreBytes off 3 0x800 bs' (fromIntegral $ c .&. 0xF) + | c <= 0xF7 -> moreBytes off 4 0x10000 bs' (fromIntegral $ c .&. 0x7) + | c <= 0xFB -> moreBytes off 5 0x200000 bs' (fromIntegral $ c .&. 
0x3) + | c <= 0xFD -> moreBytes off 6 0x4000000 bs' (fromIntegral $ c .&. 0x1) + | otherwise -> Just off + + twoBytes off c0 bs = case SBS.uncons bs of + Nothing -> Just off + Just (c1, bs') + | c1 .&. 0xC0 == 0x80 -> + if d >= (0x80 :: Int) + then go (off + 2) bs' + else Just off + | otherwise -> Just off + where + d = (fromIntegral (c0 .&. 0x1F) `shiftL` 6) .|. fromIntegral (c1 .&. 0x3F) + + moreBytes :: Int -> Int -> Int -> SBS.ByteString -> Int -> Maybe Int + moreBytes off 1 overlong cs' acc + | overlong <= acc, acc <= 0x10FFFF, acc < 0xD800 || 0xDFFF < acc + = go (off + 1) cs' + + | otherwise + = Just off + + moreBytes off byteCount overlong bs acc = case SBS.uncons bs of + Just (cn, bs') | cn .&. 0xC0 == 0x80 -> + moreBytes (off + 1) (byteCount-1) overlong bs' ((acc `shiftL` 6) .|. fromIntegral cn .&. 0x3F) + _ -> Just off + + +-- | Ignore a Unicode byte order mark (BOM) at the beginning of the input +-- +ignoreBOM :: String -> String +ignoreBOM ('\xFEFF':string) = string +ignoreBOM string = string + +-- | Reads a UTF8 encoded text file as a Unicode String +-- +-- Reads lazily using ordinary 'readFile'. +-- +readUTF8File :: FilePath -> NoCallStackIO String +readUTF8File f = (ignoreBOM . fromUTF8LBS) <$> BS.readFile f + +-- | Reads a UTF8 encoded text file as a Unicode String +-- +-- Same behaviour as 'withFileContents'. +-- +withUTF8FileContents :: FilePath -> (String -> IO a) -> IO a +withUTF8FileContents name action = + withBinaryFile name ReadMode + (\hnd -> BS.hGetContents hnd >>= action . ignoreBOM . fromUTF8LBS) + +-- | Writes a Unicode String as a UTF8 encoded text file. +-- +-- Uses 'writeFileAtomic', so provides the same guarantees. +-- +writeUTF8File :: FilePath -> String -> NoCallStackIO () +writeUTF8File path = writeFileAtomic path . BS.pack . 
encodeStringUtf8 + +-- | Fix different systems silly line ending conventions +normaliseLineEndings :: String -> String +normaliseLineEndings [] = [] +normaliseLineEndings ('\r':'\n':s) = '\n' : normaliseLineEndings s -- windows +normaliseLineEndings ('\r':s) = '\n' : normaliseLineEndings s -- old OS X +normaliseLineEndings ( c :s) = c : normaliseLineEndings s + +-- ------------------------------------------------------------ +-- * Common utils +-- ------------------------------------------------------------ + +-- | @dropWhileEndLE p@ is equivalent to @reverse . dropWhile p . reverse@, but +-- quite a bit faster. The difference between "Data.List.dropWhileEnd" and this +-- version is that the one in "Data.List" is strict in elements, but spine-lazy, +-- while this one is spine-strict but lazy in elements. That's what @LE@ stands +-- for - "lazy in elements". +-- +-- Example: +-- +-- >>> tail $ Data.List.dropWhileEnd (<3) [undefined, 5, 4, 3, 2, 1] +-- *** Exception: Prelude.undefined +-- ... +-- +-- >>> tail $ dropWhileEndLE (<3) [undefined, 5, 4, 3, 2, 1] +-- [5,4,3] +-- +-- >>> take 3 $ Data.List.dropWhileEnd (<3) [5, 4, 3, 2, 1, undefined] +-- [5,4,3] +-- +-- >>> take 3 $ dropWhileEndLE (<3) [5, 4, 3, 2, 1, undefined] +-- *** Exception: Prelude.undefined +-- ... +-- +dropWhileEndLE :: (a -> Bool) -> [a] -> [a] +dropWhileEndLE p = foldr (\x r -> if null r && p x then [] else x:r) [] + +-- | @takeWhileEndLE p@ is equivalent to @reverse . takeWhile p . reverse@, but +-- is usually faster (as well as being easier to read). +takeWhileEndLE :: (a -> Bool) -> [a] -> [a] +takeWhileEndLE p = fst . foldr go ([], False) + where + go x (rest, done) + | not done && p x = (x:rest, False) + | otherwise = (rest, True) + +-- | Like 'Data.List.nub', but has @O(n log n)@ complexity instead of +-- @O(n^2)@. Code for 'ordNub' and 'listUnion' taken from Niklas Hambüchen's +-- package. +ordNub :: Ord a => [a] -> [a] +ordNub = ordNubBy id + +-- | Like 'ordNub' and 'Data.List.nubBy'. 
Selects a key for each element and +-- takes the nub based on that key. +ordNubBy :: Ord b => (a -> b) -> [a] -> [a] +ordNubBy f l = go Set.empty l + where + go !_ [] = [] + go !s (x:xs) + | y `Set.member` s = go s xs + | otherwise = let !s' = Set.insert y s + in x : go s' xs + where + y = f x + +-- | Like "Data.List.union", but has @O(n log n)@ complexity instead of +-- @O(n^2)@. +listUnion :: (Ord a) => [a] -> [a] -> [a] +listUnion a b = a ++ ordNub (filter (`Set.notMember` aSet) b) + where + aSet = Set.fromList a + +-- | A right-biased version of 'ordNub'. +-- +-- Example: +-- +-- >>> ordNub [1,2,1] :: [Int] +-- [1,2] +-- +-- >>> ordNubRight [1,2,1] :: [Int] +-- [2,1] +-- +ordNubRight :: (Ord a) => [a] -> [a] +ordNubRight = fst . foldr go ([], Set.empty) + where + go x p@(l, s) = if x `Set.member` s then p + else (x:l, Set.insert x s) + +-- | A right-biased version of 'listUnion'. +-- +-- Example: +-- +-- >>> listUnion [1,2,3,4,3] [2,1,1] +-- [1,2,3,4,3] +-- +-- >>> listUnionRight [1,2,3,4,3] [2,1,1] +-- [4,3,2,1,1] +-- +listUnionRight :: (Ord a) => [a] -> [a] -> [a] +listUnionRight a b = ordNubRight (filter (`Set.notMember` bSet) a) ++ b + where + bSet = Set.fromList b + +-- | A total variant of 'tail'. +safeTail :: [a] -> [a] +safeTail [] = [] +safeTail (_:xs) = xs + +equating :: Eq a => (b -> a) -> b -> b -> Bool +equating p x y = p x == p y + +-- | Lower case string +-- +-- >>> lowercase "Foobar" +-- "foobar" +lowercase :: String -> String +lowercase = map toLower + +-- | Ascii characters +isAscii :: Char -> Bool +isAscii c = fromEnum c < 0x80 + +-- | Ascii letters. +isAsciiAlpha :: Char -> Bool +isAsciiAlpha c = ('a' <= c && c <= 'z') + || ('A' <= c && c <= 'Z') + +-- | Ascii letters and digits. 
+-- +-- >>> isAsciiAlphaNum 'a' +-- True +-- +-- >>> isAsciiAlphaNum 'ä' +-- False +-- +isAsciiAlphaNum :: Char -> Bool +isAsciiAlphaNum c = isAscii c && isAlphaNum c + +unintersperse :: Char -> String -> [String] +unintersperse mark = unfoldr unintersperse1 where + unintersperse1 str + | null str = Nothing + | otherwise = + let (this, rest) = break (== mark) str in + Just (this, safeTail rest) + +-- | Like 'break', but with 'Maybe' predicate +-- +-- >>> breakMaybe (readMaybe :: String -> Maybe Int) ["foo", "bar", "1", "2", "quu"] +-- (["foo","bar"],Just (1,["2","quu"])) +-- +-- >>> breakMaybe (readMaybe :: String -> Maybe Int) ["foo", "bar"] +-- (["foo","bar"],Nothing) +-- +-- @since 2.2 +-- +breakMaybe :: (a -> Maybe b) -> [a] -> ([a], Maybe (b, [a])) +breakMaybe f = go id where + go !acc [] = (acc [], Nothing) + go !acc (x:xs) = case f x of + Nothing -> go (acc . (x:)) xs + Just b -> (acc [], Just (b, xs)) + +-- | Like 'span' but with 'Maybe' predicate +-- +-- >>> spanMaybe listToMaybe [[1,2],[3],[],[4,5],[6,7]] +-- ([1,3],[[],[4,5],[6,7]]) +-- +-- >>> spanMaybe (readMaybe :: String -> Maybe Int) ["1", "2", "foo"] +-- ([1,2],["foo"]) +-- +-- @since 2.2 +-- +spanMaybe :: (a -> Maybe b) -> [a] -> ([b],[a]) +spanMaybe _ xs@[] = ([], xs) +spanMaybe p xs@(x:xs') = case p x of + Just y -> let (ys, zs) = spanMaybe p xs' in (y : ys, zs) + Nothing -> ([], xs) + +-- | 'unfoldr' with monadic action. 
+-- +-- >>> take 5 $ unfoldrM (\b r -> Just (r + b, b + 1)) (1 :: Int) 2 +-- [3,4,5,6,7] +-- +-- @since 2.2 +-- +unfoldrM :: Monad m => (b -> m (Maybe (a, b))) -> b -> m [a] +unfoldrM f = go where + go b = do + m <- f b + case m of + Nothing -> return [] + Just (a, b') -> liftM (a :) (go b') + +-- ------------------------------------------------------------ +-- * FilePath stuff +-- ------------------------------------------------------------ + +-- | 'isAbsoluteOnAnyPlatform' and 'isRelativeOnAnyPlatform' are like +-- 'System.FilePath.isAbsolute' and 'System.FilePath.isRelative' but have +-- platform independent heuristics. +-- The System.FilePath exists in two versions, Windows and Posix. The two +-- versions don't agree on what is a relative path and we don't know if we're +-- given Windows or Posix paths. +-- This results in false positives when running on Posix and inspecting +-- Windows paths, like the hackage server does. +-- System.FilePath.Posix.isAbsolute \"C:\\hello\" == False +-- System.FilePath.Windows.isAbsolute \"/hello\" == False +-- This means that we would treat paths that start with \"/\" to be absolute. +-- On Posix they are indeed absolute, while on Windows they are not. +-- +-- The portable versions should be used when we might deal with paths that +-- are from another OS than the host OS. For example, the Hackage Server +-- deals with both Windows and Posix paths while performing the +-- PackageDescription checks. In contrast, when we run 'cabal configure' we +-- do expect the paths to be correct for our OS and we should not have to use +-- the platform independent heuristics. +isAbsoluteOnAnyPlatform :: FilePath -> Bool +-- C:\\directory +isAbsoluteOnAnyPlatform (drive:':':'\\':_) = isAlpha drive +-- UNC +isAbsoluteOnAnyPlatform ('\\':'\\':_) = True +-- Posix root +isAbsoluteOnAnyPlatform ('/':_) = True +isAbsoluteOnAnyPlatform _ = False + +-- | @isRelativeOnAnyPlatform = not . 
'isAbsoluteOnAnyPlatform'@ +isRelativeOnAnyPlatform :: FilePath -> Bool +isRelativeOnAnyPlatform = not . isAbsoluteOnAnyPlatform + +-- $setup +-- >>> import Data.Maybe +-- >>> import Text.Read diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/IOData.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/IOData.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/IOData.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/IOData.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,67 @@ +-- | @since 2.2.0 +module Distribution.Utils.IOData + ( -- * 'IOData' & 'IODataMode' type + IOData(..) + , IODataMode(..) + , null + , hGetContents + , hPutContents + ) where + +import qualified Data.ByteString.Lazy as BS +import Distribution.Compat.Prelude hiding (null) +import qualified Prelude +import qualified System.IO + +-- | Represents either textual or binary data passed via I/O functions +-- which support binary/text mode +-- +-- @since 2.2.0 +data IOData = IODataText String + -- ^ How Text gets encoded is usually locale-dependent. + | IODataBinary BS.ByteString + -- ^ Raw binary which gets read/written in binary mode. + +-- | Test whether 'IOData' is empty +-- +-- @since 2.2.0 +null :: IOData -> Bool +null (IODataText s) = Prelude.null s +null (IODataBinary b) = BS.null b + +instance NFData IOData where + rnf (IODataText s) = rnf s + rnf (IODataBinary bs) = rnf bs + +data IODataMode = IODataModeText | IODataModeBinary + +-- | 'IOData' Wrapper for 'System.IO.hGetContents' +-- +-- __Note__: This operation uses lazy I/O. Use 'NFData' to force all +-- data to be read and consequently the internal file handle to be +-- closed. 
+-- +-- @since 2.2.0 +hGetContents :: System.IO.Handle -> IODataMode -> Prelude.IO IOData +hGetContents h IODataModeText = do + System.IO.hSetBinaryMode h False + IODataText <$> System.IO.hGetContents h +hGetContents h IODataModeBinary = do + System.IO.hSetBinaryMode h True + IODataBinary <$> BS.hGetContents h + +-- | 'IOData' Wrapper for 'System.IO.hPutStr' and 'System.IO.hClose' +-- +-- This is the dual operation ot 'ioDataHGetContents', +-- and consequently the handle is closed with `hClose`. +-- +-- @since 2.2.0 +hPutContents :: System.IO.Handle -> IOData -> Prelude.IO () +hPutContents h (IODataText c) = do + System.IO.hSetBinaryMode h False + System.IO.hPutStr h c + System.IO.hClose h +hPutContents h (IODataBinary c) = do + System.IO.hSetBinaryMode h True + BS.hPutStr h c + System.IO.hClose h diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/LogProgress.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/LogProgress.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/LogProgress.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/LogProgress.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,89 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE Rank2Types #-} +{-# LANGUAGE FlexibleContexts #-} +module Distribution.Utils.LogProgress ( + LogProgress, + runLogProgress, + warnProgress, + infoProgress, + dieProgress, + addProgressCtx, +) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Utils.Progress +import Distribution.Verbosity +import Distribution.Simple.Utils +import Text.PrettyPrint + +type CtxMsg = Doc +type LogMsg = Doc +type ErrMsg = Doc + +data LogEnv = LogEnv { + le_verbosity :: Verbosity, + le_context :: [CtxMsg] + } + +-- | The 'Progress' monad with specialized logging and +-- error messages. 
+newtype LogProgress a = LogProgress { unLogProgress :: LogEnv -> Progress LogMsg ErrMsg a } + +instance Functor LogProgress where + fmap f (LogProgress m) = LogProgress (fmap (fmap f) m) + +instance Applicative LogProgress where + pure x = LogProgress (pure (pure x)) + LogProgress f <*> LogProgress x = LogProgress $ \r -> f r `ap` x r + +instance Monad LogProgress where + return = pure + LogProgress m >>= f = LogProgress $ \r -> m r >>= \x -> unLogProgress (f x) r + +-- | Run 'LogProgress', outputting traces according to 'Verbosity', +-- 'die' if there is an error. +runLogProgress :: Verbosity -> LogProgress a -> NoCallStackIO a +runLogProgress verbosity (LogProgress m) = + foldProgress step_fn fail_fn return (m env) + where + env = LogEnv { + le_verbosity = verbosity, + le_context = [] + } + step_fn :: LogMsg -> NoCallStackIO a -> NoCallStackIO a + step_fn doc go = do + putStrLn (render doc) + go + fail_fn :: Doc -> NoCallStackIO a + fail_fn doc = do + dieNoWrap verbosity (render doc) + +-- | Output a warning trace message in 'LogProgress'. +warnProgress :: Doc -> LogProgress () +warnProgress s = LogProgress $ \env -> + when (le_verbosity env >= normal) $ + stepProgress $ + hang (text "Warning:") 4 (formatMsg (le_context env) s) + +-- | Output an informational trace message in 'LogProgress'. +infoProgress :: Doc -> LogProgress () +infoProgress s = LogProgress $ \env -> + when (le_verbosity env >= verbose) $ + stepProgress s + +-- | Fail the computation with an error message. +dieProgress :: Doc -> LogProgress a +dieProgress s = LogProgress $ \env -> + failProgress $ + hang (text "Error:") 4 (formatMsg (le_context env) s) + +-- | Format a message with context. (Something simple for now.) +formatMsg :: [CtxMsg] -> Doc -> Doc +formatMsg ctx doc = doc $$ vcat ctx + +-- | Add a message to the error/warning context. 
+addProgressCtx :: CtxMsg -> LogProgress a -> LogProgress a +addProgressCtx s (LogProgress m) = LogProgress $ \env -> + m env { le_context = s : le_context env } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/MapAccum.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/MapAccum.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/MapAccum.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/MapAccum.hs 2018-11-26 08:42:49.000000000 +0000 @@ -0,0 +1,34 @@ +{-# LANGUAGE CPP #-} +module Distribution.Utils.MapAccum (mapAccumM) where + +import Distribution.Compat.Prelude +import Prelude () + +-- Like StateT but with return tuple swapped +newtype StateM s m a = StateM { runStateM :: s -> m (s, a) } + +instance Functor m => Functor (StateM s m) where + fmap f (StateM x) = StateM $ \s -> fmap (\(s', a) -> (s', f a)) (x s) + +instance +#if __GLASGOW_HASKELL__ < 709 + (Functor m, Monad m) +#else + Monad m +#endif + => Applicative (StateM s m) where + pure x = StateM $ \s -> return (s, x) + StateM f <*> StateM x = StateM $ \s -> do (s', f') <- f s + (s'', x') <- x s' + return (s'', f' x') + +-- | Monadic variant of 'mapAccumL'. 
+mapAccumM :: +#if __GLASGOW_HASKELL__ < 709 + (Functor m, Monad m, Traversable t) +#else + (Monad m, Traversable t) +#endif + => (a -> b -> m (a, c)) -> a -> t b -> m (a, t c) +mapAccumM f s t = runStateM (traverse (\x -> StateM (\s' -> f s' x)) t) s + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/NubList.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/NubList.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/NubList.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/NubList.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,104 @@ +{-# LANGUAGE DeriveDataTypeable #-} +module Distribution.Utils.NubList + ( NubList -- opaque + , toNubList -- smart construtor + , fromNubList + , overNubList + + , NubListR + , toNubListR + , fromNubListR + , overNubListR + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.Simple.Utils + +import qualified Text.Read as R + +-- | NubList : A de-duplicated list that maintains the original order. +newtype NubList a = + NubList { fromNubList :: [a] } + deriving (Eq, Typeable) + +-- NubList assumes that nub retains the list order while removing duplicate +-- elements (keeping the first occurence). Documentation for "Data.List.nub" +-- does not specifically state that ordering is maintained so we will add a test +-- for that to the test suite. + +-- | Smart constructor for the NubList type. +toNubList :: Ord a => [a] -> NubList a +toNubList list = NubList $ ordNub list + +-- | Lift a function over lists to a function over NubLists. +overNubList :: Ord a => ([a] -> [a]) -> NubList a -> NubList a +overNubList f (NubList list) = toNubList . f $ list + +-- | Monoid operations on NubLists. +-- For a valid Monoid instance we need to satistfy the required monoid laws; +-- identity, associativity and closure. 
+-- +-- Identity : by inspection: +-- mempty `mappend` NubList xs == NubList xs `mappend` mempty +-- +-- Associativity : by inspection: +-- (NubList xs `mappend` NubList ys) `mappend` NubList zs +-- == NubList xs `mappend` (NubList ys `mappend` NubList zs) +-- +-- Closure : appending two lists of type a and removing duplicates obviously +-- does not change the type. + +instance Ord a => Monoid (NubList a) where + mempty = NubList [] + mappend = (<>) + +instance Ord a => Semigroup (NubList a) where + (NubList xs) <> (NubList ys) = NubList $ xs `listUnion` ys + +instance Show a => Show (NubList a) where + show (NubList list) = show list + +instance (Ord a, Read a) => Read (NubList a) where + readPrec = readNubList toNubList + +-- | Helper used by NubList/NubListR's Read instances. +readNubList :: (Read a) => ([a] -> l a) -> R.ReadPrec (l a) +readNubList toList = R.parens . R.prec 10 $ fmap toList R.readPrec + +-- | Binary instance for 'NubList a' is the same as for '[a]'. For 'put', we +-- just pull off constructor and put the list. For 'get', we get the list and +-- make a 'NubList' out of it using 'toNubList'. +instance (Ord a, Binary a) => Binary (NubList a) where + put (NubList l) = put l + get = fmap toNubList get + +-- | NubListR : A right-biased version of 'NubList'. That is @toNubListR +-- ["-XNoFoo", "-XFoo", "-XNoFoo"]@ will result in @["-XFoo", "-XNoFoo"]@, +-- unlike the normal 'NubList', which is left-biased. Built on top of +-- 'ordNubRight' and 'listUnionRight'. +newtype NubListR a = + NubListR { fromNubListR :: [a] } + deriving Eq + +-- | Smart constructor for the NubListR type. +toNubListR :: Ord a => [a] -> NubListR a +toNubListR list = NubListR $ ordNubRight list + +-- | Lift a function over lists to a function over NubListRs. +overNubListR :: Ord a => ([a] -> [a]) -> NubListR a -> NubListR a +overNubListR f (NubListR list) = toNubListR . 
f $ list + +instance Ord a => Monoid (NubListR a) where + mempty = NubListR [] + mappend = (<>) + +instance Ord a => Semigroup (NubListR a) where + (NubListR xs) <> (NubListR ys) = NubListR $ xs `listUnionRight` ys + +instance Show a => Show (NubListR a) where + show (NubListR list) = show list + +instance (Ord a, Read a) => Read (NubListR a) where + readPrec = readNubList toNubListR diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/Progress.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/Progress.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/Progress.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/Progress.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,67 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveFunctor #-} +-- Note: This module was copied from cabal-install. + +-- | A progress monad, which we use to report failure and logging from +-- otherwise pure code. +module Distribution.Utils.Progress + ( Progress + , stepProgress + , failProgress + , foldProgress + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import qualified Data.Monoid as Mon + + +-- | A type to represent the unfolding of an expensive long running +-- calculation that may fail (or maybe not expensive, but complicated!) +-- We may get intermediate steps before the final +-- result which may be used to indicate progress and\/or logging messages. +-- +-- TODO: Apply Codensity to avoid left-associativity problem. +-- See http://comonad.com/reader/2011/free-monads-for-less/ and +-- http://blog.ezyang.com/2012/01/problem-set-the-codensity-transformation/ +-- +data Progress step fail done = Step step (Progress step fail done) + | Fail fail + | Done done + deriving (Functor) + +-- | Emit a step and then continue. 
+-- +stepProgress :: step -> Progress step fail () +stepProgress step = Step step (Done ()) + +-- | Fail the computation. +failProgress :: fail -> Progress step fail done +failProgress err = Fail err + +-- | Consume a 'Progress' calculation. Much like 'foldr' for lists but with two +-- base cases, one for a final result and one for failure. +-- +-- Eg to convert into a simple 'Either' result use: +-- +-- > foldProgress (flip const) Left Right +-- +foldProgress :: (step -> a -> a) -> (fail -> a) -> (done -> a) + -> Progress step fail done -> a +foldProgress step err done = fold + where fold (Step s p) = step s (fold p) + fold (Fail f) = err f + fold (Done r) = done r + +instance Monad (Progress step fail) where + return = pure + p >>= f = foldProgress Step Fail f p + +instance Applicative (Progress step fail) where + pure a = Done a + p <*> x = foldProgress Step Fail (flip fmap x) p + +instance Monoid fail => Alternative (Progress step fail) where + empty = Fail Mon.mempty + p <|> q = foldProgress Step (const q) Done p diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/ShortText.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/ShortText.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/ShortText.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/ShortText.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,111 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +module Distribution.Utils.ShortText + ( -- * 'ShortText' type + ShortText + , toShortText + , fromShortText + + -- * internal utilities + , decodeStringUtf8 + , encodeStringUtf8 + ) where + +import Prelude () +import Distribution.Compat.Prelude +import Distribution.Utils.String + +#if defined(MIN_VERSION_bytestring) +# if MIN_VERSION_bytestring(0,10,4) +# define HAVE_SHORTBYTESTRING 1 
+# endif +#endif + +-- Hack for GHC bootstrapping +-- +-- Currently (as of GHC 8.1), GHC bootstraps Cabal by building +-- binary and Cabal in one giant ghc --make command. This +-- means no MIN_VERSION_binary macro is available. +-- +-- We could try to cleverly figure something out in this case, +-- but there is a better plan: just use the unoptimized version +-- of the Binary instance. We're not going to use it for anything +-- real in any case. +-- +-- WARNING: Don't use MIN_VERSION_binary to smooth over a BC-break! +-- +#ifndef MIN_VERSION_binary +#define MIN_VERSION_binary(x, y, z) 0 +#endif + +#if HAVE_SHORTBYTESTRING +import qualified Data.ByteString.Short as BS.Short +#endif + +-- | Construct 'ShortText' from 'String' +toShortText :: String -> ShortText + +-- | Convert 'ShortText' to 'String' +fromShortText :: ShortText -> String + +-- | Compact representation of short 'Strings' +-- +-- The data is stored internally as UTF8 in an +-- 'BS.Short.ShortByteString' when compiled against @bytestring >= +-- 0.10.4@, and otherwise the fallback is to use plain old non-compat +-- '[Char]'. +-- +-- Note: This type is for internal uses (such as e.g. 'PackageName') +-- and shall not be exposed in Cabal's API +-- +-- @since 2.0.0.2 +#if HAVE_SHORTBYTESTRING +newtype ShortText = ST { unST :: BS.Short.ShortByteString } + deriving (Eq,Ord,Generic,Data,Typeable) + +# if MIN_VERSION_binary(0,8,1) +instance Binary ShortText where + put = put . unST + get = fmap ST get +# else +instance Binary ShortText where + put = put . BS.Short.fromShort . unST + get = fmap (ST . BS.Short.toShort) get +# endif + +toShortText = ST . BS.Short.pack . encodeStringUtf8 + +fromShortText = decodeStringUtf8 . BS.Short.unpack . unST +#else +newtype ShortText = ST { unST :: String } + deriving (Eq,Ord,Generic,Data,Typeable) + +instance Binary ShortText where + put = put . encodeStringUtf8 . unST + get = fmap (ST . 
decodeStringUtf8) get + +toShortText = ST + +fromShortText = unST +#endif + +instance NFData ShortText where + rnf = rnf . unST + +instance Show ShortText where + show = show . fromShortText + +instance Read ShortText where + readsPrec p = map (first toShortText) . readsPrec p + +instance Semigroup ShortText where + ST a <> ST b = ST (mappend a b) + +instance Monoid ShortText where + mempty = ST mempty + mappend = (<>) + +instance IsString ShortText where + fromString = toShortText diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/String.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/String.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/String.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/String.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,93 @@ +module Distribution.Utils.String + ( -- * Encode to/from UTF8 + decodeStringUtf8 + , encodeStringUtf8 + ) where + +import Data.Word +import Data.Bits +import Data.Char (chr,ord) + +-- | Decode 'String' from UTF8-encoded octets. +-- +-- Invalid data in the UTF8 stream (this includes code-points @U+D800@ +-- through @U+DFFF@) will be decoded as the replacement character (@U+FFFD@). +-- +-- See also 'encodeStringUtf8' +decodeStringUtf8 :: [Word8] -> String +decodeStringUtf8 = go + where + go :: [Word8] -> String + go [] = [] + go (c : cs) + | c <= 0x7F = chr (fromIntegral c) : go cs + | c <= 0xBF = replacementChar : go cs + | c <= 0xDF = twoBytes c cs + | c <= 0xEF = moreBytes 3 0x800 cs (fromIntegral $ c .&. 0xF) + | c <= 0xF7 = moreBytes 4 0x10000 cs (fromIntegral $ c .&. 0x7) + | c <= 0xFB = moreBytes 5 0x200000 cs (fromIntegral $ c .&. 0x3) + | c <= 0xFD = moreBytes 6 0x4000000 cs (fromIntegral $ c .&. 0x1) + | otherwise = replacementChar : go cs + + twoBytes :: Word8 -> [Word8] -> String + twoBytes c0 (c1:cs') + | c1 .&. 
0xC0 == 0x80 + = let d = (fromIntegral (c0 .&. 0x1F) `shiftL` 6) + .|. fromIntegral (c1 .&. 0x3F) + in if d >= 0x80 + then chr d : go cs' + else replacementChar : go cs' + twoBytes _ cs' = replacementChar : go cs' + + moreBytes :: Int -> Int -> [Word8] -> Int -> [Char] + moreBytes 1 overlong cs' acc + | overlong <= acc, acc <= 0x10FFFF, acc < 0xD800 || 0xDFFF < acc + = chr acc : go cs' + + | otherwise + = replacementChar : go cs' + + moreBytes byteCount overlong (cn:cs') acc + | cn .&. 0xC0 == 0x80 + = moreBytes (byteCount-1) overlong cs' + ((acc `shiftL` 6) .|. fromIntegral cn .&. 0x3F) + + moreBytes _ _ cs' _ + = replacementChar : go cs' + + replacementChar = '\xfffd' + + +-- | Encode 'String' to a list of UTF8-encoded octets +-- +-- Code-points in the @U+D800@-@U+DFFF@ range will be encoded +-- as the replacement character (i.e. @U+FFFD@). +-- +-- See also 'decodeUtf8' +encodeStringUtf8 :: String -> [Word8] +encodeStringUtf8 [] = [] +encodeStringUtf8 (c:cs) + | c <= '\x07F' = w8 + : encodeStringUtf8 cs + | c <= '\x7FF' = (0xC0 .|. w8ShiftR 6 ) + : (0x80 .|. (w8 .&. 0x3F)) + : encodeStringUtf8 cs + | c <= '\xD7FF'= (0xE0 .|. w8ShiftR 12 ) + : (0x80 .|. (w8ShiftR 6 .&. 0x3F)) + : (0x80 .|. (w8 .&. 0x3F)) + : encodeStringUtf8 cs + | c <= '\xDFFF'= 0xEF : 0xBF : 0xBD -- U+FFFD + : encodeStringUtf8 cs + | c <= '\xFFFF'= (0xE0 .|. w8ShiftR 12 ) + : (0x80 .|. (w8ShiftR 6 .&. 0x3F)) + : (0x80 .|. (w8 .&. 0x3F)) + : encodeStringUtf8 cs + | otherwise = (0xf0 .|. w8ShiftR 18 ) + : (0x80 .|. (w8ShiftR 12 .&. 0x3F)) + : (0x80 .|. (w8ShiftR 6 .&. 0x3F)) + : (0x80 .|. (w8 .&. 0x3F)) + : encodeStringUtf8 cs + where + w8 = fromIntegral (ord c) :: Word8 + w8ShiftR :: Int -> Word8 + w8ShiftR = fromIntegral . 
shiftR (ord c) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/UnionFind.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/UnionFind.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Utils/UnionFind.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Utils/UnionFind.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,102 @@ +{-# LANGUAGE NondecreasingIndentation #-} +-- | A simple mutable union-find data structure. +-- +-- It is used in a unification algorithm for backpack mix-in linking. +-- +-- This implementation is based off of the one in \"The Essence of ML Type +-- Inference\". (N.B. the union-find package is also based off of this.) +-- +module Distribution.Utils.UnionFind ( + Point, + fresh, + find, + union, + equivalent, +) where + +import Data.STRef +import Control.Monad +import Control.Monad.ST + +-- | A variable which can be unified; alternately, this can be thought +-- of as an equivalence class with a distinguished representative. +newtype Point s a = Point (STRef s (Link s a)) + deriving (Eq) + +-- | Mutable write to a 'Point' +writePoint :: Point s a -> Link s a -> ST s () +writePoint (Point v) = writeSTRef v + +-- | Read the current value of 'Point'. +readPoint :: Point s a -> ST s (Link s a) +readPoint (Point v) = readSTRef v + +-- | The internal data structure for a 'Point', which either records +-- the representative element of an equivalence class, or a link to +-- the 'Point' that actually stores the representative type. +data Link s a + -- NB: it is too bad we can't say STRef Int#; the weights remain boxed + = Info {-# UNPACK #-} !(STRef s Int) {-# UNPACK #-} !(STRef s a) + | Link {-# UNPACK #-} !(Point s a) + +-- | Create a fresh equivalence class with one element. 
+fresh :: a -> ST s (Point s a) +fresh desc = do + weight <- newSTRef 1 + descriptor <- newSTRef desc + Point `fmap` newSTRef (Info weight descriptor) + +-- | Flatten any chains of links, returning a 'Point' +-- which points directly to the canonical representation. +repr :: Point s a -> ST s (Point s a) +repr point = readPoint point >>= \r -> + case r of + Link point' -> do + point'' <- repr point' + when (point'' /= point') $ do + writePoint point =<< readPoint point' + return point'' + Info _ _ -> return point + +-- | Return the canonical element of an equivalence +-- class 'Point'. +find :: Point s a -> ST s a +find point = + -- Optimize length 0 and 1 case at expense of + -- general case + readPoint point >>= \r -> + case r of + Info _ d_ref -> readSTRef d_ref + Link point' -> readPoint point' >>= \r' -> + case r' of + Info _ d_ref -> readSTRef d_ref + Link _ -> repr point >>= find + +-- | Unify two equivalence classes, so that they share +-- a canonical element. Keeps the descriptor of point2. +union :: Point s a -> Point s a -> ST s () +union refpoint1 refpoint2 = do + point1 <- repr refpoint1 + point2 <- repr refpoint2 + when (point1 /= point2) $ do + l1 <- readPoint point1 + l2 <- readPoint point2 + case (l1, l2) of + (Info wref1 dref1, Info wref2 dref2) -> do + weight1 <- readSTRef wref1 + weight2 <- readSTRef wref2 + -- Should be able to optimize the == case separately + if weight1 >= weight2 + then do + writePoint point2 (Link point1) + -- The weight calculation here seems a bit dodgy + writeSTRef wref1 (weight1 + weight2) + writeSTRef dref1 =<< readSTRef dref2 + else do + writePoint point1 (Link point2) + writeSTRef wref2 (weight1 + weight2) + _ -> error "UnionFind.union: repr invariant broken" + +-- | Test if two points are in the same equivalence class. 
+equivalent :: Point s a -> Point s a -> ST s Bool +equivalent point1 point2 = liftM2 (==) (repr point1) (repr point2) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Verbosity.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Verbosity.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Verbosity.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Verbosity.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,300 @@ +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Verbosity +-- Copyright : Ian Lynagh 2007 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- A 'Verbosity' type with associated utilities. +-- +-- There are 4 standard verbosity levels from 'silent', 'normal', +-- 'verbose' up to 'deafening'. This is used for deciding what logging +-- messages to print. +-- +-- Verbosity also is equipped with some internal settings which can be +-- used to control at a fine granularity the verbosity of specific +-- settings (e.g., so that you can trace only particular things you +-- are interested in.) It's important to note that the instances +-- for 'Verbosity' assume that this does not exist. + +-- Verbosity for Cabal functions. 
+ +module Distribution.Verbosity ( + -- * Verbosity + Verbosity, + silent, normal, verbose, deafening, + moreVerbose, lessVerbose, isVerboseQuiet, + intToVerbosity, flagToVerbosity, + showForCabal, showForGHC, + verboseNoFlags, verboseHasFlags, + modifyVerbosity, + + -- * Call stacks + verboseCallSite, verboseCallStack, + isVerboseCallSite, isVerboseCallStack, + + -- * Output markets + verboseMarkOutput, isVerboseMarkOutput, + verboseUnmarkOutput, + + -- * line-wrapping + verboseNoWrap, isVerboseNoWrap, + + -- * timestamps + verboseTimestamp, isVerboseTimestamp, + verboseNoTimestamp, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Distribution.ReadE +import Distribution.Compat.ReadP + +import Data.List (elemIndex) +import Data.Set (Set) +import qualified Data.Set as Set + +data Verbosity = Verbosity { + vLevel :: VerbosityLevel, + vFlags :: Set VerbosityFlag, + vQuiet :: Bool + } deriving (Generic) + +mkVerbosity :: VerbosityLevel -> Verbosity +mkVerbosity l = Verbosity { vLevel = l, vFlags = Set.empty, vQuiet = False } + +instance Show Verbosity where + showsPrec n = showsPrec n . vLevel + +instance Read Verbosity where + readsPrec n s = map (\(x,y) -> (mkVerbosity x,y)) (readsPrec n s) + +instance Eq Verbosity where + x == y = vLevel x == vLevel y + +instance Ord Verbosity where + compare x y = compare (vLevel x) (vLevel y) + +instance Enum Verbosity where + toEnum = mkVerbosity . toEnum + fromEnum = fromEnum . 
vLevel + +instance Bounded Verbosity where + minBound = mkVerbosity minBound + maxBound = mkVerbosity maxBound + +instance Binary Verbosity + +data VerbosityLevel = Silent | Normal | Verbose | Deafening + deriving (Generic, Show, Read, Eq, Ord, Enum, Bounded) + +instance Binary VerbosityLevel + +-- We shouldn't print /anything/ unless an error occurs in silent mode +silent :: Verbosity +silent = mkVerbosity Silent + +-- Print stuff we want to see by default +normal :: Verbosity +normal = mkVerbosity Normal + +-- Be more verbose about what's going on +verbose :: Verbosity +verbose = mkVerbosity Verbose + +-- Not only are we verbose ourselves (perhaps even noisier than when +-- being "verbose"), but we tell everything we run to be verbose too +deafening :: Verbosity +deafening = mkVerbosity Deafening + +moreVerbose :: Verbosity -> Verbosity +moreVerbose v = + case vLevel v of + Silent -> v -- silent should stay silent + Normal -> v { vLevel = Verbose } + Verbose -> v { vLevel = Deafening } + Deafening -> v + +lessVerbose :: Verbosity -> Verbosity +lessVerbose v = + verboseQuiet $ + case vLevel v of + Deafening -> v -- deafening stays deafening + Verbose -> v { vLevel = Normal } + Normal -> v { vLevel = Silent } + Silent -> v + +-- | Combinator for transforming verbosity level while retaining the +-- original hidden state. +-- +-- For instance, the following property holds +-- +-- prop> isVerboseNoWrap (modifyVerbosity (max verbose) v) == isVerboseNoWrap v +-- +-- __Note__: you can use @modifyVerbosity (const v1) v0@ to overwrite +-- @v1@'s flags with @v0@'s flags. 
+-- +-- @since 2.0.1.0 +modifyVerbosity :: (Verbosity -> Verbosity) -> Verbosity -> Verbosity +modifyVerbosity f v = v { vLevel = vLevel (f v) } + +intToVerbosity :: Int -> Maybe Verbosity +intToVerbosity 0 = Just (mkVerbosity Silent) +intToVerbosity 1 = Just (mkVerbosity Normal) +intToVerbosity 2 = Just (mkVerbosity Verbose) +intToVerbosity 3 = Just (mkVerbosity Deafening) +intToVerbosity _ = Nothing + +parseVerbosity :: ReadP r (Either Int Verbosity) +parseVerbosity = parseIntVerbosity <++ parseStringVerbosity + where + parseIntVerbosity = fmap Left (readS_to_P reads) + parseStringVerbosity = fmap Right $ do + level <- parseVerbosityLevel + _ <- skipSpaces + extras <- sepBy parseExtra skipSpaces + return (foldr (.) id extras (mkVerbosity level)) + parseVerbosityLevel = choice + [ string "silent" >> return Silent + , string "normal" >> return Normal + , string "verbose" >> return Verbose + , string "debug" >> return Deafening + , string "deafening" >> return Deafening + ] + parseExtra = char '+' >> choice + [ string "callsite" >> return verboseCallSite + , string "callstack" >> return verboseCallStack + , string "nowrap" >> return verboseNoWrap + , string "markoutput" >> return verboseMarkOutput + , string "timestamp" >> return verboseTimestamp + ] + +flagToVerbosity :: ReadE Verbosity +flagToVerbosity = ReadE $ \s -> + case readP_to_S (parseVerbosity >>= \r -> eof >> return r) s of + [(Left i, "")] -> + case intToVerbosity i of + Just v -> Right v + Nothing -> Left ("Bad verbosity: " ++ show i ++ + ". 
Valid values are 0..3") + [(Right v, "")] -> Right v + _ -> Left ("Can't parse verbosity " ++ s) + +showForCabal, showForGHC :: Verbosity -> String + +showForCabal v + | Set.null (vFlags v) + = maybe (error "unknown verbosity") show $ + elemIndex v [silent,normal,verbose,deafening] + | otherwise + = unwords $ (case vLevel v of + Silent -> "silent" + Normal -> "normal" + Verbose -> "verbose" + Deafening -> "debug") + : concatMap showFlag (Set.toList (vFlags v)) + where + showFlag VCallSite = ["+callsite"] + showFlag VCallStack = ["+callstack"] + showFlag VNoWrap = ["+nowrap"] + showFlag VMarkOutput = ["+markoutput"] + showFlag VTimestamp = ["+timestamp"] +showForGHC v = maybe (error "unknown verbosity") show $ + elemIndex v [silent,normal,__,verbose,deafening] + where __ = silent -- this will be always ignored by elemIndex + +data VerbosityFlag + = VCallStack + | VCallSite + | VNoWrap + | VMarkOutput + | VTimestamp + deriving (Generic, Show, Read, Eq, Ord, Enum, Bounded) + +instance Binary VerbosityFlag + +-- | Turn on verbose call-site printing when we log. +verboseCallSite :: Verbosity -> Verbosity +verboseCallSite = verboseFlag VCallSite + +-- | Turn on verbose call-stack printing when we log. +verboseCallStack :: Verbosity -> Verbosity +verboseCallStack = verboseFlag VCallStack + +-- | Turn on @-----BEGIN CABAL OUTPUT-----@ markers for output +-- from Cabal (as opposed to GHC, or system dependent). +verboseMarkOutput :: Verbosity -> Verbosity +verboseMarkOutput = verboseFlag VMarkOutput + +-- | Turn off marking; useful for suppressing nondeterministic output. +verboseUnmarkOutput :: Verbosity -> Verbosity +verboseUnmarkOutput = verboseNoFlag VMarkOutput + +-- | Disable line-wrapping for log messages. +verboseNoWrap :: Verbosity -> Verbosity +verboseNoWrap = verboseFlag VNoWrap + +-- | Mark the verbosity as quiet +verboseQuiet :: Verbosity -> Verbosity +verboseQuiet v = v { vQuiet = True } + +-- | Turn on timestamps for log messages. 
+verboseTimestamp :: Verbosity -> Verbosity +verboseTimestamp = verboseFlag VTimestamp + +-- | Turn off timestamps for log messages. +verboseNoTimestamp :: Verbosity -> Verbosity +verboseNoTimestamp = verboseNoFlag VTimestamp + +-- | Helper function for flag enabling functions +verboseFlag :: VerbosityFlag -> (Verbosity -> Verbosity) +verboseFlag flag v = v { vFlags = Set.insert flag (vFlags v) } + +-- | Helper function for flag disabling functions +verboseNoFlag :: VerbosityFlag -> (Verbosity -> Verbosity) +verboseNoFlag flag v = v { vFlags = Set.delete flag (vFlags v) } + +-- | Turn off all flags +verboseNoFlags :: Verbosity -> Verbosity +verboseNoFlags v = v { vFlags = Set.empty } + +verboseHasFlags :: Verbosity -> Bool +verboseHasFlags = not . Set.null . vFlags + +-- | Test if we should output call sites when we log. +isVerboseCallSite :: Verbosity -> Bool +isVerboseCallSite = isVerboseFlag VCallSite + +-- | Test if we should output call stacks when we log. +isVerboseCallStack :: Verbosity -> Bool +isVerboseCallStack = isVerboseFlag VCallStack + +-- | Test if we should output markets. +isVerboseMarkOutput :: Verbosity -> Bool +isVerboseMarkOutput = isVerboseFlag VMarkOutput + +-- | Test if line-wrapping is disabled for log messages. +isVerboseNoWrap :: Verbosity -> Bool +isVerboseNoWrap = isVerboseFlag VNoWrap + +-- | Test if we had called 'lessVerbose' on the verbosity +isVerboseQuiet :: Verbosity -> Bool +isVerboseQuiet = vQuiet + +-- | Test if if we should output timestamps when we log. +isVerboseTimestamp :: Verbosity -> Bool +isVerboseTimestamp = isVerboseFlag VTimestamp + +-- | Helper function for flag testing functions. +isVerboseFlag :: VerbosityFlag -> Verbosity -> Bool +isVerboseFlag flag = (Set.member flag) . 
vFlags + +-- $setup +-- >>> import Test.QuickCheck (Arbitrary (..), arbitraryBoundedEnum) +-- >>> instance Arbitrary VerbosityLevel where arbitrary = arbitraryBoundedEnum +-- >>> instance Arbitrary Verbosity where arbitrary = fmap mkVerbosity arbitrary diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Version.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Version.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Distribution/Version.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Distribution/Version.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,262 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Version +-- Copyright : Isaac Jones, Simon Marlow 2003-2004 +-- Duncan Coutts 2008 +-- License : BSD3 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Exports the 'Version' type along with a parser and pretty printer. A version +-- is something like @\"1.3.3\"@. It also defines the 'VersionRange' data +-- types. Version ranges are like @\">= 1.2 && < 2\"@. 
+ +module Distribution.Version ( + -- * Package versions + Version, + version0, + mkVersion, + mkVersion', + versionNumbers, + nullVersion, + alterVersion, + + -- ** Backwards compatibility + showVersion, + + -- * Version ranges + VersionRange(..), + + -- ** Constructing + anyVersion, noVersion, + thisVersion, notThisVersion, + laterVersion, earlierVersion, + orLaterVersion, orEarlierVersion, + unionVersionRanges, intersectVersionRanges, + differenceVersionRanges, + invertVersionRange, + withinVersion, + majorBoundVersion, + betweenVersionsInclusive, + + -- ** Inspection + withinRange, + isAnyVersion, + isNoVersion, + isSpecificVersion, + simplifyVersionRange, + foldVersionRange, + foldVersionRange', + normaliseVersionRange, + stripParensVersionRange, + hasUpperBound, + hasLowerBound, + + -- ** Cata & ana + VersionRangeF (..), + cataVersionRange, + anaVersionRange, + hyloVersionRange, + projectVersionRange, + embedVersionRange, + + -- ** Utilities + wildcardUpperBound, + majorUpperBound, + + -- ** Modification + removeUpperBound, + removeLowerBound, + + -- * Version intervals view + asVersionIntervals, + VersionInterval, + LowerBound(..), + UpperBound(..), + Bound(..), + + -- ** 'VersionIntervals' abstract type + -- | The 'VersionIntervals' type and the accompanying functions are exposed + -- primarily for completeness and testing purposes. In practice + -- 'asVersionIntervals' is the main function to use to + -- view a 'VersionRange' as a bunch of 'VersionInterval's. 
+ -- + VersionIntervals, + toVersionIntervals, + fromVersionIntervals, + withinIntervals, + versionIntervals, + mkVersionIntervals, + unionVersionIntervals, + intersectVersionIntervals, + invertVersionIntervals + + ) where + +import Distribution.Types.Version +import Distribution.Types.VersionRange +import Distribution.Types.VersionInterval + +------------------------------------------------------------------------------- +-- Utilities on VersionRange requiring VersionInterval +------------------------------------------------------------------------------- + +-- | Does this 'VersionRange' place any restriction on the 'Version' or is it +-- in fact equivalent to 'AnyVersion'. +-- +-- Note this is a semantic check, not simply a syntactic check. So for example +-- the following is @True@ (for all @v@). +-- +-- > isAnyVersion (EarlierVersion v `UnionVersionRanges` orLaterVersion v) +-- +isAnyVersion :: VersionRange -> Bool +isAnyVersion vr = case asVersionIntervals vr of + [(LowerBound v InclusiveBound, NoUpperBound)] | isVersion0 v -> True + _ -> False + where + isVersion0 :: Version -> Bool + isVersion0 = (== mkVersion [0]) + + +-- | This is the converse of 'isAnyVersion'. It check if the version range is +-- empty, if there is no possible version that satisfies the version range. +-- +-- For example this is @True@ (for all @v@): +-- +-- > isNoVersion (EarlierVersion v `IntersectVersionRanges` LaterVersion v) +-- +isNoVersion :: VersionRange -> Bool +isNoVersion vr = case asVersionIntervals vr of + [] -> True + _ -> False + +-- | Is this version range in fact just a specific version? +-- +-- For example the version range @\">= 3 && <= 3\"@ contains only the version +-- @3@. +-- +isSpecificVersion :: VersionRange -> Maybe Version +isSpecificVersion vr = case asVersionIntervals vr of + [(LowerBound v InclusiveBound + ,UpperBound v' InclusiveBound)] + | v == v' -> Just v + _ -> Nothing + +-- | Simplify a 'VersionRange' expression. 
For non-empty version ranges +-- this produces a canonical form. Empty or inconsistent version ranges +-- are left as-is because that provides more information. +-- +-- If you need a canonical form use +-- @fromVersionIntervals . toVersionIntervals@ +-- +-- It satisfies the following properties: +-- +-- > withinRange v (simplifyVersionRange r) = withinRange v r +-- +-- > withinRange v r = withinRange v r' +-- > ==> simplifyVersionRange r = simplifyVersionRange r' +-- > || isNoVersion r +-- > || isNoVersion r' +-- +simplifyVersionRange :: VersionRange -> VersionRange +simplifyVersionRange vr + -- If the version range is inconsistent then we just return the + -- original since that has more information than ">1 && < 1", which + -- is the canonical inconsistent version range. + | null (versionIntervals vi) = vr + | otherwise = fromVersionIntervals vi + where + vi = toVersionIntervals vr + +-- | The difference of two version ranges +-- +-- > withinRange v' (differenceVersionRanges vr1 vr2) +-- > = withinRange v' vr1 && not (withinRange v' vr2) +-- +-- @since 1.24.1.0 +differenceVersionRanges :: VersionRange -> VersionRange -> VersionRange +differenceVersionRanges vr1 vr2 = + intersectVersionRanges vr1 (invertVersionRange vr2) + +-- | The inverse of a version range +-- +-- > withinRange v' (invertVersionRange vr) +-- > = not (withinRange v' vr) +-- +invertVersionRange :: VersionRange -> VersionRange +invertVersionRange = + fromVersionIntervals . invertVersionIntervals . toVersionIntervals + +-- | Given a version range, remove the highest upper bound. Example: @(>= 1 && < +-- 3) || (>= 4 && < 5)@ is converted to @(>= 1 && < 3) || (>= 4)@. +removeUpperBound :: VersionRange -> VersionRange +removeUpperBound = fromVersionIntervals . relaxLastInterval . toVersionIntervals + +-- | Given a version range, remove the lowest lower bound. +-- Example: @(>= 1 && < 3) || (>= 4 && < 5)@ is converted to +-- @(>= 0 && < 3) || (>= 4 && < 5)@. 
+removeLowerBound :: VersionRange -> VersionRange +removeLowerBound = fromVersionIntervals . relaxHeadInterval . toVersionIntervals + +------------------------------------------------------------------------------- +-- Deprecated +------------------------------------------------------------------------------- + +-- In practice this is not very useful because we normally use inclusive lower +-- bounds and exclusive upper bounds. +-- +-- > withinRange v' (laterVersion v) = v' > v +-- +betweenVersionsInclusive :: Version -> Version -> VersionRange +betweenVersionsInclusive v1 v2 = + intersectVersionRanges (orLaterVersion v1) (orEarlierVersion v2) + +{-# DEPRECATED betweenVersionsInclusive + "In practice this is not very useful because we normally use inclusive lower bounds and exclusive upper bounds" #-} + + + + +-- | An extended variant of 'foldVersionRange' that also provides a view of the +-- expression in which the syntactic sugar @\">= v\"@, @\"<= v\"@ and @\"== +-- v.*\"@ is presented explicitly rather than in terms of the other basic +-- syntax. +-- +foldVersionRange' :: a -- ^ @\"-any\"@ version + -> (Version -> a) -- ^ @\"== v\"@ + -> (Version -> a) -- ^ @\"> v\"@ + -> (Version -> a) -- ^ @\"< v\"@ + -> (Version -> a) -- ^ @\">= v\"@ + -> (Version -> a) -- ^ @\"<= v\"@ + -> (Version -> Version -> a) -- ^ @\"== v.*\"@ wildcard. The + -- function is passed the + -- inclusive lower bound and the + -- exclusive upper bounds of the + -- range defined by the wildcard. + -> (Version -> Version -> a) -- ^ @\"^>= v\"@ major upper bound + -- The function is passed the + -- inclusive lower bound and the + -- exclusive major upper bounds + -- of the range defined by this + -- operator. + -> (a -> a -> a) -- ^ @\"_ || _\"@ union + -> (a -> a -> a) -- ^ @\"_ && _\"@ intersection + -> (a -> a) -- ^ @\"(_)\"@ parentheses + -> VersionRange -> a +foldVersionRange' anyv this later earlier orLater orEarlier + wildcard major union intersect parens = + cataVersionRange alg . 
normaliseVersionRange + where + alg AnyVersionF = anyv + alg (ThisVersionF v) = this v + alg (LaterVersionF v) = later v + alg (EarlierVersionF v) = earlier v + alg (OrLaterVersionF v) = orLater v + alg (OrEarlierVersionF v) = orEarlier v + alg (WildcardVersionF v) = wildcard v (wildcardUpperBound v) + alg (MajorBoundVersionF v) = major v (majorUpperBound v) + alg (UnionVersionRangesF v1 v2) = union v1 v2 + alg (IntersectVersionRangesF v1 v2) = intersect v1 v2 + alg (VersionRangeParensF v) = parens v +{-# DEPRECATED foldVersionRange' "Use cataVersionRange & normaliseVersionRange for more principled folding" #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/bugs-and-stability.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/bugs-and-stability.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/bugs-and-stability.rst 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/bugs-and-stability.rst 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,6 @@ +Reporting Bugs and Stability of Cabal Interfaces +================================================ + +.. toctree:: + misc + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/concepts-and-development.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/concepts-and-development.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/concepts-and-development.rst 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/concepts-and-development.rst 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,7 @@ +Package Concepts and Development +================================ + +.. 
toctree:: + :maxdepth: 2 + + developing-packages diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/config-and-install.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/config-and-install.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/config-and-install.rst 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/config-and-install.rst 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,5 @@ +Configuration and Installing Packages +===================================== + +.. toctree:: + installing-packages diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/conf.py cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/conf.py --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/conf.py 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/conf.py 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +# +# GHC Users Guide documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +import sys +import os +import sphinx_rtd_theme + +# Support for :base-ref:, etc. +sys.path.insert(0, os.path.abspath('.')) +import cabaldomain + +version = "2.4.1.0" + +extensions = ['sphinx.ext.extlinks', 'sphinx.ext.todo'] + +templates_path = ['_templates'] +source_suffix = '.rst' +source_encoding = 'utf-8-sig' +master_doc = 'index' + +# extlinks -- see http://www.sphinx-doc.org/en/stable/ext/extlinks.html +extlinks = { + 'issue': ('https://github.com/haskell/cabal/issues/%s', '#'), + + 'ghc-wiki': ('http://ghc.haskell.org/trac/ghc/wiki/%s', ''), + 'ghc-ticket': ('http://ghc.haskell.org/trac/ghc/ticket/%s', 'GHC #'), + + 'hackage-pkg': ('http://hackage.haskell.org/package/%s', ''), +} + +# General information about the project. 
+project = u'Cabal' +copyright = u'2003-2017, Cabal Team' +# N.B. version comes from ghc_config +release = version # The full version, including alpha/beta/rc tags. + +# Syntax highlighting +highlight_language = 'cabal' +#pygments_style = 'tango' + +primary_domain = 'cabal' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['.build', "*.gen.rst"] + +# -- Options for HTML output --------------------------------------------- + +# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org +on_rtd = os.environ.get('READTHEDOCS', None) == 'True' + +if not on_rtd: # only import and set the theme if we're building docs locally + import sphinx_rtd_theme + html_theme = 'sphinx_rtd_theme' + html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +html_title = "Cabal User's Guide" +html_short_title = "Cabal %s User's Guide" % release +html_logo = 'images/Cabal-dark.png' +html_static_path = ['images'] +# Convert quotes and dashes to typographically correct entities +html_use_smartypants = True +html_show_copyright = True +html_context = { + 'source_url_prefix': "https://github.com/haskell/cabal/tree/master/Cabal/doc/", + "display_github": True, + "github_host": "github.com", + "github_user": "haskell", + "github_repo": 'cabal', + "github_version": "master/", + "conf_py_path": "Cabal/doc/", + "source_suffix": '.rst', +} + + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. 
+htmlhelp_basename = 'CabalUsersGuide' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + 'inputenc': '', + 'utf8extra': '', + 'preamble': ''' +\usepackage{fontspec} +\usepackage{makeidx} +\setsansfont{DejaVu Sans} +\setromanfont{DejaVu Serif} +\setmonofont{DejaVu Sans Mono} +''', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ('index', 'users_guide.tex', u'GHC Users Guide Documentation', + u'GHC Team', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +latex_logo = 'images/logo.pdf' + +# If true, show page references after internal links. +latex_show_pagerefs = True + +# http://www.sphinx-doc.org/en/master/usage/extensions/todo.html +todo_include_todos = True + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('cabal', 'cabal', 'The Haskell Cabal', 'The Cabal Team', 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'CabalUsersGuide', u'Cabal Users Guide', + u'Cabal Team', 'CabalUsersGuide', 'The Haskell Cabal.', + 'Compilers'), +] + +from sphinx import addnodes +from docutils import nodes + +def parse_ghci_cmd(env, sig, signode): + name = sig.split(';')[0] + sig = sig.replace(';', '') + signode += addnodes.desc_name(name, sig) + return name + +def parse_flag(env, sig, signode): + import re + names = [] + for i, flag in enumerate(sig.split(',')): + flag = flag.strip() + sep = '=' + parts = flag.split('=') + if len(parts) == 1: + sep=' ' + parts = flag.split() + if len(parts) == 0: continue + + name = parts[0] + names.append(name) + sig = sep + ' '.join(parts[1:]) + sig = re.sub(ur'<([-a-zA-Z ]+)>', ur'⟨\1⟩', sig) + if i > 0: + signode += addnodes.desc_name(', ', ', ') + signode += addnodes.desc_name(name, name) + if len(sig) > 0: + signode += addnodes.desc_addname(sig, sig) + + return names[0] + +def setup(app): + from sphinx.util.docfields import Field, TypedField + + increase_python_stack() + + # the :ghci-cmd: directive used in ghci.rst + app.add_object_type('ghci-cmd', 'ghci-cmd', + parse_node=parse_ghci_cmd, + objname='GHCi command', + indextemplate='pair: %s; GHCi command') + + app.add_object_type('ghc-flag', 'ghc-flag', + objname='GHC command-line option', + parse_node=parse_flag, + indextemplate='pair: %s; GHC option', + doc_field_types=[ + Field('since', label='Introduced in GHC version', names=['since']), + Field('default', label='Default value', names=['default']), + Field('static') + ]) + + app.add_object_type('rts-flag', 'rts-flag', + objname='runtime system command-line option', + parse_node=parse_flag, + indextemplate='pair: %s; RTS option', + doc_field_types=[ + Field('since', label='Introduced in GHC version', names=['since']), + ]) + + cabaldomain.setup(app) + +def increase_python_stack(): + # Workaround sphinx-build 
recursion limit overflow: + # pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL) + # RuntimeError: maximum recursion depth exceeded while pickling an object + # + # Default python allows recursion depth of 1000 calls. + sys.setrecursionlimit(10000) + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/developing-packages.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/developing-packages.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/developing-packages.rst 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/developing-packages.rst 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,3480 @@ +Quickstart +========== + +Lets assume we have created a project directory and already have a +Haskell module or two. + +Every project needs a name, we'll call this example "proglet". + +.. highlight:: console + +:: + + $ cd proglet/ + $ ls + Proglet.hs + +It is assumed that (apart from external dependencies) all the files that +make up a package live under a common project root directory. This +simple example has all the project files in one directory, but most +packages will use one or more subdirectories. + +To turn this into a Cabal package we need two extra files in the +project's root directory: + +- ``proglet.cabal``: containing package metadata and build information. + +- ``Setup.hs``: usually containing a few standardized lines of code, + but can be customized if necessary. + +We can create both files manually or we can use ``cabal init`` to create +them for us. + +Using "cabal init" +------------------ + +The ``cabal init`` command is interactive. It asks us a number of +questions starting with the package name and version. + +:: + + $ cabal init + Package name [default "proglet"]? + Package version [default "0.1"]? + ... + +It also asks questions about various other bits of package metadata. 
For +a package that you never intend to distribute to others, these fields +can be left blank. + +One of the important questions is whether the package contains a library +or an executable. Libraries are collections of Haskell modules that can +be re-used by other Haskell libraries and programs, while executables +are standalone programs. + +:: + + What does the package build: + 1) Library + 2) Executable + Your choice? + +For the moment these are the only choices. For more complex packages +(e.g. a library and multiple executables or test suites) the ``.cabal`` +file can be edited afterwards. + +Finally, ``cabal init`` creates the initial ``proglet.cabal`` and +``Setup.hs`` files, and depending on your choice of license, a +``LICENSE`` file as well. + +:: + + Generating LICENSE... + Generating Setup.hs... + Generating proglet.cabal... + + You may want to edit the .cabal file and add a Description field. + +As this stage the ``proglet.cabal`` is not quite complete and before you +are able to build the package you will need to edit the file and add +some build information about the library or executable. + +Editing the .cabal file +----------------------- + +.. highlight:: cabal + +Load up the ``.cabal`` file in a text editor. The first part of the +``.cabal`` file has the package metadata and towards the end of the file +you will find the :pkg-section:`executable` or :pkg-section:`library` section. + +You will see that the fields that have yet to be filled in are commented +out. Cabal files use "``--``" Haskell-style comment syntax. (Note that +comments are only allowed on lines on their own. Trailing comments on +other lines are not allowed because they could be confused with program +options.) 
+ +If you selected earlier to create a library package then your ``.cabal`` +file will have a section that looks like this: + +:: + + library + exposed-modules: Proglet + -- other-modules: + -- build-depends: + +Alternatively, if you selected an executable then there will be a +section like: + +:: + + executable proglet + -- main-is: + -- other-modules: + -- build-depends: + +The build information fields listed (but commented out) are just the few +most important and common fields. There are many others that are covered +later in this chapter. + +Most of the build information fields are the same between libraries and +executables. The difference is that libraries have a number of "exposed" +modules that make up the public interface of the library, while +executables have a file containing a ``Main`` module. + +The name of a library always matches the name of the package, so it is +not specified in the library section. Executables often follow the name +of the package too, but this is not required and the name is given +explicitly. + +Modules included in the package +------------------------------- + +For a library, ``cabal init`` looks in the project directory for files +that look like Haskell modules and adds all the modules to the +:pkg-field:`library:exposed-modules` field. For modules that do not form part +of your package's public interface, you can move those modules to the +:pkg-field:`other-modules` field. Either way, all modules in the library need +to be listed. + +For an executable, ``cabal init`` does not try to guess which file +contains your program's ``Main`` module. You will need to fill in the +:pkg-field:`executable:main-is` field with the file name of your program's +``Main`` module (including ``.hs`` or ``.lhs`` extension). Other modules +included in the executable should be listed in the :pkg-field:`other-modules` +field. 
+ +Modules imported from other packages +------------------------------------ + +While your library or executable may include a number of modules, it +almost certainly also imports a number of external modules from the +standard libraries or other pre-packaged libraries. (These other +libraries are of course just Cabal packages that contain a library.) + +You have to list all of the library packages that your library or +executable imports modules from. Or to put it another way: you have to +list all the other packages that your package depends on. + +For example, suppose the example ``Proglet`` module imports the module +``Data.Map``. The ``Data.Map`` module comes from the ``containers`` +package, so we must list it: + +:: + + library + exposed-modules: Proglet + other-modules: + build-depends: containers, base == 4.* + +In addition, almost every package also depends on the ``base`` library +package because it exports the standard ``Prelude`` module plus other +basic modules like ``Data.List``. + +You will notice that we have listed ``base == 4.*``. This gives a +constraint on the version of the base package that our package will work +with. The most common kinds of constraints are: + +- ``pkgname >= n`` +- ``pkgname ^>= n`` (since Cabal 2.0) +- ``pkgname >= n && < m`` +- ``pkgname == n.*`` (since Cabal 1.6) + +The last is just shorthand, for example ``base == 4.*`` means exactly +the same thing as ``base >= 4 && < 5``. Please refer to the documentation +on the :pkg-field:`build-depends` field for more information. + +Building the package +-------------------- + +For simple packages that's it! We can now try configuring and building +the package: + +.. code-block:: console + + $ cabal configure + $ cabal build + +Assuming those two steps worked then you can also install the package: + +.. code-block:: console + + $ cabal install + +For libraries this makes them available for use in GHCi or to be used by +other packages. 
For executables it installs the program so that you can +run it (though you may first need to adjust your system's ``$PATH``). + +Next steps +---------- + +What we have covered so far should be enough for very simple packages +that you use on your own system. + +The next few sections cover more details needed for more complex +packages and details needed for distributing packages to other people. + +The previous chapter covers building and installing packages -- your own +packages or ones developed by other people. + +Package concepts +================ + +Before diving into the details of writing packages it helps to +understand a bit about packages in the Haskell world and the particular +approach that Cabal takes. + +The point of packages +--------------------- + +Packages are a mechanism for organising and distributing code. Packages +are particularly suited for "programming in the large", that is building +big systems by using and re-using code written by different people at +different times. + +People organise code into packages based on functionality and +dependencies. Social factors are also important: most packages have a +single author, or a relatively small team of authors. + +Packages are also used for distribution: the idea is that a package can +be created in one place and be moved to a different computer and be +usable in that different environment. There are a surprising number of +details that have to be got right for this to work, and a good package +system helps to simply this process and make it reliable. + +Packages come in two main flavours: libraries of reusable code, and +complete programs. Libraries present a code interface, an API, while +programs can be run directly. In the Haskell world, library packages +expose a set of Haskell modules as their public interface. Cabal +packages can contain a library or executables or both. + +Some programming languages have packages as a builtin language concept. 
+For example in Java, a package provides a local namespace for types and +other definitions. In the Haskell world, packages are not a part of the +language itself. Haskell programs consist of a number of modules, and +packages just provide a way to partition the modules into sets of +related functionality. Thus the choice of module names in Haskell is +still important, even when using packages. + +Package names and versions +-------------------------- + +All packages have a name, e.g. "HUnit". Package names are assumed to be +unique. Cabal package names may contain letters, numbers and hyphens, +but not spaces and may also not contain a hyphened section consisting of +only numbers. The namespace for Cabal packages is flat, not +hierarchical. + +Packages also have a version, e.g "1.1". This matches the typical way in +which packages are developed. Strictly speaking, each version of a +package is independent, but usually they are very similar. Cabal package +versions follow the conventional numeric style, consisting of a sequence +of digits such as "1.0.1" or "2.0". There are a range of common +conventions for "versioning" packages, that is giving some meaning to +the version number in terms of changes in the package, such as +e.g. `SemVer `__; however, for packages intended to be +distributed via Hackage Haskell's `Package Versioning Policy`_ applies +(see also the `PVP/SemVer FAQ section `__). + +The combination of package name and version is called the *package ID* +and is written with a hyphen to separate the name and version, e.g. +"HUnit-1.1". + +For Cabal packages, the combination of the package name and version +*uniquely* identifies each package. Or to put it another way: two +packages with the same name and version are considered to *be* the same. + +Strictly speaking, the package ID only identifies each Cabal *source* +package; the same Cabal source package can be configured and built in +different ways. 
There is a separate installed package ID that uniquely +identifies each installed package instance. Most of the time however, +users need not be aware of this detail. + +Kinds of package: Cabal vs GHC vs system +---------------------------------------- + +It can be slightly confusing at first because there are various +different notions of package floating around. Fortunately the details +are not very complicated. + +Cabal packages + Cabal packages are really source packages. That is they contain + Haskell (and sometimes C) source code. + + Cabal packages can be compiled to produce GHC packages. They can + also be translated into operating system packages. + +GHC packages + This is GHC's view on packages. GHC only cares about library + packages, not executables. Library packages have to be registered + with GHC for them to be available in GHCi or to be used when + compiling other programs or packages. + + The low-level tool ``ghc-pkg`` is used to register GHC packages and + to get information on what packages are currently registered. + + You never need to make GHC packages manually. When you build and + install a Cabal package containing a library then it gets registered + with GHC automatically. + + Haskell implementations other than GHC have essentially the same + concept of registered packages. For the most part, Cabal hides the + slight differences. + +Operating system packages + On operating systems like Linux and Mac OS X, the system has a + specific notion of a package and there are tools for installing and + managing packages. + + The Cabal package format is designed to allow Cabal packages to be + translated, mostly-automatically, into operating system packages. + They are usually translated 1:1, that is a single Cabal package + becomes a single system package. 
+ + It is also possible to make Windows installers from Cabal packages, + though this is typically done for a program together with all of its + library dependencies, rather than packaging each library separately. + +Unit of distribution +-------------------- + +The Cabal package is the unit of distribution. What this means is that +each Cabal package can be distributed on its own in source or binary +form. Of course there may dependencies between packages, but there is +usually a degree of flexibility in which versions of packages can work +together so distributing them independently makes sense. + +It is perhaps easiest to see what being "the unit of distribution" +means by contrast to an alternative approach. Many projects are made up +of several interdependent packages and during development these might +all be kept under one common directory tree and be built and tested +together. When it comes to distribution however, rather than +distributing them all together in a single tarball, it is required that +they each be distributed independently in their own tarballs. + +Cabal's approach is to say that if you can specify a dependency on a +package then that package should be able to be distributed +independently. Or to put it the other way round, if you want to +distribute it as a single unit, then it should be a single package. + +Explicit dependencies and automatic package management +------------------------------------------------------ + +Cabal takes the approach that all packages dependencies are specified +explicitly and specified in a declarative way. The point is to enable +automatic package management. This means tools like ``cabal`` can +resolve dependencies and install a package plus all of its dependencies +automatically. Alternatively, it is possible to mechanically (or mostly +mechanically) translate Cabal packages into system packages and let the +system package manager install dependencies automatically. 
+ +It is important to track dependencies accurately so that packages can +reliably be moved from one system to another system and still be able to +build it there. Cabal is therefore relatively strict about specifying +dependencies. For example Cabal's default build system will not even let +code build if it tries to import a module from a package that isn't +listed in the ``.cabal`` file, even if that package is actually +installed. This helps to ensure that there are no "untracked +dependencies" that could cause the code to fail to build on some other +system. + +The explicit dependency approach is in contrast to the traditional +"./configure" approach where instead of specifying dependencies +declaratively, the ``./configure`` script checks if the dependencies are +present on the system. Some manual work is required to transform a +``./configure`` based package into a Linux distribution package (or +similar). This conversion work is usually done by people other than the +package author(s). The practical effect of this is that only the most +popular packages will benefit from automatic package management. +Instead, Cabal forces the original author to specify the dependencies +but the advantage is that every package can benefit from automatic +package management. + +The "./configure" approach tends to encourage packages that adapt +themselves to the environment in which they are built, for example by +disabling optional features so that they can continue to work when a +particular dependency is not available. This approach makes sense in a +world where installing additional dependencies is a tiresome manual +process and so minimising dependencies is important. The automatic +package management view is that packages should just declare what they +need and the package manager will take responsibility for ensuring that +all the dependencies are installed. + +Sometimes of course optional features and optional dependencies do make +sense. 
Cabal packages can have optional features and varying +dependencies. These conditional dependencies are still specified in a +declarative way however and remain compatible with automatic package +management. The need to remain compatible with automatic package +management means that Cabal's conditional dependencies system is a bit +less flexible than with the "./configure" approach. + +.. note:: + `GNU autoconf places restrictions on paths, including the + path that the user builds a package from. + `_ + Package authors using ``build-type: configure`` should be aware of + these restrictions; because users may be unexpectedly constrained and + face mysterious errors, it is recommended that ``build-type: configure`` + is only used where strictly necessary. + +Portability +----------- + +One of the purposes of Cabal is to make it easier to build packages on +different platforms (operating systems and CPU architectures), with +different compiler versions and indeed even with different Haskell +implementations. (Yes, there are Haskell implementations other than +GHC!) + +Cabal provides abstractions of features present in different Haskell +implementations and wherever possible it is best to take advantage of +these to increase portability. Where necessary however it is possible to +use specific features of specific implementations. + +For example a package author can list in the package's ``.cabal`` what +language extensions the code uses. This allows Cabal to figure out if +the language extension is supported by the Haskell implementation that +the user picks. Additionally, certain language extensions such as +Template Haskell require special handling from the build system and by +listing the extension it provides the build system with enough +information to do the right thing. + +Another similar example is linking with foreign libraries. 
Rather than +specifying GHC flags directly, the package author can list the libraries +that are needed and the build system will take care of using the right +flags for the compiler. Additionally this makes it easier for tools to +discover what system C libraries a package needs, which is useful for +tracking dependencies on system libraries (e.g. when translating into +Linux distribution packages). + +In fact both of these examples fall into the category of explicitly +specifying dependencies. Not all dependencies are other Cabal packages. +Foreign libraries are clearly another kind of dependency. It's also +possible to think of language extensions as dependencies: the package +depends on a Haskell implementation that supports all those extensions. + +Where compiler-specific options are needed however, there is an "escape +hatch" available. The developer can specify implementation-specific +options and more generally there is a configuration mechanism to +customise many aspects of how a package is built depending on the +Haskell implementation, the operating system, computer architecture and +user-specified configuration flags. + +Developing packages +=================== + +The Cabal package is the unit of distribution. When installed, its +purpose is to make available: + +- One or more Haskell programs. + +- At most one library, exposing a number of Haskell modules. + +However having both a library and executables in a package does not work +very well; if the executables depend on the library, they must +explicitly list all the modules they directly or indirectly import from +that library. Fortunately, starting with Cabal 1.8.0.4, executables can +also declare the package that they are in as a dependency, and Cabal +will treat them as if they were in another package that depended on the +library. 
+ +Internally, the package may consist of much more than a bunch of Haskell +modules: it may also have C source code and header files, source code +meant for preprocessing, documentation, test cases, auxiliary tools etc. + +A package is identified by a globally-unique *package name*, which +consists of one or more alphanumeric words separated by hyphens. To +avoid ambiguity, each of these words should contain at least one letter. +Chaos will result if two distinct packages with the same name are +installed on the same system. A particular version of the package is +distinguished by a *version number*, consisting of a sequence of one or +more integers separated by dots. These can be combined to form a single +text string called the *package ID*, using a hyphen to separate the name +from the version, e.g. "``HUnit-1.1``". + +.. Note:: + + Packages are not part of the Haskell language; they simply + populate the hierarchical space of module names. In GHC 6.6 and later a + program may contain multiple modules with the same name if they come + from separate packages; in all other current Haskell systems packages + may not overlap in the modules they provide, including hidden modules. + +Creating a package +------------------ + +Suppose you have a directory hierarchy containing the source files that +make up your package. You will need to add two more files to the root +directory of the package: + +:file:`{package-name}.cabal` + a Unicode UTF-8 text file containing a package description. For + details of the syntax of this file, see the section on + `package descriptions`_. + +:file:`Setup.hs` + a single-module Haskell program to perform various setup tasks (with + the interface described in the section on :ref:`installing-packages`). + This module should import only modules that will be present in all Haskell + implementations, including modules of the Cabal library. 
The content of + this file is determined by the :pkg-field:`build-type` setting in the + ``.cabal`` file. In most cases it will be trivial, calling on the Cabal + library to do most of the work. + +Once you have these, you can create a source bundle of this directory +for distribution. Building of the package is discussed in the section on +:ref:`installing-packages`. + +One of the purposes of Cabal is to make it easier to build a package +with different Haskell implementations. So it provides abstractions of +features present in different Haskell implementations and wherever +possible it is best to take advantage of these to increase portability. +Where necessary however it is possible to use specific features of +specific implementations. For example one of the pieces of information a +package author can put in the package's ``.cabal`` file is what language +extensions the code uses. This is far preferable to specifying flags for +a specific compiler as it allows Cabal to pick the right flags for the +Haskell implementation that the user picks. It also allows Cabal to +figure out if the language extension is even supported by the Haskell +implementation that the user picks. Where compiler-specific options are +needed however, there is an "escape hatch" available. The developer can +specify implementation-specific options and more generally there is a +configuration mechanism to customise many aspects of how a package is +built depending on the Haskell implementation, the Operating system, +computer architecture and user-specified configuration flags. 
+ +:: + + name: Foo + version: 1.0 + + library + build-depends: base >= 4 && < 5 + exposed-modules: Foo + extensions: ForeignFunctionInterface + ghc-options: -Wall + if os(windows) + build-depends: Win32 >= 2.1 && < 2.6 + +Example: A package containing a simple library +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The HUnit package contains a file ``HUnit.cabal`` containing: + +:: + + name: HUnit + version: 1.1.1 + synopsis: A unit testing framework for Haskell + homepage: http://hunit.sourceforge.net/ + category: Testing + author: Dean Herington + license: BSD3 + license-file: LICENSE + cabal-version: >= 1.10 + build-type: Simple + + library + build-depends: base >= 2 && < 4 + exposed-modules: Test.HUnit.Base, Test.HUnit.Lang, + Test.HUnit.Terminal, Test.HUnit.Text, Test.HUnit + default-extensions: CPP + +and the following ``Setup.hs``: + +.. code-block:: haskell + + import Distribution.Simple + main = defaultMain + +Example: A package containing executable programs +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + + name: TestPackage + version: 0.0 + synopsis: Small package with two programs + author: Angela Author + license: BSD3 + build-type: Simple + cabal-version: >= 1.8 + + executable program1 + build-depends: HUnit >= 1.1.1 && < 1.2 + main-is: Main.hs + hs-source-dirs: prog1 + + executable program2 + main-is: Main.hs + build-depends: HUnit >= 1.1.1 && < 1.2 + hs-source-dirs: prog2 + other-modules: Utils + +with ``Setup.hs`` the same as above. 
+ +Example: A package containing a library and executable programs +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + + name: TestPackage + version: 0.0 + synopsis: Package with library and two programs + license: BSD3 + author: Angela Author + build-type: Simple + cabal-version: >= 1.8 + + library + build-depends: HUnit >= 1.1.1 && < 1.2 + exposed-modules: A, B, C + + executable program1 + main-is: Main.hs + hs-source-dirs: prog1 + other-modules: A, B + + executable program2 + main-is: Main.hs + hs-source-dirs: prog2 + other-modules: A, C, Utils + +with ``Setup.hs`` the same as above. Note that any library modules +required (directly or indirectly) by an executable must be listed again. + +The trivial setup script used in these examples uses the *simple build +infrastructure* provided by the Cabal library (see +`Distribution.Simple <../release/cabal-latest/doc/API/Cabal/Distribution-Simple.html>`__). +The simplicity lies in its interface rather that its implementation. It +automatically handles preprocessing with standard preprocessors, and +builds packages for all the Haskell implementations. + +The simple build infrastructure can also handle packages where building +is governed by system-dependent parameters, if you specify a little more +(see the section on `system-dependent parameters`_). +A few packages require `more elaborate solutions `_. + +Package descriptions +-------------------- + +The package description file must have a name ending in "``.cabal``". It +must be a Unicode text file encoded using valid UTF-8. There must be +exactly one such file in the directory. The first part of the name is +usually the package name, and some of the tools that operate on Cabal +packages require this; specifically, Hackage rejects packages which +don't follow this rule. + +In the package description file, lines whose first non-whitespace +characters are "``--``" are treated as comments and ignored. 
+ +This file should contain a number of global property descriptions and +several sections. + +- The `package properties`_ describe the package + as a whole, such as name, license, author, etc. + +- Optionally, a number of *configuration flags* can be declared. These + can be used to enable or disable certain features of a package. (see + the section on `configurations`_). + +- The (optional) library section specifies the `library`_ properties and + relevant `build information`_. + +- Following is an arbitrary number of executable sections which describe + an executable program and relevant `build information`_. + +Each section consists of a number of property descriptions in the form +of field/value pairs, with a syntax roughly like mail message headers. + +- Case is not significant in field names, but is significant in field + values. + +- To continue a field value, indent the next line relative to the field + name. + +- Field names may be indented, but all field values in the same section + must use the same indentation. + +- Tabs are *not* allowed as indentation characters due to a missing + standard interpretation of tab width. + +- To get a blank line in a field value, use an indented "``.``" + +The syntax of the value depends on the field. Field types include: + +*token*, *filename*, *directory* + Either a sequence of one or more non-space non-comma characters, or + a quoted string in Haskell 98 lexical syntax. The latter can be used + for escaping whitespace, for example: + ``ghc-options: -Wall "-with-rtsopts=-T -I1"``. Unless otherwise + stated, relative filenames and directories are interpreted from the + package root directory. +*freeform*, *URL*, *address* + An arbitrary, uninterpreted string. +*identifier* + A letter followed by zero or more alphanumerics or underscores. +*compiler* + A compiler flavor (one of: ``GHC``, ``UHC`` or ``LHC``) + followed by a version range. For example, ``GHC ==6.10.3``, or + ``LHC >=0.6 && <0.8``. 
+ +Modules and preprocessors +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Haskell module names listed in the :pkg-field:`library:exposed-modules` and +:pkg-field:`library:other-modules` fields may correspond to Haskell source +files, i.e. with names ending in "``.hs``" or "``.lhs``", or to inputs for +various Haskell preprocessors. The simple build infrastructure understands the +extensions: + +- ``.gc`` (:hackage-pkg:`greencard`) +- ``.chs`` (:hackage-pkg:`c2hs`) +- ``.hsc`` (:hackage-pkg:`hsc2hs`) +- ``.y`` and ``.ly`` (happy_) +- ``.x`` (alex_) +- ``.cpphs`` (cpphs_) + +When building, Cabal will automatically run the appropriate preprocessor +and compile the Haskell module it produces. For the ``c2hs`` and +``hsc2hs`` preprocessors, Cabal will also automatically add, compile and +link any C sources generated by the preprocessor (produced by +``hsc2hs``'s ``#def`` feature or ``c2hs``'s auto-generated wrapper +functions). Dependencies on pre-processors are specified via the +:pkg-field:`build-tools` or :pkg-field:`build-tool-depends` fields. + +Some fields take lists of values, which are optionally separated by +commas, except for the :pkg-field:`build-depends` field, where the commas are +mandatory. + +Some fields are marked as required. All others are optional, and unless +otherwise specified have empty default values. + +Package properties +^^^^^^^^^^^^^^^^^^ + +These fields may occur in the first top-level properties section and +describe the package as a whole: + +.. pkg-field:: name: package-name (required) + + The unique name of the package, without the version number. + + As pointed out in the section on `package descriptions`_, some + tools require the package-name specified for this field to match + the package description's file-name :file:`{package-name}.cabal`. + + Package names are case-sensitive and must match the regular expression + (i.e. 
alphanumeric "words" separated by dashes; each alphanumeric + word must contain at least one letter): + ``[[:digit:]]*[[:alpha:]][[:alnum:]]*(-[[:digit:]]*[[:alpha:]][[:alnum:]]*)*``. + + Or, expressed in ABNF_: + + .. code-block:: abnf + + package-name = package-name-part *("-" package-name-part) + package-name-part = *DIGIT UALPHA *UALNUM + + UALNUM = UALPHA / DIGIT + UALPHA = ... ; set of alphabetic Unicode code-points + + .. note:: + + Hackage restricts package names to the ASCII subset. + +.. pkg-field:: version: numbers (required) + + The package version number, usually consisting of a sequence of + natural numbers separated by dots, i.e. as the regular + expression ``[0-9]+([.][0-9]+)*`` or expressed in ABNF_: + + .. code-block:: abnf + + package-version = 1*DIGIT *("." 1*DIGIT) + +.. pkg-field:: cabal-version: >= x.y + + The version of the Cabal specification that this package description + uses. The Cabal specification does slowly evolve, introducing new + features and occasionally changing the meaning of existing features. + By specifying which version of the spec you are using it enables + programs which process the package description to know what syntax + to expect and what each part means. + + For historical reasons this is always expressed using *>=* version + range syntax. No other kinds of version range make sense, in + particular upper bounds do not make sense. In future this field will + specify just a version number, rather than a version range. + + The version number you specify will affect both compatibility and + behaviour. Most tools (including the Cabal library and cabal + program) understand a range of versions of the Cabal specification. + Older tools will of course only work with older versions of the + Cabal specification. Most of the time, tools that are too old will + recognise this fact and produce a suitable error message. + + As for behaviour, new versions of the Cabal spec can change the + meaning of existing syntax. 
This means if you want to take advantage + of the new meaning or behaviour then you must specify the newer + Cabal version. Tools are expected to use the meaning and behaviour + appropriate to the version given in the package description. + + In particular, the syntax of package descriptions changed + significantly with Cabal version 1.2 and the :pkg-field:`cabal-version` + field is now required. Files written in the old syntax are still + recognized, so if you require compatibility with very old Cabal + versions then you may write your package description file using the + old syntax. Please consult the user's guide of an older Cabal + version for a description of that syntax. + +.. pkg-field:: build-type: identifier + + :default: ``Custom`` or ``Simple`` + + The type of build used by this package. Build types are the + constructors of the + `BuildType <../release/cabal-latest/doc/API/Cabal/Distribution-PackageDescription.html#t:BuildType>`__ + type. This field is optional and when missing, its default value + is inferred according to the following rules: + + - When :pkg-field:`cabal-version` is set to ``2.2`` or higher, + the default is ``Simple`` unless a :pkg-section:`custom-setup` + exists, in which case the inferred default is ``Custom``. + + - For lower :pkg-field:`cabal-version` values, the default is + ``Custom`` unconditionally. + + If the build type is anything other than ``Custom``, then the + ``Setup.hs`` file *must* be exactly the standardized content + discussed below. This is because in these cases, ``cabal`` will + ignore the ``Setup.hs`` file completely, whereas other methods of + package management, such as ``runhaskell Setup.hs [CMD]``, still + rely on the ``Setup.hs`` file. + + For build type ``Simple``, the contents of ``Setup.hs`` must be: + + .. 
code-block:: haskell + + import Distribution.Simple + main = defaultMain + + For build type ``Configure`` (see the section on `system-dependent + parameters`_ below), the contents of + ``Setup.hs`` must be: + + .. code-block:: haskell + + import Distribution.Simple + main = defaultMainWithHooks autoconfUserHooks + + For build type ``Make`` (see the section on `more complex packages`_ below), + the contents of ``Setup.hs`` must be: + + .. code-block:: haskell + + import Distribution.Make + main = defaultMain + + For build type ``Custom``, the file ``Setup.hs`` can be customized, + and will be used both by ``cabal`` and other tools. + + For most packages, the build type ``Simple`` is sufficient. + +.. pkg-field:: license: identifier + + :default: ``AllRightsReserved`` + + The type of license under which this package is distributed. License + names are the constants of the + `License <../release/cabal-latest/doc/API/Cabal/Distribution-License.html#t:License>`__ + type. + +.. pkg-field:: license-file: filename +.. pkg-field:: license-files: filename list + + The name of a file(s) containing the precise copyright license for + this package. The license file(s) will be installed with the + package. + + If you have multiple license files then use the :pkg-field:`license-files` + field instead of (or in addition to) the :pkg-field:`license-file` field. + +.. pkg-field:: copyright: freeform + + The content of a copyright notice, typically the name of the holder + of the copyright on the package and the year(s) from which copyright + is claimed. For example:: + + copyright: (c) 2006-2007 Joe Bloggs + +.. pkg-field:: author: freeform + + The original author of the package. + + Remember that ``.cabal`` files are Unicode, using the UTF-8 + encoding. + +.. pkg-field:: maintainer: address + + The current maintainer or maintainers of the package. This is an + e-mail address to which users should send bug reports, feature + requests and patches. + +.. 
pkg-field:: stability: freeform + + The stability level of the package, e.g. ``alpha``, + ``experimental``, ``provisional``, ``stable``. + +.. pkg-field:: homepage: URL + + The package homepage. + +.. pkg-field:: bug-reports: URL + + The URL where users should direct bug reports. This would normally + be either: + + - A ``mailto:`` URL, e.g. for a person or a mailing list. + + - An ``http:`` (or ``https:``) URL for an online bug tracking + system. + + For example Cabal itself uses a web-based bug tracking system + + :: + + bug-reports: https://github.com/haskell/cabal/issues + +.. pkg-field:: package-url: URL + + The location of a source bundle for the package. The distribution + should be a Cabal package. + +.. pkg-field:: synopsis: freeform + + A very short description of the package, for use in a table of + packages. This is your headline, so keep it short (one line) but as + informative as possible. Save space by not including the package + name or saying it's written in Haskell. + +.. pkg-field:: description: freeform + + Description of the package. This may be several paragraphs, and + should be aimed at a Haskell programmer who has never heard of your + package before. + + For library packages, this field is used as prologue text by + :ref:`setup-haddock` and thus may contain the same markup as Haddock_ + documentation comments. + +.. pkg-field:: category: freeform + + A classification category for future use by the package catalogue + Hackage_. These categories have not + yet been specified, but the upper levels of the module hierarchy + make a good start. + +.. pkg-field:: tested-with: compiler list + + A list of compilers and versions against which the package has been + tested (or at least built). + +.. pkg-field:: data-files: filename list + + A list of files to be installed for run-time use by the package. + This is useful for packages that use a large amount of static data, + such as tables of values or code templates. 
Cabal provides a way to + `find these files at run-time <#accessing-data-files-from-package-code>`_. + + A limited form of ``*`` wildcards in file names, for example + ``data-files: images/*.png`` matches all the ``.png`` files in the + ``images`` directory. ``data-files: audio/**/*.mp3`` matches all + the ``.mp3`` files in the ``audio`` directory, including + subdirectories. + + The specific limitations of this wildcard syntax are + + - ``*`` wildcards are only allowed in place of the file name, not + in the directory name or file extension. It must replace the + whole file name (e.g., ``*.html`` is allowed, but + ``chapter-*.html`` is not). If a wildcard is used, it must be + used with an extension, so ``data-files: data/*`` is not + allowed. + + - Prior to Cabal 2.4, when matching a wildcard plus extension, a + file's full extension must match exactly, so ``*.gz`` matches + ``foo.gz`` but not ``foo.tar.gz``. This restriction has been + lifted when ``cabal-version: 2.4`` or greater so that ``*.gz`` + does match ``foo.tar.gz`` + + - ``*`` wildcards will not match if the file name is empty (e.g., + ``*.html`` will not match ``foo/.html``). + + - ``**`` wildcards can only appear as the final path component + before the file name (e.g., ``data/**/images/*.jpg`` is not + allowed). If a ``**`` wildcard is used, then the file name must + include a ``*`` wildcard (e.g., ``data/**/README.rst`` is not + allowed). + + - A wildcard that does not match any files is an error. + + The reason for providing only a very limited form of wildcard is to + concisely express the common case of a large number of related files + of the same file type without making it too easy to accidentally + include unwanted files. + + On efficiency: if you use ``**`` patterns, the directory tree will + be walked starting with the parent directory of the ``**``. If + that's the root of the project, this might include ``.git/``, + ``dist-newstyle/``, or other large directories! 
To avoid this + behaviour, put the files that wildcards will match against in + their own folder. + + ``**`` wildcards are available starting in Cabal 2.4. + +.. pkg-field:: data-dir: directory + + The directory where Cabal looks for data files to install, relative + to the source directory. By default, Cabal will look in the source + directory itself. + +.. pkg-field:: extra-source-files: filename list + + A list of additional files to be included in source distributions + built with :ref:`setup-sdist`. As with :pkg-field:`data-files` it can use + a limited form of ``*`` wildcards in file names. + +.. pkg-field:: extra-doc-files: filename list + + A list of additional files to be included in source distributions, + and also copied to the html directory when Haddock documentation is + generated. As with :pkg-field:`data-files` it can use a limited form of + ``*`` wildcards in file names. + +.. pkg-field:: extra-tmp-files: filename list + + A list of additional files or directories to be removed by + :ref:`setup-clean`. These would typically be additional files created by + additional hooks, such as the scheme described in the section on + `system-dependent parameters`_ + +Library +^^^^^^^ + +.. pkg-section:: library name + :synopsis: Library build information. + + Build information for libraries. + + Currently, there can only be one publicly exposed library in a + package, and its name is the same as package name set by global + :pkg-field:`name` field. In this case, the ``name`` argument to + the :pkg-section:`library` section must be omitted. + + Starting with Cabal 2.0, private internal sub-library components + can be defined by setting the ``name`` field to a name + different from the current package's name; see section on + :ref:`Internal Libraries <sublibs>` for more information. + +The library section should contain the following fields: + +.. 
pkg-field:: exposed-modules: identifier list + + :required: if this package contains a library + + A list of modules added by this package. + +.. pkg-field:: virtual-modules: identifier list + :since: 2.2 + + A list of virtual modules provided by this package. Virtual modules + are modules without a source file. See for example the ``GHC.Prim`` + module from the ``ghc-prim`` package. Modules listed here will not be + built, but still end up in the list of ``exposed-modules`` in the + installed package info when the package is registered in the package + database. + +.. pkg-field:: exposed: boolean + + :default: ``True`` + + Some Haskell compilers (notably GHC) support the notion of packages + being "exposed" or "hidden" which means the modules they provide can + be easily imported without always having to specify which package + they come from. However this only works effectively if the modules + provided by all exposed packages do not overlap (otherwise a module + import would be ambiguous). + + Almost all new libraries use hierarchical module names that do not + clash, so it is very uncommon to have to use this field. However it + may be necessary to set ``exposed: False`` for some old libraries + that use a flat module namespace or where it is known that the + exposed modules would clash with other common modules. + +.. pkg-field:: reexported-modules: exportlist + :since: 1.22 + + Supported only in GHC 7.10 and later. A list of modules to + *reexport* from this package. The syntax of this field is + ``orig-pkg:Name as NewName`` to reexport module ``Name`` from + ``orig-pkg`` with the new name ``NewName``. We also support + abbreviated versions of the syntax: if you omit ``as NewName``, + we'll reexport without renaming; if you omit ``orig-pkg``, then we + will automatically figure out which package to reexport from, if + it's unambiguous. 
+ + Reexported modules are useful for compatibility shims when a package + has been split into multiple packages, and they have the useful + property that if a package provides a module, and another package + reexports it under the same name, these are not considered a + conflict (as would be the case with a stub module.) They can also be + used to resolve name conflicts. + +.. pkg-field:: signatures: signature list + :since: 2.0 + + Supported only in GHC 8.2 and later. A list of `module signatures `__ required by this package. + + Module signatures are part of the + `Backpack `__ extension to + the Haskell module system. + + Packages that do not export any modules and only export required signatures + are called "signature-only packages", and their signatures are subjected to + `signature thinning + `__. + + + +The library section may also contain build information fields (see the +section on `build information`_). + +.. _sublibs: + +**Internal Libraries** + +Cabal 2.0 and later support "internal libraries", which are extra named +libraries (as opposed to the usual unnamed library section). For +example, suppose that your test suite needs access to some internal +modules in your library, which you do not otherwise want to export. You +could put these modules in an internal library, which the main library +and the test suite :pkg-field:`build-depends` upon. 
Then your Cabal file might +look something like this: + +:: + + cabal-version: 2.0 + name: foo + version: 0.1.0.0 + license: BSD3 + build-type: Simple + + library foo-internal + exposed-modules: Foo.Internal + -- NOTE: no explicit constraints on base needed + -- as they're inherited from the 'library' stanza + build-depends: base + + library + exposed-modules: Foo.Public + build-depends: foo-internal, base >= 4.3 && < 5 + + test-suite test-foo + type: exitcode-stdio-1.0 + main-is: test-foo.hs + -- NOTE: no constraints on 'foo-internal' as same-package + -- dependencies implicitly refer to the same package instance + build-depends: foo-internal, base + +Internal libraries are also useful for packages that define multiple +executables, but do not define a publicly accessible library. Internal +libraries are only visible internally in the package (so they can only +be added to the :pkg-field:`build-depends` of same-package libraries, +executables, test suites, etc.) Internal libraries locally shadow any +packages which have the same name; consequently, don't name an internal +library with the same name as an external dependency if you need to be +able to refer to the external dependency in a +:pkg-field:`build-depends` declaration. + +Shadowing can be used to vendor an external dependency into a package +and thus emulate *private dependencies*. 
Below is an example based on +a real-world use case: + +:: + + cabal-version: 2.2 + name: haddock-library + version: 1.6.0 + + library + build-depends: + , base ^>= 4.11.1.0 + , bytestring ^>= 0.10.2.0 + , containers ^>= 0.4.2.1 || ^>= 0.5.0.0 + , transformers ^>= 0.5.0.0 + + hs-source-dirs: src + + -- internal sub-lib + build-depends: attoparsec + + exposed-modules: + Documentation.Haddock + + library attoparsec + build-depends: + , base ^>= 4.11.1.0 + , bytestring ^>= 0.10.2.0 + , deepseq ^>= 1.4.0.0 + + hs-source-dirs: vendor/attoparsec-0.13.1.0 + + -- NB: haddock-library needs only small part of lib:attoparsec + -- internally, so we only bundle that subset here + exposed-modules: + Data.Attoparsec.ByteString + Data.Attoparsec.Combinator + + other-modules: + Data.Attoparsec.Internal + + ghc-options: -funbox-strict-fields -Wall -fwarn-tabs -O2 + + +Opening an interpreter session +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +While developing a package, it is often useful to make its code +available inside an interpreter session. This can be done with the +``repl`` command: + +.. code-block:: console + + $ cabal repl + +The name comes from the acronym +`REPL `__, +which stands for "read-eval-print-loop". By default ``cabal repl`` loads +the first component in a package. If the package contains several named +components, the name can be given as an argument to ``repl``. The name +can be also optionally prefixed with the component's type for +disambiguation purposes. Example: + +.. code-block:: console + + $ cabal repl foo + $ cabal repl exe:foo + $ cabal repl test:bar + $ cabal repl bench:baz + +Freezing dependency versions +"""""""""""""""""""""""""""" + +If a package is built in several different environments, such as a +development environment, a staging environment and a production +environment, it may be necessary or desirable to ensure that the same +dependency versions are selected in each environment. This can be done +with the ``freeze`` command: + +.. 
code-block:: console + + $ cabal freeze + +The command writes the selected version for all dependencies to the +``cabal.config`` file. All environments which share this file will use +the dependency versions specified in it. + +Generating dependency version bounds +"""""""""""""""""""""""""""""""""""" + +Cabal also has the ability to suggest dependency version bounds that +conform to `Package Versioning Policy`_, which is +a recommended versioning system for publicly released Cabal packages. +This is done by running the ``gen-bounds`` command: + +.. code-block:: console + + $ cabal gen-bounds + +For example, given the following dependencies specified in +:pkg-field:`build-depends`: + +:: + + build-depends: + foo == 0.5.2 + bar == 1.1 + +``gen-bounds`` will suggest changing them to the following: + +:: + + build-depends: + foo >= 0.5.2 && < 0.6 + bar >= 1.1 && < 1.2 + +Listing outdated dependency version bounds +"""""""""""""""""""""""""""""""""""""""""" + +Manually updating dependency version bounds in a ``.cabal`` file or a +freeze file can be tedious, especially when there's a lot of +dependencies. The ``cabal outdated`` command is designed to help with +that. It will print a list of packages for which there is a new +version on Hackage that is outside the version bound specified in the +``build-depends`` field. The ``outdated`` command can also be +configured to act on the freeze file (both old- and new-style) and +ignore major (or all) version bumps on Hackage for a subset of +dependencies. + +The following flags are supported by the ``outdated`` command: + +``--freeze-file`` + Read dependency version bounds from the freeze file (``cabal.config``) + instead of the package description file (``$PACKAGENAME.cabal``). + ``--v1-freeze-file`` is an alias for this flag starting in Cabal 2.4. +``--new-freeze-file`` + Read dependency version bounds from the new-style freeze file + (by default, ``cabal.project.freeze``) instead of the package + description file. 
``--v2-freeze-file`` is an alias for this flag + starting in Cabal 2.4. +``--project-file`` *PROJECTFILE* + :since: 2.4 + + Read dependency version bounds from the new-style freeze file + related to the named project file (i.e., ``$PROJECTFILE.freeze``) + instead of the package description file. If multiple ``--project-file`` + flags are provided, only the final one is considered. This flag + must only be passed in when ``--new-freeze-file`` is present. +``--simple-output`` + Print only the names of outdated dependencies, one per line. +``--exit-code`` + Exit with a non-zero exit code when there are outdated dependencies. +``-q, --quiet`` + Don't print any output. Implies ``-v0`` and ``--exit-code``. +``--ignore`` *PACKAGENAMES* + Don't warn about outdated dependency version bounds for the packages in this + list. +``--minor`` *[PACKAGENAMES]* + Ignore major version bumps for these packages. E.g. if there's a version 2.0 + of a package ``pkg`` on Hackage and the freeze file specifies the constraint + ``pkg == 1.9``, ``cabal outdated --freeze --minor=pkg`` will only consider + the ``pkg`` outdated when there's a version of ``pkg`` on Hackage satisfying + ``pkg > 1.9 && < 2.0``. ``--minor`` can also be used without arguments, in + that case major version bumps are ignored for all packages. + +Examples: + +.. code-block:: console + + $ cd /some/package + $ cabal outdated + Outdated dependencies: + haskell-src-exts <1.17 (latest: 1.19.1) + language-javascript <0.6 (latest: 0.6.0.9) + unix ==2.7.2.0 (latest: 2.7.2.1) + + $ cabal outdated --simple-output + haskell-src-exts + language-javascript + unix + + $ cabal outdated --ignore=haskell-src-exts + Outdated dependencies: + language-javascript <0.6 (latest: 0.6.0.9) + unix ==2.7.2.0 (latest: 2.7.2.1) + + $ cabal outdated --ignore=haskell-src-exts,language-javascript,unix + All dependencies are up to date. + + $ cabal outdated --ignore=haskell-src-exts,language-javascript,unix -q + $ echo $? 
+ 0 + + $ cd /some/other/package + $ cabal outdated --freeze-file + Outdated dependencies: + HTTP ==4000.3.3 (latest: 4000.3.4) + HUnit ==1.3.1.1 (latest: 1.5.0.0) + + $ cabal outdated --freeze-file --ignore=HTTP --minor=HUnit + Outdated dependencies: + HUnit ==1.3.1.1 (latest: 1.3.1.2) + + +Executables +^^^^^^^^^^^ + +.. pkg-section:: executable name + :synopsis: Executable build info section. + + Executable sections (if present) describe executable programs contained + in the package and must have an argument after the section label, which + defines the name of the executable. This is a freeform argument but may + not contain spaces. + +The executable may be described using the following fields, as well as +build information fields (see the section on `build information`_). + +.. pkg-field:: main-is: filename (required) + + The name of the ``.hs`` or ``.lhs`` file containing the ``Main`` + module. Note that it is the ``.hs`` filename that must be listed, + even if that file is generated using a preprocessor. The source file + must be relative to one of the directories listed in + :pkg-field:`hs-source-dirs`. Further, while the name of the file may + vary, the module itself must be named ``Main``. + +.. pkg-field:: scope: token + :since: 2.0 + + Whether the executable is ``public`` (default) or ``private``, i.e. meant to + be run by other programs rather than the user. Private executables are + installed into `$libexecdir/$libexecsubdir`. + +Running executables +""""""""""""""""""" + +You can have Cabal build and run your executables by using the ``run`` +command: + +.. code-block:: console + + $ cabal run EXECUTABLE [-- EXECUTABLE_FLAGS] + +This command will configure, build and run the executable +``EXECUTABLE``. The double dash separator is required to distinguish +executable flags from ``run``'s own flags. If there is only one +executable defined in the whole package, the executable's name can be +omitted. 
See the output of ``cabal help run`` for a list of options you +can pass to ``cabal run``. + +Test suites +^^^^^^^^^^^ + +.. pkg-section:: test-suite name + :synopsis: Test suite build information. + + Test suite sections (if present) describe package test suites and must + have an argument after the section label, which defines the name of the + test suite. This is a freeform argument, but may not contain spaces. It + should be unique among the names of the package's other test suites, the + package's executables, and the package itself. Using test suite sections + requires at least Cabal version 1.9.2. + +The test suite may be described using the following fields, as well as +build information fields (see the section on `build information`_). + +.. pkg-field:: type: interface (required) + + The interface type and version of the test suite. Cabal supports two + test suite interfaces, called ``exitcode-stdio-1.0`` and + ``detailed-0.9``. Each of these types may require or disallow other + fields as described below. + +Test suites using the ``exitcode-stdio-1.0`` interface are executables +that indicate test failure with a non-zero exit code when run; they may +provide human-readable log information through the standard output and +error channels. The ``exitcode-stdio-1.0`` type requires the ``main-is`` +field. + +.. pkg-field:: main-is: filename + :synopsis: Module containing tests main function. + + :required: ``exitcode-stdio-1.0`` + :disallowed: ``detailed-0.9`` + + The name of the ``.hs`` or ``.lhs`` file containing the ``Main`` + module. Note that it is the ``.hs`` filename that must be listed, + even if that file is generated using a preprocessor. The source file + must be relative to one of the directories listed in + :pkg-field:`hs-source-dirs`. This field is analogous to the ``main-is`` field + of an executable section. + +Test suites using the ``detailed-0.9`` interface are modules exporting +the symbol ``tests :: IO [Test]``. 
The ``Test`` type is exported by the +module ``Distribution.TestSuite`` provided by Cabal. For more details, +see the example below. + +The ``detailed-0.9`` interface allows Cabal and other test agents to +inspect a test suite's results case by case, producing detailed human- +and machine-readable log files. The ``detailed-0.9`` interface requires +the :pkg-field:`test-module` field. + +.. pkg-field:: test-module: identifier + + :required: ``detailed-0.9`` + :disallowed: ``exitcode-stdio-1.0`` + + The module exporting the ``tests`` symbol. + +Example: Package using ``exitcode-stdio-1.0`` interface +""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +The example package description and executable source file below +demonstrate the use of the ``exitcode-stdio-1.0`` interface. + +.. code-block:: cabal + :caption: foo.cabal + + Name: foo + Version: 1.0 + License: BSD3 + Cabal-Version: >= 1.9.2 + Build-Type: Simple + + Test-Suite test-foo + type: exitcode-stdio-1.0 + main-is: test-foo.hs + build-depends: base >= 4 && < 5 + +.. code-block:: haskell + :caption: test-foo.hs + + module Main where + + import System.Exit (exitFailure) + + main = do + putStrLn "This test always fails!" + exitFailure + +Example: Package using ``detailed-0.9`` interface +""""""""""""""""""""""""""""""""""""""""""""""""" + +The example package description and test module source file below +demonstrate the use of the ``detailed-0.9`` interface. The test module +also develops a simple implementation of the interface set by +``Distribution.TestSuite``, but in actual usage the implementation would +be provided by the library that provides the testing facility. + +.. code-block:: cabal + :caption: bar.cabal + + Name: bar + Version: 1.0 + License: BSD3 + Cabal-Version: >= 1.9.2 + Build-Type: Simple + + Test-Suite test-bar + type: detailed-0.9 + test-module: Bar + build-depends: base >= 4 && < 5, Cabal >= 1.9.2 && < 2 + + +.. 
code-block:: haskell + :caption: Bar.hs + + module Bar ( tests ) where + + import Distribution.TestSuite + + tests :: IO [Test] + tests = return [ Test succeeds, Test fails ] + where + succeeds = TestInstance + { run = return $ Finished Pass + , name = "succeeds" + , tags = [] + , options = [] + , setOption = \_ _ -> Right succeeds + } + fails = TestInstance + { run = return $ Finished $ Fail "Always fails!" + , name = "fails" + , tags = [] + , options = [] + , setOption = \_ _ -> Right fails + } + +Running test suites +""""""""""""""""""" + +You can have Cabal run your test suites using its built-in test runner: + +:: + + $ cabal configure --enable-tests + $ cabal build + $ cabal test + +See the output of ``cabal help test`` for a list of options you can pass +to ``cabal test``. + +Benchmarks +^^^^^^^^^^ + +.. pkg-section:: benchmark name + :since: 1.9.2 + :synopsis: Benchmark build information. + + Benchmark sections (if present) describe benchmarks contained in the + package and must have an argument after the section label, which defines + the name of the benchmark. This is a freeform argument, but may not + contain spaces. It should be unique among the names of the package's + other benchmarks, the package's test suites, the package's executables, + and the package itself. Using benchmark sections requires at least Cabal + version 1.9.2. + +The benchmark may be described using the following fields, as well as +build information fields (see the section on `build information`_). + +.. pkg-field:: type: interface (required) + + The interface type and version of the benchmark. At the moment Cabal + only support one benchmark interface, called ``exitcode-stdio-1.0``. + +Benchmarks using the ``exitcode-stdio-1.0`` interface are executables +that indicate failure to run the benchmark with a non-zero exit code +when run; they may provide human-readable information through the +standard output and error channels. + +.. 
pkg-field:: main-is: filename + + :required: ``exitcode-stdio-1.0`` + + The name of the ``.hs`` or ``.lhs`` file containing the ``Main`` + module. Note that it is the ``.hs`` filename that must be listed, + even if that file is generated using a preprocessor. The source file + must be relative to one of the directories listed in + :pkg-field:`hs-source-dirs`. This field is analogous to the ``main-is`` + field of an executable section. Further, while the name of the file may + vary, the module itself must be named ``Main``. + +Example: Package using ``exitcode-stdio-1.0`` interface +""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +The example package description and executable source file below +demonstrate the use of the ``exitcode-stdio-1.0`` interface. + +.. code-block:: cabal + :caption: foo.cabal + :name: foo-bench.cabal + + Name: foo + Version: 1.0 + License: BSD3 + Cabal-Version: >= 1.9.2 + Build-Type: Simple + + Benchmark bench-foo + type: exitcode-stdio-1.0 + main-is: bench-foo.hs + build-depends: base >= 4 && < 5, time >= 1.1 && < 1.7 + +.. code-block:: haskell + :caption: bench-foo.hs + + {-# LANGUAGE BangPatterns #-} + module Main where + + import Data.Time.Clock + + fib 0 = 1 + fib 1 = 1 + fib n = fib (n-1) + fib (n-2) + + main = do + start <- getCurrentTime + let !r = fib 20 + end <- getCurrentTime + putStrLn $ "fib 20 took " ++ show (diffUTCTime end start) + +Running benchmarks +"""""""""""""""""" + +You can have Cabal run your benchmark using its built-in benchmark +runner: + +:: + + $ cabal configure --enable-benchmarks + $ cabal build + $ cabal bench + +See the output of ``cabal help bench`` for a list of options you can +pass to ``cabal bench``. + +Foreign libraries +^^^^^^^^^^^^^^^^^ + +Foreign libraries are system libraries intended to be linked against +programs written in C or other "foreign" languages. 
They
+come in two primary flavours: dynamic libraries (``.so`` files on Linux,
+``.dylib`` files on OSX, ``.dll`` files on Windows, etc.) are linked against
+executables when the executable is run (or even lazily during
+execution), while static libraries (``.a`` files on Linux/OSX, ``.lib``
+files on Windows) get linked against the executable at compile time.
+
+Foreign libraries only work with GHC 7.8 and later.
+
+A typical stanza for a foreign library looks like
+
+::
+
+    foreign-library myforeignlib
+      type:                native-shared
+      lib-version-info:    6:3:2
+
+      if os(Windows)
+        options: standalone
+        mod-def-file: MyForeignLib.def
+
+      other-modules:       MyForeignLib.SomeModule
+                           MyForeignLib.SomeOtherModule
+      build-depends:       base >=4.7 && <4.9
+      hs-source-dirs:      src
+      c-sources:           csrc/MyForeignLibWrapper.c
+      default-language:    Haskell2010
+
+
+.. pkg-section:: foreign-library name
+    :since: 2.0
+    :synopsis: Foreign library build information.
+
+    Build information for `foreign libraries`_.
+
+.. pkg-field:: type: foreign library type
+
+    Cabal recognizes ``native-static`` and ``native-shared`` here, although
+    we currently only support building `native-shared` libraries.
+
+.. pkg-field:: options: foreign library option list
+
+    Options for building the foreign library, typically specific to the
+    specified type of foreign library. Currently we only support
+    ``standalone`` here. A standalone dynamic library is one that does not
+    have any dependencies on other (Haskell) shared libraries; without
+    the ``standalone`` option the generated library would have dependencies
+    on the Haskell runtime library (``libHSrts``), the base library
+    (``libHSbase``), etc. Currently, ``standalone`` *must* be used on Windows
+    and *must not* be used on any other platform.
+
+.. pkg-field:: mod-def-file: filename
+
+    This option can only be used when creating dynamic Windows libraries
+    (that is, when using ``native-shared`` and the ``os`` is ``Windows``).
If + used, it must be a path to a *module definition file*. The details of + module definition files are beyond the scope of this document; see the + `GHC `_ + manual for some details and some further pointers. + +.. pkg-field:: lib-version-info: current:revision:age + + This field is currently only used on Linux. + + This field specifies a Libtool-style version-info field that sets + an appropriate ABI version for the foreign library. Note that the + three numbers specified in this field do not directly specify the + actual ABI version: ``6:3:2`` results in library version ``4.2.3``. + + With this field set, the SONAME of the library is set, and symlinks + are installed. + + How you should bump this field on an ABI change depends on the + breakage you introduce: + + - Programs using the previous version may use the new version as + drop-in replacement, and programs using the new version can also + work with the previous one. In other words, no recompiling nor + relinking is needed. In this case, bump ``revision`` only, don't + touch current nor age. + - Programs using the previous version may use the new version as + drop-in replacement, but programs using the new version may use + APIs not present in the previous one. In other words, a program + linking against the new version may fail with "unresolved + symbols" if linking against the old version at runtime: set + revision to 0, bump current and age. + - Programs may need to be changed, recompiled, and relinked in + order to use the new version. Bump current, set revision and age + to 0. + + Also refer to the Libtool documentation on the version-info field. + +.. pkg-field:: lib-version-linux: version + + This field is only used on Linux. + + Specifies the library ABI version directly for foreign libraries + built on Linux: so specifying ``4.2.3`` causes a library + ``libfoo.so.4.2.3`` to be built with SONAME ``libfoo.so.4``, and + appropriate symlinks ``libfoo.so.4`` and ``libfoo.so`` to be + installed. 
+ +Note that typically foreign libraries should export a way to initialize +and shutdown the Haskell runtime. In the example above, this is done by +the ``csrc/MyForeignLibWrapper.c`` file, which might look something like + +.. code-block:: c + + #include + #include "HsFFI.h" + + HsBool myForeignLibInit(void){ + int argc = 2; + char *argv[] = { "+RTS", "-A32m", NULL }; + char **pargv = argv; + + // Initialize Haskell runtime + hs_init(&argc, &pargv); + + // do any other initialization here and + // return false if there was a problem + return HS_BOOL_TRUE; + } + + void myForeignLibExit(void){ + hs_exit(); + } + +With modern ghc regular libraries are installed in directories that contain +package keys. This isn't usually a problem because the package gets registered +in ghc's package DB and so we can figure out what the location of the library +is. Foreign libraries however don't get registered, which means that we'd have +to have a way of finding out where a platform library got installed (other than by +searching the ``lib/`` directory). Instead, we install foreign libraries in +``~/.cabal/lib``, much like we install executables in ``~/.cabal/bin``. + +Build information +^^^^^^^^^^^^^^^^^ +.. pkg-section:: None + +The following fields may be optionally present in a library, executable, +test suite or benchmark section, and give information for the building +of the corresponding library or executable. See also the sections on +`system-dependent parameters`_ and `configurations`_ for a way to supply +system-dependent values for these fields. + +.. pkg-field:: build-depends: library list + + Declares the *library* dependencies required to build the current + package component; see :pkg-field:`build-tool-depends` for + declaring build-time *tool* dependencies. External library + dependencies should be annotated with a version constraint. 
+
+    **Library Names**
+
+    External libraries are identified by the package's name they're
+    provided by (currently a package can only publicly expose its
+    main library component; in future, packages with multiple exposed
+    public library components will be supported and a syntax for
+    referring to public sub-libraries will be provided).
+
+    In order to specify an intra-package dependency on an internal
+    library component you can use the unqualified name of the
+    library component. Note that locally defined sub-library
+    names shadow external package names of the same name. See section on
+    :ref:`Internal Libraries <sublibs>` for examples and more information.
+
+    **Version Constraints**
+
+    Version constraints use the operators ``==, >=, >, <, <=`` and a
+    version number. Multiple constraints can be combined using ``&&`` or
+    ``||``. If no version constraint is specified, any version is
+    assumed to be acceptable. For example:
+
+    ::
+
+        library
+          build-depends:
+            base >= 2,
+            foo >= 1.2.3 && < 1.3,
+            bar
+
+    Dependencies like ``foo >= 1.2.3 && < 1.3`` turn out to be very
+    common because it is recommended practice for package versions to
+    correspond to API versions (see PVP_).
+
+    Since Cabal 1.6, there is a special wildcard syntax to help with
+    such ranges
+
+    ::
+
+        build-depends: foo ==1.2.*
+
+    It is only syntactic sugar. It is exactly equivalent to
+    ``foo >= 1.2 && < 1.3``.
+
+    .. Warning::
+
+        A potential pitfall of the wildcard syntax is that the
+        constraint ``nats == 1.0.*`` doesn't match the release
+        ``nats-1`` because the version ``1`` is lexicographically less
+        than ``1.0``. This is not an issue with the caret-operator
+        ``^>=`` described below.
+ + Starting with Cabal 2.0, there's a new version operator to express + PVP_-style major upper bounds conveniently, and is inspired by similar + syntactic sugar found in other language ecosystems where it's often + called the "Caret" operator: + + :: + + build-depends: + foo ^>= 1.2.3.4, + bar ^>= 1 + + This allows to assert the positive knowledge that this package is + *known* to be semantically compatible with the releases + ``foo-1.2.3.4`` and ``bar-1`` respectively. The information + encoded via such ``^>=``-assertions is used by the cabal solver to + infer version constraints describing semantically compatible + version ranges according to the PVP_ contract (see below). + + Another way to say this is that ``foo < 1.3`` expresses *negative* + information, i.e. "``foo-1.3`` or ``foo-1.4.2`` will *not* be + compatible"; whereas ``foo ^>= 1.2.3.4`` asserts the *positive* + information that "``foo-1.2.3.4`` is *known* to be compatible" and (in + the absence of additional information) according to the PVP_ + contract we can (positively) infer right away that all versions + satisfying ``foo >= 1.2.3.4 && < 1.3`` will be compatible as well. + + .. Note:: + + More generally, the PVP_ contract implies that we can safely + relax the lower bound to ``>= 1.2``, because if we know that + ``foo-1.2.3.4`` is semantically compatible, then so is + ``foo-1.2`` (if it typechecks). But we'd need to perform + additional static analysis (i.e. perform typechecking) in order + to know if our package in the role of an API consumer will + successfully typecheck against the dependency ``foo-1.2``. But + since we cannot do this analysis during constraint solving and + to keep things simple, we pragmatically use ``foo >= 1.2.3.4`` + as the initially inferred approximation for the lower bound + resulting from the assertion ``foo ^>= 1.2.3.4``. If further + evidence becomes available that e.g. 
``foo-1.2`` typechecks,
+        one can simply revise the dependency specification to include
+        the assertion ``foo ^>= 1.2``.
+
+    The subtle but important difference in signaling allows tooling to
+    treat explicitly expressed ``<``-style constraints and inferred
+    (``^>=``-style) upper bounds differently. For instance,
+    :option:`--allow-newer`'s ``^``-modifier allows relaxing only
+    ``^>=``-style bounds while leaving explicitly stated
+    ``<``-constraints unaffected.
+
+    Ignoring the signaling intent, the default syntactic desugaring rules are
+
+    - ``^>= x`` == ``>= x && < x.1``
+    - ``^>= x.y`` == ``>= x.y && < x.(y+1)``
+    - ``^>= x.y.z`` == ``>= x.y.z && < x.(y+1)``
+    - ``^>= x.y.z.u`` == ``>= x.y.z.u && < x.(y+1)``
+    - etc.
+
+    .. Note::
+
+        One might expect the desugaring to truncate all version
+        components below (and including) the patch-level, i.e.
+        ``^>= x.y.z.u`` == ``>= x.y.z && < x.(y+1)``,
+        as the major and minor version components alone are supposed to
+        uniquely identify the API according to the PVP_. However, by
+        designing ``^>=`` to be closer to the ``>=`` operator, we avoid
+        the potentially confusing effect of ``^>=`` being more liberal
+        than ``>=`` in the presence of patch-level versions.
+
+    Consequently, the example declaration above is equivalent to
+
+    ::
+
+        build-depends:
+            foo >= 1.2.3.4 && < 1.3,
+            bar >= 1 && < 1.1
+
+    .. Note::
+
+        Prior to Cabal 1.8, ``build-depends`` specified in each
+        section were global to all sections. This was unintentional, but
+        some packages were written to depend on it, so if you need your
+        :pkg-field:`build-depends` to be local to each section, you must specify
+        at least ``Cabal-Version: >= 1.8`` in your ``.cabal`` file.
+
+    .. Note::
+
+        Cabal 1.20 experimentally supported module thinning and
+        renaming in ``build-depends``; however, this support has since been
+        removed and should not be used.
+
+..
pkg-field:: other-modules: identifier list + + A list of modules used by the component but not exposed to users. + For a library component, these would be hidden modules of the + library. For an executable, these would be auxiliary modules to be + linked with the file named in the ``main-is`` field. + + .. Note:: + + Every module in the package *must* be listed in one of + :pkg-field:`other-modules`, :pkg-field:`library:exposed-modules` or + :pkg-field:`executable:main-is` fields. + +.. pkg-field:: hs-source-dirs: directory list + + :default: ``.`` + + Root directories for the module hierarchy. + + For backwards compatibility, the old variant ``hs-source-dir`` is + also recognized. + +.. pkg-field:: default-extensions: identifier list + + A list of Haskell extensions used by every module. These determine + corresponding compiler options enabled for all files. Extension + names are the constructors of the + `Extension <../release/cabal-latest/doc/API/Cabal/Language-Haskell-Extension.html#t:Extension>`__ + type. For example, ``CPP`` specifies that Haskell source files are + to be preprocessed with a C preprocessor. + +.. pkg-field:: other-extensions: identifier list + + A list of Haskell extensions used by some (but not necessarily all) + modules. From GHC version 6.6 onward, these may be specified by + placing a ``LANGUAGE`` pragma in the source files affected e.g. + + .. code-block:: haskell + + {-# LANGUAGE CPP, MultiParamTypeClasses #-} + + In Cabal-1.24 the dependency solver will use this and + :pkg-field:`default-extensions` information. Cabal prior to 1.24 will abort + compilation if the current compiler doesn't provide the extensions. 
+ + If you use some extensions conditionally, using CPP or conditional + module lists, it is good to replicate the condition in + :pkg-field:`other-extensions` declarations: + + :: + + other-extensions: CPP + if impl(ghc >= 7.5) + other-extensions: PolyKinds + + You could also omit the conditionally used extensions, as they are + for information only, but it is recommended to replicate them in + :pkg-field:`other-extensions` declarations. + +.. pkg-field:: extensions: identifier list + :deprecated: + + Deprecated in favor of :pkg-field:`default-extensions`. + +.. pkg-field:: build-tool-depends: package:executable list + :since: 2.0 + + A list of Haskell programs needed to build this component. + Each is specified by the package containing the executable and the name of the executable itself, separated by a colon, and optionally followed by a version bound. + It is fine for the package to be the current one, in which case this is termed an *internal*, rather than *external* executable dependency. + + External dependencies can (and should) contain a version bound like conventional :pkg-field:`build-depends` dependencies. + Internal deps should not contain a version bound, as they will be always resolved within the same configuration of the package in the build plan. + Specifically, version bounds that include the package's version will be warned for being extraneous, and version bounds that exclude the package's version will raise an error for being impossible to follow. + + Cabal can make sure that specified programs are built and on the ``PATH`` before building the component in question. + It will always do so for internal dependencies, and also do so for external dependencies when using Nix-style local builds. + + :pkg-field:`build-tool-depends` was added in Cabal 2.0, and it will + be ignored (with a warning) with old versions of Cabal. See + :pkg-field:`build-tools` for more information about backwards + compatibility. + +.. 
pkg-field:: build-tools: program list
+    :deprecated:
+
+    Deprecated in favor of :pkg-field:`build-tool-depends`, but :ref:`see below for backwards compatibility information <buildtoolsbc>`.
+
+    A list of Haskell programs needed to build this component.
+    Each may be followed by an optional version bound.
+    Confusingly, each program in the list may refer to one of three things:
+
+    1. Another executable in the same package (supported since Cabal 1.12)
+
+    2. A tool name contained in Cabal's :ref:`hard-coded set of common tools <buildtoolsmap>`
+
+    3. A pre-built executable that should already be on the ``PATH``
+       (supported since Cabal 2.0)
+
+    These cases are listed in order of priority:
+    an executable in the package will override any of the hard-coded packages with the same name,
+    and a hard-coded package will override any executable on the ``PATH``.
+
+    In the first two cases, the list entry is desugared into a :pkg-field:`build-tool-depends` entry.
+    In the first case, the entry is desugared into a :pkg-field:`build-tool-depends` entry by prefixing with ``$pkg:``.
+    In the second case, it is desugared by looking up the package and executable name in a hard-coded table.
+    In either case, the optional version bound is passed through unchanged.
+    Refer to the documentation for :pkg-field:`build-tool-depends` to understand the desugared field's meaning, along with restrictions on version bounds.
+
+    .. _buildtoolsbc:
+
+    **Backward Compatibility**
+
+    Although this field is deprecated in favor of :pkg-field:`build-tool-depends`, there are some situations where you may prefer to use :pkg-field:`build-tools` in cases (1) and (2), as it is supported by more versions of Cabal.
+    In case (3), :pkg-field:`build-tool-depends` is better for backwards-compatibility, as it will be ignored by old versions of Cabal; if you add the executable to :pkg-field:`build-tools`, a setup script built against old Cabal will choke.
+ If an old version of Cabal is used, an end-user will have to manually arrange for the requested executable to be in your ``PATH``. + + .. _buildtoolsmap: + + **Set of Known Tool Names** + + Identifiers specified in :pkg-field:`build-tools` are desugared into their respective equivalent :pkg-field:`build-tool-depends` form according to the table below. Consequently, a legacy specification such as:: + + build-tools: alex >= 3.2.1 && < 3.3, happy >= 1.19.5 && < 1.20 + + is simply desugared into the equivalent specification:: + + build-tool-depends: alex:alex >= 3.2.1 && < 3.3, happy:happy >= 1.19.5 && < 1.20 + + +--------------------------+-----------------------------------+-----------------+ + | :pkg-field:`build-tools` | desugared | Note | + | identifier | :pkg-field:`build-tool-depends` | | + | | identifier | | + +==========================+===================================+=================+ + | ``alex`` | ``alex:alex`` | | + +--------------------------+-----------------------------------+-----------------+ + | ``c2hs`` | ``c2hs:c2hs`` | | + +--------------------------+-----------------------------------+-----------------+ + | ``cpphs`` | ``cpphs:cpphs`` | | + +--------------------------+-----------------------------------+-----------------+ + | ``greencard`` | ``greencard:greencard`` | | + +--------------------------+-----------------------------------+-----------------+ + | ``haddock`` | ``haddock:haddock`` | | + +--------------------------+-----------------------------------+-----------------+ + | ``happy`` | ``happy:happy`` | | + +--------------------------+-----------------------------------+-----------------+ + | ``hsc2hs`` | ``hsc2hs:hsc2hs`` | | + +--------------------------+-----------------------------------+-----------------+ + | ``hscolour`` | ``hscolour:hscolour`` | | + +--------------------------+-----------------------------------+-----------------+ + | ``hspec-discover`` | ``hspec-discover:hspec-discover`` | since Cabal 2.0 | + 
+--------------------------+-----------------------------------+-----------------+ + + This built-in set can be programmatically extended via ``Custom`` setup scripts; this, however, is of limited use since the Cabal solver cannot access information injected by ``Custom`` setup scripts. + +.. pkg-field:: buildable: boolean + + :default: ``True`` + + Is the component buildable? Like some of the other fields below, + this field is more useful with the slightly more elaborate form of + the simple build infrastructure described in the section on + `system-dependent parameters`_. + +.. pkg-field:: ghc-options: token list + + Additional options for GHC. You can often achieve the same effect + using the :pkg-field:`extensions` field, which is preferred. + + Options required only by one module may be specified by placing an + ``OPTIONS_GHC`` pragma in the source file affected. + + As with many other fields, whitespace can be escaped by using + Haskell string syntax. Example: + ``ghc-options: -Wcompat "-with-rtsopts=-T -I1" -Wall``. + +.. pkg-field:: ghc-prof-options: token list + + Additional options for GHC when the package is built with profiling + enabled. + + Note that as of Cabal-1.24, the default profiling detail level + defaults to ``exported-functions`` for libraries and + ``toplevel-functions`` for executables. For GHC these correspond to + the flags ``-fprof-auto-exported`` and ``-fprof-auto-top``. Prior to + Cabal-1.24 the level defaulted to ``none``. These levels can be + adjusted by the person building the package with the + ``--profiling-detail`` and ``--library-profiling-detail`` flags. + + It is typically better for the person building the package to pick + the profiling detail level rather than for the package author. So + unless you have special needs it is probably better not to specify + any of the GHC ``-fprof-auto*`` flags here. 
However if you wish to
+    override the profiling detail level, you can do so using the
+    :pkg-field:`ghc-prof-options` field: use ``-fno-prof-auto`` or one of the
+    other ``-fprof-auto*`` flags.
+
+.. pkg-field:: ghc-shared-options: token list
+
+    Additional options for GHC when the package is built as a shared
+    library. The options specified via this field are combined with the
+    ones specified via :pkg-field:`ghc-options`, and are passed to GHC during
+    both the compile and link phases.
+
+.. pkg-field:: includes: filename list
+
+    A list of header files to be included in any compilations via C.
+    This field applies to both header files that are already installed
+    on the system and to those coming with the package to be installed.
+    The former files should be found in absolute paths, while the latter
+    files should be found in paths relative to the top of the source
+    tree or relative to one of the directories listed in
+    :pkg-field:`include-dirs`.
+
+    These files typically contain function prototypes for foreign
+    imports used by the package. This is in contrast to
+    :pkg-field:`install-includes`, which lists header files that are intended
+    to be exposed to other packages that transitively depend on this
+    library.
+
+.. pkg-field:: install-includes: filename list
+
+    A list of header files from this package to be installed into
+    ``$libdir/includes`` when the package is installed. Files listed in
+    :pkg-field:`install-includes` should be found relative to the top of the
+    source tree or relative to one of the directories listed in
+    :pkg-field:`include-dirs`.
+
+    :pkg-field:`install-includes` is typically used to name header files that
+    contain prototypes for foreign imports used in Haskell code in this
+    package, for which the C implementations are also provided with the
+    package.
For example, here is a ``.cabal`` file for a hypothetical + ``bindings-clib`` package that bundles the C source code for ``clib``:: + + include-dirs: cbits + c-sources: clib.c + install-includes: clib.h + + Now any package that depends (directly or transitively) on the + ``bindings-clib`` library can use ``clib.h``. + + Note that in order for files listed in :pkg-field:`install-includes` to be + usable when compiling the package itself, they need to be listed in + the :pkg-field:`includes` field as well. + +.. pkg-field:: include-dirs: directory list + + A list of directories to search for header files, when preprocessing + with ``c2hs``, ``hsc2hs``, ``cpphs`` or the C preprocessor, and also + when compiling via C. Directories can be absolute paths (e.g., for + system directories) or paths that are relative to the top of the + source tree. Cabal looks in these directories when attempting to + locate files listed in :pkg-field:`includes` and + :pkg-field:`install-includes`. + +.. pkg-field:: c-sources: filename list + + A list of C source files to be compiled and linked with the Haskell + files. + +.. pkg-field:: cxx-sources: filename list + :since: 2.2 + + A list of C++ source files to be compiled and linked with the Haskell + files. Useful for segregating C and C++ sources when supplying different + command-line arguments to the compiler via the :pkg-field:`cc-options` + and the :pkg-field:`cxx-options` fields. The files listed in the + :pkg-field:`cxx-sources` can reference files listed in the + :pkg-field:`c-sources` field and vice-versa. The object files will be linked + appropriately. + +.. pkg-field:: asm-sources: filename list + + A list of assembly source files to be compiled and linked with the + Haskell files. + +.. pkg-field:: cmm-sources: filename list + + A list of C-- source files to be compiled and linked with the Haskell + files. + +.. 
pkg-field:: js-sources: filename list
+
+    A list of JavaScript source files to be linked with the Haskell
+    files (only for JavaScript targets).
+
+.. pkg-field:: extra-libraries: token list
+
+    A list of extra libraries to link with.
+
+.. pkg-field:: extra-ghci-libraries: token list
+
+    A list of extra libraries to be used instead of 'extra-libraries'
+    when the package is loaded with GHCi.
+
+.. pkg-field:: extra-bundled-libraries: token list
+
+    A list of libraries that are supposed to be copied from the build
+    directory alongside the produced Haskell libraries. Note that you
+    are under the obligation to produce those libraries in the build
+    directory (e.g. via a custom setup). Libraries listed here will
+    be included when ``copy``-ing packages and be listed in the
+    ``hs-libraries`` of the package configuration.
+
+.. pkg-field:: extra-lib-dirs: directory list
+
+    A list of directories to search for libraries.
+
+.. pkg-field:: cc-options: token list
+
+    Command-line arguments to be passed to the C compiler. Since the
+    arguments are compiler-dependent, this field is more useful with the
+    setup described in the section on `system-dependent parameters`_.
+
+.. pkg-field:: cpp-options: token list
+
+    Command-line arguments for pre-processing Haskell code. Applies to
+    Haskell source and other pre-processed Haskell source like .hsc
+    .chs. Does not apply to C code, that's what cc-options is for.
+
+.. pkg-field:: cxx-options: token list
+    :since: 2.2
+
+    Command-line arguments to be passed to the compiler when compiling
+    C++ code. The C++ sources to which these command-line arguments
+    should be applied can be specified with the :pkg-field:`cxx-sources`
+    field.
Command-line options for C and C++ can be passed separately to + the compiler when compiling both C and C++ sources by segregating the C + and C++ sources with the :pkg-field:`c-sources` and + :pkg-field:`cxx-sources` fields respectively, and providing different + command-line arguments with the :pkg-field:`cc-options` and the + :pkg-field:`cxx-options` fields. + +.. pkg-field:: ld-options: token list + + Command-line arguments to be passed to the linker. Since the + arguments are compiler-dependent, this field is more useful with the + setup described in the section on `system-dependent parameters`_. + +.. pkg-field:: pkgconfig-depends: package list + + A list of + `pkg-config `__ + packages, needed to build this package. They can be annotated with + versions, e.g. ``gtk+-2.0 >= 2.10, cairo >= 1.0``. If no version + constraint is specified, any version is assumed to be acceptable. + Cabal uses ``pkg-config`` to find if the packages are available on + the system and to find the extra compilation and linker options + needed to use the packages. + + If you need to bind to a C library that supports ``pkg-config`` (use + ``pkg-config --list-all`` to find out if it is supported) then it is + much preferable to use this field rather than hard code options into + the other fields. + +.. pkg-field:: frameworks: token list + + On Darwin/MacOS X, a list of frameworks to link to. See Apple's + developer documentation for more details on frameworks. This entry + is ignored on all other platforms. + +.. pkg-field:: extra-frameworks-dirs: directory list + + On Darwin/MacOS X, a list of directories to search for frameworks. + This entry is ignored on all other platforms. + +.. pkg-field:: mixins: mixin list + :since: 2.0 + + Supported only in GHC 8.2 and later. A list of packages mentioned in the + :pkg-field:`build-depends` field, each optionally accompanied by a list of + module and module signature renamings. 
+ + The simplest mixin syntax is simply the name of a package mentioned in the + :pkg-field:`build-depends` field. For example: + + :: + + library + build-depends: + foo >= 1.2.3 && < 1.3 + mixins: + foo + + But this doesn't have any effect. More interesting is to use the mixin + entry to rename one or more modules from the package, like this: + + :: + + library + mixins: + foo (Foo.Bar as AnotherFoo.Bar, Foo.Baz as AnotherFoo.Baz) + + Note that renaming a module like this will hide all the modules + that are not explicitly named. + + Modules can also be hidden: + + :: + + library: + mixins: + foo hiding (Foo.Bar) + + Hiding modules exposes everything that is not explicitly hidden. + + .. Note:: + + The current version of Cabal suffers from an infelicity in how the + entries of :pkg-field:`mixins` are parsed: an entry will fail to parse + if the provided renaming clause has whitespace after the opening + parenthesis. This will be fixed in future versions of Cabal. + + See issues `#5150 `__, + `#4864 `__, and + `#5293 `__. + + There can be multiple mixin entries for a given package, in effect creating + multiple copies of the dependency: + + :: + + library + mixins: + foo (Foo.Bar as AnotherFoo.Bar, Foo.Baz as AnotherFoo.Baz), + foo (Foo.Bar as YetAnotherFoo.Bar) + + The ``requires`` clause is used to rename the module signatures required by + a package: + + :: + + library + mixins: + foo (Foo.Bar as AnotherFoo.Bar) requires (Foo.SomeSig as AnotherFoo.SomeSig) + + Signature-only packages don't have any modules, so only the signatures can + be renamed, with the following syntax: + + :: + + library + mixins: + sigonly requires (SigOnly.SomeSig as AnotherSigOnly.SomeSig) + + See the :pkg-field:`signatures` field for more details. + + Mixin packages are part of the `Backpack + `__ extension to the + Haskell module system. 
+ + The matching of the module signatures required by a + :pkg-field:`build-depends` dependency with the implementation modules + present in another dependency is triggered by a coincidence of names. When + the names of the signature and of the implementation are already the same, + the matching is automatic. But when the names don't coincide, or we want to + instantiate a signature in two different ways, adding mixin entries that + perform renamings becomes necessary. + + .. Warning:: + + Backpack has the limitation that implementation modules that instantiate + signatures required by a :pkg-field:`build-depends` dependency can't + reside in the same component that has the dependency. They must reside + in a different package dependency, or at least in a separate internal + library. + +Configurations +^^^^^^^^^^^^^^ + +Library and executable sections may include conditional blocks, which +test for various system parameters and configuration flags. The flags +mechanism is rather generic, but most of the time a flag represents +certain feature, that can be switched on or off by the package user. +Here is an example package description file using configurations: + +Example: A package containing a library and executable programs +""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +:: + + Name: Test1 + Version: 0.0.1 + Cabal-Version: >= 1.8 + License: BSD3 + Author: Jane Doe + Synopsis: Test package to test configurations + Category: Example + Build-Type: Simple + + Flag Debug + Description: Enable debug support + Default: False + Manual: True + + Flag WebFrontend + Description: Include API for web frontend. 
+ Default: False + Manual: True + + Flag NewDirectory + description: Whether to build against @directory >= 1.2@ + -- This is an automatic flag which the solver will + -- assign automatically while searching for a solution + + Library + Build-Depends: base >= 4.2 && < 4.9 + Exposed-Modules: Testing.Test1 + Extensions: CPP + + GHC-Options: -Wall + if flag(Debug) + CPP-Options: -DDEBUG + if !os(windows) + CC-Options: "-DDEBUG" + else + CC-Options: "-DNDEBUG" + + if flag(WebFrontend) + Build-Depends: cgi >= 0.42 && < 0.44 + Other-Modules: Testing.WebStuff + CPP-Options: -DWEBFRONTEND + + if flag(NewDirectory) + build-depends: directory >= 1.2 && < 1.4 + Build-Depends: time >= 1.0 && < 1.9 + else + build-depends: directory == 1.1.* + Build-Depends: old-time >= 1.0 && < 1.2 + + Executable test1 + Main-is: T1.hs + Other-Modules: Testing.Test1 + Build-Depends: base >= 4.2 && < 4.9 + + if flag(debug) + CC-Options: "-DDEBUG" + CPP-Options: -DDEBUG + +Layout +"""""" + +Flags, conditionals, library and executable sections use layout to +indicate structure. This is very similar to the Haskell layout rule. +Entries in a section have to all be indented to the same level which +must be more than the section header. Tabs are not allowed to be used +for indentation. + +As an alternative to using layout you can also use explicit braces +``{}``. In this case the indentation of entries in a section does not +matter, though different fields within a block must be on different +lines. 
Here is a bit of the above example again, using braces: + +Example: Using explicit braces rather than indentation for layout +""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" + +:: + + Name: Test1 + Version: 0.0.1 + Cabal-Version: >= 1.8 + License: BSD3 + Author: Jane Doe + Synopsis: Test package to test configurations + Category: Example + Build-Type: Simple + + Flag Debug { + Description: Enable debug support + Default: False + Manual: True + } + + Library { + Build-Depends: base >= 4.2 && < 4.9 + Exposed-Modules: Testing.Test1 + Extensions: CPP + if flag(debug) { + CPP-Options: -DDEBUG + if !os(windows) { + CC-Options: "-DDEBUG" + } else { + CC-Options: "-DNDEBUG" + } + } + } + +Configuration Flags +""""""""""""""""""" + +.. pkg-section:: flag name + :synopsis: Flag declaration. + + Flag section declares a flag which can be used in `conditional blocks`_. + + Flag names are case-insensitive and must match ``[[:alnum:]_][[:alnum:]_-]*`` + regular expression, or expressed as ABNF_: + + .. code-block:: abnf + + flag-name = (UALNUM / "_") *(UALNUM / "_" / "-") + + UALNUM = UALPHA / DIGIT + UALPHA = ... ; set of alphabetic Unicode code-points + + .. note:: + + Hackage accepts ASCII-only flags, ``[a-zA-Z0-9_][a-zA-Z0-9_-]*`` regexp. + +.. pkg-field:: description: freeform + + The description of this flag. + +.. pkg-field:: default: boolean + + :default: ``True`` + + The default value of this flag. + + .. note:: + + This value may be `overridden in several + ways `__. The + rationale for having flags default to True is that users usually + want new features as soon as they are available. Flags representing + features that are not (yet) recommended for most users (such as + experimental features or debugging support) should therefore + explicitly override the default to False. + +.. 
pkg-field:: manual: boolean + + :default: ``False`` + :since: 1.6 + + By default, Cabal will first try to satisfy dependencies with the + default flag value and then, if that is not possible, with the + negated value. However, if the flag is manual, then the default + value (which can be overridden by commandline flags) will be used. + +Conditional Blocks +^^^^^^^^^^^^^^^^^^ + +Conditional blocks may appear anywhere inside a library or executable +section. They have to follow rather strict formatting rules. Conditional +blocks must always be of the shape + +:: + + if condition + property-descriptions-or-conditionals + +or + +:: + + if condition + property-descriptions-or-conditionals + else + property-descriptions-or-conditionals + +Note that the ``if`` and the condition have to be all on the same line. + +Since Cabal 2.2 conditional blocks support ``elif`` construct. + +:: + + if condition1 + property-descriptions-or-conditionals + elif condition2 + property-descriptions-or-conditionals + else + property-descriptions-or-conditionals + +Conditions +"""""""""" + +Conditions can be formed using boolean tests and the boolean operators +``||`` (disjunction / logical "or"), ``&&`` (conjunction / logical +"and"), or ``!`` (negation / logical "not"). The unary ``!`` takes +highest precedence, ``||`` takes lowest. Precedence levels may be +overridden through the use of parentheses. For example, +``os(darwin) && !arch(i386) || os(freebsd)`` is equivalent to +``(os(darwin) && !(arch(i386))) || os(freebsd)``. + +The following tests are currently supported. + +:samp:`os({name})` + Tests if the current operating system is *name*. The argument is + tested against ``System.Info.os`` on the target system. There is + unfortunately some disagreement between Haskell implementations + about the standard values of ``System.Info.os``. Cabal canonicalises + it so that in particular ``os(windows)`` works on all + implementations. 
If the canonicalised os names match, this test + evaluates to true, otherwise false. The match is case-insensitive. +:samp:`arch({name})` + Tests if the current architecture is *name*. The argument is matched + against ``System.Info.arch`` on the target system. If the arch names + match, this test evaluates to true, otherwise false. The match is + case-insensitive. +:samp:`impl({compiler})` + Tests for the configured Haskell implementation. An optional version + constraint may be specified (for example ``impl(ghc >= 6.6.1)``). If + the configured implementation is of the right type and matches the + version constraint, then this evaluates to true, otherwise false. + The match is case-insensitive. + + Note that including a version constraint in an ``impl`` test causes + it to check for two properties: + + - The current compiler has the specified name, and + + - The compiler's version satisfies the specified version constraint + + As a result, ``!impl(ghc >= x.y.z)`` is not entirely equivalent to + ``impl(ghc < x.y.z)``. The test ``!impl(ghc >= x.y.z)`` checks that: + + - The current compiler is not GHC, or + + - The version of GHC is earlier than version x.y.z. + +:samp:`flag({name})` + Evaluates to the current assignment of the flag of the given name. + Flag names are case insensitive. Testing for flags that have not + been introduced with a flag section is an error. +``true`` + Constant value true. +``false`` + Constant value false. + +Resolution of Conditions and Flags +"""""""""""""""""""""""""""""""""" + +If a package description specifies configuration flags, the package user +can `control these in several +ways `__. If the +user does not fix the value of a flag, Cabal will try to find a flag +assignment in the following way. + +- For each flag specified, it will assign its default value, evaluate + all conditions with this flag assignment, and check if all + dependencies can be satisfied. 
If this check succeeded, the package + will be configured with those flag assignments. + +- If dependencies were missing, the last flag (as by the order in which + the flags were introduced in the package description) is tried with + its alternative value and so on. This continues until either an + assignment is found where all dependencies can be satisfied, or all + possible flag assignments have been tried. + +To put it another way, Cabal does a complete backtracking search to find +a satisfiable package configuration. It is only the dependencies +specified in the :pkg-field:`build-depends` field in conditional blocks that +determine if a particular flag assignment is satisfiable +(:pkg-field:`build-tools` are not considered). The order of the declaration and +the default value of the flags determines the search order. Flags +overridden on the command line fix the assignment of that flag, so no +backtracking will be tried for that flag. + +If no suitable flag assignment could be found, the configuration phase +will fail and a list of missing dependencies will be printed. Note that +this resolution process is exponential in the worst case (i.e., in the +case where dependencies cannot be satisfied). There are some +optimizations applied internally, but the overall complexity remains +unchanged. + +Meaning of field values when using conditionals +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +During the configuration phase, a flag assignment is chosen, all +conditionals are evaluated, and the package description is combined into +a flat package description. If the same field is specified both inside a conditional +and outside, then the values are combined using the following rules. + +- Boolean fields are combined using conjunction (logical "and"). 
+ +- List fields are combined by appending the inner items to the outer + items, for example + + :: + + other-extensions: CPP + if impl(ghc) + other-extensions: MultiParamTypeClasses + + when compiled using GHC will be combined to + + :: + + other-extensions: CPP, MultiParamTypeClasses + + Similarly, if two conditional sections appear at the same nesting + level, properties specified in the latter will come after properties + specified in the former. + +- All other fields must not be specified in ambiguous ways. For example + + :: + + Main-is: Main.hs + if flag(useothermain) + Main-is: OtherMain.hs + + will lead to an error. Instead use + + :: + + if flag(useothermain) + Main-is: OtherMain.hs + else + Main-is: Main.hs + +Common stanzas +^^^^^^^^^^^^^^ + +.. pkg-section:: common name + :since: 2.2 + :synopsis: Common build info section + +Starting with Cabal-2.2 it's possible to use common build info stanzas. + +:: + + common deps + build-depends: base ^>= 4.11 + ghc-options: -Wall + + common test-deps + build-depends: tasty + + library + import: deps + exposed-modules: Foo + + test-suite tests + import: deps, test-deps + type: exitcode-stdio-1.0 + main-is: Tests.hs + build-depends: foo + +- You can use `build information`_ fields in common stanzas. + +- Common stanzas must be defined before use. + +- Common stanzas can import other common stanzas. + +- You can import multiple stanzas at once. Stanza names must be separated by commas. + +.. Note:: + + The name `import` was chosen, because there is ``includes`` field. + +Source Repositories +^^^^^^^^^^^^^^^^^^^ + +.. pkg-section:: source-repository + :since: 1.6 + +It is often useful to be able to specify a source revision control +repository for a package. Cabal lets you specifying this information in +a relatively structured form which enables other tools to interpret and +make effective use of the information. For example the information +should be sufficient for an automatic tool to checkout the sources. 
+ +Cabal supports specifying different information for various common +source control systems. Obviously not all automated tools will support +all source control systems. + +Cabal supports specifying repositories for different use cases. By +declaring which case we mean automated tools can be more useful. There +are currently two kinds defined: + +- The ``head`` kind refers to the latest development branch of the + package. This may be used for example to track activity of a project + or as an indication to outside developers what sources to get for + making new contributions. + +- The ``this`` kind refers to the branch and tag of a repository that + contains the sources for this version or release of a package. For + most source control systems this involves specifying a tag, id or + hash of some form and perhaps a branch. The purpose is to be able to + reconstruct the sources corresponding to a particular package + version. This might be used to indicate what sources to get if + someone needs to fix a bug in an older branch that is no longer an + active head branch. + +You can specify one kind or the other or both. As an example here are +the repositories for the Cabal library. Note that the ``this`` kind of +repository specifies a tag. + +:: + + source-repository head + type: darcs + location: http://darcs.haskell.org/cabal/ + + source-repository this + type: darcs + location: http://darcs.haskell.org/cabal-branches/cabal-1.6/ + tag: 1.6.1 + +The exact fields are as follows: + +.. pkg-field:: type: token + + The name of the source control system used for this repository. The + currently recognised types are: + + - ``darcs`` + - ``git`` + - ``svn`` + - ``cvs`` + - ``mercurial`` (or alias ``hg``) + - ``bazaar`` (or alias ``bzr``) + - ``arch`` + - ``monotone`` + + This field is required. + +.. pkg-field:: location: URL + + The location of the repository. The exact form of this field depends + on the repository type. 
For example: + + - for darcs: ``http://code.haskell.org/foo/`` + - for git: ``git://github.com/foo/bar.git`` + - for CVS: ``anoncvs@cvs.foo.org:/cvs`` + + This field is required. + +.. pkg-field:: module: token + + CVS requires a named module, as each CVS server can host multiple + named repositories. + + This field is required for the CVS repository type and should not be + used otherwise. + +.. pkg-field:: branch: token + + Many source control systems support the notion of a branch, as a + distinct concept from having repositories in separate locations. For + example CVS, SVN and git use branches while darcs uses different + locations for different branches. If you need to specify a branch to + identify your repository, then specify it in this field. + + This field is optional. + +.. pkg-field:: tag: token + + A tag identifies a particular state of a source repository. The tag + can be used with a ``this`` repository kind to identify the state of + a repository corresponding to a particular package version or + release. The exact form of the tag depends on the repository type. + + This field is required for the ``this`` repository kind. + +.. pkg-field:: subdir: directory + + Some projects put the sources for multiple packages under a single + source repository. This field lets you specify the relative path + from the root of the repository to the top directory for the + package, i.e. the directory containing the package's ``.cabal`` + file. + + This field is optional. It defaults to empty, which corresponds to the + root directory of the repository. + +Downloading a package's source +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The ``cabal get`` command allows you to access a package's source code - +either by unpacking a tarball downloaded from Hackage (the default) or +by checking out a working copy from the package's source repository. 
+ +:: + + $ cabal get [FLAGS] PACKAGES + +The ``get`` command supports the following options: + +``-d --destdir`` *PATH* + Where to place the package source, defaults to (a subdirectory of) + the current directory. +``-s --source-repository`` *[head\|this\|...]* + Fork the package's source repository using the appropriate version + control system. The optional argument allows to choose a specific + repository kind. +``--index-state`` *[HEAD\|@\|]* + Use source package index state as it existed at a previous time. Accepts + unix-timestamps (e.g. ``@1474732068``), ISO8601 UTC timestamps (e.g. + ``2016-09-24T17:47:48Z``), or ``HEAD`` (default). + This determines which package versions are available as well as which + ``.cabal`` file revision is selected (unless ``--pristine`` is used). +``--pristine`` + Unpack the original pristine tarball, rather than updating the + ``.cabal`` file with the latest revision from the package archive. + +Custom setup scripts +-------------------- + +Since Cabal 1.24, custom ``Setup.hs`` are required to accurately track +their dependencies by declaring them in the ``.cabal`` file rather than +rely on dependencies being implicitly in scope. Please refer +`this article `__ +for more details. + +Declaring a ``custom-setup`` stanza also enables the generation of +``MIN_VERSION_package_(A,B,C)`` CPP macros for the Setup component. + +.. pkg-section:: custom-setup + :synopsis: Custom Setup.hs build information. + :since: 1.24 + + The optional :pkg-section:`custom-setup` stanza contains information needed + for the compilation of custom ``Setup.hs`` scripts, + +:: + + custom-setup + setup-depends: + base >= 4.5 && < 4.11, + Cabal >= 1.14 && < 1.25 + +.. pkg-field:: setup-depends: package list + :since: 1.24 + + The dependencies needed to compile ``Setup.hs``. See the + :pkg-field:`build-depends` field for a description of the syntax expected by + this field. 
+ +Backward compatibility and ``custom-setup`` +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Versions prior to Cabal 1.24 don't recognise ``custom-setup`` stanzas, +and will behave agnostic to them (except for warning about an unknown +section). Consequently, versions prior to Cabal 1.24 can't ensure the +declared dependencies ``setup-depends`` are in scope, and instead +whatever is registered in the current package database environment +will become eligible (and resolved by the compiler) for the +``Setup.hs`` module. + +The availability of the +``MIN_VERSION_package_(A,B,C)`` CPP macros +inside ``Setup.hs`` scripts depends on the condition that either + +- a ``custom-setup`` section has been declared (or ``cabal new-build`` is being + used which injects an implicit hard-coded ``custom-setup`` stanza if it's missing), or +- GHC 8.0 or later is used (which natively injects package version CPP macros) + +Consequently, if you need to write backward compatible ``Setup.hs`` +scripts using CPP, you should declare a ``custom-setup`` stanza and +use the pattern below: + +.. code-block:: haskell + + {-# LANGUAGE CPP #-} + import Distribution.Simple + + #if defined(MIN_VERSION_Cabal) + -- version macros are available and can be used as usual + # if MIN_VERSION_Cabal(a,b,c) + -- code specific to lib:Cabal >= a.b.c + # else + -- code specific to lib:Cabal < a.b.c + # endif + #else + # warning Enabling heuristic fall-back. Please upgrade cabal-install to 1.24 or later if Setup.hs fails to compile. + + -- package version macros not available; except for exotic environments, + -- you can heuristically assume that lib:Cabal's version is correlated + -- with __GLASGOW_HASKELL__, and specifically since we can assume that + -- GHC < 8.0, we can assume that lib:Cabal is version 1.22 or older. + #endif + + main = ... + +The simplified (heuristic) CPP pattern shown below is useful if all you need +is to distinguish ``Cabal < 2.0`` from ``Cabal >= 2.0``. + +.. 
code-block:: haskell + + {-# LANGUAGE CPP #-} + import Distribution.Simple + + #if !defined(MIN_VERSION_Cabal) + # define MIN_VERSION_Cabal(a,b,c) 0 + #endif + + #if MIN_VERSION_Cabal(2,0,0) + -- code for lib:Cabal >= 2.0 + #else + -- code for lib:Cabal < 2.0 + #endif + + main = ... + + + +Autogenerated modules +--------------------- + +Modules that are built automatically at setup, created with a custom +setup script, must appear on :pkg-field:`other-modules` for the library, +executable, test-suite or benchmark stanzas or also on +:pkg-field:`library:exposed-modules` for libraries to be used, but are not +really on the package when distributed. This makes commands like sdist fail +because the file is not found. + +These special modules must appear again on the :pkg-field:`autogen-modules` +field of the stanza that is using it, besides :pkg-field:`other-modules` or +:pkg-field:`library:exposed-modules`. With this there is no need to create +complex build hooks for this poweruser case. + +.. pkg-field:: autogen-modules: module list + :since: 2.0 + + .. TODO: document autogen-modules field + +Right now :pkg-field:`executable:main-is` modules are not supported on +:pkg-field:`autogen-modules`. + +:: + + Library + default-language: Haskell2010 + build-depends: base + exposed-modules: + MyLibrary + MyLibHelperModule + other-modules: + MyLibModule + autogen-modules: + MyLibHelperModule + + Executable Exe + default-language: Haskell2010 + main-is: Dummy.hs + build-depends: base + other-modules: + MyExeModule + MyExeHelperModule + autogen-modules: + MyExeHelperModule + +Accessing data files from package code +-------------------------------------- + +The placement on the target system of files listed in +the :pkg-field:`data-files` field varies between systems, and in some cases +one can even move packages around after installation (see `prefix +independence `__). 
To +enable packages to find these files in a portable way, Cabal generates a +module called :file:`Paths_{pkgname}` (with any hyphens in *pkgname* +replaced by underscores) during building, so that it may be imported by +modules of the package. This module defines a function + +.. code-block:: haskell + + getDataFileName :: FilePath -> IO FilePath + +If the argument is a filename listed in the :pkg-field:`data-files` field, the +result is the name of the corresponding file on the system on which the +program is running. + +.. Note:: + + If you decide to import the :file:`Paths_{pkgname}` module then it + *must* be listed in the :pkg-field:`other-modules` field just like any other + module in your package and on :pkg-field:`autogen-modules` as the file is + autogenerated. + +The :file:`Paths_{pkgname}` module is not platform independent, as any +other autogenerated module, so it does not get included in the source +tarballs generated by ``sdist``. + +The :file:`Paths_{pkgname}` module also includes some other useful +functions and values, which record the version of the package and some +other directories which the package has been configured to be installed +into (e.g. data files live in ``getDataDir``): + +.. code-block:: haskell + + version :: Version + + getBinDir :: IO FilePath + getLibDir :: IO FilePath + getDynLibDir :: IO FilePath + getDataDir :: IO FilePath + getLibexecDir :: IO FilePath + getSysconfDir :: IO FilePath + +The actual location of all these directories can be individually +overridden at runtime using environment variables of the form +``pkg_name_var``, where ``pkg_name`` is the name of the package with all +hyphens converted into underscores, and ``var`` is either ``bindir``, +``libdir``, ``dynlibdir``, ``datadir``, ``libexedir`` or ``sysconfdir``. For example, +the configured data directory for ``pretty-show`` is controlled with the +``pretty_show_datadir`` environment variable. 
+ +Accessing the package version +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The aforementioned auto generated :file:`Paths_{pkgname}` module also +exports the constant ``version ::`` +`Version `__ +which is defined as the version of your package as specified in the +``version`` field. + +System-dependent parameters +--------------------------- + +For some packages, especially those interfacing with C libraries, +implementation details and the build procedure depend on the build +environment. The ``build-type`` ``Configure`` can be used to handle many +such situations. In this case, ``Setup.hs`` should be: + +.. code-block:: haskell + + import Distribution.Simple + main = defaultMainWithHooks autoconfUserHooks + +Most packages, however, would probably do better using the ``Simple`` +build type and `configurations`_. + +The :pkg-field:`build-type` ``Configure`` differs from ``Simple`` in two ways: + +- The package root directory must contain a shell script called + ``configure``. The configure step will run the script. This + ``configure`` script may be produced by + `autoconf `__ or may be + hand-written. The ``configure`` script typically discovers + information about the system and records it for later steps, e.g. by + generating system-dependent header files for inclusion in C source + files and preprocessed Haskell source files. (Clearly this won't work + for Windows without MSYS or Cygwin: other ideas are needed.) + +- If the package root directory contains a file called + *package*\ ``.buildinfo`` after the configuration step, subsequent + steps will read it to obtain additional settings for `build + information`_ fields,to be merged with the ones + given in the ``.cabal`` file. In particular, this file may be + generated by the ``configure`` script mentioned above, allowing these + settings to vary depending on the build environment. 
+ +The build information file should have the following structure: + + *buildinfo* + + ``executable:`` *name* *buildinfo* + + ``executable:`` *name* *buildinfo* ... + +where each *buildinfo* consists of settings of fields listed in the +section on `build information`_. The first one (if +present) relates to the library, while each of the others relate to the +named executable. (The names must match the package description, but you +don't have to have entries for all of them.) + +Neither of these files is required. If they are absent, this setup +script is equivalent to ``defaultMain``. + +Example: Using autoconf +^^^^^^^^^^^^^^^^^^^^^^^ + +This example is for people familiar with the +`autoconf `__ tools. + +In the X11 package, the file ``configure.ac`` contains: + +.. code-block:: shell + + AC_INIT([Haskell X11 package], [1.1], [libraries@haskell.org], [X11]) + + # Safety check: Ensure that we are in the correct source directory. + AC_CONFIG_SRCDIR([X11.cabal]) + + # Header file to place defines in + AC_CONFIG_HEADERS([include/HsX11Config.h]) + + # Check for X11 include paths and libraries + AC_PATH_XTRA + AC_TRY_CPP([#include ],,[no_x=yes]) + + # Build the package if we found X11 stuff + if test "$no_x" = yes + then BUILD_PACKAGE_BOOL=False + else BUILD_PACKAGE_BOOL=True + fi + AC_SUBST([BUILD_PACKAGE_BOOL]) + + AC_CONFIG_FILES([X11.buildinfo]) + AC_OUTPUT + +Then the setup script will run the ``configure`` script, which checks +for the presence of the X11 libraries and substitutes for variables in +the file ``X11.buildinfo.in``: + +:: + + buildable: @BUILD_PACKAGE_BOOL@ + cc-options: @X_CFLAGS@ + ld-options: @X_LIBS@ + +This generates a file ``X11.buildinfo`` supplying the parameters needed +by later stages: + +:: + + buildable: True + cc-options: -I/usr/X11R6/include + ld-options: -L/usr/X11R6/lib + +The ``configure`` script also generates a header file +``include/HsX11Config.h`` containing C preprocessor defines recording +the results of various tests. 
This file may be included by C source +files and preprocessed Haskell source files in the package. + +.. Note:: + + Packages using these features will also need to list additional + files such as ``configure``, templates for ``.buildinfo`` files, files + named only in ``.buildinfo`` files, header files and so on in the + :pkg-field:`extra-source-files` field to ensure that they are included in + source distributions. They should also list files and directories generated + by ``configure`` in the :pkg-field:`extra-tmp-files` field to ensure that + they are removed by ``setup clean``. + +Quite often the files generated by ``configure`` need to be listed +somewhere in the package description (for example, in the +:pkg-field:`install-includes` field). However, we usually don't want generated +files to be included in the source tarball. The solution is again +provided by the ``.buildinfo`` file. In the above example, the following +line should be added to ``X11.buildinfo``: + +:: + + install-includes: HsX11Config.h + +In this way, the generated ``HsX11Config.h`` file won't be included in +the source tarball in addition to ``HsX11Config.h.in``, but it will be +copied to the right location during the install process. Packages that +use custom ``Setup.hs`` scripts can update the necessary fields +programmatically instead of using the ``.buildinfo`` file. + +Conditional compilation +----------------------- + +Sometimes you want to write code that works with more than one version +of a dependency. You can specify a range of versions for the dependency +in the :pkg-field:`build-depends`, but how do you then write the code that can +use different versions of the API? + +Haskell lets you preprocess your code using the C preprocessor (either +the real C preprocessor, or ``cpphs``). To enable this, add +``extensions: CPP`` to your package description. 
When using CPP, Cabal +provides some pre-defined macros to let you test the version of +dependent packages; for example, suppose your package works with either +version 3 or version 4 of the ``base`` package, you could select the +available version in your Haskell modules like this: + +.. code-block:: cpp + + #if MIN_VERSION_base(4,0,0) + ... code that works with base-4 ... + #else + ... code that works with base-3 ... + #endif + +In general, Cabal supplies a macro +``MIN_VERSION_``\ *``package``*\ ``_(A,B,C)`` for each package depended +on via :pkg-field:`build-depends`. This macro is true if the actual version of +the package in use is greater than or equal to ``A.B.C`` (using the +conventional ordering on version numbers, which is lexicographic on the +sequence, but numeric on each component, so for example 1.2.0 is greater +than 1.0.3). + +Since version 1.20, the ``MIN_TOOL_VERSION_``\ *``tool``* +family of macros lets you condition on the version of build tools used to +build the program (e.g. ``hsc2hs``). + +Since version 1.24, the macro ``CURRENT_COMPONENT_ID``, which +expands to the string of the component identifier that uniquely +identifies this component. Furthermore, if the package is a library, +the macro ``CURRENT_PACKAGE_KEY`` records the identifier that was passed +to GHC for use in symbols and for type equality. + +Since version 2.0, the macro ``CURRENT_PACKAGE_VERSION`` expands +to the string version number of the current package. + +Cabal places the definitions of these macros into an +automatically-generated header file, which is included when +preprocessing Haskell source code by passing options to the C +preprocessor. + +Cabal also allows to detect when the source code is being used for +generating documentation. The ``__HADDOCK_VERSION__`` macro is defined +only when compiling via Haddock_ +instead of a normal Haskell compiler. 
The value of the +``__HADDOCK_VERSION__`` macro is defined as ``A*1000 + B*10 + C``, where +``A.B.C`` is the Haddock version. This can be useful for working around +bugs in Haddock or generating prettier documentation in some special +cases. + +More complex packages +--------------------- + +For packages that don't fit the simple schemes described above, you have +a few options: + +- By using the :pkg-field:`build-type` ``Custom``, you can supply your own + ``Setup.hs`` file, and customize the simple build infrastructure + using *hooks*. These allow you to perform additional actions before + and after each command is run, and also to specify additional + preprocessors. A typical ``Setup.hs`` may look like this: + + .. code-block:: haskell + + import Distribution.Simple + main = defaultMainWithHooks simpleUserHooks { postHaddock = posthaddock } + + posthaddock args flags desc info = .... + + See ``UserHooks`` in + `Distribution.Simple <../release/cabal-latest/doc/API/Cabal/Distribution-Simple.html>`__ + for the details, but note that this interface is experimental, and + likely to change in future releases. + + If you use a custom ``Setup.hs`` file you should strongly consider + adding a :pkg-section:`custom-setup` stanza with a + :pkg-field:`custom-setup:setup-depends` field to ensure that your setup + script does not break with future dependency versions. + +- You could delegate all the work to ``make``, though this is unlikely + to be very portable. Cabal supports this with the :pkg-field:`build-type` + ``Make`` and a trivial setup library + `Distribution.Make <../release/cabal-latest/doc/API/Cabal/Distribution-Make.html>`__, + which simply parses the command line arguments and invokes ``make``. + Here ``Setup.hs`` should look like this: + + .. 
code-block:: haskell + + import Distribution.Make + main = defaultMain + + The root directory of the package should contain a ``configure`` + script, and, after that has run, a ``Makefile`` with a default target + that builds the package, plus targets ``install``, ``register``, + ``unregister``, ``clean``, ``dist`` and ``docs``. Some options to + commands are passed through as follows: + + - The ``--with-hc-pkg``, ``--prefix``, ``--bindir``, ``--libdir``, + ``--dynlibdir``, ``--datadir``, ``--libexecdir`` and ``--sysconfdir`` options to + the ``configure`` command are passed on to the ``configure`` + script. In addition the value of the ``--with-compiler`` option is + passed in a ``--with-hc`` option and all options specified with + ``--configure-option=`` are passed on. + + - The ``--destdir`` option to the ``copy`` command becomes a setting + of a ``destdir`` variable on the invocation of ``make copy``. The + supplied ``Makefile`` should provide a ``copy`` target, which will + probably look like this: + + .. code-block:: make + + copy : + $(MAKE) install prefix=$(destdir)/$(prefix) \ + bindir=$(destdir)/$(bindir) \ + libdir=$(destdir)/$(libdir) \ + dynlibdir=$(destdir)/$(dynlibdir) \ + datadir=$(destdir)/$(datadir) \ + libexecdir=$(destdir)/$(libexecdir) \ + sysconfdir=$(destdir)/$(sysconfdir) \ + +- Finally, with the :pkg-field:`build-type` ``Custom``, you can also write your + own setup script from scratch. It must conform to the interface + described in the section on `building and installing + packages `__, and you may use the Cabal + library for all or part of the work. One option is to copy the source + of ``Distribution.Simple``, and alter it for your needs. Good luck. + + +.. 
include:: references.inc diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/file-format-changelog.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/file-format-changelog.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/file-format-changelog.rst 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/file-format-changelog.rst 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,144 @@ +Cabal file format changelog +=========================== + +Changes in 2.4 +-------------- + +* Wildcard matching has been expanded. All previous wildcard + expressions are still valid; some will match strictly more files + than before. Specifically: + + * Double-star (``**``) wildcards are now accepted for recursive + matching immediately before the final slash; they must be followed + by a filename wildcard (e.g., ``foo/**/*.html`` is valid; + ``foo/**/bar/*.html`` and ``foo/**/**/*.html``, + ``foo/**/bar.html`` are all invalid). As ``**`` was an error in + globs before, this does not affect any existing ``.cabal`` files + that previously worked. + + * Wildcards now match when the pattern's extensions form a suffix of + the candidate file's extension, rather than requiring strict + equality (e.g., previously ``*.html`` did not match + ``foo.en.html``, but now it does). + +* License fields use identifiers from SPDX License List version + ``3.2 2018-07-10`` + + +``cabal-version: 2.2`` +---------------------- + +* New :pkg-section:`common` stanzas and :pkg-field:`import` + pseudo-field added. + +* New :pkg-field:`library:virtual-modules` field added. + +* New :pkg-field:`cxx-sources` and :pkg-field:`cxx-options` fields + added for supporting bundled foreign routines implemented in C++. + +* New :pkg-field:`asm-sources` and :pkg-field:`asm-options` fields + added for supporting bundled foreign routines implemented in + assembler. 
+ +* New :pkg-field:`extra-bundled-libraries` field for specifying + additional custom library objects to be installed. + +* Extended ``if`` control structure with support for ``elif`` keyword. + +* Changed default rules of :pkg-field:`build-type` field to infer + "build-type:" for "Simple"/"Custom" automatically. + +* :pkg-field:`license` field syntax changed to require SPDX + expression syntax (using SPDX license list version ``3.0 2017-12-28``). + +* Allow redundant leading or trailing commas in package fields (which + require commas) such as :pkg-field:`build-depends`. + + +``cabal-version: 2.0`` +---------------------- + +* New :pkg-field:`library:signatures` and :pkg-field:`mixins` fields + added for supporting Backpack_. + +* New :pkg-field:`build-tool-depends` field added for adding + build-time dependencies of executable components. + +* New :pkg-field:`custom-setup:autogen-modules` field added for declaring modules + which are generated at build time. + +* Support for new PVP_ caret-style version operator (``^>=``) added to + :pkg-field:`build-depends`. + +* Add support for new :pkg-section:`foreign-library` stanza. + +* Add support for :ref:`internal library stanzas `. + +* New CPP Macro ``CURRENT_PACKAGE_VERSION``. + +``cabal-version: 1.24`` +---------------------- + +* New :pkg-section:`custom-setup` stanza and + :pkg-field:`custom-setup:setup-depends` field added for specifying dependencies + of custom ``Setup.hs`` scripts. + +* CPP Macros ``VERSION_$pkgname`` and ``MIN_VERSION_$pkgname`` are now + also generated for the current package. + +* New CPP Macros ``CURRENT_COMPONENT_ID`` and ``CURRENT_PACKAGE_KEY``. + +* New :pkg-field:`extra-framework-dirs` field added for specifying + extra locations to find OS X frameworks. + +``cabal-version: 1.22`` +---------------------- + +* New :pkg-field:`library:reexported-modules` field. + +* Support for ``-none`` version constraint added to + :pkg-field:`build-depends`. 
+ +* New :pkg-field:`license` type ``ISC`` added. + +``cabal-version: 1.20`` +---------------------- + +* Add support for new :pkg-field:`license-files` field for declaring + multiple license documents. + +* New CPP Macro ``MIN_TOOL_VERSION_$buildtool``. + +* New :pkg-field:`license` types ``BSD2`` and ``MPL-2.0`` added. + +``cabal-version: 1.18`` +---------------------- + +* Add support for new :pkg-field:`extra-doc-files` field for + specifying extra file assets referenced by the Haddock + documentation. + +* New :pkg-field:`license` type ``AGPL`` and ``AGPL-3`` added. + +* Add support for specifying a C/C++/obj-C source file in + :pkg-field:`executable:main-is` field. + +* Add ``getSysconfDir`` operation to ``Paths_`` API. + +``cabal-version: 1.16`` +---------------------- + +.. todo:: + + this needs to be researched; there were only few changes between + 1.12 and 1.18; + +``cabal-version: 1.12`` +---------------------- + +* Change syntax of :pkg-field:`cabal-version` to support the new recommended + ``cabal-version: x.y`` style + + + +.. include:: references.inc Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/images/Cabal-dark.png and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/images/Cabal-dark.png differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/index.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/index.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/index.rst 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/index.rst 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,15 @@ + +Welcome to the Cabal User Guide +=============================== + +.. 
toctree:: + :maxdepth: 2 + :numbered: + + intro + config-and-install + concepts-and-development + bugs-and-stability + nix-local-build-overview + nix-integration + file-format-changelog diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/installing-packages.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/installing-packages.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/installing-packages.rst 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/installing-packages.rst 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,1730 @@ +Configuration +============= + +.. highlight:: cabal + +Overview +-------- + +The global configuration file for ``cabal-install`` is +``~/.cabal/config``. If you do not have this file, ``cabal`` will create +it for you on the first call to ``cabal update``. Alternatively, you can +explicitly ask ``cabal`` to create it for you using + +.. code-block:: console + + $ cabal user-config update + +Most of the options in this configuration file are also available as +command line arguments, and the corresponding documentation can be used +to lookup their meaning. The created configuration file only specifies +values for a handful of options. Most options are left at their default +value, which it documents; for instance, + +:: + + -- executable-stripping: True + +means that the configuration file currently does not specify a value for +the ``executable-stripping`` option (the line is commented out), and +that the default is ``True``; if you wanted to disable stripping of +executables by default, you would change this line to + +:: + + executable-stripping: False + +You can also use ``cabal user-config update`` to migrate configuration +files created by older versions of ``cabal``. + +Repository specification +------------------------ + +An important part of the configuration is the specification of the +repository. 
When ``cabal`` creates a default config file, it configures +the repository to be the central Hackage server: + +:: + + repository hackage.haskell.org + url: http://hackage.haskell.org/ + +The name of the repository is given on the first line, and can be +anything; packages downloaded from this repository will be cached under +``~/.cabal/packages/hackage.haskell.org`` (or whatever name you specify; +you can change the prefix by changing the value of +``remote-repo-cache``). If you want, you can configure multiple +repositories, and ``cabal`` will combine them and be able to download +packages from any of them. + +Using secure repositories +^^^^^^^^^^^^^^^^^^^^^^^^^ + +For repositories that support the TUF security infrastructure (this +includes Hackage), you can enable secure access to the repository by +specifying: + +:: + + repository hackage.haskell.org + url: http://hackage.haskell.org/ + secure: True + root-keys: + key-threshold: + +The ```` and ```` values are used for +bootstrapping. As part of the TUF infrastructure the repository will +contain a file ``root.json`` (for instance, +http://hackage.haskell.org/root.json) which the client needs to do +verification. However, how can ``cabal`` verify the ``root.json`` file +*itself*? This is known as bootstrapping: if you specify a list of root +key IDs and a corresponding threshold, ``cabal`` will verify that the +downloaded ``root.json`` file has been signed with at least +```` keys from your set of ````. + +You can, but are not recommended to, omit these two fields. In that case +``cabal`` will download the ``root.json`` file and use it without +verification. Although this bootstrapping step is then unsafe, all +subsequent access is secure (provided that the downloaded ``root.json`` +was not tampered with). 
Of course, adding ``root-keys`` and +``key-threshold`` to your repository specification only shifts the +problem, because now you somehow need to make sure that the key IDs you +received were the right ones. How that is done is however outside the +scope of ``cabal`` proper. + +More information about the security infrastructure can be found at +https://github.com/well-typed/hackage-security. + +Legacy repositories +^^^^^^^^^^^^^^^^^^^ + +Currently ``cabal`` supports two kinds of “legacy†repositories. The +first is specified using + +:: + + remote-repo: hackage.haskell.org:http://hackage.haskell.org/packages/archive + +This is just syntactic sugar for + +:: + + repository hackage.haskell.org + url: hackage.haskell.org:http://hackage.haskell.org/packages/archive + +although, in (and only in) the specific case of Hackage, the URL +``http://hackage.haskell.org/packages/archive`` will be silently +translated to ``http://hackage.haskell.org/``. + +The second kind of legacy repositories are so-called “local†+repositories: + +:: + + local-repo: my-local-repo:/path/to/local/repo + +This can be used to access repositories on the local file system. +However, the layout of these local repositories is different from the +layout of remote repositories, and usage of these local repositories is +deprecated. + +Secure local repositories +^^^^^^^^^^^^^^^^^^^^^^^^^ + +If you want to use repositories on your local file system, it is +recommended instead to use a *secure* local repository: + +:: + + repository my-local-repo + url: file:/path/to/local/repo + secure: True + root-keys: + key-threshold: + +The layout of these secure local repos matches the layout of remote +repositories exactly; the :hackage-pkg:`hackage-repo-tool` +can be used to create and manage such repositories. + +.. _installing-packages: + +Building and installing packages +================================ + +.. 
highlight:: console + +After you've unpacked a Cabal package, you can build it by moving into +the root directory of the package and running the ``cabal`` tool there: + +:: + + $ cabal [command] [option...] + +The *command* argument selects a particular step in the build/install +process. + +You can also get a summary of the command syntax with + +:: + + $ cabal help + +Alternatively, you can also use the ``Setup.hs`` or ``Setup.lhs`` +script: + +:: + + $ runhaskell Setup.hs [command] [option...] + +For the summary of the command syntax, run: + +:: + + $ cabal help + +or + +:: + + $ runhaskell Setup.hs --help + +Building and installing a system package +---------------------------------------- + +:: + + $ runhaskell Setup.hs configure --ghc + $ runhaskell Setup.hs build + $ runhaskell Setup.hs install + +The first line readies the system to build the tool using GHC; for +example, it checks that GHC exists on the system. The second line +performs the actual building, while the last both copies the build +results to some permanent place and registers the package with GHC. + +Building and installing a user package +-------------------------------------- + +:: + + $ runhaskell Setup.hs configure --user + $ runhaskell Setup.hs build + $ runhaskell Setup.hs install + +The package is installed under the user's home directory and is +registered in the user's package database (:option:`setup configure --user`). + +Installing packages from Hackage +-------------------------------- + +The ``cabal`` tool also can download, configure, build and install a +Hackage_ package and all of its +dependencies in a single step. To do this, run: + +:: + + $ cabal install [PACKAGE...] + +To browse the list of available packages, visit the +Hackage_ web site. + +Developing with sandboxes +------------------------- + +By default, any dependencies of the package are installed into the +global or user package databases (e.g. using +``cabal install --only-dependencies``). 
If you're building several +different packages that have incompatible dependencies, this can cause +the build to fail. One way to avoid this problem is to build each +package in an isolated environment ("sandbox"), with a sandbox-local +package database. Because sandboxes are per-project, inconsistent +dependencies can be simply disallowed. + +For more on sandboxes, see also `this +article `__. + +Sandboxes: basic usage +^^^^^^^^^^^^^^^^^^^^^^ + +To initialise a fresh sandbox in the current directory, run +``cabal sandbox init``. All subsequent commands (such as ``build`` and +``install``) from this point will use the sandbox. + +:: + + $ cd /path/to/my/haskell/library + $ cabal sandbox init # Initialise the sandbox + $ cabal install --only-dependencies # Install dependencies into the sandbox + $ cabal build # Build your package inside the sandbox + +It can be useful to make a source package available for installation in +the sandbox - for example, if your package depends on a patched or an +unreleased version of a library. This can be done with the +``cabal sandbox add-source`` command - think of it as "local Hackage_". +If an add-source dependency is later modified, it is reinstalled automatically. + +:: + + $ cabal sandbox add-source /my/patched/library # Add a new add-source dependency + $ cabal install --dependencies-only # Install it into the sandbox + $ cabal build # Build the local package + $ $EDITOR /my/patched/library/Source.hs # Modify the add-source dependency + $ cabal build # Modified dependency is automatically reinstalled + +Normally, the sandbox settings (such as optimisation level) are +inherited from the main Cabal config file (``$HOME/.cabal/config``). +Sometimes, though, you need to change some settings specifically for a +single sandbox. You can do this by creating a ``cabal.config`` file in +the same directory with your ``cabal.sandbox.config`` (which was created +by ``sandbox init``). 
This file has the same syntax as the main Cabal +config file. + +:: + + $ cat cabal.config + documentation: True + constraints: foo == 1.0, bar >= 2.0, baz + $ cabal build # Uses settings from the cabal.config file + +When you have decided that you no longer want to build your package +inside a sandbox, just delete it: + +:: + + $ cabal sandbox delete # Built-in command + $ rm -rf .cabal-sandbox cabal.sandbox.config # Alternative manual method + +Sandboxes: advanced usage +^^^^^^^^^^^^^^^^^^^^^^^^^ + +The default behaviour of the ``add-source`` command is to track +modifications done to the added dependency and reinstall the sandbox +copy of the package when needed. Sometimes this is not desirable: in +these cases you can use ``add-source --snapshot``, which disables the +change tracking. In addition to ``add-source``, there are also +``list-sources`` and ``delete-source`` commands. + +Sometimes one wants to share a single sandbox between multiple packages. +This can be easily done with the ``--sandbox`` option: + +:: + + $ mkdir -p /path/to/shared-sandbox + $ cd /path/to/shared-sandbox + $ cabal sandbox init --sandbox . + $ cd /path/to/package-a + $ cabal sandbox init --sandbox /path/to/shared-sandbox + $ cd /path/to/package-b + $ cabal sandbox init --sandbox /path/to/shared-sandbox + +Note that ``cabal sandbox init --sandbox .`` puts all sandbox files into +the current directory. By default, ``cabal sandbox init`` initialises a +new sandbox in a newly-created subdirectory of the current working +directory (``./.cabal-sandbox``). 
+ +Using multiple different compiler versions simultaneously is also +supported, via the ``-w`` option: + +:: + + $ cabal sandbox init + $ cabal install --only-dependencies -w /path/to/ghc-1 # Install dependencies for both compilers + $ cabal install --only-dependencies -w /path/to/ghc-2 + $ cabal configure -w /path/to/ghc-1 # Build with the first compiler + $ cabal build + $ cabal configure -w /path/to/ghc-2 # Build with the second compiler + $ cabal build + +It can be occasionally useful to run the compiler-specific package +manager tool (e.g. ``ghc-pkg``) tool on the sandbox package DB directly +(for example, you may need to unregister some packages). The +``cabal sandbox hc-pkg`` command is a convenient wrapper that runs the +compiler-specific package manager tool with the arguments: + +:: + + $ cabal -v sandbox hc-pkg list + Using a sandbox located at /path/to/.cabal-sandbox + 'ghc-pkg' '--global' '--no-user-package-conf' + '--package-conf=/path/to/.cabal-sandbox/i386-linux-ghc-7.4.2-packages.conf.d' + 'list' + [...] + +The ``--require-sandbox`` option makes all sandbox-aware commands +(``install``/``build``/etc.) exit with error if there is no sandbox +present. This makes it harder to accidentally modify the user package +database. The option can be also turned on via the per-user +configuration file (``~/.cabal/config``) or the per-project one +(``$PROJECT_DIR/cabal.config``). The error can be squelched with +``--no-require-sandbox``. + +The option ``--sandbox-config-file`` allows to specify the location of +the ``cabal.sandbox.config`` file (by default, ``cabal`` searches for it +in the current directory). This provides the same functionality as +shared sandboxes, but sometimes can be more convenient. 
Example: + +:: + + $ mkdir my/sandbox + $ cd my/sandbox + $ cabal sandbox init + $ cd /path/to/my/project + $ cabal --sandbox-config-file=/path/to/my/sandbox/cabal.sandbox.config install + # Uses the sandbox located at /path/to/my/sandbox/.cabal-sandbox + $ cd ~ + $ cabal --sandbox-config-file=/path/to/my/sandbox/cabal.sandbox.config install + # Still uses the same sandbox + +The sandbox config file can be also specified via the +``CABAL_SANDBOX_CONFIG`` environment variable. + +Finally, the flag ``--ignore-sandbox`` lets you temporarily ignore an +existing sandbox: + +:: + + $ mkdir my/sandbox + $ cd my/sandbox + $ cabal sandbox init + $ cabal --ignore-sandbox install text + # Installs 'text' in the user package database ('~/.cabal'). + +Creating a binary package +------------------------- + +When creating binary packages (e.g. for Red Hat or Debian) one needs to +create a tarball that can be sent to another system for unpacking in the +root directory: + +:: + + $ runhaskell Setup.hs configure --prefix=/usr + $ runhaskell Setup.hs build + $ runhaskell Setup.hs copy --destdir=/tmp/mypkg + $ tar -czf mypkg.tar.gz /tmp/mypkg/ + +If the package contains a library, you need two additional steps: + +:: + + $ runhaskell Setup.hs register --gen-script + $ runhaskell Setup.hs unregister --gen-script + +This creates shell scripts ``register.sh`` and ``unregister.sh``, which +must also be sent to the target system. After unpacking there, the +package must be registered by running the ``register.sh`` script. The +``unregister.sh`` script would be used in the uninstall procedure of the +package. Similar steps may be used for creating binary packages for +Windows. + +The following options are understood by all commands: + +.. program:: setup + +.. option:: --help, -h or -? + + List the available options for the command. + +.. option:: --verbose=n or -v n + + Set the verbosity level (0-3). The normal level is 1; a missing *n* + defaults to 2. 
+ + There is also an extended version of this command which can be + used to fine-tune the verbosity of output. It takes the + form ``[silent|normal|verbose|debug]``\ *flags*, where *flags* + is a list of ``+`` flags which toggle various aspects of + output. At the moment, only ``+callsite`` and ``+callstack`` + are supported, which respectively toggle call site and call + stack printing (these are only supported if Cabal + is built with a sufficiently recent GHC.) + +The various commands and the additional options they support are +described below. In the simple build infrastructure, any other options +will be reported as errors. + +.. _setup-configure: + +setup configure +--------------- + +.. program:: setup configure + +Prepare to build the package. Typically, this step checks that the +target platform is capable of building the package, and discovers +platform-specific features that are needed during the build. + +The user may also adjust the behaviour of later stages using the options +listed in the following subsections. In the simple build infrastructure, +the values supplied via these options are recorded in a private file +read by later stages. + +If a user-supplied ``configure`` script is run (see the section on +`system-dependent +parameters `__ or +on `complex +packages `__), it is +passed the :option:`--with-hc-pkg`, :option:`--prefix`, :option:`--bindir`, +:option:`--libdir`, :option:`--dynlibdir`, :option:`--datadir`, :option:`--libexecdir` and +:option:`--sysconfdir` options. In addition the value of the +:option:`--with-compiler` option is passed in a ``--with-hc`` option +and all options specified with :option:`--configure-option` are passed on. + +.. note:: + `GNU autoconf places restrictions on paths, including the directory + that the package is built from. + `_ + The errors produced when this happens can be obscure; Cabal attempts to + detect and warn in this situation, but it is not perfect. 
+ +In Cabal 2.0, support for a single positional argument was added to +``setup configure``. This makes Cabal configure the specific component +to be configured. Specified names can be qualified with ``lib:`` or +``exe:`` in case just a name is ambiguous (as would be the case for a +package named ``p`` which has a library and an executable named ``p``.) +This has the following effects: + +- Subsequent invocations of ``cabal build``, ``register``, etc. operate only + on the configured component. + +- Cabal requires all "internal" dependencies (e.g., an executable + depending on a library defined in the same package) must be found in + the set of databases via :option:`--package-db` (and related flags): these + dependencies are assumed to be up-to-date. A dependency can be + explicitly specified using :option:`--dependency` simply by giving the name + of the internal library; e.g., the dependency for an internal library + named ``foo`` is given as + ``--dependency=pkg-internal=pkg-1.0-internal-abcd``. + +- Only the dependencies needed for the requested component are + required. Similarly, when :option:`--exact-configuration` is specified, + it's only necessary to specify :option:`--dependency` for the component. + (As mentioned previously, you *must* specify internal dependencies as + well.) + +- Internal ``build-tool-depends`` and ``build-tools`` dependencies are expected + to be in the ``PATH`` upon subsequent invocations of ``setup``. + +Full details can be found in the `Componentized Cabal +proposal `__. + +Programs used for building +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The following options govern the programs used to process the source +files of a package: + +.. option:: --ghc or -g, --jhc, --lhc, --uhc + + Specify which Haskell implementation to use to build the package. At + most one of these flags may be given. If none is given, the + implementation under which the setup script was compiled or + interpreted is used. + +.. 
option:: --with-compiler=path or -w *path* + + Specify the path to a particular compiler. If given, this must match + the implementation selected above. The default is to search for the + usual name of the selected implementation. + + This flag also sets the default value of the :option:`--with-hc-pkg` + option to the package tool for this compiler. Check the output of + ``setup configure -v`` to ensure that it finds the right package + tool (or use :option:`--with-hc-pkg` explicitly). + +.. option:: --with-hc-pkg=path + + Specify the path to the package tool, e.g. ``ghc-pkg``. The package + tool must be compatible with the compiler specified by + :option:`--with-compiler`. If this option is omitted, the default value is + determined from the compiler selected. + +.. option:: --with-prog=path + + Specify the path to the program *prog*. Any program known to Cabal + can be used in place of *prog*. It can either be a full path or the + name of a program that can be found on the program search path. For + example: ``--with-ghc=ghc-6.6.1`` or + ``--with-cpphs=/usr/local/bin/cpphs``. The full list of accepted + programs is not enumerated in this user guide. Rather, run + ``cabal install --help`` to view the list. + +.. option:: --prog-options=options + + Specify additional options to the program *prog*. Any program known + to Cabal can be used in place of *prog*. For example: + ``--alex-options="--template=mytemplatedir/"``. The *options* is + split into program options based on spaces. Any options containing + embedded spaces need to be quoted, for example + ``--foo-options='--bar="C:\Program File\Bar"'``. As an alternative + that takes only one option at a time but avoids the need to quote, + use :option:`--prog-option` instead. + +.. option:: --prog-option=option + + Specify a single additional option to the program *prog*. 
For + passing an option that contain embedded spaces, such as a file name + with embedded spaces, using this rather than :option:`--prog-options` + means you do not need an additional level of quoting. Of course if you + are using a command shell you may still need to quote, for example + ``--foo-options="--bar=C:\Program File\Bar"``. + +All of the options passed with either :option:`--prog-options` +or :option:`--prog-option` are passed in the order they were +specified on the configure command line. + +Installation paths +^^^^^^^^^^^^^^^^^^ + +The following options govern the location of installed files from a +package: + +.. option:: --prefix=dir + + The root of the installation. For example for a global install you + might use ``/usr/local`` on a Unix system, or ``C:\Program Files`` + on a Windows system. The other installation paths are usually + subdirectories of *prefix*, but they don't have to be. + + In the simple build system, *dir* may contain the following path + variables: ``$pkgid``, ``$pkg``, ``$version``, ``$compiler``, + ``$os``, ``$arch``, ``$abi``, ``$abitag`` + +.. option:: --bindir=dir + + Executables that the user might invoke are installed here. + + In the simple build system, *dir* may contain the following path + variables: ``$prefix``, ``$pkgid``, ``$pkg``, ``$version``, + ``$compiler``, ``$os``, ``$arch``, ``$abi``, ``$abitag`` + +.. option:: --libdir=dir + + Object-code libraries are installed here. + + In the simple build system, *dir* may contain the following path + variables: ``$prefix``, ``$bindir``, ``$pkgid``, ``$pkg``, + ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, + ``$abitag`` + +.. option:: --dynlibdir=dir + + Dynamic libraries are installed here. + + By default, this is set to `$libdir/$abi`, which is usually not equal to + `$libdir/$libsubdir`. 
+ + In the simple build system, *dir* may contain the following path + variables: ``$prefix``, ``$bindir``, ``$libdir``, ``$pkgid``, ``$pkg``, + ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, + ``$abitag`` + +.. option:: --libexecdir=dir + + Executables that are not expected to be invoked directly by the user + are installed here. + + In the simple build system, *dir* may contain the following path + variables: ``$prefix``, ``$bindir``, ``$libdir``, ``$libsubdir``, + ``$pkgid``, ``$pkg``, ``$version``, ``$compiler``, ``$os``, + ``$arch``, ``$abi``, ``$abitag`` + +.. option:: --datadir=dir + + Architecture-independent data files are installed here. + + In the simple build system, *dir* may contain the following path + variables: ``$prefix``, ``$bindir``, ``$libdir``, ``$libsubdir``, + ``$pkgid``, ``$pkg``, ``$version``, ``$compiler``, ``$os``, + ``$arch``, ``$abi``, ``$abitag`` + +.. option:: --sysconfdir=dir + + Installation directory for the configuration files. + + In the simple build system, *dir* may contain the following path + variables: ``$prefix``, ``$bindir``, ``$libdir``, ``$libsubdir``, + ``$pkgid``, ``$pkg``, ``$version``, ``$compiler``, ``$os``, + ``$arch``, ``$abi``, ``$abitag`` + +In addition the simple build system supports the following installation +path options: + +.. option:: --libsubdir=dir + + A subdirectory of *libdir* in which libraries are actually installed. For + example, in the simple build system on Unix, the default *libdir* is + ``/usr/local/lib``, and *libsubdir* contains the compiler ABI and package + identifier, + e.g. ``x86_64-linux-ghc-8.0.2/mypkg-0.1.0-IxQNmCA7qrSEQNkoHSF7A``, so + libraries would be installed in + ``/usr/local/lib/x86_64-linux-ghc-8.0.2/mypkg-0.1.0-IxQNmCA7qrSEQNkoHSF7A/``. + + *dir* may contain the following path variables: ``$pkgid``, + ``$pkg``, ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, + ``$abitag`` + +.. 
option:: --libexecsubdir=dir + + A subdirectory of *libexecdir* in which private executables are + installed. For example, in the simple build system on Unix, the default + *libexecdir* is ``/usr/local/libexec``, and *libsubdir* is + ``x86_64-linux-ghc-8.0.2/mypkg-0.1.0``, so private executables would be + installed in ``/usr/local/libexec/x86_64-linux-ghc-8.0.2/mypkg-0.1.0/`` + + *dir* may contain the following path variables: ``$pkgid``, + ``$pkg``, ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, + ``$abitag`` + +.. option:: --datasubdir=dir + + A subdirectory of *datadir* in which data files are actually + installed. + + *dir* may contain the following path variables: ``$pkgid``, + ``$pkg``, ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, + ``$abitag`` + +.. option:: --docdir=dir + + Documentation files are installed relative to this directory. + + *dir* may contain the following path variables: ``$prefix``, + ``$bindir``, ``$libdir``, ``$libsubdir``, ``$datadir``, + ``$datasubdir``, ``$pkgid``, ``$pkg``, ``$version``, ``$compiler``, + ``$os``, ``$arch``, ``$abi``, ``$abitag`` + +.. option:: --htmldir=dir + + HTML documentation files are installed relative to this directory. + + *dir* may contain the following path variables: ``$prefix``, + ``$bindir``, ``$libdir``, ``$libsubdir``, ``$datadir``, + ``$datasubdir``, ``$docdir``, ``$pkgid``, ``$pkg``, ``$version``, + ``$compiler``, ``$os``, ``$arch``, ``$abi``, ``$abitag`` + +.. option:: --program-prefix=prefix + + Prepend *prefix* to installed program names. + + *prefix* may contain the following path variables: ``$pkgid``, + ``$pkg``, ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, + ``$abitag`` + +.. option:: --program-suffix=suffix + + Append *suffix* to installed program names. The most obvious use for + this is to append the program's version number to make it possible + to install several versions of a program at once: + ``--program-suffix='$version'``. 
+ + *suffix* may contain the following path variables: ``$pkgid``, + ``$pkg``, ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, + ``$abitag`` + +Path variables in the simple build system +""""""""""""""""""""""""""""""""""""""""" + +For the simple build system, there are a number of variables that can be +used when specifying installation paths. The defaults are also specified +in terms of these variables. A number of the variables are actually for +other paths, like ``$prefix``. This allows paths to be specified +relative to each other rather than as absolute paths, which is important +for building relocatable packages (see `prefix +independence <#prefix-independence>`__). + +$prefix + The path variable that stands for the root of the installation. For + an installation to be relocatable, all other installation paths must + be relative to the ``$prefix`` variable. +$bindir + The path variable that expands to the path given by the :option:`--bindir` + configure option (or the default). +$libdir + As above but for :option:`--libdir` +$libsubdir + As above but for :option:`--libsubdir` +$dynlibdir + As above but for :option:`--dynlibdir` +$datadir + As above but for :option:`--datadir` +$datasubdir + As above but for :option:`--datasubdir` +$docdir + As above but for :option:`--docdir` +$pkgid + The name and version of the package, e.g. ``mypkg-0.2`` +$pkg + The name of the package, e.g. ``mypkg`` +$version + The version of the package, e.g. ``0.2`` +$compiler + The compiler being used to build the package, e.g. ``ghc-6.6.1`` +$os + The operating system of the computer being used to build the + package, e.g. ``linux``, ``windows``, ``osx``, ``freebsd`` or + ``solaris`` +$arch + The architecture of the computer being used to build the package, + e.g. ``i386``, ``x86_64``, ``ppc`` or ``sparc`` +$abitag + An optional tag that a compiler can use for telling incompatible + ABI's on the same architecture apart. 
GHCJS encodes the underlying + GHC version in the ABI tag. +$abi + A shortcut for getting a path that completely identifies the + platform in terms of binary compatibility. Expands to the same value + as ``$arch-$os-compiler-$abitag`` if the compiler uses an abi tag, + ``$arch-$os-$compiler`` if it doesn't. + +Paths in the simple build system +"""""""""""""""""""""""""""""""" + +For the simple build system, the following defaults apply: + +.. list-table:: Default installation paths + + * - Option + - Unix Default + - Windows Default + * - :option:`--prefix` (global) + - ``/usr/local`` + - ``%PROGRAMFILES%\Haskell`` + * - :option:`--prefix` (per-user) + - ``$HOME/.cabal`` + - ``%APPDATA%\cabal`` + * - :option:`--bindir` + - ``$prefix/bin`` + - ``$prefix\bin`` + * - :option:`--libdir` + - ``$prefix/lib`` + - ``$prefix`` + * - :option:`--libsubdir` (others) + - ``$pkgid/$compiler`` + - ``$pkgid\$compiler`` + * - :option:`--dynlibdir` + - ``$libdir/$abi`` + - ``$libdir\$abi`` + * - :option:`--libexecdir` + - ``$prefix/libexec`` + - ``$prefix\$pkgid`` + * - :option:`--datadir` (executable) + - ``$prefix/share`` + - ``$prefix`` + * - :option:`--datadir` (library) + - ``$prefix/share`` + - ``%PROGRAMFILES%\Haskell`` + * - :option:`--datasubdir` + - ``$pkgid`` + - ``$pkgid`` + * - :option:`--docdir` + - ``$datadir/doc/$pkgid`` + - ``$prefix\doc\$pkgid`` + * - :option:`--sysconfdir` + - ``$prefix/etc`` + - ``$prefix\etc`` + * - :option:`--htmldir` + - ``$docdir/html`` + - ``$docdir\html`` + * - :option:`--program-prefix` + - (empty) + - (empty) + * - :option:`--program-suffix` + - (empty) + - (empty) + +Prefix-independence +""""""""""""""""""" + +On Windows it is possible to obtain the pathname of the running program. +This means that we can construct an installable executable package that +is independent of its absolute install location. 
The executable can find +its auxiliary files by finding its own path and knowing the location of +the other files relative to ``$bindir``. Prefix-independence is +particularly useful: it means the user can choose the install location +(i.e. the value of ``$prefix``) at install-time, rather than having to +bake the path into the binary when it is built. + +In order to achieve this, we require that for an executable on Windows, +all of ``$bindir``, ``$libdir``, ``$dynlibdir``, ``$datadir`` and ``$libexecdir`` begin +with ``$prefix``. If this is not the case then the compiled executable +will have baked-in all absolute paths. + +The application need do nothing special to achieve prefix-independence. +If it finds any files using ``getDataFileName`` and the `other functions +provided for the +purpose `__, +the files will be accessed relative to the location of the current +executable. + +A library cannot (currently) be prefix-independent, because it will be +linked into an executable whose file system location bears no relation +to the library package. + +Controlling Flag Assignments +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Flag assignments (see the `resolution of conditions and +flags `__) +can be controlled with the following command line options. + +.. option:: -f flagname or -f -flagname + + Force the specified flag to ``true`` or ``false`` (if preceded with + a ``-``). Later specifications for the same flags will override + earlier, i.e., specifying ``-fdebug -f-debug`` is equivalent to + ``-f-debug`` + +.. option:: --flags=flagspecs + + Same as ``-f``, but allows specifying multiple flag assignments at + once. The parameter is a space-separated list of flag names (to + force a flag to ``true``), optionally preceded by a ``-`` (to force + a flag to ``false``). For example, + ``--flags="debug -feature1 feature2"`` is equivalent to + ``-fdebug -f-feature1 -ffeature2``. + +Building Test Suites +^^^^^^^^^^^^^^^^^^^^ + +.. 
option:: --enable-tests + + Build the test suites defined in the package description file during + the ``build`` stage. Check for dependencies required by the test + suites. If the package is configured with this option, it will be + possible to run the test suites with the ``test`` command after the + package is built. + +.. option:: --disable-tests + + (default) Do not build any test suites during the ``build`` stage. + Do not check for dependencies required only by the test suites. It + will not be possible to invoke the ``test`` command without + reconfiguring the package. + +.. option:: --enable-coverage + + Build libraries and executables (including test suites) with Haskell + Program Coverage enabled. Running the test suites will automatically + generate coverage reports with HPC. + +.. option:: --disable-coverage + + (default) Do not enable Haskell Program Coverage. + +Miscellaneous options +^^^^^^^^^^^^^^^^^^^^^ + +.. option:: --user + + Does a per-user installation. This changes the `default installation + prefix <#paths-in-the-simple-build-system>`__. It also allow + dependencies to be satisfied by the user's package database, in + addition to the global database. This also implies a default of + ``--user`` for any subsequent ``install`` command, as packages + registered in the global database should not depend on packages + registered in a user's database. + +.. option:: --global + + (default) Does a global installation. In this case package + dependencies must be satisfied by the global package database. All + packages in the user's package database will be ignored. Typically + the final installation step will require administrative privileges. + +.. option:: --package-db=db + + Allows package dependencies to be satisfied from this additional + package database *db* in addition to the global package database. + All packages in the user's package database will be ignored. The + interpretation of *db* is implementation-specific. 
Typically it will + be a file or directory. Not all implementations support arbitrary + package databases. + + This pushes an extra db onto the db stack. The :option:`--global` and + :option:`--user` mode switches add the respective [Global] and [Global, + User] dbs to the initial stack. There is a compiler-implementation + constraint that the global db must appear first in the stack, and if + the user one appears at all, it must appear immediately after the + global db. + + To reset the stack, use ``--package-db=clear``. + +.. option:: --ipid=ipid + + Specifies the *installed package identifier* of the package to be + built; this identifier is passed on to GHC and serves as the basis + for linker symbols and the ``id`` field in a ``ghc-pkg`` + registration. When a package has multiple components, the actual + component identifiers are derived off of this identifier (e.g., an + internal library ``foo`` from package ``p-0.1-abcd`` will get the + identifier ``p-0.1-abcd-foo``. + +.. option:: --cid=cid + + Specifies the *component identifier* of the component being built; + this is only valid if you are configuring a single component. + +.. option:: --default-user-config=file + + Allows a "default" ``cabal.config`` freeze file to be passed in + manually. This file will only be used if one does not exist in the + project directory already. Typically, this can be set from the + global cabal ``config`` file so as to provide a default set of + partial constraints to be used by projects, providing a way for + users to peg themselves to stable package collections. + +.. option:: --enable-optimization[=n] or -O [n] + + (default) Build with optimization flags (if available). This is + appropriate for production use, taking more time to build faster + libraries and programs. + + The optional *n* value is the optimisation level. Some compilers + support multiple optimisation levels. The range is 0 to 2. 
Level 0 + is equivalent to :option:`--disable-optimization`, level 1 is the + default if no *n* parameter is given. Level 2 is higher optimisation + if the compiler supports it. Level 2 is likely to lead to longer + compile times and bigger generated code. + + When optimizations are enabled, Cabal passes ``-O2`` to the C compiler. + +.. option:: --disable-optimization + + Build without optimization. This is suited for development: building + will be quicker, but the resulting library or programs will be + slower. + +.. option:: --enable-profiling + + Build libraries and executables with profiling enabled (for + compilers that support profiling as a separate mode). For this to + work, all libraries used by this package must also have been built + with profiling support. For libraries this involves building an + additional instance of the library in addition to the normal + non-profiling instance. For executables it changes the single + executable to be built in profiling mode. + + This flag covers both libraries and executables, but can be + overridden by the :option:`--enable-library-profiling` flag. + + See also the :option:`--profiling-detail` flag below. + +.. option:: --disable-profiling + + (default) Do not enable profiling in generated libraries and + executables. + +.. option:: --enable-library-profiling or -p + + As with :option:`--enable-profiling` above, but it applies only for + libraries. So this generates an additional profiling instance of the + library in addition to the normal non-profiling instance. + + The :option:`--enable-profiling` flag controls the profiling mode for both + libraries and executables, but if different modes are desired for + libraries versus executables then use :option:`--enable-library-profiling` + as well. + +.. option:: --disable-library-profiling + + (default) Do not generate an additional profiling version of the library. + +.. 
option:: --profiling-detail[=level] + + Some compilers that support profiling, notably GHC, can allocate + costs to different parts of the program and there are different + levels of granularity or detail with which this can be done. In + particular for GHC this concept is called "cost centers", and GHC + can automatically add cost centers, and can do so in different ways. + + This flag covers both libraries and executables, but can be + overridden by the :option:`--library-profiling-detail` flag. + + Currently this setting is ignored for compilers other than GHC. The + levels that cabal currently supports are: + + default + For GHC this uses ``exported-functions`` for libraries and + ``toplevel-functions`` for executables. + none + No costs will be assigned to any code within this component. + exported-functions + Costs will be assigned at the granularity of all top level + functions exported from each module. In GHC specifically, this + is for non-inline functions. + toplevel-functions + Costs will be assigned at the granularity of all top level + functions in each module, whether they are exported from the + module or not. In GHC specifically, this is for non-inline + functions. + all-functions + Costs will be assigned at the granularity of all functions in + each module, whether top level or local. In GHC specifically, + this is for non-inline toplevel or where-bound functions or + values. + + This flag is new in Cabal-1.24. Prior versions used the equivalent + of ``none`` above. + +.. option:: --library-profiling-detail[=level] + + As with :option:`--profiling-detail` above, but it applies only for + libraries. + + The level for both libraries and executables is set by the + :option:`--profiling-detail` flag, but if different levels are desired + for libraries versus executables then use + :option:`--library-profiling-detail` as well. + +.. option:: --enable-library-vanilla + + (default) Build ordinary libraries (as opposed to profiling + libraries). 
This is independent of the + :option:`--enable-library-profiling` option. If you enable both, you get + both. + +.. option:: --disable-library-vanilla + + Do not build ordinary libraries. This is useful in conjunction with + :option:`--enable-library-profiling` to build only profiling libraries, + rather than profiling and ordinary libraries. + +.. option:: --enable-library-for-ghci + + (default) Build libraries suitable for use with GHCi. + +.. option:: --disable-library-for-ghci + + Not all platforms support GHCi and indeed on some platforms, trying + to build GHCi libs fails. In such cases this flag can be used as a + workaround. + +.. option:: --enable-split-objs + + Use the GHC ``-split-objs`` feature when building the library. This + reduces the final size of the executables that use the library by + allowing them to link with only the bits that they use rather than + the entire library. The downside is that building the library takes + longer and uses considerably more memory. + +.. option:: --disable-split-objs + + (default) Do not use the GHC ``-split-objs`` feature. This makes + building the library quicker but the final executables that use the + library will be larger. + +.. option:: --enable-executable-stripping + + (default) When installing binary executable programs, run the + ``strip`` program on the binary. This can considerably reduce the + size of the executable binary file. It does this by removing + debugging information and symbols. While such extra information is + useful for debugging C programs with traditional debuggers it is + rarely helpful for debugging binaries produced by Haskell compilers. + + Not all Haskell implementations generate native binaries. For such + implementations this option has no effect. + +.. option:: --disable-executable-stripping + + Do not strip binary executables during installation. 
You might want + to use this option if you need to debug a program using gdb, for + example if you want to debug the C parts of a program containing + both Haskell and C code. Another reason is if your are building a + package for a system which has a policy of managing the stripping + itself (such as some Linux distributions). + +.. option:: --enable-shared + + Build shared library. This implies a separate compiler run to + generate position independent code as required on most platforms. + +.. option:: --disable-shared + + (default) Do not build shared library. + +.. option:: --enable-static + + Build a static library. This passes ``-staticlib`` to GHC (available + for iOS, and with 8.4 more platforms). The result is an archive ``.a`` + containing all dependent haskell libararies combined. + +.. option:: --disable-static + + (default) Do not build a static library. + +.. option:: --enable-executable-dynamic + + Link executables dynamically. The executable's library dependencies + should be built as shared objects. This implies :option:`--enable-shared` + unless :option:`--disable-shared` is explicitly specified. + +.. option:: --disable-executable-dynamic + + (default) Link executables statically. + +.. option:: --configure-option=str + + An extra option to an external ``configure`` script, if one is used + (see the section on `system-dependent + parameters `__). + There can be several of these options. + +.. option:: --extra-include-dirs[=dir] + + An extra directory to search for C header files. You can use this + flag multiple times to get a list of directories. + + You might need to use this flag if you have standard system header + files in a non-standard location that is not mentioned in the + package's ``.cabal`` file. Using this option has the same affect as + appending the directory *dir* to the ``include-dirs`` field in each + library and executable in the package's ``.cabal`` file. 
The + advantage of course is that you do not have to modify the package at + all. These extra directories will be used while building the package + and for libraries it is also saved in the package registration + information and used when compiling modules that use the library. + +.. option:: --extra-lib-dirs[=dir] + + An extra directory to search for system libraries files. You can use + this flag multiple times to get a list of directories. + +.. option:: --extra-framework-dirs[=dir] + + An extra directory to search for frameworks (OS X only). You can use + this flag multiple times to get a list of directories. + + You might need to use this flag if you have standard system + libraries in a non-standard location that is not mentioned in the + package's ``.cabal`` file. Using this option has the same affect as + appending the directory *dir* to the ``extra-lib-dirs`` field in + each library and executable in the package's ``.cabal`` file. The + advantage of course is that you do not have to modify the package at + all. These extra directories will be used while building the package + and for libraries it is also saved in the package registration + information and used when compiling modules that use the library. + +.. option:: --dependency[=pkgname=ipid] + + Specify that a particular dependency should used for a particular + package name. In particular, it declares that any reference to + *pkgname* in a ``build-depends`` should be resolved to *ipid*. + +.. option:: --exact-configuration + + This changes Cabal to require every dependency be explicitly + specified using :option:`--dependency`, rather than use Cabal's (very + simple) dependency solver. This is useful for programmatic use of + Cabal's API, where you want to error if you didn't specify enough + :option:`--dependency` flags. + +.. option:: --allow-newer[=pkgs], --allow-older[=pkgs] + + Selectively relax upper or lower bounds in dependencies without + editing the package description respectively. 
+ + The following description focuses on upper bounds and the + :option:`--allow-newer` flag, but applies analogously to + :option:`--allow-older` and lower bounds. :option:`--allow-newer` + and :option:`--allow-older` can be used at the same time. + + If you want to install a package A that depends on B >= 1.0 && < + 2.0, but you have the version 2.0 of B installed, you can compile A + against B 2.0 by using ``cabal install --allow-newer=B A``. This + works for the whole package index: if A also depends on C that in + turn depends on B < 2.0, C's dependency on B will be also relaxed. + + Example: + + :: + + $ cd foo + $ cabal configure + Resolving dependencies... + cabal: Could not resolve dependencies: + [...] + $ cabal configure --allow-newer + Resolving dependencies... + Configuring foo... + + Additional examples: + + :: + + # Relax upper bounds in all dependencies. + $ cabal install --allow-newer foo + + # Relax upper bounds only in dependencies on bar, baz and quux. + $ cabal install --allow-newer=bar,baz,quux foo + + # Relax the upper bound on bar and force bar==2.1. + $ cabal install --allow-newer=bar --constraint="bar==2.1" foo + + It's also possible to limit the scope of :option:`--allow-newer` to single + packages with the ``--allow-newer=scope:dep`` syntax. This means + that the dependency on ``dep`` will be relaxed only for the package + ``scope``. + + Example: + + :: + + # Relax upper bound in foo's dependency on base; also relax upper bound in + # every package's dependency on lens. + $ cabal install --allow-newer=foo:base,lens + + # Relax upper bounds in foo's dependency on base and bar's dependency + # on time; also relax the upper bound in the dependency on lens specified by + # any package. + $ cabal install --allow-newer=foo:base,lens --allow-newer=bar:time + + Finally, one can enable :option:`--allow-newer` permanently by setting + ``allow-newer: True`` in the ``~/.cabal/config`` file. 
Enabling + 'allow-newer' selectively is also supported in the config file + (``allow-newer: foo, bar, baz:base``). + +.. option:: --constraint=constraint + + Restrict solutions involving a package to given version + bounds, flag settings, and other properties. For example, to + consider only install plans that use version 2.1 of ``bar`` + or do not use ``bar`` at all, write: + + :: + + $ cabal install --constraint="bar == 2.1" + + Version bounds have the same syntax as ``build-depends``. As + a special case, the following prevents ``bar`` from being + used at all: + + :: + + # Note: this is just syntax sugar for '> 1 && < 1', and is + # supported by build-depends. + $ cabal install --constraint="bar -none" + + You can also specify flag assignments: + + :: + + # Require bar to be installed with the foo flag turned on and + # the baz flag turned off. + $ cabal install --constraint="bar +foo -baz" + + To specify multiple constraints, you may pass the + ``constraint`` option multiple times. + + There are also some more specialized constraints, which most people + don't generally need: + + :: + + # Require that a version of bar be used that is already installed in + # the global package database. + $ cabal install --constraint="bar installed" + + # Require the local source copy of bar to be used. + # (Note: By default, if we have a local package we will + # automatically use it, so it will generally not be necessary to + # specify this.) + $ cabal install --constraint="bar source" + + # Require that bar have test suites and benchmarks enabled. + $ cabal install --constraint="bar test" --constraint="bar bench" + + By default, constraints only apply to build dependencies + (``build-depends``), build dependencies of build + dependencies, and so on. Constraints normally do not apply to + dependencies of the ``Setup.hs`` script of any package + (``setup-depends``) nor do they apply to build tools + (``build-tool-depends``) or the dependencies of build + tools. 
To explicitly apply a constraint to a setup or build + tool dependency, you can add a qualifier to the constraint as + follows: + + :: + + # Example use of the 'any' qualifier. This constraint + # applies to package bar anywhere in the dependency graph. + $ cabal install --constraint="any.bar == 1.0" + + :: + + # Example uses of 'setup' qualifiers. + + # This constraint applies to package bar when it is a + # dependency of any Setup.hs script. + $ cabal install --constraint="setup.bar == 1.0" + + # This constraint applies to package bar when it is a + # dependency of the Setup.hs script of package foo. + $ cabal install --constraint="foo:setup.bar == 1.0" + + .. TODO: Uncomment this example once we decide on a syntax for 'exe'. + .. # Example use of the 'exe' (executable build tool) + # qualifier. This constraint applies to package baz when it + # is a dependency of the build tool bar being used to + # build package foo. + $ cabal install --constraint="foo:bar:exe.baz == 1.0" + +.. option:: --preference=preference + + Specify a soft constraint on versions of a package. The solver will + attempt to satisfy these preferences on a "best-effort" basis. + +.. option:: --disable-response-files + + Enable workaround for older versions of programs such as ``ar`` or + ``ld`` that do not support response file arguments (i.e. ``@file`` + arguments). You may want this flag only if you specify custom ar + executable. For system ``ar`` or the one bundled with ``ghc`` on + Windows the ``cabal`` should do the right thing and hence should + normally not require this flag. + +.. _setup-build: + +setup build +----------- + +Perform any preprocessing or compilation needed to make this package +ready for installation. + +This command takes the following options: + +.. program:: setup build + +.. option:: --prog-options=options, --prog-option=option + + These are mostly the same as the `options configure + step <#setup-configure>`__. 
Unlike the options specified at the + configure step, any program options specified at the build step are + not persistent but are used for that invocation only. They options + specified at the build step are in addition not in replacement of + any options specified at the configure step. + +.. _setup-haddock: + +setup haddock +------------- + +.. program:: setup haddock + +Build the documentation for the package using Haddock_. +By default, only the documentation for the exposed modules is generated +(but see the :option:`--executables` and :option:`--internal` flags below). + +This command takes the following options: + +.. option:: --hoogle + + Generate a file ``dist/doc/html/``\ *pkgid*\ ``.txt``, which can be + converted by Hoogle_ into a + database for searching. This is equivalent to running Haddock_ + with the ``--hoogle`` flag. + +.. option:: --html-location=url + + Specify a template for the location of HTML documentation for + prerequisite packages. The substitutions (`see + listing <#paths-in-the-simple-build-system>`__) are applied to the + template to obtain a location for each package, which will be used + by hyperlinks in the generated documentation. For example, the + following command generates links pointing at Hackage_ pages: + + setup haddock + --html-location='http://hackage.haskell.org/packages/archive/$pkg/latest/doc/html' + + Here the argument is quoted to prevent substitution by the shell. If + this option is omitted, the location for each package is obtained + using the package tool (e.g. ``ghc-pkg``). + +.. option:: --executables + + Also run Haddock_ for the modules of all the executable programs. By default + Haddock_ is run only on the exported modules. + +.. option:: --internal + + Run Haddock_ for the all + modules, including unexposed ones, and make + Haddock_ generate documentation + for unexported symbols as well. + +.. 
option:: --css=path + + The argument *path* denotes a CSS file, which is passed to + Haddock_ and used to set the + style of the generated documentation. This is only needed to + override the default style that + Haddock_ uses. + +.. option:: --hyperlink-source + + Generate Haddock_ documentation integrated with HsColour_ . First, + HsColour_ is run to generate colourised code. Then Haddock_ is run to + generate HTML documentation. Each entity shown in the documentation is + linked to its definition in the colourised code. + +.. option:: --hscolour-css=path + + The argument *path* denotes a CSS file, which is passed to HsColour_ as in + + runhaskell Setup.hs hscolour --css=*path* + +.. _setup-hscolour: + +setup hscolour +-------------- + +Produce colourised code in HTML format using HsColour_. Colourised code for +exported modules is put in ``dist/doc/html/``\ *pkgid*\ ``/src``. + +This command takes the following options: + +.. program:: setup hscolour + +.. option:: --executables + + Also run HsColour_ on the sources of all executable programs. Colourised + code is put in ``dist/doc/html/``\ *pkgid*/*executable*\ ``/src``. + +.. option:: --css=path + + Use the given CSS file for the generated HTML files. The CSS file + defines the colours used to colourise code. Note that this copies + the given CSS file to the directory with the generated HTML files + (renamed to ``hscolour.css``) rather than linking to it. + +.. _setup-install: + +setup install +------------- + +.. program:: setup install + +Copy the files into the install locations and (for library packages) +register the package with the compiler, i.e. make the modules it +contains available to programs. + +The `install locations <#installation-paths>`__ are determined by +options to `setup configure`_. + +This command takes the following options: + +.. option:: --global + + Register this package in the system-wide database. 
(This is the + default, unless the :option:`setup configure --user` option was supplied + to the ``configure`` command.) + +.. option:: --user + + Register this package in the user's local package database. (This is + the default if the :option:`setup configure --user` option was supplied + to the ``configure`` command.) + +.. _setup-copy: + +setup copy +---------- + +Copy the files without registering them. This command is mainly of use +to those creating binary packages. + +This command takes the following option: + +.. program:: setup copy + +.. option:: --destdir=path + + Specify the directory under which to place installed files. If this is + not given, then the root directory is assumed. + +.. _setup-register: + +setup register +-------------- + +Register this package with the compiler, i.e. make the modules it +contains available to programs. This only makes sense for library +packages. Note that the ``install`` command incorporates this action. +The main use of this separate command is in the post-installation step +for a binary package. + +This command takes the following options: + +.. program:: setup register + +.. option:: --global + + Register this package in the system-wide database. (This is the + default.) + +.. option:: --user + + Register this package in the user's local package database. + +.. option:: --gen-script + + Instead of registering the package, generate a script containing + commands to perform the registration. On Unix, this file is called + ``register.sh``, on Windows, ``register.bat``. This script might be + included in a binary bundle, to be run after the bundle is unpacked + on the target system. + +.. option:: --gen-pkg-config[=path] + + Instead of registering the package, generate a package registration + file (or directory, in some circumstances). This only applies to + compilers that support package registration files which at the + moment is only GHC. 
The file should be used with the compiler's + mechanism for registering packages. This option is mainly intended + for packaging systems. If possible use the :option:`--gen-script` option + instead since it is more portable across Haskell implementations. + The *path* is optional and can be used to specify a particular + output file to generate. Otherwise, by default the file is the + package name and version with a ``.conf`` extension. + + This option outputs a directory if the package requires multiple + registrations: this can occur if internal/convenience libraries are + used. These configuration file names are sorted so that they can be + registered in order. + +.. option:: --inplace + + Registers the package for use directly from the build tree, without + needing to install it. This can be useful for testing: there's no + need to install the package after modifying it, just recompile and + test. + + This flag does not create a build-tree-local package database. It + still registers the package in one of the user or global databases. + + However, there are some caveats. It only works with GHC (currently). + It only works if your package doesn't depend on having any + supplemental files installed --- plain Haskell libraries should be + fine. + +.. _setup-unregister: + +setup unregister +---------------- + +.. program:: setup unregister + +Deregister this package with the compiler. + +This command takes the following options: + +.. option:: --global + + Deregister this package in the system-wide database. (This is the + default.) + +.. option:: --user + + Deregister this package in the user's local package database. + +.. option:: --gen-script + + Instead of deregistering the package, generate a script containing + commands to perform the deregistration. On Unix, this file is called + ``unregister.sh``, on Windows, ``unregister.bat``. This script might + be included in a binary bundle, to be run on the target system. + +.. 
_setup-clean: + +setup clean +----------- + +Remove any local files created during the ``configure``, ``build``, +``haddock``, ``register`` or ``unregister`` steps, and also any files +and directories listed in the :pkg-field:`extra-tmp-files` field. + +This command takes the following options: + +.. program:: setup clean + +.. option:: --save-configure, -s + + Keeps the configuration information so it is not necessary to run + the configure step again before building. + +setup test +---------- + +Run the test suites specified in the package description file. Aside +from the following flags, Cabal accepts the name of one or more test +suites on the command line after ``test``. When supplied, Cabal will run +only the named test suites, otherwise, Cabal will run all test suites in +the package. + +.. program:: setup test + +.. option:: --builddir=dir + + The directory where Cabal puts generated build files (default: + ``dist``). Test logs will be located in the ``test`` subdirectory. + +.. option:: --human-log=path + + The template used to name human-readable test logs; the path is + relative to ``dist/test``. By default, logs are named according to + the template ``$pkgid-$test-suite.log``, so that each test suite + will be logged to its own human-readable log file. Template + variables allowed are: ``$pkgid``, ``$compiler``, ``$os``, + ``$arch``, ``$abi``, ``$abitag``, ``$test-suite``, and ``$result``. + +.. option:: --machine-log=path + + The path to the machine-readable log, relative to ``dist/test``. The + default template is ``$pkgid.log``. Template variables allowed are: + ``$pkgid``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, ``$abitag`` + and ``$result``. + +.. option:: --show-details=filter + + Determines if the results of individual test cases are shown on the + terminal. May be ``always`` (always show), ``never`` (never show), + ``failures`` (show only failed results), or ``streaming`` (show all + results in real time). + +.. 
option:: --test-options=options + Give extra options to the test executables. + +.. option:: --test-option=option + + give an extra option to the test executables. There is no need to + quote options containing spaces because a single option is assumed, + so options will not be split on spaces. + +.. _setup-sdist: + +setup sdist +----------- + +Create a system- and compiler-independent source distribution in a file +*package*-*version*\ ``.tar.gz`` in the ``dist`` subdirectory, for +distribution to package builders. When unpacked, the commands listed in +this section will be available. + +The files placed in this distribution are the package description file, +the setup script, the sources of the modules named in the package +description file, and files named in the ``license-file``, ``main-is``, +``c-sources``, ``asm-sources``, ``cmm-sources``, ``js-sources``, +``data-files``, ``extra-source-files`` and ``extra-doc-files`` fields. + +This command takes the following option: + +.. program:: setup sdist + +.. option:: --snapshot + + Append today's date (in "YYYYMMDD" format) to the version number for + the generated source package. The original package is unaffected. + + +.. include:: references.inc diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/intro.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/intro.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/intro.rst 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/intro.rst 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,200 @@ +.. highlight:: console + +Cabal is the standard package system for +Haskell_ software. It helps people to +configure, build and install Haskell software and to distribute it +easily to other users and developers. + +There is a command line tool called ``cabal`` for working with Cabal +packages. 
It helps with installing existing packages and also helps +people developing their own packages. It can be used to work with local +packages or to install packages from online package archives, including +automatically installing dependencies. By default it is configured to +use Hackage_ which is Haskell's central +package archive that contains thousands of libraries and applications in +the Cabal package format. + +Introduction +============ + +Cabal is a package system for Haskell software. The point of a package +system is to enable software developers and users to easily distribute, +use and reuse software. A package system makes it easier for developers +to get their software into the hands of users. Equally importantly, it +makes it easier for software developers to be able to reuse software +components written by other developers. + +Packaging systems deal with packages and with Cabal we call them *Cabal +packages*. The Cabal package is the unit of distribution. Every Cabal +package has a name and a version number which are used to identify the +package, e.g. ``filepath-1.0``. + +Cabal packages can depend on other Cabal packages. There are tools to +enable automated package management. This means it is possible for +developers and users to install a package plus all of the other Cabal +packages that it depends on. It also means that it is practical to make +very modular systems using lots of packages that reuse code written by +many developers. + +Cabal packages are source based and are typically (but not necessarily) +portable to many platforms and Haskell implementations. The Cabal +package format is designed to make it possible to translate into other +formats, including binary packages for various systems. + +When distributed, Cabal packages use the standard compressed tarball +format, with the file extension ``.tar.gz``, e.g. +``filepath-1.0.tar.gz``. 
+ +Note that packages are not part of the Haskell language, rather they are +a feature provided by the combination of Cabal and GHC (and several +other Haskell implementations). + +A tool for working with packages +-------------------------------- + +There is a command line tool, called "``cabal``", that users and +developers can use to build and install Cabal packages. It can be used +for both local packages and for packages available remotely over the +network. It can automatically install Cabal packages plus any other +Cabal packages they depend on. + +Developers can use the tool with packages in local directories, e.g. + +:: + + $ cd foo/ + $ cabal install + +While working on a package in a local directory, developers can run the +individual steps to configure and build, and also generate documentation +and run test suites and benchmarks. + +It is also possible to install several local packages at once, e.g. + +:: + + $ cabal install foo/ bar/ + +Developers and users can use the tool to install packages from remote +Cabal package archives. By default, the ``cabal`` tool is configured to +use the central Haskell package archive called +Hackage_ but it is possible to use it +with any other suitable archive. + +:: + + $ cabal install xmonad + +This will install the ``xmonad`` package plus all of its dependencies. + +In addition to packages that have been published in an archive, +developers can install packages from local or remote tarball files, for +example + +:: + + $ cabal install foo-1.0.tar.gz + $ cabal install http://example.com/foo-1.0.tar.gz + +Cabal provides a number of ways for a user to customise how and where a +package is installed. They can decide where a package will be installed, +which Haskell implementation to use and whether to build optimised code +or build with the ability to profile code. It is not expected that users +will have to modify any of the information in the ``.cabal`` file. 
+ +For full details, see the section on `building and installing +packages `__. + +Note that ``cabal`` is not the only tool for working with Cabal +packages. Due to the standardised format and a library for reading +``.cabal`` files, there are several other special-purpose tools. + +What's in a package +------------------- + +A Cabal package consists of: + +- Haskell software, including libraries, executables and tests +- metadata about the package in a standard human and machine readable + format (the "``.cabal``" file) +- a standard interface to build the package (the "``Setup.hs``" file) + +The ``.cabal`` file contains information about the package, supplied by +the package author. In particular it lists the other Cabal packages that +the package depends on. + +For full details on what goes in the ``.cabal`` and ``Setup.hs`` files, +and for all the other features provided by the build system, see the +section on `developing packages `__. + +Cabal featureset +---------------- + +Cabal and its associated tools and websites covers: + +- a software build system +- software configuration +- packaging for distribution +- automated package management + + - natively using the ``cabal`` command line tool; or + - by translation into native package formats such as RPM or deb + +- web and local Cabal package archives + + - central Hackage website with 1000's of Cabal packages + +Some parts of the system can be used without others. In particular the +built-in build system for simple packages is optional: it is possible to +use custom build systems. + +Similar systems +--------------- + +The Cabal system is roughly comparable with the system of Python Eggs, +Ruby Gems or Perl distributions. Each system has a notion of +distributable packages, and has tools to manage the process of +distributing and installing packages. + +Hackage is an online archive of Cabal packages. It is roughly comparable +to CPAN but with rather fewer packages (around 5,000 vs 28,000). 
+ +Cabal is often compared with autoconf and automake and there is some +overlap in functionality. The most obvious similarity is that the +command line interface for actually configuring and building packages +follows the same steps and has many of the same configuration +parameters. + +:: + + $ ./configure --prefix=... + $ make + $ make install + +compared to + +:: + + $ cabal configure --prefix=... + $ cabal build + $ cabal install + +Cabal's build system for simple packages is considerably less flexible +than make/automake, but has builtin knowledge of how to build Haskell +code and requires very little manual configuration. Cabal's simple build +system is also portable to Windows, without needing a Unix-like +environment such as cygwin/mingw. + +Compared to autoconf, Cabal takes a somewhat different approach to +package configuration. Cabal's approach is designed for automated +package management. Instead of having a configure script that tests for +whether dependencies are available, Cabal packages specify their +dependencies. There is some scope for optional and conditional +dependencies. By having package authors specify dependencies it makes it +possible for tools to install a package and all of its dependencies +automatically. It also makes it possible to translate (in a +mostly-automatic way) into another package format like RPM or deb +which also have automatic dependency resolution. + + +.. 
include:: references.inc diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/misc.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/misc.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/misc.rst 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/misc.rst 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,103 @@ +Reporting bugs and deficiencies +=============================== + +Please report any flaws or feature requests in the `bug +tracker `__. + +For general discussion or queries email the libraries mailing list +libraries@haskell.org. There is also a development mailing list +cabal-devel@haskell.org. + +Stability of Cabal interfaces +============================= + +The Cabal library and related infrastructure is still under active +development. New features are being added and limitations and bugs are +being fixed. This requires internal changes and often user visible +changes as well. We therefore cannot promise complete future-proof +stability, at least not without halting all development work. + +This section documents the aspects of the Cabal interface that we can +promise to keep stable and which bits are subject to change. + +Cabal file format +----------------- + +This is backwards compatible and mostly forwards compatible. New fields +can be added without breaking older versions of Cabal. Fields can be +deprecated without breaking older packages. 
+ +Command-line interface +---------------------- + +Very Stable Command-line interfaces +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- ``./setup configure`` +- ``--prefix`` +- ``--user`` +- ``--ghc``, ``--uhc`` +- ``--verbose`` +- ``--prefix`` + +- ``./setup build`` +- ``./setup install`` +- ``./setup register`` +- ``./setup copy`` + +Stable Command-line interfaces +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Unstable command-line +~~~~~~~~~~~~~~~~~~~~~ + +Functions and Types +------------------- + +The Cabal library follows the `Package Versioning Policy`_. +This means that within a stable major release, for example 1.2.x, there +will be no incompatible API changes. But minor version increments, for +example 1.2.3, indicate compatible API additions. + +The Package Versioning Policy does not require any API guarantees +between major releases, for example between 1.2.x and 1.4.x. In practice +of course not everything changes between major releases. Some parts of +the API are more prone to change than others. The rest of this section +gives some informal advice on what level of API stability you can expect +between major releases. + +Very Stable API +~~~~~~~~~~~~~~~ + +- ``defaultMain`` + +- ``defaultMainWithHooks defaultUserHooks`` + +But regular ``defaultMainWithHooks`` isn't stable since ``UserHooks`` +changes. + +Semi-stable API +~~~~~~~~~~~~~~~ + +- ``UserHooks`` The hooks API will change in the future + +- ``Distribution.*`` is mostly declarative information about packages + and is somewhat stable. + +Unstable API +~~~~~~~~~~~~ + +Everything under ``Distribution.Simple.*`` has no stability guarantee. + +Hackage +------- + +The index format is a partly stable interface. It consists of a tar.gz +file that contains directories with ``.cabal`` files in. In future it +may contain more kinds of files so do not assume every file is a +``.cabal`` file. 
Incompatible revisions to the format would involve +bumping the name of the index file, i.e., ``00-index.tar.gz``, +``01-index.tar.gz`` etc. + + +.. include:: references.inc diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/nix-local-build-overview.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/nix-local-build-overview.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/nix-local-build-overview.rst 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/nix-local-build-overview.rst 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,43 @@ +Nix-style Local Builds +====================== + +Nix-style local builds are a new build system implementation inspired by Nix. +The Nix-style local build system is commonly called "new-build" for short +after the ``cabal new-*`` family of commands that control it. However, those +names are only temporary until Nix-style local builds become the default. +This is expected to happen soon. For those who do not wish to use the new +functionality, the classic project style will not be removed immediately, +but these legacy commands will require the usage of the ``v1-`` prefix as of +Cabal 3.0 and will be removed in a future release. For a future-proof +way to use these commands in a script or tutorial that anticipates the +possibility of another UI paradigm being devised in the future, there +are also ``v2-`` prefixed versions that will reference the same functionality +until such a point as it is completely removed from Cabal. + +Nix-style local builds combine the best of non-sandboxed and sandboxed Cabal: + +1. Like sandboxed Cabal today, we build sets of independent local + packages deterministically and independent of any global state. + new-build will never tell you that it can't build your package + because it would result in a "dangerous reinstall." 
Given a + particular state of the Hackage index, your build is completely + reproducible. For example, you no longer need to compile packages + with profiling ahead of time; just request profiling and new-build + will rebuild all its dependencies with profiling automatically. + +2. Like non-sandboxed Cabal today, builds of external packages are + cached in ``~/.cabal/store``, so that a package can be built once, + and then reused anywhere else it is also used. No need to continually + rebuild dependencies whenever you make a new sandbox: dependencies + which can be shared, are shared. + +Nix-style local builds were first released as beta in cabal-install 1.24. +They currently work with all versions of GHC supported by that release: GHC 7.0 and later. + +Some features described in this manual are not implemented. If you need +them, please give us a shout and we'll prioritize accordingly. + + + +.. toctree:: + nix-local-build diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/nix-local-build.rst cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/nix-local-build.rst --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/nix-local-build.rst 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/nix-local-build.rst 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,2069 @@ +.. highlight:: console + +Quickstart +========== + +Suppose that you are in a directory containing a single Cabal package +which you wish to build. 
You can configure and build it using Nix-style +local builds with this command (configuring is not necessary): + +:: + + $ cabal new-build + +To open a GHCi shell with this package, use this command: + +:: + + $ cabal new-repl + +To run an executable defined in this package, use this command: + +:: + + $ cabal new-run [executable args] + +Developing multiple packages +---------------------------- + +Many Cabal projects involve multiple packages which need to be built +together. To build multiple Cabal packages, you need to first create a +``cabal.project`` file which declares where all the local package +directories live. For example, in the Cabal repository, there is a root +directory with a folder per package, e.g., the folders ``Cabal`` and +``cabal-install``. The ``cabal.project`` file specifies each folder as +part of the project: + +.. code-block:: cabal + + packages: Cabal/ + cabal-install/ + +The expectation is that a ``cabal.project`` is checked into your source +control, to be used by all developers of a project. If you need to make +local changes, they can be placed in ``cabal.project.local`` (which +should not be checked in.) + +Then, to build every component of every package, from the top-level +directory, run the command: (using cabal-install-2.0 or greater.) + +:: + + $ cabal new-build + +To build a specific package, you can either run ``new-build`` from the +directory of the package in question: + +:: + + $ cd cabal-install + $ cabal new-build + +or you can pass the name of the package as an argument to +``cabal new-build`` (this works in any subdirectory of the project): + +:: + + $ cabal new-build cabal-install + +You can also specify a specific component of the package to build. For +example, to build a test suite named ``package-tests``, use the command: + +:: + + $ cabal new-build package-tests + +Targets can be qualified with package names. So to request +``package-tests`` *from* the ``Cabal`` package, use +``Cabal:package-tests``. 
+ +Unlike sandboxes, there is no need to setup a sandbox or ``add-source`` +projects; just check in ``cabal.project`` to your repository and +``new-build`` will just work. + +Cookbook +======== + +How can I profile my library/application? +----------------------------------------- + +Create or edit your ``cabal.project.local``, adding the following +line:: + + profiling: True + +Now, ``cabal new-build`` will automatically build all libraries and +executables with profiling. You can fine-tune the profiling settings +for each package using :cfg-field:`profiling-detail`:: + + package p + profiling-detail: toplevel-functions + +Alternately, you can call ``cabal new-build --enable-profiling`` to +temporarily build with profiling. + +How it works +============ + +Local versus external packages +------------------------------ + +One of the primary innovations of Nix-style local builds is the +distinction between local packages, which users edit and recompile and +must be built per-project, versus external packages, which can be cached +across projects. To be more precise: + +1. A **local package** is one that is listed explicitly in the + ``packages``, ``optional-packages`` or ``extra-packages`` field of a + project. Usually, these refer to packages whose source code lives + directly in a folder in your project (although, you can list an + arbitrary Hackage package in ``extra-packages`` to force it to be + treated as local). + +Local packages, as well as the external packages (below) which depend on +them, are built **inplace**, meaning that they are always built +specifically for the project and are not installed globally. Inplace +packages are not cached and not given unique hashes, which makes them +suitable for packages which you want to edit and recompile. + +2. An **external package** is any package which is not listed in the + ``packages`` field. The source code for external packages is usually + retrieved from Hackage. 
+ +When an external package does not depend on an inplace package, it can +be built and installed to a **global** store, which can be shared across +projects. These build products are identified by a hash over all of +the inputs which would influence the compilation of a package (flags, +dependency selection, etc.). Just as in Nix, these hashes uniquely +identify the result of a build; if we compute this identifier and we +find that we already have this ID built, we can just use the already +built version. + +The global package store is ``~/.cabal/store`` (configurable via +global `store-dir` option); if you need to clear your store for +whatever reason (e.g., to reclaim disk space or because the global +store is corrupted), deleting this directory is safe (``new-build`` +will just rebuild everything it needs on its next invocation). + +This split motivates some of the UI choices for Nix-style local build +commands. For example, flags passed to ``cabal new-build`` are only +applied to *local* packages, so that adding a flag to +``cabal new-build`` doesn't necessitate a rebuild of *every* transitive +dependency in the global package store. + +In cabal-install 2.0 and above, Nix-style local builds also take advantage of a +new Cabal library feature, `per-component +builds `__, +where each component of a package is configured and built separately. +This can massively speed up rebuilds of packages with lots of components +(e.g., a package that defines multiple executables), as only one +executable needs to be rebuilt. Packages that use Custom setup scripts +are not currently built on a per-component basis. + +Where are my build products? +---------------------------- + +A major deficiency in the current implementation of new-build is that +there is no programmatic way to access the location of build products. 
+The location of the build products is intended to be an internal +implementation detail of new-build, but we also understand that many +unimplemented features can only be reasonably worked around by +accessing build products directly. + +The location where build products can be found varies depending on the +version of cabal-install: + +- In cabal-install-1.24, the dist directory for a package ``p-0.1`` is + stored in ``dist-newstyle/build/p-0.1``. For example, if you built an + executable or test suite named ``pexe``, it would be located at + ``dist-newstyle/build/p-0.1/build/pexe/pexe``. + +- In cabal-install-2.0, the dist directory for a package ``p-0.1`` + defining a library built with GHC 8.0.1 on 64-bit Linux is + ``dist-newstyle/build/x86_64-linux/ghc-8.0.1/p-0.1``. When + per-component builds are enabled (any non-Custom package), a + subcomponent like an executable or test suite named ``pexe`` will be + stored at + ``dist-newstyle/build/x86_64-linux/ghc-8.0.1/p-0.1/c/pexe``; thus, + the full path of the executable is + ``dist-newstyle/build/x86_64-linux/ghc-8.0.1/p-0.1/c/pexe/build/pexe/pexe`` + (you can see why we want this to be an implementation detail!) + +- In cabal-install-2.2 and above, the ``/c/`` part of the above path + is replaced with one of ``/l/``, ``/x/``, ``/f/``, ``/t/``, or + ``/b/``, depending on the type of component (sublibrary, + executable, foreign library, test suite, or benchmark + respectively). So the full path to an executable named ``pexe`` + compiled with GHC 8.0.1 on a 64-bit Linux is now + ``dist-newstyle/build/x86_64-linux/ghc-8.0.1/p-0.1/x/pexe/build/pexe/pexe``; + for a benchmark named ``pbench`` it now is + ``dist-newstyle/build/x86_64-linux/ghc-8.0.1/p-0.1/b/pbench/build/pbench/pbench``; + + +The paths are a bit longer in 2.0 and above but the benefit is that you can +transparently have multiple builds with different versions of GHC. 
We +plan to add the ability to create aliases for certain build +configurations, and more convenient paths to access particularly useful +build products like executables. + +Caching +------- + +Nix-style local builds sport a robust caching system which help reduce +the time it takes to execute a rebuild cycle. While the details of how +``cabal-install`` does caching are an implementation detail and may +change in the future, knowing what gets cached is helpful for +understanding the performance characteristics of invocations to +``new-build``. The cached intermediate results are stored in +``dist-newstyle/cache``; this folder can be safely deleted to clear the +cache. + +The following intermediate results are cached in the following files in +this folder (the most important two are first): + +``solver-plan`` (binary) + The result of calling the dependency solver, assuming that the + Hackage index, local ``cabal.project`` file, and local ``cabal`` + files are unmodified. (Notably, we do NOT have to dependency solve + again if new build products are stored in the global store; the + invocation of the dependency solver is independent of what is + already available in the store.) +``source-hashes`` (binary) + The hashes of all local source files. When all local source files of + a local package are unchanged, ``cabal new-build`` will skip + invoking ``setup build`` entirely (saving us from a possibly + expensive call to ``ghc --make``). The full list of source files + participating in compilation are determined using + ``setup sdist --list-sources`` (thus, if you do not list all your + source files in a Cabal file, you may fail to recompile when you + edit them.) +``config`` (same format as ``cabal.project``) + The full project configuration, merged from ``cabal.project`` (and + friends) as well as the command line arguments. +``compiler`` (binary) + The configuration of the compiler being used to build the project. 
+``improved-plan`` (binary) + Like ``solver-plan``, but with all non-inplace packages improved + into pre-existing copies from the store. +``plan.json`` (JSON) + A JSON serialization of the computed install plan intended + for integrating ``cabal`` with external tooling. + The `cabal-plan `__ + package provides a library for parsing ``plan.json`` files into a + Haskell data structure as well as an example tool showing possible + applications. + + .. todo:: + + Document JSON schema (including version history of schema) + + +Note that every package also has a local cache managed by the Cabal +build system, e.g., in ``$distdir/cache``. + +There is another useful file in ``dist-newstyle/cache``, +``plan.json``, which is a JSON serialization of the computed install +plan and is intended for integrating with external tooling. + + + + +Commands +======== + +We now give an in-depth description of all the commands, describing the +arguments and flags they accept. + +cabal new-configure +------------------- + +``cabal new-configure`` takes a set of arguments and writes a +``cabal.project.local`` file based on the flags passed to this command. +``cabal new-configure FLAGS; cabal new-build`` is roughly equivalent to +``cabal new-build FLAGS``, except that with ``new-configure`` the flags +are persisted to all subsequent calls to ``new-build``. + +``cabal new-configure`` is intended to be a convenient way to write out +a ``cabal.project.local`` for simple configurations; e.g., +``cabal new-configure -w ghc-7.8`` would ensure that all subsequent +builds with ``cabal new-build`` are performed with the compiler +``ghc-7.8``. For more complex configuration, we recommend writing the +``cabal.project.local`` file directly (or placing it in +``cabal.project``!) + +``cabal new-configure`` inherits options from ``Cabal``. semantics: + +- Any flag accepted by ``./Setup configure``. 
+ +- Any flag accepted by ``cabal configure`` beyond + ``./Setup configure``, namely ``--cabal-lib-version``, + ``--constraint``, ``--preference`` and ``--solver.`` + +- Any flag accepted by ``cabal install`` beyond ``./Setup configure``. + +- Any flag accepted by ``./Setup haddock``. + +The options of all of these flags apply only to *local* packages in a +project; this behavior is different than that of ``cabal install``, +which applies flags to every package that would be built. The motivation +for this is to avoid an innocuous addition to the flags of a package +resulting in a rebuild of every package in the store (which might need +to happen if a flag actually applied to every transitive dependency). To +apply options to an external package, use a ``package`` stanza in a +``cabal.project`` file. + +cabal new-update +---------------- + +``cabal new-update`` updates the state of the package index. If the +project contains multiple remote package repositories it will update +the index of all of them (e.g. when using overlays). + +Some examples: + +:: + + $ cabal new-update # update all remote repos + $ cabal new-update head.hackage # update only head.hackage + +cabal new-build +--------------- + +``cabal new-build`` takes a set of targets and builds them. It +automatically handles building and installing any dependencies of these +targets. + +A target can take any of the following forms: + +- A package target: ``package``, which specifies that all enabled + components of a package are to be built. By default, test suites and + benchmarks are *not* enabled, unless they are explicitly requested + (e.g., via ``--enable-tests``.) + +- A component target: ``[package:][ctype:]component``, which specifies + a specific component (e.g., a library, executable, test suite or + benchmark) to be built. + +- All packages: ``all``, which specifies all packages within the project. 
+ +- Components of a particular type: ``package:ctypes``, ``all:ctypes``: + which specifies all components of the given type. Where valid + ``ctypes`` are: + - ``libs``, ``libraries``, + - ``flibs``, ``foreign-libraries``, + - ``exes``, ``executables``, + - ``tests``, + - ``benches``, ``benchmarks``. + +In component targets, ``package:`` and ``ctype:`` (valid component types +are ``lib``, ``flib``, ``exe``, ``test`` and ``bench``) can be used to +disambiguate when multiple packages define the same component, or the +same component name is used in a package (e.g., a package ``foo`` +defines both an executable and library named ``foo``). We always prefer +interpreting a target as a package name rather than as a component name. + +Some example targets: + +:: + + $ cabal new-build lib:foo-pkg # build the library named foo-pkg + $ cabal new-build foo-pkg:foo-tests # build foo-tests in foo-pkg + +(There is also syntax for specifying module and file targets, but it +doesn't currently do anything.) + +Beyond a list of targets, ``cabal new-build`` accepts all the flags that +``cabal new-configure`` takes. Most of these flags are only taken into +consideration when building local packages; however, some flags may +cause extra store packages to be built (for example, +``--enable-profiling`` will automatically make sure profiling libraries +for all transitive dependencies are built and installed.) + +In addition ``cabal new-build`` accepts these flags: + +- ``--only-configure``: When given we will forgo performing a full build and + abort after running the configure phase of each target package. + + +cabal new-repl +-------------- + +``cabal new-repl TARGET`` loads all of the modules of the target into +GHCi as interpreted bytecode. In addition to ``cabal new-build``'s flags, +it takes an additional ``--repl-options`` flag. + +To prevent ``ghci``-specific flags from triggering unneeded global rebuilds these +flags are now stripped from the internal configuration. 
As a result +``--ghc-options`` will no longer (reliably) work to pass flags to ``ghci`` (or +other repls). Instead, you should use the new ``--repl-options`` flag to +specify these options to the invoked repl. (This flag also works on ``cabal +repl`` and ``Setup repl`` on sufficiently new versions of Cabal.) + +Currently, it is not supported to pass multiple targets to ``new-repl`` +(``new-repl`` will just successively open a separate GHCi session for +each target.) + +It also provides a way to experiment with libraries without needing to download +them manually or to install them globally. + +This command opens a REPL with the current default target loaded, and a version +of the ``vector`` package matching that specification exposed. + +:: + + $ cabal new-repl --build-depends "vector >= 0.12 && < 0.13" + +Both of these commands do the same thing as the above, but only exposes ``base``, +``vector``, and the``vector`` package's transitive dependencies even if the user +is in a project context. + +:: + + $ cabal new-repl --ignore-project --build-depends "vector >= 0.12 && < 0.13" + $ cabal new-repl --project='' --build-depends "vector >= 0.12 && < 0.13" + +This command would add ``vector``, but not (for example) ``primitive``, because +it only includes the packages specified on the command line (and ``base``, which +cannot be excluded for technical reasons). + +:: + + $ cabal new-repl --build-depends vector --no-transitive-deps + +cabal new-run +------------- + +``cabal new-run [TARGET [ARGS]]`` runs the executable specified by the +target, which can be a component, a package or can be left blank, as +long as it can uniquely identify an executable within the project. +Tests and benchmarks are also treated as executables. + +See `the new-build section <#cabal-new-build>`__ for the target syntax. + +Except in the case of the empty target, the strings after it will be +passed to the executable as arguments. 
+ +If one of the arguments starts with ``-`` it will be interpreted as +a cabal flag, so if you need to pass flags to the executable you +have to separate them with ``--``. + +:: + + $ cabal new-run target -- -a -bcd --argument + +'new-run' also supports running script files that use a certain format. With +a script that looks like: + +:: + + #!/usr/bin/env cabal + {- cabal: + build-depends: base ^>= 4.11 + , shelly ^>= 1.8.1 + -} + + main :: IO () + main = do + ... + +It can either be executed like any other script, using ``cabal`` as an +interpreter, or through this command: + +:: + + $ cabal new-run script.hs + $ cabal new-run script.hs -- --arg1 # args are passed like this + +cabal new-freeze +---------------- + +``cabal new-freeze`` writes out a **freeze file** which records all of +the versions and flags which that are picked by the solver under the +current index and flags. Default name of this file is +``cabal.project.freeze`` but in combination with a +``--project-file=my.project`` flag (see :ref:`project-file +`) +the name will be ``my.project.freeze``. +A freeze file has the same syntax as ``cabal.project`` and looks +something like this: + +.. highlight:: cabal + +:: + + constraints: HTTP ==4000.3.3, + HTTP +warp-tests -warn-as-error -network23 +network-uri -mtl1 -conduit10, + QuickCheck ==2.9.1, + QuickCheck +templatehaskell, + -- etc... + + +For end-user executables, it is recommended that you distribute the +``cabal.project.freeze`` file in your source repository so that all +users see a consistent set of dependencies. For libraries, this is not +recommended: users often need to build against different versions of +libraries than what you developed against. + +cabal new-bench +--------------- + +``cabal new-bench [TARGETS] [OPTIONS]`` runs the specified benchmarks +(all the benchmarks in the current package by default), first ensuring +they are up to date. 
+ +cabal new-test +-------------- + +``cabal new-test [TARGETS] [OPTIONS]`` runs the specified test suites +(all the test suites in the current package by default), first ensuring +they are up to date. + +cabal new-haddock +----------------- + +``cabal new-haddock [FLAGS] [TARGET]`` builds Haddock documentation for +the specified packages within the project. + +If a target is not a library :cfg-field:`haddock-benchmarks`, +:cfg-field:`haddock-executables`, :cfg-field:`haddock-internal`, +:cfg-field:`haddock-tests` will be implied as necessary. + +cabal new-exec +--------------- + +``cabal new-exec [FLAGS] [--] COMMAND [--] [ARGS]`` runs the specified command +using the project's environment. That is, passing the right flags to compiler +invocations and bringing the project's executables into scope. + +cabal new-install +----------------- + +``cabal new-install [FLAGS] PACKAGES`` builds the specified packages and +symlinks their executables in ``symlink-bindir`` (usually ``~/.cabal/bin``). + +For example this command will build the latest ``cabal-install`` and symlink +its ``cabal`` executable: + +:: + + $ cabal new-install cabal-install + +In addition, it's possible to use ``cabal new-install`` to install components +of a local project. For example, with an up-to-date Git clone of the Cabal +repository, this command will build cabal-install HEAD and symlink the +``cabal`` executable: + +:: + + $ cabal new-install exe:cabal + +It is also possible to "install" libraries using the ``--lib`` flag. For +example, this command will build the latest Cabal library and install it: + +:: + + $ cabal new-install --lib Cabal + +This works by managing GHC environments. By default, it is writing to the +global environment in ``~/.ghc/$ARCH-$OS-$GHCVER/environments/default``. +``new-install`` provides the ``--package-env`` flag to control which of +these environments is modified. 
+ +This command will modify the environment file in the current directory: + +:: + +    $ cabal new-install --lib Cabal --package-env . + +This command will modify the environment file in the ``~/foo`` directory: + +:: + +    $ cabal new-install --lib Cabal --package-env foo/ + +Do note that the results of the previous two commands will be overwritten by +the use of other new-style commands, so it is not recommended to use them inside +a project directory. + +This command will modify the environment in the "local.env" file in the +current directory: + +:: + +    $ cabal new-install --lib Cabal --package-env local.env + +This command will modify the ``myenv`` named global environment: + +:: + +    $ cabal new-install --lib Cabal --package-env myenv + +If you wish to create a named environment file in the current directory where +the name does not contain an extension, you must reference it as ``./myenv``. + +You can learn more about how to use these environments in `this section of the +GHC manual `_. + +cabal new-clean +--------------- + +``cabal new-clean [FLAGS]`` cleans up the temporary files and build artifacts +stored in the ``dist-newstyle`` folder. + +By default, it removes the entire folder, but it can also spare the configuration +and caches if the ``--save-config`` option is given, in which case it only removes +the build artefacts (``.hi``, ``.o`` along with any other temporary files generated +by the compiler, along with the build output). + +cabal new-sdist +--------------- + +``cabal new-sdist [FLAGS] [TARGETS]`` takes the crucial files needed to build ``TARGETS`` +and puts them into an archive format ready for upload to Hackage. These archives are stable +and two archives of the same format built from the same source will hash to the same value. + +``cabal new-sdist`` takes the following flags: + +- ``-l``, ``--list-only``: Rather than creating an archive, lists files that would be included. +  Output is to ``stdout`` by default. 
The file paths are relative to the project's root +  directory. + +- ``--targz``: Output an archive in ``.tar.gz`` format. + +- ``--zip``: Output an archive in ``.zip`` format. + +- ``-o``, ``--output-dir``: Sets the output dir, if a non-default one is desired. The default is +  ``dist-newstyle/sdist/``. ``--output-dir -`` will send output to ``stdout`` +  unless multiple archives are being created. + +- ``-z``, ``--null``: Only used with ``--list-only``. Separates filenames with a NUL +  byte instead of newlines. + +``new-sdist`` is inherently incompatible with sdist hooks, not due to implementation but due +to fundamental core invariants (same source code should result in the same tarball, byte for +byte) that must be satisfied for it to function correctly in the larger new-build ecosystem. +``autogen-modules`` is able to replace uses of the hooks to add generated modules, along with +the custom publishing of Haddock documentation to Hackage. + +Configuring builds with cabal.project +===================================== + +``cabal.project`` files support a variety of options which configure the +details of your build. The general syntax of a ``cabal.project`` file is +similar to that of a Cabal file: there are a number of fields, some of +which live inside stanzas: + +:: + +    packages: */*.cabal +    with-compiler: /opt/ghc/8.0.1/bin/ghc + +    package cryptohash +      optimization: False + +In general, the accepted field names coincide with the accepted command +line flags that ``cabal install`` and other commands take. For example, +``cabal new-configure --enable-profiling`` will write out a project +file with ``profiling: True``. + +The full configuration of a project is determined by combining the +following sources (later entries override earlier ones): + +1. ``~/.cabal/config`` (the user-wide global configuration) + +2. ``cabal.project`` (the project configuration) + +3. ``cabal.project.freeze`` (the output of ``cabal new-freeze``) + +4. 
``cabal.project.local`` (the output of ``cabal new-configure``) + + +Specifying the local packages +----------------------------- + +The following top-level options specify what the local packages of a +project are: + +.. cfg-field:: packages: package location list (space or comma separated) + :synopsis: Project packages. + + :default: ``./*.cabal`` + + Specifies the list of package locations which contain the local + packages to be built by this project. Package locations can take the + following forms: + + 1. They can specify a Cabal file, or a directory containing a Cabal + file, e.g., ``packages: Cabal cabal-install/cabal-install.cabal``. + + 2. They can specify a glob-style wildcards, which must match one or + more (a) directories containing a (single) Cabal file, (b) Cabal + files (extension ``.cabal``), or (c) tarballs which contain Cabal + packages (extension ``.tar.gz``). + For example, to match all Cabal files in all + subdirectories, as well as the Cabal projects in the parent + directories ``foo`` and ``bar``, use + ``packages: */*.cabal ../{foo,bar}/`` + + 3. They can specify an ``http``, ``https`` or ``file`` + URL, representing the path to a remote tarball to be downloaded + and built. + + There is no command line variant of this field; see :issue:`3585`. + +.. cfg-field:: optional-packages: package location list (space or comma-separated) + :synopsis: Optional project packages. + + :default: ``./*/*.cabal`` + + Like :cfg-field:`packages`, specifies a list of package locations + containing local packages to be built. Unlike :cfg-field:`packages`, + if we glob for a package, it is permissible for the glob to match against + zero packages. The intended use-case for :cfg-field:`optional-packages` + is to make it so that vendored packages can be automatically picked up if + they are placed in a subdirectory, but not error if there aren't any. + + There is no command line variant of this field. + +.. 
cfg-field:: extra-packages: package list with version bounds (comma separated) +    :synopsis: Adds external packages as local + +    [STRIKEOUT:Specifies a list of external packages from Hackage which +    should be considered local packages.] (Not implemented) + +    There is no command line variant of this field. + +[STRIKEOUT:There is also a stanza ``source-repository-package`` for +specifying packages from an external version control.] (Not +implemented.) + +All local packages are *vendored*, in the sense that if other packages +(including external ones from Hackage) depend on a package with the name +of a local package, the local package is preferentially used. This +motivates the default settings:: + +    packages: ./*.cabal +    optional-packages: ./*/*.cabal + +...any package can be vendored simply by making a checkout in the +top-level project directory, as might be seen in this hypothetical +directory layout:: + +    foo.cabal +    foo-helper/     # local package +    unix/           # vendored external package + +All of these options support globs. ``cabal new-build`` has its own glob +format: + +- Anywhere in a path, as many times as you like, you can specify an +  asterisk ``*`` wildcard. E.g., ``*/*.cabal`` matches all ``.cabal`` +  files in all immediate subdirectories. Like in glob(7), asterisks do +  not match hidden files unless there is an explicit period, e.g., +  ``.*/foo.cabal`` will match ``.private/foo.cabal`` (but +  ``*/foo.cabal`` will not). + +- You can use braces to specify specific directories; e.g., +  ``{vendor,pkgs}/*.cabal`` matches all Cabal files in the ``vendor`` +  and ``pkgs`` subdirectories. + +Formally, the format is described by the following BNF: + +.. 
code-block:: abnf + + FilePathGlob ::= FilePathRoot FilePathGlobRel + FilePathRoot ::= {- empty -} # relative to cabal.project + | "/" # Unix root + | [a-zA-Z] ":" [/\\] # Windows root + | "~" # home directory + FilePathGlobRel ::= Glob "/" FilePathGlobRel # Unix directory + | Glob "\\" FilePathGlobRel # Windows directory + | Glob # file + | {- empty -} # trailing slash + Glob ::= GlobPiece * + GlobPiece ::= "*" # wildcard + | [^*{},/\\] * # literal string + | "\\" [*{},] # escaped reserved character + | "{" Glob "," ... "," Glob "}" # union (match any of these) + +Global configuration options +---------------------------- + +The following top-level configuration options are not specific to any +package, and thus apply globally: + +.. cfg-field:: verbose: nat + --verbose=n, -vn + :synopsis: Build verbosity level. + + :default: 1 + + Control the verbosity of ``cabal`` commands, valid values are from 0 + to 3. + + The command line variant of this field is ``--verbose=2``; a short + form ``-v2`` is also supported. + +.. cfg-field:: jobs: nat or $ncpus + --jobs=n, -jn, --jobs=$ncpus + :synopsis: Number of builds running in parallel. + + :default: 1 + + Run *nat* jobs simultaneously when building. If ``$ncpus`` is + specified, run the number of jobs equal to the number of CPUs. + Package building is often quite parallel, so turning on parallelism + can speed up build times quite a bit! + + The command line variant of this field is ``--jobs=2``; a short form + ``-j2`` is also supported; a bare ``--jobs`` or ``-j`` is equivalent + to ``--jobs=$ncpus``. + +.. cfg-field:: keep-going: boolean + --keep-going + :synopsis: Try to continue building on failure. + + :default: False + + If true, after a build failure, continue to build other unaffected + packages. + + The command line variant of this field is ``--keep-going``. + +.. option:: --builddir=DIR + + Specifies the name of the directory where build products for + build will be stored; defaults to ``dist-newstyle``. 
If a + relative name is specified, this directory is resolved relative + to the root of the project (i.e., where the ``cabal.project`` + file lives.) + + This option cannot be specified via a ``cabal.project`` file. + +.. _cmdoption-project-file: +.. option:: --project-file=FILE + + Specifies the name of the project file used to specify the + rest of the top-level configuration; defaults to ``cabal.project``. + This name not only specifies the name of the main project file, + but also the auxiliary project files ``cabal.project.freeze`` + and ``cabal.project.local``; for example, if you specify + ``--project-file=my.project``, then the other files that will + be probed are ``my.project.freeze`` and ``my.project.local``. + + If the specified project file is a relative path, we will + look for the file relative to the current working directory, + and then for the parent directory, until the project file is + found or we have hit the top of the user's home directory. + + This option cannot be specified via a ``cabal.project`` file. + +.. option:: --store-dir=DIR + + Specifies the name of the directory of the global package store. + +Solver configuration options +---------------------------- + +The following settings control the behavior of the dependency solver: + +.. cfg-field:: constraints: constraints list (comma separated) + --constraint="pkg >= 2.0" + :synopsis: Extra dependencies constraints. + + Add extra constraints to the version bounds, flag settings, + and other properties a solver can pick for a + package. For example: + + :: + + constraints: bar == 2.1 + + A package can be specified multiple times in ``constraints``, in + which case the specified constraints are intersected. This is + useful, since the syntax does not allow you to specify multiple + constraints at once. 
For example, to specify both version bounds and +    flag assignments, you would write: + +    :: + +        constraints: bar == 2.1, +                     bar +foo -baz + +    Valid constraints take the same form as for the `constraint +    command line option +    `__. + +.. cfg-field:: preferences: preference (comma separated) +               --preference="pkg >= 2.0" +    :synopsis: Preferred dependency versions. + +    Like :cfg-field:`constraints`, but the solver will attempt to satisfy +    these preferences on a best-effort basis. The resulting install is locally +    optimal with respect to preferences; specifically, no single package +    could be replaced with a more preferred version that still satisfies +    the hard constraints. + +    Operationally, preferences can cause the solver to attempt certain +    version choices of a package before others, which can improve +    dependency solver runtime. + +    One way to use :cfg-field:`preferences` is to take a known working set of +    constraints (e.g., via ``cabal new-freeze``) and record them as +    preferences. In this case, the solver will first attempt to use this +    configuration, and if this violates hard constraints, it will try to +    find the minimal number of upgrades to satisfy the hard constraints +    again. + +    The command line variant of this field is +    ``--preference="pkg >= 2.0"``; to specify multiple preferences, pass +    the flag multiple times. + +.. cfg-field:: allow-newer: none, all or list of scoped package names (space or comma separated) +               --allow-newer, --allow-newer=[none,all,[scope:][^]pkg] +    :synopsis: Lift dependencies upper bound constraints. + +    :default: ``none`` + +    Allow the solver to pick a newer version of some packages than +    would normally be permitted by the :pkg-field:`build-depends` bounds +    of packages in the install plan. This option may be useful if the +    dependency solver cannot otherwise find a valid install plan. 
+ + For example, to relax ``pkg``\ s :pkg-field:`build-depends` upper bound on + ``dep-pkg``, write a scoped package name of the form: + + :: + + allow-newer: pkg:dep-pkg + + If the scope shall be limited to specific releases of ``pkg``, the + extended form as in + + :: + + allow-newer: pkg-1.2.3:dep-pkg, pkg-1.1.2:dep-pkg + + can be used to limit the relaxation of dependencies on + ``dep-pkg`` by the ``pkg-1.2.3`` and ``pkg-1.1.2`` releases only. + + The scoped syntax is recommended, as it is often only a single package + whose upper bound is misbehaving. In this case, the upper bounds of + other packages should still be respected; indeed, relaxing the bound + can break some packages which test the selected version of packages. + + The syntax also allows to prefix the dependee package with a + modifier symbol to modify the scope/semantic of the relaxation + transformation in a additional ways. Currently only one modifier + symbol is defined, i.e. ``^`` (i.e. caret) which causes the + relaxation to be applied only to ``^>=`` operators and leave all other + version operators untouched. + + However, in some situations (e.g., when attempting to build packages + on a new version of GHC), it is useful to disregard *all* + upper-bounds, with respect to a package or all packages. This can be + done by specifying just a package name, or using the keyword ``all`` + to specify all packages: + + :: + + -- Disregard upper bounds involving the dependencies on + -- packages bar, baz. For quux only, relax + -- 'quux ^>= ...'-style constraints only. + allow-newer: bar, baz, ^quux + + -- Disregard all upper bounds when dependency solving + allow-newer: all + + -- Disregard all `^>=`-style upper bounds when dependency solving + allow-newer: ^all + + + For consistency, there is also the explicit wildcard scope syntax + ``*`` (or its alphabetic synonym ``all``). 
Consequently, the +    examples above are equivalent to the explicitly scoped variants: + +    :: + +        allow-newer: all:bar, *:baz, *:^quux + +        allow-newer: *:* +        allow-newer: all:all + +        allow-newer: *:^* +        allow-newer: all:^all + +    In order to ignore all bounds specified by a package ``pkg-1.2.3`` +    you can combine scoping with a right-hand-side wildcard like so + +    :: + +        -- Disregard any upper bounds specified by pkg-1.2.3 +        allow-newer: pkg-1.2.3:* + +        -- Disregard only `^>=`-style upper bounds in pkg-1.2.3 +        allow-newer: pkg-1.2.3:^* + + +    :cfg-field:`allow-newer` is often used in conjunction with a constraint +    (in the :cfg-field:`constraints` field) forcing the usage of a specific, +    newer version of a package. + +    The command line variant of this field is e.g. ``--allow-newer=bar``. A +    bare ``--allow-newer`` is equivalent to ``--allow-newer=all``. + +.. cfg-field:: allow-older: none, all, list of scoped package names (space or comma separated) +               --allow-older, --allow-older=[none,all,[scope:][^]pkg] +    :synopsis: Lift dependency lower bound constraints. +    :since: 2.0 + +    :default: ``none`` + +    Like :cfg-field:`allow-newer`, but applied to lower bounds rather than +    upper bounds. + +    The command line variant of this field is ``--allow-older=all``. A +    bare ``--allow-older`` is equivalent to ``--allow-older=all``. + + +.. cfg-field:: index-state: HEAD, unix-timestamp, ISO8601 UTC timestamp. +    :synopsis: Use source package index state as it existed at a previous time. +    :since: 2.0 + +    :default: ``HEAD`` + +    This allows you to change the source package index state the solver uses +    to compute install-plans. This is particularly useful in +    combination with freeze-files in order to also freeze the state the +    package index was in at the time the install-plan was frozen. + +    :: + +      -- UNIX timestamp format example +      index-state: @1474739268 + +      -- ISO8601 UTC timestamp format example +      -- This format is used by 'cabal new-configure' +      -- for storing `--index-state` values. 
+ index-state: 2016-09-24T17:47:48Z + + +Package configuration options +----------------------------- + +Package options affect the building of specific packages. There are three +ways a package option can be specified: + +- They can be specified at the top-level, in which case they apply only + to **local package**, or + +- They can be specified inside a ``package`` stanza, in which case they + apply to the build of the package, whether or not it is local or + external. + +- They can be specified inside an ``package *`` stanza, in which case they + apply to all packages, local ones from the project and also external + dependencies. + + +For example, the following options specify that :cfg-field:`optimization` +should be turned off for all local packages, and that ``bytestring`` (possibly +an external dependency) should be built with ``-fno-state-hack``:: + + optimization: False + + package bytestring + ghc-options: -fno-state-hack + +``ghc-options`` is not specifically described in this documentation, +but is one of many fields for configuring programs. They take the form +``progname-options`` and ``progname-location``, and +can only be set inside package stanzas. (TODO: They are not supported +at top-level, see :issue:`3579`.) + +At the moment, there is no way to specify an option to apply to all +external packages or all inplace packages. Additionally, it is only +possible to specify these options on the command line for all local +packages (there is no per-package command line interface.) + +Some flags were added by more recent versions of the Cabal library. This +means that they are NOT supported by packages which use Custom setup +scripts that require a version of the Cabal library older than when the +feature was added. + +.. cfg-field:: flags: list of +flagname or -flagname (space separated) + --flags="+foo -bar", -ffoo, -f-bar + :synopsis: Enable or disable package flags. 
+ + Force all flags specified as ``+flagname`` to be true, and all flags + specified as ``-flagname`` to be false. For example, to enable the + flag ``foo`` and disable ``bar``, set: + + :: + + flags: +foo -bar + + If there is no leading punctuation, it is assumed that the flag + should be enabled; e.g., this is equivalent: + + :: + + flags: foo -bar + + Flags are *per-package*, so it doesn't make much sense to specify + flags at the top-level, unless you happen to know that *all* of your + local packages support the same named flags. If a flag is not + supported by a package, it is ignored. + + See also the solver configuration field :cfg-field:`constraints`. + + The command line variant of this flag is ``--flags``. There is also + a shortened form ``-ffoo -f-bar``. + + A common mistake is to say ``cabal new-build -fhans``, where + ``hans`` is a flag for a transitive dependency that is not in the + local package; in this case, the flag will be silently ignored. If + ``haskell-tor`` is the package you want this flag to apply to, try + ``--constraint="haskell-tor +hans"`` instead. + +.. cfg-field:: with-compiler: executable + --with-compiler=executable + :synopsis: Path to compiler executable. + + Specify the path to a particular compiler to be used. If not an + absolute path, it will be resolved according to the :envvar:`PATH` + environment. The type of the compiler (GHC, GHCJS, etc) must be + consistent with the setting of the :cfg-field:`compiler` field. + + The most common use of this option is to specify a different version + of your compiler to be used; e.g., if you have ``ghc-7.8`` in your + path, you can specify ``with-compiler: ghc-7.8`` to use it. + + This flag also sets the default value of :cfg-field:`with-hc-pkg`, using + the heuristic that it is named ``ghc-pkg-7.8`` (if your executable name + is suffixed with a version number), or is the executable named + ``ghc-pkg`` in the same directory as the ``ghc`` directory. 
If this + heuristic does not work, set :cfg-field:`with-hc-pkg` explicitly. + + For inplace packages, ``cabal new-build`` maintains a separate build + directory for each version of GHC, so you can maintain multiple + build trees for different versions of GHC without clobbering each + other. + + At the moment, it's not possible to set :cfg-field:`with-compiler` on a + per-package basis, but eventually we plan on relaxing this + restriction. If this is something you need, give us a shout. + + The command line variant of this flag is + ``--with-compiler=ghc-7.8``; there is also a short version + ``-w ghc-7.8``. + +.. cfg-field:: with-hc-pkg: executable + --with-hc-pkg=executable + :synopsis: Specifies package tool. + + Specify the path to the package tool, e.g., ``ghc-pkg``. This + package tool must be compatible with the compiler specified by + :cfg-field:`with-compiler` (generally speaking, it should be precisely + the tool that was distributed with the compiler). If this option is + omitted, the default value is determined from :cfg-field:`with-compiler`. + + The command line variant of this flag is + ``--with-hc-pkg=ghc-pkg-7.8``. + +.. cfg-field:: optimization: nat + --enable-optimization + --disable-optimization + :synopsis: Build with optimization. + + :default: ``1`` + + Build with optimization. This is appropriate for production use, + taking more time to build faster libraries and programs. + + The optional *nat* value is the optimisation level. Some compilers + support multiple optimisation levels. The range is 0 to 2. Level 0 + disables optimization, level 1 is the default. Level 2 is higher + optimisation if the compiler supports it. Level 2 is likely to lead + to longer compile times and bigger generated code. If you are not + planning to run code, turning off optimization will lead to better + build times and less code to be rebuilt when a module changes. + + When optimizations are enabled, Cabal passes ``-O2`` to the C compiler. 
+ + We also accept ``True`` (equivalent to 1) and ``False`` (equivalent + to 0). + + Note that as of GHC 8.0, GHC does not recompile when optimization + levels change (see :ghc-ticket:`10923`), so if + you change the optimization level for a local package you may need + to blow away your old build products in order to rebuild with the + new optimization level. + + The command line variant of this flag is ``-O2`` (with ``-O1`` + equivalent to ``-O``). There are also long-form variants + ``--enable-optimization`` and ``--disable-optimization``. + +.. cfg-field:: configure-options: args (space separated) + --configure-option=arg + :synopsis: Options to pass to configure script. + + A list of extra arguments to pass to the external ``./configure`` + script, if one is used. This is only useful for packages which have + the ``Configure`` build type. See also the section on + `system-dependent + parameters `__. + + The command line variant of this flag is ``--configure-option=arg``, + which can be specified multiple times to pass multiple options. + +.. cfg-field:: compiler: ghc, ghcjs, jhc, lhc, uhc or haskell-suite + --compiler=compiler + :synopsis: Compiler to build with. + + :default: ``ghc`` + + Specify which compiler toolchain to be used. This is independent of + ``with-compiler``, because the choice of toolchain affects Cabal's + build logic. + + The command line variant of this flag is ``--compiler=ghc``. + +.. cfg-field:: tests: boolean + --enable-tests + --disable-tests + :synopsis: Build tests. + + :default: ``False`` + + Force test suites to be enabled. For most users this should not be + needed, as we always attempt to solve for test suite dependencies, + even when this value is ``False``; furthermore, test suites are + automatically enabled if they are requested as a built target. + + The command line variant of this flag is ``--enable-tests`` and + ``--disable-tests``. + +.. 
cfg-field:: benchmarks: boolean + --enable-benchmarks + --disable-benchmarks + :synopsis: Build benchmarks. + + :default: ``False`` + + Force benchmarks to be enabled. For most users this should not be + needed, as we always attempt to solve for benchmark dependencies, + even when this value is ``False``; furthermore, benchmarks are + automatically enabled if they are requested as a built target. + + The command line variant of this flag is ``--enable-benchmarks`` and + ``--disable-benchmarks``. + +.. cfg-field:: extra-prog-path: paths (newline or comma separated) + --extra-prog-path=PATH + :synopsis: Add directories to program search path. + :since: 1.18 + + A list of directories to search for extra required programs. Most + users should not need this, as programs like ``happy`` and ``alex`` + will automatically be installed and added to the path. This can be + useful if a ``Custom`` setup script relies on an exotic extra + program. + + The command line variant of this flag is ``--extra-prog-path=PATH``, + which can be specified multiple times. + +.. cfg-field:: run-tests: boolean + --run-tests + :synopsis: Run package test suite upon installation. + + :default: ``False`` + + Run the package test suite upon installation. This is useful for + saying "When this package is installed, check that the test suite + passes, terminating the rest of the build if it is broken." + + .. warning:: + + One deficiency: the :cfg-field:`run-tests` setting of a package is NOT + recorded as part of the hash, so if you install something without + :cfg-field:`run-tests` and then turn on ``run-tests``, we won't + subsequently test the package. If this is causing you problems, give + us a shout. + + The command line variant of this flag is ``--run-tests``. + +Object code options +^^^^^^^^^^^^^^^^^^^ + +.. cfg-field:: debug-info: integer + --enable-debug-info= + --disable-debug-info + :synopsis: Build with debug info enabled. 
+ :since: 1.22 + + :default: False + + If the compiler (e.g., GHC 7.10 and later) supports outputing OS + native debug info (e.g., DWARF), setting ``debug-info: True`` will + instruct it to do so. See the GHC wiki page on :ghc-wiki:`DWARF` + for more information about this feature. + + (This field also accepts numeric syntax, but until GHC 8.2 this didn't + do anything.) + + The command line variant of this flag is ``--enable-debug-info`` and + ``--disable-debug-info``. + +.. cfg-field:: split-sections: boolean + --enable-split-sections + --disable-split-sections + :synopsis: Use GHC's split sections feature. + :since: 2.1 + + :default: False + + Use the GHC ``-split-sections`` feature when building the library. This + reduces the final size of the executables that use the library by + allowing them to link with only the bits that they use rather than + the entire library. The downside is that building the library takes + longer and uses a bit more memory. + + This feature is supported by GHC 8.0 and later. + + The command line variant of this flag is ``--enable-split-sections`` and + ``--disable-split-sections``. + +.. cfg-field:: split-objs: boolean + --enable-split-objs + --disable-split-objs + :synopsis: Use GHC's split objects feature. + + :default: False + + Use the GHC ``-split-objs`` feature when building the library. This + reduces the final size of the executables that use the library by + allowing them to link with only the bits that they use rather than + the entire library. The downside is that building the library takes + longer and uses considerably more memory. + + It is generally recommend that you use ``split-sections`` instead + of ``split-objs`` where possible. + + The command line variant of this flag is ``--enable-split-objs`` and + ``--disable-split-objs``. + +.. cfg-field:: executable-stripping: boolean + --enable-executable-stripping + --disable-executable-stripping + :synopsis: Strip installed programs. 
+ + :default: True + + When installing binary executable programs, run the ``strip`` + program on the binary. This can considerably reduce the size of the + executable binary file. It does this by removing debugging + information and symbols. + + Not all Haskell implementations generate native binaries. For such + implementations this option has no effect. + + (TODO: Check what happens if you combine this with ``debug-info``.) + + The command line variant of this flag is + ``--enable-executable-stripping`` and + ``--disable-executable-stripping``. + +.. cfg-field:: library-stripping: boolean + --enable-library-stripping + --disable-library-stripping + :synopsis: Strip installed libraries. + :since: 1.19 + + When installing binary libraries, run the ``strip`` program on the + binary, saving space on the file system. See also + ``executable-stripping``. + + The command line variant of this flag is + ``--enable-library-stripping`` and ``--disable-library-stripping``. + +Executable options +^^^^^^^^^^^^^^^^^^ + +.. cfg-field:: program-prefix: prefix + --program-prefix=prefix + :synopsis: Prepend prefix to program names. + + [STRIKEOUT:Prepend *prefix* to installed program names.] (Currently + implemented in a silly and not useful way. If you need this to work + give us a shout.) + + *prefix* may contain the following path variables: ``$pkgid``, + ``$pkg``, ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, + ``$abitag`` + + The command line variant of this flag is ``--program-prefix=foo-``. + +.. cfg-field:: program-suffix: suffix + --program-suffix=suffix + :synopsis: Append refix to program names. + + [STRIKEOUT:Append *suffix* to installed program names.] (Currently + implemented in a silly and not useful way. If you need this to work + give us a shout.) + + The most obvious use for this is to append the program's version + number to make it possible to install several versions of a program + at once: ``program-suffix: $version``. 
+ + *suffix* may contain the following path variables: ``$pkgid``, + ``$pkg``, ``$version``, ``$compiler``, ``$os``, ``$arch``, ``$abi``, + ``$abitag`` + + The command line variant of this flag is + ``--program-suffix='$version'``. + +Dynamic linking options +^^^^^^^^^^^^^^^^^^^^^^^ + +.. cfg-field:: shared: boolean + --enable-shared + --disable-shared + :synopsis: Build shared library. + + :default: False + + Build shared library. This implies a separate compiler run to + generate position independent code as required on most platforms. + + The command line variant of this flag is ``--enable-shared`` and + ``--disable-shared``. + +.. cfg-field:: executable-dynamic: boolean + --enable-executable-dynamic + --disable-executable-dynamic + :synopsis: Link executables dynamically. + + :default: False + + Link executables dynamically. The executable's library dependencies + should be built as shared objects. This implies ``shared: True`` + unless ``shared: False`` is explicitly specified. + + The command line variant of this flag is + ``--enable-executable-dynamic`` and + ``--disable-executable-dynamic``. + +.. cfg-field:: library-for-ghci: boolean + --enable-library-for-ghci + --disable-library-for-ghci + :synopsis: Build libraries suitable for use with GHCi. + + :default: True + + Build libraries suitable for use with GHCi. This involves an extra + linking step after the build. + + Not all platforms support GHCi and indeed on some platforms, trying + to build GHCi libs fails. In such cases, consider setting + ``library-for-ghci: False``. + + The command line variant of this flag is + ``--enable-library-for-ghci`` and ``--disable-library-for-ghci``. + +.. cfg-field:: relocatable: + --relocatable + :synopsis: Build relocatable package. + :since: 1.21 + + :default: False + + [STRIKEOUT:Build a package which is relocatable.] (TODO: It is not + clear what this actually does, or if it works at all.) + + The command line variant of this flag is ``--relocatable``. 
+ +Static linking options +^^^^^^^^^^^^^^^^^^^^^^ + +.. cfg-field:: static: boolean + --enable-static + --disable-static + :synopsis: Build static library. + + + :default: False + + Roll this and all dependent libraries into a combined ``.a`` archive. + This uses GHCs ``-staticlib`` flag, which is avaiable for iOS and with + GHC 8.4 and later for other platforms as well. + +Foreign function interface options +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. cfg-field:: extra-include-dirs: directories (comma or newline separated list) + --extra-include-dirs=DIR + :synopsis: Adds C header search path. + + An extra directory to search for C header files. You can use this + flag multiple times to get a list of directories. + + You might need to use this flag if you have standard system header + files in a non-standard location that is not mentioned in the + package's ``.cabal`` file. Using this option has the same affect as + appending the directory *dir* to the :pkg-field:`include-dirs` field in each + library and executable in the package's ``.cabal`` file. The + advantage of course is that you do not have to modify the package at + all. These extra directories will be used while building the package + and for libraries it is also saved in the package registration + information and used when compiling modules that use the library. + + The command line variant of this flag is + ``--extra-include-dirs=DIR``, which can be specified multiple times. + +.. cfg-field:: extra-lib-dirs: directories (comma or newline separated list) + --extra-lib-dirs=DIR + :synopsis: Adds library search directory. + + An extra directory to search for system libraries files. + + The command line variant of this flag is ``--extra-lib-dirs=DIR``, + which can be specified multiple times. + +.. cfg-field:: extra-framework-dirs: directories (comma or newline separated list) + --extra-framework-dirs=DIR + :synopsis: Adds framework search directory (OS X only). 
+ + An extra directory to search for frameworks (OS X only). + + You might need to use this flag if you have standard system + libraries in a non-standard location that is not mentioned in the + package's ``.cabal`` file. Using this option has the same affect as + appending the directory *dir* to the :cfg-field:`extra-lib-dirs` field in + each library and executable in the package's ``.cabal`` file. The + advantage of course is that you do not have to modify the package at + all. These extra directories will be used while building the package + and for libraries it is also saved in the package registration + information and used when compiling modules that use the library. + + The command line variant of this flag is + ``--extra-framework-dirs=DIR``, which can be specified multiple + times. + +Profiling options +^^^^^^^^^^^^^^^^^ + +.. cfg-field:: profiling: boolean + --enable-profiling + --disable-profiling + :synopsis: Enable profiling builds. + :since: 1.21 + + :default: False + + Build libraries and executables with profiling enabled (for + compilers that support profiling as a separate mode). It is only + necessary to specify :cfg-field:`profiling` for the specific package you + want to profile; ``cabal new-build`` will ensure that all of its + transitive dependencies are built with profiling enabled. + + To enable profiling for only libraries or executables, see + :cfg-field:`library-profiling` and :cfg-field:`executable-profiling`. + + For useful profiling, it can be important to control precisely what + cost centers are allocated; see :cfg-field:`profiling-detail`. + + The command line variant of this flag is ``--enable-profiling`` and + ``--disable-profiling``. + +.. cfg-field:: profiling-detail: level + --profiling-detail=level + :synopsis: Profiling detail level. 
+ :since: 1.23 + + Some compilers that support profiling, notably GHC, can allocate + costs to different parts of the program and there are different + levels of granularity or detail with which this can be done. In + particular for GHC this concept is called "cost centers", and GHC + can automatically add cost centers, and can do so in different ways. + + This flag covers both libraries and executables, but can be + overridden by the ``library-profiling-detail`` field. + + Currently this setting is ignored for compilers other than GHC. The + levels that cabal currently supports are: + + default + For GHC this uses ``exported-functions`` for libraries and + ``toplevel-functions`` for executables. + none + No costs will be assigned to any code within this component. + exported-functions + Costs will be assigned at the granularity of all top level + functions exported from each module. In GHC, this + is for non-inline functions. Corresponds to ``-fprof-auto-exported``. + toplevel-functions + Costs will be assigned at the granularity of all top level + functions in each module, whether they are exported from the + module or not. In GHC specifically, this is for non-inline + functions. Corresponds to ``-fprof-auto-top``. + all-functions + Costs will be assigned at the granularity of all functions in + each module, whether top level or local. In GHC specifically, + this is for non-inline toplevel or where-bound functions or + values. Corresponds to ``-fprof-auto``. + + The command line variant of this flag is + ``--profiling-detail=none``. + +.. cfg-field:: library-profiling-detail: level + --library-profiling-detail=level + :synopsis: Libraries profiling detail level. + :since: 1.23 + + Like :cfg-field:`profiling-detail`, but applied only to libraries + + The command line variant of this flag is + ``--library-profiling-detail=none``. + +.. 
cfg-field:: library-vanilla: boolean + --enable-library-vanilla + --disable-library-vanilla + :synopsis: Build libraries without profiling. + + :default: True + + Build ordinary libraries (as opposed to profiling libraries). + Mostly, you can set this to False to avoid building ordinary + libraries when you are profiling. + + The command line variant of this flag is + ``--enable-library-vanilla`` and ``--disable-library-vanilla``. + +.. cfg-field:: library-profiling: boolean + --enable-library-profiling + --disable-library-profiling + :synopsis: Build libraries with profiling enabled. + :since: 1.21 + + :default: False + + Build libraries with profiling enabled. You probably want + to use :cfg-field:`profiling` instead. + + The command line variant of this flag is + ``--enable-library-profiling`` and ``--disable-library-profiling``. + +.. cfg-field:: executable-profiling: boolean + --enable-executable-profiling + --disable-executable-profiling + :synopsis: Build executables with profiling enabled. + :since: 1.21 + + :default: False + + Build executables with profiling enabled. You probably want + to use :cfg-field:`profiling` instead. + + The command line variant of this flag is + ``--enable-executable-profiling`` and + ``--disable-executable-profiling``. + +Coverage options +^^^^^^^^^^^^^^^^ + +.. cfg-field:: coverage: boolean + --enable-coverage + --disable-coverage + :synopsis: Build with coverage enabled. + :since: 1.21 + + :default: False + + Build libraries and executables (including test suites) with Haskell + Program Coverage enabled. Running the test suites will automatically + generate coverage reports with HPC. + + The command line variant of this flag is ``--enable-coverage`` and + ``--disable-coverage``. + +.. cfg-field:: library-coverage: boolean + --enable-library-coverage + --disable-library-coverage + :since: 1.21 + :deprecated: + + :default: False + + Deprecated, use :cfg-field:`coverage`. 
+ + The command line variant of this flag is + ``--enable-library-coverage`` and ``--disable-library-coverage``. + +Haddock options +^^^^^^^^^^^^^^^ + +.. cfg-field:: documentation: boolean + --enable-documentation + --disable-documentation + :synopsis: Enable building of documentation. + + :default: False + + Enables building of Haddock documentation + + The command line variant of this flag is ``--enable-documentation`` + and ``--disable-documentation``. + + `documentation: true` does not imply :cfg-field:`haddock-benchmarks`, + :cfg-field:`haddock-executables`, :cfg-field:`haddock-internal` or + :cfg-field:`haddock-tests`. These need to be enabled separately if + desired. + +.. cfg-field:: doc-index-file: templated path + --doc-index-file=TEMPLATE + :synopsis: Path to haddock templates. + + A central index of Haddock API documentation (template cannot use + ``$pkgid``), which should be updated as documentation is built. + + The command line variant of this flag is + ``--doc-index-file=TEMPLATE`` + +The following commands are equivalent to ones that would be passed when +running ``setup haddock``. (TODO: Where does the documentation get put.) + +.. cfg-field:: haddock-hoogle: boolean + :synopsis: Generate Hoogle file. + + :default: False + + Generate a text file which can be converted by Hoogle_ + into a database for searching. This is equivalent to running ``haddock`` + with the ``--hoogle`` flag. + + The command line variant of this flag is ``--hoogle`` (for the + ``haddock`` command). + +.. cfg-field:: haddock-html: boolean + :synopsis: Build HTML documentation. + + :default: True + + Build HTML documentation. + + The command line variant of this flag is ``--html`` (for the + ``haddock`` command). + +.. cfg-field:: haddock-html-location: templated path + :synopsis: Haddock HTML templates location. + + Specify a template for the location of HTML documentation for + prerequisite packages. 
The substitutions are applied to the template + to obtain a location for each package, which will be used by + hyperlinks in the generated documentation. For example, the + following command generates links pointing at [Hackage] pages: + + :: + + html-location: 'http://hackage.haskell.org/packages/archive/$pkg/latest/doc/html' + + Here the argument is quoted to prevent substitution by the shell. If + this option is omitted, the location for each package is obtained + using the package tool (e.g. ``ghc-pkg``). + + The command line variant of this flag is ``--html-location`` (for + the ``haddock`` subcommand). + +.. cfg-field:: haddock-executables: boolean + :synopsis: Generate documentation for executables. + + :default: False + + Run haddock on all executable programs. + + The command line variant of this flag is ``--executables`` (for the + ``haddock`` subcommand). + +.. cfg-field:: haddock-tests: boolean + :synopsis: Generate documentation for tests. + + :default: False + + Run haddock on all test suites. + + The command line variant of this flag is ``--tests`` (for the + ``haddock`` subcommand). + +.. cfg-field:: haddock-benchmarks: boolean + :synopsis: Generate documentation for benchmarks. + + :default: False + + Run haddock on all benchmarks. + + The command line variant of this flag is ``--benchmarks`` (for the + ``haddock`` subcommand). + +.. cfg-field:: haddock-all: boolean + :synopsis: Generate documentation for everything + + :default: False + + Run haddock on all components. + + The command line variant of this flag is ``--all`` (for the + ``haddock`` subcommand). + +.. cfg-field:: haddock-internal: boolean + :synopsis: Generate documentation for internal modules + + :default: False + + Build haddock documentation which includes unexposed modules and + symbols. + + The command line variant of this flag is ``--internal`` (for the + ``haddock`` subcommand). + +.. cfg-field:: haddock-css: path + :synopsis: Location of Haddoc CSS file. 
+ + The CSS file that should be used to style the generated + documentation (overriding haddock's default.) + + The command line variant of this flag is ``--css`` (for the + ``haddock`` subcommand). + +.. cfg-field:: haddock-hyperlink-source: boolean + :synopsis: Generate hyperlinked source code for documentation + + :default: False + + Generated hyperlinked source code using `HsColour`_, and have + Haddock documentation link to it. + + The command line variant of this flag is ``--hyperlink-source`` (for + the ``haddock`` subcommand). + +.. cfg-field:: haddock-hscolour-css: path + :synopsis: Location of CSS file for HsColour + + The CSS file that should be used to style the generated hyperlinked + source code (from `HsColour`_). + + The command line variant of this flag is ``--hscolour-css`` (for the + ``haddock`` subcommand). + +.. cfg-field:: haddock-contents-location: URL + :synopsis: URL for contents page. + + A baked-in URL to be used as the location for the contents page. + + The command line variant of this flag is ``--contents-location`` + (for the ``haddock`` subcommand). + +.. cfg-field:: haddock-keep-temp-files: boolean + :synopsis: Keep temporary Haddock files. + + Keep temporary files. + + The command line variant of this flag is ``--keep-temp-files`` (for + the ``haddock`` subcommand). + +Advanced global configuration options +------------------------------------- + +.. cfg-field:: write-ghc-environment-files: always, never, or ghc-8.4.4+ + --write-ghc-enviroment-files=policy + :synopsis: Whether a ``.ghc.enviroment`` should be created after a successful build. + + :default: ``ghc-8.4.4+`` + + Whether a `GHC package environment file `_ + should be created after a successful build. + + Defaults to creating them only when compiling with GHC 8.4.4 and + older (GHC 8.4.4 `is the first version `_ that supports the + ``-package-env -`` option that allows ignoring the package + environment files). + + +.. 
cfg-field:: http-transport: curl, wget, powershell, or plain-http + --http-transport=transport + :synopsis: Transport to use with http(s) requests. + + :default: ``curl`` + + Set a transport to be used when making http(s) requests. + + The command line variant of this field is ``--http-transport=curl``. + +.. cfg-field:: ignore-expiry: boolean + --ignore-expiry + :synopsis: Ignore Hackage expiration dates. + + :default: False + + If ``True``, we will ignore expiry dates on metadata from Hackage. + + In general, you should not set this to ``True`` as it will leave you + vulnerable to stale cache attacks. However, it may be temporarily + useful if the main Hackage server is down, and we need to rely on + mirrors which have not been updated for longer than the expiry + period on the timestamp. + + The command line variant of this field is ``--ignore-expiry``. + +.. cfg-field:: remote-repo-cache: directory + --remote-repo-cache=DIR + :synopsis: Location of packages cache. + + :default: ``~/.cabal/packages`` + + [STRIKEOUT:The location where packages downloaded from remote + repositories will be cached.] Not implemented yet. + + The command line variant of this flag is + ``--remote-repo-cache=DIR``. + +.. cfg-field:: logs-dir: directory + --logs-dir=DIR + :synopsis: Directory to store build logs. + + :default: ``~/.cabal/logs`` + + [STRIKEOUT:The location where build logs for packages are stored.] + Not implemented yet. + + The command line variant of this flag is ``--logs-dir=DIR``. + +.. cfg-field:: build-summary: template filepath + --build-summary=TEMPLATE + :synopsis: Build summaries location. + + :default: ``~/.cabal/logs/build.log`` + + [STRIKEOUT:The file to save build summaries. Valid variables which + can be used in the path are ``$pkgid``, ``$compiler``, ``$os`` and + ``$arch``.] Not implemented yet. + + The command line variant of this flag is + ``--build-summary=TEMPLATE``. + +.. 
cfg-field:: local-repo: directory + --local-repo=DIR + :deprecated: + + [STRIKEOUT:The location of a local repository.] Deprecated. See + "Legacy repositories." + + The command line variant of this flag is ``--local-repo=DIR``. + +.. cfg-field:: world-file: path + --world-file=FILE + :deprecated: + + [STRIKEOUT:The location of the world file.] Deprecated. + + The command line variant of this flag is ``--world-file=FILE``. + +Undocumented fields: ``root-cmd``, ``symlink-bindir``, ``build-log``, +``remote-build-reporting``, ``report-planned-failure``, ``one-shot``, +``offline``. + +Advanced solver options +^^^^^^^^^^^^^^^^^^^^^^^ + +Most users generally won't need these. + +.. cfg-field:: solver: modular + --solver=modular + :synopsis: Which solver to use. + + This field is reserved to allow the specification of alternative + dependency solvers. At the moment, the only accepted option is + ``modular``. + + The command line variant of this field is ``--solver=modular``. + +.. cfg-field:: max-backjumps: nat + --max-backjumps=N + :synopsis: Maximum number of solver backjumps. + + :default: 2000 + + Maximum number of backjumps (backtracking multiple steps) allowed + while solving. Set -1 to allow unlimited backtracking, and 0 to + disable backtracking completely. + + The command line variant of this field is ``--max-backjumps=2000``. + +.. cfg-field:: reorder-goals: boolean + --reorder-goals + --no-reorder-goals + :synopsis: Allow solver to reorder goals. + + :default: False + + When enabled, the solver will reorder goals according to certain + heuristics. Slows things down on average, but may make backtracking + faster for some packages. It's unlikely to help for small projects, + but for big install plans it may help you find a plan when otherwise + this is not possible. See :issue:`1780` for more commentary. + + The command line variant of this field is ``--(no-)reorder-goals``. + +.. 
cfg-field:: count-conflicts: boolean + --count-conflicts + --no-count-conflicts + :synopsis: Solver prefers versions with less conflicts. + + :default: True + + Try to speed up solving by preferring goals that are involved in a + lot of conflicts. + + The command line variant of this field is + ``--(no-)count-conflicts``. + +.. cfg-field:: strong-flags: boolean + --strong-flags + --no-strong-flags + :synopsis: Do not defer flag choices when solving. + + :default: False + + Do not defer flag choices. (TODO: Better documentation.) + + The command line variant of this field is ``--(no-)strong-flags``. + +.. cfg-field:: allow-boot-library-installs: boolean + --allow-boot-library-installs + --no-allow-boot-library-installs + :synopsis: Allow cabal to install or upgrade any package. + + :default: False + + By default, the dependency solver doesn't allow ``base``, + ``ghc-prim``, ``integer-simple``, ``integer-gmp``, and + ``template-haskell`` to be installed or upgraded. This flag + removes the restriction. + + The command line variant of this field is + ``--(no-)allow-boot-library-installs``. + +.. cfg-field:: cabal-lib-version: version + --cabal-lib-version=version + :synopsis: Version of Cabal library used to build package. + + This field selects the version of the Cabal library which should be + used to build packages. This option is intended primarily for + internal development use (e.g., forcing a package to build with a + newer version of Cabal, to test a new version of Cabal.) (TODO: + Specify its semantics more clearly.) + + The command line variant of this field is + ``--cabal-lib-version=1.24.0.1``. + +.. 
include:: references.inc diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/README.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/README.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/README.md 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/README.md 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,126 @@ +Cabal documentation +=================== + +### Where to read it +These docs will be built and deployed whenever a release is made, +and can be read at: https://www.haskell.org/cabal/users-guide/ + +In addition, the docs are taken directly from git and hosted at: +http://cabal.readthedocs.io/ + + +### How to build it + +* Currently requires python-2 +* `> pip install sphinx` +* `> pip install sphinx_rtd_theme` +* `> cd Cabal` +* `> make clean users-guide` +* if you are missing any other dependencies, install them with `pip` as needed +¯\\\_(ツ)_/¯ +* Python on Mac OS X dislikes `LC_CTYPE=UTF-8`, unset the env var in +terminal preferences and instead set `LC_ALL=en_US.UTF-8` or something +* On archlinux, package `python2-sphinx` is sufficient. + +### Caveats, for newcomers to RST from MD +RST does not allow you to skip section levels when nesting, like MD +does. 
+So, you cannot have + +``` + Section heading + =============== + + Some unimportant block + """""""""""""""""""""" +``` + + instead you need to observe order and either promote your block: + +``` + Section heading + =============== + + Some not quite so important block + --------------------------------- +``` + + or introduce more subsections: + +``` + Section heading + =============== + + Subsection + ---------- + + Subsubsection + ^^^^^^^^^^^^^ + + Some unimportant block + """""""""""""""""""""" +``` + +* RST simply parses a file and interpretes headings to indicate the + start of a new block, + * at the level implied by the header's *adornment*, if the adornment was + previously encountered in this file, + * at one level deeper than the previous block, otherwise. + + This means that a lot of confusion can arise when people use + different adornments to signify the same depth in different files. + + To eliminate this confusion, please stick to the adornment order + recommended by the Sphinx team: + +``` + #### + Part + #### + + ******* + Chapter + ******* + + Section + ======= + + Subsection + ---------- + + Subsubsection + ^^^^^^^^^^^^^ + + Paragraph + """"""""" +``` + +* The Read-The-Docs stylesheet does not support multiple top-level + sections in a file that is linked to from the top-most TOC (in + `index.rst`). It will mess up the sidebar. + E.g. you cannot link to a `cabal.rst` with sections "Introduction", + "Using Cabal", "Epilogue" from `index.rst`. + + One solution is to have a single section, e.g. "All About Cabal", in + `cabal.rst` and make the other blocks subsections of that. + + Another solution is to link via an indirection, e.g. create + `all-about-cabal.rst`, where you include `cabal.rst` using the + `.. toctree::` command and then link to `all-about-cabal.rst` from + `index.rst`. + This will effectively "push down" all blocks by one layer and solve + the problem without having to change `cabal.rst`. 
+ + +* We use [`extlinks`](http://www.sphinx-doc.org/en/stable/ext/extlinks.html) + to shorten links to commonly referred resources (wiki, issue trackers). + + E.g. you can use the more convenient short syntax + + :issue:`123` + + which is expanded into a hyperlink + + `#123 `__ + + See `conf.py` for list of currently defined link shorteners. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/references.inc cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/references.inc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/doc/references.inc 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/doc/references.inc 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,24 @@ +.. -*- rst -*- + This file contains commonly used link-references + See also "extlinks" in conf.py + +.. _`Package Versioning Policy`: +.. _PVP: http://pvp.haskell.org/ + +.. _Hackage: http://hackage.haskell.org/ + +.. _Haskell: http://www.haskell.org/ + +.. _Haddock: http://www.haskell.org/haddock/ + +.. _Alex: http://www.haskell.org/alex/ + +.. _Happy: http://www.haskell.org/happy/ + +.. _Hoogle: http://www.haskell.org/hoogle/ + +.. _HsColour: http://www.cs.york.ac.uk/fp/darcs/hscolour/ + +.. _cpphs: http://projects.haskell.org/cpphs/ + +.. 
_ABNF: https://tools.ietf.org/html/rfc5234 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Language/Haskell/Extension.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Language/Haskell/Extension.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Language/Haskell/Extension.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Language/Haskell/Extension.hs 2018-11-26 08:42:53.000000000 +0000 @@ -0,0 +1,909 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Language.Haskell.Extension +-- Copyright : Isaac Jones 2003-2004 +-- License : BSD3 +-- +-- Maintainer : libraries@haskell.org +-- Portability : portable +-- +-- Haskell language dialects and extensions + +module Language.Haskell.Extension ( + Language(..), + knownLanguages, + classifyLanguage, + + Extension(..), + KnownExtension(..), + knownExtensions, + deprecatedExtensions, + classifyExtension, + ) where + +import Prelude () +import Distribution.Compat.Prelude + +import Data.Array (Array, accumArray, bounds, Ix(inRange), (!)) + +import Distribution.Parsec.Class +import Distribution.Pretty +import Distribution.Text + +import qualified Distribution.Compat.CharParsing as P +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +-- ------------------------------------------------------------ +-- * Language +-- ------------------------------------------------------------ + +-- | This represents a Haskell language dialect. +-- +-- Language 'Extension's are interpreted relative to one of these base +-- languages. +-- +data Language = + + -- | The Haskell 98 language as defined by the Haskell 98 report. + -- + Haskell98 + + -- | The Haskell 2010 language as defined by the Haskell 2010 report. 
+ -- + | Haskell2010 + + -- | An unknown language, identified by its name. + | UnknownLanguage String + deriving (Generic, Show, Read, Eq, Typeable, Data) + +instance Binary Language + +instance NFData Language where rnf = genericRnf + +knownLanguages :: [Language] +knownLanguages = [Haskell98, Haskell2010] + +instance Pretty Language where + pretty (UnknownLanguage other) = Disp.text other + pretty other = Disp.text (show other) + +instance Parsec Language where + parsec = classifyLanguage <$> P.munch1 isAlphaNum + +instance Text Language where + parse = do + lang <- Parse.munch1 isAlphaNum + return (classifyLanguage lang) + +classifyLanguage :: String -> Language +classifyLanguage = \str -> case lookup str langTable of + Just lang -> lang + Nothing -> UnknownLanguage str + where + langTable = [ (show lang, lang) + | lang <- knownLanguages ] + +-- ------------------------------------------------------------ +-- * Extension +-- ------------------------------------------------------------ + +-- Note: if you add a new 'KnownExtension': +-- +-- * also add it to the Distribution.Simple.X.languageExtensions lists +-- (where X is each compiler: GHC, UHC, HaskellSuite) +-- +-- | This represents language extensions beyond a base 'Language' definition +-- (such as 'Haskell98') that are supported by some implementations, usually +-- in some special mode. +-- +-- Where applicable, references are given to an implementation's +-- official documentation. + +data Extension = + -- | Enable a known extension + EnableExtension KnownExtension + + -- | Disable a known extension + | DisableExtension KnownExtension + + -- | An unknown extension, identified by the name of its @LANGUAGE@ + -- pragma. 
+ | UnknownExtension String + + deriving (Generic, Show, Read, Eq, Ord, Typeable, Data) + +instance Binary Extension + +instance NFData Extension where rnf = genericRnf + +data KnownExtension = + + -- | Allow overlapping class instances, provided there is a unique + -- most specific instance for each use. + -- + -- * + OverlappingInstances + + -- | Ignore structural rules guaranteeing the termination of class + -- instance resolution. Termination is guaranteed by a fixed-depth + -- recursion stack, and compilation may fail if this depth is + -- exceeded. + -- + -- * + | UndecidableInstances + + -- | Implies 'OverlappingInstances'. Allow the implementation to + -- choose an instance even when it is possible that further + -- instantiation of types will lead to a more specific instance + -- being applicable. + -- + -- * + | IncoherentInstances + + -- | /(deprecated)/ Deprecated in favour of 'RecursiveDo'. + -- + -- Old description: Allow recursive bindings in @do@ blocks, using + -- the @rec@ keyword. See also 'RecursiveDo'. + | DoRec + + -- | Allow recursive bindings in @do@ blocks, using the @rec@ + -- keyword, or @mdo@, a variant of @do@. + -- + -- * + | RecursiveDo + + -- | Provide syntax for writing list comprehensions which iterate + -- over several lists together, like the 'zipWith' family of + -- functions. + -- + -- * + | ParallelListComp + + -- | Allow multiple parameters in a type class. + -- + -- * + | MultiParamTypeClasses + + -- | Enable the dreaded monomorphism restriction. + -- + -- * + | MonomorphismRestriction + + -- | Allow a specification attached to a multi-parameter type class + -- which indicates that some parameters are entirely determined by + -- others. The implementation will check that this property holds + -- for the declared instances, and will use this property to reduce + -- ambiguity in instance resolution. + -- + -- * + | FunctionalDependencies + + -- | /(deprecated)/ A synonym for 'RankNTypes'. 
+ -- + -- Old description: Like 'RankNTypes' but does not allow a + -- higher-rank type to itself appear on the left of a function + -- arrow. + -- + -- * + | Rank2Types + + -- | Allow a universally-quantified type to occur on the left of a + -- function arrow. + -- + -- * + | RankNTypes + + -- | /(deprecated)/ A synonym for 'RankNTypes'. + -- + -- Old description: Allow data constructors to have polymorphic + -- arguments. Unlike 'RankNTypes', does not allow this for ordinary + -- functions. + -- + -- * + | PolymorphicComponents + + -- | Allow existentially-quantified data constructors. + -- + -- * + | ExistentialQuantification + + -- | Cause a type variable in a signature, which has an explicit + -- @forall@ quantifier, to scope over the definition of the + -- accompanying value declaration. + -- + -- * + | ScopedTypeVariables + + -- | Deprecated, use 'ScopedTypeVariables' instead. + | PatternSignatures + + -- | Enable implicit function parameters with dynamic scope. + -- + -- * + | ImplicitParams + + -- | Relax some restrictions on the form of the context of a type + -- signature. + -- + -- * + | FlexibleContexts + + -- | Relax some restrictions on the form of the context of an + -- instance declaration. + -- + -- * + | FlexibleInstances + + -- | Allow data type declarations with no constructors. + -- + -- * + | EmptyDataDecls + + -- | Run the C preprocessor on Haskell source code. + -- + -- * + | CPP + + -- | Allow an explicit kind signature giving the kind of types over + -- which a type variable ranges. + -- + -- * + | KindSignatures + + -- | Enable a form of pattern which forces evaluation before an + -- attempted match, and a form of strict @let@/@where@ binding. + -- + -- * + | BangPatterns + + -- | Allow type synonyms in instance heads. + -- + -- * + | TypeSynonymInstances + + -- | Enable Template Haskell, a system for compile-time + -- metaprogramming. + -- + -- * + | TemplateHaskell + + -- | Enable the Foreign Function Interface. 
In GHC, implements the + -- standard Haskell 98 Foreign Function Interface Addendum, plus + -- some GHC-specific extensions. + -- + -- * + | ForeignFunctionInterface + + -- | Enable arrow notation. + -- + -- * + | Arrows + + -- | /(deprecated)/ Enable generic type classes, with default instances defined in + -- terms of the algebraic structure of a type. + -- + -- * + | Generics + + -- | Enable the implicit importing of the module "Prelude". When + -- disabled, when desugaring certain built-in syntax into ordinary + -- identifiers, use whatever is in scope rather than the "Prelude" + -- -- version. + -- + -- * + | ImplicitPrelude + + -- | Enable syntax for implicitly binding local names corresponding + -- to the field names of a record. Puns bind specific names, unlike + -- 'RecordWildCards'. + -- + -- * + | NamedFieldPuns + + -- | Enable a form of guard which matches a pattern and binds + -- variables. + -- + -- * + | PatternGuards + + -- | Allow a type declared with @newtype@ to use @deriving@ for any + -- class with an instance for the underlying type. + -- + -- * + | GeneralizedNewtypeDeriving + + -- | Enable the \"Trex\" extensible records system. + -- + -- * + | ExtensibleRecords + + -- | Enable type synonyms which are transparent in some definitions + -- and opaque elsewhere, as a way of implementing abstract + -- datatypes. + -- + -- * + | RestrictedTypeSynonyms + + -- | Enable an alternate syntax for string literals, + -- with string templating. + -- + -- * + | HereDocuments + + -- | Allow the character @#@ as a postfix modifier on identifiers. + -- Also enables literal syntax for unboxed values. + -- + -- * + | MagicHash + + -- | Allow data types and type synonyms which are indexed by types, + -- i.e. ad-hoc polymorphism for types. + -- + -- * + | TypeFamilies + + -- | Allow a standalone declaration which invokes the type class + -- @deriving@ mechanism. 
+ -- + -- * + | StandaloneDeriving + + -- | Allow certain Unicode characters to stand for certain ASCII + -- character sequences, e.g. keywords and punctuation. + -- + -- * + | UnicodeSyntax + + -- | Allow the use of unboxed types as foreign types, e.g. in + -- @foreign import@ and @foreign export@. + -- + -- * + | UnliftedFFITypes + + -- | Enable interruptible FFI. + -- + -- * + | InterruptibleFFI + + -- | Allow use of CAPI FFI calling convention (@foreign import capi@). + -- + -- * + | CApiFFI + + -- | Defer validity checking of types until after expanding type + -- synonyms, relaxing the constraints on how synonyms may be used. + -- + -- * + | LiberalTypeSynonyms + + -- | Allow the name of a type constructor, type class, or type + -- variable to be an infix operator. + -- * + | TypeOperators + + -- | Enable syntax for implicitly binding local names corresponding + -- to the field names of a record. A wildcard binds all unmentioned + -- names, unlike 'NamedFieldPuns'. + -- + -- * + | RecordWildCards + + -- | Deprecated, use 'NamedFieldPuns' instead. + | RecordPuns + + -- | Allow a record field name to be disambiguated by the type of + -- the record it's in. + -- + -- * + | DisambiguateRecordFields + + -- | Enable traditional record syntax (as supported by Haskell 98) + -- + -- * + | TraditionalRecordSyntax + + -- | Enable overloading of string literals using a type class, much + -- like integer literals. + -- + -- * + | OverloadedStrings + + -- | Enable generalized algebraic data types, in which type + -- variables may be instantiated on a per-constructor basis. Implies + -- 'GADTSyntax'. + -- + -- * + | GADTs + + -- | Enable GADT syntax for declaring ordinary algebraic datatypes. + -- + -- * + | GADTSyntax + + -- | /(deprecated)/ Has no effect. + -- + -- Old description: Make pattern bindings monomorphic. + -- + -- * + | MonoPatBinds + + -- | Relax the requirements on mutually-recursive polymorphic + -- functions. 
+ -- + -- * + | RelaxedPolyRec + + -- | Allow default instantiation of polymorphic types in more + -- situations. + -- + -- * + | ExtendedDefaultRules + + -- | Enable unboxed tuples. + -- + -- * + | UnboxedTuples + + -- | Enable @deriving@ for classes 'Data.Typeable.Typeable' and + -- 'Data.Generics.Data'. + -- + -- * + | DeriveDataTypeable + + -- | Enable @deriving@ for 'GHC.Generics.Generic' and 'GHC.Generics.Generic1'. + -- + -- * + | DeriveGeneric + + -- | Enable support for default signatures. + -- + -- * + | DefaultSignatures + + -- | Allow type signatures to be specified in instance declarations. + -- + -- * + | InstanceSigs + + -- | Allow a class method's type to place additional constraints on + -- a class type variable. + -- + -- * + | ConstrainedClassMethods + + -- | Allow imports to be qualified by the package name the module is + -- intended to be imported from, e.g. + -- + -- > import "network" Network.Socket + -- + -- * + | PackageImports + + -- | /(deprecated)/ Allow a type variable to be instantiated at a + -- polymorphic type. + -- + -- * + | ImpredicativeTypes + + -- | /(deprecated)/ Change the syntax for qualified infix operators. + -- + -- * + | NewQualifiedOperators + + -- | Relax the interpretation of left operator sections to allow + -- unary postfix operators. + -- + -- * + | PostfixOperators + + -- | Enable quasi-quotation, a mechanism for defining new concrete + -- syntax for expressions and patterns. + -- + -- * + | QuasiQuotes + + -- | Enable generalized list comprehensions, supporting operations + -- such as sorting and grouping. + -- + -- * + | TransformListComp + + -- | Enable monad comprehensions, which generalise the list + -- comprehension syntax to work for any monad. + -- + -- * + | MonadComprehensions + + -- | Enable view patterns, which match a value by applying a + -- function and matching on the result. 
+ -- + -- * + | ViewPatterns + + -- | Allow concrete XML syntax to be used in expressions and patterns, + -- as per the Haskell Server Pages extension language: + -- . The ideas behind it are + -- discussed in the paper \"Haskell Server Pages through Dynamic Loading\" + -- by Niklas Broberg, from Haskell Workshop '05. + | XmlSyntax + + -- | Allow regular pattern matching over lists, as discussed in the + -- paper \"Regular Expression Patterns\" by Niklas Broberg, Andreas Farre + -- and Josef Svenningsson, from ICFP '04. + | RegularPatterns + + -- | Enable the use of tuple sections, e.g. @(, True)@ desugars into + -- @\x -> (x, True)@. + -- + -- * + | TupleSections + + -- | Allow GHC primops, written in C--, to be imported into a Haskell + -- file. + | GHCForeignImportPrim + + -- | Support for patterns of the form @n + k@, where @k@ is an + -- integer literal. + -- + -- * + | NPlusKPatterns + + -- | Improve the layout rule when @if@ expressions are used in a @do@ + -- block. + | DoAndIfThenElse + + -- | Enable support for multi-way @if@-expressions. + -- + -- * + | MultiWayIf + + -- | Enable support lambda-@case@ expressions. + -- + -- * + | LambdaCase + + -- | Makes much of the Haskell sugar be desugared into calls to the + -- function with a particular name that is in scope. + -- + -- * + | RebindableSyntax + + -- | Make @forall@ a keyword in types, which can be used to give the + -- generalisation explicitly. + -- + -- * + | ExplicitForAll + + -- | Allow contexts to be put on datatypes, e.g. the @Eq a@ in + -- @data Eq a => Set a = NilSet | ConsSet a (Set a)@. + -- + -- * + | DatatypeContexts + + -- | Local (@let@ and @where@) bindings are monomorphic. + -- + -- * + | MonoLocalBinds + + -- | Enable @deriving@ for the 'Data.Functor.Functor' class. + -- + -- * + | DeriveFunctor + + -- | Enable @deriving@ for the 'Data.Traversable.Traversable' class. + -- + -- * + | DeriveTraversable + + -- | Enable @deriving@ for the 'Data.Foldable.Foldable' class. 
+ -- + -- * + | DeriveFoldable + + -- | Enable non-decreasing indentation for @do@ blocks. + -- + -- * + | NondecreasingIndentation + + -- | Allow imports to be qualified with a safe keyword that requires + -- the imported module be trusted as according to the Safe Haskell + -- definition of trust. + -- + -- > import safe Network.Socket + -- + -- * + | SafeImports + + -- | Compile a module in the Safe, Safe Haskell mode -- a restricted + -- form of the Haskell language to ensure type safety. + -- + -- * + | Safe + + -- | Compile a module in the Trustworthy, Safe Haskell mode -- no + -- restrictions apply but the module is marked as trusted as long as + -- the package the module resides in is trusted. + -- + -- * + | Trustworthy + + -- | Compile a module in the Unsafe, Safe Haskell mode so that + -- modules compiled using Safe, Safe Haskell mode can't import it. + -- + -- * + | Unsafe + + -- | Allow type class/implicit parameter/equality constraints to be + -- used as types with the special kind constraint. Also generalise + -- the @(ctxt => ty)@ syntax so that any type of kind constraint can + -- occur before the arrow. + -- + -- * + | ConstraintKinds + + -- | Enable kind polymorphism. + -- + -- * + | PolyKinds + + -- | Enable datatype promotion. + -- + -- * + | DataKinds + + -- | Enable parallel arrays syntax (@[:@, @:]@) for /Data Parallel Haskell/. + -- + -- * + | ParallelArrays + + -- | Enable explicit role annotations, like in (@type role Foo representational representational@). + -- + -- * + | RoleAnnotations + + -- | Enable overloading of list literals, arithmetic sequences and + -- list patterns using the 'IsList' type class. + -- + -- * + | OverloadedLists + + -- | Enable case expressions that have no alternatives. Also applies to lambda-case expressions if they are enabled. + -- + -- * + | EmptyCase + + -- | /(deprecated)/ Deprecated in favour of 'DeriveDataTypeable'. 
+ -- + -- Old description: Triggers the generation of derived 'Typeable' + -- instances for every datatype and type class declaration. + -- + -- * + | AutoDeriveTypeable + + -- | Desugars negative literals directly (without using negate). + -- + -- * + | NegativeLiterals + + -- | Allow the use of binary integer literal syntax (e.g. @0b11001001@ to denote @201@). + -- + -- * + | BinaryLiterals + + -- | Allow the use of floating literal syntax for all instances of 'Num', including 'Int' and 'Integer'. + -- + -- * + | NumDecimals + + -- | Enable support for type classes with no type parameter. + -- + -- * + | NullaryTypeClasses + + -- | Enable explicit namespaces in module import/export lists. + -- + -- * + | ExplicitNamespaces + + -- | Allow the user to write ambiguous types, and the type inference engine to infer them. + -- + -- * + | AllowAmbiguousTypes + + -- | Enable @foreign import javascript@. + | JavaScriptFFI + + -- | Allow giving names to and abstracting over patterns. + -- + -- * + | PatternSynonyms + + -- | Allow anonymous placeholders (underscore) inside type signatures. The + -- type inference engine will generate a message describing the type inferred + -- at the hole's location. + -- + -- * + | PartialTypeSignatures + + -- | Allow named placeholders written with a leading underscore inside type + -- signatures. Wildcards with the same name unify to the same type. + -- + -- * + | NamedWildCards + + -- | Enable @deriving@ for any class. + -- + -- * + | DeriveAnyClass + + -- | Enable @deriving@ for the 'Language.Haskell.TH.Syntax.Lift' class. + -- + -- * + | DeriveLift + + -- | Enable support for 'static pointers' (and the @static@ + -- keyword) to refer to globally stable names, even across + -- different programs. + -- + -- * + | StaticPointers + + -- | Switches data type declarations to be strict by default (as if + -- they had a bang using @BangPatterns@), and allow opt-in field + -- laziness using @~@. 
+ -- + -- * + | StrictData + + -- | Switches all pattern bindings to be strict by default (as if + -- they had a bang using @BangPatterns@), ordinary patterns are + -- recovered using @~@. Implies @StrictData@. + -- + -- * + | Strict + + -- | Allows @do@-notation for types that are @'Applicative'@ as well + -- as @'Monad'@. When enabled, desugaring @do@ notation tries to use + -- @(<*>)@ and @'fmap'@ and @'join'@ as far as possible. + | ApplicativeDo + + -- | Allow records to use duplicated field labels for accessors. + | DuplicateRecordFields + + -- | Enable explicit type applications with the syntax @id \@Int@. + | TypeApplications + + -- | Dissolve the distinction between types and kinds, allowing the compiler + -- to reason about kind equality and therefore enabling GADTs to be promoted + -- to the type-level. + | TypeInType + + -- | Allow recursive (and therefore undecideable) super-class relationships. + | UndecidableSuperClasses + + -- | A temporary extension to help library authors check if their + -- code will compile with the new planned desugaring of fail. + | MonadFailDesugaring + + -- | A subset of @TemplateHaskell@ including only quoting. + | TemplateHaskellQuotes + + -- | Allows use of the @#label@ syntax. + | OverloadedLabels + + -- | Allow functional dependency annotations on type families to declare them + -- as injective. + | TypeFamilyDependencies + + -- | Allow multiple @deriving@ clauses, each optionally qualified with a + -- /strategy/. + | DerivingStrategies + + -- | Enable the use of unboxed sum syntax. + | UnboxedSums + + -- | Allow use of hexadecimal literal notation for floating-point values. + | HexFloatLiterals + + -- | Allow @do@ blocks etc. in argument position. + | BlockArguments + + -- | Allow use of underscores in numeric literals. + | NumericUnderscores + + -- | Allow @forall@ in constraints. + | QuantifiedConstraints + + -- | Have @*@ refer to @Type@. 
+ | StarIsType + + deriving (Generic, Show, Read, Eq, Ord, Enum, Bounded, Typeable, Data) + +instance Binary KnownExtension + +instance NFData KnownExtension where rnf = genericRnf + +{-# DEPRECATED knownExtensions + "KnownExtension is an instance of Enum and Bounded, use those instead. This symbol will be removed in Cabal-3.0 (est. Mar 2019)." #-} +knownExtensions :: [KnownExtension] +knownExtensions = [minBound..maxBound] + +-- | Extensions that have been deprecated, possibly paired with another +-- extension that replaces it. +-- +deprecatedExtensions :: [(Extension, Maybe Extension)] +deprecatedExtensions = + [ (EnableExtension RecordPuns, Just (EnableExtension NamedFieldPuns)) + , (EnableExtension PatternSignatures, Just (EnableExtension ScopedTypeVariables)) + ] +-- NOTE: when adding deprecated extensions that have new alternatives +-- we must be careful to make sure that the deprecation messages are +-- valid. We must not recommend aliases that cannot be used with older +-- compilers, perhaps by adding support in Cabal to translate the new +-- name to the old one for older compilers. Otherwise we are in danger +-- of the scenario in ticket #689. 
+ +instance Pretty Extension where + pretty (UnknownExtension other) = Disp.text other + pretty (EnableExtension ke) = Disp.text (show ke) + pretty (DisableExtension ke) = Disp.text ("No" ++ show ke) + +instance Parsec Extension where + parsec = classifyExtension <$> P.munch1 isAlphaNum + +instance Text Extension where + parse = do + extension <- Parse.munch1 isAlphaNum + return (classifyExtension extension) + +instance Pretty KnownExtension where + pretty ke = Disp.text (show ke) + +instance Text KnownExtension where + parse = do + extension <- Parse.munch1 isAlphaNum + case classifyKnownExtension extension of + Just ke -> + return ke + Nothing -> + fail ("Can't parse " ++ show extension ++ " as KnownExtension") + +classifyExtension :: String -> Extension +classifyExtension string + = case classifyKnownExtension string of + Just ext -> EnableExtension ext + Nothing -> + case string of + 'N':'o':string' -> + case classifyKnownExtension string' of + Just ext -> DisableExtension ext + Nothing -> UnknownExtension string + _ -> UnknownExtension string + +-- | 'read' for 'KnownExtension's is really really slow so for the Text +-- instance +-- what we do is make a simple table indexed off the first letter in the +-- extension name. The extension names actually cover the range @'A'-'Z'@ +-- pretty densely and the biggest bucket is 7 so it's not too bad. We just do +-- a linear search within each bucket. +-- +-- This gives an order of magnitude improvement in parsing speed, and it'll +-- also allow us to do case insensitive matches in future if we prefer. +-- +classifyKnownExtension :: String -> Maybe KnownExtension +classifyKnownExtension "" = Nothing +classifyKnownExtension string@(c : _) + | inRange (bounds knownExtensionTable) c + = lookup string (knownExtensionTable ! 
c) + | otherwise = Nothing + +knownExtensionTable :: Array Char [(String, KnownExtension)] +knownExtensionTable = + accumArray (flip (:)) [] ('A', 'Z') + [ (head str, (str, extension)) + | extension <- [toEnum 0 ..] + , let str = show extension ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/LICENSE cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/LICENSE --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/LICENSE 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/LICENSE 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,31 @@ +Copyright (c) 2003-2017, Cabal Development Team. +See the AUTHORS file for the full list of copyright holders. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of Isaac Jones nor the names of other + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/README.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/README.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/README.md 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/README.md 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,171 @@ +The Cabal library package +========================= + +See the [Cabal web site] for more information. + +If you also want the `cabal` command-line program, you need the +[cabal-install] package in addition to this library. + +[cabal-install]: ../cabal-install/README.md + +Installing the Cabal library +============================ + +If you already have the `cabal` program +--------------------------------------- + +In this case run: + + $ cabal install + +However, if you do not have an existing version of the `cabal` program, +you first must install the Cabal library. To avoid this bootstrapping +problem, you can install the Cabal library directly as described below. + + +Installing as a user (no root or administrator access) +------------------------------------------------------ + + ghc -threaded --make Setup + ./Setup configure --user + ./Setup build + ./Setup install + +Note the use of the `--user` flag at the configure step. + +Compiling 'Setup' rather than using `runghc Setup` is much faster and +works on Windows. 
For all packages other than Cabal itself, it is fine +to use `runghc`. + +This will install into `$HOME/.cabal/` on Unix and into +`Documents and Settings\$User\Application Data\cabal\` on Windows. +If you want to install elsewhere, use the `--prefix=` flag at the +configure step. + + +Installing as root or Administrator +----------------------------------- + + ghc -threaded --make Setup + ./Setup configure + ./Setup build + sudo ./Setup install + +Compiling Setup rather than using `runghc Setup` is much faster and +works on Windows. For all packages other than Cabal itself, it is fine +to use `runghc`. + +This will install into `/usr/local` on Unix, and on Windows it will +install into `$ProgramFiles/Haskell`. If you want to install elsewhere, +use the `--prefix=` flag at the configure step. + + +Using older versions of GHC and Cabal +====================================== + +It is recommended that you leave any pre-existing version of Cabal +installed. In particular, it is *essential* you keep the version that +came with GHC itself, since other installed packages require it (for +instance, the "ghc" API package). + +Prior to GHC 6.4.2, however, GHC did not deal particularly well with +having multiple versions of packages installed at once. So if you are +using GHC 6.4.1 or older and you have an older version of Cabal +installed, you should probably remove it by running: + + $ ghc-pkg unregister Cabal + +or, if you had Cabal installed only for your user account, run: + + $ ghc-pkg unregister Cabal --user + +The `filepath` dependency +========================= + +Cabal uses the [filepath] package, so it must be installed first. +GHC version 6.6.1 and later come with `filepath`, however, earlier +versions do not by default. If you do not already have `filepath`, +you need to install it. You can use any existing version of Cabal to do +that. If you have neither Cabal nor `filepath`, it is slightly +harder but still possible. 
+ +Unpack Cabal and `filepath` into separate directories. For example: + + tar -xzf filepath-1.1.0.0.tar.gz + tar -xzf Cabal-1.6.0.0.tar.gz + + # rename to make the following instructions simpler: + mv filepath-1.1.0.0/ filepath/ + mv Cabal-1.6.0.0/ Cabal/ + + cd Cabal + ghc -i../filepath -cpp --make Setup.hs -o ../filepath/setup + cd ../filepath/ + ./setup configure --user + ./setup build + ./setup install + +This installs `filepath` so that you can install Cabal with the normal +method. + +[filepath]: http://hackage.haskell.org/package/filepath + +More information +================ + +Please see the [Cabal web site] for the [user guide] and [API +documentation]. There is additional information available on the +[development wiki]. + +[user guide]: http://www.haskell.org/cabal/users-guide +[API documentation]: http://www.haskell.org/cabal/release/cabal-latest/doc/API/Cabal/Distribution-Simple.html +[development wiki]: https://github.com/haskell/cabal/wiki + + +Bugs +==== + +Please report bugs and feature requests to Cabal's [bug tracker]. + + +Your help +--------- + +To help Cabal's development, it is enormously helpful to know from +Cabal's users what their most pressing problems are with Cabal and +[Hackage]. You may have a favourite Cabal bug or limitation. Look at +Cabal's [bug tracker]. Ensure that the problem is reported there and +adequately described. Comment on the issue to report how much of a +problem the bug is for you. Subscribe to the issues's notifications to +discussed requirements and keep informed on progress. For feature +requests, it is helpful if there is a description of how you would +expect to interact with the new feature. + +[Hackage]: http://hackage.haskell.org + + +Source code +=========== + +You can get the master development branch using: + + $ git clone https://github.com/haskell/cabal.git + + +Credits +======= + +See the `AUTHORS` file. 
+ +Authors of the [original Cabal +specification](https://www.haskell.org/cabal/proposal/pkg-spec.pdf): + +- Isaac Jones +- Simon Marlow +- Ross Patterson +- Simon Peyton Jones +- Malcolm Wallace + + +[bug tracker]: https://github.com/haskell/cabal/issues +[Cabal web site]: http://www.haskell.org/cabal/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/Setup.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,13 @@ +import Distribution.Simple +main :: IO () +main = defaultMain + +-- Although this looks like the Simple build type, it is in fact vital that +-- we use this Setup.hs because it'll get compiled against the local copy +-- of the Cabal lib, thus enabling Cabal to bootstrap itself without relying +-- on any previous installation. This also means we can use any new features +-- immediately because we never have to worry about building Cabal with an +-- older version of itself. +-- +-- NOTE 25/01/2015: Bootstrapping is disabled for now, see +-- https://github.com/haskell/cabal/issues/3003. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/CheckTests.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/CheckTests.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/CheckTests.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/CheckTests.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,79 @@ +module Main + ( main + ) where + +import Test.Tasty +import Test.Tasty.Golden.Advanced (goldenTest) + +import Data.Algorithm.Diff (Diff (..), getGroupedDiff) +import Distribution.PackageDescription.Check (checkPackage) +import Distribution.PackageDescription.Parsec (parseGenericPackageDescription) +import Distribution.Parsec.Common (showPError, showPWarning) +import Distribution.Parsec.ParseResult (runParseResult) +import Distribution.Utils.Generic (fromUTF8BS, toUTF8BS) +import System.FilePath (replaceExtension, ()) + +import qualified Data.ByteString as BS +import qualified Data.ByteString.Char8 as BS8 + +tests :: TestTree +tests = checkTests + +------------------------------------------------------------------------------- +-- Regressions +------------------------------------------------------------------------------- + +checkTests :: TestTree +checkTests = testGroup "regressions" + [ checkTest "nothing-unicode.cabal" + , checkTest "haddock-api-2.18.1-check.cabal" + , checkTest "issue-774.cabal" + , checkTest "MiniAgda.cabal" + , checkTest "extensions-paths-5054.cabal" + , checkTest "pre-1.6-glob.cabal" + , checkTest "pre-2.4-globstar.cabal" + , checkTest "bad-glob-syntax.cabal" + , checkTest "cc-options-with-optimization.cabal" + , checkTest "cxx-options-with-optimization.cabal" + , checkTest "ghc-option-j.cabal" + ] + +checkTest :: FilePath -> TestTree +checkTest fp = cabalGoldenTest fp correct $ do + contents <- BS.readFile input + let res = parseGenericPackageDescription contents + let (ws, x) = runParseResult res + + return $ 
toUTF8BS $ case x of + Right gpd -> + -- Note: parser warnings are reported by `cabal check`, but not by + -- D.PD.Check functionality. + unlines (map (showPWarning fp) ws) ++ + unlines (map show (checkPackage gpd Nothing)) + Left (_, errs) -> unlines $ map (("ERROR: " ++) . showPError fp) errs + where + input = "tests" "ParserTests" "regressions" fp + correct = replaceExtension input "check" + +------------------------------------------------------------------------------- +-- Main +------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain tests + +cabalGoldenTest :: TestName -> FilePath -> IO BS.ByteString -> TestTree +cabalGoldenTest name ref act = goldenTest name (BS.readFile ref) act cmp upd + where + upd = BS.writeFile ref + cmp x y | x == y = return Nothing + cmp x y = return $ Just $ unlines $ + concatMap f (getGroupedDiff (BS8.lines x) (BS8.lines y)) + where + f (First xs) = map (cons3 '-' . fromUTF8BS) xs + f (Second ys) = map (cons3 '+' . fromUTF8BS) ys + -- we print unchanged lines too. It shouldn't be a problem while we have + -- reasonably small examples + f (Both xs _) = map (cons3 ' ' . fromUTF8BS) xs + -- we add three characters, so the changed lines are easier to spot + cons3 c cs = c : c : c : ' ' : cs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/custom-setup/CabalDoctestSetup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/custom-setup/CabalDoctestSetup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/custom-setup/CabalDoctestSetup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/custom-setup/CabalDoctestSetup.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,475 @@ +-- This is Distribution.Extra.Doctest module from cabal-doctest-1.0.4 +-- This isn't technically a Custom-Setup script, but it /was/. 
+ +{- + +Copyright (c) 2017, Oleg Grenrus + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of Oleg Grenrus nor the names of other + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +-} + +{-# LANGUAGE CPP #-} +{-# LANGUAGE OverloadedStrings #-} +-- | The provided 'generateBuildModule' generates 'Build_doctests' module. 
+-- That module exports enough configuration, so your doctests could be simply +-- +-- @ +-- module Main where +-- +-- import Build_doctests (flags, pkgs, module_sources) +-- import Data.Foldable (traverse_) +-- import Test.Doctest (doctest) +-- +-- main :: IO () +-- main = do +-- traverse_ putStrLn args -- optionally print arguments +-- doctest args +-- where +-- args = flags ++ pkgs ++ module_sources +-- @ +-- +-- To use this library in the @Setup.hs@, you should specify a @custom-setup@ +-- section in the cabal file, for example: +-- +-- @ +-- custom-setup +-- setup-depends: +-- base >= 4 && <5, +-- cabal-doctest >= 1 && <1.1 +-- @ +-- +-- /Note:/ you don't need to depend on @Cabal@ if you use only +-- 'defaultMainWithDoctests' in the @Setup.hs@. +-- +module CabalDoctestSetup ( + defaultMainWithDoctests, + defaultMainAutoconfWithDoctests, + addDoctestsUserHook, + doctestsUserHooks, + generateBuildModule, + ) where + +-- Hacky way to suppress few deprecation warnings. +#if MIN_VERSION_Cabal(1,24,0) +#define InstalledPackageId UnitId +#endif + +import Control.Monad + (when) +import Data.List + (nub) +import Data.Maybe + (maybeToList, mapMaybe) +import Data.String + (fromString) +import qualified Data.Foldable as F + (for_) +import qualified Data.Traversable as T + (traverse) +import qualified Distribution.ModuleName as ModuleName + (fromString) +import Distribution.ModuleName + (ModuleName) +import Distribution.Package + (InstalledPackageId) +import Distribution.Package + (Package (..), PackageId, packageVersion) +import Distribution.PackageDescription + (BuildInfo (..), Executable (..), Library (..), + PackageDescription (), TestSuite (..)) +import Distribution.Simple + (UserHooks (..), autoconfUserHooks, defaultMainWithHooks, simpleUserHooks) +import Distribution.Simple.BuildPaths + (autogenModulesDir) +import Distribution.Simple.Compiler + (PackageDB (..), showCompilerId) +import Distribution.Simple.LocalBuildInfo + (ComponentLocalBuildInfo 
(componentPackageDeps), LocalBuildInfo (), + compiler, withExeLBI, withLibLBI, withPackageDB, withTestLBI) +import Distribution.Simple.Setup + (BuildFlags (buildDistPref, buildVerbosity), fromFlag) +import Distribution.Simple.Utils + (createDirectoryIfMissingVerbose, findFile, rewriteFile) +import Distribution.Text + (display, simpleParse) +import System.FilePath + ((), (<.>), dropExtension) + +import Data.IORef (newIORef, modifyIORef, readIORef) + +#if MIN_VERSION_Cabal(1,25,0) +import Distribution.Simple.BuildPaths + (autogenComponentModulesDir) +#endif +#if MIN_VERSION_Cabal(2,0,0) +import Distribution.Types.MungedPackageId + (MungedPackageId) +import Distribution.Types.UnqualComponentName + (unUnqualComponentName) +#endif + +#if MIN_VERSION_directory(1,2,2) +import System.Directory + (makeAbsolute) +#else +import System.Directory + (getCurrentDirectory) +import System.FilePath + (isAbsolute) + +makeAbsolute :: FilePath -> IO FilePath +makeAbsolute p | isAbsolute p = return p + | otherwise = do + cwd <- getCurrentDirectory + return $ cwd p +#endif + +-- | A default main with doctests: +-- +-- @ +-- import Distribution.Extra.Doctest +-- (defaultMainWithDoctests) +-- +-- main :: IO () +-- main = defaultMainWithDoctests "doctests" +-- @ +defaultMainWithDoctests + :: String -- ^ doctests test-suite name + -> IO () +defaultMainWithDoctests = defaultMainWithHooks . doctestsUserHooks + +-- | Like 'defaultMainWithDoctests', for 'build-type: Configure' packages. +-- +-- @since 1.0.2 +defaultMainAutoconfWithDoctests + :: String -- ^ doctests test-suite name + -> IO () +defaultMainAutoconfWithDoctests n = + defaultMainWithHooks (addDoctestsUserHook n autoconfUserHooks) + +-- | 'simpleUserHooks' with 'generateBuildModule' prepended to the 'buildHook'. 
+doctestsUserHooks + :: String -- ^ doctests test-suite name + -> UserHooks +doctestsUserHooks testsuiteName = + addDoctestsUserHook testsuiteName simpleUserHooks + +-- | +-- +-- @since 1.0.2 +addDoctestsUserHook :: String -> UserHooks -> UserHooks +addDoctestsUserHook testsuiteName uh = uh + { buildHook = \pkg lbi hooks flags -> do + generateBuildModule testsuiteName flags pkg lbi + buildHook uh pkg lbi hooks flags + } + +data Name = NameLib (Maybe String) | NameExe String deriving (Eq, Show) + +nameToString :: Name -> String +nameToString n = case n of + NameLib x -> maybe "" (("_lib_" ++) . map fixchar) x + NameExe x -> "_exe_" ++ map fixchar x + where + -- Taken from Cabal: + -- https://github.com/haskell/cabal/blob/20de0bfea72145ba1c37e3f500cee5258cc18e51/Cabal/Distribution/Simple/Build/Macros.hs#L156-L158 + -- + -- Needed to fix component names with hyphens in them, as hyphens aren't + -- allowed in Haskell identifier names. + fixchar :: Char -> Char + fixchar '-' = '_' + fixchar c = c + +data Component = Component Name [String] [String] [String] + deriving Show + +-- | Generate a build module for the test suite. 
+-- +-- @ +-- import Distribution.Simple +-- (defaultMainWithHooks, UserHooks(..), simpleUserHooks) +-- import Distribution.Extra.Doctest +-- (generateBuildModule) +-- +-- main :: IO () +-- main = defaultMainWithHooks simpleUserHooks +-- { buildHook = \pkg lbi hooks flags -> do +-- generateBuildModule "doctests" flags pkg lbi +-- buildHook simpleUserHooks pkg lbi hooks flags +-- } +-- @ +generateBuildModule + :: String -- ^ doctests test-suite name + -> BuildFlags -> PackageDescription -> LocalBuildInfo -> IO () +generateBuildModule testSuiteName flags pkg lbi = do + let verbosity = fromFlag (buildVerbosity flags) + let distPref = fromFlag (buildDistPref flags) + + -- Package DBs + let dbStack = withPackageDB lbi ++ [ SpecificPackageDB $ distPref "package.conf.inplace" ] + let dbFlags = "-hide-all-packages" : packageDbArgs dbStack + + withTestLBI pkg lbi $ \suite suitecfg -> when (testName suite == fromString testSuiteName) $ do +#if MIN_VERSION_Cabal(1,25,0) + let testAutogenDir = autogenComponentModulesDir lbi suitecfg +#else + let testAutogenDir = autogenModulesDir lbi +#endif + + createDirectoryIfMissingVerbose verbosity True testAutogenDir + + let buildDoctestsFile = testAutogenDir "Build_doctests.hs" + + -- First, we create the autogen'd module Build_doctests. + -- Initially populate Build_doctests with a simple preamble. 
+ writeFile buildDoctestsFile $ unlines + [ "module Build_doctests where" + , "" + , "import Prelude" + , "" + , "data Name = NameLib (Maybe String) | NameExe String deriving (Eq, Show)" + , "data Component = Component Name [String] [String] [String] deriving (Eq, Show)" + , "" + ] + + -- we cannot traverse, only traverse_ + -- so we use IORef to collect components + componentsRef <- newIORef [] + + let testBI = testBuildInfo suite + + -- TODO: `words` is not proper parser (no support for quotes) + let additionalFlags = maybe [] words + $ lookup "x-doctest-options" + $ customFieldsBI testBI + + let additionalModules = maybe [] words + $ lookup "x-doctest-modules" + $ customFieldsBI testBI + + let additionalDirs' = maybe [] words + $ lookup "x-doctest-source-dirs" + $ customFieldsBI testBI + + additionalDirs <- mapM (fmap ("-i" ++) . makeAbsolute) additionalDirs' + + -- Next, for each component (library or executable), we get to Build_doctests + -- the sets of flags needed to run doctest on that component. + let getBuildDoctests withCompLBI mbCompName compExposedModules compMainIs compBuildInfo = + withCompLBI pkg lbi $ \comp compCfg -> do + let compBI = compBuildInfo comp + + -- modules + let modules = compExposedModules comp ++ otherModules compBI + -- it seems that doctest is happy to take in module names, not actual files! + let module_sources = modules + + -- We need the directory with the component's cabal_macros.h! +#if MIN_VERSION_Cabal(1,25,0) + let compAutogenDir = autogenComponentModulesDir lbi compCfg +#else + let compAutogenDir = autogenModulesDir lbi +#endif + + -- Lib sources and includes + iArgsNoPrefix + <- mapM makeAbsolute + $ compAutogenDir -- autogenerated files + : (distPref ++ "/build") -- preprocessed files (.hsc -> .hs); "build" is hardcoded in Cabal. + : hsSourceDirs compBI + includeArgs <- mapM (fmap ("-I"++) . makeAbsolute) $ includeDirs compBI + -- We clear all includes, so the CWD isn't used. 
+ let iArgs' = map ("-i"++) iArgsNoPrefix + iArgs = "-i" : iArgs' + + -- default-extensions + let extensionArgs = map (("-X"++) . display) $ defaultExtensions compBI + + -- CPP includes, i.e. include cabal_macros.h + let cppFlags = map ("-optP"++) $ + [ "-include", compAutogenDir ++ "/cabal_macros.h" ] + ++ cppOptions compBI + + -- Unlike other modules, the main-is module of an executable is not + -- guaranteed to share a module name with its filepath name. That is, + -- even though the main-is module is named Main, its filepath might + -- actually be Something.hs. To account for this possibility, we simply + -- pass the full path to the main-is module instead. + mainIsPath <- T.traverse (findFile iArgsNoPrefix) (compMainIs comp) + + let all_sources = map display module_sources + ++ additionalModules + ++ maybeToList mainIsPath + + let component = Component + (mbCompName comp) + (formatDeps $ testDeps compCfg suitecfg) + (concat + [ iArgs + , additionalDirs + , includeArgs + , dbFlags + , cppFlags + , extensionArgs + , additionalFlags + ]) + all_sources + + -- modify IORef, append component + modifyIORef componentsRef (\cs -> cs ++ [component]) + + -- For now, we only check for doctests in libraries and executables. + getBuildDoctests withLibLBI mbLibraryName exposedModules (const Nothing) libBuildInfo + getBuildDoctests withExeLBI (NameExe . executableName) (const []) (Just . modulePath) buildInfo + + components <- readIORef componentsRef + F.for_ components $ \(Component name pkgs flags sources) -> do + let compSuffix = nameToString name + pkgs_comp = "pkgs" ++ compSuffix + flags_comp = "flags" ++ compSuffix + module_sources_comp = "module_sources" ++ compSuffix + + -- write autogen'd file + appendFile buildDoctestsFile $ unlines + [ -- -package-id etc. 
flags + pkgs_comp ++ " :: [String]" + , pkgs_comp ++ " = " ++ show pkgs + , "" + , flags_comp ++ " :: [String]" + , flags_comp ++ " = " ++ show flags + , "" + , module_sources_comp ++ " :: [String]" + , module_sources_comp ++ " = " ++ show sources + , "" + ] + + -- write enabled components, i.e. x-doctest-components + -- if none enabled, pick library + let enabledComponents = maybe [NameLib Nothing] (mapMaybe parseComponentName . words) + $ lookup "x-doctest-components" + $ customFieldsBI testBI + + let components' = + filter (\(Component n _ _ _) -> n `elem` enabledComponents) components + appendFile buildDoctestsFile $ unlines + [ "-- " ++ show enabledComponents + , "components :: [Component]" + , "components = " ++ show components' + ] + + where + parseComponentName :: String -> Maybe Name + parseComponentName "lib" = Just (NameLib Nothing) + parseComponentName ('l' : 'i' : 'b' : ':' : x) = Just (NameLib (Just x)) + parseComponentName ('e' : 'x' : 'e' : ':' : x) = Just (NameExe x) + parseComponentName _ = Nothing + + -- we do this check in Setup, as then doctests don't need to depend on Cabal + isOldCompiler = maybe False id $ do + a <- simpleParse $ showCompilerId $ compiler lbi + b <- simpleParse "7.5" + return $ packageVersion (a :: PackageId) < b + + formatDeps = map formatOne + formatOne (installedPkgId, pkgId) + -- The problem is how different cabal executables handle package databases + -- when doctests depend on the library + -- + -- If the pkgId is current package, we don't output the full package-id + -- but only the name + -- + -- Because of MungedPackageId we compare display version of identifiers + -- not the identifiers themfselves. 
+ | display (packageId pkg) == display pkgId = "-package=" ++ display pkgId + | otherwise = "-package-id=" ++ display installedPkgId + + -- From Distribution.Simple.Program.GHC + packageDbArgs :: [PackageDB] -> [String] + packageDbArgs | isOldCompiler = packageDbArgsConf + | otherwise = packageDbArgsDb + + -- GHC <7.6 uses '-package-conf' instead of '-package-db'. + packageDbArgsConf :: [PackageDB] -> [String] + packageDbArgsConf dbstack = case dbstack of + (GlobalPackageDB:UserPackageDB:dbs) -> concatMap specific dbs + (GlobalPackageDB:dbs) -> ("-no-user-package-conf") + : concatMap specific dbs + _ -> ierror + where + specific (SpecificPackageDB db) = [ "-package-conf=" ++ db ] + specific _ = ierror + ierror = error $ "internal error: unexpected package db stack: " + ++ show dbstack + + -- GHC >= 7.6 uses the '-package-db' flag. See + -- https://ghc.haskell.org/trac/ghc/ticket/5977. + packageDbArgsDb :: [PackageDB] -> [String] + -- special cases to make arguments prettier in common scenarios + packageDbArgsDb dbstack = case dbstack of + (GlobalPackageDB:UserPackageDB:dbs) + | all isSpecific dbs -> concatMap single dbs + (GlobalPackageDB:dbs) + | all isSpecific dbs -> "-no-user-package-db" + : concatMap single dbs + dbs -> "-clear-package-db" + : concatMap single dbs + where + single (SpecificPackageDB db) = [ "-package-db=" ++ db ] + single GlobalPackageDB = [ "-global-package-db" ] + single UserPackageDB = [ "-user-package-db" ] + isSpecific (SpecificPackageDB _) = True + isSpecific _ = False + + mbLibraryName :: Library -> Name +#if MIN_VERSION_Cabal(2,0,0) + -- Cabal-2.0 introduced internal libraries, which are named. + mbLibraryName = NameLib . fmap unUnqualComponentName . libName +#else + -- Before that, there was only ever at most one library per + -- .cabal file, which has no name. + mbLibraryName _ = NameLib Nothing +#endif + + executableName :: Executable -> String +#if MIN_VERSION_Cabal(2,0,0) + executableName = unUnqualComponentName . 
exeName +#else + executableName = exeName +#endif + +-- | In compat settings it's better to omit the type-signature +testDeps :: ComponentLocalBuildInfo -> ComponentLocalBuildInfo +#if MIN_VERSION_Cabal(2,0,0) + -> [(InstalledPackageId, MungedPackageId)] +#else + -> [(InstalledPackageId, PackageId)] +#endif +testDeps xs ys = nub $ componentPackageDeps xs ++ componentPackageDeps ys diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/custom-setup/CustomSetupTests.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/custom-setup/CustomSetupTests.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/custom-setup/CustomSetupTests.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/custom-setup/CustomSetupTests.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,8 @@ +-- This test-suite verifies some custom-setup scripts compile ok +-- so we don't break them by accident, i.e. when breakage can be prevented. +module Main (main) where +import CabalDoctestSetup () +import IdrisSetup () + +main :: IO () +main = return () diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/custom-setup/IdrisSetup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/custom-setup/IdrisSetup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/custom-setup/IdrisSetup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/custom-setup/IdrisSetup.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,383 @@ +-- This is Setup.hs script from idris-1.1.1 + +{- + +Copyright (c) 2011 Edwin Brady + School of Computer Science, University of St Andrews +All rights reserved. + +This code is derived from software written by Edwin Brady +(eb@cs.st-andrews.ac.uk). 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. None of the names of the copyright holders may be used to endorse + or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +*** End of disclaimer. 
*** + +-} + +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-deprecations #-} +module IdrisSetup (main) where + +#if !defined(MIN_VERSION_Cabal) +# define MIN_VERSION_Cabal(x,y,z) 0 +#endif + +#if !defined(MIN_VERSION_base) +# define MIN_VERSION_base(x,y,z) 0 +#endif + +import Control.Monad +import Data.IORef +import Control.Exception (SomeException, catch) +import Data.String (fromString) + +import Distribution.Simple +import Distribution.Simple.BuildPaths +import Distribution.Simple.InstallDirs as I +import Distribution.Simple.LocalBuildInfo as L +import qualified Distribution.Simple.Setup as S +import qualified Distribution.Simple.Program as P +import Distribution.Simple.Utils (createDirectoryIfMissingVerbose, rewriteFile, notice, installOrdinaryFiles) +import Distribution.Compiler +import Distribution.PackageDescription +import Distribution.Text + +import System.Environment +import System.Exit +import System.FilePath ((), splitDirectories,isAbsolute) +import System.Directory +import qualified System.FilePath.Posix as Px +import System.Process + +-- This is difference from vanilla idris-1.1.1 +configConfigurationsFlags :: S.ConfigFlags -> [(FlagName, Bool)] +#if MIN_VERSION_Cabal(2,1,0) +configConfigurationsFlags = unFlagAssignment . S.configConfigurationsFlags +#else +configConfigurationsFlags = S.configConfigurationsFlags +#endif + +#if !MIN_VERSION_base(4,6,0) +lookupEnv :: String -> IO (Maybe String) +lookupEnv v = lookup v `fmap` getEnvironment +#endif + +-- After Idris is built, we need to check and install the prelude and other libs + +-- ----------------------------------------------------------------------------- +-- Idris Command Path + +-- make on mingw32 exepects unix style separators +#ifdef mingw32_HOST_OS +() = (Px.) +idrisCmd local = Px.joinPath $ splitDirectories $ ".." ".." buildDir local "idris" "idris" +#else +idrisCmd local = ".." ".." 
buildDir local "idris" "idris" +#endif + +-- ----------------------------------------------------------------------------- +-- Make Commands + +-- use GNU make on FreeBSD +#if defined(freebsd_HOST_OS) || defined(dragonfly_HOST_OS)\ + || defined(openbsd_HOST_OS) || defined(netbsd_HOST_OS) +mymake = "gmake" +#else +mymake = "make" +#endif +make verbosity = + P.runProgramInvocation verbosity . P.simpleProgramInvocation mymake + +#ifdef mingw32_HOST_OS +windres verbosity = P.runProgramInvocation verbosity . P.simpleProgramInvocation "windres" +#endif +-- ----------------------------------------------------------------------------- +-- Flags + +usesGMP :: S.ConfigFlags -> Bool +usesGMP flags = + case lookup (mkFlagName "gmp") (configConfigurationsFlags flags) of + Just True -> True + Just False -> False + Nothing -> False + +execOnly :: S.ConfigFlags -> Bool +execOnly flags = + case lookup (mkFlagName "execonly") (configConfigurationsFlags flags) of + Just True -> True + Just False -> False + Nothing -> False + +isRelease :: S.ConfigFlags -> Bool +isRelease flags = + case lookup (mkFlagName "release") (configConfigurationsFlags flags) of + Just True -> True + Just False -> False + Nothing -> False + +isFreestanding :: S.ConfigFlags -> Bool +isFreestanding flags = + case lookup (mkFlagName "freestanding") (configConfigurationsFlags flags) of + Just True -> True + Just False -> False + Nothing -> False + +#if !(MIN_VERSION_Cabal(2,0,0)) +mkFlagName :: String -> FlagName +mkFlagName = FlagName +#endif + +-- ----------------------------------------------------------------------------- +-- Clean + +idrisClean _ flags _ _ = cleanStdLib + where + verbosity = S.fromFlag $ S.cleanVerbosity flags + + cleanStdLib = makeClean "libs" + + makeClean dir = make verbosity [ "-C", dir, "clean", "IDRIS=idris" ] + +-- ----------------------------------------------------------------------------- +-- Configure + +gitHash :: IO String +gitHash = do h <- Control.Exception.catch (readProcess 
"git" ["rev-parse", "--short", "HEAD"] "") + (\e -> let e' = (e :: SomeException) in return "PRE") + return $ takeWhile (/= '\n') h + +-- Put the Git hash into a module for use in the program +-- For release builds, just put the empty string in the module +generateVersionModule verbosity dir release = do + hash <- gitHash + let versionModulePath = dir "Version_idris" Px.<.> "hs" + putStrLn $ "Generating " ++ versionModulePath ++ + if release then " for release" else " for prerelease " ++ hash + createDirectoryIfMissingVerbose verbosity True dir + rewriteFile versionModulePath (versionModuleContents hash) + + where versionModuleContents h = "module Version_idris where\n\n" ++ + "gitHash :: String\n" ++ + if release + then "gitHash = \"\"\n" + else "gitHash = \"git:" ++ h ++ "\"\n" + +-- Generate a module that contains the lib path for a freestanding Idris +generateTargetModule verbosity dir targetDir = do + let absPath = isAbsolute targetDir + let targetModulePath = dir "Target_idris" Px.<.> "hs" + putStrLn $ "Generating " ++ targetModulePath + createDirectoryIfMissingVerbose verbosity True dir + rewriteFile targetModulePath (versionModuleContents absPath targetDir) + where versionModuleContents absolute td = "module Target_idris where\n\n" ++ + "import System.FilePath\n" ++ + "import System.Environment\n" ++ + "getDataDir :: IO String\n" ++ + if absolute + then "getDataDir = return \"" ++ td ++ "\"\n" + else "getDataDir = do \n" ++ + " expath <- getExecutablePath\n" ++ + " execDir <- return $ dropFileName expath\n" ++ + " return $ execDir ++ \"" ++ td ++ "\"\n" + ++ "getDataFileName :: FilePath -> IO FilePath\n" + ++ "getDataFileName name = do\n" + ++ " dir <- getDataDir\n" + ++ " return (dir ++ \"/\" ++ name)" + +-- a module that has info about existence and location of a bundled toolchain +generateToolchainModule verbosity srcDir toolDir = do + let commonContent = "module Tools_idris where\n\n" + let toolContent = case toolDir of + Just dir -> 
"hasBundledToolchain = True\n" ++ + "getToolchainDir = \"" ++ dir ++ "\"\n" + Nothing -> "hasBundledToolchain = False\n" ++ + "getToolchainDir = \"\"" + let toolPath = srcDir "Tools_idris" Px.<.> "hs" + createDirectoryIfMissingVerbose verbosity True srcDir + rewriteFile toolPath (commonContent ++ toolContent) + +idrisConfigure _ flags pkgdesc local = do + configureRTS + withLibLBI pkgdesc local $ \_ libcfg -> do + let libAutogenDir = autogenComponentModulesDir local libcfg + generateVersionModule verbosity libAutogenDir (isRelease (configFlags local)) + if isFreestanding $ configFlags local + then do + toolDir <- lookupEnv "IDRIS_TOOLCHAIN_DIR" + generateToolchainModule verbosity libAutogenDir toolDir + targetDir <- lookupEnv "IDRIS_LIB_DIR" + case targetDir of + Just d -> generateTargetModule verbosity libAutogenDir d + Nothing -> error $ "Trying to build freestanding without a target directory." + ++ " Set it by defining IDRIS_LIB_DIR." + else + generateToolchainModule verbosity libAutogenDir Nothing + where + verbosity = S.fromFlag $ S.configVerbosity flags + version = pkgVersion . package $ localPkgDescr local + + -- This is a hack. I don't know how to tell cabal that a data file needs + -- installing but shouldn't be in the distribution. And it won't make the + -- distribution if it's not there, so instead I just delete + -- the file after configure. 
+ configureRTS = make verbosity ["-C", "rts", "clean"] + +#if !(MIN_VERSION_Cabal(2,0,0)) + autogenComponentModulesDir lbi _ = autogenModulesDir lbi +#endif + +idrisPreSDist args flags = do + let dir = S.fromFlag (S.sDistDirectory flags) + let verb = S.fromFlag (S.sDistVerbosity flags) + generateVersionModule verb "src" True + generateTargetModule verb "src" "./libs" + generateToolchainModule verb "src" Nothing + preSDist simpleUserHooks args flags + +idrisSDist sdist pkgDesc bi hooks flags = do + pkgDesc' <- addGitFiles pkgDesc + sdist pkgDesc' bi hooks flags + where + addGitFiles :: PackageDescription -> IO PackageDescription + addGitFiles pkgDesc = do + files <- gitFiles + return $ pkgDesc { extraSrcFiles = extraSrcFiles pkgDesc ++ files} + gitFiles :: IO [FilePath] + gitFiles = liftM lines (readProcess "git" ["ls-files"] "") + +idrisPostSDist args flags desc lbi = do + Control.Exception.catch (do let file = "src" "Version_idris" Px.<.> "hs" + let targetFile = "src" "Target_idris" Px.<.> "hs" + putStrLn $ "Removing generated modules:\n " + ++ file ++ "\n" ++ targetFile + removeFile file + removeFile targetFile) + (\e -> let e' = (e :: SomeException) in return ()) + postSDist simpleUserHooks args flags desc lbi + +-- ----------------------------------------------------------------------------- +-- Build + +getVersion :: Args -> S.BuildFlags -> IO HookedBuildInfo +getVersion args flags = do + hash <- gitHash + let buildinfo = (emptyBuildInfo { cppOptions = ["-DVERSION="++hash] }) :: BuildInfo + return (Just buildinfo, []) + +idrisPreBuild args flags = do +#ifdef mingw32_HOST_OS + createDirectoryIfMissingVerbose verbosity True dir + windres verbosity ["icons/idris_icon.rc","-o", dir++"/idris_icon.o"] + return (Nothing, [(fromString "idris", emptyBuildInfo { ldOptions = [dir ++ "/idris_icon.o"] })]) + where + verbosity = S.fromFlag $ S.buildVerbosity flags + dir = S.fromFlagOrDefault "dist" $ S.buildDistPref flags +#else + return (Nothing, []) +#endif + +idrisBuild 
_ flags _ local + = if (execOnly (configFlags local)) then buildRTS + else do buildStdLib + buildRTS + where + verbosity = S.fromFlag $ S.buildVerbosity flags + + buildStdLib = do + putStrLn "Building libraries..." + makeBuild "libs" + where + makeBuild dir = make verbosity [ "-C", dir, "build" , "IDRIS=" ++ idrisCmd local] + + buildRTS = make verbosity (["-C", "rts", "build"] ++ + gmpflag (usesGMP (configFlags local))) + + gmpflag False = [] + gmpflag True = ["GMP=-DIDRIS_GMP"] + +-- ----------------------------------------------------------------------------- +-- Copy/Install + +idrisInstall verbosity copy pkg local + = if (execOnly (configFlags local)) then installRTS + else do installStdLib + installRTS + installManPage + where + target = datadir $ L.absoluteInstallDirs pkg local copy + + installStdLib = do + let target' = target -- "libs" + putStrLn $ "Installing libraries in " ++ target' + makeInstall "libs" target' + + installRTS = do + let target' = target "rts" + putStrLn $ "Installing run time system in " ++ target' + makeInstall "rts" target' + + installManPage = do + let mandest = mandir (L.absoluteInstallDirs pkg local copy) ++ "/man1" + notice verbosity $ unwords ["Copying man page to", mandest] + installOrdinaryFiles verbosity mandest [("man", "idris.1")] + + makeInstall src target = + make verbosity [ "-C", src, "install" , "TARGET=" ++ target, "IDRIS=" ++ idrisCmd local] + +-- ----------------------------------------------------------------------------- +-- Test + +-- There are two "dataDir" in cabal, and they don't relate to each other. +-- When fetching modules, idris uses the second path (in the pkg record), +-- which by default is the root folder of the project. +-- We want it to be the install directory where we put the idris libraries. 
+fixPkg pkg target = pkg { dataDir = target } + +idrisTestHook args pkg local hooks flags = do + let target = datadir $ L.absoluteInstallDirs pkg local NoCopyDest + testHook simpleUserHooks args (fixPkg pkg target) local hooks flags + +-- ----------------------------------------------------------------------------- +-- Main + +-- Install libraries during both copy and install +-- See https://github.com/haskell/cabal/issues/709 +main = defaultMainWithHooks $ simpleUserHooks + { postClean = idrisClean + , postConf = idrisConfigure + , preBuild = idrisPreBuild + , postBuild = idrisBuild + , postCopy = \_ flags pkg local -> + idrisInstall (S.fromFlag $ S.copyVerbosity flags) + (S.fromFlag $ S.copyDest flags) pkg local + , postInst = \_ flags pkg local -> + idrisInstall (S.fromFlag $ S.installVerbosity flags) + NoCopyDest pkg local + , preSDist = idrisPreSDist + , sDistHook = idrisSDist (sDistHook simpleUserHooks) + , postSDist = idrisPostSDist + , testHook = idrisTestHook + } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/hackage/check.sh cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/hackage/check.sh --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/hackage/check.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/hackage/check.sh 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,25 @@ +#!/bin/sh + +base_version=1.4.0.2 +test_version=1.5.6 + +for setup in archive/*/*/Setup.hs archive/*/*/Setup.lhs; do + + pkgname=$(basename ${setup}) + + if test $(wc -w < ${setup}) -gt 21; then + if ghc -package Cabal-${base_version} -S ${setup} -o /dev/null 2> /dev/null; then + + if ghc -package Cabal-${test_version} -S ${setup} -o /dev/null 2> /dev/null; then + echo "OK ${setup}" + else + echo "FAIL ${setup} does not compile with Cabal-${test_version}" + fi + else + echo "OK ${setup} (does not compile with Cabal-${base_version})" + fi + else + echo 
"trivial ${setup}" + fi + +done diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/hackage/download.sh cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/hackage/download.sh --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/hackage/download.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/hackage/download.sh 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,19 @@ +#!/bin/sh + +if test ! -f archive/archive.tar; then + + wget http://hackage.haskell.org/cgi-bin/hackage-scripts/archive.tar + mkdir -p archive + mv archive.tar archive/ + tar -C archive -xf archive/archive.tar + +fi + +if test ! -f archive/00-index.tar.gz; then + + wget http://hackage.haskell.org/packages/archive/00-index.tar.gz + mkdir -p archive + mv 00-index.tar.gz archive/ + tar -C archive -xzf archive/00-index.tar.gz + +fi diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/hackage/unpack.sh cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/hackage/unpack.sh --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/hackage/unpack.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/hackage/unpack.sh 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,16 @@ +#!/bin/sh + +for tarball in archive/*/*/*.tar.gz; do + + pkgdir=$(dirname ${tarball}) + pkgname=$(basename ${tarball} .tar.gz) + + if tar -tzf ${tarball} ${pkgname}/Setup.hs 2> /dev/null; then + tar -xzf ${tarball} ${pkgname}/Setup.hs -O > ${pkgdir}/Setup.hs + elif tar -tzf ${tarball} ${pkgname}/Setup.lhs 2> /dev/null; then + tar -xzf ${tarball} ${pkgname}/Setup.lhs -O > ${pkgdir}/Setup.lhs + else + echo "${pkgname} has no Setup.hs or .lhs at all!!?!" 
+ fi + +done diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/HackageTests.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/HackageTests.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/HackageTests.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/HackageTests.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,301 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE CPP #-} +{-# LANGUAGE Rank2Types #-} +{-# LANGUAGE ScopedTypeVariables #-} +#if !MIN_VERSION_deepseq(1,4,0) +{-# OPTIONS_GHC -fno-warn-orphans #-} +#endif +module Main where + +import Distribution.Compat.Semigroup +import Prelude () +import Prelude.Compat + +import Control.Applicative (many, (<**>), (<|>)) +import Control.DeepSeq (NFData (..), force) +import Control.Exception (evaluate) +import Control.Monad (join, unless) +import Data.Foldable (traverse_) +import Data.List (isPrefixOf, isSuffixOf) +import Data.Maybe (mapMaybe) +import Data.Monoid (Sum (..)) +import Distribution.PackageDescription.Check (PackageCheck (..) 
+ ,checkPackage) +import Distribution.PackageDescription.PrettyPrint (showGenericPackageDescription) +import Distribution.Simple.Utils (toUTF8BS) +import System.Directory (getAppUserDataDirectory) +import System.Exit (exitFailure) +import System.FilePath (()) + +import Data.Orphans () + +import qualified Codec.Archive.Tar as Tar +import qualified Data.ByteString as B +import qualified Data.ByteString.Char8 as B8 +import qualified Data.ByteString.Lazy as BSL +import qualified Data.Map as Map +import qualified Distribution.PackageDescription.Parsec as Parsec +import qualified Distribution.Parsec.Common as Parsec +import qualified Distribution.Parsec.Parser as Parsec + +import Distribution.Compat.Lens +import qualified Distribution.Types.GenericPackageDescription.Lens as L +import qualified Distribution.Types.PackageDescription.Lens as L +import qualified Options.Applicative as O + +#ifdef MIN_VERSION_tree_diff +import Data.TreeDiff (ansiWlEditExpr, ediff) +import Instances.TreeDiff () +#endif + +parseIndex :: (Monoid a, NFData a) => (FilePath -> Bool) + -> (FilePath -> BSL.ByteString -> IO a) -> IO a +parseIndex predicate action = do + cabalDir <- getAppUserDataDirectory "cabal" + cfg <- B.readFile (cabalDir "config") + cfgFields <- either (fail . show) pure $ Parsec.readFields cfg + let repos = reposFromConfig cfgFields + repoCache = case lookupInConfig "remote-repo-cache" cfgFields of + [] -> cabalDir "packages" -- Default + (rrc : _) -> rrc -- User-specified + tarName repo = repoCache repo "01-index.tar" + mconcat <$> traverse (parseIndex' predicate action . tarName) repos + +parseIndex' :: (Monoid a, NFData a) => (FilePath -> Bool) + -> (FilePath -> BSL.ByteString -> IO a) -> FilePath -> IO a +parseIndex' predicate action path = do + putStrLn $ "Reading index from: " ++ path + contents <- BSL.readFile path + let entries = Tar.read contents + entries' = Tar.foldEntries cons [] (error . 
show) entries + foldIO f entries' + + where + cons entry entries + | predicate (Tar.entryPath entry) = entry : entries + | otherwise = entries + + f entry = case Tar.entryContent entry of + Tar.NormalFile contents _ + | ".cabal" `isSuffixOf` fpath -> + action fpath contents >>= evaluate . force + | otherwise -> + return mempty + Tar.Directory -> return mempty + _ -> putStrLn ("Unknown content in " ++ fpath) + >> return mempty + where + fpath = Tar.entryPath entry + +readFieldTest :: FilePath -> BSL.ByteString -> IO () +readFieldTest fpath bsl = case Parsec.readFields $ BSL.toStrict bsl of + Right _ -> return () + Left err -> putStrLn $ fpath ++ "\n" ++ show err + +-- | Map with unionWith monoid +newtype M k v = M (Map.Map k v) + deriving (Show) +instance (Ord k, Monoid v) => Semigroup (M k v) where + M a <> M b = M (Map.unionWith mappend a b) +instance (Ord k, Monoid v) => Monoid (M k v) where + mempty = M Map.empty + mappend = (<>) +instance (NFData k, NFData v) => NFData (M k v) where + rnf (M m) = rnf m + +parseParsecTest :: FilePath -> BSL.ByteString -> IO (Sum Int) +parseParsecTest fpath bsl = do + let bs = BSL.toStrict bsl + let (_warnings, parsec) = Parsec.runParseResult $ + Parsec.parseGenericPackageDescription bs + case parsec of + Right _ -> return (Sum 1) + Left (_, errors) -> do + traverse_ (putStrLn . Parsec.showPError fpath) errors + exitFailure + +parseCheckTest :: FilePath -> BSL.ByteString -> IO CheckResult +parseCheckTest fpath bsl = do + let bs = BSL.toStrict bsl + let (_warnings, parsec) = Parsec.runParseResult $ + Parsec.parseGenericPackageDescription bs + case parsec of + Right gpd -> do + let checks = checkPackage gpd Nothing + -- one for file, many checks + return (CheckResult 1 0 0 0 0 0 <> foldMap toCheckResult checks) + Left (_, errors) -> do + traverse_ (putStrLn . 
Parsec.showPError fpath) errors + exitFailure + +data CheckResult = CheckResult !Int !Int !Int !Int !Int !Int + +instance NFData CheckResult where + rnf !_ = () + +instance Semigroup CheckResult where + CheckResult n a b c d e <> CheckResult n' a' b' c' d' e' = + CheckResult (n + n') (a + a') (b + b') (c + c') (d + d') (e + e') + +instance Monoid CheckResult where + mempty = CheckResult 0 0 0 0 0 0 + mappend = (<>) + +toCheckResult :: PackageCheck -> CheckResult +toCheckResult PackageBuildImpossible {} = CheckResult 0 1 0 0 0 0 +toCheckResult PackageBuildWarning {} = CheckResult 0 0 1 0 0 0 +toCheckResult PackageDistSuspicious {} = CheckResult 0 0 0 1 0 0 +toCheckResult PackageDistSuspiciousWarn {} = CheckResult 0 0 0 0 1 0 +toCheckResult PackageDistInexcusable {} = CheckResult 0 0 0 0 0 1 + +roundtripTest :: FilePath -> BSL.ByteString -> IO (Sum Int) +roundtripTest fpath bsl = do + let bs = BSL.toStrict bsl + x0 <- parse "1st" bs + let bs' = showGenericPackageDescription x0 + y0 <- parse "2nd" (toUTF8BS bs') + + -- we mungled license here + let y1 = y0 + + -- license-files: "" + let stripEmpty = filter (/="") + let x1 = x0 & L.packageDescription . L.licenseFiles %~ stripEmpty + let y2 = y1 & L.packageDescription . L.licenseFiles %~ stripEmpty + + let y = y2 & L.packageDescription . L.description .~ "" + let x = x1 & L.packageDescription . 
L.description .~ "" + + unless (x == y || fpath == "ixset/1.0.4/ixset.cabal") $ do + putStrLn fpath +#ifdef MIN_VERSION_tree_diff + print $ ansiWlEditExpr $ ediff x y +#else + putStrLn "<<<<<<" + print x + putStrLn "======" + print y + putStrLn ">>>>>>" +#endif + putStrLn bs' + exitFailure + + return (Sum 1) + where + parse phase c = do + let (_, x') = Parsec.runParseResult $ + Parsec.parseGenericPackageDescription c + case x' of + Right gpd -> pure gpd + Left (_, errs) -> do + putStrLn $ fpath ++ " " ++ phase + traverse_ print errs + B.putStr c + fail "parse error" + +main :: IO () +main = join (O.execParser opts) + where + opts = O.info (optsP <**> O.helper) $ mconcat + [ O.fullDesc + , O.progDesc "tests using Hackage's index" + ] + + optsP = subparser + [ command "read-fields" readFieldsP + "Parse outer format (to '[Field]', TODO: apply Quirks)" + , command "parsec" parsecP "Parse GPD with parsec" + , command "roundtrip" roundtripP "parse . pretty . parse = parse" + , command "check" checkP "Check GPD" + ] <|> pure defaultA + + defaultA = do + putStrLn "Default action: parsec k" + parsecA (mkPredicate ["k"]) + + readFieldsP = readFieldsA <$> prefixP + readFieldsA pfx = parseIndex pfx readFieldTest + + parsecP = parsecA <$> prefixP + parsecA pfx = do + Sum n <- parseIndex pfx parseParsecTest + putStrLn $ show n ++ " files processed" + + roundtripP = roundtripA <$> prefixP + roundtripA pfx = do + Sum n <- parseIndex pfx roundtripTest + putStrLn $ show n ++ " files processed" + + checkP = checkA <$> prefixP + checkA pfx = do + CheckResult n a b c d e <- parseIndex pfx parseCheckTest + putStrLn $ show n ++ " files processed" + putStrLn $ show a ++ " build impossible" + putStrLn $ show b ++ " build warning" + putStrLn $ show c ++ " build dist suspicious" + putStrLn $ show d ++ " build dist suspicious warning" + putStrLn $ show e ++ " build dist inexcusable" + + prefixP = fmap mkPredicate $ many $ O.strArgument $ mconcat + [ O.metavar "PREFIX" + , O.help "Check only 
files starting with a prefix" + ] + + mkPredicate [] = const True + mkPredicate pfxs = \n -> any (`isPrefixOf` n) pfxs + + command name p desc = O.command name + (O.info (p <**> O.helper) (O.progDesc desc)) + subparser = O.subparser . mconcat + +------------------------------------------------------------------------------- +-- Index shuffling +------------------------------------------------------------------------------- + +-- TODO: Use 'Cabal' for this? +reposFromConfig :: [Parsec.Field ann] -> [String] +reposFromConfig fields = takeWhile (/= ':') <$> mapMaybe f fields + where + f (Parsec.Field (Parsec.Name _ name) fieldLines) + | B8.unpack name == "remote-repo" = + Just $ fieldLinesToString fieldLines + f (Parsec.Section (Parsec.Name _ name) + [Parsec.SecArgName _ secName] _fieldLines) + | B8.unpack name == "repository" = + Just $ B8.unpack secName + f _ = Nothing + +-- | Looks up the given key in the cabal configuration file +lookupInConfig :: String -> [Parsec.Field ann] -> [String] +lookupInConfig key = mapMaybe f + where + f (Parsec.Field (Parsec.Name _ name) fieldLines) + | B8.unpack name == key = + Just $ fieldLinesToString fieldLines + f _ = Nothing + +fieldLinesToString :: [Parsec.FieldLine ann] -> String +fieldLinesToString fieldLines = + B8.unpack $ B.concat $ bsFromFieldLine <$> fieldLines + where + bsFromFieldLine (Parsec.FieldLine _ bs) = bs + +------------------------------------------------------------------------------- +-- Utilities +------------------------------------------------------------------------------- + +foldIO :: (Monoid m, NFData m) => (a -> IO m) -> [a] -> IO m +foldIO f = go mempty where + go !acc [] = return acc + go !acc (x : xs) = do + y <- f x + go (mappend acc y) xs + +------------------------------------------------------------------------------- +-- Orphans +------------------------------------------------------------------------------- + +#if !MIN_VERSION_deepseq(1,4,0) +instance NFData a => NFData (Sum a) where + rnf 
(Sum a) = rnf a +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/Instances/TreeDiff/Language.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/Instances/TreeDiff/Language.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/Instances/TreeDiff/Language.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/Instances/TreeDiff/Language.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,17 @@ +{-# LANGUAGE CPP #-} +#if __GLASGOW_HASKELL__ >= 800 +{-# OPTIONS_GHC -freduction-depth=0 #-} +#else +{-# OPTIONS_GHC -fcontext-stack=151 #-} +#endif +{-# OPTIONS_GHC -fno-warn-orphans #-} +module Instances.TreeDiff.Language where + +import Data.TreeDiff +import Language.Haskell.Extension (Extension, KnownExtension, Language) + +-- This are big enums, so they are in separate file. +-- +instance ToExpr Extension +instance ToExpr KnownExtension +instance ToExpr Language diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/Instances/TreeDiff/SPDX.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/Instances/TreeDiff/SPDX.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/Instances/TreeDiff/SPDX.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/Instances/TreeDiff/SPDX.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,28 @@ +{-# LANGUAGE CPP #-} +#if __GLASGOW_HASKELL__ >= 800 +{-# OPTIONS_GHC -freduction-depth=0 #-} +#else +{-# OPTIONS_GHC -fcontext-stack=151 #-} +#endif +{-# OPTIONS_GHC -fno-warn-orphans #-} +module Instances.TreeDiff.SPDX where + +import Data.TreeDiff +import Distribution.License (License) + +import Instances.TreeDiff.Version () + +import qualified Distribution.SPDX as SPDX + +-- 'License' almost belongs here. 
+ +instance ToExpr License + +-- Generics instance is too heavy +instance ToExpr SPDX.LicenseId where toExpr = defaultExprViaShow +instance ToExpr SPDX.LicenseExceptionId where toExpr = defaultExprViaShow + +instance ToExpr SPDX.License +instance ToExpr SPDX.LicenseExpression +instance ToExpr SPDX.LicenseRef +instance ToExpr SPDX.SimpleLicenseExpression diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/Instances/TreeDiff/Version.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/Instances/TreeDiff/Version.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/Instances/TreeDiff/Version.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/Instances/TreeDiff/Version.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,14 @@ +{-# LANGUAGE CPP #-} +#if __GLASGOW_HASKELL__ >= 800 +{-# OPTIONS_GHC -freduction-depth=0 #-} +#else +{-# OPTIONS_GHC -fcontext-stack=151 #-} +#endif +{-# OPTIONS_GHC -fno-warn-orphans #-} +module Instances.TreeDiff.Version where + +import Data.TreeDiff +import Distribution.Version (Version, VersionRange) + +instance ToExpr Version where toExpr = defaultExprViaShow +instance ToExpr VersionRange diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/Instances/TreeDiff.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/Instances/TreeDiff.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/Instances/TreeDiff.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/Instances/TreeDiff.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,91 @@ +{-# LANGUAGE CPP #-} +#if __GLASGOW_HASKELL__ >= 800 +{-# OPTIONS_GHC -freduction-depth=0 #-} +#else +{-# OPTIONS_GHC -fcontext-stack=151 #-} +#endif +{-# OPTIONS_GHC -fno-warn-orphans #-} +module Instances.TreeDiff where + +import Data.TreeDiff + +import 
Instances.TreeDiff.Language () +import Instances.TreeDiff.SPDX () +import Instances.TreeDiff.Version () + +------------------------------------------------------------------------------- + +import Distribution.Backpack (OpenModule, OpenUnitId) +import Distribution.Compiler (CompilerFlavor) +import Distribution.InstalledPackageInfo (AbiDependency, ExposedModule, InstalledPackageInfo) +import Distribution.ModuleName (ModuleName) +import Distribution.Package (Dependency, PackageIdentifier, PackageName) +import Distribution.PackageDescription +import Distribution.Types.AbiHash (AbiHash) +import Distribution.Types.ComponentId (ComponentId) +import Distribution.Types.CondTree +import Distribution.Types.ExecutableScope +import Distribution.Types.ExeDependency +import Distribution.Types.ForeignLib +import Distribution.Types.ForeignLibOption +import Distribution.Types.ForeignLibType +import Distribution.Types.IncludeRenaming (IncludeRenaming) +import Distribution.Types.LegacyExeDependency +import Distribution.Types.Mixin +import Distribution.Types.PkgconfigDependency +import Distribution.Types.UnitId (DefUnitId, UnitId) +import Distribution.Types.UnqualComponentName + +------------------------------------------------------------------------------- +-- instances +------------------------------------------------------------------------------- + +instance (Eq a, Show a) => ToExpr (Condition a) where toExpr = defaultExprViaShow +instance (Show a, ToExpr b, ToExpr c, Show b, Show c, Eq a, Eq c, Eq b) => ToExpr (CondTree a b c) +instance (Show a, ToExpr b, ToExpr c, Show b, Show c, Eq a, Eq c, Eq b) => ToExpr (CondBranch a b c) + +instance ToExpr AbiDependency where toExpr = defaultExprViaShow +instance ToExpr AbiHash where toExpr = defaultExprViaShow +instance ToExpr Benchmark +instance ToExpr BenchmarkInterface +instance ToExpr BenchmarkType +instance ToExpr BuildInfo +instance ToExpr BuildType +instance ToExpr CompilerFlavor +instance ToExpr ComponentId where toExpr = 
defaultExprViaShow +instance ToExpr DefUnitId +instance ToExpr Dependency +instance ToExpr ExeDependency where toExpr = defaultExprViaShow +instance ToExpr Executable +instance ToExpr ExecutableScope where toExpr = defaultExprViaShow +instance ToExpr ExposedModule where toExpr = defaultExprViaShow +instance ToExpr Flag +instance ToExpr FlagName where toExpr = defaultExprViaShow +instance ToExpr ForeignLib +instance ToExpr ForeignLibOption +instance ToExpr ForeignLibType +instance ToExpr GenericPackageDescription +instance ToExpr IncludeRenaming +instance ToExpr InstalledPackageInfo +instance ToExpr LegacyExeDependency where toExpr = defaultExprViaShow +instance ToExpr LibVersionInfo where toExpr = defaultExprViaShow +instance ToExpr Library +instance ToExpr Mixin where toExpr = defaultExprViaShow +instance ToExpr ModuleName where toExpr = defaultExprViaShow +instance ToExpr ModuleReexport +instance ToExpr ModuleRenaming +instance ToExpr OpenModule +instance ToExpr OpenUnitId +instance ToExpr PackageDescription +instance ToExpr PackageIdentifier +instance ToExpr PackageName where toExpr = defaultExprViaShow +instance ToExpr PkgconfigDependency where toExpr = defaultExprViaShow +instance ToExpr RepoKind +instance ToExpr RepoType +instance ToExpr SetupBuildInfo +instance ToExpr SourceRepo +instance ToExpr TestSuite +instance ToExpr TestSuiteInterface +instance ToExpr TestType +instance ToExpr UnitId where toExpr = defaultExprViaShow +instance ToExpr UnqualComponentName where toExpr = defaultExprViaShow diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/misc/ghc-supported-languages.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/misc/ghc-supported-languages.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/misc/ghc-supported-languages.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/misc/ghc-supported-languages.hs 2018-11-26 
08:42:55.000000000 +0000 @@ -0,0 +1,97 @@ +-- | A test program to check that ghc has got all of its extensions registered +-- +module Main where + +import Language.Haskell.Extension +import Distribution.Text +import Distribution.Simple.Utils +import Distribution.Verbosity + +import Data.List ((\\)) +import Data.Maybe +import Control.Applicative +import Control.Monad +import System.Environment +import System.Exit + +-- | A list of GHC extensions that are deliberately not registered, +-- e.g. due to being experimental and not ready for public consumption +-- +exceptions = map readExtension [] + +checkProblems :: [Extension] -> [String] +checkProblems implemented = + + let unregistered = + [ ext | ext <- implemented -- extensions that ghc knows about + , not (registered ext) -- but that are not registered + , ext `notElem` exceptions ] -- except for the exceptions + + -- check if someone has forgotten to update the exceptions list... + + -- exceptions that are not implemented + badExceptions = exceptions \\ implemented + + -- exceptions that are now registered + badExceptions' = filter registered exceptions + + in catMaybes + [ check unregistered $ unlines + [ "The following extensions are known to GHC but are not in the " + , "extension registry in Language.Haskell.Extension." + , " " ++ intercalate "\n " (map display unregistered) + , "If these extensions are ready for public consumption then they " + , "should be registered. If they are still experimental and you " + , "think they are not ready to be registered then please add them " + , "to the exceptions list in this test program along with an " + , "explanation." + ] + , check badExceptions $ unlines + [ "Error in the extension exception list. The following extensions" + , "are listed as exceptions but are not even implemented by GHC:" + , " " ++ intercalate "\n " (map display badExceptions) + , "Please fix this test program by correcting the list of" + , "exceptions." 
+ ] + , check badExceptions' $ unlines + [ "Error in the extension exception list. The following extensions" + , "are listed as exceptions to registration but they are in fact" + , "now registered in Language.Haskell.Extension:" + , " " ++ intercalate "\n " (map display badExceptions') + , "Please fix this test program by correcting the list of" + , "exceptions." + ] + ] + where + registered (UnknownExtension _) = False + registered _ = True + + check [] _ = Nothing + check _ i = Just i + + +main = topHandler $ do + [ghcPath] <- getArgs + exts <- getExtensions ghcPath + let problems = checkProblems exts + putStrLn (intercalate "\n" problems) + if null problems + then exitSuccess + else exitFailure + +getExtensions :: FilePath -> IO [Extension] +getExtensions ghcPath = + map readExtension . lines + <$> rawSystemStdout normal ghcPath ["--supported-languages"] + +readExtension :: String -> Extension +readExtension str = handleNoParse $ do + -- GHC defines extensions in a positive way, Cabal defines them + -- relative to H98 so we try parsing ("No" ++ extName) first + ext <- simpleParse ("No" ++ str) + case ext of + UnknownExtension _ -> simpleParse str + _ -> return ext + where + handleNoParse :: Maybe Extension -> Extension + handleNoParse = fromMaybe (error $ "unparsable extension " ++ show str) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/common1.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/common1.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/common1.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/common1.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,29 @@ +cabal-version: 2.1 +name: common +version: 0 +synopsis: Common-stanza demo demo +build-type: Simple + +source-repository head + Type: git + Location: 
https://github.com/hvr/-.git + +common windows + if os(windows) + build-depends: Win32 + +-- Non-existing common stanza +common deps + import: windo + build-depends: + base >=4.10 && <4.11, + containers + +library + import: deps + + default-language: Haskell2010 + exposed-modules: ElseIf + + build-depends: + ghc-prim diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/common1.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/common1.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/common1.errors 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/common1.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,2 @@ +VERSION: Just (mkVersion [2,1]) +common1.cabal:17:3: Undefined common stanza imported: windo diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/common2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/common2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/common2.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/common2.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,29 @@ +cabal-version: 2.1 +name: common +version: 0 +synopsis: Common-stanza demo demo +build-type: Simple + +source-repository head + Type: git + Location: https://github.com/hvr/-.git + +-- Used before use +common deps + import: windows + build-depends: + base >=4.10 && <4.11, + containers + +common windows + if os(windows) + build-depends: Win32 + +library + import: deps + + default-language: Haskell2010 + exposed-modules: ElseIf + + build-depends: + ghc-prim diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/common2.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/common2.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/common2.errors 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/common2.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,2 @@ +VERSION: Just (mkVersion [2,1]) +common2.cabal:13:3: Undefined common stanza imported: windows diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/common3.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/common3.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/common3.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/common3.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,31 @@ +cabal-version: 2.1 +name: common +version: 0 +synopsis: Common-stanza demo demo +build-type: Simple + +source-repository head + Type: git + Location: https://github.com/hvr/-.git + +common windows + if os(windows) + build-depends: Win32 + +common deps + import: windows + build-depends: + base >=4.10 && <4.11, + containers + +-- Duplicate +common deps + +library + import: deps + + default-language: Haskell2010 + exposed-modules: ElseIf + + build-depends: + ghc-prim diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/common3.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/common3.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/common3.errors 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/common3.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,2 @@ +VERSION: Just (mkVersion [2,1]) +common3.cabal:22:1: Duplicate common stanza: deps diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat2.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat2.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,16 @@ +name: common +version: 0 +synopsis: Common-stanza demo demo +build-type: Simple +cabal-version: 2.1 + +source-repository head + Type: git + Location: https://github.com/hvr/-.git + +library + default-language: Haskell2010 + exposed-modules: ElseIf + + build-depends: + ghc-prim diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat2.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat2.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat2.errors 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat2.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,2 @@ +VERSION: Just (mkVersion [2,1]) +forward-compat2.cabal:5:1: cabal-version should be at the beginning of the file starting with spec version 2.2. 
See https://github.com/haskell/cabal/issues/4899 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat3.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat3.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat3.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat3.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,16 @@ +cabal-version: 99999.99 +name: forward-compat +version: 0 +synopsis: Forward compat, too new cabal-version: we fail. +build-type: Simple + +source-repository head + Type: git + Location: https://github.com/hvr/-.git + +library + default-language: Haskell2010 + exposed-modules: ElseIf + + build-depends: + ghc-prim diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat3.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat3.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat3.errors 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat3.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,2 @@ +VERSION: Just (mkVersion [99999,99]) +forward-compat3.cabal:0:0: Unsupported cabal-version. See https://github.com/haskell/cabal/issues/4899. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,4 @@ +cabal-version: 99999.9 +name: future +============ +Lexically completely changed future diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat.errors 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/forward-compat.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,5 @@ +VERSION: Just (mkVersion [99999,9]) +forward-compat.cabal:3:1: "the input" (line 3, column 1): +unexpected operator "============" +expecting field or section name +forward-compat.cabal:0:0: Unsupported cabal-version. See https://github.com/haskell/cabal/issues/4899. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055-2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055-2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055-2.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055-2.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,25 @@ +name: issue +version: 5055 +synopsis: no type in all branches +description: no type in all branches. +license: BSD3 +category: Test +build-type: Simple +cabal-version: >=2.0 + +executable flag-test-exe + main-is: FirstMain.hs + build-depends: base >= 4.8 && < 5 + default-language: Haskell2010 + +test-suite flag-cabal-test + -- TODO: fix so `type` can be on the top level + build-depends: base >= 4.8 && < 5 + default-language: Haskell2010 + + if os(windows) + main-is: FirstMain.hs + type: exitcode-stdio-1.0 + else: + main-is: SecondMain.hs + type: exitcode-stdio-1.0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055-2.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055-2.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055-2.errors 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055-2.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,2 @@ +VERSION: Just (mkVersion [2,0]) +issue-5055-2.cabal:15:1: Test suite "flag-cabal-test" is missing required field "type" or the field is not present in all conditional branches. 
The available test types are: exitcode-stdio-1.0, detailed-0.9 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,21 @@ +name: issue +version: 5055 +synopsis: no type in all branches +description: no type in all branches. +license: BSD3 +category: Test +build-type: Simple +cabal-version: >=2.0 + +executable flag-test-exe + main-is: FirstMain.hs + build-depends: base >= 4.8 && < 5 + default-language: Haskell2010 + +test-suite flag-cabal-test + build-depends: base >= 4.8 && < 5 + default-language: Haskell2010 + + if os(windows) + main-is: FirstMain.hs + type: exitcode-stdio-1.0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055.errors 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/issue-5055.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,2 @@ +VERSION: Just (mkVersion [2,0]) +issue-5055.cabal:15:1: Test suite "flag-cabal-test" is missing required field "type" or the field is not present in all conditional branches. 
The available test types are: exitcode-stdio-1.0, detailed-0.9 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/leading-comma.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/leading-comma.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/leading-comma.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/leading-comma.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,20 @@ +name: leading-comma +version: 0 +synopsis: leading comma, trailing comma, or ordinary +build-type: Simple +-- too small cabal-version +cabal-version: 2.0 + +library + default-language: Haskell2010 + exposed-modules: LeadingComma + + build-depends: base, containers + + build-depends: + deepseq, + transformers, + + build-depends: + , filepath + , directory diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/leading-comma.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/leading-comma.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/leading-comma.errors 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/leading-comma.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,8 @@ +VERSION: Just (mkVersion [2,0]) +leading-comma.cabal:16:18: +unexpected end of input +expecting white space + +deepseq, +transformers, + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion2.cabal 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion2.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,10 @@ +name: noVersion +version: 0 +synopsis: ^>= in build-depends +build-type: Simple +cabal-version: 1.20 + +library + default-language: Haskell2010 + exposed-modules: ElseIf + build-depends: bad-package ^>= 2.0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion2.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion2.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion2.errors 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion2.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,7 @@ +VERSION: Just (mkVersion [1,20]) +noVersion2.cabal:10:40: +unexpected major bounded version syntax (caret, ^>=) used. To use this syntax the package need to specify at least 'cabal-version: 2.0'. Alternatively, if broader compatibility is important then use: >=2.0 && <2.1 +expecting "." 
or "-" + +bad-package ^>= 2.0 + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,10 @@ +name: noVersion +version: 0 +synopsis: -none in build-depends +build-type: Simple +cabal-version: 1.20 + +library + default-language: Haskell2010 + exposed-modules: ElseIf + build-depends: bad-package -none diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion.errors 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/noVersion.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,6 @@ +VERSION: Just (mkVersion [1,20]) +noVersion.cabal:10:38: +unexpected -none version range used. To use this syntax the package needs to specify at least 'cabal-version: 1.22'. Alternatively, if broader compatibility is important then use <0 or other empty range. 
+ +bad-package -none + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/range-ge-wild.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/range-ge-wild.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/range-ge-wild.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/range-ge-wild.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,10 @@ +name: range-ge-wild +version: 0 +synopsis: Wild range after non-== op +build-type: Simple +cabal-version: >=1.10 + +library + build-depends: + -- comment, to check that position is right + base >= 4.* diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/range-ge-wild.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/range-ge-wild.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/range-ge-wild.errors 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/range-ge-wild.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,6 @@ +VERSION: Just (mkVersion [1,10]) +range-ge-wild.cabal:10:16: +unexpected wild-card version after non-== operator: ">=" + +base >= 4.* + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-1.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-1.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-1.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-1.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,9 @@ +cabal-version: 2.2 +name: spdx +version: 0 +synopsis: 
testing positive parsing of spdx identifiers +build-type: Simple +license: BSD3 + +library + default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-1.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-1.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-1.errors 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-1.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,6 @@ +VERSION: Just (mkVersion [2,2]) +spdx-1.cabal:6:26: +unexpected Unknown SPDX license identifier: 'BSD3' Do you mean BSD-3-Clause? + +BSD3 + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-2.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-2.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,9 @@ +cabal-version: 2.4 +name: spdx +version: 0 +synopsis: testing positive parsing of spdx identifiers +build-type: Simple +license: AGPL-1.0 + +library + default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-2.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-2.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-2.errors 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-2.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,6 @@ +VERSION: Just (mkVersion 
[2,4]) +spdx-2.cabal:6:30: +unexpected Unknown SPDX license identifier: 'AGPL-1.0' + +AGPL-1.0 + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-3.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-3.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-3.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-3.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,9 @@ +cabal-version: 2.2 +name: spdx +version: 0 +synopsis: testing positive parsing of spdx identifiers +build-type: Simple +license: AGPL-1.0-only + +library + default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-3.errors cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-3.errors --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-3.errors 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/errors/spdx-3.errors 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,6 @@ +VERSION: Just (mkVersion [2,2]) +spdx-3.cabal:6:35: +unexpected Unknown SPDX license identifier: 'AGPL-1.0-only' + +AGPL-1.0-only + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/Includes2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/Includes2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/Includes2.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/Includes2.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,27 @@ +name: z-Includes2-z-mylib +version: 0.1.0.0 +id: 
Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n +instantiated-with: Database=Includes2-0.1.0.0-inplace-mysql:Database.MySQL +package-name: Includes2 +lib-name: mylib +key: Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n +license: BSD3 +maintainer: ezyang@cs.stanford.edu +author: Edward Z. Yang +exposed: False +indefinite: False +exposed-modules: + Mine +abi: inplace +trusted: False +import-dirs: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n +library-dirs: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n +dynamic-library-dirs: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n +data-dir: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2 +hs-libraries: HSIncludes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n +depends: + base-4.10.1.0 Includes2-0.1.0.0-inplace-mysql +abi-depends: base-4.10.1.0=35a7f6be752ee4f7385cb5bf28677879 + Includes2-0.1.0.0-inplace-mysql=inplace +haddock-interfaces: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/doc/html/Includes2/Includes2.haddock +haddock-html: 
/home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/doc/html/Includes2 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/Includes2.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/Includes2.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/Includes2.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/Includes2.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,51 @@ +InstalledPackageInfo + {abiDepends = [`AbiDependency {depUnitId = UnitId "base-4.10.1.0", depAbiHash = AbiHash "35a7f6be752ee4f7385cb5bf28677879"}`, + `AbiDependency {depUnitId = UnitId "Includes2-0.1.0.0-inplace-mysql", depAbiHash = AbiHash "inplace"}`], + abiHash = `AbiHash "inplace"`, + author = "Edward Z. 
Yang", + category = "", + ccOptions = [], + compatPackageKey = "Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n", + copyright = "", + cxxOptions = [], + dataDir = "/home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2", + depends = [`UnitId "base-4.10.1.0"`, + `UnitId "Includes2-0.1.0.0-inplace-mysql"`], + description = "", + exposed = False, + exposedModules = [`ExposedModule {exposedName = ModuleName ["Mine"], exposedReexport = Nothing}`], + extraGHCiLibraries = [], + extraLibraries = [], + frameworkDirs = [], + frameworks = [], + haddockHTMLs = ["/home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/doc/html/Includes2"], + haddockInterfaces = ["/home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/doc/html/Includes2/Includes2.haddock"], + hiddenModules = [], + homepage = "", + hsLibraries = ["HSIncludes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n"], + importDirs = ["/home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n"], + includeDirs = [], + includes = [], + indefinite = False, + installedComponentId_ = `ComponentId ""`, + installedUnitId = `UnitId "Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n"`, + instantiatedWith = [_×_ + `ModuleName ["Database"]` + (OpenModule + (DefiniteUnitId + (DefUnitId `UnitId "Includes2-0.1.0.0-inplace-mysql"`)) + `ModuleName ["Database","MySQL"]`)], + ldOptions = [], + libraryDirs = 
["/home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n"], + libraryDynDirs = ["/home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n"], + license = Right BSD3, + maintainer = "ezyang@cs.stanford.edu", + pkgRoot = Nothing, + pkgUrl = "", + sourceLibName = Just `UnqualComponentName "mylib"`, + sourcePackageId = PackageIdentifier + {pkgName = `PackageName "Includes2"`, + pkgVersion = `mkVersion [0,1,0,0]`}, + stability = "", + synopsis = "", + trusted = False} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/Includes2.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/Includes2.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/Includes2.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/Includes2.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,24 @@ +name: z-Includes2-z-mylib +version: 0.1.0.0 +package-name: Includes2 +lib-name: mylib +id: Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n +instantiated-with: Database=Includes2-0.1.0.0-inplace-mysql:Database.MySQL +key: Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n +license: BSD3 +maintainer: ezyang@cs.stanford.edu +author: Edward Z. 
Yang +abi: inplace +exposed-modules: + Mine +import-dirs: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n +library-dirs: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n +dynamic-library-dirs: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/build/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n +data-dir: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2 +hs-libraries: HSIncludes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n +depends: + base-4.10.1.0 Includes2-0.1.0.0-inplace-mysql +abi-depends: base-4.10.1.0=35a7f6be752ee4f7385cb5bf28677879 + Includes2-0.1.0.0-inplace-mysql=inplace +haddock-interfaces: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/doc/html/Includes2/Includes2.haddock +haddock-html: /home/travis/build/haskell/cabal/cabal-testsuite/PackageTests/Backpack/Includes2/cabal-internal.dist/work/./dist/build/x86_64-linux/ghc-8.2.2/Includes2-0.1.0.0/l/mylib/Includes2-0.1.0.0-inplace-mylib+3gY9SyjX86dBypHcOaev1n/doc/html/Includes2 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/internal-preprocessor-test.cabal 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/internal-preprocessor-test.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/internal-preprocessor-test.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/internal-preprocessor-test.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,26 @@ +name: internal-preprocessor-test +version: 0.1.0.0 +id: internal-preprocessor-test-0.1.0.0 +key: internal-preprocessor-test-0.1.0.0 +license: GPL-3 +maintainer: mikhail.glushenkov@gmail.com +synopsis: Internal custom preprocessor example. +description: + See https://github.com/haskell/cabal/issues/1541#issuecomment-30155513 +category: Testing +author: Mikhail Glushenkov +exposed: True +exposed-modules: + A +trusted: False +import-dirs: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build +library-dirs: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build + /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build +data-dir: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess +hs-libraries: HSinternal-preprocessor-test-0.1.0.0 +depends: + base-4.8.2.0-0d6d1084fbc041e1cded9228e80e264d +haddock-interfaces: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/doc/html/internal-preprocessor-test/internal-preprocessor-test.haddock +haddock-html: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/doc/html/internal-preprocessor-test +pkgroot: "/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist" + diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/internal-preprocessor-test.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/internal-preprocessor-test.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/internal-preprocessor-test.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/internal-preprocessor-test.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,46 @@ +InstalledPackageInfo + {abiDepends = [], + abiHash = `AbiHash ""`, + author = "Mikhail Glushenkov", + category = "Testing", + ccOptions = [], + compatPackageKey = "internal-preprocessor-test-0.1.0.0", + copyright = "", + cxxOptions = [], + dataDir = "/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess", + depends = [`UnitId "base-4.8.2.0-0d6d1084fbc041e1cded9228e80e264d"`], + description = "See https://github.com/haskell/cabal/issues/1541#issuecomment-30155513", + exposed = True, + exposedModules = [`ExposedModule {exposedName = ModuleName ["A"], exposedReexport = Nothing}`], + extraGHCiLibraries = [], + extraLibraries = [], + frameworkDirs = [], + frameworks = [], + haddockHTMLs = ["/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/doc/html/internal-preprocessor-test"], + haddockInterfaces = ["/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/doc/html/internal-preprocessor-test/internal-preprocessor-test.haddock"], + hiddenModules = [], + homepage = "", + hsLibraries = ["HSinternal-preprocessor-test-0.1.0.0"], + importDirs = ["/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build"], + includeDirs = [], + includes = [], + indefinite = False, + installedComponentId_ = `ComponentId ""`, + installedUnitId = 
`UnitId "internal-preprocessor-test-0.1.0.0"`, + instantiatedWith = [], + ldOptions = [], + libraryDirs = ["/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build", + "/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build"], + libraryDynDirs = [], + license = Right (GPL (Just `mkVersion [3]`)), + maintainer = "mikhail.glushenkov@gmail.com", + pkgRoot = Just + "/home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist", + pkgUrl = "", + sourceLibName = Nothing, + sourcePackageId = PackageIdentifier + {pkgName = `PackageName "internal-preprocessor-test"`, + pkgVersion = `mkVersion [0,1,0,0]`}, + stability = "", + synopsis = "Internal custom preprocessor example.", + trusted = False} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/internal-preprocessor-test.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/internal-preprocessor-test.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/internal-preprocessor-test.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/internal-preprocessor-test.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,23 @@ +name: internal-preprocessor-test +version: 0.1.0.0 +id: internal-preprocessor-test-0.1.0.0 +key: internal-preprocessor-test-0.1.0.0 +license: GPL-3 +maintainer: mikhail.glushenkov@gmail.com +author: Mikhail Glushenkov +synopsis: Internal custom preprocessor example. 
+description: + See https://github.com/haskell/cabal/issues/1541#issuecomment-30155513 +category: Testing +exposed: True +exposed-modules: + A +import-dirs: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build +library-dirs: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build + /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/build +data-dir: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess +hs-libraries: HSinternal-preprocessor-test-0.1.0.0 +depends: + base-4.8.2.0-0d6d1084fbc041e1cded9228e80e264d +haddock-interfaces: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/doc/html/internal-preprocessor-test/internal-preprocessor-test.haddock +haddock-html: /home/ogre/Documents/other-haskell/cabal/cabal-testsuite/PackageTests/CustomPreProcess/setup.dist/work/dist/doc/html/internal-preprocessor-test diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/issue-2276-ghc-9885.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/issue-2276-ghc-9885.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/issue-2276-ghc-9885.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/issue-2276-ghc-9885.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,175 @@ +name: transformers +version: 0.5.2.0 +id: transformers-0.5.2.0 +key: transformers-0.5.2.0 +license: BSD3 +maintainer: Ross Paterson +synopsis: Concrete functor and monad transformers +description: + A portable library of functor and monad transformers, inspired by + the paper \"Functional Programming with Overloading and Higher-Order + Polymorphism\", by Mark P 
Jones, + in /Advanced School of Functional Programming/, 1995 + (). + . + This package contains: + . + * the monad transformer class (in "Control.Monad.Trans.Class") + and IO monad class (in "Control.Monad.IO.Class") + . + * concrete functor and monad transformers, each with associated + operations and functions to lift operations associated with other + transformers. + . + The package can be used on its own in portable Haskell code, in + which case operations need to be manually lifted through transformer + stacks (see "Control.Monad.Trans.Class" for some examples). + Alternatively, it can be used with the non-portable monad classes in + the @mtl@ or @monads-tf@ packages, which automatically lift operations + introduced by monad transformers through other transformers. +category: Control +author: Andy Gill, Ross Paterson +exposed: True +indefinite: False +exposed-modules: + Control.Applicative.Backwards Control.Applicative.Lift + Control.Monad.Signatures Control.Monad.Trans.Class + Control.Monad.Trans.Cont Control.Monad.Trans.Error + Control.Monad.Trans.Except Control.Monad.Trans.Identity + Control.Monad.Trans.List Control.Monad.Trans.Maybe + Control.Monad.Trans.RWS Control.Monad.Trans.RWS.Lazy + Control.Monad.Trans.RWS.Strict Control.Monad.Trans.Reader + Control.Monad.Trans.State Control.Monad.Trans.State.Lazy + Control.Monad.Trans.State.Strict Control.Monad.Trans.Writer + Control.Monad.Trans.Writer.Lazy Control.Monad.Trans.Writer.Strict + Data.Functor.Constant Data.Functor.Reverse +abi: e04579c0363c9229351d1a0b394bf2d5 +trusted: False +import-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 +library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 +dynamic-library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 +data-dir: /opt/ghc/8.2.2/share/x86_64-linux-ghc-8.2.2/transformers-0.5.2.0 +hs-libraries: HStransformers-0.5.2.0 +depends: + base-4.10.1.0 +abi-depends: base-4.10.1.0=35a7f6be752ee4f7385cb5bf28677879 +haddock-interfaces: 
/opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0/transformers.haddock +haddock-html: /opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0 +ld-options: -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + 
-lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm 
-lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm 
-lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm 
-lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/issue-2276-ghc-9885.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/issue-2276-ghc-9885.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/issue-2276-ghc-9885.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/issue-2276-ghc-9885.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,2086 @@ +InstalledPackageInfo + {abiDepends = [`AbiDependency {depUnitId = UnitId "base-4.10.1.0", depAbiHash = AbiHash "35a7f6be752ee4f7385cb5bf28677879"}`], + abiHash = `AbiHash "e04579c0363c9229351d1a0b394bf2d5"`, + author = "Andy Gill, Ross Paterson", + category = "Control", + ccOptions = [], + compatPackageKey = "transformers-0.5.2.0", + copyright = "", + cxxOptions = [], + dataDir = "/opt/ghc/8.2.2/share/x86_64-linux-ghc-8.2.2/transformers-0.5.2.0", + depends = [`UnitId "base-4.10.1.0"`], + description = concat + ["A portable library of functor and monad transformers, inspired by\n", + "the paper \\\"Functional Programming with Overloading and Higher-Order\n", + "Polymorphism\\\", by Mark P Jones,\n", + "in /Advanced School of Functional Programming/, 1995\n", + "().\n", + "\n", + "This package contains:\n", + "\n", + "* the monad transformer class (in \"Control.Monad.Trans.Class\")\n", + "and IO monad class (in \"Control.Monad.IO.Class\")\n", + "\n", + "* concrete functor and monad transformers, each with 
associated\n", + "operations and functions to lift operations associated with other\n", + "transformers.\n", + "\n", + "The package can be used on its own in portable Haskell code, in\n", + "which case operations need to be manually lifted through transformer\n", + "stacks (see \"Control.Monad.Trans.Class\" for some examples).\n", + "Alternatively, it can be used with the non-portable monad classes in\n", + "the @mtl@ or @monads-tf@ packages, which automatically lift operations\n", + "introduced by monad transformers through other transformers."], + exposed = True, + exposedModules = [`ExposedModule {exposedName = ModuleName ["Control","Applicative","Backwards"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Applicative","Lift"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Signatures"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Class"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Cont"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Error"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Except"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Identity"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","List"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Maybe"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","RWS"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","RWS","Lazy"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","RWS","Strict"], 
exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Reader"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","State"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","State","Lazy"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","State","Strict"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Writer"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Writer","Lazy"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Writer","Strict"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Data","Functor","Constant"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Data","Functor","Reverse"], exposedReexport = Nothing}`], + extraGHCiLibraries = [], + extraLibraries = [], + frameworkDirs = [], + frameworks = [], + haddockHTMLs = ["/opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0"], + haddockInterfaces = ["/opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0/transformers.haddock"], + hiddenModules = [], + homepage = "", + hsLibraries = ["HStransformers-0.5.2.0"], + importDirs = ["/opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0"], + includeDirs = [], + includes = [], + indefinite = False, + installedComponentId_ = `ComponentId ""`, + installedUnitId = `UnitId "transformers-0.5.2.0"`, + instantiatedWith = [], + ldOptions = ["-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", 
+ "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + 
"-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + 
"-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + 
"-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + 
"-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + 
"-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + 
"-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + 
"-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + 
"-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm", + "-lm"], + libraryDirs = ["/opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0"], + libraryDynDirs = ["/opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0"], + license = Right BSD3, + maintainer = "Ross Paterson ", + pkgRoot = Nothing, + pkgUrl = "", + sourceLibName = Nothing, + sourcePackageId = 
PackageIdentifier + {pkgName = `PackageName "transformers"`, + pkgVersion = `mkVersion [0,5,2,0]`}, + stability = "", + synopsis = "Concrete functor and monad transformers", + trusted = False} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/issue-2276-ghc-9885.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/issue-2276-ghc-9885.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/issue-2276-ghc-9885.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/issue-2276-ghc-9885.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,173 @@ +name: transformers +version: 0.5.2.0 +id: transformers-0.5.2.0 +key: transformers-0.5.2.0 +license: BSD3 +maintainer: Ross Paterson +author: Andy Gill, Ross Paterson +synopsis: Concrete functor and monad transformers +description: + A portable library of functor and monad transformers, inspired by + the paper \"Functional Programming with Overloading and Higher-Order + Polymorphism\", by Mark P Jones, + in /Advanced School of Functional Programming/, 1995 + (). + . + This package contains: + . + * the monad transformer class (in "Control.Monad.Trans.Class") + and IO monad class (in "Control.Monad.IO.Class") + . + * concrete functor and monad transformers, each with associated + operations and functions to lift operations associated with other + transformers. + . + The package can be used on its own in portable Haskell code, in + which case operations need to be manually lifted through transformer + stacks (see "Control.Monad.Trans.Class" for some examples). + Alternatively, it can be used with the non-portable monad classes in + the @mtl@ or @monads-tf@ packages, which automatically lift operations + introduced by monad transformers through other transformers. 
+category: Control +abi: e04579c0363c9229351d1a0b394bf2d5 +exposed: True +exposed-modules: + Control.Applicative.Backwards Control.Applicative.Lift + Control.Monad.Signatures Control.Monad.Trans.Class + Control.Monad.Trans.Cont Control.Monad.Trans.Error + Control.Monad.Trans.Except Control.Monad.Trans.Identity + Control.Monad.Trans.List Control.Monad.Trans.Maybe + Control.Monad.Trans.RWS Control.Monad.Trans.RWS.Lazy + Control.Monad.Trans.RWS.Strict Control.Monad.Trans.Reader + Control.Monad.Trans.State Control.Monad.Trans.State.Lazy + Control.Monad.Trans.State.Strict Control.Monad.Trans.Writer + Control.Monad.Trans.Writer.Lazy Control.Monad.Trans.Writer.Strict + Data.Functor.Constant Data.Functor.Reverse +import-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 +library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 +dynamic-library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 +data-dir: /opt/ghc/8.2.2/share/x86_64-linux-ghc-8.2.2/transformers-0.5.2.0 +hs-libraries: HStransformers-0.5.2.0 +depends: + base-4.10.1.0 +abi-depends: base-4.10.1.0=35a7f6be752ee4f7385cb5bf28677879 +ld-options: -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm 
-lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm 
-lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm 
-lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm 
-lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm + -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm -lm +haddock-interfaces: /opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0/transformers.haddock +haddock-html: /opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/transformers.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/transformers.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/transformers.cabal 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/transformers.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,59 @@ +name: transformers +version: 0.5.2.0 +id: transformers-0.5.2.0 +key: transformers-0.5.2.0 +license: BSD3 +maintainer: Ross Paterson +synopsis: Concrete functor and monad transformers +description: + A portable library of functor and monad transformers, inspired by + the paper \"Functional Programming with Overloading and Higher-Order + Polymorphism\", by Mark P Jones, + in /Advanced School of Functional Programming/, 1995 + (). + . + This package contains: + . + * the monad transformer class (in "Control.Monad.Trans.Class") + and IO monad class (in "Control.Monad.IO.Class") + . + * concrete functor and monad transformers, each with associated + operations and functions to lift operations associated with other + transformers. + . + The package can be used on its own in portable Haskell code, in + which case operations need to be manually lifted through transformer + stacks (see "Control.Monad.Trans.Class" for some examples). + Alternatively, it can be used with the non-portable monad classes in + the @mtl@ or @monads-tf@ packages, which automatically lift operations + introduced by monad transformers through other transformers. 
+category: Control +author: Andy Gill, Ross Paterson +exposed: True +indefinite: False +exposed-modules: + Control.Applicative.Backwards Control.Applicative.Lift + Control.Monad.Signatures Control.Monad.Trans.Class + Control.Monad.Trans.Cont Control.Monad.Trans.Error + Control.Monad.Trans.Except Control.Monad.Trans.Identity + Control.Monad.Trans.List Control.Monad.Trans.Maybe + Control.Monad.Trans.RWS Control.Monad.Trans.RWS.Lazy + Control.Monad.Trans.RWS.Strict Control.Monad.Trans.Reader + Control.Monad.Trans.State Control.Monad.Trans.State.Lazy + Control.Monad.Trans.State.Strict Control.Monad.Trans.Writer + Control.Monad.Trans.Writer.Lazy Control.Monad.Trans.Writer.Strict + Data.Functor.Constant Data.Functor.Reverse +abi: e04579c0363c9229351d1a0b394bf2d5 +trusted: False +import-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 +library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 +dynamic-library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 +data-dir: /opt/ghc/8.2.2/share/x86_64-linux-ghc-8.2.2/transformers-0.5.2.0 +hs-libraries: HStransformers-0.5.2.0 +depends: + base-4.10.1.0 +abi-depends: base-4.10.1.0=35a7f6be752ee4f7385cb5bf28677879 +haddock-interfaces: /opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0/transformers.haddock +haddock-html: /opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0 +pkgroot: "/opt/ghc/8.2.2/lib/ghc-8.2.2" + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/transformers.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/transformers.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/transformers.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/transformers.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,86 @@ +InstalledPackageInfo + {abiDepends = [`AbiDependency 
{depUnitId = UnitId "base-4.10.1.0", depAbiHash = AbiHash "35a7f6be752ee4f7385cb5bf28677879"}`], + abiHash = `AbiHash "e04579c0363c9229351d1a0b394bf2d5"`, + author = "Andy Gill, Ross Paterson", + category = "Control", + ccOptions = [], + cxxOptions = [], + compatPackageKey = "transformers-0.5.2.0", + copyright = "", + dataDir = "/opt/ghc/8.2.2/share/x86_64-linux-ghc-8.2.2/transformers-0.5.2.0", + depends = [`UnitId "base-4.10.1.0"`], + description = concat + ["A portable library of functor and monad transformers, inspired by\n", + "the paper \\\"Functional Programming with Overloading and Higher-Order\n", + "Polymorphism\\\", by Mark P Jones,\n", + "in /Advanced School of Functional Programming/, 1995\n", + "().\n", + "\n", + "This package contains:\n", + "\n", + "* the monad transformer class (in \"Control.Monad.Trans.Class\")\n", + "and IO monad class (in \"Control.Monad.IO.Class\")\n", + "\n", + "* concrete functor and monad transformers, each with associated\n", + "operations and functions to lift operations associated with other\n", + "transformers.\n", + "\n", + "The package can be used on its own in portable Haskell code, in\n", + "which case operations need to be manually lifted through transformer\n", + "stacks (see \"Control.Monad.Trans.Class\" for some examples).\n", + "Alternatively, it can be used with the non-portable monad classes in\n", + "the @mtl@ or @monads-tf@ packages, which automatically lift operations\n", + "introduced by monad transformers through other transformers."], + exposed = True, + exposedModules = [`ExposedModule {exposedName = ModuleName ["Control","Applicative","Backwards"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Applicative","Lift"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Signatures"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Class"], exposedReexport = Nothing}`, + 
`ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Cont"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Error"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Except"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Identity"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","List"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Maybe"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","RWS"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","RWS","Lazy"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","RWS","Strict"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Reader"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","State"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","State","Lazy"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","State","Strict"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Writer"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Writer","Lazy"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Control","Monad","Trans","Writer","Strict"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Data","Functor","Constant"], exposedReexport = Nothing}`, + `ExposedModule {exposedName = ModuleName ["Data","Functor","Reverse"], exposedReexport = Nothing}`], + 
extraGHCiLibraries = [], + extraLibraries = [], + frameworkDirs = [], + frameworks = [], + haddockHTMLs = ["/opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0"], + haddockInterfaces = ["/opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0/transformers.haddock"], + hiddenModules = [], + homepage = "", + hsLibraries = ["HStransformers-0.5.2.0"], + importDirs = ["/opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0"], + includeDirs = [], + includes = [], + indefinite = False, + installedComponentId_ = `ComponentId ""`, + installedUnitId = `UnitId "transformers-0.5.2.0"`, + instantiatedWith = [], + ldOptions = [], + libraryDirs = ["/opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0"], + libraryDynDirs = ["/opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0"], + license = Right BSD3, + maintainer = "Ross Paterson ", + pkgRoot = Just "/opt/ghc/8.2.2/lib/ghc-8.2.2", + pkgUrl = "", + sourceLibName = Nothing, + sourcePackageId = PackageIdentifier + {pkgName = `PackageName "transformers"`, + pkgVersion = `mkVersion [0,5,2,0]`}, + stability = "", + synopsis = "Concrete functor and monad transformers", + trusted = False} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/transformers.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/transformers.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/ipi/transformers.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/ipi/transformers.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,55 @@ +name: transformers +version: 0.5.2.0 +id: transformers-0.5.2.0 +key: transformers-0.5.2.0 +license: BSD3 +maintainer: Ross Paterson +author: Andy Gill, Ross Paterson +synopsis: Concrete functor and monad transformers +description: + A portable library of functor and monad transformers, inspired by + the paper 
\"Functional Programming with Overloading and Higher-Order + Polymorphism\", by Mark P Jones, + in /Advanced School of Functional Programming/, 1995 + (). + . + This package contains: + . + * the monad transformer class (in "Control.Monad.Trans.Class") + and IO monad class (in "Control.Monad.IO.Class") + . + * concrete functor and monad transformers, each with associated + operations and functions to lift operations associated with other + transformers. + . + The package can be used on its own in portable Haskell code, in + which case operations need to be manually lifted through transformer + stacks (see "Control.Monad.Trans.Class" for some examples). + Alternatively, it can be used with the non-portable monad classes in + the @mtl@ or @monads-tf@ packages, which automatically lift operations + introduced by monad transformers through other transformers. +category: Control +abi: e04579c0363c9229351d1a0b394bf2d5 +exposed: True +exposed-modules: + Control.Applicative.Backwards Control.Applicative.Lift + Control.Monad.Signatures Control.Monad.Trans.Class + Control.Monad.Trans.Cont Control.Monad.Trans.Error + Control.Monad.Trans.Except Control.Monad.Trans.Identity + Control.Monad.Trans.List Control.Monad.Trans.Maybe + Control.Monad.Trans.RWS Control.Monad.Trans.RWS.Lazy + Control.Monad.Trans.RWS.Strict Control.Monad.Trans.Reader + Control.Monad.Trans.State Control.Monad.Trans.State.Lazy + Control.Monad.Trans.State.Strict Control.Monad.Trans.Writer + Control.Monad.Trans.Writer.Lazy Control.Monad.Trans.Writer.Strict + Data.Functor.Constant Data.Functor.Reverse +import-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 +library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 +dynamic-library-dirs: /opt/ghc/8.2.2/lib/ghc-8.2.2/transformers-0.5.2.0 +data-dir: /opt/ghc/8.2.2/share/x86_64-linux-ghc-8.2.2/transformers-0.5.2.0 +hs-libraries: HStransformers-0.5.2.0 +depends: + base-4.10.1.0 +abi-depends: base-4.10.1.0=35a7f6be752ee4f7385cb5bf28677879 
+haddock-interfaces: /opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0/transformers.haddock +haddock-html: /opt/ghc/8.2.2/share/doc/ghc-8.2.2/html/libraries/transformers-0.5.2.0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/bad-glob-syntax.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/bad-glob-syntax.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/bad-glob-syntax.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/bad-glob-syntax.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,16 @@ +cabal-version: 2.2 +name: bad-glob-syntax +version: 0 +extra-source-files: + foo/blah-*.hs + foo/*/bar +license: BSD-3-Clause +synopsis: no +description: none +category: Test +maintainer: none + +library + default-language: Haskell2010 + exposed-modules: + Foo diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/bad-glob-syntax.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/bad-glob-syntax.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/bad-glob-syntax.check 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/bad-glob-syntax.check 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,2 @@ +In the 'extra-source-files' field: invalid file glob 'foo/blah-*.hs'. Wildcards '*' may only totally replace the file's base name, not only parts of it. +In the 'extra-source-files' field: invalid file glob 'foo/*/bar'. A wildcard '**' is only allowed as the final parent directory. Stars must not otherwise appear in the parent directories. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cc-options-with-optimization.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cc-options-with-optimization.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cc-options-with-optimization.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cc-options-with-optimization.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,15 @@ +cabal-version: 2.2 +category: test +description: test a build check involving C++-options field +license: BSD-3-Clause +maintainer: me@example.com +name: cxx-options-with-optimization +synopsis: test a build check +version: 1 + +library + build-depends: base >= 4.9 && <4.10 + cc-options: -O2 + default-language: Haskell2010 + exposed-modules: Prelude + hs-source-dirs: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cc-options-with-optimization.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cc-options-with-optimization.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cc-options-with-optimization.check 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cc-options-with-optimization.check 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1 @@ +'cc-options: -O[n]' is generally not needed. When building with optimisations Cabal automatically adds '-O2' for C code. Setting it yourself interferes with the --disable-optimization flag. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common2.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common2.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,43 @@ +cabal-version: 2.1 +name: common +version: 0 +synopsis: Common-stanza demo demo +build-type: Simple + +source-repository head + Type: git + Location: https://github.com/hvr/-.git + +common win-dows + if os(windows) + build-depends: Win32 + +common deps + import: win-dows + buildable: True + build-depends: + base >=4.10 && <4.11, + containers + +library + import: deps + + default-language: Haskell2010 + exposed-modules: ElseIf + + build-depends: + ghc-prim + +test-suite tests + import: deps, win-dows + + -- buildable fields verify that we don't have duplicate field warnings + buildable: True + if os(windows) + buildable: False + + type: exitcode-stdio-1.0 + main-is: Tests.hs + + build-depends: + HUnit diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common2.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common2.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common2.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common2.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,411 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `Var (OS Windows)`, + 
condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "Win32"` + AnyVersion], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "Win32"` + AnyVersion], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}}], + condTreeConstraints = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [4,10]`) + (EarlierVersion `mkVersion [4,11]`)), + Dependency `PackageName "containers"` AnyVersion, + Dependency `PackageName "ghc-prim"` AnyVersion], + condTreeData = Library + {exposedModules = [`ModuleName ["ElseIf"]`], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs 
= [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [4,10]`) + (EarlierVersion + `mkVersion [4,11]`)), + Dependency + `PackageName "containers"` + AnyVersion, + Dependency + `PackageName "ghc-prim"` + AnyVersion], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [_×_ + `UnqualComponentName "tests"` + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `Var (OS Windows)`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "Win32"` + AnyVersion], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + 
sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "Win32"` + AnyVersion], + virtualModules = []}, + testInterface = TestSuiteUnsupported + (TestTypeUnknown + "" + `mkVersion []`), + testName = `UnqualComponentName ""`}}}, + CondBranch + {condBranchCondition = `Var (OS Windows)`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "Win32"` + AnyVersion], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "Win32"` + AnyVersion], + virtualModules = []}, + testInterface = TestSuiteUnsupported + (TestTypeUnknown + "" + `mkVersion []`), + testName = `UnqualComponentName ""`}}}, + CondBranch + {condBranchCondition = `Var (OS Windows)`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = False, + cSources = [], + ccOptions = [], + cmmOptions = [], + 
cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + testInterface = TestSuiteUnsupported + (TestTypeUnknown + "" + `mkVersion []`), + testName = `UnqualComponentName ""`}}}], + condTreeConstraints = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [4,10]`) + (EarlierVersion `mkVersion [4,11]`)), + Dependency `PackageName "containers"` AnyVersion, + Dependency `PackageName "HUnit"` AnyVersion], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + 
`PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [4,10]`) + (EarlierVersion + `mkVersion [4,11]`)), + Dependency + `PackageName "containers"` + AnyVersion, + Dependency + `PackageName "HUnit"` + AnyVersion], + virtualModules = []}, + testInterface = TestSuiteExeV10 + `mkVersion [1,0]` "Tests.hs", + testName = `UnqualComponentName ""`}}], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "", + benchmarks = [], + bugReports = "", + buildTypeRaw = Just Simple, + category = "", + copyright = "", + customFieldsPD = [], + dataDir = "", + dataFiles = [], + description = "", + executables = [], + extraDocFiles = [], + extraSrcFiles = [], + extraTmpFiles = [], + foreignLibs = [], + homepage = "", + library = Nothing, + licenseFiles = [], + licenseRaw = Left NONE, + maintainer = "", + package = PackageIdentifier + {pkgName = `PackageName "common"`, + pkgVersion = `mkVersion [0]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = [SourceRepo + {repoBranch = Nothing, + repoKind = RepoHead, + repoLocation = Just "https://github.com/hvr/-.git", + repoModule = Nothing, + repoSubdir = Nothing, + repoTag = Nothing, + repoType = Just Git}], + specVersionRaw = Left `mkVersion [2,1]`, + stability = "", + subLibraries = [], + synopsis = "Common-stanza demo demo", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common2.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common2.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common2.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common2.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,41 @@ +cabal-version: 2.1 +name: common +version: 0 +synopsis: Common-stanza demo demo +build-type: 
Simple + +source-repository head + type: git + location: https://github.com/hvr/-.git + +library + exposed-modules: + ElseIf + default-language: Haskell2010 + build-depends: + base >=4.10 && <4.11, + containers -any, + ghc-prim -any + + if os(windows) + build-depends: + Win32 -any + +test-suite tests + type: exitcode-stdio-1.0 + main-is: Tests.hs + build-depends: + base >=4.10 && <4.11, + containers -any, + HUnit -any + + if os(windows) + build-depends: + Win32 -any + + if os(windows) + build-depends: + Win32 -any + + if os(windows) + buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,32 @@ +name: common +version: 0 +synopsis: Common-stanza demo demo +build-type: Simple +cabal-version: >=1.10 + +source-repository head + Type: git + Location: https://github.com/hvr/-.git + +common deps + build-depends: + base >=4.10 && <4.11, + containers + +library + import: deps + + default-language: Haskell2010 + exposed-modules: "ElseIf" + + build-depends: + ghc-prim + +test-suite tests + import: deps + + type: exitcode-stdio-1.0 + main-is: Tests.hs + + build-depends: + HUnit diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common.expr 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,155 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "ghc-prim"` AnyVersion], + condTreeData = Library + {exposedModules = [`ModuleName ["ElseIf"]`], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "ghc-prim"` + AnyVersion], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [_×_ + `UnqualComponentName "tests"` + CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "HUnit"` AnyVersion], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], 
+ cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "HUnit"` + AnyVersion], + virtualModules = []}, + testInterface = TestSuiteExeV10 + `mkVersion [1,0]` "Tests.hs", + testName = `UnqualComponentName ""`}}], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "", + benchmarks = [], + bugReports = "", + buildTypeRaw = Just Simple, + category = "", + copyright = "", + customFieldsPD = [], + dataDir = "", + dataFiles = [], + description = "", + executables = [], + extraDocFiles = [], + extraSrcFiles = [], + extraTmpFiles = [], + foreignLibs = [], + homepage = "", + library = Nothing, + licenseFiles = [], + licenseRaw = Left NONE, + maintainer = "", + package = PackageIdentifier + {pkgName = `PackageName "common"`, + pkgVersion = `mkVersion [0]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = [SourceRepo + {repoBranch = Nothing, + repoKind = RepoHead, + repoLocation = Just "https://github.com/hvr/-.git", + repoModule = Nothing, + repoSubdir = Nothing, + repoTag = Nothing, + repoType = Just Git}], + specVersionRaw = Right (OrLaterVersion `mkVersion [1,10]`), + stability = "", + subLibraries = [], + synopsis = "Common-stanza demo demo", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common.format 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/common.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,25 @@ +common.cabal:26:3: Unknown field: import. You should set cabal-version: 2.2 or larger to use common stanzas +common.cabal:17:3: Unknown field: import. You should set cabal-version: 2.2 or larger to use common stanzas +common.cabal:11:1: Ignoring section: common. You should set cabal-version: 2.2 or larger to use common stanzas. +cabal-version: >=1.10 +name: common +version: 0 +synopsis: Common-stanza demo demo +build-type: Simple + +source-repository head + type: git + location: https://github.com/hvr/-.git + +library + exposed-modules: + ElseIf + default-language: Haskell2010 + build-depends: + ghc-prim -any + +test-suite tests + type: exitcode-stdio-1.0 + main-is: Tests.hs + build-depends: + HUnit -any diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cxx-options-with-optimization.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cxx-options-with-optimization.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cxx-options-with-optimization.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cxx-options-with-optimization.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,15 @@ +cabal-version: 2.2 +category: test +description: test a build check involving C++-options field +license: BSD-3-Clause +maintainer: me@example.com +name: cxx-options-with-optimization +synopsis: test a build check +version: 1 + +library + 
build-depends: base >= 4.9 && <4.10 + cxx-options: -O2 + default-language: Haskell2010 + exposed-modules: Prelude + hs-source-dirs: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cxx-options-with-optimization.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cxx-options-with-optimization.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cxx-options-with-optimization.check 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/cxx-options-with-optimization.check 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1 @@ +'cxx-options: -O[n]' is generally not needed. When building with optimisations Cabal automatically adds '-O2' for C++ code. Setting it yourself interferes with the --disable-optimization flag. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif2.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif2.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,20 @@ +cabal-version: 2.1 +name: elif +version: 0 +synopsis: The elif demo +build-type: Simple + +source-repository head + Type: git + Location: https://github.com/hvr/-.git + +library + default-language: Haskell2010 + exposed-modules: ElseIf + + if os(linux) + build-depends: unix + elif os(windows) + build-depends: Win32 + else + buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif2.expr 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif2.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif2.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif2.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,315 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `Var (OS Linux)`, + condBranchIfFalse = Just + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `Var (OS Windows)`, + condBranchIfFalse = Just + CondNode + {condTreeComponents = [], + condTreeConstraints = [], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = False, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "Win32"` + AnyVersion], 
+ condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "Win32"` + AnyVersion], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}}], + condTreeConstraints = [], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + 
staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "unix"` + AnyVersion], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "unix"` + AnyVersion], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}}], + condTreeConstraints = [], + condTreeData = Library + {exposedModules = [`ModuleName ["ElseIf"]`], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = 
[], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "", + benchmarks = [], + bugReports = "", + buildTypeRaw = Just Simple, + category = "", + copyright = "", + customFieldsPD = [], + dataDir = "", + dataFiles = [], + description = "", + executables = [], + extraDocFiles = [], + extraSrcFiles = [], + extraTmpFiles = [], + foreignLibs = [], + homepage = "", + library = Nothing, + licenseFiles = [], + licenseRaw = Left NONE, + maintainer = "", + package = PackageIdentifier + {pkgName = `PackageName "elif"`, + pkgVersion = `mkVersion [0]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = [SourceRepo + {repoBranch = Nothing, + repoKind = RepoHead, + repoLocation = Just "https://github.com/hvr/-.git", + repoModule = Nothing, + repoSubdir = Nothing, + repoTag = Nothing, + repoType = Just Git}], + specVersionRaw = Left `mkVersion [2,1]`, + stability = "", + subLibraries = [], + synopsis = "The elif demo", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif2.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif2.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif2.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif2.format 2018-11-26 
08:42:54.000000000 +0000 @@ -0,0 +1,25 @@ +cabal-version: 2.1 +name: elif +version: 0 +synopsis: The elif demo +build-type: Simple + +source-repository head + type: git + location: https://github.com/hvr/-.git + +library + exposed-modules: + ElseIf + default-language: Haskell2010 + + if os(linux) + build-depends: + unix -any + else + + if os(windows) + build-depends: + Win32 -any + else + buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,20 @@ +name: elif +version: 0 +synopsis: The elif demo +build-type: Simple +cabal-version: >=1.10 + +source-repository head + Type: git + Location: https://github.com/hvr/-.git + +library + default-language: Haskell2010 + exposed-modules: ElseIf + + if os(linux) + build-depends: unix + elif os(windows) + build-depends: Win32 + else + buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,156 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `Var (OS 
Linux)`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "unix"` + AnyVersion], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "unix"` + AnyVersion], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}}], + condTreeConstraints = [], + condTreeData = Library + {exposedModules = [`ModuleName ["ElseIf"]`], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], 
+ mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "", + benchmarks = [], + bugReports = "", + buildTypeRaw = Just Simple, + category = "", + copyright = "", + customFieldsPD = [], + dataDir = "", + dataFiles = [], + description = "", + executables = [], + extraDocFiles = [], + extraSrcFiles = [], + extraTmpFiles = [], + foreignLibs = [], + homepage = "", + library = Nothing, + licenseFiles = [], + licenseRaw = Left NONE, + maintainer = "", + package = PackageIdentifier + {pkgName = `PackageName "elif"`, + pkgVersion = `mkVersion [0]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = [SourceRepo + {repoBranch = Nothing, + repoKind = RepoHead, + repoLocation = Just "https://github.com/hvr/-.git", + repoModule = Nothing, + repoSubdir = Nothing, + repoTag = Nothing, + repoType = Just Git}], + specVersionRaw = Right (OrLaterVersion `mkVersion [1,10]`), + stability = "", + subLibraries = [], + synopsis = "The elif demo", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/elif.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,20 @@ +elif.cabal:19:3: invalid subsection "else" +elif.cabal:17:3: invalid 
subsection "elif". You should set cabal-version: 2.2 or larger to use elif-conditionals. +cabal-version: >=1.10 +name: elif +version: 0 +synopsis: The elif demo +build-type: Simple + +source-repository head + type: git + location: https://github.com/hvr/-.git + +library + exposed-modules: + ElseIf + default-language: Haskell2010 + + if os(linux) + build-depends: + unix -any diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/encoding-0.8.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/encoding-0.8.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/encoding-0.8.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/encoding-0.8.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,24 @@ +Name: encoding-wrong +Name: encoding +Version: 0.8 +cabal-version: >=1.12 +-- double-dash files +extra-source-files: + -- this is comment + README.md "--" + "--" + +custom-setup + setup-depends: + base < 5, + ghc-prim + +Library + -- version range round trip is better + build-depends: base (> 4.4 || == 4.4) + + Exposed-Modules: + Data.Encoding + + -- options with spaces + GHC-Options: -Wall -O2 -threaded -rtsopts "-with-rtsopts=-N1 -A64m" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/encoding-0.8.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/encoding-0.8.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/encoding-0.8.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/encoding-0.8.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,118 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [], + 
condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "base"` + (VersionRangeParens + (UnionVersionRanges + (LaterVersion `mkVersion [4,4]`) + (ThisVersion `mkVersion [4,4]`)))], + condTreeData = Library + {exposedModules = [`ModuleName ["Data","Encoding"]`], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [_×_ + GHC + ["-Wall", + "-O2", + "-threaded", + "-rtsopts", + "-with-rtsopts=-N1 -A64m"]], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + (VersionRangeParens + (UnionVersionRanges + (LaterVersion + `mkVersion [4,4]`) + (ThisVersion + `mkVersion [4,4]`)))], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "", + benchmarks = [], + bugReports = "", + buildTypeRaw = Nothing, + category = "", + copyright = "", + customFieldsPD = [], + dataDir = "", + dataFiles = [], + description = "", + executables = [], + extraDocFiles = [], + extraSrcFiles = ["README.md", "--", "--"], + extraTmpFiles = [], + foreignLibs = [], 
+ homepage = "", + library = Nothing, + licenseFiles = [], + licenseRaw = Left NONE, + maintainer = "", + package = PackageIdentifier + {pkgName = `PackageName "encoding"`, + pkgVersion = `mkVersion [0,8]`}, + pkgUrl = "", + setupBuildInfo = Just + SetupBuildInfo + {defaultSetupDepends = False, + setupDepends = [Dependency + `PackageName "base"` + (EarlierVersion `mkVersion [5]`), + Dependency + `PackageName "ghc-prim"` + AnyVersion]}, + sourceRepos = [], + specVersionRaw = Right (OrLaterVersion `mkVersion [1,12]`), + stability = "", + subLibraries = [], + synopsis = "", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/encoding-0.8.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/encoding-0.8.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/encoding-0.8.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/encoding-0.8.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,19 @@ +encoding-0.8.cabal:1:1: The field "name" is specified more than once at positions 1:1, 2:1 +cabal-version: >=1.12 +name: encoding +version: 0.8 +extra-source-files: + README.md + "--" + "--" + +custom-setup + setup-depends: base <5, + ghc-prim -any + +library + exposed-modules: + Data.Encoding + ghc-options: -Wall -O2 -threaded -rtsopts "-with-rtsopts=-N1 -A64m" + build-depends: + base (>4.4 || ==4.4) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/extensions-paths-5054.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/extensions-paths-5054.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/extensions-paths-5054.cabal 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/extensions-paths-5054.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,39 @@ +name: extensions-paths +version: 5054 +category: Test +maintainer: Oleg Grenrus +license: BSD3 +license-file: LICENSe +synopsis: Paths_pkg module + "bad" extensions + old cabal +description: + Only cabal-version: 2.2 or later will build Paths_pkg ok with + + * RebindableSyntax and + + * OverloadedLists or OverloadedStrings + + `fromList` or `fromString` will be out-of-scope when compiling Paths_ module. + + Other extensions (like NoImplicitPrelude) were handled before +build-type: Simple +cabal-version: 1.12 + +library + default-language: Haskell2010 + exposed-modules: Issue Paths_extensions_paths + default-extensions: + RebindableSyntax + OverloadedStrings + +test-suite tests + default-language: Haskell2010 + main-is: Test.hs + type: exitcode-stdio-1.0 + if os(linux) + other-modules: Paths_extensions_paths + else + buildable: False + + default-extensions: + OverloadedLists + RebindableSyntax diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/extensions-paths-5054.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/extensions-paths-5054.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/extensions-paths-5054.check 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/extensions-paths-5054.check 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1 @@ +The package uses RebindableSyntax with OverloadedStrings or OverloadedLists in default-extensions, and also Paths_ autogen module. That configuration is known to cause compile failures with Cabal < 2.2. To use these default-extensions with Paths_ autogen module specify at least 'cabal-version: 2.2'. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/generics-sop.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/generics-sop.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/generics-sop.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/generics-sop.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,128 @@ +name: generics-sop +version: 0.3.1.0 +synopsis: Generic Programming using True Sums of Products +description: + A library to support the definition of generic functions. + Datatypes are viewed in a uniform, structured way: + the choice between constructors is represented using an n-ary + sum, and the arguments of each constructor are represented using + an n-ary product. + . + The module "Generics.SOP" is the main module of this library and contains + more detailed documentation. + . + Examples of using this library are provided by the following + packages: + . + * @@ basic examples, + . + * @@ generic pretty printing, + . + * @@ generically computed lenses, + . + * @@ generic JSON conversions. + . + A detailed description of the ideas behind this library is provided by + the paper: + . + * Edsko de Vries and Andres Löh. + . + Workshop on Generic Programming (WGP) 2014. + . 
+license: BSD3 +license-file: LICENSE +author: Edsko de Vries , Andres Löh +maintainer: andres@well-typed.com +category: Generics +build-type: Custom +cabal-version: >=1.10 +extra-source-files: CHANGELOG.md +tested-with: GHC == 7.8.4, GHC == 7.10.3, GHC == 8.0.1, GHC == 8.0.2, GHC == 8.2.1, GHC == 8.3.* + +custom-setup + setup-depends: + base, + Cabal, + cabal-doctest >= 1.0.2 && <1.1 + +source-repository head + type: git + location: https://github.com/well-typed/generics-sop + +library + exposed-modules: Generics.SOP + Generics.SOP.GGP + Generics.SOP.TH + Generics.SOP.Dict + Generics.SOP.Type.Metadata + -- exposed via Generics.SOP: + Generics.SOP.BasicFunctors + Generics.SOP.Classes + Generics.SOP.Constraint + Generics.SOP.Instances + Generics.SOP.Metadata + Generics.SOP.NP + Generics.SOP.NS + Generics.SOP.Universe + Generics.SOP.Sing + build-depends: base >= 4.7 && < 5, + template-haskell >= 2.8 && < 2.13, + ghc-prim >= 0.3 && < 0.6, + deepseq >= 1.3 && < 1.5 + if !impl (ghc >= 7.8) + build-depends: tagged >= 0.7 && < 0.9 + if !impl (ghc >= 8.0) + build-depends: transformers-compat >= 0.3 && < 0.6, + transformers >= 0.3 && < 0.6 + + hs-source-dirs: src + default-language: Haskell2010 + ghc-options: -Wall + default-extensions: CPP + ScopedTypeVariables + TypeFamilies + RankNTypes + TypeOperators + GADTs + ConstraintKinds + MultiParamTypeClasses + TypeSynonymInstances + FlexibleInstances + FlexibleContexts + DeriveFunctor + DeriveFoldable + DeriveTraversable + DefaultSignatures + KindSignatures + DataKinds + FunctionalDependencies + if impl (ghc >= 7.8) + default-extensions: AutoDeriveTypeable + other-extensions: OverloadedStrings + PolyKinds + UndecidableInstances + TemplateHaskell + DeriveGeneric + StandaloneDeriving + if impl (ghc < 7.10) + other-extensions: OverlappingInstances + +test-suite doctests + type: exitcode-stdio-1.0 + main-is: doctests.hs + x-doctest-options: --preserve-it + hs-source-dirs: test + default-language: Haskell2010 + build-depends: base, 
+ doctest >= 0.13 && <0.14 + ghc-options: -Wall -threaded + +test-suite generics-sop-examples + type: exitcode-stdio-1.0 + main-is: Example.hs + other-modules: HTransExample + hs-source-dirs: test + default-language: Haskell2010 + ghc-options: -Wall + build-depends: base >= 4.6 && < 5, + generics-sop diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/generics-sop.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/generics-sop.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/generics-sop.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/generics-sop.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,637 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `CNot (Var (Impl GHC (OrLaterVersion (mkVersion [7,8]))))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "tagged"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,7]`) + (EarlierVersion + `mkVersion [0,9]`))], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + 
installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "tagged"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,7]`) + (EarlierVersion + `mkVersion [0,9]`))], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}}, + CondBranch + {condBranchCondition = `CNot (Var (Impl GHC (OrLaterVersion (mkVersion [8,0]))))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "transformers-compat"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,3]`) + (EarlierVersion + `mkVersion [0,6]`)), + Dependency + `PackageName "transformers"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,3]`) + (EarlierVersion + `mkVersion [0,6]`))], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + 
`PackageName "transformers-compat"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,3]`) + (EarlierVersion + `mkVersion [0,6]`)), + Dependency + `PackageName "transformers"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,3]`) + (EarlierVersion + `mkVersion [0,6]`))], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}}, + CondBranch + {condBranchCondition = `Var (Impl GHC (OrLaterVersion (mkVersion [7,8])))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [EnableExtension + AutoDeriveTypeable], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}}, + CondBranch + {condBranchCondition = `Var (Impl GHC (EarlierVersion (mkVersion [7,10])))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + 
asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [EnableExtension + OverlappingInstances], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}}], + condTreeConstraints = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [4,7]`) + (EarlierVersion `mkVersion [5]`)), + Dependency + `PackageName "template-haskell"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [2,8]`) + (EarlierVersion `mkVersion [2,13]`)), + Dependency + `PackageName "ghc-prim"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [0,3]`) + (EarlierVersion `mkVersion [0,6]`)), + Dependency + `PackageName "deepseq"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [1,3]`) + (EarlierVersion `mkVersion [1,5]`))], + condTreeData = Library + {exposedModules = [`ModuleName ["Generics","SOP"]`, + `ModuleName ["Generics","SOP","GGP"]`, + `ModuleName ["Generics","SOP","TH"]`, + `ModuleName ["Generics","SOP","Dict"]`, + `ModuleName ["Generics","SOP","Type","Metadata"]`, + `ModuleName ["Generics","SOP","BasicFunctors"]`, + `ModuleName ["Generics","SOP","Classes"]`, + `ModuleName ["Generics","SOP","Constraint"]`, + `ModuleName 
["Generics","SOP","Instances"]`, + `ModuleName ["Generics","SOP","Metadata"]`, + `ModuleName ["Generics","SOP","NP"]`, + `ModuleName ["Generics","SOP","NS"]`, + `ModuleName ["Generics","SOP","Universe"]`, + `ModuleName ["Generics","SOP","Sing"]`], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [EnableExtension CPP, + EnableExtension + ScopedTypeVariables, + EnableExtension + TypeFamilies, + EnableExtension + RankNTypes, + EnableExtension + TypeOperators, + EnableExtension + GADTs, + EnableExtension + ConstraintKinds, + EnableExtension + MultiParamTypeClasses, + EnableExtension + TypeSynonymInstances, + EnableExtension + FlexibleInstances, + EnableExtension + FlexibleContexts, + EnableExtension + DeriveFunctor, + EnableExtension + DeriveFoldable, + EnableExtension + DeriveTraversable, + EnableExtension + DefaultSignatures, + EnableExtension + KindSignatures, + EnableExtension + DataKinds, + EnableExtension + FunctionalDependencies], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = ["src"], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [_×_ GHC ["-Wall"]], + otherExtensions = [EnableExtension + OverloadedStrings, + EnableExtension + PolyKinds, + EnableExtension + UndecidableInstances, + EnableExtension + TemplateHaskell, + EnableExtension + DeriveGeneric, + EnableExtension + StandaloneDeriving], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + 
targetBuildDepends = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [4,7]`) + (EarlierVersion + `mkVersion [5]`)), + Dependency + `PackageName "template-haskell"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [2,8]`) + (EarlierVersion + `mkVersion [2,13]`)), + Dependency + `PackageName "ghc-prim"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,3]`) + (EarlierVersion + `mkVersion [0,6]`)), + Dependency + `PackageName "deepseq"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [1,3]`) + (EarlierVersion + `mkVersion [1,5]`))], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [_×_ + `UnqualComponentName "doctests"` + CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency `PackageName "base"` AnyVersion, + Dependency + `PackageName "doctest"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [0,13]`) + (EarlierVersion `mkVersion [0,14]`))], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [_×_ + "x-doctest-options" + "--preserve-it"], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = ["test"], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [_×_ + GHC + ["-Wall", "-threaded"]], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = 
[], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + AnyVersion, + Dependency + `PackageName "doctest"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,13]`) + (EarlierVersion + `mkVersion [0,14]`))], + virtualModules = []}, + testInterface = TestSuiteExeV10 + `mkVersion [1,0]` "doctests.hs", + testName = `UnqualComponentName ""`}}, + _×_ + `UnqualComponentName "generics-sop-examples"` + CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [4,6]`) + (EarlierVersion `mkVersion [5]`)), + Dependency + `PackageName "generics-sop"` AnyVersion], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = ["test"], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [_×_ GHC ["-Wall"]], + otherExtensions = [], + otherLanguages = [], + otherModules = [`ModuleName ["HTransExample"]`], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [4,6]`) + (EarlierVersion + `mkVersion [5]`)), + Dependency + `PackageName "generics-sop"` + AnyVersion], + virtualModules = []}, + testInterface = TestSuiteExeV10 + `mkVersion [1,0]` "Example.hs", + testName = `UnqualComponentName ""`}}], + genPackageFlags 
= [], + packageDescription = PackageDescription + {author = "Edsko de Vries , Andres L\246h ", + benchmarks = [], + bugReports = "", + buildTypeRaw = Just Custom, + category = "Generics", + copyright = "", + customFieldsPD = [], + dataDir = "", + dataFiles = [], + description = concat + ["A library to support the definition of generic functions.\n", + "Datatypes are viewed in a uniform, structured way:\n", + "the choice between constructors is represented using an n-ary\n", + "sum, and the arguments of each constructor are represented using\n", + "an n-ary product.\n", + "\n", + "The module \"Generics.SOP\" is the main module of this library and contains\n", + "more detailed documentation.\n", + "\n", + "Examples of using this library are provided by the following\n", + "packages:\n", + "\n", + "* @@ basic examples,\n", + "\n", + "* @@ generic pretty printing,\n", + "\n", + "* @@ generically computed lenses,\n", + "\n", + "* @@ generic JSON conversions.\n", + "\n", + "A detailed description of the ideas behind this library is provided by\n", + "the paper:\n", + "\n", + "* Edsko de Vries and Andres L\246h.\n", + ".\n", + "Workshop on Generic Programming (WGP) 2014.\n"], + executables = [], + extraDocFiles = [], + extraSrcFiles = ["CHANGELOG.md"], + extraTmpFiles = [], + foreignLibs = [], + homepage = "", + library = Nothing, + licenseFiles = ["LICENSE"], + licenseRaw = Right BSD3, + maintainer = "andres@well-typed.com", + package = PackageIdentifier + {pkgName = `PackageName "generics-sop"`, + pkgVersion = `mkVersion [0,3,1,0]`}, + pkgUrl = "", + setupBuildInfo = Just + SetupBuildInfo + {defaultSetupDepends = False, + setupDepends = [Dependency + `PackageName "base"` AnyVersion, + Dependency + `PackageName "Cabal"` AnyVersion, + Dependency + `PackageName "cabal-doctest"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [1,0,2]`) + (EarlierVersion + `mkVersion [1,1]`))]}, + sourceRepos = [SourceRepo + {repoBranch = Nothing, + repoKind = RepoHead, + 
repoLocation = Just + "https://github.com/well-typed/generics-sop", + repoModule = Nothing, + repoSubdir = Nothing, + repoTag = Nothing, + repoType = Just Git}], + specVersionRaw = Right (OrLaterVersion `mkVersion [1,10]`), + stability = "", + subLibraries = [], + synopsis = "Generic Programming using True Sums of Products", + testSuites = [], + testedWith = [_×_ GHC (ThisVersion `mkVersion [7,8,4]`), + _×_ GHC (ThisVersion `mkVersion [7,10,3]`), + _×_ GHC (ThisVersion `mkVersion [8,0,1]`), + _×_ GHC (ThisVersion `mkVersion [8,0,2]`), + _×_ GHC (ThisVersion `mkVersion [8,2,1]`), + _×_ GHC (WildcardVersion `mkVersion [8,3]`)]}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/generics-sop.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/generics-sop.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/generics-sop.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/generics-sop.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,121 @@ +cabal-version: >=1.10 +name: generics-sop +version: 0.3.1.0 +license: BSD3 +license-file: LICENSE +maintainer: andres@well-typed.com +author: Edsko de Vries , Andres Löh +tested-with: ghc ==7.8.4 ghc ==7.10.3 ghc ==8.0.1 ghc ==8.0.2 + ghc ==8.2.1 ghc ==8.3.* +synopsis: Generic Programming using True Sums of Products +description: + A library to support the definition of generic functions. + Datatypes are viewed in a uniform, structured way: + the choice between constructors is represented using an n-ary + sum, and the arguments of each constructor are represented using + an n-ary product. + . + The module "Generics.SOP" is the main module of this library and contains + more detailed documentation. + . + Examples of using this library are provided by the following + packages: + . 
+ * @@ basic examples, + . + * @@ generic pretty printing, + . + * @@ generically computed lenses, + . + * @@ generic JSON conversions. + . + A detailed description of the ideas behind this library is provided by + the paper: + . + * Edsko de Vries and Andres Löh. + . + Workshop on Generic Programming (WGP) 2014. + . +category: Generics +build-type: Custom +extra-source-files: + CHANGELOG.md + +source-repository head + type: git + location: https://github.com/well-typed/generics-sop + +custom-setup + setup-depends: base -any, + Cabal -any, + cabal-doctest >=1.0.2 && <1.1 + +library + exposed-modules: + Generics.SOP + Generics.SOP.GGP + Generics.SOP.TH + Generics.SOP.Dict + Generics.SOP.Type.Metadata + Generics.SOP.BasicFunctors + Generics.SOP.Classes + Generics.SOP.Constraint + Generics.SOP.Instances + Generics.SOP.Metadata + Generics.SOP.NP + Generics.SOP.NS + Generics.SOP.Universe + Generics.SOP.Sing + hs-source-dirs: src + default-language: Haskell2010 + default-extensions: CPP ScopedTypeVariables TypeFamilies RankNTypes + TypeOperators GADTs ConstraintKinds MultiParamTypeClasses + TypeSynonymInstances FlexibleInstances FlexibleContexts + DeriveFunctor DeriveFoldable DeriveTraversable DefaultSignatures + KindSignatures DataKinds FunctionalDependencies + other-extensions: OverloadedStrings PolyKinds UndecidableInstances + TemplateHaskell DeriveGeneric StandaloneDeriving + ghc-options: -Wall + build-depends: + base >=4.7 && <5, + template-haskell >=2.8 && <2.13, + ghc-prim >=0.3 && <0.6, + deepseq >=1.3 && <1.5 + + if !impl(ghc >=7.8) + build-depends: + tagged >=0.7 && <0.9 + + if !impl(ghc >=8.0) + build-depends: + transformers-compat >=0.3 && <0.6, + transformers >=0.3 && <0.6 + + if impl(ghc >=7.8) + default-extensions: AutoDeriveTypeable + + if impl(ghc <7.10) + other-extensions: OverlappingInstances + +test-suite doctests + type: exitcode-stdio-1.0 + main-is: doctests.hs + hs-source-dirs: test + default-language: Haskell2010 + ghc-options: -Wall -threaded + 
x-doctest-options: --preserve-it + build-depends: + base -any, + doctest >=0.13 && <0.14 + +test-suite generics-sop-examples + type: exitcode-stdio-1.0 + main-is: Example.hs + hs-source-dirs: test + other-modules: + HTransExample + default-language: Haskell2010 + ghc-options: -Wall + build-depends: + base >=4.6 && <5, + generics-sop -any diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/ghc-option-j.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/ghc-option-j.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/ghc-option-j.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/ghc-option-j.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,18 @@ +cabal-version: 2.2 +name: ghc-option-j +version: 0 +license: BSD-2-Clause +synopsis: Test +description: Testy test. +maintainer: none +category: none + +library + exposed-modules: Foo + ghc-options: -Wall -j -Wno-all + default-language: Haskell2010 + +executable foo + main-is: Main.hs + ghc-shared-options: -Wall -j2 -Wno-all + default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/ghc-option-j.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/ghc-option-j.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/ghc-option-j.check 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/ghc-option-j.check 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,2 @@ +'ghc-options: -j[N]' can make sense for specific user's setup, but it is not appropriate for a distributed package. 
Alternatively, if you want to use this, make it conditional based on a Cabal configuration flag (with 'manual: True' and 'default: False') and enable that flag during development. +'ghc-options: -j[N]' can make sense for specific user's setup, but it is not appropriate for a distributed package. Alternatively, if you want to use this, make it conditional based on a Cabal configuration flag (with 'manual: True' and 'default: False') and enable that flag during development. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/haddock-api-2.18.1-check.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/haddock-api-2.18.1-check.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/haddock-api-2.18.1-check.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/haddock-api-2.18.1-check.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,147 @@ +name: haddock-api +version: 2.18.1 +synopsis: A documentation-generation tool for Haskell libraries +description: Haddock is a documentation-generation tool for Haskell + libraries +license: BSD3 +license-file: LICENSE +author: Simon Marlow, David Waern +maintainer: Alex Biehl , Simon Hengel , Mateusz Kowalczyk +homepage: http://www.haskell.org/haddock/ +bug-reports: https://github.com/haskell/haddock/issues +copyright: (c) Simon Marlow, David Waern +category: Documentation +build-type: Simple +cabal-version: >= 1.10 + +extra-source-files: + CHANGES.md + +data-dir: + resources +data-files: + html/solarized.css + html/haddock-util.js + html/highlight.js + html/Classic.theme/haskell_icon.gif + html/Classic.theme/minus.gif + html/Classic.theme/plus.gif + html/Classic.theme/xhaddock.css + html/Ocean.std-theme/hslogo-16.png + html/Ocean.std-theme/minus.gif + html/Ocean.std-theme/ocean.css + 
html/Ocean.std-theme/plus.gif + html/Ocean.std-theme/synopsis.png + latex/haddock.sty + +library + default-language: Haskell2010 + + -- this package typically supports only single major versions + build-depends: base ^>= 4.10.0 + , Cabal ^>= 2.0.0 + , ghc ^>= 8.2 + , ghc-paths ^>= 0.1.0.9 + , haddock-library == 1.4.4.* + , xhtml ^>= 3000.2.2 + + -- Versions for the dependencies below are transitively pinned by + -- the non-reinstallable `ghc` package and hence need no version + -- bounds + build-depends: array + , bytestring + , containers + , deepseq + , directory + , filepath + , ghc-boot + , transformers + + hs-source-dirs: src + + ghc-options: -funbox-strict-fields -Wall -fwarn-tabs -O2 + ghc-options: -Wall + if impl(ghc >= 8.0) + ghc-options: -Wcompat -Wnoncanonical-monad-instances -Wnoncanonical-monadfail-instances + + exposed-modules: + Documentation.Haddock + + other-modules: + Haddock + Haddock.Interface + Haddock.Interface.Rename + Haddock.Interface.Create + Haddock.Interface.AttachInstances + Haddock.Interface.LexParseRn + Haddock.Interface.ParseModuleHeader + Haddock.Interface.Specialize + Haddock.Parser + Haddock.Utils + Haddock.Backends.Xhtml + Haddock.Backends.Xhtml.Decl + Haddock.Backends.Xhtml.DocMarkup + Haddock.Backends.Xhtml.Layout + Haddock.Backends.Xhtml.Names + Haddock.Backends.Xhtml.Themes + Haddock.Backends.Xhtml.Types + Haddock.Backends.Xhtml.Utils + Haddock.Backends.LaTeX + Haddock.Backends.HaddockDB + Haddock.Backends.Hoogle + Haddock.Backends.Hyperlinker + Haddock.Backends.Hyperlinker.Ast + Haddock.Backends.Hyperlinker.Parser + Haddock.Backends.Hyperlinker.Renderer + Haddock.Backends.Hyperlinker.Types + Haddock.Backends.Hyperlinker.Utils + Haddock.ModuleTree + Haddock.Types + Haddock.Doc + Haddock.Version + Haddock.InterfaceFile + Haddock.Options + Haddock.GhcUtils + Haddock.Syb + Haddock.Convert + Paths_haddock_api + + autogen-modules: + Paths_haddock_api + +test-suite spec + type: exitcode-stdio-1.0 + default-language: Haskell2010 + 
main-is: Spec.hs + ghc-options: -Wall + + hs-source-dirs: + test + , src + + -- NB: We only use a small subset of lib:haddock-api here, which + -- explains why this component has a smaller build-depends set + other-modules: + Haddock.Backends.Hyperlinker.ParserSpec + Haddock.Backends.Hyperlinker.Parser + Haddock.Backends.Hyperlinker.Types + + build-depends: + ghc ^>= 8.2 + , hspec ^>= 2.4.4 + , QuickCheck ^>= 2.10 + + -- Versions for the dependencies below are transitively pinned by + -- the non-reinstallable `ghc` package and hence need no version + -- bounds + build-depends: + base + , containers + + build-tool-depends: + hspec-discover:hspec-discover ^>= 2.4.4 + +source-repository head + type: git + subdir: haddock-api + location: https://github.com/haskell/haddock.git diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/haddock-api-2.18.1-check.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/haddock-api-2.18.1-check.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/haddock-api-2.18.1-check.check 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/haddock-api-2.18.1-check.check 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,11 @@ +ERROR: haddock-api-2.18.1-check.cabal:41:44: +unexpected major bounded version syntax (caret, ^>=) used. To use this syntax the package need to specify at least 'cabal-version: 2.0'. Alternatively, if broader compatibility is important then use: >=4.10.0 && <4.11 +expecting "." 
or "-" + +base ^>= 4.10.0 +, Cabal ^>= 2.0.0 +, ghc ^>= 8.2 +, ghc-paths ^>= 0.1.0.9 +, haddock-library == 1.4.4.* +, xhtml ^>= 3000.2.2 + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-5055.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-5055.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-5055.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-5055.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,25 @@ +name: issue +version: 5055 +synopsis: no type in all branches +description: no type in all branches. +license: BSD3 +category: Test +build-type: Simple +cabal-version: >=2.0 + +executable flag-test-exe + main-is: FirstMain.hs + build-depends: base >= 4.8 && < 5 + default-language: Haskell2010 + +test-suite flag-cabal-test + -- TODO: fix so `type` can be on the top level + build-depends: base >= 4.8 && < 5 + default-language: Haskell2010 + + main-is: SecondMain.hs + type: exitcode-stdio-1.0 + + if os(windows) + main-is: FirstMain.hs + -- type: exitcode-stdio-1.0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-5055.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-5055.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-5055.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-5055.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,214 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [_×_ + `UnqualComponentName "flag-test-exe"` + CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName 
"base"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [4,8]`) + (EarlierVersion `mkVersion [5]`))], + condTreeData = Executable + {buildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [4,8]`) + (EarlierVersion + `mkVersion [5]`))], + virtualModules = []}, + exeName = `UnqualComponentName "flag-test-exe"`, + exeScope = ExecutablePublic, + modulePath = "FirstMain.hs"}}], + condForeignLibs = [], + condLibrary = Nothing, + condSubLibraries = [], + condTestSuites = [_×_ + `UnqualComponentName "flag-cabal-test"` + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `Var (OS Windows)`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = 
[], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + testInterface = TestSuiteUnsupported + (TestTypeUnknown + "" + `mkVersion []`), + testName = `UnqualComponentName ""`}}}], + condTreeConstraints = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [4,8]`) + (EarlierVersion `mkVersion [5]`))], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [4,8]`) + (EarlierVersion + `mkVersion [5]`))], + virtualModules = []}, + testInterface = TestSuiteExeV10 + `mkVersion [1,0]` "SecondMain.hs", + 
testName = `UnqualComponentName ""`}}], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "", + benchmarks = [], + bugReports = "", + buildTypeRaw = Just Simple, + category = "Test", + copyright = "", + customFieldsPD = [], + dataDir = "", + dataFiles = [], + description = "no type in all branches.", + executables = [], + extraDocFiles = [], + extraSrcFiles = [], + extraTmpFiles = [], + foreignLibs = [], + homepage = "", + library = Nothing, + licenseFiles = [], + licenseRaw = Right BSD3, + maintainer = "", + package = PackageIdentifier + {pkgName = `PackageName "issue"`, + pkgVersion = `mkVersion [5055]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = [], + specVersionRaw = Right (OrLaterVersion `mkVersion [2,0]`), + stability = "", + subLibraries = [], + synopsis = "no type in all branches", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-5055.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-5055.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-5055.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-5055.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,24 @@ +cabal-version: >=2.0 +name: issue +version: 5055 +license: BSD3 +synopsis: no type in all branches +description: + no type in all branches. 
+category: Test +build-type: Simple + +executable flag-test-exe + main-is: FirstMain.hs + default-language: Haskell2010 + build-depends: + base >=4.8 && <5 + +test-suite flag-cabal-test + type: exitcode-stdio-1.0 + main-is: SecondMain.hs + default-language: Haskell2010 + build-depends: + base >=4.8 && <5 + + if os(windows) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,21 @@ +name: issue +version: 744 +synopsis: Package description parser interprets curly braces in the description field +description: Here is some C code: + . + > for(i = 0; i < 100; i++) { + > printf("%d\n",i); + > } + . + What does it look like? +build-type: Simple +-- we test that check warns about this +cabal-version: >=1.12 + +library + default-language: Haskell2010 + exposed-modules: Issue + + -- Test for round-trip of ghc-options here too + -- See https://github.com/haskell/cabal/issues/2661 + ghc-options: -Wall -threaded "-with-rtsopts=-N -s -M1G -c" -rtsopts diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.check 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.check 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,7 @@ +No 'category' field. 
+No 'maintainer' field. +The 'license' field is missing or is NONE. +'ghc-options: -threaded' has no effect for libraries. It should only be used for executables. +'ghc-options: -rtsopts' has no effect for libraries. It should only be used for executables. +'ghc-options: -with-rtsopts' has no effect for libraries. It should only be used for executables. +Packages relying on Cabal 1.12 or later should specify a specific version of the Cabal spec of the form 'cabal-version: x.y'. Use 'cabal-version: 1.12'. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,104 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [], + condTreeConstraints = [], + condTreeData = Library + {exposedModules = [`ModuleName ["Issue"]`], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = 
[_×_ + GHC + ["-Wall", + "-threaded", + "-with-rtsopts=-N -s -M1G -c", + "-rtsopts"]], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "", + benchmarks = [], + bugReports = "", + buildTypeRaw = Just Simple, + category = "", + copyright = "", + customFieldsPD = [], + dataDir = "", + dataFiles = [], + description = concat + ["Here is some C code:\n", + "\n", + "> for(i = 0; i < 100; i++) {\n", + "> printf(\"%d\\n\",i);\n", + "> }\n", + "\n", + "What does it look like?"], + executables = [], + extraDocFiles = [], + extraSrcFiles = [], + extraTmpFiles = [], + foreignLibs = [], + homepage = "", + library = Nothing, + licenseFiles = [], + licenseRaw = Left NONE, + maintainer = "", + package = PackageIdentifier + {pkgName = `PackageName "issue"`, + pkgVersion = `mkVersion [744]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = [], + specVersionRaw = Right (OrLaterVersion `mkVersion [1,12]`), + stability = "", + subLibraries = [], + synopsis = "Package description parser interprets curly braces in the description field", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/issue-774.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,19 @@ 
+cabal-version: >=1.12 +name: issue +version: 744 +synopsis: Package description parser interprets curly braces in the description field +description: + Here is some C code: + . + > for(i = 0; i < 100; i++) { + > printf("%d\n",i); + > } + . + What does it look like? +build-type: Simple + +library + exposed-modules: + Issue + default-language: Haskell2010 + ghc-options: -Wall -threaded "-with-rtsopts=-N -s -M1G -c" -rtsopts diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/leading-comma.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/leading-comma.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/leading-comma.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/leading-comma.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,19 @@ +cabal-version: 2.1 +name: leading-comma +version: 0 +synopsis: leading comma, trailing comma, or ordinary +build-type: Simple + +library + default-language: Haskell2010 + exposed-modules: LeadingComma + + build-depends: base, containers + + build-depends: + deepseq, + transformers, + + build-depends: + , filepath + , directory diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/leading-comma.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/leading-comma.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/leading-comma.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/leading-comma.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,114 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [], + condForeignLibs = [], + condLibrary = Just + CondNode + 
{condTreeComponents = [], + condTreeConstraints = [Dependency `PackageName "base"` AnyVersion, + Dependency `PackageName "containers"` AnyVersion, + Dependency `PackageName "deepseq"` AnyVersion, + Dependency `PackageName "transformers"` AnyVersion, + Dependency `PackageName "filepath"` AnyVersion, + Dependency `PackageName "directory"` AnyVersion], + condTreeData = Library + {exposedModules = [`ModuleName ["LeadingComma"]`], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + AnyVersion, + Dependency + `PackageName "containers"` + AnyVersion, + Dependency + `PackageName "deepseq"` + AnyVersion, + Dependency + `PackageName "transformers"` + AnyVersion, + Dependency + `PackageName "filepath"` + AnyVersion, + Dependency + `PackageName "directory"` + AnyVersion], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "", + benchmarks = [], + bugReports = "", + buildTypeRaw = Just Simple, + category = "", + copyright = "", + customFieldsPD = 
[], + dataDir = "", + dataFiles = [], + description = "", + executables = [], + extraDocFiles = [], + extraSrcFiles = [], + extraTmpFiles = [], + foreignLibs = [], + homepage = "", + library = Nothing, + licenseFiles = [], + licenseRaw = Left NONE, + maintainer = "", + package = PackageIdentifier + {pkgName = `PackageName "leading-comma"`, + pkgVersion = `mkVersion [0]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = [], + specVersionRaw = Left `mkVersion [2,1]`, + stability = "", + subLibraries = [], + synopsis = "leading comma, trailing comma, or ordinary", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/leading-comma.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/leading-comma.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/leading-comma.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/leading-comma.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,17 @@ +cabal-version: 2.1 +name: leading-comma +version: 0 +synopsis: leading comma, trailing comma, or ordinary +build-type: Simple + +library + exposed-modules: + LeadingComma + default-language: Haskell2010 + build-depends: + base -any, + containers -any, + deepseq -any, + transformers -any, + filepath -any, + directory -any diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/MiniAgda.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/MiniAgda.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/MiniAgda.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/MiniAgda.cabal 2018-11-26 
08:42:54.000000000 +0000 @@ -0,0 +1,89 @@ +name: MiniAgda +version: 0.2017.02.18 +build-type: Simple +cabal-version: 1.22 +license: OtherLicense +license-file: LICENSE +author: Andreas Abel and Karl Mehltretter +maintainer: Andreas Abel +homepage: http://www.tcs.ifi.lmu.de/~abel/miniagda/ +bug-reports: https://github.com/andreasabel/miniagda/issues +category: Dependent types +synopsis: A toy dependently typed programming language with type-based termination. +description: + MiniAgda is a tiny dependently-typed programming language in the style + of Agda. It serves as a laboratory to test potential additions to the + language and type system of Agda. MiniAgda's termination checker is a + fusion of sized types and size-change termination and supports + coinduction. Equality incorporates eta-expansion at record and + singleton types. Function arguments can be declared as static; such + arguments are discarded during equality checking and compilation. + + Recent features include bounded size quantification and destructor + patterns for a more general handling of coinduction. 
+ +tested-with: GHC == 7.6.3 + GHC == 7.8.4 + GHC == 7.10.3 + GHC == 8.0.1 + +extra-source-files: Makefile + +data-files: test/succeed/Makefile + test/succeed/*.ma + test/fail/Makefile + test/fail/*.ma + test/fail/*.err + test/fail/adm/*.ma + test/fail/adm/*.err + lib/*.ma +source-repository head + type: git + location: https://github.com/andreasabel/miniagda + +executable miniagda + hs-source-dirs: src + build-depends: array >= 0.3 && < 0.6, + base >= 4.6 && < 4.11, + containers >= 0.3 && < 0.6, + haskell-src-exts >= 1.17 && < 1.18, + mtl >= 2.2.0.1 && < 2.3, + pretty >= 1.0 && < 1.2 + build-tools: happy >= 1.15 && < 2, + alex >= 3.0 && < 4 + default-language: Haskell98 + default-extensions: CPP + MultiParamTypeClasses + TypeSynonymInstances + FlexibleInstances + FlexibleContexts + GeneralizedNewtypeDeriving + NoMonomorphismRestriction + PatternGuards + TupleSections + NamedFieldPuns + main-is: Main.hs + other-modules: Abstract + Collection + Concrete + Eval + Extract + HsSyntax + Lexer + Main + Parser + Polarity + PrettyTCM + ScopeChecker + Semiring + SparseMatrix + TCM + Termination + ToHaskell + Tokens + TraceError + TreeShapedOrder + TypeChecker + Util + Value + Warshall diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/MiniAgda.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/MiniAgda.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/MiniAgda.check 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/MiniAgda.check 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1 @@ +MiniAgda.cabal:0:0: Version digit with leading zero. Use cabal-version: 2.0 or later to write such versions. 
For more information see https://github.com/haskell/cabal/issues/5092 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,21 @@ +name: ç„¡ +version: 0 +synopsis: The canonical non-package ç„¡ +build-type: Simple +cabal-version: >=1.10 +x-ç„¡: ç„¡ + +source-repository head + Type: git + Location: https://github.com/hvr/-.git + +flag ç„¡ + description: ç„¡ + +library + default-language: Haskell2010 + + exposed-modules: Ω + + if !flag(ç„¡) + buildable:False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.check 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.check 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,6 @@ +No 'category' field. +No 'maintainer' field. +No 'description' field. +The 'license' field is missing or is NONE. +Suspicious flag names: ç„¡. To avoid ambiguity in command line interfaces, flag shouldn't start with a dash. Also for better compatibility, flag names shouldn't contain non-ascii characters. +Non ascii custom fields: x-ç„¡. For better compatibility, custom field names shouldn't contain non-ascii characters. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,156 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `CNot (Var (Flag (FlagName "\\28961")))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = False, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}}], + condTreeConstraints = [], + condTreeData = 
Library + {exposedModules = [`ModuleName ["\\937"]`], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [], + genPackageFlags = [MkFlag + {flagDefault = True, + flagDescription = "\28961", + flagManual = False, + flagName = `FlagName "\\28961"`}], + packageDescription = PackageDescription + {author = "", + benchmarks = [], + bugReports = "", + buildTypeRaw = Just Simple, + category = "", + copyright = "", + customFieldsPD = [_×_ "x-\28961" "\28961"], + dataDir = "", + dataFiles = [], + description = "", + executables = [], + extraDocFiles = [], + extraSrcFiles = [], + extraTmpFiles = [], + foreignLibs = [], + homepage = "", + library = Nothing, + licenseFiles = [], + licenseRaw = Left NONE, + maintainer = "", + package = PackageIdentifier + {pkgName = `PackageName "\\28961"`, + pkgVersion = `mkVersion [0]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = [SourceRepo + {repoBranch = Nothing, + repoKind = RepoHead, + repoLocation = Just "https://github.com/hvr/-.git", + repoModule = 
Nothing, + repoSubdir = Nothing, + repoTag = Nothing, + repoType = Just Git}], + specVersionRaw = Right (OrLaterVersion `mkVersion [1,10]`), + stability = "", + subLibraries = [], + synopsis = "The canonical non-package \28961", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/nothing-unicode.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,22 @@ +cabal-version: >=1.10 +name: ç„¡ +version: 0 +synopsis: The canonical non-package ç„¡ +x-ç„¡: ç„¡ +build-type: Simple + +source-repository head + type: git + location: https://github.com/hvr/-.git + +flag ç„¡ + description: + ç„¡ + +library + exposed-modules: + Ω + default-language: Haskell2010 + + if !flag(ç„¡) + buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/noVersion.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/noVersion.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/noVersion.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/noVersion.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,11 @@ +name: noVersion +version: 0 +synopsis: -none in build-depends +build-type: Simple +cabal-version: 1.22 + +library + default-language: Haskell2010 + exposed-modules: ElseIf + + build-depends: bad-package -none diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/noVersion.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/noVersion.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/noVersion.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/noVersion.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,102 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "bad-package"` + (IntersectVersionRanges + (LaterVersion `mkVersion [1]`) + (EarlierVersion `mkVersion [1]`))], + condTreeData = Library + {exposedModules = [`ModuleName ["ElseIf"]`], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "bad-package"` + (IntersectVersionRanges + (LaterVersion + `mkVersion [1]`) + (EarlierVersion + `mkVersion [1]`))], + virtualModules = []}, + libExposed = True, + libName = 
Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "", + benchmarks = [], + bugReports = "", + buildTypeRaw = Just Simple, + category = "", + copyright = "", + customFieldsPD = [], + dataDir = "", + dataFiles = [], + description = "", + executables = [], + extraDocFiles = [], + extraSrcFiles = [], + extraTmpFiles = [], + foreignLibs = [], + homepage = "", + library = Nothing, + licenseFiles = [], + licenseRaw = Left NONE, + maintainer = "", + package = PackageIdentifier + {pkgName = `PackageName "noVersion"`, + pkgVersion = `mkVersion [0]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = [], + specVersionRaw = Left `mkVersion [1,22]`, + stability = "", + subLibraries = [], + synopsis = "-none in build-depends", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/noVersion.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/noVersion.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/noVersion.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/noVersion.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,12 @@ +cabal-version: 1.22 +name: noVersion +version: 0 +synopsis: -none in build-depends +build-type: Simple + +library + exposed-modules: + ElseIf + default-language: Haskell2010 + build-depends: + bad-package >1 && <1 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/Octree-0.5.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/Octree-0.5.cabal --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/Octree-0.5.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/Octree-0.5.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,44 @@ +name: Octree +version: 0.5 +stability: beta +homepage: https://github.com/mgajda/octree +package-url: http://hackage.haskell.org/package/octree +synopsis: Simple unbalanced Octree for storing data about 3D points +description: Octree data structure is relatively shallow data structure for space partitioning. +category: Data +license: BSD3 +license-file: LICENSE + +author: Michal J. Gajda +copyright: Copyright by Michal J. Gajda '2012 +maintainer: mjgajda@googlemail.com +bug-reports: mailto:mjgajda@googlemail.com + + +build-type: Simple +cabal-version: >=1.8 +tested-with: GHC==7.0.4,GHC==7.4.1,GHC==7.4.2,GHC==7.6.0 + +source-repository head + type: git + location: git@github.com:mgajda/octree.git + +Library + build-depends: base>=4.0 && < 4.7, AC-Vector >= 2.3.0, QuickCheck >= 2.4.0 + exposed-modules: Data.Octree + other-modules: Data.Octree.Internal + exposed: True + extensions: ScopedTypeVariables + +Test-suite test_Octree + Type: exitcode-stdio-1.0 + Build-depends: base>=4.0 && < 4.7, AC-Vector >= 2.3.0, QuickCheck >= 2.4.0 + Main-is: tests/test_Octree.hs + +Test-suite readme +  type: exitcode-stdio-1.0 + -- We have a symlink: README.lhs -> README.md +  main-is: README.lhs + Build-depends: base>=4.0 && < 4.7, AC-Vector >= 2.3.0, QuickCheck >= 2.4.0, markdown-unlit +  ghc-options: -pgmL markdown-unlit + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/Octree-0.5.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/Octree-0.5.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/Octree-0.5.expr 1970-01-01 
00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/Octree-0.5.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,285 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [4,0]`) + (EarlierVersion `mkVersion [4,7]`)), + Dependency + `PackageName "AC-Vector"` + (OrLaterVersion `mkVersion [2,3,0]`), + Dependency + `PackageName "QuickCheck"` + (OrLaterVersion `mkVersion [2,4,0]`)], + condTreeData = Library + {exposedModules = [`ModuleName ["Data","Octree"]`], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [EnableExtension + ScopedTypeVariables], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [`ModuleName ["Data","Octree","Internal"]`], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [4,0]`) + (EarlierVersion + `mkVersion [4,7]`)), + Dependency + `PackageName "AC-Vector"` + (OrLaterVersion + `mkVersion [2,3,0]`), + Dependency + `PackageName "QuickCheck"` + (OrLaterVersion + `mkVersion 
[2,4,0]`)], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [_×_ + `UnqualComponentName "test_Octree"` + CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [4,0]`) + (EarlierVersion `mkVersion [4,7]`)), + Dependency + `PackageName "AC-Vector"` + (OrLaterVersion `mkVersion [2,3,0]`), + Dependency + `PackageName "QuickCheck"` + (OrLaterVersion `mkVersion [2,4,0]`)], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [4,0]`) + (EarlierVersion + `mkVersion [4,7]`)), + Dependency + `PackageName "AC-Vector"` + (OrLaterVersion + `mkVersion [2,3,0]`), + Dependency + `PackageName "QuickCheck"` + (OrLaterVersion + `mkVersion [2,4,0]`)], + virtualModules = []}, + testInterface = TestSuiteExeV10 + `mkVersion [1,0]` + "tests/test_Octree.hs", + testName = `UnqualComponentName ""`}}, + _×_ + `UnqualComponentName "readme"` + CondNode + {condTreeComponents 
= [], + condTreeConstraints = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [4,0]`) + (EarlierVersion `mkVersion [4,7]`)), + Dependency + `PackageName "AC-Vector"` + (OrLaterVersion `mkVersion [2,3,0]`), + Dependency + `PackageName "QuickCheck"` + (OrLaterVersion `mkVersion [2,4,0]`), + Dependency + `PackageName "markdown-unlit"` AnyVersion], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [_×_ + GHC + ["-pgmL", + "markdown-unlit"]], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [4,0]`) + (EarlierVersion + `mkVersion [4,7]`)), + Dependency + `PackageName "AC-Vector"` + (OrLaterVersion + `mkVersion [2,3,0]`), + Dependency + `PackageName "QuickCheck"` + (OrLaterVersion + `mkVersion [2,4,0]`), + Dependency + `PackageName "markdown-unlit"` + AnyVersion], + virtualModules = []}, + testInterface = TestSuiteExeV10 + `mkVersion [1,0]` "README.lhs", + testName = `UnqualComponentName ""`}}], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "Michal J. 
Gajda", + benchmarks = [], + bugReports = "mailto:mjgajda@googlemail.com", + buildTypeRaw = Just Simple, + category = "Data", + copyright = "Copyright by Michal J. Gajda '2012", + customFieldsPD = [], + dataDir = "", + dataFiles = [], + description = "Octree data structure is relatively shallow data structure for space partitioning.", + executables = [], + extraDocFiles = [], + extraSrcFiles = [], + extraTmpFiles = [], + foreignLibs = [], + homepage = "https://github.com/mgajda/octree", + library = Nothing, + licenseFiles = ["LICENSE"], + licenseRaw = Right BSD3, + maintainer = "mjgajda@googlemail.com", + package = PackageIdentifier + {pkgName = `PackageName "Octree"`, + pkgVersion = `mkVersion [0,5]`}, + pkgUrl = "http://hackage.haskell.org/package/octree", + setupBuildInfo = Nothing, + sourceRepos = [SourceRepo + {repoBranch = Nothing, + repoKind = RepoHead, + repoLocation = Just "git@github.com:mgajda/octree.git", + repoModule = Nothing, + repoSubdir = Nothing, + repoTag = Nothing, + repoType = Just Git}], + specVersionRaw = Right (OrLaterVersion `mkVersion [1,8]`), + stability = "beta", + subLibraries = [], + synopsis = "Simple unbalanced Octree for storing data about 3D points", + testSuites = [], + testedWith = [_×_ GHC (ThisVersion `mkVersion [7,0,4]`), + _×_ GHC (ThisVersion `mkVersion [7,4,1]`), + _×_ GHC (ThisVersion `mkVersion [7,4,2]`), + _×_ GHC (ThisVersion `mkVersion [7,6,0]`)]}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/Octree-0.5.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/Octree-0.5.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/Octree-0.5.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/Octree-0.5.format 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,52 @@ +Octree-0.5.cabal:39:3: Non 
breaking spaces at 39:3, 41:3, 43:3 +cabal-version: >=1.8 +name: Octree +version: 0.5 +license: BSD3 +license-file: LICENSE +copyright: Copyright by Michal J. Gajda '2012 +maintainer: mjgajda@googlemail.com +author: Michal J. Gajda +stability: beta +tested-with: ghc ==7.0.4 ghc ==7.4.1 ghc ==7.4.2 ghc ==7.6.0 +homepage: https://github.com/mgajda/octree +package-url: http://hackage.haskell.org/package/octree +bug-reports: mailto:mjgajda@googlemail.com +synopsis: Simple unbalanced Octree for storing data about 3D points +description: + Octree data structure is relatively shallow data structure for space partitioning. +category: Data +build-type: Simple + +source-repository head + type: git + location: git@github.com:mgajda/octree.git + +library + exposed-modules: + Data.Octree + other-modules: + Data.Octree.Internal + extensions: ScopedTypeVariables + build-depends: + base >=4.0 && <4.7, + AC-Vector >=2.3.0, + QuickCheck >=2.4.0 + +test-suite test_Octree + type: exitcode-stdio-1.0 + main-is: tests/test_Octree.hs + build-depends: + base >=4.0 && <4.7, + AC-Vector >=2.3.0, + QuickCheck >=2.4.0 + +test-suite readme + type: exitcode-stdio-1.0 + main-is: README.lhs + ghc-options: -pgmL markdown-unlit + build-depends: + base >=4.0 && <4.7, + AC-Vector >=2.3.0, + QuickCheck >=2.4.0, + markdown-unlit -any diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-1.6-glob.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-1.6-glob.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-1.6-glob.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-1.6-glob.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,17 @@ +cabal-version: >= 1.4 +name: pre-1dot6-glob +version: 0 +license: BSD3 +license-file: pre-1.6-glob.cabal +synopsis: no 
+description: none +build-type: Simple +category: Test +maintainer: none + +extra-source-files: + foo/*.hs + +library + exposed-modules: + Foo diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-1.6-glob.check cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-1.6-glob.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-1.6-glob.check 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-1.6-glob.check 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1 @@ +In the 'extra-source-files' field: invalid file glob 'foo/*.hs'. Using star wildcards requires 'cabal-version: >= 1.6'. Alternatively if you require compatibility with earlier Cabal versions then list all the files explicitly. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-2.4-globstar.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-2.4-globstar.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-2.4-globstar.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-2.4-globstar.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,19 @@ +cabal-version: 2.2 +name: pre-3dot0-globstar +version: 0 +extra-source-files: + foo/**/*.hs +extra-doc-files: + foo/**/*.html +data-files: + foo/**/*.dat +license: BSD-3-Clause +synopsis: no +description: none +category: Test +maintainer: none + +library + default-language: Haskell2010 + exposed-modules: + Foo diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-2.4-globstar.check 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-2.4-globstar.check --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-2.4-globstar.check 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/pre-2.4-globstar.check 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,3 @@ +In the 'data-files' field: invalid file glob 'foo/**/*.dat'. Using the double-star syntax requires 'cabal-version: 2.4' or greater. Alternatively, for compatibility with earlier Cabal versions, list the included directories explicitly. +In the 'extra-source-files' field: invalid file glob 'foo/**/*.hs'. Using the double-star syntax requires 'cabal-version: 2.4' or greater. Alternatively, for compatibility with earlier Cabal versions, list the included directories explicitly. +In the 'extra-doc-files' field: invalid file glob 'foo/**/*.html'. Using the double-star syntax requires 'cabal-version: 2.4' or greater. Alternatively, for compatibility with earlier Cabal versions, list the included directories explicitly. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/shake.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/shake.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/shake.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/shake.cabal 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,402 @@ +cabal-version: >= 1.18 +build-type: Simple +name: shake +version: 0.15.11 +license: BSD3 +license-file: LICENSE +category: Development, Shake +author: Neil Mitchell +maintainer: Neil Mitchell +copyright: Neil Mitchell 2011-2017 +synopsis: Build system library, like Make, but more accurate dependencies. 
+description: + Shake is a Haskell library for writing build systems - designed as a + replacement for @make@. See "Development.Shake" for an introduction, + including an example. Further examples are included in the Cabal tarball, + under the @Examples@ directory. The homepage contains links to a user + manual, an academic paper and further information: + + . + To use Shake the user writes a Haskell program + that imports "Development.Shake", defines some build rules, and calls + the 'Development.Shake.shakeArgs' function. Thanks to do notation and infix + operators, a simple Shake build system + is not too dissimilar from a simple Makefile. However, as build systems + get more complex, Shake is able to take advantage of the excellent + abstraction facilities offered by Haskell and easily support much larger + projects. The Shake library provides all the standard features available in other + build systems, including automatic parallelism and minimal rebuilds. + Shake also provides more accurate dependency tracking, including seamless + support for generated files, and dependencies on system information + (e.g. compiler version). 
+homepage: http://shakebuild.com +bug-reports: https://github.com/ndmitchell/shake/issues +tested-with: GHC==8.0.1, GHC==7.10.3, GHC==7.8.4, GHC==7.6.3, GHC==7.4.2 +extra-doc-files: + CHANGES.txt + README.md +extra-source-files: + src/Test/C/constants.c + src/Test/C/constants.h + src/Test/C/main.c + src/Test/MakeTutor/Makefile + src/Test/MakeTutor/hellofunc.c + src/Test/MakeTutor/hellomake.c + src/Test/MakeTutor/hellomake.h + src/Test/Tar/list.txt + src/Test/Ninja/*.ninja + src/Test/Ninja/subdir/*.ninja + src/Test/Ninja/*.output + src/Test/Progress/*.prog + src/Test/Tup/hello.c + src/Test/Tup/root.cfg + src/Test/Tup/newmath/root.cfg + src/Test/Tup/newmath/square.c + src/Test/Tup/newmath/square.h + src/Paths.hs + docs/Manual.md + docs/shake-progress.png + +data-files: + html/viz.js + html/profile.html + html/progress.html + html/shake.js + docs/manual/build.bat + docs/manual/Build.hs + docs/manual/build.sh + docs/manual/constants.c + docs/manual/constants.h + docs/manual/main.c + +source-repository head + type: git + location: https://github.com/ndmitchell/shake.git + +flag portable + default: False + manual: True + description: Obtain FileTime using portable functions + +library + default-language: Haskell2010 + hs-source-dirs: src + build-depends: + base >= 4.5, + directory, + hashable >= 1.1.2.3, + binary, + filepath, + process >= 1.1, + unordered-containers >= 0.2.1, + bytestring, + utf8-string >= 0.3, + time, + random, + js-jquery, + js-flot, + transformers >= 0.2, + extra >= 1.4.8, + deepseq >= 1.1 + + if flag(portable) + cpp-options: -DPORTABLE + if impl(ghc < 7.6) + build-depends: old-time + else + if !os(windows) + build-depends: unix >= 2.5.1 + if !os(windows) + build-depends: unix + + exposed-modules: + Development.Shake + Development.Shake.Classes + Development.Shake.Command + Development.Shake.Config + Development.Shake.FilePath + Development.Shake.Forward + Development.Shake.Rule + Development.Shake.Util + + other-modules: + Development.Ninja.Env + 
Development.Ninja.Lexer + Development.Ninja.Parse + Development.Ninja.Type + Development.Shake.Args + Development.Shake.ByteString + Development.Shake.Core + Development.Shake.CmdOption + Development.Shake.Database + Development.Shake.Demo + Development.Shake.Derived + Development.Shake.Errors + Development.Shake.FileInfo + Development.Shake.FilePattern + Development.Shake.Monad + Development.Shake.Pool + Development.Shake.Profile + Development.Shake.Progress + Development.Shake.Resource + Development.Shake.Rules.Directory + Development.Shake.Rules.File + Development.Shake.Rules.Files + Development.Shake.Rules.Oracle + Development.Shake.Rules.OrderOnly + Development.Shake.Rules.Rerun + Development.Shake.Shake + Development.Shake.Special + Development.Shake.Storage + Development.Shake.Types + Development.Shake.Value + General.Bilist + General.Binary + General.Cleanup + General.Concurrent + General.Extra + General.FileLock + General.Intern + General.Process + General.String + General.Template + General.Timing + Paths_shake + + +executable shake + default-language: Haskell2010 + hs-source-dirs: src + ghc-options: -main-is Run.main + main-is: Run.hs + ghc-options: -rtsopts + -- GHC bug 7646 means -threaded causes errors + if impl(ghc >= 7.8) + ghc-options: -threaded "-with-rtsopts=-I0 -qg -qb" + build-depends: + base == 4.*, + directory, + hashable >= 1.1.2.3, + binary, + filepath, + process >= 1.1, + unordered-containers >= 0.2.1, + bytestring, + utf8-string >= 0.3, + time, + random, + js-jquery, + js-flot, + transformers >= 0.2, + extra >= 1.4.8, + deepseq >= 1.1, + primitive + + if flag(portable) + cpp-options: -DPORTABLE + if impl(ghc < 7.6) + build-depends: old-time + else + if !os(windows) + build-depends: unix >= 2.5.1 + if !os(windows) + build-depends: unix + + other-modules: + Development.Make.All + Development.Make.Env + Development.Make.Parse + Development.Make.Rules + Development.Make.Type + Development.Ninja.All + Development.Ninja.Env + 
Development.Ninja.Lexer + Development.Ninja.Parse + Development.Ninja.Type + Development.Shake + Development.Shake.Args + Development.Shake.ByteString + Development.Shake.Classes + Development.Shake.CmdOption + Development.Shake.Command + Development.Shake.Core + Development.Shake.Database + Development.Shake.Demo + Development.Shake.Derived + Development.Shake.Errors + Development.Shake.FileInfo + Development.Shake.FilePath + Development.Shake.FilePattern + Development.Shake.Forward + Development.Shake.Monad + Development.Shake.Pool + Development.Shake.Profile + Development.Shake.Progress + Development.Shake.Resource + Development.Shake.Rule + Development.Shake.Rules.Directory + Development.Shake.Rules.File + Development.Shake.Rules.Files + Development.Shake.Rules.Oracle + Development.Shake.Rules.OrderOnly + Development.Shake.Rules.Rerun + Development.Shake.Shake + Development.Shake.Special + Development.Shake.Storage + Development.Shake.Types + Development.Shake.Value + General.Bilist + General.Binary + General.Cleanup + General.Concurrent + General.Extra + General.FileLock + General.Intern + General.Process + General.String + General.Template + General.Timing + Paths_shake + Run + + +test-suite shake-test + default-language: Haskell2010 + type: exitcode-stdio-1.0 + main-is: Test.hs + hs-source-dirs: src + + ghc-options: -main-is Test.main -rtsopts + if impl(ghc >= 7.6) + -- space leak introduced by -O1 in 7.4, see #445 + ghc-options: -with-rtsopts=-K1K + if impl(ghc >= 7.8) + -- GHC bug 7646 (fixed in 7.8) means -threaded causes errors + ghc-options: -threaded + + build-depends: + base == 4.*, + directory, + hashable >= 1.1.2.3, + binary, + filepath, + process >= 1.1, + unordered-containers >= 0.2.1, + bytestring, + utf8-string >= 0.3, + time, + random, + js-jquery, + js-flot, + transformers >= 0.2, + deepseq >= 1.1, + extra >= 1.4.8, + QuickCheck >= 2.0 + + if flag(portable) + cpp-options: -DPORTABLE + if impl(ghc < 7.6) + build-depends: old-time + else + if 
!os(windows) + build-depends: unix >= 2.5.1 + if !os(windows) + build-depends: unix + + other-modules: + Development.Make.All + Development.Make.Env + Development.Make.Parse + Development.Make.Rules + Development.Make.Type + Development.Ninja.All + Development.Ninja.Env + Development.Ninja.Lexer + Development.Ninja.Parse + Development.Ninja.Type + Development.Shake + Development.Shake.Args + Development.Shake.ByteString + Development.Shake.Classes + Development.Shake.CmdOption + Development.Shake.Command + Development.Shake.Config + Development.Shake.Core + Development.Shake.Database + Development.Shake.Demo + Development.Shake.Derived + Development.Shake.Errors + Development.Shake.FileInfo + Development.Shake.FilePath + Development.Shake.FilePattern + Development.Shake.Forward + Development.Shake.Monad + Development.Shake.Pool + Development.Shake.Profile + Development.Shake.Progress + Development.Shake.Resource + Development.Shake.Rule + Development.Shake.Rules.Directory + Development.Shake.Rules.File + Development.Shake.Rules.Files + Development.Shake.Rules.Oracle + Development.Shake.Rules.OrderOnly + Development.Shake.Rules.Rerun + Development.Shake.Shake + Development.Shake.Special + Development.Shake.Storage + Development.Shake.Types + Development.Shake.Util + Development.Shake.Value + General.Bilist + General.Binary + General.Cleanup + General.Concurrent + General.Extra + General.FileLock + General.Intern + General.Process + General.String + General.Template + General.Timing + Paths_shake + Run + Test.Assume + Test.Basic + Test.Benchmark + Test.C + Test.Cache + Test.Command + Test.Config + Test.Digest + Test.Directory + Test.Docs + Test.Errors + Test.FileLock + Test.FilePath + Test.FilePattern + Test.Files + Test.Forward + Test.Journal + Test.Lint + Test.Live + Test.Makefile + Test.Manual + Test.Match + Test.Monad + Test.Ninja + Test.Oracle + Test.OrderOnly + Test.Parallel + Test.Pool + Test.Progress + Test.Random + Test.Resources + Test.Self + Test.Tar + 
Test.Tup + Test.Type + Test.Unicode + Test.Util + Test.Verbosity + Test.Version diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/shake.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/shake.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/shake.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/shake.expr 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,1720 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [_×_ + `UnqualComponentName "shake"` + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `Var (Impl GHC (OrLaterVersion (mkVersion [7,8])))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [], + condTreeData = Executable + {buildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [_×_ + GHC + ["-threaded", + "-with-rtsopts=-I0 -qg -qb"]], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + exeName = `UnqualComponentName "shake"`, + exeScope = 
ExecutablePublic, + modulePath = ""}}}, + CondBranch + {condBranchCondition = `Var (Flag (FlagName "portable"))`, + condBranchIfFalse = Just + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `CNot (Var (OS Windows))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "unix"` + (OrLaterVersion + `mkVersion [2,5,1]`)], + condTreeData = Executable + {buildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "unix"` + (OrLaterVersion + `mkVersion [2,5,1]`)], + virtualModules = []}, + exeName = `UnqualComponentName "shake"`, + exeScope = ExecutablePublic, + modulePath = ""}}}], + condTreeConstraints = [], + condTreeData = Executable + {buildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + 
extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + exeName = `UnqualComponentName "shake"`, + exeScope = ExecutablePublic, + modulePath = ""}}, + condBranchIfTrue = CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `Var (Impl GHC (EarlierVersion (mkVersion [7,6])))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "old-time"` + AnyVersion], + condTreeData = Executable + {buildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "old-time"` + AnyVersion], + virtualModules = []}, + exeName = `UnqualComponentName "shake"`, + exeScope = ExecutablePublic, + modulePath = ""}}}], + condTreeConstraints = [], + 
condTreeData = Executable + {buildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = ["-DPORTABLE"], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + exeName = `UnqualComponentName "shake"`, + exeScope = ExecutablePublic, + modulePath = ""}}}, + CondBranch + {condBranchCondition = `CNot (Var (OS Windows))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "unix"` + AnyVersion], + condTreeData = Executable + {buildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + 
otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "unix"` + AnyVersion], + virtualModules = []}, + exeName = `UnqualComponentName "shake"`, + exeScope = ExecutablePublic, + modulePath = ""}}}], + condTreeConstraints = [Dependency + `PackageName "base"` + (WildcardVersion `mkVersion [4]`), + Dependency `PackageName "directory"` AnyVersion, + Dependency + `PackageName "hashable"` + (OrLaterVersion `mkVersion [1,1,2,3]`), + Dependency `PackageName "binary"` AnyVersion, + Dependency `PackageName "filepath"` AnyVersion, + Dependency + `PackageName "process"` + (OrLaterVersion `mkVersion [1,1]`), + Dependency + `PackageName "unordered-containers"` + (OrLaterVersion `mkVersion [0,2,1]`), + Dependency `PackageName "bytestring"` AnyVersion, + Dependency + `PackageName "utf8-string"` + (OrLaterVersion `mkVersion [0,3]`), + Dependency `PackageName "time"` AnyVersion, + Dependency `PackageName "random"` AnyVersion, + Dependency `PackageName "js-jquery"` AnyVersion, + Dependency `PackageName "js-flot"` AnyVersion, + Dependency + `PackageName "transformers"` + (OrLaterVersion `mkVersion [0,2]`), + Dependency + `PackageName "extra"` + (OrLaterVersion `mkVersion [1,4,8]`), + Dependency + `PackageName "deepseq"` + (OrLaterVersion `mkVersion [1,1]`), + Dependency `PackageName "primitive"` AnyVersion], + condTreeData = Executable + {buildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + 
hsSourceDirs = ["src"], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [_×_ + GHC + ["-main-is", + "Run.main", + "-rtsopts"]], + otherExtensions = [], + otherLanguages = [], + otherModules = [`ModuleName ["Development","Make","All"]`, + `ModuleName ["Development","Make","Env"]`, + `ModuleName ["Development","Make","Parse"]`, + `ModuleName ["Development","Make","Rules"]`, + `ModuleName ["Development","Make","Type"]`, + `ModuleName ["Development","Ninja","All"]`, + `ModuleName ["Development","Ninja","Env"]`, + `ModuleName ["Development","Ninja","Lexer"]`, + `ModuleName ["Development","Ninja","Parse"]`, + `ModuleName ["Development","Ninja","Type"]`, + `ModuleName ["Development","Shake"]`, + `ModuleName ["Development","Shake","Args"]`, + `ModuleName ["Development","Shake","ByteString"]`, + `ModuleName ["Development","Shake","Classes"]`, + `ModuleName ["Development","Shake","CmdOption"]`, + `ModuleName ["Development","Shake","Command"]`, + `ModuleName ["Development","Shake","Core"]`, + `ModuleName ["Development","Shake","Database"]`, + `ModuleName ["Development","Shake","Demo"]`, + `ModuleName ["Development","Shake","Derived"]`, + `ModuleName ["Development","Shake","Errors"]`, + `ModuleName ["Development","Shake","FileInfo"]`, + `ModuleName ["Development","Shake","FilePath"]`, + `ModuleName ["Development","Shake","FilePattern"]`, + `ModuleName ["Development","Shake","Forward"]`, + `ModuleName ["Development","Shake","Monad"]`, + `ModuleName ["Development","Shake","Pool"]`, + `ModuleName ["Development","Shake","Profile"]`, + `ModuleName ["Development","Shake","Progress"]`, + `ModuleName ["Development","Shake","Resource"]`, + `ModuleName ["Development","Shake","Rule"]`, + `ModuleName ["Development","Shake","Rules","Directory"]`, + `ModuleName ["Development","Shake","Rules","File"]`, + `ModuleName ["Development","Shake","Rules","Files"]`, + `ModuleName 
["Development","Shake","Rules","Oracle"]`, + `ModuleName ["Development","Shake","Rules","OrderOnly"]`, + `ModuleName ["Development","Shake","Rules","Rerun"]`, + `ModuleName ["Development","Shake","Shake"]`, + `ModuleName ["Development","Shake","Special"]`, + `ModuleName ["Development","Shake","Storage"]`, + `ModuleName ["Development","Shake","Types"]`, + `ModuleName ["Development","Shake","Value"]`, + `ModuleName ["General","Bilist"]`, + `ModuleName ["General","Binary"]`, + `ModuleName ["General","Cleanup"]`, + `ModuleName ["General","Concurrent"]`, + `ModuleName ["General","Extra"]`, + `ModuleName ["General","FileLock"]`, + `ModuleName ["General","Intern"]`, + `ModuleName ["General","Process"]`, + `ModuleName ["General","String"]`, + `ModuleName ["General","Template"]`, + `ModuleName ["General","Timing"]`, + `ModuleName ["Paths_shake"]`, + `ModuleName ["Run"]`], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + (WildcardVersion + `mkVersion [4]`), + Dependency + `PackageName "directory"` + AnyVersion, + Dependency + `PackageName "hashable"` + (OrLaterVersion + `mkVersion [1,1,2,3]`), + Dependency + `PackageName "binary"` + AnyVersion, + Dependency + `PackageName "filepath"` + AnyVersion, + Dependency + `PackageName "process"` + (OrLaterVersion + `mkVersion [1,1]`), + Dependency + `PackageName "unordered-containers"` + (OrLaterVersion + `mkVersion [0,2,1]`), + Dependency + `PackageName "bytestring"` + AnyVersion, + Dependency + `PackageName "utf8-string"` + (OrLaterVersion + `mkVersion [0,3]`), + Dependency + `PackageName "time"` + AnyVersion, + Dependency + `PackageName "random"` + AnyVersion, + Dependency + `PackageName "js-jquery"` + AnyVersion, + Dependency + `PackageName "js-flot"` + AnyVersion, + Dependency + `PackageName "transformers"` + (OrLaterVersion + `mkVersion [0,2]`), + Dependency + `PackageName "extra"` + (OrLaterVersion + `mkVersion [1,4,8]`), + 
Dependency + `PackageName "deepseq"` + (OrLaterVersion + `mkVersion [1,1]`), + Dependency + `PackageName "primitive"` + AnyVersion], + virtualModules = []}, + exeName = `UnqualComponentName "shake"`, + exeScope = ExecutablePublic, + modulePath = "Run.hs"}}], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `Var (Flag (FlagName "portable"))`, + condBranchIfFalse = Just + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `CNot (Var (OS Windows))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "unix"` + (OrLaterVersion + `mkVersion [2,5,1]`)], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "unix"` + (OrLaterVersion + `mkVersion [2,5,1]`)], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}}], + condTreeConstraints = [], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules 
= [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condBranchIfTrue = CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `Var (Impl GHC (EarlierVersion (mkVersion [7,6])))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "old-time"` + AnyVersion], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], 
+ otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "old-time"` + AnyVersion], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}}], + condTreeConstraints = [], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = ["-DPORTABLE"], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}}, + CondBranch + {condBranchCondition = `CNot (Var (OS Windows))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "unix"` + AnyVersion], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + 
defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "unix"` + AnyVersion], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}}], + condTreeConstraints = [Dependency + `PackageName "base"` + (OrLaterVersion `mkVersion [4,5]`), + Dependency `PackageName "directory"` AnyVersion, + Dependency + `PackageName "hashable"` + (OrLaterVersion `mkVersion [1,1,2,3]`), + Dependency `PackageName "binary"` AnyVersion, + Dependency `PackageName "filepath"` AnyVersion, + Dependency + `PackageName "process"` + (OrLaterVersion `mkVersion [1,1]`), + Dependency + `PackageName "unordered-containers"` + (OrLaterVersion `mkVersion [0,2,1]`), + Dependency `PackageName "bytestring"` AnyVersion, + Dependency + `PackageName "utf8-string"` + (OrLaterVersion `mkVersion [0,3]`), + Dependency `PackageName "time"` AnyVersion, + Dependency `PackageName "random"` AnyVersion, + Dependency `PackageName "js-jquery"` AnyVersion, + Dependency `PackageName "js-flot"` AnyVersion, + Dependency + `PackageName "transformers"` + (OrLaterVersion `mkVersion [0,2]`), + Dependency + `PackageName "extra"` + (OrLaterVersion `mkVersion [1,4,8]`), + Dependency + `PackageName "deepseq"` + (OrLaterVersion `mkVersion [1,1]`)], + condTreeData = Library + {exposedModules = [`ModuleName ["Development","Shake"]`, + `ModuleName ["Development","Shake","Classes"]`, + `ModuleName ["Development","Shake","Command"]`, + `ModuleName ["Development","Shake","Config"]`, + 
`ModuleName ["Development","Shake","FilePath"]`, + `ModuleName ["Development","Shake","Forward"]`, + `ModuleName ["Development","Shake","Rule"]`, + `ModuleName ["Development","Shake","Util"]`], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = ["src"], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [`ModuleName ["Development","Ninja","Env"]`, + `ModuleName ["Development","Ninja","Lexer"]`, + `ModuleName ["Development","Ninja","Parse"]`, + `ModuleName ["Development","Ninja","Type"]`, + `ModuleName ["Development","Shake","Args"]`, + `ModuleName ["Development","Shake","ByteString"]`, + `ModuleName ["Development","Shake","Core"]`, + `ModuleName ["Development","Shake","CmdOption"]`, + `ModuleName ["Development","Shake","Database"]`, + `ModuleName ["Development","Shake","Demo"]`, + `ModuleName ["Development","Shake","Derived"]`, + `ModuleName ["Development","Shake","Errors"]`, + `ModuleName ["Development","Shake","FileInfo"]`, + `ModuleName ["Development","Shake","FilePattern"]`, + `ModuleName ["Development","Shake","Monad"]`, + `ModuleName ["Development","Shake","Pool"]`, + `ModuleName ["Development","Shake","Profile"]`, + `ModuleName ["Development","Shake","Progress"]`, + `ModuleName ["Development","Shake","Resource"]`, + `ModuleName ["Development","Shake","Rules","Directory"]`, + `ModuleName 
["Development","Shake","Rules","File"]`, + `ModuleName ["Development","Shake","Rules","Files"]`, + `ModuleName ["Development","Shake","Rules","Oracle"]`, + `ModuleName ["Development","Shake","Rules","OrderOnly"]`, + `ModuleName ["Development","Shake","Rules","Rerun"]`, + `ModuleName ["Development","Shake","Shake"]`, + `ModuleName ["Development","Shake","Special"]`, + `ModuleName ["Development","Shake","Storage"]`, + `ModuleName ["Development","Shake","Types"]`, + `ModuleName ["Development","Shake","Value"]`, + `ModuleName ["General","Bilist"]`, + `ModuleName ["General","Binary"]`, + `ModuleName ["General","Cleanup"]`, + `ModuleName ["General","Concurrent"]`, + `ModuleName ["General","Extra"]`, + `ModuleName ["General","FileLock"]`, + `ModuleName ["General","Intern"]`, + `ModuleName ["General","Process"]`, + `ModuleName ["General","String"]`, + `ModuleName ["General","Template"]`, + `ModuleName ["General","Timing"]`, + `ModuleName ["Paths_shake"]`], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + (OrLaterVersion + `mkVersion [4,5]`), + Dependency + `PackageName "directory"` + AnyVersion, + Dependency + `PackageName "hashable"` + (OrLaterVersion + `mkVersion [1,1,2,3]`), + Dependency + `PackageName "binary"` + AnyVersion, + Dependency + `PackageName "filepath"` + AnyVersion, + Dependency + `PackageName "process"` + (OrLaterVersion + `mkVersion [1,1]`), + Dependency + `PackageName "unordered-containers"` + (OrLaterVersion + `mkVersion [0,2,1]`), + Dependency + `PackageName "bytestring"` + AnyVersion, + Dependency + `PackageName "utf8-string"` + (OrLaterVersion + `mkVersion [0,3]`), + Dependency + `PackageName "time"` + AnyVersion, + Dependency + `PackageName "random"` + AnyVersion, + Dependency + `PackageName "js-jquery"` + AnyVersion, + Dependency + `PackageName "js-flot"` + AnyVersion, + Dependency + `PackageName "transformers"` + (OrLaterVersion + `mkVersion 
[0,2]`), + Dependency + `PackageName "extra"` + (OrLaterVersion + `mkVersion [1,4,8]`), + Dependency + `PackageName "deepseq"` + (OrLaterVersion + `mkVersion [1,1]`)], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [_×_ + `UnqualComponentName "shake-test"` + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `Var (Impl GHC (OrLaterVersion (mkVersion [7,6])))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [_×_ + GHC + ["-with-rtsopts=-K1K"]], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + testInterface = TestSuiteUnsupported + (TestTypeUnknown + "" + `mkVersion []`), + testName = `UnqualComponentName ""`}}}, + CondBranch + {condBranchCondition = `Var (Impl GHC (OrLaterVersion (mkVersion [7,8])))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + 
autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [_×_ + GHC + ["-threaded"]], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + testInterface = TestSuiteUnsupported + (TestTypeUnknown + "" + `mkVersion []`), + testName = `UnqualComponentName ""`}}}, + CondBranch + {condBranchCondition = `Var (Flag (FlagName "portable"))`, + condBranchIfFalse = Just + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `CNot (Var (OS Windows))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "unix"` + (OrLaterVersion + `mkVersion [2,5,1]`)], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + 
jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "unix"` + (OrLaterVersion + `mkVersion [2,5,1]`)], + virtualModules = []}, + testInterface = TestSuiteUnsupported + (TestTypeUnknown + "" + `mkVersion []`), + testName = `UnqualComponentName ""`}}}], + condTreeConstraints = [], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + testInterface = TestSuiteUnsupported + (TestTypeUnknown + "" + `mkVersion []`), + testName = `UnqualComponentName ""`}}, + condBranchIfTrue = CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `Var (Impl GHC (EarlierVersion (mkVersion [7,6])))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "old-time"` + AnyVersion], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], 
+ buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "old-time"` + AnyVersion], + virtualModules = []}, + testInterface = TestSuiteUnsupported + (TestTypeUnknown + "" + `mkVersion []`), + testName = `UnqualComponentName ""`}}}], + condTreeConstraints = [], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = ["-DPORTABLE"], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + testInterface = TestSuiteUnsupported + 
(TestTypeUnknown + "" + `mkVersion []`), + testName = `UnqualComponentName ""`}}}, + CondBranch + {condBranchCondition = `CNot (Var (OS Windows))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "unix"` + AnyVersion], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "unix"` + AnyVersion], + virtualModules = []}, + testInterface = TestSuiteUnsupported + (TestTypeUnknown + "" + `mkVersion []`), + testName = `UnqualComponentName ""`}}}], + condTreeConstraints = [Dependency + `PackageName "base"` + (WildcardVersion `mkVersion [4]`), + Dependency `PackageName "directory"` AnyVersion, + Dependency + `PackageName "hashable"` + (OrLaterVersion `mkVersion [1,1,2,3]`), + Dependency `PackageName "binary"` AnyVersion, + Dependency `PackageName "filepath"` AnyVersion, + Dependency + `PackageName "process"` + (OrLaterVersion `mkVersion [1,1]`), + Dependency + `PackageName "unordered-containers"` + (OrLaterVersion `mkVersion [0,2,1]`), + Dependency `PackageName "bytestring"` AnyVersion, + Dependency + `PackageName "utf8-string"` 
+ (OrLaterVersion `mkVersion [0,3]`), + Dependency `PackageName "time"` AnyVersion, + Dependency `PackageName "random"` AnyVersion, + Dependency `PackageName "js-jquery"` AnyVersion, + Dependency `PackageName "js-flot"` AnyVersion, + Dependency + `PackageName "transformers"` + (OrLaterVersion `mkVersion [0,2]`), + Dependency + `PackageName "deepseq"` + (OrLaterVersion `mkVersion [1,1]`), + Dependency + `PackageName "extra"` + (OrLaterVersion `mkVersion [1,4,8]`), + Dependency + `PackageName "QuickCheck"` + (OrLaterVersion `mkVersion [2,0]`)], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = ["src"], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [_×_ + GHC + ["-main-is", + "Test.main", + "-rtsopts"]], + otherExtensions = [], + otherLanguages = [], + otherModules = [`ModuleName ["Development","Make","All"]`, + `ModuleName ["Development","Make","Env"]`, + `ModuleName ["Development","Make","Parse"]`, + `ModuleName ["Development","Make","Rules"]`, + `ModuleName ["Development","Make","Type"]`, + `ModuleName ["Development","Ninja","All"]`, + `ModuleName ["Development","Ninja","Env"]`, + `ModuleName ["Development","Ninja","Lexer"]`, + `ModuleName ["Development","Ninja","Parse"]`, + `ModuleName ["Development","Ninja","Type"]`, + `ModuleName ["Development","Shake"]`, + `ModuleName ["Development","Shake","Args"]`, + `ModuleName 
["Development","Shake","ByteString"]`, + `ModuleName ["Development","Shake","Classes"]`, + `ModuleName ["Development","Shake","CmdOption"]`, + `ModuleName ["Development","Shake","Command"]`, + `ModuleName ["Development","Shake","Config"]`, + `ModuleName ["Development","Shake","Core"]`, + `ModuleName ["Development","Shake","Database"]`, + `ModuleName ["Development","Shake","Demo"]`, + `ModuleName ["Development","Shake","Derived"]`, + `ModuleName ["Development","Shake","Errors"]`, + `ModuleName ["Development","Shake","FileInfo"]`, + `ModuleName ["Development","Shake","FilePath"]`, + `ModuleName ["Development","Shake","FilePattern"]`, + `ModuleName ["Development","Shake","Forward"]`, + `ModuleName ["Development","Shake","Monad"]`, + `ModuleName ["Development","Shake","Pool"]`, + `ModuleName ["Development","Shake","Profile"]`, + `ModuleName ["Development","Shake","Progress"]`, + `ModuleName ["Development","Shake","Resource"]`, + `ModuleName ["Development","Shake","Rule"]`, + `ModuleName ["Development","Shake","Rules","Directory"]`, + `ModuleName ["Development","Shake","Rules","File"]`, + `ModuleName ["Development","Shake","Rules","Files"]`, + `ModuleName ["Development","Shake","Rules","Oracle"]`, + `ModuleName ["Development","Shake","Rules","OrderOnly"]`, + `ModuleName ["Development","Shake","Rules","Rerun"]`, + `ModuleName ["Development","Shake","Shake"]`, + `ModuleName ["Development","Shake","Special"]`, + `ModuleName ["Development","Shake","Storage"]`, + `ModuleName ["Development","Shake","Types"]`, + `ModuleName ["Development","Shake","Util"]`, + `ModuleName ["Development","Shake","Value"]`, + `ModuleName ["General","Bilist"]`, + `ModuleName ["General","Binary"]`, + `ModuleName ["General","Cleanup"]`, + `ModuleName ["General","Concurrent"]`, + `ModuleName ["General","Extra"]`, + `ModuleName ["General","FileLock"]`, + `ModuleName ["General","Intern"]`, + `ModuleName ["General","Process"]`, + `ModuleName ["General","String"]`, + `ModuleName ["General","Template"]`, + 
`ModuleName ["General","Timing"]`, + `ModuleName ["Paths_shake"]`, + `ModuleName ["Run"]`, + `ModuleName ["Test","Assume"]`, + `ModuleName ["Test","Basic"]`, + `ModuleName ["Test","Benchmark"]`, + `ModuleName ["Test","C"]`, + `ModuleName ["Test","Cache"]`, + `ModuleName ["Test","Command"]`, + `ModuleName ["Test","Config"]`, + `ModuleName ["Test","Digest"]`, + `ModuleName ["Test","Directory"]`, + `ModuleName ["Test","Docs"]`, + `ModuleName ["Test","Errors"]`, + `ModuleName ["Test","FileLock"]`, + `ModuleName ["Test","FilePath"]`, + `ModuleName ["Test","FilePattern"]`, + `ModuleName ["Test","Files"]`, + `ModuleName ["Test","Forward"]`, + `ModuleName ["Test","Journal"]`, + `ModuleName ["Test","Lint"]`, + `ModuleName ["Test","Live"]`, + `ModuleName ["Test","Makefile"]`, + `ModuleName ["Test","Manual"]`, + `ModuleName ["Test","Match"]`, + `ModuleName ["Test","Monad"]`, + `ModuleName ["Test","Ninja"]`, + `ModuleName ["Test","Oracle"]`, + `ModuleName ["Test","OrderOnly"]`, + `ModuleName ["Test","Parallel"]`, + `ModuleName ["Test","Pool"]`, + `ModuleName ["Test","Progress"]`, + `ModuleName ["Test","Random"]`, + `ModuleName ["Test","Resources"]`, + `ModuleName ["Test","Self"]`, + `ModuleName ["Test","Tar"]`, + `ModuleName ["Test","Tup"]`, + `ModuleName ["Test","Type"]`, + `ModuleName ["Test","Unicode"]`, + `ModuleName ["Test","Util"]`, + `ModuleName ["Test","Verbosity"]`, + `ModuleName ["Test","Version"]`], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + (WildcardVersion + `mkVersion [4]`), + Dependency + `PackageName "directory"` + AnyVersion, + Dependency + `PackageName "hashable"` + (OrLaterVersion + `mkVersion [1,1,2,3]`), + Dependency + `PackageName "binary"` + AnyVersion, + Dependency + `PackageName "filepath"` + AnyVersion, + Dependency + `PackageName "process"` + (OrLaterVersion + `mkVersion [1,1]`), + Dependency + `PackageName "unordered-containers"` + 
(OrLaterVersion + `mkVersion [0,2,1]`), + Dependency + `PackageName "bytestring"` + AnyVersion, + Dependency + `PackageName "utf8-string"` + (OrLaterVersion + `mkVersion [0,3]`), + Dependency + `PackageName "time"` + AnyVersion, + Dependency + `PackageName "random"` + AnyVersion, + Dependency + `PackageName "js-jquery"` + AnyVersion, + Dependency + `PackageName "js-flot"` + AnyVersion, + Dependency + `PackageName "transformers"` + (OrLaterVersion + `mkVersion [0,2]`), + Dependency + `PackageName "deepseq"` + (OrLaterVersion + `mkVersion [1,1]`), + Dependency + `PackageName "extra"` + (OrLaterVersion + `mkVersion [1,4,8]`), + Dependency + `PackageName "QuickCheck"` + (OrLaterVersion + `mkVersion [2,0]`)], + virtualModules = []}, + testInterface = TestSuiteExeV10 + `mkVersion [1,0]` "Test.hs", + testName = `UnqualComponentName ""`}}], + genPackageFlags = [MkFlag + {flagDefault = False, + flagDescription = "Obtain FileTime using portable functions", + flagManual = True, + flagName = `FlagName "portable"`}], + packageDescription = PackageDescription + {author = "Neil Mitchell ", + benchmarks = [], + bugReports = "https://github.com/ndmitchell/shake/issues", + buildTypeRaw = Just Simple, + category = "Development, Shake", + copyright = "Neil Mitchell 2011-2017", + customFieldsPD = [], + dataDir = "", + dataFiles = ["html/viz.js", + "html/profile.html", + "html/progress.html", + "html/shake.js", + "docs/manual/build.bat", + "docs/manual/Build.hs", + "docs/manual/build.sh", + "docs/manual/constants.c", + "docs/manual/constants.h", + "docs/manual/main.c"], + description = concat + ["Shake is a Haskell library for writing build systems - designed as a\n", + "replacement for @make@. See \"Development.Shake\" for an introduction,\n", + "including an example. Further examples are included in the Cabal tarball,\n", + "under the @Examples@ directory. 
The homepage contains links to a user\n", + "manual, an academic paper and further information:\n", + "\n", + "\n", + "To use Shake the user writes a Haskell program\n", + "that imports \"Development.Shake\", defines some build rules, and calls\n", + "the 'Development.Shake.shakeArgs' function. Thanks to do notation and infix\n", + "operators, a simple Shake build system\n", + "is not too dissimilar from a simple Makefile. However, as build systems\n", + "get more complex, Shake is able to take advantage of the excellent\n", + "abstraction facilities offered by Haskell and easily support much larger\n", + "projects. The Shake library provides all the standard features available in other\n", + "build systems, including automatic parallelism and minimal rebuilds.\n", + "Shake also provides more accurate dependency tracking, including seamless\n", + "support for generated files, and dependencies on system information\n", + "(e.g. compiler version)."], + executables = [], + extraDocFiles = ["CHANGES.txt", "README.md"], + extraSrcFiles = ["src/Test/C/constants.c", + "src/Test/C/constants.h", + "src/Test/C/main.c", + "src/Test/MakeTutor/Makefile", + "src/Test/MakeTutor/hellofunc.c", + "src/Test/MakeTutor/hellomake.c", + "src/Test/MakeTutor/hellomake.h", + "src/Test/Tar/list.txt", + "src/Test/Ninja/*.ninja", + "src/Test/Ninja/subdir/*.ninja", + "src/Test/Ninja/*.output", + "src/Test/Progress/*.prog", + "src/Test/Tup/hello.c", + "src/Test/Tup/root.cfg", + "src/Test/Tup/newmath/root.cfg", + "src/Test/Tup/newmath/square.c", + "src/Test/Tup/newmath/square.h", + "src/Paths.hs", + "docs/Manual.md", + "docs/shake-progress.png"], + extraTmpFiles = [], + foreignLibs = [], + homepage = "http://shakebuild.com", + library = Nothing, + licenseFiles = ["LICENSE"], + licenseRaw = Right BSD3, + maintainer = "Neil Mitchell ", + package = PackageIdentifier + {pkgName = `PackageName "shake"`, + pkgVersion = `mkVersion [0,15,11]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = 
[SourceRepo + {repoBranch = Nothing, + repoKind = RepoHead, + repoLocation = Just + "https://github.com/ndmitchell/shake.git", + repoModule = Nothing, + repoSubdir = Nothing, + repoTag = Nothing, + repoType = Just Git}], + specVersionRaw = Right (OrLaterVersion `mkVersion [1,18]`), + stability = "", + subLibraries = [], + synopsis = "Build system library, like Make, but more accurate dependencies.", + testSuites = [], + testedWith = [_×_ GHC (ThisVersion `mkVersion [8,0,1]`), + _×_ GHC (ThisVersion `mkVersion [7,10,3]`), + _×_ GHC (ThisVersion `mkVersion [7,8,4]`), + _×_ GHC (ThisVersion `mkVersion [7,6,3]`), + _×_ GHC (ThisVersion `mkVersion [7,4,2]`)]}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/shake.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/shake.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/shake.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/shake.format 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,411 @@ +cabal-version: >=1.18 +name: shake +version: 0.15.11 +license: BSD3 +license-file: LICENSE +copyright: Neil Mitchell 2011-2017 +maintainer: Neil Mitchell +author: Neil Mitchell +tested-with: ghc ==8.0.1 ghc ==7.10.3 ghc ==7.8.4 ghc ==7.6.3 + ghc ==7.4.2 +homepage: http://shakebuild.com +bug-reports: https://github.com/ndmitchell/shake/issues +synopsis: Build system library, like Make, but more accurate dependencies. +description: + Shake is a Haskell library for writing build systems - designed as a + replacement for @make@. See "Development.Shake" for an introduction, + including an example. Further examples are included in the Cabal tarball, + under the @Examples@ directory. The homepage contains links to a user + manual, an academic paper and further information: + + . 
+ To use Shake the user writes a Haskell program + that imports "Development.Shake", defines some build rules, and calls + the 'Development.Shake.shakeArgs' function. Thanks to do notation and infix + operators, a simple Shake build system + is not too dissimilar from a simple Makefile. However, as build systems + get more complex, Shake is able to take advantage of the excellent + abstraction facilities offered by Haskell and easily support much larger + projects. The Shake library provides all the standard features available in other + build systems, including automatic parallelism and minimal rebuilds. + Shake also provides more accurate dependency tracking, including seamless + support for generated files, and dependencies on system information + (e.g. compiler version). +category: Development, Shake +build-type: Simple +data-files: + html/viz.js + html/profile.html + html/progress.html + html/shake.js + docs/manual/build.bat + docs/manual/Build.hs + docs/manual/build.sh + docs/manual/constants.c + docs/manual/constants.h + docs/manual/main.c +extra-source-files: + src/Test/C/constants.c + src/Test/C/constants.h + src/Test/C/main.c + src/Test/MakeTutor/Makefile + src/Test/MakeTutor/hellofunc.c + src/Test/MakeTutor/hellomake.c + src/Test/MakeTutor/hellomake.h + src/Test/Tar/list.txt + src/Test/Ninja/*.ninja + src/Test/Ninja/subdir/*.ninja + src/Test/Ninja/*.output + src/Test/Progress/*.prog + src/Test/Tup/hello.c + src/Test/Tup/root.cfg + src/Test/Tup/newmath/root.cfg + src/Test/Tup/newmath/square.c + src/Test/Tup/newmath/square.h + src/Paths.hs + docs/Manual.md + docs/shake-progress.png +extra-doc-files: CHANGES.txt + README.md + +source-repository head + type: git + location: https://github.com/ndmitchell/shake.git + +flag portable + description: + Obtain FileTime using portable functions + default: False + manual: True + +library + exposed-modules: + Development.Shake + Development.Shake.Classes + Development.Shake.Command + Development.Shake.Config + 
Development.Shake.FilePath + Development.Shake.Forward + Development.Shake.Rule + Development.Shake.Util + hs-source-dirs: src + other-modules: + Development.Ninja.Env + Development.Ninja.Lexer + Development.Ninja.Parse + Development.Ninja.Type + Development.Shake.Args + Development.Shake.ByteString + Development.Shake.Core + Development.Shake.CmdOption + Development.Shake.Database + Development.Shake.Demo + Development.Shake.Derived + Development.Shake.Errors + Development.Shake.FileInfo + Development.Shake.FilePattern + Development.Shake.Monad + Development.Shake.Pool + Development.Shake.Profile + Development.Shake.Progress + Development.Shake.Resource + Development.Shake.Rules.Directory + Development.Shake.Rules.File + Development.Shake.Rules.Files + Development.Shake.Rules.Oracle + Development.Shake.Rules.OrderOnly + Development.Shake.Rules.Rerun + Development.Shake.Shake + Development.Shake.Special + Development.Shake.Storage + Development.Shake.Types + Development.Shake.Value + General.Bilist + General.Binary + General.Cleanup + General.Concurrent + General.Extra + General.FileLock + General.Intern + General.Process + General.String + General.Template + General.Timing + Paths_shake + default-language: Haskell2010 + build-depends: + base >=4.5, + directory -any, + hashable >=1.1.2.3, + binary -any, + filepath -any, + process >=1.1, + unordered-containers >=0.2.1, + bytestring -any, + utf8-string >=0.3, + time -any, + random -any, + js-jquery -any, + js-flot -any, + transformers >=0.2, + extra >=1.4.8, + deepseq >=1.1 + + if flag(portable) + cpp-options: -DPORTABLE + + if impl(ghc <7.6) + build-depends: + old-time -any + else + + if !os(windows) + build-depends: + unix >=2.5.1 + + if !os(windows) + build-depends: + unix -any + +executable shake + main-is: Run.hs + hs-source-dirs: src + other-modules: + Development.Make.All + Development.Make.Env + Development.Make.Parse + Development.Make.Rules + Development.Make.Type + Development.Ninja.All + 
Development.Ninja.Env + Development.Ninja.Lexer + Development.Ninja.Parse + Development.Ninja.Type + Development.Shake + Development.Shake.Args + Development.Shake.ByteString + Development.Shake.Classes + Development.Shake.CmdOption + Development.Shake.Command + Development.Shake.Core + Development.Shake.Database + Development.Shake.Demo + Development.Shake.Derived + Development.Shake.Errors + Development.Shake.FileInfo + Development.Shake.FilePath + Development.Shake.FilePattern + Development.Shake.Forward + Development.Shake.Monad + Development.Shake.Pool + Development.Shake.Profile + Development.Shake.Progress + Development.Shake.Resource + Development.Shake.Rule + Development.Shake.Rules.Directory + Development.Shake.Rules.File + Development.Shake.Rules.Files + Development.Shake.Rules.Oracle + Development.Shake.Rules.OrderOnly + Development.Shake.Rules.Rerun + Development.Shake.Shake + Development.Shake.Special + Development.Shake.Storage + Development.Shake.Types + Development.Shake.Value + General.Bilist + General.Binary + General.Cleanup + General.Concurrent + General.Extra + General.FileLock + General.Intern + General.Process + General.String + General.Template + General.Timing + Paths_shake + Run + default-language: Haskell2010 + ghc-options: -main-is Run.main -rtsopts + build-depends: + base ==4.*, + directory -any, + hashable >=1.1.2.3, + binary -any, + filepath -any, + process >=1.1, + unordered-containers >=0.2.1, + bytestring -any, + utf8-string >=0.3, + time -any, + random -any, + js-jquery -any, + js-flot -any, + transformers >=0.2, + extra >=1.4.8, + deepseq >=1.1, + primitive -any + + if impl(ghc >=7.8) + ghc-options: -threaded "-with-rtsopts=-I0 -qg -qb" + + if flag(portable) + cpp-options: -DPORTABLE + + if impl(ghc <7.6) + build-depends: + old-time -any + else + + if !os(windows) + build-depends: + unix >=2.5.1 + + if !os(windows) + build-depends: + unix -any + +test-suite shake-test + type: exitcode-stdio-1.0 + main-is: Test.hs + 
hs-source-dirs: src + other-modules: + Development.Make.All + Development.Make.Env + Development.Make.Parse + Development.Make.Rules + Development.Make.Type + Development.Ninja.All + Development.Ninja.Env + Development.Ninja.Lexer + Development.Ninja.Parse + Development.Ninja.Type + Development.Shake + Development.Shake.Args + Development.Shake.ByteString + Development.Shake.Classes + Development.Shake.CmdOption + Development.Shake.Command + Development.Shake.Config + Development.Shake.Core + Development.Shake.Database + Development.Shake.Demo + Development.Shake.Derived + Development.Shake.Errors + Development.Shake.FileInfo + Development.Shake.FilePath + Development.Shake.FilePattern + Development.Shake.Forward + Development.Shake.Monad + Development.Shake.Pool + Development.Shake.Profile + Development.Shake.Progress + Development.Shake.Resource + Development.Shake.Rule + Development.Shake.Rules.Directory + Development.Shake.Rules.File + Development.Shake.Rules.Files + Development.Shake.Rules.Oracle + Development.Shake.Rules.OrderOnly + Development.Shake.Rules.Rerun + Development.Shake.Shake + Development.Shake.Special + Development.Shake.Storage + Development.Shake.Types + Development.Shake.Util + Development.Shake.Value + General.Bilist + General.Binary + General.Cleanup + General.Concurrent + General.Extra + General.FileLock + General.Intern + General.Process + General.String + General.Template + General.Timing + Paths_shake + Run + Test.Assume + Test.Basic + Test.Benchmark + Test.C + Test.Cache + Test.Command + Test.Config + Test.Digest + Test.Directory + Test.Docs + Test.Errors + Test.FileLock + Test.FilePath + Test.FilePattern + Test.Files + Test.Forward + Test.Journal + Test.Lint + Test.Live + Test.Makefile + Test.Manual + Test.Match + Test.Monad + Test.Ninja + Test.Oracle + Test.OrderOnly + Test.Parallel + Test.Pool + Test.Progress + Test.Random + Test.Resources + Test.Self + Test.Tar + Test.Tup + Test.Type + Test.Unicode + Test.Util + Test.Verbosity + 
Test.Version + default-language: Haskell2010 + ghc-options: -main-is Test.main -rtsopts + build-depends: + base ==4.*, + directory -any, + hashable >=1.1.2.3, + binary -any, + filepath -any, + process >=1.1, + unordered-containers >=0.2.1, + bytestring -any, + utf8-string >=0.3, + time -any, + random -any, + js-jquery -any, + js-flot -any, + transformers >=0.2, + deepseq >=1.1, + extra >=1.4.8, + QuickCheck >=2.0 + + if impl(ghc >=7.6) + ghc-options: -with-rtsopts=-K1K + + if impl(ghc >=7.8) + ghc-options: -threaded + + if flag(portable) + cpp-options: -DPORTABLE + + if impl(ghc <7.6) + build-depends: + old-time -any + else + + if !os(windows) + build-depends: + unix >=2.5.1 + + if !os(windows) + build-depends: + unix -any diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-1.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-1.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-1.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-1.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,9 @@ +cabal-version: 2.0 +name: spdx +version: 0 +synopsis: testing positive parsing of spdx identifiers +build-type: Simple +license: BSD3 + +library + default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-1.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-1.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-1.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-1.expr 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,92 @@ +GenericPackageDescription + 
{condBenchmarks = [], + condExecutables = [], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [], + condTreeConstraints = [], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "", + benchmarks = [], + bugReports = "", + buildTypeRaw = Just Simple, + category = "", + copyright = "", + customFieldsPD = [], + dataDir = "", + dataFiles = [], + description = "", + executables = [], + extraDocFiles = [], + extraSrcFiles = [], + extraTmpFiles = [], + foreignLibs = [], + homepage = "", + library = Nothing, + licenseFiles = [], + licenseRaw = Right BSD3, + maintainer = "", + package = PackageIdentifier + {pkgName = `PackageName "spdx"`, + pkgVersion = `mkVersion [0]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = [], + specVersionRaw = Left `mkVersion [2,0]`, + stability = "", + subLibraries = [], + synopsis = "testing positive 
parsing of spdx identifiers", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-1.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-1.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-1.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-1.format 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,9 @@ +cabal-version: 2.0 +name: spdx +version: 0 +license: BSD3 +synopsis: testing positive parsing of spdx identifiers +build-type: Simple + +library + default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-2.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-2.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-2.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-2.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,9 @@ +cabal-version: 2.2 +name: spdx +version: 0 +synopsis: testing positive parsing of spdx identifiers +build-type: Simple +license: AGPL-1.0 + +library + default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-2.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-2.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-2.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-2.expr 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 
+1,93 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [], + condTreeConstraints = [], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "", + benchmarks = [], + bugReports = "", + buildTypeRaw = Just Simple, + category = "", + copyright = "", + customFieldsPD = [], + dataDir = "", + dataFiles = [], + description = "", + executables = [], + extraDocFiles = [], + extraSrcFiles = [], + extraTmpFiles = [], + foreignLibs = [], + homepage = "", + library = Nothing, + licenseFiles = [], + licenseRaw = Left + (License (ELicense (ELicenseId AGPL_1_0) Nothing)), + maintainer = "", + package = PackageIdentifier + {pkgName = `PackageName "spdx"`, + pkgVersion = `mkVersion [0]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = [], + specVersionRaw = Left 
`mkVersion [2,2]`, + stability = "", + subLibraries = [], + synopsis = "testing positive parsing of spdx identifiers", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-2.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-2.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-2.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-2.format 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,9 @@ +cabal-version: 2.2 +name: spdx +version: 0 +license: AGPL-1.0 +synopsis: testing positive parsing of spdx identifiers +build-type: Simple + +library + default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-3.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-3.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-3.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-3.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,9 @@ +cabal-version: 2.4 +name: spdx +version: 0 +synopsis: testing positive parsing of spdx identifiers +build-type: Simple +license: AGPL-1.0-only + +library + default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-3.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-3.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-3.expr 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-3.expr 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,93 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [], + condTreeConstraints = [], + condTreeData = Library + {exposedModules = [], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "", + benchmarks = [], + bugReports = "", + buildTypeRaw = Just Simple, + category = "", + copyright = "", + customFieldsPD = [], + dataDir = "", + dataFiles = [], + description = "", + executables = [], + extraDocFiles = [], + extraSrcFiles = [], + extraTmpFiles = [], + foreignLibs = [], + homepage = "", + library = Nothing, + licenseFiles = [], + licenseRaw = Left + (License (ELicense (ELicenseId AGPL_1_0_only) Nothing)), + maintainer = "", + package = PackageIdentifier + 
{pkgName = `PackageName "spdx"`, + pkgVersion = `mkVersion [0]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = [], + specVersionRaw = Left `mkVersion [2,4]`, + stability = "", + subLibraries = [], + synopsis = "testing positive parsing of spdx identifiers", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-3.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-3.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-3.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/spdx-3.format 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,9 @@ +cabal-version: 2.4 +name: spdx +version: 0 +license: AGPL-1.0-only +synopsis: testing positive parsing of spdx identifiers +build-type: Simple + +library + default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/th-lift-instances.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/th-lift-instances.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/th-lift-instances.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/th-lift-instances.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,76 @@ +name: th-lift-instances +version: 0.1.4 +x-revision: 1 +license: BSD3 +cabal-version: >= 1.10 +license-file: LICENSE +author: Benno Fünfstück +maintainer: Benno Fünfstück +stability: experimental +homepage: http://github.com/bennofs/th-lift-instances/ +bug-reports: http://github.com/bennofs/th-lift-instances/issues +copyright: Copyright (C) 2013-2014 Benno Fünfstück +synopsis: Lift instances for 
template-haskell for common data types. +description: Most data types in haskell platform do not have Lift instances. This package provides orphan instances + for containers, text, bytestring and vector. +build-type: Custom +category: Template Haskell + +extra-source-files: + .ghci + .gitignore + .travis.yml + .vim.custom + README.md + +source-repository head + type: git + location: https://github.com/bennofs/th-lift-instances.git + +library + hs-source-dirs: src + default-language: Haskell2010 + ghc-options: -Wall -fwarn-tabs + build-depends: + base >= 4.4 && < 5 + , template-haskell < 2.10 + , th-lift + , containers >= 0.4 && < 0.6 + , vector >= 0.9 && < 0.11 + , text >= 0.11 && < 1.3 + , bytestring >= 0.9 && < 0.11 + exposed-modules: + Instances.TH.Lift + other-extensions: TemplateHaskell + +test-suite tests + type: exitcode-stdio-1.0 + main-is: Main.hs + other-modules: + Data + default-language: Haskell2010 + build-depends: + base + , template-haskell <2.10 + , containers >= 0.4 && < 0.6 + , vector >= 0.9 && < 0.11 + , text >= 0.11 && < 1.2 + , bytestring >= 0.9 && < 0.11 + , th-lift-instances + , QuickCheck >= 2.6 && < 2.8 + hs-source-dirs: tests + other-extensions: TemplateHaskell + +test-suite doctests + type: exitcode-stdio-1.0 + main-is: doctests.hs + default-language: Haskell2010 + build-depends: + base + , directory >= 1.0 + , doctest >= 0.9.1 + , filepath + ghc-options: -Wall -threaded + if impl(ghc<7.6.1) + ghc-options: -Werror + hs-source-dirs: tests diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/th-lift-instances.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/th-lift-instances.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/th-lift-instances.expr 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/th-lift-instances.expr 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,431 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [4,4]`) + (EarlierVersion `mkVersion [5]`)), + Dependency + `PackageName "template-haskell"` + (EarlierVersion `mkVersion [2,10]`), + Dependency `PackageName "th-lift"` AnyVersion, + Dependency + `PackageName "containers"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [0,4]`) + (EarlierVersion `mkVersion [0,6]`)), + Dependency + `PackageName "vector"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [0,9]`) + (EarlierVersion `mkVersion [0,11]`)), + Dependency + `PackageName "text"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [0,11]`) + (EarlierVersion `mkVersion [1,3]`)), + Dependency + `PackageName "bytestring"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [0,9]`) + (EarlierVersion `mkVersion [0,11]`))], + condTreeData = Library + {exposedModules = [`ModuleName ["Instances","TH","Lift"]`], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = ["src"], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [_×_ + GHC + 
["-Wall", "-fwarn-tabs"]], + otherExtensions = [EnableExtension + TemplateHaskell], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [4,4]`) + (EarlierVersion + `mkVersion [5]`)), + Dependency + `PackageName "template-haskell"` + (EarlierVersion + `mkVersion [2,10]`), + Dependency + `PackageName "th-lift"` + AnyVersion, + Dependency + `PackageName "containers"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,4]`) + (EarlierVersion + `mkVersion [0,6]`)), + Dependency + `PackageName "vector"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,9]`) + (EarlierVersion + `mkVersion [0,11]`)), + Dependency + `PackageName "text"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,11]`) + (EarlierVersion + `mkVersion [1,3]`)), + Dependency + `PackageName "bytestring"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,9]`) + (EarlierVersion + `mkVersion [0,11]`))], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [_×_ + `UnqualComponentName "tests"` + CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency `PackageName "base"` AnyVersion, + Dependency + `PackageName "template-haskell"` + (EarlierVersion `mkVersion [2,10]`), + Dependency + `PackageName "containers"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [0,4]`) + (EarlierVersion `mkVersion [0,6]`)), + Dependency + `PackageName "vector"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [0,9]`) + (EarlierVersion `mkVersion [0,11]`)), + Dependency + `PackageName "text"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [0,11]`) + (EarlierVersion `mkVersion [1,2]`)), + Dependency + `PackageName "bytestring"` + (IntersectVersionRanges + 
(OrLaterVersion `mkVersion [0,9]`) + (EarlierVersion `mkVersion [0,11]`)), + Dependency + `PackageName "th-lift-instances"` AnyVersion, + Dependency + `PackageName "QuickCheck"` + (IntersectVersionRanges + (OrLaterVersion `mkVersion [2,6]`) + (EarlierVersion `mkVersion [2,8]`))], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = ["tests"], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [EnableExtension + TemplateHaskell], + otherLanguages = [], + otherModules = [`ModuleName ["Data"]`], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + AnyVersion, + Dependency + `PackageName "template-haskell"` + (EarlierVersion + `mkVersion [2,10]`), + Dependency + `PackageName "containers"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,4]`) + (EarlierVersion + `mkVersion [0,6]`)), + Dependency + `PackageName "vector"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,9]`) + (EarlierVersion + `mkVersion [0,11]`)), + Dependency + `PackageName "text"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,11]`) + (EarlierVersion + `mkVersion [1,2]`)), + Dependency + `PackageName "bytestring"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [0,9]`) + (EarlierVersion + `mkVersion [0,11]`)), + Dependency + `PackageName 
"th-lift-instances"` + AnyVersion, + Dependency + `PackageName "QuickCheck"` + (IntersectVersionRanges + (OrLaterVersion + `mkVersion [2,6]`) + (EarlierVersion + `mkVersion [2,8]`))], + virtualModules = []}, + testInterface = TestSuiteExeV10 + `mkVersion [1,0]` "Main.hs", + testName = `UnqualComponentName ""`}}, + _×_ + `UnqualComponentName "doctests"` + CondNode + {condTreeComponents = [CondBranch + {condBranchCondition = `Var (Impl GHC (EarlierVersion (mkVersion [7,6,1])))`, + condBranchIfFalse = Nothing, + condBranchIfTrue = CondNode + {condTreeComponents = [], + condTreeConstraints = [], + condTreeData = TestSuite + {testBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [_×_ + GHC + ["-Werror"]], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [], + virtualModules = []}, + testInterface = TestSuiteUnsupported + (TestTypeUnknown + "" + `mkVersion []`), + testName = `UnqualComponentName ""`}}}], + condTreeConstraints = [Dependency `PackageName "base"` AnyVersion, + Dependency + `PackageName "directory"` + (OrLaterVersion `mkVersion [1,0]`), + Dependency + `PackageName "doctest"` + (OrLaterVersion `mkVersion [0,9,1]`), + Dependency `PackageName "filepath"` AnyVersion], + condTreeData = TestSuite + {testBuildInfo = 
BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Just Haskell2010, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = ["tests"], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [_×_ + GHC + ["-Wall", "-threaded"]], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + AnyVersion, + Dependency + `PackageName "directory"` + (OrLaterVersion + `mkVersion [1,0]`), + Dependency + `PackageName "doctest"` + (OrLaterVersion + `mkVersion [0,9,1]`), + Dependency + `PackageName "filepath"` + AnyVersion], + virtualModules = []}, + testInterface = TestSuiteExeV10 + `mkVersion [1,0]` "doctests.hs", + testName = `UnqualComponentName ""`}}], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "Benno F\252nfst\252ck", + benchmarks = [], + bugReports = "http://github.com/bennofs/th-lift-instances/issues", + buildTypeRaw = Just Custom, + category = "Template Haskell", + copyright = "Copyright (C) 2013-2014 Benno F\252nfst\252ck", + customFieldsPD = [_×_ "x-revision" "1"], + dataDir = "", + dataFiles = [], + description = concat + ["Most data types in haskell platform do not have Lift instances. 
This package provides orphan instances\n", + "for containers, text, bytestring and vector."], + executables = [], + extraDocFiles = [], + extraSrcFiles = [".ghci", + ".gitignore", + ".travis.yml", + ".vim.custom", + "README.md"], + extraTmpFiles = [], + foreignLibs = [], + homepage = "http://github.com/bennofs/th-lift-instances/", + library = Nothing, + licenseFiles = ["LICENSE"], + licenseRaw = Right BSD3, + maintainer = "Benno F\252nfst\252ck ", + package = PackageIdentifier + {pkgName = `PackageName "th-lift-instances"`, + pkgVersion = `mkVersion [0,1,4]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = [SourceRepo + {repoBranch = Nothing, + repoKind = RepoHead, + repoLocation = Just + "https://github.com/bennofs/th-lift-instances.git", + repoModule = Nothing, + repoSubdir = Nothing, + repoTag = Nothing, + repoType = Just Git}], + specVersionRaw = Right (OrLaterVersion `mkVersion [1,10]`), + stability = "experimental", + subLibraries = [], + synopsis = "Lift instances for template-haskell for common data types.", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/th-lift-instances.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/th-lift-instances.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/th-lift-instances.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/th-lift-instances.format 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,78 @@ +th-lift-instances.cabal:15:9: Tabs used as indentation at 15:9 +cabal-version: >=1.10 +name: th-lift-instances +version: 0.1.4 +license: BSD3 +license-file: LICENSE +copyright: Copyright (C) 2013-2014 Benno Fünfstück +maintainer: Benno Fünfstück +author: Benno Fünfstück +stability: experimental +homepage: 
http://github.com/bennofs/th-lift-instances/ +bug-reports: http://github.com/bennofs/th-lift-instances/issues +synopsis: Lift instances for template-haskell for common data types. +description: + Most data types in haskell platform do not have Lift instances. This package provides orphan instances + for containers, text, bytestring and vector. +category: Template Haskell +x-revision: 1 +build-type: Custom +extra-source-files: + .ghci + .gitignore + .travis.yml + .vim.custom + README.md + +source-repository head + type: git + location: https://github.com/bennofs/th-lift-instances.git + +library + exposed-modules: + Instances.TH.Lift + hs-source-dirs: src + default-language: Haskell2010 + other-extensions: TemplateHaskell + ghc-options: -Wall -fwarn-tabs + build-depends: + base >=4.4 && <5, + template-haskell <2.10, + th-lift -any, + containers >=0.4 && <0.6, + vector >=0.9 && <0.11, + text >=0.11 && <1.3, + bytestring >=0.9 && <0.11 + +test-suite tests + type: exitcode-stdio-1.0 + main-is: Main.hs + hs-source-dirs: tests + other-modules: + Data + default-language: Haskell2010 + other-extensions: TemplateHaskell + build-depends: + base -any, + template-haskell <2.10, + containers >=0.4 && <0.6, + vector >=0.9 && <0.11, + text >=0.11 && <1.2, + bytestring >=0.9 && <0.11, + th-lift-instances -any, + QuickCheck >=2.6 && <2.8 + +test-suite doctests + type: exitcode-stdio-1.0 + main-is: doctests.hs + hs-source-dirs: tests + default-language: Haskell2010 + ghc-options: -Wall -threaded + build-depends: + base -any, + directory >=1.0, + doctest >=0.9.1, + filepath -any + + if impl(ghc <7.6.1) + ghc-options: -Werror diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/wl-pprint-indef.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/wl-pprint-indef.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/wl-pprint-indef.cabal 1970-01-01 
00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/wl-pprint-indef.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,34 @@ +Name: wl-pprint-indef +Version: 1.2 +Cabal-Version: >=1.6 +Synopsis: The Wadler/Leijen Pretty Printer +Category: Text +Description: + This is a pretty printing library based on Wadler's paper "A Prettier + Printer". See the haddocks for full info. This version allows the + library user to declare overlapping instances of the 'Pretty' class. +License: BSD3 +License-file: LICENSE +Author: Daan Leijen +Maintainer: Noam Lewis +Build-Type: Simple + +Executable wl-pprint-string-example + Main-is: Main.hs + Hs-Source-Dirs: example-string + Other-Modules: StringImpl + Build-Depends: base < 5, + str-string >= 0.1.0.0, + wl-pprint-indef + Mixins: wl-pprint-indef requires (Text.PrettyPrint.Leijen.Str as StringImpl) + +Library + Exposed-Modules: Text.PrettyPrint.Leijen + Signatures: Text.PrettyPrint.Leijen.Str + Mixins: str-sig requires (Str as Text.PrettyPrint.Leijen.Str) + Build-Depends: base < 5, + str-sig >= 0.1.0.0 + +source-repository head + type: git + location: git@github.com:danidiaz/wl-pprint-indef.git diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/wl-pprint-indef.expr cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/wl-pprint-indef.expr --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/wl-pprint-indef.expr 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/wl-pprint-indef.expr 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,182 @@ +GenericPackageDescription + {condBenchmarks = [], + condExecutables = [_×_ + `UnqualComponentName "wl-pprint-string-example"` + CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "base"` + 
(EarlierVersion `mkVersion [5]`), + Dependency + `PackageName "str-string"` + (OrLaterVersion `mkVersion [0,1,0,0]`), + Dependency + `PackageName "wl-pprint-indef"` AnyVersion], + condTreeData = Executable + {buildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = ["example-string"], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [`ModuleName ["StringImpl"]`], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + (EarlierVersion + `mkVersion [5]`), + Dependency + `PackageName "str-string"` + (OrLaterVersion + `mkVersion [0,1,0,0]`), + Dependency + `PackageName "wl-pprint-indef"` + AnyVersion], + virtualModules = []}, + exeName = `UnqualComponentName "wl-pprint-string-example"`, + exeScope = ExecutablePublic, + modulePath = "Main.hs"}}], + condForeignLibs = [], + condLibrary = Just + CondNode + {condTreeComponents = [], + condTreeConstraints = [Dependency + `PackageName "base"` + (EarlierVersion `mkVersion [5]`), + Dependency + `PackageName "str-sig"` + (OrLaterVersion `mkVersion [0,1,0,0]`)], + condTreeData = Library + {exposedModules = [`ModuleName ["Text","PrettyPrint","Leijen"]`], + libBuildInfo = BuildInfo + {asmOptions = [], + asmSources = [], + autogenModules = [], + buildToolDepends = [], + buildTools = [], + buildable = True, + 
cSources = [], + ccOptions = [], + cmmOptions = [], + cmmSources = [], + cppOptions = [], + customFieldsBI = [], + cxxOptions = [], + cxxSources = [], + defaultExtensions = [], + defaultLanguage = Nothing, + extraBundledLibs = [], + extraFrameworkDirs = [], + extraGHCiLibs = [], + extraLibDirs = [], + extraLibFlavours = [], + extraLibs = [], + frameworks = [], + hsSourceDirs = [], + includeDirs = [], + includes = [], + installIncludes = [], + jsSources = [], + ldOptions = [], + mixins = [], + oldExtensions = [], + options = [], + otherExtensions = [], + otherLanguages = [], + otherModules = [], + pkgconfigDepends = [], + profOptions = [], + sharedOptions = [], + staticOptions = [], + targetBuildDepends = [Dependency + `PackageName "base"` + (EarlierVersion + `mkVersion [5]`), + Dependency + `PackageName "str-sig"` + (OrLaterVersion + `mkVersion [0,1,0,0]`)], + virtualModules = []}, + libExposed = True, + libName = Nothing, + reexportedModules = [], + signatures = []}}, + condSubLibraries = [], + condTestSuites = [], + genPackageFlags = [], + packageDescription = PackageDescription + {author = "Daan Leijen", + benchmarks = [], + bugReports = "", + buildTypeRaw = Just Simple, + category = "Text", + copyright = "", + customFieldsPD = [], + dataDir = "", + dataFiles = [], + description = concat + ["This is a pretty printing library based on Wadler's paper \"A Prettier\n", + "Printer\". See the haddocks for full info. 
This version allows the\n", + "library user to declare overlapping instances of the 'Pretty' class."], + executables = [], + extraDocFiles = [], + extraSrcFiles = [], + extraTmpFiles = [], + foreignLibs = [], + homepage = "", + library = Nothing, + licenseFiles = ["LICENSE"], + licenseRaw = Right BSD3, + maintainer = "Noam Lewis ", + package = PackageIdentifier + {pkgName = `PackageName "wl-pprint-indef"`, + pkgVersion = `mkVersion [1,2]`}, + pkgUrl = "", + setupBuildInfo = Nothing, + sourceRepos = [SourceRepo + {repoBranch = Nothing, + repoKind = RepoHead, + repoLocation = Just + "git@github.com:danidiaz/wl-pprint-indef.git", + repoModule = Nothing, + repoSubdir = Nothing, + repoTag = Nothing, + repoType = Just Git}], + specVersionRaw = Right (OrLaterVersion `mkVersion [1,6]`), + stability = "", + subLibraries = [], + synopsis = "The Wadler/Leijen Pretty Printer", + testSuites = [], + testedWith = []}} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/wl-pprint-indef.format cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/wl-pprint-indef.format --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/regressions/wl-pprint-indef.format 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/regressions/wl-pprint-indef.format 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,38 @@ +wl-pprint-indef.cabal:28:3: The field "mixins" is available since Cabal [2,0] +wl-pprint-indef.cabal:27:3: The field "signatures" is available since Cabal [2,0] +wl-pprint-indef.cabal:23:3: The field "mixins" is available since Cabal [2,0] +cabal-version: >=1.6 +name: wl-pprint-indef +version: 1.2 +license: BSD3 +license-file: LICENSE +maintainer: Noam Lewis +author: Daan Leijen +synopsis: The Wadler/Leijen Pretty Printer +description: + This is a pretty printing library based on Wadler's paper "A Prettier + 
Printer". See the haddocks for full info. This version allows the + library user to declare overlapping instances of the 'Pretty' class. +category: Text +build-type: Simple + +source-repository head + type: git + location: git@github.com:danidiaz/wl-pprint-indef.git + +library + exposed-modules: + Text.PrettyPrint.Leijen + build-depends: + base <5, + str-sig >=0.1.0.0 + +executable wl-pprint-string-example + main-is: Main.hs + hs-source-dirs: example-string + other-modules: + StringImpl + build-depends: + base <5, + str-string >=0.1.0.0, + wl-pprint-indef -any diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/bom.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/bom.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/bom.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/bom.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,7 @@ +name: bom +version: 1 +cabal-version: >= 1.6 + +library + build-depends: base >= 4.9 && <4.10 + hs-source-dirs: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/bool.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/bool.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/bool.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/bool.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,12 @@ +name: bool +version: 1 +cabal-version: >= 1.6 + +flag foo + manual: true + +library + build-depends: base >= 4.9 && <4.10 + if flag(foo) + build-depends: containers + hs-source-dirs: . 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/deprecatedfield.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/deprecatedfield.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/deprecatedfield.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/deprecatedfield.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,7 @@ +name: deprecatedfield +version: 1 +cabal-version: >= 1.6 + +library + build-depends: base >= 4.9 && <4.10 + hs-source-dir: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/doubledash.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/doubledash.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/doubledash.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/doubledash.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,9 @@ +name: bool +version: 1 +cabal-version: >= 1.6 +extra-source-files: + README.md -- we include it + +library + build-depends: base >= 4.9 && <4.10 + hs-source-dirs: . 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/extratestmodule.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/extratestmodule.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/extratestmodule.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/extratestmodule.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,11 @@ +name: extramainis +version: 1 +cabal-version: >= 1.6 + +library + build-depends: base >= 4.9 && <4.10 + hs-source-dirs: . + +test-suite tests + type: exitcode-stdio-1.0 + test-module: Tests diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/gluedop.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/gluedop.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/gluedop.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/gluedop.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,9 @@ +name: gluedop +version: 1 +cabal-version: >= 1.6 + +library + build-depends: base >= 4.9 && <4.10 + if os(windows) &&!impl(ghc) + build-depends: containers + hs-source-dirs: . 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/multiplesingular.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/multiplesingular.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/multiplesingular.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/multiplesingular.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,8 @@ +name: multiplesingular +name: multiplesingular2 +version: 1 +cabal-version: >= 1.6 + +library + build-depends: base >= 4.9 && <4.10 + hs-source-dirs: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/nbsp.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/nbsp.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/nbsp.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/nbsp.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,7 @@ +name: nbsp +version: 1 +cabal-version: >= 1.6 + +library +  build-depends: base >= 4.9 && <4.10 + hs-source-dirs: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/newsyntax.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/newsyntax.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/newsyntax.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/newsyntax.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,6 @@ +name: newsyntax +version: 1 + +library + build-depends: base >= 4.9 && <4.10 + hs-source-dirs: . 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/oldsyntax.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/oldsyntax.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/oldsyntax.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/oldsyntax.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,6 @@ +name: oldsyntax +version: 1 +cabal-version: >= 1.6 + +build-depends: base >= 4.9 && <4.10 +hs-source-dirs: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/subsection.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/subsection.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/subsection.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/subsection.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,9 @@ +name: subsection +version: 1 +cabal-version: >= 1.6 + +library + build-depends: base >= 4.9 && <4.10 + hs-source-dirs: . + iff os(windows) + build-depends: containers diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/tab.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/tab.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/tab.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/tab.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,12 @@ +name: tab +version: 1 +cabal-version: >= 1.6 + +library + build-depends: { base >= 4.9 && <4.10 } + hs-source-dirs: . 
+ +test-suite tests { + type: exitcode-stdio-1.0 + main-is: Main.hs +} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/trailingfield.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/trailingfield.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/trailingfield.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/trailingfield.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,9 @@ +name: trailingfield +version: 1 +cabal-version: >= 1.6 + +library + build-depends: base >= 4.9 && <4.10 + hs-source-dirs: . + +description: No fields after sections diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/unknownfield.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/unknownfield.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/unknownfield.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/unknownfield.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,8 @@ +name: unknownfield +version: 1 +cabal-version: >= 1.6 + +library + build-depends: base >= 4.9 && <4.10 + hs-source-dirs: . 
+ xfield: x diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/unknownsection.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/unknownsection.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/unknownsection.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/unknownsection.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,10 @@ +name: unknownsection +version: 1 +cabal-version: >= 1.6 + +library + build-depends: base >= 4.9 && <4.10 + hs-source-dirs: . + +z + z-field: z diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/utf8.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/utf8.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/utf8.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/utf8.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,8 @@ +name: utf8 +author: Oleg Grönroos +version: 1 +cabal-version: >= 1.6 + +library + build-depends: base >= 4.9 && <4.10 + hs-source-dirs: . 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/versiontag.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/versiontag.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests/warnings/versiontag.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests/warnings/versiontag.cabal 2018-11-26 08:42:55.000000000 +0000 @@ -0,0 +1,7 @@ +name: versiontag +version: 1 +cabal-version: >= 1.6 + +library + build-depends: base >= 4.9 && <4.10-rc1 + hs-source-dirs: . diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/ParserTests.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/ParserTests.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,308 @@ +{-# LANGUAGE CPP #-} +module Main + ( main + ) where + +import Prelude () +import Prelude.Compat + +import Test.Tasty +import Test.Tasty.Golden.Advanced (goldenTest) +import Test.Tasty.HUnit + +import Control.Monad (void) +import Data.Algorithm.Diff (Diff (..), getGroupedDiff) +import Data.Maybe (isNothing) +import Distribution.PackageDescription (GenericPackageDescription) +import Distribution.PackageDescription.Parsec (parseGenericPackageDescription) +import Distribution.PackageDescription.PrettyPrint (showGenericPackageDescription) +import Distribution.Parsec.Common + (PWarnType (..), PWarning (..), showPError, showPWarning) +import Distribution.Parsec.ParseResult (runParseResult) +import Distribution.Utils.Generic (fromUTF8BS, toUTF8BS) +import System.FilePath (replaceExtension, ()) + +import qualified Data.ByteString as BS +import qualified Data.ByteString.Char8 as BS8 + +import 
qualified Distribution.InstalledPackageInfo as IPI +import qualified Distribution.ParseUtils as ReadP + +#ifdef MIN_VERSION_tree_diff +import Data.TreeDiff (toExpr) +import Data.TreeDiff.Golden (ediffGolden) +import Instances.TreeDiff () +#endif + +tests :: TestTree +tests = testGroup "parsec tests" + [ regressionTests + , warningTests + , errorTests + , ipiTests + ] + +------------------------------------------------------------------------------- +-- Warnings +------------------------------------------------------------------------------- + +-- Verify that we trigger warnings +warningTests :: TestTree +warningTests = testGroup "warnings triggered" + [ warningTest PWTLexBOM "bom.cabal" + , warningTest PWTLexNBSP "nbsp.cabal" + , warningTest PWTLexTab "tab.cabal" + , warningTest PWTUTF "utf8.cabal" + , warningTest PWTBoolCase "bool.cabal" + , warningTest PWTVersionTag "versiontag.cabal" + , warningTest PWTNewSyntax "newsyntax.cabal" + , warningTest PWTOldSyntax "oldsyntax.cabal" + , warningTest PWTDeprecatedField "deprecatedfield.cabal" + , warningTest PWTInvalidSubsection "subsection.cabal" + , warningTest PWTUnknownField "unknownfield.cabal" + , warningTest PWTUnknownSection "unknownsection.cabal" + , warningTest PWTTrailingFields "trailingfield.cabal" + , warningTest PWTDoubleDash "doubledash.cabal" + , warningTest PWTMultipleSingularField "multiplesingular.cabal" + -- TODO: not implemented yet + -- , warningTest PWTExtraTestModule "extratestmodule.cabal" + ] + +warningTest :: PWarnType -> FilePath -> TestTree +warningTest wt fp = testCase (show wt) $ do + contents <- BS.readFile $ "tests" "ParserTests" "warnings" fp + + let res = parseGenericPackageDescription contents + let (warns, x) = runParseResult res + + assertBool ("should parse successfully: " ++ show x) $ isRight x + + case warns of + [PWarning wt' _ _] -> assertEqual "warning type" wt wt' + [] -> assertFailure "got no warnings" + _ -> assertFailure $ "got multiple warnings: " ++ show warns + where + 
isRight (Right _) = True + isRight _ = False + +------------------------------------------------------------------------------- +-- Errors +------------------------------------------------------------------------------- + +errorTests :: TestTree +errorTests = testGroup "errors" + [ errorTest "common1.cabal" + , errorTest "common2.cabal" + , errorTest "common3.cabal" + , errorTest "leading-comma.cabal" + , errorTest "range-ge-wild.cabal" + , errorTest "forward-compat.cabal" + , errorTest "forward-compat2.cabal" + , errorTest "forward-compat3.cabal" + , errorTest "issue-5055.cabal" + , errorTest "issue-5055-2.cabal" + , errorTest "noVersion.cabal" + , errorTest "noVersion2.cabal" + , errorTest "spdx-1.cabal" + , errorTest "spdx-2.cabal" + , errorTest "spdx-3.cabal" + ] + +errorTest :: FilePath -> TestTree +errorTest fp = cabalGoldenTest fp correct $ do + contents <- BS.readFile input + let res = parseGenericPackageDescription contents + let (_, x) = runParseResult res + + return $ toUTF8BS $ case x of + Right gpd -> + "UNXPECTED SUCCESS\n" ++ + showGenericPackageDescription gpd + Left (v, errs) -> + unlines $ ("VERSION: " ++ show v) : map (showPError fp) errs + where + input = "tests" "ParserTests" "errors" fp + correct = replaceExtension input "errors" + +------------------------------------------------------------------------------- +-- Regressions +------------------------------------------------------------------------------- + +regressionTests :: TestTree +regressionTests = testGroup "regressions" + [ regressionTest "encoding-0.8.cabal" + , regressionTest "Octree-0.5.cabal" + , regressionTest "nothing-unicode.cabal" + , regressionTest "issue-774.cabal" + , regressionTest "generics-sop.cabal" + , regressionTest "elif.cabal" + , regressionTest "elif2.cabal" + , regressionTest "shake.cabal" + , regressionTest "common.cabal" + , regressionTest "common2.cabal" + , regressionTest "leading-comma.cabal" + , regressionTest "wl-pprint-indef.cabal" + , regressionTest 
"th-lift-instances.cabal" + , regressionTest "issue-5055.cabal" + , regressionTest "noVersion.cabal" + , regressionTest "spdx-1.cabal" + , regressionTest "spdx-2.cabal" + , regressionTest "spdx-3.cabal" + ] + +regressionTest :: FilePath -> TestTree +regressionTest fp = testGroup fp + [ formatGoldenTest fp + , formatRoundTripTest fp +#ifdef MIN_VERSION_tree_diff + , treeDiffGoldenTest fp +#endif + ] + +formatGoldenTest :: FilePath -> TestTree +formatGoldenTest fp = cabalGoldenTest "format" correct $ do + contents <- BS.readFile input + let res = parseGenericPackageDescription contents + let (warns, x) = runParseResult res + + return $ toUTF8BS $ case x of + Right gpd -> + unlines (map (showPWarning fp) warns) + ++ showGenericPackageDescription gpd + Left (_, errs) -> + unlines $ "ERROR" : map (showPError fp) errs + where + input = "tests" "ParserTests" "regressions" fp + correct = replaceExtension input "format" + +#ifdef MIN_VERSION_tree_diff +treeDiffGoldenTest :: FilePath -> TestTree +treeDiffGoldenTest fp = ediffGolden goldenTest "expr" exprFile $ do + contents <- BS.readFile input + let res = parseGenericPackageDescription contents + let (_, x) = runParseResult res + case x of + Right gpd -> pure (toExpr gpd) + Left (_, errs) -> fail $ unlines $ "ERROR" : map (showPError fp) errs + where + input = "tests" "ParserTests" "regressions" fp + exprFile = replaceExtension input "expr" +#endif + +formatRoundTripTest :: FilePath -> TestTree +formatRoundTripTest fp = testCase "roundtrip" $ do + contents <- BS.readFile input + x <- parse contents + let contents' = showGenericPackageDescription x + y <- parse (toUTF8BS contents') + -- previously we mangled licenses a bit + let y' = y + assertEqual "re-parsed doesn't match" x y' + where + parse :: BS.ByteString -> IO GenericPackageDescription + parse c = do + let (_, x') = runParseResult $ parseGenericPackageDescription c + case x' of + Right gpd -> pure gpd + Left (_, errs) -> do + void $ assertFailure $ unlines (map 
(showPError fp) errs) + fail "failure" + input = "tests" "ParserTests" "regressions" fp + +------------------------------------------------------------------------------- +-- InstalledPackageInfo regressions +------------------------------------------------------------------------------- + +ipiTests :: TestTree +ipiTests = testGroup "ipis" + [ ipiTest "transformers.cabal" + , ipiTest "Includes2.cabal" + , ipiTest "issue-2276-ghc-9885.cabal" + , ipiTest "internal-preprocessor-test.cabal" + ] + +ipiTest :: FilePath -> TestTree +ipiTest fp = testGroup fp $ +#ifdef MIN_VERSION_tree_diff + [ ipiTreeDiffGoldenTest fp ] ++ +#endif + [ ipiFormatGoldenTest fp + , ipiFormatRoundTripTest fp + ] + +ipiFormatGoldenTest :: FilePath -> TestTree +ipiFormatGoldenTest fp = cabalGoldenTest "format" correct $ do + contents <- readFile input + let res = IPI.parseInstalledPackageInfo contents + return $ toUTF8BS $ case res of + ReadP.ParseFailed err -> "ERROR " ++ show err + ReadP.ParseOk ws ipi -> + unlines (map (ReadP.showPWarning fp) ws) + ++ IPI.showInstalledPackageInfo ipi + where + input = "tests" "ParserTests" "ipi" fp + correct = replaceExtension input "format" + +#ifdef MIN_VERSION_tree_diff +ipiTreeDiffGoldenTest :: FilePath -> TestTree +ipiTreeDiffGoldenTest fp = ediffGolden goldenTest "expr" exprFile $ do + contents <- readFile input + let res = IPI.parseInstalledPackageInfo contents + case res of + ReadP.ParseFailed err -> fail $ "ERROR " ++ show err + ReadP.ParseOk _ws ipi -> pure (toExpr ipi) + where + input = "tests" "ParserTests" "ipi" fp + exprFile = replaceExtension input "expr" +#endif + +ipiFormatRoundTripTest :: FilePath -> TestTree +ipiFormatRoundTripTest fp = testCase "roundtrip" $ do + contents <- readFile input + x <- parse contents + let contents' = IPI.showInstalledPackageInfo x + y <- parse contents' + + -- ghc-pkg prints pkgroot itself, based on cli arguments! 
+ let x' = x { IPI.pkgRoot = Nothing } + let y' = y + assertBool "pkgRoot isn't shown" (isNothing (IPI.pkgRoot y)) + assertEqual "re-parsed doesn't match" x' y' + + -- Complete round-trip + let contents2 = IPI.showFullInstalledPackageInfo x + z <- parse contents2 + assertEqual "re-parsed doesn't match" x z + + where + parse :: String -> IO IPI.InstalledPackageInfo + parse c = do + case IPI.parseInstalledPackageInfo c of + ReadP.ParseOk _ ipi -> return ipi + ReadP.ParseFailed err -> do + void $ assertFailure $ show err + fail "failure" + input = "tests" "ParserTests" "ipi" fp + +------------------------------------------------------------------------------- +-- Main +------------------------------------------------------------------------------- + +main :: IO () +main = defaultMain tests + +cabalGoldenTest :: TestName -> FilePath -> IO BS.ByteString -> TestTree +cabalGoldenTest name ref act = goldenTest name (BS.readFile ref) act cmp upd + where + upd = BS.writeFile ref + cmp x y | x == y = return Nothing + cmp x y = return $ Just $ unlines $ + concatMap f (getGroupedDiff (BS8.lines x) (BS8.lines y)) + where + f (First xs) = map (cons3 '-' . fromUTF8BS) xs + f (Second ys) = map (cons3 '+' . fromUTF8BS) ys + -- we print unchanged lines too. It shouldn't be a problem while we have + -- reasonably small examples + f (Both xs _) = map (cons3 ' ' . fromUTF8BS) xs + -- we add three characters, so the changed lines are easier to spot + cons3 c cs = c : c : c : ' ' : cs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/README.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/README.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/README.md 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/README.md 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,5 @@ +Unit tests +========== + +Ordinary unit tests. 
If you're looking for the package tests, +they live in cabal-testsuite now. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/Test/Laws.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/Test/Laws.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/Test/Laws.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/Test/Laws.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,79 @@ +{-# OPTIONS_GHC -fno-warn-missing-signatures #-} +module Test.Laws where + +import Prelude hiding (Num((+), (*))) +import Data.Monoid (Monoid(..), Endo(..)) +import qualified Data.Foldable as Foldable + +idempotent_unary f x = f fx == fx where fx = f x + +-- Basic laws on binary operators + +idempotent_binary (+) x = x + x == x + +commutative (+) x y = x + y == y + x + +associative (+) x y z = (x + y) + z == x + (y + z) + +distributive_left (*) (+) x y z = x * (y + z) == (x * y) + (x * z) + +distributive_right (*) (+) x y z = (y + z) * x == (y * x) + (z * x) + + +-- | The first 'fmap' law +-- +-- > fmap id == id +-- +fmap_1 :: (Eq (f a), Functor f) => f a -> Bool +fmap_1 x = fmap id x == x + +-- | The second 'fmap' law +-- +-- > fmap (f . g) == fmap f . fmap g +-- +fmap_2 :: (Eq (f c), Functor f) => (b -> c) -> (a -> b) -> f a -> Bool +fmap_2 f g x = fmap (f . g) x == (fmap f . fmap g) x + + +-- | The monoid identity law, 'mempty' is a left and right identity of +-- 'mappend': +-- +-- > mempty `mappend` x = x +-- > x `mappend` mempty = x +-- +monoid_1 :: (Eq a, Data.Monoid.Monoid a) => a -> Bool +monoid_1 x = mempty `mappend` x == x + && x `mappend` mempty == x + +-- | The monoid associativity law, 'mappend' must be associative. 
+-- +-- > (x `mappend` y) `mappend` z = x `mappend` (y `mappend` z) +-- +monoid_2 :: (Eq a, Data.Monoid.Monoid a) => a -> a -> a -> Bool +monoid_2 x y z = (x `mappend` y) `mappend` z + == x `mappend` (y `mappend` z) + +-- | The 'mconcat' definition. It can be overidden for the sake of effeciency +-- but it must still satisfy the property given by the default definition: +-- +-- > mconcat = foldr mappend mempty +-- +monoid_3 :: (Eq a, Data.Monoid.Monoid a) => [a] -> Bool +monoid_3 xs = mconcat xs == foldr mappend mempty xs + + +-- | First 'Foldable' law +-- +-- > Foldable.fold = Foldable.foldr mappend mempty +-- +foldable_1 :: (Foldable.Foldable t, Monoid m, Eq m) => t m -> Bool +foldable_1 x = Foldable.fold x == Foldable.foldr mappend mempty x + +-- | Second 'Foldable' law +-- +-- > foldr f z t = appEndo (foldMap (Endo . f) t) z +-- +foldable_2 :: (Foldable.Foldable t, Eq b) + => (a -> b -> b) -> b -> t a -> Bool +foldable_2 f z t = Foldable.foldr f z t + == appEndo (Foldable.foldMap (Endo . f) t) z diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/Test/QuickCheck/Utils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/Test/QuickCheck/Utils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/Test/QuickCheck/Utils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/Test/QuickCheck/Utils.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,29 @@ +module Test.QuickCheck.Utils where + +import Test.QuickCheck.Gen + + +-- | Adjust the size of the generated value. +-- +-- In general the size gets bigger and bigger linearly. For some types +-- it is not appropriate to generate ever bigger values but instead +-- to generate lots of intermediate sized values. You could do that using: +-- +-- > adjustSize (\n -> min n 5) +-- +-- Similarly, for some types the linear size growth may mean getting too big +-- too quickly relative to other values. 
So you may want to adjust how +-- quickly the size grows. For example dividing by a constant, or even +-- something like the integer square root or log. +-- +-- > adjustSize (\n -> n `div` 2) +-- +-- Putting this together we can make for example a relatively short list: +-- +-- > adjustSize (\n -> min 5 (n `div` 3)) (listOf1 arbitrary) +-- +-- Not only do we put a limit on the length but we also scale the growth to +-- prevent it from hitting the maximum size quite so early. +-- +adjustSize :: (Int -> Int) -> Gen a -> Gen a +adjustSize adjust gen = sized (\n -> resize (adjust n) gen) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/CreatePipe.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/CreatePipe.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/CreatePipe.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/CreatePipe.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,19 @@ +module UnitTests.Distribution.Compat.CreatePipe (tests) where + +import Distribution.Compat.CreatePipe +import System.IO (hClose, hGetContents, hPutStr, hSetEncoding, localeEncoding) +import Test.Tasty +import Test.Tasty.HUnit + +tests :: [TestTree] +tests = [testCase "Locale Encoding" case_Locale_Encoding] + +case_Locale_Encoding :: Assertion +case_Locale_Encoding = do + let str = "\0252" + (r, w) <- createPipe + hSetEncoding w localeEncoding + out <- hGetContents r + hPutStr w str + hClose w + assertEqual "createPipe should support Unicode roundtripping" str out diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/Graph.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/Graph.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/Graph.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/Graph.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,91 @@ +{-# LANGUAGE PatternGuards #-} +{-# LANGUAGE FlexibleInstances #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} +module UnitTests.Distribution.Compat.Graph + ( tests + , arbitraryGraph + ) where + +import Distribution.Compat.Graph + +import qualified Prelude +import Prelude hiding (null) +import Test.Tasty +import Test.Tasty.QuickCheck +import qualified Data.Set as Set +import Control.Monad +import qualified Data.Graph as G +import Data.Array ((!)) +import Data.Maybe +import Data.List (sort) + +tests :: [TestTree] +tests = + [ testProperty "arbitrary unbroken" (prop_arbitrary_unbroken :: Graph (Node Int ()) -> Bool) + , testProperty "nodes consistent" (prop_nodes_consistent :: Graph (Node Int ()) -> Bool) + , testProperty "edges consistent" (prop_edges_consistent :: Graph (Node Int ()) -> Property) + , testProperty "closure consistent" (prop_closure_consistent :: Graph (Node Int ()) -> Property) + ] + +-- Our arbitrary instance does not generate broken graphs +prop_arbitrary_unbroken :: Graph a -> Bool +prop_arbitrary_unbroken g = Prelude.null (broken g) + +-- Every node from 'toList' maps to a vertex which +-- is present in the constructed graph, and maps back +-- to a node correctly. +prop_nodes_consistent :: (Eq a, IsNode a) => Graph a -> Bool +prop_nodes_consistent g = all p (toList g) + where + (_, vtn, ktv) = toGraph g + p n = case ktv (nodeKey n) of + Just v -> vtn v == n + Nothing -> False + +-- A non-broken graph has the 'nodeNeighbors' of each node +-- equal the recorded adjacent edges in the node graph. 
+prop_edges_consistent :: IsNode a => Graph a -> Property +prop_edges_consistent g = Prelude.null (broken g) ==> all p (toList g) + where + (gr, vtn, ktv) = toGraph g + p n = sort (nodeNeighbors n) + == sort (map (nodeKey . vtn) (gr ! fromJust (ktv (nodeKey n)))) + +-- Closure is consistent with reachable +prop_closure_consistent :: (Show a, IsNode a) => Graph a -> Property +prop_closure_consistent g = + not (null g) ==> + forAll (elements (toList g)) $ \n -> + Set.fromList (map nodeKey (fromJust (closure g [nodeKey n]))) + == Set.fromList (map (nodeKey . vtn) (G.reachable gr (fromJust (ktv (nodeKey n))))) + where + (gr, vtn, ktv) = toGraph g + +hasNoDups :: Ord a => [a] -> Bool +hasNoDups = loop Set.empty + where + loop _ [] = True + loop s (x:xs) | s' <- Set.insert x s, Set.size s' > Set.size s + = loop s' xs + | otherwise + = False + +-- | Produces a graph of size @len@. We sample with 'suchThat'; if we +-- dropped duplicate entries our size could be smaller. +arbitraryGraph :: (Ord k, Show k, Arbitrary k, Arbitrary a) + => Int -> Gen (Graph (Node k a)) +arbitraryGraph len = do + -- Careful! Assume k is much larger than size. + ks <- vectorOf len arbitrary `suchThat` hasNoDups + ns <- forM ks $ \k -> do + a <- arbitrary + ns <- listOf (elements ks) + -- Allow duplicates! 
+ return (N a k ns) + return (fromDistinctList ns) + +instance (Ord k, Show k, Arbitrary k, Arbitrary a) + => Arbitrary (Graph (Node k a)) where + arbitrary = sized $ \n -> do + len <- choose (0, n) + arbitraryGraph len diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/ReadP.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/ReadP.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/ReadP.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/ReadP.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,153 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Compat.ReadP +-- Copyright : (c) The University of Glasgow 2002 +-- License : BSD-style (see the file libraries/base/LICENSE) +-- +-- Maintainer : libraries@haskell.org +-- Portability : portable +-- +-- This code was originally in Distribution.Compat.ReadP. Please see that file +-- for provenance. The tests have been integrated into the test framework. +-- Some properties cannot be tested, as they hold over arbitrary ReadP values, +-- and we don't have a good Arbitrary instance (nor Show instance) for ReadP. 
+-- +module UnitTests.Distribution.Compat.ReadP + ( tests + -- * Properties + -- $properties + ) where + +import Data.List +import Distribution.Compat.ReadP +import Test.Tasty +import Test.Tasty.QuickCheck + +tests :: [TestTree] +tests = + [ testProperty "Get Nil" prop_Get_Nil + , testProperty "Get Cons" prop_Get_Cons + , testProperty "Look" prop_Look + , testProperty "Fail" prop_Fail + , testProperty "Return" prop_Return + --, testProperty "Bind" prop_Bind + --, testProperty "Plus" prop_Plus + --, testProperty "LeftPlus" prop_LeftPlus + --, testProperty "Gather" prop_Gather + , testProperty "String Yes" prop_String_Yes + , testProperty "String Maybe" prop_String_Maybe + , testProperty "Munch" (prop_Munch evenChar) + , testProperty "Munch1" (prop_Munch1 evenChar) + --, testProperty "Choice" prop_Choice + --, testProperty "ReadS" prop_ReadS + ] + +-- --------------------------------------------------------------------------- +-- QuickCheck properties that hold for the combinators + +{- $properties +The following are QuickCheck specifications of what the combinators do. +These can be seen as formal specifications of the behavior of the +combinators. + +We use bags to give semantics to the combinators. +-} + +type Bag a = [a] + +-- Equality on bags does not care about the order of elements. + +(=~) :: Ord a => Bag a -> Bag a -> Bool +xs =~ ys = sort xs == sort ys + +-- A special equality operator to avoid unresolved overloading +-- when testing the properties. + +(=~.) :: Bag (Int,String) -> Bag (Int,String) -> Bool +(=~.) = (=~) + +-- Here follow the properties: + +prop_Get_Nil :: Bool +prop_Get_Nil = + readP_to_S get [] =~ [] + +prop_Get_Cons :: Char -> [Char] -> Bool +prop_Get_Cons c s = + readP_to_S get (c:s) =~ [(c,s)] + +prop_Look :: String -> Bool +prop_Look s = + readP_to_S look s =~ [(s,s)] + +prop_Fail :: String -> Bool +prop_Fail s = + readP_to_S pfail s =~. [] + +prop_Return :: Int -> String -> Bool +prop_Return x s = + readP_to_S (return x) s =~. 
[(x,s)] + +{- +prop_Bind p k s = + readP_to_S (p >>= k) s =~. + [ ys'' + | (x,s') <- readP_to_S p s + , ys'' <- readP_to_S (k (x::Int)) s' + ] + +prop_Plus :: ReadP Int Int -> ReadP Int Int -> String -> Bool +prop_Plus p q s = + readP_to_S (p +++ q) s =~. + (readP_to_S p s ++ readP_to_S q s) + +prop_LeftPlus :: ReadP Int Int -> ReadP Int Int -> String -> Bool +prop_LeftPlus p q s = + readP_to_S (p <++ q) s =~. + (readP_to_S p s +<+ readP_to_S q s) + where + [] +<+ ys = ys + xs +<+ _ = xs + +prop_Gather s = + forAll readPWithoutReadS $ \p -> + readP_to_S (gather p) s =~ + [ ((pre,x::Int),s') + | (x,s') <- readP_to_S p s + , let pre = take (length s - length s') s + ] +-} + +prop_String_Yes :: String -> [Char] -> Bool +prop_String_Yes this s = + readP_to_S (string this) (this ++ s) =~ + [(this,s)] + +prop_String_Maybe :: String -> String -> Bool +prop_String_Maybe this s = + readP_to_S (string this) s =~ + [(this, drop (length this) s) | this `isPrefixOf` s] + +prop_Munch :: (Char -> Bool) -> String -> Bool +prop_Munch p s = + readP_to_S (munch p) s =~ + [(takeWhile p s, dropWhile p s)] + +prop_Munch1 :: (Char -> Bool) -> String -> Bool +prop_Munch1 p s = + readP_to_S (munch1 p) s =~ + [(res,s') | let (res,s') = (takeWhile p s, dropWhile p s), not (null res)] + +{- +prop_Choice :: [ReadP Int Int] -> String -> Bool +prop_Choice ps s = + readP_to_S (choice ps) s =~. + readP_to_S (foldr (+++) pfail ps) s + +prop_ReadS :: ReadS Int -> String -> Bool +prop_ReadS r s = + readP_to_S (readS_to_P r) s =~. r s +-} + +evenChar :: Char -> Bool +evenChar = even . 
fromEnum diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/Time.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/Time.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/Time.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Compat/Time.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,49 @@ +module UnitTests.Distribution.Compat.Time (tests) where + +import Control.Concurrent (threadDelay) +import System.FilePath + +import Distribution.Simple.Utils (withTempDirectory) +import Distribution.Verbosity + +import Distribution.Compat.Time + +import Test.Tasty +import Test.Tasty.HUnit + +tests :: Int -> [TestTree] +tests mtimeChange = + [ testCase "getModTime has sub-second resolution" $ getModTimeTest mtimeChange + , testCase "getCurTime works as expected" $ getCurTimeTest mtimeChange + ] + +getModTimeTest :: Int -> Assertion +getModTimeTest mtimeChange = + withTempDirectory silent "." "getmodtime-" $ \dir -> do + let fileName = dir "foo" + writeFile fileName "bar" + t0 <- getModTime fileName + threadDelay mtimeChange + writeFile fileName "baz" + t1 <- getModTime fileName + assertBool "expected different file mtimes" (t1 > t0) + + +getCurTimeTest :: Int -> Assertion +getCurTimeTest mtimeChange = + withTempDirectory silent "." 
"getmodtime-" $ \dir -> do + let fileName = dir "foo" + writeFile fileName "bar" + t0 <- getModTime fileName + threadDelay mtimeChange + t1 <- getCurTime + assertBool("expected file mtime (" ++ show t0 + ++ ") to be earlier than current time (" ++ show t1 ++ ")") + (t0 < t1) + + threadDelay mtimeChange + writeFile fileName "baz" + t2 <- getModTime fileName + assertBool ("expected current time (" ++ show t1 + ++ ") to be earlier than file mtime (" ++ show t2 ++ ")") + (t1 < t2) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Simple/Glob.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Simple/Glob.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Simple/Glob.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Simple/Glob.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,164 @@ +module UnitTests.Distribution.Simple.Glob + ( tests + ) where + +import Control.Monad +import Data.Foldable (for_) +import Data.Function (on) +import Data.List (sort) +import Data.Maybe (mapMaybe) +import Distribution.Simple.Glob +import qualified Distribution.Verbosity as Verbosity +import Distribution.Version +import System.Directory (createDirectoryIfMissing) +import System.FilePath ((), splitFileName, normalise) +import System.IO.Temp (withSystemTempDirectory) +import Test.Tasty +import Test.Tasty.HUnit + +sampleFileNames :: [FilePath] +sampleFileNames = + [ "a" + , "a.html" + , "b.html" + , "b.html.gz" + , "foo/.blah.html" + , "foo/.html" + , "foo/a" + , "foo/a.html" + , "foo/a.html.gz" + , "foo/a.tex" + , "foo/a.tex.gz" + , "foo/b.html" + , "foo/b.html.gz" + , "foo/x.gz" + , "foo/bar/.html" + , "foo/bar/a.html" + , "foo/bar/a.html.gz" + , "foo/bar/a.tex" + , "foo/bar/a.tex.gz" + , "foo/bar/b.html" + , "foo/bar/b.html.gz" + , "foo/c.html/blah" + , "xyz/foo/a.html" 
+ ] + +makeSampleFiles :: FilePath -> IO () +makeSampleFiles dir = for_ sampleFileNames $ \filename -> do + let (dir', name) = splitFileName filename + createDirectoryIfMissing True (dir dir') + writeFile (dir dir' name) $ "This is " ++ filename + +compatibilityTests :: Version -> [TestTree] +compatibilityTests version = + [ testCase "literal match" $ + testMatches "foo/a" [GlobMatch "foo/a"] + , testCase "literal no match on prefix" $ + testMatches "foo/c.html" [] + , testCase "literal no match on suffix" $ + testMatches "foo/a.html" [GlobMatch "foo/a.html"] + , testCase "literal no prefix" $ + testMatches "a" [GlobMatch "a"] + , testCase "literal multiple prefix" $ + testMatches "foo/bar/a.html" [GlobMatch "foo/bar/a.html"] + , testCase "glob" $ + testMatches "*.html" [GlobMatch "a.html", GlobMatch "b.html"] + , testCase "glob in subdir" $ + testMatches "foo/*.html" [GlobMatch "foo/a.html", GlobMatch "foo/b.html"] + , testCase "glob multiple extensions" $ + testMatches "foo/*.html.gz" [GlobMatch "foo/a.html.gz", GlobMatch "foo/b.html.gz"] + , testCase "glob in deep subdir" $ + testMatches "foo/bar/*.tex" [GlobMatch "foo/bar/a.tex"] + , testCase "star in directory" $ + testFailParse "blah/*/foo" StarInDirectory + , testCase "star plus text in segment" $ + testFailParse "xyz*/foo" StarInDirectory + , testCase "star in filename plus text" $ + testFailParse "foo*.bar" StarInFileName + , testCase "no extension on star" $ + testFailParse "foo/*" NoExtensionOnStar + , testCase "star in extension" $ + testFailParse "foo.*.gz" StarInExtension + ] + where + testMatches = testMatchesVersion version + testFailParse = testFailParseVersion version + +-- For efficiency reasons, matchDirFileGlob isn't a simple call to +-- getDirectoryContentsRecursive and then a filter with +-- fileGlobMatches. So test both that naive approach and the actual +-- approach to make sure they are both correct. 
+-- +-- TODO: Work out how to construct the sample tree once for all tests, +-- rather than once for each test. +testMatchesVersion :: Version -> FilePath -> [GlobResult FilePath] -> Assertion +testMatchesVersion version pat expected = do + globPat <- case parseFileGlob version pat of + Left _ -> assertFailure "Couldn't compile the pattern." + Right globPat -> return globPat + checkPure globPat + checkIO globPat + where + isEqual = (==) `on` (sort . fmap (fmap normalise)) + checkPure globPat = do + let actual = mapMaybe (fileGlobMatches globPat) sampleFileNames + unless (sort expected == sort actual) $ + assertFailure $ "Unexpected result (pure matcher): " ++ show actual + checkIO globPat = + withSystemTempDirectory "globstar-sample" $ \tmpdir -> do + makeSampleFiles tmpdir + actual <- runDirFileGlob Verbosity.normal tmpdir globPat + unless (isEqual actual expected) $ + assertFailure $ "Unexpected result (impure matcher): " ++ show actual + +testFailParseVersion :: Version -> FilePath -> GlobSyntaxError -> Assertion +testFailParseVersion version pat expected = + case parseFileGlob version pat of + Left err -> unless (expected == err) $ + assertFailure $ "Unexpected error: " ++ show err + Right _ -> assertFailure "Unexpected success in parsing." 
+ +globstarTests :: [TestTree] +globstarTests = + [ testCase "fails to parse on early spec version" $ + testFailParseVersion (mkVersion [2,2]) "**/*.html" VersionDoesNotSupportGlobStar + , testCase "out-of-place double star" $ + testFailParse "blah/**/blah/*.foo" StarInDirectory + , testCase "multiple double star" $ + testFailParse "blah/**/**/*.foo" StarInDirectory + , testCase "fails with literal filename" $ + testFailParse "**/a.html" LiteralFileNameGlobStar + , testCase "with glob filename" $ + testMatches "**/*.html" [GlobMatch "a.html", GlobMatch "b.html", GlobMatch "foo/a.html", GlobMatch "foo/b.html", GlobMatch "foo/bar/a.html", GlobMatch "foo/bar/b.html", GlobMatch "xyz/foo/a.html"] + , testCase "glob with prefix" $ + testMatches "foo/**/*.html" [GlobMatch "foo/a.html", GlobMatch "foo/b.html", GlobMatch "foo/bar/a.html", GlobMatch "foo/bar/b.html"] + ] + where + testFailParse = testFailParseVersion (mkVersion [2,4]) + testMatches = testMatchesVersion (mkVersion [2,4]) + +multiDotTests :: [TestTree] +multiDotTests = + [ testCase "pre-2.4 single extension not matching multiple" $ + testMatchesVersion (mkVersion [2,2]) "foo/*.gz" [GlobWarnMultiDot "foo/a.html.gz", GlobWarnMultiDot "foo/a.tex.gz", GlobWarnMultiDot "foo/b.html.gz", GlobMatch "foo/x.gz"] + , testCase "doesn't match literal" $ + testMatches "foo/a.tex" [GlobMatch "foo/a.tex"] + , testCase "works" $ + testMatches "foo/*.gz" [GlobMatch "foo/a.html.gz", GlobMatch "foo/a.tex.gz", GlobMatch "foo/b.html.gz", GlobMatch "foo/x.gz"] + , testCase "works with globstar" $ + testMatches "foo/**/*.gz" [GlobMatch "foo/a.html.gz", GlobMatch "foo/a.tex.gz", GlobMatch "foo/b.html.gz", GlobMatch "foo/x.gz", GlobMatch "foo/bar/a.html.gz", GlobMatch "foo/bar/a.tex.gz", GlobMatch "foo/bar/b.html.gz"] + ] + where + testMatches = testMatchesVersion (mkVersion [2,4]) + +tests :: [TestTree] +tests = + [ testGroup "pre-2.4 compatibility" $ + compatibilityTests (mkVersion [2,2]) + , testGroup "post-2.4 compatibility" $ + 
compatibilityTests (mkVersion [2,4]) + , testGroup "globstar" globstarTests + , testCase "pre-1.6 rejects globbing" $ + testFailParseVersion (mkVersion [1,4]) "foo/*.bar" VersionDoesNotSupportGlob + , testGroup "multi-dot globbing" multiDotTests + ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Simple/Program/Internal.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Simple/Program/Internal.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Simple/Program/Internal.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Simple/Program/Internal.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,36 @@ +module UnitTests.Distribution.Simple.Program.Internal + ( tests + ) where + +import Distribution.Simple.Program.Internal ( stripExtractVersion ) + +import Test.Tasty +import Test.Tasty.HUnit + +v :: String +v = "GNU strip (GNU Binutils; openSUSE 13.2) 2.24.0.20140403-6.1\nCopyright 2013\ + \ Free Software Foundation, Inc.\nThis program is free software; you may\ + \ redistribute it under the terms of\nthe GNU General Public License version 3\ + \ or (at your option) any later version.\nThis program has absolutely no\ + \ warranty.\n" + +v' :: String +v' = "GNU strip 2.17.50.0.6-26.el5 20061020" + +v'' :: String +v'' = "GNU strip (openSUSE-13.2) 2.23.50.0.6-26.el5 20061020" + +v''' :: String +v''' = "GNU strip (GNU (Binutils for) Ubuntu 12.04 ) 2.22" + +tests :: [TestTree] +tests = + [ testCase "Handles parentheses" $ + (stripExtractVersion v) @=? "2.24" + , testCase "Handles dashes and alphabetic characters" $ + (stripExtractVersion v') @=? "2.17" + , testCase "Handles single-word parenthetical expressions" $ + (stripExtractVersion v'') @=? "2.23" + , testCase "Handles nested parentheses" $ + (stripExtractVersion v''') @=? 
"2.22" + ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Simple/Utils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Simple/Utils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Simple/Utils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Simple/Utils.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,101 @@ +module UnitTests.Distribution.Simple.Utils + ( tests + ) where + +import Distribution.Simple.Utils +import Distribution.Verbosity + +import Data.IORef +import System.Directory ( doesDirectoryExist, doesFileExist + , getTemporaryDirectory + , removeDirectoryRecursive, removeFile ) +import System.IO (hClose, localeEncoding, hPutStrLn) +import System.IO.Error +import qualified Control.Exception as Exception + +import Test.Tasty +import Test.Tasty.HUnit + +withTempFileTest :: Assertion +withTempFileTest = do + fileName <- newIORef "" + tempDir <- getTemporaryDirectory + withTempFile tempDir ".foo" $ \fileName' _handle -> do + writeIORef fileName fileName' + fileExists <- readIORef fileName >>= doesFileExist + assertBool "Temporary file not deleted by 'withTempFile'!" (not fileExists) + +withTempFileRemovedTest :: Assertion +withTempFileRemovedTest = do + tempDir <- getTemporaryDirectory + withTempFile tempDir ".foo" $ \fileName handle -> do + hClose handle + removeFile fileName + +withTempDirTest :: Assertion +withTempDirTest = do + dirName <- newIORef "" + tempDir <- getTemporaryDirectory + withTempDirectory normal tempDir "foo" $ \dirName' -> do + writeIORef dirName dirName' + dirExists <- readIORef dirName >>= doesDirectoryExist + assertBool "Temporary directory not deleted by 'withTempDirectory'!" 
+ (not dirExists) + +withTempDirRemovedTest :: Assertion +withTempDirRemovedTest = do + tempDir <- getTemporaryDirectory + withTempDirectory normal tempDir "foo" $ \dirPath -> do + removeDirectoryRecursive dirPath + +rawSystemStdInOutTextDecodingTest :: Assertion +rawSystemStdInOutTextDecodingTest + -- We can only get this exception when the locale encoding is UTF-8 + -- so skip the test if it's not. + | show localeEncoding /= "UTF-8" = return () + | otherwise = do + tempDir <- getTemporaryDirectory + res <- withTempFile tempDir ".hs" $ \filenameHs handleHs -> do + withTempFile tempDir ".exe" $ \filenameExe handleExe -> do + -- Small program printing not utf8 + hPutStrLn handleHs "import Data.ByteString" + hPutStrLn handleHs "main = Data.ByteString.putStr (Data.ByteString.pack [32, 32, 255])" + hClose handleHs + + -- We need to close exe handle as well, otherwise compilation (writing) may fail + hClose handleExe + + -- Compile + (IODataText resOutput, resErrors, resExitCode) <- rawSystemStdInOut normal + "ghc" ["-o", filenameExe, filenameHs] + Nothing Nothing Nothing + IODataModeText + print (resOutput, resErrors, resExitCode) + + -- Execute + Exception.try $ do + rawSystemStdInOut normal + filenameExe [] + Nothing Nothing Nothing + IODataModeText -- not binary mode output, ie utf8 text mode so try to decode + case res of + Right (IODataText x1, x2, x3) -> assertFailure $ "expected IO decoding exception: " ++ show (x1,x2,x3) + Right (IODataBinary _, _, _) -> assertFailure "internal error" + Left err | isDoesNotExistError err -> Exception.throwIO err -- no ghc! 
+ | otherwise -> return () + + + +tests :: [TestTree] +tests = + [ testCase "withTempFile works as expected" $ + withTempFileTest + , testCase "withTempFile can handle removed files" $ + withTempFileRemovedTest + , testCase "withTempDirectory works as expected" $ + withTempDirTest + , testCase "withTempDirectory can handle removed directories" $ + withTempDirRemovedTest + , testCase "rawSystemStdInOut reports text decoding errors" $ + rawSystemStdInOutTextDecodingTest + ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/SPDX.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/SPDX.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/SPDX.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/SPDX.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,158 @@ +{-# OPTIONS_GHC -fno-warn-orphans -fno-warn-deprecations #-} +module UnitTests.Distribution.SPDX (spdxTests) where + +import Distribution.Compat.Prelude.Internal +import Prelude () + +import Distribution.SPDX +import Distribution.Parsec.Class (eitherParsec) +import Distribution.Pretty (prettyShow) + +import Test.Tasty +import Test.Tasty.QuickCheck + +spdxTests :: [TestTree] +spdxTests = + [ testProperty "LicenseId roundtrip" licenseIdRoundtrip + , testProperty "LicenseExceptionId roundtrip" licenseExceptionIdRoundtrip + , testProperty "LicenseRef roundtrip" licenseRefRoundtrip + , testProperty "SimpleLicenseExpression roundtrip" simpleLicenseExpressionRoundtrip + , testProperty "LicenseExpression roundtrip" licenseExpressionRoundtrip + , testProperty "isAcceptableLicense l = True" shouldAcceptProp + , testProperty "isAcceptableLicense l = False" shouldRejectProp + ] + +licenseIdRoundtrip :: LicenseId -> Property +licenseIdRoundtrip x = + counterexample (prettyShow x) $ + Right x === eitherParsec 
(prettyShow x) + +licenseExceptionIdRoundtrip :: LicenseExceptionId -> Property +licenseExceptionIdRoundtrip x = + counterexample (prettyShow x) $ + Right x === eitherParsec (prettyShow x) + +licenseRefRoundtrip :: LicenseRef -> Property +licenseRefRoundtrip x = + counterexample (prettyShow x) $ + Right x === eitherParsec (prettyShow x) + +simpleLicenseExpressionRoundtrip :: SimpleLicenseExpression -> Property +simpleLicenseExpressionRoundtrip x = + counterexample (prettyShow x) $ + Right x === eitherParsec (prettyShow x) + +licenseExpressionRoundtrip :: LicenseExpression -> Property +licenseExpressionRoundtrip x = + counterexample (prettyShow x) $ + Right (reassoc x) === eitherParsec (prettyShow x) + +-- Parser produces right biased trees of and/or expressions +reassoc :: LicenseExpression -> LicenseExpression +reassoc (EOr a b) = case reassoc a of + EOr x y -> EOr x (reassoc (EOr y b)) + x -> EOr x (reassoc b) +reassoc (EAnd a b) = case reassoc a of + EAnd x y -> EAnd x (reassoc (EAnd y b)) + x -> EAnd x (reassoc b) +reassoc l = l + +------------------------------------------------------------------------------- +-- isAcceptableLicence +------------------------------------------------------------------------------- + +shouldAccept :: [License] +shouldAccept = map License + [ simpleLicenseExpression GPL_2_0_only + , simpleLicenseExpression GPL_2_0_or_later + , simpleLicenseExpression BSD_2_Clause + , simpleLicenseExpression BSD_3_Clause + , simpleLicenseExpression MIT + , simpleLicenseExpression ISC + , simpleLicenseExpression MPL_2_0 + , simpleLicenseExpression Apache_2_0 + , simpleLicenseExpression CC0_1_0 + , simpleLicenseExpression BSD_4_Clause `EOr` simpleLicenseExpression MIT + ] + +shouldReject :: [License] +shouldReject = map License + [ simpleLicenseExpression BSD_4_Clause + , simpleLicenseExpression BSD_4_Clause `EAnd` simpleLicenseExpression MIT + ] + +-- | A sketch of what Hackage could accept +-- +-- * NONE is rejected +-- +-- * "or later" syntax (+ 
postfix) is rejected +-- +-- * "WITH exc" exceptions are rejected +-- +-- * There should be a way to interpert license as (conjunction of) +-- OSI-accepted licenses or CC0 +-- +isAcceptableLicense :: License -> Bool +isAcceptableLicense NONE = False +isAcceptableLicense (License expr) = goExpr expr + where + goExpr (EAnd a b) = goExpr a && goExpr b + goExpr (EOr a b) = goExpr a || goExpr b + goExpr (ELicense _ (Just _)) = False -- Don't allow exceptions + goExpr (ELicense s Nothing) = goSimple s + + goSimple (ELicenseRef _) = False -- don't allow referenced licenses + goSimple (ELicenseIdPlus _) = False -- don't allow + licenses (use GPL-3.0-or-later e.g.) + goSimple (ELicenseId CC0_1_0) = True -- CC0 isn't OSI approved, but we allow it as "PublicDomain", this is eg. PublicDomain in http://hackage.haskell.org/package/string-qq-0.0.2/src/LICENSE + goSimple (ELicenseId lid) = licenseIsOsiApproved lid -- allow only OSI approved licenses. + +shouldAcceptProp :: Property +shouldAcceptProp = conjoin $ + map (\l -> counterexample (prettyShow l) (isAcceptableLicense l)) shouldAccept + +shouldRejectProp :: Property +shouldRejectProp = conjoin $ + map (\l -> counterexample (prettyShow l) (not $ isAcceptableLicense l)) shouldReject + +------------------------------------------------------------------------------- +-- Instances +------------------------------------------------------------------------------- + +instance Arbitrary LicenseId where + arbitrary = elements $ licenseIdList LicenseListVersion_3_2 + +instance Arbitrary LicenseExceptionId where + arbitrary = elements $ licenseExceptionIdList LicenseListVersion_3_2 + +instance Arbitrary LicenseRef where + arbitrary = mkLicenseRef' <$> ids' <*> ids + where + ids = listOf1 $ elements $ ['a'..'z'] ++ ['A' .. 
'Z'] ++ ['0'..'9'] ++ "_-" + ids' = oneof [ pure Nothing, Just <$> ids ] + +instance Arbitrary SimpleLicenseExpression where + arbitrary = oneof + [ ELicenseId <$> arbitrary + , ELicenseIdPlus <$> arbitrary + , ELicenseRef <$> arbitrary + ] + +instance Arbitrary LicenseExpression where + arbitrary = sized arb + where + arb n + | n <= 0 = ELicense <$> arbitrary <*> pure Nothing + | otherwise = oneof + [ ELicense <$> arbitrary <*> arbitrary + , EAnd <$> arbA <*> arbB + , EOr <$> arbA <*> arbB + ] + where + m = n `div` 2 + arbA = arb m + arbB = arb (n - m) + + shrink (EAnd a b) = a : b : map (uncurry EAnd) (shrink (a, b)) + shrink (EOr a b) = a : b : map (uncurry EOr) (shrink (a, b)) + shrink _ = [] + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/System.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/System.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/System.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/System.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,29 @@ +{-# OPTIONS_GHC -fno-warn-orphans #-} +module UnitTests.Distribution.System + ( tests + ) where + +import Control.Monad (liftM2) +import Distribution.Text (Text(..), display, simpleParse) +import Distribution.System +import Test.Tasty +import Test.Tasty.QuickCheck + +textRoundtrip :: (Show a, Eq a, Text a) => a -> Property +textRoundtrip x = simpleParse (display x) === Just x + +tests :: [TestTree] +tests = + [ testProperty "Text OS round trip" (textRoundtrip :: OS -> Property) + , testProperty "Text Arch round trip" (textRoundtrip :: Arch -> Property) + , testProperty "Text Platform round trip" (textRoundtrip :: Platform -> Property) + ] + +instance Arbitrary OS where + arbitrary = elements knownOSs + +instance Arbitrary Arch where + arbitrary = elements knownArches + 
+instance Arbitrary Platform where + arbitrary = liftM2 Platform arbitrary arbitrary diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Types/GenericPackageDescription.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Types/GenericPackageDescription.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Types/GenericPackageDescription.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Types/GenericPackageDescription.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,37 @@ +{-# OPTIONS_GHC -fno-warn-deprecations #-} -- for importing "Distribution.Compat.Prelude.Internal" + +module UnitTests.Distribution.Types.GenericPackageDescription where + +import Prelude () +import Distribution.Compat.Prelude.Internal +import Distribution.Types.GenericPackageDescription + +import Test.Tasty +import Test.Tasty.HUnit +import qualified Control.Exception as C + +tests :: [TestTree] +tests = + [ testCase "GenericPackageDescription deepseq" gpdDeepseq + ] + +gpdFields :: [(String, GenericPackageDescription -> GenericPackageDescription)] +gpdFields = + [ ("packageDescription", \gpd -> gpd { packageDescription = undefined }) + , ("genPackageFlags", \gpd -> gpd { genPackageFlags = undefined }) + , ("condLibrary", \gpd -> gpd { condLibrary = undefined }) + , ("condSubLibraries", \gpd -> gpd { condSubLibraries = undefined }) + , ("condForeignLibs", \gpd -> gpd { condForeignLibs = undefined }) + , ("condExecutables", \gpd -> gpd { condExecutables = undefined }) + , ("condTestSuites", \gpd -> gpd { condTestSuites = undefined }) + , ("condBenchmarks", \gpd -> gpd { condBenchmarks = undefined }) + ] + +gpdDeepseq :: Assertion +gpdDeepseq = sequence_ + [ throwsUndefined msg (f emptyGenericPackageDescription) | (msg, f) <- gpdFields ] + +throwsUndefined :: NFData a => String -> a 
-> Assertion +throwsUndefined field a = + C.catch (C.evaluate (rnf a) >> assertFailure ("Deepseq failed to evaluate " ++ show field)) + (\(C.ErrorCall _) -> return ()) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Utils/Generic.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Utils/Generic.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Utils/Generic.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Utils/Generic.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,71 @@ +{-# LANGUAGE OverloadedStrings #-} + +-- to suppress WARNING in "Distribution.Compat.Prelude.Internal" +{-# OPTIONS_GHC -fno-warn-deprecations #-} + +module UnitTests.Distribution.Utils.Generic ( tests ) where + +import Prelude () +import Distribution.Compat.Prelude.Internal + +import Distribution.Utils.Generic + +import qualified Data.ByteString.Char8 as BS +import qualified Data.Text as T +import qualified Data.Text.Encoding as T + +import Test.Tasty +import Test.Tasty.HUnit +import Test.Tasty.QuickCheck + +tests :: [TestTree] +tests = + [ -- fromUTF8BS / toUTF8BS + testCase "fromUTF8BS mempty" testFromUTF8BSEmpty + , testCase "toUTF8BS mempty" testToUTF8BSEmpty + , testCase "toUTF8BS [U+D800..U+DFFF]" testToUTF8BSSurr + , testCase "toUTF8BS [U+0000..U+7F]" testToUTF8BSAscii + , testCase "toUTF8BS [U+0000..U+10FFFF]" testToUTF8BSText + , testCase "fromUTF8BS.toUTF8BS [U+0000..U+10FFFF]" testToFromUTF8BS + + , testProperty "fromUTF8BS.toUTF8BS == id" prop_toFromUTF8BS + , testProperty "toUTF8BS == encodeUtf8" prop_toUTF8BS + + , testProperty "Nothing = validateUtf8 (encodeUtf8 x)" prop_validateUtf8 + ] + +testFromUTF8BSEmpty :: Assertion +testFromUTF8BSEmpty = mempty @=? fromUTF8BS mempty + +testToUTF8BSEmpty :: Assertion +testToUTF8BSEmpty = mempty @=? 
toUTF8BS mempty + +testToUTF8BSSurr :: Assertion +testToUTF8BSSurr = BS.concat (replicate 2048 u_fffd) @=? toUTF8BS surrogates + where + surrogates = ['\xD800'..'\xDFFF'] + u_fffd = "\xEF\xBF\xBD" + +testToUTF8BSText :: Assertion +testToUTF8BSText = T.encodeUtf8 (T.pack txt) @=? toUTF8BS txt + where + txt = ['\x00'..'\x10FFFF'] + +testToUTF8BSAscii :: Assertion +testToUTF8BSAscii = BS.pack txt @=? toUTF8BS txt + where + txt = ['\x00'..'\x7F'] + +testToFromUTF8BS :: Assertion +testToFromUTF8BS = txt @=? (fromUTF8BS . toUTF8BS) txt + where + txt = ['\x0000'..'\xD7FF'] ++ ['\xE000'..'\x10FFFF'] + +prop_toFromUTF8BS :: [Char] -> Property +prop_toFromUTF8BS txt = txt === (fromUTF8BS . toUTF8BS) txt + +prop_toUTF8BS :: [Char] -> Property +prop_toUTF8BS txt = T.encodeUtf8 (T.pack txt) === toUTF8BS txt + +prop_validateUtf8 :: [Char] -> Property +prop_validateUtf8 txt = Nothing === validateUTF8 (toUTF8BS txt) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Utils/NubList.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Utils/NubList.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Utils/NubList.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Utils/NubList.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,76 @@ +-- to suppress WARNING in "Distribution.Compat.Prelude.Internal" +{-# OPTIONS_GHC -fno-warn-deprecations #-} +module UnitTests.Distribution.Utils.NubList + ( tests + ) where + +import Prelude () +import Distribution.Compat.Prelude.Internal + +import Distribution.Utils.NubList +import Test.Tasty +import Test.Tasty.HUnit +import Test.Tasty.QuickCheck + +tests :: [TestTree] +tests = + [ testCase "NubList retains ordering example" testOrdering + , testCase "NubList removes duplicates example" testDeDupe + , testProperty "NubList retains 
ordering" prop_Ordering + , testProperty "NubList removes duplicates" prop_DeDupe + , testProperty "fromNubList . toNubList = nub" prop_Nub + , testProperty "Monoid NubList Identity" prop_Identity + , testProperty "Monoid NubList Associativity" prop_Associativity + -- NubListR + , testProperty "NubListR removes duplicates from the right" prop_DeDupeR + ] + +someIntList :: [Int] +-- This list must not have duplicate entries. +someIntList = [ 1, 3, 4, 2, 0, 7, 6, 5, 9, -1 ] + +testOrdering :: Assertion +testOrdering = + assertBool "Maintains element ordering:" $ + fromNubList (toNubList someIntList) == someIntList + +testDeDupe :: Assertion +testDeDupe = + assertBool "De-duplicates a list:" $ + fromNubList (toNubList (someIntList ++ someIntList)) == someIntList + +-- --------------------------------------------------------------------------- +-- QuickCheck properties for NubList + +prop_Ordering :: [Int] -> Property +prop_Ordering xs = + mempty <> toNubList xs' === toNubList xs' <> mempty + where + xs' = nub xs + +prop_DeDupe :: [Int] -> Property +prop_DeDupe xs = + fromNubList (toNubList (xs' ++ xs)) === xs' -- Note, we append primeless xs + where + xs' = nub xs + +prop_DeDupeR :: [Int] -> Property +prop_DeDupeR xs = + fromNubListR (toNubListR (xs ++ xs')) === xs' -- Note, we prepend primeless xs + where + xs' = nub xs + +prop_Nub :: [Int] -> Property +prop_Nub xs = rhs === lhs + where + rhs = fromNubList (toNubList xs) + lhs = nub xs + +prop_Identity :: [Int] -> Bool +prop_Identity xs = + mempty `mappend` toNubList xs == toNubList xs `mappend` mempty + +prop_Associativity :: [Int] -> [Int] -> [Int] -> Bool +prop_Associativity xs ys zs = + (toNubList xs `mappend` toNubList ys) `mappend` toNubList zs + == toNubList xs `mappend` (toNubList ys `mappend` toNubList zs) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Utils/ShortText.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Utils/ShortText.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Utils/ShortText.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Utils/ShortText.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,33 @@ +module UnitTests.Distribution.Utils.ShortText + ( tests + ) where + +import Data.Monoid as Mon +import Test.Tasty +import Test.Tasty.QuickCheck + +import Distribution.Compat.Binary (encode, decode) + +import Distribution.Utils.ShortText + +prop_ShortTextOrd :: String -> String -> Bool +prop_ShortTextOrd a b = compare a b == compare (toShortText a) (toShortText b) + +prop_ShortTextMonoid :: String -> String -> Bool +prop_ShortTextMonoid a b = Mon.mappend a b == fromShortText (mappend (toShortText a) (toShortText b)) + +prop_ShortTextId :: String -> Bool +prop_ShortTextId a = (fromShortText . toShortText) a == a + +prop_ShortTextBinaryId :: String -> Bool +prop_ShortTextBinaryId a = (decode . 
encode) a' == a' + where + a' = toShortText a + +tests :: [TestTree] +tests = + [ testProperty "ShortText Id" prop_ShortTextId + , testProperty "ShortText Ord" prop_ShortTextOrd + , testProperty "ShortText Monoid" prop_ShortTextMonoid + , testProperty "ShortText BinaryId" prop_ShortTextBinaryId + ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Version.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Version.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Version.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests/Distribution/Version.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,782 @@ +{-# LANGUAGE StandaloneDeriving, DeriveDataTypeable #-} +{-# OPTIONS_GHC -fno-warn-orphans + -fno-warn-incomplete-patterns + -fno-warn-deprecations + -fno-warn-unused-binds #-} --FIXME +module UnitTests.Distribution.Version (versionTests) where + +import Distribution.Compat.Prelude.Internal +import Prelude () + +import Distribution.Version +import Distribution.Text +import Distribution.Parsec.Class (simpleParsec) + +import Data.Typeable (typeOf) +import Math.NumberTheory.Logarithms (intLog2) +import Text.PrettyPrint as Disp (text, render, parens, hcat + ,punctuate, int, char, (<+>)) +import Test.Tasty +import Test.Tasty.QuickCheck +import qualified Test.Laws as Laws + +import Test.QuickCheck.Utils + +import Data.Maybe (fromJust) +import Data.Function (on) +import Text.Read (readMaybe) + +versionTests :: [TestTree] +versionTests = + -- test 'Version' type + [ tp "versionNumbers . mkVersion = id @[NonNegative Int]" prop_VersionId + , tp "versionNumbers . 
mkVersion = id @Base.Version" prop_VersionId2 + , tp "(==) = (==) `on` versionNumbers" prop_VersionEq + , tp "(==) = (==) `on` mkVersion" prop_VersionEq2 + , tp "compare = compare `on` versionNumbers" prop_VersionOrd + , tp "compare = compare `on` mkVersion" prop_VersionOrd2 + + , tp "readMaybe . show = Just" prop_ShowRead + , tp "read example" prop_ShowRead_example + + , tp "normaliseVersionRange involutive" prop_normalise_inv + , tp "parse . display involutive" prop_parse_disp_inv + , tp "parsec . display involutive" prop_parsec_disp_inv + + , tp "simpleParsec . display = Just" prop_parse_disp + ] + + ++ + zipWith + (\n (rep, p) -> testProperty ("Range Property " ++ show n ++ " (" ++ show rep ++ ")") p) + [1::Int ..] + -- properties to validate the test framework + [ typProperty prop_nonNull + , typProperty prop_gen_intervals1 + , typProperty prop_gen_intervals2 + --, typProperty prop_equivalentVersionRange --FIXME: runs out of test cases + , typProperty prop_intermediateVersion + + , typProperty prop_anyVersion + , typProperty prop_noVersion + , typProperty prop_thisVersion + , typProperty prop_notThisVersion + , typProperty prop_laterVersion + , typProperty prop_orLaterVersion + , typProperty prop_earlierVersion + , typProperty prop_orEarlierVersion + , typProperty prop_unionVersionRanges + , typProperty prop_intersectVersionRanges + , typProperty prop_differenceVersionRanges + , typProperty prop_invertVersionRange + , typProperty prop_withinVersion + , typProperty prop_foldVersionRange + , typProperty prop_foldVersionRange' + + -- the semantic query functions + --, typProperty prop_isAnyVersion1 --FIXME: runs out of test cases + --, typProperty prop_isAnyVersion2 --FIXME: runs out of test cases + --, typProperty prop_isNoVersion --FIXME: runs out of test cases + --, typProperty prop_isSpecificVersion1 --FIXME: runs out of test cases + --, typProperty prop_isSpecificVersion2 --FIXME: runs out of test cases + , typProperty prop_simplifyVersionRange1 + , 
typProperty prop_simplifyVersionRange1' + --, typProperty prop_simplifyVersionRange2 --FIXME: runs out of test cases + --, typProperty prop_simplifyVersionRange2' --FIXME: runs out of test cases + --, typProperty prop_simplifyVersionRange2'' --FIXME: actually wrong + + -- converting between version ranges and version intervals + , typProperty prop_to_intervals + --, typProperty prop_to_intervals_canonical --FIXME: runs out of test cases + --, typProperty prop_to_intervals_canonical' --FIXME: runs out of test cases + , typProperty prop_from_intervals + , typProperty prop_to_from_intervals + , typProperty prop_from_to_intervals + , typProperty prop_from_to_intervals' + + -- union and intersection of version intervals + , typProperty prop_unionVersionIntervals + , typProperty prop_unionVersionIntervals_idempotent + , typProperty prop_unionVersionIntervals_commutative + , typProperty prop_unionVersionIntervals_associative + , typProperty prop_intersectVersionIntervals + , typProperty prop_intersectVersionIntervals_idempotent + , typProperty prop_intersectVersionIntervals_commutative + , typProperty prop_intersectVersionIntervals_associative + , typProperty prop_union_intersect_distributive + , typProperty prop_intersect_union_distributive + + -- inversion of version intervals + , typProperty prop_invertVersionIntervals + , typProperty prop_invertVersionIntervalsTwice + ] + where + tp :: Testable p => String -> p -> TestTree + tp = testProperty + + typProperty p = (typeOf p, property p) + + +-- parseTests :: [TestTree] +-- parseTests = +-- zipWith (\n p -> testProperty ("Parse Property " ++ show n) p) [1::Int ..] 
+-- -- parsing and pretty printing +-- [ -- property prop_parse_disp1 --FIXME: actually wrong + +-- -- These are also wrong, see +-- -- https://github.com/haskell/cabal/issues/3037#issuecomment-177671011 + +-- -- property prop_parse_disp2 +-- -- , property prop_parse_disp3 +-- -- , property prop_parse_disp4 +-- -- , property prop_parse_disp5 +-- ] + +instance Arbitrary Version where + arbitrary = do + branch <- smallListOf1 $ + frequency [(3, return 0) + ,(3, return 1) + ,(2, return 2) + ,(2, return 3) + ,(1, return 0xfffd) + ,(1, return 0xfffe) -- max fitting into packed W64 + ,(1, return 0xffff) + ,(1, return 0x10000)] + return (mkVersion branch) + where + smallListOf1 = adjustSize (\n -> min 6 (n `div` 3)) . listOf1 + + shrink ver = [ mkVersion ns | ns <- shrink (versionNumbers ver) + , not (null ns) ] + +newtype VersionArb = VersionArb [Int] + deriving (Eq,Ord,Show) + +-- | 'Version' instance as used by QC 2.9 +instance Arbitrary VersionArb where + arbitrary = sized $ \n -> + do k <- choose (0, log2 n) + xs <- vectorOf (k+1) arbitrarySizedNatural + return (VersionArb xs) + where + log2 :: Int -> Int + log2 n | n <= 1 = 0 + | otherwise = 1 + log2 (n `div` 2) + + shrink (VersionArb xs) = + [ VersionArb xs' + | xs' <- shrink xs + , length xs' > 0 + , all (>=0) xs' + ] + +instance Arbitrary VersionRange where + arbitrary = sized verRangeExp + where + verRangeExp n = frequency $ + [ (2, return anyVersion) + , (1, liftM thisVersion arbitrary) + , (1, liftM laterVersion arbitrary) + , (1, liftM orLaterVersion arbitrary) + , (1, liftM orLaterVersion' arbitrary) + , (1, liftM earlierVersion arbitrary) + , (1, liftM orEarlierVersion arbitrary) + , (1, liftM orEarlierVersion' arbitrary) + , (1, liftM withinVersion arbitrary) + , (1, liftM majorBoundVersion arbitrary) + , (2, liftM VersionRangeParens arbitrary) + ] ++ if n == 0 then [] else + [ (2, liftM2 unionVersionRanges verRangeExp2 verRangeExp2) + , (2, liftM2 intersectVersionRanges verRangeExp2 verRangeExp2) + ] + 
where + verRangeExp2 = verRangeExp (n `div` 2) + + orLaterVersion' v = + unionVersionRanges (LaterVersion v) (ThisVersion v) + orEarlierVersion' v = + unionVersionRanges (EarlierVersion v) (ThisVersion v) + + shrink AnyVersion = [] + shrink (ThisVersion v) = map ThisVersion (shrink v) + shrink (LaterVersion v) = map LaterVersion (shrink v) + shrink (EarlierVersion v) = map EarlierVersion (shrink v) + shrink (OrLaterVersion v) = LaterVersion v : map OrLaterVersion (shrink v) + shrink (OrEarlierVersion v) = EarlierVersion v : map OrEarlierVersion (shrink v) + shrink (WildcardVersion v) = map WildcardVersion ( shrink v) + shrink (MajorBoundVersion v) = map MajorBoundVersion (shrink v) + shrink (VersionRangeParens vr) = vr : map VersionRangeParens (shrink vr) + shrink (UnionVersionRanges a b) = a : b : map (uncurry UnionVersionRanges) (shrink (a, b)) + shrink (IntersectVersionRanges a b) = a : b : map (uncurry IntersectVersionRanges) (shrink (a, b)) + +--------------------- +-- Version properties +-- + +prop_VersionId :: [NonNegative Int] -> Bool +prop_VersionId lst0 = + (versionNumbers . mkVersion) lst == lst + where + lst = map getNonNegative lst0 + +prop_VersionId2 :: VersionArb -> Bool +prop_VersionId2 (VersionArb lst) = + (versionNumbers . 
mkVersion) lst == lst + +prop_VersionEq :: Version -> Version -> Bool +prop_VersionEq v1 v2 = (==) v1 v2 == ((==) `on` versionNumbers) v1 v2 + +prop_VersionEq2 :: VersionArb -> VersionArb -> Bool +prop_VersionEq2 (VersionArb v1) (VersionArb v2) = + (==) v1 v2 == ((==) `on` mkVersion) v1 v2 + +prop_VersionOrd :: Version -> Version -> Bool +prop_VersionOrd v1 v2 = + compare v1 v2 == (compare `on` versionNumbers) v1 v2 + +prop_VersionOrd2 :: VersionArb -> VersionArb -> Bool +prop_VersionOrd2 (VersionArb v1) (VersionArb v2) = + (==) v1 v2 == ((==) `on` mkVersion) v1 v2 + +prop_ShowRead :: Version -> Property +prop_ShowRead v = Just v === readMaybe (show v) + +prop_ShowRead_example :: Bool +prop_ShowRead_example = show (mkVersion [1,2,3]) == "mkVersion [1,2,3]" + +--------------------------- +-- VersionRange properties +-- + +prop_normalise_inv :: VersionRange -> Property +prop_normalise_inv vr = + normaliseVersionRange vr === normaliseVersionRange (normaliseVersionRange vr) + +prop_nonNull :: Version -> Bool +prop_nonNull = (/= nullVersion) + +prop_anyVersion :: Version -> Bool +prop_anyVersion v' = + withinRange v' anyVersion + +prop_noVersion :: Version -> Bool +prop_noVersion v' = + withinRange v' noVersion == False + +prop_thisVersion :: Version -> Version -> Bool +prop_thisVersion v v' = + withinRange v' (thisVersion v) + == (v' == v) + +prop_notThisVersion :: Version -> Version -> Bool +prop_notThisVersion v v' = + withinRange v' (notThisVersion v) + == (v' /= v) + +prop_laterVersion :: Version -> Version -> Bool +prop_laterVersion v v' = + withinRange v' (laterVersion v) + == (v' > v) + +prop_orLaterVersion :: Version -> Version -> Bool +prop_orLaterVersion v v' = + withinRange v' (orLaterVersion v) + == (v' >= v) + +prop_earlierVersion :: Version -> Version -> Bool +prop_earlierVersion v v' = + withinRange v' (earlierVersion v) + == (v' < v) + +prop_orEarlierVersion :: Version -> Version -> Bool +prop_orEarlierVersion v v' = + withinRange v' (orEarlierVersion 
v) + == (v' <= v) + +prop_unionVersionRanges :: VersionRange -> VersionRange -> Version -> Bool +prop_unionVersionRanges vr1 vr2 v' = + withinRange v' (unionVersionRanges vr1 vr2) + == (withinRange v' vr1 || withinRange v' vr2) + +prop_intersectVersionRanges :: VersionRange -> VersionRange -> Version -> Bool +prop_intersectVersionRanges vr1 vr2 v' = + withinRange v' (intersectVersionRanges vr1 vr2) + == (withinRange v' vr1 && withinRange v' vr2) + +prop_differenceVersionRanges :: VersionRange -> VersionRange -> Version -> Bool +prop_differenceVersionRanges vr1 vr2 v' = + withinRange v' (differenceVersionRanges vr1 vr2) + == (withinRange v' vr1 && not (withinRange v' vr2)) + +prop_invertVersionRange :: VersionRange -> Version -> Bool +prop_invertVersionRange vr v' = + withinRange v' (invertVersionRange vr) + == not (withinRange v' vr) + +prop_withinVersion :: Version -> Version -> Bool +prop_withinVersion v v' = + withinRange v' (withinVersion v) + == (v' >= v && v' < upper v) + where + upper = alterVersion $ \numbers -> init numbers ++ [last numbers + 1] + +prop_foldVersionRange :: VersionRange -> Property +prop_foldVersionRange range = + expandVR range + === foldVersionRange anyVersion thisVersion + laterVersion earlierVersion + unionVersionRanges intersectVersionRanges + range + where + expandVR (WildcardVersion v) = + intersectVersionRanges (expandVR (orLaterVersion v)) (earlierVersion (wildcardUpperBound v)) + expandVR (MajorBoundVersion v) = + intersectVersionRanges (expandVR (orLaterVersion v)) (earlierVersion (majorUpperBound v)) + expandVR (OrEarlierVersion v) = + unionVersionRanges (thisVersion v) (earlierVersion v) + expandVR (OrLaterVersion v) = + unionVersionRanges (thisVersion v) (laterVersion v) + expandVR (UnionVersionRanges v1 v2) = + UnionVersionRanges (expandVR v1) (expandVR v2) + expandVR (IntersectVersionRanges v1 v2) = + IntersectVersionRanges (expandVR v1) (expandVR v2) + expandVR (VersionRangeParens v) = expandVR v + expandVR v = v + + upper 
= alterVersion $ \numbers -> init numbers ++ [last numbers + 1] + +prop_foldVersionRange' :: VersionRange -> Property +prop_foldVersionRange' range = + normaliseVersionRange srange + === foldVersionRange' anyVersion thisVersion + laterVersion earlierVersion + orLaterVersion orEarlierVersion + (\v _ -> withinVersion v) + (\v _ -> majorBoundVersion v) + unionVersionRanges intersectVersionRanges id + srange + where + srange = stripParensVersionRange range + +prop_isAnyVersion1 :: VersionRange -> Version -> Property +prop_isAnyVersion1 range version = + isAnyVersion range ==> withinRange version range + +prop_isAnyVersion2 :: VersionRange -> Property +prop_isAnyVersion2 range = + isAnyVersion range ==> + foldVersionRange True (\_ -> False) (\_ -> False) (\_ -> False) + (\_ _ -> False) (\_ _ -> False) + (simplifyVersionRange range) + +prop_isNoVersion :: VersionRange -> Version -> Property +prop_isNoVersion range version = + isNoVersion range ==> not (withinRange version range) + +prop_isSpecificVersion1 :: VersionRange -> NonEmptyList Version -> Property +prop_isSpecificVersion1 range (NonEmpty versions) = + isJust version && not (null versions') ==> + allEqual (fromJust version : versions') + where + version = isSpecificVersion range + versions' = filter (`withinRange` range) versions + allEqual xs = and (zipWith (==) xs (tail xs)) + +prop_isSpecificVersion2 :: VersionRange -> Property +prop_isSpecificVersion2 range = + isJust version ==> + foldVersionRange Nothing Just (\_ -> Nothing) (\_ -> Nothing) + (\_ _ -> Nothing) (\_ _ -> Nothing) + (simplifyVersionRange range) + == version + + where + version = isSpecificVersion range + +-- | 'simplifyVersionRange' is a semantic identity on 'VersionRange'. 
+-- +prop_simplifyVersionRange1 :: VersionRange -> Version -> Bool +prop_simplifyVersionRange1 range version = + withinRange version range == withinRange version (simplifyVersionRange range) + +prop_simplifyVersionRange1' :: VersionRange -> Bool +prop_simplifyVersionRange1' range = + range `equivalentVersionRange` (simplifyVersionRange range) + +-- | 'simplifyVersionRange' produces a canonical form for ranges with +-- equivalent semantics. +-- +prop_simplifyVersionRange2 :: VersionRange -> VersionRange -> Version -> Property +prop_simplifyVersionRange2 r r' v = + r /= r' && simplifyVersionRange r == simplifyVersionRange r' ==> + withinRange v r == withinRange v r' + +prop_simplifyVersionRange2' :: VersionRange -> VersionRange -> Property +prop_simplifyVersionRange2' r r' = + r /= r' && simplifyVersionRange r == simplifyVersionRange r' ==> + r `equivalentVersionRange` r' + +--FIXME: see equivalentVersionRange for details +prop_simplifyVersionRange2'' :: VersionRange -> VersionRange -> Property +prop_simplifyVersionRange2'' r r' = + r /= r' && r `equivalentVersionRange` r' ==> + simplifyVersionRange r == simplifyVersionRange r' + || isNoVersion r + || isNoVersion r' + +-------------------- +-- VersionIntervals +-- + +-- | Generating VersionIntervals +-- +-- This is a tad tricky as VersionIntervals is an abstract type, so we first +-- make a local type for generating the internal representation. Then we check +-- that this lets us construct valid 'VersionIntervals'. +-- + +instance Arbitrary VersionIntervals where + arbitrary = fmap mkVersionIntervals' arbitrary + where + mkVersionIntervals' :: [(Version, Bound)] -> VersionIntervals + mkVersionIntervals' = mkVersionIntervals . 
go version0 + where + go :: Version -> [(Version, Bound)] -> [VersionInterval] + go _ [] = [] + go v [(lv, lb)] = + [(LowerBound (addVersion lv v) lb, NoUpperBound)] + go v ((lv, lb) : (uv, ub) : rest) = + (LowerBound lv' lb, UpperBound uv' ub) : go uv' rest + where + lv' = addVersion v lv + uv' = addVersion lv' uv + + addVersion :: Version -> Version -> Version + addVersion xs ys = mkVersion $ z (versionNumbers xs) (versionNumbers ys) + where + z [] ys' = ys' + z xs' [] = xs' + z (x : xs') (y : ys') = x + y : z xs' ys' + +instance Arbitrary Bound where + arbitrary = elements [ExclusiveBound, InclusiveBound] + +-- | Check that our VersionIntervals' arbitrary instance generates intervals +-- that satisfies the invariant. +-- +prop_gen_intervals1 :: VersionIntervals -> Property +prop_gen_intervals1 i + = label ("length i ≈ 2 ^ " ++ show metric ++ " - 1") + $ xs === ys + where + metric = intLog2 (length xs + 1) + + xs = versionIntervals i + ys = versionIntervals (mkVersionIntervals xs) +-- | Check that constructing our intervals type and converting it to a +-- 'VersionRange' and then into the true intervals type gives us back +-- the exact same sequence of intervals. This tells us that our arbitrary +-- instance for 'VersionIntervals'' is ok. +-- +prop_gen_intervals2 :: VersionIntervals -> Property +prop_gen_intervals2 intervals = + toVersionIntervals (fromVersionIntervals intervals) === intervals + +-- | Check that 'VersionIntervals' models 'VersionRange' via +-- 'toVersionIntervals'. +-- +prop_to_intervals :: VersionRange -> Version -> Bool +prop_to_intervals range version = + withinRange version range == withinIntervals version intervals + where + intervals = toVersionIntervals range + +-- | Check that semantic equality on 'VersionRange's is the same as converting +-- to 'VersionIntervals' and doing syntactic equality. 
+-- +prop_to_intervals_canonical :: VersionRange -> VersionRange -> Property +prop_to_intervals_canonical r r' = + r /= r' && r `equivalentVersionRange` r' ==> + toVersionIntervals r == toVersionIntervals r' + +prop_to_intervals_canonical' :: VersionRange -> VersionRange -> Property +prop_to_intervals_canonical' r r' = + r /= r' && toVersionIntervals r == toVersionIntervals r' ==> + r `equivalentVersionRange` r' + +-- | Check that 'VersionIntervals' models 'VersionRange' via +-- 'fromVersionIntervals'. +-- +prop_from_intervals :: VersionIntervals -> Version -> Bool +prop_from_intervals intervals version = + withinRange version range == withinIntervals version intervals + where + range = fromVersionIntervals intervals + +-- | @'toVersionIntervals' . 'fromVersionIntervals'@ is an exact identity on +-- 'VersionIntervals'. +-- +prop_to_from_intervals :: VersionIntervals -> Bool +prop_to_from_intervals intervals = + toVersionIntervals (fromVersionIntervals intervals) == intervals + +-- | @'fromVersionIntervals' . 'toVersionIntervals'@ is a semantic identity on +-- 'VersionRange', though not necessarily a syntactic identity. +-- +prop_from_to_intervals :: VersionRange -> Bool +prop_from_to_intervals range = + range' `equivalentVersionRange` range + where + range' = fromVersionIntervals (toVersionIntervals range) + +-- | Equivalent of 'prop_from_to_intervals' +-- +prop_from_to_intervals' :: VersionRange -> Version -> Bool +prop_from_to_intervals' range version = + withinRange version range' == withinRange version range + where + range' = fromVersionIntervals (toVersionIntervals range) + +-- | The semantics of 'unionVersionIntervals' is (||). 
+-- +prop_unionVersionIntervals :: VersionIntervals -> VersionIntervals + -> Version -> Bool +prop_unionVersionIntervals is1 is2 v = + withinIntervals v (unionVersionIntervals is1 is2) + == (withinIntervals v is1 || withinIntervals v is2) + +-- | 'unionVersionIntervals' is idempotent +-- +prop_unionVersionIntervals_idempotent :: VersionIntervals -> Bool +prop_unionVersionIntervals_idempotent = + Laws.idempotent_binary unionVersionIntervals + +-- | 'unionVersionIntervals' is commutative +-- +prop_unionVersionIntervals_commutative :: VersionIntervals + -> VersionIntervals -> Bool +prop_unionVersionIntervals_commutative = + Laws.commutative unionVersionIntervals + +-- | 'unionVersionIntervals' is associative +-- +prop_unionVersionIntervals_associative :: VersionIntervals + -> VersionIntervals + -> VersionIntervals -> Bool +prop_unionVersionIntervals_associative = + Laws.associative unionVersionIntervals + +-- | The semantics of 'intersectVersionIntervals' is (&&). +-- +prop_intersectVersionIntervals :: VersionIntervals -> VersionIntervals + -> Version -> Bool +prop_intersectVersionIntervals is1 is2 v = + withinIntervals v (intersectVersionIntervals is1 is2) + == (withinIntervals v is1 && withinIntervals v is2) + +-- | 'intersectVersionIntervals' is idempotent +-- +prop_intersectVersionIntervals_idempotent :: VersionIntervals -> Bool +prop_intersectVersionIntervals_idempotent = + Laws.idempotent_binary intersectVersionIntervals + +-- | 'intersectVersionIntervals' is commutative +-- +prop_intersectVersionIntervals_commutative :: VersionIntervals + -> VersionIntervals -> Bool +prop_intersectVersionIntervals_commutative = + Laws.commutative intersectVersionIntervals + +-- | 'intersectVersionIntervals' is associative +-- +prop_intersectVersionIntervals_associative :: VersionIntervals + -> VersionIntervals + -> VersionIntervals -> Bool +prop_intersectVersionIntervals_associative = + Laws.associative intersectVersionIntervals + +-- | 'unionVersionIntervals' distributes over 
'intersectVersionIntervals' +-- +prop_union_intersect_distributive :: Property +prop_union_intersect_distributive = + Laws.distributive_left unionVersionIntervals intersectVersionIntervals + .&. Laws.distributive_right unionVersionIntervals intersectVersionIntervals + +-- | 'intersectVersionIntervals' distributes over 'unionVersionIntervals' +-- +prop_intersect_union_distributive :: Property +prop_intersect_union_distributive = + Laws.distributive_left intersectVersionIntervals unionVersionIntervals + .&. Laws.distributive_right intersectVersionIntervals unionVersionIntervals + +-- | The semantics of 'invertVersionIntervals' is 'not'. +-- +prop_invertVersionIntervals :: VersionIntervals + -> Version -> Bool +prop_invertVersionIntervals vi v = + withinIntervals v (invertVersionIntervals vi) + == not (withinIntervals v vi) + +-- | Double application of 'invertVersionIntervals' is the identity function +prop_invertVersionIntervalsTwice :: VersionIntervals -> Bool +prop_invertVersionIntervalsTwice vi = + invertVersionIntervals (invertVersionIntervals vi) == vi + + + +-------------------------------- +-- equivalentVersionRange helper + +prop_equivalentVersionRange :: VersionRange -> VersionRange + -> Version -> Property +prop_equivalentVersionRange range range' version = + equivalentVersionRange range range' && range /= range' ==> + withinRange version range == withinRange version range' + +--FIXME: this is wrong. consider version ranges "<=1" and "<1.0" +-- this algorithm cannot distinguish them because there is no version +-- that is included by one that is excluded by the other. +-- Alternatively we must reconsider the semantics of '<' and '<=' +-- in version ranges / version intervals. Perhaps the canonical +-- representation should use just < v and interpret "<= v" as "< v.0". 
+equivalentVersionRange :: VersionRange -> VersionRange -> Bool +equivalentVersionRange vr1 vr2 = + let allVersionsUsed = nub (sort (versionsUsed vr1 ++ versionsUsed vr2)) + minPoint = mkVersion [0] + maxPoint | null allVersionsUsed = minPoint + | otherwise = alterVersion (++[1]) (maximum allVersionsUsed) + probeVersions = minPoint : maxPoint + : intermediateVersions allVersionsUsed + + in all (\v -> withinRange v vr1 == withinRange v vr2) probeVersions + + where + versionsUsed = foldVersionRange [] (\x->[x]) (\x->[x]) (\x->[x]) (++) (++) + intermediateVersions (v1:v2:vs) = v1 : intermediateVersion v1 v2 + : intermediateVersions (v2:vs) + intermediateVersions vs = vs + +intermediateVersion :: Version -> Version -> Version +intermediateVersion v1 v2 | v1 >= v2 = error "intermediateVersion: v1 >= v2" +intermediateVersion v1 v2 = + mkVersion (intermediateList (versionNumbers v1) (versionNumbers v2)) + where + intermediateList :: [Int] -> [Int] -> [Int] + intermediateList [] (_:_) = [0] + intermediateList (x:xs) (y:ys) + | x < y = x : xs ++ [0] + | otherwise = x : intermediateList xs ys + +prop_intermediateVersion :: Version -> Version -> Property +prop_intermediateVersion v1 v2 = + (v1 /= v2) && not (adjacentVersions v1 v2) ==> + if v1 < v2 + then let v = intermediateVersion v1 v2 + in (v1 < v && v < v2) + else let v = intermediateVersion v2 v1 + in v1 > v && v > v2 + +adjacentVersions :: Version -> Version -> Bool +adjacentVersions ver1 ver2 = v1 ++ [0] == v2 || v2 ++ [0] == v1 + where + v1 = versionNumbers ver1 + v2 = versionNumbers ver2 + +-------------------------------- +-- Parsing and pretty printing +-- + +prop_parse_disp_inv :: VersionRange -> Property +prop_parse_disp_inv vr = + parseDisp vr === (parseDisp vr >>= parseDisp) + where + parseDisp = simpleParse . display + +prop_parsec_disp_inv :: VersionRange -> Property +prop_parsec_disp_inv vr = + parseDisp vr === (parseDisp vr >>= parseDisp) + where + parseDisp = simpleParsec . 
display + +prop_parse_disp :: VersionRange -> Property +prop_parse_disp vr = counterexample (show (display vr')) $ + fmap s (simpleParse (display vr')) === Just vr' + .&&. + fmap s (simpleParsec (display vr')) === Just vr' + where + -- we have to strip parens, because arbitrary 'VersionRange' may have + -- too little parens constructors. + s = stripParensVersionRange + vr' = s vr + +prop_parse_disp1 :: VersionRange -> Bool +prop_parse_disp1 vr = + fmap stripParens (simpleParse (display vr)) == Just (normaliseVersionRange vr) + where + stripParens :: VersionRange -> VersionRange + stripParens (VersionRangeParens v) = stripParens v + stripParens (UnionVersionRanges v1 v2) = + UnionVersionRanges (stripParens v1) (stripParens v2) + stripParens (IntersectVersionRanges v1 v2) = + IntersectVersionRanges (stripParens v1) (stripParens v2) + stripParens v = v + +prop_parse_disp2 :: VersionRange -> Property +prop_parse_disp2 vr = + let b = fmap (display :: VersionRange -> String) (simpleParse (display vr)) + a = Just (display vr) + in + counterexample ("Expected: " ++ show a) $ + counterexample ("But got: " ++ show b) $ + b == a + +prop_parse_disp3 :: VersionRange -> Property +prop_parse_disp3 vr = + let a = Just (display vr) + b = fmap displayRaw (simpleParse (display vr)) + in + counterexample ("Expected: " ++ show a) $ + counterexample ("But got: " ++ show b) $ + b == a + +prop_parse_disp4 :: VersionRange -> Property +prop_parse_disp4 vr = + let a = Just vr + b = (simpleParse (display vr)) + in + counterexample ("Expected: " ++ show a) $ + counterexample ("But got: " ++ show b) $ + b == a + +prop_parse_disp5 :: VersionRange -> Property +prop_parse_disp5 vr = + let a = Just vr + b = simpleParse (displayRaw vr) + in + counterexample ("Expected: " ++ show a) $ + counterexample ("But got: " ++ show b) $ + b == a + +displayRaw :: VersionRange -> String +displayRaw = + Disp.render + . 
foldVersionRange' -- precedence: + -- All the same as the usual pretty printer, except for the parens + ( Disp.text "-any") + (\v -> Disp.text "==" <<>> disp v) + (\v -> Disp.char '>' <<>> disp v) + (\v -> Disp.char '<' <<>> disp v) + (\v -> Disp.text ">=" <<>> disp v) + (\v -> Disp.text "<=" <<>> disp v) + (\v _ -> Disp.text "==" <<>> dispWild v) + (\v _ -> Disp.text "^>=" <<>> disp v) + (\r1 r2 -> r1 <+> Disp.text "||" <+> r2) + (\r1 r2 -> r1 <+> Disp.text "&&" <+> r2) + (\r -> Disp.parens r) -- parens + + where + dispWild v = + Disp.hcat (Disp.punctuate (Disp.char '.') + (map Disp.int (versionNumbers v))) + <<>> Disp.text ".*" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/Cabal-2.4.1.0/tests/UnitTests.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/Cabal-2.4.1.0/tests/UnitTests.hs 2018-11-26 08:42:54.000000000 +0000 @@ -0,0 +1,96 @@ +{-# LANGUAGE DeriveDataTypeable #-} +module Main + ( main + ) where + +import Test.Tasty +import Test.Tasty.Options + +import Data.Proxy +import Data.Typeable + +import Distribution.Simple.Utils +import Distribution.Verbosity +import Distribution.Compat.Time + +import qualified UnitTests.Distribution.Compat.CreatePipe +import qualified UnitTests.Distribution.Compat.ReadP +import qualified UnitTests.Distribution.Compat.Time +import qualified UnitTests.Distribution.Compat.Graph +import qualified UnitTests.Distribution.Simple.Glob +import qualified UnitTests.Distribution.Simple.Program.Internal +import qualified UnitTests.Distribution.Simple.Utils +import qualified UnitTests.Distribution.System +import qualified UnitTests.Distribution.Utils.Generic +import qualified UnitTests.Distribution.Utils.NubList +import qualified UnitTests.Distribution.Utils.ShortText +import qualified UnitTests.Distribution.Version 
(versionTests) +import qualified UnitTests.Distribution.SPDX (spdxTests) +import qualified UnitTests.Distribution.Types.GenericPackageDescription + +tests :: Int -> TestTree +tests mtimeChangeCalibrated = + askOption $ \(OptionMtimeChangeDelay mtimeChangeProvided) -> + let mtimeChange = if mtimeChangeProvided /= 0 + then mtimeChangeProvided + else mtimeChangeCalibrated + in + testGroup "Unit Tests" $ + [ testGroup "Distribution.Compat.CreatePipe" + UnitTests.Distribution.Compat.CreatePipe.tests + , testGroup "Distribution.Compat.ReadP" + UnitTests.Distribution.Compat.ReadP.tests + , testGroup "Distribution.Compat.Time" + (UnitTests.Distribution.Compat.Time.tests mtimeChange) + , testGroup "Distribution.Compat.Graph" + UnitTests.Distribution.Compat.Graph.tests + , testGroup "Distribution.Simple.Glob" + UnitTests.Distribution.Simple.Glob.tests + , testGroup "Distribution.Simple.Program.Internal" + UnitTests.Distribution.Simple.Program.Internal.tests + , testGroup "Distribution.Simple.Utils" + UnitTests.Distribution.Simple.Utils.tests + , testGroup "Distribution.Utils.Generic" + UnitTests.Distribution.Utils.Generic.tests + , testGroup "Distribution.Utils.NubList" + UnitTests.Distribution.Utils.NubList.tests + , testGroup "Distribution.Utils.ShortText" + UnitTests.Distribution.Utils.ShortText.tests + , testGroup "Distribution.System" + UnitTests.Distribution.System.tests + , testGroup "Distribution.Types.GenericPackageDescription" + UnitTests.Distribution.Types.GenericPackageDescription.tests + , testGroup "Distribution.Version" + UnitTests.Distribution.Version.versionTests + , testGroup "Distribution.SPDX" + UnitTests.Distribution.SPDX.spdxTests + ] + +extraOptions :: [OptionDescription] +extraOptions = + [ Option (Proxy :: Proxy OptionMtimeChangeDelay) + ] + +newtype OptionMtimeChangeDelay = OptionMtimeChangeDelay Int + deriving Typeable + +instance IsOption OptionMtimeChangeDelay where + defaultValue = OptionMtimeChangeDelay 0 + parseValue = fmap 
OptionMtimeChangeDelay . safeRead + optionName = return "mtime-change-delay" + optionHelp = return $ "How long to wait before attempting to detect" + ++ "file modification, in microseconds" + +main :: IO () +main = do + (mtimeChange, mtimeChange') <- calibrateMtimeChangeDelay + let toMillis :: Int -> Double + toMillis x = fromIntegral x / 1000.0 + notice normal $ "File modification time resolution calibration completed, " + ++ "maximum delay observed: " + ++ (show . toMillis $ mtimeChange ) ++ " ms. " + ++ "Will be using delay of " ++ (show . toMillis $ mtimeChange') + ++ " for test runs." + defaultMainWithIngredients + (includingOptions extraOptions : defaultIngredients) + (tests mtimeChange') diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/bash-completion/cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/bash-completion/cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/bash-completion/cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/bash-completion/cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,94 +0,0 @@ -# cabal command line completion -# Copyright 2007-2008 "Lennart Kolmodin" -# "Duncan Coutts" -# - -# List cabal targets by type, pass: -# - test-suite for test suites -# - benchmark for benchmarks -# - executable for executables -# - executable|test-suite|benchmark for the three -_cabal_list() -{ - for f in ./*.cabal; do - grep -Ei "^[[:space:]]*($1)[[:space:]]" "$f" | - sed -e "s/.* \([^ ]*\).*/\1/" - done -} - -# List possible targets depending on the command supplied as parameter. The -# ideal option would be to implement this via --list-options on cabal directly. -# This is a temporary workaround. -_cabal_targets() -{ - # If command ($*) contains build, repl, test or bench completes with - # targets of according type. 
- local comp - for comp in "$@"; do - [ "$comp" == new-build ] && _cabal_list "executable|test-suite|benchmark" && break - [ "$comp" == build ] && _cabal_list "executable|test-suite|benchmark" && break - [ "$comp" == repl ] && _cabal_list "executable|test-suite|benchmark" && break - [ "$comp" == run ] && _cabal_list "executable" && break - [ "$comp" == test ] && _cabal_list "test-suite" && break - [ "$comp" == bench ] && _cabal_list "benchmark" && break - done -} - -# List possible subcommands of a cabal subcommand. -# -# In example "sandbox" is a cabal subcommand that itself has subcommands. Since -# "cabal --list-options" doesn't work in such cases we have to get the list -# using other means. -_cabal_subcommands() -{ - local word - for word in "$@"; do - case "$word" in - sandbox) - # Get list of "cabal sandbox" subcommands from its help message. - "$1" help sandbox | - sed -n '1,/^Subcommands:$/d;/^Flags for sandbox:$/,$d;/^ /d;s/^\(.*\):/\1/p' - break # Terminate for loop. - ;; - esac - done -} - -__cabal_has_doubledash () -{ - local c=1 - # Ignore the last word, because it is replaced anyways. - # This allows expansion for flags on "cabal foo --", - # but does not try to complete after "cabal foo -- ". - local n=$((${#COMP_WORDS[@]} - 1)) - while [ $c -lt $n ]; do - if [ "--" = "${COMP_WORDS[c]}" ]; then - return 0 - fi - ((c++)) - done - return 1 -} - -_cabal() -{ - # no completion past cabal arguments. 
- __cabal_has_doubledash && return - - # get the word currently being completed - local cur - cur=${COMP_WORDS[$COMP_CWORD]} - - # create a command line to run - local cmd - # copy all words the user has entered - cmd=( ${COMP_WORDS[@]} ) - - # replace the current word with --list-options - cmd[${COMP_CWORD}]="--list-options" - - # the resulting completions should be put into this array - COMPREPLY=( $( compgen -W "$( eval "${cmd[@]}" 2>/dev/null ) $( _cabal_targets "${cmd[@]}" ) $( _cabal_subcommands "${COMP_WORDS[@]}" )" -- "$cur" ) ) -} - -complete -F _cabal -o default cabal diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/bootstrap.sh cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/bootstrap.sh --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/bootstrap.sh 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/bootstrap.sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,536 +0,0 @@ -#!/bin/sh -set -e - -# A script to bootstrap cabal-install. - -# It works by downloading and installing the Cabal, zlib and -# HTTP packages. It then installs cabal-install itself. -# It expects to be run inside the cabal-install directory. - -# Install settings, you can override these by setting environment vars. E.g. 
if -# you don't want profiling and dynamic versions of libraries to be installed in -# addition to vanilla, run 'EXTRA_CONFIGURE_OPTS="" ./bootstrap.sh' - -#VERBOSE -DEFAULT_CONFIGURE_OPTS="--enable-library-profiling --enable-shared" -EXTRA_CONFIGURE_OPTS=${EXTRA_CONFIGURE_OPTS-$DEFAULT_CONFIGURE_OPTS} -#EXTRA_BUILD_OPTS -#EXTRA_INSTALL_OPTS - -die() { - printf "\nError during cabal-install bootstrap:\n%s\n" "$1" >&2 - exit 2 -} - -# programs, you can override these by setting environment vars -GHC="${GHC:-ghc}" -GHC_PKG="${GHC_PKG:-ghc-pkg}" -GHC_VER="$(${GHC} --numeric-version)" -HADDOCK=${HADDOCK:-haddock} -WGET="${WGET:-wget}" -CURL="${CURL:-curl}" -FETCH="${FETCH:-fetch}" -TAR="${TAR:-tar}" -GZIP_PROGRAM="${GZIP_PROGRAM:-gzip}" - -# The variable SCOPE_OF_INSTALLATION can be set on the command line to -# use/install the libaries needed to build cabal-install to a custom package -# database instead of the user or global package database. -# -# Example: -# -# $ ghc-pkg init /my/package/database -# $ SCOPE_OF_INSTALLATION='--package-db=/my/package/database' ./bootstrap.sh -# -# You can also combine SCOPE_OF_INSTALLATION with PREFIX: -# -# $ ghc-pkg init /my/prefix/packages.conf.d -# $ SCOPE_OF_INSTALLATION='--package-db=/my/prefix/packages.conf.d' \ -# PREFIX=/my/prefix ./bootstrap.sh -# -# If you use the --global,--user or --sandbox arguments, this will -# override the SCOPE_OF_INSTALLATION setting and not use the package -# database you pass in the SCOPE_OF_INSTALLATION variable. - -SCOPE_OF_INSTALLATION="${SCOPE_OF_INSTALLATION:---user}" -DEFAULT_PREFIX="${HOME}/.cabal" - -TMPDIR=$(mktemp -d -p /tmp -t cabal-XXXXXXX || mktemp -d -t cabal-XXXXXXX) -export TMPDIR - -# Check for a C compiler, using user-set $CC, if any, first. -for c in $CC gcc clang cc icc; do - $c --version 1>/dev/null 2>&1 && CC=$c && - echo "Using $c for C compiler. If this is not what you want, set CC." >&2 && - break -done - -# None found. 
-[ -"$CC"- = -""- ] && die 'C compiler not found (or could not be run). - If a C compiler is installed make sure it is on your PATH, or set $CC.' - -# Find the correct linker/linker-wrapper. -# -# See https://github.com/haskell/cabal/pull/4187#issuecomment-269074153. -LINK="$(for link in collect2 ld; do - if [ $($CC -print-prog-name=$link) = $link ] - then - continue - else - $CC -print-prog-name=$link && break - fi - done)" - -# Fall back to "ld"... might work. -[ -$LINK- = -""- ] && LINK=ld - -# And finally, see if we can compile and link something. - echo 'int main(){}' | $CC -xc - -o /dev/null || - die "C compiler and linker could not compile a simple test program. - Please check your toolchain." - -# Warn that were's overriding $LD if set (if you want). -[ -"$LD"- != -""- ] && [ -"$LD"- != -"$LINK"- ] && - echo "Warning: value set in $LD is not the same as C compiler's $LINK." >&2 - echo "Using $LINK instead." >&2 - -# Set LD, overriding environment if necessary. -export LD=$LINK - -# Check we're in the right directory, etc. -grep "cabal-install" ./cabal-install.cabal > /dev/null 2>&1 || - die "The bootstrap.sh script must be run in the cabal-install directory" - -${GHC} --numeric-version > /dev/null 2>&1 || - die "${GHC} not found (or could not be run). - If ghc is installed, make sure it is on your PATH, - or set the GHC and GHC_PKG vars." - -${GHC_PKG} --version > /dev/null 2>&1 || die "${GHC_PKG} not found." - -GHC_PKG_VER="$(${GHC_PKG} --version | cut -d' ' -f 5)" - -[ ${GHC_VER} = ${GHC_PKG_VER} ] || - die "Version mismatch between ${GHC} and ${GHC_PKG}. - If you set the GHC variable then set GHC_PKG too." - -JOBS="-j1" -while [ "$#" -gt 0 ]; do - case "${1}" in - "--user") - SCOPE_OF_INSTALLATION="${1}" - shift;; - "--global") - SCOPE_OF_INSTALLATION="${1}" - DEFAULT_PREFIX="/usr/local" - shift;; - "--sandbox") - shift - # check if there is another argument which doesn't start with -- - if [ "$#" -le 0 ] || [ ! 
-z $(echo "${1}" | grep "^--") ] - then - SANDBOX=".cabal-sandbox" - else - SANDBOX="${1}" - shift - fi;; - "--no-doc") - NO_DOCUMENTATION=1 - shift;; - "-j"|"--jobs") - shift - # check if there is another argument which doesn't start with - or -- - if [ "$#" -le 0 ] \ - || [ ! -z $(echo "${1}" | grep "^-") ] \ - || [ ! -z $(echo "${1}" | grep "^--") ] - then - JOBS="-j" - else - JOBS="-j${1}" - shift - fi;; - *) - echo "Unknown argument or option, quitting: ${1}" - echo "usage: bootstrap.sh [OPTION]" - echo - echo "options:" - echo " -j/--jobs Number of concurrent workers to use (Default: 1)" - echo " -j without an argument will use all available cores" - echo " --user Install for the local user (default)" - echo " --global Install systemwide (must be run as root)" - echo " --no-doc Do not generate documentation for installed"\ - "packages" - echo " --sandbox Install to a sandbox in the default location"\ - "(.cabal-sandbox)" - echo " --sandbox path Install to a sandbox located at path" - exit;; - esac -done - -# Do not try to use -j with GHC 7.8 or older -case $GHC_VER in - 7.4*|7.6*|7.8*) - JOBS="" - ;; - *) - ;; -esac - -abspath () { case "$1" in /*)printf "%s\n" "$1";; *)printf "%s\n" "$PWD/$1";; - esac; } - -if [ ! -z "$SANDBOX" ] -then # set up variables for sandbox bootstrap - # Make the sandbox path absolute since it will be used from - # different working directories when the dependency packages are - # installed. - SANDBOX=$(abspath "$SANDBOX") - # Get the name of the package database which cabal sandbox would use. - GHC_ARCH=$(ghc --info | - sed -n 's/.*"Target platform".*"\([^-]\+\)-[^-]\+-\([^"]\+\)".*/\1-\2/p') - PACKAGEDB="$SANDBOX/${GHC_ARCH}-ghc-${GHC_VER}-packages.conf.d" - # Assume that if the directory is already there, it is already a - # package database. We will get an error immediately below if it - # isn't. Uses -r to try to be compatible with Solaris, and allow - # symlinks as well as a normal dir/file. - [ ! 
-r "$PACKAGEDB" ] && ghc-pkg init "$PACKAGEDB" - PREFIX="$SANDBOX" - SCOPE_OF_INSTALLATION="--package-db=$PACKAGEDB" - echo Bootstrapping in sandbox at \'$SANDBOX\'. -fi - -# Check for haddock unless no documentation should be generated. -if [ ! ${NO_DOCUMENTATION} ] -then - ${HADDOCK} --version > /dev/null 2>&1 || die "${HADDOCK} not found." -fi - -PREFIX=${PREFIX:-${DEFAULT_PREFIX}} - -# Versions of the packages to install. -# The version regex says what existing installed versions are ok. -PARSEC_VER="3.1.13.0"; PARSEC_VER_REGEXP="[3]\.[1]\." - # >= 3.1 && < 3.2 -DEEPSEQ_VER="1.4.3.0"; DEEPSEQ_VER_REGEXP="1\.[1-9]\." - # >= 1.1 && < 2 -BINARY_VER="0.8.5.1"; BINARY_VER_REGEXP="[0]\.[78]\." - # >= 0.7 && < 0.9 -TEXT_VER="1.2.3.0"; TEXT_VER_REGEXP="[1]\.[2]\." - # >= 1.2 && < 1.3 -NETWORK_URI_VER="2.6.1.0"; NETWORK_URI_VER_REGEXP="2\.6\.(0\.[2-9]|[1-9])" - # >= 2.6.0.2 && < 2.7 -NETWORK_VER="2.7.0.0"; NETWORK_VER_REGEXP="2\.[0-7]\." - # >= 2.0 && < 2.7 -CABAL_VER="2.4.0.1"; CABAL_VER_REGEXP="2\.4\.[0-9]" - # >= 2.4 && < 2.5 -TRANS_VER="0.5.5.0"; TRANS_VER_REGEXP="0\.[45]\." - # >= 0.2.* && < 0.6 -MTL_VER="2.2.2"; MTL_VER_REGEXP="[2]\." - # >= 2.0 && < 3 -HTTP_VER="4000.3.12"; HTTP_VER_REGEXP="4000\.(2\.([5-9]|1[0-9]|2[0-9])|3\.?)" - # >= 4000.2.5 < 4000.4 -ZLIB_VER="0.6.2"; ZLIB_VER_REGEXP="(0\.5\.([3-9]|1[0-9])|0\.6)" - # >= 0.5.3 && <= 0.7 -TIME_VER="1.9.1" TIME_VER_REGEXP="1\.[1-9]\.?" - # >= 1.1 && < 1.10 -RANDOM_VER="1.1" RANDOM_VER_REGEXP="1\.[01]\.?" - # >= 1 && < 1.2 -STM_VER="2.4.5.0"; STM_VER_REGEXP="2\." - # == 2.* -HASHABLE_VER="1.2.7.0"; HASHABLE_VER_REGEXP="1\." - # 1.* -ASYNC_VER="2.2.1"; ASYNC_VER_REGEXP="2\." - # 2.* -BASE16_BYTESTRING_VER="0.1.1.6"; BASE16_BYTESTRING_VER_REGEXP="0\.1" - # 0.1.* -BASE64_BYTESTRING_VER="1.0.0.1"; BASE64_BYTESTRING_VER_REGEXP="1\." - # >=1.0 -CRYPTOHASH_SHA256_VER="0.11.101.0"; CRYPTOHASH_SHA256_VER_REGEXP="0\.11\.?" 
- # 0.11.* -RESOLV_VER="0.1.1.1"; RESOLV_VER_REGEXP="0\.1\.[1-9]" - # >= 0.1.1 && < 0.2 -MINTTY_VER="0.1.2"; MINTTY_VER_REGEXP="0\.1\.?" - # 0.1.* -ECHO_VER="0.1.3"; ECHO_VER_REGEXP="0\.1\.[3-9]" - # >= 0.1.3 && < 0.2 -EDIT_DISTANCE_VER="0.2.2.1"; EDIT_DISTANCE_VER_REGEXP="0\.2\.2\.?" - # 0.2.2.* -ED25519_VER="0.0.5.0"; ED25519_VER_REGEXP="0\.0\.?" - # 0.0.* -HACKAGE_SECURITY_VER="0.5.3.0"; HACKAGE_SECURITY_VER_REGEXP="0\.5\.((2\.[2-9]|[3-9])|3)" - # >= 0.5.2 && < 0.6 -TAR_VER="0.5.1.0"; TAR_VER_REGEXP="0\.5\.([1-9]|1[0-9]|0\.[3-9]|0\.1[0-9])\.?" - # >= 0.5.0.3 && < 0.6 -DIGEST_VER="0.0.1.2"; DIGEST_REGEXP="0\.0\.(1\.[2-9]|[2-9]\.?)" - # >= 0.0.1.2 && < 0.1 -ZIP_ARCHIVE_VER="0.3.3"; ZIP_ARCHIVE_REGEXP="0\.3\.[3-9]" - # >= 0.3.3 && < 0.4 - -HACKAGE_URL="https://hackage.haskell.org/package" - -# Haddock fails for hackage-security for GHC <8, -# c.f. https://github.com/well-typed/hackage-security/issues/149 -NO_DOCS_PACKAGES_VER_REGEXP="hackage-security-0\.5\.[0-9]+\.[0-9]+" - -# Cache the list of packages: -echo "Checking installed packages for ghc-${GHC_VER}..." -${GHC_PKG} list --global ${SCOPE_OF_INSTALLATION} > ghc-pkg.list || - die "running '${GHC_PKG} list' failed" - -# Will we need to install this package, or is a suitable version installed? -need_pkg () { - PKG=$1 - VER_MATCH=$2 - if egrep " ${PKG}-${VER_MATCH}" ghc-pkg.list > /dev/null 2>&1 - then - return 1; - else - return 0; - fi - #Note: we cannot use "! grep" here as Solaris 9 /bin/sh doesn't like it. -} - -info_pkg () { - PKG=$1 - VER=$2 - VER_MATCH=$3 - - if need_pkg ${PKG} ${VER_MATCH} - then - if [ -r "${PKG}-${VER}.tar.gz" ] - then - echo "${PKG}-${VER} will be installed from local tarball." - else - echo "${PKG}-${VER} will be downloaded and installed." - fi - else - echo "${PKG} is already installed and the version is ok." 
- fi -} - -fetch_pkg () { - PKG=$1 - VER=$2 - - URL_PKG=${HACKAGE_URL}/${PKG}-${VER}/${PKG}-${VER}.tar.gz - URL_PKGDESC=${HACKAGE_URL}/${PKG}-${VER}/${PKG}.cabal - if which ${CURL} > /dev/null - then - ${CURL} -L --fail -C - -O ${URL_PKG} || die "Failed to download ${PKG}." - ${CURL} -L --fail -C - -O ${URL_PKGDESC} \ - || die "Failed to download '${PKG}.cabal'." - elif which ${WGET} > /dev/null - then - ${WGET} -c ${URL_PKG} || die "Failed to download ${PKG}." - ${WGET} -c ${URL_PKGDESC} || die "Failed to download '${PKG}.cabal'." - elif which ${FETCH} > /dev/null - then - ${FETCH} ${URL_PKG} || die "Failed to download ${PKG}." - ${FETCH} ${URL_PKGDESC} || die "Failed to download '${PKG}.cabal'." - else - die "Failed to find a downloader. 'curl', 'wget' or 'fetch' is required." - fi - [ -f "${PKG}-${VER}.tar.gz" ] || - die "Downloading ${URL_PKG} did not create ${PKG}-${VER}.tar.gz" - [ -f "${PKG}.cabal" ] || - die "Downloading ${URL_PKGDESC} did not create ${PKG}.cabal" - mv "${PKG}.cabal" "${PKG}.cabal.hackage" -} - -unpack_pkg () { - PKG=$1 - VER=$2 - - rm -rf "${PKG}-${VER}.tar" "${PKG}-${VER}" - ${GZIP_PROGRAM} -d < "${PKG}-${VER}.tar.gz" | ${TAR} -xf - - [ -d "${PKG}-${VER}" ] || die "Failed to unpack ${PKG}-${VER}.tar.gz" - cp "${PKG}.cabal.hackage" "${PKG}-${VER}/${PKG}.cabal" -} - -install_pkg () { - PKG=$1 - VER=$2 - - [ -x Setup ] && ./Setup clean - [ -f Setup ] && rm Setup - - PKG_DBS=$(printf '%s\n' "${SCOPE_OF_INSTALLATION}" \ - | sed -e 's/--package-db/-package-db/' \ - -e 's/--global/-global-package-db/' \ - -e 's/--user/-user-package-db/') - - ${GHC} --make ${JOBS} ${PKG_DBS} Setup -o Setup -XRank2Types -XFlexibleContexts || - die "Compiling the Setup script failed." 
- - [ -x Setup ] || die "The Setup script does not exist or cannot be run" - - args="${SCOPE_OF_INSTALLATION} --prefix=${PREFIX} --with-compiler=${GHC}" - args="$args --with-hc-pkg=${GHC_PKG} --with-gcc=${CC} --with-ld=${LD}" - args="$args ${EXTRA_CONFIGURE_OPTS} ${VERBOSE}" - - ./Setup configure $args || die "Configuring the ${PKG} package failed." - - ./Setup build ${JOBS} ${EXTRA_BUILD_OPTS} ${VERBOSE} || - die "Building the ${PKG} package failed." - - if [ ! ${NO_DOCUMENTATION} ] - then - if echo "${PKG}-${VER}" | egrep ${NO_DOCS_PACKAGES_VER_REGEXP} \ - > /dev/null 2>&1 - then - echo "Skipping documentation for the ${PKG} package." - else - ./Setup haddock --with-ghc=${GHC} --with-haddock=${HADDOCK} ${VERBOSE} || - die "Documenting the ${PKG} package failed." - fi - fi - - ./Setup install ${EXTRA_INSTALL_OPTS} ${VERBOSE} || - die "Installing the ${PKG} package failed." -} - -do_pkg () { - PKG=$1 - VER=$2 - VER_MATCH=$3 - - if need_pkg ${PKG} ${VER_MATCH} - then - echo - if [ -r "${PKG}-${VER}.tar.gz" ] - then - echo "Using local tarball for ${PKG}-${VER}." - else - echo "Downloading ${PKG}-${VER}..." - fetch_pkg ${PKG} ${VER} - fi - unpack_pkg "${PKG}" "${VER}" - (cd "${PKG}-${VER}" && install_pkg ${PKG} ${VER}) - fi -} - -# If we're bootstrapping from a Git clone, install the local version of Cabal -# instead of downloading one from Hackage. -do_Cabal_pkg () { - if [ -d "../.git" ] - then - if need_pkg "Cabal" ${CABAL_VER_REGEXP} - then - echo "Cabal-${CABAL_VER} will be installed from the local Git clone." - (cd ../Cabal && install_pkg ${CABAL_VER} ${CABAL_VER_REGEXP}) - else - echo "Cabal is already installed and the version is ok." - fi - else - info_pkg "Cabal" ${CABAL_VER} ${CABAL_VER_REGEXP} - do_pkg "Cabal" ${CABAL_VER} ${CABAL_VER_REGEXP} - fi -} - -# Actually do something! 
- -info_pkg "deepseq" ${DEEPSEQ_VER} ${DEEPSEQ_VER_REGEXP} -info_pkg "binary" ${BINARY_VER} ${BINARY_VER_REGEXP} -info_pkg "time" ${TIME_VER} ${TIME_VER_REGEXP} -info_pkg "transformers" ${TRANS_VER} ${TRANS_VER_REGEXP} -info_pkg "mtl" ${MTL_VER} ${MTL_VER_REGEXP} -info_pkg "text" ${TEXT_VER} ${TEXT_VER_REGEXP} -info_pkg "parsec" ${PARSEC_VER} ${PARSEC_VER_REGEXP} -info_pkg "network-uri" ${NETWORK_URI_VER} ${NETWORK_URI_VER_REGEXP} -info_pkg "network" ${NETWORK_VER} ${NETWORK_VER_REGEXP} -info_pkg "HTTP" ${HTTP_VER} ${HTTP_VER_REGEXP} -info_pkg "zlib" ${ZLIB_VER} ${ZLIB_VER_REGEXP} -info_pkg "random" ${RANDOM_VER} ${RANDOM_VER_REGEXP} -info_pkg "stm" ${STM_VER} ${STM_VER_REGEXP} -info_pkg "hashable" ${HASHABLE_VER} ${HASHABLE_VER_REGEXP} -info_pkg "async" ${ASYNC_VER} ${ASYNC_VER_REGEXP} -info_pkg "base16-bytestring" ${BASE16_BYTESTRING_VER} \ - ${BASE16_BYTESTRING_VER_REGEXP} -info_pkg "base64-bytestring" ${BASE64_BYTESTRING_VER} \ - ${BASE64_BYTESTRING_VER_REGEXP} -info_pkg "cryptohash-sha256" ${CRYPTOHASH_SHA256_VER} \ - ${CRYPTOHASH_SHA256_VER_REGEXP} -info_pkg "resolv" ${RESOLV_VER} ${RESOLV_VER_REGEXP} -info_pkg "mintty" ${MINTTY_VER} ${MINTTY_VER_REGEXP} -info_pkg "echo" ${ECHO_VER} ${ECHO_VER_REGEXP} -info_pkg "edit-distance" ${EDIT_DISTANCE_VER} ${EDIT_DISTANCE_VER_REGEXP} -info_pkg "ed25519" ${ED25519_VER} ${ED25519_VER_REGEXP} -info_pkg "tar" ${TAR_VER} ${TAR_VER_REGEXP} -info_pkg "digest" ${DIGEST_VER} ${DIGEST_REGEXP} -info_pkg "zip-archive" ${ZIP_ARCHIVE_VER} ${ZIP_ARCHIVE_REGEXP} -info_pkg "hackage-security" ${HACKAGE_SECURITY_VER} \ - ${HACKAGE_SECURITY_VER_REGEXP} - -do_pkg "deepseq" ${DEEPSEQ_VER} ${DEEPSEQ_VER_REGEXP} -do_pkg "binary" ${BINARY_VER} ${BINARY_VER_REGEXP} -do_pkg "time" ${TIME_VER} ${TIME_VER_REGEXP} - -# Cabal might depend on these -do_pkg "transformers" ${TRANS_VER} ${TRANS_VER_REGEXP} -do_pkg "mtl" ${MTL_VER} ${MTL_VER_REGEXP} -do_pkg "text" ${TEXT_VER} ${TEXT_VER_REGEXP} -do_pkg "parsec" ${PARSEC_VER} ${PARSEC_VER_REGEXP} - -# 
Install the Cabal library from the local Git clone if possible. -do_Cabal_pkg - -do_pkg "network-uri" ${NETWORK_URI_VER} ${NETWORK_URI_VER_REGEXP} -do_pkg "network" ${NETWORK_VER} ${NETWORK_VER_REGEXP} -do_pkg "HTTP" ${HTTP_VER} ${HTTP_VER_REGEXP} -do_pkg "zlib" ${ZLIB_VER} ${ZLIB_VER_REGEXP} -do_pkg "random" ${RANDOM_VER} ${RANDOM_VER_REGEXP} -do_pkg "stm" ${STM_VER} ${STM_VER_REGEXP} -do_pkg "hashable" ${HASHABLE_VER} ${HASHABLE_VER_REGEXP} -do_pkg "async" ${ASYNC_VER} ${ASYNC_VER_REGEXP} -do_pkg "base16-bytestring" ${BASE16_BYTESTRING_VER} \ - ${BASE16_BYTESTRING_VER_REGEXP} -do_pkg "base64-bytestring" ${BASE64_BYTESTRING_VER} \ - ${BASE64_BYTESTRING_VER_REGEXP} -do_pkg "cryptohash-sha256" ${CRYPTOHASH_SHA256_VER} \ - ${CRYPTOHASH_SHA256_VER_REGEXP} -do_pkg "resolv" ${RESOLV_VER} ${RESOLV_VER_REGEXP} -do_pkg "mintty" ${MINTTY_VER} ${MINTTY_VER_REGEXP} -do_pkg "echo" ${ECHO_VER} ${ECHO_VER_REGEXP} -do_pkg "edit-distance" ${EDIT_DISTANCE_VER} ${EDIT_DISTANCE_VER_REGEXP} -do_pkg "ed25519" ${ED25519_VER} ${ED25519_VER_REGEXP} -do_pkg "tar" ${TAR_VER} ${TAR_VER_REGEXP} -do_pkg "digest" ${DIGEST_VER} ${DIGEST_REGEXP} -do_pkg "zip-archive" ${ZIP_ARCHIVE_VER} ${ZIP_ARCHIVE_REGEXP} -do_pkg "hackage-security" ${HACKAGE_SECURITY_VER} \ - ${HACKAGE_SECURITY_VER_REGEXP} - - -install_pkg "cabal-install" - -# Use the newly built cabal to turn the prefix/package database into a -# legit cabal sandbox. This works because 'cabal sandbox init' will -# reuse the already existing package database and other files if they -# are in the expected locations. -[ ! -z "$SANDBOX" ] && $SANDBOX/bin/cabal sandbox init --sandbox $SANDBOX - -echo -echo "===========================================" -CABAL_BIN="$PREFIX/bin" -if [ -x "$CABAL_BIN/cabal" ] -then - echo "The 'cabal' program has been installed in $CABAL_BIN/" - echo "You should either add $CABAL_BIN to your PATH" - echo "or copy the cabal program to a directory that is on your PATH." 
- echo - echo "The first thing to do is to get the latest list of packages with:" - echo " cabal update" - echo "This will also create a default config file (if it does not already" - echo "exist) at $HOME/.cabal/config" - echo - echo "By default cabal will install programs to $HOME/.cabal/bin" - echo "If you do not want to add this directory to your PATH then you can" - echo "change the setting in the config file, for example you could use:" - echo "symlink-bindir: $HOME/bin" -else - echo "Sorry, something went wrong." - echo "The 'cabal' executable was not successfully installed into" - echo "$CABAL_BIN/" -fi -echo - -rm ghc-pkg.list diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/cabal-install.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/cabal-install.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/cabal-install.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/cabal-install.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,351 +0,0 @@ -Cabal-Version: >= 1.10 --- NOTE: This file is autogenerated from 'cabal-install.cabal.pp'. --- DO NOT EDIT MANUALLY. --- To update this file, edit 'cabal-install.cabal.pp' and run --- 'make cabal-install-prod' in the project's root folder. -Name: cabal-install -Version: 2.4.0.0 -Synopsis: The command-line interface for Cabal and Hackage. -Description: - The \'cabal\' command-line program simplifies the process of managing - Haskell software by automating the fetching, configuration, compilation - and installation of Haskell libraries and programs. 
-homepage: http://www.haskell.org/cabal/ -bug-reports: https://github.com/haskell/cabal/issues -License: BSD3 -License-File: LICENSE -Author: Cabal Development Team (see AUTHORS file) -Maintainer: Cabal Development Team -Copyright: 2003-2018, Cabal Development Team -Category: Distribution -Build-type: Custom -Extra-Source-Files: - README.md bash-completion/cabal bootstrap.sh changelog - tests/README.md - - -- Generated with 'make gen-extra-source-files' - -- Do NOT edit this section manually; instead, run the script. - -- BEGIN gen-extra-source-files - tests/IntegrationTests2/build/keep-going/cabal.project - tests/IntegrationTests2/build/keep-going/p/P.hs - tests/IntegrationTests2/build/keep-going/p/p.cabal - tests/IntegrationTests2/build/keep-going/q/Q.hs - tests/IntegrationTests2/build/keep-going/q/q.cabal - tests/IntegrationTests2/build/local-tarball/cabal.project - tests/IntegrationTests2/build/local-tarball/q/Q.hs - tests/IntegrationTests2/build/local-tarball/q/q.cabal - tests/IntegrationTests2/build/setup-custom1/A.hs - tests/IntegrationTests2/build/setup-custom1/Setup.hs - tests/IntegrationTests2/build/setup-custom1/a.cabal - tests/IntegrationTests2/build/setup-custom2/A.hs - tests/IntegrationTests2/build/setup-custom2/Setup.hs - tests/IntegrationTests2/build/setup-custom2/a.cabal - tests/IntegrationTests2/build/setup-simple/A.hs - tests/IntegrationTests2/build/setup-simple/Setup.hs - tests/IntegrationTests2/build/setup-simple/a.cabal - tests/IntegrationTests2/exception/bad-config/cabal.project - tests/IntegrationTests2/exception/build/Main.hs - tests/IntegrationTests2/exception/build/a.cabal - tests/IntegrationTests2/exception/configure/a.cabal - tests/IntegrationTests2/exception/no-pkg/empty.in - tests/IntegrationTests2/exception/no-pkg2/cabal.project - tests/IntegrationTests2/regression/3324/cabal.project - tests/IntegrationTests2/regression/3324/p/P.hs - tests/IntegrationTests2/regression/3324/p/p.cabal - tests/IntegrationTests2/regression/3324/q/Q.hs - 
tests/IntegrationTests2/regression/3324/q/q.cabal - tests/IntegrationTests2/targets/all-disabled/cabal.project - tests/IntegrationTests2/targets/all-disabled/p.cabal - tests/IntegrationTests2/targets/benchmarks-disabled/cabal.project - tests/IntegrationTests2/targets/benchmarks-disabled/p.cabal - tests/IntegrationTests2/targets/benchmarks-disabled/q/q.cabal - tests/IntegrationTests2/targets/complex/cabal.project - tests/IntegrationTests2/targets/complex/q/Q.hs - tests/IntegrationTests2/targets/complex/q/q.cabal - tests/IntegrationTests2/targets/empty-pkg/cabal.project - tests/IntegrationTests2/targets/empty-pkg/p.cabal - tests/IntegrationTests2/targets/empty/cabal.project - tests/IntegrationTests2/targets/empty/foo.hs - tests/IntegrationTests2/targets/exes-disabled/cabal.project - tests/IntegrationTests2/targets/exes-disabled/p/p.cabal - tests/IntegrationTests2/targets/exes-disabled/q/q.cabal - tests/IntegrationTests2/targets/lib-only/p.cabal - tests/IntegrationTests2/targets/libs-disabled/cabal.project - tests/IntegrationTests2/targets/libs-disabled/p/p.cabal - tests/IntegrationTests2/targets/libs-disabled/q/q.cabal - tests/IntegrationTests2/targets/multiple-exes/cabal.project - tests/IntegrationTests2/targets/multiple-exes/p.cabal - tests/IntegrationTests2/targets/multiple-libs/cabal.project - tests/IntegrationTests2/targets/multiple-libs/p/p.cabal - tests/IntegrationTests2/targets/multiple-libs/q/q.cabal - tests/IntegrationTests2/targets/multiple-tests/cabal.project - tests/IntegrationTests2/targets/multiple-tests/p.cabal - tests/IntegrationTests2/targets/simple/P.hs - tests/IntegrationTests2/targets/simple/cabal.project - tests/IntegrationTests2/targets/simple/p.cabal - tests/IntegrationTests2/targets/simple/q/QQ.hs - tests/IntegrationTests2/targets/simple/q/q.cabal - tests/IntegrationTests2/targets/test-only/p.cabal - tests/IntegrationTests2/targets/tests-disabled/cabal.project - tests/IntegrationTests2/targets/tests-disabled/p.cabal - 
tests/IntegrationTests2/targets/tests-disabled/q/q.cabal - tests/IntegrationTests2/targets/variety/cabal.project - tests/IntegrationTests2/targets/variety/p.cabal - -- END gen-extra-source-files - - -- Additional manual extra-source-files: - tests/IntegrationTests2/build/local-tarball/p-0.1.tar.gz - - -source-repository head - type: git - location: https://github.com/haskell/cabal/ - subdir: cabal-install - -Flag native-dns - description: Enable use of the [resolv](https://hackage.haskell.org/package/resolv) & [windns](https://hackage.haskell.org/package/windns) packages for performing DNS lookups - default: True - manual: True - -Flag debug-expensive-assertions - description: Enable expensive assertions for testing or debugging - default: False - manual: True - -Flag debug-conflict-sets - description: Add additional information to ConflictSets - default: False - manual: True - -Flag debug-tracetree - description: Compile in support for tracetree (used to debug the solver) - default: False - manual: True - -custom-setup - setup-depends: - Cabal >= 2.2, - base, - process >= 1.1.0.1 && < 1.7, - filepath >= 1.3 && < 1.5 - -executable cabal - main-is: Main.hs - hs-source-dirs: main - default-language: Haskell2010 - ghc-options: -Wall -fwarn-tabs - if impl(ghc >= 8.0) - ghc-options: -Wcompat - -Wnoncanonical-monad-instances - -Wnoncanonical-monadfail-instances - - ghc-options: -rtsopts -threaded - - -- On AIX, some legacy BSD operations such as flock(2) are provided by libbsd.a - if os(aix) - extra-libraries: bsd - hs-source-dirs: . 
- other-modules: - Distribution.Client.BuildReports.Anonymous - Distribution.Client.BuildReports.Storage - Distribution.Client.BuildReports.Types - Distribution.Client.BuildReports.Upload - Distribution.Client.Check - Distribution.Client.CmdBench - Distribution.Client.CmdBuild - Distribution.Client.CmdClean - Distribution.Client.CmdConfigure - Distribution.Client.CmdUpdate - Distribution.Client.CmdErrorMessages - Distribution.Client.CmdExec - Distribution.Client.CmdFreeze - Distribution.Client.CmdHaddock - Distribution.Client.CmdInstall - Distribution.Client.CmdRepl - Distribution.Client.CmdRun - Distribution.Client.CmdTest - Distribution.Client.CmdLegacy - Distribution.Client.CmdSdist - Distribution.Client.Compat.Directory - Distribution.Client.Compat.ExecutablePath - Distribution.Client.Compat.FileLock - Distribution.Client.Compat.FilePerms - Distribution.Client.Compat.Prelude - Distribution.Client.Compat.Process - Distribution.Client.Compat.Semaphore - Distribution.Client.Config - Distribution.Client.Configure - Distribution.Client.Dependency - Distribution.Client.Dependency.Types - Distribution.Client.DistDirLayout - Distribution.Client.Exec - Distribution.Client.Fetch - Distribution.Client.FetchUtils - Distribution.Client.FileMonitor - Distribution.Client.Freeze - Distribution.Client.GZipUtils - Distribution.Client.GenBounds - Distribution.Client.Get - Distribution.Client.Glob - Distribution.Client.GlobalFlags - Distribution.Client.Haddock - Distribution.Client.HttpUtils - Distribution.Client.IndexUtils - Distribution.Client.IndexUtils.Timestamp - Distribution.Client.Init - Distribution.Client.Init.Heuristics - Distribution.Client.Init.Licenses - Distribution.Client.Init.Types - Distribution.Client.Install - Distribution.Client.InstallPlan - Distribution.Client.InstallSymlink - Distribution.Client.JobControl - Distribution.Client.List - Distribution.Client.Manpage - Distribution.Client.Nix - Distribution.Client.Outdated - Distribution.Client.PackageHash - 
Distribution.Client.PackageUtils - Distribution.Client.ParseUtils - Distribution.Client.ProjectBuilding - Distribution.Client.ProjectBuilding.Types - Distribution.Client.ProjectConfig - Distribution.Client.ProjectConfig.Legacy - Distribution.Client.ProjectConfig.Types - Distribution.Client.ProjectOrchestration - Distribution.Client.ProjectPlanOutput - Distribution.Client.ProjectPlanning - Distribution.Client.ProjectPlanning.Types - Distribution.Client.RebuildMonad - Distribution.Client.Reconfigure - Distribution.Client.Run - Distribution.Client.Sandbox - Distribution.Client.Sandbox.Index - Distribution.Client.Sandbox.PackageEnvironment - Distribution.Client.Sandbox.Timestamp - Distribution.Client.Sandbox.Types - Distribution.Client.SavedFlags - Distribution.Client.Security.DNS - Distribution.Client.Security.HTTP - Distribution.Client.Setup - Distribution.Client.SetupWrapper - Distribution.Client.SolverInstallPlan - Distribution.Client.SourceFiles - Distribution.Client.SourceRepoParse - Distribution.Client.SrcDist - Distribution.Client.Store - Distribution.Client.Tar - Distribution.Client.TargetSelector - Distribution.Client.Targets - Distribution.Client.Types - Distribution.Client.Update - Distribution.Client.Upload - Distribution.Client.Utils - Distribution.Client.Utils.Assertion - Distribution.Client.Utils.Json - Distribution.Client.VCS - Distribution.Client.Win32SelfUpgrade - Distribution.Client.World - Distribution.Solver.Compat.Prelude - Distribution.Solver.Modular - Distribution.Solver.Modular.Assignment - Distribution.Solver.Modular.Builder - Distribution.Solver.Modular.Configured - Distribution.Solver.Modular.ConfiguredConversion - Distribution.Solver.Modular.ConflictSet - Distribution.Solver.Modular.Cycles - Distribution.Solver.Modular.Dependency - Distribution.Solver.Modular.Explore - Distribution.Solver.Modular.Flag - Distribution.Solver.Modular.Index - Distribution.Solver.Modular.IndexConversion - Distribution.Solver.Modular.LabeledGraph - 
Distribution.Solver.Modular.Linking - Distribution.Solver.Modular.Log - Distribution.Solver.Modular.Message - Distribution.Solver.Modular.PSQ - Distribution.Solver.Modular.Package - Distribution.Solver.Modular.Preference - Distribution.Solver.Modular.RetryLog - Distribution.Solver.Modular.Solver - Distribution.Solver.Modular.Tree - Distribution.Solver.Modular.Validate - Distribution.Solver.Modular.Var - Distribution.Solver.Modular.Version - Distribution.Solver.Modular.WeightedPSQ - Distribution.Solver.Types.ComponentDeps - Distribution.Solver.Types.ConstraintSource - Distribution.Solver.Types.DependencyResolver - Distribution.Solver.Types.Flag - Distribution.Solver.Types.InstSolverPackage - Distribution.Solver.Types.InstalledPreference - Distribution.Solver.Types.LabeledPackageConstraint - Distribution.Solver.Types.OptionalStanza - Distribution.Solver.Types.PackageConstraint - Distribution.Solver.Types.PackageFixedDeps - Distribution.Solver.Types.PackageIndex - Distribution.Solver.Types.PackagePath - Distribution.Solver.Types.PackagePreferences - Distribution.Solver.Types.PkgConfigDb - Distribution.Solver.Types.Progress - Distribution.Solver.Types.ResolverPackage - Distribution.Solver.Types.Settings - Distribution.Solver.Types.SolverId - Distribution.Solver.Types.SolverPackage - Distribution.Solver.Types.SourcePackage - Distribution.Solver.Types.Variable - Paths_cabal_install - - build-depends: - async >= 2.0 && < 2.3, - array >= 0.4 && < 0.6, - base >= 4.8 && < 4.13, - base16-bytestring >= 0.1.1 && < 0.2, - binary >= 0.7.3 && < 0.9, - bytestring >= 0.10.6.0 && < 0.11, - Cabal >= 2.4.0.1 && < 2.5, - containers >= 0.5.6.2 && < 0.7, - cryptohash-sha256 >= 0.11 && < 0.12, - deepseq >= 1.4.1.1 && < 1.5, - directory >= 1.2.2.0 && < 1.4, - echo >= 0.1.3 && < 0.2, - edit-distance >= 0.2.2 && < 0.3, - filepath >= 1.4.0.0 && < 1.5, - hashable >= 1.0 && < 1.3, - HTTP >= 4000.1.5 && < 4000.4, - mtl >= 2.0 && < 2.3, - network-uri >= 2.6.0.2 && < 2.7, - network >= 2.6 && < 2.8, 
- pretty >= 1.1 && < 1.2, - process >= 1.2.3.0 && < 1.7, - random >= 1 && < 1.2, - stm >= 2.0 && < 2.6, - tar >= 0.5.0.3 && < 0.6, - time >= 1.5.0.1 && < 1.10, - zlib >= 0.5.3 && < 0.7, - hackage-security >= 0.5.2.2 && < 0.6, - text >= 1.2.3 && < 1.3, - zip-archive >= 0.3.2.5 && < 0.4, - parsec >= 3.1.13.0 && < 3.2 - - if flag(native-dns) - if os(windows) - build-depends: windns >= 0.1.0 && < 0.2 - else - build-depends: resolv >= 0.1.1 && < 0.2 - - if os(windows) - build-depends: Win32 >= 2 && < 3 - else - build-depends: unix >= 2.5 && < 2.8 - - if flag(debug-expensive-assertions) - cpp-options: -DDEBUG_EXPENSIVE_ASSERTIONS - - if flag(debug-conflict-sets) - cpp-options: -DDEBUG_CONFLICT_SETS - build-depends: base >= 4.8 - - if flag(debug-tracetree) - cpp-options: -DDEBUG_TRACETREE - build-depends: tracetree >= 0.1 && < 0.2 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/changelog cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/changelog --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/changelog 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/changelog 1970-01-01 00:00:00.000000000 +0000 @@ -1,526 +0,0 @@ --*-change-log-*- - -2.4.0.1 (current bugfix version) - * 'new-install' now warns when failing to symlink an exe (#5602) - * Extend 'cabal init' support for 'cabal-version' selection (#5567) - * 'new-sdist' now generates tarballs with file modification - times from a date in 2001. Using the Unix epoch caused - problems on Windows. (#5596) - * Register monolithic packages installed into the store due to a - build-tool dependency if they also happen to contain a buildable - public lib. (#5379,#5604) - -2.4.0.0 Mikhail Glushenkov September 2018 - * Bugfix: "cabal new-build --ghc-option '--bogus' --ghc-option '-O1'" - no longer ignores all arguments except the last one (#5512). 
- * Add the following option aliases for '-dir'-suffixed options: - 'storedir', 'logsdir', 'packagedir', 'sourcedir', 'outputdir' (#5484). - * 'new-run' now allows the user to run scripts that use a special block - to define their requirements (as in the executable stanza) in place - of a target. This also allows the use of 'cabal' as an interpreter - in a shebang line. - * Add aliases for the "new-" commands that won't change when they - lose their prefix or are eventually replaced by a third UI - paradigm in the future. (#5429) - * 'outdated' now accepts '--project-file FILE', which will look for bounds - from the new-style freeze file named FILE.freeze. This is only - available when `--new-freeze-file` has been passed. - * 'new-repl' now accepts a '--build-depends' flag which accepts the - same syntax as is used in .cabal files to add additional dependencies - to the environment when developing in the REPL. It is now usable outside - of projects. (#5425, #5454) - * 'new-build' now treats Haddock errors non-fatally. In addition, - it attempts to avoid trying to generate Haddocks when there is - nothing to generate them from. (#5232, #5459) - * 'new-run', 'new-test', and 'new-bench' now will attempt to resolve - ambiguous selectors by filtering out selectors that would be invalid. - (#4679, #5461) - * 'new-install' now supports installing libraries and local - components. (#5399) - * Drop support for GHC 7.4, since it is out of our support window - (and has been for over a year!). - * 'new-update' now works outside of projects. (#5096) - * Extend `plan.json` with `pkg-src` provenance information. (#5487) - * Add 'new-sdist' command (#5389). Creates stable archives based on - cabal projects in '.zip' and '.tar.gz' formats. - * Add '--repl-options' flag to 'cabal repl' and 'cabal new-repl' - commands. Passes its arguments to the invoked repl, bypassing the - new-build's cached configurations. 
This assures they don't trigger - useless rebuilds and are always applied within the repl. (#4247, #5287) - * Add 'v1-' prefixes for the commands that will be replaced in the - new-build universe, in preparation for it becoming the default. - (#5358) - * 'outdated' accepts '--v1-freeze-file' and '--v2-freeze-file' - in the same spirit. - * Completed the 'new-clean' command (#5357). The functionality is - equivalent to old-style clean, but for nix-style builds. - * Ensure that each package selected for a build-depends dependency - contains a library (#5304). - * Support packages from local tarballs in the cabal.project file. - * Default changelog generated by 'cabal init' is now named - 'CHANGELOG.md' (#5441). - * Align output of 'new-build' command phases (#4040). - * Add suport for specifying remote VCS dependencies via new - 'source-repository-package' stanzas in 'cabal.project' files - (#5351). - -2.2.0.0 Mikhail Glushenkov March 2018 - * '--with-PROG' and '--PROG-options' are applied to all packages - and not local packages only (#5019). - * Completed the 'new-update' command (#4809), which respects nix-style - cabal.project(.local) files and allows to update from - multiple repositories when using overlays. - * Completed the 'new-run' command (#4477). The functionality is the - same of the old 'run' command but using nix-style builds. - Additionally, it can run executables across packages in a project. - Tests and benchmarks are also treated as executables, providing a - quick way to pass them arguments. - * Completed the 'new-bench' command (#3638). Same as above. - * Completed the 'new-exec' command (#3638). Same as above. - * Added a preliminary 'new-install' command (#4558, nonlocal exes - part) which allows to quickly install executables from Hackage. - * Set symlink-bindir (used by new-install) to .cabal/bin by default on - .cabal/config initialization (#5188). 
- * 'cabal update' now supports '--index-state' which can be used to - roll back the index to an earlier state. - * '--allow-{newer,older}' syntax has been enhanced. Dependency - relaxation can be now limited to a specific release of a package, - plus there's a new syntax for relaxing only caret-style (i.e. '^>=') - dependencies (#4575, #4669). - * New config file field: 'cxx-options' to specify which options to be - passed to the compiler when compiling C++ sources specified by the - 'cxx-sources' field. (#3700) - * New config file field: 'cxx-sources' to specify C++ files to be - compiled separately from C source files. Useful in conjunction with the - 'cxx-options' flag to pass different compiler options to C and C++ - source files. (#3700) - * Use [lfxtb] letters to differentiate component kind instead of - opaque "c" in dist-dir layout. - * 'cabal configure' now supports '--enable-static', which can be - used to build static libaries with GHC via GHC's `-staticlib` - flag. - * 'cabal user-config now supports '--augment' which can append - additional lines to a new or updated cabal config file. - * Added support for '--enable-tests' and '--enable-benchmarks' to - 'cabal fetch' (#4948). - * Misspelled package-names on CLI will no longer be silently - case-corrected (#4778). - * 'cabal new-configure' now backs up the old 'cabal.project.local' - file if it exists (#4460). - * On macOS, `new-build` will now place dynamic libraries into - `store/lib` and aggressively shorten their names in an effort to - stay within the load command size limits of macOSs mach-o linker. - * 'new-build' now checks for the existence of executables for - build-tools and build-tool-depends dependencies in the solver - (#4884). - * Fixed a spurious warning telling the user to run 'cabal update' - when it wasn't necessary (#4444). 
- * Packages installed in sandboxes via 'add-source' now have - their timestamps updated correctly and so will not be reinstalled - unncecessarily if the main install command fails (#1375). - * Add Windows device path support for copyFile, renameFile. Allows cabal - new-build to use temporary store path of up to 32k length - (#3972, #4914, #4515). - * When a flag value is specified multiple times on the command - line, the last one is now preferred, so e.g. '-f+dev -f-dev' is - now equivalent to '-f-dev' (#4452). - * Removed support for building cabal-install with GHC < 7.10 (#4870). - * New 'package *' section in 'cabal.project' files that applies - options to all packages, not just those local to the project. - * Paths_ autogen modules now compile when `RebindableSyntax` or - `OverloadedStrings` is used in `default-extensions`. - [stack#3789](https://github.com/commercialhaskell/stack/issues/3789) - * getDataDir` and other `Paths_autogen` functions now work correctly - when compiling a custom `Setup.hs` script using `new-build` (#5164). - -2.0.0.1 Mikhail Glushenkov December 2017 - * Support for GHC's numeric -g debug levels (#4673). - * Demoted 'scope' field version check to a warning (#4714). - * Fixed verbosity flags getting removed before being passed to - 'printPlan' (#4724). - * Added a '--store-dir' option that can be used to configure the - location of the build global build store (#4623). - * Turned `allow-{newer,older}` in `cabal.project` files into an - accumulating field to match CLI flag semantics (#4679). - * Improve success message when `cabal upload`ing documentation - (#4777). - * Documentation fixes. - -2.0.0.0 Mikhail Glushenkov August 2017 - * See http://coldwa.st/e/blog/2017-09-09-Cabal-2-0.html - for more detailed release notes. - * Removed the '--root-cmd' parameter of the 'install' command - (#3356). - * Deprecated 'cabal install --global' (#3356). - * Changed 'cabal upload' to upload a package candidate by default - (#3419). 
Same applies to uploading documentation. - * Added a new 'cabal upload' flag '--publish' for publishing a - package on Hackage instead of uploading a candidate (#3419). - * Added optional solver output visualisation support via the - tracetree package. Mainly intended for debugging (#3410). - * Removed the '--check' option from 'cabal upload' - (#1823). It was replaced by package candidates. - * Fixed various behaviour differences between network transports - (#3429). - * The bootstrap script now works correctly when run from a Git - clone (#3439). - * Removed the top-down solver (#3598). - * The '-v/--verbosity' option no longer affects GHC verbosity - (except in the case of '-v0'). Use '--ghc-options=-v' to enable - verbose GHC output (#3540, #3671). - * Changed the default logfile template from - '.../$pkgid.log' to '.../$compiler/$libname.log' (#3807). - * Added a new command, 'cabal reconfigure', which re-runs 'configure' - with the most recently used flags (#2214). - * Added the '--index-state' flag for requesting a specific - version of the package index (#3893, #4115). - * Support for building Backpack packages. See - https://github.com/ezyang/ghc-proposals/blob/backpack/proposals/0000-backpack.rst - for more details. - * Support the Nix package manager (#3651). - * Made the 'template-haskell' package non-upgradable again (#4185). - * Fixed password echoing on MinTTY (#4128). - * Added a new solver flag, '--allow-boot-library-installs', that allows - any package to be installed or upgraded (#4209). - * New 'cabal-install' command: 'outdated', for listing outdated - version bounds in a .cabal file or a freeze file (#4207). - * Added qualified constraints for setup dependencies. For example, - --constraint="setup.bar == 1.0" constrains all setup dependencies on - bar, and --constraint="foo:setup.bar == 1.0" constrains foo's setup - dependency on bar (part of #3502). 
- * Non-qualified constraints, such as --constraint="bar == 1.0", now - only apply to top-level dependencies. They don't constrain setup or - build-tool dependencies. The new syntax --constraint="any.bar == 1.0" - constrains all uses of bar. - * Added a technical preview version of the 'cabal doctest' command - (#4480). - -1.24.0.2 Mikhail Glushenkov December 2016 - * Adapted to the revert of a PVP-noncompliant API change in - Cabal 1.24.2.0 (#4123). - * Bumped the directory upper bound to < 1.4 (#4158). - -1.24.0.1 Ryan Thomas October 2016 - * Fixed issue with passing '--enable-profiling' when invoking - Setup scripts built with older versions of Cabal (#3873). - * Fixed various behaviour differences between network transports - (#3429). - * Updated to depend on the latest hackage-security that fixes - various issues on Windows. - * Fixed 'new-build' to exit with a non-zero exit code on failure - (#3506). - * Store secure repo index data as 01-index.* (#3862). - * Added new hackage-security root keys for distribution with - cabal-install. - * Fix an issue where 'cabal install' sometimes had to be run twice - for packages with build-type: Custom and a custom-setup stanza - (#3723). - * 'cabal sdist' no longer ignores '--builddir' when the package's - build-type is Custom (#3794). - -1.24.0.0 Ryan Thomas March 2016 - * If there are multiple remote repos, 'cabal update' now updates - them in parallel (#2503). - * New 'cabal upload' option '-P'/'--password-command' for reading - Hackage password from arbitrary program output (#2506). - * Better warning for 'cabal run' (#2510). - * 'cabal init' now warns if the chosen package name is already - registered in the source package index (#2436). - * New 'cabal install' option: '--offline' (#2578). - * Accept 'builddir' field in cabal.config (#2484) - * Read 'builddir' option from 'CABAL_BUILDDIR' environment variable. - * Remote repos may now be configured to use https URLs. 
This uses - either curl or wget or, on Windows, PowerShell, under the hood (#2687). - * Install target URLs can now use https e.g. 'cabal install - https://example.com/foo-1.0.tar.gz'. - * Automatically use https for cabal upload for the main - hackage.haskell.org (other repos will use whatever they are - configured to use). - * Support for dependencies of custom Setup.hs scripts - (see http://www.well-typed.com/blog/2015/07/cabal-setup-deps/). - * 'cabal' program itself now can be used as an external setup - method. This fixes an issue when Cabal version mismatch caused - unnecessary reconfigures (#2633). - * Improved error message for unsatisfiable package constraints - (#2727). - * Fixed a space leak in 'cabal update' (#2826). - * 'cabal exec' and 'sandbox hc-pkg' now use the configured - compiler (#2859). - * New 'cabal haddock' option: '--for-hackage' (#2852). - * Added a warning when the solver cannot find a dependency (#2853). - * New 'cabal upload' option: '--doc': upload documentation to - hackage (#2890). - * Improved error handling for 'sandbox delete-source' (#2943). - * Solver support for extension and language flavours (#2873). - * Support for secure repos using hackage-security (#2983). - * Added a log file message similar to one printed by 'make' when - building in another directory (#2642). - * Added new subcommand 'init' to 'cabal user-config'. This - subcommand creates a cabal configuration file in either the - default location or as specified by --config-file (#2553). - * The man page for 'cabal-install' is now automatically generated - (#2877). - * The '--allow-newer' option now works as expected when specified - multiple times (#2588). - * New config file field: 'extra-framework-dirs' (extra locations - to find OS X frameworks in). Can be also specified as an argument - for 'install' and 'configure' commands (#3158). - * It's now possible to limit the scope of '--allow-newer' to - single packages in the install plan (#2756). 
- * Full '--allow-newer' syntax is now supported in the config file - (that is, 'allow-newer: base, ghc-prim, some-package:vector') - (#3171). - * Improved performance of '--reorder-goals' (#3208). - * Fixed space leaks in modular solver (#2916, #2914). - * Made the solver aware of pkg-config constraints (#3023). - * Added a new command: 'gen-bounds' (#3223). See - http://softwaresimply.blogspot.se/2015/08/cabal-gen-bounds-easy-generation-of.html. - * Tech preview of new nix-style isolated project-based builds. - Currently provides the commands (new-)build/repl/configure. - -1.22.9.0 Ryan Thomas March 2016 - * Include Cabal-1.22.8.0 - -1.22.8.0 Ryan Thomas February 2016 - * Only Custom setup scripts should be compiled with '-i -i.'. - * installedCabalVersion: Don't special-case Cabal anymore. - * Bump the HTTP upper bound. See #3069. - -1.22.7.0 Ryan Thomas December 2015 - * Remove GZipUtils tests - * maybeDecompress: bail on all errors at the beginning of the - stream with zlib < 0.6 - * Correct maybeDecompress - -1.22.6.0 Ryan Thomas June 2015 - * A fix for @ezyang's fix for #2502. (Mikhail Glushenkov) - -1.22.5.0 Ryan Thomas June 2015 - * Reduce temporary directory name length, fixes #2502. (Edward Z. Yang) - -1.22.4.0 Ryan Thomas May 2015 - * Force cabal upload to always use digest auth and never basic auth. - * Add dependency-graph information to `printPlan` output - * bootstrap.sh: fixes linker matching to avoid cases where tested - linker names appear unexpectedly in compiler output (fixes #2542) - -1.22.3.0 Ryan Thomas April 2015 - * Fix bash completion for sandbox subcommands - Fixes #2513 - (Mikhail Glushenkov) - * filterConfigureFlags: filter more flags (Mikhail Glushenkov) - -1.22.2.0 Ryan Thomas March 2015 - * Don't pass '--{en,dis}able-profiling' to old setup exes. - * -Wall police - * Allow filepath 1.4 - -1.22.0.0 Johan Tibell January 2015 - * New command: user-config (#2159). - * Implement 'cabal repl --only' (#2016). 
- * Fix an issue when 'cabal repl' was doing unnecessary compilation - (#1715). - * Prompt the user to specify source directory in 'cabal init' - (#1989). - * Remove the self-upgrade check (#2090). - * Don't redownload already downloaded packages when bootstrapping - (#2133). - * Support sandboxes in 'bootstrap.sh' (#2137). - * Install profiling and shared libs by default in 'bootstrap.sh' - (#2009). - -1.20.2.0 Ryan Thomas February 2016 - * Only Custom setup scripts should be compiled with '-i -i.'. - * installedCabalVersion: Don't special-case Cabal anymore. - -1.20.1.0 Ryan Thomas May 2015 - * Force cabal upload to always use digest auth and never basic auth. - * bootstrap.sh: install network-uri before HTTP - -1.20.0.5 Johan Tibell December 2014 - * Support random 1.1. - * Fix bootstrap script after network package split. - * Support network-2.6 in test suite. - -1.20.0.3 Johan Tibell June 2014 - * Don't attempt to rename dist if it is already named correctly - * Treat all flags of a package as interdependent. - * Allow template-haskell to be upgradable again - -1.20.0.2 Johan Tibell May 2014 - * Increase max-backjumps to 2000. - * Fix solver bug which led to missed install plans. - * Fix streaming test output. - * Tweak solver heuristics to avoid reinstalls. 
- -1.20.0.1 Johan Tibell May 2014 - * Fix cabal repl search path bug on Windows - * Include OS and arch in cabal-install user agent - * Revert --constraint flag behavior in configure to 1.18 behavior - -1.20.0.0 Johan Tibell April 2014 - * Build only selected executables - * Add -j flag to build/test/bench/run - * Improve install log file - * Don't symlink executables when in a sandbox - * Add --package-db flag to 'list' and 'info' - * Make upload more efficient - * Add --require-sandbox option - * Add experimental Cabal file format command - * Add haddock section to config file - * Add --main-is flag to init - -1.18.2.0 Ryan Thomas February 2016 - * Only Custom setup scripts should be compiled with '-i -i.'. - * installedCabalVersion: Don't special-case Cabal anymore. - -1.18.1.0 Ryan Thomas May 2015 - * Force cabal upload to always use digest auth and never basic auth. - * Merge pull request #2367 from juhp/patch-2 - * Fix bootstrap.sh by bumping HTTP to 4000.2.16.1 - -1.18.0.7 Johan Tibell December 2014 - * Support random 1.1. - * Fix bootstrap script after network package split. - * Support network-2.6 in test suite. - -1.18.0.5 Johan Tibell July 2014 - * Make solver flag resolution more conservative. - -1.18.0.4 Johan Tibell May 2014 - * Increase max-backjumps to 2000. - * Fix solver bug which led to missed install plans. - * Tweak solver heuristics to avoid reinstalls. 
- -0.14.0 Andres Loeh April 2012 - * Works with ghc-7.4 - * Completely new modular dependency solver (default in most cases) - * Some tweaks to old topdown dependency solver - * Install plans are now checked for reinstalls that break packages - * Flags --constraint and --preference work for nonexisting packages - * New constraint forms for source and installed packages - * New constraint form for package-specific use flags - * New constraint form for package-specific stanza flags - * Test suite dependencies are pulled in on demand - * No longer install packages on --enable-tests when tests fail - * New "cabal bench" command - * Various "cabal init" tweaks - -0.10.0 Duncan Coutts February 2011 - * New package targets: local dirs, local and remote tarballs - * Initial support for a "world" package target - * Partial fix for situation where user packages mask global ones - * Removed cabal upgrade, new --upgrade-dependencies flag - * New cabal install --only-dependencies flag - * New cabal fetch --no-dependencies and --dry-run flags - * Improved output for cabal info - * Simpler and faster bash command line completion - * Fix for broken proxies that decompress wrongly - * Fix for cabal unpack to preserve executable permissions - * Adjusted the output for the -v verbosity level in a few places - -0.8.2 Duncan Coutts March 2010 - * Fix for cabal update on Windows - * On windows switch to per-user installs (rather than global) - * Handle intra-package dependencies in dependency planning - * Minor tweaks to cabal init feature - * Fix various -Wall warnings - * Fix for cabal sdist --snapshot - -0.8.0 Duncan Coutts Dec 2009 - * Works with ghc-6.12 - * New "cabal init" command for making initial project .cabal file - * New feature to maintain an index of haddock documentation - -0.6.4 Duncan Coutts Nov 2009 - * Improve the algorithm for selecting the base package version - * Hackage errors now reported by "cabal upload [--check]" - * Improved format of messages from "cabal 
check" - * Config file can now be selected by an env var - * Updated tar reading/writing code - * Improve instructions in the README and bootstrap output - * Fix bootstrap.sh on Solaris 9 - * Fix bootstrap for systems where network uses parsec 3 - * Fix building with ghc-6.6 - -0.6.2 Duncan Coutts Feb 2009 - * The upgrade command has been disabled in this release - * The configure and install commands now have consistent behaviour - * Reduce the tendancy to re-install already existing packages - * The --constraint= flag now works for the install command - * New --preference= flag for soft constraints / version preferences - * Improved bootstrap.sh script, smarter and better error checking - * New cabal info command to display detailed info on packages - * New cabal unpack command to download and untar a package - * HTTP-4000 package required, should fix bugs with http proxies - * Now works with authenticated proxies. - * On Windows can now override the proxy setting using an env var - * Fix compatibility with config files generated by older versions - * Warn if the hackage package list is very old - * More helpful --help output, mention config file and examples - * Better documentation in ~/.cabal/config file - * Improved command line interface for logging and build reporting - * Minor improvements to some messages - -0.6.0 Duncan Coutts Oct 2008 - * Constraint solver can now cope with base 3 and base 4 - * Allow use of package version preferences from hackage index - * More detailed output from cabal install --dry-run -v - * Improved bootstrap.sh - -0.5.2 Duncan Coutts Aug 2008 - * Suport building haddock documentaion - * Self-reinstall now works on Windows - * Allow adding symlinks to excutables into a separate bindir - * New self-documenting config file - * New install --reinstall flag - * More helpful status messages in a couple places - * Upload failures now report full text error message from the server - * Support for local package repositories - * New build 
logging and reporting - * New command to upload build reports to (a compatible) server - * Allow tilde in hackage server URIs - * Internal code improvements - * Many other minor improvements and bug fixes - -0.5.1 Duncan Coutts June 2008 - * Restore minimal hugs support in dependency resolver - * Fix for disabled http proxies on Windows - * Revert to global installs on Windows by default - -0.5.0 Duncan Coutts June 2008 - * New package dependency resolver, solving diamond dep problem - * Integrate cabal-setup functionality - * Integrate cabal-upload functionality - * New cabal update and check commands - * Improved behavior for install and upgrade commands - * Full Windows support - * New command line handling - * Bash command line completion - * Allow case insensitive package names on command line - * New --dry-run flag for install, upgrade and fetch commands - * New --root-cmd flag to allow installing as root - * New --cabal-lib-version flag to select different Cabal lib versions - * Support for HTTP proxies - * Improved cabal list output - * Build other non-dependent packages even when some fail - * Report a summary of all build failures at the end - * Partial support for hugs - * Partial implementation of build reporting and logging - * More consistent logging and verbosity - * Significant internal code restructuring - -0.4 Duncan Coutts Oct 2007 - * Renamed executable from 'cabal-install' to 'cabal' - * Partial Windows compatibility - * Do per-user installs by default - * cabal install now installs the package in the current directory - * Allow multiple remote servers - * Use zlib lib and internal tar code and rather than external tar - * Reorganised configuration files - * Significant code restructuring - * Cope with packages with conditional dependencies - -0.3 and older versions by Lemmih, Paolo Martini and others 2006-2007 - * Switch from smart-server, dumb-client model to the reverse - * New .tar.gz based index format - * New remote and local package 
archive format diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Anonymous.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Anonymous.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Anonymous.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Anonymous.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,317 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Reporting --- Copyright : (c) David Waern 2008 --- License : BSD-like --- --- Maintainer : david.waern@gmail.com --- Stability : experimental --- Portability : portable --- --- Anonymous build report data structure, printing and parsing --- ------------------------------------------------------------------------------ -module Distribution.Client.BuildReports.Anonymous ( - BuildReport(..), - InstallOutcome(..), - Outcome(..), - - -- * Constructing and writing reports - new, - - -- * parsing and pretty printing - parse, - parseList, - show, --- showList, - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude hiding (show) - -import qualified Distribution.Client.Types as BR - ( BuildOutcome, BuildFailure(..), BuildResult(..) - , DocsResult(..), TestsResult(..) ) -import Distribution.Client.Utils - ( mergeBy, MergeResult(..) ) -import qualified Paths_cabal_install (version) - -import Distribution.Package - ( PackageIdentifier(..), mkPackageName ) -import Distribution.PackageDescription - ( FlagName, mkFlagName, unFlagName - , FlagAssignment, mkFlagAssignment, unFlagAssignment ) -import Distribution.Version - ( mkVersion' ) -import Distribution.System - ( OS, Arch ) -import Distribution.Compiler - ( CompilerId(..) 
) -import qualified Distribution.Text as Text - ( Text(disp, parse) ) -import Distribution.ParseUtils - ( FieldDescr(..), ParseResult(..), Field(..) - , simpleField, listField, ppFields, readFields - , syntaxError, locatedErrorMsg ) -import Distribution.Simple.Utils - ( comparing ) - -import qualified Distribution.Compat.ReadP as Parse - ( ReadP, pfail, munch1, skipSpaces ) -import qualified Text.PrettyPrint as Disp - ( Doc, render, char, text ) -import Text.PrettyPrint - ( (<+>) ) - -import Data.Char as Char - ( isAlpha, isAlphaNum ) - -data BuildReport - = BuildReport { - -- | The package this build report is about - package :: PackageIdentifier, - - -- | The OS and Arch the package was built on - os :: OS, - arch :: Arch, - - -- | The Haskell compiler (and hopefully version) used - compiler :: CompilerId, - - -- | The uploading client, ie cabal-install-x.y.z - client :: PackageIdentifier, - - -- | Which configurations flags we used - flagAssignment :: FlagAssignment, - - -- | Which dependent packages we were using exactly - dependencies :: [PackageIdentifier], - - -- | Did installing work ok? - installOutcome :: InstallOutcome, - - -- Which version of the Cabal library was used to compile the Setup.hs --- cabalVersion :: Version, - - -- Which build tools we were using (with versions) --- tools :: [PackageIdentifier], - - -- | Configure outcome, did configure work ok? - docsOutcome :: Outcome, - - -- | Configure outcome, did configure work ok? 
- testsOutcome :: Outcome - } - -data InstallOutcome - = PlanningFailed - | DependencyFailed PackageIdentifier - | DownloadFailed - | UnpackFailed - | SetupFailed - | ConfigureFailed - | BuildFailed - | TestsFailed - | InstallFailed - | InstallOk - deriving Eq - -data Outcome = NotTried | Failed | Ok - deriving Eq - -new :: OS -> Arch -> CompilerId -> PackageIdentifier -> FlagAssignment - -> [PackageIdentifier] -> BR.BuildOutcome -> BuildReport -new os' arch' comp pkgid flags deps result = - BuildReport { - package = pkgid, - os = os', - arch = arch', - compiler = comp, - client = cabalInstallID, - flagAssignment = flags, - dependencies = deps, - installOutcome = convertInstallOutcome, --- cabalVersion = undefined - docsOutcome = convertDocsOutcome, - testsOutcome = convertTestsOutcome - } - where - convertInstallOutcome = case result of - Left BR.PlanningFailed -> PlanningFailed - Left (BR.DependentFailed p) -> DependencyFailed p - Left (BR.DownloadFailed _) -> DownloadFailed - Left (BR.UnpackFailed _) -> UnpackFailed - Left (BR.ConfigureFailed _) -> ConfigureFailed - Left (BR.BuildFailed _) -> BuildFailed - Left (BR.TestsFailed _) -> TestsFailed - Left (BR.InstallFailed _) -> InstallFailed - Right (BR.BuildResult _ _ _) -> InstallOk - convertDocsOutcome = case result of - Left _ -> NotTried - Right (BR.BuildResult BR.DocsNotTried _ _) -> NotTried - Right (BR.BuildResult BR.DocsFailed _ _) -> Failed - Right (BR.BuildResult BR.DocsOk _ _) -> Ok - convertTestsOutcome = case result of - Left (BR.TestsFailed _) -> Failed - Left _ -> NotTried - Right (BR.BuildResult _ BR.TestsNotTried _) -> NotTried - Right (BR.BuildResult _ BR.TestsOk _) -> Ok - -cabalInstallID :: PackageIdentifier -cabalInstallID = - PackageIdentifier (mkPackageName "cabal-install") - (mkVersion' Paths_cabal_install.version) - --- ------------------------------------------------------------ --- * External format --- ------------------------------------------------------------ - -initialBuildReport :: 
BuildReport -initialBuildReport = BuildReport { - package = requiredField "package", - os = requiredField "os", - arch = requiredField "arch", - compiler = requiredField "compiler", - client = requiredField "client", - flagAssignment = mempty, - dependencies = [], - installOutcome = requiredField "install-outcome", --- cabalVersion = Nothing, --- tools = [], - docsOutcome = NotTried, - testsOutcome = NotTried - } - where - requiredField fname = error ("required field: " ++ fname) - --- ----------------------------------------------------------------------------- --- Parsing - -parse :: String -> Either String BuildReport -parse s = case parseFields s of - ParseFailed perror -> Left msg where (_, msg) = locatedErrorMsg perror - ParseOk _ report -> Right report - -parseFields :: String -> ParseResult BuildReport -parseFields input = do - fields <- traverse extractField =<< readFields input - let merged = mergeBy (\desc (_,name,_) -> compare (fieldName desc) name) - sortedFieldDescrs - (sortBy (comparing (\(_,name,_) -> name)) fields) - checkMerged initialBuildReport merged - - where - extractField :: Field -> ParseResult (Int, String, String) - extractField (F line name value) = return (line, name, value) - extractField (Section line _ _ _) = syntaxError line "Unrecognized stanza" - extractField (IfBlock line _ _ _) = syntaxError line "Unrecognized stanza" - - checkMerged report [] = return report - checkMerged report (merged:remaining) = case merged of - InBoth fieldDescr (line, _name, value) -> do - report' <- fieldSet fieldDescr line value report - checkMerged report' remaining - OnlyInRight (line, name, _) -> - syntaxError line ("Unrecognized field " ++ name) - OnlyInLeft fieldDescr -> - fail ("Missing field " ++ fieldName fieldDescr) - -parseList :: String -> [BuildReport] -parseList str = - [ report | Right report <- map parse (split str) ] - - where - split :: String -> [String] - split = filter (not . null) . unfoldr chunk . 
lines - chunk [] = Nothing - chunk ls = case break null ls of - (r, rs) -> Just (unlines r, dropWhile null rs) - --- ----------------------------------------------------------------------------- --- Pretty-printing - -show :: BuildReport -> String -show = Disp.render . ppFields fieldDescrs - --- ----------------------------------------------------------------------------- --- Description of the fields, for parsing/printing - -fieldDescrs :: [FieldDescr BuildReport] -fieldDescrs = - [ simpleField "package" Text.disp Text.parse - package (\v r -> r { package = v }) - , simpleField "os" Text.disp Text.parse - os (\v r -> r { os = v }) - , simpleField "arch" Text.disp Text.parse - arch (\v r -> r { arch = v }) - , simpleField "compiler" Text.disp Text.parse - compiler (\v r -> r { compiler = v }) - , simpleField "client" Text.disp Text.parse - client (\v r -> r { client = v }) - , listField "flags" dispFlag parseFlag - (unFlagAssignment . flagAssignment) - (\v r -> r { flagAssignment = mkFlagAssignment v }) - , listField "dependencies" Text.disp Text.parse - dependencies (\v r -> r { dependencies = v }) - , simpleField "install-outcome" Text.disp Text.parse - installOutcome (\v r -> r { installOutcome = v }) - , simpleField "docs-outcome" Text.disp Text.parse - docsOutcome (\v r -> r { docsOutcome = v }) - , simpleField "tests-outcome" Text.disp Text.parse - testsOutcome (\v r -> r { testsOutcome = v }) - ] - -sortedFieldDescrs :: [FieldDescr BuildReport] -sortedFieldDescrs = sortBy (comparing fieldName) fieldDescrs - -dispFlag :: (FlagName, Bool) -> Disp.Doc -dispFlag (fname, True) = Disp.text (unFlagName fname) -dispFlag (fname, False) = Disp.char '-' <<>> Disp.text (unFlagName fname) - -parseFlag :: Parse.ReadP r (FlagName, Bool) -parseFlag = do - name <- Parse.munch1 (\c -> Char.isAlphaNum c || c == '_' || c == '-') - case name of - ('-':flag) -> return (mkFlagName flag, False) - flag -> return (mkFlagName flag, True) - -instance Text.Text InstallOutcome where - 
disp PlanningFailed = Disp.text "PlanningFailed" - disp (DependencyFailed pkgid) = Disp.text "DependencyFailed" <+> Text.disp pkgid - disp DownloadFailed = Disp.text "DownloadFailed" - disp UnpackFailed = Disp.text "UnpackFailed" - disp SetupFailed = Disp.text "SetupFailed" - disp ConfigureFailed = Disp.text "ConfigureFailed" - disp BuildFailed = Disp.text "BuildFailed" - disp TestsFailed = Disp.text "TestsFailed" - disp InstallFailed = Disp.text "InstallFailed" - disp InstallOk = Disp.text "InstallOk" - - parse = do - name <- Parse.munch1 Char.isAlphaNum - case name of - "PlanningFailed" -> return PlanningFailed - "DependencyFailed" -> do Parse.skipSpaces - pkgid <- Text.parse - return (DependencyFailed pkgid) - "DownloadFailed" -> return DownloadFailed - "UnpackFailed" -> return UnpackFailed - "SetupFailed" -> return SetupFailed - "ConfigureFailed" -> return ConfigureFailed - "BuildFailed" -> return BuildFailed - "TestsFailed" -> return TestsFailed - "InstallFailed" -> return InstallFailed - "InstallOk" -> return InstallOk - _ -> Parse.pfail - -instance Text.Text Outcome where - disp NotTried = Disp.text "NotTried" - disp Failed = Disp.text "Failed" - disp Ok = Disp.text "Ok" - parse = do - name <- Parse.munch1 Char.isAlpha - case name of - "NotTried" -> return NotTried - "Failed" -> return Failed - "Ok" -> return Ok - _ -> Parse.pfail diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Storage.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Storage.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Storage.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Storage.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,159 +0,0 @@ 
------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Reporting --- Copyright : (c) David Waern 2008 --- License : BSD-like --- --- Maintainer : david.waern@gmail.com --- Stability : experimental --- Portability : portable --- --- Anonymous build report data structure, printing and parsing --- ------------------------------------------------------------------------------ -module Distribution.Client.BuildReports.Storage ( - - -- * Storing and retrieving build reports - storeAnonymous, - storeLocal, --- retrieve, - - -- * 'InstallPlan' support - fromInstallPlan, - fromPlanningFailure, - ) where - -import qualified Distribution.Client.BuildReports.Anonymous as BuildReport -import Distribution.Client.BuildReports.Anonymous (BuildReport) - -import Distribution.Client.Types -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.InstallPlan - ( InstallPlan ) - -import qualified Distribution.Solver.Types.ComponentDeps as CD -import Distribution.Solver.Types.SourcePackage - -import Distribution.Package - ( PackageId, packageId ) -import Distribution.PackageDescription - ( FlagAssignment ) -import Distribution.Simple.InstallDirs - ( PathTemplate, fromPathTemplate - , initialPathTemplateEnv, substPathTemplate ) -import Distribution.System - ( Platform(Platform) ) -import Distribution.Compiler - ( CompilerId(..), CompilerInfo(..) 
) -import Distribution.Simple.Utils - ( comparing, equating ) - -import Data.List - ( groupBy, sortBy ) -import Data.Maybe - ( mapMaybe ) -import System.FilePath - ( (), takeDirectory ) -import System.Directory - ( createDirectoryIfMissing ) - -storeAnonymous :: [(BuildReport, Maybe Repo)] -> IO () -storeAnonymous reports = sequence_ - [ appendFile file (concatMap format reports') - | (repo, reports') <- separate reports - , let file = repoLocalDir repo "build-reports.log" ] - --TODO: make this concurrency safe, either lock the report file or make sure - -- the writes for each report are atomic (under 4k and flush at boundaries) - - where - format r = '\n' : BuildReport.show r ++ "\n" - separate :: [(BuildReport, Maybe Repo)] - -> [(Repo, [BuildReport])] - separate = map (\rs@((_,repo,_):_) -> (repo, [ r | (r,_,_) <- rs ])) - . map concat - . groupBy (equating (repoName . head)) - . sortBy (comparing (repoName . head)) - . groupBy (equating repoName) - . onlyRemote - repoName (_,_,rrepo) = remoteRepoName rrepo - - onlyRemote :: [(BuildReport, Maybe Repo)] - -> [(BuildReport, Repo, RemoteRepo)] - onlyRemote rs = - [ (report, repo, remoteRepo) - | (report, Just repo) <- rs - , Just remoteRepo <- [maybeRepoRemote repo] - ] - -storeLocal :: CompilerInfo -> [PathTemplate] -> [(BuildReport, Maybe Repo)] - -> Platform -> IO () -storeLocal cinfo templates reports platform = sequence_ - [ do createDirectoryIfMissing True (takeDirectory file) - appendFile file output - --TODO: make this concurrency safe, either lock the report file or make - -- sure the writes for each report are atomic - | (file, reports') <- groupByFileName - [ (reportFileName template report, report) - | template <- templates - , (report, _repo) <- reports ] - , let output = concatMap format reports' - ] - where - format r = '\n' : BuildReport.show r ++ "\n" - - reportFileName template report = - fromPathTemplate (substPathTemplate env template) - where env = initialPathTemplateEnv - (BuildReport.package 
report) - -- TODO: In principle, we can support $pkgkey, but only - -- if the configure step succeeds. So add a Maybe field - -- to the build report, and either use that or make up - -- a fake identifier if it's not available. - (error "storeLocal: package key not available") - cinfo - platform - - groupByFileName = map (\grp@((filename,_):_) -> (filename, map snd grp)) - . groupBy (equating fst) - . sortBy (comparing fst) - --- ------------------------------------------------------------ --- * InstallPlan support --- ------------------------------------------------------------ - -fromInstallPlan :: Platform -> CompilerId - -> InstallPlan - -> BuildOutcomes - -> [(BuildReport, Maybe Repo)] -fromInstallPlan platform comp plan buildOutcomes = - mapMaybe (\pkg -> fromPlanPackage - platform comp pkg - (InstallPlan.lookupBuildOutcome pkg buildOutcomes)) - . InstallPlan.toList - $ plan - -fromPlanPackage :: Platform -> CompilerId - -> InstallPlan.PlanPackage - -> Maybe BuildOutcome - -> Maybe (BuildReport, Maybe Repo) -fromPlanPackage (Platform arch os) comp - (InstallPlan.Configured (ConfiguredPackage _ srcPkg flags _ deps)) - (Just buildResult) = - Just ( BuildReport.new os arch comp - (packageId srcPkg) flags - (map packageId (CD.nonSetupDeps deps)) - buildResult - , extractRepo srcPkg) - where - extractRepo (SourcePackage { packageSource = RepoTarballPackage repo _ _ }) - = Just repo - extractRepo _ = Nothing - -fromPlanPackage _ _ _ _ = Nothing - - -fromPlanningFailure :: Platform -> CompilerId - -> [PackageId] -> FlagAssignment -> [(BuildReport, Maybe Repo)] -fromPlanningFailure (Platform arch os) comp pkgids flags = - [ (BuildReport.new os arch comp pkgid flags [] (Left PlanningFailed), Nothing) - | pkgid <- pkgids ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Types.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Types.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.BuildReports.Types --- Copyright : (c) Duncan Coutts 2009 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Types related to build reporting --- ------------------------------------------------------------------------------ -module Distribution.Client.BuildReports.Types ( - ReportLevel(..), - ) where - -import qualified Distribution.Text as Text - ( Text(..) ) - -import qualified Distribution.Compat.ReadP as Parse - ( pfail, munch1 ) -import qualified Text.PrettyPrint as Disp - ( text ) - -import Data.Char as Char - ( isAlpha, toLower ) -import GHC.Generics (Generic) -import Distribution.Compat.Binary (Binary) - - -data ReportLevel = NoReports | AnonymousReports | DetailedReports - deriving (Eq, Ord, Enum, Show, Generic) - -instance Binary ReportLevel - -instance Text.Text ReportLevel where - disp NoReports = Disp.text "none" - disp AnonymousReports = Disp.text "anonymous" - disp DetailedReports = Disp.text "detailed" - parse = do - name <- Parse.munch1 Char.isAlpha - case lowercase name of - "none" -> return NoReports - "anonymous" -> return AnonymousReports - "detailed" -> return DetailedReports - _ -> Parse.pfail - -lowercase :: String -> String -lowercase = map Char.toLower diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Upload.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Upload.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Upload.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/BuildReports/Upload.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,92 +0,0 @@ -{-# LANGUAGE CPP, PatternGuards #-} --- This is a quick hack for uploading build reports to Hackage. - -module Distribution.Client.BuildReports.Upload - ( BuildLog - , BuildReportId - , uploadReports - ) where - -{- -import Network.Browser - ( BrowserAction, request, setAllowRedirects ) -import Network.HTTP - ( Header(..), HeaderName(..) - , Request(..), RequestMethod(..), Response(..) ) -import Network.TCP (HandleStream) --} -import Network.URI (URI, uriPath) --parseRelativeReference, relativeTo) - -import Control.Monad - ( forM_ ) -import System.FilePath.Posix - ( () ) -import qualified Distribution.Client.BuildReports.Anonymous as BuildReport -import Distribution.Client.BuildReports.Anonymous (BuildReport) -import Distribution.Text (display) -import Distribution.Verbosity (Verbosity) -import Distribution.Simple.Utils (die') -import Distribution.Client.HttpUtils -import Distribution.Client.Setup - ( RepoContext(..) 
) - -type BuildReportId = URI -type BuildLog = String - -uploadReports :: Verbosity -> RepoContext -> (String, String) -> URI -> [(BuildReport, Maybe BuildLog)] -> IO () -uploadReports verbosity repoCtxt auth uri reports = do - forM_ reports $ \(report, mbBuildLog) -> do - buildId <- postBuildReport verbosity repoCtxt auth uri report - case mbBuildLog of - Just buildLog -> putBuildLog verbosity repoCtxt auth buildId buildLog - Nothing -> return () - -postBuildReport :: Verbosity -> RepoContext -> (String, String) -> URI -> BuildReport -> IO BuildReportId -postBuildReport verbosity repoCtxt auth uri buildReport = do - let fullURI = uri { uriPath = "/package" display (BuildReport.package buildReport) "reports" } - transport <- repoContextGetTransport repoCtxt - res <- postHttp transport verbosity fullURI (BuildReport.show buildReport) (Just auth) - case res of - (303, redir) -> return $ undefined redir --TODO parse redir - _ -> die' verbosity "unrecognized response" -- give response - -{- - setAllowRedirects False - (_, response) <- request Request { - rqURI = uri { uriPath = "/package" display (BuildReport.package buildReport) "reports" }, - rqMethod = POST, - rqHeaders = [Header HdrContentType ("text/plain"), - Header HdrContentLength (show (length body)), - Header HdrAccept ("text/plain")], - rqBody = body - } - case rspCode response of - (3,0,3) | [Just buildId] <- [ do rel <- parseRelativeReference location -#if defined(VERSION_network_uri) - return $ relativeTo rel uri -#elif defined(VERSION_network) -#if MIN_VERSION_network(2,4,0) - return $ relativeTo rel uri -#else - relativeTo rel uri -#endif -#endif - | Header HdrLocation location <- rspHeaders response ] - -> return $ buildId - _ -> error "Unrecognised response from server." - where body = BuildReport.show buildReport --} - - --- TODO force this to be a PUT? 
- -putBuildLog :: Verbosity -> RepoContext -> (String, String) - -> BuildReportId -> BuildLog - -> IO () -putBuildLog verbosity repoCtxt auth reportId buildLog = do - let fullURI = reportId {uriPath = uriPath reportId "log"} - transport <- repoContextGetTransport repoCtxt - res <- postHttp transport verbosity fullURI buildLog (Just auth) - case res of - (200, _) -> return () - _ -> die' verbosity "unrecognized response" -- give response diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Check.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Check.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Check.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Check.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,111 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Check --- Copyright : (c) Lennart Kolmodin 2008 --- License : BSD-like --- --- Maintainer : kolmodin@haskell.org --- Stability : provisional --- Portability : portable --- --- Check a package for common mistakes --- ------------------------------------------------------------------------------ -module Distribution.Client.Check ( - check - ) where - -import Distribution.Client.Compat.Prelude -import Prelude () - -import Distribution.PackageDescription (GenericPackageDescription) -import Distribution.PackageDescription.Check -import Distribution.PackageDescription.Configuration (flattenPackageDescription) -import Distribution.PackageDescription.Parsec - (parseGenericPackageDescription, runParseResult) -import Distribution.Parsec.Common (PWarning (..), showPError, showPWarning) -import Distribution.Simple.Utils (defaultPackageDesc, die', notice, warn) -import Distribution.Verbosity (Verbosity) - -import 
qualified Data.ByteString as BS -import qualified System.Directory as Dir - -readGenericPackageDescriptionCheck :: Verbosity -> FilePath -> IO ([PWarning], GenericPackageDescription) -readGenericPackageDescriptionCheck verbosity fpath = do - exists <- Dir.doesFileExist fpath - unless exists $ - die' verbosity $ - "Error Parsing: file \"" ++ fpath ++ "\" doesn't exist. Cannot continue." - bs <- BS.readFile fpath - let (warnings, result) = runParseResult (parseGenericPackageDescription bs) - case result of - Left (_, errors) -> do - traverse_ (warn verbosity . showPError fpath) errors - die' verbosity $ "Failed parsing \"" ++ fpath ++ "\"." - Right x -> return (warnings, x) - --- | Note: must be called with the CWD set to the directory containing --- the '.cabal' file. -check :: Verbosity -> IO Bool -check verbosity = do - pdfile <- defaultPackageDesc verbosity - (ws, ppd) <- readGenericPackageDescriptionCheck verbosity pdfile - -- convert parse warnings into PackageChecks - -- Note: we /could/ pick different levels, based on warning type. - let ws' = [ PackageDistSuspicious (showPWarning pdfile w) | w <- ws ] - -- flatten the generic package description into a regular package - -- description - -- TODO: this may give more warnings than it should give; - -- consider two branches of a condition, one saying - -- ghc-options: -Wall - -- and the other - -- ghc-options: -Werror - -- joined into - -- ghc-options: -Wall -Werror - -- checkPackages will yield a warning on the last line, but it - -- would not on each individual branch. - -- Hovever, this is the same way hackage does it, so we will yield - -- the exact same errors as it will. - let pkg_desc = flattenPackageDescription ppd - ioChecks <- checkPackageFiles verbosity pkg_desc "." 
- let packageChecks = ioChecks ++ checkPackage ppd (Just pkg_desc) ++ ws' - buildImpossible = [ x | x@PackageBuildImpossible {} <- packageChecks ] - buildWarning = [ x | x@PackageBuildWarning {} <- packageChecks ] - distSuspicious = [ x | x@PackageDistSuspicious {} <- packageChecks ] - ++ [ x | x@PackageDistSuspiciousWarn {} <- packageChecks ] - distInexusable = [ x | x@PackageDistInexcusable {} <- packageChecks ] - - unless (null buildImpossible) $ do - warn verbosity "The package will not build sanely due to these errors:" - printCheckMessages buildImpossible - - unless (null buildWarning) $ do - warn verbosity "The following warnings are likely to affect your build negatively:" - printCheckMessages buildWarning - - unless (null distSuspicious) $ do - warn verbosity "These warnings may cause trouble when distributing the package:" - printCheckMessages distSuspicious - - unless (null distInexusable) $ do - warn verbosity "The following errors will cause portability problems on other environments:" - printCheckMessages distInexusable - - let isDistError (PackageDistSuspicious {}) = False - isDistError (PackageDistSuspiciousWarn {}) = False - isDistError _ = True - isCheckError (PackageDistSuspiciousWarn {}) = False - isCheckError _ = True - errors = filter isDistError packageChecks - - unless (null errors) $ - warn verbosity "Hackage would reject this package." - - when (null packageChecks) $ - notice verbosity "No errors or warnings could be found in the package." - - return (not . any isCheckError $ packageChecks) - - where - printCheckMessages = traverse_ (warn verbosity . 
explanation) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdBench.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdBench.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdBench.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdBench.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,242 +0,0 @@ -{-# LANGUAGE NamedFieldPuns #-} - --- | cabal-install CLI command: bench --- -module Distribution.Client.CmdBench ( - -- * The @bench@ CLI and action - benchCommand, - benchAction, - - -- * Internals exposed for testing - TargetProblem(..), - selectPackageTargets, - selectComponentTarget - ) where - -import Distribution.Client.ProjectOrchestration -import Distribution.Client.CmdErrorMessages - -import Distribution.Client.Setup - ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) -import qualified Distribution.Client.Setup as Client -import Distribution.Simple.Setup - ( HaddockFlags, fromFlagOrDefault ) -import Distribution.Simple.Command - ( CommandUI(..), usageAlternatives ) -import Distribution.Text - ( display ) -import Distribution.Verbosity - ( Verbosity, normal ) -import Distribution.Simple.Utils - ( wrapText, die' ) - -import Control.Monad (when) - - -benchCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) -benchCommand = Client.installCommand { - commandName = "new-bench", - commandSynopsis = "Run benchmarks", - commandUsage = usageAlternatives "new-bench" [ "[TARGETS] [FLAGS]" ], - commandDescription = Just $ \_ -> wrapText $ - "Runs the specified benchmarks, first ensuring they are up to " - ++ "date.\n\n" - - ++ "Any benchmark in any package in the project can be specified. " - ++ "A package can be specified in which case all the benchmarks in the " - ++ "package are run. 
The default is to run all the benchmarks in the " - ++ "package in the current directory.\n\n" - - ++ "Dependencies are built or rebuilt as necessary. Additional " - ++ "configuration flags can be specified on the command line and these " - ++ "extend the project configuration from the 'cabal.project', " - ++ "'cabal.project.local' and other files.", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " new-bench\n" - ++ " Run all the benchmarks in the package in the current directory\n" - ++ " " ++ pname ++ " new-bench pkgname\n" - ++ " Run all the benchmarks in the package named pkgname\n" - ++ " " ++ pname ++ " new-bench cname\n" - ++ " Run the benchmark named cname\n" - ++ " " ++ pname ++ " new-bench cname -O2\n" - ++ " Run the benchmark built with '-O2' (including local libs used)\n\n" - - ++ cmdCommonHelpTextNewBuildBeta - } - - --- | The @build@ command does a lot. It brings the install plan up to date, --- selects that part of the plan needed by the given or implicit targets and --- then executes the plan. --- --- For more details on how this works, see the module --- "Distribution.Client.ProjectOrchestration" --- -benchAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> GlobalFlags -> IO () -benchAction (configFlags, configExFlags, installFlags, haddockFlags) - targetStrings globalFlags = do - - baseCtx <- establishProjectBaseContext verbosity cliConfig - - targetSelectors <- either (reportTargetSelectorProblems verbosity) return - =<< readTargetSelectors (localPackages baseCtx) (Just BenchKind) targetStrings - - buildCtx <- - runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do - - when (buildSettingOnlyDeps (buildSettings baseCtx)) $ - die' verbosity $ - "The bench command does not support '--only-dependencies'. " - ++ "You may wish to use 'build --only-dependencies' and then " - ++ "use 'bench'." 
- - -- Interpret the targets on the command line as bench targets - -- (as opposed to say build or haddock targets). - targets <- either (reportTargetProblems verbosity) return - $ resolveTargets - selectPackageTargets - selectComponentTarget - TargetProblemCommon - elaboratedPlan - Nothing - targetSelectors - - let elaboratedPlan' = pruneInstallPlanToTargets - TargetActionBench - targets - elaboratedPlan - return (elaboratedPlan', targets) - - printPlan verbosity baseCtx buildCtx - - buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx - runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes - where - verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - cliConfig = commandLineFlagsToProjectConfig - globalFlags configFlags configExFlags - installFlags haddockFlags - --- | This defines what a 'TargetSelector' means for the @bench@ command. --- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, --- or otherwise classifies the problem. --- --- For the @bench@ command we select all buildable benchmarks, --- or fail if there are no benchmarks or no buildable benchmarks. --- -selectPackageTargets :: TargetSelector - -> [AvailableTarget k] -> Either TargetProblem [k] -selectPackageTargets targetSelector targets - - -- If there are any buildable benchmark targets then we select those - | not (null targetsBenchBuildable) - = Right targetsBenchBuildable - - -- If there are benchmarks but none are buildable then we report those - | not (null targetsBench) - = Left (TargetProblemNoneEnabled targetSelector targetsBench) - - -- If there are no benchmarks but some other targets then we report that - | not (null targets) - = Left (TargetProblemNoBenchmarks targetSelector) - - -- If there are no targets at all then we report that - | otherwise - = Left (TargetProblemNoTargets targetSelector) - where - targetsBenchBuildable = selectBuildableTargets - . 
filterTargetsKind BenchKind - $ targets - - targetsBench = forgetTargetsDetail - . filterTargetsKind BenchKind - $ targets - - --- | For a 'TargetComponent' 'TargetSelector', check if the component can be --- selected. --- --- For the @bench@ command we just need to check it is a benchmark, in addition --- to the basic checks on being buildable etc. --- -selectComponentTarget :: SubComponentTarget - -> AvailableTarget k -> Either TargetProblem k -selectComponentTarget subtarget@WholeComponent t - | CBenchName _ <- availableTargetComponentName t - = either (Left . TargetProblemCommon) return $ - selectComponentTargetBasic subtarget t - | otherwise - = Left (TargetProblemComponentNotBenchmark (availableTargetPackageId t) - (availableTargetComponentName t)) - -selectComponentTarget subtarget t - = Left (TargetProblemIsSubComponent (availableTargetPackageId t) - (availableTargetComponentName t) - subtarget) - --- | The various error conditions that can occur when matching a --- 'TargetSelector' against 'AvailableTarget's for the @bench@ command. --- -data TargetProblem = - TargetProblemCommon TargetProblemCommon - - -- | The 'TargetSelector' matches benchmarks but none are buildable - | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] - - -- | There are no targets at all - | TargetProblemNoTargets TargetSelector - - -- | The 'TargetSelector' matches targets but no benchmarks - | TargetProblemNoBenchmarks TargetSelector - - -- | The 'TargetSelector' refers to a component that is not a benchmark - | TargetProblemComponentNotBenchmark PackageId ComponentName - - -- | Asking to benchmark an individual file or module is not supported - | TargetProblemIsSubComponent PackageId ComponentName SubComponentTarget - deriving (Eq, Show) - -reportTargetProblems :: Verbosity -> [TargetProblem] -> IO a -reportTargetProblems verbosity = - die' verbosity . unlines . 
map renderTargetProblem - -renderTargetProblem :: TargetProblem -> String -renderTargetProblem (TargetProblemCommon problem) = - renderTargetProblemCommon "run" problem - -renderTargetProblem (TargetProblemNoneEnabled targetSelector targets) = - renderTargetProblemNoneEnabled "benchmark" targetSelector targets - -renderTargetProblem (TargetProblemNoBenchmarks targetSelector) = - "Cannot run benchmarks for the target '" ++ showTargetSelector targetSelector - ++ "' which refers to " ++ renderTargetSelector targetSelector - ++ " because " - ++ plural (targetSelectorPluralPkgs targetSelector) "it does" "they do" - ++ " not contain any benchmarks." - -renderTargetProblem (TargetProblemNoTargets targetSelector) = - case targetSelectorFilter targetSelector of - Just kind | kind /= BenchKind - -> "The bench command is for running benchmarks, but the target '" - ++ showTargetSelector targetSelector ++ "' refers to " - ++ renderTargetSelector targetSelector ++ "." - - _ -> renderTargetProblemNoTargets "benchmark" targetSelector - -renderTargetProblem (TargetProblemComponentNotBenchmark pkgid cname) = - "The bench command is for running benchmarks, but the target '" - ++ showTargetSelector targetSelector ++ "' refers to " - ++ renderTargetSelector targetSelector ++ " from the package " - ++ display pkgid ++ "." - where - targetSelector = TargetComponent pkgid cname WholeComponent - -renderTargetProblem (TargetProblemIsSubComponent pkgid cname subtarget) = - "The bench command can only run benchmarks as a whole, " - ++ "not files or modules within them, but the target '" - ++ showTargetSelector targetSelector ++ "' refers to " - ++ renderTargetSelector targetSelector ++ "." 
- where - targetSelector = TargetComponent pkgid cname subtarget diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdBuild.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdBuild.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdBuild.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdBuild.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,195 +0,0 @@ --- | cabal-install CLI command: build --- -module Distribution.Client.CmdBuild ( - -- * The @build@ CLI and action - buildCommand, - buildAction, - - -- * Internals exposed for testing - TargetProblem(..), - selectPackageTargets, - selectComponentTarget - ) where - -import Distribution.Client.ProjectOrchestration -import Distribution.Client.CmdErrorMessages - -import Distribution.Client.Setup - ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) -import qualified Distribution.Client.Setup as Client -import Distribution.Simple.Setup - ( HaddockFlags, fromFlagOrDefault ) -import Distribution.Simple.Command - ( CommandUI(..), usageAlternatives ) -import Distribution.Verbosity - ( Verbosity, normal ) -import Distribution.Simple.Utils - ( wrapText, die' ) - -import qualified Data.Map as Map - - -buildCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) -buildCommand = Client.installCommand { - commandName = "new-build", - commandSynopsis = "Compile targets within the project.", - commandUsage = usageAlternatives "new-build" [ "[TARGETS] [FLAGS]" ], - commandDescription = Just $ \_ -> wrapText $ - "Build one or more targets from within the project. The available " - ++ "targets are the packages in the project as well as individual " - ++ "components within those packages, including libraries, executables, " - ++ "test-suites or benchmarks. 
Targets can be specified by name or " - ++ "location. If no target is specified then the default is to build " - ++ "the package in the current directory.\n\n" - - ++ "Dependencies are built or rebuilt as necessary. Additional " - ++ "configuration flags can be specified on the command line and these " - ++ "extend the project configuration from the 'cabal.project', " - ++ "'cabal.project.local' and other files.", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " new-build\n" - ++ " Build the package in the current directory or all packages in the project\n" - ++ " " ++ pname ++ " new-build pkgname\n" - ++ " Build the package named pkgname in the project\n" - ++ " " ++ pname ++ " new-build ./pkgfoo\n" - ++ " Build the package in the ./pkgfoo directory\n" - ++ " " ++ pname ++ " new-build cname\n" - ++ " Build the component named cname in the project\n" - ++ " " ++ pname ++ " new-build cname --enable-profiling\n" - ++ " Build the component in profiling mode (including dependencies as needed)\n\n" - - ++ cmdCommonHelpTextNewBuildBeta - } - - --- | The @build@ command does a lot. It brings the install plan up to date, --- selects that part of the plan needed by the given or implicit targets and --- then executes the plan. --- --- For more details on how this works, see the module --- "Distribution.Client.ProjectOrchestration" --- -buildAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> GlobalFlags -> IO () -buildAction (configFlags, configExFlags, installFlags, haddockFlags) - targetStrings globalFlags = do - - baseCtx <- establishProjectBaseContext verbosity cliConfig - - targetSelectors <- either (reportTargetSelectorProblems verbosity) return - =<< readTargetSelectors (localPackages baseCtx) Nothing targetStrings - - buildCtx <- - runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do - - -- Interpret the targets on the command line as build targets - -- (as opposed to say repl or haddock targets). 
- targets <- either (reportTargetProblems verbosity) return - $ resolveTargets - selectPackageTargets - selectComponentTarget - TargetProblemCommon - elaboratedPlan - Nothing - targetSelectors - - let elaboratedPlan' = pruneInstallPlanToTargets - TargetActionBuild - targets - elaboratedPlan - elaboratedPlan'' <- - if buildSettingOnlyDeps (buildSettings baseCtx) - then either (reportCannotPruneDependencies verbosity) return $ - pruneInstallPlanToDependencies (Map.keysSet targets) - elaboratedPlan' - else return elaboratedPlan' - - return (elaboratedPlan'', targets) - - printPlan verbosity baseCtx buildCtx - - buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx - runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes - where - verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - cliConfig = commandLineFlagsToProjectConfig - globalFlags configFlags configExFlags - installFlags haddockFlags - --- | This defines what a 'TargetSelector' means for the @bench@ command. --- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, --- or otherwise classifies the problem. 
--- --- For the @build@ command select all components except non-buildable and disabled --- tests\/benchmarks, fail if there are no such components --- -selectPackageTargets :: TargetSelector - -> [AvailableTarget k] -> Either TargetProblem [k] -selectPackageTargets targetSelector targets - - -- If there are any buildable targets then we select those - | not (null targetsBuildable) - = Right targetsBuildable - - -- If there are targets but none are buildable then we report those - | not (null targets) - = Left (TargetProblemNoneEnabled targetSelector targets') - - -- If there are no targets at all then we report that - | otherwise - = Left (TargetProblemNoTargets targetSelector) - where - targets' = forgetTargetsDetail targets - targetsBuildable = selectBuildableTargetsWith - (buildable targetSelector) - targets - - -- When there's a target filter like "pkg:tests" then we do select tests, - -- but if it's just a target like "pkg" then we don't build tests unless - -- they are requested by default (i.e. by using --enable-tests) - buildable (TargetPackage _ _ Nothing) TargetNotRequestedByDefault = False - buildable (TargetAllPackages Nothing) TargetNotRequestedByDefault = False - buildable _ _ = True - --- | For a 'TargetComponent' 'TargetSelector', check if the component can be --- selected. --- --- For the @build@ command we just need the basic checks on being buildable etc. --- -selectComponentTarget :: SubComponentTarget - -> AvailableTarget k -> Either TargetProblem k -selectComponentTarget subtarget = - either (Left . TargetProblemCommon) Right - . selectComponentTargetBasic subtarget - - --- | The various error conditions that can occur when matching a --- 'TargetSelector' against 'AvailableTarget's for the @build@ command. 
--- -data TargetProblem = - TargetProblemCommon TargetProblemCommon - - -- | The 'TargetSelector' matches targets but none are buildable - | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] - - -- | There are no targets at all - | TargetProblemNoTargets TargetSelector - deriving (Eq, Show) - -reportTargetProblems :: Verbosity -> [TargetProblem] -> IO a -reportTargetProblems verbosity = - die' verbosity . unlines . map renderTargetProblem - -renderTargetProblem :: TargetProblem -> String -renderTargetProblem (TargetProblemCommon problem) = - renderTargetProblemCommon "build" problem -renderTargetProblem (TargetProblemNoneEnabled targetSelector targets) = - renderTargetProblemNoneEnabled "build" targetSelector targets -renderTargetProblem(TargetProblemNoTargets targetSelector) = - renderTargetProblemNoTargets "build" targetSelector - -reportCannotPruneDependencies :: Verbosity -> CannotPruneDependencies -> IO a -reportCannotPruneDependencies verbosity = - die' verbosity . renderCannotPruneDependencies - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdClean.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdClean.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdClean.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdClean.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,107 +0,0 @@ -{-# LANGUAGE RecordWildCards #-} -module Distribution.Client.CmdClean (cleanCommand, cleanAction) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.DistDirLayout - ( DistDirLayout(..), defaultDistDirLayout ) -import Distribution.Client.ProjectConfig - ( findProjectRoot ) -import Distribution.Client.Setup - ( GlobalFlags ) -import Distribution.ReadE ( succeedReadE ) -import Distribution.Simple.Setup - 
( Flag(..), toFlag, fromFlagOrDefault, flagToList, flagToMaybe - , optionDistPref, optionVerbosity, falseArg - ) -import Distribution.Simple.Command - ( CommandUI(..), option, reqArg ) -import Distribution.Simple.Utils - ( info, die', wrapText, handleDoesNotExist ) -import Distribution.Verbosity - ( Verbosity, normal ) - -import Control.Exception - ( throwIO ) -import System.Directory - ( removeDirectoryRecursive, doesDirectoryExist ) - -data CleanFlags = CleanFlags - { cleanSaveConfig :: Flag Bool - , cleanVerbosity :: Flag Verbosity - , cleanDistDir :: Flag FilePath - , cleanProjectFile :: Flag FilePath - } deriving (Eq) - -defaultCleanFlags :: CleanFlags -defaultCleanFlags = CleanFlags - { cleanSaveConfig = toFlag False - , cleanVerbosity = toFlag normal - , cleanDistDir = NoFlag - , cleanProjectFile = mempty - } - -cleanCommand :: CommandUI CleanFlags -cleanCommand = CommandUI - { commandName = "new-clean" - , commandSynopsis = "Clean the package store and remove temporary files." - , commandUsage = \pname -> - "Usage: " ++ pname ++ " new-clean [FLAGS]\n" - , commandDescription = Just $ \_ -> wrapText $ - "Removes all temporary files created during the building process " - ++ "(.hi, .o, preprocessed sources, etc.) 
and also empties out the " - ++ "local caches (by default).\n\n" - , commandNotes = Nothing - , commandDefaultFlags = defaultCleanFlags - , commandOptions = \showOrParseArgs -> - [ optionVerbosity - cleanVerbosity (\v flags -> flags { cleanVerbosity = v }) - , optionDistPref - cleanDistDir (\dd flags -> flags { cleanDistDir = dd }) - showOrParseArgs - , option [] ["project-file"] - "Set the name of the cabal.project file to search for in parent directories" - cleanProjectFile (\pf flags -> flags {cleanProjectFile = pf}) - (reqArg "FILE" (succeedReadE Flag) flagToList) - , option ['s'] ["save-config"] - "Save configuration, only remove build artifacts" - cleanSaveConfig (\sc flags -> flags { cleanSaveConfig = sc }) - falseArg - ] - } - -cleanAction :: CleanFlags -> [String] -> GlobalFlags -> IO () -cleanAction CleanFlags{..} extraArgs _ = do - let verbosity = fromFlagOrDefault normal cleanVerbosity - saveConfig = fromFlagOrDefault False cleanSaveConfig - mdistDirectory = flagToMaybe cleanDistDir - mprojectFile = flagToMaybe cleanProjectFile - - unless (null extraArgs) $ - die' verbosity $ "'clean' doesn't take any extra arguments: " ++ unwords extraArgs - - projectRoot <- either throwIO return =<< findProjectRoot Nothing mprojectFile - - let distLayout = defaultDistDirLayout projectRoot mdistDirectory - - if saveConfig - then do - let buildRoot = distBuildRootDirectory distLayout - unpackedSrcRoot = distUnpackedSrcRootDirectory distLayout - - buildRootExists <- doesDirectoryExist buildRoot - unpackedSrcRootExists <- doesDirectoryExist unpackedSrcRoot - - when buildRootExists $ do - info verbosity ("Deleting build root (" ++ buildRoot ++ ")") - handleDoesNotExist () $ removeDirectoryRecursive buildRoot - - when unpackedSrcRootExists $ do - info verbosity ("Deleting unpacked source root (" ++ unpackedSrcRoot ++ ")") - handleDoesNotExist () $ removeDirectoryRecursive unpackedSrcRoot - else do - let distRoot = distDirectory distLayout - - info verbosity ("Deleting 
dist-newstyle (" ++ distRoot ++ ")") - handleDoesNotExist () $ removeDirectoryRecursive distRoot diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdConfigure.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdConfigure.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdConfigure.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdConfigure.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,125 +0,0 @@ --- | cabal-install CLI command: configure --- -module Distribution.Client.CmdConfigure ( - configureCommand, - configureAction, - ) where - -import System.Directory -import Control.Monad -import qualified Data.Map as Map - -import Distribution.Client.ProjectOrchestration -import Distribution.Client.ProjectConfig - ( writeProjectLocalExtraConfig ) - -import Distribution.Client.Setup - ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) -import Distribution.Simple.Setup - ( HaddockFlags, fromFlagOrDefault ) -import Distribution.Verbosity - ( normal ) - -import Distribution.Simple.Command - ( CommandUI(..), usageAlternatives ) -import Distribution.Simple.Utils - ( wrapText, notice ) -import qualified Distribution.Client.Setup as Client - -configureCommand :: CommandUI (ConfigFlags, ConfigExFlags - ,InstallFlags, HaddockFlags) -configureCommand = Client.installCommand { - commandName = "new-configure", - commandSynopsis = "Add extra project configuration", - commandUsage = usageAlternatives "new-configure" [ "[FLAGS]" ], - commandDescription = Just $ \_ -> wrapText $ - "Adjust how the project is built by setting additional package flags " - ++ "and other flags.\n\n" - - ++ "The configuration options are written to the 'cabal.project.local' " - ++ "file (or '$project_file.local', if '--project-file' is specified) " - ++ "which extends the 
configuration from the 'cabal.project' file " - ++ "(if any). This combination is used as the project configuration for " - ++ "all other commands (such as 'new-build', 'new-repl' etc) though it " - ++ "can be extended/overridden on a per-command basis.\n\n" - - ++ "The new-configure command also checks that the project configuration " - ++ "will work. In particular it checks that there is a consistent set of " - ++ "dependencies for the project as a whole.\n\n" - - ++ "The 'cabal.project.local' file persists across 'new-clean' but is " - ++ "overwritten on the next use of the 'new-configure' command. The " - ++ "intention is that the 'cabal.project' file should be kept in source " - ++ "control but the 'cabal.project.local' should not.\n\n" - - ++ "It is never necessary to use the 'new-configure' command. It is " - ++ "merely a convenience in cases where you do not want to specify flags " - ++ "to 'new-build' (and other commands) every time and yet do not want " - ++ "to alter the 'cabal.project' persistently.", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " new-configure --with-compiler ghc-7.10.3\n" - ++ " Adjust the project configuration to use the given compiler\n" - ++ " program and check the resulting configuration works.\n" - ++ " " ++ pname ++ " new-configure\n" - ++ " Reset the local configuration to empty and check the overall\n" - ++ " project configuration works.\n\n" - - ++ cmdCommonHelpTextNewBuildBeta - } - --- | To a first approximation, the @configure@ just runs the first phase of --- the @build@ command where we bring the install plan up to date (thus --- checking that it's possible). --- --- The only difference is that @configure@ also allows the user to specify --- some extra config flags which we save in the file @cabal.project.local@. 
--- --- For more details on how this works, see the module --- "Distribution.Client.ProjectOrchestration" --- -configureAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> GlobalFlags -> IO () -configureAction (configFlags, configExFlags, installFlags, haddockFlags) - _extraArgs globalFlags = do - --TODO: deal with _extraArgs, since flags with wrong syntax end up there - - baseCtx <- establishProjectBaseContext verbosity cliConfig - - -- Write out the @cabal.project.local@ so it gets picked up by the - -- planning phase. If old config exists, then print the contents - -- before overwriting - exists <- doesFileExist "cabal.project.local" - when exists $ do - notice verbosity "'cabal.project.local' file already exists. Now overwriting it." - copyFile "cabal.project.local" "cabal.project.local~" - writeProjectLocalExtraConfig (distDirLayout baseCtx) - cliConfig - - buildCtx <- - runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> - - -- TODO: Select the same subset of targets as 'CmdBuild' would - -- pick (ignoring, for example, executables in libraries - -- we depend on). But we don't want it to fail, so actually we - -- have to do it slightly differently from build. - return (elaboratedPlan, Map.empty) - - let baseCtx' = baseCtx { - buildSettings = (buildSettings baseCtx) { - buildSettingDryRun = True - } - } - - -- TODO: Hmm, but we don't have any targets. Currently this prints - -- what we would build if we were to build everything. Could pick - -- implicit target like "." - -- - -- TODO: should we say what's in the project (+deps) as a whole? 
- printPlan verbosity baseCtx' buildCtx - where - verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - cliConfig = commandLineFlagsToProjectConfig - globalFlags configFlags configExFlags - installFlags haddockFlags - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdErrorMessages.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdErrorMessages.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdErrorMessages.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdErrorMessages.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,410 +0,0 @@ -{-# LANGUAGE RecordWildCards, NamedFieldPuns #-} - --- | Utilities to help format error messages for the various CLI commands. --- -module Distribution.Client.CmdErrorMessages ( - module Distribution.Client.CmdErrorMessages, - module Distribution.Client.TargetSelector, - ) where - -import Distribution.Client.ProjectOrchestration -import Distribution.Client.TargetSelector - ( ComponentKindFilter, componentKind, showTargetSelector ) - -import Distribution.Package - ( packageId, PackageName, packageName ) -import Distribution.Types.ComponentName - ( showComponentName ) -import Distribution.Solver.Types.OptionalStanza - ( OptionalStanza(..) ) -import Distribution.Text - ( display ) - -import Data.Maybe (isNothing) -import Data.List (sortBy, groupBy, nub) -import Data.Function (on) - - ------------------------ --- Singular or plural --- - --- | A tag used in rendering messages to distinguish singular or plural. 
--- -data Plural = Singular | Plural - --- | Used to render a singular or plural version of something --- --- > plural (listPlural theThings) "it is" "they are" --- -plural :: Plural -> a -> a -> a -plural Singular si _pl = si -plural Plural _si pl = pl - --- | Singular for singleton lists and plural otherwise. --- -listPlural :: [a] -> Plural -listPlural [_] = Singular -listPlural _ = Plural - - --------------------- --- Rendering lists --- - --- | Render a list of things in the style @foo, bar and baz@ -renderListCommaAnd :: [String] -> String -renderListCommaAnd [] = "" -renderListCommaAnd [x] = x -renderListCommaAnd [x,x'] = x ++ " and " ++ x' -renderListCommaAnd (x:xs) = x ++ ", " ++ renderListCommaAnd xs - --- | Render a list of things in the style @blah blah; this that; and the other@ -renderListSemiAnd :: [String] -> String -renderListSemiAnd [] = "" -renderListSemiAnd [x] = x -renderListSemiAnd [x,x'] = x ++ "; and " ++ x' -renderListSemiAnd (x:xs) = x ++ "; " ++ renderListSemiAnd xs - --- | When rendering lists of things it often reads better to group related --- things, e.g. grouping components by package name --- --- > renderListSemiAnd --- > [ "the package " ++ display pkgname ++ " components " --- > ++ renderListCommaAnd showComponentName components --- > | (pkgname, components) <- sortGroupOn packageName allcomponents ] --- -sortGroupOn :: Ord b => (a -> b) -> [a] -> [(b, [a])] -sortGroupOn key = map (\xs@(x:_) -> (key x, xs)) - . groupBy ((==) `on` key) - . 
sortBy (compare `on` key) - - ----------------------------------------------------- --- Renderering for a few project and package types --- - -renderTargetSelector :: TargetSelector -> String -renderTargetSelector (TargetPackage _ pkgids Nothing) = - "the " ++ plural (listPlural pkgids) "package" "packages" ++ " " - ++ renderListCommaAnd (map display pkgids) - -renderTargetSelector (TargetPackage _ pkgids (Just kfilter)) = - "the " ++ renderComponentKind Plural kfilter - ++ " in the " ++ plural (listPlural pkgids) "package" "packages" ++ " " - ++ renderListCommaAnd (map display pkgids) - -renderTargetSelector (TargetPackageNamed pkgname Nothing) = - "the package " ++ display pkgname - -renderTargetSelector (TargetPackageNamed pkgname (Just kfilter)) = - "the " ++ renderComponentKind Plural kfilter - ++ " in the package " ++ display pkgname - -renderTargetSelector (TargetAllPackages Nothing) = - "all the packages in the project" - -renderTargetSelector (TargetAllPackages (Just kfilter)) = - "all the " ++ renderComponentKind Plural kfilter - ++ " in the project" - -renderTargetSelector (TargetComponent pkgid cname subtarget) = - renderSubComponentTarget subtarget ++ "the " - ++ renderComponentName (packageName pkgid) cname - -renderTargetSelector (TargetComponentUnknown pkgname (Left ucname) subtarget) = - renderSubComponentTarget subtarget ++ "the component " ++ display ucname - ++ " in the package " ++ display pkgname - -renderTargetSelector (TargetComponentUnknown pkgname (Right cname) subtarget) = - renderSubComponentTarget subtarget ++ "the " - ++ renderComponentName pkgname cname - -renderSubComponentTarget :: SubComponentTarget -> String -renderSubComponentTarget WholeComponent = "" -renderSubComponentTarget (FileTarget filename) = - "the file " ++ filename ++ "in " -renderSubComponentTarget (ModuleTarget modname) = - "the module" ++ display modname ++ "in " - - -renderOptionalStanza :: Plural -> OptionalStanza -> String -renderOptionalStanza Singular 
TestStanzas = "test suite" -renderOptionalStanza Plural TestStanzas = "test suites" -renderOptionalStanza Singular BenchStanzas = "benchmark" -renderOptionalStanza Plural BenchStanzas = "benchmarks" - --- | The optional stanza type (test suite or benchmark), if it is one. -optionalStanza :: ComponentName -> Maybe OptionalStanza -optionalStanza (CTestName _) = Just TestStanzas -optionalStanza (CBenchName _) = Just BenchStanzas -optionalStanza _ = Nothing - --- | Does the 'TargetSelector' potentially refer to one package or many? --- -targetSelectorPluralPkgs :: TargetSelector -> Plural -targetSelectorPluralPkgs (TargetAllPackages _) = Plural -targetSelectorPluralPkgs (TargetPackage _ pids _) = listPlural pids -targetSelectorPluralPkgs (TargetPackageNamed _ _) = Singular -targetSelectorPluralPkgs TargetComponent{} = Singular -targetSelectorPluralPkgs TargetComponentUnknown{} = Singular - --- | Does the 'TargetSelector' refer to packages or to components? -targetSelectorRefersToPkgs :: TargetSelector -> Bool -targetSelectorRefersToPkgs (TargetAllPackages mkfilter) = isNothing mkfilter -targetSelectorRefersToPkgs (TargetPackage _ _ mkfilter) = isNothing mkfilter -targetSelectorRefersToPkgs (TargetPackageNamed _ mkfilter) = isNothing mkfilter -targetSelectorRefersToPkgs TargetComponent{} = False -targetSelectorRefersToPkgs TargetComponentUnknown{} = False - -targetSelectorFilter :: TargetSelector -> Maybe ComponentKindFilter -targetSelectorFilter (TargetPackage _ _ mkfilter) = mkfilter -targetSelectorFilter (TargetPackageNamed _ mkfilter) = mkfilter -targetSelectorFilter (TargetAllPackages mkfilter) = mkfilter -targetSelectorFilter TargetComponent{} = Nothing -targetSelectorFilter TargetComponentUnknown{} = Nothing - -renderComponentName :: PackageName -> ComponentName -> String -renderComponentName pkgname CLibName = "library " ++ display pkgname -renderComponentName _ (CSubLibName name) = "library " ++ display name -renderComponentName _ (CFLibName name) = "foreign 
library " ++ display name -renderComponentName _ (CExeName name) = "executable " ++ display name -renderComponentName _ (CTestName name) = "test suite " ++ display name -renderComponentName _ (CBenchName name) = "benchmark " ++ display name - -renderComponentKind :: Plural -> ComponentKind -> String -renderComponentKind Singular ckind = case ckind of - LibKind -> "library" -- internal/sub libs? - FLibKind -> "foreign library" - ExeKind -> "executable" - TestKind -> "test suite" - BenchKind -> "benchmark" -renderComponentKind Plural ckind = case ckind of - LibKind -> "libraries" -- internal/sub libs? - FLibKind -> "foreign libraries" - ExeKind -> "executables" - TestKind -> "test suites" - BenchKind -> "benchmarks" - - -------------------------------------------------------- --- Renderering error messages for TargetProblemCommon --- - -renderTargetProblemCommon :: String -> TargetProblemCommon -> String -renderTargetProblemCommon verb (TargetNotInProject pkgname) = - "Cannot " ++ verb ++ " the package " ++ display pkgname ++ ", it is not " - ++ "in this project (either directly or indirectly). If you want to add it " - ++ "to the project then edit the cabal.project file." - -renderTargetProblemCommon verb (TargetAvailableInIndex pkgname) = - "Cannot " ++ verb ++ " the package " ++ display pkgname ++ ", it is not " - ++ "in this project (either directly or indirectly), but it is in the current " - ++ "package index. If you want to add it to the project then edit the " - ++ "cabal.project file." - -renderTargetProblemCommon verb (TargetComponentNotProjectLocal pkgid cname _) = - "Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because the " - ++ "package " ++ display pkgid ++ " is not local to the project, and cabal " - ++ "does not currently support building test suites or benchmarks of " - ++ "non-local dependencies. 
To run test suites or benchmarks from " - ++ "dependencies you can unpack the package locally and adjust the " - ++ "cabal.project file to include that package directory." - -renderTargetProblemCommon verb (TargetComponentNotBuildable pkgid cname _) = - "Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because it is " - ++ "marked as 'buildable: False' within the '" ++ display (packageName pkgid) - ++ ".cabal' file (at least for the current configuration). If you believe it " - ++ "should be buildable then check the .cabal file to see if the buildable " - ++ "property is conditional on flags. Alternatively you may simply have to " - ++ "edit the .cabal file to declare it as buildable and fix any resulting " - ++ "build problems." - -renderTargetProblemCommon verb (TargetOptionalStanzaDisabledByUser _ cname _) = - "Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because " - ++ "building " ++ compkinds ++ " has been explicitly disabled in the " - ++ "configuration. You can adjust this configuration in the " - ++ "cabal.project{.local} file either for all packages in the project or on " - ++ "a per-package basis. Note that if you do not explicitly disable " - ++ compkinds ++ " then the solver will merely try to make a plan with " - ++ "them available, so you may wish to explicitly enable them which will " - ++ "require the solver to find a plan with them available or to fail with an " - ++ "explanation." - where - compkinds = renderComponentKind Plural (componentKind cname) - -renderTargetProblemCommon verb (TargetOptionalStanzaDisabledBySolver pkgid cname _) = - "Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because the " - ++ "solver did not find a plan that included the " ++ compkinds - ++ " for " ++ display pkgid ++ ". It is probably worth trying again with " - ++ compkinds ++ " explicitly enabled in the configuration in the " - ++ "cabal.project{.local} file. 
This will ask the solver to find a plan with " - ++ "the " ++ compkinds ++ " available. It will either fail with an " - ++ "explanation or find a different plan that uses different versions of some " - ++ "other packages. Use the '--dry-run' flag to see package versions and " - ++ "check that you are happy with the choices." - where - compkinds = renderComponentKind Plural (componentKind cname) - -renderTargetProblemCommon verb (TargetProblemUnknownComponent pkgname ecname) = - "Cannot " ++ verb ++ " the " - ++ (case ecname of - Left ucname -> "component " ++ display ucname - Right cname -> renderComponentName pkgname cname) - ++ " from the package " ++ display pkgname - ++ ", because the package does not contain a " - ++ (case ecname of - Left _ -> "component" - Right cname -> renderComponentKind Singular (componentKind cname)) - ++ " with that name." - -renderTargetProblemCommon verb (TargetProblemNoSuchPackage pkgid) = - "Internal error when trying to " ++ verb ++ " the package " - ++ display pkgid ++ ". The package is not in the set of available targets " - ++ "for the project plan, which would suggest an inconsistency " - ++ "between readTargetSelectors and resolveTargets." - -renderTargetProblemCommon verb (TargetProblemNoSuchComponent pkgid cname) = - "Internal error when trying to " ++ verb ++ " the " - ++ showComponentName cname ++ " from the package " ++ display pkgid - ++ ". The package,component pair is not in the set of available targets " - ++ "for the project plan, which would suggest an inconsistency " - ++ "between readTargetSelectors and resolveTargets." - - ------------------------------------------------------------- --- Renderering error messages for TargetProblemNoneEnabled --- - --- | Several commands have a @TargetProblemNoneEnabled@ problem constructor. --- This renders an error message for those cases. 
--- -renderTargetProblemNoneEnabled :: String - -> TargetSelector - -> [AvailableTarget ()] - -> String -renderTargetProblemNoneEnabled verb targetSelector targets = - "Cannot " ++ verb ++ " " ++ renderTargetSelector targetSelector - ++ " because none of the components are available to build: " - ++ renderListSemiAnd - [ case (status, mstanza) of - (TargetDisabledByUser, Just stanza) -> - renderListCommaAnd - [ "the " ++ showComponentName availableTargetComponentName - | AvailableTarget {availableTargetComponentName} <- targets' ] - ++ plural (listPlural targets') " is " " are " - ++ " not available because building " - ++ renderOptionalStanza Plural stanza - ++ " has been disabled in the configuration" - (TargetDisabledBySolver, Just stanza) -> - renderListCommaAnd - [ "the " ++ showComponentName availableTargetComponentName - | AvailableTarget {availableTargetComponentName} <- targets' ] - ++ plural (listPlural targets') " is " " are " - ++ "not available because the solver did not find a plan that " - ++ "included the " ++ renderOptionalStanza Plural stanza - (TargetNotBuildable, _) -> - renderListCommaAnd - [ "the " ++ showComponentName availableTargetComponentName - | AvailableTarget {availableTargetComponentName} <- targets' ] - ++ plural (listPlural targets') " is " " are all " - ++ "marked as 'buildable: False'" - (TargetNotLocal, _) -> - renderListCommaAnd - [ "the " ++ showComponentName availableTargetComponentName - | AvailableTarget {availableTargetComponentName} <- targets' ] - ++ " cannot be built because cabal does not currently support " - ++ "building test suites or benchmarks of non-local dependencies" - (TargetBuildable () TargetNotRequestedByDefault, Just stanza) -> - renderListCommaAnd - [ "the " ++ showComponentName availableTargetComponentName - | AvailableTarget {availableTargetComponentName} <- targets' ] - ++ " will not be built because " ++ renderOptionalStanza Plural stanza - ++ " are not built by default in the current configuration 
(but you " - ++ "can still build them specifically)" --TODO: say how - _ -> error $ "renderBuildTargetProblem: unexpected status " - ++ show (status, mstanza) - | ((status, mstanza), targets') <- sortGroupOn groupingKey targets - ] - where - groupingKey t = - ( availableTargetStatus t - , case availableTargetStatus t of - TargetNotBuildable -> Nothing - TargetNotLocal -> Nothing - _ -> optionalStanza (availableTargetComponentName t) - ) - ------------------------------------------------------------- --- Renderering error messages for TargetProblemNoneEnabled --- - --- | Several commands have a @TargetProblemNoTargets@ problem constructor. --- This renders an error message for those cases. --- -renderTargetProblemNoTargets :: String -> TargetSelector -> String -renderTargetProblemNoTargets verb targetSelector = - "Cannot " ++ verb ++ " " ++ renderTargetSelector targetSelector - ++ " because " ++ reason targetSelector ++ ". " - ++ "Check the .cabal " - ++ plural (targetSelectorPluralPkgs targetSelector) - "file for the package and make sure that it properly declares " - "files for the packages and make sure that they properly declare " - ++ "the components that you expect." 
- where - reason (TargetPackage _ _ Nothing) = - "it does not contain any components at all" - reason (TargetPackage _ _ (Just kfilter)) = - "it does not contain any " ++ renderComponentKind Plural kfilter - reason (TargetPackageNamed _ Nothing) = - "it does not contain any components at all" - reason (TargetPackageNamed _ (Just kfilter)) = - "it does not contain any " ++ renderComponentKind Plural kfilter - reason (TargetAllPackages Nothing) = - "none of them contain any components at all" - reason (TargetAllPackages (Just kfilter)) = - "none of the packages contain any " - ++ renderComponentKind Plural kfilter - reason ts@TargetComponent{} = - error $ "renderTargetProblemNoTargets: " ++ show ts - reason ts@TargetComponentUnknown{} = - error $ "renderTargetProblemNoTargets: " ++ show ts - ------------------------------------------------------------ --- Renderering error messages for CannotPruneDependencies --- - -renderCannotPruneDependencies :: CannotPruneDependencies -> String -renderCannotPruneDependencies (CannotPruneDependencies brokenPackages) = - "Cannot select only the dependencies (as requested by the " - ++ "'--only-dependencies' flag), " - ++ (case pkgids of - [pkgid] -> "the package " ++ display pkgid ++ " is " - _ -> "the packages " - ++ renderListCommaAnd (map display pkgids) ++ " are ") - ++ "required by a dependency of one of the other targets." - where - -- throw away the details and just list the deps that are needed - pkgids :: [PackageId] - pkgids = nub . map packageId . 
concatMap snd $ brokenPackages - -{- - ++ "Syntax:\n" - ++ " - build [package]\n" - ++ " - build [package:]component\n" - ++ " - build [package:][component:]module\n" - ++ " - build [package:][component:]file\n" - ++ " where\n" - ++ " package is a package name, package dir or .cabal file\n\n" - ++ "Examples:\n" - ++ " - build foo -- package name\n" - ++ " - build tests -- component name\n" - ++ " (name of library, executable, test-suite or benchmark)\n" - ++ " - build Data.Foo -- module name\n" - ++ " - build Data/Foo.hsc -- file name\n\n" - ++ "An ambigious target can be qualified by package, component\n" - ++ "and/or component kind (lib|exe|test|bench|flib)\n" - ++ " - build foo:tests -- component qualified by package\n" - ++ " - build tests:Data.Foo -- module qualified by component\n" - ++ " - build lib:foo -- component qualified by kind" --} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdExec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdExec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdExec.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdExec.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,259 +0,0 @@ -------------------------------------------------------------------------------- --- | --- Module : Distribution.Client.Exec --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Implementation of the 'new-exec' command for running an arbitrary executable --- in an environment suited to the part of the store built for a project. -------------------------------------------------------------------------------- - -{-# LANGUAGE RecordWildCards #-} -module Distribution.Client.CmdExec - ( execAction - , execCommand - ) where - -import Distribution.Client.DistDirLayout - ( DistDirLayout(..) 
- ) -import Distribution.Client.InstallPlan - ( GenericPlanPackage(..) - , toGraph - ) -import Distribution.Client.Setup - ( ConfigExFlags - , ConfigFlags(configVerbosity) - , GlobalFlags - , InstallFlags - ) -import Distribution.Client.ProjectOrchestration - ( ProjectBuildContext(..) - , runProjectPreBuildPhase - , establishProjectBaseContext - , distDirLayout - , commandLineFlagsToProjectConfig - , ProjectBaseContext(..) - ) -import Distribution.Client.ProjectPlanOutput - ( updatePostBuildProjectStatus - , createPackageEnvironment - , argsEquivalentOfGhcEnvironmentFile - , PostBuildProjectStatus - ) -import qualified Distribution.Client.ProjectPlanning as Planning -import Distribution.Client.ProjectPlanning - ( ElaboratedInstallPlan - , ElaboratedSharedConfig(..) - ) -import Distribution.Simple.Command - ( CommandUI(..) - ) -import Distribution.Simple.Program.Db - ( modifyProgramSearchPath - , requireProgram - , configuredPrograms - ) -import Distribution.Simple.Program.Find - ( ProgramSearchPathEntry(..) - ) -import Distribution.Simple.Program.Run - ( programInvocation - , runProgramInvocation - ) -import Distribution.Simple.Program.Types - ( programOverrideEnv - , programDefaultArgs - , programPath - , simpleProgram - , ConfiguredProgram - ) -import Distribution.Simple.GHC - ( getImplInfo - , GhcImplInfo(supportsPkgEnvFiles) ) -import Distribution.Simple.Setup - ( HaddockFlags - , fromFlagOrDefault - ) -import Distribution.Simple.Utils - ( die' - , info - , withTempDirectory - , wrapText - ) -import Distribution.Verbosity - ( Verbosity - , normal - ) - -import qualified Distribution.Client.CmdBuild as CmdBuild - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Data.Set (Set) -import qualified Data.Set as S -import qualified Data.Map as M - -execCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) -execCommand = CommandUI - { commandName = "new-exec" - , commandSynopsis = "Give a command access to the store." 
- , commandUsage = \pname -> - "Usage: " ++ pname ++ " new-exec [FLAGS] [--] COMMAND [--] [ARGS]\n" - , commandDescription = Just $ \pname -> wrapText $ - "During development it is often useful to run build tasks and perform" - ++ " one-off program executions to experiment with the behavior of build" - ++ " tools. It is convenient to run these tools in the same way " ++ pname - ++ " itself would. The `" ++ pname ++ " new-exec` command provides a way to" - ++ " do so.\n" - ++ "\n" - ++ "Compiler tools will be configured to see the same subset of the store" - ++ " that builds would see. The PATH is modified to make all executables in" - ++ " the dependency tree available (provided they have been built already)." - ++ " Commands are also rewritten in the way cabal itself would. For" - ++ " example, `" ++ pname ++ " new-exec ghc` will consult the configuration" - ++ " to choose an appropriate version of ghc and to include any" - ++ " ghc-specific flags requested." - , commandNotes = Nothing - , commandOptions = commandOptions CmdBuild.buildCommand - , commandDefaultFlags = commandDefaultFlags CmdBuild.buildCommand - } - -execAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> GlobalFlags -> IO () -execAction (configFlags, configExFlags, installFlags, haddockFlags) - extraArgs globalFlags = do - - baseCtx <- establishProjectBaseContext verbosity cliConfig - - -- To set up the environment, we'd like to select the libraries in our - -- dependency tree that we've already built. So first we set up an install - -- plan, but we walk the dependency tree without first executing the plan. - buildCtx <- runProjectPreBuildPhase - verbosity - baseCtx - (\plan -> return (plan, M.empty)) - - -- We use the build status below to decide what libraries to include in the - -- compiler environment, but we don't want to actually build anything. So we - -- pass mempty to indicate that nothing happened and we just want the current - -- status. 
- buildStatus <- updatePostBuildProjectStatus - verbosity - (distDirLayout baseCtx) - (elaboratedPlanOriginal buildCtx) - (pkgsBuildStatus buildCtx) - mempty - - -- Some dependencies may have executables. Let's put those on the PATH. - extraPaths <- pathAdditions verbosity baseCtx buildCtx - let programDb = modifyProgramSearchPath - (map ProgramSearchPathDir extraPaths ++) - . pkgConfigCompilerProgs - . elaboratedShared - $ buildCtx - - -- Now that we have the packages, set up the environment. We accomplish this - -- by creating an environment file that selects the databases and packages we - -- computed in the previous step, and setting an environment variable to - -- point at the file. - -- In case ghc is too old to support environment files, - -- we pass the same info as arguments - let compiler = pkgConfigCompiler $ elaboratedShared buildCtx - envFilesSupported = supportsPkgEnvFiles (getImplInfo compiler) - case extraArgs of - [] -> die' verbosity "Please specify an executable to run" - exe:args -> do - (program, _) <- requireProgram verbosity (simpleProgram exe) programDb - let argOverrides = - argsEquivalentOfGhcEnvironmentFile - compiler - (distDirLayout baseCtx) - (elaboratedPlanOriginal buildCtx) - buildStatus - programIsConfiguredCompiler = matchCompilerPath - (elaboratedShared buildCtx) - program - argOverrides' = - if envFilesSupported - || not programIsConfiguredCompiler - then [] - else argOverrides - - (if envFilesSupported - then withTempEnvFile verbosity baseCtx buildCtx buildStatus - else \f -> f []) $ \envOverrides -> do - let program' = withOverrides - envOverrides - argOverrides' - program - invocation = programInvocation program' args - runProgramInvocation verbosity invocation - where - verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - cliConfig = commandLineFlagsToProjectConfig - globalFlags configFlags configExFlags - installFlags haddockFlags - withOverrides env args program = program - { programOverrideEnv = 
programOverrideEnv program ++ env - , programDefaultArgs = programDefaultArgs program ++ args} - -matchCompilerPath :: ElaboratedSharedConfig -> ConfiguredProgram -> Bool -matchCompilerPath elaboratedShared program = - programPath program - `elem` - (programPath <$> configuredCompilers) - where - configuredCompilers = configuredPrograms $ pkgConfigCompilerProgs elaboratedShared - --- | Execute an action with a temporary .ghc.environment file reflecting the --- current environment. The action takes an environment containing the env --- variable which points ghc to the file. -withTempEnvFile :: Verbosity - -> ProjectBaseContext - -> ProjectBuildContext - -> PostBuildProjectStatus - -> ([(String, Maybe String)] -> IO a) - -> IO a -withTempEnvFile verbosity - baseCtx - buildCtx - buildStatus - action = - withTempDirectory - verbosity - (distTempDirectory (distDirLayout baseCtx)) - "environment." - (\tmpDir -> do - envOverrides <- createPackageEnvironment - verbosity - tmpDir - (elaboratedPlanToExecute buildCtx) - (elaboratedShared buildCtx) - buildStatus - action envOverrides) - -pathAdditions :: Verbosity -> ProjectBaseContext -> ProjectBuildContext -> IO [FilePath] -pathAdditions verbosity ProjectBaseContext{..}ProjectBuildContext{..} = do - info verbosity . unlines $ "Including the following directories in PATH:" - : paths - return paths - where - paths = S.toList - $ binDirectories distDirLayout elaboratedShared elaboratedPlanToExecute - -binDirectories - :: DistDirLayout - -> ElaboratedSharedConfig - -> ElaboratedInstallPlan - -> Set FilePath -binDirectories layout config = fromElaboratedInstallPlan where - fromElaboratedInstallPlan = fromGraph . toGraph - fromGraph = foldMap fromPlan - fromSrcPkg = S.fromList . 
Planning.binDirectories layout config - - fromPlan (PreExisting _) = mempty - fromPlan (Configured pkg) = fromSrcPkg pkg - fromPlan (Installed pkg) = fromSrcPkg pkg - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdFreeze.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdFreeze.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdFreeze.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdFreeze.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,230 +0,0 @@ -{-# LANGUAGE NamedFieldPuns, RecordWildCards #-} - --- | cabal-install CLI command: freeze --- -module Distribution.Client.CmdFreeze ( - freezeCommand, - freezeAction, - ) where - -import Distribution.Client.ProjectOrchestration -import Distribution.Client.ProjectPlanning -import Distribution.Client.ProjectConfig - ( ProjectConfig(..), ProjectConfigShared(..) - , writeProjectLocalFreezeConfig ) -import Distribution.Client.Targets - ( UserQualifier(..), UserConstraintScope(..), UserConstraint(..) ) -import Distribution.Solver.Types.PackageConstraint - ( PackageProperty(..) ) -import Distribution.Solver.Types.ConstraintSource - ( ConstraintSource(..) 
) -import Distribution.Client.DistDirLayout - ( DistDirLayout(distProjectFile) ) -import qualified Distribution.Client.InstallPlan as InstallPlan - - -import Distribution.Package - ( PackageName, packageName, packageVersion ) -import Distribution.Version - ( VersionRange, thisVersion - , unionVersionRanges, simplifyVersionRange ) -import Distribution.PackageDescription - ( FlagAssignment, nullFlagAssignment ) -import Distribution.Client.Setup - ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) -import Distribution.Simple.Setup - ( HaddockFlags, fromFlagOrDefault ) -import Distribution.Simple.Utils - ( die', notice, wrapText ) -import Distribution.Verbosity - ( normal ) - -import Data.Monoid as Monoid -import qualified Data.Map as Map -import Data.Map (Map) -import Control.Monad (unless) - -import Distribution.Simple.Command - ( CommandUI(..), usageAlternatives ) -import qualified Distribution.Client.Setup as Client - - -freezeCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) -freezeCommand = Client.installCommand { - commandName = "new-freeze", - commandSynopsis = "Freeze dependencies.", - commandUsage = usageAlternatives "new-freeze" [ "[FLAGS]" ], - commandDescription = Just $ \_ -> wrapText $ - "The project configuration is frozen so that it will be reproducible " - ++ "in future.\n\n" - - ++ "The precise dependency configuration for the project is written to " - ++ "the 'cabal.project.freeze' file (or '$project_file.freeze' if " - ++ "'--project-file' is specified). This file extends the configuration " - ++ "from the 'cabal.project' file and thus is used as the project " - ++ "configuration for all other commands (such as 'new-build', " - ++ "'new-repl' etc).\n\n" - - ++ "The freeze file can be kept in source control. To make small " - ++ "adjustments it may be edited manually, or to make bigger changes " - ++ "you may wish to delete the file and re-freeze. 
For more control, " - ++ "one approach is to try variations using 'new-build --dry-run' with " - ++ "solver flags such as '--constraint=\"pkg < 1.2\"' and once you have " - ++ "a satisfactory solution to freeze it using the 'new-freeze' command " - ++ "with the same set of flags.", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " new-freeze\n" - ++ " Freeze the configuration of the current project\n\n" - ++ " " ++ pname ++ " new-build --dry-run --constraint=\"aeson < 1\"\n" - ++ " Check what a solution with the given constraints would look like\n" - ++ " " ++ pname ++ " new-freeze --constraint=\"aeson < 1\"\n" - ++ " Freeze a solution using the given constraints\n\n" - - ++ "Note: this command is part of the new project-based system (aka " - ++ "nix-style\nlocal builds). These features are currently in beta. " - ++ "Please see\n" - ++ "http://cabal.readthedocs.io/en/latest/nix-local-build-overview.html " - ++ "for\ndetails and advice on what you can expect to work. If you " - ++ "encounter problems\nplease file issues at " - ++ "https://github.com/haskell/cabal/issues and if you\nhave any time " - ++ "to get involved and help with testing, fixing bugs etc then\nthat " - ++ "is very much appreciated.\n" - } - --- | To a first approximation, the @freeze@ command runs the first phase of --- the @build@ command where we bring the install plan up to date, and then --- based on the install plan we write out a @cabal.project.freeze@ config file. 
--- --- For more details on how this works, see the module --- "Distribution.Client.ProjectOrchestration" --- -freezeAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> GlobalFlags -> IO () -freezeAction (configFlags, configExFlags, installFlags, haddockFlags) - extraArgs globalFlags = do - - unless (null extraArgs) $ - die' verbosity $ "'freeze' doesn't take any extra arguments: " - ++ unwords extraArgs - - ProjectBaseContext { - distDirLayout, - cabalDirLayout, - projectConfig, - localPackages - } <- establishProjectBaseContext verbosity cliConfig - - (_, elaboratedPlan, _) <- - rebuildInstallPlan verbosity - distDirLayout cabalDirLayout - projectConfig - localPackages - - let freezeConfig = projectFreezeConfig elaboratedPlan - writeProjectLocalFreezeConfig distDirLayout freezeConfig - notice verbosity $ - "Wrote freeze file: " ++ distProjectFile distDirLayout "freeze" - - where - verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - cliConfig = commandLineFlagsToProjectConfig - globalFlags configFlags configExFlags - installFlags haddockFlags - - - --- | Given the install plan, produce a config value with constraints that --- freezes the versions of packages used in the plan. --- -projectFreezeConfig :: ElaboratedInstallPlan -> ProjectConfig -projectFreezeConfig elaboratedPlan = - Monoid.mempty { - projectConfigShared = Monoid.mempty { - projectConfigConstraints = - concat (Map.elems (projectFreezeConstraints elaboratedPlan)) - } - } - --- | Given the install plan, produce solver constraints that will ensure the --- solver picks the same solution again in future in different environments. --- -projectFreezeConstraints :: ElaboratedInstallPlan - -> Map PackageName [(UserConstraint, ConstraintSource)] -projectFreezeConstraints plan = - -- - -- TODO: [required eventually] this is currently an underapproximation - -- since the constraints language is not expressive enough to specify the - -- precise solution. 
See https://github.com/haskell/cabal/issues/3502. - -- - -- For the moment we deal with multiple versions in the solution by using - -- constraints that allow either version. Also, we do not include any - -- /version/ constraints for packages that are local to the project (e.g. - -- if the solution has two instances of Cabal, one from the local project - -- and one pulled in as a setup deps then we exclude all constraints on - -- Cabal, not just the constraint for the local instance since any - -- constraint would apply to both instances). We do however keep flag - -- constraints of local packages. - -- - deleteLocalPackagesVersionConstraints - (Map.unionWith (++) versionConstraints flagConstraints) - where - versionConstraints :: Map PackageName [(UserConstraint, ConstraintSource)] - versionConstraints = - Map.mapWithKey - (\p v -> [(UserConstraint (UserAnyQualifier p) (PackagePropertyVersion v), - ConstraintSourceFreeze)]) - versionRanges - - versionRanges :: Map PackageName VersionRange - versionRanges = - Map.map simplifyVersionRange $ - Map.fromListWith unionVersionRanges $ - [ (packageName pkg, thisVersion (packageVersion pkg)) - | InstallPlan.PreExisting pkg <- InstallPlan.toList plan - ] - ++ [ (packageName pkg, thisVersion (packageVersion pkg)) - | InstallPlan.Configured pkg <- InstallPlan.toList plan - ] - - flagConstraints :: Map PackageName [(UserConstraint, ConstraintSource)] - flagConstraints = - Map.mapWithKey - (\p f -> [(UserConstraint (UserQualified UserQualToplevel p) (PackagePropertyFlags f), - ConstraintSourceFreeze)]) - flagAssignments - - flagAssignments :: Map PackageName FlagAssignment - flagAssignments = - Map.fromList - [ (pkgname, flags) - | InstallPlan.Configured elab <- InstallPlan.toList plan - , let flags = elabFlagAssignment elab - pkgname = packageName elab - , not (nullFlagAssignment flags) ] - - -- As described above, remove the version constraints on local packages, - -- but leave any flag constraints. 
- deleteLocalPackagesVersionConstraints - :: Map PackageName [(UserConstraint, ConstraintSource)] - -> Map PackageName [(UserConstraint, ConstraintSource)] - deleteLocalPackagesVersionConstraints = - Map.mergeWithKey - (\_pkgname () constraints -> - case filter (not . isVersionConstraint . fst) constraints of - [] -> Nothing - constraints' -> Just constraints') - (const Map.empty) id - localPackages - - isVersionConstraint (UserConstraint _ (PackagePropertyVersion _)) = True - isVersionConstraint _ = False - - localPackages :: Map PackageName () - localPackages = - Map.fromList - [ (packageName elab, ()) - | InstallPlan.Configured elab <- InstallPlan.toList plan - , elabLocalToProject elab - ] - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdHaddock.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdHaddock.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdHaddock.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdHaddock.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,206 +0,0 @@ -{-# LANGUAGE NamedFieldPuns #-} - --- | cabal-install CLI command: haddock --- -module Distribution.Client.CmdHaddock ( - -- * The @haddock@ CLI and action - haddockCommand, - haddockAction, - - -- * Internals exposed for testing - TargetProblem(..), - selectPackageTargets, - selectComponentTarget - ) where - -import Distribution.Client.ProjectOrchestration -import Distribution.Client.CmdErrorMessages - -import Distribution.Client.Setup - ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) -import qualified Distribution.Client.Setup as Client -import Distribution.Simple.Setup - ( HaddockFlags(..), fromFlagOrDefault ) -import Distribution.Simple.Command - ( CommandUI(..), usageAlternatives ) -import Distribution.Verbosity - ( Verbosity, normal ) 
-import Distribution.Simple.Utils - ( wrapText, die' ) - -import Control.Monad (when) - - -haddockCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags - ,HaddockFlags) -haddockCommand = Client.installCommand { - commandName = "new-haddock", - commandSynopsis = "Build Haddock documentation", - commandUsage = usageAlternatives "new-haddock" [ "[FLAGS] TARGET" ], - commandDescription = Just $ \_ -> wrapText $ - "Build Haddock documentation for the specified packages within the " - ++ "project.\n\n" - - ++ "Any package in the project can be specified. If no package is " - ++ "specified, the default is to build the documentation for the package " - ++ "in the current directory. The default behaviour is to build " - ++ "documentation for the exposed modules of the library component (if " - ++ "any). This can be changed with the '--internal', '--executables', " - ++ "'--tests', '--benchmarks' or '--all' flags.\n\n" - - ++ "Currently, documentation for dependencies is NOT built. This " - ++ "behavior may change in future.\n\n" - - ++ "Additional configuration flags can be specified on the command line " - ++ "and these extend the project configuration from the 'cabal.project', " - ++ "'cabal.project.local' and other files.", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " new-haddock pkgname" - ++ " Build documentation for the package named pkgname\n\n" - - ++ cmdCommonHelpTextNewBuildBeta - } - --TODO: [nice to have] support haddock on specific components, not just - -- whole packages and the silly --executables etc modifiers. - --- | The @haddock@ command is TODO. 
--- --- For more details on how this works, see the module --- "Distribution.Client.ProjectOrchestration" --- -haddockAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> GlobalFlags -> IO () -haddockAction (configFlags, configExFlags, installFlags, haddockFlags) - targetStrings globalFlags = do - - baseCtx <- establishProjectBaseContext verbosity cliConfig - - targetSelectors <- either (reportTargetSelectorProblems verbosity) return - =<< readTargetSelectors (localPackages baseCtx) Nothing targetStrings - - buildCtx <- - runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do - - when (buildSettingOnlyDeps (buildSettings baseCtx)) $ - die' verbosity - "The haddock command does not support '--only-dependencies'." - - -- When we interpret the targets on the command line, interpret them as - -- haddock targets - targets <- either (reportTargetProblems verbosity) return - $ resolveTargets - (selectPackageTargets haddockFlags) - selectComponentTarget - TargetProblemCommon - elaboratedPlan - Nothing - targetSelectors - - let elaboratedPlan' = pruneInstallPlanToTargets - TargetActionHaddock - targets - elaboratedPlan - return (elaboratedPlan', targets) - - printPlan verbosity baseCtx buildCtx - - buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx - runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes - where - verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - cliConfig = commandLineFlagsToProjectConfig - globalFlags configFlags configExFlags - installFlags haddockFlags - --- | This defines what a 'TargetSelector' means for the @haddock@ command. --- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, --- or otherwise classifies the problem. --- --- For the @haddock@ command we select all buildable libraries. Additionally, --- depending on the @--executables@ flag we also select all the buildable exes. --- We do similarly for test-suites, benchmarks and foreign libs. 
--- -selectPackageTargets :: HaddockFlags -> TargetSelector - -> [AvailableTarget k] -> Either TargetProblem [k] -selectPackageTargets haddockFlags targetSelector targets - - -- If there are any buildable targets then we select those - | not (null targetsBuildable) - = Right targetsBuildable - - -- If there are targets but none are buildable then we report those - | not (null targets) - = Left (TargetProblemNoneEnabled targetSelector targets') - - -- If there are no targets at all then we report that - | otherwise - = Left (TargetProblemNoTargets targetSelector) - where - targets' = forgetTargetsDetail (map disableNotRequested targets) - targetsBuildable = selectBuildableTargets (map disableNotRequested targets) - - -- When there's a target filter like "pkg:exes" then we do select exes, - -- but if it's just a target like "pkg" then we don't build docs for exes - -- unless they are requested by default (i.e. by using --executables) - disableNotRequested t@(AvailableTarget _ cname (TargetBuildable _ _) _) - | not (isRequested targetSelector (componentKind cname)) - = t { availableTargetStatus = TargetDisabledByUser } - disableNotRequested t = t - - isRequested (TargetPackage _ _ (Just _)) _ = True - isRequested (TargetAllPackages (Just _)) _ = True - isRequested _ LibKind = True --- isRequested _ SubLibKind = True --TODO: what about sublibs? 
- - -- TODO/HACK, we encode some defaults here as new-haddock's logic; - -- make sure this matches the defaults applied in - -- "Distribution.Client.ProjectPlanning"; this may need more work - -- to be done properly - -- - -- See also https://github.com/haskell/cabal/pull/4886 - isRequested _ FLibKind = fromFlagOrDefault False (haddockForeignLibs haddockFlags) - isRequested _ ExeKind = fromFlagOrDefault False (haddockExecutables haddockFlags) - isRequested _ TestKind = fromFlagOrDefault False (haddockTestSuites haddockFlags) - isRequested _ BenchKind = fromFlagOrDefault False (haddockBenchmarks haddockFlags) - - --- | For a 'TargetComponent' 'TargetSelector', check if the component can be --- selected. --- --- For the @haddock@ command we just need the basic checks on being buildable --- etc. --- -selectComponentTarget :: SubComponentTarget - -> AvailableTarget k -> Either TargetProblem k -selectComponentTarget subtarget = - either (Left . TargetProblemCommon) Right - . selectComponentTargetBasic subtarget - - --- | The various error conditions that can occur when matching a --- 'TargetSelector' against 'AvailableTarget's for the @haddock@ command. --- -data TargetProblem = - TargetProblemCommon TargetProblemCommon - - -- | The 'TargetSelector' matches targets but none are buildable - | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] - - -- | There are no targets at all - | TargetProblemNoTargets TargetSelector - deriving (Eq, Show) - -reportTargetProblems :: Verbosity -> [TargetProblem] -> IO a -reportTargetProblems verbosity = - die' verbosity . unlines . 
map renderTargetProblem - -renderTargetProblem :: TargetProblem -> String -renderTargetProblem (TargetProblemCommon problem) = - renderTargetProblemCommon "build documentation for" problem - -renderTargetProblem (TargetProblemNoneEnabled targetSelector targets) = - renderTargetProblemNoneEnabled "build documentation for" targetSelector targets - -renderTargetProblem(TargetProblemNoTargets targetSelector) = - renderTargetProblemNoTargets "build documentation for" targetSelector diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdInstall.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdInstall.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdInstall.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdInstall.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,757 +0,0 @@ -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE NamedFieldPuns #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE ViewPatterns #-} - --- | cabal-install CLI command: build --- -module Distribution.Client.CmdInstall ( - -- * The @build@ CLI and action - installCommand, - installAction, - - -- * Internals exposed for testing - TargetProblem(..), - selectPackageTargets, - selectComponentTarget, - establishDummyProjectBaseContext - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.ProjectOrchestration -import Distribution.Client.CmdErrorMessages -import Distribution.Client.CmdSdist - -import Distribution.Client.Setup - ( GlobalFlags(..), ConfigFlags(..), ConfigExFlags, InstallFlags - , configureExOptions, installOptions, liftOptions ) -import Distribution.Solver.Types.ConstraintSource - ( ConstraintSource(..) 
) -import Distribution.Client.Types - ( PackageSpecifier(..), PackageLocation(..), UnresolvedSourcePackage ) -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Package - ( Package(..), PackageName, mkPackageName ) -import Distribution.Types.PackageId - ( PackageIdentifier(..) ) -import Distribution.Client.ProjectConfig.Types - ( ProjectConfig(..), ProjectConfigShared(..) - , ProjectConfigBuildOnly(..), PackageConfig(..) - , getMapLast, getMapMappend, projectConfigLogsDir - , projectConfigStoreDir, projectConfigBuildOnly - , projectConfigDistDir, projectConfigConfigFile ) -import Distribution.Simple.Program.Db - ( userSpecifyPaths, userSpecifyArgss, defaultProgramDb - , modifyProgramSearchPath ) -import Distribution.Simple.Program.Find - ( ProgramSearchPathEntry(..) ) -import Distribution.Client.Config - ( getCabalDir ) -import Distribution.Simple.PackageIndex - ( InstalledPackageIndex, lookupPackageName, lookupUnitId ) -import Distribution.Types.InstalledPackageInfo - ( InstalledPackageInfo(..) ) -import Distribution.Types.Version - ( nullVersion ) -import Distribution.Types.VersionRange - ( thisVersion ) -import Distribution.Solver.Types.PackageConstraint - ( PackageProperty(..) ) -import Distribution.Client.IndexUtils - ( getSourcePackages, getInstalledPackages ) -import Distribution.Client.ProjectConfig - ( readGlobalConfig, projectConfigWithBuilderRepoContext - , resolveBuildTimeSettings, withProjectOrGlobalConfig ) -import Distribution.Client.DistDirLayout - ( defaultDistDirLayout, DistDirLayout(..), mkCabalDirLayout - , ProjectRoot(ProjectRootImplicit) - , storePackageDirectory, cabalStoreDirLayout - , CabalDirLayout(..), StoreDirLayout(..) 
) -import Distribution.Client.RebuildMonad - ( runRebuild ) -import Distribution.Client.InstallSymlink - ( symlinkBinary ) -import Distribution.Simple.Setup - ( Flag(Flag), HaddockFlags, fromFlagOrDefault, flagToMaybe, toFlag - , trueArg, configureOptions, haddockOptions, flagToList ) -import Distribution.Solver.Types.SourcePackage - ( SourcePackage(..) ) -import Distribution.ReadE - ( succeedReadE ) -import Distribution.Simple.Command - ( CommandUI(..), ShowOrParseArgs(..), OptionField(..) - , option, usageAlternatives, reqArg ) -import Distribution.Simple.Configure - ( configCompilerEx ) -import Distribution.Simple.Compiler - ( Compiler(..), CompilerId(..), CompilerFlavor(..) ) -import Distribution.Simple.GHC - ( ghcPlatformAndVersionString - , GhcImplInfo(..), getImplInfo - , GhcEnvironmentFileEntry(..) - , renderGhcEnvironmentFile, readGhcEnvironmentFile, ParseErrorExc ) -import Distribution.Types.UnitId - ( UnitId ) -import Distribution.Types.UnqualComponentName - ( UnqualComponentName, unUnqualComponentName ) -import Distribution.Verbosity - ( Verbosity, normal, lessVerbose ) -import Distribution.Simple.Utils - ( wrapText, die', notice, warn - , withTempDirectory, createDirectoryIfMissingVerbose - , ordNub ) -import Distribution.Utils.Generic - ( writeFileAtomic ) -import Distribution.Text - ( simpleParse ) -import Distribution.Pretty - ( prettyShow ) - -import Control.Exception - ( catch ) -import Control.Monad - ( mapM, mapM_ ) -import qualified Data.ByteString.Lazy.Char8 as BS -import Data.Either - ( partitionEithers ) -import Data.Ord - ( comparing, Down(..) 
) -import qualified Data.Map as Map -import Distribution.Utils.NubList - ( fromNubList ) -import System.Directory - ( getHomeDirectory, doesFileExist, createDirectoryIfMissing - , getTemporaryDirectory, makeAbsolute, doesDirectoryExist ) -import System.FilePath - ( (), takeDirectory, takeBaseName ) - -data NewInstallFlags = NewInstallFlags - { ninstInstallLibs :: Flag Bool - , ninstEnvironmentPath :: Flag FilePath - } - -defaultNewInstallFlags :: NewInstallFlags -defaultNewInstallFlags = NewInstallFlags - { ninstInstallLibs = toFlag False - , ninstEnvironmentPath = mempty - } - -newInstallOptions :: ShowOrParseArgs -> [OptionField NewInstallFlags] -newInstallOptions _ = - [ option [] ["lib"] - "Install libraries rather than executables from the target package." - ninstInstallLibs (\v flags -> flags { ninstInstallLibs = v }) - trueArg - , option [] ["package-env", "env"] - "Set the environment file that may be modified." - ninstEnvironmentPath (\pf flags -> flags { ninstEnvironmentPath = pf }) - (reqArg "ENV" (succeedReadE Flag) flagToList) - ] - -installCommand :: CommandUI ( ConfigFlags, ConfigExFlags, InstallFlags - , HaddockFlags, NewInstallFlags - ) -installCommand = CommandUI - { commandName = "new-install" - , commandSynopsis = "Install packages." - , commandUsage = usageAlternatives - "new-install" [ "[TARGETS] [FLAGS]" ] - , commandDescription = Just $ \_ -> wrapText $ - "Installs one or more packages. This is done by installing them " - ++ "in the store and symlinking the executables in the directory " - ++ "specified by the --symlink-bindir flag (`~/.cabal/bin/` by default). " - ++ "If you want the installed executables to be available globally, " - ++ "make sure that the PATH environment variable contains that directory. " - ++ "\n\n" - ++ "If TARGET is a library, it will be added to the global environment. " - ++ "When doing this, cabal will try to build a plan that includes all " - ++ "the previously installed libraries. 
This is currently not implemented." - , commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " new-install\n" - ++ " Install the package in the current directory\n" - ++ " " ++ pname ++ " new-install pkgname\n" - ++ " Install the package named pkgname" - ++ " (fetching it from hackage if necessary)\n" - ++ " " ++ pname ++ " new-install ./pkgfoo\n" - ++ " Install the package in the ./pkgfoo directory\n" - - ++ cmdCommonHelpTextNewBuildBeta - , commandOptions = \showOrParseArgs -> - liftOptions get1 set1 - -- Note: [Hidden Flags] - -- hide "constraint", "dependency", and - -- "exact-configuration" from the configure options. - (filter ((`notElem` ["constraint", "dependency" - , "exact-configuration"]) - . optionName) $ configureOptions showOrParseArgs) - ++ liftOptions get2 set2 (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag) - ++ liftOptions get3 set3 - -- hide "target-package-db" flag from the - -- install options. - (filter ((`notElem` ["target-package-db"]) - . optionName) $ - installOptions showOrParseArgs) - ++ liftOptions get4 set4 - -- hide "target-package-db" flag from the - -- install options. - (filter ((`notElem` ["v", "verbose"]) - . optionName) $ - haddockOptions showOrParseArgs) - ++ liftOptions get5 set5 (newInstallOptions showOrParseArgs) - , commandDefaultFlags = (mempty, mempty, mempty, mempty, defaultNewInstallFlags) - } - where - get1 (a,_,_,_,_) = a; set1 a (_,b,c,d,e) = (a,b,c,d,e) - get2 (_,b,_,_,_) = b; set2 b (a,_,c,d,e) = (a,b,c,d,e) - get3 (_,_,c,_,_) = c; set3 c (a,b,_,d,e) = (a,b,c,d,e) - get4 (_,_,_,d,_) = d; set4 d (a,b,c,_,e) = (a,b,c,d,e) - get5 (_,_,_,_,e) = e; set5 e (a,b,c,d,_) = (a,b,c,d,e) - - --- | The @install@ command actually serves four different needs. It installs: --- * exes: --- For example a program from hackage. The behavior is similar to the old --- install command, except that now conflicts between separate runs of the --- command are impossible thanks to the store. 
--- Exes are installed in the store like a normal dependency, then they are --- symlinked uin the directory specified by --symlink-bindir. --- To do this we need a dummy projectBaseContext containing the targets as --- estra packages and using a temporary dist directory. --- * libraries --- Libraries install through a similar process, but using GHC environment --- files instead of symlinks. This means that 'new-install'ing libraries --- only works on GHC >= 8.0. --- --- For more details on how this works, see the module --- "Distribution.Client.ProjectOrchestration" --- -installAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags, NewInstallFlags) - -> [String] -> GlobalFlags -> IO () -installAction (configFlags, configExFlags, installFlags, haddockFlags, newInstallFlags) - targetStrings globalFlags = do - -- We never try to build tests/benchmarks for remote packages. - -- So we set them as disabled by default and error if they are explicitly - -- enabled. - when (configTests configFlags' == Flag True) $ - die' verbosity $ "--enable-tests was specified, but tests can't " - ++ "be enabled in a remote package" - when (configBenchmarks configFlags' == Flag True) $ - die' verbosity $ "--enable-benchmarks was specified, but benchmarks can't " - ++ "be enabled in a remote package" - - let - withProject = do - let verbosity' = lessVerbose verbosity - - -- First, we need to learn about what's available to be installed. - localBaseCtx <- establishProjectBaseContext verbosity' cliConfig - let localDistDirLayout = distDirLayout localBaseCtx - pkgDb <- projectConfigWithBuilderRepoContext verbosity' (buildSettings localBaseCtx) (getSourcePackages verbosity) - - let - (targetStrings', packageIds) = partitionEithers . 
flip fmap targetStrings $ - \str -> case simpleParse str of - Just (pkgId :: PackageId) - | pkgVersion pkgId /= nullVersion -> Right pkgId - _ -> Left str - packageSpecifiers = flip fmap packageIds $ \case - PackageIdentifier{..} - | pkgVersion == nullVersion -> NamedPackage pkgName [] - | otherwise -> - NamedPackage pkgName [PackagePropertyVersion (thisVersion pkgVersion)] - packageTargets = flip TargetPackageNamed Nothing . pkgName <$> packageIds - - if null targetStrings' - then return (packageSpecifiers, packageTargets, projectConfig localBaseCtx) - else do - targetSelectors <- either (reportTargetSelectorProblems verbosity) return - =<< readTargetSelectors (localPackages localBaseCtx) Nothing targetStrings' - - (specs, selectors) <- withInstallPlan verbosity' localBaseCtx $ \elaboratedPlan _ -> do - -- Split into known targets and hackage packages. - (targets, hackageNames) <- case - resolveTargets - selectPackageTargets - selectComponentTarget - TargetProblemCommon - elaboratedPlan - (Just pkgDb) - targetSelectors of - Right targets -> do - -- Everything is a local dependency. - return (targets, []) - Left errs -> do - -- Not everything is local. - let - (errs', hackageNames) = partitionEithers . flip fmap errs $ \case - TargetProblemCommon (TargetAvailableInIndex name) -> Right name - err -> Left err - - when (not . null $ errs') $ reportTargetProblems verbosity errs' - - let - targetSelectors' = flip filter targetSelectors $ \case - TargetComponentUnknown name _ _ - | name `elem` hackageNames -> False - TargetPackageNamed name _ - | name `elem` hackageNames -> False - _ -> True - - -- This can't fail, because all of the errors are removed (or we've given up). 
- targets <- either (reportTargetProblems verbosity) return $ resolveTargets - selectPackageTargets - selectComponentTarget - TargetProblemCommon - elaboratedPlan - Nothing - targetSelectors' - - return (targets, hackageNames) - - let - planMap = InstallPlan.toMap elaboratedPlan - targetIds = Map.keys targets - - sdistize (SpecificSourcePackage spkg@SourcePackage{..}) = SpecificSourcePackage spkg' - where - sdistPath = distSdistFile localDistDirLayout packageInfoId TargzFormat - spkg' = spkg { packageSource = LocalTarballPackage sdistPath } - sdistize named = named - - local = sdistize <$> localPackages localBaseCtx - - gatherTargets :: UnitId -> TargetSelector - gatherTargets targetId = TargetPackageNamed pkgName Nothing - where - Just targetUnit = Map.lookup targetId planMap - PackageIdentifier{..} = packageId targetUnit - - targets' = fmap gatherTargets targetIds - - hackagePkgs :: [PackageSpecifier UnresolvedSourcePackage] - hackagePkgs = flip NamedPackage [] <$> hackageNames - hackageTargets :: [TargetSelector] - hackageTargets = flip TargetPackageNamed Nothing <$> hackageNames - - createDirectoryIfMissing True (distSdistDirectory localDistDirLayout) - - unless (Map.null targets) $ - mapM_ - (\(SpecificSourcePackage pkg) -> packageToSdist verbosity - (distProjectRootDirectory localDistDirLayout) (Archive TargzFormat) - (distSdistFile localDistDirLayout (packageId pkg) TargzFormat) pkg - ) (localPackages localBaseCtx) - - if null targets - then return (hackagePkgs, hackageTargets) - else return (local ++ hackagePkgs, targets' ++ hackageTargets) - - return (specs ++ packageSpecifiers, selectors ++ packageTargets, projectConfig localBaseCtx) - - withoutProject globalConfig = do - let - parsePkg pkgName - | Just (pkg :: PackageId) <- simpleParse pkgName = return pkg - | otherwise = die' verbosity ("Invalid package ID: " ++ pkgName) - packageIds <- mapM parsePkg targetStrings - let - packageSpecifiers = flip fmap packageIds $ \case - PackageIdentifier{..} - | 
pkgVersion == nullVersion -> NamedPackage pkgName [] - | otherwise -> - NamedPackage pkgName [PackagePropertyVersion (thisVersion pkgVersion)] - packageTargets = flip TargetPackageNamed Nothing . pkgName <$> packageIds - return (packageSpecifiers, packageTargets, globalConfig <> cliConfig) - - (specs, selectors, config) <- withProjectOrGlobalConfig verbosity globalConfigFlag - withProject withoutProject - - home <- getHomeDirectory - let - ProjectConfig { - projectConfigShared = ProjectConfigShared { - projectConfigHcFlavor, - projectConfigHcPath, - projectConfigHcPkg - }, - projectConfigLocalPackages = PackageConfig { - packageConfigProgramPaths, - packageConfigProgramArgs, - packageConfigProgramPathExtra - } - } = config - - hcFlavor = flagToMaybe projectConfigHcFlavor - hcPath = flagToMaybe projectConfigHcPath - hcPkg = flagToMaybe projectConfigHcPkg - - progDb = - userSpecifyPaths (Map.toList (getMapLast packageConfigProgramPaths)) - . userSpecifyArgss (Map.toList (getMapMappend packageConfigProgramArgs)) - . modifyProgramSearchPath - (++ [ ProgramSearchPathDir dir - | dir <- fromNubList packageConfigProgramPathExtra ]) - $ defaultProgramDb - - (compiler@Compiler { compilerId = - compilerId@(CompilerId compilerFlavor compilerVersion) }, platform, progDb') <- - configCompilerEx hcFlavor hcPath hcPkg progDb verbosity - - let - globalEnv name = - home ".ghc" ghcPlatformAndVersionString platform compilerVersion - "environments" name - localEnv dir = - dir ".ghc.environment." ++ ghcPlatformAndVersionString platform compilerVersion - - GhcImplInfo{ supportsPkgEnvFiles } = getImplInfo compiler - -- Why? We know what the first part will be, we only care about the packages. - filterEnvEntries = filter $ \case - GhcEnvFilePackageId _ -> True - _ -> False - - envFile <- case flagToMaybe (ninstEnvironmentPath newInstallFlags) of - Just spec - -- Is spec a bare word without any "pathy" content, then it refers to - -- a named global environment. 
- | takeBaseName spec == spec -> return (globalEnv spec) - | otherwise -> do - spec' <- makeAbsolute spec - isDir <- doesDirectoryExist spec' - if isDir - -- If spec is a directory, then make an ambient environment inside - -- that directory. - then return (localEnv spec') - -- Otherwise, treat it like a literal file path. - else return spec' - Nothing -> return (globalEnv "default") - - envFileExists <- doesFileExist envFile - envEntries <- filterEnvEntries <$> if - (compilerFlavor == GHC || compilerFlavor == GHCJS) - && supportsPkgEnvFiles && envFileExists - then catch (readGhcEnvironmentFile envFile) $ \(_ :: ParseErrorExc) -> - warn verbosity ("The environment file " ++ envFile ++ - " is unparsable. Libraries cannot be installed.") >> return [] - else return [] - - cabalDir <- getCabalDir - let - mstoreDir = flagToMaybe (globalStoreDir globalFlags) - mlogsDir = flagToMaybe (globalLogsDir globalFlags) - cabalLayout = mkCabalDirLayout cabalDir mstoreDir mlogsDir - packageDbs = storePackageDBStack (cabalStoreDirLayout cabalLayout) compilerId - - installedIndex <- getInstalledPackages verbosity compiler packageDbs progDb' - - let (envSpecs, envEntries') = environmentFileToSpecifiers installedIndex envEntries - - -- Second, we need to use a fake project to let Cabal build the - -- installables correctly. For that, we need a place to put a - -- temporary dist directory. - globalTmp <- getTemporaryDirectory - withTempDirectory - verbosity - globalTmp - "cabal-install." 
- $ \tmpDir -> do - baseCtx <- establishDummyProjectBaseContext - verbosity - config - tmpDir - (envSpecs ++ specs) - - buildCtx <- - runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do - - -- Interpret the targets on the command line as build targets - targets <- either (reportTargetProblems verbosity) return - $ resolveTargets - selectPackageTargets - selectComponentTarget - TargetProblemCommon - elaboratedPlan - Nothing - selectors - - let elaboratedPlan' = pruneInstallPlanToTargets - TargetActionBuild - targets - elaboratedPlan - elaboratedPlan'' <- - if buildSettingOnlyDeps (buildSettings baseCtx) - then either (reportCannotPruneDependencies verbosity) return $ - pruneInstallPlanToDependencies (Map.keysSet targets) - elaboratedPlan' - else return elaboratedPlan' - - return (elaboratedPlan'', targets) - - printPlan verbosity baseCtx buildCtx - - buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx - - let - mkPkgBinDir = ( "bin") . - storePackageDirectory - (cabalStoreDirLayout $ cabalDirLayout baseCtx) - compilerId - installLibs = fromFlagOrDefault False (ninstInstallLibs newInstallFlags) - - when (not installLibs) $ do - -- If there are exes, symlink them - let symlinkBindirUnknown = - "symlink-bindir is not defined. Set it in your cabal config file " - ++ "or use --symlink-bindir=" - symlinkBindir <- fromFlagOrDefault (die' verbosity symlinkBindirUnknown) - $ fmap makeAbsolute - $ projectConfigSymlinkBinDir - $ projectConfigBuildOnly - $ projectConfig $ baseCtx - createDirectoryIfMissingVerbose verbosity False symlinkBindir - traverse_ (symlinkBuiltPackage verbosity mkPkgBinDir symlinkBindir) - $ Map.toList $ targetsMap buildCtx - runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes - - when installLibs $ - if supportsPkgEnvFiles - then do - -- Why do we get it again? If we updated a globalPackage then we need - -- the new version. 
- installedIndex' <- getInstalledPackages verbosity compiler packageDbs progDb' - let - getLatest = fmap (head . snd) . take 1 . sortBy (comparing (Down . fst)) - . lookupPackageName installedIndex' - globalLatest = concat (getLatest <$> globalPackages) - - baseEntries = - GhcEnvFileClearPackageDbStack : fmap GhcEnvFilePackageDb packageDbs - globalEntries = GhcEnvFilePackageId . installedUnitId <$> globalLatest - pkgEntries = ordNub $ - globalEntries - ++ envEntries' - ++ entriesForLibraryComponents (targetsMap buildCtx) - contents' = renderGhcEnvironmentFile (baseEntries ++ pkgEntries) - createDirectoryIfMissing True (takeDirectory envFile) - writeFileAtomic envFile (BS.pack contents') - else - warn verbosity $ - "The current compiler doesn't support safely installing libraries, " - ++ "so only executables will be available. (Library installation is " - ++ "supported on GHC 8.0+ only)" - where - configFlags' = disableTestsBenchsByDefault configFlags - verbosity = fromFlagOrDefault normal (configVerbosity configFlags') - cliConfig = commandLineFlagsToProjectConfig - globalFlags configFlags' configExFlags - installFlags haddockFlags - globalConfigFlag = projectConfigConfigFile (projectConfigShared cliConfig) - -globalPackages :: [PackageName] -globalPackages = mkPackageName <$> - [ "ghc", "hoopl", "bytestring", "unix", "base", "time", "hpc", "filepath" - , "process", "array", "integer-gmp", "containers", "ghc-boot", "binary" - , "ghc-prim", "ghci", "rts", "terminfo", "transformers", "deepseq" - , "ghc-boot-th", "pretty", "template-haskell", "directory", "text" - , "bin-package-db" - ] - -environmentFileToSpecifiers :: InstalledPackageIndex -> [GhcEnvironmentFileEntry] - -> ([PackageSpecifier a], [GhcEnvironmentFileEntry]) -environmentFileToSpecifiers ipi = foldMap $ \case - (GhcEnvFilePackageId unitId) - | Just InstalledPackageInfo{ sourcePackageId = PackageIdentifier{..}, installedUnitId } - <- lookupUnitId ipi unitId - , let pkgSpec = NamedPackage pkgName 
[PackagePropertyVersion (thisVersion pkgVersion)] - -> if pkgName `elem` globalPackages - then ([pkgSpec], []) - else ([pkgSpec], [GhcEnvFilePackageId installedUnitId]) - _ -> ([], []) - - --- | Disables tests and benchmarks if they weren't explicitly enabled. -disableTestsBenchsByDefault :: ConfigFlags -> ConfigFlags -disableTestsBenchsByDefault configFlags = - configFlags { configTests = Flag False <> configTests configFlags - , configBenchmarks = Flag False <> configBenchmarks configFlags } - --- | Symlink every exe from a package from the store to a given location -symlinkBuiltPackage :: Verbosity - -> (UnitId -> FilePath) -- ^ A function to get an UnitId's - -- store directory - -> FilePath -- ^ Where to put the symlink - -> ( UnitId - , [(ComponentTarget, [TargetSelector])] ) - -> IO () -symlinkBuiltPackage verbosity mkSourceBinDir destDir (pkg, components) = - traverse_ symlinkAndWarn exes - where - exes = catMaybes $ (exeMaybe . fst) <$> components - exeMaybe (ComponentTarget (CExeName exe) _) = Just exe - exeMaybe _ = Nothing - symlinkAndWarn exe = do - success <- symlinkBuiltExe verbosity (mkSourceBinDir pkg) destDir exe - unless success $ warn verbosity $ "Symlink for " - <> prettyShow exe - <> " already exists. Not overwriting." - --- | Symlink a specific exe. -symlinkBuiltExe :: Verbosity -> FilePath -> FilePath -> UnqualComponentName -> IO Bool -symlinkBuiltExe verbosity sourceDir destDir exe = do - notice verbosity $ "Symlinking " ++ unUnqualComponentName exe - symlinkBinary - destDir - sourceDir - exe - $ unUnqualComponentName exe - --- | Create 'GhcEnvironmentFileEntry's for packages with exposed libraries. 
-entriesForLibraryComponents :: TargetsMap -> [GhcEnvironmentFileEntry] -entriesForLibraryComponents = Map.foldrWithKey' (\k v -> mappend (go k v)) [] - where - hasLib :: (ComponentTarget, [TargetSelector]) -> Bool - hasLib (ComponentTarget CLibName _, _) = True - hasLib (ComponentTarget (CSubLibName _) _, _) = True - hasLib _ = False - - go :: UnitId -> [(ComponentTarget, [TargetSelector])] -> [GhcEnvironmentFileEntry] - go unitId targets - | any hasLib targets = [GhcEnvFilePackageId unitId] - | otherwise = [] - --- | Create a dummy project context, without a .cabal or a .cabal.project file --- (a place where to put a temporary dist directory is still needed) -establishDummyProjectBaseContext - :: Verbosity - -> ProjectConfig - -> FilePath - -- ^ Where to put the dist directory - -> [PackageSpecifier UnresolvedSourcePackage] - -- ^ The packages to be included in the project - -> IO ProjectBaseContext -establishDummyProjectBaseContext verbosity cliConfig tmpDir localPackages = do - - cabalDir <- getCabalDir - - -- Create the dist directories - createDirectoryIfMissingVerbose verbosity True $ distDirectory distDirLayout - createDirectoryIfMissingVerbose verbosity True $ - distProjectCacheDirectory distDirLayout - - globalConfig <- runRebuild "" - $ readGlobalConfig verbosity - $ projectConfigConfigFile - $ projectConfigShared cliConfig - let projectConfig = globalConfig <> cliConfig - - let ProjectConfigBuildOnly { - projectConfigLogsDir - } = projectConfigBuildOnly projectConfig - - ProjectConfigShared { - projectConfigStoreDir - } = projectConfigShared projectConfig - - mlogsDir = flagToMaybe projectConfigLogsDir - mstoreDir = flagToMaybe projectConfigStoreDir - cabalDirLayout = mkCabalDirLayout cabalDir mstoreDir mlogsDir - - buildSettings = resolveBuildTimeSettings - verbosity cabalDirLayout - projectConfig - - return ProjectBaseContext { - distDirLayout, - cabalDirLayout, - projectConfig, - localPackages, - buildSettings - } - where - mdistDirectory = 
flagToMaybe - $ projectConfigDistDir - $ projectConfigShared cliConfig - projectRoot = ProjectRootImplicit tmpDir - distDirLayout = defaultDistDirLayout projectRoot - mdistDirectory - --- | This defines what a 'TargetSelector' means for the @bench@ command. --- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, --- or otherwise classifies the problem. --- --- For the @build@ command select all components except non-buildable --- and disabled tests\/benchmarks, fail if there are no such --- components --- -selectPackageTargets :: TargetSelector - -> [AvailableTarget k] -> Either TargetProblem [k] -selectPackageTargets targetSelector targets - - -- If there are any buildable targets then we select those - | not (null targetsBuildable) - = Right targetsBuildable - - -- If there are targets but none are buildable then we report those - | not (null targets) - = Left (TargetProblemNoneEnabled targetSelector targets') - - -- If there are no targets at all then we report that - | otherwise - = Left (TargetProblemNoTargets targetSelector) - where - targets' = forgetTargetsDetail targets - targetsBuildable = selectBuildableTargetsWith - (buildable targetSelector) - targets - - -- When there's a target filter like "pkg:tests" then we do select tests, - -- but if it's just a target like "pkg" then we don't build tests unless - -- they are requested by default (i.e. by using --enable-tests) - buildable (TargetPackage _ _ Nothing) TargetNotRequestedByDefault = False - buildable (TargetAllPackages Nothing) TargetNotRequestedByDefault = False - buildable _ _ = True - --- | For a 'TargetComponent' 'TargetSelector', check if the component can be --- selected. --- --- For the @build@ command we just need the basic checks on being buildable etc. --- -selectComponentTarget :: SubComponentTarget - -> AvailableTarget k -> Either TargetProblem k -selectComponentTarget subtarget = - either (Left . TargetProblemCommon) Right - . 
selectComponentTargetBasic subtarget - - --- | The various error conditions that can occur when matching a --- 'TargetSelector' against 'AvailableTarget's for the @build@ command. --- -data TargetProblem = - TargetProblemCommon TargetProblemCommon - - -- | The 'TargetSelector' matches targets but none are buildable - | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] - - -- | There are no targets at all - | TargetProblemNoTargets TargetSelector - deriving (Eq, Show) - -reportTargetProblems :: Verbosity -> [TargetProblem] -> IO a -reportTargetProblems verbosity = - die' verbosity . unlines . map renderTargetProblem - -renderTargetProblem :: TargetProblem -> String -renderTargetProblem (TargetProblemCommon problem) = - renderTargetProblemCommon "build" problem -renderTargetProblem (TargetProblemNoneEnabled targetSelector targets) = - renderTargetProblemNoneEnabled "build" targetSelector targets -renderTargetProblem(TargetProblemNoTargets targetSelector) = - renderTargetProblemNoTargets "build" targetSelector - -reportCannotPruneDependencies :: Verbosity -> CannotPruneDependencies -> IO a -reportCannotPruneDependencies verbosity = - die' verbosity . 
renderCannotPruneDependencies diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdLegacy.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdLegacy.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdLegacy.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdLegacy.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,173 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ViewPatterns #-} -module Distribution.Client.CmdLegacy ( legacyCmd, legacyWrapperCmd, newCmd ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.Sandbox - ( loadConfigOrSandboxConfig, findSavedDistPref ) -import qualified Distribution.Client.Setup as Client -import Distribution.Client.SetupWrapper - ( SetupScriptOptions(..), setupWrapper, defaultSetupScriptOptions ) -import qualified Distribution.Simple.Setup as Setup -import Distribution.Simple.Command -import Distribution.Simple.Utils - ( warn, wrapText ) -import Distribution.Verbosity - ( Verbosity, normal ) - -import Control.Exception - ( SomeException(..), try ) -import qualified Data.Text as T - --- Tweaked versions of code from Main. 
-regularCmd :: (HasVerbosity flags) => CommandUI flags -> (flags -> [String] -> globals -> IO action) -> Bool -> CommandSpec (globals -> IO action) -regularCmd ui action shouldWarn = - CommandSpec ui ((flip commandAddAction) (\flags extra globals -> showWarning flags >> action flags extra globals)) NormalCommand - where - showWarning flags = if shouldWarn - then warn (verbosity flags) (deprecationNote (commandName ui) ++ "\n") - else return () - -wrapperCmd :: Monoid flags => CommandUI flags -> (flags -> Setup.Flag Verbosity) -> (flags -> Setup.Flag String) -> Bool -> CommandSpec (Client.GlobalFlags -> IO ()) -wrapperCmd ui verbosity' distPref shouldWarn = - CommandSpec ui (\ui' -> wrapperAction ui' verbosity' distPref shouldWarn) NormalCommand - -wrapperAction :: Monoid flags => CommandUI flags -> (flags -> Setup.Flag Verbosity) -> (flags -> Setup.Flag String) -> Bool -> Command (Client.GlobalFlags -> IO ()) -wrapperAction command verbosityFlag distPrefFlag shouldWarn = - commandAddAction command - { commandDefaultFlags = mempty } $ \flags extraArgs globalFlags -> do - let verbosity' = Setup.fromFlagOrDefault normal (verbosityFlag flags) - - if shouldWarn - then warn verbosity' (deprecationNote (commandName command) ++ "\n") - else return () - - load <- try (loadConfigOrSandboxConfig verbosity' globalFlags) - let config = either (\(SomeException _) -> mempty) snd load - distPref <- findSavedDistPref config (distPrefFlag flags) - let setupScriptOptions = defaultSetupScriptOptions { useDistPref = distPref } - - let command' = command { commandName = T.unpack . T.replace "v1-" "" . T.pack . 
commandName $ command } - - setupWrapper verbosity' setupScriptOptions Nothing - command' (const flags) (const extraArgs) - --- - -class HasVerbosity a where - verbosity :: a -> Verbosity - -instance HasVerbosity (Setup.Flag Verbosity) where - verbosity = Setup.fromFlagOrDefault normal - -instance (HasVerbosity a) => HasVerbosity (a, b) where - verbosity (a, _) = verbosity a - -instance (HasVerbosity b) => HasVerbosity (a, b, c) where - verbosity (_ , b, _) = verbosity b - -instance (HasVerbosity a) => HasVerbosity (a, b, c, d) where - verbosity (a, _, _, _) = verbosity a - -instance HasVerbosity Setup.BuildFlags where - verbosity = verbosity . Setup.buildVerbosity - -instance HasVerbosity Setup.ConfigFlags where - verbosity = verbosity . Setup.configVerbosity - -instance HasVerbosity Setup.ReplFlags where - verbosity = verbosity . Setup.replVerbosity - -instance HasVerbosity Client.FreezeFlags where - verbosity = verbosity . Client.freezeVerbosity - -instance HasVerbosity Setup.HaddockFlags where - verbosity = verbosity . Setup.haddockVerbosity - -instance HasVerbosity Client.ExecFlags where - verbosity = verbosity . Client.execVerbosity - -instance HasVerbosity Client.UpdateFlags where - verbosity = verbosity . Client.updateVerbosity - -instance HasVerbosity Setup.CleanFlags where - verbosity = verbosity . Setup.cleanVerbosity - -instance HasVerbosity Client.SDistFlags where - verbosity = verbosity . Client.sDistVerbosity - -instance HasVerbosity Client.SandboxFlags where - verbosity = verbosity . Client.sandboxVerbosity - -instance HasVerbosity Setup.DoctestFlags where - verbosity = verbosity . 
Setup.doctestVerbosity - --- - -deprecationNote :: String -> String -deprecationNote cmd = wrapText $ - "The " ++ cmd ++ " command is a part of the legacy v1 style of cabal usage.\n\n" ++ - - "Please switch to using either the new project style and the new-" ++ cmd ++ - " command or the legacy v1-" ++ cmd ++ " alias as new-style projects will" ++ - " become the default in the next version of cabal-install. Please file a" ++ - " bug if you cannot replicate a working v1- use case with the new-style commands.\n\n" ++ - - "For more information, see: https://wiki.haskell.org/Cabal/NewBuild\n" - -legacyNote :: String -> String -legacyNote cmd = wrapText $ - "The v1-" ++ cmd ++ " command is a part of the legacy v1 style of cabal usage.\n\n" ++ - - "It is a legacy feature and will be removed in a future release of cabal-install." ++ - " Please file a bug if you cannot replicate a working v1- use case with the new-style" ++ - " commands.\n\n" ++ - - "For more information, see: https://wiki.haskell.org/Cabal/NewBuild\n" - -toLegacyCmd :: (Bool -> CommandSpec (globals -> IO action)) -> [CommandSpec (globals -> IO action)] -toLegacyCmd mkSpec = [toDeprecated (mkSpec True), toLegacy (mkSpec False)] - where - legacyMsg = T.unpack . T.replace "v1-" "" . T.pack - - toLegacy (CommandSpec origUi@CommandUI{..} action type') = CommandSpec legUi action type' - where - legUi = origUi - { commandName = "v1-" ++ commandName - , commandNotes = Just $ \pname -> case commandNotes of - Just notes -> notes pname ++ "\n" ++ legacyNote commandName - Nothing -> legacyNote commandName - } - - toDeprecated (CommandSpec origUi@CommandUI{..} action type') = CommandSpec depUi action type' - where - depUi = origUi - { commandName = legacyMsg commandName - , commandUsage = legacyMsg . commandUsage - , commandDescription = (legacyMsg .) 
<$> commandDescription - , commandNotes = Just $ \pname -> case commandNotes of - Just notes -> legacyMsg (notes pname) ++ "\n" ++ deprecationNote commandName - Nothing -> deprecationNote commandName - } - -legacyCmd :: (HasVerbosity flags) => CommandUI flags -> (flags -> [String] -> globals -> IO action) -> [CommandSpec (globals -> IO action)] -legacyCmd ui action = toLegacyCmd (regularCmd ui action) - -legacyWrapperCmd :: Monoid flags => CommandUI flags -> (flags -> Setup.Flag Verbosity) -> (flags -> Setup.Flag String) -> [CommandSpec (Client.GlobalFlags -> IO ())] -legacyWrapperCmd ui verbosity' distPref = toLegacyCmd (wrapperCmd ui verbosity' distPref) - -newCmd :: CommandUI flags -> (flags -> [String] -> globals -> IO action) -> [CommandSpec (globals -> IO action)] -newCmd origUi@CommandUI{..} action = [cmd v2Ui, cmd origUi] - where - cmd ui = CommandSpec ui (flip commandAddAction action) NormalCommand - v2Msg = T.unpack . T.replace "new-" "v2-" . T.pack - v2Ui = origUi - { commandName = v2Msg commandName - , commandUsage = v2Msg . commandUsage - , commandDescription = (v2Msg .) <$> commandDescription - , commandNotes = (v2Msg .) 
<$> commandDescription - } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdRepl.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdRepl.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdRepl.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdRepl.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,578 +0,0 @@ -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE NamedFieldPuns #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE RecordWildCards #-} - --- | cabal-install CLI command: repl --- -module Distribution.Client.CmdRepl ( - -- * The @repl@ CLI and action - replCommand, - replAction, - - -- * Internals exposed for testing - TargetProblem(..), - selectPackageTargets, - selectComponentTarget - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Compat.Lens -import qualified Distribution.Types.Lens as L - -import Distribution.Client.CmdErrorMessages -import Distribution.Client.CmdInstall - ( establishDummyProjectBaseContext ) -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.ProjectBuilding - ( rebuildTargetsDryRun, improveInstallPlanWithUpToDatePackages ) -import Distribution.Client.ProjectConfig - ( ProjectConfig(..), withProjectOrGlobalConfig - , projectConfigConfigFile, readGlobalConfig ) -import Distribution.Client.ProjectOrchestration -import Distribution.Client.ProjectPlanning - ( ElaboratedSharedConfig(..), ElaboratedInstallPlan ) -import Distribution.Client.ProjectPlanning.Types - ( elabOrderExeDependencies ) -import Distribution.Client.RebuildMonad - ( runRebuild ) -import Distribution.Client.Setup - ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) -import qualified Distribution.Client.Setup as Client -import 
Distribution.Client.Types - ( PackageLocation(..), PackageSpecifier(..), UnresolvedSourcePackage ) -import Distribution.Simple.Setup - ( HaddockFlags, fromFlagOrDefault, replOptions - , Flag(..), toFlag, trueArg, falseArg ) -import Distribution.Simple.Command - ( CommandUI(..), liftOption, usageAlternatives, option - , ShowOrParseArgs, OptionField, reqArg ) -import Distribution.Package - ( Package(..), packageName, UnitId, installedUnitId ) -import Distribution.PackageDescription.PrettyPrint -import Distribution.Parsec.Class - ( Parsec(..) ) -import Distribution.Pretty - ( prettyShow ) -import Distribution.ReadE - ( ReadE, parsecToReadE ) -import qualified Distribution.SPDX.License as SPDX -import Distribution.Solver.Types.SourcePackage - ( SourcePackage(..) ) -import Distribution.Types.BuildInfo - ( BuildInfo(..), emptyBuildInfo ) -import Distribution.Types.ComponentName - ( componentNameString ) -import Distribution.Types.CondTree - ( CondTree(..), traverseCondTreeC ) -import Distribution.Types.Dependency - ( Dependency(..) ) -import Distribution.Types.GenericPackageDescription - ( emptyGenericPackageDescription ) -import Distribution.Types.PackageDescription - ( PackageDescription(..), emptyPackageDescription ) -import Distribution.Types.Library - ( Library(..), emptyLibrary ) -import Distribution.Types.PackageId - ( PackageIdentifier(..) ) -import Distribution.Types.Version - ( mkVersion, version0 ) -import Distribution.Types.VersionRange - ( anyVersion ) -import Distribution.Text - ( display ) -import Distribution.Verbosity - ( Verbosity, normal, lessVerbose ) -import Distribution.Simple.Utils - ( wrapText, die', debugNoWrap, ordNub, createTempDirectory, handleDoesNotExist ) -import Language.Haskell.Extension - ( Language(..) 
) - -import Data.List - ( (\\) ) -import qualified Data.Map as Map -import qualified Data.Set as Set -import System.Directory - ( getTemporaryDirectory, removeDirectoryRecursive ) -import System.FilePath - ( () ) - -type ReplFlags = [String] - -data EnvFlags = EnvFlags - { envPackages :: [Dependency] - , envIncludeTransitive :: Flag Bool - , envIgnoreProject :: Flag Bool - } - -defaultEnvFlags :: EnvFlags -defaultEnvFlags = EnvFlags - { envPackages = [] - , envIncludeTransitive = toFlag True - , envIgnoreProject = toFlag False - } - -envOptions :: ShowOrParseArgs -> [OptionField EnvFlags] -envOptions _ = - [ option ['b'] ["build-depends"] - "Include an additional package in the environment presented to GHCi." - envPackages (\p flags -> flags { envPackages = p ++ envPackages flags }) - (reqArg "DEPENDENCY" dependencyReadE (fmap prettyShow :: [Dependency] -> [String])) - , option [] ["no-transitive-deps"] - "Don't automatically include transitive dependencies of requested packages." - envIncludeTransitive (\p flags -> flags { envIncludeTransitive = p }) - falseArg - , option ['z'] ["ignore-project"] - "Only include explicitly specified packages (and 'base')." - envIgnoreProject (\p flags -> flags { envIgnoreProject = p }) - trueArg - ] - where - dependencyReadE :: ReadE [Dependency] - dependencyReadE = - fmap pure $ - parsecToReadE - ("couldn't parse dependency: " ++) - parsec - -replCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags, ReplFlags, EnvFlags) -replCommand = Client.installCommand { - commandName = "new-repl", - commandSynopsis = "Open an interactive session for the given component.", - commandUsage = usageAlternatives "new-repl" [ "[TARGET] [FLAGS]" ], - commandDescription = Just $ \_ -> wrapText $ - "Open an interactive session for a component within the project. 
The " - ++ "available targets are the same as for the 'new-build' command: " - ++ "individual components within packages in the project, including " - ++ "libraries, executables, test-suites or benchmarks. Packages can " - ++ "also be specified in which case the library component in the " - ++ "package will be used, or the (first listed) executable in the " - ++ "package if there is no library.\n\n" - - ++ "Dependencies are built or rebuilt as necessary. Additional " - ++ "configuration flags can be specified on the command line and these " - ++ "extend the project configuration from the 'cabal.project', " - ++ "'cabal.project.local' and other files.", - commandNotes = Just $ \pname -> - "Examples, open an interactive session:\n" - ++ " " ++ pname ++ " new-repl\n" - ++ " for the default component in the package in the current directory\n" - ++ " " ++ pname ++ " new-repl pkgname\n" - ++ " for the default component in the package named 'pkgname'\n" - ++ " " ++ pname ++ " new-repl ./pkgfoo\n" - ++ " for the default component in the package in the ./pkgfoo directory\n" - ++ " " ++ pname ++ " new-repl cname\n" - ++ " for the component named 'cname'\n" - ++ " " ++ pname ++ " new-repl pkgname:cname\n" - ++ " for the component 'cname' in the package 'pkgname'\n\n" - ++ " " ++ pname ++ " new-repl --build-depends lens\n" - ++ " add the latest version of the library 'lens' to the default component " - ++ "(or no componentif there is no project present)\n" - ++ " " ++ pname ++ " new-repl --build-depends \"lens >= 4.15 && < 4.18\"\n" - ++ " add a version (constrained between 4.15 and 4.18) of the library 'lens' " - ++ "to the default component (or no component if there is no project present)\n" - - ++ cmdCommonHelpTextNewBuildBeta, - commandDefaultFlags = (configFlags,configExFlags,installFlags,haddockFlags,[],defaultEnvFlags), - commandOptions = \showOrParseArgs -> - map liftOriginal (commandOptions Client.installCommand showOrParseArgs) - ++ map liftReplOpts (replOptions 
showOrParseArgs) - ++ map liftEnvOpts (envOptions showOrParseArgs) - } - where - (configFlags,configExFlags,installFlags,haddockFlags) = commandDefaultFlags Client.installCommand - - liftOriginal = liftOption projectOriginal updateOriginal - liftReplOpts = liftOption projectReplOpts updateReplOpts - liftEnvOpts = liftOption projectEnvOpts updateEnvOpts - - projectOriginal (a,b,c,d,_,_) = (a,b,c,d) - updateOriginal (a,b,c,d) (_,_,_,_,e,f) = (a,b,c,d,e,f) - - projectReplOpts (_,_,_,_,e,_) = e - updateReplOpts e (a,b,c,d,_,f) = (a,b,c,d,e,f) - - projectEnvOpts (_,_,_,_,_,f) = f - updateEnvOpts f (a,b,c,d,e,_) = (a,b,c,d,e,f) - --- | The @repl@ command is very much like @build@. It brings the install plan --- up to date, selects that part of the plan needed by the given or implicit --- repl target and then executes the plan. --- --- Compared to @build@ the difference is that only one target is allowed --- (given or implicit) and the target type is repl rather than build. The --- general plan execution infrastructure handles both build and repl targets. 
--- --- For more details on how this works, see the module --- "Distribution.Client.ProjectOrchestration" --- -replAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags, ReplFlags, EnvFlags) - -> [String] -> GlobalFlags -> IO () -replAction (configFlags, configExFlags, installFlags, haddockFlags, replFlags, envFlags) - targetStrings globalFlags = do - let - ignoreProject = fromFlagOrDefault False (envIgnoreProject envFlags) - with = withProject cliConfig verbosity targetStrings - without config = withoutProject (config <> cliConfig) verbosity targetStrings - - (baseCtx, targetSelectors, finalizer) <- if ignoreProject - then do - globalConfig <- runRebuild "" $ readGlobalConfig verbosity globalConfigFlag - without globalConfig - else withProjectOrGlobalConfig verbosity globalConfigFlag with without - - when (buildSettingOnlyDeps (buildSettings baseCtx)) $ - die' verbosity $ "The repl command does not support '--only-dependencies'. " - ++ "You may wish to use 'build --only-dependencies' and then " - ++ "use 'repl'." - - (originalComponent, baseCtx') <- if null (envPackages envFlags) - then return (Nothing, baseCtx) - else - -- Unfortunately, the best way to do this is to let the normal solver - -- help us resolve the targets, but that isn't ideal for performance, - -- especially in the no-project case. - withInstallPlan (lessVerbose verbosity) baseCtx $ \elaboratedPlan _ -> do - targets <- validatedTargets elaboratedPlan targetSelectors - - let - (unitId, _) = head $ Map.toList targets - originalDeps = installedUnitId <$> InstallPlan.directDeps elaboratedPlan unitId - oci = OriginalComponentInfo unitId originalDeps - Just pkgId = packageId <$> InstallPlan.lookup elaboratedPlan unitId - baseCtx' = addDepsToProjectTarget (envPackages envFlags) pkgId baseCtx - - return (Just oci, baseCtx') - - -- Now, we run the solver again with the added packages. 
While the graph - -- won't actually reflect the addition of transitive dependencies, - -- they're going to be available already and will be offered to the REPL - -- and that's good enough. - -- - -- In addition, to avoid a *third* trip through the solver, we are - -- replicating the second half of 'runProjectPreBuildPhase' by hand - -- here. - (buildCtx, replFlags') <- withInstallPlan verbosity baseCtx' $ - \elaboratedPlan elaboratedShared' -> do - let ProjectBaseContext{..} = baseCtx' - - -- Recalculate with updated project. - targets <- validatedTargets elaboratedPlan targetSelectors - - let - elaboratedPlan' = pruneInstallPlanToTargets - TargetActionRepl - targets - elaboratedPlan - includeTransitive = fromFlagOrDefault True (envIncludeTransitive envFlags) - replFlags' = case originalComponent of - Just oci -> generateReplFlags includeTransitive elaboratedPlan' oci - Nothing -> [] - - pkgsBuildStatus <- rebuildTargetsDryRun distDirLayout elaboratedShared' - elaboratedPlan' - - let elaboratedPlan'' = improveInstallPlanWithUpToDatePackages - pkgsBuildStatus elaboratedPlan' - debugNoWrap verbosity (InstallPlan.showInstallPlan elaboratedPlan'') - - let - buildCtx = ProjectBuildContext - { elaboratedPlanOriginal = elaboratedPlan - , elaboratedPlanToExecute = elaboratedPlan'' - , elaboratedShared = elaboratedShared' - , pkgsBuildStatus - , targetsMap = targets - } - return (buildCtx, replFlags') - - let buildCtx' = buildCtx - { elaboratedShared = (elaboratedShared buildCtx) - { pkgConfigReplOptions = replFlags ++ replFlags' } - } - printPlan verbosity baseCtx' buildCtx' - - buildOutcomes <- runProjectBuildPhase verbosity baseCtx' buildCtx' - runProjectPostBuildPhase verbosity baseCtx' buildCtx' buildOutcomes - finalizer - where - verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - cliConfig = commandLineFlagsToProjectConfig - globalFlags configFlags configExFlags - installFlags haddockFlags - globalConfigFlag = projectConfigConfigFile 
(projectConfigShared cliConfig) - - validatedTargets elaboratedPlan targetSelectors = do - -- Interpret the targets on the command line as repl targets - -- (as opposed to say build or haddock targets). - targets <- either (reportTargetProblems verbosity) return - $ resolveTargets - selectPackageTargets - selectComponentTarget - TargetProblemCommon - elaboratedPlan - Nothing - targetSelectors - - -- Reject multiple targets, or at least targets in different - -- components. It is ok to have two module/file targets in the - -- same component, but not two that live in different components. - when (Set.size (distinctTargetComponents targets) > 1) $ - reportTargetProblems verbosity - [TargetProblemMultipleTargets targets] - - return targets - -data OriginalComponentInfo = OriginalComponentInfo - { ociUnitId :: UnitId - , ociOriginalDeps :: [UnitId] - } - deriving (Show) - -withProject :: ProjectConfig -> Verbosity -> [String] -> IO (ProjectBaseContext, [TargetSelector], IO ()) -withProject cliConfig verbosity targetStrings = do - baseCtx <- establishProjectBaseContext verbosity cliConfig - - targetSelectors <- either (reportTargetSelectorProblems verbosity) return - =<< readTargetSelectors (localPackages baseCtx) (Just LibKind) targetStrings - - return (baseCtx, targetSelectors, return ()) - -withoutProject :: ProjectConfig -> Verbosity -> [String] -> IO (ProjectBaseContext, [TargetSelector], IO ()) -withoutProject config verbosity extraArgs = do - unless (null extraArgs) $ - die' verbosity $ "'repl' doesn't take any extra arguments when outside a project: " ++ unwords extraArgs - - globalTmp <- getTemporaryDirectory - tempDir <- createTempDirectory globalTmp "cabal-repl." - - -- We need to create a dummy package that lives in our dummy project. 
- let - sourcePackage = SourcePackage - { packageInfoId = pkgId - , packageDescription = genericPackageDescription - , packageSource = LocalUnpackedPackage tempDir - , packageDescrOverride = Nothing - } - genericPackageDescription = emptyGenericPackageDescription - & L.packageDescription .~ packageDescription - & L.condLibrary .~ Just (CondNode library [baseDep] []) - packageDescription = emptyPackageDescription - { package = pkgId - , specVersionRaw = Left (mkVersion [2, 2]) - , licenseRaw = Left SPDX.NONE - } - library = emptyLibrary { libBuildInfo = buildInfo } - buildInfo = emptyBuildInfo - { targetBuildDepends = [baseDep] - , defaultLanguage = Just Haskell2010 - } - baseDep = Dependency "base" anyVersion - pkgId = PackageIdentifier "fake-package" version0 - - writeGenericPackageDescription (tempDir "fake-package.cabal") genericPackageDescription - - baseCtx <- - establishDummyProjectBaseContext - verbosity - config - tempDir - [SpecificSourcePackage sourcePackage] - - let - targetSelectors = [TargetPackage TargetExplicitNamed [pkgId] Nothing] - finalizer = handleDoesNotExist () (removeDirectoryRecursive tempDir) - - return (baseCtx, targetSelectors, finalizer) - -addDepsToProjectTarget :: [Dependency] - -> PackageId - -> ProjectBaseContext - -> ProjectBaseContext -addDepsToProjectTarget deps pkgId ctx = - (\p -> ctx { localPackages = p }) . fmap addDeps . 
localPackages $ ctx - where - addDeps :: PackageSpecifier UnresolvedSourcePackage - -> PackageSpecifier UnresolvedSourcePackage - addDeps (SpecificSourcePackage pkg) - | packageId pkg /= pkgId = SpecificSourcePackage pkg - | SourcePackage{..} <- pkg = - SpecificSourcePackage $ pkg { packageDescription = - packageDescription & (\f -> L.allCondTrees $ traverseCondTreeC f) - %~ (deps ++) - } - addDeps spec = spec - -generateReplFlags :: Bool -> ElaboratedInstallPlan -> OriginalComponentInfo -> ReplFlags -generateReplFlags includeTransitive elaboratedPlan OriginalComponentInfo{..} = flags - where - exeDeps :: [UnitId] - exeDeps = - foldMap - (InstallPlan.foldPlanPackage (const []) elabOrderExeDependencies) - (InstallPlan.dependencyClosure elaboratedPlan [ociUnitId]) - - deps, deps', trans, trans' :: [UnitId] - flags :: ReplFlags - deps = installedUnitId <$> InstallPlan.directDeps elaboratedPlan ociUnitId - deps' = deps \\ ociOriginalDeps - trans = installedUnitId <$> InstallPlan.dependencyClosure elaboratedPlan deps' - trans' = trans \\ ociOriginalDeps - flags = fmap (("-package-id " ++) . prettyShow) . (\\ exeDeps) - $ if includeTransitive then trans' else deps' - --- | This defines what a 'TargetSelector' means for the @repl@ command. --- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, --- or otherwise classifies the problem. --- --- For repl we select: --- --- * the library if there is only one and it's buildable; or --- --- * the exe if there is only one and it's buildable; or --- --- * any other buildable component. --- --- Fail if there are no buildable lib\/exe components, or if there are --- multiple libs or exes. --- -selectPackageTargets :: TargetSelector - -> [AvailableTarget k] -> Either TargetProblem [k] -selectPackageTargets targetSelector targets - - -- If there is exactly one buildable library then we select that - | [target] <- targetsLibsBuildable - = Right [target] - - -- but fail if there are multiple buildable libraries. 
- | not (null targetsLibsBuildable) - = Left (TargetProblemMatchesMultiple targetSelector targetsLibsBuildable') - - -- If there is exactly one buildable executable then we select that - | [target] <- targetsExesBuildable - = Right [target] - - -- but fail if there are multiple buildable executables. - | not (null targetsExesBuildable) - = Left (TargetProblemMatchesMultiple targetSelector targetsExesBuildable') - - -- If there is exactly one other target then we select that - | [target] <- targetsBuildable - = Right [target] - - -- but fail if there are multiple such targets - | not (null targetsBuildable) - = Left (TargetProblemMatchesMultiple targetSelector targetsBuildable') - - -- If there are targets but none are buildable then we report those - | not (null targets) - = Left (TargetProblemNoneEnabled targetSelector targets') - - -- If there are no targets at all then we report that - | otherwise - = Left (TargetProblemNoTargets targetSelector) - where - targets' = forgetTargetsDetail targets - (targetsLibsBuildable, - targetsLibsBuildable') = selectBuildableTargets' - . filterTargetsKind LibKind - $ targets - (targetsExesBuildable, - targetsExesBuildable') = selectBuildableTargets' - . filterTargetsKind ExeKind - $ targets - (targetsBuildable, - targetsBuildable') = selectBuildableTargetsWith' - (isRequested targetSelector) targets - - -- When there's a target filter like "pkg:tests" then we do select tests, - -- but if it's just a target like "pkg" then we don't build tests unless - -- they are requested by default (i.e. by using --enable-tests) - isRequested (TargetAllPackages Nothing) TargetNotRequestedByDefault = False - isRequested (TargetPackage _ _ Nothing) TargetNotRequestedByDefault = False - isRequested _ _ = True - - --- | For a 'TargetComponent' 'TargetSelector', check if the component can be --- selected. --- --- For the @repl@ command we just need the basic checks on being buildable etc. 
--- -selectComponentTarget :: SubComponentTarget - -> AvailableTarget k -> Either TargetProblem k -selectComponentTarget subtarget = - either (Left . TargetProblemCommon) Right - . selectComponentTargetBasic subtarget - - --- | The various error conditions that can occur when matching a --- 'TargetSelector' against 'AvailableTarget's for the @repl@ command. --- -data TargetProblem = - TargetProblemCommon TargetProblemCommon - - -- | The 'TargetSelector' matches targets but none are buildable - | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] - - -- | There are no targets at all - | TargetProblemNoTargets TargetSelector - - -- | A single 'TargetSelector' matches multiple targets - | TargetProblemMatchesMultiple TargetSelector [AvailableTarget ()] - - -- | Multiple 'TargetSelector's match multiple targets - | TargetProblemMultipleTargets TargetsMap - deriving (Eq, Show) - -reportTargetProblems :: Verbosity -> [TargetProblem] -> IO a -reportTargetProblems verbosity = - die' verbosity . unlines . map renderTargetProblem - -renderTargetProblem :: TargetProblem -> String -renderTargetProblem (TargetProblemCommon problem) = - renderTargetProblemCommon "open a repl for" problem - -renderTargetProblem (TargetProblemMatchesMultiple targetSelector targets) = - "Cannot open a repl for multiple components at once. 
The target '" - ++ showTargetSelector targetSelector ++ "' refers to " - ++ renderTargetSelector targetSelector ++ " which " - ++ (if targetSelectorRefersToPkgs targetSelector then "includes " else "are ") - ++ renderListSemiAnd - [ "the " ++ renderComponentKind Plural ckind ++ " " ++ - renderListCommaAnd - [ maybe (display pkgname) display (componentNameString cname) - | t <- ts - , let cname = availableTargetComponentName t - pkgname = packageName (availableTargetPackageId t) - ] - | (ckind, ts) <- sortGroupOn availableTargetComponentKind targets - ] - ++ ".\n\n" ++ explanationSingleComponentLimitation - where - availableTargetComponentKind = componentKind - . availableTargetComponentName - -renderTargetProblem (TargetProblemMultipleTargets selectorMap) = - "Cannot open a repl for multiple components at once. The targets " - ++ renderListCommaAnd - [ "'" ++ showTargetSelector ts ++ "'" - | ts <- ordNub (concatMap snd (concat (Map.elems selectorMap))) ] - ++ " refer to different components." - ++ ".\n\n" ++ explanationSingleComponentLimitation - -renderTargetProblem (TargetProblemNoneEnabled targetSelector targets) = - renderTargetProblemNoneEnabled "open a repl for" targetSelector targets - -renderTargetProblem (TargetProblemNoTargets targetSelector) = - renderTargetProblemNoTargets "open a repl for" targetSelector - - -explanationSingleComponentLimitation :: String -explanationSingleComponentLimitation = - "The reason for this limitation is that current versions of ghci do not " - ++ "support loading multiple components as source. Load just one component " - ++ "and when you make changes to a dependent component then quit and reload." 
- diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdRun.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdRun.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdRun.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdRun.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,559 +0,0 @@ -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE NamedFieldPuns #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} - --- | cabal-install CLI command: run --- -module Distribution.Client.CmdRun ( - -- * The @run@ CLI and action - runCommand, - runAction, - handleShebang, - - -- * Internals exposed for testing - TargetProblem(..), - selectPackageTargets, - selectComponentTarget - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.ProjectOrchestration -import Distribution.Client.CmdErrorMessages - -import Distribution.Client.Setup - ( GlobalFlags(..), ConfigFlags(..), ConfigExFlags, InstallFlags ) -import Distribution.Client.GlobalFlags - ( defaultGlobalFlags ) -import qualified Distribution.Client.Setup as Client -import Distribution.Simple.Setup - ( HaddockFlags, fromFlagOrDefault ) -import Distribution.Simple.Command - ( CommandUI(..), usageAlternatives ) -import Distribution.Types.ComponentName - ( showComponentName ) -import Distribution.Text - ( display ) -import Distribution.Verbosity - ( Verbosity, normal ) -import Distribution.Simple.Utils - ( wrapText, die', ordNub, info - , createTempDirectory, handleDoesNotExist ) -import Distribution.Client.CmdInstall - ( establishDummyProjectBaseContext ) -import Distribution.Client.ProjectConfig - ( ProjectConfig(..), ProjectConfigShared(..) - , withProjectOrGlobalConfig ) -import Distribution.Client.ProjectPlanning - ( ElaboratedConfiguredPackage(..) 
- , ElaboratedInstallPlan, binDirectoryFor ) -import Distribution.Client.ProjectPlanning.Types - ( dataDirsEnvironmentForPlan ) -import Distribution.Client.TargetSelector - ( TargetSelectorProblem(..), TargetString(..) ) -import Distribution.Client.InstallPlan - ( toList, foldPlanPackage ) -import Distribution.Types.UnqualComponentName - ( UnqualComponentName, unUnqualComponentName ) -import Distribution.Simple.Program.Run - ( runProgramInvocation, ProgramInvocation(..), - emptyProgramInvocation ) -import Distribution.Types.UnitId - ( UnitId ) - -import Distribution.CabalSpecVersion - ( cabalSpecLatest ) -import Distribution.Client.Types - ( PackageLocation(..), PackageSpecifier(..) ) -import Distribution.FieldGrammar - ( takeFields, parseFieldGrammar ) -import Distribution.PackageDescription.FieldGrammar - ( executableFieldGrammar ) -import Distribution.PackageDescription.PrettyPrint - ( writeGenericPackageDescription ) -import Distribution.Parsec.Common - ( Position(..) ) -import Distribution.Parsec.ParseResult - ( ParseResult, parseString, parseFatalFailure ) -import Distribution.Parsec.Parser - ( readFields ) -import qualified Distribution.SPDX.License as SPDX -import Distribution.Solver.Types.SourcePackage as SP - ( SourcePackage(..) ) -import Distribution.Types.BuildInfo - ( BuildInfo(..) ) -import Distribution.Types.CondTree - ( CondTree(..) ) -import Distribution.Types.Executable - ( Executable(..) ) -import Distribution.Types.GenericPackageDescription as GPD - ( GenericPackageDescription(..), emptyGenericPackageDescription ) -import Distribution.Types.PackageDescription - ( PackageDescription(..), emptyPackageDescription ) -import Distribution.Types.PackageId - ( PackageIdentifier(..) ) -import Distribution.Types.Version - ( mkVersion, version0 ) -import Language.Haskell.Extension - ( Language(..) 
) - -import qualified Data.ByteString.Char8 as BS -import qualified Data.Map as Map -import qualified Data.Set as Set -import qualified Text.Parsec as P -import System.Directory - ( getTemporaryDirectory, removeDirectoryRecursive, doesFileExist ) -import System.FilePath - ( () ) - -runCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) -runCommand = Client.installCommand { - commandName = "new-run", - commandSynopsis = "Run an executable.", - commandUsage = usageAlternatives "new-run" - [ "[TARGET] [FLAGS] [-- EXECUTABLE_FLAGS]" ], - commandDescription = Just $ \pname -> wrapText $ - "Runs the specified executable-like component (an executable, a test, " - ++ "or a benchmark), first ensuring it is up to date.\n\n" - - ++ "Any executable-like component in any package in the project can be " - ++ "specified. A package can be specified if contains just one " - ++ "executable-like. The default is to use the package in the current " - ++ "directory if it contains just one executable-like.\n\n" - - ++ "Extra arguments can be passed to the program, but use '--' to " - ++ "separate arguments for the program from arguments for " ++ pname - ++ ". The executable is run in an environment where it can find its " - ++ "data files inplace in the build tree.\n\n" - - ++ "Dependencies are built or rebuilt as necessary. 
Additional " - ++ "configuration flags can be specified on the command line and these " - ++ "extend the project configuration from the 'cabal.project', " - ++ "'cabal.project.local' and other files.", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " new-run\n" - ++ " Run the executable-like in the package in the current directory\n" - ++ " " ++ pname ++ " new-run foo-tool\n" - ++ " Run the named executable-like (in any package in the project)\n" - ++ " " ++ pname ++ " new-run pkgfoo:foo-tool\n" - ++ " Run the executable-like 'foo-tool' in the package 'pkgfoo'\n" - ++ " " ++ pname ++ " new-run foo -O2 -- dothing --fooflag\n" - ++ " Build with '-O2' and run the program, passing it extra arguments.\n\n" - - ++ cmdCommonHelpTextNewBuildBeta - } - --- | The @run@ command runs a specified executable-like component, building it --- first if necessary. The component can be either an executable, a test, --- or a benchmark. This is particularly useful for passing arguments to --- exes/tests/benchs by simply appending them after a @--@. --- --- For more details on how this works, see the module --- "Distribution.Client.ProjectOrchestration" --- -runAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> GlobalFlags -> IO () -runAction (configFlags, configExFlags, installFlags, haddockFlags) - targetStrings globalFlags = do - globalTmp <- getTemporaryDirectory - tempDir <- createTempDirectory globalTmp "cabal-repl." 
- - let - with = - establishProjectBaseContext verbosity cliConfig - without config = - establishDummyProjectBaseContext verbosity (config <> cliConfig) tempDir [] - - baseCtx <- withProjectOrGlobalConfig verbosity globalConfigFlag with without - - let - scriptOrError script err = do - exists <- doesFileExist script - if exists - then BS.readFile script >>= handleScriptCase verbosity baseCtx tempDir - else reportTargetSelectorProblems verbosity err - - (baseCtx', targetSelectors) <- - readTargetSelectors (localPackages baseCtx) (Just ExeKind) (take 1 targetStrings) - >>= \case - Left err@(TargetSelectorNoTargetsInProject:_) - | (script:_) <- targetStrings -> scriptOrError script err - Left err@(TargetSelectorNoSuch t _:_) - | TargetString1 script <- t -> scriptOrError script err - Left err@(TargetSelectorExpected t _ _:_) - | TargetString1 script <- t -> scriptOrError script err - Left err -> reportTargetSelectorProblems verbosity err - Right sels -> return (baseCtx, sels) - - buildCtx <- - runProjectPreBuildPhase verbosity baseCtx' $ \elaboratedPlan -> do - - when (buildSettingOnlyDeps (buildSettings baseCtx')) $ - die' verbosity $ - "The run command does not support '--only-dependencies'. " - ++ "You may wish to use 'build --only-dependencies' and then " - ++ "use 'run'." - - -- Interpret the targets on the command line as build targets - -- (as opposed to say repl or haddock targets). - targets <- either (reportTargetProblems verbosity) return - $ resolveTargets - selectPackageTargets - selectComponentTarget - TargetProblemCommon - elaboratedPlan - Nothing - targetSelectors - - -- Reject multiple targets, or at least targets in different - -- components. It is ok to have two module/file targets in the - -- same component, but not two that live in different components. - -- - -- Note that we discard the target and return the whole 'TargetsMap', - -- so this check will be repeated (and must succeed) after - -- the 'runProjectPreBuildPhase'. 
Keep it in mind when modifying this. - _ <- singleExeOrElse - (reportTargetProblems - verbosity - [TargetProblemMultipleTargets targets]) - targets - - let elaboratedPlan' = pruneInstallPlanToTargets - TargetActionBuild - targets - elaboratedPlan - return (elaboratedPlan', targets) - - (selectedUnitId, selectedComponent) <- - -- Slight duplication with 'runProjectPreBuildPhase'. - singleExeOrElse - (die' verbosity $ "No or multiple targets given, but the run " - ++ "phase has been reached. This is a bug.") - $ targetsMap buildCtx - - printPlan verbosity baseCtx' buildCtx - - buildOutcomes <- runProjectBuildPhase verbosity baseCtx' buildCtx - runProjectPostBuildPhase verbosity baseCtx' buildCtx buildOutcomes - - - let elaboratedPlan = elaboratedPlanToExecute buildCtx - matchingElaboratedConfiguredPackages = - matchingPackagesByUnitId - selectedUnitId - elaboratedPlan - - let exeName = unUnqualComponentName selectedComponent - - -- In the common case, we expect @matchingElaboratedConfiguredPackages@ - -- to consist of a single element that provides a single way of building - -- an appropriately-named executable. In that case we take that - -- package and continue. - -- - -- However, multiple packages/components could provide that - -- executable, or it's possible we don't find the executable anywhere - -- in the build plan. I suppose in principle it's also possible that - -- a single package provides an executable in two different ways, - -- though that's probably a bug if. Anyway it's a good lint to report - -- an error in all of these cases, even if some seem like they - -- shouldn't happen. 
- pkg <- case matchingElaboratedConfiguredPackages of - [] -> die' verbosity $ "Unknown executable " - ++ exeName - ++ " in package " - ++ display selectedUnitId - [elabPkg] -> do - info verbosity $ "Selecting " - ++ display selectedUnitId - ++ " to supply " ++ exeName - return elabPkg - elabPkgs -> die' verbosity - $ "Multiple matching executables found matching " - ++ exeName - ++ ":\n" - ++ unlines (fmap (\p -> " - in package " ++ display (elabUnitId p)) elabPkgs) - let exePath = binDirectoryFor (distDirLayout baseCtx) - (elaboratedShared buildCtx) - pkg - exeName - exeName - let args = drop 1 targetStrings - runProgramInvocation - verbosity - emptyProgramInvocation { - progInvokePath = exePath, - progInvokeArgs = args, - progInvokeEnv = dataDirsEnvironmentForPlan - (distDirLayout baseCtx) - elaboratedPlan - } - - handleDoesNotExist () (removeDirectoryRecursive tempDir) - where - verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - cliConfig = commandLineFlagsToProjectConfig - globalFlags configFlags configExFlags - installFlags haddockFlags - globalConfigFlag = projectConfigConfigFile (projectConfigShared cliConfig) - -handleShebang :: String -> IO () -handleShebang script = - runAction (commandDefaultFlags runCommand) [script] defaultGlobalFlags - -parseScriptBlock :: BS.ByteString -> ParseResult Executable -parseScriptBlock str = - case readFields str of - Right fs -> do - let (fields, _) = takeFields fs - parseFieldGrammar cabalSpecLatest fields (executableFieldGrammar "script") - Left perr -> parseFatalFailure pos (show perr) where - ppos = P.errorPos perr - pos = Position (P.sourceLine ppos) (P.sourceColumn ppos) - -readScriptBlock :: Verbosity -> BS.ByteString -> IO Executable -readScriptBlock verbosity = parseString parseScriptBlock verbosity "script block" - -readScriptBlockFromScript :: Verbosity -> BS.ByteString -> IO (Executable, BS.ByteString) -readScriptBlockFromScript verbosity str = - (\x -> (x, noShebang)) <$> readScriptBlock 
verbosity str' - where - start = "{- cabal:" - end = "-}" - - str' = BS.unlines - . takeWhile (/= end) - . drop 1 . dropWhile (/= start) - $ lines' - - noShebang = BS.unlines - . filter ((/= "#!") . BS.take 2) - $ lines' - - lines' = BS.lines str - -handleScriptCase :: Verbosity - -> ProjectBaseContext - -> FilePath - -> BS.ByteString - -> IO (ProjectBaseContext, [TargetSelector]) -handleScriptCase verbosity baseCtx tempDir scriptContents = do - (executable, contents') <- readScriptBlockFromScript verbosity scriptContents - - -- We need to create a dummy package that lives in our dummy project. - let - sourcePackage = SourcePackage - { packageInfoId = pkgId - , SP.packageDescription = genericPackageDescription - , packageSource = LocalUnpackedPackage tempDir - , packageDescrOverride = Nothing - } - genericPackageDescription = emptyGenericPackageDescription - { GPD.packageDescription = packageDescription - , condExecutables = [("script", CondNode executable' targetBuildDepends [])] - } - executable' = executable - { modulePath = "Main.hs" - , buildInfo = binfo - { defaultLanguage = - case defaultLanguage of - just@(Just _) -> just - Nothing -> Just Haskell2010 - } - } - binfo@BuildInfo{..} = buildInfo executable - packageDescription = emptyPackageDescription - { package = pkgId - , specVersionRaw = Left (mkVersion [2, 2]) - , licenseRaw = Left SPDX.NONE - } - pkgId = PackageIdentifier "fake-package" version0 - - writeGenericPackageDescription (tempDir "fake-package.cabal") genericPackageDescription - BS.writeFile (tempDir "Main.hs") contents' - - let - baseCtx' = baseCtx - { localPackages = localPackages baseCtx ++ [SpecificSourcePackage sourcePackage] } - targetSelectors = [TargetPackage TargetExplicitNamed [pkgId] Nothing] - - return (baseCtx', targetSelectors) - -singleExeOrElse :: IO (UnitId, UnqualComponentName) -> TargetsMap -> IO (UnitId, UnqualComponentName) -singleExeOrElse action targetsMap = - case Set.toList . 
distinctTargetComponents $ targetsMap - of [(unitId, CExeName component)] -> return (unitId, component) - [(unitId, CTestName component)] -> return (unitId, component) - [(unitId, CBenchName component)] -> return (unitId, component) - _ -> action - --- | Filter the 'ElaboratedInstallPlan' keeping only the --- 'ElaboratedConfiguredPackage's that match the specified --- 'UnitId'. -matchingPackagesByUnitId :: UnitId - -> ElaboratedInstallPlan - -> [ElaboratedConfiguredPackage] -matchingPackagesByUnitId uid = - catMaybes - . fmap (foldPlanPackage - (const Nothing) - (\x -> if elabUnitId x == uid - then Just x - else Nothing)) - . toList - --- | This defines what a 'TargetSelector' means for the @run@ command. --- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, --- or otherwise classifies the problem. --- --- For the @run@ command we select the exe if there is only one and it's --- buildable. Fail if there are no or multiple buildable exe components. --- -selectPackageTargets :: TargetSelector - -> [AvailableTarget k] -> Either TargetProblem [k] -selectPackageTargets targetSelector targets - - -- If there is exactly one buildable executable then we select that - | [target] <- targetsExesBuildable - = Right [target] - - -- but fail if there are multiple buildable executables. 
- | not (null targetsExesBuildable) - = Left (TargetProblemMatchesMultiple targetSelector targetsExesBuildable') - - -- If there are executables but none are buildable then we report those - | not (null targetsExes) - = Left (TargetProblemNoneEnabled targetSelector targetsExes) - - -- If there are no executables but some other targets then we report that - | not (null targets) - = Left (TargetProblemNoExes targetSelector) - - -- If there are no targets at all then we report that - | otherwise - = Left (TargetProblemNoTargets targetSelector) - where - -- Targets that can be executed - targetsExecutableLike = - concatMap (\kind -> filterTargetsKind kind targets) - [ExeKind, TestKind, BenchKind] - (targetsExesBuildable, - targetsExesBuildable') = selectBuildableTargets' targetsExecutableLike - - targetsExes = forgetTargetsDetail targetsExecutableLike - - --- | For a 'TargetComponent' 'TargetSelector', check if the component can be --- selected. --- --- For the @run@ command we just need to check it is a executable-like --- (an executable, a test, or a benchmark), in addition --- to the basic checks on being buildable etc. --- -selectComponentTarget :: SubComponentTarget - -> AvailableTarget k -> Either TargetProblem k -selectComponentTarget subtarget@WholeComponent t - = case availableTargetComponentName t - of CExeName _ -> component - CTestName _ -> component - CBenchName _ -> component - _ -> Left (TargetProblemComponentNotExe pkgid cname) - where pkgid = availableTargetPackageId t - cname = availableTargetComponentName t - component = either (Left . TargetProblemCommon) return $ - selectComponentTargetBasic subtarget t - -selectComponentTarget subtarget t - = Left (TargetProblemIsSubComponent (availableTargetPackageId t) - (availableTargetComponentName t) - subtarget) - --- | The various error conditions that can occur when matching a --- 'TargetSelector' against 'AvailableTarget's for the @run@ command. 
--- -data TargetProblem = - TargetProblemCommon TargetProblemCommon - -- | The 'TargetSelector' matches targets but none are buildable - | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] - - -- | There are no targets at all - | TargetProblemNoTargets TargetSelector - - -- | The 'TargetSelector' matches targets but no executables - | TargetProblemNoExes TargetSelector - - -- | A single 'TargetSelector' matches multiple targets - | TargetProblemMatchesMultiple TargetSelector [AvailableTarget ()] - - -- | Multiple 'TargetSelector's match multiple targets - | TargetProblemMultipleTargets TargetsMap - - -- | The 'TargetSelector' refers to a component that is not an executable - | TargetProblemComponentNotExe PackageId ComponentName - - -- | Asking to run an individual file or module is not supported - | TargetProblemIsSubComponent PackageId ComponentName SubComponentTarget - deriving (Eq, Show) - -reportTargetProblems :: Verbosity -> [TargetProblem] -> IO a -reportTargetProblems verbosity = - die' verbosity . unlines . map renderTargetProblem - -renderTargetProblem :: TargetProblem -> String -renderTargetProblem (TargetProblemCommon problem) = - renderTargetProblemCommon "run" problem - -renderTargetProblem (TargetProblemNoneEnabled targetSelector targets) = - renderTargetProblemNoneEnabled "run" targetSelector targets - -renderTargetProblem (TargetProblemNoExes targetSelector) = - "Cannot run the target '" ++ showTargetSelector targetSelector - ++ "' which refers to " ++ renderTargetSelector targetSelector - ++ " because " - ++ plural (targetSelectorPluralPkgs targetSelector) "it does" "they do" - ++ " not contain any executables." - -renderTargetProblem (TargetProblemNoTargets targetSelector) = - case targetSelectorFilter targetSelector of - Just kind | kind /= ExeKind - -> "The run command is for running executables, but the target '" - ++ showTargetSelector targetSelector ++ "' refers to " - ++ renderTargetSelector targetSelector ++ "." 
- - _ -> renderTargetProblemNoTargets "run" targetSelector - -renderTargetProblem (TargetProblemMatchesMultiple targetSelector targets) = - "The run command is for running a single executable at once. The target '" - ++ showTargetSelector targetSelector ++ "' refers to " - ++ renderTargetSelector targetSelector ++ " which includes " - ++ renderListCommaAnd ( ("the "++) <$> - showComponentName <$> - availableTargetComponentName <$> - foldMap - (\kind -> filterTargetsKind kind targets) - [ExeKind, TestKind, BenchKind] ) - ++ "." - -renderTargetProblem (TargetProblemMultipleTargets selectorMap) = - "The run command is for running a single executable at once. The targets " - ++ renderListCommaAnd [ "'" ++ showTargetSelector ts ++ "'" - | ts <- ordNub (concatMap snd (concat (Map.elems selectorMap))) ] - ++ " refer to different executables." - -renderTargetProblem (TargetProblemComponentNotExe pkgid cname) = - "The run command is for running executables, but the target '" - ++ showTargetSelector targetSelector ++ "' refers to " - ++ renderTargetSelector targetSelector ++ " from the package " - ++ display pkgid ++ "." - where - targetSelector = TargetComponent pkgid cname WholeComponent - -renderTargetProblem (TargetProblemIsSubComponent pkgid cname subtarget) = - "The run command can only run an executable as a whole, " - ++ "not files or modules within them, but the target '" - ++ showTargetSelector targetSelector ++ "' refers to " - ++ renderTargetSelector targetSelector ++ "." 
- where - targetSelector = TargetComponent pkgid cname subtarget diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdSdist.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdSdist.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdSdist.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdSdist.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,341 +0,0 @@ -{-# LANGUAGE MultiWayIf #-} -{-# LANGUAGE NamedFieldPuns #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE TupleSections #-} -{-# LANGUAGE ViewPatterns #-} -module Distribution.Client.CmdSdist - ( sdistCommand, sdistAction, packageToSdist - , SdistFlags(..), defaultSdistFlags - , OutputFormat(..), ArchiveFormat(..) ) where - -import Distribution.Client.CmdErrorMessages - ( Plural(..), renderComponentKind ) -import Distribution.Client.ProjectOrchestration - ( ProjectBaseContext(..), establishProjectBaseContext ) -import Distribution.Client.TargetSelector - ( TargetSelector(..), ComponentKind - , readTargetSelectors, reportTargetSelectorProblems ) -import Distribution.Client.RebuildMonad - ( runRebuild ) -import Distribution.Client.Setup - ( ArchiveFormat(..), GlobalFlags(..) ) -import Distribution.Solver.Types.SourcePackage - ( SourcePackage(..) 
) -import Distribution.Client.Types - ( PackageSpecifier(..), PackageLocation(..), UnresolvedSourcePackage ) -import Distribution.Client.DistDirLayout - ( DistDirLayout(..), defaultDistDirLayout ) -import Distribution.Client.ProjectConfig - ( findProjectRoot, readProjectConfig ) - -import Distribution.Package - ( Package(packageId) ) -import Distribution.PackageDescription.Configuration - ( flattenPackageDescription ) -import Distribution.Pretty - ( prettyShow ) -import Distribution.ReadE - ( succeedReadE ) -import Distribution.Simple.Command - ( CommandUI(..), option, choiceOpt, reqArg ) -import Distribution.Simple.PreProcess - ( knownSuffixHandlers ) -import Distribution.Simple.Setup - ( Flag(..), toFlag, fromFlagOrDefault, flagToList, flagToMaybe - , optionVerbosity, optionDistPref, trueArg - ) -import Distribution.Simple.SrcDist - ( listPackageSources ) -import Distribution.Simple.Utils - ( die', notice, withOutputMarker ) -import Distribution.Types.ComponentName - ( ComponentName, showComponentName ) -import Distribution.Types.PackageName - ( PackageName, unPackageName ) -import Distribution.Verbosity - ( Verbosity, normal ) - -import qualified Codec.Archive.Tar as Tar -import qualified Codec.Archive.Tar.Entry as Tar -import qualified Codec.Archive.Zip as Zip -import qualified Codec.Compression.GZip as GZip -import Control.Exception - ( throwIO ) -import Control.Monad - ( when, forM, forM_ ) -import Control.Monad.Trans - ( liftIO ) -import Control.Monad.State.Lazy - ( StateT, modify, gets, evalStateT ) -import Control.Monad.Writer.Lazy - ( WriterT, tell, execWriterT ) -import Data.Bits - ( shiftL ) -import qualified Data.ByteString.Char8 as BS -import qualified Data.ByteString.Lazy.Char8 as BSL -import Data.Either - ( partitionEithers ) -import Data.List - ( find, sortOn, nub, intercalate ) -import qualified Data.Set as Set -import System.Directory - ( getCurrentDirectory, setCurrentDirectory - , createDirectoryIfMissing, makeAbsolute ) -import System.FilePath 
- ( (), (<.>), makeRelative, normalise, takeDirectory ) - -sdistCommand :: CommandUI SdistFlags -sdistCommand = CommandUI - { commandName = "new-sdist" - , commandSynopsis = "Generate a source distribution file (.tar.gz)." - , commandUsage = \pname -> - "Usage: " ++ pname ++ " new-sdist [FLAGS] [PACKAGES]\n" - , commandDescription = Just $ \_ -> - "Generates tarballs of project packages suitable for upload to Hackage." - , commandNotes = Nothing - , commandDefaultFlags = defaultSdistFlags - , commandOptions = \showOrParseArgs -> - [ optionVerbosity - sdistVerbosity (\v flags -> flags { sdistVerbosity = v }) - , optionDistPref - sdistDistDir (\dd flags -> flags { sdistDistDir = dd }) - showOrParseArgs - , option [] ["project-file"] - "Set the name of the cabal.project file to search for in parent directories" - sdistProjectFile (\pf flags -> flags { sdistProjectFile = pf }) - (reqArg "FILE" (succeedReadE Flag) flagToList) - , option ['l'] ["list-only"] - "Just list the sources, do not make a tarball" - sdistListSources (\v flags -> flags { sdistListSources = v }) - trueArg - , option ['z'] ["null-sep"] - "Separate the source files with NUL bytes rather than newlines." - sdistNulSeparated (\v flags -> flags { sdistNulSeparated = v }) - trueArg - , option [] ["archive-format"] - "Choose what type of archive to create. No effect if given with '--list-only'" - sdistArchiveFormat (\v flags -> flags { sdistArchiveFormat = v }) - (choiceOpt - [ (Flag TargzFormat, ([], ["targz"]), - "Produce a '.tar.gz' format archive (default and required for uploading to hackage)") - , (Flag ZipFormat, ([], ["zip"]), - "Produce a '.zip' format archive") - ] - ) - , option ['o'] ["output-dir", "outputdir"] - "Choose the output directory of this command. 
'-' sends all output to stdout" - sdistOutputPath (\o flags -> flags { sdistOutputPath = o }) - (reqArg "PATH" (succeedReadE Flag) flagToList) - ] - } - -data SdistFlags = SdistFlags - { sdistVerbosity :: Flag Verbosity - , sdistDistDir :: Flag FilePath - , sdistProjectFile :: Flag FilePath - , sdistListSources :: Flag Bool - , sdistNulSeparated :: Flag Bool - , sdistArchiveFormat :: Flag ArchiveFormat - , sdistOutputPath :: Flag FilePath - } - -defaultSdistFlags :: SdistFlags -defaultSdistFlags = SdistFlags - { sdistVerbosity = toFlag normal - , sdistDistDir = mempty - , sdistProjectFile = mempty - , sdistListSources = toFlag False - , sdistNulSeparated = toFlag False - , sdistArchiveFormat = toFlag TargzFormat - , sdistOutputPath = mempty - } - --- - -sdistAction :: SdistFlags -> [String] -> GlobalFlags -> IO () -sdistAction SdistFlags{..} targetStrings globalFlags = do - let verbosity = fromFlagOrDefault normal sdistVerbosity - mDistDirectory = flagToMaybe sdistDistDir - mProjectFile = flagToMaybe sdistProjectFile - globalConfig = globalConfigFile globalFlags - listSources = fromFlagOrDefault False sdistListSources - nulSeparated = fromFlagOrDefault False sdistNulSeparated - archiveFormat = fromFlagOrDefault TargzFormat sdistArchiveFormat - mOutputPath = flagToMaybe sdistOutputPath - - projectRoot <- either throwIO return =<< findProjectRoot Nothing mProjectFile - let distLayout = defaultDistDirLayout projectRoot mDistDirectory - dir <- getCurrentDirectory - projectConfig <- runRebuild dir $ readProjectConfig verbosity globalConfig distLayout - baseCtx <- establishProjectBaseContext verbosity projectConfig - let localPkgs = localPackages baseCtx - - targetSelectors <- either (reportTargetSelectorProblems verbosity) return - =<< readTargetSelectors localPkgs Nothing targetStrings - - mOutputPath' <- case mOutputPath of - Just "-" -> return (Just "-") - Just path -> Just <$> makeAbsolute path - Nothing -> return Nothing - - let - format = - if | listSources, 
nulSeparated -> SourceList '\0' - | listSources -> SourceList '\n' - | otherwise -> Archive archiveFormat - - ext = case format of - SourceList _ -> "list" - Archive TargzFormat -> "tar.gz" - Archive ZipFormat -> "zip" - - outputPath pkg = case mOutputPath' of - Just path - | path == "-" -> "-" - | otherwise -> path prettyShow (packageId pkg) <.> ext - Nothing - | listSources -> "-" - | otherwise -> distSdistFile distLayout (packageId pkg) archiveFormat - - createDirectoryIfMissing True (distSdistDirectory distLayout) - - case reifyTargetSelectors localPkgs targetSelectors of - Left errs -> die' verbosity . unlines . fmap renderTargetProblem $ errs - Right pkgs - | length pkgs > 1, not listSources, Just "-" <- mOutputPath' -> - die' verbosity "Can't write multiple tarballs to standard output!" - | otherwise -> - mapM_ (\pkg -> packageToSdist verbosity (distProjectRootDirectory distLayout) format (outputPath pkg) pkg) pkgs - -data IsExec = Exec | NoExec - deriving (Show, Eq) - -data OutputFormat = SourceList Char - | Archive ArchiveFormat - deriving (Show, Eq) - -packageToSdist :: Verbosity -> FilePath -> OutputFormat -> FilePath -> UnresolvedSourcePackage -> IO () -packageToSdist verbosity projectRootDir format outputFile pkg = do - dir <- case packageSource pkg of - LocalUnpackedPackage path -> return path - _ -> die' verbosity "The impossible happened: a local package isn't local" - oldPwd <- getCurrentDirectory - setCurrentDirectory dir - - let norm flag = fmap ((flag, ) . normalise) - (norm NoExec -> nonexec, norm Exec -> exec) <- - listPackageSources verbosity (flattenPackageDescription $ packageDescription pkg) knownSuffixHandlers - - let write = if outputFile == "-" - then putStr . withOutputMarker verbosity . BSL.unpack - else BSL.writeFile outputFile - files = nub . sortOn snd $ nonexec ++ exec - - case format of - SourceList nulSep -> do - let prefix = makeRelative projectRootDir dir - write (BSL.pack . (++ [nulSep]) . intercalate [nulSep] . 
fmap ((prefix ) . snd) $ files) - when (outputFile /= "-") $ - notice verbosity $ "Wrote source list to " ++ outputFile ++ "\n" - Archive TargzFormat -> do - let entriesM :: StateT (Set.Set FilePath) (WriterT [Tar.Entry] IO) () - entriesM = do - let prefix = prettyShow (packageId pkg) - modify (Set.insert prefix) - case Tar.toTarPath True prefix of - Left err -> liftIO $ die' verbosity ("Error packing sdist: " ++ err) - Right path -> tell [Tar.directoryEntry path] - - forM_ files $ \(perm, file) -> do - let fileDir = takeDirectory (prefix file) - perm' = case perm of - Exec -> Tar.executableFilePermissions - NoExec -> Tar.ordinaryFilePermissions - needsEntry <- gets (Set.notMember fileDir) - - when needsEntry $ do - modify (Set.insert fileDir) - case Tar.toTarPath True fileDir of - Left err -> liftIO $ die' verbosity ("Error packing sdist: " ++ err) - Right path -> tell [Tar.directoryEntry path] - - contents <- liftIO . fmap BSL.fromStrict . BS.readFile $ file - case Tar.toTarPath False (prefix file) of - Left err -> liftIO $ die' verbosity ("Error packing sdist: " ++ err) - Right path -> tell [(Tar.fileEntry path contents) { Tar.entryPermissions = perm' }] - - entries <- execWriterT (evalStateT entriesM mempty) - let -- Pretend our GZip file is made on Unix. - normalize bs = BSL.concat [first, "\x03", rest'] - where - (first, rest) = BSL.splitAt 9 bs - rest' = BSL.tail rest - -- The Unix epoch, which is the default value, is - -- unsuitable because it causes unpacking problems on - -- Windows; we need a post-1980 date. One gigasecond - -- after the epoch is during 2001-09-09, so that does - -- nicely. See #5596. - setModTime entry = entry { Tar.entryTime = 1000000000 } - write . normalize . GZip.compress . 
Tar.write $ fmap setModTime entries - when (outputFile /= "-") $ - notice verbosity $ "Wrote tarball sdist to " ++ outputFile ++ "\n" - Archive ZipFormat -> do - let prefix = prettyShow (packageId pkg) - entries <- forM files $ \(perm, file) -> do - let perm' = case perm of - -- -rwxr-xr-x - Exec -> 0o010755 `shiftL` 16 - -- -rw-r--r-- - NoExec -> 0o010644 `shiftL` 16 - contents <- BSL.readFile file - return $ (Zip.toEntry (prefix file) 0 contents) { Zip.eExternalFileAttributes = perm' } - let archive = foldr Zip.addEntryToArchive Zip.emptyArchive entries - write (Zip.fromArchive archive) - when (outputFile /= "-") $ - notice verbosity $ "Wrote zip sdist to " ++ outputFile ++ "\n" - setCurrentDirectory oldPwd - --- - -reifyTargetSelectors :: [PackageSpecifier UnresolvedSourcePackage] -> [TargetSelector] -> Either [TargetProblem] [UnresolvedSourcePackage] -reifyTargetSelectors pkgs sels = - case partitionEithers (foldMap go sels) of - ([], sels') -> Right sels' - (errs, _) -> Left errs - where - flatten (SpecificSourcePackage pkg@SourcePackage{}) = pkg - flatten _ = error "The impossible happened: how do we not know about a local package?" - pkgs' = fmap flatten pkgs - - getPkg pid = case find ((== pid) . packageId) pkgs' of - Just pkg -> Right pkg - Nothing -> error "The impossible happened: we have a reference to a local package that isn't in localPackages." 
- - go :: TargetSelector -> [Either TargetProblem UnresolvedSourcePackage] - go (TargetPackage _ pids Nothing) = fmap getPkg pids - go (TargetAllPackages Nothing) = Right <$> pkgs' - - go (TargetPackage _ _ (Just kind)) = [Left (AllComponentsOnly kind)] - go (TargetAllPackages (Just kind)) = [Left (AllComponentsOnly kind)] - - go (TargetPackageNamed pname _) = [Left (NonlocalPackageNotAllowed pname)] - go (TargetComponentUnknown pname _ _) = [Left (NonlocalPackageNotAllowed pname)] - - go (TargetComponent _ cname _) = [Left (ComponentsNotAllowed cname)] - -data TargetProblem = AllComponentsOnly ComponentKind - | NonlocalPackageNotAllowed PackageName - | ComponentsNotAllowed ComponentName - -renderTargetProblem :: TargetProblem -> String -renderTargetProblem (AllComponentsOnly kind) = - "It is not possible to package only the " ++ renderComponentKind Plural kind ++ " from a package " - ++ "for distribution. Only entire packages may be packaged for distribution." -renderTargetProblem (ComponentsNotAllowed cname) = - "The component " ++ showComponentName cname ++ " cannot be packaged for distribution on its own. " - ++ "Only entire packages may be packaged for distribution." -renderTargetProblem (NonlocalPackageNotAllowed pname) = - "The package " ++ unPackageName pname ++ " cannot be packaged for distribution, because it is not " - ++ "local to this project." 
- diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdTest.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdTest.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdTest.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdTest.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,248 +0,0 @@ -{-# LANGUAGE NamedFieldPuns #-} - --- | cabal-install CLI command: test --- -module Distribution.Client.CmdTest ( - -- * The @test@ CLI and action - testCommand, - testAction, - - -- * Internals exposed for testing - TargetProblem(..), - selectPackageTargets, - selectComponentTarget - ) where - -import Distribution.Client.ProjectOrchestration -import Distribution.Client.CmdErrorMessages - -import Distribution.Client.Setup - ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) -import qualified Distribution.Client.Setup as Client -import Distribution.Simple.Setup - ( HaddockFlags, fromFlagOrDefault ) -import Distribution.Simple.Command - ( CommandUI(..), usageAlternatives ) -import Distribution.Text - ( display ) -import Distribution.Verbosity - ( Verbosity, normal ) -import Distribution.Simple.Utils - ( wrapText, die' ) - -import Control.Monad (when) - - -testCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) -testCommand = Client.installCommand { - commandName = "new-test", - commandSynopsis = "Run test-suites", - commandUsage = usageAlternatives "new-test" [ "[TARGETS] [FLAGS]" ], - commandDescription = Just $ \_ -> wrapText $ - "Runs the specified test-suites, first ensuring they are up to " - ++ "date.\n\n" - - ++ "Any test-suite in any package in the project can be specified. " - ++ "A package can be specified in which case all the test-suites in the " - ++ "package are run. 
The default is to run all the test-suites in the " - ++ "package in the current directory.\n\n" - - ++ "Dependencies are built or rebuilt as necessary. Additional " - ++ "configuration flags can be specified on the command line and these " - ++ "extend the project configuration from the 'cabal.project', " - ++ "'cabal.project.local' and other files.\n\n" - - ++ "To pass command-line arguments to a test suite, see the " - ++ "new-run command.", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " new-test\n" - ++ " Run all the test-suites in the package in the current directory\n" - ++ " " ++ pname ++ " new-test pkgname\n" - ++ " Run all the test-suites in the package named pkgname\n" - ++ " " ++ pname ++ " new-test cname\n" - ++ " Run the test-suite named cname\n" - ++ " " ++ pname ++ " new-test cname --enable-coverage\n" - ++ " Run the test-suite built with code coverage (including local libs used)\n\n" - - ++ cmdCommonHelpTextNewBuildBeta - } - - --- | The @test@ command is very much like @build@. It brings the install plan --- up to date, selects that part of the plan needed by the given or implicit --- test target(s) and then executes the plan. --- --- Compared to @build@ the difference is that there's also test targets --- which are ephemeral. 
--- --- For more details on how this works, see the module --- "Distribution.Client.ProjectOrchestration" --- -testAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> GlobalFlags -> IO () -testAction (configFlags, configExFlags, installFlags, haddockFlags) - targetStrings globalFlags = do - - baseCtx <- establishProjectBaseContext verbosity cliConfig - - targetSelectors <- either (reportTargetSelectorProblems verbosity) return - =<< readTargetSelectors (localPackages baseCtx) (Just TestKind) targetStrings - - buildCtx <- - runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do - - when (buildSettingOnlyDeps (buildSettings baseCtx)) $ - die' verbosity $ - "The test command does not support '--only-dependencies'. " - ++ "You may wish to use 'build --only-dependencies' and then " - ++ "use 'test'." - - -- Interpret the targets on the command line as test targets - -- (as opposed to say build or haddock targets). - targets <- either (reportTargetProblems verbosity) return - $ resolveTargets - selectPackageTargets - selectComponentTarget - TargetProblemCommon - elaboratedPlan - Nothing - targetSelectors - - let elaboratedPlan' = pruneInstallPlanToTargets - TargetActionTest - targets - elaboratedPlan - return (elaboratedPlan', targets) - - printPlan verbosity baseCtx buildCtx - - buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx - runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes - where - verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - cliConfig = commandLineFlagsToProjectConfig - globalFlags configFlags configExFlags - installFlags haddockFlags - --- | This defines what a 'TargetSelector' means for the @test@ command. --- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, --- or otherwise classifies the problem. --- --- For the @test@ command we select all buildable test-suites, --- or fail if there are no test-suites or no buildable test-suites. 
--- -selectPackageTargets :: TargetSelector - -> [AvailableTarget k] -> Either TargetProblem [k] -selectPackageTargets targetSelector targets - - -- If there are any buildable test-suite targets then we select those - | not (null targetsTestsBuildable) - = Right targetsTestsBuildable - - -- If there are test-suites but none are buildable then we report those - | not (null targetsTests) - = Left (TargetProblemNoneEnabled targetSelector targetsTests) - - -- If there are no test-suite but some other targets then we report that - | not (null targets) - = Left (TargetProblemNoTests targetSelector) - - -- If there are no targets at all then we report that - | otherwise - = Left (TargetProblemNoTargets targetSelector) - where - targetsTestsBuildable = selectBuildableTargets - . filterTargetsKind TestKind - $ targets - - targetsTests = forgetTargetsDetail - . filterTargetsKind TestKind - $ targets - - --- | For a 'TargetComponent' 'TargetSelector', check if the component can be --- selected. --- --- For the @test@ command we just need to check it is a test-suite, in addition --- to the basic checks on being buildable etc. --- -selectComponentTarget :: SubComponentTarget - -> AvailableTarget k -> Either TargetProblem k -selectComponentTarget subtarget@WholeComponent t - | CTestName _ <- availableTargetComponentName t - = either (Left . TargetProblemCommon) return $ - selectComponentTargetBasic subtarget t - | otherwise - = Left (TargetProblemComponentNotTest (availableTargetPackageId t) - (availableTargetComponentName t)) - -selectComponentTarget subtarget t - = Left (TargetProblemIsSubComponent (availableTargetPackageId t) - (availableTargetComponentName t) - subtarget) - --- | The various error conditions that can occur when matching a --- 'TargetSelector' against 'AvailableTarget's for the @test@ command. 
--- -data TargetProblem = - TargetProblemCommon TargetProblemCommon - - -- | The 'TargetSelector' matches targets but none are buildable - | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] - - -- | There are no targets at all - | TargetProblemNoTargets TargetSelector - - -- | The 'TargetSelector' matches targets but no test-suites - | TargetProblemNoTests TargetSelector - - -- | The 'TargetSelector' refers to a component that is not a test-suite - | TargetProblemComponentNotTest PackageId ComponentName - - -- | Asking to test an individual file or module is not supported - | TargetProblemIsSubComponent PackageId ComponentName SubComponentTarget - deriving (Eq, Show) - -reportTargetProblems :: Verbosity -> [TargetProblem] -> IO a -reportTargetProblems verbosity = - die' verbosity . unlines . map renderTargetProblem - -renderTargetProblem :: TargetProblem -> String -renderTargetProblem (TargetProblemCommon problem) = - renderTargetProblemCommon "run" problem - -renderTargetProblem (TargetProblemNoneEnabled targetSelector targets) = - renderTargetProblemNoneEnabled "test" targetSelector targets - -renderTargetProblem (TargetProblemNoTests targetSelector) = - "Cannot run tests for the target '" ++ showTargetSelector targetSelector - ++ "' which refers to " ++ renderTargetSelector targetSelector - ++ " because " - ++ plural (targetSelectorPluralPkgs targetSelector) "it does" "they do" - ++ " not contain any test suites." - -renderTargetProblem (TargetProblemNoTargets targetSelector) = - case targetSelectorFilter targetSelector of - Just kind | kind /= TestKind - -> "The test command is for running test suites, but the target '" - ++ showTargetSelector targetSelector ++ "' refers to " - ++ renderTargetSelector targetSelector ++ "." 
- - _ -> renderTargetProblemNoTargets "test" targetSelector - -renderTargetProblem (TargetProblemComponentNotTest pkgid cname) = - "The test command is for running test suites, but the target '" - ++ showTargetSelector targetSelector ++ "' refers to " - ++ renderTargetSelector targetSelector ++ " from the package " - ++ display pkgid ++ "." - where - targetSelector = TargetComponent pkgid cname WholeComponent - -renderTargetProblem (TargetProblemIsSubComponent pkgid cname subtarget) = - "The test command can only run test suites as a whole, " - ++ "not files or modules within them, but the target '" - ++ showTargetSelector targetSelector ++ "' refers to " - ++ renderTargetSelector targetSelector ++ "." - where - targetSelector = TargetComponent pkgid cname subtarget diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdUpdate.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdUpdate.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/CmdUpdate.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/CmdUpdate.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,217 +0,0 @@ -{-# LANGUAGE CPP, LambdaCase, NamedFieldPuns, RecordWildCards, ViewPatterns, - TupleSections #-} - --- | cabal-install CLI command: update --- -module Distribution.Client.CmdUpdate ( - updateCommand, - updateAction, - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.Compat.Directory - ( setModificationTime ) -import Distribution.Client.ProjectOrchestration -import Distribution.Client.ProjectConfig - ( ProjectConfig(..) 
- , ProjectConfigShared(projectConfigConfigFile) - , projectConfigWithSolverRepoContext - , withProjectOrGlobalConfig ) -import Distribution.Client.Types - ( Repo(..), RemoteRepo(..), isRepoRemote ) -import Distribution.Client.HttpUtils - ( DownloadResult(..) ) -import Distribution.Client.FetchUtils - ( downloadIndex ) -import Distribution.Client.JobControl - ( newParallelJobControl, spawnJob, collectJob ) -import Distribution.Client.Setup - ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags - , UpdateFlags, defaultUpdateFlags - , RepoContext(..) ) -import Distribution.Simple.Setup - ( HaddockFlags, fromFlagOrDefault ) -import Distribution.Simple.Utils - ( die', notice, wrapText, writeFileAtomic, noticeNoWrap ) -import Distribution.Verbosity - ( Verbosity, normal, lessVerbose ) -import Distribution.Client.IndexUtils.Timestamp -import Distribution.Client.IndexUtils - ( updateRepoIndexCache, Index(..), writeIndexTimestamp - , currentIndexTimestamp, indexBaseName ) -import Distribution.Text - ( Text(..), display, simpleParse ) - -import Data.Maybe (fromJust) -import qualified Distribution.Compat.ReadP as ReadP -import qualified Text.PrettyPrint as Disp - -import Control.Monad (mapM, mapM_) -import qualified Data.ByteString.Lazy as BS -import Distribution.Client.GZipUtils (maybeDecompress) -import System.FilePath ((<.>), dropExtension) -import Data.Time (getCurrentTime) -import Distribution.Simple.Command - ( CommandUI(..), usageAlternatives ) -import qualified Distribution.Client.Setup as Client - -import qualified Hackage.Security.Client as Sec - -updateCommand :: CommandUI ( ConfigFlags, ConfigExFlags - , InstallFlags, HaddockFlags ) -updateCommand = Client.installCommand { - commandName = "new-update", - commandSynopsis = "Updates list of known packages.", - commandUsage = usageAlternatives "new-update" [ "[FLAGS] [REPOS]" ], - commandDescription = Just $ \_ -> wrapText $ - "For all known remote repositories, download the package list.", - commandNotes = 
Just $ \pname -> - "REPO has the format [,] where index-state follows\n" - ++ "the same format and syntax that is supported by the --index-state flag.\n\n" - ++ "Examples:\n" - ++ " " ++ pname ++ " new-update\n" - ++ " Download the package list for all known remote repositories.\n\n" - ++ " " ++ pname ++ " new-update hackage.haskell.org,@1474732068\n" - ++ " " ++ pname ++ " new-update hackage.haskell.org,2016-09-24T17:47:48Z\n" - ++ " " ++ pname ++ " new-update hackage.haskell.org,HEAD\n" - ++ " " ++ pname ++ " new-update hackage.haskell.org\n" - ++ " Download hackage.haskell.org at a specific index state.\n\n" - ++ " " ++ pname ++ " new update hackage.haskell.org head.hackage\n" - ++ " Download hackage.haskell.org and head.hackage\n" - ++ " head.hackage must be a known repo-id. E.g. from\n" - ++ " your cabal.project(.local) file.\n\n" - ++ "Note: this command is part of the new project-based system (aka " - ++ "nix-style\nlocal builds). These features are currently in beta. " - ++ "Please see\n" - ++ "http://cabal.readthedocs.io/en/latest/nix-local-build-overview.html " - ++ "for\ndetails and advice on what you can expect to work. 
If you " - ++ "encounter problems\nplease file issues at " - ++ "https://github.com/haskell/cabal/issues and if you\nhave any time " - ++ "to get involved and help with testing, fixing bugs etc then\nthat " - ++ "is very much appreciated.\n" - } - -data UpdateRequest = UpdateRequest - { _updateRequestRepoName :: String - , _updateRequestRepoState :: IndexState - } deriving (Show) - -instance Text UpdateRequest where - disp (UpdateRequest n s) = Disp.text n Disp.<> Disp.char ',' Disp.<> disp s - parse = parseWithState ReadP.+++ parseHEAD - where parseWithState = do - name <- ReadP.many1 (ReadP.satisfy (\c -> c /= ',')) - _ <- ReadP.char ',' - state <- parse - return (UpdateRequest name state) - parseHEAD = do - name <- ReadP.manyTill (ReadP.satisfy (\c -> c /= ',')) ReadP.eof - return (UpdateRequest name IndexStateHead) - -updateAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> GlobalFlags -> IO () -updateAction (configFlags, configExFlags, installFlags, haddockFlags) - extraArgs globalFlags = do - projectConfig <- withProjectOrGlobalConfig verbosity globalConfigFlag - (projectConfig <$> establishProjectBaseContext verbosity cliConfig) - (\globalConfig -> return $ globalConfig <> cliConfig) - - projectConfigWithSolverRepoContext verbosity - (projectConfigShared projectConfig) (projectConfigBuildOnly projectConfig) - $ \repoCtxt -> do - let repos = filter isRepoRemote $ repoContextRepos repoCtxt - repoName = remoteRepoName . 
repoRemote - parseArg :: String -> IO UpdateRequest - parseArg s = case simpleParse s of - Just r -> return r - Nothing -> die' verbosity $ - "'new-update' unable to parse repo: \"" ++ s ++ "\"" - updateRepoRequests <- mapM parseArg extraArgs - - unless (null updateRepoRequests) $ do - let remoteRepoNames = map repoName repos - unknownRepos = [r | (UpdateRequest r _) <- updateRepoRequests - , not (r `elem` remoteRepoNames)] - unless (null unknownRepos) $ - die' verbosity $ "'new-update' repo(s): \"" - ++ intercalate "\", \"" unknownRepos - ++ "\" can not be found in known remote repo(s): " - ++ intercalate ", " remoteRepoNames - - let reposToUpdate :: [(Repo, IndexState)] - reposToUpdate = case updateRepoRequests of - -- If we are not given any specific repository, update all - -- repositories to HEAD. - [] -> map (,IndexStateHead) repos - updateRequests -> let repoMap = [(repoName r, r) | r <- repos] - lookup' k = fromJust (lookup k repoMap) - in [ (lookup' name, state) - | (UpdateRequest name state) <- updateRequests ] - - case reposToUpdate of - [] -> return () - [(remoteRepo, _)] -> - notice verbosity $ "Downloading the latest package list from " - ++ repoName remoteRepo - _ -> notice verbosity . unlines - $ "Downloading the latest package lists from: " - : map (("- " ++) . repoName . fst) reposToUpdate - - jobCtrl <- newParallelJobControl (length reposToUpdate) - mapM_ (spawnJob jobCtrl . 
updateRepo verbosity defaultUpdateFlags repoCtxt) - reposToUpdate - mapM_ (\_ -> collectJob jobCtrl) reposToUpdate - - where - verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - cliConfig = commandLineFlagsToProjectConfig - globalFlags configFlags configExFlags - installFlags haddockFlags - globalConfigFlag = projectConfigConfigFile (projectConfigShared cliConfig) - -updateRepo :: Verbosity -> UpdateFlags -> RepoContext -> (Repo, IndexState) - -> IO () -updateRepo verbosity _updateFlags repoCtxt (repo, indexState) = do - transport <- repoContextGetTransport repoCtxt - case repo of - RepoLocal{..} -> return () - RepoRemote{..} -> do - downloadResult <- downloadIndex transport verbosity - repoRemote repoLocalDir - case downloadResult of - FileAlreadyInCache -> - setModificationTime (indexBaseName repo <.> "tar") - =<< getCurrentTime - FileDownloaded indexPath -> do - writeFileAtomic (dropExtension indexPath) . maybeDecompress - =<< BS.readFile indexPath - updateRepoIndexCache verbosity (RepoIndex repoCtxt repo) - RepoSecure{} -> repoContextWithSecureRepo repoCtxt repo $ \repoSecure -> do - let index = RepoIndex repoCtxt repo - -- NB: This may be a nullTimestamp if we've never updated before - current_ts <- currentIndexTimestamp (lessVerbose verbosity) repoCtxt repo - -- NB: always update the timestamp, even if we didn't actually - -- download anything - writeIndexTimestamp index indexState - ce <- if repoContextIgnoreExpiry repoCtxt - then Just `fmap` getCurrentTime - else return Nothing - updated <- Sec.uncheckClientErrors $ Sec.checkForUpdates repoSecure ce - -- Update cabal's internal index as well so that it's not out of sync - -- (If all access to the cache goes through hackage-security this can go) - case updated of - Sec.NoUpdates -> - setModificationTime (indexBaseName repo <.> "tar") - =<< getCurrentTime - Sec.HasUpdates -> - updateRepoIndexCache verbosity index - -- TODO: This will print multiple times if there are multiple - -- 
repositories: main problem is we don't have a way of updating - -- a specific repo. Once we implement that, update this. - when (current_ts /= nullTimestamp) $ - noticeNoWrap verbosity $ - "To revert to previous state run:\n" ++ - " cabal new-update '" ++ remoteRepoName (repoRemote repo) - ++ "," ++ display current_ts ++ "'\n" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Directory.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Directory.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Directory.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Directory.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -{-# LANGUAGE CPP #-} -module Distribution.Client.Compat.Directory (setModificationTime) where - -#if MIN_VERSION_directory(1,2,3) -import System.Directory (setModificationTime) -#else - -import Data.Time.Clock (UTCTime) - -setModificationTime :: FilePath -> UTCTime -> IO () -setModificationTime _fp _t = return () - -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Compat/ExecutablePath.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Compat/ExecutablePath.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Compat/ExecutablePath.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Compat/ExecutablePath.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,164 +0,0 @@ -{-# LANGUAGE ForeignFunctionInterface #-} -{-# LANGUAGE CPP #-} - --- Copied verbatim from base-4.6.0.0. We can't simply import --- System.Environment.getExecutablePath because we need compatibility with older --- GHCs. 
- -module Distribution.Client.Compat.ExecutablePath ( getExecutablePath ) where - --- The imports are purposely kept completely disjoint to prevent edits --- to one OS implementation from breaking another. - -#if defined(darwin_HOST_OS) -import Data.Word -import Foreign.C -import Foreign.Marshal.Alloc -import Foreign.Ptr -import Foreign.Storable -import System.Posix.Internals -#elif defined(linux_HOST_OS) -import Foreign.C -import Foreign.Marshal.Array -import System.Posix.Internals -#elif defined(mingw32_HOST_OS) -import Data.Word -import Foreign.C -import Foreign.Marshal.Array -import Foreign.Ptr -import System.Posix.Internals -#else -import Foreign.C -import Foreign.Marshal.Alloc -import Foreign.Ptr -import Foreign.Storable -import System.Posix.Internals -#endif - --- The exported function is defined outside any if-guard to make sure --- every OS implements it with the same type. - --- | Returns the absolute pathname of the current executable. --- --- Note that for scripts and interactive sessions, this is the path to --- the interpreter (e.g. ghci.) --- --- /Since: 4.6.0.0/ -getExecutablePath :: IO FilePath - --------------------------------------------------------------------------------- --- Mac OS X - -#if defined(darwin_HOST_OS) - -type UInt32 = Word32 - -foreign import ccall unsafe "mach-o/dyld.h _NSGetExecutablePath" - c__NSGetExecutablePath :: CString -> Ptr UInt32 -> IO CInt - --- | Returns the path of the main executable. The path may be a --- symbolic link and not the real file. 
--- --- See dyld(3) -_NSGetExecutablePath :: IO FilePath -_NSGetExecutablePath = - allocaBytes 1024 $ \ buf -> -- PATH_MAX is 1024 on OS X - alloca $ \ bufsize -> do - poke bufsize 1024 - status <- c__NSGetExecutablePath buf bufsize - if status == 0 - then peekFilePath buf - else do reqBufsize <- fromIntegral `fmap` peek bufsize - allocaBytes reqBufsize $ \ newBuf -> do - status2 <- c__NSGetExecutablePath newBuf bufsize - if status2 == 0 - then peekFilePath newBuf - else error "_NSGetExecutablePath: buffer too small" - -foreign import ccall unsafe "stdlib.h realpath" - c_realpath :: CString -> CString -> IO CString - --- | Resolves all symbolic links, extra \/ characters, and references --- to \/.\/ and \/..\/. Returns an absolute pathname. --- --- See realpath(3) -realpath :: FilePath -> IO FilePath -realpath path = - withFilePath path $ \ fileName -> - allocaBytes 1024 $ \ resolvedName -> do - _ <- throwErrnoIfNull "realpath" $ c_realpath fileName resolvedName - peekFilePath resolvedName - -getExecutablePath = _NSGetExecutablePath >>= realpath - --------------------------------------------------------------------------------- --- Linux - -#elif defined(linux_HOST_OS) - -foreign import ccall unsafe "readlink" - c_readlink :: CString -> CString -> CSize -> IO CInt - --- | Reads the @FilePath@ pointed to by the symbolic link and returns --- it. 
--- --- See readlink(2) -readSymbolicLink :: FilePath -> IO FilePath -readSymbolicLink file = - allocaArray0 4096 $ \buf -> do - withFilePath file $ \s -> do - len <- throwErrnoPathIfMinus1 "readSymbolicLink" file $ - c_readlink s buf 4096 - peekFilePathLen (buf,fromIntegral len) - -getExecutablePath = readSymbolicLink $ "/proc/self/exe" - --------------------------------------------------------------------------------- --- Windows - -#elif defined(mingw32_HOST_OS) - -# if defined(i386_HOST_ARCH) -# define WINDOWS_CCONV stdcall -# elif defined(x86_64_HOST_ARCH) -# define WINDOWS_CCONV ccall -# else -# error Unknown mingw32 arch -# endif - -foreign import WINDOWS_CCONV unsafe "windows.h GetModuleFileNameW" - c_GetModuleFileName :: Ptr () -> CWString -> Word32 -> IO Word32 - -getExecutablePath = go 2048 -- plenty, PATH_MAX is 512 under Win32 - where - go size = allocaArray (fromIntegral size) $ \ buf -> do - ret <- c_GetModuleFileName nullPtr buf size - case ret of - 0 -> error "getExecutablePath: GetModuleFileNameW returned an error" - _ | ret < size -> peekFilePath buf - | otherwise -> go (size * 2) - --------------------------------------------------------------------------------- --- Fallback to argv[0] - -#else - -foreign import ccall unsafe "getFullProgArgv" - c_getFullProgArgv :: Ptr CInt -> Ptr (Ptr CString) -> IO () - -getExecutablePath = - alloca $ \ p_argc -> - alloca $ \ p_argv -> do - c_getFullProgArgv p_argc p_argv - argc <- peek p_argc - if argc > 0 - -- If argc > 0 then argv[0] is guaranteed by the standard - -- to be a pointer to a null-terminated string. 
- then peek p_argv >>= peek >>= peekFilePath - else error $ "getExecutablePath: " ++ msg - where msg = "no OS specific implementation and program name couldn't be " ++ - "found in argv" - --------------------------------------------------------------------------------- - -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Compat/FileLock.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Compat/FileLock.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Compat/FileLock.hsc 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Compat/FileLock.hsc 1970-01-01 00:00:00.000000000 +0000 @@ -1,201 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE InterruptibleFFI #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE MultiWayIf #-} -{-# LANGUAGE DeriveDataTypeable #-} - --- | This compat module can be removed once base-4.10 (ghc-8.2) is the minimum --- required version. Though note that the locking functionality is not in --- public modules in base-4.10, just in the "GHC.IO.Handle.Lock" module. -module Distribution.Client.Compat.FileLock ( - FileLockingNotSupported(..) - , LockMode(..) - , hLock - , hTryLock - ) where - -#if MIN_VERSION_base(4,10,0) - -import GHC.IO.Handle.Lock - -#else - --- The remainder of this file is a modified copy --- of GHC.IO.Handle.Lock from ghc-8.2.x --- --- The modifications were just to the imports and the CPP, since we do not have --- access to the HAVE_FLOCK from the ./configure script. We approximate the --- lack of HAVE_FLOCK with defined(solaris2_HOST_OS) instead since that is the --- only known major Unix platform lacking flock(). 
- -import Control.Exception (Exception) -import Data.Typeable - -#if defined(solaris2_HOST_OS) - -import Control.Exception (throwIO) -import System.IO (Handle) - -#else - -import Data.Bits -import Data.Function -import Control.Concurrent.MVar - -import Foreign.C.Error -import Foreign.C.Types - -import GHC.IO.Handle.Types -import GHC.IO.FD -import GHC.IO.Exception - -#if defined(mingw32_HOST_OS) - -#if defined(i386_HOST_ARCH) -## define WINDOWS_CCONV stdcall -#elif defined(x86_64_HOST_ARCH) -## define WINDOWS_CCONV ccall -#else -# error Unknown mingw32 arch -#endif - -#include - -import Foreign.Marshal.Alloc -import Foreign.Marshal.Utils -import Foreign.Ptr -import GHC.Windows - -#else /* !defined(mingw32_HOST_OS), so assume unix with flock() */ - -#include - -#endif /* !defined(mingw32_HOST_OS) */ - -#endif /* !defined(solaris2_HOST_OS) */ - - --- | Exception thrown by 'hLock' on non-Windows platforms that don't support --- 'flock'. -data FileLockingNotSupported = FileLockingNotSupported - deriving (Typeable, Show) - -instance Exception FileLockingNotSupported - - --- | Indicates a mode in which a file should be locked. -data LockMode = SharedLock | ExclusiveLock - --- | If a 'Handle' references a file descriptor, attempt to lock contents of the --- underlying file in appropriate mode. If the file is already locked in --- incompatible mode, this function blocks until the lock is established. The --- lock is automatically released upon closing a 'Handle'. --- --- Things to be aware of: --- --- 1) This function may block inside a C call. If it does, in order to be able --- to interrupt it with asynchronous exceptions and/or for other threads to --- continue working, you MUST use threaded version of the runtime system. --- --- 2) The implementation uses 'LockFileEx' on Windows and 'flock' otherwise, --- hence all of their caveats also apply here. --- --- 3) On non-Windows plaftorms that don't support 'flock' (e.g. 
Solaris) this --- function throws 'FileLockingNotImplemented'. We deliberately choose to not --- provide fcntl based locking instead because of its broken semantics. --- --- @since 4.10.0.0 -hLock :: Handle -> LockMode -> IO () -hLock h mode = lockImpl h "hLock" mode True >> return () - --- | Non-blocking version of 'hLock'. --- --- @since 4.10.0.0 -hTryLock :: Handle -> LockMode -> IO Bool -hTryLock h mode = lockImpl h "hTryLock" mode False - ----------------------------------------- - -#if defined(solaris2_HOST_OS) - --- | No-op implementation. -lockImpl :: Handle -> String -> LockMode -> Bool -> IO Bool -lockImpl _ _ _ _ = throwIO FileLockingNotSupported - -#else /* !defined(solaris2_HOST_OS) */ - -#if defined(mingw32_HOST_OS) - -lockImpl :: Handle -> String -> LockMode -> Bool -> IO Bool -lockImpl h ctx mode block = do - FD{fdFD = fd} <- handleToFd h - wh <- throwErrnoIf (== iNVALID_HANDLE_VALUE) ctx $ c_get_osfhandle fd - allocaBytes sizeof_OVERLAPPED $ \ovrlpd -> do - fillBytes ovrlpd (fromIntegral sizeof_OVERLAPPED) 0 - let flags = cmode .|. (if block then 0 else #{const LOCKFILE_FAIL_IMMEDIATELY}) - -- We want to lock the whole file without looking up its size to be - -- consistent with what flock does. According to documentation of LockFileEx - -- "locking a region that goes beyond the current end-of-file position is - -- not an error", however e.g. Windows 10 doesn't accept maximum possible - -- value (a pair of MAXDWORDs) for mysterious reasons. Work around that by - -- trying 2^32-1. 
- fix $ \retry -> c_LockFileEx wh flags 0 0xffffffff 0x0 ovrlpd >>= \case - True -> return True - False -> getLastError >>= \err -> if - | not block && err == #{const ERROR_LOCK_VIOLATION} -> return False - | err == #{const ERROR_OPERATION_ABORTED} -> retry - | otherwise -> failWith ctx err - where - sizeof_OVERLAPPED = #{size OVERLAPPED} - - cmode = case mode of - SharedLock -> 0 - ExclusiveLock -> #{const LOCKFILE_EXCLUSIVE_LOCK} - --- https://msdn.microsoft.com/en-us/library/aa297958.aspx -foreign import ccall unsafe "_get_osfhandle" - c_get_osfhandle :: CInt -> IO HANDLE - --- https://msdn.microsoft.com/en-us/library/windows/desktop/aa365203.aspx -foreign import WINDOWS_CCONV interruptible "LockFileEx" - c_LockFileEx :: HANDLE -> DWORD -> DWORD -> DWORD -> DWORD -> Ptr () -> IO BOOL - -#else /* !defined(mingw32_HOST_OS), so assume unix with flock() */ - -lockImpl :: Handle -> String -> LockMode -> Bool -> IO Bool -lockImpl h ctx mode block = do - FD{fdFD = fd} <- handleToFd h - let flags = cmode .|. (if block then 0 else #{const LOCK_NB}) - fix $ \retry -> c_flock fd flags >>= \case - 0 -> return True - _ -> getErrno >>= \errno -> if - | not block && errno == eWOULDBLOCK -> return False - | errno == eINTR -> retry - | otherwise -> ioException $ errnoToIOError ctx errno (Just h) Nothing - where - cmode = case mode of - SharedLock -> #{const LOCK_SH} - ExclusiveLock -> #{const LOCK_EX} - -foreign import ccall interruptible "flock" - c_flock :: CInt -> CInt -> IO CInt - -#endif /* !defined(mingw32_HOST_OS) */ - --- | Turn an existing Handle into a file descriptor. This function throws an --- IOError if the Handle does not reference a file descriptor. 
-handleToFd :: Handle -> IO FD -handleToFd h = case h of - FileHandle _ mv -> do - Handle__{haDevice = dev} <- readMVar mv - case cast dev of - Just fd -> return fd - Nothing -> throwErr "not a file descriptor" - DuplexHandle{} -> throwErr "not a file handle" - where - throwErr msg = ioException $ IOError (Just h) - InappropriateType "handleToFd" msg Nothing Nothing - -#endif /* defined(solaris2_HOST_OS) */ - -#endif /* MIN_VERSION_base */ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Compat/FilePerms.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Compat/FilePerms.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Compat/FilePerms.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Compat/FilePerms.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,35 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# OPTIONS_HADDOCK hide #-} -module Distribution.Client.Compat.FilePerms ( - setFileOrdinary, - setFileExecutable, - setFileHidden, - ) where - -#ifndef mingw32_HOST_OS -import System.Posix.Types - ( FileMode ) -import System.Posix.Internals - ( c_chmod ) -import Foreign.C - ( withCString - , throwErrnoPathIfMinus1_ ) -#else -import System.Win32.File (setFileAttributes, fILE_ATTRIBUTE_HIDDEN) -#endif /* mingw32_HOST_OS */ - -setFileHidden, setFileOrdinary, setFileExecutable :: FilePath -> IO () -#ifndef mingw32_HOST_OS -setFileOrdinary path = setFileMode path 0o644 -- file perms -rw-r--r-- -setFileExecutable path = setFileMode path 0o755 -- file perms -rwxr-xr-x -setFileHidden _ = return () - -setFileMode :: FilePath -> FileMode -> IO () -setFileMode name m = - withCString name $ \s -> - throwErrnoPathIfMinus1_ "setFileMode" name (c_chmod s m) -#else -setFileOrdinary _ = return () -setFileExecutable _ = return () -setFileHidden path = setFileAttributes path 
fILE_ATTRIBUTE_HIDDEN -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Prelude.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Prelude.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Prelude.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Prelude.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ --- to suppress WARNING in "Distribution.Compat.Prelude.Internal" -{-# OPTIONS_GHC -fno-warn-deprecations #-} - --- | This module does two things: --- --- * Acts as a compatiblity layer, like @base-compat@. --- --- * Provides commonly used imports. --- --- This module is a superset of "Distribution.Compat.Prelude" (which --- this module re-exports) --- -module Distribution.Client.Compat.Prelude - ( module Distribution.Compat.Prelude.Internal - , Prelude.IO - , readMaybe - ) where - -import Prelude (IO) -import Distribution.Compat.Prelude.Internal hiding (IO) -import Text.Read - ( readMaybe ) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Process.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Process.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Process.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Process.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,43 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Compat.Process --- Copyright : (c) 2013 Liu Hao, Brent Yorgey --- License : BSD-style (see the file LICENSE) --- --- Maintainer : cabal-devel@haskell.org --- Stability : 
provisional --- Portability : portable --- --- Cross-platform utilities for invoking processes. --- ------------------------------------------------------------------------------ - -module Distribution.Client.Compat.Process ( - readProcessWithExitCode -) where - -import Control.Exception (catch, throw) -import System.Exit (ExitCode (ExitFailure)) -import System.IO.Error (isDoesNotExistError, isPermissionError) -import qualified System.Process as P - --- | @readProcessWithExitCode@ creates an external process, reads its --- standard output and standard error strictly, waits until the --- process terminates, and then returns the @ExitCode@ of the --- process, the standard output, and the standard error. --- --- See the documentation of the version from @System.Process@ for --- more information. --- --- The version from @System.Process@ behaves inconsistently across --- platforms when an executable with the given name is not found: in --- some cases it returns an @ExitFailure@, in others it throws an --- exception. This variant catches \"does not exist\" and --- \"permission denied\" exceptions and turns them into --- @ExitFailure@s. 
-readProcessWithExitCode :: FilePath -> [String] -> String -> IO (ExitCode, String, String) -readProcessWithExitCode cmd args input = - P.readProcessWithExitCode cmd args input - `catch` \e -> if isDoesNotExistError e || isPermissionError e - then return (ExitFailure 127, "", "") - else throw e diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Semaphore.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Semaphore.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Semaphore.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Compat/Semaphore.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,104 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# OPTIONS_GHC -funbox-strict-fields #-} -module Distribution.Client.Compat.Semaphore - ( QSem - , newQSem - , waitQSem - , signalQSem - ) where - -import Control.Concurrent.STM (TVar, atomically, newTVar, readTVar, retry, - writeTVar) -import Control.Exception (mask_, onException) -import Control.Monad (join, unless) -import Data.Typeable (Typeable) - --- | 'QSem' is a quantity semaphore in which the resource is aqcuired --- and released in units of one. It provides guaranteed FIFO ordering --- for satisfying blocked `waitQSem` calls. --- -data QSem = QSem !(TVar Int) !(TVar [TVar Bool]) !(TVar [TVar Bool]) - deriving (Eq, Typeable) - -newQSem :: Int -> IO QSem -newQSem i = atomically $ do - q <- newTVar i - b1 <- newTVar [] - b2 <- newTVar [] - return (QSem q b1 b2) - -waitQSem :: QSem -> IO () -waitQSem s@(QSem q _b1 b2) = - mask_ $ join $ atomically $ do - -- join, because if we need to block, we have to add a TVar to - -- the block queue. - -- mask_, because we need a chance to set up an exception handler - -- after the join returns. 
- v <- readTVar q - if v == 0 - then do b <- newTVar False - ys <- readTVar b2 - writeTVar b2 (b:ys) - return (wait b) - else do writeTVar q $! v - 1 - return (return ()) - where - -- - -- very careful here: if we receive an exception, then we need to - -- (a) write True into the TVar, so that another signalQSem doesn't - -- try to wake up this thread, and - -- (b) if the TVar is *already* True, then we need to do another - -- signalQSem to avoid losing a unit of the resource. - -- - -- The 'wake' function does both (a) and (b), so we can just call - -- it here. - -- - wait t = - flip onException (wake s t) $ - atomically $ do - b <- readTVar t - unless b retry - - -wake :: QSem -> TVar Bool -> IO () -wake s x = join $ atomically $ do - b <- readTVar x - if b then return (signalQSem s) - else do writeTVar x True - return (return ()) - -{- - property we want: - - bracket waitQSem (\_ -> signalQSem) (\_ -> ...) - - never loses a unit of the resource. --} - -signalQSem :: QSem -> IO () -signalQSem s@(QSem q b1 b2) = - mask_ $ join $ atomically $ do - -- join, so we don't force the reverse inside the txn - -- mask_ is needed so we don't lose a wakeup - v <- readTVar q - if v /= 0 - then do writeTVar q $! 
v + 1 - return (return ()) - else do xs <- readTVar b1 - checkwake1 xs - where - checkwake1 [] = do - ys <- readTVar b2 - checkwake2 ys - checkwake1 (x:xs) = do - writeTVar b1 xs - return (wake s x) - - checkwake2 [] = do - writeTVar q 1 - return (return ()) - checkwake2 ys = do - let (z:zs) = reverse ys - writeTVar b1 zs - writeTVar b2 [] - return (wake s z) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Config.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Config.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Config.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Config.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1247 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Config --- Copyright : (c) David Himmelstrup 2005 --- License : BSD-like --- --- Maintainer : lemmih@gmail.com --- Stability : provisional --- Portability : portable --- --- Utilities for handling saved state such as known packages, known servers and --- downloaded packages. 
------------------------------------------------------------------------------ -module Distribution.Client.Config ( - SavedConfig(..), - loadConfig, - getConfigFilePath, - - showConfig, - showConfigWithComments, - parseConfig, - - getCabalDir, - defaultConfigFile, - defaultCacheDir, - defaultCompiler, - defaultLogsDir, - defaultUserInstall, - - baseSavedConfig, - commentSavedConfig, - initialSavedConfig, - configFieldDescriptions, - haddockFlagsFields, - installDirsFields, - withProgramsFields, - withProgramOptionsFields, - userConfigDiff, - userConfigUpdate, - createDefaultConfigFile, - - remoteRepoFields - ) where - -import Distribution.Client.Types - ( RemoteRepo(..), Username(..), Password(..), emptyRemoteRepo - , AllowOlder(..), AllowNewer(..), RelaxDeps(..), isRelaxDeps - ) -import Distribution.Client.BuildReports.Types - ( ReportLevel(..) ) -import Distribution.Client.Setup - ( GlobalFlags(..), globalCommand, defaultGlobalFlags - , ConfigExFlags(..), configureExOptions, defaultConfigExFlags - , InstallFlags(..), installOptions, defaultInstallFlags - , UploadFlags(..), uploadCommand - , ReportFlags(..), reportCommand - , showRepo, parseRepo, readRepo ) -import Distribution.Utils.NubList - ( NubList, fromNubList, toNubList, overNubList ) - -import Distribution.Simple.Compiler - ( DebugInfoLevel(..), OptimisationLevel(..) ) -import Distribution.Simple.Setup - ( ConfigFlags(..), configureOptions, defaultConfigFlags - , HaddockFlags(..), haddockOptions, defaultHaddockFlags - , installDirsOptions, optionDistPref - , programDbPaths', programDbOptions - , Flag(..), toFlag, flagToMaybe, fromFlagOrDefault ) -import Distribution.Simple.InstallDirs - ( InstallDirs(..), defaultInstallDirs - , PathTemplate, toPathTemplate ) -import Distribution.ParseUtils - ( FieldDescr(..), liftField - , ParseResult(..), PError(..), PWarning(..) 
- , locatedErrorMsg, showPWarning - , readFields, warning, lineNo - , simpleField, listField, spaceListField - , parseFilePathQ, parseOptCommaList, parseTokenQ ) -import Distribution.Client.ParseUtils - ( parseFields, ppFields, ppSection ) -import Distribution.Client.HttpUtils - ( isOldHackageURI ) -import qualified Distribution.ParseUtils as ParseUtils - ( Field(..) ) -import qualified Distribution.Text as Text - ( Text(..), display ) -import Distribution.Simple.Command - ( CommandUI(commandOptions), commandDefaultFlags, ShowOrParseArgs(..) - , viewAsFieldDescr ) -import Distribution.Simple.Program - ( defaultProgramDb ) -import Distribution.Simple.Utils - ( die', notice, warn, lowercase, cabalVersion ) -import Distribution.Compiler - ( CompilerFlavor(..), defaultCompilerFlavor ) -import Distribution.Verbosity - ( Verbosity, normal ) - -import Distribution.Solver.Types.ConstraintSource - -import Data.List - ( partition, find, foldl', nubBy ) -import Data.Maybe - ( fromMaybe ) -import Control.Monad - ( when, unless, foldM, liftM ) -import qualified Distribution.Compat.ReadP as Parse - ( (<++), option ) -import Distribution.Compat.Semigroup -import qualified Text.PrettyPrint as Disp - ( render, text, empty ) -import Text.PrettyPrint - ( ($+$) ) -import Text.PrettyPrint.HughesPJ - ( text, Doc ) -import System.Directory - ( createDirectoryIfMissing, getAppUserDataDirectory, renameFile ) -import Network.URI - ( URI(..), URIAuth(..), parseURI ) -import System.FilePath - ( (<.>), (), takeDirectory ) -import System.IO.Error - ( isDoesNotExistError ) -import Distribution.Compat.Environment - ( getEnvironment, lookupEnv ) -import Distribution.Compat.Exception - ( catchIO ) -import qualified Paths_cabal_install - ( version ) -import Data.Version - ( showVersion ) -import Data.Char - ( isSpace ) -import qualified Data.Map as M -import Data.Function - ( on ) -import GHC.Generics ( Generic ) - --- --- * Configuration saved in the config file --- - -data SavedConfig = 
SavedConfig { - savedGlobalFlags :: GlobalFlags, - savedInstallFlags :: InstallFlags, - savedConfigureFlags :: ConfigFlags, - savedConfigureExFlags :: ConfigExFlags, - savedUserInstallDirs :: InstallDirs (Flag PathTemplate), - savedGlobalInstallDirs :: InstallDirs (Flag PathTemplate), - savedUploadFlags :: UploadFlags, - savedReportFlags :: ReportFlags, - savedHaddockFlags :: HaddockFlags - } deriving Generic - -instance Monoid SavedConfig where - mempty = gmempty - mappend = (<>) - -instance Semigroup SavedConfig where - a <> b = SavedConfig { - savedGlobalFlags = combinedSavedGlobalFlags, - savedInstallFlags = combinedSavedInstallFlags, - savedConfigureFlags = combinedSavedConfigureFlags, - savedConfigureExFlags = combinedSavedConfigureExFlags, - savedUserInstallDirs = combinedSavedUserInstallDirs, - savedGlobalInstallDirs = combinedSavedGlobalInstallDirs, - savedUploadFlags = combinedSavedUploadFlags, - savedReportFlags = combinedSavedReportFlags, - savedHaddockFlags = combinedSavedHaddockFlags - } - where - -- This is ugly, but necessary. If we're mappending two config files, we - -- want the values of the *non-empty* list fields from the second one to - -- *override* the corresponding values from the first one. Default - -- behaviour (concatenation) is confusing and makes some use cases (see - -- #1884) impossible. - -- - -- However, we also want to allow specifying multiple values for a list - -- field in a *single* config file. For example, we want the following to - -- continue to work: - -- - -- remote-repo: hackage.haskell.org:http://hackage.haskell.org/ - -- remote-repo: private-collection:http://hackage.local/ - -- - -- So we can't just wrap the list fields inside Flags; we have to do some - -- special-casing just for SavedConfig. - - -- NB: the signature prevents us from using 'combine' on lists. - combine' :: (SavedConfig -> flags) -> (flags -> Flag a) -> Flag a - combine' field subfield = - (subfield . field $ a) `mappend` (subfield . 
field $ b) - - combineMonoid :: Monoid mon => (SavedConfig -> flags) -> (flags -> mon) - -> mon - combineMonoid field subfield = - (subfield . field $ a) `mappend` (subfield . field $ b) - - lastNonEmpty' :: (SavedConfig -> flags) -> (flags -> [a]) -> [a] - lastNonEmpty' field subfield = - let a' = subfield . field $ a - b' = subfield . field $ b - in case b' of [] -> a' - _ -> b' - - lastNonMempty' :: (Eq a, Monoid a) => (SavedConfig -> flags) -> (flags -> a) -> a - lastNonMempty' field subfield = - let a' = subfield . field $ a - b' = subfield . field $ b - in if b' == mempty then a' else b' - - lastNonEmptyNL' :: (SavedConfig -> flags) -> (flags -> NubList a) - -> NubList a - lastNonEmptyNL' field subfield = - let a' = subfield . field $ a - b' = subfield . field $ b - in case fromNubList b' of [] -> a' - _ -> b' - - combinedSavedGlobalFlags = GlobalFlags { - globalVersion = combine globalVersion, - globalNumericVersion = combine globalNumericVersion, - globalConfigFile = combine globalConfigFile, - globalSandboxConfigFile = combine globalSandboxConfigFile, - globalConstraintsFile = combine globalConstraintsFile, - globalRemoteRepos = lastNonEmptyNL globalRemoteRepos, - globalCacheDir = combine globalCacheDir, - globalLocalRepos = lastNonEmptyNL globalLocalRepos, - globalLogsDir = combine globalLogsDir, - globalWorldFile = combine globalWorldFile, - globalRequireSandbox = combine globalRequireSandbox, - globalIgnoreSandbox = combine globalIgnoreSandbox, - globalIgnoreExpiry = combine globalIgnoreExpiry, - globalHttpTransport = combine globalHttpTransport, - globalNix = combine globalNix, - globalStoreDir = combine globalStoreDir, - globalProgPathExtra = lastNonEmptyNL globalProgPathExtra - } - where - combine = combine' savedGlobalFlags - lastNonEmptyNL = lastNonEmptyNL' savedGlobalFlags - - combinedSavedInstallFlags = InstallFlags { - installDocumentation = combine installDocumentation, - installHaddockIndex = combine installHaddockIndex, - installDryRun = 
combine installDryRun, - installDest = combine installDest, - installMaxBackjumps = combine installMaxBackjumps, - installReorderGoals = combine installReorderGoals, - installCountConflicts = combine installCountConflicts, - installIndependentGoals = combine installIndependentGoals, - installShadowPkgs = combine installShadowPkgs, - installStrongFlags = combine installStrongFlags, - installAllowBootLibInstalls = combine installAllowBootLibInstalls, - installReinstall = combine installReinstall, - installAvoidReinstalls = combine installAvoidReinstalls, - installOverrideReinstall = combine installOverrideReinstall, - installUpgradeDeps = combine installUpgradeDeps, - installOnly = combine installOnly, - installOnlyDeps = combine installOnlyDeps, - installIndexState = combine installIndexState, - installRootCmd = combine installRootCmd, - installSummaryFile = lastNonEmptyNL installSummaryFile, - installLogFile = combine installLogFile, - installBuildReports = combine installBuildReports, - installReportPlanningFailure = combine installReportPlanningFailure, - installSymlinkBinDir = combine installSymlinkBinDir, - installPerComponent = combine installPerComponent, - installOneShot = combine installOneShot, - installNumJobs = combine installNumJobs, - installKeepGoing = combine installKeepGoing, - installRunTests = combine installRunTests, - installOfflineMode = combine installOfflineMode, - installProjectFileName = combine installProjectFileName - } - where - combine = combine' savedInstallFlags - lastNonEmptyNL = lastNonEmptyNL' savedInstallFlags - - combinedSavedConfigureFlags = ConfigFlags { - configArgs = lastNonEmpty configArgs, - configPrograms_ = configPrograms_ . 
savedConfigureFlags $ b, - -- TODO: NubListify - configProgramPaths = lastNonEmpty configProgramPaths, - -- TODO: NubListify - configProgramArgs = lastNonEmpty configProgramArgs, - configProgramPathExtra = lastNonEmptyNL configProgramPathExtra, - configInstantiateWith = lastNonEmpty configInstantiateWith, - configHcFlavor = combine configHcFlavor, - configHcPath = combine configHcPath, - configHcPkg = combine configHcPkg, - configVanillaLib = combine configVanillaLib, - configProfLib = combine configProfLib, - configProf = combine configProf, - configSharedLib = combine configSharedLib, - configStaticLib = combine configStaticLib, - configDynExe = combine configDynExe, - configProfExe = combine configProfExe, - configProfDetail = combine configProfDetail, - configProfLibDetail = combine configProfLibDetail, - -- TODO: NubListify - configConfigureArgs = lastNonEmpty configConfigureArgs, - configOptimization = combine configOptimization, - configDebugInfo = combine configDebugInfo, - configProgPrefix = combine configProgPrefix, - configProgSuffix = combine configProgSuffix, - -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. - configInstallDirs = - (configInstallDirs . savedConfigureFlags $ a) - `mappend` (configInstallDirs . 
savedConfigureFlags $ b), - configScratchDir = combine configScratchDir, - -- TODO: NubListify - configExtraLibDirs = lastNonEmpty configExtraLibDirs, - -- TODO: NubListify - configExtraFrameworkDirs = lastNonEmpty configExtraFrameworkDirs, - -- TODO: NubListify - configExtraIncludeDirs = lastNonEmpty configExtraIncludeDirs, - configDeterministic = combine configDeterministic, - configIPID = combine configIPID, - configCID = combine configCID, - configDistPref = combine configDistPref, - configCabalFilePath = combine configCabalFilePath, - configVerbosity = combine configVerbosity, - configUserInstall = combine configUserInstall, - -- TODO: NubListify - configPackageDBs = lastNonEmpty configPackageDBs, - configGHCiLib = combine configGHCiLib, - configSplitSections = combine configSplitSections, - configSplitObjs = combine configSplitObjs, - configStripExes = combine configStripExes, - configStripLibs = combine configStripLibs, - -- TODO: NubListify - configConstraints = lastNonEmpty configConstraints, - -- TODO: NubListify - configDependencies = lastNonEmpty configDependencies, - -- TODO: NubListify - configConfigurationsFlags = lastNonMempty configConfigurationsFlags, - configTests = combine configTests, - configBenchmarks = combine configBenchmarks, - configCoverage = combine configCoverage, - configLibCoverage = combine configLibCoverage, - configExactConfiguration = combine configExactConfiguration, - configFlagError = combine configFlagError, - configRelocatable = combine configRelocatable, - configUseResponseFiles = combine configUseResponseFiles - } - where - combine = combine' savedConfigureFlags - lastNonEmpty = lastNonEmpty' savedConfigureFlags - lastNonEmptyNL = lastNonEmptyNL' savedConfigureFlags - lastNonMempty = lastNonMempty' savedConfigureFlags - - combinedSavedConfigureExFlags = ConfigExFlags { - configCabalVersion = combine configCabalVersion, - -- TODO: NubListify - configExConstraints = lastNonEmpty configExConstraints, - -- TODO: NubListify - 
configPreferences = lastNonEmpty configPreferences, - configSolver = combine configSolver, - configAllowNewer = combineMonoid savedConfigureExFlags configAllowNewer, - configAllowOlder = combineMonoid savedConfigureExFlags configAllowOlder - } - where - combine = combine' savedConfigureExFlags - lastNonEmpty = lastNonEmpty' savedConfigureExFlags - - -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. - combinedSavedUserInstallDirs = savedUserInstallDirs a - `mappend` savedUserInstallDirs b - - -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. - combinedSavedGlobalInstallDirs = savedGlobalInstallDirs a - `mappend` savedGlobalInstallDirs b - - combinedSavedUploadFlags = UploadFlags { - uploadCandidate = combine uploadCandidate, - uploadDoc = combine uploadDoc, - uploadUsername = combine uploadUsername, - uploadPassword = combine uploadPassword, - uploadPasswordCmd = combine uploadPasswordCmd, - uploadVerbosity = combine uploadVerbosity - } - where - combine = combine' savedUploadFlags - - combinedSavedReportFlags = ReportFlags { - reportUsername = combine reportUsername, - reportPassword = combine reportPassword, - reportVerbosity = combine reportVerbosity - } - where - combine = combine' savedReportFlags - - combinedSavedHaddockFlags = HaddockFlags { - -- TODO: NubListify - haddockProgramPaths = lastNonEmpty haddockProgramPaths, - -- TODO: NubListify - haddockProgramArgs = lastNonEmpty haddockProgramArgs, - haddockHoogle = combine haddockHoogle, - haddockHtml = combine haddockHtml, - haddockHtmlLocation = combine haddockHtmlLocation, - haddockForHackage = combine haddockForHackage, - haddockExecutables = combine haddockExecutables, - haddockTestSuites = combine haddockTestSuites, - haddockBenchmarks = combine haddockBenchmarks, - haddockForeignLibs = combine haddockForeignLibs, - haddockInternal = combine haddockInternal, - haddockCss = combine haddockCss, - haddockLinkedSource = combine haddockLinkedSource, - haddockQuickJump = combine 
haddockQuickJump, - haddockHscolourCss = combine haddockHscolourCss, - haddockContents = combine haddockContents, - haddockDistPref = combine haddockDistPref, - haddockKeepTempFiles = combine haddockKeepTempFiles, - haddockVerbosity = combine haddockVerbosity, - haddockCabalFilePath = combine haddockCabalFilePath, - haddockArgs = lastNonEmpty haddockArgs - } - where - combine = combine' savedHaddockFlags - lastNonEmpty = lastNonEmpty' savedHaddockFlags - - --- --- * Default config --- - --- | These are the absolute basic defaults. The fields that must be --- initialised. When we load the config from the file we layer the loaded --- values over these ones, so any missing fields in the file take their values --- from here. --- -baseSavedConfig :: IO SavedConfig -baseSavedConfig = do - userPrefix <- getCabalDir - cacheDir <- defaultCacheDir - logsDir <- defaultLogsDir - worldFile <- defaultWorldFile - return mempty { - savedConfigureFlags = mempty { - configHcFlavor = toFlag defaultCompiler, - configUserInstall = toFlag defaultUserInstall, - configVerbosity = toFlag normal - }, - savedUserInstallDirs = mempty { - prefix = toFlag (toPathTemplate userPrefix) - }, - savedGlobalFlags = mempty { - globalCacheDir = toFlag cacheDir, - globalLogsDir = toFlag logsDir, - globalWorldFile = toFlag worldFile - } - } - --- | This is the initial configuration that we write out to to the config file --- if the file does not exist (or the config we use if the file cannot be read --- for some other reason). When the config gets loaded it gets layered on top --- of 'baseSavedConfig' so we do not need to include it into the initial --- values we save into the config file. 
--- -initialSavedConfig :: IO SavedConfig -initialSavedConfig = do - cacheDir <- defaultCacheDir - logsDir <- defaultLogsDir - worldFile <- defaultWorldFile - extraPath <- defaultExtraPath - symlinkPath <- defaultSymlinkPath - return mempty { - savedGlobalFlags = mempty { - globalCacheDir = toFlag cacheDir, - globalRemoteRepos = toNubList [defaultRemoteRepo], - globalWorldFile = toFlag worldFile - }, - savedConfigureFlags = mempty { - configProgramPathExtra = toNubList extraPath - }, - savedInstallFlags = mempty { - installSummaryFile = toNubList [toPathTemplate (logsDir "build.log")], - installBuildReports= toFlag AnonymousReports, - installNumJobs = toFlag Nothing, - installSymlinkBinDir = toFlag symlinkPath - } - } - -defaultCabalDir :: IO FilePath -defaultCabalDir = getAppUserDataDirectory "cabal" - -getCabalDir :: IO FilePath -getCabalDir = do - mDir <- lookupEnv "CABAL_DIR" - case mDir of - Nothing -> defaultCabalDir - Just dir -> return dir - -defaultConfigFile :: IO FilePath -defaultConfigFile = do - dir <- getCabalDir - return $ dir "config" - -defaultCacheDir :: IO FilePath -defaultCacheDir = do - dir <- getCabalDir - return $ dir "packages" - -defaultLogsDir :: IO FilePath -defaultLogsDir = do - dir <- getCabalDir - return $ dir "logs" - --- | Default position of the world file -defaultWorldFile :: IO FilePath -defaultWorldFile = do - dir <- getCabalDir - return $ dir "world" - -defaultExtraPath :: IO [FilePath] -defaultExtraPath = do - dir <- getCabalDir - return [dir "bin"] - -defaultSymlinkPath :: IO FilePath -defaultSymlinkPath = do - dir <- getCabalDir - return (dir "bin") - -defaultCompiler :: CompilerFlavor -defaultCompiler = fromMaybe GHC defaultCompilerFlavor - -defaultUserInstall :: Bool -defaultUserInstall = True --- We do per-user installs by default on all platforms. We used to default to --- global installs on Windows but that no longer works on Windows Vista or 7. 
- -defaultRemoteRepo :: RemoteRepo -defaultRemoteRepo = RemoteRepo name uri Nothing [] 0 False - where - name = "hackage.haskell.org" - uri = URI "http:" (Just (URIAuth "" name "")) "/" "" "" - -- Note that lots of old ~/.cabal/config files will have the old url - -- http://hackage.haskell.org/packages/archive - -- but new config files can use the new url (without the /packages/archive) - -- and avoid having to do a http redirect - --- For the default repo we know extra information, fill this in. --- --- We need this because the 'defaultRemoteRepo' above is only used for the --- first time when a config file is made. So for users with older config files --- we might have only have older info. This lets us fill that in even for old --- config files. --- -addInfoForKnownRepos :: RemoteRepo -> RemoteRepo -addInfoForKnownRepos repo - | remoteRepoName repo == remoteRepoName defaultRemoteRepo - = useSecure . tryHttps . fixOldURI $ repo - where - fixOldURI r - | isOldHackageURI (remoteRepoURI r) - = r { remoteRepoURI = remoteRepoURI defaultRemoteRepo } - | otherwise = r - - tryHttps r = r { remoteRepoShouldTryHttps = True } - - useSecure r@RemoteRepo{ - remoteRepoSecure = secure, - remoteRepoRootKeys = [], - remoteRepoKeyThreshold = 0 - } | secure /= Just False - = r { - -- Use hackage-security by default unless you opt-out with - -- secure: False - remoteRepoSecure = Just True, - remoteRepoRootKeys = defaultHackageRemoteRepoKeys, - remoteRepoKeyThreshold = defaultHackageRemoteRepoKeyThreshold - } - useSecure r = r -addInfoForKnownRepos other = other - --- | The current hackage.haskell.org repo root keys that we ship with cabal. ---- --- This lets us bootstrap trust in this repo without user intervention. --- These keys need to be periodically updated when new root keys are added. --- See the root key procedures for details. 
--- -defaultHackageRemoteRepoKeys :: [String] -defaultHackageRemoteRepoKeys = - [ "fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0", - "1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42", - "2c6c3627bd6c982990239487f1abd02e08a02e6cf16edb105a8012d444d870c3", - "0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d", - "51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921" - ] - --- | The required threshold of root key signatures for hackage.haskell.org --- -defaultHackageRemoteRepoKeyThreshold :: Int -defaultHackageRemoteRepoKeyThreshold = 3 - --- --- * Config file reading --- - --- | Loads the main configuration, and applies additional defaults to give the --- effective configuration. To loads just what is actually in the config file, --- use 'loadRawConfig'. --- -loadConfig :: Verbosity -> Flag FilePath -> IO SavedConfig -loadConfig verbosity configFileFlag = do - config <- loadRawConfig verbosity configFileFlag - extendToEffectiveConfig config - -extendToEffectiveConfig :: SavedConfig -> IO SavedConfig -extendToEffectiveConfig config = do - base <- baseSavedConfig - let effective0 = base `mappend` config - globalFlags0 = savedGlobalFlags effective0 - effective = effective0 { - savedGlobalFlags = globalFlags0 { - globalRemoteRepos = - overNubList (map addInfoForKnownRepos) - (globalRemoteRepos globalFlags0) - } - } - return effective - --- | Like 'loadConfig' but does not apply any additional defaults, it just --- loads what is actually in the config file. This is thus suitable for --- comparing or editing a config file, but not suitable for using as the --- effective configuration. --- -loadRawConfig :: Verbosity -> Flag FilePath -> IO SavedConfig -loadRawConfig verbosity configFileFlag = do - (source, configFile) <- getConfigFilePathAndSource configFileFlag - minp <- readConfigFile mempty configFile - case minp of - Nothing -> do - notice verbosity $ "Config file path source is " ++ sourceMsg source ++ "." 
- notice verbosity $ "Config file " ++ configFile ++ " not found." - createDefaultConfigFile verbosity [] configFile - Just (ParseOk ws conf) -> do - unless (null ws) $ warn verbosity $ - unlines (map (showPWarning configFile) ws) - return conf - Just (ParseFailed err) -> do - let (line, msg) = locatedErrorMsg err - die' verbosity $ - "Error parsing config file " ++ configFile - ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg - - where - sourceMsg CommandlineOption = "commandline option" - sourceMsg EnvironmentVariable = "env var CABAL_CONFIG" - sourceMsg Default = "default config file" - -data ConfigFileSource = CommandlineOption - | EnvironmentVariable - | Default - --- | Returns the config file path, without checking that the file exists. --- The order of precedence is: input flag, CABAL_CONFIG, default location. -getConfigFilePath :: Flag FilePath -> IO FilePath -getConfigFilePath = fmap snd . getConfigFilePathAndSource - -getConfigFilePathAndSource :: Flag FilePath -> IO (ConfigFileSource, FilePath) -getConfigFilePathAndSource configFileFlag = - getSource sources - where - sources = - [ (CommandlineOption, return . flagToMaybe $ configFileFlag) - , (EnvironmentVariable, lookup "CABAL_CONFIG" `liftM` getEnvironment) - , (Default, Just `liftM` defaultConfigFile) ] - - getSource [] = error "no config file path candidate found." - getSource ((source,action): xs) = - action >>= maybe (getSource xs) (return . (,) source) - -readConfigFile :: SavedConfig -> FilePath -> IO (Maybe (ParseResult SavedConfig)) -readConfigFile initial file = handleNotExists $ - fmap (Just . 
parseConfig (ConstraintSourceMainConfig file) initial) - (readFile file) - - where - handleNotExists action = catchIO action $ \ioe -> - if isDoesNotExistError ioe - then return Nothing - else ioError ioe - -createDefaultConfigFile :: Verbosity -> [String] -> FilePath -> IO SavedConfig -createDefaultConfigFile verbosity extraLines filePath = do - commentConf <- commentSavedConfig - initialConf <- initialSavedConfig - extraConf <- parseExtraLines verbosity extraLines - notice verbosity $ "Writing default configuration to " ++ filePath - writeConfigFile filePath commentConf (initialConf `mappend` extraConf) - return initialConf - -writeConfigFile :: FilePath -> SavedConfig -> SavedConfig -> IO () -writeConfigFile file comments vals = do - let tmpFile = file <.> "tmp" - createDirectoryIfMissing True (takeDirectory file) - writeFile tmpFile $ explanation ++ showConfigWithComments comments vals ++ "\n" - renameFile tmpFile file - where - explanation = unlines - ["-- This is the configuration file for the 'cabal' command line tool." - ,"--" - ,"-- The available configuration options are listed below." - ,"-- Some of them have default values listed." - ,"--" - ,"-- Lines (like this one) beginning with '--' are comments." - ,"-- Be careful with spaces and indentation because they are" - ,"-- used to indicate layout for nested sections." - ,"--" - ,"-- This config file was generated using the following versions" - ,"-- of Cabal and cabal-install:" - ,"-- Cabal library version: " ++ Text.display cabalVersion - ,"-- cabal-install version: " ++ showVersion Paths_cabal_install.version - ,"","" - ] - --- | These are the default values that get used in Cabal if a no value is --- given. We use these here to include in comments when we write out the --- initial config file so that the user can see what default value they are --- overriding. 
--- -commentSavedConfig :: IO SavedConfig -commentSavedConfig = do - userInstallDirs <- defaultInstallDirs defaultCompiler True True - globalInstallDirs <- defaultInstallDirs defaultCompiler False True - let conf0 = mempty { - savedGlobalFlags = defaultGlobalFlags { - globalRemoteRepos = toNubList [defaultRemoteRepo] - }, - savedInstallFlags = defaultInstallFlags, - savedConfigureExFlags = defaultConfigExFlags { - configAllowNewer = Just (AllowNewer mempty), - configAllowOlder = Just (AllowOlder mempty) - }, - savedConfigureFlags = (defaultConfigFlags defaultProgramDb) { - configUserInstall = toFlag defaultUserInstall - }, - savedUserInstallDirs = fmap toFlag userInstallDirs, - savedGlobalInstallDirs = fmap toFlag globalInstallDirs, - savedUploadFlags = commandDefaultFlags uploadCommand, - savedReportFlags = commandDefaultFlags reportCommand, - savedHaddockFlags = defaultHaddockFlags - - } - conf1 <- extendToEffectiveConfig conf0 - let globalFlagsConf1 = savedGlobalFlags conf1 - conf2 = conf1 { - savedGlobalFlags = globalFlagsConf1 { - globalRemoteRepos = overNubList (map removeRootKeys) - (globalRemoteRepos globalFlagsConf1) - } - } - return conf2 - where - -- Most people don't want to see default root keys, so don't print them. - removeRootKeys :: RemoteRepo -> RemoteRepo - removeRootKeys r = r { remoteRepoRootKeys = [] } - --- | All config file fields. --- -configFieldDescriptions :: ConstraintSource -> [FieldDescr SavedConfig] -configFieldDescriptions src = - - toSavedConfig liftGlobalFlag - (commandOptions (globalCommand []) ParseArgs) - ["version", "numeric-version", "config-file", "sandbox-config-file"] [] - - ++ toSavedConfig liftConfigFlag - (configureOptions ParseArgs) - (["builddir", "constraint", "dependency", "ipid"] - ++ map fieldName installDirsFields) - - -- This is only here because viewAsFieldDescr gives us a parser - -- that only recognises 'ghc' etc, the case-sensitive flag names, not - -- what the normal case-insensitive parser gives us. 
- [simpleField "compiler" - (fromFlagOrDefault Disp.empty . fmap Text.disp) (optional Text.parse) - configHcFlavor (\v flags -> flags { configHcFlavor = v }) - - -- TODO: The following is a temporary fix. The "optimization" - -- and "debug-info" fields are OptArg, and viewAsFieldDescr - -- fails on that. Instead of a hand-written hackaged parser - -- and printer, we should handle this case properly in the - -- library. - ,liftField configOptimization (\v flags -> - flags { configOptimization = v }) $ - let name = "optimization" in - FieldDescr name - (\f -> case f of - Flag NoOptimisation -> Disp.text "False" - Flag NormalOptimisation -> Disp.text "True" - Flag MaximumOptimisation -> Disp.text "2" - _ -> Disp.empty) - (\line str _ -> case () of - _ | str == "False" -> ParseOk [] (Flag NoOptimisation) - | str == "True" -> ParseOk [] (Flag NormalOptimisation) - | str == "0" -> ParseOk [] (Flag NoOptimisation) - | str == "1" -> ParseOk [] (Flag NormalOptimisation) - | str == "2" -> ParseOk [] (Flag MaximumOptimisation) - | lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation) - | lstr == "true" -> ParseOk [caseWarning] (Flag NormalOptimisation) - | otherwise -> ParseFailed (NoParse name line) - where - lstr = lowercase str - caseWarning = PWarning $ - "The '" ++ name - ++ "' field is case sensitive, use 'True' or 'False'.") - ,liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $ - let name = "debug-info" in - FieldDescr name - (\f -> case f of - Flag NoDebugInfo -> Disp.text "False" - Flag MinimalDebugInfo -> Disp.text "1" - Flag NormalDebugInfo -> Disp.text "True" - Flag MaximalDebugInfo -> Disp.text "3" - _ -> Disp.empty) - (\line str _ -> case () of - _ | str == "False" -> ParseOk [] (Flag NoDebugInfo) - | str == "True" -> ParseOk [] (Flag NormalDebugInfo) - | str == "0" -> ParseOk [] (Flag NoDebugInfo) - | str == "1" -> ParseOk [] (Flag MinimalDebugInfo) - | str == "2" -> ParseOk [] (Flag NormalDebugInfo) - | str == "3" -> ParseOk [] 
(Flag MaximalDebugInfo) - | lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo) - | lstr == "true" -> ParseOk [caseWarning] (Flag NormalDebugInfo) - | otherwise -> ParseFailed (NoParse name line) - where - lstr = lowercase str - caseWarning = PWarning $ - "The '" ++ name - ++ "' field is case sensitive, use 'True' or 'False'.") - ] - - ++ toSavedConfig liftConfigExFlag - (configureExOptions ParseArgs src) - [] - [let pkgs = (Just . AllowOlder . RelaxDepsSome) `fmap` parseOptCommaList Text.parse - parseAllowOlder = ((Just . AllowOlder . toRelaxDeps) `fmap` Text.parse) Parse.<++ pkgs in - simpleField "allow-older" - (showRelaxDeps . fmap unAllowOlder) parseAllowOlder - configAllowOlder (\v flags -> flags { configAllowOlder = v }) - ,let pkgs = (Just . AllowNewer . RelaxDepsSome) `fmap` parseOptCommaList Text.parse - parseAllowNewer = ((Just . AllowNewer . toRelaxDeps) `fmap` Text.parse) Parse.<++ pkgs in - simpleField "allow-newer" - (showRelaxDeps . fmap unAllowNewer) parseAllowNewer - configAllowNewer (\v flags -> flags { configAllowNewer = v }) - ] - - ++ toSavedConfig liftInstallFlag - (installOptions ParseArgs) - ["dry-run", "only", "only-dependencies", "dependencies-only"] [] - - ++ toSavedConfig liftUploadFlag - (commandOptions uploadCommand ParseArgs) - ["verbose", "check", "documentation", "publish"] [] - - ++ toSavedConfig liftReportFlag - (commandOptions reportCommand ParseArgs) - ["verbose", "username", "password"] [] - --FIXME: this is a hack, hiding the user name and password. - -- But otherwise it masks the upload ones. Either need to - -- share the options or make then distinct. In any case - -- they should probably be per-server. - - ++ [ viewAsFieldDescr - $ optionDistPref - (configDistPref . 
savedConfigureFlags) - (\distPref config -> - config - { savedConfigureFlags = (savedConfigureFlags config) { - configDistPref = distPref } - , savedHaddockFlags = (savedHaddockFlags config) { - haddockDistPref = distPref } - } - ) - ParseArgs - ] - - where - toSavedConfig lift options exclusions replacements = - [ lift (fromMaybe field replacement) - | opt <- options - , let field = viewAsFieldDescr opt - name = fieldName field - replacement = find ((== name) . fieldName) replacements - , name `notElem` exclusions ] - optional = Parse.option mempty . fmap toFlag - - - showRelaxDeps Nothing = mempty - showRelaxDeps (Just rd) | isRelaxDeps rd = Disp.text "True" - | otherwise = Disp.text "False" - - toRelaxDeps True = RelaxDepsAll - toRelaxDeps False = mempty - - --- TODO: next step, make the deprecated fields elicit a warning. --- -deprecatedFieldDescriptions :: [FieldDescr SavedConfig] -deprecatedFieldDescriptions = - [ liftGlobalFlag $ - listField "repos" - (Disp.text . showRepo) parseRepo - (fromNubList . globalRemoteRepos) - (\rs cfg -> cfg { globalRemoteRepos = toNubList rs }) - , liftGlobalFlag $ - simpleField "cachedir" - (Disp.text . fromFlagOrDefault "") (optional parseFilePathQ) - globalCacheDir (\d cfg -> cfg { globalCacheDir = d }) - , liftUploadFlag $ - simpleField "hackage-username" - (Disp.text . fromFlagOrDefault "" . fmap unUsername) - (optional (fmap Username parseTokenQ)) - uploadUsername (\d cfg -> cfg { uploadUsername = d }) - , liftUploadFlag $ - simpleField "hackage-password" - (Disp.text . fromFlagOrDefault "" . fmap unPassword) - (optional (fmap Password parseTokenQ)) - uploadPassword (\d cfg -> cfg { uploadPassword = d }) - , liftUploadFlag $ - spaceListField "hackage-password-command" - Disp.text parseTokenQ - (fromFlagOrDefault [] . uploadPasswordCmd) - (\d cfg -> cfg { uploadPasswordCmd = Flag d }) - ] - ++ map (modifyFieldName ("user-"++) . liftUserInstallDirs) installDirsFields - ++ map (modifyFieldName ("global-"++) . 
liftGlobalInstallDirs) installDirsFields - where - optional = Parse.option mempty . fmap toFlag - modifyFieldName :: (String -> String) -> FieldDescr a -> FieldDescr a - modifyFieldName f d = d { fieldName = f (fieldName d) } - -liftUserInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate)) - -> FieldDescr SavedConfig -liftUserInstallDirs = liftField - savedUserInstallDirs (\flags conf -> conf { savedUserInstallDirs = flags }) - -liftGlobalInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate)) - -> FieldDescr SavedConfig -liftGlobalInstallDirs = liftField - savedGlobalInstallDirs (\flags conf -> conf { savedGlobalInstallDirs = flags }) - -liftGlobalFlag :: FieldDescr GlobalFlags -> FieldDescr SavedConfig -liftGlobalFlag = liftField - savedGlobalFlags (\flags conf -> conf { savedGlobalFlags = flags }) - -liftConfigFlag :: FieldDescr ConfigFlags -> FieldDescr SavedConfig -liftConfigFlag = liftField - savedConfigureFlags (\flags conf -> conf { savedConfigureFlags = flags }) - -liftConfigExFlag :: FieldDescr ConfigExFlags -> FieldDescr SavedConfig -liftConfigExFlag = liftField - savedConfigureExFlags (\flags conf -> conf { savedConfigureExFlags = flags }) - -liftInstallFlag :: FieldDescr InstallFlags -> FieldDescr SavedConfig -liftInstallFlag = liftField - savedInstallFlags (\flags conf -> conf { savedInstallFlags = flags }) - -liftUploadFlag :: FieldDescr UploadFlags -> FieldDescr SavedConfig -liftUploadFlag = liftField - savedUploadFlags (\flags conf -> conf { savedUploadFlags = flags }) - -liftReportFlag :: FieldDescr ReportFlags -> FieldDescr SavedConfig -liftReportFlag = liftField - savedReportFlags (\flags conf -> conf { savedReportFlags = flags }) - -parseConfig :: ConstraintSource - -> SavedConfig - -> String - -> ParseResult SavedConfig -parseConfig src initial = \str -> do - fields <- readFields str - let (knownSections, others) = partition isKnownSection fields - config <- parse others - let user0 = savedUserInstallDirs config - global0 = 
savedGlobalInstallDirs config - (remoteRepoSections0, haddockFlags, user, global, paths, args) <- - foldM parseSections - ([], savedHaddockFlags config, user0, global0, [], []) - knownSections - - let remoteRepoSections = - reverse - . nubBy ((==) `on` remoteRepoName) - $ remoteRepoSections0 - - return config { - savedGlobalFlags = (savedGlobalFlags config) { - globalRemoteRepos = toNubList remoteRepoSections, - -- the global extra prog path comes from the configure flag prog path - globalProgPathExtra = configProgramPathExtra (savedConfigureFlags config) - }, - savedConfigureFlags = (savedConfigureFlags config) { - configProgramPaths = paths, - configProgramArgs = args - }, - savedHaddockFlags = haddockFlags, - savedUserInstallDirs = user, - savedGlobalInstallDirs = global - } - - where - isKnownSection (ParseUtils.Section _ "repository" _ _) = True - isKnownSection (ParseUtils.F _ "remote-repo" _) = True - isKnownSection (ParseUtils.Section _ "haddock" _ _) = True - isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True - isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True - isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True - isKnownSection _ = False - - parse = parseFields (configFieldDescriptions src - ++ deprecatedFieldDescriptions) initial - - parseSections (rs, h, u, g, p, a) - (ParseUtils.Section _ "repository" name fs) = do - r' <- parseFields remoteRepoFields (emptyRemoteRepo name) fs - when (remoteRepoKeyThreshold r' > length (remoteRepoRootKeys r')) $ - warning $ "'key-threshold' for repository " ++ show (remoteRepoName r') - ++ " higher than number of keys" - when (not (null (remoteRepoRootKeys r')) - && remoteRepoSecure r' /= Just True) $ - warning $ "'root-keys' for repository " ++ show (remoteRepoName r') - ++ " non-empty, but 'secure' not set to True." 
- return (r':rs, h, u, g, p, a) - - parseSections (rs, h, u, g, p, a) - (ParseUtils.F lno "remote-repo" raw) = do - let mr' = readRepo raw - r' <- maybe (ParseFailed $ NoParse "remote-repo" lno) return mr' - return (r':rs, h, u, g, p, a) - - parseSections accum@(rs, h, u, g, p, a) - (ParseUtils.Section _ "haddock" name fs) - | name == "" = do h' <- parseFields haddockFlagsFields h fs - return (rs, h', u, g, p, a) - | otherwise = do - warning "The 'haddock' section should be unnamed" - return accum - parseSections accum@(rs, h, u, g, p, a) - (ParseUtils.Section _ "install-dirs" name fs) - | name' == "user" = do u' <- parseFields installDirsFields u fs - return (rs, h, u', g, p, a) - | name' == "global" = do g' <- parseFields installDirsFields g fs - return (rs, h, u, g', p, a) - | otherwise = do - warning "The 'install-paths' section should be for 'user' or 'global'" - return accum - where name' = lowercase name - parseSections accum@(rs, h, u, g, p, a) - (ParseUtils.Section _ "program-locations" name fs) - | name == "" = do p' <- parseFields withProgramsFields p fs - return (rs, h, u, g, p', a) - | otherwise = do - warning "The 'program-locations' section should be unnamed" - return accum - parseSections accum@(rs, h, u, g, p, a) - (ParseUtils.Section _ "program-default-options" name fs) - | name == "" = do a' <- parseFields withProgramOptionsFields a fs - return (rs, h, u, g, p, a') - | otherwise = do - warning "The 'program-default-options' section should be unnamed" - return accum - parseSections accum f = do - warning $ "Unrecognized stanza on line " ++ show (lineNo f) - return accum - -showConfig :: SavedConfig -> String -showConfig = showConfigWithComments mempty - -showConfigWithComments :: SavedConfig -> SavedConfig -> String -showConfigWithComments comment vals = Disp.render $ - case fmap (uncurry ppRemoteRepoSection) - (zip (getRemoteRepos comment) (getRemoteRepos vals)) of - [] -> Disp.text "" - (x:xs) -> foldl' (\ r r' -> r $+$ Disp.text "" $+$ r') x xs 
- $+$ Disp.text "" - $+$ ppFields (skipSomeFields (configFieldDescriptions ConstraintSourceUnknown)) - mcomment vals - $+$ Disp.text "" - $+$ ppSection "haddock" "" haddockFlagsFields - (fmap savedHaddockFlags mcomment) (savedHaddockFlags vals) - $+$ Disp.text "" - $+$ installDirsSection "user" savedUserInstallDirs - $+$ Disp.text "" - $+$ installDirsSection "global" savedGlobalInstallDirs - $+$ Disp.text "" - $+$ configFlagsSection "program-locations" withProgramsFields - configProgramPaths - $+$ Disp.text "" - $+$ configFlagsSection "program-default-options" withProgramOptionsFields - configProgramArgs - where - getRemoteRepos = fromNubList . globalRemoteRepos . savedGlobalFlags - mcomment = Just comment - installDirsSection name field = - ppSection "install-dirs" name installDirsFields - (fmap field mcomment) (field vals) - configFlagsSection name fields field = - ppSection name "" fields - (fmap (field . savedConfigureFlags) mcomment) - ((field . savedConfigureFlags) vals) - - -- skip fields based on field name. currently only skips "remote-repo", - -- because that is rendered as a section. (see 'ppRemoteRepoSection'.) - skipSomeFields = filter ((/= "remote-repo") . fieldName) - --- | Fields for the 'install-dirs' sections. -installDirsFields :: [FieldDescr (InstallDirs (Flag PathTemplate))] -installDirsFields = map viewAsFieldDescr installDirsOptions - -ppRemoteRepoSection :: RemoteRepo -> RemoteRepo -> Doc -ppRemoteRepoSection def vals = ppSection "repository" (remoteRepoName vals) - remoteRepoFields (Just def) vals - -remoteRepoFields :: [FieldDescr RemoteRepo] -remoteRepoFields = - [ simpleField "url" - (text . 
show) (parseTokenQ >>= parseURI') - remoteRepoURI (\x repo -> repo { remoteRepoURI = x }) - , simpleField "secure" - showSecure (Just `fmap` Text.parse) - remoteRepoSecure (\x repo -> repo { remoteRepoSecure = x }) - , listField "root-keys" - text parseTokenQ - remoteRepoRootKeys (\x repo -> repo { remoteRepoRootKeys = x }) - , simpleField "key-threshold" - showThreshold Text.parse - remoteRepoKeyThreshold (\x repo -> repo { remoteRepoKeyThreshold = x }) - ] - where - parseURI' uriString = - case parseURI uriString of - Nothing -> fail $ "remote-repo: no parse on " ++ show uriString - Just uri -> return uri - - showSecure Nothing = mempty -- default 'secure' setting - showSecure (Just True) = text "True" -- user explicitly enabled it - showSecure (Just False) = text "False" -- user explicitly disabled it - - -- If the key-threshold is set to 0, we omit it as this is the default - -- and it looks odd to have a value for key-threshold but not for 'secure' - -- (note that an empty list of keys is already omitted by default, since - -- that is what we do for all list fields) - showThreshold 0 = mempty - showThreshold t = text (show t) - --- | Fields for the 'haddock' section. -haddockFlagsFields :: [FieldDescr HaddockFlags] -haddockFlagsFields = [ field - | opt <- haddockOptions ParseArgs - , let field = viewAsFieldDescr opt - name = fieldName field - , name `notElem` exclusions ] - where - exclusions = ["verbose", "builddir", "for-hackage"] - --- | Fields for the 'program-locations' section. -withProgramsFields :: [FieldDescr [(String, FilePath)]] -withProgramsFields = - map viewAsFieldDescr $ - programDbPaths' (++ "-location") defaultProgramDb - ParseArgs id (++) - --- | Fields for the 'program-default-options' section. 
-withProgramOptionsFields :: [FieldDescr [(String, [String])]] -withProgramOptionsFields = - map viewAsFieldDescr $ - programDbOptions defaultProgramDb ParseArgs id (++) - -parseExtraLines :: Verbosity -> [String] -> IO SavedConfig -parseExtraLines verbosity extraLines = - case parseConfig (ConstraintSourceMainConfig "additional lines") - mempty (unlines extraLines) of - ParseFailed err -> - let (line, msg) = locatedErrorMsg err - in die' verbosity $ - "Error parsing additional config lines\n" - ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg - ParseOk [] r -> return r - ParseOk ws _ -> die' verbosity $ - unlines (map (showPWarning "Error parsing additional config lines") ws) - --- | Get the differences (as a pseudo code diff) between the user's --- '~/.cabal/config' and the one that cabal would generate if it didn't exist. -userConfigDiff :: Verbosity -> GlobalFlags -> [String] -> IO [String] -userConfigDiff verbosity globalFlags extraLines = do - userConfig <- loadRawConfig normal (globalConfigFile globalFlags) - extraConfig <- parseExtraLines verbosity extraLines - testConfig <- initialSavedConfig - return $ reverse . foldl' createDiff [] . M.toList - $ M.unionWith combine - (M.fromList . map justFst $ filterShow testConfig) - (M.fromList . 
map justSnd $ filterShow (userConfig `mappend` extraConfig)) - where - justFst (a, b) = (a, (Just b, Nothing)) - justSnd (a, b) = (a, (Nothing, Just b)) - - combine (Nothing, Just b) (Just a, Nothing) = (Just a, Just b) - combine (Just a, Nothing) (Nothing, Just b) = (Just a, Just b) - combine x y = error $ "Can't happen : userConfigDiff " - ++ show x ++ " " ++ show y - - createDiff :: [String] -> (String, (Maybe String, Maybe String)) -> [String] - createDiff acc (key, (Just a, Just b)) - | a == b = acc - | otherwise = ("+ " ++ key ++ ": " ++ b) - : ("- " ++ key ++ ": " ++ a) : acc - createDiff acc (key, (Nothing, Just b)) = ("+ " ++ key ++ ": " ++ b) : acc - createDiff acc (key, (Just a, Nothing)) = ("- " ++ key ++ ": " ++ a) : acc - createDiff acc (_, (Nothing, Nothing)) = acc - - filterShow :: SavedConfig -> [(String, String)] - filterShow cfg = map keyValueSplit - . filter (\s -> not (null s) && ':' `elem` s) - . map nonComment - . lines - $ showConfig cfg - - nonComment [] = [] - nonComment ('-':'-':_) = [] - nonComment (x:xs) = x : nonComment xs - - topAndTail = reverse . dropWhile isSpace . reverse . dropWhile isSpace - - keyValueSplit s = - let (left, right) = break (== ':') s - in (topAndTail left, topAndTail (drop 1 right)) - - --- | Update the user's ~/.cabal/config' keeping the user's customizations. -userConfigUpdate :: Verbosity -> GlobalFlags -> [String] -> IO () -userConfigUpdate verbosity globalFlags extraLines = do - userConfig <- loadRawConfig normal (globalConfigFile globalFlags) - extraConfig <- parseExtraLines verbosity extraLines - newConfig <- initialSavedConfig - commentConf <- commentSavedConfig - cabalFile <- getConfigFilePath $ globalConfigFile globalFlags - let backup = cabalFile ++ ".backup" - notice verbosity $ "Renaming " ++ cabalFile ++ " to " ++ backup ++ "." - renameFile cabalFile backup - notice verbosity $ "Writing merged config to " ++ cabalFile ++ "." 
- writeConfigFile cabalFile commentConf (newConfig `mappend` userConfig `mappend` extraConfig) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Configure.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Configure.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Configure.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Configure.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,460 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Configure --- Copyright : (c) David Himmelstrup 2005, --- Duncan Coutts 2005 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- High level interface to configuring a package. ------------------------------------------------------------------------------ -module Distribution.Client.Configure ( - configure, - configureSetupScript, - chooseCabalVersion, - checkConfigExFlags, - -- * Saved configure flags - readConfigFlagsFrom, readConfigFlags, - cabalConfigFlagsFile, - writeConfigFlagsTo, writeConfigFlags, - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.Dependency -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.SolverInstallPlan (SolverInstallPlan) -import Distribution.Client.IndexUtils as IndexUtils - ( getSourcePackages, getInstalledPackages ) -import Distribution.Client.Setup - ( ConfigExFlags(..), RepoContext(..) 
- , configureCommand, configureExCommand, filterConfigureFlags ) -import Distribution.Client.Types as Source -import Distribution.Client.SetupWrapper - ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) -import Distribution.Client.Targets - ( userToPackageConstraint, userConstraintPackageName ) -import Distribution.Client.JobControl (Lock) - -import qualified Distribution.Solver.Types.ComponentDeps as CD -import Distribution.Solver.Types.Settings -import Distribution.Solver.Types.ConstraintSource -import Distribution.Solver.Types.LabeledPackageConstraint -import Distribution.Solver.Types.OptionalStanza -import Distribution.Solver.Types.PackageIndex - ( PackageIndex, elemByPackageName ) -import Distribution.Solver.Types.PkgConfigDb - (PkgConfigDb, readPkgConfigDb) -import Distribution.Solver.Types.SourcePackage - -import Distribution.Simple.Compiler - ( Compiler, CompilerInfo, compilerInfo, PackageDB(..), PackageDBStack ) -import Distribution.Simple.Program (ProgramDb) -import Distribution.Client.SavedFlags ( readCommandFlags, writeCommandFlags ) -import Distribution.Simple.Setup - ( ConfigFlags(..) 
- , fromFlag, toFlag, flagToMaybe, fromFlagOrDefault ) -import Distribution.Simple.PackageIndex - ( InstalledPackageIndex, lookupPackageName ) -import Distribution.Package - ( Package(..), packageName, PackageId ) -import Distribution.Types.Dependency - ( Dependency(..), thisPackageVersion ) -import qualified Distribution.PackageDescription as PkgDesc -import Distribution.PackageDescription.Parsec - ( readGenericPackageDescription ) -import Distribution.PackageDescription.Configuration - ( finalizePD ) -import Distribution.Version - ( Version, mkVersion, anyVersion, thisVersion - , VersionRange, orLaterVersion ) -import Distribution.Simple.Utils as Utils - ( warn, notice, debug, die' - , defaultPackageDesc ) -import Distribution.System - ( Platform ) -import Distribution.Text ( display ) -import Distribution.Verbosity as Verbosity - ( Verbosity ) - -import System.FilePath ( () ) - --- | Choose the Cabal version such that the setup scripts compiled against this --- version will support the given command-line flags. -chooseCabalVersion :: ConfigExFlags -> Maybe Version -> VersionRange -chooseCabalVersion configExFlags maybeVersion = - maybe defaultVersionRange thisVersion maybeVersion - where - -- Cabal < 1.19.2 doesn't support '--exact-configuration' which is needed - -- for '--allow-newer' to work. 
- allowNewer = isRelaxDeps - (maybe mempty unAllowNewer $ configAllowNewer configExFlags) - allowOlder = isRelaxDeps - (maybe mempty unAllowOlder $ configAllowOlder configExFlags) - - defaultVersionRange = if allowOlder || allowNewer - then orLaterVersion (mkVersion [1,19,2]) - else anyVersion - --- | Configure the package found in the local directory -configure :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> Platform - -> ProgramDb - -> ConfigFlags - -> ConfigExFlags - -> [String] - -> IO () -configure verbosity packageDBs repoCtxt comp platform progdb - configFlags configExFlags extraArgs = do - - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb - sourcePkgDb <- getSourcePackages verbosity repoCtxt - pkgConfigDb <- readPkgConfigDb verbosity progdb - - checkConfigExFlags verbosity installedPkgIndex - (packageIndex sourcePkgDb) configExFlags - - progress <- planLocalPackage verbosity comp platform configFlags configExFlags - installedPkgIndex sourcePkgDb pkgConfigDb - - notice verbosity "Resolving dependencies..." - maybePlan <- foldProgress logMsg (return . Left) (return . Right) - progress - case maybePlan of - Left message -> do - warn verbosity $ - "solver failed to find a solution:\n" - ++ message - ++ "\nTrying configure anyway." - setupWrapper verbosity (setupScriptOptions installedPkgIndex Nothing) - Nothing configureCommand (const configFlags) (const extraArgs) - - Right installPlan0 -> - let installPlan = InstallPlan.configureInstallPlan configFlags installPlan0 - in case fst (InstallPlan.ready installPlan) of - [pkg@(ReadyPackage - (ConfiguredPackage _ (SourcePackage _ _ (LocalUnpackedPackage _) _) - _ _ _))] -> do - configurePackage verbosity - platform (compilerInfo comp) - (setupScriptOptions installedPkgIndex (Just pkg)) - configFlags pkg extraArgs - - _ -> die' verbosity $ "internal error: configure install plan should have exactly " - ++ "one local ready package." 
- - where - setupScriptOptions :: InstalledPackageIndex - -> Maybe ReadyPackage - -> SetupScriptOptions - setupScriptOptions = - configureSetupScript - packageDBs - comp - platform - progdb - (fromFlagOrDefault - (useDistPref defaultSetupScriptOptions) - (configDistPref configFlags)) - (chooseCabalVersion - configExFlags - (flagToMaybe (configCabalVersion configExFlags))) - Nothing - False - - logMsg message rest = debug verbosity message >> rest - -configureSetupScript :: PackageDBStack - -> Compiler - -> Platform - -> ProgramDb - -> FilePath - -> VersionRange - -> Maybe Lock - -> Bool - -> InstalledPackageIndex - -> Maybe ReadyPackage - -> SetupScriptOptions -configureSetupScript packageDBs - comp - platform - progdb - distPref - cabalVersion - lock - forceExternal - index - mpkg - = SetupScriptOptions { - useCabalVersion = cabalVersion - , useCabalSpecVersion = Nothing - , useCompiler = Just comp - , usePlatform = Just platform - , usePackageDB = packageDBs' - , usePackageIndex = index' - , useProgramDb = progdb - , useDistPref = distPref - , useLoggingHandle = Nothing - , useWorkingDir = Nothing - , useExtraPathEnv = [] - , useExtraEnvOverrides = [] - , setupCacheLock = lock - , useWin32CleanHack = False - , forceExternalSetupMethod = forceExternal - -- If we have explicit setup dependencies, list them; otherwise, we give - -- the empty list of dependencies; ideally, we would fix the version of - -- Cabal here, so that we no longer need the special case for that in - -- `compileSetupExecutable` in `externalSetupMethod`, but we don't yet - -- know the version of Cabal at this point, but only find this there. - -- Therefore, for now, we just leave this blank. 
- , useDependencies = fromMaybe [] explicitSetupDeps - , useDependenciesExclusive = not defaultSetupDeps && isJust explicitSetupDeps - , useVersionMacros = not defaultSetupDeps && isJust explicitSetupDeps - , isInteractive = False - } - where - -- When we are compiling a legacy setup script without an explicit - -- setup stanza, we typically want to allow the UserPackageDB for - -- finding the Cabal lib when compiling any Setup.hs even if we're doing - -- a global install. However we also allow looking in a specific package - -- db. - packageDBs' :: PackageDBStack - index' :: Maybe InstalledPackageIndex - (packageDBs', index') = - case packageDBs of - (GlobalPackageDB:dbs) | UserPackageDB `notElem` dbs - , Nothing <- explicitSetupDeps - -> (GlobalPackageDB:UserPackageDB:dbs, Nothing) - -- but if the user is using an odd db stack, don't touch it - _otherwise -> (packageDBs, Just index) - - maybeSetupBuildInfo :: Maybe PkgDesc.SetupBuildInfo - maybeSetupBuildInfo = do - ReadyPackage cpkg <- mpkg - let gpkg = packageDescription (confPkgSource cpkg) - PkgDesc.setupBuildInfo (PkgDesc.packageDescription gpkg) - - -- Was a default 'custom-setup' stanza added by 'cabal-install' itself? If - -- so, 'setup-depends' must not be exclusive. See #3199. - defaultSetupDeps :: Bool - defaultSetupDeps = maybe False PkgDesc.defaultSetupDepends - maybeSetupBuildInfo - - explicitSetupDeps :: Maybe [(InstalledPackageId, PackageId)] - explicitSetupDeps = do - -- Check if there is an explicit setup stanza. - _buildInfo <- maybeSetupBuildInfo - -- Return the setup dependencies computed by the solver - ReadyPackage cpkg <- mpkg - return [ ( cid, srcid ) - | ConfiguredId srcid (Just PkgDesc.CLibName) cid <- CD.setupDeps (confPkgDeps cpkg) - ] - --- | Warn if any constraints or preferences name packages that are not in the --- source package index or installed package index. 
-checkConfigExFlags :: Package pkg - => Verbosity - -> InstalledPackageIndex - -> PackageIndex pkg - -> ConfigExFlags - -> IO () -checkConfigExFlags verbosity installedPkgIndex sourcePkgIndex flags = do - unless (null unknownConstraints) $ warn verbosity $ - "Constraint refers to an unknown package: " - ++ showConstraint (head unknownConstraints) - unless (null unknownPreferences) $ warn verbosity $ - "Preference refers to an unknown package: " - ++ display (head unknownPreferences) - where - unknownConstraints = filter (unknown . userConstraintPackageName . fst) $ - configExConstraints flags - unknownPreferences = filter (unknown . \(Dependency name _) -> name) $ - configPreferences flags - unknown pkg = null (lookupPackageName installedPkgIndex pkg) - && not (elemByPackageName sourcePkgIndex pkg) - showConstraint (uc, src) = - display uc ++ " (" ++ showConstraintSource src ++ ")" - --- | Make an 'InstallPlan' for the unpacked package in the current directory, --- and all its dependencies. 
--- -planLocalPackage :: Verbosity -> Compiler - -> Platform - -> ConfigFlags -> ConfigExFlags - -> InstalledPackageIndex - -> SourcePackageDb - -> PkgConfigDb - -> IO (Progress String String SolverInstallPlan) -planLocalPackage verbosity comp platform configFlags configExFlags - installedPkgIndex (SourcePackageDb _ packagePrefs) pkgConfigDb = do - pkg <- readGenericPackageDescription verbosity =<< - case flagToMaybe (configCabalFilePath configFlags) of - Nothing -> defaultPackageDesc verbosity - Just fp -> return fp - solver <- chooseSolver verbosity (fromFlag $ configSolver configExFlags) - (compilerInfo comp) - - let -- We create a local package and ask to resolve a dependency on it - localPkg = SourcePackage { - packageInfoId = packageId pkg, - packageDescription = pkg, - packageSource = LocalUnpackedPackage ".", - packageDescrOverride = Nothing - } - - testsEnabled = fromFlagOrDefault False $ configTests configFlags - benchmarksEnabled = - fromFlagOrDefault False $ configBenchmarks configFlags - - resolverParams = - removeLowerBounds - (fromMaybe (AllowOlder mempty) $ configAllowOlder configExFlags) - . removeUpperBounds - (fromMaybe (AllowNewer mempty) $ configAllowNewer configExFlags) - - . addPreferences - -- preferences from the config file or command line - [ PackageVersionPreference name ver - | Dependency name ver <- configPreferences configExFlags ] - - . addConstraints - -- version constraints from the config file or command line - -- TODO: should warn or error on constraints that are not on direct - -- deps or flag constraints not on the package in question. - [ LabeledPackageConstraint (userToPackageConstraint uc) src - | (uc, src) <- configExConstraints configExFlags ] - - . 
addConstraints - -- package flags from the config file or command line - [ let pc = PackageConstraint - (scopeToplevel $ packageName pkg) - (PackagePropertyFlags $ configConfigurationsFlags configFlags) - in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget - ] - - . addConstraints - -- '--enable-tests' and '--enable-benchmarks' constraints from - -- the config file or command line - [ let pc = PackageConstraint (scopeToplevel $ packageName pkg) . - PackagePropertyStanzas $ - [ TestStanzas | testsEnabled ] ++ - [ BenchStanzas | benchmarksEnabled ] - in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget - ] - - -- Don't solve for executables, since we use an empty source - -- package database and executables never show up in the - -- installed package index - . setSolveExecutables (SolveExecutables False) - - . setSolverVerbosity verbosity - - $ standardInstallPolicy - installedPkgIndex - -- NB: We pass in an *empty* source package database, - -- because cabal configure assumes that all dependencies - -- have already been installed - (SourcePackageDb mempty packagePrefs) - [SpecificSourcePackage localPkg] - - return (resolveDependencies platform (compilerInfo comp) pkgConfigDb solver resolverParams) - - --- | Call an installer for an 'SourcePackage' but override the configure --- flags with the ones given by the 'ReadyPackage'. In particular the --- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly --- versioned package dependencies. So we ignore any previous partial flag --- assignment or dependency constraints and use the new ones. --- --- NB: when updating this function, don't forget to also update --- 'installReadyPackage' in D.C.Install. 
-configurePackage :: Verbosity - -> Platform -> CompilerInfo - -> SetupScriptOptions - -> ConfigFlags - -> ReadyPackage - -> [String] - -> IO () -configurePackage verbosity platform comp scriptOptions configFlags - (ReadyPackage (ConfiguredPackage ipid spkg flags stanzas deps)) - extraArgs = - - setupWrapper verbosity - scriptOptions (Just pkg) configureCommand configureFlags (const extraArgs) - - where - gpkg = packageDescription spkg - configureFlags = filterConfigureFlags configFlags { - configIPID = if isJust (flagToMaybe (configIPID configFlags)) - -- Make sure cabal configure --ipid works. - then configIPID configFlags - else toFlag (display ipid), - configConfigurationsFlags = flags, - -- We generate the legacy constraints as well as the new style precise - -- deps. In the end only one set gets passed to Setup.hs configure, - -- depending on the Cabal version we are talking to. - configConstraints = [ thisPackageVersion srcid - | ConfiguredId srcid (Just PkgDesc.CLibName) _uid <- CD.nonSetupDeps deps ], - configDependencies = [ (packageName srcid, uid) - | ConfiguredId srcid (Just PkgDesc.CLibName) uid <- CD.nonSetupDeps deps ], - -- Use '--exact-configuration' if supported. - configExactConfiguration = toFlag True, - configVerbosity = toFlag verbosity, - -- NB: if the user explicitly specified - -- --enable-tests/--enable-benchmarks, always respect it. - -- (But if they didn't, let solver decide.) 
- configBenchmarks = toFlag (BenchStanzas `elem` stanzas) - `mappend` configBenchmarks configFlags, - configTests = toFlag (TestStanzas `elem` stanzas) - `mappend` configTests configFlags - } - - pkg = case finalizePD flags (enableStanzas stanzas) - (const True) - platform comp [] gpkg of - Left _ -> error "finalizePD ReadyPackage failed" - Right (desc, _) -> desc - --- ----------------------------------------------------------------------------- --- * Saved configure environments and flags --- ----------------------------------------------------------------------------- - --- | Read saved configure flags and restore the saved environment from the --- specified files. -readConfigFlagsFrom :: FilePath -- ^ path to saved flags file - -> IO (ConfigFlags, ConfigExFlags) -readConfigFlagsFrom flags = do - readCommandFlags flags configureExCommand - --- | The path (relative to @--build-dir@) where the arguments to @configure@ --- should be saved. -cabalConfigFlagsFile :: FilePath -> FilePath -cabalConfigFlagsFile dist = dist "cabal-config-flags" - --- | Read saved configure flags and restore the saved environment from the --- usual location. -readConfigFlags :: FilePath -- ^ @--build-dir@ - -> IO (ConfigFlags, ConfigExFlags) -readConfigFlags dist = - readConfigFlagsFrom (cabalConfigFlagsFile dist) - --- | Save the configure flags and environment to the specified files. -writeConfigFlagsTo :: FilePath -- ^ path to saved flags file - -> Verbosity -> (ConfigFlags, ConfigExFlags) - -> IO () -writeConfigFlagsTo file verb flags = do - writeCommandFlags verb file configureExCommand flags - --- | Save the build flags to the usual location. 
-writeConfigFlags :: Verbosity - -> FilePath -- ^ @--build-dir@ - -> (ConfigFlags, ConfigExFlags) -> IO () -writeConfigFlags verb dist = - writeConfigFlagsTo (cabalConfigFlagsFile dist) verb diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Dependency/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Dependency/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Dependency/Types.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Dependency/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,66 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Client.Dependency.Types ( - PreSolver(..), - Solver(..), - - PackagesPreferenceDefault(..), - - ) where - -import Data.Char - ( isAlpha, toLower ) - -import qualified Distribution.Compat.ReadP as Parse - ( pfail, munch1 ) -import Distribution.Text - ( Text(..) ) - -import Text.PrettyPrint - ( text ) -import GHC.Generics (Generic) -import Distribution.Compat.Binary (Binary(..)) - - --- | All the solvers that can be selected. -data PreSolver = AlwaysModular - deriving (Eq, Ord, Show, Bounded, Enum, Generic) - --- | All the solvers that can be used. -data Solver = Modular - deriving (Eq, Ord, Show, Bounded, Enum, Generic) - -instance Binary PreSolver -instance Binary Solver - -instance Text PreSolver where - disp AlwaysModular = text "modular" - parse = do - name <- Parse.munch1 isAlpha - case map toLower name of - "modular" -> return AlwaysModular - _ -> Parse.pfail - --- | Global policy for all packages to say if we prefer package versions that --- are already installed locally or if we just prefer the latest available. --- -data PackagesPreferenceDefault = - - -- | Always prefer the latest version irrespective of any existing - -- installed version. 
- -- - -- * This is the standard policy for upgrade. - -- - PreferAllLatest - - -- | Always prefer the installed versions over ones that would need to be - -- installed. Secondarily, prefer latest versions (eg the latest installed - -- version or if there are none then the latest source version). - | PreferAllInstalled - - -- | Prefer the latest version for packages that are explicitly requested - -- but prefers the installed version for any other packages. - -- - -- * This is the standard policy for install. - -- - | PreferLatestForSelected - deriving Show diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Dependency.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Dependency.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Dependency.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Dependency.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1062 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Dependency --- Copyright : (c) David Himmelstrup 2005, --- Bjorn Bringert 2007 --- Duncan Coutts 2008 --- License : BSD-like --- --- Maintainer : cabal-devel@gmail.com --- Stability : provisional --- Portability : portable --- --- Top level interface to dependency resolution. 
------------------------------------------------------------------------------ -module Distribution.Client.Dependency ( - -- * The main package dependency resolver - chooseSolver, - resolveDependencies, - Progress(..), - foldProgress, - - -- * Alternate, simple resolver that does not do dependencies recursively - resolveWithoutDependencies, - - -- * Constructing resolver policies - PackageProperty(..), - PackageConstraint(..), - scopeToplevel, - PackagesPreferenceDefault(..), - PackagePreference(..), - - -- ** Standard policy - basicInstallPolicy, - standardInstallPolicy, - PackageSpecifier(..), - - -- ** Sandbox policy - applySandboxInstallPolicy, - - -- ** Extra policy options - upgradeDependencies, - reinstallTargets, - - -- ** Policy utils - addConstraints, - addPreferences, - setPreferenceDefault, - setReorderGoals, - setCountConflicts, - setIndependentGoals, - setAvoidReinstalls, - setShadowPkgs, - setStrongFlags, - setAllowBootLibInstalls, - setMaxBackjumps, - setEnableBackjumping, - setSolveExecutables, - setGoalOrder, - setSolverVerbosity, - removeLowerBounds, - removeUpperBounds, - addDefaultSetupDependencies, - addSetupCabalMinVersionConstraint, - addSetupCabalMaxVersionConstraint, - ) where - -import Distribution.Solver.Modular - ( modularResolver, SolverConfig(..), PruneAfterFirstSuccess(..) ) -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex -import Distribution.Client.SolverInstallPlan (SolverInstallPlan) -import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan -import Distribution.Client.Types - ( SourcePackageDb(SourcePackageDb) - , PackageSpecifier(..), pkgSpecifierTarget, pkgSpecifierConstraints - , UnresolvedPkgLoc, UnresolvedSourcePackage - , AllowNewer(..), AllowOlder(..), RelaxDeps(..), RelaxedDep(..) 
- , RelaxDepScope(..), RelaxDepMod(..), RelaxDepSubject(..), isRelaxDeps - ) -import Distribution.Client.Dependency.Types - ( PreSolver(..), Solver(..) - , PackagesPreferenceDefault(..) ) -import Distribution.Client.Sandbox.Types - ( SandboxPackageInfo(..) ) -import Distribution.Package - ( PackageName, mkPackageName, PackageIdentifier(PackageIdentifier), PackageId - , Package(..), packageName, packageVersion ) -import Distribution.Types.Dependency -import qualified Distribution.PackageDescription as PD -import qualified Distribution.PackageDescription.Configuration as PD -import Distribution.PackageDescription.Configuration - ( finalizePD ) -import Distribution.Client.PackageUtils - ( externalBuildDepends ) -import Distribution.Compiler - ( CompilerInfo(..) ) -import Distribution.System - ( Platform ) -import Distribution.Client.Utils - ( duplicatesBy, mergeBy, MergeResult(..) ) -import Distribution.Simple.Utils - ( comparing ) -import Distribution.Simple.Setup - ( asBool ) -import Distribution.Text - ( display ) -import Distribution.Verbosity - ( normal, Verbosity ) -import Distribution.Version -import qualified Distribution.Compat.Graph as Graph - -import Distribution.Solver.Types.ComponentDeps (ComponentDeps) -import qualified Distribution.Solver.Types.ComponentDeps as CD -import Distribution.Solver.Types.ConstraintSource -import Distribution.Solver.Types.DependencyResolver -import Distribution.Solver.Types.InstalledPreference -import Distribution.Solver.Types.LabeledPackageConstraint -import Distribution.Solver.Types.OptionalStanza -import Distribution.Solver.Types.PackageConstraint -import Distribution.Solver.Types.PackagePath -import Distribution.Solver.Types.PackagePreferences -import qualified Distribution.Solver.Types.PackageIndex as PackageIndex -import Distribution.Solver.Types.PkgConfigDb (PkgConfigDb) -import Distribution.Solver.Types.Progress -import Distribution.Solver.Types.ResolverPackage -import Distribution.Solver.Types.Settings -import 
Distribution.Solver.Types.SolverId -import Distribution.Solver.Types.SolverPackage -import Distribution.Solver.Types.SourcePackage -import Distribution.Solver.Types.Variable - -import Data.List - ( foldl', sort, sortBy, nubBy, maximumBy, intercalate, nub ) -import Data.Function (on) -import Data.Maybe (fromMaybe, mapMaybe) -import qualified Data.Map as Map -import qualified Data.Set as Set -import Data.Set (Set) -import Control.Exception - ( assert ) - - --- ------------------------------------------------------------ --- * High level planner policy --- ------------------------------------------------------------ - --- | The set of parameters to the dependency resolver. These parameters are --- relatively low level but many kinds of high level policies can be --- implemented in terms of adjustments to the parameters. --- -data DepResolverParams = DepResolverParams { - depResolverTargets :: Set PackageName, - depResolverConstraints :: [LabeledPackageConstraint], - depResolverPreferences :: [PackagePreference], - depResolverPreferenceDefault :: PackagesPreferenceDefault, - depResolverInstalledPkgIndex :: InstalledPackageIndex, - depResolverSourcePkgIndex :: PackageIndex.PackageIndex UnresolvedSourcePackage, - depResolverReorderGoals :: ReorderGoals, - depResolverCountConflicts :: CountConflicts, - depResolverIndependentGoals :: IndependentGoals, - depResolverAvoidReinstalls :: AvoidReinstalls, - depResolverShadowPkgs :: ShadowPkgs, - depResolverStrongFlags :: StrongFlags, - - -- | Whether to allow base and its dependencies to be installed. - depResolverAllowBootLibInstalls :: AllowBootLibInstalls, - - depResolverMaxBackjumps :: Maybe Int, - depResolverEnableBackjumping :: EnableBackjumping, - -- | Whether or not to solve for dependencies on executables. - -- This should be true, except in the legacy code path where - -- we can't tell if an executable has been installed or not, - -- so we shouldn't solve for them. See #3875. 
- depResolverSolveExecutables :: SolveExecutables, - - -- | Function to override the solver's goal-ordering heuristics. - depResolverGoalOrder :: Maybe (Variable QPN -> Variable QPN -> Ordering), - depResolverVerbosity :: Verbosity - } - -showDepResolverParams :: DepResolverParams -> String -showDepResolverParams p = - "targets: " ++ intercalate ", " (map display $ Set.toList (depResolverTargets p)) - ++ "\nconstraints: " - ++ concatMap (("\n " ++) . showLabeledConstraint) - (depResolverConstraints p) - ++ "\npreferences: " - ++ concatMap (("\n " ++) . showPackagePreference) - (depResolverPreferences p) - ++ "\nstrategy: " ++ show (depResolverPreferenceDefault p) - ++ "\nreorder goals: " ++ show (asBool (depResolverReorderGoals p)) - ++ "\ncount conflicts: " ++ show (asBool (depResolverCountConflicts p)) - ++ "\nindependent goals: " ++ show (asBool (depResolverIndependentGoals p)) - ++ "\navoid reinstalls: " ++ show (asBool (depResolverAvoidReinstalls p)) - ++ "\nshadow packages: " ++ show (asBool (depResolverShadowPkgs p)) - ++ "\nstrong flags: " ++ show (asBool (depResolverStrongFlags p)) - ++ "\nallow boot library installs: " ++ show (asBool (depResolverAllowBootLibInstalls p)) - ++ "\nmax backjumps: " ++ maybe "infinite" show - (depResolverMaxBackjumps p) - where - showLabeledConstraint :: LabeledPackageConstraint -> String - showLabeledConstraint (LabeledPackageConstraint pc src) = - showPackageConstraint pc ++ " (" ++ showConstraintSource src ++ ")" - --- | A package selection preference for a particular package. --- --- Preferences are soft constraints that the dependency resolver should try to --- respect where possible. It is not specified if preferences on some packages --- are more important than others. --- -data PackagePreference = - - -- | A suggested constraint on the version number. - PackageVersionPreference PackageName VersionRange - - -- | If we prefer versions of packages that are already installed. 
- | PackageInstalledPreference PackageName InstalledPreference - - -- | If we would prefer to enable these optional stanzas - -- (i.e. test suites and/or benchmarks) - | PackageStanzasPreference PackageName [OptionalStanza] - - --- | Provide a textual representation of a package preference --- for debugging purposes. --- -showPackagePreference :: PackagePreference -> String -showPackagePreference (PackageVersionPreference pn vr) = - display pn ++ " " ++ display (simplifyVersionRange vr) -showPackagePreference (PackageInstalledPreference pn ip) = - display pn ++ " " ++ show ip -showPackagePreference (PackageStanzasPreference pn st) = - display pn ++ " " ++ show st - -basicDepResolverParams :: InstalledPackageIndex - -> PackageIndex.PackageIndex UnresolvedSourcePackage - -> DepResolverParams -basicDepResolverParams installedPkgIndex sourcePkgIndex = - DepResolverParams { - depResolverTargets = Set.empty, - depResolverConstraints = [], - depResolverPreferences = [], - depResolverPreferenceDefault = PreferLatestForSelected, - depResolverInstalledPkgIndex = installedPkgIndex, - depResolverSourcePkgIndex = sourcePkgIndex, - depResolverReorderGoals = ReorderGoals False, - depResolverCountConflicts = CountConflicts True, - depResolverIndependentGoals = IndependentGoals False, - depResolverAvoidReinstalls = AvoidReinstalls False, - depResolverShadowPkgs = ShadowPkgs False, - depResolverStrongFlags = StrongFlags False, - depResolverAllowBootLibInstalls = AllowBootLibInstalls False, - depResolverMaxBackjumps = Nothing, - depResolverEnableBackjumping = EnableBackjumping True, - depResolverSolveExecutables = SolveExecutables True, - depResolverGoalOrder = Nothing, - depResolverVerbosity = normal - } - -addTargets :: [PackageName] - -> DepResolverParams -> DepResolverParams -addTargets extraTargets params = - params { - depResolverTargets = Set.fromList extraTargets `Set.union` depResolverTargets params - } - -addConstraints :: [LabeledPackageConstraint] - -> DepResolverParams 
-> DepResolverParams -addConstraints extraConstraints params = - params { - depResolverConstraints = extraConstraints - ++ depResolverConstraints params - } - -addPreferences :: [PackagePreference] - -> DepResolverParams -> DepResolverParams -addPreferences extraPreferences params = - params { - depResolverPreferences = extraPreferences - ++ depResolverPreferences params - } - -setPreferenceDefault :: PackagesPreferenceDefault - -> DepResolverParams -> DepResolverParams -setPreferenceDefault preferenceDefault params = - params { - depResolverPreferenceDefault = preferenceDefault - } - -setReorderGoals :: ReorderGoals -> DepResolverParams -> DepResolverParams -setReorderGoals reorder params = - params { - depResolverReorderGoals = reorder - } - -setCountConflicts :: CountConflicts -> DepResolverParams -> DepResolverParams -setCountConflicts count params = - params { - depResolverCountConflicts = count - } - -setIndependentGoals :: IndependentGoals -> DepResolverParams -> DepResolverParams -setIndependentGoals indep params = - params { - depResolverIndependentGoals = indep - } - -setAvoidReinstalls :: AvoidReinstalls -> DepResolverParams -> DepResolverParams -setAvoidReinstalls avoid params = - params { - depResolverAvoidReinstalls = avoid - } - -setShadowPkgs :: ShadowPkgs -> DepResolverParams -> DepResolverParams -setShadowPkgs shadow params = - params { - depResolverShadowPkgs = shadow - } - -setStrongFlags :: StrongFlags -> DepResolverParams -> DepResolverParams -setStrongFlags sf params = - params { - depResolverStrongFlags = sf - } - -setAllowBootLibInstalls :: AllowBootLibInstalls -> DepResolverParams -> DepResolverParams -setAllowBootLibInstalls i params = - params { - depResolverAllowBootLibInstalls = i - } - -setMaxBackjumps :: Maybe Int -> DepResolverParams -> DepResolverParams -setMaxBackjumps n params = - params { - depResolverMaxBackjumps = n - } - -setEnableBackjumping :: EnableBackjumping -> DepResolverParams -> DepResolverParams -setEnableBackjumping 
b params = - params { - depResolverEnableBackjumping = b - } - -setSolveExecutables :: SolveExecutables -> DepResolverParams -> DepResolverParams -setSolveExecutables b params = - params { - depResolverSolveExecutables = b - } - -setGoalOrder :: Maybe (Variable QPN -> Variable QPN -> Ordering) - -> DepResolverParams - -> DepResolverParams -setGoalOrder order params = - params { - depResolverGoalOrder = order - } - -setSolverVerbosity :: Verbosity -> DepResolverParams -> DepResolverParams -setSolverVerbosity verbosity params = - params { - depResolverVerbosity = verbosity - } - --- | Some packages are specific to a given compiler version and should never be --- upgraded. -dontUpgradeNonUpgradeablePackages :: DepResolverParams -> DepResolverParams -dontUpgradeNonUpgradeablePackages params = - addConstraints extraConstraints params - where - extraConstraints = - [ LabeledPackageConstraint - (PackageConstraint (ScopeAnyQualifier pkgname) PackagePropertyInstalled) - ConstraintSourceNonUpgradeablePackage - | Set.notMember (mkPackageName "base") (depResolverTargets params) - -- If you change this enumeration, make sure to update the list in - -- "Distribution.Solver.Modular.Solver" as well - , pkgname <- [ mkPackageName "base" - , mkPackageName "ghc-prim" - , mkPackageName "integer-gmp" - , mkPackageName "integer-simple" - , mkPackageName "template-haskell" - ] - , isInstalled pkgname ] - - isInstalled = not . null - . 
InstalledPackageIndex.lookupPackageName - (depResolverInstalledPkgIndex params) - -addSourcePackages :: [UnresolvedSourcePackage] - -> DepResolverParams -> DepResolverParams -addSourcePackages pkgs params = - params { - depResolverSourcePkgIndex = - foldl (flip PackageIndex.insert) - (depResolverSourcePkgIndex params) pkgs - } - -hideInstalledPackagesSpecificBySourcePackageId :: [PackageId] - -> DepResolverParams - -> DepResolverParams -hideInstalledPackagesSpecificBySourcePackageId pkgids params = - --TODO: this should work using exclude constraints instead - params { - depResolverInstalledPkgIndex = - foldl' (flip InstalledPackageIndex.deleteSourcePackageId) - (depResolverInstalledPkgIndex params) pkgids - } - -hideInstalledPackagesAllVersions :: [PackageName] - -> DepResolverParams -> DepResolverParams -hideInstalledPackagesAllVersions pkgnames params = - --TODO: this should work using exclude constraints instead - params { - depResolverInstalledPkgIndex = - foldl' (flip InstalledPackageIndex.deletePackageName) - (depResolverInstalledPkgIndex params) pkgnames - } - - --- | Remove upper bounds in dependencies using the policy specified by the --- 'AllowNewer' argument (all/some/none). --- --- Note: It's important to apply 'removeUpperBounds' after --- 'addSourcePackages'. Otherwise, the packages inserted by --- 'addSourcePackages' won't have upper bounds in dependencies relaxed. 
--- -removeUpperBounds :: AllowNewer -> DepResolverParams -> DepResolverParams -removeUpperBounds (AllowNewer relDeps) = removeBounds RelaxUpper relDeps - --- | Dual of 'removeUpperBounds' -removeLowerBounds :: AllowOlder -> DepResolverParams -> DepResolverParams -removeLowerBounds (AllowOlder relDeps) = removeBounds RelaxLower relDeps - -data RelaxKind = RelaxLower | RelaxUpper - --- | Common internal implementation of 'removeLowerBounds'/'removeUpperBounds' -removeBounds :: RelaxKind -> RelaxDeps -> DepResolverParams -> DepResolverParams -removeBounds _ rd params | not (isRelaxDeps rd) = params -- no-op optimisation -removeBounds relKind relDeps params = - params { - depResolverSourcePkgIndex = sourcePkgIndex' - } - where - sourcePkgIndex' = fmap relaxDeps $ depResolverSourcePkgIndex params - - relaxDeps :: UnresolvedSourcePackage -> UnresolvedSourcePackage - relaxDeps srcPkg = srcPkg { - packageDescription = relaxPackageDeps relKind relDeps - (packageDescription srcPkg) - } - --- | Relax the dependencies of this package if needed. 
--- --- Helper function used by 'removeBounds' -relaxPackageDeps :: RelaxKind - -> RelaxDeps - -> PD.GenericPackageDescription -> PD.GenericPackageDescription -relaxPackageDeps _ rd gpd | not (isRelaxDeps rd) = gpd -- subsumed by no-op case in 'removeBounds' -relaxPackageDeps relKind RelaxDepsAll gpd = PD.transformAllBuildDepends relaxAll gpd - where - relaxAll :: Dependency -> Dependency - relaxAll (Dependency pkgName verRange) = - Dependency pkgName (removeBound relKind RelaxDepModNone verRange) - -relaxPackageDeps relKind (RelaxDepsSome depsToRelax0) gpd = - PD.transformAllBuildDepends relaxSome gpd - where - thisPkgName = packageName gpd - thisPkgId = packageId gpd - depsToRelax = Map.fromList $ mapMaybe f depsToRelax0 - - f :: RelaxedDep -> Maybe (RelaxDepSubject,RelaxDepMod) - f (RelaxedDep scope rdm p) = case scope of - RelaxDepScopeAll -> Just (p,rdm) - RelaxDepScopePackage p0 - | p0 == thisPkgName -> Just (p,rdm) - | otherwise -> Nothing - RelaxDepScopePackageId p0 - | p0 == thisPkgId -> Just (p,rdm) - | otherwise -> Nothing - - relaxSome :: Dependency -> Dependency - relaxSome d@(Dependency depName verRange) - | Just relMod <- Map.lookup RelaxDepSubjectAll depsToRelax = - -- a '*'-subject acts absorbing, for consistency with - -- the 'Semigroup RelaxDeps' instance - Dependency depName (removeBound relKind relMod verRange) - | Just relMod <- Map.lookup (RelaxDepSubjectPkg depName) depsToRelax = - Dependency depName (removeBound relKind relMod verRange) - | otherwise = d -- no-op - --- | Internal helper for 'relaxPackageDeps' -removeBound :: RelaxKind -> RelaxDepMod -> VersionRange -> VersionRange -removeBound RelaxLower RelaxDepModNone = removeLowerBound -removeBound RelaxUpper RelaxDepModNone = removeUpperBound -removeBound relKind RelaxDepModCaret = hyloVersionRange embed projectVersionRange - where - embed (MajorBoundVersionF v) = caretTransformation v (majorUpperBound v) - embed vr = embedVersionRange vr - - -- This function is the interesting part as 
it defines the meaning - -- of 'RelaxDepModCaret', i.e. to transform only @^>=@ constraints; - caretTransformation l u = case relKind of - RelaxUpper -> orLaterVersion l -- rewrite @^>= x.y.z@ into @>= x.y.z@ - RelaxLower -> earlierVersion u -- rewrite @^>= x.y.z@ into @< x.(y+1)@ - --- | Supply defaults for packages without explicit Setup dependencies --- --- Note: It's important to apply 'addDefaultSetupDepends' after --- 'addSourcePackages'. Otherwise, the packages inserted by --- 'addSourcePackages' won't have upper bounds in dependencies relaxed. --- -addDefaultSetupDependencies :: (UnresolvedSourcePackage -> Maybe [Dependency]) - -> DepResolverParams -> DepResolverParams -addDefaultSetupDependencies defaultSetupDeps params = - params { - depResolverSourcePkgIndex = - fmap applyDefaultSetupDeps (depResolverSourcePkgIndex params) - } - where - applyDefaultSetupDeps :: UnresolvedSourcePackage -> UnresolvedSourcePackage - applyDefaultSetupDeps srcpkg = - srcpkg { - packageDescription = gpkgdesc { - PD.packageDescription = pkgdesc { - PD.setupBuildInfo = - case PD.setupBuildInfo pkgdesc of - Just sbi -> Just sbi - Nothing -> case defaultSetupDeps srcpkg of - Nothing -> Nothing - Just deps | isCustom -> Just PD.SetupBuildInfo { - PD.defaultSetupDepends = True, - PD.setupDepends = deps - } - | otherwise -> Nothing - } - } - } - where - isCustom = PD.buildType pkgdesc == PD.Custom - gpkgdesc = packageDescription srcpkg - pkgdesc = PD.packageDescription gpkgdesc - --- | If a package has a custom setup then we need to add a setup-depends --- on Cabal. 
--- -addSetupCabalMinVersionConstraint :: Version - -> DepResolverParams -> DepResolverParams -addSetupCabalMinVersionConstraint minVersion = - addConstraints - [ LabeledPackageConstraint - (PackageConstraint (ScopeAnySetupQualifier cabalPkgname) - (PackagePropertyVersion $ orLaterVersion minVersion)) - ConstraintSetupCabalMinVersion - ] - where - cabalPkgname = mkPackageName "Cabal" - --- | Variant of 'addSetupCabalMinVersionConstraint' which sets an --- upper bound on @setup.Cabal@ labeled with 'ConstraintSetupCabalMaxVersion'. --- -addSetupCabalMaxVersionConstraint :: Version - -> DepResolverParams -> DepResolverParams -addSetupCabalMaxVersionConstraint maxVersion = - addConstraints - [ LabeledPackageConstraint - (PackageConstraint (ScopeAnySetupQualifier cabalPkgname) - (PackagePropertyVersion $ earlierVersion maxVersion)) - ConstraintSetupCabalMaxVersion - ] - where - cabalPkgname = mkPackageName "Cabal" - - -upgradeDependencies :: DepResolverParams -> DepResolverParams -upgradeDependencies = setPreferenceDefault PreferAllLatest - - -reinstallTargets :: DepResolverParams -> DepResolverParams -reinstallTargets params = - hideInstalledPackagesAllVersions (Set.toList $ depResolverTargets params) params - - --- | A basic solver policy on which all others are built. --- -basicInstallPolicy :: InstalledPackageIndex - -> SourcePackageDb - -> [PackageSpecifier UnresolvedSourcePackage] - -> DepResolverParams -basicInstallPolicy - installedPkgIndex (SourcePackageDb sourcePkgIndex sourcePkgPrefs) - pkgSpecifiers - - = addPreferences - [ PackageVersionPreference name ver - | (name, ver) <- Map.toList sourcePkgPrefs ] - - . addConstraints - (concatMap pkgSpecifierConstraints pkgSpecifiers) - - . addTargets - (map pkgSpecifierTarget pkgSpecifiers) - - . hideInstalledPackagesSpecificBySourcePackageId - [ packageId pkg | SpecificSourcePackage pkg <- pkgSpecifiers ] - - . 
addSourcePackages - [ pkg | SpecificSourcePackage pkg <- pkgSpecifiers ] - - $ basicDepResolverParams - installedPkgIndex sourcePkgIndex - - --- | The policy used by all the standard commands, install, fetch, freeze etc --- (but not the new-build and related commands). --- --- It extends the 'basicInstallPolicy' with a policy on setup deps. --- -standardInstallPolicy :: InstalledPackageIndex - -> SourcePackageDb - -> [PackageSpecifier UnresolvedSourcePackage] - -> DepResolverParams -standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers - - = addDefaultSetupDependencies mkDefaultSetupDeps - - $ basicInstallPolicy - installedPkgIndex sourcePkgDb pkgSpecifiers - - where - -- Force Cabal >= 1.24 dep when the package is affected by #3199. - mkDefaultSetupDeps :: UnresolvedSourcePackage -> Maybe [Dependency] - mkDefaultSetupDeps srcpkg | affected = - Just [Dependency (mkPackageName "Cabal") - (orLaterVersion $ mkVersion [1,24])] - | otherwise = Nothing - where - gpkgdesc = packageDescription srcpkg - pkgdesc = PD.packageDescription gpkgdesc - bt = PD.buildType pkgdesc - affected = bt == PD.Custom && hasBuildableFalse gpkgdesc - - -- Does this package contain any components with non-empty 'build-depends' - -- and a 'buildable' field that could potentially be set to 'False'? False - -- positives are possible. - hasBuildableFalse :: PD.GenericPackageDescription -> Bool - hasBuildableFalse gpkg = - not (all alwaysTrue (zipWith PD.cOr buildableConditions noDepConditions)) - where - buildableConditions = PD.extractConditions PD.buildable gpkg - noDepConditions = PD.extractConditions - (null . 
PD.targetBuildDepends) gpkg - alwaysTrue (PD.Lit True) = True - alwaysTrue _ = False - - -applySandboxInstallPolicy :: SandboxPackageInfo - -> DepResolverParams - -> DepResolverParams -applySandboxInstallPolicy - (SandboxPackageInfo modifiedDeps otherDeps allSandboxPkgs _allDeps) - params - - = addPreferences [ PackageInstalledPreference n PreferInstalled - | n <- installedNotModified ] - - . addTargets installedNotModified - - . addPreferences - [ PackageVersionPreference (packageName pkg) - (thisVersion (packageVersion pkg)) | pkg <- otherDeps ] - - . addConstraints - [ let pc = PackageConstraint - (scopeToplevel $ packageName pkg) - (PackagePropertyVersion $ thisVersion (packageVersion pkg)) - in LabeledPackageConstraint pc ConstraintSourceModifiedAddSourceDep - | pkg <- modifiedDeps ] - - . addTargets [ packageName pkg | pkg <- modifiedDeps ] - - . hideInstalledPackagesSpecificBySourcePackageId - [ packageId pkg | pkg <- modifiedDeps ] - - -- We don't need to add source packages for add-source deps to the - -- 'installedPkgIndex' since 'getSourcePackages' did that for us. - - $ params - - where - installedPkgIds = - map fst . InstalledPackageIndex.allPackagesBySourcePackageId - $ allSandboxPkgs - modifiedPkgIds = map packageId modifiedDeps - installedNotModified = [ packageName pkg | pkg <- installedPkgIds, - pkg `notElem` modifiedPkgIds ] - --- ------------------------------------------------------------ --- * Interface to the standard resolver --- ------------------------------------------------------------ - -chooseSolver :: Verbosity -> PreSolver -> CompilerInfo -> IO Solver -chooseSolver _verbosity preSolver _cinfo = - case preSolver of - AlwaysModular -> do - return Modular - -runSolver :: Solver -> SolverConfig -> DependencyResolver UnresolvedPkgLoc -runSolver Modular = modularResolver - --- | Run the dependency solver. 
--- --- Since this is potentially an expensive operation, the result is wrapped in a --- a 'Progress' structure that can be unfolded to provide progress information, --- logging messages and the final result or an error. --- -resolveDependencies :: Platform - -> CompilerInfo - -> PkgConfigDb - -> Solver - -> DepResolverParams - -> Progress String String SolverInstallPlan - - --TODO: is this needed here? see dontUpgradeNonUpgradeablePackages -resolveDependencies platform comp _pkgConfigDB _solver params - | Set.null (depResolverTargets params) - = return (validateSolverResult platform comp indGoals []) - where - indGoals = depResolverIndependentGoals params - -resolveDependencies platform comp pkgConfigDB solver params = - - Step (showDepResolverParams finalparams) - $ fmap (validateSolverResult platform comp indGoals) - $ runSolver solver (SolverConfig reordGoals cntConflicts - indGoals noReinstalls - shadowing strFlags allowBootLibs maxBkjumps enableBj - solveExes order verbosity (PruneAfterFirstSuccess False)) - platform comp installedPkgIndex sourcePkgIndex - pkgConfigDB preferences constraints targets - where - - finalparams @ (DepResolverParams - targets constraints - prefs defpref - installedPkgIndex - sourcePkgIndex - reordGoals - cntConflicts - indGoals - noReinstalls - shadowing - strFlags - allowBootLibs - maxBkjumps - enableBj - solveExes - order - verbosity) = - if asBool (depResolverAllowBootLibInstalls params) - then params - else dontUpgradeNonUpgradeablePackages params - - preferences = interpretPackagesPreference targets defpref prefs - - --- | Give an interpretation to the global 'PackagesPreference' as --- specific per-package 'PackageVersionPreference'. 
--- -interpretPackagesPreference :: Set PackageName - -> PackagesPreferenceDefault - -> [PackagePreference] - -> (PackageName -> PackagePreferences) -interpretPackagesPreference selected defaultPref prefs = - \pkgname -> PackagePreferences (versionPref pkgname) - (installPref pkgname) - (stanzasPref pkgname) - where - versionPref pkgname = - fromMaybe [anyVersion] (Map.lookup pkgname versionPrefs) - versionPrefs = Map.fromListWith (++) - [(pkgname, [pref]) - | PackageVersionPreference pkgname pref <- prefs] - - installPref pkgname = - fromMaybe (installPrefDefault pkgname) (Map.lookup pkgname installPrefs) - installPrefs = Map.fromList - [ (pkgname, pref) - | PackageInstalledPreference pkgname pref <- prefs ] - installPrefDefault = case defaultPref of - PreferAllLatest -> const PreferLatest - PreferAllInstalled -> const PreferInstalled - PreferLatestForSelected -> \pkgname -> - -- When you say cabal install foo, what you really mean is, prefer the - -- latest version of foo, but the installed version of everything else - if pkgname `Set.member` selected then PreferLatest - else PreferInstalled - - stanzasPref pkgname = - fromMaybe [] (Map.lookup pkgname stanzasPrefs) - stanzasPrefs = Map.fromListWith (\a b -> nub (a ++ b)) - [ (pkgname, pref) - | PackageStanzasPreference pkgname pref <- prefs ] - - --- ------------------------------------------------------------ --- * Checking the result of the solver --- ------------------------------------------------------------ - --- | Make an install plan from the output of the dep resolver. --- It checks that the plan is valid, or it's an error in the dep resolver. 
--- -validateSolverResult :: Platform - -> CompilerInfo - -> IndependentGoals - -> [ResolverPackage UnresolvedPkgLoc] - -> SolverInstallPlan -validateSolverResult platform comp indepGoals pkgs = - case planPackagesProblems platform comp pkgs of - [] -> case SolverInstallPlan.new indepGoals graph of - Right plan -> plan - Left problems -> error (formatPlanProblems problems) - problems -> error (formatPkgProblems problems) - - where - graph = Graph.fromDistinctList pkgs - - formatPkgProblems = formatProblemMessage . map showPlanPackageProblem - formatPlanProblems = formatProblemMessage . map SolverInstallPlan.showPlanProblem - - formatProblemMessage problems = - unlines $ - "internal error: could not construct a valid install plan." - : "The proposed (invalid) plan contained the following problems:" - : problems - ++ "Proposed plan:" - : [SolverInstallPlan.showPlanIndex pkgs] - - -data PlanPackageProblem = - InvalidConfiguredPackage (SolverPackage UnresolvedPkgLoc) - [PackageProblem] - | DuplicatePackageSolverId SolverId [ResolverPackage UnresolvedPkgLoc] - -showPlanPackageProblem :: PlanPackageProblem -> String -showPlanPackageProblem (InvalidConfiguredPackage pkg packageProblems) = - "Package " ++ display (packageId pkg) - ++ " has an invalid configuration, in particular:\n" - ++ unlines [ " " ++ showPackageProblem problem - | problem <- packageProblems ] -showPlanPackageProblem (DuplicatePackageSolverId pid dups) = - "Package " ++ display (packageId pid) ++ " has " - ++ show (length dups) ++ " duplicate instances." 
- -planPackagesProblems :: Platform -> CompilerInfo - -> [ResolverPackage UnresolvedPkgLoc] - -> [PlanPackageProblem] -planPackagesProblems platform cinfo pkgs = - [ InvalidConfiguredPackage pkg packageProblems - | Configured pkg <- pkgs - , let packageProblems = configuredPackageProblems platform cinfo pkg - , not (null packageProblems) ] - ++ [ DuplicatePackageSolverId (Graph.nodeKey (head dups)) dups - | dups <- duplicatesBy (comparing Graph.nodeKey) pkgs ] - -data PackageProblem = DuplicateFlag PD.FlagName - | MissingFlag PD.FlagName - | ExtraFlag PD.FlagName - | DuplicateDeps [PackageId] - | MissingDep Dependency - | ExtraDep PackageId - | InvalidDep Dependency PackageId - -showPackageProblem :: PackageProblem -> String -showPackageProblem (DuplicateFlag flag) = - "duplicate flag in the flag assignment: " ++ PD.unFlagName flag - -showPackageProblem (MissingFlag flag) = - "missing an assignment for the flag: " ++ PD.unFlagName flag - -showPackageProblem (ExtraFlag flag) = - "extra flag given that is not used by the package: " ++ PD.unFlagName flag - -showPackageProblem (DuplicateDeps pkgids) = - "duplicate packages specified as selected dependencies: " - ++ intercalate ", " (map display pkgids) - -showPackageProblem (MissingDep dep) = - "the package has a dependency " ++ display dep - ++ " but no package has been selected to satisfy it." - -showPackageProblem (ExtraDep pkgid) = - "the package configuration specifies " ++ display pkgid - ++ " but (with the given flag assignment) the package does not actually" - ++ " depend on any version of that package." - -showPackageProblem (InvalidDep dep pkgid) = - "the package depends on " ++ display dep - ++ " but the configuration specifies " ++ display pkgid - ++ " which does not satisfy the dependency." - --- | A 'ConfiguredPackage' is valid if the flag assignment is total and if --- in the configuration given by the flag assignment, all the package --- dependencies are satisfied by the specified packages. 
--- -configuredPackageProblems :: Platform -> CompilerInfo - -> SolverPackage UnresolvedPkgLoc -> [PackageProblem] -configuredPackageProblems platform cinfo - (SolverPackage pkg specifiedFlags stanzas specifiedDeps' _specifiedExeDeps') = - [ DuplicateFlag flag - | flag <- PD.findDuplicateFlagAssignments specifiedFlags ] - ++ [ MissingFlag flag | OnlyInLeft flag <- mergedFlags ] - ++ [ ExtraFlag flag | OnlyInRight flag <- mergedFlags ] - ++ [ DuplicateDeps pkgs - | pkgs <- CD.nonSetupDeps (fmap (duplicatesBy (comparing packageName)) - specifiedDeps) ] - ++ [ MissingDep dep | OnlyInLeft dep <- mergedDeps ] - ++ [ ExtraDep pkgid | OnlyInRight pkgid <- mergedDeps ] - ++ [ InvalidDep dep pkgid | InBoth dep pkgid <- mergedDeps - , not (packageSatisfiesDependency pkgid dep) ] - -- TODO: sanity tests on executable deps - where - specifiedDeps :: ComponentDeps [PackageId] - specifiedDeps = fmap (map solverSrcId) specifiedDeps' - - mergedFlags = mergeBy compare - (sort $ map PD.flagName (PD.genPackageFlags (packageDescription pkg))) - (sort $ map fst (PD.unFlagAssignment specifiedFlags)) -- TODO - - packageSatisfiesDependency - (PackageIdentifier name version) - (Dependency name' versionRange) = assert (name == name') $ - version `withinRange` versionRange - - dependencyName (Dependency name _) = name - - mergedDeps :: [MergeResult Dependency PackageId] - mergedDeps = mergeDeps requiredDeps (CD.flatDeps specifiedDeps) - - mergeDeps :: [Dependency] -> [PackageId] - -> [MergeResult Dependency PackageId] - mergeDeps required specified = - let sortNubOn f = nubBy ((==) `on` f) . sortBy (compare `on` f) in - mergeBy - (\dep pkgid -> dependencyName dep `compare` packageName pkgid) - (sortNubOn dependencyName required) - (sortNubOn packageName specified) - - compSpec = enableStanzas stanzas - -- TODO: It would be nicer to use ComponentDeps here so we can be more - -- precise in our checks. In fact, this no longer relies on buildDepends and - -- thus should be easier to fix. 
As long as we _do_ use a flat list here, we - -- have to allow for duplicates when we fold specifiedDeps; once we have - -- proper ComponentDeps here we should get rid of the `nubOn` in - -- `mergeDeps`. - requiredDeps :: [Dependency] - requiredDeps = - --TODO: use something lower level than finalizePD - case finalizePD specifiedFlags - compSpec - (const True) - platform cinfo - [] - (packageDescription pkg) of - Right (resolvedPkg, _) -> - externalBuildDepends resolvedPkg compSpec - ++ maybe [] PD.setupDepends (PD.setupBuildInfo resolvedPkg) - Left _ -> - error "configuredPackageInvalidDeps internal error" - - --- ------------------------------------------------------------ --- * Simple resolver that ignores dependencies --- ------------------------------------------------------------ - --- | A simplistic method of resolving a list of target package names to --- available packages. --- --- Specifically, it does not consider package dependencies at all. Unlike --- 'resolveDependencies', no attempt is made to ensure that the selected --- packages have dependencies that are satisfiable or consistent with --- each other. --- --- It is suitable for tasks such as selecting packages to download for user --- inspection. It is not suitable for selecting packages to install. --- --- Note: if no installed package index is available, it is OK to pass 'mempty'. --- It simply means preferences for installed packages will be ignored. 
--- -resolveWithoutDependencies :: DepResolverParams - -> Either [ResolveNoDepsError] [UnresolvedSourcePackage] -resolveWithoutDependencies (DepResolverParams targets constraints - prefs defpref installedPkgIndex sourcePkgIndex - _reorderGoals _countConflicts _indGoals _avoidReinstalls - _shadowing _strFlags _maxBjumps _enableBj - _solveExes _allowBootLibInstalls _order _verbosity) = - collectEithers $ map selectPackage (Set.toList targets) - where - selectPackage :: PackageName -> Either ResolveNoDepsError UnresolvedSourcePackage - selectPackage pkgname - | null choices = Left $! ResolveUnsatisfiable pkgname requiredVersions - | otherwise = Right $! maximumBy bestByPrefs choices - - where - -- Constraints - requiredVersions = packageConstraints pkgname - pkgDependency = Dependency pkgname requiredVersions - choices = PackageIndex.lookupDependency sourcePkgIndex - pkgDependency - - -- Preferences - PackagePreferences preferredVersions preferInstalled _ - = packagePreferences pkgname - - bestByPrefs = comparing $ \pkg -> - (installPref pkg, versionPref pkg, packageVersion pkg) - installPref = case preferInstalled of - PreferLatest -> const False - PreferInstalled -> not . null - . InstalledPackageIndex.lookupSourcePackageId - installedPkgIndex - . packageId - versionPref pkg = length . filter (packageVersion pkg `withinRange`) $ - preferredVersions - - packageConstraints :: PackageName -> VersionRange - packageConstraints pkgname = - Map.findWithDefault anyVersion pkgname packageVersionConstraintMap - packageVersionConstraintMap = - let pcs = map unlabelPackageConstraint constraints - in Map.fromList [ (scopeToPackageName scope, range) - | PackageConstraint - scope (PackagePropertyVersion range) <- pcs ] - - packagePreferences :: PackageName -> PackagePreferences - packagePreferences = interpretPackagesPreference targets defpref prefs - - -collectEithers :: [Either a b] -> Either [a] [b] -collectEithers = collect . 
partitionEithers - where - collect ([], xs) = Right xs - collect (errs,_) = Left errs - partitionEithers :: [Either a b] -> ([a],[b]) - partitionEithers = foldr (either left right) ([],[]) - where - left a (l, r) = (a:l, r) - right a (l, r) = (l, a:r) - --- | Errors for 'resolveWithoutDependencies'. --- -data ResolveNoDepsError = - - -- | A package name which cannot be resolved to a specific package. - -- Also gives the constraint on the version and whether there was - -- a constraint on the package being installed. - ResolveUnsatisfiable PackageName VersionRange - -instance Show ResolveNoDepsError where - show (ResolveUnsatisfiable name ver) = - "There is no available version of " ++ display name - ++ " that satisfies " ++ display (simplifyVersionRange ver) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/DistDirLayout.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/DistDirLayout.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/DistDirLayout.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/DistDirLayout.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,285 +0,0 @@ -{-# LANGUAGE RecordWildCards #-} - --- | --- --- The layout of the .\/dist\/ directory where cabal keeps all of its state --- and build artifacts. --- -module Distribution.Client.DistDirLayout ( - -- * 'DistDirLayout' - DistDirLayout(..), - DistDirParams(..), - defaultDistDirLayout, - ProjectRoot(..), - - -- * 'StoreDirLayout' - StoreDirLayout(..), - defaultStoreDirLayout, - - -- * 'CabalDirLayout' - CabalDirLayout(..), - mkCabalDirLayout, - defaultCabalDirLayout -) where - -import Data.Maybe (fromMaybe) -import System.FilePath - -import Distribution.Package - ( PackageId, ComponentId, UnitId ) -import Distribution.Client.Setup - ( ArchiveFormat(..) 
) -import Distribution.Compiler -import Distribution.Simple.Compiler - ( PackageDB(..), PackageDBStack, OptimisationLevel(..) ) -import Distribution.Text -import Distribution.Pretty - ( prettyShow ) -import Distribution.Types.ComponentName -import Distribution.System - - --- | Information which can be used to construct the path to --- the build directory of a build. This is LESS fine-grained --- than what goes into the hashed 'InstalledPackageId', --- and for good reason: we don't want this path to change if --- the user, say, adds a dependency to their project. -data DistDirParams = DistDirParams { - distParamUnitId :: UnitId, - distParamPackageId :: PackageId, - distParamComponentId :: ComponentId, - distParamComponentName :: Maybe ComponentName, - distParamCompilerId :: CompilerId, - distParamPlatform :: Platform, - distParamOptimization :: OptimisationLevel - -- TODO (see #3343): - -- Flag assignments - -- Optimization - } - - --- | The layout of the project state directory. Traditionally this has been --- called the @dist@ directory. --- -data DistDirLayout = DistDirLayout { - - -- | The root directory of the project. Many other files are relative to - -- this location. In particular, the @cabal.project@ lives here. - -- - distProjectRootDirectory :: FilePath, - - -- | The @cabal.project@ file and related like @cabal.project.freeze@. - -- The parameter is for the extension, like \"freeze\", or \"\" for the - -- main file. - -- - distProjectFile :: String -> FilePath, - - -- | The \"dist\" directory, which is the root of where cabal keeps all - -- its state including the build artifacts from each package we build. - -- - distDirectory :: FilePath, - - -- | The directory under dist where we keep the build artifacts for a - -- package we're building from a local directory. - -- - -- This uses a 'UnitId' not just a 'PackageName' because technically - -- we can have multiple instances of the same package in a solution - -- (e.g. setup deps). 
- -- - distBuildDirectory :: DistDirParams -> FilePath, - distBuildRootDirectory :: FilePath, - - -- | The directory under dist where we download tarballs and source - -- control repos to. - -- - distDownloadSrcDirectory :: FilePath, - - -- | The directory under dist where we put the unpacked sources of - -- packages, in those cases where it makes sense to keep the build - -- artifacts to reduce rebuild times. - -- - distUnpackedSrcDirectory :: PackageId -> FilePath, - distUnpackedSrcRootDirectory :: FilePath, - - -- | The location for project-wide cache files (e.g. state used in - -- incremental rebuilds). - -- - distProjectCacheFile :: String -> FilePath, - distProjectCacheDirectory :: FilePath, - - -- | The location for package-specific cache files (e.g. state used in - -- incremental rebuilds). - -- - distPackageCacheFile :: DistDirParams -> String -> FilePath, - distPackageCacheDirectory :: DistDirParams -> FilePath, - - -- | The location that sdists are placed by default. - distSdistFile :: PackageId -> ArchiveFormat -> FilePath, - distSdistDirectory :: FilePath, - - distTempDirectory :: FilePath, - distBinDirectory :: FilePath, - - distPackageDB :: CompilerId -> PackageDB - } - - --- | The layout of a cabal nix-style store. --- -data StoreDirLayout = StoreDirLayout { - storeDirectory :: CompilerId -> FilePath, - storePackageDirectory :: CompilerId -> UnitId -> FilePath, - storePackageDBPath :: CompilerId -> FilePath, - storePackageDB :: CompilerId -> PackageDB, - storePackageDBStack :: CompilerId -> PackageDBStack, - storeIncomingDirectory :: CompilerId -> FilePath, - storeIncomingLock :: CompilerId -> UnitId -> FilePath - } - - ---TODO: move to another module, e.g. CabalDirLayout? --- or perhaps rename this module to DirLayouts. - --- | The layout of the user-wide cabal directory, that is the @~/.cabal@ dir --- on unix, and equivalents on other systems. 
--- --- At the moment this is just a partial specification, but the idea is --- eventually to cover it all. --- -data CabalDirLayout = CabalDirLayout { - cabalStoreDirLayout :: StoreDirLayout, - - cabalLogsDirectory :: FilePath, - cabalWorldFile :: FilePath - } - - --- | Information about the root directory of the project. --- --- It can either be an implict project root in the current dir if no --- @cabal.project@ file is found, or an explicit root if the file is found. --- -data ProjectRoot = - -- | -- ^ An implict project root. It contains the absolute project - -- root dir. - ProjectRootImplicit FilePath - - -- | -- ^ An explicit project root. It contains the absolute project - -- root dir and the relative @cabal.project@ file (or explicit override) - | ProjectRootExplicit FilePath FilePath - deriving (Eq, Show) - --- | Make the default 'DistDirLayout' based on the project root dir and --- optional overrides for the location of the @dist@ directory and the --- @cabal.project@ file. --- -defaultDistDirLayout :: ProjectRoot -- ^ the project root - -> Maybe FilePath -- ^ the @dist@ directory or default - -- (absolute or relative to the root) - -> DistDirLayout -defaultDistDirLayout projectRoot mdistDirectory = - DistDirLayout {..} - where - (projectRootDir, projectFile) = case projectRoot of - ProjectRootImplicit dir -> (dir, dir "cabal.project") - ProjectRootExplicit dir file -> (dir, dir file) - - distProjectRootDirectory = projectRootDir - distProjectFile ext = projectFile <.> ext - - distDirectory = distProjectRootDirectory - fromMaybe "dist-newstyle" mdistDirectory - --TODO: switch to just dist at some point, or some other new name - - distBuildRootDirectory = distDirectory "build" - distBuildDirectory params = - distBuildRootDirectory - display (distParamPlatform params) - display (distParamCompilerId params) - display (distParamPackageId params) - (case distParamComponentName params of - Nothing -> "" - Just CLibName -> "" - Just (CSubLibName name) -> "l" 
display name - Just (CFLibName name) -> "f" display name - Just (CExeName name) -> "x" display name - Just (CTestName name) -> "t" display name - Just (CBenchName name) -> "b" display name) - (case distParamOptimization params of - NoOptimisation -> "noopt" - NormalOptimisation -> "" - MaximumOptimisation -> "opt") - (let uid_str = display (distParamUnitId params) - in if uid_str == display (distParamComponentId params) - then "" - else uid_str) - - distUnpackedSrcRootDirectory = distDirectory "src" - distUnpackedSrcDirectory pkgid = distUnpackedSrcRootDirectory - display pkgid - -- we shouldn't get name clashes so this should be fine: - distDownloadSrcDirectory = distUnpackedSrcRootDirectory - - distProjectCacheDirectory = distDirectory "cache" - distProjectCacheFile name = distProjectCacheDirectory name - - distPackageCacheDirectory params = distBuildDirectory params "cache" - distPackageCacheFile params name = distPackageCacheDirectory params name - - distSdistFile pid format = distSdistDirectory prettyShow pid <.> ext - where - ext = case format of - TargzFormat -> "tar.gz" - ZipFormat -> "zip" - - distSdistDirectory = distDirectory "sdist" - - distTempDirectory = distDirectory "tmp" - - distBinDirectory = distDirectory "bin" - - distPackageDBPath compid = distDirectory "packagedb" display compid - distPackageDB = SpecificPackageDB . 
distPackageDBPath - - -defaultStoreDirLayout :: FilePath -> StoreDirLayout -defaultStoreDirLayout storeRoot = - StoreDirLayout {..} - where - storeDirectory compid = - storeRoot display compid - - storePackageDirectory compid ipkgid = - storeDirectory compid display ipkgid - - storePackageDBPath compid = - storeDirectory compid "package.db" - - storePackageDB compid = - SpecificPackageDB (storePackageDBPath compid) - - storePackageDBStack compid = - [GlobalPackageDB, storePackageDB compid] - - storeIncomingDirectory compid = - storeDirectory compid "incoming" - - storeIncomingLock compid unitid = - storeIncomingDirectory compid display unitid <.> "lock" - - -defaultCabalDirLayout :: FilePath -> CabalDirLayout -defaultCabalDirLayout cabalDir = - mkCabalDirLayout cabalDir Nothing Nothing - -mkCabalDirLayout :: FilePath -- ^ Cabal directory - -> Maybe FilePath -- ^ Store directory - -> Maybe FilePath -- ^ Log directory - -> CabalDirLayout -mkCabalDirLayout cabalDir mstoreDir mlogDir = - CabalDirLayout {..} - where - cabalStoreDirLayout = - defaultStoreDirLayout (fromMaybe (cabalDir "store") mstoreDir) - cabalLogsDirectory = fromMaybe (cabalDir "logs") mlogDir - cabalWorldFile = cabalDir "world" \ No newline at end of file diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Exec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Exec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Exec.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Exec.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,181 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Exec --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Implementation of the 'exec' command. 
Runs an arbitrary executable in an --- environment suitable for making use of the sandbox. ------------------------------------------------------------------------------ - -module Distribution.Client.Exec ( exec - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import qualified Distribution.Simple.GHC as GHC -import qualified Distribution.Simple.GHCJS as GHCJS - -import Distribution.Client.Sandbox (getSandboxConfigFilePath) -import Distribution.Client.Sandbox.PackageEnvironment (sandboxPackageDBPath) -import Distribution.Client.Sandbox.Types (UseSandbox (..)) - -import Distribution.Simple.Compiler (Compiler, CompilerFlavor(..), compilerFlavor) -import Distribution.Simple.Program (ghcProgram, ghcjsProgram, lookupProgram) -import Distribution.Simple.Program.Db (ProgramDb, requireProgram, modifyProgramSearchPath) -import Distribution.Simple.Program.Find (ProgramSearchPathEntry(..)) -import Distribution.Simple.Program.Run (programInvocation, runProgramInvocation) -import Distribution.Simple.Program.Types ( simpleProgram, ConfiguredProgram(..) ) -import Distribution.Simple.Utils (die', warn) - -import Distribution.System (Platform(..), OS(..), buildOS) -import Distribution.Verbosity (Verbosity) - -import System.Directory ( doesDirectoryExist ) -import System.Environment (lookupEnv) -import System.FilePath (searchPathSeparator, ()) - - --- | Execute the given command in the package's environment. --- --- The given command is executed with GHC configured to use the correct --- package database and with the sandbox bin directory added to the PATH. 
-exec :: Verbosity - -> UseSandbox - -> Compiler - -> Platform - -> ProgramDb - -> [String] - -> IO () -exec verbosity useSandbox comp platform programDb extraArgs = - case extraArgs of - (exe:args) -> do - program <- requireProgram' verbosity useSandbox programDb exe - env <- environmentOverrides (programOverrideEnv program) - let invocation = programInvocation - program { programOverrideEnv = env } - args - runProgramInvocation verbosity invocation - - [] -> die' verbosity "Please specify an executable to run" - where - environmentOverrides env = - case useSandbox of - NoSandbox -> return env - (UseSandbox sandboxDir) -> - sandboxEnvironment verbosity sandboxDir comp platform programDb env - - --- | Return the package's sandbox environment. --- --- The environment sets GHC_PACKAGE_PATH so that GHC will use the sandbox. -sandboxEnvironment :: Verbosity - -> FilePath - -> Compiler - -> Platform - -> ProgramDb - -> [(String, Maybe String)] -- environment overrides so far - -> IO [(String, Maybe String)] -sandboxEnvironment verbosity sandboxDir comp platform programDb iEnv = - case compilerFlavor comp of - GHC -> env GHC.getGlobalPackageDB ghcProgram "GHC_PACKAGE_PATH" - GHCJS -> env GHCJS.getGlobalPackageDB ghcjsProgram "GHCJS_PACKAGE_PATH" - _ -> die' verbosity "exec only works with GHC and GHCJS" - where - (Platform _ os) = platform - ldPath = case os of - OSX -> "DYLD_LIBRARY_PATH" - Windows -> "PATH" - _ -> "LD_LIBRARY_PATH" - env getGlobalPackageDB hcProgram packagePathEnvVar = do - let Just program = lookupProgram hcProgram programDb - gDb <- getGlobalPackageDB verbosity program - sandboxConfigFilePath <- getSandboxConfigFilePath mempty - let sandboxPackagePath = sandboxPackageDBPath sandboxDir comp platform - compilerPackagePaths = prependToSearchPath gDb sandboxPackagePath - -- Packages database must exist, otherwise things will start - -- failing in mysterious ways. 
- exists <- doesDirectoryExist sandboxPackagePath - unless exists $ warn verbosity $ "Package database is not a directory: " - ++ sandboxPackagePath - -- MASSIVE HACK. We need this to be synchronized with installLibDir - -- in defaultInstallDirs' in Distribution.Simple.InstallDirs, - -- which has a special case for Windows (WHY? Who knows; it's been - -- around as long as Windows exists.) The sane thing to do here - -- would be to read out the actual install dirs that were associated - -- with the package in question, but that's not a well-formed question - -- here because there is not actually install directory for the - -- "entire" sandbox. Since we want to kill this code in favor of - -- new-build, I decided it wasn't worth fixing this "properly." - -- Also, this doesn't handle LHC correctly but I don't care -- ezyang - let extraLibPath = - case buildOS of - Windows -> sandboxDir - _ -> sandboxDir "lib" - -- 2016-11-26 Apologies for the spaghetti code here. - -- Essentially we just want to add the sandbox's lib/ dir to - -- whatever the library search path environment variable is: - -- this allows running existing executables against foreign - -- libraries (meaning Haskell code with a bunch of foreign - -- exports). However, on Windows this variable is equal to the - -- executable search path env var. And we try to keep not only - -- what was already set in the environment, but also the - -- additional directories we add below in requireProgram'. So - -- the strategy is that we first take the environment - -- overrides from requireProgram' below. If the library search - -- path env is overridden (e.g. because we're on windows), we - -- prepend the lib/ dir to the relevant override. If not, we - -- want to avoid wiping the user's own settings, so we first - -- read the env var's current value, and then prefix ours if - -- the user had any set. - iEnv' <- - if any ((==ldPath) . 
fst) iEnv - then return $ updateLdPath extraLibPath iEnv - else do - currentLibraryPath <- lookupEnv ldPath - let updatedLdPath = - case currentLibraryPath of - Nothing -> Just extraLibPath - Just paths -> - Just $ extraLibPath ++ [searchPathSeparator] ++ paths - return $ (ldPath, updatedLdPath) : iEnv - - -- Build the environment - return $ [ (packagePathEnvVar, Just compilerPackagePaths) - , ("CABAL_SANDBOX_PACKAGE_PATH", Just compilerPackagePaths) - , ("CABAL_SANDBOX_CONFIG", Just sandboxConfigFilePath) - ] ++ iEnv' - - prependToSearchPath path newValue = - newValue ++ [searchPathSeparator] ++ path - - updateLdPath path = map update - where - update (name, Just current) - | name == ldPath = (ldPath, Just $ path ++ [searchPathSeparator] ++ current) - update (name, Nothing) - | name == ldPath = (ldPath, Just path) - update x = x - - --- | Check that a program is configured and available to be run. If --- a sandbox is available check in the sandbox's directory. -requireProgram' :: Verbosity - -> UseSandbox - -> ProgramDb - -> String - -> IO ConfiguredProgram -requireProgram' verbosity useSandbox programDb exe = do - (program, _) <- requireProgram - verbosity - (simpleProgram exe) - updateSearchPath - return program - where - updateSearchPath = - flip modifyProgramSearchPath programDb $ \searchPath -> - case useSandbox of - NoSandbox -> searchPath - UseSandbox sandboxDir -> - ProgramSearchPathDir (sandboxDir "bin") : searchPath diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Fetch.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Fetch.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Fetch.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Fetch.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,227 +0,0 @@ 
-------------------------------------------------------------------------------- | --- Module : Distribution.Client.Fetch --- Copyright : (c) David Himmelstrup 2005 --- Duncan Coutts 2011 --- License : BSD-like --- --- Maintainer : cabal-devel@gmail.com --- Stability : provisional --- Portability : portable --- --- The cabal fetch command ------------------------------------------------------------------------------ -module Distribution.Client.Fetch ( - fetch, - ) where - -import Distribution.Client.Types -import Distribution.Client.Targets -import Distribution.Client.FetchUtils hiding (fetchPackage) -import Distribution.Client.Dependency -import Distribution.Client.IndexUtils as IndexUtils - ( getSourcePackages, getInstalledPackages ) -import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan -import Distribution.Client.Setup - ( GlobalFlags(..), FetchFlags(..), RepoContext(..) ) - -import Distribution.Solver.Types.ConstraintSource -import Distribution.Solver.Types.LabeledPackageConstraint -import Distribution.Solver.Types.OptionalStanza -import Distribution.Solver.Types.PkgConfigDb ( PkgConfigDb, readPkgConfigDb ) -import Distribution.Solver.Types.SolverPackage -import Distribution.Solver.Types.SourcePackage - -import Distribution.Package - ( packageId ) -import Distribution.Simple.Compiler - ( Compiler, compilerInfo, PackageDBStack ) -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import Distribution.Simple.Program - ( ProgramDb ) -import Distribution.Simple.Setup - ( fromFlag, fromFlagOrDefault ) -import Distribution.Simple.Utils - ( die', notice, debug ) -import Distribution.System - ( Platform ) -import Distribution.Text - ( display ) -import Distribution.Verbosity - ( Verbosity ) - -import Control.Monad - ( filterM ) - --- ------------------------------------------------------------ --- * The fetch command --- ------------------------------------------------------------ - ---TODO: --- * add fetch -o support --- * support 
tarball URLs via ad-hoc download cache (or in -o mode?) --- * suggest using --no-deps, unpack or fetch -o if deps cannot be satisfied --- * Port various flags from install: --- * --updage-dependencies --- * --constraint and --preference --- * --only-dependencies, but note it conflicts with --no-deps - - --- | Fetch a list of packages and their dependencies. --- -fetch :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> Platform - -> ProgramDb - -> GlobalFlags - -> FetchFlags - -> [UserTarget] - -> IO () -fetch verbosity _ _ _ _ _ _ _ [] = - notice verbosity "No packages requested. Nothing to do." - -fetch verbosity packageDBs repoCtxt comp platform progdb - globalFlags fetchFlags userTargets = do - - mapM_ (checkTarget verbosity) userTargets - - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb - sourcePkgDb <- getSourcePackages verbosity repoCtxt - pkgConfigDb <- readPkgConfigDb verbosity progdb - - pkgSpecifiers <- resolveUserTargets verbosity repoCtxt - (fromFlag $ globalWorldFile globalFlags) - (packageIndex sourcePkgDb) - userTargets - - pkgs <- planPackages - verbosity comp platform fetchFlags - installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers - - pkgs' <- filterM (fmap not . isFetched . packageSource) pkgs - if null pkgs' - --TODO: when we add support for remote tarballs then this message - -- will need to be changed because for remote tarballs we fetch them - -- at the earlier phase. - then notice verbosity $ "No packages need to be fetched. " - ++ "All the requested packages are already local " - ++ "or cached locally." - else if dryRun - then notice verbosity $ unlines $ - "The following packages would be fetched:" - : map (display . packageId) pkgs' - - else mapM_ (fetchPackage verbosity repoCtxt . 
packageSource) pkgs' - - where - dryRun = fromFlag (fetchDryRun fetchFlags) - -planPackages :: Verbosity - -> Compiler - -> Platform - -> FetchFlags - -> InstalledPackageIndex - -> SourcePackageDb - -> PkgConfigDb - -> [PackageSpecifier UnresolvedSourcePackage] - -> IO [UnresolvedSourcePackage] -planPackages verbosity comp platform fetchFlags - installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers - - | includeDependencies = do - solver <- chooseSolver verbosity - (fromFlag (fetchSolver fetchFlags)) (compilerInfo comp) - notice verbosity "Resolving dependencies..." - installPlan <- foldProgress logMsg (die' verbosity) return $ - resolveDependencies - platform (compilerInfo comp) pkgConfigDb - solver - resolverParams - - -- The packages we want to fetch are those packages the 'InstallPlan' - -- that are in the 'InstallPlan.Configured' state. - return - [ solverPkgSource cpkg - | (SolverInstallPlan.Configured cpkg) - <- SolverInstallPlan.toList installPlan ] - - | otherwise = - either (die' verbosity . unlines . map show) return $ - resolveWithoutDependencies resolverParams - - where - resolverParams = - - setMaxBackjumps (if maxBackjumps < 0 then Nothing - else Just maxBackjumps) - - . setIndependentGoals independentGoals - - . setReorderGoals reorderGoals - - . setCountConflicts countConflicts - - . setShadowPkgs shadowPkgs - - . setStrongFlags strongFlags - - . setAllowBootLibInstalls allowBootLibInstalls - - . setSolverVerbosity verbosity - - . addConstraints - [ let pc = PackageConstraint - (scopeToplevel $ pkgSpecifierTarget pkgSpecifier) - (PackagePropertyStanzas stanzas) - in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget - | pkgSpecifier <- pkgSpecifiers ] - - -- Reinstall the targets given on the command line so that the dep - -- resolver will decide that they need fetching, even if they're - -- already installed. Since we want to get the source packages of - -- things we might have installed (but not have the sources for). - . 
reinstallTargets - - $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers - - includeDependencies = fromFlag (fetchDeps fetchFlags) - logMsg message rest = debug verbosity message >> rest - - stanzas = [ TestStanzas | testsEnabled ] - ++ [ BenchStanzas | benchmarksEnabled ] - testsEnabled = fromFlagOrDefault False $ fetchTests fetchFlags - benchmarksEnabled = fromFlagOrDefault False $ fetchBenchmarks fetchFlags - - reorderGoals = fromFlag (fetchReorderGoals fetchFlags) - countConflicts = fromFlag (fetchCountConflicts fetchFlags) - independentGoals = fromFlag (fetchIndependentGoals fetchFlags) - shadowPkgs = fromFlag (fetchShadowPkgs fetchFlags) - strongFlags = fromFlag (fetchStrongFlags fetchFlags) - maxBackjumps = fromFlag (fetchMaxBackjumps fetchFlags) - allowBootLibInstalls = fromFlag (fetchAllowBootLibInstalls fetchFlags) - - -checkTarget :: Verbosity -> UserTarget -> IO () -checkTarget verbosity target = case target of - UserTargetRemoteTarball _uri - -> die' verbosity $ "The 'fetch' command does not yet support remote tarballs. " - ++ "In the meantime you can use the 'unpack' commands." - _ -> return () - -fetchPackage :: Verbosity -> RepoContext -> PackageLocation a -> IO () -fetchPackage verbosity repoCtxt pkgsrc = case pkgsrc of - LocalUnpackedPackage _dir -> return () - LocalTarballPackage _file -> return () - - RemoteTarballPackage _uri _ -> - die' verbosity $ "The 'fetch' command does not yet support remote tarballs. " - ++ "In the meantime you can use the 'unpack' commands." - - RemoteSourceRepoPackage _repo _ -> - die' verbosity $ "The 'fetch' command does not yet support remote " - ++ "source repositores." 
- - RepoTarballPackage repo pkgid _ -> do - _ <- fetchRepoTarball verbosity repoCtxt repo pkgid - return () diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/FetchUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/FetchUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/FetchUtils.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/FetchUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,315 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.FetchUtils --- Copyright : (c) David Himmelstrup 2005 --- Duncan Coutts 2011 --- License : BSD-like --- --- Maintainer : cabal-devel@gmail.com --- Stability : provisional --- Portability : portable --- --- Functions for fetching packages ------------------------------------------------------------------------------ -{-# LANGUAGE RecordWildCards #-} -module Distribution.Client.FetchUtils ( - - -- * fetching packages - fetchPackage, - isFetched, - checkFetched, - - -- ** specifically for repo packages - checkRepoTarballFetched, - fetchRepoTarball, - - -- ** fetching packages asynchronously - asyncFetchPackages, - waitAsyncFetchPackage, - AsyncFetchMap, - - -- * fetching other things - downloadIndex, - ) where - -import Distribution.Client.Types -import Distribution.Client.HttpUtils - ( downloadURI, isOldHackageURI, DownloadResult(..) - , HttpTransport(..), transportCheckHttps, remoteRepoCheckHttps ) - -import Distribution.Package - ( PackageId, packageName, packageVersion ) -import Distribution.Simple.Utils - ( notice, info, debug, die' ) -import Distribution.Text - ( display ) -import Distribution.Verbosity - ( Verbosity, verboseUnmarkOutput ) -import Distribution.Client.GlobalFlags - ( RepoContext(..) 
) -import Distribution.Client.Utils - ( ProgressPhase(..), progressMessage ) - -import Data.Maybe -import Data.Map (Map) -import qualified Data.Map as Map -import Control.Monad -import Control.Exception -import Control.Concurrent.Async -import Control.Concurrent.MVar -import System.Directory - ( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory ) -import System.IO - ( openTempFile, hClose ) -import System.FilePath - ( (), (<.>) ) -import qualified System.FilePath.Posix as FilePath.Posix - ( combine, joinPath ) -import Network.URI - ( URI(uriPath) ) - -import qualified Hackage.Security.Client as Sec - --- ------------------------------------------------------------ --- * Actually fetch things --- ------------------------------------------------------------ - --- | Returns @True@ if the package has already been fetched --- or does not need fetching. --- -isFetched :: UnresolvedPkgLoc -> IO Bool -isFetched loc = case loc of - LocalUnpackedPackage _dir -> return True - LocalTarballPackage _file -> return True - RemoteTarballPackage _uri local -> return (isJust local) - RepoTarballPackage repo pkgid _ -> doesFileExist (packageFile repo pkgid) - RemoteSourceRepoPackage _ local -> return (isJust local) - - --- | Checks if the package has already been fetched (or does not need --- fetching) and if so returns evidence in the form of a 'PackageLocation' --- with a resolved local file location. 
--- -checkFetched :: UnresolvedPkgLoc - -> IO (Maybe ResolvedPkgLoc) -checkFetched loc = case loc of - LocalUnpackedPackage dir -> - return (Just $ LocalUnpackedPackage dir) - LocalTarballPackage file -> - return (Just $ LocalTarballPackage file) - RemoteTarballPackage uri (Just file) -> - return (Just $ RemoteTarballPackage uri file) - RepoTarballPackage repo pkgid (Just file) -> - return (Just $ RepoTarballPackage repo pkgid file) - RemoteSourceRepoPackage repo (Just dir) -> - return (Just $ RemoteSourceRepoPackage repo dir) - - RemoteTarballPackage _uri Nothing -> return Nothing - RemoteSourceRepoPackage _repo Nothing -> return Nothing - RepoTarballPackage repo pkgid Nothing -> - fmap (fmap (RepoTarballPackage repo pkgid)) - (checkRepoTarballFetched repo pkgid) - --- | Like 'checkFetched' but for the specific case of a 'RepoTarballPackage'. --- -checkRepoTarballFetched :: Repo -> PackageId -> IO (Maybe FilePath) -checkRepoTarballFetched repo pkgid = do - let file = packageFile repo pkgid - exists <- doesFileExist file - if exists - then return (Just file) - else return Nothing - - --- | Fetch a package if we don't have it already. 
--- -fetchPackage :: Verbosity - -> RepoContext - -> UnresolvedPkgLoc - -> IO ResolvedPkgLoc -fetchPackage verbosity repoCtxt loc = case loc of - LocalUnpackedPackage dir -> - return (LocalUnpackedPackage dir) - LocalTarballPackage file -> - return (LocalTarballPackage file) - RemoteTarballPackage uri (Just file) -> - return (RemoteTarballPackage uri file) - RepoTarballPackage repo pkgid (Just file) -> - return (RepoTarballPackage repo pkgid file) - RemoteSourceRepoPackage repo (Just dir) -> - return (RemoteSourceRepoPackage repo dir) - - RemoteTarballPackage uri Nothing -> do - path <- downloadTarballPackage uri - return (RemoteTarballPackage uri path) - RepoTarballPackage repo pkgid Nothing -> do - local <- fetchRepoTarball verbosity repoCtxt repo pkgid - return (RepoTarballPackage repo pkgid local) - RemoteSourceRepoPackage _repo Nothing -> - die' verbosity "fetchPackage: source repos not supported" - where - downloadTarballPackage uri = do - transport <- repoContextGetTransport repoCtxt - transportCheckHttps verbosity transport uri - notice verbosity ("Downloading " ++ show uri) - tmpdir <- getTemporaryDirectory - (path, hnd) <- openTempFile tmpdir "cabal-.tar.gz" - hClose hnd - _ <- downloadURI transport verbosity uri path - return path - - --- | Fetch a repo package if we don't have it already. --- -fetchRepoTarball :: Verbosity -> RepoContext -> Repo -> PackageId -> IO FilePath -fetchRepoTarball verbosity repoCtxt repo pkgid = do - fetched <- doesFileExist (packageFile repo pkgid) - if fetched - then do info verbosity $ display pkgid ++ " has already been downloaded." 
- return (packageFile repo pkgid) - else do progressMessage verbosity ProgressDownloading (display pkgid) - res <- downloadRepoPackage - progressMessage verbosity ProgressDownloaded (display pkgid) - return res - - - where - downloadRepoPackage = case repo of - RepoLocal{..} -> return (packageFile repo pkgid) - - RepoRemote{..} -> do - transport <- repoContextGetTransport repoCtxt - remoteRepoCheckHttps verbosity transport repoRemote - let uri = packageURI repoRemote pkgid - dir = packageDir repo pkgid - path = packageFile repo pkgid - createDirectoryIfMissing True dir - _ <- downloadURI transport verbosity uri path - return path - - RepoSecure{} -> repoContextWithSecureRepo repoCtxt repo $ \rep -> do - let dir = packageDir repo pkgid - path = packageFile repo pkgid - createDirectoryIfMissing True dir - Sec.uncheckClientErrors $ do - info verbosity ("Writing " ++ path) - Sec.downloadPackage' rep pkgid path - return path - --- | Downloads an index file to [config-dir/packages/serv-id] without --- hackage-security. You probably don't want to call this directly; --- use 'updateRepo' instead. --- -downloadIndex :: HttpTransport -> Verbosity -> RemoteRepo -> FilePath -> IO DownloadResult -downloadIndex transport verbosity remoteRepo cacheDir = do - remoteRepoCheckHttps verbosity transport remoteRepo - let uri = (remoteRepoURI remoteRepo) { - uriPath = uriPath (remoteRepoURI remoteRepo) - `FilePath.Posix.combine` "00-index.tar.gz" - } - path = cacheDir "00-index" <.> "tar.gz" - createDirectoryIfMissing True cacheDir - downloadURI transport verbosity uri path - - --- ------------------------------------------------------------ --- * Async fetch wrapper utilities --- ------------------------------------------------------------ - -type AsyncFetchMap = Map UnresolvedPkgLoc - (MVar (Either SomeException ResolvedPkgLoc)) - --- | Fork off an async action to download the given packages (by location). 
--- --- The downloads are initiated in order, so you can arrange for packages that --- will likely be needed sooner to be earlier in the list. --- --- The body action is passed a map from those packages (identified by their --- location) to a completion var for that package. So the body action should --- lookup the location and use 'asyncFetchPackage' to get the result. --- -asyncFetchPackages :: Verbosity - -> RepoContext - -> [UnresolvedPkgLoc] - -> (AsyncFetchMap -> IO a) - -> IO a -asyncFetchPackages verbosity repoCtxt pkglocs body = do - --TODO: [nice to have] use parallel downloads? - - asyncDownloadVars <- sequence [ do v <- newEmptyMVar - return (pkgloc, v) - | pkgloc <- pkglocs ] - - let fetchPackages :: IO () - fetchPackages = - forM_ asyncDownloadVars $ \(pkgloc, var) -> do - -- Suppress marking here, because 'withAsync' means - -- that we get nondeterministic interleaving - result <- try $ fetchPackage (verboseUnmarkOutput verbosity) - repoCtxt pkgloc - putMVar var result - - withAsync fetchPackages $ \_ -> - body (Map.fromList asyncDownloadVars) - - --- | Expect to find a download in progress in the given 'AsyncFetchMap' --- and wait on it to finish. --- --- If the download failed with an exception then this will be thrown. --- --- Note: This function is supposed to be idempotent, as our install plans --- can now use the same tarball for many builds, e.g. different --- components and/or qualified goals, and these all go through the --- download phase so we end up using 'waitAsyncFetchPackage' twice on --- the same package. C.f. #4461. 
-waitAsyncFetchPackage :: Verbosity - -> AsyncFetchMap - -> UnresolvedPkgLoc - -> IO ResolvedPkgLoc -waitAsyncFetchPackage verbosity downloadMap srcloc = - case Map.lookup srcloc downloadMap of - Just hnd -> do - debug verbosity $ "Waiting for download of " ++ show srcloc - either throwIO return =<< readMVar hnd - Nothing -> fail "waitAsyncFetchPackage: package not being downloaded" - - --- ------------------------------------------------------------ --- * Path utilities --- ------------------------------------------------------------ - --- | Generate the full path to the locally cached copy of --- the tarball for a given @PackageIdentifer@. --- -packageFile :: Repo -> PackageId -> FilePath -packageFile repo pkgid = packageDir repo pkgid - display pkgid - <.> "tar.gz" - --- | Generate the full path to the directory where the local cached copy of --- the tarball for a given @PackageIdentifer@ is stored. --- -packageDir :: Repo -> PackageId -> FilePath -packageDir repo pkgid = repoLocalDir repo - display (packageName pkgid) - display (packageVersion pkgid) - --- | Generate the URI of the tarball for a given package. 
--- -packageURI :: RemoteRepo -> PackageId -> URI -packageURI repo pkgid | isOldHackageURI (remoteRepoURI repo) = - (remoteRepoURI repo) { - uriPath = FilePath.Posix.joinPath - [uriPath (remoteRepoURI repo) - ,display (packageName pkgid) - ,display (packageVersion pkgid) - ,display pkgid <.> "tar.gz"] - } -packageURI repo pkgid = - (remoteRepoURI repo) { - uriPath = FilePath.Posix.joinPath - [uriPath (remoteRepoURI repo) - ,"package" - ,display pkgid <.> "tar.gz"] - } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/FileMonitor.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/FileMonitor.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/FileMonitor.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/FileMonitor.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1119 +0,0 @@ -{-# LANGUAGE DeriveGeneric, DeriveFunctor, GeneralizedNewtypeDeriving, - NamedFieldPuns, BangPatterns #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} - --- | An abstraction to help with re-running actions when files or other --- input values they depend on have changed. 
--- -module Distribution.Client.FileMonitor ( - - -- * Declaring files to monitor - MonitorFilePath(..), - MonitorKindFile(..), - MonitorKindDir(..), - FilePathGlob(..), - monitorFile, - monitorFileHashed, - monitorNonExistentFile, - monitorFileExistence, - monitorDirectory, - monitorNonExistentDirectory, - monitorDirectoryExistence, - monitorFileOrDirectory, - monitorFileGlob, - monitorFileGlobExistence, - monitorFileSearchPath, - monitorFileHashedSearchPath, - - -- * Creating and checking sets of monitored files - FileMonitor(..), - newFileMonitor, - MonitorChanged(..), - MonitorChangedReason(..), - checkFileMonitorChanged, - updateFileMonitor, - MonitorTimestamp, - beginUpdateFileMonitor, - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import qualified Data.Map.Strict as Map -import qualified Data.ByteString.Lazy as BS -import qualified Distribution.Compat.Binary as Binary -import qualified Data.Hashable as Hashable - -import Control.Monad -import Control.Monad.Trans (MonadIO, liftIO) -import Control.Monad.State (StateT, mapStateT) -import qualified Control.Monad.State as State -import Control.Monad.Except (ExceptT, runExceptT, withExceptT, - throwError) -import Control.Exception - -import Distribution.Compat.Time -import Distribution.Client.Glob -import Distribution.Simple.Utils (handleDoesNotExist, writeFileAtomic) -import Distribution.Client.Utils (mergeBy, MergeResult(..)) - -import System.FilePath -import System.Directory -import System.IO - ------------------------------------------------------------------------------- --- Types for specifying files to monitor --- - - --- | A description of a file (or set of files) to monitor for changes. --- --- Where file paths are relative they are relative to a common directory --- (e.g. project root), not necessarily the process current directory. 
--- -data MonitorFilePath = - MonitorFile { - monitorKindFile :: !MonitorKindFile, - monitorKindDir :: !MonitorKindDir, - monitorPath :: !FilePath - } - | MonitorFileGlob { - monitorKindFile :: !MonitorKindFile, - monitorKindDir :: !MonitorKindDir, - monitorPathGlob :: !FilePathGlob - } - deriving (Eq, Show, Generic) - -data MonitorKindFile = FileExists - | FileModTime - | FileHashed - | FileNotExists - deriving (Eq, Show, Generic) - -data MonitorKindDir = DirExists - | DirModTime - | DirNotExists - deriving (Eq, Show, Generic) - -instance Binary MonitorFilePath -instance Binary MonitorKindFile -instance Binary MonitorKindDir - --- | Monitor a single file for changes, based on its modification time. --- The monitored file is considered to have changed if it no longer --- exists or if its modification time has changed. --- -monitorFile :: FilePath -> MonitorFilePath -monitorFile = MonitorFile FileModTime DirNotExists - --- | Monitor a single file for changes, based on its modification time --- and content hash. The monitored file is considered to have changed if --- it no longer exists or if its modification time and content hash have --- changed. --- -monitorFileHashed :: FilePath -> MonitorFilePath -monitorFileHashed = MonitorFile FileHashed DirNotExists - --- | Monitor a single non-existent file for changes. The monitored file --- is considered to have changed if it exists. --- -monitorNonExistentFile :: FilePath -> MonitorFilePath -monitorNonExistentFile = MonitorFile FileNotExists DirNotExists - --- | Monitor a single file for existence only. The monitored file is --- considered to have changed if it no longer exists. --- -monitorFileExistence :: FilePath -> MonitorFilePath -monitorFileExistence = MonitorFile FileExists DirNotExists - --- | Monitor a single directory for changes, based on its modification --- time. The monitored directory is considered to have changed if it no --- longer exists or if its modification time has changed. 
--- -monitorDirectory :: FilePath -> MonitorFilePath -monitorDirectory = MonitorFile FileNotExists DirModTime - --- | Monitor a single non-existent directory for changes. The monitored --- directory is considered to have changed if it exists. --- -monitorNonExistentDirectory :: FilePath -> MonitorFilePath --- Just an alias for monitorNonExistentFile, since you can't --- tell the difference between a non-existent directory and --- a non-existent file :) -monitorNonExistentDirectory = monitorNonExistentFile - --- | Monitor a single directory for existence. The monitored directory is --- considered to have changed only if it no longer exists. --- -monitorDirectoryExistence :: FilePath -> MonitorFilePath -monitorDirectoryExistence = MonitorFile FileNotExists DirExists - --- | Monitor a single file or directory for changes, based on its modification --- time. The monitored file is considered to have changed if it no longer --- exists or if its modification time has changed. --- -monitorFileOrDirectory :: FilePath -> MonitorFilePath -monitorFileOrDirectory = MonitorFile FileModTime DirModTime - --- | Monitor a set of files (or directories) identified by a file glob. --- The monitored glob is considered to have changed if the set of files --- matching the glob changes (i.e. creations or deletions), or for files if the --- modification time and content hash of any matching file has changed. --- -monitorFileGlob :: FilePathGlob -> MonitorFilePath -monitorFileGlob = MonitorFileGlob FileHashed DirExists - --- | Monitor a set of files (or directories) identified by a file glob for --- existence only. The monitored glob is considered to have changed if the set --- of files matching the glob changes (i.e. creations or deletions). 
--- -monitorFileGlobExistence :: FilePathGlob -> MonitorFilePath -monitorFileGlobExistence = MonitorFileGlob FileExists DirExists - --- | Creates a list of files to monitor when you search for a file which --- unsuccessfully looked in @notFoundAtPaths@ before finding it at --- @foundAtPath@. -monitorFileSearchPath :: [FilePath] -> FilePath -> [MonitorFilePath] -monitorFileSearchPath notFoundAtPaths foundAtPath = - monitorFile foundAtPath - : map monitorNonExistentFile notFoundAtPaths - --- | Similar to 'monitorFileSearchPath', but also instructs us to --- monitor the hash of the found file. -monitorFileHashedSearchPath :: [FilePath] -> FilePath -> [MonitorFilePath] -monitorFileHashedSearchPath notFoundAtPaths foundAtPath = - monitorFileHashed foundAtPath - : map monitorNonExistentFile notFoundAtPaths - - ------------------------------------------------------------------------------- --- Implementation types, files status --- - --- | The state necessary to determine whether a set of monitored --- files has changed. It consists of two parts: a set of specific --- files to be monitored (index by their path), and a list of --- globs, which monitor may files at once. -data MonitorStateFileSet - = MonitorStateFileSet ![MonitorStateFile] - ![MonitorStateGlob] - -- Morally this is not actually a set but a bag (represented by lists). - -- There is no principled reason to use a bag here rather than a set, but - -- there is also no particular gain either. That said, we do preserve the - -- order of the lists just to reduce confusion (and have predictable I/O - -- patterns). - deriving Show - -type Hash = Int - --- | The state necessary to determine whether a monitored file has changed. --- --- This covers all the cases of 'MonitorFilePath' except for globs which is --- covered separately by 'MonitorStateGlob'. 
--- --- The @Maybe ModTime@ is to cover the case where we already consider the --- file to have changed, either because it had already changed by the time we --- did the snapshot (i.e. too new, changed since start of update process) or it --- no longer exists at all. --- -data MonitorStateFile = MonitorStateFile !MonitorKindFile !MonitorKindDir - !FilePath !MonitorStateFileStatus - deriving (Show, Generic) - -data MonitorStateFileStatus - = MonitorStateFileExists - | MonitorStateFileModTime !ModTime -- ^ cached file mtime - | MonitorStateFileHashed !ModTime !Hash -- ^ cached mtime and content hash - | MonitorStateDirExists - | MonitorStateDirModTime !ModTime -- ^ cached dir mtime - | MonitorStateNonExistent - | MonitorStateAlreadyChanged - deriving (Show, Generic) - -instance Binary MonitorStateFile -instance Binary MonitorStateFileStatus - --- | The state necessary to determine whether the files matched by a globbing --- match have changed. --- -data MonitorStateGlob = MonitorStateGlob !MonitorKindFile !MonitorKindDir - !FilePathRoot !MonitorStateGlobRel - deriving (Show, Generic) - -data MonitorStateGlobRel - = MonitorStateGlobDirs - !Glob !FilePathGlobRel - !ModTime - ![(FilePath, MonitorStateGlobRel)] -- invariant: sorted - - | MonitorStateGlobFiles - !Glob - !ModTime - ![(FilePath, MonitorStateFileStatus)] -- invariant: sorted - - | MonitorStateGlobDirTrailing - deriving (Show, Generic) - -instance Binary MonitorStateGlob -instance Binary MonitorStateGlobRel - --- | We can build a 'MonitorStateFileSet' from a set of 'MonitorFilePath' by --- inspecting the state of the file system, and we can go in the reverse --- direction by just forgetting the extra info. 
--- -reconstructMonitorFilePaths :: MonitorStateFileSet -> [MonitorFilePath] -reconstructMonitorFilePaths (MonitorStateFileSet singlePaths globPaths) = - map getSinglePath singlePaths - ++ map getGlobPath globPaths - where - getSinglePath (MonitorStateFile kindfile kinddir filepath _) = - MonitorFile kindfile kinddir filepath - - getGlobPath (MonitorStateGlob kindfile kinddir root gstate) = - MonitorFileGlob kindfile kinddir $ FilePathGlob root $ - case gstate of - MonitorStateGlobDirs glob globs _ _ -> GlobDir glob globs - MonitorStateGlobFiles glob _ _ -> GlobFile glob - MonitorStateGlobDirTrailing -> GlobDirTrailing - ------------------------------------------------------------------------------- --- Checking the status of monitored files --- - --- | A monitor for detecting changes to a set of files. It can be used to --- efficiently test if any of a set of files (specified individually or by --- glob patterns) has changed since some snapshot. In addition, it also checks --- for changes in a value (of type @a@), and when there are no changes in --- either it returns a saved value (of type @b@). --- --- The main use case looks like this: suppose we have some expensive action --- that depends on certain pure inputs and reads some set of files, and --- produces some pure result. We want to avoid re-running this action when it --- would produce the same result. So we need to monitor the files the action --- looked at, the other pure input values, and we need to cache the result. --- Then at some later point, if the input value didn't change, and none of the --- files changed, then we can re-use the cached result rather than re-running --- the action. --- --- This can be achieved using a 'FileMonitor'. Each 'FileMonitor' instance --- saves state in a disk file, so the file for that has to be specified, --- making sure it is unique. The pattern is to use 'checkFileMonitorChanged' --- to see if there's been any change. 
If there is, re-run the action, keeping --- track of the files, then use 'updateFileMonitor' to record the current --- set of files to monitor, the current input value for the action, and the --- result of the action. --- --- The typical occurrence of this pattern is captured by 'rerunIfChanged' --- and the 'Rebuild' monad. More complicated cases may need to use --- 'checkFileMonitorChanged' and 'updateFileMonitor' directly. --- -data FileMonitor a b - = FileMonitor { - - -- | The file where this 'FileMonitor' should store its state. - -- - fileMonitorCacheFile :: FilePath, - - -- | Compares a new cache key with old one to determine if a - -- corresponding cached value is still valid. - -- - -- Typically this is just an equality test, but in some - -- circumstances it can make sense to do things like subset - -- comparisons. - -- - -- The first arg is the new value, the second is the old cached value. - -- - fileMonitorKeyValid :: a -> a -> Bool, - - -- | When this mode is enabled, if 'checkFileMonitorChanged' returns - -- 'MonitoredValueChanged' then we have the guarantee that no files - -- changed, that the value change was the only change. In the default - -- mode no such guarantee is provided which is slightly faster. - -- - fileMonitorCheckIfOnlyValueChanged :: Bool - } - --- | Define a new file monitor. --- --- It's best practice to define file monitor values once, and then use the --- same value for 'checkFileMonitorChanged' and 'updateFileMonitor' as this --- ensures you get the same types @a@ and @b@ for reading and writing. --- --- The path of the file monitor itself must be unique because it keeps state --- on disk and these would clash. --- -newFileMonitor :: Eq a => FilePath -- ^ The file to cache the state of the - -- file monitor. Must be unique. 
- -> FileMonitor a b -newFileMonitor path = FileMonitor path (==) False - --- | The result of 'checkFileMonitorChanged': either the monitored files or --- value changed (and it tells us which it was) or nothing changed and we get --- the cached result. --- -data MonitorChanged a b = - -- | The monitored files and value did not change. The cached result is - -- @b@. - -- - -- The set of monitored files is also returned. This is useful - -- for composing or nesting 'FileMonitor's. - MonitorUnchanged b [MonitorFilePath] - - -- | The monitor found that something changed. The reason is given. - -- - | MonitorChanged (MonitorChangedReason a) - deriving Show - --- | What kind of change 'checkFileMonitorChanged' detected. --- -data MonitorChangedReason a = - - -- | One of the files changed (existence, file type, mtime or file - -- content, depending on the 'MonitorFilePath' in question) - MonitoredFileChanged FilePath - - -- | The pure input value changed. - -- - -- The previous cached key value is also returned. This is sometimes - -- useful when using a 'fileMonitorKeyValid' function that is not simply - -- '(==)', when invalidation can be partial. In such cases it can make - -- sense to 'updateFileMonitor' with a key value that's a combination of - -- the new and old (e.g. set union). - | MonitoredValueChanged a - - -- | There was no saved monitor state, cached value etc. Ie the file - -- for the 'FileMonitor' does not exist. - | MonitorFirstRun - - -- | There was existing state, but we could not read it. This typically - -- happens when the code has changed compared to an existing 'FileMonitor' - -- cache file and type of the input value or cached value has changed such - -- that we cannot decode the values. This is completely benign as we can - -- treat is just as if there were no cache file and re-run. - | MonitorCorruptCache - deriving (Eq, Show, Functor) - --- | Test if the input value or files monitored by the 'FileMonitor' have --- changed. 
If not, return the cached value. --- --- See 'FileMonitor' for a full explanation. --- -checkFileMonitorChanged - :: (Binary a, Binary b) - => FileMonitor a b -- ^ cache file path - -> FilePath -- ^ root directory - -> a -- ^ guard or key value - -> IO (MonitorChanged a b) -- ^ did the key or any paths change? -checkFileMonitorChanged - monitor@FileMonitor { fileMonitorKeyValid, - fileMonitorCheckIfOnlyValueChanged } - root currentKey = - - -- Consider it a change if the cache file does not exist, - -- or we cannot decode it. Sadly ErrorCall can still happen, despite - -- using decodeFileOrFail, e.g. Data.Char.chr errors - - handleDoesNotExist (MonitorChanged MonitorFirstRun) $ - handleErrorCall (MonitorChanged MonitorCorruptCache) $ - readCacheFile monitor - >>= either (\_ -> return (MonitorChanged MonitorCorruptCache)) - checkStatusCache - - where - checkStatusCache (cachedFileStatus, cachedKey, cachedResult) = do - change <- checkForChanges - case change of - Just reason -> return (MonitorChanged reason) - Nothing -> return (MonitorUnchanged cachedResult monitorFiles) - where monitorFiles = reconstructMonitorFilePaths cachedFileStatus - where - -- In fileMonitorCheckIfOnlyValueChanged mode we want to guarantee that - -- if we return MonitoredValueChanged that only the value changed. - -- We do that by checkin for file changes first. Otherwise it makes - -- more sense to do the cheaper test first. 
- checkForChanges - | fileMonitorCheckIfOnlyValueChanged - = checkFileChange cachedFileStatus cachedKey cachedResult - `mplusMaybeT` - checkValueChange cachedKey - - | otherwise - = checkValueChange cachedKey - `mplusMaybeT` - checkFileChange cachedFileStatus cachedKey cachedResult - - mplusMaybeT :: Monad m => m (Maybe a) -> m (Maybe a) -> m (Maybe a) - mplusMaybeT ma mb = do - mx <- ma - case mx of - Nothing -> mb - Just x -> return (Just x) - - -- Check if the guard value has changed - checkValueChange cachedKey - | not (fileMonitorKeyValid currentKey cachedKey) - = return (Just (MonitoredValueChanged cachedKey)) - | otherwise - = return Nothing - - -- Check if any file has changed - checkFileChange cachedFileStatus cachedKey cachedResult = do - res <- probeFileSystem root cachedFileStatus - case res of - -- Some monitored file has changed - Left changedPath -> - return (Just (MonitoredFileChanged (normalise changedPath))) - - -- No monitored file has changed - Right (cachedFileStatus', cacheStatus) -> do - - -- But we might still want to update the cache - whenCacheChanged cacheStatus $ - rewriteCacheFile monitor cachedFileStatus' cachedKey cachedResult - - return Nothing - --- | Helper for reading the cache file. --- --- This determines the type and format of the binary cache file. --- -readCacheFile :: (Binary a, Binary b) - => FileMonitor a b - -> IO (Either String (MonitorStateFileSet, a, b)) -readCacheFile FileMonitor {fileMonitorCacheFile} = - withBinaryFile fileMonitorCacheFile ReadMode $ \hnd -> - Binary.decodeOrFailIO =<< BS.hGetContents hnd - --- | Helper for writing the cache file. --- --- This determines the type and format of the binary cache file. 
--- -rewriteCacheFile :: (Binary a, Binary b) - => FileMonitor a b - -> MonitorStateFileSet -> a -> b -> IO () -rewriteCacheFile FileMonitor {fileMonitorCacheFile} fileset key result = - writeFileAtomic fileMonitorCacheFile $ - Binary.encode (fileset, key, result) - --- | Probe the file system to see if any of the monitored files have changed. --- --- It returns Nothing if any file changed, or returns a possibly updated --- file 'MonitorStateFileSet' plus an indicator of whether it actually changed. --- --- We may need to update the cache since there may be changes in the filesystem --- state which don't change any of our affected files. --- --- Consider the glob @{proj1,proj2}\/\*.cabal@. Say we first run and find a --- @proj1@ directory containing @proj1.cabal@ yet no @proj2@. If we later run --- and find @proj2@ was created, yet contains no files matching @*.cabal@ then --- we want to update the cache despite no changes in our relevant file set. --- Specifically, we should add an mtime for this directory so we can avoid --- re-traversing the directory in future runs. --- -probeFileSystem :: FilePath -> MonitorStateFileSet - -> IO (Either FilePath (MonitorStateFileSet, CacheChanged)) -probeFileSystem root (MonitorStateFileSet singlePaths globPaths) = - runChangedM $ do - sequence_ - [ probeMonitorStateFileStatus root file status - | MonitorStateFile _ _ file status <- singlePaths ] - -- The glob monitors can require state changes - globPaths' <- - sequence - [ probeMonitorStateGlob root globPath - | globPath <- globPaths ] - return (MonitorStateFileSet singlePaths globPaths') - - ------------------------------------------------ --- Monad for checking for file system changes --- --- We need to be able to bail out if we detect a change (using ExceptT), --- but if there's no change we need to be able to rebuild the monitor --- state. 
And we want to optimise that rebuilding by keeping track if --- anything actually changed (using StateT), so that in the typical case --- we can avoid rewriting the state file. - -newtype ChangedM a = ChangedM (StateT CacheChanged (ExceptT FilePath IO) a) - deriving (Functor, Applicative, Monad, MonadIO) - -runChangedM :: ChangedM a -> IO (Either FilePath (a, CacheChanged)) -runChangedM (ChangedM action) = - runExceptT $ State.runStateT action CacheUnchanged - -somethingChanged :: FilePath -> ChangedM a -somethingChanged path = ChangedM $ throwError path - -cacheChanged :: ChangedM () -cacheChanged = ChangedM $ State.put CacheChanged - -mapChangedFile :: (FilePath -> FilePath) -> ChangedM a -> ChangedM a -mapChangedFile adjust (ChangedM a) = - ChangedM (mapStateT (withExceptT adjust) a) - -data CacheChanged = CacheChanged | CacheUnchanged - -whenCacheChanged :: Monad m => CacheChanged -> m () -> m () -whenCacheChanged CacheChanged action = action -whenCacheChanged CacheUnchanged _ = return () - ----------------------- - --- | Probe the file system to see if a single monitored file has changed. --- -probeMonitorStateFileStatus :: FilePath -> FilePath - -> MonitorStateFileStatus - -> ChangedM () -probeMonitorStateFileStatus root file status = - case status of - MonitorStateFileExists -> - probeFileExistence root file - - MonitorStateFileModTime mtime -> - probeFileModificationTime root file mtime - - MonitorStateFileHashed mtime hash -> - probeFileModificationTimeAndHash root file mtime hash - - MonitorStateDirExists -> - probeDirExistence root file - - MonitorStateDirModTime mtime -> - probeFileModificationTime root file mtime - - MonitorStateNonExistent -> - probeFileNonExistence root file - - MonitorStateAlreadyChanged -> - somethingChanged file - - --- | Probe the file system to see if a monitored file glob has changed. 
--- -probeMonitorStateGlob :: FilePath -- ^ root path - -> MonitorStateGlob - -> ChangedM MonitorStateGlob -probeMonitorStateGlob relroot - (MonitorStateGlob kindfile kinddir globroot glob) = do - root <- liftIO $ getFilePathRootDirectory globroot relroot - case globroot of - FilePathRelative -> - MonitorStateGlob kindfile kinddir globroot <$> - probeMonitorStateGlobRel kindfile kinddir root "." glob - - -- for absolute cases, make the changed file we report absolute too - _ -> - mapChangedFile (root ) $ - MonitorStateGlob kindfile kinddir globroot <$> - probeMonitorStateGlobRel kindfile kinddir root "" glob - -probeMonitorStateGlobRel :: MonitorKindFile -> MonitorKindDir - -> FilePath -- ^ root path - -> FilePath -- ^ path of the directory we are - -- looking in relative to @root@ - -> MonitorStateGlobRel - -> ChangedM MonitorStateGlobRel -probeMonitorStateGlobRel kindfile kinddir root dirName - (MonitorStateGlobDirs glob globPath mtime children) = do - change <- liftIO $ checkDirectoryModificationTime (root dirName) mtime - case change of - Nothing -> do - children' <- sequence - [ do fstate' <- probeMonitorStateGlobRel - kindfile kinddir root - (dirName fname) fstate - return (fname, fstate') - | (fname, fstate) <- children ] - return $! MonitorStateGlobDirs glob globPath mtime children' - - Just mtime' -> do - -- directory modification time changed: - -- a matching subdir may have been added or deleted - matches <- filterM (\entry -> let subdir = root dirName entry - in liftIO $ doesDirectoryExist subdir) - . filter (matchGlob glob) - =<< liftIO (getDirectoryContents (root dirName)) - - children' <- mapM probeMergeResult $ - mergeBy (\(path1,_) path2 -> compare path1 path2) - children - (sort matches) - return $! MonitorStateGlobDirs glob globPath mtime' children' - -- Note that just because the directory has changed, we don't force - -- a cache rewrite with 'cacheChanged' since that has some cost, and - -- all we're saving is scanning the directory. 
But we do rebuild the - -- cache with the new mtime', so that if the cache is rewritten for - -- some other reason, we'll take advantage of that. - - where - probeMergeResult :: MergeResult (FilePath, MonitorStateGlobRel) FilePath - -> ChangedM (FilePath, MonitorStateGlobRel) - - -- Only in cached (directory deleted) - probeMergeResult (OnlyInLeft (path, fstate)) = do - case allMatchingFiles (dirName path) fstate of - [] -> return (path, fstate) - -- Strictly speaking we should be returning 'CacheChanged' above - -- as we should prune the now-missing 'MonitorStateGlobRel'. However - -- we currently just leave these now-redundant entries in the - -- cache as they cost no IO and keeping them allows us to avoid - -- rewriting the cache. - (file:_) -> somethingChanged file - - -- Only in current filesystem state (directory added) - probeMergeResult (OnlyInRight path) = do - fstate <- liftIO $ buildMonitorStateGlobRel Nothing Map.empty - kindfile kinddir root (dirName path) globPath - case allMatchingFiles (dirName path) fstate of - (file:_) -> somethingChanged file - -- This is the only case where we use 'cacheChanged' because we can - -- have a whole new dir subtree (of unbounded size and cost), so we - -- need to save the state of that new subtree in the cache. - [] -> cacheChanged >> return (path, fstate) - - -- Found in path - probeMergeResult (InBoth (path, fstate) _) = do - fstate' <- probeMonitorStateGlobRel kindfile kinddir - root (dirName path) fstate - return (path, fstate') - - -- | Does a 'MonitorStateGlob' have any relevant files within it? 
- allMatchingFiles :: FilePath -> MonitorStateGlobRel -> [FilePath] - allMatchingFiles dir (MonitorStateGlobFiles _ _ entries) = - [ dir fname | (fname, _) <- entries ] - allMatchingFiles dir (MonitorStateGlobDirs _ _ _ entries) = - [ res - | (subdir, fstate) <- entries - , res <- allMatchingFiles (dir subdir) fstate ] - allMatchingFiles dir MonitorStateGlobDirTrailing = - [dir] - -probeMonitorStateGlobRel _ _ root dirName - (MonitorStateGlobFiles glob mtime children) = do - change <- liftIO $ checkDirectoryModificationTime (root dirName) mtime - mtime' <- case change of - Nothing -> return mtime - Just mtime' -> do - -- directory modification time changed: - -- a matching file may have been added or deleted - matches <- return . filter (matchGlob glob) - =<< liftIO (getDirectoryContents (root dirName)) - - mapM_ probeMergeResult $ - mergeBy (\(path1,_) path2 -> compare path1 path2) - children - (sort matches) - return mtime' - - -- Check that none of the children have changed - forM_ children $ \(file, status) -> - probeMonitorStateFileStatus root (dirName file) status - - - return (MonitorStateGlobFiles glob mtime' children) - -- Again, we don't force a cache rewite with 'cacheChanged', but we do use - -- the new mtime' if any. - where - probeMergeResult :: MergeResult (FilePath, MonitorStateFileStatus) FilePath - -> ChangedM () - probeMergeResult mr = case mr of - InBoth _ _ -> return () - -- this is just to be able to accurately report which file changed: - OnlyInLeft (path, _) -> somethingChanged (dirName path) - OnlyInRight path -> somethingChanged (dirName path) - -probeMonitorStateGlobRel _ _ _ _ MonitorStateGlobDirTrailing = - return MonitorStateGlobDirTrailing - ------------------------------------------------------------------------------- - --- | Update the input value and the set of files monitored by the --- 'FileMonitor', plus the cached value that may be returned in future. 
--- --- This takes a snapshot of the state of the monitored files right now, so --- 'checkFileMonitorChanged' will look for file system changes relative to --- this snapshot. --- --- This is typically done once the action has been completed successfully and --- we have the action's result and we know what files it looked at. See --- 'FileMonitor' for a full explanation. --- --- If we do take the snapshot after the action has completed then we have a --- problem. The problem is that files might have changed /while/ the action was --- running but /after/ the action read them. If we take the snapshot after the --- action completes then we will miss these changes. The solution is to record --- a timestamp before beginning execution of the action and then we make the --- conservative assumption that any file that has changed since then has --- already changed, ie the file monitor state for these files will be such that --- 'checkFileMonitorChanged' will report that they have changed. --- --- So if you do use 'updateFileMonitor' after the action (so you can discover --- the files used rather than predicting them in advance) then use --- 'beginUpdateFileMonitor' to get a timestamp and pass that. Alternatively, --- if you take the snapshot in advance of the action, or you're not monitoring --- any files then you can use @Nothing@ for the timestamp parameter. 
--- -updateFileMonitor - :: (Binary a, Binary b) - => FileMonitor a b -- ^ cache file path - -> FilePath -- ^ root directory - -> Maybe MonitorTimestamp -- ^ timestamp when the update action started - -> [MonitorFilePath] -- ^ files of interest relative to root - -> a -- ^ the current key value - -> b -- ^ the current result value - -> IO () -updateFileMonitor monitor root startTime monitorFiles - cachedKey cachedResult = do - hashcache <- readCacheFileHashes monitor - msfs <- buildMonitorStateFileSet startTime hashcache root monitorFiles - rewriteCacheFile monitor msfs cachedKey cachedResult - --- | A timestamp to help with the problem of file changes during actions. --- See 'updateFileMonitor' for details. --- -newtype MonitorTimestamp = MonitorTimestamp ModTime - --- | Record a timestamp at the beginning of an action, and when the action --- completes call 'updateFileMonitor' passing it the timestamp. --- See 'updateFileMonitor' for details. --- -beginUpdateFileMonitor :: IO MonitorTimestamp -beginUpdateFileMonitor = MonitorTimestamp <$> getCurTime - --- | Take the snapshot of the monitored files. That is, given the --- specification of the set of files we need to monitor, inspect the state --- of the file system now and collect the information we'll need later to --- determine if anything has changed. 
--- -buildMonitorStateFileSet :: Maybe MonitorTimestamp -- ^ optional: timestamp - -- of the start of the action - -> FileHashCache -- ^ existing file hashes - -> FilePath -- ^ root directory - -> [MonitorFilePath] -- ^ patterns of interest - -- relative to root - -> IO MonitorStateFileSet -buildMonitorStateFileSet mstartTime hashcache root = - go [] [] - where - go :: [MonitorStateFile] -> [MonitorStateGlob] - -> [MonitorFilePath] -> IO MonitorStateFileSet - go !singlePaths !globPaths [] = - return (MonitorStateFileSet (reverse singlePaths) (reverse globPaths)) - - go !singlePaths !globPaths - (MonitorFile kindfile kinddir path : monitors) = do - monitorState <- MonitorStateFile kindfile kinddir path - <$> buildMonitorStateFile mstartTime hashcache - kindfile kinddir root path - go (monitorState : singlePaths) globPaths monitors - - go !singlePaths !globPaths - (MonitorFileGlob kindfile kinddir globPath : monitors) = do - monitorState <- buildMonitorStateGlob mstartTime hashcache - kindfile kinddir root globPath - go singlePaths (monitorState : globPaths) monitors - - -buildMonitorStateFile :: Maybe MonitorTimestamp -- ^ start time of update - -> FileHashCache -- ^ existing file hashes - -> MonitorKindFile -> MonitorKindDir - -> FilePath -- ^ the root directory - -> FilePath - -> IO MonitorStateFileStatus -buildMonitorStateFile mstartTime hashcache kindfile kinddir root path = do - let abspath = root path - isFile <- doesFileExist abspath - isDir <- doesDirectoryExist abspath - case (isFile, kindfile, isDir, kinddir) of - (_, FileNotExists, _, DirNotExists) -> - -- we don't need to care if it exists now, since we check at probe time - return MonitorStateNonExistent - - (False, _, False, _) -> - return MonitorStateAlreadyChanged - - (True, FileExists, _, _) -> - return MonitorStateFileExists - - (True, FileModTime, _, _) -> - handleIOException MonitorStateAlreadyChanged $ do - mtime <- getModTime abspath - if changedDuringUpdate mstartTime mtime - then return 
MonitorStateAlreadyChanged - else return (MonitorStateFileModTime mtime) - - (True, FileHashed, _, _) -> - handleIOException MonitorStateAlreadyChanged $ do - mtime <- getModTime abspath - if changedDuringUpdate mstartTime mtime - then return MonitorStateAlreadyChanged - else do hash <- getFileHash hashcache abspath abspath mtime - return (MonitorStateFileHashed mtime hash) - - (_, _, True, DirExists) -> - return MonitorStateDirExists - - (_, _, True, DirModTime) -> - handleIOException MonitorStateAlreadyChanged $ do - mtime <- getModTime abspath - if changedDuringUpdate mstartTime mtime - then return MonitorStateAlreadyChanged - else return (MonitorStateDirModTime mtime) - - (False, _, True, DirNotExists) -> return MonitorStateAlreadyChanged - (True, FileNotExists, False, _) -> return MonitorStateAlreadyChanged - --- | If we have a timestamp for the beginning of the update, then any file --- mtime later than this means that it changed during the update and we ought --- to consider the file as already changed. --- -changedDuringUpdate :: Maybe MonitorTimestamp -> ModTime -> Bool -changedDuringUpdate (Just (MonitorTimestamp startTime)) mtime - = mtime > startTime -changedDuringUpdate _ _ = False - --- | Much like 'buildMonitorStateFileSet' but for the somewhat complicated case --- of a file glob. --- --- This gets used both by 'buildMonitorStateFileSet' when we're taking the --- file system snapshot, but also by 'probeGlobStatus' as part of checking --- the monitored (globed) files for changes when we find a whole new subtree. 
--- -buildMonitorStateGlob :: Maybe MonitorTimestamp -- ^ start time of update - -> FileHashCache -- ^ existing file hashes - -> MonitorKindFile -> MonitorKindDir - -> FilePath -- ^ the root directory - -> FilePathGlob -- ^ the matching glob - -> IO MonitorStateGlob -buildMonitorStateGlob mstartTime hashcache kindfile kinddir relroot - (FilePathGlob globroot globPath) = do - root <- liftIO $ getFilePathRootDirectory globroot relroot - MonitorStateGlob kindfile kinddir globroot <$> - buildMonitorStateGlobRel - mstartTime hashcache kindfile kinddir root "." globPath - -buildMonitorStateGlobRel :: Maybe MonitorTimestamp -- ^ start time of update - -> FileHashCache -- ^ existing file hashes - -> MonitorKindFile -> MonitorKindDir - -> FilePath -- ^ the root directory - -> FilePath -- ^ directory we are examining - -- relative to the root - -> FilePathGlobRel -- ^ the matching glob - -> IO MonitorStateGlobRel -buildMonitorStateGlobRel mstartTime hashcache kindfile kinddir root - dir globPath = do - let absdir = root dir - dirEntries <- getDirectoryContents absdir - dirMTime <- getModTime absdir - case globPath of - GlobDir glob globPath' -> do - subdirs <- filterM (\subdir -> doesDirectoryExist (absdir subdir)) - $ filter (matchGlob glob) dirEntries - subdirStates <- - forM (sort subdirs) $ \subdir -> do - fstate <- buildMonitorStateGlobRel - mstartTime hashcache kindfile kinddir root - (dir subdir) globPath' - return (subdir, fstate) - return $! MonitorStateGlobDirs glob globPath' dirMTime subdirStates - - GlobFile glob -> do - let files = filter (matchGlob glob) dirEntries - filesStates <- - forM (sort files) $ \file -> do - fstate <- buildMonitorStateFile - mstartTime hashcache kindfile kinddir root - (dir file) - return (file, fstate) - return $! MonitorStateGlobFiles glob dirMTime filesStates - - GlobDirTrailing -> - return MonitorStateGlobDirTrailing - - --- | We really want to avoid re-hashing files all the time. 
We already make --- the assumption that if a file mtime has not changed then we don't need to --- bother checking if the content hash has changed. We can apply the same --- assumption when updating the file monitor state. In the typical case of --- updating a file monitor the set of files is the same or largely the same so --- we can grab the previously known content hashes with their corresponding --- mtimes. --- -type FileHashCache = Map FilePath (ModTime, Hash) - --- | We declare it a cache hit if the mtime of a file is the same as before. --- -lookupFileHashCache :: FileHashCache -> FilePath -> ModTime -> Maybe Hash -lookupFileHashCache hashcache file mtime = do - (mtime', hash) <- Map.lookup file hashcache - guard (mtime' == mtime) - return hash - --- | Either get it from the cache or go read the file -getFileHash :: FileHashCache -> FilePath -> FilePath -> ModTime -> IO Hash -getFileHash hashcache relfile absfile mtime = - case lookupFileHashCache hashcache relfile mtime of - Just hash -> return hash - Nothing -> readFileHash absfile - --- | Build a 'FileHashCache' from the previous 'MonitorStateFileSet'. While --- in principle we could preserve the structure of the previous state, given --- that the set of files to monitor can change then it's simpler just to throw --- away the structure and use a finite map. 
--- -readCacheFileHashes :: (Binary a, Binary b) - => FileMonitor a b -> IO FileHashCache -readCacheFileHashes monitor = - handleDoesNotExist Map.empty $ - handleErrorCall Map.empty $ do - res <- readCacheFile monitor - case res of - Left _ -> return Map.empty - Right (msfs, _, _) -> return (mkFileHashCache msfs) - where - mkFileHashCache :: MonitorStateFileSet -> FileHashCache - mkFileHashCache (MonitorStateFileSet singlePaths globPaths) = - collectAllFileHashes singlePaths - `Map.union` collectAllGlobHashes globPaths - - collectAllFileHashes singlePaths = - Map.fromList [ (fpath, (mtime, hash)) - | MonitorStateFile _ _ fpath - (MonitorStateFileHashed mtime hash) <- singlePaths ] - - collectAllGlobHashes globPaths = - Map.fromList [ (fpath, (mtime, hash)) - | MonitorStateGlob _ _ _ gstate <- globPaths - , (fpath, (mtime, hash)) <- collectGlobHashes "" gstate ] - - collectGlobHashes dir (MonitorStateGlobDirs _ _ _ entries) = - [ res - | (subdir, fstate) <- entries - , res <- collectGlobHashes (dir subdir) fstate ] - - collectGlobHashes dir (MonitorStateGlobFiles _ _ entries) = - [ (dir fname, (mtime, hash)) - | (fname, MonitorStateFileHashed mtime hash) <- entries ] - - collectGlobHashes _dir MonitorStateGlobDirTrailing = - [] - - ------------------------------------------------------------------------------- --- Utils --- - --- | Within the @root@ directory, check if @file@ has its 'ModTime' is --- the same as @mtime@, short-circuiting if it is different. -probeFileModificationTime :: FilePath -> FilePath -> ModTime -> ChangedM () -probeFileModificationTime root file mtime = do - unchanged <- liftIO $ checkModificationTimeUnchanged root file mtime - unless unchanged (somethingChanged file) - --- | Within the @root@ directory, check if @file@ has its 'ModTime' and --- 'Hash' is the same as @mtime@ and @hash@, short-circuiting if it is --- different. 
-probeFileModificationTimeAndHash :: FilePath -> FilePath -> ModTime -> Hash - -> ChangedM () -probeFileModificationTimeAndHash root file mtime hash = do - unchanged <- liftIO $ - checkFileModificationTimeAndHashUnchanged root file mtime hash - unless unchanged (somethingChanged file) - --- | Within the @root@ directory, check if @file@ still exists as a file. --- If it *does not* exist, short-circuit. -probeFileExistence :: FilePath -> FilePath -> ChangedM () -probeFileExistence root file = do - existsFile <- liftIO $ doesFileExist (root file) - unless existsFile (somethingChanged file) - --- | Within the @root@ directory, check if @dir@ still exists. --- If it *does not* exist, short-circuit. -probeDirExistence :: FilePath -> FilePath -> ChangedM () -probeDirExistence root dir = do - existsDir <- liftIO $ doesDirectoryExist (root dir) - unless existsDir (somethingChanged dir) - --- | Within the @root@ directory, check if @file@ still does not exist. --- If it *does* exist, short-circuit. -probeFileNonExistence :: FilePath -> FilePath -> ChangedM () -probeFileNonExistence root file = do - existsFile <- liftIO $ doesFileExist (root file) - existsDir <- liftIO $ doesDirectoryExist (root file) - when (existsFile || existsDir) (somethingChanged file) - --- | Returns @True@ if, inside the @root@ directory, @file@ has the same --- 'ModTime' as @mtime@. -checkModificationTimeUnchanged :: FilePath -> FilePath - -> ModTime -> IO Bool -checkModificationTimeUnchanged root file mtime = - handleIOException False $ do - mtime' <- getModTime (root file) - return (mtime == mtime') - --- | Returns @True@ if, inside the @root@ directory, @file@ has the --- same 'ModTime' and 'Hash' as @mtime and @chash@. 
-checkFileModificationTimeAndHashUnchanged :: FilePath -> FilePath - -> ModTime -> Hash -> IO Bool -checkFileModificationTimeAndHashUnchanged root file mtime chash = - handleIOException False $ do - mtime' <- getModTime (root file) - if mtime == mtime' - then return True - else do - chash' <- readFileHash (root file) - return (chash == chash') - --- | Read a non-cryptographic hash of a @file@. -readFileHash :: FilePath -> IO Hash -readFileHash file = - withBinaryFile file ReadMode $ \hnd -> - evaluate . Hashable.hash =<< BS.hGetContents hnd - --- | Given a directory @dir@, return @Nothing@ if its 'ModTime' --- is the same as @mtime@, and the new 'ModTime' if it is not. -checkDirectoryModificationTime :: FilePath -> ModTime -> IO (Maybe ModTime) -checkDirectoryModificationTime dir mtime = - handleIOException Nothing $ do - mtime' <- getModTime dir - if mtime == mtime' - then return Nothing - else return (Just mtime') - --- | Run an IO computation, returning @e@ if there is an 'error' --- call. ('ErrorCall') -handleErrorCall :: a -> IO a -> IO a -handleErrorCall e = - handle (\(ErrorCall _) -> return e) - --- | Run an IO computation, returning @e@ if there is any 'IOException'. --- --- This policy is OK in the file monitor code because it just causes the --- monitor to report that something changed, and then code reacting to that --- will normally encounter the same IO exception when it re-runs the action --- that uses the file. 
--- -handleIOException :: a -> IO a -> IO a -handleIOException e = - handle (anyIOException e) - where - anyIOException :: a -> IOException -> IO a - anyIOException x _ = return x - - ------------------------------------------------------------------------------- --- Instances --- - -instance Binary MonitorStateFileSet where - put (MonitorStateFileSet singlePaths globPaths) = do - put (1 :: Int) -- version - put singlePaths - put globPaths - get = do - ver <- get - if ver == (1 :: Int) - then do singlePaths <- get - globPaths <- get - return $! MonitorStateFileSet singlePaths globPaths - else fail "MonitorStateFileSet: wrong version" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Freeze.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Freeze.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Freeze.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Freeze.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,269 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Freeze --- Copyright : (c) David Himmelstrup 2005 --- Duncan Coutts 2011 --- License : BSD-like --- --- Maintainer : cabal-devel@gmail.com --- Stability : provisional --- Portability : portable --- --- The cabal freeze command ------------------------------------------------------------------------------ -module Distribution.Client.Freeze ( - freeze, getFreezePkgs - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.Config ( SavedConfig(..) 
) -import Distribution.Client.Types -import Distribution.Client.Targets -import Distribution.Client.Dependency -import Distribution.Client.IndexUtils as IndexUtils - ( getSourcePackages, getInstalledPackages ) -import Distribution.Client.SolverInstallPlan - ( SolverInstallPlan, SolverPlanPackage ) -import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan -import Distribution.Client.Setup - ( GlobalFlags(..), FreezeFlags(..), ConfigExFlags(..) - , RepoContext(..) ) -import Distribution.Client.Sandbox.PackageEnvironment - ( loadUserConfig, pkgEnvSavedConfig, showPackageEnvironment, - userPackageEnvironmentFile ) -import Distribution.Client.Sandbox.Types - ( SandboxPackageInfo(..) ) - -import Distribution.Solver.Types.ConstraintSource -import Distribution.Solver.Types.LabeledPackageConstraint -import Distribution.Solver.Types.OptionalStanza -import Distribution.Solver.Types.PkgConfigDb -import Distribution.Solver.Types.SolverId - -import Distribution.Package - ( Package, packageId, packageName, packageVersion ) -import Distribution.Simple.Compiler - ( Compiler, compilerInfo, PackageDBStack ) -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import Distribution.Simple.Program - ( ProgramDb ) -import Distribution.Simple.Setup - ( fromFlag, fromFlagOrDefault, flagToMaybe ) -import Distribution.Simple.Utils - ( die', notice, debug, writeFileAtomic ) -import Distribution.System - ( Platform ) -import Distribution.Text - ( display ) -import Distribution.Verbosity - ( Verbosity ) - -import qualified Data.ByteString.Lazy.Char8 as BS.Char8 -import Distribution.Version - ( thisVersion ) - --- ------------------------------------------------------------ --- * The freeze command --- ------------------------------------------------------------ - --- | Freeze all of the dependencies by writing a constraints section --- constraining each dependency to an exact version. 
--- -freeze :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> Platform - -> ProgramDb - -> Maybe SandboxPackageInfo - -> GlobalFlags - -> FreezeFlags - -> IO () -freeze verbosity packageDBs repoCtxt comp platform progdb mSandboxPkgInfo - globalFlags freezeFlags = do - - pkgs <- getFreezePkgs - verbosity packageDBs repoCtxt comp platform progdb mSandboxPkgInfo - globalFlags freezeFlags - - if null pkgs - then notice verbosity $ "No packages to be frozen. " - ++ "As this package has no dependencies." - else if dryRun - then notice verbosity $ unlines $ - "The following packages would be frozen:" - : formatPkgs pkgs - - else freezePackages verbosity globalFlags pkgs - - where - dryRun = fromFlag (freezeDryRun freezeFlags) - --- | Get the list of packages whose versions would be frozen by the @freeze@ --- command. -getFreezePkgs :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> Platform - -> ProgramDb - -> Maybe SandboxPackageInfo - -> GlobalFlags - -> FreezeFlags - -> IO [SolverPlanPackage] -getFreezePkgs verbosity packageDBs repoCtxt comp platform progdb mSandboxPkgInfo - globalFlags freezeFlags = do - - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb - sourcePkgDb <- getSourcePackages verbosity repoCtxt - pkgConfigDb <- readPkgConfigDb verbosity progdb - - pkgSpecifiers <- resolveUserTargets verbosity repoCtxt - (fromFlag $ globalWorldFile globalFlags) - (packageIndex sourcePkgDb) - [UserTargetLocalDir "."] - - sanityCheck pkgSpecifiers - planPackages - verbosity comp platform mSandboxPkgInfo freezeFlags - installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers - where - sanityCheck pkgSpecifiers = do - when (not . null $ [n | n@(NamedPackage _ _) <- pkgSpecifiers]) $ - die' verbosity $ "internal error: 'resolveUserTargets' returned " - ++ "unexpected named package specifiers!" 
- when (length pkgSpecifiers /= 1) $ - die' verbosity $ "internal error: 'resolveUserTargets' returned " - ++ "unexpected source package specifiers!" - -planPackages :: Verbosity - -> Compiler - -> Platform - -> Maybe SandboxPackageInfo - -> FreezeFlags - -> InstalledPackageIndex - -> SourcePackageDb - -> PkgConfigDb - -> [PackageSpecifier UnresolvedSourcePackage] - -> IO [SolverPlanPackage] -planPackages verbosity comp platform mSandboxPkgInfo freezeFlags - installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers = do - - solver <- chooseSolver verbosity - (fromFlag (freezeSolver freezeFlags)) (compilerInfo comp) - notice verbosity "Resolving dependencies..." - - installPlan <- foldProgress logMsg (die' verbosity) return $ - resolveDependencies - platform (compilerInfo comp) pkgConfigDb - solver - resolverParams - - return $ pruneInstallPlan installPlan pkgSpecifiers - - where - resolverParams = - - setMaxBackjumps (if maxBackjumps < 0 then Nothing - else Just maxBackjumps) - - . setIndependentGoals independentGoals - - . setReorderGoals reorderGoals - - . setCountConflicts countConflicts - - . setShadowPkgs shadowPkgs - - . setStrongFlags strongFlags - - . setAllowBootLibInstalls allowBootLibInstalls - - . setSolverVerbosity verbosity - - . addConstraints - [ let pkg = pkgSpecifierTarget pkgSpecifier - pc = PackageConstraint (scopeToplevel pkg) - (PackagePropertyStanzas stanzas) - in LabeledPackageConstraint pc ConstraintSourceFreeze - | pkgSpecifier <- pkgSpecifiers ] - - . 
maybe id applySandboxInstallPolicy mSandboxPkgInfo - - $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers - - logMsg message rest = debug verbosity message >> rest - - stanzas = [ TestStanzas | testsEnabled ] - ++ [ BenchStanzas | benchmarksEnabled ] - testsEnabled = fromFlagOrDefault False $ freezeTests freezeFlags - benchmarksEnabled = fromFlagOrDefault False $ freezeBenchmarks freezeFlags - - reorderGoals = fromFlag (freezeReorderGoals freezeFlags) - countConflicts = fromFlag (freezeCountConflicts freezeFlags) - independentGoals = fromFlag (freezeIndependentGoals freezeFlags) - shadowPkgs = fromFlag (freezeShadowPkgs freezeFlags) - strongFlags = fromFlag (freezeStrongFlags freezeFlags) - maxBackjumps = fromFlag (freezeMaxBackjumps freezeFlags) - allowBootLibInstalls = fromFlag (freezeAllowBootLibInstalls freezeFlags) - - --- | Remove all unneeded packages from an install plan. --- --- A package is unneeded if it is either --- --- 1) the package that we are freezing, or --- --- 2) not a dependency (directly or transitively) of the package we are --- freezing. This is useful for removing previously installed packages --- which are no longer required from the install plan. --- --- Invariant: @pkgSpecifiers@ must refer to packages which are not --- 'PreExisting' in the 'SolverInstallPlan'. -pruneInstallPlan :: SolverInstallPlan - -> [PackageSpecifier UnresolvedSourcePackage] - -> [SolverPlanPackage] -pruneInstallPlan installPlan pkgSpecifiers = - removeSelf pkgIds $ - SolverInstallPlan.dependencyClosure installPlan pkgIds - where - pkgIds = [ PlannedId (packageId pkg) - | SpecificSourcePackage pkg <- pkgSpecifiers ] - removeSelf [thisPkg] = filter (\pp -> packageId pp /= packageId thisPkg) - removeSelf _ = error $ "internal error: 'pruneInstallPlan' given " - ++ "unexpected package specifiers!" 
- - -freezePackages :: Package pkg => Verbosity -> GlobalFlags -> [pkg] -> IO () -freezePackages verbosity globalFlags pkgs = do - - pkgEnv <- fmap (createPkgEnv . addFrozenConstraints) $ - loadUserConfig verbosity "" - (flagToMaybe . globalConstraintsFile $ globalFlags) - writeFileAtomic userPackageEnvironmentFile $ showPkgEnv pkgEnv - where - addFrozenConstraints config = - config { - savedConfigureExFlags = (savedConfigureExFlags config) { - configExConstraints = map constraint pkgs - } - } - constraint pkg = - (pkgIdToConstraint $ packageId pkg - ,ConstraintSourceUserConfig userPackageEnvironmentFile) - where - pkgIdToConstraint pkgId = - UserConstraint (UserQualified UserQualToplevel (packageName pkgId)) - (PackagePropertyVersion $ thisVersion (packageVersion pkgId)) - createPkgEnv config = mempty { pkgEnvSavedConfig = config } - showPkgEnv = BS.Char8.pack . showPackageEnvironment - - -formatPkgs :: Package pkg => [pkg] -> [String] -formatPkgs = map $ showPkg . packageId - where - showPkg pid = name pid ++ " == " ++ version pid - name = display . packageName - version = display . 
packageVersion diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/GenBounds.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/GenBounds.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/GenBounds.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/GenBounds.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,169 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.GenBounds --- Copyright : (c) Doug Beardsley 2015 --- License : BSD-like --- --- Maintainer : cabal-devel@gmail.com --- Stability : provisional --- Portability : portable --- --- The cabal gen-bounds command for generating PVP-compliant version bounds. ------------------------------------------------------------------------------ -module Distribution.Client.GenBounds ( - genBounds - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.Init - ( incVersion ) -import Distribution.Client.Freeze - ( getFreezePkgs ) -import Distribution.Client.Sandbox.Types - ( SandboxPackageInfo(..) 
) -import Distribution.Client.Setup - ( GlobalFlags(..), FreezeFlags(..), RepoContext ) -import Distribution.Package - ( Package(..), unPackageName, packageName, packageVersion ) -import Distribution.PackageDescription - ( enabledBuildDepends ) -import Distribution.PackageDescription.Configuration - ( finalizePD ) -import Distribution.PackageDescription.Parsec - ( readGenericPackageDescription ) -import Distribution.Types.ComponentRequestedSpec - ( defaultComponentRequestedSpec ) -import Distribution.Types.Dependency -import Distribution.Simple.Compiler - ( Compiler, PackageDBStack, compilerInfo ) -import Distribution.Simple.Program - ( ProgramDb ) -import Distribution.Simple.Utils - ( tryFindPackageDesc ) -import Distribution.System - ( Platform ) -import Distribution.Text - ( display ) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Version - ( Version, alterVersion - , LowerBound(..), UpperBound(..), VersionRange(..), asVersionIntervals - , orLaterVersion, earlierVersion, intersectVersionRanges ) -import System.Directory - ( getCurrentDirectory ) - --- | Does this version range have an upper bound? -hasUpperBound :: VersionRange -> Bool -hasUpperBound vr = - case asVersionIntervals vr of - [] -> False - is -> if snd (last is) == NoUpperBound then False else True - --- | Given a version, return an API-compatible (according to PVP) version range. --- --- Example: @0.4.1.2@ produces the version range @>= 0.4.1 && < 0.5@. --- --- This version is slightly different than the one in --- 'Distribution.Client.Init'. This one uses a.b.c as the lower bound because --- the user could be using a new function introduced in a.b.c which would make --- ">= a.b" incorrect. -pvpize :: Version -> VersionRange -pvpize v = orLaterVersion (vn 3) - `intersectVersionRanges` - earlierVersion (incVersion 1 (vn 2)) - where - vn n = alterVersion (take n) v - --- | Show the PVP-mandated version range for this package. 
The @padTo@ parameter --- specifies the width of the package name column. -showBounds :: Package pkg => Int -> pkg -> String -showBounds padTo p = unwords $ - (padAfter padTo $ unPackageName $ packageName p) : - map showInterval (asVersionIntervals $ pvpize $ packageVersion p) - where - padAfter :: Int -> String -> String - padAfter n str = str ++ replicate (n - length str) ' ' - - showInterval :: (LowerBound, UpperBound) -> String - showInterval (LowerBound _ _, NoUpperBound) = - error "Error: expected upper bound...this should never happen!" - showInterval (LowerBound l _, UpperBound u _) = - unwords [">=", display l, "&& <", display u] - --- | Entry point for the @gen-bounds@ command. -genBounds - :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> Platform - -> ProgramDb - -> Maybe SandboxPackageInfo - -> GlobalFlags - -> FreezeFlags - -> IO () -genBounds verbosity packageDBs repoCtxt comp platform progdb mSandboxPkgInfo - globalFlags freezeFlags = do - - let cinfo = compilerInfo comp - - cwd <- getCurrentDirectory - path <- tryFindPackageDesc cwd - gpd <- readGenericPackageDescription verbosity path - -- NB: We don't enable tests or benchmarks, since often they - -- don't really have useful bounds. - let epd = finalizePD mempty defaultComponentRequestedSpec - (const True) platform cinfo [] gpd - case epd of - Left _ -> putStrLn "finalizePD failed" - Right (pd,_) -> do - let needBounds = filter (not . hasUpperBound . depVersion) $ - enabledBuildDepends pd defaultComponentRequestedSpec - - if (null needBounds) - then putStrLn - "Congratulations, all your dependencies have upper bounds!" - else go needBounds - where - go needBounds = do - pkgs <- getFreezePkgs - verbosity packageDBs repoCtxt comp platform progdb - mSandboxPkgInfo globalFlags freezeFlags - - putStrLn boundsNeededMsg - - let isNeeded pkg = unPackageName (packageName pkg) - `elem` map depName needBounds - let thePkgs = filter isNeeded pkgs - - let padTo = maximum $ map (length . 
unPackageName . packageName) pkgs - traverse_ (putStrLn . (++",") . showBounds padTo) thePkgs - - depName :: Dependency -> String - depName (Dependency pn _) = unPackageName pn - - depVersion :: Dependency -> VersionRange - depVersion (Dependency _ vr) = vr - --- | The message printed when some dependencies are found to be lacking proper --- PVP-mandated bounds. -boundsNeededMsg :: String -boundsNeededMsg = unlines - [ "" - , "The following packages need bounds and here is a suggested starting point." - , "You can copy and paste this into the build-depends section in your .cabal" - , "file and it should work (with the appropriate removal of commas)." - , "" - , "Note that version bounds are a statement that you've successfully built and" - , "tested your package and expect it to work with any of the specified package" - , "versions (PROVIDED that those packages continue to conform with the PVP)." - , "Therefore, the version bounds generated here are the most conservative" - , "based on the versions that you are currently building with. If you know" - , "your package will work with versions outside the ranges generated here," - , "feel free to widen them." 
- , "" - ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Get.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Get.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Get.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Get.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,299 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Get --- Copyright : (c) Andrea Vezzosi 2008 --- Duncan Coutts 2011 --- John Millikin 2012 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- The 'cabal get' command. ------------------------------------------------------------------------------ - -module Distribution.Client.Get ( - get, - - -- * Cloning 'SourceRepo's - -- | Mainly exported for testing purposes - clonePackagesFromSourceRepo, - ClonePackageException(..), - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude hiding (get) - -import Distribution.Package - ( PackageId, packageId, packageName ) -import Distribution.Simple.Setup - ( Flag(..), fromFlag, fromFlagOrDefault, flagToMaybe ) -import Distribution.Simple.Utils - ( notice, die', info, writeFileAtomic ) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Text (display) -import qualified Distribution.PackageDescription as PD -import Distribution.Simple.Program - ( programName ) - -import Distribution.Client.Setup - ( GlobalFlags(..), GetFlags(..), RepoContext(..) 
) -import Distribution.Client.Types -import Distribution.Client.Targets -import Distribution.Client.Dependency -import Distribution.Client.VCS -import Distribution.Client.FetchUtils -import qualified Distribution.Client.Tar as Tar (extractTarGzFile) -import Distribution.Client.IndexUtils as IndexUtils - ( getSourcePackagesAtIndexState ) -import Distribution.Solver.Types.SourcePackage - -import Control.Exception - ( Exception(..), catch, throwIO ) -import Control.Monad - ( mapM, forM_, mapM_ ) -import qualified Data.Map as Map -import System.Directory - ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist ) -import System.Exit - ( ExitCode(..) ) -import System.FilePath - ( (), (<.>), addTrailingPathSeparator ) - - --- | Entry point for the 'cabal get' command. -get :: Verbosity - -> RepoContext - -> GlobalFlags - -> GetFlags - -> [UserTarget] - -> IO () -get verbosity _ _ _ [] = - notice verbosity "No packages requested. Nothing to do." - -get verbosity repoCtxt globalFlags getFlags userTargets = do - let useSourceRepo = case getSourceRepository getFlags of - NoFlag -> False - _ -> True - - unless useSourceRepo $ - mapM_ (checkTarget verbosity) userTargets - - let idxState = flagToMaybe $ getIndexState getFlags - - sourcePkgDb <- getSourcePackagesAtIndexState verbosity repoCtxt idxState - - pkgSpecifiers <- resolveUserTargets verbosity repoCtxt - (fromFlag $ globalWorldFile globalFlags) - (packageIndex sourcePkgDb) - userTargets - - pkgs <- either (die' verbosity . unlines . 
map show) return $ - resolveWithoutDependencies - (resolverParams sourcePkgDb pkgSpecifiers) - - unless (null prefix) $ - createDirectoryIfMissing True prefix - - if useSourceRepo - then clone pkgs - else unpack pkgs - - where - resolverParams sourcePkgDb pkgSpecifiers = - --TODO: add command-line constraint and preference args for unpack - standardInstallPolicy mempty sourcePkgDb pkgSpecifiers - - prefix = fromFlagOrDefault "" (getDestDir getFlags) - - clone :: [UnresolvedSourcePackage] -> IO () - clone = clonePackagesFromSourceRepo verbosity prefix kind - . map (\pkg -> (packageId pkg, packageSourceRepos pkg)) - where - kind = fromFlag . getSourceRepository $ getFlags - packageSourceRepos :: SourcePackage loc -> [SourceRepo] - packageSourceRepos = PD.sourceRepos - . PD.packageDescription - . packageDescription - - unpack :: [UnresolvedSourcePackage] -> IO () - unpack pkgs = do - forM_ pkgs $ \pkg -> do - location <- fetchPackage verbosity repoCtxt (packageSource pkg) - let pkgid = packageId pkg - descOverride | usePristine = Nothing - | otherwise = packageDescrOverride pkg - case location of - LocalTarballPackage tarballPath -> - unpackPackage verbosity prefix pkgid descOverride tarballPath - - RemoteTarballPackage _tarballURL tarballPath -> - unpackPackage verbosity prefix pkgid descOverride tarballPath - - RepoTarballPackage _repo _pkgid tarballPath -> - unpackPackage verbosity prefix pkgid descOverride tarballPath - - RemoteSourceRepoPackage _repo _ -> - die' verbosity $ "The 'get' command does no yet support targets " - ++ "that are remote source repositories." - - LocalUnpackedPackage _ -> - error "Distribution.Client.Get.unpack: the impossible happened." 
- where - usePristine = fromFlagOrDefault False (getPristine getFlags) - -checkTarget :: Verbosity -> UserTarget -> IO () -checkTarget verbosity target = case target of - UserTargetLocalDir dir -> die' verbosity (notTarball dir) - UserTargetLocalCabalFile file -> die' verbosity (notTarball file) - _ -> return () - where - notTarball t = - "The 'get' command is for tarball packages. " - ++ "The target '" ++ t ++ "' is not a tarball." - --- ------------------------------------------------------------ --- * Unpacking the source tarball --- ------------------------------------------------------------ - -unpackPackage :: Verbosity -> FilePath -> PackageId - -> PackageDescriptionOverride - -> FilePath -> IO () -unpackPackage verbosity prefix pkgid descOverride pkgPath = do - let pkgdirname = display pkgid - pkgdir = prefix pkgdirname - pkgdir' = addTrailingPathSeparator pkgdir - existsDir <- doesDirectoryExist pkgdir - when existsDir $ die' verbosity $ - "The directory \"" ++ pkgdir' ++ "\" already exists, not unpacking." - existsFile <- doesFileExist pkgdir - when existsFile $ die' verbosity $ - "A file \"" ++ pkgdir ++ "\" is in the way, not unpacking." - notice verbosity $ "Unpacking to " ++ pkgdir' - Tar.extractTarGzFile prefix pkgdirname pkgPath - - case descOverride of - Nothing -> return () - Just pkgtxt -> do - let descFilePath = pkgdir display (packageName pkgid) <.> "cabal" - info verbosity $ - "Updating " ++ descFilePath - ++ " with the latest revision from the index." 
- writeFileAtomic descFilePath pkgtxt - - --- ------------------------------------------------------------ --- * Cloning packages from their declared source repositories --- ------------------------------------------------------------ - - -data ClonePackageException = - ClonePackageNoSourceRepos PackageId - | ClonePackageNoSourceReposOfKind PackageId (Maybe RepoKind) - | ClonePackageNoRepoType PackageId SourceRepo - | ClonePackageUnsupportedRepoType PackageId SourceRepo RepoType - | ClonePackageNoRepoLocation PackageId SourceRepo - | ClonePackageDestinationExists PackageId FilePath Bool - | ClonePackageFailedWithExitCode PackageId SourceRepo String ExitCode - deriving (Show, Eq) - -instance Exception ClonePackageException where - displayException (ClonePackageNoSourceRepos pkgid) = - "Cannot fetch a source repository for package " ++ display pkgid - ++ ". The package does not specify any source repositories." - - displayException (ClonePackageNoSourceReposOfKind pkgid repoKind) = - "Cannot fetch a source repository for package " ++ display pkgid - ++ ". The package does not specify a source repository of the requested " - ++ "kind" ++ maybe "." (\k -> " (kind " ++ display k ++ ").") repoKind - - displayException (ClonePackageNoRepoType pkgid _repo) = - "Cannot fetch the source repository for package " ++ display pkgid - ++ ". The package's description specifies a source repository but does " - ++ "not specify the repository 'type' field (e.g. git, darcs or hg)." - - displayException (ClonePackageUnsupportedRepoType pkgid _ repoType) = - "Cannot fetch the source repository for package " ++ display pkgid - ++ ". The repository type '" ++ display repoType - ++ "' is not yet supported." - - displayException (ClonePackageNoRepoLocation pkgid _repo) = - "Cannot fetch the source repository for package " ++ display pkgid - ++ ". The package's description specifies a source repository but does " - ++ "not specify the repository 'location' field (i.e. the URL)." 
- - displayException (ClonePackageDestinationExists pkgid dest isdir) = - "Not fetching the source repository for package " ++ display pkgid ++ ". " - ++ if isdir then "The destination directory " ++ dest ++ " already exists." - else "A file " ++ dest ++ " is in the way." - - displayException (ClonePackageFailedWithExitCode - pkgid repo vcsprogname exitcode) = - "Failed to fetch the source repository for package " ++ display pkgid - ++ maybe "" (", repository location " ++) (PD.repoLocation repo) ++ " (" - ++ vcsprogname ++ " failed with " ++ show exitcode ++ ")." - - --- | Given a bunch of package ids and their corresponding available --- 'SourceRepo's, pick a single 'SourceRepo' for each one and clone into --- new subdirs of the given directory. --- -clonePackagesFromSourceRepo :: Verbosity - -> FilePath -- ^ destination dir prefix - -> Maybe RepoKind -- ^ preferred 'RepoKind' - -> [(PackageId, [SourceRepo])] - -- ^ the packages and their - -- available 'SourceRepo's - -> IO () -clonePackagesFromSourceRepo verbosity destDirPrefix - preferredRepoKind pkgrepos = do - - -- Do a bunch of checks and collect the required info - pkgrepos' <- mapM preCloneChecks pkgrepos - - -- Configure the VCS drivers for all the repository types we may need - vcss <- configureVCSs verbosity $ - Map.fromList [ (vcsRepoType vcs, vcs) - | (_, _, vcs, _) <- pkgrepos' ] - - -- Now execute all the required commands for each repo - sequence_ - [ cloneSourceRepo verbosity vcs' repo destDir - `catch` \exitcode -> - throwIO (ClonePackageFailedWithExitCode - pkgid repo (programName (vcsProgram vcs)) exitcode) - | (pkgid, repo, vcs, destDir) <- pkgrepos' - , let Just vcs' = Map.lookup (vcsRepoType vcs) vcss - ] - - where - preCloneChecks :: (PackageId, [SourceRepo]) - -> IO (PackageId, SourceRepo, VCS Program, FilePath) - preCloneChecks (pkgid, repos) = do - repo <- case selectPackageSourceRepo preferredRepoKind repos of - Just repo -> return repo - Nothing | null repos -> throwIO 
(ClonePackageNoSourceRepos pkgid) - Nothing -> throwIO (ClonePackageNoSourceReposOfKind - pkgid preferredRepoKind) - - vcs <- case validateSourceRepo repo of - Right (_, _, _, vcs) -> return vcs - Left SourceRepoRepoTypeUnspecified -> - throwIO (ClonePackageNoRepoType pkgid repo) - - Left (SourceRepoRepoTypeUnsupported repoType) -> - throwIO (ClonePackageUnsupportedRepoType pkgid repo repoType) - - Left SourceRepoLocationUnspecified -> - throwIO (ClonePackageNoRepoLocation pkgid repo) - - let destDir = destDirPrefix display (packageName pkgid) - destDirExists <- doesDirectoryExist destDir - destFileExists <- doesFileExist destDir - when (destDirExists || destFileExists) $ - throwIO (ClonePackageDestinationExists pkgid destDir destDirExists) - - return (pkgid, repo, vcs, destDir) - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/GlobalFlags.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/GlobalFlags.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/GlobalFlags.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/GlobalFlags.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,283 +0,0 @@ -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE ExistentialQuantification #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE RecordWildCards #-} - -module Distribution.Client.GlobalFlags ( - GlobalFlags(..) - , defaultGlobalFlags - , RepoContext(..) - , withRepoContext - , withRepoContext' - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.Types - ( Repo(..), RemoteRepo(..) 
) -import Distribution.Simple.Setup - ( Flag(..), fromFlag, flagToMaybe ) -import Distribution.Utils.NubList - ( NubList, fromNubList ) -import Distribution.Client.HttpUtils - ( HttpTransport, configureTransport ) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Simple.Utils - ( info ) - -import Control.Concurrent - ( MVar, newMVar, modifyMVar ) -import Control.Exception - ( throwIO ) -import System.FilePath - ( () ) -import Network.URI - ( URI, uriScheme, uriPath ) -import qualified Data.Map as Map - -import qualified Hackage.Security.Client as Sec -import qualified Hackage.Security.Util.Path as Sec -import qualified Hackage.Security.Util.Pretty as Sec -import qualified Hackage.Security.Client.Repository.Cache as Sec -import qualified Hackage.Security.Client.Repository.Local as Sec.Local -import qualified Hackage.Security.Client.Repository.Remote as Sec.Remote -import qualified Distribution.Client.Security.HTTP as Sec.HTTP -import qualified Distribution.Client.Security.DNS as Sec.DNS - --- ------------------------------------------------------------ --- * Global flags --- ------------------------------------------------------------ - --- | Flags that apply at the top level, not to any sub-command. -data GlobalFlags = GlobalFlags { - globalVersion :: Flag Bool, - globalNumericVersion :: Flag Bool, - globalConfigFile :: Flag FilePath, - globalSandboxConfigFile :: Flag FilePath, - globalConstraintsFile :: Flag FilePath, - globalRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers. 
- globalCacheDir :: Flag FilePath, - globalLocalRepos :: NubList FilePath, - globalLogsDir :: Flag FilePath, - globalWorldFile :: Flag FilePath, - globalRequireSandbox :: Flag Bool, - globalIgnoreSandbox :: Flag Bool, - globalIgnoreExpiry :: Flag Bool, -- ^ Ignore security expiry dates - globalHttpTransport :: Flag String, - globalNix :: Flag Bool, -- ^ Integrate with Nix - globalStoreDir :: Flag FilePath, - globalProgPathExtra :: NubList FilePath -- ^ Extra program path used for packagedb lookups in a global context (i.e. for http transports) - } deriving Generic - -defaultGlobalFlags :: GlobalFlags -defaultGlobalFlags = GlobalFlags { - globalVersion = Flag False, - globalNumericVersion = Flag False, - globalConfigFile = mempty, - globalSandboxConfigFile = mempty, - globalConstraintsFile = mempty, - globalRemoteRepos = mempty, - globalCacheDir = mempty, - globalLocalRepos = mempty, - globalLogsDir = mempty, - globalWorldFile = mempty, - globalRequireSandbox = Flag False, - globalIgnoreSandbox = Flag False, - globalIgnoreExpiry = Flag False, - globalHttpTransport = mempty, - globalNix = Flag False, - globalStoreDir = mempty, - globalProgPathExtra = mempty - } - -instance Monoid GlobalFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup GlobalFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Repo context --- ------------------------------------------------------------ - --- | Access to repositories -data RepoContext = RepoContext { - -- | All user-specified repositories - repoContextRepos :: [Repo] - - -- | Get the HTTP transport - -- - -- The transport will be initialized on the first call to this function. - -- - -- NOTE: It is important that we don't eagerly initialize the transport. 
- -- Initializing the transport is not free, and especially in contexts where - -- we don't know a-priori whether or not we need the transport (for instance - -- when using cabal in "nix mode") incurring the overhead of transport - -- initialization on _every_ invocation (eg @cabal build@) is undesirable. - , repoContextGetTransport :: IO HttpTransport - - -- | Get the (initialized) secure repo - -- - -- (the 'Repo' type itself is stateless and must remain so, because it - -- must be serializable) - , repoContextWithSecureRepo :: forall a. - Repo - -> (forall down. Sec.Repository down -> IO a) - -> IO a - - -- | Should we ignore expiry times (when checking security)? - , repoContextIgnoreExpiry :: Bool - } - --- | Wrapper around 'Repository', hiding the type argument -data SecureRepo = forall down. SecureRepo (Sec.Repository down) - -withRepoContext :: Verbosity -> GlobalFlags -> (RepoContext -> IO a) -> IO a -withRepoContext verbosity globalFlags = - withRepoContext' - verbosity - (fromNubList (globalRemoteRepos globalFlags)) - (fromNubList (globalLocalRepos globalFlags)) - (fromFlag (globalCacheDir globalFlags)) - (flagToMaybe (globalHttpTransport globalFlags)) - (flagToMaybe (globalIgnoreExpiry globalFlags)) - (fromNubList (globalProgPathExtra globalFlags)) - -withRepoContext' :: Verbosity -> [RemoteRepo] -> [FilePath] - -> FilePath -> Maybe String -> Maybe Bool - -> [FilePath] - -> (RepoContext -> IO a) - -> IO a -withRepoContext' verbosity remoteRepos localRepos - sharedCacheDir httpTransport ignoreExpiry extraPaths = \callback -> do - transportRef <- newMVar Nothing - let httpLib = Sec.HTTP.transportAdapter - verbosity - (getTransport transportRef) - initSecureRepos verbosity httpLib secureRemoteRepos $ \secureRepos' -> - callback RepoContext { - repoContextRepos = allRemoteRepos - ++ map RepoLocal localRepos - , repoContextGetTransport = getTransport transportRef - , repoContextWithSecureRepo = withSecureRepo secureRepos' - , repoContextIgnoreExpiry = 
fromMaybe False ignoreExpiry - } - where - secureRemoteRepos = - [ (remote, cacheDir) | RepoSecure remote cacheDir <- allRemoteRepos ] - allRemoteRepos = - [ (if isSecure then RepoSecure else RepoRemote) remote cacheDir - | remote <- remoteRepos - , let cacheDir = sharedCacheDir remoteRepoName remote - isSecure = remoteRepoSecure remote == Just True - ] - - getTransport :: MVar (Maybe HttpTransport) -> IO HttpTransport - getTransport transportRef = - modifyMVar transportRef $ \mTransport -> do - transport <- case mTransport of - Just tr -> return tr - Nothing -> configureTransport verbosity extraPaths httpTransport - return (Just transport, transport) - - withSecureRepo :: Map Repo SecureRepo - -> Repo - -> (forall down. Sec.Repository down -> IO a) - -> IO a - withSecureRepo secureRepos repo callback = - case Map.lookup repo secureRepos of - Just (SecureRepo secureRepo) -> callback secureRepo - Nothing -> throwIO $ userError "repoContextWithSecureRepo: unknown repo" - --- | Initialize the provided secure repositories --- --- Assumed invariant: `remoteRepoSecure` should be set for all these repos. -initSecureRepos :: forall a. Verbosity - -> Sec.HTTP.HttpLib - -> [(RemoteRepo, FilePath)] - -> (Map Repo SecureRepo -> IO a) - -> IO a -initSecureRepos verbosity httpLib repos callback = go Map.empty repos - where - go :: Map Repo SecureRepo -> [(RemoteRepo, FilePath)] -> IO a - go !acc [] = callback acc - go !acc ((r,cacheDir):rs) = do - cachePath <- Sec.makeAbsolute $ Sec.fromFilePath cacheDir - initSecureRepo verbosity httpLib r cachePath $ \r' -> - go (Map.insert (RepoSecure r cacheDir) r' acc) rs - --- | Initialize the given secure repo --- --- The security library has its own concept of a "local" repository, distinct --- from @cabal-install@'s; these are secure repositories, but live in the local --- file system. We use the convention that these repositories are identified by --- URLs of the form @file:/path/to/local/repo@. 
-initSecureRepo :: Verbosity - -> Sec.HTTP.HttpLib - -> RemoteRepo -- ^ Secure repo ('remoteRepoSecure' assumed) - -> Sec.Path Sec.Absolute -- ^ Cache dir - -> (SecureRepo -> IO a) -- ^ Callback - -> IO a -initSecureRepo verbosity httpLib RemoteRepo{..} cachePath = \callback -> do - requiresBootstrap <- withRepo [] Sec.requiresBootstrap - - mirrors <- if requiresBootstrap - then do - info verbosity $ "Trying to locate mirrors via DNS for " ++ - "initial bootstrap of secure " ++ - "repository '" ++ show remoteRepoURI ++ - "' ..." - - Sec.DNS.queryBootstrapMirrors verbosity remoteRepoURI - else pure [] - - withRepo mirrors $ \r -> do - when requiresBootstrap $ Sec.uncheckClientErrors $ - Sec.bootstrap r - (map Sec.KeyId remoteRepoRootKeys) - (Sec.KeyThreshold (fromIntegral remoteRepoKeyThreshold)) - callback $ SecureRepo r - where - -- Initialize local or remote repo depending on the URI - withRepo :: [URI] -> (forall down. Sec.Repository down -> IO a) -> IO a - withRepo _ callback | uriScheme remoteRepoURI == "file:" = do - dir <- Sec.makeAbsolute $ Sec.fromFilePath (uriPath remoteRepoURI) - Sec.Local.withRepository dir - cache - Sec.hackageRepoLayout - Sec.hackageIndexLayout - logTUF - callback - withRepo mirrors callback = - Sec.Remote.withRepository httpLib - (remoteRepoURI:mirrors) - Sec.Remote.defaultRepoOpts - cache - Sec.hackageRepoLayout - Sec.hackageIndexLayout - logTUF - callback - - cache :: Sec.Cache - cache = Sec.Cache { - cacheRoot = cachePath - , cacheLayout = Sec.cabalCacheLayout { - Sec.cacheLayoutIndexTar = cacheFn "01-index.tar" - , Sec.cacheLayoutIndexIdx = cacheFn "01-index.tar.idx" - , Sec.cacheLayoutIndexTarGz = cacheFn "01-index.tar.gz" - } - } - - cacheFn :: FilePath -> Sec.CachePath - cacheFn = Sec.rootPath . Sec.fragment - - -- We display any TUF progress only in verbose mode, including any transient - -- verification errors. If verification fails, then the final exception that - -- is thrown will of course be shown. 
- logTUF :: Sec.LogMessage -> IO () - logTUF = info verbosity . Sec.pretty diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Glob.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Glob.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Glob.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Glob.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,266 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ---TODO: [code cleanup] plausibly much of this module should be merged with --- similar functionality in Cabal. -module Distribution.Client.Glob - ( FilePathGlob(..) - , FilePathRoot(..) - , FilePathGlobRel(..) - , Glob - , GlobPiece(..) - , matchFileGlob - , matchFileGlobRel - , matchGlob - , isTrivialFilePathGlob - , getFilePathRootDirectory - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Data.List (stripPrefix) -import Control.Monad (mapM) - -import Distribution.Text -import Distribution.Compat.ReadP (ReadP, (<++), (+++)) -import qualified Distribution.Compat.ReadP as Parse -import qualified Text.PrettyPrint as Disp - -import System.FilePath -import System.Directory - - --- | A file path specified by globbing --- -data FilePathGlob = FilePathGlob FilePathRoot FilePathGlobRel - deriving (Eq, Show, Generic) - -data FilePathGlobRel - = GlobDir !Glob !FilePathGlobRel - | GlobFile !Glob - | GlobDirTrailing -- ^ trailing dir, a glob ending in @/@ - deriving (Eq, Show, Generic) - --- | A single directory or file component of a globbed path -type Glob = [GlobPiece] - --- | A piece of a globbing pattern -data GlobPiece = WildCard - | Literal String - | Union [Glob] - deriving (Eq, Show, Generic) - -data FilePathRoot - = FilePathRelative - | FilePathRoot FilePath -- ^ e.g. 
@"/"@, @"c:\"@ or result of 'takeDrive' - | FilePathHomeDir - deriving (Eq, Show, Generic) - -instance Binary FilePathGlob -instance Binary FilePathRoot -instance Binary FilePathGlobRel -instance Binary GlobPiece - - --- | Check if a 'FilePathGlob' doesn't actually make use of any globbing and --- is in fact equivalent to a non-glob 'FilePath'. --- --- If it is trivial in this sense then the result is the equivalent constant --- 'FilePath'. On the other hand if it is not trivial (so could in principle --- match more than one file) then the result is @Nothing@. --- -isTrivialFilePathGlob :: FilePathGlob -> Maybe FilePath -isTrivialFilePathGlob (FilePathGlob root pathglob) = - case root of - FilePathRelative -> go [] pathglob - FilePathRoot root' -> go [root'] pathglob - FilePathHomeDir -> Nothing - where - go paths (GlobDir [Literal path] globs) = go (path:paths) globs - go paths (GlobFile [Literal path]) = Just (joinPath (reverse (path:paths))) - go paths GlobDirTrailing = Just (addTrailingPathSeparator - (joinPath (reverse paths))) - go _ _ = Nothing - --- | Get the 'FilePath' corresponding to a 'FilePathRoot'. --- --- The 'FilePath' argument is required to supply the path for the --- 'FilePathRelative' case. --- -getFilePathRootDirectory :: FilePathRoot - -> FilePath -- ^ root for relative paths - -> IO FilePath -getFilePathRootDirectory FilePathRelative root = return root -getFilePathRootDirectory (FilePathRoot root) _ = return root -getFilePathRootDirectory FilePathHomeDir _ = getHomeDirectory - - ------------------------------------------------------------------------------- --- Matching --- - --- | Match a 'FilePathGlob' against the file system, starting from a given --- root directory for relative paths. The results of relative globs are --- relative to the given root. Matches for absolute globs are absolute. 
--- -matchFileGlob :: FilePath -> FilePathGlob -> IO [FilePath] -matchFileGlob relroot (FilePathGlob globroot glob) = do - root <- getFilePathRootDirectory globroot relroot - matches <- matchFileGlobRel root glob - case globroot of - FilePathRelative -> return matches - _ -> return (map (root ) matches) - --- | Match a 'FilePathGlobRel' against the file system, starting from a --- given root directory. The results are all relative to the given root. --- -matchFileGlobRel :: FilePath -> FilePathGlobRel -> IO [FilePath] -matchFileGlobRel root glob0 = go glob0 "" - where - go (GlobFile glob) dir = do - entries <- getDirectoryContents (root dir) - let files = filter (matchGlob glob) entries - return (map (dir ) files) - - go (GlobDir glob globPath) dir = do - entries <- getDirectoryContents (root dir) - subdirs <- filterM (\subdir -> doesDirectoryExist - (root dir subdir)) - $ filter (matchGlob glob) entries - concat <$> mapM (\subdir -> go globPath (dir subdir)) subdirs - - go GlobDirTrailing dir = return [dir] - - --- | Match a globbing pattern against a file path component --- -matchGlob :: Glob -> String -> Bool -matchGlob = goStart - where - -- From the man page, glob(7): - -- "If a filename starts with a '.', this character must be - -- matched explicitly." 
- - go, goStart :: [GlobPiece] -> String -> Bool - - goStart (WildCard:_) ('.':_) = False - goStart (Union globs:rest) cs = any (\glob -> goStart (glob ++ rest) cs) - globs - goStart rest cs = go rest cs - - go [] "" = True - go (Literal lit:rest) cs - | Just cs' <- stripPrefix lit cs - = go rest cs' - | otherwise = False - go [WildCard] "" = True - go (WildCard:rest) (c:cs) = go rest (c:cs) || go (WildCard:rest) cs - go (Union globs:rest) cs = any (\glob -> go (glob ++ rest) cs) globs - go [] (_:_) = False - go (_:_) "" = False - - ------------------------------------------------------------------------------- --- Parsing & printing --- - -instance Text FilePathGlob where - disp (FilePathGlob root pathglob) = disp root Disp.<> disp pathglob - parse = - parse >>= \root -> - (FilePathGlob root <$> parse) - <++ (when (root == FilePathRelative) Parse.pfail >> - return (FilePathGlob root GlobDirTrailing)) - -instance Text FilePathRoot where - disp FilePathRelative = Disp.empty - disp (FilePathRoot root) = Disp.text root - disp FilePathHomeDir = Disp.char '~' Disp.<> Disp.char '/' - - parse = - ( (Parse.char '/' >> return (FilePathRoot "/")) - +++ (Parse.char '~' >> Parse.char '/' >> return FilePathHomeDir) - +++ (do drive <- Parse.satisfy (\c -> (c >= 'a' && c <= 'z') - || (c >= 'A' && c <= 'Z')) - _ <- Parse.char ':' - _ <- Parse.char '/' +++ Parse.char '\\' - return (FilePathRoot (toUpper drive : ":\\"))) - ) - <++ return FilePathRelative - -instance Text FilePathGlobRel where - disp (GlobDir glob pathglob) = dispGlob glob - Disp.<> Disp.char '/' - Disp.<> disp pathglob - disp (GlobFile glob) = dispGlob glob - disp GlobDirTrailing = Disp.empty - - parse = parsePath - where - parsePath :: ReadP r FilePathGlobRel - parsePath = - parseGlob >>= \globpieces -> - asDir globpieces - <++ asTDir globpieces - <++ asFile globpieces - - asDir glob = do dirSep - globs <- parsePath - return (GlobDir glob globs) - asTDir glob = do dirSep - return (GlobDir glob GlobDirTrailing) - 
asFile glob = return (GlobFile glob) - - dirSep = (Parse.char '/' >> return ()) - +++ (do _ <- Parse.char '\\' - -- check this isn't an escape code - following <- Parse.look - case following of - (c:_) | isGlobEscapedChar c -> Parse.pfail - _ -> return ()) - - -dispGlob :: Glob -> Disp.Doc -dispGlob = Disp.hcat . map dispPiece - where - dispPiece WildCard = Disp.char '*' - dispPiece (Literal str) = Disp.text (escape str) - dispPiece (Union globs) = Disp.braces - (Disp.hcat (Disp.punctuate - (Disp.char ',') - (map dispGlob globs))) - escape [] = [] - escape (c:cs) - | isGlobEscapedChar c = '\\' : c : escape cs - | otherwise = c : escape cs - -parseGlob :: ReadP r Glob -parseGlob = Parse.many1 parsePiece - where - parsePiece = literal +++ wildcard +++ union - - wildcard = Parse.char '*' >> return WildCard - - union = Parse.between (Parse.char '{') (Parse.char '}') $ - fmap Union (Parse.sepBy1 parseGlob (Parse.char ',')) - - literal = Literal `fmap` litchars1 - - litchar = normal +++ escape - - normal = Parse.satisfy (\c -> not (isGlobEscapedChar c) - && c /= '/' && c /= '\\') - escape = Parse.char '\\' >> Parse.satisfy isGlobEscapedChar - - litchars1 :: ReadP r [Char] - litchars1 = liftM2 (:) litchar litchars - - litchars :: ReadP r [Char] - litchars = litchars1 <++ return [] - -isGlobEscapedChar :: Char -> Bool -isGlobEscapedChar '*' = True -isGlobEscapedChar '{' = True -isGlobEscapedChar '}' = True -isGlobEscapedChar ',' = True -isGlobEscapedChar _ = False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/GZipUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/GZipUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/GZipUtils.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/GZipUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,86 +0,0 @@ 
-{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE CPP #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.GZipUtils --- Copyright : (c) Dmitry Astapov 2010 --- License : BSD-like --- --- Maintainer : cabal-devel@gmail.com --- Stability : provisional --- Portability : portable --- --- Provides a convenience functions for working with files that may or may not --- be zipped. ------------------------------------------------------------------------------ -module Distribution.Client.GZipUtils ( - maybeDecompress, - ) where - -import Codec.Compression.Zlib.Internal -import Data.ByteString.Lazy.Internal as BS (ByteString(Empty, Chunk)) - -#if MIN_VERSION_zlib(0,6,0) -import Control.Exception (throw) -import Control.Monad (liftM) -import Control.Monad.ST.Lazy (ST, runST) -import qualified Data.ByteString as Strict -#endif - --- | Attempts to decompress the `bytes' under the assumption that --- "data format" error at the very beginning of the stream means --- that it is already decompressed. Caller should make sanity checks --- to verify that it is not, in fact, garbage. --- --- This is to deal with http proxies that lie to us and transparently --- decompress without removing the content-encoding header. See: --- --- -maybeDecompress :: ByteString -> ByteString -#if MIN_VERSION_zlib(0,6,0) -maybeDecompress bytes = runST (go bytes decompressor) - where - decompressor :: DecompressStream (ST s) - decompressor = decompressST gzipOrZlibFormat defaultDecompressParams - - -- DataError at the beginning of the stream probably means that stream is - -- not compressed, so we return it as-is. - -- TODO: alternatively, we might consider looking for the two magic bytes - -- at the beginning of the gzip header. (not an option for zlib, though.) 
- go :: Monad m => ByteString -> DecompressStream m -> m ByteString - go cs (DecompressOutputAvailable bs k) = liftM (Chunk bs) $ go' cs =<< k - go _ (DecompressStreamEnd _bs ) = return Empty - go _ (DecompressStreamError _err ) = return bytes - go cs (DecompressInputRequired k) = go cs' =<< k c - where - (c, cs') = uncons cs - - -- Once we have received any output though we regard errors as actual errors - -- and we throw them (as pure exceptions). - -- TODO: We could (and should) avoid these pure exceptions. - go' :: Monad m => ByteString -> DecompressStream m -> m ByteString - go' cs (DecompressOutputAvailable bs k) = liftM (Chunk bs) $ go' cs =<< k - go' _ (DecompressStreamEnd _bs ) = return Empty - go' _ (DecompressStreamError err ) = throw err - go' cs (DecompressInputRequired k) = go' cs' =<< k c - where - (c, cs') = uncons cs - - uncons :: ByteString -> (Strict.ByteString, ByteString) - uncons Empty = (Strict.empty, Empty) - uncons (Chunk c cs) = (c, cs) -#else -maybeDecompress bytes = foldStream $ decompressWithErrors gzipOrZlibFormat defaultDecompressParams bytes - where - -- DataError at the beginning of the stream probably means that stream is not compressed. - -- Returning it as-is. - -- TODO: alternatively, we might consider looking for the two magic bytes - -- at the beginning of the gzip header. 
- foldStream (StreamError _ _) = bytes - foldStream somethingElse = doFold somethingElse - - doFold StreamEnd = BS.Empty - doFold (StreamChunk bs stream) = BS.Chunk bs (doFold stream) - doFold (StreamError _ msg) = error $ "Codec.Compression.Zlib: " ++ msg -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Haddock.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Haddock.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Haddock.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Haddock.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,69 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Haddock --- Copyright : (c) Andrea Vezzosi 2009 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Interfacing with Haddock --- ------------------------------------------------------------------------------ -module Distribution.Client.Haddock - ( - regenerateHaddockIndex - ) - where - -import Data.List (maximumBy) -import Data.Foldable (forM_) -import System.Directory (createDirectoryIfMissing, renameFile) -import System.FilePath ((), splitFileName) -import Distribution.Package - ( packageVersion ) -import Distribution.Simple.Haddock (haddockPackagePaths) -import Distribution.Simple.Program (haddockProgram, ProgramDb - , runProgram, requireProgramVersion) -import Distribution.Version (mkVersion, orLaterVersion) -import Distribution.Verbosity (Verbosity) -import Distribution.Simple.PackageIndex - ( InstalledPackageIndex, allPackagesByName ) -import Distribution.Simple.Utils - ( comparing, debug, installDirectoryContents, withTempDirectory ) -import Distribution.InstalledPackageInfo as InstalledPackageInfo - ( InstalledPackageInfo(exposed) 
) - -regenerateHaddockIndex :: Verbosity - -> InstalledPackageIndex -> ProgramDb - -> FilePath - -> IO () -regenerateHaddockIndex verbosity pkgs progdb index = do - (paths, warns) <- haddockPackagePaths pkgs' Nothing - let paths' = [ (interface, html) | (interface, Just html, _) <- paths] - forM_ warns (debug verbosity) - - (confHaddock, _, _) <- - requireProgramVersion verbosity haddockProgram - (orLaterVersion (mkVersion [0,6])) progdb - - createDirectoryIfMissing True destDir - - withTempDirectory verbosity destDir "tmphaddock" $ \tempDir -> do - - let flags = [ "--gen-contents" - , "--gen-index" - , "--odir=" ++ tempDir - , "--title=Haskell modules on this system" ] - ++ [ "--read-interface=" ++ html ++ "," ++ interface - | (interface, html) <- paths' ] - runProgram verbosity confHaddock flags - renameFile (tempDir "index.html") (tempDir destFile) - installDirectoryContents verbosity tempDir destDir - - where - (destDir,destFile) = splitFileName index - pkgs' = [ maximumBy (comparing packageVersion) pkgvers' - | (_pname, pkgvers) <- allPackagesByName pkgs - , let pkgvers' = filter exposed pkgvers - , not (null pkgvers') ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/HttpUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/HttpUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/HttpUtils.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/HttpUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,857 +0,0 @@ -{-# LANGUAGE BangPatterns #-} ------------------------------------------------------------------------------ --- | Separate module for HTTP actions, using a proxy server if one exists. 
------------------------------------------------------------------------------ -module Distribution.Client.HttpUtils ( - DownloadResult(..), - configureTransport, - HttpTransport(..), - HttpCode, - downloadURI, - transportCheckHttps, - remoteRepoCheckHttps, - remoteRepoTryUpgradeToHttps, - isOldHackageURI - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Network.HTTP - ( Request (..), Response (..), RequestMethod (..) - , Header(..), HeaderName(..), lookupHeader ) -import Network.HTTP.Proxy ( Proxy(..), fetchProxy) -import Network.URI - ( URI (..), URIAuth (..), uriToString ) -import Network.Browser - ( browse, setOutHandler, setErrHandler, setProxy - , setAuthorityGen, request, setAllowBasicAuth, setUserAgent ) -import qualified Control.Exception as Exception -import Control.Exception - ( evaluate ) -import Control.DeepSeq - ( force ) -import Control.Monad - ( guard ) -import qualified Data.ByteString.Lazy.Char8 as BS -import qualified Paths_cabal_install (version) -import Distribution.Verbosity (Verbosity) -import Distribution.Simple.Utils - ( die', info, warn, debug, notice, writeFileAtomic - , copyFileVerbose, withTempFile ) -import Distribution.Client.Utils - ( withTempFileName ) -import Distribution.Client.Types - ( RemoteRepo(..) ) -import Distribution.System - ( buildOS, buildArch ) -import Distribution.Text - ( display ) -import qualified System.FilePath.Posix as FilePath.Posix - ( splitDirectories ) -import System.FilePath - ( (<.>), takeFileName, takeDirectory ) -import System.Directory - ( doesFileExist, renameFile, canonicalizePath ) -import System.IO - ( withFile, IOMode(ReadMode), hGetContents, hClose ) -import System.IO.Error - ( isDoesNotExistError ) -import Distribution.Simple.Program - ( Program, simpleProgram, ConfiguredProgram, programPath - , ProgramInvocation(..), programInvocation - , ProgramSearchPathEntry(..) 
- , getProgramInvocationOutput ) -import Distribution.Simple.Program.Db - ( ProgramDb, emptyProgramDb, addKnownPrograms - , configureAllKnownPrograms - , requireProgram, lookupProgram - , modifyProgramSearchPath ) -import Distribution.Simple.Program.Run - ( getProgramInvocationOutputAndErrors ) -import Numeric (showHex) -import System.Random (randomRIO) -import System.Exit (ExitCode(..)) - - ------------------------------------------------------------------------------- --- Downloading a URI, given an HttpTransport --- - -data DownloadResult = FileAlreadyInCache - | FileDownloaded FilePath - deriving (Eq) - -downloadURI :: HttpTransport - -> Verbosity - -> URI -- ^ What to download - -> FilePath -- ^ Where to put it - -> IO DownloadResult -downloadURI _transport verbosity uri path | uriScheme uri == "file:" = do - copyFileVerbose verbosity (uriPath uri) path - return (FileDownloaded path) - -- Can we store the hash of the file so we can safely return path when the - -- hash matches to avoid unnecessary computation? - -downloadURI transport verbosity uri path = do - - let etagPath = path <.> "etag" - targetExists <- doesFileExist path - etagPathExists <- doesFileExist etagPath - -- In rare cases the target file doesn't exist, but the etag does. - etag <- if targetExists && etagPathExists - then Just <$> readFile etagPath - else return Nothing - - -- Only use the external http transports if we actually have to - -- (or have been told to do so) - let transport' - | uriScheme uri == "http:" - , not (transportManuallySelected transport) - = plainHttpTransport - - | otherwise - = transport - - withTempFileName (takeDirectory path) (takeFileName path) $ \tmpFile -> do - result <- getHttp transport' verbosity uri etag tmpFile [] - - -- Only write the etag if we get a 200 response code. - -- A 304 still sends us an etag header. 
- case result of - (200, Just newEtag) -> writeFile etagPath newEtag - _ -> return () - - case fst result of - 200 -> do - info verbosity ("Downloaded to " ++ path) - renameFile tmpFile path - return (FileDownloaded path) - 304 -> do - notice verbosity "Skipping download: local and remote files match." - return FileAlreadyInCache - errCode -> die' verbosity $ "Failed to download " ++ show uri - ++ " : HTTP code " ++ show errCode - ------------------------------------------------------------------------------- --- Utilities for repo url management --- - -remoteRepoCheckHttps :: Verbosity -> HttpTransport -> RemoteRepo -> IO () -remoteRepoCheckHttps verbosity transport repo - | uriScheme (remoteRepoURI repo) == "https:" - , not (transportSupportsHttps transport) - = die' verbosity $ "The remote repository '" ++ remoteRepoName repo - ++ "' specifies a URL that " ++ requiresHttpsErrorMessage - | otherwise = return () - -transportCheckHttps :: Verbosity -> HttpTransport -> URI -> IO () -transportCheckHttps verbosity transport uri - | uriScheme uri == "https:" - , not (transportSupportsHttps transport) - = die' verbosity $ "The URL " ++ show uri - ++ " " ++ requiresHttpsErrorMessage - | otherwise = return () - -requiresHttpsErrorMessage :: String -requiresHttpsErrorMessage = - "requires HTTPS however the built-in HTTP implementation " - ++ "does not support HTTPS. The transport implementations with HTTPS " - ++ "support are " ++ intercalate ", " - [ name | (name, _, True, _ ) <- supportedTransports ] - ++ ". 
One of these will be selected automatically if the corresponding " - ++ "external program is available, or one can be selected specifically " - ++ "with the global flag --http-transport=" - -remoteRepoTryUpgradeToHttps :: Verbosity -> HttpTransport -> RemoteRepo -> IO RemoteRepo -remoteRepoTryUpgradeToHttps verbosity transport repo - | remoteRepoShouldTryHttps repo - , uriScheme (remoteRepoURI repo) == "http:" - , not (transportSupportsHttps transport) - , not (transportManuallySelected transport) - = die' verbosity $ "The builtin HTTP implementation does not support HTTPS, but using " - ++ "HTTPS for authenticated uploads is recommended. " - ++ "The transport implementations with HTTPS support are " - ++ intercalate ", " [ name | (name, _, True, _ ) <- supportedTransports ] - ++ "but they require the corresponding external program to be " - ++ "available. You can either make one available or use plain HTTP by " - ++ "using the global flag --http-transport=plain-http (or putting the " - ++ "equivalent in the config file). With plain HTTP, your password " - ++ "is sent using HTTP digest authentication so it cannot be easily " - ++ "intercepted, but it is not as secure as using HTTPS." - - | remoteRepoShouldTryHttps repo - , uriScheme (remoteRepoURI repo) == "http:" - , transportSupportsHttps transport - = return repo { - remoteRepoURI = (remoteRepoURI repo) { uriScheme = "https:" } - } - - | otherwise - = return repo - --- | Utility function for legacy support. 
-isOldHackageURI :: URI -> Bool -isOldHackageURI uri - = case uriAuthority uri of - Just (URIAuth {uriRegName = "hackage.haskell.org"}) -> - FilePath.Posix.splitDirectories (uriPath uri) - == ["/","packages","archive"] - _ -> False - - ------------------------------------------------------------------------------- --- Setting up a HttpTransport --- - -data HttpTransport = HttpTransport { - -- | GET a URI, with an optional ETag (to do a conditional fetch), - -- write the resource to the given file and return the HTTP status code, - -- and optional ETag. - getHttp :: Verbosity -> URI -> Maybe ETag -> FilePath -> [Header] - -> IO (HttpCode, Maybe ETag), - - -- | POST a resource to a URI, with optional auth (username, password) - -- and return the HTTP status code and any redirect URL. - postHttp :: Verbosity -> URI -> String -> Maybe Auth - -> IO (HttpCode, String), - - -- | POST a file resource to a URI using multipart\/form-data encoding, - -- with optional auth (username, password) and return the HTTP status - -- code and any error string. - postHttpFile :: Verbosity -> URI -> FilePath -> Maybe Auth - -> IO (HttpCode, String), - - -- | PUT a file resource to a URI, with optional auth - -- (username, password), extra headers and return the HTTP status code - -- and any error string. - putHttpFile :: Verbosity -> URI -> FilePath -> Maybe Auth -> [Header] - -> IO (HttpCode, String), - - -- | Whether this transport supports https or just http. - transportSupportsHttps :: Bool, - - -- | Whether this transport implementation was specifically chosen by - -- the user via configuration, or whether it was automatically selected. - -- Strictly speaking this is not a property of the transport itself but - -- about how it was chosen. Nevertheless it's convenient to keep here. - transportManuallySelected :: Bool - } - --TODO: why does postHttp return a redirect, but postHttpFile return errors? 
- -type HttpCode = Int -type ETag = String -type Auth = (String, String) - -noPostYet :: Verbosity -> URI -> String -> Maybe (String, String) - -> IO (Int, String) -noPostYet verbosity _ _ _ = die' verbosity "Posting (for report upload) is not implemented yet" - -supportedTransports :: [(String, Maybe Program, Bool, - ProgramDb -> Maybe HttpTransport)] -supportedTransports = - [ let prog = simpleProgram "curl" in - ( "curl", Just prog, True - , \db -> curlTransport <$> lookupProgram prog db ) - - , let prog = simpleProgram "wget" in - ( "wget", Just prog, True - , \db -> wgetTransport <$> lookupProgram prog db ) - - , let prog = simpleProgram "powershell" in - ( "powershell", Just prog, True - , \db -> powershellTransport <$> lookupProgram prog db ) - - , ( "plain-http", Nothing, False - , \_ -> Just plainHttpTransport ) - ] - -configureTransport :: Verbosity -> [FilePath] -> Maybe String -> IO HttpTransport - -configureTransport verbosity extraPath (Just name) = - -- the user secifically selected a transport by name so we'll try and - -- configure that one - - case find (\(name',_,_,_) -> name' == name) supportedTransports of - Just (_, mprog, _tls, mkTrans) -> do - - let baseProgDb = modifyProgramSearchPath (\p -> map ProgramSearchPathDir extraPath ++ p) emptyProgramDb - progdb <- case mprog of - Nothing -> return emptyProgramDb - Just prog -> snd <$> requireProgram verbosity prog baseProgDb - -- ^^ if it fails, it'll fail here - - let Just transport = mkTrans progdb - return transport { transportManuallySelected = True } - - Nothing -> die' verbosity $ "Unknown HTTP transport specified: " ++ name - ++ ". 
The supported transports are " - ++ intercalate ", " - [ name' | (name', _, _, _ ) <- supportedTransports ] - -configureTransport verbosity extraPath Nothing = do - -- the user hasn't selected a transport, so we'll pick the first one we - -- can configure successfully, provided that it supports tls - - -- for all the transports except plain-http we need to try and find - -- their external executable - let baseProgDb = modifyProgramSearchPath (\p -> map ProgramSearchPathDir extraPath ++ p) emptyProgramDb - progdb <- configureAllKnownPrograms verbosity $ - addKnownPrograms - [ prog | (_, Just prog, _, _) <- supportedTransports ] - baseProgDb - - let availableTransports = - [ (name, transport) - | (name, _, _, mkTrans) <- supportedTransports - , transport <- maybeToList (mkTrans progdb) ] - -- there's always one because the plain one is last and never fails - let (name, transport) = head availableTransports - debug verbosity $ "Selected http transport implementation: " ++ name - - return transport { transportManuallySelected = False } - - ------------------------------------------------------------------------------- --- The HttpTransports based on external programs --- - -curlTransport :: ConfiguredProgram -> HttpTransport -curlTransport prog = - HttpTransport gethttp posthttp posthttpfile puthttpfile True False - where - gethttp verbosity uri etag destPath reqHeaders = do - withTempFile (takeDirectory destPath) - "curl-headers.txt" $ \tmpFile tmpHandle -> do - hClose tmpHandle - let args = [ show uri - , "--output", destPath - , "--location" - , "--write-out", "%{http_code}" - , "--user-agent", userAgent - , "--silent", "--show-error" - , "--dump-header", tmpFile ] - ++ concat - [ ["--header", "If-None-Match: " ++ t] - | t <- maybeToList etag ] - ++ concat - [ ["--header", show name ++ ": " ++ value] - | Header name value <- reqHeaders ] - - resp <- getProgramInvocationOutput verbosity - (programInvocation prog args) - withFile tmpFile ReadMode $ \hnd -> do - 
headers <- hGetContents hnd - (code, _err, etag') <- parseResponse verbosity uri resp headers - evaluate $ force (code, etag') - - posthttp = noPostYet - - addAuthConfig auth progInvocation = progInvocation - { progInvokeInput = do - (uname, passwd) <- auth - return $ unlines - [ "--digest" - , "--user " ++ uname ++ ":" ++ passwd - ] - , progInvokeArgs = ["--config", "-"] ++ progInvokeArgs progInvocation - } - - posthttpfile verbosity uri path auth = do - let args = [ show uri - , "--form", "package=@"++path - , "--write-out", "\n%{http_code}" - , "--user-agent", userAgent - , "--silent", "--show-error" - , "--header", "Accept: text/plain" - , "--location" - ] - resp <- getProgramInvocationOutput verbosity $ addAuthConfig auth - (programInvocation prog args) - (code, err, _etag) <- parseResponse verbosity uri resp "" - return (code, err) - - puthttpfile verbosity uri path auth headers = do - let args = [ show uri - , "--request", "PUT", "--data-binary", "@"++path - , "--write-out", "\n%{http_code}" - , "--user-agent", userAgent - , "--silent", "--show-error" - , "--location" - , "--header", "Accept: text/plain" - ] - ++ concat - [ ["--header", show name ++ ": " ++ value] - | Header name value <- headers ] - resp <- getProgramInvocationOutput verbosity $ addAuthConfig auth - (programInvocation prog args) - (code, err, _etag) <- parseResponse verbosity uri resp "" - return (code, err) - - -- on success these curl invocations produces an output like "200" - -- and on failure it has the server error response first - parseResponse :: Verbosity -> URI -> String -> String -> IO (Int, String, Maybe ETag) - parseResponse verbosity uri resp headers = - let codeerr = - case reverse (lines resp) of - (codeLine:rerrLines) -> - case readMaybe (trim codeLine) of - Just i -> let errstr = mkErrstr rerrLines - in Just (i, errstr) - Nothing -> Nothing - [] -> Nothing - - mkErrstr = unlines . reverse . 
dropWhile (all isSpace) - - mb_etag :: Maybe ETag - mb_etag = listToMaybe $ reverse - [ etag - | ["ETag:", etag] <- map words (lines headers) ] - - in case codeerr of - Just (i, err) -> return (i, err, mb_etag) - _ -> statusParseFail verbosity uri resp - - -wgetTransport :: ConfiguredProgram -> HttpTransport -wgetTransport prog = - HttpTransport gethttp posthttp posthttpfile puthttpfile True False - where - gethttp verbosity uri etag destPath reqHeaders = do - resp <- runWGet verbosity uri args - - -- wget doesn't support range requests. - -- so, we not only ignore range request headers, - -- but we also dispay a warning message when we see them. - let hasRangeHeader = any isRangeHeader reqHeaders - warningMsg = "the 'wget' transport currently doesn't support" - ++ " range requests, which wastes network bandwidth." - ++ " To fix this, set 'http-transport' to 'curl' or" - ++ " 'plain-http' in '~/.cabal/config'." - ++ " Note that the 'plain-http' transport doesn't" - ++ " support HTTPS.\n" - - when (hasRangeHeader) $ warn verbosity warningMsg - (code, etag') <- parseOutput verbosity uri resp - return (code, etag') - where - args = [ "--output-document=" ++ destPath - , "--user-agent=" ++ userAgent - , "--tries=5" - , "--timeout=15" - , "--server-response" ] - ++ concat - [ ["--header", "If-None-Match: " ++ t] - | t <- maybeToList etag ] - ++ [ "--header=" ++ show name ++ ": " ++ value - | hdr@(Header name value) <- reqHeaders - , (not (isRangeHeader hdr)) ] - - -- wget doesn't support range requests. - -- so, we ignore range request headers, lest we get errors. 
- isRangeHeader :: Header -> Bool - isRangeHeader (Header HdrRange _) = True - isRangeHeader _ = False - - posthttp = noPostYet - - posthttpfile verbosity uri path auth = - withTempFile (takeDirectory path) - (takeFileName path) $ \tmpFile tmpHandle -> - withTempFile (takeDirectory path) "response" $ - \responseFile responseHandle -> do - hClose responseHandle - (body, boundary) <- generateMultipartBody path - BS.hPut tmpHandle body - hClose tmpHandle - let args = [ "--post-file=" ++ tmpFile - , "--user-agent=" ++ userAgent - , "--server-response" - , "--output-document=" ++ responseFile - , "--header=Accept: text/plain" - , "--header=Content-type: multipart/form-data; " ++ - "boundary=" ++ boundary ] - out <- runWGet verbosity (addUriAuth auth uri) args - (code, _etag) <- parseOutput verbosity uri out - withFile responseFile ReadMode $ \hnd -> do - resp <- hGetContents hnd - evaluate $ force (code, resp) - - puthttpfile verbosity uri path auth headers = - withTempFile (takeDirectory path) "response" $ - \responseFile responseHandle -> do - hClose responseHandle - let args = [ "--method=PUT", "--body-file="++path - , "--user-agent=" ++ userAgent - , "--server-response" - , "--output-document=" ++ responseFile - , "--header=Accept: text/plain" ] - ++ [ "--header=" ++ show name ++ ": " ++ value - | Header name value <- headers ] - - out <- runWGet verbosity (addUriAuth auth uri) args - (code, _etag) <- parseOutput verbosity uri out - withFile responseFile ReadMode $ \hnd -> do - resp <- hGetContents hnd - evaluate $ force (code, resp) - - addUriAuth Nothing uri = uri - addUriAuth (Just (user, pass)) uri = uri - { uriAuthority = Just a { uriUserInfo = user ++ ":" ++ pass ++ "@" } - } - where - a = fromMaybe (URIAuth "" "" "") (uriAuthority uri) - - runWGet verbosity uri args = do - -- We pass the URI via STDIN because it contains the users' credentials - -- and sensitive data should not be passed via command line arguments. 
- let - invocation = (programInvocation prog ("--input-file=-" : args)) - { progInvokeInput = Just (uriToString id uri "") - } - - -- wget returns its output on stderr rather than stdout - (_, resp, exitCode) <- getProgramInvocationOutputAndErrors verbosity - invocation - -- wget returns exit code 8 for server "errors" like "304 not modified" - if exitCode == ExitSuccess || exitCode == ExitFailure 8 - then return resp - else die' verbosity $ "'" ++ programPath prog - ++ "' exited with an error:\n" ++ resp - - -- With the --server-response flag, wget produces output with the full - -- http server response with all headers, we want to find a line like - -- "HTTP/1.1 200 OK", but only the last one, since we can have multiple - -- requests due to redirects. - parseOutput verbosity uri resp = - let parsedCode = listToMaybe - [ code - | (protocol:codestr:_err) <- map words (reverse (lines resp)) - , "HTTP/" `isPrefixOf` protocol - , code <- maybeToList (readMaybe codestr) ] - mb_etag :: Maybe ETag - mb_etag = listToMaybe - [ etag - | ["ETag:", etag] <- map words (reverse (lines resp)) ] - in case parsedCode of - Just i -> return (i, mb_etag) - _ -> statusParseFail verbosity uri resp - - -powershellTransport :: ConfiguredProgram -> HttpTransport -powershellTransport prog = - HttpTransport gethttp posthttp posthttpfile puthttpfile True False - where - gethttp verbosity uri etag destPath reqHeaders = do - resp <- runPowershellScript verbosity $ - webclientScript - (escape (show uri)) - (("$targetStream = New-Object -TypeName System.IO.FileStream -ArgumentList " ++ (escape destPath) ++ ", Create") - :(setupHeaders ((useragentHeader : etagHeader) ++ reqHeaders))) - [ "$response = $request.GetResponse()" - , "$responseStream = $response.GetResponseStream()" - , "$buffer = new-object byte[] 10KB" - , "$count = $responseStream.Read($buffer, 0, $buffer.length)" - , "while ($count -gt 0)" - , "{" - , " $targetStream.Write($buffer, 0, $count)" - , " $count = 
$responseStream.Read($buffer, 0, $buffer.length)" - , "}" - , "Write-Host ($response.StatusCode -as [int]);" - , "Write-Host $response.GetResponseHeader(\"ETag\").Trim('\"')" - ] - [ "$targetStream.Flush()" - , "$targetStream.Close()" - , "$targetStream.Dispose()" - , "$responseStream.Dispose()" - ] - parseResponse resp - where - parseResponse :: String -> IO (HttpCode, Maybe ETag) - parseResponse x = - case lines $ trim x of - (code:etagv:_) -> fmap (\c -> (c, Just etagv)) $ parseCode code x - (code: _) -> fmap (\c -> (c, Nothing )) $ parseCode code x - _ -> statusParseFail verbosity uri x - parseCode :: String -> String -> IO HttpCode - parseCode code x = case readMaybe code of - Just i -> return i - Nothing -> statusParseFail verbosity uri x - etagHeader = [ Header HdrIfNoneMatch t | t <- maybeToList etag ] - - posthttp = noPostYet - - posthttpfile verbosity uri path auth = - withTempFile (takeDirectory path) - (takeFileName path) $ \tmpFile tmpHandle -> do - (body, boundary) <- generateMultipartBody path - BS.hPut tmpHandle body - hClose tmpHandle - fullPath <- canonicalizePath tmpFile - - let contentHeader = Header HdrContentType - ("multipart/form-data; boundary=" ++ boundary) - resp <- runPowershellScript verbosity $ webclientScript - (escape (show uri)) - (setupHeaders (contentHeader : extraHeaders) ++ setupAuth auth) - (uploadFileAction "POST" uri fullPath) - uploadFileCleanup - parseUploadResponse verbosity uri resp - - puthttpfile verbosity uri path auth headers = do - fullPath <- canonicalizePath path - resp <- runPowershellScript verbosity $ webclientScript - (escape (show uri)) - (setupHeaders (extraHeaders ++ headers) ++ setupAuth auth) - (uploadFileAction "PUT" uri fullPath) - uploadFileCleanup - parseUploadResponse verbosity uri resp - - runPowershellScript verbosity script = do - let args = - [ "-InputFormat", "None" - -- the default execution policy doesn't allow running - -- unsigned scripts, so we need to tell powershell to bypass it - , 
"-ExecutionPolicy", "bypass" - , "-NoProfile", "-NonInteractive" - , "-Command", "-" - ] - debug verbosity script - getProgramInvocationOutput verbosity (programInvocation prog args) - { progInvokeInput = Just (script ++ "\nExit(0);") - } - - escape = show - - useragentHeader = Header HdrUserAgent userAgent - extraHeaders = [Header HdrAccept "text/plain", useragentHeader] - - setupHeaders headers = - [ "$request." ++ addHeader name value - | Header name value <- headers - ] - where - addHeader header value - = case header of - HdrAccept -> "Accept = " ++ escape value - HdrUserAgent -> "UserAgent = " ++ escape value - HdrConnection -> "Connection = " ++ escape value - HdrContentLength -> "ContentLength = " ++ escape value - HdrContentType -> "ContentType = " ++ escape value - HdrDate -> "Date = " ++ escape value - HdrExpect -> "Expect = " ++ escape value - HdrHost -> "Host = " ++ escape value - HdrIfModifiedSince -> "IfModifiedSince = " ++ escape value - HdrReferer -> "Referer = " ++ escape value - HdrTransferEncoding -> "TransferEncoding = " ++ escape value - HdrRange -> let (start, _:end) = - if "bytes=" `isPrefixOf` value - then break (== '-') value' - else error $ "Could not decode range: " ++ value - value' = drop 6 value - in "AddRange(\"bytes\", " ++ escape start ++ ", " ++ escape end ++ ");" - name -> "Headers.Add(" ++ escape (show name) ++ "," ++ escape value ++ ");" - - setupAuth auth = - [ "$request.Credentials = new-object System.Net.NetworkCredential(" - ++ escape uname ++ "," ++ escape passwd ++ ",\"\");" - | (uname,passwd) <- maybeToList auth - ] - - uploadFileAction method _uri fullPath = - [ "$request.Method = " ++ show method - , "$requestStream = $request.GetRequestStream()" - , "$fileStream = [System.IO.File]::OpenRead(" ++ escape fullPath ++ ")" - , "$bufSize=10000" - , "$chunk = New-Object byte[] $bufSize" - , "while( $bytesRead = $fileStream.Read($chunk,0,$bufsize) )" - , "{" - , " $requestStream.write($chunk, 0, $bytesRead)" - , " 
$requestStream.Flush()" - , "}" - , "" - , "$responseStream = $request.getresponse()" - , "$responseReader = new-object System.IO.StreamReader $responseStream.GetResponseStream()" - , "$code = $response.StatusCode -as [int]" - , "if ($code -eq 0) {" - , " $code = 200;" - , "}" - , "Write-Host $code" - , "Write-Host $responseReader.ReadToEnd()" - ] - - uploadFileCleanup = - [ "$fileStream.Close()" - , "$requestStream.Close()" - , "$responseStream.Close()" - ] - - parseUploadResponse verbosity uri resp = case lines (trim resp) of - (codeStr : message) - | Just code <- readMaybe codeStr -> return (code, unlines message) - _ -> statusParseFail verbosity uri resp - - webclientScript uri setup action cleanup = unlines - [ "[Net.ServicePointManager]::SecurityProtocol = \"tls12, tls11, tls\"" - , "$uri = New-Object \"System.Uri\" " ++ uri - , "$request = [System.Net.HttpWebRequest]::Create($uri)" - , unlines setup - , "Try {" - , unlines (map (" " ++) action) - , "} Catch [System.Net.WebException] {" - , " $exception = $_.Exception;" - , " If ($exception.Status -eq " - ++ "[System.Net.WebExceptionStatus]::ProtocolError) {" - , " $response = $exception.Response -as [System.Net.HttpWebResponse];" - , " $reader = new-object " - ++ "System.IO.StreamReader($response.GetResponseStream());" - , " Write-Host ($response.StatusCode -as [int]);" - , " Write-Host $reader.ReadToEnd();" - , " } Else {" - , " Write-Host $exception.Message;" - , " }" - , "} Catch {" - , " Write-Host $_.Exception.Message;" - , "} finally {" - , unlines (map (" " ++) cleanup) - , "}" - ] - - ------------------------------------------------------------------------------- --- The builtin plain HttpTransport --- - -plainHttpTransport :: HttpTransport -plainHttpTransport = - HttpTransport gethttp posthttp posthttpfile puthttpfile False False - where - gethttp verbosity uri etag destPath reqHeaders = do - let req = Request{ - rqURI = uri, - rqMethod = GET, - rqHeaders = [ Header HdrIfNoneMatch t - | t <- 
maybeToList etag ] - ++ reqHeaders, - rqBody = BS.empty - } - (_, resp) <- cabalBrowse verbosity Nothing (request req) - let code = convertRspCode (rspCode resp) - etag' = lookupHeader HdrETag (rspHeaders resp) - -- 206 Partial Content is a normal response to a range request; see #3385. - when (code==200 || code==206) $ - writeFileAtomic destPath $ rspBody resp - return (code, etag') - - posthttp = noPostYet - - posthttpfile verbosity uri path auth = do - (body, boundary) <- generateMultipartBody path - let headers = [ Header HdrContentType - ("multipart/form-data; boundary="++boundary) - , Header HdrContentLength (show (BS.length body)) - , Header HdrAccept ("text/plain") - ] - req = Request { - rqURI = uri, - rqMethod = POST, - rqHeaders = headers, - rqBody = body - } - (_, resp) <- cabalBrowse verbosity auth (request req) - return (convertRspCode (rspCode resp), rspErrorString resp) - - puthttpfile verbosity uri path auth headers = do - body <- BS.readFile path - let req = Request { - rqURI = uri, - rqMethod = PUT, - rqHeaders = Header HdrContentLength (show (BS.length body)) - : Header HdrAccept "text/plain" - : headers, - rqBody = body - } - (_, resp) <- cabalBrowse verbosity auth (request req) - return (convertRspCode (rspCode resp), rspErrorString resp) - - convertRspCode (a,b,c) = a*100 + b*10 + c - - rspErrorString resp = - case lookupHeader HdrContentType (rspHeaders resp) of - Just contenttype - | takeWhile (/= ';') contenttype == "text/plain" - -> BS.unpack (rspBody resp) - _ -> rspReason resp - - cabalBrowse verbosity auth act = do - p <- fixupEmptyProxy <$> fetchProxy True - Exception.handleJust - (guard . isDoesNotExistError) - (const . die' verbosity $ "Couldn't establish HTTP connection. " - ++ "Possible cause: HTTP proxy server is down.") $ - browse $ do - setProxy p - setErrHandler (warn verbosity . 
("http error: "++)) - setOutHandler (debug verbosity) - setUserAgent userAgent - setAllowBasicAuth False - setAuthorityGen (\_ _ -> return auth) - act - - fixupEmptyProxy (Proxy uri _) | null uri = NoProxy - fixupEmptyProxy p = p - - ------------------------------------------------------------------------------- --- Common stuff used by multiple transport impls --- - -userAgent :: String -userAgent = concat [ "cabal-install/", display Paths_cabal_install.version - , " (", display buildOS, "; ", display buildArch, ")" - ] - -statusParseFail :: Verbosity -> URI -> String -> IO a -statusParseFail verbosity uri r = - die' verbosity $ "Failed to download " ++ show uri ++ " : " - ++ "No Status Code could be parsed from response: " ++ r - --- Trim -trim :: String -> String -trim = f . f - where f = reverse . dropWhile isSpace - - ------------------------------------------------------------------------------- --- Multipart stuff partially taken from cgi package. --- - -generateMultipartBody :: FilePath -> IO (BS.ByteString, String) -generateMultipartBody path = do - content <- BS.readFile path - boundary <- genBoundary - let !body = formatBody content (BS.pack boundary) - return (body, boundary) - where - formatBody content boundary = - BS.concat $ - [ crlf, dd, boundary, crlf ] - ++ [ BS.pack (show header) | header <- headers ] - ++ [ crlf - , content - , crlf, dd, boundary, dd, crlf ] - - headers = - [ Header (HdrCustom "Content-disposition") - ("form-data; name=package; " ++ - "filename=\"" ++ takeFileName path ++ "\"") - , Header HdrContentType "application/x-gzip" - ] - - crlf = BS.pack "\r\n" - dd = BS.pack "--" - -genBoundary :: IO String -genBoundary = do - i <- randomRIO (0x10000000000000,0xFFFFFFFFFFFFFF) :: IO Integer - return $ showHex i "" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/IndexUtils/Timestamp.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/IndexUtils/Timestamp.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/IndexUtils/Timestamp.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/IndexUtils/Timestamp.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,192 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE RecordWildCards #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.IndexUtils.Timestamp --- Copyright : (c) 2016 Herbert Valerio Riedel --- License : BSD3 --- --- Timestamp type used in package indexes - -module Distribution.Client.IndexUtils.Timestamp - ( Timestamp - , nullTimestamp - , epochTimeToTimestamp - , timestampToUTCTime - , utcTimeToTimestamp - , maximumTimestamp - - , IndexState(..) - ) where - -import qualified Codec.Archive.Tar.Entry as Tar -import Control.DeepSeq -import Control.Monad -import Data.Char (isDigit) -import Data.Int (Int64) -import Data.Time (UTCTime (..), fromGregorianValid, - makeTimeOfDayValid, showGregorian, - timeOfDayToTime, timeToTimeOfDay) -import Data.Time.Clock.POSIX (posixSecondsToUTCTime, - utcTimeToPOSIXSeconds) -import Distribution.Compat.Binary -import qualified Distribution.Compat.ReadP as ReadP -import Distribution.Text -import qualified Text.PrettyPrint as Disp -import GHC.Generics (Generic) - --- | UNIX timestamp (expressed in seconds since unix epoch, i.e. 1970). 
-newtype Timestamp = TS Int64 -- Tar.EpochTime - deriving (Eq,Ord,Enum,NFData,Show) - -epochTimeToTimestamp :: Tar.EpochTime -> Maybe Timestamp -epochTimeToTimestamp et - | ts == nullTimestamp = Nothing - | otherwise = Just ts - where - ts = TS et - -timestampToUTCTime :: Timestamp -> Maybe UTCTime -timestampToUTCTime (TS t) - | t == minBound = Nothing - | otherwise = Just $ posixSecondsToUTCTime (fromIntegral t) - -utcTimeToTimestamp :: UTCTime -> Maybe Timestamp -utcTimeToTimestamp utct - | minTime <= t, t <= maxTime = Just (TS (fromIntegral t)) - | otherwise = Nothing - where - maxTime = toInteger (maxBound :: Int64) - minTime = toInteger (succ minBound :: Int64) - t :: Integer - t = round . utcTimeToPOSIXSeconds $ utct - --- | Compute the maximum 'Timestamp' value --- --- Returns 'nullTimestamp' for the empty list. Also note that --- 'nullTimestamp' compares as smaller to all non-'nullTimestamp' --- values. -maximumTimestamp :: [Timestamp] -> Timestamp -maximumTimestamp [] = nullTimestamp -maximumTimestamp xs@(_:_) = maximum xs - --- returns 'Nothing' if not representable as 'Timestamp' -posixSecondsToTimestamp :: Integer -> Maybe Timestamp -posixSecondsToTimestamp pt - | minTs <= pt, pt <= maxTs = Just (TS (fromInteger pt)) - | otherwise = Nothing - where - maxTs = toInteger (maxBound :: Int64) - minTs = toInteger (succ minBound :: Int64) - --- | Pretty-prints 'Timestamp' in ISO8601/RFC3339 format --- (e.g. @"2017-12-31T23:59:59Z"@) --- --- Returns empty string for 'nullTimestamp' in order for --- --- > null (display nullTimestamp) == True --- --- to hold. -showTimestamp :: Timestamp -> String -showTimestamp ts = case timestampToUTCTime ts of - Nothing -> "" - -- Note: we don't use 'formatTime' here to avoid incurring a - -- dependency on 'old-locale' for older `time` libs - Just UTCTime{..} -> showGregorian utctDay ++ ('T':showTOD utctDayTime) ++ "Z" - where - showTOD = show . 
timeToTimeOfDay - -instance Binary Timestamp where - put (TS t) = put t - get = TS `fmap` get - -instance Text Timestamp where - disp = Disp.text . showTimestamp - - parse = parsePosix ReadP.+++ parseUTC - where - -- | Parses unix timestamps, e.g. @"\@1474626019"@ - parsePosix = do - _ <- ReadP.char '@' - t <- parseInteger - maybe ReadP.pfail return $ posixSecondsToTimestamp t - - -- | Parses ISO8601/RFC3339-style UTC timestamps, - -- e.g. @"2017-12-31T23:59:59Z"@ - -- - -- TODO: support numeric tz offsets; allow to leave off seconds - parseUTC = do - -- Note: we don't use 'Data.Time.Format.parseTime' here since - -- we want more control over the accepted formats. - - ye <- parseYear - _ <- ReadP.char '-' - mo <- parseTwoDigits - _ <- ReadP.char '-' - da <- parseTwoDigits - _ <- ReadP.char 'T' - - utctDay <- maybe ReadP.pfail return $ - fromGregorianValid ye mo da - - ho <- parseTwoDigits - _ <- ReadP.char ':' - mi <- parseTwoDigits - _ <- ReadP.char ':' - se <- parseTwoDigits - _ <- ReadP.char 'Z' - - utctDayTime <- maybe ReadP.pfail (return . timeOfDayToTime) $ - makeTimeOfDayValid ho mi (realToFrac (se::Int)) - - maybe ReadP.pfail return $ utcTimeToTimestamp (UTCTime{..}) - - parseTwoDigits = do - d1 <- ReadP.satisfy isDigit - d2 <- ReadP.satisfy isDigit - return (read [d1,d2]) - - -- A year must have at least 4 digits; e.g. "0097" is fine, - -- while "97" is not c.f. 
RFC3339 which - -- deprecates 2-digit years - parseYear = do - sign <- ReadP.option ' ' (ReadP.char '-') - ds <- ReadP.munch1 isDigit - when (length ds < 4) ReadP.pfail - return (read (sign:ds)) - - parseInteger = do - sign <- ReadP.option ' ' (ReadP.char '-') - ds <- ReadP.munch1 isDigit - return (read (sign:ds) :: Integer) - --- | Special timestamp value to be used when 'timestamp' is --- missing/unknown/invalid -nullTimestamp :: Timestamp -nullTimestamp = TS minBound - ----------------------------------------------------------------------------- --- defined here for now to avoid import cycles - --- | Specification of the state of a specific repo package index -data IndexState = IndexStateHead -- ^ Use all available entries - | IndexStateTime !Timestamp -- ^ Use all entries that existed at - -- the specified time - deriving (Eq,Generic,Show) - -instance Binary IndexState -instance NFData IndexState - -instance Text IndexState where - disp IndexStateHead = Disp.text "HEAD" - disp (IndexStateTime ts) = disp ts - - parse = parseHead ReadP.+++ parseTime - where - parseHead = do - _ <- ReadP.string "HEAD" - return IndexStateHead - - parseTime = IndexStateTime `fmap` parse diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/IndexUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/IndexUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/IndexUtils.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/IndexUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,987 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE GADTs #-} - ------------------------------------------------------------------------------ --- | --- Module : 
Distribution.Client.IndexUtils --- Copyright : (c) Duncan Coutts 2008 --- License : BSD-like --- --- Maintainer : duncan@community.haskell.org --- Stability : provisional --- Portability : portable --- --- Extra utils related to the package indexes. ------------------------------------------------------------------------------ -module Distribution.Client.IndexUtils ( - getIndexFileAge, - getInstalledPackages, - indexBaseName, - Configure.getInstalledPackagesMonitorFiles, - getSourcePackages, - getSourcePackagesMonitorFiles, - - IndexState(..), - getSourcePackagesAtIndexState, - - Index(..), - PackageEntry(..), - parsePackageIndex, - updateRepoIndexCache, - updatePackageIndexCacheFile, - writeIndexTimestamp, - currentIndexTimestamp, - readCacheStrict, -- only used by soon-to-be-obsolete sandbox code - - BuildTreeRefType(..), refTypeFromTypeCode, typeCodeFromRefType - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import qualified Codec.Archive.Tar as Tar -import qualified Codec.Archive.Tar.Entry as Tar -import qualified Codec.Archive.Tar.Index as Tar -import qualified Distribution.Client.Tar as Tar -import Distribution.Client.IndexUtils.Timestamp -import Distribution.Client.Types -import Distribution.Verbosity - -import Distribution.Package - ( PackageId, PackageIdentifier(..), mkPackageName - , Package(..), packageVersion, packageName ) -import Distribution.Types.Dependency -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import Distribution.PackageDescription - ( GenericPackageDescription(..) 
- , PackageDescription(..), emptyPackageDescription ) -import Distribution.Simple.Compiler - ( Compiler, PackageDBStack ) -import Distribution.Simple.Program - ( ProgramDb ) -import qualified Distribution.Simple.Configure as Configure - ( getInstalledPackages, getInstalledPackagesMonitorFiles ) -import Distribution.Version - ( Version, mkVersion, intersectVersionRanges ) -import Distribution.Text - ( display, simpleParse ) -import Distribution.Simple.Utils - ( die', warn, info ) -import Distribution.Client.Setup - ( RepoContext(..) ) - -import Distribution.PackageDescription.Parsec - ( parseGenericPackageDescription, parseGenericPackageDescriptionMaybe ) -import qualified Distribution.PackageDescription.Parsec as PackageDesc.Parse - -import Distribution.Solver.Types.PackageIndex (PackageIndex) -import qualified Distribution.Solver.Types.PackageIndex as PackageIndex -import Distribution.Solver.Types.SourcePackage - -import qualified Data.Map as Map -import Control.DeepSeq -import Control.Monad -import Control.Exception -import qualified Data.ByteString.Lazy as BS -import qualified Data.ByteString.Lazy.Char8 as BS.Char8 -import qualified Data.ByteString.Char8 as BSS -import Data.ByteString.Lazy (ByteString) -import Distribution.Client.GZipUtils (maybeDecompress) -import Distribution.Client.Utils ( byteStringToFilePath - , tryFindAddSourcePackageDesc ) -import Distribution.Compat.Binary -import Distribution.Compat.Exception (catchIO) -import Distribution.Compat.Time (getFileAge, getModTime) -import System.Directory (doesFileExist, doesDirectoryExist) -import System.FilePath - ( (), (<.>), takeExtension, replaceExtension, splitDirectories, normalise ) -import System.FilePath.Posix as FilePath.Posix - ( takeFileName ) -import System.IO -import System.IO.Unsafe (unsafeInterleaveIO) -import System.IO.Error (isDoesNotExistError) - -import qualified Hackage.Security.Client as Sec -import qualified Hackage.Security.Util.Some as Sec - --- | Reduced-verbosity version of 
'Configure.getInstalledPackages' -getInstalledPackages :: Verbosity -> Compiler - -> PackageDBStack -> ProgramDb - -> IO InstalledPackageIndex -getInstalledPackages verbosity comp packageDbs progdb = - Configure.getInstalledPackages verbosity' comp packageDbs progdb - where - verbosity' = lessVerbose verbosity - - --- | Get filename base (i.e. without file extension) for index-related files --- --- /Secure/ cabal repositories use a new extended & incremental --- @01-index.tar@. In order to avoid issues resulting from clobbering --- new/old-style index data, we save them locally to different names. --- --- Example: Use @indexBaseName repo <.> "tar.gz"@ to compute the 'FilePath' of the --- @00-index.tar.gz@/@01-index.tar.gz@ file. -indexBaseName :: Repo -> FilePath -indexBaseName repo = repoLocalDir repo fn - where - fn = case repo of - RepoSecure {} -> "01-index" - RepoRemote {} -> "00-index" - RepoLocal {} -> "00-index" - ------------------------------------------------------------------------- --- Reading the source package index --- - --- Note: 'data IndexState' is defined in --- "Distribution.Client.IndexUtils.Timestamp" to avoid import cycles - --- | 'IndexStateInfo' contains meta-information about the resulting --- filtered 'Cache' 'after applying 'filterCache' according to a --- requested 'IndexState'. -data IndexStateInfo = IndexStateInfo - { isiMaxTime :: !Timestamp - -- ^ 'Timestamp' of maximum/latest 'Timestamp' in the current - -- filtered view of the cache. - -- - -- The following property holds - -- - -- > filterCache (IndexState (isiMaxTime isi)) cache == (cache, isi) - -- - - , isiHeadTime :: !Timestamp - -- ^ 'Timestamp' equivalent to 'IndexStateHead', i.e. the latest - -- known 'Timestamp'; 'isiHeadTime' is always greater or equal to - -- 'isiMaxTime'. - } - -emptyStateInfo :: IndexStateInfo -emptyStateInfo = IndexStateInfo nullTimestamp nullTimestamp - --- | Filters a 'Cache' according to an 'IndexState' --- specification. 
Also returns 'IndexStateInfo' describing the --- resulting index cache. --- --- Note: 'filterCache' is idempotent in the 'Cache' value -filterCache :: IndexState -> Cache -> (Cache, IndexStateInfo) -filterCache IndexStateHead cache = (cache, IndexStateInfo{..}) - where - isiMaxTime = cacheHeadTs cache - isiHeadTime = cacheHeadTs cache -filterCache (IndexStateTime ts0) cache0 = (cache, IndexStateInfo{..}) - where - cache = Cache { cacheEntries = ents, cacheHeadTs = isiMaxTime } - isiHeadTime = cacheHeadTs cache0 - isiMaxTime = maximumTimestamp (map cacheEntryTimestamp ents) - ents = filter ((<= ts0) . cacheEntryTimestamp) (cacheEntries cache0) - --- | Read a repository index from disk, from the local files specified by --- a list of 'Repo's. --- --- All the 'SourcePackage's are marked as having come from the appropriate --- 'Repo'. --- --- This is a higher level wrapper used internally in cabal-install. -getSourcePackages :: Verbosity -> RepoContext -> IO SourcePackageDb -getSourcePackages verbosity repoCtxt = - getSourcePackagesAtIndexState verbosity repoCtxt Nothing - --- | Variant of 'getSourcePackages' which allows getting the source --- packages at a particular 'IndexState'. --- --- Current choices are either the latest (aka HEAD), or the index as --- it was at a particular time. --- --- TODO: Enhance to allow specifying per-repo 'IndexState's and also --- report back per-repo 'IndexStateInfo's (in order for @new-freeze@ --- to access it) -getSourcePackagesAtIndexState :: Verbosity -> RepoContext -> Maybe IndexState - -> IO SourcePackageDb -getSourcePackagesAtIndexState verbosity repoCtxt _ - | null (repoContextRepos repoCtxt) = do - -- In the test suite, we routinely don't have any remote package - -- servers, so don't bleat about it - warn (verboseUnmarkOutput verbosity) $ - "No remote package servers have been specified. Usually " ++ - "you would have one specified in the config file." 
- return SourcePackageDb { - packageIndex = mempty, - packagePreferences = mempty - } -getSourcePackagesAtIndexState verbosity repoCtxt mb_idxState = do - let describeState IndexStateHead = "most recent state" - describeState (IndexStateTime time) = "historical state as of " ++ display time - - pkgss <- forM (repoContextRepos repoCtxt) $ \r -> do - let rname = maybe "" remoteRepoName $ maybeRepoRemote r - info verbosity ("Reading available packages of " ++ rname ++ "...") - - idxState <- case mb_idxState of - Just idxState -> do - info verbosity $ "Using " ++ describeState idxState ++ - " as explicitly requested (via command line / project configuration)" - return idxState - Nothing -> do - mb_idxState' <- readIndexTimestamp (RepoIndex repoCtxt r) - case mb_idxState' of - Nothing -> do - info verbosity "Using most recent state (could not read timestamp file)" - return IndexStateHead - Just idxState -> do - info verbosity $ "Using " ++ describeState idxState ++ - " specified from most recent cabal update" - return idxState - - unless (idxState == IndexStateHead) $ - case r of - RepoLocal path -> warn verbosity ("index-state ignored for old-format repositories (local repository '" ++ path ++ "')") - RepoRemote {} -> warn verbosity ("index-state ignored for old-format (remote repository '" ++ rname ++ "')") - RepoSecure {} -> pure () - - let idxState' = case r of - RepoSecure {} -> idxState - _ -> IndexStateHead - - (pis,deps,isi) <- readRepoIndex verbosity repoCtxt r idxState' - - case idxState' of - IndexStateHead -> do - info verbosity ("index-state("++rname++") = " ++ - display (isiHeadTime isi)) - return () - IndexStateTime ts0 -> do - when (isiMaxTime isi /= ts0) $ - if ts0 > isiMaxTime isi - then warn verbosity $ - "Requested index-state" ++ display ts0 - ++ " is newer than '" ++ rname ++ "'!" - ++ " Falling back to older state (" - ++ display (isiMaxTime isi) ++ ")." 
- else info verbosity $ - "Requested index-state " ++ display ts0 - ++ " does not exist in '"++rname++"'!" - ++ " Falling back to older state (" - ++ display (isiMaxTime isi) ++ ")." - info verbosity ("index-state("++rname++") = " ++ - display (isiMaxTime isi) ++ " (HEAD = " ++ - display (isiHeadTime isi) ++ ")") - - pure (pis,deps) - - let (pkgs, prefs) = mconcat pkgss - prefs' = Map.fromListWith intersectVersionRanges - [ (name, range) | Dependency name range <- prefs ] - _ <- evaluate pkgs - _ <- evaluate prefs' - return SourcePackageDb { - packageIndex = pkgs, - packagePreferences = prefs' - } - -readCacheStrict :: NFData pkg => Verbosity -> Index -> (PackageEntry -> pkg) -> IO ([pkg], [Dependency]) -readCacheStrict verbosity index mkPkg = do - updateRepoIndexCache verbosity index - cache <- readIndexCache verbosity index - withFile (indexFile index) ReadMode $ \indexHnd -> - evaluate . force =<< packageListFromCache verbosity mkPkg indexHnd cache - --- | Read a repository index from disk, from the local file specified by --- the 'Repo'. --- --- All the 'SourcePackage's are marked as having come from the given 'Repo'. --- --- This is a higher level wrapper used internally in cabal-install. 
--- -readRepoIndex :: Verbosity -> RepoContext -> Repo -> IndexState - -> IO (PackageIndex UnresolvedSourcePackage, [Dependency], IndexStateInfo) -readRepoIndex verbosity repoCtxt repo idxState = - handleNotFound $ do - warnIfIndexIsOld =<< getIndexFileAge repo - updateRepoIndexCache verbosity (RepoIndex repoCtxt repo) - readPackageIndexCacheFile verbosity mkAvailablePackage - (RepoIndex repoCtxt repo) - idxState - - where - mkAvailablePackage pkgEntry = - SourcePackage { - packageInfoId = pkgid, - packageDescription = packageDesc pkgEntry, - packageSource = case pkgEntry of - NormalPackage _ _ _ _ -> RepoTarballPackage repo pkgid Nothing - BuildTreeRef _ _ _ path _ -> LocalUnpackedPackage path, - packageDescrOverride = case pkgEntry of - NormalPackage _ _ pkgtxt _ -> Just pkgtxt - _ -> Nothing - } - where - pkgid = packageId pkgEntry - - handleNotFound action = catchIO action $ \e -> if isDoesNotExistError e - then do - case repo of - RepoRemote{..} -> warn verbosity $ errMissingPackageList repoRemote - RepoSecure{..} -> warn verbosity $ errMissingPackageList repoRemote - RepoLocal{..} -> warn verbosity $ - "The package list for the local repo '" ++ repoLocalDir - ++ "' is missing. The repo is invalid." - return (mempty,mempty,emptyStateInfo) - else ioError e - - isOldThreshold = 15 --days - warnIfIndexIsOld dt = do - when (dt >= isOldThreshold) $ case repo of - RepoRemote{..} -> warn verbosity $ errOutdatedPackageList repoRemote dt - RepoSecure{..} -> warn verbosity $ errOutdatedPackageList repoRemote dt - RepoLocal{..} -> return () - - errMissingPackageList repoRemote = - "The package list for '" ++ remoteRepoName repoRemote - ++ "' does not exist. Run 'cabal update' to download it." - errOutdatedPackageList repoRemote dt = - "The package list for '" ++ remoteRepoName repoRemote - ++ "' is " ++ shows (floor dt :: Int) " days old.\nRun " - ++ "'cabal update' to get the latest list of available packages." 
- --- | Return the age of the index file in days (as a Double). -getIndexFileAge :: Repo -> IO Double -getIndexFileAge repo = getFileAge $ indexBaseName repo <.> "tar" - --- | A set of files (or directories) that can be monitored to detect when --- there might have been a change in the source packages. --- -getSourcePackagesMonitorFiles :: [Repo] -> [FilePath] -getSourcePackagesMonitorFiles repos = - concat [ [ indexBaseName repo <.> "cache" - , indexBaseName repo <.> "timestamp" ] - | repo <- repos ] - --- | It is not necessary to call this, as the cache will be updated when the --- index is read normally. However you can do the work earlier if you like. --- -updateRepoIndexCache :: Verbosity -> Index -> IO () -updateRepoIndexCache verbosity index = - whenCacheOutOfDate index $ do - updatePackageIndexCacheFile verbosity index - -whenCacheOutOfDate :: Index -> IO () -> IO () -whenCacheOutOfDate index action = do - exists <- doesFileExist $ cacheFile index - if not exists - then action - else do - indexTime <- getModTime $ indexFile index - cacheTime <- getModTime $ cacheFile index - when (indexTime > cacheTime) action - ------------------------------------------------------------------------- --- Reading the index file --- - --- | An index entry is either a normal package, or a local build tree reference. -data PackageEntry = - NormalPackage PackageId GenericPackageDescription ByteString BlockNo - | BuildTreeRef BuildTreeRefType - PackageId GenericPackageDescription FilePath BlockNo - --- | A build tree reference is either a link or a snapshot. 
-data BuildTreeRefType = SnapshotRef | LinkRef - deriving (Eq,Generic) - -instance Binary BuildTreeRefType - -refTypeFromTypeCode :: Tar.TypeCode -> BuildTreeRefType -refTypeFromTypeCode t - | t == Tar.buildTreeRefTypeCode = LinkRef - | t == Tar.buildTreeSnapshotTypeCode = SnapshotRef - | otherwise = - error "Distribution.Client.IndexUtils.refTypeFromTypeCode: unknown type code" - -typeCodeFromRefType :: BuildTreeRefType -> Tar.TypeCode -typeCodeFromRefType LinkRef = Tar.buildTreeRefTypeCode -typeCodeFromRefType SnapshotRef = Tar.buildTreeSnapshotTypeCode - -instance Package PackageEntry where - packageId (NormalPackage pkgid _ _ _) = pkgid - packageId (BuildTreeRef _ pkgid _ _ _) = pkgid - -packageDesc :: PackageEntry -> GenericPackageDescription -packageDesc (NormalPackage _ descr _ _) = descr -packageDesc (BuildTreeRef _ _ descr _ _) = descr - --- | Parse an uncompressed \"00-index.tar\" repository index file represented --- as a 'ByteString'. --- - -data PackageOrDep = Pkg PackageEntry | Dep Dependency - --- | Read @00-index.tar.gz@ and extract @.cabal@ and @preferred-versions@ files --- --- We read the index using 'Tar.read', which gives us a lazily constructed --- 'TarEntries'. We translate it to a list of entries using 'tarEntriesList', --- which preserves the lazy nature of 'TarEntries', and finally 'concatMap' a --- function over this to translate it to a list of IO actions returning --- 'PackageOrDep's. We can use 'lazySequence' to turn this into a list of --- 'PackageOrDep's, still maintaining the lazy nature of the original tar read. -parsePackageIndex :: Verbosity -> ByteString -> [IO (Maybe PackageOrDep)] -parsePackageIndex verbosity = concatMap (uncurry extract) . tarEntriesList . 
Tar.read - where - extract :: BlockNo -> Tar.Entry -> [IO (Maybe PackageOrDep)] - extract blockNo entry = tryExtractPkg ++ tryExtractPrefs - where - tryExtractPkg = do - mkPkgEntry <- maybeToList $ extractPkg verbosity entry blockNo - return $ fmap (fmap Pkg) mkPkgEntry - - tryExtractPrefs = do - prefs' <- maybeToList $ extractPrefs entry - fmap (return . Just . Dep) prefs' - --- | Turn the 'Entries' data structure from the @tar@ package into a list, --- and pair each entry with its block number. --- --- NOTE: This preserves the lazy nature of 'Entries': the tar file is only read --- as far as the list is evaluated. -tarEntriesList :: Show e => Tar.Entries e -> [(BlockNo, Tar.Entry)] -tarEntriesList = go 0 - where - go !_ Tar.Done = [] - go !_ (Tar.Fail e) = error ("tarEntriesList: " ++ show e) - go !n (Tar.Next e es') = (n, e) : go (Tar.nextEntryOffset e n) es' - -extractPkg :: Verbosity -> Tar.Entry -> BlockNo -> Maybe (IO (Maybe PackageEntry)) -extractPkg verbosity entry blockNo = case Tar.entryContent entry of - Tar.NormalFile content _ - | takeExtension fileName == ".cabal" - -> case splitDirectories (normalise fileName) of - [pkgname,vers,_] -> case simpleParse vers of - Just ver -> Just . return $ Just (NormalPackage pkgid descr content blockNo) - where - pkgid = PackageIdentifier (mkPackageName pkgname) ver - parsed = parseGenericPackageDescriptionMaybe (BS.toStrict content) - descr = case parsed of - Just d -> d - Nothing -> error $ "Couldn't read cabal file " - ++ show fileName - _ -> Nothing - _ -> Nothing - - Tar.OtherEntryType typeCode content _ - | Tar.isBuildTreeRefTypeCode typeCode -> - Just $ do - let path = byteStringToFilePath content - dirExists <- doesDirectoryExist path - result <- if not dirExists then return Nothing - else do - cabalFile <- tryFindAddSourcePackageDesc verbosity path "Error reading package index." - descr <- PackageDesc.Parse.readGenericPackageDescription normal cabalFile - return . 
Just $ BuildTreeRef (refTypeFromTypeCode typeCode) (packageId descr) - descr path blockNo - return result - - _ -> Nothing - - where - fileName = Tar.entryPath entry - -extractPrefs :: Tar.Entry -> Maybe [Dependency] -extractPrefs entry = case Tar.entryContent entry of - Tar.NormalFile content _ - | takeFileName entrypath == "preferred-versions" - -> Just prefs - where - entrypath = Tar.entryPath entry - prefs = parsePreferredVersions content - _ -> Nothing - -parsePreferredVersions :: ByteString -> [Dependency] -parsePreferredVersions = mapMaybe simpleParse - . filter (not . isPrefixOf "--") - . lines - . BS.Char8.unpack -- TODO: Are we sure no unicode? - ------------------------------------------------------------------------- --- Reading and updating the index cache --- - --- | Variation on 'sequence' which evaluates the actions lazily --- --- Pattern matching on the result list will execute just the first action; --- more generally pattern matching on the first @n@ '(:)' nodes will execute --- the first @n@ actions. -lazySequence :: [IO a] -> IO [a] -lazySequence = unsafeInterleaveIO . go - where - go [] = return [] - go (x:xs) = do x' <- x - xs' <- lazySequence xs - return (x' : xs') - --- | A lazy unfolder for lookup operations which return the current --- value and (possibly) the next key -lazyUnfold :: (k -> IO (v, Maybe k)) -> k -> IO [(k,v)] -lazyUnfold step = goLazy . Just - where - goLazy s = unsafeInterleaveIO (go s) - - go Nothing = return [] - go (Just k) = do - (v, mk') <- step k - vs' <- goLazy mk' - return ((k,v):vs') - --- | Which index do we mean? 
-data Index = - -- | The main index for the specified repository - RepoIndex RepoContext Repo - - -- | A sandbox-local repository - -- Argument is the location of the index file - | SandboxIndex FilePath - -indexFile :: Index -> FilePath -indexFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "tar" -indexFile (SandboxIndex index) = index - -cacheFile :: Index -> FilePath -cacheFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "cache" -cacheFile (SandboxIndex index) = index `replaceExtension` "cache" - -timestampFile :: Index -> FilePath -timestampFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "timestamp" -timestampFile (SandboxIndex index) = index `replaceExtension` "timestamp" - --- | Return 'True' if 'Index' uses 01-index format (aka secure repo) -is01Index :: Index -> Bool -is01Index (RepoIndex _ repo) = case repo of - RepoSecure {} -> True - RepoRemote {} -> False - RepoLocal {} -> False -is01Index (SandboxIndex _) = False - - -updatePackageIndexCacheFile :: Verbosity -> Index -> IO () -updatePackageIndexCacheFile verbosity index = do - info verbosity ("Updating index cache file " ++ cacheFile index ++ " ...") - withIndexEntries verbosity index $ \entries -> do - let !maxTs = maximumTimestamp (map cacheEntryTimestamp entries) - cache = Cache { cacheHeadTs = maxTs - , cacheEntries = entries - } - writeIndexCache index cache - info verbosity ("Index cache updated to index-state " - ++ display (cacheHeadTs cache)) - --- | Read the index (for the purpose of building a cache) --- --- The callback is provided with list of cache entries, which is guaranteed to --- be lazily constructed. This list must ONLY be used in the scope of the --- callback; when the callback is terminated the file handle to the index will --- be closed and further attempts to read from the list will result in (pure) --- I/O exceptions. 
--- --- In the construction of the index for a secure repo we take advantage of the --- index built by the @hackage-security@ library to avoid reading the @.tar@ --- file as much as possible (we need to read it only to extract preferred --- versions). This helps performance, but is also required for correctness: --- the new @01-index.tar.gz@ may have multiple versions of preferred-versions --- files, and 'parsePackageIndex' does not correctly deal with that (see #2956); --- by reading the already-built cache from the security library we will be sure --- to only read the latest versions of all files. --- --- TODO: It would be nicer if we actually incrementally updated @cabal@'s --- cache, rather than reconstruct it from zero on each update. However, this --- would require a change in the cache format. -withIndexEntries :: Verbosity -> Index -> ([IndexCacheEntry] -> IO a) -> IO a -withIndexEntries _ (RepoIndex repoCtxt repo@RepoSecure{..}) callback = - repoContextWithSecureRepo repoCtxt repo $ \repoSecure -> - Sec.withIndex repoSecure $ \Sec.IndexCallbacks{..} -> do - -- Incrementally (lazily) read all the entries in the tar file in order, - -- including all revisions, not just the last revision of each file - indexEntries <- lazyUnfold indexLookupEntry (Sec.directoryFirst indexDirectory) - callback [ cacheEntry - | (dirEntry, indexEntry) <- indexEntries - , cacheEntry <- toCacheEntries dirEntry indexEntry ] - where - toCacheEntries :: Sec.DirectoryEntry -> Sec.Some Sec.IndexEntry - -> [IndexCacheEntry] - toCacheEntries dirEntry (Sec.Some sie) = - case Sec.indexEntryPathParsed sie of - Nothing -> [] -- skip unrecognized file - Just (Sec.IndexPkgMetadata _pkgId) -> [] -- skip metadata - Just (Sec.IndexPkgCabal pkgId) -> force - [CachePackageId pkgId blockNo timestamp] - Just (Sec.IndexPkgPrefs _pkgName) -> force - [ CachePreference dep blockNo timestamp - | dep <- parsePreferredVersions (Sec.indexEntryContent sie) - ] - where - blockNo = Sec.directoryEntryBlockNo 
dirEntry - timestamp = fromMaybe (error "withIndexEntries: invalid timestamp") $ - epochTimeToTimestamp $ Sec.indexEntryTime sie - -withIndexEntries verbosity index callback = do -- non-secure repositories - withFile (indexFile index) ReadMode $ \h -> do - bs <- maybeDecompress `fmap` BS.hGetContents h - pkgsOrPrefs <- lazySequence $ parsePackageIndex verbosity bs - callback $ map toCache (catMaybes pkgsOrPrefs) - where - toCache :: PackageOrDep -> IndexCacheEntry - toCache (Pkg (NormalPackage pkgid _ _ blockNo)) = CachePackageId pkgid blockNo nullTimestamp - toCache (Pkg (BuildTreeRef refType _ _ _ blockNo)) = CacheBuildTreeRef refType blockNo - toCache (Dep d) = CachePreference d 0 nullTimestamp - -readPackageIndexCacheFile :: Package pkg - => Verbosity - -> (PackageEntry -> pkg) - -> Index - -> IndexState - -> IO (PackageIndex pkg, [Dependency], IndexStateInfo) -readPackageIndexCacheFile verbosity mkPkg index idxState = do - cache0 <- readIndexCache verbosity index - indexHnd <- openFile (indexFile index) ReadMode - let (cache,isi) = filterCache idxState cache0 - (pkgs,deps) <- packageIndexFromCache verbosity mkPkg indexHnd cache - pure (pkgs,deps,isi) - - -packageIndexFromCache :: Package pkg - => Verbosity - -> (PackageEntry -> pkg) - -> Handle - -> Cache - -> IO (PackageIndex pkg, [Dependency]) -packageIndexFromCache verbosity mkPkg hnd cache = do - (pkgs, prefs) <- packageListFromCache verbosity mkPkg hnd cache - pkgIndex <- evaluate $ PackageIndex.fromList pkgs - return (pkgIndex, prefs) - --- | Read package list --- --- The result package releases and preference entries are guaranteed --- to be unique. --- --- Note: 01-index.tar is an append-only index and therefore contains --- all .cabal edits and preference-updates. The masking happens --- here, i.e. the semantics that later entries in a tar file mask --- earlier ones is resolved in this function. 
-packageListFromCache :: Verbosity - -> (PackageEntry -> pkg) - -> Handle - -> Cache - -> IO ([pkg], [Dependency]) -packageListFromCache verbosity mkPkg hnd Cache{..} = accum mempty [] mempty cacheEntries - where - accum !srcpkgs btrs !prefs [] = return (Map.elems srcpkgs ++ btrs, Map.elems prefs) - - accum srcpkgs btrs prefs (CachePackageId pkgid blockno _ : entries) = do - -- Given the cache entry, make a package index entry. - -- The magic here is that we use lazy IO to read the .cabal file - -- from the index tarball if it turns out that we need it. - -- Most of the time we only need the package id. - ~(pkg, pkgtxt) <- unsafeInterleaveIO $ do - pkgtxt <- getEntryContent blockno - pkg <- readPackageDescription pkgid pkgtxt - return (pkg, pkgtxt) - - let srcpkg = mkPkg (NormalPackage pkgid pkg pkgtxt blockno) - accum (Map.insert pkgid srcpkg srcpkgs) btrs prefs entries - - accum srcpkgs btrs prefs (CacheBuildTreeRef refType blockno : entries) = do - -- We have to read the .cabal file eagerly here because we can't cache the - -- package id for build tree references - the user might edit the .cabal - -- file after the reference was added to the index. - path <- liftM byteStringToFilePath . getEntryContent $ blockno - pkg <- do let err = "Error reading package index from cache." 
- file <- tryFindAddSourcePackageDesc verbosity path err - PackageDesc.Parse.readGenericPackageDescription normal file - let srcpkg = mkPkg (BuildTreeRef refType (packageId pkg) pkg path blockno) - accum srcpkgs (srcpkg:btrs) prefs entries - - accum srcpkgs btrs prefs (CachePreference pref@(Dependency pn _) _ _ : entries) = - accum srcpkgs btrs (Map.insert pn pref prefs) entries - - getEntryContent :: BlockNo -> IO ByteString - getEntryContent blockno = do - entry <- Tar.hReadEntry hnd blockno - case Tar.entryContent entry of - Tar.NormalFile content _size -> return content - Tar.OtherEntryType typecode content _size - | Tar.isBuildTreeRefTypeCode typecode - -> return content - _ -> interror "unexpected tar entry type" - - readPackageDescription :: PackageIdentifier -> ByteString -> IO GenericPackageDescription - readPackageDescription pkgid content = - case snd $ PackageDesc.Parse.runParseResult $ parseGenericPackageDescription $ BS.toStrict content of - Right gpd -> return gpd - Left (Just specVer, _) | specVer >= mkVersion [2,2] -> return (dummyPackageDescription specVer) - Left _ -> interror "failed to parse .cabal file" - where - dummyPackageDescription :: Version -> GenericPackageDescription - dummyPackageDescription specVer = GenericPackageDescription - { packageDescription = emptyPackageDescription - { specVersionRaw = Left specVer - , package = pkgid - , synopsis = dummySynopsis - } - , genPackageFlags = [] - , condLibrary = Nothing - , condSubLibraries = [] - , condForeignLibs = [] - , condExecutables = [] - , condTestSuites = [] - , condBenchmarks = [] - } - - dummySynopsis = "" - - interror :: String -> IO a - interror msg = die' verbosity $ "internal error when reading package index: " ++ msg - ++ "The package index or index cache is probably " - ++ "corrupt. Running cabal update might fix it." 
- - - ------------------------------------------------------------------------- --- Index cache data structure --- - --- | Read the 'Index' cache from the filesystem --- --- If a corrupted index cache is detected this function regenerates --- the index cache and then reattempt to read the index once (and --- 'die's if it fails again). -readIndexCache :: Verbosity -> Index -> IO Cache -readIndexCache verbosity index = do - cacheOrFail <- readIndexCache' index - case cacheOrFail of - Left msg -> do - warn verbosity $ concat - [ "Parsing the index cache failed (", msg, "). " - , "Trying to regenerate the index cache..." - ] - - updatePackageIndexCacheFile verbosity index - - either (die' verbosity) (return . hashConsCache) =<< readIndexCache' index - - Right res -> return (hashConsCache res) - --- | Read the 'Index' cache from the filesystem without attempting to --- regenerate on parsing failures. -readIndexCache' :: Index -> IO (Either String Cache) -readIndexCache' index - | is01Index index = decodeFileOrFail' (cacheFile index) - | otherwise = liftM (Right .read00IndexCache) $ - BSS.readFile (cacheFile index) - --- | Write the 'Index' cache to the filesystem -writeIndexCache :: Index -> Cache -> IO () -writeIndexCache index cache - | is01Index index = encodeFile (cacheFile index) cache - | otherwise = writeFile (cacheFile index) (show00IndexCache cache) - --- | Write the 'IndexState' to the filesystem -writeIndexTimestamp :: Index -> IndexState -> IO () -writeIndexTimestamp index st - = writeFile (timestampFile index) (display st) - --- | Read out the "current" index timestamp, i.e., what --- timestamp you would use to revert to this version -currentIndexTimestamp :: Verbosity -> RepoContext -> Repo -> IO Timestamp -currentIndexTimestamp verbosity repoCtxt r = do - mb_is <- readIndexTimestamp (RepoIndex repoCtxt r) - case mb_is of - Just (IndexStateTime ts) -> return ts - _ -> do - (_,_,isi) <- readRepoIndex verbosity repoCtxt r IndexStateHead - return (isiHeadTime 
isi) - --- | Read the 'IndexState' from the filesystem -readIndexTimestamp :: Index -> IO (Maybe IndexState) -readIndexTimestamp index - = fmap simpleParse (readFile (timestampFile index)) - `catchIO` \e -> - if isDoesNotExistError e - then return Nothing - else ioError e - --- | Optimise sharing of equal values inside 'Cache' --- --- c.f. https://en.wikipedia.org/wiki/Hash_consing -hashConsCache :: Cache -> Cache -hashConsCache cache0 - = cache0 { cacheEntries = go mempty mempty (cacheEntries cache0) } - where - -- TODO/NOTE: - -- - -- If/when we redo the binary serialisation via e.g. CBOR and we - -- are able to use incremental decoding, we may want to move the - -- hash-consing into the incremental deserialisation, or - -- alterantively even do something like - -- http://cbor.schmorp.de/value-sharing - -- - go _ _ [] = [] - -- for now we only optimise only CachePackageIds since those - -- represent the vast majority - go !pns !pvs (CachePackageId pid bno ts : rest) - = CachePackageId pid' bno ts : go pns' pvs' rest - where - !pid' = PackageIdentifier pn' pv' - (!pn',!pns') = mapIntern pn pns - (!pv',!pvs') = mapIntern pv pvs - PackageIdentifier pn pv = pid - - go pns pvs (x:xs) = x : go pns pvs xs - - mapIntern :: Ord k => k -> Map.Map k k -> (k,Map.Map k k) - mapIntern k m = maybe (k,Map.insert k k m) (\k' -> (k',m)) (Map.lookup k m) - --- | Cabal caches various information about the Hackage index -data Cache = Cache - { cacheHeadTs :: Timestamp - -- ^ maximum/latest 'Timestamp' among 'cacheEntries'; unless the - -- invariant of 'cacheEntries' being in chronological order is - -- violated, this corresponds to the last (seen) 'Timestamp' in - -- 'cacheEntries' - , cacheEntries :: [IndexCacheEntry] - } - -instance NFData Cache where - rnf = rnf . cacheEntries - --- | Tar files are block structured with 512 byte blocks. Every header and file --- content starts on a block boundary. 
--- -type BlockNo = Word32 -- Tar.TarEntryOffset - - -data IndexCacheEntry - = CachePackageId PackageId !BlockNo !Timestamp - | CachePreference Dependency !BlockNo !Timestamp - | CacheBuildTreeRef !BuildTreeRefType !BlockNo - -- NB: CacheBuildTreeRef is irrelevant for 01-index & new-build - deriving (Eq,Generic) - -instance NFData IndexCacheEntry where - rnf (CachePackageId pkgid _ _) = rnf pkgid - rnf (CachePreference dep _ _) = rnf dep - rnf (CacheBuildTreeRef _ _) = () - -cacheEntryTimestamp :: IndexCacheEntry -> Timestamp -cacheEntryTimestamp (CacheBuildTreeRef _ _) = nullTimestamp -cacheEntryTimestamp (CachePreference _ _ ts) = ts -cacheEntryTimestamp (CachePackageId _ _ ts) = ts - ----------------------------------------------------------------------------- --- new binary 01-index.cache format - -instance Binary Cache where - put (Cache headTs ents) = do - -- magic / format version - -- - -- NB: this currently encodes word-size implicitly; when we - -- switch to CBOR encoding, we will have a platform - -- independent binary encoding - put (0xcaba1002::Word) - put headTs - put ents - - get = do - magic <- get - when (magic /= (0xcaba1002::Word)) $ - fail ("01-index.cache: unexpected magic marker encountered: " ++ show magic) - Cache <$> get <*> get - -instance Binary IndexCacheEntry - ----------------------------------------------------------------------------- --- legacy 00-index.cache format - -packageKey, blocknoKey, buildTreeRefKey, preferredVersionKey :: String -packageKey = "pkg:" -blocknoKey = "b#" -buildTreeRefKey = "build-tree-ref:" -preferredVersionKey = "pref-ver:" - --- legacy 00-index.cache format -read00IndexCache :: BSS.ByteString -> Cache -read00IndexCache bs = Cache - { cacheHeadTs = nullTimestamp - , cacheEntries = mapMaybe read00IndexCacheEntry $ BSS.lines bs - } - -read00IndexCacheEntry :: BSS.ByteString -> Maybe IndexCacheEntry -read00IndexCacheEntry = \line -> - case BSS.words line of - [key, pkgnamestr, pkgverstr, sep, blocknostr] - | 
key == BSS.pack packageKey && sep == BSS.pack blocknoKey -> - case (parseName pkgnamestr, parseVer pkgverstr [], - parseBlockNo blocknostr) of - (Just pkgname, Just pkgver, Just blockno) - -> Just (CachePackageId (PackageIdentifier pkgname pkgver) - blockno nullTimestamp) - _ -> Nothing - [key, typecodestr, blocknostr] | key == BSS.pack buildTreeRefKey -> - case (parseRefType typecodestr, parseBlockNo blocknostr) of - (Just refType, Just blockno) - -> Just (CacheBuildTreeRef refType blockno) - _ -> Nothing - - (key: remainder) | key == BSS.pack preferredVersionKey -> do - pref <- simpleParse (BSS.unpack (BSS.unwords remainder)) - return $ CachePreference pref 0 nullTimestamp - - _ -> Nothing - where - parseName str - | BSS.all (\c -> isAlphaNum c || c == '-') str - = Just (mkPackageName (BSS.unpack str)) - | otherwise = Nothing - - parseVer str vs = - case BSS.readInt str of - Nothing -> Nothing - Just (v, str') -> case BSS.uncons str' of - Just ('.', str'') -> parseVer str'' (v:vs) - Just _ -> Nothing - Nothing -> Just (mkVersion (reverse (v:vs))) - - parseBlockNo str = - case BSS.readInt str of - Just (blockno, remainder) - | BSS.null remainder -> Just (fromIntegral blockno) - _ -> Nothing - - parseRefType str = - case BSS.uncons str of - Just (typeCode, remainder) - | BSS.null remainder && Tar.isBuildTreeRefTypeCode typeCode - -> Just (refTypeFromTypeCode typeCode) - _ -> Nothing - --- legacy 00-index.cache format -show00IndexCache :: Cache -> String -show00IndexCache Cache{..} = unlines $ map show00IndexCacheEntry cacheEntries - -show00IndexCacheEntry :: IndexCacheEntry -> String -show00IndexCacheEntry entry = unwords $ case entry of - CachePackageId pkgid b _ -> [ packageKey - , display (packageName pkgid) - , display (packageVersion pkgid) - , blocknoKey - , show b - ] - CacheBuildTreeRef tr b -> [ buildTreeRefKey - , [typeCodeFromRefType tr] - , show b - ] - CachePreference dep _ _ -> [ preferredVersionKey - , display dep - ] diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Init/Heuristics.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Init/Heuristics.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Init/Heuristics.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Init/Heuristics.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,394 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Init.Heuristics --- Copyright : (c) Benedikt Huber 2009 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Heuristics for creating initial cabal files. --- ------------------------------------------------------------------------------ -module Distribution.Client.Init.Heuristics ( - guessPackageName, - scanForModules, SourceFileEntry(..), - neededBuildPrograms, - guessMainFileCandidates, - guessAuthorNameMail, - knownCategories, -) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Text (simpleParse) -import Distribution.Simple.Setup (Flag(..), flagToMaybe) -import Distribution.ModuleName - ( ModuleName, toFilePath ) -import qualified Distribution.Package as P -import qualified Distribution.PackageDescription as PD - ( category, packageDescription ) -import Distribution.Client.Utils - ( tryCanonicalizePath ) -import Language.Haskell.Extension ( Extension ) - -import Distribution.Solver.Types.PackageIndex - ( allPackagesByName ) -import Distribution.Solver.Types.SourcePackage - ( packageDescription ) - -import Distribution.Client.Types ( SourcePackageDb(..) 
) -import Control.Monad ( mapM ) -import Data.Char ( isNumber, isLower ) -import Data.Either ( partitionEithers ) -import Data.List ( isInfixOf ) -import Data.Ord ( comparing ) -import qualified Data.Set as Set ( fromList, toList ) -import System.Directory ( getCurrentDirectory, getDirectoryContents, - doesDirectoryExist, doesFileExist, getHomeDirectory, ) -import Distribution.Compat.Environment ( getEnvironment ) -import System.FilePath ( takeExtension, takeBaseName, dropExtension, - (), (<.>), splitDirectories, makeRelative ) - -import Distribution.Client.Init.Types ( InitFlags(..) ) -import Distribution.Client.Compat.Process ( readProcessWithExitCode ) -import System.Exit ( ExitCode(..) ) - --- | Return a list of candidate main files for this executable: top-level --- modules including the word 'Main' in the file name. The list is sorted in --- order of preference, shorter file names are preferred. 'Right's are existing --- candidates and 'Left's are those that do not yet exist. -guessMainFileCandidates :: InitFlags -> IO [Either FilePath FilePath] -guessMainFileCandidates flags = do - dir <- - maybe getCurrentDirectory return (flagToMaybe $ packageDir flags) - files <- getDirectoryContents dir - let existingCandidates = filter isMain files - -- We always want to give the user at least one default choice. If either - -- Main.hs or Main.lhs has already been created, then we don't want to - -- suggest the other; however, if neither has been created, then we - -- suggest both. - newCandidates = - if any (`elem` existingCandidates) ["Main.hs", "Main.lhs"] - then [] - else ["Main.hs", "Main.lhs"] - candidates = - sortBy (\x y -> comparing (length . either id id) x y - `mappend` compare x y) - (map Left newCandidates ++ map Right existingCandidates) - return candidates - - where - isMain f = (isInfixOf "Main" f || isInfixOf "main" f) - && (isSuffixOf ".hs" f || isSuffixOf ".lhs" f) - --- | Guess the package name based on the given root directory. 
-guessPackageName :: FilePath -> IO P.PackageName -guessPackageName = liftM (P.mkPackageName . repair . last . splitDirectories) - . tryCanonicalizePath - where - -- Treat each span of non-alphanumeric characters as a hyphen. Each - -- hyphenated component of a package name must contain at least one - -- alphabetic character. An arbitrary character ('x') will be prepended if - -- this is not the case for the first component, and subsequent components - -- will simply be run together. For example, "1+2_foo-3" will become - -- "x12-foo3". - repair = repair' ('x' :) id - repair' invalid valid x = case dropWhile (not . isAlphaNum) x of - "" -> repairComponent "" - x' -> let (c, r) = first repairComponent $ break (not . isAlphaNum) x' - in c ++ repairRest r - where - repairComponent c | all isNumber c = invalid c - | otherwise = valid c - repairRest = repair' id ('-' :) - --- |Data type of source files found in the working directory -data SourceFileEntry = SourceFileEntry - { relativeSourcePath :: FilePath - , moduleName :: ModuleName - , fileExtension :: String - , imports :: [ModuleName] - , extensions :: [Extension] - } deriving Show - -sfToFileName :: FilePath -> SourceFileEntry -> FilePath -sfToFileName projectRoot (SourceFileEntry relPath m ext _ _) - = projectRoot relPath toFilePath m <.> ext - --- |Search for source files in the given directory --- and return pairs of guessed Haskell source path and --- module names. 
-scanForModules :: FilePath -> IO [SourceFileEntry] -scanForModules rootDir = scanForModulesIn rootDir rootDir - -scanForModulesIn :: FilePath -> FilePath -> IO [SourceFileEntry] -scanForModulesIn projectRoot srcRoot = scan srcRoot [] - where - scan dir hierarchy = do - entries <- getDirectoryContents (projectRoot dir) - (files, dirs) <- liftM partitionEithers (mapM (tagIsDir dir) entries) - let modules = catMaybes [ guessModuleName hierarchy file - | file <- files - , isUpper (head file) ] - modules' <- mapM (findImportsAndExts projectRoot) modules - recMods <- mapM (scanRecursive dir hierarchy) dirs - return $ concat (modules' : recMods) - tagIsDir parent entry = do - isDir <- doesDirectoryExist (parent entry) - return $ (if isDir then Right else Left) entry - guessModuleName hierarchy entry - | takeBaseName entry == "Setup" = Nothing - | ext `elem` sourceExtensions = - SourceFileEntry <$> pure relRoot <*> modName <*> pure ext <*> pure [] <*> pure [] - | otherwise = Nothing - where - relRoot = makeRelative projectRoot srcRoot - unqualModName = dropExtension entry - modName = simpleParse - $ intercalate "." . reverse $ (unqualModName : hierarchy) - ext = case takeExtension entry of '.':e -> e; e -> e - scanRecursive parent hierarchy entry - | isUpper (head entry) = scan (parent entry) (entry : hierarchy) - | isLower (head entry) && not (ignoreDir entry) = - scanForModulesIn projectRoot $ foldl () srcRoot (reverse (entry : hierarchy)) - | otherwise = return [] - ignoreDir ('.':_) = True - ignoreDir dir = dir `elem` ["dist", "_darcs"] - -findImportsAndExts :: FilePath -> SourceFileEntry -> IO SourceFileEntry -findImportsAndExts projectRoot sf = do - s <- readFile (sfToFileName projectRoot sf) - - let modules = mapMaybe - ( getModName - . drop 1 - . filter (not . null) - . dropWhile (/= "import") - . words - ) - . filter (not . ("--" `isPrefixOf`)) -- poor man's comment filtering - . 
lines - $ s - - -- TODO: We should probably make a better attempt at parsing - -- comments above. Unfortunately we can't use a full-fledged - -- Haskell parser since cabal's dependencies must be kept at a - -- minimum. - - -- A poor man's LANGUAGE pragma parser. - exts = mapMaybe simpleParse - . concatMap getPragmas - . filter isLANGUAGEPragma - . map fst - . drop 1 - . takeWhile (not . null . snd) - . iterate (takeBraces . snd) - $ ("",s) - - takeBraces = break (== '}') . dropWhile (/= '{') - - isLANGUAGEPragma = ("{-# LANGUAGE " `isPrefixOf`) - - getPragmas = map trim . splitCommas . takeWhile (/= '#') . drop 13 - - splitCommas "" = [] - splitCommas xs = x : splitCommas (drop 1 y) - where (x,y) = break (==',') xs - - return sf { imports = modules - , extensions = exts - } - - where getModName :: [String] -> Maybe ModuleName - getModName [] = Nothing - getModName ("qualified":ws) = getModName ws - getModName (ms:_) = simpleParse ms - - - --- Unfortunately we cannot use the version exported by Distribution.Simple.Program -knownSuffixHandlers :: [(String,String)] -knownSuffixHandlers = - [ ("gc", "greencard") - , ("chs", "chs") - , ("hsc", "hsc2hs") - , ("x", "alex") - , ("y", "happy") - , ("ly", "happy") - , ("cpphs", "cpp") - ] - -sourceExtensions :: [String] -sourceExtensions = "hs" : "lhs" : map fst knownSuffixHandlers - -neededBuildPrograms :: [SourceFileEntry] -> [String] -neededBuildPrograms entries = - [ handler - | ext <- nubSet (map fileExtension entries) - , handler <- maybeToList (lookup ext knownSuffixHandlers) - ] - --- | Guess author and email using darcs and git configuration options. Use --- the following in decreasing order of preference: --- --- 1. vcs env vars ($DARCS_EMAIL, $GIT_AUTHOR_*) --- 2. Local repo configs --- 3. Global vcs configs --- 4. The generic $EMAIL --- --- Name and email are processed separately, so the guess might end up being --- a name from DARCS_EMAIL and an email from git config. 
--- --- Darcs has preference, for tradition's sake. -guessAuthorNameMail :: IO (Flag String, Flag String) -guessAuthorNameMail = fmap authorGuessPure authorGuessIO - --- Ordered in increasing preference, since Flag-as-monoid is identical to --- Last. -authorGuessPure :: AuthorGuessIO -> AuthorGuess -authorGuessPure (AuthorGuessIO { authorGuessEnv = env - , authorGuessLocalDarcs = darcsLocalF - , authorGuessGlobalDarcs = darcsGlobalF - , authorGuessLocalGit = gitLocal - , authorGuessGlobalGit = gitGlobal }) - = mconcat - [ emailEnv env - , gitGlobal - , darcsCfg darcsGlobalF - , gitLocal - , darcsCfg darcsLocalF - , gitEnv env - , darcsEnv env - ] - -authorGuessIO :: IO AuthorGuessIO -authorGuessIO = AuthorGuessIO - <$> getEnvironment - <*> (maybeReadFile $ "_darcs" "prefs" "author") - <*> (maybeReadFile =<< liftM ( (".darcs" "author")) getHomeDirectory) - <*> gitCfg Local - <*> gitCfg Global - --- Types and functions used for guessing the author are now defined: - -type AuthorGuess = (Flag String, Flag String) -type Enviro = [(String, String)] -data GitLoc = Local | Global -data AuthorGuessIO = AuthorGuessIO { - authorGuessEnv :: Enviro, -- ^ Environment lookup table - authorGuessLocalDarcs :: (Maybe String), -- ^ Contents of local darcs author info - authorGuessGlobalDarcs :: (Maybe String), -- ^ Contents of global darcs author info - authorGuessLocalGit :: AuthorGuess, -- ^ Git config --local - authorGuessGlobalGit :: AuthorGuess -- ^ Git config --global - } - -darcsEnv :: Enviro -> AuthorGuess -darcsEnv = maybe mempty nameAndMail . 
lookup "DARCS_EMAIL" - -gitEnv :: Enviro -> AuthorGuess -gitEnv env = (name, mail) - where - name = maybeFlag "GIT_AUTHOR_NAME" env - mail = maybeFlag "GIT_AUTHOR_EMAIL" env - -darcsCfg :: Maybe String -> AuthorGuess -darcsCfg = maybe mempty nameAndMail - -emailEnv :: Enviro -> AuthorGuess -emailEnv env = (mempty, mail) - where - mail = maybeFlag "EMAIL" env - -gitCfg :: GitLoc -> IO AuthorGuess -gitCfg which = do - name <- gitVar which "user.name" - mail <- gitVar which "user.email" - return (name, mail) - -gitVar :: GitLoc -> String -> IO (Flag String) -gitVar which = fmap happyOutput . gitConfigQuery which - -happyOutput :: (ExitCode, a, t) -> Flag a -happyOutput v = case v of - (ExitSuccess, s, _) -> Flag s - _ -> mempty - -gitConfigQuery :: GitLoc -> String -> IO (ExitCode, String, String) -gitConfigQuery which key = - fmap trim' $ readProcessWithExitCode "git" ["config", w, key] "" - where - w = case which of - Local -> "--local" - Global -> "--global" - trim' (a, b, c) = (a, trim b, c) - -maybeFlag :: String -> Enviro -> Flag String -maybeFlag k = maybe mempty Flag . lookup k - --- | Read the first non-comment, non-trivial line of a file, if it exists -maybeReadFile :: String -> IO (Maybe String) -maybeReadFile f = do - exists <- doesFileExist f - if exists - then fmap getFirstLine $ readFile f - else return Nothing - where - getFirstLine content = - let nontrivialLines = dropWhile (\l -> (null l) || ("#" `isPrefixOf` l)) . lines $ content - in case nontrivialLines of - [] -> Nothing - (l:_) -> Just l - --- |Get list of categories used in Hackage. NOTE: Very slow, needs to be cached -knownCategories :: SourcePackageDb -> [String] -knownCategories (SourcePackageDb sourcePkgIndex _) = nubSet - [ cat | pkg <- map head (allPackagesByName sourcePkgIndex) - , let catList = (PD.category . PD.packageDescription . 
packageDescription) pkg - , cat <- splitString ',' catList - ] - --- Parse name and email, from darcs pref files or environment variable -nameAndMail :: String -> (Flag String, Flag String) -nameAndMail str - | all isSpace nameOrEmail = mempty - | null erest = (mempty, Flag $ trim nameOrEmail) - | otherwise = (Flag $ trim nameOrEmail, Flag mail) - where - (nameOrEmail,erest) = break (== '<') str - (mail,_) = break (== '>') (tail erest) - -trim :: String -> String -trim = removeLeadingSpace . reverse . removeLeadingSpace . reverse - where - removeLeadingSpace = dropWhile isSpace - --- split string at given character, and remove whitespace -splitString :: Char -> String -> [String] -splitString sep str = go str where - go s = if null s' then [] else tok : go rest where - s' = dropWhile (\c -> c == sep || isSpace c) s - (tok,rest) = break (==sep) s' - -nubSet :: (Ord a) => [a] -> [a] -nubSet = Set.toList . Set.fromList - -{- -test db testProjectRoot = do - putStrLn "Guessed package name" - (guessPackageName >=> print) testProjectRoot - putStrLn "Guessed name and email" - guessAuthorNameMail >>= print - - mods <- scanForModules testProjectRoot - - putStrLn "Guessed modules" - mapM_ print mods - putStrLn "Needed build programs" - print (neededBuildPrograms mods) - - putStrLn "List of known categories" - print $ knownCategories db --} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Init/Licenses.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Init/Licenses.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Init/Licenses.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Init/Licenses.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,3065 +0,0 @@ -module Distribution.Client.Init.Licenses - ( License - , bsd2 - , bsd3 - , gplv2 - , gplv3 - , lgpl21 - , 
lgpl3 - , agplv3 - , apache20 - , mit - , mpl20 - , isc - ) where - -type License = String - -bsd2 :: String -> String -> License -bsd2 authors year = unlines - [ "Copyright (c) " ++ year ++ ", " ++ authors - , "All rights reserved." - , "" - , "Redistribution and use in source and binary forms, with or without" - , "modification, are permitted provided that the following conditions are" - , "met:" - , "" - , "1. Redistributions of source code must retain the above copyright" - , " notice, this list of conditions and the following disclaimer." - , "" - , "2. Redistributions in binary form must reproduce the above copyright" - , " notice, this list of conditions and the following disclaimer in the" - , " documentation and/or other materials provided with the" - , " distribution." - , "" - , "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS" - , "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT" - , "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR" - , "A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT" - , "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL," - , "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT" - , "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE," - , "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY" - , "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT" - , "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE" - , "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." - ] - -bsd3 :: String -> String -> License -bsd3 authors year = unlines - [ "Copyright (c) " ++ year ++ ", " ++ authors - , "" - , "All rights reserved." 
- , "" - , "Redistribution and use in source and binary forms, with or without" - , "modification, are permitted provided that the following conditions are met:" - , "" - , " * Redistributions of source code must retain the above copyright" - , " notice, this list of conditions and the following disclaimer." - , "" - , " * Redistributions in binary form must reproduce the above" - , " copyright notice, this list of conditions and the following" - , " disclaimer in the documentation and/or other materials provided" - , " with the distribution." - , "" - , " * Neither the name of " ++ authors ++ " nor the names of other" - , " contributors may be used to endorse or promote products derived" - , " from this software without specific prior written permission." - , "" - , "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS" - , "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT" - , "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR" - , "A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT" - , "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL," - , "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT" - , "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE," - , "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY" - , "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT" - , "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE" - , "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." - ] - -gplv2 :: License -gplv2 = unlines - [ " GNU GENERAL PUBLIC LICENSE" - , " Version 2, June 1991" - , "" - , " Copyright (C) 1989, 1991 Free Software Foundation, Inc.," - , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" - , " Everyone is permitted to copy and distribute verbatim copies" - , " of this license document, but changing it is not allowed." 
- , "" - , " Preamble" - , "" - , " The licenses for most software are designed to take away your" - , "freedom to share and change it. By contrast, the GNU General Public" - , "License is intended to guarantee your freedom to share and change free" - , "software--to make sure the software is free for all its users. This" - , "General Public License applies to most of the Free Software" - , "Foundation's software and to any other program whose authors commit to" - , "using it. (Some other Free Software Foundation software is covered by" - , "the GNU Lesser General Public License instead.) You can apply it to" - , "your programs, too." - , "" - , " When we speak of free software, we are referring to freedom, not" - , "price. Our General Public Licenses are designed to make sure that you" - , "have the freedom to distribute copies of free software (and charge for" - , "this service if you wish), that you receive source code or can get it" - , "if you want it, that you can change the software or use pieces of it" - , "in new free programs; and that you know you can do these things." - , "" - , " To protect your rights, we need to make restrictions that forbid" - , "anyone to deny you these rights or to ask you to surrender the rights." - , "These restrictions translate to certain responsibilities for you if you" - , "distribute copies of the software, or if you modify it." - , "" - , " For example, if you distribute copies of such a program, whether" - , "gratis or for a fee, you must give the recipients all the rights that" - , "you have. You must make sure that they, too, receive or can get the" - , "source code. And you must show them these terms so they know their" - , "rights." - , "" - , " We protect your rights with two steps: (1) copyright the software, and" - , "(2) offer you this license which gives you legal permission to copy," - , "distribute and/or modify the software." 
- , "" - , " Also, for each author's protection and ours, we want to make certain" - , "that everyone understands that there is no warranty for this free" - , "software. If the software is modified by someone else and passed on, we" - , "want its recipients to know that what they have is not the original, so" - , "that any problems introduced by others will not reflect on the original" - , "authors' reputations." - , "" - , " Finally, any free program is threatened constantly by software" - , "patents. We wish to avoid the danger that redistributors of a free" - , "program will individually obtain patent licenses, in effect making the" - , "program proprietary. To prevent this, we have made it clear that any" - , "patent must be licensed for everyone's free use or not licensed at all." - , "" - , " The precise terms and conditions for copying, distribution and" - , "modification follow." - , "" - , " GNU GENERAL PUBLIC LICENSE" - , " TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION" - , "" - , " 0. This License applies to any program or other work which contains" - , "a notice placed by the copyright holder saying it may be distributed" - , "under the terms of this General Public License. The \"Program\", below," - , "refers to any such program or work, and a \"work based on the Program\"" - , "means either the Program or any derivative work under copyright law:" - , "that is to say, a work containing the Program or a portion of it," - , "either verbatim or with modifications and/or translated into another" - , "language. (Hereinafter, translation is included without limitation in" - , "the term \"modification\".) Each licensee is addressed as \"you\"." - , "" - , "Activities other than copying, distribution and modification are not" - , "covered by this License; they are outside its scope. 
The act of" - , "running the Program is not restricted, and the output from the Program" - , "is covered only if its contents constitute a work based on the" - , "Program (independent of having been made by running the Program)." - , "Whether that is true depends on what the Program does." - , "" - , " 1. You may copy and distribute verbatim copies of the Program's" - , "source code as you receive it, in any medium, provided that you" - , "conspicuously and appropriately publish on each copy an appropriate" - , "copyright notice and disclaimer of warranty; keep intact all the" - , "notices that refer to this License and to the absence of any warranty;" - , "and give any other recipients of the Program a copy of this License" - , "along with the Program." - , "" - , "You may charge a fee for the physical act of transferring a copy, and" - , "you may at your option offer warranty protection in exchange for a fee." - , "" - , " 2. You may modify your copy or copies of the Program or any portion" - , "of it, thus forming a work based on the Program, and copy and" - , "distribute such modifications or work under the terms of Section 1" - , "above, provided that you also meet all of these conditions:" - , "" - , " a) You must cause the modified files to carry prominent notices" - , " stating that you changed the files and the date of any change." - , "" - , " b) You must cause any work that you distribute or publish, that in" - , " whole or in part contains or is derived from the Program or any" - , " part thereof, to be licensed as a whole at no charge to all third" - , " parties under the terms of this License." 
- , "" - , " c) If the modified program normally reads commands interactively" - , " when run, you must cause it, when started running for such" - , " interactive use in the most ordinary way, to print or display an" - , " announcement including an appropriate copyright notice and a" - , " notice that there is no warranty (or else, saying that you provide" - , " a warranty) and that users may redistribute the program under" - , " these conditions, and telling the user how to view a copy of this" - , " License. (Exception: if the Program itself is interactive but" - , " does not normally print such an announcement, your work based on" - , " the Program is not required to print an announcement.)" - , "" - , "These requirements apply to the modified work as a whole. If" - , "identifiable sections of that work are not derived from the Program," - , "and can be reasonably considered independent and separate works in" - , "themselves, then this License, and its terms, do not apply to those" - , "sections when you distribute them as separate works. But when you" - , "distribute the same sections as part of a whole which is a work based" - , "on the Program, the distribution of the whole must be on the terms of" - , "this License, whose permissions for other licensees extend to the" - , "entire whole, and thus to each and every part regardless of who wrote it." - , "" - , "Thus, it is not the intent of this section to claim rights or contest" - , "your rights to work written entirely by you; rather, the intent is to" - , "exercise the right to control the distribution of derivative or" - , "collective works based on the Program." - , "" - , "In addition, mere aggregation of another work not based on the Program" - , "with the Program (or with a work based on the Program) on a volume of" - , "a storage or distribution medium does not bring the other work under" - , "the scope of this License." - , "" - , " 3. 
You may copy and distribute the Program (or a work based on it," - , "under Section 2) in object code or executable form under the terms of" - , "Sections 1 and 2 above provided that you also do one of the following:" - , "" - , " a) Accompany it with the complete corresponding machine-readable" - , " source code, which must be distributed under the terms of Sections" - , " 1 and 2 above on a medium customarily used for software interchange; or," - , "" - , " b) Accompany it with a written offer, valid for at least three" - , " years, to give any third party, for a charge no more than your" - , " cost of physically performing source distribution, a complete" - , " machine-readable copy of the corresponding source code, to be" - , " distributed under the terms of Sections 1 and 2 above on a medium" - , " customarily used for software interchange; or," - , "" - , " c) Accompany it with the information you received as to the offer" - , " to distribute corresponding source code. (This alternative is" - , " allowed only for noncommercial distribution and only if you" - , " received the program in object code or executable form with such" - , " an offer, in accord with Subsection b above.)" - , "" - , "The source code for a work means the preferred form of the work for" - , "making modifications to it. For an executable work, complete source" - , "code means all the source code for all modules it contains, plus any" - , "associated interface definition files, plus the scripts used to" - , "control compilation and installation of the executable. However, as a" - , "special exception, the source code distributed need not include" - , "anything that is normally distributed (in either source or binary" - , "form) with the major components (compiler, kernel, and so on) of the" - , "operating system on which the executable runs, unless that component" - , "itself accompanies the executable." 
- , "" - , "If distribution of executable or object code is made by offering" - , "access to copy from a designated place, then offering equivalent" - , "access to copy the source code from the same place counts as" - , "distribution of the source code, even though third parties are not" - , "compelled to copy the source along with the object code." - , "" - , " 4. You may not copy, modify, sublicense, or distribute the Program" - , "except as expressly provided under this License. Any attempt" - , "otherwise to copy, modify, sublicense or distribute the Program is" - , "void, and will automatically terminate your rights under this License." - , "However, parties who have received copies, or rights, from you under" - , "this License will not have their licenses terminated so long as such" - , "parties remain in full compliance." - , "" - , " 5. You are not required to accept this License, since you have not" - , "signed it. However, nothing else grants you permission to modify or" - , "distribute the Program or its derivative works. These actions are" - , "prohibited by law if you do not accept this License. Therefore, by" - , "modifying or distributing the Program (or any work based on the" - , "Program), you indicate your acceptance of this License to do so, and" - , "all its terms and conditions for copying, distributing or modifying" - , "the Program or works based on it." - , "" - , " 6. Each time you redistribute the Program (or any work based on the" - , "Program), the recipient automatically receives a license from the" - , "original licensor to copy, distribute or modify the Program subject to" - , "these terms and conditions. You may not impose any further" - , "restrictions on the recipients' exercise of the rights granted herein." - , "You are not responsible for enforcing compliance by third parties to" - , "this License." - , "" - , " 7. 
If, as a consequence of a court judgment or allegation of patent" - , "infringement or for any other reason (not limited to patent issues)," - , "conditions are imposed on you (whether by court order, agreement or" - , "otherwise) that contradict the conditions of this License, they do not" - , "excuse you from the conditions of this License. If you cannot" - , "distribute so as to satisfy simultaneously your obligations under this" - , "License and any other pertinent obligations, then as a consequence you" - , "may not distribute the Program at all. For example, if a patent" - , "license would not permit royalty-free redistribution of the Program by" - , "all those who receive copies directly or indirectly through you, then" - , "the only way you could satisfy both it and this License would be to" - , "refrain entirely from distribution of the Program." - , "" - , "If any portion of this section is held invalid or unenforceable under" - , "any particular circumstance, the balance of the section is intended to" - , "apply and the section as a whole is intended to apply in other" - , "circumstances." - , "" - , "It is not the purpose of this section to induce you to infringe any" - , "patents or other property right claims or to contest validity of any" - , "such claims; this section has the sole purpose of protecting the" - , "integrity of the free software distribution system, which is" - , "implemented by public license practices. Many people have made" - , "generous contributions to the wide range of software distributed" - , "through that system in reliance on consistent application of that" - , "system; it is up to the author/donor to decide if he or she is willing" - , "to distribute software through any other system and a licensee cannot" - , "impose that choice." - , "" - , "This section is intended to make thoroughly clear what is believed to" - , "be a consequence of the rest of this License." - , "" - , " 8. 
If the distribution and/or use of the Program is restricted in" - , "certain countries either by patents or by copyrighted interfaces, the" - , "original copyright holder who places the Program under this License" - , "may add an explicit geographical distribution limitation excluding" - , "those countries, so that distribution is permitted only in or among" - , "countries not thus excluded. In such case, this License incorporates" - , "the limitation as if written in the body of this License." - , "" - , " 9. The Free Software Foundation may publish revised and/or new versions" - , "of the General Public License from time to time. Such new versions will" - , "be similar in spirit to the present version, but may differ in detail to" - , "address new problems or concerns." - , "" - , "Each version is given a distinguishing version number. If the Program" - , "specifies a version number of this License which applies to it and \"any" - , "later version\", you have the option of following the terms and conditions" - , "either of that version or of any later version published by the Free" - , "Software Foundation. If the Program does not specify a version number of" - , "this License, you may choose any version ever published by the Free Software" - , "Foundation." - , "" - , " 10. If you wish to incorporate parts of the Program into other free" - , "programs whose distribution conditions are different, write to the author" - , "to ask for permission. For software which is copyrighted by the Free" - , "Software Foundation, write to the Free Software Foundation; we sometimes" - , "make exceptions for this. Our decision will be guided by the two goals" - , "of preserving the free status of all derivatives of our free software and" - , "of promoting the sharing and reuse of software generally." - , "" - , " NO WARRANTY" - , "" - , " 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY" - , "FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN" - , "OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES" - , "PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED" - , "OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF" - , "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS" - , "TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE" - , "PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING," - , "REPAIR OR CORRECTION." - , "" - , " 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" - , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR" - , "REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES," - , "INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING" - , "OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED" - , "TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY" - , "YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER" - , "PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE" - , "POSSIBILITY OF SUCH DAMAGES." - , "" - , " END OF TERMS AND CONDITIONS" - , "" - , " How to Apply These Terms to Your New Programs" - , "" - , " If you develop a new program, and you want it to be of the greatest" - , "possible use to the public, the best way to achieve this is to make it" - , "free software which everyone can redistribute and change under these terms." - , "" - , " To do so, attach the following notices to the program. It is safest" - , "to attach them to the start of each source file to most effectively" - , "convey the exclusion of warranty; and each file should have at least" - , "the \"copyright\" line and a pointer to where the full notice is found." 
- , "" - , " " - , " Copyright (C) " - , "" - , " This program is free software; you can redistribute it and/or modify" - , " it under the terms of the GNU General Public License as published by" - , " the Free Software Foundation; either version 2 of the License, or" - , " (at your option) any later version." - , "" - , " This program is distributed in the hope that it will be useful," - , " but WITHOUT ANY WARRANTY; without even the implied warranty of" - , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" - , " GNU General Public License for more details." - , "" - , " You should have received a copy of the GNU General Public License along" - , " with this program; if not, write to the Free Software Foundation, Inc.," - , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA." - , "" - , "Also add information on how to contact you by electronic and paper mail." - , "" - , "If the program is interactive, make it output a short notice like this" - , "when it starts in an interactive mode:" - , "" - , " Gnomovision version 69, Copyright (C) year name of author" - , " Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'." - , " This is free software, and you are welcome to redistribute it" - , " under certain conditions; type `show c' for details." - , "" - , "The hypothetical commands `show w' and `show c' should show the appropriate" - , "parts of the General Public License. Of course, the commands you use may" - , "be called something other than `show w' and `show c'; they could even be" - , "mouse-clicks or menu items--whatever suits your program." - , "" - , "You should also get your employer (if you work as a programmer) or your" - , "school, if any, to sign a \"copyright disclaimer\" for the program, if" - , "necessary. Here is a sample; alter the names:" - , "" - , " Yoyodyne, Inc., hereby disclaims all copyright interest in the program" - , " `Gnomovision' (which makes passes at compilers) written by James Hacker." 
- , "" - , " , 1 April 1989" - , " Ty Coon, President of Vice" - , "" - , "This General Public License does not permit incorporating your program into" - , "proprietary programs. If your program is a subroutine library, you may" - , "consider it more useful to permit linking proprietary applications with the" - , "library. If this is what you want to do, use the GNU Lesser General" - , "Public License instead of this License." - ] - -gplv3 :: License -gplv3 = unlines - [ " GNU GENERAL PUBLIC LICENSE" - , " Version 3, 29 June 2007" - , "" - , " Copyright (C) 2007 Free Software Foundation, Inc. " - , " Everyone is permitted to copy and distribute verbatim copies" - , " of this license document, but changing it is not allowed." - , "" - , " Preamble" - , "" - , " The GNU General Public License is a free, copyleft license for" - , "software and other kinds of works." - , "" - , " The licenses for most software and other practical works are designed" - , "to take away your freedom to share and change the works. By contrast," - , "the GNU General Public License is intended to guarantee your freedom to" - , "share and change all versions of a program--to make sure it remains free" - , "software for all its users. We, the Free Software Foundation, use the" - , "GNU General Public License for most of our software; it applies also to" - , "any other work released this way by its authors. You can apply it to" - , "your programs, too." - , "" - , " When we speak of free software, we are referring to freedom, not" - , "price. Our General Public Licenses are designed to make sure that you" - , "have the freedom to distribute copies of free software (and charge for" - , "them if you wish), that you receive source code or can get it if you" - , "want it, that you can change the software or use pieces of it in new" - , "free programs, and that you know you can do these things." 
- , "" - , " To protect your rights, we need to prevent others from denying you" - , "these rights or asking you to surrender the rights. Therefore, you have" - , "certain responsibilities if you distribute copies of the software, or if" - , "you modify it: responsibilities to respect the freedom of others." - , "" - , " For example, if you distribute copies of such a program, whether" - , "gratis or for a fee, you must pass on to the recipients the same" - , "freedoms that you received. You must make sure that they, too, receive" - , "or can get the source code. And you must show them these terms so they" - , "know their rights." - , "" - , " Developers that use the GNU GPL protect your rights with two steps:" - , "(1) assert copyright on the software, and (2) offer you this License" - , "giving you legal permission to copy, distribute and/or modify it." - , "" - , " For the developers' and authors' protection, the GPL clearly explains" - , "that there is no warranty for this free software. For both users' and" - , "authors' sake, the GPL requires that modified versions be marked as" - , "changed, so that their problems will not be attributed erroneously to" - , "authors of previous versions." - , "" - , " Some devices are designed to deny users access to install or run" - , "modified versions of the software inside them, although the manufacturer" - , "can do so. This is fundamentally incompatible with the aim of" - , "protecting users' freedom to change the software. The systematic" - , "pattern of such abuse occurs in the area of products for individuals to" - , "use, which is precisely where it is most unacceptable. Therefore, we" - , "have designed this version of the GPL to prohibit the practice for those" - , "products. If such problems arise substantially in other domains, we" - , "stand ready to extend this provision to those domains in future versions" - , "of the GPL, as needed to protect the freedom of users." 
- , "" - , " Finally, every program is threatened constantly by software patents." - , "States should not allow patents to restrict development and use of" - , "software on general-purpose computers, but in those that do, we wish to" - , "avoid the special danger that patents applied to a free program could" - , "make it effectively proprietary. To prevent this, the GPL assures that" - , "patents cannot be used to render the program non-free." - , "" - , " The precise terms and conditions for copying, distribution and" - , "modification follow." - , "" - , " TERMS AND CONDITIONS" - , "" - , " 0. Definitions." - , "" - , " \"This License\" refers to version 3 of the GNU General Public License." - , "" - , " \"Copyright\" also means copyright-like laws that apply to other kinds of" - , "works, such as semiconductor masks." - , "" - , " \"The Program\" refers to any copyrightable work licensed under this" - , "License. Each licensee is addressed as \"you\". \"Licensees\" and" - , "\"recipients\" may be individuals or organizations." - , "" - , " To \"modify\" a work means to copy from or adapt all or part of the work" - , "in a fashion requiring copyright permission, other than the making of an" - , "exact copy. The resulting work is called a \"modified version\" of the" - , "earlier work or a work \"based on\" the earlier work." - , "" - , " A \"covered work\" means either the unmodified Program or a work based" - , "on the Program." - , "" - , " To \"propagate\" a work means to do anything with it that, without" - , "permission, would make you directly or secondarily liable for" - , "infringement under applicable copyright law, except executing it on a" - , "computer or modifying a private copy. Propagation includes copying," - , "distribution (with or without modification), making available to the" - , "public, and in some countries other activities as well." 
- , "" - , " To \"convey\" a work means any kind of propagation that enables other" - , "parties to make or receive copies. Mere interaction with a user through" - , "a computer network, with no transfer of a copy, is not conveying." - , "" - , " An interactive user interface displays \"Appropriate Legal Notices\"" - , "to the extent that it includes a convenient and prominently visible" - , "feature that (1) displays an appropriate copyright notice, and (2)" - , "tells the user that there is no warranty for the work (except to the" - , "extent that warranties are provided), that licensees may convey the" - , "work under this License, and how to view a copy of this License. If" - , "the interface presents a list of user commands or options, such as a" - , "menu, a prominent item in the list meets this criterion." - , "" - , " 1. Source Code." - , "" - , " The \"source code\" for a work means the preferred form of the work" - , "for making modifications to it. \"Object code\" means any non-source" - , "form of a work." - , "" - , " A \"Standard Interface\" means an interface that either is an official" - , "standard defined by a recognized standards body, or, in the case of" - , "interfaces specified for a particular programming language, one that" - , "is widely used among developers working in that language." - , "" - , " The \"System Libraries\" of an executable work include anything, other" - , "than the work as a whole, that (a) is included in the normal form of" - , "packaging a Major Component, but which is not part of that Major" - , "Component, and (b) serves only to enable use of the work with that" - , "Major Component, or to implement a Standard Interface for which an" - , "implementation is available to the public in source code form. 
A" - , "\"Major Component\", in this context, means a major essential component" - , "(kernel, window system, and so on) of the specific operating system" - , "(if any) on which the executable work runs, or a compiler used to" - , "produce the work, or an object code interpreter used to run it." - , "" - , " The \"Corresponding Source\" for a work in object code form means all" - , "the source code needed to generate, install, and (for an executable" - , "work) run the object code and to modify the work, including scripts to" - , "control those activities. However, it does not include the work's" - , "System Libraries, or general-purpose tools or generally available free" - , "programs which are used unmodified in performing those activities but" - , "which are not part of the work. For example, Corresponding Source" - , "includes interface definition files associated with source files for" - , "the work, and the source code for shared libraries and dynamically" - , "linked subprograms that the work is specifically designed to require," - , "such as by intimate data communication or control flow between those" - , "subprograms and other parts of the work." - , "" - , " The Corresponding Source need not include anything that users" - , "can regenerate automatically from other parts of the Corresponding" - , "Source." - , "" - , " The Corresponding Source for a work in source code form is that" - , "same work." - , "" - , " 2. Basic Permissions." - , "" - , " All rights granted under this License are granted for the term of" - , "copyright on the Program, and are irrevocable provided the stated" - , "conditions are met. This License explicitly affirms your unlimited" - , "permission to run the unmodified Program. The output from running a" - , "covered work is covered by this License only if the output, given its" - , "content, constitutes a covered work. This License acknowledges your" - , "rights of fair use or other equivalent, as provided by copyright law." 
- , "" - , " You may make, run and propagate covered works that you do not" - , "convey, without conditions so long as your license otherwise remains" - , "in force. You may convey covered works to others for the sole purpose" - , "of having them make modifications exclusively for you, or provide you" - , "with facilities for running those works, provided that you comply with" - , "the terms of this License in conveying all material for which you do" - , "not control copyright. Those thus making or running the covered works" - , "for you must do so exclusively on your behalf, under your direction" - , "and control, on terms that prohibit them from making any copies of" - , "your copyrighted material outside their relationship with you." - , "" - , " Conveying under any other circumstances is permitted solely under" - , "the conditions stated below. Sublicensing is not allowed; section 10" - , "makes it unnecessary." - , "" - , " 3. Protecting Users' Legal Rights From Anti-Circumvention Law." - , "" - , " No covered work shall be deemed part of an effective technological" - , "measure under any applicable law fulfilling obligations under article" - , "11 of the WIPO copyright treaty adopted on 20 December 1996, or" - , "similar laws prohibiting or restricting circumvention of such" - , "measures." - , "" - , " When you convey a covered work, you waive any legal power to forbid" - , "circumvention of technological measures to the extent such circumvention" - , "is effected by exercising rights under this License with respect to" - , "the covered work, and you disclaim any intention to limit operation or" - , "modification of the work as a means of enforcing, against the work's" - , "users, your or third parties' legal rights to forbid circumvention of" - , "technological measures." - , "" - , " 4. Conveying Verbatim Copies." 
- , "" - , " You may convey verbatim copies of the Program's source code as you" - , "receive it, in any medium, provided that you conspicuously and" - , "appropriately publish on each copy an appropriate copyright notice;" - , "keep intact all notices stating that this License and any" - , "non-permissive terms added in accord with section 7 apply to the code;" - , "keep intact all notices of the absence of any warranty; and give all" - , "recipients a copy of this License along with the Program." - , "" - , " You may charge any price or no price for each copy that you convey," - , "and you may offer support or warranty protection for a fee." - , "" - , " 5. Conveying Modified Source Versions." - , "" - , " You may convey a work based on the Program, or the modifications to" - , "produce it from the Program, in the form of source code under the" - , "terms of section 4, provided that you also meet all of these conditions:" - , "" - , " a) The work must carry prominent notices stating that you modified" - , " it, and giving a relevant date." - , "" - , " b) The work must carry prominent notices stating that it is" - , " released under this License and any conditions added under section" - , " 7. This requirement modifies the requirement in section 4 to" - , " \"keep intact all notices\"." - , "" - , " c) You must license the entire work, as a whole, under this" - , " License to anyone who comes into possession of a copy. This" - , " License will therefore apply, along with any applicable section 7" - , " additional terms, to the whole of the work, and all its parts," - , " regardless of how they are packaged. This License gives no" - , " permission to license the work in any other way, but it does not" - , " invalidate such permission if you have separately received it." 
- , "" - , " d) If the work has interactive user interfaces, each must display" - , " Appropriate Legal Notices; however, if the Program has interactive" - , " interfaces that do not display Appropriate Legal Notices, your" - , " work need not make them do so." - , "" - , " A compilation of a covered work with other separate and independent" - , "works, which are not by their nature extensions of the covered work," - , "and which are not combined with it such as to form a larger program," - , "in or on a volume of a storage or distribution medium, is called an" - , "\"aggregate\" if the compilation and its resulting copyright are not" - , "used to limit the access or legal rights of the compilation's users" - , "beyond what the individual works permit. Inclusion of a covered work" - , "in an aggregate does not cause this License to apply to the other" - , "parts of the aggregate." - , "" - , " 6. Conveying Non-Source Forms." - , "" - , " You may convey a covered work in object code form under the terms" - , "of sections 4 and 5, provided that you also convey the" - , "machine-readable Corresponding Source under the terms of this License," - , "in one of these ways:" - , "" - , " a) Convey the object code in, or embodied in, a physical product" - , " (including a physical distribution medium), accompanied by the" - , " Corresponding Source fixed on a durable physical medium" - , " customarily used for software interchange." 
- , "" - , " b) Convey the object code in, or embodied in, a physical product" - , " (including a physical distribution medium), accompanied by a" - , " written offer, valid for at least three years and valid for as" - , " long as you offer spare parts or customer support for that product" - , " model, to give anyone who possesses the object code either (1) a" - , " copy of the Corresponding Source for all the software in the" - , " product that is covered by this License, on a durable physical" - , " medium customarily used for software interchange, for a price no" - , " more than your reasonable cost of physically performing this" - , " conveying of source, or (2) access to copy the" - , " Corresponding Source from a network server at no charge." - , "" - , " c) Convey individual copies of the object code with a copy of the" - , " written offer to provide the Corresponding Source. This" - , " alternative is allowed only occasionally and noncommercially, and" - , " only if you received the object code with such an offer, in accord" - , " with subsection 6b." - , "" - , " d) Convey the object code by offering access from a designated" - , " place (gratis or for a charge), and offer equivalent access to the" - , " Corresponding Source in the same way through the same place at no" - , " further charge. You need not require recipients to copy the" - , " Corresponding Source along with the object code. If the place to" - , " copy the object code is a network server, the Corresponding Source" - , " may be on a different server (operated by you or a third party)" - , " that supports equivalent copying facilities, provided you maintain" - , " clear directions next to the object code saying where to find the" - , " Corresponding Source. Regardless of what server hosts the" - , " Corresponding Source, you remain obligated to ensure that it is" - , " available for as long as needed to satisfy these requirements." 
- , "" - , " e) Convey the object code using peer-to-peer transmission, provided" - , " you inform other peers where the object code and Corresponding" - , " Source of the work are being offered to the general public at no" - , " charge under subsection 6d." - , "" - , " A separable portion of the object code, whose source code is excluded" - , "from the Corresponding Source as a System Library, need not be" - , "included in conveying the object code work." - , "" - , " A \"User Product\" is either (1) a \"consumer product\", which means any" - , "tangible personal property which is normally used for personal, family," - , "or household purposes, or (2) anything designed or sold for incorporation" - , "into a dwelling. In determining whether a product is a consumer product," - , "doubtful cases shall be resolved in favor of coverage. For a particular" - , "product received by a particular user, \"normally used\" refers to a" - , "typical or common use of that class of product, regardless of the status" - , "of the particular user or of the way in which the particular user" - , "actually uses, or expects or is expected to use, the product. A product" - , "is a consumer product regardless of whether the product has substantial" - , "commercial, industrial or non-consumer uses, unless such uses represent" - , "the only significant mode of use of the product." - , "" - , " \"Installation Information\" for a User Product means any methods," - , "procedures, authorization keys, or other information required to install" - , "and execute modified versions of a covered work in that User Product from" - , "a modified version of its Corresponding Source. The information must" - , "suffice to ensure that the continued functioning of the modified object" - , "code is in no case prevented or interfered with solely because" - , "modification has been made." 
- , "" - , " If you convey an object code work under this section in, or with, or" - , "specifically for use in, a User Product, and the conveying occurs as" - , "part of a transaction in which the right of possession and use of the" - , "User Product is transferred to the recipient in perpetuity or for a" - , "fixed term (regardless of how the transaction is characterized), the" - , "Corresponding Source conveyed under this section must be accompanied" - , "by the Installation Information. But this requirement does not apply" - , "if neither you nor any third party retains the ability to install" - , "modified object code on the User Product (for example, the work has" - , "been installed in ROM)." - , "" - , " The requirement to provide Installation Information does not include a" - , "requirement to continue to provide support service, warranty, or updates" - , "for a work that has been modified or installed by the recipient, or for" - , "the User Product in which it has been modified or installed. Access to a" - , "network may be denied when the modification itself materially and" - , "adversely affects the operation of the network or violates the rules and" - , "protocols for communication across the network." - , "" - , " Corresponding Source conveyed, and Installation Information provided," - , "in accord with this section must be in a format that is publicly" - , "documented (and with an implementation available to the public in" - , "source code form), and must require no special password or key for" - , "unpacking, reading or copying." - , "" - , " 7. Additional Terms." - , "" - , " \"Additional permissions\" are terms that supplement the terms of this" - , "License by making exceptions from one or more of its conditions." - , "Additional permissions that are applicable to the entire Program shall" - , "be treated as though they were included in this License, to the extent" - , "that they are valid under applicable law. 
If additional permissions" - , "apply only to part of the Program, that part may be used separately" - , "under those permissions, but the entire Program remains governed by" - , "this License without regard to the additional permissions." - , "" - , " When you convey a copy of a covered work, you may at your option" - , "remove any additional permissions from that copy, or from any part of" - , "it. (Additional permissions may be written to require their own" - , "removal in certain cases when you modify the work.) You may place" - , "additional permissions on material, added by you to a covered work," - , "for which you have or can give appropriate copyright permission." - , "" - , " Notwithstanding any other provision of this License, for material you" - , "add to a covered work, you may (if authorized by the copyright holders of" - , "that material) supplement the terms of this License with terms:" - , "" - , " a) Disclaiming warranty or limiting liability differently from the" - , " terms of sections 15 and 16 of this License; or" - , "" - , " b) Requiring preservation of specified reasonable legal notices or" - , " author attributions in that material or in the Appropriate Legal" - , " Notices displayed by works containing it; or" - , "" - , " c) Prohibiting misrepresentation of the origin of that material, or" - , " requiring that modified versions of such material be marked in" - , " reasonable ways as different from the original version; or" - , "" - , " d) Limiting the use for publicity purposes of names of licensors or" - , " authors of the material; or" - , "" - , " e) Declining to grant rights under trademark law for use of some" - , " trade names, trademarks, or service marks; or" - , "" - , " f) Requiring indemnification of licensors and authors of that" - , " material by anyone who conveys the material (or modified versions of" - , " it) with contractual assumptions of liability to the recipient, for" - , " any liability that these contractual 
assumptions directly impose on" - , " those licensors and authors." - , "" - , " All other non-permissive additional terms are considered \"further" - , "restrictions\" within the meaning of section 10. If the Program as you" - , "received it, or any part of it, contains a notice stating that it is" - , "governed by this License along with a term that is a further" - , "restriction, you may remove that term. If a license document contains" - , "a further restriction but permits relicensing or conveying under this" - , "License, you may add to a covered work material governed by the terms" - , "of that license document, provided that the further restriction does" - , "not survive such relicensing or conveying." - , "" - , " If you add terms to a covered work in accord with this section, you" - , "must place, in the relevant source files, a statement of the" - , "additional terms that apply to those files, or a notice indicating" - , "where to find the applicable terms." - , "" - , " Additional terms, permissive or non-permissive, may be stated in the" - , "form of a separately written license, or stated as exceptions;" - , "the above requirements apply either way." - , "" - , " 8. Termination." - , "" - , " You may not propagate or modify a covered work except as expressly" - , "provided under this License. Any attempt otherwise to propagate or" - , "modify it is void, and will automatically terminate your rights under" - , "this License (including any patent licenses granted under the third" - , "paragraph of section 11)." - , "" - , " However, if you cease all violation of this License, then your" - , "license from a particular copyright holder is reinstated (a)" - , "provisionally, unless and until the copyright holder explicitly and" - , "finally terminates your license, and (b) permanently, if the copyright" - , "holder fails to notify you of the violation by some reasonable means" - , "prior to 60 days after the cessation." 
- , "" - , " Moreover, your license from a particular copyright holder is" - , "reinstated permanently if the copyright holder notifies you of the" - , "violation by some reasonable means, this is the first time you have" - , "received notice of violation of this License (for any work) from that" - , "copyright holder, and you cure the violation prior to 30 days after" - , "your receipt of the notice." - , "" - , " Termination of your rights under this section does not terminate the" - , "licenses of parties who have received copies or rights from you under" - , "this License. If your rights have been terminated and not permanently" - , "reinstated, you do not qualify to receive new licenses for the same" - , "material under section 10." - , "" - , " 9. Acceptance Not Required for Having Copies." - , "" - , " You are not required to accept this License in order to receive or" - , "run a copy of the Program. Ancillary propagation of a covered work" - , "occurring solely as a consequence of using peer-to-peer transmission" - , "to receive a copy likewise does not require acceptance. However," - , "nothing other than this License grants you permission to propagate or" - , "modify any covered work. These actions infringe copyright if you do" - , "not accept this License. Therefore, by modifying or propagating a" - , "covered work, you indicate your acceptance of this License to do so." - , "" - , " 10. Automatic Licensing of Downstream Recipients." - , "" - , " Each time you convey a covered work, the recipient automatically" - , "receives a license from the original licensors, to run, modify and" - , "propagate that work, subject to this License. You are not responsible" - , "for enforcing compliance by third parties with this License." - , "" - , " An \"entity transaction\" is a transaction transferring control of an" - , "organization, or substantially all assets of one, or subdividing an" - , "organization, or merging organizations. 
If propagation of a covered" - , "work results from an entity transaction, each party to that" - , "transaction who receives a copy of the work also receives whatever" - , "licenses to the work the party's predecessor in interest had or could" - , "give under the previous paragraph, plus a right to possession of the" - , "Corresponding Source of the work from the predecessor in interest, if" - , "the predecessor has it or can get it with reasonable efforts." - , "" - , " You may not impose any further restrictions on the exercise of the" - , "rights granted or affirmed under this License. For example, you may" - , "not impose a license fee, royalty, or other charge for exercise of" - , "rights granted under this License, and you may not initiate litigation" - , "(including a cross-claim or counterclaim in a lawsuit) alleging that" - , "any patent claim is infringed by making, using, selling, offering for" - , "sale, or importing the Program or any portion of it." - , "" - , " 11. Patents." - , "" - , " A \"contributor\" is a copyright holder who authorizes use under this" - , "License of the Program or a work on which the Program is based. The" - , "work thus licensed is called the contributor's \"contributor version\"." - , "" - , " A contributor's \"essential patent claims\" are all patent claims" - , "owned or controlled by the contributor, whether already acquired or" - , "hereafter acquired, that would be infringed by some manner, permitted" - , "by this License, of making, using, or selling its contributor version," - , "but do not include claims that would be infringed only as a" - , "consequence of further modification of the contributor version. For" - , "purposes of this definition, \"control\" includes the right to grant" - , "patent sublicenses in a manner consistent with the requirements of" - , "this License." 
- , "" - , " Each contributor grants you a non-exclusive, worldwide, royalty-free" - , "patent license under the contributor's essential patent claims, to" - , "make, use, sell, offer for sale, import and otherwise run, modify and" - , "propagate the contents of its contributor version." - , "" - , " In the following three paragraphs, a \"patent license\" is any express" - , "agreement or commitment, however denominated, not to enforce a patent" - , "(such as an express permission to practice a patent or covenant not to" - , "sue for patent infringement). To \"grant\" such a patent license to a" - , "party means to make such an agreement or commitment not to enforce a" - , "patent against the party." - , "" - , " If you convey a covered work, knowingly relying on a patent license," - , "and the Corresponding Source of the work is not available for anyone" - , "to copy, free of charge and under the terms of this License, through a" - , "publicly available network server or other readily accessible means," - , "then you must either (1) cause the Corresponding Source to be so" - , "available, or (2) arrange to deprive yourself of the benefit of the" - , "patent license for this particular work, or (3) arrange, in a manner" - , "consistent with the requirements of this License, to extend the patent" - , "license to downstream recipients. \"Knowingly relying\" means you have" - , "actual knowledge that, but for the patent license, your conveying the" - , "covered work in a country, or your recipient's use of the covered work" - , "in a country, would infringe one or more identifiable patents in that" - , "country that you have reason to believe are valid." 
- , "" - , " If, pursuant to or in connection with a single transaction or" - , "arrangement, you convey, or propagate by procuring conveyance of, a" - , "covered work, and grant a patent license to some of the parties" - , "receiving the covered work authorizing them to use, propagate, modify" - , "or convey a specific copy of the covered work, then the patent license" - , "you grant is automatically extended to all recipients of the covered" - , "work and works based on it." - , "" - , " A patent license is \"discriminatory\" if it does not include within" - , "the scope of its coverage, prohibits the exercise of, or is" - , "conditioned on the non-exercise of one or more of the rights that are" - , "specifically granted under this License. You may not convey a covered" - , "work if you are a party to an arrangement with a third party that is" - , "in the business of distributing software, under which you make payment" - , "to the third party based on the extent of your activity of conveying" - , "the work, and under which the third party grants, to any of the" - , "parties who would receive the covered work from you, a discriminatory" - , "patent license (a) in connection with copies of the covered work" - , "conveyed by you (or copies made from those copies), or (b) primarily" - , "for and in connection with specific products or compilations that" - , "contain the covered work, unless you entered into that arrangement," - , "or that patent license was granted, prior to 28 March 2007." - , "" - , " Nothing in this License shall be construed as excluding or limiting" - , "any implied license or other defenses to infringement that may" - , "otherwise be available to you under applicable patent law." - , "" - , " 12. No Surrender of Others' Freedom." - , "" - , " If conditions are imposed on you (whether by court order, agreement or" - , "otherwise) that contradict the conditions of this License, they do not" - , "excuse you from the conditions of this License. 
If you cannot convey a" - , "covered work so as to satisfy simultaneously your obligations under this" - , "License and any other pertinent obligations, then as a consequence you may" - , "not convey it at all. For example, if you agree to terms that obligate you" - , "to collect a royalty for further conveying from those to whom you convey" - , "the Program, the only way you could satisfy both those terms and this" - , "License would be to refrain entirely from conveying the Program." - , "" - , " 13. Use with the GNU Affero General Public License." - , "" - , " Notwithstanding any other provision of this License, you have" - , "permission to link or combine any covered work with a work licensed" - , "under version 3 of the GNU Affero General Public License into a single" - , "combined work, and to convey the resulting work. The terms of this" - , "License will continue to apply to the part which is the covered work," - , "but the special requirements of the GNU Affero General Public License," - , "section 13, concerning interaction through a network will apply to the" - , "combination as such." - , "" - , " 14. Revised Versions of this License." - , "" - , " The Free Software Foundation may publish revised and/or new versions of" - , "the GNU General Public License from time to time. Such new versions will" - , "be similar in spirit to the present version, but may differ in detail to" - , "address new problems or concerns." - , "" - , " Each version is given a distinguishing version number. If the" - , "Program specifies that a certain numbered version of the GNU General" - , "Public License \"or any later version\" applies to it, you have the" - , "option of following the terms and conditions either of that numbered" - , "version or of any later version published by the Free Software" - , "Foundation. 
If the Program does not specify a version number of the" - , "GNU General Public License, you may choose any version ever published" - , "by the Free Software Foundation." - , "" - , " If the Program specifies that a proxy can decide which future" - , "versions of the GNU General Public License can be used, that proxy's" - , "public statement of acceptance of a version permanently authorizes you" - , "to choose that version for the Program." - , "" - , " Later license versions may give you additional or different" - , "permissions. However, no additional obligations are imposed on any" - , "author or copyright holder as a result of your choosing to follow a" - , "later version." - , "" - , " 15. Disclaimer of Warranty." - , "" - , " THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY" - , "APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT" - , "HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY" - , "OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO," - , "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" - , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM" - , "IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF" - , "ALL NECESSARY SERVICING, REPAIR OR CORRECTION." - , "" - , " 16. Limitation of Liability." 
- , "" - , " IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" - , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS" - , "THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY" - , "GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE" - , "USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF" - , "DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD" - , "PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS)," - , "EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF" - , "SUCH DAMAGES." - , "" - , " 17. Interpretation of Sections 15 and 16." - , "" - , " If the disclaimer of warranty and limitation of liability provided" - , "above cannot be given local legal effect according to their terms," - , "reviewing courts shall apply local law that most closely approximates" - , "an absolute waiver of all civil liability in connection with the" - , "Program, unless a warranty or assumption of liability accompanies a" - , "copy of the Program in return for a fee." - , "" - , " END OF TERMS AND CONDITIONS" - , "" - , " How to Apply These Terms to Your New Programs" - , "" - , " If you develop a new program, and you want it to be of the greatest" - , "possible use to the public, the best way to achieve this is to make it" - , "free software which everyone can redistribute and change under these terms." - , "" - , " To do so, attach the following notices to the program. It is safest" - , "to attach them to the start of each source file to most effectively" - , "state the exclusion of warranty; and each file should have at least" - , "the \"copyright\" line and a pointer to where the full notice is found." 
- , "" - , " " - , " Copyright (C) " - , "" - , " This program is free software: you can redistribute it and/or modify" - , " it under the terms of the GNU General Public License as published by" - , " the Free Software Foundation, either version 3 of the License, or" - , " (at your option) any later version." - , "" - , " This program is distributed in the hope that it will be useful," - , " but WITHOUT ANY WARRANTY; without even the implied warranty of" - , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" - , " GNU General Public License for more details." - , "" - , " You should have received a copy of the GNU General Public License" - , " along with this program. If not, see ." - , "" - , "Also add information on how to contact you by electronic and paper mail." - , "" - , " If the program does terminal interaction, make it output a short" - , "notice like this when it starts in an interactive mode:" - , "" - , " Copyright (C) " - , " This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'." - , " This is free software, and you are welcome to redistribute it" - , " under certain conditions; type `show c' for details." - , "" - , "The hypothetical commands `show w' and `show c' should show the appropriate" - , "parts of the General Public License. Of course, your program's commands" - , "might be different; for a GUI interface, you would use an \"about box\"." - , "" - , " You should also get your employer (if you work as a programmer) or school," - , "if any, to sign a \"copyright disclaimer\" for the program, if necessary." - , "For more information on this, and how to apply and follow the GNU GPL, see" - , "." - , "" - , " The GNU General Public License does not permit incorporating your program" - , "into proprietary programs. If your program is a subroutine library, you" - , "may consider it more useful to permit linking proprietary applications with" - , "the library. 
If this is what you want to do, use the GNU Lesser General" - , "Public License instead of this License. But first, please read" - , "." - ] - -agplv3 :: License -agplv3 = unlines - [ " GNU AFFERO GENERAL PUBLIC LICENSE" - , " Version 3, 19 November 2007" - , "" - , " Copyright (C) 2007 Free Software Foundation, Inc. " - , " Everyone is permitted to copy and distribute verbatim copies" - , " of this license document, but changing it is not allowed." - , "" - , " Preamble" - , "" - , " The GNU Affero General Public License is a free, copyleft license for" - , "software and other kinds of works, specifically designed to ensure" - , "cooperation with the community in the case of network server software." - , "" - , " The licenses for most software and other practical works are designed" - , "to take away your freedom to share and change the works. By contrast," - , "our General Public Licenses are intended to guarantee your freedom to" - , "share and change all versions of a program--to make sure it remains free" - , "software for all its users." - , "" - , " When we speak of free software, we are referring to freedom, not" - , "price. Our General Public Licenses are designed to make sure that you" - , "have the freedom to distribute copies of free software (and charge for" - , "them if you wish), that you receive source code or can get it if you" - , "want it, that you can change the software or use pieces of it in new" - , "free programs, and that you know you can do these things." - , "" - , " Developers that use our General Public Licenses protect your rights" - , "with two steps: (1) assert copyright on the software, and (2) offer" - , "you this License which gives you legal permission to copy, distribute" - , "and/or modify the software." 
- , "" - , " A secondary benefit of defending all users' freedom is that" - , "improvements made in alternate versions of the program, if they" - , "receive widespread use, become available for other developers to" - , "incorporate. Many developers of free software are heartened and" - , "encouraged by the resulting cooperation. However, in the case of" - , "software used on network servers, this result may fail to come about." - , "The GNU General Public License permits making a modified version and" - , "letting the public access it on a server without ever releasing its" - , "source code to the public." - , "" - , " The GNU Affero General Public License is designed specifically to" - , "ensure that, in such cases, the modified source code becomes available" - , "to the community. It requires the operator of a network server to" - , "provide the source code of the modified version running there to the" - , "users of that server. Therefore, public use of a modified version, on" - , "a publicly accessible server, gives the public access to the source" - , "code of the modified version." - , "" - , " An older license, called the Affero General Public License and" - , "published by Affero, was designed to accomplish similar goals. This is" - , "a different license, not a version of the Affero GPL, but Affero has" - , "released a new version of the Affero GPL which permits relicensing under" - , "this license." - , "" - , " The precise terms and conditions for copying, distribution and" - , "modification follow." - , "" - , " TERMS AND CONDITIONS" - , "" - , " 0. Definitions." - , "" - , " \"This License\" refers to version 3 of the GNU Affero General Public License." - , "" - , " \"Copyright\" also means copyright-like laws that apply to other kinds of" - , "works, such as semiconductor masks." - , "" - , " \"The Program\" refers to any copyrightable work licensed under this" - , "License. Each licensee is addressed as \"you\". 
\"Licensees\" and" - , "\"recipients\" may be individuals or organizations." - , "" - , " To \"modify\" a work means to copy from or adapt all or part of the work" - , "in a fashion requiring copyright permission, other than the making of an" - , "exact copy. The resulting work is called a \"modified version\" of the" - , "earlier work or a work \"based on\" the earlier work." - , "" - , " A \"covered work\" means either the unmodified Program or a work based" - , "on the Program." - , "" - , " To \"propagate\" a work means to do anything with it that, without" - , "permission, would make you directly or secondarily liable for" - , "infringement under applicable copyright law, except executing it on a" - , "computer or modifying a private copy. Propagation includes copying," - , "distribution (with or without modification), making available to the" - , "public, and in some countries other activities as well." - , "" - , " To \"convey\" a work means any kind of propagation that enables other" - , "parties to make or receive copies. Mere interaction with a user through" - , "a computer network, with no transfer of a copy, is not conveying." - , "" - , " An interactive user interface displays \"Appropriate Legal Notices\"" - , "to the extent that it includes a convenient and prominently visible" - , "feature that (1) displays an appropriate copyright notice, and (2)" - , "tells the user that there is no warranty for the work (except to the" - , "extent that warranties are provided), that licensees may convey the" - , "work under this License, and how to view a copy of this License. If" - , "the interface presents a list of user commands or options, such as a" - , "menu, a prominent item in the list meets this criterion." - , "" - , " 1. Source Code." - , "" - , " The \"source code\" for a work means the preferred form of the work" - , "for making modifications to it. \"Object code\" means any non-source" - , "form of a work." 
- , "" - , " A \"Standard Interface\" means an interface that either is an official" - , "standard defined by a recognized standards body, or, in the case of" - , "interfaces specified for a particular programming language, one that" - , "is widely used among developers working in that language." - , "" - , " The \"System Libraries\" of an executable work include anything, other" - , "than the work as a whole, that (a) is included in the normal form of" - , "packaging a Major Component, but which is not part of that Major" - , "Component, and (b) serves only to enable use of the work with that" - , "Major Component, or to implement a Standard Interface for which an" - , "implementation is available to the public in source code form. A" - , "\"Major Component\", in this context, means a major essential component" - , "(kernel, window system, and so on) of the specific operating system" - , "(if any) on which the executable work runs, or a compiler used to" - , "produce the work, or an object code interpreter used to run it." - , "" - , " The \"Corresponding Source\" for a work in object code form means all" - , "the source code needed to generate, install, and (for an executable" - , "work) run the object code and to modify the work, including scripts to" - , "control those activities. However, it does not include the work's" - , "System Libraries, or general-purpose tools or generally available free" - , "programs which are used unmodified in performing those activities but" - , "which are not part of the work. For example, Corresponding Source" - , "includes interface definition files associated with source files for" - , "the work, and the source code for shared libraries and dynamically" - , "linked subprograms that the work is specifically designed to require," - , "such as by intimate data communication or control flow between those" - , "subprograms and other parts of the work." 
- , "" - , " The Corresponding Source need not include anything that users" - , "can regenerate automatically from other parts of the Corresponding" - , "Source." - , "" - , " The Corresponding Source for a work in source code form is that" - , "same work." - , "" - , " 2. Basic Permissions." - , "" - , " All rights granted under this License are granted for the term of" - , "copyright on the Program, and are irrevocable provided the stated" - , "conditions are met. This License explicitly affirms your unlimited" - , "permission to run the unmodified Program. The output from running a" - , "covered work is covered by this License only if the output, given its" - , "content, constitutes a covered work. This License acknowledges your" - , "rights of fair use or other equivalent, as provided by copyright law." - , "" - , " You may make, run and propagate covered works that you do not" - , "convey, without conditions so long as your license otherwise remains" - , "in force. You may convey covered works to others for the sole purpose" - , "of having them make modifications exclusively for you, or provide you" - , "with facilities for running those works, provided that you comply with" - , "the terms of this License in conveying all material for which you do" - , "not control copyright. Those thus making or running the covered works" - , "for you must do so exclusively on your behalf, under your direction" - , "and control, on terms that prohibit them from making any copies of" - , "your copyrighted material outside their relationship with you." - , "" - , " Conveying under any other circumstances is permitted solely under" - , "the conditions stated below. Sublicensing is not allowed; section 10" - , "makes it unnecessary." - , "" - , " 3. Protecting Users' Legal Rights From Anti-Circumvention Law." 
- , "" - , " No covered work shall be deemed part of an effective technological" - , "measure under any applicable law fulfilling obligations under article" - , "11 of the WIPO copyright treaty adopted on 20 December 1996, or" - , "similar laws prohibiting or restricting circumvention of such" - , "measures." - , "" - , " When you convey a covered work, you waive any legal power to forbid" - , "circumvention of technological measures to the extent such circumvention" - , "is effected by exercising rights under this License with respect to" - , "the covered work, and you disclaim any intention to limit operation or" - , "modification of the work as a means of enforcing, against the work's" - , "users, your or third parties' legal rights to forbid circumvention of" - , "technological measures." - , "" - , " 4. Conveying Verbatim Copies." - , "" - , " You may convey verbatim copies of the Program's source code as you" - , "receive it, in any medium, provided that you conspicuously and" - , "appropriately publish on each copy an appropriate copyright notice;" - , "keep intact all notices stating that this License and any" - , "non-permissive terms added in accord with section 7 apply to the code;" - , "keep intact all notices of the absence of any warranty; and give all" - , "recipients a copy of this License along with the Program." - , "" - , " You may charge any price or no price for each copy that you convey," - , "and you may offer support or warranty protection for a fee." - , "" - , " 5. Conveying Modified Source Versions." - , "" - , " You may convey a work based on the Program, or the modifications to" - , "produce it from the Program, in the form of source code under the" - , "terms of section 4, provided that you also meet all of these conditions:" - , "" - , " a) The work must carry prominent notices stating that you modified" - , " it, and giving a relevant date." 
- , "" - , " b) The work must carry prominent notices stating that it is" - , " released under this License and any conditions added under section" - , " 7. This requirement modifies the requirement in section 4 to" - , " \"keep intact all notices\"." - , "" - , " c) You must license the entire work, as a whole, under this" - , " License to anyone who comes into possession of a copy. This" - , " License will therefore apply, along with any applicable section 7" - , " additional terms, to the whole of the work, and all its parts," - , " regardless of how they are packaged. This License gives no" - , " permission to license the work in any other way, but it does not" - , " invalidate such permission if you have separately received it." - , "" - , " d) If the work has interactive user interfaces, each must display" - , " Appropriate Legal Notices; however, if the Program has interactive" - , " interfaces that do not display Appropriate Legal Notices, your" - , " work need not make them do so." - , "" - , " A compilation of a covered work with other separate and independent" - , "works, which are not by their nature extensions of the covered work," - , "and which are not combined with it such as to form a larger program," - , "in or on a volume of a storage or distribution medium, is called an" - , "\"aggregate\" if the compilation and its resulting copyright are not" - , "used to limit the access or legal rights of the compilation's users" - , "beyond what the individual works permit. Inclusion of a covered work" - , "in an aggregate does not cause this License to apply to the other" - , "parts of the aggregate." - , "" - , " 6. Conveying Non-Source Forms." 
- , "" - , " You may convey a covered work in object code form under the terms" - , "of sections 4 and 5, provided that you also convey the" - , "machine-readable Corresponding Source under the terms of this License," - , "in one of these ways:" - , "" - , " a) Convey the object code in, or embodied in, a physical product" - , " (including a physical distribution medium), accompanied by the" - , " Corresponding Source fixed on a durable physical medium" - , " customarily used for software interchange." - , "" - , " b) Convey the object code in, or embodied in, a physical product" - , " (including a physical distribution medium), accompanied by a" - , " written offer, valid for at least three years and valid for as" - , " long as you offer spare parts or customer support for that product" - , " model, to give anyone who possesses the object code either (1) a" - , " copy of the Corresponding Source for all the software in the" - , " product that is covered by this License, on a durable physical" - , " medium customarily used for software interchange, for a price no" - , " more than your reasonable cost of physically performing this" - , " conveying of source, or (2) access to copy the" - , " Corresponding Source from a network server at no charge." - , "" - , " c) Convey individual copies of the object code with a copy of the" - , " written offer to provide the Corresponding Source. This" - , " alternative is allowed only occasionally and noncommercially, and" - , " only if you received the object code with such an offer, in accord" - , " with subsection 6b." - , "" - , " d) Convey the object code by offering access from a designated" - , " place (gratis or for a charge), and offer equivalent access to the" - , " Corresponding Source in the same way through the same place at no" - , " further charge. You need not require recipients to copy the" - , " Corresponding Source along with the object code. 
If the place to" - , " copy the object code is a network server, the Corresponding Source" - , " may be on a different server (operated by you or a third party)" - , " that supports equivalent copying facilities, provided you maintain" - , " clear directions next to the object code saying where to find the" - , " Corresponding Source. Regardless of what server hosts the" - , " Corresponding Source, you remain obligated to ensure that it is" - , " available for as long as needed to satisfy these requirements." - , "" - , " e) Convey the object code using peer-to-peer transmission, provided" - , " you inform other peers where the object code and Corresponding" - , " Source of the work are being offered to the general public at no" - , " charge under subsection 6d." - , "" - , " A separable portion of the object code, whose source code is excluded" - , "from the Corresponding Source as a System Library, need not be" - , "included in conveying the object code work." - , "" - , " A \"User Product\" is either (1) a \"consumer product\", which means any" - , "tangible personal property which is normally used for personal, family," - , "or household purposes, or (2) anything designed or sold for incorporation" - , "into a dwelling. In determining whether a product is a consumer product," - , "doubtful cases shall be resolved in favor of coverage. For a particular" - , "product received by a particular user, \"normally used\" refers to a" - , "typical or common use of that class of product, regardless of the status" - , "of the particular user or of the way in which the particular user" - , "actually uses, or expects or is expected to use, the product. A product" - , "is a consumer product regardless of whether the product has substantial" - , "commercial, industrial or non-consumer uses, unless such uses represent" - , "the only significant mode of use of the product." 
- , "" - , " \"Installation Information\" for a User Product means any methods," - , "procedures, authorization keys, or other information required to install" - , "and execute modified versions of a covered work in that User Product from" - , "a modified version of its Corresponding Source. The information must" - , "suffice to ensure that the continued functioning of the modified object" - , "code is in no case prevented or interfered with solely because" - , "modification has been made." - , "" - , " If you convey an object code work under this section in, or with, or" - , "specifically for use in, a User Product, and the conveying occurs as" - , "part of a transaction in which the right of possession and use of the" - , "User Product is transferred to the recipient in perpetuity or for a" - , "fixed term (regardless of how the transaction is characterized), the" - , "Corresponding Source conveyed under this section must be accompanied" - , "by the Installation Information. But this requirement does not apply" - , "if neither you nor any third party retains the ability to install" - , "modified object code on the User Product (for example, the work has" - , "been installed in ROM)." - , "" - , " The requirement to provide Installation Information does not include a" - , "requirement to continue to provide support service, warranty, or updates" - , "for a work that has been modified or installed by the recipient, or for" - , "the User Product in which it has been modified or installed. Access to a" - , "network may be denied when the modification itself materially and" - , "adversely affects the operation of the network or violates the rules and" - , "protocols for communication across the network." 
- , "" - , " Corresponding Source conveyed, and Installation Information provided," - , "in accord with this section must be in a format that is publicly" - , "documented (and with an implementation available to the public in" - , "source code form), and must require no special password or key for" - , "unpacking, reading or copying." - , "" - , " 7. Additional Terms." - , "" - , " \"Additional permissions\" are terms that supplement the terms of this" - , "License by making exceptions from one or more of its conditions." - , "Additional permissions that are applicable to the entire Program shall" - , "be treated as though they were included in this License, to the extent" - , "that they are valid under applicable law. If additional permissions" - , "apply only to part of the Program, that part may be used separately" - , "under those permissions, but the entire Program remains governed by" - , "this License without regard to the additional permissions." - , "" - , " When you convey a copy of a covered work, you may at your option" - , "remove any additional permissions from that copy, or from any part of" - , "it. (Additional permissions may be written to require their own" - , "removal in certain cases when you modify the work.) You may place" - , "additional permissions on material, added by you to a covered work," - , "for which you have or can give appropriate copyright permission." 
- , "" - , " Notwithstanding any other provision of this License, for material you" - , "add to a covered work, you may (if authorized by the copyright holders of" - , "that material) supplement the terms of this License with terms:" - , "" - , " a) Disclaiming warranty or limiting liability differently from the" - , " terms of sections 15 and 16 of this License; or" - , "" - , " b) Requiring preservation of specified reasonable legal notices or" - , " author attributions in that material or in the Appropriate Legal" - , " Notices displayed by works containing it; or" - , "" - , " c) Prohibiting misrepresentation of the origin of that material, or" - , " requiring that modified versions of such material be marked in" - , " reasonable ways as different from the original version; or" - , "" - , " d) Limiting the use for publicity purposes of names of licensors or" - , " authors of the material; or" - , "" - , " e) Declining to grant rights under trademark law for use of some" - , " trade names, trademarks, or service marks; or" - , "" - , " f) Requiring indemnification of licensors and authors of that" - , " material by anyone who conveys the material (or modified versions of" - , " it) with contractual assumptions of liability to the recipient, for" - , " any liability that these contractual assumptions directly impose on" - , " those licensors and authors." - , "" - , " All other non-permissive additional terms are considered \"further" - , "restrictions\" within the meaning of section 10. If the Program as you" - , "received it, or any part of it, contains a notice stating that it is" - , "governed by this License along with a term that is a further" - , "restriction, you may remove that term. 
If a license document contains" - , "a further restriction but permits relicensing or conveying under this" - , "License, you may add to a covered work material governed by the terms" - , "of that license document, provided that the further restriction does" - , "not survive such relicensing or conveying." - , "" - , " If you add terms to a covered work in accord with this section, you" - , "must place, in the relevant source files, a statement of the" - , "additional terms that apply to those files, or a notice indicating" - , "where to find the applicable terms." - , "" - , " Additional terms, permissive or non-permissive, may be stated in the" - , "form of a separately written license, or stated as exceptions;" - , "the above requirements apply either way." - , "" - , " 8. Termination." - , "" - , " You may not propagate or modify a covered work except as expressly" - , "provided under this License. Any attempt otherwise to propagate or" - , "modify it is void, and will automatically terminate your rights under" - , "this License (including any patent licenses granted under the third" - , "paragraph of section 11)." - , "" - , " However, if you cease all violation of this License, then your" - , "license from a particular copyright holder is reinstated (a)" - , "provisionally, unless and until the copyright holder explicitly and" - , "finally terminates your license, and (b) permanently, if the copyright" - , "holder fails to notify you of the violation by some reasonable means" - , "prior to 60 days after the cessation." - , "" - , " Moreover, your license from a particular copyright holder is" - , "reinstated permanently if the copyright holder notifies you of the" - , "violation by some reasonable means, this is the first time you have" - , "received notice of violation of this License (for any work) from that" - , "copyright holder, and you cure the violation prior to 30 days after" - , "your receipt of the notice." 
- , "" - , " Termination of your rights under this section does not terminate the" - , "licenses of parties who have received copies or rights from you under" - , "this License. If your rights have been terminated and not permanently" - , "reinstated, you do not qualify to receive new licenses for the same" - , "material under section 10." - , "" - , " 9. Acceptance Not Required for Having Copies." - , "" - , " You are not required to accept this License in order to receive or" - , "run a copy of the Program. Ancillary propagation of a covered work" - , "occurring solely as a consequence of using peer-to-peer transmission" - , "to receive a copy likewise does not require acceptance. However," - , "nothing other than this License grants you permission to propagate or" - , "modify any covered work. These actions infringe copyright if you do" - , "not accept this License. Therefore, by modifying or propagating a" - , "covered work, you indicate your acceptance of this License to do so." - , "" - , " 10. Automatic Licensing of Downstream Recipients." - , "" - , " Each time you convey a covered work, the recipient automatically" - , "receives a license from the original licensors, to run, modify and" - , "propagate that work, subject to this License. You are not responsible" - , "for enforcing compliance by third parties with this License." - , "" - , " An \"entity transaction\" is a transaction transferring control of an" - , "organization, or substantially all assets of one, or subdividing an" - , "organization, or merging organizations. 
If propagation of a covered" - , "work results from an entity transaction, each party to that" - , "transaction who receives a copy of the work also receives whatever" - , "licenses to the work the party's predecessor in interest had or could" - , "give under the previous paragraph, plus a right to possession of the" - , "Corresponding Source of the work from the predecessor in interest, if" - , "the predecessor has it or can get it with reasonable efforts." - , "" - , " You may not impose any further restrictions on the exercise of the" - , "rights granted or affirmed under this License. For example, you may" - , "not impose a license fee, royalty, or other charge for exercise of" - , "rights granted under this License, and you may not initiate litigation" - , "(including a cross-claim or counterclaim in a lawsuit) alleging that" - , "any patent claim is infringed by making, using, selling, offering for" - , "sale, or importing the Program or any portion of it." - , "" - , " 11. Patents." - , "" - , " A \"contributor\" is a copyright holder who authorizes use under this" - , "License of the Program or a work on which the Program is based. The" - , "work thus licensed is called the contributor's \"contributor version\"." - , "" - , " A contributor's \"essential patent claims\" are all patent claims" - , "owned or controlled by the contributor, whether already acquired or" - , "hereafter acquired, that would be infringed by some manner, permitted" - , "by this License, of making, using, or selling its contributor version," - , "but do not include claims that would be infringed only as a" - , "consequence of further modification of the contributor version. For" - , "purposes of this definition, \"control\" includes the right to grant" - , "patent sublicenses in a manner consistent with the requirements of" - , "this License." 
- , "" - , " Each contributor grants you a non-exclusive, worldwide, royalty-free" - , "patent license under the contributor's essential patent claims, to" - , "make, use, sell, offer for sale, import and otherwise run, modify and" - , "propagate the contents of its contributor version." - , "" - , " In the following three paragraphs, a \"patent license\" is any express" - , "agreement or commitment, however denominated, not to enforce a patent" - , "(such as an express permission to practice a patent or covenant not to" - , "sue for patent infringement). To \"grant\" such a patent license to a" - , "party means to make such an agreement or commitment not to enforce a" - , "patent against the party." - , "" - , " If you convey a covered work, knowingly relying on a patent license," - , "and the Corresponding Source of the work is not available for anyone" - , "to copy, free of charge and under the terms of this License, through a" - , "publicly available network server or other readily accessible means," - , "then you must either (1) cause the Corresponding Source to be so" - , "available, or (2) arrange to deprive yourself of the benefit of the" - , "patent license for this particular work, or (3) arrange, in a manner" - , "consistent with the requirements of this License, to extend the patent" - , "license to downstream recipients. \"Knowingly relying\" means you have" - , "actual knowledge that, but for the patent license, your conveying the" - , "covered work in a country, or your recipient's use of the covered work" - , "in a country, would infringe one or more identifiable patents in that" - , "country that you have reason to believe are valid." 
- , "" - , " If, pursuant to or in connection with a single transaction or" - , "arrangement, you convey, or propagate by procuring conveyance of, a" - , "covered work, and grant a patent license to some of the parties" - , "receiving the covered work authorizing them to use, propagate, modify" - , "or convey a specific copy of the covered work, then the patent license" - , "you grant is automatically extended to all recipients of the covered" - , "work and works based on it." - , "" - , " A patent license is \"discriminatory\" if it does not include within" - , "the scope of its coverage, prohibits the exercise of, or is" - , "conditioned on the non-exercise of one or more of the rights that are" - , "specifically granted under this License. You may not convey a covered" - , "work if you are a party to an arrangement with a third party that is" - , "in the business of distributing software, under which you make payment" - , "to the third party based on the extent of your activity of conveying" - , "the work, and under which the third party grants, to any of the" - , "parties who would receive the covered work from you, a discriminatory" - , "patent license (a) in connection with copies of the covered work" - , "conveyed by you (or copies made from those copies), or (b) primarily" - , "for and in connection with specific products or compilations that" - , "contain the covered work, unless you entered into that arrangement," - , "or that patent license was granted, prior to 28 March 2007." - , "" - , " Nothing in this License shall be construed as excluding or limiting" - , "any implied license or other defenses to infringement that may" - , "otherwise be available to you under applicable patent law." - , "" - , " 12. No Surrender of Others' Freedom." - , "" - , " If conditions are imposed on you (whether by court order, agreement or" - , "otherwise) that contradict the conditions of this License, they do not" - , "excuse you from the conditions of this License. 
If you cannot convey a" - , "covered work so as to satisfy simultaneously your obligations under this" - , "License and any other pertinent obligations, then as a consequence you may" - , "not convey it at all. For example, if you agree to terms that obligate you" - , "to collect a royalty for further conveying from those to whom you convey" - , "the Program, the only way you could satisfy both those terms and this" - , "License would be to refrain entirely from conveying the Program." - , "" - , " 13. Remote Network Interaction; Use with the GNU General Public License." - , "" - , " Notwithstanding any other provision of this License, if you modify the" - , "Program, your modified version must prominently offer all users" - , "interacting with it remotely through a computer network (if your version" - , "supports such interaction) an opportunity to receive the Corresponding" - , "Source of your version by providing access to the Corresponding Source" - , "from a network server at no charge, through some standard or customary" - , "means of facilitating copying of software. This Corresponding Source" - , "shall include the Corresponding Source for any work covered by version 3" - , "of the GNU General Public License that is incorporated pursuant to the" - , "following paragraph." - , "" - , " Notwithstanding any other provision of this License, you have" - , "permission to link or combine any covered work with a work licensed" - , "under version 3 of the GNU General Public License into a single" - , "combined work, and to convey the resulting work. The terms of this" - , "License will continue to apply to the part which is the covered work," - , "but the work with which it is combined will remain governed by version" - , "3 of the GNU General Public License." - , "" - , " 14. Revised Versions of this License." - , "" - , " The Free Software Foundation may publish revised and/or new versions of" - , "the GNU Affero General Public License from time to time. 
Such new versions" - , "will be similar in spirit to the present version, but may differ in detail to" - , "address new problems or concerns." - , "" - , " Each version is given a distinguishing version number. If the" - , "Program specifies that a certain numbered version of the GNU Affero General" - , "Public License \"or any later version\" applies to it, you have the" - , "option of following the terms and conditions either of that numbered" - , "version or of any later version published by the Free Software" - , "Foundation. If the Program does not specify a version number of the" - , "GNU Affero General Public License, you may choose any version ever published" - , "by the Free Software Foundation." - , "" - , " If the Program specifies that a proxy can decide which future" - , "versions of the GNU Affero General Public License can be used, that proxy's" - , "public statement of acceptance of a version permanently authorizes you" - , "to choose that version for the Program." - , "" - , " Later license versions may give you additional or different" - , "permissions. However, no additional obligations are imposed on any" - , "author or copyright holder as a result of your choosing to follow a" - , "later version." - , "" - , " 15. Disclaimer of Warranty." - , "" - , " THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY" - , "APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT" - , "HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY" - , "OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO," - , "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" - , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM" - , "IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF" - , "ALL NECESSARY SERVICING, REPAIR OR CORRECTION." - , "" - , " 16. Limitation of Liability." 
- , "" - , " IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" - , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS" - , "THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY" - , "GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE" - , "USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF" - , "DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD" - , "PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS)," - , "EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF" - , "SUCH DAMAGES." - , "" - , " 17. Interpretation of Sections 15 and 16." - , "" - , " If the disclaimer of warranty and limitation of liability provided" - , "above cannot be given local legal effect according to their terms," - , "reviewing courts shall apply local law that most closely approximates" - , "an absolute waiver of all civil liability in connection with the" - , "Program, unless a warranty or assumption of liability accompanies a" - , "copy of the Program in return for a fee." - , "" - , " END OF TERMS AND CONDITIONS" - , "" - , " How to Apply These Terms to Your New Programs" - , "" - , " If you develop a new program, and you want it to be of the greatest" - , "possible use to the public, the best way to achieve this is to make it" - , "free software which everyone can redistribute and change under these terms." - , "" - , " To do so, attach the following notices to the program. It is safest" - , "to attach them to the start of each source file to most effectively" - , "state the exclusion of warranty; and each file should have at least" - , "the \"copyright\" line and a pointer to where the full notice is found." 
- , "" - , " " - , " Copyright (C) " - , "" - , " This program is free software: you can redistribute it and/or modify" - , " it under the terms of the GNU Affero General Public License as published by" - , " the Free Software Foundation, either version 3 of the License, or" - , " (at your option) any later version." - , "" - , " This program is distributed in the hope that it will be useful," - , " but WITHOUT ANY WARRANTY; without even the implied warranty of" - , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" - , " GNU Affero General Public License for more details." - , "" - , " You should have received a copy of the GNU Affero General Public License" - , " along with this program. If not, see ." - , "" - , "Also add information on how to contact you by electronic and paper mail." - , "" - , " If your software can interact with users remotely through a computer" - , "network, you should also make sure that it provides a way for users to" - , "get its source. For example, if your program is a web application, its" - , "interface could display a \"Source\" link that leads users to an archive" - , "of the code. There are many ways you could offer source, and different" - , "solutions will be better for different programs; see section 13 for the" - , "specific requirements." - , "" - , " You should also get your employer (if you work as a programmer) or school," - , "if any, to sign a \"copyright disclaimer\" for the program, if necessary." - , "For more information on this, and how to apply and follow the GNU AGPL, see" - , "." - ] - -lgpl21 :: License -lgpl21 = unlines - [ " GNU LESSER GENERAL PUBLIC LICENSE" - , " Version 2.1, February 1999" - , "" - , " Copyright (C) 1991, 1999 Free Software Foundation, Inc." - , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" - , " Everyone is permitted to copy and distribute verbatim copies" - , " of this license document, but changing it is not allowed." 
- , "" - , "[This is the first released version of the Lesser GPL. It also counts" - , " as the successor of the GNU Library Public License, version 2, hence" - , " the version number 2.1.]" - , "" - , " Preamble" - , "" - , " The licenses for most software are designed to take away your" - , "freedom to share and change it. By contrast, the GNU General Public" - , "Licenses are intended to guarantee your freedom to share and change" - , "free software--to make sure the software is free for all its users." - , "" - , " This license, the Lesser General Public License, applies to some" - , "specially designated software packages--typically libraries--of the" - , "Free Software Foundation and other authors who decide to use it. You" - , "can use it too, but we suggest you first think carefully about whether" - , "this license or the ordinary General Public License is the better" - , "strategy to use in any particular case, based on the explanations below." - , "" - , " When we speak of free software, we are referring to freedom of use," - , "not price. Our General Public Licenses are designed to make sure that" - , "you have the freedom to distribute copies of free software (and charge" - , "for this service if you wish); that you receive source code or can get" - , "it if you want it; that you can change the software and use pieces of" - , "it in new free programs; and that you are informed that you can do" - , "these things." - , "" - , " To protect your rights, we need to make restrictions that forbid" - , "distributors to deny you these rights or to ask you to surrender these" - , "rights. These restrictions translate to certain responsibilities for" - , "you if you distribute copies of the library or if you modify it." - , "" - , " For example, if you distribute copies of the library, whether gratis" - , "or for a fee, you must give the recipients all the rights that we gave" - , "you. You must make sure that they, too, receive or can get the source" - , "code. 
If you link other code with the library, you must provide" - , "complete object files to the recipients, so that they can relink them" - , "with the library after making changes to the library and recompiling" - , "it. And you must show them these terms so they know their rights." - , "" - , " We protect your rights with a two-step method: (1) we copyright the" - , "library, and (2) we offer you this license, which gives you legal" - , "permission to copy, distribute and/or modify the library." - , "" - , " To protect each distributor, we want to make it very clear that" - , "there is no warranty for the free library. Also, if the library is" - , "modified by someone else and passed on, the recipients should know" - , "that what they have is not the original version, so that the original" - , "author's reputation will not be affected by problems that might be" - , "introduced by others." - , "" - , " Finally, software patents pose a constant threat to the existence of" - , "any free program. We wish to make sure that a company cannot" - , "effectively restrict the users of a free program by obtaining a" - , "restrictive license from a patent holder. Therefore, we insist that" - , "any patent license obtained for a version of the library must be" - , "consistent with the full freedom of use specified in this license." - , "" - , " Most GNU software, including some libraries, is covered by the" - , "ordinary GNU General Public License. This license, the GNU Lesser" - , "General Public License, applies to certain designated libraries, and" - , "is quite different from the ordinary General Public License. We use" - , "this license for certain libraries in order to permit linking those" - , "libraries into non-free programs." - , "" - , " When a program is linked with a library, whether statically or using" - , "a shared library, the combination of the two is legally speaking a" - , "combined work, a derivative of the original library. 
The ordinary" - , "General Public License therefore permits such linking only if the" - , "entire combination fits its criteria of freedom. The Lesser General" - , "Public License permits more lax criteria for linking other code with" - , "the library." - , "" - , " We call this license the \"Lesser\" General Public License because it" - , "does Less to protect the user's freedom than the ordinary General" - , "Public License. It also provides other free software developers Less" - , "of an advantage over competing non-free programs. These disadvantages" - , "are the reason we use the ordinary General Public License for many" - , "libraries. However, the Lesser license provides advantages in certain" - , "special circumstances." - , "" - , " For example, on rare occasions, there may be a special need to" - , "encourage the widest possible use of a certain library, so that it becomes" - , "a de-facto standard. To achieve this, non-free programs must be" - , "allowed to use the library. A more frequent case is that a free" - , "library does the same job as widely used non-free libraries. In this" - , "case, there is little to gain by limiting the free library to free" - , "software only, so we use the Lesser General Public License." - , "" - , " In other cases, permission to use a particular library in non-free" - , "programs enables a greater number of people to use a large body of" - , "free software. For example, permission to use the GNU C Library in" - , "non-free programs enables many more people to use the whole GNU" - , "operating system, as well as its variant, the GNU/Linux operating" - , "system." - , "" - , " Although the Lesser General Public License is Less protective of the" - , "users' freedom, it does ensure that the user of a program that is" - , "linked with the Library has the freedom and the wherewithal to run" - , "that program using a modified version of the Library." 
- , "" - , " The precise terms and conditions for copying, distribution and" - , "modification follow. Pay close attention to the difference between a" - , "\"work based on the library\" and a \"work that uses the library\". The" - , "former contains code derived from the library, whereas the latter must" - , "be combined with the library in order to run." - , "" - , " GNU LESSER GENERAL PUBLIC LICENSE" - , " TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION" - , "" - , " 0. This License Agreement applies to any software library or other" - , "program which contains a notice placed by the copyright holder or" - , "other authorized party saying it may be distributed under the terms of" - , "this Lesser General Public License (also called \"this License\")." - , "Each licensee is addressed as \"you\"." - , "" - , " A \"library\" means a collection of software functions and/or data" - , "prepared so as to be conveniently linked with application programs" - , "(which use some of those functions and data) to form executables." - , "" - , " The \"Library\", below, refers to any such software library or work" - , "which has been distributed under these terms. A \"work based on the" - , "Library\" means either the Library or any derivative work under" - , "copyright law: that is to say, a work containing the Library or a" - , "portion of it, either verbatim or with modifications and/or translated" - , "straightforwardly into another language. (Hereinafter, translation is" - , "included without limitation in the term \"modification\".)" - , "" - , " \"Source code\" for a work means the preferred form of the work for" - , "making modifications to it. For a library, complete source code means" - , "all the source code for all modules it contains, plus any associated" - , "interface definition files, plus the scripts used to control compilation" - , "and installation of the library." 
- , "" - , " Activities other than copying, distribution and modification are not" - , "covered by this License; they are outside its scope. The act of" - , "running a program using the Library is not restricted, and output from" - , "such a program is covered only if its contents constitute a work based" - , "on the Library (independent of the use of the Library in a tool for" - , "writing it). Whether that is true depends on what the Library does" - , "and what the program that uses the Library does." - , "" - , " 1. You may copy and distribute verbatim copies of the Library's" - , "complete source code as you receive it, in any medium, provided that" - , "you conspicuously and appropriately publish on each copy an" - , "appropriate copyright notice and disclaimer of warranty; keep intact" - , "all the notices that refer to this License and to the absence of any" - , "warranty; and distribute a copy of this License along with the" - , "Library." - , "" - , " You may charge a fee for the physical act of transferring a copy," - , "and you may at your option offer warranty protection in exchange for a" - , "fee." - , "" - , " 2. You may modify your copy or copies of the Library or any portion" - , "of it, thus forming a work based on the Library, and copy and" - , "distribute such modifications or work under the terms of Section 1" - , "above, provided that you also meet all of these conditions:" - , "" - , " a) The modified work must itself be a software library." - , "" - , " b) You must cause the files modified to carry prominent notices" - , " stating that you changed the files and the date of any change." - , "" - , " c) You must cause the whole of the work to be licensed at no" - , " charge to all third parties under the terms of this License." 
- , "" - , " d) If a facility in the modified Library refers to a function or a" - , " table of data to be supplied by an application program that uses" - , " the facility, other than as an argument passed when the facility" - , " is invoked, then you must make a good faith effort to ensure that," - , " in the event an application does not supply such function or" - , " table, the facility still operates, and performs whatever part of" - , " its purpose remains meaningful." - , "" - , " (For example, a function in a library to compute square roots has" - , " a purpose that is entirely well-defined independent of the" - , " application. Therefore, Subsection 2d requires that any" - , " application-supplied function or table used by this function must" - , " be optional: if the application does not supply it, the square" - , " root function must still compute square roots.)" - , "" - , "These requirements apply to the modified work as a whole. If" - , "identifiable sections of that work are not derived from the Library," - , "and can be reasonably considered independent and separate works in" - , "themselves, then this License, and its terms, do not apply to those" - , "sections when you distribute them as separate works. But when you" - , "distribute the same sections as part of a whole which is a work based" - , "on the Library, the distribution of the whole must be on the terms of" - , "this License, whose permissions for other licensees extend to the" - , "entire whole, and thus to each and every part regardless of who wrote" - , "it." - , "" - , "Thus, it is not the intent of this section to claim rights or contest" - , "your rights to work written entirely by you; rather, the intent is to" - , "exercise the right to control the distribution of derivative or" - , "collective works based on the Library." 
- , "" - , "In addition, mere aggregation of another work not based on the Library" - , "with the Library (or with a work based on the Library) on a volume of" - , "a storage or distribution medium does not bring the other work under" - , "the scope of this License." - , "" - , " 3. You may opt to apply the terms of the ordinary GNU General Public" - , "License instead of this License to a given copy of the Library. To do" - , "this, you must alter all the notices that refer to this License, so" - , "that they refer to the ordinary GNU General Public License, version 2," - , "instead of to this License. (If a newer version than version 2 of the" - , "ordinary GNU General Public License has appeared, then you can specify" - , "that version instead if you wish.) Do not make any other change in" - , "these notices." - , "" - , " Once this change is made in a given copy, it is irreversible for" - , "that copy, so the ordinary GNU General Public License applies to all" - , "subsequent copies and derivative works made from that copy." - , "" - , " This option is useful when you wish to copy part of the code of" - , "the Library into a program that is not a library." - , "" - , " 4. You may copy and distribute the Library (or a portion or" - , "derivative of it, under Section 2) in object code or executable form" - , "under the terms of Sections 1 and 2 above provided that you accompany" - , "it with the complete corresponding machine-readable source code, which" - , "must be distributed under the terms of Sections 1 and 2 above on a" - , "medium customarily used for software interchange." - , "" - , " If distribution of object code is made by offering access to copy" - , "from a designated place, then offering equivalent access to copy the" - , "source code from the same place satisfies the requirement to" - , "distribute the source code, even though third parties are not" - , "compelled to copy the source along with the object code." - , "" - , " 5. 
A program that contains no derivative of any portion of the" - , "Library, but is designed to work with the Library by being compiled or" - , "linked with it, is called a \"work that uses the Library\". Such a" - , "work, in isolation, is not a derivative work of the Library, and" - , "therefore falls outside the scope of this License." - , "" - , " However, linking a \"work that uses the Library\" with the Library" - , "creates an executable that is a derivative of the Library (because it" - , "contains portions of the Library), rather than a \"work that uses the" - , "library\". The executable is therefore covered by this License." - , "Section 6 states terms for distribution of such executables." - , "" - , " When a \"work that uses the Library\" uses material from a header file" - , "that is part of the Library, the object code for the work may be a" - , "derivative work of the Library even though the source code is not." - , "Whether this is true is especially significant if the work can be" - , "linked without the Library, or if the work is itself a library. The" - , "threshold for this to be true is not precisely defined by law." - , "" - , " If such an object file uses only numerical parameters, data" - , "structure layouts and accessors, and small macros and small inline" - , "functions (ten lines or less in length), then the use of the object" - , "file is unrestricted, regardless of whether it is legally a derivative" - , "work. (Executables containing this object code plus portions of the" - , "Library will still fall under Section 6.)" - , "" - , " Otherwise, if the work is a derivative of the Library, you may" - , "distribute the object code for the work under the terms of Section 6." - , "Any executables containing that work also fall under Section 6," - , "whether or not they are linked directly with the Library itself." - , "" - , " 6. 
As an exception to the Sections above, you may also combine or" - , "link a \"work that uses the Library\" with the Library to produce a" - , "work containing portions of the Library, and distribute that work" - , "under terms of your choice, provided that the terms permit" - , "modification of the work for the customer's own use and reverse" - , "engineering for debugging such modifications." - , "" - , " You must give prominent notice with each copy of the work that the" - , "Library is used in it and that the Library and its use are covered by" - , "this License. You must supply a copy of this License. If the work" - , "during execution displays copyright notices, you must include the" - , "copyright notice for the Library among them, as well as a reference" - , "directing the user to the copy of this License. Also, you must do one" - , "of these things:" - , "" - , " a) Accompany the work with the complete corresponding" - , " machine-readable source code for the Library including whatever" - , " changes were used in the work (which must be distributed under" - , " Sections 1 and 2 above); and, if the work is an executable linked" - , " with the Library, with the complete machine-readable \"work that" - , " uses the Library\", as object code and/or source code, so that the" - , " user can modify the Library and then relink to produce a modified" - , " executable containing the modified Library. (It is understood" - , " that the user who changes the contents of definitions files in the" - , " Library will not necessarily be able to recompile the application" - , " to use the modified definitions.)" - , "" - , " b) Use a suitable shared library mechanism for linking with the" - , " Library. 
A suitable mechanism is one that (1) uses at run time a" - , " copy of the library already present on the user's computer system," - , " rather than copying library functions into the executable, and (2)" - , " will operate properly with a modified version of the library, if" - , " the user installs one, as long as the modified version is" - , " interface-compatible with the version that the work was made with." - , "" - , " c) Accompany the work with a written offer, valid for at" - , " least three years, to give the same user the materials" - , " specified in Subsection 6a, above, for a charge no more" - , " than the cost of performing this distribution." - , "" - , " d) If distribution of the work is made by offering access to copy" - , " from a designated place, offer equivalent access to copy the above" - , " specified materials from the same place." - , "" - , " e) Verify that the user has already received a copy of these" - , " materials or that you have already sent this user a copy." - , "" - , " For an executable, the required form of the \"work that uses the" - , "Library\" must include any data and utility programs needed for" - , "reproducing the executable from it. However, as a special exception," - , "the materials to be distributed need not include anything that is" - , "normally distributed (in either source or binary form) with the major" - , "components (compiler, kernel, and so on) of the operating system on" - , "which the executable runs, unless that component itself accompanies" - , "the executable." - , "" - , " It may happen that this requirement contradicts the license" - , "restrictions of other proprietary libraries that do not normally" - , "accompany the operating system. Such a contradiction means you cannot" - , "use both them and the Library together in an executable that you" - , "distribute." - , "" - , " 7. 
You may place library facilities that are a work based on the" - , "Library side-by-side in a single library together with other library" - , "facilities not covered by this License, and distribute such a combined" - , "library, provided that the separate distribution of the work based on" - , "the Library and of the other library facilities is otherwise" - , "permitted, and provided that you do these two things:" - , "" - , " a) Accompany the combined library with a copy of the same work" - , " based on the Library, uncombined with any other library" - , " facilities. This must be distributed under the terms of the" - , " Sections above." - , "" - , " b) Give prominent notice with the combined library of the fact" - , " that part of it is a work based on the Library, and explaining" - , " where to find the accompanying uncombined form of the same work." - , "" - , " 8. You may not copy, modify, sublicense, link with, or distribute" - , "the Library except as expressly provided under this License. Any" - , "attempt otherwise to copy, modify, sublicense, link with, or" - , "distribute the Library is void, and will automatically terminate your" - , "rights under this License. However, parties who have received copies," - , "or rights, from you under this License will not have their licenses" - , "terminated so long as such parties remain in full compliance." - , "" - , " 9. You are not required to accept this License, since you have not" - , "signed it. However, nothing else grants you permission to modify or" - , "distribute the Library or its derivative works. These actions are" - , "prohibited by law if you do not accept this License. Therefore, by" - , "modifying or distributing the Library (or any work based on the" - , "Library), you indicate your acceptance of this License to do so, and" - , "all its terms and conditions for copying, distributing or modifying" - , "the Library or works based on it." - , "" - , " 10. 
Each time you redistribute the Library (or any work based on the" - , "Library), the recipient automatically receives a license from the" - , "original licensor to copy, distribute, link with or modify the Library" - , "subject to these terms and conditions. You may not impose any further" - , "restrictions on the recipients' exercise of the rights granted herein." - , "You are not responsible for enforcing compliance by third parties with" - , "this License." - , "" - , " 11. If, as a consequence of a court judgment or allegation of patent" - , "infringement or for any other reason (not limited to patent issues)," - , "conditions are imposed on you (whether by court order, agreement or" - , "otherwise) that contradict the conditions of this License, they do not" - , "excuse you from the conditions of this License. If you cannot" - , "distribute so as to satisfy simultaneously your obligations under this" - , "License and any other pertinent obligations, then as a consequence you" - , "may not distribute the Library at all. For example, if a patent" - , "license would not permit royalty-free redistribution of the Library by" - , "all those who receive copies directly or indirectly through you, then" - , "the only way you could satisfy both it and this License would be to" - , "refrain entirely from distribution of the Library." - , "" - , "If any portion of this section is held invalid or unenforceable under any" - , "particular circumstance, the balance of the section is intended to apply," - , "and the section as a whole is intended to apply in other circumstances." - , "" - , "It is not the purpose of this section to induce you to infringe any" - , "patents or other property right claims or to contest validity of any" - , "such claims; this section has the sole purpose of protecting the" - , "integrity of the free software distribution system which is" - , "implemented by public license practices. 
Many people have made" - , "generous contributions to the wide range of software distributed" - , "through that system in reliance on consistent application of that" - , "system; it is up to the author/donor to decide if he or she is willing" - , "to distribute software through any other system and a licensee cannot" - , "impose that choice." - , "" - , "This section is intended to make thoroughly clear what is believed to" - , "be a consequence of the rest of this License." - , "" - , " 12. If the distribution and/or use of the Library is restricted in" - , "certain countries either by patents or by copyrighted interfaces, the" - , "original copyright holder who places the Library under this License may add" - , "an explicit geographical distribution limitation excluding those countries," - , "so that distribution is permitted only in or among countries not thus" - , "excluded. In such case, this License incorporates the limitation as if" - , "written in the body of this License." - , "" - , " 13. The Free Software Foundation may publish revised and/or new" - , "versions of the Lesser General Public License from time to time." - , "Such new versions will be similar in spirit to the present version," - , "but may differ in detail to address new problems or concerns." - , "" - , "Each version is given a distinguishing version number. If the Library" - , "specifies a version number of this License which applies to it and" - , "\"any later version\", you have the option of following the terms and" - , "conditions either of that version or of any later version published by" - , "the Free Software Foundation. If the Library does not specify a" - , "license version number, you may choose any version ever published by" - , "the Free Software Foundation." - , "" - , " 14. If you wish to incorporate parts of the Library into other free" - , "programs whose distribution conditions are incompatible with these," - , "write to the author to ask for permission. 
For software which is" - , "copyrighted by the Free Software Foundation, write to the Free" - , "Software Foundation; we sometimes make exceptions for this. Our" - , "decision will be guided by the two goals of preserving the free status" - , "of all derivatives of our free software and of promoting the sharing" - , "and reuse of software generally." - , "" - , " NO WARRANTY" - , "" - , " 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO" - , "WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW." - , "EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR" - , "OTHER PARTIES PROVIDE THE LIBRARY \"AS IS\" WITHOUT WARRANTY OF ANY" - , "KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE" - , "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" - , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE" - , "LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME" - , "THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION." - , "" - , " 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN" - , "WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY" - , "AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU" - , "FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR" - , "CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE" - , "LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING" - , "RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A" - , "FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF" - , "SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH" - , "DAMAGES." 
- , "" - , " END OF TERMS AND CONDITIONS" - , "" - , " How to Apply These Terms to Your New Libraries" - , "" - , " If you develop a new library, and you want it to be of the greatest" - , "possible use to the public, we recommend making it free software that" - , "everyone can redistribute and change. You can do so by permitting" - , "redistribution under these terms (or, alternatively, under the terms of the" - , "ordinary General Public License)." - , "" - , " To apply these terms, attach the following notices to the library. It is" - , "safest to attach them to the start of each source file to most effectively" - , "convey the exclusion of warranty; and each file should have at least the" - , "\"copyright\" line and a pointer to where the full notice is found." - , "" - , " " - , " Copyright (C) " - , "" - , " This library is free software; you can redistribute it and/or" - , " modify it under the terms of the GNU Lesser General Public" - , " License as published by the Free Software Foundation; either" - , " version 2.1 of the License, or (at your option) any later version." - , "" - , " This library is distributed in the hope that it will be useful," - , " but WITHOUT ANY WARRANTY; without even the implied warranty of" - , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU" - , " Lesser General Public License for more details." - , "" - , " You should have received a copy of the GNU Lesser General Public" - , " License along with this library; if not, write to the Free Software" - , " Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" - , "" - , "Also add information on how to contact you by electronic and paper mail." - , "" - , "You should also get your employer (if you work as a programmer) or your" - , "school, if any, to sign a \"copyright disclaimer\" for the library, if" - , "necessary. 
Here is a sample; alter the names:" - , "" - , " Yoyodyne, Inc., hereby disclaims all copyright interest in the" - , " library `Frob' (a library for tweaking knobs) written by James Random Hacker." - , "" - , " , 1 April 1990" - , " Ty Coon, President of Vice" - , "" - , "That's all there is to it!" - ] - -lgpl3 :: License -lgpl3 = unlines - [ " GNU LESSER GENERAL PUBLIC LICENSE" - , " Version 3, 29 June 2007" - , "" - , " Copyright (C) 2007 Free Software Foundation, Inc. " - , " Everyone is permitted to copy and distribute verbatim copies" - , " of this license document, but changing it is not allowed." - , "" - , "" - , " This version of the GNU Lesser General Public License incorporates" - , "the terms and conditions of version 3 of the GNU General Public" - , "License, supplemented by the additional permissions listed below." - , "" - , " 0. Additional Definitions." - , "" - , " As used herein, \"this License\" refers to version 3 of the GNU Lesser" - , "General Public License, and the \"GNU GPL\" refers to version 3 of the GNU" - , "General Public License." - , "" - , " \"The Library\" refers to a covered work governed by this License," - , "other than an Application or a Combined Work as defined below." - , "" - , " An \"Application\" is any work that makes use of an interface provided" - , "by the Library, but which is not otherwise based on the Library." - , "Defining a subclass of a class defined by the Library is deemed a mode" - , "of using an interface provided by the Library." - , "" - , " A \"Combined Work\" is a work produced by combining or linking an" - , "Application with the Library. The particular version of the Library" - , "with which the Combined Work was made is also called the \"Linked" - , "Version\"." 
- , "" - , " The \"Minimal Corresponding Source\" for a Combined Work means the" - , "Corresponding Source for the Combined Work, excluding any source code" - , "for portions of the Combined Work that, considered in isolation, are" - , "based on the Application, and not on the Linked Version." - , "" - , " The \"Corresponding Application Code\" for a Combined Work means the" - , "object code and/or source code for the Application, including any data" - , "and utility programs needed for reproducing the Combined Work from the" - , "Application, but excluding the System Libraries of the Combined Work." - , "" - , " 1. Exception to Section 3 of the GNU GPL." - , "" - , " You may convey a covered work under sections 3 and 4 of this License" - , "without being bound by section 3 of the GNU GPL." - , "" - , " 2. Conveying Modified Versions." - , "" - , " If you modify a copy of the Library, and, in your modifications, a" - , "facility refers to a function or data to be supplied by an Application" - , "that uses the facility (other than as an argument passed when the" - , "facility is invoked), then you may convey a copy of the modified" - , "version:" - , "" - , " a) under this License, provided that you make a good faith effort to" - , " ensure that, in the event an Application does not supply the" - , " function or data, the facility still operates, and performs" - , " whatever part of its purpose remains meaningful, or" - , "" - , " b) under the GNU GPL, with none of the additional permissions of" - , " this License applicable to that copy." - , "" - , " 3. Object Code Incorporating Material from Library Header Files." - , "" - , " The object code form of an Application may incorporate material from" - , "a header file that is part of the Library. 
You may convey such object" - , "code under terms of your choice, provided that, if the incorporated" - , "material is not limited to numerical parameters, data structure" - , "layouts and accessors, or small macros, inline functions and templates" - , "(ten or fewer lines in length), you do both of the following:" - , "" - , " a) Give prominent notice with each copy of the object code that the" - , " Library is used in it and that the Library and its use are" - , " covered by this License." - , "" - , " b) Accompany the object code with a copy of the GNU GPL and this license" - , " document." - , "" - , " 4. Combined Works." - , "" - , " You may convey a Combined Work under terms of your choice that," - , "taken together, effectively do not restrict modification of the" - , "portions of the Library contained in the Combined Work and reverse" - , "engineering for debugging such modifications, if you also do each of" - , "the following:" - , "" - , " a) Give prominent notice with each copy of the Combined Work that" - , " the Library is used in it and that the Library and its use are" - , " covered by this License." - , "" - , " b) Accompany the Combined Work with a copy of the GNU GPL and this license" - , " document." - , "" - , " c) For a Combined Work that displays copyright notices during" - , " execution, include the copyright notice for the Library among" - , " these notices, as well as a reference directing the user to the" - , " copies of the GNU GPL and this license document." - , "" - , " d) Do one of the following:" - , "" - , " 0) Convey the Minimal Corresponding Source under the terms of this" - , " License, and the Corresponding Application Code in a form" - , " suitable for, and under terms that permit, the user to" - , " recombine or relink the Application with a modified version of" - , " the Linked Version to produce a modified Combined Work, in the" - , " manner specified by section 6 of the GNU GPL for conveying" - , " Corresponding Source." 
- , "" - , " 1) Use a suitable shared library mechanism for linking with the" - , " Library. A suitable mechanism is one that (a) uses at run time" - , " a copy of the Library already present on the user's computer" - , " system, and (b) will operate properly with a modified version" - , " of the Library that is interface-compatible with the Linked" - , " Version." - , "" - , " e) Provide Installation Information, but only if you would otherwise" - , " be required to provide such information under section 6 of the" - , " GNU GPL, and only to the extent that such information is" - , " necessary to install and execute a modified version of the" - , " Combined Work produced by recombining or relinking the" - , " Application with a modified version of the Linked Version. (If" - , " you use option 4d0, the Installation Information must accompany" - , " the Minimal Corresponding Source and Corresponding Application" - , " Code. If you use option 4d1, you must provide the Installation" - , " Information in the manner specified by section 6 of the GNU GPL" - , " for conveying Corresponding Source.)" - , "" - , " 5. Combined Libraries." - , "" - , " You may place library facilities that are a work based on the" - , "Library side by side in a single library together with other library" - , "facilities that are not Applications and are not covered by this" - , "License, and convey such a combined library under terms of your" - , "choice, if you do both of the following:" - , "" - , " a) Accompany the combined library with a copy of the same work based" - , " on the Library, uncombined with any other library facilities," - , " conveyed under the terms of this License." - , "" - , " b) Give prominent notice with the combined library that part of it" - , " is a work based on the Library, and explaining where to find the" - , " accompanying uncombined form of the same work." - , "" - , " 6. Revised Versions of the GNU Lesser General Public License." 
- , "" - , " The Free Software Foundation may publish revised and/or new versions" - , "of the GNU Lesser General Public License from time to time. Such new" - , "versions will be similar in spirit to the present version, but may" - , "differ in detail to address new problems or concerns." - , "" - , " Each version is given a distinguishing version number. If the" - , "Library as you received it specifies that a certain numbered version" - , "of the GNU Lesser General Public License \"or any later version\"" - , "applies to it, you have the option of following the terms and" - , "conditions either of that published version or of any later version" - , "published by the Free Software Foundation. If the Library as you" - , "received it does not specify a version number of the GNU Lesser" - , "General Public License, you may choose any version of the GNU Lesser" - , "General Public License ever published by the Free Software Foundation." - , "" - , " If the Library as you received it specifies that a proxy can decide" - , "whether future versions of the GNU Lesser General Public License shall" - , "apply, that proxy's public statement of acceptance of any version is" - , "permanent authorization for you to choose that version for the" - , "Library." - ] - -apache20 :: License -apache20 = unlines - [ "" - , " Apache License" - , " Version 2.0, January 2004" - , " http://www.apache.org/licenses/" - , "" - , " TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION" - , "" - , " 1. Definitions." - , "" - , " \"License\" shall mean the terms and conditions for use, reproduction," - , " and distribution as defined by Sections 1 through 9 of this document." - , "" - , " \"Licensor\" shall mean the copyright owner or entity authorized by" - , " the copyright owner that is granting the License." 
- , "" - , " \"Legal Entity\" shall mean the union of the acting entity and all" - , " other entities that control, are controlled by, or are under common" - , " control with that entity. For the purposes of this definition," - , " \"control\" means (i) the power, direct or indirect, to cause the" - , " direction or management of such entity, whether by contract or" - , " otherwise, or (ii) ownership of fifty percent (50%) or more of the" - , " outstanding shares, or (iii) beneficial ownership of such entity." - , "" - , " \"You\" (or \"Your\") shall mean an individual or Legal Entity" - , " exercising permissions granted by this License." - , "" - , " \"Source\" form shall mean the preferred form for making modifications," - , " including but not limited to software source code, documentation" - , " source, and configuration files." - , "" - , " \"Object\" form shall mean any form resulting from mechanical" - , " transformation or translation of a Source form, including but" - , " not limited to compiled object code, generated documentation," - , " and conversions to other media types." - , "" - , " \"Work\" shall mean the work of authorship, whether in Source or" - , " Object form, made available under the License, as indicated by a" - , " copyright notice that is included in or attached to the work" - , " (an example is provided in the Appendix below)." - , "" - , " \"Derivative Works\" shall mean any work, whether in Source or Object" - , " form, that is based on (or derived from) the Work and for which the" - , " editorial revisions, annotations, elaborations, or other modifications" - , " represent, as a whole, an original work of authorship. For the purposes" - , " of this License, Derivative Works shall not include works that remain" - , " separable from, or merely link (or bind by name) to the interfaces of," - , " the Work and Derivative Works thereof." 
- , "" - , " \"Contribution\" shall mean any work of authorship, including" - , " the original version of the Work and any modifications or additions" - , " to that Work or Derivative Works thereof, that is intentionally" - , " submitted to Licensor for inclusion in the Work by the copyright owner" - , " or by an individual or Legal Entity authorized to submit on behalf of" - , " the copyright owner. For the purposes of this definition, \"submitted\"" - , " means any form of electronic, verbal, or written communication sent" - , " to the Licensor or its representatives, including but not limited to" - , " communication on electronic mailing lists, source code control systems," - , " and issue tracking systems that are managed by, or on behalf of, the" - , " Licensor for the purpose of discussing and improving the Work, but" - , " excluding communication that is conspicuously marked or otherwise" - , " designated in writing by the copyright owner as \"Not a Contribution.\"" - , "" - , " \"Contributor\" shall mean Licensor and any individual or Legal Entity" - , " on behalf of whom a Contribution has been received by Licensor and" - , " subsequently incorporated within the Work." - , "" - , " 2. Grant of Copyright License. Subject to the terms and conditions of" - , " this License, each Contributor hereby grants to You a perpetual," - , " worldwide, non-exclusive, no-charge, royalty-free, irrevocable" - , " copyright license to reproduce, prepare Derivative Works of," - , " publicly display, publicly perform, sublicense, and distribute the" - , " Work and such Derivative Works in Source or Object form." - , "" - , " 3. Grant of Patent License. 
Subject to the terms and conditions of" - , " this License, each Contributor hereby grants to You a perpetual," - , " worldwide, non-exclusive, no-charge, royalty-free, irrevocable" - , " (except as stated in this section) patent license to make, have made," - , " use, offer to sell, sell, import, and otherwise transfer the Work," - , " where such license applies only to those patent claims licensable" - , " by such Contributor that are necessarily infringed by their" - , " Contribution(s) alone or by combination of their Contribution(s)" - , " with the Work to which such Contribution(s) was submitted. If You" - , " institute patent litigation against any entity (including a" - , " cross-claim or counterclaim in a lawsuit) alleging that the Work" - , " or a Contribution incorporated within the Work constitutes direct" - , " or contributory patent infringement, then any patent licenses" - , " granted to You under this License for that Work shall terminate" - , " as of the date such litigation is filed." - , "" - , " 4. Redistribution. 
You may reproduce and distribute copies of the" - , " Work or Derivative Works thereof in any medium, with or without" - , " modifications, and in Source or Object form, provided that You" - , " meet the following conditions:" - , "" - , " (a) You must give any other recipients of the Work or" - , " Derivative Works a copy of this License; and" - , "" - , " (b) You must cause any modified files to carry prominent notices" - , " stating that You changed the files; and" - , "" - , " (c) You must retain, in the Source form of any Derivative Works" - , " that You distribute, all copyright, patent, trademark, and" - , " attribution notices from the Source form of the Work," - , " excluding those notices that do not pertain to any part of" - , " the Derivative Works; and" - , "" - , " (d) If the Work includes a \"NOTICE\" text file as part of its" - , " distribution, then any Derivative Works that You distribute must" - , " include a readable copy of the attribution notices contained" - , " within such NOTICE file, excluding those notices that do not" - , " pertain to any part of the Derivative Works, in at least one" - , " of the following places: within a NOTICE text file distributed" - , " as part of the Derivative Works; within the Source form or" - , " documentation, if provided along with the Derivative Works; or," - , " within a display generated by the Derivative Works, if and" - , " wherever such third-party notices normally appear. The contents" - , " of the NOTICE file are for informational purposes only and" - , " do not modify the License. You may add Your own attribution" - , " notices within Derivative Works that You distribute, alongside" - , " or as an addendum to the NOTICE text from the Work, provided" - , " that such additional attribution notices cannot be construed" - , " as modifying the License." 
- , "" - , " You may add Your own copyright statement to Your modifications and" - , " may provide additional or different license terms and conditions" - , " for use, reproduction, or distribution of Your modifications, or" - , " for any such Derivative Works as a whole, provided Your use," - , " reproduction, and distribution of the Work otherwise complies with" - , " the conditions stated in this License." - , "" - , " 5. Submission of Contributions. Unless You explicitly state otherwise," - , " any Contribution intentionally submitted for inclusion in the Work" - , " by You to the Licensor shall be under the terms and conditions of" - , " this License, without any additional terms or conditions." - , " Notwithstanding the above, nothing herein shall supersede or modify" - , " the terms of any separate license agreement you may have executed" - , " with Licensor regarding such Contributions." - , "" - , " 6. Trademarks. This License does not grant permission to use the trade" - , " names, trademarks, service marks, or product names of the Licensor," - , " except as required for reasonable and customary use in describing the" - , " origin of the Work and reproducing the content of the NOTICE file." - , "" - , " 7. Disclaimer of Warranty. Unless required by applicable law or" - , " agreed to in writing, Licensor provides the Work (and each" - , " Contributor provides its Contributions) on an \"AS IS\" BASIS," - , " WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or" - , " implied, including, without limitation, any warranties or conditions" - , " of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A" - , " PARTICULAR PURPOSE. You are solely responsible for determining the" - , " appropriateness of using or redistributing the Work and assume any" - , " risks associated with Your exercise of permissions under this License." - , "" - , " 8. Limitation of Liability. 
In no event and under no legal theory," - , " whether in tort (including negligence), contract, or otherwise," - , " unless required by applicable law (such as deliberate and grossly" - , " negligent acts) or agreed to in writing, shall any Contributor be" - , " liable to You for damages, including any direct, indirect, special," - , " incidental, or consequential damages of any character arising as a" - , " result of this License or out of the use or inability to use the" - , " Work (including but not limited to damages for loss of goodwill," - , " work stoppage, computer failure or malfunction, or any and all" - , " other commercial damages or losses), even if such Contributor" - , " has been advised of the possibility of such damages." - , "" - , " 9. Accepting Warranty or Additional Liability. While redistributing" - , " the Work or Derivative Works thereof, You may choose to offer," - , " and charge a fee for, acceptance of support, warranty, indemnity," - , " or other liability obligations and/or rights consistent with this" - , " License. However, in accepting such obligations, You may act only" - , " on Your own behalf and on Your sole responsibility, not on behalf" - , " of any other Contributor, and only if You agree to indemnify," - , " defend, and hold each Contributor harmless for any liability" - , " incurred by, or claims asserted against, such Contributor by reason" - , " of your accepting any such warranty or additional liability." - , "" - , " END OF TERMS AND CONDITIONS" - , "" - , " APPENDIX: How to apply the Apache License to your work." - , "" - , " To apply the Apache License to your work, attach the following" - , " boilerplate notice, with the fields enclosed by brackets \"[]\"" - , " replaced with your own identifying information. (Don't include" - , " the brackets!) The text should be enclosed in the appropriate" - , " comment syntax for the file format. 
We also recommend that a" - , " file or class name and description of purpose be included on the" - , " same \"printed page\" as the copyright notice for easier" - , " identification within third-party archives." - , "" - , " Copyright [yyyy] [name of copyright owner]" - , "" - , " Licensed under the Apache License, Version 2.0 (the \"License\");" - , " you may not use this file except in compliance with the License." - , " You may obtain a copy of the License at" - , "" - , " http://www.apache.org/licenses/LICENSE-2.0" - , "" - , " Unless required by applicable law or agreed to in writing, software" - , " distributed under the License is distributed on an \"AS IS\" BASIS," - , " WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied." - , " See the License for the specific language governing permissions and" - , " limitations under the License." - ] - -mit :: String -> String -> License -mit authors year = unlines - [ "Copyright (c) " ++ year ++ " " ++ authors - , "" - , "Permission is hereby granted, free of charge, to any person obtaining" - , "a copy of this software and associated documentation files (the" - , "\"Software\"), to deal in the Software without restriction, including" - , "without limitation the rights to use, copy, modify, merge, publish," - , "distribute, sublicense, and/or sell copies of the Software, and to" - , "permit persons to whom the Software is furnished to do so, subject to" - , "the following conditions:" - , "" - , "The above copyright notice and this permission notice shall be included" - , "in all copies or substantial portions of the Software." - , "" - , "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND," - , "EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF" - , "MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT." 
- , "IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY" - , "CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT," - , "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE" - , "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE." - ] - -mpl20 :: License -mpl20 = unlines - [ "Mozilla Public License Version 2.0" - , "==================================" - , "" - , "1. Definitions" - , "--------------" - , "" - , "1.1. \"Contributor\"" - , " means each individual or legal entity that creates, contributes to" - , " the creation of, or owns Covered Software." - , "" - , "1.2. \"Contributor Version\"" - , " means the combination of the Contributions of others (if any) used" - , " by a Contributor and that particular Contributor's Contribution." - , "" - , "1.3. \"Contribution\"" - , " means Covered Software of a particular Contributor." - , "" - , "1.4. \"Covered Software\"" - , " means Source Code Form to which the initial Contributor has attached" - , " the notice in Exhibit A, the Executable Form of such Source Code" - , " Form, and Modifications of such Source Code Form, in each case" - , " including portions thereof." - , "" - , "1.5. \"Incompatible With Secondary Licenses\"" - , " means" - , "" - , " (a) that the initial Contributor has attached the notice described" - , " in Exhibit B to the Covered Software; or" - , "" - , " (b) that the Covered Software was made available under the terms of" - , " version 1.1 or earlier of the License, but not also under the" - , " terms of a Secondary License." - , "" - , "1.6. \"Executable Form\"" - , " means any form of the work other than Source Code Form." - , "" - , "1.7. \"Larger Work\"" - , " means a work that combines Covered Software with other material, in" - , " a separate file or files, that is not Covered Software." - , "" - , "1.8. \"License\"" - , " means this document." - , "" - , "1.9. 
\"Licensable\"" - , " means having the right to grant, to the maximum extent possible," - , " whether at the time of the initial grant or subsequently, any and" - , " all of the rights conveyed by this License." - , "" - , "1.10. \"Modifications\"" - , " means any of the following:" - , "" - , " (a) any file in Source Code Form that results from an addition to," - , " deletion from, or modification of the contents of Covered" - , " Software; or" - , "" - , " (b) any new file in Source Code Form that contains any Covered" - , " Software." - , "" - , "1.11. \"Patent Claims\" of a Contributor" - , " means any patent claim(s), including without limitation, method," - , " process, and apparatus claims, in any patent Licensable by such" - , " Contributor that would be infringed, but for the grant of the" - , " License, by the making, using, selling, offering for sale, having" - , " made, import, or transfer of either its Contributions or its" - , " Contributor Version." - , "" - , "1.12. \"Secondary License\"" - , " means either the GNU General Public License, Version 2.0, the GNU" - , " Lesser General Public License, Version 2.1, the GNU Affero General" - , " Public License, Version 3.0, or any later versions of those" - , " licenses." - , "" - , "1.13. \"Source Code Form\"" - , " means the form of the work preferred for making modifications." - , "" - , "1.14. \"You\" (or \"Your\")" - , " means an individual or a legal entity exercising rights under this" - , " License. For legal entities, \"You\" includes any entity that" - , " controls, is controlled by, or is under common control with You. For" - , " purposes of this definition, \"control\" means (a) the power, direct" - , " or indirect, to cause the direction or management of such entity," - , " whether by contract or otherwise, or (b) ownership of more than" - , " fifty percent (50%) of the outstanding shares or beneficial" - , " ownership of such entity." - , "" - , "2. 
License Grants and Conditions" - , "--------------------------------" - , "" - , "2.1. Grants" - , "" - , "Each Contributor hereby grants You a world-wide, royalty-free," - , "non-exclusive license:" - , "" - , "(a) under intellectual property rights (other than patent or trademark)" - , " Licensable by such Contributor to use, reproduce, make available," - , " modify, display, perform, distribute, and otherwise exploit its" - , " Contributions, either on an unmodified basis, with Modifications, or" - , " as part of a Larger Work; and" - , "" - , "(b) under Patent Claims of such Contributor to make, use, sell, offer" - , " for sale, have made, import, and otherwise transfer either its" - , " Contributions or its Contributor Version." - , "" - , "2.2. Effective Date" - , "" - , "The licenses granted in Section 2.1 with respect to any Contribution" - , "become effective for each Contribution on the date the Contributor first" - , "distributes such Contribution." - , "" - , "2.3. Limitations on Grant Scope" - , "" - , "The licenses granted in this Section 2 are the only rights granted under" - , "this License. No additional rights or licenses will be implied from the" - , "distribution or licensing of Covered Software under this License." - , "Notwithstanding Section 2.1(b) above, no patent license is granted by a" - , "Contributor:" - , "" - , "(a) for any code that a Contributor has removed from Covered Software;" - , " or" - , "" - , "(b) for infringements caused by: (i) Your and any other third party's" - , " modifications of Covered Software, or (ii) the combination of its" - , " Contributions with other software (except as part of its Contributor" - , " Version); or" - , "" - , "(c) under Patent Claims infringed by Covered Software in the absence of" - , " its Contributions." 
- , "" - , "This License does not grant any rights in the trademarks, service marks," - , "or logos of any Contributor (except as may be necessary to comply with" - , "the notice requirements in Section 3.4)." - , "" - , "2.4. Subsequent Licenses" - , "" - , "No Contributor makes additional grants as a result of Your choice to" - , "distribute the Covered Software under a subsequent version of this" - , "License (see Section 10.2) or under the terms of a Secondary License (if" - , "permitted under the terms of Section 3.3)." - , "" - , "2.5. Representation" - , "" - , "Each Contributor represents that the Contributor believes its" - , "Contributions are its original creation(s) or it has sufficient rights" - , "to grant the rights to its Contributions conveyed by this License." - , "" - , "2.6. Fair Use" - , "" - , "This License is not intended to limit any rights You have under" - , "applicable copyright doctrines of fair use, fair dealing, or other" - , "equivalents." - , "" - , "2.7. Conditions" - , "" - , "Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted" - , "in Section 2.1." - , "" - , "3. Responsibilities" - , "-------------------" - , "" - , "3.1. Distribution of Source Form" - , "" - , "All distribution of Covered Software in Source Code Form, including any" - , "Modifications that You create or to which You contribute, must be under" - , "the terms of this License. You must inform recipients that the Source" - , "Code Form of the Covered Software is governed by the terms of this" - , "License, and how they can obtain a copy of this License. You may not" - , "attempt to alter or restrict the recipients' rights in the Source Code" - , "Form." - , "" - , "3.2. 
Distribution of Executable Form" - , "" - , "If You distribute Covered Software in Executable Form then:" - , "" - , "(a) such Covered Software must also be made available in Source Code" - , " Form, as described in Section 3.1, and You must inform recipients of" - , " the Executable Form how they can obtain a copy of such Source Code" - , " Form by reasonable means in a timely manner, at a charge no more" - , " than the cost of distribution to the recipient; and" - , "" - , "(b) You may distribute such Executable Form under the terms of this" - , " License, or sublicense it under different terms, provided that the" - , " license for the Executable Form does not attempt to limit or alter" - , " the recipients' rights in the Source Code Form under this License." - , "" - , "3.3. Distribution of a Larger Work" - , "" - , "You may create and distribute a Larger Work under terms of Your choice," - , "provided that You also comply with the requirements of this License for" - , "the Covered Software. If the Larger Work is a combination of Covered" - , "Software with a work governed by one or more Secondary Licenses, and the" - , "Covered Software is not Incompatible With Secondary Licenses, this" - , "License permits You to additionally distribute such Covered Software" - , "under the terms of such Secondary License(s), so that the recipient of" - , "the Larger Work may, at their option, further distribute the Covered" - , "Software under the terms of either this License or such Secondary" - , "License(s)." - , "" - , "3.4. Notices" - , "" - , "You may not remove or alter the substance of any license notices" - , "(including copyright notices, patent notices, disclaimers of warranty," - , "or limitations of liability) contained within the Source Code Form of" - , "the Covered Software, except that You may alter any license notices to" - , "the extent required to remedy known factual inaccuracies." - , "" - , "3.5. 
Application of Additional Terms" - , "" - , "You may choose to offer, and to charge a fee for, warranty, support," - , "indemnity or liability obligations to one or more recipients of Covered" - , "Software. However, You may do so only on Your own behalf, and not on" - , "behalf of any Contributor. You must make it absolutely clear that any" - , "such warranty, support, indemnity, or liability obligation is offered by" - , "You alone, and You hereby agree to indemnify every Contributor for any" - , "liability incurred by such Contributor as a result of warranty, support," - , "indemnity or liability terms You offer. You may include additional" - , "disclaimers of warranty and limitations of liability specific to any" - , "jurisdiction." - , "" - , "4. Inability to Comply Due to Statute or Regulation" - , "---------------------------------------------------" - , "" - , "If it is impossible for You to comply with any of the terms of this" - , "License with respect to some or all of the Covered Software due to" - , "statute, judicial order, or regulation then You must: (a) comply with" - , "the terms of this License to the maximum extent possible; and (b)" - , "describe the limitations and the code they affect. Such description must" - , "be placed in a text file included with all distributions of the Covered" - , "Software under this License. Except to the extent prohibited by statute" - , "or regulation, such description must be sufficiently detailed for a" - , "recipient of ordinary skill to be able to understand it." - , "" - , "5. Termination" - , "--------------" - , "" - , "5.1. The rights granted under this License will terminate automatically" - , "if You fail to comply with any of its terms. 
However, if You become" - , "compliant, then the rights granted under this License from a particular" - , "Contributor are reinstated (a) provisionally, unless and until such" - , "Contributor explicitly and finally terminates Your grants, and (b) on an" - , "ongoing basis, if such Contributor fails to notify You of the" - , "non-compliance by some reasonable means prior to 60 days after You have" - , "come back into compliance. Moreover, Your grants from a particular" - , "Contributor are reinstated on an ongoing basis if such Contributor" - , "notifies You of the non-compliance by some reasonable means, this is the" - , "first time You have received notice of non-compliance with this License" - , "from such Contributor, and You become compliant prior to 30 days after" - , "Your receipt of the notice." - , "" - , "5.2. If You initiate litigation against any entity by asserting a patent" - , "infringement claim (excluding declaratory judgment actions," - , "counter-claims, and cross-claims) alleging that a Contributor Version" - , "directly or indirectly infringes any patent, then the rights granted to" - , "You by any and all Contributors for the Covered Software under Section" - , "2.1 of this License shall terminate." - , "" - , "5.3. In the event of termination under Sections 5.1 or 5.2 above, all" - , "end user license agreements (excluding distributors and resellers) which" - , "have been validly granted by You or Your distributors under this License" - , "prior to termination shall survive termination." - , "" - , "************************************************************************" - , "* *" - , "* 6. 
Disclaimer of Warranty *" - , "* ------------------------- *" - , "* *" - , "* Covered Software is provided under this License on an \"as is\" *" - , "* basis, without warranty of any kind, either expressed, implied, or *" - , "* statutory, including, without limitation, warranties that the *" - , "* Covered Software is free of defects, merchantable, fit for a *" - , "* particular purpose or non-infringing. The entire risk as to the *" - , "* quality and performance of the Covered Software is with You. *" - , "* Should any Covered Software prove defective in any respect, You *" - , "* (not any Contributor) assume the cost of any necessary servicing, *" - , "* repair, or correction. This disclaimer of warranty constitutes an *" - , "* essential part of this License. No use of any Covered Software is *" - , "* authorized under this License except under this disclaimer. *" - , "* *" - , "************************************************************************" - , "" - , "************************************************************************" - , "* *" - , "* 7. Limitation of Liability *" - , "* -------------------------- *" - , "* *" - , "* Under no circumstances and under no legal theory, whether tort *" - , "* (including negligence), contract, or otherwise, shall any *" - , "* Contributor, or anyone who distributes Covered Software as *" - , "* permitted above, be liable to You for any direct, indirect, *" - , "* special, incidental, or consequential damages of any character *" - , "* including, without limitation, damages for lost profits, loss of *" - , "* goodwill, work stoppage, computer failure or malfunction, or any *" - , "* and all other commercial damages or losses, even if such party *" - , "* shall have been informed of the possibility of such damages. 
This *" - , "* limitation of liability shall not apply to liability for death or *" - , "* personal injury resulting from such party's negligence to the *" - , "* extent applicable law prohibits such limitation. Some *" - , "* jurisdictions do not allow the exclusion or limitation of *" - , "* incidental or consequential damages, so this exclusion and *" - , "* limitation may not apply to You. *" - , "* *" - , "************************************************************************" - , "" - , "8. Litigation" - , "-------------" - , "" - , "Any litigation relating to this License may be brought only in the" - , "courts of a jurisdiction where the defendant maintains its principal" - , "place of business and such litigation shall be governed by laws of that" - , "jurisdiction, without reference to its conflict-of-law provisions." - , "Nothing in this Section shall prevent a party's ability to bring" - , "cross-claims or counter-claims." - , "" - , "9. Miscellaneous" - , "----------------" - , "" - , "This License represents the complete agreement concerning the subject" - , "matter hereof. If any provision of this License is held to be" - , "unenforceable, such provision shall be reformed only to the extent" - , "necessary to make it enforceable. Any law or regulation which provides" - , "that the language of a contract shall be construed against the drafter" - , "shall not be used to construe this License against a Contributor." - , "" - , "10. Versions of the License" - , "---------------------------" - , "" - , "10.1. New Versions" - , "" - , "Mozilla Foundation is the license steward. Except as provided in Section" - , "10.3, no one other than the license steward has the right to modify or" - , "publish new versions of this License. Each version will be given a" - , "distinguishing version number." - , "" - , "10.2. 
Effect of New Versions" - , "" - , "You may distribute the Covered Software under the terms of the version" - , "of the License under which You originally received the Covered Software," - , "or under the terms of any subsequent version published by the license" - , "steward." - , "" - , "10.3. Modified Versions" - , "" - , "If you create software not governed by this License, and you want to" - , "create a new license for such software, you may create and use a" - , "modified version of this License if you rename the license and remove" - , "any references to the name of the license steward (except to note that" - , "such modified license differs from this License)." - , "" - , "10.4. Distributing Source Code Form that is Incompatible With Secondary" - , "Licenses" - , "" - , "If You choose to distribute Source Code Form that is Incompatible With" - , "Secondary Licenses under the terms of this version of the License, the" - , "notice described in Exhibit B of this License must be attached." - , "" - , "Exhibit A - Source Code Form License Notice" - , "-------------------------------------------" - , "" - , " This Source Code Form is subject to the terms of the Mozilla Public" - , " License, v. 2.0. If a copy of the MPL was not distributed with this" - , " file, You can obtain one at http://mozilla.org/MPL/2.0/." - , "" - , "If it is not possible or desirable to put the notice in a particular" - , "file, then You may include the notice in a location (such as a LICENSE" - , "file in a relevant directory) where a recipient would be likely to look" - , "for such a notice." - , "" - , "You may add additional accurate notices of copyright ownership." - , "" - , "Exhibit B - \"Incompatible With Secondary Licenses\" Notice" - , "---------------------------------------------------------" - , "" - , " This Source Code Form is \"Incompatible With Secondary Licenses\", as" - , " defined by the Mozilla Public License, v. 2.0." 
- ] - -isc :: String -> String -> License -isc authors year = unlines - [ "Copyright (c) " ++ year ++ " " ++ authors - , "" - , "Permission to use, copy, modify, and/or distribute this software for any purpose" - , "with or without fee is hereby granted, provided that the above copyright notice" - , "and this permission notice appear in all copies." - , "" - , "THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH" - , "REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND" - , "FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT," - , "INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS" - , "OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER" - , "TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF" - , "THIS SOFTWARE." - ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Init/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Init/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Init/Types.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Init/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,121 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Init.Types --- Copyright : (c) Brent Yorgey, Benedikt Huber 2009 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Some types used by the 'cabal init' command. --- ------------------------------------------------------------------------------ -module Distribution.Client.Init.Types where - -import Distribution.Simple.Setup - ( Flag(..) 
) - -import Distribution.Types.Dependency as P -import Distribution.Compat.Semigroup -import Distribution.Version -import Distribution.Verbosity -import qualified Distribution.Package as P -import Distribution.License -import Distribution.ModuleName -import Language.Haskell.Extension ( Language(..), Extension ) - -import qualified Text.PrettyPrint as Disp -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Text - -import GHC.Generics ( Generic ) - --- | InitFlags is really just a simple type to represent certain --- portions of a .cabal file. Rather than have a flag for EVERY --- possible field, we just have one for each field that the user is --- likely to want and/or that we are likely to be able to --- intelligently guess. -data InitFlags = - InitFlags { nonInteractive :: Flag Bool - , quiet :: Flag Bool - , packageDir :: Flag FilePath - , noComments :: Flag Bool - , minimal :: Flag Bool - - , packageName :: Flag P.PackageName - , version :: Flag Version - , cabalVersion :: Flag Version - , license :: Flag License - , author :: Flag String - , email :: Flag String - , homepage :: Flag String - - , synopsis :: Flag String - , category :: Flag (Either String Category) - , extraSrc :: Maybe [String] - - , packageType :: Flag PackageType - , mainIs :: Flag FilePath - , language :: Flag Language - - , exposedModules :: Maybe [ModuleName] - , otherModules :: Maybe [ModuleName] - , otherExts :: Maybe [Extension] - - , dependencies :: Maybe [P.Dependency] - , sourceDirs :: Maybe [String] - , buildTools :: Maybe [String] - - , initVerbosity :: Flag Verbosity - , overwrite :: Flag Bool - } - deriving (Show, Generic) - - -- the Monoid instance for Flag has later values override earlier - -- ones, which is why we want Maybe [foo] for collecting foo values, - -- not Flag [foo]. 
- -data BuildType = LibBuild | ExecBuild - -data PackageType = Library | Executable | LibraryAndExecutable - deriving (Show, Read, Eq) - -displayPackageType :: PackageType -> String -displayPackageType LibraryAndExecutable = "Library and Executable" -displayPackageType pkgtype = show pkgtype - -instance Monoid InitFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup InitFlags where - (<>) = gmappend - --- | Some common package categories. -data Category - = Codec - | Concurrency - | Control - | Data - | Database - | Development - | Distribution - | Game - | Graphics - | Language - | Math - | Network - | Sound - | System - | Testing - | Text - | Web - deriving (Read, Show, Eq, Ord, Bounded, Enum) - -instance Text Category where - disp = Disp.text . show - parse = Parse.choice $ map (fmap read . Parse.string . show) [Codec .. ] -- TODO: eradicateNoParse - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Init.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Init.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Init.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Init.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1058 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Init --- Copyright : (c) Brent Yorgey 2009 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Implementation of the 'cabal init' command, which creates an initial .cabal --- file for a project. 
--- ------------------------------------------------------------------------------ - -module Distribution.Client.Init ( - - -- * Commands - initCabal - , incVersion - - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude hiding (empty) - -import System.IO - ( hSetBuffering, stdout, BufferMode(..) ) -import System.Directory - ( getCurrentDirectory, doesDirectoryExist, doesFileExist, copyFile - , getDirectoryContents, createDirectoryIfMissing ) -import System.FilePath - ( (), (<.>), takeBaseName, equalFilePath ) -import Data.Time - ( getCurrentTime, utcToLocalTime, toGregorian, localDay, getCurrentTimeZone ) - -import Data.List - ( groupBy, (\\) ) -import Data.Function - ( on ) -import qualified Data.Map as M -import Control.Monad - ( (>=>), join, forM_, mapM, mapM_ ) -import Control.Arrow - ( (&&&), (***) ) - -import Text.PrettyPrint hiding (mode, cat) - -import Distribution.Version - ( Version, mkVersion, alterVersion, versionNumbers, majorBoundVersion - , orLaterVersion, earlierVersion, intersectVersionRanges, VersionRange ) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.ModuleName - ( ModuleName ) -- And for the Text instance -import Distribution.InstalledPackageInfo - ( InstalledPackageInfo, exposed ) -import qualified Distribution.Package as P -import Language.Haskell.Extension ( Language(..) ) - -import Distribution.Client.Init.Types - ( InitFlags(..), BuildType(..), PackageType(..), Category(..) 
- , displayPackageType ) -import Distribution.Client.Init.Licenses - ( bsd2, bsd3, gplv2, gplv3, lgpl21, lgpl3, agplv3, apache20, mit, mpl20, isc ) -import Distribution.Client.Init.Heuristics - ( guessPackageName, guessAuthorNameMail, guessMainFileCandidates, - SourceFileEntry(..), - scanForModules, neededBuildPrograms ) - -import Distribution.License - ( License(..), knownLicenses, licenseToSPDX ) -import qualified Distribution.SPDX as SPDX - -import Distribution.ReadE - ( runReadE, readP_to_E ) -import Distribution.Simple.Setup - ( Flag(..), flagToMaybe ) -import Distribution.Simple.Utils - ( dropWhileEndLE ) -import Distribution.Simple.Configure - ( getInstalledPackages ) -import Distribution.Simple.Compiler - ( PackageDBStack, Compiler ) -import Distribution.Simple.Program - ( ProgramDb ) -import Distribution.Simple.PackageIndex - ( InstalledPackageIndex, moduleNameIndex ) -import Distribution.Text - ( display, Text(..) ) -import Distribution.Pretty - ( prettyShow ) -import Distribution.Parsec.Class - ( eitherParsec ) - -import Distribution.Solver.Types.PackageIndex - ( elemByPackageName ) - -import Distribution.Client.IndexUtils - ( getSourcePackages ) -import Distribution.Client.Types - ( SourcePackageDb(..) ) -import Distribution.Client.Setup - ( RepoContext(..) 
) - -initCabal :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> ProgramDb - -> InitFlags - -> IO () -initCabal verbosity packageDBs repoCtxt comp progdb initFlags = do - - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb - sourcePkgDb <- getSourcePackages verbosity repoCtxt - - hSetBuffering stdout NoBuffering - - initFlags' <- extendFlags installedPkgIndex sourcePkgDb initFlags - - case license initFlags' of - Flag PublicDomain -> return () - _ -> writeLicense initFlags' - writeSetupFile initFlags' - writeChangeLog initFlags' - createSourceDirectories initFlags' - createMainHs initFlags' - success <- writeCabalFile initFlags' - - when success $ generateWarnings initFlags' - ---------------------------------------------------------------------------- --- Flag acquisition ----------------------------------------------------- ---------------------------------------------------------------------------- - --- | Fill in more details by guessing, discovering, or prompting the --- user. -extendFlags :: InstalledPackageIndex -> SourcePackageDb -> InitFlags -> IO InitFlags -extendFlags pkgIx sourcePkgDb = - getCabalVersion - >=> getPackageName sourcePkgDb - >=> getVersion - >=> getLicense - >=> getAuthorInfo - >=> getHomepage - >=> getSynopsis - >=> getCategory - >=> getExtraSourceFiles - >=> getLibOrExec - >=> getSrcDir - >=> getLanguage - >=> getGenComments - >=> getModulesBuildToolsAndDeps pkgIx - --- | Combine two actions which may return a value, preferring the first. That --- is, run the second action only if the first doesn't return a value. -infixr 1 ?>> -(?>>) :: IO (Maybe a) -> IO (Maybe a) -> IO (Maybe a) -f ?>> g = do - ma <- f - if isJust ma - then return ma - else g - --- | Witness the isomorphism between Maybe and Flag. 
-maybeToFlag :: Maybe a -> Flag a -maybeToFlag = maybe NoFlag Flag - -defaultCabalVersion :: Version -defaultCabalVersion = mkVersion [1,10] - -displayCabalVersion :: Version -> String -displayCabalVersion v = case versionNumbers v of - [1,10] -> "1.10 (legacy)" - [2,0] -> "2.0 (+ support for Backpack, internal sub-libs, '^>=' operator)" - [2,2] -> "2.2 (+ support for 'common', 'elif', redundant commas, SPDX)" - [2,4] -> "2.4 (+ support for '**' globbing)" - _ -> display v - --- | Ask which version of the cabal spec to use. -getCabalVersion :: InitFlags -> IO InitFlags -getCabalVersion flags = do - cabVer <- return (flagToMaybe $ cabalVersion flags) - ?>> maybePrompt flags (either (const defaultCabalVersion) id `fmap` - promptList "Please choose version of the Cabal specification to use" - [mkVersion [1,10], mkVersion [2,0], mkVersion [2,2], mkVersion [2,4]] - (Just defaultCabalVersion) displayCabalVersion False) - ?>> return (Just defaultCabalVersion) - - return $ flags { cabalVersion = maybeToFlag cabVer } - - --- | Get the package name: use the package directory (supplied, or the current --- directory by default) as a guess. It looks at the SourcePackageDb to avoid --- using an existing package name. 
-getPackageName :: SourcePackageDb -> InitFlags -> IO InitFlags -getPackageName sourcePkgDb flags = do - guess <- traverse guessPackageName (flagToMaybe $ packageDir flags) - ?>> Just `fmap` (getCurrentDirectory >>= guessPackageName) - - let guess' | isPkgRegistered guess = Nothing - | otherwise = guess - - pkgName' <- return (flagToMaybe $ packageName flags) - ?>> maybePrompt flags (prompt "Package name" guess') - ?>> return guess' - - chooseAgain <- if isPkgRegistered pkgName' - then promptYesNo promptOtherNameMsg (Just True) - else return False - - if chooseAgain - then getPackageName sourcePkgDb flags - else return $ flags { packageName = maybeToFlag pkgName' } - - where - isPkgRegistered (Just pkg) = elemByPackageName (packageIndex sourcePkgDb) pkg - isPkgRegistered Nothing = False - - promptOtherNameMsg = "This package name is already used by another " ++ - "package on hackage. Do you want to choose a " ++ - "different name" - --- | Package version: use 0.1.0.0 as a last resort, but try prompting the user --- if possible. -getVersion :: InitFlags -> IO InitFlags -getVersion flags = do - let v = Just $ mkVersion [0,1,0,0] - v' <- return (flagToMaybe $ version flags) - ?>> maybePrompt flags (prompt "Package version" v) - ?>> return v - return $ flags { version = maybeToFlag v' } - --- | Choose a license. 
-getLicense :: InitFlags -> IO InitFlags -getLicense flags = do - lic <- return (flagToMaybe $ license flags) - ?>> fmap (fmap (either UnknownLicense id)) - (maybePrompt flags - (promptList "Please choose a license" listedLicenses - (Just BSD3) displayLicense True)) - - case checkLicenseInvalid lic of - Just msg -> putStrLn msg >> getLicense flags - Nothing -> return $ flags { license = maybeToFlag lic } - - where - displayLicense l | needSpdx = prettyShow (licenseToSPDX l) - | otherwise = display l - - checkLicenseInvalid (Just (UnknownLicense t)) - | needSpdx = case eitherParsec t :: Either String SPDX.License of - Right _ -> Nothing - Left _ -> Just "\nThe license must be a valid SPDX expression." - | otherwise = if any (not . isAlphaNum) t - then Just promptInvalidOtherLicenseMsg - else Nothing - checkLicenseInvalid _ = Nothing - - promptInvalidOtherLicenseMsg = "\nThe license must be alphanumeric. " ++ - "If your license name has many words, " ++ - "the convention is to use camel case (e.g. PublicDomain). " ++ - "Please choose a different license." - - listedLicenses = - knownLicenses \\ [GPL Nothing, LGPL Nothing, AGPL Nothing - , Apache Nothing, OtherLicense] - - needSpdx = maybe False (>= mkVersion [2,2]) $ flagToMaybe (cabalVersion flags) - --- | The author's name and email. Prompt, or try to guess from an existing --- darcs repo. -getAuthorInfo :: InitFlags -> IO InitFlags -getAuthorInfo flags = do - (authorName, authorEmail) <- - (flagToMaybe *** flagToMaybe) `fmap` guessAuthorNameMail - authorName' <- return (flagToMaybe $ author flags) - ?>> maybePrompt flags (promptStr "Author name" authorName) - ?>> return authorName - - authorEmail' <- return (flagToMaybe $ email flags) - ?>> maybePrompt flags (promptStr "Maintainer email" authorEmail) - ?>> return authorEmail - - return $ flags { author = maybeToFlag authorName' - , email = maybeToFlag authorEmail' - } - --- | Prompt for a homepage URL. 
-getHomepage :: InitFlags -> IO InitFlags -getHomepage flags = do - hp <- queryHomepage - hp' <- return (flagToMaybe $ homepage flags) - ?>> maybePrompt flags (promptStr "Project homepage URL" hp) - ?>> return hp - - return $ flags { homepage = maybeToFlag hp' } - --- | Right now this does nothing, but it could be changed to do some --- intelligent guessing. -queryHomepage :: IO (Maybe String) -queryHomepage = return Nothing -- get default remote darcs repo? - --- | Prompt for a project synopsis. -getSynopsis :: InitFlags -> IO InitFlags -getSynopsis flags = do - syn <- return (flagToMaybe $ synopsis flags) - ?>> maybePrompt flags (promptStr "Project synopsis" Nothing) - - return $ flags { synopsis = maybeToFlag syn } - --- | Prompt for a package category. --- Note that it should be possible to do some smarter guessing here too, i.e. --- look at the name of the top level source directory. -getCategory :: InitFlags -> IO InitFlags -getCategory flags = do - cat <- return (flagToMaybe $ category flags) - ?>> fmap join (maybePrompt flags - (promptListOptional "Project category" [Codec ..])) - return $ flags { category = maybeToFlag cat } - --- | Try to guess extra source files (don't prompt the user). -getExtraSourceFiles :: InitFlags -> IO InitFlags -getExtraSourceFiles flags = do - extraSrcFiles <- return (extraSrc flags) - ?>> Just `fmap` guessExtraSourceFiles flags - - return $ flags { extraSrc = extraSrcFiles } - -defaultChangeLog :: FilePath -defaultChangeLog = "CHANGELOG.md" - --- | Try to guess things to include in the extra-source-files field. --- For now, we just look for things in the root directory named --- 'readme', 'changes', or 'changelog', with any sort of --- capitalization and any extension. -guessExtraSourceFiles :: InitFlags -> IO [FilePath] -guessExtraSourceFiles flags = do - dir <- - maybe getCurrentDirectory return . 
flagToMaybe $ packageDir flags - files <- getDirectoryContents dir - let extraFiles = filter isExtra files - if any isLikeChangeLog extraFiles - then return extraFiles - else return (defaultChangeLog : extraFiles) - - where - isExtra = likeFileNameBase ("README" : changeLogLikeBases) - isLikeChangeLog = likeFileNameBase changeLogLikeBases - likeFileNameBase candidates = (`elem` candidates) . map toUpper . takeBaseName - changeLogLikeBases = ["CHANGES", "CHANGELOG"] - --- | Ask whether the project builds a library or executable. -getLibOrExec :: InitFlags -> IO InitFlags -getLibOrExec flags = do - pkgType <- return (flagToMaybe $ packageType flags) - ?>> maybePrompt flags (either (const Library) id `fmap` - promptList "What does the package build" - [Library, Executable, LibraryAndExecutable] - Nothing displayPackageType False) - ?>> return (Just Library) - mainFile <- if pkgType == Just Library then return Nothing else - getMainFile flags - - return $ flags { packageType = maybeToFlag pkgType - , mainIs = maybeToFlag mainFile - } - --- | Try to guess the main file of the executable, and prompt the user to choose --- one of them. Top-level modules including the word 'Main' in the file name --- will be candidates, and shorter filenames will be preferred. -getMainFile :: InitFlags -> IO (Maybe FilePath) -getMainFile flags = - return (flagToMaybe $ mainIs flags) - ?>> do - candidates <- guessMainFileCandidates flags - let showCandidate = either (++" (does not yet exist, but will be created)") id - defaultFile = listToMaybe candidates - maybePrompt flags (either id (either id id) `fmap` - promptList "What is the main module of the executable" - candidates - defaultFile showCandidate True) - ?>> return (fmap (either id id) defaultFile) - --- | Ask for the base language of the package. 
-getLanguage :: InitFlags -> IO InitFlags -getLanguage flags = do - lang <- return (flagToMaybe $ language flags) - ?>> maybePrompt flags - (either UnknownLanguage id `fmap` - promptList "What base language is the package written in" - [Haskell2010, Haskell98] - (Just Haskell2010) display True) - ?>> return (Just Haskell2010) - - if invalidLanguage lang - then putStrLn invalidOtherLanguageMsg >> getLanguage flags - else return $ flags { language = maybeToFlag lang } - - where - invalidLanguage (Just (UnknownLanguage t)) = any (not . isAlphaNum) t - invalidLanguage _ = False - - invalidOtherLanguageMsg = "\nThe language must be alphanumeric. " ++ - "Please enter a different language." - --- | Ask whether to generate explanatory comments. -getGenComments :: InitFlags -> IO InitFlags -getGenComments flags = do - genComments <- return (not <$> flagToMaybe (noComments flags)) - ?>> maybePrompt flags (promptYesNo promptMsg (Just False)) - ?>> return (Just False) - return $ flags { noComments = maybeToFlag (fmap not genComments) } - where - promptMsg = "Add informative comments to each field in the cabal file (y/n)" - --- | Ask for the source root directory. -getSrcDir :: InitFlags -> IO InitFlags -getSrcDir flags = do - srcDirs <- return (sourceDirs flags) - ?>> fmap (:[]) `fmap` guessSourceDir flags - ?>> fmap (>>= fmap ((:[]) . either id id)) (maybePrompt - flags - (promptListOptional' "Source directory" ["src"] id)) - - return $ flags { sourceDirs = srcDirs } - --- | Try to guess source directory. Could try harder; for the --- moment just looks to see whether there is a directory called 'src'. -guessSourceDir :: InitFlags -> IO (Maybe String) -guessSourceDir flags = do - dir <- - maybe getCurrentDirectory return . flagToMaybe $ packageDir flags - srcIsDir <- doesDirectoryExist (dir "src") - return $ if srcIsDir - then Just "src" - else Nothing - --- | Check whether a potential source file is located in one of the --- source directories. 
-isSourceFile :: Maybe [FilePath] -> SourceFileEntry -> Bool -isSourceFile Nothing sf = isSourceFile (Just ["."]) sf -isSourceFile (Just srcDirs) sf = any (equalFilePath (relativeSourcePath sf)) srcDirs - --- | Get the list of exposed modules and extra tools needed to build them. -getModulesBuildToolsAndDeps :: InstalledPackageIndex -> InitFlags -> IO InitFlags -getModulesBuildToolsAndDeps pkgIx flags = do - dir <- maybe getCurrentDirectory return . flagToMaybe $ packageDir flags - - sourceFiles0 <- scanForModules dir - - let sourceFiles = filter (isSourceFile (sourceDirs flags)) sourceFiles0 - - Just mods <- return (exposedModules flags) - ?>> (return . Just . map moduleName $ sourceFiles) - - tools <- return (buildTools flags) - ?>> (return . Just . neededBuildPrograms $ sourceFiles) - - deps <- return (dependencies flags) - ?>> Just <$> importsToDeps flags - (fromString "Prelude" : -- to ensure we get base as a dep - ( nub -- only need to consider each imported package once - . filter (`notElem` mods) -- don't consider modules from - -- this package itself - . concatMap imports - $ sourceFiles - ) - ) - pkgIx - - exts <- return (otherExts flags) - ?>> (return . Just . nub . concatMap extensions $ sourceFiles) - - return $ flags { exposedModules = Just mods - , buildTools = tools - , dependencies = deps - , otherExts = exts - } - -importsToDeps :: InitFlags -> [ModuleName] -> InstalledPackageIndex -> IO [P.Dependency] -importsToDeps flags mods pkgIx = do - - let modMap :: M.Map ModuleName [InstalledPackageInfo] - modMap = M.map (filter exposed) $ moduleNameIndex pkgIx - - modDeps :: [(ModuleName, Maybe [InstalledPackageInfo])] - modDeps = map (id &&& flip M.lookup modMap) mods - - message flags "\nGuessing dependencies..." - nub . catMaybes <$> mapM (chooseDep flags) modDeps - --- Given a module and a list of installed packages providing it, --- choose a dependency (i.e. package + version range) to use for that --- module. 
-chooseDep :: InitFlags -> (ModuleName, Maybe [InstalledPackageInfo]) - -> IO (Maybe P.Dependency) - -chooseDep flags (m, Nothing) - = message flags ("\nWarning: no package found providing " ++ display m ++ ".") - >> return Nothing - -chooseDep flags (m, Just []) - = message flags ("\nWarning: no package found providing " ++ display m ++ ".") - >> return Nothing - - -- We found some packages: group them by name. -chooseDep flags (m, Just ps) - = case pkgGroups of - -- if there's only one group, i.e. multiple versions of a single package, - -- we make it into a dependency, choosing the latest-ish version (see toDep). - [grp] -> Just <$> toDep grp - -- otherwise, we refuse to choose between different packages and make the user - -- do it. - grps -> do message flags ("\nWarning: multiple packages found providing " - ++ display m - ++ ": " ++ intercalate ", " (map (display . P.pkgName . head) grps)) - message flags "You will need to pick one and manually add it to the Build-depends: field." - return Nothing - where - pkgGroups = groupBy ((==) `on` P.pkgName) (map P.packageId ps) - - desugar = maybe True (< mkVersion [2]) $ flagToMaybe (cabalVersion flags) - - -- Given a list of available versions of the same package, pick a dependency. - toDep :: [P.PackageIdentifier] -> IO P.Dependency - - -- If only one version, easy. We change e.g. 0.4.2 into 0.4.* - toDep [pid] = return $ P.Dependency (P.pkgName pid) (pvpize desugar . P.pkgVersion $ pid) - - -- Otherwise, choose the latest version and issue a warning. - toDep pids = do - message flags ("\nWarning: multiple versions of " ++ display (P.pkgName . head $ pids) ++ " provide " ++ display m ++ ", choosing the latest.") - return $ P.Dependency (P.pkgName . head $ pids) - (pvpize desugar . maximum . map P.pkgVersion $ pids) - --- | Given a version, return an API-compatible (according to PVP) version range. 
--- --- If the boolean argument denotes whether to use a desugared --- representation (if 'True') or the new-style @^>=@-form (if --- 'False'). --- --- Example: @pvpize True (mkVersion [0,4,1])@ produces the version range @>= 0.4 && < 0.5@ (which is the --- same as @0.4.*@). -pvpize :: Bool -> Version -> VersionRange -pvpize False v = majorBoundVersion v -pvpize True v = orLaterVersion v' - `intersectVersionRanges` - earlierVersion (incVersion 1 v') - where v' = alterVersion (take 2) v - --- | Increment the nth version component (counting from 0). -incVersion :: Int -> Version -> Version -incVersion n = alterVersion (incVersion' n) - where - incVersion' 0 [] = [1] - incVersion' 0 (v:_) = [v+1] - incVersion' m [] = replicate m 0 ++ [1] - incVersion' m (v:vs) = v : incVersion' (m-1) vs - ---------------------------------------------------------------------------- --- Prompting/user interaction ------------------------------------------- ---------------------------------------------------------------------------- - --- | Run a prompt or not based on the nonInteractive flag of the --- InitFlags structure. -maybePrompt :: InitFlags -> IO t -> IO (Maybe t) -maybePrompt flags p = - case nonInteractive flags of - Flag True -> return Nothing - _ -> Just `fmap` p - --- | Create a prompt with optional default value that returns a --- String. -promptStr :: String -> Maybe String -> IO String -promptStr = promptDefault' Just id - --- | Create a yes/no prompt with optional default value. --- -promptYesNo :: String -> Maybe Bool -> IO Bool -promptYesNo = - promptDefault' recogniseYesNo showYesNo - where - recogniseYesNo s | s == "y" || s == "Y" = Just True - | s == "n" || s == "N" = Just False - | otherwise = Nothing - showYesNo True = "y" - showYesNo False = "n" - --- | Create a prompt with optional default value that returns a value --- of some Text instance. -prompt :: Text t => String -> Maybe t -> IO t -prompt = promptDefault' - (either (const Nothing) Just . 
runReadE (readP_to_E id parse)) - display - --- | Create a prompt with an optional default value. -promptDefault' :: (String -> Maybe t) -- ^ parser - -> (t -> String) -- ^ pretty-printer - -> String -- ^ prompt message - -> Maybe t -- ^ optional default value - -> IO t -promptDefault' parser pretty pr def = do - putStr $ mkDefPrompt pr (pretty `fmap` def) - inp <- getLine - case (inp, def) of - ("", Just d) -> return d - _ -> case parser inp of - Just t -> return t - Nothing -> do putStrLn $ "Couldn't parse " ++ inp ++ ", please try again!" - promptDefault' parser pretty pr def - --- | Create a prompt from a prompt string and a String representation --- of an optional default value. -mkDefPrompt :: String -> Maybe String -> String -mkDefPrompt pr def = pr ++ "?" ++ defStr def - where defStr Nothing = " " - defStr (Just s) = " [default: " ++ s ++ "] " - -promptListOptional :: (Text t, Eq t) - => String -- ^ prompt - -> [t] -- ^ choices - -> IO (Maybe (Either String t)) -promptListOptional pr choices = promptListOptional' pr choices display - -promptListOptional' :: Eq t - => String -- ^ prompt - -> [t] -- ^ choices - -> (t -> String) -- ^ show an item - -> IO (Maybe (Either String t)) -promptListOptional' pr choices displayItem = - fmap rearrange - $ promptList pr (Nothing : map Just choices) (Just Nothing) - (maybe "(none)" displayItem) True - where - rearrange = either (Just . Left) (fmap Right) - --- | Create a prompt from a list of items. -promptList :: Eq t - => String -- ^ prompt - -> [t] -- ^ choices - -> Maybe t -- ^ optional default value - -> (t -> String) -- ^ show an item - -> Bool -- ^ whether to allow an 'other' option - -> IO (Either String t) -promptList pr choices def displayItem other = do - putStrLn $ pr ++ ":" - let options1 = map (\c -> (Just c == def, displayItem c)) choices - options2 = zip ([1..]::[Int]) - (options1 ++ [(False, "Other (specify)") | other]) - mapM_ (putStrLn . 
\(n,(i,s)) -> showOption n i ++ s) options2 - promptList' displayItem (length options2) choices def other - where showOption n i | n < 10 = " " ++ star i ++ " " ++ rest - | otherwise = " " ++ star i ++ rest - where rest = show n ++ ") " - star True = "*" - star False = " " - -promptList' :: (t -> String) -> Int -> [t] -> Maybe t -> Bool -> IO (Either String t) -promptList' displayItem numChoices choices def other = do - putStr $ mkDefPrompt "Your choice" (displayItem `fmap` def) - inp <- getLine - case (inp, def) of - ("", Just d) -> return $ Right d - _ -> case readMaybe inp of - Nothing -> invalidChoice inp - Just n -> getChoice n - where invalidChoice inp = do putStrLn $ inp ++ " is not a valid choice." - promptList' displayItem numChoices choices def other - getChoice n | n < 1 || n > numChoices = invalidChoice (show n) - | n < numChoices || - (n == numChoices && not other) - = return . Right $ choices !! (n-1) - | otherwise = Left `fmap` promptStr "Please specify" Nothing - ---------------------------------------------------------------------------- --- File generation ------------------------------------------------------ ---------------------------------------------------------------------------- - -writeLicense :: InitFlags -> IO () -writeLicense flags = do - message flags "\nGenerating LICENSE..." - year <- show <$> getYear - let authors = fromMaybe "???" . flagToMaybe . 
author $ flags - let licenseFile = - case license flags of - Flag BSD2 - -> Just $ bsd2 authors year - - Flag BSD3 - -> Just $ bsd3 authors year - - Flag (GPL (Just v)) | v == mkVersion [2] - -> Just gplv2 - - Flag (GPL (Just v)) | v == mkVersion [3] - -> Just gplv3 - - Flag (LGPL (Just v)) | v == mkVersion [2,1] - -> Just lgpl21 - - Flag (LGPL (Just v)) | v == mkVersion [3] - -> Just lgpl3 - - Flag (AGPL (Just v)) | v == mkVersion [3] - -> Just agplv3 - - Flag (Apache (Just v)) | v == mkVersion [2,0] - -> Just apache20 - - Flag MIT - -> Just $ mit authors year - - Flag (MPL v) | v == mkVersion [2,0] - -> Just mpl20 - - Flag ISC - -> Just $ isc authors year - - _ -> Nothing - - case licenseFile of - Just licenseText -> writeFileSafe flags "LICENSE" licenseText - Nothing -> message flags "Warning: unknown license type, you must put a copy in LICENSE yourself." - -getYear :: IO Integer -getYear = do - u <- getCurrentTime - z <- getCurrentTimeZone - let l = utcToLocalTime z u - (y, _, _) = toGregorian $ localDay l - return y - -writeSetupFile :: InitFlags -> IO () -writeSetupFile flags = do - message flags "Generating Setup.hs..." - writeFileSafe flags "Setup.hs" setupFile - where - setupFile = unlines - [ "import Distribution.Simple" - , "main = defaultMain" - ] - -writeChangeLog :: InitFlags -> IO () -writeChangeLog flags = when ((defaultChangeLog `elem`) $ fromMaybe [] (extraSrc flags)) $ do - message flags ("Generating "++ defaultChangeLog ++"...") - writeFileSafe flags defaultChangeLog changeLog - where - changeLog = unlines - [ "# Revision history for " ++ pname - , "" - , "## " ++ pver ++ " -- YYYY-mm-dd" - , "" - , "* First version. Released on an unsuspecting world." - ] - pname = maybe "" display $ flagToMaybe $ packageName flags - pver = maybe "" display $ flagToMaybe $ version flags - - - -writeCabalFile :: InitFlags -> IO Bool -writeCabalFile flags@(InitFlags{packageName = NoFlag}) = do - message flags "Error: no package name provided." 
- return False -writeCabalFile flags@(InitFlags{packageName = Flag p}) = do - let cabalFileName = display p ++ ".cabal" - message flags $ "Generating " ++ cabalFileName ++ "..." - writeFileSafe flags cabalFileName (generateCabalFile cabalFileName flags) - return True - --- | Write a file \"safely\", backing up any existing version (unless --- the overwrite flag is set). -writeFileSafe :: InitFlags -> FilePath -> String -> IO () -writeFileSafe flags fileName content = do - moveExistingFile flags fileName - writeFile fileName content - --- | Create source directories, if they were given. -createSourceDirectories :: InitFlags -> IO () -createSourceDirectories flags = case sourceDirs flags of - Just dirs -> forM_ dirs (createDirectoryIfMissing True) - Nothing -> return () - --- | Create Main.hs, but only if we are init'ing an executable and --- the mainIs flag has been provided. -createMainHs :: InitFlags -> IO () -createMainHs flags = - if hasMainHs flags then - case sourceDirs flags of - Just (srcPath:_) -> writeMainHs flags (srcPath mainFile) - _ -> writeMainHs flags mainFile - else return () - where - Flag mainFile = mainIs flags - ---- | Write a main file if it doesn't already exist. -writeMainHs :: InitFlags -> FilePath -> IO () -writeMainHs flags mainPath = do - dir <- maybe getCurrentDirectory return (flagToMaybe $ packageDir flags) - let mainFullPath = dir mainPath - exists <- doesFileExist mainFullPath - unless exists $ do - message flags $ "Generating " ++ mainPath ++ "..." - writeFileSafe flags mainFullPath mainHs - --- | Check that a main file exists. -hasMainHs :: InitFlags -> Bool -hasMainHs flags = case mainIs flags of - Flag _ -> (packageType flags == Flag Executable - || packageType flags == Flag LibraryAndExecutable) - _ -> False - --- | Default Main.hs file. Used when no Main.hs exists. 
-mainHs :: String -mainHs = unlines - [ "module Main where" - , "" - , "main :: IO ()" - , "main = putStrLn \"Hello, Haskell!\"" - ] - --- | Move an existing file, if there is one, and the overwrite flag is --- not set. -moveExistingFile :: InitFlags -> FilePath -> IO () -moveExistingFile flags fileName = - unless (overwrite flags == Flag True) $ do - e <- doesFileExist fileName - when e $ do - newName <- findNewName fileName - message flags $ "Warning: " ++ fileName ++ " already exists, backing up old version in " ++ newName - copyFile fileName newName - -findNewName :: FilePath -> IO FilePath -findNewName oldName = findNewName' 0 - where - findNewName' :: Integer -> IO FilePath - findNewName' n = do - let newName = oldName <.> ("save" ++ show n) - e <- doesFileExist newName - if e then findNewName' (n+1) else return newName - --- | Generate a .cabal file from an InitFlags structure. NOTE: this --- is rather ad-hoc! What we would REALLY like is to have a --- standard low-level AST type representing .cabal files, which --- preserves things like comments, and to write an *inverse* --- parser/pretty-printer pair between .cabal files and this AST. --- Then instead of this ad-hoc code we could just map an InitFlags --- structure onto a low-level AST structure and use the existing --- pretty-printing code to generate the file. -generateCabalFile :: String -> InitFlags -> String -generateCabalFile fileName c = trimTrailingWS $ - (++ "\n") . - renderStyle style { lineLength = 79, ribbonsPerLine = 1.1 } $ - -- Starting with 2.2 the `cabal-version` field needs to be the first line of the PD - (if specVer < mkVersion [1,12] - then field "cabal-version" (Flag $ orLaterVersion specVer) -- legacy - else field "cabal-version" (Flag $ specVer)) - Nothing -- NB: the first line must be the 'cabal-version' declaration - False - $$ - (if minimal c /= Flag True - then showComment (Just $ "Initial package description '" ++ fileName ++ "' generated " - ++ "by 'cabal init'. 
For further documentation, see " - ++ "http://haskell.org/cabal/users-guide/") - $$ text "" - else empty) - $$ - vcat [ field "name" (packageName c) - (Just "The name of the package.") - True - - , field "version" (version c) - (Just $ "The package version. See the Haskell package versioning policy (PVP) for standards guiding when and how versions should be incremented.\nhttps://pvp.haskell.org\n" - ++ "PVP summary: +-+------- breaking API changes\n" - ++ " | | +----- non-breaking API additions\n" - ++ " | | | +--- code changes with no API change") - True - - , fieldS "synopsis" (synopsis c) - (Just "A short (one-line) description of the package.") - True - - , fieldS "description" NoFlag - (Just "A longer description of the package.") - True - - , fieldS "homepage" (homepage c) - (Just "URL for the project homepage or repository.") - False - - , fieldS "bug-reports" NoFlag - (Just "A URL where users can report bugs.") - True - - , fieldS "license" licenseStr - (Just "The license under which the package is released.") - True - - , case (license c) of - Flag PublicDomain -> empty - _ -> fieldS "license-file" (Flag "LICENSE") - (Just "The file containing the license text.") - True - - , fieldS "author" (author c) - (Just "The package author(s).") - True - - , fieldS "maintainer" (email c) - (Just "An email address to which users can send suggestions, bug reports, and patches.") - True - - , case (license c) of - Flag PublicDomain -> empty - _ -> fieldS "copyright" NoFlag - (Just "A copyright notice.") - True - - , fieldS "category" (either id display `fmap` category c) - Nothing - True - - , fieldS "build-type" (if specVer >= mkVersion [2,2] then NoFlag else Flag "Simple") - Nothing - False - - , fieldS "extra-source-files" (listFieldS (extraSrc c)) - (Just "Extra files to be distributed with the package, such as examples or a README.") - True - - , case packageType c of - Flag Executable -> executableStanza - Flag Library -> libraryStanza - Flag LibraryAndExecutable 
-> libraryStanza $+$ executableStanza - _ -> empty - ] - where - specVer = fromMaybe defaultCabalVersion $ flagToMaybe (cabalVersion c) - - licenseStr | specVer < mkVersion [2,2] = prettyShow `fmap` license c - | otherwise = go `fmap` license c - where - go (UnknownLicense s) = s - go l = prettyShow (licenseToSPDX l) - - generateBuildInfo :: BuildType -> InitFlags -> Doc - generateBuildInfo buildType c' = vcat - [ fieldS "other-modules" (listField (otherModules c')) - (Just $ case buildType of - LibBuild -> "Modules included in this library but not exported." - ExecBuild -> "Modules included in this executable, other than Main.") - True - - , fieldS "other-extensions" (listField (otherExts c')) - (Just "LANGUAGE extensions used by modules in this package.") - True - - , fieldS "build-depends" (listField (dependencies c')) - (Just "Other library packages from which modules are imported.") - True - - , fieldS "hs-source-dirs" (listFieldS (sourceDirs c')) - (Just "Directories containing source files.") - True - - , fieldS "build-tools" (listFieldS (buildTools c')) - (Just "Extra tools (e.g. alex, hsc2hs, ...) needed to build the source.") - False - - , field "default-language" (language c') - (Just "Base language which the package is written in.") - True - ] - - listField :: Text s => Maybe [s] -> Flag String - listField = listFieldS . fmap (map display) - - listFieldS :: Maybe [String] -> Flag String - listFieldS = Flag . maybe "" (intercalate ", ") - - field :: Text t => String -> Flag t -> Maybe String -> Bool -> Doc - field s f = fieldS s (fmap display f) - - fieldS :: String -- ^ Name of the field - -> Flag String -- ^ Field contents - -> Maybe String -- ^ Comment to explain the field - -> Bool -- ^ Should the field be included (commented out) even if blank? 
- -> Doc - fieldS _ NoFlag _ inc | not inc || (minimal c == Flag True) = empty - fieldS _ (Flag "") _ inc | not inc || (minimal c == Flag True) = empty - fieldS s f com _ = case (isJust com, noComments c, minimal c) of - (_, _, Flag True) -> id - (_, Flag True, _) -> id - (True, _, _) -> (showComment com $$) . ($$ text "") - (False, _, _) -> ($$ text "") - $ - comment f <<>> text s <<>> colon - <<>> text (replicate (20 - length s) ' ') - <<>> text (fromMaybe "" . flagToMaybe $ f) - comment NoFlag = text "-- " - comment (Flag "") = text "-- " - comment _ = text "" - - showComment :: Maybe String -> Doc - showComment (Just t) = vcat - . map (text . ("-- "++)) . lines - . renderStyle style { - lineLength = 76, - ribbonsPerLine = 1.05 - } - . vcat - . map (fcat . map text . breakLine) - . lines - $ t - showComment Nothing = text "" - - breakLine [] = [] - breakLine cs = case break (==' ') cs of (w,cs') -> w : breakLine' cs' - breakLine' [] = [] - breakLine' cs = case span (==' ') cs of (w,cs') -> w : breakLine cs' - - trimTrailingWS :: String -> String - trimTrailingWS = unlines . map (dropWhileEndLE isSpace) . lines - - executableStanza :: Doc - executableStanza = text "\nexecutable" <+> - text (maybe "" display . flagToMaybe $ packageName c) $$ - nest 2 (vcat - [ fieldS "main-is" (mainIs c) (Just ".hs or .lhs file containing the Main module.") True - - , generateBuildInfo ExecBuild c - ]) - - libraryStanza :: Doc - libraryStanza = text "\nlibrary" $$ nest 2 (vcat - [ fieldS "exposed-modules" (listField (exposedModules c)) - (Just "Modules exported by the library.") - True - - , generateBuildInfo LibBuild c - ]) - - --- | Generate warnings for missing fields etc. -generateWarnings :: InitFlags -> IO () -generateWarnings flags = do - message flags "" - when (synopsis flags `elem` [NoFlag, Flag ""]) - (message flags "Warning: no synopsis given. 
You should edit the .cabal file and add one.") - - message flags "You may want to edit the .cabal file and add a Description field." - --- | Possibly generate a message to stdout, taking into account the --- --quiet flag. -message :: InitFlags -> String -> IO () -message (InitFlags{quiet = Flag True}) _ = return () -message _ s = putStrLn s diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Install.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Install.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Install.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Install.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1621 +0,0 @@ -{-# LANGUAGE CPP #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Install --- Copyright : (c) 2005 David Himmelstrup --- 2007 Bjorn Bringert --- 2007-2010 Duncan Coutts --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- High level interface to package installation. 
------------------------------------------------------------------------------ -module Distribution.Client.Install ( - -- * High-level interface - install, - - -- * Lower-level interface that allows to manipulate the install plan - makeInstallContext, - makeInstallPlan, - processInstallPlan, - InstallArgs, - InstallContext, - - -- * Prune certain packages from the install plan - pruneInstallPlan - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import qualified Data.Map as Map -import qualified Data.Set as S -import Control.Exception as Exception - ( Exception(toException), bracket, catches - , Handler(Handler), handleJust, IOException, SomeException ) -#ifndef mingw32_HOST_OS -import Control.Exception as Exception - ( Exception(fromException) ) -#endif -import System.Exit - ( ExitCode(..) ) -import Distribution.Compat.Exception - ( catchIO, catchExit ) -import Control.Monad - ( forM_, mapM ) -import System.Directory - ( getTemporaryDirectory, doesDirectoryExist, doesFileExist, - createDirectoryIfMissing, removeFile, renameDirectory, - getDirectoryContents ) -import System.FilePath - ( (), (<.>), equalFilePath, takeDirectory ) -import System.IO - ( openFile, IOMode(AppendMode), hClose ) -import System.IO.Error - ( isDoesNotExistError, ioeGetFileName ) - -import Distribution.Client.Targets -import Distribution.Client.Configure - ( chooseCabalVersion, configureSetupScript, checkConfigExFlags ) -import Distribution.Client.Dependency -import Distribution.Client.Dependency.Types - ( Solver(..) ) -import Distribution.Client.FetchUtils -import Distribution.Client.HttpUtils - ( HttpTransport (..) 
) -import Distribution.Solver.Types.PackageFixedDeps -import qualified Distribution.Client.Haddock as Haddock (regenerateHaddockIndex) -import Distribution.Client.IndexUtils as IndexUtils - ( getSourcePackagesAtIndexState, getInstalledPackages ) -import qualified Distribution.Client.InstallPlan as InstallPlan -import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan -import Distribution.Client.InstallPlan (InstallPlan) -import Distribution.Client.SolverInstallPlan (SolverInstallPlan) -import Distribution.Client.Setup - ( GlobalFlags(..), RepoContext(..) - , ConfigFlags(..), configureCommand, filterConfigureFlags - , ConfigExFlags(..), InstallFlags(..) ) -import Distribution.Client.Config - ( getCabalDir, defaultUserInstall ) -import Distribution.Client.Sandbox.Timestamp - ( withUpdateTimestamps ) -import Distribution.Client.Sandbox.Types - ( SandboxPackageInfo(..), UseSandbox(..), isUseSandbox - , whenUsingSandbox ) -import Distribution.Client.Tar (extractTarGzFile) -import Distribution.Client.Types as Source -import Distribution.Client.BuildReports.Types - ( ReportLevel(..) 
) -import Distribution.Client.SetupWrapper - ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) -import qualified Distribution.Client.BuildReports.Anonymous as BuildReports -import qualified Distribution.Client.BuildReports.Storage as BuildReports - ( storeAnonymous, storeLocal, fromInstallPlan, fromPlanningFailure ) -import qualified Distribution.Client.InstallSymlink as InstallSymlink - ( symlinkBinaries ) -import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade -import qualified Distribution.Client.World as World -import qualified Distribution.InstalledPackageInfo as Installed -import Distribution.Client.JobControl - -import qualified Distribution.Solver.Types.ComponentDeps as CD -import Distribution.Solver.Types.ConstraintSource -import Distribution.Solver.Types.Settings -import Distribution.Solver.Types.LabeledPackageConstraint -import Distribution.Solver.Types.OptionalStanza -import qualified Distribution.Solver.Types.PackageIndex as SourcePackageIndex -import Distribution.Solver.Types.PkgConfigDb - ( PkgConfigDb, readPkgConfigDb ) -import Distribution.Solver.Types.SourcePackage as SourcePackage - -import Distribution.Utils.NubList -import Distribution.Simple.Compiler - ( CompilerId(..), Compiler(compilerId), compilerFlavor - , CompilerInfo(..), compilerInfo, PackageDB(..), PackageDBStack ) -import Distribution.Simple.Program (ProgramDb) -import qualified Distribution.Simple.InstallDirs as InstallDirs -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import Distribution.Simple.Setup - ( haddockCommand, HaddockFlags(..) - , buildCommand, BuildFlags(..), emptyBuildFlags - , toFlag, fromFlag, fromFlagOrDefault, flagToMaybe, defaultDistPref ) -import qualified Distribution.Simple.Setup as Cabal - ( Flag(..) 
- , copyCommand, CopyFlags(..), emptyCopyFlags - , registerCommand, RegisterFlags(..), emptyRegisterFlags - , testCommand, TestFlags(..), emptyTestFlags ) -import Distribution.Simple.Utils - ( createDirectoryIfMissingVerbose, comparing - , writeFileAtomic, withUTF8FileContents ) -import Distribution.Simple.InstallDirs as InstallDirs - ( PathTemplate, fromPathTemplate, toPathTemplate, substPathTemplate - , initialPathTemplateEnv, installDirsTemplateEnv ) -import Distribution.Simple.Configure (interpretPackageDbFlags) -import Distribution.Simple.Register (registerPackage, defaultRegisterOptions) -import Distribution.Package - ( PackageIdentifier(..), PackageId, packageName, packageVersion - , Package(..), HasMungedPackageId(..), HasUnitId(..) - , UnitId ) -import Distribution.Types.Dependency - ( Dependency(..), thisPackageVersion ) -import Distribution.Types.MungedPackageId -import qualified Distribution.PackageDescription as PackageDescription -import Distribution.PackageDescription - ( PackageDescription, GenericPackageDescription(..), Flag(..) - , FlagAssignment, mkFlagAssignment, unFlagAssignment - , showFlagValue, diffFlagAssignment, nullFlagAssignment ) -import Distribution.PackageDescription.Configuration - ( finalizePD ) -import Distribution.ParseUtils - ( showPWarning ) -import Distribution.Version - ( Version, VersionRange, foldVersionRange ) -import Distribution.Simple.Utils as Utils - ( notice, info, warn, debug, debugNoWrap, die' - , withTempDirectory ) -import Distribution.Client.Utils - ( determineNumJobs, logDirChange, mergeBy, MergeResult(..) 
- , tryCanonicalizePath, ProgressPhase(..), progressMessage ) -import Distribution.System - ( Platform, OS(Windows), buildOS, buildPlatform ) -import Distribution.Text - ( display ) -import Distribution.Verbosity as Verbosity - ( Verbosity, modifyVerbosity, normal, verbose ) -import Distribution.Simple.BuildPaths ( exeExtension ) - ---TODO: --- * assign flags to packages individually --- * complain about flags that do not apply to any package given as target --- so flags do not apply to dependencies, only listed, can use flag --- constraints for dependencies --- * only record applicable flags in world file --- * allow flag constraints --- * allow installed constraints --- * allow flag and installed preferences --- * change world file to use cabal section syntax --- * allow persistent configure flags for each package individually - --- ------------------------------------------------------------ --- * Top level user actions --- ------------------------------------------------------------ - --- | Installs the packages needed to satisfy a list of dependencies. 
--- -install - :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> Platform - -> ProgramDb - -> UseSandbox - -> Maybe SandboxPackageInfo - -> GlobalFlags - -> ConfigFlags - -> ConfigExFlags - -> InstallFlags - -> HaddockFlags - -> [UserTarget] - -> IO () -install verbosity packageDBs repos comp platform progdb useSandbox mSandboxPkgInfo - globalFlags configFlags configExFlags installFlags haddockFlags - userTargets0 = do - - unless (installRootCmd installFlags == Cabal.NoFlag) $ - warn verbosity $ "--root-cmd is no longer supported, " - ++ "see https://github.com/haskell/cabal/issues/3353" - ++ " (if you didn't type --root-cmd, comment out root-cmd" - ++ " in your ~/.cabal/config file)" - let userOrSandbox = fromFlag (configUserInstall configFlags) - || isUseSandbox useSandbox - unless userOrSandbox $ - warn verbosity $ "the --global flag is deprecated -- " - ++ "it is generally considered a bad idea to install packages " - ++ "into the global store" - - installContext <- makeInstallContext verbosity args (Just userTargets0) - planResult <- foldProgress logMsg (return . Left) (return . Right) =<< - makeInstallPlan verbosity args installContext - - case planResult of - Left message -> do - reportPlanningFailure verbosity args installContext message - die'' message - Right installPlan -> - processInstallPlan verbosity args installContext installPlan - where - args :: InstallArgs - args = (packageDBs, repos, comp, platform, progdb, useSandbox, - mSandboxPkgInfo, globalFlags, configFlags, configExFlags, - installFlags, haddockFlags) - - die'' message = die' verbosity (message ++ if isUseSandbox useSandbox - then installFailedInSandbox else []) - -- TODO: use a better error message, remove duplication. - installFailedInSandbox = - "\nNote: when using a sandbox, all packages are required to have " - ++ "consistent dependencies. " - ++ "Try reinstalling/unregistering the offending packages or " - ++ "recreating the sandbox." 
- logMsg message rest = debugNoWrap verbosity message >> rest - --- TODO: Make InstallContext a proper data type with documented fields. --- | Common context for makeInstallPlan and processInstallPlan. -type InstallContext = ( InstalledPackageIndex, SourcePackageDb - , PkgConfigDb - , [UserTarget], [PackageSpecifier UnresolvedSourcePackage] - , HttpTransport ) - --- TODO: Make InstallArgs a proper data type with documented fields or just get --- rid of it completely. --- | Initial arguments given to 'install' or 'makeInstallContext'. -type InstallArgs = ( PackageDBStack - , RepoContext - , Compiler - , Platform - , ProgramDb - , UseSandbox - , Maybe SandboxPackageInfo - , GlobalFlags - , ConfigFlags - , ConfigExFlags - , InstallFlags - , HaddockFlags ) - --- | Make an install context given install arguments. -makeInstallContext :: Verbosity -> InstallArgs -> Maybe [UserTarget] - -> IO InstallContext -makeInstallContext verbosity - (packageDBs, repoCtxt, comp, _, progdb,_,_, - globalFlags, _, configExFlags, installFlags, _) mUserTargets = do - - let idxState = flagToMaybe (installIndexState installFlags) - - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb - sourcePkgDb <- getSourcePackagesAtIndexState verbosity repoCtxt idxState - pkgConfigDb <- readPkgConfigDb verbosity progdb - - checkConfigExFlags verbosity installedPkgIndex - (packageIndex sourcePkgDb) configExFlags - transport <- repoContextGetTransport repoCtxt - - (userTargets, pkgSpecifiers) <- case mUserTargets of - Nothing -> - -- We want to distinguish between the case where the user has given an - -- empty list of targets on the command-line and the case where we - -- specifically want to have an empty list of targets. - return ([], []) - Just userTargets0 -> do - -- For install, if no target is given it means we use the current - -- directory as the single target. 
- let userTargets | null userTargets0 = [UserTargetLocalDir "."] - | otherwise = userTargets0 - - pkgSpecifiers <- resolveUserTargets verbosity repoCtxt - (fromFlag $ globalWorldFile globalFlags) - (packageIndex sourcePkgDb) - userTargets - return (userTargets, pkgSpecifiers) - - return (installedPkgIndex, sourcePkgDb, pkgConfigDb, userTargets - ,pkgSpecifiers, transport) - --- | Make an install plan given install context and install arguments. -makeInstallPlan :: Verbosity -> InstallArgs -> InstallContext - -> IO (Progress String String SolverInstallPlan) -makeInstallPlan verbosity - (_, _, comp, platform, _, _, mSandboxPkgInfo, - _, configFlags, configExFlags, installFlags, - _) - (installedPkgIndex, sourcePkgDb, pkgConfigDb, - _, pkgSpecifiers, _) = do - - solver <- chooseSolver verbosity (fromFlag (configSolver configExFlags)) - (compilerInfo comp) - notice verbosity "Resolving dependencies..." - return $ planPackages verbosity comp platform mSandboxPkgInfo solver - configFlags configExFlags installFlags - installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers - --- | Given an install plan, perform the actual installations. 
-processInstallPlan :: Verbosity -> InstallArgs -> InstallContext - -> SolverInstallPlan - -> IO () -processInstallPlan verbosity - args@(_,_, _, _, _, _, _, _, configFlags, _, installFlags, _) - (installedPkgIndex, sourcePkgDb, _, - userTargets, pkgSpecifiers, _) installPlan0 = do - - checkPrintPlan verbosity installedPkgIndex installPlan sourcePkgDb - installFlags pkgSpecifiers - - unless (dryRun || nothingToInstall) $ do - buildOutcomes <- performInstallations verbosity - args installedPkgIndex installPlan - postInstallActions verbosity args userTargets installPlan buildOutcomes - where - installPlan = InstallPlan.configureInstallPlan configFlags installPlan0 - dryRun = fromFlag (installDryRun installFlags) - nothingToInstall = null (fst (InstallPlan.ready installPlan)) - --- ------------------------------------------------------------ --- * Installation planning --- ------------------------------------------------------------ - -planPackages :: Verbosity - -> Compiler - -> Platform - -> Maybe SandboxPackageInfo - -> Solver - -> ConfigFlags - -> ConfigExFlags - -> InstallFlags - -> InstalledPackageIndex - -> SourcePackageDb - -> PkgConfigDb - -> [PackageSpecifier UnresolvedSourcePackage] - -> Progress String String SolverInstallPlan -planPackages verbosity comp platform mSandboxPkgInfo solver - configFlags configExFlags installFlags - installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers = - - resolveDependencies - platform (compilerInfo comp) pkgConfigDb - solver - resolverParams - - >>= if onlyDeps then pruneInstallPlan pkgSpecifiers else return - - where - resolverParams = - - setMaxBackjumps (if maxBackjumps < 0 then Nothing - else Just maxBackjumps) - - . setIndependentGoals independentGoals - - . setReorderGoals reorderGoals - - . setCountConflicts countConflicts - - . setAvoidReinstalls avoidReinstalls - - . setShadowPkgs shadowPkgs - - . setStrongFlags strongFlags - - . setAllowBootLibInstalls allowBootLibInstalls - - . 
setSolverVerbosity verbosity - - . setPreferenceDefault (if upgradeDeps then PreferAllLatest - else PreferLatestForSelected) - - . removeLowerBounds allowOlder - . removeUpperBounds allowNewer - - . addPreferences - -- preferences from the config file or command line - [ PackageVersionPreference name ver - | Dependency name ver <- configPreferences configExFlags ] - - . addConstraints - -- version constraints from the config file or command line - [ LabeledPackageConstraint (userToPackageConstraint pc) src - | (pc, src) <- configExConstraints configExFlags ] - - . addConstraints - --FIXME: this just applies all flags to all targets which - -- is silly. We should check if the flags are appropriate - [ let pc = PackageConstraint - (scopeToplevel $ pkgSpecifierTarget pkgSpecifier) - (PackagePropertyFlags flags) - in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget - | let flags = configConfigurationsFlags configFlags - , not (nullFlagAssignment flags) - , pkgSpecifier <- pkgSpecifiers ] - - . addConstraints - [ let pc = PackageConstraint - (scopeToplevel $ pkgSpecifierTarget pkgSpecifier) - (PackagePropertyStanzas stanzas) - in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget - | pkgSpecifier <- pkgSpecifiers ] - - . maybe id applySandboxInstallPolicy mSandboxPkgInfo - - . (if reinstall then reinstallTargets else id) - - -- Don't solve for executables, the legacy install codepath - -- doesn't understand how to install them - . 
setSolveExecutables (SolveExecutables False) - - $ standardInstallPolicy - installedPkgIndex sourcePkgDb pkgSpecifiers - - stanzas = [ TestStanzas | testsEnabled ] - ++ [ BenchStanzas | benchmarksEnabled ] - testsEnabled = fromFlagOrDefault False $ configTests configFlags - benchmarksEnabled = fromFlagOrDefault False $ configBenchmarks configFlags - - reinstall = fromFlag (installOverrideReinstall installFlags) || - fromFlag (installReinstall installFlags) - reorderGoals = fromFlag (installReorderGoals installFlags) - countConflicts = fromFlag (installCountConflicts installFlags) - independentGoals = fromFlag (installIndependentGoals installFlags) - avoidReinstalls = fromFlag (installAvoidReinstalls installFlags) - shadowPkgs = fromFlag (installShadowPkgs installFlags) - strongFlags = fromFlag (installStrongFlags installFlags) - maxBackjumps = fromFlag (installMaxBackjumps installFlags) - allowBootLibInstalls = fromFlag (installAllowBootLibInstalls installFlags) - upgradeDeps = fromFlag (installUpgradeDeps installFlags) - onlyDeps = fromFlag (installOnlyDeps installFlags) - - allowOlder = fromMaybe (AllowOlder mempty) - (configAllowOlder configExFlags) - allowNewer = fromMaybe (AllowNewer mempty) - (configAllowNewer configExFlags) - --- | Remove the provided targets from the install plan. -pruneInstallPlan :: Package targetpkg - => [PackageSpecifier targetpkg] - -> SolverInstallPlan - -> Progress String String SolverInstallPlan -pruneInstallPlan pkgSpecifiers = - -- TODO: this is a general feature and should be moved to D.C.Dependency - -- Also, the InstallPlan.remove should return info more precise to the - -- problem, rather than the very general PlanProblem type. - either (Fail . explain) Done - . 
SolverInstallPlan.remove (\pkg -> packageName pkg `elem` targetnames) - where - explain :: [SolverInstallPlan.SolverPlanProblem] -> String - explain problems = - "Cannot select only the dependencies (as requested by the " - ++ "'--only-dependencies' flag), " - ++ (case pkgids of - [pkgid] -> "the package " ++ display pkgid ++ " is " - _ -> "the packages " - ++ intercalate ", " (map display pkgids) ++ " are ") - ++ "required by a dependency of one of the other targets." - where - pkgids = - nub [ depid - | SolverInstallPlan.PackageMissingDeps _ depids <- problems - , depid <- depids - , packageName depid `elem` targetnames ] - - targetnames = map pkgSpecifierTarget pkgSpecifiers - --- ------------------------------------------------------------ --- * Informational messages --- ------------------------------------------------------------ - --- | Perform post-solver checks of the install plan and print it if --- either requested or needed. -checkPrintPlan :: Verbosity - -> InstalledPackageIndex - -> InstallPlan - -> SourcePackageDb - -> InstallFlags - -> [PackageSpecifier UnresolvedSourcePackage] - -> IO () -checkPrintPlan verbosity installed installPlan sourcePkgDb - installFlags pkgSpecifiers = do - - -- User targets that are already installed. - let preExistingTargets = - [ p | let tgts = map pkgSpecifierTarget pkgSpecifiers, - InstallPlan.PreExisting p <- InstallPlan.toList installPlan, - packageName p `elem` tgts ] - - -- If there's nothing to install, we print the already existing - -- target packages as an explanation. - when nothingToInstall $ - notice verbosity $ unlines $ - "All the requested packages are already installed:" - : map (display . packageId) preExistingTargets - ++ ["Use --reinstall if you want to reinstall anyway."] - - let lPlan = - [ (pkg, status) - | pkg <- InstallPlan.executionOrder installPlan - , let status = packageStatus installed pkg ] - -- Are any packages classified as reinstalls? 
- let reinstalledPkgs = - [ ipkg - | (_pkg, status) <- lPlan - , ipkg <- extractReinstalls status ] - -- Packages that are already broken. - let oldBrokenPkgs = - map Installed.installedUnitId - . PackageIndex.reverseDependencyClosure installed - . map (Installed.installedUnitId . fst) - . PackageIndex.brokenPackages - $ installed - let excluded = reinstalledPkgs ++ oldBrokenPkgs - -- Packages that are reverse dependencies of replaced packages are very - -- likely to be broken. We exclude packages that are already broken. - let newBrokenPkgs = - filter (\ p -> not (Installed.installedUnitId p `elem` excluded)) - (PackageIndex.reverseDependencyClosure installed reinstalledPkgs) - let containsReinstalls = not (null reinstalledPkgs) - let breaksPkgs = not (null newBrokenPkgs) - - let adaptedVerbosity - | containsReinstalls - , not overrideReinstall = modifyVerbosity (max verbose) verbosity - | otherwise = verbosity - - -- We print the install plan if we are in a dry-run or if we are confronted - -- with a dangerous install plan. - when (dryRun || containsReinstalls && not overrideReinstall) $ - printPlan (dryRun || breaksPkgs && not overrideReinstall) - adaptedVerbosity lPlan sourcePkgDb - - -- If the install plan is dangerous, we print various warning messages. In - -- particular, if we can see that packages are likely to be broken, we even - -- bail out (unless installation has been forced with --force-reinstalls). - when containsReinstalls $ do - if breaksPkgs - then do - (if dryRun || overrideReinstall then warn else die') verbosity $ unlines $ - "The following packages are likely to be broken by the reinstalls:" - : map (display . mungedId) newBrokenPkgs - ++ if overrideReinstall - then if dryRun then [] else - ["Continuing even though " ++ - "the plan contains dangerous reinstalls."] - else - ["Use --force-reinstalls if you want to install anyway."] - else unless dryRun $ warn verbosity - "Note that reinstalls are always dangerous. Continuing anyway..." 
- - -- If we are explicitly told to not download anything, check that all packages - -- are already fetched. - let offline = fromFlagOrDefault False (installOfflineMode installFlags) - when offline $ do - let pkgs = [ confPkgSource cpkg - | InstallPlan.Configured cpkg <- InstallPlan.toList installPlan ] - notFetched <- fmap (map packageInfoId) - . filterM (fmap isNothing . checkFetched . packageSource) - $ pkgs - unless (null notFetched) $ - die' verbosity $ "Can't download packages in offline mode. " - ++ "Must download the following packages to proceed:\n" - ++ intercalate ", " (map display notFetched) - ++ "\nTry using 'cabal fetch'." - - where - nothingToInstall = null (fst (InstallPlan.ready installPlan)) - - dryRun = fromFlag (installDryRun installFlags) - overrideReinstall = fromFlag (installOverrideReinstall installFlags) - -data PackageStatus = NewPackage - | NewVersion [Version] - | Reinstall [UnitId] [PackageChange] - -type PackageChange = MergeResult MungedPackageId MungedPackageId - -extractReinstalls :: PackageStatus -> [UnitId] -extractReinstalls (Reinstall ipids _) = ipids -extractReinstalls _ = [] - -packageStatus :: InstalledPackageIndex - -> ReadyPackage - -> PackageStatus -packageStatus installedPkgIndex cpkg = - case PackageIndex.lookupPackageName installedPkgIndex - (packageName cpkg) of - [] -> NewPackage - ps -> case filter ((== mungedId cpkg) - . 
mungedId) (concatMap snd ps) of - [] -> NewVersion (map fst ps) - pkgs@(pkg:_) -> Reinstall (map Installed.installedUnitId pkgs) - (changes pkg cpkg) - - where - - changes :: Installed.InstalledPackageInfo - -> ReadyPackage - -> [PackageChange] - changes pkg (ReadyPackage pkg') = filter changed $ - mergeBy (comparing mungedName) - -- deps of installed pkg - (resolveInstalledIds $ Installed.depends pkg) - -- deps of configured pkg - (resolveInstalledIds $ CD.nonSetupDeps (depends pkg')) - - -- convert to source pkg ids via index - resolveInstalledIds :: [UnitId] -> [MungedPackageId] - resolveInstalledIds = - nub - . sort - . map mungedId - . mapMaybe (PackageIndex.lookupUnitId installedPkgIndex) - - changed (InBoth pkgid pkgid') = pkgid /= pkgid' - changed _ = True - -printPlan :: Bool -- is dry run - -> Verbosity - -> [(ReadyPackage, PackageStatus)] - -> SourcePackageDb - -> IO () -printPlan dryRun verbosity plan sourcePkgDb = case plan of - [] -> return () - pkgs - | verbosity >= Verbosity.verbose -> notice verbosity $ unlines $ - ("In order, the following " ++ wouldWill ++ " be installed:") - : map showPkgAndReason pkgs - | otherwise -> notice verbosity $ unlines $ - ("In order, the following " ++ wouldWill - ++ " be installed (use -v for more details):") - : map showPkg pkgs - where - wouldWill | dryRun = "would" - | otherwise = "will" - - showPkg (pkg, _) = display (packageId pkg) ++ - showLatest (pkg) - - showPkgAndReason (ReadyPackage pkg', pr) = display (packageId pkg') ++ - showLatest pkg' ++ - showFlagAssignment (nonDefaultFlags pkg') ++ - showStanzas (confPkgStanzas pkg') ++ - showDep pkg' ++ - case pr of - NewPackage -> " (new package)" - NewVersion _ -> " (new version)" - Reinstall _ cs -> " (reinstall)" ++ case cs of - [] -> "" - diff -> " (changes: " ++ intercalate ", " (map change diff) - ++ ")" - - showLatest :: Package srcpkg => srcpkg -> String - showLatest pkg = case mLatestVersion of - Just latestVersion -> - if packageVersion pkg < 
latestVersion - then (" (latest: " ++ display latestVersion ++ ")") - else "" - Nothing -> "" - where - mLatestVersion :: Maybe Version - mLatestVersion = case SourcePackageIndex.lookupPackageName - (packageIndex sourcePkgDb) - (packageName pkg) of - [] -> Nothing - x -> Just $ packageVersion $ last x - - toFlagAssignment :: [Flag] -> FlagAssignment - toFlagAssignment = mkFlagAssignment . map (\ f -> (flagName f, flagDefault f)) - - nonDefaultFlags :: ConfiguredPackage loc -> FlagAssignment - nonDefaultFlags cpkg = - let defaultAssignment = - toFlagAssignment - (genPackageFlags (SourcePackage.packageDescription $ - confPkgSource cpkg)) - in confPkgFlags cpkg `diffFlagAssignment` defaultAssignment - - showStanzas :: [OptionalStanza] -> String - showStanzas = concatMap ((" *" ++) . showStanza) - - showFlagAssignment :: FlagAssignment -> String - showFlagAssignment = concatMap ((' ' :) . showFlagValue) . unFlagAssignment - - change (OnlyInLeft pkgid) = display pkgid ++ " removed" - change (InBoth pkgid pkgid') = display pkgid ++ " -> " - ++ display (mungedVersion pkgid') - change (OnlyInRight pkgid') = display pkgid' ++ " added" - - showDep pkg | Just rdeps <- Map.lookup (packageId pkg) revDeps - = " (via: " ++ unwords (map display rdeps) ++ ")" - | otherwise = "" - - revDepGraphEdges :: [(PackageId, PackageId)] - revDepGraphEdges = [ (rpid, packageId cpkg) - | (ReadyPackage cpkg, _) <- plan - , ConfiguredId rpid (Just PackageDescription.CLibName) _ - <- CD.flatDeps (confPkgDeps cpkg) ] - - revDeps :: Map.Map PackageId [PackageId] - revDeps = Map.fromListWith (++) (map (fmap (:[])) revDepGraphEdges) - --- ------------------------------------------------------------ --- * Post installation stuff --- ------------------------------------------------------------ - --- | Report a solver failure. This works slightly differently to --- 'postInstallActions', as (by definition) we don't have an install plan. 
-reportPlanningFailure :: Verbosity -> InstallArgs -> InstallContext -> String - -> IO () -reportPlanningFailure verbosity - (_, _, comp, platform, _, _, _ - ,_, configFlags, _, installFlags, _) - (_, sourcePkgDb, _, _, pkgSpecifiers, _) - message = do - - when reportFailure $ do - - -- Only create reports for explicitly named packages - let pkgids = filter - (SourcePackageIndex.elemByPackageId (packageIndex sourcePkgDb)) $ - mapMaybe theSpecifiedPackage pkgSpecifiers - - buildReports = BuildReports.fromPlanningFailure platform - (compilerId comp) pkgids - (configConfigurationsFlags configFlags) - - unless (null buildReports) $ - info verbosity $ - "Solver failure will be reported for " - ++ intercalate "," (map display pkgids) - - -- Save reports - BuildReports.storeLocal (compilerInfo comp) - (fromNubList $ installSummaryFile installFlags) - buildReports platform - - -- Save solver log - case logFile of - Nothing -> return () - Just template -> forM_ pkgids $ \pkgid -> - let env = initialPathTemplateEnv pkgid dummyIpid - (compilerInfo comp) platform - path = fromPathTemplate $ substPathTemplate env template - in writeFile path message - - where - reportFailure = fromFlag (installReportPlanningFailure installFlags) - logFile = flagToMaybe (installLogFile installFlags) - - -- A IPID is calculated from the transitive closure of - -- dependencies, but when the solver fails we don't have that. - -- So we fail. - dummyIpid = error "reportPlanningFailure: installed package ID not available" - --- | If a 'PackageSpecifier' refers to a single package, return Just that --- package. 
-theSpecifiedPackage :: Package pkg => PackageSpecifier pkg -> Maybe PackageId -theSpecifiedPackage pkgSpec = - case pkgSpec of - NamedPackage name [PackagePropertyVersion version] - -> PackageIdentifier name <$> trivialRange version - NamedPackage _ _ -> Nothing - SpecificSourcePackage pkg -> Just $ packageId pkg - where - -- | If a range includes only a single version, return Just that version. - trivialRange :: VersionRange -> Maybe Version - trivialRange = foldVersionRange - Nothing - Just -- "== v" - (\_ -> Nothing) - (\_ -> Nothing) - (\_ _ -> Nothing) - (\_ _ -> Nothing) - --- | Various stuff we do after successful or unsuccessfully installing a bunch --- of packages. This includes: --- --- * build reporting, local and remote --- * symlinking binaries --- * updating indexes --- * updating world file --- * error reporting --- -postInstallActions :: Verbosity - -> InstallArgs - -> [UserTarget] - -> InstallPlan - -> BuildOutcomes - -> IO () -postInstallActions verbosity - (packageDBs, _, comp, platform, progdb, useSandbox, mSandboxPkgInfo - ,globalFlags, configFlags, _, installFlags, _) - targets installPlan buildOutcomes = do - - updateSandboxTimestampsFile verbosity useSandbox mSandboxPkgInfo - comp platform installPlan buildOutcomes - - unless oneShot $ - World.insert verbosity worldFile - --FIXME: does not handle flags - [ World.WorldPkgInfo dep mempty - | UserTargetNamed dep <- targets ] - - let buildReports = BuildReports.fromInstallPlan platform (compilerId comp) - installPlan buildOutcomes - BuildReports.storeLocal (compilerInfo comp) - (fromNubList $ installSummaryFile installFlags) - buildReports - platform - when (reportingLevel >= AnonymousReports) $ - BuildReports.storeAnonymous buildReports - when (reportingLevel == DetailedReports) $ - storeDetailedBuildReports verbosity logsDir buildReports - - regenerateHaddockIndex verbosity packageDBs comp platform progdb useSandbox - configFlags installFlags buildOutcomes - - symlinkBinaries verbosity 
platform comp configFlags installFlags - installPlan buildOutcomes - - printBuildFailures verbosity buildOutcomes - - where - reportingLevel = fromFlag (installBuildReports installFlags) - logsDir = fromFlag (globalLogsDir globalFlags) - oneShot = fromFlag (installOneShot installFlags) - worldFile = fromFlag $ globalWorldFile globalFlags - -storeDetailedBuildReports :: Verbosity -> FilePath - -> [(BuildReports.BuildReport, Maybe Repo)] -> IO () -storeDetailedBuildReports verbosity logsDir reports = sequence_ - [ do dotCabal <- getCabalDir - let logFileName = display (BuildReports.package report) <.> "log" - logFile = logsDir logFileName - reportsDir = dotCabal "reports" remoteRepoName remoteRepo - reportFile = reportsDir logFileName - - handleMissingLogFile $ do - buildLog <- readFile logFile - createDirectoryIfMissing True reportsDir -- FIXME - writeFile reportFile (show (BuildReports.show report, buildLog)) - - | (report, Just repo) <- reports - , Just remoteRepo <- [maybeRepoRemote repo] - , isLikelyToHaveLogFile (BuildReports.installOutcome report) ] - - where - isLikelyToHaveLogFile BuildReports.ConfigureFailed {} = True - isLikelyToHaveLogFile BuildReports.BuildFailed {} = True - isLikelyToHaveLogFile BuildReports.InstallFailed {} = True - isLikelyToHaveLogFile BuildReports.InstallOk {} = True - isLikelyToHaveLogFile _ = False - - handleMissingLogFile = Exception.handleJust missingFile $ \ioe -> - warn verbosity $ "Missing log file for build report: " - ++ fromMaybe "" (ioeGetFileName ioe) - - missingFile ioe - | isDoesNotExistError ioe = Just ioe - missingFile _ = Nothing - - -regenerateHaddockIndex :: Verbosity - -> [PackageDB] - -> Compiler - -> Platform - -> ProgramDb - -> UseSandbox - -> ConfigFlags - -> InstallFlags - -> BuildOutcomes - -> IO () -regenerateHaddockIndex verbosity packageDBs comp platform progdb useSandbox - configFlags installFlags buildOutcomes - | haddockIndexFileIsRequested && shouldRegenerateHaddockIndex = do - - defaultDirs <- 
InstallDirs.defaultInstallDirs - (compilerFlavor comp) - (fromFlag (configUserInstall configFlags)) - True - let indexFileTemplate = fromFlag (installHaddockIndex installFlags) - indexFile = substHaddockIndexFileName defaultDirs indexFileTemplate - - notice verbosity $ - "Updating documentation index " ++ indexFile - - --TODO: might be nice if the install plan gave us the new InstalledPackageInfo - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb - Haddock.regenerateHaddockIndex verbosity installedPkgIndex progdb indexFile - - | otherwise = return () - where - haddockIndexFileIsRequested = - fromFlag (installDocumentation installFlags) - && isJust (flagToMaybe (installHaddockIndex installFlags)) - - -- We want to regenerate the index if some new documentation was actually - -- installed. Since the index can be only per-user or per-sandbox (see - -- #1337), we don't do it for global installs or special cases where we're - -- installing into a specific db. - shouldRegenerateHaddockIndex = (isUseSandbox useSandbox || normalUserInstall) - && someDocsWereInstalled buildOutcomes - where - someDocsWereInstalled = any installedDocs . Map.elems - installedDocs (Right (BuildResult DocsOk _ _)) = True - installedDocs _ = False - - normalUserInstall = (UserPackageDB `elem` packageDBs) - && all (not . isSpecificPackageDB) packageDBs - isSpecificPackageDB (SpecificPackageDB _) = True - isSpecificPackageDB _ = False - - substHaddockIndexFileName defaultDirs = fromPathTemplate - . 
substPathTemplate env - where - env = env0 ++ installDirsTemplateEnv absoluteDirs - env0 = InstallDirs.compilerTemplateEnv (compilerInfo comp) - ++ InstallDirs.platformTemplateEnv platform - ++ InstallDirs.abiTemplateEnv (compilerInfo comp) platform - absoluteDirs = InstallDirs.substituteInstallDirTemplates - env0 templateDirs - templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault - defaultDirs (configInstallDirs configFlags) - - -symlinkBinaries :: Verbosity - -> Platform -> Compiler - -> ConfigFlags - -> InstallFlags - -> InstallPlan - -> BuildOutcomes - -> IO () -symlinkBinaries verbosity platform comp configFlags installFlags - plan buildOutcomes = do - failed <- InstallSymlink.symlinkBinaries platform comp - configFlags installFlags - plan buildOutcomes - case failed of - [] -> return () - [(_, exe, path)] -> - warn verbosity $ - "could not create a symlink in " ++ bindir ++ " for " - ++ display exe ++ " because the file exists there already but is not " - ++ "managed by cabal. You can create a symlink for this executable " - ++ "manually if you wish. The executable file has been installed at " - ++ path - exes -> - warn verbosity $ - "could not create symlinks in " ++ bindir ++ " for " - ++ intercalate ", " [ display exe | (_, exe, _) <- exes ] - ++ " because the files exist there already and are not " - ++ "managed by cabal. You can create symlinks for these executables " - ++ "manually if you wish. The executable files have been installed at " - ++ intercalate ", " [ path | (_, _, path) <- exes ] - where - bindir = fromFlag (installSymlinkBinDir installFlags) - - -printBuildFailures :: Verbosity -> BuildOutcomes -> IO () -printBuildFailures verbosity buildOutcomes = - case [ (pkgid, failure) - | (pkgid, Left failure) <- Map.toList buildOutcomes ] of - [] -> return () - failed -> die' verbosity . 
unlines - $ "Error: some packages failed to install:" - : [ display pkgid ++ printFailureReason reason - | (pkgid, reason) <- failed ] - where - printFailureReason reason = case reason of - DependentFailed pkgid -> " depends on " ++ display pkgid - ++ " which failed to install." - DownloadFailed e -> " failed while downloading the package." - ++ showException e - UnpackFailed e -> " failed while unpacking the package." - ++ showException e - ConfigureFailed e -> " failed during the configure step." - ++ showException e - BuildFailed e -> " failed during the building phase." - ++ showException e - TestsFailed e -> " failed during the tests phase." - ++ showException e - InstallFailed e -> " failed during the final install step." - ++ showException e - - -- This will never happen, but we include it for completeness - PlanningFailed -> " failed during the planning phase." - - showException e = " The exception was:\n " ++ show e ++ maybeOOM e -#ifdef mingw32_HOST_OS - maybeOOM _ = "" -#else - maybeOOM e = maybe "" onExitFailure (fromException e) - onExitFailure (ExitFailure n) - | n == 9 || n == -9 = - "\nThis may be due to an out-of-memory condition." - onExitFailure _ = "" -#endif - - --- | If we're working inside a sandbox and some add-source deps were installed, --- update the timestamps of those deps. 
-updateSandboxTimestampsFile :: Verbosity -> UseSandbox -> Maybe SandboxPackageInfo - -> Compiler -> Platform - -> InstallPlan - -> BuildOutcomes - -> IO () -updateSandboxTimestampsFile verbosity (UseSandbox sandboxDir) - (Just (SandboxPackageInfo _ _ _ allAddSourceDeps)) - comp platform installPlan buildOutcomes = - withUpdateTimestamps verbosity sandboxDir (compilerId comp) platform $ \_ -> do - let allInstalled = [ pkg - | InstallPlan.Configured pkg - <- InstallPlan.toList installPlan - , case InstallPlan.lookupBuildOutcome pkg buildOutcomes of - Just (Right _success) -> True - _ -> False - ] - allSrcPkgs = [ confPkgSource cpkg | cpkg <- allInstalled ] - allPaths = [ pth | LocalUnpackedPackage pth - <- map packageSource allSrcPkgs] - allPathsCanonical <- mapM tryCanonicalizePath allPaths - return $! filter (`S.member` allAddSourceDeps) allPathsCanonical - -updateSandboxTimestampsFile _ _ _ _ _ _ _ = return () - --- ------------------------------------------------------------ --- * Actually do the installations --- ------------------------------------------------------------ - -data InstallMisc = InstallMisc { - libVersion :: Maybe Version - } - --- | If logging is enabled, contains location of the log file and the verbosity --- level for logging. -type UseLogFile = Maybe (PackageIdentifier -> UnitId -> FilePath, Verbosity) - -performInstallations :: Verbosity - -> InstallArgs - -> InstalledPackageIndex - -> InstallPlan - -> IO BuildOutcomes -performInstallations verbosity - (packageDBs, repoCtxt, comp, platform, progdb, useSandbox, _, - globalFlags, configFlags, configExFlags, installFlags, haddockFlags) - installedPkgIndex installPlan = do - - -- With 'install -j' it can be a bit hard to tell whether a sandbox is used. - whenUsingSandbox useSandbox $ \sandboxDir -> - when parallelInstall $ - notice verbosity $ "Notice: installing into a sandbox located at " - ++ sandboxDir - info verbosity $ "Number of threads used: " ++ (show numJobs) ++ "." 
- - jobControl <- if parallelInstall then newParallelJobControl numJobs - else newSerialJobControl - fetchLimit <- newJobLimit (min numJobs numFetchJobs) - installLock <- newLock -- serialise installation - cacheLock <- newLock -- serialise access to setup exe cache - - executeInstallPlan verbosity jobControl keepGoing useLogFile - installPlan $ \rpkg -> - installReadyPackage platform cinfo configFlags - rpkg $ \configFlags' src pkg pkgoverride -> - fetchSourcePackage verbosity repoCtxt fetchLimit src $ \src' -> - installLocalPackage verbosity (packageId pkg) src' distPref $ \mpath -> - installUnpackedPackage verbosity installLock numJobs - (setupScriptOptions installedPkgIndex - cacheLock rpkg) - configFlags' - installFlags haddockFlags comp progdb - platform pkg rpkg pkgoverride mpath useLogFile - - where - cinfo = compilerInfo comp - - numJobs = determineNumJobs (installNumJobs installFlags) - numFetchJobs = 2 - parallelInstall = numJobs >= 2 - keepGoing = fromFlag (installKeepGoing installFlags) - distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) - (configDistPref configFlags) - - setupScriptOptions index lock rpkg = - configureSetupScript - packageDBs - comp - platform - progdb - distPref - (chooseCabalVersion configExFlags (libVersion miscOptions)) - (Just lock) - parallelInstall - index - (Just rpkg) - - reportingLevel = fromFlag (installBuildReports installFlags) - logsDir = fromFlag (globalLogsDir globalFlags) - - -- Should the build output be written to a log file instead of stdout? - useLogFile :: UseLogFile - useLogFile = fmap ((\f -> (f, loggingVerbosity)) . substLogFileName) - logFileTemplate - where - installLogFile' = flagToMaybe $ installLogFile installFlags - defaultTemplate = toPathTemplate $ - logsDir "$compiler" "$libname" <.> "log" - - -- If the user has specified --remote-build-reporting=detailed, use the - -- default log file location. If the --build-log option is set, use the - -- provided location. 
Otherwise don't use logging, unless building in - -- parallel (in which case the default location is used). - logFileTemplate :: Maybe PathTemplate - logFileTemplate - | useDefaultTemplate = Just defaultTemplate - | otherwise = installLogFile' - - -- If the user has specified --remote-build-reporting=detailed or - -- --build-log, use more verbose logging. - loggingVerbosity :: Verbosity - loggingVerbosity | overrideVerbosity = modifyVerbosity (max verbose) verbosity - | otherwise = verbosity - - useDefaultTemplate :: Bool - useDefaultTemplate - | reportingLevel == DetailedReports = True - | isJust installLogFile' = False - | parallelInstall = True - | otherwise = False - - overrideVerbosity :: Bool - overrideVerbosity - | reportingLevel == DetailedReports = True - | isJust installLogFile' = True - | parallelInstall = False - | otherwise = False - - substLogFileName :: PathTemplate -> PackageIdentifier -> UnitId -> FilePath - substLogFileName template pkg uid = fromPathTemplate - . substPathTemplate env - $ template - where env = initialPathTemplateEnv (packageId pkg) uid - (compilerInfo comp) platform - - miscOptions = InstallMisc { - libVersion = flagToMaybe (configCabalVersion configExFlags) - } - - -executeInstallPlan :: Verbosity - -> JobControl IO (UnitId, BuildOutcome) - -> Bool - -> UseLogFile - -> InstallPlan - -> (ReadyPackage -> IO BuildOutcome) - -> IO BuildOutcomes -executeInstallPlan verbosity jobCtl keepGoing useLogFile plan0 installPkg = - InstallPlan.execute - jobCtl keepGoing depsFailure plan0 $ \pkg -> do - buildOutcome <- installPkg pkg - printBuildResult (packageId pkg) (installedUnitId pkg) buildOutcome - return buildOutcome - - where - depsFailure = DependentFailed . packageId - - -- Print build log if something went wrong, and 'Installed $PKGID' - -- otherwise. 
- printBuildResult :: PackageId -> UnitId -> BuildOutcome -> IO () - printBuildResult pkgid uid buildOutcome = case buildOutcome of - (Right _) -> progressMessage verbosity ProgressCompleted (display pkgid) - (Left _) -> do - notice verbosity $ "Failed to install " ++ display pkgid - when (verbosity >= normal) $ - case useLogFile of - Nothing -> return () - Just (mkLogFileName, _) -> do - let logName = mkLogFileName pkgid uid - putStr $ "Build log ( " ++ logName ++ " ):\n" - printFile logName - - printFile :: FilePath -> IO () - printFile path = readFile path >>= putStr - --- | Call an installer for an 'SourcePackage' but override the configure --- flags with the ones given by the 'ReadyPackage'. In particular the --- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly --- versioned package dependencies. So we ignore any previous partial flag --- assignment or dependency constraints and use the new ones. --- --- NB: when updating this function, don't forget to also update --- 'configurePackage' in D.C.Configure. -installReadyPackage :: Platform -> CompilerInfo - -> ConfigFlags - -> ReadyPackage - -> (ConfigFlags -> UnresolvedPkgLoc - -> PackageDescription - -> PackageDescriptionOverride - -> a) - -> a -installReadyPackage platform cinfo configFlags - (ReadyPackage (ConfiguredPackage ipid - (SourcePackage _ gpkg source pkgoverride) - flags stanzas deps)) - installPkg = - installPkg configFlags { - configIPID = toFlag (display ipid), - configConfigurationsFlags = flags, - -- We generate the legacy constraints as well as the new style precise deps. - -- In the end only one set gets passed to Setup.hs configure, depending on - -- the Cabal version we are talking to. 
- configConstraints = [ thisPackageVersion srcid - | ConfiguredId srcid (Just PackageDescription.CLibName) _ipid - <- CD.nonSetupDeps deps ], - configDependencies = [ (packageName srcid, dep_ipid) - | ConfiguredId srcid (Just PackageDescription.CLibName) dep_ipid - <- CD.nonSetupDeps deps ], - -- Use '--exact-configuration' if supported. - configExactConfiguration = toFlag True, - configBenchmarks = toFlag False, - configTests = toFlag (TestStanzas `elem` stanzas) - } source pkg pkgoverride - where - pkg = case finalizePD flags (enableStanzas stanzas) - (const True) - platform cinfo [] gpkg of - Left _ -> error "finalizePD ReadyPackage failed" - Right (desc, _) -> desc - -fetchSourcePackage - :: Verbosity - -> RepoContext - -> JobLimit - -> UnresolvedPkgLoc - -> (ResolvedPkgLoc -> IO BuildOutcome) - -> IO BuildOutcome -fetchSourcePackage verbosity repoCtxt fetchLimit src installPkg = do - fetched <- checkFetched src - case fetched of - Just src' -> installPkg src' - Nothing -> onFailure DownloadFailed $ do - loc <- withJobLimit fetchLimit $ - fetchPackage verbosity repoCtxt src - installPkg loc - - -installLocalPackage - :: Verbosity - -> PackageIdentifier -> ResolvedPkgLoc -> FilePath - -> (Maybe FilePath -> IO BuildOutcome) - -> IO BuildOutcome -installLocalPackage verbosity pkgid location distPref installPkg = - - case location of - - LocalUnpackedPackage dir -> - installPkg (Just dir) - - RemoteSourceRepoPackage _repo dir -> - installPkg (Just dir) - - LocalTarballPackage tarballPath -> - installLocalTarballPackage verbosity - pkgid tarballPath distPref installPkg - - RemoteTarballPackage _ tarballPath -> - installLocalTarballPackage verbosity - pkgid tarballPath distPref installPkg - - RepoTarballPackage _ _ tarballPath -> - installLocalTarballPackage verbosity - pkgid tarballPath distPref installPkg - -installLocalTarballPackage - :: Verbosity - -> PackageIdentifier -> FilePath -> FilePath - -> (Maybe FilePath -> IO BuildOutcome) - -> IO BuildOutcome 
-installLocalTarballPackage verbosity pkgid - tarballPath distPref installPkg = do - tmp <- getTemporaryDirectory - withTempDirectory verbosity tmp "cabal-tmp" $ \tmpDirPath -> - onFailure UnpackFailed $ do - let relUnpackedPath = display pkgid - absUnpackedPath = tmpDirPath relUnpackedPath - descFilePath = absUnpackedPath - display (packageName pkgid) <.> "cabal" - info verbosity $ "Extracting " ++ tarballPath - ++ " to " ++ tmpDirPath ++ "..." - extractTarGzFile tmpDirPath relUnpackedPath tarballPath - exists <- doesFileExist descFilePath - unless exists $ - die' verbosity $ "Package .cabal file not found: " ++ show descFilePath - maybeRenameDistDir absUnpackedPath - installPkg (Just absUnpackedPath) - - where - -- 'cabal sdist' puts pre-generated files in the 'dist' - -- directory. This fails when a nonstandard build directory name - -- is used (as is the case with sandboxes), so we need to rename - -- the 'dist' dir here. - -- - -- TODO: 'cabal get happy && cd sandbox && cabal install ../happy' still - -- fails even with this workaround. We probably can live with that. - maybeRenameDistDir :: FilePath -> IO () - maybeRenameDistDir absUnpackedPath = do - let distDirPath = absUnpackedPath defaultDistPref - distDirPathTmp = absUnpackedPath (defaultDistPref ++ "-tmp") - distDirPathNew = absUnpackedPath distPref - distDirExists <- doesDirectoryExist distDirPath - when (distDirExists - && (not $ distDirPath `equalFilePath` distDirPathNew)) $ do - -- NB: we need to handle the case when 'distDirPathNew' is a - -- subdirectory of 'distDirPath' (e.g. the former is - -- 'dist/dist-sandbox-3688fbc2' and the latter is 'dist'). - debug verbosity $ "Renaming '" ++ distDirPath ++ "' to '" - ++ distDirPathTmp ++ "'." - renameDirectory distDirPath distDirPathTmp - when (distDirPath `isPrefixOf` distDirPathNew) $ - createDirectoryIfMissingVerbose verbosity False distDirPath - debug verbosity $ "Renaming '" ++ distDirPathTmp ++ "' to '" - ++ distDirPathNew ++ "'." 
- renameDirectory distDirPathTmp distDirPathNew - -installUnpackedPackage - :: Verbosity - -> Lock - -> Int - -> SetupScriptOptions - -> ConfigFlags - -> InstallFlags - -> HaddockFlags - -> Compiler - -> ProgramDb - -> Platform - -> PackageDescription - -> ReadyPackage - -> PackageDescriptionOverride - -> Maybe FilePath -- ^ Directory to change to before starting the installation. - -> UseLogFile -- ^ File to log output to (if any) - -> IO BuildOutcome -installUnpackedPackage verbosity installLock numJobs - scriptOptions - configFlags installFlags haddockFlags comp progdb - platform pkg rpkg pkgoverride workingDir useLogFile = do - -- Override the .cabal file if necessary - case pkgoverride of - Nothing -> return () - Just pkgtxt -> do - let descFilePath = fromMaybe "." workingDir - display (packageName pkgid) <.> "cabal" - info verbosity $ - "Updating " ++ display (packageName pkgid) <.> "cabal" - ++ " with the latest revision from the index." - writeFileAtomic descFilePath pkgtxt - - -- Make sure that we pass --libsubdir etc to 'setup configure' (necessary if - -- the setup script was compiled against an old version of the Cabal lib). - configFlags' <- addDefaultInstallDirs configFlags - -- Filter out flags not supported by the old versions of the Cabal lib. - let configureFlags :: Version -> ConfigFlags - configureFlags = filterConfigureFlags configFlags' { - configVerbosity = toFlag verbosity' - } - - -- Path to the optional log file. 
- mLogPath <- maybeLogPath - - logDirChange (maybe (const (return ())) appendFile mLogPath) workingDir $ do - -- Configure phase - onFailure ConfigureFailed $ do - noticeProgress ProgressStarting - setup configureCommand configureFlags mLogPath - - -- Build phase - onFailure BuildFailed $ do - noticeProgress ProgressBuilding - setup buildCommand' buildFlags mLogPath - - -- Doc generation phase - docsResult <- if shouldHaddock - then (do setup haddockCommand haddockFlags' mLogPath - return DocsOk) - `catchIO` (\_ -> return DocsFailed) - `catchExit` (\_ -> return DocsFailed) - else return DocsNotTried - - -- Tests phase - onFailure TestsFailed $ do - when (testsEnabled && PackageDescription.hasTests pkg) $ - setup Cabal.testCommand testFlags mLogPath - - let testsResult | testsEnabled = TestsOk - | otherwise = TestsNotTried - - -- Install phase - onFailure InstallFailed $ criticalSection installLock $ do - -- Actual installation - withWin32SelfUpgrade verbosity uid configFlags - cinfo platform pkg $ do - setup Cabal.copyCommand copyFlags mLogPath - - -- Capture installed package configuration file, so that - -- it can be incorporated into the final InstallPlan - ipkgs <- genPkgConfs mLogPath - let ipkgs' = case ipkgs of - [ipkg] -> [ipkg { Installed.installedUnitId = uid }] - _ -> ipkgs - let packageDBs = interpretPackageDbFlags - (fromFlag (configUserInstall configFlags)) - (configPackageDBs configFlags) - forM_ ipkgs' $ \ipkg' -> - registerPackage verbosity comp progdb - packageDBs ipkg' - defaultRegisterOptions - - return (Right (BuildResult docsResult testsResult (find ((==uid).installedUnitId) ipkgs'))) - - where - pkgid = packageId pkg - uid = installedUnitId rpkg - cinfo = compilerInfo comp - buildCommand' = buildCommand progdb - dispname = display pkgid - isParallelBuild = numJobs >= 2 - - noticeProgress phase = when isParallelBuild $ - progressMessage verbosity phase dispname - - buildFlags _ = emptyBuildFlags { - buildDistPref = configDistPref configFlags, 
- buildVerbosity = toFlag verbosity' - } - shouldHaddock = fromFlag (installDocumentation installFlags) - haddockFlags' _ = haddockFlags { - haddockVerbosity = toFlag verbosity', - haddockDistPref = configDistPref configFlags - } - testsEnabled = fromFlag (configTests configFlags) - && fromFlagOrDefault False (installRunTests installFlags) - testFlags _ = Cabal.emptyTestFlags { - Cabal.testDistPref = configDistPref configFlags - } - copyFlags _ = Cabal.emptyCopyFlags { - Cabal.copyDistPref = configDistPref configFlags, - Cabal.copyDest = toFlag InstallDirs.NoCopyDest, - Cabal.copyVerbosity = toFlag verbosity' - } - shouldRegister = PackageDescription.hasLibs pkg - registerFlags _ = Cabal.emptyRegisterFlags { - Cabal.regDistPref = configDistPref configFlags, - Cabal.regVerbosity = toFlag verbosity' - } - verbosity' = maybe verbosity snd useLogFile - tempTemplate name = name ++ "-" ++ display pkgid - - addDefaultInstallDirs :: ConfigFlags -> IO ConfigFlags - addDefaultInstallDirs configFlags' = do - defInstallDirs <- InstallDirs.defaultInstallDirs flavor userInstall False - return $ configFlags' { - configInstallDirs = fmap Cabal.Flag . - InstallDirs.substituteInstallDirTemplates env $ - InstallDirs.combineInstallDirs fromFlagOrDefault - defInstallDirs (configInstallDirs configFlags) - } - where - CompilerId flavor _ = compilerInfoId cinfo - env = initialPathTemplateEnv pkgid uid cinfo platform - userInstall = fromFlagOrDefault defaultUserInstall - (configUserInstall configFlags') - - genPkgConfs :: Maybe FilePath - -> IO [Installed.InstalledPackageInfo] - genPkgConfs mLogPath = - if shouldRegister then do - tmp <- getTemporaryDirectory - withTempDirectory verbosity tmp (tempTemplate "pkgConf") $ \dir -> do - let pkgConfDest = dir "pkgConf" - registerFlags' version = (registerFlags version) { - Cabal.regGenPkgConf = toFlag (Just pkgConfDest) - } - setup Cabal.registerCommand registerFlags' mLogPath - is_dir <- doesDirectoryExist pkgConfDest - let notHidden = not . 
isHidden - isHidden name = "." `isPrefixOf` name - if is_dir - -- Sort so that each prefix of the package - -- configurations is well formed - then mapM (readPkgConf pkgConfDest) . sort . filter notHidden - =<< getDirectoryContents pkgConfDest - else fmap (:[]) $ readPkgConf "." pkgConfDest - else return [] - - readPkgConf :: FilePath -> FilePath - -> IO Installed.InstalledPackageInfo - readPkgConf pkgConfDir pkgConfFile = - (withUTF8FileContents (pkgConfDir pkgConfFile) $ \pkgConfText -> - case Installed.parseInstalledPackageInfo pkgConfText of - Installed.ParseFailed perror -> pkgConfParseFailed perror - Installed.ParseOk warns pkgConf -> do - unless (null warns) $ - warn verbosity $ unlines (map (showPWarning pkgConfFile) warns) - return pkgConf) - - pkgConfParseFailed :: Installed.PError -> IO a - pkgConfParseFailed perror = - die' verbosity $ "Couldn't parse the output of 'setup register --gen-pkg-config':" - ++ show perror - - maybeLogPath :: IO (Maybe FilePath) - maybeLogPath = - case useLogFile of - Nothing -> return Nothing - Just (mkLogFileName, _) -> do - let logFileName = mkLogFileName (packageId pkg) uid - logDir = takeDirectory logFileName - unless (null logDir) $ createDirectoryIfMissing True logDir - logFileExists <- doesFileExist logFileName - when logFileExists $ removeFile logFileName - return (Just logFileName) - - setup cmd flags mLogPath = - Exception.bracket - (traverse (\path -> openFile path AppendMode) mLogPath) - (traverse_ hClose) - (\logFileHandle -> - setupWrapper verbosity - scriptOptions { useLoggingHandle = logFileHandle - , useWorkingDir = workingDir } - (Just pkg) - cmd flags (const [])) - - --- helper -onFailure :: (SomeException -> BuildFailure) -> IO BuildOutcome -> IO BuildOutcome -onFailure result action = - action `catches` - [ Handler $ \ioe -> handler (ioe :: IOException) - , Handler $ \exit -> handler (exit :: ExitCode) - ] - where - handler :: Exception e => e -> IO BuildOutcome - handler = return . Left . result . 
toException - - --- ------------------------------------------------------------ --- * Weird windows hacks --- ------------------------------------------------------------ - -withWin32SelfUpgrade :: Verbosity - -> UnitId - -> ConfigFlags - -> CompilerInfo - -> Platform - -> PackageDescription - -> IO a -> IO a -withWin32SelfUpgrade _ _ _ _ _ _ action | buildOS /= Windows = action -withWin32SelfUpgrade verbosity uid configFlags cinfo platform pkg action = do - - defaultDirs <- InstallDirs.defaultInstallDirs - compFlavor - (fromFlag (configUserInstall configFlags)) - (PackageDescription.hasLibs pkg) - - Win32SelfUpgrade.possibleSelfUpgrade verbosity - (exeInstallPaths defaultDirs) action - - where - pkgid = packageId pkg - (CompilerId compFlavor _) = compilerInfoId cinfo - - exeInstallPaths defaultDirs = - [ InstallDirs.bindir absoluteDirs exeName <.> exeExtension buildPlatform - | exe <- PackageDescription.executables pkg - , PackageDescription.buildable (PackageDescription.buildInfo exe) - , let exeName = prefix ++ display (PackageDescription.exeName exe) ++ suffix - prefix = substTemplate prefixTemplate - suffix = substTemplate suffixTemplate ] - where - fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "") - prefixTemplate = fromFlagTemplate (configProgPrefix configFlags) - suffixTemplate = fromFlagTemplate (configProgSuffix configFlags) - templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault - defaultDirs (configInstallDirs configFlags) - absoluteDirs = InstallDirs.absoluteInstallDirs - pkgid uid - cinfo InstallDirs.NoCopyDest - platform templateDirs - substTemplate = InstallDirs.fromPathTemplate - . 
InstallDirs.substPathTemplate env - where env = InstallDirs.initialPathTemplateEnv pkgid uid - cinfo platform diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/InstallPlan.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/InstallPlan.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/InstallPlan.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/InstallPlan.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,952 +0,0 @@ -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE ConstraintKinds #-} -{-# LANGUAGE FlexibleContexts #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.InstallPlan --- Copyright : (c) Duncan Coutts 2008 --- License : BSD-like --- --- Maintainer : duncan@community.haskell.org --- Stability : provisional --- Portability : portable --- --- Package installation plan --- ------------------------------------------------------------------------------ -module Distribution.Client.InstallPlan ( - InstallPlan, - GenericInstallPlan, - PlanPackage, - GenericPlanPackage(..), - foldPlanPackage, - IsUnit, - - -- * Operations on 'InstallPlan's - new, - toGraph, - toList, - toMap, - keys, - keysSet, - planIndepGoals, - depends, - - fromSolverInstallPlan, - fromSolverInstallPlanWithProgress, - configureInstallPlan, - remove, - installed, - lookup, - directDeps, - revDirectDeps, - - -- * Traversal - executionOrder, - execute, - BuildOutcomes, - lookupBuildOutcome, - -- ** Traversal helpers - -- $traversal - Processing, - ready, - completed, - failed, - - -- * Display - showPlanGraph, - showInstallPlan, - - -- * Graph-like operations - dependencyClosure, - 
reverseTopologicalOrder, - reverseDependencyClosure, - ) where - -import Distribution.Client.Types hiding (BuildOutcomes) -import qualified Distribution.PackageDescription as PD -import qualified Distribution.Simple.Configure as Configure -import qualified Distribution.Simple.Setup as Cabal - -import Distribution.InstalledPackageInfo - ( InstalledPackageInfo ) -import Distribution.Package - ( Package(..), HasMungedPackageId(..) - , HasUnitId(..), UnitId ) -import Distribution.Solver.Types.SolverPackage -import Distribution.Client.JobControl -import Distribution.Text -import Text.PrettyPrint -import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan -import Distribution.Client.SolverInstallPlan (SolverInstallPlan) - -import qualified Distribution.Solver.Types.ComponentDeps as CD -import Distribution.Solver.Types.Settings -import Distribution.Solver.Types.SolverId -import Distribution.Solver.Types.InstSolverPackage - -import Distribution.Utils.LogProgress - --- TODO: Need this when we compute final UnitIds --- import qualified Distribution.Simple.Configure as Configure - -import Data.List - ( foldl', intercalate ) -import qualified Data.Foldable as Foldable (all) -import Data.Maybe - ( fromMaybe, mapMaybe ) -import qualified Distribution.Compat.Graph as Graph -import Distribution.Compat.Graph (Graph, IsNode(..)) -import Distribution.Compat.Binary (Binary(..)) -import GHC.Generics -import Data.Typeable -import Control.Monad -import Control.Exception - ( assert ) -import qualified Data.Map as Map -import Data.Map (Map) -import qualified Data.Set as Set -import Data.Set (Set) - -import Prelude hiding (lookup) - - --- When cabal tries to install a number of packages, including all their --- dependencies it has a non-trivial problem to solve. --- --- The Problem: --- --- In general we start with a set of installed packages and a set of source --- packages. --- --- Installed packages have fixed dependencies. 
They have already been built and --- we know exactly what packages they were built against, including their exact --- versions. --- --- Source package have somewhat flexible dependencies. They are specified as --- version ranges, though really they're predicates. To make matters worse they --- have conditional flexible dependencies. Configuration flags can affect which --- packages are required and can place additional constraints on their --- versions. --- --- These two sets of package can and usually do overlap. There can be installed --- packages that are also available as source packages which means they could --- be re-installed if required, though there will also be packages which are --- not available as source and cannot be re-installed. Very often there will be --- extra versions available than are installed. Sometimes we may like to prefer --- installed packages over source ones or perhaps always prefer the latest --- available version whether installed or not. --- --- The goal is to calculate an installation plan that is closed, acyclic and --- consistent and where every configured package is valid. --- --- An installation plan is a set of packages that are going to be used --- together. It will consist of a mixture of installed packages and source --- packages along with their exact version dependencies. An installation plan --- is closed if for every package in the set, all of its dependencies are --- also in the set. It is consistent if for every package in the set, all --- dependencies which target that package have the same version. - --- Note that plans do not necessarily compose. You might have a valid plan for --- package A and a valid plan for package B. That does not mean the composition --- is simultaneously valid for A and B. In particular you're most likely to --- have problems with inconsistent dependencies. --- On the other hand it is true that every closed sub plan is valid. 
- --- | Packages in an install plan --- --- NOTE: 'ConfiguredPackage', 'GenericReadyPackage' and 'GenericPlanPackage' --- intentionally have no 'PackageInstalled' instance. `This is important: --- PackageInstalled returns only library dependencies, but for package that --- aren't yet installed we know many more kinds of dependencies (setup --- dependencies, exe, test-suite, benchmark, ..). Any functions that operate on --- dependencies in cabal-install should consider what to do with these --- dependencies; if we give a 'PackageInstalled' instance it would be too easy --- to get this wrong (and, for instance, call graph traversal functions from --- Cabal rather than from cabal-install). Instead, see 'PackageInstalled'. -data GenericPlanPackage ipkg srcpkg - = PreExisting ipkg - | Configured srcpkg - | Installed srcpkg - deriving (Eq, Show, Generic) - --- | Convenience combinator for destructing 'GenericPlanPackage'. --- This is handy because if you case manually, you have to handle --- 'Configured' and 'Installed' separately (where often you want --- them to be the same.) -foldPlanPackage :: (ipkg -> a) - -> (srcpkg -> a) - -> GenericPlanPackage ipkg srcpkg - -> a -foldPlanPackage f _ (PreExisting ipkg) = f ipkg -foldPlanPackage _ g (Configured srcpkg) = g srcpkg -foldPlanPackage _ g (Installed srcpkg) = g srcpkg - -type IsUnit a = (IsNode a, Key a ~ UnitId) - -depends :: IsUnit a => a -> [UnitId] -depends = nodeNeighbors - --- NB: Expanded constraint synonym here to avoid undecidable --- instance errors in GHC 7.8 and earlier. 
-instance (IsNode ipkg, IsNode srcpkg, Key ipkg ~ UnitId, Key srcpkg ~ UnitId) - => IsNode (GenericPlanPackage ipkg srcpkg) where - type Key (GenericPlanPackage ipkg srcpkg) = UnitId - nodeKey (PreExisting ipkg) = nodeKey ipkg - nodeKey (Configured spkg) = nodeKey spkg - nodeKey (Installed spkg) = nodeKey spkg - nodeNeighbors (PreExisting ipkg) = nodeNeighbors ipkg - nodeNeighbors (Configured spkg) = nodeNeighbors spkg - nodeNeighbors (Installed spkg) = nodeNeighbors spkg - -instance (Binary ipkg, Binary srcpkg) - => Binary (GenericPlanPackage ipkg srcpkg) - -type PlanPackage = GenericPlanPackage - InstalledPackageInfo (ConfiguredPackage UnresolvedPkgLoc) - -instance (Package ipkg, Package srcpkg) => - Package (GenericPlanPackage ipkg srcpkg) where - packageId (PreExisting ipkg) = packageId ipkg - packageId (Configured spkg) = packageId spkg - packageId (Installed spkg) = packageId spkg - -instance (HasMungedPackageId ipkg, HasMungedPackageId srcpkg) => - HasMungedPackageId (GenericPlanPackage ipkg srcpkg) where - mungedId (PreExisting ipkg) = mungedId ipkg - mungedId (Configured spkg) = mungedId spkg - mungedId (Installed spkg) = mungedId spkg - -instance (HasUnitId ipkg, HasUnitId srcpkg) => - HasUnitId - (GenericPlanPackage ipkg srcpkg) where - installedUnitId (PreExisting ipkg) = installedUnitId ipkg - installedUnitId (Configured spkg) = installedUnitId spkg - installedUnitId (Installed spkg) = installedUnitId spkg - -instance (HasConfiguredId ipkg, HasConfiguredId srcpkg) => - HasConfiguredId (GenericPlanPackage ipkg srcpkg) where - configuredId (PreExisting ipkg) = configuredId ipkg - configuredId (Configured spkg) = configuredId spkg - configuredId (Installed spkg) = configuredId spkg - -data GenericInstallPlan ipkg srcpkg = GenericInstallPlan { - planGraph :: !(Graph (GenericPlanPackage ipkg srcpkg)), - planIndepGoals :: !IndependentGoals - } - deriving (Typeable) - --- | 'GenericInstallPlan' specialised to most commonly used types. 
-type InstallPlan = GenericInstallPlan - InstalledPackageInfo (ConfiguredPackage UnresolvedPkgLoc) - --- | Smart constructor that deals with caching the 'Graph' representation. --- -mkInstallPlan :: (IsUnit ipkg, IsUnit srcpkg) - => String - -> Graph (GenericPlanPackage ipkg srcpkg) - -> IndependentGoals - -> GenericInstallPlan ipkg srcpkg -mkInstallPlan loc graph indepGoals = - assert (valid loc graph) - GenericInstallPlan { - planGraph = graph, - planIndepGoals = indepGoals - } - -internalError :: String -> String -> a -internalError loc msg = error $ "internal error in InstallPlan." ++ loc - ++ if null msg then "" else ": " ++ msg - -instance (IsNode ipkg, Key ipkg ~ UnitId, IsNode srcpkg, Key srcpkg ~ UnitId, - Binary ipkg, Binary srcpkg) - => Binary (GenericInstallPlan ipkg srcpkg) where - put GenericInstallPlan { - planGraph = graph, - planIndepGoals = indepGoals - } = put (graph, indepGoals) - - get = do - (graph, indepGoals) <- get - return $! mkInstallPlan "(instance Binary)" graph indepGoals - -showPlanGraph :: (Package ipkg, Package srcpkg, - IsUnit ipkg, IsUnit srcpkg) - => Graph (GenericPlanPackage ipkg srcpkg) -> String -showPlanGraph graph = renderStyle defaultStyle $ - vcat (map dispPlanPackage (Graph.toList graph)) - where dispPlanPackage p = - hang (hsep [ text (showPlanPackageTag p) - , disp (packageId p) - , parens (disp (nodeKey p))]) 2 - (vcat (map disp (nodeNeighbors p))) - -showInstallPlan :: (Package ipkg, Package srcpkg, - IsUnit ipkg, IsUnit srcpkg) - => GenericInstallPlan ipkg srcpkg -> String -showInstallPlan = showPlanGraph . planGraph - -showPlanPackageTag :: GenericPlanPackage ipkg srcpkg -> String -showPlanPackageTag (PreExisting _) = "PreExisting" -showPlanPackageTag (Configured _) = "Configured" -showPlanPackageTag (Installed _) = "Installed" - --- | Build an installation plan from a valid set of resolved packages. 
--- -new :: (IsUnit ipkg, IsUnit srcpkg) - => IndependentGoals - -> Graph (GenericPlanPackage ipkg srcpkg) - -> GenericInstallPlan ipkg srcpkg -new indepGoals graph = mkInstallPlan "new" graph indepGoals - -toGraph :: GenericInstallPlan ipkg srcpkg - -> Graph (GenericPlanPackage ipkg srcpkg) -toGraph = planGraph - -toList :: GenericInstallPlan ipkg srcpkg - -> [GenericPlanPackage ipkg srcpkg] -toList = Graph.toList . planGraph - -toMap :: GenericInstallPlan ipkg srcpkg - -> Map UnitId (GenericPlanPackage ipkg srcpkg) -toMap = Graph.toMap . planGraph - -keys :: GenericInstallPlan ipkg srcpkg -> [UnitId] -keys = Graph.keys . planGraph - -keysSet :: GenericInstallPlan ipkg srcpkg -> Set UnitId -keysSet = Graph.keysSet . planGraph - --- | Remove packages from the install plan. This will result in an --- error if there are remaining packages that depend on any matching --- package. This is primarily useful for obtaining an install plan for --- the dependencies of a package or set of packages without actually --- installing the package itself, as when doing development. --- -remove :: (IsUnit ipkg, IsUnit srcpkg) - => (GenericPlanPackage ipkg srcpkg -> Bool) - -> GenericInstallPlan ipkg srcpkg - -> GenericInstallPlan ipkg srcpkg -remove shouldRemove plan = - mkInstallPlan "remove" newGraph (planIndepGoals plan) - where - newGraph = Graph.fromDistinctList $ - filter (not . shouldRemove) (toList plan) - --- | Change a number of packages in the 'Configured' state to the 'Installed' --- state. --- --- To preserve invariants, the package must have all of its dependencies --- already installed too (that is 'PreExisting' or 'Installed'). 
--- -installed :: (IsUnit ipkg, IsUnit srcpkg) - => (srcpkg -> Bool) - -> GenericInstallPlan ipkg srcpkg - -> GenericInstallPlan ipkg srcpkg -installed shouldBeInstalled installPlan = - foldl' markInstalled installPlan - [ pkg - | Configured pkg <- reverseTopologicalOrder installPlan - , shouldBeInstalled pkg ] - where - markInstalled plan pkg = - assert (all isInstalled (directDeps plan (nodeKey pkg))) $ - plan { - planGraph = Graph.insert (Installed pkg) (planGraph plan) - } - --- | Lookup a package in the plan. --- -lookup :: (IsUnit ipkg, IsUnit srcpkg) - => GenericInstallPlan ipkg srcpkg - -> UnitId - -> Maybe (GenericPlanPackage ipkg srcpkg) -lookup plan pkgid = Graph.lookup pkgid (planGraph plan) - --- | Find all the direct dependencies of the given package. --- --- Note that the package must exist in the plan or it is an error. --- -directDeps :: GenericInstallPlan ipkg srcpkg - -> UnitId - -> [GenericPlanPackage ipkg srcpkg] -directDeps plan pkgid = - case Graph.neighbors (planGraph plan) pkgid of - Just deps -> deps - Nothing -> internalError "directDeps" "package not in graph" - --- | Find all the direct reverse dependencies of the given package. --- --- Note that the package must exist in the plan or it is an error. --- -revDirectDeps :: GenericInstallPlan ipkg srcpkg - -> UnitId - -> [GenericPlanPackage ipkg srcpkg] -revDirectDeps plan pkgid = - case Graph.revNeighbors (planGraph plan) pkgid of - Just deps -> deps - Nothing -> internalError "revDirectDeps" "package not in graph" - --- | Return all the packages in the 'InstallPlan' in reverse topological order. --- That is, for each package, all dependencies of the package appear first. --- --- Compared to 'executionOrder', this function returns all the installed and --- source packages rather than just the source ones. Also, while both this --- and 'executionOrder' produce reverse topological orderings of the package --- dependency graph, it is not necessarily exactly the same order. 
--- -reverseTopologicalOrder :: GenericInstallPlan ipkg srcpkg - -> [GenericPlanPackage ipkg srcpkg] -reverseTopologicalOrder plan = Graph.revTopSort (planGraph plan) - - --- | Return the packages in the plan that are direct or indirect dependencies of --- the given packages. --- -dependencyClosure :: GenericInstallPlan ipkg srcpkg - -> [UnitId] - -> [GenericPlanPackage ipkg srcpkg] -dependencyClosure plan = fromMaybe [] - . Graph.closure (planGraph plan) - --- | Return the packages in the plan that depend directly or indirectly on the --- given packages. --- -reverseDependencyClosure :: GenericInstallPlan ipkg srcpkg - -> [UnitId] - -> [GenericPlanPackage ipkg srcpkg] -reverseDependencyClosure plan = fromMaybe [] - . Graph.revClosure (planGraph plan) - - --- Alert alert! Why does SolverId map to a LIST of plan packages? --- The sordid story has to do with 'build-depends' on a package --- with libraries and executables. In an ideal world, we would --- ONLY depend on the library in this situation. But c.f. #3661 --- some people rely on the build-depends to ALSO implicitly --- depend on an executable. --- --- I don't want to commit to a strategy yet, so the only possible --- thing you can do in this case is return EVERYTHING and let --- the client filter out what they want (executables? libraries? --- etc). This similarly implies we can't return a 'ConfiguredId' --- because that's not enough information. 
- -fromSolverInstallPlan :: - (IsUnit ipkg, IsUnit srcpkg) - => ( (SolverId -> [GenericPlanPackage ipkg srcpkg]) - -> SolverInstallPlan.SolverPlanPackage - -> [GenericPlanPackage ipkg srcpkg] ) - -> SolverInstallPlan - -> GenericInstallPlan ipkg srcpkg -fromSolverInstallPlan f plan = - mkInstallPlan "fromSolverInstallPlan" - (Graph.fromDistinctList pkgs'') - (SolverInstallPlan.planIndepGoals plan) - where - (_, _, pkgs'') = foldl' f' (Map.empty, Map.empty, []) - (SolverInstallPlan.reverseTopologicalOrder plan) - - f' (pidMap, ipiMap, pkgs) pkg = (pidMap', ipiMap', pkgs' ++ pkgs) - where - pkgs' = f (mapDep pidMap ipiMap) pkg - - (pidMap', ipiMap') - = case nodeKey pkg of - PreExistingId _ uid -> (pidMap, Map.insert uid pkgs' ipiMap) - PlannedId pid -> (Map.insert pid pkgs' pidMap, ipiMap) - - mapDep _ ipiMap (PreExistingId _pid uid) - | Just pkgs <- Map.lookup uid ipiMap = pkgs - | otherwise = error ("fromSolverInstallPlan: PreExistingId " ++ display uid) - mapDep pidMap _ (PlannedId pid) - | Just pkgs <- Map.lookup pid pidMap = pkgs - | otherwise = error ("fromSolverInstallPlan: PlannedId " ++ display pid) - -- This shouldn't happen, since mapDep should only be called - -- on neighbor SolverId, which must have all been done already - -- by the reverse top-sort (we assume the graph is not broken). 
- - -fromSolverInstallPlanWithProgress :: - (IsUnit ipkg, IsUnit srcpkg) - => ( (SolverId -> [GenericPlanPackage ipkg srcpkg]) - -> SolverInstallPlan.SolverPlanPackage - -> LogProgress [GenericPlanPackage ipkg srcpkg] ) - -> SolverInstallPlan - -> LogProgress (GenericInstallPlan ipkg srcpkg) -fromSolverInstallPlanWithProgress f plan = do - (_, _, pkgs'') <- foldM f' (Map.empty, Map.empty, []) - (SolverInstallPlan.reverseTopologicalOrder plan) - return $ mkInstallPlan "fromSolverInstallPlanWithProgress" - (Graph.fromDistinctList pkgs'') - (SolverInstallPlan.planIndepGoals plan) - where - f' (pidMap, ipiMap, pkgs) pkg = do - pkgs' <- f (mapDep pidMap ipiMap) pkg - let (pidMap', ipiMap') - = case nodeKey pkg of - PreExistingId _ uid -> (pidMap, Map.insert uid pkgs' ipiMap) - PlannedId pid -> (Map.insert pid pkgs' pidMap, ipiMap) - return (pidMap', ipiMap', pkgs' ++ pkgs) - - mapDep _ ipiMap (PreExistingId _pid uid) - | Just pkgs <- Map.lookup uid ipiMap = pkgs - | otherwise = error ("fromSolverInstallPlan: PreExistingId " ++ display uid) - mapDep pidMap _ (PlannedId pid) - | Just pkgs <- Map.lookup pid pidMap = pkgs - | otherwise = error ("fromSolverInstallPlan: PlannedId " ++ display pid) - -- This shouldn't happen, since mapDep should only be called - -- on neighbor SolverId, which must have all been done already - -- by the reverse top-sort (we assume the graph is not broken). - --- | Conversion of 'SolverInstallPlan' to 'InstallPlan'. 
--- Similar to 'elaboratedInstallPlan' -configureInstallPlan :: Cabal.ConfigFlags -> SolverInstallPlan -> InstallPlan -configureInstallPlan configFlags solverPlan = - flip fromSolverInstallPlan solverPlan $ \mapDep planpkg -> - [case planpkg of - SolverInstallPlan.PreExisting pkg -> - PreExisting (instSolverPkgIPI pkg) - - SolverInstallPlan.Configured pkg -> - Configured (configureSolverPackage mapDep pkg) - ] - where - configureSolverPackage :: (SolverId -> [PlanPackage]) - -> SolverPackage UnresolvedPkgLoc - -> ConfiguredPackage UnresolvedPkgLoc - configureSolverPackage mapDep spkg = - ConfiguredPackage { - confPkgId = Configure.computeComponentId - (Cabal.fromFlagOrDefault False - (Cabal.configDeterministic configFlags)) - Cabal.NoFlag - Cabal.NoFlag - (packageId spkg) - PD.CLibName - (Just (map confInstId (CD.libraryDeps deps), - solverPkgFlags spkg)), - confPkgSource = solverPkgSource spkg, - confPkgFlags = solverPkgFlags spkg, - confPkgStanzas = solverPkgStanzas spkg, - confPkgDeps = deps - -- NB: no support for executable dependencies - } - where - deps = fmap (concatMap (map configuredId . mapDep)) (solverPkgLibDeps spkg) - - --- ------------------------------------------------------------ --- * Primitives for traversing plans --- ------------------------------------------------------------ - --- $traversal --- --- Algorithms to traverse or execute an 'InstallPlan', especially in parallel, --- may make use of the 'Processing' type and the associated operations --- 'ready', 'completed' and 'failed'. --- --- The 'Processing' type is used to keep track of the state of a traversal and --- includes the set of packages that are in the processing state, e.g. in the --- process of being installed, plus those that have been completed and those --- where processing failed. 
--- --- Traversal algorithms start with an 'InstallPlan': --- --- * Initially there will be certain packages that can be processed immediately --- (since they are configured source packages and have all their dependencies --- installed already). The function 'ready' returns these packages plus a --- 'Processing' state that marks these same packages as being in the --- processing state. --- --- * The algorithm must now arrange for these packages to be processed --- (possibly in parallel). When a package has completed processing, the --- algorithm needs to know which other packages (if any) are now ready to --- process as a result. The 'completed' function marks a package as completed --- and returns any packages that are newly in the processing state (ie ready --- to process), along with the updated 'Processing' state. --- --- * If failure is possible then when processing a package fails, the algorithm --- needs to know which other packages have also failed as a result. The --- 'failed' function marks the given package as failed as well as all the --- other packages that depend on the failed package. In addition it returns --- the other failed packages. - - --- | The 'Processing' type is used to keep track of the state of a traversal --- and includes the set of packages that are in the processing state, e.g. in --- the process of being installed, plus those that have been completed and --- those where processing failed. --- -data Processing = Processing !(Set UnitId) !(Set UnitId) !(Set UnitId) - -- processing, completed, failed - --- | The packages in the plan that are initially ready to be installed. --- That is they are in the configured state and have all their dependencies --- installed already. --- --- The result is both the packages that are now ready to be installed and also --- a 'Processing' state containing those same packages. 
The assumption is that --- all the packages that are ready will now be processed and so we can consider --- them to be in the processing state. --- -ready :: (IsUnit ipkg, IsUnit srcpkg) - => GenericInstallPlan ipkg srcpkg - -> ([GenericReadyPackage srcpkg], Processing) -ready plan = - assert (processingInvariant plan processing) $ - (readyPackages, processing) - where - !processing = - Processing - (Set.fromList [ nodeKey pkg | pkg <- readyPackages ]) - (Set.fromList [ nodeKey pkg | pkg <- toList plan, isInstalled pkg ]) - Set.empty - readyPackages = - [ ReadyPackage pkg - | Configured pkg <- toList plan - , all isInstalled (directDeps plan (nodeKey pkg)) - ] - -isInstalled :: GenericPlanPackage a b -> Bool -isInstalled (PreExisting {}) = True -isInstalled (Installed {}) = True -isInstalled _ = False - --- | Given a package in the processing state, mark the package as completed --- and return any packages that are newly in the processing state (ie ready to --- process), along with the updated 'Processing' state. --- -completed :: (IsUnit ipkg, IsUnit srcpkg) - => GenericInstallPlan ipkg srcpkg - -> Processing -> UnitId - -> ([GenericReadyPackage srcpkg], Processing) -completed plan (Processing processingSet completedSet failedSet) pkgid = - assert (pkgid `Set.member` processingSet) $ - assert (processingInvariant plan processing') $ - - ( map asReadyPackage newlyReady - , processing' ) - where - completedSet' = Set.insert pkgid completedSet - - -- each direct reverse dep where all direct deps are completed - newlyReady = [ dep - | dep <- revDirectDeps plan pkgid - , all ((`Set.member` completedSet') . 
nodeKey) - (directDeps plan (nodeKey dep)) - ] - - processingSet' = foldl' (flip Set.insert) - (Set.delete pkgid processingSet) - (map nodeKey newlyReady) - processing' = Processing processingSet' completedSet' failedSet - - asReadyPackage (Configured pkg) = ReadyPackage pkg - asReadyPackage _ = internalError "completed" "" - -failed :: (IsUnit ipkg, IsUnit srcpkg) - => GenericInstallPlan ipkg srcpkg - -> Processing -> UnitId - -> ([srcpkg], Processing) -failed plan (Processing processingSet completedSet failedSet) pkgid = - assert (pkgid `Set.member` processingSet) $ - assert (all (`Set.notMember` processingSet) (tail newlyFailedIds)) $ - assert (all (`Set.notMember` completedSet) (tail newlyFailedIds)) $ - -- but note that some newlyFailed may already be in the failed set - -- since one package can depend on two packages that both fail and - -- so would be in the rev-dep closure for both. - assert (processingInvariant plan processing') $ - - ( map asConfiguredPackage (tail newlyFailed) - , processing' ) - where - processingSet' = Set.delete pkgid processingSet - failedSet' = failedSet `Set.union` Set.fromList newlyFailedIds - newlyFailedIds = map nodeKey newlyFailed - newlyFailed = fromMaybe (internalError "failed" "package not in graph") - $ Graph.revClosure (planGraph plan) [pkgid] - processing' = Processing processingSet' completedSet failedSet' - - asConfiguredPackage (Configured pkg) = pkg - asConfiguredPackage _ = internalError "failed" "not in configured state" - -processingInvariant :: (IsUnit ipkg, IsUnit srcpkg) - => GenericInstallPlan ipkg srcpkg - -> Processing -> Bool -processingInvariant plan (Processing processingSet completedSet failedSet) = - - -- All the packages in the three sets are actually in the graph - assert (Foldable.all (flip Graph.member (planGraph plan)) processingSet) $ - assert (Foldable.all (flip Graph.member (planGraph plan)) completedSet) $ - assert (Foldable.all (flip Graph.member (planGraph plan)) failedSet) $ - - -- The 
processing, completed and failed sets are disjoint from each other - assert (noIntersection processingSet completedSet) $ - assert (noIntersection processingSet failedSet) $ - assert (noIntersection failedSet completedSet) $ - - -- Packages that depend on a package that's still processing cannot be - -- completed - assert (noIntersection (reverseClosure processingSet) completedSet) $ - - -- On the other hand, packages that depend on a package that's still - -- processing /can/ have failed (since they may have depended on multiple - -- packages that were processing, but it only takes one to fail to cause - -- knock-on failures) so it is quite possible to have an - -- intersection (reverseClosure processingSet) failedSet - - -- The failed set is upwards closed, i.e. equal to its own rev dep closure - assert (failedSet == reverseClosure failedSet) $ - - -- All immediate reverse deps of packges that are currently processing - -- are not currently being processed (ie not in the processing set). - assert (and [ rdeppkgid `Set.notMember` processingSet - | pkgid <- Set.toList processingSet - , rdeppkgid <- maybe (internalError "processingInvariant" "") - (map nodeKey) - (Graph.revNeighbors (planGraph plan) pkgid) - ]) $ - - -- Packages from the processing or failed sets are only ever in the - -- configured state. - assert (and [ case Graph.lookup pkgid (planGraph plan) of - Just (Configured _) -> True - Just (PreExisting _) -> False - Just (Installed _) -> False - Nothing -> False - | pkgid <- Set.toList processingSet ++ Set.toList failedSet ]) - - -- We use asserts rather than returning False so that on failure we get - -- better details on which bit of the invariant was violated. - True - where - reverseClosure = Set.fromList - . map nodeKey - . fromMaybe (internalError "processingInvariant" "") - . Graph.revClosure (planGraph plan) - . 
Set.toList - noIntersection a b = Set.null (Set.intersection a b) - - --- ------------------------------------------------------------ --- * Traversing plans --- ------------------------------------------------------------ - --- | Flatten an 'InstallPlan', producing the sequence of source packages in --- the order in which they would be processed when the plan is executed. This --- can be used for simultations or presenting execution dry-runs. --- --- It is guaranteed to give the same order as using 'execute' (with a serial --- in-order 'JobControl'), which is a reverse topological orderings of the --- source packages in the dependency graph, albeit not necessarily exactly the --- same ordering as that produced by 'reverseTopologicalOrder'. --- -executionOrder :: (IsUnit ipkg, IsUnit srcpkg) - => GenericInstallPlan ipkg srcpkg - -> [GenericReadyPackage srcpkg] -executionOrder plan = - let (newpkgs, processing) = ready plan - in tryNewTasks processing newpkgs - where - tryNewTasks _processing [] = [] - tryNewTasks processing (p:todo) = waitForTasks processing p todo - - waitForTasks processing p todo = - p : tryNewTasks processing' (todo++nextpkgs) - where - (nextpkgs, processing') = completed plan processing (nodeKey p) - - --- ------------------------------------------------------------ --- * Executing plans --- ------------------------------------------------------------ - --- | The set of results we get from executing an install plan. --- -type BuildOutcomes failure result = Map UnitId (Either failure result) - --- | Lookup the build result for a single package. --- -lookupBuildOutcome :: HasUnitId pkg - => pkg -> BuildOutcomes failure result - -> Maybe (Either failure result) -lookupBuildOutcome = Map.lookup . installedUnitId - --- | Execute an install plan. This traverses the plan in dependency order. --- --- Executing each individual package can fail and if so all dependents fail --- too. The result for each package is collected as a 'BuildOutcomes' map. 
--- --- Visiting each package happens with optional parallelism, as determined by --- the 'JobControl'. By default, after any failure we stop as soon as possible --- (using the 'JobControl' to try to cancel in-progress tasks). This behaviour --- can be reversed to keep going and build as many packages as possible. --- --- Note that the 'BuildOutcomes' is /not/ guaranteed to cover all the packages --- in the plan. In particular in the default mode where we stop as soon as --- possible after a failure then there may be packages which are skipped and --- these will have no 'BuildOutcome'. --- -execute :: forall m ipkg srcpkg result failure. - (IsUnit ipkg, IsUnit srcpkg, - Monad m) - => JobControl m (UnitId, Either failure result) - -> Bool -- ^ Keep going after failure - -> (srcpkg -> failure) -- ^ Value for dependents of failed packages - -> GenericInstallPlan ipkg srcpkg - -> (GenericReadyPackage srcpkg -> m (Either failure result)) - -> m (BuildOutcomes failure result) -execute jobCtl keepGoing depFailure plan installPkg = - let (newpkgs, processing) = ready plan - in tryNewTasks Map.empty False False processing newpkgs - where - tryNewTasks :: BuildOutcomes failure result - -> Bool -> Bool -> Processing - -> [GenericReadyPackage srcpkg] - -> m (BuildOutcomes failure result) - - tryNewTasks !results tasksFailed tasksRemaining !processing newpkgs - -- we were in the process of cancelling and now we're finished - | tasksFailed && not keepGoing && not tasksRemaining - = return results - - -- we are still in the process of cancelling, wait for remaining tasks - | tasksFailed && not keepGoing && tasksRemaining - = waitForTasks results tasksFailed processing - - -- no new tasks to do and all tasks are done so we're finished - | null newpkgs && not tasksRemaining - = return results - - -- no new tasks to do, remaining tasks to wait for - | null newpkgs - = waitForTasks results tasksFailed processing - - -- new tasks to do, spawn them, then wait for tasks to complete - | 
otherwise - = do sequence_ [ spawnJob jobCtl $ do - result <- installPkg pkg - return (nodeKey pkg, result) - | pkg <- newpkgs ] - waitForTasks results tasksFailed processing - - waitForTasks :: BuildOutcomes failure result - -> Bool -> Processing - -> m (BuildOutcomes failure result) - waitForTasks !results tasksFailed !processing = do - (pkgid, result) <- collectJob jobCtl - - case result of - - Right _success -> do - tasksRemaining <- remainingJobs jobCtl - tryNewTasks results' tasksFailed tasksRemaining - processing' nextpkgs - where - results' = Map.insert pkgid result results - (nextpkgs, processing') = completed plan processing pkgid - - Left _failure -> do - -- if this is the first failure and we're not trying to keep going - -- then try to cancel as many of the remaining jobs as possible - when (not tasksFailed && not keepGoing) $ - cancelJobs jobCtl - - tasksRemaining <- remainingJobs jobCtl - tryNewTasks results' True tasksRemaining processing' [] - where - (depsfailed, processing') = failed plan processing pkgid - results' = Map.insert pkgid result results `Map.union` depResults - depResults = Map.fromList - [ (nodeKey deppkg, Left (depFailure deppkg)) - | deppkg <- depsfailed ] - --- ------------------------------------------------------------ --- * Checking validity of plans --- ------------------------------------------------------------ - --- | A valid installation plan is a set of packages that is closed, acyclic --- and respects the package state relation. --- --- * if the result is @False@ use 'problems' to get a detailed list. 
--- -valid :: (IsUnit ipkg, IsUnit srcpkg) - => String -> Graph (GenericPlanPackage ipkg srcpkg) -> Bool -valid loc graph = - case problems graph of - [] -> True - ps -> internalError loc ('\n' : unlines (map showPlanProblem ps)) - -data PlanProblem ipkg srcpkg = - PackageMissingDeps (GenericPlanPackage ipkg srcpkg) [UnitId] - | PackageCycle [GenericPlanPackage ipkg srcpkg] - | PackageStateInvalid (GenericPlanPackage ipkg srcpkg) - (GenericPlanPackage ipkg srcpkg) - -showPlanProblem :: (IsUnit ipkg, IsUnit srcpkg) - => PlanProblem ipkg srcpkg -> String -showPlanProblem (PackageMissingDeps pkg missingDeps) = - "Package " ++ display (nodeKey pkg) - ++ " depends on the following packages which are missing from the plan: " - ++ intercalate ", " (map display missingDeps) - -showPlanProblem (PackageCycle cycleGroup) = - "The following packages are involved in a dependency cycle " - ++ intercalate ", " (map (display . nodeKey) cycleGroup) -showPlanProblem (PackageStateInvalid pkg pkg') = - "Package " ++ display (nodeKey pkg) - ++ " is in the " ++ showPlanPackageTag pkg - ++ " state but it depends on package " ++ display (nodeKey pkg') - ++ " which is in the " ++ showPlanPackageTag pkg' - ++ " state" - --- | For an invalid plan, produce a detailed list of problems as human readable --- error messages. This is mainly intended for debugging purposes. --- Use 'showPlanProblem' for a human readable explanation. --- -problems :: (IsUnit ipkg, IsUnit srcpkg) - => Graph (GenericPlanPackage ipkg srcpkg) - -> [PlanProblem ipkg srcpkg] -problems graph = - - [ PackageMissingDeps pkg - (mapMaybe - (fmap nodeKey . 
flip Graph.lookup graph) - missingDeps) - | (pkg, missingDeps) <- Graph.broken graph ] - - ++ [ PackageCycle cycleGroup - | cycleGroup <- Graph.cycles graph ] -{- - ++ [ PackageInconsistency name inconsistencies - | (name, inconsistencies) <- - dependencyInconsistencies indepGoals graph ] - --TODO: consider re-enabling this one, see SolverInstallPlan --} - ++ [ PackageStateInvalid pkg pkg' - | pkg <- Graph.toList graph - , Just pkg' <- map (flip Graph.lookup graph) - (nodeNeighbors pkg) - , not (stateDependencyRelation pkg pkg') ] - --- | The states of packages have that depend on each other must respect --- this relation. That is for very case where package @a@ depends on --- package @b@ we require that @stateDependencyRelation a b = True@. --- -stateDependencyRelation :: GenericPlanPackage ipkg srcpkg - -> GenericPlanPackage ipkg srcpkg -> Bool -stateDependencyRelation PreExisting{} PreExisting{} = True - -stateDependencyRelation Installed{} PreExisting{} = True -stateDependencyRelation Installed{} Installed{} = True - -stateDependencyRelation Configured{} PreExisting{} = True -stateDependencyRelation Configured{} Installed{} = True -stateDependencyRelation Configured{} Configured{} = True - -stateDependencyRelation _ _ = False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/InstallSymlink.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/InstallSymlink.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/InstallSymlink.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/InstallSymlink.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,262 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.InstallSymlink --- Copyright : (c) Duncan Coutts 2008 --- License : 
BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Managing installing binaries with symlinks. ------------------------------------------------------------------------------ -module Distribution.Client.InstallSymlink ( - symlinkBinaries, - symlinkBinary, - ) where - -#ifdef mingw32_HOST_OS - -import Distribution.Package (PackageIdentifier) -import Distribution.Types.UnqualComponentName -import Distribution.Client.InstallPlan (InstallPlan) -import Distribution.Client.Types (BuildOutcomes) -import Distribution.Client.Setup (InstallFlags) -import Distribution.Simple.Setup (ConfigFlags) -import Distribution.Simple.Compiler -import Distribution.System - -symlinkBinaries :: Platform -> Compiler - -> ConfigFlags - -> InstallFlags - -> InstallPlan - -> BuildOutcomes - -> IO [(PackageIdentifier, UnqualComponentName, FilePath)] -symlinkBinaries _ _ _ _ _ _ = return [] - -symlinkBinary :: FilePath -> FilePath -> UnqualComponentName -> String -> IO Bool -symlinkBinary _ _ _ _ = fail "Symlinking feature not available on Windows" - -#else - -import Distribution.Client.Types - ( ConfiguredPackage(..), BuildOutcomes ) -import Distribution.Client.Setup - ( InstallFlags(installSymlinkBinDir) ) -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.InstallPlan (InstallPlan) - -import Distribution.Solver.Types.SourcePackage -import Distribution.Solver.Types.OptionalStanza - -import Distribution.Package - ( PackageIdentifier, Package(packageId), UnitId, installedUnitId ) -import Distribution.Types.UnqualComponentName -import Distribution.Compiler - ( CompilerId(..) 
) -import qualified Distribution.PackageDescription as PackageDescription -import Distribution.PackageDescription - ( PackageDescription ) -import Distribution.PackageDescription.Configuration - ( finalizePD ) -import Distribution.Simple.Setup - ( ConfigFlags(..), fromFlag, fromFlagOrDefault, flagToMaybe ) -import qualified Distribution.Simple.InstallDirs as InstallDirs -import Distribution.Simple.Compiler - ( Compiler, compilerInfo, CompilerInfo(..) ) -import Distribution.System - ( Platform ) -import Distribution.Text - ( display ) - -import System.Posix.Files - ( getSymbolicLinkStatus, isSymbolicLink, createSymbolicLink - , removeLink ) -import System.Directory - ( canonicalizePath ) -import System.FilePath - ( (), splitPath, joinPath, isAbsolute ) - -import Prelude hiding (ioError) -import System.IO.Error - ( isDoesNotExistError, ioError ) -import Distribution.Compat.Exception ( catchIO ) -import Control.Exception - ( assert ) -import Data.Maybe - ( catMaybes ) - --- | We would like by default to install binaries into some location that is on --- the user's PATH. For per-user installations on Unix systems that basically --- means the @~/bin/@ directory. On the majority of platforms the @~/bin/@ --- directory will be on the user's PATH. However some people are a bit nervous --- about letting a package manager install programs into @~/bin/@. --- --- A compromise solution is that instead of installing binaries directly into --- @~/bin/@, we could install them in a private location under @~/.cabal/bin@ --- and then create symlinks in @~/bin/@. We can be careful when setting up the --- symlinks that we do not overwrite any binary that the user installed. We can --- check if it was a symlink we made because it would point to the private dir --- where we install our binaries. 
This means we can install normally without --- worrying and in a later phase set up symlinks, and if that fails then we --- report it to the user, but even in this case the package is still in an OK --- installed state. --- --- This is an optional feature that users can choose to use or not. It is --- controlled from the config file. Of course it only works on POSIX systems --- with symlinks so is not available to Windows users. --- -symlinkBinaries :: Platform -> Compiler - -> ConfigFlags - -> InstallFlags - -> InstallPlan - -> BuildOutcomes - -> IO [(PackageIdentifier, UnqualComponentName, FilePath)] -symlinkBinaries platform comp configFlags installFlags plan buildOutcomes = - case flagToMaybe (installSymlinkBinDir installFlags) of - Nothing -> return [] - Just symlinkBinDir - | null exes -> return [] - | otherwise -> do - publicBinDir <- canonicalizePath symlinkBinDir --- TODO: do we want to do this here? : --- createDirectoryIfMissing True publicBinDir - fmap catMaybes $ sequence - [ do privateBinDir <- pkgBinDir pkg ipid - ok <- symlinkBinary - publicBinDir privateBinDir - publicExeName privateExeName - if ok - then return Nothing - else return (Just (pkgid, publicExeName, - privateBinDir privateExeName)) - | (rpkg, pkg, exe) <- exes - , let pkgid = packageId pkg - -- This is a bit dodgy; probably won't work for Backpack packages - ipid = installedUnitId rpkg - publicExeName = PackageDescription.exeName exe - privateExeName = prefix ++ unUnqualComponentName publicExeName ++ suffix - prefix = substTemplate pkgid ipid prefixTemplate - suffix = substTemplate pkgid ipid suffixTemplate ] - where - exes = - [ (cpkg, pkg, exe) - | InstallPlan.Configured cpkg <- InstallPlan.toList plan - , case InstallPlan.lookupBuildOutcome cpkg buildOutcomes of - Just (Right _success) -> True - _ -> False - , let pkg :: PackageDescription - pkg = pkgDescription cpkg - , exe <- PackageDescription.executables pkg - , PackageDescription.buildable (PackageDescription.buildInfo exe) ] - 
- pkgDescription (ConfiguredPackage _ (SourcePackage _ pkg _ _) - flags stanzas _) = - case finalizePD flags (enableStanzas stanzas) - (const True) - platform cinfo [] pkg of - Left _ -> error "finalizePD ReadyPackage failed" - Right (desc, _) -> desc - - -- This is sadly rather complicated. We're kind of re-doing part of the - -- configuration for the package. :-( - pkgBinDir :: PackageDescription -> UnitId -> IO FilePath - pkgBinDir pkg ipid = do - defaultDirs <- InstallDirs.defaultInstallDirs - compilerFlavor - (fromFlag (configUserInstall configFlags)) - (PackageDescription.hasLibs pkg) - let templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault - defaultDirs (configInstallDirs configFlags) - absoluteDirs = InstallDirs.absoluteInstallDirs - (packageId pkg) ipid - cinfo InstallDirs.NoCopyDest - platform templateDirs - canonicalizePath (InstallDirs.bindir absoluteDirs) - - substTemplate pkgid ipid = InstallDirs.fromPathTemplate - . InstallDirs.substPathTemplate env - where env = InstallDirs.initialPathTemplateEnv pkgid ipid - cinfo platform - - fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "") - prefixTemplate = fromFlagTemplate (configProgPrefix configFlags) - suffixTemplate = fromFlagTemplate (configProgSuffix configFlags) - cinfo = compilerInfo comp - (CompilerId compilerFlavor _) = compilerInfoId cinfo - -symlinkBinary :: - FilePath -- ^ The canonical path of the public bin dir eg - -- @/home/user/bin@ - -> FilePath -- ^ The canonical path of the private bin dir eg - -- @/home/user/.cabal/bin@ - -> UnqualComponentName -- ^ The name of the executable to go in the public bin - -- dir, eg @foo@ - -> String -- ^ The name of the executable to in the private bin - -- dir, eg @foo-1.0@ - -> IO Bool -- ^ If creating the symlink was successful. @False@ if - -- there was another file there already that we did - -- not own. Other errors like permission errors just - -- propagate as exceptions. 
-symlinkBinary publicBindir privateBindir publicName privateName = do - ok <- targetOkToOverwrite (publicBindir publicName') - (privateBindir privateName) - case ok of - NotOurFile -> return False - NotExists -> mkLink >> return True - OkToOverwrite -> rmLink >> mkLink >> return True - where - publicName' = display publicName - relativeBindir = makeRelative publicBindir privateBindir - mkLink = createSymbolicLink (relativeBindir privateName) - (publicBindir publicName') - rmLink = removeLink (publicBindir publicName') - --- | Check a file path of a symlink that we would like to create to see if it --- is OK. For it to be OK to overwrite it must either not already exist yet or --- be a symlink to our target (in which case we can assume ownership). --- -targetOkToOverwrite :: FilePath -- ^ The file path of the symlink to the private - -- binary that we would like to create - -> FilePath -- ^ The canonical path of the private binary. - -- Use 'canonicalizePath' to make this. - -> IO SymlinkStatus -targetOkToOverwrite symlink target = handleNotExist $ do - status <- getSymbolicLinkStatus symlink - if not (isSymbolicLink status) - then return NotOurFile - else do target' <- canonicalizePath symlink - -- This relies on canonicalizePath handling symlinks - if target == target' - then return OkToOverwrite - else return NotOurFile - - where - handleNotExist action = catchIO action $ \ioexception -> - -- If the target doesn't exist then there's no problem overwriting it! - if isDoesNotExistError ioexception - then return NotExists - else ioError ioexception - -data SymlinkStatus - = NotExists -- ^ The file doesn't exist so we can make a symlink. - | OkToOverwrite -- ^ A symlink already exists, though it is ours. We'll - -- have to delete it first before we make a new symlink. - | NotOurFile -- ^ A file already exists and it is not one of our existing - -- symlinks (either because it is not a symlink or because - -- it points somewhere other than our managed space). 
- deriving Show - --- | Take two canonical paths and produce a relative path to get from the first --- to the second, even if it means adding @..@ path components. --- -makeRelative :: FilePath -> FilePath -> FilePath -makeRelative a b = assert (isAbsolute a && isAbsolute b) $ - let as = splitPath a - bs = splitPath b - commonLen = length $ takeWhile id $ zipWith (==) as bs - in joinPath $ [ ".." | _ <- drop commonLen as ] - ++ drop commonLen bs - -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/JobControl.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/JobControl.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/JobControl.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/JobControl.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,174 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.JobControl --- Copyright : (c) Duncan Coutts 2012 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- A job control concurrency abstraction ------------------------------------------------------------------------------ -module Distribution.Client.JobControl ( - JobControl, - newSerialJobControl, - newParallelJobControl, - spawnJob, - collectJob, - remainingJobs, - cancelJobs, - - JobLimit, - newJobLimit, - withJobLimit, - - Lock, - newLock, - criticalSection - ) where - -import Control.Monad -import Control.Concurrent (forkIO) -import Control.Concurrent.MVar -import Control.Concurrent.STM (STM, atomically) -import Control.Concurrent.STM.TVar -import Control.Concurrent.STM.TChan -import Control.Exception (SomeException, bracket_, throwIO, try) -import Distribution.Client.Compat.Semaphore - - --- | A simple 
concurrency abstraction. Jobs can be spawned and can complete --- in any order. This allows both serial and parallel implementations. --- -data JobControl m a = JobControl { - -- | Add a new job to the pool of jobs - spawnJob :: m a -> m (), - - -- | Wait until one job is complete - collectJob :: m a, - - -- | Returns True if there are any outstanding jobs - -- (ie spawned but yet to be collected) - remainingJobs :: m Bool, - - -- | Try to cancel any outstanding but not-yet-started jobs. - -- Call 'remainingJobs' after this to find out if any jobs are left - -- (ie could not be cancelled). - cancelJobs :: m () - } - - --- | Make a 'JobControl' that executes all jobs serially and in order. --- It only executes jobs on demand when they are collected, not eagerly. --- --- Cancelling will cancel /all/ jobs that have not been collected yet. --- -newSerialJobControl :: IO (JobControl IO a) -newSerialJobControl = do - qVar <- newTChanIO - return JobControl { - spawnJob = spawn qVar, - collectJob = collect qVar, - remainingJobs = remaining qVar, - cancelJobs = cancel qVar - } - where - spawn :: TChan (IO a) -> IO a -> IO () - spawn qVar job = atomically $ writeTChan qVar job - - collect :: TChan (IO a) -> IO a - collect qVar = - join $ atomically $ readTChan qVar - - remaining :: TChan (IO a) -> IO Bool - remaining qVar = fmap not $ atomically $ isEmptyTChan qVar - - cancel :: TChan (IO a) -> IO () - cancel qVar = do - _ <- atomically $ readAllTChan qVar - return () - --- | Make a 'JobControl' that eagerly executes jobs in parallel, with a given --- maximum degree of parallelism. --- --- Cancelling will cancel jobs that have not yet begun executing, but jobs --- that have already been executed or are currently executing cannot be --- cancelled. 
--- -newParallelJobControl :: Int -> IO (JobControl IO a) -newParallelJobControl n | n < 1 || n > 1000 = - error $ "newParallelJobControl: not a sensible number of jobs: " ++ show n -newParallelJobControl maxJobLimit = do - inqVar <- newTChanIO - outqVar <- newTChanIO - countVar <- newTVarIO 0 - replicateM_ maxJobLimit $ - forkIO $ - worker inqVar outqVar - return JobControl { - spawnJob = spawn inqVar countVar, - collectJob = collect outqVar countVar, - remainingJobs = remaining countVar, - cancelJobs = cancel inqVar countVar - } - where - worker :: TChan (IO a) -> TChan (Either SomeException a) -> IO () - worker inqVar outqVar = - forever $ do - job <- atomically $ readTChan inqVar - res <- try job - atomically $ writeTChan outqVar res - - spawn :: TChan (IO a) -> TVar Int -> IO a -> IO () - spawn inqVar countVar job = - atomically $ do - modifyTVar' countVar (+1) - writeTChan inqVar job - - collect :: TChan (Either SomeException a) -> TVar Int -> IO a - collect outqVar countVar = do - res <- atomically $ do - modifyTVar' countVar (subtract 1) - readTChan outqVar - either throwIO return res - - remaining :: TVar Int -> IO Bool - remaining countVar = fmap (/=0) $ atomically $ readTVar countVar - - cancel :: TChan (IO a) -> TVar Int -> IO () - cancel inqVar countVar = - atomically $ do - xs <- readAllTChan inqVar - modifyTVar' countVar (subtract (length xs)) - -readAllTChan :: TChan a -> STM [a] -readAllTChan qvar = go [] - where - go xs = do - mx <- tryReadTChan qvar - case mx of - Nothing -> return (reverse xs) - Just x -> go (x:xs) - -------------------------- --- Job limits and locks --- - -data JobLimit = JobLimit QSem - -newJobLimit :: Int -> IO JobLimit -newJobLimit n = - fmap JobLimit (newQSem n) - -withJobLimit :: JobLimit -> IO a -> IO a -withJobLimit (JobLimit sem) = - bracket_ (waitQSem sem) (signalQSem sem) - -newtype Lock = Lock (MVar ()) - -newLock :: IO Lock -newLock = fmap Lock $ newMVar () - -criticalSection :: Lock -> IO a -> IO a 
-criticalSection (Lock lck) act = bracket_ (takeMVar lck) (putMVar lck ()) act diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/List.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/List.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/List.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/List.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,603 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.List --- Copyright : (c) David Himmelstrup 2005 --- Duncan Coutts 2008-2011 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- --- Search for and print information about packages ------------------------------------------------------------------------------ -module Distribution.Client.List ( - list, info - ) where - -import Distribution.Package - ( PackageName, Package(..), packageName - , packageVersion, UnitId ) -import Distribution.Types.Dependency -import Distribution.Types.UnqualComponentName -import Distribution.ModuleName (ModuleName) -import Distribution.License (License) -import qualified Distribution.InstalledPackageInfo as Installed -import qualified Distribution.PackageDescription as Source -import Distribution.PackageDescription - ( Flag(..), unFlagName ) -import Distribution.PackageDescription.Configuration - ( flattenPackageDescription ) -import Distribution.Pretty (pretty) - -import Distribution.Simple.Compiler - ( Compiler, PackageDBStack ) -import Distribution.Simple.Program (ProgramDb) -import Distribution.Simple.Utils - ( equating, comparing, die', notice ) -import Distribution.Simple.Setup (fromFlag) -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex 
-import Distribution.Version - ( Version, mkVersion, versionNumbers, VersionRange, withinRange, anyVersion - , intersectVersionRanges, simplifyVersionRange ) -import Distribution.Verbosity (Verbosity) -import Distribution.Text - ( Text(disp), display ) - -import qualified Distribution.SPDX as SPDX - -import Distribution.Solver.Types.PackageConstraint -import qualified Distribution.Solver.Types.PackageIndex as PackageIndex -import Distribution.Solver.Types.SourcePackage - -import Distribution.Client.Types - ( SourcePackageDb(..), PackageSpecifier(..), UnresolvedSourcePackage ) -import Distribution.Client.Targets - ( UserTarget, resolveUserTargets ) -import Distribution.Client.Setup - ( GlobalFlags(..), ListFlags(..), InfoFlags(..) - , RepoContext(..) ) -import Distribution.Client.Utils - ( mergeBy, MergeResult(..) ) -import Distribution.Client.IndexUtils as IndexUtils - ( getSourcePackages, getInstalledPackages ) -import Distribution.Client.FetchUtils - ( isFetched ) - -import Data.List - ( sortBy, groupBy, sort, nub, intersperse, maximumBy, partition ) -import Data.Maybe - ( listToMaybe, fromJust, fromMaybe, isJust, maybeToList ) -import qualified Data.Map as Map -import Data.Tree as Tree -import Control.Monad - ( MonadPlus(mplus), join ) -import Control.Exception - ( assert ) -import Text.PrettyPrint as Disp -import System.Directory - ( doesDirectoryExist ) - - --- | Return a list of packages matching given search strings. 
-getPkgList :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> ProgramDb - -> ListFlags - -> [String] - -> IO [PackageDisplayInfo] -getPkgList verbosity packageDBs repoCtxt comp progdb listFlags pats = do - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb - sourcePkgDb <- getSourcePackages verbosity repoCtxt - let sourcePkgIndex = packageIndex sourcePkgDb - prefs name = fromMaybe anyVersion - (Map.lookup name (packagePreferences sourcePkgDb)) - - pkgsInfo :: - [(PackageName, [Installed.InstalledPackageInfo], [UnresolvedSourcePackage])] - pkgsInfo - -- gather info for all packages - | null pats = mergePackages - (InstalledPackageIndex.allPackages installedPkgIndex) - ( PackageIndex.allPackages sourcePkgIndex) - - -- gather info for packages matching search term - | otherwise = pkgsInfoMatching - - pkgsInfoMatching :: - [(PackageName, [Installed.InstalledPackageInfo], [UnresolvedSourcePackage])] - pkgsInfoMatching = - let matchingInstalled = matchingPackages - InstalledPackageIndex.searchByNameSubstring - installedPkgIndex - matchingSource = matchingPackages - (\ idx n -> - concatMap snd - (PackageIndex.searchByNameSubstring idx n)) - sourcePkgIndex - in mergePackages matchingInstalled matchingSource - - matches :: [PackageDisplayInfo] - matches = [ mergePackageInfo pref - installedPkgs sourcePkgs selectedPkg False - | (pkgname, installedPkgs, sourcePkgs) <- pkgsInfo - , not onlyInstalled || not (null installedPkgs) - , let pref = prefs pkgname - selectedPkg = latestWithPref pref sourcePkgs ] - return matches - where - onlyInstalled = fromFlag (listInstalled listFlags) - matchingPackages search index = - [ pkg - | pat <- pats - , pkg <- search index pat ] - - --- | Show information about packages. 
-list :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> ProgramDb - -> ListFlags - -> [String] - -> IO () -list verbosity packageDBs repos comp progdb listFlags pats = do - matches <- getPkgList verbosity packageDBs repos comp progdb listFlags pats - - if simpleOutput - then putStr $ unlines - [ display (pkgName pkg) ++ " " ++ display version - | pkg <- matches - , version <- if onlyInstalled - then installedVersions pkg - else nub . sort $ installedVersions pkg - ++ sourceVersions pkg ] - -- Note: this only works because for 'list', one cannot currently - -- specify any version constraints, so listing all installed - -- and source ones works. - else - if null matches - then notice verbosity "No matches found." - else putStr $ unlines (map showPackageSummaryInfo matches) - where - onlyInstalled = fromFlag (listInstalled listFlags) - simpleOutput = fromFlag (listSimpleOutput listFlags) - -info :: Verbosity - -> PackageDBStack - -> RepoContext - -> Compiler - -> ProgramDb - -> GlobalFlags - -> InfoFlags - -> [UserTarget] - -> IO () -info verbosity _ _ _ _ _ _ [] = - notice verbosity "No packages requested. Nothing to do." - -info verbosity packageDBs repoCtxt comp progdb - globalFlags _listFlags userTargets = do - - installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb - sourcePkgDb <- getSourcePackages verbosity repoCtxt - let sourcePkgIndex = packageIndex sourcePkgDb - prefs name = fromMaybe anyVersion - (Map.lookup name (packagePreferences sourcePkgDb)) - - -- Users may specify names of packages that are only installed, not - -- just available source packages, so we must resolve targets using - -- the combination of installed and source packages. 
- let sourcePkgs' = PackageIndex.fromList - $ map packageId - (InstalledPackageIndex.allPackages installedPkgIndex) - ++ map packageId - (PackageIndex.allPackages sourcePkgIndex) - pkgSpecifiers <- resolveUserTargets verbosity repoCtxt - (fromFlag $ globalWorldFile globalFlags) - sourcePkgs' userTargets - - pkgsinfo <- sequence - [ do pkginfo <- either (die' verbosity) return $ - gatherPkgInfo prefs - installedPkgIndex sourcePkgIndex - pkgSpecifier - updateFileSystemPackageDetails pkginfo - | pkgSpecifier <- pkgSpecifiers ] - - putStr $ unlines (map showPackageDetailedInfo pkgsinfo) - - where - gatherPkgInfo :: (PackageName -> VersionRange) -> - InstalledPackageIndex -> - PackageIndex.PackageIndex UnresolvedSourcePackage -> - PackageSpecifier UnresolvedSourcePackage -> - Either String PackageDisplayInfo - gatherPkgInfo prefs installedPkgIndex sourcePkgIndex - (NamedPackage name props) - | null (selectedInstalledPkgs) && null (selectedSourcePkgs) - = Left $ "There is no available version of " ++ display name - ++ " that satisfies " - ++ display (simplifyVersionRange verConstraint) - - | otherwise - = Right $ mergePackageInfo pref installedPkgs - sourcePkgs selectedSourcePkg' - showPkgVersion - where - (pref, installedPkgs, sourcePkgs) = - sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex - - selectedInstalledPkgs = InstalledPackageIndex.lookupDependency - installedPkgIndex - (Dependency name verConstraint) - selectedSourcePkgs = PackageIndex.lookupDependency sourcePkgIndex - (Dependency name verConstraint) - selectedSourcePkg' = latestWithPref pref selectedSourcePkgs - - -- display a specific package version if the user - -- supplied a non-trivial version constraint - showPkgVersion = not (null verConstraints) - verConstraint = foldr intersectVersionRanges anyVersion verConstraints - verConstraints = [ vr | PackagePropertyVersion vr <- props ] - - gatherPkgInfo prefs installedPkgIndex sourcePkgIndex - (SpecificSourcePackage pkg) = - Right $ mergePackageInfo 
pref installedPkgs sourcePkgs - selectedPkg True - where - name = packageName pkg - selectedPkg = Just pkg - (pref, installedPkgs, sourcePkgs) = - sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex - -sourcePkgsInfo :: - (PackageName -> VersionRange) - -> PackageName - -> InstalledPackageIndex - -> PackageIndex.PackageIndex UnresolvedSourcePackage - -> (VersionRange, [Installed.InstalledPackageInfo], [UnresolvedSourcePackage]) -sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex = - (pref, installedPkgs, sourcePkgs) - where - pref = prefs name - installedPkgs = concatMap snd (InstalledPackageIndex.lookupPackageName - installedPkgIndex name) - sourcePkgs = PackageIndex.lookupPackageName sourcePkgIndex name - - --- | The info that we can display for each package. It is information per --- package name and covers all installed and available versions. --- -data PackageDisplayInfo = PackageDisplayInfo { - pkgName :: PackageName, - selectedVersion :: Maybe Version, - selectedSourcePkg :: Maybe UnresolvedSourcePackage, - installedVersions :: [Version], - sourceVersions :: [Version], - preferredVersions :: VersionRange, - homepage :: String, - bugReports :: String, - sourceRepo :: String, - synopsis :: String, - description :: String, - category :: String, - license :: Either SPDX.License License, - author :: String, - maintainer :: String, - dependencies :: [ExtDependency], - flags :: [Flag], - hasLib :: Bool, - hasExe :: Bool, - executables :: [UnqualComponentName], - modules :: [ModuleName], - haddockHtml :: FilePath, - haveTarball :: Bool - } - --- | Covers source dependencies and installed dependencies in --- one type. 
-data ExtDependency = SourceDependency Dependency - | InstalledDependency UnitId - -showPackageSummaryInfo :: PackageDisplayInfo -> String -showPackageSummaryInfo pkginfo = - renderStyle (style {lineLength = 80, ribbonsPerLine = 1}) $ - char '*' <+> disp (pkgName pkginfo) - $+$ - (nest 4 $ vcat [ - maybeShow (synopsis pkginfo) "Synopsis:" reflowParagraphs - , text "Default available version:" <+> - case selectedSourcePkg pkginfo of - Nothing -> text "[ Not available from any configured repository ]" - Just pkg -> disp (packageVersion pkg) - , text "Installed versions:" <+> - case installedVersions pkginfo of - [] | hasLib pkginfo -> text "[ Not installed ]" - | otherwise -> text "[ Unknown ]" - versions -> dispTopVersions 4 - (preferredVersions pkginfo) versions - , maybeShow (homepage pkginfo) "Homepage:" text - , text "License: " <+> either pretty pretty (license pkginfo) - ]) - $+$ text "" - where - maybeShow [] _ _ = empty - maybeShow l s f = text s <+> (f l) - -showPackageDetailedInfo :: PackageDisplayInfo -> String -showPackageDetailedInfo pkginfo = - renderStyle (style {lineLength = 80, ribbonsPerLine = 1}) $ - char '*' <+> disp (pkgName pkginfo) - Disp.<> maybe empty (\v -> char '-' Disp.<> disp v) (selectedVersion pkginfo) - <+> text (replicate (16 - length (display (pkgName pkginfo))) ' ') - Disp.<> parens pkgkind - $+$ - (nest 4 $ vcat [ - entry "Synopsis" synopsis hideIfNull reflowParagraphs - , entry "Versions available" sourceVersions - (altText null "[ Not available from server ]") - (dispTopVersions 9 (preferredVersions pkginfo)) - , entry "Versions installed" installedVersions - (altText null (if hasLib pkginfo then "[ Not installed ]" - else "[ Unknown ]")) - (dispTopVersions 4 (preferredVersions pkginfo)) - , entry "Homepage" homepage orNotSpecified text - , entry "Bug reports" bugReports orNotSpecified text - , entry "Description" description hideIfNull reflowParagraphs - , entry "Category" category hideIfNull text - , entry "License" license 
alwaysShow (either pretty pretty) - , entry "Author" author hideIfNull reflowLines - , entry "Maintainer" maintainer hideIfNull reflowLines - , entry "Source repo" sourceRepo orNotSpecified text - , entry "Executables" executables hideIfNull (commaSep disp) - , entry "Flags" flags hideIfNull (commaSep dispFlag) - , entry "Dependencies" dependencies hideIfNull (commaSep dispExtDep) - , entry "Documentation" haddockHtml showIfInstalled text - , entry "Cached" haveTarball alwaysShow dispYesNo - , if not (hasLib pkginfo) then empty else - text "Modules:" $+$ nest 4 (vcat (map disp . sort . modules $ pkginfo)) - ]) - $+$ text "" - where - entry fname field cond format = case cond (field pkginfo) of - Nothing -> label <+> format (field pkginfo) - Just Nothing -> empty - Just (Just other) -> label <+> text other - where - label = text fname Disp.<> char ':' Disp.<> padding - padding = text (replicate (13 - length fname ) ' ') - - normal = Nothing - hide = Just Nothing - replace msg = Just (Just msg) - - alwaysShow = const normal - hideIfNull v = if null v then hide else normal - showIfInstalled v - | not isInstalled = hide - | null v = replace "[ Not installed ]" - | otherwise = normal - altText nul msg v = if nul v then replace msg else normal - orNotSpecified = altText null "[ Not specified ]" - - commaSep f = Disp.fsep . Disp.punctuate (Disp.char ',') . map f - dispFlag = text . unFlagName . 
flagName - dispYesNo True = text "Yes" - dispYesNo False = text "No" - - dispExtDep (SourceDependency dep) = disp dep - dispExtDep (InstalledDependency dep) = disp dep - - isInstalled = not (null (installedVersions pkginfo)) - hasExes = length (executables pkginfo) >= 2 - --TODO: exclude non-buildable exes - pkgkind | hasLib pkginfo && hasExes = text "programs and library" - | hasLib pkginfo && hasExe pkginfo = text "program and library" - | hasLib pkginfo = text "library" - | hasExes = text "programs" - | hasExe pkginfo = text "program" - | otherwise = empty - - -reflowParagraphs :: String -> Doc -reflowParagraphs = - vcat - . intersperse (text "") -- re-insert blank lines - . map (fsep . map text . concatMap words) -- reflow paragraphs - . filter (/= [""]) - . groupBy (\x y -> "" `notElem` [x,y]) -- break on blank lines - . lines - -reflowLines :: String -> Doc -reflowLines = vcat . map text . lines - --- | We get the 'PackageDisplayInfo' by combining the info for the installed --- and available versions of a package. --- --- * We're building info about a various versions of a single named package so --- the input package info records are all supposed to refer to the same --- package name. 
--- -mergePackageInfo :: VersionRange - -> [Installed.InstalledPackageInfo] - -> [UnresolvedSourcePackage] - -> Maybe UnresolvedSourcePackage - -> Bool - -> PackageDisplayInfo -mergePackageInfo versionPref installedPkgs sourcePkgs selectedPkg showVer = - assert (length installedPkgs + length sourcePkgs > 0) $ - PackageDisplayInfo { - pkgName = combine packageName source - packageName installed, - selectedVersion = if showVer then fmap packageVersion selectedPkg - else Nothing, - selectedSourcePkg = sourceSelected, - installedVersions = map packageVersion installedPkgs, - sourceVersions = map packageVersion sourcePkgs, - preferredVersions = versionPref, - - license = combine Source.licenseRaw source - Installed.license installed, - maintainer = combine Source.maintainer source - Installed.maintainer installed, - author = combine Source.author source - Installed.author installed, - homepage = combine Source.homepage source - Installed.homepage installed, - bugReports = maybe "" Source.bugReports source, - sourceRepo = fromMaybe "" . join - . fmap (uncons Nothing Source.repoLocation - . sortBy (comparing Source.repoKind) - . Source.sourceRepos) - $ source, - --TODO: installed package info is missing synopsis - synopsis = maybe "" Source.synopsis source, - description = combine Source.description source - Installed.description installed, - category = combine Source.category source - Installed.category installed, - flags = maybe [] Source.genPackageFlags sourceGeneric, - hasLib = isJust installed - || maybe False (isJust . Source.condLibrary) sourceGeneric, - hasExe = maybe False (not . null . Source.condExecutables) sourceGeneric, - executables = map fst (maybe [] Source.condExecutables sourceGeneric), - modules = combine (map Installed.exposedName . Installed.exposedModules) - installed - -- NB: only for the PUBLIC library - (concatMap getListOfExposedModules . maybeToList . Source.library) - source, - dependencies = - combine (map (SourceDependency . 
simplifyDependency) - . Source.allBuildDepends) source - (map InstalledDependency . Installed.depends) installed, - haddockHtml = fromMaybe "" . join - . fmap (listToMaybe . Installed.haddockHTMLs) - $ installed, - haveTarball = False - } - where - combine f x g y = fromJust (fmap f x `mplus` fmap g y) - installed :: Maybe Installed.InstalledPackageInfo - installed = latestWithPref versionPref installedPkgs - - getListOfExposedModules lib = Source.exposedModules lib - ++ map Source.moduleReexportName - (Source.reexportedModules lib) - - sourceSelected - | isJust selectedPkg = selectedPkg - | otherwise = latestWithPref versionPref sourcePkgs - sourceGeneric = fmap packageDescription sourceSelected - source = fmap flattenPackageDescription sourceGeneric - - uncons :: b -> (a -> b) -> [a] -> b - uncons z _ [] = z - uncons _ f (x:_) = f x - - --- | Not all the info is pure. We have to check if the docs really are --- installed, because the registered package info lies. Similarly we have to --- check if the tarball has indeed been fetched. --- -updateFileSystemPackageDetails :: PackageDisplayInfo -> IO PackageDisplayInfo -updateFileSystemPackageDetails pkginfo = do - fetched <- maybe (return False) (isFetched . packageSource) - (selectedSourcePkg pkginfo) - docsExist <- doesDirectoryExist (haddockHtml pkginfo) - return pkginfo { - haveTarball = fetched, - haddockHtml = if docsExist then haddockHtml pkginfo else "" - } - -latestWithPref :: Package pkg => VersionRange -> [pkg] -> Maybe pkg -latestWithPref _ [] = Nothing -latestWithPref pref pkgs = Just (maximumBy (comparing prefThenVersion) pkgs) - where - prefThenVersion pkg = let ver = packageVersion pkg - in (withinRange ver pref, ver) - - --- | Rearrange installed and source packages into groups referring to the --- same package by name. In the result pairs, the lists are guaranteed to not --- both be empty. 
--- -mergePackages :: [Installed.InstalledPackageInfo] - -> [UnresolvedSourcePackage] - -> [( PackageName - , [Installed.InstalledPackageInfo] - , [UnresolvedSourcePackage] )] -mergePackages installedPkgs sourcePkgs = - map collect - $ mergeBy (\i a -> fst i `compare` fst a) - (groupOn packageName installedPkgs) - (groupOn packageName sourcePkgs) - where - collect (OnlyInLeft (name,is) ) = (name, is, []) - collect ( InBoth (_,is) (name,as)) = (name, is, as) - collect (OnlyInRight (name,as)) = (name, [], as) - -groupOn :: Ord key => (a -> key) -> [a] -> [(key,[a])] -groupOn key = map (\xs -> (key (head xs), xs)) - . groupBy (equating key) - . sortBy (comparing key) - -dispTopVersions :: Int -> VersionRange -> [Version] -> Doc -dispTopVersions n pref vs = - (Disp.fsep . Disp.punctuate (Disp.char ',') - . map (\ver -> if ispref ver then disp ver else parens (disp ver)) - . sort . take n . interestingVersions ispref - $ vs) - <+> trailingMessage - - where - ispref ver = withinRange ver pref - extra = length vs - n - trailingMessage - | extra <= 0 = Disp.empty - | otherwise = Disp.parens $ Disp.text "and" - <+> Disp.int (length vs - n) - <+> if extra == 1 then Disp.text "other" - else Disp.text "others" - --- | Reorder a bunch of versions to put the most interesting / significant --- versions first. A preferred version range is taken into account. --- --- This may be used in a user interface to select a small number of versions --- to present to the user, e.g. --- --- > let selectVersions = sort . take 5 . interestingVersions pref --- -interestingVersions :: (Version -> Bool) -> [Version] -> [Version] -interestingVersions pref = - map (mkVersion . fst) . filter snd - . concat . Tree.levels - . swizzleTree - . reorderTree (\(Node (v,_) _) -> pref (mkVersion v)) - . reverseTree - . mkTree - . 
map versionNumbers - - where - swizzleTree = unfoldTree (spine []) - where - spine ts' (Node x []) = (x, ts') - spine ts' (Node x (t:ts)) = spine (Node x ts:ts') t - - reorderTree _ (Node x []) = Node x [] - reorderTree p (Node x ts) = Node x (ts' ++ ts'') - where - (ts',ts'') = partition p (map (reorderTree p) ts) - - reverseTree (Node x cs) = Node x (reverse (map reverseTree cs)) - - mkTree xs = unfoldTree step (False, [], xs) - where - step (node,ns,vs) = - ( (reverse ns, node) - , [ (any null vs', n:ns, filter (not . null) vs') - | (n, vs') <- groups vs ] - ) - groups = map (\g -> (head (head g), map tail g)) - . groupBy (equating head) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Manpage.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Manpage.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Manpage.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Manpage.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,171 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Manpage --- Copyright : (c) Maciek Makowski 2015 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Functions for building the manual page. - -module Distribution.Client.Manpage - ( -- * Manual page generation - manpage - ) where - -import Distribution.Simple.Command -import Distribution.Client.Setup (globalCommand) - -import Data.Char (toUpper) -import Data.List (intercalate) - -data FileInfo = FileInfo String String -- ^ path, description - --- | A list of files that should be documented in the manual page. 
-files :: [FileInfo] -files = - [ (FileInfo "~/.cabal/config" "The defaults that can be overridden with command-line options.") - , (FileInfo "~/.cabal/world" "A list of all packages whose installation has been explicitly requested.") - ] - --- | Produces a manual page with @troff@ markup. -manpage :: String -> [CommandSpec a] -> String -manpage pname commands = unlines $ - [ ".TH " ++ map toUpper pname ++ " 1" - , ".SH NAME" - , pname ++ " \\- a system for building and packaging Haskell libraries and programs" - , ".SH SYNOPSIS" - , ".B " ++ pname - , ".I command" - , ".RI < arguments |[ options ]>..." - , "" - , "Where the" - , ".I commands" - , "are" - , "" - ] ++ - concatMap (commandSynopsisLines pname) commands ++ - [ ".SH DESCRIPTION" - , "Cabal is the standard package system for Haskell software. It helps people to configure, " - , "build and install Haskell software and to distribute it easily to other users and developers." - , "" - , "The command line " ++ pname ++ " tool (also referred to as cabal-install) helps with " - , "installing existing packages and developing new packages. " - , "It can be used to work with local packages or to install packages from online package archives, " - , "including automatically installing dependencies. By default it is configured to use Hackage, " - , "which is Haskell's central package archive that contains thousands of libraries and applications " - , "in the Cabal package format." - , ".SH OPTIONS" - , "Global options:" - , "" - ] ++ - optionsLines (globalCommand []) ++ - [ ".SH COMMANDS" - ] ++ - concatMap (commandDetailsLines pname) commands ++ - [ ".SH FILES" - ] ++ - concatMap fileLines files ++ - [ ".SH BUGS" - , "To browse the list of known issues or report a new one please see " - , "https://github.com/haskell/cabal/labels/cabal-install." 
- ] - -commandSynopsisLines :: String -> CommandSpec action -> [String] -commandSynopsisLines pname (CommandSpec ui _ NormalCommand) = - [ ".B " ++ pname ++ " " ++ (commandName ui) - , ".R - " ++ commandSynopsis ui - , ".br" - ] -commandSynopsisLines _ (CommandSpec _ _ HiddenCommand) = [] - -commandDetailsLines :: String -> CommandSpec action -> [String] -commandDetailsLines pname (CommandSpec ui _ NormalCommand) = - [ ".B " ++ pname ++ " " ++ (commandName ui) - , "" - , commandUsage ui pname - , "" - ] ++ - optional commandDescription ++ - optional commandNotes ++ - [ "Flags:" - , ".RS" - ] ++ - optionsLines ui ++ - [ ".RE" - , "" - ] - where - optional field = - case field ui of - Just text -> [text pname, ""] - Nothing -> [] -commandDetailsLines _ (CommandSpec _ _ HiddenCommand) = [] - -optionsLines :: CommandUI flags -> [String] -optionsLines command = concatMap optionLines (concatMap optionDescr (commandOptions command ParseArgs)) - -data ArgumentRequired = Optional | Required -type OptionArg = (ArgumentRequired, ArgPlaceHolder) - -optionLines :: OptDescr flags -> [String] -optionLines (ReqArg description (optionChars, optionStrings) placeHolder _ _) = - argOptionLines description optionChars optionStrings (Required, placeHolder) -optionLines (OptArg description (optionChars, optionStrings) placeHolder _ _ _) = - argOptionLines description optionChars optionStrings (Optional, placeHolder) -optionLines (BoolOpt description (trueChars, trueStrings) (falseChars, falseStrings) _ _) = - optionLinesIfPresent trueChars trueStrings ++ - optionLinesIfPresent falseChars falseStrings ++ - optionDescriptionLines description -optionLines (ChoiceOpt options) = - concatMap choiceLines options - where - choiceLines (description, (optionChars, optionStrings), _, _) = - [ optionsLine optionChars optionStrings ] ++ - optionDescriptionLines description - -argOptionLines :: String -> [Char] -> [String] -> OptionArg -> [String] -argOptionLines description optionChars optionStrings 
arg = - [ optionsLine optionChars optionStrings - , optionArgLine arg - ] ++ - optionDescriptionLines description - -optionLinesIfPresent :: [Char] -> [String] -> [String] -optionLinesIfPresent optionChars optionStrings = - if null optionChars && null optionStrings then [] - else [ optionsLine optionChars optionStrings, ".br" ] - -optionDescriptionLines :: String -> [String] -optionDescriptionLines description = - [ ".RS" - , description - , ".RE" - , "" - ] - -optionsLine :: [Char] -> [String] -> String -optionsLine optionChars optionStrings = - intercalate ", " (shortOptions optionChars ++ longOptions optionStrings) - -shortOptions :: [Char] -> [String] -shortOptions = map (\c -> "\\-" ++ [c]) - -longOptions :: [String] -> [String] -longOptions = map (\s -> "\\-\\-" ++ s) - -optionArgLine :: OptionArg -> String -optionArgLine (Required, placeHolder) = ".I " ++ placeHolder -optionArgLine (Optional, placeHolder) = ".RI [ " ++ placeHolder ++ " ]" - -fileLines :: FileInfo -> [String] -fileLines (FileInfo path description) = - [ path - , ".RS" - , description - , ".RE" - , "" - ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Nix.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Nix.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Nix.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Nix.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,202 +0,0 @@ -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE ViewPatterns #-} - -module Distribution.Client.Nix - ( findNixExpr - , inNixShell - , nixInstantiate - , nixShell - , nixShellIfSandboxed - ) where - -import Distribution.Client.Compat.Prelude - -import Control.Exception (bracket, catch) -import System.Directory - ( canonicalizePath, createDirectoryIfMissing, doesDirectoryExist - , doesFileExist, 
removeDirectoryRecursive, removeFile ) -import System.Environment (getArgs, getExecutablePath) -import System.FilePath - ( (), replaceExtension, takeDirectory, takeFileName ) -import System.IO (IOMode(..), hClose, openFile) -import System.IO.Error (isDoesNotExistError) -import System.Process (showCommandForUser) - -import Distribution.Compat.Environment - ( lookupEnv, setEnv, unsetEnv ) - -import Distribution.Verbosity - -import Distribution.Simple.Program - ( Program(..), ProgramDb - , addKnownProgram, configureProgram, emptyProgramDb, getDbProgramOutput - , runDbProgram, simpleProgram ) -import Distribution.Simple.Setup (fromFlagOrDefault) -import Distribution.Simple.Utils (debug, existsAndIsMoreRecentThan) - -import Distribution.Client.Config (SavedConfig(..)) -import Distribution.Client.GlobalFlags (GlobalFlags(..)) -import Distribution.Client.Sandbox.Types (UseSandbox(..)) - - -configureOneProgram :: Verbosity -> Program -> IO ProgramDb -configureOneProgram verb prog = - configureProgram verb prog (addKnownProgram prog emptyProgramDb) - - -touchFile :: FilePath -> IO () -touchFile path = do - catch (removeFile path) (\e -> when (isDoesNotExistError e) (return ())) - createDirectoryIfMissing True (takeDirectory path) - openFile path WriteMode >>= hClose - - -findNixExpr :: GlobalFlags -> SavedConfig -> IO (Maybe FilePath) -findNixExpr globalFlags config = do - -- criteria for deciding to run nix-shell - let nixEnabled = - fromFlagOrDefault False - (globalNix (savedGlobalFlags config) <> globalNix globalFlags) - - if nixEnabled - then do - let exprPaths = [ "shell.nix", "default.nix" ] - filterM doesFileExist exprPaths >>= \case - [] -> return Nothing - (path : _) -> return (Just path) - else return Nothing - - --- set IN_NIX_SHELL so that builtins.getEnv in Nix works as in nix-shell -inFakeNixShell :: IO a -> IO a -inFakeNixShell f = - bracket (fakeEnv "IN_NIX_SHELL" "1") (resetEnv "IN_NIX_SHELL") (\_ -> f) - where - fakeEnv var new = do - old <- lookupEnv var 
- setEnv var new - return old - resetEnv var = maybe (unsetEnv var) (setEnv var) - - -nixInstantiate - :: Verbosity - -> FilePath - -> Bool - -> GlobalFlags - -> SavedConfig - -> IO () -nixInstantiate verb dist force globalFlags config = - findNixExpr globalFlags config >>= \case - Nothing -> return () - Just shellNix -> do - alreadyInShell <- inNixShell - shellDrv <- drvPath dist shellNix - instantiated <- doesFileExist shellDrv - -- an extra timestamp file is necessary because the derivation lives in - -- the store so its mtime is always 1. - let timestamp = timestampPath dist shellNix - upToDate <- existsAndIsMoreRecentThan timestamp shellNix - - let ready = alreadyInShell || (instantiated && upToDate && not force) - unless ready $ do - - let prog = simpleProgram "nix-instantiate" - progdb <- configureOneProgram verb prog - - removeGCRoots verb dist - touchFile timestamp - - _ <- inFakeNixShell - (getDbProgramOutput verb prog progdb - [ "--add-root", shellDrv, "--indirect", shellNix ]) - return () - - -nixShell - :: Verbosity - -> FilePath - -> GlobalFlags - -> SavedConfig - -> IO () - -- ^ The action to perform inside a nix-shell. This is also the action - -- that will be performed immediately if Nix is disabled. - -> IO () -nixShell verb dist globalFlags config go = do - - alreadyInShell <- inNixShell - - if alreadyInShell - then go - else do - findNixExpr globalFlags config >>= \case - Nothing -> go - Just shellNix -> do - - let prog = simpleProgram "nix-shell" - progdb <- configureOneProgram verb prog - - cabal <- getExecutablePath - - -- alreadyInShell == True in child process - setEnv "CABAL_IN_NIX_SHELL" "1" - - -- Run cabal with the same arguments inside nix-shell. - -- When the child process reaches the top of nixShell, it will - -- detect that it is running inside the shell and fall back - -- automatically. 
- shellDrv <- drvPath dist shellNix - args <- getArgs - runDbProgram verb prog progdb - [ "--add-root", gcrootPath dist "result", "--indirect", shellDrv - , "--run", showCommandForUser cabal args - ] - - -drvPath :: FilePath -> FilePath -> IO FilePath -drvPath dist path = do - -- We do not actually care about canonicity, but makeAbsolute is only - -- available in newer versions of directory. - -- We expect the path to be a symlink if it exists, so we do not canonicalize - -- the entire path because that would dereference the symlink. - distNix <- canonicalizePath (dist "nix") - -- Nix garbage collector roots must be absolute paths - return (distNix replaceExtension (takeFileName path) "drv") - - -timestampPath :: FilePath -> FilePath -> FilePath -timestampPath dist path = - dist "nix" replaceExtension (takeFileName path) "drv.timestamp" - - -gcrootPath :: FilePath -> FilePath -gcrootPath dist = dist "nix" "gcroots" - - -inNixShell :: IO Bool -inNixShell = isJust <$> lookupEnv "CABAL_IN_NIX_SHELL" - - -removeGCRoots :: Verbosity -> FilePath -> IO () -removeGCRoots verb dist = do - let tgt = gcrootPath dist - exists <- doesDirectoryExist tgt - when exists $ do - debug verb ("removing Nix gcroots from " ++ tgt) - removeDirectoryRecursive tgt - - -nixShellIfSandboxed - :: Verbosity - -> FilePath - -> GlobalFlags - -> SavedConfig - -> UseSandbox - -> IO () - -- ^ The action to perform inside a nix-shell. This is also the action - -- that will be performed immediately if Nix is disabled. 
- -> IO () -nixShellIfSandboxed verb dist globalFlags config useSandbox go = - case useSandbox of - NoSandbox -> go - UseSandbox _ -> nixShell verb dist globalFlags config go diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Outdated.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Outdated.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Outdated.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Outdated.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,211 +0,0 @@ -{-# LANGUAGE CPP #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Outdated --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Implementation of the 'outdated' command. Checks for outdated --- dependencies in the package description file or freeze file. 
------------------------------------------------------------------------------ - -module Distribution.Client.Outdated ( outdated - , ListOutdatedSettings(..), listOutdated ) -where - -import Prelude () -import Distribution.Client.Config -import Distribution.Client.IndexUtils as IndexUtils -import Distribution.Client.Compat.Prelude -import Distribution.Client.ProjectConfig -import Distribution.Client.DistDirLayout -import Distribution.Client.RebuildMonad -import Distribution.Client.Setup hiding (quiet) -import Distribution.Client.Targets -import Distribution.Client.Types -import Distribution.Solver.Types.PackageConstraint -import Distribution.Solver.Types.PackageIndex -import Distribution.Client.Sandbox.PackageEnvironment - -import Distribution.Package (PackageName, packageVersion) -import Distribution.PackageDescription (allBuildDepends) -import Distribution.PackageDescription.Configuration (finalizePD) -import Distribution.Simple.Compiler (Compiler, compilerInfo) -import Distribution.Simple.Setup - (fromFlagOrDefault, flagToMaybe) -import Distribution.Simple.Utils - (die', notice, debug, tryFindPackageDesc) -import Distribution.System (Platform) -import Distribution.Text (display) -import Distribution.Types.ComponentRequestedSpec - (ComponentRequestedSpec(..)) -import Distribution.Types.Dependency - (Dependency(..), depPkgName, simplifyDependency) -import Distribution.Verbosity (Verbosity, silent) -import Distribution.Version - (Version, LowerBound(..), UpperBound(..) - ,asVersionIntervals, majorBoundVersion) -import Distribution.PackageDescription.Parsec - (readGenericPackageDescription) - -import qualified Data.Set as S -import System.Directory (getCurrentDirectory) -import System.Exit (exitFailure) -import Control.Exception (throwIO) - --- | Entry point for the 'outdated' command. 
-outdated :: Verbosity -> OutdatedFlags -> RepoContext - -> Compiler -> Platform - -> IO () -outdated verbosity0 outdatedFlags repoContext comp platform = do - let freezeFile = fromFlagOrDefault False (outdatedFreezeFile outdatedFlags) - newFreezeFile = fromFlagOrDefault False - (outdatedNewFreezeFile outdatedFlags) - mprojectFile = flagToMaybe - (outdatedProjectFile outdatedFlags) - simpleOutput = fromFlagOrDefault False - (outdatedSimpleOutput outdatedFlags) - quiet = fromFlagOrDefault False (outdatedQuiet outdatedFlags) - exitCode = fromFlagOrDefault quiet (outdatedExitCode outdatedFlags) - ignorePred = let ignoreSet = S.fromList (outdatedIgnore outdatedFlags) - in \pkgname -> pkgname `S.member` ignoreSet - minorPred = case outdatedMinor outdatedFlags of - Nothing -> const False - Just IgnoreMajorVersionBumpsNone -> const False - Just IgnoreMajorVersionBumpsAll -> const True - Just (IgnoreMajorVersionBumpsSome pkgs) -> - let minorSet = S.fromList pkgs - in \pkgname -> pkgname `S.member` minorSet - verbosity = if quiet then silent else verbosity0 - - when (not newFreezeFile && isJust mprojectFile) $ - die' verbosity $ - "--project-file must only be used with --new-freeze-file." - - sourcePkgDb <- IndexUtils.getSourcePackages verbosity repoContext - let pkgIndex = packageIndex sourcePkgDb - deps <- if freezeFile - then depsFromFreezeFile verbosity - else if newFreezeFile - then depsFromNewFreezeFile verbosity mprojectFile - else depsFromPkgDesc verbosity comp platform - debug verbosity $ "Dependencies loaded: " - ++ (intercalate ", " $ map display deps) - let outdatedDeps = listOutdated deps pkgIndex - (ListOutdatedSettings ignorePred minorPred) - when (not quiet) $ - showResult verbosity outdatedDeps simpleOutput - if (exitCode && (not . null $ outdatedDeps)) - then exitFailure - else return () - --- | Print either the list of all outdated dependencies, or a message --- that there are none. 
-showResult :: Verbosity -> [(Dependency,Version)] -> Bool -> IO () -showResult verbosity outdatedDeps simpleOutput = - if (not . null $ outdatedDeps) - then - do when (not simpleOutput) $ - notice verbosity "Outdated dependencies:" - for_ outdatedDeps $ \(d@(Dependency pn _), v) -> - let outdatedDep = if simpleOutput then display pn - else display d ++ " (latest: " ++ display v ++ ")" - in notice verbosity outdatedDep - else notice verbosity "All dependencies are up to date." - --- | Convert a list of 'UserConstraint's to a 'Dependency' list. -userConstraintsToDependencies :: [UserConstraint] -> [Dependency] -userConstraintsToDependencies ucnstrs = - mapMaybe (packageConstraintToDependency . userToPackageConstraint) ucnstrs - --- | Read the list of dependencies from the freeze file. -depsFromFreezeFile :: Verbosity -> IO [Dependency] -depsFromFreezeFile verbosity = do - cwd <- getCurrentDirectory - userConfig <- loadUserConfig verbosity cwd Nothing - let ucnstrs = map fst . configExConstraints . savedConfigureExFlags $ - userConfig - deps = userConstraintsToDependencies ucnstrs - debug verbosity "Reading the list of dependencies from the freeze file" - return deps - --- | Read the list of dependencies from the new-style freeze file. -depsFromNewFreezeFile :: Verbosity -> Maybe FilePath -> IO [Dependency] -depsFromNewFreezeFile verbosity mprojectFile = do - projectRoot <- either throwIO return =<< - findProjectRoot Nothing mprojectFile - let distDirLayout = defaultDistDirLayout projectRoot - {- TODO: Support dist dir override -} Nothing - projectConfig <- runRebuild (distProjectRootDirectory distDirLayout) $ - readProjectLocalFreezeConfig verbosity distDirLayout - let ucnstrs = map fst . projectConfigConstraints . 
projectConfigShared - $ projectConfig - deps = userConstraintsToDependencies ucnstrs - debug verbosity $ - "Reading the list of dependencies from the new-style freeze file " ++ distProjectFile distDirLayout "freeze" - return deps - --- | Read the list of dependencies from the package description. -depsFromPkgDesc :: Verbosity -> Compiler -> Platform -> IO [Dependency] -depsFromPkgDesc verbosity comp platform = do - cwd <- getCurrentDirectory - path <- tryFindPackageDesc cwd - gpd <- readGenericPackageDescription verbosity path - let cinfo = compilerInfo comp - epd = finalizePD mempty (ComponentRequestedSpec True True) - (const True) platform cinfo [] gpd - case epd of - Left _ -> die' verbosity "finalizePD failed" - Right (pd, _) -> do - let bd = allBuildDepends pd - debug verbosity - "Reading the list of dependencies from the package description" - return bd - --- | Various knobs for customising the behaviour of 'listOutdated'. -data ListOutdatedSettings = ListOutdatedSettings { - -- | Should this package be ignored? - listOutdatedIgnorePred :: PackageName -> Bool, - -- | Should major version bumps should be ignored for this package? - listOutdatedMinorPred :: PackageName -> Bool - } - --- | Find all outdated dependencies. 
-listOutdated :: [Dependency] - -> PackageIndex UnresolvedSourcePackage - -> ListOutdatedSettings - -> [(Dependency, Version)] -listOutdated deps pkgIndex (ListOutdatedSettings ignorePred minorPred) = - mapMaybe isOutdated $ map simplifyDependency deps - where - isOutdated :: Dependency -> Maybe (Dependency, Version) - isOutdated dep - | ignorePred (depPkgName dep) = Nothing - | otherwise = - let this = map packageVersion $ lookupDependency pkgIndex dep - latest = lookupLatest dep - in (\v -> (dep, v)) `fmap` isOutdated' this latest - - isOutdated' :: [Version] -> [Version] -> Maybe Version - isOutdated' [] _ = Nothing - isOutdated' _ [] = Nothing - isOutdated' this latest = - let this' = maximum this - latest' = maximum latest - in if this' < latest' then Just latest' else Nothing - - lookupLatest :: Dependency -> [Version] - lookupLatest dep - | minorPred (depPkgName dep) = - map packageVersion $ lookupDependency pkgIndex (relaxMinor dep) - | otherwise = - map packageVersion $ lookupPackageName pkgIndex (depPkgName dep) - - relaxMinor :: Dependency -> Dependency - relaxMinor (Dependency pn vr) = (Dependency pn vr') - where - vr' = let vis = asVersionIntervals vr - (LowerBound v0 _,upper) = last vis - in case upper of - NoUpperBound -> vr - UpperBound _v1 _ -> majorBoundVersion v0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/PackageHash.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/PackageHash.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/PackageHash.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/PackageHash.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,385 +0,0 @@ -{-# LANGUAGE RecordWildCards, NamedFieldPuns #-} -{-# LANGUAGE DeriveDataTypeable, DeriveGeneric #-} - --- | Functions to calculate nix-style hashes for package ids. 
--- --- The basic idea is simple, hash the combination of: --- --- * the package tarball --- * the ids of all the direct dependencies --- * other local configuration (flags, profiling, etc) --- -module Distribution.Client.PackageHash ( - -- * Calculating package hashes - PackageHashInputs(..), - PackageHashConfigInputs(..), - PackageSourceHash, - hashedInstalledPackageId, - hashPackageHashInputs, - renderPackageHashInputs, - -- ** Platform-specific variations - hashedInstalledPackageIdLong, - hashedInstalledPackageIdShort, - - -- * Low level hash choice - HashValue, - hashValue, - showHashValue, - readFileHashValue, - hashFromTUF, - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Package - ( PackageId, PackageIdentifier(..), mkComponentId - , PkgconfigName ) -import Distribution.System - ( Platform, OS(Windows, OSX), buildOS ) -import Distribution.PackageDescription - ( FlagAssignment, unFlagAssignment, showFlagValue ) -import Distribution.Simple.Compiler - ( CompilerId, OptimisationLevel(..), DebugInfoLevel(..) 
- , ProfDetailLevel(..), showProfDetailLevel ) -import Distribution.Simple.InstallDirs - ( PathTemplate, fromPathTemplate ) -import Distribution.Text - ( display ) -import Distribution.Version -import Distribution.Client.Types - ( InstalledPackageId ) -import qualified Distribution.Solver.Types.ComponentDeps as CD - -import qualified Hackage.Security.Client as Sec - -import qualified Crypto.Hash.SHA256 as SHA256 -import qualified Data.ByteString.Base16 as Base16 -import qualified Data.ByteString.Char8 as BS -import qualified Data.ByteString.Lazy.Char8 as LBS -import qualified Data.Map as Map -import qualified Data.Set as Set -import Data.Set (Set) - -import Data.Function (on) -import Control.Exception (evaluate) -import System.IO (withBinaryFile, IOMode(..)) - - -------------------------------- --- Calculating package hashes --- - --- | Calculate a 'InstalledPackageId' for a package using our nix-style --- inputs hashing method. --- --- Note that due to path length limitations on Windows, this function uses --- a different method on Windows that produces shorted package ids. --- See 'hashedInstalledPackageIdLong' vs 'hashedInstalledPackageIdShort'. --- -hashedInstalledPackageId :: PackageHashInputs -> InstalledPackageId -hashedInstalledPackageId - | buildOS == Windows = hashedInstalledPackageIdShort - | buildOS == OSX = hashedInstalledPackageIdVeryShort - | otherwise = hashedInstalledPackageIdLong - --- | Calculate a 'InstalledPackageId' for a package using our nix-style --- inputs hashing method. --- --- This produces large ids with big hashes. It is only suitable for systems --- without significant path length limitations (ie not Windows). 
--- -hashedInstalledPackageIdLong :: PackageHashInputs -> InstalledPackageId -hashedInstalledPackageIdLong pkghashinputs@PackageHashInputs{pkgHashPkgId} = - mkComponentId $ - display pkgHashPkgId -- to be a bit user friendly - ++ "-" - ++ showHashValue (hashPackageHashInputs pkghashinputs) - --- | On Windows we have serious problems with path lengths. Windows imposes a --- maximum path length of 260 chars, and even if we can use the windows long --- path APIs ourselves, we cannot guarantee that ghc, gcc, ld, ar, etc etc all --- do so too. --- --- So our only choice is to limit the lengths of the paths, and the only real --- way to do that is to limit the size of the 'InstalledPackageId's that we --- generate. We do this by truncating the package names and versions and also --- by truncating the hash sizes. --- --- Truncating the package names and versions is technically ok because they are --- just included for human convenience, the full source package id is included --- in the hash. --- --- Truncating the hash size is disappointing but also technically ok. We --- rely on the hash primarily for collision avoidance not for any security --- properties (at least for now). --- -hashedInstalledPackageIdShort :: PackageHashInputs -> InstalledPackageId -hashedInstalledPackageIdShort pkghashinputs@PackageHashInputs{pkgHashPkgId} = - mkComponentId $ - intercalate "-" - -- max length now 64 - [ truncateStr 14 (display name) - , truncateStr 8 (display version) - , showHashValue (truncateHash (hashPackageHashInputs pkghashinputs)) - ] - where - PackageIdentifier name version = pkgHashPkgId - - -- Truncate a 32 byte SHA256 hash to 160bits, 20 bytes :-( - -- It'll render as 40 hex chars. - truncateHash (HashValue h) = HashValue (BS.take 20 h) - - -- Truncate a string, with a visual indication that it is truncated. - truncateStr n s | length s <= n = s - | otherwise = take (n-1) s ++ "_" - --- | On macOS we shorten the name very aggressively. 
The mach-o linker on --- macOS has a limited load command size, to which the name of the library --- as well as its relative path (\@rpath) entry count. To circumvent this, --- on macOS the libraries are not stored as --- @store//libHS.dylib@ --- where libraryname contains the libraries name, version and abi hash, but in --- @store/lib/libHS.dylib@ --- where the very short library name drops all vowels from the package name, --- and truncates the hash to 4 bytes. --- --- We therefore we only need one \@rpath entry to @store/lib@ instead of one --- \@rpath entry for each library. And the reduced library name saves some --- additional space. --- --- This however has two major drawbacks: --- 1) Packages can collide more easily due to the shortened hash. --- 2) The libraries are *not* prefix relocatable anymore as they all end up --- in the same @store/lib@ folder. --- --- The ultimate solution would have to include generating proxy dynamic --- libraries on macOS, such that the proxy libraries and the linked libraries --- stay under the load command limit, and the recursive linker is still able --- to link all of them. -hashedInstalledPackageIdVeryShort :: PackageHashInputs -> InstalledPackageId -hashedInstalledPackageIdVeryShort pkghashinputs@PackageHashInputs{pkgHashPkgId} = - mkComponentId $ - intercalate "-" - [ filter (not . flip elem "aeiou") (display name) - , display version - , showHashValue (truncateHash (hashPackageHashInputs pkghashinputs)) - ] - where - PackageIdentifier name version = pkgHashPkgId - truncateHash (HashValue h) = HashValue (BS.take 4 h) - --- | All the information that contribues to a package's hash, and thus its --- 'InstalledPackageId'. 
--- -data PackageHashInputs = PackageHashInputs { - pkgHashPkgId :: PackageId, - pkgHashComponent :: Maybe CD.Component, - pkgHashSourceHash :: PackageSourceHash, - pkgHashPkgConfigDeps :: Set (PkgconfigName, Maybe Version), - pkgHashDirectDeps :: Set InstalledPackageId, - pkgHashOtherConfig :: PackageHashConfigInputs - } - -type PackageSourceHash = HashValue - --- | Those parts of the package configuration that contribute to the --- package hash. --- -data PackageHashConfigInputs = PackageHashConfigInputs { - pkgHashCompilerId :: CompilerId, - pkgHashPlatform :: Platform, - pkgHashFlagAssignment :: FlagAssignment, -- complete not partial - pkgHashConfigureScriptArgs :: [String], -- just ./configure for build-type Configure - pkgHashVanillaLib :: Bool, - pkgHashSharedLib :: Bool, - pkgHashDynExe :: Bool, - pkgHashGHCiLib :: Bool, - pkgHashProfLib :: Bool, - pkgHashProfExe :: Bool, - pkgHashProfLibDetail :: ProfDetailLevel, - pkgHashProfExeDetail :: ProfDetailLevel, - pkgHashCoverage :: Bool, - pkgHashOptimization :: OptimisationLevel, - pkgHashSplitObjs :: Bool, - pkgHashSplitSections :: Bool, - pkgHashStripLibs :: Bool, - pkgHashStripExes :: Bool, - pkgHashDebugInfo :: DebugInfoLevel, - pkgHashProgramArgs :: Map String [String], - pkgHashExtraLibDirs :: [FilePath], - pkgHashExtraFrameworkDirs :: [FilePath], - pkgHashExtraIncludeDirs :: [FilePath], - pkgHashProgPrefix :: Maybe PathTemplate, - pkgHashProgSuffix :: Maybe PathTemplate, - - -- Haddock options - pkgHashDocumentation :: Bool, - pkgHashHaddockHoogle :: Bool, - pkgHashHaddockHtml :: Bool, - pkgHashHaddockHtmlLocation :: Maybe String, - pkgHashHaddockForeignLibs :: Bool, - pkgHashHaddockExecutables :: Bool, - pkgHashHaddockTestSuites :: Bool, - pkgHashHaddockBenchmarks :: Bool, - pkgHashHaddockInternal :: Bool, - pkgHashHaddockCss :: Maybe FilePath, - pkgHashHaddockLinkedSource :: Bool, - pkgHashHaddockQuickJump :: Bool, - pkgHashHaddockContents :: Maybe PathTemplate - --- TODO: [required eventually] 
pkgHashToolsVersions ? --- TODO: [required eventually] pkgHashToolsExtraOptions ? - } - deriving Show - - --- | Calculate the overall hash to be used for an 'InstalledPackageId'. --- -hashPackageHashInputs :: PackageHashInputs -> HashValue -hashPackageHashInputs = hashValue . renderPackageHashInputs - --- | Render a textual representation of the 'PackageHashInputs'. --- --- The 'hashValue' of this text is the overall package hash. --- -renderPackageHashInputs :: PackageHashInputs -> LBS.ByteString -renderPackageHashInputs PackageHashInputs{ - pkgHashPkgId, - pkgHashComponent, - pkgHashSourceHash, - pkgHashDirectDeps, - pkgHashPkgConfigDeps, - pkgHashOtherConfig = - PackageHashConfigInputs{..} - } = - -- The purpose of this somewhat laboured rendering (e.g. why not just - -- use show?) is so that existing package hashes do not change - -- unnecessarily when new configuration inputs are added into the hash. - - -- In particular, the assumption is that when a new configuration input - -- is included into the hash, that existing packages will typically get - -- the default value for that feature. So if we avoid adding entries with - -- the default value then most of the time adding new features will not - -- change the hashes of existing packages and so fewer packages will need - -- to be rebuilt. - - --TODO: [nice to have] ultimately we probably want to put this config info - -- into the ghc-pkg db. At that point this should probably be changed to - -- use the config file infrastructure so it can be read back in again. - LBS.pack $ unlines $ catMaybes $ - [ entry "pkgid" display pkgHashPkgId - , mentry "component" show pkgHashComponent - , entry "src" showHashValue pkgHashSourceHash - , entry "pkg-config-deps" - (intercalate ", " . map (\(pn, mb_v) -> display pn ++ - case mb_v of - Nothing -> "" - Just v -> " " ++ display v) - . Set.toList) pkgHashPkgConfigDeps - , entry "deps" (intercalate ", " . map display - . 
Set.toList) pkgHashDirectDeps - -- and then all the config - , entry "compilerid" display pkgHashCompilerId - , entry "platform" display pkgHashPlatform - , opt "flags" mempty showFlagAssignment pkgHashFlagAssignment - , opt "configure-script" [] unwords pkgHashConfigureScriptArgs - , opt "vanilla-lib" True display pkgHashVanillaLib - , opt "shared-lib" False display pkgHashSharedLib - , opt "dynamic-exe" False display pkgHashDynExe - , opt "ghci-lib" False display pkgHashGHCiLib - , opt "prof-lib" False display pkgHashProfLib - , opt "prof-exe" False display pkgHashProfExe - , opt "prof-lib-detail" ProfDetailDefault showProfDetailLevel pkgHashProfLibDetail - , opt "prof-exe-detail" ProfDetailDefault showProfDetailLevel pkgHashProfExeDetail - , opt "hpc" False display pkgHashCoverage - , opt "optimisation" NormalOptimisation (show . fromEnum) pkgHashOptimization - , opt "split-objs" False display pkgHashSplitObjs - , opt "split-sections" False display pkgHashSplitSections - , opt "stripped-lib" False display pkgHashStripLibs - , opt "stripped-exe" True display pkgHashStripExes - , opt "debug-info" NormalDebugInfo (show . 
fromEnum) pkgHashDebugInfo - , opt "extra-lib-dirs" [] unwords pkgHashExtraLibDirs - , opt "extra-framework-dirs" [] unwords pkgHashExtraFrameworkDirs - , opt "extra-include-dirs" [] unwords pkgHashExtraIncludeDirs - , opt "prog-prefix" Nothing (maybe "" fromPathTemplate) pkgHashProgPrefix - , opt "prog-suffix" Nothing (maybe "" fromPathTemplate) pkgHashProgSuffix - - , opt "documentation" False display pkgHashDocumentation - , opt "haddock-hoogle" False display pkgHashHaddockHoogle - , opt "haddock-html" False display pkgHashHaddockHtml - , opt "haddock-html-location" Nothing (fromMaybe "") pkgHashHaddockHtmlLocation - , opt "haddock-foreign-libraries" False display pkgHashHaddockForeignLibs - , opt "haddock-executables" False display pkgHashHaddockExecutables - , opt "haddock-tests" False display pkgHashHaddockTestSuites - , opt "haddock-benchmarks" False display pkgHashHaddockBenchmarks - , opt "haddock-internal" False display pkgHashHaddockInternal - , opt "haddock-css" Nothing (fromMaybe "") pkgHashHaddockCss - , opt "haddock-hyperlink-source" False display pkgHashHaddockLinkedSource - , opt "haddock-quickjump" False display pkgHashHaddockQuickJump - , opt "haddock-contents-location" Nothing (maybe "" fromPathTemplate) pkgHashHaddockContents - - ] ++ Map.foldrWithKey (\prog args acc -> opt (prog ++ "-options") [] unwords args : acc) [] pkgHashProgramArgs - where - entry key format value = Just (key ++ ": " ++ format value) - mentry key format value = fmap (\v -> key ++ ": " ++ format v) value - opt key def format value - | value == def = Nothing - | otherwise = entry key format value - - showFlagAssignment = unwords . map showFlagValue . sortBy (compare `on` fst) . unFlagAssignment - ------------------------------------------------ --- The specific choice of hash implementation --- - --- Is a crypto hash necessary here? One thing to consider is who controls the --- inputs and what's the result of a hash collision. 
Obviously we should not --- install packages we don't trust because they can run all sorts of code, but --- if I've checked there's no TH, no custom Setup etc, is there still a --- problem? If someone provided us a tarball that hashed to the same value as --- some other package and we installed it, we could end up re-using that --- installed package in place of another one we wanted. So yes, in general --- there is some value in preventing intentional hash collisions in installed --- package ids. - -newtype HashValue = HashValue BS.ByteString - deriving (Eq, Generic, Show, Typeable) - -instance Binary HashValue where - put (HashValue digest) = put digest - get = do - digest <- get - -- Cannot do any sensible validation here. Although we use SHA256 - -- for stuff we hash ourselves, we can also get hashes from TUF - -- and that can in principle use different hash functions in future. - return (HashValue digest) - --- | Hash some data. Currently uses SHA256. --- -hashValue :: LBS.ByteString -> HashValue -hashValue = HashValue . SHA256.hashlazy - -showHashValue :: HashValue -> String -showHashValue (HashValue digest) = BS.unpack (Base16.encode digest) - --- | Hash the content of a file. Uses SHA256. --- -readFileHashValue :: FilePath -> IO HashValue -readFileHashValue tarball = - withBinaryFile tarball ReadMode $ \hnd -> - evaluate . hashValue =<< LBS.hGetContents hnd - --- | Convert a hash from TUF metadata into a 'PackageSourceHash'. --- --- Note that TUF hashes don't neessarily have to be SHA256, since it can --- support new algorithms in future. --- -hashFromTUF :: Sec.Hash -> HashValue -hashFromTUF (Sec.Hash hashstr) = - --TODO: [code cleanup] either we should get TUF to use raw bytestrings or - -- perhaps we should also just use a base16 string as the internal rep. 
- case Base16.decode (BS.pack hashstr) of - (hash, trailing) | not (BS.null hash) && BS.null trailing - -> HashValue hash - _ -> error "hashFromTUF: cannot decode base16 hash" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/PackageUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/PackageUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/PackageUtils.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/PackageUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,40 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.PackageUtils --- Copyright : (c) Duncan Coutts 2010 --- License : BSD-like --- --- Maintainer : cabal-devel@gmail.com --- Stability : provisional --- Portability : portable --- --- Various package description utils that should be in the Cabal lib ------------------------------------------------------------------------------ -module Distribution.Client.PackageUtils ( - externalBuildDepends, - ) where - -import Distribution.Package - ( packageVersion, packageName ) -import Distribution.Types.ComponentRequestedSpec - ( ComponentRequestedSpec ) -import Distribution.Types.Dependency -import Distribution.Types.UnqualComponentName -import Distribution.PackageDescription - ( PackageDescription(..), libName, enabledBuildDepends ) -import Distribution.Version - ( withinRange, isAnyVersion ) - --- | The list of dependencies that refer to external packages --- rather than internal package components. --- -externalBuildDepends :: PackageDescription -> ComponentRequestedSpec -> [Dependency] -externalBuildDepends pkg spec = filter (not . 
internal) (enabledBuildDepends pkg spec) - where - -- True if this dependency is an internal one (depends on a library - -- defined in the same package). - internal (Dependency depName versionRange) = - (depName == packageName pkg && - packageVersion pkg `withinRange` versionRange) || - (Just (packageNameToUnqualComponentName depName) `elem` map libName (subLibraries pkg) && - isAnyVersion versionRange) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ParseUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ParseUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ParseUtils.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ParseUtils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,279 +0,0 @@ -{-# LANGUAGE ExistentialQuantification, NamedFieldPuns #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.ParseUtils --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Parsing utilities. 
------------------------------------------------------------------------------ - -module Distribution.Client.ParseUtils ( - - -- * Fields and field utilities - FieldDescr(..), - liftField, - liftFields, - filterFields, - mapFieldNames, - commandOptionToField, - commandOptionsToFields, - - -- * Sections and utilities - SectionDescr(..), - liftSection, - - -- * Parsing and printing flat config - parseFields, - ppFields, - ppSection, - - -- * Parsing and printing config with sections and subsections - parseFieldsAndSections, - ppFieldsAndSections, - - -- ** Top level of config files - parseConfig, - showConfig, - ) - where - -import Distribution.ParseUtils - ( FieldDescr(..), ParseResult(..), warning, LineNo, lineNo - , Field(..), liftField, readFieldsFlat ) -import Distribution.Simple.Command - ( OptionField, viewAsFieldDescr ) - -import Control.Monad ( foldM ) -import Text.PrettyPrint ( (<+>), ($+$) ) -import qualified Data.Map as Map -import qualified Text.PrettyPrint as Disp - ( (<>), Doc, text, colon, vcat, empty, isEmpty, nest ) - - -------------------------- --- FieldDescr utilities --- - -liftFields :: (b -> a) - -> (a -> b -> b) - -> [FieldDescr a] - -> [FieldDescr b] -liftFields get set = map (liftField get set) - - --- | Given a collection of field descriptions, keep only a given list of them, --- identified by name. --- -filterFields :: [String] -> [FieldDescr a] -> [FieldDescr a] -filterFields includeFields = filter ((`elem` includeFields) . fieldName) - --- | Apply a name mangling function to the field names of all the field --- descriptions. The typical use case is to apply some prefix. --- -mapFieldNames :: (String -> String) -> [FieldDescr a] -> [FieldDescr a] -mapFieldNames mangleName = - map (\descr -> descr { fieldName = mangleName (fieldName descr) }) - - --- | Reuse a command line 'OptionField' as a config file 'FieldDescr'. 
--- -commandOptionToField :: OptionField a -> FieldDescr a -commandOptionToField = viewAsFieldDescr - --- | Reuse a bunch of command line 'OptionField's as config file 'FieldDescr's. --- -commandOptionsToFields :: [OptionField a] -> [FieldDescr a] -commandOptionsToFields = map viewAsFieldDescr - - ------------------------------------------- --- SectionDescr definition and utilities --- - --- | The description of a section in a config file. It can contain both --- fields and optionally further subsections. See also 'FieldDescr'. --- -data SectionDescr a = forall b. SectionDescr { - sectionName :: String, - sectionFields :: [FieldDescr b], - sectionSubsections :: [SectionDescr b], - sectionGet :: a -> [(String, b)], - sectionSet :: LineNo -> String -> b -> a -> ParseResult a, - sectionEmpty :: b - } - --- | To help construction of config file descriptions in a modular way it is --- useful to define fields and sections on local types and then hoist them --- into the parent types when combining them in bigger descriptions. --- --- This is essentially a lens operation for 'SectionDescr' to help embedding --- one inside another. --- -liftSection :: (b -> a) - -> (a -> b -> b) - -> SectionDescr a - -> SectionDescr b -liftSection get' set' (SectionDescr name fields sections get set empty) = - let sectionGet' = get . get' - sectionSet' lineno param x y = do - x' <- set lineno param x (get' y) - return (set' x' y) - in SectionDescr name fields sections sectionGet' sectionSet' empty - - -------------------------------------- --- Parsing and printing flat config --- - --- | Parse a bunch of semi-parsed 'Field's according to a set of field --- descriptions. It accumulates the result on top of a given initial value. --- --- This only covers the case of flat configuration without subsections. See --- also 'parseFieldsAndSections'. 
--- -parseFields :: [FieldDescr a] -> a -> [Field] -> ParseResult a -parseFields fieldDescrs = - foldM setField - where - fieldMap = Map.fromList [ (fieldName f, f) | f <- fieldDescrs ] - - setField accum (F line name value) = - case Map.lookup name fieldMap of - Just (FieldDescr _ _ set) -> set line value accum - Nothing -> do - warning $ "Unrecognized field " ++ name ++ " on line " ++ show line - return accum - - setField accum f = do - warning $ "Unrecognized stanza on line " ++ show (lineNo f) - return accum - --- | This is a customised version of the functions from Distribution.ParseUtils --- that also optionally print default values for empty fields as comments. --- -ppFields :: [FieldDescr a] -> (Maybe a) -> a -> Disp.Doc -ppFields fields def cur = - Disp.vcat [ ppField name (fmap getter def) (getter cur) - | FieldDescr name getter _ <- fields] - -ppField :: String -> (Maybe Disp.Doc) -> Disp.Doc -> Disp.Doc -ppField name mdef cur - | Disp.isEmpty cur = maybe Disp.empty - (\def -> Disp.text "--" <+> Disp.text name - Disp.<> Disp.colon <+> def) mdef - | otherwise = Disp.text name Disp.<> Disp.colon <+> cur - --- | Pretty print a section. --- --- Since 'ppFields' does not cover subsections you can use this to add them. --- Or alternatively use a 'SectionDescr' and use 'ppFieldsAndSections'. --- -ppSection :: String -> String -> [FieldDescr a] -> (Maybe a) -> a -> Disp.Doc -ppSection name arg fields def cur - | Disp.isEmpty fieldsDoc = Disp.empty - | otherwise = Disp.text name <+> argDoc - $+$ (Disp.nest 2 fieldsDoc) - where - fieldsDoc = ppFields fields def cur - argDoc | arg == "" = Disp.empty - | otherwise = Disp.text arg - - ------------------------------------------ --- Parsing and printing non-flat config --- - --- | Much like 'parseFields' but it also allows subsections. The permitted --- subsections are given by a list of 'SectionDescr's. 
--- -parseFieldsAndSections :: [FieldDescr a] -> [SectionDescr a] -> a - -> [Field] -> ParseResult a -parseFieldsAndSections fieldDescrs sectionDescrs = - foldM setField - where - fieldMap = Map.fromList [ (fieldName f, f) | f <- fieldDescrs ] - sectionMap = Map.fromList [ (sectionName s, s) | s <- sectionDescrs ] - - setField a (F line name value) = - case Map.lookup name fieldMap of - Just (FieldDescr _ _ set) -> set line value a - Nothing -> do - warning $ "Unrecognized field '" ++ name - ++ "' on line " ++ show line - return a - - setField a (Section line name param fields) = - case Map.lookup name sectionMap of - Just (SectionDescr _ fieldDescrs' sectionDescrs' _ set sectionEmpty) -> do - b <- parseFieldsAndSections fieldDescrs' sectionDescrs' sectionEmpty fields - set line param b a - Nothing -> do - warning $ "Unrecognized section '" ++ name - ++ "' on line " ++ show line - return a - - setField accum (block@IfBlock {}) = do - warning $ "Unrecognized stanza on line " ++ show (lineNo block) - return accum - --- | Much like 'ppFields' but also pretty prints any subsections. Subsection --- are only shown if they are non-empty. --- --- Note that unlike 'ppFields', at present it does not support printing --- default values. If needed, adding such support would be quite reasonable. --- -ppFieldsAndSections :: [FieldDescr a] -> [SectionDescr a] -> a -> Disp.Doc -ppFieldsAndSections fieldDescrs sectionDescrs val = - ppFields fieldDescrs Nothing val - $+$ - Disp.vcat - [ Disp.text "" $+$ sectionDoc - | SectionDescr { - sectionName, sectionGet, - sectionFields, sectionSubsections - } <- sectionDescrs - , (param, x) <- sectionGet val - , let sectionDoc = ppSectionAndSubsections - sectionName param - sectionFields sectionSubsections x - , not (Disp.isEmpty sectionDoc) - ] - --- | Unlike 'ppSection' which has to be called directly, this gets used via --- 'ppFieldsAndSections' and so does not need to be exported. 
--- -ppSectionAndSubsections :: String -> String - -> [FieldDescr a] -> [SectionDescr a] -> a -> Disp.Doc -ppSectionAndSubsections name arg fields sections cur - | Disp.isEmpty fieldsDoc = Disp.empty - | otherwise = Disp.text name <+> argDoc - $+$ (Disp.nest 2 fieldsDoc) - where - fieldsDoc = showConfig fields sections cur - argDoc | arg == "" = Disp.empty - | otherwise = Disp.text arg - - ------------------------------------------------ --- Top level config file parsing and printing --- - --- | Parse a string in the config file syntax into a value, based on a --- description of the configuration file in terms of its fields and sections. --- --- It accumulates the result on top of a given initial (typically empty) value. --- -parseConfig :: [FieldDescr a] -> [SectionDescr a] -> a - -> String -> ParseResult a -parseConfig fieldDescrs sectionDescrs empty str = - parseFieldsAndSections fieldDescrs sectionDescrs empty - =<< readFieldsFlat str - --- | Render a value in the config file syntax, based on a description of the --- configuration file in terms of its fields and sections. --- -showConfig :: [FieldDescr a] -> [SectionDescr a] -> a -> Disp.Doc -showConfig = ppFieldsAndSections - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectBuilding/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectBuilding/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectBuilding/Types.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectBuilding/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,206 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} - --- | Types for the "Distribution.Client.ProjectBuilding" --- --- Moved out to avoid module cycles. 
--- -module Distribution.Client.ProjectBuilding.Types ( - -- * Pre-build status - BuildStatusMap, - BuildStatus(..), - buildStatusRequiresBuild, - buildStatusToString, - BuildStatusRebuild(..), - BuildReason(..), - MonitorChangedReason(..), - - -- * Build outcomes - BuildOutcomes, - BuildOutcome, - BuildResult(..), - BuildFailure(..), - BuildFailureReason(..), - ) where - -import Distribution.Client.Types (DocsResult, TestsResult) -import Distribution.Client.FileMonitor (MonitorChangedReason(..)) - -import Distribution.Package (UnitId, PackageId) -import Distribution.InstalledPackageInfo (InstalledPackageInfo) -import Distribution.Simple.LocalBuildInfo (ComponentName) - -import Data.Map (Map) -import Data.Set (Set) -import Data.Typeable (Typeable) -import Control.Exception (Exception, SomeException) - - ------------------------------------------------------------------------------- --- Pre-build status: result of the dry run --- - --- | The 'BuildStatus' of every package in the 'ElaboratedInstallPlan'. --- --- This is used as the result of the dry-run of building an install plan. --- -type BuildStatusMap = Map UnitId BuildStatus - --- | The build status for an individual package is the state that the --- package is in /prior/ to initiating a (re)build. --- --- This should not be confused with a 'BuildResult' which is the result --- /after/ successfully building a package. --- --- It serves two purposes: --- --- * For dry-run output, it lets us explain to the user if and why a package --- is going to be (re)built. --- --- * It tell us what step to start or resume building from, and carries --- enough information for us to be able to do so. --- -data BuildStatus = - - -- | The package is in the 'InstallPlan.PreExisting' state, so does not - -- need building. - BuildStatusPreExisting - - -- | The package is in the 'InstallPlan.Installed' state, so does not - -- need building. 
- | BuildStatusInstalled - - -- | The package has not been downloaded yet, so it will have to be - -- downloaded, unpacked and built. - | BuildStatusDownload - - -- | The package has not been unpacked yet, so it will have to be - -- unpacked and built. - | BuildStatusUnpack FilePath - - -- | The package exists in a local dir already, and just needs building - -- or rebuilding. So this can only happen for 'BuildInplaceOnly' style - -- packages. - | BuildStatusRebuild FilePath BuildStatusRebuild - - -- | The package exists in a local dir already, and is fully up to date. - -- So this package can be put into the 'InstallPlan.Installed' state - -- and it does not need to be built. - | BuildStatusUpToDate BuildResult - - --- | Which 'BuildStatus' values indicate we'll have to do some build work of --- some sort. In particular we use this as part of checking if any of a --- package's deps have changed. --- -buildStatusRequiresBuild :: BuildStatus -> Bool -buildStatusRequiresBuild BuildStatusPreExisting = False -buildStatusRequiresBuild BuildStatusInstalled = False -buildStatusRequiresBuild BuildStatusUpToDate {} = False -buildStatusRequiresBuild _ = True - --- | This is primarily here for debugging. It's not actually used anywhere. --- -buildStatusToString :: BuildStatus -> String -buildStatusToString BuildStatusPreExisting = "BuildStatusPreExisting" -buildStatusToString BuildStatusInstalled = "BuildStatusInstalled" -buildStatusToString BuildStatusDownload = "BuildStatusDownload" -buildStatusToString (BuildStatusUnpack fp) = "BuildStatusUnpack " ++ show fp -buildStatusToString (BuildStatusRebuild fp _) = "BuildStatusRebuild " ++ show fp -buildStatusToString (BuildStatusUpToDate _) = "BuildStatusUpToDate" - - --- | For a package that is going to be built or rebuilt, the state it's in now. --- --- So again, this tells us why a package needs to be rebuilt and what build --- phases need to be run. 
The 'MonitorChangedReason' gives us details like --- which file changed, which is mainly for high verbosity debug output. --- -data BuildStatusRebuild = - - -- | The package configuration changed, so the configure and build phases - -- needs to be (re)run. - BuildStatusConfigure (MonitorChangedReason ()) - - -- | The configuration has not changed but the build phase needs to be - -- rerun. We record the reason the (re)build is needed. - -- - -- The optional registration info here tells us if we've registered the - -- package already, or if we still need to do that after building. - -- @Just Nothing@ indicates that we know that no registration is - -- necessary (e.g., executable.) - -- - | BuildStatusBuild (Maybe (Maybe InstalledPackageInfo)) BuildReason - -data BuildReason = - -- | The dependencies of this package have been (re)built so the build - -- phase needs to be rerun. - -- - BuildReasonDepsRebuilt - - -- | Changes in files within the package (or first run or corrupt cache) - | BuildReasonFilesChanged (MonitorChangedReason ()) - - -- | An important special case is that no files have changed but the - -- set of components the /user asked to build/ has changed. We track the - -- set of components /we have built/, which of course only grows (until - -- some other change resets it). - -- - -- The @Set 'ComponentName'@ is the set of components we have built - -- previously. When we update the monitor we take the union of the ones - -- we have built previously with the ones the user has asked for this - -- time and save those. See 'updatePackageBuildFileMonitor'. - -- - | BuildReasonExtraTargets (Set ComponentName) - - -- | Although we're not going to build any additional targets as a whole, - -- we're going to build some part of a component or run a repl or any - -- other action that does not result in additional persistent artifacts. 
- -- - | BuildReasonEphemeralTargets - - ------------------------------------------------------------------------------- --- Build outcomes: result of the build --- - --- | A summary of the outcome for building a whole set of packages. --- -type BuildOutcomes = Map UnitId BuildOutcome - --- | A summary of the outcome for building a single package: either success --- or failure. --- -type BuildOutcome = Either BuildFailure BuildResult - --- | Information arising from successfully building a single package. --- -data BuildResult = BuildResult { - buildResultDocs :: DocsResult, - buildResultTests :: TestsResult, - buildResultLogFile :: Maybe FilePath - } - deriving Show - --- | Information arising from the failure to build a single package. --- -data BuildFailure = BuildFailure { - buildFailureLogFile :: Maybe FilePath, - buildFailureReason :: BuildFailureReason - } - deriving (Show, Typeable) - -instance Exception BuildFailure - --- | Detail on the reason that a package failed to build. --- -data BuildFailureReason = DependentFailed PackageId - | DownloadFailed SomeException - | UnpackFailed SomeException - | ConfigureFailed SomeException - | BuildFailed SomeException - | ReplFailed SomeException - | HaddocksFailed SomeException - | TestsFailed SomeException - | BenchFailed SomeException - | InstallFailed SomeException - deriving Show diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectBuilding.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectBuilding.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectBuilding.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectBuilding.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1448 +0,0 @@ -{-# LANGUAGE CPP, BangPatterns, RecordWildCards, NamedFieldPuns, - ScopedTypeVariables #-} -{-# 
LANGUAGE TypeFamilies #-} -{-# LANGUAGE NoMonoLocalBinds #-} -{-# LANGUAGE ConstraintKinds #-} - --- | --- -module Distribution.Client.ProjectBuilding ( - -- * Dry run phase - -- | What bits of the plan will we execute? The dry run does not change - -- anything but tells us what will need to be built. - rebuildTargetsDryRun, - improveInstallPlanWithUpToDatePackages, - - -- ** Build status - -- | This is the detailed status information we get from the dry run. - BuildStatusMap, - BuildStatus(..), - BuildStatusRebuild(..), - BuildReason(..), - MonitorChangedReason(..), - buildStatusToString, - - -- * Build phase - -- | Now we actually execute the plan. - rebuildTargets, - -- ** Build outcomes - -- | This is the outcome for each package of executing the plan. - -- For each package, did the build succeed or fail? - BuildOutcomes, - BuildOutcome, - BuildResult(..), - BuildFailure(..), - BuildFailureReason(..), - ) where - -#if !MIN_VERSION_base(4,8,0) -import Control.Applicative ((<$>)) -#endif - -import Distribution.Client.PackageHash (renderPackageHashInputs) -import Distribution.Client.RebuildMonad -import Distribution.Client.ProjectConfig -import Distribution.Client.ProjectPlanning -import Distribution.Client.ProjectPlanning.Types -import Distribution.Client.ProjectBuilding.Types -import Distribution.Client.Store - -import Distribution.Client.Types - hiding (BuildOutcomes, BuildOutcome, - BuildResult(..), BuildFailure(..)) -import Distribution.Client.InstallPlan - ( GenericInstallPlan, GenericPlanPackage, IsUnit ) -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.DistDirLayout -import Distribution.Client.FileMonitor -import Distribution.Client.SetupWrapper -import Distribution.Client.JobControl -import Distribution.Client.FetchUtils -import Distribution.Client.GlobalFlags (RepoContext) -import qualified Distribution.Client.Tar as Tar -import Distribution.Client.Setup - ( filterConfigureFlags, filterHaddockArgs - , 
filterHaddockFlags ) -import Distribution.Client.SourceFiles -import Distribution.Client.SrcDist (allPackageSourceFiles) -import Distribution.Client.Utils - ( ProgressPhase(..), progressMessage, removeExistingFile ) - -import Distribution.Compat.Lens -import Distribution.Package hiding (InstalledPackageId, installedPackageId) -import qualified Distribution.PackageDescription as PD -import Distribution.InstalledPackageInfo (InstalledPackageInfo) -import qualified Distribution.InstalledPackageInfo as Installed -import Distribution.Simple.BuildPaths (haddockDirName) -import qualified Distribution.Simple.InstallDirs as InstallDirs -import Distribution.Types.BuildType -import Distribution.Types.PackageDescription.Lens (componentModules) -import Distribution.Simple.Program -import qualified Distribution.Simple.Setup as Cabal -import Distribution.Simple.Command (CommandUI) -import qualified Distribution.Simple.Register as Cabal -import Distribution.Simple.LocalBuildInfo (ComponentName(..)) -import Distribution.Simple.Compiler - ( Compiler, compilerId, PackageDB(..) ) - -import Distribution.Simple.Utils -import Distribution.Version -import Distribution.Verbosity -import Distribution.Text -import Distribution.ParseUtils ( showPWarning ) -import Distribution.Compat.Graph (IsNode(..)) - -import Data.Map (Map) -import qualified Data.Map as Map -import Data.Set (Set) -import qualified Data.Set as Set -import qualified Data.ByteString.Lazy as LBS -import Data.List (isPrefixOf) - -import Control.Monad -import Control.Exception -import Data.Function (on) -import Data.Maybe - -import System.FilePath -import System.IO -import System.Directory - -#if !MIN_VERSION_directory(1,2,5) -listDirectory :: FilePath -> IO [FilePath] -listDirectory path = - (filter f) <$> (getDirectoryContents path) - where f filename = filename /= "." && filename /= ".." -#endif - ------------------------------------------------------------------------------- --- * Overall building strategy. 
------------------------------------------------------------------------------- --- --- We start with an 'ElaboratedInstallPlan' that has already been improved by --- reusing packages from the store, and pruned to include only the targets of --- interest and their dependencies. So the remaining packages in the --- 'InstallPlan.Configured' state are ones we either need to build or rebuild. --- --- First, we do a preliminary dry run phase where we work out which packages --- we really need to (re)build, and for the ones we do need to build which --- build phase to start at. --- --- We use this to improve the 'ElaboratedInstallPlan' again by changing --- up-to-date 'InstallPlan.Configured' packages to 'InstallPlan.Installed' --- so that the build phase will skip them. --- --- Then we execute the plan, that is actually build packages. The outcomes of --- trying to build all the packages are collected and returned. --- --- We split things like this (dry run and execute) for a couple reasons. --- Firstly we need to be able to do dry runs anyway, and these need to be --- reasonably accurate in terms of letting users know what (and why) things --- are going to be (re)built. --- --- Given that we need to be able to do dry runs, it would not be great if --- we had to repeat all the same work when we do it for real. Not only is --- it duplicate work, but it's duplicate code which is likely to get out of --- sync. So we do things only once. We preserve info we discover in the dry --- run phase and rely on it later when we build things for real. This also --- somewhat simplifies the build phase. So this way the dry run can't so --- easily drift out of sync with the real thing since we're relying on the --- info it produces. 
--- --- An additional advantage is that it makes it easier to debug rebuild --- errors (ie rebuilding too much or too little), since all the rebuild --- decisions are made without making any state changes at the same time --- (that would make it harder to reproduce the problem situation). --- --- Finally, we can use the dry run build status and the build outcomes to --- give us some information on the overall status of packages in the project. --- This includes limited information about the status of things that were --- not actually in the subset of the plan that was used for the dry run or --- execution phases. In particular we may know that some packages are now --- definitely out of date. See "Distribution.Client.ProjectPlanOutput" for --- details. - - ------------------------------------------------------------------------------- --- * Dry run: what bits of the 'ElaboratedInstallPlan' will we execute? ------------------------------------------------------------------------------- - --- Refer to ProjectBuilding.Types for details of these important types: - --- type BuildStatusMap = ... --- data BuildStatus = ... --- data BuildStatusRebuild = ... --- data BuildReason = ... - --- | Do the dry run pass. This is a prerequisite of 'rebuildTargets'. --- --- It gives us the 'BuildStatusMap'. This should be used with --- 'improveInstallPlanWithUpToDatePackages' to give an improved version of --- the 'ElaboratedInstallPlan' with packages switched to the --- 'InstallPlan.Installed' state when we find that they're already up to date. 
--- -rebuildTargetsDryRun :: DistDirLayout - -> ElaboratedSharedConfig - -> ElaboratedInstallPlan - -> IO BuildStatusMap -rebuildTargetsDryRun distDirLayout@DistDirLayout{..} shared = - -- Do the various checks to work out the 'BuildStatus' of each package - foldMInstallPlanDepOrder dryRunPkg - where - dryRunPkg :: ElaboratedPlanPackage - -> [BuildStatus] - -> IO BuildStatus - dryRunPkg (InstallPlan.PreExisting _pkg) _depsBuildStatus = - return BuildStatusPreExisting - - dryRunPkg (InstallPlan.Installed _pkg) _depsBuildStatus = - return BuildStatusInstalled - - dryRunPkg (InstallPlan.Configured pkg) depsBuildStatus = do - mloc <- checkFetched (elabPkgSourceLocation pkg) - case mloc of - Nothing -> return BuildStatusDownload - - Just (LocalUnpackedPackage srcdir) -> - -- For the case of a user-managed local dir, irrespective of the - -- build style, we build from that directory and put build - -- artifacts under the shared dist directory. - dryRunLocalPkg pkg depsBuildStatus srcdir - - Just (RemoteSourceRepoPackage _repo srcdir) -> - -- At this point, source repos are essentially the same as local - -- dirs, since we've already download them. - dryRunLocalPkg pkg depsBuildStatus srcdir - - -- The three tarball cases are handled the same as each other, - -- though depending on the build style. 
- Just (LocalTarballPackage tarball) -> - dryRunTarballPkg pkg depsBuildStatus tarball - - Just (RemoteTarballPackage _ tarball) -> - dryRunTarballPkg pkg depsBuildStatus tarball - - Just (RepoTarballPackage _ _ tarball) -> - dryRunTarballPkg pkg depsBuildStatus tarball - - dryRunTarballPkg :: ElaboratedConfiguredPackage - -> [BuildStatus] - -> FilePath - -> IO BuildStatus - dryRunTarballPkg pkg depsBuildStatus tarball = - case elabBuildStyle pkg of - BuildAndInstall -> return (BuildStatusUnpack tarball) - BuildInplaceOnly -> do - -- TODO: [nice to have] use a proper file monitor rather than this dir exists test - exists <- doesDirectoryExist srcdir - if exists - then dryRunLocalPkg pkg depsBuildStatus srcdir - else return (BuildStatusUnpack tarball) - where - srcdir = distUnpackedSrcDirectory (packageId pkg) - - dryRunLocalPkg :: ElaboratedConfiguredPackage - -> [BuildStatus] - -> FilePath - -> IO BuildStatus - dryRunLocalPkg pkg depsBuildStatus srcdir = do - -- Go and do lots of I/O, reading caches and probing files to work out - -- if anything has changed - change <- checkPackageFileMonitorChanged - packageFileMonitor pkg srcdir depsBuildStatus - case change of - -- It did change, giving us 'BuildStatusRebuild' info on why - Left rebuild -> - return (BuildStatusRebuild srcdir rebuild) - - -- No changes, the package is up to date. Use the saved build results. - Right buildResult -> - return (BuildStatusUpToDate buildResult) - where - packageFileMonitor = - newPackageFileMonitor shared distDirLayout (elabDistDirParams shared pkg) - - --- | A specialised traversal over the packages in an install plan. --- --- The packages are visited in dependency order, starting with packages with no --- dependencies. The result for each package is accumulated into a 'Map' and --- returned as the final result. In addition, when visting a package, the --- visiting function is passed the results for all the immediate package --- dependencies. 
This can be used to propagate information from dependencies. --- -foldMInstallPlanDepOrder - :: forall m ipkg srcpkg b. - (Monad m, IsUnit ipkg, IsUnit srcpkg) - => (GenericPlanPackage ipkg srcpkg -> - [b] -> m b) - -> GenericInstallPlan ipkg srcpkg - -> m (Map UnitId b) -foldMInstallPlanDepOrder visit = - go Map.empty . InstallPlan.reverseTopologicalOrder - where - go :: Map UnitId b - -> [GenericPlanPackage ipkg srcpkg] - -> m (Map UnitId b) - go !results [] = return results - - go !results (pkg : pkgs) = do - -- we go in the right order so the results map has entries for all deps - let depresults :: [b] - depresults = - map (\ipkgid -> let Just result = Map.lookup ipkgid results - in result) - (InstallPlan.depends pkg) - result <- visit pkg depresults - let results' = Map.insert (nodeKey pkg) result results - go results' pkgs - -improveInstallPlanWithUpToDatePackages :: BuildStatusMap - -> ElaboratedInstallPlan - -> ElaboratedInstallPlan -improveInstallPlanWithUpToDatePackages pkgsBuildStatus = - InstallPlan.installed canPackageBeImproved - where - canPackageBeImproved pkg = - case Map.lookup (installedUnitId pkg) pkgsBuildStatus of - Just BuildStatusUpToDate {} -> True - Just _ -> False - Nothing -> error $ "improveInstallPlanWithUpToDatePackages: " - ++ display (packageId pkg) ++ " not in status map" - - ------------------------------ --- Package change detection --- - --- | As part of the dry run for local unpacked packages we have to check if the --- package config or files have changed. That is the purpose of --- 'PackageFileMonitor' and 'checkPackageFileMonitorChanged'. --- --- When a package is (re)built, the monitor must be updated to reflect the new --- state of the package. Because we sometimes build without reconfiguring the --- state updates are split into two, one for package config changes and one --- for other changes. This is the purpose of 'updatePackageConfigFileMonitor' --- and 'updatePackageBuildFileMonitor'. 
--- -data PackageFileMonitor = PackageFileMonitor { - pkgFileMonitorConfig :: FileMonitor ElaboratedConfiguredPackage (), - pkgFileMonitorBuild :: FileMonitor (Set ComponentName) BuildResultMisc, - pkgFileMonitorReg :: FileMonitor () (Maybe InstalledPackageInfo) - } - --- | This is all the components of the 'BuildResult' other than the --- @['InstalledPackageInfo']@. --- --- We have to split up the 'BuildResult' components since they get produced --- at different times (or rather, when different things change). --- -type BuildResultMisc = (DocsResult, TestsResult) - -newPackageFileMonitor :: ElaboratedSharedConfig - -> DistDirLayout - -> DistDirParams - -> PackageFileMonitor -newPackageFileMonitor shared - DistDirLayout{distPackageCacheFile} - dparams = - PackageFileMonitor { - pkgFileMonitorConfig = - FileMonitor { - fileMonitorCacheFile = distPackageCacheFile dparams "config", - fileMonitorKeyValid = (==) `on` normaliseConfiguredPackage shared, - fileMonitorCheckIfOnlyValueChanged = False - }, - - pkgFileMonitorBuild = - FileMonitor { - fileMonitorCacheFile = distPackageCacheFile dparams "build", - fileMonitorKeyValid = \componentsToBuild componentsAlreadyBuilt -> - componentsToBuild `Set.isSubsetOf` componentsAlreadyBuilt, - fileMonitorCheckIfOnlyValueChanged = True - }, - - pkgFileMonitorReg = - newFileMonitor (distPackageCacheFile dparams "registration") - } - --- | Helper function for 'checkPackageFileMonitorChanged', --- 'updatePackageConfigFileMonitor' and 'updatePackageBuildFileMonitor'. --- --- It selects the info from a 'ElaboratedConfiguredPackage' that are used by --- the 'FileMonitor's (in the 'PackageFileMonitor') to detect value changes. --- -packageFileMonitorKeyValues :: ElaboratedConfiguredPackage - -> (ElaboratedConfiguredPackage, Set ComponentName) -packageFileMonitorKeyValues elab = - (elab_config, buildComponents) - where - -- The first part is the value used to guard (re)configuring the package. 
- -- That is, if this value changes then we will reconfigure. - -- The ElaboratedConfiguredPackage consists mostly (but not entirely) of - -- information that affects the (re)configure step. But those parts that - -- do not affect the configure step need to be nulled out. Those parts are - -- the specific targets that we're going to build. - -- - elab_config = - elab { - elabBuildTargets = [], - elabTestTargets = [], - elabBenchTargets = [], - elabReplTarget = Nothing, - elabHaddockTargets = [], - elabBuildHaddocks = False - } - - -- The second part is the value used to guard the build step. So this is - -- more or less the opposite of the first part, as it's just the info about - -- what targets we're going to build. - -- - buildComponents = elabBuildTargetWholeComponents elab - --- | Do all the checks on whether a package has changed and thus needs either --- rebuilding or reconfiguring and rebuilding. --- -checkPackageFileMonitorChanged :: PackageFileMonitor - -> ElaboratedConfiguredPackage - -> FilePath - -> [BuildStatus] - -> IO (Either BuildStatusRebuild BuildResult) -checkPackageFileMonitorChanged PackageFileMonitor{..} - pkg srcdir depsBuildStatus = do - --TODO: [nice to have] some debug-level message about file changes, like rerunIfChanged - configChanged <- checkFileMonitorChanged - pkgFileMonitorConfig srcdir pkgconfig - case configChanged of - MonitorChanged monitorReason -> - return (Left (BuildStatusConfigure monitorReason')) - where - monitorReason' = fmap (const ()) monitorReason - - MonitorUnchanged () _ - -- The configChanged here includes the identity of the dependencies, - -- so depsBuildStatus is just needed for the changes in the content - -- of dependencies. 
- | any buildStatusRequiresBuild depsBuildStatus -> do - regChanged <- checkFileMonitorChanged pkgFileMonitorReg srcdir () - let mreg = changedToMaybe regChanged - return (Left (BuildStatusBuild mreg BuildReasonDepsRebuilt)) - - | otherwise -> do - buildChanged <- checkFileMonitorChanged - pkgFileMonitorBuild srcdir buildComponents - regChanged <- checkFileMonitorChanged - pkgFileMonitorReg srcdir () - let mreg = changedToMaybe regChanged - case (buildChanged, regChanged) of - (MonitorChanged (MonitoredValueChanged prevBuildComponents), _) -> - return (Left (BuildStatusBuild mreg buildReason)) - where - buildReason = BuildReasonExtraTargets prevBuildComponents - - (MonitorChanged monitorReason, _) -> - return (Left (BuildStatusBuild mreg buildReason)) - where - buildReason = BuildReasonFilesChanged monitorReason' - monitorReason' = fmap (const ()) monitorReason - - (MonitorUnchanged _ _, MonitorChanged monitorReason) -> - -- this should only happen if the file is corrupt or been - -- manually deleted. We don't want to bother with another - -- phase just for this, so we'll reregister by doing a build. 
- return (Left (BuildStatusBuild Nothing buildReason)) - where - buildReason = BuildReasonFilesChanged monitorReason' - monitorReason' = fmap (const ()) monitorReason - - (MonitorUnchanged _ _, MonitorUnchanged _ _) - | pkgHasEphemeralBuildTargets pkg -> - return (Left (BuildStatusBuild mreg buildReason)) - where - buildReason = BuildReasonEphemeralTargets - - (MonitorUnchanged buildResult _, MonitorUnchanged _ _) -> - return $ Right BuildResult { - buildResultDocs = docsResult, - buildResultTests = testsResult, - buildResultLogFile = Nothing - } - where - (docsResult, testsResult) = buildResult - where - (pkgconfig, buildComponents) = packageFileMonitorKeyValues pkg - changedToMaybe (MonitorChanged _) = Nothing - changedToMaybe (MonitorUnchanged x _) = Just x - - -updatePackageConfigFileMonitor :: PackageFileMonitor - -> FilePath - -> ElaboratedConfiguredPackage - -> IO () -updatePackageConfigFileMonitor PackageFileMonitor{pkgFileMonitorConfig} - srcdir pkg = - updateFileMonitor pkgFileMonitorConfig srcdir Nothing - [] pkgconfig () - where - (pkgconfig, _buildComponents) = packageFileMonitorKeyValues pkg - -updatePackageBuildFileMonitor :: PackageFileMonitor - -> FilePath - -> MonitorTimestamp - -> ElaboratedConfiguredPackage - -> BuildStatusRebuild - -> [MonitorFilePath] - -> BuildResultMisc - -> IO () -updatePackageBuildFileMonitor PackageFileMonitor{pkgFileMonitorBuild} - srcdir timestamp pkg pkgBuildStatus - monitors buildResult = - updateFileMonitor pkgFileMonitorBuild srcdir (Just timestamp) - monitors buildComponents' buildResult - where - (_pkgconfig, buildComponents) = packageFileMonitorKeyValues pkg - - -- If the only thing that's changed is that we're now building extra - -- components, then we can avoid later unnecessary rebuilds by saving the - -- total set of components that have been built, namely the union of the - -- existing ones plus the new ones. If files also changed this would be - -- the wrong thing to do. 
Note that we rely on the - -- fileMonitorCheckIfOnlyValueChanged = True mode to get this guarantee - -- that it's /only/ the value that changed not any files that changed. - buildComponents' = - case pkgBuildStatus of - BuildStatusBuild _ (BuildReasonExtraTargets prevBuildComponents) - -> buildComponents `Set.union` prevBuildComponents - _ -> buildComponents - -updatePackageRegFileMonitor :: PackageFileMonitor - -> FilePath - -> Maybe InstalledPackageInfo - -> IO () -updatePackageRegFileMonitor PackageFileMonitor{pkgFileMonitorReg} - srcdir mipkg = - updateFileMonitor pkgFileMonitorReg srcdir Nothing - [] () mipkg - -invalidatePackageRegFileMonitor :: PackageFileMonitor -> IO () -invalidatePackageRegFileMonitor PackageFileMonitor{pkgFileMonitorReg} = - removeExistingFile (fileMonitorCacheFile pkgFileMonitorReg) - - ------------------------------------------------------------------------------- --- * Doing it: executing an 'ElaboratedInstallPlan' ------------------------------------------------------------------------------- - --- Refer to ProjectBuilding.Types for details of these important types: - --- type BuildOutcomes = ... --- type BuildOutcome = ... --- data BuildResult = ... --- data BuildFailure = ... --- data BuildFailureReason = ... - --- | Build things for real. --- --- It requires the 'BuildStatusMap' gathered by 'rebuildTargetsDryRun'. 
--- -rebuildTargets :: Verbosity - -> DistDirLayout - -> StoreDirLayout - -> ElaboratedInstallPlan - -> ElaboratedSharedConfig - -> BuildStatusMap - -> BuildTimeSettings - -> IO BuildOutcomes -rebuildTargets verbosity - distDirLayout@DistDirLayout{..} - storeDirLayout - installPlan - sharedPackageConfig@ElaboratedSharedConfig { - pkgConfigCompiler = compiler, - pkgConfigCompilerProgs = progdb - } - pkgsBuildStatus - buildSettings@BuildTimeSettings{ - buildSettingNumJobs, - buildSettingKeepGoing - } = do - - -- Concurrency control: create the job controller and concurrency limits - -- for downloading, building and installing. - jobControl <- if isParallelBuild - then newParallelJobControl buildSettingNumJobs - else newSerialJobControl - registerLock <- newLock -- serialise registration - cacheLock <- newLock -- serialise access to setup exe cache - --TODO: [code cleanup] eliminate setup exe cache - - debug verbosity $ - "Executing install plan " - ++ if isParallelBuild - then " in parallel using " ++ show buildSettingNumJobs ++ " threads." - else " serially." - - createDirectoryIfMissingVerbose verbosity True distBuildRootDirectory - createDirectoryIfMissingVerbose verbosity True distTempDirectory - mapM_ (createPackageDBIfMissing verbosity compiler progdb) packageDBsToUse - - -- Before traversing the install plan, pre-emptively find all packages that - -- will need to be downloaded and start downloading them. - asyncDownloadPackages verbosity withRepoCtx - installPlan pkgsBuildStatus $ \downloadMap -> - - -- For each package in the plan, in dependency order, but in parallel... - InstallPlan.execute jobControl keepGoing - (BuildFailure Nothing . DependentFailed . 
packageId) - installPlan $ \pkg -> - --TODO: review exception handling - handle (\(e :: BuildFailure) -> return (Left e)) $ fmap Right $ - - let uid = installedUnitId pkg - Just pkgBuildStatus = Map.lookup uid pkgsBuildStatus in - - rebuildTarget - verbosity - distDirLayout - storeDirLayout - buildSettings downloadMap - registerLock cacheLock - sharedPackageConfig - installPlan pkg - pkgBuildStatus - where - isParallelBuild = buildSettingNumJobs >= 2 - keepGoing = buildSettingKeepGoing - withRepoCtx = projectConfigWithBuilderRepoContext verbosity - buildSettings - packageDBsToUse = -- all the package dbs we may need to create - (Set.toList . Set.fromList) - [ pkgdb - | InstallPlan.Configured elab <- InstallPlan.toList installPlan - , pkgdb <- concat [ elabBuildPackageDBStack elab - , elabRegisterPackageDBStack elab - , elabSetupPackageDBStack elab ] - ] - - --- | Create a package DB if it does not currently exist. Note that this action --- is /not/ safe to run concurrently. --- -createPackageDBIfMissing :: Verbosity -> Compiler -> ProgramDb - -> PackageDB -> IO () -createPackageDBIfMissing verbosity compiler progdb - (SpecificPackageDB dbPath) = do - exists <- Cabal.doesPackageDBExist dbPath - unless exists $ do - createDirectoryIfMissingVerbose verbosity True (takeDirectory dbPath) - Cabal.createPackageDB verbosity compiler progdb False dbPath -createPackageDBIfMissing _ _ _ _ = return () - - --- | Given all the context and resources, (re)build an individual package. 
--- -rebuildTarget :: Verbosity - -> DistDirLayout - -> StoreDirLayout - -> BuildTimeSettings - -> AsyncFetchMap - -> Lock -> Lock - -> ElaboratedSharedConfig - -> ElaboratedInstallPlan - -> ElaboratedReadyPackage - -> BuildStatus - -> IO BuildResult -rebuildTarget verbosity - distDirLayout@DistDirLayout{distBuildDirectory} - storeDirLayout - buildSettings downloadMap - registerLock cacheLock - sharedPackageConfig - plan rpkg@(ReadyPackage pkg) - pkgBuildStatus = - - -- We rely on the 'BuildStatus' to decide which phase to start from: - case pkgBuildStatus of - BuildStatusDownload -> downloadPhase - BuildStatusUnpack tarball -> unpackTarballPhase tarball - BuildStatusRebuild srcdir status -> rebuildPhase status srcdir - - -- TODO: perhaps re-nest the types to make these impossible - BuildStatusPreExisting {} -> unexpectedState - BuildStatusInstalled {} -> unexpectedState - BuildStatusUpToDate {} -> unexpectedState - where - unexpectedState = error "rebuildTarget: unexpected package status" - - downloadPhase = do - downsrcloc <- annotateFailureNoLog DownloadFailed $ - waitAsyncPackageDownload verbosity downloadMap pkg - case downsrcloc of - DownloadedTarball tarball -> unpackTarballPhase tarball - --TODO: [nice to have] git/darcs repos etc - - - unpackTarballPhase tarball = - withTarballLocalDirectory - verbosity distDirLayout tarball - (packageId pkg) (elabDistDirParams sharedPackageConfig pkg) (elabBuildStyle pkg) - (elabPkgDescriptionOverride pkg) $ - - case elabBuildStyle pkg of - BuildAndInstall -> buildAndInstall - BuildInplaceOnly -> buildInplace buildStatus - where - buildStatus = BuildStatusConfigure MonitorFirstRun - - -- Note that this really is rebuild, not build. It can only happen for - -- 'BuildInplaceOnly' style packages. 'BuildAndInstall' style packages - -- would only start from download or unpack phases. 
- -- - rebuildPhase buildStatus srcdir = - assert (elabBuildStyle pkg == BuildInplaceOnly) $ - - buildInplace buildStatus srcdir builddir - where - builddir = distBuildDirectory (elabDistDirParams sharedPackageConfig pkg) - - buildAndInstall srcdir builddir = - buildAndInstallUnpackedPackage - verbosity distDirLayout storeDirLayout - buildSettings registerLock cacheLock - sharedPackageConfig - plan rpkg - srcdir builddir' - where - builddir' = makeRelative srcdir builddir - --TODO: [nice to have] ^^ do this relative stuff better - - buildInplace buildStatus srcdir builddir = - --TODO: [nice to have] use a relative build dir rather than absolute - buildInplaceUnpackedPackage - verbosity distDirLayout - buildSettings registerLock cacheLock - sharedPackageConfig - plan rpkg - buildStatus - srcdir builddir - --- TODO: [nice to have] do we need to use a with-style for the temp --- files for downloading http packages, or are we going to cache them --- persistently? - --- | Given the current 'InstallPlan' and 'BuildStatusMap', select all the --- packages we have to download and fork off an async action to download them. --- We download them in dependency order so that the one's we'll need --- first are the ones we will start downloading first. --- --- The body action is passed a map from those packages (identified by their --- location) to a completion var for that package. So the body action should --- lookup the location and use 'waitAsyncPackageDownload' to get the result. 
--- -asyncDownloadPackages :: Verbosity - -> ((RepoContext -> IO a) -> IO a) - -> ElaboratedInstallPlan - -> BuildStatusMap - -> (AsyncFetchMap -> IO a) - -> IO a -asyncDownloadPackages verbosity withRepoCtx installPlan pkgsBuildStatus body - | null pkgsToDownload = body Map.empty - | otherwise = withRepoCtx $ \repoctx -> - asyncFetchPackages verbosity repoctx - pkgsToDownload body - where - pkgsToDownload = - ordNub $ - [ elabPkgSourceLocation elab - | InstallPlan.Configured elab - <- InstallPlan.reverseTopologicalOrder installPlan - , let uid = installedUnitId elab - Just pkgBuildStatus = Map.lookup uid pkgsBuildStatus - , BuildStatusDownload <- [pkgBuildStatus] - ] - - --- | Check if a package needs downloading, and if so expect to find a download --- in progress in the given 'AsyncFetchMap' and wait on it to finish. --- -waitAsyncPackageDownload :: Verbosity - -> AsyncFetchMap - -> ElaboratedConfiguredPackage - -> IO DownloadedSourceLocation -waitAsyncPackageDownload verbosity downloadMap elab = do - pkgloc <- waitAsyncFetchPackage verbosity downloadMap - (elabPkgSourceLocation elab) - case downloadedSourceLocation pkgloc of - Just loc -> return loc - Nothing -> fail "waitAsyncPackageDownload: unexpected source location" - -data DownloadedSourceLocation = DownloadedTarball FilePath - --TODO: [nice to have] git/darcs repos etc - -downloadedSourceLocation :: PackageLocation FilePath - -> Maybe DownloadedSourceLocation -downloadedSourceLocation pkgloc = - case pkgloc of - RemoteTarballPackage _ tarball -> Just (DownloadedTarball tarball) - RepoTarballPackage _ _ tarball -> Just (DownloadedTarball tarball) - _ -> Nothing - - - - --- | Ensure that the package is unpacked in an appropriate directory, either --- a temporary one or a persistent one under the shared dist directory. 
--- -withTarballLocalDirectory - :: Verbosity - -> DistDirLayout - -> FilePath - -> PackageId - -> DistDirParams - -> BuildStyle - -> Maybe CabalFileText - -> (FilePath -> -- Source directory - FilePath -> -- Build directory - IO a) - -> IO a -withTarballLocalDirectory verbosity distDirLayout@DistDirLayout{..} - tarball pkgid dparams buildstyle pkgTextOverride - buildPkg = - case buildstyle of - -- In this case we make a temp dir (e.g. tmp/src2345/), unpack - -- the tarball to it (e.g. tmp/src2345/foo-1.0/), and for - -- compatibility we put the dist dir within it - -- (i.e. tmp/src2345/foo-1.0/dist/). - -- - -- Unfortunately, a few custom Setup.hs scripts do not respect - -- the --builddir flag and always look for it at ./dist/ so - -- this way we avoid breaking those packages - BuildAndInstall -> - let tmpdir = distTempDirectory in - withTempDirectory verbosity tmpdir "src" $ \unpackdir -> do - unpackPackageTarball verbosity tarball unpackdir - pkgid pkgTextOverride - let srcdir = unpackdir display pkgid - builddir = srcdir "dist" - buildPkg srcdir builddir - - -- In this case we make sure the tarball has been unpacked to the - -- appropriate location under the shared dist dir, and then build it - -- inplace there - BuildInplaceOnly -> do - let srcrootdir = distUnpackedSrcRootDirectory - srcdir = distUnpackedSrcDirectory pkgid - builddir = distBuildDirectory dparams - -- TODO: [nice to have] use a proper file monitor rather than this dir exists test - exists <- doesDirectoryExist srcdir - unless exists $ do - createDirectoryIfMissingVerbose verbosity True srcrootdir - unpackPackageTarball verbosity tarball srcrootdir - pkgid pkgTextOverride - moveTarballShippedDistDirectory verbosity distDirLayout - srcrootdir pkgid dparams - buildPkg srcdir builddir - - -unpackPackageTarball :: Verbosity -> FilePath -> FilePath - -> PackageId -> Maybe CabalFileText - -> IO () -unpackPackageTarball verbosity tarball parentdir pkgid pkgTextOverride = - --TODO: [nice to have] 
switch to tar package and catch tar exceptions - annotateFailureNoLog UnpackFailed $ do - - -- Unpack the tarball - -- - info verbosity $ "Extracting " ++ tarball ++ " to " ++ parentdir ++ "..." - Tar.extractTarGzFile parentdir pkgsubdir tarball - - -- Sanity check - -- - exists <- doesFileExist cabalFile - unless exists $ - die' verbosity $ "Package .cabal file not found in the tarball: " ++ cabalFile - - -- Overwrite the .cabal with the one from the index, when appropriate - -- - case pkgTextOverride of - Nothing -> return () - Just pkgtxt -> do - info verbosity $ "Updating " ++ display pkgname <.> "cabal" - ++ " with the latest revision from the index." - writeFileAtomic cabalFile pkgtxt - - where - cabalFile = parentdir pkgsubdir - display pkgname <.> "cabal" - pkgsubdir = display pkgid - pkgname = packageName pkgid - - --- | This is a bit of a hacky workaround. A number of packages ship --- pre-processed .hs files in a dist directory inside the tarball. We don't --- use the standard 'dist' location so unless we move this dist dir to the --- right place then we'll miss the shipped pre-procssed files. This hacky --- approach to shipped pre-procssed files ought to be replaced by a proper --- system, though we'll still need to keep this hack for older packages. 
--- -moveTarballShippedDistDirectory :: Verbosity -> DistDirLayout - -> FilePath -> PackageId -> DistDirParams -> IO () -moveTarballShippedDistDirectory verbosity DistDirLayout{distBuildDirectory} - parentdir pkgid dparams = do - distDirExists <- doesDirectoryExist tarballDistDir - when distDirExists $ do - debug verbosity $ "Moving '" ++ tarballDistDir ++ "' to '" - ++ targetDistDir ++ "'" - --TODO: [nice to have] or perhaps better to copy, and use a file monitor - renameDirectory tarballDistDir targetDistDir - where - tarballDistDir = parentdir display pkgid "dist" - targetDistDir = distBuildDirectory dparams - - -buildAndInstallUnpackedPackage :: Verbosity - -> DistDirLayout - -> StoreDirLayout - -> BuildTimeSettings -> Lock -> Lock - -> ElaboratedSharedConfig - -> ElaboratedInstallPlan - -> ElaboratedReadyPackage - -> FilePath -> FilePath - -> IO BuildResult -buildAndInstallUnpackedPackage verbosity - distDirLayout@DistDirLayout{distTempDirectory} - storeDirLayout@StoreDirLayout { - storePackageDBStack - } - BuildTimeSettings { - buildSettingNumJobs, - buildSettingLogFile - } - registerLock cacheLock - pkgshared@ElaboratedSharedConfig { - pkgConfigPlatform = platform, - pkgConfigCompiler = compiler, - pkgConfigCompilerProgs = progdb - } - plan rpkg@(ReadyPackage pkg) - srcdir builddir = do - - createDirectoryIfMissingVerbose verbosity True builddir - initLogFile - - --TODO: [code cleanup] deal consistently with talking to older Setup.hs versions, much like - -- we do for ghc, with a proper options type and rendering step - -- which will also let us call directly into the lib, rather than always - -- going via the lib's command line interface, which would also allow - -- passing data like installed packages, compiler, and program db for a - -- quicker configure. 
- - --TODO: [required feature] docs and tests - --TODO: [required feature] sudo re-exec - - -- Configure phase - noticeProgress ProgressStarting - - annotateFailure mlogFile ConfigureFailed $ - setup' configureCommand configureFlags configureArgs - - -- Build phase - noticeProgress ProgressBuilding - - annotateFailure mlogFile BuildFailed $ - setup buildCommand buildFlags - - -- Haddock phase - whenHaddock $ do - noticeProgress ProgressHaddock - annotateFailureNoLog HaddocksFailed $ - setup haddockCommand haddockFlags - - -- Install phase - noticeProgress ProgressInstalling - annotateFailure mlogFile InstallFailed $ do - - let copyPkgFiles tmpDir = do - setup Cabal.copyCommand (copyFlags tmpDir) - -- Note that the copy command has put the files into - -- @$tmpDir/$prefix@ so we need to return this dir so - -- the store knows which dir will be the final store entry. - let prefix = dropDrive (InstallDirs.prefix (elabInstallDirs pkg)) - entryDir = tmpDir prefix - LBS.writeFile - (entryDir "cabal-hash.txt") - (renderPackageHashInputs (packageHashInputs pkgshared pkg)) - - -- Ensure that there are no files in `tmpDir`, that are not in `entryDir` - -- While this breaks the prefix-relocatable property of the lirbaries - -- it is necessary on macOS to stay under the load command limit of the - -- macOS mach-o linker. See also @PackageHash.hashedInstalledPackageIdVeryShort@. - otherFiles <- filter (not . 
isPrefixOf entryDir) <$> listFilesRecursive tmpDir - -- here's where we could keep track of the installed files ourselves - -- if we wanted to by making a manifest of the files in the tmp dir - return (entryDir, otherFiles) - where - listFilesRecursive :: FilePath -> IO [FilePath] - listFilesRecursive path = do - files <- fmap (path ) <$> (listDirectory path) - allFiles <- forM files $ \file -> do - isDir <- doesDirectoryExist file - if isDir - then listFilesRecursive file - else return [file] - return (concat allFiles) - - registerPkg - | not (elabRequiresRegistration pkg) = - debug verbosity $ - "registerPkg: elab does NOT require registration for " ++ display uid - | otherwise = do - -- We register ourselves rather than via Setup.hs. We need to - -- grab and modify the InstalledPackageInfo. We decide what - -- the installed package id is, not the build system. - ipkg0 <- generateInstalledPackageInfo - let ipkg = ipkg0 { Installed.installedUnitId = uid } - assert ( elabRegisterPackageDBStack pkg - == storePackageDBStack compid) (return ()) - criticalSection registerLock $ - Cabal.registerPackage - verbosity compiler progdb - (storePackageDBStack compid) ipkg - Cabal.defaultRegisterOptions { - Cabal.registerMultiInstance = True, - Cabal.registerSuppressFilesCheck = True - } - - - -- Actual installation - void $ newStoreEntry verbosity storeDirLayout - compid uid - copyPkgFiles registerPkg - - --TODO: [nice to have] we currently rely on Setup.hs copy to do the right - -- thing. Although we do copy into an image dir and do the move into the - -- final location ourselves, perhaps we ought to do some sanity checks on - -- the image dir first. - - -- TODO: [required eventually] note that for nix-style installations it is not necessary to do - -- the 'withWin32SelfUpgrade' dance, but it would be necessary for a - -- shared bin dir. 
- - --TODO: [required feature] docs and test phases - let docsResult = DocsNotTried - testsResult = TestsNotTried - - noticeProgress ProgressCompleted - - return BuildResult { - buildResultDocs = docsResult, - buildResultTests = testsResult, - buildResultLogFile = mlogFile - } - - where - pkgid = packageId rpkg - uid = installedUnitId rpkg - compid = compilerId compiler - - dispname = case elabPkgOrComp pkg of - ElabPackage _ -> display pkgid - ++ " (all, legacy fallback)" - ElabComponent comp -> display pkgid - ++ " (" ++ maybe "custom" display (compComponentName comp) ++ ")" - - noticeProgress phase = when isParallelBuild $ - progressMessage verbosity phase dispname - - isParallelBuild = buildSettingNumJobs >= 2 - - whenHaddock action - | hasValidHaddockTargets pkg = action - | otherwise = return () - - configureCommand = Cabal.configureCommand defaultProgramDb - configureFlags v = flip filterConfigureFlags v $ - setupHsConfigureFlags rpkg pkgshared - verbosity builddir - configureArgs _ = setupHsConfigureArgs pkg - - buildCommand = Cabal.buildCommand defaultProgramDb - buildFlags _ = setupHsBuildFlags pkg pkgshared verbosity builddir - - haddockCommand = Cabal.haddockCommand - haddockFlags _ = setupHsHaddockFlags pkg pkgshared - verbosity builddir - - generateInstalledPackageInfo :: IO InstalledPackageInfo - generateInstalledPackageInfo = - withTempInstalledPackageInfoFile - verbosity distTempDirectory $ \pkgConfDest -> do - let registerFlags _ = setupHsRegisterFlags - pkg pkgshared - verbosity builddir - pkgConfDest - setup Cabal.registerCommand registerFlags - - copyFlags destdir _ = setupHsCopyFlags pkg pkgshared verbosity - builddir destdir - - scriptOptions = setupHsScriptOptions rpkg plan pkgshared - distDirLayout srcdir builddir - isParallelBuild cacheLock - - setup :: CommandUI flags -> (Version -> flags) -> IO () - setup cmd flags = setup' cmd flags (const []) - - setup' :: CommandUI flags -> (Version -> flags) -> (Version -> [String]) -> IO () - setup' 
cmd flags args = - withLogging $ \mLogFileHandle -> - setupWrapper - verbosity - scriptOptions - { useLoggingHandle = mLogFileHandle - , useExtraEnvOverrides = dataDirsEnvironmentForPlan distDirLayout plan } - (Just (elabPkgDescription pkg)) - cmd flags args - - mlogFile :: Maybe FilePath - mlogFile = - case buildSettingLogFile of - Nothing -> Nothing - Just mkLogFile -> Just (mkLogFile compiler platform pkgid uid) - - initLogFile = - case mlogFile of - Nothing -> return () - Just logFile -> do - createDirectoryIfMissing True (takeDirectory logFile) - exists <- doesFileExist logFile - when exists $ removeFile logFile - - withLogging action = - case mlogFile of - Nothing -> action Nothing - Just logFile -> withFile logFile AppendMode (action . Just) - - -hasValidHaddockTargets :: ElaboratedConfiguredPackage -> Bool -hasValidHaddockTargets ElaboratedConfiguredPackage{..} - | not elabBuildHaddocks = False - | otherwise = any componentHasHaddocks components - where - components = elabBuildTargets ++ elabTestTargets ++ elabBenchTargets - ++ maybeToList elabReplTarget ++ elabHaddockTargets - - componentHasHaddocks :: ComponentTarget -> Bool - componentHasHaddocks (ComponentTarget name _) = - case name of - CLibName -> hasHaddocks - CSubLibName _ -> elabHaddockInternal && hasHaddocks - CFLibName _ -> elabHaddockForeignLibs && hasHaddocks - CExeName _ -> elabHaddockExecutables && hasHaddocks - CTestName _ -> elabHaddockTestSuites && hasHaddocks - CBenchName _ -> elabHaddockBenchmarks && hasHaddocks - where - hasHaddocks = not (null (elabPkgDescription ^. 
componentModules name)) - - -buildInplaceUnpackedPackage :: Verbosity - -> DistDirLayout - -> BuildTimeSettings -> Lock -> Lock - -> ElaboratedSharedConfig - -> ElaboratedInstallPlan - -> ElaboratedReadyPackage - -> BuildStatusRebuild - -> FilePath -> FilePath - -> IO BuildResult -buildInplaceUnpackedPackage verbosity - distDirLayout@DistDirLayout { - distTempDirectory, - distPackageCacheDirectory, - distDirectory - } - BuildTimeSettings{buildSettingNumJobs} - registerLock cacheLock - pkgshared@ElaboratedSharedConfig { - pkgConfigCompiler = compiler, - pkgConfigCompilerProgs = progdb - } - plan - rpkg@(ReadyPackage pkg) - buildStatus - srcdir builddir = do - - --TODO: [code cleanup] there is duplication between the distdirlayout and the builddir here - -- builddir is not enough, we also need the per-package cachedir - createDirectoryIfMissingVerbose verbosity True builddir - createDirectoryIfMissingVerbose verbosity True (distPackageCacheDirectory dparams) - - -- Configure phase - -- - whenReConfigure $ do - annotateFailureNoLog ConfigureFailed $ - setup configureCommand configureFlags configureArgs - invalidatePackageRegFileMonitor packageFileMonitor - updatePackageConfigFileMonitor packageFileMonitor srcdir pkg - - -- Build phase - -- - let docsResult = DocsNotTried - testsResult = TestsNotTried - - buildResult :: BuildResultMisc - buildResult = (docsResult, testsResult) - - whenRebuild $ do - timestamp <- beginUpdateFileMonitor - annotateFailureNoLog BuildFailed $ - setup buildCommand buildFlags buildArgs - - let listSimple = - execRebuild srcdir (needElaboratedConfiguredPackage pkg) - listSdist = - fmap (map monitorFileHashed) $ - allPackageSourceFiles verbosity scriptOptions srcdir - ifNullThen m m' = do xs <- m - if null xs then m' else return xs - monitors <- case PD.buildType (elabPkgDescription pkg) of - Simple -> listSimple - -- If a Custom setup was used, AND the Cabal is recent - -- enough to have sdist --list-sources, use that to - -- determine the 
files that we need to track. This can - -- cause unnecessary rebuilding (for example, if README - -- is edited, we will try to rebuild) but there isn't - -- a more accurate Custom interface we can use to get - -- this info. We prefer not to use listSimple here - -- as it can miss extra source files that are considered - -- by the Custom setup. - _ | elabSetupScriptCliVersion pkg >= mkVersion [1,17] - -- However, sometimes sdist --list-sources will fail - -- and return an empty list. In that case, fall - -- back on the (inaccurate) simple tracking. - -> listSdist `ifNullThen` listSimple - | otherwise - -> listSimple - - let dep_monitors = map monitorFileHashed - $ elabInplaceDependencyBuildCacheFiles - distDirLayout pkgshared plan pkg - updatePackageBuildFileMonitor packageFileMonitor srcdir timestamp - pkg buildStatus - (monitors ++ dep_monitors) buildResult - - -- PURPOSELY omitted: no copy! - - whenReRegister $ annotateFailureNoLog InstallFailed $ do - -- Register locally - mipkg <- if elabRequiresRegistration pkg - then do - ipkg0 <- generateInstalledPackageInfo - -- We register ourselves rather than via Setup.hs. We need to - -- grab and modify the InstalledPackageInfo. We decide what - -- the installed package id is, not the build system. 
- let ipkg = ipkg0 { Installed.installedUnitId = ipkgid } - criticalSection registerLock $ - Cabal.registerPackage verbosity compiler progdb - (elabRegisterPackageDBStack pkg) - ipkg Cabal.defaultRegisterOptions - return (Just ipkg) - - else return Nothing - - updatePackageRegFileMonitor packageFileMonitor srcdir mipkg - - whenTest $ do - annotateFailureNoLog TestsFailed $ - setup testCommand testFlags testArgs - - whenBench $ - annotateFailureNoLog BenchFailed $ - setup benchCommand benchFlags benchArgs - - -- Repl phase - -- - whenRepl $ - annotateFailureNoLog ReplFailed $ - setupInteractive replCommand replFlags replArgs - - -- Haddock phase - whenHaddock $ - annotateFailureNoLog HaddocksFailed $ do - setup haddockCommand haddockFlags haddockArgs - let haddockTarget = elabHaddockForHackage pkg - when (haddockTarget == Cabal.ForHackage) $ do - let dest = distDirectory name <.> "tar.gz" - name = haddockDirName haddockTarget (elabPkgDescription pkg) - docDir = distBuildDirectory distDirLayout dparams "doc" "html" - Tar.createTarGzFile dest docDir name - notice verbosity $ "Documentation tarball created: " ++ dest - - return BuildResult { - buildResultDocs = docsResult, - buildResultTests = testsResult, - buildResultLogFile = Nothing - } - - where - ipkgid = installedUnitId pkg - dparams = elabDistDirParams pkgshared pkg - - isParallelBuild = buildSettingNumJobs >= 2 - - packageFileMonitor = newPackageFileMonitor pkgshared distDirLayout dparams - - whenReConfigure action = case buildStatus of - BuildStatusConfigure _ -> action - _ -> return () - - whenRebuild action - | null (elabBuildTargets pkg) - -- NB: we have to build the test/bench suite! 
- , null (elabTestTargets pkg) - , null (elabBenchTargets pkg) = return () - | otherwise = action - - whenTest action - | null (elabTestTargets pkg) = return () - | otherwise = action - - whenBench action - | null (elabBenchTargets pkg) = return () - | otherwise = action - - whenRepl action - | isNothing (elabReplTarget pkg) = return () - | otherwise = action - - whenHaddock action - | hasValidHaddockTargets pkg = action - | otherwise = return () - - whenReRegister action - = case buildStatus of - -- We registered the package already - BuildStatusBuild (Just _) _ -> info verbosity "whenReRegister: previously registered" - -- There is nothing to register - _ | null (elabBuildTargets pkg) -> info verbosity "whenReRegister: nothing to register" - | otherwise -> action - - configureCommand = Cabal.configureCommand defaultProgramDb - configureFlags v = flip filterConfigureFlags v $ - setupHsConfigureFlags rpkg pkgshared - verbosity builddir - configureArgs _ = setupHsConfigureArgs pkg - - buildCommand = Cabal.buildCommand defaultProgramDb - buildFlags _ = setupHsBuildFlags pkg pkgshared - verbosity builddir - buildArgs _ = setupHsBuildArgs pkg - - testCommand = Cabal.testCommand -- defaultProgramDb - testFlags _ = setupHsTestFlags pkg pkgshared - verbosity builddir - testArgs _ = setupHsTestArgs pkg - - benchCommand = Cabal.benchmarkCommand - benchFlags _ = setupHsBenchFlags pkg pkgshared - verbosity builddir - benchArgs _ = setupHsBenchArgs pkg - - replCommand = Cabal.replCommand defaultProgramDb - replFlags _ = setupHsReplFlags pkg pkgshared - verbosity builddir - replArgs _ = setupHsReplArgs pkg - - haddockCommand = Cabal.haddockCommand - haddockFlags v = flip filterHaddockFlags v $ - setupHsHaddockFlags pkg pkgshared - verbosity builddir - haddockArgs v = flip filterHaddockArgs v $ - setupHsHaddockArgs pkg - - scriptOptions = setupHsScriptOptions rpkg plan pkgshared - distDirLayout srcdir builddir - isParallelBuild cacheLock - - setupInteractive :: CommandUI flags - 
-> (Version -> flags) -> (Version -> [String]) -> IO () - setupInteractive cmd flags args = - setupWrapper verbosity - scriptOptions { isInteractive = True } - (Just (elabPkgDescription pkg)) - cmd flags args - - setup :: CommandUI flags -> (Version -> flags) -> (Version -> [String]) -> IO () - setup cmd flags args = - setupWrapper verbosity - scriptOptions - (Just (elabPkgDescription pkg)) - cmd flags args - - generateInstalledPackageInfo :: IO InstalledPackageInfo - generateInstalledPackageInfo = - withTempInstalledPackageInfoFile - verbosity distTempDirectory $ \pkgConfDest -> do - let registerFlags _ = setupHsRegisterFlags - pkg pkgshared - verbosity builddir - pkgConfDest - setup Cabal.registerCommand registerFlags (const []) - -withTempInstalledPackageInfoFile :: Verbosity -> FilePath - -> (FilePath -> IO ()) - -> IO InstalledPackageInfo -withTempInstalledPackageInfoFile verbosity tempdir action = - withTempDirectory verbosity tempdir "package-registration-" $ \dir -> do - -- make absolute since @action@ will often change directory - abs_dir <- canonicalizePath dir - - let pkgConfDest = abs_dir "pkgConf" - action pkgConfDest - - readPkgConf "." 
pkgConfDest - where - pkgConfParseFailed :: Installed.PError -> IO a - pkgConfParseFailed perror = - die' verbosity $ "Couldn't parse the output of 'setup register --gen-pkg-config':" - ++ show perror - - readPkgConf pkgConfDir pkgConfFile = do - (warns, ipkg) <- withUTF8FileContents (pkgConfDir pkgConfFile) $ \pkgConfStr -> - case Installed.parseInstalledPackageInfo pkgConfStr of - Installed.ParseFailed perror -> pkgConfParseFailed perror - Installed.ParseOk warns ipkg -> return (warns, ipkg) - - unless (null warns) $ - warn verbosity $ unlines (map (showPWarning pkgConfFile) warns) - - return ipkg - - ------------------------------------------------------------------------------- --- * Utilities ------------------------------------------------------------------------------- - -annotateFailureNoLog :: (SomeException -> BuildFailureReason) - -> IO a -> IO a -annotateFailureNoLog annotate action = - annotateFailure Nothing annotate action - -annotateFailure :: Maybe FilePath - -> (SomeException -> BuildFailureReason) - -> IO a -> IO a -annotateFailure mlogFile annotate action = - action `catches` - -- It's not just IOException and ExitCode we have to deal with, there's - -- lots, including exceptions from the hackage-security and tar packages. - -- So we take the strategy of catching everything except async exceptions. - [ -#if MIN_VERSION_base(4,7,0) - Handler $ \async -> throwIO (async :: SomeAsyncException) -#else - Handler $ \async -> throwIO (async :: AsyncException) -#endif - , Handler $ \other -> handler (other :: SomeException) - ] - where - handler :: Exception e => e -> IO a - handler = throwIO . BuildFailure mlogFile . annotate . 
toException diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectConfig/Legacy.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectConfig/Legacy.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectConfig/Legacy.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectConfig/Legacy.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1370 +0,0 @@ -{-# LANGUAGE RecordWildCards, NamedFieldPuns, DeriveGeneric #-} - --- | Project configuration, implementation in terms of legacy types. --- -module Distribution.Client.ProjectConfig.Legacy ( - - -- * Project config in terms of legacy types - LegacyProjectConfig, - parseLegacyProjectConfig, - showLegacyProjectConfig, - - -- * Conversion to and from legacy config types - commandLineFlagsToProjectConfig, - convertLegacyProjectConfig, - convertLegacyGlobalConfig, - convertToLegacyProjectConfig, - - -- * Internals, just for tests - parsePackageLocationTokenQ, - renderPackageLocationToken, - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.ProjectConfig.Types -import Distribution.Client.Types - ( RemoteRepo(..), emptyRemoteRepo - , AllowNewer(..), AllowOlder(..) ) - -import Distribution.Client.Config - ( SavedConfig(..), remoteRepoFields ) - -import Distribution.Solver.Types.ConstraintSource - -import Distribution.Package -import Distribution.PackageDescription - ( SourceRepo(..), RepoKind(..) - , dispFlagAssignment, parseFlagAssignment ) -import Distribution.Client.SourceRepoParse - ( sourceRepoFieldDescrs ) -import Distribution.Simple.Compiler - ( OptimisationLevel(..), DebugInfoLevel(..) 
) -import Distribution.Simple.InstallDirs ( CopyDest (NoCopyDest) ) -import Distribution.Simple.Setup - ( Flag(Flag), toFlag, fromFlagOrDefault - , ConfigFlags(..), configureOptions - , HaddockFlags(..), haddockOptions, defaultHaddockFlags - , programDbPaths', splitArgs - ) -import Distribution.Client.Setup - ( GlobalFlags(..), globalCommand - , ConfigExFlags(..), configureExOptions, defaultConfigExFlags - , InstallFlags(..), installOptions, defaultInstallFlags ) -import Distribution.Simple.Program - ( programName, knownPrograms ) -import Distribution.Simple.Program.Db - ( ProgramDb, defaultProgramDb ) -import Distribution.Simple.Utils - ( lowercase ) -import Distribution.Utils.NubList - ( toNubList, fromNubList, overNubList ) -import Distribution.Simple.LocalBuildInfo - ( toPathTemplate, fromPathTemplate ) - -import Distribution.Text -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.ReadP - ( ReadP, (+++), (<++) ) -import qualified Text.Read as Read -import qualified Text.PrettyPrint as Disp -import Text.PrettyPrint - ( Doc, ($+$) ) -import qualified Distribution.ParseUtils as ParseUtils (field) -import Distribution.ParseUtils - ( ParseResult(..), PError(..), syntaxError, PWarning(..), warning - , simpleField, commaNewLineListField - , showToken ) -import Distribution.Client.ParseUtils -import Distribution.Simple.Command - ( CommandUI(commandOptions), ShowOrParseArgs(..) - , OptionField, option, reqArg' ) - -import qualified Data.Map as Map ------------------------------------------------------------------- --- Representing the project config file in terms of legacy types --- - --- | We already have parsers\/pretty-printers for almost all the fields in the --- project config file, but they're in terms of the types used for the command --- line flags for Setup.hs or cabal commands. We don't want to redefine them --- all, at least not yet so for the moment we use the parsers at the old types --- and use conversion functions. 
--- --- Ultimately if\/when this project-based approach becomes the default then we --- can redefine the parsers directly for the new types. --- -data LegacyProjectConfig = LegacyProjectConfig { - legacyPackages :: [String], - legacyPackagesOptional :: [String], - legacyPackagesRepo :: [SourceRepo], - legacyPackagesNamed :: [Dependency], - - legacySharedConfig :: LegacySharedConfig, - legacyAllConfig :: LegacyPackageConfig, - legacyLocalConfig :: LegacyPackageConfig, - legacySpecificConfig :: MapMappend PackageName LegacyPackageConfig - } deriving Generic - -instance Monoid LegacyProjectConfig where - mempty = gmempty - mappend = (<>) - -instance Semigroup LegacyProjectConfig where - (<>) = gmappend - -data LegacyPackageConfig = LegacyPackageConfig { - legacyConfigureFlags :: ConfigFlags, - legacyInstallPkgFlags :: InstallFlags, - legacyHaddockFlags :: HaddockFlags - } deriving Generic - -instance Monoid LegacyPackageConfig where - mempty = gmempty - mappend = (<>) - -instance Semigroup LegacyPackageConfig where - (<>) = gmappend - -data LegacySharedConfig = LegacySharedConfig { - legacyGlobalFlags :: GlobalFlags, - legacyConfigureShFlags :: ConfigFlags, - legacyConfigureExFlags :: ConfigExFlags, - legacyInstallFlags :: InstallFlags - } deriving Generic - -instance Monoid LegacySharedConfig where - mempty = gmempty - mappend = (<>) - -instance Semigroup LegacySharedConfig where - (<>) = gmappend - - ------------------------------------------------------------------- --- Converting from and to the legacy types --- - --- | Convert configuration from the @cabal configure@ or @cabal build@ command --- line into a 'ProjectConfig' value that can combined with configuration from --- other sources. --- --- At the moment this uses the legacy command line flag types. See --- 'LegacyProjectConfig' for an explanation. 
--- -commandLineFlagsToProjectConfig :: GlobalFlags - -> ConfigFlags -> ConfigExFlags - -> InstallFlags -> HaddockFlags - -> ProjectConfig -commandLineFlagsToProjectConfig globalFlags configFlags configExFlags - installFlags haddockFlags = - mempty { - projectConfigBuildOnly = convertLegacyBuildOnlyFlags - globalFlags configFlags - installFlags haddockFlags, - projectConfigShared = convertLegacyAllPackageFlags - globalFlags configFlags - configExFlags installFlags, - projectConfigLocalPackages = localConfig, - projectConfigAllPackages = allConfig - } - where (localConfig, allConfig) = splitConfig - (convertLegacyPerPackageFlags - configFlags installFlags haddockFlags) - -- split the package config (from command line arguments) into - -- those applied to all packages and those to local only. - -- - -- for now we will just copy over the ProgramPaths/Args/Extra into - -- the AllPackages. The LocalPackages do not inherit them from - -- AllPackages, and as such need to retain them. - -- - -- The general decision rule for what to put into allConfig - -- into localConfig is the following: - -- - -- - anything that is host/toolchain/env specific should be applied - -- to all packages, as packagesets have to be host/toolchain/env - -- consistent. - -- - anything else should be in the local config and could potentially - -- be lifted into all-packages vial the `package *` cabal.project - -- section. - -- - splitConfig :: PackageConfig -> (PackageConfig, PackageConfig) - splitConfig pc = (pc - , mempty { packageConfigProgramPaths = packageConfigProgramPaths pc - , packageConfigProgramArgs = packageConfigProgramArgs pc - , packageConfigProgramPathExtra = packageConfigProgramPathExtra pc - , packageConfigDocumentation = packageConfigDocumentation pc }) - --- | Convert from the types currently used for the user-wide @~/.cabal/config@ --- file into the 'ProjectConfig' type. --- --- Only a subset of the 'ProjectConfig' can be represented in the user-wide --- config. 
In particular it does not include packages that are in the project, --- and it also doesn't support package-specific configuration (only --- configuration that applies to all packages). --- -convertLegacyGlobalConfig :: SavedConfig -> ProjectConfig -convertLegacyGlobalConfig - SavedConfig { - savedGlobalFlags = globalFlags, - savedInstallFlags = installFlags, - savedConfigureFlags = configFlags, - savedConfigureExFlags = configExFlags, - savedUserInstallDirs = _, - savedGlobalInstallDirs = _, - savedUploadFlags = _, - savedReportFlags = _, - savedHaddockFlags = haddockFlags - } = - mempty { - projectConfigBuildOnly = configBuildOnly, - projectConfigShared = configShared, - projectConfigAllPackages = configAllPackages - } - where - --TODO: [code cleanup] eliminate use of default*Flags here and specify the - -- defaults in the various resolve functions in terms of the new types. - configExFlags' = defaultConfigExFlags <> configExFlags - installFlags' = defaultInstallFlags <> installFlags - haddockFlags' = defaultHaddockFlags <> haddockFlags - - configAllPackages = convertLegacyPerPackageFlags - configFlags installFlags' haddockFlags' - configShared = convertLegacyAllPackageFlags - globalFlags configFlags - configExFlags' installFlags' - configBuildOnly = convertLegacyBuildOnlyFlags - globalFlags configFlags - installFlags' haddockFlags' - - --- | Convert the project config from the legacy types to the 'ProjectConfig' --- and associated types. See 'LegacyProjectConfig' for an explanation of the --- approach. 
--- -convertLegacyProjectConfig :: LegacyProjectConfig -> ProjectConfig -convertLegacyProjectConfig - LegacyProjectConfig { - legacyPackages, - legacyPackagesOptional, - legacyPackagesRepo, - legacyPackagesNamed, - legacySharedConfig = LegacySharedConfig globalFlags configShFlags - configExFlags installSharedFlags, - legacyAllConfig, - legacyLocalConfig = LegacyPackageConfig configFlags installPerPkgFlags - haddockFlags, - legacySpecificConfig - } = - - ProjectConfig { - projectPackages = legacyPackages, - projectPackagesOptional = legacyPackagesOptional, - projectPackagesRepo = legacyPackagesRepo, - projectPackagesNamed = legacyPackagesNamed, - - projectConfigBuildOnly = configBuildOnly, - projectConfigShared = configPackagesShared, - projectConfigProvenance = mempty, - projectConfigAllPackages = configAllPackages, - projectConfigLocalPackages = configLocalPackages, - projectConfigSpecificPackage = fmap perPackage legacySpecificConfig - } - where - configAllPackages = convertLegacyPerPackageFlags g i h - where LegacyPackageConfig g i h = legacyAllConfig - configLocalPackages = convertLegacyPerPackageFlags - configFlags installPerPkgFlags haddockFlags - configPackagesShared= convertLegacyAllPackageFlags - globalFlags (configFlags <> configShFlags) - configExFlags installSharedFlags - configBuildOnly = convertLegacyBuildOnlyFlags - globalFlags configShFlags - installSharedFlags haddockFlags - - perPackage (LegacyPackageConfig perPkgConfigFlags perPkgInstallFlags - perPkgHaddockFlags) = - convertLegacyPerPackageFlags - perPkgConfigFlags perPkgInstallFlags perPkgHaddockFlags - - --- | Helper used by other conversion functions that returns the --- 'ProjectConfigShared' subset of the 'ProjectConfig'. 
--- -convertLegacyAllPackageFlags :: GlobalFlags -> ConfigFlags - -> ConfigExFlags -> InstallFlags - -> ProjectConfigShared -convertLegacyAllPackageFlags globalFlags configFlags - configExFlags installFlags = - ProjectConfigShared{..} - where - GlobalFlags { - globalConfigFile = projectConfigConfigFile, - globalSandboxConfigFile = _, -- ?? - globalRemoteRepos = projectConfigRemoteRepos, - globalLocalRepos = projectConfigLocalRepos, - globalProgPathExtra = projectConfigProgPathExtra, - globalStoreDir = projectConfigStoreDir - } = globalFlags - - ConfigFlags { - configDistPref = projectConfigDistDir, - configHcFlavor = projectConfigHcFlavor, - configHcPath = projectConfigHcPath, - configHcPkg = projectConfigHcPkg - --configProgramPathExtra = projectConfigProgPathExtra DELETE ME - --configInstallDirs = projectConfigInstallDirs, - --configUserInstall = projectConfigUserInstall, - --configPackageDBs = projectConfigPackageDBs, - } = configFlags - - ConfigExFlags { - configCabalVersion = projectConfigCabalVersion, - configExConstraints = projectConfigConstraints, - configPreferences = projectConfigPreferences, - configSolver = projectConfigSolver, - configAllowOlder = projectConfigAllowOlder, - configAllowNewer = projectConfigAllowNewer - } = configExFlags - - InstallFlags { - installProjectFileName = projectConfigProjectFile, - installHaddockIndex = projectConfigHaddockIndex, - --installReinstall = projectConfigReinstall, - --installAvoidReinstalls = projectConfigAvoidReinstalls, - --installOverrideReinstall = projectConfigOverrideReinstall, - installIndexState = projectConfigIndexState, - installMaxBackjumps = projectConfigMaxBackjumps, - --installUpgradeDeps = projectConfigUpgradeDeps, - installReorderGoals = projectConfigReorderGoals, - installCountConflicts = projectConfigCountConflicts, - installPerComponent = projectConfigPerComponent, - installIndependentGoals = projectConfigIndependentGoals, - --installShadowPkgs = projectConfigShadowPkgs, - installStrongFlags = 
projectConfigStrongFlags, - installAllowBootLibInstalls = projectConfigAllowBootLibInstalls - } = installFlags - - - --- | Helper used by other conversion functions that returns the --- 'PackageConfig' subset of the 'ProjectConfig'. --- -convertLegacyPerPackageFlags :: ConfigFlags -> InstallFlags -> HaddockFlags - -> PackageConfig -convertLegacyPerPackageFlags configFlags installFlags haddockFlags = - PackageConfig{..} - where - ConfigFlags { - configProgramPaths, - configProgramArgs, - configProgramPathExtra = packageConfigProgramPathExtra, - configVanillaLib = packageConfigVanillaLib, - configProfLib = packageConfigProfLib, - configSharedLib = packageConfigSharedLib, - configStaticLib = packageConfigStaticLib, - configDynExe = packageConfigDynExe, - configProfExe = packageConfigProfExe, - configProf = packageConfigProf, - configProfDetail = packageConfigProfDetail, - configProfLibDetail = packageConfigProfLibDetail, - configConfigureArgs = packageConfigConfigureArgs, - configOptimization = packageConfigOptimization, - configProgPrefix = packageConfigProgPrefix, - configProgSuffix = packageConfigProgSuffix, - configGHCiLib = packageConfigGHCiLib, - configSplitSections = packageConfigSplitSections, - configSplitObjs = packageConfigSplitObjs, - configStripExes = packageConfigStripExes, - configStripLibs = packageConfigStripLibs, - configExtraLibDirs = packageConfigExtraLibDirs, - configExtraFrameworkDirs = packageConfigExtraFrameworkDirs, - configExtraIncludeDirs = packageConfigExtraIncludeDirs, - configConfigurationsFlags = packageConfigFlagAssignment, - configTests = packageConfigTests, - configBenchmarks = packageConfigBenchmarks, - configCoverage = coverage, - configLibCoverage = libcoverage, --deprecated - configDebugInfo = packageConfigDebugInfo, - configRelocatable = packageConfigRelocatable - } = configFlags - packageConfigProgramPaths = MapLast (Map.fromList configProgramPaths) - packageConfigProgramArgs = MapMappend (Map.fromListWith (++) 
configProgramArgs) - - packageConfigCoverage = coverage <> libcoverage - --TODO: defer this merging to the resolve phase - - InstallFlags { - installDocumentation = packageConfigDocumentation, - installRunTests = packageConfigRunTests - } = installFlags - - HaddockFlags { - haddockHoogle = packageConfigHaddockHoogle, - haddockHtml = packageConfigHaddockHtml, - haddockHtmlLocation = packageConfigHaddockHtmlLocation, - haddockForeignLibs = packageConfigHaddockForeignLibs, - haddockForHackage = packageConfigHaddockForHackage, - haddockExecutables = packageConfigHaddockExecutables, - haddockTestSuites = packageConfigHaddockTestSuites, - haddockBenchmarks = packageConfigHaddockBenchmarks, - haddockInternal = packageConfigHaddockInternal, - haddockCss = packageConfigHaddockCss, - haddockLinkedSource = packageConfigHaddockLinkedSource, - haddockQuickJump = packageConfigHaddockQuickJump, - haddockHscolourCss = packageConfigHaddockHscolourCss, - haddockContents = packageConfigHaddockContents - } = haddockFlags - - - --- | Helper used by other conversion functions that returns the --- 'ProjectConfigBuildOnly' subset of the 'ProjectConfig'. 
--- -convertLegacyBuildOnlyFlags :: GlobalFlags -> ConfigFlags - -> InstallFlags -> HaddockFlags - -> ProjectConfigBuildOnly -convertLegacyBuildOnlyFlags globalFlags configFlags - installFlags haddockFlags = - ProjectConfigBuildOnly{..} - where - GlobalFlags { - globalCacheDir = projectConfigCacheDir, - globalLogsDir = projectConfigLogsDir, - globalWorldFile = _, - globalHttpTransport = projectConfigHttpTransport, - globalIgnoreExpiry = projectConfigIgnoreExpiry - } = globalFlags - - ConfigFlags { - configVerbosity = projectConfigVerbosity - } = configFlags - - InstallFlags { - installDryRun = projectConfigDryRun, - installOnly = _, - installOnlyDeps = projectConfigOnlyDeps, - installRootCmd = _, - installSummaryFile = projectConfigSummaryFile, - installLogFile = projectConfigLogFile, - installBuildReports = projectConfigBuildReports, - installReportPlanningFailure = projectConfigReportPlanningFailure, - installSymlinkBinDir = projectConfigSymlinkBinDir, - installOneShot = projectConfigOneShot, - installNumJobs = projectConfigNumJobs, - installKeepGoing = projectConfigKeepGoing, - installOfflineMode = projectConfigOfflineMode - } = installFlags - - HaddockFlags { - haddockKeepTempFiles = projectConfigKeepTempFiles --TODO: this ought to live elsewhere - } = haddockFlags - - -convertToLegacyProjectConfig :: ProjectConfig -> LegacyProjectConfig -convertToLegacyProjectConfig - projectConfig@ProjectConfig { - projectPackages, - projectPackagesOptional, - projectPackagesRepo, - projectPackagesNamed, - projectConfigAllPackages, - projectConfigLocalPackages, - projectConfigSpecificPackage - } = - LegacyProjectConfig { - legacyPackages = projectPackages, - legacyPackagesOptional = projectPackagesOptional, - legacyPackagesRepo = projectPackagesRepo, - legacyPackagesNamed = projectPackagesNamed, - legacySharedConfig = convertToLegacySharedConfig projectConfig, - legacyAllConfig = convertToLegacyPerPackageConfig - projectConfigAllPackages, - legacyLocalConfig = 
convertToLegacyAllPackageConfig projectConfig - <> convertToLegacyPerPackageConfig - projectConfigLocalPackages, - legacySpecificConfig = fmap convertToLegacyPerPackageConfig - projectConfigSpecificPackage - } - -convertToLegacySharedConfig :: ProjectConfig -> LegacySharedConfig -convertToLegacySharedConfig - ProjectConfig { - projectConfigBuildOnly = ProjectConfigBuildOnly {..}, - projectConfigShared = ProjectConfigShared {..}, - projectConfigAllPackages = PackageConfig { - packageConfigDocumentation - } - } = - - LegacySharedConfig { - legacyGlobalFlags = globalFlags, - legacyConfigureShFlags = configFlags, - legacyConfigureExFlags = configExFlags, - legacyInstallFlags = installFlags - } - where - globalFlags = GlobalFlags { - globalVersion = mempty, - globalNumericVersion = mempty, - globalConfigFile = projectConfigConfigFile, - globalSandboxConfigFile = mempty, - globalConstraintsFile = mempty, - globalRemoteRepos = projectConfigRemoteRepos, - globalCacheDir = projectConfigCacheDir, - globalLocalRepos = projectConfigLocalRepos, - globalLogsDir = projectConfigLogsDir, - globalWorldFile = mempty, - globalRequireSandbox = mempty, - globalIgnoreSandbox = mempty, - globalIgnoreExpiry = projectConfigIgnoreExpiry, - globalHttpTransport = projectConfigHttpTransport, - globalNix = mempty, - globalStoreDir = projectConfigStoreDir, - globalProgPathExtra = projectConfigProgPathExtra - } - - configFlags = mempty { - configVerbosity = projectConfigVerbosity, - configDistPref = projectConfigDistDir - } - - configExFlags = ConfigExFlags { - configCabalVersion = projectConfigCabalVersion, - configExConstraints = projectConfigConstraints, - configPreferences = projectConfigPreferences, - configSolver = projectConfigSolver, - configAllowOlder = projectConfigAllowOlder, - configAllowNewer = projectConfigAllowNewer - - } - - installFlags = InstallFlags { - installDocumentation = packageConfigDocumentation, - installHaddockIndex = projectConfigHaddockIndex, - installDest = Flag 
NoCopyDest, - installDryRun = projectConfigDryRun, - installReinstall = mempty, --projectConfigReinstall, - installAvoidReinstalls = mempty, --projectConfigAvoidReinstalls, - installOverrideReinstall = mempty, --projectConfigOverrideReinstall, - installMaxBackjumps = projectConfigMaxBackjumps, - installUpgradeDeps = mempty, --projectConfigUpgradeDeps, - installReorderGoals = projectConfigReorderGoals, - installCountConflicts = projectConfigCountConflicts, - installIndependentGoals = projectConfigIndependentGoals, - installShadowPkgs = mempty, --projectConfigShadowPkgs, - installStrongFlags = projectConfigStrongFlags, - installAllowBootLibInstalls = projectConfigAllowBootLibInstalls, - installOnly = mempty, - installOnlyDeps = projectConfigOnlyDeps, - installIndexState = projectConfigIndexState, - installRootCmd = mempty, --no longer supported - installSummaryFile = projectConfigSummaryFile, - installLogFile = projectConfigLogFile, - installBuildReports = projectConfigBuildReports, - installReportPlanningFailure = projectConfigReportPlanningFailure, - installSymlinkBinDir = projectConfigSymlinkBinDir, - installPerComponent = projectConfigPerComponent, - installOneShot = projectConfigOneShot, - installNumJobs = projectConfigNumJobs, - installKeepGoing = projectConfigKeepGoing, - installRunTests = mempty, - installOfflineMode = projectConfigOfflineMode, - installProjectFileName = projectConfigProjectFile - } - - -convertToLegacyAllPackageConfig :: ProjectConfig -> LegacyPackageConfig -convertToLegacyAllPackageConfig - ProjectConfig { - projectConfigBuildOnly = ProjectConfigBuildOnly {..}, - projectConfigShared = ProjectConfigShared {..} - } = - - LegacyPackageConfig { - legacyConfigureFlags = configFlags, - legacyInstallPkgFlags= mempty, - legacyHaddockFlags = haddockFlags - } - where - configFlags = ConfigFlags { - configArgs = mempty, - configPrograms_ = mempty, - configProgramPaths = mempty, - configProgramArgs = mempty, - configProgramPathExtra = mempty, - 
configHcFlavor = projectConfigHcFlavor, - configHcPath = projectConfigHcPath, - configHcPkg = projectConfigHcPkg, - configInstantiateWith = mempty, - configVanillaLib = mempty, - configProfLib = mempty, - configSharedLib = mempty, - configStaticLib = mempty, - configDynExe = mempty, - configProfExe = mempty, - configProf = mempty, - configProfDetail = mempty, - configProfLibDetail = mempty, - configConfigureArgs = mempty, - configOptimization = mempty, - configProgPrefix = mempty, - configProgSuffix = mempty, - configInstallDirs = mempty, - configScratchDir = mempty, - configDistPref = mempty, - configCabalFilePath = mempty, - configVerbosity = mempty, - configUserInstall = mempty, --projectConfigUserInstall, - configPackageDBs = mempty, --projectConfigPackageDBs, - configGHCiLib = mempty, - configSplitSections = mempty, - configSplitObjs = mempty, - configStripExes = mempty, - configStripLibs = mempty, - configExtraLibDirs = mempty, - configExtraFrameworkDirs = mempty, - configConstraints = mempty, - configDependencies = mempty, - configExtraIncludeDirs = mempty, - configDeterministic = mempty, - configIPID = mempty, - configCID = mempty, - configConfigurationsFlags = mempty, - configTests = mempty, - configCoverage = mempty, --TODO: don't merge - configLibCoverage = mempty, --TODO: don't merge - configExactConfiguration = mempty, - configBenchmarks = mempty, - configFlagError = mempty, --TODO: ??? 
- configRelocatable = mempty, - configDebugInfo = mempty, - configUseResponseFiles = mempty - } - - haddockFlags = mempty { - haddockKeepTempFiles = projectConfigKeepTempFiles - } - - -convertToLegacyPerPackageConfig :: PackageConfig -> LegacyPackageConfig -convertToLegacyPerPackageConfig PackageConfig {..} = - LegacyPackageConfig { - legacyConfigureFlags = configFlags, - legacyInstallPkgFlags = installFlags, - legacyHaddockFlags = haddockFlags - } - where - configFlags = ConfigFlags { - configArgs = mempty, - configPrograms_ = configPrograms_ mempty, - configProgramPaths = Map.toList (getMapLast packageConfigProgramPaths), - configProgramArgs = Map.toList (getMapMappend packageConfigProgramArgs), - configProgramPathExtra = packageConfigProgramPathExtra, - configHcFlavor = mempty, - configHcPath = mempty, - configHcPkg = mempty, - configInstantiateWith = mempty, - configVanillaLib = packageConfigVanillaLib, - configProfLib = packageConfigProfLib, - configSharedLib = packageConfigSharedLib, - configStaticLib = packageConfigStaticLib, - configDynExe = packageConfigDynExe, - configProfExe = packageConfigProfExe, - configProf = packageConfigProf, - configProfDetail = packageConfigProfDetail, - configProfLibDetail = packageConfigProfLibDetail, - configConfigureArgs = packageConfigConfigureArgs, - configOptimization = packageConfigOptimization, - configProgPrefix = packageConfigProgPrefix, - configProgSuffix = packageConfigProgSuffix, - configInstallDirs = mempty, - configScratchDir = mempty, - configDistPref = mempty, - configCabalFilePath = mempty, - configVerbosity = mempty, - configUserInstall = mempty, - configPackageDBs = mempty, - configGHCiLib = packageConfigGHCiLib, - configSplitSections = packageConfigSplitSections, - configSplitObjs = packageConfigSplitObjs, - configStripExes = packageConfigStripExes, - configStripLibs = packageConfigStripLibs, - configExtraLibDirs = packageConfigExtraLibDirs, - configExtraFrameworkDirs = packageConfigExtraFrameworkDirs, - 
configConstraints = mempty, - configDependencies = mempty, - configExtraIncludeDirs = packageConfigExtraIncludeDirs, - configIPID = mempty, - configCID = mempty, - configDeterministic = mempty, - configConfigurationsFlags = packageConfigFlagAssignment, - configTests = packageConfigTests, - configCoverage = packageConfigCoverage, --TODO: don't merge - configLibCoverage = packageConfigCoverage, --TODO: don't merge - configExactConfiguration = mempty, - configBenchmarks = packageConfigBenchmarks, - configFlagError = mempty, --TODO: ??? - configRelocatable = packageConfigRelocatable, - configDebugInfo = packageConfigDebugInfo, - configUseResponseFiles = mempty - } - - installFlags = mempty { - installDocumentation = packageConfigDocumentation, - installRunTests = packageConfigRunTests - } - - haddockFlags = HaddockFlags { - haddockProgramPaths = mempty, - haddockProgramArgs = mempty, - haddockHoogle = packageConfigHaddockHoogle, - haddockHtml = packageConfigHaddockHtml, - haddockHtmlLocation = packageConfigHaddockHtmlLocation, - haddockForHackage = packageConfigHaddockForHackage, - haddockForeignLibs = packageConfigHaddockForeignLibs, - haddockExecutables = packageConfigHaddockExecutables, - haddockTestSuites = packageConfigHaddockTestSuites, - haddockBenchmarks = packageConfigHaddockBenchmarks, - haddockInternal = packageConfigHaddockInternal, - haddockCss = packageConfigHaddockCss, - haddockLinkedSource = packageConfigHaddockLinkedSource, - haddockQuickJump = packageConfigHaddockQuickJump, - haddockHscolourCss = packageConfigHaddockHscolourCss, - haddockContents = packageConfigHaddockContents, - haddockDistPref = mempty, - haddockKeepTempFiles = mempty, - haddockVerbosity = mempty, - haddockCabalFilePath = mempty, - haddockArgs = mempty - } - - ------------------------------------------------- --- Parsing and showing the project config file --- - -parseLegacyProjectConfig :: String -> ParseResult LegacyProjectConfig -parseLegacyProjectConfig = - parseConfig 
legacyProjectConfigFieldDescrs - legacyPackageConfigSectionDescrs - mempty - -showLegacyProjectConfig :: LegacyProjectConfig -> String -showLegacyProjectConfig config = - Disp.render $ - showConfig legacyProjectConfigFieldDescrs - legacyPackageConfigSectionDescrs - config - $+$ - Disp.text "" - - -legacyProjectConfigFieldDescrs :: [FieldDescr LegacyProjectConfig] -legacyProjectConfigFieldDescrs = - - [ newLineListField "packages" - (Disp.text . renderPackageLocationToken) parsePackageLocationTokenQ - legacyPackages - (\v flags -> flags { legacyPackages = v }) - , newLineListField "optional-packages" - (Disp.text . renderPackageLocationToken) parsePackageLocationTokenQ - legacyPackagesOptional - (\v flags -> flags { legacyPackagesOptional = v }) - , commaNewLineListField "extra-packages" - disp parse - legacyPackagesNamed - (\v flags -> flags { legacyPackagesNamed = v }) - ] - - ++ map (liftField - legacySharedConfig - (\flags conf -> conf { legacySharedConfig = flags })) - legacySharedConfigFieldDescrs - - ++ map (liftField - legacyLocalConfig - (\flags conf -> conf { legacyLocalConfig = flags })) - legacyPackageConfigFieldDescrs - --- | This is a bit tricky since it has to cover globs which have embedded @,@ --- chars. But we don't just want to parse strictly as a glob since we want to --- allow http urls which don't parse as globs, and possibly some --- system-dependent file paths. So we parse fairly liberally as a token, but --- we allow @,@ inside matched @{}@ braces. 
--- -parsePackageLocationTokenQ :: ReadP r String -parsePackageLocationTokenQ = parseHaskellString - Parse.<++ parsePackageLocationToken - where - parsePackageLocationToken :: ReadP r String - parsePackageLocationToken = fmap fst (Parse.gather outerTerm) - where - outerTerm = alternateEither1 outerToken (braces innerTerm) - innerTerm = alternateEither innerToken (braces innerTerm) - outerToken = Parse.munch1 outerChar >> return () - innerToken = Parse.munch1 innerChar >> return () - outerChar c = not (isSpace c || c == '{' || c == '}' || c == ',') - innerChar c = not (isSpace c || c == '{' || c == '}') - braces = Parse.between (Parse.char '{') (Parse.char '}') - - alternateEither, alternateEither1, - alternatePQs, alternate1PQs, alternateQsP, alternate1QsP - :: ReadP r () -> ReadP r () -> ReadP r () - - alternateEither1 p q = alternate1PQs p q +++ alternate1QsP q p - alternateEither p q = alternateEither1 p q +++ return () - alternate1PQs p q = p >> alternateQsP q p - alternatePQs p q = alternate1PQs p q +++ return () - alternate1QsP q p = Parse.many1 q >> alternatePQs p q - alternateQsP q p = alternate1QsP q p +++ return () - -renderPackageLocationToken :: String -> String -renderPackageLocationToken s | needsQuoting = show s - | otherwise = s - where - needsQuoting = not (ok 0 s) - || s == "." -- . on its own on a line has special meaning - || take 2 s == "--" -- on its own line is comment syntax - --TODO: [code cleanup] these "." and "--" escaping issues - -- ought to be dealt with systematically in ParseUtils. - ok :: Int -> String -> Bool - ok n [] = n == 0 - ok _ ('"':_) = False - ok n ('{':cs) = ok (n+1) cs - ok n ('}':cs) = ok (n-1) cs - ok n (',':cs) = (n > 0) && ok n cs - ok _ (c:_) - | isSpace c = False - ok n (_ :cs) = ok n cs - - -legacySharedConfigFieldDescrs :: [FieldDescr LegacySharedConfig] -legacySharedConfigFieldDescrs = - - ( liftFields - legacyGlobalFlags - (\flags conf -> conf { legacyGlobalFlags = flags }) - . 
addFields - [ newLineListField "local-repo" - showTokenQ parseTokenQ - (fromNubList . globalLocalRepos) - (\v conf -> conf { globalLocalRepos = toNubList v }), - newLineListField "extra-prog-path-shared-only" - showTokenQ parseTokenQ - (fromNubList . globalProgPathExtra) - (\v conf -> conf { globalProgPathExtra = toNubList v }) - ] - . filterFields - [ "remote-repo-cache" - , "logs-dir", "store-dir", "ignore-expiry", "http-transport" - ] - . commandOptionsToFields - ) (commandOptions (globalCommand []) ParseArgs) - ++ - ( liftFields - legacyConfigureShFlags - (\flags conf -> conf { legacyConfigureShFlags = flags }) - . filterFields ["verbose", "builddir" ] - . commandOptionsToFields - ) (configureOptions ParseArgs) - ++ - ( liftFields - legacyConfigureExFlags - (\flags conf -> conf { legacyConfigureExFlags = flags }) - . addFields - [ commaNewLineListField "constraints" - (disp . fst) (fmap (\constraint -> (constraint, constraintSrc)) parse) - configExConstraints (\v conf -> conf { configExConstraints = v }) - - , commaNewLineListField "preferences" - disp parse - configPreferences (\v conf -> conf { configPreferences = v }) - - , monoidField "allow-older" - (maybe mempty disp) (fmap Just parse) - (fmap unAllowOlder . configAllowOlder) - (\v conf -> conf { configAllowOlder = fmap AllowOlder v }) - - , monoidField "allow-newer" - (maybe mempty disp) (fmap Just parse) - (fmap unAllowNewer . configAllowNewer) - (\v conf -> conf { configAllowNewer = fmap AllowNewer v }) - ] - . filterFields - [ "cabal-lib-version", "solver" - -- not "constraint" or "preference", we use our own plural ones above - ] - . commandOptionsToFields - ) (configureExOptions ParseArgs constraintSrc) - ++ - ( liftFields - legacyInstallFlags - (\flags conf -> conf { legacyInstallFlags = flags }) - . addFields - [ newLineListField "build-summary" - (showTokenQ . fromPathTemplate) (fmap toPathTemplate parseTokenQ) - (fromNubList . 
installSummaryFile) - (\v conf -> conf { installSummaryFile = toNubList v }) - ] - . filterFields - [ "doc-index-file" - , "root-cmd", "symlink-bindir" - , "build-log" - , "remote-build-reporting", "report-planning-failure" - , "one-shot", "jobs", "keep-going", "offline", "per-component" - -- solver flags: - , "max-backjumps", "reorder-goals", "count-conflicts", "independent-goals" - , "strong-flags" , "allow-boot-library-installs", "index-state" - ] - . commandOptionsToFields - ) (installOptions ParseArgs) - where - constraintSrc = ConstraintSourceProjectConfig "TODO" - - -legacyPackageConfigFieldDescrs :: [FieldDescr LegacyPackageConfig] -legacyPackageConfigFieldDescrs = - ( liftFields - legacyConfigureFlags - (\flags conf -> conf { legacyConfigureFlags = flags }) - . addFields - [ newLineListField "extra-include-dirs" - showTokenQ parseTokenQ - configExtraIncludeDirs - (\v conf -> conf { configExtraIncludeDirs = v }) - , newLineListField "extra-lib-dirs" - showTokenQ parseTokenQ - configExtraLibDirs - (\v conf -> conf { configExtraLibDirs = v }) - , newLineListField "extra-framework-dirs" - showTokenQ parseTokenQ - configExtraFrameworkDirs - (\v conf -> conf { configExtraFrameworkDirs = v }) - , newLineListField "extra-prog-path" - showTokenQ parseTokenQ - (fromNubList . configProgramPathExtra) - (\v conf -> conf { configProgramPathExtra = toNubList v }) - , newLineListField "configure-options" - showTokenQ parseTokenQ - configConfigureArgs - (\v conf -> conf { configConfigureArgs = v }) - , simpleField "flags" - dispFlagAssignment parseFlagAssignment - configConfigurationsFlags - (\v conf -> conf { configConfigurationsFlags = v }) - ] - . 
filterFields - [ "with-compiler", "with-hc-pkg" - , "program-prefix", "program-suffix" - , "library-vanilla", "library-profiling" - , "shared", "static", "executable-dynamic" - , "profiling", "executable-profiling" - , "profiling-detail", "library-profiling-detail" - , "library-for-ghci", "split-objs", "split-sections" - , "executable-stripping", "library-stripping" - , "tests", "benchmarks" - , "coverage", "library-coverage" - , "relocatable" - -- not "extra-include-dirs", "extra-lib-dirs", "extra-framework-dirs" - -- or "extra-prog-path". We use corrected ones above that parse - -- as list fields. - ] - . commandOptionsToFields - ) (configureOptions ParseArgs) - ++ - liftFields - legacyConfigureFlags - (\flags conf -> conf { legacyConfigureFlags = flags }) - [ overrideFieldCompiler - , overrideFieldOptimization - , overrideFieldDebugInfo - ] - ++ - ( liftFields - legacyInstallPkgFlags - (\flags conf -> conf { legacyInstallPkgFlags = flags }) - . filterFields - [ "documentation", "run-tests" - ] - . commandOptionsToFields - ) (installOptions ParseArgs) - ++ - ( liftFields - legacyHaddockFlags - (\flags conf -> conf { legacyHaddockFlags = flags }) - . mapFieldNames - ("haddock-"++) - . addFields - [ simpleField "for-hackage" - -- TODO: turn this into a library function - (fromFlagOrDefault Disp.empty . fmap disp) (Parse.option mempty (fmap toFlag parse)) - haddockForHackage (\v conf -> conf { haddockForHackage = v }) - ] - . filterFields - [ "hoogle", "html", "html-location" - , "foreign-libraries" - , "executables", "tests", "benchmarks", "all", "internal", "css" - , "hyperlink-source", "quickjump", "hscolour-css" - , "contents-location", "keep-temp-files" - ] - . commandOptionsToFields - ) (haddockOptions ParseArgs) - - where - overrideFieldCompiler = - simpleField "compiler" - (fromFlagOrDefault Disp.empty . 
fmap disp) - (Parse.option mempty (fmap toFlag parse)) - configHcFlavor (\v flags -> flags { configHcFlavor = v }) - - - -- TODO: [code cleanup] The following is a hack. The "optimization" and - -- "debug-info" fields are OptArg, and viewAsFieldDescr fails on that. - -- Instead of a hand-written parser and printer, we should handle this case - -- properly in the library. - - overrideFieldOptimization = - liftField configOptimization - (\v flags -> flags { configOptimization = v }) $ - let name = "optimization" in - FieldDescr name - (\f -> case f of - Flag NoOptimisation -> Disp.text "False" - Flag NormalOptimisation -> Disp.text "True" - Flag MaximumOptimisation -> Disp.text "2" - _ -> Disp.empty) - (\line str _ -> case () of - _ | str == "False" -> ParseOk [] (Flag NoOptimisation) - | str == "True" -> ParseOk [] (Flag NormalOptimisation) - | str == "0" -> ParseOk [] (Flag NoOptimisation) - | str == "1" -> ParseOk [] (Flag NormalOptimisation) - | str == "2" -> ParseOk [] (Flag MaximumOptimisation) - | lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation) - | lstr == "true" -> ParseOk [caseWarning] (Flag NormalOptimisation) - | otherwise -> ParseFailed (NoParse name line) - where - lstr = lowercase str - caseWarning = PWarning $ - "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.") - - overrideFieldDebugInfo = - liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $ - let name = "debug-info" in - FieldDescr name - (\f -> case f of - Flag NoDebugInfo -> Disp.text "False" - Flag MinimalDebugInfo -> Disp.text "1" - Flag NormalDebugInfo -> Disp.text "True" - Flag MaximalDebugInfo -> Disp.text "3" - _ -> Disp.empty) - (\line str _ -> case () of - _ | str == "False" -> ParseOk [] (Flag NoDebugInfo) - | str == "True" -> ParseOk [] (Flag NormalDebugInfo) - | str == "0" -> ParseOk [] (Flag NoDebugInfo) - | str == "1" -> ParseOk [] (Flag MinimalDebugInfo) - | str == "2" -> ParseOk [] (Flag NormalDebugInfo) - | str == "3" -> 
ParseOk [] (Flag MaximalDebugInfo) - | lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo) - | lstr == "true" -> ParseOk [caseWarning] (Flag NormalDebugInfo) - | otherwise -> ParseFailed (NoParse name line) - where - lstr = lowercase str - caseWarning = PWarning $ - "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.") - - -legacyPackageConfigSectionDescrs :: [SectionDescr LegacyProjectConfig] -legacyPackageConfigSectionDescrs = - [ packageRepoSectionDescr - , packageSpecificOptionsSectionDescr - , liftSection - legacyLocalConfig - (\flags conf -> conf { legacyLocalConfig = flags }) - programOptionsSectionDescr - , liftSection - legacyLocalConfig - (\flags conf -> conf { legacyLocalConfig = flags }) - programLocationsSectionDescr - , liftSection - legacySharedConfig - (\flags conf -> conf { legacySharedConfig = flags }) $ - liftSection - legacyGlobalFlags - (\flags conf -> conf { legacyGlobalFlags = flags }) - remoteRepoSectionDescr - ] - -packageRepoSectionDescr :: SectionDescr LegacyProjectConfig -packageRepoSectionDescr = - SectionDescr { - sectionName = "source-repository-package", - sectionFields = sourceRepoFieldDescrs, - sectionSubsections = [], - sectionGet = map (\x->("", x)) - . legacyPackagesRepo, - sectionSet = - \lineno unused pkgrepo projconf -> do - unless (null unused) $ - syntaxError lineno "the section 'source-repository-package' takes no arguments" - return projconf { - legacyPackagesRepo = legacyPackagesRepo projconf ++ [pkgrepo] - }, - sectionEmpty = SourceRepo { - repoKind = RepoThis, -- hopefully unused - repoType = Nothing, - repoLocation = Nothing, - repoModule = Nothing, - repoBranch = Nothing, - repoTag = Nothing, - repoSubdir = Nothing - } - } - --- | The definitions of all the fields that can appear in the @package pkgfoo@ --- and @package *@ sections of the @cabal.project@-format files. 
--- -packageSpecificOptionsFieldDescrs :: [FieldDescr LegacyPackageConfig] -packageSpecificOptionsFieldDescrs = - legacyPackageConfigFieldDescrs - ++ programOptionsFieldDescrs - (configProgramArgs . legacyConfigureFlags) - (\args pkgconf -> pkgconf { - legacyConfigureFlags = (legacyConfigureFlags pkgconf) { - configProgramArgs = args - } - } - ) - ++ liftFields - legacyConfigureFlags - (\flags pkgconf -> pkgconf { - legacyConfigureFlags = flags - } - ) - programLocationsFieldDescrs - --- | The definition of the @package pkgfoo@ sections of the @cabal.project@-format --- files. This section is per-package name. The special package @*@ applies to all --- packages used anywhere by the project, locally or as dependencies. --- -packageSpecificOptionsSectionDescr :: SectionDescr LegacyProjectConfig -packageSpecificOptionsSectionDescr = - SectionDescr { - sectionName = "package", - sectionFields = packageSpecificOptionsFieldDescrs, - sectionSubsections = [], - sectionGet = \projconf -> - [ (display pkgname, pkgconf) - | (pkgname, pkgconf) <- - Map.toList . getMapMappend - . 
legacySpecificConfig $ projconf ] - ++ [ ("*", legacyAllConfig projconf) ], - sectionSet = - \lineno pkgnamestr pkgconf projconf -> case pkgnamestr of - "*" -> return projconf { - legacyAllConfig = legacyAllConfig projconf <> pkgconf - } - _ -> do - pkgname <- case simpleParse pkgnamestr of - Just pkgname -> return pkgname - Nothing -> syntaxError lineno $ - "a 'package' section requires a package name " - ++ "as an argument" - return projconf { - legacySpecificConfig = - MapMappend $ - Map.insertWith mappend pkgname pkgconf - (getMapMappend $ legacySpecificConfig projconf) - }, - sectionEmpty = mempty - } - -programOptionsFieldDescrs :: (a -> [(String, [String])]) - -> ([(String, [String])] -> a -> a) - -> [FieldDescr a] -programOptionsFieldDescrs get' set = - commandOptionsToFields - $ programDbOptions - defaultProgramDb - ParseArgs get' set - -programOptionsSectionDescr :: SectionDescr LegacyPackageConfig -programOptionsSectionDescr = - SectionDescr { - sectionName = "program-options", - sectionFields = programOptionsFieldDescrs - configProgramArgs - (\args conf -> conf { configProgramArgs = args }), - sectionSubsections = [], - sectionGet = (\x->[("", x)]) - . 
legacyConfigureFlags, - sectionSet = - \lineno unused confflags pkgconf -> do - unless (null unused) $ - syntaxError lineno "the section 'program-options' takes no arguments" - return pkgconf { - legacyConfigureFlags = legacyConfigureFlags pkgconf <> confflags - }, - sectionEmpty = mempty - } - -programLocationsFieldDescrs :: [FieldDescr ConfigFlags] -programLocationsFieldDescrs = - commandOptionsToFields - $ programDbPaths' - (++ "-location") - defaultProgramDb - ParseArgs - configProgramPaths - (\paths conf -> conf { configProgramPaths = paths }) - -programLocationsSectionDescr :: SectionDescr LegacyPackageConfig -programLocationsSectionDescr = - SectionDescr { - sectionName = "program-locations", - sectionFields = programLocationsFieldDescrs, - sectionSubsections = [], - sectionGet = (\x->[("", x)]) - . legacyConfigureFlags, - sectionSet = - \lineno unused confflags pkgconf -> do - unless (null unused) $ - syntaxError lineno "the section 'program-locations' takes no arguments" - return pkgconf { - legacyConfigureFlags = legacyConfigureFlags pkgconf <> confflags - }, - sectionEmpty = mempty - } - - --- | For each known program @PROG@ in 'progDb', produce a @PROG-options@ --- 'OptionField'. -programDbOptions - :: ProgramDb - -> ShowOrParseArgs - -> (flags -> [(String, [String])]) - -> ([(String, [String])] -> (flags -> flags)) - -> [OptionField flags] -programDbOptions progDb showOrParseArgs get' set = - case showOrParseArgs of - -- we don't want a verbose help text list so we just show a generic one: - ShowArgs -> [programOptions "PROG"] - ParseArgs -> map (programOptions . programName . fst) - (knownPrograms progDb) - where - programOptions prog = - option "" [prog ++ "-options"] - ("give extra options to " ++ prog) - get' set - (reqArg' "OPTS" (\args -> [(prog, splitArgs args)]) - (\progArgs -> [ joinsArgs args - | (prog', args) <- progArgs, prog==prog' ])) - - - joinsArgs = unwords . 
map escape - escape arg | any isSpace arg = "\"" ++ arg ++ "\"" - | otherwise = arg - - -remoteRepoSectionDescr :: SectionDescr GlobalFlags -remoteRepoSectionDescr = - SectionDescr { - sectionName = "repository", - sectionFields = remoteRepoFields, - sectionSubsections = [], - sectionGet = map (\x->(remoteRepoName x, x)) . fromNubList - . globalRemoteRepos, - sectionSet = - \lineno reponame repo0 conf -> do - when (null reponame) $ - syntaxError lineno $ "a 'repository' section requires the " - ++ "repository name as an argument" - let repo = repo0 { remoteRepoName = reponame } - when (remoteRepoKeyThreshold repo - > length (remoteRepoRootKeys repo)) $ - warning $ "'key-threshold' for repository " - ++ show (remoteRepoName repo) - ++ " higher than number of keys" - when (not (null (remoteRepoRootKeys repo)) - && remoteRepoSecure repo /= Just True) $ - warning $ "'root-keys' for repository " - ++ show (remoteRepoName repo) - ++ " non-empty, but 'secure' not set to True." - return conf { - globalRemoteRepos = overNubList (++[repo]) (globalRemoteRepos conf) - }, - sectionEmpty = emptyRemoteRepo "" - } - - -------------------------------- --- Local field utils --- - ---TODO: [code cleanup] all these utils should move to Distribution.ParseUtils --- either augmenting or replacing the ones there - ---TODO: [code cleanup] this is a different definition from listField, like --- commaNewLineListField it pretty prints on multiple lines -newLineListField :: String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -newLineListField = listFieldWithSep Disp.sep - ---TODO: [code cleanup] local copy purely so we can use the fixed version --- of parseOptCommaList below -listFieldWithSep :: ([Doc] -> Doc) -> String -> (a -> Doc) -> ReadP [a] a - -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b -listFieldWithSep separator name showF readF get' set = - liftField get' set' $ - ParseUtils.field name showF' (parseOptCommaList readF) - where - set' xs b = set 
(get' b ++ xs) b - showF' = separator . map showF - --- | Parser combinator for simple fields which uses the field type's --- 'Monoid' instance for combining multiple occurences of the field. -monoidField :: Monoid a => String -> (a -> Doc) -> ReadP a a - -> (b -> a) -> (a -> b -> b) -> FieldDescr b -monoidField name showF readF get' set = - liftField get' set' $ ParseUtils.field name showF readF - where - set' xs b = set (get' b `mappend` xs) b - ---TODO: [code cleanup] local redefinition that should replace the version in --- D.ParseUtils. This version avoid parse ambiguity for list element parsers --- that have multiple valid parses of prefixes. -parseOptCommaList :: ReadP r a -> ReadP r [a] -parseOptCommaList p = Parse.sepBy p sep - where - -- The separator must not be empty or it introduces ambiguity - sep = (Parse.skipSpaces >> Parse.char ',' >> Parse.skipSpaces) - +++ (Parse.satisfy isSpace >> Parse.skipSpaces) - ---TODO: [code cleanup] local redefinition that should replace the version in --- D.ParseUtils called showFilePath. This version escapes "." and "--" which --- otherwise are special syntax. -showTokenQ :: String -> Doc -showTokenQ "" = Disp.empty -showTokenQ x@('-':'-':_) = Disp.text (show x) -showTokenQ x@('.':[]) = Disp.text (show x) -showTokenQ x = showToken x - --- This is just a copy of parseTokenQ, using the fixed parseHaskellString -parseTokenQ :: ReadP r String -parseTokenQ = parseHaskellString - <++ Parse.munch1 (\x -> not (isSpace x) && x /= ',') - ---TODO: [code cleanup] use this to replace the parseHaskellString in --- Distribution.ParseUtils. It turns out Read instance for String accepts --- the ['a', 'b'] syntax, which we do not want. In particular it messes --- up any token starting with []. 
-parseHaskellString :: ReadP r String -parseHaskellString = - Parse.readS_to_P $ - Read.readPrec_to_S (do Read.String s <- Read.lexP; return s) 0 - --- Handy util -addFields :: [FieldDescr a] - -> ([FieldDescr a] -> [FieldDescr a]) -addFields = (++) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectConfig/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectConfig/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectConfig/Types.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectConfig/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,422 +0,0 @@ -{-# LANGUAGE DeriveGeneric, DeriveDataTypeable, GeneralizedNewtypeDeriving #-} - --- | Handling project configuration, types. --- -module Distribution.Client.ProjectConfig.Types ( - - -- * Types for project config - ProjectConfig(..), - ProjectConfigBuildOnly(..), - ProjectConfigShared(..), - ProjectConfigProvenance(..), - PackageConfig(..), - - -- * Resolving configuration - SolverSettings(..), - BuildTimeSettings(..), - - -- * Extra useful Monoids - MapLast(..), - MapMappend(..), - ) where - -import Distribution.Client.Types - ( RemoteRepo, AllowNewer(..), AllowOlder(..) ) -import Distribution.Client.Dependency.Types - ( PreSolver ) -import Distribution.Client.Targets - ( UserConstraint ) -import Distribution.Client.BuildReports.Types - ( ReportLevel(..) 
) - -import Distribution.Client.IndexUtils.Timestamp - ( IndexState ) - -import Distribution.Solver.Types.Settings -import Distribution.Solver.Types.ConstraintSource - -import Distribution.Package - ( PackageName, PackageId, UnitId ) -import Distribution.Types.Dependency -import Distribution.Version - ( Version ) -import Distribution.System - ( Platform ) -import Distribution.PackageDescription - ( FlagAssignment, SourceRepo(..) ) -import Distribution.Simple.Compiler - ( Compiler, CompilerFlavor - , OptimisationLevel(..), ProfDetailLevel, DebugInfoLevel(..) ) -import Distribution.Simple.Setup - ( Flag, HaddockTarget(..) ) -import Distribution.Simple.InstallDirs - ( PathTemplate ) -import Distribution.Utils.NubList - ( NubList ) -import Distribution.Verbosity - ( Verbosity ) - -import Data.Map (Map) -import qualified Data.Map as Map -import Data.Set (Set) -import Distribution.Compat.Binary (Binary) -import Distribution.Compat.Semigroup -import GHC.Generics (Generic) -import Data.Typeable - - -------------------------------- --- Project config types --- - --- | This type corresponds directly to what can be written in the --- @cabal.project@ file. Other sources of configuration can also be injected --- into this type, such as the user-wide @~/.cabal/config@ file and the --- command line of @cabal configure@ or @cabal build@. --- --- Since it corresponds to the external project file it is an instance of --- 'Monoid' and all the fields can be empty. This also means there has to --- be a step where we resolve configuration. At a minimum resolving means --- applying defaults but it can also mean merging information from multiple --- sources. For example for package-specific configuration the project file --- can specify configuration that applies to all local packages, and then --- additional configuration for a specific package. --- --- Future directions: multiple profiles, conditionals. 
If we add these --- features then the gap between configuration as written in the config file --- and resolved settings we actually use will become even bigger. --- -data ProjectConfig - = ProjectConfig { - - -- | Packages in this project, including local dirs, local .cabal files - -- local and remote tarballs. When these are file globs, they must - -- match at least one package. - projectPackages :: [String], - - -- | Like 'projectConfigPackageGlobs' but /optional/ in the sense that - -- file globs are allowed to match nothing. The primary use case for - -- this is to be able to say @optional-packages: */@ to automagically - -- pick up deps that we unpack locally without erroring when - -- there aren't any. - projectPackagesOptional :: [String], - - -- | Packages in this project from remote source repositories. - projectPackagesRepo :: [SourceRepo], - - -- | Packages in this project from hackage repositories. - projectPackagesNamed :: [Dependency], - - -- See respective types for an explanation of what these - -- values are about: - projectConfigBuildOnly :: ProjectConfigBuildOnly, - projectConfigShared :: ProjectConfigShared, - projectConfigProvenance :: Set ProjectConfigProvenance, - - -- | Configuration to be applied to *all* packages, - -- whether named in `cabal.project` or not. - projectConfigAllPackages :: PackageConfig, - - -- | Configuration to be applied to *local* packages; i.e., - -- any packages which are explicitly named in `cabal.project`. - projectConfigLocalPackages :: PackageConfig, - projectConfigSpecificPackage :: MapMappend PackageName PackageConfig - } - deriving (Eq, Show, Generic, Typeable) - --- | That part of the project configuration that only affects /how/ we build --- and not the /value/ of the things we build. This means this information --- does not need to be tracked for changes since it does not affect the --- outcome. 
--- -data ProjectConfigBuildOnly - = ProjectConfigBuildOnly { - projectConfigVerbosity :: Flag Verbosity, - projectConfigDryRun :: Flag Bool, - projectConfigOnlyDeps :: Flag Bool, - projectConfigSummaryFile :: NubList PathTemplate, - projectConfigLogFile :: Flag PathTemplate, - projectConfigBuildReports :: Flag ReportLevel, - projectConfigReportPlanningFailure :: Flag Bool, - projectConfigSymlinkBinDir :: Flag FilePath, - projectConfigOneShot :: Flag Bool, - projectConfigNumJobs :: Flag (Maybe Int), - projectConfigKeepGoing :: Flag Bool, - projectConfigOfflineMode :: Flag Bool, - projectConfigKeepTempFiles :: Flag Bool, - projectConfigHttpTransport :: Flag String, - projectConfigIgnoreExpiry :: Flag Bool, - projectConfigCacheDir :: Flag FilePath, - projectConfigLogsDir :: Flag FilePath - } - deriving (Eq, Show, Generic) - - --- | Project configuration that is shared between all packages in the project. --- In particular this includes configuration that affects the solver. --- -data ProjectConfigShared - = ProjectConfigShared { - projectConfigDistDir :: Flag FilePath, - projectConfigConfigFile :: Flag FilePath, - projectConfigProjectFile :: Flag FilePath, - projectConfigHcFlavor :: Flag CompilerFlavor, - projectConfigHcPath :: Flag FilePath, - projectConfigHcPkg :: Flag FilePath, - projectConfigHaddockIndex :: Flag PathTemplate, - - -- Things that only make sense for manual mode, not --local mode - -- too much control! - --projectConfigUserInstall :: Flag Bool, - --projectConfigInstallDirs :: InstallDirs (Flag PathTemplate), - --TODO: [required eventually] decide what to do with InstallDirs - -- currently we don't allow it to be specified in the config file - --projectConfigPackageDBs :: [Maybe PackageDB], - - -- configuration used both by the solver and other phases - projectConfigRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers. 
- projectConfigLocalRepos :: NubList FilePath, - projectConfigIndexState :: Flag IndexState, - projectConfigStoreDir :: Flag FilePath, - - -- solver configuration - projectConfigConstraints :: [(UserConstraint, ConstraintSource)], - projectConfigPreferences :: [Dependency], - projectConfigCabalVersion :: Flag Version, --TODO: [required eventually] unused - projectConfigSolver :: Flag PreSolver, - projectConfigAllowOlder :: Maybe AllowOlder, - projectConfigAllowNewer :: Maybe AllowNewer, - projectConfigMaxBackjumps :: Flag Int, - projectConfigReorderGoals :: Flag ReorderGoals, - projectConfigCountConflicts :: Flag CountConflicts, - projectConfigStrongFlags :: Flag StrongFlags, - projectConfigAllowBootLibInstalls :: Flag AllowBootLibInstalls, - projectConfigPerComponent :: Flag Bool, - projectConfigIndependentGoals :: Flag IndependentGoals, - - projectConfigProgPathExtra :: NubList FilePath - - -- More things that only make sense for manual mode, not --local mode - -- too much control! - --projectConfigShadowPkgs :: Flag Bool, - --projectConfigReinstall :: Flag Bool, - --projectConfigAvoidReinstalls :: Flag Bool, - --projectConfigOverrideReinstall :: Flag Bool, - --projectConfigUpgradeDeps :: Flag Bool - } - deriving (Eq, Show, Generic) - - --- | Specifies the provenance of project configuration, whether defaults were --- used or if the configuration was read from an explicit file path. -data ProjectConfigProvenance - - -- | The configuration is implicit due to no explicit configuration - -- being found. See 'Distribution.Client.ProjectConfig.readProjectConfig' - -- for how implicit configuration is determined. - = Implicit - - -- | The path the project configuration was explicitly read from. - -- | The configuration was explicitly read from the specified 'FilePath'. 
- | Explicit FilePath - deriving (Eq, Ord, Show, Generic) - - --- | Project configuration that is specific to each package, that is where we --- can in principle have different values for different packages in the same --- project. --- -data PackageConfig - = PackageConfig { - packageConfigProgramPaths :: MapLast String FilePath, - packageConfigProgramArgs :: MapMappend String [String], - packageConfigProgramPathExtra :: NubList FilePath, - packageConfigFlagAssignment :: FlagAssignment, - packageConfigVanillaLib :: Flag Bool, - packageConfigSharedLib :: Flag Bool, - packageConfigStaticLib :: Flag Bool, - packageConfigDynExe :: Flag Bool, - packageConfigProf :: Flag Bool, --TODO: [code cleanup] sort out - packageConfigProfLib :: Flag Bool, -- this duplication - packageConfigProfExe :: Flag Bool, -- and consistency - packageConfigProfDetail :: Flag ProfDetailLevel, - packageConfigProfLibDetail :: Flag ProfDetailLevel, - packageConfigConfigureArgs :: [String], - packageConfigOptimization :: Flag OptimisationLevel, - packageConfigProgPrefix :: Flag PathTemplate, - packageConfigProgSuffix :: Flag PathTemplate, - packageConfigExtraLibDirs :: [FilePath], - packageConfigExtraFrameworkDirs :: [FilePath], - packageConfigExtraIncludeDirs :: [FilePath], - packageConfigGHCiLib :: Flag Bool, - packageConfigSplitSections :: Flag Bool, - packageConfigSplitObjs :: Flag Bool, - packageConfigStripExes :: Flag Bool, - packageConfigStripLibs :: Flag Bool, - packageConfigTests :: Flag Bool, - packageConfigBenchmarks :: Flag Bool, - packageConfigCoverage :: Flag Bool, - packageConfigRelocatable :: Flag Bool, - packageConfigDebugInfo :: Flag DebugInfoLevel, - packageConfigRunTests :: Flag Bool, --TODO: [required eventually] use this - packageConfigDocumentation :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockHoogle :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockHtml :: Flag Bool, --TODO: [required eventually] use this - 
packageConfigHaddockHtmlLocation :: Flag String, --TODO: [required eventually] use this - packageConfigHaddockForeignLibs :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockExecutables :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockTestSuites :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockBenchmarks :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockInternal :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockCss :: Flag FilePath, --TODO: [required eventually] use this - packageConfigHaddockLinkedSource :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockQuickJump :: Flag Bool, --TODO: [required eventually] use this - packageConfigHaddockHscolourCss :: Flag FilePath, --TODO: [required eventually] use this - packageConfigHaddockContents :: Flag PathTemplate, --TODO: [required eventually] use this - packageConfigHaddockForHackage :: Flag HaddockTarget - } - deriving (Eq, Show, Generic) - -instance Binary ProjectConfig -instance Binary ProjectConfigBuildOnly -instance Binary ProjectConfigShared -instance Binary ProjectConfigProvenance -instance Binary PackageConfig - - --- | Newtype wrapper for 'Map' that provides a 'Monoid' instance that takes --- the last value rather than the first value for overlapping keys. -newtype MapLast k v = MapLast { getMapLast :: Map k v } - deriving (Eq, Show, Functor, Generic, Binary, Typeable) - -instance Ord k => Monoid (MapLast k v) where - mempty = MapLast Map.empty - mappend = (<>) - -instance Ord k => Semigroup (MapLast k v) where - MapLast a <> MapLast b = MapLast $ Map.union b a - -- rather than Map.union which is the normal Map monoid instance - - --- | Newtype wrapper for 'Map' that provides a 'Monoid' instance that --- 'mappend's values of overlapping keys rather than taking the first. 
-newtype MapMappend k v = MapMappend { getMapMappend :: Map k v } - deriving (Eq, Show, Functor, Generic, Binary, Typeable) - -instance (Semigroup v, Ord k) => Monoid (MapMappend k v) where - mempty = MapMappend Map.empty - mappend = (<>) - -instance (Semigroup v, Ord k) => Semigroup (MapMappend k v) where - MapMappend a <> MapMappend b = MapMappend (Map.unionWith (<>) a b) - -- rather than Map.union which is the normal Map monoid instance - - -instance Monoid ProjectConfig where - mempty = gmempty - mappend = (<>) - -instance Semigroup ProjectConfig where - (<>) = gmappend - - -instance Monoid ProjectConfigBuildOnly where - mempty = gmempty - mappend = (<>) - -instance Semigroup ProjectConfigBuildOnly where - (<>) = gmappend - - -instance Monoid ProjectConfigShared where - mempty = gmempty - mappend = (<>) - -instance Semigroup ProjectConfigShared where - (<>) = gmappend - - -instance Monoid PackageConfig where - mempty = gmempty - mappend = (<>) - -instance Semigroup PackageConfig where - (<>) = gmappend - ----------------------------------------- --- Resolving configuration to settings --- - --- | Resolved configuration for the solver. The idea is that this is easier to --- use than the raw configuration because in the raw configuration everything --- is optional (monoidial). In the 'BuildTimeSettings' every field is filled --- in, if only with the defaults. --- --- Use 'resolveSolverSettings' to make one from the project config (by --- applying defaults etc). --- -data SolverSettings - = SolverSettings { - solverSettingRemoteRepos :: [RemoteRepo], -- ^ Available Hackage servers. 
- solverSettingLocalRepos :: [FilePath], - solverSettingConstraints :: [(UserConstraint, ConstraintSource)], - solverSettingPreferences :: [Dependency], - solverSettingFlagAssignment :: FlagAssignment, -- ^ For all local packages - solverSettingFlagAssignments :: Map PackageName FlagAssignment, - solverSettingCabalVersion :: Maybe Version, --TODO: [required eventually] unused - solverSettingSolver :: PreSolver, - solverSettingAllowOlder :: AllowOlder, - solverSettingAllowNewer :: AllowNewer, - solverSettingMaxBackjumps :: Maybe Int, - solverSettingReorderGoals :: ReorderGoals, - solverSettingCountConflicts :: CountConflicts, - solverSettingStrongFlags :: StrongFlags, - solverSettingAllowBootLibInstalls :: AllowBootLibInstalls, - solverSettingIndexState :: Maybe IndexState, - solverSettingIndependentGoals :: IndependentGoals - -- Things that only make sense for manual mode, not --local mode - -- too much control! - --solverSettingShadowPkgs :: Bool, - --solverSettingReinstall :: Bool, - --solverSettingAvoidReinstalls :: Bool, - --solverSettingOverrideReinstall :: Bool, - --solverSettingUpgradeDeps :: Bool - } - deriving (Eq, Show, Generic, Typeable) - -instance Binary SolverSettings - - --- | Resolved configuration for things that affect how we build and not the --- value of the things we build. The idea is that this is easier to use than --- the raw configuration because in the raw configuration everything is --- optional (monoidial). In the 'BuildTimeSettings' every field is filled in, --- if only with the defaults. --- --- Use 'resolveBuildTimeSettings' to make one from the project config (by --- applying defaults etc). 
--- -data BuildTimeSettings - = BuildTimeSettings { - buildSettingDryRun :: Bool, - buildSettingOnlyDeps :: Bool, - buildSettingSummaryFile :: [PathTemplate], - buildSettingLogFile :: Maybe (Compiler -> Platform - -> PackageId -> UnitId - -> FilePath), - buildSettingLogVerbosity :: Verbosity, - buildSettingBuildReports :: ReportLevel, - buildSettingReportPlanningFailure :: Bool, - buildSettingSymlinkBinDir :: [FilePath], - buildSettingOneShot :: Bool, - buildSettingNumJobs :: Int, - buildSettingKeepGoing :: Bool, - buildSettingOfflineMode :: Bool, - buildSettingKeepTempFiles :: Bool, - buildSettingRemoteRepos :: [RemoteRepo], - buildSettingLocalRepos :: [FilePath], - buildSettingCacheDir :: FilePath, - buildSettingHttpTransport :: Maybe String, - buildSettingIgnoreExpiry :: Bool, - buildSettingProgPathExtra :: [FilePath] - } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectConfig.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectConfig.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectConfig.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectConfig.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1411 +0,0 @@ -{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, DeriveDataTypeable, LambdaCase #-} - --- | Handling project configuration. 
--- -module Distribution.Client.ProjectConfig ( - - -- * Types for project config - ProjectConfig(..), - ProjectConfigBuildOnly(..), - ProjectConfigShared(..), - ProjectConfigProvenance(..), - PackageConfig(..), - MapLast(..), - MapMappend(..), - - -- * Project root - findProjectRoot, - ProjectRoot(..), - BadProjectRoot(..), - - -- * Project config files - readProjectConfig, - readGlobalConfig, - readProjectLocalFreezeConfig, - withProjectOrGlobalConfig, - writeProjectLocalExtraConfig, - writeProjectLocalFreezeConfig, - writeProjectConfigFile, - commandLineFlagsToProjectConfig, - - -- * Packages within projects - ProjectPackageLocation(..), - BadPackageLocations(..), - BadPackageLocation(..), - BadPackageLocationMatch(..), - findProjectPackages, - fetchAndReadSourcePackages, - - -- * Resolving configuration - lookupLocalPackageConfig, - projectConfigWithBuilderRepoContext, - projectConfigWithSolverRepoContext, - SolverSettings(..), - resolveSolverSettings, - BuildTimeSettings(..), - resolveBuildTimeSettings, - - -- * Checking configuration - checkBadPerPackageCompilerPaths, - BadPerPackageCompilerPaths(..) - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.ProjectConfig.Types -import Distribution.Client.ProjectConfig.Legacy -import Distribution.Client.RebuildMonad -import Distribution.Client.Glob - ( isTrivialFilePathGlob ) -import Distribution.Client.VCS - ( validateSourceRepos, SourceRepoProblem(..) - , VCS(..), knownVCSs, configureVCS, syncSourceRepos ) - -import Distribution.Client.Types -import Distribution.Client.DistDirLayout - ( DistDirLayout(..), CabalDirLayout(..), ProjectRoot(..) ) -import Distribution.Client.GlobalFlags - ( RepoContext(..), withRepoContext' ) -import Distribution.Client.BuildReports.Types - ( ReportLevel(..) 
) -import Distribution.Client.Config - ( loadConfig, getConfigFilePath ) -import Distribution.Client.HttpUtils - ( HttpTransport, configureTransport, transportCheckHttps - , downloadURI ) - -import Distribution.Solver.Types.SourcePackage -import Distribution.Solver.Types.Settings -import Distribution.Solver.Types.PackageConstraint - ( PackageProperty(..) ) - -import Distribution.Package - ( PackageName, PackageId, packageId, UnitId ) -import Distribution.Types.Dependency -import Distribution.System - ( Platform ) -import Distribution.Types.GenericPackageDescription - ( GenericPackageDescription ) -import Distribution.PackageDescription.Parsec - ( parseGenericPackageDescription ) -import Distribution.Parsec.ParseResult - ( runParseResult ) -import Distribution.Parsec.Common as NewParser - ( PError, PWarning, showPWarning ) -import Distribution.Types.SourceRepo - ( SourceRepo(..), RepoType(..), ) -import Distribution.Simple.Compiler - ( Compiler, compilerInfo ) -import Distribution.Simple.Program - ( ConfiguredProgram(..) 
) -import Distribution.Simple.Setup - ( Flag(Flag), toFlag, flagToMaybe, flagToList - , fromFlag, fromFlagOrDefault ) -import Distribution.Client.Setup - ( defaultSolver, defaultMaxBackjumps ) -import Distribution.Simple.InstallDirs - ( PathTemplate, fromPathTemplate - , toPathTemplate, substPathTemplate, initialPathTemplateEnv ) -import Distribution.Simple.Utils - ( die', warn, notice, info, createDirectoryIfMissingVerbose ) -import Distribution.Client.Utils - ( determineNumJobs ) -import Distribution.Utils.NubList - ( fromNubList ) -import Distribution.Verbosity - ( Verbosity, modifyVerbosity, verbose ) -import Distribution.Version - ( Version ) -import Distribution.Text -import Distribution.ParseUtils as OldParser - ( ParseResult(..), locatedErrorMsg, showPWarning ) - -import qualified Codec.Archive.Tar as Tar -import qualified Codec.Archive.Tar.Entry as Tar -import qualified Distribution.Client.Tar as Tar -import qualified Distribution.Client.GZipUtils as GZipUtils - -import Control.Monad -import Control.Monad.Trans (liftIO) -import Control.Exception -import Data.Either -import qualified Data.ByteString as BS -import qualified Data.ByteString.Lazy as LBS -import qualified Data.Map as Map -import Data.Set (Set) -import qualified Data.Set as Set -import qualified Data.Hashable as Hashable -import Numeric (showHex) - -import System.FilePath hiding (combine) -import System.IO - ( withBinaryFile, IOMode(ReadMode) ) -import System.Directory -import Network.URI - ( URI(..), URIAuth(..), parseAbsoluteURI, uriToString ) - - ----------------------------------------- --- Resolving configuration to settings --- - --- | Look up a 'PackageConfig' field in the 'ProjectConfig' for a specific --- 'PackageName'. This returns the configuration that applies to all local --- packages plus any package-specific configuration for this package. 
--- -lookupLocalPackageConfig :: (Semigroup a, Monoid a) - => (PackageConfig -> a) - -> ProjectConfig - -> PackageName -> a -lookupLocalPackageConfig field ProjectConfig { - projectConfigLocalPackages, - projectConfigSpecificPackage - } pkgname = - field projectConfigLocalPackages - <> maybe mempty field - (Map.lookup pkgname (getMapMappend projectConfigSpecificPackage)) - - --- | Use a 'RepoContext' based on the 'BuildTimeSettings'. --- -projectConfigWithBuilderRepoContext :: Verbosity - -> BuildTimeSettings - -> (RepoContext -> IO a) -> IO a -projectConfigWithBuilderRepoContext verbosity BuildTimeSettings{..} = - withRepoContext' - verbosity - buildSettingRemoteRepos - buildSettingLocalRepos - buildSettingCacheDir - buildSettingHttpTransport - (Just buildSettingIgnoreExpiry) - buildSettingProgPathExtra - - --- | Use a 'RepoContext', but only for the solver. The solver does not use the --- full facilities of the 'RepoContext' so we can get away with making one --- that doesn't have an http transport. And that avoids having to have access --- to the 'BuildTimeSettings' --- -projectConfigWithSolverRepoContext :: Verbosity - -> ProjectConfigShared - -> ProjectConfigBuildOnly - -> (RepoContext -> IO a) -> IO a -projectConfigWithSolverRepoContext verbosity - ProjectConfigShared{..} - ProjectConfigBuildOnly{..} = - withRepoContext' - verbosity - (fromNubList projectConfigRemoteRepos) - (fromNubList projectConfigLocalRepos) - (fromFlagOrDefault (error "projectConfigWithSolverRepoContext: projectConfigCacheDir") - projectConfigCacheDir) - (flagToMaybe projectConfigHttpTransport) - (flagToMaybe projectConfigIgnoreExpiry) - (fromNubList projectConfigProgPathExtra) - - --- | Resolve the project configuration, with all its optional fields, into --- 'SolverSettings' with no optional fields (by applying defaults). 
--- -resolveSolverSettings :: ProjectConfig -> SolverSettings -resolveSolverSettings ProjectConfig{ - projectConfigShared, - projectConfigLocalPackages, - projectConfigSpecificPackage - } = - SolverSettings {..} - where - --TODO: [required eventually] some of these settings need validation, e.g. - -- the flag assignments need checking. - solverSettingRemoteRepos = fromNubList projectConfigRemoteRepos - solverSettingLocalRepos = fromNubList projectConfigLocalRepos - solverSettingConstraints = projectConfigConstraints - solverSettingPreferences = projectConfigPreferences - solverSettingFlagAssignment = packageConfigFlagAssignment projectConfigLocalPackages - solverSettingFlagAssignments = fmap packageConfigFlagAssignment - (getMapMappend projectConfigSpecificPackage) - solverSettingCabalVersion = flagToMaybe projectConfigCabalVersion - solverSettingSolver = fromFlag projectConfigSolver - solverSettingAllowOlder = fromMaybe mempty projectConfigAllowOlder - solverSettingAllowNewer = fromMaybe mempty projectConfigAllowNewer - solverSettingMaxBackjumps = case fromFlag projectConfigMaxBackjumps of - n | n < 0 -> Nothing - | otherwise -> Just n - solverSettingReorderGoals = fromFlag projectConfigReorderGoals - solverSettingCountConflicts = fromFlag projectConfigCountConflicts - solverSettingStrongFlags = fromFlag projectConfigStrongFlags - solverSettingAllowBootLibInstalls = fromFlag projectConfigAllowBootLibInstalls - solverSettingIndexState = flagToMaybe projectConfigIndexState - solverSettingIndependentGoals = fromFlag projectConfigIndependentGoals - --solverSettingShadowPkgs = fromFlag projectConfigShadowPkgs - --solverSettingReinstall = fromFlag projectConfigReinstall - --solverSettingAvoidReinstalls = fromFlag projectConfigAvoidReinstalls - --solverSettingOverrideReinstall = fromFlag projectConfigOverrideReinstall - --solverSettingUpgradeDeps = fromFlag projectConfigUpgradeDeps - - ProjectConfigShared {..} = defaults <> projectConfigShared - - defaults = mempty { - 
projectConfigSolver = Flag defaultSolver, - projectConfigAllowOlder = Just (AllowOlder mempty), - projectConfigAllowNewer = Just (AllowNewer mempty), - projectConfigMaxBackjumps = Flag defaultMaxBackjumps, - projectConfigReorderGoals = Flag (ReorderGoals False), - projectConfigCountConflicts = Flag (CountConflicts True), - projectConfigStrongFlags = Flag (StrongFlags False), - projectConfigAllowBootLibInstalls = Flag (AllowBootLibInstalls False), - projectConfigIndependentGoals = Flag (IndependentGoals False) - --projectConfigShadowPkgs = Flag False, - --projectConfigReinstall = Flag False, - --projectConfigAvoidReinstalls = Flag False, - --projectConfigOverrideReinstall = Flag False, - --projectConfigUpgradeDeps = Flag False - } - - --- | Resolve the project configuration, with all its optional fields, into --- 'BuildTimeSettings' with no optional fields (by applying defaults). --- -resolveBuildTimeSettings :: Verbosity - -> CabalDirLayout - -> ProjectConfig - -> BuildTimeSettings -resolveBuildTimeSettings verbosity - CabalDirLayout { - cabalLogsDirectory - } - ProjectConfig { - projectConfigShared = ProjectConfigShared { - projectConfigRemoteRepos, - projectConfigLocalRepos, - projectConfigProgPathExtra - }, - projectConfigBuildOnly - } = - BuildTimeSettings {..} - where - buildSettingDryRun = fromFlag projectConfigDryRun - buildSettingOnlyDeps = fromFlag projectConfigOnlyDeps - buildSettingSummaryFile = fromNubList projectConfigSummaryFile - --buildSettingLogFile -- defined below, more complicated - --buildSettingLogVerbosity -- defined below, more complicated - buildSettingBuildReports = fromFlag projectConfigBuildReports - buildSettingSymlinkBinDir = flagToList projectConfigSymlinkBinDir - buildSettingOneShot = fromFlag projectConfigOneShot - buildSettingNumJobs = determineNumJobs projectConfigNumJobs - buildSettingKeepGoing = fromFlag projectConfigKeepGoing - buildSettingOfflineMode = fromFlag projectConfigOfflineMode - buildSettingKeepTempFiles = fromFlag 
projectConfigKeepTempFiles - buildSettingRemoteRepos = fromNubList projectConfigRemoteRepos - buildSettingLocalRepos = fromNubList projectConfigLocalRepos - buildSettingCacheDir = fromFlag projectConfigCacheDir - buildSettingHttpTransport = flagToMaybe projectConfigHttpTransport - buildSettingIgnoreExpiry = fromFlag projectConfigIgnoreExpiry - buildSettingReportPlanningFailure - = fromFlag projectConfigReportPlanningFailure - buildSettingProgPathExtra = fromNubList projectConfigProgPathExtra - - ProjectConfigBuildOnly{..} = defaults - <> projectConfigBuildOnly - - defaults = mempty { - projectConfigDryRun = toFlag False, - projectConfigOnlyDeps = toFlag False, - projectConfigBuildReports = toFlag NoReports, - projectConfigReportPlanningFailure = toFlag False, - projectConfigKeepGoing = toFlag False, - projectConfigOneShot = toFlag False, - projectConfigOfflineMode = toFlag False, - projectConfigKeepTempFiles = toFlag False, - projectConfigIgnoreExpiry = toFlag False - } - - -- The logging logic: what log file to use and what verbosity. - -- - -- If the user has specified --remote-build-reporting=detailed, use the - -- default log file location. If the --build-log option is set, use the - -- provided location. Otherwise don't use logging, unless building in - -- parallel (in which case the default location is used). 
- -- - buildSettingLogFile :: Maybe (Compiler -> Platform - -> PackageId -> UnitId -> FilePath) - buildSettingLogFile - | useDefaultTemplate = Just (substLogFileName defaultTemplate) - | otherwise = fmap substLogFileName givenTemplate - - defaultTemplate = toPathTemplate $ - cabalLogsDirectory - "$compiler" "$libname" <.> "log" - givenTemplate = flagToMaybe projectConfigLogFile - - useDefaultTemplate - | buildSettingBuildReports == DetailedReports = True - | isJust givenTemplate = False - | isParallelBuild = True - | otherwise = False - - isParallelBuild = buildSettingNumJobs >= 2 - - substLogFileName :: PathTemplate - -> Compiler -> Platform - -> PackageId -> UnitId -> FilePath - substLogFileName template compiler platform pkgid uid = - fromPathTemplate (substPathTemplate env template) - where - env = initialPathTemplateEnv - pkgid uid (compilerInfo compiler) platform - - -- If the user has specified --remote-build-reporting=detailed or - -- --build-log, use more verbose logging. - -- - buildSettingLogVerbosity - | overrideVerbosity = modifyVerbosity (max verbose) verbosity - | otherwise = verbosity - - overrideVerbosity - | buildSettingBuildReports == DetailedReports = True - | isJust givenTemplate = True - | isParallelBuild = False - | otherwise = False - - ---------------------------------------------- --- Reading and writing project config files --- - --- | Find the root of this project. --- --- Searches for an explicit @cabal.project@ file, in the current directory or --- parent directories. If no project file is found then the current dir is the --- project root (and the project will use an implicit config). 
--- -findProjectRoot :: Maybe FilePath -- ^ starting directory, or current directory - -> Maybe FilePath -- ^ @cabal.project@ file name override - -> IO (Either BadProjectRoot ProjectRoot) -findProjectRoot _ (Just projectFile) - | isAbsolute projectFile = do - exists <- doesFileExist projectFile - if exists - then do projectFile' <- canonicalizePath projectFile - let projectRoot = ProjectRootExplicit (takeDirectory projectFile') - (takeFileName projectFile') - return (Right projectRoot) - else return (Left (BadProjectRootExplicitFile projectFile)) - -findProjectRoot mstartdir mprojectFile = do - startdir <- maybe getCurrentDirectory canonicalizePath mstartdir - homedir <- getHomeDirectory - probe startdir homedir - where - projectFileName = fromMaybe "cabal.project" mprojectFile - - -- Search upwards. If we get to the users home dir or the filesystem root, - -- then use the current dir - probe startdir homedir = go startdir - where - go dir | isDrive dir || dir == homedir = - case mprojectFile of - Nothing -> return (Right (ProjectRootImplicit startdir)) - Just file -> return (Left (BadProjectRootExplicitFile file)) - go dir = do - exists <- doesFileExist (dir projectFileName) - if exists - then return (Right (ProjectRootExplicit dir projectFileName)) - else go (takeDirectory dir) - - --TODO: [nice to have] add compat support for old style sandboxes - - --- | Errors returned by 'findProjectRoot'. --- -data BadProjectRoot = BadProjectRootExplicitFile FilePath -#if MIN_VERSION_base(4,8,0) - deriving (Show, Typeable) -#else - deriving (Typeable) - -instance Show BadProjectRoot where - show = renderBadProjectRoot -#endif - -instance Exception BadProjectRoot where -#if MIN_VERSION_base(4,8,0) - displayException = renderBadProjectRoot -#endif - -renderBadProjectRoot :: BadProjectRoot -> String -renderBadProjectRoot (BadProjectRootExplicitFile projectFile) = - "The given project file '" ++ projectFile ++ "' does not exist." 
- -withProjectOrGlobalConfig :: Verbosity - -> Flag FilePath - -> IO a - -> (ProjectConfig -> IO a) - -> IO a -withProjectOrGlobalConfig verbosity globalConfigFlag with without = do - globalConfig <- runRebuild "" $ readGlobalConfig verbosity globalConfigFlag - - let - res' = catch with - $ \case - (BadPackageLocations prov locs) - | prov == Set.singleton Implicit - , let - isGlobErr (BadLocGlobEmptyMatch _) = True - isGlobErr _ = False - , any isGlobErr locs -> - without globalConfig - err -> throwIO err - - catch res' - $ \case - (BadProjectRootExplicitFile "") -> without globalConfig - err -> throwIO err - --- | Read all the config relevant for a project. This includes the project --- file if any, plus other global config. --- -readProjectConfig :: Verbosity - -> Flag FilePath - -> DistDirLayout - -> Rebuild ProjectConfig -readProjectConfig verbosity configFileFlag distDirLayout = do - global <- readGlobalConfig verbosity configFileFlag - local <- readProjectLocalConfigOrDefault verbosity distDirLayout - freeze <- readProjectLocalFreezeConfig verbosity distDirLayout - extra <- readProjectLocalExtraConfig verbosity distDirLayout - return (global <> local <> freeze <> extra) - - --- | Reads an explicit @cabal.project@ file in the given project root dir, --- or returns the default project config for an implicitly defined project. --- -readProjectLocalConfigOrDefault :: Verbosity - -> DistDirLayout - -> Rebuild ProjectConfig -readProjectLocalConfigOrDefault verbosity distDirLayout = do - usesExplicitProjectRoot <- liftIO $ doesFileExist projectFile - if usesExplicitProjectRoot - then do - readProjectFile verbosity distDirLayout "" "project file" - else do - monitorFiles [monitorNonExistentFile projectFile] - return defaultImplicitProjectConfig - - where - projectFile = distProjectFile distDirLayout "" - - defaultImplicitProjectConfig :: ProjectConfig - defaultImplicitProjectConfig = - mempty { - -- We expect a package in the current directory. 
- projectPackages = [ "./*.cabal" ], - - -- This is to automatically pick up deps that we unpack locally. - projectPackagesOptional = [ "./*/*.cabal" ], - - projectConfigProvenance = Set.singleton Implicit - } - --- | Reads a @cabal.project.local@ file in the given project root dir, --- or returns empty. This file gets written by @cabal configure@, or in --- principle can be edited manually or by other tools. --- -readProjectLocalExtraConfig :: Verbosity -> DistDirLayout - -> Rebuild ProjectConfig -readProjectLocalExtraConfig verbosity distDirLayout = - readProjectFile verbosity distDirLayout "local" - "project local configuration file" - --- | Reads a @cabal.project.freeze@ file in the given project root dir, --- or returns empty. This file gets written by @cabal freeze@, or in --- principle can be edited manually or by other tools. --- -readProjectLocalFreezeConfig :: Verbosity -> DistDirLayout - -> Rebuild ProjectConfig -readProjectLocalFreezeConfig verbosity distDirLayout = - readProjectFile verbosity distDirLayout "freeze" - "project freeze file" - --- | Reads a named config file in the given project root dir, or returns empty. --- -readProjectFile :: Verbosity - -> DistDirLayout - -> String - -> String - -> Rebuild ProjectConfig -readProjectFile verbosity DistDirLayout{distProjectFile} - extensionName extensionDescription = do - exists <- liftIO $ doesFileExist extensionFile - if exists - then do monitorFiles [monitorFileHashed extensionFile] - addProjectFileProvenance <$> liftIO readExtensionFile - else do monitorFiles [monitorNonExistentFile extensionFile] - return mempty - where - extensionFile = distProjectFile extensionName - - readExtensionFile = - reportParseResult verbosity extensionDescription extensionFile - . parseProjectConfig - =<< readFile extensionFile - - addProjectFileProvenance config = - config { - projectConfigProvenance = - Set.insert (Explicit extensionFile) (projectConfigProvenance config) - } - - --- | Parse the 'ProjectConfig' format. 
--- --- For the moment this is implemented in terms of parsers for legacy --- configuration types, plus a conversion. --- -parseProjectConfig :: String -> ParseResult ProjectConfig -parseProjectConfig content = - convertLegacyProjectConfig <$> - parseLegacyProjectConfig content - - --- | Render the 'ProjectConfig' format. --- --- For the moment this is implemented in terms of a pretty printer for the --- legacy configuration types, plus a conversion. --- -showProjectConfig :: ProjectConfig -> String -showProjectConfig = - showLegacyProjectConfig . convertToLegacyProjectConfig - - --- | Write a @cabal.project.local@ file in the given project root dir. --- -writeProjectLocalExtraConfig :: DistDirLayout -> ProjectConfig -> IO () -writeProjectLocalExtraConfig DistDirLayout{distProjectFile} = - writeProjectConfigFile (distProjectFile "local") - - --- | Write a @cabal.project.freeze@ file in the given project root dir. --- -writeProjectLocalFreezeConfig :: DistDirLayout -> ProjectConfig -> IO () -writeProjectLocalFreezeConfig DistDirLayout{distProjectFile} = - writeProjectConfigFile (distProjectFile "freeze") - - --- | Write in the @cabal.project@ format to the given file. --- -writeProjectConfigFile :: FilePath -> ProjectConfig -> IO () -writeProjectConfigFile file = - writeFile file . showProjectConfig - - --- | Read the user's @~/.cabal/config@ file. 
--- -readGlobalConfig :: Verbosity -> Flag FilePath -> Rebuild ProjectConfig -readGlobalConfig verbosity configFileFlag = do - config <- liftIO (loadConfig verbosity configFileFlag) - configFile <- liftIO (getConfigFilePath configFileFlag) - monitorFiles [monitorFileHashed configFile] - return (convertLegacyGlobalConfig config) - -reportParseResult :: Verbosity -> String -> FilePath -> ParseResult a -> IO a -reportParseResult verbosity _filetype filename (ParseOk warnings x) = do - unless (null warnings) $ - let msg = unlines (map (OldParser.showPWarning filename) warnings) - in warn verbosity msg - return x -reportParseResult verbosity filetype filename (ParseFailed err) = - let (line, msg) = locatedErrorMsg err - in die' verbosity $ "Error parsing " ++ filetype ++ " " ++ filename - ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg - - ---------------------------------------------- --- Finding packages in the project --- - --- | The location of a package as part of a project. Local file paths are --- either absolute (if the user specified it as such) or they are relative --- to the project root. --- -data ProjectPackageLocation = - ProjectPackageLocalCabalFile FilePath - | ProjectPackageLocalDirectory FilePath FilePath -- dir and .cabal file - | ProjectPackageLocalTarball FilePath - | ProjectPackageRemoteTarball URI - | ProjectPackageRemoteRepo SourceRepo - | ProjectPackageNamed Dependency - deriving Show - - --- | Exception thrown by 'findProjectPackages'. 
--- -data BadPackageLocations - = BadPackageLocations (Set ProjectConfigProvenance) [BadPackageLocation] -#if MIN_VERSION_base(4,8,0) - deriving (Show, Typeable) -#else - deriving (Typeable) - -instance Show BadPackageLocations where - show = renderBadPackageLocations -#endif - -instance Exception BadPackageLocations where -#if MIN_VERSION_base(4,8,0) - displayException = renderBadPackageLocations -#endif ---TODO: [nice to have] custom exception subclass for Doc rendering, colour etc - -data BadPackageLocation - = BadPackageLocationFile BadPackageLocationMatch - | BadLocGlobEmptyMatch String - | BadLocGlobBadMatches String [BadPackageLocationMatch] - | BadLocUnexpectedUriScheme String - | BadLocUnrecognisedUri String - | BadLocUnrecognised String - deriving Show - -data BadPackageLocationMatch - = BadLocUnexpectedFile String - | BadLocNonexistantFile String - | BadLocDirNoCabalFile String - | BadLocDirManyCabalFiles String - deriving Show - -renderBadPackageLocations :: BadPackageLocations -> String -renderBadPackageLocations (BadPackageLocations provenance bpls) - -- There is no provenance information, - -- render standard bad package error information. - | Set.null provenance = renderErrors renderBadPackageLocation - - -- The configuration is implicit, render bad package locations - -- using possibly specialized error messages. - | Set.singleton Implicit == provenance = - renderErrors renderImplicitBadPackageLocation - - -- The configuration contains both implicit and explicit provenance. - -- This should not occur, and a message is output to assist debugging. - | Implicit `Set.member` provenance = - "Warning: both implicit and explicit configuration is present." - ++ renderExplicit - - -- The configuration was read from one or more explicit path(s), - -- list the locations and render the bad package error information. - -- The intent is to supersede this with the relevant location information - -- per package error. 
- | otherwise = renderExplicit - where - renderErrors f = unlines (map f bpls) - - renderExplicit = - "When using configuration(s) from " - ++ intercalate ", " (mapMaybe getExplicit (Set.toList provenance)) - ++ ", the following errors occurred:\n" - ++ renderErrors renderBadPackageLocation - - getExplicit (Explicit path) = Just path - getExplicit Implicit = Nothing - ---TODO: [nice to have] keep track of the config file (and src loc) packages --- were listed, to use in error messages - --- | Render bad package location error information for the implicit --- @cabal.project@ configuration. --- --- TODO: This is currently not fully realized, with only one of the implicit --- cases handled. More cases should be added with informative help text --- about the issues related specifically when having no project configuration --- is present. -renderImplicitBadPackageLocation :: BadPackageLocation -> String -renderImplicitBadPackageLocation bpl = case bpl of - BadLocGlobEmptyMatch pkglocstr -> - "No cabal.project file or cabal file matching the default glob '" - ++ pkglocstr ++ "' was found.\n" - ++ "Please create a package description file .cabal " - ++ "or a cabal.project file referencing the packages you " - ++ "want to build." - _ -> renderBadPackageLocation bpl - -renderBadPackageLocation :: BadPackageLocation -> String -renderBadPackageLocation bpl = case bpl of - BadPackageLocationFile badmatch -> - renderBadPackageLocationMatch badmatch - BadLocGlobEmptyMatch pkglocstr -> - "The package location glob '" ++ pkglocstr - ++ "' does not match any files or directories." - BadLocGlobBadMatches pkglocstr failures -> - "The package location glob '" ++ pkglocstr ++ "' does not match any " - ++ "recognised forms of package. " - ++ concatMap ((' ':) . renderBadPackageLocationMatch) failures - BadLocUnexpectedUriScheme pkglocstr -> - "The package location URI '" ++ pkglocstr ++ "' does not use a " - ++ "supported URI scheme. 
The supported URI schemes are http, https and " - ++ "file." - BadLocUnrecognisedUri pkglocstr -> - "The package location URI '" ++ pkglocstr ++ "' does not appear to " - ++ "be a valid absolute URI." - BadLocUnrecognised pkglocstr -> - "The package location syntax '" ++ pkglocstr ++ "' is not recognised." - -renderBadPackageLocationMatch :: BadPackageLocationMatch -> String -renderBadPackageLocationMatch bplm = case bplm of - BadLocUnexpectedFile pkglocstr -> - "The package location '" ++ pkglocstr ++ "' is not recognised. The " - ++ "supported file targets are .cabal files, .tar.gz tarballs or package " - ++ "directories (i.e. directories containing a .cabal file)." - BadLocNonexistantFile pkglocstr -> - "The package location '" ++ pkglocstr ++ "' does not exist." - BadLocDirNoCabalFile pkglocstr -> - "The package directory '" ++ pkglocstr ++ "' does not contain any " - ++ ".cabal file." - BadLocDirManyCabalFiles pkglocstr -> - "The package directory '" ++ pkglocstr ++ "' contains multiple " - ++ ".cabal files (which is not currently supported)." - --- | Given the project config, --- --- Throws 'BadPackageLocations'. 
--- -findProjectPackages :: DistDirLayout -> ProjectConfig - -> Rebuild [ProjectPackageLocation] -findProjectPackages DistDirLayout{distProjectRootDirectory} - ProjectConfig{..} = do - - requiredPkgs <- findPackageLocations True projectPackages - optionalPkgs <- findPackageLocations False projectPackagesOptional - let repoPkgs = map ProjectPackageRemoteRepo projectPackagesRepo - namedPkgs = map ProjectPackageNamed projectPackagesNamed - - return (concat [requiredPkgs, optionalPkgs, repoPkgs, namedPkgs]) - where - findPackageLocations required pkglocstr = do - (problems, pkglocs) <- - partitionEithers <$> mapM (findPackageLocation required) pkglocstr - unless (null problems) $ - liftIO $ throwIO $ BadPackageLocations projectConfigProvenance problems - return (concat pkglocs) - - - findPackageLocation :: Bool -> String - -> Rebuild (Either BadPackageLocation - [ProjectPackageLocation]) - findPackageLocation _required@True pkglocstr = - -- strategy: try first as a file:// or http(s):// URL. 
- -- then as a file glob (usually encompassing single file) - -- finally as a single file, for files that fail to parse as globs - checkIsUriPackage pkglocstr - `mplusMaybeT` checkIsFileGlobPackage pkglocstr - `mplusMaybeT` checkIsSingleFilePackage pkglocstr - >>= maybe (return (Left (BadLocUnrecognised pkglocstr))) return - - - findPackageLocation _required@False pkglocstr = do - -- just globs for optional case - res <- checkIsFileGlobPackage pkglocstr - case res of - Nothing -> return (Left (BadLocUnrecognised pkglocstr)) - Just (Left _) -> return (Right []) -- it's optional - Just (Right pkglocs) -> return (Right pkglocs) - - - checkIsUriPackage, checkIsFileGlobPackage, checkIsSingleFilePackage - :: String -> Rebuild (Maybe (Either BadPackageLocation - [ProjectPackageLocation])) - checkIsUriPackage pkglocstr = - case parseAbsoluteURI pkglocstr of - Just uri@URI { - uriScheme = scheme, - uriAuthority = Just URIAuth { uriRegName = host }, - uriPath = path, - uriQuery = query, - uriFragment = frag - } - | recognisedScheme && not (null host) -> - return (Just (Right [ProjectPackageRemoteTarball uri])) - - | scheme == "file:" && null host && null query && null frag -> - checkIsSingleFilePackage path - - | not recognisedScheme && not (null host) -> - return (Just (Left (BadLocUnexpectedUriScheme pkglocstr))) - - | recognisedScheme && null host -> - return (Just (Left (BadLocUnrecognisedUri pkglocstr))) - where - recognisedScheme = scheme == "http:" || scheme == "https:" - || scheme == "file:" - - _ -> return Nothing - - - checkIsFileGlobPackage pkglocstr = - case simpleParse pkglocstr of - Nothing -> return Nothing - Just glob -> liftM Just $ do - matches <- matchFileGlob glob - case matches of - [] | isJust (isTrivialFilePathGlob glob) - -> return (Left (BadPackageLocationFile - (BadLocNonexistantFile pkglocstr))) - - [] -> return (Left (BadLocGlobEmptyMatch pkglocstr)) - - _ -> do - (failures, pkglocs) <- partitionEithers <$> - mapM checkFilePackageMatch matches - 
return $! case (failures, pkglocs) of - ([failure], []) | isJust (isTrivialFilePathGlob glob) - -> Left (BadPackageLocationFile failure) - (_, []) -> Left (BadLocGlobBadMatches pkglocstr failures) - _ -> Right pkglocs - - - checkIsSingleFilePackage pkglocstr = do - let filename = distProjectRootDirectory pkglocstr - isFile <- liftIO $ doesFileExist filename - isDir <- liftIO $ doesDirectoryExist filename - if isFile || isDir - then checkFilePackageMatch pkglocstr - >>= either (return . Just . Left . BadPackageLocationFile) - (return . Just . Right . (\x->[x])) - else return Nothing - - - checkFilePackageMatch :: String -> Rebuild (Either BadPackageLocationMatch - ProjectPackageLocation) - checkFilePackageMatch pkglocstr = do - -- The pkglocstr may be absolute or may be relative to the project root. - -- Either way, does the right thing here. We return relative paths if - -- they were relative in the first place. - let abspath = distProjectRootDirectory pkglocstr - isFile <- liftIO $ doesFileExist abspath - isDir <- liftIO $ doesDirectoryExist abspath - parentDirExists <- case takeDirectory abspath of - [] -> return False - dir -> liftIO $ doesDirectoryExist dir - case () of - _ | isDir - -> do matches <- matchFileGlob (globStarDotCabal pkglocstr) - case matches of - [cabalFile] - -> return (Right (ProjectPackageLocalDirectory - pkglocstr cabalFile)) - [] -> return (Left (BadLocDirNoCabalFile pkglocstr)) - _ -> return (Left (BadLocDirManyCabalFiles pkglocstr)) - - | extensionIsTarGz pkglocstr - -> return (Right (ProjectPackageLocalTarball pkglocstr)) - - | takeExtension pkglocstr == ".cabal" - -> return (Right (ProjectPackageLocalCabalFile pkglocstr)) - - | isFile - -> return (Left (BadLocUnexpectedFile pkglocstr)) - - | parentDirExists - -> return (Left (BadLocNonexistantFile pkglocstr)) - - | otherwise - -> return (Left (BadLocUnexpectedFile pkglocstr)) - - - extensionIsTarGz f = takeExtension f == ".gz" - && takeExtension (dropExtension f) == ".tar" - - --- | A 
glob to find all the cabal files in a directory. --- --- For a directory @some/dir/@, this is a glob of the form @some/dir/\*.cabal@. --- The directory part can be either absolute or relative. --- -globStarDotCabal :: FilePath -> FilePathGlob -globStarDotCabal dir = - FilePathGlob - (if isAbsolute dir then FilePathRoot root else FilePathRelative) - (foldr (\d -> GlobDir [Literal d]) - (GlobFile [WildCard, Literal ".cabal"]) dirComponents) - where - (root, dirComponents) = fmap splitDirectories (splitDrive dir) - - ---TODO: [code cleanup] use sufficiently recent transformers package -mplusMaybeT :: Monad m => m (Maybe a) -> m (Maybe a) -> m (Maybe a) -mplusMaybeT ma mb = do - mx <- ma - case mx of - Nothing -> mb - Just x -> return (Just x) - - -------------------------------------------------- --- Fetching and reading packages in the project --- - --- | Read the @.cabal@ files for a set of packages. For remote tarballs and --- VCS source repos this also fetches them if needed. --- --- Note here is where we convert from project-root relative paths to absolute --- paths. 
--- -fetchAndReadSourcePackages - :: Verbosity - -> DistDirLayout - -> ProjectConfigShared - -> ProjectConfigBuildOnly - -> [ProjectPackageLocation] - -> Rebuild [PackageSpecifier (SourcePackage UnresolvedPkgLoc)] -fetchAndReadSourcePackages verbosity distDirLayout - projectConfigShared - projectConfigBuildOnly - pkgLocations = do - - pkgsLocalDirectory <- - sequence - [ readSourcePackageLocalDirectory verbosity dir cabalFile - | location <- pkgLocations - , (dir, cabalFile) <- projectPackageLocal location ] - - pkgsLocalTarball <- - sequence - [ readSourcePackageLocalTarball verbosity path - | ProjectPackageLocalTarball path <- pkgLocations ] - - pkgsRemoteTarball <- do - getTransport <- delayInitSharedResource $ - configureTransport verbosity progPathExtra - preferredHttpTransport - sequence - [ fetchAndReadSourcePackageRemoteTarball verbosity distDirLayout - getTransport uri - | ProjectPackageRemoteTarball uri <- pkgLocations ] - - pkgsRemoteRepo <- - syncAndReadSourcePackagesRemoteRepos - verbosity distDirLayout - projectConfigShared - [ repo | ProjectPackageRemoteRepo repo <- pkgLocations ] - - let pkgsNamed = - [ NamedPackage pkgname [PackagePropertyVersion verrange] - | ProjectPackageNamed (Dependency pkgname verrange) <- pkgLocations ] - - return $ concat - [ pkgsLocalDirectory - , pkgsLocalTarball - , pkgsRemoteTarball - , pkgsRemoteRepo - , pkgsNamed - ] - where - projectPackageLocal (ProjectPackageLocalDirectory dir file) = [(dir, file)] - projectPackageLocal (ProjectPackageLocalCabalFile file) = [(dir, file)] - where dir = takeDirectory file - projectPackageLocal _ = [] - - progPathExtra = fromNubList (projectConfigProgPathExtra projectConfigShared) - preferredHttpTransport = - flagToMaybe (projectConfigHttpTransport projectConfigBuildOnly) - --- | A helper for 'fetchAndReadSourcePackages' to handle the case of --- 'ProjectPackageLocalDirectory' and 'ProjectPackageLocalCabalFile'. --- We simply read the @.cabal@ file. 
--- -readSourcePackageLocalDirectory - :: Verbosity - -> FilePath -- ^ The package directory - -> FilePath -- ^ The package @.cabal@ file - -> Rebuild (PackageSpecifier (SourcePackage UnresolvedPkgLoc)) -readSourcePackageLocalDirectory verbosity dir cabalFile = do - monitorFiles [monitorFileHashed cabalFile] - root <- askRoot - let location = LocalUnpackedPackage (root dir) - liftIO $ fmap (mkSpecificSourcePackage location) - . readSourcePackageCabalFile verbosity cabalFile - =<< BS.readFile (root cabalFile) - - --- | A helper for 'fetchAndReadSourcePackages' to handle the case of --- 'ProjectPackageLocalTarball'. We scan through the @.tar.gz@ file to find --- the @.cabal@ file and read that. --- -readSourcePackageLocalTarball - :: Verbosity - -> FilePath - -> Rebuild (PackageSpecifier (SourcePackage UnresolvedPkgLoc)) -readSourcePackageLocalTarball verbosity tarballFile = do - monitorFiles [monitorFile tarballFile] - root <- askRoot - let location = LocalTarballPackage (root tarballFile) - liftIO $ fmap (mkSpecificSourcePackage location) - . uncurry (readSourcePackageCabalFile verbosity) - =<< extractTarballPackageCabalFile (root tarballFile) - - --- | A helper for 'fetchAndReadSourcePackages' to handle the case of --- 'ProjectPackageRemoteTarball'. We download the tarball to the dist src dir --- and after that handle it like the local tarball case. --- -fetchAndReadSourcePackageRemoteTarball - :: Verbosity - -> DistDirLayout - -> Rebuild HttpTransport - -> URI - -> Rebuild (PackageSpecifier (SourcePackage UnresolvedPkgLoc)) -fetchAndReadSourcePackageRemoteTarball verbosity - DistDirLayout { - distDownloadSrcDirectory - } - getTransport - tarballUri = - -- The tarball download is expensive so we use another layer of file - -- monitor to avoid it whenever possible. 
- rerunIfChanged verbosity monitor tarballUri $ do - - -- Download - transport <- getTransport - liftIO $ do - transportCheckHttps verbosity transport tarballUri - notice verbosity ("Downloading " ++ show tarballUri) - createDirectoryIfMissingVerbose verbosity True - distDownloadSrcDirectory - _ <- downloadURI transport verbosity tarballUri tarballFile - return () - - -- Read - monitorFiles [monitorFile tarballFile] - let location = RemoteTarballPackage tarballUri tarballFile - liftIO $ fmap (mkSpecificSourcePackage location) - . uncurry (readSourcePackageCabalFile verbosity) - =<< extractTarballPackageCabalFile tarballFile - where - tarballStem = distDownloadSrcDirectory - localFileNameForRemoteTarball tarballUri - tarballFile = tarballStem <.> "tar.gz" - - monitor :: FileMonitor URI (PackageSpecifier (SourcePackage UnresolvedPkgLoc)) - monitor = newFileMonitor (tarballStem <.> "cache") - - --- | A helper for 'fetchAndReadSourcePackages' to handle all the cases of --- 'ProjectPackageRemoteRepo'. --- -syncAndReadSourcePackagesRemoteRepos - :: Verbosity - -> DistDirLayout - -> ProjectConfigShared - -> [SourceRepo] - -> Rebuild [PackageSpecifier (SourcePackage UnresolvedPkgLoc)] -syncAndReadSourcePackagesRemoteRepos verbosity - DistDirLayout{distDownloadSrcDirectory} - ProjectConfigShared { - projectConfigProgPathExtra - } - repos = do - - repos' <- either reportSourceRepoProblems return $ - validateSourceRepos repos - - -- All 'SourceRepo's grouped by referring to the "same" remote repo - -- instance. So same location but can differ in commit/tag/branch/subdir. 
- let reposByLocation :: Map (RepoType, String) - [(SourceRepo, RepoType)] - reposByLocation = Map.fromListWith (++) - [ ((rtype, rloc), [(repo, vcsRepoType vcs)]) - | (repo, rloc, rtype, vcs) <- repos' ] - - --TODO: pass progPathExtra on to 'configureVCS' - let _progPathExtra = fromNubList projectConfigProgPathExtra - getConfiguredVCS <- delayInitSharedResources $ \repoType -> - let Just vcs = Map.lookup repoType knownVCSs in - configureVCS verbosity {-progPathExtra-} vcs - - concat <$> sequence - [ rerunIfChanged verbosity monitor repoGroup' $ do - vcs' <- getConfiguredVCS repoType - syncRepoGroupAndReadSourcePackages vcs' pathStem repoGroup' - | repoGroup@((primaryRepo, repoType):_) <- Map.elems reposByLocation - , let repoGroup' = map fst repoGroup - pathStem = distDownloadSrcDirectory - localFileNameForRemoteRepo primaryRepo - monitor :: FileMonitor - [SourceRepo] - [PackageSpecifier (SourcePackage UnresolvedPkgLoc)] - monitor = newFileMonitor (pathStem <.> "cache") - ] - where - syncRepoGroupAndReadSourcePackages - :: VCS ConfiguredProgram - -> FilePath - -> [SourceRepo] - -> Rebuild [PackageSpecifier (SourcePackage UnresolvedPkgLoc)] - syncRepoGroupAndReadSourcePackages vcs pathStem repoGroup = do - liftIO $ createDirectoryIfMissingVerbose verbosity False - distDownloadSrcDirectory - - -- For syncing we don't care about different 'SourceRepo' values that - -- are just different subdirs in the same repo. - syncSourceRepos verbosity vcs - [ (repo, repoPath) - | (repo, _, repoPath) <- repoGroupWithPaths ] - - -- But for reading we go through each 'SourceRepo' including its subdir - -- value and have to know which path each one ended up in. - sequence - [ readPackageFromSourceRepo repoWithSubdir repoPath - | (_, reposWithSubdir, repoPath) <- repoGroupWithPaths - , repoWithSubdir <- reposWithSubdir ] - where - -- So to do both things above, we pair them up here. 
- repoGroupWithPaths = - zipWith (\(x, y) z -> (x,y,z)) - (Map.toList - (Map.fromListWith (++) - [ (repo { repoSubdir = Nothing }, [repo]) - | repo <- repoGroup ])) - repoPaths - - -- The repos in a group are given distinct names by simple enumeration - -- foo, foo-2, foo-3 etc - repoPaths = pathStem - : [ pathStem ++ "-" ++ show (i :: Int) | i <- [2..] ] - - readPackageFromSourceRepo repo repoPath = do - let packageDir = maybe repoPath (repoPath ) (repoSubdir repo) - entries <- liftIO $ getDirectoryContents packageDir - --TODO: wrap exceptions - case filter (\e -> takeExtension e == ".cabal") entries of - [] -> liftIO $ throwIO NoCabalFileFound - (_:_:_) -> liftIO $ throwIO MultipleCabalFilesFound - [cabalFileName] -> do - monitorFiles [monitorFileHashed cabalFilePath] - liftIO $ fmap (mkSpecificSourcePackage location) - . readSourcePackageCabalFile verbosity cabalFilePath - =<< BS.readFile cabalFilePath - where - cabalFilePath = packageDir cabalFileName - location = RemoteSourceRepoPackage repo packageDir - - - reportSourceRepoProblems :: [(SourceRepo, SourceRepoProblem)] -> Rebuild a - reportSourceRepoProblems = liftIO . die' verbosity . renderSourceRepoProblems - - renderSourceRepoProblems :: [(SourceRepo, SourceRepoProblem)] -> String - renderSourceRepoProblems = unlines . map show -- "TODO: the repo problems" - - --- | Utility used by all the helpers of 'fetchAndReadSourcePackages' to make an --- appropriate @'PackageSpecifier' ('SourcePackage' (..))@ for a given package --- from a given location. 
--- -mkSpecificSourcePackage :: PackageLocation FilePath - -> GenericPackageDescription - -> PackageSpecifier - (SourcePackage (PackageLocation (Maybe FilePath))) -mkSpecificSourcePackage location pkg = - SpecificSourcePackage SourcePackage { - packageInfoId = packageId pkg, - packageDescription = pkg, - --TODO: it is silly that we still have to use a Maybe FilePath here - packageSource = fmap Just location, - packageDescrOverride = Nothing - } - - --- | Errors reported upon failing to parse a @.cabal@ file. --- -data CabalFileParseError = - CabalFileParseError - FilePath - [PError] - (Maybe Version) -- We might discover the spec version the package needs - [PWarning] - deriving (Show, Typeable) - -instance Exception CabalFileParseError - - --- | Wrapper for the @.cabal@ file parser. It reports warnings on higher --- verbosity levels and throws 'CabalFileParseError' on failure. --- -readSourcePackageCabalFile :: Verbosity - -> FilePath - -> BS.ByteString - -> IO GenericPackageDescription -readSourcePackageCabalFile verbosity pkgfilename content = - case runParseResult (parseGenericPackageDescription content) of - (warnings, Right pkg) -> do - unless (null warnings) $ - info verbosity (formatWarnings warnings) - return pkg - - (warnings, Left (mspecVersion, errors)) -> - throwIO $ CabalFileParseError pkgfilename errors mspecVersion warnings - where - formatWarnings warnings = - "The package description file " ++ pkgfilename - ++ " has warnings: " - ++ unlines (map (NewParser.showPWarning pkgfilename) warnings) - - --- | When looking for a package's @.cabal@ file we can find none, or several, --- both of which are failures. --- -data CabalFileSearchFailure = - NoCabalFileFound - | MultipleCabalFilesFound - deriving (Show, Typeable) - -instance Exception CabalFileSearchFailure - - --- | Find the @.cabal@ file within a tarball file and return it by value. --- --- Can fail with a 'Tar.FormatError' or 'CabalFileSearchFailure' exception. 
--- -extractTarballPackageCabalFile :: FilePath -> IO (FilePath, BS.ByteString) -extractTarballPackageCabalFile tarballFile = - withBinaryFile tarballFile ReadMode $ \hnd -> do - content <- LBS.hGetContents hnd - case extractTarballPackageCabalFilePure content of - Left (Left e) -> throwIO e - Left (Right e) -> throwIO e - Right (fileName, fileContent) -> - (,) fileName <$> evaluate (LBS.toStrict fileContent) - - --- | Scan through a tar file stream and collect the @.cabal@ file, or fail. --- -extractTarballPackageCabalFilePure :: LBS.ByteString - -> Either (Either Tar.FormatError - CabalFileSearchFailure) - (FilePath, LBS.ByteString) -extractTarballPackageCabalFilePure = - check - . accumEntryMap - . Tar.filterEntries isCabalFile - . Tar.read - . GZipUtils.maybeDecompress - where - accumEntryMap = Tar.foldlEntries - (\m e -> Map.insert (Tar.entryTarPath e) e m) - Map.empty - - check (Left (e, _m)) = Left (Left e) - check (Right m) = case Map.elems m of - [] -> Left (Right NoCabalFileFound) - [file] -> case Tar.entryContent file of - Tar.NormalFile content _ -> Right (Tar.entryPath file, content) - _ -> Left (Right NoCabalFileFound) - _files -> Left (Right MultipleCabalFilesFound) - - isCabalFile e = case splitPath (Tar.entryPath e) of - [ _dir, file] -> takeExtension file == ".cabal" - [".", _dir, file] -> takeExtension file == ".cabal" - _ -> False - - --- | The name to use for a local file for a remote tarball 'SourceRepo'. --- This is deterministic based on the remote tarball URI, and is intended --- to produce non-clashing file names for different tarballs. --- -localFileNameForRemoteTarball :: URI -> FilePath -localFileNameForRemoteTarball uri = - mangleName uri - ++ "-" ++ showHex locationHash "" - where - mangleName = truncateString 10 . dropExtension . dropExtension - . takeFileName . dropTrailingPathSeparator . 
uriPath - - locationHash :: Word - locationHash = fromIntegral (Hashable.hash (uriToString id uri "")) - - --- | The name to use for a local file or dir for a remote 'SourceRepo'. --- This is deterministic based on the source repo identity details, and --- intended to produce non-clashing file names for different repos. --- -localFileNameForRemoteRepo :: SourceRepo -> FilePath -localFileNameForRemoteRepo SourceRepo{repoType, repoLocation, repoModule} = - maybe "" ((++ "-") . mangleName) repoLocation - ++ showHex locationHash "" - where - mangleName = truncateString 10 . dropExtension - . takeFileName . dropTrailingPathSeparator - - -- just the parts that make up the "identity" of the repo - locationHash :: Word - locationHash = - fromIntegral (Hashable.hash (show repoType, repoLocation, repoModule)) - - --- | Truncate a string, with a visual indication that it is truncated. -truncateString :: Int -> String -> String -truncateString n s | length s <= n = s - | otherwise = take (n-1) s ++ "_" - - --- TODO: add something like this, here or in the project planning --- Based on the package location, which packages will be built inplace in the --- build tree vs placed in the store. This has various implications on what we --- can do with the package, e.g. can we run tests, ghci etc. 
--- --- packageIsLocalToProject :: ProjectPackageLocation -> Bool - - ---------------------------------------------- --- Checking configuration sanity --- - -data BadPerPackageCompilerPaths - = BadPerPackageCompilerPaths [(PackageName, String)] -#if MIN_VERSION_base(4,8,0) - deriving (Show, Typeable) -#else - deriving (Typeable) - -instance Show BadPerPackageCompilerPaths where - show = renderBadPerPackageCompilerPaths -#endif - -instance Exception BadPerPackageCompilerPaths where -#if MIN_VERSION_base(4,8,0) - displayException = renderBadPerPackageCompilerPaths -#endif ---TODO: [nice to have] custom exception subclass for Doc rendering, colour etc - -renderBadPerPackageCompilerPaths :: BadPerPackageCompilerPaths -> String -renderBadPerPackageCompilerPaths - (BadPerPackageCompilerPaths ((pkgname, progname) : _)) = - "The path to the compiler program (or programs used by the compiler) " - ++ "cannot be specified on a per-package basis in the cabal.project file " - ++ "(i.e. setting the '" ++ progname ++ "-location' for package '" - ++ display pkgname ++ "'). All packages have to use the same compiler, so " - ++ "specify the path in a global 'program-locations' section." - --TODO: [nice to have] better format control so we can pretty-print the - -- offending part of the project file. Currently the line wrapping breaks any - -- formatting. -renderBadPerPackageCompilerPaths _ = error "renderBadPerPackageCompilerPaths" - --- | The project configuration is not allowed to specify program locations for --- programs used by the compiler as these have to be the same for each set of --- packages. --- --- We cannot check this until we know which programs the compiler uses, which --- in principle is not until we've configured the compiler. 
--- --- Throws 'BadPerPackageCompilerPaths' --- -checkBadPerPackageCompilerPaths :: [ConfiguredProgram] - -> Map PackageName PackageConfig - -> IO () -checkBadPerPackageCompilerPaths compilerPrograms packagesConfig = - case [ (pkgname, progname) - | let compProgNames = Set.fromList (map programId compilerPrograms) - , (pkgname, pkgconf) <- Map.toList packagesConfig - , progname <- Map.keys (getMapLast (packageConfigProgramPaths pkgconf)) - , progname `Set.member` compProgNames ] of - [] -> return () - ps -> throwIO (BadPerPackageCompilerPaths ps) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectOrchestration.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectOrchestration.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectOrchestration.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectOrchestration.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1156 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE RecordWildCards, NamedFieldPuns #-} -{-# LANGUAGE RankNTypes, ScopedTypeVariables #-} - --- | This module deals with building and incrementally rebuilding a collection --- of packages. It is what backs the @cabal build@ and @configure@ commands, --- as well as being a core part of @run@, @test@, @bench@ and others. --- --- The primary thing is in fact rebuilding (and trying to make that quick by --- not redoing unnecessary work), so building from scratch is just a special --- case. --- --- The build process and the code can be understood by breaking it down into --- three major parts: --- --- * The 'ElaboratedInstallPlan' type --- --- * The \"what to do\" phase, where we look at the all input configuration --- (project files, .cabal files, command line etc) and produce a detailed --- plan of what to do -- the 'ElaboratedInstallPlan'. 
--- --- * The \"do it\" phase, where we take the 'ElaboratedInstallPlan' and we --- re-execute it. --- --- As far as possible, the \"what to do\" phase embodies all the policy, leaving --- the \"do it\" phase policy free. The first phase contains more of the --- complicated logic, but it is contained in code that is either pure or just --- has read effects (except cache updates). Then the second phase does all the --- actions to build packages, but as far as possible it just follows the --- instructions and avoids any logic for deciding what to do (apart from --- recompilation avoidance in executing the plan). --- --- This division helps us keep the code under control, making it easier to --- understand, test and debug. So when you are extending these modules, please --- think about which parts of your change belong in which part. It is --- perfectly ok to extend the description of what to do (i.e. the --- 'ElaboratedInstallPlan') if that helps keep the policy decisions in the --- first phase. Also, the second phase does not have direct access to any of --- the input configuration anyway; all the information has to flow via the --- 'ElaboratedInstallPlan'. --- -module Distribution.Client.ProjectOrchestration ( - -- * Discovery phase: what is in the project? - establishProjectBaseContext, - ProjectBaseContext(..), - BuildTimeSettings(..), - commandLineFlagsToProjectConfig, - - -- * Pre-build phase: decide what to do. 
- withInstallPlan, - runProjectPreBuildPhase, - ProjectBuildContext(..), - - -- ** Selecting what targets we mean - readTargetSelectors, - reportTargetSelectorProblems, - resolveTargets, - TargetsMap, - TargetSelector(..), - TargetImplicitCwd(..), - PackageId, - AvailableTarget(..), - AvailableTargetStatus(..), - TargetRequested(..), - ComponentName(..), - ComponentKind(..), - ComponentTarget(..), - SubComponentTarget(..), - TargetProblemCommon(..), - selectComponentTargetBasic, - distinctTargetComponents, - -- ** Utils for selecting targets - filterTargetsKind, - filterTargetsKindWith, - selectBuildableTargets, - selectBuildableTargetsWith, - selectBuildableTargets', - selectBuildableTargetsWith', - forgetTargetsDetail, - - -- ** Adjusting the plan - pruneInstallPlanToTargets, - TargetAction(..), - pruneInstallPlanToDependencies, - CannotPruneDependencies(..), - printPlan, - - -- * Build phase: now do it. - runProjectBuildPhase, - - -- * Post build actions - runProjectPostBuildPhase, - dieOnBuildFailures, - - -- * Shared CLI utils - cmdCommonHelpTextNewBuildBeta, - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.ProjectConfig -import Distribution.Client.ProjectPlanning - hiding ( pruneInstallPlanToTargets ) -import qualified Distribution.Client.ProjectPlanning as ProjectPlanning - ( pruneInstallPlanToTargets ) -import Distribution.Client.ProjectPlanning.Types -import Distribution.Client.ProjectBuilding -import Distribution.Client.ProjectPlanOutput - -import Distribution.Client.Types - ( GenericReadyPackage(..), UnresolvedSourcePackage - , PackageSpecifier(..) - , SourcePackageDb(..) ) -import Distribution.Solver.Types.PackageIndex - ( lookupPackageName ) -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.TargetSelector - ( TargetSelector(..), TargetImplicitCwd(..) 
- , ComponentKind(..), componentKind - , readTargetSelectors, reportTargetSelectorProblems ) -import Distribution.Client.DistDirLayout -import Distribution.Client.Config (getCabalDir) -import Distribution.Client.Setup hiding (packageName) -import Distribution.Types.ComponentName - ( componentNameString ) -import Distribution.Types.UnqualComponentName - ( UnqualComponentName, packageNameToUnqualComponentName ) - -import Distribution.Solver.Types.OptionalStanza - -import Distribution.Package - hiding (InstalledPackageId, installedPackageId) -import Distribution.PackageDescription - ( FlagAssignment, unFlagAssignment, showFlagValue - , diffFlagAssignment ) -import Distribution.Simple.LocalBuildInfo - ( ComponentName(..), pkgComponents ) -import qualified Distribution.Simple.Setup as Setup -import Distribution.Simple.Command (commandShowOptions) -import Distribution.Simple.Configure (computeEffectiveProfiling) - -import Distribution.Simple.Utils - ( die', warn, notice, noticeNoWrap, debugNoWrap ) -import Distribution.Verbosity -import Distribution.Text -import Distribution.Simple.Compiler - ( showCompilerId - , OptimisationLevel(..)) - -import qualified Data.Monoid as Mon -import qualified Data.Set as Set -import qualified Data.Map as Map -import Data.Either -import Control.Exception (Exception(..), throwIO, assert) -import System.Exit (ExitCode(..), exitFailure) -#ifdef MIN_VERSION_unix -import System.Posix.Signals (sigKILL, sigSEGV) -#endif - - --- | This holds the context of a project prior to solving: the content of the --- @cabal.project@ and all the local package @.cabal@ files. 
--- -data ProjectBaseContext = ProjectBaseContext { - distDirLayout :: DistDirLayout, - cabalDirLayout :: CabalDirLayout, - projectConfig :: ProjectConfig, - localPackages :: [PackageSpecifier UnresolvedSourcePackage], - buildSettings :: BuildTimeSettings - } - -establishProjectBaseContext :: Verbosity - -> ProjectConfig - -> IO ProjectBaseContext -establishProjectBaseContext verbosity cliConfig = do - - cabalDir <- getCabalDir - projectRoot <- either throwIO return =<< - findProjectRoot Nothing mprojectFile - - let distDirLayout = defaultDistDirLayout projectRoot - mdistDirectory - - (projectConfig, localPackages) <- - rebuildProjectConfig verbosity - distDirLayout - cliConfig - - let ProjectConfigBuildOnly { - projectConfigLogsDir - } = projectConfigBuildOnly projectConfig - - ProjectConfigShared { - projectConfigStoreDir - } = projectConfigShared projectConfig - - mlogsDir = Setup.flagToMaybe projectConfigLogsDir - mstoreDir = Setup.flagToMaybe projectConfigStoreDir - cabalDirLayout = mkCabalDirLayout cabalDir mstoreDir mlogsDir - - buildSettings = resolveBuildTimeSettings - verbosity cabalDirLayout - projectConfig - - return ProjectBaseContext { - distDirLayout, - cabalDirLayout, - projectConfig, - localPackages, - buildSettings - } - where - mdistDirectory = Setup.flagToMaybe projectConfigDistDir - mprojectFile = Setup.flagToMaybe projectConfigProjectFile - ProjectConfigShared { - projectConfigDistDir, - projectConfigProjectFile - } = projectConfigShared cliConfig - - --- | This holds the context between the pre-build, build and post-build phases. --- -data ProjectBuildContext = ProjectBuildContext { - -- | This is the improved plan, before we select a plan subset based on - -- the build targets, and before we do the dry-run. So this contains - -- all packages in the project. 
- elaboratedPlanOriginal :: ElaboratedInstallPlan, - - -- | This is the 'elaboratedPlanOriginal' after we select a plan subset - -- and do the dry-run phase to find out what is up-to or out-of date. - -- This is the plan that will be executed during the build phase. So - -- this contains only a subset of packages in the project. - elaboratedPlanToExecute:: ElaboratedInstallPlan, - - -- | The part of the install plan that's shared between all packages in - -- the plan. This does not change between the two plan variants above, - -- so there is just the one copy. - elaboratedShared :: ElaboratedSharedConfig, - - -- | The result of the dry-run phase. This tells us about each member of - -- the 'elaboratedPlanToExecute'. - pkgsBuildStatus :: BuildStatusMap, - - -- | The targets selected by @selectPlanSubset@. This is useful eg. in - -- CmdRun, where we need a valid target to execute. - targetsMap :: TargetsMap - } - - --- | Pre-build phase: decide what to do. --- -withInstallPlan - :: Verbosity - -> ProjectBaseContext - -> (ElaboratedInstallPlan -> ElaboratedSharedConfig -> IO a) - -> IO a -withInstallPlan - verbosity - ProjectBaseContext { - distDirLayout, - cabalDirLayout, - projectConfig, - localPackages - } - action = do - -- Take the project configuration and make a plan for how to build - -- everything in the project. This is independent of any specific targets - -- the user has asked for. 
- -- - (elaboratedPlan, _, elaboratedShared) <- - rebuildInstallPlan verbosity - distDirLayout cabalDirLayout - projectConfig - localPackages - action elaboratedPlan elaboratedShared - -runProjectPreBuildPhase - :: Verbosity - -> ProjectBaseContext - -> (ElaboratedInstallPlan -> IO (ElaboratedInstallPlan, TargetsMap)) - -> IO ProjectBuildContext -runProjectPreBuildPhase - verbosity - ProjectBaseContext { - distDirLayout, - cabalDirLayout, - projectConfig, - localPackages - } - selectPlanSubset = do - -- Take the project configuration and make a plan for how to build - -- everything in the project. This is independent of any specific targets - -- the user has asked for. - -- - (elaboratedPlan, _, elaboratedShared) <- - rebuildInstallPlan verbosity - distDirLayout cabalDirLayout - projectConfig - localPackages - - -- The plan for what to do is represented by an 'ElaboratedInstallPlan' - - -- Now given the specific targets the user has asked for, decide - -- which bits of the plan we will want to execute. - -- - (elaboratedPlan', targets) <- selectPlanSubset elaboratedPlan - - -- Check which packages need rebuilding. - -- This also gives us more accurate reasons for the --dry-run output. - -- - pkgsBuildStatus <- rebuildTargetsDryRun distDirLayout elaboratedShared - elaboratedPlan' - - -- Improve the plan by marking up-to-date packages as installed. - -- - let elaboratedPlan'' = improveInstallPlanWithUpToDatePackages - pkgsBuildStatus elaboratedPlan' - debugNoWrap verbosity (InstallPlan.showInstallPlan elaboratedPlan'') - - return ProjectBuildContext { - elaboratedPlanOriginal = elaboratedPlan, - elaboratedPlanToExecute = elaboratedPlan'', - elaboratedShared, - pkgsBuildStatus, - targetsMap = targets - } - - --- | Build phase: now do it. --- --- Execute all or parts of the description of what to do to build or --- rebuild the various packages needed. 
--- -runProjectBuildPhase :: Verbosity - -> ProjectBaseContext - -> ProjectBuildContext - -> IO BuildOutcomes -runProjectBuildPhase _ ProjectBaseContext{buildSettings} _ - | buildSettingDryRun buildSettings - = return Map.empty - -runProjectBuildPhase verbosity - ProjectBaseContext{..} ProjectBuildContext {..} = - fmap (Map.union (previousBuildOutcomes pkgsBuildStatus)) $ - rebuildTargets verbosity - distDirLayout - (cabalStoreDirLayout cabalDirLayout) - elaboratedPlanToExecute - elaboratedShared - pkgsBuildStatus - buildSettings - where - previousBuildOutcomes :: BuildStatusMap -> BuildOutcomes - previousBuildOutcomes = - Map.mapMaybe $ \status -> case status of - BuildStatusUpToDate buildSuccess -> Just (Right buildSuccess) - --TODO: [nice to have] record build failures persistently - _ -> Nothing - --- | Post-build phase: various administrative tasks --- --- Update bits of state based on the build outcomes and report any failures. --- -runProjectPostBuildPhase :: Verbosity - -> ProjectBaseContext - -> ProjectBuildContext - -> BuildOutcomes - -> IO () -runProjectPostBuildPhase _ ProjectBaseContext{buildSettings} _ _ - | buildSettingDryRun buildSettings - = return () - -runProjectPostBuildPhase verbosity - ProjectBaseContext {..} ProjectBuildContext {..} - buildOutcomes = do - -- Update other build artefacts - -- TODO: currently none, but could include: - -- - bin symlinks/wrappers - -- - haddock/hoogle/ctags indexes - -- - delete stale lib registrations - -- - delete stale package dirs - - postBuildStatus <- updatePostBuildProjectStatus - verbosity - distDirLayout - elaboratedPlanOriginal - pkgsBuildStatus - buildOutcomes - - void $ writePlanGhcEnvironment (distProjectRootDirectory - distDirLayout) - elaboratedPlanOriginal - elaboratedShared - postBuildStatus - - -- Finally if there were any build failures then report them and throw - -- an exception to terminate the program - dieOnBuildFailures verbosity elaboratedPlanToExecute buildOutcomes - - -- Note that it 
is a deliberate design choice that the 'buildTargets' is - -- not passed to phase 1, and the various bits of input config is not - -- passed to phase 2. - -- - -- We make the install plan without looking at the particular targets the - -- user asks us to build. The set of available things we can build is - -- discovered from the env and config and is used to make the install plan. - -- The targets just tell us which parts of the install plan to execute. - -- - -- Conversely, executing the plan does not directly depend on any of the - -- input config. The bits that are needed (or better, the decisions based - -- on it) all go into the install plan. - - -- Notionally, the 'BuildFlags' should be things that do not affect what - -- we build, just how we do it. These ones of course do - - ------------------------------------------------------------------------------- --- Taking targets into account, selecting what to build --- - --- | The set of components to build, represented as a mapping from 'UnitId's --- to the 'ComponentTarget's within the unit that will be selected --- (e.g. selected to build, test or repl). --- --- Associated with each 'ComponentTarget' is the set of 'TargetSelector's that --- matched this target. Typically this is exactly one, but in general it is --- possible to for different selectors to match the same target. This extra --- information is primarily to help make helpful error messages. --- -type TargetsMap = Map UnitId [(ComponentTarget, [TargetSelector])] - --- | Given a set of 'TargetSelector's, resolve which 'UnitId's and --- 'ComponentTarget's they ought to refer to. --- --- The idea is that every user target identifies one or more roots in the --- 'ElaboratedInstallPlan', which we will use to determine the closure --- of what packages need to be built, dropping everything from the plan --- that is unnecessary. 
This closure and pruning is done by --- 'pruneInstallPlanToTargets' and this needs to be told the roots in terms --- of 'UnitId's and the 'ComponentTarget's within those. --- --- This means we first need to translate the 'TargetSelector's into the --- 'UnitId's and 'ComponentTarget's. This translation has to be different for --- the different command line commands, like @build@, @repl@ etc. For example --- the command @build pkgfoo@ could select a different set of components in --- pkgfoo than @repl pkgfoo@. The @build@ command would select any library and --- all executables, whereas @repl@ would select the library or a single --- executable. Furthermore, both of these examples could fail, and fail in --- different ways and each needs to be able to produce helpful error messages. --- --- So 'resolveTargets' takes two helpers: one to select the targets to be used --- by user targets that refer to a whole package ('TargetPackage'), and --- another to check user targets that refer to a component (or a module or --- file within a component). These helpers can fail, and use their own error --- type. Both helpers get given the 'AvailableTarget' info about the --- component(s). --- --- While commands vary quite a bit in their behaviour about which components to --- select for a whole-package target, most commands have the same behaviour for --- checking a user target that refers to a specific component. To help with --- this commands can use 'selectComponentTargetBasic', either directly or as --- a basis for their own @selectComponentTarget@ implementation. --- -resolveTargets :: forall err. - (forall k. TargetSelector - -> [AvailableTarget k] - -> Either err [k]) - -> (forall k. 
SubComponentTarget - -> AvailableTarget k - -> Either err k ) - -> (TargetProblemCommon -> err) - -> ElaboratedInstallPlan - -> Maybe (SourcePackageDb) - -> [TargetSelector] - -> Either [err] TargetsMap -resolveTargets selectPackageTargets selectComponentTarget liftProblem - installPlan mPkgDb = - fmap mkTargetsMap - . checkErrors - . map (\ts -> (,) ts <$> checkTarget ts) - where - mkTargetsMap :: [(TargetSelector, [(UnitId, ComponentTarget)])] - -> TargetsMap - mkTargetsMap targets = - Map.map nubComponentTargets - $ Map.fromListWith (++) - [ (uid, [(ct, ts)]) - | (ts, cts) <- targets - , (uid, ct) <- cts ] - - AvailableTargetIndexes{..} = availableTargetIndexes installPlan - - checkTarget :: TargetSelector -> Either err [(UnitId, ComponentTarget)] - - -- We can ask to build any whole package, project-local or a dependency - checkTarget bt@(TargetPackage _ [pkgid] mkfilter) - | Just ats <- fmap (maybe id filterTargetsKind mkfilter) - $ Map.lookup pkgid availableTargetsByPackageId - = fmap (componentTargets WholeComponent) - $ selectPackageTargets bt ats - - | otherwise - = Left (liftProblem (TargetProblemNoSuchPackage pkgid)) - - checkTarget (TargetPackage _ _ _) - = error "TODO: add support for multiple packages in a directory" - -- For the moment this error cannot happen here, because it gets - -- detected when the package config is being constructed. This case - -- will need handling properly when we do add support. - -- - -- TODO: how should this use case play together with the - -- '--cabal-file' option of 'configure' which allows using multiple - -- .cabal files for a single package? - - checkTarget bt@(TargetAllPackages mkfilter) = - fmap (componentTargets WholeComponent) - . selectPackageTargets bt - . maybe id filterTargetsKind mkfilter - . 
filter availableTargetLocalToProject - $ concat (Map.elems availableTargetsByPackageId) - - checkTarget (TargetComponent pkgid cname subtarget) - | Just ats <- Map.lookup (pkgid, cname) - availableTargetsByPackageIdAndComponentName - = fmap (componentTargets subtarget) - $ selectComponentTargets subtarget ats - - | Map.member pkgid availableTargetsByPackageId - = Left (liftProblem (TargetProblemNoSuchComponent pkgid cname)) - - | otherwise - = Left (liftProblem (TargetProblemNoSuchPackage pkgid)) - - checkTarget (TargetComponentUnknown pkgname ecname subtarget) - | Just ats <- case ecname of - Left ucname -> - Map.lookup (pkgname, ucname) - availableTargetsByPackageNameAndUnqualComponentName - Right cname -> - Map.lookup (pkgname, cname) - availableTargetsByPackageNameAndComponentName - = fmap (componentTargets subtarget) - $ selectComponentTargets subtarget ats - - | Map.member pkgname availableTargetsByPackageName - = Left (liftProblem (TargetProblemUnknownComponent pkgname ecname)) - - | otherwise - = Left (liftProblem (TargetNotInProject pkgname)) - - checkTarget bt@(TargetPackageNamed pkgname mkfilter) - | Just ats <- fmap (maybe id filterTargetsKind mkfilter) - $ Map.lookup pkgname availableTargetsByPackageName - = fmap (componentTargets WholeComponent) - . selectPackageTargets bt - $ ats - - | Just SourcePackageDb{ packageIndex } <- mPkgDb - , let pkg = lookupPackageName packageIndex pkgname - , not (null pkg) - = Left (liftProblem (TargetAvailableInIndex pkgname)) - - | otherwise - = Left (liftProblem (TargetNotInProject pkgname)) - - componentTargets :: SubComponentTarget - -> [(b, ComponentName)] - -> [(b, ComponentTarget)] - componentTargets subtarget = - map (fmap (\cname -> ComponentTarget cname subtarget)) - - selectComponentTargets :: SubComponentTarget - -> [AvailableTarget k] - -> Either err [k] - selectComponentTargets subtarget = - either (Left . head) Right - . checkErrors - . 
map (selectComponentTarget subtarget) - - checkErrors :: [Either e a] -> Either [e] [a] - checkErrors = (\(es, xs) -> if null es then Right xs else Left es) - . partitionEithers - - -data AvailableTargetIndexes = AvailableTargetIndexes { - availableTargetsByPackageIdAndComponentName - :: AvailableTargetsMap (PackageId, ComponentName), - - availableTargetsByPackageId - :: AvailableTargetsMap PackageId, - - availableTargetsByPackageName - :: AvailableTargetsMap PackageName, - - availableTargetsByPackageNameAndComponentName - :: AvailableTargetsMap (PackageName, ComponentName), - - availableTargetsByPackageNameAndUnqualComponentName - :: AvailableTargetsMap (PackageName, UnqualComponentName) - } -type AvailableTargetsMap k = Map k [AvailableTarget (UnitId, ComponentName)] - --- We define a bunch of indexes to help 'resolveTargets' with resolving --- 'TargetSelector's to specific 'UnitId's. --- --- They are all derived from the 'availableTargets' index. --- The 'availableTargetsByPackageIdAndComponentName' is just that main index, --- while the others are derived by re-grouping on the index key. --- --- They are all constructed lazily because they are not necessarily all used. 
--- -availableTargetIndexes :: ElaboratedInstallPlan -> AvailableTargetIndexes -availableTargetIndexes installPlan = AvailableTargetIndexes{..} - where - availableTargetsByPackageIdAndComponentName = - availableTargets installPlan - - availableTargetsByPackageId = - Map.mapKeysWith - (++) (\(pkgid, _cname) -> pkgid) - availableTargetsByPackageIdAndComponentName - `Map.union` availableTargetsEmptyPackages - - availableTargetsByPackageName = - Map.mapKeysWith - (++) packageName - availableTargetsByPackageId - - availableTargetsByPackageNameAndComponentName = - Map.mapKeysWith - (++) (\(pkgid, cname) -> (packageName pkgid, cname)) - availableTargetsByPackageIdAndComponentName - - availableTargetsByPackageNameAndUnqualComponentName = - Map.mapKeysWith - (++) (\(pkgid, cname) -> let pname = packageName pkgid - cname' = unqualComponentName pname cname - in (pname, cname')) - availableTargetsByPackageIdAndComponentName - where - unqualComponentName :: PackageName -> ComponentName -> UnqualComponentName - unqualComponentName pkgname = - fromMaybe (packageNameToUnqualComponentName pkgname) - . componentNameString - - -- Add in all the empty packages. These do not appear in the - -- availableTargetsByComponent map, since that only contains components - -- so packages with no components are invisible from that perspective. - -- The empty packages need to be there for proper error reporting, so users - -- can select the empty package and then we can report that it is empty, - -- otherwise we falsely report there is no such package at all. - availableTargetsEmptyPackages = - Map.fromList - [ (packageId pkg, []) - | InstallPlan.Configured pkg <- InstallPlan.toList installPlan - , case elabPkgOrComp pkg of - ElabComponent _ -> False - ElabPackage _ -> null (pkgComponents (elabPkgDescription pkg)) - ] - - --TODO: [research required] what if the solution has multiple versions of this package? - -- e.g. 
due to setup deps or due to multiple independent sets of - -- packages being built (e.g. ghc + ghcjs in a project) - -filterTargetsKind :: ComponentKind -> [AvailableTarget k] -> [AvailableTarget k] -filterTargetsKind ckind = filterTargetsKindWith (== ckind) - -filterTargetsKindWith :: (ComponentKind -> Bool) - -> [AvailableTarget k] -> [AvailableTarget k] -filterTargetsKindWith p ts = - [ t | t@(AvailableTarget _ cname _ _) <- ts - , p (componentKind cname) ] - -selectBuildableTargets :: [AvailableTarget k] -> [k] -selectBuildableTargets ts = - [ k | AvailableTarget _ _ (TargetBuildable k _) _ <- ts ] - -selectBuildableTargetsWith :: (TargetRequested -> Bool) - -> [AvailableTarget k] -> [k] -selectBuildableTargetsWith p ts = - [ k | AvailableTarget _ _ (TargetBuildable k req) _ <- ts, p req ] - -selectBuildableTargets' :: [AvailableTarget k] -> ([k], [AvailableTarget ()]) -selectBuildableTargets' ts = - (,) [ k | AvailableTarget _ _ (TargetBuildable k _) _ <- ts ] - [ forgetTargetDetail t - | t@(AvailableTarget _ _ (TargetBuildable _ _) _) <- ts ] - -selectBuildableTargetsWith' :: (TargetRequested -> Bool) - -> [AvailableTarget k] -> ([k], [AvailableTarget ()]) -selectBuildableTargetsWith' p ts = - (,) [ k | AvailableTarget _ _ (TargetBuildable k req) _ <- ts, p req ] - [ forgetTargetDetail t - | t@(AvailableTarget _ _ (TargetBuildable _ req) _) <- ts, p req ] - - -forgetTargetDetail :: AvailableTarget k -> AvailableTarget () -forgetTargetDetail = fmap (const ()) - -forgetTargetsDetail :: [AvailableTarget k] -> [AvailableTarget ()] -forgetTargetsDetail = map forgetTargetDetail - --- | A basic @selectComponentTarget@ implementation to use or pass to --- 'resolveTargets', that does the basic checks that the component is --- buildable and isn't a test suite or benchmark that is disabled. 
This --- can also be used to do these basic checks as part of a custom impl that --- -selectComponentTargetBasic :: SubComponentTarget - -> AvailableTarget k - -> Either TargetProblemCommon k -selectComponentTargetBasic subtarget - AvailableTarget { - availableTargetPackageId = pkgid, - availableTargetComponentName = cname, - availableTargetStatus - } = - case availableTargetStatus of - TargetDisabledByUser -> - Left (TargetOptionalStanzaDisabledByUser pkgid cname subtarget) - - TargetDisabledBySolver -> - Left (TargetOptionalStanzaDisabledBySolver pkgid cname subtarget) - - TargetNotLocal -> - Left (TargetComponentNotProjectLocal pkgid cname subtarget) - - TargetNotBuildable -> - Left (TargetComponentNotBuildable pkgid cname subtarget) - - TargetBuildable targetKey _ -> - Right targetKey - -data TargetProblemCommon - = TargetNotInProject PackageName - | TargetAvailableInIndex PackageName - | TargetComponentNotProjectLocal PackageId ComponentName SubComponentTarget - | TargetComponentNotBuildable PackageId ComponentName SubComponentTarget - | TargetOptionalStanzaDisabledByUser PackageId ComponentName SubComponentTarget - | TargetOptionalStanzaDisabledBySolver PackageId ComponentName SubComponentTarget - | TargetProblemUnknownComponent PackageName - (Either UnqualComponentName ComponentName) - - -- The target matching stuff only returns packages local to the project, - -- so these lookups should never fail, but if 'resolveTargets' is called - -- directly then of course it can. - | TargetProblemNoSuchPackage PackageId - | TargetProblemNoSuchComponent PackageId ComponentName - deriving (Eq, Show) - --- | Wrapper around 'ProjectPlanning.pruneInstallPlanToTargets' that adjusts --- for the extra unneeded info in the 'TargetsMap'. 
--- -pruneInstallPlanToTargets :: TargetAction -> TargetsMap - -> ElaboratedInstallPlan -> ElaboratedInstallPlan -pruneInstallPlanToTargets targetActionType targetsMap elaboratedPlan = - assert (Map.size targetsMap > 0) $ - ProjectPlanning.pruneInstallPlanToTargets - targetActionType - (Map.map (map fst) targetsMap) - elaboratedPlan - --- | Utility used by repl and run to check if the targets spans multiple --- components, since those commands do not support multiple components. --- -distinctTargetComponents :: TargetsMap -> Set.Set (UnitId, ComponentName) -distinctTargetComponents targetsMap = - Set.fromList [ (uid, cname) - | (uid, cts) <- Map.toList targetsMap - , (ComponentTarget cname _, _) <- cts ] - - ------------------------------------------------------------------------------- --- Displaying what we plan to do --- - --- | Print a user-oriented presentation of the install plan, indicating what --- will be built. --- -printPlan :: Verbosity - -> ProjectBaseContext - -> ProjectBuildContext - -> IO () -printPlan verbosity - ProjectBaseContext { - buildSettings = BuildTimeSettings{buildSettingDryRun}, - projectConfig = ProjectConfig { - projectConfigLocalPackages = PackageConfig {packageConfigOptimization} - } - } - ProjectBuildContext { - elaboratedPlanToExecute = elaboratedPlan, - elaboratedShared, - pkgsBuildStatus - } - - | null pkgs - = notice verbosity "Up to date" - - | otherwise - = noticeNoWrap verbosity $ unlines $ - (showBuildProfile ++ "In order, the following " ++ wouldWill ++ " be built" ++ - ifNormal " (use -v for more details)" ++ ":") - : map showPkgAndReason pkgs - - where - pkgs = InstallPlan.executionOrder elaboratedPlan - - ifVerbose s | verbosity >= verbose = s - | otherwise = "" - - ifNormal s | verbosity >= verbose = "" - | otherwise = s - - wouldWill | buildSettingDryRun = "would" - | otherwise = "will" - - showPkgAndReason :: ElaboratedReadyPackage -> String - showPkgAndReason (ReadyPackage elab) = - " - " ++ - (if verbosity >= 
deafening - then display (installedUnitId elab) - else display (packageId elab) - ) ++ - (case elabPkgOrComp elab of - ElabPackage pkg -> showTargets elab ++ ifVerbose (showStanzas pkg) - ElabComponent comp -> - " (" ++ showComp elab comp ++ ")" - ) ++ - showFlagAssignment (nonDefaultFlags elab) ++ - showConfigureFlags elab ++ - let buildStatus = pkgsBuildStatus Map.! installedUnitId elab in - " (" ++ showBuildStatus buildStatus ++ ")" - - showComp elab comp = - maybe "custom" display (compComponentName comp) ++ - if Map.null (elabInstantiatedWith elab) - then "" - else " with " ++ - intercalate ", " - -- TODO: Abbreviate the UnitIds - [ display k ++ "=" ++ display v - | (k,v) <- Map.toList (elabInstantiatedWith elab) ] - - nonDefaultFlags :: ElaboratedConfiguredPackage -> FlagAssignment - nonDefaultFlags elab = elabFlagAssignment elab `diffFlagAssignment` elabFlagDefaults elab - - showStanzas pkg = concat - $ [ " *test" - | TestStanzas `Set.member` pkgStanzasEnabled pkg ] - ++ [ " *bench" - | BenchStanzas `Set.member` pkgStanzasEnabled pkg ] - - showTargets elab - | null (elabBuildTargets elab) = "" - | otherwise - = " (" ++ intercalate ", " [ showComponentTarget (packageId elab) t | t <- elabBuildTargets elab ] - ++ ")" - - showFlagAssignment :: FlagAssignment -> String - showFlagAssignment = concatMap ((' ' :) . showFlagValue) . unFlagAssignment - - showConfigureFlags elab = - let fullConfigureFlags - = setupHsConfigureFlags - (ReadyPackage elab) - elaboratedShared - verbosity - "$builddir" - -- | Given a default value @x@ for a flag, nub @Flag x@ - -- into @NoFlag@. This gives us a tidier command line - -- rendering. 
- nubFlag :: Eq a => a -> Setup.Flag a -> Setup.Flag a - nubFlag x (Setup.Flag x') | x == x' = Setup.NoFlag - nubFlag _ f = f - (tryLibProfiling, tryExeProfiling) = computeEffectiveProfiling fullConfigureFlags - partialConfigureFlags - = Mon.mempty { - configProf = - nubFlag False (configProf fullConfigureFlags), - configProfExe = - nubFlag tryExeProfiling (configProfExe fullConfigureFlags), - configProfLib = - nubFlag tryLibProfiling (configProfLib fullConfigureFlags) - -- Maybe there are more we can add - } - -- Not necessary to "escape" it, it's just for user output - in unwords . ("":) $ - commandShowOptions - (Setup.configureCommand (pkgConfigCompilerProgs elaboratedShared)) - partialConfigureFlags - - showBuildStatus status = case status of - BuildStatusPreExisting -> "existing package" - BuildStatusInstalled -> "already installed" - BuildStatusDownload {} -> "requires download & build" - BuildStatusUnpack {} -> "requires build" - BuildStatusRebuild _ rebuild -> case rebuild of - BuildStatusConfigure - (MonitoredValueChanged _) -> "configuration changed" - BuildStatusConfigure mreason -> showMonitorChangedReason mreason - BuildStatusBuild _ buildreason -> case buildreason of - BuildReasonDepsRebuilt -> "dependency rebuilt" - BuildReasonFilesChanged - mreason -> showMonitorChangedReason mreason - BuildReasonExtraTargets _ -> "additional components to build" - BuildReasonEphemeralTargets -> "ephemeral targets" - BuildStatusUpToDate {} -> "up to date" -- doesn't happen - - showMonitorChangedReason (MonitoredFileChanged file) = "file " ++ file ++ " changed" - showMonitorChangedReason (MonitoredValueChanged _) = "value changed" - showMonitorChangedReason MonitorFirstRun = "first run" - showMonitorChangedReason MonitorCorruptCache = "cannot read state cache" - - showBuildProfile = "Build profile: " ++ unwords [ - "-w " ++ (showCompilerId . 
pkgConfigCompiler) elaboratedShared, - "-O" ++ (case packageConfigOptimization of - Setup.Flag NoOptimisation -> "0" - Setup.Flag NormalOptimisation -> "1" - Setup.Flag MaximumOptimisation -> "2" - Setup.NoFlag -> "1")] - ++ "\n" - --- | If there are build failures then report them and throw an exception. --- -dieOnBuildFailures :: Verbosity - -> ElaboratedInstallPlan -> BuildOutcomes -> IO () -dieOnBuildFailures verbosity plan buildOutcomes - | null failures = return () - - | isSimpleCase = exitFailure - - | otherwise = do - -- For failures where we have a build log, print the log plus a header - sequence_ - [ do notice verbosity $ - '\n' : renderFailureDetail False pkg reason - ++ "\nBuild log ( " ++ logfile ++ " ):" - readFile logfile >>= noticeNoWrap verbosity - | (pkg, ShowBuildSummaryAndLog reason logfile) - <- failuresClassification - ] - - -- For all failures, print either a short summary (if we showed the - -- build log) or all details - dieIfNotHaddockFailure verbosity $ unlines - [ case failureClassification of - ShowBuildSummaryAndLog reason _ - | verbosity > normal - -> renderFailureDetail mentionDepOf pkg reason - - | otherwise - -> renderFailureSummary mentionDepOf pkg reason - ++ ". See the build log above for details." 
- - ShowBuildSummaryOnly reason -> - renderFailureDetail mentionDepOf pkg reason - - | let mentionDepOf = verbosity <= normal - , (pkg, failureClassification) <- failuresClassification ] - where - failures = [ (pkgid, failure) - | (pkgid, Left failure) <- Map.toList buildOutcomes ] - - failuresClassification = - [ (pkg, classifyBuildFailure failure) - | (pkgid, failure) <- failures - , case buildFailureReason failure of - DependentFailed {} -> verbosity > normal - _ -> True - , InstallPlan.Configured pkg <- - maybeToList (InstallPlan.lookup plan pkgid) - ] - - dieIfNotHaddockFailure - | all isHaddockFailure failuresClassification = warn - | otherwise = die' - where - isHaddockFailure (_, ShowBuildSummaryOnly (HaddocksFailed _) ) = True - isHaddockFailure (_, ShowBuildSummaryAndLog (HaddocksFailed _) _) = True - isHaddockFailure _ = False - - - classifyBuildFailure :: BuildFailure -> BuildFailurePresentation - classifyBuildFailure BuildFailure { - buildFailureReason = reason, - buildFailureLogFile = mlogfile - } = - maybe (ShowBuildSummaryOnly reason) - (ShowBuildSummaryAndLog reason) $ do - logfile <- mlogfile - e <- buildFailureException reason - ExitFailure 1 <- fromException e - return logfile - - -- Special case: we don't want to report anything complicated in the case - -- of just doing build on the current package, since it's clear from - -- context which package failed. - -- - -- We generalise this rule as follows: - -- - if only one failure occurs, and it is in a single root package (ie a - -- package with nothing else depending on it) - -- - and that failure is of a kind that always reports enough detail - -- itself (e.g. ghc reporting errors on stdout) - -- - then we do not report additional error detail or context. 
- -- - isSimpleCase - | [(pkgid, failure)] <- failures - , [pkg] <- rootpkgs - , installedUnitId pkg == pkgid - , isFailureSelfExplanatory (buildFailureReason failure) - = True - | otherwise - = False - - -- NB: if the Setup script segfaulted or was interrupted, - -- we should give more detailed information. So only - -- assume that exit code 1 is "pedestrian failure." - isFailureSelfExplanatory (BuildFailed e) - | Just (ExitFailure 1) <- fromException e = True - - isFailureSelfExplanatory (ConfigureFailed e) - | Just (ExitFailure 1) <- fromException e = True - - isFailureSelfExplanatory _ = False - - rootpkgs = - [ pkg - | InstallPlan.Configured pkg <- InstallPlan.toList plan - , hasNoDependents pkg ] - - ultimateDeps pkgid = - filter (\pkg -> hasNoDependents pkg && installedUnitId pkg /= pkgid) - (InstallPlan.reverseDependencyClosure plan [pkgid]) - - hasNoDependents :: HasUnitId pkg => pkg -> Bool - hasNoDependents = null . InstallPlan.revDirectDeps plan . installedUnitId - - renderFailureDetail mentionDepOf pkg reason = - renderFailureSummary mentionDepOf pkg reason ++ "." 
- ++ renderFailureExtraDetail reason - ++ maybe "" showException (buildFailureException reason) - - renderFailureSummary mentionDepOf pkg reason = - case reason of - DownloadFailed _ -> "Failed to download " ++ pkgstr - UnpackFailed _ -> "Failed to unpack " ++ pkgstr - ConfigureFailed _ -> "Failed to build " ++ pkgstr - BuildFailed _ -> "Failed to build " ++ pkgstr - ReplFailed _ -> "repl failed for " ++ pkgstr - HaddocksFailed _ -> "Failed to build documentation for " ++ pkgstr - TestsFailed _ -> "Tests failed for " ++ pkgstr - BenchFailed _ -> "Benchmarks failed for " ++ pkgstr - InstallFailed _ -> "Failed to build " ++ pkgstr - DependentFailed depid - -> "Failed to build " ++ display (packageId pkg) - ++ " because it depends on " ++ display depid - ++ " which itself failed to build" - where - pkgstr = elabConfiguredName verbosity pkg - ++ if mentionDepOf - then renderDependencyOf (installedUnitId pkg) - else "" - - renderFailureExtraDetail reason = - case reason of - ConfigureFailed _ -> " The failure occurred during the configure step." - InstallFailed _ -> " The failure occurred during the final install step." - _ -> "" - - renderDependencyOf pkgid = - case ultimateDeps pkgid of - [] -> "" - (p1:[]) -> " (which is required by " ++ elabPlanPackageName verbosity p1 ++ ")" - (p1:p2:[]) -> " (which is required by " ++ elabPlanPackageName verbosity p1 - ++ " and " ++ elabPlanPackageName verbosity p2 ++ ")" - (p1:p2:_) -> " (which is required by " ++ elabPlanPackageName verbosity p1 - ++ ", " ++ elabPlanPackageName verbosity p2 - ++ " and others)" - - showException e = case fromException e of - Just (ExitFailure 1) -> "" - -#ifdef MIN_VERSION_unix - -- Note [Positive "signal" exit code] - -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -- What's the business with the test for negative and positive - -- signal values? The API for process specifies that if the - -- process died due to a signal, it returns a *negative* exit - -- code. So that's the negative test. 
- -- - -- What about the positive test? Well, when we find out that - -- a process died due to a signal, we ourselves exit with that - -- exit code. However, we don't "kill ourselves" with the - -- signal; we just exit with the same code as the signal: thus - -- the caller sees a *positive* exit code. So that's what - -- happens when we get a positive exit code. - Just (ExitFailure n) - | -n == fromIntegral sigSEGV -> - " The build process segfaulted (i.e. SIGSEGV)." - - | n == fromIntegral sigSEGV -> - " The build process terminated with exit code " ++ show n - ++ " which may be because some part of it segfaulted. (i.e. SIGSEGV)." - - | -n == fromIntegral sigKILL -> - " The build process was killed (i.e. SIGKILL). " ++ explanation - - | n == fromIntegral sigKILL -> - " The build process terminated with exit code " ++ show n - ++ " which may be because some part of it was killed " - ++ "(i.e. SIGKILL). " ++ explanation - where - explanation = "The typical reason for this is that there is not " - ++ "enough memory available (e.g. the OS killed a process " - ++ "using lots of memory)." -#endif - Just (ExitFailure n) -> - " The build process terminated with exit code " ++ show n - - _ -> " The exception was:\n " -#if MIN_VERSION_base(4,8,0) - ++ displayException e -#else - ++ show e -#endif - - buildFailureException reason = - case reason of - DownloadFailed e -> Just e - UnpackFailed e -> Just e - ConfigureFailed e -> Just e - BuildFailed e -> Just e - ReplFailed e -> Just e - HaddocksFailed e -> Just e - TestsFailed e -> Just e - BenchFailed e -> Just e - InstallFailed e -> Just e - DependentFailed _ -> Nothing - -data BuildFailurePresentation = - ShowBuildSummaryOnly BuildFailureReason - | ShowBuildSummaryAndLog BuildFailureReason FilePath - - -cmdCommonHelpTextNewBuildBeta :: String -cmdCommonHelpTextNewBuildBeta = - "Note: this command is part of the new project-based system (aka " - ++ "nix-style\nlocal builds). These features are currently in beta. 
" - ++ "Please see\n" - ++ "http://cabal.readthedocs.io/en/latest/nix-local-build-overview.html " - ++ "for\ndetails and advice on what you can expect to work. If you " - ++ "encounter problems\nplease file issues at " - ++ "https://github.com/haskell/cabal/issues and if you\nhave any time " - ++ "to get involved and help with testing, fixing bugs etc then\nthat " - ++ "is very much appreciated.\n" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectPlanning/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectPlanning/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectPlanning/Types.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectPlanning/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,810 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE NamedFieldPuns #-} -{-# LANGUAGE TypeFamilies #-} - --- | Types used while planning how to build everything in a project. --- --- Primarily this is the 'ElaboratedInstallPlan'. 
--- -module Distribution.Client.ProjectPlanning.Types ( - SolverInstallPlan, - - -- * Elaborated install plan types - ElaboratedInstallPlan, - normaliseConfiguredPackage, - ElaboratedConfiguredPackage(..), - - elabDistDirParams, - elabExeDependencyPaths, - elabLibDependencies, - elabOrderLibDependencies, - elabExeDependencies, - elabOrderExeDependencies, - elabSetupDependencies, - elabPkgConfigDependencies, - elabInplaceDependencyBuildCacheFiles, - elabRequiresRegistration, - dataDirsEnvironmentForPlan, - - elabPlanPackageName, - elabConfiguredName, - elabComponentName, - - ElaboratedPackageOrComponent(..), - ElaboratedComponent(..), - ElaboratedPackage(..), - pkgOrderDependencies, - ElaboratedPlanPackage, - ElaboratedSharedConfig(..), - ElaboratedReadyPackage, - BuildStyle(..), - CabalFileText, - - -- * Build targets - ComponentTarget(..), - showComponentTarget, - showTestComponentTarget, - showBenchComponentTarget, - SubComponentTarget(..), - - isSubLibComponentTarget, - isForeignLibComponentTarget, - isExeComponentTarget, - isTestComponentTarget, - isBenchComponentTarget, - - -- * Setup script - SetupScriptStyle(..), - ) where - -import Distribution.Client.TargetSelector - ( SubComponentTarget(..) ) -import Distribution.Client.PackageHash - -import Distribution.Client.Types -import qualified Distribution.Client.InstallPlan as InstallPlan -import Distribution.Client.InstallPlan - ( GenericInstallPlan, GenericPlanPackage(..) 
) -import Distribution.Client.SolverInstallPlan - ( SolverInstallPlan ) -import Distribution.Client.DistDirLayout - -import Distribution.Backpack -import Distribution.Backpack.ModuleShape - -import Distribution.Verbosity -import Distribution.Text -import Distribution.Types.ComponentRequestedSpec -import Distribution.Types.PackageDescription (PackageDescription(..)) -import Distribution.Package - hiding (InstalledPackageId, installedPackageId) -import Distribution.System -import qualified Distribution.PackageDescription as Cabal -import Distribution.InstalledPackageInfo (InstalledPackageInfo) -import Distribution.Simple.Compiler -import Distribution.Simple.Build.PathsModule (pkgPathEnvVar) -import qualified Distribution.Simple.BuildTarget as Cabal -import Distribution.Simple.Program -import Distribution.ModuleName (ModuleName) -import Distribution.Simple.LocalBuildInfo (ComponentName(..)) -import qualified Distribution.Simple.InstallDirs as InstallDirs -import Distribution.Simple.InstallDirs (PathTemplate) -import Distribution.Simple.Setup (HaddockTarget) -import Distribution.Version - -import qualified Distribution.Solver.Types.ComponentDeps as CD -import Distribution.Solver.Types.ComponentDeps (ComponentDeps) -import Distribution.Solver.Types.OptionalStanza -import Distribution.Compat.Graph (IsNode(..)) -import Distribution.Simple.Utils (ordNub) - -import Data.Map (Map) -import qualified Data.Map as Map -import Data.Maybe (catMaybes) -import Data.Set (Set) -import qualified Data.ByteString.Lazy as LBS -import Distribution.Compat.Binary -import GHC.Generics (Generic) -import qualified Data.Monoid as Mon -import Data.Typeable -import Control.Monad -import System.FilePath (()) - - --- | The combination of an elaborated install plan plus a --- 'ElaboratedSharedConfig' contains all the details necessary to be able --- to execute the plan without having to make further policy decisions. 
--- --- It does not include dynamic elements such as resources (such as http --- connections). --- -type ElaboratedInstallPlan - = GenericInstallPlan InstalledPackageInfo - ElaboratedConfiguredPackage - -type ElaboratedPlanPackage - = GenericPlanPackage InstalledPackageInfo - ElaboratedConfiguredPackage - --- | User-friendly display string for an 'ElaboratedPlanPackage'. -elabPlanPackageName :: Verbosity -> ElaboratedPlanPackage -> String -elabPlanPackageName verbosity (PreExisting ipkg) - | verbosity <= normal = display (packageName ipkg) - | otherwise = display (installedUnitId ipkg) -elabPlanPackageName verbosity (Configured elab) - = elabConfiguredName verbosity elab -elabPlanPackageName verbosity (Installed elab) - = elabConfiguredName verbosity elab - ---TODO: [code cleanup] decide if we really need this, there's not much in it, and in principle --- even platform and compiler could be different if we're building things --- like a server + client with ghc + ghcjs -data ElaboratedSharedConfig - = ElaboratedSharedConfig { - - pkgConfigPlatform :: Platform, - pkgConfigCompiler :: Compiler, --TODO: [code cleanup] replace with CompilerInfo - -- | The programs that the compiler configured (e.g. for GHC, the progs - -- ghc & ghc-pkg). Once constructed, only the 'configuredPrograms' are - -- used. - pkgConfigCompilerProgs :: ProgramDb, - pkgConfigReplOptions :: [String] - } - deriving (Show, Generic, Typeable) - --TODO: [code cleanup] no Eq instance - -instance Binary ElaboratedSharedConfig - -data ElaboratedConfiguredPackage - = ElaboratedConfiguredPackage { - -- | The 'UnitId' which uniquely identifies this item in a build plan - elabUnitId :: UnitId, - - elabComponentId :: ComponentId, - elabInstantiatedWith :: Map ModuleName Module, - elabLinkedInstantiatedWith :: Map ModuleName OpenModule, - - -- | This is true if this is an indefinite package, or this is a - -- package with no signatures. (Notably, it's not true for instantiated - -- packages.) 
The motivation for this is if you ask to build - -- @foo-indef@, this probably means that you want to typecheck - -- it, NOT that you want to rebuild all of the various - -- instantiations of it. - elabIsCanonical :: Bool, - - -- | The 'PackageId' of the originating package - elabPkgSourceId :: PackageId, - - -- | Shape of the package/component, for Backpack. - elabModuleShape :: ModuleShape, - - -- | A total flag assignment for the package. - -- TODO: Actually this can be per-component if we drop - -- all flags that don't affect a component. - elabFlagAssignment :: Cabal.FlagAssignment, - - -- | The original default flag assignment, used only for reporting. - elabFlagDefaults :: Cabal.FlagAssignment, - - elabPkgDescription :: Cabal.PackageDescription, - - -- | Where the package comes from, e.g. tarball, local dir etc. This - -- is not the same as where it may be unpacked to for the build. - elabPkgSourceLocation :: PackageLocation (Maybe FilePath), - - -- | The hash of the source, e.g. the tarball. We don't have this for - -- local source dir packages. - elabPkgSourceHash :: Maybe PackageSourceHash, - - -- | Is this package one of the ones specified by location in the - -- project file? (As opposed to a dependency, or a named package pulled - -- in) - elabLocalToProject :: Bool, - - -- | Are we going to build and install this package to the store, or are - -- we going to build it and register it locally. - elabBuildStyle :: BuildStyle, - - -- | Another way of phrasing 'pkgStanzasAvailable'. - elabEnabledSpec :: ComponentRequestedSpec, - - -- | Which optional stanzas (ie testsuites, benchmarks) can be built. - -- This means the solver produced a plan that has them available. - -- This doesn't necessary mean we build them by default. - elabStanzasAvailable :: Set OptionalStanza, - - -- | Which optional stanzas the user explicitly asked to enable or - -- to disable. 
This tells us which ones we build by default, and - -- helps with error messages when the user asks to build something - -- they explicitly disabled. - -- - -- TODO: The 'Bool' here should be refined into an ADT with three - -- cases: NotRequested, ExplicitlyRequested and - -- ImplicitlyRequested. A stanza is explicitly requested if - -- the user asked, for this *specific* package, that the stanza - -- be enabled; it's implicitly requested if the user asked for - -- all global packages to have this stanza enabled. The - -- difference between an explicit and implicit request is - -- error reporting behavior: if a user asks for tests to be - -- enabled for a specific package that doesn't have any tests, - -- we should warn them about it, but we shouldn't complain - -- that a user enabled tests globally, and some local packages - -- just happen not to have any tests. (But perhaps we should - -- warn if ALL local packages don't have any tests.) - elabStanzasRequested :: Map OptionalStanza Bool, - - elabSetupPackageDBStack :: PackageDBStack, - elabBuildPackageDBStack :: PackageDBStack, - elabRegisterPackageDBStack :: PackageDBStack, - - elabPkgDescriptionOverride :: Maybe CabalFileText, - - -- TODO: make per-component variants of these flags - elabVanillaLib :: Bool, - elabSharedLib :: Bool, - elabStaticLib :: Bool, - elabDynExe :: Bool, - elabGHCiLib :: Bool, - elabProfLib :: Bool, - elabProfExe :: Bool, - elabProfLibDetail :: ProfDetailLevel, - elabProfExeDetail :: ProfDetailLevel, - elabCoverage :: Bool, - elabOptimization :: OptimisationLevel, - elabSplitObjs :: Bool, - elabSplitSections :: Bool, - elabStripLibs :: Bool, - elabStripExes :: Bool, - elabDebugInfo :: DebugInfoLevel, - - elabProgramPaths :: Map String FilePath, - elabProgramArgs :: Map String [String], - elabProgramPathExtra :: [FilePath], - elabConfigureScriptArgs :: [String], - elabExtraLibDirs :: [FilePath], - elabExtraFrameworkDirs :: [FilePath], - elabExtraIncludeDirs :: [FilePath], - 
elabProgPrefix :: Maybe PathTemplate, - elabProgSuffix :: Maybe PathTemplate, - - elabInstallDirs :: InstallDirs.InstallDirs FilePath, - - elabHaddockHoogle :: Bool, - elabHaddockHtml :: Bool, - elabHaddockHtmlLocation :: Maybe String, - elabHaddockForeignLibs :: Bool, - elabHaddockForHackage :: HaddockTarget, - elabHaddockExecutables :: Bool, - elabHaddockTestSuites :: Bool, - elabHaddockBenchmarks :: Bool, - elabHaddockInternal :: Bool, - elabHaddockCss :: Maybe FilePath, - elabHaddockLinkedSource :: Bool, - elabHaddockQuickJump :: Bool, - elabHaddockHscolourCss :: Maybe FilePath, - elabHaddockContents :: Maybe PathTemplate, - - -- Setup.hs related things: - - -- | One of four modes for how we build and interact with the Setup.hs - -- script, based on whether it's a build-type Custom, with or without - -- explicit deps and the cabal spec version the .cabal file needs. - elabSetupScriptStyle :: SetupScriptStyle, - - -- | The version of the Cabal command line interface that we are using - -- for this package. This is typically the version of the Cabal lib - -- that the Setup.hs is built against. - elabSetupScriptCliVersion :: Version, - - -- Build time related: - elabBuildTargets :: [ComponentTarget], - elabTestTargets :: [ComponentTarget], - elabBenchTargets :: [ComponentTarget], - elabReplTarget :: Maybe ComponentTarget, - elabHaddockTargets :: [ComponentTarget], - - elabBuildHaddocks :: Bool, - - --pkgSourceDir ? -- currently passed in later because they can use temp locations - --pkgBuildDir ? 
-- but could in principle still have it here, with optional instr to use temp loc - - -- | Component/package specific information - elabPkgOrComp :: ElaboratedPackageOrComponent - } - deriving (Eq, Show, Generic, Typeable) - -normaliseConfiguredPackage :: ElaboratedSharedConfig - -> ElaboratedConfiguredPackage - -> ElaboratedConfiguredPackage -normaliseConfiguredPackage ElaboratedSharedConfig{pkgConfigCompilerProgs} pkg = - pkg { elabProgramArgs = Map.mapMaybeWithKey lookupFilter (elabProgramArgs pkg) } - where - knownProgramDb = addKnownPrograms builtinPrograms pkgConfigCompilerProgs - - pkgDesc :: PackageDescription - pkgDesc = elabPkgDescription pkg - - removeEmpty :: [String] -> Maybe [String] - removeEmpty [] = Nothing - removeEmpty xs = Just xs - - lookupFilter :: String -> [String] -> Maybe [String] - lookupFilter n args = removeEmpty $ case lookupKnownProgram n knownProgramDb of - Just p -> programNormaliseArgs p (getVersion p) pkgDesc args - Nothing -> args - - getVersion :: Program -> Maybe Version - getVersion p = lookupProgram p knownProgramDb >>= programVersion - --- | The package/component contains/is a library and so must be registered -elabRequiresRegistration :: ElaboratedConfiguredPackage -> Bool -elabRequiresRegistration elab = - case elabPkgOrComp elab of - ElabComponent comp -> - case compComponentName comp of - Just cn -> is_lib cn && build_target - _ -> False - ElabPackage pkg -> - -- Tricky! Not only do we have to test if the user selected - -- a library as a build target, we also have to test if - -- the library was TRANSITIVELY depended upon, since we will - -- also require a register in this case. - -- - -- NB: It would have been far nicer to just unconditionally - -- register in all cases, but some Custom Setups will fall - -- over if you try to do that, ESPECIALLY if there actually is - -- a library but they hadn't built it. 
- -- - -- However, as the case of `cpphs-1.20.8` has shown in - -- #5379, in cases when a monolithic package gets - -- installed due to its executable components - -- (i.e. exe:cpphs) into the store we *have* to register - -- if there's a buildable public library (i.e. lib:cpphs) - -- that was built and installed into the same store folder - -- as otherwise this will cause build failures once a - -- target actually depends on lib:cpphs. - build_target || (elabBuildStyle elab == BuildAndInstall && - Cabal.hasPublicLib (elabPkgDescription elab)) - -- the next sub-condition below is currently redundant - -- (see discussion in #5604 for more details), but it's - -- being kept intentionally here as a safeguard because if - -- internal libraries ever start working with - -- non-per-component builds this condition won't be - -- redundant anymore. - || any (depends_on_lib pkg) (elabBuildTargets elab) - where - depends_on_lib pkg (ComponentTarget cn _) = - not (null (CD.select (== CD.componentNameToComponent cn) - (pkgDependsOnSelfLib pkg))) - build_target = - if not (null (elabBuildTargets elab)) - then any is_lib_target (elabBuildTargets elab) - -- Empty build targets mean we build /everything/; - -- that means we have to look more carefully to see - -- if there is anything to register - else Cabal.hasLibs (elabPkgDescription elab) - -- NB: this means we DO NOT reregister if you just built a - -- single file - is_lib_target (ComponentTarget cn WholeComponent) = is_lib cn - is_lib_target _ = False - is_lib CLibName = True - is_lib (CSubLibName _) = True - is_lib _ = False - --- | Construct the environment needed for the data files to work. --- This consists of a separate @*_datadir@ variable for each --- inplace package in the plan. -dataDirsEnvironmentForPlan :: DistDirLayout - -> ElaboratedInstallPlan - -> [(String, Maybe FilePath)] -dataDirsEnvironmentForPlan distDirLayout = catMaybes - . 
fmap (InstallPlan.foldPlanPackage - (const Nothing) - (dataDirEnvVarForPackage distDirLayout)) - . InstallPlan.toList - --- | Construct an environment variable that points --- the package's datadir to its correct location. --- This might be: --- * 'Just' the package's source directory plus the data subdirectory --- for inplace packages. --- * 'Nothing' for packages installed in the store (the path was --- already included in the package at install/build time). -dataDirEnvVarForPackage :: DistDirLayout - -> ElaboratedConfiguredPackage - -> Maybe (String, Maybe FilePath) -dataDirEnvVarForPackage distDirLayout pkg = - case elabBuildStyle pkg - of BuildAndInstall -> Nothing - BuildInplaceOnly -> Just - ( pkgPathEnvVar (elabPkgDescription pkg) "datadir" - , Just $ srcPath (elabPkgSourceLocation pkg) - dataDir (elabPkgDescription pkg)) - where - srcPath (LocalUnpackedPackage path) = path - srcPath (LocalTarballPackage _path) = unpackedPath - srcPath (RemoteTarballPackage _uri _localTar) = unpackedPath - srcPath (RepoTarballPackage _repo _packageId _localTar) = unpackedPath - srcPath (RemoteSourceRepoPackage _sourceRepo (Just localCheckout)) = localCheckout - -- TODO: see https://github.com/haskell/cabal/wiki/Potential-Refactors#unresolvedpkgloc - srcPath (RemoteSourceRepoPackage _sourceRepo Nothing) = error - "calling dataDirEnvVarForPackage on a not-downloaded repo is an error" - unpackedPath = - distUnpackedSrcDirectory distDirLayout $ elabPkgSourceId pkg - -instance Package ElaboratedConfiguredPackage where - packageId = elabPkgSourceId - -instance HasConfiguredId ElaboratedConfiguredPackage where - configuredId elab = - ConfiguredId (packageId elab) (elabComponentName elab) (elabComponentId elab) - -instance HasUnitId ElaboratedConfiguredPackage where - installedUnitId = elabUnitId - -instance IsNode ElaboratedConfiguredPackage where - type Key ElaboratedConfiguredPackage = UnitId - nodeKey = elabUnitId - nodeNeighbors = elabOrderDependencies - -instance Binary 
ElaboratedConfiguredPackage - -data ElaboratedPackageOrComponent - = ElabPackage ElaboratedPackage - | ElabComponent ElaboratedComponent - deriving (Eq, Show, Generic) - -instance Binary ElaboratedPackageOrComponent - -elabComponentName :: ElaboratedConfiguredPackage -> Maybe ComponentName -elabComponentName elab = - case elabPkgOrComp elab of - ElabPackage _ -> Just CLibName -- there could be more, but default this - ElabComponent comp -> compComponentName comp - --- | A user-friendly descriptor for an 'ElaboratedConfiguredPackage'. -elabConfiguredName :: Verbosity -> ElaboratedConfiguredPackage -> String -elabConfiguredName verbosity elab - | verbosity <= normal - = (case elabPkgOrComp elab of - ElabPackage _ -> "" - ElabComponent comp -> - case compComponentName comp of - Nothing -> "setup from " - Just CLibName -> "" - Just cname -> display cname ++ " from ") - ++ display (packageId elab) - | otherwise - = display (elabUnitId elab) - -elabDistDirParams :: ElaboratedSharedConfig -> ElaboratedConfiguredPackage -> DistDirParams -elabDistDirParams shared elab = DistDirParams { - distParamUnitId = installedUnitId elab, - distParamComponentId = elabComponentId elab, - distParamPackageId = elabPkgSourceId elab, - distParamComponentName = case elabPkgOrComp elab of - ElabComponent comp -> compComponentName comp - ElabPackage _ -> Nothing, - distParamCompilerId = compilerId (pkgConfigCompiler shared), - distParamPlatform = pkgConfigPlatform shared, - distParamOptimization = elabOptimization elab - } - --- | The full set of dependencies which dictate what order we --- need to build things in the install plan: "order dependencies" --- balls everything together. This is mostly only useful for --- ordering; if you are, for example, trying to compute what --- @--dependency@ flags to pass to a Setup script, you need to --- use 'elabLibDependencies'. This method is the same as --- 'nodeNeighbors'. --- --- NB: this method DOES include setup deps. 
-elabOrderDependencies :: ElaboratedConfiguredPackage -> [UnitId] -elabOrderDependencies elab = - case elabPkgOrComp elab of - -- Important not to have duplicates: otherwise InstallPlan gets - -- confused. - ElabPackage pkg -> ordNub (CD.flatDeps (pkgOrderDependencies pkg)) - ElabComponent comp -> compOrderDependencies comp - --- | Like 'elabOrderDependencies', but only returns dependencies on --- libraries. -elabOrderLibDependencies :: ElaboratedConfiguredPackage -> [UnitId] -elabOrderLibDependencies elab = - case elabPkgOrComp elab of - ElabPackage pkg -> map (newSimpleUnitId . confInstId) $ - ordNub $ CD.flatDeps (pkgLibDependencies pkg) - ElabComponent comp -> compOrderLibDependencies comp - --- | The library dependencies (i.e., the libraries we depend on, NOT --- the dependencies of the library), NOT including setup dependencies. --- These are passed to the @Setup@ script via @--dependency@. -elabLibDependencies :: ElaboratedConfiguredPackage -> [ConfiguredId] -elabLibDependencies elab = - case elabPkgOrComp elab of - ElabPackage pkg -> ordNub (CD.nonSetupDeps (pkgLibDependencies pkg)) - ElabComponent comp -> compLibDependencies comp - --- | Like 'elabOrderDependencies', but only returns dependencies on --- executables. (This coincides with 'elabExeDependencies'.) -elabOrderExeDependencies :: ElaboratedConfiguredPackage -> [UnitId] -elabOrderExeDependencies = - map newSimpleUnitId . elabExeDependencies - --- | The executable dependencies (i.e., the executables we depend on); --- these are the executables we must add to the PATH before we invoke --- the setup script. -elabExeDependencies :: ElaboratedConfiguredPackage -> [ComponentId] -elabExeDependencies elab = map confInstId $ - case elabPkgOrComp elab of - ElabPackage pkg -> CD.nonSetupDeps (pkgExeDependencies pkg) - ElabComponent comp -> compExeDependencies comp - --- | This returns the paths of all the executables we depend on; we --- must add these paths to PATH before invoking the setup script. 
--- (This is usually what you want, not 'elabExeDependencies', if you --- actually want to build something.) -elabExeDependencyPaths :: ElaboratedConfiguredPackage -> [FilePath] -elabExeDependencyPaths elab = - case elabPkgOrComp elab of - ElabPackage pkg -> map snd $ CD.nonSetupDeps (pkgExeDependencyPaths pkg) - ElabComponent comp -> map snd (compExeDependencyPaths comp) - --- | The setup dependencies (the library dependencies of the setup executable; --- note that it is not legal for setup scripts to have executable --- dependencies at the moment.) -elabSetupDependencies :: ElaboratedConfiguredPackage -> [ConfiguredId] -elabSetupDependencies elab = - case elabPkgOrComp elab of - ElabPackage pkg -> CD.setupDeps (pkgLibDependencies pkg) - -- TODO: Custom setups not supported for components yet. When - -- they are, need to do this differently - ElabComponent _ -> [] - -elabPkgConfigDependencies :: ElaboratedConfiguredPackage -> [(PkgconfigName, Maybe Version)] -elabPkgConfigDependencies ElaboratedConfiguredPackage { elabPkgOrComp = ElabPackage pkg } - = pkgPkgConfigDependencies pkg -elabPkgConfigDependencies ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent comp } - = compPkgConfigDependencies comp - --- | The cache files of all our inplace dependencies which, --- when updated, require us to rebuild. See #4202 for --- more details. Essentially, this is a list of filepaths --- that, if our dependencies get rebuilt, will themselves --- get updated. --- --- Note: the hash of these cache files gets built into --- the build cache ourselves, which means that we end --- up tracking transitive dependencies! --- --- Note: This tracks the "build" cache file, but not --- "registration" or "config" cache files. Why not? --- Arguably we should... --- --- Note: This is a bit of a hack, because it is not really --- the hashes of the SOURCES of our (transitive) dependencies --- that we should use to decide whether or not to rebuild, --- but the output BUILD PRODUCTS. 
The strategy we use --- here will never work if we want to implement unchanging --- rebuilds. -elabInplaceDependencyBuildCacheFiles - :: DistDirLayout - -> ElaboratedSharedConfig - -> ElaboratedInstallPlan - -> ElaboratedConfiguredPackage - -> [FilePath] -elabInplaceDependencyBuildCacheFiles layout sconf plan root_elab = - go =<< InstallPlan.directDeps plan (nodeKey root_elab) - where - go = InstallPlan.foldPlanPackage (const []) $ \elab -> do - guard (elabBuildStyle elab == BuildInplaceOnly) - return $ distPackageCacheFile layout (elabDistDirParams sconf elab) "build" - --- | Some extra metadata associated with an --- 'ElaboratedConfiguredPackage' which indicates that the "package" --- in question is actually a single component to be built. Arguably --- it would be clearer if there were an ADT which branched into --- package work items and component work items, but I've structured --- it this way to minimize change to the existing code (which I --- don't feel qualified to rewrite.) -data ElaboratedComponent - = ElaboratedComponent { - -- | The name of the component to be built according to the solver - compSolverName :: CD.Component, - -- | The name of the component to be built. Nothing if - -- it's a setup dep. - compComponentName :: Maybe ComponentName, - -- | The *external* library dependencies of this component. We - -- pass this to the configure script. - compLibDependencies :: [ConfiguredId], - -- | In a component prior to instantiation, this list specifies - -- the 'OpenUnitId's which, after instantiation, are the - -- actual dependencies of this package. Note that this does - -- NOT include signature packages, which do not turn into real - -- ordering dependencies when we instantiate. This is intended to be - -- a purely temporary field, to carry some information to the - -- instantiation phase. It's more precise than - -- 'compLibDependencies', and also stores information about internal - -- dependencies. 
- compLinkedLibDependencies :: [OpenUnitId], - -- | The executable dependencies of this component (including - -- internal executables). - compExeDependencies :: [ConfiguredId], - -- | The @pkg-config@ dependencies of the component - compPkgConfigDependencies :: [(PkgconfigName, Maybe Version)], - -- | The paths all our executable dependencies will be installed - -- to once they are installed. - compExeDependencyPaths :: [(ConfiguredId, FilePath)], - compOrderLibDependencies :: [UnitId] - } - deriving (Eq, Show, Generic) - -instance Binary ElaboratedComponent - --- | See 'elabOrderDependencies'. -compOrderDependencies :: ElaboratedComponent -> [UnitId] -compOrderDependencies comp = - compOrderLibDependencies comp - ++ compOrderExeDependencies comp - --- | See 'elabOrderExeDependencies'. -compOrderExeDependencies :: ElaboratedComponent -> [UnitId] -compOrderExeDependencies = map (newSimpleUnitId . confInstId) . compExeDependencies - -data ElaboratedPackage - = ElaboratedPackage { - pkgInstalledId :: InstalledPackageId, - - -- | The exact dependencies (on other plan packages) - -- - pkgLibDependencies :: ComponentDeps [ConfiguredId], - - -- | Components which depend (transitively) on an internally - -- defined library. These are used by 'elabRequiresRegistration', - -- to determine if a user-requested build is going to need - -- a library registration - -- - pkgDependsOnSelfLib :: ComponentDeps [()], - - -- | Dependencies on executable packages. - -- - pkgExeDependencies :: ComponentDeps [ConfiguredId], - - -- | Paths where executable dependencies live. - -- - pkgExeDependencyPaths :: ComponentDeps [(ConfiguredId, FilePath)], - - -- | Dependencies on @pkg-config@ packages. - -- NB: this is NOT per-component (although it could be) - -- because Cabal library does not track per-component - -- pkg-config depends; it always does them all at once. 
- -- - pkgPkgConfigDependencies :: [(PkgconfigName, Maybe Version)], - - -- | Which optional stanzas (ie testsuites, benchmarks) will actually - -- be enabled during the package configure step. - pkgStanzasEnabled :: Set OptionalStanza - } - deriving (Eq, Show, Generic) - -instance Binary ElaboratedPackage - --- | See 'elabOrderDependencies'. This gives the unflattened version, --- which can be useful in some circumstances. -pkgOrderDependencies :: ElaboratedPackage -> ComponentDeps [UnitId] -pkgOrderDependencies pkg = - fmap (map (newSimpleUnitId . confInstId)) (pkgLibDependencies pkg) `Mon.mappend` - fmap (map (newSimpleUnitId . confInstId)) (pkgExeDependencies pkg) - --- | This is used in the install plan to indicate how the package will be --- built. --- -data BuildStyle = - -- | The classic approach where the package is built, then the files - -- installed into some location and the result registered in a package db. - -- - -- If the package came from a tarball then it's built in a temp dir and - -- the results discarded. - BuildAndInstall - - -- | The package is built, but the files are not installed anywhere, - -- rather the build dir is kept and the package is registered inplace. - -- - -- Such packages can still subsequently be installed. - -- - -- Typically 'BuildAndInstall' packages will only depend on other - -- 'BuildAndInstall' style packages and not on 'BuildInplaceOnly' ones. - -- - | BuildInplaceOnly - deriving (Eq, Show, Generic) - -instance Binary BuildStyle - -type CabalFileText = LBS.ByteString - -type ElaboratedReadyPackage = GenericReadyPackage ElaboratedConfiguredPackage - - ---------------------------- --- Build targets --- - --- | Specific targets within a package or component to act on e.g. to build, --- haddock or open a repl. 
--- -data ComponentTarget = ComponentTarget ComponentName SubComponentTarget - deriving (Eq, Ord, Show, Generic) - -instance Binary ComponentTarget - --- | Unambiguously render a 'ComponentTarget', e.g., to pass --- to a Cabal Setup script. -showComponentTarget :: PackageId -> ComponentTarget -> String -showComponentTarget pkgid = - Cabal.showBuildTarget pkgid . toBuildTarget - where - toBuildTarget :: ComponentTarget -> Cabal.BuildTarget - toBuildTarget (ComponentTarget cname subtarget) = - case subtarget of - WholeComponent -> Cabal.BuildTargetComponent cname - ModuleTarget mname -> Cabal.BuildTargetModule cname mname - FileTarget fname -> Cabal.BuildTargetFile cname fname - -showTestComponentTarget :: PackageId -> ComponentTarget -> Maybe String -showTestComponentTarget _ (ComponentTarget (CTestName n) _) = Just $ display n -showTestComponentTarget _ _ = Nothing - -isTestComponentTarget :: ComponentTarget -> Bool -isTestComponentTarget (ComponentTarget (CTestName _) _) = True -isTestComponentTarget _ = False - -showBenchComponentTarget :: PackageId -> ComponentTarget -> Maybe String -showBenchComponentTarget _ (ComponentTarget (CBenchName n) _) = Just $ display n -showBenchComponentTarget _ _ = Nothing - -isBenchComponentTarget :: ComponentTarget -> Bool -isBenchComponentTarget (ComponentTarget (CBenchName _) _) = True -isBenchComponentTarget _ = False - -isForeignLibComponentTarget :: ComponentTarget -> Bool -isForeignLibComponentTarget (ComponentTarget (CFLibName _) _) = True -isForeignLibComponentTarget _ = False - -isExeComponentTarget :: ComponentTarget -> Bool -isExeComponentTarget (ComponentTarget (CExeName _) _ ) = True -isExeComponentTarget _ = False - -isSubLibComponentTarget :: ComponentTarget -> Bool -isSubLibComponentTarget (ComponentTarget (CSubLibName _) _) = True -isSubLibComponentTarget _ = False - ---------------------------- --- Setup.hs script policy --- - --- | There are four major cases for Setup.hs handling: --- --- 1. 
@build-type@ Custom with a @custom-setup@ section --- 2. @build-type@ Custom without a @custom-setup@ section --- 3. @build-type@ not Custom with @cabal-version > $our-cabal-version@ --- 4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@ --- --- It's also worth noting that packages specifying @cabal-version: >= 1.23@ --- or later that have @build-type@ Custom will always have a @custom-setup@ --- section. Therefore in case 2, the specified @cabal-version@ will always be --- less than 1.23. --- --- In cases 1 and 2 we obviously have to build an external Setup.hs script, --- while in case 4 we can use the internal library API. In case 3 we also have --- to build an external Setup.hs script because the package needs a later --- Cabal lib version than we can support internally. --- -data SetupScriptStyle = SetupCustomExplicitDeps - | SetupCustomImplicitDeps - | SetupNonCustomExternalLib - | SetupNonCustomInternalLib - deriving (Eq, Show, Generic, Typeable) - -instance Binary SetupScriptStyle - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectPlanning.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectPlanning.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectPlanning.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectPlanning.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,3685 +0,0 @@ -{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, RankNTypes #-} -{-# LANGUAGE ViewPatterns #-} -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE NoMonoLocalBinds #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveFunctor #-} - --- | Planning how to build everything in a project. 
--- -module Distribution.Client.ProjectPlanning ( - -- * elaborated install plan types - ElaboratedInstallPlan, - ElaboratedConfiguredPackage(..), - ElaboratedPlanPackage, - ElaboratedSharedConfig(..), - ElaboratedReadyPackage, - BuildStyle(..), - CabalFileText, - - -- * Producing the elaborated install plan - rebuildProjectConfig, - rebuildInstallPlan, - - -- * Build targets - availableTargets, - AvailableTarget(..), - AvailableTargetStatus(..), - TargetRequested(..), - ComponentTarget(..), - SubComponentTarget(..), - showComponentTarget, - nubComponentTargets, - - -- * Selecting a plan subset - pruneInstallPlanToTargets, - TargetAction(..), - pruneInstallPlanToDependencies, - CannotPruneDependencies(..), - - -- * Utils required for building - pkgHasEphemeralBuildTargets, - elabBuildTargetWholeComponents, - - -- * Setup.hs CLI flags for building - setupHsScriptOptions, - setupHsConfigureFlags, - setupHsConfigureArgs, - setupHsBuildFlags, - setupHsBuildArgs, - setupHsReplFlags, - setupHsReplArgs, - setupHsTestFlags, - setupHsTestArgs, - setupHsBenchFlags, - setupHsBenchArgs, - setupHsCopyFlags, - setupHsRegisterFlags, - setupHsHaddockFlags, - setupHsHaddockArgs, - - packageHashInputs, - - -- * Path construction - binDirectoryFor, - binDirectories - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.ProjectPlanning.Types as Ty -import Distribution.Client.PackageHash -import Distribution.Client.RebuildMonad -import Distribution.Client.Store -import Distribution.Client.ProjectConfig -import Distribution.Client.ProjectPlanOutput - -import Distribution.Client.Types -import qualified Distribution.Client.InstallPlan as InstallPlan -import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan -import Distribution.Client.Dependency -import Distribution.Client.Dependency.Types -import qualified Distribution.Client.IndexUtils as IndexUtils -import Distribution.Client.Init (incVersion) -import 
Distribution.Client.Targets (userToPackageConstraint) -import Distribution.Client.DistDirLayout -import Distribution.Client.SetupWrapper -import Distribution.Client.JobControl -import Distribution.Client.FetchUtils -import Distribution.Client.Config -import qualified Hackage.Security.Client as Sec -import Distribution.Client.Setup hiding (packageName, cabalVersion) -import Distribution.Utils.NubList -import Distribution.Utils.LogProgress -import Distribution.Utils.MapAccum - -import qualified Distribution.Solver.Types.ComponentDeps as CD -import Distribution.Solver.Types.ComponentDeps (ComponentDeps) -import Distribution.Solver.Types.ConstraintSource -import Distribution.Solver.Types.LabeledPackageConstraint -import Distribution.Solver.Types.OptionalStanza -import Distribution.Solver.Types.PkgConfigDb -import Distribution.Solver.Types.ResolverPackage -import Distribution.Solver.Types.SolverId -import Distribution.Solver.Types.SolverPackage -import Distribution.Solver.Types.InstSolverPackage -import Distribution.Solver.Types.SourcePackage -import Distribution.Solver.Types.Settings - -import Distribution.ModuleName -import Distribution.Package hiding - (InstalledPackageId, installedPackageId) -import Distribution.Types.AnnotatedId -import Distribution.Types.ComponentName -import Distribution.Types.PkgconfigDependency -import Distribution.Types.UnqualComponentName -import Distribution.System -import qualified Distribution.PackageDescription as Cabal -import qualified Distribution.PackageDescription as PD -import qualified Distribution.PackageDescription.Configuration as PD -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import Distribution.Simple.Compiler hiding (Flag) -import qualified Distribution.Simple.GHC as GHC --TODO: [code cleanup] eliminate -import qualified Distribution.Simple.GHCJS as GHCJS --TODO: [code cleanup] eliminate -import Distribution.Simple.Program -import Distribution.Simple.Program.Db -import Distribution.Simple.Program.Find 
-import qualified Distribution.Simple.Setup as Cabal -import Distribution.Simple.Setup - (Flag, toFlag, flagToMaybe, flagToList, fromFlagOrDefault) -import qualified Distribution.Simple.Configure as Cabal -import qualified Distribution.Simple.LocalBuildInfo as Cabal -import Distribution.Simple.LocalBuildInfo - ( Component(..), pkgComponents, componentBuildInfo - , componentName ) -import qualified Distribution.Simple.InstallDirs as InstallDirs -import qualified Distribution.InstalledPackageInfo as IPI - -import Distribution.Backpack.ConfiguredComponent -import Distribution.Backpack.LinkedComponent -import Distribution.Backpack.ComponentsGraph -import Distribution.Backpack.ModuleShape -import Distribution.Backpack.FullUnitId -import Distribution.Backpack -import Distribution.Types.ComponentInclude - -import Distribution.Simple.Utils -import Distribution.Version -import Distribution.Verbosity -import Distribution.Text - -import qualified Distribution.Compat.Graph as Graph -import Distribution.Compat.Graph(IsNode(..)) - -import Text.PrettyPrint hiding ((<>)) -import qualified Text.PrettyPrint as Disp -import qualified Data.Map as Map -import Data.Set (Set) -import qualified Data.Set as Set -import Control.Monad -import qualified Data.Traversable as T -import Control.Monad.State as State -import Control.Exception -import Data.List (groupBy) -import Data.Either -import Data.Function -import System.FilePath - ------------------------------------------------------------------------------- --- * Elaborated install plan ------------------------------------------------------------------------------- - --- "Elaborated" -- worked out with great care and nicety of detail; --- executed with great minuteness: elaborate preparations; --- elaborate care. 
--- --- So here's the idea: --- --- Rather than a miscellaneous collection of 'ConfigFlags', 'InstallFlags' etc --- all passed in as separate args and which are then further selected, --- transformed etc during the execution of the build. Instead we construct --- an elaborated install plan that includes everything we will need, and then --- during the execution of the plan we do as little transformation of this --- info as possible. --- --- So we're trying to split the work into two phases: construction of the --- elaborated install plan (which as far as possible should be pure) and --- then simple execution of that plan without any smarts, just doing what the --- plan says to do. --- --- So that means we need a representation of this fully elaborated install --- plan. The representation consists of two parts: --- --- * A 'ElaboratedInstallPlan'. This is a 'GenericInstallPlan' with a --- representation of source packages that includes a lot more detail about --- that package's individual configuration --- --- * A 'ElaboratedSharedConfig'. Some package configuration is the same for --- every package in a plan. Rather than duplicate that info every entry in --- the 'GenericInstallPlan' we keep that separately. --- --- The division between the shared and per-package config is /not set in stone --- for all time/. For example if we wanted to generalise the install plan to --- describe a situation where we want to build some packages with GHC and some --- with GHCJS then the platform and compiler would no longer be shared between --- all packages but would have to be per-package (probably with some sanity --- condition on the graph structure). --- - --- Refer to ProjectPlanning.Types for details of these important types: - --- type ElaboratedInstallPlan = ... --- type ElaboratedPlanPackage = ... --- data ElaboratedSharedConfig = ... --- data ElaboratedConfiguredPackage = ... 
--- data BuildStyle = - - --- | Check that an 'ElaboratedConfiguredPackage' actually makes --- sense under some 'ElaboratedSharedConfig'. -sanityCheckElaboratedConfiguredPackage - :: ElaboratedSharedConfig - -> ElaboratedConfiguredPackage - -> a - -> a -sanityCheckElaboratedConfiguredPackage sharedConfig - elab@ElaboratedConfiguredPackage{..} = - (case elabPkgOrComp of - ElabPackage pkg -> sanityCheckElaboratedPackage elab pkg - ElabComponent comp -> sanityCheckElaboratedComponent elab comp) - - -- either a package is being built inplace, or the - -- 'installedPackageId' we assigned is consistent with - -- the 'hashedInstalledPackageId' we would compute from - -- the elaborated configured package - . assert (elabBuildStyle == BuildInplaceOnly || - elabComponentId == hashedInstalledPackageId - (packageHashInputs sharedConfig elab)) - - -- the stanzas explicitly disabled should not be available - . assert (Set.null (Map.keysSet (Map.filter not elabStanzasRequested) - `Set.intersection` elabStanzasAvailable)) - - -- either a package is built inplace, or we are not attempting to - -- build any test suites or benchmarks (we never build these - -- for remote packages!) - . assert (elabBuildStyle == BuildInplaceOnly || - Set.null elabStanzasAvailable) - -sanityCheckElaboratedComponent - :: ElaboratedConfiguredPackage - -> ElaboratedComponent - -> a - -> a -sanityCheckElaboratedComponent ElaboratedConfiguredPackage{..} - ElaboratedComponent{..} = - - -- Should not be building bench or test if not inplace. 
- assert (elabBuildStyle == BuildInplaceOnly || - case compComponentName of - Nothing -> True - Just CLibName -> True - Just (CSubLibName _) -> True - Just (CExeName _) -> True - -- This is interesting: there's no way to declare a dependency - -- on a foreign library at the moment, but you may still want - -- to install these to the store - Just (CFLibName _) -> True - Just (CBenchName _) -> False - Just (CTestName _) -> False) - - -sanityCheckElaboratedPackage - :: ElaboratedConfiguredPackage - -> ElaboratedPackage - -> a - -> a -sanityCheckElaboratedPackage ElaboratedConfiguredPackage{..} - ElaboratedPackage{..} = - -- we should only have enabled stanzas that actually can be built - -- (according to the solver) - assert (pkgStanzasEnabled `Set.isSubsetOf` elabStanzasAvailable) - - -- the stanzas that the user explicitly requested should be - -- enabled (by the previous test, they are also available) - . assert (Map.keysSet (Map.filter id elabStanzasRequested) - `Set.isSubsetOf` pkgStanzasEnabled) - ------------------------------------------------------------------------------- --- * Deciding what to do: making an 'ElaboratedInstallPlan' ------------------------------------------------------------------------------- - --- | Return the up-to-date project config and information about the local --- packages within the project. 
--- -rebuildProjectConfig :: Verbosity - -> DistDirLayout - -> ProjectConfig - -> IO ( ProjectConfig - , [PackageSpecifier UnresolvedSourcePackage] ) -rebuildProjectConfig verbosity - distDirLayout@DistDirLayout { - distProjectRootDirectory, - distDirectory, - distProjectCacheFile, - distProjectCacheDirectory, - distProjectFile - } - cliConfig = do - - fileMonitorProjectConfigKey <- do - configPath <- getConfigFilePath projectConfigConfigFile - return (configPath, distProjectFile "") - - (projectConfig, localPackages) <- - runRebuild distProjectRootDirectory - $ rerunIfChanged verbosity - fileMonitorProjectConfig - fileMonitorProjectConfigKey - $ do - liftIO $ info verbosity "Project settings changed, reconfiguring..." - projectConfig <- phaseReadProjectConfig - localPackages <- phaseReadLocalPackages projectConfig - return (projectConfig, localPackages) - - info verbosity - $ unlines - $ ("this build was affected by the following (project) config files:" :) - $ [ "- " ++ path - | Explicit path <- Set.toList $ projectConfigProvenance projectConfig - ] - - return (projectConfig <> cliConfig, localPackages) - - where - - ProjectConfigShared { projectConfigConfigFile } = - projectConfigShared cliConfig - - fileMonitorProjectConfig = - newFileMonitor (distProjectCacheFile "config") :: FileMonitor - (FilePath, FilePath) - (ProjectConfig, [PackageSpecifier UnresolvedSourcePackage]) - - -- Read the cabal.project (or implicit config) and combine it with - -- arguments from the command line - -- - phaseReadProjectConfig :: Rebuild ProjectConfig - phaseReadProjectConfig = do - readProjectConfig verbosity projectConfigConfigFile distDirLayout - - -- Look for all the cabal packages in the project - -- some of which may be local src dirs, tarballs etc - -- - phaseReadLocalPackages :: ProjectConfig - -> Rebuild [PackageSpecifier UnresolvedSourcePackage] - phaseReadLocalPackages projectConfig@ProjectConfig { - projectConfigShared, - projectConfigBuildOnly - } = do - pkgLocations 
<- findProjectPackages distDirLayout projectConfig - - -- Create folder only if findProjectPackages did not throw a - -- BadPackageLocations exception. - liftIO $ do - createDirectoryIfMissingVerbose verbosity True distDirectory - createDirectoryIfMissingVerbose verbosity True distProjectCacheDirectory - - fetchAndReadSourcePackages verbosity distDirLayout - projectConfigShared - projectConfigBuildOnly - pkgLocations - - --- | Return an up-to-date elaborated install plan. --- --- Two variants of the install plan are returned: with and without packages --- from the store. That is, the \"improved\" plan where source packages are --- replaced by pre-existing installed packages from the store (when their ids --- match), and also the original elaborated plan which uses primarily source --- packages. - --- The improved plan is what we use for building, but the original elaborated --- plan is useful for reporting and configuration. For example the @freeze@ --- command needs the source package info to know about flag choices and --- dependencies of executables and setup scripts. 
--- -rebuildInstallPlan :: Verbosity - -> DistDirLayout -> CabalDirLayout - -> ProjectConfig - -> [PackageSpecifier UnresolvedSourcePackage] - -> IO ( ElaboratedInstallPlan -- with store packages - , ElaboratedInstallPlan -- with source packages - , ElaboratedSharedConfig ) - -- ^ @(improvedPlan, elaboratedPlan, _, _)@ -rebuildInstallPlan verbosity - distDirLayout@DistDirLayout { - distProjectRootDirectory, - distProjectCacheFile - } - CabalDirLayout { - cabalStoreDirLayout - } = \projectConfig localPackages -> - runRebuild distProjectRootDirectory $ do - progsearchpath <- liftIO $ getSystemSearchPath - let projectConfigMonitored = projectConfig { projectConfigBuildOnly = mempty } - - -- The overall improved plan is cached - rerunIfChanged verbosity fileMonitorImprovedPlan - -- react to changes in the project config, - -- the package .cabal files and the path - (projectConfigMonitored, localPackages, progsearchpath) $ do - - -- And so is the elaborated plan that the improved plan based on - (elaboratedPlan, elaboratedShared) <- - rerunIfChanged verbosity fileMonitorElaboratedPlan - (projectConfigMonitored, localPackages, - progsearchpath) $ do - - compilerEtc <- phaseConfigureCompiler projectConfig - _ <- phaseConfigurePrograms projectConfig compilerEtc - (solverPlan, pkgConfigDB) - <- phaseRunSolver projectConfig - compilerEtc - localPackages - (elaboratedPlan, - elaboratedShared) <- phaseElaboratePlan projectConfig - compilerEtc pkgConfigDB - solverPlan - localPackages - - phaseMaintainPlanOutputs elaboratedPlan elaboratedShared - return (elaboratedPlan, elaboratedShared) - - -- The improved plan changes each time we install something, whereas - -- the underlying elaborated plan only changes when input config - -- changes, so it's worth caching them separately. 
- improvedPlan <- phaseImprovePlan elaboratedPlan elaboratedShared - - return (improvedPlan, elaboratedPlan, elaboratedShared) - - where - fileMonitorCompiler = newFileMonitorInCacheDir "compiler" - fileMonitorSolverPlan = newFileMonitorInCacheDir "solver-plan" - fileMonitorSourceHashes = newFileMonitorInCacheDir "source-hashes" - fileMonitorElaboratedPlan = newFileMonitorInCacheDir "elaborated-plan" - fileMonitorImprovedPlan = newFileMonitorInCacheDir "improved-plan" - - newFileMonitorInCacheDir :: Eq a => FilePath -> FileMonitor a b - newFileMonitorInCacheDir = newFileMonitor . distProjectCacheFile - - - -- Configure the compiler we're using. - -- - -- This is moderately expensive and doesn't change that often so we cache - -- it independently. - -- - phaseConfigureCompiler :: ProjectConfig - -> Rebuild (Compiler, Platform, ProgramDb) - phaseConfigureCompiler ProjectConfig { - projectConfigShared = ProjectConfigShared { - projectConfigHcFlavor, - projectConfigHcPath, - projectConfigHcPkg - }, - projectConfigLocalPackages = PackageConfig { - packageConfigProgramPaths, - packageConfigProgramArgs, - packageConfigProgramPathExtra - } - } = do - progsearchpath <- liftIO $ getSystemSearchPath - rerunIfChanged verbosity fileMonitorCompiler - (hcFlavor, hcPath, hcPkg, progsearchpath, - packageConfigProgramPaths, - packageConfigProgramArgs, - packageConfigProgramPathExtra) $ do - - liftIO $ info verbosity "Compiler settings changed, reconfiguring..." - result@(_, _, progdb') <- liftIO $ - Cabal.configCompilerEx - hcFlavor hcPath hcPkg - progdb verbosity - - -- Note that we added the user-supplied program locations and args - -- for /all/ programs, not just those for the compiler prog and - -- compiler-related utils. In principle we don't know which programs - -- the compiler will configure (and it does vary between compilers). - -- We do know however that the compiler will only configure the - -- programs it cares about, and those are the ones we monitor here. 
- monitorFiles (programsMonitorFiles progdb') - - return result - where - hcFlavor = flagToMaybe projectConfigHcFlavor - hcPath = flagToMaybe projectConfigHcPath - hcPkg = flagToMaybe projectConfigHcPkg - progdb = - userSpecifyPaths (Map.toList (getMapLast packageConfigProgramPaths)) - . userSpecifyArgss (Map.toList (getMapMappend packageConfigProgramArgs)) - . modifyProgramSearchPath - (++ [ ProgramSearchPathDir dir - | dir <- fromNubList packageConfigProgramPathExtra ]) - $ defaultProgramDb - - - -- Configuring other programs. - -- - -- Having configred the compiler, now we configure all the remaining - -- programs. This is to check we can find them, and to monitor them for - -- changes. - -- - -- TODO: [required eventually] we don't actually do this yet. - -- - -- We rely on the fact that the previous phase added the program config for - -- all local packages, but that all the programs configured so far are the - -- compiler program or related util programs. - -- - phaseConfigurePrograms :: ProjectConfig - -> (Compiler, Platform, ProgramDb) - -> Rebuild () - phaseConfigurePrograms projectConfig (_, _, compilerprogdb) = do - -- Users are allowed to specify program locations independently for - -- each package (e.g. to use a particular version of a pre-processor - -- for some packages). However they cannot do this for the compiler - -- itself as that's just not going to work. So we check for this. - liftIO $ checkBadPerPackageCompilerPaths - (configuredPrograms compilerprogdb) - (getMapMappend (projectConfigSpecificPackage projectConfig)) - - --TODO: [required eventually] find/configure other programs that the - -- user specifies. - - --TODO: [required eventually] find/configure all build-tools - -- but note that some of them may be built as part of the plan. - - - -- Run the solver to get the initial install plan. - -- This is expensive so we cache it independently. 
- -- - phaseRunSolver :: ProjectConfig - -> (Compiler, Platform, ProgramDb) - -> [PackageSpecifier UnresolvedSourcePackage] - -> Rebuild (SolverInstallPlan, PkgConfigDb) - phaseRunSolver projectConfig@ProjectConfig { - projectConfigShared, - projectConfigBuildOnly - } - (compiler, platform, progdb) - localPackages = - rerunIfChanged verbosity fileMonitorSolverPlan - (solverSettings, - localPackages, localPackagesEnabledStanzas, - compiler, platform, programDbSignature progdb) $ do - - installedPkgIndex <- getInstalledPackages verbosity - compiler progdb platform - corePackageDbs - sourcePkgDb <- getSourcePackages verbosity withRepoCtx - (solverSettingIndexState solverSettings) - pkgConfigDB <- getPkgConfigDb verbosity progdb - - --TODO: [code cleanup] it'd be better if the Compiler contained the - -- ConfiguredPrograms that it needs, rather than relying on the progdb - -- since we don't need to depend on all the programs here, just the - -- ones relevant for the compiler. - - liftIO $ do - solver <- chooseSolver verbosity - (solverSettingSolver solverSettings) - (compilerInfo compiler) - - notice verbosity "Resolving dependencies..." 
- plan <- foldProgress logMsg (die' verbosity) return $ - planPackages verbosity compiler platform solver solverSettings - installedPkgIndex sourcePkgDb pkgConfigDB - localPackages localPackagesEnabledStanzas - return (plan, pkgConfigDB) - where - corePackageDbs = [GlobalPackageDB] - withRepoCtx = projectConfigWithSolverRepoContext verbosity - projectConfigShared - projectConfigBuildOnly - solverSettings = resolveSolverSettings projectConfig - logMsg message rest = debugNoWrap verbosity message >> rest - - localPackagesEnabledStanzas = - Map.fromList - [ (pkgname, stanzas) - | pkg <- localPackages - , let pkgname = pkgSpecifierTarget pkg - testsEnabled = lookupLocalPackageConfig - packageConfigTests - projectConfig pkgname - benchmarksEnabled = lookupLocalPackageConfig - packageConfigBenchmarks - projectConfig pkgname - stanzas = - Map.fromList $ - [ (TestStanzas, enabled) - | enabled <- flagToList testsEnabled ] - ++ [ (BenchStanzas , enabled) - | enabled <- flagToList benchmarksEnabled ] - ] - - -- Elaborate the solver's install plan to get a fully detailed plan. This - -- version of the plan has the final nix-style hashed ids. - -- - phaseElaboratePlan :: ProjectConfig - -> (Compiler, Platform, ProgramDb) - -> PkgConfigDb - -> SolverInstallPlan - -> [PackageSpecifier (SourcePackage (PackageLocation loc))] - -> Rebuild ( ElaboratedInstallPlan - , ElaboratedSharedConfig ) - phaseElaboratePlan ProjectConfig { - projectConfigShared, - projectConfigAllPackages, - projectConfigLocalPackages, - projectConfigSpecificPackage, - projectConfigBuildOnly - } - (compiler, platform, progdb) pkgConfigDB - solverPlan localPackages = do - - liftIO $ debug verbosity "Elaborating the install plan..." 
- - sourcePackageHashes <- - rerunIfChanged verbosity fileMonitorSourceHashes - (packageLocationsSignature solverPlan) $ - getPackageSourceHashes verbosity withRepoCtx solverPlan - - defaultInstallDirs <- liftIO $ userInstallDirTemplates compiler - (elaboratedPlan, elaboratedShared) - <- liftIO . runLogProgress verbosity $ - elaborateInstallPlan - verbosity - platform compiler progdb pkgConfigDB - distDirLayout - cabalStoreDirLayout - solverPlan - localPackages - sourcePackageHashes - defaultInstallDirs - projectConfigShared - projectConfigAllPackages - projectConfigLocalPackages - (getMapMappend projectConfigSpecificPackage) - let instantiatedPlan = instantiateInstallPlan elaboratedPlan - liftIO $ debugNoWrap verbosity (InstallPlan.showInstallPlan instantiatedPlan) - return (instantiatedPlan, elaboratedShared) - where - withRepoCtx = projectConfigWithSolverRepoContext verbosity - projectConfigShared - projectConfigBuildOnly - - -- Update the files we maintain that reflect our current build environment. - -- In particular we maintain a JSON representation of the elaborated - -- install plan (but not the improved plan since that reflects the state - -- of the build rather than just the input environment). - -- - phaseMaintainPlanOutputs :: ElaboratedInstallPlan - -> ElaboratedSharedConfig - -> Rebuild () - phaseMaintainPlanOutputs elaboratedPlan elaboratedShared = liftIO $ do - debug verbosity "Updating plan.json" - writePlanExternalRepresentation - distDirLayout - elaboratedPlan - elaboratedShared - - - -- Improve the elaborated install plan. The elaborated plan consists - -- mostly of source packages (with full nix-style hashed ids). Where - -- corresponding installed packages already exist in the store, replace - -- them in the plan. - -- - -- Note that we do monitor the store's package db here, so we will redo - -- this improvement phase when the db changes -- including as a result of - -- executing a plan and installing things. 
- -- - phaseImprovePlan :: ElaboratedInstallPlan - -> ElaboratedSharedConfig - -> Rebuild ElaboratedInstallPlan - phaseImprovePlan elaboratedPlan elaboratedShared = do - - liftIO $ debug verbosity "Improving the install plan..." - storePkgIdSet <- getStoreEntries cabalStoreDirLayout compid - let improvedPlan = improveInstallPlanWithInstalledPackages - storePkgIdSet - elaboratedPlan - liftIO $ debugNoWrap verbosity (InstallPlan.showInstallPlan improvedPlan) - -- TODO: [nice to have] having checked which packages from the store - -- we're using, it may be sensible to sanity check those packages - -- by loading up the compiler package db and checking everything - -- matches up as expected, e.g. no dangling deps, files deleted. - return improvedPlan - where - compid = compilerId (pkgConfigCompiler elaboratedShared) - - -programsMonitorFiles :: ProgramDb -> [MonitorFilePath] -programsMonitorFiles progdb = - [ monitor - | prog <- configuredPrograms progdb - , monitor <- monitorFileSearchPath (programMonitorFiles prog) - (programPath prog) - ] - --- | Select the bits of a 'ProgramDb' to monitor for value changes. --- Use 'programsMonitorFiles' for the files to monitor. --- -programDbSignature :: ProgramDb -> [ConfiguredProgram] -programDbSignature progdb = - [ prog { programMonitorFiles = [] - , programOverrideEnv = filter ((/="PATH") . fst) - (programOverrideEnv prog) } - | prog <- configuredPrograms progdb ] - -getInstalledPackages :: Verbosity - -> Compiler -> ProgramDb -> Platform - -> PackageDBStack - -> Rebuild InstalledPackageIndex -getInstalledPackages verbosity compiler progdb platform packagedbs = do - monitorFiles . 
map monitorFileOrDirectory - =<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles - verbosity compiler - packagedbs progdb platform) - liftIO $ IndexUtils.getInstalledPackages - verbosity compiler - packagedbs progdb - -{- ---TODO: [nice to have] use this but for sanity / consistency checking -getPackageDBContents :: Verbosity - -> Compiler -> ProgramDb -> Platform - -> PackageDB - -> Rebuild InstalledPackageIndex -getPackageDBContents verbosity compiler progdb platform packagedb = do - monitorFiles . map monitorFileOrDirectory - =<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles - verbosity compiler - [packagedb] progdb platform) - liftIO $ do - createPackageDBIfMissing verbosity compiler progdb packagedb - Cabal.getPackageDBContents verbosity compiler - packagedb progdb --} - -getSourcePackages :: Verbosity -> (forall a. (RepoContext -> IO a) -> IO a) - -> Maybe IndexUtils.IndexState -> Rebuild SourcePackageDb -getSourcePackages verbosity withRepoCtx idxState = do - (sourcePkgDb, repos) <- - liftIO $ - withRepoCtx $ \repoctx -> do - sourcePkgDb <- IndexUtils.getSourcePackagesAtIndexState verbosity - repoctx idxState - return (sourcePkgDb, repoContextRepos repoctx) - - mapM_ needIfExists - . IndexUtils.getSourcePackagesMonitorFiles - $ repos - return sourcePkgDb - - -getPkgConfigDb :: Verbosity -> ProgramDb -> Rebuild PkgConfigDb -getPkgConfigDb verbosity progdb = do - dirs <- liftIO $ getPkgConfigDbDirs verbosity progdb - -- Just monitor the dirs so we'll notice new .pc files. - -- Alternatively we could monitor all the .pc files too. - mapM_ monitorDirectoryStatus dirs - liftIO $ readPkgConfigDb verbosity progdb - - --- | Select the config values to monitor for changes package source hashes. 
-packageLocationsSignature :: SolverInstallPlan - -> [(PackageId, PackageLocation (Maybe FilePath))] -packageLocationsSignature solverPlan = - [ (packageId pkg, packageSource pkg) - | SolverInstallPlan.Configured (SolverPackage { solverPkgSource = pkg}) - <- SolverInstallPlan.toList solverPlan - ] - - --- | Get the 'HashValue' for all the source packages where we use hashes, --- and download any packages required to do so. --- --- Note that we don't get hashes for local unpacked packages. --- -getPackageSourceHashes :: Verbosity - -> (forall a. (RepoContext -> IO a) -> IO a) - -> SolverInstallPlan - -> Rebuild (Map PackageId PackageSourceHash) -getPackageSourceHashes verbosity withRepoCtx solverPlan = do - - -- Determine if and where to get the package's source hash from. - -- - let allPkgLocations :: [(PackageId, PackageLocation (Maybe FilePath))] - allPkgLocations = - [ (packageId pkg, packageSource pkg) - | SolverInstallPlan.Configured (SolverPackage { solverPkgSource = pkg}) - <- SolverInstallPlan.toList solverPlan ] - - -- Tarballs that were local in the first place. - -- We'll hash these tarball files directly. - localTarballPkgs :: [(PackageId, FilePath)] - localTarballPkgs = - [ (pkgid, tarball) - | (pkgid, LocalTarballPackage tarball) <- allPkgLocations ] - - -- Tarballs from remote URLs. We must have downloaded these already - -- (since we extracted the .cabal file earlier) - --TODO: [required eventually] finish remote tarball functionality --- allRemoteTarballPkgs = --- [ (pkgid, ) --- | (pkgid, RemoteTarballPackage ) <- allPkgLocations ] - - -- Tarballs from repositories, either where the repository provides - -- hashes as part of the repo metadata, or where we will have to - -- download and hash the tarball. 
- repoTarballPkgsWithMetadata :: [(PackageId, Repo)] - repoTarballPkgsWithoutMetadata :: [(PackageId, Repo)] - (repoTarballPkgsWithMetadata, - repoTarballPkgsWithoutMetadata) = - partitionEithers - [ case repo of - RepoSecure{} -> Left (pkgid, repo) - _ -> Right (pkgid, repo) - | (pkgid, RepoTarballPackage repo _ _) <- allPkgLocations ] - - -- For tarballs from repos that do not have hashes available we now have - -- to check if the packages were downloaded already. - -- - (repoTarballPkgsToDownload, - repoTarballPkgsDownloaded) - <- fmap partitionEithers $ - liftIO $ sequence - [ do mtarball <- checkRepoTarballFetched repo pkgid - case mtarball of - Nothing -> return (Left (pkgid, repo)) - Just tarball -> return (Right (pkgid, tarball)) - | (pkgid, repo) <- repoTarballPkgsWithoutMetadata ] - - (hashesFromRepoMetadata, - repoTarballPkgsNewlyDownloaded) <- - -- Avoid having to initialise the repository (ie 'withRepoCtx') if we - -- don't have to. (The main cost is configuring the http client.) - if null repoTarballPkgsToDownload && null repoTarballPkgsWithMetadata - then return (Map.empty, []) - else liftIO $ withRepoCtx $ \repoctx -> do - - -- For tarballs from repos that do have hashes available as part of the - -- repo metadata we now load up the index for each repo and retrieve - -- the hashes for the packages - -- - hashesFromRepoMetadata <- - Sec.uncheckClientErrors $ --TODO: [code cleanup] wrap in our own exceptions - fmap (Map.fromList . concat) $ - sequence - -- Reading the repo index is expensive so we group the packages by repo - [ repoContextWithSecureRepo repoctx repo $ \secureRepo -> - Sec.withIndex secureRepo $ \repoIndex -> - sequence - [ do hash <- Sec.trusted <$> -- strip off Trusted tag - Sec.indexLookupHash repoIndex pkgid - -- Note that hackage-security currently uses SHA256 - -- but this API could in principle give us some other - -- choice in future. 
- return (pkgid, hashFromTUF hash) - | pkgid <- pkgids ] - | (repo, pkgids) <- - map (\grp@((_,repo):_) -> (repo, map fst grp)) - . groupBy ((==) `on` (remoteRepoName . repoRemote . snd)) - . sortBy (compare `on` (remoteRepoName . repoRemote . snd)) - $ repoTarballPkgsWithMetadata - ] - - -- For tarballs from repos that do not have hashes available, download - -- the ones we previously determined we need. - -- - repoTarballPkgsNewlyDownloaded <- - sequence - [ do tarball <- fetchRepoTarball verbosity repoctx repo pkgid - return (pkgid, tarball) - | (pkgid, repo) <- repoTarballPkgsToDownload ] - - return (hashesFromRepoMetadata, - repoTarballPkgsNewlyDownloaded) - - -- Hash tarball files for packages where we have to do that. This includes - -- tarballs that were local in the first place, plus tarballs from repos, - -- either previously cached or freshly downloaded. - -- - let allTarballFilePkgs :: [(PackageId, FilePath)] - allTarballFilePkgs = localTarballPkgs - ++ repoTarballPkgsDownloaded - ++ repoTarballPkgsNewlyDownloaded - hashesFromTarballFiles <- liftIO $ - fmap Map.fromList $ - sequence - [ do srchash <- readFileHashValue tarball - return (pkgid, srchash) - | (pkgid, tarball) <- allTarballFilePkgs - ] - monitorFiles [ monitorFile tarball - | (_pkgid, tarball) <- allTarballFilePkgs ] - - -- Return the combination - return $! 
hashesFromRepoMetadata - <> hashesFromTarballFiles - - --- ------------------------------------------------------------ --- * Installation planning --- ------------------------------------------------------------ - -planPackages :: Verbosity - -> Compiler - -> Platform - -> Solver -> SolverSettings - -> InstalledPackageIndex - -> SourcePackageDb - -> PkgConfigDb - -> [PackageSpecifier UnresolvedSourcePackage] - -> Map PackageName (Map OptionalStanza Bool) - -> Progress String String SolverInstallPlan -planPackages verbosity comp platform solver SolverSettings{..} - installedPkgIndex sourcePkgDb pkgConfigDB - localPackages pkgStanzasEnable = - - resolveDependencies - platform (compilerInfo comp) - pkgConfigDB solver - resolverParams - - where - - --TODO: [nice to have] disable multiple instances restriction in the solver, but then - -- make sure we can cope with that in the output. - resolverParams = - - setMaxBackjumps solverSettingMaxBackjumps - - . setIndependentGoals solverSettingIndependentGoals - - . setReorderGoals solverSettingReorderGoals - - . setCountConflicts solverSettingCountConflicts - - --TODO: [required eventually] should only be configurable for custom installs - -- . setAvoidReinstalls solverSettingAvoidReinstalls - - --TODO: [required eventually] should only be configurable for custom installs - -- . setShadowPkgs solverSettingShadowPkgs - - . setStrongFlags solverSettingStrongFlags - - . setAllowBootLibInstalls solverSettingAllowBootLibInstalls - - . setSolverVerbosity verbosity - - --TODO: [required eventually] decide if we need to prefer installed for - -- global packages, or prefer latest even for global packages. Perhaps - -- should be configurable but with a different name than "upgrade-dependencies". - . setPreferenceDefault PreferLatestForSelected - {-(if solverSettingUpgradeDeps - then PreferAllLatest - else PreferLatestForSelected)-} - - . removeLowerBounds solverSettingAllowOlder - . removeUpperBounds solverSettingAllowNewer - - . 
addDefaultSetupDependencies (defaultSetupDeps comp platform - . PD.packageDescription - . packageDescription) - - . addSetupCabalMinVersionConstraint setupMinCabalVersionConstraint - . addSetupCabalMaxVersionConstraint setupMaxCabalVersionConstraint - - . addPreferences - -- preferences from the config file or command line - [ PackageVersionPreference name ver - | Dependency name ver <- solverSettingPreferences ] - - . addConstraints - -- version constraints from the config file or command line - [ LabeledPackageConstraint (userToPackageConstraint pc) src - | (pc, src) <- solverSettingConstraints ] - - . addPreferences - -- enable stanza preference where the user did not specify - [ PackageStanzasPreference pkgname stanzas - | pkg <- localPackages - , let pkgname = pkgSpecifierTarget pkg - stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable - stanzas = [ stanza | stanza <- [minBound..maxBound] - , Map.lookup stanza stanzaM == Nothing ] - , not (null stanzas) - ] - - . addConstraints - -- enable stanza constraints where the user asked to enable - [ LabeledPackageConstraint - (PackageConstraint (scopeToplevel pkgname) - (PackagePropertyStanzas stanzas)) - ConstraintSourceConfigFlagOrTarget - | pkg <- localPackages - , let pkgname = pkgSpecifierTarget pkg - stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable - stanzas = [ stanza | stanza <- [minBound..maxBound] - , Map.lookup stanza stanzaM == Just True ] - , not (null stanzas) - ] - - . addConstraints - --TODO: [nice to have] should have checked at some point that the - -- package in question actually has these flags. - [ LabeledPackageConstraint - (PackageConstraint (scopeToplevel pkgname) - (PackagePropertyFlags flags)) - ConstraintSourceConfigFlagOrTarget - | (pkgname, flags) <- Map.toList solverSettingFlagAssignments ] - - . addConstraints - --TODO: [nice to have] we have user-supplied flags for unspecified - -- local packages (as well as specific per-package flags). 
For the - -- former we just apply all these flags to all local targets which - -- is silly. We should check if the flags are appropriate. - [ LabeledPackageConstraint - (PackageConstraint (scopeToplevel pkgname) - (PackagePropertyFlags flags)) - ConstraintSourceConfigFlagOrTarget - | let flags = solverSettingFlagAssignment - , not (PD.nullFlagAssignment flags) - , pkg <- localPackages - , let pkgname = pkgSpecifierTarget pkg ] - - $ stdResolverParams - - stdResolverParams = - -- Note: we don't use the standardInstallPolicy here, since that uses - -- its own addDefaultSetupDependencies that is not appropriate for us. - basicInstallPolicy - installedPkgIndex sourcePkgDb - localPackages - - -- While we can talk to older Cabal versions (we need to be able to - -- do so for custom Setup scripts that require older Cabal lib - -- versions), we have problems talking to some older versions that - -- don't support certain features. - -- - -- For example, Cabal-1.16 and older do not know about build targets. - -- Even worse, 1.18 and older only supported the --constraint flag - -- with source package ids, not --dependency with installed package - -- ids. That is bad because we cannot reliably select the right - -- dependencies in the presence of multiple instances (i.e. the - -- store). See issue #3932. So we require Cabal 1.20 as a minimum. - -- - -- Moreover, lib:Cabal generally only supports the interface of - -- current and past compilers; in fact recent lib:Cabal versions - -- will warn when they encounter a too new or unknown GHC compiler - -- version (c.f. #415). To avoid running into unsupported - -- configurations we encode the compatiblity matrix as lower - -- bounds on lib:Cabal here (effectively corresponding to the - -- respective major Cabal version bundled with the respective GHC - -- release). 
- -- - -- GHC 8.4 needs Cabal >= 2.4 - -- GHC 8.4 needs Cabal >= 2.2 - -- GHC 8.2 needs Cabal >= 2.0 - -- GHC 8.0 needs Cabal >= 1.24 - -- GHC 7.10 needs Cabal >= 1.22 - -- - -- (NB: we don't need to consider older GHCs as Cabal >= 1.20 is - -- the absolute lower bound) - -- - -- TODO: long-term, this compatibility matrix should be - -- stored as a field inside 'Distribution.Compiler.Compiler' - setupMinCabalVersionConstraint - | isGHC, compVer >= mkVersion [8,6,1] = mkVersion [2,4] - -- GHC 8.6alpha2 (GHC 8.6.0.20180714) still shipped with a - -- devel snapshot of Cabal-2.3.0.0; the rule below can be - -- dropped at some point - | isGHC, compVer >= mkVersion [8,6] = mkVersion [2,3] - | isGHC, compVer >= mkVersion [8,4] = mkVersion [2,2] - | isGHC, compVer >= mkVersion [8,2] = mkVersion [2,0] - | isGHC, compVer >= mkVersion [8,0] = mkVersion [1,24] - | isGHC, compVer >= mkVersion [7,10] = mkVersion [1,22] - | otherwise = mkVersion [1,20] - where - isGHC = compFlav `elem` [GHC,GHCJS] - compFlav = compilerFlavor comp - compVer = compilerVersion comp - - -- As we can't predict the future, we also place a global upper - -- bound on the lib:Cabal version we know how to interact with: - -- - -- The upper bound is computed by incrementing the current major - -- version twice in order to allow for the current version, as - -- well as the next adjacent major version (one of which will not - -- be released, as only "even major" versions of Cabal are - -- released to Hackage or bundled with proper GHC releases). - -- - -- For instance, if the current version of cabal-install is an odd - -- development version, e.g. Cabal-2.1.0.0, then we impose an - -- upper bound `setup.Cabal < 2.3`; if `cabal-install` is on a - -- stable/release even version, e.g. Cabal-2.2.1.0, the upper - -- bound is `setup.Cabal < 2.4`. This gives us enough flexibility - -- when dealing with development snapshots of Cabal and cabal-install. 
- -- - setupMaxCabalVersionConstraint = - alterVersion (take 2) $ incVersion 1 $ incVersion 1 cabalVersion - ------------------------------------------------------------------------------- --- * Install plan post-processing ------------------------------------------------------------------------------- - --- This phase goes from the InstallPlan we get from the solver and has to --- make an elaborated install plan. --- --- We go in two steps: --- --- 1. elaborate all the source packages that the solver has chosen. --- 2. swap source packages for pre-existing installed packages wherever --- possible. --- --- We do it in this order, elaborating and then replacing, because the easiest --- way to calculate the installed package ids used for the replacement step is --- from the elaborated configuration for each package. - - - - ------------------------------------------------------------------------------- --- * Install plan elaboration ------------------------------------------------------------------------------- - --- Note [SolverId to ConfiguredId] --- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ --- Dependency solving is a per package affair, so after we're done, we --- end up with 'SolverInstallPlan' that records in 'solverPkgLibDeps' --- and 'solverPkgExeDeps' what packages provide the libraries and executables --- needed by each component of the package (phew!) For example, if I have --- --- library --- build-depends: lib --- build-tool-depends: pkg:exe1 --- build-tools: alex --- --- After dependency solving, I find out that this library component has --- library dependencies on lib-0.2, and executable dependencies on pkg-0.1 --- and alex-0.3 (other components of the package may have different --- dependencies). Note that I've "lost" the knowledge that I depend --- *specifically* on the exe1 executable from pkg. --- --- So, we have a this graph of packages, and we need to transform it into --- a graph of components which we are actually going to build. 
In particular: --- --- NODE changes from PACKAGE (SolverPackage) to COMPONENTS (ElaboratedConfiguredPackage) --- EDGE changes from PACKAGE DEP (SolverId) to COMPONENT DEPS (ConfiguredId) --- --- In both cases, what was previously a single node/edge may turn into multiple --- nodes/edges. Multiple components, because there may be multiple components --- in a package; multiple component deps, because we may depend upon multiple --- executables from the same package (and maybe, some day, multiple libraries --- from the same package.) --- --- Let's talk about how to do this transformation. Naively, we might consider --- just processing each package, converting it into (zero or) one or more --- components. But we also have to update the edges; this leads to --- two complications: --- --- 1. We don't know what the ConfiguredId of a component is until --- we've configured it, but we cannot configure a component unless --- we know the ConfiguredId of all its dependencies. Thus, we must --- process the 'SolverInstallPlan' in topological order. --- --- 2. When we process a package, we know the SolverIds of its --- dependencies, but we have to do some work to turn these into --- ConfiguredIds. For example, in the case of build-tool-depends, the --- SolverId isn't enough to uniquely determine the ConfiguredId we should --- elaborate to: we have to look at the executable name attached to --- the package name in the package description to figure it out. --- At the same time, we NEED to use the SolverId, because there might --- be multiple versions of the same package in the build plan --- (due to setup dependencies); we can't just look up the package name --- from the package description. --- --- We can adopt the following strategy: --- --- * When a package is transformed into components, record --- a mapping from SolverId to ALL of the components --- which were elaborated. 
--- --- * When we look up an edge, we use our knowledge of the --- component name to *filter* the list of components into --- the ones we actually wanted to refer to. --- --- By the way, we can tell that SolverInstallPlan is not the "right" type --- because a SolverId cannot adequately represent all possible dependency --- solver states: we may need to record foo-0.1 multiple times in --- the solver install plan with different dependencies. This imprecision in the --- type currently doesn't cause any problems because the dependency solver --- continues to enforce the single instance restriction regardless of compiler --- version. The right way to solve this is to come up with something very much --- like a 'ConfiguredId', in that it incorporates the version choices of its --- dependencies, but less fine grained. - - --- | Produce an elaborated install plan using the policy for local builds with --- a nix-style shared store. --- --- In theory should be able to make an elaborated install plan with a policy --- matching that of the classic @cabal install --user@ or @--global@ --- -elaborateInstallPlan - :: Verbosity -> Platform -> Compiler -> ProgramDb -> PkgConfigDb - -> DistDirLayout - -> StoreDirLayout - -> SolverInstallPlan - -> [PackageSpecifier (SourcePackage (PackageLocation loc))] - -> Map PackageId PackageSourceHash - -> InstallDirs.InstallDirTemplates - -> ProjectConfigShared - -> PackageConfig - -> PackageConfig - -> Map PackageName PackageConfig - -> LogProgress (ElaboratedInstallPlan, ElaboratedSharedConfig) -elaborateInstallPlan verbosity platform compiler compilerprogdb pkgConfigDB - distDirLayout@DistDirLayout{..} - storeDirLayout@StoreDirLayout{storePackageDBStack} - solverPlan localPackages - sourcePackageHashes - defaultInstallDirs - sharedPackageConfig - allPackagesConfig - localPackagesConfig - perPackageConfig = do - x <- elaboratedInstallPlan - return (x, elaboratedSharedConfig) - where - elaboratedSharedConfig = - ElaboratedSharedConfig { - 
pkgConfigPlatform = platform, - pkgConfigCompiler = compiler, - pkgConfigCompilerProgs = compilerprogdb, - pkgConfigReplOptions = [] - } - - preexistingInstantiatedPkgs = - Map.fromList (mapMaybe f (SolverInstallPlan.toList solverPlan)) - where - f (SolverInstallPlan.PreExisting inst) - | let ipkg = instSolverPkgIPI inst - , not (IPI.indefinite ipkg) - = Just (IPI.installedUnitId ipkg, - (FullUnitId (IPI.installedComponentId ipkg) - (Map.fromList (IPI.instantiatedWith ipkg)))) - f _ = Nothing - - elaboratedInstallPlan = - flip InstallPlan.fromSolverInstallPlanWithProgress solverPlan $ \mapDep planpkg -> - case planpkg of - SolverInstallPlan.PreExisting pkg -> - return [InstallPlan.PreExisting (instSolverPkgIPI pkg)] - - SolverInstallPlan.Configured pkg -> - let inplace_doc | shouldBuildInplaceOnly pkg = text "inplace" - | otherwise = Disp.empty - in addProgressCtx (text "In the" <+> inplace_doc <+> text "package" <+> - quotes (disp (packageId pkg))) $ - map InstallPlan.Configured <$> elaborateSolverToComponents mapDep pkg - - -- NB: We don't INSTANTIATE packages at this point. That's - -- a post-pass. This makes it simpler to compute dependencies. 
- elaborateSolverToComponents - :: (SolverId -> [ElaboratedPlanPackage]) - -> SolverPackage UnresolvedPkgLoc - -> LogProgress [ElaboratedConfiguredPackage] - elaborateSolverToComponents mapDep spkg@(SolverPackage _ _ _ deps0 exe_deps0) - = case mkComponentsGraph (elabEnabledSpec elab0) pd of - Right g -> do - let src_comps = componentsGraphToList g - infoProgress $ hang (text "Component graph for" <+> disp pkgid <<>> colon) - 4 (dispComponentsWithDeps src_comps) - (_, comps) <- mapAccumM buildComponent - (Map.empty, Map.empty, Map.empty) - (map fst src_comps) - let not_per_component_reasons = why_not_per_component src_comps - if null not_per_component_reasons - then return comps - else do checkPerPackageOk comps not_per_component_reasons - return [elaborateSolverToPackage spkg g $ - comps ++ maybeToList setupComponent] - Left cns -> - dieProgress $ - hang (text "Dependency cycle between the following components:") 4 - (vcat (map (text . componentNameStanza) cns)) - where - -- You are eligible to per-component build if this list is empty - why_not_per_component g - = cuz_buildtype ++ cuz_spec ++ cuz_length ++ cuz_flag ++ cuz_coverage - where - cuz reason = [text reason] - -- We have to disable per-component for now with - -- Configure-type scripts in order to prevent parallel - -- invocation of the same `./configure` script. - -- See https://github.com/haskell/cabal/issues/4548 - -- - -- Moreoever, at this point in time, only non-Custom setup scripts - -- are supported. Implementing per-component builds with - -- Custom would require us to create a new 'ElabSetup' - -- type, and teach all of the code paths how to handle it. - -- Once you've implemented this, swap it for the code below. 
- cuz_buildtype = - case PD.buildType (elabPkgDescription elab0) of - PD.Configure -> cuz "build-type is Configure" - PD.Custom -> cuz "build-type is Custom" - _ -> [] - -- cabal-format versions prior to 1.8 have different build-depends semantics - -- for now it's easier to just fallback to legacy-mode when specVersion < 1.8 - -- see, https://github.com/haskell/cabal/issues/4121 - cuz_spec - | PD.specVersion pd >= mkVersion [1,8] = [] - | otherwise = cuz "cabal-version is less than 1.8" - -- In the odd corner case that a package has no components at all - -- then keep it as a whole package, since otherwise it turns into - -- 0 component graph nodes and effectively vanishes. We want to - -- keep it around at least for error reporting purposes. - cuz_length - | length g > 0 = [] - | otherwise = cuz "there are no buildable components" - -- For ease of testing, we let per-component builds be toggled - -- at the top level - cuz_flag - | fromFlagOrDefault True (projectConfigPerComponent sharedPackageConfig) - = [] - | otherwise = cuz "you passed --disable-per-component" - -- Enabling program coverage introduces odd runtime dependencies - -- between components. - cuz_coverage - | fromFlagOrDefault False (packageConfigCoverage localPackagesConfig) - = cuz "program coverage is enabled" - | otherwise = [] - - -- | Sometimes a package may make use of features which are only - -- supported in per-package mode. If this is the case, we should - -- give an error when this occurs. - checkPerPackageOk comps reasons = do - let is_sublib (CSubLibName _) = True - is_sublib _ = False - when (any (matchElabPkg is_sublib) comps) $ - dieProgress $ - text "Internal libraries only supported with per-component builds." 
$$ - text "Per-component builds were disabled because" <+> - fsep (punctuate comma reasons) - -- TODO: Maybe exclude Backpack too - - elab0 = elaborateSolverToCommon spkg - pkgid = elabPkgSourceId elab0 - pd = elabPkgDescription elab0 - - -- TODO: This is just a skeleton to get elaborateSolverToPackage - -- working correctly - -- TODO: When we actually support building these components, we - -- have to add dependencies on this from all other components - setupComponent :: Maybe ElaboratedConfiguredPackage - setupComponent - | PD.buildType (elabPkgDescription elab0) == PD.Custom - = Just elab0 { - elabModuleShape = emptyModuleShape, - elabUnitId = notImpl "elabUnitId", - elabComponentId = notImpl "elabComponentId", - elabLinkedInstantiatedWith = Map.empty, - elabInstallDirs = notImpl "elabInstallDirs", - elabPkgOrComp = ElabComponent (ElaboratedComponent {..}) - } - | otherwise - = Nothing - where - compSolverName = CD.ComponentSetup - compComponentName = Nothing - dep_pkgs = elaborateLibSolverId mapDep =<< CD.setupDeps deps0 - compLibDependencies - = map configuredId dep_pkgs - compLinkedLibDependencies = notImpl "compLinkedLibDependencies" - compOrderLibDependencies = notImpl "compOrderLibDependencies" - -- Not supported: - compExeDependencies = [] - compExeDependencyPaths = [] - compPkgConfigDependencies = [] - - notImpl f = - error $ "Distribution.Client.ProjectPlanning.setupComponent: " ++ - f ++ " not implemented yet" - - - buildComponent - :: (ConfiguredComponentMap, - LinkedComponentMap, - Map ComponentId FilePath) - -> Cabal.Component - -> LogProgress - ((ConfiguredComponentMap, - LinkedComponentMap, - Map ComponentId FilePath), - ElaboratedConfiguredPackage) - buildComponent (cc_map, lc_map, exe_map) comp = - addProgressCtx (text "In the stanza" <+> - quotes (text (componentNameStanza cname))) $ do - - -- 1. Configure the component, but with a place holder ComponentId. 
- cc0 <- toConfiguredComponent - pd - (error "Distribution.Client.ProjectPlanning.cc_cid: filled in later") - (Map.unionWith Map.union external_lib_cc_map cc_map) - (Map.unionWith Map.union external_exe_cc_map cc_map) - comp - - - -- 2. Read out the dependencies from the ConfiguredComponent cc0 - let compLibDependencies = - -- Nub because includes can show up multiple times - ordNub (map (annotatedIdToConfiguredId . ci_ann_id) - (cc_includes cc0)) - compExeDependencies = - map annotatedIdToConfiguredId - (cc_exe_deps cc0) - compExeDependencyPaths = - [ (annotatedIdToConfiguredId aid', path) - | aid' <- cc_exe_deps cc0 - , Just path <- [Map.lookup (ann_id aid') exe_map1]] - elab_comp = ElaboratedComponent {..} - - -- 3. Construct a preliminary ElaboratedConfiguredPackage, - -- and use this to compute the component ID. Fix up cc_id - -- correctly. - let elab1 = elab0 { - elabPkgOrComp = ElabComponent $ elab_comp - } - cid = case elabBuildStyle elab0 of - BuildInplaceOnly -> - mkComponentId $ - display pkgid ++ "-inplace" ++ - (case Cabal.componentNameString cname of - Nothing -> "" - Just s -> "-" ++ display s) - BuildAndInstall -> - hashedInstalledPackageId - (packageHashInputs - elaboratedSharedConfig - elab1) -- knot tied - cc = cc0 { cc_ann_id = fmap (const cid) (cc_ann_id cc0) } - infoProgress $ dispConfiguredComponent cc - - -- 4. Perform mix-in linking - let lookup_uid def_uid = - case Map.lookup (unDefUnitId def_uid) preexistingInstantiatedPkgs of - Just full -> full - Nothing -> error ("lookup_uid: " ++ display def_uid) - lc <- toLinkedComponent verbosity lookup_uid (elabPkgSourceId elab0) - (Map.union external_lc_map lc_map) cc - infoProgress $ dispLinkedComponent lc - -- NB: elab is setup to be the correct form for an - -- indefinite library, or a definite library with no holes. - -- We will modify it in 'instantiateInstallPlan' to handle - -- instantiated packages. - - -- 5. 
Construct the final ElaboratedConfiguredPackage - let - elab = elab1 { - elabModuleShape = lc_shape lc, - elabUnitId = abstractUnitId (lc_uid lc), - elabComponentId = lc_cid lc, - elabLinkedInstantiatedWith = Map.fromList (lc_insts lc), - elabPkgOrComp = ElabComponent $ elab_comp { - compLinkedLibDependencies = ordNub (map ci_id (lc_includes lc)), - compOrderLibDependencies = - ordNub (map (abstractUnitId . ci_id) - (lc_includes lc ++ lc_sig_includes lc)) - }, - elabInstallDirs = install_dirs cid - } - - -- 6. Construct the updated local maps - let cc_map' = extendConfiguredComponentMap cc cc_map - lc_map' = extendLinkedComponentMap lc lc_map - exe_map' = Map.insert cid (inplace_bin_dir elab) exe_map - - return ((cc_map', lc_map', exe_map'), elab) - where - compLinkedLibDependencies = error "buildComponent: compLinkedLibDependencies" - compOrderLibDependencies = error "buildComponent: compOrderLibDependencies" - - cname = Cabal.componentName comp - compComponentName = Just cname - compSolverName = CD.componentNameToComponent cname - - -- NB: compLinkedLibDependencies and - -- compOrderLibDependencies are defined when we define - -- 'elab'. - external_lib_dep_sids = CD.select (== compSolverName) deps0 - external_exe_dep_sids = CD.select (== compSolverName) exe_deps0 - - external_lib_dep_pkgs = concatMap mapDep external_lib_dep_sids - - -- Combine library and build-tool dependencies, for backwards - -- compatibility (See issue #5412 and the documentation for - -- InstallPlan.fromSolverInstallPlan), but prefer the versions - -- specified as build-tools. - external_exe_dep_pkgs = - concatMap mapDep $ - ordNubBy (pkgName . 
packageId) $ - external_exe_dep_sids ++ external_lib_dep_sids - - external_exe_map = Map.fromList $ - [ (getComponentId pkg, path) - | pkg <- external_exe_dep_pkgs - , Just path <- [planPackageExePath pkg] ] - exe_map1 = Map.union external_exe_map exe_map - - external_lib_cc_map = Map.fromListWith Map.union - $ map mkCCMapping external_lib_dep_pkgs - external_exe_cc_map = Map.fromListWith Map.union - $ map mkCCMapping external_exe_dep_pkgs - external_lc_map = - Map.fromList $ map mkShapeMapping $ - external_lib_dep_pkgs ++ concatMap mapDep external_exe_dep_sids - - compPkgConfigDependencies = - [ (pn, fromMaybe (error $ "compPkgConfigDependencies: impossible! " - ++ display pn ++ " from " - ++ display (elabPkgSourceId elab0)) - (pkgConfigDbPkgVersion pkgConfigDB pn)) - | PkgconfigDependency pn _ <- PD.pkgconfigDepends - (Cabal.componentBuildInfo comp) ] - - install_dirs cid - | shouldBuildInplaceOnly spkg - -- use the ordinary default install dirs - = (InstallDirs.absoluteInstallDirs - pkgid - (newSimpleUnitId cid) - (compilerInfo compiler) - InstallDirs.NoCopyDest - platform - defaultInstallDirs) { - - -- absoluteInstallDirs sets these as 'undefined' but we have - -- to use them as "Setup.hs configure" args - InstallDirs.libsubdir = "", - InstallDirs.libexecsubdir = "", - InstallDirs.datasubdir = "" - } - - | otherwise - -- use special simplified install dirs - = storePackageInstallDirs - storeDirLayout - (compilerId compiler) - cid - - inplace_bin_dir elab = - binDirectoryFor - distDirLayout - elaboratedSharedConfig - elab $ - case Cabal.componentNameString cname of - Just n -> display n - Nothing -> "" - - - -- | Given a 'SolverId' referencing a dependency on a library, return - -- the 'ElaboratedPlanPackage' corresponding to the library. This - -- returns at most one result. - elaborateLibSolverId :: (SolverId -> [ElaboratedPlanPackage]) - -> SolverId -> [ElaboratedPlanPackage] - elaborateLibSolverId mapDep = filter (matchPlanPkg (== CLibName)) . 
mapDep - - -- | Given an 'ElaboratedPlanPackage', return the path to where the - -- executable that this package represents would be installed. - planPackageExePath :: ElaboratedPlanPackage -> Maybe FilePath - planPackageExePath = - -- Pre-existing executables are assumed to be in PATH - -- already. In fact, this should be impossible. - InstallPlan.foldPlanPackage (const Nothing) $ \elab -> Just $ - binDirectoryFor - distDirLayout - elaboratedSharedConfig - elab $ - case elabPkgOrComp elab of - ElabPackage _ -> "" - ElabComponent comp -> - case fmap Cabal.componentNameString - (compComponentName comp) of - Just (Just n) -> display n - _ -> "" - - elaborateSolverToPackage :: SolverPackage UnresolvedPkgLoc - -> ComponentsGraph - -> [ElaboratedConfiguredPackage] - -> ElaboratedConfiguredPackage - elaborateSolverToPackage - pkg@(SolverPackage (SourcePackage pkgid _gdesc _srcloc _descOverride) - _flags _stanzas _deps0 _exe_deps0) - compGraph comps = - -- Knot tying: the final elab includes the - -- pkgInstalledId, which is calculated by hashing many - -- of the other fields of the elaboratedPackage. 
- elab - where - elab0@ElaboratedConfiguredPackage{..} = elaborateSolverToCommon pkg - elab = elab0 { - elabUnitId = newSimpleUnitId pkgInstalledId, - elabComponentId = pkgInstalledId, - elabLinkedInstantiatedWith = Map.empty, - elabInstallDirs = install_dirs, - elabPkgOrComp = ElabPackage $ ElaboratedPackage {..}, - elabModuleShape = modShape - } - - modShape = case find (matchElabPkg (== CLibName)) comps of - Nothing -> emptyModuleShape - Just e -> Ty.elabModuleShape e - - pkgInstalledId - | shouldBuildInplaceOnly pkg - = mkComponentId (display pkgid ++ "-inplace") - - | otherwise - = assert (isJust elabPkgSourceHash) $ - hashedInstalledPackageId - (packageHashInputs - elaboratedSharedConfig - elab) -- recursive use of elab - - | otherwise - = error $ "elaborateInstallPlan: non-inplace package " - ++ " is missing a source hash: " ++ display pkgid - - -- Need to filter out internal dependencies, because they don't - -- correspond to anything real anymore. - isExt confid = confSrcId confid /= pkgid - filterExt = filter isExt - filterExt' = filter (isExt . fst) - - pkgLibDependencies - = buildComponentDeps (filterExt . compLibDependencies) - pkgExeDependencies - = buildComponentDeps (filterExt . compExeDependencies) - pkgExeDependencyPaths - = buildComponentDeps (filterExt' . compExeDependencyPaths) - -- TODO: Why is this flat? 
- pkgPkgConfigDependencies - = CD.flatDeps $ buildComponentDeps compPkgConfigDependencies - - pkgDependsOnSelfLib - = CD.fromList [ (CD.componentNameToComponent cn, [()]) - | Graph.N _ cn _ <- fromMaybe [] mb_closure ] - where - mb_closure = Graph.revClosure compGraph [ k | k <- Graph.keys compGraph, is_lib k ] - is_lib CLibName = True - -- NB: this case should not occur, because sub-libraries - -- are not supported without per-component builds - is_lib (CSubLibName _) = True - is_lib _ = False - - buildComponentDeps f - = CD.fromList [ (compSolverName comp, f comp) - | ElaboratedConfiguredPackage{ - elabPkgOrComp = ElabComponent comp - } <- comps - ] - - -- NB: This is not the final setting of 'pkgStanzasEnabled'. - -- See [Sticky enabled testsuites]; we may enable some extra - -- stanzas opportunistically when it is cheap to do so. - -- - -- However, we start off by enabling everything that was - -- requested, so that we can maintain an invariant that - -- pkgStanzasEnabled is a superset of elabStanzasRequested - pkgStanzasEnabled = Map.keysSet (Map.filter (id :: Bool -> Bool) elabStanzasRequested) - - install_dirs - | shouldBuildInplaceOnly pkg - -- use the ordinary default install dirs - = (InstallDirs.absoluteInstallDirs - pkgid - (newSimpleUnitId pkgInstalledId) - (compilerInfo compiler) - InstallDirs.NoCopyDest - platform - defaultInstallDirs) { - - -- absoluteInstallDirs sets these as 'undefined' but we have to - -- use them as "Setup.hs configure" args - InstallDirs.libsubdir = "", - InstallDirs.libexecsubdir = "", - InstallDirs.datasubdir = "" - } - - | otherwise - -- use special simplified install dirs - = storePackageInstallDirs - storeDirLayout - (compilerId compiler) - pkgInstalledId - - elaborateSolverToCommon :: SolverPackage UnresolvedPkgLoc - -> ElaboratedConfiguredPackage - elaborateSolverToCommon - pkg@(SolverPackage (SourcePackage pkgid gdesc srcloc descOverride) - flags stanzas deps0 _exe_deps0) = - elaboratedPackage - where - 
elaboratedPackage = ElaboratedConfiguredPackage {..} - - -- These get filled in later - elabUnitId = error "elaborateSolverToCommon: elabUnitId" - elabComponentId = error "elaborateSolverToCommon: elabComponentId" - elabInstantiatedWith = Map.empty - elabLinkedInstantiatedWith = error "elaborateSolverToCommon: elabLinkedInstantiatedWith" - elabPkgOrComp = error "elaborateSolverToCommon: elabPkgOrComp" - elabInstallDirs = error "elaborateSolverToCommon: elabInstallDirs" - elabModuleShape = error "elaborateSolverToCommon: elabModuleShape" - - elabIsCanonical = True - elabPkgSourceId = pkgid - elabPkgDescription = let Right (desc, _) = - PD.finalizePD - flags elabEnabledSpec (const True) - platform (compilerInfo compiler) - [] gdesc - in desc - elabFlagAssignment = flags - elabFlagDefaults = PD.mkFlagAssignment - [ (Cabal.flagName flag, Cabal.flagDefault flag) - | flag <- PD.genPackageFlags gdesc ] - - elabEnabledSpec = enableStanzas stanzas - elabStanzasAvailable = Set.fromList stanzas - elabStanzasRequested = - -- NB: even if a package stanza is requested, if the package - -- doesn't actually have any of that stanza we omit it from - -- the request, to ensure that we don't decide that this - -- package needs to be rebuilt. (It needs to be done here, - -- because the ElaboratedConfiguredPackage is where we test - -- whether or not there have been changes.) - Map.fromList $ [ (TestStanzas, v) | v <- maybeToList tests - , _ <- PD.testSuites elabPkgDescription ] - ++ [ (BenchStanzas, v) | v <- maybeToList benchmarks - , _ <- PD.benchmarks elabPkgDescription ] - where - tests, benchmarks :: Maybe Bool - tests = perPkgOptionMaybe pkgid packageConfigTests - benchmarks = perPkgOptionMaybe pkgid packageConfigBenchmarks - - -- This is a placeholder which will get updated by 'pruneInstallPlanPass1' - -- and 'pruneInstallPlanPass2'. 
We can't populate it here - -- because whether or not tests/benchmarks should be enabled - -- is heuristically calculated based on whether or not the - -- dependencies of the test suite have already been installed, - -- but this function doesn't know what is installed (since - -- we haven't improved the plan yet), so we do it in another pass. - -- Check the comments of those functions for more details. - elabBuildTargets = [] - elabTestTargets = [] - elabBenchTargets = [] - elabReplTarget = Nothing - elabHaddockTargets = [] - - elabBuildHaddocks = - perPkgOptionFlag pkgid False packageConfigDocumentation - - elabPkgSourceLocation = srcloc - elabPkgSourceHash = Map.lookup pkgid sourcePackageHashes - elabLocalToProject = isLocalToProject pkg - elabBuildStyle = if shouldBuildInplaceOnly pkg - then BuildInplaceOnly else BuildAndInstall - elabBuildPackageDBStack = buildAndRegisterDbs - elabRegisterPackageDBStack = buildAndRegisterDbs - - elabSetupScriptStyle = packageSetupScriptStyle elabPkgDescription - elabSetupScriptCliVersion = - packageSetupScriptSpecVersion - elabSetupScriptStyle elabPkgDescription libDepGraph deps0 - elabSetupPackageDBStack = buildAndRegisterDbs - - buildAndRegisterDbs - | shouldBuildInplaceOnly pkg = inplacePackageDbs - | otherwise = storePackageDbs - - elabPkgDescriptionOverride = descOverride - - elabVanillaLib = perPkgOptionFlag pkgid True packageConfigVanillaLib --TODO: [required feature]: also needs to be handled recursively - elabSharedLib = pkgid `Set.member` pkgsUseSharedLibrary - elabStaticLib = perPkgOptionFlag pkgid False packageConfigStaticLib - elabDynExe = perPkgOptionFlag pkgid False packageConfigDynExe - elabGHCiLib = perPkgOptionFlag pkgid False packageConfigGHCiLib --TODO: [required feature] needs to default to enabled on windows still - - elabProfExe = perPkgOptionFlag pkgid False packageConfigProf - elabProfLib = pkgid `Set.member` pkgsUseProfilingLibrary - - (elabProfExeDetail, - elabProfLibDetail) = perPkgOptionLibExeFlag 
pkgid ProfDetailDefault - packageConfigProfDetail - packageConfigProfLibDetail - elabCoverage = perPkgOptionFlag pkgid False packageConfigCoverage - - elabOptimization = perPkgOptionFlag pkgid NormalOptimisation packageConfigOptimization - elabSplitObjs = perPkgOptionFlag pkgid False packageConfigSplitObjs - elabSplitSections = perPkgOptionFlag pkgid False packageConfigSplitSections - elabStripLibs = perPkgOptionFlag pkgid False packageConfigStripLibs - elabStripExes = perPkgOptionFlag pkgid False packageConfigStripExes - elabDebugInfo = perPkgOptionFlag pkgid NoDebugInfo packageConfigDebugInfo - - -- Combine the configured compiler prog settings with the user-supplied - -- config. For the compiler progs any user-supplied config was taken - -- into account earlier when configuring the compiler so its ok that - -- our configured settings for the compiler override the user-supplied - -- config here. - elabProgramPaths = Map.fromList - [ (programId prog, programPath prog) - | prog <- configuredPrograms compilerprogdb ] - <> perPkgOptionMapLast pkgid packageConfigProgramPaths - elabProgramArgs = Map.fromList - [ (programId prog, args) - | prog <- configuredPrograms compilerprogdb - , let args = programOverrideArgs prog - , not (null args) - ] - <> perPkgOptionMapMappend pkgid packageConfigProgramArgs - elabProgramPathExtra = perPkgOptionNubList pkgid packageConfigProgramPathExtra - elabConfigureScriptArgs = perPkgOptionList pkgid packageConfigConfigureArgs - elabExtraLibDirs = perPkgOptionList pkgid packageConfigExtraLibDirs - elabExtraFrameworkDirs = perPkgOptionList pkgid packageConfigExtraFrameworkDirs - elabExtraIncludeDirs = perPkgOptionList pkgid packageConfigExtraIncludeDirs - elabProgPrefix = perPkgOptionMaybe pkgid packageConfigProgPrefix - elabProgSuffix = perPkgOptionMaybe pkgid packageConfigProgSuffix - - - elabHaddockHoogle = perPkgOptionFlag pkgid False packageConfigHaddockHoogle - elabHaddockHtml = perPkgOptionFlag pkgid False packageConfigHaddockHtml - 
elabHaddockHtmlLocation = perPkgOptionMaybe pkgid packageConfigHaddockHtmlLocation - elabHaddockForeignLibs = perPkgOptionFlag pkgid False packageConfigHaddockForeignLibs - elabHaddockForHackage = perPkgOptionFlag pkgid Cabal.ForDevelopment packageConfigHaddockForHackage - elabHaddockExecutables = perPkgOptionFlag pkgid False packageConfigHaddockExecutables - elabHaddockTestSuites = perPkgOptionFlag pkgid False packageConfigHaddockTestSuites - elabHaddockBenchmarks = perPkgOptionFlag pkgid False packageConfigHaddockBenchmarks - elabHaddockInternal = perPkgOptionFlag pkgid False packageConfigHaddockInternal - elabHaddockCss = perPkgOptionMaybe pkgid packageConfigHaddockCss - elabHaddockLinkedSource = perPkgOptionFlag pkgid False packageConfigHaddockLinkedSource - elabHaddockQuickJump = perPkgOptionFlag pkgid False packageConfigHaddockQuickJump - elabHaddockHscolourCss = perPkgOptionMaybe pkgid packageConfigHaddockHscolourCss - elabHaddockContents = perPkgOptionMaybe pkgid packageConfigHaddockContents - - perPkgOptionFlag :: PackageId -> a -> (PackageConfig -> Flag a) -> a - perPkgOptionMaybe :: PackageId -> (PackageConfig -> Flag a) -> Maybe a - perPkgOptionList :: PackageId -> (PackageConfig -> [a]) -> [a] - - perPkgOptionFlag pkgid def f = fromFlagOrDefault def (lookupPerPkgOption pkgid f) - perPkgOptionMaybe pkgid f = flagToMaybe (lookupPerPkgOption pkgid f) - perPkgOptionList pkgid f = lookupPerPkgOption pkgid f - perPkgOptionNubList pkgid f = fromNubList (lookupPerPkgOption pkgid f) - perPkgOptionMapLast pkgid f = getMapLast (lookupPerPkgOption pkgid f) - perPkgOptionMapMappend pkgid f = getMapMappend (lookupPerPkgOption pkgid f) - - perPkgOptionLibExeFlag pkgid def fboth flib = (exe, lib) - where - exe = fromFlagOrDefault def bothflag - lib = fromFlagOrDefault def (bothflag <> libflag) - - bothflag = lookupPerPkgOption pkgid fboth - libflag = lookupPerPkgOption pkgid flib - - lookupPerPkgOption :: (Package pkg, Monoid m) - => pkg -> (PackageConfig -> m) -> m - 
lookupPerPkgOption pkg f = - -- This is where we merge the options from the project config that - -- apply to all packages, all project local packages, and to specific - -- named packages - global `mappend` local `mappend` perpkg - where - global = f allPackagesConfig - local | isLocalToProject pkg - = f localPackagesConfig - | otherwise - = mempty - perpkg = maybe mempty f (Map.lookup (packageName pkg) perPackageConfig) - - inplacePackageDbs = storePackageDbs - ++ [ distPackageDB (compilerId compiler) ] - - storePackageDbs = storePackageDBStack (compilerId compiler) - - -- For this local build policy, every package that lives in a local source - -- dir (as opposed to a tarball), or depends on such a package, will be - -- built inplace into a shared dist dir. Tarball packages that depend on - -- source dir packages will also get unpacked locally. - shouldBuildInplaceOnly :: SolverPackage loc -> Bool - shouldBuildInplaceOnly pkg = Set.member (packageId pkg) - pkgsToBuildInplaceOnly - - pkgsToBuildInplaceOnly :: Set PackageId - pkgsToBuildInplaceOnly = - Set.fromList - $ map packageId - $ SolverInstallPlan.reverseDependencyClosure - solverPlan - (map PlannedId (Set.toList pkgsLocalToProject)) - - isLocalToProject :: Package pkg => pkg -> Bool - isLocalToProject pkg = Set.member (packageId pkg) - pkgsLocalToProject - - pkgsLocalToProject :: Set PackageId - pkgsLocalToProject = - Set.fromList (catMaybes (map shouldBeLocal localPackages)) - --TODO: localPackages is a misnomer, it's all project packages - -- here is where we decide which ones will be local! - where - shouldBeLocal :: PackageSpecifier (SourcePackage (PackageLocation loc)) -> Maybe PackageId - shouldBeLocal NamedPackage{} = Nothing - shouldBeLocal (SpecificSourcePackage pkg) - | LocalTarballPackage _ <- packageSource pkg = Nothing - | otherwise = Just (packageId pkg) - -- TODO: Is it only LocalTarballPackages we can know about without - -- them being "local" in the sense meant here? 
- -- - -- Also, review use of SourcePackage's loc vs ProjectPackageLocation - - pkgsUseSharedLibrary :: Set PackageId - pkgsUseSharedLibrary = - packagesWithLibDepsDownwardClosedProperty needsSharedLib - where - needsSharedLib pkg = - fromMaybe compilerShouldUseSharedLibByDefault - (liftM2 (||) pkgSharedLib pkgDynExe) - where - pkgid = packageId pkg - pkgSharedLib = perPkgOptionMaybe pkgid packageConfigSharedLib - pkgDynExe = perPkgOptionMaybe pkgid packageConfigDynExe - - --TODO: [code cleanup] move this into the Cabal lib. It's currently open - -- coded in Distribution.Simple.Configure, but should be made a proper - -- function of the Compiler or CompilerInfo. - compilerShouldUseSharedLibByDefault = - case compilerFlavor compiler of - GHC -> GHC.isDynamic compiler - GHCJS -> GHCJS.isDynamic compiler - _ -> False - - pkgsUseProfilingLibrary :: Set PackageId - pkgsUseProfilingLibrary = - packagesWithLibDepsDownwardClosedProperty needsProfilingLib - where - needsProfilingLib pkg = - fromFlagOrDefault False (profBothFlag <> profLibFlag) - where - pkgid = packageId pkg - profBothFlag = lookupPerPkgOption pkgid packageConfigProf - profLibFlag = lookupPerPkgOption pkgid packageConfigProfLib - --TODO: [code cleanup] unused: the old deprecated packageConfigProfExe - - libDepGraph = Graph.fromDistinctList $ - map NonSetupLibDepSolverPlanPackage - (SolverInstallPlan.toList solverPlan) - - packagesWithLibDepsDownwardClosedProperty property = - Set.fromList - . map packageId - . fromMaybe [] - $ Graph.closure - libDepGraph - [ Graph.nodeKey pkg - | pkg <- SolverInstallPlan.toList solverPlan - , property pkg ] -- just the packages that satisfy the property - --TODO: [nice to have] this does not check the config consistency, - -- e.g. a package explicitly turning off profiling, but something - -- depending on it that needs profiling. This really needs a separate - -- package config validation/resolution pass. 
- - --TODO: [nice to have] config consistency checking: - -- + profiling libs & exes, exe needs lib, recursive - -- + shared libs & exes, exe needs lib, recursive - -- + vanilla libs & exes, exe needs lib, recursive - -- + ghci or shared lib needed by TH, recursive, ghc version dependent - --- TODO: Drop matchPlanPkg/matchElabPkg in favor of mkCCMapping - --- | Given a 'ElaboratedPlanPackage', report if it matches a 'ComponentName'. -matchPlanPkg :: (ComponentName -> Bool) -> ElaboratedPlanPackage -> Bool -matchPlanPkg p = InstallPlan.foldPlanPackage (p . ipiComponentName) (matchElabPkg p) - --- | Get the appropriate 'ComponentName' which identifies an installed --- component. -ipiComponentName :: IPI.InstalledPackageInfo -> ComponentName -ipiComponentName ipkg = - case IPI.sourceLibName ipkg of - Nothing -> CLibName - Just n -> (CSubLibName n) - --- | Given a 'ElaboratedConfiguredPackage', report if it matches a --- 'ComponentName'. -matchElabPkg :: (ComponentName -> Bool) -> ElaboratedConfiguredPackage -> Bool -matchElabPkg p elab = - case elabPkgOrComp elab of - ElabComponent comp -> maybe False p (compComponentName comp) - ElabPackage _ -> - -- So, what should we do here? One possibility is to - -- unconditionally return 'True', because whatever it is - -- that we're looking for, it better be in this package. - -- But this is a bit dodgy if the package doesn't actually - -- have, e.g., a library. Fortunately, it's not possible - -- for the build of the library/executables to be toggled - -- by 'pkgStanzasEnabled', so the only thing we have to - -- test is if the component in question is *buildable.* - any (p . componentName) - (Cabal.pkgBuildableComponents (elabPkgDescription elab)) - --- | Given an 'ElaboratedPlanPackage', generate the mapping from 'PackageName' --- and 'ComponentName' to the 'ComponentId' that that should be used --- in this case. 
-mkCCMapping :: ElaboratedPlanPackage - -> (PackageName, Map ComponentName (AnnotatedId ComponentId)) -mkCCMapping = - InstallPlan.foldPlanPackage - (\ipkg -> (packageName ipkg, - Map.singleton (ipiComponentName ipkg) - -- TODO: libify - (AnnotatedId { - ann_id = IPI.installedComponentId ipkg, - ann_pid = packageId ipkg, - ann_cname = IPI.sourceComponentName ipkg - }))) - $ \elab -> - let mk_aid cn = AnnotatedId { - ann_id = elabComponentId elab, - ann_pid = packageId elab, - ann_cname = cn - } - in (packageName elab, - case elabPkgOrComp elab of - ElabComponent comp -> - case compComponentName comp of - Nothing -> Map.empty - Just n -> Map.singleton n (mk_aid n) - ElabPackage _ -> - Map.fromList $ - map (\comp -> let cn = Cabal.componentName comp in (cn, mk_aid cn)) - (Cabal.pkgBuildableComponents (elabPkgDescription elab))) - --- | Given an 'ElaboratedPlanPackage', generate the mapping from 'ComponentId' --- to the shape of this package, as per mix-in linking. -mkShapeMapping :: ElaboratedPlanPackage - -> (ComponentId, (OpenUnitId, ModuleShape)) -mkShapeMapping dpkg = - (getComponentId dpkg, (indef_uid, shape)) - where - (dcid, shape) = - InstallPlan.foldPlanPackage - -- Uses Monad (->) - (liftM2 (,) IPI.installedComponentId shapeInstalledPackage) - (liftM2 (,) elabComponentId elabModuleShape) - dpkg - indef_uid = - IndefFullUnitId dcid - (Map.fromList [ (req, OpenModuleVar req) - | req <- Set.toList (modShapeRequires shape)]) - --- | Get the bin\/ directories that a package's executables should reside in. --- --- The result may be empty if the package does not build any executables. --- --- The result may have several entries if this is an inplace build of a package --- with multiple executables. 
-binDirectories - :: DistDirLayout - -> ElaboratedSharedConfig - -> ElaboratedConfiguredPackage - -> [FilePath] -binDirectories layout config package = case elabBuildStyle package of - -- quick sanity check: no sense returning a bin directory if we're not going - -- to put any executables in it, that will just clog up the PATH - _ | noExecutables -> [] - BuildAndInstall -> [installedBinDirectory package] - BuildInplaceOnly -> map (root) $ case elabPkgOrComp package of - ElabComponent comp -> case compSolverName comp of - CD.ComponentExe n -> [display n] - _ -> [] - ElabPackage _ -> map (display . PD.exeName) - . PD.executables - . elabPkgDescription - $ package - where - noExecutables = null . PD.executables . elabPkgDescription $ package - root = distBuildDirectory layout (elabDistDirParams config package) - "build" - --- | A newtype for 'SolverInstallPlan.SolverPlanPackage' for which the --- dependency graph considers only dependencies on libraries which are --- NOT from setup dependencies. Used to compute the set --- of packages needed for profiling and dynamic libraries. -newtype NonSetupLibDepSolverPlanPackage - = NonSetupLibDepSolverPlanPackage - { unNonSetupLibDepSolverPlanPackage :: SolverInstallPlan.SolverPlanPackage } - -instance Package NonSetupLibDepSolverPlanPackage where - packageId = packageId . unNonSetupLibDepSolverPlanPackage - -instance IsNode NonSetupLibDepSolverPlanPackage where - type Key NonSetupLibDepSolverPlanPackage = SolverId - nodeKey = nodeKey . 
unNonSetupLibDepSolverPlanPackage - nodeNeighbors (NonSetupLibDepSolverPlanPackage spkg) - = ordNub $ CD.nonSetupDeps (resolverPackageLibDeps spkg) - -type InstS = Map UnitId ElaboratedPlanPackage -type InstM a = State InstS a - -getComponentId :: ElaboratedPlanPackage - -> ComponentId -getComponentId (InstallPlan.PreExisting dipkg) = IPI.installedComponentId dipkg -getComponentId (InstallPlan.Configured elab) = elabComponentId elab -getComponentId (InstallPlan.Installed elab) = elabComponentId elab - -instantiateInstallPlan :: ElaboratedInstallPlan -> ElaboratedInstallPlan -instantiateInstallPlan plan = - InstallPlan.new (IndependentGoals False) - (Graph.fromDistinctList (Map.elems ready_map)) - where - pkgs = InstallPlan.toList plan - - cmap = Map.fromList [ (getComponentId pkg, pkg) | pkg <- pkgs ] - - instantiateUnitId :: ComponentId -> Map ModuleName Module - -> InstM DefUnitId - instantiateUnitId cid insts = state $ \s -> - case Map.lookup uid s of - Nothing -> - -- Knot tied - let (r, s') = runState (instantiateComponent uid cid insts) - (Map.insert uid r s) - in (def_uid, Map.insert uid r s') - Just _ -> (def_uid, s) - where - def_uid = mkDefUnitId cid insts - uid = unDefUnitId def_uid - - instantiateComponent - :: UnitId -> ComponentId -> Map ModuleName Module - -> InstM ElaboratedPlanPackage - instantiateComponent uid cid insts - | Just planpkg <- Map.lookup cid cmap - = case planpkg of - InstallPlan.Configured (elab@ElaboratedConfiguredPackage - { elabPkgOrComp = ElabComponent comp }) -> do - deps <- mapM (substUnitId insts) - (compLinkedLibDependencies comp) - let getDep (Module dep_uid _) = [dep_uid] - return $ InstallPlan.Configured elab { - elabUnitId = uid, - elabComponentId = cid, - elabInstantiatedWith = insts, - elabIsCanonical = Map.null insts, - elabPkgOrComp = ElabComponent comp { - compOrderLibDependencies = - (if Map.null insts then [] else [newSimpleUnitId cid]) ++ - ordNub (map unDefUnitId - (deps ++ concatMap getDep (Map.elems insts))) - 
} - } - _ -> return planpkg - | otherwise = error ("instantiateComponent: " ++ display cid) - - substUnitId :: Map ModuleName Module -> OpenUnitId -> InstM DefUnitId - substUnitId _ (DefiniteUnitId uid) = - return uid - substUnitId subst (IndefFullUnitId cid insts) = do - insts' <- substSubst subst insts - instantiateUnitId cid insts' - - -- NB: NOT composition - substSubst :: Map ModuleName Module - -> Map ModuleName OpenModule - -> InstM (Map ModuleName Module) - substSubst subst insts = T.mapM (substModule subst) insts - - substModule :: Map ModuleName Module -> OpenModule -> InstM Module - substModule subst (OpenModuleVar mod_name) - | Just m <- Map.lookup mod_name subst = return m - | otherwise = error "substModule: non-closing substitution" - substModule subst (OpenModule uid mod_name) = do - uid' <- substUnitId subst uid - return (Module uid' mod_name) - - indefiniteUnitId :: ComponentId -> InstM UnitId - indefiniteUnitId cid = do - let uid = newSimpleUnitId cid - r <- indefiniteComponent uid cid - state $ \s -> (uid, Map.insert uid r s) - - indefiniteComponent :: UnitId -> ComponentId -> InstM ElaboratedPlanPackage - indefiniteComponent _uid cid - | Just planpkg <- Map.lookup cid cmap - = return planpkg - | otherwise = error ("indefiniteComponent: " ++ display cid) - - ready_map = execState work Map.empty - - work = forM_ pkgs $ \pkg -> - case pkg of - InstallPlan.Configured elab - | not (Map.null (elabLinkedInstantiatedWith elab)) - -> indefiniteUnitId (elabComponentId elab) - >> return () - _ -> instantiateUnitId (getComponentId pkg) Map.empty - >> return () - ---------------------------- --- Build targets --- - --- Refer to ProjectPlanning.Types for details of these important types: - --- data ComponentTarget = ... --- data SubComponentTarget = ... 
- --- One step in the build system is to translate higher level intentions like --- "build this package", "test that package", or "repl that component" into --- a more detailed specification of exactly which components to build (or other --- actions like repl or build docs). This translation is somewhat different for --- different commands. For example "test" for a package will build a different --- set of components than "build". In addition, the translation of these --- intentions can fail. For example "run" for a package is only unambiguous --- when the package has a single executable. --- --- So we need a little bit of infrastructure to make it easy for the command --- implementations to select what component targets are meant when a user asks --- to do something with a package or component. To do this (and to be able to --- produce good error messages for mistakes and when targets are not available) --- we need to gather and summarise accurate information about all the possible --- targets, both available and unavailable. Then a command implementation can --- decide which of the available component targets should be selected. - --- | An available target represents a component within a package that a user --- command could plausibly refer to. In this sense, all the components defined --- within the package are things the user could refer to, whether or not it --- would actually be possible to build that component. --- --- In particular the available target contains an 'AvailableTargetStatus' which --- informs us about whether it's actually possible to select this component to --- be built, and if not why not. This detail makes it possible for command --- implementations (like @build@, @test@ etc) to accurately report why a target --- cannot be used. 
--- --- Note that the type parameter is used to help enforce that command --- implementations can only select targets that can actually be built (by --- forcing them to return the @k@ value for the selected targets). --- In particular 'resolveTargets' makes use of this (with @k@ as --- @('UnitId', ComponentName')@) to identify the targets thus selected. --- -data AvailableTarget k = AvailableTarget { - availableTargetPackageId :: PackageId, - availableTargetComponentName :: ComponentName, - availableTargetStatus :: AvailableTargetStatus k, - availableTargetLocalToProject :: Bool - } - deriving (Eq, Show, Functor) - --- | The status of a an 'AvailableTarget' component. This tells us whether --- it's actually possible to select this component to be built, and if not --- why not. --- -data AvailableTargetStatus k = - TargetDisabledByUser -- ^ When the user does @tests: False@ - | TargetDisabledBySolver -- ^ When the solver could not enable tests - | TargetNotBuildable -- ^ When the component has @buildable: False@ - | TargetNotLocal -- ^ When the component is non-core in a non-local package - | TargetBuildable k TargetRequested -- ^ The target can or should be built - deriving (Eq, Ord, Show, Functor) - --- | This tells us whether a target ought to be built by default, or only if --- specifically requested. The policy is that components like libraries and --- executables are built by default by @build@, but test suites and benchmarks --- are not, unless this is overridden in the project configuration. --- -data TargetRequested = - TargetRequestedByDefault -- ^ To be built by default - | TargetNotRequestedByDefault -- ^ Not to be built by default - deriving (Eq, Ord, Show) - --- | Given the install plan, produce the set of 'AvailableTarget's for each --- package-component pair. 
--- --- Typically there will only be one such target for each component, but for --- example if we have a plan with both normal and profiling variants of a --- component then we would get both as available targets, or similarly if we --- had a plan that contained two instances of the same version of a package. --- This approach makes it relatively easy to select all instances\/variants --- of a component. --- -availableTargets :: ElaboratedInstallPlan - -> Map (PackageId, ComponentName) - [AvailableTarget (UnitId, ComponentName)] -availableTargets installPlan = - let rs = [ (pkgid, cname, fake, target) - | pkg <- InstallPlan.toList installPlan - , (pkgid, cname, fake, target) <- case pkg of - InstallPlan.PreExisting ipkg -> availableInstalledTargets ipkg - InstallPlan.Installed elab -> availableSourceTargets elab - InstallPlan.Configured elab -> availableSourceTargets elab - ] - in Map.union - (Map.fromListWith (++) - [ ((pkgid, cname), [target]) - | (pkgid, cname, fake, target) <- rs, not fake]) - (Map.fromList - [ ((pkgid, cname), [target]) - | (pkgid, cname, fake, target) <- rs, fake]) - -- The normal targets mask the fake ones. We get all instances of the - -- normal ones and only one copy of the fake ones (as there are many - -- duplicates of the fake ones). See 'availableSourceTargets' below for - -- more details on this fake stuff is about. 
- -availableInstalledTargets :: IPI.InstalledPackageInfo - -> [(PackageId, ComponentName, Bool, - AvailableTarget (UnitId, ComponentName))] -availableInstalledTargets ipkg = - let unitid = installedUnitId ipkg - cname = CLibName - status = TargetBuildable (unitid, cname) TargetRequestedByDefault - target = AvailableTarget (packageId ipkg) cname status False - fake = False - in [(packageId ipkg, cname, fake, target)] - -availableSourceTargets :: ElaboratedConfiguredPackage - -> [(PackageId, ComponentName, Bool, - AvailableTarget (UnitId, ComponentName))] -availableSourceTargets elab = - -- We have a somewhat awkward problem here. We need to know /all/ the - -- components from /all/ the packages because these are the things that - -- users could refer to. Unfortunately, at this stage the elaborated install - -- plan does /not/ contain all components: some components have already - -- been deleted because they cannot possibly be built. This is the case - -- for components that are marked @buildable: False@ in their .cabal files. - -- (It's not unreasonable that the unbuildable components have been pruned - -- as the plan invariant is considerably simpler if all nodes can be built) - -- - -- We can recover the missing components but it's not exactly elegant. For - -- a graph node corresponding to a component we still have the information - -- about the package that it came from, and this includes the names of - -- /all/ the other components in the package. So in principle this lets us - -- find the names of all components, plus full details of the buildable - -- components. - -- - -- Consider for example a package with 3 exe components: foo, bar and baz - -- where foo and bar are buildable, but baz is not. So the plan contains - -- nodes for the components foo and bar. Now we look at each of these two - -- nodes and look at the package they come from and the names of the - -- components in this package. 
This will give us the names foo, bar and - -- baz, twice (once for each of the two buildable components foo and bar). - -- - -- We refer to these reconstructed missing components as fake targets. - -- It is an invariant that they are not available to be built. - -- - -- To produce the final set of targets we put the fake targets in a finite - -- map (thus eliminating the duplicates) and then we overlay that map with - -- the normal buildable targets. (This is done above in 'availableTargets'.) - -- - [ (packageId elab, cname, fake, target) - | component <- pkgComponents (elabPkgDescription elab) - , let cname = componentName component - status = componentAvailableTargetStatus component - target = AvailableTarget { - availableTargetPackageId = packageId elab, - availableTargetComponentName = cname, - availableTargetStatus = status, - availableTargetLocalToProject = elabLocalToProject elab - } - fake = isFakeTarget cname - - -- TODO: The goal of this test is to exclude "instantiated" - -- packages as available targets. This means that you can't - -- ask for a particular instantiated component to be built; - -- it will only get built by a dependency. Perhaps the - -- correct way to implement this is to run selection - -- prior to instantiating packages. If you refactor - -- this, then you can delete this test. 
- , elabIsCanonical elab - - -- Filter out some bogus parts of the cross product that are never needed - , case status of - TargetBuildable{} | fake -> False - _ -> True - ] - where - isFakeTarget cname = - case elabPkgOrComp elab of - ElabPackage _ -> False - ElabComponent elabComponent -> compComponentName elabComponent - /= Just cname - - componentAvailableTargetStatus - :: Component -> AvailableTargetStatus (UnitId, ComponentName) - componentAvailableTargetStatus component = - case componentOptionalStanza (componentName component) of - -- it is not an optional stanza, so a library, exe or foreign lib - Nothing - | not buildable -> TargetNotBuildable - | otherwise -> TargetBuildable (elabUnitId elab, cname) - TargetRequestedByDefault - - -- it is not an optional stanza, so a testsuite or benchmark - Just stanza -> - case (Map.lookup stanza (elabStanzasRequested elab), - Set.member stanza (elabStanzasAvailable elab)) of - _ | not withinPlan -> TargetNotLocal - (Just False, _) -> TargetDisabledByUser - (Nothing, False) -> TargetDisabledBySolver - _ | not buildable -> TargetNotBuildable - (Just True, True) -> TargetBuildable (elabUnitId elab, cname) - TargetRequestedByDefault - (Nothing, True) -> TargetBuildable (elabUnitId elab, cname) - TargetNotRequestedByDefault - (Just True, False) -> - error "componentAvailableTargetStatus: impossible" - where - cname = componentName component - buildable = PD.buildable (componentBuildInfo component) - withinPlan = elabLocalToProject elab - || case elabPkgOrComp elab of - ElabComponent elabComponent -> - compComponentName elabComponent == Just cname - ElabPackage _ -> - case componentName component of - CLibName -> True - CExeName _ -> True - --TODO: what about sub-libs and foreign libs? - _ -> False - --- | Merge component targets that overlap each other. 
Specially when we have --- multiple targets for the same component and one of them refers to the whole --- component (rather than a module or file within) then all the other targets --- for that component are subsumed. --- --- We also allow for information associated with each component target, and --- whenever we targets subsume each other we aggregate their associated info. --- -nubComponentTargets :: [(ComponentTarget, a)] -> [(ComponentTarget, [a])] -nubComponentTargets = - concatMap (wholeComponentOverrides . map snd) - . groupBy ((==) `on` fst) - . sortBy (compare `on` fst) - . map (\t@((ComponentTarget cname _, _)) -> (cname, t)) - . map compatSubComponentTargets - where - -- If we're building the whole component then that the only target all we - -- need, otherwise we can have several targets within the component. - wholeComponentOverrides :: [(ComponentTarget, a )] - -> [(ComponentTarget, [a])] - wholeComponentOverrides ts = - case [ t | (t@(ComponentTarget _ WholeComponent), _) <- ts ] of - (t:_) -> [ (t, map snd ts) ] - [] -> [ (t,[x]) | (t,x) <- ts ] - - -- Not all Cabal Setup.hs versions support sub-component targets, so switch - -- them over to the whole component - compatSubComponentTargets :: (ComponentTarget, a) -> (ComponentTarget, a) - compatSubComponentTargets target@(ComponentTarget cname _subtarget, x) - | not setupHsSupportsSubComponentTargets - = (ComponentTarget cname WholeComponent, x) - | otherwise = target - - -- Actually the reality is that no current version of Cabal's Setup.hs - -- build command actually support building specific files or modules. - setupHsSupportsSubComponentTargets = False - -- TODO: when that changes, adjust this test, e.g. - -- | pkgSetupScriptCliVersion >= Version [x,y] [] - -pkgHasEphemeralBuildTargets :: ElaboratedConfiguredPackage -> Bool -pkgHasEphemeralBuildTargets elab = - isJust (elabReplTarget elab) - || (not . null) (elabTestTargets elab) - || (not . null) (elabBenchTargets elab) - || (not . 
null) (elabHaddockTargets elab) - || (not . null) [ () | ComponentTarget _ subtarget <- elabBuildTargets elab - , subtarget /= WholeComponent ] - --- | The components that we'll build all of, meaning that after they're built --- we can skip building them again (unlike with building just some modules or --- other files within a component). --- -elabBuildTargetWholeComponents :: ElaboratedConfiguredPackage - -> Set ComponentName -elabBuildTargetWholeComponents elab = - Set.fromList - [ cname | ComponentTarget cname WholeComponent <- elabBuildTargets elab ] - - - ------------------------------------------------------------------------------- --- * Install plan pruning ------------------------------------------------------------------------------- - --- | How 'pruneInstallPlanToTargets' should interpret the per-package --- 'ComponentTarget's: as build, repl or haddock targets. --- -data TargetAction = TargetActionBuild - | TargetActionRepl - | TargetActionTest - | TargetActionBench - | TargetActionHaddock - --- | Given a set of per-package\/per-component targets, take the subset of the --- install plan needed to build those targets. Also, update the package config --- to specify which optional stanzas to enable, and which targets within each --- package to build. --- --- NB: Pruning happens after improvement, which is important because we --- will prune differently depending on what is already installed (to --- implement "sticky" test suite enabling behavior). --- -pruneInstallPlanToTargets :: TargetAction - -> Map UnitId [ComponentTarget] - -> ElaboratedInstallPlan -> ElaboratedInstallPlan -pruneInstallPlanToTargets targetActionType perPkgTargetsMap elaboratedPlan = - InstallPlan.new (InstallPlan.planIndepGoals elaboratedPlan) - . Graph.fromDistinctList - -- We have to do the pruning in two passes - . pruneInstallPlanPass2 - . pruneInstallPlanPass1 - -- Set the targets that will be the roots for pruning - . setRootTargets targetActionType perPkgTargetsMap - . 
InstallPlan.toList - $ elaboratedPlan - --- | This is a temporary data type, where we temporarily --- override the graph dependencies of an 'ElaboratedPackage', --- so we can take a closure over them. We'll throw out the --- overriden dependencies when we're done so it's strictly temporary. --- --- For 'ElaboratedComponent', this the cached unit IDs always --- coincide with the real thing. -data PrunedPackage = PrunedPackage ElaboratedConfiguredPackage [UnitId] - -instance Package PrunedPackage where - packageId (PrunedPackage elab _) = packageId elab - -instance HasUnitId PrunedPackage where - installedUnitId = nodeKey - -instance IsNode PrunedPackage where - type Key PrunedPackage = UnitId - nodeKey (PrunedPackage elab _) = nodeKey elab - nodeNeighbors (PrunedPackage _ deps) = deps - -fromPrunedPackage :: PrunedPackage -> ElaboratedConfiguredPackage -fromPrunedPackage (PrunedPackage elab _) = elab - --- | Set the build targets based on the user targets (but not rev deps yet). --- This is required before we can prune anything. --- -setRootTargets :: TargetAction - -> Map UnitId [ComponentTarget] - -> [ElaboratedPlanPackage] - -> [ElaboratedPlanPackage] -setRootTargets targetAction perPkgTargetsMap = - assert (not (Map.null perPkgTargetsMap)) $ - assert (all (not . null) (Map.elems perPkgTargetsMap)) $ - - map (mapConfiguredPackage setElabBuildTargets) - where - -- Set the targets we'll build for this package/component. This is just - -- based on the root targets from the user, not targets implied by reverse - -- dependencies. Those comes in the second pass once we know the rev deps. 
- -- - setElabBuildTargets elab = - case (Map.lookup (installedUnitId elab) perPkgTargetsMap, - targetAction) of - (Nothing, _) -> elab - (Just tgts, TargetActionBuild) -> elab { elabBuildTargets = tgts } - (Just tgts, TargetActionTest) -> elab { elabTestTargets = tgts } - (Just tgts, TargetActionBench) -> elab { elabBenchTargets = tgts } - (Just [tgt], TargetActionRepl) -> elab { elabReplTarget = Just tgt - , elabBuildHaddocks = False } - (Just tgts, TargetActionHaddock) -> - foldr setElabHaddockTargets (elab { elabHaddockTargets = tgts - , elabBuildHaddocks = True }) tgts - (Just _, TargetActionRepl) -> - error "pruneInstallPlanToTargets: multiple repl targets" - - setElabHaddockTargets tgt elab - | isTestComponentTarget tgt = elab { elabHaddockTestSuites = True } - | isBenchComponentTarget tgt = elab { elabHaddockBenchmarks = True } - | isForeignLibComponentTarget tgt = elab { elabHaddockForeignLibs = True } - | isExeComponentTarget tgt = elab { elabHaddockExecutables = True } - | isSubLibComponentTarget tgt = elab { elabHaddockInternal = True } - | otherwise = elab - --- | Assuming we have previously set the root build targets (i.e. the user --- targets but not rev deps yet), the first pruning pass does two things: --- --- * A first go at determining which optional stanzas (testsuites, benchmarks) --- are needed. We have a second go in the next pass. --- * Take the dependency closure using pruned dependencies. We prune deps that --- are used only by unneeded optional stanzas. These pruned deps are only --- used for the dependency closure and are not persisted in this pass. 
--- -pruneInstallPlanPass1 :: [ElaboratedPlanPackage] - -> [ElaboratedPlanPackage] -pruneInstallPlanPass1 pkgs = - map (mapConfiguredPackage fromPrunedPackage) - (fromMaybe [] $ Graph.closure graph roots) - where - pkgs' = map (mapConfiguredPackage prune) pkgs - graph = Graph.fromDistinctList pkgs' - roots = mapMaybe find_root pkgs' - - prune elab = PrunedPackage elab' (pruneOptionalDependencies elab') - where elab' = - setDocumentation - $ addOptionalStanzas elab - - find_root (InstallPlan.Configured (PrunedPackage elab _)) = - if not (null (elabBuildTargets elab) - && null (elabTestTargets elab) - && null (elabBenchTargets elab) - && isNothing (elabReplTarget elab) - && null (elabHaddockTargets elab)) - then Just (installedUnitId elab) - else Nothing - find_root _ = Nothing - - -- Note [Sticky enabled testsuites] - -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -- The testsuite and benchmark targets are somewhat special in that we need - -- to configure the packages with them enabled, and we need to do that even - -- if we only want to build one of several testsuites. - -- - -- There are two cases in which we will enable the testsuites (or - -- benchmarks): if one of the targets is a testsuite, or if all of the - -- testsuite dependencies are already cached in the store. The rationale - -- for the latter is to minimise how often we have to reconfigure due to - -- the particular targets we choose to build. Otherwise choosing to build - -- a testsuite target, and then later choosing to build an exe target - -- would involve unnecessarily reconfiguring the package with testsuites - -- disabled. Technically this introduces a little bit of stateful - -- behaviour to make this "sticky", but it should be benign. - - -- Decide whether or not to enable testsuites and benchmarks. 
- -- See [Sticky enabled testsuites] - addOptionalStanzas :: ElaboratedConfiguredPackage -> ElaboratedConfiguredPackage - addOptionalStanzas elab@ElaboratedConfiguredPackage{ elabPkgOrComp = ElabPackage pkg } = - elab { - elabPkgOrComp = ElabPackage (pkg { pkgStanzasEnabled = stanzas }) - } - where - stanzas :: Set OptionalStanza - -- By default, we enabled all stanzas requested by the user, - -- as per elabStanzasRequested, done in - -- 'elaborateSolverToPackage' - stanzas = pkgStanzasEnabled pkg - -- optionalStanzasRequiredByTargets has to be done at - -- prune-time because it depends on 'elabTestTargets' - -- et al, which is done by 'setRootTargets' at the - -- beginning of pruning. - <> optionalStanzasRequiredByTargets elab - -- optionalStanzasWithDepsAvailable has to be done at - -- prune-time because it depends on what packages are - -- installed, which is not known until after improvement - -- (pruning is done after improvement) - <> optionalStanzasWithDepsAvailable availablePkgs elab pkg - addOptionalStanzas elab = elab - - setDocumentation :: ElaboratedConfiguredPackage -> ElaboratedConfiguredPackage - setDocumentation elab@ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent comp } = - elab { - elabBuildHaddocks = - elabBuildHaddocks elab && documentationEnabled (compSolverName comp) elab - } - - where - documentationEnabled c = - case c of - CD.ComponentLib -> const True - CD.ComponentSubLib _ -> elabHaddockInternal - CD.ComponentFLib _ -> elabHaddockForeignLibs - CD.ComponentExe _ -> elabHaddockExecutables - CD.ComponentTest _ -> elabHaddockTestSuites - CD.ComponentBench _ -> elabHaddockBenchmarks - CD.ComponentSetup -> const False - - setDocumentation elab = elab - - -- Calculate package dependencies but cut out those needed only by - -- optional stanzas that we've determined we will not enable. 
- -- These pruned deps are not persisted in this pass since they're based on - -- the optional stanzas and we'll make further tweaks to the optional - -- stanzas in the next pass. - -- - pruneOptionalDependencies :: ElaboratedConfiguredPackage -> [UnitId] - pruneOptionalDependencies elab@ElaboratedConfiguredPackage{ elabPkgOrComp = ElabComponent _ } - = InstallPlan.depends elab -- no pruning - pruneOptionalDependencies ElaboratedConfiguredPackage{ elabPkgOrComp = ElabPackage pkg } - = (CD.flatDeps . CD.filterDeps keepNeeded) (pkgOrderDependencies pkg) - where - keepNeeded (CD.ComponentTest _) _ = TestStanzas `Set.member` stanzas - keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas - keepNeeded _ _ = True - stanzas = pkgStanzasEnabled pkg - - optionalStanzasRequiredByTargets :: ElaboratedConfiguredPackage - -> Set OptionalStanza - optionalStanzasRequiredByTargets pkg = - Set.fromList - [ stanza - | ComponentTarget cname _ <- elabBuildTargets pkg - ++ elabTestTargets pkg - ++ elabBenchTargets pkg - ++ maybeToList (elabReplTarget pkg) - ++ elabHaddockTargets pkg - , stanza <- maybeToList (componentOptionalStanza cname) - ] - - availablePkgs = - Set.fromList - [ installedUnitId pkg - | InstallPlan.PreExisting pkg <- pkgs ] - --- | Given a set of already installed packages @availablePkgs@, --- determine the set of available optional stanzas from @pkg@ --- which have all of their dependencies already installed. This is used --- to implement "sticky" testsuites, where once we have installed --- all of the deps needed for the test suite, we go ahead and --- enable it always. 
-optionalStanzasWithDepsAvailable :: Set UnitId - -> ElaboratedConfiguredPackage - -> ElaboratedPackage - -> Set OptionalStanza -optionalStanzasWithDepsAvailable availablePkgs elab pkg = - Set.fromList - [ stanza - | stanza <- Set.toList (elabStanzasAvailable elab) - , let deps :: [UnitId] - deps = CD.select (optionalStanzaDeps stanza) - -- TODO: probably need to select other - -- dep types too eventually - (pkgOrderDependencies pkg) - , all (`Set.member` availablePkgs) deps - ] - where - optionalStanzaDeps TestStanzas (CD.ComponentTest _) = True - optionalStanzaDeps BenchStanzas (CD.ComponentBench _) = True - optionalStanzaDeps _ _ = False - - --- The second pass does three things: --- --- * A second go at deciding which optional stanzas to enable. --- * Prune the dependencies based on the final choice of optional stanzas. --- * Extend the targets within each package to build, now we know the reverse --- dependencies, ie we know which libs are needed as deps by other packages. --- --- Achieving sticky behaviour with enabling\/disabling optional stanzas is --- tricky. The first approximation was handled by the first pass above, but --- it's not quite enough. That pass will enable stanzas if all of the deps --- of the optional stanza are already installed /in the store/. That's important --- but it does not account for dependencies that get built inplace as part of --- the project. We cannot take those inplace build deps into account in the --- pruning pass however because we don't yet know which ones we're going to --- build. Once we do know, we can have another go and enable stanzas that have --- all their deps available. Now we can consider all packages in the pruned --- plan to be available, including ones we already decided to build from --- source. --- --- Deciding which targets to build depends on knowing which packages have --- reverse dependencies (ie are needed). 
This requires the result of first --- pass, which is another reason we have to split it into two passes. --- --- Note that just because we might enable testsuites or benchmarks (in the --- first or second pass) doesn't mean that we build all (or even any) of them. --- That depends on which targets we picked in the first pass. --- -pruneInstallPlanPass2 :: [ElaboratedPlanPackage] - -> [ElaboratedPlanPackage] -pruneInstallPlanPass2 pkgs = - map (mapConfiguredPackage setStanzasDepsAndTargets) pkgs - where - setStanzasDepsAndTargets elab = - elab { - elabBuildTargets = ordNub - $ elabBuildTargets elab - ++ libTargetsRequiredForRevDeps - ++ exeTargetsRequiredForRevDeps, - elabPkgOrComp = - case elabPkgOrComp elab of - ElabPackage pkg -> - let stanzas = pkgStanzasEnabled pkg - <> optionalStanzasWithDepsAvailable availablePkgs elab pkg - keepNeeded (CD.ComponentTest _) _ = TestStanzas `Set.member` stanzas - keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas - keepNeeded _ _ = True - in ElabPackage $ pkg { - pkgStanzasEnabled = stanzas, - pkgLibDependencies = CD.filterDeps keepNeeded (pkgLibDependencies pkg), - pkgExeDependencies = CD.filterDeps keepNeeded (pkgExeDependencies pkg), - pkgExeDependencyPaths = CD.filterDeps keepNeeded (pkgExeDependencyPaths pkg) - } - r@(ElabComponent _) -> r - } - where - libTargetsRequiredForRevDeps = - [ ComponentTarget Cabal.defaultLibName WholeComponent - | installedUnitId elab `Set.member` hasReverseLibDeps - ] - exeTargetsRequiredForRevDeps = - -- TODO: allow requesting executable with different name - -- than package name - [ ComponentTarget (Cabal.CExeName - $ packageNameToUnqualComponentName - $ packageName $ elabPkgSourceId elab) - WholeComponent - | installedUnitId elab `Set.member` hasReverseExeDeps - ] - - - availablePkgs :: Set UnitId - availablePkgs = Set.fromList (map installedUnitId pkgs) - - hasReverseLibDeps :: Set UnitId - hasReverseLibDeps = - Set.fromList [ depid - | InstallPlan.Configured pkg <- 
pkgs - , depid <- elabOrderLibDependencies pkg ] - - hasReverseExeDeps :: Set UnitId - hasReverseExeDeps = - Set.fromList [ depid - | InstallPlan.Configured pkg <- pkgs - , depid <- elabOrderExeDependencies pkg ] - -mapConfiguredPackage :: (srcpkg -> srcpkg') - -> InstallPlan.GenericPlanPackage ipkg srcpkg - -> InstallPlan.GenericPlanPackage ipkg srcpkg' -mapConfiguredPackage f (InstallPlan.Configured pkg) = - InstallPlan.Configured (f pkg) -mapConfiguredPackage f (InstallPlan.Installed pkg) = - InstallPlan.Installed (f pkg) -mapConfiguredPackage _ (InstallPlan.PreExisting pkg) = - InstallPlan.PreExisting pkg - -componentOptionalStanza :: Cabal.ComponentName -> Maybe OptionalStanza -componentOptionalStanza (Cabal.CTestName _) = Just TestStanzas -componentOptionalStanza (Cabal.CBenchName _) = Just BenchStanzas -componentOptionalStanza _ = Nothing - ------------------------------------- --- Support for --only-dependencies --- - --- | Try to remove the given targets from the install plan. --- --- This is not always possible. --- -pruneInstallPlanToDependencies :: Set UnitId - -> ElaboratedInstallPlan - -> Either CannotPruneDependencies - ElaboratedInstallPlan -pruneInstallPlanToDependencies pkgTargets installPlan = - assert (all (isJust . InstallPlan.lookup installPlan) - (Set.toList pkgTargets)) $ - - fmap (InstallPlan.new (InstallPlan.planIndepGoals installPlan)) - . checkBrokenDeps - . Graph.fromDistinctList - . filter (\pkg -> installedUnitId pkg `Set.notMember` pkgTargets) - . InstallPlan.toList - $ installPlan - where - -- Our strategy is to remove the packages we don't want and then check - -- if the remaining graph is broken or not, ie any packages with dangling - -- dependencies. If there are then we cannot prune the given targets. 
- checkBrokenDeps :: Graph.Graph ElaboratedPlanPackage - -> Either CannotPruneDependencies - (Graph.Graph ElaboratedPlanPackage) - checkBrokenDeps graph = - case Graph.broken graph of - [] -> Right graph - brokenPackages -> - Left $ CannotPruneDependencies - [ (pkg, missingDeps) - | (pkg, missingDepIds) <- brokenPackages - , let missingDeps = mapMaybe lookupDep missingDepIds - ] - where - -- lookup in the original unpruned graph - lookupDep = InstallPlan.lookup installPlan - --- | It is not always possible to prune to only the dependencies of a set of --- targets. It may be the case that removing a package leaves something else --- that still needed the pruned package. --- --- This lists all the packages that would be broken, and their dependencies --- that would be missing if we did prune. --- -newtype CannotPruneDependencies = - CannotPruneDependencies [(ElaboratedPlanPackage, - [ElaboratedPlanPackage])] - deriving (Show) - - ---------------------------- --- Setup.hs script policy --- - --- Handling for Setup.hs scripts is a bit tricky, part of it lives in the --- solver phase, and part in the elaboration phase. We keep the helper --- functions for both phases together here so at least you can see all of it --- in one place. --- --- There are four major cases for Setup.hs handling: --- --- 1. @build-type@ Custom with a @custom-setup@ section --- 2. @build-type@ Custom without a @custom-setup@ section --- 3. @build-type@ not Custom with @cabal-version > $our-cabal-version@ --- 4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@ --- --- It's also worth noting that packages specifying @cabal-version: >= 1.23@ --- or later that have @build-type@ Custom will always have a @custom-setup@ --- section. Therefore in case 2, the specified @cabal-version@ will always be --- less than 1.23. --- --- In cases 1 and 2 we obviously have to build an external Setup.hs script, --- while in case 4 we can use the internal library API. 
In case 3 we also have --- to build an external Setup.hs script because the package needs a later --- Cabal lib version than we can support internally. --- --- data SetupScriptStyle = ... -- see ProjectPlanning.Types - --- | Work out the 'SetupScriptStyle' given the package description. --- -packageSetupScriptStyle :: PD.PackageDescription -> SetupScriptStyle -packageSetupScriptStyle pkg - | buildType == PD.Custom - , Just setupbi <- PD.setupBuildInfo pkg -- does have a custom-setup stanza - , not (PD.defaultSetupDepends setupbi) -- but not one we added internally - = SetupCustomExplicitDeps - - | buildType == PD.Custom - , Just setupbi <- PD.setupBuildInfo pkg -- we get this case post-solver as - , PD.defaultSetupDepends setupbi -- the solver fills in the deps - = SetupCustomImplicitDeps - - | buildType == PD.Custom - , Nothing <- PD.setupBuildInfo pkg -- we get this case pre-solver - = SetupCustomImplicitDeps - - | PD.specVersion pkg > cabalVersion -- one cabal-install is built against - = SetupNonCustomExternalLib - - | otherwise - = SetupNonCustomInternalLib - where - buildType = PD.buildType pkg - - --- | Part of our Setup.hs handling policy is implemented by getting the solver --- to work out setup dependencies for packages. The solver already handles --- packages that explicitly specify setup dependencies, but we can also tell --- the solver to treat other packages as if they had setup dependencies. --- That's what this function does, it gets called by the solver for all --- packages that don't already have setup dependencies. --- --- The dependencies we want to add is different for each 'SetupScriptStyle'. --- --- Note that adding default deps means these deps are actually /added/ to the --- packages that we get out of the solver in the 'SolverInstallPlan'. Making --- implicit setup deps explicit is a problem in the post-solver stages because --- we still need to distinguish the case of explicit and implict setup deps. --- See 'rememberImplicitSetupDeps'. 
--- --- Note in addition to adding default setup deps, we also use --- 'addSetupCabalMinVersionConstraint' (in 'planPackages') to require --- @Cabal >= 1.20@ for Setup scripts. --- -defaultSetupDeps :: Compiler -> Platform - -> PD.PackageDescription - -> Maybe [Dependency] -defaultSetupDeps compiler platform pkg = - case packageSetupScriptStyle pkg of - - -- For packages with build type custom that do not specify explicit - -- setup dependencies, we add a dependency on Cabal and a number - -- of other packages. - SetupCustomImplicitDeps -> - Just $ - [ Dependency depPkgname anyVersion - | depPkgname <- legacyCustomSetupPkgs compiler platform ] ++ - [ Dependency cabalPkgname cabalConstraint - | packageName pkg /= cabalPkgname ] - where - -- The Cabal dep is slightly special: - -- * We omit the dep for the Cabal lib itself, since it bootstraps. - -- * We constrain it to be < 1.25 - -- - -- Note: we also add a global constraint to require Cabal >= 1.20 - -- for Setup scripts (see use addSetupCabalMinVersionConstraint). - -- - cabalConstraint = orLaterVersion (PD.specVersion pkg) - `intersectVersionRanges` - earlierVersion cabalCompatMaxVer - -- The idea here is that at some point we will make significant - -- breaking changes to the Cabal API that Setup.hs scripts use. - -- So for old custom Setup scripts that do not specify explicit - -- constraints, we constrain them to use a compatible Cabal version. - cabalCompatMaxVer = mkVersion [1,25] - - -- For other build types (like Simple) if we still need to compile an - -- external Setup.hs, it'll be one of the simple ones that only depends - -- on Cabal and base. - SetupNonCustomExternalLib -> - Just [ Dependency cabalPkgname cabalConstraint - , Dependency basePkgname anyVersion ] - where - cabalConstraint = orLaterVersion (PD.specVersion pkg) - - -- The internal setup wrapper method has no deps at all. 
- SetupNonCustomInternalLib -> Just [] - - -- This case gets ruled out by the caller, planPackages, see the note - -- above in the SetupCustomImplicitDeps case. - SetupCustomExplicitDeps -> - error $ "defaultSetupDeps: called for a package with explicit " - ++ "setup deps: " ++ display (packageId pkg) - - --- | Work out which version of the Cabal spec we will be using to talk to the --- Setup.hs interface for this package. --- --- This depends somewhat on the 'SetupScriptStyle' but most cases are a result --- of what the solver picked for us, based on the explicit setup deps or the --- ones added implicitly by 'defaultSetupDeps'. --- -packageSetupScriptSpecVersion :: SetupScriptStyle - -> PD.PackageDescription - -> Graph.Graph NonSetupLibDepSolverPlanPackage - -> ComponentDeps [SolverId] - -> Version - --- We're going to be using the internal Cabal library, so the spec version of --- that is simply the version of the Cabal library that cabal-install has been --- built with. -packageSetupScriptSpecVersion SetupNonCustomInternalLib _ _ _ = - cabalVersion - --- If we happen to be building the Cabal lib itself then because that --- bootstraps itself then we use the version of the lib we're building. -packageSetupScriptSpecVersion SetupCustomImplicitDeps pkg _ _ - | packageName pkg == cabalPkgname - = packageVersion pkg - --- In all other cases we have a look at what version of the Cabal lib the --- solver picked. Or if it didn't depend on Cabal at all (which is very rare) --- then we look at the .cabal file to see what spec version it declares. -packageSetupScriptSpecVersion _ pkg libDepGraph deps = - case find ((cabalPkgname ==) . 
packageName) setupLibDeps of - Just dep -> packageVersion dep - Nothing -> PD.specVersion pkg - where - setupLibDeps = map packageId $ fromMaybe [] $ - Graph.closure libDepGraph (CD.setupDeps deps) - - -cabalPkgname, basePkgname :: PackageName -cabalPkgname = mkPackageName "Cabal" -basePkgname = mkPackageName "base" - - -legacyCustomSetupPkgs :: Compiler -> Platform -> [PackageName] -legacyCustomSetupPkgs compiler (Platform _ os) = - map mkPackageName $ - [ "array", "base", "binary", "bytestring", "containers" - , "deepseq", "directory", "filepath", "old-time", "pretty" - , "process", "time", "transformers" ] - ++ [ "Win32" | os == Windows ] - ++ [ "unix" | os /= Windows ] - ++ [ "ghc-prim" | isGHC ] - ++ [ "template-haskell" | isGHC ] - where - isGHC = compilerCompatFlavor GHC compiler - --- The other aspects of our Setup.hs policy lives here where we decide on --- the 'SetupScriptOptions'. --- --- Our current policy for the 'SetupCustomImplicitDeps' case is that we --- try to make the implicit deps cover everything, and we don't allow the --- compiler to pick up other deps. This may or may not be sustainable, and --- we might have to allow the deps to be non-exclusive, but that itself would --- be tricky since we would have to allow the Setup access to all the packages --- in the store and local dbs. - -setupHsScriptOptions :: ElaboratedReadyPackage - -> ElaboratedInstallPlan - -> ElaboratedSharedConfig - -> DistDirLayout - -> FilePath - -> FilePath - -> Bool - -> Lock - -> SetupScriptOptions --- TODO: Fix this so custom is a separate component. Custom can ALWAYS --- be a separate component!!! 
-setupHsScriptOptions (ReadyPackage elab@ElaboratedConfiguredPackage{..}) - plan ElaboratedSharedConfig{..} distdir srcdir builddir - isParallelBuild cacheLock = - SetupScriptOptions { - useCabalVersion = thisVersion elabSetupScriptCliVersion, - useCabalSpecVersion = Just elabSetupScriptCliVersion, - useCompiler = Just pkgConfigCompiler, - usePlatform = Just pkgConfigPlatform, - usePackageDB = elabSetupPackageDBStack, - usePackageIndex = Nothing, - useDependencies = [ (uid, srcid) - | ConfiguredId srcid (Just CLibName) uid - <- elabSetupDependencies elab ], - useDependenciesExclusive = True, - useVersionMacros = elabSetupScriptStyle == SetupCustomExplicitDeps, - useProgramDb = pkgConfigCompilerProgs, - useDistPref = builddir, - useLoggingHandle = Nothing, -- this gets set later - useWorkingDir = Just srcdir, - useExtraPathEnv = elabExeDependencyPaths elab, - useExtraEnvOverrides = dataDirsEnvironmentForPlan distdir plan, - useWin32CleanHack = False, --TODO: [required eventually] - forceExternalSetupMethod = isParallelBuild, - setupCacheLock = Just cacheLock, - isInteractive = False - } - - --- | To be used for the input for elaborateInstallPlan. --- --- TODO: [code cleanup] make InstallDirs.defaultInstallDirs pure. 
--- -userInstallDirTemplates :: Compiler - -> IO InstallDirs.InstallDirTemplates -userInstallDirTemplates compiler = do - InstallDirs.defaultInstallDirs - (compilerFlavor compiler) - True -- user install - False -- unused - -storePackageInstallDirs :: StoreDirLayout - -> CompilerId - -> InstalledPackageId - -> InstallDirs.InstallDirs FilePath -storePackageInstallDirs StoreDirLayout{ storePackageDirectory - , storeDirectory } - compid ipkgid = - InstallDirs.InstallDirs {..} - where - store = storeDirectory compid - prefix = storePackageDirectory compid (newSimpleUnitId ipkgid) - bindir = prefix "bin" - libdir = prefix "lib" - libsubdir = "" - -- Note: on macOS, we place libraries into - -- @store/lib@ to work around the load - -- command size limit of macOSs mach-o linker. - -- See also @PackageHash.hashedInstalledPackageIdVeryShort@ - dynlibdir | buildOS == OSX = store "lib" - | otherwise = libdir - flibdir = libdir - libexecdir = prefix "libexec" - libexecsubdir= "" - includedir = libdir "include" - datadir = prefix "share" - datasubdir = "" - docdir = datadir "doc" - mandir = datadir "man" - htmldir = docdir "html" - haddockdir = htmldir - sysconfdir = prefix "etc" - - ---TODO: [code cleanup] perhaps reorder this code --- based on the ElaboratedInstallPlan + ElaboratedSharedConfig, --- make the various Setup.hs {configure,build,copy} flags - - -setupHsConfigureFlags :: ElaboratedReadyPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> Cabal.ConfigFlags -setupHsConfigureFlags (ReadyPackage elab@ElaboratedConfiguredPackage{..}) - sharedConfig@ElaboratedSharedConfig{..} - verbosity builddir = - sanityCheckElaboratedConfiguredPackage sharedConfig elab - (Cabal.ConfigFlags {..}) - where - configArgs = mempty -- unused, passed via args - configDistPref = toFlag builddir - configCabalFilePath = mempty - configVerbosity = toFlag verbosity - - configInstantiateWith = Map.toList elabInstantiatedWith - - configDeterministic = mempty -- doesn't matter, 
configIPID/configCID overridese - configIPID = case elabPkgOrComp of - ElabPackage pkg -> toFlag (display (pkgInstalledId pkg)) - ElabComponent _ -> mempty - configCID = case elabPkgOrComp of - ElabPackage _ -> mempty - ElabComponent _ -> toFlag elabComponentId - - configProgramPaths = Map.toList elabProgramPaths - configProgramArgs - | {- elabSetupScriptCliVersion < mkVersion [1,24,3] -} True - -- workaround for - -- - -- It turns out, that even with Cabal 2.0, there's still cases such as e.g. - -- custom Setup.hs scripts calling out to GHC even when going via - -- @runProgram ghcProgram@, as e.g. happy does in its - -- - -- (see also ) - -- - -- So for now, let's pass the rather harmless and idempotent - -- `-hide-all-packages` flag to all invocations (which has - -- the benefit that every GHC invocation starts with a - -- conistently well-defined clean slate) until we find a - -- better way. - = Map.toList $ - Map.insertWith (++) "ghc" ["-hide-all-packages"] - elabProgramArgs - | otherwise = Map.toList elabProgramArgs - configProgramPathExtra = toNubList elabProgramPathExtra - configHcFlavor = toFlag (compilerFlavor pkgConfigCompiler) - configHcPath = mempty -- we use configProgramPaths instead - configHcPkg = mempty -- we use configProgramPaths instead - - configVanillaLib = toFlag elabVanillaLib - configSharedLib = toFlag elabSharedLib - configStaticLib = toFlag elabStaticLib - - configDynExe = toFlag elabDynExe - configGHCiLib = toFlag elabGHCiLib - configProfExe = mempty - configProfLib = toFlag elabProfLib - configProf = toFlag elabProfExe - - -- configProfDetail is for exe+lib, but overridden by configProfLibDetail - -- so we specify both so we can specify independently - configProfDetail = toFlag elabProfExeDetail - configProfLibDetail = toFlag elabProfLibDetail - - configCoverage = toFlag elabCoverage - configLibCoverage = mempty - - configOptimization = toFlag elabOptimization - configSplitSections = toFlag elabSplitSections - configSplitObjs = toFlag 
elabSplitObjs - configStripExes = toFlag elabStripExes - configStripLibs = toFlag elabStripLibs - configDebugInfo = toFlag elabDebugInfo - - configConfigurationsFlags = elabFlagAssignment - configConfigureArgs = elabConfigureScriptArgs - configExtraLibDirs = elabExtraLibDirs - configExtraFrameworkDirs = elabExtraFrameworkDirs - configExtraIncludeDirs = elabExtraIncludeDirs - configProgPrefix = maybe mempty toFlag elabProgPrefix - configProgSuffix = maybe mempty toFlag elabProgSuffix - - configInstallDirs = fmap (toFlag . InstallDirs.toPathTemplate) - elabInstallDirs - - -- we only use configDependencies, unless we're talking to an old Cabal - -- in which case we use configConstraints - -- NB: This does NOT use InstallPlan.depends, which includes executable - -- dependencies which should NOT be fed in here (also you don't have - -- enough info anyway) - configDependencies = [ (case mb_cn of - -- Special case for internal libraries - Just (CSubLibName uqn) - | packageId elab == srcid - -> mkPackageName (unUnqualComponentName uqn) - _ -> packageName srcid, - cid) - | ConfiguredId srcid mb_cn cid <- elabLibDependencies elab ] - configConstraints = - case elabPkgOrComp of - ElabPackage _ -> - [ thisPackageVersion srcid - | ConfiguredId srcid _ _uid <- elabLibDependencies elab ] - ElabComponent _ -> [] - - - -- explicitly clear, then our package db stack - -- TODO: [required eventually] have to do this differently for older Cabal versions - configPackageDBs = Nothing : map Just elabBuildPackageDBStack - - configTests = case elabPkgOrComp of - ElabPackage pkg -> toFlag (TestStanzas `Set.member` pkgStanzasEnabled pkg) - ElabComponent _ -> mempty - configBenchmarks = case elabPkgOrComp of - ElabPackage pkg -> toFlag (BenchStanzas `Set.member` pkgStanzasEnabled pkg) - ElabComponent _ -> mempty - - configExactConfiguration = toFlag True - configFlagError = mempty --TODO: [research required] appears not to be implemented - configRelocatable = mempty --TODO: [research required] 
??? - configScratchDir = mempty -- never use - configUserInstall = mempty -- don't rely on defaults - configPrograms_ = mempty -- never use, shouldn't exist - configUseResponseFiles = mempty - -setupHsConfigureArgs :: ElaboratedConfiguredPackage - -> [String] -setupHsConfigureArgs (ElaboratedConfiguredPackage { elabPkgOrComp = ElabPackage _ }) = [] -setupHsConfigureArgs elab@(ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent comp }) = - [showComponentTarget (packageId elab) (ComponentTarget cname WholeComponent)] - where - cname = fromMaybe (error "setupHsConfigureArgs: trying to configure setup") - (compComponentName comp) - -setupHsBuildFlags :: ElaboratedConfiguredPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> Cabal.BuildFlags -setupHsBuildFlags _ _ verbosity builddir = - Cabal.BuildFlags { - buildProgramPaths = mempty, --unused, set at configure time - buildProgramArgs = mempty, --unused, set at configure time - buildVerbosity = toFlag verbosity, - buildDistPref = toFlag builddir, - buildNumJobs = mempty, --TODO: [nice to have] sometimes want to use toFlag (Just numBuildJobs), - buildArgs = mempty, -- unused, passed via args not flags - buildCabalFilePath= mempty - } - - -setupHsBuildArgs :: ElaboratedConfiguredPackage -> [String] -setupHsBuildArgs elab@(ElaboratedConfiguredPackage { elabPkgOrComp = ElabPackage _ }) - -- Fix for #3335, don't pass build arguments if it's not supported - | elabSetupScriptCliVersion elab >= mkVersion [1,17] - = map (showComponentTarget (packageId elab)) (elabBuildTargets elab) - | otherwise - = [] -setupHsBuildArgs (ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent _ }) - = [] - - -setupHsTestFlags :: ElaboratedConfiguredPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> Cabal.TestFlags -setupHsTestFlags _ _ verbosity builddir = Cabal.TestFlags - { testDistPref = toFlag builddir - , testVerbosity = toFlag verbosity - , testMachineLog = mempty - , testHumanLog = mempty - 
, testShowDetails = toFlag Cabal.Always - , testKeepTix = mempty - , testOptions = mempty - } - -setupHsTestArgs :: ElaboratedConfiguredPackage -> [String] --- TODO: Does the issue #3335 affects test as well -setupHsTestArgs elab = - mapMaybe (showTestComponentTarget (packageId elab)) (elabTestTargets elab) - - -setupHsBenchFlags :: ElaboratedConfiguredPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> Cabal.BenchmarkFlags -setupHsBenchFlags _ _ verbosity builddir = Cabal.BenchmarkFlags - { benchmarkDistPref = toFlag builddir - , benchmarkVerbosity = toFlag verbosity - , benchmarkOptions = mempty - } - -setupHsBenchArgs :: ElaboratedConfiguredPackage -> [String] -setupHsBenchArgs elab = - mapMaybe (showBenchComponentTarget (packageId elab)) (elabBenchTargets elab) - - -setupHsReplFlags :: ElaboratedConfiguredPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> Cabal.ReplFlags -setupHsReplFlags _ sharedConfig verbosity builddir = - Cabal.ReplFlags { - replProgramPaths = mempty, --unused, set at configure time - replProgramArgs = mempty, --unused, set at configure time - replVerbosity = toFlag verbosity, - replDistPref = toFlag builddir, - replReload = mempty, --only used as callback from repl - replReplOptions = pkgConfigReplOptions sharedConfig --runtime override for repl flags - } - - -setupHsReplArgs :: ElaboratedConfiguredPackage -> [String] -setupHsReplArgs elab = - maybe [] (\t -> [showComponentTarget (packageId elab) t]) (elabReplTarget elab) - --TODO: should be able to give multiple modules in one component - - -setupHsCopyFlags :: ElaboratedConfiguredPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> FilePath - -> Cabal.CopyFlags -setupHsCopyFlags _ _ verbosity builddir destdir = - Cabal.CopyFlags { - copyArgs = [], -- TODO: could use this to only copy what we enabled - copyDest = toFlag (InstallDirs.CopyTo destdir), - copyDistPref = toFlag builddir, - copyVerbosity = toFlag verbosity, - 
copyCabalFilePath = mempty - } - -setupHsRegisterFlags :: ElaboratedConfiguredPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> FilePath - -> Cabal.RegisterFlags -setupHsRegisterFlags ElaboratedConfiguredPackage{..} _ - verbosity builddir pkgConfFile = - Cabal.RegisterFlags { - regPackageDB = mempty, -- misfeature - regGenScript = mempty, -- never use - regGenPkgConf = toFlag (Just pkgConfFile), - regInPlace = case elabBuildStyle of - BuildInplaceOnly -> toFlag True - _ -> toFlag False, - regPrintId = mempty, -- never use - regDistPref = toFlag builddir, - regArgs = [], - regVerbosity = toFlag verbosity, - regCabalFilePath = mempty - } - -setupHsHaddockFlags :: ElaboratedConfiguredPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> Cabal.HaddockFlags -setupHsHaddockFlags (ElaboratedConfiguredPackage{..}) _ verbosity builddir = - Cabal.HaddockFlags { - haddockProgramPaths = mempty, --unused, set at configure time - haddockProgramArgs = mempty, --unused, set at configure time - haddockHoogle = toFlag elabHaddockHoogle, - haddockHtml = toFlag elabHaddockHtml, - haddockHtmlLocation = maybe mempty toFlag elabHaddockHtmlLocation, - haddockForHackage = toFlag elabHaddockForHackage, - haddockForeignLibs = toFlag elabHaddockForeignLibs, - haddockExecutables = toFlag elabHaddockExecutables, - haddockTestSuites = toFlag elabHaddockTestSuites, - haddockBenchmarks = toFlag elabHaddockBenchmarks, - haddockInternal = toFlag elabHaddockInternal, - haddockCss = maybe mempty toFlag elabHaddockCss, - haddockLinkedSource = toFlag elabHaddockLinkedSource, - haddockQuickJump = toFlag elabHaddockQuickJump, - haddockHscolourCss = maybe mempty toFlag elabHaddockHscolourCss, - haddockContents = maybe mempty toFlag elabHaddockContents, - haddockDistPref = toFlag builddir, - haddockKeepTempFiles = mempty, --TODO: from build settings - haddockVerbosity = toFlag verbosity, - haddockCabalFilePath = mempty, - haddockArgs = mempty - } - -setupHsHaddockArgs :: 
ElaboratedConfiguredPackage -> [String] --- TODO: Does the issue #3335 affects test as well -setupHsHaddockArgs elab = - map (showComponentTarget (packageId elab)) (elabHaddockTargets elab) - -{- -setupHsTestFlags :: ElaboratedConfiguredPackage - -> ElaboratedSharedConfig - -> Verbosity - -> FilePath - -> Cabal.TestFlags -setupHsTestFlags _ _ verbosity builddir = - Cabal.TestFlags { - } --} - ------------------------------------------------------------------------------- --- * Sharing installed packages ------------------------------------------------------------------------------- - --- --- Nix style store management for tarball packages --- --- So here's our strategy: --- --- We use a per-user nix-style hashed store, but /only/ for tarball packages. --- So that includes packages from hackage repos (and other http and local --- tarballs). For packages in local directories we do not register them into --- the shared store by default, we just build them locally inplace. --- --- The reason we do it like this is that it's easy to make stable hashes for --- tarball packages, and these packages benefit most from sharing. By contrast --- unpacked dir packages are harder to hash and they tend to change more --- frequently so there's less benefit to sharing them. --- --- When using the nix store approach we have to run the solver *without* --- looking at the packages installed in the store, just at the source packages --- (plus core\/global installed packages). Then we do a post-processing pass --- to replace configured packages in the plan with pre-existing ones, where --- possible. Where possible of course means where the nix-style package hash --- equals one that's already in the store. --- --- One extra wrinkle is that unless we know package tarball hashes upfront, we --- will have to download the tarballs to find their hashes. 
So we have two --- options: delay replacing source with pre-existing installed packages until --- the point during the execution of the install plan where we have the --- tarball, or try to do as much up-front as possible and then check again --- during plan execution. The former isn't great because we would end up --- telling users we're going to re-install loads of packages when in fact we --- would just share them. It'd be better to give as accurate a prediction as --- we can. The latter is better for users, but we do still have to check --- during plan execution because it's important that we don't replace existing --- installed packages even if they have the same package hash, because we --- don't guarantee ABI stability. - --- TODO: [required eventually] for safety of concurrent installs, we must make sure we register but --- not replace installed packages with ghc-pkg. - -packageHashInputs :: ElaboratedSharedConfig - -> ElaboratedConfiguredPackage - -> PackageHashInputs -packageHashInputs - pkgshared - elab@(ElaboratedConfiguredPackage { - elabPkgSourceHash = Just srchash - }) = - PackageHashInputs { - pkgHashPkgId = packageId elab, - pkgHashComponent = - case elabPkgOrComp elab of - ElabPackage _ -> Nothing - ElabComponent comp -> Just (compSolverName comp), - pkgHashSourceHash = srchash, - pkgHashPkgConfigDeps = Set.fromList (elabPkgConfigDependencies elab), - pkgHashDirectDeps = - case elabPkgOrComp elab of - ElabPackage (ElaboratedPackage{..}) -> - Set.fromList $ - [ confInstId dep - | dep <- CD.select relevantDeps pkgLibDependencies ] ++ - [ confInstId dep - | dep <- CD.select relevantDeps pkgExeDependencies ] - ElabComponent comp -> - Set.fromList (map confInstId (compLibDependencies comp - ++ compExeDependencies comp)), - pkgHashOtherConfig = packageHashConfigInputs pkgshared elab - } - where - -- Obviously the main deps are relevant - relevantDeps CD.ComponentLib = True - relevantDeps (CD.ComponentSubLib _) = True - relevantDeps (CD.ComponentFLib _) 
= True - relevantDeps (CD.ComponentExe _) = True - -- Setup deps can affect the Setup.hs behaviour and thus what is built - relevantDeps CD.ComponentSetup = True - -- However testsuites and benchmarks do not get installed and should not - -- affect the result, so we do not include them. - relevantDeps (CD.ComponentTest _) = False - relevantDeps (CD.ComponentBench _) = False - -packageHashInputs _ pkg = - error $ "packageHashInputs: only for packages with source hashes. " - ++ display (packageId pkg) - -packageHashConfigInputs :: ElaboratedSharedConfig - -> ElaboratedConfiguredPackage - -> PackageHashConfigInputs -packageHashConfigInputs shared@ElaboratedSharedConfig{..} pkg = - PackageHashConfigInputs { - pkgHashCompilerId = compilerId pkgConfigCompiler, - pkgHashPlatform = pkgConfigPlatform, - pkgHashFlagAssignment = elabFlagAssignment, - pkgHashConfigureScriptArgs = elabConfigureScriptArgs, - pkgHashVanillaLib = elabVanillaLib, - pkgHashSharedLib = elabSharedLib, - pkgHashDynExe = elabDynExe, - pkgHashGHCiLib = elabGHCiLib, - pkgHashProfLib = elabProfLib, - pkgHashProfExe = elabProfExe, - pkgHashProfLibDetail = elabProfLibDetail, - pkgHashProfExeDetail = elabProfExeDetail, - pkgHashCoverage = elabCoverage, - pkgHashOptimization = elabOptimization, - pkgHashSplitSections = elabSplitSections, - pkgHashSplitObjs = elabSplitObjs, - pkgHashStripLibs = elabStripLibs, - pkgHashStripExes = elabStripExes, - pkgHashDebugInfo = elabDebugInfo, - pkgHashProgramArgs = elabProgramArgs, - pkgHashExtraLibDirs = elabExtraLibDirs, - pkgHashExtraFrameworkDirs = elabExtraFrameworkDirs, - pkgHashExtraIncludeDirs = elabExtraIncludeDirs, - pkgHashProgPrefix = elabProgPrefix, - pkgHashProgSuffix = elabProgSuffix, - - pkgHashDocumentation = elabBuildHaddocks, - pkgHashHaddockHoogle = elabHaddockHoogle, - pkgHashHaddockHtml = elabHaddockHtml, - pkgHashHaddockHtmlLocation = elabHaddockHtmlLocation, - pkgHashHaddockForeignLibs = elabHaddockForeignLibs, - pkgHashHaddockExecutables = 
elabHaddockExecutables, - pkgHashHaddockTestSuites = elabHaddockTestSuites, - pkgHashHaddockBenchmarks = elabHaddockBenchmarks, - pkgHashHaddockInternal = elabHaddockInternal, - pkgHashHaddockCss = elabHaddockCss, - pkgHashHaddockLinkedSource = elabHaddockLinkedSource, - pkgHashHaddockQuickJump = elabHaddockQuickJump, - pkgHashHaddockContents = elabHaddockContents - } - where - ElaboratedConfiguredPackage{..} = normaliseConfiguredPackage shared pkg - --- | Given the 'InstalledPackageIndex' for a nix-style package store, and an --- 'ElaboratedInstallPlan', replace configured source packages by installed --- packages from the store whenever they exist. --- -improveInstallPlanWithInstalledPackages :: Set UnitId - -> ElaboratedInstallPlan - -> ElaboratedInstallPlan -improveInstallPlanWithInstalledPackages installedPkgIdSet = - InstallPlan.installed canPackageBeImproved - where - canPackageBeImproved pkg = - installedUnitId pkg `Set.member` installedPkgIdSet - --TODO: sanity checks: - -- * the installed package must have the expected deps etc - -- * the installed package must not be broken, valid dep closure - - --TODO: decide what to do if we encounter broken installed packages, - -- since overwriting is never safe. - - --- Path construction ------- - --- | The path to the directory that contains a specific executable. --- NB: For inplace NOT InstallPaths.bindir installDirs; for an --- inplace build those values are utter nonsense. So we --- have to guess where the directory is going to be. --- Fortunately this is "stable" part of Cabal API. --- But the way we get the build directory is A HORRIBLE --- HACK. -binDirectoryFor - :: DistDirLayout - -> ElaboratedSharedConfig - -> ElaboratedConfiguredPackage - -> FilePath - -> FilePath -binDirectoryFor layout config package exe = case elabBuildStyle package of - BuildAndInstall -> installedBinDirectory package - BuildInplaceOnly -> inplaceBinRoot layout config package exe - --- package has been built and installed. 
-installedBinDirectory :: ElaboratedConfiguredPackage -> FilePath -installedBinDirectory = InstallDirs.bindir . elabInstallDirs - --- | The path to the @build@ directory for an inplace build. -inplaceBinRoot - :: DistDirLayout - -> ElaboratedSharedConfig - -> ElaboratedConfiguredPackage - -> FilePath -inplaceBinRoot layout config package - = distBuildDirectory layout (elabDistDirParams config package) - "build" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectPlanOutput.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectPlanOutput.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/ProjectPlanOutput.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/ProjectPlanOutput.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,935 +0,0 @@ -{-# LANGUAGE BangPatterns, RecordWildCards, NamedFieldPuns, - DeriveGeneric, DeriveDataTypeable, GeneralizedNewtypeDeriving, - ScopedTypeVariables #-} - -module Distribution.Client.ProjectPlanOutput ( - -- * Plan output - writePlanExternalRepresentation, - - -- * Project status - -- | Several outputs rely on having a general overview of - PostBuildProjectStatus(..), - updatePostBuildProjectStatus, - createPackageEnvironment, - writePlanGhcEnvironment, - argsEquivalentOfGhcEnvironmentFile, - ) where - -import Distribution.Client.ProjectPlanning.Types -import Distribution.Client.ProjectBuilding.Types -import Distribution.Client.DistDirLayout -import Distribution.Client.Types (Repo(..), RemoteRepo(..), PackageLocation(..), confInstId) -import Distribution.Client.PackageHash (showHashValue) - -import qualified Distribution.Client.InstallPlan as InstallPlan -import qualified Distribution.Client.Utils.Json as J -import qualified Distribution.Simple.InstallDirs as InstallDirs - -import qualified 
Distribution.Solver.Types.ComponentDeps as ComponentDeps - -import Distribution.Package -import Distribution.System -import Distribution.InstalledPackageInfo (InstalledPackageInfo) -import qualified Distribution.PackageDescription as PD -import Distribution.Compiler (CompilerFlavor(GHC, GHCJS)) -import Distribution.Simple.Compiler - ( PackageDBStack, PackageDB(..) - , compilerVersion, compilerFlavor, showCompilerId - , compilerId, CompilerId(..), Compiler ) -import Distribution.Simple.GHC - ( getImplInfo, GhcImplInfo(supportsPkgEnvFiles) - , GhcEnvironmentFileEntry(..), simpleGhcEnvironmentFile - , writeGhcEnvironmentFile ) -import Distribution.Text -import qualified Distribution.Compat.Graph as Graph -import Distribution.Compat.Graph (Graph, Node) -import qualified Distribution.Compat.Binary as Binary -import Distribution.Simple.Utils -import Distribution.Verbosity -import qualified Paths_cabal_install as Our (version) - -import Prelude () -import Distribution.Client.Compat.Prelude - -import qualified Data.Map as Map -import Data.Set (Set) -import qualified Data.Set as Set -import qualified Data.ByteString.Lazy as BS -import qualified Data.ByteString.Builder as BB - -import System.FilePath -import System.IO - -import Distribution.Simple.Program.GHC (packageDbArgsDb) - ------------------------------------------------------------------------------ --- Writing plan.json files --- - --- | Write out a representation of the elaborated install plan. --- --- This is for the benefit of debugging and external tools like editors. --- -writePlanExternalRepresentation :: DistDirLayout - -> ElaboratedInstallPlan - -> ElaboratedSharedConfig - -> IO () -writePlanExternalRepresentation distDirLayout elaboratedInstallPlan - elaboratedSharedConfig = - writeFileAtomic (distProjectCacheFile distDirLayout "plan.json") $ - BB.toLazyByteString - . 
J.encodeToBuilder - $ encodePlanAsJson distDirLayout elaboratedInstallPlan elaboratedSharedConfig - --- | Renders a subset of the elaborated install plan in a semi-stable JSON --- format. --- -encodePlanAsJson :: DistDirLayout -> ElaboratedInstallPlan -> ElaboratedSharedConfig -> J.Value -encodePlanAsJson distDirLayout elaboratedInstallPlan elaboratedSharedConfig = - --TODO: [nice to have] include all of the sharedPackageConfig and all of - -- the parts of the elaboratedInstallPlan - J.object [ "cabal-version" J..= jdisplay Our.version - , "cabal-lib-version" J..= jdisplay cabalVersion - , "compiler-id" J..= (J.String . showCompilerId . pkgConfigCompiler) - elaboratedSharedConfig - , "os" J..= jdisplay os - , "arch" J..= jdisplay arch - , "install-plan" J..= installPlanToJ elaboratedInstallPlan - ] - where - Platform arch os = pkgConfigPlatform elaboratedSharedConfig - - installPlanToJ :: ElaboratedInstallPlan -> [J.Value] - installPlanToJ = map planPackageToJ . InstallPlan.toList - - planPackageToJ :: ElaboratedPlanPackage -> J.Value - planPackageToJ pkg = - case pkg of - InstallPlan.PreExisting ipi -> installedPackageInfoToJ ipi - InstallPlan.Configured elab -> elaboratedPackageToJ False elab - InstallPlan.Installed elab -> elaboratedPackageToJ True elab - -- Note that the plan.json currently only uses the elaborated plan, - -- not the improved plan. So we will not get the Installed state for - -- that case, but the code supports it in case we want to use this - -- later in some use case where we want the status of the build. - - installedPackageInfoToJ :: InstalledPackageInfo -> J.Value - installedPackageInfoToJ ipi = - -- Pre-existing packages lack configuration information such as their flag - -- settings or non-lib components. We only get pre-existing packages for - -- the global/core packages however, so this isn't generally a problem. - -- So these packages are never local to the project. 
- -- - J.object - [ "type" J..= J.String "pre-existing" - , "id" J..= (jdisplay . installedUnitId) ipi - , "pkg-name" J..= (jdisplay . pkgName . packageId) ipi - , "pkg-version" J..= (jdisplay . pkgVersion . packageId) ipi - , "depends" J..= map jdisplay (installedDepends ipi) - ] - - elaboratedPackageToJ :: Bool -> ElaboratedConfiguredPackage -> J.Value - elaboratedPackageToJ isInstalled elab = - J.object $ - [ "type" J..= J.String (if isInstalled then "installed" - else "configured") - , "id" J..= (jdisplay . installedUnitId) elab - , "pkg-name" J..= (jdisplay . pkgName . packageId) elab - , "pkg-version" J..= (jdisplay . pkgVersion . packageId) elab - , "flags" J..= J.object [ PD.unFlagName fn J..= v - | (fn,v) <- PD.unFlagAssignment (elabFlagAssignment elab) ] - , "style" J..= J.String (style2str (elabLocalToProject elab) (elabBuildStyle elab)) - , "pkg-src" J..= packageLocationToJ (elabPkgSourceLocation elab) - ] ++ - [ "pkg-src-sha256" J..= J.String (showHashValue hash) - | Just hash <- [elabPkgSourceHash elab] ] ++ - (case elabBuildStyle elab of - BuildInplaceOnly -> - ["dist-dir" J..= J.String dist_dir] - BuildAndInstall -> - -- TODO: install dirs? - [] - ) ++ - case elabPkgOrComp elab of - ElabPackage pkg -> - let components = J.object $ - [ comp2str c J..= (J.object $ - [ "depends" J..= map (jdisplay . confInstId) ldeps - , "exe-depends" J..= map (jdisplay . confInstId) edeps ] ++ - bin_file c) - | (c,(ldeps,edeps)) - <- ComponentDeps.toList $ - ComponentDeps.zip (pkgLibDependencies pkg) - (pkgExeDependencies pkg) ] - in ["components" J..= components] - ElabComponent comp -> - ["depends" J..= map (jdisplay . 
confInstId) (elabLibDependencies elab) - ,"exe-depends" J..= map jdisplay (elabExeDependencies elab) - ,"component-name" J..= J.String (comp2str (compSolverName comp)) - ] ++ - bin_file (compSolverName comp) - where - packageLocationToJ :: PackageLocation (Maybe FilePath) -> J.Value - packageLocationToJ pkgloc = - case pkgloc of - LocalUnpackedPackage local -> - J.object [ "type" J..= J.String "local" - , "path" J..= J.String local - ] - LocalTarballPackage local -> - J.object [ "type" J..= J.String "local-tar" - , "path" J..= J.String local - ] - RemoteTarballPackage uri _ -> - J.object [ "type" J..= J.String "remote-tar" - , "uri" J..= J.String (show uri) - ] - RepoTarballPackage repo _ _ -> - J.object [ "type" J..= J.String "repo-tar" - , "repo" J..= repoToJ repo - ] - RemoteSourceRepoPackage srcRepo _ -> - J.object [ "type" J..= J.String "source-repo" - , "source-repo" J..= sourceRepoToJ srcRepo - ] - - repoToJ :: Repo -> J.Value - repoToJ repo = - case repo of - RepoLocal{..} -> - J.object [ "type" J..= J.String "local-repo" - , "path" J..= J.String repoLocalDir - ] - RepoRemote{..} -> - J.object [ "type" J..= J.String "remote-repo" - , "uri" J..= J.String (show (remoteRepoURI repoRemote)) - ] - RepoSecure{..} -> - J.object [ "type" J..= J.String "secure-repo" - , "uri" J..= J.String (show (remoteRepoURI repoRemote)) - ] - - sourceRepoToJ :: PD.SourceRepo -> J.Value - sourceRepoToJ PD.SourceRepo{..} = - J.object $ filter ((/= J.Null) . 
snd) $ - [ "type" J..= fmap jdisplay repoType - , "location" J..= fmap J.String repoLocation - , "module" J..= fmap J.String repoModule - , "branch" J..= fmap J.String repoBranch - , "tag" J..= fmap J.String repoTag - , "subdir" J..= fmap J.String repoSubdir - ] - - dist_dir = distBuildDirectory distDirLayout - (elabDistDirParams elaboratedSharedConfig elab) - - bin_file c = case c of - ComponentDeps.ComponentExe s -> bin_file' s - ComponentDeps.ComponentTest s -> bin_file' s - ComponentDeps.ComponentBench s -> bin_file' s - _ -> [] - bin_file' s = - ["bin-file" J..= J.String bin] - where - bin = if elabBuildStyle elab == BuildInplaceOnly - then dist_dir "build" display s display s - else InstallDirs.bindir (elabInstallDirs elab) display s - - -- TODO: maybe move this helper to "ComponentDeps" module? - -- Or maybe define a 'Text' instance? - comp2str :: ComponentDeps.Component -> String - comp2str c = case c of - ComponentDeps.ComponentLib -> "lib" - ComponentDeps.ComponentSubLib s -> "lib:" <> display s - ComponentDeps.ComponentFLib s -> "flib:" <> display s - ComponentDeps.ComponentExe s -> "exe:" <> display s - ComponentDeps.ComponentTest s -> "test:" <> display s - ComponentDeps.ComponentBench s -> "bench:" <> display s - ComponentDeps.ComponentSetup -> "setup" - - style2str :: Bool -> BuildStyle -> String - style2str True _ = "local" - style2str False BuildInplaceOnly = "inplace" - style2str False BuildAndInstall = "global" - - jdisplay :: Text a => a -> J.Value - jdisplay = J.String . display - - ------------------------------------------------------------------------------ --- Project status --- - --- So, what is the status of a project after a build? That is, how do the --- inputs (package source files etc) compare to the output artefacts (build --- libs, exes etc)? Do the outputs reflect the current values of the inputs --- or are outputs out of date or invalid? --- --- First of all, what do we mean by out-of-date and what do we mean by --- invalid? 
We think of the build system as a morally pure function that --- computes the output artefacts given input values. We say an output artefact --- is out of date when its value is not the value that would be computed by a --- build given the current values of the inputs. An output artefact can be --- out-of-date but still be perfectly usable; it simply correspond to a --- previous state of the inputs. --- --- On the other hand there are cases where output artefacts cannot safely be --- used. For example libraries and dynamically linked executables cannot be --- used when the libs they depend on change without them being recompiled --- themselves. Whether an artefact is still usable depends on what it is, e.g. --- dynamically linked vs statically linked and on how it gets updated (e.g. --- only atomically on success or if failure can leave invalid states). We need --- a definition (or two) that is independent of the kind of artefact and can --- be computed just in terms of changes in package graphs, but are still --- useful for determining when particular kinds of artefacts are invalid. --- --- Note that when we talk about packages in this context we just mean nodes --- in the elaborated install plan, which can be components or packages. --- --- There's obviously a close connection between packages being out of date and --- their output artefacts being unusable: most of the time if a package --- remains out of date at the end of a build then some of its output artefacts --- will be unusable. That is true most of the time because a build will have --- attempted to build one of the out-of-date package's dependencies. If the --- build of the dependency succeeded then it changed output artefacts (like --- libs) and if it failed then it may have failed after already changing --- things (think failure after updating some but not all .hi files). 
--- --- There are a few reasons we may end up with still-usable output artefacts --- for a package even when it remains out of date at the end of a build. --- Firstly if executing a plan fails then packages can be skipped, and thus we --- may have packages where all their dependencies were skipped. Secondly we --- have artefacts like statically linked executables which are not affected by --- libs they depend on being recompiled. Furthermore, packages can be out of --- date due to changes in build tools or Setup.hs scripts they depend on, but --- again libraries or executables in those out-of-date packages remain usable. --- --- So we have two useful definitions of invalid. Both are useful, for --- different purposes, so we will compute both. The first corresponds to the --- invalid libraries and dynamic executables. We say a package is invalid by --- changed deps if any of the packages it depends on (via library dep edges) --- were rebuilt (successfully or unsuccessfully). The second definition --- corresponds to invalid static executables. We say a package is invalid by --- a failed build simply if the package was built but unsuccessfully. --- --- So how do we find out what packages are out of date or invalid? --- --- Obviously we know something for all the packages that were part of the plan --- that was executed, but that is just a subset since we prune the plan down --- to the targets and their dependencies. --- --- Recall the steps we go though: --- --- + starting with the initial improved plan (this is the full project); --- --- + prune the plan to the user's build targets; --- --- + rebuildTargetsDryRun on the pruned plan giving us a BuildStatusMap --- covering the pruned subset of the original plan; --- --- + execute the plan giving us BuildOutcomes which tell us success/failure --- for each package. --- --- So given that the BuildStatusMap and BuildOutcomes do not cover everything --- in the original plan, what can they tell us about the original plan? 
--- --- The BuildStatusMap tells us directly that some packages are up to date and --- others out of date (but only for the pruned subset). But we know that --- everything that is a reverse dependency of an out-of-date package is itself --- out-of-date (whether or not it is in the pruned subset). Of course after --- a build the BuildOutcomes may tell us that some of those out-of-date --- packages are now up to date (ie a successful build outcome). --- --- The difference is packages that are reverse dependencies of out-of-date --- packages but are not brought up-to-date by the build (i.e. did not have --- successful outcomes, either because they failed or were not in the pruned --- subset to be built). We also know which packages were rebuilt, so we can --- use this to find the now-invalid packages. --- --- Note that there are still packages for which we cannot discover full status --- information. There may be packages outside of the pruned plan that do not --- depend on packages within the pruned plan that were discovered to be --- out-of-date. For these packages we do not know if their build artefacts --- are out-of-date or not. We do know however that they are not invalid, as --- that's not possible given our definition of invalid. Intuitively it is --- because we have not disturbed anything that these packages depend on, e.g. --- we've not rebuilt any libs they depend on. Recall that our widest --- definition of invalid was only concerned about dependencies on libraries --- (to cover problems like shared libs or GHC seeing inconsistent .hi files). --- --- So our algorithm for out-of-date packages is relatively simple: take the --- reverse dependency closure in the original improved plan (pre-pruning) of --- the out-of-date packages (as determined by the BuildStatusMap from the dry --- run). That gives a set of packages that were definitely out of date after --- the dry run. 
Now we remove from this set the packages that the --- BuildOutcomes tells us are now up-to-date after the build. The remaining --- set is the out-of-date packages. --- --- As for packages that are invalid by changed deps, we start with the plan --- dependency graph but keep only those edges that point to libraries (so --- ignoring deps on exes and setup scripts). We take the packages for which a --- build was attempted (successfully or unsuccessfully, but not counting --- knock-on failures) and take the reverse dependency closure. We delete from --- this set all the packages that were built successfully. Note that we do not --- need to intersect with the out-of-date packages since this follows --- automatically: all rev deps of packages we attempted to build must have --- been out of date at the start of the build, and if they were not built --- successfully then they're still out of date -- meeting our definition of --- invalid. - - -type PackageIdSet = Set UnitId -type PackagesUpToDate = PackageIdSet - -data PostBuildProjectStatus = PostBuildProjectStatus { - - -- | Packages that are known to be up to date. These were found to be - -- up to date before the build, or they have a successful build outcome - -- afterwards. - -- - -- This does not include any packages outside of the subset of the plan - -- that was executed because we did not check those and so don't know - -- for sure that they're still up to date. - -- - packagesDefinitelyUpToDate :: PackageIdSet, - - -- | Packages that are probably still up to date (and at least not - -- known to be out of date, and certainly not invalid). This includes - -- 'packagesDefinitelyUpToDate' plus packages that were up to date - -- previously and are outside of the subset of the plan that was - -- executed. It excludes 'packagesOutOfDate'. - -- - packagesProbablyUpToDate :: PackageIdSet, - - -- | Packages that are known to be out of date. 
These are packages - -- that were determined to be out of date before the build, and they - -- do not have a successful build outcome afterwards. - -- - -- Note that this can sometimes include packages outside of the subset - -- of the plan that was executed. For example suppose package A and B - -- depend on C, and A is the target so only A and C are in the subset - -- to be built. Now suppose C is found to have changed, then both A - -- and B are out-of-date before the build and since B is outside the - -- subset to be built then it will remain out of date. - -- - -- Note also that this is /not/ the inverse of - -- 'packagesDefinitelyUpToDate' or 'packagesProbablyUpToDate'. - -- There are packages where we have no information (ones that were not - -- in the subset of the plan that was executed). - -- - packagesOutOfDate :: PackageIdSet, - - -- | Packages that depend on libraries that have changed during the - -- build (either build success or failure). - -- - -- This corresponds to the fact that libraries and dynamic executables - -- are invalid once any of the libs they depend on change. - -- - -- This does include packages that themselves failed (i.e. it is a - -- superset of 'packagesInvalidByFailedBuild'). It does not include - -- changes in dependencies on executables (i.e. build tools). - -- - packagesInvalidByChangedLibDeps :: PackageIdSet, - - -- | Packages that themselves failed during the build (i.e. them - -- directly not a dep). - -- - -- This corresponds to the fact that static executables are invalid - -- in unlucky circumstances such as linking failing half way though, - -- or data file generation failing. - -- - -- This is a subset of 'packagesInvalidByChangedLibDeps'. - -- - packagesInvalidByFailedBuild :: PackageIdSet, - - -- | A subset of the plan graph, including only dependency-on-library - -- edges. That is, dependencies /on/ libraries, not dependencies /of/ - -- libraries. This tells us all the libraries that packages link to. 
- -- - -- This is here as a convenience, as strictly speaking it's not status - -- as it's just a function of the original 'ElaboratedInstallPlan'. - -- - packagesLibDepGraph :: Graph (Node UnitId ElaboratedPlanPackage), - - -- | As a convenience for 'Set.intersection' with any of the other - -- 'PackageIdSet's to select only packages that are part of the - -- project locally (i.e. with a local source dir). - -- - packagesBuildLocal :: PackageIdSet, - - -- | As a convenience for 'Set.intersection' with any of the other - -- 'PackageIdSet's to select only packages that are being built - -- in-place within the project (i.e. not destined for the store). - -- - packagesBuildInplace :: PackageIdSet, - - -- | As a convenience for 'Set.intersection' or 'Set.difference' with - -- any of the other 'PackageIdSet's to select only packages that were - -- pre-installed or already in the store prior to the build. - -- - packagesAlreadyInStore :: PackageIdSet - } - --- | Work out which packages are out of date or invalid after a build. 
--- -postBuildProjectStatus :: ElaboratedInstallPlan - -> PackagesUpToDate - -> BuildStatusMap - -> BuildOutcomes - -> PostBuildProjectStatus -postBuildProjectStatus plan previousPackagesUpToDate - pkgBuildStatus buildOutcomes = - PostBuildProjectStatus { - packagesDefinitelyUpToDate, - packagesProbablyUpToDate, - packagesOutOfDate, - packagesInvalidByChangedLibDeps, - packagesInvalidByFailedBuild, - -- convenience stuff - packagesLibDepGraph, - packagesBuildLocal, - packagesBuildInplace, - packagesAlreadyInStore - } - where - packagesDefinitelyUpToDate = - packagesUpToDatePreBuild - `Set.union` - packagesSuccessfulPostBuild - - packagesProbablyUpToDate = - packagesDefinitelyUpToDate - `Set.union` - (previousPackagesUpToDate' `Set.difference` packagesOutOfDatePreBuild) - - packagesOutOfDate = - packagesOutOfDatePreBuild `Set.difference` packagesSuccessfulPostBuild - - packagesInvalidByChangedLibDeps = - packagesDepOnChangedLib `Set.difference` packagesSuccessfulPostBuild - - packagesInvalidByFailedBuild = - packagesFailurePostBuild - - -- Note: if any of the intermediate values below turn out to be useful in - -- their own right then we can simply promote them to the result record - - -- The previous set of up-to-date packages will contain bogus package ids - -- when the solver plan or config contributing to the hash changes. - -- So keep only the ones where the package id (i.e. hash) is the same. - previousPackagesUpToDate' = - Set.intersection - previousPackagesUpToDate - (InstallPlan.keysSet plan) - - packagesUpToDatePreBuild = - Set.filter - (\ipkgid -> not (lookupBuildStatusRequiresBuild True ipkgid)) - -- For packages not in the plan subset we did the dry-run on we don't - -- know anything about their status, so not known to be /up to date/. - (InstallPlan.keysSet plan) - - packagesOutOfDatePreBuild = - Set.fromList . 
map installedUnitId $ - InstallPlan.reverseDependencyClosure plan - [ ipkgid - | pkg <- InstallPlan.toList plan - , let ipkgid = installedUnitId pkg - , lookupBuildStatusRequiresBuild False ipkgid - -- For packages not in the plan subset we did the dry-run on we don't - -- know anything about their status, so not known to be /out of date/. - ] - - packagesSuccessfulPostBuild = - Set.fromList - [ ikgid | (ikgid, Right _) <- Map.toList buildOutcomes ] - - -- direct failures, not failures due to deps - packagesFailurePostBuild = - Set.fromList - [ ikgid - | (ikgid, Left failure) <- Map.toList buildOutcomes - , case buildFailureReason failure of - DependentFailed _ -> False - _ -> True - ] - - -- Packages that have a library dependency on a package for which a build - -- was attempted - packagesDepOnChangedLib = - Set.fromList . map Graph.nodeKey $ - fromMaybe (error "packagesBuildStatusAfterBuild: broken dep closure") $ - Graph.revClosure packagesLibDepGraph - ( Map.keys - . Map.filter (uncurry buildAttempted) - $ Map.intersectionWith (,) pkgBuildStatus buildOutcomes - ) - - -- The plan graph but only counting dependency-on-library edges - packagesLibDepGraph :: Graph (Node UnitId ElaboratedPlanPackage) - packagesLibDepGraph = - Graph.fromDistinctList - [ Graph.N pkg (installedUnitId pkg) libdeps - | pkg <- InstallPlan.toList plan - , let libdeps = case pkg of - InstallPlan.PreExisting ipkg -> installedDepends ipkg - InstallPlan.Configured srcpkg -> elabLibDeps srcpkg - InstallPlan.Installed srcpkg -> elabLibDeps srcpkg - ] - elabLibDeps = map (newSimpleUnitId . confInstId) . elabLibDependencies - - -- Was a build was attempted for this package? - -- If it doesn't have both a build status and outcome then the answer is no. - buildAttempted :: BuildStatus -> BuildOutcome -> Bool - -- And not if it didn't need rebuilding in the first place. 
- buildAttempted buildStatus _buildOutcome - | not (buildStatusRequiresBuild buildStatus) - = False - - -- And not if it was skipped due to a dep failing first. - buildAttempted _ (Left BuildFailure {buildFailureReason}) - | DependentFailed _ <- buildFailureReason - = False - - -- Otherwise, succeeded or failed, yes the build was tried. - buildAttempted _ (Left BuildFailure {}) = True - buildAttempted _ (Right _) = True - - lookupBuildStatusRequiresBuild def ipkgid = - case Map.lookup ipkgid pkgBuildStatus of - Nothing -> def -- Not in the plan subset we did the dry-run on - Just buildStatus -> buildStatusRequiresBuild buildStatus - - packagesBuildLocal = - selectPlanPackageIdSet $ \pkg -> - case pkg of - InstallPlan.PreExisting _ -> False - InstallPlan.Installed _ -> False - InstallPlan.Configured srcpkg -> elabLocalToProject srcpkg - - packagesBuildInplace = - selectPlanPackageIdSet $ \pkg -> - case pkg of - InstallPlan.PreExisting _ -> False - InstallPlan.Installed _ -> False - InstallPlan.Configured srcpkg -> elabBuildStyle srcpkg - == BuildInplaceOnly - - packagesAlreadyInStore = - selectPlanPackageIdSet $ \pkg -> - case pkg of - InstallPlan.PreExisting _ -> True - InstallPlan.Installed _ -> True - InstallPlan.Configured _ -> False - - selectPlanPackageIdSet p = Map.keysSet - . 
Map.filter p - $ InstallPlan.toMap plan - - - -updatePostBuildProjectStatus :: Verbosity - -> DistDirLayout - -> ElaboratedInstallPlan - -> BuildStatusMap - -> BuildOutcomes - -> IO PostBuildProjectStatus -updatePostBuildProjectStatus verbosity distDirLayout - elaboratedInstallPlan - pkgsBuildStatus buildOutcomes = do - - -- Read the previous up-to-date set, update it and write it back - previousUpToDate <- readPackagesUpToDateCacheFile distDirLayout - let currentBuildStatus@PostBuildProjectStatus{..} - = postBuildProjectStatus - elaboratedInstallPlan - previousUpToDate - pkgsBuildStatus - buildOutcomes - let currentUpToDate = packagesProbablyUpToDate - writePackagesUpToDateCacheFile distDirLayout currentUpToDate - - -- Report various possibly interesting things - -- We additionally intersect with the packagesBuildInplace so that - -- we don't show huge numbers of boring packages from the store. - debugNoWrap verbosity $ - "packages definitely up to date: " - ++ displayPackageIdSet (packagesDefinitelyUpToDate - `Set.intersection` packagesBuildInplace) - - debugNoWrap verbosity $ - "packages previously probably up to date: " - ++ displayPackageIdSet (previousUpToDate - `Set.intersection` packagesBuildInplace) - - debugNoWrap verbosity $ - "packages now probably up to date: " - ++ displayPackageIdSet (packagesProbablyUpToDate - `Set.intersection` packagesBuildInplace) - - debugNoWrap verbosity $ - "packages newly up to date: " - ++ displayPackageIdSet (packagesDefinitelyUpToDate - `Set.difference` previousUpToDate - `Set.intersection` packagesBuildInplace) - - debugNoWrap verbosity $ - "packages out to date: " - ++ displayPackageIdSet (packagesOutOfDate - `Set.intersection` packagesBuildInplace) - - debugNoWrap verbosity $ - "packages invalid due to dep change: " - ++ displayPackageIdSet packagesInvalidByChangedLibDeps - - debugNoWrap verbosity $ - "packages invalid due to build failure: " - ++ displayPackageIdSet packagesInvalidByFailedBuild - - return 
currentBuildStatus - where - displayPackageIdSet = intercalate ", " . map display . Set.toList - --- | Helper for reading the cache file. --- --- This determines the type and format of the binary cache file. --- -readPackagesUpToDateCacheFile :: DistDirLayout -> IO PackagesUpToDate -readPackagesUpToDateCacheFile DistDirLayout{distProjectCacheFile} = - handleDoesNotExist Set.empty $ - handleDecodeFailure $ - withBinaryFile (distProjectCacheFile "up-to-date") ReadMode $ \hnd -> - Binary.decodeOrFailIO =<< BS.hGetContents hnd - where - handleDecodeFailure = fmap (either (const Set.empty) id) - --- | Helper for writing the package up-to-date cache file. --- --- This determines the type and format of the binary cache file. --- -writePackagesUpToDateCacheFile :: DistDirLayout -> PackagesUpToDate -> IO () -writePackagesUpToDateCacheFile DistDirLayout{distProjectCacheFile} upToDate = - writeFileAtomic (distProjectCacheFile "up-to-date") $ - Binary.encode upToDate - --- | Prepare a package environment that includes all the library dependencies --- for a plan. --- --- When running cabal new-exec, we want to set things up so that the compiler --- can find all the right packages (and nothing else). This function is --- intended to do that work. It takes a location where it can write files --- temporarily, in case the compiler wants to learn this information via the --- filesystem, and returns any environment variable overrides the compiler --- needs. 
-createPackageEnvironment :: Verbosity - -> FilePath - -> ElaboratedInstallPlan - -> ElaboratedSharedConfig - -> PostBuildProjectStatus - -> IO [(String, Maybe String)] -createPackageEnvironment verbosity - path - elaboratedPlan - elaboratedShared - buildStatus - | compilerFlavor (pkgConfigCompiler elaboratedShared) == GHC - = do - envFileM <- writePlanGhcEnvironment - path - elaboratedPlan - elaboratedShared - buildStatus - case envFileM of - Just envFile -> return [("GHC_ENVIRONMENT", Just envFile)] - Nothing -> do - warn verbosity "the configured version of GHC does not support reading package lists from the environment; commands that need the current project's package database are likely to fail" - return [] - | otherwise - = do - warn verbosity "package environment configuration is not supported for the currently configured compiler; commands that need the current project's package database are likely to fail" - return [] - --- Writing .ghc.environment files --- - -writePlanGhcEnvironment :: FilePath - -> ElaboratedInstallPlan - -> ElaboratedSharedConfig - -> PostBuildProjectStatus - -> IO (Maybe FilePath) -writePlanGhcEnvironment path - elaboratedInstallPlan - ElaboratedSharedConfig { - pkgConfigCompiler = compiler, - pkgConfigPlatform = platform - } - postBuildStatus - | compilerFlavor compiler == GHC - , supportsPkgEnvFiles (getImplInfo compiler) - --TODO: check ghcjs compat - = fmap Just $ writeGhcEnvironmentFile - path - platform (compilerVersion compiler) - (renderGhcEnvironmentFile path - elaboratedInstallPlan - postBuildStatus) - --TODO: [required eventually] support for writing user-wide package - -- environments, e.g. 
like a global project, but we would not put the - -- env file in the home dir, rather it lives under ~/.ghc/ - -writePlanGhcEnvironment _ _ _ _ = return Nothing - -renderGhcEnvironmentFile :: FilePath - -> ElaboratedInstallPlan - -> PostBuildProjectStatus - -> [GhcEnvironmentFileEntry] -renderGhcEnvironmentFile projectRootDir elaboratedInstallPlan - postBuildStatus = - headerComment - : simpleGhcEnvironmentFile packageDBs unitIds - where - headerComment = - GhcEnvFileComment - $ "This is a GHC environment file written by cabal. This means you can\n" - ++ "run ghc or ghci and get the environment of the project as a whole.\n" - ++ "But you still need to use cabal repl $target to get the environment\n" - ++ "of specific components (libs, exes, tests etc) because each one can\n" - ++ "have its own source dirs, cpp flags etc.\n\n" - unitIds = selectGhcEnvironmentFileLibraries postBuildStatus - packageDBs = relativePackageDBPaths projectRootDir $ - selectGhcEnvironmentFilePackageDbs elaboratedInstallPlan - - -argsEquivalentOfGhcEnvironmentFile - :: Compiler - -> DistDirLayout - -> ElaboratedInstallPlan - -> PostBuildProjectStatus - -> [String] -argsEquivalentOfGhcEnvironmentFile compiler = - case compilerId compiler - of CompilerId GHC _ -> argsEquivalentOfGhcEnvironmentFileGhc - CompilerId GHCJS _ -> argsEquivalentOfGhcEnvironmentFileGhc - CompilerId _ _ -> error "Only GHC and GHCJS are supported" - --- TODO remove this when we drop support for non-.ghc.env ghc -argsEquivalentOfGhcEnvironmentFileGhc - :: DistDirLayout - -> ElaboratedInstallPlan - -> PostBuildProjectStatus - -> [String] -argsEquivalentOfGhcEnvironmentFileGhc - distDirLayout - elaboratedInstallPlan - postBuildStatus = - clearPackageDbStackFlag - ++ packageDbArgsDb packageDBs - ++ foldMap packageIdFlag packageIds - where - projectRootDir = distProjectRootDirectory distDirLayout - packageIds = selectGhcEnvironmentFileLibraries postBuildStatus - packageDBs = relativePackageDBPaths projectRootDir $ - 
selectGhcEnvironmentFilePackageDbs elaboratedInstallPlan - -- TODO use proper flags? but packageDbArgsDb is private - clearPackageDbStackFlag = ["-clear-package-db", "-global-package-db"] - packageIdFlag uid = ["-package-id", display uid] - - --- We're producing an environment for users to use in ghci, so of course --- that means libraries only (can't put exes into the ghc package env!). --- The library environment should be /consistent/ with the environment --- that each of the packages in the project use (ie same lib versions). --- So that means all the normal library dependencies of all the things --- in the project (including deps of exes that are local to the project). --- We do not however want to include the dependencies of Setup.hs scripts, --- since these are generally uninteresting but also they need not in --- general be consistent with the library versions that packages local to --- the project use (recall that Setup.hs script's deps can be picked --- independently of other packages in the project). --- --- So, our strategy is as follows: --- --- produce a dependency graph of all the packages in the install plan, --- but only consider normal library deps as edges in the graph. Thus we --- exclude the dependencies on Setup.hs scripts (in the case of --- per-component granularity) or of Setup.hs scripts (in the case of --- per-package granularity). Then take a dependency closure, using as --- roots all the packages/components local to the project. This will --- exclude Setup scripts and their dependencies. --- --- Note: this algorithm will have to be adapted if/when the install plan --- is extended to cover multiple compilers at once, and may also have to --- change if we start to treat unshared deps of test suites in a similar --- way to how we treat Setup.hs script deps (ie being able to pick them --- independently). 
--- --- Since we had to use all the local packages, including exes, (as roots --- to find the libs) then those exes still end up in our list so we have --- to filter them out at the end. --- -selectGhcEnvironmentFileLibraries :: PostBuildProjectStatus -> [UnitId] -selectGhcEnvironmentFileLibraries PostBuildProjectStatus{..} = - case Graph.closure packagesLibDepGraph (Set.toList packagesBuildLocal) of - Nothing -> error "renderGhcEnvironmentFile: broken dep closure" - Just nodes -> [ pkgid | Graph.N pkg pkgid _ <- nodes - , hasUpToDateLib pkg ] - where - hasUpToDateLib planpkg = case planpkg of - -- A pre-existing global lib - InstallPlan.PreExisting _ -> True - - -- A package in the store. Check it's a lib. - InstallPlan.Installed pkg -> elabRequiresRegistration pkg - - -- A package we were installing this time, either destined for the store - -- or just locally. Check it's a lib and that it is probably up to date. - InstallPlan.Configured pkg -> - elabRequiresRegistration pkg - && installedUnitId pkg `Set.member` packagesProbablyUpToDate - - -selectGhcEnvironmentFilePackageDbs :: ElaboratedInstallPlan -> PackageDBStack -selectGhcEnvironmentFilePackageDbs elaboratedInstallPlan = - -- If we have any inplace packages then their package db stack is the - -- one we should use since it'll include the store + the local db but - -- it's certainly possible to have no local inplace packages - -- e.g. just "extra" packages coming from the store. 
- case (inplacePackages, sourcePackages) of - ([], pkgs) -> checkSamePackageDBs pkgs - (pkgs, _) -> checkSamePackageDBs pkgs - where - checkSamePackageDBs pkgs = - case ordNub (map elabBuildPackageDBStack pkgs) of - [packageDbs] -> packageDbs - [] -> [] - _ -> error $ "renderGhcEnvironmentFile: packages with " - ++ "different package db stacks" - -- This should not happen at the moment but will happen as soon - -- as we support projects where we build packages with different - -- compilers, at which point we have to consider how to adapt - -- this feature, e.g. write out multiple env files, one for each - -- compiler / project profile. - - inplacePackages = - [ srcpkg - | srcpkg <- sourcePackages - , elabBuildStyle srcpkg == BuildInplaceOnly ] - sourcePackages = - [ srcpkg - | pkg <- InstallPlan.toList elaboratedInstallPlan - , srcpkg <- maybeToList $ case pkg of - InstallPlan.Configured srcpkg -> Just srcpkg - InstallPlan.Installed srcpkg -> Just srcpkg - InstallPlan.PreExisting _ -> Nothing - ] - -relativePackageDBPaths :: FilePath -> PackageDBStack -> PackageDBStack -relativePackageDBPaths relroot = map (relativePackageDBPath relroot) - -relativePackageDBPath :: FilePath -> PackageDB -> PackageDB -relativePackageDBPath relroot pkgdb = - case pkgdb of - GlobalPackageDB -> GlobalPackageDB - UserPackageDB -> UserPackageDB - SpecificPackageDB path -> SpecificPackageDB relpath - where relpath = makeRelative relroot path diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/RebuildMonad.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/RebuildMonad.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/RebuildMonad.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/RebuildMonad.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,311 +0,0 @@ -{-# LANGUAGE 
GeneralizedNewtypeDeriving, ScopedTypeVariables, BangPatterns #-} - --- | An abstraction for re-running actions if values or files have changed. --- --- This is not a full-blown make-style incremental build system, it's a bit --- more ad-hoc than that, but it's easier to integrate with existing code. --- --- It's a convenient interface to the "Distribution.Client.FileMonitor" --- functions. --- -module Distribution.Client.RebuildMonad ( - -- * Rebuild monad - Rebuild, - runRebuild, - execRebuild, - askRoot, - - -- * Setting up file monitoring - monitorFiles, - MonitorFilePath, - monitorFile, - monitorFileHashed, - monitorNonExistentFile, - monitorDirectory, - monitorNonExistentDirectory, - monitorDirectoryExistence, - monitorFileOrDirectory, - monitorFileSearchPath, - monitorFileHashedSearchPath, - -- ** Monitoring file globs - monitorFileGlob, - monitorFileGlobExistence, - FilePathGlob(..), - FilePathRoot(..), - FilePathGlobRel(..), - GlobPiece(..), - - -- * Using a file monitor - FileMonitor(..), - newFileMonitor, - rerunIfChanged, - - -- * Utils - delayInitSharedResource, - delayInitSharedResources, - matchFileGlob, - getDirectoryContentsMonitored, - createDirectoryMonitored, - monitorDirectoryStatus, - doesFileExistMonitored, - need, - needIfExists, - findFileWithExtensionMonitored, - findFirstFileMonitored, - findFileMonitored, - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.FileMonitor -import Distribution.Client.Glob hiding (matchFileGlob) -import qualified Distribution.Client.Glob as Glob (matchFileGlob) - -import Distribution.Simple.Utils (debug) -import Distribution.Verbosity (Verbosity) - -import qualified Data.Map.Strict as Map -import Control.Monad.State as State -import Control.Monad.Reader as Reader -import Control.Concurrent.MVar (MVar, newMVar, modifyMVar) -import System.FilePath -import System.Directory - - --- | A monad layered on top of 'IO' to help with re-running actions when the --- 
input files and values they depend on change. The crucial operations are --- 'rerunIfChanged' and 'monitorFiles'. --- -newtype Rebuild a = Rebuild (ReaderT FilePath (StateT [MonitorFilePath] IO) a) - deriving (Functor, Applicative, Monad, MonadIO) - --- | Use this wihin the body action of 'rerunIfChanged' to declare that the --- action depends on the given files. This can be based on what the action --- actually did. It is these files that will be checked for changes next --- time 'rerunIfChanged' is called for that 'FileMonitor'. --- --- Relative paths are interpreted as relative to an implicit root, ultimately --- passed in to 'runRebuild'. --- -monitorFiles :: [MonitorFilePath] -> Rebuild () -monitorFiles filespecs = Rebuild (State.modify (filespecs++)) - --- | Run a 'Rebuild' IO action. -unRebuild :: FilePath -> Rebuild a -> IO (a, [MonitorFilePath]) -unRebuild rootDir (Rebuild action) = runStateT (runReaderT action rootDir) [] - --- | Run a 'Rebuild' IO action. -runRebuild :: FilePath -> Rebuild a -> IO a -runRebuild rootDir (Rebuild action) = evalStateT (runReaderT action rootDir) [] - --- | Run a 'Rebuild' IO action. -execRebuild :: FilePath -> Rebuild a -> IO [MonitorFilePath] -execRebuild rootDir (Rebuild action) = execStateT (runReaderT action rootDir) [] - --- | The root that relative paths are interpreted as being relative to. -askRoot :: Rebuild FilePath -askRoot = Rebuild Reader.ask - --- | This captures the standard use pattern for a 'FileMonitor': given a --- monitor, an action and the input value the action depends on, either --- re-run the action to get its output, or if the value and files the action --- depends on have not changed then return a previously cached action result. --- --- The result is still in the 'Rebuild' monad, so these can be nested. --- --- Do not share 'FileMonitor's between different uses of 'rerunIfChanged'. 
--- -rerunIfChanged :: (Binary a, Binary b) - => Verbosity - -> FileMonitor a b - -> a - -> Rebuild b - -> Rebuild b -rerunIfChanged verbosity monitor key action = do - rootDir <- askRoot - changed <- liftIO $ checkFileMonitorChanged monitor rootDir key - case changed of - MonitorUnchanged result files -> do - liftIO $ debug verbosity $ "File monitor '" ++ monitorName - ++ "' unchanged." - monitorFiles files - return result - - MonitorChanged reason -> do - liftIO $ debug verbosity $ "File monitor '" ++ monitorName - ++ "' changed: " ++ showReason reason - startTime <- liftIO $ beginUpdateFileMonitor - (result, files) <- liftIO $ unRebuild rootDir action - liftIO $ updateFileMonitor monitor rootDir - (Just startTime) files key result - monitorFiles files - return result - where - monitorName = takeFileName (fileMonitorCacheFile monitor) - - showReason (MonitoredFileChanged file) = "file " ++ file - showReason (MonitoredValueChanged _) = "monitor value changed" - showReason MonitorFirstRun = "first run" - showReason MonitorCorruptCache = "invalid cache file" - - --- | When using 'rerunIfChanged' for each element of a list of actions, it is --- sometimes the case that each action needs to make use of some resource. e.g. --- --- > sequence --- > [ rerunIfChanged verbosity monitor key $ do --- > resource <- mkResource --- > ... -- use the resource --- > | ... ] --- --- For efficiency one would like to share the resource between the actions --- but the straightforward way of doing this means initialising it every time --- even when no actions need re-running. --- --- > resource <- mkResource --- > sequence --- > [ rerunIfChanged verbosity monitor key $ do --- > ... -- use the resource --- > | ... ] --- --- This utility allows one to get the best of both worlds: --- --- > getResource <- delayInitSharedResource mkResource --- > sequence --- > [ rerunIfChanged verbosity monitor key $ do --- > resource <- getResource --- > ... -- use the resource --- > | ... 
] --- -delayInitSharedResource :: forall a. IO a -> Rebuild (Rebuild a) -delayInitSharedResource action = do - var <- liftIO (newMVar Nothing) - return (liftIO (getOrInitResource var)) - where - getOrInitResource :: MVar (Maybe a) -> IO a - getOrInitResource var = - modifyMVar var $ \mx -> - case mx of - Just x -> return (Just x, x) - Nothing -> do - x <- action - return (Just x, x) - - --- | Much like 'delayInitSharedResource' but for a keyed set of resources. --- --- > getResource <- delayInitSharedResource mkResource --- > sequence --- > [ rerunIfChanged verbosity monitor key $ do --- > resource <- getResource key --- > ... -- use the resource --- > | ... ] --- -delayInitSharedResources :: forall k v. Ord k - => (k -> IO v) - -> Rebuild (k -> Rebuild v) -delayInitSharedResources action = do - var <- liftIO (newMVar Map.empty) - return (liftIO . getOrInitResource var) - where - getOrInitResource :: MVar (Map k v) -> k -> IO v - getOrInitResource var k = - modifyMVar var $ \m -> - case Map.lookup k m of - Just x -> return (m, x) - Nothing -> do - x <- action k - let !m' = Map.insert k x m - return (m', x) - - --- | Utility to match a file glob against the file system, starting from a --- given root directory. The results are all relative to the given root. --- --- Since this operates in the 'Rebuild' monad, it also monitors the given glob --- for changes. 
--- -matchFileGlob :: FilePathGlob -> Rebuild [FilePath] -matchFileGlob glob = do - root <- askRoot - monitorFiles [monitorFileGlobExistence glob] - liftIO $ Glob.matchFileGlob root glob - -getDirectoryContentsMonitored :: FilePath -> Rebuild [FilePath] -getDirectoryContentsMonitored dir = do - exists <- monitorDirectoryStatus dir - if exists - then liftIO $ getDirectoryContents dir - else return [] - -createDirectoryMonitored :: Bool -> FilePath -> Rebuild () -createDirectoryMonitored createParents dir = do - monitorFiles [monitorDirectoryExistence dir] - liftIO $ createDirectoryIfMissing createParents dir - --- | Monitor a directory as in 'monitorDirectory' if it currently exists or --- as 'monitorNonExistentDirectory' if it does not. -monitorDirectoryStatus :: FilePath -> Rebuild Bool -monitorDirectoryStatus dir = do - exists <- liftIO $ doesDirectoryExist dir - monitorFiles [if exists - then monitorDirectory dir - else monitorNonExistentDirectory dir] - return exists - --- | Like 'doesFileExist', but in the 'Rebuild' monad. This does --- NOT track the contents of 'FilePath'; use 'need' in that case. -doesFileExistMonitored :: FilePath -> Rebuild Bool -doesFileExistMonitored f = do - root <- askRoot - exists <- liftIO $ doesFileExist (root f) - monitorFiles [if exists - then monitorFileExistence f - else monitorNonExistentFile f] - return exists - --- | Monitor a single file -need :: FilePath -> Rebuild () -need f = monitorFiles [monitorFileHashed f] - --- | Monitor a file if it exists; otherwise check for when it --- gets created. This is a bit better for recompilation avoidance --- because sometimes users give bad package metadata, and we don't --- want to repeatedly rebuild in this case (which we would if we --- need'ed a non-existent file). 
-needIfExists :: FilePath -> Rebuild () -needIfExists f = do - root <- askRoot - exists <- liftIO $ doesFileExist (root f) - monitorFiles [if exists - then monitorFileHashed f - else monitorNonExistentFile f] - --- | Like 'findFileWithExtension', but in the 'Rebuild' monad. -findFileWithExtensionMonitored - :: [String] - -> [FilePath] - -> FilePath - -> Rebuild (Maybe FilePath) -findFileWithExtensionMonitored extensions searchPath baseName = - findFirstFileMonitored id - [ path baseName <.> ext - | path <- nub searchPath - , ext <- nub extensions ] - --- | Like 'findFirstFile', but in the 'Rebuild' monad. -findFirstFileMonitored :: (a -> FilePath) -> [a] -> Rebuild (Maybe a) -findFirstFileMonitored file = findFirst - where findFirst [] = return Nothing - findFirst (x:xs) = do exists <- doesFileExistMonitored (file x) - if exists - then return (Just x) - else findFirst xs - --- | Like 'findFile', but in the 'Rebuild' monad. -findFileMonitored :: [FilePath] -> FilePath -> Rebuild (Maybe FilePath) -findFileMonitored searchPath fileName = - findFirstFileMonitored id - [ path fileName - | path <- nub searchPath] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Reconfigure.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Reconfigure.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Reconfigure.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Reconfigure.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,233 +0,0 @@ -module Distribution.Client.Reconfigure ( Check(..), reconfigure ) where - -import Distribution.Client.Compat.Prelude - -import Data.Monoid ( Any(..) 
) -import System.Directory ( doesFileExist ) - -import Distribution.Verbosity - -import Distribution.Simple.Configure ( localBuildInfoFile ) -import Distribution.Simple.Setup ( Flag, flagToMaybe, toFlag ) -import Distribution.Simple.Utils - ( existsAndIsMoreRecentThan, defaultPackageDesc, info ) - -import Distribution.Client.Config ( SavedConfig(..) ) -import Distribution.Client.Configure ( readConfigFlags ) -import Distribution.Client.Nix ( findNixExpr, inNixShell, nixInstantiate ) -import Distribution.Client.Sandbox - ( WereDepsReinstalled(..), findSavedDistPref, getSandboxConfigFilePath - , maybeReinstallAddSourceDeps, updateInstallDirs ) -import Distribution.Client.Sandbox.PackageEnvironment - ( userPackageEnvironmentFile ) -import Distribution.Client.Sandbox.Types ( UseSandbox(..) ) -import Distribution.Client.Setup - ( ConfigFlags(..), ConfigExFlags, GlobalFlags(..) - , SkipAddSourceDepsCheck(..) ) - - --- | @Check@ represents a function to check some condition on type @a@. The --- returned 'Any' is 'True' if any part of the condition failed. -newtype Check a = Check { - runCheck :: Any -- Did any previous check fail? - -> a -- value returned by previous checks - -> IO (Any, a) -- Did this check fail? What value is returned? -} - -instance Semigroup (Check a) where - (<>) c d = Check $ \any0 a0 -> do - (any1, a1) <- runCheck c any0 a0 - (any2, a2) <- runCheck d (any0 <> any1) a1 - return (any0 <> any1 <> any2, a2) - -instance Monoid (Check a) where - mempty = Check $ \_ a -> return (mempty, a) - mappend = (<>) - - --- | Re-configure the package in the current directory if needed. Deciding --- when to reconfigure and with which options is convoluted: --- --- If we are reconfiguring, we must always run @configure@ with the --- verbosity option we are given; however, that a previous configuration --- uses a different verbosity setting is not reason enough to reconfigure. 
--- --- The package should be configured to use the same \"dist\" prefix as --- given to the @build@ command, otherwise the build will probably --- fail. Not only does this determine the \"dist\" prefix setting if we --- need to reconfigure anyway, but an existing configuration should be --- invalidated if its \"dist\" prefix differs. --- --- If the package has never been configured (i.e., there is no --- LocalBuildInfo), we must configure first, using the default options. --- --- If the package has been configured, there will be a 'LocalBuildInfo'. --- If there no package description file, we assume that the --- 'PackageDescription' is up to date, though the configuration may need --- to be updated for other reasons (see above). If there is a package --- description file, and it has been modified since the 'LocalBuildInfo' --- was generated, then we need to reconfigure. --- --- The caller of this function may also have specific requirements --- regarding the flags the last configuration used. For example, --- 'testAction' requires that the package be configured with test suites --- enabled. The caller may pass the required settings to this function --- along with a function to check the validity of the saved 'ConfigFlags'; --- these required settings will be checked first upon determining that --- a previous configuration exists. -reconfigure - :: ((ConfigFlags, ConfigExFlags) -> [String] -> GlobalFlags -> IO ()) - -- ^ configure action - -> Verbosity - -- ^ Verbosity setting - -> FilePath - -- ^ \"dist\" prefix - -> UseSandbox - -> SkipAddSourceDepsCheck - -- ^ Should we skip the timestamp check for modified - -- add-source dependencies? - -> Flag (Maybe Int) - -- ^ -j flag for reinstalling add-source deps. - -> Check (ConfigFlags, ConfigExFlags) - -- ^ Check that the required flags are set. - -- If they are not set, provide a message explaining the - -- reason for reconfiguration. 
- -> [String] -- ^ Extra arguments - -> GlobalFlags -- ^ Global flags - -> SavedConfig - -> IO SavedConfig -reconfigure - configureAction - verbosity - dist - useSandbox - skipAddSourceDepsCheck - numJobsFlag - check - extraArgs - globalFlags - config - = do - - savedFlags@(_, _) <- readConfigFlags dist - - useNix <- fmap isJust (findNixExpr globalFlags config) - alreadyInNixShell <- inNixShell - - if useNix && not alreadyInNixShell - then do - - -- If we are using Nix, we must reinstantiate the derivation outside - -- the shell. Eventually, the caller will invoke 'nixShell' which will - -- rerun cabal inside the shell. That will bring us back to 'reconfigure', - -- but inside the shell we'll take the second branch, below. - - -- This seems to have a problem: won't 'configureAction' call 'nixShell' - -- yet again, spawning an infinite tree of subprocesses? - -- No, because 'nixShell' doesn't spawn a new process if it is already - -- running in a Nix shell. - - nixInstantiate verbosity dist False globalFlags config - return config - - else do - - let checks = - checkVerb - <> checkDist - <> checkOutdated - <> check - <> checkAddSourceDeps - (Any force, flags@(configFlags, _)) <- runCheck checks mempty savedFlags - - let (_, config') = - updateInstallDirs - (configUserInstall configFlags) - (useSandbox, config) - - when force $ configureAction flags extraArgs globalFlags - return config' - - where - - -- Changing the verbosity does not require reconfiguration, but the new - -- verbosity should be used if reconfiguring. - checkVerb = Check $ \_ (configFlags, configExFlags) -> do - let configFlags' = configFlags { configVerbosity = toFlag verbosity} - return (mempty, (configFlags', configExFlags)) - - -- Reconfiguration is required if @--build-dir@ changes. - checkDist = Check $ \_ (configFlags, configExFlags) -> do - -- Always set the chosen @--build-dir@ before saving the flags, - -- or bad things could happen. 
- savedDist <- findSavedDistPref config (configDistPref configFlags) - let distChanged = dist /= savedDist - when distChanged $ info verbosity "build directory changed" - let configFlags' = configFlags { configDistPref = toFlag dist } - return (Any distChanged, (configFlags', configExFlags)) - - checkOutdated = Check $ \_ flags@(configFlags, _) -> do - let buildConfig = localBuildInfoFile dist - - -- Has the package ever been configured? If not, reconfiguration is - -- required. - configured <- doesFileExist buildConfig - unless configured $ info verbosity "package has never been configured" - - -- Is the configuration older than the sandbox configuration file? - -- If so, reconfiguration is required. - sandboxConfig <- getSandboxConfigFilePath globalFlags - sandboxConfigNewer <- existsAndIsMoreRecentThan sandboxConfig buildConfig - when sandboxConfigNewer $ - info verbosity "sandbox was created after the package was configured" - - -- Is the @cabal.config@ file newer than @dist/setup.config@? Then we need - -- to force reconfigure. Note that it's possible to use @cabal.config@ - -- even without sandboxes. - userPackageEnvironmentFileModified <- - existsAndIsMoreRecentThan userPackageEnvironmentFile buildConfig - when userPackageEnvironmentFileModified $ - info verbosity ("user package environment file ('" - ++ userPackageEnvironmentFile ++ "') was modified") - - -- Is the configuration older than the package description? 
- descrFile <- maybe (defaultPackageDesc verbosity) return - (flagToMaybe (configCabalFilePath configFlags)) - outdated <- existsAndIsMoreRecentThan descrFile buildConfig - when outdated $ info verbosity (descrFile ++ " was changed") - - let failed = - Any outdated - <> Any userPackageEnvironmentFileModified - <> Any sandboxConfigNewer - <> Any (not configured) - return (failed, flags) - - checkAddSourceDeps = Check $ \(Any force') flags@(configFlags, _) -> do - let (_, config') = - updateInstallDirs - (configUserInstall configFlags) - (useSandbox, config) - - skipAddSourceDepsCheck' - | force' = SkipAddSourceDepsCheck - | otherwise = skipAddSourceDepsCheck - - when (skipAddSourceDepsCheck' == SkipAddSourceDepsCheck) $ - info verbosity "skipping add-source deps check" - - -- Were any add-source dependencies reinstalled in the sandbox? - depsReinstalled <- - case skipAddSourceDepsCheck' of - DontSkipAddSourceDepsCheck -> - maybeReinstallAddSourceDeps - verbosity numJobsFlag configFlags globalFlags - (useSandbox, config') - SkipAddSourceDepsCheck -> do - return NoDepsReinstalled - - case depsReinstalled of - NoDepsReinstalled -> return (mempty, flags) - ReinstalledSomeDeps -> do - info verbosity "some add-source dependencies were reinstalled" - return (Any True, flags) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Run.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Run.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Run.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Run.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,143 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Run --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Implementation of 
the 'run' command. ------------------------------------------------------------------------------ - -module Distribution.Client.Run ( run, splitRunArgs ) - where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Types.TargetInfo (targetCLBI) -import Distribution.Types.LocalBuildInfo (componentNameTargets') - -import Distribution.Client.Utils (tryCanonicalizePath) - -import Distribution.Types.UnqualComponentName -import Distribution.PackageDescription (Executable (..), - TestSuite(..), - Benchmark(..), - PackageDescription (..), - BuildInfo(buildable)) -import Distribution.Simple.Compiler (compilerFlavor, CompilerFlavor(..)) -import Distribution.Simple.Build.PathsModule (pkgPathEnvVar) -import Distribution.Simple.BuildPaths (exeExtension) -import Distribution.Simple.LocalBuildInfo (ComponentName (..), - LocalBuildInfo (..), - depLibraryPaths) -import Distribution.Simple.Utils (die', notice, warn, - rawSystemExitWithEnv, - addLibraryPath) -import Distribution.System (Platform (..)) -import Distribution.Verbosity (Verbosity) -import Distribution.Text (display) - -import qualified Distribution.Simple.GHCJS as GHCJS - -import System.Directory (getCurrentDirectory) -import Distribution.Compat.Environment (getEnvironment) -import System.FilePath ((<.>), ()) - - --- | Return the executable to run and any extra arguments that should be --- forwarded to it. Die in case of error. -splitRunArgs :: Verbosity -> LocalBuildInfo -> [String] - -> IO (Executable, [String]) -splitRunArgs verbosity lbi args = - case whichExecutable of -- Either err (wasManuallyChosen, exe, paramsRest) - Left err -> do - warn verbosity `traverse_` maybeWarning -- If there is a warning, print it. - die' verbosity err - Right (True, exe, xs) -> return (exe, xs) - Right (False, exe, xs) -> do - let addition = " Interpreting all parameters to `run` as a parameter to" - ++ " the default executable." - -- If there is a warning, print it together with the addition. 
- warn verbosity `traverse_` fmap (++addition) maybeWarning - return (exe, xs) - where - pkg_descr = localPkgDescr lbi - whichExecutable :: Either String -- Error string. - ( Bool -- If it was manually chosen. - , Executable -- The executable. - , [String] -- The remaining parameters. - ) - whichExecutable = case (enabledExes, args) of - ([] , _) -> Left "Couldn't find any enabled executables." - ([exe], []) -> return (False, exe, []) - ([exe], (x:xs)) - | x == unUnqualComponentName (exeName exe) -> return (True, exe, xs) - | otherwise -> return (False, exe, args) - (_ , []) -> Left - $ "This package contains multiple executables. " - ++ "You must pass the executable name as the first argument " - ++ "to 'cabal run'." - (_ , (x:xs)) -> - case find (\exe -> unUnqualComponentName (exeName exe) == x) enabledExes of - Nothing -> Left $ "No executable named '" ++ x ++ "'." - Just exe -> return (True, exe, xs) - where - enabledExes = filter (buildable . buildInfo) (executables pkg_descr) - - maybeWarning :: Maybe String - maybeWarning = case args of - [] -> Nothing - (x:_) -> lookup (mkUnqualComponentName x) components - where - components :: [(UnqualComponentName, String)] -- Component name, message. - components = - [ (name, "The executable '" ++ display name ++ "' is disabled.") - | e <- executables pkg_descr - , not . buildable . buildInfo $ e, let name = exeName e] - - ++ [ (name, "There is a test-suite '" ++ display name ++ "'," - ++ " but the `run` command is only for executables.") - | t <- testSuites pkg_descr - , let name = testName t] - - ++ [ (name, "There is a benchmark '" ++ display name ++ "'," - ++ " but the `run` command is only for executables.") - | b <- benchmarks pkg_descr - , let name = benchmarkName b] - --- | Run a given executable. 
-run :: Verbosity -> LocalBuildInfo -> Executable -> [String] -> IO () -run verbosity lbi exe exeArgs = do - curDir <- getCurrentDirectory - let buildPref = buildDir lbi - pkg_descr = localPkgDescr lbi - dataDirEnvVar = (pkgPathEnvVar pkg_descr "datadir", - curDir dataDir pkg_descr) - - (path, runArgs) <- - let exeName' = display $ exeName exe - in case compilerFlavor (compiler lbi) of - GHCJS -> do - let (script, cmd, cmdArgs) = - GHCJS.runCmd (withPrograms lbi) - (buildPref exeName' exeName') - script' <- tryCanonicalizePath script - return (cmd, cmdArgs ++ [script']) - _ -> do - p <- tryCanonicalizePath $ - buildPref exeName' (exeName' <.> exeExtension (hostPlatform lbi)) - return (p, []) - - env <- (dataDirEnvVar:) <$> getEnvironment - -- Add (DY)LD_LIBRARY_PATH if needed - env' <- if withDynExe lbi - then do let (Platform _ os) = hostPlatform lbi - clbi <- case componentNameTargets' pkg_descr lbi (CExeName (exeName exe)) of - [target] -> return (targetCLBI target) - [] -> die' verbosity "run: Could not find executable in LocalBuildInfo" - _ -> die' verbosity "run: Found multiple matching exes in LocalBuildInfo" - paths <- depLibraryPaths True False lbi clbi - return (addLibraryPath os paths env) - else return env - notice verbosity $ "Running " ++ display (exeName exe) ++ "..." 
- rawSystemExitWithEnv verbosity path (runArgs++exeArgs) env' diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/Index.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/Index.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/Index.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/Index.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,285 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Sandbox.Index --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Querying and modifying local build tree references in the package index. ------------------------------------------------------------------------------ - -module Distribution.Client.Sandbox.Index ( - createEmpty, - addBuildTreeRefs, - removeBuildTreeRefs, - ListIgnoredBuildTreeRefs(..), RefTypesToList(..), - DeleteSourceError(..), - listBuildTreeRefs, - validateIndexPath, - - defaultIndexFileName - ) where - -import qualified Codec.Archive.Tar as Tar -import qualified Codec.Archive.Tar.Entry as Tar -import qualified Codec.Archive.Tar.Index as Tar -import qualified Distribution.Client.Tar as Tar -import Distribution.Client.IndexUtils ( BuildTreeRefType(..) - , refTypeFromTypeCode - , typeCodeFromRefType - , updatePackageIndexCacheFile - , readCacheStrict - , Index(..) 
) -import qualified Distribution.Client.IndexUtils as IndexUtils -import Distribution.Client.Utils ( byteStringToFilePath, filePathToByteString - , makeAbsoluteToCwd, tryCanonicalizePath - , tryFindAddSourcePackageDesc ) - -import Distribution.Simple.Utils ( die', debug ) -import Distribution.Compat.Exception ( tryIO ) -import Distribution.Verbosity ( Verbosity ) - -import qualified Data.ByteString.Lazy as BS -import Control.DeepSeq ( NFData(rnf) ) -import Control.Exception ( evaluate, throw, Exception ) -import Control.Monad ( liftM, unless ) -import Control.Monad.Writer.Lazy (WriterT(..), runWriterT, tell) -import Data.List ( (\\), intersect, nub, find ) -import Data.Maybe ( catMaybes ) -import Data.Either (partitionEithers) -import System.Directory ( createDirectoryIfMissing, - doesDirectoryExist, doesFileExist, - renameFile, canonicalizePath) -import System.FilePath ( (), (<.>), takeDirectory, takeExtension ) -import System.IO ( IOMode(..), withBinaryFile ) - --- | A reference to a local build tree. -data BuildTreeRef = BuildTreeRef { - buildTreeRefType :: !BuildTreeRefType, - buildTreePath :: !FilePath - } - -instance NFData BuildTreeRef where - rnf (BuildTreeRef _ fp) = rnf fp - -defaultIndexFileName :: FilePath -defaultIndexFileName = "00-index.tar" - --- | Given a path, ensure that it refers to a local build tree. -buildTreeRefFromPath :: Verbosity -> BuildTreeRefType -> FilePath -> IO (Maybe BuildTreeRef) -buildTreeRefFromPath verbosity refType dir = do - dirExists <- doesDirectoryExist dir - unless dirExists $ - die' verbosity $ "directory '" ++ dir ++ "' does not exist" - _ <- tryFindAddSourcePackageDesc verbosity dir "Error adding source reference." - return . Just $ BuildTreeRef refType dir - --- | Given a tar archive entry, try to parse it as a local build tree reference. 
-readBuildTreeRef :: Tar.Entry -> Maybe BuildTreeRef -readBuildTreeRef entry = case Tar.entryContent entry of - (Tar.OtherEntryType typeCode bs size) - | (Tar.isBuildTreeRefTypeCode typeCode) - && (size == BS.length bs) -> Just $! BuildTreeRef - (refTypeFromTypeCode typeCode) - (byteStringToFilePath bs) - | otherwise -> Nothing - _ -> Nothing - --- | Given a sequence of tar archive entries, extract all references to local --- build trees. -readBuildTreeRefs :: Exception e => Tar.Entries e -> [BuildTreeRef] -readBuildTreeRefs = - catMaybes - . Tar.foldEntries (\e r -> readBuildTreeRef e : r) - [] throw - --- | Given a path to a tar archive, extract all references to local build trees. -readBuildTreeRefsFromFile :: FilePath -> IO [BuildTreeRef] -readBuildTreeRefsFromFile = liftM (readBuildTreeRefs . Tar.read) . BS.readFile - --- | Read build tree references from an index cache -readBuildTreeRefsFromCache :: Verbosity -> FilePath -> IO [BuildTreeRef] -readBuildTreeRefsFromCache verbosity indexPath = do - (mRefs, _prefs) <- readCacheStrict verbosity (SandboxIndex indexPath) buildTreeRef - return (catMaybes mRefs) - where - buildTreeRef pkgEntry = - case pkgEntry of - IndexUtils.NormalPackage _ _ _ _ -> Nothing - IndexUtils.BuildTreeRef typ _ _ path _ -> Just $ BuildTreeRef typ path - --- | Given a local build tree ref, serialise it to a tar archive entry. -writeBuildTreeRef :: BuildTreeRef -> Tar.Entry -writeBuildTreeRef (BuildTreeRef refType path) = Tar.simpleEntry tarPath content - where - bs = filePathToByteString path - -- Provide a filename for tools that treat custom entries as ordinary files. - tarPath' = "local-build-tree-reference" - -- fromRight can't fail because the path is shorter than 255 characters. - tarPath = fromRight $ Tar.toTarPath True tarPath' - content = Tar.OtherEntryType (typeCodeFromRefType refType) bs (BS.length bs) - - -- TODO: Move this to D.C.Utils? 
- fromRight (Left err) = error err - fromRight (Right a) = a - --- | Check that the provided path is either an existing directory, or a tar --- archive in an existing directory. -validateIndexPath :: Verbosity -> FilePath -> IO FilePath -validateIndexPath verbosity path' = do - path <- makeAbsoluteToCwd path' - if (== ".tar") . takeExtension $ path - then return path - else do dirExists <- doesDirectoryExist path - unless dirExists $ - die' verbosity $ "directory does not exist: '" ++ path ++ "'" - return $ path defaultIndexFileName - --- | Create an empty index file. -createEmpty :: Verbosity -> FilePath -> IO () -createEmpty verbosity path = do - indexExists <- doesFileExist path - if indexExists - then debug verbosity $ "Package index already exists: " ++ path - else do - debug verbosity $ "Creating the index file '" ++ path ++ "'" - createDirectoryIfMissing True (takeDirectory path) - -- Equivalent to 'tar cvf empty.tar --files-from /dev/null'. - let zeros = BS.replicate (512*20) 0 - BS.writeFile path zeros - --- | Add given local build tree references to the index. -addBuildTreeRefs :: Verbosity -> FilePath -> [FilePath] -> BuildTreeRefType - -> IO () -addBuildTreeRefs _ _ [] _ = - error "Distribution.Client.Sandbox.Index.addBuildTreeRefs: unexpected" -addBuildTreeRefs verbosity path l' refType = do - checkIndexExists verbosity path - l <- liftM nub . mapM tryCanonicalizePath $ l' - treesInIndex <- fmap (map buildTreePath) (readBuildTreeRefsFromFile path) - -- Add only those paths that aren't already in the index. 
- treesToAdd <- mapM (buildTreeRefFromPath verbosity refType) (l \\ treesInIndex) - let entries = map writeBuildTreeRef (catMaybes treesToAdd) - unless (null entries) $ do - withBinaryFile path ReadWriteMode $ \h -> do - block <- Tar.hSeekEndEntryOffset h Nothing - debug verbosity $ "Writing at tar block: " ++ show block - BS.hPut h (Tar.write entries) - debug verbosity $ "Successfully appended to '" ++ path ++ "'" - updatePackageIndexCacheFile verbosity $ SandboxIndex path - -data DeleteSourceError = ErrNonregisteredSource { nrPath :: FilePath } - | ErrNonexistentSource { nePath :: FilePath } deriving Show - --- | Remove given local build tree references from the index. --- --- Returns a tuple with either removed build tree refs or errors and a function --- that converts from a provided build tree ref to corresponding full directory path. -removeBuildTreeRefs :: Verbosity -> FilePath -> [FilePath] - -> IO ([Either DeleteSourceError FilePath], - (FilePath -> FilePath)) -removeBuildTreeRefs _ _ [] = - error "Distribution.Client.Sandbox.Index.removeBuildTreeRefs: unexpected" -removeBuildTreeRefs verbosity indexPath l = do - checkIndexExists verbosity indexPath - let tmpFile = indexPath <.> "tmp" - - canonRes <- mapM (\btr -> do res <- tryIO $ canonicalizePath btr - return $ case res of - Right pth -> Right (btr, pth) - Left _ -> Left $ ErrNonexistentSource btr) l - let (failures, convDict) = partitionEithers canonRes - allRefs = fmap snd convDict - - -- Performance note: on my system, it takes 'index --remove-source' - -- approx. 3,5s to filter a 65M file. Real-life indices are expected to be - -- much smaller. - removedRefs <- doRemove convDict tmpFile - - renameFile tmpFile indexPath - debug verbosity $ "Successfully renamed '" ++ tmpFile - ++ "' to '" ++ indexPath ++ "'" - - unless (null removedRefs) $ - updatePackageIndexCacheFile verbosity $ SandboxIndex indexPath - - let results = fmap Right removedRefs - ++ fmap Left failures - ++ fmap (Left . 
ErrNonregisteredSource) - (fmap (convertWith convDict) (allRefs \\ removedRefs)) - - return (results, convertWith convDict) - - where - doRemove :: [(FilePath, FilePath)] -> FilePath -> IO [FilePath] - doRemove srcRefs tmpFile = do - (newIdx, changedPaths) <- - Tar.read `fmap` BS.readFile indexPath - >>= runWriterT . Tar.filterEntriesM (p $ fmap snd srcRefs) - BS.writeFile tmpFile . Tar.write . Tar.entriesToList $ newIdx - return changedPaths - - p :: [FilePath] -> Tar.Entry -> WriterT [FilePath] IO Bool - p refs entry = case readBuildTreeRef entry of - Nothing -> return True - -- FIXME: removing snapshot deps is done with `delete-source - -- .cabal-sandbox/snapshots/$SNAPSHOT_NAME`. Perhaps we also want to - -- support removing snapshots by providing the original path. - (Just (BuildTreeRef _ pth)) -> if pth `elem` refs - then tell [pth] >> return False - else return True - - convertWith dict pth = maybe pth fst $ find ((==pth) . snd) dict - --- | A build tree ref can become ignored if the user later adds a build tree ref --- with the same package ID. We display ignored build tree refs when the user --- runs 'cabal sandbox list-sources', but do not look at their timestamps in --- 'reinstallAddSourceDeps'. -data ListIgnoredBuildTreeRefs = ListIgnored | DontListIgnored - --- | Which types of build tree refs should be listed? -data RefTypesToList = OnlySnapshots | OnlyLinks | LinksAndSnapshots - --- | List the local build trees that are referred to from the index. -listBuildTreeRefs :: Verbosity -> ListIgnoredBuildTreeRefs -> RefTypesToList - -> FilePath - -> IO [FilePath] -listBuildTreeRefs verbosity listIgnored refTypesToList path = do - checkIndexExists verbosity path - buildTreeRefs <- - case listIgnored of - DontListIgnored -> do - paths <- listWithoutIgnored - case refTypesToList of - LinksAndSnapshots -> return paths - _ -> do - allPathsFiltered <- fmap (map buildTreePath . 
filter predicate) - listWithIgnored - _ <- evaluate (length allPathsFiltered) - return (paths `intersect` allPathsFiltered) - - ListIgnored -> fmap (map buildTreePath . filter predicate) listWithIgnored - - _ <- evaluate (length buildTreeRefs) - return buildTreeRefs - - where - predicate :: BuildTreeRef -> Bool - predicate = case refTypesToList of - OnlySnapshots -> (==) SnapshotRef . buildTreeRefType - OnlyLinks -> (==) LinkRef . buildTreeRefType - LinksAndSnapshots -> const True - - listWithIgnored :: IO [BuildTreeRef] - listWithIgnored = readBuildTreeRefsFromFile path - - listWithoutIgnored :: IO [FilePath] - listWithoutIgnored = fmap (map buildTreePath) - $ readBuildTreeRefsFromCache verbosity path - - --- | Check that the package index file exists and exit with error if it does not. -checkIndexExists :: Verbosity -> FilePath -> IO () -checkIndexExists verbosity path = do - indexExists <- doesFileExist path - unless indexExists $ - die' verbosity $ "index does not exist: '" ++ path ++ "'" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/PackageEnvironment.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/PackageEnvironment.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/PackageEnvironment.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/PackageEnvironment.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,573 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Sandbox.PackageEnvironment --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Utilities for working with the package environment file. Patterned after --- Distribution.Client.Config. 
------------------------------------------------------------------------------ - -module Distribution.Client.Sandbox.PackageEnvironment ( - PackageEnvironment(..) - , PackageEnvironmentType(..) - , classifyPackageEnvironment - , createPackageEnvironmentFile - , tryLoadSandboxPackageEnvironmentFile - , readPackageEnvironmentFile - , showPackageEnvironment - , showPackageEnvironmentWithComments - , setPackageDB - , sandboxPackageDBPath - , loadUserConfig - - , basePackageEnvironment - , initialPackageEnvironment - , commentPackageEnvironment - , sandboxPackageEnvironmentFile - , userPackageEnvironmentFile - ) where - -import Distribution.Client.Config ( SavedConfig(..), commentSavedConfig - , loadConfig, configFieldDescriptions - , haddockFlagsFields - , installDirsFields, withProgramsFields - , withProgramOptionsFields - , defaultCompiler ) -import Distribution.Client.ParseUtils ( parseFields, ppFields, ppSection ) -import Distribution.Client.Setup ( GlobalFlags(..), ConfigExFlags(..) - , InstallFlags(..) - , defaultSandboxLocation ) -import Distribution.Client.Targets ( userConstraintPackageName ) -import Distribution.Utils.NubList ( toNubList ) -import Distribution.Simple.Compiler ( Compiler, PackageDB(..) - , compilerFlavor, showCompilerIdWithAbi ) -import Distribution.Simple.InstallDirs ( InstallDirs(..), PathTemplate - , defaultInstallDirs, combineInstallDirs - , fromPathTemplate, toPathTemplate ) -import Distribution.Simple.Setup ( Flag(..) - , ConfigFlags(..), HaddockFlags(..) - , fromFlagOrDefault, toFlag, flagToMaybe ) -import Distribution.Simple.Utils ( die', info, notice, warn, debug ) -import Distribution.Solver.Types.ConstraintSource -import Distribution.ParseUtils ( FieldDescr(..), ParseResult(..) 
- , commaListField, commaNewLineListField - , liftField, lineNo, locatedErrorMsg - , parseFilePathQ, readFields - , showPWarning, simpleField - , syntaxError, warning ) -import Distribution.System ( Platform ) -import Distribution.Verbosity ( Verbosity, normal ) -import Control.Monad ( foldM, liftM2, unless ) -import Data.List ( partition, sortBy ) -import Data.Maybe ( isJust ) -import Data.Ord ( comparing ) -import Distribution.Compat.Exception ( catchIO ) -import Distribution.Compat.Semigroup -import System.Directory ( doesDirectoryExist, doesFileExist - , renameFile ) -import System.FilePath ( (<.>), (), takeDirectory ) -import System.IO.Error ( isDoesNotExistError ) -import Text.PrettyPrint ( ($+$) ) - -import qualified Text.PrettyPrint as Disp -import qualified Distribution.Compat.ReadP as Parse -import qualified Distribution.ParseUtils as ParseUtils ( Field(..) ) -import qualified Distribution.Text as Text -import GHC.Generics ( Generic ) - - --- --- * Configuration saved in the package environment file --- - --- TODO: would be nice to remove duplication between --- D.C.Sandbox.PackageEnvironment and D.C.Config. -data PackageEnvironment = PackageEnvironment { - -- The 'inherit' feature is not used ATM, but could be useful in the future - -- for constructing nested sandboxes (see discussion in #1196). - pkgEnvInherit :: Flag FilePath, - pkgEnvSavedConfig :: SavedConfig -} deriving Generic - -instance Monoid PackageEnvironment where - mempty = gmempty - mappend = (<>) - -instance Semigroup PackageEnvironment where - (<>) = gmappend - --- | The automatically-created package environment file that should not be --- touched by the user. -sandboxPackageEnvironmentFile :: FilePath -sandboxPackageEnvironmentFile = "cabal.sandbox.config" - --- | Optional package environment file that can be used to customize the default --- settings. Created by the user. 
-userPackageEnvironmentFile :: FilePath -userPackageEnvironmentFile = "cabal.config" - --- | Type of the current package environment. -data PackageEnvironmentType = - SandboxPackageEnvironment -- ^ './cabal.sandbox.config' - | UserPackageEnvironment -- ^ './cabal.config' - | AmbientPackageEnvironment -- ^ '~/.cabal/config' - --- | Is there a 'cabal.sandbox.config' or 'cabal.config' in this --- directory? -classifyPackageEnvironment :: FilePath -> Flag FilePath -> Flag Bool - -> IO PackageEnvironmentType -classifyPackageEnvironment pkgEnvDir sandboxConfigFileFlag ignoreSandboxFlag = - do isSandbox <- liftM2 (||) (return forceSandboxConfig) - (configExists sandboxPackageEnvironmentFile) - isUser <- configExists userPackageEnvironmentFile - return (classify isSandbox isUser) - where - configExists fname = doesFileExist (pkgEnvDir fname) - ignoreSandbox = fromFlagOrDefault False ignoreSandboxFlag - forceSandboxConfig = isJust . flagToMaybe $ sandboxConfigFileFlag - - classify :: Bool -> Bool -> PackageEnvironmentType - classify True _ - | not ignoreSandbox = SandboxPackageEnvironment - classify _ True = UserPackageEnvironment - classify _ False = AmbientPackageEnvironment - --- | Defaults common to 'initialPackageEnvironment' and --- 'commentPackageEnvironment'. -commonPackageEnvironmentConfig :: FilePath -> SavedConfig -commonPackageEnvironmentConfig sandboxDir = - mempty { - savedConfigureFlags = mempty { - -- TODO: Currently, we follow cabal-dev and set 'user-install: False' in - -- the config file. In the future we may want to distinguish between - -- global, sandbox and user install types. - configUserInstall = toFlag False, - configInstallDirs = installDirs - }, - savedUserInstallDirs = installDirs, - savedGlobalInstallDirs = installDirs, - savedGlobalFlags = mempty { - globalLogsDir = toFlag $ sandboxDir "logs", - -- Is this right? cabal-dev uses the global world file. 
- globalWorldFile = toFlag $ sandboxDir "world" - } - } - where - installDirs = sandboxInstallDirs sandboxDir - --- | 'commonPackageEnvironmentConfig' wrapped inside a 'PackageEnvironment'. -commonPackageEnvironment :: FilePath -> PackageEnvironment -commonPackageEnvironment sandboxDir = mempty { - pkgEnvSavedConfig = commonPackageEnvironmentConfig sandboxDir - } - --- | Given a path to a sandbox, return the corresponding InstallDirs record. -sandboxInstallDirs :: FilePath -> InstallDirs (Flag PathTemplate) -sandboxInstallDirs sandboxDir = mempty { - prefix = toFlag (toPathTemplate sandboxDir) - } - --- | These are the absolute basic defaults, the fields that must be --- initialised. When we load the package environment from the file we layer the --- loaded values over these ones. -basePackageEnvironment :: PackageEnvironment -basePackageEnvironment = - mempty { - pkgEnvSavedConfig = mempty { - savedConfigureFlags = mempty { - configHcFlavor = toFlag defaultCompiler, - configVerbosity = toFlag normal - } - } - } - --- | Initial configuration that we write out to the package environment file if --- it does not exist. When the package environment gets loaded this --- configuration gets layered on top of 'basePackageEnvironment'. 
-initialPackageEnvironment :: FilePath -> Compiler -> Platform - -> IO PackageEnvironment -initialPackageEnvironment sandboxDir compiler platform = do - defInstallDirs <- defaultInstallDirs (compilerFlavor compiler) - {- userInstall= -} False {- _hasLibs= -} False - let initialConfig = commonPackageEnvironmentConfig sandboxDir - installDirs = combineInstallDirs (\d f -> Flag $ fromFlagOrDefault d f) - defInstallDirs (savedUserInstallDirs initialConfig) - return $ mempty { - pkgEnvSavedConfig = initialConfig { - savedUserInstallDirs = installDirs, - savedGlobalInstallDirs = installDirs, - savedGlobalFlags = (savedGlobalFlags initialConfig) { - globalLocalRepos = toNubList [sandboxDir "packages"] - }, - savedConfigureFlags = setPackageDB sandboxDir compiler platform - (savedConfigureFlags initialConfig), - savedInstallFlags = (savedInstallFlags initialConfig) { - installSummaryFile = toNubList [toPathTemplate (sandboxDir - "logs" "build.log")] - } - } - } - --- | Return the path to the sandbox package database. -sandboxPackageDBPath :: FilePath -> Compiler -> Platform -> String -sandboxPackageDBPath sandboxDir compiler platform = - sandboxDir - (Text.display platform ++ "-" - ++ showCompilerIdWithAbi compiler - ++ "-packages.conf.d") --- The path in sandboxPackageDBPath should be kept in sync with the --- path in the bootstrap.sh which is used to bootstrap cabal-install --- into a sandbox. - --- | Use the package DB location specific for this compiler. -setPackageDB :: FilePath -> Compiler -> Platform -> ConfigFlags -> ConfigFlags -setPackageDB sandboxDir compiler platform configFlags = - configFlags { - configPackageDBs = [Just (SpecificPackageDB $ sandboxPackageDBPath - sandboxDir - compiler - platform)] - } - --- | Almost the same as 'savedConf `mappend` pkgEnv', but some settings are --- overridden instead of mappend'ed. 
-overrideSandboxSettings :: PackageEnvironment -> PackageEnvironment -> - PackageEnvironment -overrideSandboxSettings pkgEnv0 pkgEnv = - pkgEnv { - pkgEnvSavedConfig = mappendedConf { - savedConfigureFlags = (savedConfigureFlags mappendedConf) { - configPackageDBs = configPackageDBs pkgEnvConfigureFlags - } - , savedInstallFlags = (savedInstallFlags mappendedConf) { - installSummaryFile = installSummaryFile pkgEnvInstallFlags - } - }, - pkgEnvInherit = pkgEnvInherit pkgEnv0 - } - where - pkgEnvConf = pkgEnvSavedConfig pkgEnv - mappendedConf = (pkgEnvSavedConfig pkgEnv0) `mappend` pkgEnvConf - pkgEnvConfigureFlags = savedConfigureFlags pkgEnvConf - pkgEnvInstallFlags = savedInstallFlags pkgEnvConf - --- | Default values that get used if no value is given. Used here to include in --- comments when we write out the initial package environment. -commentPackageEnvironment :: FilePath -> IO PackageEnvironment -commentPackageEnvironment sandboxDir = do - commentConf <- commentSavedConfig - let baseConf = commonPackageEnvironmentConfig sandboxDir - return $ mempty { - pkgEnvSavedConfig = commentConf `mappend` baseConf - } - --- | If this package environment inherits from some other package environment, --- return that package environment; otherwise return mempty. -inheritedPackageEnvironment :: Verbosity -> PackageEnvironment - -> IO PackageEnvironment -inheritedPackageEnvironment verbosity pkgEnv = do - case (pkgEnvInherit pkgEnv) of - NoFlag -> return mempty - confPathFlag@(Flag _) -> do - conf <- loadConfig verbosity confPathFlag - return $ mempty { pkgEnvSavedConfig = conf } - --- | Load the user package environment if it exists (the optional "cabal.config" --- file). If it does not exist locally, attempt to load an optional global one. 
-userPackageEnvironment :: Verbosity -> FilePath -> Maybe FilePath - -> IO PackageEnvironment -userPackageEnvironment verbosity pkgEnvDir globalConfigLocation = do - let path = pkgEnvDir userPackageEnvironmentFile - minp <- readPackageEnvironmentFile (ConstraintSourceUserConfig path) - mempty path - case (minp, globalConfigLocation) of - (Just parseRes, _) -> processConfigParse path parseRes - (_, Just globalLoc) -> do - minp' <- readPackageEnvironmentFile (ConstraintSourceUserConfig globalLoc) - mempty globalLoc - maybe (warn verbosity ("no constraints file found at " ++ globalLoc) - >> return mempty) - (processConfigParse globalLoc) - minp' - _ -> do - debug verbosity ("no user package environment file found at " ++ pkgEnvDir) - return mempty - where - processConfigParse path (ParseOk warns parseResult) = do - unless (null warns) $ warn verbosity $ - unlines (map (showPWarning path) warns) - return parseResult - processConfigParse path (ParseFailed err) = do - let (line, msg) = locatedErrorMsg err - warn verbosity $ "Error parsing package environment file " ++ path - ++ maybe "" (\n -> ":" ++ show n) line ++ ":\n" ++ msg - return mempty - --- | Same as @userPackageEnvironmentFile@, but returns a SavedConfig. -loadUserConfig :: Verbosity -> FilePath -> Maybe FilePath -> IO SavedConfig -loadUserConfig verbosity pkgEnvDir globalConfigLocation = - fmap pkgEnvSavedConfig $ - userPackageEnvironment verbosity pkgEnvDir globalConfigLocation - --- | Common error handling code used by 'tryLoadSandboxPackageEnvironment' and --- 'updatePackageEnvironment'. 
-handleParseResult :: Verbosity -> FilePath - -> Maybe (ParseResult PackageEnvironment) - -> IO PackageEnvironment -handleParseResult verbosity path minp = - case minp of - Nothing -> die' verbosity $ - "The package environment file '" ++ path ++ "' doesn't exist" - Just (ParseOk warns parseResult) -> do - unless (null warns) $ warn verbosity $ - unlines (map (showPWarning path) warns) - return parseResult - Just (ParseFailed err) -> do - let (line, msg) = locatedErrorMsg err - die' verbosity $ "Error parsing package environment file " ++ path - ++ maybe "" (\n -> ":" ++ show n) line ++ ":\n" ++ msg - --- | Try to load the given package environment file, exiting with error if it --- doesn't exist. Also returns the path to the sandbox directory. The path --- parameter should refer to an existing file. -tryLoadSandboxPackageEnvironmentFile :: Verbosity -> FilePath -> (Flag FilePath) - -> IO (FilePath, PackageEnvironment) -tryLoadSandboxPackageEnvironmentFile verbosity pkgEnvFile configFileFlag = do - let pkgEnvDir = takeDirectory pkgEnvFile - minp <- readPackageEnvironmentFile - (ConstraintSourceSandboxConfig pkgEnvFile) mempty pkgEnvFile - pkgEnv <- handleParseResult verbosity pkgEnvFile minp - - -- Get the saved sandbox directory. - -- TODO: Use substPathTemplate with - -- compilerTemplateEnv ++ platformTemplateEnv ++ abiTemplateEnv. - let sandboxDir = fromFlagOrDefault defaultSandboxLocation - . fmap fromPathTemplate . prefix . savedUserInstallDirs - . pkgEnvSavedConfig $ pkgEnv - - -- Do some sanity checks - dirExists <- doesDirectoryExist sandboxDir - -- TODO: Also check for an initialised package DB? 
- unless dirExists $ - die' verbosity ("No sandbox exists at " ++ sandboxDir) - info verbosity $ "Using a sandbox located at " ++ sandboxDir - - let base = basePackageEnvironment - let common = commonPackageEnvironment sandboxDir - user <- userPackageEnvironment verbosity pkgEnvDir Nothing --TODO - inherited <- inheritedPackageEnvironment verbosity user - - -- Layer the package environment settings over settings from ~/.cabal/config. - cabalConfig <- fmap unsetSymlinkBinDir $ loadConfig verbosity configFileFlag - return (sandboxDir, - updateInstallDirs $ - (base `mappend` (toPkgEnv cabalConfig) `mappend` - common `mappend` inherited `mappend` user) - `overrideSandboxSettings` pkgEnv) - where - toPkgEnv config = mempty { pkgEnvSavedConfig = config } - - updateInstallDirs pkgEnv = - let config = pkgEnvSavedConfig pkgEnv - configureFlags = savedConfigureFlags config - installDirs = savedUserInstallDirs config - in pkgEnv { - pkgEnvSavedConfig = config { - savedConfigureFlags = configureFlags { - configInstallDirs = installDirs - } - } - } - - -- We don't want to inherit the value of 'symlink-bindir' from - -- '~/.cabal/config'. See #1514. - unsetSymlinkBinDir config = - let installFlags = savedInstallFlags config - in config { - savedInstallFlags = installFlags { - installSymlinkBinDir = NoFlag - } - } - --- | Create a new package environment file, replacing the existing one if it --- exists. Note that the path parameters should point to existing directories. -createPackageEnvironmentFile :: Verbosity -> FilePath -> FilePath - -> Compiler - -> Platform - -> IO () -createPackageEnvironmentFile verbosity sandboxDir pkgEnvFile compiler platform = do - notice verbosity $ "Writing a default package environment file to " ++ pkgEnvFile - initialPkgEnv <- initialPackageEnvironment sandboxDir compiler platform - writePackageEnvironmentFile pkgEnvFile initialPkgEnv - --- | Descriptions of all fields in the package environment file. 
-pkgEnvFieldDescrs :: ConstraintSource -> [FieldDescr PackageEnvironment] -pkgEnvFieldDescrs src = [ - simpleField "inherit" - (fromFlagOrDefault Disp.empty . fmap Disp.text) (optional parseFilePathQ) - pkgEnvInherit (\v pkgEnv -> pkgEnv { pkgEnvInherit = v }) - - , commaNewLineListField "constraints" - (Text.disp . fst) ((\pc -> (pc, src)) `fmap` Text.parse) - (sortConstraints . configExConstraints - . savedConfigureExFlags . pkgEnvSavedConfig) - (\v pkgEnv -> updateConfigureExFlags pkgEnv - (\flags -> flags { configExConstraints = v })) - - , commaListField "preferences" - Text.disp Text.parse - (configPreferences . savedConfigureExFlags . pkgEnvSavedConfig) - (\v pkgEnv -> updateConfigureExFlags pkgEnv - (\flags -> flags { configPreferences = v })) - ] - ++ map toPkgEnv configFieldDescriptions' - where - optional = Parse.option mempty . fmap toFlag - - configFieldDescriptions' :: [FieldDescr SavedConfig] - configFieldDescriptions' = filter - (\(FieldDescr name _ _) -> name /= "preference" && name /= "constraint") - (configFieldDescriptions src) - - toPkgEnv :: FieldDescr SavedConfig -> FieldDescr PackageEnvironment - toPkgEnv fieldDescr = - liftField pkgEnvSavedConfig - (\savedConfig pkgEnv -> pkgEnv { pkgEnvSavedConfig = savedConfig}) - fieldDescr - - updateConfigureExFlags :: PackageEnvironment - -> (ConfigExFlags -> ConfigExFlags) - -> PackageEnvironment - updateConfigureExFlags pkgEnv f = pkgEnv { - pkgEnvSavedConfig = (pkgEnvSavedConfig pkgEnv) { - savedConfigureExFlags = f . savedConfigureExFlags . pkgEnvSavedConfig - $ pkgEnv - } - } - - sortConstraints = sortBy (comparing $ userConstraintPackageName . fst) - --- | Read the package environment file. -readPackageEnvironmentFile :: ConstraintSource -> PackageEnvironment -> FilePath - -> IO (Maybe (ParseResult PackageEnvironment)) -readPackageEnvironmentFile src initial file = - handleNotExists $ - fmap (Just . 
parsePackageEnvironment src initial) (readFile file) - where - handleNotExists action = catchIO action $ \ioe -> - if isDoesNotExistError ioe - then return Nothing - else ioError ioe - --- | Parse the package environment file. -parsePackageEnvironment :: ConstraintSource -> PackageEnvironment -> String - -> ParseResult PackageEnvironment -parsePackageEnvironment src initial str = do - fields <- readFields str - let (knownSections, others) = partition isKnownSection fields - pkgEnv <- parse others - let config = pkgEnvSavedConfig pkgEnv - installDirs0 = savedUserInstallDirs config - (haddockFlags, installDirs, paths, args) <- - foldM parseSections - (savedHaddockFlags config, installDirs0, [], []) - knownSections - return pkgEnv { - pkgEnvSavedConfig = config { - savedConfigureFlags = (savedConfigureFlags config) { - configProgramPaths = paths, - configProgramArgs = args - }, - savedHaddockFlags = haddockFlags, - savedUserInstallDirs = installDirs, - savedGlobalInstallDirs = installDirs - } - } - - where - isKnownSection :: ParseUtils.Field -> Bool - isKnownSection (ParseUtils.Section _ "haddock" _ _) = True - isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True - isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True - isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True - isKnownSection _ = False - - parse :: [ParseUtils.Field] -> ParseResult PackageEnvironment - parse = parseFields (pkgEnvFieldDescrs src) initial - - parseSections :: SectionsAccum -> ParseUtils.Field - -> ParseResult SectionsAccum - parseSections accum@(h,d,p,a) - (ParseUtils.Section _ "haddock" name fs) - | name == "" = do h' <- parseFields haddockFlagsFields h fs - return (h', d, p, a) - | otherwise = do - warning "The 'haddock' section should be unnamed" - return accum - parseSections (h,d,p,a) - (ParseUtils.Section line "install-dirs" name fs) - | name == "" = do d' <- parseFields installDirsFields d fs - return (h, d',p,a) - | otherwise = - 
syntaxError line $ - "Named 'install-dirs' section: '" ++ name - ++ "'. Note that named 'install-dirs' sections are not allowed in the '" - ++ userPackageEnvironmentFile ++ "' file." - parseSections accum@(h, d,p,a) - (ParseUtils.Section _ "program-locations" name fs) - | name == "" = do p' <- parseFields withProgramsFields p fs - return (h, d, p', a) - | otherwise = do - warning "The 'program-locations' section should be unnamed" - return accum - parseSections accum@(h, d, p, a) - (ParseUtils.Section _ "program-default-options" name fs) - | name == "" = do a' <- parseFields withProgramOptionsFields a fs - return (h, d, p, a') - | otherwise = do - warning "The 'program-default-options' section should be unnamed" - return accum - parseSections accum f = do - warning $ "Unrecognized stanza on line " ++ show (lineNo f) - return accum - --- | Accumulator type for 'parseSections'. -type SectionsAccum = (HaddockFlags, InstallDirs (Flag PathTemplate) - , [(String, FilePath)], [(String, [String])]) - --- | Write out the package environment file. -writePackageEnvironmentFile :: FilePath -> PackageEnvironment -> IO () -writePackageEnvironmentFile path pkgEnv = do - let tmpPath = (path <.> "tmp") - writeFile tmpPath $ explanation ++ pkgEnvStr ++ "\n" - renameFile tmpPath path - where - pkgEnvStr = showPackageEnvironment pkgEnv - explanation = unlines - ["-- This is a Cabal package environment file." - ,"-- THIS FILE IS AUTO-GENERATED. DO NOT EDIT DIRECTLY." - ,"-- Please create a 'cabal.config' file in the same directory" - ,"-- if you want to change the default settings for this sandbox." - ,"","" - ] - --- | Pretty-print the package environment. -showPackageEnvironment :: PackageEnvironment -> String -showPackageEnvironment pkgEnv = showPackageEnvironmentWithComments Nothing pkgEnv - --- | Pretty-print the package environment with default values for empty fields --- commented out (just like the default ~/.cabal/config). 
-showPackageEnvironmentWithComments :: (Maybe PackageEnvironment) - -> PackageEnvironment - -> String -showPackageEnvironmentWithComments mdefPkgEnv pkgEnv = Disp.render $ - ppFields (pkgEnvFieldDescrs ConstraintSourceUnknown) - mdefPkgEnv pkgEnv - $+$ Disp.text "" - $+$ ppSection "install-dirs" "" installDirsFields - (fmap installDirsSection mdefPkgEnv) (installDirsSection pkgEnv) - where - installDirsSection = savedUserInstallDirs . pkgEnvSavedConfig diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/Timestamp.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/Timestamp.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/Timestamp.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/Timestamp.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,273 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Sandbox.Timestamp --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Timestamp file handling (for add-source dependencies). 
------------------------------------------------------------------------------ - -module Distribution.Client.Sandbox.Timestamp ( - AddSourceTimestamp, - withAddTimestamps, - withUpdateTimestamps, - maybeAddCompilerTimestampRecord, - listModifiedDeps, - removeTimestamps, - - -- * For testing - TimestampFileRecord, - readTimestampFile, - writeTimestampFile - ) where - -import Control.Monad (filterM, forM, when) -import Data.Char (isSpace) -import Data.List (partition) -import System.Directory (renameFile) -import System.FilePath ((<.>), ()) -import qualified Data.Map as M - -import Distribution.Compiler (CompilerId) -import Distribution.Simple.Utils (debug, die', warn) -import Distribution.System (Platform) -import Distribution.Text (display) -import Distribution.Verbosity (Verbosity) - -import Distribution.Client.SrcDist (allPackageSourceFiles) -import Distribution.Client.Sandbox.Index - (ListIgnoredBuildTreeRefs (ListIgnored), RefTypesToList(OnlyLinks) - ,listBuildTreeRefs) -import Distribution.Client.SetupWrapper - -import Distribution.Compat.Exception (catchIO) -import Distribution.Compat.Time (ModTime, getCurTime, - getModTime, - posixSecondsToModTime) - - --- | Timestamp of an add-source dependency. -type AddSourceTimestamp = (FilePath, ModTime) --- | Timestamp file record - a string identifying the compiler & platform plus a --- list of add-source timestamps. -type TimestampFileRecord = (String, [AddSourceTimestamp]) - -timestampRecordKey :: CompilerId -> Platform -> String -timestampRecordKey compId platform = display platform ++ "-" ++ display compId - --- | The 'add-source-timestamps' file keeps the timestamps of all add-source --- dependencies. It is initially populated by 'sandbox add-source' and kept --- current by 'reinstallAddSourceDeps' and 'configure -w'. The user can install --- add-source deps manually with 'cabal install' after having edited them, so we --- can err on the side of caution sometimes. 
--- FIXME: We should keep this info in the index file, together with build tree --- refs. -timestampFileName :: FilePath -timestampFileName = "add-source-timestamps" - --- | Read the timestamp file. Exits with error if the timestamp file is --- corrupted. Returns an empty list if the file doesn't exist. -readTimestampFile :: Verbosity -> FilePath -> IO [TimestampFileRecord] -readTimestampFile verbosity timestampFile = do - timestampString <- readFile timestampFile `catchIO` \_ -> return "[]" - case reads timestampString of - [(version, s)] - | version == (2::Int) -> - case reads s of - [(timestamps, s')] | all isSpace s' -> return timestamps - _ -> dieCorrupted - | otherwise -> dieWrongFormat - - -- Old format (timestamps are POSIX seconds). Convert to new format. - [] -> - case reads timestampString of - [(timestamps, s)] | all isSpace s -> do - let timestamps' = map (\(i, ts) -> - (i, map (\(p, t) -> - (p, posixSecondsToModTime t)) ts)) - timestamps - writeTimestampFile timestampFile timestamps' - return timestamps' - _ -> dieCorrupted - _ -> dieCorrupted - where - dieWrongFormat = die' verbosity $ wrongFormat ++ deleteAndRecreate - dieCorrupted = die' verbosity $ corrupted ++ deleteAndRecreate - wrongFormat = "The timestamps file is in the wrong format." - corrupted = "The timestamps file is corrupted." - deleteAndRecreate = " Please delete and recreate the sandbox." - --- | Write the timestamp file, atomically. -writeTimestampFile :: FilePath -> [TimestampFileRecord] -> IO () -writeTimestampFile timestampFile timestamps = do - writeFile timestampTmpFile "2\n" -- version - appendFile timestampTmpFile (show timestamps ++ "\n") - renameFile timestampTmpFile timestampFile - where - timestampTmpFile = timestampFile <.> "tmp" - --- | Read, process and write the timestamp file in one go. 
-withTimestampFile :: Verbosity -> FilePath - -> ([TimestampFileRecord] -> IO [TimestampFileRecord]) - -> IO () -withTimestampFile verbosity sandboxDir process = do - let timestampFile = sandboxDir timestampFileName - timestampRecords <- readTimestampFile verbosity timestampFile >>= process - writeTimestampFile timestampFile timestampRecords - --- | Given a list of 'AddSourceTimestamp's, a list of paths to add-source deps --- we've added and an initial timestamp, add an 'AddSourceTimestamp' to the list --- for each path. If a timestamp for a given path already exists in the list, --- update it. -addTimestamps :: ModTime -> [AddSourceTimestamp] -> [FilePath] - -> [AddSourceTimestamp] -addTimestamps initial timestamps newPaths = - [ (p, initial) | p <- newPaths ] ++ oldTimestamps - where - (oldTimestamps, _toBeUpdated) = - partition (\(path, _) -> path `notElem` newPaths) timestamps - --- | Given a list of 'AddSourceTimestamp's, a list of paths to add-source deps --- we've reinstalled and a new timestamp value, update the timestamp value for --- the deps in the list. If there are new paths in the list, ignore them. -updateTimestamps :: [AddSourceTimestamp] -> [FilePath] -> ModTime - -> [AddSourceTimestamp] -updateTimestamps timestamps pathsToUpdate newTimestamp = - foldr updateTimestamp [] timestamps - where - updateTimestamp t@(path, _oldTimestamp) rest - | path `elem` pathsToUpdate = (path, newTimestamp) : rest - | otherwise = t : rest - --- | Given a list of 'TimestampFileRecord's and a list of paths to add-source --- deps we've removed, remove those deps from the list. -removeTimestamps' :: [AddSourceTimestamp] -> [FilePath] -> [AddSourceTimestamp] -removeTimestamps' l pathsToRemove = foldr removeTimestamp [] l - where - removeTimestamp t@(path, _oldTimestamp) rest = - if path `elem` pathsToRemove - then rest - else t : rest - --- | If a timestamp record for this compiler doesn't exist, add a new one. 
-maybeAddCompilerTimestampRecord :: Verbosity -> FilePath -> FilePath - -> CompilerId -> Platform - -> IO () -maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile - compId platform = do - let key = timestampRecordKey compId platform - withTimestampFile verbosity sandboxDir $ \timestampRecords -> do - case lookup key timestampRecords of - Just _ -> return timestampRecords - Nothing -> do - buildTreeRefs <- listBuildTreeRefs verbosity ListIgnored OnlyLinks - indexFile - now <- getCurTime - let timestamps = map (\p -> (p, now)) buildTreeRefs - return $ (key, timestamps):timestampRecords - --- | Given an IO action that returns a list of build tree refs, add those --- build tree refs to the timestamps file (for all compilers). -withAddTimestamps :: Verbosity -> FilePath -> IO [FilePath] -> IO () -withAddTimestamps verbosity sandboxDir act = do - let initialTimestamp = minBound - withActionOnAllTimestamps (addTimestamps initialTimestamp) verbosity sandboxDir act - --- | Given a list of build tree refs, remove those --- build tree refs from the timestamps file (for all compilers). -removeTimestamps :: Verbosity -> FilePath -> [FilePath] -> IO () -removeTimestamps verbosity idxFile = - withActionOnAllTimestamps removeTimestamps' verbosity idxFile . return - --- | Given an IO action that returns a list of build tree refs, update the --- timestamps of the returned build tree refs to the current time (only for the --- given compiler & platform). -withUpdateTimestamps :: Verbosity -> FilePath -> CompilerId -> Platform - ->([AddSourceTimestamp] -> IO [FilePath]) - -> IO () -withUpdateTimestamps = - withActionOnCompilerTimestamps updateTimestamps - --- | Helper for implementing 'withAddTimestamps' and --- 'withRemoveTimestamps'. Runs a given action on the list of --- 'AddSourceTimestamp's for all compilers, applies 'f' to the result and then --- updates the timestamp file. The IO action is run only once. 
-withActionOnAllTimestamps :: ([AddSourceTimestamp] -> [FilePath] - -> [AddSourceTimestamp]) - -> Verbosity - -> FilePath - -> IO [FilePath] - -> IO () -withActionOnAllTimestamps f verbosity sandboxDir act = - withTimestampFile verbosity sandboxDir $ \timestampRecords -> do - paths <- act - return [(key, f timestamps paths) | (key, timestamps) <- timestampRecords] - --- | Helper for implementing 'withUpdateTimestamps'. Runs a given action on the --- list of 'AddSourceTimestamp's for this compiler, applies 'f' to the result --- and then updates the timestamp file record. The IO action is run only once. -withActionOnCompilerTimestamps :: ([AddSourceTimestamp] - -> [FilePath] -> ModTime - -> [AddSourceTimestamp]) - -> Verbosity - -> FilePath - -> CompilerId - -> Platform - -> ([AddSourceTimestamp] -> IO [FilePath]) - -> IO () -withActionOnCompilerTimestamps f verbosity sandboxDir compId platform act = do - let needle = timestampRecordKey compId platform - withTimestampFile verbosity sandboxDir $ \timestampRecords -> do - timestampRecords' <- forM timestampRecords $ \r@(key, timestamps) -> - if key == needle - then do paths <- act timestamps - now <- getCurTime - return (key, f timestamps paths now) - else return r - return timestampRecords' - --- | Has this dependency been modified since we have last looked at it? -isDepModified :: Verbosity -> ModTime -> AddSourceTimestamp -> IO Bool -isDepModified verbosity now (packageDir, timestamp) = do - debug verbosity ("Checking whether the dependency is modified: " ++ packageDir) - -- TODO: we should properly plumb the correct options through - -- instead of using defaultSetupScriptOptions - depSources <- allPackageSourceFiles verbosity defaultSetupScriptOptions packageDir - go depSources - - where - go [] = return False - go (dep0:rest) = do - -- FIXME: What if the clock jumps backwards at any point? For now we only - -- print a warning. 
- let dep = packageDir dep0 - modTime <- getModTime dep - when (modTime > now) $ - warn verbosity $ "File '" ++ dep - ++ "' has a modification time that is in the future." - if modTime >= timestamp - then do - debug verbosity ("Dependency has a modified source file: " ++ dep) - return True - else go rest - --- | List all modified dependencies. -listModifiedDeps :: Verbosity -> FilePath -> CompilerId -> Platform - -> M.Map FilePath a - -- ^ The set of all installed add-source deps. - -> IO [FilePath] -listModifiedDeps verbosity sandboxDir compId platform installedDepsMap = do - timestampRecords <- readTimestampFile verbosity (sandboxDir timestampFileName) - let needle = timestampRecordKey compId platform - timestamps <- maybe noTimestampRecord return - (lookup needle timestampRecords) - now <- getCurTime - fmap (map fst) . filterM (isDepModified verbosity now) - . filter (\ts -> fst ts `M.member` installedDepsMap) - $ timestamps - - where - noTimestampRecord = die' verbosity $ "Сouldn't find a timestamp record for the given " - ++ "compiler/platform pair. " - ++ "Please report this on the Cabal bug tracker: " - ++ "https://github.com/haskell/cabal/issues/new ." diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/Types.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,65 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Sandbox.Types --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Helpers for writing code that works both inside and outside a sandbox. 
------------------------------------------------------------------------------ - -module Distribution.Client.Sandbox.Types ( - UseSandbox(..), isUseSandbox, whenUsingSandbox, - SandboxPackageInfo(..) - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex -import Distribution.Client.Types (UnresolvedSourcePackage) - -import qualified Data.Set as S - --- | Are we using a sandbox? -data UseSandbox = UseSandbox FilePath | NoSandbox - -instance Monoid UseSandbox where - mempty = NoSandbox - mappend = (<>) - -instance Semigroup UseSandbox where - NoSandbox <> s = s - u0@(UseSandbox _) <> NoSandbox = u0 - (UseSandbox _) <> u1@(UseSandbox _) = u1 - --- | Convert a @UseSandbox@ value to a boolean. Useful in conjunction with --- @when@. -isUseSandbox :: UseSandbox -> Bool -isUseSandbox (UseSandbox _) = True -isUseSandbox NoSandbox = False - --- | Execute an action only if we're in a sandbox, feeding to it the path to the --- sandbox directory. -whenUsingSandbox :: UseSandbox -> (FilePath -> IO ()) -> IO () -whenUsingSandbox NoSandbox _ = return () -whenUsingSandbox (UseSandbox sandboxDir) act = act sandboxDir - --- | Data about the packages installed in the sandbox that is passed from --- 'reinstallAddSourceDeps' to the solver. -data SandboxPackageInfo = SandboxPackageInfo { - modifiedAddSourceDependencies :: ![UnresolvedSourcePackage], - -- ^ Modified add-source deps that we want to reinstall. These are guaranteed - -- to be already installed in the sandbox. - - otherAddSourceDependencies :: ![UnresolvedSourcePackage], - -- ^ Remaining add-source deps. Some of these may be not installed in the - -- sandbox. - - otherInstalledSandboxPackages :: !InstalledPackageIndex.InstalledPackageIndex, - -- ^ All packages installed in the sandbox. Intersection with - -- 'modifiedAddSourceDependencies' and/or 'otherAddSourceDependencies' can be - -- non-empty. 
- - allAddSourceDependencies :: !(S.Set FilePath) - -- ^ A set of paths to all add-source dependencies, for convenience. - } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Sandbox.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,867 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE FlexibleContexts #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Sandbox --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- UI for the sandboxing functionality. ------------------------------------------------------------------------------ - -module Distribution.Client.Sandbox ( - sandboxInit, - sandboxDelete, - sandboxAddSource, - sandboxAddSourceSnapshot, - sandboxDeleteSource, - sandboxListSources, - sandboxHcPkg, - dumpPackageEnvironment, - withSandboxBinDirOnSearchPath, - - getSandboxConfigFilePath, - loadConfigOrSandboxConfig, - findSavedDistPref, - initPackageDBIfNeeded, - maybeWithSandboxDirOnSearchPath, - - WereDepsReinstalled(..), - reinstallAddSourceDeps, - maybeReinstallAddSourceDeps, - - SandboxPackageInfo(..), - maybeWithSandboxPackageInfo, - - tryGetIndexFilePath, - sandboxBuildDir, - getInstalledPackagesInSandbox, - updateSandboxConfigFileFlag, - updateInstallDirs, - - getPersistOrConfigCompiler - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Client.Setup - ( SandboxFlags(..), ConfigFlags(..), ConfigExFlags(..), InstallFlags(..) 
- , GlobalFlags(..), configCompilerAux', configPackageDB' - , defaultConfigExFlags, defaultInstallFlags - , defaultSandboxLocation, withRepoContext ) -import Distribution.Client.Sandbox.Timestamp ( listModifiedDeps - , maybeAddCompilerTimestampRecord - , withAddTimestamps - , removeTimestamps ) -import Distribution.Client.Config - ( SavedConfig(..), defaultUserInstall, loadConfig ) -import Distribution.Client.Dependency ( foldProgress ) -import Distribution.Client.IndexUtils ( BuildTreeRefType(..) ) -import Distribution.Client.Install ( InstallArgs, - makeInstallContext, - makeInstallPlan, - processInstallPlan ) -import Distribution.Utils.NubList ( fromNubList ) - -import Distribution.Client.Sandbox.PackageEnvironment - ( PackageEnvironment(..), PackageEnvironmentType(..) - , createPackageEnvironmentFile, classifyPackageEnvironment - , tryLoadSandboxPackageEnvironmentFile, loadUserConfig - , commentPackageEnvironment, showPackageEnvironmentWithComments - , sandboxPackageEnvironmentFile, userPackageEnvironmentFile - , sandboxPackageDBPath ) -import Distribution.Client.Sandbox.Types ( SandboxPackageInfo(..) - , UseSandbox(..) ) -import Distribution.Client.SetupWrapper - ( SetupScriptOptions(..), defaultSetupScriptOptions ) -import Distribution.Client.Types ( PackageLocation(..) ) -import Distribution.Client.Utils ( inDir, tryCanonicalizePath - , tryFindAddSourcePackageDesc) -import Distribution.PackageDescription.Configuration - ( flattenPackageDescription ) -import Distribution.PackageDescription.Parsec ( readGenericPackageDescription ) -import Distribution.Simple.Compiler ( Compiler(..), PackageDB(..) 
) -import Distribution.Simple.Configure ( configCompilerAuxEx - , getPackageDBContents - , maybeGetPersistBuildConfig - , findDistPrefOrDefault - , findDistPref ) -import qualified Distribution.Simple.LocalBuildInfo as LocalBuildInfo -import Distribution.Simple.PreProcess ( knownSuffixHandlers ) -import Distribution.Simple.Program ( ProgramDb ) -import Distribution.Simple.Setup ( Flag(..), HaddockFlags(..) - , fromFlagOrDefault, flagToMaybe ) -import Distribution.Simple.SrcDist ( prepareTree ) -import Distribution.Simple.Utils ( die', debug, notice, info, warn - , debugNoWrap, defaultPackageDesc - , topHandlerWith - , createDirectoryIfMissingVerbose ) -import Distribution.Package ( Package(..) ) -import Distribution.System ( Platform ) -import Distribution.Text ( display ) -import Distribution.Verbosity ( Verbosity ) -import Distribution.Compat.Environment ( lookupEnv, setEnv ) -import Distribution.Client.Compat.FilePerms ( setFileHidden ) -import qualified Distribution.Client.Sandbox.Index as Index -import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) -import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex -import qualified Distribution.Simple.Register as Register - -import Distribution.Solver.Types.SourcePackage - -import qualified Data.Map as M -import qualified Data.Set as S -import Data.Either (partitionEithers) -import Control.Exception ( assert, bracket_ ) -import Control.Monad ( forM, mapM, mapM_ ) -import Data.Bits ( shiftL, shiftR, xor ) -import Data.IORef ( newIORef, writeIORef, readIORef ) -import Data.List ( delete - , groupBy ) -import Data.Maybe ( fromJust ) -import Numeric ( showHex ) -import System.Directory ( canonicalizePath - , createDirectory - , doesDirectoryExist - , doesFileExist - , getCurrentDirectory - , removeDirectoryRecursive - , removeFile - , renameDirectory ) -import System.FilePath ( (), equalFilePath - , getSearchPath - , searchPathSeparator - , splitSearchPath - , takeDirectory ) - --- --- * 
Constants --- - --- | The name of the sandbox subdirectory where we keep snapshots of add-source --- dependencies. -snapshotDirectoryName :: FilePath -snapshotDirectoryName = "snapshots" - --- | Non-standard build dir that is used for building add-source deps instead of --- "dist". Fixes surprising behaviour in some cases (see issue #1281). -sandboxBuildDir :: FilePath -> FilePath -sandboxBuildDir sandboxDir = "dist/dist-sandbox-" ++ showHex sandboxDirHash "" - where - sandboxDirHash = jenkins sandboxDir - - -- See http://en.wikipedia.org/wiki/Jenkins_hash_function - jenkins :: String -> Word32 - jenkins str = loop_finish $ foldl' loop 0 str - where - loop :: Word32 -> Char -> Word32 - loop hash key_i' = hash''' - where - key_i = toEnum . ord $ key_i' - hash' = hash + key_i - hash'' = hash' + (shiftL hash' 10) - hash''' = hash'' `xor` (shiftR hash'' 6) - - loop_finish :: Word32 -> Word32 - loop_finish hash = hash''' - where - hash' = hash + (shiftL hash 3) - hash'' = hash' `xor` (shiftR hash' 11) - hash''' = hash'' + (shiftL hash'' 15) - --- --- * Basic sandbox functions. --- - --- | If @--sandbox-config-file@ wasn't given on the command-line, set it to the --- value of the @CABAL_SANDBOX_CONFIG@ environment variable, or else to --- 'NoFlag'. -updateSandboxConfigFileFlag :: GlobalFlags -> IO GlobalFlags -updateSandboxConfigFileFlag globalFlags = - case globalSandboxConfigFile globalFlags of - Flag _ -> return globalFlags - NoFlag -> do - f' <- fmap (maybe NoFlag Flag) . lookupEnv $ "CABAL_SANDBOX_CONFIG" - return globalFlags { globalSandboxConfigFile = f' } - --- | Return the path to the sandbox config file - either the default or the one --- specified with @--sandbox-config-file@. 
-getSandboxConfigFilePath :: GlobalFlags -> IO FilePath -getSandboxConfigFilePath globalFlags = do - let sandboxConfigFileFlag = globalSandboxConfigFile globalFlags - case sandboxConfigFileFlag of - NoFlag -> do pkgEnvDir <- getCurrentDirectory - return (pkgEnvDir sandboxPackageEnvironmentFile) - Flag path -> return path - --- | Load the @cabal.sandbox.config@ file (and possibly the optional --- @cabal.config@). In addition to a @PackageEnvironment@, also return a --- canonical path to the sandbox. Exit with error if the sandbox directory or --- the package environment file do not exist. -tryLoadSandboxConfig :: Verbosity -> GlobalFlags - -> IO (FilePath, PackageEnvironment) -tryLoadSandboxConfig verbosity globalFlags = do - path <- getSandboxConfigFilePath globalFlags - tryLoadSandboxPackageEnvironmentFile verbosity path - (globalConfigFile globalFlags) - --- | Return the name of the package index file for this package environment. -tryGetIndexFilePath :: Verbosity -> SavedConfig -> IO FilePath -tryGetIndexFilePath verbosity config = tryGetIndexFilePath' verbosity (savedGlobalFlags config) - --- | The same as 'tryGetIndexFilePath', but takes 'GlobalFlags' instead of --- 'SavedConfig'. -tryGetIndexFilePath' :: Verbosity -> GlobalFlags -> IO FilePath -tryGetIndexFilePath' verbosity globalFlags = do - let paths = fromNubList $ globalLocalRepos globalFlags - case paths of - [] -> die' verbosity $ "Distribution.Client.Sandbox.tryGetIndexFilePath: " ++ - "no local repos found. " ++ checkConfiguration - _ -> return $ (last paths) Index.defaultIndexFileName - where - checkConfiguration = "Please check your configuration ('" - ++ userPackageEnvironmentFile ++ "')." - --- | Try to extract a 'PackageDB' from 'ConfigFlags'. Gives a better error --- message than just pattern-matching. 
-getSandboxPackageDB :: Verbosity -> ConfigFlags -> IO PackageDB -getSandboxPackageDB verbosity configFlags = do - case configPackageDBs configFlags of - [Just sandboxDB@(SpecificPackageDB _)] -> return sandboxDB - -- TODO: should we allow multiple package DBs (e.g. with 'inherit')? - - [] -> - die' verbosity $ "Sandbox package DB is not specified. " ++ sandboxConfigCorrupt - [_] -> - die' verbosity $ "Unexpected contents of the 'package-db' field. " - ++ sandboxConfigCorrupt - _ -> - die' verbosity $ "Too many package DBs provided. " ++ sandboxConfigCorrupt - - where - sandboxConfigCorrupt = "Your 'cabal.sandbox.config' is probably corrupt." - - --- | Which packages are installed in the sandbox package DB? -getInstalledPackagesInSandbox :: Verbosity -> ConfigFlags - -> Compiler -> ProgramDb - -> IO InstalledPackageIndex -getInstalledPackagesInSandbox verbosity configFlags comp progdb = do - sandboxDB <- getSandboxPackageDB verbosity configFlags - getPackageDBContents verbosity comp sandboxDB progdb - --- | Temporarily add $SANDBOX_DIR/bin to $PATH. -withSandboxBinDirOnSearchPath :: FilePath -> IO a -> IO a -withSandboxBinDirOnSearchPath sandboxDir = bracket_ addBinDir rmBinDir - where - -- TODO: Instead of modifying the global process state, it'd be better to - -- set the environment individually for each subprocess invocation. This - -- will have to wait until the Shell monad is implemented; without it the - -- required changes are too intrusive. - addBinDir :: IO () - addBinDir = do - mbOldPath <- lookupEnv "PATH" - let newPath = maybe sandboxBin ((++) sandboxBin . (:) searchPathSeparator) - mbOldPath - setEnv "PATH" newPath - - rmBinDir :: IO () - rmBinDir = do - oldPath <- getSearchPath - let newPath = intercalate [searchPathSeparator] - (delete sandboxBin oldPath) - setEnv "PATH" newPath - - sandboxBin = sandboxDir "bin" - --- | Initialise a package DB for this compiler if it doesn't exist. 
-initPackageDBIfNeeded :: Verbosity -> ConfigFlags - -> Compiler -> ProgramDb - -> IO () -initPackageDBIfNeeded verbosity configFlags comp progdb = do - SpecificPackageDB dbPath <- getSandboxPackageDB verbosity configFlags - packageDBExists <- doesDirectoryExist dbPath - unless packageDBExists $ - Register.initPackageDB verbosity comp progdb dbPath - when packageDBExists $ - debug verbosity $ "The package database already exists: " ++ dbPath - --- | Entry point for the 'cabal sandbox dump-pkgenv' command. -dumpPackageEnvironment :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () -dumpPackageEnvironment verbosity _sandboxFlags globalFlags = do - (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags - commentPkgEnv <- commentPackageEnvironment sandboxDir - putStrLn . showPackageEnvironmentWithComments (Just commentPkgEnv) $ pkgEnv - --- | Entry point for the 'cabal sandbox init' command. -sandboxInit :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () -sandboxInit verbosity sandboxFlags globalFlags = do - -- Warn if there's a 'cabal-dev' sandbox. - isCabalDevSandbox <- liftM2 (&&) (doesDirectoryExist "cabal-dev") - (doesFileExist $ "cabal-dev" "cabal.config") - when isCabalDevSandbox $ - warn verbosity $ - "You are apparently using a legacy (cabal-dev) sandbox. " - ++ "Legacy sandboxes may interact badly with native Cabal sandboxes. " - ++ "You may want to delete the 'cabal-dev' directory to prevent issues." - - -- Create the sandbox directory. - let sandboxDir' = fromFlagOrDefault defaultSandboxLocation - (sandboxLocation sandboxFlags) - createDirectoryIfMissingVerbose verbosity True sandboxDir' - sandboxDir <- tryCanonicalizePath sandboxDir' - setFileHidden sandboxDir - - -- Determine which compiler to use (using the value from ~/.cabal/config). - userConfig <- loadConfig verbosity (globalConfigFile globalFlags) - (comp, platform, progdb) <- configCompilerAuxEx (savedConfigureFlags userConfig) - - -- Create the package environment file. 
- pkgEnvFile <- getSandboxConfigFilePath globalFlags - createPackageEnvironmentFile verbosity sandboxDir pkgEnvFile comp platform - (_sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags - let config = pkgEnvSavedConfig pkgEnv - configFlags = savedConfigureFlags config - - -- Create the index file if it doesn't exist. - indexFile <- tryGetIndexFilePath verbosity config - indexFileExists <- doesFileExist indexFile - if indexFileExists - then notice verbosity $ "Using an existing sandbox located at " ++ sandboxDir - else notice verbosity $ "Creating a new sandbox at " ++ sandboxDir - Index.createEmpty verbosity indexFile - - -- Create the package DB for the default compiler. - initPackageDBIfNeeded verbosity configFlags comp progdb - maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile - (compilerId comp) platform - --- | Entry point for the 'cabal sandbox delete' command. -sandboxDelete :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () -sandboxDelete verbosity _sandboxFlags globalFlags = do - (useSandbox, _) <- loadConfigOrSandboxConfig - verbosity - globalFlags { globalRequireSandbox = Flag False } - case useSandbox of - NoSandbox -> warn verbosity "Not in a sandbox." - UseSandbox sandboxDir -> do - curDir <- getCurrentDirectory - pkgEnvFile <- getSandboxConfigFilePath globalFlags - - -- Remove the @cabal.sandbox.config@ file, unless it's in a non-standard - -- location. - let isNonDefaultConfigLocation = not $ equalFilePath pkgEnvFile $ - curDir sandboxPackageEnvironmentFile - - if isNonDefaultConfigLocation - then warn verbosity $ "Sandbox config file is in non-default location: '" - ++ pkgEnvFile ++ "'.\n Please delete manually." - else removeFile pkgEnvFile - - -- Remove the sandbox directory, unless we're using a shared sandbox. 
- let isNonDefaultSandboxLocation = not $ equalFilePath sandboxDir $ - curDir defaultSandboxLocation - - when isNonDefaultSandboxLocation $ - die' verbosity $ "Non-default sandbox location used: '" ++ sandboxDir - ++ "'.\nAssuming a shared sandbox. Please delete '" - ++ sandboxDir ++ "' manually." - - absSandboxDir <- canonicalizePath sandboxDir - notice verbosity $ "Deleting the sandbox located at " ++ absSandboxDir - removeDirectoryRecursive absSandboxDir - - let - pathInsideSandbox = isPrefixOf absSandboxDir - - -- Warn the user if deleting the sandbox deleted a package database - -- referenced in the current environment. - checkPackagePaths var = do - let - checkPath path = do - absPath <- canonicalizePath path - (when (pathInsideSandbox absPath) . warn verbosity) - (var ++ " refers to package database " ++ path - ++ " inside the deleted sandbox.") - liftM (maybe [] splitSearchPath) (lookupEnv var) >>= mapM_ checkPath - - checkPackagePaths "CABAL_SANDBOX_PACKAGE_PATH" - checkPackagePaths "GHC_PACKAGE_PATH" - checkPackagePaths "GHCJS_PACKAGE_PATH" - --- Common implementation of 'sandboxAddSource' and 'sandboxAddSourceSnapshot'. -doAddSource :: Verbosity -> [FilePath] -> FilePath -> PackageEnvironment - -> BuildTreeRefType - -> IO () -doAddSource verbosity buildTreeRefs sandboxDir pkgEnv refType = do - let savedConfig = pkgEnvSavedConfig pkgEnv - indexFile <- tryGetIndexFilePath verbosity savedConfig - - -- If we're running 'sandbox add-source' for the first time for this compiler, - -- we need to create an initial timestamp record. - (comp, platform, _) <- configCompilerAuxEx . savedConfigureFlags $ savedConfig - maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile - (compilerId comp) platform - - withAddTimestamps verbosity sandboxDir $ do - -- Path canonicalisation is done in addBuildTreeRefs, but we do it - -- twice because of the timestamps file. 
- buildTreeRefs' <- mapM tryCanonicalizePath buildTreeRefs - Index.addBuildTreeRefs verbosity indexFile buildTreeRefs' refType - return buildTreeRefs' - --- | Entry point for the 'cabal sandbox add-source' command. -sandboxAddSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags - -> IO () -sandboxAddSource verbosity buildTreeRefs sandboxFlags globalFlags = do - (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags - - if fromFlagOrDefault False (sandboxSnapshot sandboxFlags) - then sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv - else doAddSource verbosity buildTreeRefs sandboxDir pkgEnv LinkRef - --- | Entry point for the 'cabal sandbox add-source --snapshot' command. -sandboxAddSourceSnapshot :: Verbosity -> [FilePath] -> FilePath - -> PackageEnvironment - -> IO () -sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv = do - let snapshotDir = sandboxDir snapshotDirectoryName - - -- Use 'D.S.SrcDist.prepareTree' to copy each package's files to our private - -- location. - createDirectoryIfMissingVerbose verbosity True snapshotDir - - -- Collect the package descriptions first, so that if some path does not refer - -- to a cabal package, we fail immediately. - pkgs <- forM buildTreeRefs $ \buildTreeRef -> - inDir (Just buildTreeRef) $ - return . flattenPackageDescription - =<< readGenericPackageDescription verbosity - =<< defaultPackageDesc verbosity - - -- Copy the package sources to "snapshots/$PKGNAME-$VERSION-tmp". If - -- 'prepareTree' throws an error at any point, the old snapshots will still be - -- in consistent state. - tmpDirs <- forM (zip buildTreeRefs pkgs) $ \(buildTreeRef, pkg) -> - inDir (Just buildTreeRef) $ do - let targetDir = snapshotDir (display . 
packageId $ pkg) - targetTmpDir = targetDir ++ "-tmp" - dirExists <- doesDirectoryExist targetTmpDir - when dirExists $ - removeDirectoryRecursive targetDir - createDirectory targetTmpDir - prepareTree verbosity pkg Nothing targetTmpDir knownSuffixHandlers - return (targetTmpDir, targetDir) - - -- Now rename the "snapshots/$PKGNAME-$VERSION-tmp" dirs to - -- "snapshots/$PKGNAME-$VERSION". - snapshots <- forM tmpDirs $ \(targetTmpDir, targetDir) -> do - dirExists <- doesDirectoryExist targetDir - when dirExists $ - removeDirectoryRecursive targetDir - renameDirectory targetTmpDir targetDir - return targetDir - - -- Once the packages are copied, just 'add-source' them as usual. - doAddSource verbosity snapshots sandboxDir pkgEnv SnapshotRef - --- | Entry point for the 'cabal sandbox delete-source' command. -sandboxDeleteSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags - -> IO () -sandboxDeleteSource verbosity buildTreeRefs _sandboxFlags globalFlags = do - (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags - indexFile <- tryGetIndexFilePath verbosity (pkgEnvSavedConfig pkgEnv) - - (results, convDict) <- - Index.removeBuildTreeRefs verbosity indexFile buildTreeRefs - - let (failedPaths, removedPaths) = partitionEithers results - removedRefs = fmap convDict removedPaths - - unless (null removedPaths) $ do - removeTimestamps verbosity sandboxDir removedPaths - - notice verbosity $ "Success deleting sources: " ++ - showL removedRefs ++ "\n\n" - - unless (null failedPaths) $ do - let groupedFailures = groupBy errorType failedPaths - mapM_ handleErrors groupedFailures - die' verbosity $ "The sources with the above errors were skipped. (" ++ - showL (fmap getPath failedPaths) ++ ")" - - notice verbosity $ "Note: 'sandbox delete-source' only unregisters the " ++ - "source dependency, but does not remove the package " ++ - "from the sandbox package DB.\n\n" ++ - "Use 'sandbox hc-pkg -- unregister' to do that." 
- where - getPath (Index.ErrNonregisteredSource p) = p - getPath (Index.ErrNonexistentSource p) = p - - showPaths f = concat . intersperse " " . fmap (show . f) - - showL = showPaths id - - showE [] = return ' ' - showE errs = showPaths getPath errs - - errorType Index.ErrNonregisteredSource{} Index.ErrNonregisteredSource{} = - True - errorType Index.ErrNonexistentSource{} Index.ErrNonexistentSource{} = True - errorType _ _ = False - - handleErrors [] = return () - handleErrors errs@(Index.ErrNonregisteredSource{}:_) = - warn verbosity ("Sources not registered: " ++ showE errs ++ "\n\n") - handleErrors errs@(Index.ErrNonexistentSource{}:_) = - warn verbosity - ("Source directory not found for paths: " ++ showE errs ++ "\n" - ++ "If you are trying to delete a reference to a removed directory, " - ++ "please provide the full absolute path " - ++ "(as given by `sandbox list-sources`).\n\n") - --- | Entry point for the 'cabal sandbox list-sources' command. -sandboxListSources :: Verbosity -> SandboxFlags -> GlobalFlags - -> IO () -sandboxListSources verbosity _sandboxFlags globalFlags = do - (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags - indexFile <- tryGetIndexFilePath verbosity (pkgEnvSavedConfig pkgEnv) - - refs <- Index.listBuildTreeRefs verbosity - Index.ListIgnored Index.LinksAndSnapshots indexFile - when (null refs) $ - notice verbosity $ "Index file '" ++ indexFile - ++ "' has no references to local build trees." - when (not . null $ refs) $ do - notice verbosity $ "Source dependencies registered " - ++ "in the current sandbox ('" ++ sandboxDir ++ "'):\n\n" - mapM_ putStrLn refs - notice verbosity $ "\nTo unregister source dependencies, " - ++ "use the 'sandbox delete-source' command." - --- | Entry point for the 'cabal sandbox hc-pkg' command. Invokes the @hc-pkg@ --- tool with provided arguments, restricted to the sandbox. 
-sandboxHcPkg :: Verbosity -> SandboxFlags -> GlobalFlags -> [String] -> IO () -sandboxHcPkg verbosity _sandboxFlags globalFlags extraArgs = do - (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags - let configFlags = savedConfigureFlags . pkgEnvSavedConfig $ pkgEnv - -- Invoke hc-pkg for the most recently configured compiler (if any), - -- using the right package-db for the compiler (see #1935). - (comp, platform, progdb) <- getPersistOrConfigCompiler configFlags - let dir = sandboxPackageDBPath sandboxDir comp platform - dbStack = [GlobalPackageDB, SpecificPackageDB dir] - Register.invokeHcPkg verbosity comp progdb dbStack extraArgs - -updateInstallDirs :: Flag Bool - -> (UseSandbox, SavedConfig) -> (UseSandbox, SavedConfig) -updateInstallDirs userInstallFlag (useSandbox, savedConfig) = - case useSandbox of - NoSandbox -> - let savedConfig' = savedConfig { - savedConfigureFlags = configureFlags { - configInstallDirs = installDirs - } - } - in (useSandbox, savedConfig') - _ -> (useSandbox, savedConfig) - where - configureFlags = savedConfigureFlags savedConfig - userInstallDirs = savedUserInstallDirs savedConfig - globalInstallDirs = savedGlobalInstallDirs savedConfig - installDirs | userInstall = userInstallDirs - | otherwise = globalInstallDirs - userInstall = fromFlagOrDefault defaultUserInstall - (configUserInstall configureFlags `mappend` userInstallFlag) - --- | Check which type of package environment we're in and return a --- correctly-initialised @SavedConfig@ and a @UseSandbox@ value that indicates --- whether we're working in a sandbox. -loadConfigOrSandboxConfig :: Verbosity - -> GlobalFlags -- ^ For @--config-file@ and - -- @--sandbox-config-file@. 
- -> IO (UseSandbox, SavedConfig) -loadConfigOrSandboxConfig verbosity globalFlags = do - let configFileFlag = globalConfigFile globalFlags - sandboxConfigFileFlag = globalSandboxConfigFile globalFlags - ignoreSandboxFlag = globalIgnoreSandbox globalFlags - - pkgEnvDir <- getPkgEnvDir sandboxConfigFileFlag - pkgEnvType <- classifyPackageEnvironment pkgEnvDir sandboxConfigFileFlag - ignoreSandboxFlag - case pkgEnvType of - -- A @cabal.sandbox.config@ file (and possibly @cabal.config@) is present. - SandboxPackageEnvironment -> do - (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags - -- Prints an error message and exits on error. - let config = pkgEnvSavedConfig pkgEnv - return (UseSandbox sandboxDir, config) - - -- Only @cabal.config@ is present. - UserPackageEnvironment -> do - config <- loadConfig verbosity configFileFlag - userConfig <- loadUserConfig verbosity pkgEnvDir Nothing - let config' = config `mappend` userConfig - dieIfSandboxRequired config' - return (NoSandbox, config') - - -- Neither @cabal.sandbox.config@ nor @cabal.config@ are present. - AmbientPackageEnvironment -> do - config <- loadConfig verbosity configFileFlag - let globalConstraintsOpt = - flagToMaybe . globalConstraintsFile . savedGlobalFlags $ config - globalConstraintConfig <- - loadUserConfig verbosity pkgEnvDir globalConstraintsOpt - let config' = config `mappend` globalConstraintConfig - dieIfSandboxRequired config - return (NoSandbox, config') - - where - -- Return the path to the package environment directory - either the - -- current directory or the one that @--sandbox-config-file@ resides in. - getPkgEnvDir :: (Flag FilePath) -> IO FilePath - getPkgEnvDir sandboxConfigFileFlag = do - case sandboxConfigFileFlag of - NoFlag -> getCurrentDirectory - Flag path -> tryCanonicalizePath . takeDirectory $ path - - -- Die if @--require-sandbox@ was specified and we're not inside a sandbox. 
- dieIfSandboxRequired :: SavedConfig -> IO () - dieIfSandboxRequired config = checkFlag flag - where - flag = (globalRequireSandbox . savedGlobalFlags $ config) - `mappend` (globalRequireSandbox globalFlags) - checkFlag (Flag True) = - die' verbosity $ "'require-sandbox' is set to True, but no sandbox is present. " - ++ "Use '--no-require-sandbox' if you want to override " - ++ "'require-sandbox' temporarily." - checkFlag (Flag False) = return () - checkFlag (NoFlag) = return () - --- | Return the saved \"dist/\" prefix, or the default prefix. -findSavedDistPref :: SavedConfig -> Flag FilePath -> IO FilePath -findSavedDistPref config flagDistPref = do - let defDistPref = useDistPref defaultSetupScriptOptions - flagDistPref' = configDistPref (savedConfigureFlags config) - `mappend` flagDistPref - findDistPref defDistPref flagDistPref' - --- | If we're in a sandbox, call @withSandboxBinDirOnSearchPath@, otherwise do --- nothing. -maybeWithSandboxDirOnSearchPath :: UseSandbox -> IO a -> IO a -maybeWithSandboxDirOnSearchPath NoSandbox act = act -maybeWithSandboxDirOnSearchPath (UseSandbox sandboxDir) act = - withSandboxBinDirOnSearchPath sandboxDir $ act - --- | Had reinstallAddSourceDeps actually reinstalled any dependencies? -data WereDepsReinstalled = ReinstalledSomeDeps | NoDepsReinstalled - --- | Reinstall those add-source dependencies that have been modified since --- we've last installed them. Assumes that we're working inside a sandbox. 
-reinstallAddSourceDeps :: Verbosity - -> ConfigFlags -> ConfigExFlags - -> InstallFlags -> GlobalFlags - -> FilePath - -> IO WereDepsReinstalled -reinstallAddSourceDeps verbosity configFlags' configExFlags - installFlags globalFlags sandboxDir = topHandler' $ do - let sandboxDistPref = sandboxBuildDir sandboxDir - configFlags = configFlags' - { configDistPref = Flag sandboxDistPref } - haddockFlags = mempty - { haddockDistPref = Flag sandboxDistPref } - (comp, platform, progdb) <- configCompilerAux' configFlags - retVal <- newIORef NoDepsReinstalled - - withSandboxPackageInfo verbosity configFlags globalFlags - comp platform progdb sandboxDir $ \sandboxPkgInfo -> - unless (null $ modifiedAddSourceDependencies sandboxPkgInfo) $ do - - withRepoContext verbosity globalFlags $ \repoContext -> do - let args :: InstallArgs - args = ((configPackageDB' configFlags) - ,repoContext - ,comp, platform, progdb - ,UseSandbox sandboxDir, Just sandboxPkgInfo - ,globalFlags, configFlags, configExFlags, installFlags - ,haddockFlags) - - -- This can actually be replaced by a call to 'install', but we use a - -- lower-level API because of layer separation reasons. Additionally, we - -- might want to use some lower-level features this in the future. - withSandboxBinDirOnSearchPath sandboxDir $ do - installContext <- makeInstallContext verbosity args Nothing - installPlan <- foldProgress logMsg die'' return =<< - makeInstallPlan verbosity args installContext - - processInstallPlan verbosity args installContext installPlan - writeIORef retVal ReinstalledSomeDeps - - readIORef retVal - - where - die'' message = die' verbosity (message ++ installFailedInSandbox) - -- TODO: use a better error message, remove duplication. - installFailedInSandbox = - "Note: when using a sandbox, all packages are required to have " - ++ "consistent dependencies. Try reinstalling/unregistering the " - ++ "offending packages or recreating the sandbox." 
- logMsg message rest = debugNoWrap verbosity message >> rest - - topHandler' = topHandlerWith $ \_ -> do - warn verbosity "Couldn't reinstall some add-source dependencies." - -- Here we can't know whether any deps have been reinstalled, so we have - -- to be conservative. - return ReinstalledSomeDeps - --- | Produce a 'SandboxPackageInfo' and feed it to the given action. Note that --- we don't update the timestamp file here - this is done in --- 'postInstallActions'. -withSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags - -> Compiler -> Platform -> ProgramDb - -> FilePath - -> (SandboxPackageInfo -> IO ()) - -> IO () -withSandboxPackageInfo verbosity configFlags globalFlags - comp platform progdb sandboxDir cont = do - -- List all add-source deps. - indexFile <- tryGetIndexFilePath' verbosity globalFlags - buildTreeRefs <- Index.listBuildTreeRefs verbosity - Index.DontListIgnored Index.OnlyLinks indexFile - let allAddSourceDepsSet = S.fromList buildTreeRefs - - -- List all packages installed in the sandbox. - installedPkgIndex <- getInstalledPackagesInSandbox verbosity - configFlags comp progdb - let err = "Error reading sandbox package information." - -- Get the package descriptions for all add-source deps. - depsCabalFiles <- mapM (flip (tryFindAddSourcePackageDesc verbosity) err) buildTreeRefs - depsPkgDescs <- mapM (readGenericPackageDescription verbosity) depsCabalFiles - let depsMap = M.fromList (zip buildTreeRefs depsPkgDescs) - isInstalled pkgid = not . null - . InstalledPackageIndex.lookupSourcePackageId installedPkgIndex $ pkgid - installedDepsMap = M.filter (isInstalled . packageId) depsMap - - -- Get the package ids of modified (and installed) add-source deps. - modifiedAddSourceDeps <- listModifiedDeps verbosity sandboxDir - (compilerId comp) platform installedDepsMap - -- 'fromJust' here is safe because 'modifiedAddSourceDeps' are guaranteed to - -- be a subset of the keys of 'depsMap'. 
- let modifiedDeps = [ (modDepPath, fromJust $ M.lookup modDepPath depsMap) - | modDepPath <- modifiedAddSourceDeps ] - modifiedDepsMap = M.fromList modifiedDeps - - assert (all (`S.member` allAddSourceDepsSet) modifiedAddSourceDeps) (return ()) - if (null modifiedDeps) - then info verbosity $ "Found no modified add-source deps." - else notice verbosity $ "Some add-source dependencies have been modified. " - ++ "They will be reinstalled..." - - -- Get the package ids of the remaining add-source deps (some are possibly not - -- installed). - let otherDeps = M.assocs (depsMap `M.difference` modifiedDepsMap) - - -- Finally, assemble a 'SandboxPackageInfo'. - cont $ SandboxPackageInfo (map toSourcePackage modifiedDeps) - (map toSourcePackage otherDeps) installedPkgIndex allAddSourceDepsSet - - where - toSourcePackage (path, pkgDesc) = SourcePackage - (packageId pkgDesc) pkgDesc (LocalUnpackedPackage path) Nothing - --- | Same as 'withSandboxPackageInfo' if we're inside a sandbox and the --- identity otherwise. -maybeWithSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags - -> Compiler -> Platform -> ProgramDb - -> UseSandbox - -> (Maybe SandboxPackageInfo -> IO ()) - -> IO () -maybeWithSandboxPackageInfo verbosity configFlags globalFlags - comp platform progdb useSandbox cont = - case useSandbox of - NoSandbox -> cont Nothing - UseSandbox sandboxDir -> withSandboxPackageInfo verbosity - configFlags globalFlags - comp platform progdb sandboxDir - (\spi -> cont (Just spi)) - --- | Check if a sandbox is present and call @reinstallAddSourceDeps@ in that --- case. 
-maybeReinstallAddSourceDeps :: Verbosity - -> Flag (Maybe Int) -- ^ The '-j' flag - -> ConfigFlags -- ^ Saved configure flags - -- (from dist/setup-config) - -> GlobalFlags - -> (UseSandbox, SavedConfig) - -> IO WereDepsReinstalled -maybeReinstallAddSourceDeps verbosity numJobsFlag configFlags' - globalFlags' (useSandbox, config) = do - case useSandbox of - NoSandbox -> return NoDepsReinstalled - UseSandbox sandboxDir -> do - -- Reinstall the modified add-source deps. - let configFlags = savedConfigureFlags config - `mappendSomeSavedFlags` - configFlags' - configExFlags = defaultConfigExFlags - `mappend` savedConfigureExFlags config - installFlags' = defaultInstallFlags - `mappend` savedInstallFlags config - installFlags = installFlags' { - installNumJobs = installNumJobs installFlags' - `mappend` numJobsFlag - } - globalFlags = savedGlobalFlags config - -- This makes it possible to override things like 'remote-repo-cache' - -- from the command line. These options are hidden, and are only - -- useful for debugging, so this should be fine. - `mappend` globalFlags' - reinstallAddSourceDeps - verbosity configFlags configExFlags - installFlags globalFlags sandboxDir - - where - - -- NOTE: we can't simply do @sandboxConfigFlags `mappend` savedFlags@ - -- because we don't want to auto-enable things like 'library-profiling' for - -- all add-source dependencies even if the user has passed - -- '--enable-library-profiling' to 'cabal configure'. These options are - -- supposed to be set in 'cabal.config'. 
- mappendSomeSavedFlags :: ConfigFlags -> ConfigFlags -> ConfigFlags - mappendSomeSavedFlags sandboxConfigFlags savedFlags = - sandboxConfigFlags { - configHcFlavor = configHcFlavor sandboxConfigFlags - `mappend` configHcFlavor savedFlags, - configHcPath = configHcPath sandboxConfigFlags - `mappend` configHcPath savedFlags, - configHcPkg = configHcPkg sandboxConfigFlags - `mappend` configHcPkg savedFlags, - configProgramPaths = configProgramPaths sandboxConfigFlags - `mappend` configProgramPaths savedFlags, - configProgramArgs = configProgramArgs sandboxConfigFlags - `mappend` configProgramArgs savedFlags, - -- NOTE: Unconditionally choosing the value from - -- 'dist/setup-config'. Sandbox package DB location may have been - -- changed by 'configure -w'. - configPackageDBs = configPackageDBs savedFlags - -- FIXME: Is this compatible with the 'inherit' feature? - } - --- --- Utils (transitionary) --- - --- | Try to read the most recently configured compiler from the --- 'localBuildInfoFile', falling back on 'configCompilerAuxEx' if it --- cannot be read. 
-getPersistOrConfigCompiler :: ConfigFlags - -> IO (Compiler, Platform, ProgramDb) -getPersistOrConfigCompiler configFlags = do - distPref <- findDistPrefOrDefault (configDistPref configFlags) - mlbi <- maybeGetPersistBuildConfig distPref - case mlbi of - Nothing -> do configCompilerAux' configFlags - Just lbi -> return ( LocalBuildInfo.compiler lbi - , LocalBuildInfo.hostPlatform lbi - , LocalBuildInfo.withPrograms lbi - ) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/SavedFlags.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/SavedFlags.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/SavedFlags.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/SavedFlags.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,83 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} - -module Distribution.Client.SavedFlags - ( readCommandFlags, writeCommandFlags - , readSavedArgs, writeSavedArgs - ) where - -import Distribution.Simple.Command -import Distribution.Simple.UserHooks ( Args ) -import Distribution.Simple.Utils - ( createDirectoryIfMissingVerbose, unintersperse ) -import Distribution.Verbosity - -import Control.Exception ( Exception, throwIO ) -import Control.Monad ( liftM ) -import Data.List ( intercalate ) -import Data.Maybe ( fromMaybe ) -import Data.Typeable -import System.Directory ( doesFileExist ) -import System.FilePath ( takeDirectory ) - - -writeSavedArgs :: Verbosity -> FilePath -> [String] -> IO () -writeSavedArgs verbosity path args = do - createDirectoryIfMissingVerbose - (lessVerbose verbosity) True (takeDirectory path) - writeFile path (intercalate "\0" args) - - --- | Write command-line flags to a file, separated by null characters. This --- format is also suitable for the @xargs -0@ command. 
Using the null --- character also avoids the problem of escaping newlines or spaces, --- because unlike other whitespace characters, the null character is --- not valid in command-line arguments. -writeCommandFlags :: Verbosity -> FilePath -> CommandUI flags -> flags -> IO () -writeCommandFlags verbosity path command flags = - writeSavedArgs verbosity path (commandShowOptions command flags) - - -readSavedArgs :: FilePath -> IO (Maybe [String]) -readSavedArgs path = do - exists <- doesFileExist path - if exists - then liftM (Just . unintersperse '\0') (readFile path) - else return Nothing - - --- | Read command-line arguments, separated by null characters, from a file. --- Returns the default flags if the file does not exist. -readCommandFlags :: FilePath -> CommandUI flags -> IO flags -readCommandFlags path command = do - savedArgs <- liftM (fromMaybe []) (readSavedArgs path) - case (commandParseArgs command True savedArgs) of - CommandHelp _ -> throwIO (SavedArgsErrorHelp savedArgs) - CommandList _ -> throwIO (SavedArgsErrorList savedArgs) - CommandErrors errs -> throwIO (SavedArgsErrorOther savedArgs errs) - CommandReadyToGo (mkFlags, _) -> - return (mkFlags (commandDefaultFlags command)) - --- ----------------------------------------------------------------------------- --- * Exceptions --- ----------------------------------------------------------------------------- - -data SavedArgsError - = SavedArgsErrorHelp Args - | SavedArgsErrorList Args - | SavedArgsErrorOther Args [String] - deriving (Typeable) - -instance Show SavedArgsError where - show (SavedArgsErrorHelp args) = - "unexpected flag '--help', saved command line was:\n" - ++ intercalate " " args - show (SavedArgsErrorList args) = - "unexpected flag '--list-options', saved command line was:\n" - ++ intercalate " " args - show (SavedArgsErrorOther args errs) = - "saved command line was:\n" - ++ intercalate " " args ++ "\n" - ++ "encountered errors:\n" - ++ intercalate "\n" errs - -instance Exception 
SavedArgsError diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Security/DNS.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Security/DNS.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Security/DNS.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Security/DNS.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,196 +0,0 @@ -{-# LANGUAGE CPP #-} - -module Distribution.Client.Security.DNS - ( queryBootstrapMirrors - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude -import Network.URI (URI(..), URIAuth(..), parseURI) -import Distribution.Verbosity -import Control.Monad -import Control.DeepSeq (force) -import Control.Exception (SomeException, evaluate, try) -import Distribution.Simple.Utils -import Distribution.Compat.Exception (displayException) - -#if defined(MIN_VERSION_resolv) || defined(MIN_VERSION_windns) -import Network.DNS (queryTXT, Name(..), CharStr(..)) -import qualified Data.ByteString.Char8 as BS.Char8 -#else -import Distribution.Simple.Program.Db - ( emptyProgramDb, addKnownProgram - , configureAllKnownPrograms, lookupProgram ) -import Distribution.Simple.Program - ( simpleProgram - , programInvocation - , getProgramInvocationOutput ) -#endif - --- | Try to lookup RFC1464-encoded mirror urls for a Hackage --- repository url by performing a DNS TXT lookup on the --- @_mirrors.@-prefixed URL hostname. --- --- Example: for @http://hackage.haskell.org/@ --- perform a DNS TXT query for the hostname --- @_mirrors.hackage.haskell.org@ which may look like e.g. --- --- > _mirrors.hackage.haskell.org. 
300 IN TXT --- > "0.urlbase=http://hackage.fpcomplete.com/" --- > "1.urlbase=http://objects-us-west-1.dream.io/hackage-mirror/" --- --- NB: hackage-security doesn't require DNS lookups being trustworthy, --- as the trust is established via the cryptographically signed TUF --- meta-data that is retrieved from the resolved Hackage repository. --- Moreover, we already have to protect against a compromised --- @hackage.haskell.org@ DNS entry, so an the additional --- @_mirrors.hackage.haskell.org@ DNS entry in the same SOA doesn't --- constitute a significant new attack vector anyway. --- -queryBootstrapMirrors :: Verbosity -> URI -> IO [URI] - -#if defined(MIN_VERSION_resolv) || defined(MIN_VERSION_windns) --- use @resolv@ package for performing DNS queries -queryBootstrapMirrors verbosity repoUri - | Just auth <- uriAuthority repoUri = do - let mirrorsDnsName = Name (BS.Char8.pack ("_mirrors." ++ uriRegName auth)) - - mirrors' <- try $ do - txts <- queryTXT mirrorsDnsName - evaluate (force $ extractMirrors (map snd txts)) - - mirrors <- case mirrors' of - Left e -> do - warn verbosity ("Caught exception during _mirrors lookup:"++ - displayException (e :: SomeException)) - return [] - Right v -> return v - - if null mirrors - then warn verbosity ("No mirrors found for " ++ show repoUri) - else do info verbosity ("located " ++ show (length mirrors) ++ - " mirrors for " ++ show repoUri ++ " :") - forM_ mirrors $ \url -> info verbosity ("- " ++ show url) - - return mirrors - - | otherwise = return [] - --- | Extract list of mirrors from 'queryTXT' result -extractMirrors :: [[CharStr]] -> [URI] -extractMirrors txtChunks = mapMaybe (parseURI . snd) . 
sort $ vals - where - vals = [ (kn,v) | CharStr e <- concat txtChunks - , Just (k,v) <- [splitRfc1464 (BS.Char8.unpack e)] - , Just kn <- [isUrlBase k] - ] - ----------------------------------------------------------------------------- -#else /* !defined(MIN_VERSION_resolv) */ --- use external method via @nslookup@ -queryBootstrapMirrors verbosity repoUri - | Just auth <- uriAuthority repoUri = do - progdb <- configureAllKnownPrograms verbosity $ - addKnownProgram nslookupProg emptyProgramDb - - case lookupProgram nslookupProg progdb of - Nothing -> do - warn verbosity "'nslookup' tool missing - can't locate mirrors" - return [] - - Just nslookup -> do - let mirrorsDnsName = "_mirrors." ++ uriRegName auth - - mirrors' <- try $ do - out <- getProgramInvocationOutput verbosity $ - programInvocation nslookup ["-query=TXT", mirrorsDnsName] - evaluate (force $ extractMirrors mirrorsDnsName out) - - mirrors <- case mirrors' of - Left e -> do - warn verbosity ("Caught exception during _mirrors lookup:"++ - displayException (e :: SomeException)) - return [] - Right v -> return v - - if null mirrors - then warn verbosity ("No mirrors found for " ++ show repoUri) - else do info verbosity ("located " ++ show (length mirrors) ++ - " mirrors for " ++ show repoUri ++ " :") - forM_ mirrors $ \url -> info verbosity ("- " ++ show url) - - return mirrors - - | otherwise = return [] - where - nslookupProg = simpleProgram "nslookup" - --- | Extract list of mirrors from @nslookup -query=TXT@ output. -extractMirrors :: String -> String -> [URI] -extractMirrors hostname s0 = mapMaybe (parseURI . snd) . 
sort $ vals - where - vals = [ (kn,v) | (h,ents) <- fromMaybe [] $ parseNsLookupTxt s0 - , h == hostname - , e <- ents - , Just (k,v) <- [splitRfc1464 e] - , Just kn <- [isUrlBase k] - ] - --- | Parse output of @nslookup -query=TXT $HOSTNAME@ tolerantly -parseNsLookupTxt :: String -> Maybe [(String,[String])] -parseNsLookupTxt = go0 [] [] - where - -- approximate grammar: - -- := { } - -- ( starts at begin of line, but may span multiple lines) - -- := ^ TAB "text =" { } - -- := string enclosed by '"'s ('\' and '"' are \-escaped) - - -- scan for ^ "text =" - go0 [] _ [] = Nothing - go0 res _ [] = Just (reverse res) - go0 res _ ('\n':xs) = go0 res [] xs - go0 res lw ('\t':'t':'e':'x':'t':' ':'=':xs) = go1 res (reverse lw) [] (dropWhile isSpace xs) - go0 res lw (x:xs) = go0 res (x:lw) xs - - -- collect at least one - go1 res lw qs ('"':xs) = case qstr "" xs of - Just (s, xs') -> go1 res lw (s:qs) (dropWhile isSpace xs') - Nothing -> Nothing -- bad quoting - go1 _ _ [] _ = Nothing -- missing qstring - go1 res lw qs xs = go0 ((lw,reverse qs):res) [] xs - - qstr _ ('\n':_) = Nothing -- We don't support unquoted LFs - qstr acc ('\\':'\\':cs) = qstr ('\\':acc) cs - qstr acc ('\\':'"':cs) = qstr ('"':acc) cs - qstr acc ('"':cs) = Just (reverse acc, cs) - qstr acc (c:cs) = qstr (c:acc) cs - qstr _ [] = Nothing - -#endif ----------------------------------------------------------------------------- - --- | Helper used by 'extractMirrors' for extracting @urlbase@ keys from Rfc1464-encoded data -isUrlBase :: String -> Maybe Int -isUrlBase s - | ".urlbase" `isSuffixOf` s, not (null ns), all isDigit ns = readMaybe ns - | otherwise = Nothing - where - ns = take (length s - 8) s - --- | Split a TXT string into key and value according to RFC1464. --- Returns 'Nothing' if parsing fails. 
-splitRfc1464 :: String -> Maybe (String,String) -splitRfc1464 = go "" - where - go _ [] = Nothing - go acc ('`':c:cs) = go (c:acc) cs - go acc ('=':cs) = go2 (reverse acc) "" cs - go acc (c:cs) - | isSpace c = go acc cs - | otherwise = go (c:acc) cs - - go2 k acc [] = Just (k,reverse acc) - go2 _ _ ['`'] = Nothing - go2 k acc ('`':c:cs) = go2 k (c:acc) cs - go2 k acc (c:cs) = go2 k (c:acc) cs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Security/HTTP.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Security/HTTP.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Security/HTTP.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Security/HTTP.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,174 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE FlexibleContexts #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE StandaloneDeriving #-} --- | Implementation of 'HttpLib' using cabal-install's own 'HttpTransport' -module Distribution.Client.Security.HTTP (HttpLib, transportAdapter) where - --- stdlibs -import Control.Exception - ( Exception(..), IOException ) -import Data.List - ( intercalate ) -import Data.Typeable - ( Typeable ) -import System.Directory - ( getTemporaryDirectory ) -import Network.URI - ( URI ) -import qualified Data.ByteString.Lazy as BS.L -import qualified Network.HTTP as HTTP - --- Cabal/cabal-install -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Client.HttpUtils - ( HttpTransport(..), HttpCode ) -import Distribution.Client.Utils - ( withTempFileName ) - --- hackage-security -import Hackage.Security.Client -import Hackage.Security.Client.Repository.HttpLib -import Hackage.Security.Util.Checked -import 
Hackage.Security.Util.Pretty -import qualified Hackage.Security.Util.Lens as Lens - -{------------------------------------------------------------------------------- - 'HttpLib' implementation --------------------------------------------------------------------------------} - --- | Translate from hackage-security's 'HttpLib' to cabal-install's 'HttpTransport' --- --- NOTE: The match between these two APIs is currently not perfect: --- --- * We don't get any response headers back from the 'HttpTransport', so we --- don't know if the server supports range requests. For now we optimistically --- assume that it does. --- * The 'HttpTransport' wants to know where to place the resulting file, --- whereas the 'HttpLib' expects an 'IO' action which streams the download; --- the security library then makes sure that the file gets written to a --- location which is suitable (in particular, to a temporary file in the --- directory where the file needs to end up, so that it can "finalize" the --- file simply by doing 'renameFile'). Right now we write the file to a --- temporary file in the system temp directory here and then read it again --- to pass it to the security library; this is a problem for two reasons: it --- is a source of inefficiency; and it means that the security library cannot --- insist on a minimum download rate (potential security attack). --- Fixing it however would require changing the 'HttpTransport'. 
-transportAdapter :: Verbosity -> IO HttpTransport -> HttpLib -transportAdapter verbosity getTransport = HttpLib{ - httpGet = \headers uri callback -> do - transport <- getTransport - get verbosity transport headers uri callback - , httpGetRange = \headers uri range callback -> do - transport <- getTransport - getRange verbosity transport headers uri range callback - } - -get :: Throws SomeRemoteError - => Verbosity - -> HttpTransport - -> [HttpRequestHeader] -> URI - -> ([HttpResponseHeader] -> BodyReader -> IO a) - -> IO a -get verbosity transport reqHeaders uri callback = wrapCustomEx $ do - get' verbosity transport reqHeaders uri Nothing $ \code respHeaders br -> - case code of - 200 -> callback respHeaders br - _ -> throwChecked $ UnexpectedResponse uri code - -getRange :: Throws SomeRemoteError - => Verbosity - -> HttpTransport - -> [HttpRequestHeader] -> URI -> (Int, Int) - -> (HttpStatus -> [HttpResponseHeader] -> BodyReader -> IO a) - -> IO a -getRange verbosity transport reqHeaders uri range callback = wrapCustomEx $ do - get' verbosity transport reqHeaders uri (Just range) $ \code respHeaders br -> - case code of - 200 -> callback HttpStatus200OK respHeaders br - 206 -> callback HttpStatus206PartialContent respHeaders br - _ -> throwChecked $ UnexpectedResponse uri code - --- | Internal generalization of 'get' and 'getRange' -get' :: Verbosity - -> HttpTransport - -> [HttpRequestHeader] -> URI -> Maybe (Int, Int) - -> (HttpCode -> [HttpResponseHeader] -> BodyReader -> IO a) - -> IO a -get' verbosity transport reqHeaders uri mRange callback = do - tempDir <- getTemporaryDirectory - withTempFileName tempDir "transportAdapterGet" $ \temp -> do - (code, _etag) <- getHttp transport verbosity uri Nothing temp reqHeaders' - br <- bodyReaderFromBS =<< BS.L.readFile temp - callback code [HttpResponseAcceptRangesBytes] br - where - reqHeaders' = mkReqHeaders reqHeaders mRange - -{------------------------------------------------------------------------------- - 
Request headers --------------------------------------------------------------------------------} - -mkRangeHeader :: Int -> Int -> HTTP.Header -mkRangeHeader from to = HTTP.Header HTTP.HdrRange rangeHeader - where - -- Content-Range header uses inclusive rather than exclusive bounds - -- See - rangeHeader = "bytes=" ++ show from ++ "-" ++ show (to - 1) - -mkReqHeaders :: [HttpRequestHeader] -> Maybe (Int, Int) -> [HTTP.Header] -mkReqHeaders reqHeaders mRange = concat [ - tr [] reqHeaders - , [mkRangeHeader fr to | Just (fr, to) <- [mRange]] - ] - where - tr :: [(HTTP.HeaderName, [String])] -> [HttpRequestHeader] -> [HTTP.Header] - tr acc [] = - concatMap finalize acc - tr acc (HttpRequestMaxAge0:os) = - tr (insert HTTP.HdrCacheControl ["max-age=0"] acc) os - tr acc (HttpRequestNoTransform:os) = - tr (insert HTTP.HdrCacheControl ["no-transform"] acc) os - - -- Some headers are comma-separated, others need multiple headers for - -- multiple options. - -- - -- TODO: Right we we just comma-separate all of them. 
- finalize :: (HTTP.HeaderName, [String]) -> [HTTP.Header] - finalize (name, strs) = [HTTP.Header name (intercalate ", " (reverse strs))] - - insert :: Eq a => a -> [b] -> [(a, [b])] -> [(a, [b])] - insert x y = Lens.modify (Lens.lookupM x) (++ y) - -{------------------------------------------------------------------------------- - Custom exceptions --------------------------------------------------------------------------------} - -data UnexpectedResponse = UnexpectedResponse URI Int - deriving (Typeable) - -instance Pretty UnexpectedResponse where - pretty (UnexpectedResponse uri code) = "Unexpected response " ++ show code - ++ "for " ++ show uri - -#if MIN_VERSION_base(4,8,0) -deriving instance Show UnexpectedResponse -instance Exception UnexpectedResponse where displayException = pretty -#else -instance Show UnexpectedResponse where show = pretty -instance Exception UnexpectedResponse -#endif - -wrapCustomEx :: ( ( Throws UnexpectedResponse - , Throws IOException - ) => IO a) - -> (Throws SomeRemoteError => IO a) -wrapCustomEx act = handleChecked (\(ex :: UnexpectedResponse) -> go ex) - $ handleChecked (\(ex :: IOException) -> go ex) - $ act - where - go ex = throwChecked (SomeRemoteError ex) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Setup.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2845 +0,0 @@ -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE DeriveGeneric #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Setup --- Copyright : (c) 
David Himmelstrup 2005 --- License : BSD-like --- --- Maintainer : lemmih@gmail.com --- Stability : provisional --- Portability : portable --- --- ------------------------------------------------------------------------------ -module Distribution.Client.Setup - ( globalCommand, GlobalFlags(..), defaultGlobalFlags - , RepoContext(..), withRepoContext - , configureCommand, ConfigFlags(..), filterConfigureFlags - , configPackageDB', configCompilerAux' - , configureExCommand, ConfigExFlags(..), defaultConfigExFlags - , buildCommand, BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..) - , replCommand, testCommand, benchmarkCommand - , configureExOptions, reconfigureCommand - , installCommand, InstallFlags(..), installOptions, defaultInstallFlags - , filterHaddockArgs, filterHaddockFlags - , defaultSolver, defaultMaxBackjumps - , listCommand, ListFlags(..) - , updateCommand, UpdateFlags(..), defaultUpdateFlags - , upgradeCommand - , uninstallCommand - , infoCommand, InfoFlags(..) - , fetchCommand, FetchFlags(..) - , freezeCommand, FreezeFlags(..) - , genBoundsCommand - , outdatedCommand, OutdatedFlags(..), IgnoreMajorVersionBumps(..) - , getCommand, unpackCommand, GetFlags(..) - , checkCommand - , formatCommand - , uploadCommand, UploadFlags(..), IsCandidate(..) - , reportCommand, ReportFlags(..) - , runCommand - , initCommand, IT.InitFlags(..) - , sdistCommand, SDistFlags(..), SDistExFlags(..), ArchiveFormat(..) - , win32SelfUpgradeCommand, Win32SelfUpgradeFlags(..) - , actAsSetupCommand, ActAsSetupFlags(..) - , sandboxCommand, defaultSandboxLocation, SandboxFlags(..) - , execCommand, ExecFlags(..), defaultExecFlags - , userConfigCommand, UserConfigFlags(..) 
- , manpageCommand - , haddockCommand - , cleanCommand - , doctestCommand - , copyCommand - , registerCommand - - , parsePackageArgs - , liftOptions - --TODO: stop exporting these: - , showRepo - , parseRepo - , readRepo - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude hiding (get) - -import Distribution.Client.Types - ( Username(..), Password(..), RemoteRepo(..) - , AllowNewer(..), AllowOlder(..), RelaxDeps(..) - ) -import Distribution.Client.BuildReports.Types - ( ReportLevel(..) ) -import Distribution.Client.Dependency.Types - ( PreSolver(..) ) -import Distribution.Client.IndexUtils.Timestamp - ( IndexState(..) ) -import qualified Distribution.Client.Init.Types as IT - ( InitFlags(..), PackageType(..) ) -import Distribution.Client.Targets - ( UserConstraint, readUserConstraint ) -import Distribution.Utils.NubList - ( NubList, toNubList, fromNubList) - -import Distribution.Solver.Types.ConstraintSource -import Distribution.Solver.Types.Settings - -import Distribution.Simple.Compiler ( Compiler, PackageDB, PackageDBStack ) -import Distribution.Simple.Program (ProgramDb, defaultProgramDb) -import Distribution.Simple.Command hiding (boolOpt, boolOpt') -import qualified Distribution.Simple.Command as Command -import Distribution.Simple.Configure - ( configCompilerAuxEx, interpretPackageDbFlags, computeEffectiveProfiling ) -import qualified Distribution.Simple.Setup as Cabal -import Distribution.Simple.Setup - ( ConfigFlags(..), BuildFlags(..), ReplFlags - , TestFlags(..), BenchmarkFlags(..) - , SDistFlags(..), HaddockFlags(..) - , CleanFlags(..), DoctestFlags(..) - , CopyFlags(..), RegisterFlags(..) - , readPackageDbList, showPackageDbList - , Flag(..), toFlag, flagToMaybe, flagToList, maybeToFlag - , BooleanFlag(..), optionVerbosity - , boolOpt, boolOpt', trueArg, falseArg - , optionNumJobs ) -import Distribution.Simple.InstallDirs - ( PathTemplate, InstallDirs(..) 
- , toPathTemplate, fromPathTemplate, combinePathTemplate ) -import Distribution.Version - ( Version, mkVersion, nullVersion, anyVersion, thisVersion ) -import Distribution.Package - ( PackageIdentifier, PackageName, packageName, packageVersion ) -import Distribution.Types.Dependency -import Distribution.PackageDescription - ( BuildType(..), RepoKind(..) ) -import Distribution.System ( Platform ) -import Distribution.Text - ( Text(..), display ) -import Distribution.ReadE - ( ReadE(..), readP_to_E, succeedReadE ) -import qualified Distribution.Compat.ReadP as Parse - ( ReadP, char, munch1, pfail, sepBy1, (+++) ) -import Distribution.ParseUtils - ( readPToMaybe ) -import Distribution.Verbosity - ( Verbosity, lessVerbose, normal, verboseNoFlags, verboseNoTimestamp ) -import Distribution.Simple.Utils - ( wrapText, wrapLine ) -import Distribution.Client.GlobalFlags - ( GlobalFlags(..), defaultGlobalFlags - , RepoContext(..), withRepoContext - ) - -import Data.List - ( deleteFirstsBy ) -import System.FilePath - ( () ) -import Network.URI - ( parseAbsoluteURI, uriToString ) - -globalCommand :: [Command action] -> CommandUI GlobalFlags -globalCommand commands = CommandUI { - commandName = "", - commandSynopsis = - "Command line interface to the Haskell Cabal infrastructure.", - commandUsage = \pname -> - "See http://www.haskell.org/cabal/ for more information.\n" - ++ "\n" - ++ "Usage: " ++ pname ++ " [GLOBAL FLAGS] [COMMAND [FLAGS]]\n", - commandDescription = Just $ \pname -> - let - commands' = commands ++ [commandAddAction helpCommandUI undefined] - cmdDescs = getNormalCommandDescriptions commands' - -- if new commands are added, we want them to appear even if they - -- are not included in the custom listing below. Thus, we calculate - -- the `otherCmds` list and append it under the `other` category. - -- Alternatively, a new testcase could be added that ensures that - -- the set of commands listed here is equal to the set of commands - -- that are actually available. 
- otherCmds = deleteFirstsBy (==) (map fst cmdDescs) - [ "help" - , "update" - , "install" - , "fetch" - , "list" - , "info" - , "user-config" - , "get" - , "init" - , "configure" - , "reconfigure" - , "build" - , "clean" - , "run" - , "repl" - , "test" - , "bench" - , "check" - , "sdist" - , "upload" - , "report" - , "freeze" - , "gen-bounds" - , "outdated" - , "doctest" - , "haddock" - , "hscolour" - , "copy" - , "register" - , "sandbox" - , "exec" - , "new-build" - , "new-configure" - , "new-repl" - , "new-freeze" - , "new-run" - , "new-test" - , "new-bench" - , "new-haddock" - , "new-exec" - , "new-update" - , "new-install" - , "new-clean" - , "new-sdist" - -- v1 commands, stateful style - , "v1-build" - , "v1-configure" - , "v1-repl" - , "v1-freeze" - , "v1-run" - , "v1-test" - , "v1-bench" - , "v1-haddock" - , "v1-exec" - , "v1-update" - , "v1-install" - , "v1-clean" - , "v1-sdist" - , "v1-doctest" - , "v1-copy" - , "v1-register" - , "v1-reconfigure" - , "v1-sandbox" - -- v2 commands, nix-style - , "v2-build" - , "v2-configure" - , "v2-repl" - , "v2-freeze" - , "v2-run" - , "v2-test" - , "v2-bench" - , "v2-haddock" - , "v2-exec" - , "v2-update" - , "v2-install" - , "v2-clean" - , "v2-sdist" - ] - maxlen = maximum $ [length name | (name, _) <- cmdDescs] - align str = str ++ replicate (maxlen - length str) ' ' - startGroup n = " ["++n++"]" - par = "" - addCmd n = case lookup n cmdDescs of - Nothing -> "" - Just d -> " " ++ align n ++ " " ++ d - addCmdCustom n d = case lookup n cmdDescs of -- make sure that the - -- command still exists. 
- Nothing -> "" - Just _ -> " " ++ align n ++ " " ++ d - in - "Commands:\n" - ++ unlines ( - [ startGroup "global" - , addCmd "update" - , addCmd "install" - , par - , addCmd "help" - , addCmd "info" - , addCmd "list" - , addCmd "fetch" - , addCmd "user-config" - , par - , startGroup "package" - , addCmd "get" - , addCmd "init" - , par - , addCmd "configure" - , addCmd "build" - , addCmd "clean" - , par - , addCmd "run" - , addCmd "repl" - , addCmd "test" - , addCmd "bench" - , par - , addCmd "check" - , addCmd "sdist" - , addCmd "upload" - , addCmd "report" - , par - , addCmd "freeze" - , addCmd "gen-bounds" - , addCmd "outdated" - , addCmd "doctest" - , addCmd "haddock" - , addCmd "hscolour" - , addCmd "copy" - , addCmd "register" - , addCmd "reconfigure" - , par - , startGroup "sandbox" - , addCmd "sandbox" - , addCmd "exec" - , addCmdCustom "repl" "Open interpreter with access to sandbox packages." - , par - , startGroup "new-style projects (beta)" - , addCmd "new-build" - , addCmd "new-configure" - , addCmd "new-repl" - , addCmd "new-run" - , addCmd "new-test" - , addCmd "new-bench" - , addCmd "new-freeze" - , addCmd "new-haddock" - , addCmd "new-exec" - , addCmd "new-update" - , addCmd "new-install" - , addCmd "new-clean" - , addCmd "new-sdist" - , par - , startGroup "new-style projects (forwards-compatible aliases)" - , addCmd "v2-build" - , addCmd "v2-configure" - , addCmd "v2-repl" - , addCmd "v2-run" - , addCmd "v2-test" - , addCmd "v2-bench" - , addCmd "v2-freeze" - , addCmd "v2-haddock" - , addCmd "v2-exec" - , addCmd "v2-update" - , addCmd "v2-install" - , addCmd "v2-clean" - , addCmd "v2-sdist" - , par - , startGroup "legacy command aliases" - , addCmd "v1-build" - , addCmd "v1-configure" - , addCmd "v1-repl" - , addCmd "v1-run" - , addCmd "v1-test" - , addCmd "v1-bench" - , addCmd "v1-freeze" - , addCmd "v1-haddock" - , addCmd "v1-exec" - , addCmd "v1-update" - , addCmd "v1-install" - , addCmd "v1-clean" - , addCmd "v1-sdist" - , addCmd "v1-doctest" 
- , addCmd "v1-copy" - , addCmd "v1-register" - , addCmd "v1-reconfigure" - , addCmd "v1-sandbox" - ] ++ if null otherCmds then [] else par - :startGroup "other" - :[addCmd n | n <- otherCmds]) - ++ "\n" - ++ "For more information about a command use:\n" - ++ " " ++ pname ++ " COMMAND --help\n" - ++ "or " ++ pname ++ " help COMMAND\n" - ++ "\n" - ++ "To install Cabal packages from hackage use:\n" - ++ " " ++ pname ++ " install foo [--dry-run]\n" - ++ "\n" - ++ "Occasionally you need to update the list of available packages:\n" - ++ " " ++ pname ++ " update\n", - commandNotes = Nothing, - commandDefaultFlags = mempty, - commandOptions = args - } - where - args :: ShowOrParseArgs -> [OptionField GlobalFlags] - args ShowArgs = argsShown - args ParseArgs = argsShown ++ argsNotShown - - -- arguments we want to show in the help - argsShown :: [OptionField GlobalFlags] - argsShown = [ - option ['V'] ["version"] - "Print version information" - globalVersion (\v flags -> flags { globalVersion = v }) - trueArg - - ,option [] ["numeric-version"] - "Print just the version number" - globalNumericVersion (\v flags -> flags { globalNumericVersion = v }) - trueArg - - ,option [] ["config-file"] - "Set an alternate location for the config file" - globalConfigFile (\v flags -> flags { globalConfigFile = v }) - (reqArgFlag "FILE") - - ,option [] ["sandbox-config-file"] - "Set an alternate location for the sandbox config file (default: './cabal.sandbox.config')" - globalSandboxConfigFile (\v flags -> flags { globalSandboxConfigFile = v }) - (reqArgFlag "FILE") - - ,option [] ["default-user-config"] - "Set a location for a cabal.config file for projects without their own cabal.config freeze file." 
- globalConstraintsFile (\v flags -> flags {globalConstraintsFile = v}) - (reqArgFlag "FILE") - - ,option [] ["require-sandbox"] - "requiring the presence of a sandbox for sandbox-aware commands" - globalRequireSandbox (\v flags -> flags { globalRequireSandbox = v }) - (boolOpt' ([], ["require-sandbox"]) ([], ["no-require-sandbox"])) - - ,option [] ["ignore-sandbox"] - "Ignore any existing sandbox" - globalIgnoreSandbox (\v flags -> flags { globalIgnoreSandbox = v }) - trueArg - - ,option [] ["ignore-expiry"] - "Ignore expiry dates on signed metadata (use only in exceptional circumstances)" - globalIgnoreExpiry (\v flags -> flags { globalIgnoreExpiry = v }) - trueArg - - ,option [] ["http-transport"] - "Set a transport for http(s) requests. Accepts 'curl', 'wget', 'powershell', and 'plain-http'. (default: 'curl')" - globalHttpTransport (\v flags -> flags { globalHttpTransport = v }) - (reqArgFlag "HttpTransport") - ,option [] ["nix"] - "Nix integration: run commands through nix-shell if a 'shell.nix' file exists" - globalNix (\v flags -> flags { globalNix = v }) - (boolOpt [] []) - ] - - -- arguments we don't want shown in the help - argsNotShown :: [OptionField GlobalFlags] - argsNotShown = [ - option [] ["remote-repo"] - "The name and url for a remote repository" - globalRemoteRepos (\v flags -> flags { globalRemoteRepos = v }) - (reqArg' "NAME:URL" (toNubList . maybeToList . readRepo) (map showRepo . 
fromNubList)) - - ,option [] ["remote-repo-cache"] - "The location where downloads from all remote repos are cached" - globalCacheDir (\v flags -> flags { globalCacheDir = v }) - (reqArgFlag "DIR") - - ,option [] ["local-repo"] - "The location of a local repository" - globalLocalRepos (\v flags -> flags { globalLocalRepos = v }) - (reqArg' "DIR" (\x -> toNubList [x]) fromNubList) - - ,option [] ["logs-dir", "logsdir"] - "The location to put log files" - globalLogsDir (\v flags -> flags { globalLogsDir = v }) - (reqArgFlag "DIR") - - ,option [] ["world-file"] - "The location of the world file" - globalWorldFile (\v flags -> flags { globalWorldFile = v }) - (reqArgFlag "FILE") - - ,option [] ["store-dir", "storedir"] - "The location of the nix-local-build store" - globalStoreDir (\v flags -> flags { globalStoreDir = v }) - (reqArgFlag "DIR") - ] - --- ------------------------------------------------------------ --- * Config flags --- ------------------------------------------------------------ - -configureCommand :: CommandUI ConfigFlags -configureCommand = c - { commandName = "configure" - , commandDefaultFlags = mempty - , commandDescription = Just $ \_ -> wrapText $ - "Configure how the package is built by setting " - ++ "package (and other) flags.\n" - ++ "\n" - ++ "The configuration affects several other commands, " - ++ "including v1-build, v1-test, v1-bench, v1-run, v1-repl.\n" - , commandUsage = \pname -> - "Usage: " ++ pname ++ " v1-configure [FLAGS]\n" - , commandNotes = Just $ \pname -> - (Cabal.programFlagsDescription defaultProgramDb ++ "\n") - ++ "Examples:\n" - ++ " " ++ pname ++ " v1-configure\n" - ++ " Configure with defaults;\n" - ++ " " ++ pname ++ " v1-configure --enable-tests -fcustomflag\n" - ++ " Configure building package including tests,\n" - ++ " with some package-specific flag.\n" - } - where - c = Cabal.configureCommand defaultProgramDb - -configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags] -configureOptions = commandOptions 
configureCommand - --- | Given some 'ConfigFlags' for the version of Cabal that --- cabal-install was built with, and a target older 'Version' of --- Cabal that we want to pass these flags to, convert the --- flags into a form that will be accepted by the older --- Setup script. Generally speaking, this just means filtering --- out flags that the old Cabal library doesn't understand, but --- in some cases it may also mean "emulating" a feature using --- some more legacy flags. -filterConfigureFlags :: ConfigFlags -> Version -> ConfigFlags -filterConfigureFlags flags cabalLibVersion - -- NB: we expect the latest version to be the most common case, - -- so test it first. - | cabalLibVersion >= mkVersion [2,1,0] = flags_latest - -- The naming convention is that flags_version gives flags with - -- all flags *introduced* in version eliminated. - -- It is NOT the latest version of Cabal library that - -- these flags work for; version of introduction is a more - -- natural metric. - | cabalLibVersion < mkVersion [1,3,10] = flags_1_3_10 - | cabalLibVersion < mkVersion [1,10,0] = flags_1_10_0 - | cabalLibVersion < mkVersion [1,12,0] = flags_1_12_0 - | cabalLibVersion < mkVersion [1,14,0] = flags_1_14_0 - | cabalLibVersion < mkVersion [1,18,0] = flags_1_18_0 - | cabalLibVersion < mkVersion [1,19,1] = flags_1_19_1 - | cabalLibVersion < mkVersion [1,19,2] = flags_1_19_2 - | cabalLibVersion < mkVersion [1,21,1] = flags_1_21_1 - | cabalLibVersion < mkVersion [1,22,0] = flags_1_22_0 - | cabalLibVersion < mkVersion [1,23,0] = flags_1_23_0 - | cabalLibVersion < mkVersion [1,25,0] = flags_1_25_0 - | cabalLibVersion < mkVersion [2,1,0] = flags_2_1_0 - | otherwise = flags_latest - where - flags_latest = flags { - -- Cabal >= 1.19.1 uses '--dependency' and does not need '--constraint'. 
- configConstraints = [] - } - - flags_2_1_0 = flags_latest { - -- Cabal < 2.1 doesn't know about -v +timestamp modifier - configVerbosity = fmap verboseNoTimestamp (configVerbosity flags_latest) - -- Cabal < 2.1 doesn't know about ---static - , configStaticLib = NoFlag - , configSplitSections = NoFlag - } - - flags_1_25_0 = flags_2_1_0 { - -- Cabal < 1.25.0 doesn't know about --dynlibdir. - configInstallDirs = configInstallDirs_1_25_0, - -- Cabal < 1.25 doesn't have extended verbosity syntax - configVerbosity = fmap verboseNoFlags (configVerbosity flags_2_1_0), - -- Cabal < 1.25 doesn't support --deterministic - configDeterministic = mempty - } - configInstallDirs_1_25_0 = let dirs = configInstallDirs flags in - dirs { dynlibdir = NoFlag - , libexecsubdir = NoFlag - , libexecdir = maybeToFlag $ - combinePathTemplate <$> flagToMaybe (libexecdir dirs) - <*> flagToMaybe (libexecsubdir dirs) - } - -- Cabal < 1.23 doesn't know about '--profiling-detail'. - -- Cabal < 1.23 has a hacked up version of 'enable-profiling' - -- which we shouldn't use. - (tryLibProfiling, tryExeProfiling) = computeEffectiveProfiling flags - flags_1_23_0 = flags_1_25_0 { configProfDetail = NoFlag - , configProfLibDetail = NoFlag - , configIPID = NoFlag - , configProf = NoFlag - , configProfExe = Flag tryExeProfiling - , configProfLib = Flag tryLibProfiling - } - - -- Cabal < 1.22 doesn't know about '--disable-debug-info'. - flags_1_22_0 = flags_1_23_0 { configDebugInfo = NoFlag } - - -- Cabal < 1.21.1 doesn't know about 'disable-relocatable' - -- Cabal < 1.21.1 doesn't know about 'enable-profiling' - -- (but we already dealt with it in flags_1_23_0) - flags_1_21_1 = - flags_1_22_0 { configRelocatable = NoFlag - , configCoverage = NoFlag - , configLibCoverage = configCoverage flags - } - -- Cabal < 1.19.2 doesn't know about '--exact-configuration' and - -- '--enable-library-stripping'. 
- flags_1_19_2 = flags_1_21_1 { configExactConfiguration = NoFlag - , configStripLibs = NoFlag } - -- Cabal < 1.19.1 uses '--constraint' instead of '--dependency'. - flags_1_19_1 = flags_1_19_2 { configDependencies = [] - , configConstraints = configConstraints flags } - -- Cabal < 1.18.0 doesn't know about --extra-prog-path and --sysconfdir. - flags_1_18_0 = flags_1_19_1 { configProgramPathExtra = toNubList [] - , configInstallDirs = configInstallDirs_1_18_0} - configInstallDirs_1_18_0 = (configInstallDirs flags_1_19_1) { sysconfdir = NoFlag } - -- Cabal < 1.14.0 doesn't know about '--disable-benchmarks'. - flags_1_14_0 = flags_1_18_0 { configBenchmarks = NoFlag } - -- Cabal < 1.12.0 doesn't know about '--enable/disable-executable-dynamic' - -- and '--enable/disable-library-coverage'. - flags_1_12_0 = flags_1_14_0 { configLibCoverage = NoFlag - , configDynExe = NoFlag } - -- Cabal < 1.10.0 doesn't know about '--disable-tests'. - flags_1_10_0 = flags_1_12_0 { configTests = NoFlag } - -- Cabal < 1.3.10 does not grok the '--constraints' flag. - flags_1_3_10 = flags_1_10_0 { configConstraints = [] } - --- | Get the package database settings from 'ConfigFlags', accounting for --- @--package-db@ and @--user@ flags. -configPackageDB' :: ConfigFlags -> PackageDBStack -configPackageDB' cfg = - interpretPackageDbFlags userInstall (configPackageDBs cfg) - where - userInstall = Cabal.fromFlagOrDefault True (configUserInstall cfg) - --- | Configure the compiler, but reduce verbosity during this step. 
-configCompilerAux' :: ConfigFlags -> IO (Compiler, Platform, ProgramDb) -configCompilerAux' configFlags = - configCompilerAuxEx configFlags - --FIXME: make configCompilerAux use a sensible verbosity - { configVerbosity = fmap lessVerbose (configVerbosity configFlags) } - --- ------------------------------------------------------------ --- * Config extra flags --- ------------------------------------------------------------ - --- | cabal configure takes some extra flags beyond runghc Setup configure --- -data ConfigExFlags = ConfigExFlags { - configCabalVersion :: Flag Version, - configExConstraints:: [(UserConstraint, ConstraintSource)], - configPreferences :: [Dependency], - configSolver :: Flag PreSolver, - configAllowNewer :: Maybe AllowNewer, - configAllowOlder :: Maybe AllowOlder - } - deriving (Eq, Generic) - -defaultConfigExFlags :: ConfigExFlags -defaultConfigExFlags = mempty { configSolver = Flag defaultSolver } - -configureExCommand :: CommandUI (ConfigFlags, ConfigExFlags) -configureExCommand = configureCommand { - commandDefaultFlags = (mempty, defaultConfigExFlags), - commandOptions = \showOrParseArgs -> - liftOptions fst setFst - (filter ((`notElem` ["constraint", "dependency", "exact-configuration"]) - . optionName) $ configureOptions showOrParseArgs) - ++ liftOptions snd setSnd - (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag) - } - where - setFst a (_,b) = (a,b) - setSnd b (a,_) = (a,b) - -configureExOptions :: ShowOrParseArgs - -> ConstraintSource - -> [OptionField ConfigExFlags] -configureExOptions _showOrParseArgs src = - [ option [] ["cabal-lib-version"] - ("Select which version of the Cabal lib to use to build packages " - ++ "(useful for testing).") - configCabalVersion (\v flags -> flags { configCabalVersion = v }) - (reqArg "VERSION" (readP_to_E ("Cannot parse cabal lib version: "++) - (fmap toFlag parse)) - (map display . 
flagToList)) - , option [] ["constraint"] - "Specify constraints on a package (version, installed/source, flags)" - configExConstraints (\v flags -> flags { configExConstraints = v }) - (reqArg "CONSTRAINT" - ((\x -> [(x, src)]) `fmap` ReadE readUserConstraint) - (map $ display . fst)) - - , option [] ["preference"] - "Specify preferences (soft constraints) on the version of a package" - configPreferences (\v flags -> flags { configPreferences = v }) - (reqArg "CONSTRAINT" - (readP_to_E (const "dependency expected") - (fmap (\x -> [x]) parse)) - (map display)) - - , optionSolver configSolver (\v flags -> flags { configSolver = v }) - - , option [] ["allow-older"] - ("Ignore lower bounds in all dependencies or DEPS") - (fmap unAllowOlder . configAllowOlder) - (\v flags -> flags { configAllowOlder = fmap AllowOlder v}) - (optArg "DEPS" - (readP_to_E ("Cannot parse the list of packages: " ++) relaxDepsParser) - (Just RelaxDepsAll) relaxDepsPrinter) - - , option [] ["allow-newer"] - ("Ignore upper bounds in all dependencies or DEPS") - (fmap unAllowNewer . configAllowNewer) - (\v flags -> flags { configAllowNewer = fmap AllowNewer v}) - (optArg "DEPS" - (readP_to_E ("Cannot parse the list of packages: " ++) relaxDepsParser) - (Just RelaxDepsAll) relaxDepsPrinter) - - ] - - -relaxDepsParser :: Parse.ReadP r (Maybe RelaxDeps) -relaxDepsParser = - (Just . RelaxDepsSome) `fmap` Parse.sepBy1 parse (Parse.char ',') - -relaxDepsPrinter :: (Maybe RelaxDeps) -> [Maybe String] -relaxDepsPrinter Nothing = [] -relaxDepsPrinter (Just RelaxDepsAll) = [Nothing] -relaxDepsPrinter (Just (RelaxDepsSome pkgs)) = map (Just . display) $ pkgs - - -instance Monoid ConfigExFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup ConfigExFlags where - (<>) = gmappend - -reconfigureCommand :: CommandUI (ConfigFlags, ConfigExFlags) -reconfigureCommand - = configureExCommand - { commandName = "reconfigure" - , commandSynopsis = "Reconfigure the package if necessary." 
- , commandDescription = Just $ \pname -> wrapText $ - "Run `configure` with the most recently used flags, or append FLAGS " - ++ "to the most recently used configuration. " - ++ "Accepts the same flags as `" ++ pname ++ " v1-configure'. " - ++ "If the package has never been configured, the default flags are " - ++ "used." - , commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " v1-reconfigure\n" - ++ " Configure with the most recently used flags.\n" - ++ " " ++ pname ++ " v1-reconfigure -w PATH\n" - ++ " Reconfigure with the most recently used flags,\n" - ++ " but use the compiler at PATH.\n\n" - , commandUsage = usageAlternatives "v1-reconfigure" [ "[FLAGS]" ] - , commandDefaultFlags = mempty - } - --- ------------------------------------------------------------ --- * Build flags --- ------------------------------------------------------------ - -data SkipAddSourceDepsCheck = - SkipAddSourceDepsCheck | DontSkipAddSourceDepsCheck - deriving Eq - -data BuildExFlags = BuildExFlags { - buildOnly :: Flag SkipAddSourceDepsCheck -} deriving Generic - -buildExOptions :: ShowOrParseArgs -> [OptionField BuildExFlags] -buildExOptions _showOrParseArgs = - option [] ["only"] - "Don't reinstall add-source dependencies (sandbox-only)" - buildOnly (\v flags -> flags { buildOnly = v }) - (noArg (Flag SkipAddSourceDepsCheck)) - - : [] - -buildCommand :: CommandUI (BuildFlags, BuildExFlags) -buildCommand = parent { - commandName = "build", - commandDescription = Just $ \_ -> wrapText $ - "Components encompass executables, tests, and benchmarks.\n" - ++ "\n" - ++ "Affected by configuration options, see `v1-configure`.\n", - commandDefaultFlags = (commandDefaultFlags parent, mempty), - commandUsage = usageAlternatives "v1-build" $ - [ "[FLAGS]", "COMPONENTS [FLAGS]" ], - commandOptions = - \showOrParseArgs -> liftOptions fst setFst - (commandOptions parent showOrParseArgs) - ++ - liftOptions snd setSnd (buildExOptions showOrParseArgs) - , commandNotes = Just $ 
\pname -> - "Examples:\n" - ++ " " ++ pname ++ " v1-build " - ++ " All the components in the package\n" - ++ " " ++ pname ++ " v1-build foo " - ++ " A component (i.e. lib, exe, test suite)\n\n" - ++ Cabal.programFlagsDescription defaultProgramDb - } - where - setFst a (_,b) = (a,b) - setSnd b (a,_) = (a,b) - - parent = Cabal.buildCommand defaultProgramDb - -instance Monoid BuildExFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup BuildExFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Repl command --- ------------------------------------------------------------ - -replCommand :: CommandUI (ReplFlags, BuildExFlags) -replCommand = parent { - commandName = "repl", - commandDescription = Just $ \pname -> wrapText $ - "If the current directory contains no package, ignores COMPONENT " - ++ "parameters and opens an interactive interpreter session; if a " - ++ "sandbox is present, its package database will be used.\n" - ++ "\n" - ++ "Otherwise, (re)configures with the given or default flags, and " - ++ "loads the interpreter with the relevant modules. For executables, " - ++ "tests and benchmarks, loads the main module (and its " - ++ "dependencies); for libraries all exposed/other modules.\n" - ++ "\n" - ++ "The default component is the library itself, or the executable " - ++ "if that is the only component.\n" - ++ "\n" - ++ "Support for loading specific modules is planned but not " - ++ "implemented yet. For certain scenarios, `" ++ pname - ++ " v1-exec -- ghci :l Foo` may be used instead. 
Note that `v1-exec` will " - ++ "not (re)configure and you will have to specify the location of " - ++ "other modules, if required.\n", - commandUsage = \pname -> "Usage: " ++ pname ++ " v1-repl [COMPONENT] [FLAGS]\n", - commandDefaultFlags = (commandDefaultFlags parent, mempty), - commandOptions = - \showOrParseArgs -> liftOptions fst setFst - (commandOptions parent showOrParseArgs) - ++ - liftOptions snd setSnd (buildExOptions showOrParseArgs), - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " v1-repl " - ++ " The first component in the package\n" - ++ " " ++ pname ++ " v1-repl foo " - ++ " A named component (i.e. lib, exe, test suite)\n" - ++ " " ++ pname ++ " v1-repl --ghc-options=\"-lstdc++\"" - ++ " Specifying flags for interpreter\n" - } - where - setFst a (_,b) = (a,b) - setSnd b (a,_) = (a,b) - - parent = Cabal.replCommand defaultProgramDb - --- ------------------------------------------------------------ --- * Test command --- ------------------------------------------------------------ - -testCommand :: CommandUI (TestFlags, BuildFlags, BuildExFlags) -testCommand = parent { - commandName = "test", - commandDescription = Just $ \pname -> wrapText $ - "If necessary (re)configures with `--enable-tests` flag and builds" - ++ " the test suite.\n" - ++ "\n" - ++ "Remember that the tests' dependencies must be installed if there" - ++ " are additional ones; e.g. 
with `" ++ pname - ++ " v1-install --only-dependencies --enable-tests`.\n" - ++ "\n" - ++ "By defining UserHooks in a custom Setup.hs, the package can" - ++ " define actions to be executed before and after running tests.\n", - commandUsage = usageAlternatives "v1-test" - [ "[FLAGS]", "TESTCOMPONENTS [FLAGS]" ], - commandDefaultFlags = (commandDefaultFlags parent, - Cabal.defaultBuildFlags, mempty), - commandOptions = - \showOrParseArgs -> liftOptions get1 set1 - (commandOptions parent showOrParseArgs) - ++ - liftOptions get2 set2 - (Cabal.buildOptions progDb showOrParseArgs) - ++ - liftOptions get3 set3 (buildExOptions showOrParseArgs) - } - where - get1 (a,_,_) = a; set1 a (_,b,c) = (a,b,c) - get2 (_,b,_) = b; set2 b (a,_,c) = (a,b,c) - get3 (_,_,c) = c; set3 c (a,b,_) = (a,b,c) - - parent = Cabal.testCommand - progDb = defaultProgramDb - --- ------------------------------------------------------------ --- * Bench command --- ------------------------------------------------------------ - -benchmarkCommand :: CommandUI (BenchmarkFlags, BuildFlags, BuildExFlags) -benchmarkCommand = parent { - commandName = "bench", - commandUsage = usageAlternatives "v1-bench" - [ "[FLAGS]", "BENCHCOMPONENTS [FLAGS]" ], - commandDescription = Just $ \pname -> wrapText $ - "If necessary (re)configures with `--enable-benchmarks` flag and" - ++ " builds the benchmarks.\n" - ++ "\n" - ++ "Remember that the benchmarks' dependencies must be installed if" - ++ " there are additional ones; e.g. 
with `" ++ pname - ++ " v1-install --only-dependencies --enable-benchmarks`.\n" - ++ "\n" - ++ "By defining UserHooks in a custom Setup.hs, the package can" - ++ " define actions to be executed before and after running" - ++ " benchmarks.\n", - commandDefaultFlags = (commandDefaultFlags parent, - Cabal.defaultBuildFlags, mempty), - commandOptions = - \showOrParseArgs -> liftOptions get1 set1 - (commandOptions parent showOrParseArgs) - ++ - liftOptions get2 set2 - (Cabal.buildOptions progDb showOrParseArgs) - ++ - liftOptions get3 set3 (buildExOptions showOrParseArgs) - } - where - get1 (a,_,_) = a; set1 a (_,b,c) = (a,b,c) - get2 (_,b,_) = b; set2 b (a,_,c) = (a,b,c) - get3 (_,_,c) = c; set3 c (a,b,_) = (a,b,c) - - parent = Cabal.benchmarkCommand - progDb = defaultProgramDb - --- ------------------------------------------------------------ --- * Fetch command --- ------------------------------------------------------------ - -data FetchFlags = FetchFlags { --- fetchOutput :: Flag FilePath, - fetchDeps :: Flag Bool, - fetchDryRun :: Flag Bool, - fetchSolver :: Flag PreSolver, - fetchMaxBackjumps :: Flag Int, - fetchReorderGoals :: Flag ReorderGoals, - fetchCountConflicts :: Flag CountConflicts, - fetchIndependentGoals :: Flag IndependentGoals, - fetchShadowPkgs :: Flag ShadowPkgs, - fetchStrongFlags :: Flag StrongFlags, - fetchAllowBootLibInstalls :: Flag AllowBootLibInstalls, - fetchTests :: Flag Bool, - fetchBenchmarks :: Flag Bool, - fetchVerbosity :: Flag Verbosity - } - -defaultFetchFlags :: FetchFlags -defaultFetchFlags = FetchFlags { --- fetchOutput = mempty, - fetchDeps = toFlag True, - fetchDryRun = toFlag False, - fetchSolver = Flag defaultSolver, - fetchMaxBackjumps = Flag defaultMaxBackjumps, - fetchReorderGoals = Flag (ReorderGoals False), - fetchCountConflicts = Flag (CountConflicts True), - fetchIndependentGoals = Flag (IndependentGoals False), - fetchShadowPkgs = Flag (ShadowPkgs False), - fetchStrongFlags = Flag (StrongFlags False), - 
fetchAllowBootLibInstalls = Flag (AllowBootLibInstalls False), - fetchTests = toFlag False, - fetchBenchmarks = toFlag False, - fetchVerbosity = toFlag normal - } - -fetchCommand :: CommandUI FetchFlags -fetchCommand = CommandUI { - commandName = "fetch", - commandSynopsis = "Downloads packages for later installation.", - commandUsage = usageAlternatives "fetch" [ "[FLAGS] PACKAGES" - ], - commandDescription = Just $ \_ -> - "Note that it currently is not possible to fetch the dependencies for a\n" - ++ "package in the current directory.\n", - commandNotes = Nothing, - commandDefaultFlags = defaultFetchFlags, - commandOptions = \ showOrParseArgs -> [ - optionVerbosity fetchVerbosity (\v flags -> flags { fetchVerbosity = v }) - --- , option "o" ["output"] --- "Put the package(s) somewhere specific rather than the usual cache." --- fetchOutput (\v flags -> flags { fetchOutput = v }) --- (reqArgFlag "PATH") - - , option [] ["dependencies", "deps"] - "Resolve and fetch dependencies (default)" - fetchDeps (\v flags -> flags { fetchDeps = v }) - trueArg - - , option [] ["no-dependencies", "no-deps"] - "Ignore dependencies" - fetchDeps (\v flags -> flags { fetchDeps = v }) - falseArg - - , option [] ["dry-run"] - "Do not install anything, only print what would be installed." - fetchDryRun (\v flags -> flags { fetchDryRun = v }) - trueArg - - , option "" ["tests"] - "dependency checking and compilation for test suites listed in the package description file." - fetchTests (\v flags -> flags { fetchTests = v }) - (boolOpt [] []) - - , option "" ["benchmarks"] - "dependency checking and compilation for benchmarks listed in the package description file." 
- fetchBenchmarks (\v flags -> flags { fetchBenchmarks = v }) - (boolOpt [] []) - - ] ++ - - optionSolver fetchSolver (\v flags -> flags { fetchSolver = v }) : - optionSolverFlags showOrParseArgs - fetchMaxBackjumps (\v flags -> flags { fetchMaxBackjumps = v }) - fetchReorderGoals (\v flags -> flags { fetchReorderGoals = v }) - fetchCountConflicts (\v flags -> flags { fetchCountConflicts = v }) - fetchIndependentGoals (\v flags -> flags { fetchIndependentGoals = v }) - fetchShadowPkgs (\v flags -> flags { fetchShadowPkgs = v }) - fetchStrongFlags (\v flags -> flags { fetchStrongFlags = v }) - fetchAllowBootLibInstalls (\v flags -> flags { fetchAllowBootLibInstalls = v }) - - } - --- ------------------------------------------------------------ --- * Freeze command --- ------------------------------------------------------------ - -data FreezeFlags = FreezeFlags { - freezeDryRun :: Flag Bool, - freezeTests :: Flag Bool, - freezeBenchmarks :: Flag Bool, - freezeSolver :: Flag PreSolver, - freezeMaxBackjumps :: Flag Int, - freezeReorderGoals :: Flag ReorderGoals, - freezeCountConflicts :: Flag CountConflicts, - freezeIndependentGoals :: Flag IndependentGoals, - freezeShadowPkgs :: Flag ShadowPkgs, - freezeStrongFlags :: Flag StrongFlags, - freezeAllowBootLibInstalls :: Flag AllowBootLibInstalls, - freezeVerbosity :: Flag Verbosity - } - -defaultFreezeFlags :: FreezeFlags -defaultFreezeFlags = FreezeFlags { - freezeDryRun = toFlag False, - freezeTests = toFlag False, - freezeBenchmarks = toFlag False, - freezeSolver = Flag defaultSolver, - freezeMaxBackjumps = Flag defaultMaxBackjumps, - freezeReorderGoals = Flag (ReorderGoals False), - freezeCountConflicts = Flag (CountConflicts True), - freezeIndependentGoals = Flag (IndependentGoals False), - freezeShadowPkgs = Flag (ShadowPkgs False), - freezeStrongFlags = Flag (StrongFlags False), - freezeAllowBootLibInstalls = Flag (AllowBootLibInstalls False), - freezeVerbosity = toFlag normal - } - -freezeCommand :: CommandUI 
FreezeFlags -freezeCommand = CommandUI { - commandName = "freeze", - commandSynopsis = "Freeze dependencies.", - commandDescription = Just $ \_ -> wrapText $ - "Calculates a valid set of dependencies and their exact versions. " - ++ "If successful, saves the result to the file `cabal.config`.\n" - ++ "\n" - ++ "The package versions specified in `cabal.config` will be used for " - ++ "any future installs.\n" - ++ "\n" - ++ "An existing `cabal.config` is ignored and overwritten.\n", - commandNotes = Nothing, - commandUsage = usageFlags "freeze", - commandDefaultFlags = defaultFreezeFlags, - commandOptions = \ showOrParseArgs -> [ - optionVerbosity freezeVerbosity - (\v flags -> flags { freezeVerbosity = v }) - - , option [] ["dry-run"] - "Do not freeze anything, only print what would be frozen" - freezeDryRun (\v flags -> flags { freezeDryRun = v }) - trueArg - - , option [] ["tests"] - ("freezing of the dependencies of any tests suites " - ++ "in the package description file.") - freezeTests (\v flags -> flags { freezeTests = v }) - (boolOpt [] []) - - , option [] ["benchmarks"] - ("freezing of the dependencies of any benchmarks suites " - ++ "in the package description file.") - freezeBenchmarks (\v flags -> flags { freezeBenchmarks = v }) - (boolOpt [] []) - - ] ++ - - optionSolver - freezeSolver (\v flags -> flags { freezeSolver = v }): - optionSolverFlags showOrParseArgs - freezeMaxBackjumps (\v flags -> flags { freezeMaxBackjumps = v }) - freezeReorderGoals (\v flags -> flags { freezeReorderGoals = v }) - freezeCountConflicts (\v flags -> flags { freezeCountConflicts = v }) - freezeIndependentGoals (\v flags -> flags { freezeIndependentGoals = v }) - freezeShadowPkgs (\v flags -> flags { freezeShadowPkgs = v }) - freezeStrongFlags (\v flags -> flags { freezeStrongFlags = v }) - freezeAllowBootLibInstalls (\v flags -> flags { freezeAllowBootLibInstalls = v }) - - } - --- ------------------------------------------------------------ --- * 'gen-bounds' command --- 
------------------------------------------------------------ - -genBoundsCommand :: CommandUI FreezeFlags -genBoundsCommand = CommandUI { - commandName = "gen-bounds", - commandSynopsis = "Generate dependency bounds.", - commandDescription = Just $ \_ -> wrapText $ - "Generates bounds for all dependencies that do not currently have them. " - ++ "Generated bounds are printed to stdout. " - ++ "You can then paste them into your .cabal file.\n" - ++ "\n", - commandNotes = Nothing, - commandUsage = usageFlags "gen-bounds", - commandDefaultFlags = defaultFreezeFlags, - commandOptions = \ _ -> [ - optionVerbosity freezeVerbosity (\v flags -> flags { freezeVerbosity = v }) - ] - } - --- ------------------------------------------------------------ --- * 'outdated' command --- ------------------------------------------------------------ - -data IgnoreMajorVersionBumps = IgnoreMajorVersionBumpsNone - | IgnoreMajorVersionBumpsAll - | IgnoreMajorVersionBumpsSome [PackageName] - -instance Monoid IgnoreMajorVersionBumps where - mempty = IgnoreMajorVersionBumpsNone - mappend = (<>) - -instance Semigroup IgnoreMajorVersionBumps where - IgnoreMajorVersionBumpsNone <> r = r - l@IgnoreMajorVersionBumpsAll <> _ = l - l@(IgnoreMajorVersionBumpsSome _) <> IgnoreMajorVersionBumpsNone = l - (IgnoreMajorVersionBumpsSome _) <> r@IgnoreMajorVersionBumpsAll = r - (IgnoreMajorVersionBumpsSome a) <> (IgnoreMajorVersionBumpsSome b) = - IgnoreMajorVersionBumpsSome (a ++ b) - -data OutdatedFlags = OutdatedFlags { - outdatedVerbosity :: Flag Verbosity, - outdatedFreezeFile :: Flag Bool, - outdatedNewFreezeFile :: Flag Bool, - outdatedProjectFile :: Flag FilePath, - outdatedSimpleOutput :: Flag Bool, - outdatedExitCode :: Flag Bool, - outdatedQuiet :: Flag Bool, - outdatedIgnore :: [PackageName], - outdatedMinor :: Maybe IgnoreMajorVersionBumps - } - -defaultOutdatedFlags :: OutdatedFlags -defaultOutdatedFlags = OutdatedFlags { - outdatedVerbosity = toFlag normal, - outdatedFreezeFile = mempty, - 
outdatedNewFreezeFile = mempty, - outdatedProjectFile = mempty, - outdatedSimpleOutput = mempty, - outdatedExitCode = mempty, - outdatedQuiet = mempty, - outdatedIgnore = mempty, - outdatedMinor = mempty - } - -outdatedCommand :: CommandUI OutdatedFlags -outdatedCommand = CommandUI { - commandName = "outdated", - commandSynopsis = "Check for outdated dependencies", - commandDescription = Just $ \_ -> wrapText $ - "Checks for outdated dependencies in the package description file " - ++ "or freeze file", - commandNotes = Nothing, - commandUsage = usageFlags "outdated", - commandDefaultFlags = defaultOutdatedFlags, - commandOptions = \ _ -> [ - optionVerbosity outdatedVerbosity - (\v flags -> flags { outdatedVerbosity = v }) - - ,option [] ["freeze-file", "v1-freeze-file"] - "Act on the freeze file" - outdatedFreezeFile (\v flags -> flags { outdatedFreezeFile = v }) - trueArg - - ,option [] ["new-freeze-file", "v2-freeze-file"] - "Act on the new-style freeze file (default: cabal.project.freeze)" - outdatedNewFreezeFile (\v flags -> flags { outdatedNewFreezeFile = v }) - trueArg - - ,option [] ["project-file"] - "Act on the new-style freeze file named PROJECTFILE.freeze rather than the default cabal.project.freeze" - outdatedProjectFile (\v flags -> flags { outdatedProjectFile = v }) - (reqArgFlag "PROJECTFILE") - - ,option [] ["simple-output"] - "Only print names of outdated dependencies, one per line" - outdatedSimpleOutput (\v flags -> flags { outdatedSimpleOutput = v }) - trueArg - - ,option [] ["exit-code"] - "Exit with non-zero when there are outdated dependencies" - outdatedExitCode (\v flags -> flags { outdatedExitCode = v }) - trueArg - - ,option ['q'] ["quiet"] - "Don't print any output. 
Implies '--exit-code' and '-v0'" - outdatedQuiet (\v flags -> flags { outdatedQuiet = v }) - trueArg - - ,option [] ["ignore"] - "Packages to ignore" - outdatedIgnore (\v flags -> flags { outdatedIgnore = v }) - (reqArg "PKGS" pkgNameListParser (map display)) - - ,option [] ["minor"] - "Ignore major version bumps for these packages" - outdatedMinor (\v flags -> flags { outdatedMinor = v }) - (optArg "PKGS" ignoreMajorVersionBumpsParser - (Just IgnoreMajorVersionBumpsAll) ignoreMajorVersionBumpsPrinter) - ] - } - where - ignoreMajorVersionBumpsPrinter :: (Maybe IgnoreMajorVersionBumps) - -> [Maybe String] - ignoreMajorVersionBumpsPrinter Nothing = [] - ignoreMajorVersionBumpsPrinter (Just IgnoreMajorVersionBumpsNone)= [] - ignoreMajorVersionBumpsPrinter (Just IgnoreMajorVersionBumpsAll) = [Nothing] - ignoreMajorVersionBumpsPrinter (Just (IgnoreMajorVersionBumpsSome pkgs)) = - map (Just . display) $ pkgs - - ignoreMajorVersionBumpsParser = - (Just . IgnoreMajorVersionBumpsSome) `fmap` pkgNameListParser - - pkgNameListParser = readP_to_E - ("Couldn't parse the list of package names: " ++) - (Parse.sepBy1 parse (Parse.char ',')) - --- ------------------------------------------------------------ --- * Update command --- ------------------------------------------------------------ - -data UpdateFlags - = UpdateFlags { - updateVerbosity :: Flag Verbosity, - updateIndexState :: Flag IndexState - } deriving Generic - -defaultUpdateFlags :: UpdateFlags -defaultUpdateFlags - = UpdateFlags { - updateVerbosity = toFlag normal, - updateIndexState = toFlag IndexStateHead - } - -updateCommand :: CommandUI UpdateFlags -updateCommand = CommandUI { - commandName = "update", - commandSynopsis = "Updates list of known packages.", - commandDescription = Just $ \_ -> - "For all known remote repositories, download the package list.\n", - commandNotes = Just $ \_ -> - relevantConfigValuesText ["remote-repo" - ,"remote-repo-cache" - ,"local-repo"], - commandUsage = usageFlags "v1-update", - 
commandDefaultFlags = defaultUpdateFlags, - commandOptions = \_ -> [ - optionVerbosity updateVerbosity (\v flags -> flags { updateVerbosity = v }), - option [] ["index-state"] - ("Update the source package index to its state as it existed at a previous time. " ++ - "Accepts unix-timestamps (e.g. '@1474732068'), ISO8601 UTC timestamps " ++ - "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD' (default: 'HEAD').") - updateIndexState (\v flags -> flags { updateIndexState = v }) - (reqArg "STATE" (readP_to_E (const $ "index-state must be a " ++ - "unix-timestamps (e.g. '@1474732068'), " ++ - "a ISO8601 UTC timestamp " ++ - "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD'") - (toFlag `fmap` parse)) - (flagToList . fmap display)) - ] - } - --- ------------------------------------------------------------ --- * Other commands --- ------------------------------------------------------------ - -upgradeCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) -upgradeCommand = configureCommand { - commandName = "upgrade", - commandSynopsis = "(command disabled, use install instead)", - commandDescription = Nothing, - commandUsage = usageFlagsOrPackages "upgrade", - commandDefaultFlags = (mempty, mempty, mempty, mempty), - commandOptions = commandOptions installCommand - } - -cleanCommand :: CommandUI CleanFlags -cleanCommand = Cabal.cleanCommand - { commandUsage = \pname -> - "Usage: " ++ pname ++ " v1-clean [FLAGS]\n" - } - -checkCommand :: CommandUI (Flag Verbosity) -checkCommand = CommandUI { - commandName = "check", - commandSynopsis = "Check the package for common mistakes.", - commandDescription = Just $ \_ -> wrapText $ - "Expects a .cabal package file in the current directory.\n" - ++ "\n" - ++ "The checks correspond to the requirements to packages on Hackage. 
" - ++ "If no errors and warnings are reported, Hackage will accept this " - ++ "package.\n", - commandNotes = Nothing, - commandUsage = usageFlags "check", - commandDefaultFlags = toFlag normal, - commandOptions = \_ -> [optionVerbosity id const] - } - -formatCommand :: CommandUI (Flag Verbosity) -formatCommand = CommandUI { - commandName = "format", - commandSynopsis = "Reformat the .cabal file using the standard style.", - commandDescription = Nothing, - commandNotes = Nothing, - commandUsage = usageAlternatives "format" ["[FILE]"], - commandDefaultFlags = toFlag normal, - commandOptions = \_ -> [] - } - -uninstallCommand :: CommandUI (Flag Verbosity) -uninstallCommand = CommandUI { - commandName = "uninstall", - commandSynopsis = "Warn about 'uninstall' not being implemented.", - commandDescription = Nothing, - commandNotes = Nothing, - commandUsage = usageAlternatives "uninstall" ["PACKAGES"], - commandDefaultFlags = toFlag normal, - commandOptions = \_ -> [] - } - -manpageCommand :: CommandUI (Flag Verbosity) -manpageCommand = CommandUI { - commandName = "manpage", - commandSynopsis = "Outputs manpage source.", - commandDescription = Just $ \_ -> - "Output manpage source to STDOUT.\n", - commandNotes = Nothing, - commandUsage = usageFlags "manpage", - commandDefaultFlags = toFlag normal, - commandOptions = \_ -> [optionVerbosity id const] - } - -runCommand :: CommandUI (BuildFlags, BuildExFlags) -runCommand = CommandUI { - commandName = "run", - commandSynopsis = "Builds and runs an executable.", - commandDescription = Just $ \pname -> wrapText $ - "Builds and then runs the specified executable. 
If no executable is " - ++ "specified, but the package contains just one executable, that one " - ++ "is built and executed.\n" - ++ "\n" - ++ "Use `" ++ pname ++ " v1-test --show-details=streaming` to run a " - ++ "test-suite and get its full output.\n", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " v1-run\n" - ++ " Run the only executable in the current package;\n" - ++ " " ++ pname ++ " v1-run foo -- --fooflag\n" - ++ " Works similar to `./foo --fooflag`.\n", - commandUsage = usageAlternatives "v1-run" - ["[FLAGS] [EXECUTABLE] [-- EXECUTABLE_FLAGS]"], - commandDefaultFlags = mempty, - commandOptions = - \showOrParseArgs -> liftOptions fst setFst - (commandOptions parent showOrParseArgs) - ++ - liftOptions snd setSnd - (buildExOptions showOrParseArgs) - } - where - setFst a (_,b) = (a,b) - setSnd b (a,_) = (a,b) - - parent = Cabal.buildCommand defaultProgramDb - --- ------------------------------------------------------------ --- * Report flags --- ------------------------------------------------------------ - -data ReportFlags = ReportFlags { - reportUsername :: Flag Username, - reportPassword :: Flag Password, - reportVerbosity :: Flag Verbosity - } deriving Generic - -defaultReportFlags :: ReportFlags -defaultReportFlags = ReportFlags { - reportUsername = mempty, - reportPassword = mempty, - reportVerbosity = toFlag normal - } - -reportCommand :: CommandUI ReportFlags -reportCommand = CommandUI { - commandName = "report", - commandSynopsis = "Upload build reports to a remote server.", - commandDescription = Nothing, - commandNotes = Just $ \_ -> - "You can store your Hackage login in the ~/.cabal/config file\n", - commandUsage = usageAlternatives "report" ["[FLAGS]"], - commandDefaultFlags = defaultReportFlags, - commandOptions = \_ -> - [optionVerbosity reportVerbosity (\v flags -> flags { reportVerbosity = v }) - - ,option ['u'] ["username"] - "Hackage username." 
- reportUsername (\v flags -> flags { reportUsername = v }) - (reqArg' "USERNAME" (toFlag . Username) - (flagToList . fmap unUsername)) - - ,option ['p'] ["password"] - "Hackage password." - reportPassword (\v flags -> flags { reportPassword = v }) - (reqArg' "PASSWORD" (toFlag . Password) - (flagToList . fmap unPassword)) - ] - } - -instance Monoid ReportFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup ReportFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Get flags --- ------------------------------------------------------------ - -data GetFlags = GetFlags { - getDestDir :: Flag FilePath, - getPristine :: Flag Bool, - getIndexState :: Flag IndexState, - getSourceRepository :: Flag (Maybe RepoKind), - getVerbosity :: Flag Verbosity - } deriving Generic - -defaultGetFlags :: GetFlags -defaultGetFlags = GetFlags { - getDestDir = mempty, - getPristine = mempty, - getIndexState = mempty, - getSourceRepository = mempty, - getVerbosity = toFlag normal - } - -getCommand :: CommandUI GetFlags -getCommand = CommandUI { - commandName = "get", - commandSynopsis = "Download/Extract a package's source code (repository).", - commandDescription = Just $ \_ -> wrapText $ - "Creates a local copy of a package's source code. By default it gets " - ++ "the source\ntarball and unpacks it in a local subdirectory. " - ++ "Alternatively, with -s it will\nget the code from the source " - ++ "repository specified by the package.\n", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " get hlint\n" - ++ " Download the latest stable version of hlint;\n" - ++ " " ++ pname ++ " get lens --source-repository=head\n" - ++ " Download the source repository (i.e. 
git clone from github).\n", - commandUsage = usagePackages "get", - commandDefaultFlags = defaultGetFlags, - commandOptions = \_ -> [ - optionVerbosity getVerbosity (\v flags -> flags { getVerbosity = v }) - - ,option "d" ["destdir"] - "Where to place the package source, defaults to the current directory." - getDestDir (\v flags -> flags { getDestDir = v }) - (reqArgFlag "PATH") - - ,option "s" ["source-repository"] - "Copy the package's source repository (ie git clone, darcs get, etc as appropriate)." - getSourceRepository (\v flags -> flags { getSourceRepository = v }) - (optArg "[head|this|...]" (readP_to_E (const "invalid source-repository") - (fmap (toFlag . Just) parse)) - (Flag Nothing) - (map (fmap show) . flagToList)) - - , option [] ["index-state"] - ("Use source package index state as it existed at a previous time. " ++ - "Accepts unix-timestamps (e.g. '@1474732068'), ISO8601 UTC timestamps " ++ - "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD' (default: 'HEAD'). " ++ - "This determines which package versions are available as well as " ++ - ".cabal file revision is selected (unless --pristine is used).") - getIndexState (\v flags -> flags { getIndexState = v }) - (reqArg "STATE" (readP_to_E (const $ "index-state must be a " ++ - "unix-timestamps (e.g. '@1474732068'), " ++ - "a ISO8601 UTC timestamp " ++ - "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD'") - (toFlag `fmap` parse)) - (flagToList . fmap display)) - - , option [] ["pristine"] - ("Unpack the original pristine tarball, rather than updating the " - ++ ".cabal file with the latest revision from the package archive.") - getPristine (\v flags -> flags { getPristine = v }) - trueArg - ] - } - --- 'cabal unpack' is a deprecated alias for 'cabal get'. 
-unpackCommand :: CommandUI GetFlags -unpackCommand = getCommand { - commandName = "unpack", - commandUsage = usagePackages "unpack" - } - -instance Monoid GetFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup GetFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * List flags --- ------------------------------------------------------------ - -data ListFlags = ListFlags { - listInstalled :: Flag Bool, - listSimpleOutput :: Flag Bool, - listVerbosity :: Flag Verbosity, - listPackageDBs :: [Maybe PackageDB] - } deriving Generic - -defaultListFlags :: ListFlags -defaultListFlags = ListFlags { - listInstalled = Flag False, - listSimpleOutput = Flag False, - listVerbosity = toFlag normal, - listPackageDBs = [] - } - -listCommand :: CommandUI ListFlags -listCommand = CommandUI { - commandName = "list", - commandSynopsis = "List packages matching a search string.", - commandDescription = Just $ \_ -> wrapText $ - "List all packages, or all packages matching one of the search" - ++ " strings.\n" - ++ "\n" - ++ "If there is a sandbox in the current directory and " - ++ "config:ignore-sandbox is False, use the sandbox package database. " - ++ "Otherwise, use the package database specified with --package-db. 
" - ++ "If not specified, use the user package database.\n", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " list pandoc\n" - ++ " Will find pandoc, pandoc-citeproc, pandoc-lens, ...\n", - commandUsage = usageAlternatives "list" [ "[FLAGS]" - , "[FLAGS] STRINGS"], - commandDefaultFlags = defaultListFlags, - commandOptions = \_ -> [ - optionVerbosity listVerbosity (\v flags -> flags { listVerbosity = v }) - - , option [] ["installed"] - "Only print installed packages" - listInstalled (\v flags -> flags { listInstalled = v }) - trueArg - - , option [] ["simple-output"] - "Print in a easy-to-parse format" - listSimpleOutput (\v flags -> flags { listSimpleOutput = v }) - trueArg - - , option "" ["package-db"] - ( "Append the given package database to the list of package" - ++ " databases used (to satisfy dependencies and register into)." - ++ " May be a specific file, 'global' or 'user'. The initial list" - ++ " is ['global'], ['global', 'user'], or ['global', $sandbox]," - ++ " depending on context. Use 'clear' to reset the list to empty." 
- ++ " See the user guide for details.") - listPackageDBs (\v flags -> flags { listPackageDBs = v }) - (reqArg' "DB" readPackageDbList showPackageDbList) - - ] - } - -instance Monoid ListFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup ListFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Info flags --- ------------------------------------------------------------ - -data InfoFlags = InfoFlags { - infoVerbosity :: Flag Verbosity, - infoPackageDBs :: [Maybe PackageDB] - } deriving Generic - -defaultInfoFlags :: InfoFlags -defaultInfoFlags = InfoFlags { - infoVerbosity = toFlag normal, - infoPackageDBs = [] - } - -infoCommand :: CommandUI InfoFlags -infoCommand = CommandUI { - commandName = "info", - commandSynopsis = "Display detailed information about a particular package.", - commandDescription = Just $ \_ -> wrapText $ - "If there is a sandbox in the current directory and " - ++ "config:ignore-sandbox is False, use the sandbox package database. " - ++ "Otherwise, use the package database specified with --package-db. " - ++ "If not specified, use the user package database.\n", - commandNotes = Nothing, - commandUsage = usageAlternatives "info" ["[FLAGS] PACKAGES"], - commandDefaultFlags = defaultInfoFlags, - commandOptions = \_ -> [ - optionVerbosity infoVerbosity (\v flags -> flags { infoVerbosity = v }) - - , option "" ["package-db"] - ( "Append the given package database to the list of package" - ++ " databases used (to satisfy dependencies and register into)." - ++ " May be a specific file, 'global' or 'user'. The initial list" - ++ " is ['global'], ['global', 'user'], or ['global', $sandbox]," - ++ " depending on context. Use 'clear' to reset the list to empty." 
- ++ " See the user guide for details.") - infoPackageDBs (\v flags -> flags { infoPackageDBs = v }) - (reqArg' "DB" readPackageDbList showPackageDbList) - - ] - } - -instance Monoid InfoFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup InfoFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Install flags --- ------------------------------------------------------------ - --- | Install takes the same flags as configure along with a few extras. --- -data InstallFlags = InstallFlags { - installDocumentation :: Flag Bool, - installHaddockIndex :: Flag PathTemplate, - installDest :: Flag Cabal.CopyDest, - installDryRun :: Flag Bool, - installMaxBackjumps :: Flag Int, - installReorderGoals :: Flag ReorderGoals, - installCountConflicts :: Flag CountConflicts, - installIndependentGoals :: Flag IndependentGoals, - installShadowPkgs :: Flag ShadowPkgs, - installStrongFlags :: Flag StrongFlags, - installAllowBootLibInstalls :: Flag AllowBootLibInstalls, - installReinstall :: Flag Bool, - installAvoidReinstalls :: Flag AvoidReinstalls, - installOverrideReinstall :: Flag Bool, - installUpgradeDeps :: Flag Bool, - installOnly :: Flag Bool, - installOnlyDeps :: Flag Bool, - installIndexState :: Flag IndexState, - installRootCmd :: Flag String, - installSummaryFile :: NubList PathTemplate, - installLogFile :: Flag PathTemplate, - installBuildReports :: Flag ReportLevel, - installReportPlanningFailure :: Flag Bool, - installSymlinkBinDir :: Flag FilePath, - installPerComponent :: Flag Bool, - installOneShot :: Flag Bool, - installNumJobs :: Flag (Maybe Int), - installKeepGoing :: Flag Bool, - installRunTests :: Flag Bool, - installOfflineMode :: Flag Bool, - -- | The cabal project file name; defaults to @cabal.project@. 
- -- Th name itself denotes the cabal project file name, but it also - -- is the base of auxiliary project files, such as - -- @cabal.project.local@ and @cabal.project.freeze@ which are also - -- read and written out in some cases. If the path is not found - -- in the current working directory, we will successively probe - -- relative to parent directories until this name is found. - installProjectFileName :: Flag FilePath - } - deriving (Eq, Generic) - -instance Binary InstallFlags - -defaultInstallFlags :: InstallFlags -defaultInstallFlags = InstallFlags { - installDocumentation = Flag False, - installHaddockIndex = Flag docIndexFile, - installDest = Flag Cabal.NoCopyDest, - installDryRun = Flag False, - installMaxBackjumps = Flag defaultMaxBackjumps, - installReorderGoals = Flag (ReorderGoals False), - installCountConflicts = Flag (CountConflicts True), - installIndependentGoals= Flag (IndependentGoals False), - installShadowPkgs = Flag (ShadowPkgs False), - installStrongFlags = Flag (StrongFlags False), - installAllowBootLibInstalls = Flag (AllowBootLibInstalls False), - installReinstall = Flag False, - installAvoidReinstalls = Flag (AvoidReinstalls False), - installOverrideReinstall = Flag False, - installUpgradeDeps = Flag False, - installOnly = Flag False, - installOnlyDeps = Flag False, - installIndexState = mempty, - installRootCmd = mempty, - installSummaryFile = mempty, - installLogFile = mempty, - installBuildReports = Flag NoReports, - installReportPlanningFailure = Flag False, - installSymlinkBinDir = mempty, - installPerComponent = Flag True, - installOneShot = Flag False, - installNumJobs = mempty, - installKeepGoing = Flag False, - installRunTests = mempty, - installOfflineMode = Flag False, - installProjectFileName = mempty - } - where - docIndexFile = toPathTemplate ("$datadir" "doc" - "$arch-$os-$compiler" "index.html") - -defaultMaxBackjumps :: Int -defaultMaxBackjumps = 2000 - -defaultSolver :: PreSolver -defaultSolver = AlwaysModular - 
-allSolvers :: String -allSolvers = intercalate ", " (map display ([minBound .. maxBound] :: [PreSolver])) - -installCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) -installCommand = CommandUI { - commandName = "install", - commandSynopsis = "Install packages.", - commandUsage = usageAlternatives "v1-install" [ "[FLAGS]" - , "[FLAGS] PACKAGES" - ], - commandDescription = Just $ \_ -> wrapText $ - "Installs one or more packages. By default, the installed package" - ++ " will be registered in the user's package database or, if a sandbox" - ++ " is present in the current directory, inside the sandbox.\n" - ++ "\n" - ++ "If PACKAGES are specified, downloads and installs those packages." - ++ " Otherwise, install the package in the current directory (and/or its" - ++ " dependencies) (there must be exactly one .cabal file in the current" - ++ " directory).\n" - ++ "\n" - ++ "When using a sandbox, the flags for `v1-install` only affect the" - ++ " current command and have no effect on future commands. (To achieve" - ++ " that, `v1-configure` must be used.)\n" - ++ " In contrast, without a sandbox, the flags to `v1-install` are saved and" - ++ " affect future commands such as `v1-build` and `v1-repl`. See the help for" - ++ " `v1-configure` for a list of commands being affected.\n" - ++ "\n" - ++ "Installed executables will by default (and without a sandbox)" - ++ " be put into `~/.cabal/bin/`." 
- ++ " If you want installed executable to be available globally, make" - ++ " sure that the PATH environment variable contains that directory.\n" - ++ "When using a sandbox, executables will be put into" - ++ " `$SANDBOX/bin/` (by default: `./.cabal-sandbox/bin/`).\n" - ++ "\n" - ++ "When specifying --bindir, consider also specifying --datadir;" - ++ " this way the sandbox can be deleted and the executable should" - ++ " continue working as long as bindir and datadir are left untouched.", - commandNotes = Just $ \pname -> - ( case commandNotes - $ Cabal.configureCommand defaultProgramDb - of Just desc -> desc pname ++ "\n" - Nothing -> "" - ) - ++ "Examples:\n" - ++ " " ++ pname ++ " v1-install " - ++ " Package in the current directory\n" - ++ " " ++ pname ++ " v1-install foo " - ++ " Package from the hackage server\n" - ++ " " ++ pname ++ " v1-install foo-1.0 " - ++ " Specific version of a package\n" - ++ " " ++ pname ++ " v1-install 'foo < 2' " - ++ " Constrained package version\n" - ++ " " ++ pname ++ " v1-install haddock --bindir=$HOME/hask-bin/ --datadir=$HOME/hask-data/\n" - ++ " " ++ (map (const ' ') pname) - ++ " " - ++ " Change installation destination\n", - commandDefaultFlags = (mempty, mempty, mempty, mempty), - commandOptions = \showOrParseArgs -> - liftOptions get1 set1 - -- Note: [Hidden Flags] - -- hide "constraint", "dependency", and - -- "exact-configuration" from the configure options. - (filter ((`notElem` ["constraint", "dependency" - , "exact-configuration"]) - . optionName) $ - configureOptions showOrParseArgs) - ++ liftOptions get2 set2 (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag) - ++ liftOptions get3 set3 - -- hide "target-package-db" flag from the - -- install options. - (filter ((`notElem` ["target-package-db"]) - . 
optionName) $ - installOptions showOrParseArgs) - ++ liftOptions get4 set4 (haddockOptions showOrParseArgs) - } - where - get1 (a,_,_,_) = a; set1 a (_,b,c,d) = (a,b,c,d) - get2 (_,b,_,_) = b; set2 b (a,_,c,d) = (a,b,c,d) - get3 (_,_,c,_) = c; set3 c (a,b,_,d) = (a,b,c,d) - get4 (_,_,_,d) = d; set4 d (a,b,c,_) = (a,b,c,d) - -haddockCommand :: CommandUI HaddockFlags -haddockCommand = Cabal.haddockCommand - { commandUsage = usageAlternatives "v1-haddock" $ - [ "[FLAGS]", "COMPONENTS [FLAGS]" ] - } - -filterHaddockArgs :: [String] -> Version -> [String] -filterHaddockArgs args cabalLibVersion - | cabalLibVersion >= mkVersion [2,3,0] = args_latest - | cabalLibVersion < mkVersion [2,3,0] = args_2_3_0 - | otherwise = args_latest - where - args_latest = args - - -- Cabal < 2.3 doesn't know about per-component haddock - args_2_3_0 = [] - -filterHaddockFlags :: HaddockFlags -> Version -> HaddockFlags -filterHaddockFlags flags cabalLibVersion - | cabalLibVersion >= mkVersion [2,3,0] = flags_latest - | cabalLibVersion < mkVersion [2,3,0] = flags_2_3_0 - | otherwise = flags_latest - where - flags_latest = flags - - flags_2_3_0 = flags_latest { - -- Cabal < 2.3 doesn't know about per-component haddock - haddockArgs = [] - } - -haddockOptions :: ShowOrParseArgs -> [OptionField HaddockFlags] -haddockOptions showOrParseArgs - = [ opt { optionName = "haddock-" ++ name, - optionDescr = [ fmapOptFlags (\(_, lflags) -> ([], map ("haddock-" ++) lflags)) descr - | descr <- optionDescr opt] } - | opt <- commandOptions Cabal.haddockCommand showOrParseArgs - , let name = optionName opt - , name `elem` ["hoogle", "html", "html-location" - ,"executables", "tests", "benchmarks", "all", "internal", "css" - ,"hyperlink-source", "quickjump", "hscolour-css" - ,"contents-location", "for-hackage"] - ] - where - fmapOptFlags :: (OptFlags -> OptFlags) -> OptDescr a -> OptDescr a - fmapOptFlags modify (ReqArg d f p r w) = ReqArg d (modify f) p r w - fmapOptFlags modify (OptArg d f p r i w) = OptArg d 
(modify f) p r i w - fmapOptFlags modify (ChoiceOpt xs) = ChoiceOpt [(d, modify f, i, w) | (d, f, i, w) <- xs] - fmapOptFlags modify (BoolOpt d f1 f2 r w) = BoolOpt d (modify f1) (modify f2) r w - -installOptions :: ShowOrParseArgs -> [OptionField InstallFlags] -installOptions showOrParseArgs = - [ option "" ["documentation"] - "building of documentation" - installDocumentation (\v flags -> flags { installDocumentation = v }) - (boolOpt [] []) - - , option [] ["doc-index-file"] - "A central index of haddock API documentation (template cannot use $pkgid)" - installHaddockIndex (\v flags -> flags { installHaddockIndex = v }) - (reqArg' "TEMPLATE" (toFlag.toPathTemplate) - (flagToList . fmap fromPathTemplate)) - - , option [] ["dry-run"] - "Do not install anything, only print what would be installed." - installDryRun (\v flags -> flags { installDryRun = v }) - trueArg - - , option "" ["target-package-db"] - "package database to install into. Required when using ${pkgroot} prefix." - installDest (\v flags -> flags { installDest = v }) - (reqArg "DATABASE" (succeedReadE (Flag . Cabal.CopyToDb)) - (\f -> case f of Flag (Cabal.CopyToDb p) -> [p]; _ -> [])) - ] ++ - - optionSolverFlags showOrParseArgs - installMaxBackjumps (\v flags -> flags { installMaxBackjumps = v }) - installReorderGoals (\v flags -> flags { installReorderGoals = v }) - installCountConflicts (\v flags -> flags { installCountConflicts = v }) - installIndependentGoals (\v flags -> flags { installIndependentGoals = v }) - installShadowPkgs (\v flags -> flags { installShadowPkgs = v }) - installStrongFlags (\v flags -> flags { installStrongFlags = v }) - installAllowBootLibInstalls (\v flags -> flags { installAllowBootLibInstalls = v }) ++ - - [ option [] ["reinstall"] - "Install even if it means installing the same version again." 
- installReinstall (\v flags -> flags { installReinstall = v }) - (yesNoOpt showOrParseArgs) - - , option [] ["avoid-reinstalls"] - "Do not select versions that would destructively overwrite installed packages." - (fmap asBool . installAvoidReinstalls) - (\v flags -> flags { installAvoidReinstalls = fmap AvoidReinstalls v }) - (yesNoOpt showOrParseArgs) - - , option [] ["force-reinstalls"] - "Reinstall packages even if they will most likely break other installed packages." - installOverrideReinstall (\v flags -> flags { installOverrideReinstall = v }) - (yesNoOpt showOrParseArgs) - - , option [] ["upgrade-dependencies"] - "Pick the latest version for all dependencies, rather than trying to pick an installed version." - installUpgradeDeps (\v flags -> flags { installUpgradeDeps = v }) - (yesNoOpt showOrParseArgs) - - , option [] ["only-dependencies"] - "Install only the dependencies necessary to build the given packages" - installOnlyDeps (\v flags -> flags { installOnlyDeps = v }) - (yesNoOpt showOrParseArgs) - - , option [] ["dependencies-only"] - "A synonym for --only-dependencies" - installOnlyDeps (\v flags -> flags { installOnlyDeps = v }) - (yesNoOpt showOrParseArgs) - - , option [] ["index-state"] - ("Use source package index state as it existed at a previous time. " ++ - "Accepts unix-timestamps (e.g. '@1474732068'), ISO8601 UTC timestamps " ++ - "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD' (default: 'HEAD').") - installIndexState (\v flags -> flags { installIndexState = v }) - (reqArg "STATE" (readP_to_E (const $ "index-state must be a " ++ - "unix-timestamps (e.g. '@1474732068'), " ++ - "a ISO8601 UTC timestamp " ++ - "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD'") - (toFlag `fmap` parse)) - (flagToList . 
fmap display)) - - , option [] ["root-cmd"] - "(No longer supported, do not use.)" - installRootCmd (\v flags -> flags { installRootCmd = v }) - (reqArg' "COMMAND" toFlag flagToList) - - , option [] ["symlink-bindir"] - "Add symlinks to installed executables into this directory." - installSymlinkBinDir (\v flags -> flags { installSymlinkBinDir = v }) - (reqArgFlag "DIR") - - , option [] ["build-summary"] - "Save build summaries to file (name template can use $pkgid, $compiler, $os, $arch)" - installSummaryFile (\v flags -> flags { installSummaryFile = v }) - (reqArg' "TEMPLATE" (\x -> toNubList [toPathTemplate x]) (map fromPathTemplate . fromNubList)) - - , option [] ["build-log"] - "Log all builds to file (name template can use $pkgid, $compiler, $os, $arch)" - installLogFile (\v flags -> flags { installLogFile = v }) - (reqArg' "TEMPLATE" (toFlag.toPathTemplate) - (flagToList . fmap fromPathTemplate)) - - , option [] ["remote-build-reporting"] - "Generate build reports to send to a remote server (none, anonymous or detailed)." - installBuildReports (\v flags -> flags { installBuildReports = v }) - (reqArg "LEVEL" (readP_to_E (const $ "report level must be 'none', " - ++ "'anonymous' or 'detailed'") - (toFlag `fmap` parse)) - (flagToList . fmap display)) - - , option [] ["report-planning-failure"] - "Generate build reports when the dependency solver fails. This is used by the Hackage build bot." - installReportPlanningFailure (\v flags -> flags { installReportPlanningFailure = v }) - trueArg - - , option "" ["per-component"] - "Per-component builds when possible" - installPerComponent (\v flags -> flags { installPerComponent = v }) - (boolOpt [] []) - - , option [] ["one-shot"] - "Do not record the packages in the world file." - installOneShot (\v flags -> flags { installOneShot = v }) - (yesNoOpt showOrParseArgs) - - , option [] ["run-tests"] - "Run package test suites during installation." 
- installRunTests (\v flags -> flags { installRunTests = v }) - trueArg - - , optionNumJobs - installNumJobs (\v flags -> flags { installNumJobs = v }) - - , option [] ["keep-going"] - "After a build failure, continue to build other unaffected packages." - installKeepGoing (\v flags -> flags { installKeepGoing = v }) - trueArg - - , option [] ["offline"] - "Don't download packages from the Internet." - installOfflineMode (\v flags -> flags { installOfflineMode = v }) - (yesNoOpt showOrParseArgs) - - , option [] ["project-file"] - "Set the name of the cabal.project file to search for in parent directories" - installProjectFileName (\v flags -> flags {installProjectFileName = v}) - (reqArgFlag "FILE") - ] ++ case showOrParseArgs of -- TODO: remove when "cabal install" - -- avoids - ParseArgs -> - [ option [] ["only"] - "Only installs the package in the current directory." - installOnly (\v flags -> flags { installOnly = v }) - trueArg ] - _ -> [] - - -instance Monoid InstallFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup InstallFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Upload flags --- ------------------------------------------------------------ - --- | Is this a candidate package or a package to be published? 
-data IsCandidate = IsCandidate | IsPublished - deriving Eq - -data UploadFlags = UploadFlags { - uploadCandidate :: Flag IsCandidate, - uploadDoc :: Flag Bool, - uploadUsername :: Flag Username, - uploadPassword :: Flag Password, - uploadPasswordCmd :: Flag [String], - uploadVerbosity :: Flag Verbosity - } deriving Generic - -defaultUploadFlags :: UploadFlags -defaultUploadFlags = UploadFlags { - uploadCandidate = toFlag IsCandidate, - uploadDoc = toFlag False, - uploadUsername = mempty, - uploadPassword = mempty, - uploadPasswordCmd = mempty, - uploadVerbosity = toFlag normal - } - -uploadCommand :: CommandUI UploadFlags -uploadCommand = CommandUI { - commandName = "upload", - commandSynopsis = "Uploads source packages or documentation to Hackage.", - commandDescription = Nothing, - commandNotes = Just $ \_ -> - "You can store your Hackage login in the ~/.cabal/config file\n" - ++ relevantConfigValuesText ["username", "password"], - commandUsage = \pname -> - "Usage: " ++ pname ++ " upload [FLAGS] TARFILES\n", - commandDefaultFlags = defaultUploadFlags, - commandOptions = \_ -> - [optionVerbosity uploadVerbosity - (\v flags -> flags { uploadVerbosity = v }) - - ,option [] ["publish"] - "Publish the package instead of uploading it as a candidate." - uploadCandidate (\v flags -> flags { uploadCandidate = v }) - (noArg (Flag IsPublished)) - - ,option ['d'] ["documentation"] - ("Upload documentation instead of a source package. " - ++ "By default, this uploads documentation for a package candidate. " - ++ "To upload documentation for " - ++ "a published package, combine with --publish.") - uploadDoc (\v flags -> flags { uploadDoc = v }) - trueArg - - ,option ['u'] ["username"] - "Hackage username." - uploadUsername (\v flags -> flags { uploadUsername = v }) - (reqArg' "USERNAME" (toFlag . Username) - (flagToList . fmap unUsername)) - - ,option ['p'] ["password"] - "Hackage password." 
- uploadPassword (\v flags -> flags { uploadPassword = v }) - (reqArg' "PASSWORD" (toFlag . Password) - (flagToList . fmap unPassword)) - - ,option ['P'] ["password-command"] - "Command to get Hackage password." - uploadPasswordCmd (\v flags -> flags { uploadPasswordCmd = v }) - (reqArg' "PASSWORD" (Flag . words) (fromMaybe [] . flagToMaybe)) - ] - } - -instance Monoid UploadFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup UploadFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Init flags --- ------------------------------------------------------------ - -emptyInitFlags :: IT.InitFlags -emptyInitFlags = mempty - -defaultInitFlags :: IT.InitFlags -defaultInitFlags = emptyInitFlags { IT.initVerbosity = toFlag normal } - -initCommand :: CommandUI IT.InitFlags -initCommand = CommandUI { - commandName = "init", - commandSynopsis = "Create a new .cabal package file (interactively).", - commandDescription = Just $ \_ -> wrapText $ - "Cabalise a project by creating a .cabal, Setup.hs, and " - ++ "optionally a LICENSE file.\n" - ++ "\n" - ++ "Calling init with no arguments (recommended) uses an " - ++ "interactive mode, which will try to guess as much as " - ++ "possible and prompt you for the rest. Command-line " - ++ "arguments are provided for scripting purposes. " - ++ "If you don't want interactive mode, be sure to pass " - ++ "the -n flag.\n", - commandNotes = Nothing, - commandUsage = \pname -> - "Usage: " ++ pname ++ " init [FLAGS]\n", - commandDefaultFlags = defaultInitFlags, - commandOptions = \_ -> - [ option ['n'] ["non-interactive"] - "Non-interactive mode." - IT.nonInteractive (\v flags -> flags { IT.nonInteractive = v }) - trueArg - - , option ['q'] ["quiet"] - "Do not generate log messages to stdout." - IT.quiet (\v flags -> flags { IT.quiet = v }) - trueArg - - , option [] ["no-comments"] - "Do not generate explanatory comments in the .cabal file." 
- IT.noComments (\v flags -> flags { IT.noComments = v }) - trueArg - - , option ['m'] ["minimal"] - "Generate a minimal .cabal file, that is, do not include extra empty fields. Also implies --no-comments." - IT.minimal (\v flags -> flags { IT.minimal = v }) - trueArg - - , option [] ["overwrite"] - "Overwrite any existing .cabal, LICENSE, or Setup.hs files without warning." - IT.overwrite (\v flags -> flags { IT.overwrite = v }) - trueArg - - , option [] ["package-dir", "packagedir"] - "Root directory of the package (default = current directory)." - IT.packageDir (\v flags -> flags { IT.packageDir = v }) - (reqArgFlag "DIRECTORY") - - , option ['p'] ["package-name"] - "Name of the Cabal package to create." - IT.packageName (\v flags -> flags { IT.packageName = v }) - (reqArg "PACKAGE" (readP_to_E ("Cannot parse package name: "++) - (toFlag `fmap` parse)) - (flagToList . fmap display)) - - , option [] ["version"] - "Initial version of the package." - IT.version (\v flags -> flags { IT.version = v }) - (reqArg "VERSION" (readP_to_E ("Cannot parse package version: "++) - (toFlag `fmap` parse)) - (flagToList . fmap display)) - - , option [] ["cabal-version"] - "Version of the Cabal specification." - IT.cabalVersion (\v flags -> flags { IT.cabalVersion = v }) - (reqArg "VERSION_RANGE" (readP_to_E ("Cannot parse Cabal specification version: "++) - (toFlag `fmap` parse)) - (flagToList . fmap display)) - - , option ['l'] ["license"] - "Project license." - IT.license (\v flags -> flags { IT.license = v }) - (reqArg "LICENSE" (readP_to_E ("Cannot parse license: "++) - (toFlag `fmap` parse)) - (flagToList . fmap display)) - - , option ['a'] ["author"] - "Name of the project's author." - IT.author (\v flags -> flags { IT.author = v }) - (reqArgFlag "NAME") - - , option ['e'] ["email"] - "Email address of the maintainer." - IT.email (\v flags -> flags { IT.email = v }) - (reqArgFlag "EMAIL") - - , option ['u'] ["homepage"] - "Project homepage and/or repository." 
- IT.homepage (\v flags -> flags { IT.homepage = v }) - (reqArgFlag "URL") - - , option ['s'] ["synopsis"] - "Short project synopsis." - IT.synopsis (\v flags -> flags { IT.synopsis = v }) - (reqArgFlag "TEXT") - - , option ['c'] ["category"] - "Project category." - IT.category (\v flags -> flags { IT.category = v }) - (reqArg' "CATEGORY" (\s -> toFlag $ maybe (Left s) Right (readMaybe s)) - (flagToList . fmap (either id show))) - - , option ['x'] ["extra-source-file"] - "Extra source file to be distributed with tarball." - IT.extraSrc (\v flags -> flags { IT.extraSrc = v }) - (reqArg' "FILE" (Just . (:[])) - (fromMaybe [])) - - , option [] ["is-library"] - "Build a library." - IT.packageType (\v flags -> flags { IT.packageType = v }) - (noArg (Flag IT.Library)) - - , option [] ["is-executable"] - "Build an executable." - IT.packageType - (\v flags -> flags { IT.packageType = v }) - (noArg (Flag IT.Executable)) - - , option [] ["is-libandexe"] - "Build a library and an executable." - IT.packageType - (\v flags -> flags { IT.packageType = v }) - (noArg (Flag IT.LibraryAndExecutable)) - - , option [] ["main-is"] - "Specify the main module." - IT.mainIs - (\v flags -> flags { IT.mainIs = v }) - (reqArgFlag "FILE") - - , option [] ["language"] - "Specify the default language." - IT.language - (\v flags -> flags { IT.language = v }) - (reqArg "LANGUAGE" (readP_to_E ("Cannot parse language: "++) - (toFlag `fmap` parse)) - (flagToList . fmap display)) - - , option ['o'] ["expose-module"] - "Export a module from the package." - IT.exposedModules - (\v flags -> flags { IT.exposedModules = v }) - (reqArg "MODULE" (readP_to_E ("Cannot parse module name: "++) - ((Just . (:[])) `fmap` parse)) - (maybe [] (fmap display))) - - , option [] ["extension"] - "Use a LANGUAGE extension (in the other-extensions field)." - IT.otherExts - (\v flags -> flags { IT.otherExts = v }) - (reqArg "EXTENSION" (readP_to_E ("Cannot parse extension: "++) - ((Just . 
(:[])) `fmap` parse)) - (maybe [] (fmap display))) - - , option ['d'] ["dependency"] - "Package dependency." - IT.dependencies (\v flags -> flags { IT.dependencies = v }) - (reqArg "PACKAGE" (readP_to_E ("Cannot parse dependency: "++) - ((Just . (:[])) `fmap` parse)) - (maybe [] (fmap display))) - - , option [] ["source-dir", "sourcedir"] - "Directory containing package source." - IT.sourceDirs (\v flags -> flags { IT.sourceDirs = v }) - (reqArg' "DIR" (Just . (:[])) - (fromMaybe [])) - - , option [] ["build-tool"] - "Required external build tool." - IT.buildTools (\v flags -> flags { IT.buildTools = v }) - (reqArg' "TOOL" (Just . (:[])) - (fromMaybe [])) - - , optionVerbosity IT.initVerbosity (\v flags -> flags { IT.initVerbosity = v }) - ] - } - --- ------------------------------------------------------------ --- * SDist flags --- ------------------------------------------------------------ - --- | Extra flags to @sdist@ beyond runghc Setup sdist --- -data SDistExFlags = SDistExFlags { - sDistFormat :: Flag ArchiveFormat - } - deriving (Show, Generic) - -data ArchiveFormat = TargzFormat | ZipFormat -- ... 
- deriving (Show, Eq) - -defaultSDistExFlags :: SDistExFlags -defaultSDistExFlags = SDistExFlags { - sDistFormat = Flag TargzFormat - } - -sdistCommand :: CommandUI (SDistFlags, SDistExFlags) -sdistCommand = Cabal.sdistCommand { - commandUsage = \pname -> - "Usage: " ++ pname ++ " v1-sdist [FLAGS]\n", - commandDefaultFlags = (commandDefaultFlags Cabal.sdistCommand, defaultSDistExFlags), - commandOptions = \showOrParseArgs -> - liftOptions fst setFst (commandOptions Cabal.sdistCommand showOrParseArgs) - ++ liftOptions snd setSnd sdistExOptions - } - where - setFst a (_,b) = (a,b) - setSnd b (a,_) = (a,b) - - sdistExOptions = - [option [] ["archive-format"] "archive-format" - sDistFormat (\v flags -> flags { sDistFormat = v }) - (choiceOpt - [ (Flag TargzFormat, ([], ["targz"]), - "Produce a '.tar.gz' format archive (default and required for uploading to hackage)") - , (Flag ZipFormat, ([], ["zip"]), - "Produce a '.zip' format archive") - ]) - ] - -instance Monoid SDistExFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup SDistExFlags where - (<>) = gmappend - --- - -doctestCommand :: CommandUI DoctestFlags -doctestCommand = Cabal.doctestCommand - { commandUsage = \pname -> "Usage: " ++ pname ++ " v1-doctest [FLAGS]\n" } - -copyCommand :: CommandUI CopyFlags -copyCommand = Cabal.copyCommand - { commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " v1-copy " - ++ " All the components in the package\n" - ++ " " ++ pname ++ " v1-copy foo " - ++ " A component (i.e. 
lib, exe, test suite)" - , commandUsage = usageAlternatives "v1-copy" $ - [ "[FLAGS]" - , "COMPONENTS [FLAGS]" - ] - } - -registerCommand :: CommandUI RegisterFlags -registerCommand = Cabal.registerCommand - { commandUsage = \pname -> "Usage: " ++ pname ++ " v1-register [FLAGS]\n" } - --- ------------------------------------------------------------ --- * Win32SelfUpgrade flags --- ------------------------------------------------------------ - -data Win32SelfUpgradeFlags = Win32SelfUpgradeFlags { - win32SelfUpgradeVerbosity :: Flag Verbosity -} deriving Generic - -defaultWin32SelfUpgradeFlags :: Win32SelfUpgradeFlags -defaultWin32SelfUpgradeFlags = Win32SelfUpgradeFlags { - win32SelfUpgradeVerbosity = toFlag normal -} - -win32SelfUpgradeCommand :: CommandUI Win32SelfUpgradeFlags -win32SelfUpgradeCommand = CommandUI { - commandName = "win32selfupgrade", - commandSynopsis = "Self-upgrade the executable on Windows", - commandDescription = Nothing, - commandNotes = Nothing, - commandUsage = \pname -> - "Usage: " ++ pname ++ " win32selfupgrade PID PATH\n", - commandDefaultFlags = defaultWin32SelfUpgradeFlags, - commandOptions = \_ -> - [optionVerbosity win32SelfUpgradeVerbosity - (\v flags -> flags { win32SelfUpgradeVerbosity = v}) - ] -} - -instance Monoid Win32SelfUpgradeFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup Win32SelfUpgradeFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * ActAsSetup flags --- ------------------------------------------------------------ - -data ActAsSetupFlags = ActAsSetupFlags { - actAsSetupBuildType :: Flag BuildType -} deriving Generic - -defaultActAsSetupFlags :: ActAsSetupFlags -defaultActAsSetupFlags = ActAsSetupFlags { - actAsSetupBuildType = toFlag Simple -} - -actAsSetupCommand :: CommandUI ActAsSetupFlags -actAsSetupCommand = CommandUI { - commandName = "act-as-setup", - commandSynopsis = "Run as-if this was a Setup.hs", - commandDescription = Nothing, - 
commandNotes = Nothing, - commandUsage = \pname -> - "Usage: " ++ pname ++ " act-as-setup\n", - commandDefaultFlags = defaultActAsSetupFlags, - commandOptions = \_ -> - [option "" ["build-type"] - "Use the given build type." - actAsSetupBuildType (\v flags -> flags { actAsSetupBuildType = v }) - (reqArg "BUILD-TYPE" (readP_to_E ("Cannot parse build type: "++) - (fmap toFlag parse)) - (map display . flagToList)) - ] -} - -instance Monoid ActAsSetupFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup ActAsSetupFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Sandbox-related flags --- ------------------------------------------------------------ - -data SandboxFlags = SandboxFlags { - sandboxVerbosity :: Flag Verbosity, - sandboxSnapshot :: Flag Bool, -- FIXME: this should be an 'add-source'-only - -- flag. - sandboxLocation :: Flag FilePath -} deriving Generic - -defaultSandboxLocation :: FilePath -defaultSandboxLocation = ".cabal-sandbox" - -defaultSandboxFlags :: SandboxFlags -defaultSandboxFlags = SandboxFlags { - sandboxVerbosity = toFlag normal, - sandboxSnapshot = toFlag False, - sandboxLocation = toFlag defaultSandboxLocation - } - -sandboxCommand :: CommandUI SandboxFlags -sandboxCommand = CommandUI { - commandName = "sandbox", - commandSynopsis = "Create/modify/delete a sandbox.", - commandDescription = Just $ \pname -> concat - [ paragraph $ "Sandboxes are isolated package databases that can be used" - ++ " to prevent dependency conflicts that arise when many different" - ++ " packages are installed in the same database (i.e. the user's" - ++ " database in the home directory)." - , paragraph $ "A sandbox in the current directory (created by" - ++ " `v1-sandbox init`) will be used instead of the user's database for" - ++ " commands such as `v1-install` and `v1-build`. 
Note that (a directly" - ++ " invoked) GHC will not automatically be aware of sandboxes;" - ++ " only if called via appropriate " ++ pname - ++ " commands, e.g. `v1-repl`, `v1-build`, `v1-exec`." - , paragraph $ "Currently, " ++ pname ++ " will not search for a sandbox" - ++ " in folders above the current one, so cabal will not see the sandbox" - ++ " if you are in a subfolder of a sandbox." - , paragraph "Subcommands:" - , headLine "init:" - , indentParagraph $ "Initialize a sandbox in the current directory." - ++ " An existing package database will not be modified, but settings" - ++ " (such as the location of the database) can be modified this way." - , headLine "delete:" - , indentParagraph $ "Remove the sandbox; deleting all the packages" - ++ " installed inside." - , headLine "add-source:" - , indentParagraph $ "Make one or more local packages available in the" - ++ " sandbox. PATHS may be relative or absolute." - ++ " Typical usecase is when you need" - ++ " to make a (temporary) modification to a dependency: You download" - ++ " the package into a different directory, make the modification," - ++ " and add that directory to the sandbox with `add-source`." - , indentParagraph $ "Unless given `--snapshot`, any add-source'd" - ++ " dependency that was modified since the last build will be" - ++ " re-installed automatically." - , headLine "delete-source:" - , indentParagraph $ "Remove an add-source dependency; however, this will" - ++ " not delete the package(s) that have been installed in the sandbox" - ++ " from this dependency. You can either unregister the package(s) via" - ++ " `" ++ pname ++ " v1-sandbox hc-pkg unregister` or re-create the" - ++ " sandbox (`v1-sandbox delete; v1-sandbox init`)." - , headLine "list-sources:" - , indentParagraph $ "List the directories of local packages made" - ++ " available via `" ++ pname ++ " v1-sandbox add-source`." 
- , headLine "hc-pkg:" - , indentParagraph $ "Similar to `ghc-pkg`, but for the sandbox package" - ++ " database. Can be used to list specific/all packages that are" - ++ " installed in the sandbox. For subcommands, see the help for" - ++ " ghc-pkg. Affected by the compiler version specified by `v1-configure`." - ], - commandNotes = Just $ \pname -> - relevantConfigValuesText ["require-sandbox" - ,"ignore-sandbox"] - ++ "\n" - ++ "Examples:\n" - ++ " Set up a sandbox with one local dependency, located at ../foo:\n" - ++ " " ++ pname ++ " v1-sandbox init\n" - ++ " " ++ pname ++ " v1-sandbox add-source ../foo\n" - ++ " " ++ pname ++ " v1-install --only-dependencies\n" - ++ " Reset the sandbox:\n" - ++ " " ++ pname ++ " v1-sandbox delete\n" - ++ " " ++ pname ++ " v1-sandbox init\n" - ++ " " ++ pname ++ " v1-install --only-dependencies\n" - ++ " List the packages in the sandbox:\n" - ++ " " ++ pname ++ " v1-sandbox hc-pkg list\n" - ++ " Unregister the `broken` package from the sandbox:\n" - ++ " " ++ pname ++ " v1-sandbox hc-pkg -- --force unregister broken\n", - commandUsage = usageAlternatives "v1-sandbox" - [ "init [FLAGS]" - , "delete [FLAGS]" - , "add-source [FLAGS] PATHS" - , "delete-source [FLAGS] PATHS" - , "list-sources [FLAGS]" - , "hc-pkg [FLAGS] [--] COMMAND [--] [ARGS]" - ], - - commandDefaultFlags = defaultSandboxFlags, - commandOptions = \_ -> - [ optionVerbosity sandboxVerbosity - (\v flags -> flags { sandboxVerbosity = v }) - - , option [] ["snapshot"] - "Take a snapshot instead of creating a link (only applies to 'add-source')" - sandboxSnapshot (\v flags -> flags { sandboxSnapshot = v }) - trueArg - - , option [] ["sandbox"] - "Sandbox location (default: './.cabal-sandbox')." 
- sandboxLocation (\v flags -> flags { sandboxLocation = v }) - (reqArgFlag "DIR") - ] - } - -instance Monoid SandboxFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup SandboxFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * Exec Flags --- ------------------------------------------------------------ - -data ExecFlags = ExecFlags { - execVerbosity :: Flag Verbosity, - execDistPref :: Flag FilePath -} deriving Generic - -defaultExecFlags :: ExecFlags -defaultExecFlags = ExecFlags { - execVerbosity = toFlag normal, - execDistPref = NoFlag - } - -execCommand :: CommandUI ExecFlags -execCommand = CommandUI { - commandName = "exec", - commandSynopsis = "Give a command access to the sandbox package repository.", - commandDescription = Just $ \pname -> wrapText $ - -- TODO: this is too GHC-focused for my liking.. - "A directly invoked GHC will not automatically be aware of any" - ++ " sandboxes: the GHC_PACKAGE_PATH environment variable controls what" - ++ " GHC uses. `" ++ pname ++ " v1-exec` can be used to modify this variable:" - ++ " COMMAND will be executed in a modified environment and thereby uses" - ++ " the sandbox package database.\n" - ++ "\n" - ++ "If there is no sandbox, behaves as identity (executing COMMAND).\n" - ++ "\n" - ++ "Note that other " ++ pname ++ " commands change the environment" - ++ " variable appropriately already, so there is no need to wrap those" - ++ " in `" ++ pname ++ " v1-exec`. 
But with `" ++ pname ++ " v1-exec`, the user" - ++ " has more control and can, for example, execute custom scripts which" - ++ " indirectly execute GHC.\n" - ++ "\n" - ++ "Note that `" ++ pname ++ " v1-repl` is different from `" ++ pname - ++ " v1-exec -- ghci` as the latter will not forward any additional flags" - ++ " being defined in the local package to ghci.\n" - ++ "\n" - ++ "See `" ++ pname ++ " sandbox`.\n", - commandNotes = Just $ \pname -> - "Examples:\n" - ++ " " ++ pname ++ " v1-exec -- ghci -Wall\n" - ++ " Start a repl session with sandbox packages and all warnings;\n" - ++ " " ++ pname ++ " v1-exec gitit -- -f gitit.cnf\n" - ++ " Give gitit access to the sandbox packages, and pass it a flag;\n" - ++ " " ++ pname ++ " v1-exec runghc Foo.hs\n" - ++ " Execute runghc on Foo.hs with runghc configured to use the\n" - ++ " sandbox package database (if a sandbox is being used).\n", - commandUsage = \pname -> - "Usage: " ++ pname ++ " v1-exec [FLAGS] [--] COMMAND [--] [ARGS]\n", - - commandDefaultFlags = defaultExecFlags, - commandOptions = \showOrParseArgs -> - [ optionVerbosity execVerbosity - (\v flags -> flags { execVerbosity = v }) - , Cabal.optionDistPref - execDistPref (\d flags -> flags { execDistPref = d }) - showOrParseArgs - ] - } - -instance Monoid ExecFlags where - mempty = gmempty - mappend = (<>) - -instance Semigroup ExecFlags where - (<>) = gmappend - --- ------------------------------------------------------------ --- * UserConfig flags --- ------------------------------------------------------------ - -data UserConfigFlags = UserConfigFlags { - userConfigVerbosity :: Flag Verbosity, - userConfigForce :: Flag Bool, - userConfigAppendLines :: Flag [String] - } deriving Generic - -instance Monoid UserConfigFlags where - mempty = UserConfigFlags { - userConfigVerbosity = toFlag normal, - userConfigForce = toFlag False, - userConfigAppendLines = toFlag [] - } - mappend = (<>) - -instance Semigroup UserConfigFlags where - (<>) = gmappend - 
-userConfigCommand :: CommandUI UserConfigFlags -userConfigCommand = CommandUI { - commandName = "user-config", - commandSynopsis = "Display and update the user's global cabal configuration.", - commandDescription = Just $ \_ -> wrapText $ - "When upgrading cabal, the set of configuration keys and their default" - ++ " values may change. This command provides means to merge the existing" - ++ " config in ~/.cabal/config" - ++ " (i.e. all bindings that are actually defined and not commented out)" - ++ " and the default config of the new version.\n" - ++ "\n" - ++ "init: Creates a new config file at either ~/.cabal/config or as" - ++ " specified by --config-file, if given. An existing file won't be " - ++ " overwritten unless -f or --force is given.\n" - ++ "diff: Shows a pseudo-diff of the user's ~/.cabal/config file and" - ++ " the default configuration that would be created by cabal if the" - ++ " config file did not exist.\n" - ++ "update: Applies the pseudo-diff to the configuration that would be" - ++ " created by default, and write the result back to ~/.cabal/config.", - - commandNotes = Nothing, - commandUsage = usageAlternatives "user-config" ["init", "diff", "update"], - commandDefaultFlags = mempty, - commandOptions = \ _ -> [ - optionVerbosity userConfigVerbosity (\v flags -> flags { userConfigVerbosity = v }) - , option ['f'] ["force"] - "Overwrite the config file if it already exists." - userConfigForce (\v flags -> flags { userConfigForce = v }) - trueArg - , option ['a'] ["augment"] - "Additional setting to augment the config file (replacing a previous setting if it existed)." - userConfigAppendLines (\v flags -> flags - {userConfigAppendLines = - Flag $ concat (flagToList (userConfigAppendLines flags) ++ flagToList v)}) - (reqArg' "CONFIGLINE" (Flag . (:[])) (fromMaybe [] . 
flagToMaybe)) - ] - } - --- ------------------------------------------------------------ --- * GetOpt Utils --- ------------------------------------------------------------ - -reqArgFlag :: ArgPlaceHolder -> - MkOptDescr (b -> Flag String) (Flag String -> b -> b) b -reqArgFlag ad = reqArg ad (succeedReadE Flag) flagToList - -liftOptions :: (b -> a) -> (a -> b -> b) - -> [OptionField a] -> [OptionField b] -liftOptions get set = map (liftOption get set) - -yesNoOpt :: ShowOrParseArgs -> MkOptDescr (b -> Flag Bool) (Flag Bool -> b -> b) b -yesNoOpt ShowArgs sf lf = trueArg sf lf -yesNoOpt _ sf lf = Command.boolOpt' flagToMaybe Flag (sf, lf) ([], map ("no-" ++) lf) sf lf - -optionSolver :: (flags -> Flag PreSolver) - -> (Flag PreSolver -> flags -> flags) - -> OptionField flags -optionSolver get set = - option [] ["solver"] - ("Select dependency solver to use (default: " ++ display defaultSolver ++ "). Choices: " ++ allSolvers ++ ".") - get set - (reqArg "SOLVER" (readP_to_E (const $ "solver must be one of: " ++ allSolvers) - (toFlag `fmap` parse)) - (flagToList . fmap display)) - -optionSolverFlags :: ShowOrParseArgs - -> (flags -> Flag Int ) -> (Flag Int -> flags -> flags) - -> (flags -> Flag ReorderGoals) -> (Flag ReorderGoals -> flags -> flags) - -> (flags -> Flag CountConflicts) -> (Flag CountConflicts -> flags -> flags) - -> (flags -> Flag IndependentGoals) -> (Flag IndependentGoals -> flags -> flags) - -> (flags -> Flag ShadowPkgs) -> (Flag ShadowPkgs -> flags -> flags) - -> (flags -> Flag StrongFlags) -> (Flag StrongFlags -> flags -> flags) - -> (flags -> Flag AllowBootLibInstalls) -> (Flag AllowBootLibInstalls -> flags -> flags) - -> [OptionField flags] -optionSolverFlags showOrParseArgs getmbj setmbj getrg setrg getcc setcc getig setig - getsip setsip getstrfl setstrfl getib setib = - [ option [] ["max-backjumps"] - ("Maximum number of backjumps allowed while solving (default: " ++ show defaultMaxBackjumps ++ "). 
Use a negative number to enable unlimited backtracking. Use 0 to disable backtracking completely.") - getmbj setmbj - (reqArg "NUM" (readP_to_E ("Cannot parse number: "++) (fmap toFlag parse)) - (map show . flagToList)) - , option [] ["reorder-goals"] - "Try to reorder goals according to certain heuristics. Slows things down on average, but may make backtracking faster for some packages." - (fmap asBool . getrg) - (setrg . fmap ReorderGoals) - (yesNoOpt showOrParseArgs) - , option [] ["count-conflicts"] - "Try to speed up solving by preferring goals that are involved in a lot of conflicts (default)." - (fmap asBool . getcc) - (setcc . fmap CountConflicts) - (yesNoOpt showOrParseArgs) - , option [] ["independent-goals"] - "Treat several goals on the command line as independent. If several goals depend on the same package, different versions can be chosen." - (fmap asBool . getig) - (setig . fmap IndependentGoals) - (yesNoOpt showOrParseArgs) - , option [] ["shadow-installed-packages"] - "If multiple package instances of the same version are installed, treat all but one as shadowed." - (fmap asBool . getsip) - (setsip . fmap ShadowPkgs) - (yesNoOpt showOrParseArgs) - , option [] ["strong-flags"] - "Do not defer flag choices (this used to be the default in cabal-install <= 1.20)." - (fmap asBool . getstrfl) - (setstrfl . fmap StrongFlags) - (yesNoOpt showOrParseArgs) - , option [] ["allow-boot-library-installs"] - "Allow cabal to install base, ghc-prim, integer-simple, integer-gmp, and template-haskell." - (fmap asBool . getib) - (setib . 
fmap AllowBootLibInstalls) - (yesNoOpt showOrParseArgs) - ] - -usageFlagsOrPackages :: String -> String -> String -usageFlagsOrPackages name pname = - "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n" - ++ " or: " ++ pname ++ " " ++ name ++ " [PACKAGES]\n" - -usagePackages :: String -> String -> String -usagePackages name pname = - "Usage: " ++ pname ++ " " ++ name ++ " [PACKAGES]\n" - -usageFlags :: String -> String -> String -usageFlags name pname = - "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n" - ---TODO: do we want to allow per-package flags? -parsePackageArgs :: [String] -> Either String [Dependency] -parsePackageArgs = parsePkgArgs [] - where - parsePkgArgs ds [] = Right (reverse ds) - parsePkgArgs ds (arg:args) = - case readPToMaybe parseDependencyOrPackageId arg of - Just dep -> parsePkgArgs (dep:ds) args - Nothing -> Left $ - show arg ++ " is not valid syntax for a package name or" - ++ " package dependency." - -parseDependencyOrPackageId :: Parse.ReadP r Dependency -parseDependencyOrPackageId = parse Parse.+++ liftM pkgidToDependency parse - where - pkgidToDependency :: PackageIdentifier -> Dependency - pkgidToDependency p = case packageVersion p of - v | v == nullVersion -> Dependency (packageName p) anyVersion - | otherwise -> Dependency (packageName p) (thisVersion v) - -showRepo :: RemoteRepo -> String -showRepo repo = remoteRepoName repo ++ ":" - ++ uriToString id (remoteRepoURI repo) [] - -readRepo :: String -> Maybe RemoteRepo -readRepo = readPToMaybe parseRepo - -parseRepo :: Parse.ReadP r RemoteRepo -parseRepo = do - name <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "_-.") - _ <- Parse.char ':' - uriStr <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "+-=._/*()@'$:;&!?~") - uri <- maybe Parse.pfail return (parseAbsoluteURI uriStr) - return RemoteRepo { - remoteRepoName = name, - remoteRepoURI = uri, - remoteRepoSecure = Nothing, - remoteRepoRootKeys = [], - remoteRepoKeyThreshold = 0, - remoteRepoShouldTryHttps = False - } - --- 
------------------------------------------------------------ --- * Helpers for Documentation --- ------------------------------------------------------------ - -headLine :: String -> String -headLine = unlines - . map unwords - . wrapLine 79 - . words - -paragraph :: String -> String -paragraph = (++"\n") - . unlines - . map unwords - . wrapLine 79 - . words - -indentParagraph :: String -> String -indentParagraph = unlines - . (flip (++)) [""] - . map ((" "++).unwords) - . wrapLine 77 - . words - -relevantConfigValuesText :: [String] -> String -relevantConfigValuesText vs = - "Relevant global configuration keys:\n" - ++ concat [" " ++ v ++ "\n" |v <- vs] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/SetupWrapper.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/SetupWrapper.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/SetupWrapper.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/SetupWrapper.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,917 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE FlexibleContexts #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.SetupWrapper --- Copyright : (c) The University of Glasgow 2006, --- Duncan Coutts 2008 --- --- Maintainer : cabal-devel@haskell.org --- Stability : alpha --- Portability : portable --- --- An interface to building and installing Cabal packages. --- If the @Built-Type@ field is specified as something other than --- 'Custom', and the current version of Cabal is acceptable, this performs --- setup actions directly. Otherwise it builds the setup script and --- runs it with the given arguments. 
- -module Distribution.Client.SetupWrapper ( - getSetup, runSetup, runSetupCommand, setupWrapper, - SetupScriptOptions(..), - defaultSetupScriptOptions, - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import qualified Distribution.Make as Make -import qualified Distribution.Simple as Simple -import Distribution.Version - ( Version, mkVersion, versionNumbers, VersionRange, anyVersion - , intersectVersionRanges, orLaterVersion - , withinRange ) -import qualified Distribution.Backpack as Backpack -import Distribution.Package - ( newSimpleUnitId, unsafeMkDefUnitId, ComponentId - , PackageId, mkPackageName - , PackageIdentifier(..), packageVersion, packageName ) -import Distribution.Types.Dependency -import Distribution.PackageDescription - ( GenericPackageDescription(packageDescription) - , PackageDescription(..), specVersion, buildType - , BuildType(..), defaultRenaming ) -import Distribution.PackageDescription.Parsec - ( readGenericPackageDescription ) -import Distribution.Simple.Configure - ( configCompilerEx ) -import Distribution.Compiler - ( buildCompilerId, CompilerFlavor(GHC, GHCJS) ) -import Distribution.Simple.Compiler - ( Compiler(compilerId), compilerFlavor, PackageDB(..), PackageDBStack ) -import Distribution.Simple.PreProcess - ( runSimplePreProcessor, ppUnlit ) -import Distribution.Simple.Build.Macros - ( generatePackageVersionMacros ) -import Distribution.Simple.Program - ( ProgramDb, emptyProgramDb - , getProgramSearchPath, getDbProgramOutput, runDbProgram, ghcProgram - , ghcjsProgram ) -import Distribution.Simple.Program.Find - ( programSearchPathAsPATHVar - , ProgramSearchPathEntry(ProgramSearchPathDir) ) -import Distribution.Simple.Program.Run - ( getEffectiveEnvironment ) -import qualified Distribution.Simple.Program.Strip as Strip -import Distribution.Simple.BuildPaths - ( defaultDistPref, exeExtension ) - -import Distribution.Simple.Command - ( CommandUI(..), commandShowOptions ) -import 
Distribution.Simple.Program.GHC - ( GhcMode(..), GhcOptions(..), renderGhcOptions ) -import qualified Distribution.Simple.PackageIndex as PackageIndex -import Distribution.Simple.PackageIndex (InstalledPackageIndex) -import qualified Distribution.InstalledPackageInfo as IPI -import Distribution.Client.Types -import Distribution.Client.Config - ( getCabalDir ) -import Distribution.Client.IndexUtils - ( getInstalledPackages ) -import Distribution.Client.JobControl - ( Lock, criticalSection ) -import Distribution.Simple.Setup - ( Flag(..) ) -import Distribution.Simple.Utils - ( die', debug, info, infoNoWrap - , cabalVersion, tryFindPackageDesc, comparing - , createDirectoryIfMissingVerbose, installExecutableFile - , copyFileVerbose, rewriteFileEx ) -import Distribution.Client.Utils - ( inDir, tryCanonicalizePath, withExtraPathEnv - , existsAndIsMoreRecentThan, moreRecentFile, withEnv, withEnvOverrides -#ifdef mingw32_HOST_OS - , canonicalizePathNoThrow -#endif - ) - -import Distribution.ReadE -import Distribution.System ( Platform(..), buildPlatform ) -import Distribution.Text - ( display ) -import Distribution.Utils.NubList - ( toNubListR ) -import Distribution.Verbosity -import Distribution.Compat.Exception - ( catchIO ) -import Distribution.Compat.Stack - -import System.Directory ( doesFileExist ) -import System.FilePath ( (), (<.>) ) -import System.IO ( Handle, hPutStr ) -import System.Exit ( ExitCode(..), exitWith ) -import System.Process ( createProcess, StdStream(..), proc, waitForProcess - , ProcessHandle ) -import qualified System.Process as Process -import Data.List ( foldl1' ) -import Distribution.Client.Compat.ExecutablePath ( getExecutablePath ) - -#ifdef mingw32_HOST_OS -import Distribution.Simple.Utils - ( withTempDirectory ) - -import Control.Exception ( bracket ) -import System.FilePath ( equalFilePath, takeDirectory ) -import System.Directory ( doesDirectoryExist ) -import qualified System.Win32 as Win32 -#endif - --- | @Setup@ encapsulates the 
outcome of configuring a setup method to build a --- particular package. -data Setup = Setup { setupMethod :: SetupMethod - , setupScriptOptions :: SetupScriptOptions - , setupVersion :: Version - , setupBuildType :: BuildType - , setupPackage :: PackageDescription - } - --- | @SetupMethod@ represents one of the methods used to run Cabal commands. -data SetupMethod = InternalMethod - -- ^ run Cabal commands through \"cabal\" in the - -- current process - | SelfExecMethod - -- ^ run Cabal commands through \"cabal\" as a - -- child process - | ExternalMethod FilePath - -- ^ run Cabal commands through a custom \"Setup\" executable - --- TODO: The 'setupWrapper' and 'SetupScriptOptions' should be split into two --- parts: one that has no policy and just does as it's told with all the --- explicit options, and an optional initial part that applies certain --- policies (like if we should add the Cabal lib as a dep, and if so which --- version). This could be structured as an action that returns a fully --- elaborated 'SetupScriptOptions' containing no remaining policy choices. --- --- See also the discussion at https://github.com/haskell/cabal/pull/3094 - --- | @SetupScriptOptions@ are options used to configure and run 'Setup', as --- opposed to options given to the Cabal command at runtime. -data SetupScriptOptions = SetupScriptOptions { - -- | The version of the Cabal library to use (if 'useDependenciesExclusive' - -- is not set). A suitable version of the Cabal library must be installed - -- (or for some build-types be the one cabal-install was built with). - -- - -- The version found also determines the version of the Cabal specification - -- that we us for talking to the Setup.hs, unless overridden by - -- 'useCabalSpecVersion'. - -- - useCabalVersion :: VersionRange, - - -- | This is the version of the Cabal specification that we believe that - -- this package uses. This affects the semantics and in particular the - -- Setup command line interface. 
- -- - -- This is similar to 'useCabalVersion' but instead of probing the system - -- for a version of the /Cabal library/ you just say exactly which version - -- of the /spec/ we will use. Using this also avoid adding the Cabal - -- library as an additional dependency, so add it to 'useDependencies' - -- if needed. - -- - useCabalSpecVersion :: Maybe Version, - useCompiler :: Maybe Compiler, - usePlatform :: Maybe Platform, - usePackageDB :: PackageDBStack, - usePackageIndex :: Maybe InstalledPackageIndex, - useProgramDb :: ProgramDb, - useDistPref :: FilePath, - useLoggingHandle :: Maybe Handle, - useWorkingDir :: Maybe FilePath, - -- | Extra things to add to PATH when invoking the setup script. - useExtraPathEnv :: [FilePath], - -- | Extra environment variables paired with overrides, where - -- - -- * @'Just' v@ means \"set the environment variable's value to @v@\". - -- * 'Nothing' means \"unset the environment variable\". - useExtraEnvOverrides :: [(String, Maybe FilePath)], - forceExternalSetupMethod :: Bool, - - -- | List of dependencies to use when building Setup.hs. - useDependencies :: [(ComponentId, PackageId)], - - -- | Is the list of setup dependencies exclusive? - -- - -- When this is @False@, if we compile the Setup.hs script we do so with the - -- list in 'useDependencies' but all other packages in the environment are - -- also visible. A suitable version of @Cabal@ library (see - -- 'useCabalVersion') is also added to the list of dependencies, unless - -- 'useDependencies' already contains a Cabal dependency. - -- - -- When @True@, only the 'useDependencies' packages are used, with other - -- packages in the environment hidden. - -- - -- This feature is here to support the setup stanza in .cabal files that - -- specifies explicit (and exclusive) dependencies, as well as the old - -- style with no dependencies. - useDependenciesExclusive :: Bool, - - -- | Should we build the Setup.hs with CPP version macros available? 
- -- We turn this on when we have a setup stanza in .cabal that declares - -- explicit setup dependencies. - -- - useVersionMacros :: Bool, - - -- Used only by 'cabal clean' on Windows. - -- - -- Note: win32 clean hack - ------------------------- - -- On Windows, running './dist/setup/setup clean' doesn't work because the - -- setup script will try to delete itself (which causes it to fail horribly, - -- unlike on Linux). So we have to move the setup exe out of the way first - -- and then delete it manually. This applies only to the external setup - -- method. - useWin32CleanHack :: Bool, - - -- Used only when calling setupWrapper from parallel code to serialise - -- access to the setup cache; should be Nothing otherwise. - -- - -- Note: setup exe cache - ------------------------ - -- When we are installing in parallel, we always use the external setup - -- method. Since compiling the setup script each time adds noticeable - -- overhead, we use a shared setup script cache - -- ('~/.cabal/setup-exe-cache'). For each (compiler, platform, Cabal - -- version) combination the cache holds a compiled setup script - -- executable. This only affects the Simple build type; for the Custom, - -- Configure and Make build types we always compile the setup script anew. - setupCacheLock :: Maybe Lock, - - -- | Is the task we are going to run an interactive foreground task, - -- or an non-interactive background task? 
Based on this flag we - -- decide whether or not to delegate ctrl+c to the spawned task - isInteractive :: Bool - } - -defaultSetupScriptOptions :: SetupScriptOptions -defaultSetupScriptOptions = SetupScriptOptions { - useCabalVersion = anyVersion, - useCabalSpecVersion = Nothing, - useCompiler = Nothing, - usePlatform = Nothing, - usePackageDB = [GlobalPackageDB, UserPackageDB], - usePackageIndex = Nothing, - useDependencies = [], - useDependenciesExclusive = False, - useVersionMacros = False, - useProgramDb = emptyProgramDb, - useDistPref = defaultDistPref, - useLoggingHandle = Nothing, - useWorkingDir = Nothing, - useExtraPathEnv = [], - useExtraEnvOverrides = [], - useWin32CleanHack = False, - forceExternalSetupMethod = False, - setupCacheLock = Nothing, - isInteractive = False - } - -workingDir :: SetupScriptOptions -> FilePath -workingDir options = - case fromMaybe "" (useWorkingDir options) of - [] -> "." - dir -> dir - --- | A @SetupRunner@ implements a 'SetupMethod'. -type SetupRunner = Verbosity - -> SetupScriptOptions - -> BuildType - -> [String] - -> IO () - --- | Prepare to build a package by configuring a 'SetupMethod'. The returned --- 'Setup' object identifies the method. The 'SetupScriptOptions' may be changed --- during the configuration process; the final values are given by --- 'setupScriptOptions'. -getSetup :: Verbosity - -> SetupScriptOptions - -> Maybe PackageDescription - -> IO Setup -getSetup verbosity options mpkg = do - pkg <- maybe getPkg return mpkg - let options' = options { - useCabalVersion = intersectVersionRanges - (useCabalVersion options) - (orLaterVersion (specVersion pkg)) - } - buildType' = buildType pkg - (version, method, options'') <- - getSetupMethod verbosity options' pkg buildType' - return Setup { setupMethod = method - , setupScriptOptions = options'' - , setupVersion = version - , setupBuildType = buildType' - , setupPackage = pkg - } - where - getPkg = tryFindPackageDesc (fromMaybe "." 
(useWorkingDir options)) - >>= readGenericPackageDescription verbosity - >>= return . packageDescription - --- | Decide if we're going to be able to do a direct internal call to the --- entry point in the Cabal library or if we're going to have to compile --- and execute an external Setup.hs script. --- -getSetupMethod - :: Verbosity -> SetupScriptOptions -> PackageDescription -> BuildType - -> IO (Version, SetupMethod, SetupScriptOptions) -getSetupMethod verbosity options pkg buildType' - | buildType' == Custom - || maybe False (cabalVersion /=) (useCabalSpecVersion options) - || not (cabalVersion `withinRange` useCabalVersion options) = - getExternalSetupMethod verbosity options pkg buildType' - | isJust (useLoggingHandle options) - -- Forcing is done to use an external process e.g. due to parallel - -- build concerns. - || forceExternalSetupMethod options = - return (cabalVersion, SelfExecMethod, options) - | otherwise = return (cabalVersion, InternalMethod, options) - -runSetupMethod :: WithCallStack (SetupMethod -> SetupRunner) -runSetupMethod InternalMethod = internalSetupMethod -runSetupMethod (ExternalMethod path) = externalSetupMethod path -runSetupMethod SelfExecMethod = selfExecSetupMethod - --- | Run a configured 'Setup' with specific arguments. -runSetup :: Verbosity -> Setup - -> [String] -- ^ command-line arguments - -> IO () -runSetup verbosity setup args0 = do - let method = setupMethod setup - options = setupScriptOptions setup - bt = setupBuildType setup - args = verbosityHack (setupVersion setup) args0 - when (verbosity >= deafening {- avoid test if not debug -} && args /= args0) $ - infoNoWrap verbose $ - "Applied verbosity hack:\n" ++ - " Before: " ++ show args0 ++ "\n" ++ - " After: " ++ show args ++ "\n" - runSetupMethod method verbosity options bt args - --- | This is a horrible hack to make sure passing fancy verbosity --- flags (e.g., @-v'info +callstack'@) doesn't break horribly on --- old Setup. 
We can't do it in 'filterConfigureFlags' because --- verbosity applies to ALL commands. -verbosityHack :: Version -> [String] -> [String] -verbosityHack ver args0 - | ver >= mkVersion [2,1] = args0 - | otherwise = go args0 - where - go (('-':'v':rest) : args) - | Just rest' <- munch rest = ("-v" ++ rest') : go args - go (('-':'-':'v':'e':'r':'b':'o':'s':'e':'=':rest) : args) - | Just rest' <- munch rest = ("--verbose=" ++ rest') : go args - go ("--verbose" : rest : args) - | Just rest' <- munch rest = "--verbose" : rest' : go args - go rest@("--" : _) = rest - go (arg:args) = arg : go args - go [] = [] - - munch rest = - case runReadE flagToVerbosity rest of - Right v - | ver < mkVersion [2,0], verboseHasFlags v - -- We could preserve the prefix, but since we're assuming - -- it's Cabal's verbosity flag, we can assume that - -- any format is OK - -> Just (showForCabal (verboseNoFlags v)) - | ver < mkVersion [2,1], isVerboseTimestamp v - -- +timestamp wasn't yet available in Cabal-2.0.0 - -> Just (showForCabal (verboseNoTimestamp v)) - _ -> Nothing - --- | Run a command through a configured 'Setup'. -runSetupCommand :: Verbosity -> Setup - -> CommandUI flags -- ^ command definition - -> flags -- ^ command flags - -> [String] -- ^ extra command-line arguments - -> IO () -runSetupCommand verbosity setup cmd flags extraArgs = do - let args = commandName cmd : commandShowOptions cmd flags ++ extraArgs - runSetup verbosity setup args - --- | Configure a 'Setup' and run a command in one step. The command flags --- may depend on the Cabal library version in use. 
-setupWrapper :: Verbosity - -> SetupScriptOptions - -> Maybe PackageDescription - -> CommandUI flags - -> (Version -> flags) - -- ^ produce command flags given the Cabal library version - -> (Version -> [String]) - -> IO () -setupWrapper verbosity options mpkg cmd flags extraArgs = do - setup <- getSetup verbosity options mpkg - runSetupCommand verbosity setup - cmd (flags $ setupVersion setup) - (extraArgs $ setupVersion setup) - --- ------------------------------------------------------------ --- * Internal SetupMethod --- ------------------------------------------------------------ - -internalSetupMethod :: SetupRunner -internalSetupMethod verbosity options bt args = do - info verbosity $ "Using internal setup method with build-type " ++ show bt - ++ " and args:\n " ++ show args - inDir (useWorkingDir options) $ do - withEnv "HASKELL_DIST_DIR" (useDistPref options) $ - withExtraPathEnv (useExtraPathEnv options) $ - withEnvOverrides (useExtraEnvOverrides options) $ - buildTypeAction bt args - -buildTypeAction :: BuildType -> ([String] -> IO ()) -buildTypeAction Simple = Simple.defaultMainArgs -buildTypeAction Configure = Simple.defaultMainWithHooksArgs - Simple.autoconfUserHooks -buildTypeAction Make = Make.defaultMainArgs -buildTypeAction Custom = error "buildTypeAction Custom" - - --- | @runProcess'@ is a version of @runProcess@ where we have --- the additional option to decide whether or not we should --- delegate CTRL+C to the spawned process. -runProcess' :: FilePath -- ^ Filename of the executable - -> [String] -- ^ Arguments to pass to executable - -> Maybe FilePath -- ^ Optional path to working directory - -> Maybe [(String, String)] -- ^ Optional environment - -> Maybe Handle -- ^ Handle for @stdin@ - -> Maybe Handle -- ^ Handle for @stdout@ - -> Maybe Handle -- ^ Handle for @stderr@ - -> Bool -- ^ Delegate Ctrl+C ? 
- -> IO ProcessHandle -runProcess' cmd args mb_cwd mb_env mb_stdin mb_stdout mb_stderr _delegate = do - (_,_,_,ph) <- - createProcess - (proc cmd args){ Process.cwd = mb_cwd - , Process.env = mb_env - , Process.std_in = mbToStd mb_stdin - , Process.std_out = mbToStd mb_stdout - , Process.std_err = mbToStd mb_stderr - , Process.delegate_ctlc = _delegate - } - return ph - where - mbToStd :: Maybe Handle -> StdStream - mbToStd Nothing = Inherit - mbToStd (Just hdl) = UseHandle hdl --- ------------------------------------------------------------ --- * Self-Exec SetupMethod --- ------------------------------------------------------------ - -selfExecSetupMethod :: SetupRunner -selfExecSetupMethod verbosity options bt args0 = do - let args = ["act-as-setup", - "--build-type=" ++ display bt, - "--"] ++ args0 - info verbosity $ "Using self-exec internal setup method with build-type " - ++ show bt ++ " and args:\n " ++ show args - path <- getExecutablePath - info verbosity $ unwords (path : args) - case useLoggingHandle options of - Nothing -> return () - Just logHandle -> info verbosity $ "Redirecting build log to " - ++ show logHandle - - searchpath <- programSearchPathAsPATHVar - (map ProgramSearchPathDir (useExtraPathEnv options) ++ - getProgramSearchPath (useProgramDb options)) - env <- getEffectiveEnvironment $ - [ ("PATH", Just searchpath) - , ("HASKELL_DIST_DIR", Just (useDistPref options)) - ] ++ useExtraEnvOverrides options - process <- runProcess' path args - (useWorkingDir options) env Nothing - (useLoggingHandle options) (useLoggingHandle options) - (isInteractive options) - exitCode <- waitForProcess process - unless (exitCode == ExitSuccess) $ exitWith exitCode - --- ------------------------------------------------------------ --- * External SetupMethod --- ------------------------------------------------------------ - -externalSetupMethod :: WithCallStack (FilePath -> SetupRunner) -externalSetupMethod path verbosity options _ args = do - info verbosity $ 
unwords (path : args) - case useLoggingHandle options of - Nothing -> return () - Just logHandle -> info verbosity $ "Redirecting build log to " - ++ show logHandle - - -- See 'Note: win32 clean hack' above. -#ifdef mingw32_HOST_OS - if useWin32CleanHack options then doWin32CleanHack path else doInvoke path -#else - doInvoke path -#endif - - where - doInvoke path' = do - searchpath <- programSearchPathAsPATHVar - (map ProgramSearchPathDir (useExtraPathEnv options) ++ - getProgramSearchPath (useProgramDb options)) - env <- getEffectiveEnvironment $ - [ ("PATH", Just searchpath) - , ("HASKELL_DIST_DIR", Just (useDistPref options)) - ] ++ useExtraEnvOverrides options - - debug verbosity $ "Setup arguments: "++unwords args - process <- runProcess' path' args - (useWorkingDir options) env Nothing - (useLoggingHandle options) (useLoggingHandle options) - (isInteractive options) - exitCode <- waitForProcess process - unless (exitCode == ExitSuccess) $ exitWith exitCode - -#ifdef mingw32_HOST_OS - doWin32CleanHack path' = do - info verbosity $ "Using the Win32 clean hack." - -- Recursively removes the temp dir on exit. - withTempDirectory verbosity (workingDir options) "cabal-tmp" $ \tmpDir -> - bracket (moveOutOfTheWay tmpDir path') - (maybeRestore path') - doInvoke - - moveOutOfTheWay tmpDir path' = do - let newPath = tmpDir "setup" <.> exeExtension buildPlatform - Win32.moveFile path' newPath - return newPath - - maybeRestore oldPath path' = do - let oldPathDir = takeDirectory oldPath - oldPathDirExists <- doesDirectoryExist oldPathDir - -- 'setup clean' didn't complete, 'dist/setup' still exists. 
- when oldPathDirExists $ - Win32.moveFile path' oldPath -#endif - -getExternalSetupMethod - :: Verbosity -> SetupScriptOptions -> PackageDescription -> BuildType - -> IO (Version, SetupMethod, SetupScriptOptions) -getExternalSetupMethod verbosity options pkg bt = do - debug verbosity $ "Using external setup method with build-type " ++ show bt - debug verbosity $ "Using explicit dependencies: " - ++ show (useDependenciesExclusive options) - createDirectoryIfMissingVerbose verbosity True setupDir - (cabalLibVersion, mCabalLibInstalledPkgId, options') <- cabalLibVersionToUse - debug verbosity $ "Using Cabal library version " ++ display cabalLibVersion - path <- if useCachedSetupExecutable - then getCachedSetupExecutable options' - cabalLibVersion mCabalLibInstalledPkgId - else compileSetupExecutable options' - cabalLibVersion mCabalLibInstalledPkgId False - - -- Since useWorkingDir can change the relative path, the path argument must - -- be turned into an absolute path. On some systems, runProcess' will take - -- path as relative to the new working directory instead of the current - -- working directory. - path' <- tryCanonicalizePath path - - -- See 'Note: win32 clean hack' above. -#ifdef mingw32_HOST_OS - -- setupProgFile may not exist if we're using a cached program - setupProgFile' <- canonicalizePathNoThrow setupProgFile - let win32CleanHackNeeded = (useWin32CleanHack options) - -- Skip when a cached setup script is used. 
- && setupProgFile' `equalFilePath` path' -#else - let win32CleanHackNeeded = False -#endif - let options'' = options' { useWin32CleanHack = win32CleanHackNeeded } - - return (cabalLibVersion, ExternalMethod path', options'') - - where - setupDir = workingDir options useDistPref options "setup" - setupVersionFile = setupDir "setup" <.> "version" - setupHs = setupDir "setup" <.> "hs" - setupProgFile = setupDir "setup" <.> exeExtension buildPlatform - platform = fromMaybe buildPlatform (usePlatform options) - - useCachedSetupExecutable = (bt == Simple || bt == Configure || bt == Make) - - maybeGetInstalledPackages :: SetupScriptOptions -> Compiler - -> ProgramDb -> IO InstalledPackageIndex - maybeGetInstalledPackages options' comp progdb = - case usePackageIndex options' of - Just index -> return index - Nothing -> getInstalledPackages verbosity - comp (usePackageDB options') progdb - - -- Choose the version of Cabal to use if the setup script has a dependency on - -- Cabal, and possibly update the setup script options. The version also - -- determines how to filter the flags to Setup. - -- - -- We first check whether the dependency solver has specified a Cabal version. - -- If it has, we use the solver's version without looking at the installed - -- package index (See issue #3436). Otherwise, we pick the Cabal version by - -- checking 'useCabalSpecVersion', then the saved version, and finally the - -- versions available in the index. - -- - -- The version chosen here must match the one used in 'compileSetupExecutable' - -- (See issue #3433). - cabalLibVersionToUse :: IO (Version, Maybe ComponentId - ,SetupScriptOptions) - cabalLibVersionToUse = - case find (isCabalPkgId . 
snd) (useDependencies options) of - Just (unitId, pkgId) -> do - let version = pkgVersion pkgId - updateSetupScript version bt - writeSetupVersionFile version - return (version, Just unitId, options) - Nothing -> - case useCabalSpecVersion options of - Just version -> do - updateSetupScript version bt - writeSetupVersionFile version - return (version, Nothing, options) - Nothing -> do - savedVer <- savedVersion - case savedVer of - Just version | version `withinRange` useCabalVersion options - -> do updateSetupScript version bt - -- Does the previously compiled setup executable - -- still exist and is it up-to date? - useExisting <- canUseExistingSetup version - if useExisting - then return (version, Nothing, options) - else installedVersion - _ -> installedVersion - where - -- This check duplicates the checks in 'getCachedSetupExecutable' / - -- 'compileSetupExecutable'. Unfortunately, we have to perform it twice - -- because the selected Cabal version may change as a result of this - -- check. 
- canUseExistingSetup :: Version -> IO Bool - canUseExistingSetup version = - if useCachedSetupExecutable - then do - (_, cachedSetupProgFile) <- cachedSetupDirAndProg options version - doesFileExist cachedSetupProgFile - else - (&&) <$> setupProgFile `existsAndIsMoreRecentThan` setupHs - <*> setupProgFile `existsAndIsMoreRecentThan` setupVersionFile - - writeSetupVersionFile :: Version -> IO () - writeSetupVersionFile version = - writeFile setupVersionFile (show version ++ "\n") - - installedVersion :: IO (Version, Maybe InstalledPackageId - ,SetupScriptOptions) - installedVersion = do - (comp, progdb, options') <- configureCompiler options - (version, mipkgid, options'') <- installedCabalVersion options' - comp progdb - updateSetupScript version bt - writeSetupVersionFile version - return (version, mipkgid, options'') - - savedVersion :: IO (Maybe Version) - savedVersion = do - versionString <- readFile setupVersionFile `catchIO` \_ -> return "" - case reads versionString of - [(version,s)] | all isSpace s -> return (Just version) - _ -> return Nothing - - -- | Update a Setup.hs script, creating it if necessary. - updateSetupScript :: Version -> BuildType -> IO () - updateSetupScript _ Custom = do - useHs <- doesFileExist customSetupHs - useLhs <- doesFileExist customSetupLhs - unless (useHs || useLhs) $ die' verbosity - "Using 'build-type: Custom' but there is no Setup.hs or Setup.lhs script." 
- let src = (if useHs then customSetupHs else customSetupLhs) - srcNewer <- src `moreRecentFile` setupHs - when srcNewer $ if useHs - then copyFileVerbose verbosity src setupHs - else runSimplePreProcessor ppUnlit src setupHs verbosity - where - customSetupHs = workingDir options "Setup.hs" - customSetupLhs = workingDir options "Setup.lhs" - - updateSetupScript cabalLibVersion _ = - rewriteFileEx verbosity setupHs (buildTypeScript cabalLibVersion) - - buildTypeScript :: Version -> String - buildTypeScript cabalLibVersion = case bt of - Simple -> "import Distribution.Simple; main = defaultMain\n" - Configure -> "import Distribution.Simple; main = defaultMainWithHooks " - ++ if cabalLibVersion >= mkVersion [1,3,10] - then "autoconfUserHooks\n" - else "defaultUserHooks\n" - Make -> "import Distribution.Make; main = defaultMain\n" - Custom -> error "buildTypeScript Custom" - - installedCabalVersion :: SetupScriptOptions -> Compiler -> ProgramDb - -> IO (Version, Maybe InstalledPackageId - ,SetupScriptOptions) - installedCabalVersion options' _ _ | packageName pkg == mkPackageName "Cabal" - && bt == Custom = - return (packageVersion pkg, Nothing, options') - installedCabalVersion options' compiler progdb = do - index <- maybeGetInstalledPackages options' compiler progdb - let cabalDep = Dependency (mkPackageName "Cabal") - (useCabalVersion options') - options'' = options' { usePackageIndex = Just index } - case PackageIndex.lookupDependency index cabalDep of - [] -> die' verbosity $ "The package '" ++ display (packageName pkg) - ++ "' requires Cabal library version " - ++ display (useCabalVersion options) - ++ " but no suitable version is installed." - pkgs -> let ipkginfo = head . snd . bestVersion fst $ pkgs - in return (packageVersion ipkginfo - ,Just . IPI.installedComponentId $ ipkginfo, options'') - - bestVersion :: (a -> Version) -> [a] -> a - bestVersion f = firstMaximumBy (comparing (preference . 
f)) - where - -- Like maximumBy, but picks the first maximum element instead of the - -- last. In general, we expect the preferred version to go first in the - -- list. For the default case, this has the effect of choosing the version - -- installed in the user package DB instead of the global one. See #1463. - -- - -- Note: firstMaximumBy could be written as just - -- `maximumBy cmp . reverse`, but the problem is that the behaviour of - -- maximumBy is not fully specified in the case when there is not a single - -- greatest element. - firstMaximumBy :: (a -> a -> Ordering) -> [a] -> a - firstMaximumBy _ [] = - error "Distribution.Client.firstMaximumBy: empty list" - firstMaximumBy cmp xs = foldl1' maxBy xs - where - maxBy x y = case cmp x y of { GT -> x; EQ -> x; LT -> y; } - - preference version = (sameVersion, sameMajorVersion - ,stableVersion, latestVersion) - where - sameVersion = version == cabalVersion - sameMajorVersion = majorVersion version == majorVersion cabalVersion - majorVersion = take 2 . versionNumbers - stableVersion = case versionNumbers version of - (_:x:_) -> even x - _ -> False - latestVersion = version - - configureCompiler :: SetupScriptOptions - -> IO (Compiler, ProgramDb, SetupScriptOptions) - configureCompiler options' = do - (comp, progdb) <- case useCompiler options' of - Just comp -> return (comp, useProgramDb options') - Nothing -> do (comp, _, progdb) <- - configCompilerEx (Just GHC) Nothing Nothing - (useProgramDb options') verbosity - return (comp, progdb) - -- Whenever we need to call configureCompiler, we also need to access the - -- package index, so let's cache it in SetupScriptOptions. - index <- maybeGetInstalledPackages options' comp progdb - return (comp, progdb, options' { useCompiler = Just comp, - usePackageIndex = Just index, - useProgramDb = progdb }) - - -- | Path to the setup exe cache directory and path to the cached setup - -- executable. 
- cachedSetupDirAndProg :: SetupScriptOptions -> Version - -> IO (FilePath, FilePath) - cachedSetupDirAndProg options' cabalLibVersion = do - cabalDir <- getCabalDir - let setupCacheDir = cabalDir "setup-exe-cache" - cachedSetupProgFile = setupCacheDir - ("setup-" ++ buildTypeString ++ "-" - ++ cabalVersionString ++ "-" - ++ platformString ++ "-" - ++ compilerVersionString) - <.> exeExtension buildPlatform - return (setupCacheDir, cachedSetupProgFile) - where - buildTypeString = show bt - cabalVersionString = "Cabal-" ++ (display cabalLibVersion) - compilerVersionString = display $ - maybe buildCompilerId compilerId - $ useCompiler options' - platformString = display platform - - -- | Look up the setup executable in the cache; update the cache if the setup - -- executable is not found. - getCachedSetupExecutable :: SetupScriptOptions - -> Version -> Maybe InstalledPackageId - -> IO FilePath - getCachedSetupExecutable options' cabalLibVersion - maybeCabalLibInstalledPkgId = do - (setupCacheDir, cachedSetupProgFile) <- - cachedSetupDirAndProg options' cabalLibVersion - cachedSetupExists <- doesFileExist cachedSetupProgFile - if cachedSetupExists - then debug verbosity $ - "Found cached setup executable: " ++ cachedSetupProgFile - else criticalSection' $ do - -- The cache may have been populated while we were waiting. - cachedSetupExists' <- doesFileExist cachedSetupProgFile - if cachedSetupExists' - then debug verbosity $ - "Found cached setup executable: " ++ cachedSetupProgFile - else do - debug verbosity $ "Setup executable not found in the cache." 
- src <- compileSetupExecutable options' - cabalLibVersion maybeCabalLibInstalledPkgId True - createDirectoryIfMissingVerbose verbosity True setupCacheDir - installExecutableFile verbosity src cachedSetupProgFile - -- Do not strip if we're using GHCJS, since the result may be a script - when (maybe True ((/=GHCJS).compilerFlavor) $ useCompiler options') $ - Strip.stripExe verbosity platform (useProgramDb options') - cachedSetupProgFile - return cachedSetupProgFile - where - criticalSection' = maybe id criticalSection $ setupCacheLock options' - - -- | If the Setup.hs is out of date wrt the executable then recompile it. - -- Currently this is GHC/GHCJS only. It should really be generalised. - -- - compileSetupExecutable :: SetupScriptOptions - -> Version -> Maybe ComponentId -> Bool - -> IO FilePath - compileSetupExecutable options' cabalLibVersion maybeCabalLibInstalledPkgId - forceCompile = do - setupHsNewer <- setupHs `moreRecentFile` setupProgFile - cabalVersionNewer <- setupVersionFile `moreRecentFile` setupProgFile - let outOfDate = setupHsNewer || cabalVersionNewer - when (outOfDate || forceCompile) $ do - debug verbosity "Setup executable needs to be updated, compiling..." - (compiler, progdb, options'') <- configureCompiler options' - let cabalPkgid = PackageIdentifier (mkPackageName "Cabal") cabalLibVersion - (program, extraOpts) - = case compilerFlavor compiler of - GHCJS -> (ghcjsProgram, ["-build-runner"]) - _ -> (ghcProgram, ["-threaded"]) - cabalDep = maybe [] (\ipkgid -> [(ipkgid, cabalPkgid)]) - maybeCabalLibInstalledPkgId - - -- With 'useDependenciesExclusive' we enforce the deps specified, - -- so only the given ones can be used. Otherwise we allow the use - -- of packages in the ambient environment, and add on a dep on the - -- Cabal library (unless 'useDependencies' already contains one). - -- - -- With 'useVersionMacros' we use a version CPP macros .h file. 
- -- - -- Both of these options should be enabled for packages that have - -- opted-in and declared a custom-settup stanza. - -- - selectedDeps | useDependenciesExclusive options' - = useDependencies options' - | otherwise = useDependencies options' ++ - if any (isCabalPkgId . snd) - (useDependencies options') - then [] - else cabalDep - addRenaming (ipid, _) = - -- Assert 'DefUnitId' invariant - (Backpack.DefiniteUnitId (unsafeMkDefUnitId (newSimpleUnitId ipid)) - ,defaultRenaming) - cppMacrosFile = setupDir "setup_macros.h" - ghcOptions = mempty { - -- Respect -v0, but don't crank up verbosity on GHC if - -- Cabal verbosity is requested. For that, use - -- --ghc-option=-v instead! - ghcOptVerbosity = Flag (min verbosity normal) - , ghcOptMode = Flag GhcModeMake - , ghcOptInputFiles = toNubListR [setupHs] - , ghcOptOutputFile = Flag setupProgFile - , ghcOptObjDir = Flag setupDir - , ghcOptHiDir = Flag setupDir - , ghcOptSourcePathClear = Flag True - , ghcOptSourcePath = case bt of - Custom -> toNubListR [workingDir options'] - _ -> mempty - , ghcOptPackageDBs = usePackageDB options'' - , ghcOptHideAllPackages = Flag (useDependenciesExclusive options') - , ghcOptCabal = Flag (useDependenciesExclusive options') - , ghcOptPackages = toNubListR $ map addRenaming selectedDeps - , ghcOptCppIncludes = toNubListR [ cppMacrosFile - | useVersionMacros options' ] - , ghcOptExtra = extraOpts - } - let ghcCmdLine = renderGhcOptions compiler platform ghcOptions - when (useVersionMacros options') $ - rewriteFileEx verbosity cppMacrosFile - (generatePackageVersionMacros (map snd selectedDeps)) - case useLoggingHandle options of - Nothing -> runDbProgram verbosity program progdb ghcCmdLine - - -- If build logging is enabled, redirect compiler output to - -- the log file. 
- (Just logHandle) -> do output <- getDbProgramOutput verbosity program - progdb ghcCmdLine - hPutStr logHandle output - return setupProgFile - - -isCabalPkgId :: PackageIdentifier -> Bool -isCabalPkgId (PackageIdentifier pname _) = pname == mkPackageName "Cabal" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/SolverInstallPlan.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/SolverInstallPlan.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/SolverInstallPlan.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/SolverInstallPlan.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,444 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE TypeFamilies #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.SolverInstallPlan --- Copyright : (c) Duncan Coutts 2008 --- License : BSD-like --- --- Maintainer : duncan@community.haskell.org --- Stability : provisional --- Portability : portable --- --- The 'SolverInstallPlan' is the graph of packages produced by the --- dependency solver, and specifies at the package-granularity what --- things are going to be installed. To put it another way: the --- dependency solver produces a 'SolverInstallPlan', which is then --- consumed by various other parts of Cabal. 
--- ------------------------------------------------------------------------------ -module Distribution.Client.SolverInstallPlan( - SolverInstallPlan(..), - SolverPlanPackage, - ResolverPackage(..), - - -- * Operations on 'SolverInstallPlan's - new, - toList, - toMap, - - remove, - - showPlanIndex, - showInstallPlan, - - -- * Checking validity of plans - valid, - closed, - consistent, - acyclic, - - -- ** Details on invalid plans - SolverPlanProblem(..), - showPlanProblem, - problems, - - -- ** Querying the install plan - dependencyClosure, - reverseDependencyClosure, - topologicalOrder, - reverseTopologicalOrder, -) where - -import Distribution.Package - ( PackageIdentifier(..), Package(..), PackageName - , HasUnitId(..), PackageId, packageVersion, packageName ) -import qualified Distribution.Solver.Types.ComponentDeps as CD -import Distribution.Text - ( display ) - -import Distribution.Client.Types - ( UnresolvedPkgLoc ) -import Distribution.Version - ( Version ) - -import Distribution.Solver.Types.Settings -import Distribution.Solver.Types.ResolverPackage -import Distribution.Solver.Types.SolverId - -import Data.List - ( intercalate ) -import Data.Maybe - ( fromMaybe, mapMaybe ) -import Distribution.Compat.Binary (Binary(..)) -import Distribution.Compat.Graph (Graph, IsNode(..)) -import qualified Data.Graph as OldGraph -import qualified Distribution.Compat.Graph as Graph -import qualified Data.Map as Map -import Data.Map (Map) -import Data.Array ((!)) -import Data.Typeable - -type SolverPlanPackage = ResolverPackage UnresolvedPkgLoc - -type SolverPlanIndex = Graph SolverPlanPackage - -data SolverInstallPlan = SolverInstallPlan { - planIndex :: !SolverPlanIndex, - planIndepGoals :: !IndependentGoals - } - deriving (Typeable) - -{- --- | Much like 'planPkgIdOf', but mapping back to full packages. 
-planPkgOf :: SolverInstallPlan - -> Graph.Vertex - -> SolverPlanPackage -planPkgOf plan v = - case Graph.lookupKey (planIndex plan) - (planPkgIdOf plan v) of - Just pkg -> pkg - Nothing -> error "InstallPlan: internal error: planPkgOf lookup failed" --} - -mkInstallPlan :: SolverPlanIndex - -> IndependentGoals - -> SolverInstallPlan -mkInstallPlan index indepGoals = - SolverInstallPlan { - planIndex = index, - planIndepGoals = indepGoals - } - -instance Binary SolverInstallPlan where - put SolverInstallPlan { - planIndex = index, - planIndepGoals = indepGoals - } = put (index, indepGoals) - - get = do - (index, indepGoals) <- get - return $! mkInstallPlan index indepGoals - -showPlanIndex :: [SolverPlanPackage] -> String -showPlanIndex = intercalate "\n" . map showPlanPackage - -showInstallPlan :: SolverInstallPlan -> String -showInstallPlan = showPlanIndex . toList - -showPlanPackage :: SolverPlanPackage -> String -showPlanPackage (PreExisting ipkg) = "PreExisting " ++ display (packageId ipkg) - ++ " (" ++ display (installedUnitId ipkg) - ++ ")" -showPlanPackage (Configured spkg) = "Configured " ++ display (packageId spkg) - --- | Build an installation plan from a valid set of resolved packages. --- -new :: IndependentGoals - -> SolverPlanIndex - -> Either [SolverPlanProblem] SolverInstallPlan -new indepGoals index = - case problems indepGoals index of - [] -> Right (mkInstallPlan index indepGoals) - probs -> Left probs - -toList :: SolverInstallPlan -> [SolverPlanPackage] -toList = Graph.toList . planIndex - -toMap :: SolverInstallPlan -> Map SolverId SolverPlanPackage -toMap = Graph.toMap . planIndex - --- | Remove packages from the install plan. This will result in an --- error if there are remaining packages that depend on any matching --- package. This is primarily useful for obtaining an install plan for --- the dependencies of a package or set of packages without actually --- installing the package itself, as when doing development. 
--- -remove :: (SolverPlanPackage -> Bool) - -> SolverInstallPlan - -> Either [SolverPlanProblem] - (SolverInstallPlan) -remove shouldRemove plan = - new (planIndepGoals plan) newIndex - where - newIndex = Graph.fromDistinctList $ - filter (not . shouldRemove) (toList plan) - --- ------------------------------------------------------------ --- * Checking validity of plans --- ------------------------------------------------------------ - --- | A valid installation plan is a set of packages that is 'acyclic', --- 'closed' and 'consistent'. Also, every 'ConfiguredPackage' in the --- plan has to have a valid configuration (see 'configuredPackageValid'). --- --- * if the result is @False@ use 'problems' to get a detailed list. --- -valid :: IndependentGoals - -> SolverPlanIndex - -> Bool -valid indepGoals index = - null $ problems indepGoals index - -data SolverPlanProblem = - PackageMissingDeps SolverPlanPackage - [PackageIdentifier] - | PackageCycle [SolverPlanPackage] - | PackageInconsistency PackageName [(PackageIdentifier, Version)] - | PackageStateInvalid SolverPlanPackage SolverPlanPackage - -showPlanProblem :: SolverPlanProblem -> String -showPlanProblem (PackageMissingDeps pkg missingDeps) = - "Package " ++ display (packageId pkg) - ++ " depends on the following packages which are missing from the plan: " - ++ intercalate ", " (map display missingDeps) - -showPlanProblem (PackageCycle cycleGroup) = - "The following packages are involved in a dependency cycle " - ++ intercalate ", " (map (display.packageId) cycleGroup) - -showPlanProblem (PackageInconsistency name inconsistencies) = - "Package " ++ display name - ++ " is required by several packages," - ++ " but they require inconsistent versions:\n" - ++ unlines [ " package " ++ display pkg ++ " requires " - ++ display (PackageIdentifier name ver) - | (pkg, ver) <- inconsistencies ] - -showPlanProblem (PackageStateInvalid pkg pkg') = - "Package " ++ display (packageId pkg) - ++ " is in the " ++ showPlanState 
pkg - ++ " state but it depends on package " ++ display (packageId pkg') - ++ " which is in the " ++ showPlanState pkg' - ++ " state" - where - showPlanState (PreExisting _) = "pre-existing" - showPlanState (Configured _) = "configured" - --- | For an invalid plan, produce a detailed list of problems as human readable --- error messages. This is mainly intended for debugging purposes. --- Use 'showPlanProblem' for a human readable explanation. --- -problems :: IndependentGoals - -> SolverPlanIndex - -> [SolverPlanProblem] -problems indepGoals index = - - [ PackageMissingDeps pkg - (mapMaybe - (fmap packageId . flip Graph.lookup index) - missingDeps) - | (pkg, missingDeps) <- Graph.broken index ] - - ++ [ PackageCycle cycleGroup - | cycleGroup <- Graph.cycles index ] - - ++ [ PackageInconsistency name inconsistencies - | (name, inconsistencies) <- - dependencyInconsistencies indepGoals index ] - - ++ [ PackageStateInvalid pkg pkg' - | pkg <- Graph.toList index - , Just pkg' <- map (flip Graph.lookup index) - (nodeNeighbors pkg) - , not (stateDependencyRelation pkg pkg') ] - - --- | Compute all roots of the install plan, and verify that the transitive --- plans from those roots are all consistent. --- --- NOTE: This does not check for dependency cycles. Moreover, dependency cycles --- may be absent from the subplans even if the larger plan contains a dependency --- cycle. Such cycles may or may not be an issue; either way, we don't check --- for them here. -dependencyInconsistencies :: IndependentGoals - -> SolverPlanIndex - -> [(PackageName, [(PackageIdentifier, Version)])] -dependencyInconsistencies indepGoals index = - concatMap dependencyInconsistencies' subplans - where - subplans :: [SolverPlanIndex] - subplans = -- Not Graph.closure!! 
- map (nonSetupClosure index) - (rootSets indepGoals index) - --- NB: When we check for inconsistencies, packages from the setup --- scripts don't count as part of the closure (this way, we --- can build, e.g., Cabal-1.24.1 even if its setup script is --- built with Cabal-1.24.0). --- --- This is a best effort function that swallows any non-existent --- SolverIds. -nonSetupClosure :: SolverPlanIndex - -> [SolverId] - -> SolverPlanIndex -nonSetupClosure index pkgids0 = closure Graph.empty pkgids0 - where - closure completed [] = completed - closure completed (pkgid:pkgids) = - case Graph.lookup pkgid index of - Nothing -> closure completed pkgids - Just pkg -> - case Graph.lookup (nodeKey pkg) completed of - Just _ -> closure completed pkgids - Nothing -> closure completed' pkgids' - where completed' = Graph.insert pkg completed - pkgids' = CD.nonSetupDeps (resolverPackageLibDeps pkg) ++ pkgids - --- | Compute the root sets of a plan --- --- A root set is a set of packages whose dependency closure must be consistent. --- This is the set of all top-level library roots (taken together normally, or --- as singletons sets if we are considering them as independent goals), along --- with all setup dependencies of all packages. -rootSets :: IndependentGoals -> SolverPlanIndex -> [[SolverId]] -rootSets (IndependentGoals indepGoals) index = - if indepGoals then map (:[]) libRoots else [libRoots] - ++ setupRoots index - where - libRoots = libraryRoots index - --- | Compute the library roots of a plan --- --- The library roots are the set of packages with no reverse dependencies --- (no reverse library dependencies but also no reverse setup dependencies). -libraryRoots :: SolverPlanIndex -> [SolverId] -libraryRoots index = - map (nodeKey . toPkgId) roots - where - (graph, toPkgId, _) = Graph.toGraph index - indegree = OldGraph.indegree graph - roots = filter isRoot (OldGraph.vertices graph) - isRoot v = indegree ! 
v == 0 - --- | The setup dependencies of each package in the plan -setupRoots :: SolverPlanIndex -> [[SolverId]] -setupRoots = filter (not . null) - . map (CD.setupDeps . resolverPackageLibDeps) - . Graph.toList - --- | Given a package index where we assume we want to use all the packages --- (use 'dependencyClosure' if you need to get such a index subset) find out --- if the dependencies within it use consistent versions of each package. --- Return all cases where multiple packages depend on different versions of --- some other package. --- --- Each element in the result is a package name along with the packages that --- depend on it and the versions they require. These are guaranteed to be --- distinct. --- -dependencyInconsistencies' :: SolverPlanIndex - -> [(PackageName, [(PackageIdentifier, Version)])] -dependencyInconsistencies' index = - [ (name, [ (pid, packageVersion dep) | (dep,pids) <- uses, pid <- pids]) - | (name, ipid_map) <- Map.toList inverseIndex - , let uses = Map.elems ipid_map - , reallyIsInconsistent (map fst uses) - ] - where - -- For each package name (of a dependency, somewhere) - -- and each installed ID of that that package - -- the associated package instance - -- and a list of reverse dependencies (as source IDs) - inverseIndex :: Map PackageName (Map SolverId (SolverPlanPackage, [PackageId])) - inverseIndex = Map.fromListWith (Map.unionWith (\(a,b) (_,b') -> (a,b++b'))) - [ (packageName dep, Map.fromList [(sid,(dep,[packageId pkg]))]) - | -- For each package @pkg@ - pkg <- Graph.toList index - -- Find out which @sid@ @pkg@ depends on - , sid <- CD.nonSetupDeps (resolverPackageLibDeps pkg) - -- And look up those @sid@ (i.e., @sid@ is the ID of @dep@) - , Just dep <- [Graph.lookup sid index] - ] - - -- If, in a single install plan, we depend on more than one version of a - -- package, then this is ONLY okay in the (rather special) case that we - -- depend on precisely two versions of that package, and one of them - -- depends on the 
other. This is necessary for example for the base where - -- we have base-3 depending on base-4. - reallyIsInconsistent :: [SolverPlanPackage] -> Bool - reallyIsInconsistent [] = False - reallyIsInconsistent [_p] = False - reallyIsInconsistent [p1, p2] = - let pid1 = nodeKey p1 - pid2 = nodeKey p2 - in pid1 `notElem` CD.nonSetupDeps (resolverPackageLibDeps p2) - && pid2 `notElem` CD.nonSetupDeps (resolverPackageLibDeps p1) - reallyIsInconsistent _ = True - - --- | The graph of packages (nodes) and dependencies (edges) must be acyclic. --- --- * if the result is @False@ use 'PackageIndex.dependencyCycles' to find out --- which packages are involved in dependency cycles. --- -acyclic :: SolverPlanIndex -> Bool -acyclic = null . Graph.cycles - --- | An installation plan is closed if for every package in the set, all of --- its dependencies are also in the set. That is, the set is closed under the --- dependency relation. --- --- * if the result is @False@ use 'PackageIndex.brokenPackages' to find out --- which packages depend on packages not in the index. --- -closed :: SolverPlanIndex -> Bool -closed = null . Graph.broken - --- | An installation plan is consistent if all dependencies that target a --- single package name, target the same version. --- --- This is slightly subtle. It is not the same as requiring that there be at --- most one version of any package in the set. It only requires that of --- packages which have more than one other package depending on them. We could --- actually make the condition even more precise and say that different --- versions are OK so long as they are not both in the transitive closure of --- any other package (or equivalently that their inverse closures do not --- intersect). The point is we do not want to have any packages depending --- directly or indirectly on two different versions of the same package. The --- current definition is just a safe approximation of that. 
--- --- * if the result is @False@ use 'PackageIndex.dependencyInconsistencies' to --- find out which packages are. --- -consistent :: SolverPlanIndex -> Bool -consistent = null . dependencyInconsistencies (IndependentGoals False) - --- | The states of packages have that depend on each other must respect --- this relation. That is for very case where package @a@ depends on --- package @b@ we require that @dependencyStatesOk a b = True@. --- -stateDependencyRelation :: SolverPlanPackage - -> SolverPlanPackage - -> Bool -stateDependencyRelation PreExisting{} PreExisting{} = True - -stateDependencyRelation (Configured _) PreExisting{} = True -stateDependencyRelation (Configured _) (Configured _) = True - -stateDependencyRelation _ _ = False - - --- | Compute the dependency closure of a package in a install plan --- -dependencyClosure :: SolverInstallPlan - -> [SolverId] - -> [SolverPlanPackage] -dependencyClosure plan = fromMaybe [] . Graph.closure (planIndex plan) - - -reverseDependencyClosure :: SolverInstallPlan - -> [SolverId] - -> [SolverPlanPackage] -reverseDependencyClosure plan = fromMaybe [] . 
Graph.revClosure (planIndex plan) - - -topologicalOrder :: SolverInstallPlan - -> [SolverPlanPackage] -topologicalOrder plan = Graph.topSort (planIndex plan) - - -reverseTopologicalOrder :: SolverInstallPlan - -> [SolverPlanPackage] -reverseTopologicalOrder plan = Graph.revTopSort (planIndex plan) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/SourceFiles.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/SourceFiles.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/SourceFiles.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/SourceFiles.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,168 +0,0 @@ --- | Contains an @sdist@ like function which computes the source files --- that we should track to determine if a rebuild is necessary. --- Unlike @sdist@, we can operate directly on the true --- 'PackageDescription' (not flattened). --- --- The naming convention, roughly, is that to declare we need the --- source for some type T, you use the function needT; some functions --- need auxiliary information. --- --- We can only use this code for non-Custom scripts; Custom scripts --- may have arbitrary extra dependencies (esp. new preprocessors) which --- we cannot "see" easily. 
-module Distribution.Client.SourceFiles (needElaboratedConfiguredPackage) where - -import Distribution.Client.ProjectPlanning.Types -import Distribution.Client.RebuildMonad - -import Distribution.Solver.Types.OptionalStanza - -import Distribution.Simple.PreProcess - -import Distribution.Types.PackageDescription -import Distribution.Types.Component -import Distribution.Types.ComponentRequestedSpec -import Distribution.Types.Library -import Distribution.Types.Executable -import Distribution.Types.Benchmark -import Distribution.Types.BenchmarkInterface -import Distribution.Types.TestSuite -import Distribution.Types.TestSuiteInterface -import Distribution.Types.BuildInfo -import Distribution.Types.ForeignLib - -import Distribution.ModuleName - -import Prelude () -import Distribution.Client.Compat.Prelude - -import System.FilePath -import Control.Monad -import qualified Data.Set as Set - -needElaboratedConfiguredPackage :: ElaboratedConfiguredPackage -> Rebuild () -needElaboratedConfiguredPackage elab = - case elabPkgOrComp elab of - ElabComponent ecomp -> needElaboratedComponent elab ecomp - ElabPackage epkg -> needElaboratedPackage elab epkg - -needElaboratedPackage :: ElaboratedConfiguredPackage -> ElaboratedPackage -> Rebuild () -needElaboratedPackage elab epkg = - mapM_ (needComponent pkg_descr) (enabledComponents pkg_descr enabled) - where - pkg_descr = elabPkgDescription elab - enabled_stanzas = pkgStanzasEnabled epkg - -- TODO: turn this into a helper function somewhere - enabled = - ComponentRequestedSpec { - testsRequested = TestStanzas `Set.member` enabled_stanzas, - benchmarksRequested = BenchStanzas `Set.member` enabled_stanzas - } - -needElaboratedComponent :: ElaboratedConfiguredPackage -> ElaboratedComponent -> Rebuild () -needElaboratedComponent elab ecomp = - case mb_comp of - Nothing -> needSetup - Just comp -> needComponent pkg_descr comp - where - pkg_descr = elabPkgDescription elab - mb_comp = fmap (getComponent pkg_descr) (compComponentName ecomp) 
- -needComponent :: PackageDescription -> Component -> Rebuild () -needComponent pkg_descr comp = - case comp of - CLib lib -> needLibrary pkg_descr lib - CFLib flib -> needForeignLib pkg_descr flib - CExe exe -> needExecutable pkg_descr exe - CTest test -> needTestSuite pkg_descr test - CBench bench -> needBenchmark pkg_descr bench - -needSetup :: Rebuild () -needSetup = findFirstFileMonitored id ["Setup.hs", "Setup.lhs"] >> return () - -needLibrary :: PackageDescription -> Library -> Rebuild () -needLibrary pkg_descr (Library { exposedModules = modules - , signatures = sigs - , libBuildInfo = bi }) - = needBuildInfo pkg_descr bi (modules ++ sigs) - -needForeignLib :: PackageDescription -> ForeignLib -> Rebuild () -needForeignLib pkg_descr (ForeignLib { foreignLibModDefFile = fs - , foreignLibBuildInfo = bi }) - = do mapM_ needIfExists fs - needBuildInfo pkg_descr bi [] - -needExecutable :: PackageDescription -> Executable -> Rebuild () -needExecutable pkg_descr (Executable { modulePath = mainPath - , buildInfo = bi }) - = do needBuildInfo pkg_descr bi [] - needMainFile bi mainPath - -needTestSuite :: PackageDescription -> TestSuite -> Rebuild () -needTestSuite pkg_descr t - = case testInterface t of - TestSuiteExeV10 _ mainPath -> do - needBuildInfo pkg_descr bi [] - needMainFile bi mainPath - TestSuiteLibV09 _ m -> - needBuildInfo pkg_descr bi [m] - TestSuiteUnsupported _ -> return () -- soft fail - where - bi = testBuildInfo t - -needMainFile :: BuildInfo -> FilePath -> Rebuild () -needMainFile bi mainPath = do - -- The matter here is subtle. It might *seem* that we - -- should just search for mainPath, but as per - -- b61cb051f63ed5869b8f4a6af996ff7e833e4b39 'main-is' - -- will actually be the source file AFTER preprocessing, - -- whereas we need to get the file *prior* to preprocessing. 
- ppFile <- findFileWithExtensionMonitored - (ppSuffixes knownSuffixHandlers) - (hsSourceDirs bi) - (dropExtension mainPath) - case ppFile of - -- But check the original path in the end, because - -- maybe it's a non-preprocessed file with a non-traditional - -- extension. - Nothing -> findFileMonitored (hsSourceDirs bi) mainPath - >>= maybe (return ()) need - Just pp -> need pp - -needBenchmark :: PackageDescription -> Benchmark -> Rebuild () -needBenchmark pkg_descr bm - = case benchmarkInterface bm of - BenchmarkExeV10 _ mainPath -> do - needBuildInfo pkg_descr bi [] - needMainFile bi mainPath - BenchmarkUnsupported _ -> return () -- soft fail - where - bi = benchmarkBuildInfo bm - -needBuildInfo :: PackageDescription -> BuildInfo -> [ModuleName] -> Rebuild () -needBuildInfo pkg_descr bi modules = do - -- NB: These are separate because there may be both A.hs and - -- A.hs-boot; need to track both. - findNeededModules ["hs", "lhs", "hsig", "lhsig"] - findNeededModules ["hs-boot", "lhs-boot"] - mapM_ needIfExists (cSources bi ++ jsSources bi) - -- A MASSIVE HACK to (1) make sure we rebuild when header - -- files change, but (2) not have to rebuild when anything - -- in extra-src-files changes (most of these won't affect - -- compilation). It would be even better if we knew on a - -- per-component basis which headers would be used but that - -- seems to be too difficult. - mapM_ needIfExists (filter ((==".h").takeExtension) (extraSrcFiles pkg_descr)) - forM_ (installIncludes bi) $ \f -> - findFileMonitored ("." 
: includeDirs bi) f - >>= maybe (return ()) need - where - findNeededModules exts = - mapM_ (findNeededModule exts) - (modules ++ otherModules bi) - findNeededModule exts m = - findFileWithExtensionMonitored - (ppSuffixes knownSuffixHandlers ++ exts) - (hsSourceDirs bi) - (toFilePath m) - >>= maybe (return ()) need diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/SourceRepoParse.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/SourceRepoParse.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/SourceRepoParse.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/SourceRepoParse.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -module Distribution.Client.SourceRepoParse where - -import Distribution.Client.Compat.Prelude -import Prelude () - -import Distribution.FieldGrammar.FieldDescrs (fieldDescrsToList) -import Distribution.PackageDescription.FieldGrammar (sourceRepoFieldGrammar) -import Distribution.Parsec.Class (explicitEitherParsec) -import Distribution.ParseUtils (FieldDescr (..), syntaxError) -import Distribution.Types.SourceRepo (SourceRepo, RepoKind (..)) - -sourceRepoFieldDescrs :: [FieldDescr SourceRepo] -sourceRepoFieldDescrs = - map toDescr . 
fieldDescrsToList $ sourceRepoFieldGrammar (RepoKindUnknown "unused") - where - toDescr (name, pretty, parse) = FieldDescr - { fieldName = name - , fieldGet = pretty - , fieldSet = \lineNo str x -> - either (syntaxError lineNo) return - $ explicitEitherParsec (parse x) str - } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/SrcDist.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/SrcDist.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/SrcDist.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/SrcDist.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,195 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE NondecreasingIndentation #-} -{-# LANGUAGE FlexibleContexts #-} --- Implements the \"@.\/cabal sdist@\" command, which creates a source --- distribution for this package. That is, packs up the source code --- into a tarball, making use of the corresponding Cabal module. -module Distribution.Client.SrcDist ( - sdist, - allPackageSourceFiles - ) where - - -import Distribution.Client.SetupWrapper - ( SetupScriptOptions(..), defaultSetupScriptOptions, setupWrapper ) -import Distribution.Client.Tar (createTarGzFile) - -import Distribution.Package - ( Package(..), packageName ) -import Distribution.PackageDescription - ( PackageDescription ) -import Distribution.PackageDescription.Configuration - ( flattenPackageDescription ) -import Distribution.PackageDescription.Parsec - ( readGenericPackageDescription ) -import Distribution.Simple.Utils - ( createDirectoryIfMissingVerbose, defaultPackageDesc - , warn, die', notice, withTempDirectory ) -import Distribution.Client.Setup - ( SDistFlags(..), SDistExFlags(..), ArchiveFormat(..) 
) -import Distribution.Simple.Setup - ( Flag(..), sdistCommand, flagToList, fromFlag, fromFlagOrDefault - , defaultSDistFlags ) -import Distribution.Simple.BuildPaths ( srcPref) -import Distribution.Simple.Program (requireProgram, simpleProgram, programPath) -import Distribution.Simple.Program.Db (emptyProgramDb) -import Distribution.Text ( display ) -import Distribution.Verbosity (Verbosity, normal, lessVerbose) -import Distribution.Version (mkVersion, orLaterVersion, intersectVersionRanges) - -import Distribution.Client.Utils - (tryFindAddSourcePackageDesc) -import Distribution.Compat.Exception (catchIO) - -import System.FilePath ((), (<.>)) -import Control.Monad (when, unless, liftM) -import System.Directory (doesFileExist, removeFile, canonicalizePath, getTemporaryDirectory) -import System.Process (runProcess, waitForProcess) -import System.Exit (ExitCode(..)) -import Control.Exception (IOException, evaluate) - --- |Create a source distribution. -sdist :: SDistFlags -> SDistExFlags -> IO () -sdist flags exflags = do - pkg <- liftM flattenPackageDescription - (readGenericPackageDescription verbosity =<< defaultPackageDesc verbosity) - let withDir :: (FilePath -> IO a) -> IO a - withDir = if not needMakeArchive then \f -> f tmpTargetDir - else withTempDirectory verbosity tmpTargetDir "sdist." - -- 'withTempDir' fails if we don't create 'tmpTargetDir'... - when needMakeArchive $ - createDirectoryIfMissingVerbose verbosity True tmpTargetDir - withDir $ \tmpDir -> do - let outDir = if isOutDirectory then tmpDir else tmpDir tarBallName pkg - flags' = (if not needMakeArchive then flags - else flags { sDistDirectory = Flag outDir }) - unless isListSources $ - createDirectoryIfMissingVerbose verbosity True outDir - - -- Run 'setup sdist --output-directory=tmpDir' (or - -- '--list-source'/'--output-directory=someOtherDir') in case we were passed - -- those options. 
- setupWrapper verbosity setupOpts (Just pkg) sdistCommand (const flags') (const []) - - -- Unless we were given --list-sources or --output-directory ourselves, - -- create an archive. - when needMakeArchive $ - createArchive verbosity pkg tmpDir distPref - - when isOutDirectory $ - notice verbosity $ "Source directory created: " ++ tmpTargetDir - - when isListSources $ - notice verbosity $ "List of package sources written to file '" - ++ (fromFlag . sDistListSources $ flags) ++ "'" - - where - flagEnabled f = not . null . flagToList . f $ flags - - isListSources = flagEnabled sDistListSources - isOutDirectory = flagEnabled sDistDirectory - needMakeArchive = not (isListSources || isOutDirectory) - verbosity = fromFlag (sDistVerbosity flags) - distPref = fromFlag (sDistDistPref flags) - tmpTargetDir = fromFlagOrDefault (srcPref distPref) (sDistDirectory flags) - setupOpts = defaultSetupScriptOptions { - useDistPref = distPref, - -- The '--output-directory' sdist flag was introduced in Cabal 1.12, and - -- '--list-sources' in 1.17. - useCabalVersion = if isListSources - then orLaterVersion $ mkVersion [1,17,0] - else orLaterVersion $ mkVersion [1,12,0] - } - format = fromFlag (sDistFormat exflags) - createArchive = case format of - TargzFormat -> createTarGzArchive - ZipFormat -> createZipArchive - -tarBallName :: PackageDescription -> String -tarBallName = display . packageId - --- | Create a tar.gz archive from a tree of source files. -createTarGzArchive :: Verbosity -> PackageDescription -> FilePath -> FilePath - -> IO () -createTarGzArchive verbosity pkg tmpDir targetPref = do - createTarGzFile tarBallFilePath tmpDir (tarBallName pkg) - notice verbosity $ "Source tarball created: " ++ tarBallFilePath - where - tarBallFilePath = targetPref tarBallName pkg <.> "tar.gz" - --- | Create a zip archive from a tree of source files. 
-createZipArchive :: Verbosity -> PackageDescription -> FilePath -> FilePath - -> IO () -createZipArchive verbosity pkg tmpDir targetPref = do - let dir = tarBallName pkg - zipfile = targetPref dir <.> "zip" - (zipProg, _) <- requireProgram verbosity zipProgram emptyProgramDb - - -- zip has an annoying habit of updating the target rather than creating - -- it from scratch. While that might sound like an optimisation, it doesn't - -- remove files already in the archive that are no longer present in the - -- uncompressed tree. - alreadyExists <- doesFileExist zipfile - when alreadyExists $ removeFile zipfile - - -- We call zip with a different CWD, so have to make the path - -- absolute. Can't just use 'canonicalizePath zipfile' since this function - -- requires its argument to refer to an existing file. - zipfileAbs <- fmap ( dir <.> "zip") . canonicalizePath $ targetPref - - --TODO: use runProgramInvocation, but has to be able to set CWD - hnd <- runProcess (programPath zipProg) ["-q", "-r", zipfileAbs, dir] - (Just tmpDir) - Nothing Nothing Nothing Nothing - exitCode <- waitForProcess hnd - unless (exitCode == ExitSuccess) $ - die' verbosity $ "Generating the zip file failed " - ++ "(zip returned exit code " ++ show exitCode ++ ")" - notice verbosity $ "Source zip archive created: " ++ zipfile - where - zipProgram = simpleProgram "zip" - --- | List all source files of a given add-source dependency. Exits with error if --- something is wrong (e.g. there is no .cabal file in the given directory). -allPackageSourceFiles :: Verbosity -> SetupScriptOptions -> FilePath - -> IO [FilePath] -allPackageSourceFiles verbosity setupOpts0 packageDir = do - pkg <- do - let err = "Error reading source files of package." - desc <- tryFindAddSourcePackageDesc verbosity packageDir err - flattenPackageDescription `fmap` readGenericPackageDescription verbosity desc - globalTmp <- getTemporaryDirectory - withTempDirectory verbosity globalTmp "cabal-list-sources." 
$ \tempDir -> do - let file = tempDir "cabal-sdist-list-sources" - flags = defaultSDistFlags { - sDistVerbosity = Flag $ if verbosity == normal - then lessVerbose verbosity else verbosity, - sDistListSources = Flag file - } - setupOpts = setupOpts0 { - -- 'sdist --list-sources' was introduced in Cabal 1.18. - useCabalVersion = intersectVersionRanges - (orLaterVersion $ mkVersion [1,18,0]) - (useCabalVersion setupOpts0), - useWorkingDir = Just packageDir - } - - doListSources :: IO [FilePath] - doListSources = do - setupWrapper verbosity setupOpts (Just pkg) sdistCommand (const flags) (const []) - fmap lines . readFile $ file - - onFailedListSources :: IOException -> IO () - onFailedListSources e = do - warn verbosity $ - "Could not list sources of the package '" - ++ display (packageName pkg) ++ "'." - warn verbosity $ - "Exception was: " ++ show e - - -- Run setup sdist --list-sources=TMPFILE - r <- doListSources `catchIO` (\e -> onFailedListSources e >> return []) - -- Ensure that we've closed the 'readFile' handle before we exit the - -- temporary directory. - _ <- evaluate (length r) - return r diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Store.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Store.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Store.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Store.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,251 +0,0 @@ -{-# LANGUAGE RecordWildCards, NamedFieldPuns #-} - - --- | Management for the installed package store. 
--- -module Distribution.Client.Store ( - - -- * The store layout - StoreDirLayout(..), - defaultStoreDirLayout, - - -- * Reading store entries - getStoreEntries, - doesStoreEntryExist, - - -- * Creating store entries - newStoreEntry, - NewStoreEntryOutcome(..), - - -- * Concurrency strategy - -- $concurrency - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude -import Distribution.Client.Compat.FileLock - -import Distribution.Client.DistDirLayout -import Distribution.Client.RebuildMonad - -import Distribution.Package (UnitId, mkUnitId) -import Distribution.Compiler (CompilerId) - -import Distribution.Simple.Utils - ( withTempDirectory, debug, info ) -import Distribution.Verbosity -import Distribution.Text - -import Data.Set (Set) -import qualified Data.Set as Set -import Control.Exception -import Control.Monad (forM_) -import System.FilePath -import System.Directory -import System.IO - - --- $concurrency --- --- We access and update the store concurrently. Our strategy to do that safely --- is as follows. --- --- The store entries once created are immutable. This alone simplifies matters --- considerably. --- --- Additionally, the way 'UnitId' hashes are constructed means that if a store --- entry exists already then we can assume its content is ok to reuse, rather --- than having to re-recreate. This is the nix-style input hashing concept. --- --- A consequence of this is that with a little care it is /safe/ to race --- updates against each other. Consider two independent concurrent builds that --- both want to build a particular 'UnitId', where that entry does not yet --- exist in the store. It is safe for both to build and try to install this --- entry into the store provided that: --- --- * only one succeeds --- * the looser discovers that they lost, they abandon their own build and --- re-use the store entry installed by the winner. 
--- --- Note that because builds are not reproducible in general (nor even --- necessarily ABI compatible) then it is essential that the loser abandon --- their build and use the one installed by the winner, so that subsequent --- packages are built against the exact package from the store rather than some --- morally equivalent package that may not be ABI compatible. --- --- Our overriding goal is that store reads be simple, cheap and not require --- locking. We will derive our write-side protocol to make this possible. --- --- The read-side protocol is simply: --- --- * check for the existence of a directory entry named after the 'UnitId' in --- question. That is, if the dir entry @$root/foo-1.0-fe56a...@ exists then --- the store entry can be assumed to be complete and immutable. --- --- Given our read-side protocol, the final step on the write side must be to --- atomically rename a fully-formed store entry directory into its final --- location. While this will indeed be the final step, the preparatory steps --- are more complicated. The tricky aspect is that the store also contains a --- number of shared package databases (one per compiler version). Our read --- strategy means that by the time we install the store dir entry the package --- db must already have been updated. We cannot do the package db update --- as part of atomically renaming the store entry directory however. Furthermore --- it is not safe to allow either package db update because the db entry --- contains the ABI hash and this is not guaranteed to be deterministic. So we --- must register the new package prior to the atomic dir rename. Since this --- combination of steps are not atomic then we need locking. --- --- The write-side protocol is: --- --- * Create a unique temp dir and write all store entry files into it. --- --- * Take a lock named after the 'UnitId' in question. --- --- * Once holding the lock, check again for the existence of the final store --- entry directory. 
If the entry exists then the process lost the race and it --- must abandon, unlock and re-use the existing store entry. If the entry --- does not exist then the process won the race and it can proceed. --- --- * Register the package into the package db. Note that the files are not in --- their final location at this stage so registration file checks may need --- to be disabled. --- --- * Atomically rename the temp dir to the final store entry location. --- --- * Release the previously-acquired lock. --- --- Obviously this means it is possible to fail after registering but before --- installing the store entry, leaving a dangling package db entry. This is not --- much of a problem because this entry does not determine package existence --- for cabal. It does mean however that the package db update should be insert --- or replace, i.e. not failing if the db entry already exists. - - --- | Check if a particular 'UnitId' exists in the store. --- -doesStoreEntryExist :: StoreDirLayout -> CompilerId -> UnitId -> IO Bool -doesStoreEntryExist StoreDirLayout{storePackageDirectory} compid unitid = - doesDirectoryExist (storePackageDirectory compid unitid) - - --- | Return the 'UnitId's of all packages\/components already installed in the --- store. --- -getStoreEntries :: StoreDirLayout -> CompilerId -> Rebuild (Set UnitId) -getStoreEntries StoreDirLayout{storeDirectory} compid = do - paths <- getDirectoryContentsMonitored (storeDirectory compid) - return $! mkEntries paths - where - mkEntries = Set.delete (mkUnitId "package.db") - . Set.delete (mkUnitId "incoming") - . Set.fromList - . map mkUnitId - . filter valid - valid ('.':_) = False - valid _ = True - - --- | The outcome of 'newStoreEntry': either the store entry was newly created --- or it existed already. The latter case happens if there was a race between --- two builds of the same store entry. 
--- -data NewStoreEntryOutcome = UseNewStoreEntry - | UseExistingStoreEntry - deriving (Eq, Show) - --- | Place a new entry into the store. See the concurrency strategy description --- for full details. --- --- In particular, it takes two actions: one to place files into a temporary --- location, and a second to perform any necessary registration. The first --- action is executed without any locks held (the temp dir is unique). The --- second action holds a lock that guarantees that only one cabal process is --- able to install this store entry. This means it is safe to register into --- the compiler package DB or do other similar actions. --- --- Note that if you need to use the registration information later then you --- /must/ check the 'NewStoreEntryOutcome' and if it's'UseExistingStoreEntry' --- then you must read the existing registration information (unless your --- registration information is constructed fully deterministically). --- -newStoreEntry :: Verbosity - -> StoreDirLayout - -> CompilerId - -> UnitId - -> (FilePath -> IO (FilePath, [FilePath])) -- ^ Action to place files. - -> IO () -- ^ Register action, if necessary. - -> IO NewStoreEntryOutcome -newStoreEntry verbosity storeDirLayout@StoreDirLayout{..} - compid unitid - copyFiles register = - -- See $concurrency above for an explanation of the concurrency protocol - - withTempIncomingDir storeDirLayout compid $ \incomingTmpDir -> do - - -- Write all store entry files within the temp dir and return the prefix. - (incomingEntryDir, otherFiles) <- copyFiles incomingTmpDir - - -- Take a lock named after the 'UnitId' in question. - withIncomingUnitIdLock verbosity storeDirLayout compid unitid $ do - - -- Check for the existence of the final store entry directory. - exists <- doesStoreEntryExist storeDirLayout compid unitid - - if exists - -- If the entry exists then we lost the race and we must abandon, - -- unlock and re-use the existing store entry. 
- then do - info verbosity $ - "Concurrent build race: abandoning build in favour of existing " - ++ "store entry " ++ display compid display unitid - return UseExistingStoreEntry - - -- If the entry does not exist then we won the race and can proceed. - else do - - -- Register the package into the package db (if appropriate). - register - - -- Atomically rename the temp dir to the final store entry location. - renameDirectory incomingEntryDir finalEntryDir - forM_ otherFiles $ \file -> do - let finalStoreFile = storeDirectory compid makeRelative (incomingTmpDir (dropDrive (storeDirectory compid))) file - createDirectoryIfMissing True (takeDirectory finalStoreFile) - renameFile file finalStoreFile - - debug verbosity $ - "Installed store entry " ++ display compid display unitid - return UseNewStoreEntry - where - finalEntryDir = storePackageDirectory compid unitid - - -withTempIncomingDir :: StoreDirLayout -> CompilerId - -> (FilePath -> IO a) -> IO a -withTempIncomingDir StoreDirLayout{storeIncomingDirectory} compid action = do - createDirectoryIfMissing True incomingDir - withTempDirectory silent incomingDir "new" action - where - incomingDir = storeIncomingDirectory compid - - -withIncomingUnitIdLock :: Verbosity -> StoreDirLayout - -> CompilerId -> UnitId - -> IO a -> IO a -withIncomingUnitIdLock verbosity StoreDirLayout{storeIncomingLock} - compid unitid action = - bracket takeLock releaseLock (\_hnd -> action) - where - takeLock = do - h <- openFile (storeIncomingLock compid unitid) ReadWriteMode - -- First try non-blocking, but if we would have to wait then - -- log an explanation and do it again in blocking mode. 
- gotlock <- hTryLock h ExclusiveLock - unless gotlock $ do - info verbosity $ "Waiting for file lock on store entry " - ++ display compid display unitid - hLock h ExclusiveLock - return h - - releaseLock = hClose - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/TargetSelector.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/TargetSelector.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/TargetSelector.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/TargetSelector.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2461 +0,0 @@ -{-# LANGUAGE CPP, DeriveGeneric, DeriveFunctor, - RecordWildCards, NamedFieldPuns #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.TargetSelector --- Copyright : (c) Duncan Coutts 2012, 2015, 2016 --- License : BSD-like --- --- Maintainer : duncan@community.haskell.org --- --- Handling for user-specified target selectors. 
--- ------------------------------------------------------------------------------ -module Distribution.Client.TargetSelector ( - - -- * Target selectors - TargetSelector(..), - TargetImplicitCwd(..), - ComponentKind(..), - ComponentKindFilter, - SubComponentTarget(..), - QualLevel(..), - componentKind, - - -- * Reading target selectors - readTargetSelectors, - TargetSelectorProblem(..), - reportTargetSelectorProblems, - showTargetSelector, - TargetString(..), - showTargetString, - parseTargetString, - -- ** non-IO - readTargetSelectorsWith, - DirActions(..), - defaultDirActions, - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Package - ( Package(..), PackageId, PackageName, packageName ) -import Distribution.Types.UnqualComponentName - ( UnqualComponentName, mkUnqualComponentName, unUnqualComponentName - , packageNameToUnqualComponentName ) -import Distribution.Client.Types - ( PackageLocation(..), PackageSpecifier(..) ) - -import Distribution.Verbosity -import Distribution.PackageDescription - ( PackageDescription - , Executable(..) - , TestSuite(..), TestSuiteInterface(..), testModules - , Benchmark(..), BenchmarkInterface(..), benchmarkModules - , BuildInfo(..), explicitLibModules, exeModules ) -import Distribution.PackageDescription.Configuration - ( flattenPackageDescription ) -import Distribution.Solver.Types.SourcePackage - ( SourcePackage(..) ) -import Distribution.ModuleName - ( ModuleName, toFilePath ) -import Distribution.Simple.LocalBuildInfo - ( Component(..), ComponentName(..) 
- , pkgComponents, componentName, componentBuildInfo ) -import Distribution.Types.ForeignLib - -import Distribution.Text - ( Text, display, simpleParse ) -import Distribution.Simple.Utils - ( die', lowercase, ordNub ) -import Distribution.Client.Utils - ( makeRelativeCanonical ) - -import Data.Either - ( partitionEithers ) -import Data.Function - ( on ) -import Data.List - ( stripPrefix, partition, groupBy ) -import Data.Ord - ( comparing ) -import qualified Data.Map.Lazy as Map.Lazy -import qualified Data.Map.Strict as Map -import qualified Data.Set as Set -import Control.Arrow ((&&&)) -import Control.Monad - hiding ( mfilter ) -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.ReadP - ( (+++), (<++) ) -import Distribution.ParseUtils - ( readPToMaybe ) -import System.FilePath as FilePath - ( takeExtension, dropExtension - , splitDirectories, joinPath, splitPath ) -import qualified System.Directory as IO - ( doesFileExist, doesDirectoryExist, canonicalizePath - , getCurrentDirectory ) -import System.FilePath - ( (), (<.>), normalise, dropTrailingPathSeparator ) -import Text.EditDistance - ( defaultEditCosts, restrictedDamerauLevenshteinDistance ) - - --- ------------------------------------------------------------ --- * Target selector terms --- ------------------------------------------------------------ - --- | A target selector is expression selecting a set of components (as targets --- for a actions like @build@, @run@, @test@ etc). A target selector --- corresponds to the user syntax for referring to targets on the command line. --- --- From the users point of view a target can be many things: packages, dirs, --- component names, files etc. Internally we consider a target to be a specific --- component (or module\/file within a component), and all the users' notions --- of targets are just different ways of referring to these component targets. 
--- --- So target selectors are expressions in the sense that they are interpreted --- to refer to one or more components. For example a 'TargetPackage' gets --- interpreted differently by different commands to refer to all or a subset --- of components within the package. --- --- The syntax has lots of optional parts: --- --- > [ package name | package dir | package .cabal file ] --- > [ [lib:|exe:] component name ] --- > [ module name | source file ] --- -data TargetSelector = - - -- | One (or more) packages as a whole, or all the components of a - -- particular kind within the package(s). - -- - -- These are always packages that are local to the project. In the case - -- that there is more than one, they all share the same directory location. - -- - TargetPackage TargetImplicitCwd [PackageId] (Maybe ComponentKindFilter) - - -- | A package specified by name. This may refer to @extra-packages@ from - -- the @cabal.project@ file, or a dependency of a known project package or - -- could refer to a package from a hackage archive. It needs further - -- context to resolve to a specific package. - -- - | TargetPackageNamed PackageName (Maybe ComponentKindFilter) - - -- | All packages, or all components of a particular kind in all packages. - -- - | TargetAllPackages (Maybe ComponentKindFilter) - - -- | A specific component in a package within the project. - -- - | TargetComponent PackageId ComponentName SubComponentTarget - - -- | A component in a package, but where it cannot be verified that the - -- package has such a component, or because the package is itself not - -- known. - -- - | TargetComponentUnknown PackageName - (Either UnqualComponentName ComponentName) - SubComponentTarget - deriving (Eq, Ord, Show, Generic) - --- | Does this 'TargetPackage' selector arise from syntax referring to a --- package in the current directory (e.g. @tests@ or no giving no explicit --- target at all) or does it come from syntax referring to a package name --- or location. 
--- -data TargetImplicitCwd = TargetImplicitCwd | TargetExplicitNamed - deriving (Eq, Ord, Show, Generic) - -data ComponentKind = LibKind | FLibKind | ExeKind | TestKind | BenchKind - deriving (Eq, Ord, Enum, Show) - -type ComponentKindFilter = ComponentKind - --- | Either the component as a whole or detail about a file or module target --- within a component. --- -data SubComponentTarget = - - -- | The component as a whole - WholeComponent - - -- | A specific module within a component. - | ModuleTarget ModuleName - - -- | A specific file within a component. - | FileTarget FilePath - deriving (Eq, Ord, Show, Generic) - -instance Binary SubComponentTarget - - --- ------------------------------------------------------------ --- * Top level, do everything --- ------------------------------------------------------------ - - --- | Parse a bunch of command line args as 'TargetSelector's, failing with an --- error if any are unrecognised. The possible target selectors are based on --- the available packages (and their locations). --- -readTargetSelectors :: [PackageSpecifier (SourcePackage (PackageLocation a))] - -> Maybe ComponentKindFilter - -- ^ This parameter is used when there are ambiguous selectors. - -- If it is 'Just', then we attempt to resolve ambiguitiy - -- by applying it, since otherwise there is no way to allow - -- contextually valid yet syntactically ambiguous selectors. 
- -- (#4676, #5461) - -> [String] - -> IO (Either [TargetSelectorProblem] [TargetSelector]) -readTargetSelectors = readTargetSelectorsWith defaultDirActions - -readTargetSelectorsWith :: (Applicative m, Monad m) => DirActions m - -> [PackageSpecifier (SourcePackage (PackageLocation a))] - -> Maybe ComponentKindFilter - -> [String] - -> m (Either [TargetSelectorProblem] [TargetSelector]) -readTargetSelectorsWith dirActions@DirActions{..} pkgs mfilter targetStrs = - case parseTargetStrings targetStrs of - ([], usertargets) -> do - usertargets' <- mapM (getTargetStringFileStatus dirActions) usertargets - knowntargets <- getKnownTargets dirActions pkgs - case resolveTargetSelectors knowntargets usertargets' mfilter of - ([], btargets) -> return (Right btargets) - (problems, _) -> return (Left problems) - (strs, _) -> return (Left (map TargetSelectorUnrecognised strs)) - - -data DirActions m = DirActions { - doesFileExist :: FilePath -> m Bool, - doesDirectoryExist :: FilePath -> m Bool, - canonicalizePath :: FilePath -> m FilePath, - getCurrentDirectory :: m FilePath - } - -defaultDirActions :: DirActions IO -defaultDirActions = - DirActions { - doesFileExist = IO.doesFileExist, - doesDirectoryExist = IO.doesDirectoryExist, - -- Workaround for - canonicalizePath = IO.canonicalizePath . dropTrailingPathSeparator, - getCurrentDirectory = IO.getCurrentDirectory - } - -makeRelativeToCwd :: Applicative m => DirActions m -> FilePath -> m FilePath -makeRelativeToCwd DirActions{..} path = - makeRelativeCanonical <$> canonicalizePath path <*> getCurrentDirectory - - --- ------------------------------------------------------------ --- * Parsing target strings --- ------------------------------------------------------------ - --- | The outline parse of a target selector. 
It takes one of the forms: --- --- > str1 --- > str1:str2 --- > str1:str2:str3 --- > str1:str2:str3:str4 --- -data TargetString = - TargetString1 String - | TargetString2 String String - | TargetString3 String String String - | TargetString4 String String String String - | TargetString5 String String String String String - | TargetString7 String String String String String String String - deriving (Show, Eq) - --- | Parse a bunch of 'TargetString's (purely without throwing exceptions). --- -parseTargetStrings :: [String] -> ([String], [TargetString]) -parseTargetStrings = - partitionEithers - . map (\str -> maybe (Left str) Right (parseTargetString str)) - -parseTargetString :: String -> Maybe TargetString -parseTargetString = - readPToMaybe parseTargetApprox - where - parseTargetApprox :: Parse.ReadP r TargetString - parseTargetApprox = - (do a <- tokenQ - return (TargetString1 a)) - +++ (do a <- tokenQ0 - _ <- Parse.char ':' - b <- tokenQ - return (TargetString2 a b)) - +++ (do a <- tokenQ0 - _ <- Parse.char ':' - b <- tokenQ - _ <- Parse.char ':' - c <- tokenQ - return (TargetString3 a b c)) - +++ (do a <- tokenQ0 - _ <- Parse.char ':' - b <- token - _ <- Parse.char ':' - c <- tokenQ - _ <- Parse.char ':' - d <- tokenQ - return (TargetString4 a b c d)) - +++ (do a <- tokenQ0 - _ <- Parse.char ':' - b <- token - _ <- Parse.char ':' - c <- tokenQ - _ <- Parse.char ':' - d <- tokenQ - _ <- Parse.char ':' - e <- tokenQ - return (TargetString5 a b c d e)) - +++ (do a <- tokenQ0 - _ <- Parse.char ':' - b <- token - _ <- Parse.char ':' - c <- tokenQ - _ <- Parse.char ':' - d <- tokenQ - _ <- Parse.char ':' - e <- tokenQ - _ <- Parse.char ':' - f <- tokenQ - _ <- Parse.char ':' - g <- tokenQ - return (TargetString7 a b c d e f g)) - - token = Parse.munch1 (\x -> not (isSpace x) && x /= ':') - tokenQ = parseHaskellString <++ token - token0 = Parse.munch (\x -> not (isSpace x) && x /= ':') - tokenQ0= parseHaskellString <++ token0 - parseHaskellString :: Parse.ReadP r 
String - parseHaskellString = Parse.readS_to_P reads - - --- | Render a 'TargetString' back as the external syntax. This is mainly for --- error messages. --- -showTargetString :: TargetString -> String -showTargetString = intercalate ":" . components - where - components (TargetString1 s1) = [s1] - components (TargetString2 s1 s2) = [s1,s2] - components (TargetString3 s1 s2 s3) = [s1,s2,s3] - components (TargetString4 s1 s2 s3 s4) = [s1,s2,s3,s4] - components (TargetString5 s1 s2 s3 s4 s5) = [s1,s2,s3,s4,s5] - components (TargetString7 s1 s2 s3 s4 s5 s6 s7) = [s1,s2,s3,s4,s5,s6,s7] - -showTargetSelector :: TargetSelector -> String -showTargetSelector ts = - case [ t | ql <- [QL1 .. QLFull] - , t <- renderTargetSelector ql ts ] - of (t':_) -> showTargetString (forgetFileStatus t') - [] -> "" - -showTargetSelectorKind :: TargetSelector -> String -showTargetSelectorKind bt = case bt of - TargetPackage TargetExplicitNamed _ Nothing -> "package" - TargetPackage TargetExplicitNamed _ (Just _) -> "package:filter" - TargetPackage TargetImplicitCwd _ Nothing -> "cwd-package" - TargetPackage TargetImplicitCwd _ (Just _) -> "cwd-package:filter" - TargetPackageNamed _ Nothing -> "named-package" - TargetPackageNamed _ (Just _) -> "named-package:filter" - TargetAllPackages Nothing -> "package *" - TargetAllPackages (Just _) -> "package *:filter" - TargetComponent _ _ WholeComponent -> "component" - TargetComponent _ _ ModuleTarget{} -> "module" - TargetComponent _ _ FileTarget{} -> "file" - TargetComponentUnknown _ _ WholeComponent -> "unknown-component" - TargetComponentUnknown _ _ ModuleTarget{} -> "unknown-module" - TargetComponentUnknown _ _ FileTarget{} -> "unknown-file" - - --- ------------------------------------------------------------ --- * Checking if targets exist as files --- ------------------------------------------------------------ - -data TargetStringFileStatus = - TargetStringFileStatus1 String FileStatus - | TargetStringFileStatus2 String FileStatus String - 
| TargetStringFileStatus3 String FileStatus String String - | TargetStringFileStatus4 String String String String - | TargetStringFileStatus5 String String String String String - | TargetStringFileStatus7 String String String String String String String - deriving (Eq, Ord, Show) - -data FileStatus = FileStatusExistsFile FilePath -- the canonicalised filepath - | FileStatusExistsDir FilePath -- the canonicalised filepath - | FileStatusNotExists Bool -- does the parent dir exist even? - deriving (Eq, Ord, Show) - -noFileStatus :: FileStatus -noFileStatus = FileStatusNotExists False - -getTargetStringFileStatus :: (Applicative m, Monad m) => DirActions m - -> TargetString -> m TargetStringFileStatus -getTargetStringFileStatus DirActions{..} t = - case t of - TargetString1 s1 -> - (\f1 -> TargetStringFileStatus1 s1 f1) <$> fileStatus s1 - TargetString2 s1 s2 -> - (\f1 -> TargetStringFileStatus2 s1 f1 s2) <$> fileStatus s1 - TargetString3 s1 s2 s3 -> - (\f1 -> TargetStringFileStatus3 s1 f1 s2 s3) <$> fileStatus s1 - TargetString4 s1 s2 s3 s4 -> - return (TargetStringFileStatus4 s1 s2 s3 s4) - TargetString5 s1 s2 s3 s4 s5 -> - return (TargetStringFileStatus5 s1 s2 s3 s4 s5) - TargetString7 s1 s2 s3 s4 s5 s6 s7 -> - return (TargetStringFileStatus7 s1 s2 s3 s4 s5 s6 s7) - where - fileStatus f = do - fexists <- doesFileExist f - dexists <- doesDirectoryExist f - case splitPath f of - _ | fexists -> FileStatusExistsFile <$> canonicalizePath f - | dexists -> FileStatusExistsDir <$> canonicalizePath f - (d:_) -> FileStatusNotExists <$> doesDirectoryExist d - _ -> pure (FileStatusNotExists False) - -forgetFileStatus :: TargetStringFileStatus -> TargetString -forgetFileStatus t = case t of - TargetStringFileStatus1 s1 _ -> TargetString1 s1 - TargetStringFileStatus2 s1 _ s2 -> TargetString2 s1 s2 - TargetStringFileStatus3 s1 _ s2 s3 -> TargetString3 s1 s2 s3 - TargetStringFileStatus4 s1 s2 s3 s4 -> TargetString4 s1 s2 s3 s4 - TargetStringFileStatus5 s1 s2 s3 s4 - s5 -> 
TargetString5 s1 s2 s3 s4 s5 - TargetStringFileStatus7 s1 s2 s3 s4 - s5 s6 s7 -> TargetString7 s1 s2 s3 s4 s5 s6 s7 - - --- ------------------------------------------------------------ --- * Resolving target strings to target selectors --- ------------------------------------------------------------ - - --- | Given a bunch of user-specified targets, try to resolve what it is they --- refer to. --- -resolveTargetSelectors :: KnownTargets - -> [TargetStringFileStatus] - -> Maybe ComponentKindFilter - -> ([TargetSelectorProblem], - [TargetSelector]) --- default local dir target if there's no given target: -resolveTargetSelectors (KnownTargets{knownPackagesAll = []}) [] _ = - ([TargetSelectorNoTargetsInProject], []) - -resolveTargetSelectors (KnownTargets{knownPackagesPrimary = []}) [] _ = - ([TargetSelectorNoTargetsInCwd], []) - -resolveTargetSelectors (KnownTargets{knownPackagesPrimary}) [] _ = - ([], [TargetPackage TargetImplicitCwd pkgids Nothing]) - where - pkgids = [ pinfoId | KnownPackage{pinfoId} <- knownPackagesPrimary ] - -resolveTargetSelectors knowntargets targetStrs mfilter = - partitionEithers - . 
map (resolveTargetSelector knowntargets mfilter) - $ targetStrs - -resolveTargetSelector :: KnownTargets - -> Maybe ComponentKindFilter - -> TargetStringFileStatus - -> Either TargetSelectorProblem TargetSelector -resolveTargetSelector knowntargets@KnownTargets{..} mfilter targetStrStatus = - case findMatch (matcher targetStrStatus) of - - Unambiguous _ - | projectIsEmpty -> Left TargetSelectorNoTargetsInProject - - Unambiguous (TargetPackage TargetImplicitCwd [] _) - -> Left (TargetSelectorNoCurrentPackage targetStr) - - Unambiguous target -> Right target - - None errs - | projectIsEmpty -> Left TargetSelectorNoTargetsInProject - | otherwise -> Left (classifyMatchErrors errs) - - Ambiguous _ targets - | Just kfilter <- mfilter - , [target] <- applyKindFilter kfilter targets -> Right target - - Ambiguous exactMatch targets -> - case disambiguateTargetSelectors - matcher targetStrStatus exactMatch - targets of - Right targets' -> Left (TargetSelectorAmbiguous targetStr targets') - Left ((m, ms):_) -> Left (MatchingInternalError targetStr m ms) - Left [] -> internalError "resolveTargetSelector" - where - matcher = matchTargetSelector knowntargets - - targetStr = forgetFileStatus targetStrStatus - - projectIsEmpty = null knownPackagesAll - - classifyMatchErrors errs - | not (null expected) - = let (things, got:_) = unzip expected in - TargetSelectorExpected targetStr things got - - | not (null nosuch) - = TargetSelectorNoSuch targetStr nosuch - - | otherwise - = internalError $ "classifyMatchErrors: " ++ show errs - where - expected = [ (thing, got) - | (_, MatchErrorExpected thing got) - <- map (innerErr Nothing) errs ] - -- Trim the list of alternatives by dropping duplicates and - -- retaining only at most three most similar (by edit distance) ones. 
- nosuch = Map.foldrWithKey genResults [] $ Map.fromListWith Set.union $ - [ ((inside, thing, got), Set.fromList alts) - | (inside, MatchErrorNoSuch thing got alts) - <- map (innerErr Nothing) errs - ] - - genResults (inside, thing, got) alts acc = ( - inside - , thing - , got - , take maxResults - $ map fst - $ takeWhile distanceLow - $ sortBy (comparing snd) - $ map addLevDist - $ Set.toList alts - ) : acc - where - addLevDist = id &&& restrictedDamerauLevenshteinDistance - defaultEditCosts got - - distanceLow (_, dist) = dist < length got `div` 2 - - maxResults = 3 - - innerErr _ (MatchErrorIn kind thing m) - = innerErr (Just (kind,thing)) m - innerErr c m = (c,m) - - applyKindFilter :: ComponentKindFilter -> [TargetSelector] -> [TargetSelector] - applyKindFilter kfilter = filter go - where - go (TargetPackage _ _ (Just filter')) = kfilter == filter' - go (TargetPackageNamed _ (Just filter')) = kfilter == filter' - go (TargetAllPackages (Just filter')) = kfilter == filter' - go (TargetComponent _ cname _) - | CLibName <- cname = kfilter == LibKind - | CSubLibName _ <- cname = kfilter == LibKind - | CFLibName _ <- cname = kfilter == FLibKind - | CExeName _ <- cname = kfilter == ExeKind - | CTestName _ <- cname = kfilter == TestKind - | CBenchName _ <- cname = kfilter == BenchKind - go _ = True - --- | The various ways that trying to resolve a 'TargetString' to a --- 'TargetSelector' can fail. --- -data TargetSelectorProblem - = TargetSelectorExpected TargetString [String] String - -- ^ [expected thing] (actually got) - | TargetSelectorNoSuch TargetString - [(Maybe (String, String), String, String, [String])] - -- ^ [([in thing], no such thing, actually got, alternatives)] - | TargetSelectorAmbiguous TargetString - [(TargetString, TargetSelector)] - - | MatchingInternalError TargetString TargetSelector - [(TargetString, [TargetSelector])] - | TargetSelectorUnrecognised String - -- ^ Syntax error when trying to parse a target string. 
- | TargetSelectorNoCurrentPackage TargetString - | TargetSelectorNoTargetsInCwd - | TargetSelectorNoTargetsInProject - deriving (Show, Eq) - -data QualLevel = QL1 | QL2 | QL3 | QLFull - deriving (Eq, Enum, Show) - -disambiguateTargetSelectors - :: (TargetStringFileStatus -> Match TargetSelector) - -> TargetStringFileStatus -> MatchClass - -> [TargetSelector] - -> Either [(TargetSelector, [(TargetString, [TargetSelector])])] - [(TargetString, TargetSelector)] -disambiguateTargetSelectors matcher matchInput exactMatch matchResults = - case partitionEithers results of - (errs@(_:_), _) -> Left errs - ([], ok) -> Right ok - where - -- So, here's the strategy. We take the original match results, and make a - -- table of all their renderings at all qualification levels. - -- Note there can be multiple renderings at each qualification level. - matchResultsRenderings :: [(TargetSelector, [TargetStringFileStatus])] - matchResultsRenderings = - [ (matchResult, matchRenderings) - | matchResult <- matchResults - , let matchRenderings = - [ rendering - | ql <- [QL1 .. QLFull] - , rendering <- renderTargetSelector ql matchResult ] - ] - - -- Of course the point is that we're looking for renderings that are - -- unambiguous matches. So we build another memo table of all the matches - -- for all of those renderings. So by looking up in this table we can see - -- if we've got an unambiguous match. 
- - memoisedMatches :: Map TargetStringFileStatus (Match TargetSelector) - memoisedMatches = - -- avoid recomputing the main one if it was an exact match - (if exactMatch == Exact - then Map.insert matchInput (Match Exact 0 matchResults) - else id) - $ Map.Lazy.fromList - [ (rendering, matcher rendering) - | rendering <- concatMap snd matchResultsRenderings ] - - -- Finally, for each of the match results, we go through all their - -- possible renderings (in order of qualification level, though remember - -- there can be multiple renderings per level), and find the first one - -- that has an unambiguous match. - results :: [Either (TargetSelector, [(TargetString, [TargetSelector])]) - (TargetString, TargetSelector)] - results = - [ case findUnambiguous originalMatch matchRenderings of - Just unambiguousRendering -> - Right ( forgetFileStatus unambiguousRendering - , originalMatch) - - -- This case is an internal error, but we bubble it up and report it - Nothing -> - Left ( originalMatch - , [ (forgetFileStatus rendering, matches) - | rendering <- matchRenderings - , let Match m _ matches = - memoisedMatches Map.! rendering - , m /= Inexact - ] ) - - | (originalMatch, matchRenderings) <- matchResultsRenderings ] - - findUnambiguous :: TargetSelector - -> [TargetStringFileStatus] - -> Maybe TargetStringFileStatus - findUnambiguous _ [] = Nothing - findUnambiguous t (r:rs) = - case memoisedMatches Map.! r of - Match Exact _ [t'] | t == t' - -> Just r - Match Exact _ _ -> findUnambiguous t rs - Match Unknown _ _ -> findUnambiguous t rs - Match Inexact _ _ -> internalError "Match Inexact" - NoMatch _ _ -> internalError "NoMatch" - -internalError :: String -> a -internalError msg = - error $ "TargetSelector: internal error: " ++ msg - - --- | Throw an exception with a formatted message if there are any problems. 
--- -reportTargetSelectorProblems :: Verbosity -> [TargetSelectorProblem] -> IO a -reportTargetSelectorProblems verbosity problems = do - - case [ str | TargetSelectorUnrecognised str <- problems ] of - [] -> return () - targets -> - die' verbosity $ unlines - [ "Unrecognised target syntax for '" ++ name ++ "'." - | name <- targets ] - - case [ (t, m, ms) | MatchingInternalError t m ms <- problems ] of - [] -> return () - ((target, originalMatch, renderingsAndMatches):_) -> - die' verbosity $ "Internal error in target matching. It should always " - ++ "be possible to find a syntax that's sufficiently qualified to " - ++ "give an unambiguous match. However when matching '" - ++ showTargetString target ++ "' we found " - ++ showTargetSelector originalMatch - ++ " (" ++ showTargetSelectorKind originalMatch ++ ") which does " - ++ "not have an unambiguous syntax. The possible syntax and the " - ++ "targets they match are as follows:\n" - ++ unlines - [ "'" ++ showTargetString rendering ++ "' which matches " - ++ intercalate ", " - [ showTargetSelector match ++ - " (" ++ showTargetSelectorKind match ++ ")" - | match <- matches ] - | (rendering, matches) <- renderingsAndMatches ] - - case [ (t, e, g) | TargetSelectorExpected t e g <- problems ] of - [] -> return () - targets -> - die' verbosity $ unlines - [ "Unrecognised target '" ++ showTargetString target - ++ "'.\n" - ++ "Expected a " ++ intercalate " or " expected - ++ ", rather than '" ++ got ++ "'." - | (target, expected, got) <- targets ] - - case [ (t, e) | TargetSelectorNoSuch t e <- problems ] of - [] -> return () - targets -> - die' verbosity $ unlines - [ "Unknown target '" ++ showTargetString target ++ - "'.\n" ++ unlines - [ (case inside of - Just (kind, "") - -> "The " ++ kind ++ " has no " - Just (kind, thing) - -> "The " ++ kind ++ " " ++ thing ++ " has no " - Nothing -> "There is no ") - ++ intercalate " or " [ mungeThing thing ++ " '" ++ got ++ "'" - | (thing, got, _alts) <- nosuch' ] ++ "." 
- ++ if null alternatives then "" else - "\nPerhaps you meant " ++ intercalate ";\nor " - [ "the " ++ thing ++ " '" ++ intercalate "' or '" alts ++ "'?" - | (thing, alts) <- alternatives ] - | (inside, nosuch') <- groupByContainer nosuch - , let alternatives = - [ (thing, alts) - | (thing,_got,alts@(_:_)) <- nosuch' ] - ] - | (target, nosuch) <- targets - , let groupByContainer = - map (\g@((inside,_,_,_):_) -> - (inside, [ (thing,got,alts) - | (_,thing,got,alts) <- g ])) - . groupBy ((==) `on` (\(x,_,_,_) -> x)) - . sortBy (compare `on` (\(x,_,_,_) -> x)) - ] - where - mungeThing "file" = "file target" - mungeThing thing = thing - - case [ (t, ts) | TargetSelectorAmbiguous t ts <- problems ] of - [] -> return () - targets -> - die' verbosity $ unlines - [ "Ambiguous target '" ++ showTargetString target - ++ "'. It could be:\n " - ++ unlines [ " "++ showTargetString ut ++ - " (" ++ showTargetSelectorKind bt ++ ")" - | (ut, bt) <- amb ] - | (target, amb) <- targets ] - - case [ t | TargetSelectorNoCurrentPackage t <- problems ] of - [] -> return () - target:_ -> - die' verbosity $ - "The target '" ++ showTargetString target ++ "' refers to the " - ++ "components in the package in the current directory, but there " - ++ "is no package in the current directory (or at least not listed " - ++ "as part of the project)." - --TODO: report a different error if there is a .cabal file but it's - -- not a member of the project - - case [ () | TargetSelectorNoTargetsInCwd <- problems ] of - [] -> return () - _:_ -> - die' verbosity $ - "No targets given and there is no package in the current " - ++ "directory. Use the target 'all' for all packages in the " - ++ "project or specify packages or components by name or location. " - ++ "See 'cabal build --help' for more details on target options." - - case [ () | TargetSelectorNoTargetsInProject <- problems ] of - [] -> return () - _:_ -> - die' verbosity $ - "There is no .cabal package file or cabal.project file. 
" - ++ "To build packages locally you need at minimum a .cabal " - ++ "file. You can use 'cabal init' to create one.\n" - ++ "\n" - ++ "For non-trivial projects you will also want a cabal.project " - ++ "file in the root directory of your project. This file lists the " - ++ "packages in your project and all other build configuration. " - ++ "See the Cabal user guide for full details." - - fail "reportTargetSelectorProblems: internal error" - - ----------------------------------- --- Syntax type --- - --- | Syntax for the 'TargetSelector': the matcher and renderer --- -data Syntax = Syntax QualLevel Matcher Renderer - | AmbiguousAlternatives Syntax Syntax - | ShadowingAlternatives Syntax Syntax - -type Matcher = TargetStringFileStatus -> Match TargetSelector -type Renderer = TargetSelector -> [TargetStringFileStatus] - -foldSyntax :: (a -> a -> a) -> (a -> a -> a) - -> (QualLevel -> Matcher -> Renderer -> a) - -> (Syntax -> a) -foldSyntax ambiguous unambiguous syntax = go - where - go (Syntax ql match render) = syntax ql match render - go (AmbiguousAlternatives a b) = ambiguous (go a) (go b) - go (ShadowingAlternatives a b) = unambiguous (go a) (go b) - - ----------------------------------- --- Top level renderer and matcher --- - -renderTargetSelector :: QualLevel -> TargetSelector - -> [TargetStringFileStatus] -renderTargetSelector ql ts = - foldSyntax - (++) (++) - (\ql' _ render -> guard (ql == ql') >> render ts) - syntax - where - syntax = syntaxForms emptyKnownTargets - -- don't need known targets for rendering - -matchTargetSelector :: KnownTargets - -> TargetStringFileStatus - -> Match TargetSelector -matchTargetSelector knowntargets = \usertarget -> - nubMatchesBy (==) $ - - let ql = targetQualLevel usertarget in - foldSyntax - (<|>) () - (\ql' match _ -> guard (ql == ql') >> match usertarget) - syntax - where - syntax = syntaxForms knowntargets - - targetQualLevel TargetStringFileStatus1{} = QL1 - targetQualLevel TargetStringFileStatus2{} = QL2 - 
targetQualLevel TargetStringFileStatus3{} = QL3 - targetQualLevel TargetStringFileStatus4{} = QLFull - targetQualLevel TargetStringFileStatus5{} = QLFull - targetQualLevel TargetStringFileStatus7{} = QLFull - - ----------------------------------- --- Syntax forms --- - --- | All the forms of syntax for 'TargetSelector'. --- -syntaxForms :: KnownTargets -> Syntax -syntaxForms KnownTargets { - knownPackagesAll = pinfo, - knownPackagesPrimary = ppinfo, - knownComponentsAll = cinfo, - knownComponentsPrimary = pcinfo, - knownComponentsOther = ocinfo - } = - -- The various forms of syntax here are ambiguous in many cases. - -- Our policy is by default we expose that ambiguity and report - -- ambiguous matches. In certain cases we override the ambiguity - -- by having some forms shadow others. - -- - -- We make modules shadow files because module name "Q" clashes - -- with file "Q" with no extension but these refer to the same - -- thing anyway so it's not a useful ambiguity. Other cases are - -- not ambiguous like "Q" vs "Q.hs" or "Data.Q" vs "Data/Q". 
- - ambiguousAlternatives - -- convenient single-component forms - [ shadowingAlternatives - [ ambiguousAlternatives - [ syntaxForm1All - , syntaxForm1Filter ppinfo - , shadowingAlternatives - [ syntaxForm1Component pcinfo - , syntaxForm1Package pinfo - ] - ] - , syntaxForm1Component ocinfo - , syntaxForm1Module cinfo - , syntaxForm1File pinfo - ] - - -- two-component partially qualified forms - -- fully qualified form for 'all' - , syntaxForm2MetaAll - , syntaxForm2AllFilter - , syntaxForm2NamespacePackage pinfo - , syntaxForm2PackageComponent pinfo - , syntaxForm2PackageFilter pinfo - , syntaxForm2KindComponent cinfo - , shadowingAlternatives - [ syntaxForm2PackageModule pinfo - , syntaxForm2PackageFile pinfo - ] - , shadowingAlternatives - [ syntaxForm2ComponentModule cinfo - , syntaxForm2ComponentFile cinfo - ] - - -- rarely used partially qualified forms - , syntaxForm3PackageKindComponent pinfo - , shadowingAlternatives - [ syntaxForm3PackageComponentModule pinfo - , syntaxForm3PackageComponentFile pinfo - ] - , shadowingAlternatives - [ syntaxForm3KindComponentModule cinfo - , syntaxForm3KindComponentFile cinfo - ] - , syntaxForm3NamespacePackageFilter pinfo - - -- fully-qualified forms for all and cwd with filter - , syntaxForm3MetaAllFilter - , syntaxForm3MetaCwdFilter ppinfo - - -- fully-qualified form for package and package with filter - , syntaxForm3MetaNamespacePackage pinfo - , syntaxForm4MetaNamespacePackageFilter pinfo - - -- fully-qualified forms for component, module and file - , syntaxForm5MetaNamespacePackageKindComponent pinfo - , syntaxForm7MetaNamespacePackageKindComponentNamespaceModule pinfo - , syntaxForm7MetaNamespacePackageKindComponentNamespaceFile pinfo - ] - where - ambiguousAlternatives = foldr1 AmbiguousAlternatives - shadowingAlternatives = foldr1 ShadowingAlternatives - - --- | Syntax: "all" to select all packages in the project --- --- > cabal build all --- -syntaxForm1All :: Syntax -syntaxForm1All = - syntaxForm1 render $ \str1 
_fstatus1 -> do - guardMetaAll str1 - return (TargetAllPackages Nothing) - where - render (TargetAllPackages Nothing) = - [TargetStringFileStatus1 "all" noFileStatus] - render _ = [] - --- | Syntax: filter --- --- > cabal build tests --- -syntaxForm1Filter :: [KnownPackage] -> Syntax -syntaxForm1Filter ps = - syntaxForm1 render $ \str1 _fstatus1 -> do - kfilter <- matchComponentKindFilter str1 - return (TargetPackage TargetImplicitCwd pids (Just kfilter)) - where - pids = [ pinfoId | KnownPackage{pinfoId} <- ps ] - render (TargetPackage TargetImplicitCwd _ (Just kfilter)) = - [TargetStringFileStatus1 (dispF kfilter) noFileStatus] - render _ = [] - - --- | Syntax: package (name, dir or file) --- --- > cabal build foo --- > cabal build ../bar ../bar/bar.cabal --- -syntaxForm1Package :: [KnownPackage] -> Syntax -syntaxForm1Package pinfo = - syntaxForm1 render $ \str1 fstatus1 -> do - guardPackage str1 fstatus1 - p <- matchPackage pinfo str1 fstatus1 - case p of - KnownPackage{pinfoId} -> - return (TargetPackage TargetExplicitNamed [pinfoId] Nothing) - KnownPackageName pn -> - return (TargetPackageNamed pn Nothing) - where - render (TargetPackage TargetExplicitNamed [p] Nothing) = - [TargetStringFileStatus1 (dispP p) noFileStatus] - render (TargetPackageNamed pn Nothing) = - [TargetStringFileStatus1 (dispPN pn) noFileStatus] - render _ = [] - --- | Syntax: component --- --- > cabal build foo --- -syntaxForm1Component :: [KnownComponent] -> Syntax -syntaxForm1Component cs = - syntaxForm1 render $ \str1 _fstatus1 -> do - guardComponentName str1 - c <- matchComponentName cs str1 - return (TargetComponent (cinfoPackageId c) (cinfoName c) WholeComponent) - where - render (TargetComponent p c WholeComponent) = - [TargetStringFileStatus1 (dispC p c) noFileStatus] - render _ = [] - --- | Syntax: module --- --- > cabal build Data.Foo --- -syntaxForm1Module :: [KnownComponent] -> Syntax -syntaxForm1Module cs = - syntaxForm1 render $ \str1 _fstatus1 -> do - guardModuleName str1 - 
let ms = [ (m,c) | c <- cs, m <- cinfoModules c ] - (m,c) <- matchModuleNameAnd ms str1 - return (TargetComponent (cinfoPackageId c) (cinfoName c) (ModuleTarget m)) - where - render (TargetComponent _p _c (ModuleTarget m)) = - [TargetStringFileStatus1 (dispM m) noFileStatus] - render _ = [] - --- | Syntax: file name --- --- > cabal build Data/Foo.hs bar/Main.hsc --- -syntaxForm1File :: [KnownPackage] -> Syntax -syntaxForm1File ps = - -- Note there's a bit of an inconsistency here vs the other syntax forms - -- for files. For the single-part syntax the target has to point to a file - -- that exists (due to our use of matchPackageDirectoryPrefix), whereas for - -- all the other forms we don't require that. - syntaxForm1 render $ \str1 fstatus1 -> - expecting "file" str1 $ do - (pkgfile, ~KnownPackage{pinfoId, pinfoComponents}) - -- always returns the KnownPackage case - <- matchPackageDirectoryPrefix ps fstatus1 - orNoThingIn "package" (display (packageName pinfoId)) $ do - (filepath, c) <- matchComponentFile pinfoComponents pkgfile - return (TargetComponent pinfoId (cinfoName c) (FileTarget filepath)) - where - render (TargetComponent _p _c (FileTarget f)) = - [TargetStringFileStatus1 f noFileStatus] - render _ = [] - ---- - --- | Syntax: :all --- --- > cabal build :all --- -syntaxForm2MetaAll :: Syntax -syntaxForm2MetaAll = - syntaxForm2 render $ \str1 _fstatus1 str2 -> do - guardNamespaceMeta str1 - guardMetaAll str2 - return (TargetAllPackages Nothing) - where - render (TargetAllPackages Nothing) = - [TargetStringFileStatus2 "" noFileStatus "all"] - render _ = [] - --- | Syntax: all : filer --- --- > cabal build all:tests --- -syntaxForm2AllFilter :: Syntax -syntaxForm2AllFilter = - syntaxForm2 render $ \str1 _fstatus1 str2 -> do - guardMetaAll str1 - kfilter <- matchComponentKindFilter str2 - return (TargetAllPackages (Just kfilter)) - where - render (TargetAllPackages (Just kfilter)) = - [TargetStringFileStatus2 "all" noFileStatus (dispF kfilter)] - render _ = 
[] - --- | Syntax: package : filer --- --- > cabal build foo:tests --- -syntaxForm2PackageFilter :: [KnownPackage] -> Syntax -syntaxForm2PackageFilter ps = - syntaxForm2 render $ \str1 fstatus1 str2 -> do - guardPackage str1 fstatus1 - p <- matchPackage ps str1 fstatus1 - kfilter <- matchComponentKindFilter str2 - case p of - KnownPackage{pinfoId} -> - return (TargetPackage TargetExplicitNamed [pinfoId] (Just kfilter)) - KnownPackageName pn -> - return (TargetPackageNamed pn (Just kfilter)) - where - render (TargetPackage TargetExplicitNamed [p] (Just kfilter)) = - [TargetStringFileStatus2 (dispP p) noFileStatus (dispF kfilter)] - render (TargetPackageNamed pn (Just kfilter)) = - [TargetStringFileStatus2 (dispPN pn) noFileStatus (dispF kfilter)] - render _ = [] - --- | Syntax: pkg : package name --- --- > cabal build pkg:foo --- -syntaxForm2NamespacePackage :: [KnownPackage] -> Syntax -syntaxForm2NamespacePackage pinfo = - syntaxForm2 render $ \str1 _fstatus1 str2 -> do - guardNamespacePackage str1 - guardPackageName str2 - p <- matchPackage pinfo str2 noFileStatus - case p of - KnownPackage{pinfoId} -> - return (TargetPackage TargetExplicitNamed [pinfoId] Nothing) - KnownPackageName pn -> - return (TargetPackageNamed pn Nothing) - where - render (TargetPackage TargetExplicitNamed [p] Nothing) = - [TargetStringFileStatus2 "pkg" noFileStatus (dispP p)] - render (TargetPackageNamed pn Nothing) = - [TargetStringFileStatus2 "pkg" noFileStatus (dispPN pn)] - render _ = [] - --- | Syntax: package : component --- --- > cabal build foo:foo --- > cabal build ./foo:foo --- > cabal build ./foo.cabal:foo --- -syntaxForm2PackageComponent :: [KnownPackage] -> Syntax -syntaxForm2PackageComponent ps = - syntaxForm2 render $ \str1 fstatus1 str2 -> do - guardPackage str1 fstatus1 - guardComponentName str2 - p <- matchPackage ps str1 fstatus1 - case p of - KnownPackage{pinfoId, pinfoComponents} -> - orNoThingIn "package" (display (packageName pinfoId)) $ do - c <- matchComponentName 
pinfoComponents str2 - return (TargetComponent pinfoId (cinfoName c) WholeComponent) - --TODO: the error here ought to say there's no component by that name in - -- this package, and name the package - KnownPackageName pn -> - let cn = mkUnqualComponentName str2 in - return (TargetComponentUnknown pn (Left cn) WholeComponent) - where - render (TargetComponent p c WholeComponent) = - [TargetStringFileStatus2 (dispP p) noFileStatus (dispC p c)] - render (TargetComponentUnknown pn (Left cn) WholeComponent) = - [TargetStringFileStatus2 (dispPN pn) noFileStatus (display cn)] - render _ = [] - --- | Syntax: namespace : component --- --- > cabal build lib:foo exe:foo --- -syntaxForm2KindComponent :: [KnownComponent] -> Syntax -syntaxForm2KindComponent cs = - syntaxForm2 render $ \str1 _fstatus1 str2 -> do - ckind <- matchComponentKind str1 - guardComponentName str2 - c <- matchComponentKindAndName cs ckind str2 - return (TargetComponent (cinfoPackageId c) (cinfoName c) WholeComponent) - where - render (TargetComponent p c WholeComponent) = - [TargetStringFileStatus2 (dispCK c) noFileStatus (dispC p c)] - render _ = [] - --- | Syntax: package : module --- --- > cabal build foo:Data.Foo --- > cabal build ./foo:Data.Foo --- > cabal build ./foo.cabal:Data.Foo --- -syntaxForm2PackageModule :: [KnownPackage] -> Syntax -syntaxForm2PackageModule ps = - syntaxForm2 render $ \str1 fstatus1 str2 -> do - guardPackage str1 fstatus1 - guardModuleName str2 - p <- matchPackage ps str1 fstatus1 - case p of - KnownPackage{pinfoId, pinfoComponents} -> - orNoThingIn "package" (display (packageName pinfoId)) $ do - let ms = [ (m,c) | c <- pinfoComponents, m <- cinfoModules c ] - (m,c) <- matchModuleNameAnd ms str2 - return (TargetComponent pinfoId (cinfoName c) (ModuleTarget m)) - KnownPackageName pn -> do - m <- matchModuleNameUnknown str2 - -- We assume the primary library component of the package: - return (TargetComponentUnknown pn (Right CLibName) (ModuleTarget m)) - where - render 
(TargetComponent p _c (ModuleTarget m)) = - [TargetStringFileStatus2 (dispP p) noFileStatus (dispM m)] - render _ = [] - --- | Syntax: component : module --- --- > cabal build foo:Data.Foo --- -syntaxForm2ComponentModule :: [KnownComponent] -> Syntax -syntaxForm2ComponentModule cs = - syntaxForm2 render $ \str1 _fstatus1 str2 -> do - guardComponentName str1 - guardModuleName str2 - c <- matchComponentName cs str1 - orNoThingIn "component" (cinfoStrName c) $ do - let ms = cinfoModules c - m <- matchModuleName ms str2 - return (TargetComponent (cinfoPackageId c) (cinfoName c) - (ModuleTarget m)) - where - render (TargetComponent p c (ModuleTarget m)) = - [TargetStringFileStatus2 (dispC p c) noFileStatus (dispM m)] - render _ = [] - --- | Syntax: package : filename --- --- > cabal build foo:Data/Foo.hs --- > cabal build ./foo:Data/Foo.hs --- > cabal build ./foo.cabal:Data/Foo.hs --- -syntaxForm2PackageFile :: [KnownPackage] -> Syntax -syntaxForm2PackageFile ps = - syntaxForm2 render $ \str1 fstatus1 str2 -> do - guardPackage str1 fstatus1 - p <- matchPackage ps str1 fstatus1 - case p of - KnownPackage{pinfoId, pinfoComponents} -> - orNoThingIn "package" (display (packageName pinfoId)) $ do - (filepath, c) <- matchComponentFile pinfoComponents str2 - return (TargetComponent pinfoId (cinfoName c) (FileTarget filepath)) - KnownPackageName pn -> - let filepath = str2 in - -- We assume the primary library component of the package: - return (TargetComponentUnknown pn (Right CLibName) (FileTarget filepath)) - where - render (TargetComponent p _c (FileTarget f)) = - [TargetStringFileStatus2 (dispP p) noFileStatus f] - render _ = [] - --- | Syntax: component : filename --- --- > cabal build foo:Data/Foo.hs --- -syntaxForm2ComponentFile :: [KnownComponent] -> Syntax -syntaxForm2ComponentFile cs = - syntaxForm2 render $ \str1 _fstatus1 str2 -> do - guardComponentName str1 - c <- matchComponentName cs str1 - orNoThingIn "component" (cinfoStrName c) $ do - (filepath, _) <- 
matchComponentFile [c] str2 - return (TargetComponent (cinfoPackageId c) (cinfoName c) - (FileTarget filepath)) - where - render (TargetComponent p c (FileTarget f)) = - [TargetStringFileStatus2 (dispC p c) noFileStatus f] - render _ = [] - ---- - --- | Syntax: :all : filter --- --- > cabal build :all:tests --- -syntaxForm3MetaAllFilter :: Syntax -syntaxForm3MetaAllFilter = - syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do - guardNamespaceMeta str1 - guardMetaAll str2 - kfilter <- matchComponentKindFilter str3 - return (TargetAllPackages (Just kfilter)) - where - render (TargetAllPackages (Just kfilter)) = - [TargetStringFileStatus3 "" noFileStatus "all" (dispF kfilter)] - render _ = [] - -syntaxForm3MetaCwdFilter :: [KnownPackage] -> Syntax -syntaxForm3MetaCwdFilter ps = - syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do - guardNamespaceMeta str1 - guardNamespaceCwd str2 - kfilter <- matchComponentKindFilter str3 - return (TargetPackage TargetImplicitCwd pids (Just kfilter)) - where - pids = [ pinfoId | KnownPackage{pinfoId} <- ps ] - render (TargetPackage TargetImplicitCwd _ (Just kfilter)) = - [TargetStringFileStatus3 "" noFileStatus "cwd" (dispF kfilter)] - render _ = [] - --- | Syntax: :pkg : package name --- --- > cabal build :pkg:foo --- -syntaxForm3MetaNamespacePackage :: [KnownPackage] -> Syntax -syntaxForm3MetaNamespacePackage pinfo = - syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do - guardNamespaceMeta str1 - guardNamespacePackage str2 - guardPackageName str3 - p <- matchPackage pinfo str3 noFileStatus - case p of - KnownPackage{pinfoId} -> - return (TargetPackage TargetExplicitNamed [pinfoId] Nothing) - KnownPackageName pn -> - return (TargetPackageNamed pn Nothing) - where - render (TargetPackage TargetExplicitNamed [p] Nothing) = - [TargetStringFileStatus3 "" noFileStatus "pkg" (dispP p)] - render (TargetPackageNamed pn Nothing) = - [TargetStringFileStatus3 "" noFileStatus "pkg" (dispPN pn)] - render _ = [] - --- | Syntax: package : 
namespace : component --- --- > cabal build foo:lib:foo --- > cabal build foo/:lib:foo --- > cabal build foo.cabal:lib:foo --- -syntaxForm3PackageKindComponent :: [KnownPackage] -> Syntax -syntaxForm3PackageKindComponent ps = - syntaxForm3 render $ \str1 fstatus1 str2 str3 -> do - guardPackage str1 fstatus1 - ckind <- matchComponentKind str2 - guardComponentName str3 - p <- matchPackage ps str1 fstatus1 - case p of - KnownPackage{pinfoId, pinfoComponents} -> - orNoThingIn "package" (display (packageName pinfoId)) $ do - c <- matchComponentKindAndName pinfoComponents ckind str3 - return (TargetComponent pinfoId (cinfoName c) WholeComponent) - KnownPackageName pn -> - let cn = mkComponentName pn ckind (mkUnqualComponentName str3) in - return (TargetComponentUnknown pn (Right cn) WholeComponent) - where - render (TargetComponent p c WholeComponent) = - [TargetStringFileStatus3 (dispP p) noFileStatus (dispCK c) (dispC p c)] - render (TargetComponentUnknown pn (Right c) WholeComponent) = - [TargetStringFileStatus3 (dispPN pn) noFileStatus (dispCK c) (dispC' pn c)] - render _ = [] - --- | Syntax: package : component : module --- --- > cabal build foo:foo:Data.Foo --- > cabal build foo/:foo:Data.Foo --- > cabal build foo.cabal:foo:Data.Foo --- -syntaxForm3PackageComponentModule :: [KnownPackage] -> Syntax -syntaxForm3PackageComponentModule ps = - syntaxForm3 render $ \str1 fstatus1 str2 str3 -> do - guardPackage str1 fstatus1 - guardComponentName str2 - guardModuleName str3 - p <- matchPackage ps str1 fstatus1 - case p of - KnownPackage{pinfoId, pinfoComponents} -> - orNoThingIn "package" (display (packageName pinfoId)) $ do - c <- matchComponentName pinfoComponents str2 - orNoThingIn "component" (cinfoStrName c) $ do - let ms = cinfoModules c - m <- matchModuleName ms str3 - return (TargetComponent pinfoId (cinfoName c) (ModuleTarget m)) - KnownPackageName pn -> do - let cn = mkUnqualComponentName str2 - m <- matchModuleNameUnknown str3 - return (TargetComponentUnknown 
pn (Left cn) (ModuleTarget m)) - where - render (TargetComponent p c (ModuleTarget m)) = - [TargetStringFileStatus3 (dispP p) noFileStatus (dispC p c) (dispM m)] - render (TargetComponentUnknown pn (Left c) (ModuleTarget m)) = - [TargetStringFileStatus3 (dispPN pn) noFileStatus (dispCN c) (dispM m)] - render _ = [] - --- | Syntax: namespace : component : module --- --- > cabal build lib:foo:Data.Foo --- -syntaxForm3KindComponentModule :: [KnownComponent] -> Syntax -syntaxForm3KindComponentModule cs = - syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do - ckind <- matchComponentKind str1 - guardComponentName str2 - guardModuleName str3 - c <- matchComponentKindAndName cs ckind str2 - orNoThingIn "component" (cinfoStrName c) $ do - let ms = cinfoModules c - m <- matchModuleName ms str3 - return (TargetComponent (cinfoPackageId c) (cinfoName c) - (ModuleTarget m)) - where - render (TargetComponent p c (ModuleTarget m)) = - [TargetStringFileStatus3 (dispCK c) noFileStatus (dispC p c) (dispM m)] - render _ = [] - --- | Syntax: package : component : filename --- --- > cabal build foo:foo:Data/Foo.hs --- > cabal build foo/:foo:Data/Foo.hs --- > cabal build foo.cabal:foo:Data/Foo.hs --- -syntaxForm3PackageComponentFile :: [KnownPackage] -> Syntax -syntaxForm3PackageComponentFile ps = - syntaxForm3 render $ \str1 fstatus1 str2 str3 -> do - guardPackage str1 fstatus1 - guardComponentName str2 - p <- matchPackage ps str1 fstatus1 - case p of - KnownPackage{pinfoId, pinfoComponents} -> - orNoThingIn "package" (display (packageName pinfoId)) $ do - c <- matchComponentName pinfoComponents str2 - orNoThingIn "component" (cinfoStrName c) $ do - (filepath, _) <- matchComponentFile [c] str3 - return (TargetComponent pinfoId (cinfoName c) (FileTarget filepath)) - KnownPackageName pn -> - let cn = mkUnqualComponentName str2 - filepath = str3 in - return (TargetComponentUnknown pn (Left cn) (FileTarget filepath)) - where - render (TargetComponent p c (FileTarget f)) = - 
[TargetStringFileStatus3 (dispP p) noFileStatus (dispC p c) f] - render (TargetComponentUnknown pn (Left c) (FileTarget f)) = - [TargetStringFileStatus3 (dispPN pn) noFileStatus (dispCN c) f] - render _ = [] - --- | Syntax: namespace : component : filename --- --- > cabal build lib:foo:Data/Foo.hs --- -syntaxForm3KindComponentFile :: [KnownComponent] -> Syntax -syntaxForm3KindComponentFile cs = - syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do - ckind <- matchComponentKind str1 - guardComponentName str2 - c <- matchComponentKindAndName cs ckind str2 - orNoThingIn "component" (cinfoStrName c) $ do - (filepath, _) <- matchComponentFile [c] str3 - return (TargetComponent (cinfoPackageId c) (cinfoName c) - (FileTarget filepath)) - where - render (TargetComponent p c (FileTarget f)) = - [TargetStringFileStatus3 (dispCK c) noFileStatus (dispC p c) f] - render _ = [] - -syntaxForm3NamespacePackageFilter :: [KnownPackage] -> Syntax -syntaxForm3NamespacePackageFilter ps = - syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do - guardNamespacePackage str1 - guardPackageName str2 - p <- matchPackage ps str2 noFileStatus - kfilter <- matchComponentKindFilter str3 - case p of - KnownPackage{pinfoId} -> - return (TargetPackage TargetExplicitNamed [pinfoId] (Just kfilter)) - KnownPackageName pn -> - return (TargetPackageNamed pn (Just kfilter)) - where - render (TargetPackage TargetExplicitNamed [p] (Just kfilter)) = - [TargetStringFileStatus3 "pkg" noFileStatus (dispP p) (dispF kfilter)] - render (TargetPackageNamed pn (Just kfilter)) = - [TargetStringFileStatus3 "pkg" noFileStatus (dispPN pn) (dispF kfilter)] - render _ = [] - --- - -syntaxForm4MetaNamespacePackageFilter :: [KnownPackage] -> Syntax -syntaxForm4MetaNamespacePackageFilter ps = - syntaxForm4 render $ \str1 str2 str3 str4 -> do - guardNamespaceMeta str1 - guardNamespacePackage str2 - guardPackageName str3 - p <- matchPackage ps str3 noFileStatus - kfilter <- matchComponentKindFilter str4 - case p of - 
KnownPackage{pinfoId} -> - return (TargetPackage TargetExplicitNamed [pinfoId] (Just kfilter)) - KnownPackageName pn -> - return (TargetPackageNamed pn (Just kfilter)) - where - render (TargetPackage TargetExplicitNamed [p] (Just kfilter)) = - [TargetStringFileStatus4 "" "pkg" (dispP p) (dispF kfilter)] - render (TargetPackageNamed pn (Just kfilter)) = - [TargetStringFileStatus4 "" "pkg" (dispPN pn) (dispF kfilter)] - render _ = [] - --- | Syntax: :pkg : package : namespace : component --- --- > cabal build :pkg:foo:lib:foo --- -syntaxForm5MetaNamespacePackageKindComponent :: [KnownPackage] -> Syntax -syntaxForm5MetaNamespacePackageKindComponent ps = - syntaxForm5 render $ \str1 str2 str3 str4 str5 -> do - guardNamespaceMeta str1 - guardNamespacePackage str2 - guardPackageName str3 - ckind <- matchComponentKind str4 - guardComponentName str5 - p <- matchPackage ps str3 noFileStatus - case p of - KnownPackage{pinfoId, pinfoComponents} -> - orNoThingIn "package" (display (packageName pinfoId)) $ do - c <- matchComponentKindAndName pinfoComponents ckind str5 - return (TargetComponent pinfoId (cinfoName c) WholeComponent) - KnownPackageName pn -> - let cn = mkComponentName pn ckind (mkUnqualComponentName str5) in - return (TargetComponentUnknown pn (Right cn) WholeComponent) - where - render (TargetComponent p c WholeComponent) = - [TargetStringFileStatus5 "" "pkg" (dispP p) (dispCK c) (dispC p c)] - render (TargetComponentUnknown pn (Right c) WholeComponent) = - [TargetStringFileStatus5 "" "pkg" (dispPN pn) (dispCK c) (dispC' pn c)] - render _ = [] - --- | Syntax: :pkg : package : namespace : component : module : module --- --- > cabal build :pkg:foo:lib:foo:module:Data.Foo --- -syntaxForm7MetaNamespacePackageKindComponentNamespaceModule - :: [KnownPackage] -> Syntax -syntaxForm7MetaNamespacePackageKindComponentNamespaceModule ps = - syntaxForm7 render $ \str1 str2 str3 str4 str5 str6 str7 -> do - guardNamespaceMeta str1 - guardNamespacePackage str2 - guardPackageName 
str3 - ckind <- matchComponentKind str4 - guardComponentName str5 - guardNamespaceModule str6 - p <- matchPackage ps str3 noFileStatus - case p of - KnownPackage{pinfoId, pinfoComponents} -> - orNoThingIn "package" (display (packageName pinfoId)) $ do - c <- matchComponentKindAndName pinfoComponents ckind str5 - orNoThingIn "component" (cinfoStrName c) $ do - let ms = cinfoModules c - m <- matchModuleName ms str7 - return (TargetComponent pinfoId (cinfoName c) (ModuleTarget m)) - KnownPackageName pn -> do - let cn = mkComponentName pn ckind (mkUnqualComponentName str2) - m <- matchModuleNameUnknown str7 - return (TargetComponentUnknown pn (Right cn) (ModuleTarget m)) - where - render (TargetComponent p c (ModuleTarget m)) = - [TargetStringFileStatus7 "" "pkg" (dispP p) - (dispCK c) (dispC p c) - "module" (dispM m)] - render (TargetComponentUnknown pn (Right c) (ModuleTarget m)) = - [TargetStringFileStatus7 "" "pkg" (dispPN pn) - (dispCK c) (dispC' pn c) - "module" (dispM m)] - render _ = [] - --- | Syntax: :pkg : package : namespace : component : file : filename --- --- > cabal build :pkg:foo:lib:foo:file:Data/Foo.hs --- -syntaxForm7MetaNamespacePackageKindComponentNamespaceFile - :: [KnownPackage] -> Syntax -syntaxForm7MetaNamespacePackageKindComponentNamespaceFile ps = - syntaxForm7 render $ \str1 str2 str3 str4 str5 str6 str7 -> do - guardNamespaceMeta str1 - guardNamespacePackage str2 - guardPackageName str3 - ckind <- matchComponentKind str4 - guardComponentName str5 - guardNamespaceFile str6 - p <- matchPackage ps str3 noFileStatus - case p of - KnownPackage{pinfoId, pinfoComponents} -> - orNoThingIn "package" (display (packageName pinfoId)) $ do - c <- matchComponentKindAndName pinfoComponents ckind str5 - orNoThingIn "component" (cinfoStrName c) $ do - (filepath,_) <- matchComponentFile [c] str7 - return (TargetComponent pinfoId (cinfoName c) (FileTarget filepath)) - KnownPackageName pn -> - let cn = mkComponentName pn ckind (mkUnqualComponentName str5) - 
filepath = str7 in - return (TargetComponentUnknown pn (Right cn) (FileTarget filepath)) - where - render (TargetComponent p c (FileTarget f)) = - [TargetStringFileStatus7 "" "pkg" (dispP p) - (dispCK c) (dispC p c) - "file" f] - render (TargetComponentUnknown pn (Right c) (FileTarget f)) = - [TargetStringFileStatus7 "" "pkg" (dispPN pn) - (dispCK c) (dispC' pn c) - "file" f] - render _ = [] - - ---------------------------------------- --- Syntax utils --- - -type Match1 = String -> FileStatus -> Match TargetSelector -type Match2 = String -> FileStatus -> String - -> Match TargetSelector -type Match3 = String -> FileStatus -> String -> String - -> Match TargetSelector -type Match4 = String -> String -> String -> String - -> Match TargetSelector -type Match5 = String -> String -> String -> String -> String - -> Match TargetSelector -type Match7 = String -> String -> String -> String -> String -> String -> String - -> Match TargetSelector - -syntaxForm1 :: Renderer -> Match1 -> Syntax -syntaxForm2 :: Renderer -> Match2 -> Syntax -syntaxForm3 :: Renderer -> Match3 -> Syntax -syntaxForm4 :: Renderer -> Match4 -> Syntax -syntaxForm5 :: Renderer -> Match5 -> Syntax -syntaxForm7 :: Renderer -> Match7 -> Syntax - -syntaxForm1 render f = - Syntax QL1 match render - where - match = \(TargetStringFileStatus1 str1 fstatus1) -> - f str1 fstatus1 - -syntaxForm2 render f = - Syntax QL2 match render - where - match = \(TargetStringFileStatus2 str1 fstatus1 str2) -> - f str1 fstatus1 str2 - -syntaxForm3 render f = - Syntax QL3 match render - where - match = \(TargetStringFileStatus3 str1 fstatus1 str2 str3) -> - f str1 fstatus1 str2 str3 - -syntaxForm4 render f = - Syntax QLFull match render - where - match (TargetStringFileStatus4 str1 str2 str3 str4) - = f str1 str2 str3 str4 - match _ = mzero - -syntaxForm5 render f = - Syntax QLFull match render - where - match (TargetStringFileStatus5 str1 str2 str3 str4 str5) - = f str1 str2 str3 str4 str5 - match _ = mzero - -syntaxForm7 
render f = - Syntax QLFull match render - where - match (TargetStringFileStatus7 str1 str2 str3 str4 str5 str6 str7) - = f str1 str2 str3 str4 str5 str6 str7 - match _ = mzero - -dispP :: Package p => p -> String -dispP = display . packageName - -dispPN :: PackageName -> String -dispPN = display - -dispC :: PackageId -> ComponentName -> String -dispC = componentStringName . packageName - -dispC' :: PackageName -> ComponentName -> String -dispC' = componentStringName - -dispCN :: UnqualComponentName -> String -dispCN = display - -dispK :: ComponentKind -> String -dispK = showComponentKindShort - -dispCK :: ComponentName -> String -dispCK = dispK . componentKind - -dispF :: ComponentKind -> String -dispF = showComponentKindFilterShort - -dispM :: ModuleName -> String -dispM = display - - -------------------------------- --- Package and component info --- - -data KnownTargets = KnownTargets { - knownPackagesAll :: [KnownPackage], - knownPackagesPrimary :: [KnownPackage], - knownPackagesOther :: [KnownPackage], - knownComponentsAll :: [KnownComponent], - knownComponentsPrimary :: [KnownComponent], - knownComponentsOther :: [KnownComponent] - } - deriving Show - -data KnownPackage = - KnownPackage { - pinfoId :: PackageId, - pinfoDirectory :: Maybe (FilePath, FilePath), - pinfoPackageFile :: Maybe (FilePath, FilePath), - pinfoComponents :: [KnownComponent] - } - | KnownPackageName { - pinfoName :: PackageName - } - deriving Show - -data KnownComponent = KnownComponent { - cinfoName :: ComponentName, - cinfoStrName :: ComponentStringName, - cinfoPackageId :: PackageId, - cinfoSrcDirs :: [FilePath], - cinfoModules :: [ModuleName], - cinfoHsFiles :: [FilePath], -- other hs files (like main.hs) - cinfoCFiles :: [FilePath], - cinfoJsFiles :: [FilePath] - } - deriving Show - -type ComponentStringName = String - -knownPackageName :: KnownPackage -> PackageName -knownPackageName KnownPackage{pinfoId} = packageName pinfoId -knownPackageName KnownPackageName{pinfoName} = 
pinfoName - -emptyKnownTargets :: KnownTargets -emptyKnownTargets = KnownTargets [] [] [] [] [] [] - -getKnownTargets :: (Applicative m, Monad m) - => DirActions m - -> [PackageSpecifier (SourcePackage (PackageLocation a))] - -> m KnownTargets -getKnownTargets dirActions@DirActions{..} pkgs = do - pinfo <- mapM (collectKnownPackageInfo dirActions) pkgs - cwd <- getCurrentDirectory - let (ppinfo, opinfo) = selectPrimaryPackage cwd pinfo - return KnownTargets { - knownPackagesAll = pinfo, - knownPackagesPrimary = ppinfo, - knownPackagesOther = opinfo, - knownComponentsAll = allComponentsIn pinfo, - knownComponentsPrimary = allComponentsIn ppinfo, - knownComponentsOther = allComponentsIn opinfo - } - where - selectPrimaryPackage :: FilePath - -> [KnownPackage] - -> ([KnownPackage], [KnownPackage]) - selectPrimaryPackage cwd = partition isPkgDirCwd - where - isPkgDirCwd KnownPackage { pinfoDirectory = Just (dir,_) } - | dir == cwd = True - isPkgDirCwd _ = False - allComponentsIn ps = - [ c | KnownPackage{pinfoComponents} <- ps, c <- pinfoComponents ] - - -collectKnownPackageInfo :: (Applicative m, Monad m) => DirActions m - -> PackageSpecifier (SourcePackage (PackageLocation a)) - -> m KnownPackage -collectKnownPackageInfo _ (NamedPackage pkgname _props) = - return (KnownPackageName pkgname) -collectKnownPackageInfo dirActions@DirActions{..} - (SpecificSourcePackage SourcePackage { - packageDescription = pkg, - packageSource = loc - }) = do - (pkgdir, pkgfile) <- - case loc of - --TODO: local tarballs, remote tarballs etc - LocalUnpackedPackage dir -> do - dirabs <- canonicalizePath dir - dirrel <- makeRelativeToCwd dirActions dirabs - --TODO: ought to get this earlier in project reading - let fileabs = dirabs display (packageName pkg) <.> "cabal" - filerel = dirrel display (packageName pkg) <.> "cabal" - exists <- doesFileExist fileabs - return ( Just (dirabs, dirrel) - , if exists then Just (fileabs, filerel) else Nothing - ) - _ -> return (Nothing, Nothing) - let 
pinfo = - KnownPackage { - pinfoId = packageId pkg, - pinfoDirectory = pkgdir, - pinfoPackageFile = pkgfile, - pinfoComponents = collectKnownComponentInfo - (flattenPackageDescription pkg) - } - return pinfo - - -collectKnownComponentInfo :: PackageDescription -> [KnownComponent] -collectKnownComponentInfo pkg = - [ KnownComponent { - cinfoName = componentName c, - cinfoStrName = componentStringName (packageName pkg) (componentName c), - cinfoPackageId = packageId pkg, - cinfoSrcDirs = ordNub (hsSourceDirs bi), - cinfoModules = ordNub (componentModules c), - cinfoHsFiles = ordNub (componentHsFiles c), - cinfoCFiles = ordNub (cSources bi), - cinfoJsFiles = ordNub (jsSources bi) - } - | c <- pkgComponents pkg - , let bi = componentBuildInfo c ] - - -componentStringName :: PackageName -> ComponentName -> ComponentStringName -componentStringName pkgname CLibName = display pkgname -componentStringName _ (CSubLibName name) = unUnqualComponentName name -componentStringName _ (CFLibName name) = unUnqualComponentName name -componentStringName _ (CExeName name) = unUnqualComponentName name -componentStringName _ (CTestName name) = unUnqualComponentName name -componentStringName _ (CBenchName name) = unUnqualComponentName name - -componentModules :: Component -> [ModuleName] --- I think it's unlikely users will ask to build a requirement --- which is not mentioned locally. 
-componentModules (CLib lib) = explicitLibModules lib -componentModules (CFLib flib) = foreignLibModules flib -componentModules (CExe exe) = exeModules exe -componentModules (CTest test) = testModules test -componentModules (CBench bench) = benchmarkModules bench - -componentHsFiles :: Component -> [FilePath] -componentHsFiles (CExe exe) = [modulePath exe] -componentHsFiles (CTest TestSuite { - testInterface = TestSuiteExeV10 _ mainfile - }) = [mainfile] -componentHsFiles (CBench Benchmark { - benchmarkInterface = BenchmarkExeV10 _ mainfile - }) = [mainfile] -componentHsFiles _ = [] - - ------------------------------- --- Matching meta targets --- - -guardNamespaceMeta :: String -> Match () -guardNamespaceMeta = guardToken [""] "meta namespace" - -guardMetaAll :: String -> Match () -guardMetaAll = guardToken ["all"] "meta-target 'all'" - -guardNamespacePackage :: String -> Match () -guardNamespacePackage = guardToken ["pkg", "package"] "'pkg' namespace" - -guardNamespaceCwd :: String -> Match () -guardNamespaceCwd = guardToken ["cwd"] "'cwd' namespace" - -guardNamespaceModule :: String -> Match () -guardNamespaceModule = guardToken ["mod", "module"] "'module' namespace" - -guardNamespaceFile :: String -> Match () -guardNamespaceFile = guardToken ["file"] "'file' namespace" - -guardToken :: [String] -> String -> String -> Match () -guardToken tokens msg s - | caseFold s `elem` tokens = increaseConfidence - | otherwise = matchErrorExpected msg s - - ------------------------------- --- Matching component kinds --- - -componentKind :: ComponentName -> ComponentKind -componentKind CLibName = LibKind -componentKind (CSubLibName _) = LibKind -componentKind (CFLibName _) = FLibKind -componentKind (CExeName _) = ExeKind -componentKind (CTestName _) = TestKind -componentKind (CBenchName _) = BenchKind - -cinfoKind :: KnownComponent -> ComponentKind -cinfoKind = componentKind . 
cinfoName - -matchComponentKind :: String -> Match ComponentKind -matchComponentKind s - | s' `elem` liblabels = increaseConfidence >> return LibKind - | s' `elem` fliblabels = increaseConfidence >> return FLibKind - | s' `elem` exelabels = increaseConfidence >> return ExeKind - | s' `elem` testlabels = increaseConfidence >> return TestKind - | s' `elem` benchlabels = increaseConfidence >> return BenchKind - | otherwise = matchErrorExpected "component kind" s - where - s' = caseFold s - liblabels = ["lib", "library"] - fliblabels = ["flib", "foreign-library"] - exelabels = ["exe", "executable"] - testlabels = ["tst", "test", "test-suite"] - benchlabels = ["bench", "benchmark"] - -matchComponentKindFilter :: String -> Match ComponentKind -matchComponentKindFilter s - | s' `elem` liblabels = increaseConfidence >> return LibKind - | s' `elem` fliblabels = increaseConfidence >> return FLibKind - | s' `elem` exelabels = increaseConfidence >> return ExeKind - | s' `elem` testlabels = increaseConfidence >> return TestKind - | s' `elem` benchlabels = increaseConfidence >> return BenchKind - | otherwise = matchErrorExpected "component kind filter" s - where - s' = caseFold s - liblabels = ["libs", "libraries"] - fliblabels = ["flibs", "foreign-libraries"] - exelabels = ["exes", "executables"] - testlabels = ["tests", "test-suites"] - benchlabels = ["benches", "benchmarks"] - -showComponentKind :: ComponentKind -> String -showComponentKind LibKind = "library" -showComponentKind FLibKind = "foreign library" -showComponentKind ExeKind = "executable" -showComponentKind TestKind = "test-suite" -showComponentKind BenchKind = "benchmark" - -showComponentKindShort :: ComponentKind -> String -showComponentKindShort LibKind = "lib" -showComponentKindShort FLibKind = "flib" -showComponentKindShort ExeKind = "exe" -showComponentKindShort TestKind = "test" -showComponentKindShort BenchKind = "bench" - -showComponentKindFilterShort :: ComponentKind -> String -showComponentKindFilterShort 
LibKind = "libs" -showComponentKindFilterShort FLibKind = "flibs" -showComponentKindFilterShort ExeKind = "exes" -showComponentKindFilterShort TestKind = "tests" -showComponentKindFilterShort BenchKind = "benchmarks" - - ------------------------------- --- Matching package targets --- - -guardPackage :: String -> FileStatus -> Match () -guardPackage str fstatus = - guardPackageName str - <|> guardPackageDir str fstatus - <|> guardPackageFile str fstatus - - -guardPackageName :: String -> Match () -guardPackageName s - | validPackageName s = increaseConfidence - | otherwise = matchErrorExpected "package name" s - -validPackageName :: String -> Bool -validPackageName s = - all validPackageNameChar s - && not (null s) - where - validPackageNameChar c = isAlphaNum c || c == '-' - - -guardPackageDir :: String -> FileStatus -> Match () -guardPackageDir _ (FileStatusExistsDir _) = increaseConfidence -guardPackageDir str _ = matchErrorExpected "package directory" str - - -guardPackageFile :: String -> FileStatus -> Match () -guardPackageFile _ (FileStatusExistsFile file) - | takeExtension file == ".cabal" - = increaseConfidence -guardPackageFile str _ = matchErrorExpected "package .cabal file" str - - -matchPackage :: [KnownPackage] -> String -> FileStatus -> Match KnownPackage -matchPackage pinfo = \str fstatus -> - orNoThingIn "project" "" $ - matchPackageName pinfo str - (matchPackageNameUnknown str - <|> matchPackageDir pinfo str fstatus - <|> matchPackageFile pinfo str fstatus) - - -matchPackageName :: [KnownPackage] -> String -> Match KnownPackage -matchPackageName ps = \str -> do - guard (validPackageName str) - orNoSuchThing "package" str - (map (display . knownPackageName) ps) $ - increaseConfidenceFor $ - matchInexactly caseFold (display . 
knownPackageName) ps str - - -matchPackageNameUnknown :: String -> Match KnownPackage -matchPackageNameUnknown str = do - pn <- matchParse str - unknownMatch (KnownPackageName pn) - - -matchPackageDir :: [KnownPackage] - -> String -> FileStatus -> Match KnownPackage -matchPackageDir ps = \str fstatus -> - case fstatus of - FileStatusExistsDir canondir -> - orNoSuchThing "package directory" str (map (snd . fst) dirs) $ - increaseConfidenceFor $ - fmap snd $ matchExactly (fst . fst) dirs canondir - _ -> mzero - where - dirs = [ ((dabs,drel),p) - | p@KnownPackage{ pinfoDirectory = Just (dabs,drel) } <- ps ] - - -matchPackageFile :: [KnownPackage] -> String -> FileStatus -> Match KnownPackage -matchPackageFile ps = \str fstatus -> do - case fstatus of - FileStatusExistsFile canonfile -> - orNoSuchThing "package .cabal file" str (map (snd . fst) files) $ - increaseConfidenceFor $ - fmap snd $ matchExactly (fst . fst) files canonfile - _ -> mzero - where - files = [ ((fabs,frel),p) - | p@KnownPackage{ pinfoPackageFile = Just (fabs,frel) } <- ps ] - ---TODO: test outcome when dir exists but doesn't match any known one - ---TODO: perhaps need another distinction, vs no such thing, point is the --- thing is not known, within the project, but could be outside project - - ------------------------------- --- Matching component targets --- - - -guardComponentName :: String -> Match () -guardComponentName s - | all validComponentChar s - && not (null s) = increaseConfidence - | otherwise = matchErrorExpected "component name" s - where - validComponentChar c = isAlphaNum c || c == '.' 
- || c == '_' || c == '-' || c == '\'' - - -matchComponentName :: [KnownComponent] -> String -> Match KnownComponent -matchComponentName cs str = - orNoSuchThing "component" str (map cinfoStrName cs) - $ increaseConfidenceFor - $ matchInexactly caseFold cinfoStrName cs str - - -matchComponentKindAndName :: [KnownComponent] -> ComponentKind -> String - -> Match KnownComponent -matchComponentKindAndName cs ckind str = - orNoSuchThing (showComponentKind ckind ++ " component") str - (map render cs) - $ increaseConfidenceFor - $ matchInexactly (\(ck, cn) -> (ck, caseFold cn)) - (\c -> (cinfoKind c, cinfoStrName c)) - cs - (ckind, str) - where - render c = showComponentKindShort (cinfoKind c) ++ ":" ++ cinfoStrName c - - ------------------------------- --- Matching module targets --- - -guardModuleName :: String -> Match () -guardModuleName s = - case simpleParse s :: Maybe ModuleName of - Just _ -> increaseConfidence - _ | all validModuleChar s - && not (null s) -> return () - | otherwise -> matchErrorExpected "module name" s - where - validModuleChar c = isAlphaNum c || c == '.' || c == '_' || c == '\'' - - -matchModuleName :: [ModuleName] -> String -> Match ModuleName -matchModuleName ms str = - orNoSuchThing "module" str (map display ms) - $ increaseConfidenceFor - $ matchInexactly caseFold display ms str - - -matchModuleNameAnd :: [(ModuleName, a)] -> String -> Match (ModuleName, a) -matchModuleNameAnd ms str = - orNoSuchThing "module" str (map (display . fst) ms) - $ increaseConfidenceFor - $ matchInexactly caseFold (display . 
fst) ms str - - -matchModuleNameUnknown :: String -> Match ModuleName -matchModuleNameUnknown str = - expecting "module" str - $ increaseConfidenceFor - $ matchParse str - - ------------------------------- --- Matching file targets --- - -matchPackageDirectoryPrefix :: [KnownPackage] -> FileStatus - -> Match (FilePath, KnownPackage) -matchPackageDirectoryPrefix ps (FileStatusExistsFile filepath) = - increaseConfidenceFor $ - matchDirectoryPrefix pkgdirs filepath - where - pkgdirs = [ (dir, p) - | p@KnownPackage { pinfoDirectory = Just (dir,_) } <- ps ] -matchPackageDirectoryPrefix _ _ = mzero - - -matchComponentFile :: [KnownComponent] -> String - -> Match (FilePath, KnownComponent) -matchComponentFile cs str = - orNoSuchThing "file" str [] $ - matchComponentModuleFile cs str - <|> matchComponentOtherFile cs str - - -matchComponentOtherFile :: [KnownComponent] -> String - -> Match (FilePath, KnownComponent) -matchComponentOtherFile cs = - matchFile - [ (file, c) - | c <- cs - , file <- cinfoHsFiles c - ++ cinfoCFiles c - ++ cinfoJsFiles c - ] - - -matchComponentModuleFile :: [KnownComponent] -> String - -> Match (FilePath, KnownComponent) -matchComponentModuleFile cs str = do - matchFile - [ (normalise (d toFilePath m), c) - | c <- cs - , d <- cinfoSrcDirs c - , m <- cinfoModules c - ] - (dropExtension (normalise str)) - --- utils - -matchFile :: [(FilePath, a)] -> FilePath -> Match (FilePath, a) -matchFile fs = - increaseConfidenceFor - . 
matchInexactly caseFold fst fs - -matchDirectoryPrefix :: [(FilePath, a)] -> FilePath -> Match (FilePath, a) -matchDirectoryPrefix dirs filepath = - tryEach $ - [ (file, x) - | (dir,x) <- dirs - , file <- maybeToList (stripDirectory dir) ] - where - stripDirectory :: FilePath -> Maybe FilePath - stripDirectory dir = - joinPath `fmap` stripPrefix (splitDirectories dir) filepathsplit - - filepathsplit = splitDirectories filepath - - ------------------------------- --- Matching monad --- - --- | A matcher embodies a way to match some input as being some recognised --- value. In particular it deals with multiple and ambiguous matches. --- --- There are various matcher primitives ('matchExactly', 'matchInexactly'), --- ways to combine matchers ('matchPlus', 'matchPlusShadowing') and finally we --- can run a matcher against an input using 'findMatch'. --- -data Match a = NoMatch !Confidence [MatchError] - | Match !MatchClass !Confidence [a] - deriving Show - --- | The kind of match, inexact or exact. We keep track of this so we can --- prefer exact over inexact matches. The 'Ord' here is important: we try --- to maximise this, so 'Exact' is the top value and 'Inexact' the bottom. --- -data MatchClass = Unknown -- ^ Matches an unknown thing e.g. parses as a package - -- name without it being a specific known package - | Inexact -- ^ Matches a known thing inexactly - -- e.g. matches a known package case insensitively - | Exact -- ^ Exactly matches a known thing, - -- e.g. 
matches a known package case sensitively - deriving (Show, Eq, Ord) - -type Confidence = Int - -data MatchError = MatchErrorExpected String String -- thing got - | MatchErrorNoSuch String String [String] -- thing got alts - | MatchErrorIn String String MatchError -- kind thing - deriving (Show, Eq) - - -instance Functor Match where - fmap _ (NoMatch d ms) = NoMatch d ms - fmap f (Match m d xs) = Match m d (fmap f xs) - -instance Applicative Match where - pure a = Match Exact 0 [a] - (<*>) = ap - -instance Alternative Match where - empty = NoMatch 0 [] - (<|>) = matchPlus - -instance Monad Match where - return = pure - NoMatch d ms >>= _ = NoMatch d ms - Match m d xs >>= f = - -- To understand this, it needs to be read in context with the - -- implementation of 'matchPlus' below - case msum (map f xs) of - Match m' d' xs' -> Match (min m m') (d + d') xs' - -- The minimum match class is the one we keep. The match depth is - -- tracked but not used in the Match case. - - NoMatch d' ms -> NoMatch (d + d') ms - -- Here is where we transfer the depth we were keeping track of in - -- the Match case over to the NoMatch case where it finally gets used. - -instance MonadPlus Match where - mzero = empty - mplus = matchPlus - -() :: Match a -> Match a -> Match a -() = matchPlusShadowing - -infixl 3 - --- | Combine two matchers. Exact matches are used over inexact matches --- but if we have multiple exact, or inexact then the we collect all the --- ambiguous matches. --- --- This operator is associative, has unit 'mzero' and is also commutative. --- -matchPlus :: Match a -> Match a -> Match a -matchPlus a@(Match _ _ _ ) (NoMatch _ _) = a -matchPlus (NoMatch _ _ ) b@(Match _ _ _) = b -matchPlus a@(NoMatch d_a ms_a) b@(NoMatch d_b ms_b) - | d_a > d_b = a -- We only really make use of the depth in the NoMatch case. 
- | d_a < d_b = b - | otherwise = NoMatch d_a (ms_a ++ ms_b) -matchPlus a@(Match m_a d_a xs_a) b@(Match m_b d_b xs_b) - | m_a > m_b = a -- exact over inexact - | m_a < m_b = b -- exact over inexact - | otherwise = Match m_a (max d_a d_b) (xs_a ++ xs_b) - --- | Combine two matchers. This is similar to 'matchPlus' with the --- difference that an exact match from the left matcher shadows any exact --- match on the right. Inexact matches are still collected however. --- --- This operator is associative, has unit 'mzero' and is not commutative. --- -matchPlusShadowing :: Match a -> Match a -> Match a -matchPlusShadowing a@(Match Exact _ _) _ = a -matchPlusShadowing a b = matchPlus a b - - ------------------------------- --- Various match primitives --- - -matchErrorExpected :: String -> String -> Match a -matchErrorExpected thing got = NoMatch 0 [MatchErrorExpected thing got] - -matchErrorNoSuch :: String -> String -> [String] -> Match a -matchErrorNoSuch thing got alts = NoMatch 0 [MatchErrorNoSuch thing got alts] - -expecting :: String -> String -> Match a -> Match a -expecting thing got (NoMatch 0 _) = matchErrorExpected thing got -expecting _ _ m = m - -orNoSuchThing :: String -> String -> [String] -> Match a -> Match a -orNoSuchThing thing got alts (NoMatch 0 _) = matchErrorNoSuch thing got alts -orNoSuchThing _ _ _ m = m - -orNoThingIn :: String -> String -> Match a -> Match a -orNoThingIn kind name (NoMatch n ms) = - NoMatch n [ MatchErrorIn kind name m | m <- ms ] -orNoThingIn _ _ m = m - -increaseConfidence :: Match () -increaseConfidence = Match Exact 1 [()] - -increaseConfidenceFor :: Match a -> Match a -increaseConfidenceFor m = m >>= \r -> increaseConfidence >> return r - -nubMatchesBy :: (a -> a -> Bool) -> Match a -> Match a -nubMatchesBy _ (NoMatch d msgs) = NoMatch d msgs -nubMatchesBy eq (Match m d xs) = Match m d (nubBy eq xs) - --- | Lift a list of matches to an exact match. 
--- -exactMatches, inexactMatches :: [a] -> Match a - -exactMatches [] = mzero -exactMatches xs = Match Exact 0 xs - -inexactMatches [] = mzero -inexactMatches xs = Match Inexact 0 xs - -unknownMatch :: a -> Match a -unknownMatch x = Match Unknown 0 [x] - -tryEach :: [a] -> Match a -tryEach = exactMatches - - ------------------------------- --- Top level match runner --- - --- | Given a matcher and a key to look up, use the matcher to find all the --- possible matches. There may be 'None', a single 'Unambiguous' match or --- you may have an 'Ambiguous' match with several possibilities. --- -findMatch :: Match a -> MaybeAmbiguous a -findMatch match = case match of - NoMatch _ msgs -> None msgs - Match _ _ [x] -> Unambiguous x - Match m d [] -> error $ "findMatch: impossible: " ++ show match' - where match' = Match m d [] :: Match () - -- TODO: Maybe use Data.List.NonEmpty inside - -- Match so that this case would be correct - -- by construction? - Match m _ xs -> Ambiguous m xs - -data MaybeAmbiguous a = None [MatchError] - | Unambiguous a - | Ambiguous MatchClass [a] - deriving Show - - ------------------------------- --- Basic matchers --- - --- | A primitive matcher that looks up a value in a finite 'Map'. The --- value must match exactly. --- -matchExactly :: Ord k => (a -> k) -> [a] -> (k -> Match a) -matchExactly key xs = - \k -> case Map.lookup k m of - Nothing -> mzero - Just ys -> exactMatches ys - where - m = Map.fromListWith (++) [ (key x, [x]) | x <- xs ] - --- | A primitive matcher that looks up a value in a finite 'Map'. It checks --- for an exact or inexact match. We get an inexact match if the match --- is not exact, but the canonical forms match. It takes a canonicalisation --- function for this purpose. --- --- So for example if we used string case fold as the canonicalisation --- function, then we would get case insensitive matching (but it will still --- report an exact match when the case matches too). 
--- -matchInexactly :: (Ord k, Ord k') => (k -> k') -> (a -> k) - -> [a] -> (k -> Match a) -matchInexactly cannonicalise key xs = - \k -> case Map.lookup k m of - Just ys -> exactMatches ys - Nothing -> case Map.lookup (cannonicalise k) m' of - Just ys -> inexactMatches ys - Nothing -> mzero - where - m = Map.fromListWith (++) [ (key x, [x]) | x <- xs ] - - -- the map of canonicalised keys to groups of inexact matches - m' = Map.mapKeysWith (++) cannonicalise m - -matchParse :: Text a => String -> Match a -matchParse = maybe mzero return . simpleParse - - ------------------------------- --- Utils --- - -caseFold :: String -> String -caseFold = lowercase - --- | Make a 'ComponentName' given an 'UnqualComponentName' and knowing the --- 'ComponentKind'. We also need the 'PackageName' to distinguish the package's --- primary library from named private libraries. --- -mkComponentName :: PackageName - -> ComponentKind - -> UnqualComponentName - -> ComponentName -mkComponentName pkgname ckind ucname = - case ckind of - LibKind - | packageNameToUnqualComponentName pkgname == ucname - -> CLibName - | otherwise -> CSubLibName ucname - FLibKind -> CFLibName ucname - ExeKind -> CExeName ucname - TestKind -> CTestName ucname - BenchKind -> CBenchName ucname - - ------------------------------- --- Example inputs --- - -{- -ex1pinfo :: [KnownPackage] -ex1pinfo = - [ addComponent (CExeName (mkUnqualComponentName "foo-exe")) [] ["Data.Foo"] $ - KnownPackage { - pinfoId = PackageIdentifier (mkPackageName "foo") (mkVersion [1]), - pinfoDirectory = Just ("/the/foo", "foo"), - pinfoPackageFile = Just ("/the/foo/foo.cabal", "foo/foo.cabal"), - pinfoComponents = [] - } - , KnownPackage { - pinfoId = PackageIdentifier (mkPackageName "bar") (mkVersion [1]), - pinfoDirectory = Just ("/the/bar", "bar"), - pinfoPackageFile = Just ("/the/bar/bar.cabal", "bar/bar.cabal"), - pinfoComponents = [] - } - ] - where - addComponent n ds ms p = - p { - pinfoComponents = - KnownComponent n 
(componentStringName (pinfoId p) n) - p ds (map mkMn ms) - [] [] [] - : pinfoComponents p - } - - mkMn :: String -> ModuleName - mkMn = ModuleName.fromString --} -{- -stargets = - [ TargetComponent (CExeName "foo") WholeComponent - , TargetComponent (CExeName "foo") (ModuleTarget (mkMn "Foo")) - , TargetComponent (CExeName "tst") (ModuleTarget (mkMn "Foo")) - ] - where - mkMn :: String -> ModuleName - mkMn = fromJust . simpleParse - -ex_pkgid :: PackageIdentifier -Just ex_pkgid = simpleParse "thelib" --} - -{- -ex_cs :: [KnownComponent] -ex_cs = - [ (mkC (CExeName "foo") ["src1", "src1/src2"] ["Foo", "Src2.Bar", "Bar"]) - , (mkC (CExeName "tst") ["src1", "test"] ["Foo"]) - ] - where - mkC n ds ms = KnownComponent n (componentStringName n) ds (map mkMn ms) - mkMn :: String -> ModuleName - mkMn = fromJust . simpleParse - pkgid :: PackageIdentifier - Just pkgid = simpleParse "thelib" --} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Targets.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Targets.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Targets.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Targets.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,781 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} - ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Targets --- Copyright : (c) Duncan Coutts 2011 --- License : BSD-like --- --- Maintainer : duncan@community.haskell.org --- --- Handling for user-specified targets ------------------------------------------------------------------------------ -module Distribution.Client.Targets ( - -- * User targets - 
UserTarget(..), - readUserTargets, - - -- * Resolving user targets to package specifiers - resolveUserTargets, - - -- ** Detailed interface - UserTargetProblem(..), - readUserTarget, - reportUserTargetProblems, - expandUserTarget, - - PackageTarget(..), - fetchPackageTarget, - readPackageTarget, - - PackageTargetProblem(..), - reportPackageTargetProblems, - - disambiguatePackageTargets, - disambiguatePackageName, - - -- * User constraints - UserQualifier(..), - UserConstraintScope(..), - UserConstraint(..), - userConstraintPackageName, - readUserConstraint, - userToPackageConstraint, - - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Package - ( Package(..), PackageName, unPackageName, mkPackageName - , PackageIdentifier(..), packageName, packageVersion ) -import Distribution.Types.Dependency -import Distribution.Client.Types - ( PackageLocation(..), ResolvedPkgLoc, UnresolvedSourcePackage - , PackageSpecifier(..) ) - -import Distribution.Solver.Types.OptionalStanza -import Distribution.Solver.Types.PackageConstraint -import Distribution.Solver.Types.PackagePath -import Distribution.Solver.Types.PackageIndex (PackageIndex) -import qualified Distribution.Solver.Types.PackageIndex as PackageIndex -import Distribution.Solver.Types.SourcePackage - -import qualified Distribution.Client.World as World -import qualified Codec.Archive.Tar as Tar -import qualified Codec.Archive.Tar.Entry as Tar -import qualified Distribution.Client.Tar as Tar -import Distribution.Client.FetchUtils -import Distribution.Client.Utils ( tryFindPackageDesc ) -import Distribution.Client.GlobalFlags - ( RepoContext(..) 
) - -import Distribution.PackageDescription - ( GenericPackageDescription, parseFlagAssignment, nullFlagAssignment ) -import Distribution.Version - ( nullVersion, thisVersion, anyVersion, isAnyVersion ) -import Distribution.Text - ( Text(..), display ) -import Distribution.Verbosity (Verbosity) -import Distribution.Simple.Utils - ( die', warn, lowercase ) - -import Distribution.PackageDescription.Parsec - ( readGenericPackageDescription, parseGenericPackageDescriptionMaybe ) - --- import Data.List ( find, nub ) -import Data.Either - ( partitionEithers ) -import qualified Data.Map as Map -import qualified Data.ByteString.Lazy as BS -import qualified Distribution.Client.GZipUtils as GZipUtils -import Control.Monad (mapM) -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.ReadP - ( (+++), (<++) ) -import Distribution.ParseUtils - ( readPToMaybe ) -import System.FilePath - ( takeExtension, dropExtension, takeDirectory, splitPath ) -import System.Directory - ( doesFileExist, doesDirectoryExist ) -import Network.URI - ( URI(..), URIAuth(..), parseAbsoluteURI ) - --- ------------------------------------------------------------ --- * User targets --- ------------------------------------------------------------ - --- | Various ways that a user may specify a package or package collection. --- -data UserTarget = - - -- | A partially specified package, identified by name and possibly with - -- an exact version or a version constraint. - -- - -- > cabal install foo - -- > cabal install foo-1.0 - -- > cabal install 'foo < 2' - -- - UserTargetNamed Dependency - - -- | A special virtual package that refers to the collection of packages - -- recorded in the world file that the user specifically installed. - -- - -- > cabal install world - -- - | UserTargetWorld - - -- | A specific package that is unpacked in a local directory, often the - -- current directory. - -- - -- > cabal install . 
- -- > cabal install ../lib/other - -- - -- * Note: in future, if multiple @.cabal@ files are allowed in a single - -- directory then this will refer to the collection of packages. - -- - | UserTargetLocalDir FilePath - - -- | A specific local unpacked package, identified by its @.cabal@ file. - -- - -- > cabal install foo.cabal - -- > cabal install ../lib/other/bar.cabal - -- - | UserTargetLocalCabalFile FilePath - - -- | A specific package that is available as a local tarball file - -- - -- > cabal install dist/foo-1.0.tar.gz - -- > cabal install ../build/baz-1.0.tar.gz - -- - | UserTargetLocalTarball FilePath - - -- | A specific package that is available as a remote tarball file - -- - -- > cabal install http://code.haskell.org/~user/foo/foo-0.9.tar.gz - -- - | UserTargetRemoteTarball URI - deriving (Show,Eq) - - --- ------------------------------------------------------------ --- * Parsing and checking user targets --- ------------------------------------------------------------ - -readUserTargets :: Verbosity -> [String] -> IO [UserTarget] -readUserTargets verbosity targetStrs = do - (problems, targets) <- liftM partitionEithers - (mapM readUserTarget targetStrs) - reportUserTargetProblems verbosity problems - return targets - - -data UserTargetProblem - = UserTargetUnexpectedFile String - | UserTargetNonexistantFile String - | UserTargetUnexpectedUriScheme String - | UserTargetUnrecognisedUri String - | UserTargetUnrecognised String - | UserTargetBadWorldPkg - deriving Show - -readUserTarget :: String -> IO (Either UserTargetProblem UserTarget) -readUserTarget targetstr = - case testNamedTargets targetstr of - Just (Dependency pkgn verrange) - | pkgn == mkPackageName "world" - -> return $ if verrange == anyVersion - then Right UserTargetWorld - else Left UserTargetBadWorldPkg - Just dep -> return (Right (UserTargetNamed dep)) - Nothing -> do - fileTarget <- testFileTargets targetstr - case fileTarget of - Just target -> return target - Nothing -> - case 
testUriTargets targetstr of - Just target -> return target - Nothing -> return (Left (UserTargetUnrecognised targetstr)) - where - testNamedTargets = readPToMaybe parseDependencyOrPackageId - - testFileTargets filename = do - isDir <- doesDirectoryExist filename - isFile <- doesFileExist filename - parentDirExists <- case takeDirectory filename of - [] -> return False - dir -> doesDirectoryExist dir - let result - | isDir - = Just (Right (UserTargetLocalDir filename)) - - | isFile && extensionIsTarGz filename - = Just (Right (UserTargetLocalTarball filename)) - - | isFile && takeExtension filename == ".cabal" - = Just (Right (UserTargetLocalCabalFile filename)) - - | isFile - = Just (Left (UserTargetUnexpectedFile filename)) - - | parentDirExists - = Just (Left (UserTargetNonexistantFile filename)) - - | otherwise - = Nothing - return result - - testUriTargets str = - case parseAbsoluteURI str of - Just uri@URI { - uriScheme = scheme, - uriAuthority = Just URIAuth { uriRegName = host } - } - | scheme /= "http:" && scheme /= "https:" -> - Just (Left (UserTargetUnexpectedUriScheme targetstr)) - - | null host -> - Just (Left (UserTargetUnrecognisedUri targetstr)) - - | otherwise -> - Just (Right (UserTargetRemoteTarball uri)) - _ -> Nothing - - extensionIsTarGz f = takeExtension f == ".gz" - && takeExtension (dropExtension f) == ".tar" - - parseDependencyOrPackageId :: Parse.ReadP r Dependency - parseDependencyOrPackageId = parse - +++ liftM pkgidToDependency parse - where - pkgidToDependency :: PackageIdentifier -> Dependency - pkgidToDependency p = case packageVersion p of - v | v == nullVersion -> Dependency (packageName p) anyVersion - | otherwise -> Dependency (packageName p) (thisVersion v) - - -reportUserTargetProblems :: Verbosity -> [UserTargetProblem] -> IO () -reportUserTargetProblems verbosity problems = do - case [ target | UserTargetUnrecognised target <- problems ] of - [] -> return () - target -> die' verbosity - $ unlines - [ "Unrecognised target '" 
++ name ++ "'." - | name <- target ] - ++ "Targets can be:\n" - ++ " - package names, e.g. 'pkgname', 'pkgname-1.0.1', 'pkgname < 2.0'\n" - ++ " - the special 'world' target\n" - ++ " - cabal files 'pkgname.cabal' or package directories 'pkgname/'\n" - ++ " - package tarballs 'pkgname.tar.gz' or 'http://example.com/pkgname.tar.gz'" - - case [ () | UserTargetBadWorldPkg <- problems ] of - [] -> return () - _ -> die' verbosity "The special 'world' target does not take any version." - - case [ target | UserTargetNonexistantFile target <- problems ] of - [] -> return () - target -> die' verbosity - $ unlines - [ "The file does not exist '" ++ name ++ "'." - | name <- target ] - - case [ target | UserTargetUnexpectedFile target <- problems ] of - [] -> return () - target -> die' verbosity - $ unlines - [ "Unrecognised file target '" ++ name ++ "'." - | name <- target ] - ++ "File targets can be either package tarballs 'pkgname.tar.gz' " - ++ "or cabal files 'pkgname.cabal'." - - case [ target | UserTargetUnexpectedUriScheme target <- problems ] of - [] -> return () - target -> die' verbosity - $ unlines - [ "URL target not supported '" ++ name ++ "'." - | name <- target ] - ++ "Only 'http://' and 'https://' URLs are supported." - - case [ target | UserTargetUnrecognisedUri target <- problems ] of - [] -> return () - target -> die' verbosity - $ unlines - [ "Unrecognise URL target '" ++ name ++ "'." - | name <- target ] - - --- ------------------------------------------------------------ --- * Resolving user targets to package specifiers --- ------------------------------------------------------------ - --- | Given a bunch of user-specified targets, try to resolve what it is they --- refer to. They can either be specific packages (local dirs, tarballs etc) --- or they can be named packages (with or without version info). 
--- -resolveUserTargets :: Package pkg - => Verbosity - -> RepoContext - -> FilePath - -> PackageIndex pkg - -> [UserTarget] - -> IO [PackageSpecifier UnresolvedSourcePackage] -resolveUserTargets verbosity repoCtxt worldFile available userTargets = do - - -- given the user targets, get a list of fully or partially resolved - -- package references - packageTargets <- mapM (readPackageTarget verbosity) - =<< mapM (fetchPackageTarget verbosity repoCtxt) . concat - =<< mapM (expandUserTarget verbosity worldFile) userTargets - - -- users are allowed to give package names case-insensitively, so we must - -- disambiguate named package references - let (problems, packageSpecifiers) = - disambiguatePackageTargets available availableExtra packageTargets - - -- use any extra specific available packages to help us disambiguate - availableExtra = [ packageName pkg - | PackageTargetLocation pkg <- packageTargets ] - - reportPackageTargetProblems verbosity problems - - return packageSpecifiers - - --- ------------------------------------------------------------ --- * Package targets --- ------------------------------------------------------------ - --- | An intermediate between a 'UserTarget' and a resolved 'PackageSpecifier'. --- Unlike a 'UserTarget', a 'PackageTarget' refers only to a single package. --- -data PackageTarget pkg = - PackageTargetNamed PackageName [PackageProperty] UserTarget - - -- | A package identified by name, but case insensitively, so it needs - -- to be resolved to the right case-sensitive name. - | PackageTargetNamedFuzzy PackageName [PackageProperty] UserTarget - | PackageTargetLocation pkg - deriving (Show, Functor, Foldable, Traversable) - - --- ------------------------------------------------------------ --- * Converting user targets to package targets --- ------------------------------------------------------------ - --- | Given a user-specified target, expand it to a bunch of package targets --- (each of which refers to only one package). 
--- -expandUserTarget :: Verbosity - -> FilePath - -> UserTarget - -> IO [PackageTarget (PackageLocation ())] -expandUserTarget verbosity worldFile userTarget = case userTarget of - - UserTargetNamed (Dependency name vrange) -> - let props = [ PackagePropertyVersion vrange - | not (isAnyVersion vrange) ] - in return [PackageTargetNamedFuzzy name props userTarget] - - UserTargetWorld -> do - worldPkgs <- World.getContents verbosity worldFile - --TODO: should we warn if there are no world targets? - return [ PackageTargetNamed name props userTarget - | World.WorldPkgInfo (Dependency name vrange) flags <- worldPkgs - , let props = [ PackagePropertyVersion vrange - | not (isAnyVersion vrange) ] - ++ [ PackagePropertyFlags flags - | not (nullFlagAssignment flags) ] ] - - UserTargetLocalDir dir -> - return [ PackageTargetLocation (LocalUnpackedPackage dir) ] - - UserTargetLocalCabalFile file -> do - let dir = takeDirectory file - _ <- tryFindPackageDesc verbosity dir (localPackageError dir) -- just as a check - return [ PackageTargetLocation (LocalUnpackedPackage dir) ] - - UserTargetLocalTarball tarballFile -> - return [ PackageTargetLocation (LocalTarballPackage tarballFile) ] - - UserTargetRemoteTarball tarballURL -> - return [ PackageTargetLocation (RemoteTarballPackage tarballURL ()) ] - -localPackageError :: FilePath -> String -localPackageError dir = - "Error reading local package.\nCouldn't find .cabal file in: " ++ dir - --- ------------------------------------------------------------ --- * Fetching and reading package targets --- ------------------------------------------------------------ - - --- | Fetch any remote targets so that they can be read. --- -fetchPackageTarget :: Verbosity - -> RepoContext - -> PackageTarget (PackageLocation ()) - -> IO (PackageTarget ResolvedPkgLoc) -fetchPackageTarget verbosity repoCtxt = traverse $ - fetchPackage verbosity repoCtxt . 
fmap (const Nothing) - - --- | Given a package target that has been fetched, read the .cabal file. --- --- This only affects targets given by location, named targets are unaffected. --- -readPackageTarget :: Verbosity - -> PackageTarget ResolvedPkgLoc - -> IO (PackageTarget UnresolvedSourcePackage) -readPackageTarget verbosity = traverse modifyLocation - where - modifyLocation location = case location of - - LocalUnpackedPackage dir -> do - pkg <- tryFindPackageDesc verbosity dir (localPackageError dir) >>= - readGenericPackageDescription verbosity - return $ SourcePackage { - packageInfoId = packageId pkg, - packageDescription = pkg, - packageSource = fmap Just location, - packageDescrOverride = Nothing - } - - LocalTarballPackage tarballFile -> - readTarballPackageTarget location tarballFile tarballFile - - RemoteTarballPackage tarballURL tarballFile -> - readTarballPackageTarget location tarballFile (show tarballURL) - - RepoTarballPackage _repo _pkgid _ -> - error "TODO: readPackageTarget RepoTarballPackage" - -- For repo tarballs this info should be obtained from the index. - - RemoteSourceRepoPackage _srcRepo _ -> - error "TODO: readPackageTarget RemoteSourceRepoPackage" - -- This can't happen, because it would have errored out already - -- in fetchPackage, via fetchPackageTarget before it gets to this - -- function. - -- - -- When that is corrected, this will also need to be fixed. 
- - readTarballPackageTarget location tarballFile tarballOriginalLoc = do - (filename, content) <- extractTarballPackageCabalFile - tarballFile tarballOriginalLoc - case parsePackageDescription' content of - Nothing -> die' verbosity $ "Could not parse the cabal file " - ++ filename ++ " in " ++ tarballFile - Just pkg -> - return $ SourcePackage { - packageInfoId = packageId pkg, - packageDescription = pkg, - packageSource = fmap Just location, - packageDescrOverride = Nothing - } - - extractTarballPackageCabalFile :: FilePath -> String - -> IO (FilePath, BS.ByteString) - extractTarballPackageCabalFile tarballFile tarballOriginalLoc = - either (die' verbosity . formatErr) return - . check - . accumEntryMap - . Tar.filterEntries isCabalFile - . Tar.read - . GZipUtils.maybeDecompress - =<< BS.readFile tarballFile - where - formatErr msg = "Error reading " ++ tarballOriginalLoc ++ ": " ++ msg - - accumEntryMap = Tar.foldlEntries - (\m e -> Map.insert (Tar.entryTarPath e) e m) - Map.empty - - check (Left e) = Left (show e) - check (Right m) = case Map.elems m of - [] -> Left noCabalFile - [file] -> case Tar.entryContent file of - Tar.NormalFile content _ -> Right (Tar.entryPath file, content) - _ -> Left noCabalFile - _files -> Left multipleCabalFiles - where - noCabalFile = "No cabal file found" - multipleCabalFiles = "Multiple cabal files found" - - isCabalFile e = case splitPath (Tar.entryPath e) of - [ _dir, file] -> takeExtension file == ".cabal" - [".", _dir, file] -> takeExtension file == ".cabal" - _ -> False - - parsePackageDescription' :: BS.ByteString -> Maybe GenericPackageDescription - parsePackageDescription' bs = - parseGenericPackageDescriptionMaybe (BS.toStrict bs) - --- ------------------------------------------------------------ --- * Checking package targets --- ------------------------------------------------------------ - -data PackageTargetProblem - = PackageNameUnknown PackageName UserTarget - | PackageNameAmbiguous PackageName [PackageName] 
UserTarget - deriving Show - - --- | Users are allowed to give package names case-insensitively, so we must --- disambiguate named package references. --- -disambiguatePackageTargets :: Package pkg' - => PackageIndex pkg' - -> [PackageName] - -> [PackageTarget pkg] - -> ( [PackageTargetProblem] - , [PackageSpecifier pkg] ) -disambiguatePackageTargets availablePkgIndex availableExtra targets = - partitionEithers (map disambiguatePackageTarget targets) - where - disambiguatePackageTarget packageTarget = case packageTarget of - PackageTargetLocation pkg -> Right (SpecificSourcePackage pkg) - - PackageTargetNamed pkgname props userTarget - | null (PackageIndex.lookupPackageName availablePkgIndex pkgname) - -> Left (PackageNameUnknown pkgname userTarget) - | otherwise -> Right (NamedPackage pkgname props) - - PackageTargetNamedFuzzy pkgname props userTarget -> - case disambiguatePackageName packageNameEnv pkgname of - None -> Left (PackageNameUnknown - pkgname userTarget) - Ambiguous pkgnames -> Left (PackageNameAmbiguous - pkgname pkgnames userTarget) - Unambiguous pkgname' -> Right (NamedPackage pkgname' props) - - -- use any extra specific available packages to help us disambiguate - packageNameEnv :: PackageNameEnv - packageNameEnv = mappend (indexPackageNameEnv availablePkgIndex) - (extraPackageNameEnv availableExtra) - - --- | Report problems to the user. That is, if there are any problems --- then raise an exception. -reportPackageTargetProblems :: Verbosity - -> [PackageTargetProblem] -> IO () -reportPackageTargetProblems verbosity problems = do - case [ pkg | PackageNameUnknown pkg originalTarget <- problems - , not (isUserTagetWorld originalTarget) ] of - [] -> return () - pkgs -> die' verbosity $ unlines - [ "There is no package named '" ++ display name ++ "'. " - | name <- pkgs ] - ++ "You may need to run 'cabal update' to get the latest " - ++ "list of available packages." 
- - case [ (pkg, matches) | PackageNameAmbiguous pkg matches _ <- problems ] of - [] -> return () - ambiguities -> die' verbosity $ unlines - [ "There is no package named '" ++ display name ++ "'. " - ++ (if length matches > 1 - then "However, the following package names exist: " - else "However, the following package name exists: ") - ++ intercalate ", " [ "'" ++ display m ++ "'" | m <- matches] - ++ "." - | (name, matches) <- ambiguities ] - - case [ pkg | PackageNameUnknown pkg UserTargetWorld <- problems ] of - [] -> return () - pkgs -> warn verbosity $ - "The following 'world' packages will be ignored because " - ++ "they refer to packages that cannot be found: " - ++ intercalate ", " (map display pkgs) ++ "\n" - ++ "You can suppress this warning by correcting the world file." - where - isUserTagetWorld UserTargetWorld = True; isUserTagetWorld _ = False - - --- ------------------------------------------------------------ --- * Disambiguating package names --- ------------------------------------------------------------ - -data MaybeAmbiguous a = None | Unambiguous a | Ambiguous [a] - --- | Given a package name and a list of matching names, figure out --- which one it might be referring to. If there is an exact --- case-sensitive match then that's ok (i.e. returned via --- 'Unambiguous'). If it matches just one package case-insensitively --- or if it matches multiple packages case-insensitively, in that case --- the result is 'Ambiguous'. --- --- Note: Before cabal 2.2, when only a single package matched --- case-insensitively it would be considered 'Unambigious'. 
--- -disambiguatePackageName :: PackageNameEnv - -> PackageName - -> MaybeAmbiguous PackageName -disambiguatePackageName (PackageNameEnv pkgNameLookup) name = - case nub (pkgNameLookup name) of - [] -> None - names -> case find (name==) names of - Just name' -> Unambiguous name' - Nothing -> Ambiguous names - - -newtype PackageNameEnv = PackageNameEnv (PackageName -> [PackageName]) - -instance Monoid PackageNameEnv where - mempty = PackageNameEnv (const []) - mappend = (<>) - -instance Semigroup PackageNameEnv where - PackageNameEnv lookupA <> PackageNameEnv lookupB = - PackageNameEnv (\name -> lookupA name ++ lookupB name) - -indexPackageNameEnv :: PackageIndex pkg -> PackageNameEnv -indexPackageNameEnv pkgIndex = PackageNameEnv pkgNameLookup - where - pkgNameLookup pname = - map fst (PackageIndex.searchByName pkgIndex $ unPackageName pname) - -extraPackageNameEnv :: [PackageName] -> PackageNameEnv -extraPackageNameEnv names = PackageNameEnv pkgNameLookup - where - pkgNameLookup pname = - [ pname' - | let lname = lowercase (unPackageName pname) - , pname' <- names - , lowercase (unPackageName pname') == lname ] - - --- ------------------------------------------------------------ --- * Package constraints --- ------------------------------------------------------------ - --- | Version of 'Qualifier' that a user may specify on the --- command line. -data UserQualifier = - -- | Top-level dependency. - UserQualToplevel - - -- | Setup dependency. - | UserQualSetup PackageName - - -- | Executable dependency. - | UserQualExe PackageName PackageName - deriving (Eq, Show, Generic) - -instance Binary UserQualifier - --- | Version of 'ConstraintScope' that a user may specify on the --- command line. -data UserConstraintScope = - -- | Scope that applies to the package when it has the specified qualifier. - UserQualified UserQualifier PackageName - - -- | Scope that applies to the package when it has a setup qualifier. 
- | UserAnySetupQualifier PackageName - - -- | Scope that applies to the package when it has any qualifier. - | UserAnyQualifier PackageName - deriving (Eq, Show, Generic) - -instance Binary UserConstraintScope - -fromUserQualifier :: UserQualifier -> Qualifier -fromUserQualifier UserQualToplevel = QualToplevel -fromUserQualifier (UserQualSetup name) = QualSetup name -fromUserQualifier (UserQualExe name1 name2) = QualExe name1 name2 - -fromUserConstraintScope :: UserConstraintScope -> ConstraintScope -fromUserConstraintScope (UserQualified q pn) = - ScopeQualified (fromUserQualifier q) pn -fromUserConstraintScope (UserAnySetupQualifier pn) = ScopeAnySetupQualifier pn -fromUserConstraintScope (UserAnyQualifier pn) = ScopeAnyQualifier pn - --- | Version of 'PackageConstraint' that the user can specify on --- the command line. -data UserConstraint = - UserConstraint UserConstraintScope PackageProperty - deriving (Eq, Show, Generic) - -instance Binary UserConstraint - -userConstraintPackageName :: UserConstraint -> PackageName -userConstraintPackageName (UserConstraint scope _) = scopePN scope - where - scopePN (UserQualified _ pn) = pn - scopePN (UserAnyQualifier pn) = pn - scopePN (UserAnySetupQualifier pn) = pn - -userToPackageConstraint :: UserConstraint -> PackageConstraint -userToPackageConstraint (UserConstraint scope prop) = - PackageConstraint (fromUserConstraintScope scope) prop - -readUserConstraint :: String -> Either String UserConstraint -readUserConstraint str = - case readPToMaybe parse str of - Nothing -> Left msgCannotParse - Just c -> Right c - where - msgCannotParse = - "expected a (possibly qualified) package name followed by a " ++ - "constraint, which is either a version range, 'installed', " ++ - "'source', 'test', 'bench', or flags" - -instance Text UserConstraint where - disp (UserConstraint scope prop) = - dispPackageConstraint $ PackageConstraint (fromUserConstraintScope scope) prop - - parse = - let parseConstraintScope :: Parse.ReadP a 
UserConstraintScope - parseConstraintScope = - do - _ <- Parse.string "any." - pn <- parse - return (UserAnyQualifier pn) - +++ - do - _ <- Parse.string "setup." - pn <- parse - return (UserAnySetupQualifier pn) - +++ - do - -- Qualified name - pn <- parse - (return (UserQualified UserQualToplevel pn) - +++ - do _ <- Parse.string ":setup." - pn2 <- parse - return (UserQualified (UserQualSetup pn) pn2)) - - -- -- TODO: Re-enable parsing of UserQualExe once we decide on a syntax. - -- - -- +++ - -- do _ <- Parse.string ":" - -- pn2 <- parse - -- _ <- Parse.string ":exe." - -- pn3 <- parse - -- return (UserQualExe pn pn2, pn3) - in do - scope <- parseConstraintScope - - -- Package property - let keyword str x = Parse.skipSpaces1 >> Parse.string str >> return x - prop <- ((parse >>= return . PackagePropertyVersion) - +++ - keyword "installed" PackagePropertyInstalled - +++ - keyword "source" PackagePropertySource - +++ - keyword "test" (PackagePropertyStanzas [TestStanzas]) - +++ - keyword "bench" (PackagePropertyStanzas [BenchStanzas])) - -- Note: the parser is left-biased here so that we - -- don't get an ambiguous parse from 'installed', - -- 'source', etc. being regarded as flags. - <++ - (Parse.skipSpaces1 >> parseFlagAssignment - >>= return . 
PackagePropertyFlags) - - -- Result - return (UserConstraint scope prop) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Tar.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Tar.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Tar.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Tar.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,110 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Tar --- Copyright : (c) 2007 Bjorn Bringert, --- 2008 Andrea Vezzosi, --- 2008-2009 Duncan Coutts --- License : BSD3 --- --- Maintainer : duncan@community.haskell.org --- Portability : portable --- --- Reading, writing and manipulating \"@.tar@\" archive files. --- ------------------------------------------------------------------------------ -module Distribution.Client.Tar ( - -- * @tar.gz@ operations - createTarGzFile, - extractTarGzFile, - - -- * Other local utils - buildTreeRefTypeCode, - buildTreeSnapshotTypeCode, - isBuildTreeRefTypeCode, - filterEntries, - filterEntriesM, - entriesToList, - ) where - -import qualified Data.ByteString.Lazy as BS -import qualified Codec.Archive.Tar as Tar -import qualified Codec.Archive.Tar.Entry as Tar -import qualified Codec.Archive.Tar.Check as Tar -import qualified Codec.Compression.GZip as GZip -import qualified Distribution.Client.GZipUtils as GZipUtils - -import Control.Exception (Exception(..), throw) - --- --- * High level operations --- - -createTarGzFile :: FilePath -- ^ Full Tarball path - -> FilePath -- ^ Base directory - -> FilePath -- ^ Directory to archive, relative to base dir - -> IO () -createTarGzFile tar base dir = - BS.writeFile tar . GZip.compress . 
Tar.write =<< Tar.pack base [dir] - -extractTarGzFile :: FilePath -- ^ Destination directory - -> FilePath -- ^ Expected subdir (to check for tarbombs) - -> FilePath -- ^ Tarball - -> IO () -extractTarGzFile dir expected tar = - Tar.unpack dir . Tar.checkTarbomb expected . Tar.read - . GZipUtils.maybeDecompress =<< BS.readFile tar - -instance (Exception a, Exception b) => Exception (Either a b) where - toException (Left e) = toException e - toException (Right e) = toException e - - fromException e = - case fromException e of - Just e' -> Just (Left e') - Nothing -> case fromException e of - Just e' -> Just (Right e') - Nothing -> Nothing - - --- | Type code for the local build tree reference entry type. We don't use the --- symbolic link entry type because it allows only 100 ASCII characters for the --- path. -buildTreeRefTypeCode :: Tar.TypeCode -buildTreeRefTypeCode = 'C' - --- | Type code for the local build tree snapshot entry type. -buildTreeSnapshotTypeCode :: Tar.TypeCode -buildTreeSnapshotTypeCode = 'S' - --- | Is this a type code for a build tree reference? -isBuildTreeRefTypeCode :: Tar.TypeCode -> Bool -isBuildTreeRefTypeCode typeCode - | (typeCode == buildTreeRefTypeCode - || typeCode == buildTreeSnapshotTypeCode) = True - | otherwise = False - -filterEntries :: (Tar.Entry -> Bool) -> Tar.Entries e -> Tar.Entries e -filterEntries p = - Tar.foldEntries - (\e es -> if p e then Tar.Next e es else es) - Tar.Done - Tar.Fail - -filterEntriesM :: Monad m => (Tar.Entry -> m Bool) - -> Tar.Entries e -> m (Tar.Entries e) -filterEntriesM p = - Tar.foldEntries - (\entry rest -> do - keep <- p entry - xs <- rest - if keep - then return (Tar.Next entry xs) - else return xs) - (return Tar.Done) - (return . 
Tar.Fail) - -entriesToList :: Exception e => Tar.Entries e -> [Tar.Entry] -entriesToList = Tar.foldEntries (:) [] throw - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Types.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Types.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,591 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE TypeFamilies #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Types --- Copyright : (c) David Himmelstrup 2005 --- Duncan Coutts 2011 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Various common data types for the entire cabal-install system ------------------------------------------------------------------------------ -module Distribution.Client.Types where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Package - ( Package(..), HasMungedPackageId(..), HasUnitId(..) 
- , PackageIdentifier(..), packageVersion, packageName - , PackageInstalled(..), newSimpleUnitId ) -import Distribution.InstalledPackageInfo - ( InstalledPackageInfo, installedComponentId, sourceComponentName ) -import Distribution.PackageDescription - ( FlagAssignment ) -import Distribution.Version - ( VersionRange, nullVersion, thisVersion ) -import Distribution.Types.ComponentId - ( ComponentId ) -import Distribution.Types.MungedPackageId - ( computeCompatPackageId ) -import Distribution.Types.PackageId - ( PackageId ) -import Distribution.Types.AnnotatedId -import Distribution.Types.UnitId - ( UnitId ) -import Distribution.Types.PackageName - ( PackageName, mkPackageName ) -import Distribution.Types.ComponentName - ( ComponentName(..) ) -import Distribution.Types.SourceRepo - ( SourceRepo ) - -import Distribution.Solver.Types.PackageIndex - ( PackageIndex ) -import qualified Distribution.Solver.Types.ComponentDeps as CD -import Distribution.Solver.Types.ComponentDeps - ( ComponentDeps ) -import Distribution.Solver.Types.ConstraintSource -import Distribution.Solver.Types.LabeledPackageConstraint -import Distribution.Solver.Types.OptionalStanza -import Distribution.Solver.Types.PackageConstraint -import Distribution.Solver.Types.PackageFixedDeps -import Distribution.Solver.Types.SourcePackage -import Distribution.Compat.Graph (IsNode(..)) -import qualified Distribution.Compat.ReadP as Parse -import Distribution.ParseUtils (parseOptCommaList) -import Distribution.Simple.Utils (ordNub) -import Distribution.Text (Text(..)) - -import Network.URI (URI(..), URIAuth(..), nullURI) -import Control.Exception - ( Exception, SomeException ) -import qualified Text.PrettyPrint as Disp - - -newtype Username = Username { unUsername :: String } -newtype Password = Password { unPassword :: String } - --- | This is the information we get from a @00-index.tar.gz@ hackage index. 
--- -data SourcePackageDb = SourcePackageDb { - packageIndex :: PackageIndex UnresolvedSourcePackage, - packagePreferences :: Map PackageName VersionRange -} - deriving (Eq, Generic) - -instance Binary SourcePackageDb - --- ------------------------------------------------------------ --- * Various kinds of information about packages --- ------------------------------------------------------------ - --- | Within Cabal the library we no longer have a @InstalledPackageId@ type. --- That's because it deals with the compilers' notion of a registered library, --- and those really are libraries not packages. Those are now named units. --- --- The package management layer does however deal with installed packages, as --- whole packages not just as libraries. So we do still need a type for --- installed package ids. At the moment however we track instaled packages via --- their primary library, which is a unit id. In future this may change --- slightly and we may distinguish these two types and have an explicit --- conversion when we register units with the compiler. --- -type InstalledPackageId = ComponentId - - --- | A 'ConfiguredPackage' is a not-yet-installed package along with the --- total configuration information. The configuration information is total in --- the sense that it provides all the configuration information and so the --- final configure process will be independent of the environment. --- --- 'ConfiguredPackage' is assumed to not support Backpack. Only the --- @new-build@ codepath supports Backpack. --- -data ConfiguredPackage loc = ConfiguredPackage { - confPkgId :: InstalledPackageId, - confPkgSource :: SourcePackage loc, -- package info, including repo - confPkgFlags :: FlagAssignment, -- complete flag assignment for the package - confPkgStanzas :: [OptionalStanza], -- list of enabled optional stanzas for the package - confPkgDeps :: ComponentDeps [ConfiguredId] - -- set of exact dependencies (installed or source). 
- -- These must be consistent with the 'buildDepends' - -- in the 'PackageDescription' that you'd get by - -- applying the flag assignment and optional stanzas. - } - deriving (Eq, Show, Generic) - --- | 'HasConfiguredId' indicates data types which have a 'ConfiguredId'. --- This type class is mostly used to conveniently finesse between --- 'ElaboratedPackage' and 'ElaboratedComponent'. --- -instance HasConfiguredId (ConfiguredPackage loc) where - configuredId pkg = ConfiguredId (packageId pkg) (Just CLibName) (confPkgId pkg) - --- 'ConfiguredPackage' is the legacy codepath, we are guaranteed --- to never have a nontrivial 'UnitId' -instance PackageFixedDeps (ConfiguredPackage loc) where - depends = fmap (map (newSimpleUnitId . confInstId)) . confPkgDeps - -instance IsNode (ConfiguredPackage loc) where - type Key (ConfiguredPackage loc) = UnitId - nodeKey = newSimpleUnitId . confPkgId - -- TODO: if we update ConfiguredPackage to support order-only - -- dependencies, need to include those here. - -- NB: have to deduplicate, otherwise the planner gets confused - nodeNeighbors = ordNub . CD.flatDeps . depends - -instance (Binary loc) => Binary (ConfiguredPackage loc) - - --- | A ConfiguredId is a package ID for a configured package. --- --- Once we configure a source package we know its UnitId. It is still --- however useful in lots of places to also know the source ID for the package. --- We therefore bundle the two. --- --- An already installed package of course is also "configured" (all its --- configuration parameters and dependencies have been specified). 
-data ConfiguredId = ConfiguredId { - confSrcId :: PackageId - , confCompName :: Maybe ComponentName - , confInstId :: ComponentId - } - deriving (Eq, Ord, Generic) - -annotatedIdToConfiguredId :: AnnotatedId ComponentId -> ConfiguredId -annotatedIdToConfiguredId aid = ConfiguredId { - confSrcId = ann_pid aid, - confCompName = Just (ann_cname aid), - confInstId = ann_id aid - } - -instance Binary ConfiguredId - -instance Show ConfiguredId where - show cid = show (confInstId cid) - -instance Package ConfiguredId where - packageId = confSrcId - -instance Package (ConfiguredPackage loc) where - packageId cpkg = packageId (confPkgSource cpkg) - -instance HasMungedPackageId (ConfiguredPackage loc) where - mungedId cpkg = computeCompatPackageId (packageId cpkg) Nothing - --- Never has nontrivial UnitId -instance HasUnitId (ConfiguredPackage loc) where - installedUnitId = newSimpleUnitId . confPkgId - -instance PackageInstalled (ConfiguredPackage loc) where - installedDepends = CD.flatDeps . depends - -class HasConfiguredId a where - configuredId :: a -> ConfiguredId - --- NB: This instance is slightly dangerous, in that you'll lose --- information about the specific UnitId you depended on. -instance HasConfiguredId InstalledPackageInfo where - configuredId ipkg = ConfiguredId (packageId ipkg) - (Just (sourceComponentName ipkg)) - (installedComponentId ipkg) - --- | Like 'ConfiguredPackage', but with all dependencies guaranteed to be --- installed already, hence itself ready to be installed. -newtype GenericReadyPackage srcpkg = ReadyPackage srcpkg -- see 'ConfiguredPackage'. 
- deriving (Eq, Show, Generic, Package, PackageFixedDeps, - HasMungedPackageId, HasUnitId, PackageInstalled, Binary) - --- Can't newtype derive this -instance IsNode srcpkg => IsNode (GenericReadyPackage srcpkg) where - type Key (GenericReadyPackage srcpkg) = Key srcpkg - nodeKey (ReadyPackage spkg) = nodeKey spkg - nodeNeighbors (ReadyPackage spkg) = nodeNeighbors spkg - -type ReadyPackage = GenericReadyPackage (ConfiguredPackage UnresolvedPkgLoc) - --- | Convenience alias for 'SourcePackage UnresolvedPkgLoc'. -type UnresolvedSourcePackage = SourcePackage UnresolvedPkgLoc - - --- ------------------------------------------------------------ --- * Package specifier --- ------------------------------------------------------------ - --- | A fully or partially resolved reference to a package. --- -data PackageSpecifier pkg = - - -- | A partially specified reference to a package (either source or - -- installed). It is specified by package name and optionally some - -- required properties. Use a dependency resolver to pick a specific - -- package satisfying these properties. - -- - NamedPackage PackageName [PackageProperty] - - -- | A fully specified source package. 
- -- - | SpecificSourcePackage pkg - deriving (Eq, Show, Functor, Generic) - -instance Binary pkg => Binary (PackageSpecifier pkg) - -pkgSpecifierTarget :: Package pkg => PackageSpecifier pkg -> PackageName -pkgSpecifierTarget (NamedPackage name _) = name -pkgSpecifierTarget (SpecificSourcePackage pkg) = packageName pkg - -pkgSpecifierConstraints :: Package pkg - => PackageSpecifier pkg -> [LabeledPackageConstraint] -pkgSpecifierConstraints (NamedPackage name props) = map toLpc props - where - toLpc prop = LabeledPackageConstraint - (PackageConstraint (scopeToplevel name) prop) - ConstraintSourceUserTarget -pkgSpecifierConstraints (SpecificSourcePackage pkg) = - [LabeledPackageConstraint pc ConstraintSourceUserTarget] - where - pc = PackageConstraint - (ScopeTarget $ packageName pkg) - (PackagePropertyVersion $ thisVersion (packageVersion pkg)) - - --- ------------------------------------------------------------ --- * Package locations and repositories --- ------------------------------------------------------------ - -type UnresolvedPkgLoc = PackageLocation (Maybe FilePath) - -type ResolvedPkgLoc = PackageLocation FilePath - -data PackageLocation local = - - -- | An unpacked package in the given dir, or current dir - LocalUnpackedPackage FilePath - - -- | A package as a tarball that's available as a local tarball - | LocalTarballPackage FilePath - - -- | A package as a tarball from a remote URI - | RemoteTarballPackage URI local - - -- | A package available as a tarball from a repository. - -- - -- It may be from a local repository or from a remote repository, with a - -- locally cached copy. 
ie a package available from hackage - | RepoTarballPackage Repo PackageId local - - -- | A package available from a version control system source repository - | RemoteSourceRepoPackage SourceRepo local - deriving (Show, Functor, Eq, Ord, Generic, Typeable) - -instance Binary local => Binary (PackageLocation local) - --- note, network-uri-2.6.0.3+ provide a Generic instance but earlier --- versions do not, so we use manual Binary instances here -instance Binary URI where - put (URI a b c d e) = do put a; put b; put c; put d; put e - get = do !a <- get; !b <- get; !c <- get; !d <- get; !e <- get - return (URI a b c d e) - -instance Binary URIAuth where - put (URIAuth a b c) = do put a; put b; put c - get = do !a <- get; !b <- get; !c <- get; return (URIAuth a b c) - -data RemoteRepo = - RemoteRepo { - remoteRepoName :: String, - remoteRepoURI :: URI, - - -- | Enable secure access? - -- - -- 'Nothing' here represents "whatever the default is"; this is important - -- to allow for a smooth transition from opt-in to opt-out security - -- (once we switch to opt-out, all access to the central Hackage - -- repository should be secure by default) - remoteRepoSecure :: Maybe Bool, - - -- | Root key IDs (for bootstrapping) - remoteRepoRootKeys :: [String], - - -- | Threshold for verification during bootstrapping - remoteRepoKeyThreshold :: Int, - - -- | Normally a repo just specifies an HTTP or HTTPS URI, but as a - -- special case we may know a repo supports both and want to try HTTPS - -- if we can, but still allow falling back to HTTP. - -- - -- This field is not currently stored in the config file, but is filled - -- in automagically for known repos. - remoteRepoShouldTryHttps :: Bool - } - - deriving (Show, Eq, Ord, Generic) - -instance Binary RemoteRepo - --- | Construct a partial 'RemoteRepo' value to fold the field parser list over. 
-emptyRemoteRepo :: String -> RemoteRepo -emptyRemoteRepo name = RemoteRepo name nullURI Nothing [] 0 False - --- | Different kinds of repositories --- --- NOTE: It is important that this type remains serializable. -data Repo = - -- | Local repositories - RepoLocal { - repoLocalDir :: FilePath - } - - -- | Standard (unsecured) remote repositores - | RepoRemote { - repoRemote :: RemoteRepo - , repoLocalDir :: FilePath - } - - -- | Secure repositories - -- - -- Although this contains the same fields as 'RepoRemote', we use a separate - -- constructor to avoid confusing the two. - -- - -- Not all access to a secure repo goes through the hackage-security - -- library currently; code paths that do not still make use of the - -- 'repoRemote' and 'repoLocalDir' fields directly. - | RepoSecure { - repoRemote :: RemoteRepo - , repoLocalDir :: FilePath - } - deriving (Show, Eq, Ord, Generic) - -instance Binary Repo - --- | Check if this is a remote repo -isRepoRemote :: Repo -> Bool -isRepoRemote RepoLocal{} = False -isRepoRemote _ = True - --- | Extract @RemoteRepo@ from @Repo@ if remote. -maybeRepoRemote :: Repo -> Maybe RemoteRepo -maybeRepoRemote (RepoLocal _localDir) = Nothing -maybeRepoRemote (RepoRemote r _localDir) = Just r -maybeRepoRemote (RepoSecure r _localDir) = Just r - --- ------------------------------------------------------------ --- * Build results --- ------------------------------------------------------------ - --- | A summary of the outcome for building a single package. --- -type BuildOutcome = Either BuildFailure BuildResult - --- | A summary of the outcome for building a whole set of packages. 
--- -type BuildOutcomes = Map UnitId BuildOutcome - -data BuildFailure = PlanningFailed - | DependentFailed PackageId - | DownloadFailed SomeException - | UnpackFailed SomeException - | ConfigureFailed SomeException - | BuildFailed SomeException - | TestsFailed SomeException - | InstallFailed SomeException - deriving (Show, Typeable, Generic) - -instance Exception BuildFailure - --- Note that the @Maybe InstalledPackageInfo@ is a slight hack: we only --- the public library's 'InstalledPackageInfo' is stored here, even if --- there were 'InstalledPackageInfo' from internal libraries. This --- 'InstalledPackageInfo' is not used anyway, so it makes no difference. -data BuildResult = BuildResult DocsResult TestsResult - (Maybe InstalledPackageInfo) - deriving (Show, Generic) - -data DocsResult = DocsNotTried | DocsFailed | DocsOk - deriving (Show, Generic, Typeable) -data TestsResult = TestsNotTried | TestsOk - deriving (Show, Generic, Typeable) - -instance Binary BuildFailure -instance Binary BuildResult -instance Binary DocsResult -instance Binary TestsResult - ---FIXME: this is a total cheat -instance Binary SomeException where - put _ = return () - get = fail "cannot serialise exceptions" - - --- ------------------------------------------------------------ --- * --allow-newer/--allow-older --- ------------------------------------------------------------ - --- TODO: When https://github.com/haskell/cabal/issues/4203 gets tackled, --- it may make sense to move these definitions to the Solver.Types --- module - --- | 'RelaxDeps' in the context of upper bounds (i.e. for @--allow-newer@ flag) -newtype AllowNewer = AllowNewer { unAllowNewer :: RelaxDeps } - deriving (Eq, Read, Show, Generic) - --- | 'RelaxDeps' in the context of lower bounds (i.e. for @--allow-older@ flag) -newtype AllowOlder = AllowOlder { unAllowOlder :: RelaxDeps } - deriving (Eq, Read, Show, Generic) - --- | Generic data type for policy when relaxing bounds in dependencies. 
--- Don't use this directly: use 'AllowOlder' or 'AllowNewer' depending --- on whether or not you are relaxing an lower or upper bound --- (respectively). -data RelaxDeps = - - -- | Ignore upper (resp. lower) bounds in some (or no) dependencies on the given packages. - -- - -- @RelaxDepsSome []@ is the default, i.e. honor the bounds in all - -- dependencies, never choose versions newer (resp. older) than allowed. - RelaxDepsSome [RelaxedDep] - - -- | Ignore upper (resp. lower) bounds in dependencies on all packages. - -- - -- __Note__: This is should be semantically equivalent to - -- - -- > RelaxDepsSome [RelaxedDep RelaxDepScopeAll RelaxDepModNone RelaxDepSubjectAll] - -- - -- (TODO: consider normalising 'RelaxDeps' and/or 'RelaxedDep') - | RelaxDepsAll - deriving (Eq, Read, Show, Generic) - --- | Dependencies can be relaxed either for all packages in the install plan, or --- only for some packages. -data RelaxedDep = RelaxedDep !RelaxDepScope !RelaxDepMod !RelaxDepSubject - deriving (Eq, Read, Show, Generic) - --- | Specify the scope of a relaxation, i.e. limit which depending --- packages are allowed to have their version constraints relaxed. 
-data RelaxDepScope = RelaxDepScopeAll - -- ^ Apply relaxation in any package - | RelaxDepScopePackage !PackageName - -- ^ Apply relaxation to in all versions of a package - | RelaxDepScopePackageId !PackageId - -- ^ Apply relaxation to a specific version of a package only - deriving (Eq, Read, Show, Generic) - --- | Modifier for dependency relaxation -data RelaxDepMod = RelaxDepModNone -- ^ Default semantics - | RelaxDepModCaret -- ^ Apply relaxation only to @^>=@ constraints - deriving (Eq, Read, Show, Generic) - --- | Express whether to relax bounds /on/ @all@ packages, or a single package -data RelaxDepSubject = RelaxDepSubjectAll - | RelaxDepSubjectPkg !PackageName - deriving (Eq, Ord, Read, Show, Generic) - -instance Text RelaxedDep where - disp (RelaxedDep scope rdmod subj) = case scope of - RelaxDepScopeAll -> Disp.text "all:" Disp.<> modDep - RelaxDepScopePackage p0 -> disp p0 Disp.<> Disp.colon Disp.<> modDep - RelaxDepScopePackageId p0 -> disp p0 Disp.<> Disp.colon Disp.<> modDep - where - modDep = case rdmod of - RelaxDepModNone -> disp subj - RelaxDepModCaret -> Disp.char '^' Disp.<> disp subj - - parse = RelaxedDep <$> scopeP <*> modP <*> parse - where - -- "greedy" choices - scopeP = (pure RelaxDepScopeAll <* Parse.char '*' <* Parse.char ':') - Parse.<++ (pure RelaxDepScopeAll <* Parse.string "all:") - Parse.<++ (RelaxDepScopePackageId <$> pidP <* Parse.char ':') - Parse.<++ (RelaxDepScopePackage <$> parse <* Parse.char ':') - Parse.<++ (pure RelaxDepScopeAll) - - modP = (pure RelaxDepModCaret <* Parse.char '^') - Parse.<++ (pure RelaxDepModNone) - - -- | Stricter 'PackageId' parser which doesn't overlap with 'PackageName' parser - pidP = do - p0 <- parse - when (pkgVersion p0 == nullVersion) Parse.pfail - pure p0 - -instance Text RelaxDepSubject where - disp RelaxDepSubjectAll = Disp.text "all" - disp (RelaxDepSubjectPkg pn) = disp pn - - parse = (pure RelaxDepSubjectAll <* Parse.char '*') Parse.<++ pkgn - where - pkgn = do - pn <- parse - pure (if 
(pn == mkPackageName "all") - then RelaxDepSubjectAll - else RelaxDepSubjectPkg pn) - -instance Text RelaxDeps where - disp rd | not (isRelaxDeps rd) = Disp.text "none" - disp (RelaxDepsSome pkgs) = Disp.fsep . - Disp.punctuate Disp.comma . - map disp $ pkgs - disp RelaxDepsAll = Disp.text "all" - - parse = (const mempty <$> ((Parse.string "none" Parse.+++ - Parse.string "None") <* Parse.eof)) - Parse.<++ (const RelaxDepsAll <$> ((Parse.string "all" Parse.+++ - Parse.string "All" Parse.+++ - Parse.string "*") <* Parse.eof)) - Parse.<++ ( RelaxDepsSome <$> parseOptCommaList parse) - -instance Binary RelaxDeps -instance Binary RelaxDepMod -instance Binary RelaxDepScope -instance Binary RelaxDepSubject -instance Binary RelaxedDep -instance Binary AllowNewer -instance Binary AllowOlder - --- | Return 'True' if 'RelaxDeps' specifies a non-empty set of relaxations --- --- Equivalent to @isRelaxDeps = (/= 'mempty')@ -isRelaxDeps :: RelaxDeps -> Bool -isRelaxDeps (RelaxDepsSome []) = False -isRelaxDeps (RelaxDepsSome (_:_)) = True -isRelaxDeps RelaxDepsAll = True - --- | 'RelaxDepsAll' is the /absorbing element/ -instance Semigroup RelaxDeps where - -- identity element - RelaxDepsSome [] <> r = r - l@(RelaxDepsSome _) <> RelaxDepsSome [] = l - -- absorbing element - l@RelaxDepsAll <> _ = l - (RelaxDepsSome _) <> r@RelaxDepsAll = r - -- combining non-{identity,absorbing} elements - (RelaxDepsSome a) <> (RelaxDepsSome b) = RelaxDepsSome (a ++ b) - --- | @'RelaxDepsSome' []@ is the /identity element/ -instance Monoid RelaxDeps where - mempty = RelaxDepsSome [] - mappend = (<>) - -instance Semigroup AllowNewer where - AllowNewer x <> AllowNewer y = AllowNewer (x <> y) - -instance Semigroup AllowOlder where - AllowOlder x <> AllowOlder y = AllowOlder (x <> y) - -instance Monoid AllowNewer where - mempty = AllowNewer mempty - mappend = (<>) - -instance Monoid AllowOlder where - mempty = AllowOlder mempty - mappend = (<>) diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Update.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Update.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Update.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Update.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,110 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Update --- Copyright : (c) David Himmelstrup 2005 --- License : BSD-like --- --- Maintainer : lemmih@gmail.com --- Stability : provisional --- Portability : portable --- --- ------------------------------------------------------------------------------ -{-# LANGUAGE RecordWildCards #-} -module Distribution.Client.Update - ( update - ) where - -import Distribution.Simple.Setup - ( fromFlag ) -import Distribution.Client.Compat.Directory - ( setModificationTime ) -import Distribution.Client.Types - ( Repo(..), RemoteRepo(..), maybeRepoRemote ) -import Distribution.Client.HttpUtils - ( DownloadResult(..) ) -import Distribution.Client.FetchUtils - ( downloadIndex ) -import Distribution.Client.IndexUtils.Timestamp -import Distribution.Client.IndexUtils - ( updateRepoIndexCache, Index(..), writeIndexTimestamp - , currentIndexTimestamp, indexBaseName ) -import Distribution.Client.JobControl - ( newParallelJobControl, spawnJob, collectJob ) -import Distribution.Client.Setup - ( RepoContext(..), UpdateFlags(..) 
) -import Distribution.Text - ( display ) -import Distribution.Verbosity - -import Distribution.Simple.Utils - ( writeFileAtomic, warn, notice, noticeNoWrap ) - -import qualified Data.ByteString.Lazy as BS -import Distribution.Client.GZipUtils (maybeDecompress) -import System.FilePath ((<.>), dropExtension) -import Data.Maybe (mapMaybe) -import Data.Time (getCurrentTime) -import Control.Monad - -import qualified Hackage.Security.Client as Sec - --- | 'update' downloads the package list from all known servers -update :: Verbosity -> UpdateFlags -> RepoContext -> IO () -update verbosity _ repoCtxt | null (repoContextRepos repoCtxt) = do - warn verbosity $ "No remote package servers have been specified. Usually " - ++ "you would have one specified in the config file." -update verbosity updateFlags repoCtxt = do - let repos = repoContextRepos repoCtxt - remoteRepos = mapMaybe maybeRepoRemote repos - case remoteRepos of - [] -> return () - [remoteRepo] -> - notice verbosity $ "Downloading the latest package list from " - ++ remoteRepoName remoteRepo - _ -> notice verbosity . unlines - $ "Downloading the latest package lists from: " - : map (("- " ++) . remoteRepoName) remoteRepos - jobCtrl <- newParallelJobControl (length repos) - mapM_ (spawnJob jobCtrl . updateRepo verbosity updateFlags repoCtxt) repos - mapM_ (\_ -> collectJob jobCtrl) repos - -updateRepo :: Verbosity -> UpdateFlags -> RepoContext -> Repo -> IO () -updateRepo verbosity updateFlags repoCtxt repo = do - transport <- repoContextGetTransport repoCtxt - case repo of - RepoLocal{..} -> return () - RepoRemote{..} -> do - downloadResult <- downloadIndex transport verbosity repoRemote repoLocalDir - case downloadResult of - FileAlreadyInCache -> - setModificationTime (indexBaseName repo <.> "tar") =<< getCurrentTime - FileDownloaded indexPath -> do - writeFileAtomic (dropExtension indexPath) . 
maybeDecompress - =<< BS.readFile indexPath - updateRepoIndexCache verbosity (RepoIndex repoCtxt repo) - RepoSecure{} -> repoContextWithSecureRepo repoCtxt repo $ \repoSecure -> do - let index = RepoIndex repoCtxt repo - -- NB: This may be a nullTimestamp if we've never updated before - current_ts <- currentIndexTimestamp (lessVerbose verbosity) repoCtxt repo - -- NB: always update the timestamp, even if we didn't actually - -- download anything - writeIndexTimestamp index (fromFlag (updateIndexState updateFlags)) - ce <- if repoContextIgnoreExpiry repoCtxt - then Just `fmap` getCurrentTime - else return Nothing - updated <- Sec.uncheckClientErrors $ Sec.checkForUpdates repoSecure ce - -- Update cabal's internal index as well so that it's not out of sync - -- (If all access to the cache goes through hackage-security this can go) - case updated of - Sec.NoUpdates -> - setModificationTime (indexBaseName repo <.> "tar") =<< getCurrentTime - Sec.HasUpdates -> - updateRepoIndexCache verbosity index - -- TODO: This will print multiple times if there are multiple - -- repositories: main problem is we don't have a way of updating - -- a specific repo. Once we implement that, update this. 
- when (current_ts /= nullTimestamp) $ - noticeNoWrap verbosity $ - "To revert to previous state run:\n" ++ - " cabal update --index-state='" ++ display current_ts ++ "'\n" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Upload.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Upload.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Upload.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Upload.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,222 +0,0 @@ -module Distribution.Client.Upload (upload, uploadDoc, report) where - -import Distribution.Client.Types ( Username(..), Password(..) - , RemoteRepo(..), maybeRepoRemote ) -import Distribution.Client.HttpUtils - ( HttpTransport(..), remoteRepoTryUpgradeToHttps ) -import Distribution.Client.Setup - ( IsCandidate(..), RepoContext(..) 
) - -import Distribution.Simple.Utils (notice, warn, info, die') -import Distribution.Verbosity (Verbosity) -import Distribution.Text (display) -import Distribution.Client.Config - -import qualified Distribution.Client.BuildReports.Anonymous as BuildReport -import qualified Distribution.Client.BuildReports.Upload as BuildReport - -import Network.URI (URI(uriPath)) -import Network.HTTP (Header(..), HeaderName(..)) - -import System.IO (hFlush, stdout) -import System.IO.Echo (withoutInputEcho) -import System.Exit (exitFailure) -import System.FilePath ((), takeExtension, takeFileName, dropExtension) -import qualified System.FilePath.Posix as FilePath.Posix (()) -import System.Directory -import Control.Monad (forM_, when, foldM) -import Data.Maybe (mapMaybe) -import Data.Char (isSpace) - -type Auth = Maybe (String, String) - --- > stripExtensions ["tar", "gz"] "foo.tar.gz" --- Just "foo" --- > stripExtensions ["tar", "gz"] "foo.gz.tar" --- Nothing -stripExtensions :: [String] -> FilePath -> Maybe String -stripExtensions exts path = foldM f path (reverse exts) - where - f p e - | takeExtension p == '.':e = Just (dropExtension p) - | otherwise = Nothing - -upload :: Verbosity -> RepoContext - -> Maybe Username -> Maybe Password -> IsCandidate -> [FilePath] - -> IO () -upload verbosity repoCtxt mUsername mPassword isCandidate paths = do - let repos = repoContextRepos repoCtxt - transport <- repoContextGetTransport repoCtxt - targetRepo <- - case [ remoteRepo | Just remoteRepo <- map maybeRepoRemote repos ] of - [] -> die' verbosity "Cannot upload. No remote repositories are configured." - rs -> remoteRepoTryUpgradeToHttps verbosity transport (last rs) - let targetRepoURI = remoteRepoURI targetRepo - rootIfEmpty x = if null x then "/" else x - uploadURI = targetRepoURI { - uriPath = rootIfEmpty (uriPath targetRepoURI) FilePath.Posix. 
- case isCandidate of - IsCandidate -> "packages/candidates" - IsPublished -> "upload" - } - packageURI pkgid = targetRepoURI { - uriPath = rootIfEmpty (uriPath targetRepoURI) - FilePath.Posix. concat - [ "package/", pkgid - , case isCandidate of - IsCandidate -> "/candidate" - IsPublished -> "" - ] - } - Username username <- maybe promptUsername return mUsername - Password password <- maybe promptPassword return mPassword - let auth = Just (username,password) - forM_ paths $ \path -> do - notice verbosity $ "Uploading " ++ path ++ "... " - case fmap takeFileName (stripExtensions ["tar", "gz"] path) of - Just pkgid -> handlePackage transport verbosity uploadURI - (packageURI pkgid) auth isCandidate path - -- This case shouldn't really happen, since we check in Main that we - -- only pass tar.gz files to upload. - Nothing -> die' verbosity $ "Not a tar.gz file: " ++ path - -uploadDoc :: Verbosity -> RepoContext - -> Maybe Username -> Maybe Password -> IsCandidate -> FilePath - -> IO () -uploadDoc verbosity repoCtxt mUsername mPassword isCandidate path = do - let repos = repoContextRepos repoCtxt - transport <- repoContextGetTransport repoCtxt - targetRepo <- - case [ remoteRepo | Just remoteRepo <- map maybeRepoRemote repos ] of - [] -> die' verbosity $ "Cannot upload. No remote repositories are configured." - rs -> remoteRepoTryUpgradeToHttps verbosity transport (last rs) - let targetRepoURI = remoteRepoURI targetRepo - rootIfEmpty x = if null x then "/" else x - uploadURI = targetRepoURI { - uriPath = rootIfEmpty (uriPath targetRepoURI) - FilePath.Posix. concat - [ "package/", pkgid - , case isCandidate of - IsCandidate -> "/candidate" - IsPublished -> "" - , "/docs" - ] - } - packageUri = targetRepoURI { - uriPath = rootIfEmpty (uriPath targetRepoURI) - FilePath.Posix. 
concat - [ "package/", pkgid - , case isCandidate of - IsCandidate -> "/candidate" - IsPublished -> "" - ] - } - (reverseSuffix, reversePkgid) = break (== '-') - (reverse (takeFileName path)) - pkgid = reverse $ tail reversePkgid - when (reverse reverseSuffix /= "docs.tar.gz" - || null reversePkgid || head reversePkgid /= '-') $ - die' verbosity "Expected a file name matching the pattern -docs.tar.gz" - Username username <- maybe promptUsername return mUsername - Password password <- maybe promptPassword return mPassword - - let auth = Just (username,password) - headers = - [ Header HdrContentType "application/x-tar" - , Header HdrContentEncoding "gzip" - ] - notice verbosity $ "Uploading documentation " ++ path ++ "... " - resp <- putHttpFile transport verbosity uploadURI path auth headers - case resp of - -- Hackage responds with 204 No Content when docs are uploaded - -- successfully. - (code,_) | code `elem` [200,204] -> do - notice verbosity $ okMessage packageUri - (code,err) -> do - notice verbosity $ "Error uploading documentation " - ++ path ++ ": " - ++ "http code " ++ show code ++ "\n" - ++ err - exitFailure - where - okMessage packageUri = case isCandidate of - IsCandidate -> - "Documentation successfully uploaded for package candidate. " - ++ "You can now preview the result at '" ++ show packageUri - ++ "'. To upload non-candidate documentation, use 'cabal upload --publish'." - IsPublished -> - "Package documentation successfully published. You can now view it at '" - ++ show packageUri ++ "'." 
- - -promptUsername :: IO Username -promptUsername = do - putStr "Hackage username: " - hFlush stdout - fmap Username getLine - -promptPassword :: IO Password -promptPassword = do - putStr "Hackage password: " - hFlush stdout - -- save/restore the terminal echoing status (no echoing for entering the password) - passwd <- withoutInputEcho $ fmap Password getLine - putStrLn "" - return passwd - -report :: Verbosity -> RepoContext -> Maybe Username -> Maybe Password -> IO () -report verbosity repoCtxt mUsername mPassword = do - Username username <- maybe promptUsername return mUsername - Password password <- maybe promptPassword return mPassword - let auth = (username, password) - repos = repoContextRepos repoCtxt - remoteRepos = mapMaybe maybeRepoRemote repos - forM_ remoteRepos $ \remoteRepo -> - do dotCabal <- getCabalDir - let srcDir = dotCabal "reports" remoteRepoName remoteRepo - -- We don't want to bomb out just because we haven't built any packages - -- from this repo yet. - srcExists <- doesDirectoryExist srcDir - when srcExists $ do - contents <- getDirectoryContents srcDir - forM_ (filter (\c -> takeExtension c ==".log") contents) $ \logFile -> - do inp <- readFile (srcDir logFile) - let (reportStr, buildLog) = read inp :: (String,String) -- TODO: eradicateNoParse - case BuildReport.parse reportStr of - Left errs -> warn verbosity $ "Errors: " ++ errs -- FIXME - Right report' -> - do info verbosity $ "Uploading report for " - ++ display (BuildReport.package report') - BuildReport.uploadReports verbosity repoCtxt auth - (remoteRepoURI remoteRepo) [(report', Just buildLog)] - return () - -handlePackage :: HttpTransport -> Verbosity -> URI -> URI -> Auth - -> IsCandidate -> FilePath -> IO () -handlePackage transport verbosity uri packageUri auth isCandidate path = - do resp <- postHttpFile transport verbosity uri path auth - case resp of - (code,warnings) | code `elem` [200, 204] -> - notice verbosity $ okMessage isCandidate ++ - if null warnings then "" else 
"\n" ++ formatWarnings (trim warnings) - (code,err) -> do - notice verbosity $ "Error uploading " ++ path ++ ": " - ++ "http code " ++ show code ++ "\n" - ++ err - exitFailure - where - okMessage IsCandidate = - "Package successfully uploaded as candidate. " - ++ "You can now preview the result at '" ++ show packageUri - ++ "'. To publish the candidate, use 'cabal upload --publish'." - okMessage IsPublished = - "Package successfully published. You can now view it at '" - ++ show packageUri ++ "'." - -formatWarnings :: String -> String -formatWarnings x = "Warnings:\n" ++ (unlines . map ("- " ++) . lines) x - --- Trim -trim :: String -> String -trim = f . f - where f = reverse . dropWhile isSpace diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Utils/Assertion.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Utils/Assertion.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Utils/Assertion.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Utils/Assertion.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -{-# LANGUAGE CPP #-} -module Distribution.Client.Utils.Assertion (expensiveAssert) where - -#ifdef DEBUG_EXPENSIVE_ASSERTIONS -import Control.Exception (assert) -import Distribution.Compat.Stack -#endif - --- | Like 'assert', but only enabled with -fdebug-expensive-assertions. This --- function can be used for expensive assertions that should only be turned on --- during testing or debugging. 
-#ifdef DEBUG_EXPENSIVE_ASSERTIONS -expensiveAssert :: WithCallStack (Bool -> a -> a) -expensiveAssert = assert -#else -expensiveAssert :: Bool -> a -> a -expensiveAssert _ = id -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Utils/Json.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Utils/Json.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Utils/Json.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Utils/Json.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,225 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} - --- | Minimal JSON / RFC 7159 support --- --- The API is heavily inspired by @aeson@'s API but puts emphasis on --- simplicity rather than performance. The 'ToJSON' instances are --- intended to have an encoding compatible with @aeson@'s encoding. --- -module Distribution.Client.Utils.Json - ( Value(..) - , Object, object, Pair, (.=) - , encodeToString - , encodeToBuilder - , ToJSON(toJSON) - ) - where - -import Data.Char -import Data.Int -import Data.String -import Data.Word -import Data.List -import Data.Monoid - -import Data.ByteString.Builder (Builder) -import qualified Data.ByteString.Builder as BB - --- TODO: We may want to replace 'String' with 'Text' or 'ByteString' - --- | A JSON value represented as a Haskell value. -data Value = Object !Object - | Array [Value] - | String String - | Number !Double - | Bool !Bool - | Null - deriving (Eq, Read, Show) - --- | A key\/value pair for an 'Object' -type Pair = (String, Value) - --- | A JSON \"object\" (key/value map). -type Object = [Pair] - -infixr 8 .= - --- | A key-value pair for encoding a JSON object. -(.=) :: ToJSON v => String -> v -> Pair -k .= v = (k, toJSON v) - --- | Create a 'Value' from a list of name\/value 'Pair's. 
-object :: [Pair] -> Value -object = Object - -instance IsString Value where - fromString = String - - --- | A type that can be converted to JSON. -class ToJSON a where - -- | Convert a Haskell value to a JSON-friendly intermediate type. - toJSON :: a -> Value - -instance ToJSON () where - toJSON () = Array [] - -instance ToJSON Value where - toJSON = id - -instance ToJSON Bool where - toJSON = Bool - -instance ToJSON a => ToJSON [a] where - toJSON = Array . map toJSON - -instance ToJSON a => ToJSON (Maybe a) where - toJSON Nothing = Null - toJSON (Just a) = toJSON a - -instance (ToJSON a,ToJSON b) => ToJSON (a,b) where - toJSON (a,b) = Array [toJSON a, toJSON b] - -instance (ToJSON a,ToJSON b,ToJSON c) => ToJSON (a,b,c) where - toJSON (a,b,c) = Array [toJSON a, toJSON b, toJSON c] - -instance (ToJSON a,ToJSON b,ToJSON c, ToJSON d) => ToJSON (a,b,c,d) where - toJSON (a,b,c,d) = Array [toJSON a, toJSON b, toJSON c, toJSON d] - -instance ToJSON Float where - toJSON = Number . realToFrac - -instance ToJSON Double where - toJSON = Number - -instance ToJSON Int where toJSON = Number . realToFrac -instance ToJSON Int8 where toJSON = Number . realToFrac -instance ToJSON Int16 where toJSON = Number . realToFrac -instance ToJSON Int32 where toJSON = Number . realToFrac - -instance ToJSON Word where toJSON = Number . realToFrac -instance ToJSON Word8 where toJSON = Number . realToFrac -instance ToJSON Word16 where toJSON = Number . realToFrac -instance ToJSON Word32 where toJSON = Number . realToFrac - --- | Possibly lossy due to conversion to 'Double' -instance ToJSON Int64 where toJSON = Number . realToFrac - --- | Possibly lossy due to conversion to 'Double' -instance ToJSON Word64 where toJSON = Number . realToFrac - --- | Possibly lossy due to conversion to 'Double' -instance ToJSON Integer where toJSON = Number . 
fromInteger - ------------------------------------------------------------------------------- --- 'BB.Builder'-based encoding - --- | Serialise value as JSON/UTF8-encoded 'Builder' -encodeToBuilder :: ToJSON a => a -> Builder -encodeToBuilder = encodeValueBB . toJSON - -encodeValueBB :: Value -> Builder -encodeValueBB jv = case jv of - Bool True -> "true" - Bool False -> "false" - Null -> "null" - Number n - | isNaN n || isInfinite n -> encodeValueBB Null - | Just i <- doubleToInt64 n -> BB.int64Dec i - | otherwise -> BB.doubleDec n - Array a -> encodeArrayBB a - String s -> encodeStringBB s - Object o -> encodeObjectBB o - -encodeArrayBB :: [Value] -> Builder -encodeArrayBB [] = "[]" -encodeArrayBB jvs = BB.char8 '[' <> go jvs <> BB.char8 ']' - where - go = Data.Monoid.mconcat . intersperse (BB.char8 ',') . map encodeValueBB - -encodeObjectBB :: Object -> Builder -encodeObjectBB [] = "{}" -encodeObjectBB jvs = BB.char8 '{' <> go jvs <> BB.char8 '}' - where - go = Data.Monoid.mconcat . intersperse (BB.char8 ',') . map encPair - encPair (l,x) = encodeStringBB l <> BB.char8 ':' <> encodeValueBB x - -encodeStringBB :: String -> Builder -encodeStringBB str = BB.char8 '"' <> go str <> BB.char8 '"' - where - go = BB.stringUtf8 . escapeString - ------------------------------------------------------------------------------- --- 'String'-based encoding - --- | Serialise value as JSON-encoded Unicode 'String' -encodeToString :: ToJSON a => a -> String -encodeToString jv = encodeValue (toJSON jv) [] - -encodeValue :: Value -> ShowS -encodeValue jv = case jv of - Bool b -> showString (if b then "true" else "false") - Null -> showString "null" - Number n - | isNaN n || isInfinite n -> encodeValue Null - | Just i <- doubleToInt64 n -> shows i - | otherwise -> shows n - Array a -> encodeArray a - String s -> encodeString s - Object o -> encodeObject o - -encodeArray :: [Value] -> ShowS -encodeArray [] = showString "[]" -encodeArray jvs = ('[':) . go jvs . 
(']':) - where - go [] = id - go [x] = encodeValue x - go (x:xs) = encodeValue x . (',':) . go xs - -encodeObject :: Object -> ShowS -encodeObject [] = showString "{}" -encodeObject jvs = ('{':) . go jvs . ('}':) - where - go [] = id - go [(l,x)] = encodeString l . (':':) . encodeValue x - go ((l,x):lxs) = encodeString l . (':':) . encodeValue x . (',':) . go lxs - -encodeString :: String -> ShowS -encodeString str = ('"':) . showString (escapeString str) . ('"':) - ------------------------------------------------------------------------------- --- helpers - --- | Try to convert 'Double' into 'Int64', return 'Nothing' if not --- representable loss-free as integral 'Int64' value. -doubleToInt64 :: Double -> Maybe Int64 -doubleToInt64 x - | fromInteger x' == x - , x' <= toInteger (maxBound :: Int64) - , x' >= toInteger (minBound :: Int64) - = Just (fromIntegral x') - | otherwise = Nothing - where - x' = round x - --- | Minimally escape a 'String' in accordance with RFC 7159, "7. Strings" -escapeString :: String -> String -escapeString s - | not (any needsEscape s) = s - | otherwise = escape s - where - escape [] = [] - escape (x:xs) = case x of - '\\' -> '\\':'\\':escape xs - '"' -> '\\':'"':escape xs - '\b' -> '\\':'b':escape xs - '\f' -> '\\':'f':escape xs - '\n' -> '\\':'n':escape xs - '\r' -> '\\':'r':escape xs - '\t' -> '\\':'t':escape xs - c | ord c < 0x10 -> '\\':'u':'0':'0':'0':intToDigit (ord c):escape xs - | ord c < 0x20 -> '\\':'u':'0':'0':'1':intToDigit (ord c - 0x10):escape xs - | otherwise -> c : escape xs - - -- unescaped = %x20-21 / %x23-5B / %x5D-10FFFF - needsEscape c = ord c < 0x20 || c `elem` ['\\','"'] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Utils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Utils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Utils.hs 2018-10-17 15:59:07.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Utils.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,358 +0,0 @@ -{-# LANGUAGE ForeignFunctionInterface, CPP #-} - -module Distribution.Client.Utils ( MergeResult(..) - , mergeBy, duplicates, duplicatesBy - , readMaybe - , inDir, withEnv, withEnvOverrides - , logDirChange, withExtraPathEnv - , determineNumJobs, numberOfProcessors - , removeExistingFile - , withTempFileName - , makeAbsoluteToCwd - , makeRelativeToCwd, makeRelativeToDir - , makeRelativeCanonical - , filePathToByteString - , byteStringToFilePath, tryCanonicalizePath - , canonicalizePathNoThrow - , moreRecentFile, existsAndIsMoreRecentThan - , tryFindAddSourcePackageDesc - , tryFindPackageDesc - , relaxEncodingErrors - , ProgressPhase (..) - , progressMessage) - where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Compat.Environment -import Distribution.Compat.Exception ( catchIO ) -import Distribution.Compat.Time ( getModTime ) -import Distribution.Simple.Setup ( Flag(..) ) -import Distribution.Verbosity -import Distribution.Simple.Utils ( die', findPackageDesc, noticeNoWrap ) -import qualified Data.ByteString.Lazy as BS -import Data.Bits - ( (.|.), shiftL, shiftR ) -import System.FilePath -import Control.Monad - ( mapM, mapM_, zipWithM_ ) -import Data.List - ( groupBy ) -import Foreign.C.Types ( CInt(..) 
) -import qualified Control.Exception as Exception - ( finally, bracket ) -import System.Directory - ( canonicalizePath, doesFileExist, getCurrentDirectory - , removeFile, setCurrentDirectory ) -import System.IO - ( Handle, hClose, openTempFile - , hGetEncoding, hSetEncoding - ) -import System.IO.Unsafe ( unsafePerformIO ) - -import GHC.IO.Encoding - ( recover, TextEncoding(TextEncoding) ) -import GHC.IO.Encoding.Failure - ( recoverEncode, CodingFailureMode(TransliterateCodingFailure) ) - -#if defined(mingw32_HOST_OS) || MIN_VERSION_directory(1,2,3) -import qualified System.Directory as Dir -import qualified System.IO.Error as IOError -#endif - --- | Generic merging utility. For sorted input lists this is a full outer join. --- -mergeBy :: (a -> b -> Ordering) -> [a] -> [b] -> [MergeResult a b] -mergeBy cmp = merge - where - merge [] ys = [ OnlyInRight y | y <- ys] - merge xs [] = [ OnlyInLeft x | x <- xs] - merge (x:xs) (y:ys) = - case x `cmp` y of - GT -> OnlyInRight y : merge (x:xs) ys - EQ -> InBoth x y : merge xs ys - LT -> OnlyInLeft x : merge xs (y:ys) - -data MergeResult a b = OnlyInLeft a | InBoth a b | OnlyInRight b - -duplicates :: Ord a => [a] -> [[a]] -duplicates = duplicatesBy compare - -duplicatesBy :: (a -> a -> Ordering) -> [a] -> [[a]] -duplicatesBy cmp = filter moreThanOne . groupBy eq . sortBy cmp - where - eq a b = case cmp a b of - EQ -> True - _ -> False - moreThanOne (_:_:_) = True - moreThanOne _ = False - --- | Like 'removeFile', but does not throw an exception when the file does not --- exist. -removeExistingFile :: FilePath -> IO () -removeExistingFile path = do - exists <- doesFileExist path - when exists $ - removeFile path - --- | A variant of 'withTempFile' that only gives us the file name, and while --- it will clean up the file afterwards, it's lenient if the file is --- moved\/deleted. 
--- -withTempFileName :: FilePath - -> String - -> (FilePath -> IO a) -> IO a -withTempFileName tmpDir template action = - Exception.bracket - (openTempFile tmpDir template) - (\(name, _) -> removeExistingFile name) - (\(name, h) -> hClose h >> action name) - --- | Executes the action in the specified directory. --- --- Warning: This operation is NOT thread-safe, because current --- working directory is a process-global concept. -inDir :: Maybe FilePath -> IO a -> IO a -inDir Nothing m = m -inDir (Just d) m = do - old <- getCurrentDirectory - setCurrentDirectory d - m `Exception.finally` setCurrentDirectory old - --- | Executes the action with an environment variable set to some --- value. --- --- Warning: This operation is NOT thread-safe, because current --- environment is a process-global concept. -withEnv :: String -> String -> IO a -> IO a -withEnv k v m = do - mb_old <- lookupEnv k - setEnv k v - m `Exception.finally` (case mb_old of - Nothing -> unsetEnv k - Just old -> setEnv k old) - --- | Executes the action with a list of environment variables and --- corresponding overrides, where --- --- * @'Just' v@ means \"set the environment variable's value to @v@\". --- * 'Nothing' means \"unset the environment variable\". --- --- Warning: This operation is NOT thread-safe, because current --- environment is a process-global concept. -withEnvOverrides :: [(String, Maybe FilePath)] -> IO a -> IO a -withEnvOverrides overrides m = do - mb_olds <- mapM lookupEnv envVars - mapM_ (uncurry update) overrides - m `Exception.finally` zipWithM_ update envVars mb_olds - where - envVars :: [String] - envVars = map fst overrides - - update :: String -> Maybe FilePath -> IO () - update var Nothing = unsetEnv var - update var (Just val) = setEnv var val - --- | Executes the action, increasing the PATH environment --- in some way --- --- Warning: This operation is NOT thread-safe, because the --- environment variables are a process-global concept. 
-withExtraPathEnv :: [FilePath] -> IO a -> IO a -withExtraPathEnv paths m = do - oldPathSplit <- getSearchPath - let newPath = mungePath $ intercalate [searchPathSeparator] (paths ++ oldPathSplit) - oldPath = mungePath $ intercalate [searchPathSeparator] oldPathSplit - -- TODO: This is a horrible hack to work around the fact that - -- setEnv can't take empty values as an argument - mungePath p | p == "" = "/dev/null" - | otherwise = p - setEnv "PATH" newPath - m `Exception.finally` setEnv "PATH" oldPath - --- | Log directory change in 'make' compatible syntax -logDirChange :: (String -> IO ()) -> Maybe FilePath -> IO a -> IO a -logDirChange _ Nothing m = m -logDirChange l (Just d) m = do - l $ "cabal: Entering directory '" ++ d ++ "'\n" - m `Exception.finally` - (l $ "cabal: Leaving directory '" ++ d ++ "'\n") - -foreign import ccall "getNumberOfProcessors" c_getNumberOfProcessors :: IO CInt - --- The number of processors is not going to change during the duration of the --- program, so unsafePerformIO is safe here. -numberOfProcessors :: Int -numberOfProcessors = fromEnum $ unsafePerformIO c_getNumberOfProcessors - --- | Determine the number of jobs to use given the value of the '-j' flag. -determineNumJobs :: Flag (Maybe Int) -> Int -determineNumJobs numJobsFlag = - case numJobsFlag of - NoFlag -> 1 - Flag Nothing -> numberOfProcessors - Flag (Just n) -> n - --- | Given a relative path, make it absolute relative to the current --- directory. Absolute paths are returned unmodified. -makeAbsoluteToCwd :: FilePath -> IO FilePath -makeAbsoluteToCwd path | isAbsolute path = return path - | otherwise = do cwd <- getCurrentDirectory - return $! cwd path - --- | Given a path (relative or absolute), make it relative to the current --- directory, including using @../..@ if necessary. 
-makeRelativeToCwd :: FilePath -> IO FilePath -makeRelativeToCwd path = - makeRelativeCanonical <$> canonicalizePath path <*> getCurrentDirectory - --- | Given a path (relative or absolute), make it relative to the given --- directory, including using @../..@ if necessary. -makeRelativeToDir :: FilePath -> FilePath -> IO FilePath -makeRelativeToDir path dir = - makeRelativeCanonical <$> canonicalizePath path <*> canonicalizePath dir - --- | Given a canonical absolute path and canonical absolute dir, make the path --- relative to the directory, including using @../..@ if necessary. Returns --- the original absolute path if it is not on the same drive as the given dir. -makeRelativeCanonical :: FilePath -> FilePath -> FilePath -makeRelativeCanonical path dir - | takeDrive path /= takeDrive dir = path - | otherwise = go (splitPath path) (splitPath dir) - where - go (p:ps) (d:ds) | p == d = go ps ds - go [] [] = "./" - go ps ds = joinPath (replicate (length ds) ".." ++ ps) - --- | Convert a 'FilePath' to a lazy 'ByteString'. Each 'Char' is --- encoded as a little-endian 'Word32'. -filePathToByteString :: FilePath -> BS.ByteString -filePathToByteString p = - BS.pack $ foldr conv [] codepts - where - codepts :: [Word32] - codepts = map (fromIntegral . ord) p - - conv :: Word32 -> [Word8] -> [Word8] - conv w32 rest = b0:b1:b2:b3:rest - where - b0 = fromIntegral $ w32 - b1 = fromIntegral $ w32 `shiftR` 8 - b2 = fromIntegral $ w32 `shiftR` 16 - b3 = fromIntegral $ w32 `shiftR` 24 - --- | Reverse operation to 'filePathToByteString'. -byteStringToFilePath :: BS.ByteString -> FilePath -byteStringToFilePath bs | bslen `mod` 4 /= 0 = unexpected - | otherwise = go 0 - where - unexpected = "Distribution.Client.Utils.byteStringToFilePath: unexpected" - bslen = BS.length bs - - go i | i == bslen = [] - | otherwise = (chr . fromIntegral $ w32) : go (i+4) - where - w32 :: Word32 - w32 = b0 .|. (b1 `shiftL` 8) .|. (b2 `shiftL` 16) .|. 
(b3 `shiftL` 24) - b0 = fromIntegral $ BS.index bs i - b1 = fromIntegral $ BS.index bs (i + 1) - b2 = fromIntegral $ BS.index bs (i + 2) - b3 = fromIntegral $ BS.index bs (i + 3) - --- | Workaround for the inconsistent behaviour of 'canonicalizePath'. Always --- throws an error if the path refers to a non-existent file. -tryCanonicalizePath :: FilePath -> IO FilePath -tryCanonicalizePath path = do - ret <- canonicalizePath path -#if defined(mingw32_HOST_OS) || MIN_VERSION_directory(1,2,3) - exists <- liftM2 (||) (doesFileExist ret) (Dir.doesDirectoryExist ret) - unless exists $ - IOError.ioError $ IOError.mkIOError IOError.doesNotExistErrorType "canonicalizePath" - Nothing (Just ret) -#endif - return ret - --- | A non-throwing wrapper for 'canonicalizePath'. If 'canonicalizePath' throws --- an exception, returns the path argument unmodified. -canonicalizePathNoThrow :: FilePath -> IO FilePath -canonicalizePathNoThrow path = do - canonicalizePath path `catchIO` (\_ -> return path) - --------------------- --- Modification time - --- | Like Distribution.Simple.Utils.moreRecentFile, but uses getModTime instead --- of getModificationTime for higher precision. We can't merge the two because --- Distribution.Client.Time uses MIN_VERSION macros. -moreRecentFile :: FilePath -> FilePath -> IO Bool -moreRecentFile a b = do - exists <- doesFileExist b - if not exists - then return True - else do tb <- getModTime b - ta <- getModTime a - return (ta > tb) - --- | Like 'moreRecentFile', but also checks that the first file exists. -existsAndIsMoreRecentThan :: FilePath -> FilePath -> IO Bool -existsAndIsMoreRecentThan a b = do - exists <- doesFileExist a - if not exists - then return False - else a `moreRecentFile` b - --- | Sets the handler for encoding errors to one that transliterates invalid --- characters into one present in the encoding (i.e., \'?\'). --- This is opposed to the default behavior, which is to throw an exception on --- error. 
This function will ignore file handles that have a Unicode encoding --- set. It's a no-op for versions of `base` less than 4.4. -relaxEncodingErrors :: Handle -> IO () -relaxEncodingErrors handle = do - maybeEncoding <- hGetEncoding handle - case maybeEncoding of - Just (TextEncoding name decoder encoder) | not ("UTF" `isPrefixOf` name) -> - let relax x = x { recover = recoverEncode TransliterateCodingFailure } - in hSetEncoding handle (TextEncoding name decoder (fmap relax encoder)) - _ -> - return () - --- |Like 'tryFindPackageDesc', but with error specific to add-source deps. -tryFindAddSourcePackageDesc :: Verbosity -> FilePath -> String -> IO FilePath -tryFindAddSourcePackageDesc verbosity depPath err = tryFindPackageDesc verbosity depPath $ - err ++ "\n" ++ "Failed to read cabal file of add-source dependency: " - ++ depPath - --- |Try to find a @.cabal@ file, in directory @depPath@. Fails if one cannot be --- found, with @err@ prefixing the error message. This function simply allows --- us to give a more descriptive error than that provided by @findPackageDesc@. -tryFindPackageDesc :: Verbosity -> FilePath -> String -> IO FilePath -tryFindPackageDesc verbosity depPath err = do - errOrCabalFile <- findPackageDesc depPath - case errOrCabalFile of - Right file -> return file - Left _ -> die' verbosity err - --- | Phase of building a dependency. Represents current status of package --- dependency processing. See #4040 for details. 
-data ProgressPhase - = ProgressDownloading - | ProgressDownloaded - | ProgressStarting - | ProgressBuilding - | ProgressHaddock - | ProgressInstalling - | ProgressCompleted - -progressMessage :: Verbosity -> ProgressPhase -> String -> IO () -progressMessage verbosity phase subject = do - noticeNoWrap verbosity $ phaseStr ++ subject ++ "\n" - where - phaseStr = case phase of - ProgressDownloading -> "Downloading " - ProgressDownloaded -> "Downloaded " - ProgressStarting -> "Starting " - ProgressBuilding -> "Building " - ProgressHaddock -> "Haddock " - ProgressInstalling -> "Installing " - ProgressCompleted -> "Completed " diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/VCS.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/VCS.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/VCS.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/VCS.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,518 +0,0 @@ -{-# LANGUAGE NamedFieldPuns, RecordWildCards #-} -module Distribution.Client.VCS ( - -- * VCS driver type - VCS, - vcsRepoType, - vcsProgram, - -- ** Type re-exports - SourceRepo, - RepoType, - RepoKind, - Program, - ConfiguredProgram, - - -- * Selecting amongst source repos - selectPackageSourceRepo, - - -- * Validating 'SourceRepo's and configuring VCS drivers - validateSourceRepo, - validateSourceRepos, - SourceRepoProblem(..), - configureVCS, - configureVCSs, - - -- * Running the VCS driver - cloneSourceRepo, - syncSourceRepos, - - -- * The individual VCS drivers - knownVCSs, - vcsBzr, - vcsDarcs, - vcsGit, - vcsHg, - vcsSvn, - ) where - -import Prelude () -import Distribution.Client.Compat.Prelude - -import Distribution.Types.SourceRepo - ( SourceRepo(..), RepoType(..), RepoKind(..) 
) -import Distribution.Client.RebuildMonad - ( Rebuild, monitorFiles, MonitorFilePath, monitorDirectoryExistence ) -import Distribution.Verbosity as Verbosity - ( Verbosity, normal ) -import Distribution.Simple.Program - ( Program(programFindVersion) - , ConfiguredProgram(programVersion) - , simpleProgram, findProgramVersion - , ProgramInvocation(..), programInvocation, runProgramInvocation - , emptyProgramDb, requireProgram ) -import Distribution.Version - ( mkVersion ) - -import Control.Monad - ( mapM_ ) -import Control.Monad.Trans - ( liftIO ) -import qualified Data.Char as Char -import qualified Data.Map as Map -import Data.Ord - ( comparing ) -import Data.Either - ( partitionEithers ) -import System.FilePath - ( takeDirectory ) -import System.Directory - ( doesDirectoryExist ) - - --- | A driver for a version control system, e.g. git, darcs etc. --- -data VCS program = VCS { - -- | The type of repository this driver is for. - vcsRepoType :: RepoType, - - -- | The vcs program itself. - -- This is used at type 'Program' and 'ConfiguredProgram'. - vcsProgram :: program, - - -- | The program invocation(s) to get\/clone a repository into a fresh - -- local directory. - vcsCloneRepo :: Verbosity - -> ConfiguredProgram - -> SourceRepo - -> FilePath -- Source URI - -> FilePath -- Destination directory - -> [ProgramInvocation], - - -- | The program invocation(s) to synchronise a whole set of /related/ - -- repositories with corresponding local directories. Also returns the - -- files that the command depends on, for change monitoring. - vcsSyncRepos :: Verbosity - -> ConfiguredProgram - -> [(SourceRepo, FilePath)] - -> IO [MonitorFilePath] - } - - --- ------------------------------------------------------------ --- * Selecting repos and drivers --- ------------------------------------------------------------ - --- | Pick the 'SourceRepo' to use to get the package sources from. 
--- --- Note that this does /not/ depend on what 'VCS' drivers we are able to --- successfully configure. It is based only on the 'SourceRepo's declared --- in the package, and optionally on a preferred 'RepoKind'. --- -selectPackageSourceRepo :: Maybe RepoKind - -> [SourceRepo] - -> Maybe SourceRepo -selectPackageSourceRepo preferredRepoKind = - listToMaybe - -- Sort repositories by kind, from This to Head to Unknown. Repositories - -- with equivalent kinds are selected based on the order they appear in - -- the Cabal description file. - . sortBy (comparing thisFirst) - -- If the user has specified the repo kind, filter out the repositories - -- they're not interested in. - . filter (\repo -> maybe True (repoKind repo ==) preferredRepoKind) - where - thisFirst :: SourceRepo -> Int - thisFirst r = case repoKind r of - RepoThis -> 0 - RepoHead -> case repoTag r of - -- If the type is 'head' but the author specified a tag, they - -- probably meant to create a 'this' repository but screwed up. - Just _ -> 0 - Nothing -> 1 - RepoKindUnknown _ -> 2 - -data SourceRepoProblem = SourceRepoRepoTypeUnspecified - | SourceRepoRepoTypeUnsupported RepoType - | SourceRepoLocationUnspecified - deriving Show - --- | Validates that the 'SourceRepo' specifies a location URI and a repository --- type that is supported by a VCS driver. --- --- | It also returns the 'VCS' driver we should use to work with it. --- -validateSourceRepo :: SourceRepo - -> Either SourceRepoProblem - (SourceRepo, String, RepoType, VCS Program) -validateSourceRepo = \repo -> do - rtype <- repoType repo ?! SourceRepoRepoTypeUnspecified - vcs <- Map.lookup rtype knownVCSs ?! SourceRepoRepoTypeUnsupported rtype - uri <- repoLocation repo ?! SourceRepoLocationUnspecified - return (repo, uri, rtype, vcs) - where - a ?! e = maybe (Left e) Right a - - --- | As 'validateSourceRepo' but for a bunch of 'SourceRepo's, and return --- things in a convenient form to pass to 'configureVCSs', or to report --- problems. 
--- -validateSourceRepos :: [SourceRepo] - -> Either [(SourceRepo, SourceRepoProblem)] - [(SourceRepo, String, RepoType, VCS Program)] -validateSourceRepos rs = - case partitionEithers (map validateSourceRepo' rs) of - (problems@(_:_), _) -> Left problems - ([], vcss) -> Right vcss - where - validateSourceRepo' r = either (Left . (,) r) Right - (validateSourceRepo r) - - -configureVCS :: Verbosity - -> VCS Program - -> IO (VCS ConfiguredProgram) -configureVCS verbosity vcs@VCS{vcsProgram = prog} = - asVcsConfigured <$> requireProgram verbosity prog emptyProgramDb - where - asVcsConfigured (prog', _) = vcs { vcsProgram = prog' } - -configureVCSs :: Verbosity - -> Map RepoType (VCS Program) - -> IO (Map RepoType (VCS ConfiguredProgram)) -configureVCSs verbosity = traverse (configureVCS verbosity) - - --- ------------------------------------------------------------ --- * Running the driver --- ------------------------------------------------------------ - --- | Clone a single source repo into a fresh directory, using a configured VCS. --- --- This is for making a new copy, not synchronising an existing copy. It will --- fail if the destination directory already exists. --- --- Make sure to validate the 'SourceRepo' using 'validateSourceRepo' first. --- -cloneSourceRepo :: Verbosity - -> VCS ConfiguredProgram - -> SourceRepo -- ^ Must have 'repoLocation' filled. - -> FilePath -- ^ Destination directory - -> IO () -cloneSourceRepo _ _ repo@SourceRepo{ repoLocation = Nothing } _ = - error $ "cloneSourceRepo: precondition violation, missing repoLocation: \"" - ++ show repo ++ "\". Validate using validateSourceRepo first." - -cloneSourceRepo verbosity vcs - repo@SourceRepo{ repoLocation = Just srcuri } destdir = - mapM_ (runProgramInvocation verbosity) invocations - where - invocations = vcsCloneRepo vcs verbosity - (vcsProgram vcs) repo - srcuri destdir - - --- | Syncronise a set of 'SourceRepo's referring to the same repository with --- corresponding local directories. 
The local directories may or may not --- already exist. --- --- The 'SourceRepo' values used in a single invocation of 'syncSourceRepos', --- or used across a series of invocations with any local directory must refer --- to the /same/ repository. That means it must be the same location but they --- can differ in the branch, or tag or subdir. --- --- The reason to allow multiple related 'SourceRepo's is to allow for the --- network or storage to be shared between different checkouts of the repo. --- For example if a single repo contains multiple packages in different subdirs --- and in some project it may make sense to use a different state of the repo --- for one subdir compared to another. --- -syncSourceRepos :: Verbosity - -> VCS ConfiguredProgram - -> [(SourceRepo, FilePath)] - -> Rebuild () -syncSourceRepos verbosity vcs repos = do - files <- liftIO $ vcsSyncRepos vcs verbosity (vcsProgram vcs) repos - monitorFiles files - - --- ------------------------------------------------------------ --- * The various VCS drivers --- ------------------------------------------------------------ - --- | The set of all supported VCS drivers, organised by 'RepoType'. --- -knownVCSs :: Map RepoType (VCS Program) -knownVCSs = Map.fromList [ (vcsRepoType vcs, vcs) | vcs <- vcss ] - where - vcss = [ vcsBzr, vcsDarcs, vcsGit, vcsHg, vcsSvn ] - - --- | VCS driver for Bazaar. 
--- -vcsBzr :: VCS Program -vcsBzr = - VCS { - vcsRepoType = Bazaar, - vcsProgram = bzrProgram, - vcsCloneRepo, - vcsSyncRepos - } - where - vcsCloneRepo :: Verbosity - -> ConfiguredProgram - -> SourceRepo - -> FilePath - -> FilePath - -> [ProgramInvocation] - vcsCloneRepo verbosity prog repo srcuri destdir = - [ programInvocation prog - ([branchCmd, srcuri, destdir] ++ tagArgs ++ verboseArg) ] - where - -- The @get@ command was deprecated in version 2.4 in favour of - -- the alias @branch@ - branchCmd | programVersion prog >= Just (mkVersion [2,4]) - = "branch" - | otherwise = "get" - - tagArgs = case repoTag repo of - Nothing -> [] - Just tag -> ["-r", "tag:" ++ tag] - verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] - - vcsSyncRepos :: Verbosity -> ConfiguredProgram - -> [(SourceRepo, FilePath)] -> IO [MonitorFilePath] - vcsSyncRepos _v _p _rs = fail "sync repo not yet supported for bzr" - -bzrProgram :: Program -bzrProgram = (simpleProgram "bzr") { - programFindVersion = findProgramVersion "--version" $ \str -> - case words str of - -- "Bazaar (bzr) 2.6.0\n ... lots of extra stuff" - (_:_:ver:_) -> ver - _ -> "" - } - - --- | VCS driver for Darcs. --- -vcsDarcs :: VCS Program -vcsDarcs = - VCS { - vcsRepoType = Darcs, - vcsProgram = darcsProgram, - vcsCloneRepo, - vcsSyncRepos - } - where - vcsCloneRepo :: Verbosity - -> ConfiguredProgram - -> SourceRepo - -> FilePath - -> FilePath - -> [ProgramInvocation] - vcsCloneRepo verbosity prog repo srcuri destdir = - [ programInvocation prog cloneArgs ] - where - cloneArgs = [cloneCmd, srcuri, destdir] ++ tagArgs ++ verboseArg - -- At some point the @clone@ command was introduced as an alias for - -- @get@, and @clone@ seems to be the recommended one now. 
- cloneCmd | programVersion prog >= Just (mkVersion [2,8]) - = "clone" - | otherwise = "get" - tagArgs = case repoTag repo of - Nothing -> [] - Just tag -> ["-t", tag] - verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] - - vcsSyncRepos :: Verbosity -> ConfiguredProgram - -> [(SourceRepo, FilePath)] -> IO [MonitorFilePath] - vcsSyncRepos _v _p _rs = fail "sync repo not yet supported for darcs" - -darcsProgram :: Program -darcsProgram = (simpleProgram "darcs") { - programFindVersion = findProgramVersion "--version" $ \str -> - case words str of - -- "2.8.5 (release)" - (ver:_) -> ver - _ -> "" - } - - --- | VCS driver for Git. --- -vcsGit :: VCS Program -vcsGit = - VCS { - vcsRepoType = Git, - vcsProgram = gitProgram, - vcsCloneRepo, - vcsSyncRepos - } - where - vcsCloneRepo :: Verbosity - -> ConfiguredProgram - -> SourceRepo - -> FilePath - -> FilePath - -> [ProgramInvocation] - vcsCloneRepo verbosity prog repo srcuri destdir = - [ programInvocation prog cloneArgs ] - -- And if there's a tag, we have to do that in a second step: - ++ [ (programInvocation prog (checkoutArgs tag)) { - progInvokeCwd = Just destdir - } - | tag <- maybeToList (repoTag repo) ] - where - cloneArgs = ["clone", srcuri, destdir] - ++ branchArgs ++ verboseArg - branchArgs = case repoBranch repo of - Just b -> ["--branch", b] - Nothing -> [] - checkoutArgs tag = "checkout" : verboseArg ++ [tag, "--"] - verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] - - vcsSyncRepos :: Verbosity - -> ConfiguredProgram - -> [(SourceRepo, FilePath)] - -> IO [MonitorFilePath] - vcsSyncRepos _ _ [] = return [] - vcsSyncRepos verbosity gitProg - ((primaryRepo, primaryLocalDir) : secondaryRepos) = do - - vcsSyncRepo verbosity gitProg primaryRepo primaryLocalDir Nothing - sequence_ - [ vcsSyncRepo verbosity gitProg repo localDir (Just primaryLocalDir) - | (repo, localDir) <- secondaryRepos ] - return [ monitorDirectoryExistence dir - | dir <- (primaryLocalDir : map snd secondaryRepos) ] - - 
vcsSyncRepo verbosity gitProg SourceRepo{..} localDir peer = do - exists <- doesDirectoryExist localDir - if exists - then git localDir ["fetch"] - else git (takeDirectory localDir) cloneArgs - git localDir checkoutArgs - where - git :: FilePath -> [String] -> IO () - git cwd args = runProgramInvocation verbosity $ - (programInvocation gitProg args) { - progInvokeCwd = Just cwd - } - - cloneArgs = ["clone", "--no-checkout", loc, localDir] - ++ case peer of - Nothing -> [] - Just peerLocalDir -> ["--reference", peerLocalDir] - ++ verboseArg - where Just loc = repoLocation - checkoutArgs = "checkout" : verboseArg ++ ["--detach", "--force" - , checkoutTarget, "--" ] - checkoutTarget = fromMaybe "HEAD" (repoBranch `mplus` repoTag) - verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] - -gitProgram :: Program -gitProgram = (simpleProgram "git") { - programFindVersion = findProgramVersion "--version" $ \str -> - case words str of - -- "git version 2.5.5" - (_:_:ver:_) | all isTypical ver -> ver - - -- or annoyingly "git version 2.17.1.windows.2" yes, really - (_:_:ver:_) -> intercalate "." - . takeWhile (all isNum) - . split - $ ver - _ -> "" - } - where - isNum c = c >= '0' && c <= '9' - isTypical c = isNum c || c == '.' - split cs = case break (=='.') cs of - (chunk,[]) -> chunk : [] - (chunk,_:rest) -> chunk : split rest - --- | VCS driver for Mercurial. 
--- -vcsHg :: VCS Program -vcsHg = - VCS { - vcsRepoType = Mercurial, - vcsProgram = hgProgram, - vcsCloneRepo, - vcsSyncRepos - } - where - vcsCloneRepo :: Verbosity - -> ConfiguredProgram - -> SourceRepo - -> FilePath - -> FilePath - -> [ProgramInvocation] - vcsCloneRepo verbosity prog repo srcuri destdir = - [ programInvocation prog cloneArgs ] - where - cloneArgs = ["clone", srcuri, destdir] - ++ branchArgs ++ tagArgs ++ verboseArg - branchArgs = case repoBranch repo of - Just b -> ["--branch", b] - Nothing -> [] - tagArgs = case repoTag repo of - Just t -> ["--rev", t] - Nothing -> [] - verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] - - vcsSyncRepos :: Verbosity - -> ConfiguredProgram - -> [(SourceRepo, FilePath)] - -> IO [MonitorFilePath] - vcsSyncRepos _v _p _rs = fail "sync repo not yet supported for hg" - -hgProgram :: Program -hgProgram = (simpleProgram "hg") { - programFindVersion = findProgramVersion "--version" $ \str -> - case words str of - -- Mercurial Distributed SCM (version 3.5.2)\n ... long message - (_:_:_:_:ver:_) -> takeWhile (\c -> Char.isDigit c || c == '.') ver - _ -> "" - } - - --- | VCS driver for Subversion. --- -vcsSvn :: VCS Program -vcsSvn = - VCS { - vcsRepoType = SVN, - vcsProgram = svnProgram, - vcsCloneRepo, - vcsSyncRepos - } - where - vcsCloneRepo :: Verbosity - -> ConfiguredProgram - -> SourceRepo - -> FilePath - -> FilePath - -> [ProgramInvocation] - vcsCloneRepo verbosity prog _repo srcuri destdir = - [ programInvocation prog checkoutArgs ] - where - checkoutArgs = ["checkout", srcuri, destdir] ++ verboseArg - verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] - --TODO: branch or tag? 
- - vcsSyncRepos :: Verbosity - -> ConfiguredProgram - -> [(SourceRepo, FilePath)] - -> IO [MonitorFilePath] - vcsSyncRepos _v _p _rs = fail "sync repo not yet supported for svn" - -svnProgram :: Program -svnProgram = (simpleProgram "svn") { - programFindVersion = findProgramVersion "--version" $ \str -> - case words str of - -- svn, version 1.9.4 (r1740329)\n ... long message - (_:_:ver:_) -> ver - _ -> "" - } - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Win32SelfUpgrade.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Win32SelfUpgrade.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/Win32SelfUpgrade.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/Win32SelfUpgrade.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,225 +0,0 @@ -{-# LANGUAGE CPP, ForeignFunctionInterface #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.Win32SelfUpgrade --- Copyright : (c) Duncan Coutts 2008 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Stability : provisional --- Portability : portable --- --- Support for self-upgrading executables on Windows platforms. ------------------------------------------------------------------------------ -module Distribution.Client.Win32SelfUpgrade ( --- * Explanation --- --- | Windows inherited a design choice from DOS that while initially innocuous --- has rather unfortunate consequences. It maintains the invariant that every --- open file has a corresponding name on disk. One positive consequence of this --- is that an executable can always find its own executable file. The downside --- is that a program cannot be deleted or upgraded while it is running without --- hideous workarounds. 
This module implements one such hideous workaround. --- --- The basic idea is: --- --- * Move our own exe file to a new name --- * Copy a new exe file to the previous name --- * Run the new exe file, passing our own PID and new path --- * Wait for the new process to start --- * Close the new exe file --- * Exit old process --- --- Then in the new process: --- --- * Inform the old process that we've started --- * Wait for the old process to die --- * Delete the old exe file --- * Exit new process --- - - possibleSelfUpgrade, - deleteOldExeFile, - ) where - -#ifdef mingw32_HOST_OS - -import qualified System.Win32 as Win32 -import System.Win32 (DWORD, BOOL, HANDLE, LPCTSTR) -import Foreign.Ptr (Ptr, nullPtr) -import System.Process (runProcess) -import System.Directory (canonicalizePath) -import System.FilePath (takeBaseName, replaceBaseName, equalFilePath) - -import Distribution.Verbosity as Verbosity (Verbosity, showForCabal) -import Distribution.Simple.Utils (debug, info) - -import Prelude hiding (log) - --- | If one of the given files is our own exe file then we arrange things such --- that the nested action can replace our own exe file. --- --- We require that the new process accepts a command line invocation that --- calls 'deleteOldExeFile', passing in the PID and exe file. --- -possibleSelfUpgrade :: Verbosity - -> [FilePath] - -> IO a -> IO a -possibleSelfUpgrade verbosity newPaths action = do - dstPath <- canonicalizePath =<< Win32.getModuleFileName Win32.nullHANDLE - - newPaths' <- mapM canonicalizePath newPaths - let doingSelfUpgrade = any (equalFilePath dstPath) newPaths' - - if not doingSelfUpgrade - then action - else do - info verbosity $ "cabal-install does the replace-own-exe-file dance..." 
- tmpPath <- moveOurExeOutOfTheWay verbosity - result <- action - scheduleOurDemise verbosity dstPath tmpPath - (\pid path -> ["win32selfupgrade", pid, path - ,"--verbose=" ++ Verbosity.showForCabal verbosity]) - return result - --- | The name of a Win32 Event object that we use to synchronise between the --- old and new processes. We need to synchronise to make sure that the old --- process has not yet terminated by the time the new one starts up and looks --- for the old process. Otherwise the old one might have already terminated --- and we could not wait on it terminating reliably (eg the PID might get --- re-used). --- -syncEventName :: String -syncEventName = "Local\\cabal-install-upgrade" - --- | The first part of allowing our exe file to be replaced is to move the --- existing exe file out of the way. Although we cannot delete our exe file --- while we're still running, fortunately we can rename it, at least within --- the same directory. --- -moveOurExeOutOfTheWay :: Verbosity -> IO FilePath -moveOurExeOutOfTheWay verbosity = do - ourPID <- getCurrentProcessId - dstPath <- Win32.getModuleFileName Win32.nullHANDLE - - let tmpPath = replaceBaseName dstPath (takeBaseName dstPath ++ show ourPID) - - debug verbosity $ "moving " ++ dstPath ++ " to " ++ tmpPath - Win32.moveFile dstPath tmpPath - return tmpPath - --- | Assuming we've now installed the new exe file in the right place, we --- launch it and ask it to delete our exe file when we eventually terminate. 
--- -scheduleOurDemise :: Verbosity -> FilePath -> FilePath - -> (String -> FilePath -> [String]) -> IO () -scheduleOurDemise verbosity dstPath tmpPath mkArgs = do - ourPID <- getCurrentProcessId - event <- createEvent syncEventName - - let args = mkArgs (show ourPID) tmpPath - log $ "launching child " ++ unwords (dstPath : map show args) - _ <- runProcess dstPath args Nothing Nothing Nothing Nothing Nothing - - log $ "waiting for the child to start up" - waitForSingleObject event (10*1000) -- wait at most 10 sec - log $ "child started ok" - - where - log msg = debug verbosity ("Win32Reinstall.parent: " ++ msg) - --- | Assuming we're now in the new child process, we've been asked by the old --- process to wait for it to terminate and then we can remove the old exe file --- that it renamed itself to. --- -deleteOldExeFile :: Verbosity -> Int -> FilePath -> IO () -deleteOldExeFile verbosity oldPID tmpPath = do - log $ "process started. Will delete exe file of process " - ++ show oldPID ++ " at path " ++ tmpPath - - log $ "getting handle of parent process " ++ show oldPID - oldPHANDLE <- Win32.openProcess Win32.sYNCHORNIZE False (fromIntegral oldPID) - - log $ "synchronising with parent" - event <- openEvent syncEventName - setEvent event - - log $ "waiting for parent process to terminate" - waitForSingleObject oldPHANDLE Win32.iNFINITE - log $ "parent process terminated" - - log $ "deleting parent's old .exe file" - Win32.deleteFile tmpPath - - where - log msg = debug verbosity ("Win32Reinstall.child: " ++ msg) - ------------------------- --- Win32 foreign imports --- - --- A bunch of functions sadly not provided by the Win32 package. 
- -#ifdef x86_64_HOST_ARCH -#define CALLCONV ccall -#else -#define CALLCONV stdcall -#endif - -foreign import CALLCONV unsafe "windows.h GetCurrentProcessId" - getCurrentProcessId :: IO DWORD - -foreign import CALLCONV unsafe "windows.h WaitForSingleObject" - waitForSingleObject_ :: HANDLE -> DWORD -> IO DWORD - -waitForSingleObject :: HANDLE -> DWORD -> IO () -waitForSingleObject handle timeout = - Win32.failIf_ bad "WaitForSingleObject" $ - waitForSingleObject_ handle timeout - where - bad result = not (result == 0 || result == wAIT_TIMEOUT) - wAIT_TIMEOUT = 0x00000102 - -foreign import CALLCONV unsafe "windows.h CreateEventW" - createEvent_ :: Ptr () -> BOOL -> BOOL -> LPCTSTR -> IO HANDLE - -createEvent :: String -> IO HANDLE -createEvent name = do - Win32.failIfNull "CreateEvent" $ - Win32.withTString name $ - createEvent_ nullPtr False False - -foreign import CALLCONV unsafe "windows.h OpenEventW" - openEvent_ :: DWORD -> BOOL -> LPCTSTR -> IO HANDLE - -openEvent :: String -> IO HANDLE -openEvent name = do - Win32.failIfNull "OpenEvent" $ - Win32.withTString name $ - openEvent_ eVENT_MODIFY_STATE False - where - eVENT_MODIFY_STATE :: DWORD - eVENT_MODIFY_STATE = 0x0002 - -foreign import CALLCONV unsafe "windows.h SetEvent" - setEvent_ :: HANDLE -> IO BOOL - -setEvent :: HANDLE -> IO () -setEvent handle = - Win32.failIfFalse_ "SetEvent" $ - setEvent_ handle - -#else - -import Distribution.Verbosity (Verbosity) -import Distribution.Simple.Utils (die') - -possibleSelfUpgrade :: Verbosity - -> [FilePath] - -> IO a -> IO a -possibleSelfUpgrade _ _ action = action - -deleteOldExeFile :: Verbosity -> Int -> FilePath -> IO () -deleteOldExeFile verbosity _ _ = die' verbosity "win32selfupgrade not needed except on win32" - -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/World.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/World.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Client/World.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Client/World.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,173 +0,0 @@ ------------------------------------------------------------------------------ --- | --- Module : Distribution.Client.World --- Copyright : (c) Peter Robinson 2009 --- License : BSD-like --- --- Maintainer : thaldyron@gmail.com --- Stability : provisional --- Portability : portable --- --- Interface to the world-file that contains a list of explicitly --- requested packages. Meant to be imported qualified. --- --- A world file entry stores the package-name, package-version, and --- user flags. --- For example, the entry generated by --- # cabal install stm-io-hooks --flags="-debug" --- looks like this: --- # stm-io-hooks -any --flags="-debug" --- To rebuild/upgrade the packages in world (e.g. when updating the compiler) --- use --- # cabal install world --- ------------------------------------------------------------------------------ -module Distribution.Client.World ( - WorldPkgInfo(..), - insert, - delete, - getContents, - ) where - -import Prelude (sequence) -import Distribution.Client.Compat.Prelude hiding (getContents) - -import Distribution.Types.Dependency -import Distribution.PackageDescription - ( FlagAssignment, mkFlagAssignment, unFlagAssignment - , mkFlagName, unFlagName ) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Simple.Utils - ( die', info, chattyTry, writeFileAtomic ) -import Distribution.Text - ( Text(..), display, simpleParse ) -import qualified Distribution.Compat.ReadP as Parse -import Distribution.Compat.Exception ( catchIO ) -import qualified Text.PrettyPrint as Disp - - -import Data.Char as Char - -import Data.List - ( unionBy, deleteFirstsBy ) -import System.IO.Error - ( isDoesNotExistError ) -import qualified 
Data.ByteString.Lazy.Char8 as B - - -data WorldPkgInfo = WorldPkgInfo Dependency FlagAssignment - deriving (Show,Eq) - --- | Adds packages to the world file; creates the file if it doesn't --- exist yet. Version constraints and flag assignments for a package are --- updated if already present. IO errors are non-fatal. -insert :: Verbosity -> FilePath -> [WorldPkgInfo] -> IO () -insert = modifyWorld $ unionBy equalUDep - --- | Removes packages from the world file. --- Note: Currently unused as there is no mechanism in Cabal (yet) to --- handle uninstalls. IO errors are non-fatal. -delete :: Verbosity -> FilePath -> [WorldPkgInfo] -> IO () -delete = modifyWorld $ flip (deleteFirstsBy equalUDep) - --- | WorldPkgInfo values are considered equal if they refer to --- the same package, i.e., we don't care about differing versions or flags. -equalUDep :: WorldPkgInfo -> WorldPkgInfo -> Bool -equalUDep (WorldPkgInfo (Dependency pkg1 _) _) - (WorldPkgInfo (Dependency pkg2 _) _) = pkg1 == pkg2 - --- | Modifies the world file by applying an update-function ('unionBy' --- for 'insert', 'deleteFirstsBy' for 'delete') to the given list of --- packages. IO errors are considered non-fatal. -modifyWorld :: ([WorldPkgInfo] -> [WorldPkgInfo] - -> [WorldPkgInfo]) - -- ^ Function that defines how - -- the list of user packages are merged with - -- existing world packages. - -> Verbosity - -> FilePath -- ^ Location of the world file - -> [WorldPkgInfo] -- ^ list of user supplied packages - -> IO () -modifyWorld _ _ _ [] = return () -modifyWorld f verbosity world pkgs = - chattyTry "Error while updating world-file. 
" $ do - pkgsOldWorld <- getContents verbosity world - -- Filter out packages that are not in the world file: - let pkgsNewWorld = nubBy equalUDep $ f pkgs pkgsOldWorld - -- 'Dependency' is not an Ord instance, so we need to check for - -- equivalence the awkward way: - if not (all (`elem` pkgsOldWorld) pkgsNewWorld && - all (`elem` pkgsNewWorld) pkgsOldWorld) - then do - info verbosity "Updating world file..." - writeFileAtomic world . B.pack $ unlines - [ (display pkg) | pkg <- pkgsNewWorld] - else - info verbosity "World file is already up to date." - - --- | Returns the content of the world file as a list -getContents :: Verbosity -> FilePath -> IO [WorldPkgInfo] -getContents verbosity world = do - content <- safelyReadFile world - let result = map simpleParse (lines $ B.unpack content) - case sequence result of - Nothing -> die' verbosity "Could not parse world file." - Just xs -> return xs - where - safelyReadFile :: FilePath -> IO B.ByteString - safelyReadFile file = B.readFile file `catchIO` handler - where - handler e | isDoesNotExistError e = return B.empty - | otherwise = ioError e - - -instance Text WorldPkgInfo where - disp (WorldPkgInfo dep flags) = disp dep Disp.<+> dispFlags (unFlagAssignment flags) - where - dispFlags [] = Disp.empty - dispFlags fs = Disp.text "--flags=" - <<>> Disp.doubleQuotes (flagAssToDoc fs) - flagAssToDoc = foldr (\(fname,val) flagAssDoc -> - (if not val then Disp.char '-' - else Disp.empty) - <<>> Disp.text (unFlagName fname) - Disp.<+> flagAssDoc) - Disp.empty - parse = do - dep <- parse - Parse.skipSpaces - flagAss <- Parse.option mempty parseFlagAssignment - return $ WorldPkgInfo dep flagAss - where - parseFlagAssignment :: Parse.ReadP r FlagAssignment - parseFlagAssignment = do - _ <- Parse.string "--flags" - Parse.skipSpaces - _ <- Parse.char '=' - Parse.skipSpaces - mkFlagAssignment <$> (inDoubleQuotes $ Parse.many1 flag) - where - inDoubleQuotes :: Parse.ReadP r a -> Parse.ReadP r a - inDoubleQuotes = Parse.between 
(Parse.char '"') (Parse.char '"') - - flag = do - Parse.skipSpaces - val <- negative Parse.+++ positive - name <- ident - Parse.skipSpaces - return (mkFlagName name,val) - negative = do - _ <- Parse.char '-' - return False - positive = return True - - ident :: Parse.ReadP r String - ident = do - -- First character must be a letter/digit to avoid flags - -- like "+-debug": - c <- Parse.satisfy Char.isAlphaNum - cs <- Parse.munch (\ch -> Char.isAlphaNum ch || ch == '_' - || ch == '-') - return (c:cs) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Compat/Prelude.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Compat/Prelude.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Compat/Prelude.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Compat/Prelude.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ --- to suppress WARNING in "Distribution.Compat.Prelude.Internal" -{-# OPTIONS_GHC -fno-warn-deprecations #-} - --- | This module does two things: --- --- * Acts as a compatiblity layer, like @base-compat@. --- --- * Provides commonly used imports. 
--- --- This module is a superset of "Distribution.Compat.Prelude" (which --- this module re-exports) --- -module Distribution.Solver.Compat.Prelude - ( module Distribution.Compat.Prelude.Internal - , Prelude.IO - ) where - -import Prelude (IO) -import Distribution.Compat.Prelude.Internal hiding (IO) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Assignment.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Assignment.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Assignment.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Assignment.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,94 +0,0 @@ -module Distribution.Solver.Modular.Assignment - ( Assignment(..) - , PAssignment - , FAssignment - , SAssignment - , toCPs - ) where - -import Prelude () -import Distribution.Solver.Compat.Prelude hiding (pi) - -import Data.Array as A -import Data.List as L -import Data.Map as M -import Data.Maybe - -import Distribution.PackageDescription (FlagAssignment, mkFlagAssignment) -- from Cabal - -import Distribution.Solver.Types.ComponentDeps (ComponentDeps, Component) -import qualified Distribution.Solver.Types.ComponentDeps as CD -import Distribution.Solver.Types.OptionalStanza -import Distribution.Solver.Types.PackagePath - -import Distribution.Solver.Modular.Configured -import Distribution.Solver.Modular.Dependency -import Distribution.Solver.Modular.Flag -import Distribution.Solver.Modular.LabeledGraph -import Distribution.Solver.Modular.Package - --- | A (partial) package assignment. Qualified package names --- are associated with instances. -type PAssignment = Map QPN I - -type FAssignment = Map QFN Bool -type SAssignment = Map QSN Bool - --- | A (partial) assignment of variables. 
-data Assignment = A PAssignment FAssignment SAssignment - deriving (Show, Eq) - --- | Delivers an ordered list of fully configured packages. --- --- TODO: This function is (sort of) ok. However, there's an open bug --- w.r.t. unqualification. There might be several different instances --- of one package version chosen by the solver, which will lead to --- clashes. -toCPs :: Assignment -> RevDepMap -> [CP QPN] -toCPs (A pa fa sa) rdm = - let - -- get hold of the graph - g :: Graph Component - vm :: Vertex -> ((), QPN, [(Component, QPN)]) - cvm :: QPN -> Maybe Vertex - -- Note that the RevDepMap contains duplicate dependencies. Therefore the nub. - (g, vm, cvm) = graphFromEdges (L.map (\ (x, xs) -> ((), x, nub xs)) - (M.toList rdm)) - tg :: Graph Component - tg = transposeG g - -- Topsort the dependency graph, yielding a list of pkgs in the right order. - -- The graph will still contain all the installed packages, and it might - -- contain duplicates, because several variables might actually resolve to - -- the same package in the presence of qualified package names. - ps :: [PI QPN] - ps = L.map ((\ (_, x, _) -> PI x (pa M.! x)) . vm) $ - topSort g - -- Determine the flags per package, by walking over and regrouping the - -- complete flag assignment by package. - fapp :: Map QPN FlagAssignment - fapp = M.fromListWith mappend $ - L.map (\ ((FN qpn fn), b) -> (qpn, mkFlagAssignment [(fn, b)])) $ - M.toList $ - fa - -- Stanzas per package. - sapp :: Map QPN [OptionalStanza] - sapp = M.fromListWith (++) $ - L.map (\ ((SN qpn sn), b) -> (qpn, if b then [sn] else [])) $ - M.toList $ - sa - -- Dependencies per package. - depp :: QPN -> [(Component, PI QPN)] - depp qpn = let v :: Vertex - v = fromJust (cvm qpn) - dvs :: [(Component, Vertex)] - dvs = tg A.! v - in L.map (\ (comp, dv) -> case vm dv of (_, x, _) -> (comp, PI x (pa M.! x))) dvs - -- Translated to PackageDeps - depp' :: QPN -> ComponentDeps [PI QPN] - depp' = CD.fromList . L.map (\(comp, d) -> (comp, [d])) . 
depp - in - L.map (\ pi@(PI qpn _) -> CP pi - (M.findWithDefault mempty qpn fapp) - (M.findWithDefault mempty qpn sapp) - (depp' qpn)) - ps diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Builder.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Builder.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Builder.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Builder.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,298 +0,0 @@ -{-# LANGUAGE ScopedTypeVariables #-} -module Distribution.Solver.Modular.Builder ( - buildTree - , splits -- for testing - ) where - --- Building the search tree. --- --- In this phase, we build a search tree that is too large, i.e, it contains --- invalid solutions. We keep track of the open goals at each point. We --- nondeterministically pick an open goal (via a goal choice node), create --- subtrees according to the index and the available solutions, and extend the --- set of open goals by superficially looking at the dependencies recorded in --- the index. --- --- For each goal, we keep track of all the *reasons* why it is being --- introduced. These are for debugging and error messages, mainly. A little bit --- of care has to be taken due to the way we treat flags. If a package has --- flag-guarded dependencies, we cannot introduce them immediately. Instead, we --- store the entire dependency. 
- -import Data.List as L -import Data.Map as M -import Data.Set as S -import Prelude hiding (sequence, mapM) - -import Distribution.Solver.Modular.Dependency -import Distribution.Solver.Modular.Flag -import Distribution.Solver.Modular.Index -import Distribution.Solver.Modular.Package -import qualified Distribution.Solver.Modular.PSQ as P -import Distribution.Solver.Modular.Tree -import qualified Distribution.Solver.Modular.WeightedPSQ as W - -import Distribution.Solver.Types.ComponentDeps -import Distribution.Solver.Types.PackagePath -import Distribution.Solver.Types.Settings - --- | All state needed to build and link the search tree. It has a type variable --- because the linking phase doesn't need to know about the state used to build --- the tree. -data Linker a = Linker { - buildState :: a, - linkingState :: LinkingState -} - --- | The state needed to build the search tree without creating any linked nodes. -data BuildState = BS { - index :: Index, -- ^ information about packages and their dependencies - rdeps :: RevDepMap, -- ^ set of all package goals, completed and open, with reverse dependencies - open :: [OpenGoal], -- ^ set of still open goals (flag and package goals) - next :: BuildType, -- ^ kind of node to generate next - qualifyOptions :: QualifyOptions -- ^ qualification options -} - --- | Map of available linking targets. -type LinkingState = Map (PN, I) [PackagePath] - --- | Extend the set of open goals with the new goals listed. --- --- We also adjust the map of overall goals, and keep track of the --- reverse dependencies of each of the goals. 
-extendOpen :: QPN -> [FlaggedDep QPN] -> BuildState -> BuildState -extendOpen qpn' gs s@(BS { rdeps = gs', open = o' }) = go gs' o' gs - where - go :: RevDepMap -> [OpenGoal] -> [FlaggedDep QPN] -> BuildState - go g o [] = s { rdeps = g, open = o } - go g o ((Flagged fn@(FN qpn _) fInfo t f) : ngs) = - go g (FlagGoal fn fInfo t f (flagGR qpn) : o) ngs - -- Note: for 'Flagged' goals, we always insert, so later additions win. - -- This is important, because in general, if a goal is inserted twice, - -- the later addition will have better dependency information. - go g o ((Stanza sn@(SN qpn _) t) : ngs) = - go g (StanzaGoal sn t (flagGR qpn) : o) ngs - go g o ((Simple (LDep dr (Dep (PkgComponent qpn _) _)) c) : ngs) - | qpn == qpn' = - -- We currently only add a self-dependency to the graph if it is - -- between a package and its setup script. The edge creates a cycle - -- and causes the solver to backtrack and choose a different - -- instance for the setup script. We may need to track other - -- self-dependencies once we implement component-based solving. - case c of - ComponentSetup -> go (M.adjust (addIfAbsent (ComponentSetup, qpn')) qpn g) o ngs - _ -> go g o ngs - | qpn `M.member` g = go (M.adjust (addIfAbsent (c, qpn')) qpn g) o ngs - | otherwise = go (M.insert qpn [(c, qpn')] g) (PkgGoal qpn (DependencyGoal dr) : o) ngs - -- code above is correct; insert/adjust have different arg order - go g o ((Simple (LDep _dr (Ext _ext )) _) : ngs) = go g o ngs - go g o ((Simple (LDep _dr (Lang _lang))_) : ngs) = go g o ngs - go g o ((Simple (LDep _dr (Pkg _pn _vr))_) : ngs) = go g o ngs - - addIfAbsent :: Eq a => a -> [a] -> [a] - addIfAbsent x xs = if x `elem` xs then xs else x : xs - - -- GoalReason for a flag or stanza. Each flag/stanza is introduced only by - -- its containing package. 
- flagGR :: qpn -> GoalReason qpn - flagGR qpn = DependencyGoal (DependencyReason qpn M.empty S.empty) - --- | Given the current scope, qualify all the package names in the given set of --- dependencies and then extend the set of open goals accordingly. -scopedExtendOpen :: QPN -> FlaggedDeps PN -> FlagInfo -> - BuildState -> BuildState -scopedExtendOpen qpn fdeps fdefs s = extendOpen qpn gs s - where - -- Qualify all package names - qfdeps = qualifyDeps (qualifyOptions s) qpn fdeps - -- Introduce all package flags - qfdefs = L.map (\ (fn, b) -> Flagged (FN qpn fn) b [] []) $ M.toList fdefs - -- Combine new package and flag goals - gs = qfdefs ++ qfdeps - -- NOTE: - -- - -- In the expression @qfdefs ++ qfdeps@ above, flags occur potentially - -- multiple times, both via the flag declaration and via dependencies. - --- | Datatype that encodes what to build next -data BuildType = - Goals -- ^ build a goal choice node - | OneGoal OpenGoal -- ^ build a node for this goal - | Instance QPN PInfo -- ^ build a tree for a concrete instance - -build :: Linker BuildState -> Tree () QGoalReason -build = ana go - where - go :: Linker BuildState -> TreeF () QGoalReason (Linker BuildState) - go s = addLinking (linkingState s) $ addChildren (buildState s) - -addChildren :: BuildState -> TreeF () QGoalReason BuildState - --- If we have a choice between many goals, we just record the choice in --- the tree. We select each open goal in turn, and before we descend, remove --- it from the queue of open goals. -addChildren bs@(BS { rdeps = rdm, open = gs, next = Goals }) - | L.null gs = DoneF rdm () - | otherwise = GoalChoiceF rdm $ P.fromList - $ L.map (\ (g, gs') -> (close g, bs { next = OneGoal g, open = gs' })) - $ splits gs - --- If we have already picked a goal, then the choice depends on the kind --- of goal. --- --- For a package, we look up the instances available in the global info, --- and then handle each instance in turn. 
-addChildren bs@(BS { rdeps = rdm, index = idx, next = OneGoal (PkgGoal qpn@(Q _ pn) gr) }) = - -- If the package does not exist in the index, we construct an emty PChoiceF node for it - -- After all, we have no choices here. Alternatively, we could immediately construct - -- a Fail node here, but that would complicate the construction of conflict sets. - -- We will probably want to give this case special treatment when generating error - -- messages though. - case M.lookup pn idx of - Nothing -> PChoiceF qpn rdm gr (W.fromList []) - Just pis -> PChoiceF qpn rdm gr (W.fromList (L.map (\ (i, info) -> - ([], POption i Nothing, bs { next = Instance qpn info })) - (M.toList pis))) - -- TODO: data structure conversion is rather ugly here - --- For a flag, we create only two subtrees, and we create them in the order --- that is indicated by the flag default. -addChildren bs@(BS { rdeps = rdm, next = OneGoal (FlagGoal qfn@(FN qpn _) (FInfo b m w) t f gr) }) = - FChoiceF qfn rdm gr weak m b (W.fromList - [([if b then 0 else 1], True, (extendOpen qpn t bs) { next = Goals }), - ([if b then 1 else 0], False, (extendOpen qpn f bs) { next = Goals })]) - where - trivial = L.null t && L.null f - weak = WeakOrTrivial $ unWeakOrTrivial w || trivial - --- For a stanza, we also create only two subtrees. The order is initially --- False, True. This can be changed later by constraints (force enabling --- the stanza by replacing the False branch with failure) or preferences --- (try enabling the stanza if possible by moving the True branch first). - -addChildren bs@(BS { rdeps = rdm, next = OneGoal (StanzaGoal qsn@(SN qpn _) t gr) }) = - SChoiceF qsn rdm gr trivial (W.fromList - [([0], False, bs { next = Goals }), - ([1], True, (extendOpen qpn t bs) { next = Goals })]) - where - trivial = WeakOrTrivial (L.null t) - --- For a particular instance, we change the state: we update the scope, --- and furthermore we update the set of goals. --- --- TODO: We could inline this above. 
-addChildren bs@(BS { next = Instance qpn (PInfo fdeps _ fdefs _) }) = - addChildren ((scopedExtendOpen qpn fdeps fdefs bs) - { next = Goals }) - -{------------------------------------------------------------------------------- - Add linking --------------------------------------------------------------------------------} - --- | Introduce link nodes into the tree --- --- Linking is a phase that adapts package choice nodes and adds the option to --- link wherever appropriate: Package goals are called "related" if they are for --- the same instance of the same package (but have different prefixes). A link --- option is available in a package choice node whenever we can choose an --- instance that has already been chosen for a related goal at a higher position --- in the tree. We only create link options for related goals that are not --- themselves linked, because the choice to link to a linked goal is the same as --- the choice to link to the target of that goal's linking. --- --- The code here proceeds by maintaining a finite map recording choices that --- have been made at higher positions in the tree. For each pair of package name --- and instance, it stores the prefixes at which we have made a choice for this --- package instance. Whenever we make an unlinked choice, we extend the map. --- Whenever we find a choice, we look into the map in order to find out what --- link options we have to add. --- --- A separate tree traversal would be simpler. However, 'addLinking' creates --- linked nodes from existing unlinked nodes, which leads to sharing between the --- nodes. If we copied the nodes when they were full trees of type --- 'Tree () QGoalReason', then the sharing would cause a space leak during --- exploration of the tree. Instead, we only copy the 'BuildState', which is --- relatively small, while the tree is being constructed. 
See --- https://github.com/haskell/cabal/issues/2899 -addLinking :: LinkingState -> TreeF () c a -> TreeF () c (Linker a) --- The only nodes of interest are package nodes -addLinking ls (PChoiceF qpn@(Q pp pn) rdm gr cs) = - let linkedCs = fmap (\bs -> Linker bs ls) $ - W.fromList $ concatMap (linkChoices ls qpn) (W.toList cs) - unlinkedCs = W.mapWithKey goP cs - allCs = unlinkedCs `W.union` linkedCs - - -- Recurse underneath package choices. Here we just need to make sure - -- that we record the package choice so that it is available below - goP :: POption -> a -> Linker a - goP (POption i Nothing) bs = Linker bs $ M.insertWith (++) (pn, i) [pp] ls - goP _ _ = alreadyLinked - in PChoiceF qpn rdm gr allCs -addLinking ls t = fmap (\bs -> Linker bs ls) t - -linkChoices :: forall a w . LinkingState - -> QPN - -> (w, POption, a) - -> [(w, POption, a)] -linkChoices related (Q _pp pn) (weight, POption i Nothing, subtree) = - L.map aux (M.findWithDefault [] (pn, i) related) - where - aux :: PackagePath -> (w, POption, a) - aux pp = (weight, POption i (Just pp), subtree) -linkChoices _ _ (_, POption _ (Just _), _) = - alreadyLinked - -alreadyLinked :: a -alreadyLinked = error "addLinking called on tree that already contains linked nodes" - -------------------------------------------------------------------------------- - --- | Interface to the tree builder. Just takes an index and a list of package names, --- and computes the initial state and then the tree from there. 
-buildTree :: Index -> IndependentGoals -> [PN] -> Tree () QGoalReason -buildTree idx (IndependentGoals ind) igs = - build Linker { - buildState = BS { - index = idx - , rdeps = M.fromList (L.map (\ qpn -> (qpn, [])) qpns) - , open = L.map topLevelGoal qpns - , next = Goals - , qualifyOptions = defaultQualifyOptions idx - } - , linkingState = M.empty - } - where - topLevelGoal qpn = PkgGoal qpn UserGoal - - qpns | ind = L.map makeIndependent igs - | otherwise = L.map (Q (PackagePath DefaultNamespace QualToplevel)) igs - -{------------------------------------------------------------------------------- - Goals --------------------------------------------------------------------------------} - --- | Information needed about a dependency before it is converted into a Goal. -data OpenGoal = - FlagGoal (FN QPN) FInfo (FlaggedDeps QPN) (FlaggedDeps QPN) QGoalReason - | StanzaGoal (SN QPN) (FlaggedDeps QPN) QGoalReason - | PkgGoal QPN QGoalReason - --- | Closes a goal, i.e., removes all the extraneous information that we --- need only during the build phase. -close :: OpenGoal -> Goal QPN -close (FlagGoal qfn _ _ _ gr) = Goal (F qfn) gr -close (StanzaGoal qsn _ gr) = Goal (S qsn) gr -close (PkgGoal qpn gr) = Goal (P qpn) gr - -{------------------------------------------------------------------------------- - Auxiliary --------------------------------------------------------------------------------} - --- | Pairs each element of a list with the list resulting from removal of that --- element from the original list. -splits :: [a] -> [(a, [a])] -splits = go id - where - go :: ([a] -> [a]) -> [a] -> [(a, [a])] - go _ [] = [] - go f (x : xs) = (x, f xs) : go (f . 
(x :)) xs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/ConfiguredConversion.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/ConfiguredConversion.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/ConfiguredConversion.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/ConfiguredConversion.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,72 +0,0 @@ -module Distribution.Solver.Modular.ConfiguredConversion - ( convCP - ) where - -import Data.Maybe -import Prelude hiding (pi) -import Data.Either (partitionEithers) - -import Distribution.Package (UnitId, packageId) - -import qualified Distribution.Simple.PackageIndex as SI - -import Distribution.Solver.Modular.Configured -import Distribution.Solver.Modular.Package - -import Distribution.Solver.Types.ComponentDeps (ComponentDeps) -import qualified Distribution.Solver.Types.PackageIndex as CI -import Distribution.Solver.Types.PackagePath -import Distribution.Solver.Types.ResolverPackage -import Distribution.Solver.Types.SolverId -import Distribution.Solver.Types.SolverPackage -import Distribution.Solver.Types.InstSolverPackage -import Distribution.Solver.Types.SourcePackage - --- | Converts from the solver specific result @CP QPN@ into --- a 'ResolverPackage', which can then be converted into --- the install plan. 
-convCP :: SI.InstalledPackageIndex -> - CI.PackageIndex (SourcePackage loc) -> - CP QPN -> ResolverPackage loc -convCP iidx sidx (CP qpi fa es ds) = - case convPI qpi of - Left pi -> PreExisting $ - InstSolverPackage { - instSolverPkgIPI = fromJust $ SI.lookupUnitId iidx pi, - instSolverPkgLibDeps = fmap fst ds', - instSolverPkgExeDeps = fmap snd ds' - } - Right pi -> Configured $ - SolverPackage { - solverPkgSource = srcpkg, - solverPkgFlags = fa, - solverPkgStanzas = es, - solverPkgLibDeps = fmap fst ds', - solverPkgExeDeps = fmap snd ds' - } - where - Just srcpkg = CI.lookupPackageId sidx pi - where - ds' :: ComponentDeps ([SolverId] {- lib -}, [SolverId] {- exe -}) - ds' = fmap (partitionEithers . map convConfId) ds - -convPI :: PI QPN -> Either UnitId PackageId -convPI (PI _ (I _ (Inst pi))) = Left pi -convPI pi = Right (packageId (either id id (convConfId pi))) - -convConfId :: PI QPN -> Either SolverId {- is lib -} SolverId {- is exe -} -convConfId (PI (Q (PackagePath _ q) pn) (I v loc)) = - case loc of - Inst pi -> Left (PreExistingId sourceId pi) - _otherwise - | QualExe _ pn' <- q - -- NB: the dependencies of the executable are also - -- qualified. So the way to tell if this is an executable - -- dependency is to make sure the qualifier is pointing - -- at the actual thing. Fortunately for us, I was - -- silly and didn't allow arbitrarily nested build-tools - -- dependencies, so a shallow check works. 
- , pn == pn' -> Right (PlannedId sourceId) - | otherwise -> Left (PlannedId sourceId) - where - sourceId = PackageIdentifier pn v diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Configured.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Configured.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Configured.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Configured.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -module Distribution.Solver.Modular.Configured - ( CP(..) - ) where - -import Distribution.PackageDescription (FlagAssignment) - -import Distribution.Solver.Modular.Package -import Distribution.Solver.Types.ComponentDeps (ComponentDeps) -import Distribution.Solver.Types.OptionalStanza - --- | A configured package is a package instance together with --- a flag assignment and complete dependencies. 
-data CP qpn = CP (PI qpn) FlagAssignment [OptionalStanza] (ComponentDeps [PI qpn]) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/ConflictSet.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/ConflictSet.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/ConflictSet.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/ConflictSet.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,192 +0,0 @@ -{-# LANGUAGE CPP #-} -#ifdef DEBUG_CONFLICT_SETS -{-# LANGUAGE ImplicitParams #-} -#endif --- | Conflict sets --- --- Intended for double import --- --- > import Distribution.Solver.Modular.ConflictSet (ConflictSet) --- > import qualified Distribution.Solver.Modular.ConflictSet as CS -module Distribution.Solver.Modular.ConflictSet ( - ConflictSet -- opaque - , ConflictMap -#ifdef DEBUG_CONFLICT_SETS - , conflictSetOrigin -#endif - , showConflictSet - , showCSSortedByFrequency - , showCSWithFrequency - -- Set-like operations - , toList - , union - , unions - , insert - , empty - , singleton - , member - , filter - , fromList - ) where - -import Prelude hiding (filter) -import Data.List (intercalate, sortBy) -import Data.Map (Map) -import Data.Set (Set) -import Data.Function (on) -import qualified Data.Set as S -import qualified Data.Map as M - -#ifdef DEBUG_CONFLICT_SETS -import Data.Tree -import GHC.Stack -#endif - -import Distribution.Solver.Modular.Var -import Distribution.Solver.Types.PackagePath - --- | The set of variables involved in a solver conflict --- --- Since these variables should be preprocessed in some way, this type is --- kept abstract. 
-data ConflictSet = CS { - -- | The set of variables involved on the conflict - conflictSetToSet :: !(Set (Var QPN)) - -#ifdef DEBUG_CONFLICT_SETS - -- | The origin of the conflict set - -- - -- When @DEBUG_CONFLICT_SETS@ is defined @(-f debug-conflict-sets)@, - -- we record the origin of every conflict set. For new conflict sets - -- ('empty', 'fromVars', ..) we just record the 'CallStack'; for operations - -- that construct new conflict sets from existing conflict sets ('union', - -- 'filter', ..) we record the 'CallStack' to the call to the combinator - -- as well as the 'CallStack's of the input conflict sets. - -- - -- Requires @GHC >= 7.10@. - , conflictSetOrigin :: Tree CallStack -#endif - } - deriving (Show) - -instance Eq ConflictSet where - (==) = (==) `on` conflictSetToSet - -instance Ord ConflictSet where - compare = compare `on` conflictSetToSet - -showConflictSet :: ConflictSet -> String -showConflictSet = intercalate ", " . map showVar . toList - -showCSSortedByFrequency :: ConflictMap -> ConflictSet -> String -showCSSortedByFrequency = showCS False - -showCSWithFrequency :: ConflictMap -> ConflictSet -> String -showCSWithFrequency = showCS True - -showCS :: Bool -> ConflictMap -> ConflictSet -> String -showCS showCount cm = - intercalate ", " . map showWithFrequency . indexByFrequency - where - indexByFrequency = sortBy (flip compare `on` snd) . map (\c -> (c, M.lookup c cm)) . toList - showWithFrequency (conflict, maybeFrequency) = case maybeFrequency of - Just frequency - | showCount -> showVar conflict ++ " (" ++ show frequency ++ ")" - _ -> showVar conflict - -{------------------------------------------------------------------------------- - Set-like operations --------------------------------------------------------------------------------} - -toList :: ConflictSet -> [Var QPN] -toList = S.toList . 
conflictSetToSet - -union :: -#ifdef DEBUG_CONFLICT_SETS - (?loc :: CallStack) => -#endif - ConflictSet -> ConflictSet -> ConflictSet -union cs cs' = CS { - conflictSetToSet = S.union (conflictSetToSet cs) (conflictSetToSet cs') -#ifdef DEBUG_CONFLICT_SETS - , conflictSetOrigin = Node ?loc (map conflictSetOrigin [cs, cs']) -#endif - } - -unions :: -#ifdef DEBUG_CONFLICT_SETS - (?loc :: CallStack) => -#endif - [ConflictSet] -> ConflictSet -unions css = CS { - conflictSetToSet = S.unions (map conflictSetToSet css) -#ifdef DEBUG_CONFLICT_SETS - , conflictSetOrigin = Node ?loc (map conflictSetOrigin css) -#endif - } - -insert :: -#ifdef DEBUG_CONFLICT_SETS - (?loc :: CallStack) => -#endif - Var QPN -> ConflictSet -> ConflictSet -insert var cs = CS { - conflictSetToSet = S.insert var (conflictSetToSet cs) -#ifdef DEBUG_CONFLICT_SETS - , conflictSetOrigin = Node ?loc [conflictSetOrigin cs] -#endif - } - -empty :: -#ifdef DEBUG_CONFLICT_SETS - (?loc :: CallStack) => -#endif - ConflictSet -empty = CS { - conflictSetToSet = S.empty -#ifdef DEBUG_CONFLICT_SETS - , conflictSetOrigin = Node ?loc [] -#endif - } - -singleton :: -#ifdef DEBUG_CONFLICT_SETS - (?loc :: CallStack) => -#endif - Var QPN -> ConflictSet -singleton var = CS { - conflictSetToSet = S.singleton var -#ifdef DEBUG_CONFLICT_SETS - , conflictSetOrigin = Node ?loc [] -#endif - } - -member :: Var QPN -> ConflictSet -> Bool -member var = S.member var . 
conflictSetToSet - -filter :: -#ifdef DEBUG_CONFLICT_SETS - (?loc :: CallStack) => -#endif - (Var QPN -> Bool) -> ConflictSet -> ConflictSet -filter p cs = CS { - conflictSetToSet = S.filter p (conflictSetToSet cs) -#ifdef DEBUG_CONFLICT_SETS - , conflictSetOrigin = Node ?loc [conflictSetOrigin cs] -#endif - } - -fromList :: -#ifdef DEBUG_CONFLICT_SETS - (?loc :: CallStack) => -#endif - [Var QPN] -> ConflictSet -fromList vars = CS { - conflictSetToSet = S.fromList vars -#ifdef DEBUG_CONFLICT_SETS - , conflictSetOrigin = Node ?loc [] -#endif - } - -type ConflictMap = Map (Var QPN) Int - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Cycles.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Cycles.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Cycles.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Cycles.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,118 +0,0 @@ -{-# LANGUAGE TypeFamilies #-} -module Distribution.Solver.Modular.Cycles ( - detectCyclesPhase - ) where - -import Prelude hiding (cycle) -import qualified Data.Map as M -import qualified Data.Set as S - -import qualified Distribution.Compat.Graph as G -import Distribution.Simple.Utils (ordNub) -import Distribution.Solver.Modular.Dependency -import Distribution.Solver.Modular.Flag -import Distribution.Solver.Modular.Tree -import qualified Distribution.Solver.Modular.ConflictSet as CS -import Distribution.Solver.Types.ComponentDeps (Component) -import Distribution.Solver.Types.PackagePath - --- | Find and reject any nodes with cyclic dependencies -detectCyclesPhase :: Tree d c -> Tree d c -detectCyclesPhase = cata go - where - -- Only check children of choice nodes. 
- go :: TreeF d c (Tree d c) -> Tree d c - go (PChoiceF qpn rdm gr cs) = - PChoice qpn rdm gr $ fmap (checkChild qpn) cs - go (FChoiceF qfn@(FN qpn _) rdm gr w m d cs) = - FChoice qfn rdm gr w m d $ fmap (checkChild qpn) cs - go (SChoiceF qsn@(SN qpn _) rdm gr w cs) = - SChoice qsn rdm gr w $ fmap (checkChild qpn) cs - go x = inn x - - checkChild :: QPN -> Tree d c -> Tree d c - checkChild qpn x@(PChoice _ rdm _ _) = failIfCycle qpn rdm x - checkChild qpn x@(FChoice _ rdm _ _ _ _ _) = failIfCycle qpn rdm x - checkChild qpn x@(SChoice _ rdm _ _ _) = failIfCycle qpn rdm x - checkChild qpn x@(GoalChoice rdm _) = failIfCycle qpn rdm x - checkChild _ x@(Fail _ _) = x - checkChild qpn x@(Done rdm _) = failIfCycle qpn rdm x - - failIfCycle :: QPN -> RevDepMap -> Tree d c -> Tree d c - failIfCycle qpn rdm x = - case findCycles qpn rdm of - Nothing -> x - Just relSet -> Fail relSet CyclicDependencies - --- | Given the reverse dependency map from a node in the tree, check --- if the solution is cyclic. If it is, return the conflict set containing --- all decisions that could potentially break the cycle. --- --- TODO: The conflict set should also contain flag and stanza variables. -findCycles :: QPN -> RevDepMap -> Maybe ConflictSet -findCycles pkg rdm = - -- This function has two parts: a faster cycle check that is called at every - -- step and a slower calculation of the conflict set. - -- - -- 'hasCycle' checks for cycles incrementally by only looking for cycles - -- containing the current package, 'pkg'. It searches for cycles in the - -- 'RevDepMap', which is the data structure used to store reverse - -- dependencies in the search tree. We store the reverse dependencies in a - -- map, because Data.Map is smaller and/or has better sharing than - -- Distribution.Compat.Graph. - -- - -- If there is a cycle, we call G.cycles to find a strongly connected - -- component. Then we choose one cycle from the component to use for the - -- conflict set. 
Choosing only one cycle can lead to a smaller conflict set, - -- such as when a choice to enable testing introduces many cycles at once. - -- In that case, all cycles contain the current package and are in one large - -- strongly connected component. - -- - if hasCycle - then let scc :: G.Graph RevDepMapNode - scc = case G.cycles $ revDepMapToGraph rdm of - [] -> findCyclesError "cannot find a strongly connected component" - c : _ -> G.fromDistinctList c - - next :: QPN -> QPN - next p = case G.neighbors scc p of - Just (n : _) -> G.nodeKey n - _ -> findCyclesError "cannot find next node in the cycle" - - -- This function also assumes that all cycles contain 'pkg'. - oneCycle :: [QPN] - oneCycle = case iterate next pkg of - [] -> findCyclesError "empty cycle" - x : xs -> x : takeWhile (/= x) xs - in Just $ CS.fromList $ map P oneCycle - else Nothing - where - hasCycle :: Bool - hasCycle = pkg `S.member` closure (neighbors pkg) - - closure :: [QPN] -> S.Set QPN - closure = foldl go S.empty - where - go :: S.Set QPN -> QPN -> S.Set QPN - go s x = - if x `S.member` s - then s - else foldl go (S.insert x s) $ neighbors x - - neighbors :: QPN -> [QPN] - neighbors x = case x `M.lookup` rdm of - Nothing -> findCyclesError "cannot find node" - Just xs -> map snd xs - - findCyclesError = error . 
("Distribution.Solver.Modular.Cycles.findCycles: " ++) - -data RevDepMapNode = RevDepMapNode QPN [(Component, QPN)] - -instance G.IsNode RevDepMapNode where - type Key RevDepMapNode = QPN - nodeKey (RevDepMapNode qpn _) = qpn - nodeNeighbors (RevDepMapNode _ ns) = ordNub $ map snd ns - -revDepMapToGraph :: RevDepMap -> G.Graph RevDepMapNode -revDepMapToGraph rdm = G.fromDistinctList - [RevDepMapNode qpn ns | (qpn, ns) <- M.toList rdm] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Dependency.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Dependency.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Dependency.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Dependency.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,298 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE RecordWildCards #-} -module Distribution.Solver.Modular.Dependency ( - -- * Variables - Var(..) - , showVar - , varPN - -- * Conflict sets - , ConflictSet - , ConflictMap - , CS.showConflictSet - -- * Constrained instances - , CI(..) - -- * Flagged dependencies - , FlaggedDeps - , FlaggedDep(..) - , LDep(..) - , Dep(..) - , PkgComponent(..) - , ExposedComponent(..) - , DependencyReason(..) - , showDependencyReason - , flattenFlaggedDeps - , QualifyOptions(..) - , qualifyDeps - , unqualifyDeps - -- * Reverse dependency map - , RevDepMap - -- * Goals - , Goal(..) - , GoalReason(..) 
- , QGoalReason - , goalToVar - , varToConflictSet - , goalReasonToCS - , dependencyReasonToCS - ) where - -import Prelude () -import qualified Data.Map as M -import qualified Data.Set as S -import Distribution.Solver.Compat.Prelude hiding (pi) - -import Language.Haskell.Extension (Extension(..), Language(..)) - -import Distribution.Solver.Modular.ConflictSet (ConflictSet, ConflictMap) -import Distribution.Solver.Modular.Flag -import Distribution.Solver.Modular.Package -import Distribution.Solver.Modular.Var -import Distribution.Solver.Modular.Version -import qualified Distribution.Solver.Modular.ConflictSet as CS - -import Distribution.Solver.Types.ComponentDeps (Component(..)) -import Distribution.Solver.Types.PackagePath -import Distribution.Types.UnqualComponentName - -{------------------------------------------------------------------------------- - Constrained instances --------------------------------------------------------------------------------} - --- | Constrained instance. It represents the allowed instances for a package, --- which can be either a fixed instance or a version range. -data CI = Fixed I | Constrained VR - deriving (Eq, Show) - -{------------------------------------------------------------------------------- - Flagged dependencies --------------------------------------------------------------------------------} - --- | Flagged dependencies --- --- 'FlaggedDeps' is the modular solver's view of a packages dependencies: --- rather than having the dependencies indexed by component, each dependency --- defines what component it is in. --- --- Note that each dependency is associated with a Component. We must know what --- component the dependencies belong to, or else we won't be able to construct --- fine-grained reverse dependencies. -type FlaggedDeps qpn = [FlaggedDep qpn] - --- | Flagged dependencies can either be plain dependency constraints, --- or flag-dependent dependency trees. 
-data FlaggedDep qpn = - -- | Dependencies which are conditional on a flag choice. - Flagged (FN qpn) FInfo (TrueFlaggedDeps qpn) (FalseFlaggedDeps qpn) - -- | Dependencies which are conditional on whether or not a stanza - -- (e.g., a test suite or benchmark) is enabled. - | Stanza (SN qpn) (TrueFlaggedDeps qpn) - -- | Dependencies which are always enabled, for the component 'comp'. - | Simple (LDep qpn) Component - --- | Conversatively flatten out flagged dependencies --- --- NOTE: We do not filter out duplicates. -flattenFlaggedDeps :: FlaggedDeps qpn -> [(LDep qpn, Component)] -flattenFlaggedDeps = concatMap aux - where - aux :: FlaggedDep qpn -> [(LDep qpn, Component)] - aux (Flagged _ _ t f) = flattenFlaggedDeps t ++ flattenFlaggedDeps f - aux (Stanza _ t) = flattenFlaggedDeps t - aux (Simple d c) = [(d, c)] - -type TrueFlaggedDeps qpn = FlaggedDeps qpn -type FalseFlaggedDeps qpn = FlaggedDeps qpn - --- | A 'Dep' labeled with the reason it was introduced. --- --- 'LDep' intentionally has no 'Functor' instance because the type variable --- is used both to record the dependencies as well as who's doing the --- depending; having a 'Functor' instance makes bugs where we don't distinguish --- these two far too likely. (By rights 'LDep' ought to have two type variables.) -data LDep qpn = LDep (DependencyReason qpn) (Dep qpn) - --- | A dependency (constraint) associates a package name with a constrained --- instance. It can also represent other types of dependencies, such as --- dependencies on language extensions. -data Dep qpn = Dep (PkgComponent qpn) CI -- ^ dependency on a package component - | Ext Extension -- ^ dependency on a language extension - | Lang Language -- ^ dependency on a language version - | Pkg PkgconfigName VR -- ^ dependency on a pkg-config package - deriving Functor - --- | An exposed component within a package. This type is used to represent --- build-depends and build-tool-depends dependencies. 
-data PkgComponent qpn = PkgComponent qpn ExposedComponent - deriving (Eq, Ord, Functor, Show) - --- | A component that can be depended upon by another package, i.e., a library --- or an executable. -data ExposedComponent = ExposedLib | ExposedExe UnqualComponentName - deriving (Eq, Ord, Show) - --- | The reason that a dependency is active. It identifies the package and any --- flag and stanza choices that introduced the dependency. It contains --- everything needed for creating ConflictSets or describing conflicts in solver --- log messages. -data DependencyReason qpn = DependencyReason qpn (Map Flag FlagValue) (S.Set Stanza) - deriving (Functor, Eq, Show) - --- | Print the reason that a dependency was introduced. -showDependencyReason :: DependencyReason QPN -> String -showDependencyReason (DependencyReason qpn flags stanzas) = - intercalate " " $ - showQPN qpn - : map (uncurry showFlagValue) (M.toList flags) - ++ map (\s -> showSBool s True) (S.toList stanzas) - --- | Options for goal qualification (used in 'qualifyDeps') --- --- See also 'defaultQualifyOptions' -data QualifyOptions = QO { - -- | Do we have a version of base relying on another version of base? - qoBaseShim :: Bool - - -- Should dependencies of the setup script be treated as independent? - , qoSetupIndependent :: Bool - } - deriving Show - --- | Apply built-in rules for package qualifiers --- --- Although the behaviour of 'qualifyDeps' depends on the 'QualifyOptions', --- it is important that these 'QualifyOptions' are _static_. Qualification --- does NOT depend on flag assignment; in other words, it behaves the same no --- matter which choices the solver makes (modulo the global 'QualifyOptions'); --- we rely on this in 'linkDeps' (see comment there). --- --- NOTE: It's the _dependencies_ of a package that may or may not be independent --- from the package itself. Package flag choices must of course be consistent. 
-qualifyDeps :: QualifyOptions -> QPN -> FlaggedDeps PN -> FlaggedDeps QPN -qualifyDeps QO{..} (Q pp@(PackagePath ns q) pn) = go - where - go :: FlaggedDeps PN -> FlaggedDeps QPN - go = map go1 - - go1 :: FlaggedDep PN -> FlaggedDep QPN - go1 (Flagged fn nfo t f) = Flagged (fmap (Q pp) fn) nfo (go t) (go f) - go1 (Stanza sn t) = Stanza (fmap (Q pp) sn) (go t) - go1 (Simple dep comp) = Simple (goLDep dep comp) comp - - -- Suppose package B has a setup dependency on package A. - -- This will be recorded as something like - -- - -- > LDep (DependencyReason "B") (Dep (PkgComponent "A" ExposedLib) (Constrained AnyVersion)) - -- - -- Observe that when we qualify this dependency, we need to turn that - -- @"A"@ into @"B-setup.A"@, but we should not apply that same qualifier - -- to the DependencyReason. - goLDep :: LDep PN -> Component -> LDep QPN - goLDep (LDep dr dep) comp = LDep (fmap (Q pp) dr) (goD dep comp) - - goD :: Dep PN -> Component -> Dep QPN - goD (Ext ext) _ = Ext ext - goD (Lang lang) _ = Lang lang - goD (Pkg pkn vr) _ = Pkg pkn vr - goD (Dep dep@(PkgComponent qpn (ExposedExe _)) ci) _ = - Dep (Q (PackagePath ns (QualExe pn qpn)) <$> dep) ci - goD (Dep dep@(PkgComponent qpn ExposedLib) ci) comp - | qBase qpn = Dep (Q (PackagePath ns (QualBase pn)) <$> dep) ci - | qSetup comp = Dep (Q (PackagePath ns (QualSetup pn)) <$> dep) ci - | otherwise = Dep (Q (PackagePath ns inheritedQ ) <$> dep) ci - - -- If P has a setup dependency on Q, and Q has a regular dependency on R, then - -- we say that the 'Setup' qualifier is inherited: P has an (indirect) setup - -- dependency on R. We do not do this for the base qualifier however. - -- - -- The inherited qualifier is only used for regular dependencies; for setup - -- and base deppendencies we override the existing qualifier. See #3160 for - -- a detailed discussion. 
- inheritedQ :: Qualifier - inheritedQ = case q of - QualSetup _ -> q - QualExe _ _ -> q - QualToplevel -> q - QualBase _ -> QualToplevel - - -- Should we qualify this goal with the 'Base' package path? - qBase :: PN -> Bool - qBase dep = qoBaseShim && unPackageName dep == "base" - - -- Should we qualify this goal with the 'Setup' package path? - qSetup :: Component -> Bool - qSetup comp = qoSetupIndependent && comp == ComponentSetup - --- | Remove qualifiers from set of dependencies --- --- This is used during link validation: when we link package @Q.A@ to @Q'.A@, --- then all dependencies @Q.B@ need to be linked to @Q'.B@. In order to compute --- what to link these dependencies to, we need to requalify @Q.B@ to become --- @Q'.B@; we do this by first removing all qualifiers and then calling --- 'qualifyDeps' again. -unqualifyDeps :: FlaggedDeps QPN -> FlaggedDeps PN -unqualifyDeps = go - where - go :: FlaggedDeps QPN -> FlaggedDeps PN - go = map go1 - - go1 :: FlaggedDep QPN -> FlaggedDep PN - go1 (Flagged fn nfo t f) = Flagged (fmap unq fn) nfo (go t) (go f) - go1 (Stanza sn t) = Stanza (fmap unq sn) (go t) - go1 (Simple dep comp) = Simple (goLDep dep) comp - - goLDep :: LDep QPN -> LDep PN - goLDep (LDep dr dep) = LDep (fmap unq dr) (fmap unq dep) - - unq :: QPN -> PN - unq (Q _ pn) = pn - -{------------------------------------------------------------------------------- - Reverse dependency map --------------------------------------------------------------------------------} - --- | A map containing reverse dependencies between qualified --- package names. -type RevDepMap = Map QPN [(Component, QPN)] - -{------------------------------------------------------------------------------- - Goals --------------------------------------------------------------------------------} - --- | A goal is just a solver variable paired with a reason. --- The reason is only used for tracing. 
-data Goal qpn = Goal (Var qpn) (GoalReason qpn) - deriving (Eq, Show, Functor) - --- | Reason why a goal is being added to a goal set. -data GoalReason qpn = - UserGoal -- introduced by a build target - | DependencyGoal (DependencyReason qpn) -- introduced by a package - deriving (Eq, Show, Functor) - -type QGoalReason = GoalReason QPN - -goalToVar :: Goal a -> Var a -goalToVar (Goal v _) = v - --- | Compute a singleton conflict set from a 'Var' -varToConflictSet :: Var QPN -> ConflictSet -varToConflictSet = CS.singleton - -goalReasonToCS :: GoalReason QPN -> ConflictSet -goalReasonToCS UserGoal = CS.empty -goalReasonToCS (DependencyGoal dr) = dependencyReasonToCS dr - --- | This function returns the solver variables responsible for the dependency. --- It drops the flag and stanza values, which are only needed for log messages. -dependencyReasonToCS :: DependencyReason QPN -> ConflictSet -dependencyReasonToCS (DependencyReason qpn flags stanzas) = - CS.fromList $ P qpn : flagVars ++ map stanzaToVar (S.toList stanzas) - where - -- Filter out any flags that introduced the dependency with both values. - -- They don't need to be included in the conflict set, because changing the - -- flag value can't remove the dependency. - flagVars :: [Var QPN] - flagVars = [F (FN qpn fn) | (fn, fv) <- M.toList flags, fv /= FlagBoth] - - stanzaToVar :: Stanza -> Var QPN - stanzaToVar = S . 
SN qpn diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Explore.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Explore.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Explore.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Explore.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,220 +0,0 @@ -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE ScopedTypeVariables #-} -module Distribution.Solver.Modular.Explore - ( backjump - , backjumpAndExplore - ) where - -import qualified Distribution.Solver.Types.Progress as P - -import Data.Foldable as F -import Data.List as L (foldl') -import Data.Map.Strict as M - -import Distribution.Solver.Modular.Assignment -import Distribution.Solver.Modular.Dependency -import Distribution.Solver.Modular.Log -import Distribution.Solver.Modular.Message -import qualified Distribution.Solver.Modular.PSQ as P -import qualified Distribution.Solver.Modular.ConflictSet as CS -import Distribution.Solver.Modular.RetryLog -import Distribution.Solver.Modular.Tree -import qualified Distribution.Solver.Modular.WeightedPSQ as W -import Distribution.Solver.Types.PackagePath -import Distribution.Solver.Types.Settings (EnableBackjumping(..), CountConflicts(..)) - --- | This function takes the variable we're currently considering, a --- last conflict set and a list of children's logs. Each log yields --- either a solution or a conflict set. The result is a combined log for --- the parent node that has explored a prefix of the children. --- --- We can stop traversing the children's logs if we find an individual --- conflict set that does not contain the current variable. 
In this --- case, we can just lift the conflict set to the current level, --- because the current level cannot possibly have contributed to this --- conflict, so no other choice at the current level would avoid the --- conflict. --- --- If any of the children might contain a successful solution, we can --- return it immediately. If all children contain conflict sets, we can --- take the union as the combined conflict set. --- --- The last conflict set corresponds to the justification that we --- have to choose this goal at all. There is a reason why we have --- introduced the goal in the first place, and this reason is in conflict --- with the (virtual) option not to choose anything for the current --- variable. See also the comments for 'avoidSet'. --- -backjump :: Maybe Int -> EnableBackjumping -> Var QPN - -> ConflictSet -> W.WeightedPSQ w k (ExploreState -> ConflictSetLog a) - -> ExploreState -> ConflictSetLog a -backjump mbj (EnableBackjumping enableBj) var lastCS xs = - F.foldr combine avoidGoal xs CS.empty - where - combine :: forall a . (ExploreState -> ConflictSetLog a) - -> (ConflictSet -> ExploreState -> ConflictSetLog a) - -> ConflictSet -> ExploreState -> ConflictSetLog a - combine x f csAcc es = retry (x es) next - where - next :: IntermediateFailure -> ConflictSetLog a - next BackjumpLimit = fromProgress (P.Fail BackjumpLimit) - next (NoSolution !cs es') - | enableBj && not (var `CS.member` cs) = skipLoggingBackjump cs es' - | otherwise = f (csAcc `CS.union` cs) es' - - -- This function represents the option to not choose a value for this goal. - avoidGoal :: ConflictSet -> ExploreState -> ConflictSetLog a - avoidGoal cs !es = - logBackjump (cs `CS.union` lastCS) $ - - -- Use 'lastCS' below instead of 'cs' since we do not want to - -- double-count the additionally accumulated conflicts. 
- es { esConflictMap = updateCM lastCS (esConflictMap es) } - - logBackjump :: ConflictSet -> ExploreState -> ConflictSetLog a - logBackjump cs es = - failWith (Failure cs Backjump) $ - if reachedBjLimit (esBackjumps es) - then BackjumpLimit - else NoSolution cs es { esBackjumps = esBackjumps es + 1 } - where - reachedBjLimit = case mbj of - Nothing -> const False - Just limit -> (== limit) - - -- The solver does not count or log backjumps at levels where the conflict - -- set does not contain the current variable. Otherwise, there would be many - -- consecutive log messages about backjumping with the same conflict set. - skipLoggingBackjump :: ConflictSet -> ExploreState -> ConflictSetLog a - skipLoggingBackjump cs es = fromProgress $ P.Fail (NoSolution cs es) - --- | The state that is read and written while exploring the search tree. -data ExploreState = ES { - esConflictMap :: !ConflictMap - , esBackjumps :: !Int - } - -data IntermediateFailure = - NoSolution ConflictSet ExploreState - | BackjumpLimit - -type ConflictSetLog = RetryLog Message IntermediateFailure - -getBestGoal :: ConflictMap -> P.PSQ (Goal QPN) a -> (Goal QPN, a) -getBestGoal cm = - P.maximumBy - ( flip (M.findWithDefault 0) cm - . (\ (Goal v _) -> v) - ) - -getFirstGoal :: P.PSQ (Goal QPN) a -> (Goal QPN, a) -getFirstGoal ts = - P.casePSQ ts - (error "getFirstGoal: empty goal choice") -- empty goal choice is an internal error - (\ k v _xs -> (k, v)) -- commit to the first goal choice - -updateCM :: ConflictSet -> ConflictMap -> ConflictMap -updateCM cs cm = - L.foldl' (\ cmc k -> M.insertWith (+) k 1 cmc) cm (CS.toList cs) - --- | Record complete assignments on 'Done' nodes. 
-assign :: Tree d c -> Tree Assignment c -assign tree = cata go tree $ A M.empty M.empty M.empty - where - go :: TreeF d c (Assignment -> Tree Assignment c) - -> (Assignment -> Tree Assignment c) - go (FailF c fr) _ = Fail c fr - go (DoneF rdm _) a = Done rdm a - go (PChoiceF qpn rdm y ts) (A pa fa sa) = PChoice qpn rdm y $ W.mapWithKey f ts - where f (POption k _) r = r (A (M.insert qpn k pa) fa sa) - go (FChoiceF qfn rdm y t m d ts) (A pa fa sa) = FChoice qfn rdm y t m d $ W.mapWithKey f ts - where f k r = r (A pa (M.insert qfn k fa) sa) - go (SChoiceF qsn rdm y t ts) (A pa fa sa) = SChoice qsn rdm y t $ W.mapWithKey f ts - where f k r = r (A pa fa (M.insert qsn k sa)) - go (GoalChoiceF rdm ts) a = GoalChoice rdm $ fmap ($ a) ts - --- | A tree traversal that simultaneously propagates conflict sets up --- the tree from the leaves and creates a log. -exploreLog :: Maybe Int -> EnableBackjumping -> CountConflicts - -> Tree Assignment QGoalReason - -> ConflictSetLog (Assignment, RevDepMap) -exploreLog mbj enableBj (CountConflicts countConflicts) t = cata go t initES - where - getBestGoal' :: P.PSQ (Goal QPN) a -> ConflictMap -> (Goal QPN, a) - getBestGoal' - | countConflicts = \ ts cm -> getBestGoal cm ts - | otherwise = \ ts _ -> getFirstGoal ts - - go :: TreeF Assignment QGoalReason (ExploreState -> ConflictSetLog (Assignment, RevDepMap)) - -> (ExploreState -> ConflictSetLog (Assignment, RevDepMap)) - go (FailF c fr) = \ !es -> - let es' = es { esConflictMap = updateCM c (esConflictMap es) } - in failWith (Failure c fr) (NoSolution c es') - go (DoneF rdm a) = \ _ -> succeedWith Success (a, rdm) - go (PChoiceF qpn _ gr ts) = - backjump mbj enableBj (P qpn) (avoidSet (P qpn) gr) $ -- try children in order, - W.mapWithKey -- when descending ... - (\ k r es -> tryWith (TryP qpn k) (r es)) - ts - go (FChoiceF qfn _ gr _ _ _ ts) = - backjump mbj enableBj (F qfn) (avoidSet (F qfn) gr) $ -- try children in order, - W.mapWithKey -- when descending ... 
- (\ k r es -> tryWith (TryF qfn k) (r es)) - ts - go (SChoiceF qsn _ gr _ ts) = - backjump mbj enableBj (S qsn) (avoidSet (S qsn) gr) $ -- try children in order, - W.mapWithKey -- when descending ... - (\ k r es -> tryWith (TryS qsn k) (r es)) - ts - go (GoalChoiceF _ ts) = \ es -> - let (k, v) = getBestGoal' ts (esConflictMap es) - in continueWith (Next k) (v es) - - initES = ES { - esConflictMap = M.empty - , esBackjumps = 0 - } - --- | Build a conflict set corresponding to the (virtual) option not to --- choose a solution for a goal at all. --- --- In the solver, the set of goals is not statically determined, but depends --- on the choices we make. Therefore, when dealing with conflict sets, we --- always have to consider that we could perhaps make choices that would --- avoid the existence of the goal completely. --- --- Whenever we actually introduce a choice in the tree, we have already established --- that the goal cannot be avoided. This is tracked in the "goal reason". --- The choice to avoid the goal therefore is a conflict between the goal itself --- and its goal reason. We build this set here, and pass it to the 'backjump' --- function as the last conflict set. --- --- This has two effects: --- --- - In a situation where there are no choices available at all (this happens --- if an unknown package is requested), the last conflict set becomes the --- actual conflict set. --- --- - In a situation where all of the children's conflict sets contain the --- current variable, the goal reason of the current node will be added to the --- conflict set. --- -avoidSet :: Var QPN -> QGoalReason -> ConflictSet -avoidSet var gr = - CS.union (CS.singleton var) (goalReasonToCS gr) - --- | Interface. --- --- Takes as an argument a limit on allowed backjumps. If the limit is 'Nothing', --- then infinitely many backjumps are allowed. If the limit is 'Just 0', --- backtracking is completely disabled. 
-backjumpAndExplore :: Maybe Int - -> EnableBackjumping - -> CountConflicts - -> Tree d QGoalReason - -> RetryLog Message SolverFailure (Assignment, RevDepMap) -backjumpAndExplore mbj enableBj countConflicts = - mapFailure convertFailure . exploreLog mbj enableBj countConflicts . assign - where - convertFailure (NoSolution cs es) = ExhaustiveSearch cs (esConflictMap es) - convertFailure BackjumpLimit = BackjumpLimitReached diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Flag.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Flag.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Flag.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Flag.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,108 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -module Distribution.Solver.Modular.Flag - ( FInfo(..) - , Flag - , FlagInfo - , FN(..) - , QFN - , QSN - , Stanza - , SN(..) - , WeakOrTrivial(..) - , FlagValue(..) - , mkFlag - , showQFN - , showQFNBool - , showFlagValue - , showQSN - , showQSNBool - , showSBool - ) where - -import Data.Map as M -import Prelude hiding (pi) - -import qualified Distribution.PackageDescription as P -- from Cabal - -import Distribution.Solver.Types.Flag -import Distribution.Solver.Types.OptionalStanza -import Distribution.Solver.Types.PackagePath - --- | Flag name. Consists of a package instance and the flag identifier itself. -data FN qpn = FN qpn Flag - deriving (Eq, Ord, Show, Functor) - --- | Flag identifier. Just a string. -type Flag = P.FlagName - --- | Stanza identifier. -type Stanza = OptionalStanza - -unFlag :: Flag -> String -unFlag = P.unFlagName - -mkFlag :: String -> Flag -mkFlag = P.mkFlagName - --- | Flag info. Default value, whether the flag is manual, and --- whether the flag is weak. 
Manual flags can only be set explicitly. --- Weak flags are typically deferred by the solver. -data FInfo = FInfo { fdefault :: Bool, fmanual :: FlagType, fweak :: WeakOrTrivial } - deriving (Eq, Show) - --- | Flag defaults. -type FlagInfo = Map Flag FInfo - --- | Qualified flag name. -type QFN = FN QPN - --- | Stanza name. Paired with a package name, much like a flag. -data SN qpn = SN qpn Stanza - deriving (Eq, Ord, Show, Functor) - --- | Qualified stanza name. -type QSN = SN QPN - --- | A property of flag and stanza choices that determines whether the --- choice should be deferred in the solving process. --- --- A choice is called weak if we do want to defer it. This is the --- case for flags that should be implied by what's currently installed on --- the system, as opposed to flags that are used to explicitly enable or --- disable some functionality. --- --- A choice is called trivial if it clearly does not matter. The --- special case of triviality we actually consider is if there are no new --- dependencies introduced by the choice. -newtype WeakOrTrivial = WeakOrTrivial { unWeakOrTrivial :: Bool } - deriving (Eq, Ord, Show) - --- | Value shown for a flag in a solver log message. The message can refer to --- only the true choice, only the false choice, or both choices. -data FlagValue = FlagTrue | FlagFalse | FlagBoth - deriving (Eq, Show) - -showQFNBool :: QFN -> Bool -> String -showQFNBool qfn@(FN qpn _f) b = showQPN qpn ++ ":" ++ showFBool qfn b - -showQSNBool :: QSN -> Bool -> String -showQSNBool (SN qpn s) b = showQPN qpn ++ ":" ++ showSBool s b - -showFBool :: FN qpn -> Bool -> String -showFBool (FN _ f) v = P.showFlagValue (f, v) - --- | String representation of a flag-value pair. 
-showFlagValue :: P.FlagName -> FlagValue -> String -showFlagValue f FlagTrue = '+' : unFlag f -showFlagValue f FlagFalse = '-' : unFlag f -showFlagValue f FlagBoth = "+/-" ++ unFlag f - -showSBool :: Stanza -> Bool -> String -showSBool s True = "*" ++ showStanza s -showSBool s False = "!" ++ showStanza s - -showQFN :: QFN -> String -showQFN (FN qpn f) = showQPN qpn ++ ":" ++ unFlag f - -showQSN :: QSN -> String -showQSN (SN qpn s) = showQPN qpn ++ ":" ++ showStanza s diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/IndexConversion.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/IndexConversion.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/IndexConversion.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/IndexConversion.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,575 +0,0 @@ -module Distribution.Solver.Modular.IndexConversion - ( convPIs - ) where - -import Data.List as L -import Data.Map.Strict (Map) -import qualified Data.Map.Strict as M -import Data.Maybe -import Data.Monoid as Mon -import Data.Set as S - -import Distribution.Compiler -import Distribution.InstalledPackageInfo as IPI -import Distribution.Package -- from Cabal -import Distribution.Simple.BuildToolDepends -- from Cabal -import Distribution.Simple.Utils (cabalVersion) -- from Cabal -import Distribution.Types.ExeDependency -- from Cabal -import Distribution.Types.PkgconfigDependency -- from Cabal -import Distribution.Types.ComponentName -- from Cabal -import Distribution.Types.UnqualComponentName -- from Cabal -import Distribution.Types.CondTree -- from Cabal -import Distribution.Types.MungedPackageId -- from Cabal -import Distribution.Types.MungedPackageName -- from Cabal -import Distribution.PackageDescription as PD -- from Cabal -import 
Distribution.PackageDescription.Configuration as PDC -import qualified Distribution.Simple.PackageIndex as SI -import Distribution.System -import Distribution.Types.ForeignLib - -import Distribution.Solver.Types.ComponentDeps - ( Component(..), componentNameToComponent ) -import Distribution.Solver.Types.Flag -import Distribution.Solver.Types.LabeledPackageConstraint -import Distribution.Solver.Types.OptionalStanza -import Distribution.Solver.Types.PackageConstraint -import qualified Distribution.Solver.Types.PackageIndex as CI -import Distribution.Solver.Types.Settings -import Distribution.Solver.Types.SourcePackage - -import Distribution.Solver.Modular.Dependency as D -import Distribution.Solver.Modular.Flag as F -import Distribution.Solver.Modular.Index -import Distribution.Solver.Modular.Package -import Distribution.Solver.Modular.Tree -import Distribution.Solver.Modular.Version - --- | Convert both the installed package index and the source package --- index into one uniform solver index. --- --- We use 'allPackagesBySourcePackageId' for the installed package index --- because that returns us several instances of the same package and version --- in order of preference. This allows us in principle to \"shadow\" --- packages if there are several installed packages of the same version. --- There are currently some shortcomings in both GHC and Cabal in --- resolving these situations. However, the right thing to do is to --- fix the problem there, so for now, shadowing is only activated if --- explicitly requested. 
-convPIs :: OS -> Arch -> CompilerInfo -> Map PN [LabeledPackageConstraint] - -> ShadowPkgs -> StrongFlags -> SolveExecutables - -> SI.InstalledPackageIndex -> CI.PackageIndex (SourcePackage loc) - -> Index -convPIs os arch comp constraints sip strfl solveExes iidx sidx = - mkIndex $ - convIPI' sip iidx ++ convSPI' os arch comp constraints strfl solveExes sidx - --- | Convert a Cabal installed package index to the simpler, --- more uniform index format of the solver. -convIPI' :: ShadowPkgs -> SI.InstalledPackageIndex -> [(PN, I, PInfo)] -convIPI' (ShadowPkgs sip) idx = - -- apply shadowing whenever there are multiple installed packages with - -- the same version - [ maybeShadow (convIP idx pkg) - -- IMPORTANT to get internal libraries. See - -- Note [Index conversion with internal libraries] - | (_, pkgs) <- SI.allPackagesBySourcePackageIdAndLibName idx - , (maybeShadow, pkg) <- zip (id : repeat shadow) pkgs ] - where - - -- shadowing is recorded in the package info - shadow (pn, i, PInfo fdeps comps fds _) - | sip = (pn, i, PInfo fdeps comps fds (Just Shadowed)) - shadow x = x - --- | Extract/recover the the package ID from an installed package info, and convert it to a solver's I. -convId :: InstalledPackageInfo -> (PN, I) -convId ipi = (pn, I ver $ Inst $ IPI.installedUnitId ipi) - where MungedPackageId mpn ver = mungedId ipi - -- HACK. See Note [Index conversion with internal libraries] - pn = mkPackageName (unMungedPackageName mpn) - --- | Convert a single installed package into the solver-specific format. 
-convIP :: SI.InstalledPackageIndex -> InstalledPackageInfo -> (PN, I, PInfo) -convIP idx ipi = - case mapM (convIPId (DependencyReason pn M.empty S.empty) comp idx) (IPI.depends ipi) of - Nothing -> (pn, i, PInfo [] M.empty M.empty (Just Broken)) - Just fds -> ( pn - , i - , PInfo fds (M.singleton ExposedLib (IsBuildable True)) M.empty Nothing) - where - (pn, i) = convId ipi - -- 'sourceLibName' is unreliable, but for now we only really use this for - -- primary libs anyways - comp = componentNameToComponent $ libraryComponentName $ sourceLibName ipi --- TODO: Installed packages should also store their encapsulations! - --- Note [Index conversion with internal libraries] --- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ --- Something very interesting happens when we have internal libraries --- in our index. In this case, we maybe have p-0.1, which itself --- depends on the internal library p-internal ALSO from p-0.1. --- Here's the danger: --- --- - If we treat both of these packages as having PN "p", --- then the solver will try to pick one or the other, --- but never both. --- --- - If we drop the internal packages, now p-0.1 has a --- dangling dependency on an "installed" package we know --- nothing about. Oops. --- --- An expedient hack is to put p-internal into cabal-install's --- index as a MUNGED package name, so that it doesn't conflict --- with anyone else (except other instances of itself). But --- yet, we ought NOT to say that PNs in the solver are munged --- package names, because they're not; for source packages, --- we really will never see munged package names. --- --- The tension here is that the installed package index is actually --- per library, but the solver is per package. We need to smooth --- it over, and munging the package names is a pretty good way to --- do it. - --- | Convert dependencies specified by an installed package id into --- flagged dependencies of the solver. 
--- --- May return Nothing if the package can't be found in the index. That --- indicates that the original package having this dependency is broken --- and should be ignored. -convIPId :: DependencyReason PN -> Component -> SI.InstalledPackageIndex -> UnitId -> Maybe (FlaggedDep PN) -convIPId dr comp idx ipid = - case SI.lookupUnitId idx ipid of - Nothing -> Nothing - Just ipi -> let (pn, i) = convId ipi - in Just (D.Simple (LDep dr (Dep (PkgComponent pn ExposedLib) (Fixed i))) comp) - -- NB: something we pick up from the - -- InstalledPackageIndex is NEVER an executable - --- | Convert a cabal-install source package index to the simpler, --- more uniform index format of the solver. -convSPI' :: OS -> Arch -> CompilerInfo -> Map PN [LabeledPackageConstraint] - -> StrongFlags -> SolveExecutables - -> CI.PackageIndex (SourcePackage loc) -> [(PN, I, PInfo)] -convSPI' os arch cinfo constraints strfl solveExes = - L.map (convSP os arch cinfo constraints strfl solveExes) . CI.allPackages - --- | Convert a single source package into the solver-specific format. -convSP :: OS -> Arch -> CompilerInfo -> Map PN [LabeledPackageConstraint] - -> StrongFlags -> SolveExecutables -> SourcePackage loc -> (PN, I, PInfo) -convSP os arch cinfo constraints strfl solveExes (SourcePackage (PackageIdentifier pn pv) gpd _ _pl) = - let i = I pv InRepo - pkgConstraints = fromMaybe [] $ M.lookup pn constraints - in (pn, i, convGPD os arch cinfo pkgConstraints strfl solveExes pn gpd) - --- We do not use 'flattenPackageDescription' or 'finalizePD' --- from 'Distribution.PackageDescription.Configuration' here, because we --- want to keep the condition tree, but simplify much of the test. - --- | Convert a generic package description to a solver-specific 'PInfo'. 
-convGPD :: OS -> Arch -> CompilerInfo -> [LabeledPackageConstraint] - -> StrongFlags -> SolveExecutables -> PN -> GenericPackageDescription - -> PInfo -convGPD os arch cinfo constraints strfl solveExes pn - (GenericPackageDescription pkg flags mlib sub_libs flibs exes tests benchs) = - let - fds = flagInfo strfl flags - - -- | We have to be careful to filter out dependencies on - -- internal libraries, since they don't refer to real packages - -- and thus cannot actually be solved over. We'll do this - -- by creating a set of package names which are "internal" - -- and dropping them as we convert. - - ipns = S.fromList $ [ unqualComponentNameToPackageName nm - | (nm, _) <- sub_libs ] - - conv :: Mon.Monoid a => Component -> (a -> BuildInfo) -> DependencyReason PN -> - CondTree ConfVar [Dependency] a -> FlaggedDeps PN - conv comp getInfo dr = - convCondTree M.empty dr pkg os arch cinfo pn fds comp getInfo ipns solveExes . - PDC.addBuildableCondition getInfo - - initDR = DependencyReason pn M.empty S.empty - - flagged_deps - = concatMap (\ds -> conv ComponentLib libBuildInfo initDR ds) (maybeToList mlib) - ++ concatMap (\(nm, ds) -> conv (ComponentSubLib nm) libBuildInfo initDR ds) sub_libs - ++ concatMap (\(nm, ds) -> conv (ComponentFLib nm) foreignLibBuildInfo initDR ds) flibs - ++ concatMap (\(nm, ds) -> conv (ComponentExe nm) buildInfo initDR ds) exes - ++ prefix (Stanza (SN pn TestStanzas)) - (L.map (\(nm, ds) -> conv (ComponentTest nm) testBuildInfo (addStanza TestStanzas initDR) ds) - tests) - ++ prefix (Stanza (SN pn BenchStanzas)) - (L.map (\(nm, ds) -> conv (ComponentBench nm) benchmarkBuildInfo (addStanza BenchStanzas initDR) ds) - benchs) - ++ maybe [] (convSetupBuildInfo pn) (setupBuildInfo pkg) - - addStanza :: Stanza -> DependencyReason pn -> DependencyReason pn - addStanza s (DependencyReason pn' fs ss) = DependencyReason pn' fs (S.insert s ss) - - -- | We infer the maximally supported spec-version from @lib:Cabal@'s version - -- - -- As we cannot 
predict the future, we can only properly support - -- spec-versions predating (and including) the @lib:Cabal@ version - -- used by @cabal-install@. - -- - -- This relies on 'cabalVersion' having always at least 3 components to avoid - -- comparisons like @2.0.0 > 2.0@ which would result in confusing results. - -- - -- NOTE: Before we can switch to a /normalised/ spec-version - -- comparison (e.g. by truncating to 3 components, and removing - -- trailing zeroes) we'd have to make sure all other places where - -- the spec-version is compared against a bound do it - -- consistently. - maxSpecVer = cabalVersion - - -- | Required/declared spec-version of the package - -- - -- We don't truncate patch-levels, as specifying a patch-level - -- spec-version is discouraged and not supported anymore starting - -- with spec-version 2.2. - reqSpecVer = specVersion pkg - - -- | A too-new specVersion is turned into a global 'FailReason' - -- which prevents the solver from selecting this release (and if - -- forced to, emit a meaningful solver error message). - fr | reqSpecVer > maxSpecVer = Just (UnsupportedSpecVer reqSpecVer) - | otherwise = Nothing - - components :: Map ExposedComponent IsBuildable - components = M.fromList $ libComps ++ exeComps - where - libComps = [ (ExposedLib, IsBuildable $ isBuildable libBuildInfo lib) - | lib <- maybeToList mlib ] - exeComps = [ (ExposedExe name, IsBuildable $ isBuildable buildInfo exe) - | (name, exe) <- exes ] - isBuildable = isBuildableComponent os arch cinfo constraints - - in PInfo flagged_deps components fds fr - --- | Returns true if the component is buildable in the given environment. --- This function can give false-positives. For example, it only considers flags --- that are set by unqualified flag constraints, and it doesn't check whether --- the intra-package dependencies of a component are buildable. It is also --- possible for the solver to later assign a value to an automatic flag that --- makes the component unbuildable. 
-isBuildableComponent :: OS - -> Arch - -> CompilerInfo - -> [LabeledPackageConstraint] - -> (a -> BuildInfo) - -> CondTree ConfVar [Dependency] a - -> Bool -isBuildableComponent os arch cinfo constraints getInfo tree = - case simplifyCondition $ extractCondition (buildable . getInfo) tree of - Lit False -> False - _ -> True - where - flagAssignment :: [(FlagName, Bool)] - flagAssignment = - mconcat [ unFlagAssignment fa - | PackageConstraint (ScopeAnyQualifier _) (PackagePropertyFlags fa) - <- L.map unlabelPackageConstraint constraints] - - -- Simplify the condition, using the current environment. Most of this - -- function was copied from convBranch and - -- Distribution.Types.Condition.simplifyCondition. - simplifyCondition :: Condition ConfVar -> Condition ConfVar - simplifyCondition (Var (OS os')) = Lit (os == os') - simplifyCondition (Var (Arch arch')) = Lit (arch == arch') - simplifyCondition (Var (Impl cf cvr)) - | matchImpl (compilerInfoId cinfo) || - -- fixme: Nothing should be treated as unknown, rather than empty - -- list. This code should eventually be changed to either - -- support partial resolution of compiler flags or to - -- complain about incompletely configured compilers. 
- any matchImpl (fromMaybe [] $ compilerInfoCompat cinfo) = Lit True - | otherwise = Lit False - where - matchImpl (CompilerId cf' cv) = cf == cf' && checkVR cvr cv - simplifyCondition (Var (Flag f)) - | Just b <- L.lookup f flagAssignment = Lit b - simplifyCondition (Var v) = Var v - simplifyCondition (Lit b) = Lit b - simplifyCondition (CNot c) = - case simplifyCondition c of - Lit True -> Lit False - Lit False -> Lit True - c' -> CNot c' - simplifyCondition (COr c d) = - case (simplifyCondition c, simplifyCondition d) of - (Lit False, d') -> d' - (Lit True, _) -> Lit True - (c', Lit False) -> c' - (_, Lit True) -> Lit True - (c', d') -> COr c' d' - simplifyCondition (CAnd c d) = - case (simplifyCondition c, simplifyCondition d) of - (Lit False, _) -> Lit False - (Lit True, d') -> d' - (_, Lit False) -> Lit False - (c', Lit True) -> c' - (c', d') -> CAnd c' d' - --- | Create a flagged dependency tree from a list @fds@ of flagged --- dependencies, using @f@ to form the tree node (@f@ will be --- something like @Stanza sn@). -prefix :: (FlaggedDeps qpn -> FlaggedDep qpn) - -> [FlaggedDeps qpn] -> FlaggedDeps qpn -prefix _ [] = [] -prefix f fds = [f (concat fds)] - --- | Convert flag information. Automatic flags are now considered weak --- unless strong flags have been selected explicitly. -flagInfo :: StrongFlags -> [PD.Flag] -> FlagInfo -flagInfo (StrongFlags strfl) = - M.fromList . L.map (\ (MkFlag fn _ b m) -> (fn, FInfo b (flagType m) (weak m))) - where - weak m = WeakOrTrivial $ not (strfl || m) - flagType m = if m then Manual else Automatic - --- | Internal package names, which should not be interpreted as true --- dependencies. -type IPNs = Set PN - --- | Convenience function to delete a 'Dependency' if it's --- for a 'PN' that isn't actually real. -filterIPNs :: IPNs -> Dependency -> Maybe Dependency -filterIPNs ipns d@(Dependency pn _) - | S.notMember pn ipns = Just d - | otherwise = Nothing - --- | Convert condition trees to flagged dependencies. 
Mutually --- recursive with 'convBranch'. See 'convBranch' for an explanation --- of all arguments preceeding the input 'CondTree'. -convCondTree :: Map FlagName Bool -> DependencyReason PN -> PackageDescription -> OS -> Arch -> CompilerInfo -> PN -> FlagInfo -> - Component -> - (a -> BuildInfo) -> - IPNs -> - SolveExecutables -> - CondTree ConfVar [Dependency] a -> FlaggedDeps PN -convCondTree flags dr pkg os arch cinfo pn fds comp getInfo ipns solveExes@(SolveExecutables solveExes') (CondNode info ds branches) = - -- Merge all library and build-tool dependencies at every level in - -- the tree of flagged dependencies. Otherwise 'extractCommon' - -- could create duplicate dependencies, and the number of - -- duplicates could grow exponentially from the leaves to the root - -- of the tree. - mergeSimpleDeps $ - L.map (\d -> D.Simple (convLibDep dr d) comp) - (mapMaybe (filterIPNs ipns) ds) -- unconditional package dependencies - ++ L.map (\e -> D.Simple (LDep dr (Ext e)) comp) (PD.allExtensions bi) -- unconditional extension dependencies - ++ L.map (\l -> D.Simple (LDep dr (Lang l)) comp) (PD.allLanguages bi) -- unconditional language dependencies - ++ L.map (\(PkgconfigDependency pkn vr) -> D.Simple (LDep dr (Pkg pkn vr)) comp) (PD.pkgconfigDepends bi) -- unconditional pkg-config dependencies - ++ concatMap (convBranch flags dr pkg os arch cinfo pn fds comp getInfo ipns solveExes) branches - -- build-tools dependencies - -- NB: Only include these dependencies if SolveExecutables - -- is True. It might be false in the legacy solver - -- codepath, in which case there won't be any record of - -- an executable we need. 
- ++ [ D.Simple (convExeDep dr exeDep) comp - | solveExes' - , exeDep <- getAllToolDependencies pkg bi - , not $ isInternal pkg exeDep - ] - where - bi = getInfo info - -data SimpleFlaggedDepKey qpn = - SimpleFlaggedDepKey (PkgComponent qpn) Component - deriving (Eq, Ord) - -data SimpleFlaggedDepValue qpn = SimpleFlaggedDepValue (DependencyReason qpn) VR - --- | Merge 'Simple' dependencies that apply to the same library or build-tool. --- This function should be able to merge any two dependencies that can be merged --- by extractCommon, in order to prevent the exponential growth of dependencies. --- --- Note that this function can merge dependencies that have different --- DependencyReasons, which can make the DependencyReasons less precise. This --- loss of precision only affects performance and log messages, not correctness. --- However, when 'mergeSimpleDeps' is only called on dependencies at a single --- location in the dependency tree, the only difference between --- DependencyReasons should be flags that have value FlagBoth. Adding extra --- flags with value FlagBoth should not affect performance, since they are not --- added to the conflict set. The only downside is the possibility of the log --- incorrectly saying that the flag contributed to excluding a specific version --- of a dependency. For example, if +/-flagA introduces pkg >=2 and +/-flagB --- introduces pkg <5, the merged dependency would mean that --- +/-flagA and +/-flagB introduce pkg >=2 && <5, which would incorrectly imply --- that +/-flagA excludes pkg-6. 
-mergeSimpleDeps :: Ord qpn => FlaggedDeps qpn -> FlaggedDeps qpn -mergeSimpleDeps deps = L.map (uncurry toFlaggedDep) (M.toList merged) ++ unmerged - where - (merged, unmerged) = L.foldl' f (M.empty, []) deps - where - f :: Ord qpn - => (Map (SimpleFlaggedDepKey qpn) (SimpleFlaggedDepValue qpn), FlaggedDeps qpn) - -> FlaggedDep qpn - -> (Map (SimpleFlaggedDepKey qpn) (SimpleFlaggedDepValue qpn), FlaggedDeps qpn) - f (merged', unmerged') (D.Simple (LDep dr (Dep dep (Constrained vr))) comp) = - ( M.insertWith mergeValues - (SimpleFlaggedDepKey dep comp) - (SimpleFlaggedDepValue dr vr) - merged' - , unmerged') - f (merged', unmerged') unmergeableDep = (merged', unmergeableDep : unmerged') - - mergeValues :: SimpleFlaggedDepValue qpn - -> SimpleFlaggedDepValue qpn - -> SimpleFlaggedDepValue qpn - mergeValues (SimpleFlaggedDepValue dr1 vr1) (SimpleFlaggedDepValue dr2 vr2) = - SimpleFlaggedDepValue (unionDRs dr1 dr2) (vr1 .&&. vr2) - - toFlaggedDep :: SimpleFlaggedDepKey qpn - -> SimpleFlaggedDepValue qpn - -> FlaggedDep qpn - toFlaggedDep (SimpleFlaggedDepKey dep comp) (SimpleFlaggedDepValue dr vr) = - D.Simple (LDep dr (Dep dep (Constrained vr))) comp - --- | Branch interpreter. Mutually recursive with 'convCondTree'. --- --- Here, we try to simplify one of Cabal's condition tree branches into the --- solver's flagged dependency format, which is weaker. Condition trees can --- contain complex logical expression composed from flag choices and special --- flags (such as architecture, or compiler flavour). We try to evaluate the --- special flags and subsequently simplify to a tree that only depends on --- simple flag choices. --- --- This function takes a number of arguments: --- --- 1. A map of flag values that have already been chosen. It allows --- convBranch to avoid creating nested FlaggedDeps that are --- controlled by the same flag and avoid creating DependencyReasons with --- conflicting values for the same flag. --- --- 2. 
The DependencyReason calculated at this point in the tree of --- conditionals. The flag values in the DependencyReason are similar to --- the values in the map above, except for the use of FlagBoth. --- --- 3. Some pre dependency-solving known information ('OS', 'Arch', --- 'CompilerInfo') for @os()@, @arch()@ and @impl()@ variables, --- --- 4. The package name @'PN'@ which this condition tree --- came from, so that we can correctly associate @flag()@ --- variables with the correct package name qualifier, --- --- 5. The flag defaults 'FlagInfo' so that we can populate --- 'Flagged' dependencies with 'FInfo', --- --- 6. The name of the component 'Component' so we can record where --- the fine-grained information about where the component came --- from (see 'convCondTree'), and --- --- 7. A selector to extract the 'BuildInfo' from the leaves of --- the 'CondTree' (which actually contains the needed --- dependency information.) --- --- 8. The set of package names which should be considered internal --- dependencies, and thus not handled as dependencies. 
-convBranch :: Map FlagName Bool - -> DependencyReason PN - -> PackageDescription - -> OS - -> Arch - -> CompilerInfo - -> PN - -> FlagInfo - -> Component - -> (a -> BuildInfo) - -> IPNs - -> SolveExecutables - -> CondBranch ConfVar [Dependency] a - -> FlaggedDeps PN -convBranch flags dr pkg os arch cinfo pn fds comp getInfo ipns solveExes (CondBranch c' t' mf') = - go c' - (\flags' dr' -> convCondTree flags' dr' pkg os arch cinfo pn fds comp getInfo ipns solveExes t') - (\flags' dr' -> maybe [] (convCondTree flags' dr' pkg os arch cinfo pn fds comp getInfo ipns solveExes) mf') - flags dr - where - go :: Condition ConfVar - -> (Map FlagName Bool -> DependencyReason PN -> FlaggedDeps PN) - -> (Map FlagName Bool -> DependencyReason PN -> FlaggedDeps PN) - -> Map FlagName Bool -> DependencyReason PN -> FlaggedDeps PN - go (Lit True) t _ = t - go (Lit False) _ f = f - go (CNot c) t f = go c f t - go (CAnd c d) t f = go c (go d t f) f - go (COr c d) t f = go c t (go d t f) - go (Var (Flag fn)) t f = \flags' -> - case M.lookup fn flags' of - Just True -> t flags' - Just False -> f flags' - Nothing -> \dr' -> - -- Add each flag to the DependencyReason for all dependencies below, - -- including any extracted dependencies. Extracted dependencies are - -- introduced by both flag values (FlagBoth). Note that we don't - -- actually need to add the flag to the extracted dependencies for - -- correct backjumping; the information only improves log messages - -- by giving the user the full reason for each dependency. - let addFlagValue v = addFlagToDependencyReason fn v dr' - addFlag v = M.insert fn v flags' - in extractCommon (t (addFlag True) (addFlagValue FlagBoth)) - (f (addFlag False) (addFlagValue FlagBoth)) - ++ [ Flagged (FN pn fn) (fds M.! 
fn) (t (addFlag True) (addFlagValue FlagTrue)) - (f (addFlag False) (addFlagValue FlagFalse)) ] - go (Var (OS os')) t f - | os == os' = t - | otherwise = f - go (Var (Arch arch')) t f - | arch == arch' = t - | otherwise = f - go (Var (Impl cf cvr)) t f - | matchImpl (compilerInfoId cinfo) || - -- fixme: Nothing should be treated as unknown, rather than empty - -- list. This code should eventually be changed to either - -- support partial resolution of compiler flags or to - -- complain about incompletely configured compilers. - any matchImpl (fromMaybe [] $ compilerInfoCompat cinfo) = t - | otherwise = f - where - matchImpl (CompilerId cf' cv) = cf == cf' && checkVR cvr cv - - addFlagToDependencyReason :: FlagName -> FlagValue -> DependencyReason pn -> DependencyReason pn - addFlagToDependencyReason fn v (DependencyReason pn' fs ss) = - DependencyReason pn' (M.insert fn v fs) ss - - -- If both branches contain the same package as a simple dep, we lift it to - -- the next higher-level, but with the union of version ranges. This - -- heuristic together with deferring flag choices will then usually first - -- resolve this package, and try an already installed version before imposing - -- a default flag choice that might not be what we want. - -- - -- Note that we make assumptions here on the form of the dependencies that - -- can occur at this point. In particular, no occurrences of Fixed, as all - -- dependencies below this point have been generated using 'convLibDep'. - -- - -- WARNING: This is quadratic! - extractCommon :: Eq pn => FlaggedDeps pn -> FlaggedDeps pn -> FlaggedDeps pn - extractCommon ps ps' = - -- Union the DependencyReasons, because the extracted dependency can be - -- avoided by removing the dependency from either side of the - -- conditional. - [ D.Simple (LDep (unionDRs vs1 vs2) (Dep dep1 (Constrained $ vr1 .||. 
vr2))) comp - | D.Simple (LDep vs1 (Dep dep1 (Constrained vr1))) _ <- ps - , D.Simple (LDep vs2 (Dep dep2 (Constrained vr2))) _ <- ps' - , dep1 == dep2 - ] - --- | Merge DependencyReasons by unioning their variables. -unionDRs :: DependencyReason pn -> DependencyReason pn -> DependencyReason pn -unionDRs (DependencyReason pn' fs1 ss1) (DependencyReason _ fs2 ss2) = - DependencyReason pn' (M.union fs1 fs2) (S.union ss1 ss2) - --- | Convert a Cabal dependency on a library to a solver-specific dependency. -convLibDep :: DependencyReason PN -> Dependency -> LDep PN -convLibDep dr (Dependency pn vr) = LDep dr $ Dep (PkgComponent pn ExposedLib) (Constrained vr) - --- | Convert a Cabal dependency on an executable (build-tools) to a solver-specific dependency. -convExeDep :: DependencyReason PN -> ExeDependency -> LDep PN -convExeDep dr (ExeDependency pn exe vr) = LDep dr $ Dep (PkgComponent pn (ExposedExe exe)) (Constrained vr) - --- | Convert setup dependencies -convSetupBuildInfo :: PN -> SetupBuildInfo -> FlaggedDeps PN -convSetupBuildInfo pn nfo = - L.map (\d -> D.Simple (convLibDep (DependencyReason pn M.empty S.empty) d) ComponentSetup) - (PD.setupDepends nfo) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Index.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Index.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Index.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Index.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,55 +0,0 @@ -module Distribution.Solver.Modular.Index - ( Index - , PInfo(..) - , IsBuildable(..) 
- , defaultQualifyOptions - , mkIndex - ) where - -import Data.List as L -import Data.Map as M -import Prelude hiding (pi) - -import Distribution.Solver.Modular.Dependency -import Distribution.Solver.Modular.Flag -import Distribution.Solver.Modular.Package -import Distribution.Solver.Modular.Tree - --- | An index contains information about package instances. This is a nested --- dictionary. Package names are mapped to instances, which in turn is mapped --- to info. -type Index = Map PN (Map I PInfo) - --- | Info associated with a package instance. --- Currently, dependencies, component names, flags and failure reasons. --- The component map records whether any components are unbuildable in the --- current environment (compiler, os, arch, and global flag constraints). --- Packages that have a failure reason recorded for them are disabled --- globally, for reasons external to the solver. We currently use this --- for shadowing which essentially is a GHC limitation, and for --- installed packages that are broken. -data PInfo = PInfo (FlaggedDeps PN) (Map ExposedComponent IsBuildable) FlagInfo (Maybe FailReason) - --- | Whether a component is made unbuildable by a "buildable: False" field. -newtype IsBuildable = IsBuildable Bool - -mkIndex :: [(PN, I, PInfo)] -> Index -mkIndex xs = M.map M.fromList (groupMap (L.map (\ (pn, i, pi) -> (pn, (i, pi))) xs)) - -groupMap :: Ord a => [(a, b)] -> Map a [b] -groupMap xs = M.fromListWith (flip (++)) (L.map (\ (x, y) -> (x, [y])) xs) - -defaultQualifyOptions :: Index -> QualifyOptions -defaultQualifyOptions idx = QO { - qoBaseShim = or [ dep == base - | -- Find all versions of base .. - Just is <- [M.lookup base idx] - -- .. which are installed .. - , (I _ver (Inst _), PInfo deps _comps _flagNfo _fr) <- M.toList is - -- .. and flatten all their dependencies .. 
- , (LDep _ (Dep (PkgComponent dep _) _ci), _comp) <- flattenFlaggedDeps deps - ] - , qoSetupIndependent = True - } - where - base = mkPackageName "base" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/LabeledGraph.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/LabeledGraph.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/LabeledGraph.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/LabeledGraph.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,116 +0,0 @@ --- | Wrapper around Data.Graph with support for edge labels -{-# LANGUAGE ScopedTypeVariables #-} -module Distribution.Solver.Modular.LabeledGraph ( - -- * Graphs - Graph - , Vertex - -- ** Building graphs - , graphFromEdges - , graphFromEdges' - , buildG - , transposeG - -- ** Graph properties - , vertices - , edges - -- ** Operations on the underlying unlabeled graph - , forgetLabels - , topSort - ) where - -import Data.Array -import Data.Graph (Vertex, Bounds) -import Data.List (sortBy) -import Data.Maybe (mapMaybe) -import qualified Data.Graph as G - -{------------------------------------------------------------------------------- - Types --------------------------------------------------------------------------------} - -type Graph e = Array Vertex [(e, Vertex)] -type Edge e = (Vertex, e, Vertex) - -{------------------------------------------------------------------------------- - Building graphs --------------------------------------------------------------------------------} - --- | Construct an edge-labeled graph --- --- This is a simple adaptation of the definition in Data.Graph -graphFromEdges :: forall key node edge. 
Ord key - => [ (node, key, [(edge, key)]) ] - -> ( Graph edge - , Vertex -> (node, key, [(edge, key)]) - , key -> Maybe Vertex - ) -graphFromEdges edges0 = - (graph, \v -> vertex_map ! v, key_vertex) - where - max_v = length edges0 - 1 - bounds0 = (0, max_v) :: (Vertex, Vertex) - sorted_edges = sortBy lt edges0 - edges1 = zip [0..] sorted_edges - - graph = array bounds0 [(v, (mapMaybe mk_edge ks)) - | (v, (_, _, ks)) <- edges1] - key_map = array bounds0 [(v, k ) - | (v, (_, k, _ )) <- edges1] - vertex_map = array bounds0 edges1 - - (_,k1,_) `lt` (_,k2,_) = k1 `compare` k2 - - mk_edge :: (edge, key) -> Maybe (edge, Vertex) - mk_edge (edge, key) = do v <- key_vertex key ; return (edge, v) - - -- returns Nothing for non-interesting vertices - key_vertex :: key -> Maybe Vertex - key_vertex k = findVertex 0 max_v - where - findVertex a b - | a > b = Nothing - | otherwise = case compare k (key_map ! mid) of - LT -> findVertex a (mid-1) - EQ -> Just mid - GT -> findVertex (mid+1) b - where - mid = a + (b - a) `div` 2 - -graphFromEdges' :: Ord key - => [ (node, key, [(edge, key)]) ] - -> ( Graph edge - , Vertex -> (node, key, [(edge, key)]) - ) -graphFromEdges' x = (a,b) - where - (a,b,_) = graphFromEdges x - -transposeG :: Graph e -> Graph e -transposeG g = buildG (bounds g) (reverseE g) - -buildG :: Bounds -> [Edge e] -> Graph e -buildG bounds0 edges0 = accumArray (flip (:)) [] bounds0 (map reassoc edges0) - where - reassoc (v, e, w) = (v, (e, w)) - -reverseE :: Graph e -> [Edge e] -reverseE g = [ (w, e, v) | (v, e, w) <- edges g ] - -{------------------------------------------------------------------------------- - Graph properties --------------------------------------------------------------------------------} - -vertices :: Graph e -> [Vertex] -vertices = indices - -edges :: Graph e -> [Edge e] -edges g = [ (v, e, w) | v <- vertices g, (e, w) <- g!v ] - -{------------------------------------------------------------------------------- - Operations on the underlying 
unlabelled graph --------------------------------------------------------------------------------} - -forgetLabels :: Graph e -> G.Graph -forgetLabels = fmap (map snd) - -topSort :: Graph e -> [Vertex] -topSort = G.topSort . forgetLabels diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Linking.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Linking.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Linking.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Linking.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,518 +0,0 @@ -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE MultiParamTypeClasses #-} -module Distribution.Solver.Modular.Linking ( - validateLinking - ) where - -import Prelude () -import Distribution.Solver.Compat.Prelude hiding (get,put) - -import Control.Exception (assert) -import Control.Monad.Reader -import Control.Monad.State -import Data.Function (on) -import Data.Map ((!)) -import Data.Set (Set) -import qualified Data.Map as M -import qualified Data.Set as S -import qualified Data.Traversable as T - -import Distribution.Client.Utils.Assertion -import Distribution.Solver.Modular.Assignment -import Distribution.Solver.Modular.Dependency -import Distribution.Solver.Modular.Flag -import Distribution.Solver.Modular.Index -import Distribution.Solver.Modular.Package -import Distribution.Solver.Modular.Tree -import qualified Distribution.Solver.Modular.ConflictSet as CS -import qualified Distribution.Solver.Modular.WeightedPSQ as W - -import Distribution.Solver.Types.OptionalStanza -import Distribution.Solver.Types.PackagePath -import Distribution.Types.GenericPackageDescription (unFlagName) - -{------------------------------------------------------------------------------- - Validation - - 
Validation of links is a separate pass that's performed after normal - validation. Validation of links checks that if the tree indicates that a - package is linked, then everything underneath that choice really matches the - package we have linked to. - - This is interesting because it isn't unidirectional. Consider that we've - chosen a.foo to be version 1 and later decide that b.foo should link to a.foo. - Now foo depends on bar. Because a.foo and b.foo are linked, it's required that - a.bar and b.bar are also linked. However, it's not required that we actually - choose a.bar before b.bar. Goal choice order is relatively free. It's possible - that we choose a.bar first, but also possible that we choose b.bar first. In - both cases, we have to recognize that we have freedom of choice for the first - of the two, but no freedom of choice for the second. - - This is what LinkGroups are all about. Using LinkGroup, we can record (in the - situation above) that a.bar and b.bar need to be linked even if we haven't - chosen either of them yet. --------------------------------------------------------------------------------} - -data ValidateState = VS { - vsIndex :: Index - , vsLinks :: Map QPN LinkGroup - , vsFlags :: FAssignment - , vsStanzas :: SAssignment - , vsQualifyOptions :: QualifyOptions - - -- Saved qualified dependencies. Every time 'validateLinking' makes a - -- package choice, it qualifies the package's dependencies and saves them in - -- this map. Then the qualified dependencies are available for subsequent - -- flag and stanza choices for the same package. - , vsSaved :: Map QPN (FlaggedDeps QPN) - } - -type Validate = Reader ValidateState - --- | Validate linked packages --- --- Verify that linked packages have --- --- * Linked dependencies, --- * Equal flag assignments --- * Equal stanza assignments -validateLinking :: Index -> Tree d c -> Tree d c -validateLinking index = (`runReader` initVS) . 
cata go - where - go :: TreeF d c (Validate (Tree d c)) -> Validate (Tree d c) - - go (PChoiceF qpn rdm gr cs) = - PChoice qpn rdm gr <$> T.sequence (W.mapWithKey (goP qpn) cs) - go (FChoiceF qfn rdm gr t m d cs) = - FChoice qfn rdm gr t m d <$> T.sequence (W.mapWithKey (goF qfn) cs) - go (SChoiceF qsn rdm gr t cs) = - SChoice qsn rdm gr t <$> T.sequence (W.mapWithKey (goS qsn) cs) - - -- For the other nodes we just recurse - go (GoalChoiceF rdm cs) = GoalChoice rdm <$> T.sequence cs - go (DoneF revDepMap s) = return $ Done revDepMap s - go (FailF conflictSet failReason) = return $ Fail conflictSet failReason - - -- Package choices - goP :: QPN -> POption -> Validate (Tree d c) -> Validate (Tree d c) - goP qpn@(Q _pp pn) opt@(POption i _) r = do - vs <- ask - let PInfo deps _ _ _ = vsIndex vs ! pn ! i - qdeps = qualifyDeps (vsQualifyOptions vs) qpn deps - newSaved = M.insert qpn qdeps (vsSaved vs) - case execUpdateState (pickPOption qpn opt qdeps) vs of - Left (cs, err) -> return $ Fail cs (DependenciesNotLinked err) - Right vs' -> local (const vs' { vsSaved = newSaved }) r - - -- Flag choices - goF :: QFN -> Bool -> Validate (Tree d c) -> Validate (Tree d c) - goF qfn b r = do - vs <- ask - case execUpdateState (pickFlag qfn b) vs of - Left (cs, err) -> return $ Fail cs (DependenciesNotLinked err) - Right vs' -> local (const vs') r - - -- Stanza choices (much the same as flag choices) - goS :: QSN -> Bool -> Validate (Tree d c) -> Validate (Tree d c) - goS qsn b r = do - vs <- ask - case execUpdateState (pickStanza qsn b) vs of - Left (cs, err) -> return $ Fail cs (DependenciesNotLinked err) - Right vs' -> local (const vs') r - - initVS :: ValidateState - initVS = VS { - vsIndex = index - , vsLinks = M.empty - , vsFlags = M.empty - , vsStanzas = M.empty - , vsQualifyOptions = defaultQualifyOptions index - , vsSaved = M.empty - } - -{------------------------------------------------------------------------------- - Updating the validation state 
--------------------------------------------------------------------------------} - -type Conflict = (ConflictSet, String) - -newtype UpdateState a = UpdateState { - unUpdateState :: StateT ValidateState (Either Conflict) a - } - deriving (Functor, Applicative, Monad) - -instance MonadState ValidateState UpdateState where - get = UpdateState $ get - put st = UpdateState $ do - expensiveAssert (lgInvariant $ vsLinks st) $ return () - put st - -lift' :: Either Conflict a -> UpdateState a -lift' = UpdateState . lift - -conflict :: Conflict -> UpdateState a -conflict = lift' . Left - -execUpdateState :: UpdateState () -> ValidateState -> Either Conflict ValidateState -execUpdateState = execStateT . unUpdateState - -pickPOption :: QPN -> POption -> FlaggedDeps QPN -> UpdateState () -pickPOption qpn (POption i Nothing) _deps = pickConcrete qpn i -pickPOption qpn (POption i (Just pp')) deps = pickLink qpn i pp' deps - -pickConcrete :: QPN -> I -> UpdateState () -pickConcrete qpn@(Q pp _) i = do - vs <- get - case M.lookup qpn (vsLinks vs) of - -- Package is not yet in a LinkGroup. Create a new singleton link group. - Nothing -> do - let lg = lgSingleton qpn (Just $ PI pp i) - updateLinkGroup lg - - -- Package is already in a link group. Since we are picking a concrete - -- instance here, it must by definition be the canonical package. 
- Just lg -> - makeCanonical lg qpn i - -pickLink :: QPN -> I -> PackagePath -> FlaggedDeps QPN -> UpdateState () -pickLink qpn@(Q _pp pn) i pp' deps = do - vs <- get - - -- The package might already be in a link group - -- (because one of its reverse dependencies is) - let lgSource = case M.lookup qpn (vsLinks vs) of - Nothing -> lgSingleton qpn Nothing - Just lg -> lg - - -- Find the link group for the package we are linking to - -- - -- Since the builder never links to a package without having first picked a - -- concrete instance for that package, and since we create singleton link - -- groups for concrete instances, this link group must exist (and must - -- in fact already have a canonical member). - let target = Q pp' pn - lgTarget = vsLinks vs ! target - - -- Verify here that the member we add is in fact for the same package and - -- matches the version of the canonical instance. However, violations of - -- these checks would indicate a bug in the linker, not a true conflict. - let sanityCheck :: Maybe (PI PackagePath) -> Bool - sanityCheck Nothing = False - sanityCheck (Just (PI _ canonI)) = pn == lgPackage lgTarget && i == canonI - assert (sanityCheck (lgCanon lgTarget)) $ return () - - -- Merge the two link groups (updateLinkGroup will propagate the change) - lgTarget' <- lift' $ lgMerge CS.empty lgSource lgTarget - updateLinkGroup lgTarget' - - -- Make sure all dependencies are linked as well - linkDeps target deps - -makeCanonical :: LinkGroup -> QPN -> I -> UpdateState () -makeCanonical lg qpn@(Q pp _) i = - case lgCanon lg of - -- There is already a canonical member. Fail. - Just _ -> - conflict ( CS.insert (P qpn) (lgConflictSet lg) - , "cannot make " ++ showQPN qpn - ++ " canonical member of " ++ showLinkGroup lg - ) - Nothing -> do - let lg' = lg { lgCanon = Just (PI pp i) } - updateLinkGroup lg' - --- | Link the dependencies of linked parents. 
--- --- When we decide to link one package against another we walk through the --- package's direct depedencies and make sure that they're all linked to each --- other by merging their link groups (or creating new singleton link groups if --- they don't have link groups yet). We do not need to do this recursively, --- because having the direct dependencies in a link group means that we must --- have already made or will make sooner or later a link choice for one of these --- as well, and cover their dependencies at that point. -linkDeps :: QPN -> FlaggedDeps QPN -> UpdateState () -linkDeps target = \deps -> do - -- linkDeps is called in two places: when we first link one package to - -- another, and when we discover more dependencies of an already linked - -- package after doing some flag assignment. It is therefore important that - -- flag assignments cannot influence _how_ dependencies are qualified; - -- fortunately this is a documented property of 'qualifyDeps'. - rdeps <- requalify deps - go deps rdeps - where - go :: FlaggedDeps QPN -> FlaggedDeps QPN -> UpdateState () - go = zipWithM_ go1 - - go1 :: FlaggedDep QPN -> FlaggedDep QPN -> UpdateState () - go1 dep rdep = case (dep, rdep) of - (Simple (LDep dr1 (Dep (PkgComponent qpn _) _)) _, ~(Simple (LDep dr2 (Dep (PkgComponent qpn' _) _)) _)) -> do - vs <- get - let lg = M.findWithDefault (lgSingleton qpn Nothing) qpn $ vsLinks vs - lg' = M.findWithDefault (lgSingleton qpn' Nothing) qpn' $ vsLinks vs - lg'' <- lift' $ lgMerge ((CS.union `on` dependencyReasonToCS) dr1 dr2) lg lg' - updateLinkGroup lg'' - (Flagged fn _ t f, ~(Flagged _ _ t' f')) -> do - vs <- get - case M.lookup fn (vsFlags vs) of - Nothing -> return () -- flag assignment not yet known - Just True -> go t t' - Just False -> go f f' - (Stanza sn t, ~(Stanza _ t')) -> do - vs <- get - case M.lookup sn (vsStanzas vs) of - Nothing -> return () -- stanza assignment not yet known - Just True -> go t t' - Just False -> return () -- stanza not enabled; 
no new deps - -- For extensions and language dependencies, there is nothing to do. - -- No choice is involved, just checking, so there is nothing to link. - -- The same goes for for pkg-config constraints. - (Simple (LDep _ (Ext _)) _, _) -> return () - (Simple (LDep _ (Lang _)) _, _) -> return () - (Simple (LDep _ (Pkg _ _)) _, _) -> return () - - requalify :: FlaggedDeps QPN -> UpdateState (FlaggedDeps QPN) - requalify deps = do - vs <- get - return $ qualifyDeps (vsQualifyOptions vs) target (unqualifyDeps deps) - -pickFlag :: QFN -> Bool -> UpdateState () -pickFlag qfn b = do - modify $ \vs -> vs { vsFlags = M.insert qfn b (vsFlags vs) } - verifyFlag qfn - linkNewDeps (F qfn) b - -pickStanza :: QSN -> Bool -> UpdateState () -pickStanza qsn b = do - modify $ \vs -> vs { vsStanzas = M.insert qsn b (vsStanzas vs) } - verifyStanza qsn - linkNewDeps (S qsn) b - --- | Link dependencies that we discover after making a flag or stanza choice. --- --- When we make a flag choice for a package, then new dependencies for that --- package might become available. If the package under consideration is in a --- non-trivial link group, then these new dependencies have to be linked as --- well. In linkNewDeps, we compute such new dependencies and make sure they are --- linked. -linkNewDeps :: Var QPN -> Bool -> UpdateState () -linkNewDeps var b = do - vs <- get - let qpn@(Q pp pn) = varPN var - qdeps = vsSaved vs ! qpn - lg = vsLinks vs ! 
qpn - newDeps = findNewDeps vs qdeps - linkedTo = S.delete pp (lgMembers lg) - forM_ (S.toList linkedTo) $ \pp' -> linkDeps (Q pp' pn) newDeps - where - findNewDeps :: ValidateState -> FlaggedDeps QPN -> FlaggedDeps QPN - findNewDeps vs = concatMap (findNewDeps' vs) - - findNewDeps' :: ValidateState -> FlaggedDep QPN -> FlaggedDeps QPN - findNewDeps' _ (Simple _ _) = [] - findNewDeps' vs (Flagged qfn _ t f) = - case (F qfn == var, M.lookup qfn (vsFlags vs)) of - (True, _) -> if b then t else f - (_, Nothing) -> [] -- not yet known - (_, Just b') -> findNewDeps vs (if b' then t else f) - findNewDeps' vs (Stanza qsn t) = - case (S qsn == var, M.lookup qsn (vsStanzas vs)) of - (True, _) -> if b then t else [] - (_, Nothing) -> [] -- not yet known - (_, Just b') -> findNewDeps vs (if b' then t else []) - -updateLinkGroup :: LinkGroup -> UpdateState () -updateLinkGroup lg = do - verifyLinkGroup lg - modify $ \vs -> vs { - vsLinks = M.fromList (map aux (S.toList (lgMembers lg))) - `M.union` vsLinks vs - } - where - aux pp = (Q pp (lgPackage lg), lg) - -{------------------------------------------------------------------------------- - Verification --------------------------------------------------------------------------------} - -verifyLinkGroup :: LinkGroup -> UpdateState () -verifyLinkGroup lg = - case lgInstance lg of - -- No instance picked yet. Nothing to verify - Nothing -> - return () - - -- We picked an instance. Verify flags and stanzas - -- TODO: The enumeration of OptionalStanza names is very brittle; - -- if a constructor is added to the datatype we won't notice it here - Just i -> do - vs <- get - let PInfo _deps _exes finfo _ = vsIndex vs ! lgPackage lg ! 
i - flags = M.keys finfo - stanzas = [TestStanzas, BenchStanzas] - forM_ flags $ \fn -> do - let flag = FN (lgPackage lg) fn - verifyFlag' flag lg - forM_ stanzas $ \sn -> do - let stanza = SN (lgPackage lg) sn - verifyStanza' stanza lg - -verifyFlag :: QFN -> UpdateState () -verifyFlag (FN qpn@(Q _pp pn) fn) = do - vs <- get - -- We can only pick a flag after picking an instance; link group must exist - verifyFlag' (FN pn fn) (vsLinks vs ! qpn) - -verifyStanza :: QSN -> UpdateState () -verifyStanza (SN qpn@(Q _pp pn) sn) = do - vs <- get - -- We can only pick a stanza after picking an instance; link group must exist - verifyStanza' (SN pn sn) (vsLinks vs ! qpn) - --- | Verify that all packages in the link group agree on flag assignments --- --- For the given flag and the link group, obtain all assignments for the flag --- that have already been made for link group members, and check that they are --- equal. -verifyFlag' :: FN PN -> LinkGroup -> UpdateState () -verifyFlag' (FN pn fn) lg = do - vs <- get - let flags = map (\pp' -> FN (Q pp' pn) fn) (S.toList (lgMembers lg)) - vals = map (`M.lookup` vsFlags vs) flags - if allEqual (catMaybes vals) -- We ignore not-yet assigned flags - then return () - else conflict ( CS.fromList (map F flags) `CS.union` lgConflictSet lg - , "flag \"" ++ unFlagName fn ++ "\" incompatible" - ) - --- | Verify that all packages in the link group agree on stanza assignments --- --- For the given stanza and the link group, obtain all assignments for the --- stanza that have already been made for link group members, and check that --- they are equal. --- --- This function closely mirrors 'verifyFlag''. 
-verifyStanza' :: SN PN -> LinkGroup -> UpdateState () -verifyStanza' (SN pn sn) lg = do - vs <- get - let stanzas = map (\pp' -> SN (Q pp' pn) sn) (S.toList (lgMembers lg)) - vals = map (`M.lookup` vsStanzas vs) stanzas - if allEqual (catMaybes vals) -- We ignore not-yet assigned stanzas - then return () - else conflict ( CS.fromList (map S stanzas) `CS.union` lgConflictSet lg - , "stanza \"" ++ showStanza sn ++ "\" incompatible" - ) - -{------------------------------------------------------------------------------- - Link groups --------------------------------------------------------------------------------} - --- | Set of packages that must be linked together --- --- A LinkGroup is between several qualified package names. In the validation --- state, we maintain a map vsLinks from qualified package names to link groups. --- There is an invariant that for all members of a link group, vsLinks must map --- to the same link group. The function updateLinkGroup can be used to --- re-establish this invariant after creating or expanding a LinkGroup. -data LinkGroup = LinkGroup { - -- | The name of the package of this link group - lgPackage :: PN - - -- | The canonical member of this link group (the one where we picked - -- a concrete instance). Once we have picked a canonical member, all - -- other packages must link to this one. 
- -- - -- We may not know this yet (if we are constructing link groups - -- for dependencies) - , lgCanon :: Maybe (PI PackagePath) - - -- | The members of the link group - , lgMembers :: Set PackagePath - - -- | The set of variables that should be added to the conflict set if - -- something goes wrong with this link set (in addition to the members - -- of the link group itself) - , lgBlame :: ConflictSet - } - deriving (Show, Eq) - --- | Invariant for the set of link groups: every element in the link group --- must be pointing to the /same/ link group -lgInvariant :: Map QPN LinkGroup -> Bool -lgInvariant links = all invGroup (M.elems links) - where - invGroup :: LinkGroup -> Bool - invGroup lg = allEqual $ map (`M.lookup` links) members - where - members :: [QPN] - members = map (`Q` lgPackage lg) $ S.toList (lgMembers lg) - --- | Package version of this group --- --- This is only known once we have picked a canonical element. -lgInstance :: LinkGroup -> Maybe I -lgInstance = fmap (\(PI _ i) -> i) . lgCanon - -showLinkGroup :: LinkGroup -> String -showLinkGroup lg = - "{" ++ intercalate "," (map showMember (S.toList (lgMembers lg))) ++ "}" - where - showMember :: PackagePath -> String - showMember pp = case lgCanon lg of - Just (PI pp' _i) | pp == pp' -> "*" - _otherwise -> "" - ++ case lgInstance lg of - Nothing -> showQPN (qpn pp) - Just i -> showPI (PI (qpn pp) i) - - qpn :: PackagePath -> QPN - qpn pp = Q pp (lgPackage lg) - --- | Creates a link group that contains a single member. 
-lgSingleton :: QPN -> Maybe (PI PackagePath) -> LinkGroup -lgSingleton (Q pp pn) canon = LinkGroup { - lgPackage = pn - , lgCanon = canon - , lgMembers = S.singleton pp - , lgBlame = CS.empty - } - -lgMerge :: ConflictSet -> LinkGroup -> LinkGroup -> Either Conflict LinkGroup -lgMerge blame lg lg' = do - canon <- pick (lgCanon lg) (lgCanon lg') - return LinkGroup { - lgPackage = lgPackage lg - , lgCanon = canon - , lgMembers = lgMembers lg `S.union` lgMembers lg' - , lgBlame = CS.unions [blame, lgBlame lg, lgBlame lg'] - } - where - pick :: Eq a => Maybe a -> Maybe a -> Either Conflict (Maybe a) - pick Nothing Nothing = Right Nothing - pick (Just x) Nothing = Right $ Just x - pick Nothing (Just y) = Right $ Just y - pick (Just x) (Just y) = - if x == y then Right $ Just x - else Left ( CS.unions [ - blame - , lgConflictSet lg - , lgConflictSet lg' - ] - , "cannot merge " ++ showLinkGroup lg - ++ " and " ++ showLinkGroup lg' - ) - -lgConflictSet :: LinkGroup -> ConflictSet -lgConflictSet lg = - CS.fromList (map aux (S.toList (lgMembers lg))) - `CS.union` lgBlame lg - where - aux pp = P (Q pp (lgPackage lg)) - -{------------------------------------------------------------------------------- - Auxiliary --------------------------------------------------------------------------------} - -allEqual :: Eq a => [a] -> Bool -allEqual [] = True -allEqual [_] = True -allEqual (x:y:ys) = x == y && allEqual (y:ys) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Log.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Log.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Log.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Log.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,56 +0,0 @@ -module Distribution.Solver.Modular.Log - ( 
logToProgress - , SolverFailure(..) - ) where - -import Prelude () -import Distribution.Solver.Compat.Prelude - -import Distribution.Solver.Types.Progress - -import Distribution.Solver.Modular.Dependency -import Distribution.Solver.Modular.Message -import qualified Distribution.Solver.Modular.ConflictSet as CS -import Distribution.Solver.Modular.RetryLog -import Distribution.Verbosity - --- | Information about a dependency solver failure. -data SolverFailure = - ExhaustiveSearch ConflictSet ConflictMap - | BackjumpLimitReached - --- | Postprocesses a log file. When the dependency solver fails to find a --- solution, the log ends with a SolverFailure and a message describing the --- failure. This function discards all log messages and avoids calling --- 'showMessages' if the log isn't needed (specified by 'keepLog'), for --- efficiency. -logToProgress :: Bool - -> Verbosity - -> Maybe Int - -> RetryLog Message SolverFailure a - -> Progress String (SolverFailure, String) a -logToProgress keepLog verbosity mbj lg = - if keepLog - then showMessages progress - else foldProgress (const id) Fail Done progress - where - progress = - -- Convert the RetryLog to a Progress (with toProgress) as late as - -- possible, to take advantage of efficient updates at failures. 
- toProgress $ - mapFailure (\failure -> (failure, finalErrorMsg failure)) lg - - finalErrorMsg :: SolverFailure -> String - finalErrorMsg (ExhaustiveSearch cs cm) = - "After searching the rest of the dependency tree exhaustively, " - ++ "these were the goals I've had most trouble fulfilling: " - ++ showCS cm cs - where - showCS = if verbosity > normal - then CS.showCSWithFrequency - else CS.showCSSortedByFrequency - finalErrorMsg BackjumpLimitReached = - "Backjump limit reached (" ++ currlimit mbj ++ - "change with --max-backjumps or try to run with --reorder-goals).\n" - where currlimit (Just n) = "currently " ++ show n ++ ", " - currlimit Nothing = "" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Message.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Message.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Message.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Message.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,154 +0,0 @@ -{-# LANGUAGE BangPatterns #-} - -module Distribution.Solver.Modular.Message ( - Message(..), - showMessages - ) where - -import qualified Data.List as L -import Prelude hiding (pi) - -import Distribution.Text -- from Cabal - -import Distribution.Solver.Modular.Dependency -import Distribution.Solver.Modular.Flag -import Distribution.Solver.Modular.Package -import Distribution.Solver.Modular.Tree - ( FailReason(..), POption(..), ConflictingDep(..) 
) -import Distribution.Solver.Modular.Version -import Distribution.Solver.Types.ConstraintSource -import Distribution.Solver.Types.PackagePath -import Distribution.Solver.Types.Progress -import Distribution.Types.UnqualComponentName - -data Message = - Enter -- ^ increase indentation level - | Leave -- ^ decrease indentation level - | TryP QPN POption - | TryF QFN Bool - | TryS QSN Bool - | Next (Goal QPN) - | Success - | Failure ConflictSet FailReason - --- | Transforms the structured message type to actual messages (strings). --- --- The log contains level numbers, which are useful for any trace that involves --- backtracking, because only the level numbers will allow to keep track of --- backjumps. -showMessages :: Progress Message a b -> Progress String a b -showMessages = go 0 - where - -- 'go' increments the level for a recursive call when it encounters - -- 'TryP', 'TryF', or 'TryS' and decrements the level when it encounters 'Leave'. - go :: Int -> Progress Message a b -> Progress String a b - go !_ (Done x) = Done x - go !_ (Fail x) = Fail x - -- complex patterns - go !l (Step (TryP qpn i) (Step Enter (Step (Failure c fr) (Step Leave ms)))) = - goPReject l qpn [i] c fr ms - go !l (Step (TryF qfn b) (Step Enter (Step (Failure c fr) (Step Leave ms)))) = - (atLevel l $ "rejecting: " ++ showQFNBool qfn b ++ showFR c fr) (go l ms) - go !l (Step (TryS qsn b) (Step Enter (Step (Failure c fr) (Step Leave ms)))) = - (atLevel l $ "rejecting: " ++ showQSNBool qsn b ++ showFR c fr) (go l ms) - go !l (Step (Next (Goal (P _ ) gr)) (Step (TryP qpn' i) ms@(Step Enter (Step (Next _) _)))) = - (atLevel l $ "trying: " ++ showQPNPOpt qpn' i ++ showGR gr) (go l ms) - go !l (Step (Next (Goal (P qpn) gr)) ms@(Step (Failure _c Backjump) _)) = - (atLevel l $ "unknown package: " ++ showQPN qpn ++ showGR gr) $ go l ms - go !l (Step (Next (Goal (P qpn) gr)) (Step (Failure c fr) ms)) = - (atLevel l $ showPackageGoal qpn gr) $ (atLevel l $ showFailure c fr) (go l ms) - -- standard 
display - go !l (Step Enter ms) = go (l+1) ms - go !l (Step Leave ms) = go (l-1) ms - go !l (Step (TryP qpn i) ms) = (atLevel l $ "trying: " ++ showQPNPOpt qpn i) (go l ms) - go !l (Step (TryF qfn b) ms) = (atLevel l $ "trying: " ++ showQFNBool qfn b) (go l ms) - go !l (Step (TryS qsn b) ms) = (atLevel l $ "trying: " ++ showQSNBool qsn b) (go l ms) - go !l (Step (Next (Goal (P qpn) gr)) ms) = (atLevel l $ showPackageGoal qpn gr) (go l ms) - go !l (Step (Next _) ms) = go l ms -- ignore flag goals in the log - go !l (Step (Success) ms) = (atLevel l $ "done") (go l ms) - go !l (Step (Failure c fr) ms) = (atLevel l $ showFailure c fr) (go l ms) - - showPackageGoal :: QPN -> QGoalReason -> String - showPackageGoal qpn gr = "next goal: " ++ showQPN qpn ++ showGR gr - - showFailure :: ConflictSet -> FailReason -> String - showFailure c fr = "fail" ++ showFR c fr - - -- special handler for many subsequent package rejections - goPReject :: Int - -> QPN - -> [POption] - -> ConflictSet - -> FailReason - -> Progress Message a b - -> Progress String a b - goPReject l qpn is c fr (Step (TryP qpn' i) (Step Enter (Step (Failure _ fr') (Step Leave ms)))) - | qpn == qpn' && fr == fr' = goPReject l qpn (i : is) c fr ms - goPReject l qpn is c fr ms = - (atLevel l $ "rejecting: " ++ L.intercalate ", " (map (showQPNPOpt qpn) (reverse is)) ++ showFR c fr) (go l ms) - - -- write a message with the current level number - atLevel :: Int -> String -> Progress String a b -> Progress String a b - atLevel l x xs = - let s = show l - in Step ("[" ++ replicate (3 - length s) '_' ++ s ++ "] " ++ x) xs - -showQPNPOpt :: QPN -> POption -> String -showQPNPOpt qpn@(Q _pp pn) (POption i linkedTo) = - case linkedTo of - Nothing -> showPI (PI qpn i) -- Consistent with prior to POption - Just pp' -> showQPN qpn ++ "~>" ++ showPI (PI (Q pp' pn) i) - -showGR :: QGoalReason -> String -showGR UserGoal = " (user goal)" -showGR (DependencyGoal dr) = " (dependency of " ++ showDependencyReason dr ++ ")" - -showFR 
:: ConflictSet -> FailReason -> String -showFR _ (UnsupportedExtension ext) = " (conflict: requires " ++ display ext ++ ")" -showFR _ (UnsupportedLanguage lang) = " (conflict: requires " ++ display lang ++ ")" -showFR _ (MissingPkgconfigPackage pn vr) = " (conflict: pkg-config package " ++ display pn ++ display vr ++ ", not found in the pkg-config database)" -showFR _ (NewPackageDoesNotMatchExistingConstraint d) = " (conflict: " ++ showConflictingDep d ++ ")" -showFR _ (ConflictingConstraints d1 d2) = " (conflict: " ++ L.intercalate ", " (L.map showConflictingDep [d1, d2]) ++ ")" -showFR _ (NewPackageIsMissingRequiredComponent comp dr) = " (does not contain " ++ showExposedComponent comp ++ ", which is required by " ++ showDependencyReason dr ++ ")" -showFR _ (NewPackageHasUnbuildableRequiredComponent comp dr) = " (" ++ showExposedComponent comp ++ " is not buildable in the current environment, but it is required by " ++ showDependencyReason dr ++ ")" -showFR _ (PackageRequiresMissingComponent qpn comp) = " (requires " ++ showExposedComponent comp ++ " from " ++ showQPN qpn ++ ", but the component does not exist)" -showFR _ (PackageRequiresUnbuildableComponent qpn comp) = " (requires " ++ showExposedComponent comp ++ " from " ++ showQPN qpn ++ ", but the component is not buildable in the current environment)" -showFR _ CannotInstall = " (only already installed instances can be used)" -showFR _ CannotReinstall = " (avoiding to reinstall a package with same version but new dependencies)" -showFR _ Shadowed = " (shadowed by another installed package with same version)" -showFR _ Broken = " (package is broken)" -showFR _ (GlobalConstraintVersion vr src) = " (" ++ constraintSource src ++ " requires " ++ display vr ++ ")" -showFR _ (GlobalConstraintInstalled src) = " (" ++ constraintSource src ++ " requires installed instance)" -showFR _ (GlobalConstraintSource src) = " (" ++ constraintSource src ++ " requires source instance)" -showFR _ (GlobalConstraintFlag src) = " (" 
++ constraintSource src ++ " requires opposite flag selection)" -showFR _ ManualFlag = " (manual flag can only be changed explicitly)" -showFR c Backjump = " (backjumping, conflict set: " ++ showConflictSet c ++ ")" -showFR _ MultipleInstances = " (multiple instances)" -showFR c (DependenciesNotLinked msg) = " (dependencies not linked: " ++ msg ++ "; conflict set: " ++ showConflictSet c ++ ")" -showFR c CyclicDependencies = " (cyclic dependencies; conflict set: " ++ showConflictSet c ++ ")" -showFR _ (UnsupportedSpecVer ver) = " (unsupported spec-version " ++ display ver ++ ")" --- The following are internal failures. They should not occur. In the --- interest of not crashing unnecessarily, we still just print an error --- message though. -showFR _ (MalformedFlagChoice qfn) = " (INTERNAL ERROR: MALFORMED FLAG CHOICE: " ++ showQFN qfn ++ ")" -showFR _ (MalformedStanzaChoice qsn) = " (INTERNAL ERROR: MALFORMED STANZA CHOICE: " ++ showQSN qsn ++ ")" -showFR _ EmptyGoalChoice = " (INTERNAL ERROR: EMPTY GOAL CHOICE)" - -showExposedComponent :: ExposedComponent -> String -showExposedComponent ExposedLib = "library" -showExposedComponent (ExposedExe name) = "executable '" ++ unUnqualComponentName name ++ "'" - -constraintSource :: ConstraintSource -> String -constraintSource src = "constraint from " ++ showConstraintSource src - -showConflictingDep :: ConflictingDep -> String -showConflictingDep (ConflictingDep dr (PkgComponent qpn comp) ci) = - let DependencyReason qpn' _ _ = dr - componentStr = case comp of - ExposedExe exe -> " (exe " ++ unUnqualComponentName exe ++ ")" - ExposedLib -> "" - in case ci of - Fixed i -> (if qpn /= qpn' then showDependencyReason dr ++ " => " else "") ++ - showQPN qpn ++ componentStr ++ "==" ++ showI i - Constrained vr -> showDependencyReason dr ++ " => " ++ showQPN qpn ++ - componentStr ++ showVR vr diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Package.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Package.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Package.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Package.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,107 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -module Distribution.Solver.Modular.Package - ( I(..) - , Loc(..) - , PackageId - , PackageIdentifier(..) - , PackageName, mkPackageName, unPackageName - , PkgconfigName, mkPkgconfigName, unPkgconfigName - , PI(..) - , PN - , QPV - , instI - , makeIndependent - , primaryPP - , setupPP - , showI - , showPI - , unPN - ) where - -import Data.List as L - -import Distribution.Package -- from Cabal -import Distribution.Text (display) - -import Distribution.Solver.Modular.Version -import Distribution.Solver.Types.PackagePath - --- | A package name. -type PN = PackageName - --- | Unpacking a package name. -unPN :: PN -> String -unPN = unPackageName - --- | Package version. A package name plus a version number. -type PV = PackageId - --- | Qualified package version. -type QPV = Qualified PV - --- | Package id. Currently just a black-box string. -type PId = UnitId - --- | Location. Info about whether a package is installed or not, and where --- exactly it is located. For installed packages, uniquely identifies the --- package instance via its 'PId'. --- --- TODO: More information is needed about the repo. -data Loc = Inst PId | InRepo - deriving (Eq, Ord, Show) - --- | Instance. A version number and a location. -data I = I Ver Loc - deriving (Eq, Ord, Show) - --- | String representation of an instance. 
-showI :: I -> String -showI (I v InRepo) = showVer v -showI (I v (Inst uid)) = showVer v ++ "/installed" ++ shortId uid - where - -- A hack to extract the beginning of the package ABI hash - shortId = snip (splitAt 4) (++ "...") - . snip ((\ (x, y) -> (reverse x, y)) . break (=='-') . reverse) ('-':) - . display - snip p f xs = case p xs of - (ys, zs) -> (if L.null zs then id else f) ys - --- | Package instance. A package name and an instance. -data PI qpn = PI qpn I - deriving (Eq, Ord, Show, Functor) - --- | String representation of a package instance. -showPI :: PI QPN -> String -showPI (PI qpn i) = showQPN qpn ++ "-" ++ showI i - -instI :: I -> Bool -instI (I _ (Inst _)) = True -instI _ = False - --- | Is the package in the primary group of packages. This is used to --- determine (1) if we should try to establish stanza preferences --- for this goal, and (2) whether or not a user specified @--constraint@ --- should apply to this dependency (grep 'primaryPP' to see the --- use sites). In particular this does not include packages pulled in --- as setup deps. --- -primaryPP :: PackagePath -> Bool -primaryPP (PackagePath _ns q) = go q - where - go QualToplevel = True - go (QualBase _) = True - go (QualSetup _) = False - go (QualExe _ _) = False - --- | Is the package a dependency of a setup script. This is used to --- establish whether or not certain constraints should apply to this --- dependency (grep 'setupPP' to see the use sites). --- -setupPP :: PackagePath -> Bool -setupPP (PackagePath _ns (QualSetup _)) = True -setupPP (PackagePath _ns _) = False - --- | Qualify a target package with its own name so that its dependencies are not --- required to be consistent with other targets. 
-makeIndependent :: PN -> QPN -makeIndependent pn = Q (PackagePath (Independent pn) QualToplevel) pn diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Preference.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Preference.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Preference.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Preference.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,471 +0,0 @@ -{-# LANGUAGE ScopedTypeVariables #-} --- | Reordering or pruning the tree in order to prefer or make certain choices. -module Distribution.Solver.Modular.Preference - ( avoidReinstalls - , deferSetupChoices - , deferWeakFlagChoices - , enforceManualFlags - , enforcePackageConstraints - , enforceSingleInstanceRestriction - , firstGoal - , preferBaseGoalChoice - , preferLinked - , preferPackagePreferences - , preferReallyEasyGoalChoices - , requireInstalled - , sortGoals - , pruneAfterFirstSuccess - ) where - -import Prelude () -import Distribution.Solver.Compat.Prelude - -import Data.Function (on) -import qualified Data.List as L -import qualified Data.Map as M -import Control.Monad.Reader hiding (sequence) -import Data.Traversable (sequence) - -import Distribution.PackageDescription (lookupFlagAssignment, unFlagAssignment) -- from Cabal - -import Distribution.Solver.Types.Flag -import Distribution.Solver.Types.InstalledPreference -import Distribution.Solver.Types.LabeledPackageConstraint -import Distribution.Solver.Types.OptionalStanza -import Distribution.Solver.Types.PackageConstraint -import Distribution.Solver.Types.PackagePath -import Distribution.Solver.Types.PackagePreferences -import Distribution.Solver.Types.Variable - -import Distribution.Solver.Modular.Dependency -import Distribution.Solver.Modular.Flag 
-import Distribution.Solver.Modular.Package -import qualified Distribution.Solver.Modular.PSQ as P -import Distribution.Solver.Modular.Tree -import Distribution.Solver.Modular.Version -import qualified Distribution.Solver.Modular.ConflictSet as CS -import qualified Distribution.Solver.Modular.WeightedPSQ as W - --- | Update the weights of children under 'PChoice' nodes. 'addWeights' takes a --- list of weight-calculating functions in order to avoid sorting the package --- choices multiple times. Each function takes the package name, sorted list of --- children's versions, and package option. 'addWeights' prepends the new --- weights to the existing weights, which gives precedence to preferences that --- are applied later. -addWeights :: [PN -> [Ver] -> POption -> Weight] -> Tree d c -> Tree d c -addWeights fs = trav go - where - go :: TreeF d c (Tree d c) -> TreeF d c (Tree d c) - go (PChoiceF qpn@(Q _ pn) rdm x cs) = - let sortedVersions = L.sortBy (flip compare) $ L.map version (W.keys cs) - weights k = [f pn sortedVersions k | f <- fs] - - elemsToWhnf :: [a] -> () - elemsToWhnf = foldr seq () - in PChoiceF qpn rdm x - -- Evaluate the children's versions before evaluating any of the - -- subtrees, so that 'sortedVersions' doesn't hold onto all of the - -- subtrees (referenced by cs) and cause a space leak. - (elemsToWhnf sortedVersions `seq` - W.mapWeightsWithKey (\k w -> weights k ++ w) cs) - go x = x - -addWeight :: (PN -> [Ver] -> POption -> Weight) -> Tree d c -> Tree d c -addWeight f = addWeights [f] - -version :: POption -> Ver -version (POption (I v _) _) = v - --- | Prefer to link packages whenever possible. -preferLinked :: Tree d c -> Tree d c -preferLinked = addWeight (const (const linked)) - where - linked (POption _ Nothing) = 1 - linked (POption _ (Just _)) = 0 - --- Works by setting weights on choice nodes. Also applies stanza preferences. 
-preferPackagePreferences :: (PN -> PackagePreferences) -> Tree d c -> Tree d c -preferPackagePreferences pcs = - preferPackageStanzaPreferences pcs . - addWeights [ - \pn _ opt -> preferred pn opt - - -- Note that we always rank installed before uninstalled, and later - -- versions before earlier, but we can change the priority of the - -- two orderings. - , \pn vs opt -> case preference pn of - PreferInstalled -> installed opt - PreferLatest -> latest vs opt - , \pn vs opt -> case preference pn of - PreferInstalled -> latest vs opt - PreferLatest -> installed opt - ] - where - -- Prefer packages with higher version numbers over packages with - -- lower version numbers. - latest :: [Ver] -> POption -> Weight - latest sortedVersions opt = - let l = length sortedVersions - index = fromMaybe l $ L.findIndex (<= version opt) sortedVersions - in fromIntegral index / fromIntegral l - - preference :: PN -> InstalledPreference - preference pn = - let PackagePreferences _ ipref _ = pcs pn - in ipref - - -- | Prefer versions satisfying more preferred version ranges. - preferred :: PN -> POption -> Weight - preferred pn opt = - let PackagePreferences vrs _ _ = pcs pn - in fromIntegral . negate . L.length $ - L.filter (flip checkVR (version opt)) vrs - - -- Prefer installed packages over non-installed packages. - installed :: POption -> Weight - installed (POption (I _ (Inst _)) _) = 0 - installed _ = 1 - --- | Traversal that tries to establish package stanza enable\/disable --- preferences. Works by reordering the branches of stanza choices. 
-preferPackageStanzaPreferences :: (PN -> PackagePreferences) -> Tree d c -> Tree d c -preferPackageStanzaPreferences pcs = trav go - where - go (SChoiceF qsn@(SN (Q pp pn) s) rdm gr _tr ts) - | primaryPP pp && enableStanzaPref pn s = - -- move True case first to try enabling the stanza - let ts' = W.mapWeightsWithKey (\k w -> weight k : w) ts - weight k = if k then 0 else 1 - -- defer the choice by setting it to weak - in SChoiceF qsn rdm gr (WeakOrTrivial True) ts' - go x = x - - enableStanzaPref :: PN -> OptionalStanza -> Bool - enableStanzaPref pn s = - let PackagePreferences _ _ spref = pcs pn - in s `elem` spref - --- | Helper function that tries to enforce a single package constraint on a --- given instance for a P-node. Translates the constraint into a --- tree-transformer that either leaves the subtree untouched, or replaces it --- with an appropriate failure node. -processPackageConstraintP :: forall d c. QPN - -> ConflictSet - -> I - -> LabeledPackageConstraint - -> Tree d c - -> Tree d c -processPackageConstraintP qpn c i (LabeledPackageConstraint (PackageConstraint scope prop) src) r = - if constraintScopeMatches scope qpn - then go i prop - else r - where - go :: I -> PackageProperty -> Tree d c - go (I v _) (PackagePropertyVersion vr) - | checkVR vr v = r - | otherwise = Fail c (GlobalConstraintVersion vr src) - go _ PackagePropertyInstalled - | instI i = r - | otherwise = Fail c (GlobalConstraintInstalled src) - go _ PackagePropertySource - | not (instI i) = r - | otherwise = Fail c (GlobalConstraintSource src) - go _ _ = r - --- | Helper function that tries to enforce a single package constraint on a --- given flag setting for an F-node. Translates the constraint into a --- tree-transformer that either leaves the subtree untouched, or replaces it --- with an appropriate failure node. -processPackageConstraintF :: forall d c. 
QPN - -> Flag - -> ConflictSet - -> Bool - -> LabeledPackageConstraint - -> Tree d c - -> Tree d c -processPackageConstraintF qpn f c b' (LabeledPackageConstraint (PackageConstraint scope prop) src) r = - if constraintScopeMatches scope qpn - then go prop - else r - where - go :: PackageProperty -> Tree d c - go (PackagePropertyFlags fa) = - case lookupFlagAssignment f fa of - Nothing -> r - Just b | b == b' -> r - | otherwise -> Fail c (GlobalConstraintFlag src) - go _ = r - --- | Helper function that tries to enforce a single package constraint on a --- given flag setting for an F-node. Translates the constraint into a --- tree-transformer that either leaves the subtree untouched, or replaces it --- with an appropriate failure node. -processPackageConstraintS :: forall d c. QPN - -> OptionalStanza - -> ConflictSet - -> Bool - -> LabeledPackageConstraint - -> Tree d c - -> Tree d c -processPackageConstraintS qpn s c b' (LabeledPackageConstraint (PackageConstraint scope prop) src) r = - if constraintScopeMatches scope qpn - then go prop - else r - where - go :: PackageProperty -> Tree d c - go (PackagePropertyStanzas ss) = - if not b' && s `elem` ss then Fail c (GlobalConstraintFlag src) - else r - go _ = r - --- | Traversal that tries to establish various kinds of user constraints. Works --- by selectively disabling choices that have been ruled out by global user --- constraints. -enforcePackageConstraints :: M.Map PN [LabeledPackageConstraint] - -> Tree d c - -> Tree d c -enforcePackageConstraints pcs = trav go - where - go (PChoiceF qpn@(Q _ pn) rdm gr ts) = - let c = varToConflictSet (P qpn) - -- compose the transformation functions for each of the relevant constraint - g = \ (POption i _) -> foldl (\ h pc -> h . 
processPackageConstraintP qpn c i pc) - id - (M.findWithDefault [] pn pcs) - in PChoiceF qpn rdm gr (W.mapWithKey g ts) - go (FChoiceF qfn@(FN qpn@(Q _ pn) f) rdm gr tr m d ts) = - let c = varToConflictSet (F qfn) - -- compose the transformation functions for each of the relevant constraint - g = \ b -> foldl (\ h pc -> h . processPackageConstraintF qpn f c b pc) - id - (M.findWithDefault [] pn pcs) - in FChoiceF qfn rdm gr tr m d (W.mapWithKey g ts) - go (SChoiceF qsn@(SN qpn@(Q _ pn) f) rdm gr tr ts) = - let c = varToConflictSet (S qsn) - -- compose the transformation functions for each of the relevant constraint - g = \ b -> foldl (\ h pc -> h . processPackageConstraintS qpn f c b pc) - id - (M.findWithDefault [] pn pcs) - in SChoiceF qsn rdm gr tr (W.mapWithKey g ts) - go x = x - --- | Transformation that tries to enforce the rule that manual flags can only be --- set by the user. --- --- If there are no constraints on a manual flag, this function prunes all but --- the default value. If there are constraints, then the flag is allowed to have --- the values specified by the constraints. Note that the type used for flag --- values doesn't need to be Bool. --- --- This function makes an exception for the case where there are multiple goals --- for a single package (with different qualifiers), and flag constraints for --- manual flag x only apply to some of those goals. In that case, we allow the --- unconstrained goals to use the default value for x OR any of the values in --- the constraints on x (even though the constraints don't apply), in order to --- allow the unconstrained goals to be linked to the constrained goals. See --- https://github.com/haskell/cabal/issues/4299. Removing the single instance --- restriction (SIR) would also fix #4299, so we may want to remove this --- exception and only let the user toggle manual flags if we remove the SIR. 
--- --- This function does not enforce any of the constraints, since that is done by --- 'enforcePackageConstraints'. -enforceManualFlags :: M.Map PN [LabeledPackageConstraint] -> Tree d c -> Tree d c -enforceManualFlags pcs = trav go - where - go (FChoiceF qfn@(FN (Q _ pn) fn) rdm gr tr Manual d ts) = - FChoiceF qfn rdm gr tr Manual d $ - let -- A list of all values specified by constraints on 'fn'. - -- We ignore the constraint scope in order to handle issue #4299. - flagConstraintValues :: [Bool] - flagConstraintValues = - [ flagVal - | let lpcs = M.findWithDefault [] pn pcs - , (LabeledPackageConstraint (PackageConstraint _ (PackagePropertyFlags fa)) _) <- lpcs - , (fn', flagVal) <- unFlagAssignment fa - , fn' == fn ] - - -- Prune flag values that are not the default and do not match any - -- of the constraints. - restrictToggling :: Eq a => a -> [a] -> a -> Tree d c -> Tree d c - restrictToggling flagDefault constraintVals flagVal r = - if flagVal `elem` constraintVals || flagVal == flagDefault - then r - else Fail (varToConflictSet (F qfn)) ManualFlag - - in W.mapWithKey (restrictToggling d flagConstraintValues) ts - go x = x - --- | Require installed packages. -requireInstalled :: (PN -> Bool) -> Tree d c -> Tree d c -requireInstalled p = trav go - where - go (PChoiceF v@(Q _ pn) rdm gr cs) - | p pn = PChoiceF v rdm gr (W.mapWithKey installed cs) - | otherwise = PChoiceF v rdm gr cs - where - installed (POption (I _ (Inst _)) _) x = x - installed _ _ = Fail (varToConflictSet (P v)) CannotInstall - go x = x - --- | Avoid reinstalls. --- --- This is a tricky strategy. If a package version is installed already and the --- same version is available from a repo, the repo version will never be chosen. --- This would result in a reinstall (either destructively, or potentially, --- shadowing). The old instance won't be visible or even present anymore, but --- other packages might have depended on it. 
--- --- TODO: It would be better to actually check the reverse dependencies of installed --- packages. If they're not depended on, then reinstalling should be fine. Even if --- they are, perhaps this should just result in trying to reinstall those other --- packages as well. However, doing this all neatly in one pass would require to --- change the builder, or at least to change the goal set after building. -avoidReinstalls :: (PN -> Bool) -> Tree d c -> Tree d c -avoidReinstalls p = trav go - where - go (PChoiceF qpn@(Q _ pn) rdm gr cs) - | p pn = PChoiceF qpn rdm gr disableReinstalls - | otherwise = PChoiceF qpn rdm gr cs - where - disableReinstalls = - let installed = [ v | (_, POption (I v (Inst _)) _, _) <- W.toList cs ] - in W.mapWithKey (notReinstall installed) cs - - notReinstall vs (POption (I v InRepo) _) _ | v `elem` vs = - Fail (varToConflictSet (P qpn)) CannotReinstall - notReinstall _ _ x = - x - go x = x - --- | Sort all goals using the provided function. -sortGoals :: (Variable QPN -> Variable QPN -> Ordering) -> Tree d c -> Tree d c -sortGoals variableOrder = trav go - where - go (GoalChoiceF rdm xs) = GoalChoiceF rdm (P.sortByKeys goalOrder xs) - go x = x - - goalOrder :: Goal QPN -> Goal QPN -> Ordering - goalOrder = variableOrder `on` (varToVariable . goalToVar) - - varToVariable :: Var QPN -> Variable QPN - varToVariable (P qpn) = PackageVar qpn - varToVariable (F (FN qpn fn)) = FlagVar qpn fn - varToVariable (S (SN qpn stanza)) = StanzaVar qpn stanza - --- | Reduce the branching degree of the search tree by removing all choices --- after the first successful choice at each level. The returned tree is the --- minimal subtree containing the path to the first backjump. 
-pruneAfterFirstSuccess :: Tree d c -> Tree d c -pruneAfterFirstSuccess = trav go - where - go (PChoiceF qpn rdm gr ts) = PChoiceF qpn rdm gr (W.takeUntil active ts) - go (FChoiceF qfn rdm gr w m d ts) = FChoiceF qfn rdm gr w m d (W.takeUntil active ts) - go (SChoiceF qsn rdm gr w ts) = SChoiceF qsn rdm gr w (W.takeUntil active ts) - go x = x - --- | Always choose the first goal in the list next, abandoning all --- other choices. --- --- This is unnecessary for the default search strategy, because --- it descends only into the first goal choice anyway, --- but may still make sense to just reduce the tree size a bit. -firstGoal :: Tree d c -> Tree d c -firstGoal = trav go - where - go (GoalChoiceF rdm xs) = GoalChoiceF rdm (P.firstOnly xs) - go x = x - -- Note that we keep empty choice nodes, because they mean success. - --- | Transformation that tries to make a decision on base as early as --- possible by pruning all other goals when base is available. In nearly --- all cases, there's a single choice for the base package. Also, fixing --- base early should lead to better error messages. -preferBaseGoalChoice :: Tree d c -> Tree d c -preferBaseGoalChoice = trav go - where - go (GoalChoiceF rdm xs) = GoalChoiceF rdm (P.filterIfAnyByKeys isBase xs) - go x = x - - isBase :: Goal QPN -> Bool - isBase (Goal (P (Q _pp pn)) _) = unPN pn == "base" - isBase _ = False - --- | Deal with setup dependencies after regular dependencies, so that we can --- will link setup dependencies against package dependencies when possible -deferSetupChoices :: Tree d c -> Tree d c -deferSetupChoices = trav go - where - go (GoalChoiceF rdm xs) = GoalChoiceF rdm (P.preferByKeys noSetup xs) - go x = x - - noSetup :: Goal QPN -> Bool - noSetup (Goal (P (Q (PackagePath _ns (QualSetup _)) _)) _) = False - noSetup _ = True - --- | Transformation that tries to avoid making weak flag choices early. 
--- Weak flags are trivial flags (not influencing dependencies) or such --- flags that are explicitly declared to be weak in the index. -deferWeakFlagChoices :: Tree d c -> Tree d c -deferWeakFlagChoices = trav go - where - go (GoalChoiceF rdm xs) = GoalChoiceF rdm (P.prefer noWeakFlag (P.prefer noWeakStanza xs)) - go x = x - - noWeakStanza :: Tree d c -> Bool - noWeakStanza (SChoice _ _ _ (WeakOrTrivial True) _) = False - noWeakStanza _ = True - - noWeakFlag :: Tree d c -> Bool - noWeakFlag (FChoice _ _ _ (WeakOrTrivial True) _ _ _) = False - noWeakFlag _ = True - --- | Transformation that prefers goals with lower branching degrees. --- --- When a goal choice node has at least one goal with zero or one children, this --- function prunes all other goals. This transformation can help the solver find --- a solution in fewer steps by allowing it to backtrack sooner when it is --- exploring a subtree with no solutions. However, each step is more expensive. -preferReallyEasyGoalChoices :: Tree d c -> Tree d c -preferReallyEasyGoalChoices = trav go - where - go (GoalChoiceF rdm xs) = GoalChoiceF rdm (P.filterIfAny zeroOrOneChoices xs) - go x = x - --- | Monad used internally in enforceSingleInstanceRestriction --- --- For each package instance we record the goal for which we picked a concrete --- instance. The SIR means that for any package instance there can only be one. -type EnforceSIR = Reader (Map (PI PN) QPN) - --- | Enforce ghc's single instance restriction --- --- From the solver's perspective, this means that for any package instance --- (that is, package name + package version) there can be at most one qualified --- goal resolving to that instance (there may be other goals _linking_ to that --- instance however). -enforceSingleInstanceRestriction :: Tree d c -> Tree d c -enforceSingleInstanceRestriction = (`runReader` M.empty) . cata go - where - go :: TreeF d c (EnforceSIR (Tree d c)) -> EnforceSIR (Tree d c) - - -- We just verify package choices. 
- go (PChoiceF qpn rdm gr cs) = - PChoice qpn rdm gr <$> sequence (W.mapWithKey (goP qpn) cs) - go _otherwise = - innM _otherwise - - -- The check proper - goP :: QPN -> POption -> EnforceSIR (Tree d c) -> EnforceSIR (Tree d c) - goP qpn@(Q _ pn) (POption i linkedTo) r = do - let inst = PI pn i - env <- ask - case (linkedTo, M.lookup inst env) of - (Just _, _) -> - -- For linked nodes we don't check anything - r - (Nothing, Nothing) -> - -- Not linked, not already used - local (M.insert inst qpn) r - (Nothing, Just qpn') -> do - -- Not linked, already used. This is an error - return $ Fail (CS.union (varToConflictSet (P qpn)) (varToConflictSet (P qpn'))) MultipleInstances diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/PSQ.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/PSQ.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/PSQ.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/PSQ.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,149 +0,0 @@ -{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} -module Distribution.Solver.Modular.PSQ - ( PSQ(..) -- Unit test needs constructor access - , casePSQ - , cons - , length - , lookup - , filter - , filterIfAny - , filterIfAnyByKeys - , filterKeys - , firstOnly - , fromList - , isZeroOrOne - , keys - , map - , mapKeys - , mapWithKey - , maximumBy - , minimumBy - , null - , prefer - , preferByKeys - , snoc - , sortBy - , sortByKeys - , toList - , union - ) where - --- Priority search queues. --- --- I am not yet sure what exactly is needed. But we need a data structure with --- key-based lookup that can be sorted. 
We're using a sequence right now with --- (inefficiently implemented) lookup, because I think that queue-based --- operations and sorting turn out to be more efficiency-critical in practice. - -import Control.Arrow (first, second) - -import qualified Data.Foldable as F -import Data.Function -import qualified Data.List as S -import Data.Ord (comparing) -import Data.Traversable -import Prelude hiding (foldr, length, lookup, filter, null, map) - -newtype PSQ k v = PSQ [(k, v)] - deriving (Eq, Show, Functor, F.Foldable, Traversable) -- Qualified Foldable to avoid issues with FTP - -keys :: PSQ k v -> [k] -keys (PSQ xs) = fmap fst xs - -lookup :: Eq k => k -> PSQ k v -> Maybe v -lookup k (PSQ xs) = S.lookup k xs - -map :: (v1 -> v2) -> PSQ k v1 -> PSQ k v2 -map f (PSQ xs) = PSQ (fmap (second f) xs) - -mapKeys :: (k1 -> k2) -> PSQ k1 v -> PSQ k2 v -mapKeys f (PSQ xs) = PSQ (fmap (first f) xs) - -mapWithKey :: (k -> a -> b) -> PSQ k a -> PSQ k b -mapWithKey f (PSQ xs) = PSQ (fmap (\ (k, v) -> (k, f k v)) xs) - -fromList :: [(k, a)] -> PSQ k a -fromList = PSQ - -cons :: k -> a -> PSQ k a -> PSQ k a -cons k x (PSQ xs) = PSQ ((k, x) : xs) - -snoc :: PSQ k a -> k -> a -> PSQ k a -snoc (PSQ xs) k x = PSQ (xs ++ [(k, x)]) - -casePSQ :: PSQ k a -> r -> (k -> a -> PSQ k a -> r) -> r -casePSQ (PSQ xs) n c = - case xs of - [] -> n - (k, v) : ys -> c k v (PSQ ys) - -sortBy :: (a -> a -> Ordering) -> PSQ k a -> PSQ k a -sortBy cmp (PSQ xs) = PSQ (S.sortBy (cmp `on` snd) xs) - -sortByKeys :: (k -> k -> Ordering) -> PSQ k a -> PSQ k a -sortByKeys cmp (PSQ xs) = PSQ (S.sortBy (cmp `on` fst) xs) - -maximumBy :: (k -> Int) -> PSQ k a -> (k, a) -maximumBy sel (PSQ xs) = - S.minimumBy (flip (comparing (sel . fst))) xs - -minimumBy :: (a -> Int) -> PSQ k a -> PSQ k a -minimumBy sel (PSQ xs) = - PSQ [snd (S.minimumBy (comparing fst) (S.map (\ x -> (sel (snd x), x)) xs))] - --- | Sort the list so that values satisfying the predicate are first. 
-prefer :: (a -> Bool) -> PSQ k a -> PSQ k a -prefer p = sortBy $ flip (comparing p) - --- | Sort the list so that keys satisfying the predicate are first. -preferByKeys :: (k -> Bool) -> PSQ k a -> PSQ k a -preferByKeys p = sortByKeys $ flip (comparing p) - --- | Will partition the list according to the predicate. If --- there is any element that satisfies the precidate, then only --- the elements satisfying the predicate are returned. --- Otherwise, the rest is returned. --- -filterIfAny :: (a -> Bool) -> PSQ k a -> PSQ k a -filterIfAny p (PSQ xs) = - let - (pro, con) = S.partition (p . snd) xs - in - if S.null pro then PSQ con else PSQ pro - --- | Variant of 'filterIfAny' that takes a predicate on the keys --- rather than on the values. --- -filterIfAnyByKeys :: (k -> Bool) -> PSQ k a -> PSQ k a -filterIfAnyByKeys p (PSQ xs) = - let - (pro, con) = S.partition (p . fst) xs - in - if S.null pro then PSQ con else PSQ pro - -filterKeys :: (k -> Bool) -> PSQ k a -> PSQ k a -filterKeys p (PSQ xs) = PSQ (S.filter (p . fst) xs) - -filter :: (a -> Bool) -> PSQ k a -> PSQ k a -filter p (PSQ xs) = PSQ (S.filter (p . 
snd) xs) - -length :: PSQ k a -> Int -length (PSQ xs) = S.length xs - -null :: PSQ k a -> Bool -null (PSQ xs) = S.null xs - -isZeroOrOne :: PSQ k a -> Bool -isZeroOrOne (PSQ []) = True -isZeroOrOne (PSQ [_]) = True -isZeroOrOne _ = False - -firstOnly :: PSQ k a -> PSQ k a -firstOnly (PSQ []) = PSQ [] -firstOnly (PSQ (x : _)) = PSQ [x] - -toList :: PSQ k a -> [(k, a)] -toList (PSQ xs) = xs - -union :: PSQ k a -> PSQ k a -> PSQ k a -union (PSQ xs) (PSQ ys) = PSQ (xs ++ ys) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/RetryLog.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/RetryLog.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/RetryLog.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/RetryLog.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,69 +0,0 @@ -{-# LANGUAGE Rank2Types #-} -module Distribution.Solver.Modular.RetryLog - ( RetryLog - , toProgress - , fromProgress - , mapFailure - , retry - , failWith - , succeedWith - , continueWith - , tryWith - ) where - -import Distribution.Solver.Modular.Message -import Distribution.Solver.Types.Progress - --- | 'Progress' as a difference list that allows efficient appends at failures. -newtype RetryLog step fail done = RetryLog { - unRetryLog :: forall fail2 . (fail -> Progress step fail2 done) - -> Progress step fail2 done - } - --- | /O(1)/. Convert a 'RetryLog' to a 'Progress'. -toProgress :: RetryLog step fail done -> Progress step fail done -toProgress (RetryLog f) = f Fail - --- | /O(N)/. Convert a 'Progress' to a 'RetryLog'. 
-fromProgress :: Progress step fail done -> RetryLog step fail done -fromProgress l = RetryLog $ \f -> go f l - where - go :: (fail1 -> Progress step fail2 done) - -> Progress step fail1 done - -> Progress step fail2 done - go _ (Done d) = Done d - go f (Fail failure) = f failure - go f (Step m ms) = Step m (go f ms) - --- | /O(1)/. Apply a function to the failure value in a log. -mapFailure :: (fail1 -> fail2) - -> RetryLog step fail1 done - -> RetryLog step fail2 done -mapFailure f l = retry l $ \failure -> RetryLog $ \g -> g (f failure) - --- | /O(1)/. If the first log leads to failure, continue with the second. -retry :: RetryLog step fail1 done - -> (fail1 -> RetryLog step fail2 done) - -> RetryLog step fail2 done -retry (RetryLog f) g = - RetryLog $ \extendLog -> f $ \failure -> unRetryLog (g failure) extendLog - --- | /O(1)/. Create a log with one message before a failure. -failWith :: step -> fail -> RetryLog step fail done -failWith m failure = RetryLog $ \f -> Step m (f failure) - --- | /O(1)/. Create a log with one message before a success. -succeedWith :: step -> done -> RetryLog step fail done -succeedWith m d = RetryLog $ const $ Step m (Done d) - --- | /O(1)/. Prepend a message to a log. -continueWith :: step - -> RetryLog step fail done - -> RetryLog step fail done -continueWith m (RetryLog f) = RetryLog $ Step m . f - --- | /O(1)/. Prepend the given message and 'Enter' to the log, and insert --- 'Leave' before the failure if the log fails. -tryWith :: Message -> RetryLog Message fail done -> RetryLog Message fail done -tryWith m f = - RetryLog $ Step m . Step Enter . 
unRetryLog (retry f (failWith Leave)) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Solver.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Solver.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Solver.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Solver.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,247 +0,0 @@ -{-# LANGUAGE CPP #-} -#ifdef DEBUG_TRACETREE -{-# LANGUAGE FlexibleInstances #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} -#endif -module Distribution.Solver.Modular.Solver - ( SolverConfig(..) - , solve - , PruneAfterFirstSuccess(..) - ) where - -import Data.Map as M -import Data.List as L -import Data.Set as S -import Distribution.Verbosity - -import Distribution.Compiler (CompilerInfo) - -import Distribution.Solver.Types.PackagePath -import Distribution.Solver.Types.PackagePreferences -import Distribution.Solver.Types.PkgConfigDb (PkgConfigDb) -import Distribution.Solver.Types.LabeledPackageConstraint -import Distribution.Solver.Types.Settings -import Distribution.Solver.Types.Variable - -import Distribution.Solver.Modular.Assignment -import Distribution.Solver.Modular.Builder -import Distribution.Solver.Modular.Cycles -import Distribution.Solver.Modular.Dependency -import Distribution.Solver.Modular.Explore -import Distribution.Solver.Modular.Index -import Distribution.Solver.Modular.Log -import Distribution.Solver.Modular.Message -import Distribution.Solver.Modular.Package -import qualified Distribution.Solver.Modular.Preference as P -import Distribution.Solver.Modular.Validate -import Distribution.Solver.Modular.Linking -import Distribution.Solver.Modular.PSQ (PSQ) -import Distribution.Solver.Modular.RetryLog -import Distribution.Solver.Modular.Tree -import qualified 
Distribution.Solver.Modular.PSQ as PSQ - -import Distribution.Simple.Setup (BooleanFlag(..)) - -#ifdef DEBUG_TRACETREE -import qualified Distribution.Solver.Modular.ConflictSet as CS -import qualified Distribution.Solver.Modular.WeightedPSQ as W -import qualified Distribution.Text as T - -import Debug.Trace.Tree (gtraceJson) -import Debug.Trace.Tree.Simple -import Debug.Trace.Tree.Generic -import Debug.Trace.Tree.Assoc (Assoc(..)) -#endif - --- | Various options for the modular solver. -data SolverConfig = SolverConfig { - reorderGoals :: ReorderGoals, - countConflicts :: CountConflicts, - independentGoals :: IndependentGoals, - avoidReinstalls :: AvoidReinstalls, - shadowPkgs :: ShadowPkgs, - strongFlags :: StrongFlags, - allowBootLibInstalls :: AllowBootLibInstalls, - maxBackjumps :: Maybe Int, - enableBackjumping :: EnableBackjumping, - solveExecutables :: SolveExecutables, - goalOrder :: Maybe (Variable QPN -> Variable QPN -> Ordering), - solverVerbosity :: Verbosity, - pruneAfterFirstSuccess :: PruneAfterFirstSuccess -} - --- | Whether to remove all choices after the first successful choice at each --- level in the search tree. -newtype PruneAfterFirstSuccess = PruneAfterFirstSuccess Bool - --- | Run all solver phases. --- --- In principle, we have a valid tree after 'validationPhase', which --- means that every 'Done' node should correspond to valid solution. --- --- There is one exception, though, and that is cycle detection, which --- has been added relatively recently. Cycles are only removed directly --- before exploration. 
--- -solve :: SolverConfig -- ^ solver parameters - -> CompilerInfo - -> Index -- ^ all available packages as an index - -> PkgConfigDb -- ^ available pkg-config pkgs - -> (PN -> PackagePreferences) -- ^ preferences - -> Map PN [LabeledPackageConstraint] -- ^ global constraints - -> Set PN -- ^ global goals - -> RetryLog Message SolverFailure (Assignment, RevDepMap) -solve sc cinfo idx pkgConfigDB userPrefs userConstraints userGoals = - explorePhase $ - detectCycles $ - heuristicsPhase $ - preferencesPhase $ - validationPhase $ - prunePhase $ - buildPhase - where - explorePhase = backjumpAndExplore (maxBackjumps sc) - (enableBackjumping sc) - (countConflicts sc) - detectCycles = traceTree "cycles.json" id . detectCyclesPhase - heuristicsPhase = - let heuristicsTree = traceTree "heuristics.json" id - sortGoals = case goalOrder sc of - Nothing -> goalChoiceHeuristics . - heuristicsTree . - P.deferSetupChoices . - P.deferWeakFlagChoices . - P.preferBaseGoalChoice - Just order -> P.firstGoal . - heuristicsTree . - P.sortGoals order - PruneAfterFirstSuccess prune = pruneAfterFirstSuccess sc - in sortGoals . - (if prune then P.pruneAfterFirstSuccess else id) - preferencesPhase = P.preferLinked . - P.preferPackagePreferences userPrefs - validationPhase = traceTree "validated.json" id . - P.enforcePackageConstraints userConstraints . - P.enforceManualFlags userConstraints . - P.enforceSingleInstanceRestriction . - validateLinking idx . - validateTree cinfo idx pkgConfigDB - prunePhase = (if asBool (avoidReinstalls sc) then P.avoidReinstalls (const True) else id) . 
- (if asBool (allowBootLibInstalls sc) - then id - else P.requireInstalled (`elem` nonInstallable)) - buildPhase = traceTree "build.json" id - $ buildTree idx (independentGoals sc) (S.toList userGoals) - - -- packages that can never be installed or upgraded - -- If you change this enumeration, make sure to update the list in - -- "Distribution.Client.Dependency" as well - nonInstallable :: [PackageName] - nonInstallable = - L.map mkPackageName - [ "base" - , "ghc-prim" - , "integer-gmp" - , "integer-simple" - , "template-haskell" - ] - - -- When --reorder-goals is set, we use preferReallyEasyGoalChoices, which - -- prefers (keeps) goals only if the have 0 or 1 enabled choice. - -- - -- In the past, we furthermore used P.firstGoal to trim down the goal choice nodes - -- to just a single option. This was a way to work around a space leak that was - -- unnecessary and is now fixed, so we no longer do it. - -- - -- If --count-conflicts is active, it will then choose among the remaining goals - -- the one that has been responsible for the most conflicts so far. - -- - -- Otherwise, we simply choose the first remaining goal. - -- - goalChoiceHeuristics - | asBool (reorderGoals sc) = P.preferReallyEasyGoalChoices - | otherwise = id {- P.firstGoal -} - --- | Dump solver tree to a file (in debugging mode) --- --- This only does something if the @debug-tracetree@ configure argument was --- given; otherwise this is just the identity function. 
-traceTree :: -#ifdef DEBUG_TRACETREE - GSimpleTree a => -#endif - FilePath -- ^ Output file - -> (a -> a) -- ^ Function to summarize the tree before dumping - -> a -> a -#ifdef DEBUG_TRACETREE -traceTree = gtraceJson -#else -traceTree _ _ = id -#endif - -#ifdef DEBUG_TRACETREE -instance GSimpleTree (Tree d c) where - fromGeneric = go - where - go :: Tree d c -> SimpleTree - go (PChoice qpn _ _ psq) = Node "P" $ Assoc $ L.map (uncurry (goP qpn)) $ psqToList psq - go (FChoice _ _ _ _ _ _ psq) = Node "F" $ Assoc $ L.map (uncurry goFS) $ psqToList psq - go (SChoice _ _ _ _ psq) = Node "S" $ Assoc $ L.map (uncurry goFS) $ psqToList psq - go (GoalChoice _ psq) = Node "G" $ Assoc $ L.map (uncurry goG) $ PSQ.toList psq - go (Done _rdm _s) = Node "D" $ Assoc [] - go (Fail cs _reason) = Node "X" $ Assoc [("CS", Leaf $ goCS cs)] - - psqToList :: W.WeightedPSQ w k v -> [(k, v)] - psqToList = L.map (\(_, k, v) -> (k, v)) . W.toList - - -- Show package choice - goP :: QPN -> POption -> Tree d c -> (String, SimpleTree) - goP _ (POption (I ver _loc) Nothing) subtree = (T.display ver, go subtree) - goP (Q _ pn) (POption _ (Just pp)) subtree = (showQPN (Q pp pn), go subtree) - - -- Show flag or stanza choice - goFS :: Bool -> Tree d c -> (String, SimpleTree) - goFS val subtree = (show val, go subtree) - - -- Show goal choice - goG :: Goal QPN -> Tree d c -> (String, SimpleTree) - goG (Goal var gr) subtree = (showVar var ++ " (" ++ shortGR gr ++ ")", go subtree) - - -- Variation on 'showGR' that produces shorter strings - -- (Actually, QGoalReason records more info than necessary: we only need - -- to know the variable that introduced the goal, not the value assigned - -- to that variable) - shortGR :: QGoalReason -> String - shortGR UserGoal = "user" - shortGR (DependencyGoal dr) = showDependencyReason dr - - -- Show conflict set - goCS :: ConflictSet -> String - goCS cs = "{" ++ (intercalate "," . L.map showVar . 
CS.toList $ cs) ++ "}" -#endif - --- | Replace all goal reasons with a dummy goal reason in the tree --- --- This is useful for debugging (when experimenting with the impact of GRs) -_removeGR :: Tree d c -> Tree d QGoalReason -_removeGR = trav go - where - go :: TreeF d c (Tree d QGoalReason) -> TreeF d QGoalReason (Tree d QGoalReason) - go (PChoiceF qpn rdm _ psq) = PChoiceF qpn rdm dummy psq - go (FChoiceF qfn rdm _ a b d psq) = FChoiceF qfn rdm dummy a b d psq - go (SChoiceF qsn rdm _ a psq) = SChoiceF qsn rdm dummy a psq - go (GoalChoiceF rdm psq) = GoalChoiceF rdm (goG psq) - go (DoneF rdm s) = DoneF rdm s - go (FailF cs reason) = FailF cs reason - - goG :: PSQ (Goal QPN) (Tree d QGoalReason) -> PSQ (Goal QPN) (Tree d QGoalReason) - goG = PSQ.fromList - . L.map (\(Goal var _, subtree) -> (Goal var dummy, subtree)) - . PSQ.toList - - dummy :: QGoalReason - dummy = - DependencyGoal $ - DependencyReason - (Q (PackagePath DefaultNamespace QualToplevel) (mkPackageName "$")) - M.empty S.empty diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Tree.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Tree.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Tree.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Tree.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,193 +0,0 @@ -{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} -module Distribution.Solver.Modular.Tree - ( POption(..) - , Tree(..) - , TreeF(..) - , Weight - , FailReason(..) - , ConflictingDep(..) 
- , ana - , cata - , inn - , innM - , para - , trav - , zeroOrOneChoices - , active - ) where - -import Control.Monad hiding (mapM, sequence) -import Data.Foldable -import Data.Traversable -import Prelude hiding (foldr, mapM, sequence) - -import Distribution.Solver.Modular.Dependency -import Distribution.Solver.Modular.Flag -import Distribution.Solver.Modular.Package -import Distribution.Solver.Modular.PSQ (PSQ) -import Distribution.Solver.Modular.Version -import Distribution.Solver.Modular.WeightedPSQ (WeightedPSQ) -import qualified Distribution.Solver.Modular.WeightedPSQ as W -import Distribution.Solver.Types.ConstraintSource -import Distribution.Solver.Types.Flag -import Distribution.Solver.Types.PackagePath -import Language.Haskell.Extension (Extension, Language) - -type Weight = Double - --- | Type of the search tree. Inlining the choice nodes for now. Weights on --- package, flag, and stanza choices control the traversal order. --- --- The tree can hold additional data on 'Done' nodes (type 'd') and choice nodes --- (type 'c'). For example, during the final traversal, choice nodes contain the --- variables that introduced the choices, and 'Done' nodes contain the --- assignments for all variables. --- --- TODO: The weight type should be changed from [Double] to Double to avoid --- giving too much weight to preferences that are applied later. -data Tree d c = - -- | Choose a version for a package (or choose to link) - PChoice QPN RevDepMap c (WeightedPSQ [Weight] POption (Tree d c)) - - -- | Choose a value for a flag - -- - -- The Bool is the default value. 
- | FChoice QFN RevDepMap c WeakOrTrivial FlagType Bool (WeightedPSQ [Weight] Bool (Tree d c)) - - -- | Choose whether or not to enable a stanza - | SChoice QSN RevDepMap c WeakOrTrivial (WeightedPSQ [Weight] Bool (Tree d c)) - - -- | Choose which choice to make next - -- - -- Invariants: - -- - -- * PSQ should never be empty - -- * For each choice we additionally record the 'QGoalReason' why we are - -- introducing that goal into tree. Note that most of the time we are - -- working with @Tree QGoalReason@; in that case, we must have the - -- invariant that the 'QGoalReason' cached in the 'PChoice', 'FChoice' - -- or 'SChoice' directly below a 'GoalChoice' node must equal the reason - -- recorded on that 'GoalChoice' node. - | GoalChoice RevDepMap (PSQ (Goal QPN) (Tree d c)) - - -- | We're done -- we found a solution! - | Done RevDepMap d - - -- | We failed to find a solution in this path through the tree - | Fail ConflictSet FailReason - --- | A package option is a package instance with an optional linking annotation --- --- The modular solver has a number of package goals to solve for, and can only --- pick a single package version for a single goal. In order to allow to --- install multiple versions of the same package as part of a single solution --- the solver uses qualified goals. For example, @0.P@ and @1.P@ might both --- be qualified goals for @P@, allowing to pick a difference version of package --- @P@ for @0.P@ and @1.P@. --- --- Linking is an essential part of this story. In addition to picking a specific --- version for @1.P@, the solver can also decide to link @1.P@ to @0.P@ (or --- vice versa). It means that @1.P@ and @0.P@ really must be the very same package --- (and hence must have the same build time configuration, and their --- dependencies must also be the exact same). --- --- See for details. 
-data POption = POption I (Maybe PackagePath) - deriving (Eq, Show) - -data FailReason = UnsupportedExtension Extension - | UnsupportedLanguage Language - | MissingPkgconfigPackage PkgconfigName VR - | NewPackageDoesNotMatchExistingConstraint ConflictingDep - | ConflictingConstraints ConflictingDep ConflictingDep - | NewPackageIsMissingRequiredComponent ExposedComponent (DependencyReason QPN) - | NewPackageHasUnbuildableRequiredComponent ExposedComponent (DependencyReason QPN) - | PackageRequiresMissingComponent QPN ExposedComponent - | PackageRequiresUnbuildableComponent QPN ExposedComponent - | CannotInstall - | CannotReinstall - | Shadowed - | Broken - | GlobalConstraintVersion VR ConstraintSource - | GlobalConstraintInstalled ConstraintSource - | GlobalConstraintSource ConstraintSource - | GlobalConstraintFlag ConstraintSource - | ManualFlag - | MalformedFlagChoice QFN - | MalformedStanzaChoice QSN - | EmptyGoalChoice - | Backjump - | MultipleInstances - | DependenciesNotLinked String - | CyclicDependencies - | UnsupportedSpecVer Ver - deriving (Eq, Show) - --- | Information about a dependency involved in a conflict, for error messages. -data ConflictingDep = ConflictingDep (DependencyReason QPN) (PkgComponent QPN) CI - deriving (Eq, Show) - --- | Functor for the tree type. 'a' is the type of nodes' children. 'd' and 'c' --- have the same meaning as in 'Tree'. 
-data TreeF d c a = - PChoiceF QPN RevDepMap c (WeightedPSQ [Weight] POption a) - | FChoiceF QFN RevDepMap c WeakOrTrivial FlagType Bool (WeightedPSQ [Weight] Bool a) - | SChoiceF QSN RevDepMap c WeakOrTrivial (WeightedPSQ [Weight] Bool a) - | GoalChoiceF RevDepMap (PSQ (Goal QPN) a) - | DoneF RevDepMap d - | FailF ConflictSet FailReason - deriving (Functor, Foldable, Traversable) - -out :: Tree d c -> TreeF d c (Tree d c) -out (PChoice p s i ts) = PChoiceF p s i ts -out (FChoice p s i b m d ts) = FChoiceF p s i b m d ts -out (SChoice p s i b ts) = SChoiceF p s i b ts -out (GoalChoice s ts) = GoalChoiceF s ts -out (Done x s ) = DoneF x s -out (Fail c x ) = FailF c x - -inn :: TreeF d c (Tree d c) -> Tree d c -inn (PChoiceF p s i ts) = PChoice p s i ts -inn (FChoiceF p s i b m d ts) = FChoice p s i b m d ts -inn (SChoiceF p s i b ts) = SChoice p s i b ts -inn (GoalChoiceF s ts) = GoalChoice s ts -inn (DoneF x s ) = Done x s -inn (FailF c x ) = Fail c x - -innM :: Monad m => TreeF d c (m (Tree d c)) -> m (Tree d c) -innM (PChoiceF p s i ts) = liftM (PChoice p s i ) (sequence ts) -innM (FChoiceF p s i b m d ts) = liftM (FChoice p s i b m d) (sequence ts) -innM (SChoiceF p s i b ts) = liftM (SChoice p s i b ) (sequence ts) -innM (GoalChoiceF s ts) = liftM (GoalChoice s ) (sequence ts) -innM (DoneF x s ) = return $ Done x s -innM (FailF c x ) = return $ Fail c x - --- | Determines whether a tree is active, i.e., isn't a failure node. -active :: Tree d c -> Bool -active (Fail _ _) = False -active _ = True - --- | Approximates the number of active choices that are available in a node. --- Note that we count goal choices as having one choice, always. 
-zeroOrOneChoices :: Tree d c -> Bool -zeroOrOneChoices (PChoice _ _ _ ts) = W.isZeroOrOne (W.filter active ts) -zeroOrOneChoices (FChoice _ _ _ _ _ _ ts) = W.isZeroOrOne (W.filter active ts) -zeroOrOneChoices (SChoice _ _ _ _ ts) = W.isZeroOrOne (W.filter active ts) -zeroOrOneChoices (GoalChoice _ _ ) = True -zeroOrOneChoices (Done _ _ ) = True -zeroOrOneChoices (Fail _ _ ) = True - --- | Catamorphism on trees. -cata :: (TreeF d c a -> a) -> Tree d c -> a -cata phi x = (phi . fmap (cata phi) . out) x - -trav :: (TreeF d c (Tree d a) -> TreeF d a (Tree d a)) -> Tree d c -> Tree d a -trav psi x = cata (inn . psi) x - --- | Paramorphism on trees. -para :: (TreeF d c (a, Tree d c) -> a) -> Tree d c -> a -para phi = phi . fmap (\ x -> (para phi x, x)) . out - --- | Anamorphism on trees. -ana :: (a -> TreeF d c a) -> a -> Tree d c -ana psi = inn . fmap (ana psi) . psi diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Validate.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Validate.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Validate.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Validate.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,536 +0,0 @@ -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE MultiParamTypeClasses #-} -{-# LANGUAGE CPP #-} -#ifdef DEBUG_CONFLICT_SETS -{-# LANGUAGE ImplicitParams #-} -#endif -module Distribution.Solver.Modular.Validate (validateTree) where - --- Validation of the tree. --- --- The task here is to make sure all constraints hold. After validation, any --- assignment returned by exploration of the tree should be a complete valid --- assignment, i.e., actually constitute a solution. 
- -import Control.Applicative -import Control.Monad.Reader hiding (sequence) -import Data.Function (on) -import Data.List as L -import Data.Set as S -import Data.Traversable -import Prelude hiding (sequence) - -import Language.Haskell.Extension (Extension, Language) - -import Data.Map.Strict as M -import Distribution.Compiler (CompilerInfo(..)) - -import Distribution.Solver.Modular.Assignment -import qualified Distribution.Solver.Modular.ConflictSet as CS -import Distribution.Solver.Modular.Dependency -import Distribution.Solver.Modular.Flag -import Distribution.Solver.Modular.Index -import Distribution.Solver.Modular.Package -import Distribution.Solver.Modular.Tree -import Distribution.Solver.Modular.Version -import qualified Distribution.Solver.Modular.WeightedPSQ as W - -import Distribution.Solver.Types.PackagePath -import Distribution.Solver.Types.PkgConfigDb (PkgConfigDb, pkgConfigPkgIsPresent) - -#ifdef DEBUG_CONFLICT_SETS -import GHC.Stack (CallStack) -#endif - --- In practice, most constraints are implication constraints (IF we have made --- a number of choices, THEN we also have to ensure that). We call constraints --- that for which the preconditions are fulfilled ACTIVE. We maintain a set --- of currently active constraints that we pass down the node. --- --- We aim at detecting inconsistent states as early as possible. --- --- Whenever we make a choice, there are two things that need to happen: --- --- (1) We must check that the choice is consistent with the currently --- active constraints. --- --- (2) The choice increases the set of active constraints. For the new --- active constraints, we must check that they are consistent with --- the current state. --- --- We can actually merge (1) and (2) by saying the the current choice is --- a new active constraint, fixing the choice. --- --- If a test fails, we have detected an inconsistent state. We can --- disable the current subtree and do not have to traverse it any further. 
--- --- We need a good way to represent the current state, i.e., the current --- set of active constraints. Since the main situation where we have to --- search in it is (1), it seems best to store the state by package: for --- every package, we store which versions are still allowed. If for any --- package, we have inconsistent active constraints, we can also stop. --- This is a particular way to read task (2): --- --- (2, weak) We only check if the new constraints are consistent with --- the choices we've already made, and add them to the active set. --- --- (2, strong) We check if the new constraints are consistent with the --- choices we've already made, and the constraints we already have. --- --- It currently seems as if we're implementing the weak variant. However, --- when used together with 'preferEasyGoalChoices', we will find an --- inconsistent state in the very next step. --- --- What do we do about flags? --- --- Like for packages, we store the flag choices we have already made. --- Now, regarding (1), we only have to test whether we've decided the --- current flag before. Regarding (2), the interesting bit is in discovering --- the new active constraints. To this end, we look up the constraints for --- the package the flag belongs to, and traverse its flagged dependencies. --- Wherever we find the flag in question, we start recording dependencies --- underneath as new active dependencies. If we encounter other flags, we --- check if we've chosen them already and either proceed or stop. - --- | The state needed during validation. -data ValidateState = VS { - supportedExt :: Extension -> Bool, - supportedLang :: Language -> Bool, - presentPkgs :: PkgconfigName -> VR -> Bool, - index :: Index, - - -- Saved, scoped, dependencies. Every time 'validate' makes a package choice, - -- it qualifies the package's dependencies and saves them in this map. 
Then - -- the qualified dependencies are available for subsequent flag and stanza - -- choices for the same package. - saved :: Map QPN (FlaggedDeps QPN), - - pa :: PreAssignment, - - -- Map from package name to the components that are provided by the chosen - -- instance of that package, and whether those components are buildable. - availableComponents :: Map QPN (Map ExposedComponent IsBuildable), - - -- Map from package name to the components that are required from that - -- package. - requiredComponents :: Map QPN ComponentDependencyReasons, - - qualifyOptions :: QualifyOptions -} - -newtype Validate a = Validate (Reader ValidateState a) - deriving (Functor, Applicative, Monad, MonadReader ValidateState) - -runValidate :: Validate a -> ValidateState -> a -runValidate (Validate r) = runReader r - --- | A preassignment comprises knowledge about variables, but not --- necessarily fixed values. -data PreAssignment = PA PPreAssignment FAssignment SAssignment - --- | A (partial) package preassignment. Qualified package names --- are associated with MergedPkgDeps. -type PPreAssignment = Map QPN MergedPkgDep - --- | A dependency on a component, including its DependencyReason. -data PkgDep = PkgDep (DependencyReason QPN) (PkgComponent QPN) CI - --- | Map from component name to one of the reasons that the component is --- required. -type ComponentDependencyReasons = Map ExposedComponent (DependencyReason QPN) - --- | MergedPkgDep records constraints about the instances that can still be --- chosen, and in the extreme case fixes a concrete instance. Otherwise, it is a --- list of version ranges paired with the goals / variables that introduced --- them. It also records whether a package is a build-tool dependency, for each --- reason that it was introduced. 
--- --- It is important to store the component name with the version constraint, for --- error messages, because whether something is a build-tool dependency affects --- its qualifier, which affects which constraint is applied. -data MergedPkgDep = - MergedDepFixed ExposedComponent (DependencyReason QPN) I - | MergedDepConstrained [VROrigin] - --- | Version ranges paired with origins. -type VROrigin = (VR, ExposedComponent, DependencyReason QPN) - --- | The information needed to create a 'Fail' node. -type Conflict = (ConflictSet, FailReason) - -validate :: Tree d c -> Validate (Tree d c) -validate = cata go - where - go :: TreeF d c (Validate (Tree d c)) -> Validate (Tree d c) - - go (PChoiceF qpn rdm gr ts) = PChoice qpn rdm gr <$> sequence (W.mapWithKey (goP qpn) ts) - go (FChoiceF qfn rdm gr b m d ts) = - do - -- Flag choices may occur repeatedly (because they can introduce new constraints - -- in various places). However, subsequent choices must be consistent. We thereby - -- collapse repeated flag choice nodes. - PA _ pfa _ <- asks pa -- obtain current flag-preassignment - case M.lookup qfn pfa of - Just rb -> -- flag has already been assigned; collapse choice to the correct branch - case W.lookup rb ts of - Just t -> goF qfn rb t - Nothing -> return $ Fail (varToConflictSet (F qfn)) (MalformedFlagChoice qfn) - Nothing -> -- flag choice is new, follow both branches - FChoice qfn rdm gr b m d <$> sequence (W.mapWithKey (goF qfn) ts) - go (SChoiceF qsn rdm gr b ts) = - do - -- Optional stanza choices are very similar to flag choices. 
- PA _ _ psa <- asks pa -- obtain current stanza-preassignment - case M.lookup qsn psa of - Just rb -> -- stanza choice has already been made; collapse choice to the correct branch - case W.lookup rb ts of - Just t -> goS qsn rb t - Nothing -> return $ Fail (varToConflictSet (S qsn)) (MalformedStanzaChoice qsn) - Nothing -> -- stanza choice is new, follow both branches - SChoice qsn rdm gr b <$> sequence (W.mapWithKey (goS qsn) ts) - - -- We don't need to do anything for goal choices or failure nodes. - go (GoalChoiceF rdm ts) = GoalChoice rdm <$> sequence ts - go (DoneF rdm s ) = pure (Done rdm s) - go (FailF c fr ) = pure (Fail c fr) - - -- What to do for package nodes ... - goP :: QPN -> POption -> Validate (Tree d c) -> Validate (Tree d c) - goP qpn@(Q _pp pn) (POption i _) r = do - PA ppa pfa psa <- asks pa -- obtain current preassignment - extSupported <- asks supportedExt -- obtain the supported extensions - langSupported <- asks supportedLang -- obtain the supported languages - pkgPresent <- asks presentPkgs -- obtain the present pkg-config pkgs - idx <- asks index -- obtain the index - svd <- asks saved -- obtain saved dependencies - aComps <- asks availableComponents - rComps <- asks requiredComponents - qo <- asks qualifyOptions - -- obtain dependencies and index-dictated exclusions introduced by the choice - let (PInfo deps comps _ mfr) = idx ! pn ! i - -- qualify the deps in the current scope - let qdeps = qualifyDeps qo qpn deps - -- the new active constraints are given by the instance we have chosen, - -- plus the dependency information we have for that instance - let newactives = extractAllDeps pfa psa qdeps - -- We now try to extend the partial assignment with the new active constraints. 
- let mnppa = extend extSupported langSupported pkgPresent newactives - =<< extendWithPackageChoice (PI qpn i) ppa - -- In case we continue, we save the scoped dependencies - let nsvd = M.insert qpn qdeps svd - case mfr of - Just fr -> -- The index marks this as an invalid choice. We can stop. - return (Fail (varToConflictSet (P qpn)) fr) - Nothing -> - let newDeps :: Either Conflict (PPreAssignment, Map QPN ComponentDependencyReasons) - newDeps = do - nppa <- mnppa - rComps' <- extendRequiredComponents aComps rComps newactives - checkComponentsInNewPackage (M.findWithDefault M.empty qpn rComps) qpn comps - return (nppa, rComps') - in case newDeps of - Left (c, fr) -> -- We have an inconsistency. We can stop. - return (Fail c fr) - Right (nppa, rComps') -> -- We have an updated partial assignment for the recursive validation. - local (\ s -> s { pa = PA nppa pfa psa - , saved = nsvd - , availableComponents = M.insert qpn comps aComps - , requiredComponents = rComps' - }) r - - -- What to do for flag nodes ... - goF :: QFN -> Bool -> Validate (Tree d c) -> Validate (Tree d c) - goF qfn@(FN qpn _f) b r = do - PA ppa pfa psa <- asks pa -- obtain current preassignment - extSupported <- asks supportedExt -- obtain the supported extensions - langSupported <- asks supportedLang -- obtain the supported languages - pkgPresent <- asks presentPkgs -- obtain the present pkg-config pkgs - svd <- asks saved -- obtain saved dependencies - aComps <- asks availableComponents - rComps <- asks requiredComponents - -- Note that there should be saved dependencies for the package in question, - -- because while building, we do not choose flags before we see the packages - -- that define them. - let qdeps = svd ! qpn - -- We take the *saved* dependencies, because these have been qualified in the - -- correct scope. 
- -- - -- Extend the flag assignment - let npfa = M.insert qfn b pfa - -- We now try to get the new active dependencies we might learn about because - -- we have chosen a new flag. - let newactives = extractNewDeps (F qfn) b npfa psa qdeps - mNewRequiredComps = extendRequiredComponents aComps rComps newactives - -- As in the package case, we try to extend the partial assignment. - let mnppa = extend extSupported langSupported pkgPresent newactives ppa - case liftM2 (,) mnppa mNewRequiredComps of - Left (c, fr) -> return (Fail c fr) -- inconsistency found - Right (nppa, rComps') -> - local (\ s -> s { pa = PA nppa npfa psa, requiredComponents = rComps' }) r - - -- What to do for stanza nodes (similar to flag nodes) ... - goS :: QSN -> Bool -> Validate (Tree d c) -> Validate (Tree d c) - goS qsn@(SN qpn _f) b r = do - PA ppa pfa psa <- asks pa -- obtain current preassignment - extSupported <- asks supportedExt -- obtain the supported extensions - langSupported <- asks supportedLang -- obtain the supported languages - pkgPresent <- asks presentPkgs -- obtain the present pkg-config pkgs - svd <- asks saved -- obtain saved dependencies - aComps <- asks availableComponents - rComps <- asks requiredComponents - -- Note that there should be saved dependencies for the package in question, - -- because while building, we do not choose flags before we see the packages - -- that define them. - let qdeps = svd ! qpn - -- We take the *saved* dependencies, because these have been qualified in the - -- correct scope. - -- - -- Extend the flag assignment - let npsa = M.insert qsn b psa - -- We now try to get the new active dependencies we might learn about because - -- we have chosen a new flag. - let newactives = extractNewDeps (S qsn) b pfa npsa qdeps - mNewRequiredComps = extendRequiredComponents aComps rComps newactives - -- As in the package case, we try to extend the partial assignment. 
- let mnppa = extend extSupported langSupported pkgPresent newactives ppa - case liftM2 (,) mnppa mNewRequiredComps of - Left (c, fr) -> return (Fail c fr) -- inconsistency found - Right (nppa, rComps') -> - local (\ s -> s { pa = PA nppa pfa npsa, requiredComponents = rComps' }) r - --- | Check that a newly chosen package instance contains all components that --- are required from that package so far. The components must also be buildable. -checkComponentsInNewPackage :: ComponentDependencyReasons - -> QPN - -> Map ExposedComponent IsBuildable - -> Either Conflict () -checkComponentsInNewPackage required qpn providedComps = - case M.toList $ deleteKeys (M.keys providedComps) required of - (missingComp, dr) : _ -> - Left $ mkConflict missingComp dr NewPackageIsMissingRequiredComponent - [] -> - case M.toList $ deleteKeys buildableProvidedComps required of - (unbuildableComp, dr) : _ -> - Left $ mkConflict unbuildableComp dr NewPackageHasUnbuildableRequiredComponent - [] -> Right () - where - mkConflict :: ExposedComponent - -> DependencyReason QPN - -> (ExposedComponent -> DependencyReason QPN -> FailReason) - -> Conflict - mkConflict comp dr mkFailure = - (CS.insert (P qpn) (dependencyReasonToCS dr), mkFailure comp dr) - - buildableProvidedComps :: [ExposedComponent] - buildableProvidedComps = [comp | (comp, IsBuildable True) <- M.toList providedComps] - - deleteKeys :: Ord k => [k] -> Map k v -> Map k v - deleteKeys ks m = L.foldr M.delete m ks - --- | We try to extract as many concrete dependencies from the given flagged --- dependencies as possible. We make use of all the flag knowledge we have --- already acquired. 
-extractAllDeps :: FAssignment -> SAssignment -> FlaggedDeps QPN -> [LDep QPN] -extractAllDeps fa sa deps = do - d <- deps - case d of - Simple sd _ -> return sd - Flagged qfn _ td fd -> case M.lookup qfn fa of - Nothing -> mzero - Just True -> extractAllDeps fa sa td - Just False -> extractAllDeps fa sa fd - Stanza qsn td -> case M.lookup qsn sa of - Nothing -> mzero - Just True -> extractAllDeps fa sa td - Just False -> [] - --- | We try to find new dependencies that become available due to the given --- flag or stanza choice. We therefore look for the choice in question, and then call --- 'extractAllDeps' for everything underneath. -extractNewDeps :: Var QPN -> Bool -> FAssignment -> SAssignment -> FlaggedDeps QPN -> [LDep QPN] -extractNewDeps v b fa sa = go - where - go :: FlaggedDeps QPN -> [LDep QPN] - go deps = do - d <- deps - case d of - Simple _ _ -> mzero - Flagged qfn' _ td fd - | v == F qfn' -> if b then extractAllDeps fa sa td else extractAllDeps fa sa fd - | otherwise -> case M.lookup qfn' fa of - Nothing -> mzero - Just True -> go td - Just False -> go fd - Stanza qsn' td - | v == S qsn' -> if b then extractAllDeps fa sa td else [] - | otherwise -> case M.lookup qsn' sa of - Nothing -> mzero - Just True -> go td - Just False -> [] - --- | Extend a package preassignment. --- --- Takes the variable that causes the new constraints, a current preassignment --- and a set of new dependency constraints. --- --- We're trying to extend the preassignment with each dependency one by one. --- Each dependency is for a particular variable. We check if we already have --- constraints for that variable in the current preassignment. If so, we're --- trying to merge the constraints. --- --- Either returns a witness of the conflict that would arise during the merge, --- or the successfully extended assignment. 
-extend :: (Extension -> Bool) -- ^ is a given extension supported - -> (Language -> Bool) -- ^ is a given language supported - -> (PkgconfigName -> VR -> Bool) -- ^ is a given pkg-config requirement satisfiable - -> [LDep QPN] - -> PPreAssignment - -> Either Conflict PPreAssignment -extend extSupported langSupported pkgPresent newactives ppa = foldM extendSingle ppa newactives - where - - extendSingle :: PPreAssignment -> LDep QPN -> Either Conflict PPreAssignment - extendSingle a (LDep dr (Ext ext )) = - if extSupported ext then Right a - else Left (dependencyReasonToCS dr, UnsupportedExtension ext) - extendSingle a (LDep dr (Lang lang)) = - if langSupported lang then Right a - else Left (dependencyReasonToCS dr, UnsupportedLanguage lang) - extendSingle a (LDep dr (Pkg pn vr)) = - if pkgPresent pn vr then Right a - else Left (dependencyReasonToCS dr, MissingPkgconfigPackage pn vr) - extendSingle a (LDep dr (Dep dep@(PkgComponent qpn _) ci)) = - let mergedDep = M.findWithDefault (MergedDepConstrained []) qpn a - in case (\ x -> M.insert qpn x a) <$> merge mergedDep (PkgDep dr dep ci) of - Left (c, (d, d')) -> Left (c, ConflictingConstraints d d') - Right x -> Right x - --- | Extend a package preassignment with a package choice. For example, when --- the solver chooses foo-2.0, it tries to add the constraint foo==2.0. --- --- TODO: The new constraint is implemented as a dependency from foo to foo's --- library. That isn't correct, because foo might only be needed as a build --- tool dependency. The implemention may need to change when we support --- component-based dependency solving. 
-extendWithPackageChoice :: PI QPN -> PPreAssignment -> Either Conflict PPreAssignment -extendWithPackageChoice (PI qpn i) ppa = - let mergedDep = M.findWithDefault (MergedDepConstrained []) qpn ppa - newChoice = PkgDep (DependencyReason qpn M.empty S.empty) (PkgComponent qpn ExposedLib) (Fixed i) - in case (\ x -> M.insert qpn x ppa) <$> merge mergedDep newChoice of - Left (c, (d, _d')) -> -- Don't include the package choice in the - -- FailReason, because it is redundant. - Left (c, NewPackageDoesNotMatchExistingConstraint d) - Right x -> Right x - --- | Merge constrained instances. We currently adopt a lazy strategy for --- merging, i.e., we only perform actual checking if one of the two choices --- is fixed. If the merge fails, we return a conflict set indicating the --- variables responsible for the failure, as well as the two conflicting --- fragments. --- --- Note that while there may be more than one conflicting pair of version --- ranges, we only return the first we find. --- --- The ConflictingDeps are returned in order, i.e., the first describes the --- conflicting part of the MergedPkgDep, and the second describes the PkgDep. --- --- TODO: Different pairs might have different conflict sets. We're --- obviously interested to return a conflict that has a "better" conflict --- set in the sense the it contains variables that allow us to backjump --- further. We might apply some heuristics here, such as to change the --- order in which we check the constraints. 
-merge :: -#ifdef DEBUG_CONFLICT_SETS - (?loc :: CallStack) => -#endif - MergedPkgDep -> PkgDep -> Either (ConflictSet, (ConflictingDep, ConflictingDep)) MergedPkgDep -merge (MergedDepFixed comp1 vs1 i1) (PkgDep vs2 (PkgComponent p comp2) ci@(Fixed i2)) - | i1 == i2 = Right $ MergedDepFixed comp1 vs1 i1 - | otherwise = - Left ( (CS.union `on` dependencyReasonToCS) vs1 vs2 - , ( ConflictingDep vs1 (PkgComponent p comp1) (Fixed i1) - , ConflictingDep vs2 (PkgComponent p comp2) ci ) ) - -merge (MergedDepFixed comp1 vs1 i@(I v _)) (PkgDep vs2 (PkgComponent p comp2) ci@(Constrained vr)) - | checkVR vr v = Right $ MergedDepFixed comp1 vs1 i - | otherwise = - Left ( (CS.union `on` dependencyReasonToCS) vs1 vs2 - , ( ConflictingDep vs1 (PkgComponent p comp1) (Fixed i) - , ConflictingDep vs2 (PkgComponent p comp2) ci ) ) - -merge (MergedDepConstrained vrOrigins) (PkgDep vs2 (PkgComponent p comp2) ci@(Fixed i@(I v _))) = - go vrOrigins -- I tried "reverse vrOrigins" here, but it seems to slow things down ... - where - go :: [VROrigin] -> Either (ConflictSet, (ConflictingDep, ConflictingDep)) MergedPkgDep - go [] = Right (MergedDepFixed comp2 vs2 i) - go ((vr, comp1, vs1) : vros) - | checkVR vr v = go vros - | otherwise = - Left ( (CS.union `on` dependencyReasonToCS) vs1 vs2 - , ( ConflictingDep vs1 (PkgComponent p comp1) (Constrained vr) - , ConflictingDep vs2 (PkgComponent p comp2) ci ) ) - -merge (MergedDepConstrained vrOrigins) (PkgDep vs2 (PkgComponent _ comp2) (Constrained vr)) = - Right (MergedDepConstrained $ - - -- TODO: This line appends the new version range, to preserve the order used - -- before a refactoring. Consider prepending the version range, if there is - -- no negative performance impact. - vrOrigins ++ [(vr, comp2, vs2)]) - --- | Takes a list of new dependencies and uses it to try to update the map of --- known component dependencies. 
It returns a failure when a new dependency --- requires a component that is missing or unbuildable in a previously chosen --- packages. -extendRequiredComponents :: Map QPN (Map ExposedComponent IsBuildable) - -> Map QPN ComponentDependencyReasons - -> [LDep QPN] - -> Either Conflict (Map QPN ComponentDependencyReasons) -extendRequiredComponents available = foldM extendSingle - where - extendSingle :: Map QPN ComponentDependencyReasons - -> LDep QPN - -> Either Conflict (Map QPN ComponentDependencyReasons) - extendSingle required (LDep dr (Dep (PkgComponent qpn comp) _)) = - let compDeps = M.findWithDefault M.empty qpn required - in -- Only check for the existence of the component if its package has - -- already been chosen. - case M.lookup qpn available of - Just comps - | M.notMember comp comps -> - Left $ mkConflict qpn comp dr PackageRequiresMissingComponent - | L.notElem comp (buildableComps comps) -> - Left $ mkConflict qpn comp dr PackageRequiresUnbuildableComponent - _ -> - Right $ M.insertWith M.union qpn (M.insert comp dr compDeps) required - extendSingle required _ = Right required - - mkConflict :: QPN - -> ExposedComponent - -> DependencyReason QPN - -> (QPN -> ExposedComponent -> FailReason) - -> Conflict - mkConflict qpn comp dr mkFailure = - (CS.insert (P qpn) (dependencyReasonToCS dr), mkFailure qpn comp) - - buildableComps :: Map comp IsBuildable -> [comp] - buildableComps comps = [comp | (comp, IsBuildable True) <- M.toList comps] - - --- | Interface. 
-validateTree :: CompilerInfo -> Index -> PkgConfigDb -> Tree d c -> Tree d c -validateTree cinfo idx pkgConfigDb t = runValidate (validate t) VS { - supportedExt = maybe (const True) -- if compiler has no list of extensions, we assume everything is supported - (\ es -> let s = S.fromList es in \ x -> S.member x s) - (compilerInfoExtensions cinfo) - , supportedLang = maybe (const True) - (flip L.elem) -- use list lookup because language list is small and no Ord instance - (compilerInfoLanguages cinfo) - , presentPkgs = pkgConfigPkgIsPresent pkgConfigDb - , index = idx - , saved = M.empty - , pa = PA M.empty M.empty M.empty - , availableComponents = M.empty - , requiredComponents = M.empty - , qualifyOptions = defaultQualifyOptions idx - } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Var.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Var.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Var.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Var.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -module Distribution.Solver.Modular.Var ( - Var(..) - , showVar - , varPN - ) where - -import Prelude hiding (pi) - -import Distribution.Solver.Modular.Flag -import Distribution.Solver.Types.PackagePath - -{------------------------------------------------------------------------------- - Variables --------------------------------------------------------------------------------} - --- | The type of variables that play a role in the solver. --- Note that the tree currently does not use this type directly, --- and rather has separate tree nodes for the different types of --- variables. This fits better with the fact that in most cases, --- these have to be treated differently. 
-data Var qpn = P qpn | F (FN qpn) | S (SN qpn) - deriving (Eq, Ord, Show, Functor) - -showVar :: Var QPN -> String -showVar (P qpn) = showQPN qpn -showVar (F qfn) = showQFN qfn -showVar (S qsn) = showQSN qsn - --- | Extract the package name from a Var -varPN :: Var qpn -> qpn -varPN (P qpn) = qpn -varPN (F (FN qpn _)) = qpn -varPN (S (SN qpn _)) = qpn diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Version.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Version.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Version.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/Version.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,53 +0,0 @@ -module Distribution.Solver.Modular.Version - ( Ver - , VR - , anyVR - , checkVR - , eqVR - , showVer - , showVR - , simplifyVR - , (.&&.) - , (.||.) - ) where - -import qualified Distribution.Version as CV -- from Cabal -import Distribution.Text -- from Cabal - --- | Preliminary type for versions. -type Ver = CV.Version - --- | String representation of a version. -showVer :: Ver -> String -showVer = display - --- | Version range. Consists of a lower and upper bound. -type VR = CV.VersionRange - --- | String representation of a version range. -showVR :: VR -> String -showVR = display - --- | Unconstrained version range. -anyVR :: VR -anyVR = CV.anyVersion - --- | Version range fixing a single version. -eqVR :: Ver -> VR -eqVR = CV.thisVersion - --- | Intersect two version ranges. -(.&&.) :: VR -> VR -> VR -v1 .&&. v2 = simplifyVR $ CV.intersectVersionRanges v1 v2 - --- | Union of two version ranges. -(.||.) :: VR -> VR -> VR -v1 .||. v2 = simplifyVR $ CV.unionVersionRanges v1 v2 - --- | Simplify a version range. 
-simplifyVR :: VR -> VR -simplifyVR = CV.simplifyVersionRange - --- | Checking a version against a version range. -checkVR :: VR -> Ver -> Bool -checkVR = flip CV.withinRange diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/WeightedPSQ.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/WeightedPSQ.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/WeightedPSQ.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular/WeightedPSQ.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,97 +0,0 @@ -{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} -{-# LANGUAGE ScopedTypeVariables #-} -module Distribution.Solver.Modular.WeightedPSQ ( - WeightedPSQ - , fromList - , toList - , keys - , weights - , isZeroOrOne - , filter - , lookup - , mapWithKey - , mapWeightsWithKey - , union - , takeUntil - ) where - -import qualified Data.Foldable as F -import qualified Data.List as L -import Data.Ord (comparing) -import qualified Data.Traversable as T -import Prelude hiding (filter, lookup) - --- | An association list that is sorted by weight. --- --- Each element has a key ('k'), value ('v'), and weight ('w'). All operations --- that add elements or modify weights stably sort the elements by weight. -newtype WeightedPSQ w k v = WeightedPSQ [(w, k, v)] - deriving (Eq, Show, Functor, F.Foldable, T.Traversable) - --- | /O(N)/. -filter :: (v -> Bool) -> WeightedPSQ k w v -> WeightedPSQ k w v -filter p (WeightedPSQ xs) = WeightedPSQ (L.filter (p . triple_3) xs) - --- | /O(1)/. Return @True@ if the @WeightedPSQ@ contains zero or one elements. -isZeroOrOne :: WeightedPSQ w k v -> Bool -isZeroOrOne (WeightedPSQ []) = True -isZeroOrOne (WeightedPSQ [_]) = True -isZeroOrOne _ = False - --- | /O(1)/. Return the elements in order. 
-toList :: WeightedPSQ w k v -> [(w, k, v)] -toList (WeightedPSQ xs) = xs - --- | /O(N log N)/. -fromList :: Ord w => [(w, k, v)] -> WeightedPSQ w k v -fromList = WeightedPSQ . L.sortBy (comparing triple_1) - --- | /O(N)/. Return the weights in order. -weights :: WeightedPSQ w k v -> [w] -weights (WeightedPSQ xs) = L.map triple_1 xs - --- | /O(N)/. Return the keys in order. -keys :: WeightedPSQ w k v -> [k] -keys (WeightedPSQ xs) = L.map triple_2 xs - --- | /O(N)/. Return the value associated with the first occurrence of the give --- key, if it exists. -lookup :: Eq k => k -> WeightedPSQ w k v -> Maybe v -lookup k (WeightedPSQ xs) = triple_3 `fmap` L.find ((k ==) . triple_2) xs - --- | /O(N log N)/. Update the weights. -mapWeightsWithKey :: Ord w2 - => (k -> w1 -> w2) - -> WeightedPSQ w1 k v - -> WeightedPSQ w2 k v -mapWeightsWithKey f (WeightedPSQ xs) = fromList $ - L.map (\ (w, k, v) -> (f k w, k, v)) xs - --- | /O(N)/. Update the values. -mapWithKey :: (k -> v1 -> v2) -> WeightedPSQ w k v1 -> WeightedPSQ w k v2 -mapWithKey f (WeightedPSQ xs) = WeightedPSQ $ - L.map (\ (w, k, v) -> (w, k, f k v)) xs - --- | /O((N + M) log (N + M))/. Combine two @WeightedPSQ@s, preserving all --- elements. Elements from the first @WeightedPSQ@ come before elements in the --- second when they have the same weight. -union :: Ord w => WeightedPSQ w k v -> WeightedPSQ w k v -> WeightedPSQ w k v -union (WeightedPSQ xs) (WeightedPSQ ys) = fromList (xs ++ ys) - --- | /O(N)/. Return the prefix of values ending with the first element that --- satisfies p, or all elements if none satisfy p. -takeUntil :: forall w k v. 
(v -> Bool) -> WeightedPSQ w k v -> WeightedPSQ w k v -takeUntil p (WeightedPSQ xs) = WeightedPSQ (go xs) - where - go :: [(w, k, v)] -> [(w, k, v)] - go [] = [] - go (y : ys) = y : if p (triple_3 y) then [] else go ys - -triple_1 :: (x, y, z) -> x -triple_1 (x, _, _) = x - -triple_2 :: (x, y, z) -> y -triple_2 (_, y, _) = y - -triple_3 :: (x, y, z) -> z -triple_3 (_, _, z) = z diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Modular.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Modular.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,175 +0,0 @@ -module Distribution.Solver.Modular - ( modularResolver, SolverConfig(..), PruneAfterFirstSuccess(..)) where - --- Here, we try to map between the external cabal-install solver --- interface and the internal interface that the solver actually --- expects. There are a number of type conversions to perform: we --- have to convert the package indices to the uniform index used --- by the solver; we also have to convert the initial constraints; --- and finally, we have to convert back the resulting install --- plan. - -import Prelude () -import Distribution.Solver.Compat.Prelude - -import qualified Data.Map as M -import Data.Set (Set) -import Data.Ord -import Distribution.Compat.Graph - ( IsNode(..) 
) -import Distribution.Compiler - ( CompilerInfo ) -import Distribution.Solver.Modular.Assignment - ( Assignment, toCPs ) -import Distribution.Solver.Modular.ConfiguredConversion - ( convCP ) -import qualified Distribution.Solver.Modular.ConflictSet as CS -import Distribution.Solver.Modular.Dependency -import Distribution.Solver.Modular.Flag -import Distribution.Solver.Modular.Index -import Distribution.Solver.Modular.IndexConversion - ( convPIs ) -import Distribution.Solver.Modular.Log - ( SolverFailure(..), logToProgress ) -import Distribution.Solver.Modular.Package - ( PN ) -import Distribution.Solver.Modular.Solver - ( SolverConfig(..), PruneAfterFirstSuccess(..), solve ) -import Distribution.Solver.Types.DependencyResolver -import Distribution.Solver.Types.LabeledPackageConstraint -import Distribution.Solver.Types.PackageConstraint -import Distribution.Solver.Types.PackagePath -import Distribution.Solver.Types.PackagePreferences -import Distribution.Solver.Types.PkgConfigDb - ( PkgConfigDb ) -import Distribution.Solver.Types.Progress -import Distribution.Solver.Types.Variable -import Distribution.System - ( Platform(..) ) -import Distribution.Simple.Utils - ( ordNubBy ) -import Distribution.Verbosity - - --- | Ties the two worlds together: classic cabal-install vs. the modular --- solver. Performs the necessary translations before and after. -modularResolver :: SolverConfig -> DependencyResolver loc -modularResolver sc (Platform arch os) cinfo iidx sidx pkgConfigDB pprefs pcs pns = - fmap (uncurry postprocess) $ -- convert install plan - solve' sc cinfo idx pkgConfigDB pprefs gcs pns - where - -- Indices have to be converted into solver-specific uniform index. - idx = convPIs os arch cinfo gcs (shadowPkgs sc) (strongFlags sc) (solveExecutables sc) iidx sidx - -- Constraints have to be converted into a finite map indexed by PN. 
- gcs = M.fromListWith (++) (map pair pcs) - where - pair lpc = (pcName $ unlabelPackageConstraint lpc, [lpc]) - - -- Results have to be converted into an install plan. 'convCP' removes - -- package qualifiers, which means that linked packages become duplicates - -- and can be removed. - postprocess a rdm = ordNubBy nodeKey $ - map (convCP iidx sidx) (toCPs a rdm) - - -- Helper function to extract the PN from a constraint. - pcName :: PackageConstraint -> PN - pcName (PackageConstraint scope _) = scopeToPackageName scope - --- | Run 'D.S.Modular.Solver.solve' and then produce a summarized log to display --- in the error case. --- --- When there is no solution, we produce the error message by rerunning the --- solver but making it prefer the goals from the final conflict set from the --- first run. We also set the backjump limit to 0, so that the log stops at the --- first backjump and is relatively short. Preferring goals from the final --- conflict set increases the probability that the log to the first backjump --- contains package, flag, and stanza choices that are relevant to the final --- failure. The solver shouldn't need to choose any packages that aren't in the --- final conflict set. (For every variable in the final conflict set, the final --- conflict set should also contain the variable that introduced that variable. --- The solver can then follow that chain of variables in reverse order from the --- user target to the conflict.) However, it is possible that the conflict set --- contains unnecessary variables. --- --- Producing an error message when the solver reaches the backjump limit is more --- complicated. There is no final conflict set, so we create one for the minimal --- subtree containing the path that the solver took to the first backjump. This --- conflict set helps explain why the solver reached the backjump limit, because --- the first backjump contributes to reaching the backjump limit. 
Additionally, --- the solver is much more likely to be able to finish traversing this subtree --- before the backjump limit, since its size is linear (not exponential) in the --- number of goal choices. We create it by pruning all children after the first --- successful child under each node in the original tree, so that there is at --- most one valid choice at each level. Then we use the final conflict set from --- that run to generate an error message, as in the case where the solver found --- that there was no solution. --- --- Using the full log from a rerun of the solver ensures that the log is --- complete, i.e., it shows the whole chain of dependencies from the user --- targets to the conflicting packages. -solve' :: SolverConfig - -> CompilerInfo - -> Index - -> PkgConfigDb - -> (PN -> PackagePreferences) - -> Map PN [LabeledPackageConstraint] - -> Set PN - -> Progress String String (Assignment, RevDepMap) -solve' sc cinfo idx pkgConfigDB pprefs gcs pns = - foldProgress Step (uncurry createErrorMsg) Done (runSolver printFullLog sc) - where - runSolver :: Bool -> SolverConfig - -> Progress String (SolverFailure, String) (Assignment, RevDepMap) - runSolver keepLog sc' = - logToProgress keepLog (solverVerbosity sc') (maxBackjumps sc') $ - solve sc' cinfo idx pkgConfigDB pprefs gcs pns - - createErrorMsg :: SolverFailure -> String - -> Progress String String (Assignment, RevDepMap) - createErrorMsg (ExhaustiveSearch cs _) msg = - Fail $ rerunSolverForErrorMsg cs ++ msg - createErrorMsg BackjumpLimitReached msg = - Step ("Backjump limit reached. Rerunning dependency solver to generate " - ++ "a final conflict set for the search tree containing the " - ++ "first backjump.") $ - foldProgress Step (f . 
fst) Done $ - runSolver printFullLog - sc { pruneAfterFirstSuccess = PruneAfterFirstSuccess True } - where - f :: SolverFailure -> Progress String String (Assignment, RevDepMap) - f (ExhaustiveSearch cs _) = Fail $ rerunSolverForErrorMsg cs ++ msg - f BackjumpLimitReached = - -- This case is possible when the number of goals involved in - -- conflicts is greater than the backjump limit. - Fail $ msg ++ "Failed to generate a summarized dependency solver " - ++ "log due to low backjump limit." - - rerunSolverForErrorMsg :: ConflictSet -> String - rerunSolverForErrorMsg cs = - let sc' = sc { - goalOrder = Just goalOrder' - , maxBackjumps = Just 0 - } - - -- Preferring goals from the conflict set takes precedence over the - -- original goal order. - goalOrder' = preferGoalsFromConflictSet cs <> fromMaybe mempty (goalOrder sc) - - in unlines ("Could not resolve dependencies:" : messages (runSolver True sc')) - - printFullLog = solverVerbosity sc >= verbose - - messages :: Progress step fail done -> [step] - messages = foldProgress (:) (const []) (const []) - --- | Goal ordering that chooses goals contained in the conflict set before --- other goals. 
-preferGoalsFromConflictSet :: ConflictSet - -> Variable QPN -> Variable QPN -> Ordering -preferGoalsFromConflictSet cs = - comparing $ \v -> not $ CS.member (toVar v) cs - where - toVar :: Variable QPN -> Var QPN - toVar (PackageVar qpn) = P qpn - toVar (FlagVar qpn fn) = F (FN qpn fn) - toVar (StanzaVar qpn sn) = S (SN qpn sn) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/ComponentDeps.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/ComponentDeps.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/ComponentDeps.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/ComponentDeps.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,194 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveGeneric #-} - --- | Fine-grained package dependencies --- --- Like many others, this module is meant to be "double-imported": --- --- > import Distribution.Solver.Types.ComponentDeps ( --- > Component --- > , ComponentDep --- > , ComponentDeps --- > ) --- > import qualified Distribution.Solver.Types.ComponentDeps as CD -module Distribution.Solver.Types.ComponentDeps ( - -- * Fine-grained package dependencies - Component(..) 
- , componentNameToComponent - , ComponentDep - , ComponentDeps -- opaque - -- ** Constructing ComponentDeps - , empty - , fromList - , singleton - , insert - , zip - , filterDeps - , fromLibraryDeps - , fromSetupDeps - , fromInstalled - -- ** Deconstructing ComponentDeps - , toList - , flatDeps - , nonSetupDeps - , libraryDeps - , setupDeps - , select - ) where - -import Prelude () -import Distribution.Types.UnqualComponentName -import Distribution.Solver.Compat.Prelude hiding (empty,zip) - -import qualified Data.Map as Map -import Data.Foldable (fold) - -import qualified Distribution.Types.ComponentName as CN - -{------------------------------------------------------------------------------- - Types --------------------------------------------------------------------------------} - --- | Component of a package. -data Component = - ComponentLib - | ComponentSubLib UnqualComponentName - | ComponentFLib UnqualComponentName - | ComponentExe UnqualComponentName - | ComponentTest UnqualComponentName - | ComponentBench UnqualComponentName - | ComponentSetup - deriving (Show, Eq, Ord, Generic) - -instance Binary Component - --- | Dependency for a single component. -type ComponentDep a = (Component, a) - --- | Fine-grained dependencies for a package. --- --- Typically used as @ComponentDeps [Dependency]@, to represent the list of --- dependencies for each named component within a package. --- -newtype ComponentDeps a = ComponentDeps { unComponentDeps :: Map Component a } - deriving (Show, Functor, Eq, Ord, Generic) - -instance Semigroup a => Monoid (ComponentDeps a) where - mempty = ComponentDeps Map.empty - mappend = (<>) - -instance Semigroup a => Semigroup (ComponentDeps a) where - ComponentDeps d <> ComponentDeps d' = - ComponentDeps (Map.unionWith (<>) d d') - -instance Foldable ComponentDeps where - foldMap f = foldMap f . unComponentDeps - -instance Traversable ComponentDeps where - traverse f = fmap ComponentDeps . traverse f . 
unComponentDeps - -instance Binary a => Binary (ComponentDeps a) - -componentNameToComponent :: CN.ComponentName -> Component -componentNameToComponent (CN.CLibName) = ComponentLib -componentNameToComponent (CN.CSubLibName s) = ComponentSubLib s -componentNameToComponent (CN.CFLibName s) = ComponentFLib s -componentNameToComponent (CN.CExeName s) = ComponentExe s -componentNameToComponent (CN.CTestName s) = ComponentTest s -componentNameToComponent (CN.CBenchName s) = ComponentBench s - -{------------------------------------------------------------------------------- - Construction --------------------------------------------------------------------------------} - -empty :: ComponentDeps a -empty = ComponentDeps $ Map.empty - -fromList :: Monoid a => [ComponentDep a] -> ComponentDeps a -fromList = ComponentDeps . Map.fromListWith mappend - -singleton :: Component -> a -> ComponentDeps a -singleton comp = ComponentDeps . Map.singleton comp - -insert :: Monoid a => Component -> a -> ComponentDeps a -> ComponentDeps a -insert comp a = ComponentDeps . Map.alter aux comp . unComponentDeps - where - aux Nothing = Just a - aux (Just a') = Just $ a `mappend` a' - --- | Zip two 'ComponentDeps' together by 'Component', using 'mempty' --- as the neutral element when a 'Component' is present only in one. -zip :: (Monoid a, Monoid b) => ComponentDeps a -> ComponentDeps b -> ComponentDeps (a, b) -{- TODO/FIXME: Once we can expect containers>=0.5, switch to the more efficient version below: - -zip (ComponentDeps d1) (ComponentDeps d2) = - ComponentDeps $ - Map.mergeWithKey - (\_ a b -> Just (a,b)) - (fmap (\a -> (a, mempty))) - (fmap (\b -> (mempty, b))) - d1 d2 - --} -zip (ComponentDeps d1) (ComponentDeps d2) = - ComponentDeps $ - Map.unionWith - mappend - (Map.map (\a -> (a, mempty)) d1) - (Map.map (\b -> (mempty, b)) d2) - - --- | Keep only selected components (and their associated deps info). 
-filterDeps :: (Component -> a -> Bool) -> ComponentDeps a -> ComponentDeps a -filterDeps p = ComponentDeps . Map.filterWithKey p . unComponentDeps - --- | ComponentDeps containing library dependencies only -fromLibraryDeps :: a -> ComponentDeps a -fromLibraryDeps = singleton ComponentLib - --- | ComponentDeps containing setup dependencies only. -fromSetupDeps :: a -> ComponentDeps a -fromSetupDeps = singleton ComponentSetup - --- | ComponentDeps for installed packages. --- --- We assume that installed packages only record their library dependencies. -fromInstalled :: a -> ComponentDeps a -fromInstalled = fromLibraryDeps - -{------------------------------------------------------------------------------- - Deconstruction --------------------------------------------------------------------------------} - -toList :: ComponentDeps a -> [ComponentDep a] -toList = Map.toList . unComponentDeps - --- | All dependencies of a package. --- --- This is just a synonym for 'fold', but perhaps a use of 'flatDeps' is more --- obvious than a use of 'fold', and moreover this avoids introducing lots of --- @#ifdef@s for 7.10 just for the use of 'fold'. -flatDeps :: Monoid a => ComponentDeps a -> a -flatDeps = fold - --- | All dependencies except the setup dependencies. --- --- Prior to the introduction of setup dependencies in version 1.24 this --- would have been _all_ dependencies. -nonSetupDeps :: Monoid a => ComponentDeps a -> a -nonSetupDeps = select (/= ComponentSetup) - --- | Library dependencies proper only. (Includes dependencies --- of internal libraries.) -libraryDeps :: Monoid a => ComponentDeps a -> a -libraryDeps = select (\c -> case c of ComponentSubLib _ -> True - ComponentLib -> True - _ -> False) - --- | Setup dependencies. -setupDeps :: Monoid a => ComponentDeps a -> a -setupDeps = select (== ComponentSetup) - --- | Select dependencies satisfying a given predicate. -select :: Monoid a => (Component -> Bool) -> ComponentDeps a -> a -select p = foldMap snd . 
filter (p . fst) . toList diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/ConstraintSource.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/ConstraintSource.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/ConstraintSource.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/ConstraintSource.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,82 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Solver.Types.ConstraintSource - ( ConstraintSource(..) - , showConstraintSource - ) where - -import GHC.Generics (Generic) -import Distribution.Compat.Binary (Binary(..)) - --- | Source of a 'PackageConstraint'. -data ConstraintSource = - - -- | Main config file, which is ~/.cabal/config by default. - ConstraintSourceMainConfig FilePath - - -- | Local cabal.project file - | ConstraintSourceProjectConfig FilePath - - -- | Sandbox config file, which is ./cabal.sandbox.config by default. - | ConstraintSourceSandboxConfig FilePath - - -- | User config file, which is ./cabal.config by default. - | ConstraintSourceUserConfig FilePath - - -- | Flag specified on the command line. - | ConstraintSourceCommandlineFlag - - -- | Target specified by the user, e.g., @cabal install package-0.1.0.0@ - -- implies @package==0.1.0.0@. - | ConstraintSourceUserTarget - - -- | Internal requirement to use installed versions of packages like ghc-prim. - | ConstraintSourceNonUpgradeablePackage - - -- | Internal requirement to use the add-source version of a package when that - -- version is installed and the source is modified. - | ConstraintSourceModifiedAddSourceDep - - -- | Internal constraint used by @cabal freeze@. 
- | ConstraintSourceFreeze - - -- | Constraint specified by a config file, a command line flag, or a user - -- target, when a more specific source is not known. - | ConstraintSourceConfigFlagOrTarget - - -- | The source of the constraint is not specified. - | ConstraintSourceUnknown - - -- | An internal constraint due to compatibility issues with the Setup.hs - -- command line interface requires a minimum lower bound on Cabal - | ConstraintSetupCabalMinVersion - - -- | An internal constraint due to compatibility issues with the Setup.hs - -- command line interface requires a maximum upper bound on Cabal - | ConstraintSetupCabalMaxVersion - deriving (Eq, Show, Generic) - -instance Binary ConstraintSource - --- | Description of a 'ConstraintSource'. -showConstraintSource :: ConstraintSource -> String -showConstraintSource (ConstraintSourceMainConfig path) = - "main config " ++ path -showConstraintSource (ConstraintSourceProjectConfig path) = - "project config " ++ path -showConstraintSource (ConstraintSourceSandboxConfig path) = - "sandbox config " ++ path -showConstraintSource (ConstraintSourceUserConfig path)= "user config " ++ path -showConstraintSource ConstraintSourceCommandlineFlag = "command line flag" -showConstraintSource ConstraintSourceUserTarget = "user target" -showConstraintSource ConstraintSourceNonUpgradeablePackage = - "non-upgradeable package" -showConstraintSource ConstraintSourceModifiedAddSourceDep = - "modified add-source dependency" -showConstraintSource ConstraintSourceFreeze = "cabal freeze" -showConstraintSource ConstraintSourceConfigFlagOrTarget = - "config file, command line flag, or user target" -showConstraintSource ConstraintSourceUnknown = "unknown source" -showConstraintSource ConstraintSetupCabalMinVersion = - "minimum version of Cabal used by Setup.hs" -showConstraintSource ConstraintSetupCabalMaxVersion = - "maximum version of Cabal used by Setup.hs" diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/DependencyResolver.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/DependencyResolver.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/DependencyResolver.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/DependencyResolver.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,36 +0,0 @@ -module Distribution.Solver.Types.DependencyResolver - ( DependencyResolver - ) where - -import Data.Set (Set) - -import Distribution.Solver.Types.LabeledPackageConstraint -import Distribution.Solver.Types.PkgConfigDb ( PkgConfigDb ) -import Distribution.Solver.Types.PackagePreferences -import Distribution.Solver.Types.PackageIndex ( PackageIndex ) -import Distribution.Solver.Types.Progress -import Distribution.Solver.Types.ResolverPackage -import Distribution.Solver.Types.SourcePackage - -import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) -import Distribution.Package ( PackageName ) -import Distribution.Compiler ( CompilerInfo ) -import Distribution.System ( Platform ) - --- | A dependency resolver is a function that works out an installation plan --- given the set of installed and available packages and a set of deps to --- solve for. --- --- The reason for this interface is because there are dozens of approaches to --- solving the package dependency problem and we want to make it easy to swap --- in alternatives. 
--- -type DependencyResolver loc = Platform - -> CompilerInfo - -> InstalledPackageIndex - -> PackageIndex (SourcePackage loc) - -> PkgConfigDb - -> (PackageName -> PackagePreferences) - -> [LabeledPackageConstraint] - -> Set PackageName - -> Progress String String [ResolverPackage loc] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Flag.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Flag.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Flag.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Flag.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -module Distribution.Solver.Types.Flag - ( FlagType(..) - ) where - -data FlagType = Manual | Automatic - deriving (Eq, Show) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/InstalledPreference.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/InstalledPreference.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/InstalledPreference.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/InstalledPreference.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -module Distribution.Solver.Types.InstalledPreference - ( InstalledPreference(..), - ) where - --- | Whether we prefer an installed version of a package or simply the latest --- version. 
--- -data InstalledPreference = PreferInstalled | PreferLatest - deriving Show diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/InstSolverPackage.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/InstSolverPackage.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/InstSolverPackage.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/InstSolverPackage.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,38 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Solver.Types.InstSolverPackage - ( InstSolverPackage(..) - ) where - -import Distribution.Compat.Binary (Binary(..)) -import Distribution.Package ( Package(..), HasMungedPackageId(..), HasUnitId(..) ) -import Distribution.Solver.Types.ComponentDeps ( ComponentDeps ) -import Distribution.Solver.Types.SolverId -import Distribution.Types.MungedPackageId -import Distribution.Types.PackageId -import Distribution.Types.PackageName -import Distribution.Types.MungedPackageName -import Distribution.InstalledPackageInfo (InstalledPackageInfo) -import GHC.Generics (Generic) - --- | An 'InstSolverPackage' is a pre-existing installed pacakge --- specified by the dependency solver. -data InstSolverPackage = InstSolverPackage { - instSolverPkgIPI :: InstalledPackageInfo, - instSolverPkgLibDeps :: ComponentDeps [SolverId], - instSolverPkgExeDeps :: ComponentDeps [SolverId] - } - deriving (Eq, Show, Generic) - -instance Binary InstSolverPackage - -instance Package InstSolverPackage where - packageId i = - -- HACK! See Note [Index conversion with internal libraries] - let MungedPackageId mpn v = mungedId i - in PackageIdentifier (mkPackageName (unMungedPackageName mpn)) v - -instance HasMungedPackageId InstSolverPackage where - mungedId = mungedId . 
instSolverPkgIPI - -instance HasUnitId InstSolverPackage where - installedUnitId = installedUnitId . instSolverPkgIPI diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/LabeledPackageConstraint.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/LabeledPackageConstraint.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/LabeledPackageConstraint.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/LabeledPackageConstraint.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -module Distribution.Solver.Types.LabeledPackageConstraint - ( LabeledPackageConstraint(..) - , unlabelPackageConstraint - ) where - -import Distribution.Solver.Types.ConstraintSource -import Distribution.Solver.Types.PackageConstraint - --- | 'PackageConstraint' labeled with its source. -data LabeledPackageConstraint - = LabeledPackageConstraint PackageConstraint ConstraintSource - -unlabelPackageConstraint :: LabeledPackageConstraint -> PackageConstraint -unlabelPackageConstraint (LabeledPackageConstraint pc _) = pc diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/OptionalStanza.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/OptionalStanza.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/OptionalStanza.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/OptionalStanza.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveDataTypeable #-} -module Distribution.Solver.Types.OptionalStanza - ( OptionalStanza(..) 
- , showStanza - , enableStanzas - ) where - -import GHC.Generics (Generic) -import Data.Typeable -import Distribution.Compat.Binary (Binary(..)) -import Distribution.Types.ComponentRequestedSpec - (ComponentRequestedSpec(..), defaultComponentRequestedSpec) -import Data.List (foldl') - -data OptionalStanza - = TestStanzas - | BenchStanzas - deriving (Eq, Ord, Enum, Bounded, Show, Generic, Typeable) - --- | String representation of an OptionalStanza. -showStanza :: OptionalStanza -> String -showStanza TestStanzas = "test" -showStanza BenchStanzas = "bench" - --- | Convert a list of 'OptionalStanza' into the corresponding --- 'ComponentRequestedSpec' which records what components are enabled. -enableStanzas :: [OptionalStanza] -> ComponentRequestedSpec -enableStanzas = foldl' addStanza defaultComponentRequestedSpec - where - addStanza enabled TestStanzas = enabled { testsRequested = True } - addStanza enabled BenchStanzas = enabled { benchmarksRequested = True } - -instance Binary OptionalStanza diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackageConstraint.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackageConstraint.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackageConstraint.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackageConstraint.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,149 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} - --- | Per-package constraints. Package constraints must be respected by the --- solver. Multiple constraints for each package can be given, though obviously --- it is possible to construct conflicting constraints (eg impossible version --- range or inconsistent flag assignment). 
--- -module Distribution.Solver.Types.PackageConstraint ( - ConstraintScope(..), - scopeToplevel, - scopeToPackageName, - constraintScopeMatches, - PackageProperty(..), - dispPackageProperty, - PackageConstraint(..), - dispPackageConstraint, - showPackageConstraint, - packageConstraintToDependency - ) where - -import Distribution.Compat.Binary (Binary(..)) -import Distribution.Package (PackageName) -import Distribution.PackageDescription (FlagAssignment, dispFlagAssignment) -import Distribution.Types.Dependency (Dependency(..)) -import Distribution.Version (VersionRange, simplifyVersionRange) - -import Distribution.Solver.Compat.Prelude ((<<>>)) -import Distribution.Solver.Types.OptionalStanza -import Distribution.Solver.Types.PackagePath - -import Distribution.Text (disp, flatStyle) -import GHC.Generics (Generic) -import Text.PrettyPrint ((<+>)) -import qualified Text.PrettyPrint as Disp - - --- | Determines to what packages and in what contexts a --- constraint applies. -data ConstraintScope - -- | A scope that applies when the given package is used as a build target. - -- In other words, the scope applies iff a goal has a top-level qualifier - -- and its namespace matches the given package name. A namespace is - -- considered to match a package name when it is either the default - -- namespace (for --no-independent-goals) or it is an independent namespace - -- with the given package name (for --independent-goals). - - -- TODO: Try to generalize the ConstraintScopes once component-based - -- solving is implemented, and remove this special case for targets. - = ScopeTarget PackageName - -- | The package with the specified name and qualifier. - | ScopeQualified Qualifier PackageName - -- | The package with the specified name when it has a - -- setup qualifier. - | ScopeAnySetupQualifier PackageName - -- | The package with the specified name regardless of - -- qualifier. 
- | ScopeAnyQualifier PackageName - deriving (Eq, Show) - --- | Constructor for a common use case: the constraint applies to --- the package with the specified name when that package is a --- top-level dependency in the default namespace. -scopeToplevel :: PackageName -> ConstraintScope -scopeToplevel = ScopeQualified QualToplevel - --- | Returns the package name associated with a constraint scope. -scopeToPackageName :: ConstraintScope -> PackageName -scopeToPackageName (ScopeTarget pn) = pn -scopeToPackageName (ScopeQualified _ pn) = pn -scopeToPackageName (ScopeAnySetupQualifier pn) = pn -scopeToPackageName (ScopeAnyQualifier pn) = pn - -constraintScopeMatches :: ConstraintScope -> QPN -> Bool -constraintScopeMatches (ScopeTarget pn) (Q (PackagePath ns q) pn') = - let namespaceMatches DefaultNamespace = True - namespaceMatches (Independent namespacePn) = pn == namespacePn - in namespaceMatches ns && q == QualToplevel && pn == pn' -constraintScopeMatches (ScopeQualified q pn) (Q (PackagePath _ q') pn') = - q == q' && pn == pn' -constraintScopeMatches (ScopeAnySetupQualifier pn) (Q pp pn') = - let setup (PackagePath _ (QualSetup _)) = True - setup _ = False - in setup pp && pn == pn' -constraintScopeMatches (ScopeAnyQualifier pn) (Q _ pn') = pn == pn' - --- | Pretty-prints a constraint scope. -dispConstraintScope :: ConstraintScope -> Disp.Doc -dispConstraintScope (ScopeTarget pn) = disp pn <<>> Disp.text "." <<>> disp pn -dispConstraintScope (ScopeQualified q pn) = dispQualifier q <<>> disp pn -dispConstraintScope (ScopeAnySetupQualifier pn) = Disp.text "setup." <<>> disp pn -dispConstraintScope (ScopeAnyQualifier pn) = Disp.text "any." <<>> disp pn - --- | A package property is a logical predicate on packages. 
-data PackageProperty - = PackagePropertyVersion VersionRange - | PackagePropertyInstalled - | PackagePropertySource - | PackagePropertyFlags FlagAssignment - | PackagePropertyStanzas [OptionalStanza] - deriving (Eq, Show, Generic) - -instance Binary PackageProperty - --- | Pretty-prints a package property. -dispPackageProperty :: PackageProperty -> Disp.Doc -dispPackageProperty (PackagePropertyVersion verrange) = disp verrange -dispPackageProperty PackagePropertyInstalled = Disp.text "installed" -dispPackageProperty PackagePropertySource = Disp.text "source" -dispPackageProperty (PackagePropertyFlags flags) = dispFlagAssignment flags -dispPackageProperty (PackagePropertyStanzas stanzas) = - Disp.hsep $ map (Disp.text . showStanza) stanzas - --- | A package constraint consists of a scope plus a property --- that must hold for all packages within that scope. -data PackageConstraint = PackageConstraint ConstraintScope PackageProperty - deriving (Eq, Show) - --- | Pretty-prints a package constraint. -dispPackageConstraint :: PackageConstraint -> Disp.Doc -dispPackageConstraint (PackageConstraint scope prop) = - dispConstraintScope scope <+> dispPackageProperty prop - --- | Alternative textual representation of a package constraint --- for debugging purposes (slightly more verbose than that --- produced by 'dispPackageConstraint'). --- -showPackageConstraint :: PackageConstraint -> String -showPackageConstraint pc@(PackageConstraint scope prop) = - Disp.renderStyle flatStyle . postprocess $ dispPackageConstraint pc2 - where - pc2 = case prop of - PackagePropertyVersion vr -> - PackageConstraint scope $ PackagePropertyVersion (simplifyVersionRange vr) - _ -> pc - postprocess = case prop of - PackagePropertyFlags _ -> (Disp.text "flags" <+>) - PackagePropertyStanzas _ -> (Disp.text "stanzas" <+>) - _ -> id - --- | Lossily convert a 'PackageConstraint' to a 'Dependency'. 
-packageConstraintToDependency :: PackageConstraint -> Maybe Dependency -packageConstraintToDependency (PackageConstraint scope prop) = toDep prop - where - toDep (PackagePropertyVersion vr) = - Just $ Dependency (scopeToPackageName scope) vr - toDep (PackagePropertyInstalled) = Nothing - toDep (PackagePropertySource) = Nothing - toDep (PackagePropertyFlags _) = Nothing - toDep (PackagePropertyStanzas _) = Nothing diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackageFixedDeps.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackageFixedDeps.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackageFixedDeps.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackageFixedDeps.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -module Distribution.Solver.Types.PackageFixedDeps - ( PackageFixedDeps(..) - ) where - -import Distribution.InstalledPackageInfo ( InstalledPackageInfo ) -import Distribution.Package - ( Package(..), UnitId, installedDepends) -import Distribution.Solver.Types.ComponentDeps ( ComponentDeps ) -import qualified Distribution.Solver.Types.ComponentDeps as CD - --- | Subclass of packages that have specific versioned dependencies. --- --- So for example a not-yet-configured package has dependencies on version --- ranges, not specific versions. A configured or an already installed package --- depends on exact versions. Some operations or data structures (like --- dependency graphs) only make sense on this subclass of package types. 
--- -class Package pkg => PackageFixedDeps pkg where - depends :: pkg -> ComponentDeps [UnitId] - -instance PackageFixedDeps InstalledPackageInfo where - depends pkg = CD.fromInstalled (installedDepends pkg) - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackageIndex.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackageIndex.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackageIndex.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackageIndex.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,316 +0,0 @@ -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveGeneric #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Solver.Types.PackageIndex --- Copyright : (c) David Himmelstrup 2005, --- Bjorn Bringert 2007, --- Duncan Coutts 2008 --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- An index of packages. --- -module Distribution.Solver.Types.PackageIndex ( - -- * Package index data type - PackageIndex, - - -- * Creating an index - fromList, - - -- * Updates - merge, - insert, - deletePackageName, - deletePackageId, - deleteDependency, - - -- * Queries - - -- ** Precise lookups - elemByPackageId, - elemByPackageName, - lookupPackageName, - lookupPackageId, - lookupDependency, - - -- ** Case-insensitive searches - searchByName, - SearchResult(..), - searchByNameSubstring, - - -- ** Bulk queries - allPackages, - allPackagesByName, - ) where - -import Prelude () -import Distribution.Solver.Compat.Prelude hiding (lookup) - -import Control.Exception (assert) -import qualified Data.Map as Map -import Data.List (groupBy, isInfixOf) - -import Distribution.Package - ( PackageName, unPackageName, PackageIdentifier(..) 
- , Package(..), packageName, packageVersion ) -import Distribution.Types.Dependency -import Distribution.Version - ( withinRange ) -import Distribution.Simple.Utils - ( lowercase, comparing ) - - --- | The collection of information about packages from one or more 'PackageDB's. --- --- It can be searched efficiently by package name and version. --- -newtype PackageIndex pkg = PackageIndex - -- This index package names to all the package records matching that package - -- name case-sensitively. It includes all versions. - -- - -- This allows us to find all versions satisfying a dependency. - -- Most queries are a map lookup followed by a linear scan of the bucket. - -- - (Map PackageName [pkg]) - - deriving (Eq, Show, Read, Functor, Generic) ---FIXME: the Functor instance here relies on no package id changes - -instance Package pkg => Semigroup (PackageIndex pkg) where - (<>) = merge - -instance Package pkg => Monoid (PackageIndex pkg) where - mempty = PackageIndex Map.empty - mappend = (<>) - --save one mappend with empty in the common case: - mconcat [] = mempty - mconcat xs = foldr1 mappend xs - -instance Binary pkg => Binary (PackageIndex pkg) - -invariant :: Package pkg => PackageIndex pkg -> Bool -invariant (PackageIndex m) = all (uncurry goodBucket) (Map.toList m) - where - goodBucket _ [] = False - goodBucket name (pkg0:pkgs0) = check (packageId pkg0) pkgs0 - where - check pkgid [] = packageName pkgid == name - check pkgid (pkg':pkgs) = packageName pkgid == name - && pkgid < pkgid' - && check pkgid' pkgs - where pkgid' = packageId pkg' - --- --- * Internal helpers --- - -mkPackageIndex :: Package pkg => Map PackageName [pkg] -> PackageIndex pkg -mkPackageIndex index = assert (invariant (PackageIndex index)) - (PackageIndex index) - -internalError :: String -> a -internalError name = error ("PackageIndex." ++ name ++ ": internal error") - --- | Lookup a name in the index to get all packages that match that name --- case-sensitively. 
--- -lookup :: PackageIndex pkg -> PackageName -> [pkg] -lookup (PackageIndex m) name = fromMaybe [] $ Map.lookup name m - --- --- * Construction --- - --- | Build an index out of a bunch of packages. --- --- If there are duplicates, later ones mask earlier ones. --- -fromList :: Package pkg => [pkg] -> PackageIndex pkg -fromList pkgs = mkPackageIndex - . Map.map fixBucket - . Map.fromListWith (++) - $ [ (packageName pkg, [pkg]) - | pkg <- pkgs ] - where - fixBucket = -- out of groups of duplicates, later ones mask earlier ones - -- but Map.fromListWith (++) constructs groups in reverse order - map head - -- Eq instance for PackageIdentifier is wrong, so use Ord: - . groupBy (\a b -> EQ == comparing packageId a b) - -- relies on sortBy being a stable sort so we - -- can pick consistently among duplicates - . sortBy (comparing packageId) - --- --- * Updates --- - --- | Merge two indexes. --- --- Packages from the second mask packages of the same exact name --- (case-sensitively) from the first. --- -merge :: Package pkg => PackageIndex pkg -> PackageIndex pkg -> PackageIndex pkg -merge i1@(PackageIndex m1) i2@(PackageIndex m2) = - assert (invariant i1 && invariant i2) $ - mkPackageIndex (Map.unionWith mergeBuckets m1 m2) - --- | Elements in the second list mask those in the first. -mergeBuckets :: Package pkg => [pkg] -> [pkg] -> [pkg] -mergeBuckets [] ys = ys -mergeBuckets xs [] = xs -mergeBuckets xs@(x:xs') ys@(y:ys') = - case packageId x `compare` packageId y of - GT -> y : mergeBuckets xs ys' - EQ -> y : mergeBuckets xs' ys' - LT -> x : mergeBuckets xs' ys - --- | Inserts a single package into the index. --- --- This is equivalent to (but slightly quicker than) using 'mappend' or --- 'merge' with a singleton index. 
--- -insert :: Package pkg => pkg -> PackageIndex pkg -> PackageIndex pkg -insert pkg (PackageIndex index) = mkPackageIndex $ - Map.insertWith (\_ -> insertNoDup) (packageName pkg) [pkg] index - where - pkgid = packageId pkg - insertNoDup [] = [pkg] - insertNoDup pkgs@(pkg':pkgs') = case compare pkgid (packageId pkg') of - LT -> pkg : pkgs - EQ -> pkg : pkgs' - GT -> pkg' : insertNoDup pkgs' - --- | Internal delete helper. --- -delete :: Package pkg => PackageName -> (pkg -> Bool) -> PackageIndex pkg - -> PackageIndex pkg -delete name p (PackageIndex index) = mkPackageIndex $ - Map.update filterBucket name index - where - filterBucket = deleteEmptyBucket - . filter (not . p) - deleteEmptyBucket [] = Nothing - deleteEmptyBucket remaining = Just remaining - --- | Removes a single package from the index. --- -deletePackageId :: Package pkg => PackageIdentifier -> PackageIndex pkg - -> PackageIndex pkg -deletePackageId pkgid = - delete (packageName pkgid) (\pkg -> packageId pkg == pkgid) - --- | Removes all packages with this (case-sensitive) name from the index. --- -deletePackageName :: Package pkg => PackageName -> PackageIndex pkg - -> PackageIndex pkg -deletePackageName name = - delete name (\pkg -> packageName pkg == name) - --- | Removes all packages satisfying this dependency from the index. --- -deleteDependency :: Package pkg => Dependency -> PackageIndex pkg - -> PackageIndex pkg -deleteDependency (Dependency name verstionRange) = - delete name (\pkg -> packageVersion pkg `withinRange` verstionRange) - --- --- * Bulk queries --- - --- | Get all the packages from the index. --- -allPackages :: PackageIndex pkg -> [pkg] -allPackages (PackageIndex m) = concat (Map.elems m) - --- | Get all the packages from the index. --- --- They are grouped by package name, case-sensitively. 
--- -allPackagesByName :: PackageIndex pkg -> [[pkg]] -allPackagesByName (PackageIndex m) = Map.elems m - --- --- * Lookups --- - -elemByPackageId :: Package pkg => PackageIndex pkg -> PackageIdentifier -> Bool -elemByPackageId index = isJust . lookupPackageId index - -elemByPackageName :: Package pkg => PackageIndex pkg -> PackageName -> Bool -elemByPackageName index = not . null . lookupPackageName index - - --- | Does a lookup by package id (name & version). --- --- Since multiple package DBs mask each other case-sensitively by package name, --- then we get back at most one package. --- -lookupPackageId :: Package pkg => PackageIndex pkg -> PackageIdentifier - -> Maybe pkg -lookupPackageId index pkgid = - case [ pkg | pkg <- lookup index (packageName pkgid) - , packageId pkg == pkgid ] of - [] -> Nothing - [pkg] -> Just pkg - _ -> internalError "lookupPackageIdentifier" - --- | Does a case-sensitive search by package name. --- -lookupPackageName :: Package pkg => PackageIndex pkg -> PackageName -> [pkg] -lookupPackageName index name = - [ pkg | pkg <- lookup index name - , packageName pkg == name ] - --- | Does a case-sensitive search by package name and a range of versions. --- --- We get back any number of versions of the specified package name, all --- satisfying the version range constraint. --- -lookupDependency :: Package pkg => PackageIndex pkg -> Dependency -> [pkg] -lookupDependency index (Dependency name versionRange) = - [ pkg | pkg <- lookup index name - , packageName pkg == name - , packageVersion pkg `withinRange` versionRange ] - --- --- * Case insensitive name lookups --- - --- | Does a case-insensitive search by package name. --- --- If there is only one package that compares case-insensitively to this name --- then the search is unambiguous and we get back all versions of that package. --- If several match case-insensitively but one matches exactly then it is also --- unambiguous. 
--- --- If however several match case-insensitively and none match exactly then we --- have an ambiguous result, and we get back all the versions of all the --- packages. The list of ambiguous results is split by exact package name. So --- it is a non-empty list of non-empty lists. --- -searchByName :: PackageIndex pkg - -> String -> [(PackageName, [pkg])] -searchByName (PackageIndex m) name = - [ pkgs - | pkgs@(pname,_) <- Map.toList m - , lowercase (unPackageName pname) == lname ] - where - lname = lowercase name - -data SearchResult a = None | Unambiguous a | Ambiguous [a] - --- | Does a case-insensitive substring search by package name. --- --- That is, all packages that contain the given string in their name. --- -searchByNameSubstring :: PackageIndex pkg - -> String -> [(PackageName, [pkg])] -searchByNameSubstring (PackageIndex m) searchterm = - [ pkgs - | pkgs@(pname, _) <- Map.toList m - , lsearchterm `isInfixOf` lowercase (unPackageName pname) ] - where - lsearchterm = lowercase searchterm diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackagePath.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackagePath.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackagePath.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackagePath.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,101 +0,0 @@ -module Distribution.Solver.Types.PackagePath - ( PackagePath(..) - , Namespace(..) - , Qualifier(..) - , dispQualifier - , Qualified(..) - , QPN - , dispQPN - , showQPN - ) where - -import Distribution.Package -import Distribution.Text -import qualified Text.PrettyPrint as Disp -import Distribution.Solver.Compat.Prelude ((<<>>)) - --- | A package path consists of a namespace and a package path inside that --- namespace. 
-data PackagePath = PackagePath Namespace Qualifier - deriving (Eq, Ord, Show) - --- | Top-level namespace --- --- Package choices in different namespaces are considered completely independent --- by the solver. -data Namespace = - -- | The default namespace - DefaultNamespace - - -- | A namespace for a specific build target - | Independent PackageName - deriving (Eq, Ord, Show) - --- | Pretty-prints a namespace. The result is either empty or --- ends in a period, so it can be prepended onto a qualifier. -dispNamespace :: Namespace -> Disp.Doc -dispNamespace DefaultNamespace = Disp.empty -dispNamespace (Independent i) = disp i <<>> Disp.text "." - --- | Qualifier of a package within a namespace (see 'PackagePath') -data Qualifier = - -- | Top-level dependency in this namespace - QualToplevel - - -- | Any dependency on base is considered independent - -- - -- This makes it possible to have base shims. - | QualBase PackageName - - -- | Setup dependency - -- - -- By rights setup dependencies ought to be nestable; after all, the setup - -- dependencies of a package might themselves have setup dependencies, which - -- are independent from everything else. However, this very quickly leads to - -- infinite search trees in the solver. Therefore we limit ourselves to - -- a single qualifier (within a given namespace). - | QualSetup PackageName - - -- | If we depend on an executable from a package (via - -- @build-tools@), we should solve for the dependencies of that - -- package separately (since we're not going to actually try to - -- link it.) We qualify for EACH package separately; e.g., - -- @'Exe' pn1 pn2@ qualifies the @build-tools@ dependency on - -- @pn2@ from package @pn1@. (If we tracked only @pn1@, that - -- would require a consistent dependency resolution for all - -- of the depended upon executables from a package; if we - -- tracked only @pn2@, that would require us to pick only one - -- version of an executable over the entire install plan.) 
- | QualExe PackageName PackageName - deriving (Eq, Ord, Show) - --- | Pretty-prints a qualifier. The result is either empty or --- ends in a period, so it can be prepended onto a package name. --- --- NOTE: the base qualifier is for a dependency _on_ base; the qualifier is --- there to make sure different dependencies on base are all independent. --- So we want to print something like @"A.base"@, where the @"A."@ part --- is the qualifier and @"base"@ is the actual dependency (which, for the --- 'Base' qualifier, will always be @base@). -dispQualifier :: Qualifier -> Disp.Doc -dispQualifier QualToplevel = Disp.empty -dispQualifier (QualSetup pn) = disp pn <<>> Disp.text ":setup." -dispQualifier (QualExe pn pn2) = disp pn <<>> Disp.text ":" <<>> - disp pn2 <<>> Disp.text ":exe." -dispQualifier (QualBase pn) = disp pn <<>> Disp.text "." - --- | A qualified entity. Pairs a package path with the entity. -data Qualified a = Q PackagePath a - deriving (Eq, Ord, Show) - --- | Qualified package name. -type QPN = Qualified PackageName - --- | Pretty-prints a qualified package name. -dispQPN :: QPN -> Disp.Doc -dispQPN (Q (PackagePath ns qual) pn) = - dispNamespace ns <<>> dispQualifier qual <<>> disp pn - --- | String representation of a qualified package name. -showQPN :: QPN -> String -showQPN = Disp.renderStyle flatStyle . 
dispQPN diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackagePreferences.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackagePreferences.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackagePreferences.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PackagePreferences.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -module Distribution.Solver.Types.PackagePreferences - ( PackagePreferences(..) - ) where - -import Distribution.Solver.Types.InstalledPreference -import Distribution.Solver.Types.OptionalStanza -import Distribution.Version (VersionRange) - --- | Per-package preferences on the version. It is a soft constraint that the --- 'DependencyResolver' should try to respect where possible. It consists of --- an 'InstalledPreference' which says if we prefer versions of packages --- that are already installed. It also has (possibly multiple) --- 'PackageVersionPreference's which are suggested constraints on the version --- number. The resolver should try to use package versions that satisfy --- the maximum number of the suggested version constraints. --- --- It is not specified if preferences on some packages are more important than --- others. 
--- -data PackagePreferences = PackagePreferences [VersionRange] - InstalledPreference - [OptionalStanza] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PkgConfigDb.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PkgConfigDb.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PkgConfigDb.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/PkgConfigDb.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,159 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveDataTypeable #-} ------------------------------------------------------------------------------ --- | --- Module : Distribution.Solver.Types.PkgConfigDb --- Copyright : (c) Iñaki García Etxebarria 2016 --- License : BSD-like --- --- Maintainer : cabal-devel@haskell.org --- Portability : portable --- --- Read the list of packages available to pkg-config. 
------------------------------------------------------------------------------ -module Distribution.Solver.Types.PkgConfigDb - ( PkgConfigDb - , readPkgConfigDb - , pkgConfigDbFromList - , pkgConfigPkgIsPresent - , pkgConfigDbPkgVersion - , getPkgConfigDbDirs - ) where - -import Prelude () -import Distribution.Solver.Compat.Prelude - -import Control.Exception (IOException, handle) -import qualified Data.Map as M -import Data.Version (parseVersion) -import Text.ParserCombinators.ReadP (readP_to_S) -import System.FilePath (splitSearchPath) - -import Distribution.Package - ( PkgconfigName, mkPkgconfigName ) -import Distribution.Verbosity - ( Verbosity ) -import Distribution.Version - ( Version, mkVersion', VersionRange, withinRange ) - -import Distribution.Compat.Environment - ( lookupEnv ) -import Distribution.Simple.Program - ( ProgramDb, pkgConfigProgram, getProgramOutput, requireProgram ) -import Distribution.Simple.Utils - ( info ) - --- | The list of packages installed in the system visible to --- @pkg-config@. This is an opaque datatype, to be constructed with --- `readPkgConfigDb` and queried with `pkgConfigPkgPresent`. -data PkgConfigDb = PkgConfigDb (M.Map PkgconfigName (Maybe Version)) - -- ^ If an entry is `Nothing`, this means that the - -- package seems to be present, but we don't know the - -- exact version (because parsing of the version - -- number failed). - | NoPkgConfigDb - -- ^ For when we could not run pkg-config successfully. - deriving (Show, Generic, Typeable) - -instance Binary PkgConfigDb - --- | Query pkg-config for the list of installed packages, together --- with their versions. Return a `PkgConfigDb` encapsulating this --- information. 
-readPkgConfigDb :: Verbosity -> ProgramDb -> IO PkgConfigDb -readPkgConfigDb verbosity progdb = handle ioErrorHandler $ do - (pkgConfig, _) <- requireProgram verbosity pkgConfigProgram progdb - pkgList <- lines <$> getProgramOutput verbosity pkgConfig ["--list-all"] - -- The output of @pkg-config --list-all@ also includes a description - -- for each package, which we do not need. - let pkgNames = map (takeWhile (not . isSpace)) pkgList - pkgVersions <- lines <$> getProgramOutput verbosity pkgConfig - ("--modversion" : pkgNames) - (return . pkgConfigDbFromList . zip pkgNames) pkgVersions - where - -- For when pkg-config invocation fails (possibly because of a - -- too long command line). - ioErrorHandler :: IOException -> IO PkgConfigDb - ioErrorHandler e = do - info verbosity ("Failed to query pkg-config, Cabal will continue" - ++ " without solving for pkg-config constraints: " - ++ show e) - return NoPkgConfigDb - --- | Create a `PkgConfigDb` from a list of @(packageName, version)@ pairs. -pkgConfigDbFromList :: [(String, String)] -> PkgConfigDb -pkgConfigDbFromList pairs = (PkgConfigDb . M.fromList . map convert) pairs - where - convert :: (String, String) -> (PkgconfigName, Maybe Version) - convert (n,vs) = (mkPkgconfigName n, - case (reverse . readP_to_S parseVersion) vs of - (v, "") : _ -> Just (mkVersion' v) - _ -> Nothing -- Version not (fully) - -- understood. - ) - --- | Check whether a given package range is satisfiable in the given --- @pkg-config@ database. -pkgConfigPkgIsPresent :: PkgConfigDb -> PkgconfigName -> VersionRange -> Bool -pkgConfigPkgIsPresent (PkgConfigDb db) pn vr = - case M.lookup pn db of - Nothing -> False -- Package not present in the DB. - Just Nothing -> True -- Package present, but version unknown. - Just (Just v) -> withinRange v vr --- If we could not read the pkg-config database successfully we allow --- the check to succeed. 
The plan found by the solver may fail to be --- executed later on, but we have no grounds for rejecting the plan at --- this stage. -pkgConfigPkgIsPresent NoPkgConfigDb _ _ = True - - --- | Query the version of a package in the @pkg-config@ database. --- @Nothing@ indicates the package is not in the database, while --- @Just Nothing@ indicates that the package is in the database, --- but its version is not known. -pkgConfigDbPkgVersion :: PkgConfigDb -> PkgconfigName -> Maybe (Maybe Version) -pkgConfigDbPkgVersion (PkgConfigDb db) pn = M.lookup pn db --- NB: Since the solver allows solving to succeed if there is --- NoPkgConfigDb, we should report that we *guess* that there --- is a matching pkg-config configuration, but that we just --- don't know about it. -pkgConfigDbPkgVersion NoPkgConfigDb _ = Just Nothing - - --- | Query pkg-config for the locations of pkg-config's package files. Use this --- to monitor for changes in the pkg-config DB. --- -getPkgConfigDbDirs :: Verbosity -> ProgramDb -> IO [FilePath] -getPkgConfigDbDirs verbosity progdb = - (++) <$> getEnvPath <*> getDefPath - where - -- According to @man pkg-config@: - -- - -- PKG_CONFIG_PATH - -- A colon-separated (on Windows, semicolon-separated) list of directories - -- to search for .pc files. 
The default directory will always be searched - -- after searching the path - -- - getEnvPath = maybe [] parseSearchPath - <$> lookupEnv "PKG_CONFIG_PATH" - - -- Again according to @man pkg-config@: - -- - -- pkg-config can be used to query itself for the default search path, - -- version number and other information, for instance using: - -- - -- > pkg-config --variable pc_path pkg-config - -- - getDefPath = handle ioErrorHandler $ do - (pkgConfig, _) <- requireProgram verbosity pkgConfigProgram progdb - parseSearchPath <$> - getProgramOutput verbosity pkgConfig - ["--variable", "pc_path", "pkg-config"] - - parseSearchPath str = - case lines str of - [p] | not (null p) -> splitSearchPath p - _ -> [] - - ioErrorHandler :: IOException -> IO [FilePath] - ioErrorHandler _e = return [] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Progress.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Progress.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Progress.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Progress.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,49 +0,0 @@ -module Distribution.Solver.Types.Progress - ( Progress(..) - , foldProgress - ) where - -import Prelude () -import Distribution.Solver.Compat.Prelude hiding (fail) - --- | A type to represent the unfolding of an expensive long running --- calculation that may fail. We may get intermediate steps before the final --- result which may be used to indicate progress and\/or logging messages. --- -data Progress step fail done = Step step (Progress step fail done) - | Fail fail - | Done done - --- This Functor instance works around a bug in GHC 7.6.3. --- See https://ghc.haskell.org/trac/ghc/ticket/7436#comment:6. 
--- The derived functor instance caused a space leak in the solver. -instance Functor (Progress step fail) where - fmap f (Step s p) = Step s (fmap f p) - fmap _ (Fail x) = Fail x - fmap f (Done r) = Done (f r) - --- | Consume a 'Progress' calculation. Much like 'foldr' for lists but with two --- base cases, one for a final result and one for failure. --- --- Eg to convert into a simple 'Either' result use: --- --- > foldProgress (flip const) Left Right --- -foldProgress :: (step -> a -> a) -> (fail -> a) -> (done -> a) - -> Progress step fail done -> a -foldProgress step fail done = fold - where fold (Step s p) = step s (fold p) - fold (Fail f) = fail f - fold (Done r) = done r - -instance Monad (Progress step fail) where - return = pure - p >>= f = foldProgress Step Fail f p - -instance Applicative (Progress step fail) where - pure a = Done a - p <*> x = foldProgress Step Fail (flip fmap x) p - -instance Monoid fail => Alternative (Progress step fail) where - empty = Fail mempty - p <|> q = foldProgress Step (const q) Done p diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/ResolverPackage.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/ResolverPackage.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/ResolverPackage.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/ResolverPackage.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,50 +0,0 @@ -{-# LANGUAGE TypeFamilies #-} -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Solver.Types.ResolverPackage - ( ResolverPackage(..) 
- , resolverPackageLibDeps - , resolverPackageExeDeps - ) where - -import Distribution.Solver.Types.InstSolverPackage -import Distribution.Solver.Types.SolverId -import Distribution.Solver.Types.SolverPackage -import qualified Distribution.Solver.Types.ComponentDeps as CD - -import Distribution.Compat.Binary (Binary(..)) -import Distribution.Compat.Graph (IsNode(..)) -import Distribution.Package (Package(..), HasUnitId(..)) -import Distribution.Simple.Utils (ordNub) -import GHC.Generics (Generic) - --- | The dependency resolver picks either pre-existing installed packages --- or it picks source packages along with package configuration. --- --- This is like the 'InstallPlan.PlanPackage' but with fewer cases. --- -data ResolverPackage loc = PreExisting InstSolverPackage - | Configured (SolverPackage loc) - deriving (Eq, Show, Generic) - -instance Binary loc => Binary (ResolverPackage loc) - -instance Package (ResolverPackage loc) where - packageId (PreExisting ipkg) = packageId ipkg - packageId (Configured spkg) = packageId spkg - -resolverPackageLibDeps :: ResolverPackage loc -> CD.ComponentDeps [SolverId] -resolverPackageLibDeps (PreExisting ipkg) = instSolverPkgLibDeps ipkg -resolverPackageLibDeps (Configured spkg) = solverPkgLibDeps spkg - -resolverPackageExeDeps :: ResolverPackage loc -> CD.ComponentDeps [SolverId] -resolverPackageExeDeps (PreExisting ipkg) = instSolverPkgExeDeps ipkg -resolverPackageExeDeps (Configured spkg) = solverPkgExeDeps spkg - -instance IsNode (ResolverPackage loc) where - type Key (ResolverPackage loc) = SolverId - nodeKey (PreExisting ipkg) = PreExistingId (packageId ipkg) (installedUnitId ipkg) - nodeKey (Configured spkg) = PlannedId (packageId spkg) - -- Use dependencies for ALL components - nodeNeighbors pkg = - ordNub $ CD.flatDeps (resolverPackageLibDeps pkg) ++ - CD.flatDeps (resolverPackageExeDeps pkg) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Settings.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Settings.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Settings.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Settings.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,53 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -module Distribution.Solver.Types.Settings - ( ReorderGoals(..) - , IndependentGoals(..) - , AvoidReinstalls(..) - , ShadowPkgs(..) - , StrongFlags(..) - , AllowBootLibInstalls(..) - , EnableBackjumping(..) - , CountConflicts(..) - , SolveExecutables(..) - ) where - -import Distribution.Simple.Setup ( BooleanFlag(..) ) -import Distribution.Compat.Binary (Binary(..)) -import GHC.Generics (Generic) - -newtype ReorderGoals = ReorderGoals Bool - deriving (BooleanFlag, Eq, Generic, Show) - -newtype CountConflicts = CountConflicts Bool - deriving (BooleanFlag, Eq, Generic, Show) - -newtype IndependentGoals = IndependentGoals Bool - deriving (BooleanFlag, Eq, Generic, Show) - -newtype AvoidReinstalls = AvoidReinstalls Bool - deriving (BooleanFlag, Eq, Generic, Show) - -newtype ShadowPkgs = ShadowPkgs Bool - deriving (BooleanFlag, Eq, Generic, Show) - -newtype StrongFlags = StrongFlags Bool - deriving (BooleanFlag, Eq, Generic, Show) - -newtype AllowBootLibInstalls = AllowBootLibInstalls Bool - deriving (BooleanFlag, Eq, Generic, Show) - -newtype EnableBackjumping = EnableBackjumping Bool - deriving (BooleanFlag, Eq, Generic, Show) - -newtype SolveExecutables = SolveExecutables Bool - deriving (BooleanFlag, Eq, Generic, Show) - -instance Binary ReorderGoals -instance Binary CountConflicts -instance Binary IndependentGoals -instance Binary AvoidReinstalls -instance Binary ShadowPkgs -instance Binary StrongFlags -instance Binary AllowBootLibInstalls -instance Binary SolveExecutables 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/SolverId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/SolverId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/SolverId.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/SolverId.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Solver.Types.SolverId - ( SolverId(..) - ) - -where - -import Distribution.Compat.Binary (Binary(..)) -import Distribution.Package (PackageId, Package(..), UnitId) -import GHC.Generics (Generic) - --- | The solver can produce references to existing packages or --- packages we plan to install. Unlike 'ConfiguredId' we don't --- yet know the 'UnitId' for planned packages, because it's --- not the solver's job to compute them. --- -data SolverId = PreExistingId { solverSrcId :: PackageId, solverInstId :: UnitId } - | PlannedId { solverSrcId :: PackageId } - deriving (Eq, Ord, Generic) - -instance Binary SolverId - -instance Show SolverId where - show = show . solverSrcId - -instance Package SolverId where - packageId = solverSrcId diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/SolverPackage.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/SolverPackage.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/SolverPackage.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/SolverPackage.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -module Distribution.Solver.Types.SolverPackage - ( SolverPackage(..) 
- ) where - -import Distribution.Compat.Binary (Binary(..)) -import Distribution.Package ( Package(..) ) -import Distribution.PackageDescription ( FlagAssignment ) -import Distribution.Solver.Types.ComponentDeps ( ComponentDeps ) -import Distribution.Solver.Types.OptionalStanza -import Distribution.Solver.Types.SolverId -import Distribution.Solver.Types.SourcePackage -import GHC.Generics (Generic) - --- | A 'SolverPackage' is a package specified by the dependency solver. --- It will get elaborated into a 'ConfiguredPackage' or even an --- 'ElaboratedConfiguredPackage'. --- --- NB: 'SolverPackage's are essentially always with 'UnresolvedPkgLoc', --- but for symmetry we have the parameter. (Maybe it can be removed.) --- -data SolverPackage loc = SolverPackage { - solverPkgSource :: SourcePackage loc, - solverPkgFlags :: FlagAssignment, - solverPkgStanzas :: [OptionalStanza], - solverPkgLibDeps :: ComponentDeps [SolverId], - solverPkgExeDeps :: ComponentDeps [SolverId] - } - deriving (Eq, Show, Generic) - -instance Binary loc => Binary (SolverPackage loc) - -instance Package (SolverPackage loc) where - packageId = packageId . solverPkgSource diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/SourcePackage.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/SourcePackage.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/SourcePackage.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/SourcePackage.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,34 +0,0 @@ -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DeriveDataTypeable #-} -module Distribution.Solver.Types.SourcePackage - ( PackageDescriptionOverride - , SourcePackage(..) - ) where - -import Distribution.Package - ( PackageId, Package(..) 
) -import Distribution.PackageDescription - ( GenericPackageDescription(..) ) - -import Data.ByteString.Lazy (ByteString) -import GHC.Generics (Generic) -import Distribution.Compat.Binary (Binary(..)) -import Data.Typeable - --- | A package description along with the location of the package sources. --- -data SourcePackage loc = SourcePackage { - packageInfoId :: PackageId, - packageDescription :: GenericPackageDescription, - packageSource :: loc, - packageDescrOverride :: PackageDescriptionOverride - } - deriving (Eq, Show, Generic, Typeable) - -instance (Binary loc) => Binary (SourcePackage loc) - -instance Package (SourcePackage a) where packageId = packageInfoId - --- | We sometimes need to override the .cabal file in the tarball with --- the newer one from the package index. -type PackageDescriptionOverride = Maybe ByteString diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Variable.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Variable.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Variable.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Distribution/Solver/Types/Variable.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -module Distribution.Solver.Types.Variable where - -import Distribution.Solver.Types.OptionalStanza - -import Distribution.PackageDescription (FlagName) - --- | Variables used by the dependency solver. This type is similar to the --- internal 'Var' type. 
-data Variable qpn = - PackageVar qpn - | FlagVar qpn FlagName - | StanzaVar qpn OptionalStanza - deriving (Eq, Show) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/LICENSE cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/LICENSE --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/LICENSE 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,31 +0,0 @@ -Copyright (c) 2003-2017, Cabal Development Team. -See the AUTHORS file for the full list of copyright holders. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - - * Neither the name of Isaac Jones nor the names of other - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/main/Main.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/main/Main.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/main/Main.hs 2018-10-17 15:59:06.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/main/Main.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1251 +0,0 @@ -{-# LANGUAGE CPP #-} - ------------------------------------------------------------------------------ --- | --- Module : Main --- Copyright : (c) David Himmelstrup 2005 --- License : BSD-like --- --- Maintainer : lemmih@gmail.com --- Stability : provisional --- Portability : portable --- --- Entry point to the default cabal-install front-end. ------------------------------------------------------------------------------ - -module Main (main) where - -import Distribution.Client.Setup - ( GlobalFlags(..), globalCommand, withRepoContext - , ConfigFlags(..) - , ConfigExFlags(..), defaultConfigExFlags, configureExCommand - , reconfigureCommand - , configCompilerAux', configPackageDB' - , BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..) 
- , buildCommand, replCommand, testCommand, benchmarkCommand - , InstallFlags(..), defaultInstallFlags - , installCommand, upgradeCommand, uninstallCommand - , FetchFlags(..), fetchCommand - , FreezeFlags(..), freezeCommand - , genBoundsCommand - , OutdatedFlags(..), outdatedCommand - , GetFlags(..), getCommand, unpackCommand - , checkCommand - , formatCommand - , UpdateFlags(..), updateCommand - , ListFlags(..), listCommand - , InfoFlags(..), infoCommand - , UploadFlags(..), uploadCommand - , ReportFlags(..), reportCommand - , runCommand - , InitFlags(initVerbosity), initCommand - , SDistFlags(..), SDistExFlags(..), sdistCommand - , Win32SelfUpgradeFlags(..), win32SelfUpgradeCommand - , ActAsSetupFlags(..), actAsSetupCommand - , SandboxFlags(..), sandboxCommand - , ExecFlags(..), execCommand - , UserConfigFlags(..), userConfigCommand - , reportCommand - , manpageCommand - , haddockCommand - , cleanCommand - , doctestCommand - , copyCommand - , registerCommand - ) -import Distribution.Simple.Setup - ( HaddockTarget(..) - , DoctestFlags(..) - , HaddockFlags(..), defaultHaddockFlags - , HscolourFlags(..), hscolourCommand - , ReplFlags(..) - , CopyFlags(..) - , RegisterFlags(..) - , CleanFlags(..) - , TestFlags(..), BenchmarkFlags(..) 
- , Flag(..), fromFlag, fromFlagOrDefault, flagToMaybe, toFlag - , configAbsolutePaths - ) - -import Prelude () -import Distribution.Solver.Compat.Prelude hiding (get) - -import Distribution.Client.SetupWrapper - ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) -import Distribution.Client.Config - ( SavedConfig(..), loadConfig, defaultConfigFile, userConfigDiff - , userConfigUpdate, createDefaultConfigFile, getConfigFilePath ) -import Distribution.Client.Targets - ( readUserTargets ) -import qualified Distribution.Client.List as List - ( list, info ) - - -import qualified Distribution.Client.CmdConfigure as CmdConfigure -import qualified Distribution.Client.CmdUpdate as CmdUpdate -import qualified Distribution.Client.CmdBuild as CmdBuild -import qualified Distribution.Client.CmdRepl as CmdRepl -import qualified Distribution.Client.CmdFreeze as CmdFreeze -import qualified Distribution.Client.CmdHaddock as CmdHaddock -import qualified Distribution.Client.CmdInstall as CmdInstall -import qualified Distribution.Client.CmdRun as CmdRun -import qualified Distribution.Client.CmdTest as CmdTest -import qualified Distribution.Client.CmdBench as CmdBench -import qualified Distribution.Client.CmdExec as CmdExec -import qualified Distribution.Client.CmdClean as CmdClean -import qualified Distribution.Client.CmdSdist as CmdSdist -import Distribution.Client.CmdLegacy - -import Distribution.Client.Install (install) -import Distribution.Client.Configure (configure, writeConfigFlags) -import Distribution.Client.Update (update) -import Distribution.Client.Exec (exec) -import Distribution.Client.Fetch (fetch) -import Distribution.Client.Freeze (freeze) -import Distribution.Client.GenBounds (genBounds) -import Distribution.Client.Outdated (outdated) -import Distribution.Client.Check as Check (check) ---import Distribution.Client.Clean (clean) -import qualified Distribution.Client.Upload as Upload -import Distribution.Client.Run (run, splitRunArgs) -import 
Distribution.Client.SrcDist (sdist) -import Distribution.Client.Get (get) -import Distribution.Client.Reconfigure (Check(..), reconfigure) -import Distribution.Client.Nix (nixInstantiate - ,nixShell - ,nixShellIfSandboxed) -import Distribution.Client.Sandbox (sandboxInit - ,sandboxAddSource - ,sandboxDelete - ,sandboxDeleteSource - ,sandboxListSources - ,sandboxHcPkg - ,dumpPackageEnvironment - - ,loadConfigOrSandboxConfig - ,findSavedDistPref - ,initPackageDBIfNeeded - ,maybeWithSandboxDirOnSearchPath - ,maybeWithSandboxPackageInfo - ,tryGetIndexFilePath - ,sandboxBuildDir - ,updateSandboxConfigFileFlag - ,updateInstallDirs - - ,getPersistOrConfigCompiler) -import Distribution.Client.Sandbox.PackageEnvironment (setPackageDB) -import Distribution.Client.Sandbox.Timestamp (maybeAddCompilerTimestampRecord) -import Distribution.Client.Sandbox.Types (UseSandbox(..), whenUsingSandbox) -import Distribution.Client.Tar (createTarGzFile) -import Distribution.Client.Types (Password (..)) -import Distribution.Client.Init (initCabal) -import Distribution.Client.Manpage (manpage) -import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade -import Distribution.Client.Utils (determineNumJobs -#if defined(mingw32_HOST_OS) - ,relaxEncodingErrors -#endif - ) - -import Distribution.Package (packageId) -import Distribution.PackageDescription - ( BuildType(..), Executable(..), buildable ) -import Distribution.PackageDescription.Parsec ( readGenericPackageDescription ) - -import Distribution.PackageDescription.PrettyPrint - ( writeGenericPackageDescription ) -import qualified Distribution.Simple as Simple -import qualified Distribution.Make as Make -import qualified Distribution.Types.UnqualComponentName as Make -import Distribution.Simple.Build - ( startInterpreter ) -import Distribution.Simple.Command - ( CommandParse(..), CommandUI(..), Command, CommandSpec(..) 
- , CommandType(..), commandsRun, commandAddAction, hiddenCommand - , commandFromSpec, commandShowOptions ) -import Distribution.Simple.Compiler (Compiler(..), PackageDBStack) -import Distribution.Simple.Configure - ( configCompilerAuxEx, ConfigStateFileError(..) - , getPersistBuildConfig, interpretPackageDbFlags - , tryGetPersistBuildConfig ) -import qualified Distribution.Simple.LocalBuildInfo as LBI -import Distribution.Simple.Program (defaultProgramDb - ,configureAllKnownPrograms - ,simpleProgramInvocation - ,getProgramInvocationOutput) -import Distribution.Simple.Program.Db (reconfigurePrograms) -import qualified Distribution.Simple.Setup as Cabal -import Distribution.Simple.Utils - ( cabalVersion, die', dieNoVerbosity, info, notice, topHandler - , findPackageDesc, tryFindPackageDesc ) -import Distribution.Text - ( display ) -import Distribution.Verbosity as Verbosity - ( Verbosity, normal ) -import Distribution.Version - ( Version, mkVersion, orLaterVersion ) -import qualified Paths_cabal_install (version) - -import System.Environment (getArgs, getProgName) -import System.Exit (exitFailure, exitSuccess) -import System.FilePath ( dropExtension, splitExtension - , takeExtension, (), (<.>)) -import System.IO ( BufferMode(LineBuffering), hSetBuffering -#ifdef mingw32_HOST_OS - , stderr -#endif - , stdout ) -import System.Directory (doesFileExist, getCurrentDirectory) -import Data.Monoid (Any(..)) -import Control.Exception (SomeException(..), try) -import Control.Monad (mapM_) - -#ifdef MONOLITHIC -import qualified UnitTests -import qualified MemoryUsageTests -import qualified SolverQuickCheck -import qualified IntegrationTests2 -import qualified System.Environment as Monolithic -#endif - --- | Entry point --- -main :: IO () -#ifdef MONOLITHIC -main = do - mb_exec <- Monolithic.lookupEnv "CABAL_INSTALL_MONOLITHIC_MODE" - case mb_exec of - Just "UnitTests" -> UnitTests.main - Just "MemoryUsageTests" -> MemoryUsageTests.main - Just "SolverQuickCheck" -> 
SolverQuickCheck.main - Just "IntegrationTests2" -> IntegrationTests2.main - Just s -> error $ "Unrecognized mode '" ++ show s ++ "' in CABAL_INSTALL_MONOLITHIC_MODE" - Nothing -> main' -#else -main = main' -#endif - -main' :: IO () -main' = do - -- Enable line buffering so that we can get fast feedback even when piped. - -- This is especially important for CI and build systems. - hSetBuffering stdout LineBuffering - -- The default locale encoding for Windows CLI is not UTF-8 and printing - -- Unicode characters to it will fail unless we relax the handling of encoding - -- errors when writing to stderr and stdout. -#ifdef mingw32_HOST_OS - relaxEncodingErrors stdout - relaxEncodingErrors stderr -#endif - getArgs >>= mainWorker - -mainWorker :: [String] -> IO () -mainWorker args = do - validScript <- - if null args - then return False - else doesFileExist (last args) - - topHandler $ - case commandsRun (globalCommand commands) commands args of - CommandHelp help -> printGlobalHelp help - CommandList opts -> printOptionsList opts - CommandErrors errs -> printErrors errs - CommandReadyToGo (globalFlags, commandParse) -> - case commandParse of - _ | fromFlagOrDefault False (globalVersion globalFlags) - -> printVersion - | fromFlagOrDefault False (globalNumericVersion globalFlags) - -> printNumericVersion - CommandHelp help -> printCommandHelp help - CommandList opts -> printOptionsList opts - CommandErrors errs - | validScript -> CmdRun.handleShebang (last args) - | otherwise -> printErrors errs - CommandReadyToGo action -> do - globalFlags' <- updateSandboxConfigFileFlag globalFlags - action globalFlags' - - where - printCommandHelp help = do - pname <- getProgName - putStr (help pname) - printGlobalHelp help = do - pname <- getProgName - configFile <- defaultConfigFile - putStr (help pname) - putStr $ "\nYou can edit the cabal configuration file to set defaults:\n" - ++ " " ++ configFile ++ "\n" - exists <- doesFileExist configFile - unless exists $ - putStrLn $ 
"This file will be generated with sensible " - ++ "defaults if you run 'cabal update'." - printOptionsList = putStr . unlines - printErrors errs = dieNoVerbosity $ intercalate "\n" errs - printNumericVersion = putStrLn $ display Paths_cabal_install.version - printVersion = putStrLn $ "cabal-install version " - ++ display Paths_cabal_install.version - ++ "\ncompiled using version " - ++ display cabalVersion - ++ " of the Cabal library " - - commands = map commandFromSpec commandSpecs - commandSpecs = - [ regularCmd listCommand listAction - , regularCmd infoCommand infoAction - , regularCmd fetchCommand fetchAction - , regularCmd getCommand getAction - , hiddenCmd unpackCommand unpackAction - , regularCmd checkCommand checkAction - , regularCmd uploadCommand uploadAction - , regularCmd reportCommand reportAction - , regularCmd initCommand initAction - , regularCmd userConfigCommand userConfigAction - , regularCmd genBoundsCommand genBoundsAction - , regularCmd outdatedCommand outdatedAction - , wrapperCmd hscolourCommand hscolourVerbosity hscolourDistPref - , hiddenCmd uninstallCommand uninstallAction - , hiddenCmd formatCommand formatAction - , hiddenCmd upgradeCommand upgradeAction - , hiddenCmd win32SelfUpgradeCommand win32SelfUpgradeAction - , hiddenCmd actAsSetupCommand actAsSetupAction - , hiddenCmd manpageCommand (manpageAction commandSpecs) - - ] ++ concat - [ newCmd CmdConfigure.configureCommand CmdConfigure.configureAction - , newCmd CmdUpdate.updateCommand CmdUpdate.updateAction - , newCmd CmdBuild.buildCommand CmdBuild.buildAction - , newCmd CmdRepl.replCommand CmdRepl.replAction - , newCmd CmdFreeze.freezeCommand CmdFreeze.freezeAction - , newCmd CmdHaddock.haddockCommand CmdHaddock.haddockAction - , newCmd CmdInstall.installCommand CmdInstall.installAction - , newCmd CmdRun.runCommand CmdRun.runAction - , newCmd CmdTest.testCommand CmdTest.testAction - , newCmd CmdBench.benchCommand CmdBench.benchAction - , newCmd CmdExec.execCommand CmdExec.execAction 
- , newCmd CmdClean.cleanCommand CmdClean.cleanAction - , newCmd CmdSdist.sdistCommand CmdSdist.sdistAction - - , legacyCmd configureExCommand configureAction - , legacyCmd updateCommand updateAction - , legacyCmd buildCommand buildAction - , legacyCmd replCommand replAction - , legacyCmd freezeCommand freezeAction - , legacyCmd haddockCommand haddockAction - , legacyCmd installCommand installAction - , legacyCmd runCommand runAction - , legacyCmd testCommand testAction - , legacyCmd benchmarkCommand benchmarkAction - , legacyCmd execCommand execAction - , legacyCmd cleanCommand cleanAction - , legacyCmd sdistCommand sdistAction - , legacyCmd doctestCommand doctestAction - , legacyWrapperCmd copyCommand copyVerbosity copyDistPref - , legacyWrapperCmd registerCommand regVerbosity regDistPref - , legacyCmd reconfigureCommand reconfigureAction - , legacyCmd sandboxCommand sandboxAction - ] - -type Action = GlobalFlags -> IO () - --- Duplicated in Distribution.Client.CmdLegacy. Any changes must be --- reflected there, as well. 
-regularCmd :: CommandUI flags -> (flags -> [String] -> action) - -> CommandSpec action -regularCmd ui action = - CommandSpec ui ((flip commandAddAction) action) NormalCommand - -hiddenCmd :: CommandUI flags -> (flags -> [String] -> action) - -> CommandSpec action -hiddenCmd ui action = - CommandSpec ui (\ui' -> hiddenCommand (commandAddAction ui' action)) - HiddenCommand - -wrapperCmd :: Monoid flags => CommandUI flags -> (flags -> Flag Verbosity) - -> (flags -> Flag String) -> CommandSpec Action -wrapperCmd ui verbosity distPref = - CommandSpec ui (\ui' -> wrapperAction ui' verbosity distPref) NormalCommand - -wrapperAction :: Monoid flags - => CommandUI flags - -> (flags -> Flag Verbosity) - -> (flags -> Flag String) - -> Command Action -wrapperAction command verbosityFlag distPrefFlag = - commandAddAction command - { commandDefaultFlags = mempty } $ \flags extraArgs globalFlags -> do - let verbosity = fromFlagOrDefault normal (verbosityFlag flags) - load <- try (loadConfigOrSandboxConfig verbosity globalFlags) - let config = either (\(SomeException _) -> mempty) snd load - distPref <- findSavedDistPref config (distPrefFlag flags) - let setupScriptOptions = defaultSetupScriptOptions { useDistPref = distPref } - setupWrapper verbosity setupScriptOptions Nothing - command (const flags) (const extraArgs) - -configureAction :: (ConfigFlags, ConfigExFlags) - -> [String] -> Action -configureAction (configFlags, configExFlags) extraArgs globalFlags = do - let verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - (useSandbox, config) <- updateInstallDirs (configUserInstall configFlags) - <$> loadConfigOrSandboxConfig verbosity globalFlags - distPref <- findSavedDistPref config (configDistPref configFlags) - nixInstantiate verbosity distPref True globalFlags config - nixShell verbosity distPref globalFlags config $ do - let configFlags' = savedConfigureFlags config `mappend` configFlags - configExFlags' = savedConfigureExFlags config `mappend` 
configExFlags - globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, platform, progdb) <- configCompilerAuxEx configFlags' - - -- If we're working inside a sandbox and the user has set the -w option, we - -- may need to create a sandbox-local package DB for this compiler and add a - -- timestamp record for this compiler to the timestamp file. - let configFlags'' = case useSandbox of - NoSandbox -> configFlags' - (UseSandbox sandboxDir) -> setPackageDB sandboxDir - comp platform configFlags' - - writeConfigFlags verbosity distPref (configFlags'', configExFlags') - - -- What package database(s) to use - let packageDBs :: PackageDBStack - packageDBs - = interpretPackageDbFlags - (fromFlag (configUserInstall configFlags'')) - (configPackageDBs configFlags'') - - whenUsingSandbox useSandbox $ \sandboxDir -> do - initPackageDBIfNeeded verbosity configFlags'' comp progdb - -- NOTE: We do not write the new sandbox package DB location to - -- 'cabal.sandbox.config' here because 'configure -w' must not affect - -- subsequent 'install' (for UI compatibility with non-sandboxed mode). 
- - indexFile <- tryGetIndexFilePath verbosity config - maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile - (compilerId comp) platform - - maybeWithSandboxDirOnSearchPath useSandbox $ - withRepoContext verbosity globalFlags' $ \repoContext -> - configure verbosity packageDBs repoContext - comp platform progdb configFlags'' configExFlags' extraArgs - -reconfigureAction :: (ConfigFlags, ConfigExFlags) - -> [String] -> Action -reconfigureAction flags@(configFlags, _) _ globalFlags = do - let verbosity = fromFlagOrDefault normal (configVerbosity configFlags) - (useSandbox, config) <- updateInstallDirs (configUserInstall configFlags) - <$> loadConfigOrSandboxConfig verbosity globalFlags - distPref <- findSavedDistPref config (configDistPref configFlags) - let checkFlags = Check $ \_ saved -> do - let flags' = saved <> flags - unless (saved == flags') $ info verbosity message - pure (Any True, flags') - where - -- This message is correct, but not very specific: it will list all - -- of the new flags, even if some have not actually changed. The - -- *minimal* set of changes is more difficult to determine. - message = - "flags changed: " - ++ unwords (commandShowOptions configureExCommand flags) - nixInstantiate verbosity distPref True globalFlags config - _ <- - reconfigure configureAction - verbosity distPref useSandbox DontSkipAddSourceDepsCheck NoFlag - checkFlags [] globalFlags config - pure () - -buildAction :: (BuildFlags, BuildExFlags) -> [String] -> Action -buildAction (buildFlags, buildExFlags) extraArgs globalFlags = do - let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags) - noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck - (buildOnly buildExFlags) - (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - distPref <- findSavedDistPref config (buildDistPref buildFlags) - -- Calls 'configureAction' to do the real work, so nothing special has to be - -- done to support sandboxes. 
- config' <- - reconfigure configureAction - verbosity distPref useSandbox noAddSource (buildNumJobs buildFlags) - mempty [] globalFlags config - nixShell verbosity distPref globalFlags config $ do - maybeWithSandboxDirOnSearchPath useSandbox $ - build verbosity config' distPref buildFlags extraArgs - - --- | Actually do the work of building the package. This is separate from --- 'buildAction' so that 'testAction' and 'benchmarkAction' do not invoke --- 'reconfigure' twice. -build :: Verbosity -> SavedConfig -> FilePath -> BuildFlags -> [String] -> IO () -build verbosity config distPref buildFlags extraArgs = - setupWrapper verbosity setupOptions Nothing - (Cabal.buildCommand progDb) mkBuildFlags (const extraArgs) - where - progDb = defaultProgramDb - setupOptions = defaultSetupScriptOptions { useDistPref = distPref } - - mkBuildFlags version = filterBuildFlags version config buildFlags' - buildFlags' = buildFlags - { buildVerbosity = toFlag verbosity - , buildDistPref = toFlag distPref - } - --- | Make sure that we don't pass new flags to setup scripts compiled against --- old versions of Cabal. -filterBuildFlags :: Version -> SavedConfig -> BuildFlags -> BuildFlags -filterBuildFlags version config buildFlags - | version >= mkVersion [1,19,1] = buildFlags_latest - -- Cabal < 1.19.1 doesn't support 'build -j'. - | otherwise = buildFlags_pre_1_19_1 - where - buildFlags_pre_1_19_1 = buildFlags { - buildNumJobs = NoFlag - } - buildFlags_latest = buildFlags { - -- Take the 'jobs' setting '~/.cabal/config' into account. - buildNumJobs = Flag . Just . determineNumJobs $ - (numJobsConfigFlag `mappend` numJobsCmdLineFlag) - } - numJobsConfigFlag = installNumJobs . 
savedInstallFlags $ config - numJobsCmdLineFlag = buildNumJobs buildFlags - - -replAction :: (ReplFlags, BuildExFlags) -> [String] -> Action -replAction (replFlags, buildExFlags) extraArgs globalFlags = do - let verbosity = fromFlagOrDefault normal (replVerbosity replFlags) - (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - distPref <- findSavedDistPref config (replDistPref replFlags) - cwd <- getCurrentDirectory - pkgDesc <- findPackageDesc cwd - let - -- There is a .cabal file in the current directory: start a REPL and load - -- the project's modules. - onPkgDesc = do - let noAddSource = case replReload replFlags of - Flag True -> SkipAddSourceDepsCheck - _ -> fromFlagOrDefault DontSkipAddSourceDepsCheck - (buildOnly buildExFlags) - - -- Calls 'configureAction' to do the real work, so nothing special has to - -- be done to support sandboxes. - _ <- - reconfigure configureAction - verbosity distPref useSandbox noAddSource NoFlag - mempty [] globalFlags config - let progDb = defaultProgramDb - setupOptions = defaultSetupScriptOptions - { useCabalVersion = orLaterVersion $ mkVersion [1,18,0] - , useDistPref = distPref - } - replFlags' = replFlags - { replVerbosity = toFlag verbosity - , replDistPref = toFlag distPref - } - - nixShell verbosity distPref globalFlags config $ do - maybeWithSandboxDirOnSearchPath useSandbox $ - setupWrapper verbosity setupOptions Nothing - (Cabal.replCommand progDb) (const replFlags') (const extraArgs) - - -- No .cabal file in the current directory: just start the REPL (possibly - -- using the sandbox package DB). 
- onNoPkgDesc = do - let configFlags = savedConfigureFlags config - (comp, platform, programDb) <- configCompilerAux' configFlags - programDb' <- reconfigurePrograms verbosity - (replProgramPaths replFlags) - (replProgramArgs replFlags) - programDb - nixShell verbosity distPref globalFlags config $ do - startInterpreter verbosity programDb' comp platform - (configPackageDB' configFlags) - - either (const onNoPkgDesc) (const onPkgDesc) pkgDesc - -installAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> Action -installAction (configFlags, _, installFlags, _) _ globalFlags - | fromFlagOrDefault False (installOnly installFlags) = do - let verb = fromFlagOrDefault normal (configVerbosity configFlags) - (useSandbox, config) <- loadConfigOrSandboxConfig verb globalFlags - dist <- findSavedDistPref config (configDistPref configFlags) - let setupOpts = defaultSetupScriptOptions { useDistPref = dist } - nixShellIfSandboxed verb dist globalFlags config useSandbox $ - setupWrapper - verb setupOpts Nothing - installCommand (const mempty) (const []) - -installAction - (configFlags, configExFlags, installFlags, haddockFlags) - extraArgs globalFlags = do - let verb = fromFlagOrDefault normal (configVerbosity configFlags) - (useSandbox, config) <- updateInstallDirs (configUserInstall configFlags) - <$> loadConfigOrSandboxConfig verb globalFlags - - let sandboxDist = - case useSandbox of - NoSandbox -> NoFlag - UseSandbox sandboxDir -> Flag $ sandboxBuildDir sandboxDir - dist <- findSavedDistPref config - (configDistPref configFlags `mappend` sandboxDist) - - nixShellIfSandboxed verb dist globalFlags config useSandbox $ do - targets <- readUserTargets verb extraArgs - - -- TODO: It'd be nice if 'cabal install' picked up the '-w' flag passed to - -- 'configure' when run inside a sandbox. 
Right now, running - -- - -- $ cabal sandbox init && cabal configure -w /path/to/ghc - -- && cabal build && cabal install - -- - -- performs the compilation twice unless you also pass -w to 'install'. - -- However, this is the same behaviour that 'cabal install' has in the normal - -- mode of operation, so we stick to it for consistency. - - let configFlags' = maybeForceTests installFlags' $ - savedConfigureFlags config `mappend` - configFlags { configDistPref = toFlag dist } - configExFlags' = defaultConfigExFlags `mappend` - savedConfigureExFlags config `mappend` configExFlags - installFlags' = defaultInstallFlags `mappend` - savedInstallFlags config `mappend` installFlags - haddockFlags' = defaultHaddockFlags `mappend` - savedHaddockFlags config `mappend` - haddockFlags { haddockDistPref = toFlag dist } - globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, platform, progdb) <- configCompilerAux' configFlags' - -- TODO: Redesign ProgramDB API to prevent such problems as #2241 in the - -- future. - progdb' <- configureAllKnownPrograms verb progdb - - -- If we're working inside a sandbox and the user has set the -w option, we - -- may need to create a sandbox-local package DB for this compiler and add a - -- timestamp record for this compiler to the timestamp file. - configFlags'' <- case useSandbox of - NoSandbox -> configAbsolutePaths $ configFlags' - (UseSandbox sandboxDir) -> return $ setPackageDB sandboxDir comp platform - configFlags' - - whenUsingSandbox useSandbox $ \sandboxDir -> do - initPackageDBIfNeeded verb configFlags'' comp progdb' - - indexFile <- tryGetIndexFilePath verb config - maybeAddCompilerTimestampRecord verb sandboxDir indexFile - (compilerId comp) platform - - -- TODO: Passing 'SandboxPackageInfo' to install unconditionally here means - -- that 'cabal install some-package' inside a sandbox will sometimes reinstall - -- modified add-source deps, even if they are not among the dependencies of - -- 'some-package'. 
This can also prevent packages that depend on older - -- versions of add-source'd packages from building (see #1362). - maybeWithSandboxPackageInfo verb configFlags'' globalFlags' - comp platform progdb useSandbox $ \mSandboxPkgInfo -> - maybeWithSandboxDirOnSearchPath useSandbox $ - withRepoContext verb globalFlags' $ \repoContext -> - install verb - (configPackageDB' configFlags'') - repoContext - comp platform progdb' - useSandbox mSandboxPkgInfo - globalFlags' configFlags'' configExFlags' - installFlags' haddockFlags' - targets - - where - -- '--run-tests' implies '--enable-tests'. - maybeForceTests installFlags' configFlags' = - if fromFlagOrDefault False (installRunTests installFlags') - then configFlags' { configTests = toFlag True } - else configFlags' - -testAction :: (TestFlags, BuildFlags, BuildExFlags) -> [String] -> GlobalFlags - -> IO () -testAction (testFlags, buildFlags, buildExFlags) extraArgs globalFlags = do - let verbosity = fromFlagOrDefault normal (testVerbosity testFlags) - (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - distPref <- findSavedDistPref config (testDistPref testFlags) - let noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck - (buildOnly buildExFlags) - buildFlags' = buildFlags - { buildVerbosity = testVerbosity testFlags } - checkFlags = Check $ \_ flags@(configFlags, configExFlags) -> - if fromFlagOrDefault False (configTests configFlags) - then pure (mempty, flags) - else do - info verbosity "reconfiguring to enable tests" - let flags' = ( configFlags { configTests = toFlag True } - , configExFlags - ) - pure (Any True, flags') - - -- reconfigure also checks if we're in a sandbox and reinstalls add-source - -- deps if needed. 
- _ <- - reconfigure configureAction - verbosity distPref useSandbox noAddSource (buildNumJobs buildFlags') - checkFlags [] globalFlags config - nixShell verbosity distPref globalFlags config $ do - let setupOptions = defaultSetupScriptOptions { useDistPref = distPref } - testFlags' = testFlags { testDistPref = toFlag distPref } - - -- The package was just configured, so the LBI must be available. - names <- componentNamesFromLBI verbosity distPref "test suites" - (\c -> case c of { LBI.CTest{} -> True; _ -> False }) - let extraArgs' - | null extraArgs = case names of - ComponentNamesUnknown -> [] - ComponentNames names' -> [ Make.unUnqualComponentName name - | LBI.CTestName name <- names' ] - | otherwise = extraArgs - - maybeWithSandboxDirOnSearchPath useSandbox $ - build verbosity config distPref buildFlags' extraArgs' - - maybeWithSandboxDirOnSearchPath useSandbox $ - setupWrapper verbosity setupOptions Nothing - Cabal.testCommand (const testFlags') (const extraArgs') - -data ComponentNames = ComponentNamesUnknown - | ComponentNames [LBI.ComponentName] - --- | Return the names of all buildable components matching a given predicate. -componentNamesFromLBI :: Verbosity -> FilePath -> String - -> (LBI.Component -> Bool) - -> IO ComponentNames -componentNamesFromLBI verbosity distPref targetsDescr compPred = do - eLBI <- tryGetPersistBuildConfig distPref - case eLBI of - Left err -> case err of - -- Note: the build config could have been generated by a custom setup - -- script built against a different Cabal version, so it's crucial that - -- we ignore the bad version error here. - ConfigStateFileBadVersion _ _ _ -> return ComponentNamesUnknown - _ -> die' verbosity (show err) - Right lbi -> do - let pkgDescr = LBI.localPkgDescr lbi - names = map LBI.componentName - . filter (buildable . LBI.componentBuildInfo) - . filter compPred $ - LBI.pkgComponents pkgDescr - if null names - then do notice verbosity $ "Package has no buildable " - ++ targetsDescr ++ "." 
- exitSuccess -- See #3215. - - else return $! (ComponentNames names) - -benchmarkAction :: (BenchmarkFlags, BuildFlags, BuildExFlags) - -> [String] -> GlobalFlags - -> IO () -benchmarkAction - (benchmarkFlags, buildFlags, buildExFlags) - extraArgs globalFlags = do - let verbosity = fromFlagOrDefault normal - (benchmarkVerbosity benchmarkFlags) - - (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - distPref <- findSavedDistPref config (benchmarkDistPref benchmarkFlags) - let buildFlags' = buildFlags - { buildVerbosity = benchmarkVerbosity benchmarkFlags } - noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck - (buildOnly buildExFlags) - - let checkFlags = Check $ \_ flags@(configFlags, configExFlags) -> - if fromFlagOrDefault False (configBenchmarks configFlags) - then pure (mempty, flags) - else do - info verbosity "reconfiguring to enable benchmarks" - let flags' = ( configFlags { configBenchmarks = toFlag True } - , configExFlags - ) - pure (Any True, flags') - - - -- reconfigure also checks if we're in a sandbox and reinstalls add-source - -- deps if needed. - config' <- - reconfigure configureAction - verbosity distPref useSandbox noAddSource (buildNumJobs buildFlags') - checkFlags [] globalFlags config - nixShell verbosity distPref globalFlags config $ do - let setupOptions = defaultSetupScriptOptions { useDistPref = distPref } - benchmarkFlags'= benchmarkFlags { benchmarkDistPref = toFlag distPref } - - -- The package was just configured, so the LBI must be available. 
- names <- componentNamesFromLBI verbosity distPref "benchmarks" - (\c -> case c of { LBI.CBench{} -> True; _ -> False; }) - let extraArgs' - | null extraArgs = case names of - ComponentNamesUnknown -> [] - ComponentNames names' -> [ Make.unUnqualComponentName name - | LBI.CBenchName name <- names'] - | otherwise = extraArgs - - maybeWithSandboxDirOnSearchPath useSandbox $ - build verbosity config' distPref buildFlags' extraArgs' - - maybeWithSandboxDirOnSearchPath useSandbox $ - setupWrapper verbosity setupOptions Nothing - Cabal.benchmarkCommand (const benchmarkFlags') (const extraArgs') - -haddockAction :: HaddockFlags -> [String] -> Action -haddockAction haddockFlags extraArgs globalFlags = do - let verbosity = fromFlag (haddockVerbosity haddockFlags) - (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - distPref <- findSavedDistPref config (haddockDistPref haddockFlags) - config' <- - reconfigure configureAction - verbosity distPref useSandbox DontSkipAddSourceDepsCheck NoFlag - mempty [] globalFlags config - nixShell verbosity distPref globalFlags config $ do - let haddockFlags' = defaultHaddockFlags `mappend` - savedHaddockFlags config' `mappend` - haddockFlags { haddockDistPref = toFlag distPref } - setupScriptOptions = defaultSetupScriptOptions - { useDistPref = distPref } - setupWrapper verbosity setupScriptOptions Nothing - haddockCommand (const haddockFlags') (const extraArgs) - when (haddockForHackage haddockFlags == Flag ForHackage) $ do - pkg <- fmap LBI.localPkgDescr (getPersistBuildConfig distPref) - let dest = distPref name <.> "tar.gz" - name = display (packageId pkg) ++ "-docs" - docDir = distPref "doc" "html" - createTarGzFile dest docDir name - notice verbosity $ "Documentation tarball created: " ++ dest - -doctestAction :: DoctestFlags -> [String] -> Action -doctestAction doctestFlags extraArgs _globalFlags = do - let verbosity = fromFlag (doctestVerbosity doctestFlags) - - setupWrapper verbosity 
defaultSetupScriptOptions Nothing - doctestCommand (const doctestFlags) (const extraArgs) - -cleanAction :: CleanFlags -> [String] -> Action -cleanAction cleanFlags extraArgs globalFlags = do - load <- try (loadConfigOrSandboxConfig verbosity globalFlags) - let config = either (\(SomeException _) -> mempty) snd load - distPref <- findSavedDistPref config (cleanDistPref cleanFlags) - let setupScriptOptions = defaultSetupScriptOptions - { useDistPref = distPref - , useWin32CleanHack = True - } - cleanFlags' = cleanFlags { cleanDistPref = toFlag distPref } - setupWrapper verbosity setupScriptOptions Nothing - cleanCommand (const cleanFlags') (const extraArgs) - where - verbosity = fromFlagOrDefault normal (cleanVerbosity cleanFlags) - -listAction :: ListFlags -> [String] -> Action -listAction listFlags extraArgs globalFlags = do - let verbosity = fromFlag (listVerbosity listFlags) - (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity - (globalFlags { globalRequireSandbox = Flag False }) - let configFlags' = savedConfigureFlags config - configFlags = configFlags' { - configPackageDBs = configPackageDBs configFlags' - `mappend` listPackageDBs listFlags - } - globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, _, progdb) <- configCompilerAux' configFlags - withRepoContext verbosity globalFlags' $ \repoContext -> - List.list verbosity - (configPackageDB' configFlags) - repoContext - comp - progdb - listFlags - extraArgs - -infoAction :: InfoFlags -> [String] -> Action -infoAction infoFlags extraArgs globalFlags = do - let verbosity = fromFlag (infoVerbosity infoFlags) - targets <- readUserTargets verbosity extraArgs - (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity - (globalFlags { globalRequireSandbox = Flag False }) - let configFlags' = savedConfigureFlags config - configFlags = configFlags' { - configPackageDBs = configPackageDBs configFlags' - `mappend` infoPackageDBs infoFlags - } - globalFlags' = savedGlobalFlags config 
`mappend` globalFlags - (comp, _, progdb) <- configCompilerAuxEx configFlags - withRepoContext verbosity globalFlags' $ \repoContext -> - List.info verbosity - (configPackageDB' configFlags) - repoContext - comp - progdb - globalFlags' - infoFlags - targets - -updateAction :: UpdateFlags -> [String] -> Action -updateAction updateFlags extraArgs globalFlags = do - let verbosity = fromFlag (updateVerbosity updateFlags) - unless (null extraArgs) $ - die' verbosity $ "'update' doesn't take any extra arguments: " ++ unwords extraArgs - (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity - (globalFlags { globalRequireSandbox = Flag False }) - let globalFlags' = savedGlobalFlags config `mappend` globalFlags - withRepoContext verbosity globalFlags' $ \repoContext -> - update verbosity updateFlags repoContext - -upgradeAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) - -> [String] -> Action -upgradeAction (configFlags, _, _, _) _ _ = die' verbosity $ - "Use the 'cabal install' command instead of 'cabal upgrade'.\n" - ++ "You can install the latest version of a package using 'cabal install'. " - ++ "The 'cabal upgrade' command has been removed because people found it " - ++ "confusing and it often led to broken packages.\n" - ++ "If you want the old upgrade behaviour then use the install command " - ++ "with the --upgrade-dependencies flag (but check first with --dry-run " - ++ "to see what would happen). This will try to pick the latest versions " - ++ "of all dependencies, rather than the usual behaviour of trying to pick " - ++ "installed versions of all dependencies. If you do use " - ++ "--upgrade-dependencies, it is recommended that you do not upgrade core " - ++ "packages (e.g. by using appropriate --constraint= flags)." 
- where - verbosity = fromFlag (configVerbosity configFlags) - -fetchAction :: FetchFlags -> [String] -> Action -fetchAction fetchFlags extraArgs globalFlags = do - let verbosity = fromFlag (fetchVerbosity fetchFlags) - targets <- readUserTargets verbosity extraArgs - config <- loadConfig verbosity (globalConfigFile globalFlags) - let configFlags = savedConfigureFlags config - globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, platform, progdb) <- configCompilerAux' configFlags - withRepoContext verbosity globalFlags' $ \repoContext -> - fetch verbosity - (configPackageDB' configFlags) - repoContext - comp platform progdb globalFlags' fetchFlags - targets - -freezeAction :: FreezeFlags -> [String] -> Action -freezeAction freezeFlags _extraArgs globalFlags = do - let verbosity = fromFlag (freezeVerbosity freezeFlags) - (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - distPref <- findSavedDistPref config NoFlag - nixShell verbosity distPref globalFlags config $ do - let configFlags = savedConfigureFlags config - globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, platform, progdb) <- configCompilerAux' configFlags - - maybeWithSandboxPackageInfo - verbosity configFlags globalFlags' - comp platform progdb useSandbox $ \mSandboxPkgInfo -> - maybeWithSandboxDirOnSearchPath useSandbox $ - withRepoContext verbosity globalFlags' $ \repoContext -> - freeze verbosity - (configPackageDB' configFlags) - repoContext - comp platform progdb - mSandboxPkgInfo - globalFlags' freezeFlags - -genBoundsAction :: FreezeFlags -> [String] -> GlobalFlags -> IO () -genBoundsAction freezeFlags _extraArgs globalFlags = do - let verbosity = fromFlag (freezeVerbosity freezeFlags) - (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - distPref <- findSavedDistPref config NoFlag - nixShell verbosity distPref globalFlags config $ do - let configFlags = savedConfigureFlags config - globalFlags' = savedGlobalFlags 
config `mappend` globalFlags - (comp, platform, progdb) <- configCompilerAux' configFlags - - maybeWithSandboxPackageInfo - verbosity configFlags globalFlags' - comp platform progdb useSandbox $ \mSandboxPkgInfo -> - maybeWithSandboxDirOnSearchPath useSandbox $ - withRepoContext verbosity globalFlags' $ \repoContext -> - genBounds verbosity - (configPackageDB' configFlags) - repoContext - comp platform progdb - mSandboxPkgInfo - globalFlags' freezeFlags - -outdatedAction :: OutdatedFlags -> [String] -> GlobalFlags -> IO () -outdatedAction outdatedFlags _extraArgs globalFlags = do - let verbosity = fromFlag (outdatedVerbosity outdatedFlags) - (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - let configFlags = savedConfigureFlags config - globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, platform, _progdb) <- configCompilerAux' configFlags - withRepoContext verbosity globalFlags' $ \repoContext -> - outdated verbosity outdatedFlags repoContext - comp platform - -uploadAction :: UploadFlags -> [String] -> Action -uploadAction uploadFlags extraArgs globalFlags = do - config <- loadConfig verbosity (globalConfigFile globalFlags) - let uploadFlags' = savedUploadFlags config `mappend` uploadFlags - globalFlags' = savedGlobalFlags config `mappend` globalFlags - tarfiles = extraArgs - when (null tarfiles && not (fromFlag (uploadDoc uploadFlags'))) $ - die' verbosity "the 'upload' command expects at least one .tar.gz archive." - checkTarFiles extraArgs - maybe_password <- - case uploadPasswordCmd uploadFlags' - of Flag (xs:xss) -> Just . Password <$> - getProgramInvocationOutput verbosity - (simpleProgramInvocation xs xss) - _ -> pure $ flagToMaybe $ uploadPassword uploadFlags' - withRepoContext verbosity globalFlags' $ \repoContext -> do - if fromFlag (uploadDoc uploadFlags') - then do - when (length tarfiles > 1) $ - die' verbosity $ "the 'upload' command can only upload documentation " - ++ "for one package at a time." 
- tarfile <- maybe (generateDocTarball config) return $ listToMaybe tarfiles - Upload.uploadDoc verbosity - repoContext - (flagToMaybe $ uploadUsername uploadFlags') - maybe_password - (fromFlag (uploadCandidate uploadFlags')) - tarfile - else do - Upload.upload verbosity - repoContext - (flagToMaybe $ uploadUsername uploadFlags') - maybe_password - (fromFlag (uploadCandidate uploadFlags')) - tarfiles - where - verbosity = fromFlag (uploadVerbosity uploadFlags) - checkTarFiles tarfiles - | not (null otherFiles) - = die' verbosity $ "the 'upload' command expects only .tar.gz archives: " - ++ intercalate ", " otherFiles - | otherwise = sequence_ - [ do exists <- doesFileExist tarfile - unless exists $ die' verbosity $ "file not found: " ++ tarfile - | tarfile <- tarfiles ] - - where otherFiles = filter (not . isTarGzFile) tarfiles - isTarGzFile file = case splitExtension file of - (file', ".gz") -> takeExtension file' == ".tar" - _ -> False - generateDocTarball config = do - notice verbosity $ - "No documentation tarball specified. " - ++ "Building a documentation tarball with default settings...\n" - ++ "If you need to customise Haddock options, " - ++ "run 'haddock --for-hackage' first " - ++ "to generate a documentation tarball." 
- haddockAction (defaultHaddockFlags { haddockForHackage = Flag ForHackage }) - [] globalFlags - distPref <- findSavedDistPref config NoFlag - pkg <- fmap LBI.localPkgDescr (getPersistBuildConfig distPref) - return $ distPref display (packageId pkg) ++ "-docs" <.> "tar.gz" - -checkAction :: Flag Verbosity -> [String] -> Action -checkAction verbosityFlag extraArgs _globalFlags = do - let verbosity = fromFlag verbosityFlag - unless (null extraArgs) $ - die' verbosity $ "'check' doesn't take any extra arguments: " ++ unwords extraArgs - allOk <- Check.check (fromFlag verbosityFlag) - unless allOk exitFailure - -formatAction :: Flag Verbosity -> [String] -> Action -formatAction verbosityFlag extraArgs _globalFlags = do - let verbosity = fromFlag verbosityFlag - path <- case extraArgs of - [] -> do cwd <- getCurrentDirectory - tryFindPackageDesc cwd - (p:_) -> return p - pkgDesc <- readGenericPackageDescription verbosity path - -- Uses 'writeFileAtomic' under the hood. - writeGenericPackageDescription path pkgDesc - -uninstallAction :: Flag Verbosity -> [String] -> Action -uninstallAction verbosityFlag extraArgs _globalFlags = do - let verbosity = fromFlag verbosityFlag - package = case extraArgs of - p:_ -> p - _ -> "PACKAGE_NAME" - die' verbosity $ "This version of 'cabal-install' does not support the 'uninstall' " - ++ "operation. " - ++ "It will likely be implemented at some point in the future; " - ++ "in the meantime you're advised to use either 'ghc-pkg unregister " - ++ package ++ "' or 'cabal sandbox hc-pkg -- unregister " ++ package ++ "'." 
- - -sdistAction :: (SDistFlags, SDistExFlags) -> [String] -> Action -sdistAction (sdistFlags, sdistExFlags) extraArgs globalFlags = do - let verbosity = fromFlag (sDistVerbosity sdistFlags) - unless (null extraArgs) $ - die' verbosity $ "'sdist' doesn't take any extra arguments: " ++ unwords extraArgs - load <- try (loadConfigOrSandboxConfig verbosity globalFlags) - let config = either (\(SomeException _) -> mempty) snd load - distPref <- findSavedDistPref config (sDistDistPref sdistFlags) - let sdistFlags' = sdistFlags { sDistDistPref = toFlag distPref } - sdist sdistFlags' sdistExFlags - -reportAction :: ReportFlags -> [String] -> Action -reportAction reportFlags extraArgs globalFlags = do - let verbosity = fromFlag (reportVerbosity reportFlags) - unless (null extraArgs) $ - die' verbosity $ "'report' doesn't take any extra arguments: " ++ unwords extraArgs - config <- loadConfig verbosity (globalConfigFile globalFlags) - let globalFlags' = savedGlobalFlags config `mappend` globalFlags - reportFlags' = savedReportFlags config `mappend` reportFlags - - withRepoContext verbosity globalFlags' $ \repoContext -> - Upload.report verbosity repoContext - (flagToMaybe $ reportUsername reportFlags') - (flagToMaybe $ reportPassword reportFlags') - -runAction :: (BuildFlags, BuildExFlags) -> [String] -> Action -runAction (buildFlags, buildExFlags) extraArgs globalFlags = do - let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags) - (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - distPref <- findSavedDistPref config (buildDistPref buildFlags) - let noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck - (buildOnly buildExFlags) - -- reconfigure also checks if we're in a sandbox and reinstalls add-source - -- deps if needed. 
- config' <- - reconfigure configureAction - verbosity distPref useSandbox noAddSource (buildNumJobs buildFlags) - mempty [] globalFlags config - nixShell verbosity distPref globalFlags config $ do - lbi <- getPersistBuildConfig distPref - (exe, exeArgs) <- splitRunArgs verbosity lbi extraArgs - - maybeWithSandboxDirOnSearchPath useSandbox $ - build verbosity config' distPref buildFlags ["exe:" ++ display (exeName exe)] - - maybeWithSandboxDirOnSearchPath useSandbox $ - run verbosity lbi exe exeArgs - -getAction :: GetFlags -> [String] -> Action -getAction getFlags extraArgs globalFlags = do - let verbosity = fromFlag (getVerbosity getFlags) - targets <- readUserTargets verbosity extraArgs - (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity - (globalFlags { globalRequireSandbox = Flag False }) - let globalFlags' = savedGlobalFlags config `mappend` globalFlags - withRepoContext verbosity (savedGlobalFlags config) $ \repoContext -> - get verbosity - repoContext - globalFlags' - getFlags - targets - -unpackAction :: GetFlags -> [String] -> Action -unpackAction getFlags extraArgs globalFlags = do - getAction getFlags extraArgs globalFlags - -initAction :: InitFlags -> [String] -> Action -initAction initFlags extraArgs globalFlags = do - let verbosity = fromFlag (initVerbosity initFlags) - when (extraArgs /= []) $ - die' verbosity $ "'init' doesn't take any extra arguments: " ++ unwords extraArgs - (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity - (globalFlags { globalRequireSandbox = Flag False }) - let configFlags = savedConfigureFlags config - let globalFlags' = savedGlobalFlags config `mappend` globalFlags - (comp, _, progdb) <- configCompilerAux' configFlags - withRepoContext verbosity globalFlags' $ \repoContext -> - initCabal verbosity - (configPackageDB' configFlags) - repoContext - comp - progdb - initFlags - -sandboxAction :: SandboxFlags -> [String] -> Action -sandboxAction sandboxFlags extraArgs globalFlags = do - let verbosity = 
fromFlag (sandboxVerbosity sandboxFlags) - case extraArgs of - -- Basic sandbox commands. - ["init"] -> sandboxInit verbosity sandboxFlags globalFlags - ["delete"] -> sandboxDelete verbosity sandboxFlags globalFlags - ("add-source":extra) -> do - when (noExtraArgs extra) $ - die' verbosity "The 'sandbox add-source' command expects at least one argument" - sandboxAddSource verbosity extra sandboxFlags globalFlags - ("delete-source":extra) -> do - when (noExtraArgs extra) $ - die' verbosity ("The 'sandbox delete-source' command expects " ++ - "at least one argument") - sandboxDeleteSource verbosity extra sandboxFlags globalFlags - ["list-sources"] -> sandboxListSources verbosity sandboxFlags globalFlags - - -- More advanced commands. - ("hc-pkg":extra) -> do - when (noExtraArgs extra) $ - die' verbosity $ "The 'sandbox hc-pkg' command expects at least one argument" - sandboxHcPkg verbosity sandboxFlags globalFlags extra - ["buildopts"] -> die' verbosity "Not implemented!" - - -- Hidden commands. - ["dump-pkgenv"] -> dumpPackageEnvironment verbosity sandboxFlags globalFlags - - -- Error handling. - [] -> die' verbosity $ "Please specify a subcommand (see 'help sandbox')" - _ -> die' verbosity $ "Unknown 'sandbox' subcommand: " ++ unwords extraArgs - - where - noExtraArgs = (<1) . 
length - -execAction :: ExecFlags -> [String] -> Action -execAction execFlags extraArgs globalFlags = do - let verbosity = fromFlag (execVerbosity execFlags) - (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags - distPref <- findSavedDistPref config (execDistPref execFlags) - let configFlags = savedConfigureFlags config - configFlags' = configFlags { configDistPref = Flag distPref } - (comp, platform, progdb) <- getPersistOrConfigCompiler configFlags' - exec verbosity useSandbox comp platform progdb extraArgs - -userConfigAction :: UserConfigFlags -> [String] -> Action -userConfigAction ucflags extraArgs globalFlags = do - let verbosity = fromFlag (userConfigVerbosity ucflags) - force = fromFlag (userConfigForce ucflags) - extraLines = fromFlag (userConfigAppendLines ucflags) - case extraArgs of - ("init":_) -> do - path <- configFile - fileExists <- doesFileExist path - if (not fileExists || (fileExists && force)) - then void $ createDefaultConfigFile verbosity extraLines path - else die' verbosity $ path ++ " already exists." - ("diff":_) -> mapM_ putStrLn =<< userConfigDiff verbosity globalFlags extraLines - ("update":_) -> userConfigUpdate verbosity globalFlags extraLines - -- Error handling. - [] -> die' verbosity $ "Please specify a subcommand (see 'help user-config')" - _ -> die' verbosity $ "Unknown 'user-config' subcommand: " ++ unwords extraArgs - where configFile = getConfigFilePath (globalConfigFile globalFlags) - --- | See 'Distribution.Client.Install.withWin32SelfUpgrade' for details. --- -win32SelfUpgradeAction :: Win32SelfUpgradeFlags -> [String] -> Action -win32SelfUpgradeAction selfUpgradeFlags (pid:path:_extraArgs) _globalFlags = do - let verbosity = fromFlag (win32SelfUpgradeVerbosity selfUpgradeFlags) - Win32SelfUpgrade.deleteOldExeFile verbosity (read pid) path -- TODO: eradicateNoParse -win32SelfUpgradeAction _ _ _ = return () - --- | Used as an entry point when cabal-install needs to invoke itself --- as a setup script. 
This can happen e.g. when doing parallel builds. --- -actAsSetupAction :: ActAsSetupFlags -> [String] -> Action -actAsSetupAction actAsSetupFlags args _globalFlags = - let bt = fromFlag (actAsSetupBuildType actAsSetupFlags) - in case bt of - Simple -> Simple.defaultMainArgs args - Configure -> Simple.defaultMainWithHooksArgs - Simple.autoconfUserHooks args - Make -> Make.defaultMainArgs args - Custom -> error "actAsSetupAction Custom" - -manpageAction :: [CommandSpec action] -> Flag Verbosity -> [String] -> Action -manpageAction commands flagVerbosity extraArgs _ = do - let verbosity = fromFlag flagVerbosity - unless (null extraArgs) $ - die' verbosity $ "'manpage' doesn't take any extra arguments: " ++ unwords extraArgs - pname <- getProgName - let cabalCmd = if takeExtension pname == ".exe" - then dropExtension pname - else pname - putStrLn $ manpage cabalCmd commands diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/README.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/README.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/README.md 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,155 +0,0 @@ -The cabal-install package -========================= - -See the [Cabal web site] for more information. - -The `cabal-install` package provides a command line tool named `cabal`. -It uses the [Cabal] library and provides a user interface to the -Cabal/[Hackage] build automation and package management system. It can -build and install both local and remote packages, including -dependencies. - -[Cabal web site]: http://www.haskell.org/cabal/ -[Cabal]: ../Cabal/README.md - -Installing the `cabal` command-line tool -======================================== - -The `cabal-install` package requires a number of other packages, most of -which come with a standard GHC installation. 
It requires the [network] -package, which is sometimes packaged separately by Linux distributions; -for example, on Debian or Ubuntu, it is located in the -"libghc6-network-dev" package. - -`cabal` requires a few other Haskell packages that are not always -installed. The exact list is specified in the [.cabal] file or in the -[bootstrap.sh] file. All these packages are available from [Hackage]. - -Note that on some Unix systems you may need to install an additional -zlib development package using your system package manager; for example, -on Debian or Ubuntu, it is located in the "zlib1g-dev" package; on -Fedora, it is located in the "zlib-devel" package. It is required -because the Haskell zlib package uses the system zlib C library and -header files. - -The `cabal-install` package is now part of the [Haskell Platform], so you -do not usually need to install it separately. However, if you are -starting from a minimal GHC installation, you need to install -`cabal-install` manually. Since it is an ordinary Cabal package, -`cabal-install` can be built the standard way; to facilitate this, the -process has been partially automated. It is described below. - -[.cabal]: cabal-install.cabal -[network]: http://hackage.haskell.org/package/network -[Haskell Platform]: http://www.haskell.org/platform/ - -Quick start on Unix-like systems --------------------------------- - -As a convenience for users on Unix-like systems, there is a -[bootstrap.sh] script that will download and install each of -`cabal-install`'s dependencies in turn. - - $ ./bootstrap.sh - -It will download and install the dependencies. The script will install the -library packages (vanilla, profiling and shared) into `$HOME/.cabal/` and the -`cabal` program into `$HOME/.cabal/bin/`. 
If you don't want to install profiling -and shared versions of the libraries, use - - $ EXTRA_CONFIGURE_OPTS="" ./bootstrap.sh - -You then have the choice either to place `$HOME/.cabal/bin` on your -`$PATH` or move the `cabal` program to somewhere on your `$PATH`. Next, -you can get the latest list of packages by running: - - $ cabal update - -This will also create a default configuration file, if it does not -already exist, at `$HOME/.cabal/config`. - -By default, `cabal` will install programs to `$HOME/.cabal/bin`. If you -do not want to add this directory to your `$PATH`, you can change -the setting in the config file; for example, you could use the -following: - - symlink-bindir: $HOME/bin - - -Quick start on Windows systems ------------------------------- - -For Windows users, a precompiled program ([cabal.exe]) is provided. -Download and put it somewhere on your `%PATH%` (for example, -`C:\Program Files\Haskell\bin`.) - -Next, you can get the latest list of packages by running: - - $ cabal update - -This will also create a default configuration file (if it does not -already exist) at -`C:\Documents and Settings\%USERNAME%\Application Data\cabal\config`. - -[cabal.exe]: http://www.haskell.org/cabal/release/cabal-install-latest/ - -Using `cabal` -============= - -There are two sets of commands: commands for working with a local -project build tree and those for working with packages distributed -from [Hackage]. - -For the list of the full set of commands and flags for each command, -run: - - $ cabal help - - -Commands for developers for local build trees ---------------------------------------------- - -The commands for local project build trees are almost the same as the -`runghc Setup` command-line interface you may already be familiar with. 
-In particular, it has the following commands: - - * `cabal configure` - * `cabal build` - * `cabal haddock` - * `cabal clean` - * `cabal sdist` - -The `install` command is somewhat different; it is an all-in-one -operation. If you run `cabal install` in your build tree, it will -configure, build, and install. It takes all the flags that `configure` -takes such as `--global` and `--prefix`. - -In addition, `cabal` will download and install any dependencies that are -not already installed. It can also rebuild packages to ensure a -consistent set of dependencies. - - -Commands for released Hackage packages --------------------------------------- - - $ cabal update - -This command gets the latest list of packages from the [Hackage] server. -On occasion, this command must be run manually--for instance, if you -want to install a newly released package. - - $ cabal install xmonad - -This command installs one or more named packages, and all their -dependencies, from Hackage. By default, it installs the latest available -version; however, you may specify exact versions or version ranges. For -example, `cabal install alex-2.2` or `cabal install parsec < 3`. - - $ cabal list xml - -This does a search of the installed and available packages. It does a -case-insensitive substring match on the package name. 
- - -[Hackage]: http://hackage.haskell.org -[bootstrap.sh]: bootstrap.sh diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/Setup.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,63 +0,0 @@ -import Distribution.PackageDescription ( PackageDescription ) -import Distribution.Simple ( defaultMainWithHooks - , simpleUserHooks - , postBuild - , postCopy - , postInst - ) -import Distribution.Simple.InstallDirs ( mandir - , CopyDest (NoCopyDest) - ) -import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo(..) - , absoluteInstallDirs - ) -import Distribution.Simple.Utils ( installOrdinaryFiles - , notice ) -import Distribution.Simple.Setup ( buildVerbosity - , copyDest - , copyVerbosity - , fromFlag - , installVerbosity - ) -import Distribution.Verbosity ( Verbosity ) - -import System.IO ( openFile - , IOMode (WriteMode) - ) -import System.Process ( runProcess ) -import System.FilePath ( () ) - --- WARNING to editors of this file: --- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ --- At this moment (Cabal 1.23), whatever you write here must be --- compatible with ALL Cabal libraries which we support bootstrapping --- with. This is because pre-setup-depends versions of cabal-install will --- build Setup.hs against the version of Cabal which MATCHES the library --- that cabal-install was built against. There is no way of overriding --- this behavior without bumping the required 'cabal-version' in our --- Cabal file. Travis will let you know if we fail to install from --- tarball! 
- -main :: IO () -main = defaultMainWithHooks $ simpleUserHooks - { postBuild = \ _ flags _ lbi -> - buildManpage lbi (fromFlag $ buildVerbosity flags) - , postCopy = \ _ flags pkg lbi -> - installManpage pkg lbi (fromFlag $ copyVerbosity flags) (fromFlag $ copyDest flags) - , postInst = \ _ flags pkg lbi -> - installManpage pkg lbi (fromFlag $ installVerbosity flags) NoCopyDest - } - -buildManpage :: LocalBuildInfo -> Verbosity -> IO () -buildManpage lbi verbosity = do - let cabal = buildDir lbi "cabal/cabal" - manpage = buildDir lbi "cabal/cabal.1" - manpageHandle <- openFile manpage WriteMode - notice verbosity ("Generating manual page " ++ manpage ++ " ...") - _ <- runProcess cabal ["manpage"] Nothing Nothing Nothing (Just manpageHandle) Nothing - return () - -installManpage :: PackageDescription -> LocalBuildInfo -> Verbosity -> CopyDest -> IO () -installManpage pkg lbi verbosity copy = do - let destDir = mandir (absoluteInstallDirs pkg lbi copy) "man1" - installOrdinaryFiles verbosity destDir [(buildDir lbi "cabal", "cabal.1")] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: p q diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/p/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/p/p.cabal --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/p/p.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -library - exposed-modules: P - build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/p/P.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/p/P.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/p/P.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/p/P.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -module P where - -p :: Int -p = this_is_not_expected_to_compile diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/q/q.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: q -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -library - exposed-modules: Q - build-depends: base - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/q/Q.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/q/Q.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/q/Q.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/keep-going/q/Q.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -module Q where - -q :: Int -q = 42 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/local-tarball/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/local-tarball/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/local-tarball/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/local-tarball/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -packages: p-0.1.tar.gz - q/ Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/local-tarball/p-0.1.tar.gz and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/local-tarball/p-0.1.tar.gz differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/local-tarball/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/local-tarball/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/local-tarball/q/q.cabal 2018-10-17 15:59:07.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/local-tarball/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -name: q -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -library - exposed-modules: Q - build-depends: base, p diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/local-tarball/q/Q.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/local-tarball/q/Q.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/local-tarball/q/Q.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/local-tarball/q/Q.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -module Q where - -import P - -q = p ++ " world" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom1/a.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom1/a.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom1/a.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom1/a.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,13 +0,0 @@ -name: a -version: 0.1 -build-type: Custom -cabal-version: >= 1.10 - --- explicit setup deps: -custom-setup - setup-depends: base, Cabal >= 1.18 - -library - exposed-modules: A - build-depends: base - default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom1/A.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom1/A.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom1/A.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom1/A.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -module A where - -a :: Int -a = 42 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom1/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom1/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom1/Setup.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom1/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain >> writeFile "marker" "ok" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom2/a.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom2/a.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom2/a.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom2/a.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,11 +0,0 @@ -name: a -version: 0.1 -build-type: Custom -cabal-version: >= 1.10 - --- no explicit setup deps - -library - exposed-modules: A - build-depends: base - default-language: Haskell2010 diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom2/A.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom2/A.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom2/A.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom2/A.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -module A where - -a :: Int -a = 42 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom2/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom2/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom2/Setup.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-custom2/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain >> writeFile "marker" "ok" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-simple/a.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-simple/a.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-simple/a.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-simple/a.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: a -version: 0.1 -build-type: Simple -cabal-version: >= 1.10 - -library - exposed-modules: A - build-depends: base - 
default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-simple/A.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-simple/A.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-simple/A.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-simple/A.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -module A where - -a :: Int -a = 42 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-simple/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-simple/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-simple/Setup.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/build/setup-simple/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMain diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/bad-config/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/bad-config/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/bad-config/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/bad-config/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -packages: - -package foo - ghc-location: bar diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/build/a.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/build/a.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/build/a.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/build/a.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -name: a -version: 1 -build-type: Simple -cabal-version: >= 1.2 - -executable a - main-is: Main.hs - build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/build/Main.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/build/Main.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/build/Main.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/build/Main.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -main = thisNameDoesNotExist diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/configure/a.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/configure/a.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/configure/a.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/configure/a.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: a -version: 1 -build-type: Simple --- This used to be a blank package with no components, --- but I 
refactored new-build so that if a package has --- no buildable components, we skip configuring it. --- So put in a (failing) component so that we try to --- configure. -executable a diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/no-pkg/empty.in cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/no-pkg/empty.in --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/no-pkg/empty.in 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/no-pkg/empty.in 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -this is just here to ensure the source control creates the dir diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/no-pkg2/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/no-pkg2/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/no-pkg2/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/exception/no-pkg2/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: ./ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: p q diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/p/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/p/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/p/p.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -library - exposed-modules: P - build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/p/P.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/p/P.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/p/P.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/p/P.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -module P where - -p :: Int -p = 42 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/q/q.cabal 2018-10-17 15:59:07.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: q -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -library - exposed-modules: Q - build-depends: base - -- missing a dep on p here, so expect failure initially diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/q/Q.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/q/Q.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/q/Q.hs 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/regression/3324/q/Q.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -module Q where - -import P - -q :: Int -q = p diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/all-disabled/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/all-disabled/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/all-disabled/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/all-disabled/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: ./ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/all-disabled/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/all-disabled/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/all-disabled/p.cabal 2018-10-17 
15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/all-disabled/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,23 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -library - exposed-modules: Q - build-depends: base, filepath - buildable: False - -executable buildable-false - main-is: Main.hs - buildable: False - -test-suite solver-disabled - type: exitcode-stdio-1.0 - main-is: Test.hs - build-depends: a-package-that-does-not-exist - -benchmark user-disabled - type: exitcode-stdio-1.0 - main-is: Test.hs - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/benchmarks-disabled/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/benchmarks-disabled/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/benchmarks-disabled/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/benchmarks-disabled/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: ./ ./q/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/benchmarks-disabled/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/benchmarks-disabled/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/benchmarks-disabled/p.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/benchmarks-disabled/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.10 - -benchmark 
solver-disabled - type: exitcode-stdio-1.0 - main-is: Test.hs - build-depends: a-package-that-does-not-exist - -benchmark user-disabled - type: exitcode-stdio-1.0 - main-is: Test.hs - build-depends: base - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/benchmarks-disabled/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/benchmarks-disabled/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/benchmarks-disabled/q/q.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/benchmarks-disabled/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -name: q -version: 0.1 -build-type: Simple -cabal-version: >= 1.10 - -benchmark buildable-false - type: exitcode-stdio-1.0 - main-is: Main.hs - buildable: False - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/complex/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/complex/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/complex/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/complex/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: q/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/complex/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/complex/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/complex/q/q.cabal 
2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/complex/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,22 +0,0 @@ -name: q -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -library - exposed-modules: Q - build-depends: base, filepath - -executable buildable-false - main-is: Main.hs - buildable: False - -test-suite solver-disabled - type: exitcode-stdio-1.0 - main-is: Test.hs - build-depends: a-package-that-does-not-exist - -benchmark user-disabled - type: exitcode-stdio-1.0 - main-is: Test.hs - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/empty/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/empty/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/empty/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/empty/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/empty-pkg/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/empty-pkg/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/empty-pkg/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/empty-pkg/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: ./ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/empty-pkg/p.cabal 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/empty-pkg/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/empty-pkg/p.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/empty-pkg/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/exes-disabled/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/exes-disabled/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/exes-disabled/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/exes-disabled/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: p/ q/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/exes-disabled/p/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/exes-disabled/p/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/exes-disabled/p/p.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/exes-disabled/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -executable p - main-is: P.hs - build-depends: base - buildable: False diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/exes-disabled/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/exes-disabled/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/exes-disabled/q/q.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/exes-disabled/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: q -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -executable q - main-is: Q.hs - build-depends: base - buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/lib-only/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/lib-only/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/lib-only/p.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/lib-only/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -library - exposed-modules: P - build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/libs-disabled/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/libs-disabled/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/libs-disabled/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/libs-disabled/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: p/ q/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/libs-disabled/p/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/libs-disabled/p/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/libs-disabled/p/p.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/libs-disabled/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -library - exposed-modules: P - build-depends: base - buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/libs-disabled/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/libs-disabled/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/libs-disabled/q/q.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/libs-disabled/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: q -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -library - exposed-modules: Q - build-depends: base - buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-exes/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-exes/cabal.project --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-exes/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-exes/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: ./ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-exes/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-exes/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-exes/p.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-exes/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.10 - -executable p1 - main-is: P1.hs - build-depends: base - -executable p2 - main-is: P2.hs - build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-libs/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-libs/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-libs/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-libs/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: p/ q/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-libs/p/p.cabal 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-libs/p/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-libs/p/p.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-libs/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -library - exposed-modules: P - build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-libs/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-libs/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-libs/q/q.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-libs/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -name: q -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -library - exposed-modules: Q - build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-tests/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-tests/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-tests/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-tests/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: ./ diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-tests/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-tests/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-tests/p.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/multiple-tests/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,14 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.10 - -test-suite p1 - type: exitcode-stdio-1.0 - main-is: P1.hs - build-depends: base - -test-suite p2 - type: exitcode-stdio-1.0 - main-is: P2.hs - build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/simple/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/simple/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/simple/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/simple/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: ./ q/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/simple/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/simple/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/simple/p.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/simple/p.cabal 1970-01-01 00:00:00.000000000 
+0000 @@ -1,12 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -library - exposed-modules: P - build-depends: base - -executable pexe - main-is: Main.hs - other-modules: PMain diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/simple/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/simple/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/simple/q/q.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/simple/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -name: q -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -library - exposed-modules: QQ - build-depends: base - -executable qexe - main-is: Main.hs - other-modules: QMain diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/test-only/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/test-only/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/test-only/p.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/test-only/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,9 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.2 - -test-suite pexe - type: exitcode-stdio-1.0 - main-is: Main.hs - other-modules: PMain diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/tests-disabled/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/tests-disabled/cabal.project --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/tests-disabled/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/tests-disabled/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: ./ ./q/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/tests-disabled/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/tests-disabled/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/tests-disabled/p.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/tests-disabled/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.10 - -test-suite solver-disabled - type: exitcode-stdio-1.0 - main-is: Test.hs - build-depends: a-package-that-does-not-exist - -test-suite user-disabled - type: exitcode-stdio-1.0 - main-is: Test.hs - build-depends: base - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/tests-disabled/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/tests-disabled/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/tests-disabled/q/q.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/tests-disabled/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -name: q -version: 0.1 -build-type: Simple -cabal-version: >= 1.10 - -test-suite buildable-false - type: 
exitcode-stdio-1.0 - main-is: Main.hs - buildable: False - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/variety/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/variety/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/variety/cabal.project 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/variety/cabal.project 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -packages: ./ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/variety/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/variety/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/variety/p.cabal 2018-10-17 15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/IntegrationTests2/targets/variety/p.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -name: p -version: 0.1 -build-type: Simple -cabal-version: >= 1.10 - -library - exposed-modules: P - build-depends: base - -foreign-library libp - type: native-shared - other-modules: FLib - -executable an-exe - main-is: Main.hs - other-modules: AModule - -test-suite a-testsuite - type: exitcode-stdio-1.0 - main-is: Test.hs - other-modules: AModule - -benchmark a-benchmark - type: exitcode-stdio-1.0 - main-is: Test.hs - other-modules: AModule - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/README.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/README.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.0.0/tests/README.md 2018-10-17 
15:59:07.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.0.0/tests/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -Integration Tests -================= - -Each test is a shell script. Tests that share files (e.g., `.cabal` files) are -grouped under a common sub-directory of [IntegrationTests]. The framework -copies the whole group's directory before running each test, which allows tests -to reuse files, yet run independently. A group's tests are further divided into -`should_run` and `should_fail` directories, based on the expected exit status. -For example, the test -`IntegrationTests/exec/should_fail/exit_with_failure_without_args.sh` has access -to all files under `exec` and is expected to fail. - -Tests can specify their expected output. For a test named `x.sh`, `x.out` -specifies `stdout` and `x.err` specifies `stderr`. Both files are optional. -The framework expects an exact match between lines in the file and output, -except for lines beginning with "RE:", which are interpreted as regular -expressions. - -[IntegrationTests.hs] defines several environment variables: - -* `CABAL` - The path to the executable being tested. -* `GHC_PKG` - The path to ghc-pkg. -* `CABAL_ARGS` - A common set of arguments for running cabal. -* `CABAL_ARGS_NO_CONFIG_FILE` - `CABAL_ARGS` without `--config-file`. 
- -[IntegrationTests]: IntegrationTests -[IntegrationTests.hs]: IntegrationTests.hs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/bash-completion/cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/bash-completion/cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/bash-completion/cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/bash-completion/cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,94 @@ +# cabal command line completion +# Copyright 2007-2008 "Lennart Kolmodin" +# "Duncan Coutts" +# + +# List cabal targets by type, pass: +# - test-suite for test suites +# - benchmark for benchmarks +# - executable for executables +# - executable|test-suite|benchmark for the three +_cabal_list() +{ + for f in ./*.cabal; do + grep -Ei "^[[:space:]]*($1)[[:space:]]" "$f" | + sed -e "s/.* \([^ ]*\).*/\1/" + done +} + +# List possible targets depending on the command supplied as parameter. The +# ideal option would be to implement this via --list-options on cabal directly. +# This is a temporary workaround. +_cabal_targets() +{ + # If command ($*) contains build, repl, test or bench completes with + # targets of according type. + local comp + for comp in "$@"; do + [ "$comp" == new-build ] && _cabal_list "executable|test-suite|benchmark" && break + [ "$comp" == build ] && _cabal_list "executable|test-suite|benchmark" && break + [ "$comp" == repl ] && _cabal_list "executable|test-suite|benchmark" && break + [ "$comp" == run ] && _cabal_list "executable" && break + [ "$comp" == test ] && _cabal_list "test-suite" && break + [ "$comp" == bench ] && _cabal_list "benchmark" && break + done +} + +# List possible subcommands of a cabal subcommand. +# +# In example "sandbox" is a cabal subcommand that itself has subcommands. Since +# "cabal --list-options" doesn't work in such cases we have to get the list +# using other means. 
+_cabal_subcommands() +{ + local word + for word in "$@"; do + case "$word" in + sandbox) + # Get list of "cabal sandbox" subcommands from its help message. + "$1" help sandbox | + sed -n '1,/^Subcommands:$/d;/^Flags for sandbox:$/,$d;/^ /d;s/^\(.*\):/\1/p' + break # Terminate for loop. + ;; + esac + done +} + +__cabal_has_doubledash () +{ + local c=1 + # Ignore the last word, because it is replaced anyways. + # This allows expansion for flags on "cabal foo --", + # but does not try to complete after "cabal foo -- ". + local n=$((${#COMP_WORDS[@]} - 1)) + while [ $c -lt $n ]; do + if [ "--" = "${COMP_WORDS[c]}" ]; then + return 0 + fi + ((c++)) + done + return 1 +} + +_cabal() +{ + # no completion past cabal arguments. + __cabal_has_doubledash && return + + # get the word currently being completed + local cur + cur=${COMP_WORDS[$COMP_CWORD]} + + # create a command line to run + local cmd + # copy all words the user has entered + cmd=( ${COMP_WORDS[@]} ) + + # replace the current word with --list-options + cmd[${COMP_CWORD}]="--list-options" + + # the resulting completions should be put into this array + COMPREPLY=( $( compgen -W "$( eval "${cmd[@]}" 2>/dev/null ) $( _cabal_targets "${cmd[@]}" ) $( _cabal_subcommands "${COMP_WORDS[@]}" )" -- "$cur" ) ) +} + +complete -F _cabal -o default cabal diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/bootstrap.sh cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/bootstrap.sh --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/bootstrap.sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/bootstrap.sh 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,536 @@ +#!/bin/sh +set -e + +# A script to bootstrap cabal-install. + +# It works by downloading and installing the Cabal, zlib and +# HTTP packages. It then installs cabal-install itself. +# It expects to be run inside the cabal-install directory. 
+ +# Install settings, you can override these by setting environment vars. E.g. if +# you don't want profiling and dynamic versions of libraries to be installed in +# addition to vanilla, run 'EXTRA_CONFIGURE_OPTS="" ./bootstrap.sh' + +#VERBOSE +DEFAULT_CONFIGURE_OPTS="--enable-library-profiling --enable-shared" +EXTRA_CONFIGURE_OPTS=${EXTRA_CONFIGURE_OPTS-$DEFAULT_CONFIGURE_OPTS} +#EXTRA_BUILD_OPTS +#EXTRA_INSTALL_OPTS + +die() { + printf "\nError during cabal-install bootstrap:\n%s\n" "$1" >&2 + exit 2 +} + +# programs, you can override these by setting environment vars +GHC="${GHC:-ghc}" +GHC_PKG="${GHC_PKG:-ghc-pkg}" +GHC_VER="$(${GHC} --numeric-version)" +HADDOCK=${HADDOCK:-haddock} +WGET="${WGET:-wget}" +CURL="${CURL:-curl}" +FETCH="${FETCH:-fetch}" +TAR="${TAR:-tar}" +GZIP_PROGRAM="${GZIP_PROGRAM:-gzip}" + +# The variable SCOPE_OF_INSTALLATION can be set on the command line to +# use/install the libaries needed to build cabal-install to a custom package +# database instead of the user or global package database. +# +# Example: +# +# $ ghc-pkg init /my/package/database +# $ SCOPE_OF_INSTALLATION='--package-db=/my/package/database' ./bootstrap.sh +# +# You can also combine SCOPE_OF_INSTALLATION with PREFIX: +# +# $ ghc-pkg init /my/prefix/packages.conf.d +# $ SCOPE_OF_INSTALLATION='--package-db=/my/prefix/packages.conf.d' \ +# PREFIX=/my/prefix ./bootstrap.sh +# +# If you use the --global,--user or --sandbox arguments, this will +# override the SCOPE_OF_INSTALLATION setting and not use the package +# database you pass in the SCOPE_OF_INSTALLATION variable. + +SCOPE_OF_INSTALLATION="${SCOPE_OF_INSTALLATION:---user}" +DEFAULT_PREFIX="${HOME}/.cabal" + +TMPDIR=$(mktemp -d -p /tmp -t cabal-XXXXXXX || mktemp -d -t cabal-XXXXXXX) +export TMPDIR + +# Check for a C compiler, using user-set $CC, if any, first. +for c in $CC gcc clang cc icc; do + $c --version 1>/dev/null 2>&1 && CC=$c && + echo "Using $c for C compiler. If this is not what you want, set CC." 
>&2 && + break +done + +# None found. +[ -"$CC"- = -""- ] && die 'C compiler not found (or could not be run). + If a C compiler is installed make sure it is on your PATH, or set $CC.' + +# Find the correct linker/linker-wrapper. +# +# See https://github.com/haskell/cabal/pull/4187#issuecomment-269074153. +LINK="$(for link in collect2 ld; do + if [ $($CC -print-prog-name=$link) = $link ] + then + continue + else + $CC -print-prog-name=$link && break + fi + done)" + +# Fall back to "ld"... might work. +[ -$LINK- = -""- ] && LINK=ld + +# And finally, see if we can compile and link something. + echo 'int main(){}' | $CC -xc - -o /dev/null || + die "C compiler and linker could not compile a simple test program. + Please check your toolchain." + +# Warn that were's overriding $LD if set (if you want). +[ -"$LD"- != -""- ] && [ -"$LD"- != -"$LINK"- ] && + echo "Warning: value set in $LD is not the same as C compiler's $LINK." >&2 + echo "Using $LINK instead." >&2 + +# Set LD, overriding environment if necessary. +export LD=$LINK + +# Check we're in the right directory, etc. +grep "cabal-install" ./cabal-install.cabal > /dev/null 2>&1 || + die "The bootstrap.sh script must be run in the cabal-install directory" + +${GHC} --numeric-version > /dev/null 2>&1 || + die "${GHC} not found (or could not be run). + If ghc is installed, make sure it is on your PATH, + or set the GHC and GHC_PKG vars." + +${GHC_PKG} --version > /dev/null 2>&1 || die "${GHC_PKG} not found." + +GHC_PKG_VER="$(${GHC_PKG} --version | cut -d' ' -f 5)" + +[ ${GHC_VER} = ${GHC_PKG_VER} ] || + die "Version mismatch between ${GHC} and ${GHC_PKG}. + If you set the GHC variable then set GHC_PKG too." + +JOBS="-j1" +while [ "$#" -gt 0 ]; do + case "${1}" in + "--user") + SCOPE_OF_INSTALLATION="${1}" + shift;; + "--global") + SCOPE_OF_INSTALLATION="${1}" + DEFAULT_PREFIX="/usr/local" + shift;; + "--sandbox") + shift + # check if there is another argument which doesn't start with -- + if [ "$#" -le 0 ] || [ ! 
-z $(echo "${1}" | grep "^--") ] + then + SANDBOX=".cabal-sandbox" + else + SANDBOX="${1}" + shift + fi;; + "--no-doc") + NO_DOCUMENTATION=1 + shift;; + "-j"|"--jobs") + shift + # check if there is another argument which doesn't start with - or -- + if [ "$#" -le 0 ] \ + || [ ! -z $(echo "${1}" | grep "^-") ] \ + || [ ! -z $(echo "${1}" | grep "^--") ] + then + JOBS="-j" + else + JOBS="-j${1}" + shift + fi;; + *) + echo "Unknown argument or option, quitting: ${1}" + echo "usage: bootstrap.sh [OPTION]" + echo + echo "options:" + echo " -j/--jobs Number of concurrent workers to use (Default: 1)" + echo " -j without an argument will use all available cores" + echo " --user Install for the local user (default)" + echo " --global Install systemwide (must be run as root)" + echo " --no-doc Do not generate documentation for installed"\ + "packages" + echo " --sandbox Install to a sandbox in the default location"\ + "(.cabal-sandbox)" + echo " --sandbox path Install to a sandbox located at path" + exit;; + esac +done + +# Do not try to use -j with GHC 7.8 or older +case $GHC_VER in + 7.4*|7.6*|7.8*) + JOBS="" + ;; + *) + ;; +esac + +abspath () { case "$1" in /*)printf "%s\n" "$1";; *)printf "%s\n" "$PWD/$1";; + esac; } + +if [ ! -z "$SANDBOX" ] +then # set up variables for sandbox bootstrap + # Make the sandbox path absolute since it will be used from + # different working directories when the dependency packages are + # installed. + SANDBOX=$(abspath "$SANDBOX") + # Get the name of the package database which cabal sandbox would use. + GHC_ARCH=$(ghc --info | + sed -n 's/.*"Target platform".*"\([^-]\+\)-[^-]\+-\([^"]\+\)".*/\1-\2/p') + PACKAGEDB="$SANDBOX/${GHC_ARCH}-ghc-${GHC_VER}-packages.conf.d" + # Assume that if the directory is already there, it is already a + # package database. We will get an error immediately below if it + # isn't. Uses -r to try to be compatible with Solaris, and allow + # symlinks as well as a normal dir/file. + [ ! 
-r "$PACKAGEDB" ] && ghc-pkg init "$PACKAGEDB" + PREFIX="$SANDBOX" + SCOPE_OF_INSTALLATION="--package-db=$PACKAGEDB" + echo Bootstrapping in sandbox at \'$SANDBOX\'. +fi + +# Check for haddock unless no documentation should be generated. +if [ ! ${NO_DOCUMENTATION} ] +then + ${HADDOCK} --version > /dev/null 2>&1 || die "${HADDOCK} not found." +fi + +PREFIX=${PREFIX:-${DEFAULT_PREFIX}} + +# Versions of the packages to install. +# The version regex says what existing installed versions are ok. +PARSEC_VER="3.1.13.0"; PARSEC_VER_REGEXP="[3]\.[1]\." + # >= 3.1 && < 3.2 +DEEPSEQ_VER="1.4.3.0"; DEEPSEQ_VER_REGEXP="1\.[1-9]\." + # >= 1.1 && < 2 +BINARY_VER="0.8.5.1"; BINARY_VER_REGEXP="[0]\.[78]\." + # >= 0.7 && < 0.9 +TEXT_VER="1.2.3.0"; TEXT_VER_REGEXP="[1]\.[2]\." + # >= 1.2 && < 1.3 +NETWORK_URI_VER="2.6.1.0"; NETWORK_URI_VER_REGEXP="2\.6\.(0\.[2-9]|[1-9])" + # >= 2.6.0.2 && < 2.7 +NETWORK_VER="2.7.0.0"; NETWORK_VER_REGEXP="2\.[0-7]\." + # >= 2.0 && < 2.7 +CABAL_VER="2.4.1.0"; CABAL_VER_REGEXP="2\.4\.[1-9]" + # >= 2.4.1.0 && < 2.5 +TRANS_VER="0.5.5.0"; TRANS_VER_REGEXP="0\.[45]\." + # >= 0.2.* && < 0.6 +MTL_VER="2.2.2"; MTL_VER_REGEXP="[2]\." + # >= 2.0 && < 3 +HTTP_VER="4000.3.12"; HTTP_VER_REGEXP="4000\.(2\.([5-9]|1[0-9]|2[0-9])|3\.?)" + # >= 4000.2.5 < 4000.4 +ZLIB_VER="0.6.2"; ZLIB_VER_REGEXP="(0\.5\.([3-9]|1[0-9])|0\.6)" + # >= 0.5.3 && <= 0.7 +TIME_VER="1.9.1" TIME_VER_REGEXP="1\.[1-9]\.?" + # >= 1.1 && < 1.10 +RANDOM_VER="1.1" RANDOM_VER_REGEXP="1\.[01]\.?" + # >= 1 && < 1.2 +STM_VER="2.4.5.0"; STM_VER_REGEXP="2\." + # == 2.* +HASHABLE_VER="1.2.7.0"; HASHABLE_VER_REGEXP="1\." + # 1.* +ASYNC_VER="2.2.1"; ASYNC_VER_REGEXP="2\." + # 2.* +BASE16_BYTESTRING_VER="0.1.1.6"; BASE16_BYTESTRING_VER_REGEXP="0\.1" + # 0.1.* +BASE64_BYTESTRING_VER="1.0.0.1"; BASE64_BYTESTRING_VER_REGEXP="1\." + # >=1.0 +CRYPTOHASH_SHA256_VER="0.11.101.0"; CRYPTOHASH_SHA256_VER_REGEXP="0\.11\.?" 
+ # 0.11.* +RESOLV_VER="0.1.1.1"; RESOLV_VER_REGEXP="0\.1\.[1-9]" + # >= 0.1.1 && < 0.2 +MINTTY_VER="0.1.2"; MINTTY_VER_REGEXP="0\.1\.?" + # 0.1.* +ECHO_VER="0.1.3"; ECHO_VER_REGEXP="0\.1\.[3-9]" + # >= 0.1.3 && < 0.2 +EDIT_DISTANCE_VER="0.2.2.1"; EDIT_DISTANCE_VER_REGEXP="0\.2\.2\.?" + # 0.2.2.* +ED25519_VER="0.0.5.0"; ED25519_VER_REGEXP="0\.0\.?" + # 0.0.* +HACKAGE_SECURITY_VER="0.5.3.0"; HACKAGE_SECURITY_VER_REGEXP="0\.5\.((2\.[2-9]|[3-9])|3)" + # >= 0.5.2 && < 0.6 +TAR_VER="0.5.1.0"; TAR_VER_REGEXP="0\.5\.([1-9]|1[0-9]|0\.[3-9]|0\.1[0-9])\.?" + # >= 0.5.0.3 && < 0.6 +DIGEST_VER="0.0.1.2"; DIGEST_REGEXP="0\.0\.(1\.[2-9]|[2-9]\.?)" + # >= 0.0.1.2 && < 0.1 +ZIP_ARCHIVE_VER="0.3.3"; ZIP_ARCHIVE_REGEXP="0\.3\.[3-9]" + # >= 0.3.3 && < 0.4 + +HACKAGE_URL="https://hackage.haskell.org/package" + +# Haddock fails for hackage-security for GHC <8, +# c.f. https://github.com/well-typed/hackage-security/issues/149 +NO_DOCS_PACKAGES_VER_REGEXP="hackage-security-0\.5\.[0-9]+\.[0-9]+" + +# Cache the list of packages: +echo "Checking installed packages for ghc-${GHC_VER}..." +${GHC_PKG} list --global ${SCOPE_OF_INSTALLATION} > ghc-pkg.list || + die "running '${GHC_PKG} list' failed" + +# Will we need to install this package, or is a suitable version installed? +need_pkg () { + PKG=$1 + VER_MATCH=$2 + if egrep " ${PKG}-${VER_MATCH}" ghc-pkg.list > /dev/null 2>&1 + then + return 1; + else + return 0; + fi + #Note: we cannot use "! grep" here as Solaris 9 /bin/sh doesn't like it. +} + +info_pkg () { + PKG=$1 + VER=$2 + VER_MATCH=$3 + + if need_pkg ${PKG} ${VER_MATCH} + then + if [ -r "${PKG}-${VER}.tar.gz" ] + then + echo "${PKG}-${VER} will be installed from local tarball." + else + echo "${PKG}-${VER} will be downloaded and installed." + fi + else + echo "${PKG} is already installed and the version is ok." 
+ fi +} + +fetch_pkg () { + PKG=$1 + VER=$2 + + URL_PKG=${HACKAGE_URL}/${PKG}-${VER}/${PKG}-${VER}.tar.gz + URL_PKGDESC=${HACKAGE_URL}/${PKG}-${VER}/${PKG}.cabal + if which ${CURL} > /dev/null + then + ${CURL} -L --fail -C - -O ${URL_PKG} || die "Failed to download ${PKG}." + ${CURL} -L --fail -C - -O ${URL_PKGDESC} \ + || die "Failed to download '${PKG}.cabal'." + elif which ${WGET} > /dev/null + then + ${WGET} -c ${URL_PKG} || die "Failed to download ${PKG}." + ${WGET} -c ${URL_PKGDESC} || die "Failed to download '${PKG}.cabal'." + elif which ${FETCH} > /dev/null + then + ${FETCH} ${URL_PKG} || die "Failed to download ${PKG}." + ${FETCH} ${URL_PKGDESC} || die "Failed to download '${PKG}.cabal'." + else + die "Failed to find a downloader. 'curl', 'wget' or 'fetch' is required." + fi + [ -f "${PKG}-${VER}.tar.gz" ] || + die "Downloading ${URL_PKG} did not create ${PKG}-${VER}.tar.gz" + [ -f "${PKG}.cabal" ] || + die "Downloading ${URL_PKGDESC} did not create ${PKG}.cabal" + mv "${PKG}.cabal" "${PKG}.cabal.hackage" +} + +unpack_pkg () { + PKG=$1 + VER=$2 + + rm -rf "${PKG}-${VER}.tar" "${PKG}-${VER}" + ${GZIP_PROGRAM} -d < "${PKG}-${VER}.tar.gz" | ${TAR} -xf - + [ -d "${PKG}-${VER}" ] || die "Failed to unpack ${PKG}-${VER}.tar.gz" + cp "${PKG}.cabal.hackage" "${PKG}-${VER}/${PKG}.cabal" +} + +install_pkg () { + PKG=$1 + VER=$2 + + [ -x Setup ] && ./Setup clean + [ -f Setup ] && rm Setup + + PKG_DBS=$(printf '%s\n' "${SCOPE_OF_INSTALLATION}" \ + | sed -e 's/--package-db/-package-db/' \ + -e 's/--global/-global-package-db/' \ + -e 's/--user/-user-package-db/') + + ${GHC} --make ${JOBS} ${PKG_DBS} Setup -o Setup -XRank2Types -XFlexibleContexts || + die "Compiling the Setup script failed." 
+ + [ -x Setup ] || die "The Setup script does not exist or cannot be run" + + args="${SCOPE_OF_INSTALLATION} --prefix=${PREFIX} --with-compiler=${GHC}" + args="$args --with-hc-pkg=${GHC_PKG} --with-gcc=${CC} --with-ld=${LD}" + args="$args ${EXTRA_CONFIGURE_OPTS} ${VERBOSE}" + + ./Setup configure $args || die "Configuring the ${PKG} package failed." + + ./Setup build ${JOBS} ${EXTRA_BUILD_OPTS} ${VERBOSE} || + die "Building the ${PKG} package failed." + + if [ ! ${NO_DOCUMENTATION} ] + then + if echo "${PKG}-${VER}" | egrep ${NO_DOCS_PACKAGES_VER_REGEXP} \ + > /dev/null 2>&1 + then + echo "Skipping documentation for the ${PKG} package." + else + ./Setup haddock --with-ghc=${GHC} --with-haddock=${HADDOCK} ${VERBOSE} || + die "Documenting the ${PKG} package failed." + fi + fi + + ./Setup install ${EXTRA_INSTALL_OPTS} ${VERBOSE} || + die "Installing the ${PKG} package failed." +} + +do_pkg () { + PKG=$1 + VER=$2 + VER_MATCH=$3 + + if need_pkg ${PKG} ${VER_MATCH} + then + echo + if [ -r "${PKG}-${VER}.tar.gz" ] + then + echo "Using local tarball for ${PKG}-${VER}." + else + echo "Downloading ${PKG}-${VER}..." + fetch_pkg ${PKG} ${VER} + fi + unpack_pkg "${PKG}" "${VER}" + (cd "${PKG}-${VER}" && install_pkg ${PKG} ${VER}) + fi +} + +# If we're bootstrapping from a Git clone, install the local version of Cabal +# instead of downloading one from Hackage. +do_Cabal_pkg () { + if [ -d "../.git" ] + then + if need_pkg "Cabal" ${CABAL_VER_REGEXP} + then + echo "Cabal-${CABAL_VER} will be installed from the local Git clone." + (cd ../Cabal && install_pkg ${CABAL_VER} ${CABAL_VER_REGEXP}) + else + echo "Cabal is already installed and the version is ok." + fi + else + info_pkg "Cabal" ${CABAL_VER} ${CABAL_VER_REGEXP} + do_pkg "Cabal" ${CABAL_VER} ${CABAL_VER_REGEXP} + fi +} + +# Actually do something! 
+ +info_pkg "deepseq" ${DEEPSEQ_VER} ${DEEPSEQ_VER_REGEXP} +info_pkg "binary" ${BINARY_VER} ${BINARY_VER_REGEXP} +info_pkg "time" ${TIME_VER} ${TIME_VER_REGEXP} +info_pkg "transformers" ${TRANS_VER} ${TRANS_VER_REGEXP} +info_pkg "mtl" ${MTL_VER} ${MTL_VER_REGEXP} +info_pkg "text" ${TEXT_VER} ${TEXT_VER_REGEXP} +info_pkg "parsec" ${PARSEC_VER} ${PARSEC_VER_REGEXP} +info_pkg "network-uri" ${NETWORK_URI_VER} ${NETWORK_URI_VER_REGEXP} +info_pkg "network" ${NETWORK_VER} ${NETWORK_VER_REGEXP} +info_pkg "HTTP" ${HTTP_VER} ${HTTP_VER_REGEXP} +info_pkg "zlib" ${ZLIB_VER} ${ZLIB_VER_REGEXP} +info_pkg "random" ${RANDOM_VER} ${RANDOM_VER_REGEXP} +info_pkg "stm" ${STM_VER} ${STM_VER_REGEXP} +info_pkg "hashable" ${HASHABLE_VER} ${HASHABLE_VER_REGEXP} +info_pkg "async" ${ASYNC_VER} ${ASYNC_VER_REGEXP} +info_pkg "base16-bytestring" ${BASE16_BYTESTRING_VER} \ + ${BASE16_BYTESTRING_VER_REGEXP} +info_pkg "base64-bytestring" ${BASE64_BYTESTRING_VER} \ + ${BASE64_BYTESTRING_VER_REGEXP} +info_pkg "cryptohash-sha256" ${CRYPTOHASH_SHA256_VER} \ + ${CRYPTOHASH_SHA256_VER_REGEXP} +info_pkg "resolv" ${RESOLV_VER} ${RESOLV_VER_REGEXP} +info_pkg "mintty" ${MINTTY_VER} ${MINTTY_VER_REGEXP} +info_pkg "echo" ${ECHO_VER} ${ECHO_VER_REGEXP} +info_pkg "edit-distance" ${EDIT_DISTANCE_VER} ${EDIT_DISTANCE_VER_REGEXP} +info_pkg "ed25519" ${ED25519_VER} ${ED25519_VER_REGEXP} +info_pkg "tar" ${TAR_VER} ${TAR_VER_REGEXP} +info_pkg "digest" ${DIGEST_VER} ${DIGEST_REGEXP} +info_pkg "zip-archive" ${ZIP_ARCHIVE_VER} ${ZIP_ARCHIVE_REGEXP} +info_pkg "hackage-security" ${HACKAGE_SECURITY_VER} \ + ${HACKAGE_SECURITY_VER_REGEXP} + +do_pkg "deepseq" ${DEEPSEQ_VER} ${DEEPSEQ_VER_REGEXP} +do_pkg "binary" ${BINARY_VER} ${BINARY_VER_REGEXP} +do_pkg "time" ${TIME_VER} ${TIME_VER_REGEXP} + +# Cabal might depend on these +do_pkg "transformers" ${TRANS_VER} ${TRANS_VER_REGEXP} +do_pkg "mtl" ${MTL_VER} ${MTL_VER_REGEXP} +do_pkg "text" ${TEXT_VER} ${TEXT_VER_REGEXP} +do_pkg "parsec" ${PARSEC_VER} ${PARSEC_VER_REGEXP} + +# 
Install the Cabal library from the local Git clone if possible. +do_Cabal_pkg + +do_pkg "network-uri" ${NETWORK_URI_VER} ${NETWORK_URI_VER_REGEXP} +do_pkg "network" ${NETWORK_VER} ${NETWORK_VER_REGEXP} +do_pkg "HTTP" ${HTTP_VER} ${HTTP_VER_REGEXP} +do_pkg "zlib" ${ZLIB_VER} ${ZLIB_VER_REGEXP} +do_pkg "random" ${RANDOM_VER} ${RANDOM_VER_REGEXP} +do_pkg "stm" ${STM_VER} ${STM_VER_REGEXP} +do_pkg "hashable" ${HASHABLE_VER} ${HASHABLE_VER_REGEXP} +do_pkg "async" ${ASYNC_VER} ${ASYNC_VER_REGEXP} +do_pkg "base16-bytestring" ${BASE16_BYTESTRING_VER} \ + ${BASE16_BYTESTRING_VER_REGEXP} +do_pkg "base64-bytestring" ${BASE64_BYTESTRING_VER} \ + ${BASE64_BYTESTRING_VER_REGEXP} +do_pkg "cryptohash-sha256" ${CRYPTOHASH_SHA256_VER} \ + ${CRYPTOHASH_SHA256_VER_REGEXP} +do_pkg "resolv" ${RESOLV_VER} ${RESOLV_VER_REGEXP} +do_pkg "mintty" ${MINTTY_VER} ${MINTTY_VER_REGEXP} +do_pkg "echo" ${ECHO_VER} ${ECHO_VER_REGEXP} +do_pkg "edit-distance" ${EDIT_DISTANCE_VER} ${EDIT_DISTANCE_VER_REGEXP} +do_pkg "ed25519" ${ED25519_VER} ${ED25519_VER_REGEXP} +do_pkg "tar" ${TAR_VER} ${TAR_VER_REGEXP} +do_pkg "digest" ${DIGEST_VER} ${DIGEST_REGEXP} +do_pkg "zip-archive" ${ZIP_ARCHIVE_VER} ${ZIP_ARCHIVE_REGEXP} +do_pkg "hackage-security" ${HACKAGE_SECURITY_VER} \ + ${HACKAGE_SECURITY_VER_REGEXP} + + +install_pkg "cabal-install" + +# Use the newly built cabal to turn the prefix/package database into a +# legit cabal sandbox. This works because 'cabal sandbox init' will +# reuse the already existing package database and other files if they +# are in the expected locations. +[ ! -z "$SANDBOX" ] && $SANDBOX/bin/cabal sandbox init --sandbox $SANDBOX + +echo +echo "===========================================" +CABAL_BIN="$PREFIX/bin" +if [ -x "$CABAL_BIN/cabal" ] +then + echo "The 'cabal' program has been installed in $CABAL_BIN/" + echo "You should either add $CABAL_BIN to your PATH" + echo "or copy the cabal program to a directory that is on your PATH." 
+ echo + echo "The first thing to do is to get the latest list of packages with:" + echo " cabal update" + echo "This will also create a default config file (if it does not already" + echo "exist) at $HOME/.cabal/config" + echo + echo "By default cabal will install programs to $HOME/.cabal/bin" + echo "If you do not want to add this directory to your PATH then you can" + echo "change the setting in the config file, for example you could use:" + echo "symlink-bindir: $HOME/bin" +else + echo "Sorry, something went wrong." + echo "The 'cabal' executable was not successfully installed into" + echo "$CABAL_BIN/" +fi +echo + +rm ghc-pkg.list diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/cabal-install.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/cabal-install.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/cabal-install.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/cabal-install.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,351 @@ +Cabal-Version: >= 1.10 +-- NOTE: This file is autogenerated from 'cabal-install.cabal.pp'. +-- DO NOT EDIT MANUALLY. +-- To update this file, edit 'cabal-install.cabal.pp' and run +-- 'make cabal-install-prod' in the project's root folder. +Name: cabal-install +Version: 2.4.1.0 +Synopsis: The command-line interface for Cabal and Hackage. +Description: + The \'cabal\' command-line program simplifies the process of managing + Haskell software by automating the fetching, configuration, compilation + and installation of Haskell libraries and programs. 
+homepage: http://www.haskell.org/cabal/ +bug-reports: https://github.com/haskell/cabal/issues +License: BSD3 +License-File: LICENSE +Author: Cabal Development Team (see AUTHORS file) +Maintainer: Cabal Development Team +Copyright: 2003-2018, Cabal Development Team +Category: Distribution +Build-type: Custom +Extra-Source-Files: + README.md bash-completion/cabal bootstrap.sh changelog + tests/README.md + + -- Generated with 'make gen-extra-source-files' + -- Do NOT edit this section manually; instead, run the script. + -- BEGIN gen-extra-source-files + tests/IntegrationTests2/build/keep-going/cabal.project + tests/IntegrationTests2/build/keep-going/p/P.hs + tests/IntegrationTests2/build/keep-going/p/p.cabal + tests/IntegrationTests2/build/keep-going/q/Q.hs + tests/IntegrationTests2/build/keep-going/q/q.cabal + tests/IntegrationTests2/build/local-tarball/cabal.project + tests/IntegrationTests2/build/local-tarball/q/Q.hs + tests/IntegrationTests2/build/local-tarball/q/q.cabal + tests/IntegrationTests2/build/setup-custom1/A.hs + tests/IntegrationTests2/build/setup-custom1/Setup.hs + tests/IntegrationTests2/build/setup-custom1/a.cabal + tests/IntegrationTests2/build/setup-custom2/A.hs + tests/IntegrationTests2/build/setup-custom2/Setup.hs + tests/IntegrationTests2/build/setup-custom2/a.cabal + tests/IntegrationTests2/build/setup-simple/A.hs + tests/IntegrationTests2/build/setup-simple/Setup.hs + tests/IntegrationTests2/build/setup-simple/a.cabal + tests/IntegrationTests2/exception/bad-config/cabal.project + tests/IntegrationTests2/exception/build/Main.hs + tests/IntegrationTests2/exception/build/a.cabal + tests/IntegrationTests2/exception/configure/a.cabal + tests/IntegrationTests2/exception/no-pkg/empty.in + tests/IntegrationTests2/exception/no-pkg2/cabal.project + tests/IntegrationTests2/regression/3324/cabal.project + tests/IntegrationTests2/regression/3324/p/P.hs + tests/IntegrationTests2/regression/3324/p/p.cabal + tests/IntegrationTests2/regression/3324/q/Q.hs + 
tests/IntegrationTests2/regression/3324/q/q.cabal + tests/IntegrationTests2/targets/all-disabled/cabal.project + tests/IntegrationTests2/targets/all-disabled/p.cabal + tests/IntegrationTests2/targets/benchmarks-disabled/cabal.project + tests/IntegrationTests2/targets/benchmarks-disabled/p.cabal + tests/IntegrationTests2/targets/benchmarks-disabled/q/q.cabal + tests/IntegrationTests2/targets/complex/cabal.project + tests/IntegrationTests2/targets/complex/q/Q.hs + tests/IntegrationTests2/targets/complex/q/q.cabal + tests/IntegrationTests2/targets/empty-pkg/cabal.project + tests/IntegrationTests2/targets/empty-pkg/p.cabal + tests/IntegrationTests2/targets/empty/cabal.project + tests/IntegrationTests2/targets/empty/foo.hs + tests/IntegrationTests2/targets/exes-disabled/cabal.project + tests/IntegrationTests2/targets/exes-disabled/p/p.cabal + tests/IntegrationTests2/targets/exes-disabled/q/q.cabal + tests/IntegrationTests2/targets/lib-only/p.cabal + tests/IntegrationTests2/targets/libs-disabled/cabal.project + tests/IntegrationTests2/targets/libs-disabled/p/p.cabal + tests/IntegrationTests2/targets/libs-disabled/q/q.cabal + tests/IntegrationTests2/targets/multiple-exes/cabal.project + tests/IntegrationTests2/targets/multiple-exes/p.cabal + tests/IntegrationTests2/targets/multiple-libs/cabal.project + tests/IntegrationTests2/targets/multiple-libs/p/p.cabal + tests/IntegrationTests2/targets/multiple-libs/q/q.cabal + tests/IntegrationTests2/targets/multiple-tests/cabal.project + tests/IntegrationTests2/targets/multiple-tests/p.cabal + tests/IntegrationTests2/targets/simple/P.hs + tests/IntegrationTests2/targets/simple/cabal.project + tests/IntegrationTests2/targets/simple/p.cabal + tests/IntegrationTests2/targets/simple/q/QQ.hs + tests/IntegrationTests2/targets/simple/q/q.cabal + tests/IntegrationTests2/targets/test-only/p.cabal + tests/IntegrationTests2/targets/tests-disabled/cabal.project + tests/IntegrationTests2/targets/tests-disabled/p.cabal + 
tests/IntegrationTests2/targets/tests-disabled/q/q.cabal + tests/IntegrationTests2/targets/variety/cabal.project + tests/IntegrationTests2/targets/variety/p.cabal + -- END gen-extra-source-files + + -- Additional manual extra-source-files: + tests/IntegrationTests2/build/local-tarball/p-0.1.tar.gz + + +source-repository head + type: git + location: https://github.com/haskell/cabal/ + subdir: cabal-install + +Flag native-dns + description: Enable use of the [resolv](https://hackage.haskell.org/package/resolv) & [windns](https://hackage.haskell.org/package/windns) packages for performing DNS lookups + default: True + manual: True + +Flag debug-expensive-assertions + description: Enable expensive assertions for testing or debugging + default: False + manual: True + +Flag debug-conflict-sets + description: Add additional information to ConflictSets + default: False + manual: True + +Flag debug-tracetree + description: Compile in support for tracetree (used to debug the solver) + default: False + manual: True + +custom-setup + setup-depends: + Cabal >= 2.2, + base, + process >= 1.1.0.1 && < 1.7, + filepath >= 1.3 && < 1.5 + +executable cabal + main-is: Main.hs + hs-source-dirs: main + default-language: Haskell2010 + ghc-options: -Wall -fwarn-tabs + if impl(ghc >= 8.0) + ghc-options: -Wcompat + -Wnoncanonical-monad-instances + -Wnoncanonical-monadfail-instances + + ghc-options: -rtsopts -threaded + + -- On AIX, some legacy BSD operations such as flock(2) are provided by libbsd.a + if os(aix) + extra-libraries: bsd + hs-source-dirs: . 
+ other-modules: + Distribution.Client.BuildReports.Anonymous + Distribution.Client.BuildReports.Storage + Distribution.Client.BuildReports.Types + Distribution.Client.BuildReports.Upload + Distribution.Client.Check + Distribution.Client.CmdBench + Distribution.Client.CmdBuild + Distribution.Client.CmdClean + Distribution.Client.CmdConfigure + Distribution.Client.CmdUpdate + Distribution.Client.CmdErrorMessages + Distribution.Client.CmdExec + Distribution.Client.CmdFreeze + Distribution.Client.CmdHaddock + Distribution.Client.CmdInstall + Distribution.Client.CmdRepl + Distribution.Client.CmdRun + Distribution.Client.CmdTest + Distribution.Client.CmdLegacy + Distribution.Client.CmdSdist + Distribution.Client.Compat.Directory + Distribution.Client.Compat.ExecutablePath + Distribution.Client.Compat.FileLock + Distribution.Client.Compat.FilePerms + Distribution.Client.Compat.Prelude + Distribution.Client.Compat.Process + Distribution.Client.Compat.Semaphore + Distribution.Client.Config + Distribution.Client.Configure + Distribution.Client.Dependency + Distribution.Client.Dependency.Types + Distribution.Client.DistDirLayout + Distribution.Client.Exec + Distribution.Client.Fetch + Distribution.Client.FetchUtils + Distribution.Client.FileMonitor + Distribution.Client.Freeze + Distribution.Client.GZipUtils + Distribution.Client.GenBounds + Distribution.Client.Get + Distribution.Client.Glob + Distribution.Client.GlobalFlags + Distribution.Client.Haddock + Distribution.Client.HttpUtils + Distribution.Client.IndexUtils + Distribution.Client.IndexUtils.Timestamp + Distribution.Client.Init + Distribution.Client.Init.Heuristics + Distribution.Client.Init.Licenses + Distribution.Client.Init.Types + Distribution.Client.Install + Distribution.Client.InstallPlan + Distribution.Client.InstallSymlink + Distribution.Client.JobControl + Distribution.Client.List + Distribution.Client.Manpage + Distribution.Client.Nix + Distribution.Client.Outdated + Distribution.Client.PackageHash + 
Distribution.Client.PackageUtils + Distribution.Client.ParseUtils + Distribution.Client.ProjectBuilding + Distribution.Client.ProjectBuilding.Types + Distribution.Client.ProjectConfig + Distribution.Client.ProjectConfig.Legacy + Distribution.Client.ProjectConfig.Types + Distribution.Client.ProjectOrchestration + Distribution.Client.ProjectPlanOutput + Distribution.Client.ProjectPlanning + Distribution.Client.ProjectPlanning.Types + Distribution.Client.RebuildMonad + Distribution.Client.Reconfigure + Distribution.Client.Run + Distribution.Client.Sandbox + Distribution.Client.Sandbox.Index + Distribution.Client.Sandbox.PackageEnvironment + Distribution.Client.Sandbox.Timestamp + Distribution.Client.Sandbox.Types + Distribution.Client.SavedFlags + Distribution.Client.Security.DNS + Distribution.Client.Security.HTTP + Distribution.Client.Setup + Distribution.Client.SetupWrapper + Distribution.Client.SolverInstallPlan + Distribution.Client.SourceFiles + Distribution.Client.SourceRepoParse + Distribution.Client.SrcDist + Distribution.Client.Store + Distribution.Client.Tar + Distribution.Client.TargetSelector + Distribution.Client.Targets + Distribution.Client.Types + Distribution.Client.Update + Distribution.Client.Upload + Distribution.Client.Utils + Distribution.Client.Utils.Assertion + Distribution.Client.Utils.Json + Distribution.Client.VCS + Distribution.Client.Win32SelfUpgrade + Distribution.Client.World + Distribution.Solver.Compat.Prelude + Distribution.Solver.Modular + Distribution.Solver.Modular.Assignment + Distribution.Solver.Modular.Builder + Distribution.Solver.Modular.Configured + Distribution.Solver.Modular.ConfiguredConversion + Distribution.Solver.Modular.ConflictSet + Distribution.Solver.Modular.Cycles + Distribution.Solver.Modular.Dependency + Distribution.Solver.Modular.Explore + Distribution.Solver.Modular.Flag + Distribution.Solver.Modular.Index + Distribution.Solver.Modular.IndexConversion + Distribution.Solver.Modular.LabeledGraph + 
Distribution.Solver.Modular.Linking + Distribution.Solver.Modular.Log + Distribution.Solver.Modular.Message + Distribution.Solver.Modular.PSQ + Distribution.Solver.Modular.Package + Distribution.Solver.Modular.Preference + Distribution.Solver.Modular.RetryLog + Distribution.Solver.Modular.Solver + Distribution.Solver.Modular.Tree + Distribution.Solver.Modular.Validate + Distribution.Solver.Modular.Var + Distribution.Solver.Modular.Version + Distribution.Solver.Modular.WeightedPSQ + Distribution.Solver.Types.ComponentDeps + Distribution.Solver.Types.ConstraintSource + Distribution.Solver.Types.DependencyResolver + Distribution.Solver.Types.Flag + Distribution.Solver.Types.InstSolverPackage + Distribution.Solver.Types.InstalledPreference + Distribution.Solver.Types.LabeledPackageConstraint + Distribution.Solver.Types.OptionalStanza + Distribution.Solver.Types.PackageConstraint + Distribution.Solver.Types.PackageFixedDeps + Distribution.Solver.Types.PackageIndex + Distribution.Solver.Types.PackagePath + Distribution.Solver.Types.PackagePreferences + Distribution.Solver.Types.PkgConfigDb + Distribution.Solver.Types.Progress + Distribution.Solver.Types.ResolverPackage + Distribution.Solver.Types.Settings + Distribution.Solver.Types.SolverId + Distribution.Solver.Types.SolverPackage + Distribution.Solver.Types.SourcePackage + Distribution.Solver.Types.Variable + Paths_cabal_install + + build-depends: + async >= 2.0 && < 2.3, + array >= 0.4 && < 0.6, + base >= 4.8 && < 4.13, + base16-bytestring >= 0.1.1 && < 0.2, + binary >= 0.7.3 && < 0.9, + bytestring >= 0.10.6.0 && < 0.11, + Cabal >= 2.4.1.0 && < 2.5, + containers >= 0.5.6.2 && < 0.7, + cryptohash-sha256 >= 0.11 && < 0.12, + deepseq >= 1.4.1.1 && < 1.5, + directory >= 1.2.2.0 && < 1.4, + echo >= 0.1.3 && < 0.2, + edit-distance >= 0.2.2 && < 0.3, + filepath >= 1.4.0.0 && < 1.5, + hashable >= 1.0 && < 1.3, + HTTP >= 4000.1.5 && < 4000.4, + mtl >= 2.0 && < 2.3, + network-uri >= 2.6.0.2 && < 2.7, + network >= 2.6 && < 2.9, 
+ pretty >= 1.1 && < 1.2, + process >= 1.2.3.0 && < 1.7, + random >= 1 && < 1.2, + stm >= 2.0 && < 2.6, + tar >= 0.5.0.3 && < 0.6, + time >= 1.5.0.1 && < 1.10, + zlib >= 0.5.3 && < 0.7, + hackage-security >= 0.5.2.2 && < 0.6, + text >= 1.2.3 && < 1.3, + zip-archive >= 0.3.2.5 && < 0.4, + parsec >= 3.1.13.0 && < 3.2 + + if flag(native-dns) + if os(windows) + build-depends: windns >= 0.1.0 && < 0.2 + else + build-depends: resolv >= 0.1.1 && < 0.2 + + if os(windows) + build-depends: Win32 >= 2 && < 3 + else + build-depends: unix >= 2.5 && < 2.8 + + if flag(debug-expensive-assertions) + cpp-options: -DDEBUG_EXPENSIVE_ASSERTIONS + + if flag(debug-conflict-sets) + cpp-options: -DDEBUG_CONFLICT_SETS + build-depends: base >= 4.8 + + if flag(debug-tracetree) + cpp-options: -DDEBUG_TRACETREE + build-depends: tracetree >= 0.1 && < 0.2 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/changelog cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/changelog --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/changelog 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/changelog 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,544 @@ +-*-change-log-*- + +2.4.1.0 Mikhail Glushenkov November 2018 + * Add message to alert user to potential package casing errors. (#5635) + * new-clean no longer deletes dist-newstyle/src with `-s`. (#5699) + * 'new-install' now warns when failing to symlink an exe (#5602) + * Extend 'cabal init' support for 'cabal-version' selection (#5567) + * 'new-sdist' now generates tarballs with file modification + times from a date in 2001. Using the Unix epoch caused + problems on Windows. (#5596) + * Register monolithic packages installed into the store due to a + build-tool dependency if they also happen to contain a buildable + public lib. 
(#5379,#5604) + * Fixed a Windows bug where cabal-install tried to copy files + after moving them (#5631). + * 'cabal v2-repl' now works for indefinite (in the Backpack sense) components. (#5619) + * Set data dir environment variable for tarballs and remote repos (#5469) + * Fix monolithic inplace build tool PATH (#5633) + * 'cabal init' now supports '-w'/'--with-compiler' flag (#4936, #5654) + * Fix ambiguous --builddir on new-install (#5652) + * Allow relative --storedir (#5662) + * Respect --dry on new-install (#5671) + * Warn when new-installing zero exes (#5666) + * Add 'pkg-cabal-sha256' field to plan.json (#5695) + * New v2-build flag: '--only-configure'. (#5578) + * Fixed a 'new-install' failure that manifested when it + encountered remote source dependencies in a project. (#5643) + * New 'v2-[build,configure' flag: '--write-ghc-environment-files' + to control the generation of .ghc.environment files. (#5711) + +2.4.0.0 Mikhail Glushenkov September 2018 + * Bugfix: "cabal new-build --ghc-option '--bogus' --ghc-option '-O1'" + no longer ignores all arguments except the last one (#5512). + * Add the following option aliases for '-dir'-suffixed options: + 'storedir', 'logsdir', 'packagedir', 'sourcedir', 'outputdir' (#5484). + * 'new-run' now allows the user to run scripts that use a special block + to define their requirements (as in the executable stanza) in place + of a target. This also allows the use of 'cabal' as an interpreter + in a shebang line. + * Add aliases for the "new-" commands that won't change when they + lose their prefix or are eventually replaced by a third UI + paradigm in the future. (#5429) + * 'outdated' now accepts '--project-file FILE', which will look for bounds + from the new-style freeze file named FILE.freeze. This is only + available when `--new-freeze-file` has been passed. 
+ * 'new-repl' now accepts a '--build-depends' flag which accepts the + same syntax as is used in .cabal files to add additional dependencies + to the environment when developing in the REPL. It is now usable outside + of projects. (#5425, #5454) + * 'new-build' now treats Haddock errors non-fatally. In addition, + it attempts to avoid trying to generate Haddocks when there is + nothing to generate them from. (#5232, #5459) + * 'new-run', 'new-test', and 'new-bench' now will attempt to resolve + ambiguous selectors by filtering out selectors that would be invalid. + (#4679, #5461) + * 'new-install' now supports installing libraries and local + components. (#5399) + * Drop support for GHC 7.4, since it is out of our support window + (and has been for over a year!). + * 'new-update' now works outside of projects. (#5096) + * Extend `plan.json` with `pkg-src` provenance information. (#5487) + * Add 'new-sdist' command (#5389). Creates stable archives based on + cabal projects in '.zip' and '.tar.gz' formats. + * Add '--repl-options' flag to 'cabal repl' and 'cabal new-repl' + commands. Passes its arguments to the invoked repl, bypassing the + new-build's cached configurations. This assures they don't trigger + useless rebuilds and are always applied within the repl. (#4247, #5287) + * Add 'v1-' prefixes for the commands that will be replaced in the + new-build universe, in preparation for it becoming the default. + (#5358) + * 'outdated' accepts '--v1-freeze-file' and '--v2-freeze-file' + in the same spirit. + * Completed the 'new-clean' command (#5357). The functionality is + equivalent to old-style clean, but for nix-style builds. + * Ensure that each package selected for a build-depends dependency + contains a library (#5304). + * Support packages from local tarballs in the cabal.project file. + * Default changelog generated by 'cabal init' is now named + 'CHANGELOG.md' (#5441). + * Align output of 'new-build' command phases (#4040). 
+ * Add suport for specifying remote VCS dependencies via new + 'source-repository-package' stanzas in 'cabal.project' files + (#5351). + +2.2.0.0 Mikhail Glushenkov March 2018 + * '--with-PROG' and '--PROG-options' are applied to all packages + and not local packages only (#5019). + * Completed the 'new-update' command (#4809), which respects nix-style + cabal.project(.local) files and allows to update from + multiple repositories when using overlays. + * Completed the 'new-run' command (#4477). The functionality is the + same of the old 'run' command but using nix-style builds. + Additionally, it can run executables across packages in a project. + Tests and benchmarks are also treated as executables, providing a + quick way to pass them arguments. + * Completed the 'new-bench' command (#3638). Same as above. + * Completed the 'new-exec' command (#3638). Same as above. + * Added a preliminary 'new-install' command (#4558, nonlocal exes + part) which allows to quickly install executables from Hackage. + * Set symlink-bindir (used by new-install) to .cabal/bin by default on + .cabal/config initialization (#5188). + * 'cabal update' now supports '--index-state' which can be used to + roll back the index to an earlier state. + * '--allow-{newer,older}' syntax has been enhanced. Dependency + relaxation can be now limited to a specific release of a package, + plus there's a new syntax for relaxing only caret-style (i.e. '^>=') + dependencies (#4575, #4669). + * New config file field: 'cxx-options' to specify which options to be + passed to the compiler when compiling C++ sources specified by the + 'cxx-sources' field. (#3700) + * New config file field: 'cxx-sources' to specify C++ files to be + compiled separately from C source files. Useful in conjunction with the + 'cxx-options' flag to pass different compiler options to C and C++ + source files. (#3700) + * Use [lfxtb] letters to differentiate component kind instead of + opaque "c" in dist-dir layout. 
+ * 'cabal configure' now supports '--enable-static', which can be + used to build static libaries with GHC via GHC's `-staticlib` + flag. + * 'cabal user-config now supports '--augment' which can append + additional lines to a new or updated cabal config file. + * Added support for '--enable-tests' and '--enable-benchmarks' to + 'cabal fetch' (#4948). + * Misspelled package-names on CLI will no longer be silently + case-corrected (#4778). + * 'cabal new-configure' now backs up the old 'cabal.project.local' + file if it exists (#4460). + * On macOS, `new-build` will now place dynamic libraries into + `store/lib` and aggressively shorten their names in an effort to + stay within the load command size limits of macOSs mach-o linker. + * 'new-build' now checks for the existence of executables for + build-tools and build-tool-depends dependencies in the solver + (#4884). + * Fixed a spurious warning telling the user to run 'cabal update' + when it wasn't necessary (#4444). + * Packages installed in sandboxes via 'add-source' now have + their timestamps updated correctly and so will not be reinstalled + unncecessarily if the main install command fails (#1375). + * Add Windows device path support for copyFile, renameFile. Allows cabal + new-build to use temporary store path of up to 32k length + (#3972, #4914, #4515). + * When a flag value is specified multiple times on the command + line, the last one is now preferred, so e.g. '-f+dev -f-dev' is + now equivalent to '-f-dev' (#4452). + * Removed support for building cabal-install with GHC < 7.10 (#4870). + * New 'package *' section in 'cabal.project' files that applies + options to all packages, not just those local to the project. + * Paths_ autogen modules now compile when `RebindableSyntax` or + `OverloadedStrings` is used in `default-extensions`. 
+ [stack#3789](https://github.com/commercialhaskell/stack/issues/3789) + * `getDataDir` and other `Paths_autogen` functions now work correctly + when compiling a custom `Setup.hs` script using `new-build` (#5164). + +2.0.0.1 Mikhail Glushenkov December 2017 + * Support for GHC's numeric -g debug levels (#4673). + * Demoted 'scope' field version check to a warning (#4714). + * Fixed verbosity flags getting removed before being passed to + 'printPlan' (#4724). + * Added a '--store-dir' option that can be used to configure the + location of the build global build store (#4623). + * Turned `allow-{newer,older}` in `cabal.project` files into an + accumulating field to match CLI flag semantics (#4679). + * Improve success message when `cabal upload`ing documentation + (#4777). + * Documentation fixes. + +2.0.0.0 Mikhail Glushenkov August 2017 + * See http://coldwa.st/e/blog/2017-09-09-Cabal-2-0.html + for more detailed release notes. + * Removed the '--root-cmd' parameter of the 'install' command + (#3356). + * Deprecated 'cabal install --global' (#3356). + * Changed 'cabal upload' to upload a package candidate by default + (#3419). Same applies to uploading documentation. + * Added a new 'cabal upload' flag '--publish' for publishing a + package on Hackage instead of uploading a candidate (#3419). + * Added optional solver output visualisation support via the + tracetree package. Mainly intended for debugging (#3410). + * Removed the '--check' option from 'cabal upload' + (#1823). It was replaced by package candidates. + * Fixed various behaviour differences between network transports + (#3429). + * The bootstrap script now works correctly when run from a Git + clone (#3439). + * Removed the top-down solver (#3598). + * The '-v/--verbosity' option no longer affects GHC verbosity + (except in the case of '-v0'). Use '--ghc-options=-v' to enable + verbose GHC output (#3540, #3671). 
+ * Changed the default logfile template from + '.../$pkgid.log' to '.../$compiler/$libname.log' (#3807). + * Added a new command, 'cabal reconfigure', which re-runs 'configure' + with the most recently used flags (#2214). + * Added the '--index-state' flag for requesting a specific + version of the package index (#3893, #4115). + * Support for building Backpack packages. See + https://github.com/ezyang/ghc-proposals/blob/backpack/proposals/0000-backpack.rst + for more details. + * Support the Nix package manager (#3651). + * Made the 'template-haskell' package non-upgradable again (#4185). + * Fixed password echoing on MinTTY (#4128). + * Added a new solver flag, '--allow-boot-library-installs', that allows + any package to be installed or upgraded (#4209). + * New 'cabal-install' command: 'outdated', for listing outdated + version bounds in a .cabal file or a freeze file (#4207). + * Added qualified constraints for setup dependencies. For example, + --constraint="setup.bar == 1.0" constrains all setup dependencies on + bar, and --constraint="foo:setup.bar == 1.0" constrains foo's setup + dependency on bar (part of #3502). + * Non-qualified constraints, such as --constraint="bar == 1.0", now + only apply to top-level dependencies. They don't constrain setup or + build-tool dependencies. The new syntax --constraint="any.bar == 1.0" + constrains all uses of bar. + * Added a technical preview version of the 'cabal doctest' command + (#4480). + +1.24.0.2 Mikhail Glushenkov December 2016 + * Adapted to the revert of a PVP-noncompliant API change in + Cabal 1.24.2.0 (#4123). + * Bumped the directory upper bound to < 1.4 (#4158). + +1.24.0.1 Ryan Thomas October 2016 + * Fixed issue with passing '--enable-profiling' when invoking + Setup scripts built with older versions of Cabal (#3873). + * Fixed various behaviour differences between network transports + (#3429). + * Updated to depend on the latest hackage-security that fixes + various issues on Windows. 
+ * Fixed 'new-build' to exit with a non-zero exit code on failure + (#3506). + * Store secure repo index data as 01-index.* (#3862). + * Added new hackage-security root keys for distribution with + cabal-install. + * Fix an issue where 'cabal install' sometimes had to be run twice + for packages with build-type: Custom and a custom-setup stanza + (#3723). + * 'cabal sdist' no longer ignores '--builddir' when the package's + build-type is Custom (#3794). + +1.24.0.0 Ryan Thomas March 2016 + * If there are multiple remote repos, 'cabal update' now updates + them in parallel (#2503). + * New 'cabal upload' option '-P'/'--password-command' for reading + Hackage password from arbitrary program output (#2506). + * Better warning for 'cabal run' (#2510). + * 'cabal init' now warns if the chosen package name is already + registered in the source package index (#2436). + * New 'cabal install' option: '--offline' (#2578). + * Accept 'builddir' field in cabal.config (#2484) + * Read 'builddir' option from 'CABAL_BUILDDIR' environment variable. + * Remote repos may now be configured to use https URLs. This uses + either curl or wget or, on Windows, PowerShell, under the hood (#2687). + * Install target URLs can now use https e.g. 'cabal install + https://example.com/foo-1.0.tar.gz'. + * Automatically use https for cabal upload for the main + hackage.haskell.org (other repos will use whatever they are + configured to use). + * Support for dependencies of custom Setup.hs scripts + (see http://www.well-typed.com/blog/2015/07/cabal-setup-deps/). + * 'cabal' program itself now can be used as an external setup + method. This fixes an issue when Cabal version mismatch caused + unnecessary reconfigures (#2633). + * Improved error message for unsatisfiable package constraints + (#2727). + * Fixed a space leak in 'cabal update' (#2826). + * 'cabal exec' and 'sandbox hc-pkg' now use the configured + compiler (#2859). + * New 'cabal haddock' option: '--for-hackage' (#2852). 
+ * Added a warning when the solver cannot find a dependency (#2853). + * New 'cabal upload' option: '--doc': upload documentation to + hackage (#2890). + * Improved error handling for 'sandbox delete-source' (#2943). + * Solver support for extension and language flavours (#2873). + * Support for secure repos using hackage-security (#2983). + * Added a log file message similar to one printed by 'make' when + building in another directory (#2642). + * Added new subcommand 'init' to 'cabal user-config'. This + subcommand creates a cabal configuration file in either the + default location or as specified by --config-file (#2553). + * The man page for 'cabal-install' is now automatically generated + (#2877). + * The '--allow-newer' option now works as expected when specified + multiple times (#2588). + * New config file field: 'extra-framework-dirs' (extra locations + to find OS X frameworks in). Can be also specified as an argument + for 'install' and 'configure' commands (#3158). + * It's now possible to limit the scope of '--allow-newer' to + single packages in the install plan (#2756). + * Full '--allow-newer' syntax is now supported in the config file + (that is, 'allow-newer: base, ghc-prim, some-package:vector') + (#3171). + * Improved performance of '--reorder-goals' (#3208). + * Fixed space leaks in modular solver (#2916, #2914). + * Made the solver aware of pkg-config constraints (#3023). + * Added a new command: 'gen-bounds' (#3223). See + http://softwaresimply.blogspot.se/2015/08/cabal-gen-bounds-easy-generation-of.html. + * Tech preview of new nix-style isolated project-based builds. + Currently provides the commands (new-)build/repl/configure. + +1.22.9.0 Ryan Thomas March 2016 + * Include Cabal-1.22.8.0 + +1.22.8.0 Ryan Thomas February 2016 + * Only Custom setup scripts should be compiled with '-i -i.'. + * installedCabalVersion: Don't special-case Cabal anymore. + * Bump the HTTP upper bound. See #3069. 
+ +1.22.7.0 Ryan Thomas December 2015 + * Remove GZipUtils tests + * maybeDecompress: bail on all errors at the beginning of the + stream with zlib < 0.6 + * Correct maybeDecompress + +1.22.6.0 Ryan Thomas June 2015 + * A fix for @ezyang's fix for #2502. (Mikhail Glushenkov) + +1.22.5.0 Ryan Thomas June 2015 + * Reduce temporary directory name length, fixes #2502. (Edward Z. Yang) + +1.22.4.0 Ryan Thomas May 2015 + * Force cabal upload to always use digest auth and never basic auth. + * Add dependency-graph information to `printPlan` output + * bootstrap.sh: fixes linker matching to avoid cases where tested + linker names appear unexpectedly in compiler output (fixes #2542) + +1.22.3.0 Ryan Thomas April 2015 + * Fix bash completion for sandbox subcommands - Fixes #2513 + (Mikhail Glushenkov) + * filterConfigureFlags: filter more flags (Mikhail Glushenkov) + +1.22.2.0 Ryan Thomas March 2015 + * Don't pass '--{en,dis}able-profiling' to old setup exes. + * -Wall police + * Allow filepath 1.4 + +1.22.0.0 Johan Tibell January 2015 + * New command: user-config (#2159). + * Implement 'cabal repl --only' (#2016). + * Fix an issue when 'cabal repl' was doing unnecessary compilation + (#1715). + * Prompt the user to specify source directory in 'cabal init' + (#1989). + * Remove the self-upgrade check (#2090). + * Don't redownload already downloaded packages when bootstrapping + (#2133). + * Support sandboxes in 'bootstrap.sh' (#2137). + * Install profiling and shared libs by default in 'bootstrap.sh' + (#2009). + +1.20.2.0 Ryan Thomas February 2016 + * Only Custom setup scripts should be compiled with '-i -i.'. + * installedCabalVersion: Don't special-case Cabal anymore. + +1.20.1.0 Ryan Thomas May 2015 + * Force cabal upload to always use digest auth and never basic auth. + * bootstrap.sh: install network-uri before HTTP + +1.20.0.5 Johan Tibell December 2014 + * Support random 1.1. + * Fix bootstrap script after network package split. + * Support network-2.6 in test suite. 
+ +1.20.0.3 Johan Tibell June 2014 + * Don't attempt to rename dist if it is already named correctly + * Treat all flags of a package as interdependent. + * Allow template-haskell to be upgradable again + +1.20.0.2 Johan Tibell May 2014 + * Increase max-backjumps to 2000. + * Fix solver bug which led to missed install plans. + * Fix streaming test output. + * Tweak solver heuristics to avoid reinstalls. + +1.20.0.1 Johan Tibell May 2014 + * Fix cabal repl search path bug on Windows + * Include OS and arch in cabal-install user agent + * Revert --constraint flag behavior in configure to 1.18 behavior + +1.20.0.0 Johan Tibell April 2014 + * Build only selected executables + * Add -j flag to build/test/bench/run + * Improve install log file + * Don't symlink executables when in a sandbox + * Add --package-db flag to 'list' and 'info' + * Make upload more efficient + * Add --require-sandbox option + * Add experimental Cabal file format command + * Add haddock section to config file + * Add --main-is flag to init + +1.18.2.0 Ryan Thomas February 2016 + * Only Custom setup scripts should be compiled with '-i -i.'. + * installedCabalVersion: Don't special-case Cabal anymore. + +1.18.1.0 Ryan Thomas May 2015 + * Force cabal upload to always use digest auth and never basic auth. + * Merge pull request #2367 from juhp/patch-2 + * Fix bootstrap.sh by bumping HTTP to 4000.2.16.1 + +1.18.0.7 Johan Tibell December 2014 + * Support random 1.1. + * Fix bootstrap script after network package split. + * Support network-2.6 in test suite. + +1.18.0.5 Johan Tibell July 2014 + * Make solver flag resolution more conservative. + +1.18.0.4 Johan Tibell May 2014 + * Increase max-backjumps to 2000. + * Fix solver bug which led to missed install plans. + * Tweak solver heuristics to avoid reinstalls. 
+ +0.14.0 Andres Loeh April 2012 + * Works with ghc-7.4 + * Completely new modular dependency solver (default in most cases) + * Some tweaks to old topdown dependency solver + * Install plans are now checked for reinstalls that break packages + * Flags --constraint and --preference work for nonexisting packages + * New constraint forms for source and installed packages + * New constraint form for package-specific use flags + * New constraint form for package-specific stanza flags + * Test suite dependencies are pulled in on demand + * No longer install packages on --enable-tests when tests fail + * New "cabal bench" command + * Various "cabal init" tweaks + +0.10.0 Duncan Coutts February 2011 + * New package targets: local dirs, local and remote tarballs + * Initial support for a "world" package target + * Partial fix for situation where user packages mask global ones + * Removed cabal upgrade, new --upgrade-dependencies flag + * New cabal install --only-dependencies flag + * New cabal fetch --no-dependencies and --dry-run flags + * Improved output for cabal info + * Simpler and faster bash command line completion + * Fix for broken proxies that decompress wrongly + * Fix for cabal unpack to preserve executable permissions + * Adjusted the output for the -v verbosity level in a few places + +0.8.2 Duncan Coutts March 2010 + * Fix for cabal update on Windows + * On windows switch to per-user installs (rather than global) + * Handle intra-package dependencies in dependency planning + * Minor tweaks to cabal init feature + * Fix various -Wall warnings + * Fix for cabal sdist --snapshot + +0.8.0 Duncan Coutts Dec 2009 + * Works with ghc-6.12 + * New "cabal init" command for making initial project .cabal file + * New feature to maintain an index of haddock documentation + +0.6.4 Duncan Coutts Nov 2009 + * Improve the algorithm for selecting the base package version + * Hackage errors now reported by "cabal upload [--check]" + * Improved format of messages from "cabal 
check" + * Config file can now be selected by an env var + * Updated tar reading/writing code + * Improve instructions in the README and bootstrap output + * Fix bootstrap.sh on Solaris 9 + * Fix bootstrap for systems where network uses parsec 3 + * Fix building with ghc-6.6 + +0.6.2 Duncan Coutts Feb 2009 + * The upgrade command has been disabled in this release + * The configure and install commands now have consistent behaviour + * Reduce the tendancy to re-install already existing packages + * The --constraint= flag now works for the install command + * New --preference= flag for soft constraints / version preferences + * Improved bootstrap.sh script, smarter and better error checking + * New cabal info command to display detailed info on packages + * New cabal unpack command to download and untar a package + * HTTP-4000 package required, should fix bugs with http proxies + * Now works with authenticated proxies. + * On Windows can now override the proxy setting using an env var + * Fix compatibility with config files generated by older versions + * Warn if the hackage package list is very old + * More helpful --help output, mention config file and examples + * Better documentation in ~/.cabal/config file + * Improved command line interface for logging and build reporting + * Minor improvements to some messages + +0.6.0 Duncan Coutts Oct 2008 + * Constraint solver can now cope with base 3 and base 4 + * Allow use of package version preferences from hackage index + * More detailed output from cabal install --dry-run -v + * Improved bootstrap.sh + +0.5.2 Duncan Coutts Aug 2008 + * Suport building haddock documentaion + * Self-reinstall now works on Windows + * Allow adding symlinks to excutables into a separate bindir + * New self-documenting config file + * New install --reinstall flag + * More helpful status messages in a couple places + * Upload failures now report full text error message from the server + * Support for local package repositories + * New build 
logging and reporting + * New command to upload build reports to (a compatible) server + * Allow tilde in hackage server URIs + * Internal code improvements + * Many other minor improvements and bug fixes + +0.5.1 Duncan Coutts June 2008 + * Restore minimal hugs support in dependency resolver + * Fix for disabled http proxies on Windows + * Revert to global installs on Windows by default + +0.5.0 Duncan Coutts June 2008 + * New package dependency resolver, solving diamond dep problem + * Integrate cabal-setup functionality + * Integrate cabal-upload functionality + * New cabal update and check commands + * Improved behavior for install and upgrade commands + * Full Windows support + * New command line handling + * Bash command line completion + * Allow case insensitive package names on command line + * New --dry-run flag for install, upgrade and fetch commands + * New --root-cmd flag to allow installing as root + * New --cabal-lib-version flag to select different Cabal lib versions + * Support for HTTP proxies + * Improved cabal list output + * Build other non-dependent packages even when some fail + * Report a summary of all build failures at the end + * Partial support for hugs + * Partial implementation of build reporting and logging + * More consistent logging and verbosity + * Significant internal code restructuring + +0.4 Duncan Coutts Oct 2007 + * Renamed executable from 'cabal-install' to 'cabal' + * Partial Windows compatibility + * Do per-user installs by default + * cabal install now installs the package in the current directory + * Allow multiple remote servers + * Use zlib lib and internal tar code and rather than external tar + * Reorganised configuration files + * Significant code restructuring + * Cope with packages with conditional dependencies + +0.3 and older versions by Lemmih, Paolo Martini and others 2006-2007 + * Switch from smart-server, dumb-client model to the reverse + * New .tar.gz based index format + * New remote and local package 
archive format diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Anonymous.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Anonymous.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Anonymous.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Anonymous.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,317 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Reporting +-- Copyright : (c) David Waern 2008 +-- License : BSD-like +-- +-- Maintainer : david.waern@gmail.com +-- Stability : experimental +-- Portability : portable +-- +-- Anonymous build report data structure, printing and parsing +-- +----------------------------------------------------------------------------- +module Distribution.Client.BuildReports.Anonymous ( + BuildReport(..), + InstallOutcome(..), + Outcome(..), + + -- * Constructing and writing reports + new, + + -- * parsing and pretty printing + parse, + parseList, + show, +-- showList, + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude hiding (show) + +import qualified Distribution.Client.Types as BR + ( BuildOutcome, BuildFailure(..), BuildResult(..) + , DocsResult(..), TestsResult(..) ) +import Distribution.Client.Utils + ( mergeBy, MergeResult(..) ) +import qualified Paths_cabal_install (version) + +import Distribution.Package + ( PackageIdentifier(..), mkPackageName ) +import Distribution.PackageDescription + ( FlagName, mkFlagName, unFlagName + , FlagAssignment, mkFlagAssignment, unFlagAssignment ) +import Distribution.Version + ( mkVersion' ) +import Distribution.System + ( OS, Arch ) +import Distribution.Compiler + ( CompilerId(..) 
) +import qualified Distribution.Text as Text + ( Text(disp, parse) ) +import Distribution.ParseUtils + ( FieldDescr(..), ParseResult(..), Field(..) + , simpleField, listField, ppFields, readFields + , syntaxError, locatedErrorMsg ) +import Distribution.Simple.Utils + ( comparing ) + +import qualified Distribution.Compat.ReadP as Parse + ( ReadP, pfail, munch1, skipSpaces ) +import qualified Text.PrettyPrint as Disp + ( Doc, render, char, text ) +import Text.PrettyPrint + ( (<+>) ) + +import Data.Char as Char + ( isAlpha, isAlphaNum ) + +data BuildReport + = BuildReport { + -- | The package this build report is about + package :: PackageIdentifier, + + -- | The OS and Arch the package was built on + os :: OS, + arch :: Arch, + + -- | The Haskell compiler (and hopefully version) used + compiler :: CompilerId, + + -- | The uploading client, ie cabal-install-x.y.z + client :: PackageIdentifier, + + -- | Which configurations flags we used + flagAssignment :: FlagAssignment, + + -- | Which dependent packages we were using exactly + dependencies :: [PackageIdentifier], + + -- | Did installing work ok? + installOutcome :: InstallOutcome, + + -- Which version of the Cabal library was used to compile the Setup.hs +-- cabalVersion :: Version, + + -- Which build tools we were using (with versions) +-- tools :: [PackageIdentifier], + + -- | Configure outcome, did configure work ok? + docsOutcome :: Outcome, + + -- | Configure outcome, did configure work ok? 
+ testsOutcome :: Outcome + } + +data InstallOutcome + = PlanningFailed + | DependencyFailed PackageIdentifier + | DownloadFailed + | UnpackFailed + | SetupFailed + | ConfigureFailed + | BuildFailed + | TestsFailed + | InstallFailed + | InstallOk + deriving Eq + +data Outcome = NotTried | Failed | Ok + deriving Eq + +new :: OS -> Arch -> CompilerId -> PackageIdentifier -> FlagAssignment + -> [PackageIdentifier] -> BR.BuildOutcome -> BuildReport +new os' arch' comp pkgid flags deps result = + BuildReport { + package = pkgid, + os = os', + arch = arch', + compiler = comp, + client = cabalInstallID, + flagAssignment = flags, + dependencies = deps, + installOutcome = convertInstallOutcome, +-- cabalVersion = undefined + docsOutcome = convertDocsOutcome, + testsOutcome = convertTestsOutcome + } + where + convertInstallOutcome = case result of + Left BR.PlanningFailed -> PlanningFailed + Left (BR.DependentFailed p) -> DependencyFailed p + Left (BR.DownloadFailed _) -> DownloadFailed + Left (BR.UnpackFailed _) -> UnpackFailed + Left (BR.ConfigureFailed _) -> ConfigureFailed + Left (BR.BuildFailed _) -> BuildFailed + Left (BR.TestsFailed _) -> TestsFailed + Left (BR.InstallFailed _) -> InstallFailed + Right (BR.BuildResult _ _ _) -> InstallOk + convertDocsOutcome = case result of + Left _ -> NotTried + Right (BR.BuildResult BR.DocsNotTried _ _) -> NotTried + Right (BR.BuildResult BR.DocsFailed _ _) -> Failed + Right (BR.BuildResult BR.DocsOk _ _) -> Ok + convertTestsOutcome = case result of + Left (BR.TestsFailed _) -> Failed + Left _ -> NotTried + Right (BR.BuildResult _ BR.TestsNotTried _) -> NotTried + Right (BR.BuildResult _ BR.TestsOk _) -> Ok + +cabalInstallID :: PackageIdentifier +cabalInstallID = + PackageIdentifier (mkPackageName "cabal-install") + (mkVersion' Paths_cabal_install.version) + +-- ------------------------------------------------------------ +-- * External format +-- ------------------------------------------------------------ + +initialBuildReport :: 
BuildReport +initialBuildReport = BuildReport { + package = requiredField "package", + os = requiredField "os", + arch = requiredField "arch", + compiler = requiredField "compiler", + client = requiredField "client", + flagAssignment = mempty, + dependencies = [], + installOutcome = requiredField "install-outcome", +-- cabalVersion = Nothing, +-- tools = [], + docsOutcome = NotTried, + testsOutcome = NotTried + } + where + requiredField fname = error ("required field: " ++ fname) + +-- ----------------------------------------------------------------------------- +-- Parsing + +parse :: String -> Either String BuildReport +parse s = case parseFields s of + ParseFailed perror -> Left msg where (_, msg) = locatedErrorMsg perror + ParseOk _ report -> Right report + +parseFields :: String -> ParseResult BuildReport +parseFields input = do + fields <- traverse extractField =<< readFields input + let merged = mergeBy (\desc (_,name,_) -> compare (fieldName desc) name) + sortedFieldDescrs + (sortBy (comparing (\(_,name,_) -> name)) fields) + checkMerged initialBuildReport merged + + where + extractField :: Field -> ParseResult (Int, String, String) + extractField (F line name value) = return (line, name, value) + extractField (Section line _ _ _) = syntaxError line "Unrecognized stanza" + extractField (IfBlock line _ _ _) = syntaxError line "Unrecognized stanza" + + checkMerged report [] = return report + checkMerged report (merged:remaining) = case merged of + InBoth fieldDescr (line, _name, value) -> do + report' <- fieldSet fieldDescr line value report + checkMerged report' remaining + OnlyInRight (line, name, _) -> + syntaxError line ("Unrecognized field " ++ name) + OnlyInLeft fieldDescr -> + fail ("Missing field " ++ fieldName fieldDescr) + +parseList :: String -> [BuildReport] +parseList str = + [ report | Right report <- map parse (split str) ] + + where + split :: String -> [String] + split = filter (not . null) . unfoldr chunk . 
lines + chunk [] = Nothing + chunk ls = case break null ls of + (r, rs) -> Just (unlines r, dropWhile null rs) + +-- ----------------------------------------------------------------------------- +-- Pretty-printing + +show :: BuildReport -> String +show = Disp.render . ppFields fieldDescrs + +-- ----------------------------------------------------------------------------- +-- Description of the fields, for parsing/printing + +fieldDescrs :: [FieldDescr BuildReport] +fieldDescrs = + [ simpleField "package" Text.disp Text.parse + package (\v r -> r { package = v }) + , simpleField "os" Text.disp Text.parse + os (\v r -> r { os = v }) + , simpleField "arch" Text.disp Text.parse + arch (\v r -> r { arch = v }) + , simpleField "compiler" Text.disp Text.parse + compiler (\v r -> r { compiler = v }) + , simpleField "client" Text.disp Text.parse + client (\v r -> r { client = v }) + , listField "flags" dispFlag parseFlag + (unFlagAssignment . flagAssignment) + (\v r -> r { flagAssignment = mkFlagAssignment v }) + , listField "dependencies" Text.disp Text.parse + dependencies (\v r -> r { dependencies = v }) + , simpleField "install-outcome" Text.disp Text.parse + installOutcome (\v r -> r { installOutcome = v }) + , simpleField "docs-outcome" Text.disp Text.parse + docsOutcome (\v r -> r { docsOutcome = v }) + , simpleField "tests-outcome" Text.disp Text.parse + testsOutcome (\v r -> r { testsOutcome = v }) + ] + +sortedFieldDescrs :: [FieldDescr BuildReport] +sortedFieldDescrs = sortBy (comparing fieldName) fieldDescrs + +dispFlag :: (FlagName, Bool) -> Disp.Doc +dispFlag (fname, True) = Disp.text (unFlagName fname) +dispFlag (fname, False) = Disp.char '-' <<>> Disp.text (unFlagName fname) + +parseFlag :: Parse.ReadP r (FlagName, Bool) +parseFlag = do + name <- Parse.munch1 (\c -> Char.isAlphaNum c || c == '_' || c == '-') + case name of + ('-':flag) -> return (mkFlagName flag, False) + flag -> return (mkFlagName flag, True) + +instance Text.Text InstallOutcome where + 
disp PlanningFailed = Disp.text "PlanningFailed" + disp (DependencyFailed pkgid) = Disp.text "DependencyFailed" <+> Text.disp pkgid + disp DownloadFailed = Disp.text "DownloadFailed" + disp UnpackFailed = Disp.text "UnpackFailed" + disp SetupFailed = Disp.text "SetupFailed" + disp ConfigureFailed = Disp.text "ConfigureFailed" + disp BuildFailed = Disp.text "BuildFailed" + disp TestsFailed = Disp.text "TestsFailed" + disp InstallFailed = Disp.text "InstallFailed" + disp InstallOk = Disp.text "InstallOk" + + parse = do + name <- Parse.munch1 Char.isAlphaNum + case name of + "PlanningFailed" -> return PlanningFailed + "DependencyFailed" -> do Parse.skipSpaces + pkgid <- Text.parse + return (DependencyFailed pkgid) + "DownloadFailed" -> return DownloadFailed + "UnpackFailed" -> return UnpackFailed + "SetupFailed" -> return SetupFailed + "ConfigureFailed" -> return ConfigureFailed + "BuildFailed" -> return BuildFailed + "TestsFailed" -> return TestsFailed + "InstallFailed" -> return InstallFailed + "InstallOk" -> return InstallOk + _ -> Parse.pfail + +instance Text.Text Outcome where + disp NotTried = Disp.text "NotTried" + disp Failed = Disp.text "Failed" + disp Ok = Disp.text "Ok" + parse = do + name <- Parse.munch1 Char.isAlpha + case name of + "NotTried" -> return NotTried + "Failed" -> return Failed + "Ok" -> return Ok + _ -> Parse.pfail diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Storage.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Storage.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Storage.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Storage.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,159 @@ 
+----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Reporting +-- Copyright : (c) David Waern 2008 +-- License : BSD-like +-- +-- Maintainer : david.waern@gmail.com +-- Stability : experimental +-- Portability : portable +-- +-- Anonymous build report data structure, printing and parsing +-- +----------------------------------------------------------------------------- +module Distribution.Client.BuildReports.Storage ( + + -- * Storing and retrieving build reports + storeAnonymous, + storeLocal, +-- retrieve, + + -- * 'InstallPlan' support + fromInstallPlan, + fromPlanningFailure, + ) where + +import qualified Distribution.Client.BuildReports.Anonymous as BuildReport +import Distribution.Client.BuildReports.Anonymous (BuildReport) + +import Distribution.Client.Types +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Client.InstallPlan + ( InstallPlan ) + +import qualified Distribution.Solver.Types.ComponentDeps as CD +import Distribution.Solver.Types.SourcePackage + +import Distribution.Package + ( PackageId, packageId ) +import Distribution.PackageDescription + ( FlagAssignment ) +import Distribution.Simple.InstallDirs + ( PathTemplate, fromPathTemplate + , initialPathTemplateEnv, substPathTemplate ) +import Distribution.System + ( Platform(Platform) ) +import Distribution.Compiler + ( CompilerId(..), CompilerInfo(..) 
) +import Distribution.Simple.Utils + ( comparing, equating ) + +import Data.List + ( groupBy, sortBy ) +import Data.Maybe + ( mapMaybe ) +import System.FilePath + ( (), takeDirectory ) +import System.Directory + ( createDirectoryIfMissing ) + +storeAnonymous :: [(BuildReport, Maybe Repo)] -> IO () +storeAnonymous reports = sequence_ + [ appendFile file (concatMap format reports') + | (repo, reports') <- separate reports + , let file = repoLocalDir repo "build-reports.log" ] + --TODO: make this concurrency safe, either lock the report file or make sure + -- the writes for each report are atomic (under 4k and flush at boundaries) + + where + format r = '\n' : BuildReport.show r ++ "\n" + separate :: [(BuildReport, Maybe Repo)] + -> [(Repo, [BuildReport])] + separate = map (\rs@((_,repo,_):_) -> (repo, [ r | (r,_,_) <- rs ])) + . map concat + . groupBy (equating (repoName . head)) + . sortBy (comparing (repoName . head)) + . groupBy (equating repoName) + . onlyRemote + repoName (_,_,rrepo) = remoteRepoName rrepo + + onlyRemote :: [(BuildReport, Maybe Repo)] + -> [(BuildReport, Repo, RemoteRepo)] + onlyRemote rs = + [ (report, repo, remoteRepo) + | (report, Just repo) <- rs + , Just remoteRepo <- [maybeRepoRemote repo] + ] + +storeLocal :: CompilerInfo -> [PathTemplate] -> [(BuildReport, Maybe Repo)] + -> Platform -> IO () +storeLocal cinfo templates reports platform = sequence_ + [ do createDirectoryIfMissing True (takeDirectory file) + appendFile file output + --TODO: make this concurrency safe, either lock the report file or make + -- sure the writes for each report are atomic + | (file, reports') <- groupByFileName + [ (reportFileName template report, report) + | template <- templates + , (report, _repo) <- reports ] + , let output = concatMap format reports' + ] + where + format r = '\n' : BuildReport.show r ++ "\n" + + reportFileName template report = + fromPathTemplate (substPathTemplate env template) + where env = initialPathTemplateEnv + (BuildReport.package 
report) + -- TODO: In principle, we can support $pkgkey, but only + -- if the configure step succeeds. So add a Maybe field + -- to the build report, and either use that or make up + -- a fake identifier if it's not available. + (error "storeLocal: package key not available") + cinfo + platform + + groupByFileName = map (\grp@((filename,_):_) -> (filename, map snd grp)) + . groupBy (equating fst) + . sortBy (comparing fst) + +-- ------------------------------------------------------------ +-- * InstallPlan support +-- ------------------------------------------------------------ + +fromInstallPlan :: Platform -> CompilerId + -> InstallPlan + -> BuildOutcomes + -> [(BuildReport, Maybe Repo)] +fromInstallPlan platform comp plan buildOutcomes = + mapMaybe (\pkg -> fromPlanPackage + platform comp pkg + (InstallPlan.lookupBuildOutcome pkg buildOutcomes)) + . InstallPlan.toList + $ plan + +fromPlanPackage :: Platform -> CompilerId + -> InstallPlan.PlanPackage + -> Maybe BuildOutcome + -> Maybe (BuildReport, Maybe Repo) +fromPlanPackage (Platform arch os) comp + (InstallPlan.Configured (ConfiguredPackage _ srcPkg flags _ deps)) + (Just buildResult) = + Just ( BuildReport.new os arch comp + (packageId srcPkg) flags + (map packageId (CD.nonSetupDeps deps)) + buildResult + , extractRepo srcPkg) + where + extractRepo (SourcePackage { packageSource = RepoTarballPackage repo _ _ }) + = Just repo + extractRepo _ = Nothing + +fromPlanPackage _ _ _ _ = Nothing + + +fromPlanningFailure :: Platform -> CompilerId + -> [PackageId] -> FlagAssignment -> [(BuildReport, Maybe Repo)] +fromPlanningFailure (Platform arch os) comp pkgids flags = + [ (BuildReport.new os arch comp pkgid flags [] (Left PlanningFailed), Nothing) + | pkgid <- pkgids ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Types.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Types.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,50 @@ +{-# LANGUAGE DeriveGeneric #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.BuildReports.Types +-- Copyright : (c) Duncan Coutts 2009 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Types related to build reporting +-- +----------------------------------------------------------------------------- +module Distribution.Client.BuildReports.Types ( + ReportLevel(..), + ) where + +import qualified Distribution.Text as Text + ( Text(..) ) + +import qualified Distribution.Compat.ReadP as Parse + ( pfail, munch1 ) +import qualified Text.PrettyPrint as Disp + ( text ) + +import Data.Char as Char + ( isAlpha, toLower ) +import GHC.Generics (Generic) +import Distribution.Compat.Binary (Binary) + + +data ReportLevel = NoReports | AnonymousReports | DetailedReports + deriving (Eq, Ord, Enum, Show, Generic) + +instance Binary ReportLevel + +instance Text.Text ReportLevel where + disp NoReports = Disp.text "none" + disp AnonymousReports = Disp.text "anonymous" + disp DetailedReports = Disp.text "detailed" + parse = do + name <- Parse.munch1 Char.isAlpha + case lowercase name of + "none" -> return NoReports + "anonymous" -> return AnonymousReports + "detailed" -> return DetailedReports + _ -> Parse.pfail + +lowercase :: String -> String +lowercase = map Char.toLower diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Upload.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Upload.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Upload.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/BuildReports/Upload.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,92 @@ +{-# LANGUAGE CPP, PatternGuards #-} +-- This is a quick hack for uploading build reports to Hackage. + +module Distribution.Client.BuildReports.Upload + ( BuildLog + , BuildReportId + , uploadReports + ) where + +{- +import Network.Browser + ( BrowserAction, request, setAllowRedirects ) +import Network.HTTP + ( Header(..), HeaderName(..) + , Request(..), RequestMethod(..), Response(..) ) +import Network.TCP (HandleStream) +-} +import Network.URI (URI, uriPath) --parseRelativeReference, relativeTo) + +import Control.Monad + ( forM_ ) +import System.FilePath.Posix + ( () ) +import qualified Distribution.Client.BuildReports.Anonymous as BuildReport +import Distribution.Client.BuildReports.Anonymous (BuildReport) +import Distribution.Text (display) +import Distribution.Verbosity (Verbosity) +import Distribution.Simple.Utils (die') +import Distribution.Client.HttpUtils +import Distribution.Client.Setup + ( RepoContext(..) 
) + +type BuildReportId = URI +type BuildLog = String + +uploadReports :: Verbosity -> RepoContext -> (String, String) -> URI -> [(BuildReport, Maybe BuildLog)] -> IO () +uploadReports verbosity repoCtxt auth uri reports = do + forM_ reports $ \(report, mbBuildLog) -> do + buildId <- postBuildReport verbosity repoCtxt auth uri report + case mbBuildLog of + Just buildLog -> putBuildLog verbosity repoCtxt auth buildId buildLog + Nothing -> return () + +postBuildReport :: Verbosity -> RepoContext -> (String, String) -> URI -> BuildReport -> IO BuildReportId +postBuildReport verbosity repoCtxt auth uri buildReport = do + let fullURI = uri { uriPath = "/package" display (BuildReport.package buildReport) "reports" } + transport <- repoContextGetTransport repoCtxt + res <- postHttp transport verbosity fullURI (BuildReport.show buildReport) (Just auth) + case res of + (303, redir) -> return $ undefined redir --TODO parse redir + _ -> die' verbosity "unrecognized response" -- give response + +{- + setAllowRedirects False + (_, response) <- request Request { + rqURI = uri { uriPath = "/package" display (BuildReport.package buildReport) "reports" }, + rqMethod = POST, + rqHeaders = [Header HdrContentType ("text/plain"), + Header HdrContentLength (show (length body)), + Header HdrAccept ("text/plain")], + rqBody = body + } + case rspCode response of + (3,0,3) | [Just buildId] <- [ do rel <- parseRelativeReference location +#if defined(VERSION_network_uri) + return $ relativeTo rel uri +#elif defined(VERSION_network) +#if MIN_VERSION_network(2,4,0) + return $ relativeTo rel uri +#else + relativeTo rel uri +#endif +#endif + | Header HdrLocation location <- rspHeaders response ] + -> return $ buildId + _ -> error "Unrecognised response from server." + where body = BuildReport.show buildReport +-} + + +-- TODO force this to be a PUT? 
+ +putBuildLog :: Verbosity -> RepoContext -> (String, String) + -> BuildReportId -> BuildLog + -> IO () +putBuildLog verbosity repoCtxt auth reportId buildLog = do + let fullURI = reportId {uriPath = uriPath reportId "log"} + transport <- repoContextGetTransport repoCtxt + res <- postHttp transport verbosity fullURI buildLog (Just auth) + case res of + (200, _) -> return () + _ -> die' verbosity "unrecognized response" -- give response diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Check.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Check.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Check.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Check.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,111 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Check +-- Copyright : (c) Lennart Kolmodin 2008 +-- License : BSD-like +-- +-- Maintainer : kolmodin@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Check a package for common mistakes +-- +----------------------------------------------------------------------------- +module Distribution.Client.Check ( + check + ) where + +import Distribution.Client.Compat.Prelude +import Prelude () + +import Distribution.PackageDescription (GenericPackageDescription) +import Distribution.PackageDescription.Check +import Distribution.PackageDescription.Configuration (flattenPackageDescription) +import Distribution.PackageDescription.Parsec + (parseGenericPackageDescription, runParseResult) +import Distribution.Parsec.Common (PWarning (..), showPError, showPWarning) +import Distribution.Simple.Utils (defaultPackageDesc, die', notice, warn) +import Distribution.Verbosity (Verbosity) + +import 
qualified Data.ByteString as BS +import qualified System.Directory as Dir + +readGenericPackageDescriptionCheck :: Verbosity -> FilePath -> IO ([PWarning], GenericPackageDescription) +readGenericPackageDescriptionCheck verbosity fpath = do + exists <- Dir.doesFileExist fpath + unless exists $ + die' verbosity $ + "Error Parsing: file \"" ++ fpath ++ "\" doesn't exist. Cannot continue." + bs <- BS.readFile fpath + let (warnings, result) = runParseResult (parseGenericPackageDescription bs) + case result of + Left (_, errors) -> do + traverse_ (warn verbosity . showPError fpath) errors + die' verbosity $ "Failed parsing \"" ++ fpath ++ "\"." + Right x -> return (warnings, x) + +-- | Note: must be called with the CWD set to the directory containing +-- the '.cabal' file. +check :: Verbosity -> IO Bool +check verbosity = do + pdfile <- defaultPackageDesc verbosity + (ws, ppd) <- readGenericPackageDescriptionCheck verbosity pdfile + -- convert parse warnings into PackageChecks + -- Note: we /could/ pick different levels, based on warning type. + let ws' = [ PackageDistSuspicious (showPWarning pdfile w) | w <- ws ] + -- flatten the generic package description into a regular package + -- description + -- TODO: this may give more warnings than it should give; + -- consider two branches of a condition, one saying + -- ghc-options: -Wall + -- and the other + -- ghc-options: -Werror + -- joined into + -- ghc-options: -Wall -Werror + -- checkPackages will yield a warning on the last line, but it + -- would not on each individual branch. + -- Hovever, this is the same way hackage does it, so we will yield + -- the exact same errors as it will. + let pkg_desc = flattenPackageDescription ppd + ioChecks <- checkPackageFiles verbosity pkg_desc "." 
+ let packageChecks = ioChecks ++ checkPackage ppd (Just pkg_desc) ++ ws' + buildImpossible = [ x | x@PackageBuildImpossible {} <- packageChecks ] + buildWarning = [ x | x@PackageBuildWarning {} <- packageChecks ] + distSuspicious = [ x | x@PackageDistSuspicious {} <- packageChecks ] + ++ [ x | x@PackageDistSuspiciousWarn {} <- packageChecks ] + distInexusable = [ x | x@PackageDistInexcusable {} <- packageChecks ] + + unless (null buildImpossible) $ do + warn verbosity "The package will not build sanely due to these errors:" + printCheckMessages buildImpossible + + unless (null buildWarning) $ do + warn verbosity "The following warnings are likely to affect your build negatively:" + printCheckMessages buildWarning + + unless (null distSuspicious) $ do + warn verbosity "These warnings may cause trouble when distributing the package:" + printCheckMessages distSuspicious + + unless (null distInexusable) $ do + warn verbosity "The following errors will cause portability problems on other environments:" + printCheckMessages distInexusable + + let isDistError (PackageDistSuspicious {}) = False + isDistError (PackageDistSuspiciousWarn {}) = False + isDistError _ = True + isCheckError (PackageDistSuspiciousWarn {}) = False + isCheckError _ = True + errors = filter isDistError packageChecks + + unless (null errors) $ + warn verbosity "Hackage would reject this package." + + when (null packageChecks) $ + notice verbosity "No errors or warnings could be found in the package." + + return (not . any isCheckError $ packageChecks) + + where + printCheckMessages = traverse_ (warn verbosity . 
explanation) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdBench.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdBench.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdBench.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdBench.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,242 @@ +{-# LANGUAGE NamedFieldPuns #-} + +-- | cabal-install CLI command: bench +-- +module Distribution.Client.CmdBench ( + -- * The @bench@ CLI and action + benchCommand, + benchAction, + + -- * Internals exposed for testing + TargetProblem(..), + selectPackageTargets, + selectComponentTarget + ) where + +import Distribution.Client.ProjectOrchestration +import Distribution.Client.CmdErrorMessages + +import Distribution.Client.Setup + ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) +import qualified Distribution.Client.Setup as Client +import Distribution.Simple.Setup + ( HaddockFlags, fromFlagOrDefault ) +import Distribution.Simple.Command + ( CommandUI(..), usageAlternatives ) +import Distribution.Text + ( display ) +import Distribution.Verbosity + ( Verbosity, normal ) +import Distribution.Simple.Utils + ( wrapText, die' ) + +import Control.Monad (when) + + +benchCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) +benchCommand = Client.installCommand { + commandName = "new-bench", + commandSynopsis = "Run benchmarks", + commandUsage = usageAlternatives "new-bench" [ "[TARGETS] [FLAGS]" ], + commandDescription = Just $ \_ -> wrapText $ + "Runs the specified benchmarks, first ensuring they are up to " + ++ "date.\n\n" + + ++ "Any benchmark in any package in the project can be specified. " + ++ "A package can be specified in which case all the benchmarks in the " + ++ "package are run. 
The default is to run all the benchmarks in the " + ++ "package in the current directory.\n\n" + + ++ "Dependencies are built or rebuilt as necessary. Additional " + ++ "configuration flags can be specified on the command line and these " + ++ "extend the project configuration from the 'cabal.project', " + ++ "'cabal.project.local' and other files.", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " new-bench\n" + ++ " Run all the benchmarks in the package in the current directory\n" + ++ " " ++ pname ++ " new-bench pkgname\n" + ++ " Run all the benchmarks in the package named pkgname\n" + ++ " " ++ pname ++ " new-bench cname\n" + ++ " Run the benchmark named cname\n" + ++ " " ++ pname ++ " new-bench cname -O2\n" + ++ " Run the benchmark built with '-O2' (including local libs used)\n\n" + + ++ cmdCommonHelpTextNewBuildBeta + } + + +-- | The @build@ command does a lot. It brings the install plan up to date, +-- selects that part of the plan needed by the given or implicit targets and +-- then executes the plan. +-- +-- For more details on how this works, see the module +-- "Distribution.Client.ProjectOrchestration" +-- +benchAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> GlobalFlags -> IO () +benchAction (configFlags, configExFlags, installFlags, haddockFlags) + targetStrings globalFlags = do + + baseCtx <- establishProjectBaseContext verbosity cliConfig + + targetSelectors <- either (reportTargetSelectorProblems verbosity) return + =<< readTargetSelectors (localPackages baseCtx) (Just BenchKind) targetStrings + + buildCtx <- + runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do + + when (buildSettingOnlyDeps (buildSettings baseCtx)) $ + die' verbosity $ + "The bench command does not support '--only-dependencies'. " + ++ "You may wish to use 'build --only-dependencies' and then " + ++ "use 'bench'." 
+ + -- Interpret the targets on the command line as bench targets + -- (as opposed to say build or haddock targets). + targets <- either (reportTargetProblems verbosity) return + $ resolveTargets + selectPackageTargets + selectComponentTarget + TargetProblemCommon + elaboratedPlan + Nothing + targetSelectors + + let elaboratedPlan' = pruneInstallPlanToTargets + TargetActionBench + targets + elaboratedPlan + return (elaboratedPlan', targets) + + printPlan verbosity baseCtx buildCtx + + buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx + runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes + where + verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + cliConfig = commandLineFlagsToProjectConfig + globalFlags configFlags configExFlags + installFlags haddockFlags + +-- | This defines what a 'TargetSelector' means for the @bench@ command. +-- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, +-- or otherwise classifies the problem. +-- +-- For the @bench@ command we select all buildable benchmarks, +-- or fail if there are no benchmarks or no buildable benchmarks. +-- +selectPackageTargets :: TargetSelector + -> [AvailableTarget k] -> Either TargetProblem [k] +selectPackageTargets targetSelector targets + + -- If there are any buildable benchmark targets then we select those + | not (null targetsBenchBuildable) + = Right targetsBenchBuildable + + -- If there are benchmarks but none are buildable then we report those + | not (null targetsBench) + = Left (TargetProblemNoneEnabled targetSelector targetsBench) + + -- If there are no benchmarks but some other targets then we report that + | not (null targets) + = Left (TargetProblemNoBenchmarks targetSelector) + + -- If there are no targets at all then we report that + | otherwise + = Left (TargetProblemNoTargets targetSelector) + where + targetsBenchBuildable = selectBuildableTargets + . 
filterTargetsKind BenchKind + $ targets + + targetsBench = forgetTargetsDetail + . filterTargetsKind BenchKind + $ targets + + +-- | For a 'TargetComponent' 'TargetSelector', check if the component can be +-- selected. +-- +-- For the @bench@ command we just need to check it is a benchmark, in addition +-- to the basic checks on being buildable etc. +-- +selectComponentTarget :: SubComponentTarget + -> AvailableTarget k -> Either TargetProblem k +selectComponentTarget subtarget@WholeComponent t + | CBenchName _ <- availableTargetComponentName t + = either (Left . TargetProblemCommon) return $ + selectComponentTargetBasic subtarget t + | otherwise + = Left (TargetProblemComponentNotBenchmark (availableTargetPackageId t) + (availableTargetComponentName t)) + +selectComponentTarget subtarget t + = Left (TargetProblemIsSubComponent (availableTargetPackageId t) + (availableTargetComponentName t) + subtarget) + +-- | The various error conditions that can occur when matching a +-- 'TargetSelector' against 'AvailableTarget's for the @bench@ command. +-- +data TargetProblem = + TargetProblemCommon TargetProblemCommon + + -- | The 'TargetSelector' matches benchmarks but none are buildable + | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] + + -- | There are no targets at all + | TargetProblemNoTargets TargetSelector + + -- | The 'TargetSelector' matches targets but no benchmarks + | TargetProblemNoBenchmarks TargetSelector + + -- | The 'TargetSelector' refers to a component that is not a benchmark + | TargetProblemComponentNotBenchmark PackageId ComponentName + + -- | Asking to benchmark an individual file or module is not supported + | TargetProblemIsSubComponent PackageId ComponentName SubComponentTarget + deriving (Eq, Show) + +reportTargetProblems :: Verbosity -> [TargetProblem] -> IO a +reportTargetProblems verbosity = + die' verbosity . unlines . 
map renderTargetProblem + +renderTargetProblem :: TargetProblem -> String +renderTargetProblem (TargetProblemCommon problem) = + renderTargetProblemCommon "run" problem + +renderTargetProblem (TargetProblemNoneEnabled targetSelector targets) = + renderTargetProblemNoneEnabled "benchmark" targetSelector targets + +renderTargetProblem (TargetProblemNoBenchmarks targetSelector) = + "Cannot run benchmarks for the target '" ++ showTargetSelector targetSelector + ++ "' which refers to " ++ renderTargetSelector targetSelector + ++ " because " + ++ plural (targetSelectorPluralPkgs targetSelector) "it does" "they do" + ++ " not contain any benchmarks." + +renderTargetProblem (TargetProblemNoTargets targetSelector) = + case targetSelectorFilter targetSelector of + Just kind | kind /= BenchKind + -> "The bench command is for running benchmarks, but the target '" + ++ showTargetSelector targetSelector ++ "' refers to " + ++ renderTargetSelector targetSelector ++ "." + + _ -> renderTargetProblemNoTargets "benchmark" targetSelector + +renderTargetProblem (TargetProblemComponentNotBenchmark pkgid cname) = + "The bench command is for running benchmarks, but the target '" + ++ showTargetSelector targetSelector ++ "' refers to " + ++ renderTargetSelector targetSelector ++ " from the package " + ++ display pkgid ++ "." + where + targetSelector = TargetComponent pkgid cname WholeComponent + +renderTargetProblem (TargetProblemIsSubComponent pkgid cname subtarget) = + "The bench command can only run benchmarks as a whole, " + ++ "not files or modules within them, but the target '" + ++ showTargetSelector targetSelector ++ "' refers to " + ++ renderTargetSelector targetSelector ++ "." 
+ where + targetSelector = TargetComponent pkgid cname subtarget diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdBuild.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdBuild.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdBuild.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdBuild.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,226 @@ +-- | cabal-install CLI command: build +-- +module Distribution.Client.CmdBuild ( + -- * The @build@ CLI and action + buildCommand, + buildAction, + + -- * Internals exposed for testing + TargetProblem(..), + selectPackageTargets, + selectComponentTarget + ) where + +import Distribution.Client.ProjectOrchestration +import Distribution.Client.CmdErrorMessages + +import Distribution.Compat.Semigroup ((<>)) +import Distribution.Client.Setup + ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags + , liftOptions, yesNoOpt ) +import qualified Distribution.Client.Setup as Client +import Distribution.Simple.Setup + ( HaddockFlags, Flag(..), toFlag, fromFlag, fromFlagOrDefault ) +import Distribution.Simple.Command + ( CommandUI(..), usageAlternatives, option ) +import Distribution.Verbosity + ( Verbosity, normal ) +import Distribution.Simple.Utils + ( wrapText, die' ) + +import qualified Data.Map as Map + + +buildCommand :: CommandUI (BuildFlags, (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)) +buildCommand = CommandUI { + commandName = "new-build", + commandSynopsis = "Compile targets within the project.", + commandUsage = usageAlternatives "new-build" [ "[TARGETS] [FLAGS]" ], + commandDescription = Just $ \_ -> wrapText $ + "Build one or more targets from within the project. 
The available " + ++ "targets are the packages in the project as well as individual " + ++ "components within those packages, including libraries, executables, " + ++ "test-suites or benchmarks. Targets can be specified by name or " + ++ "location. If no target is specified then the default is to build " + ++ "the package in the current directory.\n\n" + + ++ "Dependencies are built or rebuilt as necessary. Additional " + ++ "configuration flags can be specified on the command line and these " + ++ "extend the project configuration from the 'cabal.project', " + ++ "'cabal.project.local' and other files.", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " new-build\n" + ++ " Build the package in the current directory or all packages in the project\n" + ++ " " ++ pname ++ " new-build pkgname\n" + ++ " Build the package named pkgname in the project\n" + ++ " " ++ pname ++ " new-build ./pkgfoo\n" + ++ " Build the package in the ./pkgfoo directory\n" + ++ " " ++ pname ++ " new-build cname\n" + ++ " Build the component named cname in the project\n" + ++ " " ++ pname ++ " new-build cname --enable-profiling\n" + ++ " Build the component in profiling mode (including dependencies as needed)\n\n" + + ++ cmdCommonHelpTextNewBuildBeta, + commandDefaultFlags = + (defaultBuildFlags, commandDefaultFlags Client.installCommand), + commandOptions = \ showOrParseArgs -> + liftOptions snd setSnd + (commandOptions Client.installCommand showOrParseArgs) ++ + liftOptions fst setFst + [ option [] ["only-configure"] + "Instead of performing a full build just run the configure step" + buildOnlyConfigure (\v flags -> flags { buildOnlyConfigure = v }) + (yesNoOpt showOrParseArgs) + ] + } + + where + setFst a (_,b) = (a,b) + setSnd b (a,_) = (a,b) + +data BuildFlags = BuildFlags + { buildOnlyConfigure :: Flag Bool + } + +defaultBuildFlags :: BuildFlags +defaultBuildFlags = BuildFlags + { buildOnlyConfigure = toFlag False + } + +-- | The @build@ command does a lot. 
It brings the install plan up to date, +-- selects that part of the plan needed by the given or implicit targets and +-- then executes the plan. +-- +-- For more details on how this works, see the module +-- "Distribution.Client.ProjectOrchestration" +-- +buildAction :: (BuildFlags, (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)) + -> [String] -> GlobalFlags -> IO () +buildAction (buildFlags, + (configFlags, configExFlags, installFlags, haddockFlags)) + targetStrings globalFlags = do + -- TODO: This flags defaults business is ugly + let onlyConfigure = fromFlag (buildOnlyConfigure defaultBuildFlags + <> buildOnlyConfigure buildFlags) + targetAction + | onlyConfigure = TargetActionConfigure + | otherwise = TargetActionBuild + + baseCtx <- establishProjectBaseContext verbosity cliConfig + + targetSelectors <- either (reportTargetSelectorProblems verbosity) return + =<< readTargetSelectors (localPackages baseCtx) Nothing targetStrings + + buildCtx <- + runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do + + -- Interpret the targets on the command line as build targets + -- (as opposed to say repl or haddock targets). 
+ targets <- either (reportTargetProblems verbosity) return + $ resolveTargets + selectPackageTargets + selectComponentTarget + TargetProblemCommon + elaboratedPlan + Nothing + targetSelectors + + let elaboratedPlan' = pruneInstallPlanToTargets + targetAction + targets + elaboratedPlan + elaboratedPlan'' <- + if buildSettingOnlyDeps (buildSettings baseCtx) + then either (reportCannotPruneDependencies verbosity) return $ + pruneInstallPlanToDependencies (Map.keysSet targets) + elaboratedPlan' + else return elaboratedPlan' + + return (elaboratedPlan'', targets) + + printPlan verbosity baseCtx buildCtx + + buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx + runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes + where + verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + cliConfig = commandLineFlagsToProjectConfig + globalFlags configFlags configExFlags + installFlags haddockFlags + +-- | This defines what a 'TargetSelector' means for the @bench@ command. +-- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, +-- or otherwise classifies the problem. 
+-- +-- For the @build@ command select all components except non-buildable and disabled +-- tests\/benchmarks, fail if there are no such components +-- +selectPackageTargets :: TargetSelector + -> [AvailableTarget k] -> Either TargetProblem [k] +selectPackageTargets targetSelector targets + + -- If there are any buildable targets then we select those + | not (null targetsBuildable) + = Right targetsBuildable + + -- If there are targets but none are buildable then we report those + | not (null targets) + = Left (TargetProblemNoneEnabled targetSelector targets') + + -- If there are no targets at all then we report that + | otherwise + = Left (TargetProblemNoTargets targetSelector) + where + targets' = forgetTargetsDetail targets + targetsBuildable = selectBuildableTargetsWith + (buildable targetSelector) + targets + + -- When there's a target filter like "pkg:tests" then we do select tests, + -- but if it's just a target like "pkg" then we don't build tests unless + -- they are requested by default (i.e. by using --enable-tests) + buildable (TargetPackage _ _ Nothing) TargetNotRequestedByDefault = False + buildable (TargetAllPackages Nothing) TargetNotRequestedByDefault = False + buildable _ _ = True + +-- | For a 'TargetComponent' 'TargetSelector', check if the component can be +-- selected. +-- +-- For the @build@ command we just need the basic checks on being buildable etc. +-- +selectComponentTarget :: SubComponentTarget + -> AvailableTarget k -> Either TargetProblem k +selectComponentTarget subtarget = + either (Left . TargetProblemCommon) Right + . selectComponentTargetBasic subtarget + + +-- | The various error conditions that can occur when matching a +-- 'TargetSelector' against 'AvailableTarget's for the @build@ command. 
+-- +data TargetProblem = + TargetProblemCommon TargetProblemCommon + + -- | The 'TargetSelector' matches targets but none are buildable + | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] + + -- | There are no targets at all + | TargetProblemNoTargets TargetSelector + deriving (Eq, Show) + +reportTargetProblems :: Verbosity -> [TargetProblem] -> IO a +reportTargetProblems verbosity = + die' verbosity . unlines . map renderTargetProblem + +renderTargetProblem :: TargetProblem -> String +renderTargetProblem (TargetProblemCommon problem) = + renderTargetProblemCommon "build" problem +renderTargetProblem (TargetProblemNoneEnabled targetSelector targets) = + renderTargetProblemNoneEnabled "build" targetSelector targets +renderTargetProblem(TargetProblemNoTargets targetSelector) = + renderTargetProblemNoTargets "build" targetSelector + +reportCannotPruneDependencies :: Verbosity -> CannotPruneDependencies -> IO a +reportCannotPruneDependencies verbosity = + die' verbosity . renderCannotPruneDependencies diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdClean.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdClean.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdClean.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdClean.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,115 @@ +{-# LANGUAGE RecordWildCards #-} +module Distribution.Client.CmdClean (cleanCommand, cleanAction) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Client.DistDirLayout + ( DistDirLayout(..), defaultDistDirLayout ) +import Distribution.Client.ProjectConfig + ( findProjectRoot ) +import Distribution.Client.Setup + ( GlobalFlags ) +import Distribution.ReadE ( succeedReadE ) +import Distribution.Simple.Setup + ( 
Flag(..), toFlag, fromFlagOrDefault, flagToList, flagToMaybe + , optionDistPref, optionVerbosity, falseArg + ) +import Distribution.Simple.Command + ( CommandUI(..), option, reqArg ) +import Distribution.Simple.Utils + ( info, die', wrapText, handleDoesNotExist ) +import Distribution.Verbosity + ( Verbosity, normal ) + +import Control.Monad + ( mapM_ ) +import Control.Exception + ( throwIO ) +import System.Directory + ( removeDirectoryRecursive, removeFile + , doesDirectoryExist, getDirectoryContents ) +import System.FilePath + ( () ) + +data CleanFlags = CleanFlags + { cleanSaveConfig :: Flag Bool + , cleanVerbosity :: Flag Verbosity + , cleanDistDir :: Flag FilePath + , cleanProjectFile :: Flag FilePath + } deriving (Eq) + +defaultCleanFlags :: CleanFlags +defaultCleanFlags = CleanFlags + { cleanSaveConfig = toFlag False + , cleanVerbosity = toFlag normal + , cleanDistDir = NoFlag + , cleanProjectFile = mempty + } + +cleanCommand :: CommandUI CleanFlags +cleanCommand = CommandUI + { commandName = "new-clean" + , commandSynopsis = "Clean the package store and remove temporary files." + , commandUsage = \pname -> + "Usage: " ++ pname ++ " new-clean [FLAGS]\n" + , commandDescription = Just $ \_ -> wrapText $ + "Removes all temporary files created during the building process " + ++ "(.hi, .o, preprocessed sources, etc.) 
and also empties out the " + ++ "local caches (by default).\n\n" + , commandNotes = Nothing + , commandDefaultFlags = defaultCleanFlags + , commandOptions = \showOrParseArgs -> + [ optionVerbosity + cleanVerbosity (\v flags -> flags { cleanVerbosity = v }) + , optionDistPref + cleanDistDir (\dd flags -> flags { cleanDistDir = dd }) + showOrParseArgs + , option [] ["project-file"] + ("Set the name of the cabal.project file" + ++ " to search for in parent directories") + cleanProjectFile (\pf flags -> flags {cleanProjectFile = pf}) + (reqArg "FILE" (succeedReadE Flag) flagToList) + , option ['s'] ["save-config"] + "Save configuration, only remove build artifacts" + cleanSaveConfig (\sc flags -> flags { cleanSaveConfig = sc }) + falseArg + ] + } + +cleanAction :: CleanFlags -> [String] -> GlobalFlags -> IO () +cleanAction CleanFlags{..} extraArgs _ = do + let verbosity = fromFlagOrDefault normal cleanVerbosity + saveConfig = fromFlagOrDefault False cleanSaveConfig + mdistDirectory = flagToMaybe cleanDistDir + mprojectFile = flagToMaybe cleanProjectFile + + unless (null extraArgs) $ + die' verbosity $ "'clean' doesn't take any extra arguments: " + ++ unwords extraArgs + + projectRoot <- either throwIO return =<< findProjectRoot Nothing mprojectFile + + let distLayout = defaultDistDirLayout projectRoot mdistDirectory + + if saveConfig + then do + let buildRoot = distBuildRootDirectory distLayout + + buildRootExists <- doesDirectoryExist buildRoot + + when buildRootExists $ do + info verbosity ("Deleting build root (" ++ buildRoot ++ ")") + handleDoesNotExist () $ removeDirectoryRecursive buildRoot + else do + let distRoot = distDirectory distLayout + + info verbosity ("Deleting dist-newstyle (" ++ distRoot ++ ")") + handleDoesNotExist () $ removeDirectoryRecursive distRoot + + removeEnvFiles (distProjectRootDirectory distLayout) + +removeEnvFiles :: FilePath -> IO () +removeEnvFiles dir = + (mapM_ (removeFile . (dir )) . filter ((".ghc.environment" ==) . 
take 16)) + =<< getDirectoryContents dir diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdConfigure.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdConfigure.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdConfigure.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdConfigure.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,125 @@ +-- | cabal-install CLI command: configure +-- +module Distribution.Client.CmdConfigure ( + configureCommand, + configureAction, + ) where + +import System.Directory +import Control.Monad +import qualified Data.Map as Map + +import Distribution.Client.ProjectOrchestration +import Distribution.Client.ProjectConfig + ( writeProjectLocalExtraConfig ) + +import Distribution.Client.Setup + ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) +import Distribution.Simple.Setup + ( HaddockFlags, fromFlagOrDefault ) +import Distribution.Verbosity + ( normal ) + +import Distribution.Simple.Command + ( CommandUI(..), usageAlternatives ) +import Distribution.Simple.Utils + ( wrapText, notice ) +import qualified Distribution.Client.Setup as Client + +configureCommand :: CommandUI (ConfigFlags, ConfigExFlags + ,InstallFlags, HaddockFlags) +configureCommand = Client.installCommand { + commandName = "new-configure", + commandSynopsis = "Add extra project configuration", + commandUsage = usageAlternatives "new-configure" [ "[FLAGS]" ], + commandDescription = Just $ \_ -> wrapText $ + "Adjust how the project is built by setting additional package flags " + ++ "and other flags.\n\n" + + ++ "The configuration options are written to the 'cabal.project.local' " + ++ "file (or '$project_file.local', if '--project-file' is specified) " + ++ "which extends the configuration from the 'cabal.project' file " + ++ "(if 
any). This combination is used as the project configuration for " + ++ "all other commands (such as 'new-build', 'new-repl' etc) though it " + ++ "can be extended/overridden on a per-command basis.\n\n" + + ++ "The new-configure command also checks that the project configuration " + ++ "will work. In particular it checks that there is a consistent set of " + ++ "dependencies for the project as a whole.\n\n" + + ++ "The 'cabal.project.local' file persists across 'new-clean' but is " + ++ "overwritten on the next use of the 'new-configure' command. The " + ++ "intention is that the 'cabal.project' file should be kept in source " + ++ "control but the 'cabal.project.local' should not.\n\n" + + ++ "It is never necessary to use the 'new-configure' command. It is " + ++ "merely a convenience in cases where you do not want to specify flags " + ++ "to 'new-build' (and other commands) every time and yet do not want " + ++ "to alter the 'cabal.project' persistently.", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " new-configure --with-compiler ghc-7.10.3\n" + ++ " Adjust the project configuration to use the given compiler\n" + ++ " program and check the resulting configuration works.\n" + ++ " " ++ pname ++ " new-configure\n" + ++ " Reset the local configuration to empty and check the overall\n" + ++ " project configuration works.\n\n" + + ++ cmdCommonHelpTextNewBuildBeta + } + +-- | To a first approximation, the @configure@ just runs the first phase of +-- the @build@ command where we bring the install plan up to date (thus +-- checking that it's possible). +-- +-- The only difference is that @configure@ also allows the user to specify +-- some extra config flags which we save in the file @cabal.project.local@. 
+-- +-- For more details on how this works, see the module +-- "Distribution.Client.ProjectOrchestration" +-- +configureAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> GlobalFlags -> IO () +configureAction (configFlags, configExFlags, installFlags, haddockFlags) + _extraArgs globalFlags = do + --TODO: deal with _extraArgs, since flags with wrong syntax end up there + + baseCtx <- establishProjectBaseContext verbosity cliConfig + + -- Write out the @cabal.project.local@ so it gets picked up by the + -- planning phase. If old config exists, then print the contents + -- before overwriting + exists <- doesFileExist "cabal.project.local" + when exists $ do + notice verbosity "'cabal.project.local' file already exists. Now overwriting it." + copyFile "cabal.project.local" "cabal.project.local~" + writeProjectLocalExtraConfig (distDirLayout baseCtx) + cliConfig + + buildCtx <- + runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> + + -- TODO: Select the same subset of targets as 'CmdBuild' would + -- pick (ignoring, for example, executables in libraries + -- we depend on). But we don't want it to fail, so actually we + -- have to do it slightly differently from build. + return (elaboratedPlan, Map.empty) + + let baseCtx' = baseCtx { + buildSettings = (buildSettings baseCtx) { + buildSettingDryRun = True + } + } + + -- TODO: Hmm, but we don't have any targets. Currently this prints + -- what we would build if we were to build everything. Could pick + -- implicit target like "." + -- + -- TODO: should we say what's in the project (+deps) as a whole? 
+ printPlan verbosity baseCtx' buildCtx + where + verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + cliConfig = commandLineFlagsToProjectConfig + globalFlags configFlags configExFlags + installFlags haddockFlags + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdErrorMessages.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdErrorMessages.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdErrorMessages.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdErrorMessages.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,410 @@ +{-# LANGUAGE RecordWildCards, NamedFieldPuns #-} + +-- | Utilities to help format error messages for the various CLI commands. +-- +module Distribution.Client.CmdErrorMessages ( + module Distribution.Client.CmdErrorMessages, + module Distribution.Client.TargetSelector, + ) where + +import Distribution.Client.ProjectOrchestration +import Distribution.Client.TargetSelector + ( ComponentKindFilter, componentKind, showTargetSelector ) + +import Distribution.Package + ( packageId, PackageName, packageName ) +import Distribution.Types.ComponentName + ( showComponentName ) +import Distribution.Solver.Types.OptionalStanza + ( OptionalStanza(..) ) +import Distribution.Text + ( display ) + +import Data.Maybe (isNothing) +import Data.List (sortBy, groupBy, nub) +import Data.Function (on) + + +----------------------- +-- Singular or plural +-- + +-- | A tag used in rendering messages to distinguish singular or plural. 
+-- +data Plural = Singular | Plural + +-- | Used to render a singular or plural version of something +-- +-- > plural (listPlural theThings) "it is" "they are" +-- +plural :: Plural -> a -> a -> a +plural Singular si _pl = si +plural Plural _si pl = pl + +-- | Singular for singleton lists and plural otherwise. +-- +listPlural :: [a] -> Plural +listPlural [_] = Singular +listPlural _ = Plural + + +-------------------- +-- Rendering lists +-- + +-- | Render a list of things in the style @foo, bar and baz@ +renderListCommaAnd :: [String] -> String +renderListCommaAnd [] = "" +renderListCommaAnd [x] = x +renderListCommaAnd [x,x'] = x ++ " and " ++ x' +renderListCommaAnd (x:xs) = x ++ ", " ++ renderListCommaAnd xs + +-- | Render a list of things in the style @blah blah; this that; and the other@ +renderListSemiAnd :: [String] -> String +renderListSemiAnd [] = "" +renderListSemiAnd [x] = x +renderListSemiAnd [x,x'] = x ++ "; and " ++ x' +renderListSemiAnd (x:xs) = x ++ "; " ++ renderListSemiAnd xs + +-- | When rendering lists of things it often reads better to group related +-- things, e.g. grouping components by package name +-- +-- > renderListSemiAnd +-- > [ "the package " ++ display pkgname ++ " components " +-- > ++ renderListCommaAnd showComponentName components +-- > | (pkgname, components) <- sortGroupOn packageName allcomponents ] +-- +sortGroupOn :: Ord b => (a -> b) -> [a] -> [(b, [a])] +sortGroupOn key = map (\xs@(x:_) -> (key x, xs)) + . groupBy ((==) `on` key) + . 
sortBy (compare `on` key) + + +---------------------------------------------------- +-- Renderering for a few project and package types +-- + +renderTargetSelector :: TargetSelector -> String +renderTargetSelector (TargetPackage _ pkgids Nothing) = + "the " ++ plural (listPlural pkgids) "package" "packages" ++ " " + ++ renderListCommaAnd (map display pkgids) + +renderTargetSelector (TargetPackage _ pkgids (Just kfilter)) = + "the " ++ renderComponentKind Plural kfilter + ++ " in the " ++ plural (listPlural pkgids) "package" "packages" ++ " " + ++ renderListCommaAnd (map display pkgids) + +renderTargetSelector (TargetPackageNamed pkgname Nothing) = + "the package " ++ display pkgname + +renderTargetSelector (TargetPackageNamed pkgname (Just kfilter)) = + "the " ++ renderComponentKind Plural kfilter + ++ " in the package " ++ display pkgname + +renderTargetSelector (TargetAllPackages Nothing) = + "all the packages in the project" + +renderTargetSelector (TargetAllPackages (Just kfilter)) = + "all the " ++ renderComponentKind Plural kfilter + ++ " in the project" + +renderTargetSelector (TargetComponent pkgid cname subtarget) = + renderSubComponentTarget subtarget ++ "the " + ++ renderComponentName (packageName pkgid) cname + +renderTargetSelector (TargetComponentUnknown pkgname (Left ucname) subtarget) = + renderSubComponentTarget subtarget ++ "the component " ++ display ucname + ++ " in the package " ++ display pkgname + +renderTargetSelector (TargetComponentUnknown pkgname (Right cname) subtarget) = + renderSubComponentTarget subtarget ++ "the " + ++ renderComponentName pkgname cname + +renderSubComponentTarget :: SubComponentTarget -> String +renderSubComponentTarget WholeComponent = "" +renderSubComponentTarget (FileTarget filename) = + "the file " ++ filename ++ "in " +renderSubComponentTarget (ModuleTarget modname) = + "the module" ++ display modname ++ "in " + + +renderOptionalStanza :: Plural -> OptionalStanza -> String +renderOptionalStanza Singular 
TestStanzas = "test suite" +renderOptionalStanza Plural TestStanzas = "test suites" +renderOptionalStanza Singular BenchStanzas = "benchmark" +renderOptionalStanza Plural BenchStanzas = "benchmarks" + +-- | The optional stanza type (test suite or benchmark), if it is one. +optionalStanza :: ComponentName -> Maybe OptionalStanza +optionalStanza (CTestName _) = Just TestStanzas +optionalStanza (CBenchName _) = Just BenchStanzas +optionalStanza _ = Nothing + +-- | Does the 'TargetSelector' potentially refer to one package or many? +-- +targetSelectorPluralPkgs :: TargetSelector -> Plural +targetSelectorPluralPkgs (TargetAllPackages _) = Plural +targetSelectorPluralPkgs (TargetPackage _ pids _) = listPlural pids +targetSelectorPluralPkgs (TargetPackageNamed _ _) = Singular +targetSelectorPluralPkgs TargetComponent{} = Singular +targetSelectorPluralPkgs TargetComponentUnknown{} = Singular + +-- | Does the 'TargetSelector' refer to packages or to components? +targetSelectorRefersToPkgs :: TargetSelector -> Bool +targetSelectorRefersToPkgs (TargetAllPackages mkfilter) = isNothing mkfilter +targetSelectorRefersToPkgs (TargetPackage _ _ mkfilter) = isNothing mkfilter +targetSelectorRefersToPkgs (TargetPackageNamed _ mkfilter) = isNothing mkfilter +targetSelectorRefersToPkgs TargetComponent{} = False +targetSelectorRefersToPkgs TargetComponentUnknown{} = False + +targetSelectorFilter :: TargetSelector -> Maybe ComponentKindFilter +targetSelectorFilter (TargetPackage _ _ mkfilter) = mkfilter +targetSelectorFilter (TargetPackageNamed _ mkfilter) = mkfilter +targetSelectorFilter (TargetAllPackages mkfilter) = mkfilter +targetSelectorFilter TargetComponent{} = Nothing +targetSelectorFilter TargetComponentUnknown{} = Nothing + +renderComponentName :: PackageName -> ComponentName -> String +renderComponentName pkgname CLibName = "library " ++ display pkgname +renderComponentName _ (CSubLibName name) = "library " ++ display name +renderComponentName _ (CFLibName name) = "foreign 
library " ++ display name +renderComponentName _ (CExeName name) = "executable " ++ display name +renderComponentName _ (CTestName name) = "test suite " ++ display name +renderComponentName _ (CBenchName name) = "benchmark " ++ display name + +renderComponentKind :: Plural -> ComponentKind -> String +renderComponentKind Singular ckind = case ckind of + LibKind -> "library" -- internal/sub libs? + FLibKind -> "foreign library" + ExeKind -> "executable" + TestKind -> "test suite" + BenchKind -> "benchmark" +renderComponentKind Plural ckind = case ckind of + LibKind -> "libraries" -- internal/sub libs? + FLibKind -> "foreign libraries" + ExeKind -> "executables" + TestKind -> "test suites" + BenchKind -> "benchmarks" + + +------------------------------------------------------- +-- Renderering error messages for TargetProblemCommon +-- + +renderTargetProblemCommon :: String -> TargetProblemCommon -> String +renderTargetProblemCommon verb (TargetNotInProject pkgname) = + "Cannot " ++ verb ++ " the package " ++ display pkgname ++ ", it is not " + ++ "in this project (either directly or indirectly). If you want to add it " + ++ "to the project then edit the cabal.project file." + +renderTargetProblemCommon verb (TargetAvailableInIndex pkgname) = + "Cannot " ++ verb ++ " the package " ++ display pkgname ++ ", it is not " + ++ "in this project (either directly or indirectly), but it is in the current " + ++ "package index. If you want to add it to the project then edit the " + ++ "cabal.project file." + +renderTargetProblemCommon verb (TargetComponentNotProjectLocal pkgid cname _) = + "Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because the " + ++ "package " ++ display pkgid ++ " is not local to the project, and cabal " + ++ "does not currently support building test suites or benchmarks of " + ++ "non-local dependencies. 
To run test suites or benchmarks from " + ++ "dependencies you can unpack the package locally and adjust the " + ++ "cabal.project file to include that package directory." + +renderTargetProblemCommon verb (TargetComponentNotBuildable pkgid cname _) = + "Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because it is " + ++ "marked as 'buildable: False' within the '" ++ display (packageName pkgid) + ++ ".cabal' file (at least for the current configuration). If you believe it " + ++ "should be buildable then check the .cabal file to see if the buildable " + ++ "property is conditional on flags. Alternatively you may simply have to " + ++ "edit the .cabal file to declare it as buildable and fix any resulting " + ++ "build problems." + +renderTargetProblemCommon verb (TargetOptionalStanzaDisabledByUser _ cname _) = + "Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because " + ++ "building " ++ compkinds ++ " has been explicitly disabled in the " + ++ "configuration. You can adjust this configuration in the " + ++ "cabal.project{.local} file either for all packages in the project or on " + ++ "a per-package basis. Note that if you do not explicitly disable " + ++ compkinds ++ " then the solver will merely try to make a plan with " + ++ "them available, so you may wish to explicitly enable them which will " + ++ "require the solver to find a plan with them available or to fail with an " + ++ "explanation." + where + compkinds = renderComponentKind Plural (componentKind cname) + +renderTargetProblemCommon verb (TargetOptionalStanzaDisabledBySolver pkgid cname _) = + "Cannot " ++ verb ++ " the " ++ showComponentName cname ++ " because the " + ++ "solver did not find a plan that included the " ++ compkinds + ++ " for " ++ display pkgid ++ ". It is probably worth trying again with " + ++ compkinds ++ " explicitly enabled in the configuration in the " + ++ "cabal.project{.local} file. 
This will ask the solver to find a plan with " + ++ "the " ++ compkinds ++ " available. It will either fail with an " + ++ "explanation or find a different plan that uses different versions of some " + ++ "other packages. Use the '--dry-run' flag to see package versions and " + ++ "check that you are happy with the choices." + where + compkinds = renderComponentKind Plural (componentKind cname) + +renderTargetProblemCommon verb (TargetProblemUnknownComponent pkgname ecname) = + "Cannot " ++ verb ++ " the " + ++ (case ecname of + Left ucname -> "component " ++ display ucname + Right cname -> renderComponentName pkgname cname) + ++ " from the package " ++ display pkgname + ++ ", because the package does not contain a " + ++ (case ecname of + Left _ -> "component" + Right cname -> renderComponentKind Singular (componentKind cname)) + ++ " with that name." + +renderTargetProblemCommon verb (TargetProblemNoSuchPackage pkgid) = + "Internal error when trying to " ++ verb ++ " the package " + ++ display pkgid ++ ". The package is not in the set of available targets " + ++ "for the project plan, which would suggest an inconsistency " + ++ "between readTargetSelectors and resolveTargets." + +renderTargetProblemCommon verb (TargetProblemNoSuchComponent pkgid cname) = + "Internal error when trying to " ++ verb ++ " the " + ++ showComponentName cname ++ " from the package " ++ display pkgid + ++ ". The package,component pair is not in the set of available targets " + ++ "for the project plan, which would suggest an inconsistency " + ++ "between readTargetSelectors and resolveTargets." + + +------------------------------------------------------------ +-- Renderering error messages for TargetProblemNoneEnabled +-- + +-- | Several commands have a @TargetProblemNoneEnabled@ problem constructor. +-- This renders an error message for those cases. 
+-- +renderTargetProblemNoneEnabled :: String + -> TargetSelector + -> [AvailableTarget ()] + -> String +renderTargetProblemNoneEnabled verb targetSelector targets = + "Cannot " ++ verb ++ " " ++ renderTargetSelector targetSelector + ++ " because none of the components are available to build: " + ++ renderListSemiAnd + [ case (status, mstanza) of + (TargetDisabledByUser, Just stanza) -> + renderListCommaAnd + [ "the " ++ showComponentName availableTargetComponentName + | AvailableTarget {availableTargetComponentName} <- targets' ] + ++ plural (listPlural targets') " is " " are " + ++ " not available because building " + ++ renderOptionalStanza Plural stanza + ++ " has been disabled in the configuration" + (TargetDisabledBySolver, Just stanza) -> + renderListCommaAnd + [ "the " ++ showComponentName availableTargetComponentName + | AvailableTarget {availableTargetComponentName} <- targets' ] + ++ plural (listPlural targets') " is " " are " + ++ "not available because the solver did not find a plan that " + ++ "included the " ++ renderOptionalStanza Plural stanza + (TargetNotBuildable, _) -> + renderListCommaAnd + [ "the " ++ showComponentName availableTargetComponentName + | AvailableTarget {availableTargetComponentName} <- targets' ] + ++ plural (listPlural targets') " is " " are all " + ++ "marked as 'buildable: False'" + (TargetNotLocal, _) -> + renderListCommaAnd + [ "the " ++ showComponentName availableTargetComponentName + | AvailableTarget {availableTargetComponentName} <- targets' ] + ++ " cannot be built because cabal does not currently support " + ++ "building test suites or benchmarks of non-local dependencies" + (TargetBuildable () TargetNotRequestedByDefault, Just stanza) -> + renderListCommaAnd + [ "the " ++ showComponentName availableTargetComponentName + | AvailableTarget {availableTargetComponentName} <- targets' ] + ++ " will not be built because " ++ renderOptionalStanza Plural stanza + ++ " are not built by default in the current configuration 
(but you " + ++ "can still build them specifically)" --TODO: say how + _ -> error $ "renderBuildTargetProblem: unexpected status " + ++ show (status, mstanza) + | ((status, mstanza), targets') <- sortGroupOn groupingKey targets + ] + where + groupingKey t = + ( availableTargetStatus t + , case availableTargetStatus t of + TargetNotBuildable -> Nothing + TargetNotLocal -> Nothing + _ -> optionalStanza (availableTargetComponentName t) + ) + +------------------------------------------------------------ +-- Renderering error messages for TargetProblemNoneEnabled +-- + +-- | Several commands have a @TargetProblemNoTargets@ problem constructor. +-- This renders an error message for those cases. +-- +renderTargetProblemNoTargets :: String -> TargetSelector -> String +renderTargetProblemNoTargets verb targetSelector = + "Cannot " ++ verb ++ " " ++ renderTargetSelector targetSelector + ++ " because " ++ reason targetSelector ++ ". " + ++ "Check the .cabal " + ++ plural (targetSelectorPluralPkgs targetSelector) + "file for the package and make sure that it properly declares " + "files for the packages and make sure that they properly declare " + ++ "the components that you expect." 
+ where + reason (TargetPackage _ _ Nothing) = + "it does not contain any components at all" + reason (TargetPackage _ _ (Just kfilter)) = + "it does not contain any " ++ renderComponentKind Plural kfilter + reason (TargetPackageNamed _ Nothing) = + "it does not contain any components at all" + reason (TargetPackageNamed _ (Just kfilter)) = + "it does not contain any " ++ renderComponentKind Plural kfilter + reason (TargetAllPackages Nothing) = + "none of them contain any components at all" + reason (TargetAllPackages (Just kfilter)) = + "none of the packages contain any " + ++ renderComponentKind Plural kfilter + reason ts@TargetComponent{} = + error $ "renderTargetProblemNoTargets: " ++ show ts + reason ts@TargetComponentUnknown{} = + error $ "renderTargetProblemNoTargets: " ++ show ts + +----------------------------------------------------------- +-- Renderering error messages for CannotPruneDependencies +-- + +renderCannotPruneDependencies :: CannotPruneDependencies -> String +renderCannotPruneDependencies (CannotPruneDependencies brokenPackages) = + "Cannot select only the dependencies (as requested by the " + ++ "'--only-dependencies' flag), " + ++ (case pkgids of + [pkgid] -> "the package " ++ display pkgid ++ " is " + _ -> "the packages " + ++ renderListCommaAnd (map display pkgids) ++ " are ") + ++ "required by a dependency of one of the other targets." + where + -- throw away the details and just list the deps that are needed + pkgids :: [PackageId] + pkgids = nub . map packageId . 
concatMap snd $ brokenPackages + +{- + ++ "Syntax:\n" + ++ " - build [package]\n" + ++ " - build [package:]component\n" + ++ " - build [package:][component:]module\n" + ++ " - build [package:][component:]file\n" + ++ " where\n" + ++ " package is a package name, package dir or .cabal file\n\n" + ++ "Examples:\n" + ++ " - build foo -- package name\n" + ++ " - build tests -- component name\n" + ++ " (name of library, executable, test-suite or benchmark)\n" + ++ " - build Data.Foo -- module name\n" + ++ " - build Data/Foo.hsc -- file name\n\n" + ++ "An ambigious target can be qualified by package, component\n" + ++ "and/or component kind (lib|exe|test|bench|flib)\n" + ++ " - build foo:tests -- component qualified by package\n" + ++ " - build tests:Data.Foo -- module qualified by component\n" + ++ " - build lib:foo -- component qualified by kind" +-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdExec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdExec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdExec.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdExec.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,258 @@ +------------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Exec +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Implementation of the 'new-exec' command for running an arbitrary executable +-- in an environment suited to the part of the store built for a project. +------------------------------------------------------------------------------- + +{-# LANGUAGE RecordWildCards #-} +module Distribution.Client.CmdExec + ( execAction + , execCommand + ) where + +import Distribution.Client.DistDirLayout + ( DistDirLayout(..) 
+ ) +import Distribution.Client.InstallPlan + ( GenericPlanPackage(..) + , toGraph + ) +import Distribution.Client.Setup + ( ConfigExFlags + , ConfigFlags(configVerbosity) + , GlobalFlags + , InstallFlags + ) +import qualified Distribution.Client.Setup as Client +import Distribution.Client.ProjectOrchestration + ( ProjectBuildContext(..) + , runProjectPreBuildPhase + , establishProjectBaseContext + , distDirLayout + , commandLineFlagsToProjectConfig + , ProjectBaseContext(..) + ) +import Distribution.Client.ProjectPlanOutput + ( updatePostBuildProjectStatus + , createPackageEnvironment + , argsEquivalentOfGhcEnvironmentFile + , PostBuildProjectStatus + ) +import qualified Distribution.Client.ProjectPlanning as Planning +import Distribution.Client.ProjectPlanning + ( ElaboratedInstallPlan + , ElaboratedSharedConfig(..) + ) +import Distribution.Simple.Command + ( CommandUI(..) + ) +import Distribution.Simple.Program.Db + ( modifyProgramSearchPath + , requireProgram + , configuredPrograms + ) +import Distribution.Simple.Program.Find + ( ProgramSearchPathEntry(..) + ) +import Distribution.Simple.Program.Run + ( programInvocation + , runProgramInvocation + ) +import Distribution.Simple.Program.Types + ( programOverrideEnv + , programDefaultArgs + , programPath + , simpleProgram + , ConfiguredProgram + ) +import Distribution.Simple.GHC + ( getImplInfo + , GhcImplInfo(supportsPkgEnvFiles) ) +import Distribution.Simple.Setup + ( HaddockFlags + , fromFlagOrDefault + ) +import Distribution.Simple.Utils + ( die' + , info + , withTempDirectory + , wrapText + ) +import Distribution.Verbosity + ( Verbosity + , normal + ) + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Data.Set (Set) +import qualified Data.Set as S +import qualified Data.Map as M + +execCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) +execCommand = CommandUI + { commandName = "new-exec" + , commandSynopsis = "Give a command access to the store." 
+ , commandUsage = \pname -> + "Usage: " ++ pname ++ " new-exec [FLAGS] [--] COMMAND [--] [ARGS]\n" + , commandDescription = Just $ \pname -> wrapText $ + "During development it is often useful to run build tasks and perform" + ++ " one-off program executions to experiment with the behavior of build" + ++ " tools. It is convenient to run these tools in the same way " ++ pname + ++ " itself would. The `" ++ pname ++ " new-exec` command provides a way to" + ++ " do so.\n" + ++ "\n" + ++ "Compiler tools will be configured to see the same subset of the store" + ++ " that builds would see. The PATH is modified to make all executables in" + ++ " the dependency tree available (provided they have been built already)." + ++ " Commands are also rewritten in the way cabal itself would. For" + ++ " example, `" ++ pname ++ " new-exec ghc` will consult the configuration" + ++ " to choose an appropriate version of ghc and to include any" + ++ " ghc-specific flags requested." + , commandNotes = Nothing + , commandOptions = commandOptions Client.installCommand + , commandDefaultFlags = commandDefaultFlags Client.installCommand + } + +execAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> GlobalFlags -> IO () +execAction (configFlags, configExFlags, installFlags, haddockFlags) + extraArgs globalFlags = do + + baseCtx <- establishProjectBaseContext verbosity cliConfig + + -- To set up the environment, we'd like to select the libraries in our + -- dependency tree that we've already built. So first we set up an install + -- plan, but we walk the dependency tree without first executing the plan. + buildCtx <- runProjectPreBuildPhase + verbosity + baseCtx + (\plan -> return (plan, M.empty)) + + -- We use the build status below to decide what libraries to include in the + -- compiler environment, but we don't want to actually build anything. So we + -- pass mempty to indicate that nothing happened and we just want the current + -- status. 
+ buildStatus <- updatePostBuildProjectStatus + verbosity + (distDirLayout baseCtx) + (elaboratedPlanOriginal buildCtx) + (pkgsBuildStatus buildCtx) + mempty + + -- Some dependencies may have executables. Let's put those on the PATH. + extraPaths <- pathAdditions verbosity baseCtx buildCtx + let programDb = modifyProgramSearchPath + (map ProgramSearchPathDir extraPaths ++) + . pkgConfigCompilerProgs + . elaboratedShared + $ buildCtx + + -- Now that we have the packages, set up the environment. We accomplish this + -- by creating an environment file that selects the databases and packages we + -- computed in the previous step, and setting an environment variable to + -- point at the file. + -- In case ghc is too old to support environment files, + -- we pass the same info as arguments + let compiler = pkgConfigCompiler $ elaboratedShared buildCtx + envFilesSupported = supportsPkgEnvFiles (getImplInfo compiler) + case extraArgs of + [] -> die' verbosity "Please specify an executable to run" + exe:args -> do + (program, _) <- requireProgram verbosity (simpleProgram exe) programDb + let argOverrides = + argsEquivalentOfGhcEnvironmentFile + compiler + (distDirLayout baseCtx) + (elaboratedPlanOriginal buildCtx) + buildStatus + programIsConfiguredCompiler = matchCompilerPath + (elaboratedShared buildCtx) + program + argOverrides' = + if envFilesSupported + || not programIsConfiguredCompiler + then [] + else argOverrides + + (if envFilesSupported + then withTempEnvFile verbosity baseCtx buildCtx buildStatus + else \f -> f []) $ \envOverrides -> do + let program' = withOverrides + envOverrides + argOverrides' + program + invocation = programInvocation program' args + runProgramInvocation verbosity invocation + where + verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + cliConfig = commandLineFlagsToProjectConfig + globalFlags configFlags configExFlags + installFlags haddockFlags + withOverrides env args program = program + { programOverrideEnv = 
programOverrideEnv program ++ env + , programDefaultArgs = programDefaultArgs program ++ args} + +matchCompilerPath :: ElaboratedSharedConfig -> ConfiguredProgram -> Bool +matchCompilerPath elaboratedShared program = + programPath program + `elem` + (programPath <$> configuredCompilers) + where + configuredCompilers = configuredPrograms $ pkgConfigCompilerProgs elaboratedShared + +-- | Execute an action with a temporary .ghc.environment file reflecting the +-- current environment. The action takes an environment containing the env +-- variable which points ghc to the file. +withTempEnvFile :: Verbosity + -> ProjectBaseContext + -> ProjectBuildContext + -> PostBuildProjectStatus + -> ([(String, Maybe String)] -> IO a) + -> IO a +withTempEnvFile verbosity + baseCtx + buildCtx + buildStatus + action = + withTempDirectory + verbosity + (distTempDirectory (distDirLayout baseCtx)) + "environment." + (\tmpDir -> do + envOverrides <- createPackageEnvironment + verbosity + tmpDir + (elaboratedPlanToExecute buildCtx) + (elaboratedShared buildCtx) + buildStatus + action envOverrides) + +pathAdditions :: Verbosity -> ProjectBaseContext -> ProjectBuildContext -> IO [FilePath] +pathAdditions verbosity ProjectBaseContext{..}ProjectBuildContext{..} = do + info verbosity . unlines $ "Including the following directories in PATH:" + : paths + return paths + where + paths = S.toList + $ binDirectories distDirLayout elaboratedShared elaboratedPlanToExecute + +binDirectories + :: DistDirLayout + -> ElaboratedSharedConfig + -> ElaboratedInstallPlan + -> Set FilePath +binDirectories layout config = fromElaboratedInstallPlan where + fromElaboratedInstallPlan = fromGraph . toGraph + fromGraph = foldMap fromPlan + fromSrcPkg = S.fromList . 
Planning.binDirectories layout config + + fromPlan (PreExisting _) = mempty + fromPlan (Configured pkg) = fromSrcPkg pkg + fromPlan (Installed pkg) = fromSrcPkg pkg + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdFreeze.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdFreeze.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdFreeze.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdFreeze.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,230 @@ +{-# LANGUAGE NamedFieldPuns, RecordWildCards #-} + +-- | cabal-install CLI command: freeze +-- +module Distribution.Client.CmdFreeze ( + freezeCommand, + freezeAction, + ) where + +import Distribution.Client.ProjectOrchestration +import Distribution.Client.ProjectPlanning +import Distribution.Client.ProjectConfig + ( ProjectConfig(..), ProjectConfigShared(..) + , writeProjectLocalFreezeConfig ) +import Distribution.Client.Targets + ( UserQualifier(..), UserConstraintScope(..), UserConstraint(..) ) +import Distribution.Solver.Types.PackageConstraint + ( PackageProperty(..) ) +import Distribution.Solver.Types.ConstraintSource + ( ConstraintSource(..) 
) +import Distribution.Client.DistDirLayout + ( DistDirLayout(distProjectFile) ) +import qualified Distribution.Client.InstallPlan as InstallPlan + + +import Distribution.Package + ( PackageName, packageName, packageVersion ) +import Distribution.Version + ( VersionRange, thisVersion + , unionVersionRanges, simplifyVersionRange ) +import Distribution.PackageDescription + ( FlagAssignment, nullFlagAssignment ) +import Distribution.Client.Setup + ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) +import Distribution.Simple.Setup + ( HaddockFlags, fromFlagOrDefault ) +import Distribution.Simple.Utils + ( die', notice, wrapText ) +import Distribution.Verbosity + ( normal ) + +import Data.Monoid as Monoid +import qualified Data.Map as Map +import Data.Map (Map) +import Control.Monad (unless) + +import Distribution.Simple.Command + ( CommandUI(..), usageAlternatives ) +import qualified Distribution.Client.Setup as Client + + +freezeCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) +freezeCommand = Client.installCommand { + commandName = "new-freeze", + commandSynopsis = "Freeze dependencies.", + commandUsage = usageAlternatives "new-freeze" [ "[FLAGS]" ], + commandDescription = Just $ \_ -> wrapText $ + "The project configuration is frozen so that it will be reproducible " + ++ "in future.\n\n" + + ++ "The precise dependency configuration for the project is written to " + ++ "the 'cabal.project.freeze' file (or '$project_file.freeze' if " + ++ "'--project-file' is specified). This file extends the configuration " + ++ "from the 'cabal.project' file and thus is used as the project " + ++ "configuration for all other commands (such as 'new-build', " + ++ "'new-repl' etc).\n\n" + + ++ "The freeze file can be kept in source control. To make small " + ++ "adjustments it may be edited manually, or to make bigger changes " + ++ "you may wish to delete the file and re-freeze. 
For more control, " + ++ "one approach is to try variations using 'new-build --dry-run' with " + ++ "solver flags such as '--constraint=\"pkg < 1.2\"' and once you have " + ++ "a satisfactory solution to freeze it using the 'new-freeze' command " + ++ "with the same set of flags.", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " new-freeze\n" + ++ " Freeze the configuration of the current project\n\n" + ++ " " ++ pname ++ " new-build --dry-run --constraint=\"aeson < 1\"\n" + ++ " Check what a solution with the given constraints would look like\n" + ++ " " ++ pname ++ " new-freeze --constraint=\"aeson < 1\"\n" + ++ " Freeze a solution using the given constraints\n\n" + + ++ "Note: this command is part of the new project-based system (aka " + ++ "nix-style\nlocal builds). These features are currently in beta. " + ++ "Please see\n" + ++ "http://cabal.readthedocs.io/en/latest/nix-local-build-overview.html " + ++ "for\ndetails and advice on what you can expect to work. If you " + ++ "encounter problems\nplease file issues at " + ++ "https://github.com/haskell/cabal/issues and if you\nhave any time " + ++ "to get involved and help with testing, fixing bugs etc then\nthat " + ++ "is very much appreciated.\n" + } + +-- | To a first approximation, the @freeze@ command runs the first phase of +-- the @build@ command where we bring the install plan up to date, and then +-- based on the install plan we write out a @cabal.project.freeze@ config file. 
+-- +-- For more details on how this works, see the module +-- "Distribution.Client.ProjectOrchestration" +-- +freezeAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> GlobalFlags -> IO () +freezeAction (configFlags, configExFlags, installFlags, haddockFlags) + extraArgs globalFlags = do + + unless (null extraArgs) $ + die' verbosity $ "'freeze' doesn't take any extra arguments: " + ++ unwords extraArgs + + ProjectBaseContext { + distDirLayout, + cabalDirLayout, + projectConfig, + localPackages + } <- establishProjectBaseContext verbosity cliConfig + + (_, elaboratedPlan, _) <- + rebuildInstallPlan verbosity + distDirLayout cabalDirLayout + projectConfig + localPackages + + let freezeConfig = projectFreezeConfig elaboratedPlan + writeProjectLocalFreezeConfig distDirLayout freezeConfig + notice verbosity $ + "Wrote freeze file: " ++ distProjectFile distDirLayout "freeze" + + where + verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + cliConfig = commandLineFlagsToProjectConfig + globalFlags configFlags configExFlags + installFlags haddockFlags + + + +-- | Given the install plan, produce a config value with constraints that +-- freezes the versions of packages used in the plan. +-- +projectFreezeConfig :: ElaboratedInstallPlan -> ProjectConfig +projectFreezeConfig elaboratedPlan = + Monoid.mempty { + projectConfigShared = Monoid.mempty { + projectConfigConstraints = + concat (Map.elems (projectFreezeConstraints elaboratedPlan)) + } + } + +-- | Given the install plan, produce solver constraints that will ensure the +-- solver picks the same solution again in future in different environments. +-- +projectFreezeConstraints :: ElaboratedInstallPlan + -> Map PackageName [(UserConstraint, ConstraintSource)] +projectFreezeConstraints plan = + -- + -- TODO: [required eventually] this is currently an underapproximation + -- since the constraints language is not expressive enough to specify the + -- precise solution. 
See https://github.com/haskell/cabal/issues/3502. + -- + -- For the moment we deal with multiple versions in the solution by using + -- constraints that allow either version. Also, we do not include any + -- /version/ constraints for packages that are local to the project (e.g. + -- if the solution has two instances of Cabal, one from the local project + -- and one pulled in as a setup deps then we exclude all constraints on + -- Cabal, not just the constraint for the local instance since any + -- constraint would apply to both instances). We do however keep flag + -- constraints of local packages. + -- + deleteLocalPackagesVersionConstraints + (Map.unionWith (++) versionConstraints flagConstraints) + where + versionConstraints :: Map PackageName [(UserConstraint, ConstraintSource)] + versionConstraints = + Map.mapWithKey + (\p v -> [(UserConstraint (UserAnyQualifier p) (PackagePropertyVersion v), + ConstraintSourceFreeze)]) + versionRanges + + versionRanges :: Map PackageName VersionRange + versionRanges = + Map.map simplifyVersionRange $ + Map.fromListWith unionVersionRanges $ + [ (packageName pkg, thisVersion (packageVersion pkg)) + | InstallPlan.PreExisting pkg <- InstallPlan.toList plan + ] + ++ [ (packageName pkg, thisVersion (packageVersion pkg)) + | InstallPlan.Configured pkg <- InstallPlan.toList plan + ] + + flagConstraints :: Map PackageName [(UserConstraint, ConstraintSource)] + flagConstraints = + Map.mapWithKey + (\p f -> [(UserConstraint (UserQualified UserQualToplevel p) (PackagePropertyFlags f), + ConstraintSourceFreeze)]) + flagAssignments + + flagAssignments :: Map PackageName FlagAssignment + flagAssignments = + Map.fromList + [ (pkgname, flags) + | InstallPlan.Configured elab <- InstallPlan.toList plan + , let flags = elabFlagAssignment elab + pkgname = packageName elab + , not (nullFlagAssignment flags) ] + + -- As described above, remove the version constraints on local packages, + -- but leave any flag constraints. 
+ deleteLocalPackagesVersionConstraints + :: Map PackageName [(UserConstraint, ConstraintSource)] + -> Map PackageName [(UserConstraint, ConstraintSource)] + deleteLocalPackagesVersionConstraints = + Map.mergeWithKey + (\_pkgname () constraints -> + case filter (not . isVersionConstraint . fst) constraints of + [] -> Nothing + constraints' -> Just constraints') + (const Map.empty) id + localPackages + + isVersionConstraint (UserConstraint _ (PackagePropertyVersion _)) = True + isVersionConstraint _ = False + + localPackages :: Map PackageName () + localPackages = + Map.fromList + [ (packageName elab, ()) + | InstallPlan.Configured elab <- InstallPlan.toList plan + , elabLocalToProject elab + ] + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdHaddock.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdHaddock.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdHaddock.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdHaddock.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,206 @@ +{-# LANGUAGE NamedFieldPuns #-} + +-- | cabal-install CLI command: haddock +-- +module Distribution.Client.CmdHaddock ( + -- * The @haddock@ CLI and action + haddockCommand, + haddockAction, + + -- * Internals exposed for testing + TargetProblem(..), + selectPackageTargets, + selectComponentTarget + ) where + +import Distribution.Client.ProjectOrchestration +import Distribution.Client.CmdErrorMessages + +import Distribution.Client.Setup + ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) +import qualified Distribution.Client.Setup as Client +import Distribution.Simple.Setup + ( HaddockFlags(..), fromFlagOrDefault ) +import Distribution.Simple.Command + ( CommandUI(..), usageAlternatives ) +import Distribution.Verbosity + ( Verbosity, normal ) 
+import Distribution.Simple.Utils + ( wrapText, die' ) + +import Control.Monad (when) + + +haddockCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags + ,HaddockFlags) +haddockCommand = Client.installCommand { + commandName = "new-haddock", + commandSynopsis = "Build Haddock documentation", + commandUsage = usageAlternatives "new-haddock" [ "[FLAGS] TARGET" ], + commandDescription = Just $ \_ -> wrapText $ + "Build Haddock documentation for the specified packages within the " + ++ "project.\n\n" + + ++ "Any package in the project can be specified. If no package is " + ++ "specified, the default is to build the documentation for the package " + ++ "in the current directory. The default behaviour is to build " + ++ "documentation for the exposed modules of the library component (if " + ++ "any). This can be changed with the '--internal', '--executables', " + ++ "'--tests', '--benchmarks' or '--all' flags.\n\n" + + ++ "Currently, documentation for dependencies is NOT built. This " + ++ "behavior may change in future.\n\n" + + ++ "Additional configuration flags can be specified on the command line " + ++ "and these extend the project configuration from the 'cabal.project', " + ++ "'cabal.project.local' and other files.", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " new-haddock pkgname" + ++ " Build documentation for the package named pkgname\n\n" + + ++ cmdCommonHelpTextNewBuildBeta + } + --TODO: [nice to have] support haddock on specific components, not just + -- whole packages and the silly --executables etc modifiers. + +-- | The @haddock@ command is TODO. 
+-- +-- For more details on how this works, see the module +-- "Distribution.Client.ProjectOrchestration" +-- +haddockAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> GlobalFlags -> IO () +haddockAction (configFlags, configExFlags, installFlags, haddockFlags) + targetStrings globalFlags = do + + baseCtx <- establishProjectBaseContext verbosity cliConfig + + targetSelectors <- either (reportTargetSelectorProblems verbosity) return + =<< readTargetSelectors (localPackages baseCtx) Nothing targetStrings + + buildCtx <- + runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do + + when (buildSettingOnlyDeps (buildSettings baseCtx)) $ + die' verbosity + "The haddock command does not support '--only-dependencies'." + + -- When we interpret the targets on the command line, interpret them as + -- haddock targets + targets <- either (reportTargetProblems verbosity) return + $ resolveTargets + (selectPackageTargets haddockFlags) + selectComponentTarget + TargetProblemCommon + elaboratedPlan + Nothing + targetSelectors + + let elaboratedPlan' = pruneInstallPlanToTargets + TargetActionHaddock + targets + elaboratedPlan + return (elaboratedPlan', targets) + + printPlan verbosity baseCtx buildCtx + + buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx + runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes + where + verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + cliConfig = commandLineFlagsToProjectConfig + globalFlags configFlags configExFlags + installFlags haddockFlags + +-- | This defines what a 'TargetSelector' means for the @haddock@ command. +-- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, +-- or otherwise classifies the problem. +-- +-- For the @haddock@ command we select all buildable libraries. Additionally, +-- depending on the @--executables@ flag we also select all the buildable exes. +-- We do similarly for test-suites, benchmarks and foreign libs. 
+-- +selectPackageTargets :: HaddockFlags -> TargetSelector + -> [AvailableTarget k] -> Either TargetProblem [k] +selectPackageTargets haddockFlags targetSelector targets + + -- If there are any buildable targets then we select those + | not (null targetsBuildable) + = Right targetsBuildable + + -- If there are targets but none are buildable then we report those + | not (null targets) + = Left (TargetProblemNoneEnabled targetSelector targets') + + -- If there are no targets at all then we report that + | otherwise + = Left (TargetProblemNoTargets targetSelector) + where + targets' = forgetTargetsDetail (map disableNotRequested targets) + targetsBuildable = selectBuildableTargets (map disableNotRequested targets) + + -- When there's a target filter like "pkg:exes" then we do select exes, + -- but if it's just a target like "pkg" then we don't build docs for exes + -- unless they are requested by default (i.e. by using --executables) + disableNotRequested t@(AvailableTarget _ cname (TargetBuildable _ _) _) + | not (isRequested targetSelector (componentKind cname)) + = t { availableTargetStatus = TargetDisabledByUser } + disableNotRequested t = t + + isRequested (TargetPackage _ _ (Just _)) _ = True + isRequested (TargetAllPackages (Just _)) _ = True + isRequested _ LibKind = True +-- isRequested _ SubLibKind = True --TODO: what about sublibs? 
+ + -- TODO/HACK, we encode some defaults here as new-haddock's logic; + -- make sure this matches the defaults applied in + -- "Distribution.Client.ProjectPlanning"; this may need more work + -- to be done properly + -- + -- See also https://github.com/haskell/cabal/pull/4886 + isRequested _ FLibKind = fromFlagOrDefault False (haddockForeignLibs haddockFlags) + isRequested _ ExeKind = fromFlagOrDefault False (haddockExecutables haddockFlags) + isRequested _ TestKind = fromFlagOrDefault False (haddockTestSuites haddockFlags) + isRequested _ BenchKind = fromFlagOrDefault False (haddockBenchmarks haddockFlags) + + +-- | For a 'TargetComponent' 'TargetSelector', check if the component can be +-- selected. +-- +-- For the @haddock@ command we just need the basic checks on being buildable +-- etc. +-- +selectComponentTarget :: SubComponentTarget + -> AvailableTarget k -> Either TargetProblem k +selectComponentTarget subtarget = + either (Left . TargetProblemCommon) Right + . selectComponentTargetBasic subtarget + + +-- | The various error conditions that can occur when matching a +-- 'TargetSelector' against 'AvailableTarget's for the @haddock@ command. +-- +data TargetProblem = + TargetProblemCommon TargetProblemCommon + + -- | The 'TargetSelector' matches targets but none are buildable + | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] + + -- | There are no targets at all + | TargetProblemNoTargets TargetSelector + deriving (Eq, Show) + +reportTargetProblems :: Verbosity -> [TargetProblem] -> IO a +reportTargetProblems verbosity = + die' verbosity . unlines . 
map renderTargetProblem + +renderTargetProblem :: TargetProblem -> String +renderTargetProblem (TargetProblemCommon problem) = + renderTargetProblemCommon "build documentation for" problem + +renderTargetProblem (TargetProblemNoneEnabled targetSelector targets) = + renderTargetProblemNoneEnabled "build documentation for" targetSelector targets + +renderTargetProblem(TargetProblemNoTargets targetSelector) = + renderTargetProblemNoTargets "build documentation for" targetSelector diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdInstall.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdInstall.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdInstall.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdInstall.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,867 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE ViewPatterns #-} + +-- | cabal-install CLI command: build +-- +module Distribution.Client.CmdInstall ( + -- * The @build@ CLI and action + installCommand, + installAction, + + -- * Internals exposed for testing + TargetProblem(..), + selectPackageTargets, + selectComponentTarget, + establishDummyProjectBaseContext + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Client.ProjectOrchestration +import Distribution.Client.CmdErrorMessages +import Distribution.Client.CmdSdist + +import Distribution.Client.Setup + ( GlobalFlags(..), ConfigFlags(..), ConfigExFlags, InstallFlags(..) + , configureExOptions, installOptions, liftOptions ) +import Distribution.Solver.Types.ConstraintSource + ( ConstraintSource(..) 
) +import Distribution.Client.Types + ( PackageSpecifier(..), PackageLocation(..), UnresolvedSourcePackage + , SourcePackageDb(..) ) +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Package + ( Package(..), PackageName, mkPackageName, unPackageName ) +import Distribution.Types.PackageId + ( PackageIdentifier(..) ) +import Distribution.Client.ProjectConfig.Types + ( ProjectConfig(..), ProjectConfigShared(..) + , ProjectConfigBuildOnly(..), PackageConfig(..) + , getMapLast, getMapMappend, projectConfigLogsDir + , projectConfigStoreDir, projectConfigBuildOnly + , projectConfigDistDir, projectConfigConfigFile ) +import Distribution.Simple.Program.Db + ( userSpecifyPaths, userSpecifyArgss, defaultProgramDb + , modifyProgramSearchPath ) +import Distribution.Simple.Program.Find + ( ProgramSearchPathEntry(..) ) +import Distribution.Client.Config + ( getCabalDir ) +import qualified Distribution.Simple.PackageIndex as PI +import Distribution.Solver.Types.PackageIndex + ( lookupPackageName, searchByName ) +import Distribution.Types.InstalledPackageInfo + ( InstalledPackageInfo(..) ) +import Distribution.Types.Version + ( nullVersion ) +import Distribution.Types.VersionRange + ( thisVersion ) +import Distribution.Solver.Types.PackageConstraint + ( PackageProperty(..) ) +import Distribution.Client.IndexUtils + ( getSourcePackages, getInstalledPackages ) +import Distribution.Client.ProjectConfig + ( readGlobalConfig, projectConfigWithBuilderRepoContext + , resolveBuildTimeSettings, withProjectOrGlobalConfig ) +import Distribution.Client.DistDirLayout + ( defaultDistDirLayout, DistDirLayout(..), mkCabalDirLayout + , ProjectRoot(ProjectRootImplicit) + , storePackageDirectory, cabalStoreDirLayout + , CabalDirLayout(..), StoreDirLayout(..) 
) +import Distribution.Client.RebuildMonad + ( runRebuild ) +import Distribution.Client.InstallSymlink + ( OverwritePolicy(..), symlinkBinary ) +import Distribution.Simple.Setup + ( Flag(..), HaddockFlags, fromFlagOrDefault, flagToMaybe, toFlag + , trueArg, configureOptions, haddockOptions, flagToList ) +import Distribution.Solver.Types.SourcePackage + ( SourcePackage(..) ) +import Distribution.ReadE + ( ReadE(..), succeedReadE ) +import Distribution.Simple.Command + ( CommandUI(..), ShowOrParseArgs(..), OptionField(..) + , option, usageAlternatives, reqArg ) +import Distribution.Simple.Configure + ( configCompilerEx ) +import Distribution.Simple.Compiler + ( Compiler(..), CompilerId(..), CompilerFlavor(..) ) +import Distribution.Simple.GHC + ( ghcPlatformAndVersionString + , GhcImplInfo(..), getImplInfo + , GhcEnvironmentFileEntry(..) + , renderGhcEnvironmentFile, readGhcEnvironmentFile, ParseErrorExc ) +import Distribution.Types.UnitId + ( UnitId ) +import Distribution.Types.UnqualComponentName + ( UnqualComponentName, unUnqualComponentName ) +import Distribution.Verbosity + ( Verbosity, normal, lessVerbose ) +import Distribution.Simple.Utils + ( wrapText, die', notice, warn + , withTempDirectory, createDirectoryIfMissingVerbose + , ordNub ) +import Distribution.Utils.Generic + ( writeFileAtomic ) +import Distribution.Text + ( simpleParse ) +import Distribution.Pretty + ( prettyShow ) + +import Control.Exception + ( catch ) +import Control.Monad + ( mapM, mapM_ ) +import qualified Data.ByteString.Lazy.Char8 as BS +import Data.Either + ( partitionEithers, isLeft ) +import Data.Ord + ( comparing, Down(..) 
) +import qualified Data.Map as Map +import Distribution.Utils.NubList + ( fromNubList ) +import System.Directory + ( getHomeDirectory, doesFileExist, createDirectoryIfMissing + , getTemporaryDirectory, makeAbsolute, doesDirectoryExist ) +import System.FilePath + ( (), takeDirectory, takeBaseName ) + +data NewInstallFlags = NewInstallFlags + { ninstInstallLibs :: Flag Bool + , ninstEnvironmentPath :: Flag FilePath + , ninstOverwritePolicy :: Flag OverwritePolicy + } + +defaultNewInstallFlags :: NewInstallFlags +defaultNewInstallFlags = NewInstallFlags + { ninstInstallLibs = toFlag False + , ninstEnvironmentPath = mempty + , ninstOverwritePolicy = toFlag NeverOverwrite + } + +newInstallOptions :: ShowOrParseArgs -> [OptionField NewInstallFlags] +newInstallOptions _ = + [ option [] ["lib"] + "Install libraries rather than executables from the target package." + ninstInstallLibs (\v flags -> flags { ninstInstallLibs = v }) + trueArg + , option [] ["package-env", "env"] + "Set the environment file that may be modified." + ninstEnvironmentPath (\pf flags -> flags { ninstEnvironmentPath = pf }) + (reqArg "ENV" (succeedReadE Flag) flagToList) + , option [] ["overwrite-policy"] + "How to handle already existing symlinks." + ninstOverwritePolicy (\v flags -> flags { ninstOverwritePolicy = v }) + $ reqArg + "always|never" + readOverwritePolicyFlag + showOverwritePolicyFlag + ] + where + readOverwritePolicyFlag = ReadE $ \case + "always" -> Right $ Flag AlwaysOverwrite + "never" -> Right $ Flag NeverOverwrite + policy -> Left $ "'" <> policy <> "' isn't a valid overwrite policy" + showOverwritePolicyFlag (Flag AlwaysOverwrite) = ["always"] + showOverwritePolicyFlag (Flag NeverOverwrite) = ["never"] + showOverwritePolicyFlag NoFlag = [] + +installCommand :: CommandUI ( ConfigFlags, ConfigExFlags, InstallFlags + , HaddockFlags, NewInstallFlags + ) +installCommand = CommandUI + { commandName = "new-install" + , commandSynopsis = "Install packages." 
+ , commandUsage = usageAlternatives + "new-install" [ "[TARGETS] [FLAGS]" ] + , commandDescription = Just $ \_ -> wrapText $ + "Installs one or more packages. This is done by installing them " + ++ "in the store and symlinking the executables in the directory " + ++ "specified by the --symlink-bindir flag (`~/.cabal/bin/` by default). " + ++ "If you want the installed executables to be available globally, " + ++ "make sure that the PATH environment variable contains that directory. " + ++ "\n\n" + ++ "If TARGET is a library, it will be added to the global environment. " + ++ "When doing this, cabal will try to build a plan that includes all " + ++ "the previously installed libraries. This is currently not implemented." + , commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " new-install\n" + ++ " Install the package in the current directory\n" + ++ " " ++ pname ++ " new-install pkgname\n" + ++ " Install the package named pkgname" + ++ " (fetching it from hackage if necessary)\n" + ++ " " ++ pname ++ " new-install ./pkgfoo\n" + ++ " Install the package in the ./pkgfoo directory\n" + + ++ cmdCommonHelpTextNewBuildBeta + , commandOptions = \showOrParseArgs -> + liftOptions get1 set1 + -- Note: [Hidden Flags] + -- hide "constraint", "dependency", and + -- "exact-configuration" from the configure options. + (filter ((`notElem` ["constraint", "dependency" + , "exact-configuration"]) + . optionName) $ configureOptions showOrParseArgs) + ++ liftOptions get2 set2 (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag) + ++ liftOptions get3 set3 + -- hide "target-package-db" flag from the + -- install options. + (filter ((`notElem` ["target-package-db"]) + . optionName) $ + installOptions showOrParseArgs) + ++ liftOptions get4 set4 + -- hide "verbose" and "builddir" flags from the + -- haddock options. + (filter ((`notElem` ["v", "verbose", "builddir"]) + . 
optionName) $ + haddockOptions showOrParseArgs) + ++ liftOptions get5 set5 (newInstallOptions showOrParseArgs) + , commandDefaultFlags = (mempty, mempty, mempty, mempty, defaultNewInstallFlags) + } + where + get1 (a,_,_,_,_) = a; set1 a (_,b,c,d,e) = (a,b,c,d,e) + get2 (_,b,_,_,_) = b; set2 b (a,_,c,d,e) = (a,b,c,d,e) + get3 (_,_,c,_,_) = c; set3 c (a,b,_,d,e) = (a,b,c,d,e) + get4 (_,_,_,d,_) = d; set4 d (a,b,c,_,e) = (a,b,c,d,e) + get5 (_,_,_,_,e) = e; set5 e (a,b,c,d,_) = (a,b,c,d,e) + + +-- | The @install@ command actually serves four different needs. It installs: +-- * exes: +-- For example a program from hackage. The behavior is similar to the old +-- install command, except that now conflicts between separate runs of the +-- command are impossible thanks to the store. +-- Exes are installed in the store like a normal dependency, then they are +-- symlinked uin the directory specified by --symlink-bindir. +-- To do this we need a dummy projectBaseContext containing the targets as +-- estra packages and using a temporary dist directory. +-- * libraries +-- Libraries install through a similar process, but using GHC environment +-- files instead of symlinks. This means that 'new-install'ing libraries +-- only works on GHC >= 8.0. +-- +-- For more details on how this works, see the module +-- "Distribution.Client.ProjectOrchestration" +-- +installAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags, NewInstallFlags) + -> [String] -> GlobalFlags -> IO () +installAction (configFlags, configExFlags, installFlags, haddockFlags, newInstallFlags) + targetStrings globalFlags = do + -- We never try to build tests/benchmarks for remote packages. + -- So we set them as disabled by default and error if they are explicitly + -- enabled. 
+ when (configTests configFlags' == Flag True) $ + die' verbosity $ "--enable-tests was specified, but tests can't " + ++ "be enabled in a remote package" + when (configBenchmarks configFlags' == Flag True) $ + die' verbosity $ "--enable-benchmarks was specified, but benchmarks can't " + ++ "be enabled in a remote package" + + let + withProject = do + let verbosity' = lessVerbose verbosity + + -- First, we need to learn about what's available to be installed. + localBaseCtx <- establishProjectBaseContext verbosity' cliConfig + let localDistDirLayout = distDirLayout localBaseCtx + pkgDb <- projectConfigWithBuilderRepoContext verbosity' (buildSettings localBaseCtx) (getSourcePackages verbosity) + + let + (targetStrings', packageIds) = partitionEithers . flip fmap targetStrings $ + \str -> case simpleParse str of + Just (pkgId :: PackageId) + | pkgVersion pkgId /= nullVersion -> Right pkgId + _ -> Left str + packageSpecifiers = flip fmap packageIds $ \case + PackageIdentifier{..} + | pkgVersion == nullVersion -> NamedPackage pkgName [] + | otherwise -> + NamedPackage pkgName [PackagePropertyVersion (thisVersion pkgVersion)] + packageTargets = flip TargetPackageNamed Nothing . pkgName <$> packageIds + + if null targetStrings' + then return (packageSpecifiers, packageTargets, projectConfig localBaseCtx) + else do + targetSelectors <- either (reportTargetSelectorProblems verbosity) return + =<< readTargetSelectors (localPackages localBaseCtx) Nothing targetStrings' + + (specs, selectors) <- withInstallPlan verbosity' localBaseCtx $ \elaboratedPlan _ -> do + -- Split into known targets and hackage packages. + (targets, hackageNames) <- case + resolveTargets + selectPackageTargets + selectComponentTarget + TargetProblemCommon + elaboratedPlan + (Just pkgDb) + targetSelectors of + Right targets -> do + -- Everything is a local dependency. + return (targets, []) + Left errs -> do + -- Not everything is local. + let + (errs', hackageNames) = partitionEithers . 
flip fmap errs $ \case + TargetProblemCommon (TargetAvailableInIndex name) -> Right name + err -> Left err + + -- report incorrect case for known package. + for_ errs' $ \case + TargetProblemCommon (TargetNotInProject hn) -> + case searchByName (packageIndex pkgDb) (unPackageName hn) of + [] -> return () + xs -> die' verbosity . concat $ + [ "Unknown package \"", unPackageName hn, "\". " + , "Did you mean any of the following?\n" + , unlines (("- " ++) . unPackageName . fst <$> xs) + ] + _ -> return () + + when (not . null $ errs') $ reportTargetProblems verbosity errs' + + let + targetSelectors' = flip filter targetSelectors $ \case + TargetComponentUnknown name _ _ + | name `elem` hackageNames -> False + TargetPackageNamed name _ + | name `elem` hackageNames -> False + _ -> True + + -- This can't fail, because all of the errors are removed (or we've given up). + targets <- either (reportTargetProblems verbosity) return $ resolveTargets + selectPackageTargets + selectComponentTarget + TargetProblemCommon + elaboratedPlan + Nothing + targetSelectors' + + return (targets, hackageNames) + + let + planMap = InstallPlan.toMap elaboratedPlan + targetIds = Map.keys targets + + sdistize (SpecificSourcePackage spkg@SourcePackage{..}) = SpecificSourcePackage spkg' + where + sdistPath = distSdistFile localDistDirLayout packageInfoId TargzFormat + spkg' = spkg { packageSource = LocalTarballPackage sdistPath } + sdistize named = named + + local = sdistize <$> localPackages localBaseCtx + + gatherTargets :: UnitId -> TargetSelector + gatherTargets targetId = TargetPackageNamed pkgName Nothing + where + Just targetUnit = Map.lookup targetId planMap + PackageIdentifier{..} = packageId targetUnit + + targets' = fmap gatherTargets targetIds + + hackagePkgs :: [PackageSpecifier UnresolvedSourcePackage] + hackagePkgs = flip NamedPackage [] <$> hackageNames + hackageTargets :: [TargetSelector] + hackageTargets = flip TargetPackageNamed Nothing <$> hackageNames + + 
createDirectoryIfMissing True (distSdistDirectory localDistDirLayout) + + unless (Map.null targets) $ + mapM_ + (\(SpecificSourcePackage pkg) -> packageToSdist verbosity + (distProjectRootDirectory localDistDirLayout) (Archive TargzFormat) + (distSdistFile localDistDirLayout (packageId pkg) TargzFormat) pkg + ) (localPackages localBaseCtx) + + if null targets + then return (hackagePkgs, hackageTargets) + else return (local ++ hackagePkgs, targets' ++ hackageTargets) + + return (specs ++ packageSpecifiers, selectors ++ packageTargets, projectConfig localBaseCtx) + + withoutProject globalConfig = do + let + parsePkg pkgName + | Just (pkg :: PackageId) <- simpleParse pkgName = return pkg + | otherwise = die' verbosity ("Invalid package ID: " ++ pkgName) + packageIds <- mapM parsePkg targetStrings + + cabalDir <- getCabalDir + let + projectConfig = globalConfig <> cliConfig + + ProjectConfigBuildOnly { + projectConfigLogsDir + } = projectConfigBuildOnly projectConfig + + ProjectConfigShared { + projectConfigStoreDir + } = projectConfigShared projectConfig + + mlogsDir = flagToMaybe projectConfigLogsDir + mstoreDir = flagToMaybe projectConfigStoreDir + cabalDirLayout = mkCabalDirLayout cabalDir mstoreDir mlogsDir + + buildSettings = resolveBuildTimeSettings + verbosity cabalDirLayout + projectConfig + + SourcePackageDb { packageIndex } <- projectConfigWithBuilderRepoContext + verbosity buildSettings + (getSourcePackages verbosity) + + for_ targetStrings $ \case + name + | null (lookupPackageName packageIndex (mkPackageName name)) + , xs@(_:_) <- searchByName packageIndex name -> + die' verbosity . concat $ + [ "Unknown package \"", name, "\". " + , "Did you mean any of the following?\n" + , unlines (("- " ++) . unPackageName . 
fst <$> xs) + ] + _ -> return () + + let + packageSpecifiers = flip fmap packageIds $ \case + PackageIdentifier{..} + | pkgVersion == nullVersion -> NamedPackage pkgName [] + | otherwise -> + NamedPackage pkgName [PackagePropertyVersion (thisVersion pkgVersion)] + packageTargets = flip TargetPackageNamed Nothing . pkgName <$> packageIds + return (packageSpecifiers, packageTargets, projectConfig) + + (specs, selectors, config) <- withProjectOrGlobalConfig verbosity globalConfigFlag + withProject withoutProject + + home <- getHomeDirectory + let + ProjectConfig { + projectConfigShared = ProjectConfigShared { + projectConfigHcFlavor, + projectConfigHcPath, + projectConfigHcPkg + }, + projectConfigLocalPackages = PackageConfig { + packageConfigProgramPaths, + packageConfigProgramArgs, + packageConfigProgramPathExtra + } + } = config + + hcFlavor = flagToMaybe projectConfigHcFlavor + hcPath = flagToMaybe projectConfigHcPath + hcPkg = flagToMaybe projectConfigHcPkg + + progDb = + userSpecifyPaths (Map.toList (getMapLast packageConfigProgramPaths)) + . userSpecifyArgss (Map.toList (getMapMappend packageConfigProgramArgs)) + . modifyProgramSearchPath + (++ [ ProgramSearchPathDir dir + | dir <- fromNubList packageConfigProgramPathExtra ]) + $ defaultProgramDb + + (compiler@Compiler { compilerId = + compilerId@(CompilerId compilerFlavor compilerVersion) }, platform, progDb') <- + configCompilerEx hcFlavor hcPath hcPkg progDb verbosity + + let + globalEnv name = + home ".ghc" ghcPlatformAndVersionString platform compilerVersion + "environments" name + localEnv dir = + dir ".ghc.environment." ++ ghcPlatformAndVersionString platform compilerVersion + + GhcImplInfo{ supportsPkgEnvFiles } = getImplInfo compiler + -- Why? We know what the first part will be, we only care about the packages. 
+ filterEnvEntries = filter $ \case + GhcEnvFilePackageId _ -> True + _ -> False + + envFile <- case flagToMaybe (ninstEnvironmentPath newInstallFlags) of + Just spec + -- Is spec a bare word without any "pathy" content, then it refers to + -- a named global environment. + | takeBaseName spec == spec -> return (globalEnv spec) + | otherwise -> do + spec' <- makeAbsolute spec + isDir <- doesDirectoryExist spec' + if isDir + -- If spec is a directory, then make an ambient environment inside + -- that directory. + then return (localEnv spec') + -- Otherwise, treat it like a literal file path. + else return spec' + Nothing -> return (globalEnv "default") + + envFileExists <- doesFileExist envFile + envEntries <- filterEnvEntries <$> if + (compilerFlavor == GHC || compilerFlavor == GHCJS) + && supportsPkgEnvFiles && envFileExists + then catch (readGhcEnvironmentFile envFile) $ \(_ :: ParseErrorExc) -> + warn verbosity ("The environment file " ++ envFile ++ + " is unparsable. Libraries cannot be installed.") >> return [] + else return [] + + cabalDir <- getCabalDir + mstoreDir <- sequenceA $ makeAbsolute <$> flagToMaybe (globalStoreDir globalFlags) + let + mlogsDir = flagToMaybe (globalLogsDir globalFlags) + cabalLayout = mkCabalDirLayout cabalDir mstoreDir mlogsDir + packageDbs = storePackageDBStack (cabalStoreDirLayout cabalLayout) compilerId + + installedIndex <- getInstalledPackages verbosity compiler packageDbs progDb' + + let (envSpecs, envEntries') = environmentFileToSpecifiers installedIndex envEntries + + -- Second, we need to use a fake project to let Cabal build the + -- installables correctly. For that, we need a place to put a + -- temporary dist directory. + globalTmp <- getTemporaryDirectory + withTempDirectory + verbosity + globalTmp + "cabal-install." 
+ $ \tmpDir -> do + baseCtx <- establishDummyProjectBaseContext + verbosity + config + tmpDir + (envSpecs ++ specs) + + buildCtx <- + runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do + + -- Interpret the targets on the command line as build targets + targets <- either (reportTargetProblems verbosity) return + $ resolveTargets + selectPackageTargets + selectComponentTarget + TargetProblemCommon + elaboratedPlan + Nothing + selectors + + let elaboratedPlan' = pruneInstallPlanToTargets + TargetActionBuild + targets + elaboratedPlan + elaboratedPlan'' <- + if buildSettingOnlyDeps (buildSettings baseCtx) + then either (reportCannotPruneDependencies verbosity) return $ + pruneInstallPlanToDependencies (Map.keysSet targets) + elaboratedPlan' + else return elaboratedPlan' + + return (elaboratedPlan'', targets) + + printPlan verbosity baseCtx buildCtx + + buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx + -- Temporary fix for #5641 + when (any isLeft buildOutcomes) $ + warn verbosity $ "Some package(s) failed to build. " + <> "Try rerunning with -j1 if you can't see the error." + runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes + + let + dryRun = buildSettingDryRun $ buildSettings baseCtx + mkPkgBinDir = ( "bin") . + storePackageDirectory + (cabalStoreDirLayout $ cabalDirLayout baseCtx) + compilerId + installLibs = fromFlagOrDefault False (ninstInstallLibs newInstallFlags) + + when (not installLibs && not dryRun) $ do + -- If there are exes, symlink them + let symlinkBindirUnknown = + "symlink-bindir is not defined. 
Set it in your cabal config file " + ++ "or use --symlink-bindir=" + symlinkBindir <- fromFlagOrDefault (die' verbosity symlinkBindirUnknown) + $ fmap makeAbsolute + $ projectConfigSymlinkBinDir + $ projectConfigBuildOnly + $ projectConfig $ baseCtx + createDirectoryIfMissingVerbose verbosity False symlinkBindir + warnIfNoExes verbosity buildCtx + let + doSymlink = symlinkBuiltPackage + verbosity + overwritePolicy + mkPkgBinDir symlinkBindir + in traverse_ doSymlink $ Map.toList $ targetsMap buildCtx + + when (installLibs && not dryRun) $ + if supportsPkgEnvFiles + then do + -- Why do we get it again? If we updated a globalPackage then we need + -- the new version. + installedIndex' <- getInstalledPackages verbosity compiler packageDbs progDb' + let + getLatest = fmap (head . snd) . take 1 . sortBy (comparing (Down . fst)) + . PI.lookupPackageName installedIndex' + globalLatest = concat (getLatest <$> globalPackages) + + baseEntries = + GhcEnvFileClearPackageDbStack : fmap GhcEnvFilePackageDb packageDbs + globalEntries = GhcEnvFilePackageId . installedUnitId <$> globalLatest + pkgEntries = ordNub $ + globalEntries + ++ envEntries' + ++ entriesForLibraryComponents (targetsMap buildCtx) + contents' = renderGhcEnvironmentFile (baseEntries ++ pkgEntries) + createDirectoryIfMissing True (takeDirectory envFile) + writeFileAtomic envFile (BS.pack contents') + else + warn verbosity $ + "The current compiler doesn't support safely installing libraries, " + ++ "so only executables will be available. 
(Library installation is " + ++ "supported on GHC 8.0+ only)" + where + configFlags' = disableTestsBenchsByDefault configFlags + verbosity = fromFlagOrDefault normal (configVerbosity configFlags') + cliConfig = commandLineFlagsToProjectConfig + globalFlags configFlags' configExFlags + installFlags haddockFlags + globalConfigFlag = projectConfigConfigFile (projectConfigShared cliConfig) + overwritePolicy = fromFlagOrDefault NeverOverwrite + $ ninstOverwritePolicy newInstallFlags + +warnIfNoExes :: Verbosity -> ProjectBuildContext -> IO () +warnIfNoExes verbosity buildCtx = + when noExes $ + warn verbosity $ "You asked to install executables, " + <> "but there are no executables in " + <> plural (listPlural selectors) "target" "targets" <> ": " + <> intercalate ", " (showTargetSelector <$> selectors) <> ". " + <> "Perhaps you want to use --lib " + <> "to install libraries instead." + where + targets = concat $ Map.elems $ targetsMap buildCtx + components = fst <$> targets + selectors = concatMap snd targets + noExes = null $ catMaybes $ exeMaybe <$> components + exeMaybe (ComponentTarget (CExeName exe) _) = Just exe + exeMaybe _ = Nothing + +globalPackages :: [PackageName] +globalPackages = mkPackageName <$> + [ "ghc", "hoopl", "bytestring", "unix", "base", "time", "hpc", "filepath" + , "process", "array", "integer-gmp", "containers", "ghc-boot", "binary" + , "ghc-prim", "ghci", "rts", "terminfo", "transformers", "deepseq" + , "ghc-boot-th", "pretty", "template-haskell", "directory", "text" + , "bin-package-db" + ] + +environmentFileToSpecifiers :: PI.InstalledPackageIndex -> [GhcEnvironmentFileEntry] + -> ([PackageSpecifier a], [GhcEnvironmentFileEntry]) +environmentFileToSpecifiers ipi = foldMap $ \case + (GhcEnvFilePackageId unitId) + | Just InstalledPackageInfo{ sourcePackageId = PackageIdentifier{..}, installedUnitId } + <- PI.lookupUnitId ipi unitId + , let pkgSpec = NamedPackage pkgName [PackagePropertyVersion (thisVersion pkgVersion)] + -> if pkgName `elem` 
globalPackages + then ([pkgSpec], []) + else ([pkgSpec], [GhcEnvFilePackageId installedUnitId]) + _ -> ([], []) + + +-- | Disables tests and benchmarks if they weren't explicitly enabled. +disableTestsBenchsByDefault :: ConfigFlags -> ConfigFlags +disableTestsBenchsByDefault configFlags = + configFlags { configTests = Flag False <> configTests configFlags + , configBenchmarks = Flag False <> configBenchmarks configFlags } + +-- | Symlink every exe from a package from the store to a given location +symlinkBuiltPackage :: Verbosity + -> OverwritePolicy -- ^ Whether to overwrite existing files + -> (UnitId -> FilePath) -- ^ A function to get an UnitId's + -- store directory + -> FilePath -- ^ Where to put the symlink + -> ( UnitId + , [(ComponentTarget, [TargetSelector])] ) + -> IO () +symlinkBuiltPackage verbosity overwritePolicy + mkSourceBinDir destDir + (pkg, components) = + traverse_ symlinkAndWarn exes + where + exes = catMaybes $ (exeMaybe . fst) <$> components + exeMaybe (ComponentTarget (CExeName exe) _) = Just exe + exeMaybe _ = Nothing + symlinkAndWarn exe = do + success <- symlinkBuiltExe + verbosity overwritePolicy + (mkSourceBinDir pkg) destDir exe + let errorMessage = case overwritePolicy of + NeverOverwrite -> + "Path '" <> (destDir prettyShow exe) <> "' already exists. " + <> "Use --overwrite-policy=always to overwrite." + -- This shouldn't even be possible, but we keep it in case + -- symlinking logic changes + AlwaysOverwrite -> "Symlinking '" <> prettyShow exe <> "' failed." + unless success $ die' verbosity errorMessage + +-- | Symlink a specific exe. 
+symlinkBuiltExe :: Verbosity -> OverwritePolicy + -> FilePath -> FilePath + -> UnqualComponentName + -> IO Bool +symlinkBuiltExe verbosity overwritePolicy sourceDir destDir exe = do + notice verbosity $ "Symlinking '" <> prettyShow exe <> "'" + symlinkBinary + overwritePolicy + destDir + sourceDir + exe + $ unUnqualComponentName exe + +-- | Create 'GhcEnvironmentFileEntry's for packages with exposed libraries. +entriesForLibraryComponents :: TargetsMap -> [GhcEnvironmentFileEntry] +entriesForLibraryComponents = Map.foldrWithKey' (\k v -> mappend (go k v)) [] + where + hasLib :: (ComponentTarget, [TargetSelector]) -> Bool + hasLib (ComponentTarget CLibName _, _) = True + hasLib (ComponentTarget (CSubLibName _) _, _) = True + hasLib _ = False + + go :: UnitId -> [(ComponentTarget, [TargetSelector])] -> [GhcEnvironmentFileEntry] + go unitId targets + | any hasLib targets = [GhcEnvFilePackageId unitId] + | otherwise = [] + +-- | Create a dummy project context, without a .cabal or a .cabal.project file +-- (a place where to put a temporary dist directory is still needed) +establishDummyProjectBaseContext + :: Verbosity + -> ProjectConfig + -> FilePath + -- ^ Where to put the dist directory + -> [PackageSpecifier UnresolvedSourcePackage] + -- ^ The packages to be included in the project + -> IO ProjectBaseContext +establishDummyProjectBaseContext verbosity cliConfig tmpDir localPackages = do + + cabalDir <- getCabalDir + + -- Create the dist directories + createDirectoryIfMissingVerbose verbosity True $ distDirectory distDirLayout + createDirectoryIfMissingVerbose verbosity True $ + distProjectCacheDirectory distDirLayout + + globalConfig <- runRebuild "" + $ readGlobalConfig verbosity + $ projectConfigConfigFile + $ projectConfigShared cliConfig + let projectConfig = globalConfig <> cliConfig + + let ProjectConfigBuildOnly { + projectConfigLogsDir + } = projectConfigBuildOnly projectConfig + + ProjectConfigShared { + projectConfigStoreDir + } = projectConfigShared 
projectConfig + + mlogsDir = flagToMaybe projectConfigLogsDir + mstoreDir = flagToMaybe projectConfigStoreDir + cabalDirLayout = mkCabalDirLayout cabalDir mstoreDir mlogsDir + + buildSettings = resolveBuildTimeSettings + verbosity cabalDirLayout + projectConfig + + return ProjectBaseContext { + distDirLayout, + cabalDirLayout, + projectConfig, + localPackages, + buildSettings + } + where + mdistDirectory = flagToMaybe + $ projectConfigDistDir + $ projectConfigShared cliConfig + projectRoot = ProjectRootImplicit tmpDir + distDirLayout = defaultDistDirLayout projectRoot + mdistDirectory + +-- | This defines what a 'TargetSelector' means for the @bench@ command. +-- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, +-- or otherwise classifies the problem. +-- +-- For the @build@ command select all components except non-buildable +-- and disabled tests\/benchmarks, fail if there are no such +-- components +-- +selectPackageTargets :: TargetSelector + -> [AvailableTarget k] -> Either TargetProblem [k] +selectPackageTargets targetSelector targets + + -- If there are any buildable targets then we select those + | not (null targetsBuildable) + = Right targetsBuildable + + -- If there are targets but none are buildable then we report those + | not (null targets) + = Left (TargetProblemNoneEnabled targetSelector targets') + + -- If there are no targets at all then we report that + | otherwise + = Left (TargetProblemNoTargets targetSelector) + where + targets' = forgetTargetsDetail targets + targetsBuildable = selectBuildableTargetsWith + (buildable targetSelector) + targets + + -- When there's a target filter like "pkg:tests" then we do select tests, + -- but if it's just a target like "pkg" then we don't build tests unless + -- they are requested by default (i.e. 
by using --enable-tests) + buildable (TargetPackage _ _ Nothing) TargetNotRequestedByDefault = False + buildable (TargetAllPackages Nothing) TargetNotRequestedByDefault = False + buildable _ _ = True + +-- | For a 'TargetComponent' 'TargetSelector', check if the component can be +-- selected. +-- +-- For the @build@ command we just need the basic checks on being buildable etc. +-- +selectComponentTarget :: SubComponentTarget + -> AvailableTarget k -> Either TargetProblem k +selectComponentTarget subtarget = + either (Left . TargetProblemCommon) Right + . selectComponentTargetBasic subtarget + + +-- | The various error conditions that can occur when matching a +-- 'TargetSelector' against 'AvailableTarget's for the @build@ command. +-- +data TargetProblem = + TargetProblemCommon TargetProblemCommon + + -- | The 'TargetSelector' matches targets but none are buildable + | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] + + -- | There are no targets at all + | TargetProblemNoTargets TargetSelector + deriving (Eq, Show) + +reportTargetProblems :: Verbosity -> [TargetProblem] -> IO a +reportTargetProblems verbosity = + die' verbosity . unlines . map renderTargetProblem + +renderTargetProblem :: TargetProblem -> String +renderTargetProblem (TargetProblemCommon problem) = + renderTargetProblemCommon "build" problem +renderTargetProblem (TargetProblemNoneEnabled targetSelector targets) = + renderTargetProblemNoneEnabled "build" targetSelector targets +renderTargetProblem(TargetProblemNoTargets targetSelector) = + renderTargetProblemNoTargets "build" targetSelector + +reportCannotPruneDependencies :: Verbosity -> CannotPruneDependencies -> IO a +reportCannotPruneDependencies verbosity = + die' verbosity . 
renderCannotPruneDependencies diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdLegacy.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdLegacy.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdLegacy.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdLegacy.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,173 @@ +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ViewPatterns #-} +module Distribution.Client.CmdLegacy ( legacyCmd, legacyWrapperCmd, newCmd ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Client.Sandbox + ( loadConfigOrSandboxConfig, findSavedDistPref ) +import qualified Distribution.Client.Setup as Client +import Distribution.Client.SetupWrapper + ( SetupScriptOptions(..), setupWrapper, defaultSetupScriptOptions ) +import qualified Distribution.Simple.Setup as Setup +import Distribution.Simple.Command +import Distribution.Simple.Utils + ( warn, wrapText ) +import Distribution.Verbosity + ( Verbosity, normal ) + +import Control.Exception + ( SomeException(..), try ) +import qualified Data.Text as T + +-- Tweaked versions of code from Main. 
+regularCmd :: (HasVerbosity flags) => CommandUI flags -> (flags -> [String] -> globals -> IO action) -> Bool -> CommandSpec (globals -> IO action) +regularCmd ui action shouldWarn = + CommandSpec ui ((flip commandAddAction) (\flags extra globals -> showWarning flags >> action flags extra globals)) NormalCommand + where + showWarning flags = if shouldWarn + then warn (verbosity flags) (deprecationNote (commandName ui) ++ "\n") + else return () + +wrapperCmd :: Monoid flags => CommandUI flags -> (flags -> Setup.Flag Verbosity) -> (flags -> Setup.Flag String) -> Bool -> CommandSpec (Client.GlobalFlags -> IO ()) +wrapperCmd ui verbosity' distPref shouldWarn = + CommandSpec ui (\ui' -> wrapperAction ui' verbosity' distPref shouldWarn) NormalCommand + +wrapperAction :: Monoid flags => CommandUI flags -> (flags -> Setup.Flag Verbosity) -> (flags -> Setup.Flag String) -> Bool -> Command (Client.GlobalFlags -> IO ()) +wrapperAction command verbosityFlag distPrefFlag shouldWarn = + commandAddAction command + { commandDefaultFlags = mempty } $ \flags extraArgs globalFlags -> do + let verbosity' = Setup.fromFlagOrDefault normal (verbosityFlag flags) + + if shouldWarn + then warn verbosity' (deprecationNote (commandName command) ++ "\n") + else return () + + load <- try (loadConfigOrSandboxConfig verbosity' globalFlags) + let config = either (\(SomeException _) -> mempty) snd load + distPref <- findSavedDistPref config (distPrefFlag flags) + let setupScriptOptions = defaultSetupScriptOptions { useDistPref = distPref } + + let command' = command { commandName = T.unpack . T.replace "v1-" "" . T.pack . 
commandName $ command } + + setupWrapper verbosity' setupScriptOptions Nothing + command' (const flags) (const extraArgs) + +-- + +class HasVerbosity a where + verbosity :: a -> Verbosity + +instance HasVerbosity (Setup.Flag Verbosity) where + verbosity = Setup.fromFlagOrDefault normal + +instance (HasVerbosity a) => HasVerbosity (a, b) where + verbosity (a, _) = verbosity a + +instance (HasVerbosity b) => HasVerbosity (a, b, c) where + verbosity (_ , b, _) = verbosity b + +instance (HasVerbosity a) => HasVerbosity (a, b, c, d) where + verbosity (a, _, _, _) = verbosity a + +instance HasVerbosity Setup.BuildFlags where + verbosity = verbosity . Setup.buildVerbosity + +instance HasVerbosity Setup.ConfigFlags where + verbosity = verbosity . Setup.configVerbosity + +instance HasVerbosity Setup.ReplFlags where + verbosity = verbosity . Setup.replVerbosity + +instance HasVerbosity Client.FreezeFlags where + verbosity = verbosity . Client.freezeVerbosity + +instance HasVerbosity Setup.HaddockFlags where + verbosity = verbosity . Setup.haddockVerbosity + +instance HasVerbosity Client.ExecFlags where + verbosity = verbosity . Client.execVerbosity + +instance HasVerbosity Client.UpdateFlags where + verbosity = verbosity . Client.updateVerbosity + +instance HasVerbosity Setup.CleanFlags where + verbosity = verbosity . Setup.cleanVerbosity + +instance HasVerbosity Client.SDistFlags where + verbosity = verbosity . Client.sDistVerbosity + +instance HasVerbosity Client.SandboxFlags where + verbosity = verbosity . Client.sandboxVerbosity + +instance HasVerbosity Setup.DoctestFlags where + verbosity = verbosity . 
Setup.doctestVerbosity + +-- + +deprecationNote :: String -> String +deprecationNote cmd = wrapText $ + "The " ++ cmd ++ " command is a part of the legacy v1 style of cabal usage.\n\n" ++ + + "Please switch to using either the new project style and the new-" ++ cmd ++ + " command or the legacy v1-" ++ cmd ++ " alias as new-style projects will" ++ + " become the default in the next version of cabal-install. Please file a" ++ + " bug if you cannot replicate a working v1- use case with the new-style commands.\n\n" ++ + + "For more information, see: https://wiki.haskell.org/Cabal/NewBuild\n" + +legacyNote :: String -> String +legacyNote cmd = wrapText $ + "The v1-" ++ cmd ++ " command is a part of the legacy v1 style of cabal usage.\n\n" ++ + + "It is a legacy feature and will be removed in a future release of cabal-install." ++ + " Please file a bug if you cannot replicate a working v1- use case with the new-style" ++ + " commands.\n\n" ++ + + "For more information, see: https://wiki.haskell.org/Cabal/NewBuild\n" + +toLegacyCmd :: (Bool -> CommandSpec (globals -> IO action)) -> [CommandSpec (globals -> IO action)] +toLegacyCmd mkSpec = [toDeprecated (mkSpec True), toLegacy (mkSpec False)] + where + legacyMsg = T.unpack . T.replace "v1-" "" . T.pack + + toLegacy (CommandSpec origUi@CommandUI{..} action type') = CommandSpec legUi action type' + where + legUi = origUi + { commandName = "v1-" ++ commandName + , commandNotes = Just $ \pname -> case commandNotes of + Just notes -> notes pname ++ "\n" ++ legacyNote commandName + Nothing -> legacyNote commandName + } + + toDeprecated (CommandSpec origUi@CommandUI{..} action type') = CommandSpec depUi action type' + where + depUi = origUi + { commandName = legacyMsg commandName + , commandUsage = legacyMsg . commandUsage + , commandDescription = (legacyMsg .) 
<$> commandDescription + , commandNotes = Just $ \pname -> case commandNotes of + Just notes -> legacyMsg (notes pname) ++ "\n" ++ deprecationNote commandName + Nothing -> deprecationNote commandName + } + +legacyCmd :: (HasVerbosity flags) => CommandUI flags -> (flags -> [String] -> globals -> IO action) -> [CommandSpec (globals -> IO action)] +legacyCmd ui action = toLegacyCmd (regularCmd ui action) + +legacyWrapperCmd :: Monoid flags => CommandUI flags -> (flags -> Setup.Flag Verbosity) -> (flags -> Setup.Flag String) -> [CommandSpec (Client.GlobalFlags -> IO ())] +legacyWrapperCmd ui verbosity' distPref = toLegacyCmd (wrapperCmd ui verbosity' distPref) + +newCmd :: CommandUI flags -> (flags -> [String] -> globals -> IO action) -> [CommandSpec (globals -> IO action)] +newCmd origUi@CommandUI{..} action = [cmd v2Ui, cmd origUi] + where + cmd ui = CommandSpec ui (flip commandAddAction action) NormalCommand + v2Msg = T.unpack . T.replace "new-" "v2-" . T.pack + v2Ui = origUi + { commandName = v2Msg commandName + , commandUsage = v2Msg . commandUsage + , commandDescription = (v2Msg .) <$> commandDescription + , commandNotes = (v2Msg .) 
<$> commandDescription + } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdRepl.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdRepl.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdRepl.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdRepl.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,578 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} + +-- | cabal-install CLI command: repl +-- +module Distribution.Client.CmdRepl ( + -- * The @repl@ CLI and action + replCommand, + replAction, + + -- * Internals exposed for testing + TargetProblem(..), + selectPackageTargets, + selectComponentTarget + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Compat.Lens +import qualified Distribution.Types.Lens as L + +import Distribution.Client.CmdErrorMessages +import Distribution.Client.CmdInstall + ( establishDummyProjectBaseContext ) +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Client.ProjectBuilding + ( rebuildTargetsDryRun, improveInstallPlanWithUpToDatePackages ) +import Distribution.Client.ProjectConfig + ( ProjectConfig(..), withProjectOrGlobalConfig + , projectConfigConfigFile, readGlobalConfig ) +import Distribution.Client.ProjectOrchestration +import Distribution.Client.ProjectPlanning + ( ElaboratedSharedConfig(..), ElaboratedInstallPlan ) +import Distribution.Client.ProjectPlanning.Types + ( elabOrderExeDependencies ) +import Distribution.Client.RebuildMonad + ( runRebuild ) +import Distribution.Client.Setup + ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) +import qualified Distribution.Client.Setup as Client +import 
Distribution.Client.Types + ( PackageLocation(..), PackageSpecifier(..), UnresolvedSourcePackage ) +import Distribution.Simple.Setup + ( HaddockFlags, fromFlagOrDefault, replOptions + , Flag(..), toFlag, trueArg, falseArg ) +import Distribution.Simple.Command + ( CommandUI(..), liftOption, usageAlternatives, option + , ShowOrParseArgs, OptionField, reqArg ) +import Distribution.Package + ( Package(..), packageName, UnitId, installedUnitId ) +import Distribution.PackageDescription.PrettyPrint +import Distribution.Parsec.Class + ( Parsec(..) ) +import Distribution.Pretty + ( prettyShow ) +import Distribution.ReadE + ( ReadE, parsecToReadE ) +import qualified Distribution.SPDX.License as SPDX +import Distribution.Solver.Types.SourcePackage + ( SourcePackage(..) ) +import Distribution.Types.BuildInfo + ( BuildInfo(..), emptyBuildInfo ) +import Distribution.Types.ComponentName + ( componentNameString ) +import Distribution.Types.CondTree + ( CondTree(..), traverseCondTreeC ) +import Distribution.Types.Dependency + ( Dependency(..) ) +import Distribution.Types.GenericPackageDescription + ( emptyGenericPackageDescription ) +import Distribution.Types.PackageDescription + ( PackageDescription(..), emptyPackageDescription ) +import Distribution.Types.Library + ( Library(..), emptyLibrary ) +import Distribution.Types.PackageId + ( PackageIdentifier(..) ) +import Distribution.Types.Version + ( mkVersion, version0 ) +import Distribution.Types.VersionRange + ( anyVersion ) +import Distribution.Text + ( display ) +import Distribution.Verbosity + ( Verbosity, normal, lessVerbose ) +import Distribution.Simple.Utils + ( wrapText, die', debugNoWrap, ordNub, createTempDirectory, handleDoesNotExist ) +import Language.Haskell.Extension + ( Language(..) 
) + +import Data.List + ( (\\) ) +import qualified Data.Map as Map +import qualified Data.Set as Set +import System.Directory + ( getTemporaryDirectory, removeDirectoryRecursive ) +import System.FilePath + ( () ) + +type ReplFlags = [String] + +data EnvFlags = EnvFlags + { envPackages :: [Dependency] + , envIncludeTransitive :: Flag Bool + , envIgnoreProject :: Flag Bool + } + +defaultEnvFlags :: EnvFlags +defaultEnvFlags = EnvFlags + { envPackages = [] + , envIncludeTransitive = toFlag True + , envIgnoreProject = toFlag False + } + +envOptions :: ShowOrParseArgs -> [OptionField EnvFlags] +envOptions _ = + [ option ['b'] ["build-depends"] + "Include an additional package in the environment presented to GHCi." + envPackages (\p flags -> flags { envPackages = p ++ envPackages flags }) + (reqArg "DEPENDENCY" dependencyReadE (fmap prettyShow :: [Dependency] -> [String])) + , option [] ["no-transitive-deps"] + "Don't automatically include transitive dependencies of requested packages." + envIncludeTransitive (\p flags -> flags { envIncludeTransitive = p }) + falseArg + , option ['z'] ["ignore-project"] + "Only include explicitly specified packages (and 'base')." + envIgnoreProject (\p flags -> flags { envIgnoreProject = p }) + trueArg + ] + where + dependencyReadE :: ReadE [Dependency] + dependencyReadE = + fmap pure $ + parsecToReadE + ("couldn't parse dependency: " ++) + parsec + +replCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags, ReplFlags, EnvFlags) +replCommand = Client.installCommand { + commandName = "new-repl", + commandSynopsis = "Open an interactive session for the given component.", + commandUsage = usageAlternatives "new-repl" [ "[TARGET] [FLAGS]" ], + commandDescription = Just $ \_ -> wrapText $ + "Open an interactive session for a component within the project. 
The " + ++ "available targets are the same as for the 'new-build' command: " + ++ "individual components within packages in the project, including " + ++ "libraries, executables, test-suites or benchmarks. Packages can " + ++ "also be specified in which case the library component in the " + ++ "package will be used, or the (first listed) executable in the " + ++ "package if there is no library.\n\n" + + ++ "Dependencies are built or rebuilt as necessary. Additional " + ++ "configuration flags can be specified on the command line and these " + ++ "extend the project configuration from the 'cabal.project', " + ++ "'cabal.project.local' and other files.", + commandNotes = Just $ \pname -> + "Examples, open an interactive session:\n" + ++ " " ++ pname ++ " new-repl\n" + ++ " for the default component in the package in the current directory\n" + ++ " " ++ pname ++ " new-repl pkgname\n" + ++ " for the default component in the package named 'pkgname'\n" + ++ " " ++ pname ++ " new-repl ./pkgfoo\n" + ++ " for the default component in the package in the ./pkgfoo directory\n" + ++ " " ++ pname ++ " new-repl cname\n" + ++ " for the component named 'cname'\n" + ++ " " ++ pname ++ " new-repl pkgname:cname\n" + ++ " for the component 'cname' in the package 'pkgname'\n\n" + ++ " " ++ pname ++ " new-repl --build-depends lens\n" + ++ " add the latest version of the library 'lens' to the default component " + ++ "(or no componentif there is no project present)\n" + ++ " " ++ pname ++ " new-repl --build-depends \"lens >= 4.15 && < 4.18\"\n" + ++ " add a version (constrained between 4.15 and 4.18) of the library 'lens' " + ++ "to the default component (or no component if there is no project present)\n" + + ++ cmdCommonHelpTextNewBuildBeta, + commandDefaultFlags = (configFlags,configExFlags,installFlags,haddockFlags,[],defaultEnvFlags), + commandOptions = \showOrParseArgs -> + map liftOriginal (commandOptions Client.installCommand showOrParseArgs) + ++ map liftReplOpts (replOptions 
showOrParseArgs) + ++ map liftEnvOpts (envOptions showOrParseArgs) + } + where + (configFlags,configExFlags,installFlags,haddockFlags) = commandDefaultFlags Client.installCommand + + liftOriginal = liftOption projectOriginal updateOriginal + liftReplOpts = liftOption projectReplOpts updateReplOpts + liftEnvOpts = liftOption projectEnvOpts updateEnvOpts + + projectOriginal (a,b,c,d,_,_) = (a,b,c,d) + updateOriginal (a,b,c,d) (_,_,_,_,e,f) = (a,b,c,d,e,f) + + projectReplOpts (_,_,_,_,e,_) = e + updateReplOpts e (a,b,c,d,_,f) = (a,b,c,d,e,f) + + projectEnvOpts (_,_,_,_,_,f) = f + updateEnvOpts f (a,b,c,d,e,_) = (a,b,c,d,e,f) + +-- | The @repl@ command is very much like @build@. It brings the install plan +-- up to date, selects that part of the plan needed by the given or implicit +-- repl target and then executes the plan. +-- +-- Compared to @build@ the difference is that only one target is allowed +-- (given or implicit) and the target type is repl rather than build. The +-- general plan execution infrastructure handles both build and repl targets. 
+-- +-- For more details on how this works, see the module +-- "Distribution.Client.ProjectOrchestration" +-- +replAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags, ReplFlags, EnvFlags) + -> [String] -> GlobalFlags -> IO () +replAction (configFlags, configExFlags, installFlags, haddockFlags, replFlags, envFlags) + targetStrings globalFlags = do + let + ignoreProject = fromFlagOrDefault False (envIgnoreProject envFlags) + with = withProject cliConfig verbosity targetStrings + without config = withoutProject (config <> cliConfig) verbosity targetStrings + + (baseCtx, targetSelectors, finalizer) <- if ignoreProject + then do + globalConfig <- runRebuild "" $ readGlobalConfig verbosity globalConfigFlag + without globalConfig + else withProjectOrGlobalConfig verbosity globalConfigFlag with without + + when (buildSettingOnlyDeps (buildSettings baseCtx)) $ + die' verbosity $ "The repl command does not support '--only-dependencies'. " + ++ "You may wish to use 'build --only-dependencies' and then " + ++ "use 'repl'." + + (originalComponent, baseCtx') <- if null (envPackages envFlags) + then return (Nothing, baseCtx) + else + -- Unfortunately, the best way to do this is to let the normal solver + -- help us resolve the targets, but that isn't ideal for performance, + -- especially in the no-project case. + withInstallPlan (lessVerbose verbosity) baseCtx $ \elaboratedPlan _ -> do + targets <- validatedTargets elaboratedPlan targetSelectors + + let + (unitId, _) = head $ Map.toList targets + originalDeps = installedUnitId <$> InstallPlan.directDeps elaboratedPlan unitId + oci = OriginalComponentInfo unitId originalDeps + Just pkgId = packageId <$> InstallPlan.lookup elaboratedPlan unitId + baseCtx' = addDepsToProjectTarget (envPackages envFlags) pkgId baseCtx + + return (Just oci, baseCtx') + + -- Now, we run the solver again with the added packages. 
While the graph + -- won't actually reflect the addition of transitive dependencies, + -- they're going to be available already and will be offered to the REPL + -- and that's good enough. + -- + -- In addition, to avoid a *third* trip through the solver, we are + -- replicating the second half of 'runProjectPreBuildPhase' by hand + -- here. + (buildCtx, replFlags') <- withInstallPlan verbosity baseCtx' $ + \elaboratedPlan elaboratedShared' -> do + let ProjectBaseContext{..} = baseCtx' + + -- Recalculate with updated project. + targets <- validatedTargets elaboratedPlan targetSelectors + + let + elaboratedPlan' = pruneInstallPlanToTargets + TargetActionRepl + targets + elaboratedPlan + includeTransitive = fromFlagOrDefault True (envIncludeTransitive envFlags) + replFlags' = case originalComponent of + Just oci -> generateReplFlags includeTransitive elaboratedPlan' oci + Nothing -> [] + + pkgsBuildStatus <- rebuildTargetsDryRun distDirLayout elaboratedShared' + elaboratedPlan' + + let elaboratedPlan'' = improveInstallPlanWithUpToDatePackages + pkgsBuildStatus elaboratedPlan' + debugNoWrap verbosity (InstallPlan.showInstallPlan elaboratedPlan'') + + let + buildCtx = ProjectBuildContext + { elaboratedPlanOriginal = elaboratedPlan + , elaboratedPlanToExecute = elaboratedPlan'' + , elaboratedShared = elaboratedShared' + , pkgsBuildStatus + , targetsMap = targets + } + return (buildCtx, replFlags') + + let buildCtx' = buildCtx + { elaboratedShared = (elaboratedShared buildCtx) + { pkgConfigReplOptions = replFlags ++ replFlags' } + } + printPlan verbosity baseCtx' buildCtx' + + buildOutcomes <- runProjectBuildPhase verbosity baseCtx' buildCtx' + runProjectPostBuildPhase verbosity baseCtx' buildCtx' buildOutcomes + finalizer + where + verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + cliConfig = commandLineFlagsToProjectConfig + globalFlags configFlags configExFlags + installFlags haddockFlags + globalConfigFlag = projectConfigConfigFile 
(projectConfigShared cliConfig) + + validatedTargets elaboratedPlan targetSelectors = do + -- Interpret the targets on the command line as repl targets + -- (as opposed to say build or haddock targets). + targets <- either (reportTargetProblems verbosity) return + $ resolveTargets + selectPackageTargets + selectComponentTarget + TargetProblemCommon + elaboratedPlan + Nothing + targetSelectors + + -- Reject multiple targets, or at least targets in different + -- components. It is ok to have two module/file targets in the + -- same component, but not two that live in different components. + when (Set.size (distinctTargetComponents targets) > 1) $ + reportTargetProblems verbosity + [TargetProblemMultipleTargets targets] + + return targets + +data OriginalComponentInfo = OriginalComponentInfo + { ociUnitId :: UnitId + , ociOriginalDeps :: [UnitId] + } + deriving (Show) + +withProject :: ProjectConfig -> Verbosity -> [String] -> IO (ProjectBaseContext, [TargetSelector], IO ()) +withProject cliConfig verbosity targetStrings = do + baseCtx <- establishProjectBaseContext verbosity cliConfig + + targetSelectors <- either (reportTargetSelectorProblems verbosity) return + =<< readTargetSelectors (localPackages baseCtx) (Just LibKind) targetStrings + + return (baseCtx, targetSelectors, return ()) + +withoutProject :: ProjectConfig -> Verbosity -> [String] -> IO (ProjectBaseContext, [TargetSelector], IO ()) +withoutProject config verbosity extraArgs = do + unless (null extraArgs) $ + die' verbosity $ "'repl' doesn't take any extra arguments when outside a project: " ++ unwords extraArgs + + globalTmp <- getTemporaryDirectory + tempDir <- createTempDirectory globalTmp "cabal-repl." + + -- We need to create a dummy package that lives in our dummy project. 
+ let + sourcePackage = SourcePackage + { packageInfoId = pkgId + , packageDescription = genericPackageDescription + , packageSource = LocalUnpackedPackage tempDir + , packageDescrOverride = Nothing + } + genericPackageDescription = emptyGenericPackageDescription + & L.packageDescription .~ packageDescription + & L.condLibrary .~ Just (CondNode library [baseDep] []) + packageDescription = emptyPackageDescription + { package = pkgId + , specVersionRaw = Left (mkVersion [2, 2]) + , licenseRaw = Left SPDX.NONE + } + library = emptyLibrary { libBuildInfo = buildInfo } + buildInfo = emptyBuildInfo + { targetBuildDepends = [baseDep] + , defaultLanguage = Just Haskell2010 + } + baseDep = Dependency "base" anyVersion + pkgId = PackageIdentifier "fake-package" version0 + + writeGenericPackageDescription (tempDir "fake-package.cabal") genericPackageDescription + + baseCtx <- + establishDummyProjectBaseContext + verbosity + config + tempDir + [SpecificSourcePackage sourcePackage] + + let + targetSelectors = [TargetPackage TargetExplicitNamed [pkgId] Nothing] + finalizer = handleDoesNotExist () (removeDirectoryRecursive tempDir) + + return (baseCtx, targetSelectors, finalizer) + +addDepsToProjectTarget :: [Dependency] + -> PackageId + -> ProjectBaseContext + -> ProjectBaseContext +addDepsToProjectTarget deps pkgId ctx = + (\p -> ctx { localPackages = p }) . fmap addDeps . 
localPackages $ ctx + where + addDeps :: PackageSpecifier UnresolvedSourcePackage + -> PackageSpecifier UnresolvedSourcePackage + addDeps (SpecificSourcePackage pkg) + | packageId pkg /= pkgId = SpecificSourcePackage pkg + | SourcePackage{..} <- pkg = + SpecificSourcePackage $ pkg { packageDescription = + packageDescription & (\f -> L.allCondTrees $ traverseCondTreeC f) + %~ (deps ++) + } + addDeps spec = spec + +generateReplFlags :: Bool -> ElaboratedInstallPlan -> OriginalComponentInfo -> ReplFlags +generateReplFlags includeTransitive elaboratedPlan OriginalComponentInfo{..} = flags + where + exeDeps :: [UnitId] + exeDeps = + foldMap + (InstallPlan.foldPlanPackage (const []) elabOrderExeDependencies) + (InstallPlan.dependencyClosure elaboratedPlan [ociUnitId]) + + deps, deps', trans, trans' :: [UnitId] + flags :: ReplFlags + deps = installedUnitId <$> InstallPlan.directDeps elaboratedPlan ociUnitId + deps' = deps \\ ociOriginalDeps + trans = installedUnitId <$> InstallPlan.dependencyClosure elaboratedPlan deps' + trans' = trans \\ ociOriginalDeps + flags = fmap (("-package-id " ++) . prettyShow) . (\\ exeDeps) + $ if includeTransitive then trans' else deps' + +-- | This defines what a 'TargetSelector' means for the @repl@ command. +-- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, +-- or otherwise classifies the problem. +-- +-- For repl we select: +-- +-- * the library if there is only one and it's buildable; or +-- +-- * the exe if there is only one and it's buildable; or +-- +-- * any other buildable component. +-- +-- Fail if there are no buildable lib\/exe components, or if there are +-- multiple libs or exes. +-- +selectPackageTargets :: TargetSelector + -> [AvailableTarget k] -> Either TargetProblem [k] +selectPackageTargets targetSelector targets + + -- If there is exactly one buildable library then we select that + | [target] <- targetsLibsBuildable + = Right [target] + + -- but fail if there are multiple buildable libraries. 
+ | not (null targetsLibsBuildable) + = Left (TargetProblemMatchesMultiple targetSelector targetsLibsBuildable') + + -- If there is exactly one buildable executable then we select that + | [target] <- targetsExesBuildable + = Right [target] + + -- but fail if there are multiple buildable executables. + | not (null targetsExesBuildable) + = Left (TargetProblemMatchesMultiple targetSelector targetsExesBuildable') + + -- If there is exactly one other target then we select that + | [target] <- targetsBuildable + = Right [target] + + -- but fail if there are multiple such targets + | not (null targetsBuildable) + = Left (TargetProblemMatchesMultiple targetSelector targetsBuildable') + + -- If there are targets but none are buildable then we report those + | not (null targets) + = Left (TargetProblemNoneEnabled targetSelector targets') + + -- If there are no targets at all then we report that + | otherwise + = Left (TargetProblemNoTargets targetSelector) + where + targets' = forgetTargetsDetail targets + (targetsLibsBuildable, + targetsLibsBuildable') = selectBuildableTargets' + . filterTargetsKind LibKind + $ targets + (targetsExesBuildable, + targetsExesBuildable') = selectBuildableTargets' + . filterTargetsKind ExeKind + $ targets + (targetsBuildable, + targetsBuildable') = selectBuildableTargetsWith' + (isRequested targetSelector) targets + + -- When there's a target filter like "pkg:tests" then we do select tests, + -- but if it's just a target like "pkg" then we don't build tests unless + -- they are requested by default (i.e. by using --enable-tests) + isRequested (TargetAllPackages Nothing) TargetNotRequestedByDefault = False + isRequested (TargetPackage _ _ Nothing) TargetNotRequestedByDefault = False + isRequested _ _ = True + + +-- | For a 'TargetComponent' 'TargetSelector', check if the component can be +-- selected. +-- +-- For the @repl@ command we just need the basic checks on being buildable etc. 
+-- +selectComponentTarget :: SubComponentTarget + -> AvailableTarget k -> Either TargetProblem k +selectComponentTarget subtarget = + either (Left . TargetProblemCommon) Right + . selectComponentTargetBasic subtarget + + +-- | The various error conditions that can occur when matching a +-- 'TargetSelector' against 'AvailableTarget's for the @repl@ command. +-- +data TargetProblem = + TargetProblemCommon TargetProblemCommon + + -- | The 'TargetSelector' matches targets but none are buildable + | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] + + -- | There are no targets at all + | TargetProblemNoTargets TargetSelector + + -- | A single 'TargetSelector' matches multiple targets + | TargetProblemMatchesMultiple TargetSelector [AvailableTarget ()] + + -- | Multiple 'TargetSelector's match multiple targets + | TargetProblemMultipleTargets TargetsMap + deriving (Eq, Show) + +reportTargetProblems :: Verbosity -> [TargetProblem] -> IO a +reportTargetProblems verbosity = + die' verbosity . unlines . map renderTargetProblem + +renderTargetProblem :: TargetProblem -> String +renderTargetProblem (TargetProblemCommon problem) = + renderTargetProblemCommon "open a repl for" problem + +renderTargetProblem (TargetProblemMatchesMultiple targetSelector targets) = + "Cannot open a repl for multiple components at once. 
The target '" + ++ showTargetSelector targetSelector ++ "' refers to " + ++ renderTargetSelector targetSelector ++ " which " + ++ (if targetSelectorRefersToPkgs targetSelector then "includes " else "are ") + ++ renderListSemiAnd + [ "the " ++ renderComponentKind Plural ckind ++ " " ++ + renderListCommaAnd + [ maybe (display pkgname) display (componentNameString cname) + | t <- ts + , let cname = availableTargetComponentName t + pkgname = packageName (availableTargetPackageId t) + ] + | (ckind, ts) <- sortGroupOn availableTargetComponentKind targets + ] + ++ ".\n\n" ++ explanationSingleComponentLimitation + where + availableTargetComponentKind = componentKind + . availableTargetComponentName + +renderTargetProblem (TargetProblemMultipleTargets selectorMap) = + "Cannot open a repl for multiple components at once. The targets " + ++ renderListCommaAnd + [ "'" ++ showTargetSelector ts ++ "'" + | ts <- ordNub (concatMap snd (concat (Map.elems selectorMap))) ] + ++ " refer to different components." + ++ ".\n\n" ++ explanationSingleComponentLimitation + +renderTargetProblem (TargetProblemNoneEnabled targetSelector targets) = + renderTargetProblemNoneEnabled "open a repl for" targetSelector targets + +renderTargetProblem (TargetProblemNoTargets targetSelector) = + renderTargetProblemNoTargets "open a repl for" targetSelector + + +explanationSingleComponentLimitation :: String +explanationSingleComponentLimitation = + "The reason for this limitation is that current versions of ghci do not " + ++ "support loading multiple components as source. Load just one component " + ++ "and when you make changes to a dependent component then quit and reload." 
+ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdRun.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdRun.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdRun.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdRun.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,559 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +-- | cabal-install CLI command: run +-- +module Distribution.Client.CmdRun ( + -- * The @run@ CLI and action + runCommand, + runAction, + handleShebang, + + -- * Internals exposed for testing + TargetProblem(..), + selectPackageTargets, + selectComponentTarget + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Client.ProjectOrchestration +import Distribution.Client.CmdErrorMessages + +import Distribution.Client.Setup + ( GlobalFlags(..), ConfigFlags(..), ConfigExFlags, InstallFlags ) +import Distribution.Client.GlobalFlags + ( defaultGlobalFlags ) +import qualified Distribution.Client.Setup as Client +import Distribution.Simple.Setup + ( HaddockFlags, fromFlagOrDefault ) +import Distribution.Simple.Command + ( CommandUI(..), usageAlternatives ) +import Distribution.Types.ComponentName + ( showComponentName ) +import Distribution.Text + ( display ) +import Distribution.Verbosity + ( Verbosity, normal ) +import Distribution.Simple.Utils + ( wrapText, die', ordNub, info + , createTempDirectory, handleDoesNotExist ) +import Distribution.Client.CmdInstall + ( establishDummyProjectBaseContext ) +import Distribution.Client.ProjectConfig + ( ProjectConfig(..), ProjectConfigShared(..) + , withProjectOrGlobalConfig ) +import Distribution.Client.ProjectPlanning + ( ElaboratedConfiguredPackage(..) 
+ , ElaboratedInstallPlan, binDirectoryFor ) +import Distribution.Client.ProjectPlanning.Types + ( dataDirsEnvironmentForPlan ) +import Distribution.Client.TargetSelector + ( TargetSelectorProblem(..), TargetString(..) ) +import Distribution.Client.InstallPlan + ( toList, foldPlanPackage ) +import Distribution.Types.UnqualComponentName + ( UnqualComponentName, unUnqualComponentName ) +import Distribution.Simple.Program.Run + ( runProgramInvocation, ProgramInvocation(..), + emptyProgramInvocation ) +import Distribution.Types.UnitId + ( UnitId ) + +import Distribution.CabalSpecVersion + ( cabalSpecLatest ) +import Distribution.Client.Types + ( PackageLocation(..), PackageSpecifier(..) ) +import Distribution.FieldGrammar + ( takeFields, parseFieldGrammar ) +import Distribution.PackageDescription.FieldGrammar + ( executableFieldGrammar ) +import Distribution.PackageDescription.PrettyPrint + ( writeGenericPackageDescription ) +import Distribution.Parsec.Common + ( Position(..) ) +import Distribution.Parsec.ParseResult + ( ParseResult, parseString, parseFatalFailure ) +import Distribution.Parsec.Parser + ( readFields ) +import qualified Distribution.SPDX.License as SPDX +import Distribution.Solver.Types.SourcePackage as SP + ( SourcePackage(..) ) +import Distribution.Types.BuildInfo + ( BuildInfo(..) ) +import Distribution.Types.CondTree + ( CondTree(..) ) +import Distribution.Types.Executable + ( Executable(..) ) +import Distribution.Types.GenericPackageDescription as GPD + ( GenericPackageDescription(..), emptyGenericPackageDescription ) +import Distribution.Types.PackageDescription + ( PackageDescription(..), emptyPackageDescription ) +import Distribution.Types.PackageId + ( PackageIdentifier(..) ) +import Distribution.Types.Version + ( mkVersion, version0 ) +import Language.Haskell.Extension + ( Language(..) 
) + +import qualified Data.ByteString.Char8 as BS +import qualified Data.Map as Map +import qualified Data.Set as Set +import qualified Text.Parsec as P +import System.Directory + ( getTemporaryDirectory, removeDirectoryRecursive, doesFileExist ) +import System.FilePath + ( () ) + +runCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) +runCommand = Client.installCommand { + commandName = "new-run", + commandSynopsis = "Run an executable.", + commandUsage = usageAlternatives "new-run" + [ "[TARGET] [FLAGS] [-- EXECUTABLE_FLAGS]" ], + commandDescription = Just $ \pname -> wrapText $ + "Runs the specified executable-like component (an executable, a test, " + ++ "or a benchmark), first ensuring it is up to date.\n\n" + + ++ "Any executable-like component in any package in the project can be " + ++ "specified. A package can be specified if contains just one " + ++ "executable-like. The default is to use the package in the current " + ++ "directory if it contains just one executable-like.\n\n" + + ++ "Extra arguments can be passed to the program, but use '--' to " + ++ "separate arguments for the program from arguments for " ++ pname + ++ ". The executable is run in an environment where it can find its " + ++ "data files inplace in the build tree.\n\n" + + ++ "Dependencies are built or rebuilt as necessary. 
Additional " + ++ "configuration flags can be specified on the command line and these " + ++ "extend the project configuration from the 'cabal.project', " + ++ "'cabal.project.local' and other files.", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " new-run\n" + ++ " Run the executable-like in the package in the current directory\n" + ++ " " ++ pname ++ " new-run foo-tool\n" + ++ " Run the named executable-like (in any package in the project)\n" + ++ " " ++ pname ++ " new-run pkgfoo:foo-tool\n" + ++ " Run the executable-like 'foo-tool' in the package 'pkgfoo'\n" + ++ " " ++ pname ++ " new-run foo -O2 -- dothing --fooflag\n" + ++ " Build with '-O2' and run the program, passing it extra arguments.\n\n" + + ++ cmdCommonHelpTextNewBuildBeta + } + +-- | The @run@ command runs a specified executable-like component, building it +-- first if necessary. The component can be either an executable, a test, +-- or a benchmark. This is particularly useful for passing arguments to +-- exes/tests/benchs by simply appending them after a @--@. +-- +-- For more details on how this works, see the module +-- "Distribution.Client.ProjectOrchestration" +-- +runAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> GlobalFlags -> IO () +runAction (configFlags, configExFlags, installFlags, haddockFlags) + targetStrings globalFlags = do + globalTmp <- getTemporaryDirectory + tempDir <- createTempDirectory globalTmp "cabal-repl." 
+ + let + with = + establishProjectBaseContext verbosity cliConfig + without config = + establishDummyProjectBaseContext verbosity (config <> cliConfig) tempDir [] + + baseCtx <- withProjectOrGlobalConfig verbosity globalConfigFlag with without + + let + scriptOrError script err = do + exists <- doesFileExist script + if exists + then BS.readFile script >>= handleScriptCase verbosity baseCtx tempDir + else reportTargetSelectorProblems verbosity err + + (baseCtx', targetSelectors) <- + readTargetSelectors (localPackages baseCtx) (Just ExeKind) (take 1 targetStrings) + >>= \case + Left err@(TargetSelectorNoTargetsInProject:_) + | (script:_) <- targetStrings -> scriptOrError script err + Left err@(TargetSelectorNoSuch t _:_) + | TargetString1 script <- t -> scriptOrError script err + Left err@(TargetSelectorExpected t _ _:_) + | TargetString1 script <- t -> scriptOrError script err + Left err -> reportTargetSelectorProblems verbosity err + Right sels -> return (baseCtx, sels) + + buildCtx <- + runProjectPreBuildPhase verbosity baseCtx' $ \elaboratedPlan -> do + + when (buildSettingOnlyDeps (buildSettings baseCtx')) $ + die' verbosity $ + "The run command does not support '--only-dependencies'. " + ++ "You may wish to use 'build --only-dependencies' and then " + ++ "use 'run'." + + -- Interpret the targets on the command line as build targets + -- (as opposed to say repl or haddock targets). + targets <- either (reportTargetProblems verbosity) return + $ resolveTargets + selectPackageTargets + selectComponentTarget + TargetProblemCommon + elaboratedPlan + Nothing + targetSelectors + + -- Reject multiple targets, or at least targets in different + -- components. It is ok to have two module/file targets in the + -- same component, but not two that live in different components. + -- + -- Note that we discard the target and return the whole 'TargetsMap', + -- so this check will be repeated (and must succeed) after + -- the 'runProjectPreBuildPhase'. 
Keep it in mind when modifying this. + _ <- singleExeOrElse + (reportTargetProblems + verbosity + [TargetProblemMultipleTargets targets]) + targets + + let elaboratedPlan' = pruneInstallPlanToTargets + TargetActionBuild + targets + elaboratedPlan + return (elaboratedPlan', targets) + + (selectedUnitId, selectedComponent) <- + -- Slight duplication with 'runProjectPreBuildPhase'. + singleExeOrElse + (die' verbosity $ "No or multiple targets given, but the run " + ++ "phase has been reached. This is a bug.") + $ targetsMap buildCtx + + printPlan verbosity baseCtx' buildCtx + + buildOutcomes <- runProjectBuildPhase verbosity baseCtx' buildCtx + runProjectPostBuildPhase verbosity baseCtx' buildCtx buildOutcomes + + + let elaboratedPlan = elaboratedPlanToExecute buildCtx + matchingElaboratedConfiguredPackages = + matchingPackagesByUnitId + selectedUnitId + elaboratedPlan + + let exeName = unUnqualComponentName selectedComponent + + -- In the common case, we expect @matchingElaboratedConfiguredPackages@ + -- to consist of a single element that provides a single way of building + -- an appropriately-named executable. In that case we take that + -- package and continue. + -- + -- However, multiple packages/components could provide that + -- executable, or it's possible we don't find the executable anywhere + -- in the build plan. I suppose in principle it's also possible that + -- a single package provides an executable in two different ways, + -- though that's probably a bug if. Anyway it's a good lint to report + -- an error in all of these cases, even if some seem like they + -- shouldn't happen. 
+ pkg <- case matchingElaboratedConfiguredPackages of + [] -> die' verbosity $ "Unknown executable " + ++ exeName + ++ " in package " + ++ display selectedUnitId + [elabPkg] -> do + info verbosity $ "Selecting " + ++ display selectedUnitId + ++ " to supply " ++ exeName + return elabPkg + elabPkgs -> die' verbosity + $ "Multiple matching executables found matching " + ++ exeName + ++ ":\n" + ++ unlines (fmap (\p -> " - in package " ++ display (elabUnitId p)) elabPkgs) + let exePath = binDirectoryFor (distDirLayout baseCtx) + (elaboratedShared buildCtx) + pkg + exeName + exeName + let args = drop 1 targetStrings + runProgramInvocation + verbosity + emptyProgramInvocation { + progInvokePath = exePath, + progInvokeArgs = args, + progInvokeEnv = dataDirsEnvironmentForPlan + (distDirLayout baseCtx) + elaboratedPlan + } + + handleDoesNotExist () (removeDirectoryRecursive tempDir) + where + verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + cliConfig = commandLineFlagsToProjectConfig + globalFlags configFlags configExFlags + installFlags haddockFlags + globalConfigFlag = projectConfigConfigFile (projectConfigShared cliConfig) + +handleShebang :: String -> IO () +handleShebang script = + runAction (commandDefaultFlags runCommand) [script] defaultGlobalFlags + +parseScriptBlock :: BS.ByteString -> ParseResult Executable +parseScriptBlock str = + case readFields str of + Right fs -> do + let (fields, _) = takeFields fs + parseFieldGrammar cabalSpecLatest fields (executableFieldGrammar "script") + Left perr -> parseFatalFailure pos (show perr) where + ppos = P.errorPos perr + pos = Position (P.sourceLine ppos) (P.sourceColumn ppos) + +readScriptBlock :: Verbosity -> BS.ByteString -> IO Executable +readScriptBlock verbosity = parseString parseScriptBlock verbosity "script block" + +readScriptBlockFromScript :: Verbosity -> BS.ByteString -> IO (Executable, BS.ByteString) +readScriptBlockFromScript verbosity str = + (\x -> (x, noShebang)) <$> readScriptBlock 
verbosity str' + where + start = "{- cabal:" + end = "-}" + + str' = BS.unlines + . takeWhile (/= end) + . drop 1 . dropWhile (/= start) + $ lines' + + noShebang = BS.unlines + . filter ((/= "#!") . BS.take 2) + $ lines' + + lines' = BS.lines str + +handleScriptCase :: Verbosity + -> ProjectBaseContext + -> FilePath + -> BS.ByteString + -> IO (ProjectBaseContext, [TargetSelector]) +handleScriptCase verbosity baseCtx tempDir scriptContents = do + (executable, contents') <- readScriptBlockFromScript verbosity scriptContents + + -- We need to create a dummy package that lives in our dummy project. + let + sourcePackage = SourcePackage + { packageInfoId = pkgId + , SP.packageDescription = genericPackageDescription + , packageSource = LocalUnpackedPackage tempDir + , packageDescrOverride = Nothing + } + genericPackageDescription = emptyGenericPackageDescription + { GPD.packageDescription = packageDescription + , condExecutables = [("script", CondNode executable' targetBuildDepends [])] + } + executable' = executable + { modulePath = "Main.hs" + , buildInfo = binfo + { defaultLanguage = + case defaultLanguage of + just@(Just _) -> just + Nothing -> Just Haskell2010 + } + } + binfo@BuildInfo{..} = buildInfo executable + packageDescription = emptyPackageDescription + { package = pkgId + , specVersionRaw = Left (mkVersion [2, 2]) + , licenseRaw = Left SPDX.NONE + } + pkgId = PackageIdentifier "fake-package" version0 + + writeGenericPackageDescription (tempDir "fake-package.cabal") genericPackageDescription + BS.writeFile (tempDir "Main.hs") contents' + + let + baseCtx' = baseCtx + { localPackages = localPackages baseCtx ++ [SpecificSourcePackage sourcePackage] } + targetSelectors = [TargetPackage TargetExplicitNamed [pkgId] Nothing] + + return (baseCtx', targetSelectors) + +singleExeOrElse :: IO (UnitId, UnqualComponentName) -> TargetsMap -> IO (UnitId, UnqualComponentName) +singleExeOrElse action targetsMap = + case Set.toList . 
distinctTargetComponents $ targetsMap + of [(unitId, CExeName component)] -> return (unitId, component) + [(unitId, CTestName component)] -> return (unitId, component) + [(unitId, CBenchName component)] -> return (unitId, component) + _ -> action + +-- | Filter the 'ElaboratedInstallPlan' keeping only the +-- 'ElaboratedConfiguredPackage's that match the specified +-- 'UnitId'. +matchingPackagesByUnitId :: UnitId + -> ElaboratedInstallPlan + -> [ElaboratedConfiguredPackage] +matchingPackagesByUnitId uid = + catMaybes + . fmap (foldPlanPackage + (const Nothing) + (\x -> if elabUnitId x == uid + then Just x + else Nothing)) + . toList + +-- | This defines what a 'TargetSelector' means for the @run@ command. +-- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, +-- or otherwise classifies the problem. +-- +-- For the @run@ command we select the exe if there is only one and it's +-- buildable. Fail if there are no or multiple buildable exe components. +-- +selectPackageTargets :: TargetSelector + -> [AvailableTarget k] -> Either TargetProblem [k] +selectPackageTargets targetSelector targets + + -- If there is exactly one buildable executable then we select that + | [target] <- targetsExesBuildable + = Right [target] + + -- but fail if there are multiple buildable executables. 
+ | not (null targetsExesBuildable) + = Left (TargetProblemMatchesMultiple targetSelector targetsExesBuildable') + + -- If there are executables but none are buildable then we report those + | not (null targetsExes) + = Left (TargetProblemNoneEnabled targetSelector targetsExes) + + -- If there are no executables but some other targets then we report that + | not (null targets) + = Left (TargetProblemNoExes targetSelector) + + -- If there are no targets at all then we report that + | otherwise + = Left (TargetProblemNoTargets targetSelector) + where + -- Targets that can be executed + targetsExecutableLike = + concatMap (\kind -> filterTargetsKind kind targets) + [ExeKind, TestKind, BenchKind] + (targetsExesBuildable, + targetsExesBuildable') = selectBuildableTargets' targetsExecutableLike + + targetsExes = forgetTargetsDetail targetsExecutableLike + + +-- | For a 'TargetComponent' 'TargetSelector', check if the component can be +-- selected. +-- +-- For the @run@ command we just need to check it is a executable-like +-- (an executable, a test, or a benchmark), in addition +-- to the basic checks on being buildable etc. +-- +selectComponentTarget :: SubComponentTarget + -> AvailableTarget k -> Either TargetProblem k +selectComponentTarget subtarget@WholeComponent t + = case availableTargetComponentName t + of CExeName _ -> component + CTestName _ -> component + CBenchName _ -> component + _ -> Left (TargetProblemComponentNotExe pkgid cname) + where pkgid = availableTargetPackageId t + cname = availableTargetComponentName t + component = either (Left . TargetProblemCommon) return $ + selectComponentTargetBasic subtarget t + +selectComponentTarget subtarget t + = Left (TargetProblemIsSubComponent (availableTargetPackageId t) + (availableTargetComponentName t) + subtarget) + +-- | The various error conditions that can occur when matching a +-- 'TargetSelector' against 'AvailableTarget's for the @run@ command. 
+-- +data TargetProblem = + TargetProblemCommon TargetProblemCommon + -- | The 'TargetSelector' matches targets but none are buildable + | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] + + -- | There are no targets at all + | TargetProblemNoTargets TargetSelector + + -- | The 'TargetSelector' matches targets but no executables + | TargetProblemNoExes TargetSelector + + -- | A single 'TargetSelector' matches multiple targets + | TargetProblemMatchesMultiple TargetSelector [AvailableTarget ()] + + -- | Multiple 'TargetSelector's match multiple targets + | TargetProblemMultipleTargets TargetsMap + + -- | The 'TargetSelector' refers to a component that is not an executable + | TargetProblemComponentNotExe PackageId ComponentName + + -- | Asking to run an individual file or module is not supported + | TargetProblemIsSubComponent PackageId ComponentName SubComponentTarget + deriving (Eq, Show) + +reportTargetProblems :: Verbosity -> [TargetProblem] -> IO a +reportTargetProblems verbosity = + die' verbosity . unlines . map renderTargetProblem + +renderTargetProblem :: TargetProblem -> String +renderTargetProblem (TargetProblemCommon problem) = + renderTargetProblemCommon "run" problem + +renderTargetProblem (TargetProblemNoneEnabled targetSelector targets) = + renderTargetProblemNoneEnabled "run" targetSelector targets + +renderTargetProblem (TargetProblemNoExes targetSelector) = + "Cannot run the target '" ++ showTargetSelector targetSelector + ++ "' which refers to " ++ renderTargetSelector targetSelector + ++ " because " + ++ plural (targetSelectorPluralPkgs targetSelector) "it does" "they do" + ++ " not contain any executables." + +renderTargetProblem (TargetProblemNoTargets targetSelector) = + case targetSelectorFilter targetSelector of + Just kind | kind /= ExeKind + -> "The run command is for running executables, but the target '" + ++ showTargetSelector targetSelector ++ "' refers to " + ++ renderTargetSelector targetSelector ++ "." 
+ + _ -> renderTargetProblemNoTargets "run" targetSelector + +renderTargetProblem (TargetProblemMatchesMultiple targetSelector targets) = + "The run command is for running a single executable at once. The target '" + ++ showTargetSelector targetSelector ++ "' refers to " + ++ renderTargetSelector targetSelector ++ " which includes " + ++ renderListCommaAnd ( ("the "++) <$> + showComponentName <$> + availableTargetComponentName <$> + foldMap + (\kind -> filterTargetsKind kind targets) + [ExeKind, TestKind, BenchKind] ) + ++ "." + +renderTargetProblem (TargetProblemMultipleTargets selectorMap) = + "The run command is for running a single executable at once. The targets " + ++ renderListCommaAnd [ "'" ++ showTargetSelector ts ++ "'" + | ts <- ordNub (concatMap snd (concat (Map.elems selectorMap))) ] + ++ " refer to different executables." + +renderTargetProblem (TargetProblemComponentNotExe pkgid cname) = + "The run command is for running executables, but the target '" + ++ showTargetSelector targetSelector ++ "' refers to " + ++ renderTargetSelector targetSelector ++ " from the package " + ++ display pkgid ++ "." + where + targetSelector = TargetComponent pkgid cname WholeComponent + +renderTargetProblem (TargetProblemIsSubComponent pkgid cname subtarget) = + "The run command can only run an executable as a whole, " + ++ "not files or modules within them, but the target '" + ++ showTargetSelector targetSelector ++ "' refers to " + ++ renderTargetSelector targetSelector ++ "." 
+ where + targetSelector = TargetComponent pkgid cname subtarget diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdSdist.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdSdist.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdSdist.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdSdist.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,362 @@ +{-# LANGUAGE MultiWayIf #-} +{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE TupleSections #-} +{-# LANGUAGE ViewPatterns #-} +module Distribution.Client.CmdSdist + ( sdistCommand, sdistAction, packageToSdist + , SdistFlags(..), defaultSdistFlags + , OutputFormat(..), ArchiveFormat(..) ) where + +import Distribution.Client.CmdErrorMessages + ( Plural(..), renderComponentKind ) +import Distribution.Client.ProjectOrchestration + ( ProjectBaseContext(..), establishProjectBaseContext ) +import Distribution.Client.TargetSelector + ( TargetSelector(..), ComponentKind + , readTargetSelectors, reportTargetSelectorProblems ) +import Distribution.Client.RebuildMonad + ( runRebuild ) +import Distribution.Client.Setup + ( ArchiveFormat(..), GlobalFlags(..) ) +import Distribution.Solver.Types.SourcePackage + ( SourcePackage(..) 
) +import Distribution.Client.Types + ( PackageSpecifier(..), PackageLocation(..), UnresolvedSourcePackage ) +import Distribution.Client.DistDirLayout + ( DistDirLayout(..), defaultDistDirLayout ) +import Distribution.Client.ProjectConfig + ( findProjectRoot, readProjectConfig ) + +import Distribution.Compat.Semigroup + ((<>)) + +import Distribution.Package + ( Package(packageId) ) +import Distribution.PackageDescription.Configuration + ( flattenPackageDescription ) +import Distribution.Pretty + ( prettyShow ) +import Distribution.ReadE + ( succeedReadE ) +import Distribution.Simple.Command + ( CommandUI(..), option, choiceOpt, reqArg ) +import Distribution.Simple.PreProcess + ( knownSuffixHandlers ) +import Distribution.Simple.Setup + ( Flag(..), toFlag, fromFlagOrDefault, flagToList, flagToMaybe + , optionVerbosity, optionDistPref, trueArg + ) +import Distribution.Simple.SrcDist + ( listPackageSources ) +import Distribution.Simple.Utils + ( die', notice, withOutputMarker ) +import Distribution.Types.ComponentName + ( ComponentName, showComponentName ) +import Distribution.Types.PackageName + ( PackageName, unPackageName ) +import Distribution.Verbosity + ( Verbosity, normal ) + +import qualified Codec.Archive.Tar as Tar +import qualified Codec.Archive.Tar.Entry as Tar +import qualified Codec.Archive.Zip as Zip +import qualified Codec.Compression.GZip as GZip +import Control.Exception + ( throwIO ) +import Control.Monad + ( when, forM, forM_ ) +import Control.Monad.Trans + ( liftIO ) +import Control.Monad.State.Lazy + ( StateT, modify, gets, evalStateT ) +import Control.Monad.Writer.Lazy + ( WriterT, tell, execWriterT ) +import Data.Bits + ( shiftL ) +import qualified Data.ByteString.Char8 as BS +import qualified Data.ByteString.Lazy.Char8 as BSL +import Data.Either + ( partitionEithers ) +import Data.List + ( find, sortOn, nub, intercalate ) +import qualified Data.Set as Set +import System.Directory + ( getCurrentDirectory, setCurrentDirectory + , 
createDirectoryIfMissing, makeAbsolute ) +import System.FilePath + ( (), (<.>), makeRelative, normalise, takeDirectory ) + +sdistCommand :: CommandUI SdistFlags +sdistCommand = CommandUI + { commandName = "new-sdist" + , commandSynopsis = "Generate a source distribution file (.tar.gz)." + , commandUsage = \pname -> + "Usage: " ++ pname ++ " new-sdist [FLAGS] [PACKAGES]\n" + , commandDescription = Just $ \_ -> + "Generates tarballs of project packages suitable for upload to Hackage." + , commandNotes = Nothing + , commandDefaultFlags = defaultSdistFlags + , commandOptions = \showOrParseArgs -> + [ optionVerbosity + sdistVerbosity (\v flags -> flags { sdistVerbosity = v }) + , optionDistPref + sdistDistDir (\dd flags -> flags { sdistDistDir = dd }) + showOrParseArgs + , option [] ["project-file"] + "Set the name of the cabal.project file to search for in parent directories" + sdistProjectFile (\pf flags -> flags { sdistProjectFile = pf }) + (reqArg "FILE" (succeedReadE Flag) flagToList) + , option ['l'] ["list-only"] + "Just list the sources, do not make a tarball" + sdistListSources (\v flags -> flags { sdistListSources = v }) + trueArg + , option ['z'] ["null-sep"] + "Separate the source files with NUL bytes rather than newlines." + sdistNulSeparated (\v flags -> flags { sdistNulSeparated = v }) + trueArg + , option [] ["archive-format"] + "Choose what type of archive to create. No effect if given with '--list-only'" + sdistArchiveFormat (\v flags -> flags { sdistArchiveFormat = v }) + (choiceOpt + [ (Flag TargzFormat, ([], ["targz"]), + "Produce a '.tar.gz' format archive (default and required for uploading to hackage)") + , (Flag ZipFormat, ([], ["zip"]), + "Produce a '.zip' format archive") + ] + ) + , option ['o'] ["output-dir", "outputdir"] + "Choose the output directory of this command. 
'-' sends all output to stdout" + sdistOutputPath (\o flags -> flags { sdistOutputPath = o }) + (reqArg "PATH" (succeedReadE Flag) flagToList) + ] + } + +data SdistFlags = SdistFlags + { sdistVerbosity :: Flag Verbosity + , sdistDistDir :: Flag FilePath + , sdistProjectFile :: Flag FilePath + , sdistListSources :: Flag Bool + , sdistNulSeparated :: Flag Bool + , sdistArchiveFormat :: Flag ArchiveFormat + , sdistOutputPath :: Flag FilePath + } + +defaultSdistFlags :: SdistFlags +defaultSdistFlags = SdistFlags + { sdistVerbosity = toFlag normal + , sdistDistDir = mempty + , sdistProjectFile = mempty + , sdistListSources = toFlag False + , sdistNulSeparated = toFlag False + , sdistArchiveFormat = toFlag TargzFormat + , sdistOutputPath = mempty + } + +-- + +sdistAction :: SdistFlags -> [String] -> GlobalFlags -> IO () +sdistAction SdistFlags{..} targetStrings globalFlags = do + let verbosity = fromFlagOrDefault normal sdistVerbosity + mDistDirectory = flagToMaybe sdistDistDir + mProjectFile = flagToMaybe sdistProjectFile + globalConfig = globalConfigFile globalFlags + listSources = fromFlagOrDefault False sdistListSources + nulSeparated = fromFlagOrDefault False sdistNulSeparated + archiveFormat = fromFlagOrDefault TargzFormat sdistArchiveFormat + mOutputPath = flagToMaybe sdistOutputPath + + projectRoot <- either throwIO return =<< findProjectRoot Nothing mProjectFile + let distLayout = defaultDistDirLayout projectRoot mDistDirectory + dir <- getCurrentDirectory + projectConfig <- runRebuild dir $ readProjectConfig verbosity globalConfig distLayout + baseCtx <- establishProjectBaseContext verbosity projectConfig + let localPkgs = localPackages baseCtx + + targetSelectors <- either (reportTargetSelectorProblems verbosity) return + =<< readTargetSelectors localPkgs Nothing targetStrings + + mOutputPath' <- case mOutputPath of + Just "-" -> return (Just "-") + Just path -> Just <$> makeAbsolute path + Nothing -> return Nothing + + let + format = + if | listSources, 
nulSeparated -> SourceList '\0' + | listSources -> SourceList '\n' + | otherwise -> Archive archiveFormat + + ext = case format of + SourceList _ -> "list" + Archive TargzFormat -> "tar.gz" + Archive ZipFormat -> "zip" + + outputPath pkg = case mOutputPath' of + Just path + | path == "-" -> "-" + | otherwise -> path prettyShow (packageId pkg) <.> ext + Nothing + | listSources -> "-" + | otherwise -> distSdistFile distLayout (packageId pkg) archiveFormat + + createDirectoryIfMissing True (distSdistDirectory distLayout) + + case reifyTargetSelectors localPkgs targetSelectors of + Left errs -> die' verbosity . unlines . fmap renderTargetProblem $ errs + Right pkgs + | length pkgs > 1, not listSources, Just "-" <- mOutputPath' -> + die' verbosity "Can't write multiple tarballs to standard output!" + | otherwise -> + mapM_ (\pkg -> packageToSdist verbosity (distProjectRootDirectory distLayout) format (outputPath pkg) pkg) pkgs + +data IsExec = Exec | NoExec + deriving (Show, Eq) + +data OutputFormat = SourceList Char + | Archive ArchiveFormat + deriving (Show, Eq) + +packageToSdist :: Verbosity -> FilePath -> OutputFormat -> FilePath -> UnresolvedSourcePackage -> IO () +packageToSdist verbosity projectRootDir format outputFile pkg = do + let death = die' verbosity ("The impossible happened: a local package isn't local" <> (show pkg)) + dir0 <- case packageSource pkg of + LocalUnpackedPackage path -> pure (Right path) + RemoteSourceRepoPackage _ (Just path) -> pure (Right path) + RemoteSourceRepoPackage {} -> death + LocalTarballPackage tgz -> pure (Left tgz) + RemoteTarballPackage _ (Just tgz) -> pure (Left tgz) + RemoteTarballPackage {} -> death + RepoTarballPackage {} -> death + + let write = if outputFile == "-" + then putStr . withOutputMarker verbosity . 
BSL.unpack + else BSL.writeFile outputFile + + case dir0 of + Left tgz -> do + case format of + Archive TargzFormat -> do + write =<< BSL.readFile tgz + when (outputFile /= "-") $ + notice verbosity $ "Wrote tarball sdist to " ++ outputFile ++ "\n" + _ -> die' verbosity ("cannot convert tarball package to " ++ show format) + + Right dir -> do + oldPwd <- getCurrentDirectory + setCurrentDirectory dir + + let norm flag = fmap ((flag, ) . normalise) + (norm NoExec -> nonexec, norm Exec -> exec) <- + listPackageSources verbosity (flattenPackageDescription $ packageDescription pkg) knownSuffixHandlers + + let files = nub . sortOn snd $ nonexec ++ exec + + case format of + SourceList nulSep -> do + let prefix = makeRelative projectRootDir dir + write (BSL.pack . (++ [nulSep]) . intercalate [nulSep] . fmap ((prefix ) . snd) $ files) + when (outputFile /= "-") $ + notice verbosity $ "Wrote source list to " ++ outputFile ++ "\n" + Archive TargzFormat -> do + let entriesM :: StateT (Set.Set FilePath) (WriterT [Tar.Entry] IO) () + entriesM = do + let prefix = prettyShow (packageId pkg) + modify (Set.insert prefix) + case Tar.toTarPath True prefix of + Left err -> liftIO $ die' verbosity ("Error packing sdist: " ++ err) + Right path -> tell [Tar.directoryEntry path] + + forM_ files $ \(perm, file) -> do + let fileDir = takeDirectory (prefix file) + perm' = case perm of + Exec -> Tar.executableFilePermissions + NoExec -> Tar.ordinaryFilePermissions + needsEntry <- gets (Set.notMember fileDir) + + when needsEntry $ do + modify (Set.insert fileDir) + case Tar.toTarPath True fileDir of + Left err -> liftIO $ die' verbosity ("Error packing sdist: " ++ err) + Right path -> tell [Tar.directoryEntry path] + + contents <- liftIO . fmap BSL.fromStrict . 
BS.readFile $ file + case Tar.toTarPath False (prefix file) of + Left err -> liftIO $ die' verbosity ("Error packing sdist: " ++ err) + Right path -> tell [(Tar.fileEntry path contents) { Tar.entryPermissions = perm' }] + + entries <- execWriterT (evalStateT entriesM mempty) + let -- Pretend our GZip file is made on Unix. + normalize bs = BSL.concat [first, "\x03", rest'] + where + (first, rest) = BSL.splitAt 9 bs + rest' = BSL.tail rest + -- The Unix epoch, which is the default value, is + -- unsuitable because it causes unpacking problems on + -- Windows; we need a post-1980 date. One gigasecond + -- after the epoch is during 2001-09-09, so that does + -- nicely. See #5596. + setModTime entry = entry { Tar.entryTime = 1000000000 } + write . normalize . GZip.compress . Tar.write $ fmap setModTime entries + when (outputFile /= "-") $ + notice verbosity $ "Wrote tarball sdist to " ++ outputFile ++ "\n" + Archive ZipFormat -> do + let prefix = prettyShow (packageId pkg) + entries <- forM files $ \(perm, file) -> do + let perm' = case perm of + -- -rwxr-xr-x + Exec -> 0o010755 `shiftL` 16 + -- -rw-r--r-- + NoExec -> 0o010644 `shiftL` 16 + contents <- BSL.readFile file + return $ (Zip.toEntry (prefix file) 0 contents) { Zip.eExternalFileAttributes = perm' } + let archive = foldr Zip.addEntryToArchive Zip.emptyArchive entries + write (Zip.fromArchive archive) + when (outputFile /= "-") $ + notice verbosity $ "Wrote zip sdist to " ++ outputFile ++ "\n" + setCurrentDirectory oldPwd + +-- + +reifyTargetSelectors :: [PackageSpecifier UnresolvedSourcePackage] -> [TargetSelector] -> Either [TargetProblem] [UnresolvedSourcePackage] +reifyTargetSelectors pkgs sels = + case partitionEithers (foldMap go sels) of + ([], sels') -> Right sels' + (errs, _) -> Left errs + where + flatten (SpecificSourcePackage pkg@SourcePackage{}) = pkg + flatten _ = error "The impossible happened: how do we not know about a local package?" 
+ pkgs' = fmap flatten pkgs + + getPkg pid = case find ((== pid) . packageId) pkgs' of + Just pkg -> Right pkg + Nothing -> error "The impossible happened: we have a reference to a local package that isn't in localPackages." + + go :: TargetSelector -> [Either TargetProblem UnresolvedSourcePackage] + go (TargetPackage _ pids Nothing) = fmap getPkg pids + go (TargetAllPackages Nothing) = Right <$> pkgs' + + go (TargetPackage _ _ (Just kind)) = [Left (AllComponentsOnly kind)] + go (TargetAllPackages (Just kind)) = [Left (AllComponentsOnly kind)] + + go (TargetPackageNamed pname _) = [Left (NonlocalPackageNotAllowed pname)] + go (TargetComponentUnknown pname _ _) = [Left (NonlocalPackageNotAllowed pname)] + + go (TargetComponent _ cname _) = [Left (ComponentsNotAllowed cname)] + +data TargetProblem = AllComponentsOnly ComponentKind + | NonlocalPackageNotAllowed PackageName + | ComponentsNotAllowed ComponentName + +renderTargetProblem :: TargetProblem -> String +renderTargetProblem (AllComponentsOnly kind) = + "It is not possible to package only the " ++ renderComponentKind Plural kind ++ " from a package " + ++ "for distribution. Only entire packages may be packaged for distribution." +renderTargetProblem (ComponentsNotAllowed cname) = + "The component " ++ showComponentName cname ++ " cannot be packaged for distribution on its own. " + ++ "Only entire packages may be packaged for distribution." +renderTargetProblem (NonlocalPackageNotAllowed pname) = + "The package " ++ unPackageName pname ++ " cannot be packaged for distribution, because it is not " + ++ "local to this project." 
+ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdTest.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdTest.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdTest.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdTest.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,248 @@ +{-# LANGUAGE NamedFieldPuns #-} + +-- | cabal-install CLI command: test +-- +module Distribution.Client.CmdTest ( + -- * The @test@ CLI and action + testCommand, + testAction, + + -- * Internals exposed for testing + TargetProblem(..), + selectPackageTargets, + selectComponentTarget + ) where + +import Distribution.Client.ProjectOrchestration +import Distribution.Client.CmdErrorMessages + +import Distribution.Client.Setup + ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags ) +import qualified Distribution.Client.Setup as Client +import Distribution.Simple.Setup + ( HaddockFlags, fromFlagOrDefault ) +import Distribution.Simple.Command + ( CommandUI(..), usageAlternatives ) +import Distribution.Text + ( display ) +import Distribution.Verbosity + ( Verbosity, normal ) +import Distribution.Simple.Utils + ( wrapText, die' ) + +import Control.Monad (when) + + +testCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) +testCommand = Client.installCommand { + commandName = "new-test", + commandSynopsis = "Run test-suites", + commandUsage = usageAlternatives "new-test" [ "[TARGETS] [FLAGS]" ], + commandDescription = Just $ \_ -> wrapText $ + "Runs the specified test-suites, first ensuring they are up to " + ++ "date.\n\n" + + ++ "Any test-suite in any package in the project can be specified. " + ++ "A package can be specified in which case all the test-suites in the " + ++ "package are run. 
The default is to run all the test-suites in the " + ++ "package in the current directory.\n\n" + + ++ "Dependencies are built or rebuilt as necessary. Additional " + ++ "configuration flags can be specified on the command line and these " + ++ "extend the project configuration from the 'cabal.project', " + ++ "'cabal.project.local' and other files.\n\n" + + ++ "To pass command-line arguments to a test suite, see the " + ++ "new-run command.", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " new-test\n" + ++ " Run all the test-suites in the package in the current directory\n" + ++ " " ++ pname ++ " new-test pkgname\n" + ++ " Run all the test-suites in the package named pkgname\n" + ++ " " ++ pname ++ " new-test cname\n" + ++ " Run the test-suite named cname\n" + ++ " " ++ pname ++ " new-test cname --enable-coverage\n" + ++ " Run the test-suite built with code coverage (including local libs used)\n\n" + + ++ cmdCommonHelpTextNewBuildBeta + } + + +-- | The @test@ command is very much like @build@. It brings the install plan +-- up to date, selects that part of the plan needed by the given or implicit +-- test target(s) and then executes the plan. +-- +-- Compared to @build@ the difference is that there's also test targets +-- which are ephemeral. 
+-- +-- For more details on how this works, see the module +-- "Distribution.Client.ProjectOrchestration" +-- +testAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> GlobalFlags -> IO () +testAction (configFlags, configExFlags, installFlags, haddockFlags) + targetStrings globalFlags = do + + baseCtx <- establishProjectBaseContext verbosity cliConfig + + targetSelectors <- either (reportTargetSelectorProblems verbosity) return + =<< readTargetSelectors (localPackages baseCtx) (Just TestKind) targetStrings + + buildCtx <- + runProjectPreBuildPhase verbosity baseCtx $ \elaboratedPlan -> do + + when (buildSettingOnlyDeps (buildSettings baseCtx)) $ + die' verbosity $ + "The test command does not support '--only-dependencies'. " + ++ "You may wish to use 'build --only-dependencies' and then " + ++ "use 'test'." + + -- Interpret the targets on the command line as test targets + -- (as opposed to say build or haddock targets). + targets <- either (reportTargetProblems verbosity) return + $ resolveTargets + selectPackageTargets + selectComponentTarget + TargetProblemCommon + elaboratedPlan + Nothing + targetSelectors + + let elaboratedPlan' = pruneInstallPlanToTargets + TargetActionTest + targets + elaboratedPlan + return (elaboratedPlan', targets) + + printPlan verbosity baseCtx buildCtx + + buildOutcomes <- runProjectBuildPhase verbosity baseCtx buildCtx + runProjectPostBuildPhase verbosity baseCtx buildCtx buildOutcomes + where + verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + cliConfig = commandLineFlagsToProjectConfig + globalFlags configFlags configExFlags + installFlags haddockFlags + +-- | This defines what a 'TargetSelector' means for the @test@ command. +-- It selects the 'AvailableTarget's that the 'TargetSelector' refers to, +-- or otherwise classifies the problem. +-- +-- For the @test@ command we select all buildable test-suites, +-- or fail if there are no test-suites or no buildable test-suites. 
+-- +selectPackageTargets :: TargetSelector + -> [AvailableTarget k] -> Either TargetProblem [k] +selectPackageTargets targetSelector targets + + -- If there are any buildable test-suite targets then we select those + | not (null targetsTestsBuildable) + = Right targetsTestsBuildable + + -- If there are test-suites but none are buildable then we report those + | not (null targetsTests) + = Left (TargetProblemNoneEnabled targetSelector targetsTests) + + -- If there are no test-suite but some other targets then we report that + | not (null targets) + = Left (TargetProblemNoTests targetSelector) + + -- If there are no targets at all then we report that + | otherwise + = Left (TargetProblemNoTargets targetSelector) + where + targetsTestsBuildable = selectBuildableTargets + . filterTargetsKind TestKind + $ targets + + targetsTests = forgetTargetsDetail + . filterTargetsKind TestKind + $ targets + + +-- | For a 'TargetComponent' 'TargetSelector', check if the component can be +-- selected. +-- +-- For the @test@ command we just need to check it is a test-suite, in addition +-- to the basic checks on being buildable etc. +-- +selectComponentTarget :: SubComponentTarget + -> AvailableTarget k -> Either TargetProblem k +selectComponentTarget subtarget@WholeComponent t + | CTestName _ <- availableTargetComponentName t + = either (Left . TargetProblemCommon) return $ + selectComponentTargetBasic subtarget t + | otherwise + = Left (TargetProblemComponentNotTest (availableTargetPackageId t) + (availableTargetComponentName t)) + +selectComponentTarget subtarget t + = Left (TargetProblemIsSubComponent (availableTargetPackageId t) + (availableTargetComponentName t) + subtarget) + +-- | The various error conditions that can occur when matching a +-- 'TargetSelector' against 'AvailableTarget's for the @test@ command. 
+-- +data TargetProblem = + TargetProblemCommon TargetProblemCommon + + -- | The 'TargetSelector' matches targets but none are buildable + | TargetProblemNoneEnabled TargetSelector [AvailableTarget ()] + + -- | There are no targets at all + | TargetProblemNoTargets TargetSelector + + -- | The 'TargetSelector' matches targets but no test-suites + | TargetProblemNoTests TargetSelector + + -- | The 'TargetSelector' refers to a component that is not a test-suite + | TargetProblemComponentNotTest PackageId ComponentName + + -- | Asking to test an individual file or module is not supported + | TargetProblemIsSubComponent PackageId ComponentName SubComponentTarget + deriving (Eq, Show) + +reportTargetProblems :: Verbosity -> [TargetProblem] -> IO a +reportTargetProblems verbosity = + die' verbosity . unlines . map renderTargetProblem + +renderTargetProblem :: TargetProblem -> String +renderTargetProblem (TargetProblemCommon problem) = + renderTargetProblemCommon "run" problem + +renderTargetProblem (TargetProblemNoneEnabled targetSelector targets) = + renderTargetProblemNoneEnabled "test" targetSelector targets + +renderTargetProblem (TargetProblemNoTests targetSelector) = + "Cannot run tests for the target '" ++ showTargetSelector targetSelector + ++ "' which refers to " ++ renderTargetSelector targetSelector + ++ " because " + ++ plural (targetSelectorPluralPkgs targetSelector) "it does" "they do" + ++ " not contain any test suites." + +renderTargetProblem (TargetProblemNoTargets targetSelector) = + case targetSelectorFilter targetSelector of + Just kind | kind /= TestKind + -> "The test command is for running test suites, but the target '" + ++ showTargetSelector targetSelector ++ "' refers to " + ++ renderTargetSelector targetSelector ++ "." 
+ + _ -> renderTargetProblemNoTargets "test" targetSelector + +renderTargetProblem (TargetProblemComponentNotTest pkgid cname) = + "The test command is for running test suites, but the target '" + ++ showTargetSelector targetSelector ++ "' refers to " + ++ renderTargetSelector targetSelector ++ " from the package " + ++ display pkgid ++ "." + where + targetSelector = TargetComponent pkgid cname WholeComponent + +renderTargetProblem (TargetProblemIsSubComponent pkgid cname subtarget) = + "The test command can only run test suites as a whole, " + ++ "not files or modules within them, but the target '" + ++ showTargetSelector targetSelector ++ "' refers to " + ++ renderTargetSelector targetSelector ++ "." + where + targetSelector = TargetComponent pkgid cname subtarget diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdUpdate.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdUpdate.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/CmdUpdate.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/CmdUpdate.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,217 @@ +{-# LANGUAGE CPP, LambdaCase, NamedFieldPuns, RecordWildCards, ViewPatterns, + TupleSections #-} + +-- | cabal-install CLI command: update +-- +module Distribution.Client.CmdUpdate ( + updateCommand, + updateAction, + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Client.Compat.Directory + ( setModificationTime ) +import Distribution.Client.ProjectOrchestration +import Distribution.Client.ProjectConfig + ( ProjectConfig(..) 
+ , ProjectConfigShared(projectConfigConfigFile) + , projectConfigWithSolverRepoContext + , withProjectOrGlobalConfig ) +import Distribution.Client.Types + ( Repo(..), RemoteRepo(..), isRepoRemote ) +import Distribution.Client.HttpUtils + ( DownloadResult(..) ) +import Distribution.Client.FetchUtils + ( downloadIndex ) +import Distribution.Client.JobControl + ( newParallelJobControl, spawnJob, collectJob ) +import Distribution.Client.Setup + ( GlobalFlags, ConfigFlags(..), ConfigExFlags, InstallFlags + , UpdateFlags, defaultUpdateFlags + , RepoContext(..) ) +import Distribution.Simple.Setup + ( HaddockFlags, fromFlagOrDefault ) +import Distribution.Simple.Utils + ( die', notice, wrapText, writeFileAtomic, noticeNoWrap ) +import Distribution.Verbosity + ( Verbosity, normal, lessVerbose ) +import Distribution.Client.IndexUtils.Timestamp +import Distribution.Client.IndexUtils + ( updateRepoIndexCache, Index(..), writeIndexTimestamp + , currentIndexTimestamp, indexBaseName ) +import Distribution.Text + ( Text(..), display, simpleParse ) + +import Data.Maybe (fromJust) +import qualified Distribution.Compat.ReadP as ReadP +import qualified Text.PrettyPrint as Disp + +import Control.Monad (mapM, mapM_) +import qualified Data.ByteString.Lazy as BS +import Distribution.Client.GZipUtils (maybeDecompress) +import System.FilePath ((<.>), dropExtension) +import Data.Time (getCurrentTime) +import Distribution.Simple.Command + ( CommandUI(..), usageAlternatives ) +import qualified Distribution.Client.Setup as Client + +import qualified Hackage.Security.Client as Sec + +updateCommand :: CommandUI ( ConfigFlags, ConfigExFlags + , InstallFlags, HaddockFlags ) +updateCommand = Client.installCommand { + commandName = "new-update", + commandSynopsis = "Updates list of known packages.", + commandUsage = usageAlternatives "new-update" [ "[FLAGS] [REPOS]" ], + commandDescription = Just $ \_ -> wrapText $ + "For all known remote repositories, download the package list.", + commandNotes = 
Just $ \pname -> + "REPO has the format [,] where index-state follows\n" + ++ "the same format and syntax that is supported by the --index-state flag.\n\n" + ++ "Examples:\n" + ++ " " ++ pname ++ " new-update\n" + ++ " Download the package list for all known remote repositories.\n\n" + ++ " " ++ pname ++ " new-update hackage.haskell.org,@1474732068\n" + ++ " " ++ pname ++ " new-update hackage.haskell.org,2016-09-24T17:47:48Z\n" + ++ " " ++ pname ++ " new-update hackage.haskell.org,HEAD\n" + ++ " " ++ pname ++ " new-update hackage.haskell.org\n" + ++ " Download hackage.haskell.org at a specific index state.\n\n" + ++ " " ++ pname ++ " new update hackage.haskell.org head.hackage\n" + ++ " Download hackage.haskell.org and head.hackage\n" + ++ " head.hackage must be a known repo-id. E.g. from\n" + ++ " your cabal.project(.local) file.\n\n" + ++ "Note: this command is part of the new project-based system (aka " + ++ "nix-style\nlocal builds). These features are currently in beta. " + ++ "Please see\n" + ++ "http://cabal.readthedocs.io/en/latest/nix-local-build-overview.html " + ++ "for\ndetails and advice on what you can expect to work. 
If you " + ++ "encounter problems\nplease file issues at " + ++ "https://github.com/haskell/cabal/issues and if you\nhave any time " + ++ "to get involved and help with testing, fixing bugs etc then\nthat " + ++ "is very much appreciated.\n" + } + +data UpdateRequest = UpdateRequest + { _updateRequestRepoName :: String + , _updateRequestRepoState :: IndexState + } deriving (Show) + +instance Text UpdateRequest where + disp (UpdateRequest n s) = Disp.text n Disp.<> Disp.char ',' Disp.<> disp s + parse = parseWithState ReadP.+++ parseHEAD + where parseWithState = do + name <- ReadP.many1 (ReadP.satisfy (\c -> c /= ',')) + _ <- ReadP.char ',' + state <- parse + return (UpdateRequest name state) + parseHEAD = do + name <- ReadP.manyTill (ReadP.satisfy (\c -> c /= ',')) ReadP.eof + return (UpdateRequest name IndexStateHead) + +updateAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> GlobalFlags -> IO () +updateAction (configFlags, configExFlags, installFlags, haddockFlags) + extraArgs globalFlags = do + projectConfig <- withProjectOrGlobalConfig verbosity globalConfigFlag + (projectConfig <$> establishProjectBaseContext verbosity cliConfig) + (\globalConfig -> return $ globalConfig <> cliConfig) + + projectConfigWithSolverRepoContext verbosity + (projectConfigShared projectConfig) (projectConfigBuildOnly projectConfig) + $ \repoCtxt -> do + let repos = filter isRepoRemote $ repoContextRepos repoCtxt + repoName = remoteRepoName . 
repoRemote + parseArg :: String -> IO UpdateRequest + parseArg s = case simpleParse s of + Just r -> return r + Nothing -> die' verbosity $ + "'new-update' unable to parse repo: \"" ++ s ++ "\"" + updateRepoRequests <- mapM parseArg extraArgs + + unless (null updateRepoRequests) $ do + let remoteRepoNames = map repoName repos + unknownRepos = [r | (UpdateRequest r _) <- updateRepoRequests + , not (r `elem` remoteRepoNames)] + unless (null unknownRepos) $ + die' verbosity $ "'new-update' repo(s): \"" + ++ intercalate "\", \"" unknownRepos + ++ "\" can not be found in known remote repo(s): " + ++ intercalate ", " remoteRepoNames + + let reposToUpdate :: [(Repo, IndexState)] + reposToUpdate = case updateRepoRequests of + -- If we are not given any specific repository, update all + -- repositories to HEAD. + [] -> map (,IndexStateHead) repos + updateRequests -> let repoMap = [(repoName r, r) | r <- repos] + lookup' k = fromJust (lookup k repoMap) + in [ (lookup' name, state) + | (UpdateRequest name state) <- updateRequests ] + + case reposToUpdate of + [] -> return () + [(remoteRepo, _)] -> + notice verbosity $ "Downloading the latest package list from " + ++ repoName remoteRepo + _ -> notice verbosity . unlines + $ "Downloading the latest package lists from: " + : map (("- " ++) . repoName . fst) reposToUpdate + + jobCtrl <- newParallelJobControl (length reposToUpdate) + mapM_ (spawnJob jobCtrl . 
updateRepo verbosity defaultUpdateFlags repoCtxt) + reposToUpdate + mapM_ (\_ -> collectJob jobCtrl) reposToUpdate + + where + verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + cliConfig = commandLineFlagsToProjectConfig + globalFlags configFlags configExFlags + installFlags haddockFlags + globalConfigFlag = projectConfigConfigFile (projectConfigShared cliConfig) + +updateRepo :: Verbosity -> UpdateFlags -> RepoContext -> (Repo, IndexState) + -> IO () +updateRepo verbosity _updateFlags repoCtxt (repo, indexState) = do + transport <- repoContextGetTransport repoCtxt + case repo of + RepoLocal{..} -> return () + RepoRemote{..} -> do + downloadResult <- downloadIndex transport verbosity + repoRemote repoLocalDir + case downloadResult of + FileAlreadyInCache -> + setModificationTime (indexBaseName repo <.> "tar") + =<< getCurrentTime + FileDownloaded indexPath -> do + writeFileAtomic (dropExtension indexPath) . maybeDecompress + =<< BS.readFile indexPath + updateRepoIndexCache verbosity (RepoIndex repoCtxt repo) + RepoSecure{} -> repoContextWithSecureRepo repoCtxt repo $ \repoSecure -> do + let index = RepoIndex repoCtxt repo + -- NB: This may be a nullTimestamp if we've never updated before + current_ts <- currentIndexTimestamp (lessVerbose verbosity) repoCtxt repo + -- NB: always update the timestamp, even if we didn't actually + -- download anything + writeIndexTimestamp index indexState + ce <- if repoContextIgnoreExpiry repoCtxt + then Just `fmap` getCurrentTime + else return Nothing + updated <- Sec.uncheckClientErrors $ Sec.checkForUpdates repoSecure ce + -- Update cabal's internal index as well so that it's not out of sync + -- (If all access to the cache goes through hackage-security this can go) + case updated of + Sec.NoUpdates -> + setModificationTime (indexBaseName repo <.> "tar") + =<< getCurrentTime + Sec.HasUpdates -> + updateRepoIndexCache verbosity index + -- TODO: This will print multiple times if there are multiple + -- 
repositories: main problem is we don't have a way of updating + -- a specific repo. Once we implement that, update this. + when (current_ts /= nullTimestamp) $ + noticeNoWrap verbosity $ + "To revert to previous state run:\n" ++ + " cabal new-update '" ++ remoteRepoName (repoRemote repo) + ++ "," ++ display current_ts ++ "'\n" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Directory.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Directory.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Directory.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Directory.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,13 @@ +{-# LANGUAGE CPP #-} +module Distribution.Client.Compat.Directory (setModificationTime) where + +#if MIN_VERSION_directory(1,2,3) +import System.Directory (setModificationTime) +#else + +import Data.Time.Clock (UTCTime) + +setModificationTime :: FilePath -> UTCTime -> IO () +setModificationTime _fp _t = return () + +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Compat/ExecutablePath.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Compat/ExecutablePath.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Compat/ExecutablePath.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Compat/ExecutablePath.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,164 @@ +{-# LANGUAGE ForeignFunctionInterface #-} +{-# LANGUAGE CPP #-} + +-- Copied verbatim from base-4.6.0.0. We can't simply import +-- System.Environment.getExecutablePath because we need compatibility with older +-- GHCs. 
+ +module Distribution.Client.Compat.ExecutablePath ( getExecutablePath ) where + +-- The imports are purposely kept completely disjoint to prevent edits +-- to one OS implementation from breaking another. + +#if defined(darwin_HOST_OS) +import Data.Word +import Foreign.C +import Foreign.Marshal.Alloc +import Foreign.Ptr +import Foreign.Storable +import System.Posix.Internals +#elif defined(linux_HOST_OS) +import Foreign.C +import Foreign.Marshal.Array +import System.Posix.Internals +#elif defined(mingw32_HOST_OS) +import Data.Word +import Foreign.C +import Foreign.Marshal.Array +import Foreign.Ptr +import System.Posix.Internals +#else +import Foreign.C +import Foreign.Marshal.Alloc +import Foreign.Ptr +import Foreign.Storable +import System.Posix.Internals +#endif + +-- The exported function is defined outside any if-guard to make sure +-- every OS implements it with the same type. + +-- | Returns the absolute pathname of the current executable. +-- +-- Note that for scripts and interactive sessions, this is the path to +-- the interpreter (e.g. ghci.) +-- +-- /Since: 4.6.0.0/ +getExecutablePath :: IO FilePath + +-------------------------------------------------------------------------------- +-- Mac OS X + +#if defined(darwin_HOST_OS) + +type UInt32 = Word32 + +foreign import ccall unsafe "mach-o/dyld.h _NSGetExecutablePath" + c__NSGetExecutablePath :: CString -> Ptr UInt32 -> IO CInt + +-- | Returns the path of the main executable. The path may be a +-- symbolic link and not the real file. 
+-- +-- See dyld(3) +_NSGetExecutablePath :: IO FilePath +_NSGetExecutablePath = + allocaBytes 1024 $ \ buf -> -- PATH_MAX is 1024 on OS X + alloca $ \ bufsize -> do + poke bufsize 1024 + status <- c__NSGetExecutablePath buf bufsize + if status == 0 + then peekFilePath buf + else do reqBufsize <- fromIntegral `fmap` peek bufsize + allocaBytes reqBufsize $ \ newBuf -> do + status2 <- c__NSGetExecutablePath newBuf bufsize + if status2 == 0 + then peekFilePath newBuf + else error "_NSGetExecutablePath: buffer too small" + +foreign import ccall unsafe "stdlib.h realpath" + c_realpath :: CString -> CString -> IO CString + +-- | Resolves all symbolic links, extra \/ characters, and references +-- to \/.\/ and \/..\/. Returns an absolute pathname. +-- +-- See realpath(3) +realpath :: FilePath -> IO FilePath +realpath path = + withFilePath path $ \ fileName -> + allocaBytes 1024 $ \ resolvedName -> do + _ <- throwErrnoIfNull "realpath" $ c_realpath fileName resolvedName + peekFilePath resolvedName + +getExecutablePath = _NSGetExecutablePath >>= realpath + +-------------------------------------------------------------------------------- +-- Linux + +#elif defined(linux_HOST_OS) + +foreign import ccall unsafe "readlink" + c_readlink :: CString -> CString -> CSize -> IO CInt + +-- | Reads the @FilePath@ pointed to by the symbolic link and returns +-- it. 
+-- +-- See readlink(2) +readSymbolicLink :: FilePath -> IO FilePath +readSymbolicLink file = + allocaArray0 4096 $ \buf -> do + withFilePath file $ \s -> do + len <- throwErrnoPathIfMinus1 "readSymbolicLink" file $ + c_readlink s buf 4096 + peekFilePathLen (buf,fromIntegral len) + +getExecutablePath = readSymbolicLink $ "/proc/self/exe" + +-------------------------------------------------------------------------------- +-- Windows + +#elif defined(mingw32_HOST_OS) + +# if defined(i386_HOST_ARCH) +# define WINDOWS_CCONV stdcall +# elif defined(x86_64_HOST_ARCH) +# define WINDOWS_CCONV ccall +# else +# error Unknown mingw32 arch +# endif + +foreign import WINDOWS_CCONV unsafe "windows.h GetModuleFileNameW" + c_GetModuleFileName :: Ptr () -> CWString -> Word32 -> IO Word32 + +getExecutablePath = go 2048 -- plenty, PATH_MAX is 512 under Win32 + where + go size = allocaArray (fromIntegral size) $ \ buf -> do + ret <- c_GetModuleFileName nullPtr buf size + case ret of + 0 -> error "getExecutablePath: GetModuleFileNameW returned an error" + _ | ret < size -> peekFilePath buf + | otherwise -> go (size * 2) + +-------------------------------------------------------------------------------- +-- Fallback to argv[0] + +#else + +foreign import ccall unsafe "getFullProgArgv" + c_getFullProgArgv :: Ptr CInt -> Ptr (Ptr CString) -> IO () + +getExecutablePath = + alloca $ \ p_argc -> + alloca $ \ p_argv -> do + c_getFullProgArgv p_argc p_argv + argc <- peek p_argc + if argc > 0 + -- If argc > 0 then argv[0] is guaranteed by the standard + -- to be a pointer to a null-terminated string. 
+ then peek p_argv >>= peek >>= peekFilePath + else error $ "getExecutablePath: " ++ msg + where msg = "no OS specific implementation and program name couldn't be " ++ + "found in argv" + +-------------------------------------------------------------------------------- + +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Compat/FileLock.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Compat/FileLock.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Compat/FileLock.hsc 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Compat/FileLock.hsc 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,201 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE InterruptibleFFI #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE MultiWayIf #-} +{-# LANGUAGE DeriveDataTypeable #-} + +-- | This compat module can be removed once base-4.10 (ghc-8.2) is the minimum +-- required version. Though note that the locking functionality is not in +-- public modules in base-4.10, just in the "GHC.IO.Handle.Lock" module. +module Distribution.Client.Compat.FileLock ( + FileLockingNotSupported(..) + , LockMode(..) + , hLock + , hTryLock + ) where + +#if MIN_VERSION_base(4,10,0) + +import GHC.IO.Handle.Lock + +#else + +-- The remainder of this file is a modified copy +-- of GHC.IO.Handle.Lock from ghc-8.2.x +-- +-- The modifications were just to the imports and the CPP, since we do not have +-- access to the HAVE_FLOCK from the ./configure script. We approximate the +-- lack of HAVE_FLOCK with defined(solaris2_HOST_OS) instead since that is the +-- only known major Unix platform lacking flock(). 
+ +import Control.Exception (Exception) +import Data.Typeable + +#if defined(solaris2_HOST_OS) + +import Control.Exception (throwIO) +import System.IO (Handle) + +#else + +import Data.Bits +import Data.Function +import Control.Concurrent.MVar + +import Foreign.C.Error +import Foreign.C.Types + +import GHC.IO.Handle.Types +import GHC.IO.FD +import GHC.IO.Exception + +#if defined(mingw32_HOST_OS) + +#if defined(i386_HOST_ARCH) +## define WINDOWS_CCONV stdcall +#elif defined(x86_64_HOST_ARCH) +## define WINDOWS_CCONV ccall +#else +# error Unknown mingw32 arch +#endif + +#include + +import Foreign.Marshal.Alloc +import Foreign.Marshal.Utils +import Foreign.Ptr +import GHC.Windows + +#else /* !defined(mingw32_HOST_OS), so assume unix with flock() */ + +#include + +#endif /* !defined(mingw32_HOST_OS) */ + +#endif /* !defined(solaris2_HOST_OS) */ + + +-- | Exception thrown by 'hLock' on non-Windows platforms that don't support +-- 'flock'. +data FileLockingNotSupported = FileLockingNotSupported + deriving (Typeable, Show) + +instance Exception FileLockingNotSupported + + +-- | Indicates a mode in which a file should be locked. +data LockMode = SharedLock | ExclusiveLock + +-- | If a 'Handle' references a file descriptor, attempt to lock contents of the +-- underlying file in appropriate mode. If the file is already locked in +-- incompatible mode, this function blocks until the lock is established. The +-- lock is automatically released upon closing a 'Handle'. +-- +-- Things to be aware of: +-- +-- 1) This function may block inside a C call. If it does, in order to be able +-- to interrupt it with asynchronous exceptions and/or for other threads to +-- continue working, you MUST use threaded version of the runtime system. +-- +-- 2) The implementation uses 'LockFileEx' on Windows and 'flock' otherwise, +-- hence all of their caveats also apply here. +-- +-- 3) On non-Windows plaftorms that don't support 'flock' (e.g. 
Solaris) this +-- function throws 'FileLockingNotImplemented'. We deliberately choose to not +-- provide fcntl based locking instead because of its broken semantics. +-- +-- @since 4.10.0.0 +hLock :: Handle -> LockMode -> IO () +hLock h mode = lockImpl h "hLock" mode True >> return () + +-- | Non-blocking version of 'hLock'. +-- +-- @since 4.10.0.0 +hTryLock :: Handle -> LockMode -> IO Bool +hTryLock h mode = lockImpl h "hTryLock" mode False + +---------------------------------------- + +#if defined(solaris2_HOST_OS) + +-- | No-op implementation. +lockImpl :: Handle -> String -> LockMode -> Bool -> IO Bool +lockImpl _ _ _ _ = throwIO FileLockingNotSupported + +#else /* !defined(solaris2_HOST_OS) */ + +#if defined(mingw32_HOST_OS) + +lockImpl :: Handle -> String -> LockMode -> Bool -> IO Bool +lockImpl h ctx mode block = do + FD{fdFD = fd} <- handleToFd h + wh <- throwErrnoIf (== iNVALID_HANDLE_VALUE) ctx $ c_get_osfhandle fd + allocaBytes sizeof_OVERLAPPED $ \ovrlpd -> do + fillBytes ovrlpd (fromIntegral sizeof_OVERLAPPED) 0 + let flags = cmode .|. (if block then 0 else #{const LOCKFILE_FAIL_IMMEDIATELY}) + -- We want to lock the whole file without looking up its size to be + -- consistent with what flock does. According to documentation of LockFileEx + -- "locking a region that goes beyond the current end-of-file position is + -- not an error", however e.g. Windows 10 doesn't accept maximum possible + -- value (a pair of MAXDWORDs) for mysterious reasons. Work around that by + -- trying 2^32-1. 
+ fix $ \retry -> c_LockFileEx wh flags 0 0xffffffff 0x0 ovrlpd >>= \case + True -> return True + False -> getLastError >>= \err -> if + | not block && err == #{const ERROR_LOCK_VIOLATION} -> return False + | err == #{const ERROR_OPERATION_ABORTED} -> retry + | otherwise -> failWith ctx err + where + sizeof_OVERLAPPED = #{size OVERLAPPED} + + cmode = case mode of + SharedLock -> 0 + ExclusiveLock -> #{const LOCKFILE_EXCLUSIVE_LOCK} + +-- https://msdn.microsoft.com/en-us/library/aa297958.aspx +foreign import ccall unsafe "_get_osfhandle" + c_get_osfhandle :: CInt -> IO HANDLE + +-- https://msdn.microsoft.com/en-us/library/windows/desktop/aa365203.aspx +foreign import WINDOWS_CCONV interruptible "LockFileEx" + c_LockFileEx :: HANDLE -> DWORD -> DWORD -> DWORD -> DWORD -> Ptr () -> IO BOOL + +#else /* !defined(mingw32_HOST_OS), so assume unix with flock() */ + +lockImpl :: Handle -> String -> LockMode -> Bool -> IO Bool +lockImpl h ctx mode block = do + FD{fdFD = fd} <- handleToFd h + let flags = cmode .|. (if block then 0 else #{const LOCK_NB}) + fix $ \retry -> c_flock fd flags >>= \case + 0 -> return True + _ -> getErrno >>= \errno -> if + | not block && errno == eWOULDBLOCK -> return False + | errno == eINTR -> retry + | otherwise -> ioException $ errnoToIOError ctx errno (Just h) Nothing + where + cmode = case mode of + SharedLock -> #{const LOCK_SH} + ExclusiveLock -> #{const LOCK_EX} + +foreign import ccall interruptible "flock" + c_flock :: CInt -> CInt -> IO CInt + +#endif /* !defined(mingw32_HOST_OS) */ + +-- | Turn an existing Handle into a file descriptor. This function throws an +-- IOError if the Handle does not reference a file descriptor. 
+handleToFd :: Handle -> IO FD +handleToFd h = case h of + FileHandle _ mv -> do + Handle__{haDevice = dev} <- readMVar mv + case cast dev of + Just fd -> return fd + Nothing -> throwErr "not a file descriptor" + DuplexHandle{} -> throwErr "not a file handle" + where + throwErr msg = ioException $ IOError (Just h) + InappropriateType "handleToFd" msg Nothing Nothing + +#endif /* defined(solaris2_HOST_OS) */ + +#endif /* MIN_VERSION_base */ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Compat/FilePerms.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Compat/FilePerms.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Compat/FilePerms.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Compat/FilePerms.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,35 @@ +{-# LANGUAGE CPP #-} +{-# OPTIONS_HADDOCK hide #-} +module Distribution.Client.Compat.FilePerms ( + setFileOrdinary, + setFileExecutable, + setFileHidden, + ) where + +#ifndef mingw32_HOST_OS +import System.Posix.Types + ( FileMode ) +import System.Posix.Internals + ( c_chmod ) +import Foreign.C + ( withCString + , throwErrnoPathIfMinus1_ ) +#else +import System.Win32.File (setFileAttributes, fILE_ATTRIBUTE_HIDDEN) +#endif /* mingw32_HOST_OS */ + +setFileHidden, setFileOrdinary, setFileExecutable :: FilePath -> IO () +#ifndef mingw32_HOST_OS +setFileOrdinary path = setFileMode path 0o644 -- file perms -rw-r--r-- +setFileExecutable path = setFileMode path 0o755 -- file perms -rwxr-xr-x +setFileHidden _ = return () + +setFileMode :: FilePath -> FileMode -> IO () +setFileMode name m = + withCString name $ \s -> + throwErrnoPathIfMinus1_ "setFileMode" name (c_chmod s m) +#else +setFileOrdinary _ = return () +setFileExecutable _ = return () +setFileHidden path = setFileAttributes path 
fILE_ATTRIBUTE_HIDDEN +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Prelude.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Prelude.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Prelude.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Prelude.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,22 @@ +-- to suppress WARNING in "Distribution.Compat.Prelude.Internal" +{-# OPTIONS_GHC -fno-warn-deprecations #-} + +-- | This module does two things: +-- +-- * Acts as a compatiblity layer, like @base-compat@. +-- +-- * Provides commonly used imports. +-- +-- This module is a superset of "Distribution.Compat.Prelude" (which +-- this module re-exports) +-- +module Distribution.Client.Compat.Prelude + ( module Distribution.Compat.Prelude.Internal + , Prelude.IO + , readMaybe + ) where + +import Prelude (IO) +import Distribution.Compat.Prelude.Internal hiding (IO) +import Text.Read + ( readMaybe ) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Process.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Process.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Process.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Process.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,43 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Compat.Process +-- Copyright : (c) 2013 Liu Hao, Brent Yorgey +-- License : BSD-style (see the file LICENSE) +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : 
provisional +-- Portability : portable +-- +-- Cross-platform utilities for invoking processes. +-- +----------------------------------------------------------------------------- + +module Distribution.Client.Compat.Process ( + readProcessWithExitCode +) where + +import Control.Exception (catch, throw) +import System.Exit (ExitCode (ExitFailure)) +import System.IO.Error (isDoesNotExistError, isPermissionError) +import qualified System.Process as P + +-- | @readProcessWithExitCode@ creates an external process, reads its +-- standard output and standard error strictly, waits until the +-- process terminates, and then returns the @ExitCode@ of the +-- process, the standard output, and the standard error. +-- +-- See the documentation of the version from @System.Process@ for +-- more information. +-- +-- The version from @System.Process@ behaves inconsistently across +-- platforms when an executable with the given name is not found: in +-- some cases it returns an @ExitFailure@, in others it throws an +-- exception. This variant catches \"does not exist\" and +-- \"permission denied\" exceptions and turns them into +-- @ExitFailure@s. 
+readProcessWithExitCode :: FilePath -> [String] -> String -> IO (ExitCode, String, String) +readProcessWithExitCode cmd args input = + P.readProcessWithExitCode cmd args input + `catch` \e -> if isDoesNotExistError e || isPermissionError e + then return (ExitFailure 127, "", "") + else throw e diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Semaphore.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Semaphore.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Semaphore.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Compat/Semaphore.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,104 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# OPTIONS_GHC -funbox-strict-fields #-} +module Distribution.Client.Compat.Semaphore + ( QSem + , newQSem + , waitQSem + , signalQSem + ) where + +import Control.Concurrent.STM (TVar, atomically, newTVar, readTVar, retry, + writeTVar) +import Control.Exception (mask_, onException) +import Control.Monad (join, unless) +import Data.Typeable (Typeable) + +-- | 'QSem' is a quantity semaphore in which the resource is aqcuired +-- and released in units of one. It provides guaranteed FIFO ordering +-- for satisfying blocked `waitQSem` calls. +-- +data QSem = QSem !(TVar Int) !(TVar [TVar Bool]) !(TVar [TVar Bool]) + deriving (Eq, Typeable) + +newQSem :: Int -> IO QSem +newQSem i = atomically $ do + q <- newTVar i + b1 <- newTVar [] + b2 <- newTVar [] + return (QSem q b1 b2) + +waitQSem :: QSem -> IO () +waitQSem s@(QSem q _b1 b2) = + mask_ $ join $ atomically $ do + -- join, because if we need to block, we have to add a TVar to + -- the block queue. + -- mask_, because we need a chance to set up an exception handler + -- after the join returns. 
+ v <- readTVar q + if v == 0 + then do b <- newTVar False + ys <- readTVar b2 + writeTVar b2 (b:ys) + return (wait b) + else do writeTVar q $! v - 1 + return (return ()) + where + -- + -- very careful here: if we receive an exception, then we need to + -- (a) write True into the TVar, so that another signalQSem doesn't + -- try to wake up this thread, and + -- (b) if the TVar is *already* True, then we need to do another + -- signalQSem to avoid losing a unit of the resource. + -- + -- The 'wake' function does both (a) and (b), so we can just call + -- it here. + -- + wait t = + flip onException (wake s t) $ + atomically $ do + b <- readTVar t + unless b retry + + +wake :: QSem -> TVar Bool -> IO () +wake s x = join $ atomically $ do + b <- readTVar x + if b then return (signalQSem s) + else do writeTVar x True + return (return ()) + +{- + property we want: + + bracket waitQSem (\_ -> signalQSem) (\_ -> ...) + + never loses a unit of the resource. +-} + +signalQSem :: QSem -> IO () +signalQSem s@(QSem q b1 b2) = + mask_ $ join $ atomically $ do + -- join, so we don't force the reverse inside the txn + -- mask_ is needed so we don't lose a wakeup + v <- readTVar q + if v /= 0 + then do writeTVar q $! 
v + 1 + return (return ()) + else do xs <- readTVar b1 + checkwake1 xs + where + checkwake1 [] = do + ys <- readTVar b2 + checkwake2 ys + checkwake1 (x:xs) = do + writeTVar b1 xs + return (wake s x) + + checkwake2 [] = do + writeTVar q 1 + return (return ()) + checkwake2 ys = do + let (z:zs) = reverse ys + writeTVar b1 zs + writeTVar b2 [] + return (wake s z) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Config.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Config.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Config.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Config.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,1249 @@ +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Config +-- Copyright : (c) David Himmelstrup 2005 +-- License : BSD-like +-- +-- Maintainer : lemmih@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- Utilities for handling saved state such as known packages, known servers and +-- downloaded packages. 
+----------------------------------------------------------------------------- +module Distribution.Client.Config ( + SavedConfig(..), + loadConfig, + getConfigFilePath, + + showConfig, + showConfigWithComments, + parseConfig, + + getCabalDir, + defaultConfigFile, + defaultCacheDir, + defaultCompiler, + defaultLogsDir, + defaultUserInstall, + + baseSavedConfig, + commentSavedConfig, + initialSavedConfig, + configFieldDescriptions, + haddockFlagsFields, + installDirsFields, + withProgramsFields, + withProgramOptionsFields, + userConfigDiff, + userConfigUpdate, + createDefaultConfigFile, + + remoteRepoFields + ) where + +import Distribution.Client.Types + ( RemoteRepo(..), Username(..), Password(..), emptyRemoteRepo + , AllowOlder(..), AllowNewer(..), RelaxDeps(..), isRelaxDeps + ) +import Distribution.Client.BuildReports.Types + ( ReportLevel(..) ) +import Distribution.Client.Setup + ( GlobalFlags(..), globalCommand, defaultGlobalFlags + , ConfigExFlags(..), configureExOptions, defaultConfigExFlags + , InstallFlags(..), installOptions, defaultInstallFlags + , UploadFlags(..), uploadCommand + , ReportFlags(..), reportCommand + , showRepo, parseRepo, readRepo ) +import Distribution.Utils.NubList + ( NubList, fromNubList, toNubList, overNubList ) + +import Distribution.Simple.Compiler + ( DebugInfoLevel(..), OptimisationLevel(..) ) +import Distribution.Simple.Setup + ( ConfigFlags(..), configureOptions, defaultConfigFlags + , HaddockFlags(..), haddockOptions, defaultHaddockFlags + , installDirsOptions, optionDistPref + , programDbPaths', programDbOptions + , Flag(..), toFlag, flagToMaybe, fromFlagOrDefault ) +import Distribution.Simple.InstallDirs + ( InstallDirs(..), defaultInstallDirs + , PathTemplate, toPathTemplate ) +import Distribution.ParseUtils + ( FieldDescr(..), liftField + , ParseResult(..), PError(..), PWarning(..) 
+ , locatedErrorMsg, showPWarning + , readFields, warning, lineNo + , simpleField, listField, spaceListField + , parseFilePathQ, parseOptCommaList, parseTokenQ ) +import Distribution.Client.ParseUtils + ( parseFields, ppFields, ppSection ) +import Distribution.Client.HttpUtils + ( isOldHackageURI ) +import qualified Distribution.ParseUtils as ParseUtils + ( Field(..) ) +import qualified Distribution.Text as Text + ( Text(..), display ) +import Distribution.Simple.Command + ( CommandUI(commandOptions), commandDefaultFlags, ShowOrParseArgs(..) + , viewAsFieldDescr ) +import Distribution.Simple.Program + ( defaultProgramDb ) +import Distribution.Simple.Utils + ( die', notice, warn, lowercase, cabalVersion ) +import Distribution.Compiler + ( CompilerFlavor(..), defaultCompilerFlavor ) +import Distribution.Verbosity + ( Verbosity, normal ) + +import Distribution.Solver.Types.ConstraintSource + +import Data.List + ( partition, find, foldl', nubBy ) +import Data.Maybe + ( fromMaybe ) +import Control.Monad + ( when, unless, foldM, liftM ) +import qualified Distribution.Compat.ReadP as Parse + ( (<++), option ) +import Distribution.Compat.Semigroup +import qualified Text.PrettyPrint as Disp + ( render, text, empty ) +import Text.PrettyPrint + ( ($+$) ) +import Text.PrettyPrint.HughesPJ + ( text, Doc ) +import System.Directory + ( createDirectoryIfMissing, getAppUserDataDirectory, renameFile ) +import Network.URI + ( URI(..), URIAuth(..), parseURI ) +import System.FilePath + ( (<.>), (), takeDirectory ) +import System.IO.Error + ( isDoesNotExistError ) +import Distribution.Compat.Environment + ( getEnvironment, lookupEnv ) +import Distribution.Compat.Exception + ( catchIO ) +import qualified Paths_cabal_install + ( version ) +import Data.Version + ( showVersion ) +import Data.Char + ( isSpace ) +import qualified Data.Map as M +import Data.Function + ( on ) +import GHC.Generics ( Generic ) + +-- +-- * Configuration saved in the config file +-- + +data SavedConfig = 
SavedConfig { + savedGlobalFlags :: GlobalFlags, + savedInstallFlags :: InstallFlags, + savedConfigureFlags :: ConfigFlags, + savedConfigureExFlags :: ConfigExFlags, + savedUserInstallDirs :: InstallDirs (Flag PathTemplate), + savedGlobalInstallDirs :: InstallDirs (Flag PathTemplate), + savedUploadFlags :: UploadFlags, + savedReportFlags :: ReportFlags, + savedHaddockFlags :: HaddockFlags + } deriving Generic + +instance Monoid SavedConfig where + mempty = gmempty + mappend = (<>) + +instance Semigroup SavedConfig where + a <> b = SavedConfig { + savedGlobalFlags = combinedSavedGlobalFlags, + savedInstallFlags = combinedSavedInstallFlags, + savedConfigureFlags = combinedSavedConfigureFlags, + savedConfigureExFlags = combinedSavedConfigureExFlags, + savedUserInstallDirs = combinedSavedUserInstallDirs, + savedGlobalInstallDirs = combinedSavedGlobalInstallDirs, + savedUploadFlags = combinedSavedUploadFlags, + savedReportFlags = combinedSavedReportFlags, + savedHaddockFlags = combinedSavedHaddockFlags + } + where + -- This is ugly, but necessary. If we're mappending two config files, we + -- want the values of the *non-empty* list fields from the second one to + -- *override* the corresponding values from the first one. Default + -- behaviour (concatenation) is confusing and makes some use cases (see + -- #1884) impossible. + -- + -- However, we also want to allow specifying multiple values for a list + -- field in a *single* config file. For example, we want the following to + -- continue to work: + -- + -- remote-repo: hackage.haskell.org:http://hackage.haskell.org/ + -- remote-repo: private-collection:http://hackage.local/ + -- + -- So we can't just wrap the list fields inside Flags; we have to do some + -- special-casing just for SavedConfig. + + -- NB: the signature prevents us from using 'combine' on lists. + combine' :: (SavedConfig -> flags) -> (flags -> Flag a) -> Flag a + combine' field subfield = + (subfield . field $ a) `mappend` (subfield . 
field $ b) + + combineMonoid :: Monoid mon => (SavedConfig -> flags) -> (flags -> mon) + -> mon + combineMonoid field subfield = + (subfield . field $ a) `mappend` (subfield . field $ b) + + lastNonEmpty' :: (SavedConfig -> flags) -> (flags -> [a]) -> [a] + lastNonEmpty' field subfield = + let a' = subfield . field $ a + b' = subfield . field $ b + in case b' of [] -> a' + _ -> b' + + lastNonMempty' :: (Eq a, Monoid a) => (SavedConfig -> flags) -> (flags -> a) -> a + lastNonMempty' field subfield = + let a' = subfield . field $ a + b' = subfield . field $ b + in if b' == mempty then a' else b' + + lastNonEmptyNL' :: (SavedConfig -> flags) -> (flags -> NubList a) + -> NubList a + lastNonEmptyNL' field subfield = + let a' = subfield . field $ a + b' = subfield . field $ b + in case fromNubList b' of [] -> a' + _ -> b' + + combinedSavedGlobalFlags = GlobalFlags { + globalVersion = combine globalVersion, + globalNumericVersion = combine globalNumericVersion, + globalConfigFile = combine globalConfigFile, + globalSandboxConfigFile = combine globalSandboxConfigFile, + globalConstraintsFile = combine globalConstraintsFile, + globalRemoteRepos = lastNonEmptyNL globalRemoteRepos, + globalCacheDir = combine globalCacheDir, + globalLocalRepos = lastNonEmptyNL globalLocalRepos, + globalLogsDir = combine globalLogsDir, + globalWorldFile = combine globalWorldFile, + globalRequireSandbox = combine globalRequireSandbox, + globalIgnoreSandbox = combine globalIgnoreSandbox, + globalIgnoreExpiry = combine globalIgnoreExpiry, + globalHttpTransport = combine globalHttpTransport, + globalNix = combine globalNix, + globalStoreDir = combine globalStoreDir, + globalProgPathExtra = lastNonEmptyNL globalProgPathExtra + } + where + combine = combine' savedGlobalFlags + lastNonEmptyNL = lastNonEmptyNL' savedGlobalFlags + + combinedSavedInstallFlags = InstallFlags { + installDocumentation = combine installDocumentation, + installHaddockIndex = combine installHaddockIndex, + installDryRun = 
combine installDryRun, + installDest = combine installDest, + installMaxBackjumps = combine installMaxBackjumps, + installReorderGoals = combine installReorderGoals, + installCountConflicts = combine installCountConflicts, + installIndependentGoals = combine installIndependentGoals, + installShadowPkgs = combine installShadowPkgs, + installStrongFlags = combine installStrongFlags, + installAllowBootLibInstalls = combine installAllowBootLibInstalls, + installReinstall = combine installReinstall, + installAvoidReinstalls = combine installAvoidReinstalls, + installOverrideReinstall = combine installOverrideReinstall, + installUpgradeDeps = combine installUpgradeDeps, + installOnly = combine installOnly, + installOnlyDeps = combine installOnlyDeps, + installIndexState = combine installIndexState, + installRootCmd = combine installRootCmd, + installSummaryFile = lastNonEmptyNL installSummaryFile, + installLogFile = combine installLogFile, + installBuildReports = combine installBuildReports, + installReportPlanningFailure = combine installReportPlanningFailure, + installSymlinkBinDir = combine installSymlinkBinDir, + installPerComponent = combine installPerComponent, + installOneShot = combine installOneShot, + installNumJobs = combine installNumJobs, + installKeepGoing = combine installKeepGoing, + installRunTests = combine installRunTests, + installOfflineMode = combine installOfflineMode, + installProjectFileName = combine installProjectFileName + } + where + combine = combine' savedInstallFlags + lastNonEmptyNL = lastNonEmptyNL' savedInstallFlags + + combinedSavedConfigureFlags = ConfigFlags { + configArgs = lastNonEmpty configArgs, + configPrograms_ = configPrograms_ . 
savedConfigureFlags $ b, + -- TODO: NubListify + configProgramPaths = lastNonEmpty configProgramPaths, + -- TODO: NubListify + configProgramArgs = lastNonEmpty configProgramArgs, + configProgramPathExtra = lastNonEmptyNL configProgramPathExtra, + configInstantiateWith = lastNonEmpty configInstantiateWith, + configHcFlavor = combine configHcFlavor, + configHcPath = combine configHcPath, + configHcPkg = combine configHcPkg, + configVanillaLib = combine configVanillaLib, + configProfLib = combine configProfLib, + configProf = combine configProf, + configSharedLib = combine configSharedLib, + configStaticLib = combine configStaticLib, + configDynExe = combine configDynExe, + configProfExe = combine configProfExe, + configProfDetail = combine configProfDetail, + configProfLibDetail = combine configProfLibDetail, + -- TODO: NubListify + configConfigureArgs = lastNonEmpty configConfigureArgs, + configOptimization = combine configOptimization, + configDebugInfo = combine configDebugInfo, + configProgPrefix = combine configProgPrefix, + configProgSuffix = combine configProgSuffix, + -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. + configInstallDirs = + (configInstallDirs . savedConfigureFlags $ a) + `mappend` (configInstallDirs . 
savedConfigureFlags $ b), + configScratchDir = combine configScratchDir, + -- TODO: NubListify + configExtraLibDirs = lastNonEmpty configExtraLibDirs, + -- TODO: NubListify + configExtraFrameworkDirs = lastNonEmpty configExtraFrameworkDirs, + -- TODO: NubListify + configExtraIncludeDirs = lastNonEmpty configExtraIncludeDirs, + configDeterministic = combine configDeterministic, + configIPID = combine configIPID, + configCID = combine configCID, + configDistPref = combine configDistPref, + configCabalFilePath = combine configCabalFilePath, + configVerbosity = combine configVerbosity, + configUserInstall = combine configUserInstall, + -- TODO: NubListify + configPackageDBs = lastNonEmpty configPackageDBs, + configGHCiLib = combine configGHCiLib, + configSplitSections = combine configSplitSections, + configSplitObjs = combine configSplitObjs, + configStripExes = combine configStripExes, + configStripLibs = combine configStripLibs, + -- TODO: NubListify + configConstraints = lastNonEmpty configConstraints, + -- TODO: NubListify + configDependencies = lastNonEmpty configDependencies, + -- TODO: NubListify + configConfigurationsFlags = lastNonMempty configConfigurationsFlags, + configTests = combine configTests, + configBenchmarks = combine configBenchmarks, + configCoverage = combine configCoverage, + configLibCoverage = combine configLibCoverage, + configExactConfiguration = combine configExactConfiguration, + configFlagError = combine configFlagError, + configRelocatable = combine configRelocatable, + configUseResponseFiles = combine configUseResponseFiles + } + where + combine = combine' savedConfigureFlags + lastNonEmpty = lastNonEmpty' savedConfigureFlags + lastNonEmptyNL = lastNonEmptyNL' savedConfigureFlags + lastNonMempty = lastNonMempty' savedConfigureFlags + + combinedSavedConfigureExFlags = ConfigExFlags { + configCabalVersion = combine configCabalVersion, + -- TODO: NubListify + configExConstraints = lastNonEmpty configExConstraints, + -- TODO: NubListify + 
configPreferences = lastNonEmpty configPreferences, + configSolver = combine configSolver, + configAllowNewer = combineMonoid savedConfigureExFlags configAllowNewer, + configAllowOlder = combineMonoid savedConfigureExFlags configAllowOlder, + configWriteGhcEnvironmentFilesPolicy + = combine configWriteGhcEnvironmentFilesPolicy + } + where + combine = combine' savedConfigureExFlags + lastNonEmpty = lastNonEmpty' savedConfigureExFlags + + -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. + combinedSavedUserInstallDirs = savedUserInstallDirs a + `mappend` savedUserInstallDirs b + + -- Parametrised by (Flag PathTemplate), so safe to use 'mappend'. + combinedSavedGlobalInstallDirs = savedGlobalInstallDirs a + `mappend` savedGlobalInstallDirs b + + combinedSavedUploadFlags = UploadFlags { + uploadCandidate = combine uploadCandidate, + uploadDoc = combine uploadDoc, + uploadUsername = combine uploadUsername, + uploadPassword = combine uploadPassword, + uploadPasswordCmd = combine uploadPasswordCmd, + uploadVerbosity = combine uploadVerbosity + } + where + combine = combine' savedUploadFlags + + combinedSavedReportFlags = ReportFlags { + reportUsername = combine reportUsername, + reportPassword = combine reportPassword, + reportVerbosity = combine reportVerbosity + } + where + combine = combine' savedReportFlags + + combinedSavedHaddockFlags = HaddockFlags { + -- TODO: NubListify + haddockProgramPaths = lastNonEmpty haddockProgramPaths, + -- TODO: NubListify + haddockProgramArgs = lastNonEmpty haddockProgramArgs, + haddockHoogle = combine haddockHoogle, + haddockHtml = combine haddockHtml, + haddockHtmlLocation = combine haddockHtmlLocation, + haddockForHackage = combine haddockForHackage, + haddockExecutables = combine haddockExecutables, + haddockTestSuites = combine haddockTestSuites, + haddockBenchmarks = combine haddockBenchmarks, + haddockForeignLibs = combine haddockForeignLibs, + haddockInternal = combine haddockInternal, + haddockCss = combine 
haddockCss, + haddockLinkedSource = combine haddockLinkedSource, + haddockQuickJump = combine haddockQuickJump, + haddockHscolourCss = combine haddockHscolourCss, + haddockContents = combine haddockContents, + haddockDistPref = combine haddockDistPref, + haddockKeepTempFiles = combine haddockKeepTempFiles, + haddockVerbosity = combine haddockVerbosity, + haddockCabalFilePath = combine haddockCabalFilePath, + haddockArgs = lastNonEmpty haddockArgs + } + where + combine = combine' savedHaddockFlags + lastNonEmpty = lastNonEmpty' savedHaddockFlags + + +-- +-- * Default config +-- + +-- | These are the absolute basic defaults. The fields that must be +-- initialised. When we load the config from the file we layer the loaded +-- values over these ones, so any missing fields in the file take their values +-- from here. +-- +baseSavedConfig :: IO SavedConfig +baseSavedConfig = do + userPrefix <- getCabalDir + cacheDir <- defaultCacheDir + logsDir <- defaultLogsDir + worldFile <- defaultWorldFile + return mempty { + savedConfigureFlags = mempty { + configHcFlavor = toFlag defaultCompiler, + configUserInstall = toFlag defaultUserInstall, + configVerbosity = toFlag normal + }, + savedUserInstallDirs = mempty { + prefix = toFlag (toPathTemplate userPrefix) + }, + savedGlobalFlags = mempty { + globalCacheDir = toFlag cacheDir, + globalLogsDir = toFlag logsDir, + globalWorldFile = toFlag worldFile + } + } + +-- | This is the initial configuration that we write out to to the config file +-- if the file does not exist (or the config we use if the file cannot be read +-- for some other reason). When the config gets loaded it gets layered on top +-- of 'baseSavedConfig' so we do not need to include it into the initial +-- values we save into the config file. 
+-- +initialSavedConfig :: IO SavedConfig +initialSavedConfig = do + cacheDir <- defaultCacheDir + logsDir <- defaultLogsDir + worldFile <- defaultWorldFile + extraPath <- defaultExtraPath + symlinkPath <- defaultSymlinkPath + return mempty { + savedGlobalFlags = mempty { + globalCacheDir = toFlag cacheDir, + globalRemoteRepos = toNubList [defaultRemoteRepo], + globalWorldFile = toFlag worldFile + }, + savedConfigureFlags = mempty { + configProgramPathExtra = toNubList extraPath + }, + savedInstallFlags = mempty { + installSummaryFile = toNubList [toPathTemplate (logsDir "build.log")], + installBuildReports= toFlag AnonymousReports, + installNumJobs = toFlag Nothing, + installSymlinkBinDir = toFlag symlinkPath + } + } + +defaultCabalDir :: IO FilePath +defaultCabalDir = getAppUserDataDirectory "cabal" + +getCabalDir :: IO FilePath +getCabalDir = do + mDir <- lookupEnv "CABAL_DIR" + case mDir of + Nothing -> defaultCabalDir + Just dir -> return dir + +defaultConfigFile :: IO FilePath +defaultConfigFile = do + dir <- getCabalDir + return $ dir "config" + +defaultCacheDir :: IO FilePath +defaultCacheDir = do + dir <- getCabalDir + return $ dir "packages" + +defaultLogsDir :: IO FilePath +defaultLogsDir = do + dir <- getCabalDir + return $ dir "logs" + +-- | Default position of the world file +defaultWorldFile :: IO FilePath +defaultWorldFile = do + dir <- getCabalDir + return $ dir "world" + +defaultExtraPath :: IO [FilePath] +defaultExtraPath = do + dir <- getCabalDir + return [dir "bin"] + +defaultSymlinkPath :: IO FilePath +defaultSymlinkPath = do + dir <- getCabalDir + return (dir "bin") + +defaultCompiler :: CompilerFlavor +defaultCompiler = fromMaybe GHC defaultCompilerFlavor + +defaultUserInstall :: Bool +defaultUserInstall = True +-- We do per-user installs by default on all platforms. We used to default to +-- global installs on Windows but that no longer works on Windows Vista or 7. 
+ +defaultRemoteRepo :: RemoteRepo +defaultRemoteRepo = RemoteRepo name uri Nothing [] 0 False + where + name = "hackage.haskell.org" + uri = URI "http:" (Just (URIAuth "" name "")) "/" "" "" + -- Note that lots of old ~/.cabal/config files will have the old url + -- http://hackage.haskell.org/packages/archive + -- but new config files can use the new url (without the /packages/archive) + -- and avoid having to do a http redirect + +-- For the default repo we know extra information, fill this in. +-- +-- We need this because the 'defaultRemoteRepo' above is only used for the +-- first time when a config file is made. So for users with older config files +-- we might have only have older info. This lets us fill that in even for old +-- config files. +-- +addInfoForKnownRepos :: RemoteRepo -> RemoteRepo +addInfoForKnownRepos repo + | remoteRepoName repo == remoteRepoName defaultRemoteRepo + = useSecure . tryHttps . fixOldURI $ repo + where + fixOldURI r + | isOldHackageURI (remoteRepoURI r) + = r { remoteRepoURI = remoteRepoURI defaultRemoteRepo } + | otherwise = r + + tryHttps r = r { remoteRepoShouldTryHttps = True } + + useSecure r@RemoteRepo{ + remoteRepoSecure = secure, + remoteRepoRootKeys = [], + remoteRepoKeyThreshold = 0 + } | secure /= Just False + = r { + -- Use hackage-security by default unless you opt-out with + -- secure: False + remoteRepoSecure = Just True, + remoteRepoRootKeys = defaultHackageRemoteRepoKeys, + remoteRepoKeyThreshold = defaultHackageRemoteRepoKeyThreshold + } + useSecure r = r +addInfoForKnownRepos other = other + +-- | The current hackage.haskell.org repo root keys that we ship with cabal. +--- +-- This lets us bootstrap trust in this repo without user intervention. +-- These keys need to be periodically updated when new root keys are added. +-- See the root key procedures for details. 
+-- +defaultHackageRemoteRepoKeys :: [String] +defaultHackageRemoteRepoKeys = + [ "fe331502606802feac15e514d9b9ea83fee8b6ffef71335479a2e68d84adc6b0", + "1ea9ba32c526d1cc91ab5e5bd364ec5e9e8cb67179a471872f6e26f0ae773d42", + "2c6c3627bd6c982990239487f1abd02e08a02e6cf16edb105a8012d444d870c3", + "0a5c7ea47cd1b15f01f5f51a33adda7e655bc0f0b0615baa8e271f4c3351e21d", + "51f0161b906011b52c6613376b1ae937670da69322113a246a09f807c62f6921" + ] + +-- | The required threshold of root key signatures for hackage.haskell.org +-- +defaultHackageRemoteRepoKeyThreshold :: Int +defaultHackageRemoteRepoKeyThreshold = 3 + +-- +-- * Config file reading +-- + +-- | Loads the main configuration, and applies additional defaults to give the +-- effective configuration. To loads just what is actually in the config file, +-- use 'loadRawConfig'. +-- +loadConfig :: Verbosity -> Flag FilePath -> IO SavedConfig +loadConfig verbosity configFileFlag = do + config <- loadRawConfig verbosity configFileFlag + extendToEffectiveConfig config + +extendToEffectiveConfig :: SavedConfig -> IO SavedConfig +extendToEffectiveConfig config = do + base <- baseSavedConfig + let effective0 = base `mappend` config + globalFlags0 = savedGlobalFlags effective0 + effective = effective0 { + savedGlobalFlags = globalFlags0 { + globalRemoteRepos = + overNubList (map addInfoForKnownRepos) + (globalRemoteRepos globalFlags0) + } + } + return effective + +-- | Like 'loadConfig' but does not apply any additional defaults, it just +-- loads what is actually in the config file. This is thus suitable for +-- comparing or editing a config file, but not suitable for using as the +-- effective configuration. +-- +loadRawConfig :: Verbosity -> Flag FilePath -> IO SavedConfig +loadRawConfig verbosity configFileFlag = do + (source, configFile) <- getConfigFilePathAndSource configFileFlag + minp <- readConfigFile mempty configFile + case minp of + Nothing -> do + notice verbosity $ "Config file path source is " ++ sourceMsg source ++ "." 
+ notice verbosity $ "Config file " ++ configFile ++ " not found." + createDefaultConfigFile verbosity [] configFile + Just (ParseOk ws conf) -> do + unless (null ws) $ warn verbosity $ + unlines (map (showPWarning configFile) ws) + return conf + Just (ParseFailed err) -> do + let (line, msg) = locatedErrorMsg err + die' verbosity $ + "Error parsing config file " ++ configFile + ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg + + where + sourceMsg CommandlineOption = "commandline option" + sourceMsg EnvironmentVariable = "env var CABAL_CONFIG" + sourceMsg Default = "default config file" + +data ConfigFileSource = CommandlineOption + | EnvironmentVariable + | Default + +-- | Returns the config file path, without checking that the file exists. +-- The order of precedence is: input flag, CABAL_CONFIG, default location. +getConfigFilePath :: Flag FilePath -> IO FilePath +getConfigFilePath = fmap snd . getConfigFilePathAndSource + +getConfigFilePathAndSource :: Flag FilePath -> IO (ConfigFileSource, FilePath) +getConfigFilePathAndSource configFileFlag = + getSource sources + where + sources = + [ (CommandlineOption, return . flagToMaybe $ configFileFlag) + , (EnvironmentVariable, lookup "CABAL_CONFIG" `liftM` getEnvironment) + , (Default, Just `liftM` defaultConfigFile) ] + + getSource [] = error "no config file path candidate found." + getSource ((source,action): xs) = + action >>= maybe (getSource xs) (return . (,) source) + +readConfigFile :: SavedConfig -> FilePath -> IO (Maybe (ParseResult SavedConfig)) +readConfigFile initial file = handleNotExists $ + fmap (Just . 
parseConfig (ConstraintSourceMainConfig file) initial) + (readFile file) + + where + handleNotExists action = catchIO action $ \ioe -> + if isDoesNotExistError ioe + then return Nothing + else ioError ioe + +createDefaultConfigFile :: Verbosity -> [String] -> FilePath -> IO SavedConfig +createDefaultConfigFile verbosity extraLines filePath = do + commentConf <- commentSavedConfig + initialConf <- initialSavedConfig + extraConf <- parseExtraLines verbosity extraLines + notice verbosity $ "Writing default configuration to " ++ filePath + writeConfigFile filePath commentConf (initialConf `mappend` extraConf) + return initialConf + +writeConfigFile :: FilePath -> SavedConfig -> SavedConfig -> IO () +writeConfigFile file comments vals = do + let tmpFile = file <.> "tmp" + createDirectoryIfMissing True (takeDirectory file) + writeFile tmpFile $ explanation ++ showConfigWithComments comments vals ++ "\n" + renameFile tmpFile file + where + explanation = unlines + ["-- This is the configuration file for the 'cabal' command line tool." + ,"--" + ,"-- The available configuration options are listed below." + ,"-- Some of them have default values listed." + ,"--" + ,"-- Lines (like this one) beginning with '--' are comments." + ,"-- Be careful with spaces and indentation because they are" + ,"-- used to indicate layout for nested sections." + ,"--" + ,"-- This config file was generated using the following versions" + ,"-- of Cabal and cabal-install:" + ,"-- Cabal library version: " ++ Text.display cabalVersion + ,"-- cabal-install version: " ++ showVersion Paths_cabal_install.version + ,"","" + ] + +-- | These are the default values that get used in Cabal if a no value is +-- given. We use these here to include in comments when we write out the +-- initial config file so that the user can see what default value they are +-- overriding. 
+-- +commentSavedConfig :: IO SavedConfig +commentSavedConfig = do + userInstallDirs <- defaultInstallDirs defaultCompiler True True + globalInstallDirs <- defaultInstallDirs defaultCompiler False True + let conf0 = mempty { + savedGlobalFlags = defaultGlobalFlags { + globalRemoteRepos = toNubList [defaultRemoteRepo] + }, + savedInstallFlags = defaultInstallFlags, + savedConfigureExFlags = defaultConfigExFlags { + configAllowNewer = Just (AllowNewer mempty), + configAllowOlder = Just (AllowOlder mempty) + }, + savedConfigureFlags = (defaultConfigFlags defaultProgramDb) { + configUserInstall = toFlag defaultUserInstall + }, + savedUserInstallDirs = fmap toFlag userInstallDirs, + savedGlobalInstallDirs = fmap toFlag globalInstallDirs, + savedUploadFlags = commandDefaultFlags uploadCommand, + savedReportFlags = commandDefaultFlags reportCommand, + savedHaddockFlags = defaultHaddockFlags + + } + conf1 <- extendToEffectiveConfig conf0 + let globalFlagsConf1 = savedGlobalFlags conf1 + conf2 = conf1 { + savedGlobalFlags = globalFlagsConf1 { + globalRemoteRepos = overNubList (map removeRootKeys) + (globalRemoteRepos globalFlagsConf1) + } + } + return conf2 + where + -- Most people don't want to see default root keys, so don't print them. + removeRootKeys :: RemoteRepo -> RemoteRepo + removeRootKeys r = r { remoteRepoRootKeys = [] } + +-- | All config file fields. +-- +configFieldDescriptions :: ConstraintSource -> [FieldDescr SavedConfig] +configFieldDescriptions src = + + toSavedConfig liftGlobalFlag + (commandOptions (globalCommand []) ParseArgs) + ["version", "numeric-version", "config-file", "sandbox-config-file"] [] + + ++ toSavedConfig liftConfigFlag + (configureOptions ParseArgs) + (["builddir", "constraint", "dependency", "ipid"] + ++ map fieldName installDirsFields) + + -- This is only here because viewAsFieldDescr gives us a parser + -- that only recognises 'ghc' etc, the case-sensitive flag names, not + -- what the normal case-insensitive parser gives us. 
+ [simpleField "compiler" + (fromFlagOrDefault Disp.empty . fmap Text.disp) (optional Text.parse) + configHcFlavor (\v flags -> flags { configHcFlavor = v }) + + -- TODO: The following is a temporary fix. The "optimization" + -- and "debug-info" fields are OptArg, and viewAsFieldDescr + -- fails on that. Instead of a hand-written hackaged parser + -- and printer, we should handle this case properly in the + -- library. + ,liftField configOptimization (\v flags -> + flags { configOptimization = v }) $ + let name = "optimization" in + FieldDescr name + (\f -> case f of + Flag NoOptimisation -> Disp.text "False" + Flag NormalOptimisation -> Disp.text "True" + Flag MaximumOptimisation -> Disp.text "2" + _ -> Disp.empty) + (\line str _ -> case () of + _ | str == "False" -> ParseOk [] (Flag NoOptimisation) + | str == "True" -> ParseOk [] (Flag NormalOptimisation) + | str == "0" -> ParseOk [] (Flag NoOptimisation) + | str == "1" -> ParseOk [] (Flag NormalOptimisation) + | str == "2" -> ParseOk [] (Flag MaximumOptimisation) + | lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation) + | lstr == "true" -> ParseOk [caseWarning] (Flag NormalOptimisation) + | otherwise -> ParseFailed (NoParse name line) + where + lstr = lowercase str + caseWarning = PWarning $ + "The '" ++ name + ++ "' field is case sensitive, use 'True' or 'False'.") + ,liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $ + let name = "debug-info" in + FieldDescr name + (\f -> case f of + Flag NoDebugInfo -> Disp.text "False" + Flag MinimalDebugInfo -> Disp.text "1" + Flag NormalDebugInfo -> Disp.text "True" + Flag MaximalDebugInfo -> Disp.text "3" + _ -> Disp.empty) + (\line str _ -> case () of + _ | str == "False" -> ParseOk [] (Flag NoDebugInfo) + | str == "True" -> ParseOk [] (Flag NormalDebugInfo) + | str == "0" -> ParseOk [] (Flag NoDebugInfo) + | str == "1" -> ParseOk [] (Flag MinimalDebugInfo) + | str == "2" -> ParseOk [] (Flag NormalDebugInfo) + | str == "3" -> ParseOk [] 
(Flag MaximalDebugInfo) + | lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo) + | lstr == "true" -> ParseOk [caseWarning] (Flag NormalDebugInfo) + | otherwise -> ParseFailed (NoParse name line) + where + lstr = lowercase str + caseWarning = PWarning $ + "The '" ++ name + ++ "' field is case sensitive, use 'True' or 'False'.") + ] + + ++ toSavedConfig liftConfigExFlag + (configureExOptions ParseArgs src) + [] + [let pkgs = (Just . AllowOlder . RelaxDepsSome) `fmap` parseOptCommaList Text.parse + parseAllowOlder = ((Just . AllowOlder . toRelaxDeps) `fmap` Text.parse) Parse.<++ pkgs in + simpleField "allow-older" + (showRelaxDeps . fmap unAllowOlder) parseAllowOlder + configAllowOlder (\v flags -> flags { configAllowOlder = v }) + ,let pkgs = (Just . AllowNewer . RelaxDepsSome) `fmap` parseOptCommaList Text.parse + parseAllowNewer = ((Just . AllowNewer . toRelaxDeps) `fmap` Text.parse) Parse.<++ pkgs in + simpleField "allow-newer" + (showRelaxDeps . fmap unAllowNewer) parseAllowNewer + configAllowNewer (\v flags -> flags { configAllowNewer = v }) + ] + + ++ toSavedConfig liftInstallFlag + (installOptions ParseArgs) + ["dry-run", "only", "only-dependencies", "dependencies-only"] [] + + ++ toSavedConfig liftUploadFlag + (commandOptions uploadCommand ParseArgs) + ["verbose", "check", "documentation", "publish"] [] + + ++ toSavedConfig liftReportFlag + (commandOptions reportCommand ParseArgs) + ["verbose", "username", "password"] [] + --FIXME: this is a hack, hiding the user name and password. + -- But otherwise it masks the upload ones. Either need to + -- share the options or make then distinct. In any case + -- they should probably be per-server. + + ++ [ viewAsFieldDescr + $ optionDistPref + (configDistPref . 
savedConfigureFlags) + (\distPref config -> + config + { savedConfigureFlags = (savedConfigureFlags config) { + configDistPref = distPref } + , savedHaddockFlags = (savedHaddockFlags config) { + haddockDistPref = distPref } + } + ) + ParseArgs + ] + + where + toSavedConfig lift options exclusions replacements = + [ lift (fromMaybe field replacement) + | opt <- options + , let field = viewAsFieldDescr opt + name = fieldName field + replacement = find ((== name) . fieldName) replacements + , name `notElem` exclusions ] + optional = Parse.option mempty . fmap toFlag + + + showRelaxDeps Nothing = mempty + showRelaxDeps (Just rd) | isRelaxDeps rd = Disp.text "True" + | otherwise = Disp.text "False" + + toRelaxDeps True = RelaxDepsAll + toRelaxDeps False = mempty + + +-- TODO: next step, make the deprecated fields elicit a warning. +-- +deprecatedFieldDescriptions :: [FieldDescr SavedConfig] +deprecatedFieldDescriptions = + [ liftGlobalFlag $ + listField "repos" + (Disp.text . showRepo) parseRepo + (fromNubList . globalRemoteRepos) + (\rs cfg -> cfg { globalRemoteRepos = toNubList rs }) + , liftGlobalFlag $ + simpleField "cachedir" + (Disp.text . fromFlagOrDefault "") (optional parseFilePathQ) + globalCacheDir (\d cfg -> cfg { globalCacheDir = d }) + , liftUploadFlag $ + simpleField "hackage-username" + (Disp.text . fromFlagOrDefault "" . fmap unUsername) + (optional (fmap Username parseTokenQ)) + uploadUsername (\d cfg -> cfg { uploadUsername = d }) + , liftUploadFlag $ + simpleField "hackage-password" + (Disp.text . fromFlagOrDefault "" . fmap unPassword) + (optional (fmap Password parseTokenQ)) + uploadPassword (\d cfg -> cfg { uploadPassword = d }) + , liftUploadFlag $ + spaceListField "hackage-password-command" + Disp.text parseTokenQ + (fromFlagOrDefault [] . uploadPasswordCmd) + (\d cfg -> cfg { uploadPasswordCmd = Flag d }) + ] + ++ map (modifyFieldName ("user-"++) . liftUserInstallDirs) installDirsFields + ++ map (modifyFieldName ("global-"++) . 
liftGlobalInstallDirs) installDirsFields + where + optional = Parse.option mempty . fmap toFlag + modifyFieldName :: (String -> String) -> FieldDescr a -> FieldDescr a + modifyFieldName f d = d { fieldName = f (fieldName d) } + +liftUserInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate)) + -> FieldDescr SavedConfig +liftUserInstallDirs = liftField + savedUserInstallDirs (\flags conf -> conf { savedUserInstallDirs = flags }) + +liftGlobalInstallDirs :: FieldDescr (InstallDirs (Flag PathTemplate)) + -> FieldDescr SavedConfig +liftGlobalInstallDirs = liftField + savedGlobalInstallDirs (\flags conf -> conf { savedGlobalInstallDirs = flags }) + +liftGlobalFlag :: FieldDescr GlobalFlags -> FieldDescr SavedConfig +liftGlobalFlag = liftField + savedGlobalFlags (\flags conf -> conf { savedGlobalFlags = flags }) + +liftConfigFlag :: FieldDescr ConfigFlags -> FieldDescr SavedConfig +liftConfigFlag = liftField + savedConfigureFlags (\flags conf -> conf { savedConfigureFlags = flags }) + +liftConfigExFlag :: FieldDescr ConfigExFlags -> FieldDescr SavedConfig +liftConfigExFlag = liftField + savedConfigureExFlags (\flags conf -> conf { savedConfigureExFlags = flags }) + +liftInstallFlag :: FieldDescr InstallFlags -> FieldDescr SavedConfig +liftInstallFlag = liftField + savedInstallFlags (\flags conf -> conf { savedInstallFlags = flags }) + +liftUploadFlag :: FieldDescr UploadFlags -> FieldDescr SavedConfig +liftUploadFlag = liftField + savedUploadFlags (\flags conf -> conf { savedUploadFlags = flags }) + +liftReportFlag :: FieldDescr ReportFlags -> FieldDescr SavedConfig +liftReportFlag = liftField + savedReportFlags (\flags conf -> conf { savedReportFlags = flags }) + +parseConfig :: ConstraintSource + -> SavedConfig + -> String + -> ParseResult SavedConfig +parseConfig src initial = \str -> do + fields <- readFields str + let (knownSections, others) = partition isKnownSection fields + config <- parse others + let user0 = savedUserInstallDirs config + global0 = 
savedGlobalInstallDirs config + (remoteRepoSections0, haddockFlags, user, global, paths, args) <- + foldM parseSections + ([], savedHaddockFlags config, user0, global0, [], []) + knownSections + + let remoteRepoSections = + reverse + . nubBy ((==) `on` remoteRepoName) + $ remoteRepoSections0 + + return config { + savedGlobalFlags = (savedGlobalFlags config) { + globalRemoteRepos = toNubList remoteRepoSections, + -- the global extra prog path comes from the configure flag prog path + globalProgPathExtra = configProgramPathExtra (savedConfigureFlags config) + }, + savedConfigureFlags = (savedConfigureFlags config) { + configProgramPaths = paths, + configProgramArgs = args + }, + savedHaddockFlags = haddockFlags, + savedUserInstallDirs = user, + savedGlobalInstallDirs = global + } + + where + isKnownSection (ParseUtils.Section _ "repository" _ _) = True + isKnownSection (ParseUtils.F _ "remote-repo" _) = True + isKnownSection (ParseUtils.Section _ "haddock" _ _) = True + isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True + isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True + isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True + isKnownSection _ = False + + parse = parseFields (configFieldDescriptions src + ++ deprecatedFieldDescriptions) initial + + parseSections (rs, h, u, g, p, a) + (ParseUtils.Section _ "repository" name fs) = do + r' <- parseFields remoteRepoFields (emptyRemoteRepo name) fs + when (remoteRepoKeyThreshold r' > length (remoteRepoRootKeys r')) $ + warning $ "'key-threshold' for repository " ++ show (remoteRepoName r') + ++ " higher than number of keys" + when (not (null (remoteRepoRootKeys r')) + && remoteRepoSecure r' /= Just True) $ + warning $ "'root-keys' for repository " ++ show (remoteRepoName r') + ++ " non-empty, but 'secure' not set to True." 
+ return (r':rs, h, u, g, p, a) + + parseSections (rs, h, u, g, p, a) + (ParseUtils.F lno "remote-repo" raw) = do + let mr' = readRepo raw + r' <- maybe (ParseFailed $ NoParse "remote-repo" lno) return mr' + return (r':rs, h, u, g, p, a) + + parseSections accum@(rs, h, u, g, p, a) + (ParseUtils.Section _ "haddock" name fs) + | name == "" = do h' <- parseFields haddockFlagsFields h fs + return (rs, h', u, g, p, a) + | otherwise = do + warning "The 'haddock' section should be unnamed" + return accum + parseSections accum@(rs, h, u, g, p, a) + (ParseUtils.Section _ "install-dirs" name fs) + | name' == "user" = do u' <- parseFields installDirsFields u fs + return (rs, h, u', g, p, a) + | name' == "global" = do g' <- parseFields installDirsFields g fs + return (rs, h, u, g', p, a) + | otherwise = do + warning "The 'install-paths' section should be for 'user' or 'global'" + return accum + where name' = lowercase name + parseSections accum@(rs, h, u, g, p, a) + (ParseUtils.Section _ "program-locations" name fs) + | name == "" = do p' <- parseFields withProgramsFields p fs + return (rs, h, u, g, p', a) + | otherwise = do + warning "The 'program-locations' section should be unnamed" + return accum + parseSections accum@(rs, h, u, g, p, a) + (ParseUtils.Section _ "program-default-options" name fs) + | name == "" = do a' <- parseFields withProgramOptionsFields a fs + return (rs, h, u, g, p, a') + | otherwise = do + warning "The 'program-default-options' section should be unnamed" + return accum + parseSections accum f = do + warning $ "Unrecognized stanza on line " ++ show (lineNo f) + return accum + +showConfig :: SavedConfig -> String +showConfig = showConfigWithComments mempty + +showConfigWithComments :: SavedConfig -> SavedConfig -> String +showConfigWithComments comment vals = Disp.render $ + case fmap (uncurry ppRemoteRepoSection) + (zip (getRemoteRepos comment) (getRemoteRepos vals)) of + [] -> Disp.text "" + (x:xs) -> foldl' (\ r r' -> r $+$ Disp.text "" $+$ r') x xs 
+ $+$ Disp.text "" + $+$ ppFields (skipSomeFields (configFieldDescriptions ConstraintSourceUnknown)) + mcomment vals + $+$ Disp.text "" + $+$ ppSection "haddock" "" haddockFlagsFields + (fmap savedHaddockFlags mcomment) (savedHaddockFlags vals) + $+$ Disp.text "" + $+$ installDirsSection "user" savedUserInstallDirs + $+$ Disp.text "" + $+$ installDirsSection "global" savedGlobalInstallDirs + $+$ Disp.text "" + $+$ configFlagsSection "program-locations" withProgramsFields + configProgramPaths + $+$ Disp.text "" + $+$ configFlagsSection "program-default-options" withProgramOptionsFields + configProgramArgs + where + getRemoteRepos = fromNubList . globalRemoteRepos . savedGlobalFlags + mcomment = Just comment + installDirsSection name field = + ppSection "install-dirs" name installDirsFields + (fmap field mcomment) (field vals) + configFlagsSection name fields field = + ppSection name "" fields + (fmap (field . savedConfigureFlags) mcomment) + ((field . savedConfigureFlags) vals) + + -- skip fields based on field name. currently only skips "remote-repo", + -- because that is rendered as a section. (see 'ppRemoteRepoSection'.) + skipSomeFields = filter ((/= "remote-repo") . fieldName) + +-- | Fields for the 'install-dirs' sections. +installDirsFields :: [FieldDescr (InstallDirs (Flag PathTemplate))] +installDirsFields = map viewAsFieldDescr installDirsOptions + +ppRemoteRepoSection :: RemoteRepo -> RemoteRepo -> Doc +ppRemoteRepoSection def vals = ppSection "repository" (remoteRepoName vals) + remoteRepoFields (Just def) vals + +remoteRepoFields :: [FieldDescr RemoteRepo] +remoteRepoFields = + [ simpleField "url" + (text . 
show) (parseTokenQ >>= parseURI') + remoteRepoURI (\x repo -> repo { remoteRepoURI = x }) + , simpleField "secure" + showSecure (Just `fmap` Text.parse) + remoteRepoSecure (\x repo -> repo { remoteRepoSecure = x }) + , listField "root-keys" + text parseTokenQ + remoteRepoRootKeys (\x repo -> repo { remoteRepoRootKeys = x }) + , simpleField "key-threshold" + showThreshold Text.parse + remoteRepoKeyThreshold (\x repo -> repo { remoteRepoKeyThreshold = x }) + ] + where + parseURI' uriString = + case parseURI uriString of + Nothing -> fail $ "remote-repo: no parse on " ++ show uriString + Just uri -> return uri + + showSecure Nothing = mempty -- default 'secure' setting + showSecure (Just True) = text "True" -- user explicitly enabled it + showSecure (Just False) = text "False" -- user explicitly disabled it + + -- If the key-threshold is set to 0, we omit it as this is the default + -- and it looks odd to have a value for key-threshold but not for 'secure' + -- (note that an empty list of keys is already omitted by default, since + -- that is what we do for all list fields) + showThreshold 0 = mempty + showThreshold t = text (show t) + +-- | Fields for the 'haddock' section. +haddockFlagsFields :: [FieldDescr HaddockFlags] +haddockFlagsFields = [ field + | opt <- haddockOptions ParseArgs + , let field = viewAsFieldDescr opt + name = fieldName field + , name `notElem` exclusions ] + where + exclusions = ["verbose", "builddir", "for-hackage"] + +-- | Fields for the 'program-locations' section. +withProgramsFields :: [FieldDescr [(String, FilePath)]] +withProgramsFields = + map viewAsFieldDescr $ + programDbPaths' (++ "-location") defaultProgramDb + ParseArgs id (++) + +-- | Fields for the 'program-default-options' section. 
+withProgramOptionsFields :: [FieldDescr [(String, [String])]] +withProgramOptionsFields = + map viewAsFieldDescr $ + programDbOptions defaultProgramDb ParseArgs id (++) + +parseExtraLines :: Verbosity -> [String] -> IO SavedConfig +parseExtraLines verbosity extraLines = + case parseConfig (ConstraintSourceMainConfig "additional lines") + mempty (unlines extraLines) of + ParseFailed err -> + let (line, msg) = locatedErrorMsg err + in die' verbosity $ + "Error parsing additional config lines\n" + ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg + ParseOk [] r -> return r + ParseOk ws _ -> die' verbosity $ + unlines (map (showPWarning "Error parsing additional config lines") ws) + +-- | Get the differences (as a pseudo code diff) between the user's +-- '~/.cabal/config' and the one that cabal would generate if it didn't exist. +userConfigDiff :: Verbosity -> GlobalFlags -> [String] -> IO [String] +userConfigDiff verbosity globalFlags extraLines = do + userConfig <- loadRawConfig normal (globalConfigFile globalFlags) + extraConfig <- parseExtraLines verbosity extraLines + testConfig <- initialSavedConfig + return $ reverse . foldl' createDiff [] . M.toList + $ M.unionWith combine + (M.fromList . map justFst $ filterShow testConfig) + (M.fromList . 
map justSnd $ filterShow (userConfig `mappend` extraConfig)) + where + justFst (a, b) = (a, (Just b, Nothing)) + justSnd (a, b) = (a, (Nothing, Just b)) + + combine (Nothing, Just b) (Just a, Nothing) = (Just a, Just b) + combine (Just a, Nothing) (Nothing, Just b) = (Just a, Just b) + combine x y = error $ "Can't happen : userConfigDiff " + ++ show x ++ " " ++ show y + + createDiff :: [String] -> (String, (Maybe String, Maybe String)) -> [String] + createDiff acc (key, (Just a, Just b)) + | a == b = acc + | otherwise = ("+ " ++ key ++ ": " ++ b) + : ("- " ++ key ++ ": " ++ a) : acc + createDiff acc (key, (Nothing, Just b)) = ("+ " ++ key ++ ": " ++ b) : acc + createDiff acc (key, (Just a, Nothing)) = ("- " ++ key ++ ": " ++ a) : acc + createDiff acc (_, (Nothing, Nothing)) = acc + + filterShow :: SavedConfig -> [(String, String)] + filterShow cfg = map keyValueSplit + . filter (\s -> not (null s) && ':' `elem` s) + . map nonComment + . lines + $ showConfig cfg + + nonComment [] = [] + nonComment ('-':'-':_) = [] + nonComment (x:xs) = x : nonComment xs + + topAndTail = reverse . dropWhile isSpace . reverse . dropWhile isSpace + + keyValueSplit s = + let (left, right) = break (== ':') s + in (topAndTail left, topAndTail (drop 1 right)) + + +-- | Update the user's ~/.cabal/config' keeping the user's customizations. +userConfigUpdate :: Verbosity -> GlobalFlags -> [String] -> IO () +userConfigUpdate verbosity globalFlags extraLines = do + userConfig <- loadRawConfig normal (globalConfigFile globalFlags) + extraConfig <- parseExtraLines verbosity extraLines + newConfig <- initialSavedConfig + commentConf <- commentSavedConfig + cabalFile <- getConfigFilePath $ globalConfigFile globalFlags + let backup = cabalFile ++ ".backup" + notice verbosity $ "Renaming " ++ cabalFile ++ " to " ++ backup ++ "." + renameFile cabalFile backup + notice verbosity $ "Writing merged config to " ++ cabalFile ++ "." 
+ writeConfigFile cabalFile commentConf (newConfig `mappend` userConfig `mappend` extraConfig) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Configure.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Configure.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Configure.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Configure.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,460 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Configure +-- Copyright : (c) David Himmelstrup 2005, +-- Duncan Coutts 2005 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- High level interface to configuring a package. +----------------------------------------------------------------------------- +module Distribution.Client.Configure ( + configure, + configureSetupScript, + chooseCabalVersion, + checkConfigExFlags, + -- * Saved configure flags + readConfigFlagsFrom, readConfigFlags, + cabalConfigFlagsFile, + writeConfigFlagsTo, writeConfigFlags, + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Client.Dependency +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Client.SolverInstallPlan (SolverInstallPlan) +import Distribution.Client.IndexUtils as IndexUtils + ( getSourcePackages, getInstalledPackages ) +import Distribution.Client.Setup + ( ConfigExFlags(..), RepoContext(..) 
+ , configureCommand, configureExCommand, filterConfigureFlags ) +import Distribution.Client.Types as Source +import Distribution.Client.SetupWrapper + ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) +import Distribution.Client.Targets + ( userToPackageConstraint, userConstraintPackageName ) +import Distribution.Client.JobControl (Lock) + +import qualified Distribution.Solver.Types.ComponentDeps as CD +import Distribution.Solver.Types.Settings +import Distribution.Solver.Types.ConstraintSource +import Distribution.Solver.Types.LabeledPackageConstraint +import Distribution.Solver.Types.OptionalStanza +import Distribution.Solver.Types.PackageIndex + ( PackageIndex, elemByPackageName ) +import Distribution.Solver.Types.PkgConfigDb + (PkgConfigDb, readPkgConfigDb) +import Distribution.Solver.Types.SourcePackage + +import Distribution.Simple.Compiler + ( Compiler, CompilerInfo, compilerInfo, PackageDB(..), PackageDBStack ) +import Distribution.Simple.Program (ProgramDb) +import Distribution.Client.SavedFlags ( readCommandFlags, writeCommandFlags ) +import Distribution.Simple.Setup + ( ConfigFlags(..) 
+ , fromFlag, toFlag, flagToMaybe, fromFlagOrDefault ) +import Distribution.Simple.PackageIndex + ( InstalledPackageIndex, lookupPackageName ) +import Distribution.Package + ( Package(..), packageName, PackageId ) +import Distribution.Types.Dependency + ( Dependency(..), thisPackageVersion ) +import qualified Distribution.PackageDescription as PkgDesc +import Distribution.PackageDescription.Parsec + ( readGenericPackageDescription ) +import Distribution.PackageDescription.Configuration + ( finalizePD ) +import Distribution.Version + ( Version, mkVersion, anyVersion, thisVersion + , VersionRange, orLaterVersion ) +import Distribution.Simple.Utils as Utils + ( warn, notice, debug, die' + , defaultPackageDesc ) +import Distribution.System + ( Platform ) +import Distribution.Text ( display ) +import Distribution.Verbosity as Verbosity + ( Verbosity ) + +import System.FilePath ( (</>) ) + +-- | Choose the Cabal version such that the setup scripts compiled against this +-- version will support the given command-line flags. +chooseCabalVersion :: ConfigExFlags -> Maybe Version -> VersionRange +chooseCabalVersion configExFlags maybeVersion = + maybe defaultVersionRange thisVersion maybeVersion + where + -- Cabal < 1.19.2 doesn't support '--exact-configuration' which is needed + -- for '--allow-newer' to work. 
+ allowNewer = isRelaxDeps + (maybe mempty unAllowNewer $ configAllowNewer configExFlags) + allowOlder = isRelaxDeps + (maybe mempty unAllowOlder $ configAllowOlder configExFlags) + + defaultVersionRange = if allowOlder || allowNewer + then orLaterVersion (mkVersion [1,19,2]) + else anyVersion + +-- | Configure the package found in the local directory +configure :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> Platform + -> ProgramDb + -> ConfigFlags + -> ConfigExFlags + -> [String] + -> IO () +configure verbosity packageDBs repoCtxt comp platform progdb + configFlags configExFlags extraArgs = do + + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb + sourcePkgDb <- getSourcePackages verbosity repoCtxt + pkgConfigDb <- readPkgConfigDb verbosity progdb + + checkConfigExFlags verbosity installedPkgIndex + (packageIndex sourcePkgDb) configExFlags + + progress <- planLocalPackage verbosity comp platform configFlags configExFlags + installedPkgIndex sourcePkgDb pkgConfigDb + + notice verbosity "Resolving dependencies..." + maybePlan <- foldProgress logMsg (return . Left) (return . Right) + progress + case maybePlan of + Left message -> do + warn verbosity $ + "solver failed to find a solution:\n" + ++ message + ++ "\nTrying configure anyway." + setupWrapper verbosity (setupScriptOptions installedPkgIndex Nothing) + Nothing configureCommand (const configFlags) (const extraArgs) + + Right installPlan0 -> + let installPlan = InstallPlan.configureInstallPlan configFlags installPlan0 + in case fst (InstallPlan.ready installPlan) of + [pkg@(ReadyPackage + (ConfiguredPackage _ (SourcePackage _ _ (LocalUnpackedPackage _) _) + _ _ _))] -> do + configurePackage verbosity + platform (compilerInfo comp) + (setupScriptOptions installedPkgIndex (Just pkg)) + configFlags pkg extraArgs + + _ -> die' verbosity $ "internal error: configure install plan should have exactly " + ++ "one local ready package." 
+ + where + setupScriptOptions :: InstalledPackageIndex + -> Maybe ReadyPackage + -> SetupScriptOptions + setupScriptOptions = + configureSetupScript + packageDBs + comp + platform + progdb + (fromFlagOrDefault + (useDistPref defaultSetupScriptOptions) + (configDistPref configFlags)) + (chooseCabalVersion + configExFlags + (flagToMaybe (configCabalVersion configExFlags))) + Nothing + False + + logMsg message rest = debug verbosity message >> rest + +configureSetupScript :: PackageDBStack + -> Compiler + -> Platform + -> ProgramDb + -> FilePath + -> VersionRange + -> Maybe Lock + -> Bool + -> InstalledPackageIndex + -> Maybe ReadyPackage + -> SetupScriptOptions +configureSetupScript packageDBs + comp + platform + progdb + distPref + cabalVersion + lock + forceExternal + index + mpkg + = SetupScriptOptions { + useCabalVersion = cabalVersion + , useCabalSpecVersion = Nothing + , useCompiler = Just comp + , usePlatform = Just platform + , usePackageDB = packageDBs' + , usePackageIndex = index' + , useProgramDb = progdb + , useDistPref = distPref + , useLoggingHandle = Nothing + , useWorkingDir = Nothing + , useExtraPathEnv = [] + , useExtraEnvOverrides = [] + , setupCacheLock = lock + , useWin32CleanHack = False + , forceExternalSetupMethod = forceExternal + -- If we have explicit setup dependencies, list them; otherwise, we give + -- the empty list of dependencies; ideally, we would fix the version of + -- Cabal here, so that we no longer need the special case for that in + -- `compileSetupExecutable` in `externalSetupMethod`, but we don't yet + -- know the version of Cabal at this point, but only find this there. + -- Therefore, for now, we just leave this blank. 
+ , useDependencies = fromMaybe [] explicitSetupDeps + , useDependenciesExclusive = not defaultSetupDeps && isJust explicitSetupDeps + , useVersionMacros = not defaultSetupDeps && isJust explicitSetupDeps + , isInteractive = False + } + where + -- When we are compiling a legacy setup script without an explicit + -- setup stanza, we typically want to allow the UserPackageDB for + -- finding the Cabal lib when compiling any Setup.hs even if we're doing + -- a global install. However we also allow looking in a specific package + -- db. + packageDBs' :: PackageDBStack + index' :: Maybe InstalledPackageIndex + (packageDBs', index') = + case packageDBs of + (GlobalPackageDB:dbs) | UserPackageDB `notElem` dbs + , Nothing <- explicitSetupDeps + -> (GlobalPackageDB:UserPackageDB:dbs, Nothing) + -- but if the user is using an odd db stack, don't touch it + _otherwise -> (packageDBs, Just index) + + maybeSetupBuildInfo :: Maybe PkgDesc.SetupBuildInfo + maybeSetupBuildInfo = do + ReadyPackage cpkg <- mpkg + let gpkg = packageDescription (confPkgSource cpkg) + PkgDesc.setupBuildInfo (PkgDesc.packageDescription gpkg) + + -- Was a default 'custom-setup' stanza added by 'cabal-install' itself? If + -- so, 'setup-depends' must not be exclusive. See #3199. + defaultSetupDeps :: Bool + defaultSetupDeps = maybe False PkgDesc.defaultSetupDepends + maybeSetupBuildInfo + + explicitSetupDeps :: Maybe [(InstalledPackageId, PackageId)] + explicitSetupDeps = do + -- Check if there is an explicit setup stanza. + _buildInfo <- maybeSetupBuildInfo + -- Return the setup dependencies computed by the solver + ReadyPackage cpkg <- mpkg + return [ ( cid, srcid ) + | ConfiguredId srcid (Just PkgDesc.CLibName) cid <- CD.setupDeps (confPkgDeps cpkg) + ] + +-- | Warn if any constraints or preferences name packages that are not in the +-- source package index or installed package index. 
+checkConfigExFlags :: Package pkg + => Verbosity + -> InstalledPackageIndex + -> PackageIndex pkg + -> ConfigExFlags + -> IO () +checkConfigExFlags verbosity installedPkgIndex sourcePkgIndex flags = do + unless (null unknownConstraints) $ warn verbosity $ + "Constraint refers to an unknown package: " + ++ showConstraint (head unknownConstraints) + unless (null unknownPreferences) $ warn verbosity $ + "Preference refers to an unknown package: " + ++ display (head unknownPreferences) + where + unknownConstraints = filter (unknown . userConstraintPackageName . fst) $ + configExConstraints flags + unknownPreferences = filter (unknown . \(Dependency name _) -> name) $ + configPreferences flags + unknown pkg = null (lookupPackageName installedPkgIndex pkg) + && not (elemByPackageName sourcePkgIndex pkg) + showConstraint (uc, src) = + display uc ++ " (" ++ showConstraintSource src ++ ")" + +-- | Make an 'InstallPlan' for the unpacked package in the current directory, +-- and all its dependencies. 
+-- +planLocalPackage :: Verbosity -> Compiler + -> Platform + -> ConfigFlags -> ConfigExFlags + -> InstalledPackageIndex + -> SourcePackageDb + -> PkgConfigDb + -> IO (Progress String String SolverInstallPlan) +planLocalPackage verbosity comp platform configFlags configExFlags + installedPkgIndex (SourcePackageDb _ packagePrefs) pkgConfigDb = do + pkg <- readGenericPackageDescription verbosity =<< + case flagToMaybe (configCabalFilePath configFlags) of + Nothing -> defaultPackageDesc verbosity + Just fp -> return fp + solver <- chooseSolver verbosity (fromFlag $ configSolver configExFlags) + (compilerInfo comp) + + let -- We create a local package and ask to resolve a dependency on it + localPkg = SourcePackage { + packageInfoId = packageId pkg, + packageDescription = pkg, + packageSource = LocalUnpackedPackage ".", + packageDescrOverride = Nothing + } + + testsEnabled = fromFlagOrDefault False $ configTests configFlags + benchmarksEnabled = + fromFlagOrDefault False $ configBenchmarks configFlags + + resolverParams = + removeLowerBounds + (fromMaybe (AllowOlder mempty) $ configAllowOlder configExFlags) + . removeUpperBounds + (fromMaybe (AllowNewer mempty) $ configAllowNewer configExFlags) + + . addPreferences + -- preferences from the config file or command line + [ PackageVersionPreference name ver + | Dependency name ver <- configPreferences configExFlags ] + + . addConstraints + -- version constraints from the config file or command line + -- TODO: should warn or error on constraints that are not on direct + -- deps or flag constraints not on the package in question. + [ LabeledPackageConstraint (userToPackageConstraint uc) src + | (uc, src) <- configExConstraints configExFlags ] + + . 
addConstraints + -- package flags from the config file or command line + [ let pc = PackageConstraint + (scopeToplevel $ packageName pkg) + (PackagePropertyFlags $ configConfigurationsFlags configFlags) + in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget + ] + + . addConstraints + -- '--enable-tests' and '--enable-benchmarks' constraints from + -- the config file or command line + [ let pc = PackageConstraint (scopeToplevel $ packageName pkg) . + PackagePropertyStanzas $ + [ TestStanzas | testsEnabled ] ++ + [ BenchStanzas | benchmarksEnabled ] + in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget + ] + + -- Don't solve for executables, since we use an empty source + -- package database and executables never show up in the + -- installed package index + . setSolveExecutables (SolveExecutables False) + + . setSolverVerbosity verbosity + + $ standardInstallPolicy + installedPkgIndex + -- NB: We pass in an *empty* source package database, + -- because cabal configure assumes that all dependencies + -- have already been installed + (SourcePackageDb mempty packagePrefs) + [SpecificSourcePackage localPkg] + + return (resolveDependencies platform (compilerInfo comp) pkgConfigDb solver resolverParams) + + +-- | Call an installer for an 'SourcePackage' but override the configure +-- flags with the ones given by the 'ReadyPackage'. In particular the +-- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly +-- versioned package dependencies. So we ignore any previous partial flag +-- assignment or dependency constraints and use the new ones. +-- +-- NB: when updating this function, don't forget to also update +-- 'installReadyPackage' in D.C.Install. 
+configurePackage :: Verbosity + -> Platform -> CompilerInfo + -> SetupScriptOptions + -> ConfigFlags + -> ReadyPackage + -> [String] + -> IO () +configurePackage verbosity platform comp scriptOptions configFlags + (ReadyPackage (ConfiguredPackage ipid spkg flags stanzas deps)) + extraArgs = + + setupWrapper verbosity + scriptOptions (Just pkg) configureCommand configureFlags (const extraArgs) + + where + gpkg = packageDescription spkg + configureFlags = filterConfigureFlags configFlags { + configIPID = if isJust (flagToMaybe (configIPID configFlags)) + -- Make sure cabal configure --ipid works. + then configIPID configFlags + else toFlag (display ipid), + configConfigurationsFlags = flags, + -- We generate the legacy constraints as well as the new style precise + -- deps. In the end only one set gets passed to Setup.hs configure, + -- depending on the Cabal version we are talking to. + configConstraints = [ thisPackageVersion srcid + | ConfiguredId srcid (Just PkgDesc.CLibName) _uid <- CD.nonSetupDeps deps ], + configDependencies = [ (packageName srcid, uid) + | ConfiguredId srcid (Just PkgDesc.CLibName) uid <- CD.nonSetupDeps deps ], + -- Use '--exact-configuration' if supported. + configExactConfiguration = toFlag True, + configVerbosity = toFlag verbosity, + -- NB: if the user explicitly specified + -- --enable-tests/--enable-benchmarks, always respect it. + -- (But if they didn't, let solver decide.) 
+ configBenchmarks = toFlag (BenchStanzas `elem` stanzas) + `mappend` configBenchmarks configFlags, + configTests = toFlag (TestStanzas `elem` stanzas) + `mappend` configTests configFlags + } + + pkg = case finalizePD flags (enableStanzas stanzas) + (const True) + platform comp [] gpkg of + Left _ -> error "finalizePD ReadyPackage failed" + Right (desc, _) -> desc + +-- ----------------------------------------------------------------------------- +-- * Saved configure environments and flags +-- ----------------------------------------------------------------------------- + +-- | Read saved configure flags and restore the saved environment from the +-- specified files. +readConfigFlagsFrom :: FilePath -- ^ path to saved flags file + -> IO (ConfigFlags, ConfigExFlags) +readConfigFlagsFrom flags = do + readCommandFlags flags configureExCommand + +-- | The path (relative to @--build-dir@) where the arguments to @configure@ +-- should be saved. +cabalConfigFlagsFile :: FilePath -> FilePath +cabalConfigFlagsFile dist = dist </> "cabal-config-flags" + +-- | Read saved configure flags and restore the saved environment from the +-- usual location. +readConfigFlags :: FilePath -- ^ @--build-dir@ + -> IO (ConfigFlags, ConfigExFlags) +readConfigFlags dist = + readConfigFlagsFrom (cabalConfigFlagsFile dist) + +-- | Save the configure flags and environment to the specified files. +writeConfigFlagsTo :: FilePath -- ^ path to saved flags file + -> Verbosity -> (ConfigFlags, ConfigExFlags) + -> IO () +writeConfigFlagsTo file verb flags = do + writeCommandFlags verb file configureExCommand flags + +-- | Save the build flags to the usual location. 
+writeConfigFlags :: Verbosity + -> FilePath -- ^ @--build-dir@ + -> (ConfigFlags, ConfigExFlags) -> IO () +writeConfigFlags verb dist = + writeConfigFlagsTo (cabalConfigFlagsFile dist) verb diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Dependency/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Dependency/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Dependency/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Dependency/Types.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,66 @@ +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Client.Dependency.Types ( + PreSolver(..), + Solver(..), + + PackagesPreferenceDefault(..), + + ) where + +import Data.Char + ( isAlpha, toLower ) + +import qualified Distribution.Compat.ReadP as Parse + ( pfail, munch1 ) +import Distribution.Text + ( Text(..) ) + +import Text.PrettyPrint + ( text ) +import GHC.Generics (Generic) +import Distribution.Compat.Binary (Binary(..)) + + +-- | All the solvers that can be selected. +data PreSolver = AlwaysModular + deriving (Eq, Ord, Show, Bounded, Enum, Generic) + +-- | All the solvers that can be used. +data Solver = Modular + deriving (Eq, Ord, Show, Bounded, Enum, Generic) + +instance Binary PreSolver +instance Binary Solver + +instance Text PreSolver where + disp AlwaysModular = text "modular" + parse = do + name <- Parse.munch1 isAlpha + case map toLower name of + "modular" -> return AlwaysModular + _ -> Parse.pfail + +-- | Global policy for all packages to say if we prefer package versions that +-- are already installed locally or if we just prefer the latest available. +-- +data PackagesPreferenceDefault = + + -- | Always prefer the latest version irrespective of any existing + -- installed version. 
+ -- + -- * This is the standard policy for upgrade. + -- + PreferAllLatest + + -- | Always prefer the installed versions over ones that would need to be + -- installed. Secondarily, prefer latest versions (eg the latest installed + -- version or if there are none then the latest source version). + | PreferAllInstalled + + -- | Prefer the latest version for packages that are explicitly requested + -- but prefers the installed version for any other packages. + -- + -- * This is the standard policy for install. + -- + | PreferLatestForSelected + deriving Show diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Dependency.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Dependency.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Dependency.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Dependency.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,1062 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Dependency +-- Copyright : (c) David Himmelstrup 2005, +-- Bjorn Bringert 2007 +-- Duncan Coutts 2008 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- Top level interface to dependency resolution. 
+----------------------------------------------------------------------------- +module Distribution.Client.Dependency ( + -- * The main package dependency resolver + chooseSolver, + resolveDependencies, + Progress(..), + foldProgress, + + -- * Alternate, simple resolver that does not do dependencies recursively + resolveWithoutDependencies, + + -- * Constructing resolver policies + PackageProperty(..), + PackageConstraint(..), + scopeToplevel, + PackagesPreferenceDefault(..), + PackagePreference(..), + + -- ** Standard policy + basicInstallPolicy, + standardInstallPolicy, + PackageSpecifier(..), + + -- ** Sandbox policy + applySandboxInstallPolicy, + + -- ** Extra policy options + upgradeDependencies, + reinstallTargets, + + -- ** Policy utils + addConstraints, + addPreferences, + setPreferenceDefault, + setReorderGoals, + setCountConflicts, + setIndependentGoals, + setAvoidReinstalls, + setShadowPkgs, + setStrongFlags, + setAllowBootLibInstalls, + setMaxBackjumps, + setEnableBackjumping, + setSolveExecutables, + setGoalOrder, + setSolverVerbosity, + removeLowerBounds, + removeUpperBounds, + addDefaultSetupDependencies, + addSetupCabalMinVersionConstraint, + addSetupCabalMaxVersionConstraint, + ) where + +import Distribution.Solver.Modular + ( modularResolver, SolverConfig(..), PruneAfterFirstSuccess(..) ) +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex +import Distribution.Client.SolverInstallPlan (SolverInstallPlan) +import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan +import Distribution.Client.Types + ( SourcePackageDb(SourcePackageDb) + , PackageSpecifier(..), pkgSpecifierTarget, pkgSpecifierConstraints + , UnresolvedPkgLoc, UnresolvedSourcePackage + , AllowNewer(..), AllowOlder(..), RelaxDeps(..), RelaxedDep(..) 
+ , RelaxDepScope(..), RelaxDepMod(..), RelaxDepSubject(..), isRelaxDeps + ) +import Distribution.Client.Dependency.Types + ( PreSolver(..), Solver(..) + , PackagesPreferenceDefault(..) ) +import Distribution.Client.Sandbox.Types + ( SandboxPackageInfo(..) ) +import Distribution.Package + ( PackageName, mkPackageName, PackageIdentifier(PackageIdentifier), PackageId + , Package(..), packageName, packageVersion ) +import Distribution.Types.Dependency +import qualified Distribution.PackageDescription as PD +import qualified Distribution.PackageDescription.Configuration as PD +import Distribution.PackageDescription.Configuration + ( finalizePD ) +import Distribution.Client.PackageUtils + ( externalBuildDepends ) +import Distribution.Compiler + ( CompilerInfo(..) ) +import Distribution.System + ( Platform ) +import Distribution.Client.Utils + ( duplicatesBy, mergeBy, MergeResult(..) ) +import Distribution.Simple.Utils + ( comparing ) +import Distribution.Simple.Setup + ( asBool ) +import Distribution.Text + ( display ) +import Distribution.Verbosity + ( normal, Verbosity ) +import Distribution.Version +import qualified Distribution.Compat.Graph as Graph + +import Distribution.Solver.Types.ComponentDeps (ComponentDeps) +import qualified Distribution.Solver.Types.ComponentDeps as CD +import Distribution.Solver.Types.ConstraintSource +import Distribution.Solver.Types.DependencyResolver +import Distribution.Solver.Types.InstalledPreference +import Distribution.Solver.Types.LabeledPackageConstraint +import Distribution.Solver.Types.OptionalStanza +import Distribution.Solver.Types.PackageConstraint +import Distribution.Solver.Types.PackagePath +import Distribution.Solver.Types.PackagePreferences +import qualified Distribution.Solver.Types.PackageIndex as PackageIndex +import Distribution.Solver.Types.PkgConfigDb (PkgConfigDb) +import Distribution.Solver.Types.Progress +import Distribution.Solver.Types.ResolverPackage +import Distribution.Solver.Types.Settings +import 
Distribution.Solver.Types.SolverId +import Distribution.Solver.Types.SolverPackage +import Distribution.Solver.Types.SourcePackage +import Distribution.Solver.Types.Variable + +import Data.List + ( foldl', sort, sortBy, nubBy, maximumBy, intercalate, nub ) +import Data.Function (on) +import Data.Maybe (fromMaybe, mapMaybe) +import qualified Data.Map as Map +import qualified Data.Set as Set +import Data.Set (Set) +import Control.Exception + ( assert ) + + +-- ------------------------------------------------------------ +-- * High level planner policy +-- ------------------------------------------------------------ + +-- | The set of parameters to the dependency resolver. These parameters are +-- relatively low level but many kinds of high level policies can be +-- implemented in terms of adjustments to the parameters. +-- +data DepResolverParams = DepResolverParams { + depResolverTargets :: Set PackageName, + depResolverConstraints :: [LabeledPackageConstraint], + depResolverPreferences :: [PackagePreference], + depResolverPreferenceDefault :: PackagesPreferenceDefault, + depResolverInstalledPkgIndex :: InstalledPackageIndex, + depResolverSourcePkgIndex :: PackageIndex.PackageIndex UnresolvedSourcePackage, + depResolverReorderGoals :: ReorderGoals, + depResolverCountConflicts :: CountConflicts, + depResolverIndependentGoals :: IndependentGoals, + depResolverAvoidReinstalls :: AvoidReinstalls, + depResolverShadowPkgs :: ShadowPkgs, + depResolverStrongFlags :: StrongFlags, + + -- | Whether to allow base and its dependencies to be installed. + depResolverAllowBootLibInstalls :: AllowBootLibInstalls, + + depResolverMaxBackjumps :: Maybe Int, + depResolverEnableBackjumping :: EnableBackjumping, + -- | Whether or not to solve for dependencies on executables. + -- This should be true, except in the legacy code path where + -- we can't tell if an executable has been installed or not, + -- so we shouldn't solve for them. See #3875. 
+ depResolverSolveExecutables :: SolveExecutables, + + -- | Function to override the solver's goal-ordering heuristics. + depResolverGoalOrder :: Maybe (Variable QPN -> Variable QPN -> Ordering), + depResolverVerbosity :: Verbosity + } + +showDepResolverParams :: DepResolverParams -> String +showDepResolverParams p = + "targets: " ++ intercalate ", " (map display $ Set.toList (depResolverTargets p)) + ++ "\nconstraints: " + ++ concatMap (("\n " ++) . showLabeledConstraint) + (depResolverConstraints p) + ++ "\npreferences: " + ++ concatMap (("\n " ++) . showPackagePreference) + (depResolverPreferences p) + ++ "\nstrategy: " ++ show (depResolverPreferenceDefault p) + ++ "\nreorder goals: " ++ show (asBool (depResolverReorderGoals p)) + ++ "\ncount conflicts: " ++ show (asBool (depResolverCountConflicts p)) + ++ "\nindependent goals: " ++ show (asBool (depResolverIndependentGoals p)) + ++ "\navoid reinstalls: " ++ show (asBool (depResolverAvoidReinstalls p)) + ++ "\nshadow packages: " ++ show (asBool (depResolverShadowPkgs p)) + ++ "\nstrong flags: " ++ show (asBool (depResolverStrongFlags p)) + ++ "\nallow boot library installs: " ++ show (asBool (depResolverAllowBootLibInstalls p)) + ++ "\nmax backjumps: " ++ maybe "infinite" show + (depResolverMaxBackjumps p) + where + showLabeledConstraint :: LabeledPackageConstraint -> String + showLabeledConstraint (LabeledPackageConstraint pc src) = + showPackageConstraint pc ++ " (" ++ showConstraintSource src ++ ")" + +-- | A package selection preference for a particular package. +-- +-- Preferences are soft constraints that the dependency resolver should try to +-- respect where possible. It is not specified if preferences on some packages +-- are more important than others. +-- +data PackagePreference = + + -- | A suggested constraint on the version number. + PackageVersionPreference PackageName VersionRange + + -- | If we prefer versions of packages that are already installed. 
+ | PackageInstalledPreference PackageName InstalledPreference + + -- | If we would prefer to enable these optional stanzas + -- (i.e. test suites and/or benchmarks) + | PackageStanzasPreference PackageName [OptionalStanza] + + +-- | Provide a textual representation of a package preference +-- for debugging purposes. +-- +showPackagePreference :: PackagePreference -> String +showPackagePreference (PackageVersionPreference pn vr) = + display pn ++ " " ++ display (simplifyVersionRange vr) +showPackagePreference (PackageInstalledPreference pn ip) = + display pn ++ " " ++ show ip +showPackagePreference (PackageStanzasPreference pn st) = + display pn ++ " " ++ show st + +basicDepResolverParams :: InstalledPackageIndex + -> PackageIndex.PackageIndex UnresolvedSourcePackage + -> DepResolverParams +basicDepResolverParams installedPkgIndex sourcePkgIndex = + DepResolverParams { + depResolverTargets = Set.empty, + depResolverConstraints = [], + depResolverPreferences = [], + depResolverPreferenceDefault = PreferLatestForSelected, + depResolverInstalledPkgIndex = installedPkgIndex, + depResolverSourcePkgIndex = sourcePkgIndex, + depResolverReorderGoals = ReorderGoals False, + depResolverCountConflicts = CountConflicts True, + depResolverIndependentGoals = IndependentGoals False, + depResolverAvoidReinstalls = AvoidReinstalls False, + depResolverShadowPkgs = ShadowPkgs False, + depResolverStrongFlags = StrongFlags False, + depResolverAllowBootLibInstalls = AllowBootLibInstalls False, + depResolverMaxBackjumps = Nothing, + depResolverEnableBackjumping = EnableBackjumping True, + depResolverSolveExecutables = SolveExecutables True, + depResolverGoalOrder = Nothing, + depResolverVerbosity = normal + } + +addTargets :: [PackageName] + -> DepResolverParams -> DepResolverParams +addTargets extraTargets params = + params { + depResolverTargets = Set.fromList extraTargets `Set.union` depResolverTargets params + } + +addConstraints :: [LabeledPackageConstraint] + -> DepResolverParams 
-> DepResolverParams +addConstraints extraConstraints params = + params { + depResolverConstraints = extraConstraints + ++ depResolverConstraints params + } + +addPreferences :: [PackagePreference] + -> DepResolverParams -> DepResolverParams +addPreferences extraPreferences params = + params { + depResolverPreferences = extraPreferences + ++ depResolverPreferences params + } + +setPreferenceDefault :: PackagesPreferenceDefault + -> DepResolverParams -> DepResolverParams +setPreferenceDefault preferenceDefault params = + params { + depResolverPreferenceDefault = preferenceDefault + } + +setReorderGoals :: ReorderGoals -> DepResolverParams -> DepResolverParams +setReorderGoals reorder params = + params { + depResolverReorderGoals = reorder + } + +setCountConflicts :: CountConflicts -> DepResolverParams -> DepResolverParams +setCountConflicts count params = + params { + depResolverCountConflicts = count + } + +setIndependentGoals :: IndependentGoals -> DepResolverParams -> DepResolverParams +setIndependentGoals indep params = + params { + depResolverIndependentGoals = indep + } + +setAvoidReinstalls :: AvoidReinstalls -> DepResolverParams -> DepResolverParams +setAvoidReinstalls avoid params = + params { + depResolverAvoidReinstalls = avoid + } + +setShadowPkgs :: ShadowPkgs -> DepResolverParams -> DepResolverParams +setShadowPkgs shadow params = + params { + depResolverShadowPkgs = shadow + } + +setStrongFlags :: StrongFlags -> DepResolverParams -> DepResolverParams +setStrongFlags sf params = + params { + depResolverStrongFlags = sf + } + +setAllowBootLibInstalls :: AllowBootLibInstalls -> DepResolverParams -> DepResolverParams +setAllowBootLibInstalls i params = + params { + depResolverAllowBootLibInstalls = i + } + +setMaxBackjumps :: Maybe Int -> DepResolverParams -> DepResolverParams +setMaxBackjumps n params = + params { + depResolverMaxBackjumps = n + } + +setEnableBackjumping :: EnableBackjumping -> DepResolverParams -> DepResolverParams +setEnableBackjumping 
b params = + params { + depResolverEnableBackjumping = b + } + +setSolveExecutables :: SolveExecutables -> DepResolverParams -> DepResolverParams +setSolveExecutables b params = + params { + depResolverSolveExecutables = b + } + +setGoalOrder :: Maybe (Variable QPN -> Variable QPN -> Ordering) + -> DepResolverParams + -> DepResolverParams +setGoalOrder order params = + params { + depResolverGoalOrder = order + } + +setSolverVerbosity :: Verbosity -> DepResolverParams -> DepResolverParams +setSolverVerbosity verbosity params = + params { + depResolverVerbosity = verbosity + } + +-- | Some packages are specific to a given compiler version and should never be +-- upgraded. +dontUpgradeNonUpgradeablePackages :: DepResolverParams -> DepResolverParams +dontUpgradeNonUpgradeablePackages params = + addConstraints extraConstraints params + where + extraConstraints = + [ LabeledPackageConstraint + (PackageConstraint (ScopeAnyQualifier pkgname) PackagePropertyInstalled) + ConstraintSourceNonUpgradeablePackage + | Set.notMember (mkPackageName "base") (depResolverTargets params) + -- If you change this enumeration, make sure to update the list in + -- "Distribution.Solver.Modular.Solver" as well + , pkgname <- [ mkPackageName "base" + , mkPackageName "ghc-prim" + , mkPackageName "integer-gmp" + , mkPackageName "integer-simple" + , mkPackageName "template-haskell" + ] + , isInstalled pkgname ] + + isInstalled = not . null + . 
InstalledPackageIndex.lookupPackageName + (depResolverInstalledPkgIndex params) + +addSourcePackages :: [UnresolvedSourcePackage] + -> DepResolverParams -> DepResolverParams +addSourcePackages pkgs params = + params { + depResolverSourcePkgIndex = + foldl (flip PackageIndex.insert) + (depResolverSourcePkgIndex params) pkgs + } + +hideInstalledPackagesSpecificBySourcePackageId :: [PackageId] + -> DepResolverParams + -> DepResolverParams +hideInstalledPackagesSpecificBySourcePackageId pkgids params = + --TODO: this should work using exclude constraints instead + params { + depResolverInstalledPkgIndex = + foldl' (flip InstalledPackageIndex.deleteSourcePackageId) + (depResolverInstalledPkgIndex params) pkgids + } + +hideInstalledPackagesAllVersions :: [PackageName] + -> DepResolverParams -> DepResolverParams +hideInstalledPackagesAllVersions pkgnames params = + --TODO: this should work using exclude constraints instead + params { + depResolverInstalledPkgIndex = + foldl' (flip InstalledPackageIndex.deletePackageName) + (depResolverInstalledPkgIndex params) pkgnames + } + + +-- | Remove upper bounds in dependencies using the policy specified by the +-- 'AllowNewer' argument (all/some/none). +-- +-- Note: It's important to apply 'removeUpperBounds' after +-- 'addSourcePackages'. Otherwise, the packages inserted by +-- 'addSourcePackages' won't have upper bounds in dependencies relaxed. 
+-- +removeUpperBounds :: AllowNewer -> DepResolverParams -> DepResolverParams +removeUpperBounds (AllowNewer relDeps) = removeBounds RelaxUpper relDeps + +-- | Dual of 'removeUpperBounds' +removeLowerBounds :: AllowOlder -> DepResolverParams -> DepResolverParams +removeLowerBounds (AllowOlder relDeps) = removeBounds RelaxLower relDeps + +data RelaxKind = RelaxLower | RelaxUpper + +-- | Common internal implementation of 'removeLowerBounds'/'removeUpperBounds' +removeBounds :: RelaxKind -> RelaxDeps -> DepResolverParams -> DepResolverParams +removeBounds _ rd params | not (isRelaxDeps rd) = params -- no-op optimisation +removeBounds relKind relDeps params = + params { + depResolverSourcePkgIndex = sourcePkgIndex' + } + where + sourcePkgIndex' = fmap relaxDeps $ depResolverSourcePkgIndex params + + relaxDeps :: UnresolvedSourcePackage -> UnresolvedSourcePackage + relaxDeps srcPkg = srcPkg { + packageDescription = relaxPackageDeps relKind relDeps + (packageDescription srcPkg) + } + +-- | Relax the dependencies of this package if needed. 
+-- +-- Helper function used by 'removeBounds' +relaxPackageDeps :: RelaxKind + -> RelaxDeps + -> PD.GenericPackageDescription -> PD.GenericPackageDescription +relaxPackageDeps _ rd gpd | not (isRelaxDeps rd) = gpd -- subsumed by no-op case in 'removeBounds' +relaxPackageDeps relKind RelaxDepsAll gpd = PD.transformAllBuildDepends relaxAll gpd + where + relaxAll :: Dependency -> Dependency + relaxAll (Dependency pkgName verRange) = + Dependency pkgName (removeBound relKind RelaxDepModNone verRange) + +relaxPackageDeps relKind (RelaxDepsSome depsToRelax0) gpd = + PD.transformAllBuildDepends relaxSome gpd + where + thisPkgName = packageName gpd + thisPkgId = packageId gpd + depsToRelax = Map.fromList $ mapMaybe f depsToRelax0 + + f :: RelaxedDep -> Maybe (RelaxDepSubject,RelaxDepMod) + f (RelaxedDep scope rdm p) = case scope of + RelaxDepScopeAll -> Just (p,rdm) + RelaxDepScopePackage p0 + | p0 == thisPkgName -> Just (p,rdm) + | otherwise -> Nothing + RelaxDepScopePackageId p0 + | p0 == thisPkgId -> Just (p,rdm) + | otherwise -> Nothing + + relaxSome :: Dependency -> Dependency + relaxSome d@(Dependency depName verRange) + | Just relMod <- Map.lookup RelaxDepSubjectAll depsToRelax = + -- a '*'-subject acts absorbing, for consistency with + -- the 'Semigroup RelaxDeps' instance + Dependency depName (removeBound relKind relMod verRange) + | Just relMod <- Map.lookup (RelaxDepSubjectPkg depName) depsToRelax = + Dependency depName (removeBound relKind relMod verRange) + | otherwise = d -- no-op + +-- | Internal helper for 'relaxPackageDeps' +removeBound :: RelaxKind -> RelaxDepMod -> VersionRange -> VersionRange +removeBound RelaxLower RelaxDepModNone = removeLowerBound +removeBound RelaxUpper RelaxDepModNone = removeUpperBound +removeBound relKind RelaxDepModCaret = hyloVersionRange embed projectVersionRange + where + embed (MajorBoundVersionF v) = caretTransformation v (majorUpperBound v) + embed vr = embedVersionRange vr + + -- This function is the interesting part as 
it defines the meaning + -- of 'RelaxDepModCaret', i.e. to transform only @^>=@ constraints; + caretTransformation l u = case relKind of + RelaxUpper -> orLaterVersion l -- rewrite @^>= x.y.z@ into @>= x.y.z@ + RelaxLower -> earlierVersion u -- rewrite @^>= x.y.z@ into @< x.(y+1)@ + +-- | Supply defaults for packages without explicit Setup dependencies +-- +-- Note: It's important to apply 'addDefaultSetupDepends' after +-- 'addSourcePackages'. Otherwise, the packages inserted by +-- 'addSourcePackages' won't have upper bounds in dependencies relaxed. +-- +addDefaultSetupDependencies :: (UnresolvedSourcePackage -> Maybe [Dependency]) + -> DepResolverParams -> DepResolverParams +addDefaultSetupDependencies defaultSetupDeps params = + params { + depResolverSourcePkgIndex = + fmap applyDefaultSetupDeps (depResolverSourcePkgIndex params) + } + where + applyDefaultSetupDeps :: UnresolvedSourcePackage -> UnresolvedSourcePackage + applyDefaultSetupDeps srcpkg = + srcpkg { + packageDescription = gpkgdesc { + PD.packageDescription = pkgdesc { + PD.setupBuildInfo = + case PD.setupBuildInfo pkgdesc of + Just sbi -> Just sbi + Nothing -> case defaultSetupDeps srcpkg of + Nothing -> Nothing + Just deps | isCustom -> Just PD.SetupBuildInfo { + PD.defaultSetupDepends = True, + PD.setupDepends = deps + } + | otherwise -> Nothing + } + } + } + where + isCustom = PD.buildType pkgdesc == PD.Custom + gpkgdesc = packageDescription srcpkg + pkgdesc = PD.packageDescription gpkgdesc + +-- | If a package has a custom setup then we need to add a setup-depends +-- on Cabal. 
+-- +addSetupCabalMinVersionConstraint :: Version + -> DepResolverParams -> DepResolverParams +addSetupCabalMinVersionConstraint minVersion = + addConstraints + [ LabeledPackageConstraint + (PackageConstraint (ScopeAnySetupQualifier cabalPkgname) + (PackagePropertyVersion $ orLaterVersion minVersion)) + ConstraintSetupCabalMinVersion + ] + where + cabalPkgname = mkPackageName "Cabal" + +-- | Variant of 'addSetupCabalMinVersionConstraint' which sets an +-- upper bound on @setup.Cabal@ labeled with 'ConstraintSetupCabalMaxVersion'. +-- +addSetupCabalMaxVersionConstraint :: Version + -> DepResolverParams -> DepResolverParams +addSetupCabalMaxVersionConstraint maxVersion = + addConstraints + [ LabeledPackageConstraint + (PackageConstraint (ScopeAnySetupQualifier cabalPkgname) + (PackagePropertyVersion $ earlierVersion maxVersion)) + ConstraintSetupCabalMaxVersion + ] + where + cabalPkgname = mkPackageName "Cabal" + + +upgradeDependencies :: DepResolverParams -> DepResolverParams +upgradeDependencies = setPreferenceDefault PreferAllLatest + + +reinstallTargets :: DepResolverParams -> DepResolverParams +reinstallTargets params = + hideInstalledPackagesAllVersions (Set.toList $ depResolverTargets params) params + + +-- | A basic solver policy on which all others are built. +-- +basicInstallPolicy :: InstalledPackageIndex + -> SourcePackageDb + -> [PackageSpecifier UnresolvedSourcePackage] + -> DepResolverParams +basicInstallPolicy + installedPkgIndex (SourcePackageDb sourcePkgIndex sourcePkgPrefs) + pkgSpecifiers + + = addPreferences + [ PackageVersionPreference name ver + | (name, ver) <- Map.toList sourcePkgPrefs ] + + . addConstraints + (concatMap pkgSpecifierConstraints pkgSpecifiers) + + . addTargets + (map pkgSpecifierTarget pkgSpecifiers) + + . hideInstalledPackagesSpecificBySourcePackageId + [ packageId pkg | SpecificSourcePackage pkg <- pkgSpecifiers ] + + . 
addSourcePackages + [ pkg | SpecificSourcePackage pkg <- pkgSpecifiers ] + + $ basicDepResolverParams + installedPkgIndex sourcePkgIndex + + +-- | The policy used by all the standard commands, install, fetch, freeze etc +-- (but not the new-build and related commands). +-- +-- It extends the 'basicInstallPolicy' with a policy on setup deps. +-- +standardInstallPolicy :: InstalledPackageIndex + -> SourcePackageDb + -> [PackageSpecifier UnresolvedSourcePackage] + -> DepResolverParams +standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers + + = addDefaultSetupDependencies mkDefaultSetupDeps + + $ basicInstallPolicy + installedPkgIndex sourcePkgDb pkgSpecifiers + + where + -- Force Cabal >= 1.24 dep when the package is affected by #3199. + mkDefaultSetupDeps :: UnresolvedSourcePackage -> Maybe [Dependency] + mkDefaultSetupDeps srcpkg | affected = + Just [Dependency (mkPackageName "Cabal") + (orLaterVersion $ mkVersion [1,24])] + | otherwise = Nothing + where + gpkgdesc = packageDescription srcpkg + pkgdesc = PD.packageDescription gpkgdesc + bt = PD.buildType pkgdesc + affected = bt == PD.Custom && hasBuildableFalse gpkgdesc + + -- Does this package contain any components with non-empty 'build-depends' + -- and a 'buildable' field that could potentially be set to 'False'? False + -- positives are possible. + hasBuildableFalse :: PD.GenericPackageDescription -> Bool + hasBuildableFalse gpkg = + not (all alwaysTrue (zipWith PD.cOr buildableConditions noDepConditions)) + where + buildableConditions = PD.extractConditions PD.buildable gpkg + noDepConditions = PD.extractConditions + (null . 
PD.targetBuildDepends) gpkg + alwaysTrue (PD.Lit True) = True + alwaysTrue _ = False + + +applySandboxInstallPolicy :: SandboxPackageInfo + -> DepResolverParams + -> DepResolverParams +applySandboxInstallPolicy + (SandboxPackageInfo modifiedDeps otherDeps allSandboxPkgs _allDeps) + params + + = addPreferences [ PackageInstalledPreference n PreferInstalled + | n <- installedNotModified ] + + . addTargets installedNotModified + + . addPreferences + [ PackageVersionPreference (packageName pkg) + (thisVersion (packageVersion pkg)) | pkg <- otherDeps ] + + . addConstraints + [ let pc = PackageConstraint + (scopeToplevel $ packageName pkg) + (PackagePropertyVersion $ thisVersion (packageVersion pkg)) + in LabeledPackageConstraint pc ConstraintSourceModifiedAddSourceDep + | pkg <- modifiedDeps ] + + . addTargets [ packageName pkg | pkg <- modifiedDeps ] + + . hideInstalledPackagesSpecificBySourcePackageId + [ packageId pkg | pkg <- modifiedDeps ] + + -- We don't need to add source packages for add-source deps to the + -- 'installedPkgIndex' since 'getSourcePackages' did that for us. + + $ params + + where + installedPkgIds = + map fst . InstalledPackageIndex.allPackagesBySourcePackageId + $ allSandboxPkgs + modifiedPkgIds = map packageId modifiedDeps + installedNotModified = [ packageName pkg | pkg <- installedPkgIds, + pkg `notElem` modifiedPkgIds ] + +-- ------------------------------------------------------------ +-- * Interface to the standard resolver +-- ------------------------------------------------------------ + +chooseSolver :: Verbosity -> PreSolver -> CompilerInfo -> IO Solver +chooseSolver _verbosity preSolver _cinfo = + case preSolver of + AlwaysModular -> do + return Modular + +runSolver :: Solver -> SolverConfig -> DependencyResolver UnresolvedPkgLoc +runSolver Modular = modularResolver + +-- | Run the dependency solver. 
+-- +-- Since this is potentially an expensive operation, the result is wrapped in a +-- a 'Progress' structure that can be unfolded to provide progress information, +-- logging messages and the final result or an error. +-- +resolveDependencies :: Platform + -> CompilerInfo + -> PkgConfigDb + -> Solver + -> DepResolverParams + -> Progress String String SolverInstallPlan + + --TODO: is this needed here? see dontUpgradeNonUpgradeablePackages +resolveDependencies platform comp _pkgConfigDB _solver params + | Set.null (depResolverTargets params) + = return (validateSolverResult platform comp indGoals []) + where + indGoals = depResolverIndependentGoals params + +resolveDependencies platform comp pkgConfigDB solver params = + + Step (showDepResolverParams finalparams) + $ fmap (validateSolverResult platform comp indGoals) + $ runSolver solver (SolverConfig reordGoals cntConflicts + indGoals noReinstalls + shadowing strFlags allowBootLibs maxBkjumps enableBj + solveExes order verbosity (PruneAfterFirstSuccess False)) + platform comp installedPkgIndex sourcePkgIndex + pkgConfigDB preferences constraints targets + where + + finalparams @ (DepResolverParams + targets constraints + prefs defpref + installedPkgIndex + sourcePkgIndex + reordGoals + cntConflicts + indGoals + noReinstalls + shadowing + strFlags + allowBootLibs + maxBkjumps + enableBj + solveExes + order + verbosity) = + if asBool (depResolverAllowBootLibInstalls params) + then params + else dontUpgradeNonUpgradeablePackages params + + preferences = interpretPackagesPreference targets defpref prefs + + +-- | Give an interpretation to the global 'PackagesPreference' as +-- specific per-package 'PackageVersionPreference'. 
+-- +interpretPackagesPreference :: Set PackageName + -> PackagesPreferenceDefault + -> [PackagePreference] + -> (PackageName -> PackagePreferences) +interpretPackagesPreference selected defaultPref prefs = + \pkgname -> PackagePreferences (versionPref pkgname) + (installPref pkgname) + (stanzasPref pkgname) + where + versionPref pkgname = + fromMaybe [anyVersion] (Map.lookup pkgname versionPrefs) + versionPrefs = Map.fromListWith (++) + [(pkgname, [pref]) + | PackageVersionPreference pkgname pref <- prefs] + + installPref pkgname = + fromMaybe (installPrefDefault pkgname) (Map.lookup pkgname installPrefs) + installPrefs = Map.fromList + [ (pkgname, pref) + | PackageInstalledPreference pkgname pref <- prefs ] + installPrefDefault = case defaultPref of + PreferAllLatest -> const PreferLatest + PreferAllInstalled -> const PreferInstalled + PreferLatestForSelected -> \pkgname -> + -- When you say cabal install foo, what you really mean is, prefer the + -- latest version of foo, but the installed version of everything else + if pkgname `Set.member` selected then PreferLatest + else PreferInstalled + + stanzasPref pkgname = + fromMaybe [] (Map.lookup pkgname stanzasPrefs) + stanzasPrefs = Map.fromListWith (\a b -> nub (a ++ b)) + [ (pkgname, pref) + | PackageStanzasPreference pkgname pref <- prefs ] + + +-- ------------------------------------------------------------ +-- * Checking the result of the solver +-- ------------------------------------------------------------ + +-- | Make an install plan from the output of the dep resolver. +-- It checks that the plan is valid, or it's an error in the dep resolver. 
+-- +validateSolverResult :: Platform + -> CompilerInfo + -> IndependentGoals + -> [ResolverPackage UnresolvedPkgLoc] + -> SolverInstallPlan +validateSolverResult platform comp indepGoals pkgs = + case planPackagesProblems platform comp pkgs of + [] -> case SolverInstallPlan.new indepGoals graph of + Right plan -> plan + Left problems -> error (formatPlanProblems problems) + problems -> error (formatPkgProblems problems) + + where + graph = Graph.fromDistinctList pkgs + + formatPkgProblems = formatProblemMessage . map showPlanPackageProblem + formatPlanProblems = formatProblemMessage . map SolverInstallPlan.showPlanProblem + + formatProblemMessage problems = + unlines $ + "internal error: could not construct a valid install plan." + : "The proposed (invalid) plan contained the following problems:" + : problems + ++ "Proposed plan:" + : [SolverInstallPlan.showPlanIndex pkgs] + + +data PlanPackageProblem = + InvalidConfiguredPackage (SolverPackage UnresolvedPkgLoc) + [PackageProblem] + | DuplicatePackageSolverId SolverId [ResolverPackage UnresolvedPkgLoc] + +showPlanPackageProblem :: PlanPackageProblem -> String +showPlanPackageProblem (InvalidConfiguredPackage pkg packageProblems) = + "Package " ++ display (packageId pkg) + ++ " has an invalid configuration, in particular:\n" + ++ unlines [ " " ++ showPackageProblem problem + | problem <- packageProblems ] +showPlanPackageProblem (DuplicatePackageSolverId pid dups) = + "Package " ++ display (packageId pid) ++ " has " + ++ show (length dups) ++ " duplicate instances." 
+ +planPackagesProblems :: Platform -> CompilerInfo + -> [ResolverPackage UnresolvedPkgLoc] + -> [PlanPackageProblem] +planPackagesProblems platform cinfo pkgs = + [ InvalidConfiguredPackage pkg packageProblems + | Configured pkg <- pkgs + , let packageProblems = configuredPackageProblems platform cinfo pkg + , not (null packageProblems) ] + ++ [ DuplicatePackageSolverId (Graph.nodeKey (head dups)) dups + | dups <- duplicatesBy (comparing Graph.nodeKey) pkgs ] + +data PackageProblem = DuplicateFlag PD.FlagName + | MissingFlag PD.FlagName + | ExtraFlag PD.FlagName + | DuplicateDeps [PackageId] + | MissingDep Dependency + | ExtraDep PackageId + | InvalidDep Dependency PackageId + +showPackageProblem :: PackageProblem -> String +showPackageProblem (DuplicateFlag flag) = + "duplicate flag in the flag assignment: " ++ PD.unFlagName flag + +showPackageProblem (MissingFlag flag) = + "missing an assignment for the flag: " ++ PD.unFlagName flag + +showPackageProblem (ExtraFlag flag) = + "extra flag given that is not used by the package: " ++ PD.unFlagName flag + +showPackageProblem (DuplicateDeps pkgids) = + "duplicate packages specified as selected dependencies: " + ++ intercalate ", " (map display pkgids) + +showPackageProblem (MissingDep dep) = + "the package has a dependency " ++ display dep + ++ " but no package has been selected to satisfy it." + +showPackageProblem (ExtraDep pkgid) = + "the package configuration specifies " ++ display pkgid + ++ " but (with the given flag assignment) the package does not actually" + ++ " depend on any version of that package." + +showPackageProblem (InvalidDep dep pkgid) = + "the package depends on " ++ display dep + ++ " but the configuration specifies " ++ display pkgid + ++ " which does not satisfy the dependency." + +-- | A 'ConfiguredPackage' is valid if the flag assignment is total and if +-- in the configuration given by the flag assignment, all the package +-- dependencies are satisfied by the specified packages. 
+-- +configuredPackageProblems :: Platform -> CompilerInfo + -> SolverPackage UnresolvedPkgLoc -> [PackageProblem] +configuredPackageProblems platform cinfo + (SolverPackage pkg specifiedFlags stanzas specifiedDeps' _specifiedExeDeps') = + [ DuplicateFlag flag + | flag <- PD.findDuplicateFlagAssignments specifiedFlags ] + ++ [ MissingFlag flag | OnlyInLeft flag <- mergedFlags ] + ++ [ ExtraFlag flag | OnlyInRight flag <- mergedFlags ] + ++ [ DuplicateDeps pkgs + | pkgs <- CD.nonSetupDeps (fmap (duplicatesBy (comparing packageName)) + specifiedDeps) ] + ++ [ MissingDep dep | OnlyInLeft dep <- mergedDeps ] + ++ [ ExtraDep pkgid | OnlyInRight pkgid <- mergedDeps ] + ++ [ InvalidDep dep pkgid | InBoth dep pkgid <- mergedDeps + , not (packageSatisfiesDependency pkgid dep) ] + -- TODO: sanity tests on executable deps + where + specifiedDeps :: ComponentDeps [PackageId] + specifiedDeps = fmap (map solverSrcId) specifiedDeps' + + mergedFlags = mergeBy compare + (sort $ map PD.flagName (PD.genPackageFlags (packageDescription pkg))) + (sort $ map fst (PD.unFlagAssignment specifiedFlags)) -- TODO + + packageSatisfiesDependency + (PackageIdentifier name version) + (Dependency name' versionRange) = assert (name == name') $ + version `withinRange` versionRange + + dependencyName (Dependency name _) = name + + mergedDeps :: [MergeResult Dependency PackageId] + mergedDeps = mergeDeps requiredDeps (CD.flatDeps specifiedDeps) + + mergeDeps :: [Dependency] -> [PackageId] + -> [MergeResult Dependency PackageId] + mergeDeps required specified = + let sortNubOn f = nubBy ((==) `on` f) . sortBy (compare `on` f) in + mergeBy + (\dep pkgid -> dependencyName dep `compare` packageName pkgid) + (sortNubOn dependencyName required) + (sortNubOn packageName specified) + + compSpec = enableStanzas stanzas + -- TODO: It would be nicer to use ComponentDeps here so we can be more + -- precise in our checks. In fact, this no longer relies on buildDepends and + -- thus should be easier to fix. 
As long as we _do_ use a flat list here, we + -- have to allow for duplicates when we fold specifiedDeps; once we have + -- proper ComponentDeps here we should get rid of the `nubOn` in + -- `mergeDeps`. + requiredDeps :: [Dependency] + requiredDeps = + --TODO: use something lower level than finalizePD + case finalizePD specifiedFlags + compSpec + (const True) + platform cinfo + [] + (packageDescription pkg) of + Right (resolvedPkg, _) -> + externalBuildDepends resolvedPkg compSpec + ++ maybe [] PD.setupDepends (PD.setupBuildInfo resolvedPkg) + Left _ -> + error "configuredPackageInvalidDeps internal error" + + +-- ------------------------------------------------------------ +-- * Simple resolver that ignores dependencies +-- ------------------------------------------------------------ + +-- | A simplistic method of resolving a list of target package names to +-- available packages. +-- +-- Specifically, it does not consider package dependencies at all. Unlike +-- 'resolveDependencies', no attempt is made to ensure that the selected +-- packages have dependencies that are satisfiable or consistent with +-- each other. +-- +-- It is suitable for tasks such as selecting packages to download for user +-- inspection. It is not suitable for selecting packages to install. +-- +-- Note: if no installed package index is available, it is OK to pass 'mempty'. +-- It simply means preferences for installed packages will be ignored. 
+-- +resolveWithoutDependencies :: DepResolverParams + -> Either [ResolveNoDepsError] [UnresolvedSourcePackage] +resolveWithoutDependencies (DepResolverParams targets constraints + prefs defpref installedPkgIndex sourcePkgIndex + _reorderGoals _countConflicts _indGoals _avoidReinstalls + _shadowing _strFlags _maxBjumps _enableBj + _solveExes _allowBootLibInstalls _order _verbosity) = + collectEithers $ map selectPackage (Set.toList targets) + where + selectPackage :: PackageName -> Either ResolveNoDepsError UnresolvedSourcePackage + selectPackage pkgname + | null choices = Left $! ResolveUnsatisfiable pkgname requiredVersions + | otherwise = Right $! maximumBy bestByPrefs choices + + where + -- Constraints + requiredVersions = packageConstraints pkgname + pkgDependency = Dependency pkgname requiredVersions + choices = PackageIndex.lookupDependency sourcePkgIndex + pkgDependency + + -- Preferences + PackagePreferences preferredVersions preferInstalled _ + = packagePreferences pkgname + + bestByPrefs = comparing $ \pkg -> + (installPref pkg, versionPref pkg, packageVersion pkg) + installPref = case preferInstalled of + PreferLatest -> const False + PreferInstalled -> not . null + . InstalledPackageIndex.lookupSourcePackageId + installedPkgIndex + . packageId + versionPref pkg = length . filter (packageVersion pkg `withinRange`) $ + preferredVersions + + packageConstraints :: PackageName -> VersionRange + packageConstraints pkgname = + Map.findWithDefault anyVersion pkgname packageVersionConstraintMap + packageVersionConstraintMap = + let pcs = map unlabelPackageConstraint constraints + in Map.fromList [ (scopeToPackageName scope, range) + | PackageConstraint + scope (PackagePropertyVersion range) <- pcs ] + + packagePreferences :: PackageName -> PackagePreferences + packagePreferences = interpretPackagesPreference targets defpref prefs + + +collectEithers :: [Either a b] -> Either [a] [b] +collectEithers = collect . 
partitionEithers + where + collect ([], xs) = Right xs + collect (errs,_) = Left errs + partitionEithers :: [Either a b] -> ([a],[b]) + partitionEithers = foldr (either left right) ([],[]) + where + left a (l, r) = (a:l, r) + right a (l, r) = (l, a:r) + +-- | Errors for 'resolveWithoutDependencies'. +-- +data ResolveNoDepsError = + + -- | A package name which cannot be resolved to a specific package. + -- Also gives the constraint on the version and whether there was + -- a constraint on the package being installed. + ResolveUnsatisfiable PackageName VersionRange + +instance Show ResolveNoDepsError where + show (ResolveUnsatisfiable name ver) = + "There is no available version of " ++ display name + ++ " that satisfies " ++ display (simplifyVersionRange ver) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/DistDirLayout.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/DistDirLayout.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/DistDirLayout.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/DistDirLayout.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,285 @@ +{-# LANGUAGE RecordWildCards #-} + +-- | +-- +-- The layout of the .\/dist\/ directory where cabal keeps all of its state +-- and build artifacts. +-- +module Distribution.Client.DistDirLayout ( + -- * 'DistDirLayout' + DistDirLayout(..), + DistDirParams(..), + defaultDistDirLayout, + ProjectRoot(..), + + -- * 'StoreDirLayout' + StoreDirLayout(..), + defaultStoreDirLayout, + + -- * 'CabalDirLayout' + CabalDirLayout(..), + mkCabalDirLayout, + defaultCabalDirLayout +) where + +import Data.Maybe (fromMaybe) +import System.FilePath + +import Distribution.Package + ( PackageId, ComponentId, UnitId ) +import Distribution.Client.Setup + ( ArchiveFormat(..) 
) +import Distribution.Compiler +import Distribution.Simple.Compiler + ( PackageDB(..), PackageDBStack, OptimisationLevel(..) ) +import Distribution.Text +import Distribution.Pretty + ( prettyShow ) +import Distribution.Types.ComponentName +import Distribution.System + + +-- | Information which can be used to construct the path to +-- the build directory of a build. This is LESS fine-grained +-- than what goes into the hashed 'InstalledPackageId', +-- and for good reason: we don't want this path to change if +-- the user, say, adds a dependency to their project. +data DistDirParams = DistDirParams { + distParamUnitId :: UnitId, + distParamPackageId :: PackageId, + distParamComponentId :: ComponentId, + distParamComponentName :: Maybe ComponentName, + distParamCompilerId :: CompilerId, + distParamPlatform :: Platform, + distParamOptimization :: OptimisationLevel + -- TODO (see #3343): + -- Flag assignments + -- Optimization + } + + +-- | The layout of the project state directory. Traditionally this has been +-- called the @dist@ directory. +-- +data DistDirLayout = DistDirLayout { + + -- | The root directory of the project. Many other files are relative to + -- this location. In particular, the @cabal.project@ lives here. + -- + distProjectRootDirectory :: FilePath, + + -- | The @cabal.project@ file and related like @cabal.project.freeze@. + -- The parameter is for the extension, like \"freeze\", or \"\" for the + -- main file. + -- + distProjectFile :: String -> FilePath, + + -- | The \"dist\" directory, which is the root of where cabal keeps all + -- its state including the build artifacts from each package we build. + -- + distDirectory :: FilePath, + + -- | The directory under dist where we keep the build artifacts for a + -- package we're building from a local directory. + -- + -- This uses a 'UnitId' not just a 'PackageName' because technically + -- we can have multiple instances of the same package in a solution + -- (e.g. setup deps). 
+ -- + distBuildDirectory :: DistDirParams -> FilePath, + distBuildRootDirectory :: FilePath, + + -- | The directory under dist where we download tarballs and source + -- control repos to. + -- + distDownloadSrcDirectory :: FilePath, + + -- | The directory under dist where we put the unpacked sources of + -- packages, in those cases where it makes sense to keep the build + -- artifacts to reduce rebuild times. + -- + distUnpackedSrcDirectory :: PackageId -> FilePath, + distUnpackedSrcRootDirectory :: FilePath, + + -- | The location for project-wide cache files (e.g. state used in + -- incremental rebuilds). + -- + distProjectCacheFile :: String -> FilePath, + distProjectCacheDirectory :: FilePath, + + -- | The location for package-specific cache files (e.g. state used in + -- incremental rebuilds). + -- + distPackageCacheFile :: DistDirParams -> String -> FilePath, + distPackageCacheDirectory :: DistDirParams -> FilePath, + + -- | The location that sdists are placed by default. + distSdistFile :: PackageId -> ArchiveFormat -> FilePath, + distSdistDirectory :: FilePath, + + distTempDirectory :: FilePath, + distBinDirectory :: FilePath, + + distPackageDB :: CompilerId -> PackageDB + } + + +-- | The layout of a cabal nix-style store. +-- +data StoreDirLayout = StoreDirLayout { + storeDirectory :: CompilerId -> FilePath, + storePackageDirectory :: CompilerId -> UnitId -> FilePath, + storePackageDBPath :: CompilerId -> FilePath, + storePackageDB :: CompilerId -> PackageDB, + storePackageDBStack :: CompilerId -> PackageDBStack, + storeIncomingDirectory :: CompilerId -> FilePath, + storeIncomingLock :: CompilerId -> UnitId -> FilePath + } + + +--TODO: move to another module, e.g. CabalDirLayout? +-- or perhaps rename this module to DirLayouts. + +-- | The layout of the user-wide cabal directory, that is the @~/.cabal@ dir +-- on unix, and equivalents on other systems. 
+-- +-- At the moment this is just a partial specification, but the idea is +-- eventually to cover it all. +-- +data CabalDirLayout = CabalDirLayout { + cabalStoreDirLayout :: StoreDirLayout, + + cabalLogsDirectory :: FilePath, + cabalWorldFile :: FilePath + } + + +-- | Information about the root directory of the project. +-- +-- It can either be an implict project root in the current dir if no +-- @cabal.project@ file is found, or an explicit root if the file is found. +-- +data ProjectRoot = + -- | -- ^ An implict project root. It contains the absolute project + -- root dir. + ProjectRootImplicit FilePath + + -- | -- ^ An explicit project root. It contains the absolute project + -- root dir and the relative @cabal.project@ file (or explicit override) + | ProjectRootExplicit FilePath FilePath + deriving (Eq, Show) + +-- | Make the default 'DistDirLayout' based on the project root dir and +-- optional overrides for the location of the @dist@ directory and the +-- @cabal.project@ file. +-- +defaultDistDirLayout :: ProjectRoot -- ^ the project root + -> Maybe FilePath -- ^ the @dist@ directory or default + -- (absolute or relative to the root) + -> DistDirLayout +defaultDistDirLayout projectRoot mdistDirectory = + DistDirLayout {..} + where + (projectRootDir, projectFile) = case projectRoot of + ProjectRootImplicit dir -> (dir, dir "cabal.project") + ProjectRootExplicit dir file -> (dir, dir file) + + distProjectRootDirectory = projectRootDir + distProjectFile ext = projectFile <.> ext + + distDirectory = distProjectRootDirectory + fromMaybe "dist-newstyle" mdistDirectory + --TODO: switch to just dist at some point, or some other new name + + distBuildRootDirectory = distDirectory "build" + distBuildDirectory params = + distBuildRootDirectory + display (distParamPlatform params) + display (distParamCompilerId params) + display (distParamPackageId params) + (case distParamComponentName params of + Nothing -> "" + Just CLibName -> "" + Just (CSubLibName name) -> "l" 
display name + Just (CFLibName name) -> "f" display name + Just (CExeName name) -> "x" display name + Just (CTestName name) -> "t" display name + Just (CBenchName name) -> "b" display name) + (case distParamOptimization params of + NoOptimisation -> "noopt" + NormalOptimisation -> "" + MaximumOptimisation -> "opt") + (let uid_str = display (distParamUnitId params) + in if uid_str == display (distParamComponentId params) + then "" + else uid_str) + + distUnpackedSrcRootDirectory = distDirectory "src" + distUnpackedSrcDirectory pkgid = distUnpackedSrcRootDirectory + display pkgid + -- we shouldn't get name clashes so this should be fine: + distDownloadSrcDirectory = distUnpackedSrcRootDirectory + + distProjectCacheDirectory = distDirectory "cache" + distProjectCacheFile name = distProjectCacheDirectory name + + distPackageCacheDirectory params = distBuildDirectory params "cache" + distPackageCacheFile params name = distPackageCacheDirectory params name + + distSdistFile pid format = distSdistDirectory prettyShow pid <.> ext + where + ext = case format of + TargzFormat -> "tar.gz" + ZipFormat -> "zip" + + distSdistDirectory = distDirectory "sdist" + + distTempDirectory = distDirectory "tmp" + + distBinDirectory = distDirectory "bin" + + distPackageDBPath compid = distDirectory "packagedb" display compid + distPackageDB = SpecificPackageDB . 
distPackageDBPath + + +defaultStoreDirLayout :: FilePath -> StoreDirLayout +defaultStoreDirLayout storeRoot = + StoreDirLayout {..} + where + storeDirectory compid = + storeRoot display compid + + storePackageDirectory compid ipkgid = + storeDirectory compid display ipkgid + + storePackageDBPath compid = + storeDirectory compid "package.db" + + storePackageDB compid = + SpecificPackageDB (storePackageDBPath compid) + + storePackageDBStack compid = + [GlobalPackageDB, storePackageDB compid] + + storeIncomingDirectory compid = + storeDirectory compid "incoming" + + storeIncomingLock compid unitid = + storeIncomingDirectory compid display unitid <.> "lock" + + +defaultCabalDirLayout :: FilePath -> CabalDirLayout +defaultCabalDirLayout cabalDir = + mkCabalDirLayout cabalDir Nothing Nothing + +mkCabalDirLayout :: FilePath -- ^ Cabal directory + -> Maybe FilePath -- ^ Store directory. Must be absolute + -> Maybe FilePath -- ^ Log directory + -> CabalDirLayout +mkCabalDirLayout cabalDir mstoreDir mlogDir = + CabalDirLayout {..} + where + cabalStoreDirLayout = + defaultStoreDirLayout (fromMaybe (cabalDir "store") mstoreDir) + cabalLogsDirectory = fromMaybe (cabalDir "logs") mlogDir + cabalWorldFile = cabalDir "world" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Exec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Exec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Exec.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Exec.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,181 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Exec +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Implementation of the 'exec' command. 
Runs an arbitrary executable in an +-- environment suitable for making use of the sandbox. +----------------------------------------------------------------------------- + +module Distribution.Client.Exec ( exec + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import qualified Distribution.Simple.GHC as GHC +import qualified Distribution.Simple.GHCJS as GHCJS + +import Distribution.Client.Sandbox (getSandboxConfigFilePath) +import Distribution.Client.Sandbox.PackageEnvironment (sandboxPackageDBPath) +import Distribution.Client.Sandbox.Types (UseSandbox (..)) + +import Distribution.Simple.Compiler (Compiler, CompilerFlavor(..), compilerFlavor) +import Distribution.Simple.Program (ghcProgram, ghcjsProgram, lookupProgram) +import Distribution.Simple.Program.Db (ProgramDb, requireProgram, modifyProgramSearchPath) +import Distribution.Simple.Program.Find (ProgramSearchPathEntry(..)) +import Distribution.Simple.Program.Run (programInvocation, runProgramInvocation) +import Distribution.Simple.Program.Types ( simpleProgram, ConfiguredProgram(..) ) +import Distribution.Simple.Utils (die', warn) + +import Distribution.System (Platform(..), OS(..), buildOS) +import Distribution.Verbosity (Verbosity) + +import System.Directory ( doesDirectoryExist ) +import System.Environment (lookupEnv) +import System.FilePath (searchPathSeparator, ()) + + +-- | Execute the given command in the package's environment. +-- +-- The given command is executed with GHC configured to use the correct +-- package database and with the sandbox bin directory added to the PATH. 
+exec :: Verbosity + -> UseSandbox + -> Compiler + -> Platform + -> ProgramDb + -> [String] + -> IO () +exec verbosity useSandbox comp platform programDb extraArgs = + case extraArgs of + (exe:args) -> do + program <- requireProgram' verbosity useSandbox programDb exe + env <- environmentOverrides (programOverrideEnv program) + let invocation = programInvocation + program { programOverrideEnv = env } + args + runProgramInvocation verbosity invocation + + [] -> die' verbosity "Please specify an executable to run" + where + environmentOverrides env = + case useSandbox of + NoSandbox -> return env + (UseSandbox sandboxDir) -> + sandboxEnvironment verbosity sandboxDir comp platform programDb env + + +-- | Return the package's sandbox environment. +-- +-- The environment sets GHC_PACKAGE_PATH so that GHC will use the sandbox. +sandboxEnvironment :: Verbosity + -> FilePath + -> Compiler + -> Platform + -> ProgramDb + -> [(String, Maybe String)] -- environment overrides so far + -> IO [(String, Maybe String)] +sandboxEnvironment verbosity sandboxDir comp platform programDb iEnv = + case compilerFlavor comp of + GHC -> env GHC.getGlobalPackageDB ghcProgram "GHC_PACKAGE_PATH" + GHCJS -> env GHCJS.getGlobalPackageDB ghcjsProgram "GHCJS_PACKAGE_PATH" + _ -> die' verbosity "exec only works with GHC and GHCJS" + where + (Platform _ os) = platform + ldPath = case os of + OSX -> "DYLD_LIBRARY_PATH" + Windows -> "PATH" + _ -> "LD_LIBRARY_PATH" + env getGlobalPackageDB hcProgram packagePathEnvVar = do + let Just program = lookupProgram hcProgram programDb + gDb <- getGlobalPackageDB verbosity program + sandboxConfigFilePath <- getSandboxConfigFilePath mempty + let sandboxPackagePath = sandboxPackageDBPath sandboxDir comp platform + compilerPackagePaths = prependToSearchPath gDb sandboxPackagePath + -- Packages database must exist, otherwise things will start + -- failing in mysterious ways. 
+ exists <- doesDirectoryExist sandboxPackagePath + unless exists $ warn verbosity $ "Package database is not a directory: " + ++ sandboxPackagePath + -- MASSIVE HACK. We need this to be synchronized with installLibDir + -- in defaultInstallDirs' in Distribution.Simple.InstallDirs, + -- which has a special case for Windows (WHY? Who knows; it's been + -- around as long as Windows exists.) The sane thing to do here + -- would be to read out the actual install dirs that were associated + -- with the package in question, but that's not a well-formed question + -- here because there is not actually install directory for the + -- "entire" sandbox. Since we want to kill this code in favor of + -- new-build, I decided it wasn't worth fixing this "properly." + -- Also, this doesn't handle LHC correctly but I don't care -- ezyang + let extraLibPath = + case buildOS of + Windows -> sandboxDir + _ -> sandboxDir "lib" + -- 2016-11-26 Apologies for the spaghetti code here. + -- Essentially we just want to add the sandbox's lib/ dir to + -- whatever the library search path environment variable is: + -- this allows running existing executables against foreign + -- libraries (meaning Haskell code with a bunch of foreign + -- exports). However, on Windows this variable is equal to the + -- executable search path env var. And we try to keep not only + -- what was already set in the environment, but also the + -- additional directories we add below in requireProgram'. So + -- the strategy is that we first take the environment + -- overrides from requireProgram' below. If the library search + -- path env is overridden (e.g. because we're on windows), we + -- prepend the lib/ dir to the relevant override. If not, we + -- want to avoid wiping the user's own settings, so we first + -- read the env var's current value, and then prefix ours if + -- the user had any set. + iEnv' <- + if any ((==ldPath) . 
fst) iEnv + then return $ updateLdPath extraLibPath iEnv + else do + currentLibraryPath <- lookupEnv ldPath + let updatedLdPath = + case currentLibraryPath of + Nothing -> Just extraLibPath + Just paths -> + Just $ extraLibPath ++ [searchPathSeparator] ++ paths + return $ (ldPath, updatedLdPath) : iEnv + + -- Build the environment + return $ [ (packagePathEnvVar, Just compilerPackagePaths) + , ("CABAL_SANDBOX_PACKAGE_PATH", Just compilerPackagePaths) + , ("CABAL_SANDBOX_CONFIG", Just sandboxConfigFilePath) + ] ++ iEnv' + + prependToSearchPath path newValue = + newValue ++ [searchPathSeparator] ++ path + + updateLdPath path = map update + where + update (name, Just current) + | name == ldPath = (ldPath, Just $ path ++ [searchPathSeparator] ++ current) + update (name, Nothing) + | name == ldPath = (ldPath, Just path) + update x = x + + +-- | Check that a program is configured and available to be run. If +-- a sandbox is available check in the sandbox's directory. +requireProgram' :: Verbosity + -> UseSandbox + -> ProgramDb + -> String + -> IO ConfiguredProgram +requireProgram' verbosity useSandbox programDb exe = do + (program, _) <- requireProgram + verbosity + (simpleProgram exe) + updateSearchPath + return program + where + updateSearchPath = + flip modifyProgramSearchPath programDb $ \searchPath -> + case useSandbox of + NoSandbox -> searchPath + UseSandbox sandboxDir -> + ProgramSearchPathDir (sandboxDir "bin") : searchPath diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Fetch.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Fetch.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Fetch.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Fetch.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,227 @@ 
+------------------------------------------------------------------------------- | +-- Module : Distribution.Client.Fetch +-- Copyright : (c) David Himmelstrup 2005 +-- Duncan Coutts 2011 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- The cabal fetch command +----------------------------------------------------------------------------- +module Distribution.Client.Fetch ( + fetch, + ) where + +import Distribution.Client.Types +import Distribution.Client.Targets +import Distribution.Client.FetchUtils hiding (fetchPackage) +import Distribution.Client.Dependency +import Distribution.Client.IndexUtils as IndexUtils + ( getSourcePackages, getInstalledPackages ) +import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan +import Distribution.Client.Setup + ( GlobalFlags(..), FetchFlags(..), RepoContext(..) ) + +import Distribution.Solver.Types.ConstraintSource +import Distribution.Solver.Types.LabeledPackageConstraint +import Distribution.Solver.Types.OptionalStanza +import Distribution.Solver.Types.PkgConfigDb ( PkgConfigDb, readPkgConfigDb ) +import Distribution.Solver.Types.SolverPackage +import Distribution.Solver.Types.SourcePackage + +import Distribution.Package + ( packageId ) +import Distribution.Simple.Compiler + ( Compiler, compilerInfo, PackageDBStack ) +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import Distribution.Simple.Program + ( ProgramDb ) +import Distribution.Simple.Setup + ( fromFlag, fromFlagOrDefault ) +import Distribution.Simple.Utils + ( die', notice, debug ) +import Distribution.System + ( Platform ) +import Distribution.Text + ( display ) +import Distribution.Verbosity + ( Verbosity ) + +import Control.Monad + ( filterM ) + +-- ------------------------------------------------------------ +-- * The fetch command +-- ------------------------------------------------------------ + +--TODO: +-- * add fetch -o support +-- * support 
tarball URLs via ad-hoc download cache (or in -o mode?) +-- * suggest using --no-deps, unpack or fetch -o if deps cannot be satisfied +-- * Port various flags from install: +-- * --updage-dependencies +-- * --constraint and --preference +-- * --only-dependencies, but note it conflicts with --no-deps + + +-- | Fetch a list of packages and their dependencies. +-- +fetch :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> Platform + -> ProgramDb + -> GlobalFlags + -> FetchFlags + -> [UserTarget] + -> IO () +fetch verbosity _ _ _ _ _ _ _ [] = + notice verbosity "No packages requested. Nothing to do." + +fetch verbosity packageDBs repoCtxt comp platform progdb + globalFlags fetchFlags userTargets = do + + mapM_ (checkTarget verbosity) userTargets + + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb + sourcePkgDb <- getSourcePackages verbosity repoCtxt + pkgConfigDb <- readPkgConfigDb verbosity progdb + + pkgSpecifiers <- resolveUserTargets verbosity repoCtxt + (fromFlag $ globalWorldFile globalFlags) + (packageIndex sourcePkgDb) + userTargets + + pkgs <- planPackages + verbosity comp platform fetchFlags + installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers + + pkgs' <- filterM (fmap not . isFetched . packageSource) pkgs + if null pkgs' + --TODO: when we add support for remote tarballs then this message + -- will need to be changed because for remote tarballs we fetch them + -- at the earlier phase. + then notice verbosity $ "No packages need to be fetched. " + ++ "All the requested packages are already local " + ++ "or cached locally." + else if dryRun + then notice verbosity $ unlines $ + "The following packages would be fetched:" + : map (display . packageId) pkgs' + + else mapM_ (fetchPackage verbosity repoCtxt . 
packageSource) pkgs' + + where + dryRun = fromFlag (fetchDryRun fetchFlags) + +planPackages :: Verbosity + -> Compiler + -> Platform + -> FetchFlags + -> InstalledPackageIndex + -> SourcePackageDb + -> PkgConfigDb + -> [PackageSpecifier UnresolvedSourcePackage] + -> IO [UnresolvedSourcePackage] +planPackages verbosity comp platform fetchFlags + installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers + + | includeDependencies = do + solver <- chooseSolver verbosity + (fromFlag (fetchSolver fetchFlags)) (compilerInfo comp) + notice verbosity "Resolving dependencies..." + installPlan <- foldProgress logMsg (die' verbosity) return $ + resolveDependencies + platform (compilerInfo comp) pkgConfigDb + solver + resolverParams + + -- The packages we want to fetch are those packages the 'InstallPlan' + -- that are in the 'InstallPlan.Configured' state. + return + [ solverPkgSource cpkg + | (SolverInstallPlan.Configured cpkg) + <- SolverInstallPlan.toList installPlan ] + + | otherwise = + either (die' verbosity . unlines . map show) return $ + resolveWithoutDependencies resolverParams + + where + resolverParams = + + setMaxBackjumps (if maxBackjumps < 0 then Nothing + else Just maxBackjumps) + + . setIndependentGoals independentGoals + + . setReorderGoals reorderGoals + + . setCountConflicts countConflicts + + . setShadowPkgs shadowPkgs + + . setStrongFlags strongFlags + + . setAllowBootLibInstalls allowBootLibInstalls + + . setSolverVerbosity verbosity + + . addConstraints + [ let pc = PackageConstraint + (scopeToplevel $ pkgSpecifierTarget pkgSpecifier) + (PackagePropertyStanzas stanzas) + in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget + | pkgSpecifier <- pkgSpecifiers ] + + -- Reinstall the targets given on the command line so that the dep + -- resolver will decide that they need fetching, even if they're + -- already installed. Since we want to get the source packages of + -- things we might have installed (but not have the sources for). + . 
reinstallTargets + + $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers + + includeDependencies = fromFlag (fetchDeps fetchFlags) + logMsg message rest = debug verbosity message >> rest + + stanzas = [ TestStanzas | testsEnabled ] + ++ [ BenchStanzas | benchmarksEnabled ] + testsEnabled = fromFlagOrDefault False $ fetchTests fetchFlags + benchmarksEnabled = fromFlagOrDefault False $ fetchBenchmarks fetchFlags + + reorderGoals = fromFlag (fetchReorderGoals fetchFlags) + countConflicts = fromFlag (fetchCountConflicts fetchFlags) + independentGoals = fromFlag (fetchIndependentGoals fetchFlags) + shadowPkgs = fromFlag (fetchShadowPkgs fetchFlags) + strongFlags = fromFlag (fetchStrongFlags fetchFlags) + maxBackjumps = fromFlag (fetchMaxBackjumps fetchFlags) + allowBootLibInstalls = fromFlag (fetchAllowBootLibInstalls fetchFlags) + + +checkTarget :: Verbosity -> UserTarget -> IO () +checkTarget verbosity target = case target of + UserTargetRemoteTarball _uri + -> die' verbosity $ "The 'fetch' command does not yet support remote tarballs. " + ++ "In the meantime you can use the 'unpack' commands." + _ -> return () + +fetchPackage :: Verbosity -> RepoContext -> PackageLocation a -> IO () +fetchPackage verbosity repoCtxt pkgsrc = case pkgsrc of + LocalUnpackedPackage _dir -> return () + LocalTarballPackage _file -> return () + + RemoteTarballPackage _uri _ -> + die' verbosity $ "The 'fetch' command does not yet support remote tarballs. " + ++ "In the meantime you can use the 'unpack' commands." + + RemoteSourceRepoPackage _repo _ -> + die' verbosity $ "The 'fetch' command does not yet support remote " + ++ "source repositores." 
+ + RepoTarballPackage repo pkgid _ -> do + _ <- fetchRepoTarball verbosity repoCtxt repo pkgid + return () diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/FetchUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/FetchUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/FetchUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/FetchUtils.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,315 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.FetchUtils +-- Copyright : (c) David Himmelstrup 2005 +-- Duncan Coutts 2011 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- Functions for fetching packages +----------------------------------------------------------------------------- +{-# LANGUAGE RecordWildCards #-} +module Distribution.Client.FetchUtils ( + + -- * fetching packages + fetchPackage, + isFetched, + checkFetched, + + -- ** specifically for repo packages + checkRepoTarballFetched, + fetchRepoTarball, + + -- ** fetching packages asynchronously + asyncFetchPackages, + waitAsyncFetchPackage, + AsyncFetchMap, + + -- * fetching other things + downloadIndex, + ) where + +import Distribution.Client.Types +import Distribution.Client.HttpUtils + ( downloadURI, isOldHackageURI, DownloadResult(..) + , HttpTransport(..), transportCheckHttps, remoteRepoCheckHttps ) + +import Distribution.Package + ( PackageId, packageName, packageVersion ) +import Distribution.Simple.Utils + ( notice, info, debug, die' ) +import Distribution.Text + ( display ) +import Distribution.Verbosity + ( Verbosity, verboseUnmarkOutput ) +import Distribution.Client.GlobalFlags + ( RepoContext(..) 
) +import Distribution.Client.Utils + ( ProgressPhase(..), progressMessage ) + +import Data.Maybe +import Data.Map (Map) +import qualified Data.Map as Map +import Control.Monad +import Control.Exception +import Control.Concurrent.Async +import Control.Concurrent.MVar +import System.Directory + ( doesFileExist, createDirectoryIfMissing, getTemporaryDirectory ) +import System.IO + ( openTempFile, hClose ) +import System.FilePath + ( (), (<.>) ) +import qualified System.FilePath.Posix as FilePath.Posix + ( combine, joinPath ) +import Network.URI + ( URI(uriPath) ) + +import qualified Hackage.Security.Client as Sec + +-- ------------------------------------------------------------ +-- * Actually fetch things +-- ------------------------------------------------------------ + +-- | Returns @True@ if the package has already been fetched +-- or does not need fetching. +-- +isFetched :: UnresolvedPkgLoc -> IO Bool +isFetched loc = case loc of + LocalUnpackedPackage _dir -> return True + LocalTarballPackage _file -> return True + RemoteTarballPackage _uri local -> return (isJust local) + RepoTarballPackage repo pkgid _ -> doesFileExist (packageFile repo pkgid) + RemoteSourceRepoPackage _ local -> return (isJust local) + + +-- | Checks if the package has already been fetched (or does not need +-- fetching) and if so returns evidence in the form of a 'PackageLocation' +-- with a resolved local file location. 
+-- +checkFetched :: UnresolvedPkgLoc + -> IO (Maybe ResolvedPkgLoc) +checkFetched loc = case loc of + LocalUnpackedPackage dir -> + return (Just $ LocalUnpackedPackage dir) + LocalTarballPackage file -> + return (Just $ LocalTarballPackage file) + RemoteTarballPackage uri (Just file) -> + return (Just $ RemoteTarballPackage uri file) + RepoTarballPackage repo pkgid (Just file) -> + return (Just $ RepoTarballPackage repo pkgid file) + RemoteSourceRepoPackage repo (Just dir) -> + return (Just $ RemoteSourceRepoPackage repo dir) + + RemoteTarballPackage _uri Nothing -> return Nothing + RemoteSourceRepoPackage _repo Nothing -> return Nothing + RepoTarballPackage repo pkgid Nothing -> + fmap (fmap (RepoTarballPackage repo pkgid)) + (checkRepoTarballFetched repo pkgid) + +-- | Like 'checkFetched' but for the specific case of a 'RepoTarballPackage'. +-- +checkRepoTarballFetched :: Repo -> PackageId -> IO (Maybe FilePath) +checkRepoTarballFetched repo pkgid = do + let file = packageFile repo pkgid + exists <- doesFileExist file + if exists + then return (Just file) + else return Nothing + + +-- | Fetch a package if we don't have it already. 
+-- +fetchPackage :: Verbosity + -> RepoContext + -> UnresolvedPkgLoc + -> IO ResolvedPkgLoc +fetchPackage verbosity repoCtxt loc = case loc of + LocalUnpackedPackage dir -> + return (LocalUnpackedPackage dir) + LocalTarballPackage file -> + return (LocalTarballPackage file) + RemoteTarballPackage uri (Just file) -> + return (RemoteTarballPackage uri file) + RepoTarballPackage repo pkgid (Just file) -> + return (RepoTarballPackage repo pkgid file) + RemoteSourceRepoPackage repo (Just dir) -> + return (RemoteSourceRepoPackage repo dir) + + RemoteTarballPackage uri Nothing -> do + path <- downloadTarballPackage uri + return (RemoteTarballPackage uri path) + RepoTarballPackage repo pkgid Nothing -> do + local <- fetchRepoTarball verbosity repoCtxt repo pkgid + return (RepoTarballPackage repo pkgid local) + RemoteSourceRepoPackage _repo Nothing -> + die' verbosity "fetchPackage: source repos not supported" + where + downloadTarballPackage uri = do + transport <- repoContextGetTransport repoCtxt + transportCheckHttps verbosity transport uri + notice verbosity ("Downloading " ++ show uri) + tmpdir <- getTemporaryDirectory + (path, hnd) <- openTempFile tmpdir "cabal-.tar.gz" + hClose hnd + _ <- downloadURI transport verbosity uri path + return path + + +-- | Fetch a repo package if we don't have it already. +-- +fetchRepoTarball :: Verbosity -> RepoContext -> Repo -> PackageId -> IO FilePath +fetchRepoTarball verbosity repoCtxt repo pkgid = do + fetched <- doesFileExist (packageFile repo pkgid) + if fetched + then do info verbosity $ display pkgid ++ " has already been downloaded." 
+ return (packageFile repo pkgid) + else do progressMessage verbosity ProgressDownloading (display pkgid) + res <- downloadRepoPackage + progressMessage verbosity ProgressDownloaded (display pkgid) + return res + + + where + downloadRepoPackage = case repo of + RepoLocal{..} -> return (packageFile repo pkgid) + + RepoRemote{..} -> do + transport <- repoContextGetTransport repoCtxt + remoteRepoCheckHttps verbosity transport repoRemote + let uri = packageURI repoRemote pkgid + dir = packageDir repo pkgid + path = packageFile repo pkgid + createDirectoryIfMissing True dir + _ <- downloadURI transport verbosity uri path + return path + + RepoSecure{} -> repoContextWithSecureRepo repoCtxt repo $ \rep -> do + let dir = packageDir repo pkgid + path = packageFile repo pkgid + createDirectoryIfMissing True dir + Sec.uncheckClientErrors $ do + info verbosity ("Writing " ++ path) + Sec.downloadPackage' rep pkgid path + return path + +-- | Downloads an index file to [config-dir/packages/serv-id] without +-- hackage-security. You probably don't want to call this directly; +-- use 'updateRepo' instead. +-- +downloadIndex :: HttpTransport -> Verbosity -> RemoteRepo -> FilePath -> IO DownloadResult +downloadIndex transport verbosity remoteRepo cacheDir = do + remoteRepoCheckHttps verbosity transport remoteRepo + let uri = (remoteRepoURI remoteRepo) { + uriPath = uriPath (remoteRepoURI remoteRepo) + `FilePath.Posix.combine` "00-index.tar.gz" + } + path = cacheDir "00-index" <.> "tar.gz" + createDirectoryIfMissing True cacheDir + downloadURI transport verbosity uri path + + +-- ------------------------------------------------------------ +-- * Async fetch wrapper utilities +-- ------------------------------------------------------------ + +type AsyncFetchMap = Map UnresolvedPkgLoc + (MVar (Either SomeException ResolvedPkgLoc)) + +-- | Fork off an async action to download the given packages (by location). 
+-- +-- The downloads are initiated in order, so you can arrange for packages that +-- will likely be needed sooner to be earlier in the list. +-- +-- The body action is passed a map from those packages (identified by their +-- location) to a completion var for that package. So the body action should +-- lookup the location and use 'asyncFetchPackage' to get the result. +-- +asyncFetchPackages :: Verbosity + -> RepoContext + -> [UnresolvedPkgLoc] + -> (AsyncFetchMap -> IO a) + -> IO a +asyncFetchPackages verbosity repoCtxt pkglocs body = do + --TODO: [nice to have] use parallel downloads? + + asyncDownloadVars <- sequence [ do v <- newEmptyMVar + return (pkgloc, v) + | pkgloc <- pkglocs ] + + let fetchPackages :: IO () + fetchPackages = + forM_ asyncDownloadVars $ \(pkgloc, var) -> do + -- Suppress marking here, because 'withAsync' means + -- that we get nondeterministic interleaving + result <- try $ fetchPackage (verboseUnmarkOutput verbosity) + repoCtxt pkgloc + putMVar var result + + withAsync fetchPackages $ \_ -> + body (Map.fromList asyncDownloadVars) + + +-- | Expect to find a download in progress in the given 'AsyncFetchMap' +-- and wait on it to finish. +-- +-- If the download failed with an exception then this will be thrown. +-- +-- Note: This function is supposed to be idempotent, as our install plans +-- can now use the same tarball for many builds, e.g. different +-- components and/or qualified goals, and these all go through the +-- download phase so we end up using 'waitAsyncFetchPackage' twice on +-- the same package. C.f. #4461. 
+waitAsyncFetchPackage :: Verbosity + -> AsyncFetchMap + -> UnresolvedPkgLoc + -> IO ResolvedPkgLoc +waitAsyncFetchPackage verbosity downloadMap srcloc = + case Map.lookup srcloc downloadMap of + Just hnd -> do + debug verbosity $ "Waiting for download of " ++ show srcloc + either throwIO return =<< readMVar hnd + Nothing -> fail "waitAsyncFetchPackage: package not being downloaded" + + +-- ------------------------------------------------------------ +-- * Path utilities +-- ------------------------------------------------------------ + +-- | Generate the full path to the locally cached copy of +-- the tarball for a given @PackageIdentifer@. +-- +packageFile :: Repo -> PackageId -> FilePath +packageFile repo pkgid = packageDir repo pkgid + display pkgid + <.> "tar.gz" + +-- | Generate the full path to the directory where the local cached copy of +-- the tarball for a given @PackageIdentifer@ is stored. +-- +packageDir :: Repo -> PackageId -> FilePath +packageDir repo pkgid = repoLocalDir repo + display (packageName pkgid) + display (packageVersion pkgid) + +-- | Generate the URI of the tarball for a given package. 
+-- +packageURI :: RemoteRepo -> PackageId -> URI +packageURI repo pkgid | isOldHackageURI (remoteRepoURI repo) = + (remoteRepoURI repo) { + uriPath = FilePath.Posix.joinPath + [uriPath (remoteRepoURI repo) + ,display (packageName pkgid) + ,display (packageVersion pkgid) + ,display pkgid <.> "tar.gz"] + } +packageURI repo pkgid = + (remoteRepoURI repo) { + uriPath = FilePath.Posix.joinPath + [uriPath (remoteRepoURI repo) + ,"package" + ,display pkgid <.> "tar.gz"] + } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/FileMonitor.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/FileMonitor.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/FileMonitor.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/FileMonitor.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,1119 @@ +{-# LANGUAGE DeriveGeneric, DeriveFunctor, GeneralizedNewtypeDeriving, + NamedFieldPuns, BangPatterns #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} + +-- | An abstraction to help with re-running actions when files or other +-- input values they depend on have changed. 
+-- +module Distribution.Client.FileMonitor ( + + -- * Declaring files to monitor + MonitorFilePath(..), + MonitorKindFile(..), + MonitorKindDir(..), + FilePathGlob(..), + monitorFile, + monitorFileHashed, + monitorNonExistentFile, + monitorFileExistence, + monitorDirectory, + monitorNonExistentDirectory, + monitorDirectoryExistence, + monitorFileOrDirectory, + monitorFileGlob, + monitorFileGlobExistence, + monitorFileSearchPath, + monitorFileHashedSearchPath, + + -- * Creating and checking sets of monitored files + FileMonitor(..), + newFileMonitor, + MonitorChanged(..), + MonitorChangedReason(..), + checkFileMonitorChanged, + updateFileMonitor, + MonitorTimestamp, + beginUpdateFileMonitor, + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import qualified Data.Map.Strict as Map +import qualified Data.ByteString.Lazy as BS +import qualified Distribution.Compat.Binary as Binary +import qualified Data.Hashable as Hashable + +import Control.Monad +import Control.Monad.Trans (MonadIO, liftIO) +import Control.Monad.State (StateT, mapStateT) +import qualified Control.Monad.State as State +import Control.Monad.Except (ExceptT, runExceptT, withExceptT, + throwError) +import Control.Exception + +import Distribution.Compat.Time +import Distribution.Client.Glob +import Distribution.Simple.Utils (handleDoesNotExist, writeFileAtomic) +import Distribution.Client.Utils (mergeBy, MergeResult(..)) + +import System.FilePath +import System.Directory +import System.IO + +------------------------------------------------------------------------------ +-- Types for specifying files to monitor +-- + + +-- | A description of a file (or set of files) to monitor for changes. +-- +-- Where file paths are relative they are relative to a common directory +-- (e.g. project root), not necessarily the process current directory. 
+-- +data MonitorFilePath = + MonitorFile { + monitorKindFile :: !MonitorKindFile, + monitorKindDir :: !MonitorKindDir, + monitorPath :: !FilePath + } + | MonitorFileGlob { + monitorKindFile :: !MonitorKindFile, + monitorKindDir :: !MonitorKindDir, + monitorPathGlob :: !FilePathGlob + } + deriving (Eq, Show, Generic) + +data MonitorKindFile = FileExists + | FileModTime + | FileHashed + | FileNotExists + deriving (Eq, Show, Generic) + +data MonitorKindDir = DirExists + | DirModTime + | DirNotExists + deriving (Eq, Show, Generic) + +instance Binary MonitorFilePath +instance Binary MonitorKindFile +instance Binary MonitorKindDir + +-- | Monitor a single file for changes, based on its modification time. +-- The monitored file is considered to have changed if it no longer +-- exists or if its modification time has changed. +-- +monitorFile :: FilePath -> MonitorFilePath +monitorFile = MonitorFile FileModTime DirNotExists + +-- | Monitor a single file for changes, based on its modification time +-- and content hash. The monitored file is considered to have changed if +-- it no longer exists or if its modification time and content hash have +-- changed. +-- +monitorFileHashed :: FilePath -> MonitorFilePath +monitorFileHashed = MonitorFile FileHashed DirNotExists + +-- | Monitor a single non-existent file for changes. The monitored file +-- is considered to have changed if it exists. +-- +monitorNonExistentFile :: FilePath -> MonitorFilePath +monitorNonExistentFile = MonitorFile FileNotExists DirNotExists + +-- | Monitor a single file for existence only. The monitored file is +-- considered to have changed if it no longer exists. +-- +monitorFileExistence :: FilePath -> MonitorFilePath +monitorFileExistence = MonitorFile FileExists DirNotExists + +-- | Monitor a single directory for changes, based on its modification +-- time. The monitored directory is considered to have changed if it no +-- longer exists or if its modification time has changed. 
+-- +monitorDirectory :: FilePath -> MonitorFilePath +monitorDirectory = MonitorFile FileNotExists DirModTime + +-- | Monitor a single non-existent directory for changes. The monitored +-- directory is considered to have changed if it exists. +-- +monitorNonExistentDirectory :: FilePath -> MonitorFilePath +-- Just an alias for monitorNonExistentFile, since you can't +-- tell the difference between a non-existent directory and +-- a non-existent file :) +monitorNonExistentDirectory = monitorNonExistentFile + +-- | Monitor a single directory for existence. The monitored directory is +-- considered to have changed only if it no longer exists. +-- +monitorDirectoryExistence :: FilePath -> MonitorFilePath +monitorDirectoryExistence = MonitorFile FileNotExists DirExists + +-- | Monitor a single file or directory for changes, based on its modification +-- time. The monitored file is considered to have changed if it no longer +-- exists or if its modification time has changed. +-- +monitorFileOrDirectory :: FilePath -> MonitorFilePath +monitorFileOrDirectory = MonitorFile FileModTime DirModTime + +-- | Monitor a set of files (or directories) identified by a file glob. +-- The monitored glob is considered to have changed if the set of files +-- matching the glob changes (i.e. creations or deletions), or for files if the +-- modification time and content hash of any matching file has changed. +-- +monitorFileGlob :: FilePathGlob -> MonitorFilePath +monitorFileGlob = MonitorFileGlob FileHashed DirExists + +-- | Monitor a set of files (or directories) identified by a file glob for +-- existence only. The monitored glob is considered to have changed if the set +-- of files matching the glob changes (i.e. creations or deletions). 
+-- +monitorFileGlobExistence :: FilePathGlob -> MonitorFilePath +monitorFileGlobExistence = MonitorFileGlob FileExists DirExists + +-- | Creates a list of files to monitor when you search for a file which +-- unsuccessfully looked in @notFoundAtPaths@ before finding it at +-- @foundAtPath@. +monitorFileSearchPath :: [FilePath] -> FilePath -> [MonitorFilePath] +monitorFileSearchPath notFoundAtPaths foundAtPath = + monitorFile foundAtPath + : map monitorNonExistentFile notFoundAtPaths + +-- | Similar to 'monitorFileSearchPath', but also instructs us to +-- monitor the hash of the found file. +monitorFileHashedSearchPath :: [FilePath] -> FilePath -> [MonitorFilePath] +monitorFileHashedSearchPath notFoundAtPaths foundAtPath = + monitorFileHashed foundAtPath + : map monitorNonExistentFile notFoundAtPaths + + +------------------------------------------------------------------------------ +-- Implementation types, files status +-- + +-- | The state necessary to determine whether a set of monitored +-- files has changed. It consists of two parts: a set of specific +-- files to be monitored (index by their path), and a list of +-- globs, which monitor may files at once. +data MonitorStateFileSet + = MonitorStateFileSet ![MonitorStateFile] + ![MonitorStateGlob] + -- Morally this is not actually a set but a bag (represented by lists). + -- There is no principled reason to use a bag here rather than a set, but + -- there is also no particular gain either. That said, we do preserve the + -- order of the lists just to reduce confusion (and have predictable I/O + -- patterns). + deriving Show + +type Hash = Int + +-- | The state necessary to determine whether a monitored file has changed. +-- +-- This covers all the cases of 'MonitorFilePath' except for globs which is +-- covered separately by 'MonitorStateGlob'. 
+-- +-- The @Maybe ModTime@ is to cover the case where we already consider the +-- file to have changed, either because it had already changed by the time we +-- did the snapshot (i.e. too new, changed since start of update process) or it +-- no longer exists at all. +-- +data MonitorStateFile = MonitorStateFile !MonitorKindFile !MonitorKindDir + !FilePath !MonitorStateFileStatus + deriving (Show, Generic) + +data MonitorStateFileStatus + = MonitorStateFileExists + | MonitorStateFileModTime !ModTime -- ^ cached file mtime + | MonitorStateFileHashed !ModTime !Hash -- ^ cached mtime and content hash + | MonitorStateDirExists + | MonitorStateDirModTime !ModTime -- ^ cached dir mtime + | MonitorStateNonExistent + | MonitorStateAlreadyChanged + deriving (Show, Generic) + +instance Binary MonitorStateFile +instance Binary MonitorStateFileStatus + +-- | The state necessary to determine whether the files matched by a globbing +-- match have changed. +-- +data MonitorStateGlob = MonitorStateGlob !MonitorKindFile !MonitorKindDir + !FilePathRoot !MonitorStateGlobRel + deriving (Show, Generic) + +data MonitorStateGlobRel + = MonitorStateGlobDirs + !Glob !FilePathGlobRel + !ModTime + ![(FilePath, MonitorStateGlobRel)] -- invariant: sorted + + | MonitorStateGlobFiles + !Glob + !ModTime + ![(FilePath, MonitorStateFileStatus)] -- invariant: sorted + + | MonitorStateGlobDirTrailing + deriving (Show, Generic) + +instance Binary MonitorStateGlob +instance Binary MonitorStateGlobRel + +-- | We can build a 'MonitorStateFileSet' from a set of 'MonitorFilePath' by +-- inspecting the state of the file system, and we can go in the reverse +-- direction by just forgetting the extra info. 
+-- +reconstructMonitorFilePaths :: MonitorStateFileSet -> [MonitorFilePath] +reconstructMonitorFilePaths (MonitorStateFileSet singlePaths globPaths) = + map getSinglePath singlePaths + ++ map getGlobPath globPaths + where + getSinglePath (MonitorStateFile kindfile kinddir filepath _) = + MonitorFile kindfile kinddir filepath + + getGlobPath (MonitorStateGlob kindfile kinddir root gstate) = + MonitorFileGlob kindfile kinddir $ FilePathGlob root $ + case gstate of + MonitorStateGlobDirs glob globs _ _ -> GlobDir glob globs + MonitorStateGlobFiles glob _ _ -> GlobFile glob + MonitorStateGlobDirTrailing -> GlobDirTrailing + +------------------------------------------------------------------------------ +-- Checking the status of monitored files +-- + +-- | A monitor for detecting changes to a set of files. It can be used to +-- efficiently test if any of a set of files (specified individually or by +-- glob patterns) has changed since some snapshot. In addition, it also checks +-- for changes in a value (of type @a@), and when there are no changes in +-- either it returns a saved value (of type @b@). +-- +-- The main use case looks like this: suppose we have some expensive action +-- that depends on certain pure inputs and reads some set of files, and +-- produces some pure result. We want to avoid re-running this action when it +-- would produce the same result. So we need to monitor the files the action +-- looked at, the other pure input values, and we need to cache the result. +-- Then at some later point, if the input value didn't change, and none of the +-- files changed, then we can re-use the cached result rather than re-running +-- the action. +-- +-- This can be achieved using a 'FileMonitor'. Each 'FileMonitor' instance +-- saves state in a disk file, so the file for that has to be specified, +-- making sure it is unique. The pattern is to use 'checkFileMonitorChanged' +-- to see if there's been any change. 
If there is, re-run the action, keeping +-- track of the files, then use 'updateFileMonitor' to record the current +-- set of files to monitor, the current input value for the action, and the +-- result of the action. +-- +-- The typical occurrence of this pattern is captured by 'rerunIfChanged' +-- and the 'Rebuild' monad. More complicated cases may need to use +-- 'checkFileMonitorChanged' and 'updateFileMonitor' directly. +-- +data FileMonitor a b + = FileMonitor { + + -- | The file where this 'FileMonitor' should store its state. + -- + fileMonitorCacheFile :: FilePath, + + -- | Compares a new cache key with old one to determine if a + -- corresponding cached value is still valid. + -- + -- Typically this is just an equality test, but in some + -- circumstances it can make sense to do things like subset + -- comparisons. + -- + -- The first arg is the new value, the second is the old cached value. + -- + fileMonitorKeyValid :: a -> a -> Bool, + + -- | When this mode is enabled, if 'checkFileMonitorChanged' returns + -- 'MonitoredValueChanged' then we have the guarantee that no files + -- changed, that the value change was the only change. In the default + -- mode no such guarantee is provided which is slightly faster. + -- + fileMonitorCheckIfOnlyValueChanged :: Bool + } + +-- | Define a new file monitor. +-- +-- It's best practice to define file monitor values once, and then use the +-- same value for 'checkFileMonitorChanged' and 'updateFileMonitor' as this +-- ensures you get the same types @a@ and @b@ for reading and writing. +-- +-- The path of the file monitor itself must be unique because it keeps state +-- on disk and these would clash. +-- +newFileMonitor :: Eq a => FilePath -- ^ The file to cache the state of the + -- file monitor. Must be unique. 
+ -> FileMonitor a b +newFileMonitor path = FileMonitor path (==) False + +-- | The result of 'checkFileMonitorChanged': either the monitored files or +-- value changed (and it tells us which it was) or nothing changed and we get +-- the cached result. +-- +data MonitorChanged a b = + -- | The monitored files and value did not change. The cached result is + -- @b@. + -- + -- The set of monitored files is also returned. This is useful + -- for composing or nesting 'FileMonitor's. + MonitorUnchanged b [MonitorFilePath] + + -- | The monitor found that something changed. The reason is given. + -- + | MonitorChanged (MonitorChangedReason a) + deriving Show + +-- | What kind of change 'checkFileMonitorChanged' detected. +-- +data MonitorChangedReason a = + + -- | One of the files changed (existence, file type, mtime or file + -- content, depending on the 'MonitorFilePath' in question) + MonitoredFileChanged FilePath + + -- | The pure input value changed. + -- + -- The previous cached key value is also returned. This is sometimes + -- useful when using a 'fileMonitorKeyValid' function that is not simply + -- '(==)', when invalidation can be partial. In such cases it can make + -- sense to 'updateFileMonitor' with a key value that's a combination of + -- the new and old (e.g. set union). + | MonitoredValueChanged a + + -- | There was no saved monitor state, cached value etc. Ie the file + -- for the 'FileMonitor' does not exist. + | MonitorFirstRun + + -- | There was existing state, but we could not read it. This typically + -- happens when the code has changed compared to an existing 'FileMonitor' + -- cache file and type of the input value or cached value has changed such + -- that we cannot decode the values. This is completely benign as we can + -- treat is just as if there were no cache file and re-run. + | MonitorCorruptCache + deriving (Eq, Show, Functor) + +-- | Test if the input value or files monitored by the 'FileMonitor' have +-- changed. 
If not, return the cached value. +-- +-- See 'FileMonitor' for a full explanation. +-- +checkFileMonitorChanged + :: (Binary a, Binary b) + => FileMonitor a b -- ^ cache file path + -> FilePath -- ^ root directory + -> a -- ^ guard or key value + -> IO (MonitorChanged a b) -- ^ did the key or any paths change? +checkFileMonitorChanged + monitor@FileMonitor { fileMonitorKeyValid, + fileMonitorCheckIfOnlyValueChanged } + root currentKey = + + -- Consider it a change if the cache file does not exist, + -- or we cannot decode it. Sadly ErrorCall can still happen, despite + -- using decodeFileOrFail, e.g. Data.Char.chr errors + + handleDoesNotExist (MonitorChanged MonitorFirstRun) $ + handleErrorCall (MonitorChanged MonitorCorruptCache) $ + readCacheFile monitor + >>= either (\_ -> return (MonitorChanged MonitorCorruptCache)) + checkStatusCache + + where + checkStatusCache (cachedFileStatus, cachedKey, cachedResult) = do + change <- checkForChanges + case change of + Just reason -> return (MonitorChanged reason) + Nothing -> return (MonitorUnchanged cachedResult monitorFiles) + where monitorFiles = reconstructMonitorFilePaths cachedFileStatus + where + -- In fileMonitorCheckIfOnlyValueChanged mode we want to guarantee that + -- if we return MonitoredValueChanged that only the value changed. + -- We do that by checkin for file changes first. Otherwise it makes + -- more sense to do the cheaper test first. 
+ checkForChanges + | fileMonitorCheckIfOnlyValueChanged + = checkFileChange cachedFileStatus cachedKey cachedResult + `mplusMaybeT` + checkValueChange cachedKey + + | otherwise + = checkValueChange cachedKey + `mplusMaybeT` + checkFileChange cachedFileStatus cachedKey cachedResult + + mplusMaybeT :: Monad m => m (Maybe a) -> m (Maybe a) -> m (Maybe a) + mplusMaybeT ma mb = do + mx <- ma + case mx of + Nothing -> mb + Just x -> return (Just x) + + -- Check if the guard value has changed + checkValueChange cachedKey + | not (fileMonitorKeyValid currentKey cachedKey) + = return (Just (MonitoredValueChanged cachedKey)) + | otherwise + = return Nothing + + -- Check if any file has changed + checkFileChange cachedFileStatus cachedKey cachedResult = do + res <- probeFileSystem root cachedFileStatus + case res of + -- Some monitored file has changed + Left changedPath -> + return (Just (MonitoredFileChanged (normalise changedPath))) + + -- No monitored file has changed + Right (cachedFileStatus', cacheStatus) -> do + + -- But we might still want to update the cache + whenCacheChanged cacheStatus $ + rewriteCacheFile monitor cachedFileStatus' cachedKey cachedResult + + return Nothing + +-- | Helper for reading the cache file. +-- +-- This determines the type and format of the binary cache file. +-- +readCacheFile :: (Binary a, Binary b) + => FileMonitor a b + -> IO (Either String (MonitorStateFileSet, a, b)) +readCacheFile FileMonitor {fileMonitorCacheFile} = + withBinaryFile fileMonitorCacheFile ReadMode $ \hnd -> + Binary.decodeOrFailIO =<< BS.hGetContents hnd + +-- | Helper for writing the cache file. +-- +-- This determines the type and format of the binary cache file. 
+-- +rewriteCacheFile :: (Binary a, Binary b) + => FileMonitor a b + -> MonitorStateFileSet -> a -> b -> IO () +rewriteCacheFile FileMonitor {fileMonitorCacheFile} fileset key result = + writeFileAtomic fileMonitorCacheFile $ + Binary.encode (fileset, key, result) + +-- | Probe the file system to see if any of the monitored files have changed. +-- +-- It returns Nothing if any file changed, or returns a possibly updated +-- file 'MonitorStateFileSet' plus an indicator of whether it actually changed. +-- +-- We may need to update the cache since there may be changes in the filesystem +-- state which don't change any of our affected files. +-- +-- Consider the glob @{proj1,proj2}\/\*.cabal@. Say we first run and find a +-- @proj1@ directory containing @proj1.cabal@ yet no @proj2@. If we later run +-- and find @proj2@ was created, yet contains no files matching @*.cabal@ then +-- we want to update the cache despite no changes in our relevant file set. +-- Specifically, we should add an mtime for this directory so we can avoid +-- re-traversing the directory in future runs. +-- +probeFileSystem :: FilePath -> MonitorStateFileSet + -> IO (Either FilePath (MonitorStateFileSet, CacheChanged)) +probeFileSystem root (MonitorStateFileSet singlePaths globPaths) = + runChangedM $ do + sequence_ + [ probeMonitorStateFileStatus root file status + | MonitorStateFile _ _ file status <- singlePaths ] + -- The glob monitors can require state changes + globPaths' <- + sequence + [ probeMonitorStateGlob root globPath + | globPath <- globPaths ] + return (MonitorStateFileSet singlePaths globPaths') + + +----------------------------------------------- +-- Monad for checking for file system changes +-- +-- We need to be able to bail out if we detect a change (using ExceptT), +-- but if there's no change we need to be able to rebuild the monitor +-- state. 
And we want to optimise that rebuilding by keeping track if +-- anything actually changed (using StateT), so that in the typical case +-- we can avoid rewriting the state file. + +newtype ChangedM a = ChangedM (StateT CacheChanged (ExceptT FilePath IO) a) + deriving (Functor, Applicative, Monad, MonadIO) + +runChangedM :: ChangedM a -> IO (Either FilePath (a, CacheChanged)) +runChangedM (ChangedM action) = + runExceptT $ State.runStateT action CacheUnchanged + +somethingChanged :: FilePath -> ChangedM a +somethingChanged path = ChangedM $ throwError path + +cacheChanged :: ChangedM () +cacheChanged = ChangedM $ State.put CacheChanged + +mapChangedFile :: (FilePath -> FilePath) -> ChangedM a -> ChangedM a +mapChangedFile adjust (ChangedM a) = + ChangedM (mapStateT (withExceptT adjust) a) + +data CacheChanged = CacheChanged | CacheUnchanged + +whenCacheChanged :: Monad m => CacheChanged -> m () -> m () +whenCacheChanged CacheChanged action = action +whenCacheChanged CacheUnchanged _ = return () + +---------------------- + +-- | Probe the file system to see if a single monitored file has changed. +-- +probeMonitorStateFileStatus :: FilePath -> FilePath + -> MonitorStateFileStatus + -> ChangedM () +probeMonitorStateFileStatus root file status = + case status of + MonitorStateFileExists -> + probeFileExistence root file + + MonitorStateFileModTime mtime -> + probeFileModificationTime root file mtime + + MonitorStateFileHashed mtime hash -> + probeFileModificationTimeAndHash root file mtime hash + + MonitorStateDirExists -> + probeDirExistence root file + + MonitorStateDirModTime mtime -> + probeFileModificationTime root file mtime + + MonitorStateNonExistent -> + probeFileNonExistence root file + + MonitorStateAlreadyChanged -> + somethingChanged file + + +-- | Probe the file system to see if a monitored file glob has changed. 
+-- +probeMonitorStateGlob :: FilePath -- ^ root path + -> MonitorStateGlob + -> ChangedM MonitorStateGlob +probeMonitorStateGlob relroot + (MonitorStateGlob kindfile kinddir globroot glob) = do + root <- liftIO $ getFilePathRootDirectory globroot relroot + case globroot of + FilePathRelative -> + MonitorStateGlob kindfile kinddir globroot <$> + probeMonitorStateGlobRel kindfile kinddir root "." glob + + -- for absolute cases, make the changed file we report absolute too + _ -> + mapChangedFile (root ) $ + MonitorStateGlob kindfile kinddir globroot <$> + probeMonitorStateGlobRel kindfile kinddir root "" glob + +probeMonitorStateGlobRel :: MonitorKindFile -> MonitorKindDir + -> FilePath -- ^ root path + -> FilePath -- ^ path of the directory we are + -- looking in relative to @root@ + -> MonitorStateGlobRel + -> ChangedM MonitorStateGlobRel +probeMonitorStateGlobRel kindfile kinddir root dirName + (MonitorStateGlobDirs glob globPath mtime children) = do + change <- liftIO $ checkDirectoryModificationTime (root dirName) mtime + case change of + Nothing -> do + children' <- sequence + [ do fstate' <- probeMonitorStateGlobRel + kindfile kinddir root + (dirName fname) fstate + return (fname, fstate') + | (fname, fstate) <- children ] + return $! MonitorStateGlobDirs glob globPath mtime children' + + Just mtime' -> do + -- directory modification time changed: + -- a matching subdir may have been added or deleted + matches <- filterM (\entry -> let subdir = root dirName entry + in liftIO $ doesDirectoryExist subdir) + . filter (matchGlob glob) + =<< liftIO (getDirectoryContents (root dirName)) + + children' <- mapM probeMergeResult $ + mergeBy (\(path1,_) path2 -> compare path1 path2) + children + (sort matches) + return $! MonitorStateGlobDirs glob globPath mtime' children' + -- Note that just because the directory has changed, we don't force + -- a cache rewrite with 'cacheChanged' since that has some cost, and + -- all we're saving is scanning the directory. 
But we do rebuild the + -- cache with the new mtime', so that if the cache is rewritten for + -- some other reason, we'll take advantage of that. + + where + probeMergeResult :: MergeResult (FilePath, MonitorStateGlobRel) FilePath + -> ChangedM (FilePath, MonitorStateGlobRel) + + -- Only in cached (directory deleted) + probeMergeResult (OnlyInLeft (path, fstate)) = do + case allMatchingFiles (dirName path) fstate of + [] -> return (path, fstate) + -- Strictly speaking we should be returning 'CacheChanged' above + -- as we should prune the now-missing 'MonitorStateGlobRel'. However + -- we currently just leave these now-redundant entries in the + -- cache as they cost no IO and keeping them allows us to avoid + -- rewriting the cache. + (file:_) -> somethingChanged file + + -- Only in current filesystem state (directory added) + probeMergeResult (OnlyInRight path) = do + fstate <- liftIO $ buildMonitorStateGlobRel Nothing Map.empty + kindfile kinddir root (dirName path) globPath + case allMatchingFiles (dirName path) fstate of + (file:_) -> somethingChanged file + -- This is the only case where we use 'cacheChanged' because we can + -- have a whole new dir subtree (of unbounded size and cost), so we + -- need to save the state of that new subtree in the cache. + [] -> cacheChanged >> return (path, fstate) + + -- Found in path + probeMergeResult (InBoth (path, fstate) _) = do + fstate' <- probeMonitorStateGlobRel kindfile kinddir + root (dirName path) fstate + return (path, fstate') + + -- | Does a 'MonitorStateGlob' have any relevant files within it? 
+ allMatchingFiles :: FilePath -> MonitorStateGlobRel -> [FilePath] + allMatchingFiles dir (MonitorStateGlobFiles _ _ entries) = + [ dir fname | (fname, _) <- entries ] + allMatchingFiles dir (MonitorStateGlobDirs _ _ _ entries) = + [ res + | (subdir, fstate) <- entries + , res <- allMatchingFiles (dir subdir) fstate ] + allMatchingFiles dir MonitorStateGlobDirTrailing = + [dir] + +probeMonitorStateGlobRel _ _ root dirName + (MonitorStateGlobFiles glob mtime children) = do + change <- liftIO $ checkDirectoryModificationTime (root dirName) mtime + mtime' <- case change of + Nothing -> return mtime + Just mtime' -> do + -- directory modification time changed: + -- a matching file may have been added or deleted + matches <- return . filter (matchGlob glob) + =<< liftIO (getDirectoryContents (root dirName)) + + mapM_ probeMergeResult $ + mergeBy (\(path1,_) path2 -> compare path1 path2) + children + (sort matches) + return mtime' + + -- Check that none of the children have changed + forM_ children $ \(file, status) -> + probeMonitorStateFileStatus root (dirName file) status + + + return (MonitorStateGlobFiles glob mtime' children) + -- Again, we don't force a cache rewite with 'cacheChanged', but we do use + -- the new mtime' if any. + where + probeMergeResult :: MergeResult (FilePath, MonitorStateFileStatus) FilePath + -> ChangedM () + probeMergeResult mr = case mr of + InBoth _ _ -> return () + -- this is just to be able to accurately report which file changed: + OnlyInLeft (path, _) -> somethingChanged (dirName path) + OnlyInRight path -> somethingChanged (dirName path) + +probeMonitorStateGlobRel _ _ _ _ MonitorStateGlobDirTrailing = + return MonitorStateGlobDirTrailing + +------------------------------------------------------------------------------ + +-- | Update the input value and the set of files monitored by the +-- 'FileMonitor', plus the cached value that may be returned in future. 
+-- +-- This takes a snapshot of the state of the monitored files right now, so +-- 'checkFileMonitorChanged' will look for file system changes relative to +-- this snapshot. +-- +-- This is typically done once the action has been completed successfully and +-- we have the action's result and we know what files it looked at. See +-- 'FileMonitor' for a full explanation. +-- +-- If we do take the snapshot after the action has completed then we have a +-- problem. The problem is that files might have changed /while/ the action was +-- running but /after/ the action read them. If we take the snapshot after the +-- action completes then we will miss these changes. The solution is to record +-- a timestamp before beginning execution of the action and then we make the +-- conservative assumption that any file that has changed since then has +-- already changed, ie the file monitor state for these files will be such that +-- 'checkFileMonitorChanged' will report that they have changed. +-- +-- So if you do use 'updateFileMonitor' after the action (so you can discover +-- the files used rather than predicting them in advance) then use +-- 'beginUpdateFileMonitor' to get a timestamp and pass that. Alternatively, +-- if you take the snapshot in advance of the action, or you're not monitoring +-- any files then you can use @Nothing@ for the timestamp parameter. 
+-- +updateFileMonitor + :: (Binary a, Binary b) + => FileMonitor a b -- ^ cache file path + -> FilePath -- ^ root directory + -> Maybe MonitorTimestamp -- ^ timestamp when the update action started + -> [MonitorFilePath] -- ^ files of interest relative to root + -> a -- ^ the current key value + -> b -- ^ the current result value + -> IO () +updateFileMonitor monitor root startTime monitorFiles + cachedKey cachedResult = do + hashcache <- readCacheFileHashes monitor + msfs <- buildMonitorStateFileSet startTime hashcache root monitorFiles + rewriteCacheFile monitor msfs cachedKey cachedResult + +-- | A timestamp to help with the problem of file changes during actions. +-- See 'updateFileMonitor' for details. +-- +newtype MonitorTimestamp = MonitorTimestamp ModTime + +-- | Record a timestamp at the beginning of an action, and when the action +-- completes call 'updateFileMonitor' passing it the timestamp. +-- See 'updateFileMonitor' for details. +-- +beginUpdateFileMonitor :: IO MonitorTimestamp +beginUpdateFileMonitor = MonitorTimestamp <$> getCurTime + +-- | Take the snapshot of the monitored files. That is, given the +-- specification of the set of files we need to monitor, inspect the state +-- of the file system now and collect the information we'll need later to +-- determine if anything has changed. 
+-- +buildMonitorStateFileSet :: Maybe MonitorTimestamp -- ^ optional: timestamp + -- of the start of the action + -> FileHashCache -- ^ existing file hashes + -> FilePath -- ^ root directory + -> [MonitorFilePath] -- ^ patterns of interest + -- relative to root + -> IO MonitorStateFileSet +buildMonitorStateFileSet mstartTime hashcache root = + go [] [] + where + go :: [MonitorStateFile] -> [MonitorStateGlob] + -> [MonitorFilePath] -> IO MonitorStateFileSet + go !singlePaths !globPaths [] = + return (MonitorStateFileSet (reverse singlePaths) (reverse globPaths)) + + go !singlePaths !globPaths + (MonitorFile kindfile kinddir path : monitors) = do + monitorState <- MonitorStateFile kindfile kinddir path + <$> buildMonitorStateFile mstartTime hashcache + kindfile kinddir root path + go (monitorState : singlePaths) globPaths monitors + + go !singlePaths !globPaths + (MonitorFileGlob kindfile kinddir globPath : monitors) = do + monitorState <- buildMonitorStateGlob mstartTime hashcache + kindfile kinddir root globPath + go singlePaths (monitorState : globPaths) monitors + + +buildMonitorStateFile :: Maybe MonitorTimestamp -- ^ start time of update + -> FileHashCache -- ^ existing file hashes + -> MonitorKindFile -> MonitorKindDir + -> FilePath -- ^ the root directory + -> FilePath + -> IO MonitorStateFileStatus +buildMonitorStateFile mstartTime hashcache kindfile kinddir root path = do + let abspath = root </> path + isFile <- doesFileExist abspath + isDir <- doesDirectoryExist abspath + case (isFile, kindfile, isDir, kinddir) of + (_, FileNotExists, _, DirNotExists) -> + -- we don't need to care if it exists now, since we check at probe time + return MonitorStateNonExistent + + (False, _, False, _) -> + return MonitorStateAlreadyChanged + + (True, FileExists, _, _) -> + return MonitorStateFileExists + + (True, FileModTime, _, _) -> + handleIOException MonitorStateAlreadyChanged $ do + mtime <- getModTime abspath + if changedDuringUpdate mstartTime mtime + then return 
MonitorStateAlreadyChanged + else return (MonitorStateFileModTime mtime) + + (True, FileHashed, _, _) -> + handleIOException MonitorStateAlreadyChanged $ do + mtime <- getModTime abspath + if changedDuringUpdate mstartTime mtime + then return MonitorStateAlreadyChanged + else do hash <- getFileHash hashcache abspath abspath mtime + return (MonitorStateFileHashed mtime hash) + + (_, _, True, DirExists) -> + return MonitorStateDirExists + + (_, _, True, DirModTime) -> + handleIOException MonitorStateAlreadyChanged $ do + mtime <- getModTime abspath + if changedDuringUpdate mstartTime mtime + then return MonitorStateAlreadyChanged + else return (MonitorStateDirModTime mtime) + + (False, _, True, DirNotExists) -> return MonitorStateAlreadyChanged + (True, FileNotExists, False, _) -> return MonitorStateAlreadyChanged + +-- | If we have a timestamp for the beginning of the update, then any file +-- mtime later than this means that it changed during the update and we ought +-- to consider the file as already changed. +-- +changedDuringUpdate :: Maybe MonitorTimestamp -> ModTime -> Bool +changedDuringUpdate (Just (MonitorTimestamp startTime)) mtime + = mtime > startTime +changedDuringUpdate _ _ = False + +-- | Much like 'buildMonitorStateFileSet' but for the somewhat complicated case +-- of a file glob. +-- +-- This gets used both by 'buildMonitorStateFileSet' when we're taking the +-- file system snapshot, but also by 'probeGlobStatus' as part of checking +-- the monitored (globed) files for changes when we find a whole new subtree. 
+-- +buildMonitorStateGlob :: Maybe MonitorTimestamp -- ^ start time of update + -> FileHashCache -- ^ existing file hashes + -> MonitorKindFile -> MonitorKindDir + -> FilePath -- ^ the root directory + -> FilePathGlob -- ^ the matching glob + -> IO MonitorStateGlob +buildMonitorStateGlob mstartTime hashcache kindfile kinddir relroot + (FilePathGlob globroot globPath) = do + root <- liftIO $ getFilePathRootDirectory globroot relroot + MonitorStateGlob kindfile kinddir globroot <$> + buildMonitorStateGlobRel + mstartTime hashcache kindfile kinddir root "." globPath + +buildMonitorStateGlobRel :: Maybe MonitorTimestamp -- ^ start time of update + -> FileHashCache -- ^ existing file hashes + -> MonitorKindFile -> MonitorKindDir + -> FilePath -- ^ the root directory + -> FilePath -- ^ directory we are examining + -- relative to the root + -> FilePathGlobRel -- ^ the matching glob + -> IO MonitorStateGlobRel +buildMonitorStateGlobRel mstartTime hashcache kindfile kinddir root + dir globPath = do + let absdir = root </> dir + dirEntries <- getDirectoryContents absdir + dirMTime <- getModTime absdir + case globPath of + GlobDir glob globPath' -> do + subdirs <- filterM (\subdir -> doesDirectoryExist (absdir </> subdir)) + $ filter (matchGlob glob) dirEntries + subdirStates <- + forM (sort subdirs) $ \subdir -> do + fstate <- buildMonitorStateGlobRel + mstartTime hashcache kindfile kinddir root + (dir </> subdir) globPath' + return (subdir, fstate) + return $! MonitorStateGlobDirs glob globPath' dirMTime subdirStates + + GlobFile glob -> do + let files = filter (matchGlob glob) dirEntries + filesStates <- + forM (sort files) $ \file -> do + fstate <- buildMonitorStateFile + mstartTime hashcache kindfile kinddir root + (dir </> file) + return (file, fstate) + return $! MonitorStateGlobFiles glob dirMTime filesStates + + GlobDirTrailing -> + return MonitorStateGlobDirTrailing + + +-- | We really want to avoid re-hashing files all the time. 
We already make +-- the assumption that if a file mtime has not changed then we don't need to +-- bother checking if the content hash has changed. We can apply the same +-- assumption when updating the file monitor state. In the typical case of +-- updating a file monitor the set of files is the same or largely the same so +-- we can grab the previously known content hashes with their corresponding +-- mtimes. +-- +type FileHashCache = Map FilePath (ModTime, Hash) + +-- | We declare it a cache hit if the mtime of a file is the same as before. +-- +lookupFileHashCache :: FileHashCache -> FilePath -> ModTime -> Maybe Hash +lookupFileHashCache hashcache file mtime = do + (mtime', hash) <- Map.lookup file hashcache + guard (mtime' == mtime) + return hash + +-- | Either get it from the cache or go read the file +getFileHash :: FileHashCache -> FilePath -> FilePath -> ModTime -> IO Hash +getFileHash hashcache relfile absfile mtime = + case lookupFileHashCache hashcache relfile mtime of + Just hash -> return hash + Nothing -> readFileHash absfile + +-- | Build a 'FileHashCache' from the previous 'MonitorStateFileSet'. While +-- in principle we could preserve the structure of the previous state, given +-- that the set of files to monitor can change then it's simpler just to throw +-- away the structure and use a finite map. 
+-- +readCacheFileHashes :: (Binary a, Binary b) + => FileMonitor a b -> IO FileHashCache +readCacheFileHashes monitor = + handleDoesNotExist Map.empty $ + handleErrorCall Map.empty $ do + res <- readCacheFile monitor + case res of + Left _ -> return Map.empty + Right (msfs, _, _) -> return (mkFileHashCache msfs) + where + mkFileHashCache :: MonitorStateFileSet -> FileHashCache + mkFileHashCache (MonitorStateFileSet singlePaths globPaths) = + collectAllFileHashes singlePaths + `Map.union` collectAllGlobHashes globPaths + + collectAllFileHashes singlePaths = + Map.fromList [ (fpath, (mtime, hash)) + | MonitorStateFile _ _ fpath + (MonitorStateFileHashed mtime hash) <- singlePaths ] + + collectAllGlobHashes globPaths = + Map.fromList [ (fpath, (mtime, hash)) + | MonitorStateGlob _ _ _ gstate <- globPaths + , (fpath, (mtime, hash)) <- collectGlobHashes "" gstate ] + + collectGlobHashes dir (MonitorStateGlobDirs _ _ _ entries) = + [ res + | (subdir, fstate) <- entries + , res <- collectGlobHashes (dir </> subdir) fstate ] + + collectGlobHashes dir (MonitorStateGlobFiles _ _ entries) = + [ (dir </> fname, (mtime, hash)) + | (fname, MonitorStateFileHashed mtime hash) <- entries ] + + collectGlobHashes _dir MonitorStateGlobDirTrailing = + [] + + +------------------------------------------------------------------------------ +-- Utils +-- + +-- | Within the @root@ directory, check if @file@ has its 'ModTime' is +-- the same as @mtime@, short-circuiting if it is different. +probeFileModificationTime :: FilePath -> FilePath -> ModTime -> ChangedM () +probeFileModificationTime root file mtime = do + unchanged <- liftIO $ checkModificationTimeUnchanged root file mtime + unless unchanged (somethingChanged file) + +-- | Within the @root@ directory, check if @file@ has its 'ModTime' and +-- 'Hash' is the same as @mtime@ and @hash@, short-circuiting if it is +-- different. 
+probeFileModificationTimeAndHash :: FilePath -> FilePath -> ModTime -> Hash + -> ChangedM () +probeFileModificationTimeAndHash root file mtime hash = do + unchanged <- liftIO $ + checkFileModificationTimeAndHashUnchanged root file mtime hash + unless unchanged (somethingChanged file) + +-- | Within the @root@ directory, check if @file@ still exists as a file. +-- If it *does not* exist, short-circuit. +probeFileExistence :: FilePath -> FilePath -> ChangedM () +probeFileExistence root file = do + existsFile <- liftIO $ doesFileExist (root </> file) + unless existsFile (somethingChanged file) + +-- | Within the @root@ directory, check if @dir@ still exists. +-- If it *does not* exist, short-circuit. +probeDirExistence :: FilePath -> FilePath -> ChangedM () +probeDirExistence root dir = do + existsDir <- liftIO $ doesDirectoryExist (root </> dir) + unless existsDir (somethingChanged dir) + +-- | Within the @root@ directory, check if @file@ still does not exist. +-- If it *does* exist, short-circuit. +probeFileNonExistence :: FilePath -> FilePath -> ChangedM () +probeFileNonExistence root file = do + existsFile <- liftIO $ doesFileExist (root </> file) + existsDir <- liftIO $ doesDirectoryExist (root </> file) + when (existsFile || existsDir) (somethingChanged file) + +-- | Returns @True@ if, inside the @root@ directory, @file@ has the same +-- 'ModTime' as @mtime@. +checkModificationTimeUnchanged :: FilePath -> FilePath + -> ModTime -> IO Bool +checkModificationTimeUnchanged root file mtime = + handleIOException False $ do + mtime' <- getModTime (root </> file) + return (mtime == mtime') + +-- | Returns @True@ if, inside the @root@ directory, @file@ has the +-- same 'ModTime' and 'Hash' as @mtime and @chash@. 
+checkFileModificationTimeAndHashUnchanged :: FilePath -> FilePath + -> ModTime -> Hash -> IO Bool +checkFileModificationTimeAndHashUnchanged root file mtime chash = + handleIOException False $ do + mtime' <- getModTime (root </> file) + if mtime == mtime' + then return True + else do + chash' <- readFileHash (root </> file) + return (chash == chash') + +-- | Read a non-cryptographic hash of a @file@. +readFileHash :: FilePath -> IO Hash +readFileHash file = + withBinaryFile file ReadMode $ \hnd -> + evaluate . Hashable.hash =<< BS.hGetContents hnd + +-- | Given a directory @dir@, return @Nothing@ if its 'ModTime' +-- is the same as @mtime@, and the new 'ModTime' if it is not. +checkDirectoryModificationTime :: FilePath -> ModTime -> IO (Maybe ModTime) +checkDirectoryModificationTime dir mtime = + handleIOException Nothing $ do + mtime' <- getModTime dir + if mtime == mtime' + then return Nothing + else return (Just mtime') + +-- | Run an IO computation, returning @e@ if there is an 'error' +-- call. ('ErrorCall') +handleErrorCall :: a -> IO a -> IO a +handleErrorCall e = + handle (\(ErrorCall _) -> return e) + +-- | Run an IO computation, returning @e@ if there is any 'IOException'. +-- +-- This policy is OK in the file monitor code because it just causes the +-- monitor to report that something changed, and then code reacting to that +-- will normally encounter the same IO exception when it re-runs the action +-- that uses the file. 
+-- +handleIOException :: a -> IO a -> IO a +handleIOException e = + handle (anyIOException e) + where + anyIOException :: a -> IOException -> IO a + anyIOException x _ = return x + + +------------------------------------------------------------------------------ +-- Instances +-- + +instance Binary MonitorStateFileSet where + put (MonitorStateFileSet singlePaths globPaths) = do + put (1 :: Int) -- version + put singlePaths + put globPaths + get = do + ver <- get + if ver == (1 :: Int) + then do singlePaths <- get + globPaths <- get + return $! MonitorStateFileSet singlePaths globPaths + else fail "MonitorStateFileSet: wrong version" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Freeze.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Freeze.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Freeze.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Freeze.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,269 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Freeze +-- Copyright : (c) David Himmelstrup 2005 +-- Duncan Coutts 2011 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- The cabal freeze command +----------------------------------------------------------------------------- +module Distribution.Client.Freeze ( + freeze, getFreezePkgs + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Client.Config ( SavedConfig(..) 
) +import Distribution.Client.Types +import Distribution.Client.Targets +import Distribution.Client.Dependency +import Distribution.Client.IndexUtils as IndexUtils + ( getSourcePackages, getInstalledPackages ) +import Distribution.Client.SolverInstallPlan + ( SolverInstallPlan, SolverPlanPackage ) +import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan +import Distribution.Client.Setup + ( GlobalFlags(..), FreezeFlags(..), ConfigExFlags(..) + , RepoContext(..) ) +import Distribution.Client.Sandbox.PackageEnvironment + ( loadUserConfig, pkgEnvSavedConfig, showPackageEnvironment, + userPackageEnvironmentFile ) +import Distribution.Client.Sandbox.Types + ( SandboxPackageInfo(..) ) + +import Distribution.Solver.Types.ConstraintSource +import Distribution.Solver.Types.LabeledPackageConstraint +import Distribution.Solver.Types.OptionalStanza +import Distribution.Solver.Types.PkgConfigDb +import Distribution.Solver.Types.SolverId + +import Distribution.Package + ( Package, packageId, packageName, packageVersion ) +import Distribution.Simple.Compiler + ( Compiler, compilerInfo, PackageDBStack ) +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import Distribution.Simple.Program + ( ProgramDb ) +import Distribution.Simple.Setup + ( fromFlag, fromFlagOrDefault, flagToMaybe ) +import Distribution.Simple.Utils + ( die', notice, debug, writeFileAtomic ) +import Distribution.System + ( Platform ) +import Distribution.Text + ( display ) +import Distribution.Verbosity + ( Verbosity ) + +import qualified Data.ByteString.Lazy.Char8 as BS.Char8 +import Distribution.Version + ( thisVersion ) + +-- ------------------------------------------------------------ +-- * The freeze command +-- ------------------------------------------------------------ + +-- | Freeze all of the dependencies by writing a constraints section +-- constraining each dependency to an exact version. 
+-- +freeze :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> Platform + -> ProgramDb + -> Maybe SandboxPackageInfo + -> GlobalFlags + -> FreezeFlags + -> IO () +freeze verbosity packageDBs repoCtxt comp platform progdb mSandboxPkgInfo + globalFlags freezeFlags = do + + pkgs <- getFreezePkgs + verbosity packageDBs repoCtxt comp platform progdb mSandboxPkgInfo + globalFlags freezeFlags + + if null pkgs + then notice verbosity $ "No packages to be frozen. " + ++ "As this package has no dependencies." + else if dryRun + then notice verbosity $ unlines $ + "The following packages would be frozen:" + : formatPkgs pkgs + + else freezePackages verbosity globalFlags pkgs + + where + dryRun = fromFlag (freezeDryRun freezeFlags) + +-- | Get the list of packages whose versions would be frozen by the @freeze@ +-- command. +getFreezePkgs :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> Platform + -> ProgramDb + -> Maybe SandboxPackageInfo + -> GlobalFlags + -> FreezeFlags + -> IO [SolverPlanPackage] +getFreezePkgs verbosity packageDBs repoCtxt comp platform progdb mSandboxPkgInfo + globalFlags freezeFlags = do + + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb + sourcePkgDb <- getSourcePackages verbosity repoCtxt + pkgConfigDb <- readPkgConfigDb verbosity progdb + + pkgSpecifiers <- resolveUserTargets verbosity repoCtxt + (fromFlag $ globalWorldFile globalFlags) + (packageIndex sourcePkgDb) + [UserTargetLocalDir "."] + + sanityCheck pkgSpecifiers + planPackages + verbosity comp platform mSandboxPkgInfo freezeFlags + installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers + where + sanityCheck pkgSpecifiers = do + when (not . null $ [n | n@(NamedPackage _ _) <- pkgSpecifiers]) $ + die' verbosity $ "internal error: 'resolveUserTargets' returned " + ++ "unexpected named package specifiers!" 
+ when (length pkgSpecifiers /= 1) $ + die' verbosity $ "internal error: 'resolveUserTargets' returned " + ++ "unexpected source package specifiers!" + +planPackages :: Verbosity + -> Compiler + -> Platform + -> Maybe SandboxPackageInfo + -> FreezeFlags + -> InstalledPackageIndex + -> SourcePackageDb + -> PkgConfigDb + -> [PackageSpecifier UnresolvedSourcePackage] + -> IO [SolverPlanPackage] +planPackages verbosity comp platform mSandboxPkgInfo freezeFlags + installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers = do + + solver <- chooseSolver verbosity + (fromFlag (freezeSolver freezeFlags)) (compilerInfo comp) + notice verbosity "Resolving dependencies..." + + installPlan <- foldProgress logMsg (die' verbosity) return $ + resolveDependencies + platform (compilerInfo comp) pkgConfigDb + solver + resolverParams + + return $ pruneInstallPlan installPlan pkgSpecifiers + + where + resolverParams = + + setMaxBackjumps (if maxBackjumps < 0 then Nothing + else Just maxBackjumps) + + . setIndependentGoals independentGoals + + . setReorderGoals reorderGoals + + . setCountConflicts countConflicts + + . setShadowPkgs shadowPkgs + + . setStrongFlags strongFlags + + . setAllowBootLibInstalls allowBootLibInstalls + + . setSolverVerbosity verbosity + + . addConstraints + [ let pkg = pkgSpecifierTarget pkgSpecifier + pc = PackageConstraint (scopeToplevel pkg) + (PackagePropertyStanzas stanzas) + in LabeledPackageConstraint pc ConstraintSourceFreeze + | pkgSpecifier <- pkgSpecifiers ] + + . 
maybe id applySandboxInstallPolicy mSandboxPkgInfo + + $ standardInstallPolicy installedPkgIndex sourcePkgDb pkgSpecifiers + + logMsg message rest = debug verbosity message >> rest + + stanzas = [ TestStanzas | testsEnabled ] + ++ [ BenchStanzas | benchmarksEnabled ] + testsEnabled = fromFlagOrDefault False $ freezeTests freezeFlags + benchmarksEnabled = fromFlagOrDefault False $ freezeBenchmarks freezeFlags + + reorderGoals = fromFlag (freezeReorderGoals freezeFlags) + countConflicts = fromFlag (freezeCountConflicts freezeFlags) + independentGoals = fromFlag (freezeIndependentGoals freezeFlags) + shadowPkgs = fromFlag (freezeShadowPkgs freezeFlags) + strongFlags = fromFlag (freezeStrongFlags freezeFlags) + maxBackjumps = fromFlag (freezeMaxBackjumps freezeFlags) + allowBootLibInstalls = fromFlag (freezeAllowBootLibInstalls freezeFlags) + + +-- | Remove all unneeded packages from an install plan. +-- +-- A package is unneeded if it is either +-- +-- 1) the package that we are freezing, or +-- +-- 2) not a dependency (directly or transitively) of the package we are +-- freezing. This is useful for removing previously installed packages +-- which are no longer required from the install plan. +-- +-- Invariant: @pkgSpecifiers@ must refer to packages which are not +-- 'PreExisting' in the 'SolverInstallPlan'. +pruneInstallPlan :: SolverInstallPlan + -> [PackageSpecifier UnresolvedSourcePackage] + -> [SolverPlanPackage] +pruneInstallPlan installPlan pkgSpecifiers = + removeSelf pkgIds $ + SolverInstallPlan.dependencyClosure installPlan pkgIds + where + pkgIds = [ PlannedId (packageId pkg) + | SpecificSourcePackage pkg <- pkgSpecifiers ] + removeSelf [thisPkg] = filter (\pp -> packageId pp /= packageId thisPkg) + removeSelf _ = error $ "internal error: 'pruneInstallPlan' given " + ++ "unexpected package specifiers!" 
+ + +freezePackages :: Package pkg => Verbosity -> GlobalFlags -> [pkg] -> IO () +freezePackages verbosity globalFlags pkgs = do + + pkgEnv <- fmap (createPkgEnv . addFrozenConstraints) $ + loadUserConfig verbosity "" + (flagToMaybe . globalConstraintsFile $ globalFlags) + writeFileAtomic userPackageEnvironmentFile $ showPkgEnv pkgEnv + where + addFrozenConstraints config = + config { + savedConfigureExFlags = (savedConfigureExFlags config) { + configExConstraints = map constraint pkgs + } + } + constraint pkg = + (pkgIdToConstraint $ packageId pkg + ,ConstraintSourceUserConfig userPackageEnvironmentFile) + where + pkgIdToConstraint pkgId = + UserConstraint (UserQualified UserQualToplevel (packageName pkgId)) + (PackagePropertyVersion $ thisVersion (packageVersion pkgId)) + createPkgEnv config = mempty { pkgEnvSavedConfig = config } + showPkgEnv = BS.Char8.pack . showPackageEnvironment + + +formatPkgs :: Package pkg => [pkg] -> [String] +formatPkgs = map $ showPkg . packageId + where + showPkg pid = name pid ++ " == " ++ version pid + name = display . packageName + version = display . 
packageVersion diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/GenBounds.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/GenBounds.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/GenBounds.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/GenBounds.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,169 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.GenBounds +-- Copyright : (c) Doug Beardsley 2015 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- The cabal gen-bounds command for generating PVP-compliant version bounds. +----------------------------------------------------------------------------- +module Distribution.Client.GenBounds ( + genBounds + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Client.Init + ( incVersion ) +import Distribution.Client.Freeze + ( getFreezePkgs ) +import Distribution.Client.Sandbox.Types + ( SandboxPackageInfo(..) 
) +import Distribution.Client.Setup + ( GlobalFlags(..), FreezeFlags(..), RepoContext ) +import Distribution.Package + ( Package(..), unPackageName, packageName, packageVersion ) +import Distribution.PackageDescription + ( enabledBuildDepends ) +import Distribution.PackageDescription.Configuration + ( finalizePD ) +import Distribution.PackageDescription.Parsec + ( readGenericPackageDescription ) +import Distribution.Types.ComponentRequestedSpec + ( defaultComponentRequestedSpec ) +import Distribution.Types.Dependency +import Distribution.Simple.Compiler + ( Compiler, PackageDBStack, compilerInfo ) +import Distribution.Simple.Program + ( ProgramDb ) +import Distribution.Simple.Utils + ( tryFindPackageDesc ) +import Distribution.System + ( Platform ) +import Distribution.Text + ( display ) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Version + ( Version, alterVersion + , LowerBound(..), UpperBound(..), VersionRange(..), asVersionIntervals + , orLaterVersion, earlierVersion, intersectVersionRanges ) +import System.Directory + ( getCurrentDirectory ) + +-- | Does this version range have an upper bound? +hasUpperBound :: VersionRange -> Bool +hasUpperBound vr = + case asVersionIntervals vr of + [] -> False + is -> if snd (last is) == NoUpperBound then False else True + +-- | Given a version, return an API-compatible (according to PVP) version range. +-- +-- Example: @0.4.1.2@ produces the version range @>= 0.4.1 && < 0.5@. +-- +-- This version is slightly different than the one in +-- 'Distribution.Client.Init'. This one uses a.b.c as the lower bound because +-- the user could be using a new function introduced in a.b.c which would make +-- ">= a.b" incorrect. +pvpize :: Version -> VersionRange +pvpize v = orLaterVersion (vn 3) + `intersectVersionRanges` + earlierVersion (incVersion 1 (vn 2)) + where + vn n = alterVersion (take n) v + +-- | Show the PVP-mandated version range for this package. 
The @padTo@ parameter +-- specifies the width of the package name column. +showBounds :: Package pkg => Int -> pkg -> String +showBounds padTo p = unwords $ + (padAfter padTo $ unPackageName $ packageName p) : + map showInterval (asVersionIntervals $ pvpize $ packageVersion p) + where + padAfter :: Int -> String -> String + padAfter n str = str ++ replicate (n - length str) ' ' + + showInterval :: (LowerBound, UpperBound) -> String + showInterval (LowerBound _ _, NoUpperBound) = + error "Error: expected upper bound...this should never happen!" + showInterval (LowerBound l _, UpperBound u _) = + unwords [">=", display l, "&& <", display u] + +-- | Entry point for the @gen-bounds@ command. +genBounds + :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> Platform + -> ProgramDb + -> Maybe SandboxPackageInfo + -> GlobalFlags + -> FreezeFlags + -> IO () +genBounds verbosity packageDBs repoCtxt comp platform progdb mSandboxPkgInfo + globalFlags freezeFlags = do + + let cinfo = compilerInfo comp + + cwd <- getCurrentDirectory + path <- tryFindPackageDesc cwd + gpd <- readGenericPackageDescription verbosity path + -- NB: We don't enable tests or benchmarks, since often they + -- don't really have useful bounds. + let epd = finalizePD mempty defaultComponentRequestedSpec + (const True) platform cinfo [] gpd + case epd of + Left _ -> putStrLn "finalizePD failed" + Right (pd,_) -> do + let needBounds = filter (not . hasUpperBound . depVersion) $ + enabledBuildDepends pd defaultComponentRequestedSpec + + if (null needBounds) + then putStrLn + "Congratulations, all your dependencies have upper bounds!" + else go needBounds + where + go needBounds = do + pkgs <- getFreezePkgs + verbosity packageDBs repoCtxt comp platform progdb + mSandboxPkgInfo globalFlags freezeFlags + + putStrLn boundsNeededMsg + + let isNeeded pkg = unPackageName (packageName pkg) + `elem` map depName needBounds + let thePkgs = filter isNeeded pkgs + + let padTo = maximum $ map (length . 
unPackageName . packageName) pkgs + traverse_ (putStrLn . (++",") . showBounds padTo) thePkgs + + depName :: Dependency -> String + depName (Dependency pn _) = unPackageName pn + + depVersion :: Dependency -> VersionRange + depVersion (Dependency _ vr) = vr + +-- | The message printed when some dependencies are found to be lacking proper +-- PVP-mandated bounds. +boundsNeededMsg :: String +boundsNeededMsg = unlines + [ "" + , "The following packages need bounds and here is a suggested starting point." + , "You can copy and paste this into the build-depends section in your .cabal" + , "file and it should work (with the appropriate removal of commas)." + , "" + , "Note that version bounds are a statement that you've successfully built and" + , "tested your package and expect it to work with any of the specified package" + , "versions (PROVIDED that those packages continue to conform with the PVP)." + , "Therefore, the version bounds generated here are the most conservative" + , "based on the versions that you are currently building with. If you know" + , "your package will work with versions outside the ranges generated here," + , "feel free to widen them." 
+ , "" + ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Get.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Get.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Get.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Get.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,299 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Get +-- Copyright : (c) Andrea Vezzosi 2008 +-- Duncan Coutts 2011 +-- John Millikin 2012 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- The 'cabal get' command. +----------------------------------------------------------------------------- + +module Distribution.Client.Get ( + get, + + -- * Cloning 'SourceRepo's + -- | Mainly exported for testing purposes + clonePackagesFromSourceRepo, + ClonePackageException(..), + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude hiding (get) + +import Distribution.Package + ( PackageId, packageId, packageName ) +import Distribution.Simple.Setup + ( Flag(..), fromFlag, fromFlagOrDefault, flagToMaybe ) +import Distribution.Simple.Utils + ( notice, die', info, writeFileAtomic ) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Text (display) +import qualified Distribution.PackageDescription as PD +import Distribution.Simple.Program + ( programName ) + +import Distribution.Client.Setup + ( GlobalFlags(..), GetFlags(..), RepoContext(..) 
) +import Distribution.Client.Types +import Distribution.Client.Targets +import Distribution.Client.Dependency +import Distribution.Client.VCS +import Distribution.Client.FetchUtils +import qualified Distribution.Client.Tar as Tar (extractTarGzFile) +import Distribution.Client.IndexUtils as IndexUtils + ( getSourcePackagesAtIndexState ) +import Distribution.Solver.Types.SourcePackage + +import Control.Exception + ( Exception(..), catch, throwIO ) +import Control.Monad + ( mapM, forM_, mapM_ ) +import qualified Data.Map as Map +import System.Directory + ( createDirectoryIfMissing, doesDirectoryExist, doesFileExist ) +import System.Exit + ( ExitCode(..) ) +import System.FilePath + ( (</>), (<.>), addTrailingPathSeparator ) + + +-- | Entry point for the 'cabal get' command. +get :: Verbosity + -> RepoContext + -> GlobalFlags + -> GetFlags + -> [UserTarget] + -> IO () +get verbosity _ _ _ [] = + notice verbosity "No packages requested. Nothing to do." + +get verbosity repoCtxt globalFlags getFlags userTargets = do + let useSourceRepo = case getSourceRepository getFlags of + NoFlag -> False + _ -> True + + unless useSourceRepo $ + mapM_ (checkTarget verbosity) userTargets + + let idxState = flagToMaybe $ getIndexState getFlags + + sourcePkgDb <- getSourcePackagesAtIndexState verbosity repoCtxt idxState + + pkgSpecifiers <- resolveUserTargets verbosity repoCtxt + (fromFlag $ globalWorldFile globalFlags) + (packageIndex sourcePkgDb) + userTargets + + pkgs <- either (die' verbosity . unlines . 
map show) return $ + resolveWithoutDependencies + (resolverParams sourcePkgDb pkgSpecifiers) + + unless (null prefix) $ + createDirectoryIfMissing True prefix + + if useSourceRepo + then clone pkgs + else unpack pkgs + + where + resolverParams sourcePkgDb pkgSpecifiers = + --TODO: add command-line constraint and preference args for unpack + standardInstallPolicy mempty sourcePkgDb pkgSpecifiers + + prefix = fromFlagOrDefault "" (getDestDir getFlags) + + clone :: [UnresolvedSourcePackage] -> IO () + clone = clonePackagesFromSourceRepo verbosity prefix kind + . map (\pkg -> (packageId pkg, packageSourceRepos pkg)) + where + kind = fromFlag . getSourceRepository $ getFlags + packageSourceRepos :: SourcePackage loc -> [SourceRepo] + packageSourceRepos = PD.sourceRepos + . PD.packageDescription + . packageDescription + + unpack :: [UnresolvedSourcePackage] -> IO () + unpack pkgs = do + forM_ pkgs $ \pkg -> do + location <- fetchPackage verbosity repoCtxt (packageSource pkg) + let pkgid = packageId pkg + descOverride | usePristine = Nothing + | otherwise = packageDescrOverride pkg + case location of + LocalTarballPackage tarballPath -> + unpackPackage verbosity prefix pkgid descOverride tarballPath + + RemoteTarballPackage _tarballURL tarballPath -> + unpackPackage verbosity prefix pkgid descOverride tarballPath + + RepoTarballPackage _repo _pkgid tarballPath -> + unpackPackage verbosity prefix pkgid descOverride tarballPath + + RemoteSourceRepoPackage _repo _ -> + die' verbosity $ "The 'get' command does no yet support targets " + ++ "that are remote source repositories." + + LocalUnpackedPackage _ -> + error "Distribution.Client.Get.unpack: the impossible happened." 
+ where + usePristine = fromFlagOrDefault False (getPristine getFlags) + +checkTarget :: Verbosity -> UserTarget -> IO () +checkTarget verbosity target = case target of + UserTargetLocalDir dir -> die' verbosity (notTarball dir) + UserTargetLocalCabalFile file -> die' verbosity (notTarball file) + _ -> return () + where + notTarball t = + "The 'get' command is for tarball packages. " + ++ "The target '" ++ t ++ "' is not a tarball." + +-- ------------------------------------------------------------ +-- * Unpacking the source tarball +-- ------------------------------------------------------------ + +unpackPackage :: Verbosity -> FilePath -> PackageId + -> PackageDescriptionOverride + -> FilePath -> IO () +unpackPackage verbosity prefix pkgid descOverride pkgPath = do + let pkgdirname = display pkgid + pkgdir = prefix pkgdirname + pkgdir' = addTrailingPathSeparator pkgdir + existsDir <- doesDirectoryExist pkgdir + when existsDir $ die' verbosity $ + "The directory \"" ++ pkgdir' ++ "\" already exists, not unpacking." + existsFile <- doesFileExist pkgdir + when existsFile $ die' verbosity $ + "A file \"" ++ pkgdir ++ "\" is in the way, not unpacking." + notice verbosity $ "Unpacking to " ++ pkgdir' + Tar.extractTarGzFile prefix pkgdirname pkgPath + + case descOverride of + Nothing -> return () + Just pkgtxt -> do + let descFilePath = pkgdir display (packageName pkgid) <.> "cabal" + info verbosity $ + "Updating " ++ descFilePath + ++ " with the latest revision from the index." 
+ writeFileAtomic descFilePath pkgtxt + + +-- ------------------------------------------------------------ +-- * Cloning packages from their declared source repositories +-- ------------------------------------------------------------ + + +data ClonePackageException = + ClonePackageNoSourceRepos PackageId + | ClonePackageNoSourceReposOfKind PackageId (Maybe RepoKind) + | ClonePackageNoRepoType PackageId SourceRepo + | ClonePackageUnsupportedRepoType PackageId SourceRepo RepoType + | ClonePackageNoRepoLocation PackageId SourceRepo + | ClonePackageDestinationExists PackageId FilePath Bool + | ClonePackageFailedWithExitCode PackageId SourceRepo String ExitCode + deriving (Show, Eq) + +instance Exception ClonePackageException where + displayException (ClonePackageNoSourceRepos pkgid) = + "Cannot fetch a source repository for package " ++ display pkgid + ++ ". The package does not specify any source repositories." + + displayException (ClonePackageNoSourceReposOfKind pkgid repoKind) = + "Cannot fetch a source repository for package " ++ display pkgid + ++ ". The package does not specify a source repository of the requested " + ++ "kind" ++ maybe "." (\k -> " (kind " ++ display k ++ ").") repoKind + + displayException (ClonePackageNoRepoType pkgid _repo) = + "Cannot fetch the source repository for package " ++ display pkgid + ++ ". The package's description specifies a source repository but does " + ++ "not specify the repository 'type' field (e.g. git, darcs or hg)." + + displayException (ClonePackageUnsupportedRepoType pkgid _ repoType) = + "Cannot fetch the source repository for package " ++ display pkgid + ++ ". The repository type '" ++ display repoType + ++ "' is not yet supported." + + displayException (ClonePackageNoRepoLocation pkgid _repo) = + "Cannot fetch the source repository for package " ++ display pkgid + ++ ". The package's description specifies a source repository but does " + ++ "not specify the repository 'location' field (i.e. the URL)." 
+ + displayException (ClonePackageDestinationExists pkgid dest isdir) = + "Not fetching the source repository for package " ++ display pkgid ++ ". " + ++ if isdir then "The destination directory " ++ dest ++ " already exists." + else "A file " ++ dest ++ " is in the way." + + displayException (ClonePackageFailedWithExitCode + pkgid repo vcsprogname exitcode) = + "Failed to fetch the source repository for package " ++ display pkgid + ++ maybe "" (", repository location " ++) (PD.repoLocation repo) ++ " (" + ++ vcsprogname ++ " failed with " ++ show exitcode ++ ")." + + +-- | Given a bunch of package ids and their corresponding available +-- 'SourceRepo's, pick a single 'SourceRepo' for each one and clone into +-- new subdirs of the given directory. +-- +clonePackagesFromSourceRepo :: Verbosity + -> FilePath -- ^ destination dir prefix + -> Maybe RepoKind -- ^ preferred 'RepoKind' + -> [(PackageId, [SourceRepo])] + -- ^ the packages and their + -- available 'SourceRepo's + -> IO () +clonePackagesFromSourceRepo verbosity destDirPrefix + preferredRepoKind pkgrepos = do + + -- Do a bunch of checks and collect the required info + pkgrepos' <- mapM preCloneChecks pkgrepos + + -- Configure the VCS drivers for all the repository types we may need + vcss <- configureVCSs verbosity $ + Map.fromList [ (vcsRepoType vcs, vcs) + | (_, _, vcs, _) <- pkgrepos' ] + + -- Now execute all the required commands for each repo + sequence_ + [ cloneSourceRepo verbosity vcs' repo destDir + `catch` \exitcode -> + throwIO (ClonePackageFailedWithExitCode + pkgid repo (programName (vcsProgram vcs)) exitcode) + | (pkgid, repo, vcs, destDir) <- pkgrepos' + , let Just vcs' = Map.lookup (vcsRepoType vcs) vcss + ] + + where + preCloneChecks :: (PackageId, [SourceRepo]) + -> IO (PackageId, SourceRepo, VCS Program, FilePath) + preCloneChecks (pkgid, repos) = do + repo <- case selectPackageSourceRepo preferredRepoKind repos of + Just repo -> return repo + Nothing | null repos -> throwIO 
(ClonePackageNoSourceRepos pkgid) + Nothing -> throwIO (ClonePackageNoSourceReposOfKind + pkgid preferredRepoKind) + + vcs <- case validateSourceRepo repo of + Right (_, _, _, vcs) -> return vcs + Left SourceRepoRepoTypeUnspecified -> + throwIO (ClonePackageNoRepoType pkgid repo) + + Left (SourceRepoRepoTypeUnsupported repoType) -> + throwIO (ClonePackageUnsupportedRepoType pkgid repo repoType) + + Left SourceRepoLocationUnspecified -> + throwIO (ClonePackageNoRepoLocation pkgid repo) + + let destDir = destDirPrefix display (packageName pkgid) + destDirExists <- doesDirectoryExist destDir + destFileExists <- doesFileExist destDir + when (destDirExists || destFileExists) $ + throwIO (ClonePackageDestinationExists pkgid destDir destDirExists) + + return (pkgid, repo, vcs, destDir) + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/GlobalFlags.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/GlobalFlags.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/GlobalFlags.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/GlobalFlags.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,283 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE RecordWildCards #-} + +module Distribution.Client.GlobalFlags ( + GlobalFlags(..) + , defaultGlobalFlags + , RepoContext(..) + , withRepoContext + , withRepoContext' + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Client.Types + ( Repo(..), RemoteRepo(..) 
) +import Distribution.Simple.Setup + ( Flag(..), fromFlag, flagToMaybe ) +import Distribution.Utils.NubList + ( NubList, fromNubList ) +import Distribution.Client.HttpUtils + ( HttpTransport, configureTransport ) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Simple.Utils + ( info ) + +import Control.Concurrent + ( MVar, newMVar, modifyMVar ) +import Control.Exception + ( throwIO ) +import System.FilePath + ( () ) +import Network.URI + ( URI, uriScheme, uriPath ) +import qualified Data.Map as Map + +import qualified Hackage.Security.Client as Sec +import qualified Hackage.Security.Util.Path as Sec +import qualified Hackage.Security.Util.Pretty as Sec +import qualified Hackage.Security.Client.Repository.Cache as Sec +import qualified Hackage.Security.Client.Repository.Local as Sec.Local +import qualified Hackage.Security.Client.Repository.Remote as Sec.Remote +import qualified Distribution.Client.Security.HTTP as Sec.HTTP +import qualified Distribution.Client.Security.DNS as Sec.DNS + +-- ------------------------------------------------------------ +-- * Global flags +-- ------------------------------------------------------------ + +-- | Flags that apply at the top level, not to any sub-command. +data GlobalFlags = GlobalFlags { + globalVersion :: Flag Bool, + globalNumericVersion :: Flag Bool, + globalConfigFile :: Flag FilePath, + globalSandboxConfigFile :: Flag FilePath, + globalConstraintsFile :: Flag FilePath, + globalRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers. 
+ globalCacheDir :: Flag FilePath, + globalLocalRepos :: NubList FilePath, + globalLogsDir :: Flag FilePath, + globalWorldFile :: Flag FilePath, + globalRequireSandbox :: Flag Bool, + globalIgnoreSandbox :: Flag Bool, + globalIgnoreExpiry :: Flag Bool, -- ^ Ignore security expiry dates + globalHttpTransport :: Flag String, + globalNix :: Flag Bool, -- ^ Integrate with Nix + globalStoreDir :: Flag FilePath, + globalProgPathExtra :: NubList FilePath -- ^ Extra program path used for packagedb lookups in a global context (i.e. for http transports) + } deriving Generic + +defaultGlobalFlags :: GlobalFlags +defaultGlobalFlags = GlobalFlags { + globalVersion = Flag False, + globalNumericVersion = Flag False, + globalConfigFile = mempty, + globalSandboxConfigFile = mempty, + globalConstraintsFile = mempty, + globalRemoteRepos = mempty, + globalCacheDir = mempty, + globalLocalRepos = mempty, + globalLogsDir = mempty, + globalWorldFile = mempty, + globalRequireSandbox = Flag False, + globalIgnoreSandbox = Flag False, + globalIgnoreExpiry = Flag False, + globalHttpTransport = mempty, + globalNix = Flag False, + globalStoreDir = mempty, + globalProgPathExtra = mempty + } + +instance Monoid GlobalFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup GlobalFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Repo context +-- ------------------------------------------------------------ + +-- | Access to repositories +data RepoContext = RepoContext { + -- | All user-specified repositories + repoContextRepos :: [Repo] + + -- | Get the HTTP transport + -- + -- The transport will be initialized on the first call to this function. + -- + -- NOTE: It is important that we don't eagerly initialize the transport. 
+ -- Initializing the transport is not free, and especially in contexts where + -- we don't know a-priori whether or not we need the transport (for instance + -- when using cabal in "nix mode") incurring the overhead of transport + -- initialization on _every_ invocation (eg @cabal build@) is undesirable. + , repoContextGetTransport :: IO HttpTransport + + -- | Get the (initialized) secure repo + -- + -- (the 'Repo' type itself is stateless and must remain so, because it + -- must be serializable) + , repoContextWithSecureRepo :: forall a. + Repo + -> (forall down. Sec.Repository down -> IO a) + -> IO a + + -- | Should we ignore expiry times (when checking security)? + , repoContextIgnoreExpiry :: Bool + } + +-- | Wrapper around 'Repository', hiding the type argument +data SecureRepo = forall down. SecureRepo (Sec.Repository down) + +withRepoContext :: Verbosity -> GlobalFlags -> (RepoContext -> IO a) -> IO a +withRepoContext verbosity globalFlags = + withRepoContext' + verbosity + (fromNubList (globalRemoteRepos globalFlags)) + (fromNubList (globalLocalRepos globalFlags)) + (fromFlag (globalCacheDir globalFlags)) + (flagToMaybe (globalHttpTransport globalFlags)) + (flagToMaybe (globalIgnoreExpiry globalFlags)) + (fromNubList (globalProgPathExtra globalFlags)) + +withRepoContext' :: Verbosity -> [RemoteRepo] -> [FilePath] + -> FilePath -> Maybe String -> Maybe Bool + -> [FilePath] + -> (RepoContext -> IO a) + -> IO a +withRepoContext' verbosity remoteRepos localRepos + sharedCacheDir httpTransport ignoreExpiry extraPaths = \callback -> do + transportRef <- newMVar Nothing + let httpLib = Sec.HTTP.transportAdapter + verbosity + (getTransport transportRef) + initSecureRepos verbosity httpLib secureRemoteRepos $ \secureRepos' -> + callback RepoContext { + repoContextRepos = allRemoteRepos + ++ map RepoLocal localRepos + , repoContextGetTransport = getTransport transportRef + , repoContextWithSecureRepo = withSecureRepo secureRepos' + , repoContextIgnoreExpiry = 
fromMaybe False ignoreExpiry + } + where + secureRemoteRepos = + [ (remote, cacheDir) | RepoSecure remote cacheDir <- allRemoteRepos ] + allRemoteRepos = + [ (if isSecure then RepoSecure else RepoRemote) remote cacheDir + | remote <- remoteRepos + , let cacheDir = sharedCacheDir remoteRepoName remote + isSecure = remoteRepoSecure remote == Just True + ] + + getTransport :: MVar (Maybe HttpTransport) -> IO HttpTransport + getTransport transportRef = + modifyMVar transportRef $ \mTransport -> do + transport <- case mTransport of + Just tr -> return tr + Nothing -> configureTransport verbosity extraPaths httpTransport + return (Just transport, transport) + + withSecureRepo :: Map Repo SecureRepo + -> Repo + -> (forall down. Sec.Repository down -> IO a) + -> IO a + withSecureRepo secureRepos repo callback = + case Map.lookup repo secureRepos of + Just (SecureRepo secureRepo) -> callback secureRepo + Nothing -> throwIO $ userError "repoContextWithSecureRepo: unknown repo" + +-- | Initialize the provided secure repositories +-- +-- Assumed invariant: `remoteRepoSecure` should be set for all these repos. +initSecureRepos :: forall a. Verbosity + -> Sec.HTTP.HttpLib + -> [(RemoteRepo, FilePath)] + -> (Map Repo SecureRepo -> IO a) + -> IO a +initSecureRepos verbosity httpLib repos callback = go Map.empty repos + where + go :: Map Repo SecureRepo -> [(RemoteRepo, FilePath)] -> IO a + go !acc [] = callback acc + go !acc ((r,cacheDir):rs) = do + cachePath <- Sec.makeAbsolute $ Sec.fromFilePath cacheDir + initSecureRepo verbosity httpLib r cachePath $ \r' -> + go (Map.insert (RepoSecure r cacheDir) r' acc) rs + +-- | Initialize the given secure repo +-- +-- The security library has its own concept of a "local" repository, distinct +-- from @cabal-install@'s; these are secure repositories, but live in the local +-- file system. We use the convention that these repositories are identified by +-- URLs of the form @file:/path/to/local/repo@. 
+initSecureRepo :: Verbosity + -> Sec.HTTP.HttpLib + -> RemoteRepo -- ^ Secure repo ('remoteRepoSecure' assumed) + -> Sec.Path Sec.Absolute -- ^ Cache dir + -> (SecureRepo -> IO a) -- ^ Callback + -> IO a +initSecureRepo verbosity httpLib RemoteRepo{..} cachePath = \callback -> do + requiresBootstrap <- withRepo [] Sec.requiresBootstrap + + mirrors <- if requiresBootstrap + then do + info verbosity $ "Trying to locate mirrors via DNS for " ++ + "initial bootstrap of secure " ++ + "repository '" ++ show remoteRepoURI ++ + "' ..." + + Sec.DNS.queryBootstrapMirrors verbosity remoteRepoURI + else pure [] + + withRepo mirrors $ \r -> do + when requiresBootstrap $ Sec.uncheckClientErrors $ + Sec.bootstrap r + (map Sec.KeyId remoteRepoRootKeys) + (Sec.KeyThreshold (fromIntegral remoteRepoKeyThreshold)) + callback $ SecureRepo r + where + -- Initialize local or remote repo depending on the URI + withRepo :: [URI] -> (forall down. Sec.Repository down -> IO a) -> IO a + withRepo _ callback | uriScheme remoteRepoURI == "file:" = do + dir <- Sec.makeAbsolute $ Sec.fromFilePath (uriPath remoteRepoURI) + Sec.Local.withRepository dir + cache + Sec.hackageRepoLayout + Sec.hackageIndexLayout + logTUF + callback + withRepo mirrors callback = + Sec.Remote.withRepository httpLib + (remoteRepoURI:mirrors) + Sec.Remote.defaultRepoOpts + cache + Sec.hackageRepoLayout + Sec.hackageIndexLayout + logTUF + callback + + cache :: Sec.Cache + cache = Sec.Cache { + cacheRoot = cachePath + , cacheLayout = Sec.cabalCacheLayout { + Sec.cacheLayoutIndexTar = cacheFn "01-index.tar" + , Sec.cacheLayoutIndexIdx = cacheFn "01-index.tar.idx" + , Sec.cacheLayoutIndexTarGz = cacheFn "01-index.tar.gz" + } + } + + cacheFn :: FilePath -> Sec.CachePath + cacheFn = Sec.rootPath . Sec.fragment + + -- We display any TUF progress only in verbose mode, including any transient + -- verification errors. If verification fails, then the final exception that + -- is thrown will of course be shown. 
+ logTUF :: Sec.LogMessage -> IO () + logTUF = info verbosity . Sec.pretty diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Glob.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Glob.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Glob.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Glob.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,266 @@ +{-# LANGUAGE DeriveGeneric #-} + +--TODO: [code cleanup] plausibly much of this module should be merged with +-- similar functionality in Cabal. +module Distribution.Client.Glob + ( FilePathGlob(..) + , FilePathRoot(..) + , FilePathGlobRel(..) + , Glob + , GlobPiece(..) + , matchFileGlob + , matchFileGlobRel + , matchGlob + , isTrivialFilePathGlob + , getFilePathRootDirectory + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Data.List (stripPrefix) +import Control.Monad (mapM) + +import Distribution.Text +import Distribution.Compat.ReadP (ReadP, (<++), (+++)) +import qualified Distribution.Compat.ReadP as Parse +import qualified Text.PrettyPrint as Disp + +import System.FilePath +import System.Directory + + +-- | A file path specified by globbing +-- +data FilePathGlob = FilePathGlob FilePathRoot FilePathGlobRel + deriving (Eq, Show, Generic) + +data FilePathGlobRel + = GlobDir !Glob !FilePathGlobRel + | GlobFile !Glob + | GlobDirTrailing -- ^ trailing dir, a glob ending in @/@ + deriving (Eq, Show, Generic) + +-- | A single directory or file component of a globbed path +type Glob = [GlobPiece] + +-- | A piece of a globbing pattern +data GlobPiece = WildCard + | Literal String + | Union [Glob] + deriving (Eq, Show, Generic) + +data FilePathRoot + = FilePathRelative + | FilePathRoot FilePath -- ^ e.g. 
@"/"@, @"c:\"@ or result of 'takeDrive' + | FilePathHomeDir + deriving (Eq, Show, Generic) + +instance Binary FilePathGlob +instance Binary FilePathRoot +instance Binary FilePathGlobRel +instance Binary GlobPiece + + +-- | Check if a 'FilePathGlob' doesn't actually make use of any globbing and +-- is in fact equivalent to a non-glob 'FilePath'. +-- +-- If it is trivial in this sense then the result is the equivalent constant +-- 'FilePath'. On the other hand if it is not trivial (so could in principle +-- match more than one file) then the result is @Nothing@. +-- +isTrivialFilePathGlob :: FilePathGlob -> Maybe FilePath +isTrivialFilePathGlob (FilePathGlob root pathglob) = + case root of + FilePathRelative -> go [] pathglob + FilePathRoot root' -> go [root'] pathglob + FilePathHomeDir -> Nothing + where + go paths (GlobDir [Literal path] globs) = go (path:paths) globs + go paths (GlobFile [Literal path]) = Just (joinPath (reverse (path:paths))) + go paths GlobDirTrailing = Just (addTrailingPathSeparator + (joinPath (reverse paths))) + go _ _ = Nothing + +-- | Get the 'FilePath' corresponding to a 'FilePathRoot'. +-- +-- The 'FilePath' argument is required to supply the path for the +-- 'FilePathRelative' case. +-- +getFilePathRootDirectory :: FilePathRoot + -> FilePath -- ^ root for relative paths + -> IO FilePath +getFilePathRootDirectory FilePathRelative root = return root +getFilePathRootDirectory (FilePathRoot root) _ = return root +getFilePathRootDirectory FilePathHomeDir _ = getHomeDirectory + + +------------------------------------------------------------------------------ +-- Matching +-- + +-- | Match a 'FilePathGlob' against the file system, starting from a given +-- root directory for relative paths. The results of relative globs are +-- relative to the given root. Matches for absolute globs are absolute. 
+-- +matchFileGlob :: FilePath -> FilePathGlob -> IO [FilePath] +matchFileGlob relroot (FilePathGlob globroot glob) = do + root <- getFilePathRootDirectory globroot relroot + matches <- matchFileGlobRel root glob + case globroot of + FilePathRelative -> return matches + _ -> return (map (root ) matches) + +-- | Match a 'FilePathGlobRel' against the file system, starting from a +-- given root directory. The results are all relative to the given root. +-- +matchFileGlobRel :: FilePath -> FilePathGlobRel -> IO [FilePath] +matchFileGlobRel root glob0 = go glob0 "" + where + go (GlobFile glob) dir = do + entries <- getDirectoryContents (root dir) + let files = filter (matchGlob glob) entries + return (map (dir ) files) + + go (GlobDir glob globPath) dir = do + entries <- getDirectoryContents (root dir) + subdirs <- filterM (\subdir -> doesDirectoryExist + (root dir subdir)) + $ filter (matchGlob glob) entries + concat <$> mapM (\subdir -> go globPath (dir subdir)) subdirs + + go GlobDirTrailing dir = return [dir] + + +-- | Match a globbing pattern against a file path component +-- +matchGlob :: Glob -> String -> Bool +matchGlob = goStart + where + -- From the man page, glob(7): + -- "If a filename starts with a '.', this character must be + -- matched explicitly." 
+ + go, goStart :: [GlobPiece] -> String -> Bool + + goStart (WildCard:_) ('.':_) = False + goStart (Union globs:rest) cs = any (\glob -> goStart (glob ++ rest) cs) + globs + goStart rest cs = go rest cs + + go [] "" = True + go (Literal lit:rest) cs + | Just cs' <- stripPrefix lit cs + = go rest cs' + | otherwise = False + go [WildCard] "" = True + go (WildCard:rest) (c:cs) = go rest (c:cs) || go (WildCard:rest) cs + go (Union globs:rest) cs = any (\glob -> go (glob ++ rest) cs) globs + go [] (_:_) = False + go (_:_) "" = False + + +------------------------------------------------------------------------------ +-- Parsing & printing +-- + +instance Text FilePathGlob where + disp (FilePathGlob root pathglob) = disp root Disp.<> disp pathglob + parse = + parse >>= \root -> + (FilePathGlob root <$> parse) + <++ (when (root == FilePathRelative) Parse.pfail >> + return (FilePathGlob root GlobDirTrailing)) + +instance Text FilePathRoot where + disp FilePathRelative = Disp.empty + disp (FilePathRoot root) = Disp.text root + disp FilePathHomeDir = Disp.char '~' Disp.<> Disp.char '/' + + parse = + ( (Parse.char '/' >> return (FilePathRoot "/")) + +++ (Parse.char '~' >> Parse.char '/' >> return FilePathHomeDir) + +++ (do drive <- Parse.satisfy (\c -> (c >= 'a' && c <= 'z') + || (c >= 'A' && c <= 'Z')) + _ <- Parse.char ':' + _ <- Parse.char '/' +++ Parse.char '\\' + return (FilePathRoot (toUpper drive : ":\\"))) + ) + <++ return FilePathRelative + +instance Text FilePathGlobRel where + disp (GlobDir glob pathglob) = dispGlob glob + Disp.<> Disp.char '/' + Disp.<> disp pathglob + disp (GlobFile glob) = dispGlob glob + disp GlobDirTrailing = Disp.empty + + parse = parsePath + where + parsePath :: ReadP r FilePathGlobRel + parsePath = + parseGlob >>= \globpieces -> + asDir globpieces + <++ asTDir globpieces + <++ asFile globpieces + + asDir glob = do dirSep + globs <- parsePath + return (GlobDir glob globs) + asTDir glob = do dirSep + return (GlobDir glob GlobDirTrailing) + 
asFile glob = return (GlobFile glob) + + dirSep = (Parse.char '/' >> return ()) + +++ (do _ <- Parse.char '\\' + -- check this isn't an escape code + following <- Parse.look + case following of + (c:_) | isGlobEscapedChar c -> Parse.pfail + _ -> return ()) + + +dispGlob :: Glob -> Disp.Doc +dispGlob = Disp.hcat . map dispPiece + where + dispPiece WildCard = Disp.char '*' + dispPiece (Literal str) = Disp.text (escape str) + dispPiece (Union globs) = Disp.braces + (Disp.hcat (Disp.punctuate + (Disp.char ',') + (map dispGlob globs))) + escape [] = [] + escape (c:cs) + | isGlobEscapedChar c = '\\' : c : escape cs + | otherwise = c : escape cs + +parseGlob :: ReadP r Glob +parseGlob = Parse.many1 parsePiece + where + parsePiece = literal +++ wildcard +++ union + + wildcard = Parse.char '*' >> return WildCard + + union = Parse.between (Parse.char '{') (Parse.char '}') $ + fmap Union (Parse.sepBy1 parseGlob (Parse.char ',')) + + literal = Literal `fmap` litchars1 + + litchar = normal +++ escape + + normal = Parse.satisfy (\c -> not (isGlobEscapedChar c) + && c /= '/' && c /= '\\') + escape = Parse.char '\\' >> Parse.satisfy isGlobEscapedChar + + litchars1 :: ReadP r [Char] + litchars1 = liftM2 (:) litchar litchars + + litchars :: ReadP r [Char] + litchars = litchars1 <++ return [] + +isGlobEscapedChar :: Char -> Bool +isGlobEscapedChar '*' = True +isGlobEscapedChar '{' = True +isGlobEscapedChar '}' = True +isGlobEscapedChar ',' = True +isGlobEscapedChar _ = False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/GZipUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/GZipUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/GZipUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/GZipUtils.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,86 @@ 
+{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE CPP #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.GZipUtils +-- Copyright : (c) Dmitry Astapov 2010 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- Provides a convenience functions for working with files that may or may not +-- be zipped. +----------------------------------------------------------------------------- +module Distribution.Client.GZipUtils ( + maybeDecompress, + ) where + +import Codec.Compression.Zlib.Internal +import Data.ByteString.Lazy.Internal as BS (ByteString(Empty, Chunk)) + +#if MIN_VERSION_zlib(0,6,0) +import Control.Exception (throw) +import Control.Monad (liftM) +import Control.Monad.ST.Lazy (ST, runST) +import qualified Data.ByteString as Strict +#endif + +-- | Attempts to decompress the `bytes' under the assumption that +-- "data format" error at the very beginning of the stream means +-- that it is already decompressed. Caller should make sanity checks +-- to verify that it is not, in fact, garbage. +-- +-- This is to deal with http proxies that lie to us and transparently +-- decompress without removing the content-encoding header. See: +-- +-- +maybeDecompress :: ByteString -> ByteString +#if MIN_VERSION_zlib(0,6,0) +maybeDecompress bytes = runST (go bytes decompressor) + where + decompressor :: DecompressStream (ST s) + decompressor = decompressST gzipOrZlibFormat defaultDecompressParams + + -- DataError at the beginning of the stream probably means that stream is + -- not compressed, so we return it as-is. + -- TODO: alternatively, we might consider looking for the two magic bytes + -- at the beginning of the gzip header. (not an option for zlib, though.) 
+ go :: Monad m => ByteString -> DecompressStream m -> m ByteString + go cs (DecompressOutputAvailable bs k) = liftM (Chunk bs) $ go' cs =<< k + go _ (DecompressStreamEnd _bs ) = return Empty + go _ (DecompressStreamError _err ) = return bytes + go cs (DecompressInputRequired k) = go cs' =<< k c + where + (c, cs') = uncons cs + + -- Once we have received any output though we regard errors as actual errors + -- and we throw them (as pure exceptions). + -- TODO: We could (and should) avoid these pure exceptions. + go' :: Monad m => ByteString -> DecompressStream m -> m ByteString + go' cs (DecompressOutputAvailable bs k) = liftM (Chunk bs) $ go' cs =<< k + go' _ (DecompressStreamEnd _bs ) = return Empty + go' _ (DecompressStreamError err ) = throw err + go' cs (DecompressInputRequired k) = go' cs' =<< k c + where + (c, cs') = uncons cs + + uncons :: ByteString -> (Strict.ByteString, ByteString) + uncons Empty = (Strict.empty, Empty) + uncons (Chunk c cs) = (c, cs) +#else +maybeDecompress bytes = foldStream $ decompressWithErrors gzipOrZlibFormat defaultDecompressParams bytes + where + -- DataError at the beginning of the stream probably means that stream is not compressed. + -- Returning it as-is. + -- TODO: alternatively, we might consider looking for the two magic bytes + -- at the beginning of the gzip header. 
+ foldStream (StreamError _ _) = bytes + foldStream somethingElse = doFold somethingElse + + doFold StreamEnd = BS.Empty + doFold (StreamChunk bs stream) = BS.Chunk bs (doFold stream) + doFold (StreamError _ msg) = error $ "Codec.Compression.Zlib: " ++ msg +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Haddock.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Haddock.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Haddock.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Haddock.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,69 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Haddock +-- Copyright : (c) Andrea Vezzosi 2009 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Interfacing with Haddock +-- +----------------------------------------------------------------------------- +module Distribution.Client.Haddock + ( + regenerateHaddockIndex + ) + where + +import Data.List (maximumBy) +import Data.Foldable (forM_) +import System.Directory (createDirectoryIfMissing, renameFile) +import System.FilePath ((), splitFileName) +import Distribution.Package + ( packageVersion ) +import Distribution.Simple.Haddock (haddockPackagePaths) +import Distribution.Simple.Program (haddockProgram, ProgramDb + , runProgram, requireProgramVersion) +import Distribution.Version (mkVersion, orLaterVersion) +import Distribution.Verbosity (Verbosity) +import Distribution.Simple.PackageIndex + ( InstalledPackageIndex, allPackagesByName ) +import Distribution.Simple.Utils + ( comparing, debug, installDirectoryContents, withTempDirectory ) +import Distribution.InstalledPackageInfo as InstalledPackageInfo + ( InstalledPackageInfo(exposed) 
) + +regenerateHaddockIndex :: Verbosity + -> InstalledPackageIndex -> ProgramDb + -> FilePath + -> IO () +regenerateHaddockIndex verbosity pkgs progdb index = do + (paths, warns) <- haddockPackagePaths pkgs' Nothing + let paths' = [ (interface, html) | (interface, Just html, _) <- paths] + forM_ warns (debug verbosity) + + (confHaddock, _, _) <- + requireProgramVersion verbosity haddockProgram + (orLaterVersion (mkVersion [0,6])) progdb + + createDirectoryIfMissing True destDir + + withTempDirectory verbosity destDir "tmphaddock" $ \tempDir -> do + + let flags = [ "--gen-contents" + , "--gen-index" + , "--odir=" ++ tempDir + , "--title=Haskell modules on this system" ] + ++ [ "--read-interface=" ++ html ++ "," ++ interface + | (interface, html) <- paths' ] + runProgram verbosity confHaddock flags + renameFile (tempDir "index.html") (tempDir destFile) + installDirectoryContents verbosity tempDir destDir + + where + (destDir,destFile) = splitFileName index + pkgs' = [ maximumBy (comparing packageVersion) pkgvers' + | (_pname, pkgvers) <- allPackagesByName pkgs + , let pkgvers' = filter exposed pkgvers + , not (null pkgvers') ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/HttpUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/HttpUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/HttpUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/HttpUtils.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,857 @@ +{-# LANGUAGE BangPatterns #-} +----------------------------------------------------------------------------- +-- | Separate module for HTTP actions, using a proxy server if one exists. 
+----------------------------------------------------------------------------- +module Distribution.Client.HttpUtils ( + DownloadResult(..), + configureTransport, + HttpTransport(..), + HttpCode, + downloadURI, + transportCheckHttps, + remoteRepoCheckHttps, + remoteRepoTryUpgradeToHttps, + isOldHackageURI + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Network.HTTP + ( Request (..), Response (..), RequestMethod (..) + , Header(..), HeaderName(..), lookupHeader ) +import Network.HTTP.Proxy ( Proxy(..), fetchProxy) +import Network.URI + ( URI (..), URIAuth (..), uriToString ) +import Network.Browser + ( browse, setOutHandler, setErrHandler, setProxy + , setAuthorityGen, request, setAllowBasicAuth, setUserAgent ) +import qualified Control.Exception as Exception +import Control.Exception + ( evaluate ) +import Control.DeepSeq + ( force ) +import Control.Monad + ( guard ) +import qualified Data.ByteString.Lazy.Char8 as BS +import qualified Paths_cabal_install (version) +import Distribution.Verbosity (Verbosity) +import Distribution.Simple.Utils + ( die', info, warn, debug, notice, writeFileAtomic + , copyFileVerbose, withTempFile ) +import Distribution.Client.Utils + ( withTempFileName ) +import Distribution.Client.Types + ( RemoteRepo(..) ) +import Distribution.System + ( buildOS, buildArch ) +import Distribution.Text + ( display ) +import qualified System.FilePath.Posix as FilePath.Posix + ( splitDirectories ) +import System.FilePath + ( (<.>), takeFileName, takeDirectory ) +import System.Directory + ( doesFileExist, renameFile, canonicalizePath ) +import System.IO + ( withFile, IOMode(ReadMode), hGetContents, hClose ) +import System.IO.Error + ( isDoesNotExistError ) +import Distribution.Simple.Program + ( Program, simpleProgram, ConfiguredProgram, programPath + , ProgramInvocation(..), programInvocation + , ProgramSearchPathEntry(..) 
+ , getProgramInvocationOutput ) +import Distribution.Simple.Program.Db + ( ProgramDb, emptyProgramDb, addKnownPrograms + , configureAllKnownPrograms + , requireProgram, lookupProgram + , modifyProgramSearchPath ) +import Distribution.Simple.Program.Run + ( getProgramInvocationOutputAndErrors ) +import Numeric (showHex) +import System.Random (randomRIO) +import System.Exit (ExitCode(..)) + + +------------------------------------------------------------------------------ +-- Downloading a URI, given an HttpTransport +-- + +data DownloadResult = FileAlreadyInCache + | FileDownloaded FilePath + deriving (Eq) + +downloadURI :: HttpTransport + -> Verbosity + -> URI -- ^ What to download + -> FilePath -- ^ Where to put it + -> IO DownloadResult +downloadURI _transport verbosity uri path | uriScheme uri == "file:" = do + copyFileVerbose verbosity (uriPath uri) path + return (FileDownloaded path) + -- Can we store the hash of the file so we can safely return path when the + -- hash matches to avoid unnecessary computation? + +downloadURI transport verbosity uri path = do + + let etagPath = path <.> "etag" + targetExists <- doesFileExist path + etagPathExists <- doesFileExist etagPath + -- In rare cases the target file doesn't exist, but the etag does. + etag <- if targetExists && etagPathExists + then Just <$> readFile etagPath + else return Nothing + + -- Only use the external http transports if we actually have to + -- (or have been told to do so) + let transport' + | uriScheme uri == "http:" + , not (transportManuallySelected transport) + = plainHttpTransport + + | otherwise + = transport + + withTempFileName (takeDirectory path) (takeFileName path) $ \tmpFile -> do + result <- getHttp transport' verbosity uri etag tmpFile [] + + -- Only write the etag if we get a 200 response code. + -- A 304 still sends us an etag header. 
+ case result of + (200, Just newEtag) -> writeFile etagPath newEtag + _ -> return () + + case fst result of + 200 -> do + info verbosity ("Downloaded to " ++ path) + renameFile tmpFile path + return (FileDownloaded path) + 304 -> do + notice verbosity "Skipping download: local and remote files match." + return FileAlreadyInCache + errCode -> die' verbosity $ "Failed to download " ++ show uri + ++ " : HTTP code " ++ show errCode + +------------------------------------------------------------------------------ +-- Utilities for repo url management +-- + +remoteRepoCheckHttps :: Verbosity -> HttpTransport -> RemoteRepo -> IO () +remoteRepoCheckHttps verbosity transport repo + | uriScheme (remoteRepoURI repo) == "https:" + , not (transportSupportsHttps transport) + = die' verbosity $ "The remote repository '" ++ remoteRepoName repo + ++ "' specifies a URL that " ++ requiresHttpsErrorMessage + | otherwise = return () + +transportCheckHttps :: Verbosity -> HttpTransport -> URI -> IO () +transportCheckHttps verbosity transport uri + | uriScheme uri == "https:" + , not (transportSupportsHttps transport) + = die' verbosity $ "The URL " ++ show uri + ++ " " ++ requiresHttpsErrorMessage + | otherwise = return () + +requiresHttpsErrorMessage :: String +requiresHttpsErrorMessage = + "requires HTTPS however the built-in HTTP implementation " + ++ "does not support HTTPS. The transport implementations with HTTPS " + ++ "support are " ++ intercalate ", " + [ name | (name, _, True, _ ) <- supportedTransports ] + ++ ". 
One of these will be selected automatically if the corresponding " + ++ "external program is available, or one can be selected specifically " + ++ "with the global flag --http-transport=" + +remoteRepoTryUpgradeToHttps :: Verbosity -> HttpTransport -> RemoteRepo -> IO RemoteRepo +remoteRepoTryUpgradeToHttps verbosity transport repo + | remoteRepoShouldTryHttps repo + , uriScheme (remoteRepoURI repo) == "http:" + , not (transportSupportsHttps transport) + , not (transportManuallySelected transport) + = die' verbosity $ "The builtin HTTP implementation does not support HTTPS, but using " + ++ "HTTPS for authenticated uploads is recommended. " + ++ "The transport implementations with HTTPS support are " + ++ intercalate ", " [ name | (name, _, True, _ ) <- supportedTransports ] + ++ "but they require the corresponding external program to be " + ++ "available. You can either make one available or use plain HTTP by " + ++ "using the global flag --http-transport=plain-http (or putting the " + ++ "equivalent in the config file). With plain HTTP, your password " + ++ "is sent using HTTP digest authentication so it cannot be easily " + ++ "intercepted, but it is not as secure as using HTTPS." + + | remoteRepoShouldTryHttps repo + , uriScheme (remoteRepoURI repo) == "http:" + , transportSupportsHttps transport + = return repo { + remoteRepoURI = (remoteRepoURI repo) { uriScheme = "https:" } + } + + | otherwise + = return repo + +-- | Utility function for legacy support. 
+isOldHackageURI :: URI -> Bool +isOldHackageURI uri + = case uriAuthority uri of + Just (URIAuth {uriRegName = "hackage.haskell.org"}) -> + FilePath.Posix.splitDirectories (uriPath uri) + == ["/","packages","archive"] + _ -> False + + +------------------------------------------------------------------------------ +-- Setting up a HttpTransport +-- + +data HttpTransport = HttpTransport { + -- | GET a URI, with an optional ETag (to do a conditional fetch), + -- write the resource to the given file and return the HTTP status code, + -- and optional ETag. + getHttp :: Verbosity -> URI -> Maybe ETag -> FilePath -> [Header] + -> IO (HttpCode, Maybe ETag), + + -- | POST a resource to a URI, with optional auth (username, password) + -- and return the HTTP status code and any redirect URL. + postHttp :: Verbosity -> URI -> String -> Maybe Auth + -> IO (HttpCode, String), + + -- | POST a file resource to a URI using multipart\/form-data encoding, + -- with optional auth (username, password) and return the HTTP status + -- code and any error string. + postHttpFile :: Verbosity -> URI -> FilePath -> Maybe Auth + -> IO (HttpCode, String), + + -- | PUT a file resource to a URI, with optional auth + -- (username, password), extra headers and return the HTTP status code + -- and any error string. + putHttpFile :: Verbosity -> URI -> FilePath -> Maybe Auth -> [Header] + -> IO (HttpCode, String), + + -- | Whether this transport supports https or just http. + transportSupportsHttps :: Bool, + + -- | Whether this transport implementation was specifically chosen by + -- the user via configuration, or whether it was automatically selected. + -- Strictly speaking this is not a property of the transport itself but + -- about how it was chosen. Nevertheless it's convenient to keep here. + transportManuallySelected :: Bool + } + --TODO: why does postHttp return a redirect, but postHttpFile return errors? 
+ +type HttpCode = Int +type ETag = String +type Auth = (String, String) + +noPostYet :: Verbosity -> URI -> String -> Maybe (String, String) + -> IO (Int, String) +noPostYet verbosity _ _ _ = die' verbosity "Posting (for report upload) is not implemented yet" + +supportedTransports :: [(String, Maybe Program, Bool, + ProgramDb -> Maybe HttpTransport)] +supportedTransports = + [ let prog = simpleProgram "curl" in + ( "curl", Just prog, True + , \db -> curlTransport <$> lookupProgram prog db ) + + , let prog = simpleProgram "wget" in + ( "wget", Just prog, True + , \db -> wgetTransport <$> lookupProgram prog db ) + + , let prog = simpleProgram "powershell" in + ( "powershell", Just prog, True + , \db -> powershellTransport <$> lookupProgram prog db ) + + , ( "plain-http", Nothing, False + , \_ -> Just plainHttpTransport ) + ] + +configureTransport :: Verbosity -> [FilePath] -> Maybe String -> IO HttpTransport + +configureTransport verbosity extraPath (Just name) = + -- the user secifically selected a transport by name so we'll try and + -- configure that one + + case find (\(name',_,_,_) -> name' == name) supportedTransports of + Just (_, mprog, _tls, mkTrans) -> do + + let baseProgDb = modifyProgramSearchPath (\p -> map ProgramSearchPathDir extraPath ++ p) emptyProgramDb + progdb <- case mprog of + Nothing -> return emptyProgramDb + Just prog -> snd <$> requireProgram verbosity prog baseProgDb + -- ^^ if it fails, it'll fail here + + let Just transport = mkTrans progdb + return transport { transportManuallySelected = True } + + Nothing -> die' verbosity $ "Unknown HTTP transport specified: " ++ name + ++ ". 
The supported transports are " + ++ intercalate ", " + [ name' | (name', _, _, _ ) <- supportedTransports ] + +configureTransport verbosity extraPath Nothing = do + -- the user hasn't selected a transport, so we'll pick the first one we + -- can configure successfully, provided that it supports tls + + -- for all the transports except plain-http we need to try and find + -- their external executable + let baseProgDb = modifyProgramSearchPath (\p -> map ProgramSearchPathDir extraPath ++ p) emptyProgramDb + progdb <- configureAllKnownPrograms verbosity $ + addKnownPrograms + [ prog | (_, Just prog, _, _) <- supportedTransports ] + baseProgDb + + let availableTransports = + [ (name, transport) + | (name, _, _, mkTrans) <- supportedTransports + , transport <- maybeToList (mkTrans progdb) ] + -- there's always one because the plain one is last and never fails + let (name, transport) = head availableTransports + debug verbosity $ "Selected http transport implementation: " ++ name + + return transport { transportManuallySelected = False } + + +------------------------------------------------------------------------------ +-- The HttpTransports based on external programs +-- + +curlTransport :: ConfiguredProgram -> HttpTransport +curlTransport prog = + HttpTransport gethttp posthttp posthttpfile puthttpfile True False + where + gethttp verbosity uri etag destPath reqHeaders = do + withTempFile (takeDirectory destPath) + "curl-headers.txt" $ \tmpFile tmpHandle -> do + hClose tmpHandle + let args = [ show uri + , "--output", destPath + , "--location" + , "--write-out", "%{http_code}" + , "--user-agent", userAgent + , "--silent", "--show-error" + , "--dump-header", tmpFile ] + ++ concat + [ ["--header", "If-None-Match: " ++ t] + | t <- maybeToList etag ] + ++ concat + [ ["--header", show name ++ ": " ++ value] + | Header name value <- reqHeaders ] + + resp <- getProgramInvocationOutput verbosity + (programInvocation prog args) + withFile tmpFile ReadMode $ \hnd -> do + 
headers <- hGetContents hnd + (code, _err, etag') <- parseResponse verbosity uri resp headers + evaluate $ force (code, etag') + + posthttp = noPostYet + + addAuthConfig auth progInvocation = progInvocation + { progInvokeInput = do + (uname, passwd) <- auth + return $ unlines + [ "--digest" + , "--user " ++ uname ++ ":" ++ passwd + ] + , progInvokeArgs = ["--config", "-"] ++ progInvokeArgs progInvocation + } + + posthttpfile verbosity uri path auth = do + let args = [ show uri + , "--form", "package=@"++path + , "--write-out", "\n%{http_code}" + , "--user-agent", userAgent + , "--silent", "--show-error" + , "--header", "Accept: text/plain" + , "--location" + ] + resp <- getProgramInvocationOutput verbosity $ addAuthConfig auth + (programInvocation prog args) + (code, err, _etag) <- parseResponse verbosity uri resp "" + return (code, err) + + puthttpfile verbosity uri path auth headers = do + let args = [ show uri + , "--request", "PUT", "--data-binary", "@"++path + , "--write-out", "\n%{http_code}" + , "--user-agent", userAgent + , "--silent", "--show-error" + , "--location" + , "--header", "Accept: text/plain" + ] + ++ concat + [ ["--header", show name ++ ": " ++ value] + | Header name value <- headers ] + resp <- getProgramInvocationOutput verbosity $ addAuthConfig auth + (programInvocation prog args) + (code, err, _etag) <- parseResponse verbosity uri resp "" + return (code, err) + + -- on success these curl invocations produces an output like "200" + -- and on failure it has the server error response first + parseResponse :: Verbosity -> URI -> String -> String -> IO (Int, String, Maybe ETag) + parseResponse verbosity uri resp headers = + let codeerr = + case reverse (lines resp) of + (codeLine:rerrLines) -> + case readMaybe (trim codeLine) of + Just i -> let errstr = mkErrstr rerrLines + in Just (i, errstr) + Nothing -> Nothing + [] -> Nothing + + mkErrstr = unlines . reverse . 
dropWhile (all isSpace) + + mb_etag :: Maybe ETag + mb_etag = listToMaybe $ reverse + [ etag + | ["ETag:", etag] <- map words (lines headers) ] + + in case codeerr of + Just (i, err) -> return (i, err, mb_etag) + _ -> statusParseFail verbosity uri resp + + +wgetTransport :: ConfiguredProgram -> HttpTransport +wgetTransport prog = + HttpTransport gethttp posthttp posthttpfile puthttpfile True False + where + gethttp verbosity uri etag destPath reqHeaders = do + resp <- runWGet verbosity uri args + + -- wget doesn't support range requests. + -- so, we not only ignore range request headers, + -- but we also dispay a warning message when we see them. + let hasRangeHeader = any isRangeHeader reqHeaders + warningMsg = "the 'wget' transport currently doesn't support" + ++ " range requests, which wastes network bandwidth." + ++ " To fix this, set 'http-transport' to 'curl' or" + ++ " 'plain-http' in '~/.cabal/config'." + ++ " Note that the 'plain-http' transport doesn't" + ++ " support HTTPS.\n" + + when (hasRangeHeader) $ warn verbosity warningMsg + (code, etag') <- parseOutput verbosity uri resp + return (code, etag') + where + args = [ "--output-document=" ++ destPath + , "--user-agent=" ++ userAgent + , "--tries=5" + , "--timeout=15" + , "--server-response" ] + ++ concat + [ ["--header", "If-None-Match: " ++ t] + | t <- maybeToList etag ] + ++ [ "--header=" ++ show name ++ ": " ++ value + | hdr@(Header name value) <- reqHeaders + , (not (isRangeHeader hdr)) ] + + -- wget doesn't support range requests. + -- so, we ignore range request headers, lest we get errors. 
+ isRangeHeader :: Header -> Bool + isRangeHeader (Header HdrRange _) = True + isRangeHeader _ = False + + posthttp = noPostYet + + posthttpfile verbosity uri path auth = + withTempFile (takeDirectory path) + (takeFileName path) $ \tmpFile tmpHandle -> + withTempFile (takeDirectory path) "response" $ + \responseFile responseHandle -> do + hClose responseHandle + (body, boundary) <- generateMultipartBody path + BS.hPut tmpHandle body + hClose tmpHandle + let args = [ "--post-file=" ++ tmpFile + , "--user-agent=" ++ userAgent + , "--server-response" + , "--output-document=" ++ responseFile + , "--header=Accept: text/plain" + , "--header=Content-type: multipart/form-data; " ++ + "boundary=" ++ boundary ] + out <- runWGet verbosity (addUriAuth auth uri) args + (code, _etag) <- parseOutput verbosity uri out + withFile responseFile ReadMode $ \hnd -> do + resp <- hGetContents hnd + evaluate $ force (code, resp) + + puthttpfile verbosity uri path auth headers = + withTempFile (takeDirectory path) "response" $ + \responseFile responseHandle -> do + hClose responseHandle + let args = [ "--method=PUT", "--body-file="++path + , "--user-agent=" ++ userAgent + , "--server-response" + , "--output-document=" ++ responseFile + , "--header=Accept: text/plain" ] + ++ [ "--header=" ++ show name ++ ": " ++ value + | Header name value <- headers ] + + out <- runWGet verbosity (addUriAuth auth uri) args + (code, _etag) <- parseOutput verbosity uri out + withFile responseFile ReadMode $ \hnd -> do + resp <- hGetContents hnd + evaluate $ force (code, resp) + + addUriAuth Nothing uri = uri + addUriAuth (Just (user, pass)) uri = uri + { uriAuthority = Just a { uriUserInfo = user ++ ":" ++ pass ++ "@" } + } + where + a = fromMaybe (URIAuth "" "" "") (uriAuthority uri) + + runWGet verbosity uri args = do + -- We pass the URI via STDIN because it contains the users' credentials + -- and sensitive data should not be passed via command line arguments. 
+ let + invocation = (programInvocation prog ("--input-file=-" : args)) + { progInvokeInput = Just (uriToString id uri "") + } + + -- wget returns its output on stderr rather than stdout + (_, resp, exitCode) <- getProgramInvocationOutputAndErrors verbosity + invocation + -- wget returns exit code 8 for server "errors" like "304 not modified" + if exitCode == ExitSuccess || exitCode == ExitFailure 8 + then return resp + else die' verbosity $ "'" ++ programPath prog + ++ "' exited with an error:\n" ++ resp + + -- With the --server-response flag, wget produces output with the full + -- http server response with all headers, we want to find a line like + -- "HTTP/1.1 200 OK", but only the last one, since we can have multiple + -- requests due to redirects. + parseOutput verbosity uri resp = + let parsedCode = listToMaybe + [ code + | (protocol:codestr:_err) <- map words (reverse (lines resp)) + , "HTTP/" `isPrefixOf` protocol + , code <- maybeToList (readMaybe codestr) ] + mb_etag :: Maybe ETag + mb_etag = listToMaybe + [ etag + | ["ETag:", etag] <- map words (reverse (lines resp)) ] + in case parsedCode of + Just i -> return (i, mb_etag) + _ -> statusParseFail verbosity uri resp + + +powershellTransport :: ConfiguredProgram -> HttpTransport +powershellTransport prog = + HttpTransport gethttp posthttp posthttpfile puthttpfile True False + where + gethttp verbosity uri etag destPath reqHeaders = do + resp <- runPowershellScript verbosity $ + webclientScript + (escape (show uri)) + (("$targetStream = New-Object -TypeName System.IO.FileStream -ArgumentList " ++ (escape destPath) ++ ", Create") + :(setupHeaders ((useragentHeader : etagHeader) ++ reqHeaders))) + [ "$response = $request.GetResponse()" + , "$responseStream = $response.GetResponseStream()" + , "$buffer = new-object byte[] 10KB" + , "$count = $responseStream.Read($buffer, 0, $buffer.length)" + , "while ($count -gt 0)" + , "{" + , " $targetStream.Write($buffer, 0, $count)" + , " $count = 
$responseStream.Read($buffer, 0, $buffer.length)" + , "}" + , "Write-Host ($response.StatusCode -as [int]);" + , "Write-Host $response.GetResponseHeader(\"ETag\").Trim('\"')" + ] + [ "$targetStream.Flush()" + , "$targetStream.Close()" + , "$targetStream.Dispose()" + , "$responseStream.Dispose()" + ] + parseResponse resp + where + parseResponse :: String -> IO (HttpCode, Maybe ETag) + parseResponse x = + case lines $ trim x of + (code:etagv:_) -> fmap (\c -> (c, Just etagv)) $ parseCode code x + (code: _) -> fmap (\c -> (c, Nothing )) $ parseCode code x + _ -> statusParseFail verbosity uri x + parseCode :: String -> String -> IO HttpCode + parseCode code x = case readMaybe code of + Just i -> return i + Nothing -> statusParseFail verbosity uri x + etagHeader = [ Header HdrIfNoneMatch t | t <- maybeToList etag ] + + posthttp = noPostYet + + posthttpfile verbosity uri path auth = + withTempFile (takeDirectory path) + (takeFileName path) $ \tmpFile tmpHandle -> do + (body, boundary) <- generateMultipartBody path + BS.hPut tmpHandle body + hClose tmpHandle + fullPath <- canonicalizePath tmpFile + + let contentHeader = Header HdrContentType + ("multipart/form-data; boundary=" ++ boundary) + resp <- runPowershellScript verbosity $ webclientScript + (escape (show uri)) + (setupHeaders (contentHeader : extraHeaders) ++ setupAuth auth) + (uploadFileAction "POST" uri fullPath) + uploadFileCleanup + parseUploadResponse verbosity uri resp + + puthttpfile verbosity uri path auth headers = do + fullPath <- canonicalizePath path + resp <- runPowershellScript verbosity $ webclientScript + (escape (show uri)) + (setupHeaders (extraHeaders ++ headers) ++ setupAuth auth) + (uploadFileAction "PUT" uri fullPath) + uploadFileCleanup + parseUploadResponse verbosity uri resp + + runPowershellScript verbosity script = do + let args = + [ "-InputFormat", "None" + -- the default execution policy doesn't allow running + -- unsigned scripts, so we need to tell powershell to bypass it + , 
"-ExecutionPolicy", "bypass" + , "-NoProfile", "-NonInteractive" + , "-Command", "-" + ] + debug verbosity script + getProgramInvocationOutput verbosity (programInvocation prog args) + { progInvokeInput = Just (script ++ "\nExit(0);") + } + + escape = show + + useragentHeader = Header HdrUserAgent userAgent + extraHeaders = [Header HdrAccept "text/plain", useragentHeader] + + setupHeaders headers = + [ "$request." ++ addHeader name value + | Header name value <- headers + ] + where + addHeader header value + = case header of + HdrAccept -> "Accept = " ++ escape value + HdrUserAgent -> "UserAgent = " ++ escape value + HdrConnection -> "Connection = " ++ escape value + HdrContentLength -> "ContentLength = " ++ escape value + HdrContentType -> "ContentType = " ++ escape value + HdrDate -> "Date = " ++ escape value + HdrExpect -> "Expect = " ++ escape value + HdrHost -> "Host = " ++ escape value + HdrIfModifiedSince -> "IfModifiedSince = " ++ escape value + HdrReferer -> "Referer = " ++ escape value + HdrTransferEncoding -> "TransferEncoding = " ++ escape value + HdrRange -> let (start, _:end) = + if "bytes=" `isPrefixOf` value + then break (== '-') value' + else error $ "Could not decode range: " ++ value + value' = drop 6 value + in "AddRange(\"bytes\", " ++ escape start ++ ", " ++ escape end ++ ");" + name -> "Headers.Add(" ++ escape (show name) ++ "," ++ escape value ++ ");" + + setupAuth auth = + [ "$request.Credentials = new-object System.Net.NetworkCredential(" + ++ escape uname ++ "," ++ escape passwd ++ ",\"\");" + | (uname,passwd) <- maybeToList auth + ] + + uploadFileAction method _uri fullPath = + [ "$request.Method = " ++ show method + , "$requestStream = $request.GetRequestStream()" + , "$fileStream = [System.IO.File]::OpenRead(" ++ escape fullPath ++ ")" + , "$bufSize=10000" + , "$chunk = New-Object byte[] $bufSize" + , "while( $bytesRead = $fileStream.Read($chunk,0,$bufsize) )" + , "{" + , " $requestStream.write($chunk, 0, $bytesRead)" + , " 
$requestStream.Flush()" + , "}" + , "" + , "$responseStream = $request.getresponse()" + , "$responseReader = new-object System.IO.StreamReader $responseStream.GetResponseStream()" + , "$code = $response.StatusCode -as [int]" + , "if ($code -eq 0) {" + , " $code = 200;" + , "}" + , "Write-Host $code" + , "Write-Host $responseReader.ReadToEnd()" + ] + + uploadFileCleanup = + [ "$fileStream.Close()" + , "$requestStream.Close()" + , "$responseStream.Close()" + ] + + parseUploadResponse verbosity uri resp = case lines (trim resp) of + (codeStr : message) + | Just code <- readMaybe codeStr -> return (code, unlines message) + _ -> statusParseFail verbosity uri resp + + webclientScript uri setup action cleanup = unlines + [ "[Net.ServicePointManager]::SecurityProtocol = \"tls12, tls11, tls\"" + , "$uri = New-Object \"System.Uri\" " ++ uri + , "$request = [System.Net.HttpWebRequest]::Create($uri)" + , unlines setup + , "Try {" + , unlines (map (" " ++) action) + , "} Catch [System.Net.WebException] {" + , " $exception = $_.Exception;" + , " If ($exception.Status -eq " + ++ "[System.Net.WebExceptionStatus]::ProtocolError) {" + , " $response = $exception.Response -as [System.Net.HttpWebResponse];" + , " $reader = new-object " + ++ "System.IO.StreamReader($response.GetResponseStream());" + , " Write-Host ($response.StatusCode -as [int]);" + , " Write-Host $reader.ReadToEnd();" + , " } Else {" + , " Write-Host $exception.Message;" + , " }" + , "} Catch {" + , " Write-Host $_.Exception.Message;" + , "} finally {" + , unlines (map (" " ++) cleanup) + , "}" + ] + + +------------------------------------------------------------------------------ +-- The builtin plain HttpTransport +-- + +plainHttpTransport :: HttpTransport +plainHttpTransport = + HttpTransport gethttp posthttp posthttpfile puthttpfile False False + where + gethttp verbosity uri etag destPath reqHeaders = do + let req = Request{ + rqURI = uri, + rqMethod = GET, + rqHeaders = [ Header HdrIfNoneMatch t + | t <- 
maybeToList etag ] + ++ reqHeaders, + rqBody = BS.empty + } + (_, resp) <- cabalBrowse verbosity Nothing (request req) + let code = convertRspCode (rspCode resp) + etag' = lookupHeader HdrETag (rspHeaders resp) + -- 206 Partial Content is a normal response to a range request; see #3385. + when (code==200 || code==206) $ + writeFileAtomic destPath $ rspBody resp + return (code, etag') + + posthttp = noPostYet + + posthttpfile verbosity uri path auth = do + (body, boundary) <- generateMultipartBody path + let headers = [ Header HdrContentType + ("multipart/form-data; boundary="++boundary) + , Header HdrContentLength (show (BS.length body)) + , Header HdrAccept ("text/plain") + ] + req = Request { + rqURI = uri, + rqMethod = POST, + rqHeaders = headers, + rqBody = body + } + (_, resp) <- cabalBrowse verbosity auth (request req) + return (convertRspCode (rspCode resp), rspErrorString resp) + + puthttpfile verbosity uri path auth headers = do + body <- BS.readFile path + let req = Request { + rqURI = uri, + rqMethod = PUT, + rqHeaders = Header HdrContentLength (show (BS.length body)) + : Header HdrAccept "text/plain" + : headers, + rqBody = body + } + (_, resp) <- cabalBrowse verbosity auth (request req) + return (convertRspCode (rspCode resp), rspErrorString resp) + + convertRspCode (a,b,c) = a*100 + b*10 + c + + rspErrorString resp = + case lookupHeader HdrContentType (rspHeaders resp) of + Just contenttype + | takeWhile (/= ';') contenttype == "text/plain" + -> BS.unpack (rspBody resp) + _ -> rspReason resp + + cabalBrowse verbosity auth act = do + p <- fixupEmptyProxy <$> fetchProxy True + Exception.handleJust + (guard . isDoesNotExistError) + (const . die' verbosity $ "Couldn't establish HTTP connection. " + ++ "Possible cause: HTTP proxy server is down.") $ + browse $ do + setProxy p + setErrHandler (warn verbosity . 
("http error: "++)) + setOutHandler (debug verbosity) + setUserAgent userAgent + setAllowBasicAuth False + setAuthorityGen (\_ _ -> return auth) + act + + fixupEmptyProxy (Proxy uri _) | null uri = NoProxy + fixupEmptyProxy p = p + + +------------------------------------------------------------------------------ +-- Common stuff used by multiple transport impls +-- + +userAgent :: String +userAgent = concat [ "cabal-install/", display Paths_cabal_install.version + , " (", display buildOS, "; ", display buildArch, ")" + ] + +statusParseFail :: Verbosity -> URI -> String -> IO a +statusParseFail verbosity uri r = + die' verbosity $ "Failed to download " ++ show uri ++ " : " + ++ "No Status Code could be parsed from response: " ++ r + +-- Trim +trim :: String -> String +trim = f . f + where f = reverse . dropWhile isSpace + + +------------------------------------------------------------------------------ +-- Multipart stuff partially taken from cgi package. +-- + +generateMultipartBody :: FilePath -> IO (BS.ByteString, String) +generateMultipartBody path = do + content <- BS.readFile path + boundary <- genBoundary + let !body = formatBody content (BS.pack boundary) + return (body, boundary) + where + formatBody content boundary = + BS.concat $ + [ crlf, dd, boundary, crlf ] + ++ [ BS.pack (show header) | header <- headers ] + ++ [ crlf + , content + , crlf, dd, boundary, dd, crlf ] + + headers = + [ Header (HdrCustom "Content-disposition") + ("form-data; name=package; " ++ + "filename=\"" ++ takeFileName path ++ "\"") + , Header HdrContentType "application/x-gzip" + ] + + crlf = BS.pack "\r\n" + dd = BS.pack "--" + +genBoundary :: IO String +genBoundary = do + i <- randomRIO (0x10000000000000,0xFFFFFFFFFFFFFF) :: IO Integer + return $ showHex i "" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/IndexUtils/Timestamp.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/IndexUtils/Timestamp.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/IndexUtils/Timestamp.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/IndexUtils/Timestamp.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,192 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE RecordWildCards #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.IndexUtils.Timestamp +-- Copyright : (c) 2016 Herbert Valerio Riedel +-- License : BSD3 +-- +-- Timestamp type used in package indexes + +module Distribution.Client.IndexUtils.Timestamp + ( Timestamp + , nullTimestamp + , epochTimeToTimestamp + , timestampToUTCTime + , utcTimeToTimestamp + , maximumTimestamp + + , IndexState(..) + ) where + +import qualified Codec.Archive.Tar.Entry as Tar +import Control.DeepSeq +import Control.Monad +import Data.Char (isDigit) +import Data.Int (Int64) +import Data.Time (UTCTime (..), fromGregorianValid, + makeTimeOfDayValid, showGregorian, + timeOfDayToTime, timeToTimeOfDay) +import Data.Time.Clock.POSIX (posixSecondsToUTCTime, + utcTimeToPOSIXSeconds) +import Distribution.Compat.Binary +import qualified Distribution.Compat.ReadP as ReadP +import Distribution.Text +import qualified Text.PrettyPrint as Disp +import GHC.Generics (Generic) + +-- | UNIX timestamp (expressed in seconds since unix epoch, i.e. 1970). 
+newtype Timestamp = TS Int64 -- Tar.EpochTime + deriving (Eq,Ord,Enum,NFData,Show) + +epochTimeToTimestamp :: Tar.EpochTime -> Maybe Timestamp +epochTimeToTimestamp et + | ts == nullTimestamp = Nothing + | otherwise = Just ts + where + ts = TS et + +timestampToUTCTime :: Timestamp -> Maybe UTCTime +timestampToUTCTime (TS t) + | t == minBound = Nothing + | otherwise = Just $ posixSecondsToUTCTime (fromIntegral t) + +utcTimeToTimestamp :: UTCTime -> Maybe Timestamp +utcTimeToTimestamp utct + | minTime <= t, t <= maxTime = Just (TS (fromIntegral t)) + | otherwise = Nothing + where + maxTime = toInteger (maxBound :: Int64) + minTime = toInteger (succ minBound :: Int64) + t :: Integer + t = round . utcTimeToPOSIXSeconds $ utct + +-- | Compute the maximum 'Timestamp' value +-- +-- Returns 'nullTimestamp' for the empty list. Also note that +-- 'nullTimestamp' compares as smaller to all non-'nullTimestamp' +-- values. +maximumTimestamp :: [Timestamp] -> Timestamp +maximumTimestamp [] = nullTimestamp +maximumTimestamp xs@(_:_) = maximum xs + +-- returns 'Nothing' if not representable as 'Timestamp' +posixSecondsToTimestamp :: Integer -> Maybe Timestamp +posixSecondsToTimestamp pt + | minTs <= pt, pt <= maxTs = Just (TS (fromInteger pt)) + | otherwise = Nothing + where + maxTs = toInteger (maxBound :: Int64) + minTs = toInteger (succ minBound :: Int64) + +-- | Pretty-prints 'Timestamp' in ISO8601/RFC3339 format +-- (e.g. @"2017-12-31T23:59:59Z"@) +-- +-- Returns empty string for 'nullTimestamp' in order for +-- +-- > null (display nullTimestamp) == True +-- +-- to hold. +showTimestamp :: Timestamp -> String +showTimestamp ts = case timestampToUTCTime ts of + Nothing -> "" + -- Note: we don't use 'formatTime' here to avoid incurring a + -- dependency on 'old-locale' for older `time` libs + Just UTCTime{..} -> showGregorian utctDay ++ ('T':showTOD utctDayTime) ++ "Z" + where + showTOD = show . 
timeToTimeOfDay + +instance Binary Timestamp where + put (TS t) = put t + get = TS `fmap` get + +instance Text Timestamp where + disp = Disp.text . showTimestamp + + parse = parsePosix ReadP.+++ parseUTC + where + -- | Parses unix timestamps, e.g. @"\@1474626019"@ + parsePosix = do + _ <- ReadP.char '@' + t <- parseInteger + maybe ReadP.pfail return $ posixSecondsToTimestamp t + + -- | Parses ISO8601/RFC3339-style UTC timestamps, + -- e.g. @"2017-12-31T23:59:59Z"@ + -- + -- TODO: support numeric tz offsets; allow to leave off seconds + parseUTC = do + -- Note: we don't use 'Data.Time.Format.parseTime' here since + -- we want more control over the accepted formats. + + ye <- parseYear + _ <- ReadP.char '-' + mo <- parseTwoDigits + _ <- ReadP.char '-' + da <- parseTwoDigits + _ <- ReadP.char 'T' + + utctDay <- maybe ReadP.pfail return $ + fromGregorianValid ye mo da + + ho <- parseTwoDigits + _ <- ReadP.char ':' + mi <- parseTwoDigits + _ <- ReadP.char ':' + se <- parseTwoDigits + _ <- ReadP.char 'Z' + + utctDayTime <- maybe ReadP.pfail (return . timeOfDayToTime) $ + makeTimeOfDayValid ho mi (realToFrac (se::Int)) + + maybe ReadP.pfail return $ utcTimeToTimestamp (UTCTime{..}) + + parseTwoDigits = do + d1 <- ReadP.satisfy isDigit + d2 <- ReadP.satisfy isDigit + return (read [d1,d2]) + + -- A year must have at least 4 digits; e.g. "0097" is fine, + -- while "97" is not c.f. 
RFC3339 which + -- deprecates 2-digit years + parseYear = do + sign <- ReadP.option ' ' (ReadP.char '-') + ds <- ReadP.munch1 isDigit + when (length ds < 4) ReadP.pfail + return (read (sign:ds)) + + parseInteger = do + sign <- ReadP.option ' ' (ReadP.char '-') + ds <- ReadP.munch1 isDigit + return (read (sign:ds) :: Integer) + +-- | Special timestamp value to be used when 'timestamp' is +-- missing/unknown/invalid +nullTimestamp :: Timestamp +nullTimestamp = TS minBound + +---------------------------------------------------------------------------- +-- defined here for now to avoid import cycles + +-- | Specification of the state of a specific repo package index +data IndexState = IndexStateHead -- ^ Use all available entries + | IndexStateTime !Timestamp -- ^ Use all entries that existed at + -- the specified time + deriving (Eq,Generic,Show) + +instance Binary IndexState +instance NFData IndexState + +instance Text IndexState where + disp IndexStateHead = Disp.text "HEAD" + disp (IndexStateTime ts) = disp ts + + parse = parseHead ReadP.+++ parseTime + where + parseHead = do + _ <- ReadP.string "HEAD" + return IndexStateHead + + parseTime = IndexStateTime `fmap` parse diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/IndexUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/IndexUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/IndexUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/IndexUtils.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,987 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE GADTs #-} + +----------------------------------------------------------------------------- +-- | +-- Module : 
Distribution.Client.IndexUtils +-- Copyright : (c) Duncan Coutts 2008 +-- License : BSD-like +-- +-- Maintainer : duncan@community.haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Extra utils related to the package indexes. +----------------------------------------------------------------------------- +module Distribution.Client.IndexUtils ( + getIndexFileAge, + getInstalledPackages, + indexBaseName, + Configure.getInstalledPackagesMonitorFiles, + getSourcePackages, + getSourcePackagesMonitorFiles, + + IndexState(..), + getSourcePackagesAtIndexState, + + Index(..), + PackageEntry(..), + parsePackageIndex, + updateRepoIndexCache, + updatePackageIndexCacheFile, + writeIndexTimestamp, + currentIndexTimestamp, + readCacheStrict, -- only used by soon-to-be-obsolete sandbox code + + BuildTreeRefType(..), refTypeFromTypeCode, typeCodeFromRefType + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import qualified Codec.Archive.Tar as Tar +import qualified Codec.Archive.Tar.Entry as Tar +import qualified Codec.Archive.Tar.Index as Tar +import qualified Distribution.Client.Tar as Tar +import Distribution.Client.IndexUtils.Timestamp +import Distribution.Client.Types +import Distribution.Verbosity + +import Distribution.Package + ( PackageId, PackageIdentifier(..), mkPackageName + , Package(..), packageVersion, packageName ) +import Distribution.Types.Dependency +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import Distribution.PackageDescription + ( GenericPackageDescription(..) 
+ , PackageDescription(..), emptyPackageDescription ) +import Distribution.Simple.Compiler + ( Compiler, PackageDBStack ) +import Distribution.Simple.Program + ( ProgramDb ) +import qualified Distribution.Simple.Configure as Configure + ( getInstalledPackages, getInstalledPackagesMonitorFiles ) +import Distribution.Version + ( Version, mkVersion, intersectVersionRanges ) +import Distribution.Text + ( display, simpleParse ) +import Distribution.Simple.Utils + ( die', warn, info ) +import Distribution.Client.Setup + ( RepoContext(..) ) + +import Distribution.PackageDescription.Parsec + ( parseGenericPackageDescription, parseGenericPackageDescriptionMaybe ) +import qualified Distribution.PackageDescription.Parsec as PackageDesc.Parse + +import Distribution.Solver.Types.PackageIndex (PackageIndex) +import qualified Distribution.Solver.Types.PackageIndex as PackageIndex +import Distribution.Solver.Types.SourcePackage + +import qualified Data.Map as Map +import Control.DeepSeq +import Control.Monad +import Control.Exception +import qualified Data.ByteString.Lazy as BS +import qualified Data.ByteString.Lazy.Char8 as BS.Char8 +import qualified Data.ByteString.Char8 as BSS +import Data.ByteString.Lazy (ByteString) +import Distribution.Client.GZipUtils (maybeDecompress) +import Distribution.Client.Utils ( byteStringToFilePath + , tryFindAddSourcePackageDesc ) +import Distribution.Compat.Binary +import Distribution.Compat.Exception (catchIO) +import Distribution.Compat.Time (getFileAge, getModTime) +import System.Directory (doesFileExist, doesDirectoryExist) +import System.FilePath + ( (), (<.>), takeExtension, replaceExtension, splitDirectories, normalise ) +import System.FilePath.Posix as FilePath.Posix + ( takeFileName ) +import System.IO +import System.IO.Unsafe (unsafeInterleaveIO) +import System.IO.Error (isDoesNotExistError) + +import qualified Hackage.Security.Client as Sec +import qualified Hackage.Security.Util.Some as Sec + +-- | Reduced-verbosity version of 
'Configure.getInstalledPackages' +getInstalledPackages :: Verbosity -> Compiler + -> PackageDBStack -> ProgramDb + -> IO InstalledPackageIndex +getInstalledPackages verbosity comp packageDbs progdb = + Configure.getInstalledPackages verbosity' comp packageDbs progdb + where + verbosity' = lessVerbose verbosity + + +-- | Get filename base (i.e. without file extension) for index-related files +-- +-- /Secure/ cabal repositories use a new extended & incremental +-- @01-index.tar@. In order to avoid issues resulting from clobbering +-- new/old-style index data, we save them locally to different names. +-- +-- Example: Use @indexBaseName repo <.> "tar.gz"@ to compute the 'FilePath' of the +-- @00-index.tar.gz@/@01-index.tar.gz@ file. +indexBaseName :: Repo -> FilePath +indexBaseName repo = repoLocalDir repo fn + where + fn = case repo of + RepoSecure {} -> "01-index" + RepoRemote {} -> "00-index" + RepoLocal {} -> "00-index" + +------------------------------------------------------------------------ +-- Reading the source package index +-- + +-- Note: 'data IndexState' is defined in +-- "Distribution.Client.IndexUtils.Timestamp" to avoid import cycles + +-- | 'IndexStateInfo' contains meta-information about the resulting +-- filtered 'Cache' 'after applying 'filterCache' according to a +-- requested 'IndexState'. +data IndexStateInfo = IndexStateInfo + { isiMaxTime :: !Timestamp + -- ^ 'Timestamp' of maximum/latest 'Timestamp' in the current + -- filtered view of the cache. + -- + -- The following property holds + -- + -- > filterCache (IndexState (isiMaxTime isi)) cache == (cache, isi) + -- + + , isiHeadTime :: !Timestamp + -- ^ 'Timestamp' equivalent to 'IndexStateHead', i.e. the latest + -- known 'Timestamp'; 'isiHeadTime' is always greater or equal to + -- 'isiMaxTime'. + } + +emptyStateInfo :: IndexStateInfo +emptyStateInfo = IndexStateInfo nullTimestamp nullTimestamp + +-- | Filters a 'Cache' according to an 'IndexState' +-- specification. 
Also returns 'IndexStateInfo' describing the +-- resulting index cache. +-- +-- Note: 'filterCache' is idempotent in the 'Cache' value +filterCache :: IndexState -> Cache -> (Cache, IndexStateInfo) +filterCache IndexStateHead cache = (cache, IndexStateInfo{..}) + where + isiMaxTime = cacheHeadTs cache + isiHeadTime = cacheHeadTs cache +filterCache (IndexStateTime ts0) cache0 = (cache, IndexStateInfo{..}) + where + cache = Cache { cacheEntries = ents, cacheHeadTs = isiMaxTime } + isiHeadTime = cacheHeadTs cache0 + isiMaxTime = maximumTimestamp (map cacheEntryTimestamp ents) + ents = filter ((<= ts0) . cacheEntryTimestamp) (cacheEntries cache0) + +-- | Read a repository index from disk, from the local files specified by +-- a list of 'Repo's. +-- +-- All the 'SourcePackage's are marked as having come from the appropriate +-- 'Repo'. +-- +-- This is a higher level wrapper used internally in cabal-install. +getSourcePackages :: Verbosity -> RepoContext -> IO SourcePackageDb +getSourcePackages verbosity repoCtxt = + getSourcePackagesAtIndexState verbosity repoCtxt Nothing + +-- | Variant of 'getSourcePackages' which allows getting the source +-- packages at a particular 'IndexState'. +-- +-- Current choices are either the latest (aka HEAD), or the index as +-- it was at a particular time. +-- +-- TODO: Enhance to allow specifying per-repo 'IndexState's and also +-- report back per-repo 'IndexStateInfo's (in order for @new-freeze@ +-- to access it) +getSourcePackagesAtIndexState :: Verbosity -> RepoContext -> Maybe IndexState + -> IO SourcePackageDb +getSourcePackagesAtIndexState verbosity repoCtxt _ + | null (repoContextRepos repoCtxt) = do + -- In the test suite, we routinely don't have any remote package + -- servers, so don't bleat about it + warn (verboseUnmarkOutput verbosity) $ + "No remote package servers have been specified. Usually " ++ + "you would have one specified in the config file." 
+ return SourcePackageDb { + packageIndex = mempty, + packagePreferences = mempty + } +getSourcePackagesAtIndexState verbosity repoCtxt mb_idxState = do + let describeState IndexStateHead = "most recent state" + describeState (IndexStateTime time) = "historical state as of " ++ display time + + pkgss <- forM (repoContextRepos repoCtxt) $ \r -> do + let rname = maybe "" remoteRepoName $ maybeRepoRemote r + info verbosity ("Reading available packages of " ++ rname ++ "...") + + idxState <- case mb_idxState of + Just idxState -> do + info verbosity $ "Using " ++ describeState idxState ++ + " as explicitly requested (via command line / project configuration)" + return idxState + Nothing -> do + mb_idxState' <- readIndexTimestamp (RepoIndex repoCtxt r) + case mb_idxState' of + Nothing -> do + info verbosity "Using most recent state (could not read timestamp file)" + return IndexStateHead + Just idxState -> do + info verbosity $ "Using " ++ describeState idxState ++ + " specified from most recent cabal update" + return idxState + + unless (idxState == IndexStateHead) $ + case r of + RepoLocal path -> warn verbosity ("index-state ignored for old-format repositories (local repository '" ++ path ++ "')") + RepoRemote {} -> warn verbosity ("index-state ignored for old-format (remote repository '" ++ rname ++ "')") + RepoSecure {} -> pure () + + let idxState' = case r of + RepoSecure {} -> idxState + _ -> IndexStateHead + + (pis,deps,isi) <- readRepoIndex verbosity repoCtxt r idxState' + + case idxState' of + IndexStateHead -> do + info verbosity ("index-state("++rname++") = " ++ + display (isiHeadTime isi)) + return () + IndexStateTime ts0 -> do + when (isiMaxTime isi /= ts0) $ + if ts0 > isiMaxTime isi + then warn verbosity $ + "Requested index-state" ++ display ts0 + ++ " is newer than '" ++ rname ++ "'!" + ++ " Falling back to older state (" + ++ display (isiMaxTime isi) ++ ")." 
+ else info verbosity $ + "Requested index-state " ++ display ts0 + ++ " does not exist in '"++rname++"'!" + ++ " Falling back to older state (" + ++ display (isiMaxTime isi) ++ ")." + info verbosity ("index-state("++rname++") = " ++ + display (isiMaxTime isi) ++ " (HEAD = " ++ + display (isiHeadTime isi) ++ ")") + + pure (pis,deps) + + let (pkgs, prefs) = mconcat pkgss + prefs' = Map.fromListWith intersectVersionRanges + [ (name, range) | Dependency name range <- prefs ] + _ <- evaluate pkgs + _ <- evaluate prefs' + return SourcePackageDb { + packageIndex = pkgs, + packagePreferences = prefs' + } + +readCacheStrict :: NFData pkg => Verbosity -> Index -> (PackageEntry -> pkg) -> IO ([pkg], [Dependency]) +readCacheStrict verbosity index mkPkg = do + updateRepoIndexCache verbosity index + cache <- readIndexCache verbosity index + withFile (indexFile index) ReadMode $ \indexHnd -> + evaluate . force =<< packageListFromCache verbosity mkPkg indexHnd cache + +-- | Read a repository index from disk, from the local file specified by +-- the 'Repo'. +-- +-- All the 'SourcePackage's are marked as having come from the given 'Repo'. +-- +-- This is a higher level wrapper used internally in cabal-install. 
+-- +readRepoIndex :: Verbosity -> RepoContext -> Repo -> IndexState + -> IO (PackageIndex UnresolvedSourcePackage, [Dependency], IndexStateInfo) +readRepoIndex verbosity repoCtxt repo idxState = + handleNotFound $ do + warnIfIndexIsOld =<< getIndexFileAge repo + updateRepoIndexCache verbosity (RepoIndex repoCtxt repo) + readPackageIndexCacheFile verbosity mkAvailablePackage + (RepoIndex repoCtxt repo) + idxState + + where + mkAvailablePackage pkgEntry = + SourcePackage { + packageInfoId = pkgid, + packageDescription = packageDesc pkgEntry, + packageSource = case pkgEntry of + NormalPackage _ _ _ _ -> RepoTarballPackage repo pkgid Nothing + BuildTreeRef _ _ _ path _ -> LocalUnpackedPackage path, + packageDescrOverride = case pkgEntry of + NormalPackage _ _ pkgtxt _ -> Just pkgtxt + _ -> Nothing + } + where + pkgid = packageId pkgEntry + + handleNotFound action = catchIO action $ \e -> if isDoesNotExistError e + then do + case repo of + RepoRemote{..} -> warn verbosity $ errMissingPackageList repoRemote + RepoSecure{..} -> warn verbosity $ errMissingPackageList repoRemote + RepoLocal{..} -> warn verbosity $ + "The package list for the local repo '" ++ repoLocalDir + ++ "' is missing. The repo is invalid." + return (mempty,mempty,emptyStateInfo) + else ioError e + + isOldThreshold = 15 --days + warnIfIndexIsOld dt = do + when (dt >= isOldThreshold) $ case repo of + RepoRemote{..} -> warn verbosity $ errOutdatedPackageList repoRemote dt + RepoSecure{..} -> warn verbosity $ errOutdatedPackageList repoRemote dt + RepoLocal{..} -> return () + + errMissingPackageList repoRemote = + "The package list for '" ++ remoteRepoName repoRemote + ++ "' does not exist. Run 'cabal update' to download it." + errOutdatedPackageList repoRemote dt = + "The package list for '" ++ remoteRepoName repoRemote + ++ "' is " ++ shows (floor dt :: Int) " days old.\nRun " + ++ "'cabal update' to get the latest list of available packages." 
+ +-- | Return the age of the index file in days (as a Double). +getIndexFileAge :: Repo -> IO Double +getIndexFileAge repo = getFileAge $ indexBaseName repo <.> "tar" + +-- | A set of files (or directories) that can be monitored to detect when +-- there might have been a change in the source packages. +-- +getSourcePackagesMonitorFiles :: [Repo] -> [FilePath] +getSourcePackagesMonitorFiles repos = + concat [ [ indexBaseName repo <.> "cache" + , indexBaseName repo <.> "timestamp" ] + | repo <- repos ] + +-- | It is not necessary to call this, as the cache will be updated when the +-- index is read normally. However you can do the work earlier if you like. +-- +updateRepoIndexCache :: Verbosity -> Index -> IO () +updateRepoIndexCache verbosity index = + whenCacheOutOfDate index $ do + updatePackageIndexCacheFile verbosity index + +whenCacheOutOfDate :: Index -> IO () -> IO () +whenCacheOutOfDate index action = do + exists <- doesFileExist $ cacheFile index + if not exists + then action + else do + indexTime <- getModTime $ indexFile index + cacheTime <- getModTime $ cacheFile index + when (indexTime > cacheTime) action + +------------------------------------------------------------------------ +-- Reading the index file +-- + +-- | An index entry is either a normal package, or a local build tree reference. +data PackageEntry = + NormalPackage PackageId GenericPackageDescription ByteString BlockNo + | BuildTreeRef BuildTreeRefType + PackageId GenericPackageDescription FilePath BlockNo + +-- | A build tree reference is either a link or a snapshot. 
+data BuildTreeRefType = SnapshotRef | LinkRef + deriving (Eq,Generic) + +instance Binary BuildTreeRefType + +refTypeFromTypeCode :: Tar.TypeCode -> BuildTreeRefType +refTypeFromTypeCode t + | t == Tar.buildTreeRefTypeCode = LinkRef + | t == Tar.buildTreeSnapshotTypeCode = SnapshotRef + | otherwise = + error "Distribution.Client.IndexUtils.refTypeFromTypeCode: unknown type code" + +typeCodeFromRefType :: BuildTreeRefType -> Tar.TypeCode +typeCodeFromRefType LinkRef = Tar.buildTreeRefTypeCode +typeCodeFromRefType SnapshotRef = Tar.buildTreeSnapshotTypeCode + +instance Package PackageEntry where + packageId (NormalPackage pkgid _ _ _) = pkgid + packageId (BuildTreeRef _ pkgid _ _ _) = pkgid + +packageDesc :: PackageEntry -> GenericPackageDescription +packageDesc (NormalPackage _ descr _ _) = descr +packageDesc (BuildTreeRef _ _ descr _ _) = descr + +-- | Parse an uncompressed \"00-index.tar\" repository index file represented +-- as a 'ByteString'. +-- + +data PackageOrDep = Pkg PackageEntry | Dep Dependency + +-- | Read @00-index.tar.gz@ and extract @.cabal@ and @preferred-versions@ files +-- +-- We read the index using 'Tar.read', which gives us a lazily constructed +-- 'TarEntries'. We translate it to a list of entries using 'tarEntriesList', +-- which preserves the lazy nature of 'TarEntries', and finally 'concatMap' a +-- function over this to translate it to a list of IO actions returning +-- 'PackageOrDep's. We can use 'lazySequence' to turn this into a list of +-- 'PackageOrDep's, still maintaining the lazy nature of the original tar read. +parsePackageIndex :: Verbosity -> ByteString -> [IO (Maybe PackageOrDep)] +parsePackageIndex verbosity = concatMap (uncurry extract) . tarEntriesList . 
Tar.read + where + extract :: BlockNo -> Tar.Entry -> [IO (Maybe PackageOrDep)] + extract blockNo entry = tryExtractPkg ++ tryExtractPrefs + where + tryExtractPkg = do + mkPkgEntry <- maybeToList $ extractPkg verbosity entry blockNo + return $ fmap (fmap Pkg) mkPkgEntry + + tryExtractPrefs = do + prefs' <- maybeToList $ extractPrefs entry + fmap (return . Just . Dep) prefs' + +-- | Turn the 'Entries' data structure from the @tar@ package into a list, +-- and pair each entry with its block number. +-- +-- NOTE: This preserves the lazy nature of 'Entries': the tar file is only read +-- as far as the list is evaluated. +tarEntriesList :: Show e => Tar.Entries e -> [(BlockNo, Tar.Entry)] +tarEntriesList = go 0 + where + go !_ Tar.Done = [] + go !_ (Tar.Fail e) = error ("tarEntriesList: " ++ show e) + go !n (Tar.Next e es') = (n, e) : go (Tar.nextEntryOffset e n) es' + +extractPkg :: Verbosity -> Tar.Entry -> BlockNo -> Maybe (IO (Maybe PackageEntry)) +extractPkg verbosity entry blockNo = case Tar.entryContent entry of + Tar.NormalFile content _ + | takeExtension fileName == ".cabal" + -> case splitDirectories (normalise fileName) of + [pkgname,vers,_] -> case simpleParse vers of + Just ver -> Just . return $ Just (NormalPackage pkgid descr content blockNo) + where + pkgid = PackageIdentifier (mkPackageName pkgname) ver + parsed = parseGenericPackageDescriptionMaybe (BS.toStrict content) + descr = case parsed of + Just d -> d + Nothing -> error $ "Couldn't read cabal file " + ++ show fileName + _ -> Nothing + _ -> Nothing + + Tar.OtherEntryType typeCode content _ + | Tar.isBuildTreeRefTypeCode typeCode -> + Just $ do + let path = byteStringToFilePath content + dirExists <- doesDirectoryExist path + result <- if not dirExists then return Nothing + else do + cabalFile <- tryFindAddSourcePackageDesc verbosity path "Error reading package index." + descr <- PackageDesc.Parse.readGenericPackageDescription normal cabalFile + return . 
Just $ BuildTreeRef (refTypeFromTypeCode typeCode) (packageId descr) + descr path blockNo + return result + + _ -> Nothing + + where + fileName = Tar.entryPath entry + +extractPrefs :: Tar.Entry -> Maybe [Dependency] +extractPrefs entry = case Tar.entryContent entry of + Tar.NormalFile content _ + | takeFileName entrypath == "preferred-versions" + -> Just prefs + where + entrypath = Tar.entryPath entry + prefs = parsePreferredVersions content + _ -> Nothing + +parsePreferredVersions :: ByteString -> [Dependency] +parsePreferredVersions = mapMaybe simpleParse + . filter (not . isPrefixOf "--") + . lines + . BS.Char8.unpack -- TODO: Are we sure no unicode? + +------------------------------------------------------------------------ +-- Reading and updating the index cache +-- + +-- | Variation on 'sequence' which evaluates the actions lazily +-- +-- Pattern matching on the result list will execute just the first action; +-- more generally pattern matching on the first @n@ '(:)' nodes will execute +-- the first @n@ actions. +lazySequence :: [IO a] -> IO [a] +lazySequence = unsafeInterleaveIO . go + where + go [] = return [] + go (x:xs) = do x' <- x + xs' <- lazySequence xs + return (x' : xs') + +-- | A lazy unfolder for lookup operations which return the current +-- value and (possibly) the next key +lazyUnfold :: (k -> IO (v, Maybe k)) -> k -> IO [(k,v)] +lazyUnfold step = goLazy . Just + where + goLazy s = unsafeInterleaveIO (go s) + + go Nothing = return [] + go (Just k) = do + (v, mk') <- step k + vs' <- goLazy mk' + return ((k,v):vs') + +-- | Which index do we mean? 
+data Index = + -- | The main index for the specified repository + RepoIndex RepoContext Repo + + -- | A sandbox-local repository + -- Argument is the location of the index file + | SandboxIndex FilePath + +indexFile :: Index -> FilePath +indexFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "tar" +indexFile (SandboxIndex index) = index + +cacheFile :: Index -> FilePath +cacheFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "cache" +cacheFile (SandboxIndex index) = index `replaceExtension` "cache" + +timestampFile :: Index -> FilePath +timestampFile (RepoIndex _ctxt repo) = indexBaseName repo <.> "timestamp" +timestampFile (SandboxIndex index) = index `replaceExtension` "timestamp" + +-- | Return 'True' if 'Index' uses 01-index format (aka secure repo) +is01Index :: Index -> Bool +is01Index (RepoIndex _ repo) = case repo of + RepoSecure {} -> True + RepoRemote {} -> False + RepoLocal {} -> False +is01Index (SandboxIndex _) = False + + +updatePackageIndexCacheFile :: Verbosity -> Index -> IO () +updatePackageIndexCacheFile verbosity index = do + info verbosity ("Updating index cache file " ++ cacheFile index ++ " ...") + withIndexEntries verbosity index $ \entries -> do + let !maxTs = maximumTimestamp (map cacheEntryTimestamp entries) + cache = Cache { cacheHeadTs = maxTs + , cacheEntries = entries + } + writeIndexCache index cache + info verbosity ("Index cache updated to index-state " + ++ display (cacheHeadTs cache)) + +-- | Read the index (for the purpose of building a cache) +-- +-- The callback is provided with list of cache entries, which is guaranteed to +-- be lazily constructed. This list must ONLY be used in the scope of the +-- callback; when the callback is terminated the file handle to the index will +-- be closed and further attempts to read from the list will result in (pure) +-- I/O exceptions. 
+-- +-- In the construction of the index for a secure repo we take advantage of the +-- index built by the @hackage-security@ library to avoid reading the @.tar@ +-- file as much as possible (we need to read it only to extract preferred +-- versions). This helps performance, but is also required for correctness: +-- the new @01-index.tar.gz@ may have multiple versions of preferred-versions +-- files, and 'parsePackageIndex' does not correctly deal with that (see #2956); +-- by reading the already-built cache from the security library we will be sure +-- to only read the latest versions of all files. +-- +-- TODO: It would be nicer if we actually incrementally updated @cabal@'s +-- cache, rather than reconstruct it from zero on each update. However, this +-- would require a change in the cache format. +withIndexEntries :: Verbosity -> Index -> ([IndexCacheEntry] -> IO a) -> IO a +withIndexEntries _ (RepoIndex repoCtxt repo@RepoSecure{..}) callback = + repoContextWithSecureRepo repoCtxt repo $ \repoSecure -> + Sec.withIndex repoSecure $ \Sec.IndexCallbacks{..} -> do + -- Incrementally (lazily) read all the entries in the tar file in order, + -- including all revisions, not just the last revision of each file + indexEntries <- lazyUnfold indexLookupEntry (Sec.directoryFirst indexDirectory) + callback [ cacheEntry + | (dirEntry, indexEntry) <- indexEntries + , cacheEntry <- toCacheEntries dirEntry indexEntry ] + where + toCacheEntries :: Sec.DirectoryEntry -> Sec.Some Sec.IndexEntry + -> [IndexCacheEntry] + toCacheEntries dirEntry (Sec.Some sie) = + case Sec.indexEntryPathParsed sie of + Nothing -> [] -- skip unrecognized file + Just (Sec.IndexPkgMetadata _pkgId) -> [] -- skip metadata + Just (Sec.IndexPkgCabal pkgId) -> force + [CachePackageId pkgId blockNo timestamp] + Just (Sec.IndexPkgPrefs _pkgName) -> force + [ CachePreference dep blockNo timestamp + | dep <- parsePreferredVersions (Sec.indexEntryContent sie) + ] + where + blockNo = Sec.directoryEntryBlockNo 
dirEntry + timestamp = fromMaybe (error "withIndexEntries: invalid timestamp") $ + epochTimeToTimestamp $ Sec.indexEntryTime sie + +withIndexEntries verbosity index callback = do -- non-secure repositories + withFile (indexFile index) ReadMode $ \h -> do + bs <- maybeDecompress `fmap` BS.hGetContents h + pkgsOrPrefs <- lazySequence $ parsePackageIndex verbosity bs + callback $ map toCache (catMaybes pkgsOrPrefs) + where + toCache :: PackageOrDep -> IndexCacheEntry + toCache (Pkg (NormalPackage pkgid _ _ blockNo)) = CachePackageId pkgid blockNo nullTimestamp + toCache (Pkg (BuildTreeRef refType _ _ _ blockNo)) = CacheBuildTreeRef refType blockNo + toCache (Dep d) = CachePreference d 0 nullTimestamp + +readPackageIndexCacheFile :: Package pkg + => Verbosity + -> (PackageEntry -> pkg) + -> Index + -> IndexState + -> IO (PackageIndex pkg, [Dependency], IndexStateInfo) +readPackageIndexCacheFile verbosity mkPkg index idxState = do + cache0 <- readIndexCache verbosity index + indexHnd <- openFile (indexFile index) ReadMode + let (cache,isi) = filterCache idxState cache0 + (pkgs,deps) <- packageIndexFromCache verbosity mkPkg indexHnd cache + pure (pkgs,deps,isi) + + +packageIndexFromCache :: Package pkg + => Verbosity + -> (PackageEntry -> pkg) + -> Handle + -> Cache + -> IO (PackageIndex pkg, [Dependency]) +packageIndexFromCache verbosity mkPkg hnd cache = do + (pkgs, prefs) <- packageListFromCache verbosity mkPkg hnd cache + pkgIndex <- evaluate $ PackageIndex.fromList pkgs + return (pkgIndex, prefs) + +-- | Read package list +-- +-- The result package releases and preference entries are guaranteed +-- to be unique. +-- +-- Note: 01-index.tar is an append-only index and therefore contains +-- all .cabal edits and preference-updates. The masking happens +-- here, i.e. the semantics that later entries in a tar file mask +-- earlier ones is resolved in this function. 
+packageListFromCache :: Verbosity + -> (PackageEntry -> pkg) + -> Handle + -> Cache + -> IO ([pkg], [Dependency]) +packageListFromCache verbosity mkPkg hnd Cache{..} = accum mempty [] mempty cacheEntries + where + accum !srcpkgs btrs !prefs [] = return (Map.elems srcpkgs ++ btrs, Map.elems prefs) + + accum srcpkgs btrs prefs (CachePackageId pkgid blockno _ : entries) = do + -- Given the cache entry, make a package index entry. + -- The magic here is that we use lazy IO to read the .cabal file + -- from the index tarball if it turns out that we need it. + -- Most of the time we only need the package id. + ~(pkg, pkgtxt) <- unsafeInterleaveIO $ do + pkgtxt <- getEntryContent blockno + pkg <- readPackageDescription pkgid pkgtxt + return (pkg, pkgtxt) + + let srcpkg = mkPkg (NormalPackage pkgid pkg pkgtxt blockno) + accum (Map.insert pkgid srcpkg srcpkgs) btrs prefs entries + + accum srcpkgs btrs prefs (CacheBuildTreeRef refType blockno : entries) = do + -- We have to read the .cabal file eagerly here because we can't cache the + -- package id for build tree references - the user might edit the .cabal + -- file after the reference was added to the index. + path <- liftM byteStringToFilePath . getEntryContent $ blockno + pkg <- do let err = "Error reading package index from cache." 
+ file <- tryFindAddSourcePackageDesc verbosity path err + PackageDesc.Parse.readGenericPackageDescription normal file + let srcpkg = mkPkg (BuildTreeRef refType (packageId pkg) pkg path blockno) + accum srcpkgs (srcpkg:btrs) prefs entries + + accum srcpkgs btrs prefs (CachePreference pref@(Dependency pn _) _ _ : entries) = + accum srcpkgs btrs (Map.insert pn pref prefs) entries + + getEntryContent :: BlockNo -> IO ByteString + getEntryContent blockno = do + entry <- Tar.hReadEntry hnd blockno + case Tar.entryContent entry of + Tar.NormalFile content _size -> return content + Tar.OtherEntryType typecode content _size + | Tar.isBuildTreeRefTypeCode typecode + -> return content + _ -> interror "unexpected tar entry type" + + readPackageDescription :: PackageIdentifier -> ByteString -> IO GenericPackageDescription + readPackageDescription pkgid content = + case snd $ PackageDesc.Parse.runParseResult $ parseGenericPackageDescription $ BS.toStrict content of + Right gpd -> return gpd + Left (Just specVer, _) | specVer >= mkVersion [2,2] -> return (dummyPackageDescription specVer) + Left _ -> interror "failed to parse .cabal file" + where + dummyPackageDescription :: Version -> GenericPackageDescription + dummyPackageDescription specVer = GenericPackageDescription + { packageDescription = emptyPackageDescription + { specVersionRaw = Left specVer + , package = pkgid + , synopsis = dummySynopsis + } + , genPackageFlags = [] + , condLibrary = Nothing + , condSubLibraries = [] + , condForeignLibs = [] + , condExecutables = [] + , condTestSuites = [] + , condBenchmarks = [] + } + + dummySynopsis = "" + + interror :: String -> IO a + interror msg = die' verbosity $ "internal error when reading package index: " ++ msg + ++ "The package index or index cache is probably " + ++ "corrupt. Running cabal update might fix it." 
+ + + +------------------------------------------------------------------------ +-- Index cache data structure +-- + +-- | Read the 'Index' cache from the filesystem +-- +-- If a corrupted index cache is detected this function regenerates +-- the index cache and then reattempt to read the index once (and +-- 'die's if it fails again). +readIndexCache :: Verbosity -> Index -> IO Cache +readIndexCache verbosity index = do + cacheOrFail <- readIndexCache' index + case cacheOrFail of + Left msg -> do + warn verbosity $ concat + [ "Parsing the index cache failed (", msg, "). " + , "Trying to regenerate the index cache..." + ] + + updatePackageIndexCacheFile verbosity index + + either (die' verbosity) (return . hashConsCache) =<< readIndexCache' index + + Right res -> return (hashConsCache res) + +-- | Read the 'Index' cache from the filesystem without attempting to +-- regenerate on parsing failures. +readIndexCache' :: Index -> IO (Either String Cache) +readIndexCache' index + | is01Index index = decodeFileOrFail' (cacheFile index) + | otherwise = liftM (Right .read00IndexCache) $ + BSS.readFile (cacheFile index) + +-- | Write the 'Index' cache to the filesystem +writeIndexCache :: Index -> Cache -> IO () +writeIndexCache index cache + | is01Index index = encodeFile (cacheFile index) cache + | otherwise = writeFile (cacheFile index) (show00IndexCache cache) + +-- | Write the 'IndexState' to the filesystem +writeIndexTimestamp :: Index -> IndexState -> IO () +writeIndexTimestamp index st + = writeFile (timestampFile index) (display st) + +-- | Read out the "current" index timestamp, i.e., what +-- timestamp you would use to revert to this version +currentIndexTimestamp :: Verbosity -> RepoContext -> Repo -> IO Timestamp +currentIndexTimestamp verbosity repoCtxt r = do + mb_is <- readIndexTimestamp (RepoIndex repoCtxt r) + case mb_is of + Just (IndexStateTime ts) -> return ts + _ -> do + (_,_,isi) <- readRepoIndex verbosity repoCtxt r IndexStateHead + return (isiHeadTime 
isi) + +-- | Read the 'IndexState' from the filesystem +readIndexTimestamp :: Index -> IO (Maybe IndexState) +readIndexTimestamp index + = fmap simpleParse (readFile (timestampFile index)) + `catchIO` \e -> + if isDoesNotExistError e + then return Nothing + else ioError e + +-- | Optimise sharing of equal values inside 'Cache' +-- +-- c.f. https://en.wikipedia.org/wiki/Hash_consing +hashConsCache :: Cache -> Cache +hashConsCache cache0 + = cache0 { cacheEntries = go mempty mempty (cacheEntries cache0) } + where + -- TODO/NOTE: + -- + -- If/when we redo the binary serialisation via e.g. CBOR and we + -- are able to use incremental decoding, we may want to move the + -- hash-consing into the incremental deserialisation, or + -- alterantively even do something like + -- http://cbor.schmorp.de/value-sharing + -- + go _ _ [] = [] + -- for now we only optimise only CachePackageIds since those + -- represent the vast majority + go !pns !pvs (CachePackageId pid bno ts : rest) + = CachePackageId pid' bno ts : go pns' pvs' rest + where + !pid' = PackageIdentifier pn' pv' + (!pn',!pns') = mapIntern pn pns + (!pv',!pvs') = mapIntern pv pvs + PackageIdentifier pn pv = pid + + go pns pvs (x:xs) = x : go pns pvs xs + + mapIntern :: Ord k => k -> Map.Map k k -> (k,Map.Map k k) + mapIntern k m = maybe (k,Map.insert k k m) (\k' -> (k',m)) (Map.lookup k m) + +-- | Cabal caches various information about the Hackage index +data Cache = Cache + { cacheHeadTs :: Timestamp + -- ^ maximum/latest 'Timestamp' among 'cacheEntries'; unless the + -- invariant of 'cacheEntries' being in chronological order is + -- violated, this corresponds to the last (seen) 'Timestamp' in + -- 'cacheEntries' + , cacheEntries :: [IndexCacheEntry] + } + +instance NFData Cache where + rnf = rnf . cacheEntries + +-- | Tar files are block structured with 512 byte blocks. Every header and file +-- content starts on a block boundary. 
+-- +type BlockNo = Word32 -- Tar.TarEntryOffset + + +data IndexCacheEntry + = CachePackageId PackageId !BlockNo !Timestamp + | CachePreference Dependency !BlockNo !Timestamp + | CacheBuildTreeRef !BuildTreeRefType !BlockNo + -- NB: CacheBuildTreeRef is irrelevant for 01-index & new-build + deriving (Eq,Generic) + +instance NFData IndexCacheEntry where + rnf (CachePackageId pkgid _ _) = rnf pkgid + rnf (CachePreference dep _ _) = rnf dep + rnf (CacheBuildTreeRef _ _) = () + +cacheEntryTimestamp :: IndexCacheEntry -> Timestamp +cacheEntryTimestamp (CacheBuildTreeRef _ _) = nullTimestamp +cacheEntryTimestamp (CachePreference _ _ ts) = ts +cacheEntryTimestamp (CachePackageId _ _ ts) = ts + +---------------------------------------------------------------------------- +-- new binary 01-index.cache format + +instance Binary Cache where + put (Cache headTs ents) = do + -- magic / format version + -- + -- NB: this currently encodes word-size implicitly; when we + -- switch to CBOR encoding, we will have a platform + -- independent binary encoding + put (0xcaba1002::Word) + put headTs + put ents + + get = do + magic <- get + when (magic /= (0xcaba1002::Word)) $ + fail ("01-index.cache: unexpected magic marker encountered: " ++ show magic) + Cache <$> get <*> get + +instance Binary IndexCacheEntry + +---------------------------------------------------------------------------- +-- legacy 00-index.cache format + +packageKey, blocknoKey, buildTreeRefKey, preferredVersionKey :: String +packageKey = "pkg:" +blocknoKey = "b#" +buildTreeRefKey = "build-tree-ref:" +preferredVersionKey = "pref-ver:" + +-- legacy 00-index.cache format +read00IndexCache :: BSS.ByteString -> Cache +read00IndexCache bs = Cache + { cacheHeadTs = nullTimestamp + , cacheEntries = mapMaybe read00IndexCacheEntry $ BSS.lines bs + } + +read00IndexCacheEntry :: BSS.ByteString -> Maybe IndexCacheEntry +read00IndexCacheEntry = \line -> + case BSS.words line of + [key, pkgnamestr, pkgverstr, sep, blocknostr] + | 
key == BSS.pack packageKey && sep == BSS.pack blocknoKey -> + case (parseName pkgnamestr, parseVer pkgverstr [], + parseBlockNo blocknostr) of + (Just pkgname, Just pkgver, Just blockno) + -> Just (CachePackageId (PackageIdentifier pkgname pkgver) + blockno nullTimestamp) + _ -> Nothing + [key, typecodestr, blocknostr] | key == BSS.pack buildTreeRefKey -> + case (parseRefType typecodestr, parseBlockNo blocknostr) of + (Just refType, Just blockno) + -> Just (CacheBuildTreeRef refType blockno) + _ -> Nothing + + (key: remainder) | key == BSS.pack preferredVersionKey -> do + pref <- simpleParse (BSS.unpack (BSS.unwords remainder)) + return $ CachePreference pref 0 nullTimestamp + + _ -> Nothing + where + parseName str + | BSS.all (\c -> isAlphaNum c || c == '-') str + = Just (mkPackageName (BSS.unpack str)) + | otherwise = Nothing + + parseVer str vs = + case BSS.readInt str of + Nothing -> Nothing + Just (v, str') -> case BSS.uncons str' of + Just ('.', str'') -> parseVer str'' (v:vs) + Just _ -> Nothing + Nothing -> Just (mkVersion (reverse (v:vs))) + + parseBlockNo str = + case BSS.readInt str of + Just (blockno, remainder) + | BSS.null remainder -> Just (fromIntegral blockno) + _ -> Nothing + + parseRefType str = + case BSS.uncons str of + Just (typeCode, remainder) + | BSS.null remainder && Tar.isBuildTreeRefTypeCode typeCode + -> Just (refTypeFromTypeCode typeCode) + _ -> Nothing + +-- legacy 00-index.cache format +show00IndexCache :: Cache -> String +show00IndexCache Cache{..} = unlines $ map show00IndexCacheEntry cacheEntries + +show00IndexCacheEntry :: IndexCacheEntry -> String +show00IndexCacheEntry entry = unwords $ case entry of + CachePackageId pkgid b _ -> [ packageKey + , display (packageName pkgid) + , display (packageVersion pkgid) + , blocknoKey + , show b + ] + CacheBuildTreeRef tr b -> [ buildTreeRefKey + , [typeCodeFromRefType tr] + , show b + ] + CachePreference dep _ _ -> [ preferredVersionKey + , display dep + ] diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Init/Heuristics.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Init/Heuristics.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Init/Heuristics.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Init/Heuristics.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,394 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Init.Heuristics +-- Copyright : (c) Benedikt Huber 2009 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Heuristics for creating initial cabal files. +-- +----------------------------------------------------------------------------- +module Distribution.Client.Init.Heuristics ( + guessPackageName, + scanForModules, SourceFileEntry(..), + neededBuildPrograms, + guessMainFileCandidates, + guessAuthorNameMail, + knownCategories, +) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Text (simpleParse) +import Distribution.Simple.Setup (Flag(..), flagToMaybe) +import Distribution.ModuleName + ( ModuleName, toFilePath ) +import qualified Distribution.Package as P +import qualified Distribution.PackageDescription as PD + ( category, packageDescription ) +import Distribution.Client.Utils + ( tryCanonicalizePath ) +import Language.Haskell.Extension ( Extension ) + +import Distribution.Solver.Types.PackageIndex + ( allPackagesByName ) +import Distribution.Solver.Types.SourcePackage + ( packageDescription ) + +import Distribution.Client.Types ( SourcePackageDb(..) 
) +import Control.Monad ( mapM ) +import Data.Char ( isNumber, isLower ) +import Data.Either ( partitionEithers ) +import Data.List ( isInfixOf ) +import Data.Ord ( comparing ) +import qualified Data.Set as Set ( fromList, toList ) +import System.Directory ( getCurrentDirectory, getDirectoryContents, + doesDirectoryExist, doesFileExist, getHomeDirectory, ) +import Distribution.Compat.Environment ( getEnvironment ) +import System.FilePath ( takeExtension, takeBaseName, dropExtension, + (), (<.>), splitDirectories, makeRelative ) + +import Distribution.Client.Init.Types ( InitFlags(..) ) +import Distribution.Client.Compat.Process ( readProcessWithExitCode ) +import System.Exit ( ExitCode(..) ) + +-- | Return a list of candidate main files for this executable: top-level +-- modules including the word 'Main' in the file name. The list is sorted in +-- order of preference, shorter file names are preferred. 'Right's are existing +-- candidates and 'Left's are those that do not yet exist. +guessMainFileCandidates :: InitFlags -> IO [Either FilePath FilePath] +guessMainFileCandidates flags = do + dir <- + maybe getCurrentDirectory return (flagToMaybe $ packageDir flags) + files <- getDirectoryContents dir + let existingCandidates = filter isMain files + -- We always want to give the user at least one default choice. If either + -- Main.hs or Main.lhs has already been created, then we don't want to + -- suggest the other; however, if neither has been created, then we + -- suggest both. + newCandidates = + if any (`elem` existingCandidates) ["Main.hs", "Main.lhs"] + then [] + else ["Main.hs", "Main.lhs"] + candidates = + sortBy (\x y -> comparing (length . either id id) x y + `mappend` compare x y) + (map Left newCandidates ++ map Right existingCandidates) + return candidates + + where + isMain f = (isInfixOf "Main" f || isInfixOf "main" f) + && (isSuffixOf ".hs" f || isSuffixOf ".lhs" f) + +-- | Guess the package name based on the given root directory. 
+guessPackageName :: FilePath -> IO P.PackageName +guessPackageName = liftM (P.mkPackageName . repair . last . splitDirectories) + . tryCanonicalizePath + where + -- Treat each span of non-alphanumeric characters as a hyphen. Each + -- hyphenated component of a package name must contain at least one + -- alphabetic character. An arbitrary character ('x') will be prepended if + -- this is not the case for the first component, and subsequent components + -- will simply be run together. For example, "1+2_foo-3" will become + -- "x12-foo3". + repair = repair' ('x' :) id + repair' invalid valid x = case dropWhile (not . isAlphaNum) x of + "" -> repairComponent "" + x' -> let (c, r) = first repairComponent $ break (not . isAlphaNum) x' + in c ++ repairRest r + where + repairComponent c | all isNumber c = invalid c + | otherwise = valid c + repairRest = repair' id ('-' :) + +-- |Data type of source files found in the working directory +data SourceFileEntry = SourceFileEntry + { relativeSourcePath :: FilePath + , moduleName :: ModuleName + , fileExtension :: String + , imports :: [ModuleName] + , extensions :: [Extension] + } deriving Show + +sfToFileName :: FilePath -> SourceFileEntry -> FilePath +sfToFileName projectRoot (SourceFileEntry relPath m ext _ _) + = projectRoot relPath toFilePath m <.> ext + +-- |Search for source files in the given directory +-- and return pairs of guessed Haskell source path and +-- module names. 
+scanForModules :: FilePath -> IO [SourceFileEntry] +scanForModules rootDir = scanForModulesIn rootDir rootDir + +scanForModulesIn :: FilePath -> FilePath -> IO [SourceFileEntry] +scanForModulesIn projectRoot srcRoot = scan srcRoot [] + where + scan dir hierarchy = do + entries <- getDirectoryContents (projectRoot dir) + (files, dirs) <- liftM partitionEithers (mapM (tagIsDir dir) entries) + let modules = catMaybes [ guessModuleName hierarchy file + | file <- files + , isUpper (head file) ] + modules' <- mapM (findImportsAndExts projectRoot) modules + recMods <- mapM (scanRecursive dir hierarchy) dirs + return $ concat (modules' : recMods) + tagIsDir parent entry = do + isDir <- doesDirectoryExist (parent entry) + return $ (if isDir then Right else Left) entry + guessModuleName hierarchy entry + | takeBaseName entry == "Setup" = Nothing + | ext `elem` sourceExtensions = + SourceFileEntry <$> pure relRoot <*> modName <*> pure ext <*> pure [] <*> pure [] + | otherwise = Nothing + where + relRoot = makeRelative projectRoot srcRoot + unqualModName = dropExtension entry + modName = simpleParse + $ intercalate "." . reverse $ (unqualModName : hierarchy) + ext = case takeExtension entry of '.':e -> e; e -> e + scanRecursive parent hierarchy entry + | isUpper (head entry) = scan (parent entry) (entry : hierarchy) + | isLower (head entry) && not (ignoreDir entry) = + scanForModulesIn projectRoot $ foldl () srcRoot (reverse (entry : hierarchy)) + | otherwise = return [] + ignoreDir ('.':_) = True + ignoreDir dir = dir `elem` ["dist", "_darcs"] + +findImportsAndExts :: FilePath -> SourceFileEntry -> IO SourceFileEntry +findImportsAndExts projectRoot sf = do + s <- readFile (sfToFileName projectRoot sf) + + let modules = mapMaybe + ( getModName + . drop 1 + . filter (not . null) + . dropWhile (/= "import") + . words + ) + . filter (not . ("--" `isPrefixOf`)) -- poor man's comment filtering + . 
lines + $ s + + -- TODO: We should probably make a better attempt at parsing + -- comments above. Unfortunately we can't use a full-fledged + -- Haskell parser since cabal's dependencies must be kept at a + -- minimum. + + -- A poor man's LANGUAGE pragma parser. + exts = mapMaybe simpleParse + . concatMap getPragmas + . filter isLANGUAGEPragma + . map fst + . drop 1 + . takeWhile (not . null . snd) + . iterate (takeBraces . snd) + $ ("",s) + + takeBraces = break (== '}') . dropWhile (/= '{') + + isLANGUAGEPragma = ("{-# LANGUAGE " `isPrefixOf`) + + getPragmas = map trim . splitCommas . takeWhile (/= '#') . drop 13 + + splitCommas "" = [] + splitCommas xs = x : splitCommas (drop 1 y) + where (x,y) = break (==',') xs + + return sf { imports = modules + , extensions = exts + } + + where getModName :: [String] -> Maybe ModuleName + getModName [] = Nothing + getModName ("qualified":ws) = getModName ws + getModName (ms:_) = simpleParse ms + + + +-- Unfortunately we cannot use the version exported by Distribution.Simple.Program +knownSuffixHandlers :: [(String,String)] +knownSuffixHandlers = + [ ("gc", "greencard") + , ("chs", "chs") + , ("hsc", "hsc2hs") + , ("x", "alex") + , ("y", "happy") + , ("ly", "happy") + , ("cpphs", "cpp") + ] + +sourceExtensions :: [String] +sourceExtensions = "hs" : "lhs" : map fst knownSuffixHandlers + +neededBuildPrograms :: [SourceFileEntry] -> [String] +neededBuildPrograms entries = + [ handler + | ext <- nubSet (map fileExtension entries) + , handler <- maybeToList (lookup ext knownSuffixHandlers) + ] + +-- | Guess author and email using darcs and git configuration options. Use +-- the following in decreasing order of preference: +-- +-- 1. vcs env vars ($DARCS_EMAIL, $GIT_AUTHOR_*) +-- 2. Local repo configs +-- 3. Global vcs configs +-- 4. The generic $EMAIL +-- +-- Name and email are processed separately, so the guess might end up being +-- a name from DARCS_EMAIL and an email from git config. 
+-- +-- Darcs has preference, for tradition's sake. +guessAuthorNameMail :: IO (Flag String, Flag String) +guessAuthorNameMail = fmap authorGuessPure authorGuessIO + +-- Ordered in increasing preference, since Flag-as-monoid is identical to +-- Last. +authorGuessPure :: AuthorGuessIO -> AuthorGuess +authorGuessPure (AuthorGuessIO { authorGuessEnv = env + , authorGuessLocalDarcs = darcsLocalF + , authorGuessGlobalDarcs = darcsGlobalF + , authorGuessLocalGit = gitLocal + , authorGuessGlobalGit = gitGlobal }) + = mconcat + [ emailEnv env + , gitGlobal + , darcsCfg darcsGlobalF + , gitLocal + , darcsCfg darcsLocalF + , gitEnv env + , darcsEnv env + ] + +authorGuessIO :: IO AuthorGuessIO +authorGuessIO = AuthorGuessIO + <$> getEnvironment + <*> (maybeReadFile $ "_darcs" "prefs" "author") + <*> (maybeReadFile =<< liftM ( (".darcs" "author")) getHomeDirectory) + <*> gitCfg Local + <*> gitCfg Global + +-- Types and functions used for guessing the author are now defined: + +type AuthorGuess = (Flag String, Flag String) +type Enviro = [(String, String)] +data GitLoc = Local | Global +data AuthorGuessIO = AuthorGuessIO { + authorGuessEnv :: Enviro, -- ^ Environment lookup table + authorGuessLocalDarcs :: (Maybe String), -- ^ Contents of local darcs author info + authorGuessGlobalDarcs :: (Maybe String), -- ^ Contents of global darcs author info + authorGuessLocalGit :: AuthorGuess, -- ^ Git config --local + authorGuessGlobalGit :: AuthorGuess -- ^ Git config --global + } + +darcsEnv :: Enviro -> AuthorGuess +darcsEnv = maybe mempty nameAndMail . 
lookup "DARCS_EMAIL" + +gitEnv :: Enviro -> AuthorGuess +gitEnv env = (name, mail) + where + name = maybeFlag "GIT_AUTHOR_NAME" env + mail = maybeFlag "GIT_AUTHOR_EMAIL" env + +darcsCfg :: Maybe String -> AuthorGuess +darcsCfg = maybe mempty nameAndMail + +emailEnv :: Enviro -> AuthorGuess +emailEnv env = (mempty, mail) + where + mail = maybeFlag "EMAIL" env + +gitCfg :: GitLoc -> IO AuthorGuess +gitCfg which = do + name <- gitVar which "user.name" + mail <- gitVar which "user.email" + return (name, mail) + +gitVar :: GitLoc -> String -> IO (Flag String) +gitVar which = fmap happyOutput . gitConfigQuery which + +happyOutput :: (ExitCode, a, t) -> Flag a +happyOutput v = case v of + (ExitSuccess, s, _) -> Flag s + _ -> mempty + +gitConfigQuery :: GitLoc -> String -> IO (ExitCode, String, String) +gitConfigQuery which key = + fmap trim' $ readProcessWithExitCode "git" ["config", w, key] "" + where + w = case which of + Local -> "--local" + Global -> "--global" + trim' (a, b, c) = (a, trim b, c) + +maybeFlag :: String -> Enviro -> Flag String +maybeFlag k = maybe mempty Flag . lookup k + +-- | Read the first non-comment, non-trivial line of a file, if it exists +maybeReadFile :: String -> IO (Maybe String) +maybeReadFile f = do + exists <- doesFileExist f + if exists + then fmap getFirstLine $ readFile f + else return Nothing + where + getFirstLine content = + let nontrivialLines = dropWhile (\l -> (null l) || ("#" `isPrefixOf` l)) . lines $ content + in case nontrivialLines of + [] -> Nothing + (l:_) -> Just l + +-- |Get list of categories used in Hackage. NOTE: Very slow, needs to be cached +knownCategories :: SourcePackageDb -> [String] +knownCategories (SourcePackageDb sourcePkgIndex _) = nubSet + [ cat | pkg <- map head (allPackagesByName sourcePkgIndex) + , let catList = (PD.category . PD.packageDescription . 
packageDescription) pkg + , cat <- splitString ',' catList + ] + +-- Parse name and email, from darcs pref files or environment variable +nameAndMail :: String -> (Flag String, Flag String) +nameAndMail str + | all isSpace nameOrEmail = mempty + | null erest = (mempty, Flag $ trim nameOrEmail) + | otherwise = (Flag $ trim nameOrEmail, Flag mail) + where + (nameOrEmail,erest) = break (== '<') str + (mail,_) = break (== '>') (tail erest) + +trim :: String -> String +trim = removeLeadingSpace . reverse . removeLeadingSpace . reverse + where + removeLeadingSpace = dropWhile isSpace + +-- split string at given character, and remove whitespace +splitString :: Char -> String -> [String] +splitString sep str = go str where + go s = if null s' then [] else tok : go rest where + s' = dropWhile (\c -> c == sep || isSpace c) s + (tok,rest) = break (==sep) s' + +nubSet :: (Ord a) => [a] -> [a] +nubSet = Set.toList . Set.fromList + +{- +test db testProjectRoot = do + putStrLn "Guessed package name" + (guessPackageName >=> print) testProjectRoot + putStrLn "Guessed name and email" + guessAuthorNameMail >>= print + + mods <- scanForModules testProjectRoot + + putStrLn "Guessed modules" + mapM_ print mods + putStrLn "Needed build programs" + print (neededBuildPrograms mods) + + putStrLn "List of known categories" + print $ knownCategories db +-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Init/Licenses.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Init/Licenses.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Init/Licenses.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Init/Licenses.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,3065 @@ +module Distribution.Client.Init.Licenses + ( License + , bsd2 + , bsd3 + , gplv2 + , gplv3 + , lgpl21 + , 
lgpl3 + , agplv3 + , apache20 + , mit + , mpl20 + , isc + ) where + +type License = String + +bsd2 :: String -> String -> License +bsd2 authors year = unlines + [ "Copyright (c) " ++ year ++ ", " ++ authors + , "All rights reserved." + , "" + , "Redistribution and use in source and binary forms, with or without" + , "modification, are permitted provided that the following conditions are" + , "met:" + , "" + , "1. Redistributions of source code must retain the above copyright" + , " notice, this list of conditions and the following disclaimer." + , "" + , "2. Redistributions in binary form must reproduce the above copyright" + , " notice, this list of conditions and the following disclaimer in the" + , " documentation and/or other materials provided with the" + , " distribution." + , "" + , "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS" + , "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT" + , "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR" + , "A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT" + , "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL," + , "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT" + , "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE," + , "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY" + , "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT" + , "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE" + , "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." + ] + +bsd3 :: String -> String -> License +bsd3 authors year = unlines + [ "Copyright (c) " ++ year ++ ", " ++ authors + , "" + , "All rights reserved." 
+ , "" + , "Redistribution and use in source and binary forms, with or without" + , "modification, are permitted provided that the following conditions are met:" + , "" + , " * Redistributions of source code must retain the above copyright" + , " notice, this list of conditions and the following disclaimer." + , "" + , " * Redistributions in binary form must reproduce the above" + , " copyright notice, this list of conditions and the following" + , " disclaimer in the documentation and/or other materials provided" + , " with the distribution." + , "" + , " * Neither the name of " ++ authors ++ " nor the names of other" + , " contributors may be used to endorse or promote products derived" + , " from this software without specific prior written permission." + , "" + , "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS" + , "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT" + , "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR" + , "A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT" + , "OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL," + , "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT" + , "LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE," + , "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY" + , "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT" + , "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE" + , "OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." + ] + +gplv2 :: License +gplv2 = unlines + [ " GNU GENERAL PUBLIC LICENSE" + , " Version 2, June 1991" + , "" + , " Copyright (C) 1989, 1991 Free Software Foundation, Inc.," + , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" + , " Everyone is permitted to copy and distribute verbatim copies" + , " of this license document, but changing it is not allowed." 
+ , "" + , " Preamble" + , "" + , " The licenses for most software are designed to take away your" + , "freedom to share and change it. By contrast, the GNU General Public" + , "License is intended to guarantee your freedom to share and change free" + , "software--to make sure the software is free for all its users. This" + , "General Public License applies to most of the Free Software" + , "Foundation's software and to any other program whose authors commit to" + , "using it. (Some other Free Software Foundation software is covered by" + , "the GNU Lesser General Public License instead.) You can apply it to" + , "your programs, too." + , "" + , " When we speak of free software, we are referring to freedom, not" + , "price. Our General Public Licenses are designed to make sure that you" + , "have the freedom to distribute copies of free software (and charge for" + , "this service if you wish), that you receive source code or can get it" + , "if you want it, that you can change the software or use pieces of it" + , "in new free programs; and that you know you can do these things." + , "" + , " To protect your rights, we need to make restrictions that forbid" + , "anyone to deny you these rights or to ask you to surrender the rights." + , "These restrictions translate to certain responsibilities for you if you" + , "distribute copies of the software, or if you modify it." + , "" + , " For example, if you distribute copies of such a program, whether" + , "gratis or for a fee, you must give the recipients all the rights that" + , "you have. You must make sure that they, too, receive or can get the" + , "source code. And you must show them these terms so they know their" + , "rights." + , "" + , " We protect your rights with two steps: (1) copyright the software, and" + , "(2) offer you this license which gives you legal permission to copy," + , "distribute and/or modify the software." 
+ , "" + , " Also, for each author's protection and ours, we want to make certain" + , "that everyone understands that there is no warranty for this free" + , "software. If the software is modified by someone else and passed on, we" + , "want its recipients to know that what they have is not the original, so" + , "that any problems introduced by others will not reflect on the original" + , "authors' reputations." + , "" + , " Finally, any free program is threatened constantly by software" + , "patents. We wish to avoid the danger that redistributors of a free" + , "program will individually obtain patent licenses, in effect making the" + , "program proprietary. To prevent this, we have made it clear that any" + , "patent must be licensed for everyone's free use or not licensed at all." + , "" + , " The precise terms and conditions for copying, distribution and" + , "modification follow." + , "" + , " GNU GENERAL PUBLIC LICENSE" + , " TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION" + , "" + , " 0. This License applies to any program or other work which contains" + , "a notice placed by the copyright holder saying it may be distributed" + , "under the terms of this General Public License. The \"Program\", below," + , "refers to any such program or work, and a \"work based on the Program\"" + , "means either the Program or any derivative work under copyright law:" + , "that is to say, a work containing the Program or a portion of it," + , "either verbatim or with modifications and/or translated into another" + , "language. (Hereinafter, translation is included without limitation in" + , "the term \"modification\".) Each licensee is addressed as \"you\"." + , "" + , "Activities other than copying, distribution and modification are not" + , "covered by this License; they are outside its scope. 
The act of" + , "running the Program is not restricted, and the output from the Program" + , "is covered only if its contents constitute a work based on the" + , "Program (independent of having been made by running the Program)." + , "Whether that is true depends on what the Program does." + , "" + , " 1. You may copy and distribute verbatim copies of the Program's" + , "source code as you receive it, in any medium, provided that you" + , "conspicuously and appropriately publish on each copy an appropriate" + , "copyright notice and disclaimer of warranty; keep intact all the" + , "notices that refer to this License and to the absence of any warranty;" + , "and give any other recipients of the Program a copy of this License" + , "along with the Program." + , "" + , "You may charge a fee for the physical act of transferring a copy, and" + , "you may at your option offer warranty protection in exchange for a fee." + , "" + , " 2. You may modify your copy or copies of the Program or any portion" + , "of it, thus forming a work based on the Program, and copy and" + , "distribute such modifications or work under the terms of Section 1" + , "above, provided that you also meet all of these conditions:" + , "" + , " a) You must cause the modified files to carry prominent notices" + , " stating that you changed the files and the date of any change." + , "" + , " b) You must cause any work that you distribute or publish, that in" + , " whole or in part contains or is derived from the Program or any" + , " part thereof, to be licensed as a whole at no charge to all third" + , " parties under the terms of this License." 
+ , "" + , " c) If the modified program normally reads commands interactively" + , " when run, you must cause it, when started running for such" + , " interactive use in the most ordinary way, to print or display an" + , " announcement including an appropriate copyright notice and a" + , " notice that there is no warranty (or else, saying that you provide" + , " a warranty) and that users may redistribute the program under" + , " these conditions, and telling the user how to view a copy of this" + , " License. (Exception: if the Program itself is interactive but" + , " does not normally print such an announcement, your work based on" + , " the Program is not required to print an announcement.)" + , "" + , "These requirements apply to the modified work as a whole. If" + , "identifiable sections of that work are not derived from the Program," + , "and can be reasonably considered independent and separate works in" + , "themselves, then this License, and its terms, do not apply to those" + , "sections when you distribute them as separate works. But when you" + , "distribute the same sections as part of a whole which is a work based" + , "on the Program, the distribution of the whole must be on the terms of" + , "this License, whose permissions for other licensees extend to the" + , "entire whole, and thus to each and every part regardless of who wrote it." + , "" + , "Thus, it is not the intent of this section to claim rights or contest" + , "your rights to work written entirely by you; rather, the intent is to" + , "exercise the right to control the distribution of derivative or" + , "collective works based on the Program." + , "" + , "In addition, mere aggregation of another work not based on the Program" + , "with the Program (or with a work based on the Program) on a volume of" + , "a storage or distribution medium does not bring the other work under" + , "the scope of this License." + , "" + , " 3. 
You may copy and distribute the Program (or a work based on it," + , "under Section 2) in object code or executable form under the terms of" + , "Sections 1 and 2 above provided that you also do one of the following:" + , "" + , " a) Accompany it with the complete corresponding machine-readable" + , " source code, which must be distributed under the terms of Sections" + , " 1 and 2 above on a medium customarily used for software interchange; or," + , "" + , " b) Accompany it with a written offer, valid for at least three" + , " years, to give any third party, for a charge no more than your" + , " cost of physically performing source distribution, a complete" + , " machine-readable copy of the corresponding source code, to be" + , " distributed under the terms of Sections 1 and 2 above on a medium" + , " customarily used for software interchange; or," + , "" + , " c) Accompany it with the information you received as to the offer" + , " to distribute corresponding source code. (This alternative is" + , " allowed only for noncommercial distribution and only if you" + , " received the program in object code or executable form with such" + , " an offer, in accord with Subsection b above.)" + , "" + , "The source code for a work means the preferred form of the work for" + , "making modifications to it. For an executable work, complete source" + , "code means all the source code for all modules it contains, plus any" + , "associated interface definition files, plus the scripts used to" + , "control compilation and installation of the executable. However, as a" + , "special exception, the source code distributed need not include" + , "anything that is normally distributed (in either source or binary" + , "form) with the major components (compiler, kernel, and so on) of the" + , "operating system on which the executable runs, unless that component" + , "itself accompanies the executable." 
+ , "" + , "If distribution of executable or object code is made by offering" + , "access to copy from a designated place, then offering equivalent" + , "access to copy the source code from the same place counts as" + , "distribution of the source code, even though third parties are not" + , "compelled to copy the source along with the object code." + , "" + , " 4. You may not copy, modify, sublicense, or distribute the Program" + , "except as expressly provided under this License. Any attempt" + , "otherwise to copy, modify, sublicense or distribute the Program is" + , "void, and will automatically terminate your rights under this License." + , "However, parties who have received copies, or rights, from you under" + , "this License will not have their licenses terminated so long as such" + , "parties remain in full compliance." + , "" + , " 5. You are not required to accept this License, since you have not" + , "signed it. However, nothing else grants you permission to modify or" + , "distribute the Program or its derivative works. These actions are" + , "prohibited by law if you do not accept this License. Therefore, by" + , "modifying or distributing the Program (or any work based on the" + , "Program), you indicate your acceptance of this License to do so, and" + , "all its terms and conditions for copying, distributing or modifying" + , "the Program or works based on it." + , "" + , " 6. Each time you redistribute the Program (or any work based on the" + , "Program), the recipient automatically receives a license from the" + , "original licensor to copy, distribute or modify the Program subject to" + , "these terms and conditions. You may not impose any further" + , "restrictions on the recipients' exercise of the rights granted herein." + , "You are not responsible for enforcing compliance by third parties to" + , "this License." + , "" + , " 7. 
If, as a consequence of a court judgment or allegation of patent" + , "infringement or for any other reason (not limited to patent issues)," + , "conditions are imposed on you (whether by court order, agreement or" + , "otherwise) that contradict the conditions of this License, they do not" + , "excuse you from the conditions of this License. If you cannot" + , "distribute so as to satisfy simultaneously your obligations under this" + , "License and any other pertinent obligations, then as a consequence you" + , "may not distribute the Program at all. For example, if a patent" + , "license would not permit royalty-free redistribution of the Program by" + , "all those who receive copies directly or indirectly through you, then" + , "the only way you could satisfy both it and this License would be to" + , "refrain entirely from distribution of the Program." + , "" + , "If any portion of this section is held invalid or unenforceable under" + , "any particular circumstance, the balance of the section is intended to" + , "apply and the section as a whole is intended to apply in other" + , "circumstances." + , "" + , "It is not the purpose of this section to induce you to infringe any" + , "patents or other property right claims or to contest validity of any" + , "such claims; this section has the sole purpose of protecting the" + , "integrity of the free software distribution system, which is" + , "implemented by public license practices. Many people have made" + , "generous contributions to the wide range of software distributed" + , "through that system in reliance on consistent application of that" + , "system; it is up to the author/donor to decide if he or she is willing" + , "to distribute software through any other system and a licensee cannot" + , "impose that choice." + , "" + , "This section is intended to make thoroughly clear what is believed to" + , "be a consequence of the rest of this License." + , "" + , " 8. 
If the distribution and/or use of the Program is restricted in" + , "certain countries either by patents or by copyrighted interfaces, the" + , "original copyright holder who places the Program under this License" + , "may add an explicit geographical distribution limitation excluding" + , "those countries, so that distribution is permitted only in or among" + , "countries not thus excluded. In such case, this License incorporates" + , "the limitation as if written in the body of this License." + , "" + , " 9. The Free Software Foundation may publish revised and/or new versions" + , "of the General Public License from time to time. Such new versions will" + , "be similar in spirit to the present version, but may differ in detail to" + , "address new problems or concerns." + , "" + , "Each version is given a distinguishing version number. If the Program" + , "specifies a version number of this License which applies to it and \"any" + , "later version\", you have the option of following the terms and conditions" + , "either of that version or of any later version published by the Free" + , "Software Foundation. If the Program does not specify a version number of" + , "this License, you may choose any version ever published by the Free Software" + , "Foundation." + , "" + , " 10. If you wish to incorporate parts of the Program into other free" + , "programs whose distribution conditions are different, write to the author" + , "to ask for permission. For software which is copyrighted by the Free" + , "Software Foundation, write to the Free Software Foundation; we sometimes" + , "make exceptions for this. Our decision will be guided by the two goals" + , "of preserving the free status of all derivatives of our free software and" + , "of promoting the sharing and reuse of software generally." + , "" + , " NO WARRANTY" + , "" + , " 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY" + , "FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN" + , "OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES" + , "PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED" + , "OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF" + , "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS" + , "TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE" + , "PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING," + , "REPAIR OR CORRECTION." + , "" + , " 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" + , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR" + , "REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES," + , "INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING" + , "OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED" + , "TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY" + , "YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER" + , "PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE" + , "POSSIBILITY OF SUCH DAMAGES." + , "" + , " END OF TERMS AND CONDITIONS" + , "" + , " How to Apply These Terms to Your New Programs" + , "" + , " If you develop a new program, and you want it to be of the greatest" + , "possible use to the public, the best way to achieve this is to make it" + , "free software which everyone can redistribute and change under these terms." + , "" + , " To do so, attach the following notices to the program. It is safest" + , "to attach them to the start of each source file to most effectively" + , "convey the exclusion of warranty; and each file should have at least" + , "the \"copyright\" line and a pointer to where the full notice is found." 
+ , "" + , " " + , " Copyright (C) " + , "" + , " This program is free software; you can redistribute it and/or modify" + , " it under the terms of the GNU General Public License as published by" + , " the Free Software Foundation; either version 2 of the License, or" + , " (at your option) any later version." + , "" + , " This program is distributed in the hope that it will be useful," + , " but WITHOUT ANY WARRANTY; without even the implied warranty of" + , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" + , " GNU General Public License for more details." + , "" + , " You should have received a copy of the GNU General Public License along" + , " with this program; if not, write to the Free Software Foundation, Inc.," + , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA." + , "" + , "Also add information on how to contact you by electronic and paper mail." + , "" + , "If the program is interactive, make it output a short notice like this" + , "when it starts in an interactive mode:" + , "" + , " Gnomovision version 69, Copyright (C) year name of author" + , " Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'." + , " This is free software, and you are welcome to redistribute it" + , " under certain conditions; type `show c' for details." + , "" + , "The hypothetical commands `show w' and `show c' should show the appropriate" + , "parts of the General Public License. Of course, the commands you use may" + , "be called something other than `show w' and `show c'; they could even be" + , "mouse-clicks or menu items--whatever suits your program." + , "" + , "You should also get your employer (if you work as a programmer) or your" + , "school, if any, to sign a \"copyright disclaimer\" for the program, if" + , "necessary. Here is a sample; alter the names:" + , "" + , " Yoyodyne, Inc., hereby disclaims all copyright interest in the program" + , " `Gnomovision' (which makes passes at compilers) written by James Hacker." 
+ , "" + , " , 1 April 1989" + , " Ty Coon, President of Vice" + , "" + , "This General Public License does not permit incorporating your program into" + , "proprietary programs. If your program is a subroutine library, you may" + , "consider it more useful to permit linking proprietary applications with the" + , "library. If this is what you want to do, use the GNU Lesser General" + , "Public License instead of this License." + ] + +gplv3 :: License +gplv3 = unlines + [ " GNU GENERAL PUBLIC LICENSE" + , " Version 3, 29 June 2007" + , "" + , " Copyright (C) 2007 Free Software Foundation, Inc. " + , " Everyone is permitted to copy and distribute verbatim copies" + , " of this license document, but changing it is not allowed." + , "" + , " Preamble" + , "" + , " The GNU General Public License is a free, copyleft license for" + , "software and other kinds of works." + , "" + , " The licenses for most software and other practical works are designed" + , "to take away your freedom to share and change the works. By contrast," + , "the GNU General Public License is intended to guarantee your freedom to" + , "share and change all versions of a program--to make sure it remains free" + , "software for all its users. We, the Free Software Foundation, use the" + , "GNU General Public License for most of our software; it applies also to" + , "any other work released this way by its authors. You can apply it to" + , "your programs, too." + , "" + , " When we speak of free software, we are referring to freedom, not" + , "price. Our General Public Licenses are designed to make sure that you" + , "have the freedom to distribute copies of free software (and charge for" + , "them if you wish), that you receive source code or can get it if you" + , "want it, that you can change the software or use pieces of it in new" + , "free programs, and that you know you can do these things." 
+ , "" + , " To protect your rights, we need to prevent others from denying you" + , "these rights or asking you to surrender the rights. Therefore, you have" + , "certain responsibilities if you distribute copies of the software, or if" + , "you modify it: responsibilities to respect the freedom of others." + , "" + , " For example, if you distribute copies of such a program, whether" + , "gratis or for a fee, you must pass on to the recipients the same" + , "freedoms that you received. You must make sure that they, too, receive" + , "or can get the source code. And you must show them these terms so they" + , "know their rights." + , "" + , " Developers that use the GNU GPL protect your rights with two steps:" + , "(1) assert copyright on the software, and (2) offer you this License" + , "giving you legal permission to copy, distribute and/or modify it." + , "" + , " For the developers' and authors' protection, the GPL clearly explains" + , "that there is no warranty for this free software. For both users' and" + , "authors' sake, the GPL requires that modified versions be marked as" + , "changed, so that their problems will not be attributed erroneously to" + , "authors of previous versions." + , "" + , " Some devices are designed to deny users access to install or run" + , "modified versions of the software inside them, although the manufacturer" + , "can do so. This is fundamentally incompatible with the aim of" + , "protecting users' freedom to change the software. The systematic" + , "pattern of such abuse occurs in the area of products for individuals to" + , "use, which is precisely where it is most unacceptable. Therefore, we" + , "have designed this version of the GPL to prohibit the practice for those" + , "products. If such problems arise substantially in other domains, we" + , "stand ready to extend this provision to those domains in future versions" + , "of the GPL, as needed to protect the freedom of users." 
+ , "" + , " Finally, every program is threatened constantly by software patents." + , "States should not allow patents to restrict development and use of" + , "software on general-purpose computers, but in those that do, we wish to" + , "avoid the special danger that patents applied to a free program could" + , "make it effectively proprietary. To prevent this, the GPL assures that" + , "patents cannot be used to render the program non-free." + , "" + , " The precise terms and conditions for copying, distribution and" + , "modification follow." + , "" + , " TERMS AND CONDITIONS" + , "" + , " 0. Definitions." + , "" + , " \"This License\" refers to version 3 of the GNU General Public License." + , "" + , " \"Copyright\" also means copyright-like laws that apply to other kinds of" + , "works, such as semiconductor masks." + , "" + , " \"The Program\" refers to any copyrightable work licensed under this" + , "License. Each licensee is addressed as \"you\". \"Licensees\" and" + , "\"recipients\" may be individuals or organizations." + , "" + , " To \"modify\" a work means to copy from or adapt all or part of the work" + , "in a fashion requiring copyright permission, other than the making of an" + , "exact copy. The resulting work is called a \"modified version\" of the" + , "earlier work or a work \"based on\" the earlier work." + , "" + , " A \"covered work\" means either the unmodified Program or a work based" + , "on the Program." + , "" + , " To \"propagate\" a work means to do anything with it that, without" + , "permission, would make you directly or secondarily liable for" + , "infringement under applicable copyright law, except executing it on a" + , "computer or modifying a private copy. Propagation includes copying," + , "distribution (with or without modification), making available to the" + , "public, and in some countries other activities as well." 
+ , "" + , " To \"convey\" a work means any kind of propagation that enables other" + , "parties to make or receive copies. Mere interaction with a user through" + , "a computer network, with no transfer of a copy, is not conveying." + , "" + , " An interactive user interface displays \"Appropriate Legal Notices\"" + , "to the extent that it includes a convenient and prominently visible" + , "feature that (1) displays an appropriate copyright notice, and (2)" + , "tells the user that there is no warranty for the work (except to the" + , "extent that warranties are provided), that licensees may convey the" + , "work under this License, and how to view a copy of this License. If" + , "the interface presents a list of user commands or options, such as a" + , "menu, a prominent item in the list meets this criterion." + , "" + , " 1. Source Code." + , "" + , " The \"source code\" for a work means the preferred form of the work" + , "for making modifications to it. \"Object code\" means any non-source" + , "form of a work." + , "" + , " A \"Standard Interface\" means an interface that either is an official" + , "standard defined by a recognized standards body, or, in the case of" + , "interfaces specified for a particular programming language, one that" + , "is widely used among developers working in that language." + , "" + , " The \"System Libraries\" of an executable work include anything, other" + , "than the work as a whole, that (a) is included in the normal form of" + , "packaging a Major Component, but which is not part of that Major" + , "Component, and (b) serves only to enable use of the work with that" + , "Major Component, or to implement a Standard Interface for which an" + , "implementation is available to the public in source code form. 
A" + , "\"Major Component\", in this context, means a major essential component" + , "(kernel, window system, and so on) of the specific operating system" + , "(if any) on which the executable work runs, or a compiler used to" + , "produce the work, or an object code interpreter used to run it." + , "" + , " The \"Corresponding Source\" for a work in object code form means all" + , "the source code needed to generate, install, and (for an executable" + , "work) run the object code and to modify the work, including scripts to" + , "control those activities. However, it does not include the work's" + , "System Libraries, or general-purpose tools or generally available free" + , "programs which are used unmodified in performing those activities but" + , "which are not part of the work. For example, Corresponding Source" + , "includes interface definition files associated with source files for" + , "the work, and the source code for shared libraries and dynamically" + , "linked subprograms that the work is specifically designed to require," + , "such as by intimate data communication or control flow between those" + , "subprograms and other parts of the work." + , "" + , " The Corresponding Source need not include anything that users" + , "can regenerate automatically from other parts of the Corresponding" + , "Source." + , "" + , " The Corresponding Source for a work in source code form is that" + , "same work." + , "" + , " 2. Basic Permissions." + , "" + , " All rights granted under this License are granted for the term of" + , "copyright on the Program, and are irrevocable provided the stated" + , "conditions are met. This License explicitly affirms your unlimited" + , "permission to run the unmodified Program. The output from running a" + , "covered work is covered by this License only if the output, given its" + , "content, constitutes a covered work. This License acknowledges your" + , "rights of fair use or other equivalent, as provided by copyright law." 
+ , "" + , " You may make, run and propagate covered works that you do not" + , "convey, without conditions so long as your license otherwise remains" + , "in force. You may convey covered works to others for the sole purpose" + , "of having them make modifications exclusively for you, or provide you" + , "with facilities for running those works, provided that you comply with" + , "the terms of this License in conveying all material for which you do" + , "not control copyright. Those thus making or running the covered works" + , "for you must do so exclusively on your behalf, under your direction" + , "and control, on terms that prohibit them from making any copies of" + , "your copyrighted material outside their relationship with you." + , "" + , " Conveying under any other circumstances is permitted solely under" + , "the conditions stated below. Sublicensing is not allowed; section 10" + , "makes it unnecessary." + , "" + , " 3. Protecting Users' Legal Rights From Anti-Circumvention Law." + , "" + , " No covered work shall be deemed part of an effective technological" + , "measure under any applicable law fulfilling obligations under article" + , "11 of the WIPO copyright treaty adopted on 20 December 1996, or" + , "similar laws prohibiting or restricting circumvention of such" + , "measures." + , "" + , " When you convey a covered work, you waive any legal power to forbid" + , "circumvention of technological measures to the extent such circumvention" + , "is effected by exercising rights under this License with respect to" + , "the covered work, and you disclaim any intention to limit operation or" + , "modification of the work as a means of enforcing, against the work's" + , "users, your or third parties' legal rights to forbid circumvention of" + , "technological measures." + , "" + , " 4. Conveying Verbatim Copies." 
+ , "" + , " You may convey verbatim copies of the Program's source code as you" + , "receive it, in any medium, provided that you conspicuously and" + , "appropriately publish on each copy an appropriate copyright notice;" + , "keep intact all notices stating that this License and any" + , "non-permissive terms added in accord with section 7 apply to the code;" + , "keep intact all notices of the absence of any warranty; and give all" + , "recipients a copy of this License along with the Program." + , "" + , " You may charge any price or no price for each copy that you convey," + , "and you may offer support or warranty protection for a fee." + , "" + , " 5. Conveying Modified Source Versions." + , "" + , " You may convey a work based on the Program, or the modifications to" + , "produce it from the Program, in the form of source code under the" + , "terms of section 4, provided that you also meet all of these conditions:" + , "" + , " a) The work must carry prominent notices stating that you modified" + , " it, and giving a relevant date." + , "" + , " b) The work must carry prominent notices stating that it is" + , " released under this License and any conditions added under section" + , " 7. This requirement modifies the requirement in section 4 to" + , " \"keep intact all notices\"." + , "" + , " c) You must license the entire work, as a whole, under this" + , " License to anyone who comes into possession of a copy. This" + , " License will therefore apply, along with any applicable section 7" + , " additional terms, to the whole of the work, and all its parts," + , " regardless of how they are packaged. This License gives no" + , " permission to license the work in any other way, but it does not" + , " invalidate such permission if you have separately received it." 
+ , "" + , " d) If the work has interactive user interfaces, each must display" + , " Appropriate Legal Notices; however, if the Program has interactive" + , " interfaces that do not display Appropriate Legal Notices, your" + , " work need not make them do so." + , "" + , " A compilation of a covered work with other separate and independent" + , "works, which are not by their nature extensions of the covered work," + , "and which are not combined with it such as to form a larger program," + , "in or on a volume of a storage or distribution medium, is called an" + , "\"aggregate\" if the compilation and its resulting copyright are not" + , "used to limit the access or legal rights of the compilation's users" + , "beyond what the individual works permit. Inclusion of a covered work" + , "in an aggregate does not cause this License to apply to the other" + , "parts of the aggregate." + , "" + , " 6. Conveying Non-Source Forms." + , "" + , " You may convey a covered work in object code form under the terms" + , "of sections 4 and 5, provided that you also convey the" + , "machine-readable Corresponding Source under the terms of this License," + , "in one of these ways:" + , "" + , " a) Convey the object code in, or embodied in, a physical product" + , " (including a physical distribution medium), accompanied by the" + , " Corresponding Source fixed on a durable physical medium" + , " customarily used for software interchange." 
+ , "" + , " b) Convey the object code in, or embodied in, a physical product" + , " (including a physical distribution medium), accompanied by a" + , " written offer, valid for at least three years and valid for as" + , " long as you offer spare parts or customer support for that product" + , " model, to give anyone who possesses the object code either (1) a" + , " copy of the Corresponding Source for all the software in the" + , " product that is covered by this License, on a durable physical" + , " medium customarily used for software interchange, for a price no" + , " more than your reasonable cost of physically performing this" + , " conveying of source, or (2) access to copy the" + , " Corresponding Source from a network server at no charge." + , "" + , " c) Convey individual copies of the object code with a copy of the" + , " written offer to provide the Corresponding Source. This" + , " alternative is allowed only occasionally and noncommercially, and" + , " only if you received the object code with such an offer, in accord" + , " with subsection 6b." + , "" + , " d) Convey the object code by offering access from a designated" + , " place (gratis or for a charge), and offer equivalent access to the" + , " Corresponding Source in the same way through the same place at no" + , " further charge. You need not require recipients to copy the" + , " Corresponding Source along with the object code. If the place to" + , " copy the object code is a network server, the Corresponding Source" + , " may be on a different server (operated by you or a third party)" + , " that supports equivalent copying facilities, provided you maintain" + , " clear directions next to the object code saying where to find the" + , " Corresponding Source. Regardless of what server hosts the" + , " Corresponding Source, you remain obligated to ensure that it is" + , " available for as long as needed to satisfy these requirements." 
+ , "" + , " e) Convey the object code using peer-to-peer transmission, provided" + , " you inform other peers where the object code and Corresponding" + , " Source of the work are being offered to the general public at no" + , " charge under subsection 6d." + , "" + , " A separable portion of the object code, whose source code is excluded" + , "from the Corresponding Source as a System Library, need not be" + , "included in conveying the object code work." + , "" + , " A \"User Product\" is either (1) a \"consumer product\", which means any" + , "tangible personal property which is normally used for personal, family," + , "or household purposes, or (2) anything designed or sold for incorporation" + , "into a dwelling. In determining whether a product is a consumer product," + , "doubtful cases shall be resolved in favor of coverage. For a particular" + , "product received by a particular user, \"normally used\" refers to a" + , "typical or common use of that class of product, regardless of the status" + , "of the particular user or of the way in which the particular user" + , "actually uses, or expects or is expected to use, the product. A product" + , "is a consumer product regardless of whether the product has substantial" + , "commercial, industrial or non-consumer uses, unless such uses represent" + , "the only significant mode of use of the product." + , "" + , " \"Installation Information\" for a User Product means any methods," + , "procedures, authorization keys, or other information required to install" + , "and execute modified versions of a covered work in that User Product from" + , "a modified version of its Corresponding Source. The information must" + , "suffice to ensure that the continued functioning of the modified object" + , "code is in no case prevented or interfered with solely because" + , "modification has been made." 
+ , "" + , " If you convey an object code work under this section in, or with, or" + , "specifically for use in, a User Product, and the conveying occurs as" + , "part of a transaction in which the right of possession and use of the" + , "User Product is transferred to the recipient in perpetuity or for a" + , "fixed term (regardless of how the transaction is characterized), the" + , "Corresponding Source conveyed under this section must be accompanied" + , "by the Installation Information. But this requirement does not apply" + , "if neither you nor any third party retains the ability to install" + , "modified object code on the User Product (for example, the work has" + , "been installed in ROM)." + , "" + , " The requirement to provide Installation Information does not include a" + , "requirement to continue to provide support service, warranty, or updates" + , "for a work that has been modified or installed by the recipient, or for" + , "the User Product in which it has been modified or installed. Access to a" + , "network may be denied when the modification itself materially and" + , "adversely affects the operation of the network or violates the rules and" + , "protocols for communication across the network." + , "" + , " Corresponding Source conveyed, and Installation Information provided," + , "in accord with this section must be in a format that is publicly" + , "documented (and with an implementation available to the public in" + , "source code form), and must require no special password or key for" + , "unpacking, reading or copying." + , "" + , " 7. Additional Terms." + , "" + , " \"Additional permissions\" are terms that supplement the terms of this" + , "License by making exceptions from one or more of its conditions." + , "Additional permissions that are applicable to the entire Program shall" + , "be treated as though they were included in this License, to the extent" + , "that they are valid under applicable law. 
If additional permissions" + , "apply only to part of the Program, that part may be used separately" + , "under those permissions, but the entire Program remains governed by" + , "this License without regard to the additional permissions." + , "" + , " When you convey a copy of a covered work, you may at your option" + , "remove any additional permissions from that copy, or from any part of" + , "it. (Additional permissions may be written to require their own" + , "removal in certain cases when you modify the work.) You may place" + , "additional permissions on material, added by you to a covered work," + , "for which you have or can give appropriate copyright permission." + , "" + , " Notwithstanding any other provision of this License, for material you" + , "add to a covered work, you may (if authorized by the copyright holders of" + , "that material) supplement the terms of this License with terms:" + , "" + , " a) Disclaiming warranty or limiting liability differently from the" + , " terms of sections 15 and 16 of this License; or" + , "" + , " b) Requiring preservation of specified reasonable legal notices or" + , " author attributions in that material or in the Appropriate Legal" + , " Notices displayed by works containing it; or" + , "" + , " c) Prohibiting misrepresentation of the origin of that material, or" + , " requiring that modified versions of such material be marked in" + , " reasonable ways as different from the original version; or" + , "" + , " d) Limiting the use for publicity purposes of names of licensors or" + , " authors of the material; or" + , "" + , " e) Declining to grant rights under trademark law for use of some" + , " trade names, trademarks, or service marks; or" + , "" + , " f) Requiring indemnification of licensors and authors of that" + , " material by anyone who conveys the material (or modified versions of" + , " it) with contractual assumptions of liability to the recipient, for" + , " any liability that these contractual 
assumptions directly impose on" + , " those licensors and authors." + , "" + , " All other non-permissive additional terms are considered \"further" + , "restrictions\" within the meaning of section 10. If the Program as you" + , "received it, or any part of it, contains a notice stating that it is" + , "governed by this License along with a term that is a further" + , "restriction, you may remove that term. If a license document contains" + , "a further restriction but permits relicensing or conveying under this" + , "License, you may add to a covered work material governed by the terms" + , "of that license document, provided that the further restriction does" + , "not survive such relicensing or conveying." + , "" + , " If you add terms to a covered work in accord with this section, you" + , "must place, in the relevant source files, a statement of the" + , "additional terms that apply to those files, or a notice indicating" + , "where to find the applicable terms." + , "" + , " Additional terms, permissive or non-permissive, may be stated in the" + , "form of a separately written license, or stated as exceptions;" + , "the above requirements apply either way." + , "" + , " 8. Termination." + , "" + , " You may not propagate or modify a covered work except as expressly" + , "provided under this License. Any attempt otherwise to propagate or" + , "modify it is void, and will automatically terminate your rights under" + , "this License (including any patent licenses granted under the third" + , "paragraph of section 11)." + , "" + , " However, if you cease all violation of this License, then your" + , "license from a particular copyright holder is reinstated (a)" + , "provisionally, unless and until the copyright holder explicitly and" + , "finally terminates your license, and (b) permanently, if the copyright" + , "holder fails to notify you of the violation by some reasonable means" + , "prior to 60 days after the cessation." 
+ , "" + , " Moreover, your license from a particular copyright holder is" + , "reinstated permanently if the copyright holder notifies you of the" + , "violation by some reasonable means, this is the first time you have" + , "received notice of violation of this License (for any work) from that" + , "copyright holder, and you cure the violation prior to 30 days after" + , "your receipt of the notice." + , "" + , " Termination of your rights under this section does not terminate the" + , "licenses of parties who have received copies or rights from you under" + , "this License. If your rights have been terminated and not permanently" + , "reinstated, you do not qualify to receive new licenses for the same" + , "material under section 10." + , "" + , " 9. Acceptance Not Required for Having Copies." + , "" + , " You are not required to accept this License in order to receive or" + , "run a copy of the Program. Ancillary propagation of a covered work" + , "occurring solely as a consequence of using peer-to-peer transmission" + , "to receive a copy likewise does not require acceptance. However," + , "nothing other than this License grants you permission to propagate or" + , "modify any covered work. These actions infringe copyright if you do" + , "not accept this License. Therefore, by modifying or propagating a" + , "covered work, you indicate your acceptance of this License to do so." + , "" + , " 10. Automatic Licensing of Downstream Recipients." + , "" + , " Each time you convey a covered work, the recipient automatically" + , "receives a license from the original licensors, to run, modify and" + , "propagate that work, subject to this License. You are not responsible" + , "for enforcing compliance by third parties with this License." + , "" + , " An \"entity transaction\" is a transaction transferring control of an" + , "organization, or substantially all assets of one, or subdividing an" + , "organization, or merging organizations. 
If propagation of a covered" + , "work results from an entity transaction, each party to that" + , "transaction who receives a copy of the work also receives whatever" + , "licenses to the work the party's predecessor in interest had or could" + , "give under the previous paragraph, plus a right to possession of the" + , "Corresponding Source of the work from the predecessor in interest, if" + , "the predecessor has it or can get it with reasonable efforts." + , "" + , " You may not impose any further restrictions on the exercise of the" + , "rights granted or affirmed under this License. For example, you may" + , "not impose a license fee, royalty, or other charge for exercise of" + , "rights granted under this License, and you may not initiate litigation" + , "(including a cross-claim or counterclaim in a lawsuit) alleging that" + , "any patent claim is infringed by making, using, selling, offering for" + , "sale, or importing the Program or any portion of it." + , "" + , " 11. Patents." + , "" + , " A \"contributor\" is a copyright holder who authorizes use under this" + , "License of the Program or a work on which the Program is based. The" + , "work thus licensed is called the contributor's \"contributor version\"." + , "" + , " A contributor's \"essential patent claims\" are all patent claims" + , "owned or controlled by the contributor, whether already acquired or" + , "hereafter acquired, that would be infringed by some manner, permitted" + , "by this License, of making, using, or selling its contributor version," + , "but do not include claims that would be infringed only as a" + , "consequence of further modification of the contributor version. For" + , "purposes of this definition, \"control\" includes the right to grant" + , "patent sublicenses in a manner consistent with the requirements of" + , "this License." 
+ , "" + , " Each contributor grants you a non-exclusive, worldwide, royalty-free" + , "patent license under the contributor's essential patent claims, to" + , "make, use, sell, offer for sale, import and otherwise run, modify and" + , "propagate the contents of its contributor version." + , "" + , " In the following three paragraphs, a \"patent license\" is any express" + , "agreement or commitment, however denominated, not to enforce a patent" + , "(such as an express permission to practice a patent or covenant not to" + , "sue for patent infringement). To \"grant\" such a patent license to a" + , "party means to make such an agreement or commitment not to enforce a" + , "patent against the party." + , "" + , " If you convey a covered work, knowingly relying on a patent license," + , "and the Corresponding Source of the work is not available for anyone" + , "to copy, free of charge and under the terms of this License, through a" + , "publicly available network server or other readily accessible means," + , "then you must either (1) cause the Corresponding Source to be so" + , "available, or (2) arrange to deprive yourself of the benefit of the" + , "patent license for this particular work, or (3) arrange, in a manner" + , "consistent with the requirements of this License, to extend the patent" + , "license to downstream recipients. \"Knowingly relying\" means you have" + , "actual knowledge that, but for the patent license, your conveying the" + , "covered work in a country, or your recipient's use of the covered work" + , "in a country, would infringe one or more identifiable patents in that" + , "country that you have reason to believe are valid." 
+ , "" + , " If, pursuant to or in connection with a single transaction or" + , "arrangement, you convey, or propagate by procuring conveyance of, a" + , "covered work, and grant a patent license to some of the parties" + , "receiving the covered work authorizing them to use, propagate, modify" + , "or convey a specific copy of the covered work, then the patent license" + , "you grant is automatically extended to all recipients of the covered" + , "work and works based on it." + , "" + , " A patent license is \"discriminatory\" if it does not include within" + , "the scope of its coverage, prohibits the exercise of, or is" + , "conditioned on the non-exercise of one or more of the rights that are" + , "specifically granted under this License. You may not convey a covered" + , "work if you are a party to an arrangement with a third party that is" + , "in the business of distributing software, under which you make payment" + , "to the third party based on the extent of your activity of conveying" + , "the work, and under which the third party grants, to any of the" + , "parties who would receive the covered work from you, a discriminatory" + , "patent license (a) in connection with copies of the covered work" + , "conveyed by you (or copies made from those copies), or (b) primarily" + , "for and in connection with specific products or compilations that" + , "contain the covered work, unless you entered into that arrangement," + , "or that patent license was granted, prior to 28 March 2007." + , "" + , " Nothing in this License shall be construed as excluding or limiting" + , "any implied license or other defenses to infringement that may" + , "otherwise be available to you under applicable patent law." + , "" + , " 12. No Surrender of Others' Freedom." + , "" + , " If conditions are imposed on you (whether by court order, agreement or" + , "otherwise) that contradict the conditions of this License, they do not" + , "excuse you from the conditions of this License. 
If you cannot convey a" + , "covered work so as to satisfy simultaneously your obligations under this" + , "License and any other pertinent obligations, then as a consequence you may" + , "not convey it at all. For example, if you agree to terms that obligate you" + , "to collect a royalty for further conveying from those to whom you convey" + , "the Program, the only way you could satisfy both those terms and this" + , "License would be to refrain entirely from conveying the Program." + , "" + , " 13. Use with the GNU Affero General Public License." + , "" + , " Notwithstanding any other provision of this License, you have" + , "permission to link or combine any covered work with a work licensed" + , "under version 3 of the GNU Affero General Public License into a single" + , "combined work, and to convey the resulting work. The terms of this" + , "License will continue to apply to the part which is the covered work," + , "but the special requirements of the GNU Affero General Public License," + , "section 13, concerning interaction through a network will apply to the" + , "combination as such." + , "" + , " 14. Revised Versions of this License." + , "" + , " The Free Software Foundation may publish revised and/or new versions of" + , "the GNU General Public License from time to time. Such new versions will" + , "be similar in spirit to the present version, but may differ in detail to" + , "address new problems or concerns." + , "" + , " Each version is given a distinguishing version number. If the" + , "Program specifies that a certain numbered version of the GNU General" + , "Public License \"or any later version\" applies to it, you have the" + , "option of following the terms and conditions either of that numbered" + , "version or of any later version published by the Free Software" + , "Foundation. 
If the Program does not specify a version number of the" + , "GNU General Public License, you may choose any version ever published" + , "by the Free Software Foundation." + , "" + , " If the Program specifies that a proxy can decide which future" + , "versions of the GNU General Public License can be used, that proxy's" + , "public statement of acceptance of a version permanently authorizes you" + , "to choose that version for the Program." + , "" + , " Later license versions may give you additional or different" + , "permissions. However, no additional obligations are imposed on any" + , "author or copyright holder as a result of your choosing to follow a" + , "later version." + , "" + , " 15. Disclaimer of Warranty." + , "" + , " THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY" + , "APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT" + , "HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY" + , "OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO," + , "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" + , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM" + , "IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF" + , "ALL NECESSARY SERVICING, REPAIR OR CORRECTION." + , "" + , " 16. Limitation of Liability." 
+ , "" + , " IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" + , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS" + , "THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY" + , "GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE" + , "USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF" + , "DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD" + , "PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS)," + , "EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF" + , "SUCH DAMAGES." + , "" + , " 17. Interpretation of Sections 15 and 16." + , "" + , " If the disclaimer of warranty and limitation of liability provided" + , "above cannot be given local legal effect according to their terms," + , "reviewing courts shall apply local law that most closely approximates" + , "an absolute waiver of all civil liability in connection with the" + , "Program, unless a warranty or assumption of liability accompanies a" + , "copy of the Program in return for a fee." + , "" + , " END OF TERMS AND CONDITIONS" + , "" + , " How to Apply These Terms to Your New Programs" + , "" + , " If you develop a new program, and you want it to be of the greatest" + , "possible use to the public, the best way to achieve this is to make it" + , "free software which everyone can redistribute and change under these terms." + , "" + , " To do so, attach the following notices to the program. It is safest" + , "to attach them to the start of each source file to most effectively" + , "state the exclusion of warranty; and each file should have at least" + , "the \"copyright\" line and a pointer to where the full notice is found." 
+ , "" + , " " + , " Copyright (C) " + , "" + , " This program is free software: you can redistribute it and/or modify" + , " it under the terms of the GNU General Public License as published by" + , " the Free Software Foundation, either version 3 of the License, or" + , " (at your option) any later version." + , "" + , " This program is distributed in the hope that it will be useful," + , " but WITHOUT ANY WARRANTY; without even the implied warranty of" + , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" + , " GNU General Public License for more details." + , "" + , " You should have received a copy of the GNU General Public License" + , " along with this program. If not, see ." + , "" + , "Also add information on how to contact you by electronic and paper mail." + , "" + , " If the program does terminal interaction, make it output a short" + , "notice like this when it starts in an interactive mode:" + , "" + , " Copyright (C) " + , " This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'." + , " This is free software, and you are welcome to redistribute it" + , " under certain conditions; type `show c' for details." + , "" + , "The hypothetical commands `show w' and `show c' should show the appropriate" + , "parts of the General Public License. Of course, your program's commands" + , "might be different; for a GUI interface, you would use an \"about box\"." + , "" + , " You should also get your employer (if you work as a programmer) or school," + , "if any, to sign a \"copyright disclaimer\" for the program, if necessary." + , "For more information on this, and how to apply and follow the GNU GPL, see" + , "." + , "" + , " The GNU General Public License does not permit incorporating your program" + , "into proprietary programs. If your program is a subroutine library, you" + , "may consider it more useful to permit linking proprietary applications with" + , "the library. 
If this is what you want to do, use the GNU Lesser General" + , "Public License instead of this License. But first, please read" + , "." + ] + +agplv3 :: License +agplv3 = unlines + [ " GNU AFFERO GENERAL PUBLIC LICENSE" + , " Version 3, 19 November 2007" + , "" + , " Copyright (C) 2007 Free Software Foundation, Inc. " + , " Everyone is permitted to copy and distribute verbatim copies" + , " of this license document, but changing it is not allowed." + , "" + , " Preamble" + , "" + , " The GNU Affero General Public License is a free, copyleft license for" + , "software and other kinds of works, specifically designed to ensure" + , "cooperation with the community in the case of network server software." + , "" + , " The licenses for most software and other practical works are designed" + , "to take away your freedom to share and change the works. By contrast," + , "our General Public Licenses are intended to guarantee your freedom to" + , "share and change all versions of a program--to make sure it remains free" + , "software for all its users." + , "" + , " When we speak of free software, we are referring to freedom, not" + , "price. Our General Public Licenses are designed to make sure that you" + , "have the freedom to distribute copies of free software (and charge for" + , "them if you wish), that you receive source code or can get it if you" + , "want it, that you can change the software or use pieces of it in new" + , "free programs, and that you know you can do these things." + , "" + , " Developers that use our General Public Licenses protect your rights" + , "with two steps: (1) assert copyright on the software, and (2) offer" + , "you this License which gives you legal permission to copy, distribute" + , "and/or modify the software." 
+ , "" + , " A secondary benefit of defending all users' freedom is that" + , "improvements made in alternate versions of the program, if they" + , "receive widespread use, become available for other developers to" + , "incorporate. Many developers of free software are heartened and" + , "encouraged by the resulting cooperation. However, in the case of" + , "software used on network servers, this result may fail to come about." + , "The GNU General Public License permits making a modified version and" + , "letting the public access it on a server without ever releasing its" + , "source code to the public." + , "" + , " The GNU Affero General Public License is designed specifically to" + , "ensure that, in such cases, the modified source code becomes available" + , "to the community. It requires the operator of a network server to" + , "provide the source code of the modified version running there to the" + , "users of that server. Therefore, public use of a modified version, on" + , "a publicly accessible server, gives the public access to the source" + , "code of the modified version." + , "" + , " An older license, called the Affero General Public License and" + , "published by Affero, was designed to accomplish similar goals. This is" + , "a different license, not a version of the Affero GPL, but Affero has" + , "released a new version of the Affero GPL which permits relicensing under" + , "this license." + , "" + , " The precise terms and conditions for copying, distribution and" + , "modification follow." + , "" + , " TERMS AND CONDITIONS" + , "" + , " 0. Definitions." + , "" + , " \"This License\" refers to version 3 of the GNU Affero General Public License." + , "" + , " \"Copyright\" also means copyright-like laws that apply to other kinds of" + , "works, such as semiconductor masks." + , "" + , " \"The Program\" refers to any copyrightable work licensed under this" + , "License. Each licensee is addressed as \"you\". 
\"Licensees\" and" + , "\"recipients\" may be individuals or organizations." + , "" + , " To \"modify\" a work means to copy from or adapt all or part of the work" + , "in a fashion requiring copyright permission, other than the making of an" + , "exact copy. The resulting work is called a \"modified version\" of the" + , "earlier work or a work \"based on\" the earlier work." + , "" + , " A \"covered work\" means either the unmodified Program or a work based" + , "on the Program." + , "" + , " To \"propagate\" a work means to do anything with it that, without" + , "permission, would make you directly or secondarily liable for" + , "infringement under applicable copyright law, except executing it on a" + , "computer or modifying a private copy. Propagation includes copying," + , "distribution (with or without modification), making available to the" + , "public, and in some countries other activities as well." + , "" + , " To \"convey\" a work means any kind of propagation that enables other" + , "parties to make or receive copies. Mere interaction with a user through" + , "a computer network, with no transfer of a copy, is not conveying." + , "" + , " An interactive user interface displays \"Appropriate Legal Notices\"" + , "to the extent that it includes a convenient and prominently visible" + , "feature that (1) displays an appropriate copyright notice, and (2)" + , "tells the user that there is no warranty for the work (except to the" + , "extent that warranties are provided), that licensees may convey the" + , "work under this License, and how to view a copy of this License. If" + , "the interface presents a list of user commands or options, such as a" + , "menu, a prominent item in the list meets this criterion." + , "" + , " 1. Source Code." + , "" + , " The \"source code\" for a work means the preferred form of the work" + , "for making modifications to it. \"Object code\" means any non-source" + , "form of a work." 
+ , "" + , " A \"Standard Interface\" means an interface that either is an official" + , "standard defined by a recognized standards body, or, in the case of" + , "interfaces specified for a particular programming language, one that" + , "is widely used among developers working in that language." + , "" + , " The \"System Libraries\" of an executable work include anything, other" + , "than the work as a whole, that (a) is included in the normal form of" + , "packaging a Major Component, but which is not part of that Major" + , "Component, and (b) serves only to enable use of the work with that" + , "Major Component, or to implement a Standard Interface for which an" + , "implementation is available to the public in source code form. A" + , "\"Major Component\", in this context, means a major essential component" + , "(kernel, window system, and so on) of the specific operating system" + , "(if any) on which the executable work runs, or a compiler used to" + , "produce the work, or an object code interpreter used to run it." + , "" + , " The \"Corresponding Source\" for a work in object code form means all" + , "the source code needed to generate, install, and (for an executable" + , "work) run the object code and to modify the work, including scripts to" + , "control those activities. However, it does not include the work's" + , "System Libraries, or general-purpose tools or generally available free" + , "programs which are used unmodified in performing those activities but" + , "which are not part of the work. For example, Corresponding Source" + , "includes interface definition files associated with source files for" + , "the work, and the source code for shared libraries and dynamically" + , "linked subprograms that the work is specifically designed to require," + , "such as by intimate data communication or control flow between those" + , "subprograms and other parts of the work." 
+ , "" + , " The Corresponding Source need not include anything that users" + , "can regenerate automatically from other parts of the Corresponding" + , "Source." + , "" + , " The Corresponding Source for a work in source code form is that" + , "same work." + , "" + , " 2. Basic Permissions." + , "" + , " All rights granted under this License are granted for the term of" + , "copyright on the Program, and are irrevocable provided the stated" + , "conditions are met. This License explicitly affirms your unlimited" + , "permission to run the unmodified Program. The output from running a" + , "covered work is covered by this License only if the output, given its" + , "content, constitutes a covered work. This License acknowledges your" + , "rights of fair use or other equivalent, as provided by copyright law." + , "" + , " You may make, run and propagate covered works that you do not" + , "convey, without conditions so long as your license otherwise remains" + , "in force. You may convey covered works to others for the sole purpose" + , "of having them make modifications exclusively for you, or provide you" + , "with facilities for running those works, provided that you comply with" + , "the terms of this License in conveying all material for which you do" + , "not control copyright. Those thus making or running the covered works" + , "for you must do so exclusively on your behalf, under your direction" + , "and control, on terms that prohibit them from making any copies of" + , "your copyrighted material outside their relationship with you." + , "" + , " Conveying under any other circumstances is permitted solely under" + , "the conditions stated below. Sublicensing is not allowed; section 10" + , "makes it unnecessary." + , "" + , " 3. Protecting Users' Legal Rights From Anti-Circumvention Law." 
+ , "" + , " No covered work shall be deemed part of an effective technological" + , "measure under any applicable law fulfilling obligations under article" + , "11 of the WIPO copyright treaty adopted on 20 December 1996, or" + , "similar laws prohibiting or restricting circumvention of such" + , "measures." + , "" + , " When you convey a covered work, you waive any legal power to forbid" + , "circumvention of technological measures to the extent such circumvention" + , "is effected by exercising rights under this License with respect to" + , "the covered work, and you disclaim any intention to limit operation or" + , "modification of the work as a means of enforcing, against the work's" + , "users, your or third parties' legal rights to forbid circumvention of" + , "technological measures." + , "" + , " 4. Conveying Verbatim Copies." + , "" + , " You may convey verbatim copies of the Program's source code as you" + , "receive it, in any medium, provided that you conspicuously and" + , "appropriately publish on each copy an appropriate copyright notice;" + , "keep intact all notices stating that this License and any" + , "non-permissive terms added in accord with section 7 apply to the code;" + , "keep intact all notices of the absence of any warranty; and give all" + , "recipients a copy of this License along with the Program." + , "" + , " You may charge any price or no price for each copy that you convey," + , "and you may offer support or warranty protection for a fee." + , "" + , " 5. Conveying Modified Source Versions." + , "" + , " You may convey a work based on the Program, or the modifications to" + , "produce it from the Program, in the form of source code under the" + , "terms of section 4, provided that you also meet all of these conditions:" + , "" + , " a) The work must carry prominent notices stating that you modified" + , " it, and giving a relevant date." 
+ , "" + , " b) The work must carry prominent notices stating that it is" + , " released under this License and any conditions added under section" + , " 7. This requirement modifies the requirement in section 4 to" + , " \"keep intact all notices\"." + , "" + , " c) You must license the entire work, as a whole, under this" + , " License to anyone who comes into possession of a copy. This" + , " License will therefore apply, along with any applicable section 7" + , " additional terms, to the whole of the work, and all its parts," + , " regardless of how they are packaged. This License gives no" + , " permission to license the work in any other way, but it does not" + , " invalidate such permission if you have separately received it." + , "" + , " d) If the work has interactive user interfaces, each must display" + , " Appropriate Legal Notices; however, if the Program has interactive" + , " interfaces that do not display Appropriate Legal Notices, your" + , " work need not make them do so." + , "" + , " A compilation of a covered work with other separate and independent" + , "works, which are not by their nature extensions of the covered work," + , "and which are not combined with it such as to form a larger program," + , "in or on a volume of a storage or distribution medium, is called an" + , "\"aggregate\" if the compilation and its resulting copyright are not" + , "used to limit the access or legal rights of the compilation's users" + , "beyond what the individual works permit. Inclusion of a covered work" + , "in an aggregate does not cause this License to apply to the other" + , "parts of the aggregate." + , "" + , " 6. Conveying Non-Source Forms." 
+ , "" + , " You may convey a covered work in object code form under the terms" + , "of sections 4 and 5, provided that you also convey the" + , "machine-readable Corresponding Source under the terms of this License," + , "in one of these ways:" + , "" + , " a) Convey the object code in, or embodied in, a physical product" + , " (including a physical distribution medium), accompanied by the" + , " Corresponding Source fixed on a durable physical medium" + , " customarily used for software interchange." + , "" + , " b) Convey the object code in, or embodied in, a physical product" + , " (including a physical distribution medium), accompanied by a" + , " written offer, valid for at least three years and valid for as" + , " long as you offer spare parts or customer support for that product" + , " model, to give anyone who possesses the object code either (1) a" + , " copy of the Corresponding Source for all the software in the" + , " product that is covered by this License, on a durable physical" + , " medium customarily used for software interchange, for a price no" + , " more than your reasonable cost of physically performing this" + , " conveying of source, or (2) access to copy the" + , " Corresponding Source from a network server at no charge." + , "" + , " c) Convey individual copies of the object code with a copy of the" + , " written offer to provide the Corresponding Source. This" + , " alternative is allowed only occasionally and noncommercially, and" + , " only if you received the object code with such an offer, in accord" + , " with subsection 6b." + , "" + , " d) Convey the object code by offering access from a designated" + , " place (gratis or for a charge), and offer equivalent access to the" + , " Corresponding Source in the same way through the same place at no" + , " further charge. You need not require recipients to copy the" + , " Corresponding Source along with the object code. 
If the place to" + , " copy the object code is a network server, the Corresponding Source" + , " may be on a different server (operated by you or a third party)" + , " that supports equivalent copying facilities, provided you maintain" + , " clear directions next to the object code saying where to find the" + , " Corresponding Source. Regardless of what server hosts the" + , " Corresponding Source, you remain obligated to ensure that it is" + , " available for as long as needed to satisfy these requirements." + , "" + , " e) Convey the object code using peer-to-peer transmission, provided" + , " you inform other peers where the object code and Corresponding" + , " Source of the work are being offered to the general public at no" + , " charge under subsection 6d." + , "" + , " A separable portion of the object code, whose source code is excluded" + , "from the Corresponding Source as a System Library, need not be" + , "included in conveying the object code work." + , "" + , " A \"User Product\" is either (1) a \"consumer product\", which means any" + , "tangible personal property which is normally used for personal, family," + , "or household purposes, or (2) anything designed or sold for incorporation" + , "into a dwelling. In determining whether a product is a consumer product," + , "doubtful cases shall be resolved in favor of coverage. For a particular" + , "product received by a particular user, \"normally used\" refers to a" + , "typical or common use of that class of product, regardless of the status" + , "of the particular user or of the way in which the particular user" + , "actually uses, or expects or is expected to use, the product. A product" + , "is a consumer product regardless of whether the product has substantial" + , "commercial, industrial or non-consumer uses, unless such uses represent" + , "the only significant mode of use of the product." 
+ , "" + , " \"Installation Information\" for a User Product means any methods," + , "procedures, authorization keys, or other information required to install" + , "and execute modified versions of a covered work in that User Product from" + , "a modified version of its Corresponding Source. The information must" + , "suffice to ensure that the continued functioning of the modified object" + , "code is in no case prevented or interfered with solely because" + , "modification has been made." + , "" + , " If you convey an object code work under this section in, or with, or" + , "specifically for use in, a User Product, and the conveying occurs as" + , "part of a transaction in which the right of possession and use of the" + , "User Product is transferred to the recipient in perpetuity or for a" + , "fixed term (regardless of how the transaction is characterized), the" + , "Corresponding Source conveyed under this section must be accompanied" + , "by the Installation Information. But this requirement does not apply" + , "if neither you nor any third party retains the ability to install" + , "modified object code on the User Product (for example, the work has" + , "been installed in ROM)." + , "" + , " The requirement to provide Installation Information does not include a" + , "requirement to continue to provide support service, warranty, or updates" + , "for a work that has been modified or installed by the recipient, or for" + , "the User Product in which it has been modified or installed. Access to a" + , "network may be denied when the modification itself materially and" + , "adversely affects the operation of the network or violates the rules and" + , "protocols for communication across the network." 
+ , "" + , " Corresponding Source conveyed, and Installation Information provided," + , "in accord with this section must be in a format that is publicly" + , "documented (and with an implementation available to the public in" + , "source code form), and must require no special password or key for" + , "unpacking, reading or copying." + , "" + , " 7. Additional Terms." + , "" + , " \"Additional permissions\" are terms that supplement the terms of this" + , "License by making exceptions from one or more of its conditions." + , "Additional permissions that are applicable to the entire Program shall" + , "be treated as though they were included in this License, to the extent" + , "that they are valid under applicable law. If additional permissions" + , "apply only to part of the Program, that part may be used separately" + , "under those permissions, but the entire Program remains governed by" + , "this License without regard to the additional permissions." + , "" + , " When you convey a copy of a covered work, you may at your option" + , "remove any additional permissions from that copy, or from any part of" + , "it. (Additional permissions may be written to require their own" + , "removal in certain cases when you modify the work.) You may place" + , "additional permissions on material, added by you to a covered work," + , "for which you have or can give appropriate copyright permission." 
+ , "" + , " Notwithstanding any other provision of this License, for material you" + , "add to a covered work, you may (if authorized by the copyright holders of" + , "that material) supplement the terms of this License with terms:" + , "" + , " a) Disclaiming warranty or limiting liability differently from the" + , " terms of sections 15 and 16 of this License; or" + , "" + , " b) Requiring preservation of specified reasonable legal notices or" + , " author attributions in that material or in the Appropriate Legal" + , " Notices displayed by works containing it; or" + , "" + , " c) Prohibiting misrepresentation of the origin of that material, or" + , " requiring that modified versions of such material be marked in" + , " reasonable ways as different from the original version; or" + , "" + , " d) Limiting the use for publicity purposes of names of licensors or" + , " authors of the material; or" + , "" + , " e) Declining to grant rights under trademark law for use of some" + , " trade names, trademarks, or service marks; or" + , "" + , " f) Requiring indemnification of licensors and authors of that" + , " material by anyone who conveys the material (or modified versions of" + , " it) with contractual assumptions of liability to the recipient, for" + , " any liability that these contractual assumptions directly impose on" + , " those licensors and authors." + , "" + , " All other non-permissive additional terms are considered \"further" + , "restrictions\" within the meaning of section 10. If the Program as you" + , "received it, or any part of it, contains a notice stating that it is" + , "governed by this License along with a term that is a further" + , "restriction, you may remove that term. 
If a license document contains" + , "a further restriction but permits relicensing or conveying under this" + , "License, you may add to a covered work material governed by the terms" + , "of that license document, provided that the further restriction does" + , "not survive such relicensing or conveying." + , "" + , " If you add terms to a covered work in accord with this section, you" + , "must place, in the relevant source files, a statement of the" + , "additional terms that apply to those files, or a notice indicating" + , "where to find the applicable terms." + , "" + , " Additional terms, permissive or non-permissive, may be stated in the" + , "form of a separately written license, or stated as exceptions;" + , "the above requirements apply either way." + , "" + , " 8. Termination." + , "" + , " You may not propagate or modify a covered work except as expressly" + , "provided under this License. Any attempt otherwise to propagate or" + , "modify it is void, and will automatically terminate your rights under" + , "this License (including any patent licenses granted under the third" + , "paragraph of section 11)." + , "" + , " However, if you cease all violation of this License, then your" + , "license from a particular copyright holder is reinstated (a)" + , "provisionally, unless and until the copyright holder explicitly and" + , "finally terminates your license, and (b) permanently, if the copyright" + , "holder fails to notify you of the violation by some reasonable means" + , "prior to 60 days after the cessation." + , "" + , " Moreover, your license from a particular copyright holder is" + , "reinstated permanently if the copyright holder notifies you of the" + , "violation by some reasonable means, this is the first time you have" + , "received notice of violation of this License (for any work) from that" + , "copyright holder, and you cure the violation prior to 30 days after" + , "your receipt of the notice." 
+ , "" + , " Termination of your rights under this section does not terminate the" + , "licenses of parties who have received copies or rights from you under" + , "this License. If your rights have been terminated and not permanently" + , "reinstated, you do not qualify to receive new licenses for the same" + , "material under section 10." + , "" + , " 9. Acceptance Not Required for Having Copies." + , "" + , " You are not required to accept this License in order to receive or" + , "run a copy of the Program. Ancillary propagation of a covered work" + , "occurring solely as a consequence of using peer-to-peer transmission" + , "to receive a copy likewise does not require acceptance. However," + , "nothing other than this License grants you permission to propagate or" + , "modify any covered work. These actions infringe copyright if you do" + , "not accept this License. Therefore, by modifying or propagating a" + , "covered work, you indicate your acceptance of this License to do so." + , "" + , " 10. Automatic Licensing of Downstream Recipients." + , "" + , " Each time you convey a covered work, the recipient automatically" + , "receives a license from the original licensors, to run, modify and" + , "propagate that work, subject to this License. You are not responsible" + , "for enforcing compliance by third parties with this License." + , "" + , " An \"entity transaction\" is a transaction transferring control of an" + , "organization, or substantially all assets of one, or subdividing an" + , "organization, or merging organizations. 
If propagation of a covered" + , "work results from an entity transaction, each party to that" + , "transaction who receives a copy of the work also receives whatever" + , "licenses to the work the party's predecessor in interest had or could" + , "give under the previous paragraph, plus a right to possession of the" + , "Corresponding Source of the work from the predecessor in interest, if" + , "the predecessor has it or can get it with reasonable efforts." + , "" + , " You may not impose any further restrictions on the exercise of the" + , "rights granted or affirmed under this License. For example, you may" + , "not impose a license fee, royalty, or other charge for exercise of" + , "rights granted under this License, and you may not initiate litigation" + , "(including a cross-claim or counterclaim in a lawsuit) alleging that" + , "any patent claim is infringed by making, using, selling, offering for" + , "sale, or importing the Program or any portion of it." + , "" + , " 11. Patents." + , "" + , " A \"contributor\" is a copyright holder who authorizes use under this" + , "License of the Program or a work on which the Program is based. The" + , "work thus licensed is called the contributor's \"contributor version\"." + , "" + , " A contributor's \"essential patent claims\" are all patent claims" + , "owned or controlled by the contributor, whether already acquired or" + , "hereafter acquired, that would be infringed by some manner, permitted" + , "by this License, of making, using, or selling its contributor version," + , "but do not include claims that would be infringed only as a" + , "consequence of further modification of the contributor version. For" + , "purposes of this definition, \"control\" includes the right to grant" + , "patent sublicenses in a manner consistent with the requirements of" + , "this License." 
+ , "" + , " Each contributor grants you a non-exclusive, worldwide, royalty-free" + , "patent license under the contributor's essential patent claims, to" + , "make, use, sell, offer for sale, import and otherwise run, modify and" + , "propagate the contents of its contributor version." + , "" + , " In the following three paragraphs, a \"patent license\" is any express" + , "agreement or commitment, however denominated, not to enforce a patent" + , "(such as an express permission to practice a patent or covenant not to" + , "sue for patent infringement). To \"grant\" such a patent license to a" + , "party means to make such an agreement or commitment not to enforce a" + , "patent against the party." + , "" + , " If you convey a covered work, knowingly relying on a patent license," + , "and the Corresponding Source of the work is not available for anyone" + , "to copy, free of charge and under the terms of this License, through a" + , "publicly available network server or other readily accessible means," + , "then you must either (1) cause the Corresponding Source to be so" + , "available, or (2) arrange to deprive yourself of the benefit of the" + , "patent license for this particular work, or (3) arrange, in a manner" + , "consistent with the requirements of this License, to extend the patent" + , "license to downstream recipients. \"Knowingly relying\" means you have" + , "actual knowledge that, but for the patent license, your conveying the" + , "covered work in a country, or your recipient's use of the covered work" + , "in a country, would infringe one or more identifiable patents in that" + , "country that you have reason to believe are valid." 
+ , "" + , " If, pursuant to or in connection with a single transaction or" + , "arrangement, you convey, or propagate by procuring conveyance of, a" + , "covered work, and grant a patent license to some of the parties" + , "receiving the covered work authorizing them to use, propagate, modify" + , "or convey a specific copy of the covered work, then the patent license" + , "you grant is automatically extended to all recipients of the covered" + , "work and works based on it." + , "" + , " A patent license is \"discriminatory\" if it does not include within" + , "the scope of its coverage, prohibits the exercise of, or is" + , "conditioned on the non-exercise of one or more of the rights that are" + , "specifically granted under this License. You may not convey a covered" + , "work if you are a party to an arrangement with a third party that is" + , "in the business of distributing software, under which you make payment" + , "to the third party based on the extent of your activity of conveying" + , "the work, and under which the third party grants, to any of the" + , "parties who would receive the covered work from you, a discriminatory" + , "patent license (a) in connection with copies of the covered work" + , "conveyed by you (or copies made from those copies), or (b) primarily" + , "for and in connection with specific products or compilations that" + , "contain the covered work, unless you entered into that arrangement," + , "or that patent license was granted, prior to 28 March 2007." + , "" + , " Nothing in this License shall be construed as excluding or limiting" + , "any implied license or other defenses to infringement that may" + , "otherwise be available to you under applicable patent law." + , "" + , " 12. No Surrender of Others' Freedom." + , "" + , " If conditions are imposed on you (whether by court order, agreement or" + , "otherwise) that contradict the conditions of this License, they do not" + , "excuse you from the conditions of this License. 
If you cannot convey a" + , "covered work so as to satisfy simultaneously your obligations under this" + , "License and any other pertinent obligations, then as a consequence you may" + , "not convey it at all. For example, if you agree to terms that obligate you" + , "to collect a royalty for further conveying from those to whom you convey" + , "the Program, the only way you could satisfy both those terms and this" + , "License would be to refrain entirely from conveying the Program." + , "" + , " 13. Remote Network Interaction; Use with the GNU General Public License." + , "" + , " Notwithstanding any other provision of this License, if you modify the" + , "Program, your modified version must prominently offer all users" + , "interacting with it remotely through a computer network (if your version" + , "supports such interaction) an opportunity to receive the Corresponding" + , "Source of your version by providing access to the Corresponding Source" + , "from a network server at no charge, through some standard or customary" + , "means of facilitating copying of software. This Corresponding Source" + , "shall include the Corresponding Source for any work covered by version 3" + , "of the GNU General Public License that is incorporated pursuant to the" + , "following paragraph." + , "" + , " Notwithstanding any other provision of this License, you have" + , "permission to link or combine any covered work with a work licensed" + , "under version 3 of the GNU General Public License into a single" + , "combined work, and to convey the resulting work. The terms of this" + , "License will continue to apply to the part which is the covered work," + , "but the work with which it is combined will remain governed by version" + , "3 of the GNU General Public License." + , "" + , " 14. Revised Versions of this License." + , "" + , " The Free Software Foundation may publish revised and/or new versions of" + , "the GNU Affero General Public License from time to time. 
Such new versions" + , "will be similar in spirit to the present version, but may differ in detail to" + , "address new problems or concerns." + , "" + , " Each version is given a distinguishing version number. If the" + , "Program specifies that a certain numbered version of the GNU Affero General" + , "Public License \"or any later version\" applies to it, you have the" + , "option of following the terms and conditions either of that numbered" + , "version or of any later version published by the Free Software" + , "Foundation. If the Program does not specify a version number of the" + , "GNU Affero General Public License, you may choose any version ever published" + , "by the Free Software Foundation." + , "" + , " If the Program specifies that a proxy can decide which future" + , "versions of the GNU Affero General Public License can be used, that proxy's" + , "public statement of acceptance of a version permanently authorizes you" + , "to choose that version for the Program." + , "" + , " Later license versions may give you additional or different" + , "permissions. However, no additional obligations are imposed on any" + , "author or copyright holder as a result of your choosing to follow a" + , "later version." + , "" + , " 15. Disclaimer of Warranty." + , "" + , " THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY" + , "APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT" + , "HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM \"AS IS\" WITHOUT WARRANTY" + , "OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO," + , "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" + , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM" + , "IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF" + , "ALL NECESSARY SERVICING, REPAIR OR CORRECTION." + , "" + , " 16. Limitation of Liability." 
+ , "" + , " IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING" + , "WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS" + , "THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY" + , "GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE" + , "USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF" + , "DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD" + , "PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS)," + , "EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF" + , "SUCH DAMAGES." + , "" + , " 17. Interpretation of Sections 15 and 16." + , "" + , " If the disclaimer of warranty and limitation of liability provided" + , "above cannot be given local legal effect according to their terms," + , "reviewing courts shall apply local law that most closely approximates" + , "an absolute waiver of all civil liability in connection with the" + , "Program, unless a warranty or assumption of liability accompanies a" + , "copy of the Program in return for a fee." + , "" + , " END OF TERMS AND CONDITIONS" + , "" + , " How to Apply These Terms to Your New Programs" + , "" + , " If you develop a new program, and you want it to be of the greatest" + , "possible use to the public, the best way to achieve this is to make it" + , "free software which everyone can redistribute and change under these terms." + , "" + , " To do so, attach the following notices to the program. It is safest" + , "to attach them to the start of each source file to most effectively" + , "state the exclusion of warranty; and each file should have at least" + , "the \"copyright\" line and a pointer to where the full notice is found." 
+ , "" + , " " + , " Copyright (C) " + , "" + , " This program is free software: you can redistribute it and/or modify" + , " it under the terms of the GNU Affero General Public License as published by" + , " the Free Software Foundation, either version 3 of the License, or" + , " (at your option) any later version." + , "" + , " This program is distributed in the hope that it will be useful," + , " but WITHOUT ANY WARRANTY; without even the implied warranty of" + , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the" + , " GNU Affero General Public License for more details." + , "" + , " You should have received a copy of the GNU Affero General Public License" + , " along with this program. If not, see ." + , "" + , "Also add information on how to contact you by electronic and paper mail." + , "" + , " If your software can interact with users remotely through a computer" + , "network, you should also make sure that it provides a way for users to" + , "get its source. For example, if your program is a web application, its" + , "interface could display a \"Source\" link that leads users to an archive" + , "of the code. There are many ways you could offer source, and different" + , "solutions will be better for different programs; see section 13 for the" + , "specific requirements." + , "" + , " You should also get your employer (if you work as a programmer) or school," + , "if any, to sign a \"copyright disclaimer\" for the program, if necessary." + , "For more information on this, and how to apply and follow the GNU AGPL, see" + , "." + ] + +lgpl21 :: License +lgpl21 = unlines + [ " GNU LESSER GENERAL PUBLIC LICENSE" + , " Version 2.1, February 1999" + , "" + , " Copyright (C) 1991, 1999 Free Software Foundation, Inc." + , " 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" + , " Everyone is permitted to copy and distribute verbatim copies" + , " of this license document, but changing it is not allowed." 
+ , "" + , "[This is the first released version of the Lesser GPL. It also counts" + , " as the successor of the GNU Library Public License, version 2, hence" + , " the version number 2.1.]" + , "" + , " Preamble" + , "" + , " The licenses for most software are designed to take away your" + , "freedom to share and change it. By contrast, the GNU General Public" + , "Licenses are intended to guarantee your freedom to share and change" + , "free software--to make sure the software is free for all its users." + , "" + , " This license, the Lesser General Public License, applies to some" + , "specially designated software packages--typically libraries--of the" + , "Free Software Foundation and other authors who decide to use it. You" + , "can use it too, but we suggest you first think carefully about whether" + , "this license or the ordinary General Public License is the better" + , "strategy to use in any particular case, based on the explanations below." + , "" + , " When we speak of free software, we are referring to freedom of use," + , "not price. Our General Public Licenses are designed to make sure that" + , "you have the freedom to distribute copies of free software (and charge" + , "for this service if you wish); that you receive source code or can get" + , "it if you want it; that you can change the software and use pieces of" + , "it in new free programs; and that you are informed that you can do" + , "these things." + , "" + , " To protect your rights, we need to make restrictions that forbid" + , "distributors to deny you these rights or to ask you to surrender these" + , "rights. These restrictions translate to certain responsibilities for" + , "you if you distribute copies of the library or if you modify it." + , "" + , " For example, if you distribute copies of the library, whether gratis" + , "or for a fee, you must give the recipients all the rights that we gave" + , "you. You must make sure that they, too, receive or can get the source" + , "code. 
If you link other code with the library, you must provide" + , "complete object files to the recipients, so that they can relink them" + , "with the library after making changes to the library and recompiling" + , "it. And you must show them these terms so they know their rights." + , "" + , " We protect your rights with a two-step method: (1) we copyright the" + , "library, and (2) we offer you this license, which gives you legal" + , "permission to copy, distribute and/or modify the library." + , "" + , " To protect each distributor, we want to make it very clear that" + , "there is no warranty for the free library. Also, if the library is" + , "modified by someone else and passed on, the recipients should know" + , "that what they have is not the original version, so that the original" + , "author's reputation will not be affected by problems that might be" + , "introduced by others." + , "" + , " Finally, software patents pose a constant threat to the existence of" + , "any free program. We wish to make sure that a company cannot" + , "effectively restrict the users of a free program by obtaining a" + , "restrictive license from a patent holder. Therefore, we insist that" + , "any patent license obtained for a version of the library must be" + , "consistent with the full freedom of use specified in this license." + , "" + , " Most GNU software, including some libraries, is covered by the" + , "ordinary GNU General Public License. This license, the GNU Lesser" + , "General Public License, applies to certain designated libraries, and" + , "is quite different from the ordinary General Public License. We use" + , "this license for certain libraries in order to permit linking those" + , "libraries into non-free programs." + , "" + , " When a program is linked with a library, whether statically or using" + , "a shared library, the combination of the two is legally speaking a" + , "combined work, a derivative of the original library. 
The ordinary" + , "General Public License therefore permits such linking only if the" + , "entire combination fits its criteria of freedom. The Lesser General" + , "Public License permits more lax criteria for linking other code with" + , "the library." + , "" + , " We call this license the \"Lesser\" General Public License because it" + , "does Less to protect the user's freedom than the ordinary General" + , "Public License. It also provides other free software developers Less" + , "of an advantage over competing non-free programs. These disadvantages" + , "are the reason we use the ordinary General Public License for many" + , "libraries. However, the Lesser license provides advantages in certain" + , "special circumstances." + , "" + , " For example, on rare occasions, there may be a special need to" + , "encourage the widest possible use of a certain library, so that it becomes" + , "a de-facto standard. To achieve this, non-free programs must be" + , "allowed to use the library. A more frequent case is that a free" + , "library does the same job as widely used non-free libraries. In this" + , "case, there is little to gain by limiting the free library to free" + , "software only, so we use the Lesser General Public License." + , "" + , " In other cases, permission to use a particular library in non-free" + , "programs enables a greater number of people to use a large body of" + , "free software. For example, permission to use the GNU C Library in" + , "non-free programs enables many more people to use the whole GNU" + , "operating system, as well as its variant, the GNU/Linux operating" + , "system." + , "" + , " Although the Lesser General Public License is Less protective of the" + , "users' freedom, it does ensure that the user of a program that is" + , "linked with the Library has the freedom and the wherewithal to run" + , "that program using a modified version of the Library." 
+ , "" + , " The precise terms and conditions for copying, distribution and" + , "modification follow. Pay close attention to the difference between a" + , "\"work based on the library\" and a \"work that uses the library\". The" + , "former contains code derived from the library, whereas the latter must" + , "be combined with the library in order to run." + , "" + , " GNU LESSER GENERAL PUBLIC LICENSE" + , " TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION" + , "" + , " 0. This License Agreement applies to any software library or other" + , "program which contains a notice placed by the copyright holder or" + , "other authorized party saying it may be distributed under the terms of" + , "this Lesser General Public License (also called \"this License\")." + , "Each licensee is addressed as \"you\"." + , "" + , " A \"library\" means a collection of software functions and/or data" + , "prepared so as to be conveniently linked with application programs" + , "(which use some of those functions and data) to form executables." + , "" + , " The \"Library\", below, refers to any such software library or work" + , "which has been distributed under these terms. A \"work based on the" + , "Library\" means either the Library or any derivative work under" + , "copyright law: that is to say, a work containing the Library or a" + , "portion of it, either verbatim or with modifications and/or translated" + , "straightforwardly into another language. (Hereinafter, translation is" + , "included without limitation in the term \"modification\".)" + , "" + , " \"Source code\" for a work means the preferred form of the work for" + , "making modifications to it. For a library, complete source code means" + , "all the source code for all modules it contains, plus any associated" + , "interface definition files, plus the scripts used to control compilation" + , "and installation of the library." 
+ , "" + , " Activities other than copying, distribution and modification are not" + , "covered by this License; they are outside its scope. The act of" + , "running a program using the Library is not restricted, and output from" + , "such a program is covered only if its contents constitute a work based" + , "on the Library (independent of the use of the Library in a tool for" + , "writing it). Whether that is true depends on what the Library does" + , "and what the program that uses the Library does." + , "" + , " 1. You may copy and distribute verbatim copies of the Library's" + , "complete source code as you receive it, in any medium, provided that" + , "you conspicuously and appropriately publish on each copy an" + , "appropriate copyright notice and disclaimer of warranty; keep intact" + , "all the notices that refer to this License and to the absence of any" + , "warranty; and distribute a copy of this License along with the" + , "Library." + , "" + , " You may charge a fee for the physical act of transferring a copy," + , "and you may at your option offer warranty protection in exchange for a" + , "fee." + , "" + , " 2. You may modify your copy or copies of the Library or any portion" + , "of it, thus forming a work based on the Library, and copy and" + , "distribute such modifications or work under the terms of Section 1" + , "above, provided that you also meet all of these conditions:" + , "" + , " a) The modified work must itself be a software library." + , "" + , " b) You must cause the files modified to carry prominent notices" + , " stating that you changed the files and the date of any change." + , "" + , " c) You must cause the whole of the work to be licensed at no" + , " charge to all third parties under the terms of this License." 
+ , "" + , " d) If a facility in the modified Library refers to a function or a" + , " table of data to be supplied by an application program that uses" + , " the facility, other than as an argument passed when the facility" + , " is invoked, then you must make a good faith effort to ensure that," + , " in the event an application does not supply such function or" + , " table, the facility still operates, and performs whatever part of" + , " its purpose remains meaningful." + , "" + , " (For example, a function in a library to compute square roots has" + , " a purpose that is entirely well-defined independent of the" + , " application. Therefore, Subsection 2d requires that any" + , " application-supplied function or table used by this function must" + , " be optional: if the application does not supply it, the square" + , " root function must still compute square roots.)" + , "" + , "These requirements apply to the modified work as a whole. If" + , "identifiable sections of that work are not derived from the Library," + , "and can be reasonably considered independent and separate works in" + , "themselves, then this License, and its terms, do not apply to those" + , "sections when you distribute them as separate works. But when you" + , "distribute the same sections as part of a whole which is a work based" + , "on the Library, the distribution of the whole must be on the terms of" + , "this License, whose permissions for other licensees extend to the" + , "entire whole, and thus to each and every part regardless of who wrote" + , "it." + , "" + , "Thus, it is not the intent of this section to claim rights or contest" + , "your rights to work written entirely by you; rather, the intent is to" + , "exercise the right to control the distribution of derivative or" + , "collective works based on the Library." 
+ , "" + , "In addition, mere aggregation of another work not based on the Library" + , "with the Library (or with a work based on the Library) on a volume of" + , "a storage or distribution medium does not bring the other work under" + , "the scope of this License." + , "" + , " 3. You may opt to apply the terms of the ordinary GNU General Public" + , "License instead of this License to a given copy of the Library. To do" + , "this, you must alter all the notices that refer to this License, so" + , "that they refer to the ordinary GNU General Public License, version 2," + , "instead of to this License. (If a newer version than version 2 of the" + , "ordinary GNU General Public License has appeared, then you can specify" + , "that version instead if you wish.) Do not make any other change in" + , "these notices." + , "" + , " Once this change is made in a given copy, it is irreversible for" + , "that copy, so the ordinary GNU General Public License applies to all" + , "subsequent copies and derivative works made from that copy." + , "" + , " This option is useful when you wish to copy part of the code of" + , "the Library into a program that is not a library." + , "" + , " 4. You may copy and distribute the Library (or a portion or" + , "derivative of it, under Section 2) in object code or executable form" + , "under the terms of Sections 1 and 2 above provided that you accompany" + , "it with the complete corresponding machine-readable source code, which" + , "must be distributed under the terms of Sections 1 and 2 above on a" + , "medium customarily used for software interchange." + , "" + , " If distribution of object code is made by offering access to copy" + , "from a designated place, then offering equivalent access to copy the" + , "source code from the same place satisfies the requirement to" + , "distribute the source code, even though third parties are not" + , "compelled to copy the source along with the object code." + , "" + , " 5. 
A program that contains no derivative of any portion of the" + , "Library, but is designed to work with the Library by being compiled or" + , "linked with it, is called a \"work that uses the Library\". Such a" + , "work, in isolation, is not a derivative work of the Library, and" + , "therefore falls outside the scope of this License." + , "" + , " However, linking a \"work that uses the Library\" with the Library" + , "creates an executable that is a derivative of the Library (because it" + , "contains portions of the Library), rather than a \"work that uses the" + , "library\". The executable is therefore covered by this License." + , "Section 6 states terms for distribution of such executables." + , "" + , " When a \"work that uses the Library\" uses material from a header file" + , "that is part of the Library, the object code for the work may be a" + , "derivative work of the Library even though the source code is not." + , "Whether this is true is especially significant if the work can be" + , "linked without the Library, or if the work is itself a library. The" + , "threshold for this to be true is not precisely defined by law." + , "" + , " If such an object file uses only numerical parameters, data" + , "structure layouts and accessors, and small macros and small inline" + , "functions (ten lines or less in length), then the use of the object" + , "file is unrestricted, regardless of whether it is legally a derivative" + , "work. (Executables containing this object code plus portions of the" + , "Library will still fall under Section 6.)" + , "" + , " Otherwise, if the work is a derivative of the Library, you may" + , "distribute the object code for the work under the terms of Section 6." + , "Any executables containing that work also fall under Section 6," + , "whether or not they are linked directly with the Library itself." + , "" + , " 6. 
As an exception to the Sections above, you may also combine or" + , "link a \"work that uses the Library\" with the Library to produce a" + , "work containing portions of the Library, and distribute that work" + , "under terms of your choice, provided that the terms permit" + , "modification of the work for the customer's own use and reverse" + , "engineering for debugging such modifications." + , "" + , " You must give prominent notice with each copy of the work that the" + , "Library is used in it and that the Library and its use are covered by" + , "this License. You must supply a copy of this License. If the work" + , "during execution displays copyright notices, you must include the" + , "copyright notice for the Library among them, as well as a reference" + , "directing the user to the copy of this License. Also, you must do one" + , "of these things:" + , "" + , " a) Accompany the work with the complete corresponding" + , " machine-readable source code for the Library including whatever" + , " changes were used in the work (which must be distributed under" + , " Sections 1 and 2 above); and, if the work is an executable linked" + , " with the Library, with the complete machine-readable \"work that" + , " uses the Library\", as object code and/or source code, so that the" + , " user can modify the Library and then relink to produce a modified" + , " executable containing the modified Library. (It is understood" + , " that the user who changes the contents of definitions files in the" + , " Library will not necessarily be able to recompile the application" + , " to use the modified definitions.)" + , "" + , " b) Use a suitable shared library mechanism for linking with the" + , " Library. 
A suitable mechanism is one that (1) uses at run time a" + , " copy of the library already present on the user's computer system," + , " rather than copying library functions into the executable, and (2)" + , " will operate properly with a modified version of the library, if" + , " the user installs one, as long as the modified version is" + , " interface-compatible with the version that the work was made with." + , "" + , " c) Accompany the work with a written offer, valid for at" + , " least three years, to give the same user the materials" + , " specified in Subsection 6a, above, for a charge no more" + , " than the cost of performing this distribution." + , "" + , " d) If distribution of the work is made by offering access to copy" + , " from a designated place, offer equivalent access to copy the above" + , " specified materials from the same place." + , "" + , " e) Verify that the user has already received a copy of these" + , " materials or that you have already sent this user a copy." + , "" + , " For an executable, the required form of the \"work that uses the" + , "Library\" must include any data and utility programs needed for" + , "reproducing the executable from it. However, as a special exception," + , "the materials to be distributed need not include anything that is" + , "normally distributed (in either source or binary form) with the major" + , "components (compiler, kernel, and so on) of the operating system on" + , "which the executable runs, unless that component itself accompanies" + , "the executable." + , "" + , " It may happen that this requirement contradicts the license" + , "restrictions of other proprietary libraries that do not normally" + , "accompany the operating system. Such a contradiction means you cannot" + , "use both them and the Library together in an executable that you" + , "distribute." + , "" + , " 7. 
You may place library facilities that are a work based on the" + , "Library side-by-side in a single library together with other library" + , "facilities not covered by this License, and distribute such a combined" + , "library, provided that the separate distribution of the work based on" + , "the Library and of the other library facilities is otherwise" + , "permitted, and provided that you do these two things:" + , "" + , " a) Accompany the combined library with a copy of the same work" + , " based on the Library, uncombined with any other library" + , " facilities. This must be distributed under the terms of the" + , " Sections above." + , "" + , " b) Give prominent notice with the combined library of the fact" + , " that part of it is a work based on the Library, and explaining" + , " where to find the accompanying uncombined form of the same work." + , "" + , " 8. You may not copy, modify, sublicense, link with, or distribute" + , "the Library except as expressly provided under this License. Any" + , "attempt otherwise to copy, modify, sublicense, link with, or" + , "distribute the Library is void, and will automatically terminate your" + , "rights under this License. However, parties who have received copies," + , "or rights, from you under this License will not have their licenses" + , "terminated so long as such parties remain in full compliance." + , "" + , " 9. You are not required to accept this License, since you have not" + , "signed it. However, nothing else grants you permission to modify or" + , "distribute the Library or its derivative works. These actions are" + , "prohibited by law if you do not accept this License. Therefore, by" + , "modifying or distributing the Library (or any work based on the" + , "Library), you indicate your acceptance of this License to do so, and" + , "all its terms and conditions for copying, distributing or modifying" + , "the Library or works based on it." + , "" + , " 10. 
Each time you redistribute the Library (or any work based on the" + , "Library), the recipient automatically receives a license from the" + , "original licensor to copy, distribute, link with or modify the Library" + , "subject to these terms and conditions. You may not impose any further" + , "restrictions on the recipients' exercise of the rights granted herein." + , "You are not responsible for enforcing compliance by third parties with" + , "this License." + , "" + , " 11. If, as a consequence of a court judgment or allegation of patent" + , "infringement or for any other reason (not limited to patent issues)," + , "conditions are imposed on you (whether by court order, agreement or" + , "otherwise) that contradict the conditions of this License, they do not" + , "excuse you from the conditions of this License. If you cannot" + , "distribute so as to satisfy simultaneously your obligations under this" + , "License and any other pertinent obligations, then as a consequence you" + , "may not distribute the Library at all. For example, if a patent" + , "license would not permit royalty-free redistribution of the Library by" + , "all those who receive copies directly or indirectly through you, then" + , "the only way you could satisfy both it and this License would be to" + , "refrain entirely from distribution of the Library." + , "" + , "If any portion of this section is held invalid or unenforceable under any" + , "particular circumstance, the balance of the section is intended to apply," + , "and the section as a whole is intended to apply in other circumstances." + , "" + , "It is not the purpose of this section to induce you to infringe any" + , "patents or other property right claims or to contest validity of any" + , "such claims; this section has the sole purpose of protecting the" + , "integrity of the free software distribution system which is" + , "implemented by public license practices. 
Many people have made" + , "generous contributions to the wide range of software distributed" + , "through that system in reliance on consistent application of that" + , "system; it is up to the author/donor to decide if he or she is willing" + , "to distribute software through any other system and a licensee cannot" + , "impose that choice." + , "" + , "This section is intended to make thoroughly clear what is believed to" + , "be a consequence of the rest of this License." + , "" + , " 12. If the distribution and/or use of the Library is restricted in" + , "certain countries either by patents or by copyrighted interfaces, the" + , "original copyright holder who places the Library under this License may add" + , "an explicit geographical distribution limitation excluding those countries," + , "so that distribution is permitted only in or among countries not thus" + , "excluded. In such case, this License incorporates the limitation as if" + , "written in the body of this License." + , "" + , " 13. The Free Software Foundation may publish revised and/or new" + , "versions of the Lesser General Public License from time to time." + , "Such new versions will be similar in spirit to the present version," + , "but may differ in detail to address new problems or concerns." + , "" + , "Each version is given a distinguishing version number. If the Library" + , "specifies a version number of this License which applies to it and" + , "\"any later version\", you have the option of following the terms and" + , "conditions either of that version or of any later version published by" + , "the Free Software Foundation. If the Library does not specify a" + , "license version number, you may choose any version ever published by" + , "the Free Software Foundation." + , "" + , " 14. If you wish to incorporate parts of the Library into other free" + , "programs whose distribution conditions are incompatible with these," + , "write to the author to ask for permission. 
For software which is" + , "copyrighted by the Free Software Foundation, write to the Free" + , "Software Foundation; we sometimes make exceptions for this. Our" + , "decision will be guided by the two goals of preserving the free status" + , "of all derivatives of our free software and of promoting the sharing" + , "and reuse of software generally." + , "" + , " NO WARRANTY" + , "" + , " 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO" + , "WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW." + , "EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR" + , "OTHER PARTIES PROVIDE THE LIBRARY \"AS IS\" WITHOUT WARRANTY OF ANY" + , "KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE" + , "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR" + , "PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE" + , "LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME" + , "THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION." + , "" + , " 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN" + , "WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY" + , "AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU" + , "FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR" + , "CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE" + , "LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING" + , "RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A" + , "FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF" + , "SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH" + , "DAMAGES." 
+ , "" + , " END OF TERMS AND CONDITIONS" + , "" + , " How to Apply These Terms to Your New Libraries" + , "" + , " If you develop a new library, and you want it to be of the greatest" + , "possible use to the public, we recommend making it free software that" + , "everyone can redistribute and change. You can do so by permitting" + , "redistribution under these terms (or, alternatively, under the terms of the" + , "ordinary General Public License)." + , "" + , " To apply these terms, attach the following notices to the library. It is" + , "safest to attach them to the start of each source file to most effectively" + , "convey the exclusion of warranty; and each file should have at least the" + , "\"copyright\" line and a pointer to where the full notice is found." + , "" + , " " + , " Copyright (C) " + , "" + , " This library is free software; you can redistribute it and/or" + , " modify it under the terms of the GNU Lesser General Public" + , " License as published by the Free Software Foundation; either" + , " version 2.1 of the License, or (at your option) any later version." + , "" + , " This library is distributed in the hope that it will be useful," + , " but WITHOUT ANY WARRANTY; without even the implied warranty of" + , " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU" + , " Lesser General Public License for more details." + , "" + , " You should have received a copy of the GNU Lesser General Public" + , " License along with this library; if not, write to the Free Software" + , " Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA" + , "" + , "Also add information on how to contact you by electronic and paper mail." + , "" + , "You should also get your employer (if you work as a programmer) or your" + , "school, if any, to sign a \"copyright disclaimer\" for the library, if" + , "necessary. 
Here is a sample; alter the names:" + , "" + , " Yoyodyne, Inc., hereby disclaims all copyright interest in the" + , " library `Frob' (a library for tweaking knobs) written by James Random Hacker." + , "" + , " , 1 April 1990" + , " Ty Coon, President of Vice" + , "" + , "That's all there is to it!" + ] + +lgpl3 :: License +lgpl3 = unlines + [ " GNU LESSER GENERAL PUBLIC LICENSE" + , " Version 3, 29 June 2007" + , "" + , " Copyright (C) 2007 Free Software Foundation, Inc. " + , " Everyone is permitted to copy and distribute verbatim copies" + , " of this license document, but changing it is not allowed." + , "" + , "" + , " This version of the GNU Lesser General Public License incorporates" + , "the terms and conditions of version 3 of the GNU General Public" + , "License, supplemented by the additional permissions listed below." + , "" + , " 0. Additional Definitions." + , "" + , " As used herein, \"this License\" refers to version 3 of the GNU Lesser" + , "General Public License, and the \"GNU GPL\" refers to version 3 of the GNU" + , "General Public License." + , "" + , " \"The Library\" refers to a covered work governed by this License," + , "other than an Application or a Combined Work as defined below." + , "" + , " An \"Application\" is any work that makes use of an interface provided" + , "by the Library, but which is not otherwise based on the Library." + , "Defining a subclass of a class defined by the Library is deemed a mode" + , "of using an interface provided by the Library." + , "" + , " A \"Combined Work\" is a work produced by combining or linking an" + , "Application with the Library. The particular version of the Library" + , "with which the Combined Work was made is also called the \"Linked" + , "Version\"." 
+ , "" + , " The \"Minimal Corresponding Source\" for a Combined Work means the" + , "Corresponding Source for the Combined Work, excluding any source code" + , "for portions of the Combined Work that, considered in isolation, are" + , "based on the Application, and not on the Linked Version." + , "" + , " The \"Corresponding Application Code\" for a Combined Work means the" + , "object code and/or source code for the Application, including any data" + , "and utility programs needed for reproducing the Combined Work from the" + , "Application, but excluding the System Libraries of the Combined Work." + , "" + , " 1. Exception to Section 3 of the GNU GPL." + , "" + , " You may convey a covered work under sections 3 and 4 of this License" + , "without being bound by section 3 of the GNU GPL." + , "" + , " 2. Conveying Modified Versions." + , "" + , " If you modify a copy of the Library, and, in your modifications, a" + , "facility refers to a function or data to be supplied by an Application" + , "that uses the facility (other than as an argument passed when the" + , "facility is invoked), then you may convey a copy of the modified" + , "version:" + , "" + , " a) under this License, provided that you make a good faith effort to" + , " ensure that, in the event an Application does not supply the" + , " function or data, the facility still operates, and performs" + , " whatever part of its purpose remains meaningful, or" + , "" + , " b) under the GNU GPL, with none of the additional permissions of" + , " this License applicable to that copy." + , "" + , " 3. Object Code Incorporating Material from Library Header Files." + , "" + , " The object code form of an Application may incorporate material from" + , "a header file that is part of the Library. 
You may convey such object" + , "code under terms of your choice, provided that, if the incorporated" + , "material is not limited to numerical parameters, data structure" + , "layouts and accessors, or small macros, inline functions and templates" + , "(ten or fewer lines in length), you do both of the following:" + , "" + , " a) Give prominent notice with each copy of the object code that the" + , " Library is used in it and that the Library and its use are" + , " covered by this License." + , "" + , " b) Accompany the object code with a copy of the GNU GPL and this license" + , " document." + , "" + , " 4. Combined Works." + , "" + , " You may convey a Combined Work under terms of your choice that," + , "taken together, effectively do not restrict modification of the" + , "portions of the Library contained in the Combined Work and reverse" + , "engineering for debugging such modifications, if you also do each of" + , "the following:" + , "" + , " a) Give prominent notice with each copy of the Combined Work that" + , " the Library is used in it and that the Library and its use are" + , " covered by this License." + , "" + , " b) Accompany the Combined Work with a copy of the GNU GPL and this license" + , " document." + , "" + , " c) For a Combined Work that displays copyright notices during" + , " execution, include the copyright notice for the Library among" + , " these notices, as well as a reference directing the user to the" + , " copies of the GNU GPL and this license document." + , "" + , " d) Do one of the following:" + , "" + , " 0) Convey the Minimal Corresponding Source under the terms of this" + , " License, and the Corresponding Application Code in a form" + , " suitable for, and under terms that permit, the user to" + , " recombine or relink the Application with a modified version of" + , " the Linked Version to produce a modified Combined Work, in the" + , " manner specified by section 6 of the GNU GPL for conveying" + , " Corresponding Source." 
+ , "" + , " 1) Use a suitable shared library mechanism for linking with the" + , " Library. A suitable mechanism is one that (a) uses at run time" + , " a copy of the Library already present on the user's computer" + , " system, and (b) will operate properly with a modified version" + , " of the Library that is interface-compatible with the Linked" + , " Version." + , "" + , " e) Provide Installation Information, but only if you would otherwise" + , " be required to provide such information under section 6 of the" + , " GNU GPL, and only to the extent that such information is" + , " necessary to install and execute a modified version of the" + , " Combined Work produced by recombining or relinking the" + , " Application with a modified version of the Linked Version. (If" + , " you use option 4d0, the Installation Information must accompany" + , " the Minimal Corresponding Source and Corresponding Application" + , " Code. If you use option 4d1, you must provide the Installation" + , " Information in the manner specified by section 6 of the GNU GPL" + , " for conveying Corresponding Source.)" + , "" + , " 5. Combined Libraries." + , "" + , " You may place library facilities that are a work based on the" + , "Library side by side in a single library together with other library" + , "facilities that are not Applications and are not covered by this" + , "License, and convey such a combined library under terms of your" + , "choice, if you do both of the following:" + , "" + , " a) Accompany the combined library with a copy of the same work based" + , " on the Library, uncombined with any other library facilities," + , " conveyed under the terms of this License." + , "" + , " b) Give prominent notice with the combined library that part of it" + , " is a work based on the Library, and explaining where to find the" + , " accompanying uncombined form of the same work." + , "" + , " 6. Revised Versions of the GNU Lesser General Public License." 
+ , "" + , " The Free Software Foundation may publish revised and/or new versions" + , "of the GNU Lesser General Public License from time to time. Such new" + , "versions will be similar in spirit to the present version, but may" + , "differ in detail to address new problems or concerns." + , "" + , " Each version is given a distinguishing version number. If the" + , "Library as you received it specifies that a certain numbered version" + , "of the GNU Lesser General Public License \"or any later version\"" + , "applies to it, you have the option of following the terms and" + , "conditions either of that published version or of any later version" + , "published by the Free Software Foundation. If the Library as you" + , "received it does not specify a version number of the GNU Lesser" + , "General Public License, you may choose any version of the GNU Lesser" + , "General Public License ever published by the Free Software Foundation." + , "" + , " If the Library as you received it specifies that a proxy can decide" + , "whether future versions of the GNU Lesser General Public License shall" + , "apply, that proxy's public statement of acceptance of any version is" + , "permanent authorization for you to choose that version for the" + , "Library." + ] + +apache20 :: License +apache20 = unlines + [ "" + , " Apache License" + , " Version 2.0, January 2004" + , " http://www.apache.org/licenses/" + , "" + , " TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION" + , "" + , " 1. Definitions." + , "" + , " \"License\" shall mean the terms and conditions for use, reproduction," + , " and distribution as defined by Sections 1 through 9 of this document." + , "" + , " \"Licensor\" shall mean the copyright owner or entity authorized by" + , " the copyright owner that is granting the License." 
+ , "" + , " \"Legal Entity\" shall mean the union of the acting entity and all" + , " other entities that control, are controlled by, or are under common" + , " control with that entity. For the purposes of this definition," + , " \"control\" means (i) the power, direct or indirect, to cause the" + , " direction or management of such entity, whether by contract or" + , " otherwise, or (ii) ownership of fifty percent (50%) or more of the" + , " outstanding shares, or (iii) beneficial ownership of such entity." + , "" + , " \"You\" (or \"Your\") shall mean an individual or Legal Entity" + , " exercising permissions granted by this License." + , "" + , " \"Source\" form shall mean the preferred form for making modifications," + , " including but not limited to software source code, documentation" + , " source, and configuration files." + , "" + , " \"Object\" form shall mean any form resulting from mechanical" + , " transformation or translation of a Source form, including but" + , " not limited to compiled object code, generated documentation," + , " and conversions to other media types." + , "" + , " \"Work\" shall mean the work of authorship, whether in Source or" + , " Object form, made available under the License, as indicated by a" + , " copyright notice that is included in or attached to the work" + , " (an example is provided in the Appendix below)." + , "" + , " \"Derivative Works\" shall mean any work, whether in Source or Object" + , " form, that is based on (or derived from) the Work and for which the" + , " editorial revisions, annotations, elaborations, or other modifications" + , " represent, as a whole, an original work of authorship. For the purposes" + , " of this License, Derivative Works shall not include works that remain" + , " separable from, or merely link (or bind by name) to the interfaces of," + , " the Work and Derivative Works thereof." 
+ , "" + , " \"Contribution\" shall mean any work of authorship, including" + , " the original version of the Work and any modifications or additions" + , " to that Work or Derivative Works thereof, that is intentionally" + , " submitted to Licensor for inclusion in the Work by the copyright owner" + , " or by an individual or Legal Entity authorized to submit on behalf of" + , " the copyright owner. For the purposes of this definition, \"submitted\"" + , " means any form of electronic, verbal, or written communication sent" + , " to the Licensor or its representatives, including but not limited to" + , " communication on electronic mailing lists, source code control systems," + , " and issue tracking systems that are managed by, or on behalf of, the" + , " Licensor for the purpose of discussing and improving the Work, but" + , " excluding communication that is conspicuously marked or otherwise" + , " designated in writing by the copyright owner as \"Not a Contribution.\"" + , "" + , " \"Contributor\" shall mean Licensor and any individual or Legal Entity" + , " on behalf of whom a Contribution has been received by Licensor and" + , " subsequently incorporated within the Work." + , "" + , " 2. Grant of Copyright License. Subject to the terms and conditions of" + , " this License, each Contributor hereby grants to You a perpetual," + , " worldwide, non-exclusive, no-charge, royalty-free, irrevocable" + , " copyright license to reproduce, prepare Derivative Works of," + , " publicly display, publicly perform, sublicense, and distribute the" + , " Work and such Derivative Works in Source or Object form." + , "" + , " 3. Grant of Patent License. 
Subject to the terms and conditions of" + , " this License, each Contributor hereby grants to You a perpetual," + , " worldwide, non-exclusive, no-charge, royalty-free, irrevocable" + , " (except as stated in this section) patent license to make, have made," + , " use, offer to sell, sell, import, and otherwise transfer the Work," + , " where such license applies only to those patent claims licensable" + , " by such Contributor that are necessarily infringed by their" + , " Contribution(s) alone or by combination of their Contribution(s)" + , " with the Work to which such Contribution(s) was submitted. If You" + , " institute patent litigation against any entity (including a" + , " cross-claim or counterclaim in a lawsuit) alleging that the Work" + , " or a Contribution incorporated within the Work constitutes direct" + , " or contributory patent infringement, then any patent licenses" + , " granted to You under this License for that Work shall terminate" + , " as of the date such litigation is filed." + , "" + , " 4. Redistribution. 
You may reproduce and distribute copies of the" + , " Work or Derivative Works thereof in any medium, with or without" + , " modifications, and in Source or Object form, provided that You" + , " meet the following conditions:" + , "" + , " (a) You must give any other recipients of the Work or" + , " Derivative Works a copy of this License; and" + , "" + , " (b) You must cause any modified files to carry prominent notices" + , " stating that You changed the files; and" + , "" + , " (c) You must retain, in the Source form of any Derivative Works" + , " that You distribute, all copyright, patent, trademark, and" + , " attribution notices from the Source form of the Work," + , " excluding those notices that do not pertain to any part of" + , " the Derivative Works; and" + , "" + , " (d) If the Work includes a \"NOTICE\" text file as part of its" + , " distribution, then any Derivative Works that You distribute must" + , " include a readable copy of the attribution notices contained" + , " within such NOTICE file, excluding those notices that do not" + , " pertain to any part of the Derivative Works, in at least one" + , " of the following places: within a NOTICE text file distributed" + , " as part of the Derivative Works; within the Source form or" + , " documentation, if provided along with the Derivative Works; or," + , " within a display generated by the Derivative Works, if and" + , " wherever such third-party notices normally appear. The contents" + , " of the NOTICE file are for informational purposes only and" + , " do not modify the License. You may add Your own attribution" + , " notices within Derivative Works that You distribute, alongside" + , " or as an addendum to the NOTICE text from the Work, provided" + , " that such additional attribution notices cannot be construed" + , " as modifying the License." 
+ , "" + , " You may add Your own copyright statement to Your modifications and" + , " may provide additional or different license terms and conditions" + , " for use, reproduction, or distribution of Your modifications, or" + , " for any such Derivative Works as a whole, provided Your use," + , " reproduction, and distribution of the Work otherwise complies with" + , " the conditions stated in this License." + , "" + , " 5. Submission of Contributions. Unless You explicitly state otherwise," + , " any Contribution intentionally submitted for inclusion in the Work" + , " by You to the Licensor shall be under the terms and conditions of" + , " this License, without any additional terms or conditions." + , " Notwithstanding the above, nothing herein shall supersede or modify" + , " the terms of any separate license agreement you may have executed" + , " with Licensor regarding such Contributions." + , "" + , " 6. Trademarks. This License does not grant permission to use the trade" + , " names, trademarks, service marks, or product names of the Licensor," + , " except as required for reasonable and customary use in describing the" + , " origin of the Work and reproducing the content of the NOTICE file." + , "" + , " 7. Disclaimer of Warranty. Unless required by applicable law or" + , " agreed to in writing, Licensor provides the Work (and each" + , " Contributor provides its Contributions) on an \"AS IS\" BASIS," + , " WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or" + , " implied, including, without limitation, any warranties or conditions" + , " of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A" + , " PARTICULAR PURPOSE. You are solely responsible for determining the" + , " appropriateness of using or redistributing the Work and assume any" + , " risks associated with Your exercise of permissions under this License." + , "" + , " 8. Limitation of Liability. 
In no event and under no legal theory," + , " whether in tort (including negligence), contract, or otherwise," + , " unless required by applicable law (such as deliberate and grossly" + , " negligent acts) or agreed to in writing, shall any Contributor be" + , " liable to You for damages, including any direct, indirect, special," + , " incidental, or consequential damages of any character arising as a" + , " result of this License or out of the use or inability to use the" + , " Work (including but not limited to damages for loss of goodwill," + , " work stoppage, computer failure or malfunction, or any and all" + , " other commercial damages or losses), even if such Contributor" + , " has been advised of the possibility of such damages." + , "" + , " 9. Accepting Warranty or Additional Liability. While redistributing" + , " the Work or Derivative Works thereof, You may choose to offer," + , " and charge a fee for, acceptance of support, warranty, indemnity," + , " or other liability obligations and/or rights consistent with this" + , " License. However, in accepting such obligations, You may act only" + , " on Your own behalf and on Your sole responsibility, not on behalf" + , " of any other Contributor, and only if You agree to indemnify," + , " defend, and hold each Contributor harmless for any liability" + , " incurred by, or claims asserted against, such Contributor by reason" + , " of your accepting any such warranty or additional liability." + , "" + , " END OF TERMS AND CONDITIONS" + , "" + , " APPENDIX: How to apply the Apache License to your work." + , "" + , " To apply the Apache License to your work, attach the following" + , " boilerplate notice, with the fields enclosed by brackets \"[]\"" + , " replaced with your own identifying information. (Don't include" + , " the brackets!) The text should be enclosed in the appropriate" + , " comment syntax for the file format. 
We also recommend that a" + , " file or class name and description of purpose be included on the" + , " same \"printed page\" as the copyright notice for easier" + , " identification within third-party archives." + , "" + , " Copyright [yyyy] [name of copyright owner]" + , "" + , " Licensed under the Apache License, Version 2.0 (the \"License\");" + , " you may not use this file except in compliance with the License." + , " You may obtain a copy of the License at" + , "" + , " http://www.apache.org/licenses/LICENSE-2.0" + , "" + , " Unless required by applicable law or agreed to in writing, software" + , " distributed under the License is distributed on an \"AS IS\" BASIS," + , " WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied." + , " See the License for the specific language governing permissions and" + , " limitations under the License." + ] + +mit :: String -> String -> License +mit authors year = unlines + [ "Copyright (c) " ++ year ++ " " ++ authors + , "" + , "Permission is hereby granted, free of charge, to any person obtaining" + , "a copy of this software and associated documentation files (the" + , "\"Software\"), to deal in the Software without restriction, including" + , "without limitation the rights to use, copy, modify, merge, publish," + , "distribute, sublicense, and/or sell copies of the Software, and to" + , "permit persons to whom the Software is furnished to do so, subject to" + , "the following conditions:" + , "" + , "The above copyright notice and this permission notice shall be included" + , "in all copies or substantial portions of the Software." + , "" + , "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND," + , "EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF" + , "MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT." 
+ , "IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY" + , "CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT," + , "TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE" + , "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE." + ] + +mpl20 :: License +mpl20 = unlines + [ "Mozilla Public License Version 2.0" + , "==================================" + , "" + , "1. Definitions" + , "--------------" + , "" + , "1.1. \"Contributor\"" + , " means each individual or legal entity that creates, contributes to" + , " the creation of, or owns Covered Software." + , "" + , "1.2. \"Contributor Version\"" + , " means the combination of the Contributions of others (if any) used" + , " by a Contributor and that particular Contributor's Contribution." + , "" + , "1.3. \"Contribution\"" + , " means Covered Software of a particular Contributor." + , "" + , "1.4. \"Covered Software\"" + , " means Source Code Form to which the initial Contributor has attached" + , " the notice in Exhibit A, the Executable Form of such Source Code" + , " Form, and Modifications of such Source Code Form, in each case" + , " including portions thereof." + , "" + , "1.5. \"Incompatible With Secondary Licenses\"" + , " means" + , "" + , " (a) that the initial Contributor has attached the notice described" + , " in Exhibit B to the Covered Software; or" + , "" + , " (b) that the Covered Software was made available under the terms of" + , " version 1.1 or earlier of the License, but not also under the" + , " terms of a Secondary License." + , "" + , "1.6. \"Executable Form\"" + , " means any form of the work other than Source Code Form." + , "" + , "1.7. \"Larger Work\"" + , " means a work that combines Covered Software with other material, in" + , " a separate file or files, that is not Covered Software." + , "" + , "1.8. \"License\"" + , " means this document." + , "" + , "1.9. 
\"Licensable\"" + , " means having the right to grant, to the maximum extent possible," + , " whether at the time of the initial grant or subsequently, any and" + , " all of the rights conveyed by this License." + , "" + , "1.10. \"Modifications\"" + , " means any of the following:" + , "" + , " (a) any file in Source Code Form that results from an addition to," + , " deletion from, or modification of the contents of Covered" + , " Software; or" + , "" + , " (b) any new file in Source Code Form that contains any Covered" + , " Software." + , "" + , "1.11. \"Patent Claims\" of a Contributor" + , " means any patent claim(s), including without limitation, method," + , " process, and apparatus claims, in any patent Licensable by such" + , " Contributor that would be infringed, but for the grant of the" + , " License, by the making, using, selling, offering for sale, having" + , " made, import, or transfer of either its Contributions or its" + , " Contributor Version." + , "" + , "1.12. \"Secondary License\"" + , " means either the GNU General Public License, Version 2.0, the GNU" + , " Lesser General Public License, Version 2.1, the GNU Affero General" + , " Public License, Version 3.0, or any later versions of those" + , " licenses." + , "" + , "1.13. \"Source Code Form\"" + , " means the form of the work preferred for making modifications." + , "" + , "1.14. \"You\" (or \"Your\")" + , " means an individual or a legal entity exercising rights under this" + , " License. For legal entities, \"You\" includes any entity that" + , " controls, is controlled by, or is under common control with You. For" + , " purposes of this definition, \"control\" means (a) the power, direct" + , " or indirect, to cause the direction or management of such entity," + , " whether by contract or otherwise, or (b) ownership of more than" + , " fifty percent (50%) of the outstanding shares or beneficial" + , " ownership of such entity." + , "" + , "2. 
License Grants and Conditions" + , "--------------------------------" + , "" + , "2.1. Grants" + , "" + , "Each Contributor hereby grants You a world-wide, royalty-free," + , "non-exclusive license:" + , "" + , "(a) under intellectual property rights (other than patent or trademark)" + , " Licensable by such Contributor to use, reproduce, make available," + , " modify, display, perform, distribute, and otherwise exploit its" + , " Contributions, either on an unmodified basis, with Modifications, or" + , " as part of a Larger Work; and" + , "" + , "(b) under Patent Claims of such Contributor to make, use, sell, offer" + , " for sale, have made, import, and otherwise transfer either its" + , " Contributions or its Contributor Version." + , "" + , "2.2. Effective Date" + , "" + , "The licenses granted in Section 2.1 with respect to any Contribution" + , "become effective for each Contribution on the date the Contributor first" + , "distributes such Contribution." + , "" + , "2.3. Limitations on Grant Scope" + , "" + , "The licenses granted in this Section 2 are the only rights granted under" + , "this License. No additional rights or licenses will be implied from the" + , "distribution or licensing of Covered Software under this License." + , "Notwithstanding Section 2.1(b) above, no patent license is granted by a" + , "Contributor:" + , "" + , "(a) for any code that a Contributor has removed from Covered Software;" + , " or" + , "" + , "(b) for infringements caused by: (i) Your and any other third party's" + , " modifications of Covered Software, or (ii) the combination of its" + , " Contributions with other software (except as part of its Contributor" + , " Version); or" + , "" + , "(c) under Patent Claims infringed by Covered Software in the absence of" + , " its Contributions." 
+ , "" + , "This License does not grant any rights in the trademarks, service marks," + , "or logos of any Contributor (except as may be necessary to comply with" + , "the notice requirements in Section 3.4)." + , "" + , "2.4. Subsequent Licenses" + , "" + , "No Contributor makes additional grants as a result of Your choice to" + , "distribute the Covered Software under a subsequent version of this" + , "License (see Section 10.2) or under the terms of a Secondary License (if" + , "permitted under the terms of Section 3.3)." + , "" + , "2.5. Representation" + , "" + , "Each Contributor represents that the Contributor believes its" + , "Contributions are its original creation(s) or it has sufficient rights" + , "to grant the rights to its Contributions conveyed by this License." + , "" + , "2.6. Fair Use" + , "" + , "This License is not intended to limit any rights You have under" + , "applicable copyright doctrines of fair use, fair dealing, or other" + , "equivalents." + , "" + , "2.7. Conditions" + , "" + , "Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted" + , "in Section 2.1." + , "" + , "3. Responsibilities" + , "-------------------" + , "" + , "3.1. Distribution of Source Form" + , "" + , "All distribution of Covered Software in Source Code Form, including any" + , "Modifications that You create or to which You contribute, must be under" + , "the terms of this License. You must inform recipients that the Source" + , "Code Form of the Covered Software is governed by the terms of this" + , "License, and how they can obtain a copy of this License. You may not" + , "attempt to alter or restrict the recipients' rights in the Source Code" + , "Form." + , "" + , "3.2. 
Distribution of Executable Form" + , "" + , "If You distribute Covered Software in Executable Form then:" + , "" + , "(a) such Covered Software must also be made available in Source Code" + , " Form, as described in Section 3.1, and You must inform recipients of" + , " the Executable Form how they can obtain a copy of such Source Code" + , " Form by reasonable means in a timely manner, at a charge no more" + , " than the cost of distribution to the recipient; and" + , "" + , "(b) You may distribute such Executable Form under the terms of this" + , " License, or sublicense it under different terms, provided that the" + , " license for the Executable Form does not attempt to limit or alter" + , " the recipients' rights in the Source Code Form under this License." + , "" + , "3.3. Distribution of a Larger Work" + , "" + , "You may create and distribute a Larger Work under terms of Your choice," + , "provided that You also comply with the requirements of this License for" + , "the Covered Software. If the Larger Work is a combination of Covered" + , "Software with a work governed by one or more Secondary Licenses, and the" + , "Covered Software is not Incompatible With Secondary Licenses, this" + , "License permits You to additionally distribute such Covered Software" + , "under the terms of such Secondary License(s), so that the recipient of" + , "the Larger Work may, at their option, further distribute the Covered" + , "Software under the terms of either this License or such Secondary" + , "License(s)." + , "" + , "3.4. Notices" + , "" + , "You may not remove or alter the substance of any license notices" + , "(including copyright notices, patent notices, disclaimers of warranty," + , "or limitations of liability) contained within the Source Code Form of" + , "the Covered Software, except that You may alter any license notices to" + , "the extent required to remedy known factual inaccuracies." + , "" + , "3.5. 
Application of Additional Terms" + , "" + , "You may choose to offer, and to charge a fee for, warranty, support," + , "indemnity or liability obligations to one or more recipients of Covered" + , "Software. However, You may do so only on Your own behalf, and not on" + , "behalf of any Contributor. You must make it absolutely clear that any" + , "such warranty, support, indemnity, or liability obligation is offered by" + , "You alone, and You hereby agree to indemnify every Contributor for any" + , "liability incurred by such Contributor as a result of warranty, support," + , "indemnity or liability terms You offer. You may include additional" + , "disclaimers of warranty and limitations of liability specific to any" + , "jurisdiction." + , "" + , "4. Inability to Comply Due to Statute or Regulation" + , "---------------------------------------------------" + , "" + , "If it is impossible for You to comply with any of the terms of this" + , "License with respect to some or all of the Covered Software due to" + , "statute, judicial order, or regulation then You must: (a) comply with" + , "the terms of this License to the maximum extent possible; and (b)" + , "describe the limitations and the code they affect. Such description must" + , "be placed in a text file included with all distributions of the Covered" + , "Software under this License. Except to the extent prohibited by statute" + , "or regulation, such description must be sufficiently detailed for a" + , "recipient of ordinary skill to be able to understand it." + , "" + , "5. Termination" + , "--------------" + , "" + , "5.1. The rights granted under this License will terminate automatically" + , "if You fail to comply with any of its terms. 
However, if You become" + , "compliant, then the rights granted under this License from a particular" + , "Contributor are reinstated (a) provisionally, unless and until such" + , "Contributor explicitly and finally terminates Your grants, and (b) on an" + , "ongoing basis, if such Contributor fails to notify You of the" + , "non-compliance by some reasonable means prior to 60 days after You have" + , "come back into compliance. Moreover, Your grants from a particular" + , "Contributor are reinstated on an ongoing basis if such Contributor" + , "notifies You of the non-compliance by some reasonable means, this is the" + , "first time You have received notice of non-compliance with this License" + , "from such Contributor, and You become compliant prior to 30 days after" + , "Your receipt of the notice." + , "" + , "5.2. If You initiate litigation against any entity by asserting a patent" + , "infringement claim (excluding declaratory judgment actions," + , "counter-claims, and cross-claims) alleging that a Contributor Version" + , "directly or indirectly infringes any patent, then the rights granted to" + , "You by any and all Contributors for the Covered Software under Section" + , "2.1 of this License shall terminate." + , "" + , "5.3. In the event of termination under Sections 5.1 or 5.2 above, all" + , "end user license agreements (excluding distributors and resellers) which" + , "have been validly granted by You or Your distributors under this License" + , "prior to termination shall survive termination." + , "" + , "************************************************************************" + , "* *" + , "* 6. 
Disclaimer of Warranty *" + , "* ------------------------- *" + , "* *" + , "* Covered Software is provided under this License on an \"as is\" *" + , "* basis, without warranty of any kind, either expressed, implied, or *" + , "* statutory, including, without limitation, warranties that the *" + , "* Covered Software is free of defects, merchantable, fit for a *" + , "* particular purpose or non-infringing. The entire risk as to the *" + , "* quality and performance of the Covered Software is with You. *" + , "* Should any Covered Software prove defective in any respect, You *" + , "* (not any Contributor) assume the cost of any necessary servicing, *" + , "* repair, or correction. This disclaimer of warranty constitutes an *" + , "* essential part of this License. No use of any Covered Software is *" + , "* authorized under this License except under this disclaimer. *" + , "* *" + , "************************************************************************" + , "" + , "************************************************************************" + , "* *" + , "* 7. Limitation of Liability *" + , "* -------------------------- *" + , "* *" + , "* Under no circumstances and under no legal theory, whether tort *" + , "* (including negligence), contract, or otherwise, shall any *" + , "* Contributor, or anyone who distributes Covered Software as *" + , "* permitted above, be liable to You for any direct, indirect, *" + , "* special, incidental, or consequential damages of any character *" + , "* including, without limitation, damages for lost profits, loss of *" + , "* goodwill, work stoppage, computer failure or malfunction, or any *" + , "* and all other commercial damages or losses, even if such party *" + , "* shall have been informed of the possibility of such damages. 
This *" + , "* limitation of liability shall not apply to liability for death or *" + , "* personal injury resulting from such party's negligence to the *" + , "* extent applicable law prohibits such limitation. Some *" + , "* jurisdictions do not allow the exclusion or limitation of *" + , "* incidental or consequential damages, so this exclusion and *" + , "* limitation may not apply to You. *" + , "* *" + , "************************************************************************" + , "" + , "8. Litigation" + , "-------------" + , "" + , "Any litigation relating to this License may be brought only in the" + , "courts of a jurisdiction where the defendant maintains its principal" + , "place of business and such litigation shall be governed by laws of that" + , "jurisdiction, without reference to its conflict-of-law provisions." + , "Nothing in this Section shall prevent a party's ability to bring" + , "cross-claims or counter-claims." + , "" + , "9. Miscellaneous" + , "----------------" + , "" + , "This License represents the complete agreement concerning the subject" + , "matter hereof. If any provision of this License is held to be" + , "unenforceable, such provision shall be reformed only to the extent" + , "necessary to make it enforceable. Any law or regulation which provides" + , "that the language of a contract shall be construed against the drafter" + , "shall not be used to construe this License against a Contributor." + , "" + , "10. Versions of the License" + , "---------------------------" + , "" + , "10.1. New Versions" + , "" + , "Mozilla Foundation is the license steward. Except as provided in Section" + , "10.3, no one other than the license steward has the right to modify or" + , "publish new versions of this License. Each version will be given a" + , "distinguishing version number." + , "" + , "10.2. 
Effect of New Versions" + , "" + , "You may distribute the Covered Software under the terms of the version" + , "of the License under which You originally received the Covered Software," + , "or under the terms of any subsequent version published by the license" + , "steward." + , "" + , "10.3. Modified Versions" + , "" + , "If you create software not governed by this License, and you want to" + , "create a new license for such software, you may create and use a" + , "modified version of this License if you rename the license and remove" + , "any references to the name of the license steward (except to note that" + , "such modified license differs from this License)." + , "" + , "10.4. Distributing Source Code Form that is Incompatible With Secondary" + , "Licenses" + , "" + , "If You choose to distribute Source Code Form that is Incompatible With" + , "Secondary Licenses under the terms of this version of the License, the" + , "notice described in Exhibit B of this License must be attached." + , "" + , "Exhibit A - Source Code Form License Notice" + , "-------------------------------------------" + , "" + , " This Source Code Form is subject to the terms of the Mozilla Public" + , " License, v. 2.0. If a copy of the MPL was not distributed with this" + , " file, You can obtain one at http://mozilla.org/MPL/2.0/." + , "" + , "If it is not possible or desirable to put the notice in a particular" + , "file, then You may include the notice in a location (such as a LICENSE" + , "file in a relevant directory) where a recipient would be likely to look" + , "for such a notice." + , "" + , "You may add additional accurate notices of copyright ownership." + , "" + , "Exhibit B - \"Incompatible With Secondary Licenses\" Notice" + , "---------------------------------------------------------" + , "" + , " This Source Code Form is \"Incompatible With Secondary Licenses\", as" + , " defined by the Mozilla Public License, v. 2.0." 
+ ] + +isc :: String -> String -> License +isc authors year = unlines + [ "Copyright (c) " ++ year ++ " " ++ authors + , "" + , "Permission to use, copy, modify, and/or distribute this software for any purpose" + , "with or without fee is hereby granted, provided that the above copyright notice" + , "and this permission notice appear in all copies." + , "" + , "THE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH" + , "REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND" + , "FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT," + , "INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS" + , "OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER" + , "TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF" + , "THIS SOFTWARE." + ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Init/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Init/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Init/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Init/Types.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,123 @@ +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Init.Types +-- Copyright : (c) Brent Yorgey, Benedikt Huber 2009 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Some types used by the 'cabal init' command. +-- +----------------------------------------------------------------------------- +module Distribution.Client.Init.Types where + +import Distribution.Simple.Setup + ( Flag(..) 
) + +import Distribution.Types.Dependency as P +import Distribution.Compat.Semigroup +import Distribution.Version +import Distribution.Verbosity +import qualified Distribution.Package as P +import Distribution.License +import Distribution.ModuleName +import Language.Haskell.Extension ( Language(..), Extension ) + +import qualified Text.PrettyPrint as Disp +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Text + +import GHC.Generics ( Generic ) + +-- | InitFlags is really just a simple type to represent certain +-- portions of a .cabal file. Rather than have a flag for EVERY +-- possible field, we just have one for each field that the user is +-- likely to want and/or that we are likely to be able to +-- intelligently guess. +data InitFlags = + InitFlags { nonInteractive :: Flag Bool + , quiet :: Flag Bool + , packageDir :: Flag FilePath + , noComments :: Flag Bool + , minimal :: Flag Bool + + , packageName :: Flag P.PackageName + , version :: Flag Version + , cabalVersion :: Flag Version + , license :: Flag License + , author :: Flag String + , email :: Flag String + , homepage :: Flag String + + , synopsis :: Flag String + , category :: Flag (Either String Category) + , extraSrc :: Maybe [String] + + , packageType :: Flag PackageType + , mainIs :: Flag FilePath + , language :: Flag Language + + , exposedModules :: Maybe [ModuleName] + , otherModules :: Maybe [ModuleName] + , otherExts :: Maybe [Extension] + + , dependencies :: Maybe [P.Dependency] + , sourceDirs :: Maybe [String] + , buildTools :: Maybe [String] + + , initHcPath :: Flag FilePath + + , initVerbosity :: Flag Verbosity + , overwrite :: Flag Bool + } + deriving (Show, Generic) + + -- the Monoid instance for Flag has later values override earlier + -- ones, which is why we want Maybe [foo] for collecting foo values, + -- not Flag [foo]. 
+ +data BuildType = LibBuild | ExecBuild + +data PackageType = Library | Executable | LibraryAndExecutable + deriving (Show, Read, Eq) + +displayPackageType :: PackageType -> String +displayPackageType LibraryAndExecutable = "Library and Executable" +displayPackageType pkgtype = show pkgtype + +instance Monoid InitFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup InitFlags where + (<>) = gmappend + +-- | Some common package categories. +data Category + = Codec + | Concurrency + | Control + | Data + | Database + | Development + | Distribution + | Game + | Graphics + | Language + | Math + | Network + | Sound + | System + | Testing + | Text + | Web + deriving (Read, Show, Eq, Ord, Bounded, Enum) + +instance Text Category where + disp = Disp.text . show + parse = Parse.choice $ map (fmap read . Parse.string . show) [Codec .. ] -- TODO: eradicateNoParse + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Init.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Init.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Init.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Init.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,1058 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Init +-- Copyright : (c) Brent Yorgey 2009 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Implementation of the 'cabal init' command, which creates an initial .cabal +-- file for a project. 
+-- +----------------------------------------------------------------------------- + +module Distribution.Client.Init ( + + -- * Commands + initCabal + , incVersion + + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude hiding (empty) + +import System.IO + ( hSetBuffering, stdout, BufferMode(..) ) +import System.Directory + ( getCurrentDirectory, doesDirectoryExist, doesFileExist, copyFile + , getDirectoryContents, createDirectoryIfMissing ) +import System.FilePath + ( (), (<.>), takeBaseName, equalFilePath ) +import Data.Time + ( getCurrentTime, utcToLocalTime, toGregorian, localDay, getCurrentTimeZone ) + +import Data.List + ( groupBy, (\\) ) +import Data.Function + ( on ) +import qualified Data.Map as M +import Control.Monad + ( (>=>), join, forM_, mapM, mapM_ ) +import Control.Arrow + ( (&&&), (***) ) + +import Text.PrettyPrint hiding (mode, cat) + +import Distribution.Version + ( Version, mkVersion, alterVersion, versionNumbers, majorBoundVersion + , orLaterVersion, earlierVersion, intersectVersionRanges, VersionRange ) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.ModuleName + ( ModuleName ) -- And for the Text instance +import Distribution.InstalledPackageInfo + ( InstalledPackageInfo, exposed ) +import qualified Distribution.Package as P +import Language.Haskell.Extension ( Language(..) ) + +import Distribution.Client.Init.Types + ( InitFlags(..), BuildType(..), PackageType(..), Category(..) 
+ , displayPackageType ) +import Distribution.Client.Init.Licenses + ( bsd2, bsd3, gplv2, gplv3, lgpl21, lgpl3, agplv3, apache20, mit, mpl20, isc ) +import Distribution.Client.Init.Heuristics + ( guessPackageName, guessAuthorNameMail, guessMainFileCandidates, + SourceFileEntry(..), + scanForModules, neededBuildPrograms ) + +import Distribution.License + ( License(..), knownLicenses, licenseToSPDX ) +import qualified Distribution.SPDX as SPDX + +import Distribution.ReadE + ( runReadE, readP_to_E ) +import Distribution.Simple.Setup + ( Flag(..), flagToMaybe ) +import Distribution.Simple.Utils + ( dropWhileEndLE ) +import Distribution.Simple.Configure + ( getInstalledPackages ) +import Distribution.Simple.Compiler + ( PackageDBStack, Compiler ) +import Distribution.Simple.Program + ( ProgramDb ) +import Distribution.Simple.PackageIndex + ( InstalledPackageIndex, moduleNameIndex ) +import Distribution.Text + ( display, Text(..) ) +import Distribution.Pretty + ( prettyShow ) +import Distribution.Parsec.Class + ( eitherParsec ) + +import Distribution.Solver.Types.PackageIndex + ( elemByPackageName ) + +import Distribution.Client.IndexUtils + ( getSourcePackages ) +import Distribution.Client.Types + ( SourcePackageDb(..) ) +import Distribution.Client.Setup + ( RepoContext(..) 
) + +initCabal :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> ProgramDb + -> InitFlags + -> IO () +initCabal verbosity packageDBs repoCtxt comp progdb initFlags = do + + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb + sourcePkgDb <- getSourcePackages verbosity repoCtxt + + hSetBuffering stdout NoBuffering + + initFlags' <- extendFlags installedPkgIndex sourcePkgDb initFlags + + case license initFlags' of + Flag PublicDomain -> return () + _ -> writeLicense initFlags' + writeSetupFile initFlags' + writeChangeLog initFlags' + createSourceDirectories initFlags' + createMainHs initFlags' + success <- writeCabalFile initFlags' + + when success $ generateWarnings initFlags' + +--------------------------------------------------------------------------- +-- Flag acquisition ----------------------------------------------------- +--------------------------------------------------------------------------- + +-- | Fill in more details by guessing, discovering, or prompting the +-- user. +extendFlags :: InstalledPackageIndex -> SourcePackageDb -> InitFlags -> IO InitFlags +extendFlags pkgIx sourcePkgDb = + getCabalVersion + >=> getPackageName sourcePkgDb + >=> getVersion + >=> getLicense + >=> getAuthorInfo + >=> getHomepage + >=> getSynopsis + >=> getCategory + >=> getExtraSourceFiles + >=> getLibOrExec + >=> getSrcDir + >=> getLanguage + >=> getGenComments + >=> getModulesBuildToolsAndDeps pkgIx + +-- | Combine two actions which may return a value, preferring the first. That +-- is, run the second action only if the first doesn't return a value. +infixr 1 ?>> +(?>>) :: IO (Maybe a) -> IO (Maybe a) -> IO (Maybe a) +f ?>> g = do + ma <- f + if isJust ma + then return ma + else g + +-- | Witness the isomorphism between Maybe and Flag. 
+maybeToFlag :: Maybe a -> Flag a +maybeToFlag = maybe NoFlag Flag + +defaultCabalVersion :: Version +defaultCabalVersion = mkVersion [1,10] + +displayCabalVersion :: Version -> String +displayCabalVersion v = case versionNumbers v of + [1,10] -> "1.10 (legacy)" + [2,0] -> "2.0 (+ support for Backpack, internal sub-libs, '^>=' operator)" + [2,2] -> "2.2 (+ support for 'common', 'elif', redundant commas, SPDX)" + [2,4] -> "2.4 (+ support for '**' globbing)" + _ -> display v + +-- | Ask which version of the cabal spec to use. +getCabalVersion :: InitFlags -> IO InitFlags +getCabalVersion flags = do + cabVer <- return (flagToMaybe $ cabalVersion flags) + ?>> maybePrompt flags (either (const defaultCabalVersion) id `fmap` + promptList "Please choose version of the Cabal specification to use" + [mkVersion [1,10], mkVersion [2,0], mkVersion [2,2], mkVersion [2,4]] + (Just defaultCabalVersion) displayCabalVersion False) + ?>> return (Just defaultCabalVersion) + + return $ flags { cabalVersion = maybeToFlag cabVer } + + +-- | Get the package name: use the package directory (supplied, or the current +-- directory by default) as a guess. It looks at the SourcePackageDb to avoid +-- using an existing package name. 
+getPackageName :: SourcePackageDb -> InitFlags -> IO InitFlags +getPackageName sourcePkgDb flags = do + guess <- traverse guessPackageName (flagToMaybe $ packageDir flags) + ?>> Just `fmap` (getCurrentDirectory >>= guessPackageName) + + let guess' | isPkgRegistered guess = Nothing + | otherwise = guess + + pkgName' <- return (flagToMaybe $ packageName flags) + ?>> maybePrompt flags (prompt "Package name" guess') + ?>> return guess' + + chooseAgain <- if isPkgRegistered pkgName' + then promptYesNo promptOtherNameMsg (Just True) + else return False + + if chooseAgain + then getPackageName sourcePkgDb flags + else return $ flags { packageName = maybeToFlag pkgName' } + + where + isPkgRegistered (Just pkg) = elemByPackageName (packageIndex sourcePkgDb) pkg + isPkgRegistered Nothing = False + + promptOtherNameMsg = "This package name is already used by another " ++ + "package on hackage. Do you want to choose a " ++ + "different name" + +-- | Package version: use 0.1.0.0 as a last resort, but try prompting the user +-- if possible. +getVersion :: InitFlags -> IO InitFlags +getVersion flags = do + let v = Just $ mkVersion [0,1,0,0] + v' <- return (flagToMaybe $ version flags) + ?>> maybePrompt flags (prompt "Package version" v) + ?>> return v + return $ flags { version = maybeToFlag v' } + +-- | Choose a license. 
+getLicense :: InitFlags -> IO InitFlags +getLicense flags = do + lic <- return (flagToMaybe $ license flags) + ?>> fmap (fmap (either UnknownLicense id)) + (maybePrompt flags + (promptList "Please choose a license" listedLicenses + (Just BSD3) displayLicense True)) + + case checkLicenseInvalid lic of + Just msg -> putStrLn msg >> getLicense flags + Nothing -> return $ flags { license = maybeToFlag lic } + + where + displayLicense l | needSpdx = prettyShow (licenseToSPDX l) + | otherwise = display l + + checkLicenseInvalid (Just (UnknownLicense t)) + | needSpdx = case eitherParsec t :: Either String SPDX.License of + Right _ -> Nothing + Left _ -> Just "\nThe license must be a valid SPDX expression." + | otherwise = if any (not . isAlphaNum) t + then Just promptInvalidOtherLicenseMsg + else Nothing + checkLicenseInvalid _ = Nothing + + promptInvalidOtherLicenseMsg = "\nThe license must be alphanumeric. " ++ + "If your license name has many words, " ++ + "the convention is to use camel case (e.g. PublicDomain). " ++ + "Please choose a different license." + + listedLicenses = + knownLicenses \\ [GPL Nothing, LGPL Nothing, AGPL Nothing + , Apache Nothing, OtherLicense] + + needSpdx = maybe False (>= mkVersion [2,2]) $ flagToMaybe (cabalVersion flags) + +-- | The author's name and email. Prompt, or try to guess from an existing +-- darcs repo. +getAuthorInfo :: InitFlags -> IO InitFlags +getAuthorInfo flags = do + (authorName, authorEmail) <- + (flagToMaybe *** flagToMaybe) `fmap` guessAuthorNameMail + authorName' <- return (flagToMaybe $ author flags) + ?>> maybePrompt flags (promptStr "Author name" authorName) + ?>> return authorName + + authorEmail' <- return (flagToMaybe $ email flags) + ?>> maybePrompt flags (promptStr "Maintainer email" authorEmail) + ?>> return authorEmail + + return $ flags { author = maybeToFlag authorName' + , email = maybeToFlag authorEmail' + } + +-- | Prompt for a homepage URL. 
+getHomepage :: InitFlags -> IO InitFlags +getHomepage flags = do + hp <- queryHomepage + hp' <- return (flagToMaybe $ homepage flags) + ?>> maybePrompt flags (promptStr "Project homepage URL" hp) + ?>> return hp + + return $ flags { homepage = maybeToFlag hp' } + +-- | Right now this does nothing, but it could be changed to do some +-- intelligent guessing. +queryHomepage :: IO (Maybe String) +queryHomepage = return Nothing -- get default remote darcs repo? + +-- | Prompt for a project synopsis. +getSynopsis :: InitFlags -> IO InitFlags +getSynopsis flags = do + syn <- return (flagToMaybe $ synopsis flags) + ?>> maybePrompt flags (promptStr "Project synopsis" Nothing) + + return $ flags { synopsis = maybeToFlag syn } + +-- | Prompt for a package category. +-- Note that it should be possible to do some smarter guessing here too, i.e. +-- look at the name of the top level source directory. +getCategory :: InitFlags -> IO InitFlags +getCategory flags = do + cat <- return (flagToMaybe $ category flags) + ?>> fmap join (maybePrompt flags + (promptListOptional "Project category" [Codec ..])) + return $ flags { category = maybeToFlag cat } + +-- | Try to guess extra source files (don't prompt the user). +getExtraSourceFiles :: InitFlags -> IO InitFlags +getExtraSourceFiles flags = do + extraSrcFiles <- return (extraSrc flags) + ?>> Just `fmap` guessExtraSourceFiles flags + + return $ flags { extraSrc = extraSrcFiles } + +defaultChangeLog :: FilePath +defaultChangeLog = "CHANGELOG.md" + +-- | Try to guess things to include in the extra-source-files field. +-- For now, we just look for things in the root directory named +-- 'readme', 'changes', or 'changelog', with any sort of +-- capitalization and any extension. +guessExtraSourceFiles :: InitFlags -> IO [FilePath] +guessExtraSourceFiles flags = do + dir <- + maybe getCurrentDirectory return . 
flagToMaybe $ packageDir flags + files <- getDirectoryContents dir + let extraFiles = filter isExtra files + if any isLikeChangeLog extraFiles + then return extraFiles + else return (defaultChangeLog : extraFiles) + + where + isExtra = likeFileNameBase ("README" : changeLogLikeBases) + isLikeChangeLog = likeFileNameBase changeLogLikeBases + likeFileNameBase candidates = (`elem` candidates) . map toUpper . takeBaseName + changeLogLikeBases = ["CHANGES", "CHANGELOG"] + +-- | Ask whether the project builds a library or executable. +getLibOrExec :: InitFlags -> IO InitFlags +getLibOrExec flags = do + pkgType <- return (flagToMaybe $ packageType flags) + ?>> maybePrompt flags (either (const Library) id `fmap` + promptList "What does the package build" + [Library, Executable, LibraryAndExecutable] + Nothing displayPackageType False) + ?>> return (Just Library) + mainFile <- if pkgType == Just Library then return Nothing else + getMainFile flags + + return $ flags { packageType = maybeToFlag pkgType + , mainIs = maybeToFlag mainFile + } + +-- | Try to guess the main file of the executable, and prompt the user to choose +-- one of them. Top-level modules including the word 'Main' in the file name +-- will be candidates, and shorter filenames will be preferred. +getMainFile :: InitFlags -> IO (Maybe FilePath) +getMainFile flags = + return (flagToMaybe $ mainIs flags) + ?>> do + candidates <- guessMainFileCandidates flags + let showCandidate = either (++" (does not yet exist, but will be created)") id + defaultFile = listToMaybe candidates + maybePrompt flags (either id (either id id) `fmap` + promptList "What is the main module of the executable" + candidates + defaultFile showCandidate True) + ?>> return (fmap (either id id) defaultFile) + +-- | Ask for the base language of the package. 
+getLanguage :: InitFlags -> IO InitFlags +getLanguage flags = do + lang <- return (flagToMaybe $ language flags) + ?>> maybePrompt flags + (either UnknownLanguage id `fmap` + promptList "What base language is the package written in" + [Haskell2010, Haskell98] + (Just Haskell2010) display True) + ?>> return (Just Haskell2010) + + if invalidLanguage lang + then putStrLn invalidOtherLanguageMsg >> getLanguage flags + else return $ flags { language = maybeToFlag lang } + + where + invalidLanguage (Just (UnknownLanguage t)) = any (not . isAlphaNum) t + invalidLanguage _ = False + + invalidOtherLanguageMsg = "\nThe language must be alphanumeric. " ++ + "Please enter a different language." + +-- | Ask whether to generate explanatory comments. +getGenComments :: InitFlags -> IO InitFlags +getGenComments flags = do + genComments <- return (not <$> flagToMaybe (noComments flags)) + ?>> maybePrompt flags (promptYesNo promptMsg (Just False)) + ?>> return (Just False) + return $ flags { noComments = maybeToFlag (fmap not genComments) } + where + promptMsg = "Add informative comments to each field in the cabal file (y/n)" + +-- | Ask for the source root directory. +getSrcDir :: InitFlags -> IO InitFlags +getSrcDir flags = do + srcDirs <- return (sourceDirs flags) + ?>> fmap (:[]) `fmap` guessSourceDir flags + ?>> fmap (>>= fmap ((:[]) . either id id)) (maybePrompt + flags + (promptListOptional' "Source directory" ["src"] id)) + + return $ flags { sourceDirs = srcDirs } + +-- | Try to guess source directory. Could try harder; for the +-- moment just looks to see whether there is a directory called 'src'. +guessSourceDir :: InitFlags -> IO (Maybe String) +guessSourceDir flags = do + dir <- + maybe getCurrentDirectory return . flagToMaybe $ packageDir flags + srcIsDir <- doesDirectoryExist (dir "src") + return $ if srcIsDir + then Just "src" + else Nothing + +-- | Check whether a potential source file is located in one of the +-- source directories. 
+isSourceFile :: Maybe [FilePath] -> SourceFileEntry -> Bool +isSourceFile Nothing sf = isSourceFile (Just ["."]) sf +isSourceFile (Just srcDirs) sf = any (equalFilePath (relativeSourcePath sf)) srcDirs + +-- | Get the list of exposed modules and extra tools needed to build them. +getModulesBuildToolsAndDeps :: InstalledPackageIndex -> InitFlags -> IO InitFlags +getModulesBuildToolsAndDeps pkgIx flags = do + dir <- maybe getCurrentDirectory return . flagToMaybe $ packageDir flags + + sourceFiles0 <- scanForModules dir + + let sourceFiles = filter (isSourceFile (sourceDirs flags)) sourceFiles0 + + Just mods <- return (exposedModules flags) + ?>> (return . Just . map moduleName $ sourceFiles) + + tools <- return (buildTools flags) + ?>> (return . Just . neededBuildPrograms $ sourceFiles) + + deps <- return (dependencies flags) + ?>> Just <$> importsToDeps flags + (fromString "Prelude" : -- to ensure we get base as a dep + ( nub -- only need to consider each imported package once + . filter (`notElem` mods) -- don't consider modules from + -- this package itself + . concatMap imports + $ sourceFiles + ) + ) + pkgIx + + exts <- return (otherExts flags) + ?>> (return . Just . nub . concatMap extensions $ sourceFiles) + + return $ flags { exposedModules = Just mods + , buildTools = tools + , dependencies = deps + , otherExts = exts + } + +importsToDeps :: InitFlags -> [ModuleName] -> InstalledPackageIndex -> IO [P.Dependency] +importsToDeps flags mods pkgIx = do + + let modMap :: M.Map ModuleName [InstalledPackageInfo] + modMap = M.map (filter exposed) $ moduleNameIndex pkgIx + + modDeps :: [(ModuleName, Maybe [InstalledPackageInfo])] + modDeps = map (id &&& flip M.lookup modMap) mods + + message flags "\nGuessing dependencies..." + nub . catMaybes <$> mapM (chooseDep flags) modDeps + +-- Given a module and a list of installed packages providing it, +-- choose a dependency (i.e. package + version range) to use for that +-- module. 
+chooseDep :: InitFlags -> (ModuleName, Maybe [InstalledPackageInfo]) + -> IO (Maybe P.Dependency) + +chooseDep flags (m, Nothing) + = message flags ("\nWarning: no package found providing " ++ display m ++ ".") + >> return Nothing + +chooseDep flags (m, Just []) + = message flags ("\nWarning: no package found providing " ++ display m ++ ".") + >> return Nothing + + -- We found some packages: group them by name. +chooseDep flags (m, Just ps) + = case pkgGroups of + -- if there's only one group, i.e. multiple versions of a single package, + -- we make it into a dependency, choosing the latest-ish version (see toDep). + [grp] -> Just <$> toDep grp + -- otherwise, we refuse to choose between different packages and make the user + -- do it. + grps -> do message flags ("\nWarning: multiple packages found providing " + ++ display m + ++ ": " ++ intercalate ", " (map (display . P.pkgName . head) grps)) + message flags "You will need to pick one and manually add it to the Build-depends: field." + return Nothing + where + pkgGroups = groupBy ((==) `on` P.pkgName) (map P.packageId ps) + + desugar = maybe True (< mkVersion [2]) $ flagToMaybe (cabalVersion flags) + + -- Given a list of available versions of the same package, pick a dependency. + toDep :: [P.PackageIdentifier] -> IO P.Dependency + + -- If only one version, easy. We change e.g. 0.4.2 into 0.4.* + toDep [pid] = return $ P.Dependency (P.pkgName pid) (pvpize desugar . P.pkgVersion $ pid) + + -- Otherwise, choose the latest version and issue a warning. + toDep pids = do + message flags ("\nWarning: multiple versions of " ++ display (P.pkgName . head $ pids) ++ " provide " ++ display m ++ ", choosing the latest.") + return $ P.Dependency (P.pkgName . head $ pids) + (pvpize desugar . maximum . map P.pkgVersion $ pids) + +-- | Given a version, return an API-compatible (according to PVP) version range. 
+-- +-- If the boolean argument denotes whether to use a desugared +-- representation (if 'True') or the new-style @^>=@-form (if +-- 'False'). +-- +-- Example: @pvpize True (mkVersion [0,4,1])@ produces the version range @>= 0.4 && < 0.5@ (which is the +-- same as @0.4.*@). +pvpize :: Bool -> Version -> VersionRange +pvpize False v = majorBoundVersion v +pvpize True v = orLaterVersion v' + `intersectVersionRanges` + earlierVersion (incVersion 1 v') + where v' = alterVersion (take 2) v + +-- | Increment the nth version component (counting from 0). +incVersion :: Int -> Version -> Version +incVersion n = alterVersion (incVersion' n) + where + incVersion' 0 [] = [1] + incVersion' 0 (v:_) = [v+1] + incVersion' m [] = replicate m 0 ++ [1] + incVersion' m (v:vs) = v : incVersion' (m-1) vs + +--------------------------------------------------------------------------- +-- Prompting/user interaction ------------------------------------------- +--------------------------------------------------------------------------- + +-- | Run a prompt or not based on the nonInteractive flag of the +-- InitFlags structure. +maybePrompt :: InitFlags -> IO t -> IO (Maybe t) +maybePrompt flags p = + case nonInteractive flags of + Flag True -> return Nothing + _ -> Just `fmap` p + +-- | Create a prompt with optional default value that returns a +-- String. +promptStr :: String -> Maybe String -> IO String +promptStr = promptDefault' Just id + +-- | Create a yes/no prompt with optional default value. +-- +promptYesNo :: String -> Maybe Bool -> IO Bool +promptYesNo = + promptDefault' recogniseYesNo showYesNo + where + recogniseYesNo s | s == "y" || s == "Y" = Just True + | s == "n" || s == "N" = Just False + | otherwise = Nothing + showYesNo True = "y" + showYesNo False = "n" + +-- | Create a prompt with optional default value that returns a value +-- of some Text instance. +prompt :: Text t => String -> Maybe t -> IO t +prompt = promptDefault' + (either (const Nothing) Just . 
runReadE (readP_to_E id parse)) + display + +-- | Create a prompt with an optional default value. +promptDefault' :: (String -> Maybe t) -- ^ parser + -> (t -> String) -- ^ pretty-printer + -> String -- ^ prompt message + -> Maybe t -- ^ optional default value + -> IO t +promptDefault' parser pretty pr def = do + putStr $ mkDefPrompt pr (pretty `fmap` def) + inp <- getLine + case (inp, def) of + ("", Just d) -> return d + _ -> case parser inp of + Just t -> return t + Nothing -> do putStrLn $ "Couldn't parse " ++ inp ++ ", please try again!" + promptDefault' parser pretty pr def + +-- | Create a prompt from a prompt string and a String representation +-- of an optional default value. +mkDefPrompt :: String -> Maybe String -> String +mkDefPrompt pr def = pr ++ "?" ++ defStr def + where defStr Nothing = " " + defStr (Just s) = " [default: " ++ s ++ "] " + +promptListOptional :: (Text t, Eq t) + => String -- ^ prompt + -> [t] -- ^ choices + -> IO (Maybe (Either String t)) +promptListOptional pr choices = promptListOptional' pr choices display + +promptListOptional' :: Eq t + => String -- ^ prompt + -> [t] -- ^ choices + -> (t -> String) -- ^ show an item + -> IO (Maybe (Either String t)) +promptListOptional' pr choices displayItem = + fmap rearrange + $ promptList pr (Nothing : map Just choices) (Just Nothing) + (maybe "(none)" displayItem) True + where + rearrange = either (Just . Left) (fmap Right) + +-- | Create a prompt from a list of items. +promptList :: Eq t + => String -- ^ prompt + -> [t] -- ^ choices + -> Maybe t -- ^ optional default value + -> (t -> String) -- ^ show an item + -> Bool -- ^ whether to allow an 'other' option + -> IO (Either String t) +promptList pr choices def displayItem other = do + putStrLn $ pr ++ ":" + let options1 = map (\c -> (Just c == def, displayItem c)) choices + options2 = zip ([1..]::[Int]) + (options1 ++ [(False, "Other (specify)") | other]) + mapM_ (putStrLn . 
\(n,(i,s)) -> showOption n i ++ s) options2 + promptList' displayItem (length options2) choices def other + where showOption n i | n < 10 = " " ++ star i ++ " " ++ rest + | otherwise = " " ++ star i ++ rest + where rest = show n ++ ") " + star True = "*" + star False = " " + +promptList' :: (t -> String) -> Int -> [t] -> Maybe t -> Bool -> IO (Either String t) +promptList' displayItem numChoices choices def other = do + putStr $ mkDefPrompt "Your choice" (displayItem `fmap` def) + inp <- getLine + case (inp, def) of + ("", Just d) -> return $ Right d + _ -> case readMaybe inp of + Nothing -> invalidChoice inp + Just n -> getChoice n + where invalidChoice inp = do putStrLn $ inp ++ " is not a valid choice." + promptList' displayItem numChoices choices def other + getChoice n | n < 1 || n > numChoices = invalidChoice (show n) + | n < numChoices || + (n == numChoices && not other) + = return . Right $ choices !! (n-1) + | otherwise = Left `fmap` promptStr "Please specify" Nothing + +--------------------------------------------------------------------------- +-- File generation ------------------------------------------------------ +--------------------------------------------------------------------------- + +writeLicense :: InitFlags -> IO () +writeLicense flags = do + message flags "\nGenerating LICENSE..." + year <- show <$> getYear + let authors = fromMaybe "???" . flagToMaybe . 
author $ flags + let licenseFile = + case license flags of + Flag BSD2 + -> Just $ bsd2 authors year + + Flag BSD3 + -> Just $ bsd3 authors year + + Flag (GPL (Just v)) | v == mkVersion [2] + -> Just gplv2 + + Flag (GPL (Just v)) | v == mkVersion [3] + -> Just gplv3 + + Flag (LGPL (Just v)) | v == mkVersion [2,1] + -> Just lgpl21 + + Flag (LGPL (Just v)) | v == mkVersion [3] + -> Just lgpl3 + + Flag (AGPL (Just v)) | v == mkVersion [3] + -> Just agplv3 + + Flag (Apache (Just v)) | v == mkVersion [2,0] + -> Just apache20 + + Flag MIT + -> Just $ mit authors year + + Flag (MPL v) | v == mkVersion [2,0] + -> Just mpl20 + + Flag ISC + -> Just $ isc authors year + + _ -> Nothing + + case licenseFile of + Just licenseText -> writeFileSafe flags "LICENSE" licenseText + Nothing -> message flags "Warning: unknown license type, you must put a copy in LICENSE yourself." + +getYear :: IO Integer +getYear = do + u <- getCurrentTime + z <- getCurrentTimeZone + let l = utcToLocalTime z u + (y, _, _) = toGregorian $ localDay l + return y + +writeSetupFile :: InitFlags -> IO () +writeSetupFile flags = do + message flags "Generating Setup.hs..." + writeFileSafe flags "Setup.hs" setupFile + where + setupFile = unlines + [ "import Distribution.Simple" + , "main = defaultMain" + ] + +writeChangeLog :: InitFlags -> IO () +writeChangeLog flags = when ((defaultChangeLog `elem`) $ fromMaybe [] (extraSrc flags)) $ do + message flags ("Generating "++ defaultChangeLog ++"...") + writeFileSafe flags defaultChangeLog changeLog + where + changeLog = unlines + [ "# Revision history for " ++ pname + , "" + , "## " ++ pver ++ " -- YYYY-mm-dd" + , "" + , "* First version. Released on an unsuspecting world." + ] + pname = maybe "" display $ flagToMaybe $ packageName flags + pver = maybe "" display $ flagToMaybe $ version flags + + + +writeCabalFile :: InitFlags -> IO Bool +writeCabalFile flags@(InitFlags{packageName = NoFlag}) = do + message flags "Error: no package name provided." 
+ return False +writeCabalFile flags@(InitFlags{packageName = Flag p}) = do + let cabalFileName = display p ++ ".cabal" + message flags $ "Generating " ++ cabalFileName ++ "..." + writeFileSafe flags cabalFileName (generateCabalFile cabalFileName flags) + return True + +-- | Write a file \"safely\", backing up any existing version (unless +-- the overwrite flag is set). +writeFileSafe :: InitFlags -> FilePath -> String -> IO () +writeFileSafe flags fileName content = do + moveExistingFile flags fileName + writeFile fileName content + +-- | Create source directories, if they were given. +createSourceDirectories :: InitFlags -> IO () +createSourceDirectories flags = case sourceDirs flags of + Just dirs -> forM_ dirs (createDirectoryIfMissing True) + Nothing -> return () + +-- | Create Main.hs, but only if we are init'ing an executable and +-- the mainIs flag has been provided. +createMainHs :: InitFlags -> IO () +createMainHs flags = + if hasMainHs flags then + case sourceDirs flags of + Just (srcPath:_) -> writeMainHs flags (srcPath mainFile) + _ -> writeMainHs flags mainFile + else return () + where + Flag mainFile = mainIs flags + +--- | Write a main file if it doesn't already exist. +writeMainHs :: InitFlags -> FilePath -> IO () +writeMainHs flags mainPath = do + dir <- maybe getCurrentDirectory return (flagToMaybe $ packageDir flags) + let mainFullPath = dir mainPath + exists <- doesFileExist mainFullPath + unless exists $ do + message flags $ "Generating " ++ mainPath ++ "..." + writeFileSafe flags mainFullPath mainHs + +-- | Check that a main file exists. +hasMainHs :: InitFlags -> Bool +hasMainHs flags = case mainIs flags of + Flag _ -> (packageType flags == Flag Executable + || packageType flags == Flag LibraryAndExecutable) + _ -> False + +-- | Default Main.hs file. Used when no Main.hs exists. 
+mainHs :: String +mainHs = unlines + [ "module Main where" + , "" + , "main :: IO ()" + , "main = putStrLn \"Hello, Haskell!\"" + ] + +-- | Move an existing file, if there is one, and the overwrite flag is +-- not set. +moveExistingFile :: InitFlags -> FilePath -> IO () +moveExistingFile flags fileName = + unless (overwrite flags == Flag True) $ do + e <- doesFileExist fileName + when e $ do + newName <- findNewName fileName + message flags $ "Warning: " ++ fileName ++ " already exists, backing up old version in " ++ newName + copyFile fileName newName + +findNewName :: FilePath -> IO FilePath +findNewName oldName = findNewName' 0 + where + findNewName' :: Integer -> IO FilePath + findNewName' n = do + let newName = oldName <.> ("save" ++ show n) + e <- doesFileExist newName + if e then findNewName' (n+1) else return newName + +-- | Generate a .cabal file from an InitFlags structure. NOTE: this +-- is rather ad-hoc! What we would REALLY like is to have a +-- standard low-level AST type representing .cabal files, which +-- preserves things like comments, and to write an *inverse* +-- parser/pretty-printer pair between .cabal files and this AST. +-- Then instead of this ad-hoc code we could just map an InitFlags +-- structure onto a low-level AST structure and use the existing +-- pretty-printing code to generate the file. +generateCabalFile :: String -> InitFlags -> String +generateCabalFile fileName c = trimTrailingWS $ + (++ "\n") . + renderStyle style { lineLength = 79, ribbonsPerLine = 1.1 } $ + -- Starting with 2.2 the `cabal-version` field needs to be the first line of the PD + (if specVer < mkVersion [1,12] + then field "cabal-version" (Flag $ orLaterVersion specVer) -- legacy + else field "cabal-version" (Flag $ specVer)) + Nothing -- NB: the first line must be the 'cabal-version' declaration + False + $$ + (if minimal c /= Flag True + then showComment (Just $ "Initial package description '" ++ fileName ++ "' generated " + ++ "by 'cabal init'. 
For further documentation, see " + ++ "http://haskell.org/cabal/users-guide/") + $$ text "" + else empty) + $$ + vcat [ field "name" (packageName c) + (Just "The name of the package.") + True + + , field "version" (version c) + (Just $ "The package version. See the Haskell package versioning policy (PVP) for standards guiding when and how versions should be incremented.\nhttps://pvp.haskell.org\n" + ++ "PVP summary: +-+------- breaking API changes\n" + ++ " | | +----- non-breaking API additions\n" + ++ " | | | +--- code changes with no API change") + True + + , fieldS "synopsis" (synopsis c) + (Just "A short (one-line) description of the package.") + True + + , fieldS "description" NoFlag + (Just "A longer description of the package.") + True + + , fieldS "homepage" (homepage c) + (Just "URL for the project homepage or repository.") + False + + , fieldS "bug-reports" NoFlag + (Just "A URL where users can report bugs.") + True + + , fieldS "license" licenseStr + (Just "The license under which the package is released.") + True + + , case (license c) of + Flag PublicDomain -> empty + _ -> fieldS "license-file" (Flag "LICENSE") + (Just "The file containing the license text.") + True + + , fieldS "author" (author c) + (Just "The package author(s).") + True + + , fieldS "maintainer" (email c) + (Just "An email address to which users can send suggestions, bug reports, and patches.") + True + + , case (license c) of + Flag PublicDomain -> empty + _ -> fieldS "copyright" NoFlag + (Just "A copyright notice.") + True + + , fieldS "category" (either id display `fmap` category c) + Nothing + True + + , fieldS "build-type" (if specVer >= mkVersion [2,2] then NoFlag else Flag "Simple") + Nothing + False + + , fieldS "extra-source-files" (listFieldS (extraSrc c)) + (Just "Extra files to be distributed with the package, such as examples or a README.") + True + + , case packageType c of + Flag Executable -> executableStanza + Flag Library -> libraryStanza + Flag LibraryAndExecutable 
-> libraryStanza $+$ executableStanza + _ -> empty + ] + where + specVer = fromMaybe defaultCabalVersion $ flagToMaybe (cabalVersion c) + + licenseStr | specVer < mkVersion [2,2] = prettyShow `fmap` license c + | otherwise = go `fmap` license c + where + go (UnknownLicense s) = s + go l = prettyShow (licenseToSPDX l) + + generateBuildInfo :: BuildType -> InitFlags -> Doc + generateBuildInfo buildType c' = vcat + [ fieldS "other-modules" (listField (otherModules c')) + (Just $ case buildType of + LibBuild -> "Modules included in this library but not exported." + ExecBuild -> "Modules included in this executable, other than Main.") + True + + , fieldS "other-extensions" (listField (otherExts c')) + (Just "LANGUAGE extensions used by modules in this package.") + True + + , fieldS "build-depends" (listField (dependencies c')) + (Just "Other library packages from which modules are imported.") + True + + , fieldS "hs-source-dirs" (listFieldS (sourceDirs c')) + (Just "Directories containing source files.") + True + + , fieldS "build-tools" (listFieldS (buildTools c')) + (Just "Extra tools (e.g. alex, hsc2hs, ...) needed to build the source.") + False + + , field "default-language" (language c') + (Just "Base language which the package is written in.") + True + ] + + listField :: Text s => Maybe [s] -> Flag String + listField = listFieldS . fmap (map display) + + listFieldS :: Maybe [String] -> Flag String + listFieldS = Flag . maybe "" (intercalate ", ") + + field :: Text t => String -> Flag t -> Maybe String -> Bool -> Doc + field s f = fieldS s (fmap display f) + + fieldS :: String -- ^ Name of the field + -> Flag String -- ^ Field contents + -> Maybe String -- ^ Comment to explain the field + -> Bool -- ^ Should the field be included (commented out) even if blank? 
+ -> Doc + fieldS _ NoFlag _ inc | not inc || (minimal c == Flag True) = empty + fieldS _ (Flag "") _ inc | not inc || (minimal c == Flag True) = empty + fieldS s f com _ = case (isJust com, noComments c, minimal c) of + (_, _, Flag True) -> id + (_, Flag True, _) -> id + (True, _, _) -> (showComment com $$) . ($$ text "") + (False, _, _) -> ($$ text "") + $ + comment f <<>> text s <<>> colon + <<>> text (replicate (20 - length s) ' ') + <<>> text (fromMaybe "" . flagToMaybe $ f) + comment NoFlag = text "-- " + comment (Flag "") = text "-- " + comment _ = text "" + + showComment :: Maybe String -> Doc + showComment (Just t) = vcat + . map (text . ("-- "++)) . lines + . renderStyle style { + lineLength = 76, + ribbonsPerLine = 1.05 + } + . vcat + . map (fcat . map text . breakLine) + . lines + $ t + showComment Nothing = text "" + + breakLine [] = [] + breakLine cs = case break (==' ') cs of (w,cs') -> w : breakLine' cs' + breakLine' [] = [] + breakLine' cs = case span (==' ') cs of (w,cs') -> w : breakLine cs' + + trimTrailingWS :: String -> String + trimTrailingWS = unlines . map (dropWhileEndLE isSpace) . lines + + executableStanza :: Doc + executableStanza = text "\nexecutable" <+> + text (maybe "" display . flagToMaybe $ packageName c) $$ + nest 2 (vcat + [ fieldS "main-is" (mainIs c) (Just ".hs or .lhs file containing the Main module.") True + + , generateBuildInfo ExecBuild c + ]) + + libraryStanza :: Doc + libraryStanza = text "\nlibrary" $$ nest 2 (vcat + [ fieldS "exposed-modules" (listField (exposedModules c)) + (Just "Modules exported by the library.") + True + + , generateBuildInfo LibBuild c + ]) + + +-- | Generate warnings for missing fields etc. +generateWarnings :: InitFlags -> IO () +generateWarnings flags = do + message flags "" + when (synopsis flags `elem` [NoFlag, Flag ""]) + (message flags "Warning: no synopsis given. 
You should edit the .cabal file and add one.") + + message flags "You may want to edit the .cabal file and add a Description field." + +-- | Possibly generate a message to stdout, taking into account the +-- --quiet flag. +message :: InitFlags -> String -> IO () +message (InitFlags{quiet = Flag True}) _ = return () +message _ s = putStrLn s diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Install.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Install.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Install.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Install.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,1622 @@ +{-# LANGUAGE CPP #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Install +-- Copyright : (c) 2005 David Himmelstrup +-- 2007 Bjorn Bringert +-- 2007-2010 Duncan Coutts +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- High level interface to package installation. 
+----------------------------------------------------------------------------- +module Distribution.Client.Install ( + -- * High-level interface + install, + + -- * Lower-level interface that allows to manipulate the install plan + makeInstallContext, + makeInstallPlan, + processInstallPlan, + InstallArgs, + InstallContext, + + -- * Prune certain packages from the install plan + pruneInstallPlan + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import qualified Data.Map as Map +import qualified Data.Set as S +import Control.Exception as Exception + ( Exception(toException), bracket, catches + , Handler(Handler), handleJust, IOException, SomeException ) +#ifndef mingw32_HOST_OS +import Control.Exception as Exception + ( Exception(fromException) ) +#endif +import System.Exit + ( ExitCode(..) ) +import Distribution.Compat.Exception + ( catchIO, catchExit ) +import Control.Monad + ( forM_, mapM ) +import System.Directory + ( getTemporaryDirectory, doesDirectoryExist, doesFileExist, + createDirectoryIfMissing, removeFile, renameDirectory, + getDirectoryContents ) +import System.FilePath + ( (), (<.>), equalFilePath, takeDirectory ) +import System.IO + ( openFile, IOMode(AppendMode), hClose ) +import System.IO.Error + ( isDoesNotExistError, ioeGetFileName ) + +import Distribution.Client.Targets +import Distribution.Client.Configure + ( chooseCabalVersion, configureSetupScript, checkConfigExFlags ) +import Distribution.Client.Dependency +import Distribution.Client.Dependency.Types + ( Solver(..) ) +import Distribution.Client.FetchUtils +import Distribution.Client.HttpUtils + ( HttpTransport (..) 
) +import Distribution.Solver.Types.PackageFixedDeps +import qualified Distribution.Client.Haddock as Haddock (regenerateHaddockIndex) +import Distribution.Client.IndexUtils as IndexUtils + ( getSourcePackagesAtIndexState, getInstalledPackages ) +import qualified Distribution.Client.InstallPlan as InstallPlan +import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan +import Distribution.Client.InstallPlan (InstallPlan) +import Distribution.Client.SolverInstallPlan (SolverInstallPlan) +import Distribution.Client.Setup + ( GlobalFlags(..), RepoContext(..) + , ConfigFlags(..), configureCommand, filterConfigureFlags + , ConfigExFlags(..), InstallFlags(..) ) +import Distribution.Client.Config + ( getCabalDir, defaultUserInstall ) +import Distribution.Client.Sandbox.Timestamp + ( withUpdateTimestamps ) +import Distribution.Client.Sandbox.Types + ( SandboxPackageInfo(..), UseSandbox(..), isUseSandbox + , whenUsingSandbox ) +import Distribution.Client.Tar (extractTarGzFile) +import Distribution.Client.Types as Source +import Distribution.Client.BuildReports.Types + ( ReportLevel(..) 
) +import Distribution.Client.SetupWrapper + ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) +import qualified Distribution.Client.BuildReports.Anonymous as BuildReports +import qualified Distribution.Client.BuildReports.Storage as BuildReports + ( storeAnonymous, storeLocal, fromInstallPlan, fromPlanningFailure ) +import qualified Distribution.Client.InstallSymlink as InstallSymlink + ( OverwritePolicy(..), symlinkBinaries ) +import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade +import qualified Distribution.Client.World as World +import qualified Distribution.InstalledPackageInfo as Installed +import Distribution.Client.JobControl + +import qualified Distribution.Solver.Types.ComponentDeps as CD +import Distribution.Solver.Types.ConstraintSource +import Distribution.Solver.Types.Settings +import Distribution.Solver.Types.LabeledPackageConstraint +import Distribution.Solver.Types.OptionalStanza +import qualified Distribution.Solver.Types.PackageIndex as SourcePackageIndex +import Distribution.Solver.Types.PkgConfigDb + ( PkgConfigDb, readPkgConfigDb ) +import Distribution.Solver.Types.SourcePackage as SourcePackage + +import Distribution.Utils.NubList +import Distribution.Simple.Compiler + ( CompilerId(..), Compiler(compilerId), compilerFlavor + , CompilerInfo(..), compilerInfo, PackageDB(..), PackageDBStack ) +import Distribution.Simple.Program (ProgramDb) +import qualified Distribution.Simple.InstallDirs as InstallDirs +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import Distribution.Simple.Setup + ( haddockCommand, HaddockFlags(..) + , buildCommand, BuildFlags(..), emptyBuildFlags + , toFlag, fromFlag, fromFlagOrDefault, flagToMaybe, defaultDistPref ) +import qualified Distribution.Simple.Setup as Cabal + ( Flag(..) 
+ , copyCommand, CopyFlags(..), emptyCopyFlags + , registerCommand, RegisterFlags(..), emptyRegisterFlags + , testCommand, TestFlags(..), emptyTestFlags ) +import Distribution.Simple.Utils + ( createDirectoryIfMissingVerbose, comparing + , writeFileAtomic, withUTF8FileContents ) +import Distribution.Simple.InstallDirs as InstallDirs + ( PathTemplate, fromPathTemplate, toPathTemplate, substPathTemplate + , initialPathTemplateEnv, installDirsTemplateEnv ) +import Distribution.Simple.Configure (interpretPackageDbFlags) +import Distribution.Simple.Register (registerPackage, defaultRegisterOptions) +import Distribution.Package + ( PackageIdentifier(..), PackageId, packageName, packageVersion + , Package(..), HasMungedPackageId(..), HasUnitId(..) + , UnitId ) +import Distribution.Types.Dependency + ( Dependency(..), thisPackageVersion ) +import Distribution.Types.MungedPackageId +import qualified Distribution.PackageDescription as PackageDescription +import Distribution.PackageDescription + ( PackageDescription, GenericPackageDescription(..), Flag(..) + , FlagAssignment, mkFlagAssignment, unFlagAssignment + , showFlagValue, diffFlagAssignment, nullFlagAssignment ) +import Distribution.PackageDescription.Configuration + ( finalizePD ) +import Distribution.ParseUtils + ( showPWarning ) +import Distribution.Version + ( Version, VersionRange, foldVersionRange ) +import Distribution.Simple.Utils as Utils + ( notice, info, warn, debug, debugNoWrap, die' + , withTempDirectory ) +import Distribution.Client.Utils + ( determineNumJobs, logDirChange, mergeBy, MergeResult(..) 
+ , tryCanonicalizePath, ProgressPhase(..), progressMessage ) +import Distribution.System + ( Platform, OS(Windows), buildOS, buildPlatform ) +import Distribution.Text + ( display ) +import Distribution.Verbosity as Verbosity + ( Verbosity, modifyVerbosity, normal, verbose ) +import Distribution.Simple.BuildPaths ( exeExtension ) + +--TODO: +-- * assign flags to packages individually +-- * complain about flags that do not apply to any package given as target +-- so flags do not apply to dependencies, only listed, can use flag +-- constraints for dependencies +-- * only record applicable flags in world file +-- * allow flag constraints +-- * allow installed constraints +-- * allow flag and installed preferences +-- * change world file to use cabal section syntax +-- * allow persistent configure flags for each package individually + +-- ------------------------------------------------------------ +-- * Top level user actions +-- ------------------------------------------------------------ + +-- | Installs the packages needed to satisfy a list of dependencies. 
+-- +install + :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> Platform + -> ProgramDb + -> UseSandbox + -> Maybe SandboxPackageInfo + -> GlobalFlags + -> ConfigFlags + -> ConfigExFlags + -> InstallFlags + -> HaddockFlags + -> [UserTarget] + -> IO () +install verbosity packageDBs repos comp platform progdb useSandbox mSandboxPkgInfo + globalFlags configFlags configExFlags installFlags haddockFlags + userTargets0 = do + + unless (installRootCmd installFlags == Cabal.NoFlag) $ + warn verbosity $ "--root-cmd is no longer supported, " + ++ "see https://github.com/haskell/cabal/issues/3353" + ++ " (if you didn't type --root-cmd, comment out root-cmd" + ++ " in your ~/.cabal/config file)" + let userOrSandbox = fromFlag (configUserInstall configFlags) + || isUseSandbox useSandbox + unless userOrSandbox $ + warn verbosity $ "the --global flag is deprecated -- " + ++ "it is generally considered a bad idea to install packages " + ++ "into the global store" + + installContext <- makeInstallContext verbosity args (Just userTargets0) + planResult <- foldProgress logMsg (return . Left) (return . Right) =<< + makeInstallPlan verbosity args installContext + + case planResult of + Left message -> do + reportPlanningFailure verbosity args installContext message + die'' message + Right installPlan -> + processInstallPlan verbosity args installContext installPlan + where + args :: InstallArgs + args = (packageDBs, repos, comp, platform, progdb, useSandbox, + mSandboxPkgInfo, globalFlags, configFlags, configExFlags, + installFlags, haddockFlags) + + die'' message = die' verbosity (message ++ if isUseSandbox useSandbox + then installFailedInSandbox else []) + -- TODO: use a better error message, remove duplication. + installFailedInSandbox = + "\nNote: when using a sandbox, all packages are required to have " + ++ "consistent dependencies. " + ++ "Try reinstalling/unregistering the offending packages or " + ++ "recreating the sandbox." 
+ logMsg message rest = debugNoWrap verbosity message >> rest + +-- TODO: Make InstallContext a proper data type with documented fields. +-- | Common context for makeInstallPlan and processInstallPlan. +type InstallContext = ( InstalledPackageIndex, SourcePackageDb + , PkgConfigDb + , [UserTarget], [PackageSpecifier UnresolvedSourcePackage] + , HttpTransport ) + +-- TODO: Make InstallArgs a proper data type with documented fields or just get +-- rid of it completely. +-- | Initial arguments given to 'install' or 'makeInstallContext'. +type InstallArgs = ( PackageDBStack + , RepoContext + , Compiler + , Platform + , ProgramDb + , UseSandbox + , Maybe SandboxPackageInfo + , GlobalFlags + , ConfigFlags + , ConfigExFlags + , InstallFlags + , HaddockFlags ) + +-- | Make an install context given install arguments. +makeInstallContext :: Verbosity -> InstallArgs -> Maybe [UserTarget] + -> IO InstallContext +makeInstallContext verbosity + (packageDBs, repoCtxt, comp, _, progdb,_,_, + globalFlags, _, configExFlags, installFlags, _) mUserTargets = do + + let idxState = flagToMaybe (installIndexState installFlags) + + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb + sourcePkgDb <- getSourcePackagesAtIndexState verbosity repoCtxt idxState + pkgConfigDb <- readPkgConfigDb verbosity progdb + + checkConfigExFlags verbosity installedPkgIndex + (packageIndex sourcePkgDb) configExFlags + transport <- repoContextGetTransport repoCtxt + + (userTargets, pkgSpecifiers) <- case mUserTargets of + Nothing -> + -- We want to distinguish between the case where the user has given an + -- empty list of targets on the command-line and the case where we + -- specifically want to have an empty list of targets. + return ([], []) + Just userTargets0 -> do + -- For install, if no target is given it means we use the current + -- directory as the single target. 
+ let userTargets | null userTargets0 = [UserTargetLocalDir "."] + | otherwise = userTargets0 + + pkgSpecifiers <- resolveUserTargets verbosity repoCtxt + (fromFlag $ globalWorldFile globalFlags) + (packageIndex sourcePkgDb) + userTargets + return (userTargets, pkgSpecifiers) + + return (installedPkgIndex, sourcePkgDb, pkgConfigDb, userTargets + ,pkgSpecifiers, transport) + +-- | Make an install plan given install context and install arguments. +makeInstallPlan :: Verbosity -> InstallArgs -> InstallContext + -> IO (Progress String String SolverInstallPlan) +makeInstallPlan verbosity + (_, _, comp, platform, _, _, mSandboxPkgInfo, + _, configFlags, configExFlags, installFlags, + _) + (installedPkgIndex, sourcePkgDb, pkgConfigDb, + _, pkgSpecifiers, _) = do + + solver <- chooseSolver verbosity (fromFlag (configSolver configExFlags)) + (compilerInfo comp) + notice verbosity "Resolving dependencies..." + return $ planPackages verbosity comp platform mSandboxPkgInfo solver + configFlags configExFlags installFlags + installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers + +-- | Given an install plan, perform the actual installations. 
+processInstallPlan :: Verbosity -> InstallArgs -> InstallContext + -> SolverInstallPlan + -> IO () +processInstallPlan verbosity + args@(_,_, _, _, _, _, _, _, configFlags, _, installFlags, _) + (installedPkgIndex, sourcePkgDb, _, + userTargets, pkgSpecifiers, _) installPlan0 = do + + checkPrintPlan verbosity installedPkgIndex installPlan sourcePkgDb + installFlags pkgSpecifiers + + unless (dryRun || nothingToInstall) $ do + buildOutcomes <- performInstallations verbosity + args installedPkgIndex installPlan + postInstallActions verbosity args userTargets installPlan buildOutcomes + where + installPlan = InstallPlan.configureInstallPlan configFlags installPlan0 + dryRun = fromFlag (installDryRun installFlags) + nothingToInstall = null (fst (InstallPlan.ready installPlan)) + +-- ------------------------------------------------------------ +-- * Installation planning +-- ------------------------------------------------------------ + +planPackages :: Verbosity + -> Compiler + -> Platform + -> Maybe SandboxPackageInfo + -> Solver + -> ConfigFlags + -> ConfigExFlags + -> InstallFlags + -> InstalledPackageIndex + -> SourcePackageDb + -> PkgConfigDb + -> [PackageSpecifier UnresolvedSourcePackage] + -> Progress String String SolverInstallPlan +planPackages verbosity comp platform mSandboxPkgInfo solver + configFlags configExFlags installFlags + installedPkgIndex sourcePkgDb pkgConfigDb pkgSpecifiers = + + resolveDependencies + platform (compilerInfo comp) pkgConfigDb + solver + resolverParams + + >>= if onlyDeps then pruneInstallPlan pkgSpecifiers else return + + where + resolverParams = + + setMaxBackjumps (if maxBackjumps < 0 then Nothing + else Just maxBackjumps) + + . setIndependentGoals independentGoals + + . setReorderGoals reorderGoals + + . setCountConflicts countConflicts + + . setAvoidReinstalls avoidReinstalls + + . setShadowPkgs shadowPkgs + + . setStrongFlags strongFlags + + . setAllowBootLibInstalls allowBootLibInstalls + + . 
setSolverVerbosity verbosity + + . setPreferenceDefault (if upgradeDeps then PreferAllLatest + else PreferLatestForSelected) + + . removeLowerBounds allowOlder + . removeUpperBounds allowNewer + + . addPreferences + -- preferences from the config file or command line + [ PackageVersionPreference name ver + | Dependency name ver <- configPreferences configExFlags ] + + . addConstraints + -- version constraints from the config file or command line + [ LabeledPackageConstraint (userToPackageConstraint pc) src + | (pc, src) <- configExConstraints configExFlags ] + + . addConstraints + --FIXME: this just applies all flags to all targets which + -- is silly. We should check if the flags are appropriate + [ let pc = PackageConstraint + (scopeToplevel $ pkgSpecifierTarget pkgSpecifier) + (PackagePropertyFlags flags) + in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget + | let flags = configConfigurationsFlags configFlags + , not (nullFlagAssignment flags) + , pkgSpecifier <- pkgSpecifiers ] + + . addConstraints + [ let pc = PackageConstraint + (scopeToplevel $ pkgSpecifierTarget pkgSpecifier) + (PackagePropertyStanzas stanzas) + in LabeledPackageConstraint pc ConstraintSourceConfigFlagOrTarget + | pkgSpecifier <- pkgSpecifiers ] + + . maybe id applySandboxInstallPolicy mSandboxPkgInfo + + . (if reinstall then reinstallTargets else id) + + -- Don't solve for executables, the legacy install codepath + -- doesn't understand how to install them + . 
setSolveExecutables (SolveExecutables False) + + $ standardInstallPolicy + installedPkgIndex sourcePkgDb pkgSpecifiers + + stanzas = [ TestStanzas | testsEnabled ] + ++ [ BenchStanzas | benchmarksEnabled ] + testsEnabled = fromFlagOrDefault False $ configTests configFlags + benchmarksEnabled = fromFlagOrDefault False $ configBenchmarks configFlags + + reinstall = fromFlag (installOverrideReinstall installFlags) || + fromFlag (installReinstall installFlags) + reorderGoals = fromFlag (installReorderGoals installFlags) + countConflicts = fromFlag (installCountConflicts installFlags) + independentGoals = fromFlag (installIndependentGoals installFlags) + avoidReinstalls = fromFlag (installAvoidReinstalls installFlags) + shadowPkgs = fromFlag (installShadowPkgs installFlags) + strongFlags = fromFlag (installStrongFlags installFlags) + maxBackjumps = fromFlag (installMaxBackjumps installFlags) + allowBootLibInstalls = fromFlag (installAllowBootLibInstalls installFlags) + upgradeDeps = fromFlag (installUpgradeDeps installFlags) + onlyDeps = fromFlag (installOnlyDeps installFlags) + + allowOlder = fromMaybe (AllowOlder mempty) + (configAllowOlder configExFlags) + allowNewer = fromMaybe (AllowNewer mempty) + (configAllowNewer configExFlags) + +-- | Remove the provided targets from the install plan. +pruneInstallPlan :: Package targetpkg + => [PackageSpecifier targetpkg] + -> SolverInstallPlan + -> Progress String String SolverInstallPlan +pruneInstallPlan pkgSpecifiers = + -- TODO: this is a general feature and should be moved to D.C.Dependency + -- Also, the InstallPlan.remove should return info more precise to the + -- problem, rather than the very general PlanProblem type. + either (Fail . explain) Done + . 
SolverInstallPlan.remove (\pkg -> packageName pkg `elem` targetnames) + where + explain :: [SolverInstallPlan.SolverPlanProblem] -> String + explain problems = + "Cannot select only the dependencies (as requested by the " + ++ "'--only-dependencies' flag), " + ++ (case pkgids of + [pkgid] -> "the package " ++ display pkgid ++ " is " + _ -> "the packages " + ++ intercalate ", " (map display pkgids) ++ " are ") + ++ "required by a dependency of one of the other targets." + where + pkgids = + nub [ depid + | SolverInstallPlan.PackageMissingDeps _ depids <- problems + , depid <- depids + , packageName depid `elem` targetnames ] + + targetnames = map pkgSpecifierTarget pkgSpecifiers + +-- ------------------------------------------------------------ +-- * Informational messages +-- ------------------------------------------------------------ + +-- | Perform post-solver checks of the install plan and print it if +-- either requested or needed. +checkPrintPlan :: Verbosity + -> InstalledPackageIndex + -> InstallPlan + -> SourcePackageDb + -> InstallFlags + -> [PackageSpecifier UnresolvedSourcePackage] + -> IO () +checkPrintPlan verbosity installed installPlan sourcePkgDb + installFlags pkgSpecifiers = do + + -- User targets that are already installed. + let preExistingTargets = + [ p | let tgts = map pkgSpecifierTarget pkgSpecifiers, + InstallPlan.PreExisting p <- InstallPlan.toList installPlan, + packageName p `elem` tgts ] + + -- If there's nothing to install, we print the already existing + -- target packages as an explanation. + when nothingToInstall $ + notice verbosity $ unlines $ + "All the requested packages are already installed:" + : map (display . packageId) preExistingTargets + ++ ["Use --reinstall if you want to reinstall anyway."] + + let lPlan = + [ (pkg, status) + | pkg <- InstallPlan.executionOrder installPlan + , let status = packageStatus installed pkg ] + -- Are any packages classified as reinstalls? 
+ let reinstalledPkgs = + [ ipkg + | (_pkg, status) <- lPlan + , ipkg <- extractReinstalls status ] + -- Packages that are already broken. + let oldBrokenPkgs = + map Installed.installedUnitId + . PackageIndex.reverseDependencyClosure installed + . map (Installed.installedUnitId . fst) + . PackageIndex.brokenPackages + $ installed + let excluded = reinstalledPkgs ++ oldBrokenPkgs + -- Packages that are reverse dependencies of replaced packages are very + -- likely to be broken. We exclude packages that are already broken. + let newBrokenPkgs = + filter (\ p -> not (Installed.installedUnitId p `elem` excluded)) + (PackageIndex.reverseDependencyClosure installed reinstalledPkgs) + let containsReinstalls = not (null reinstalledPkgs) + let breaksPkgs = not (null newBrokenPkgs) + + let adaptedVerbosity + | containsReinstalls + , not overrideReinstall = modifyVerbosity (max verbose) verbosity + | otherwise = verbosity + + -- We print the install plan if we are in a dry-run or if we are confronted + -- with a dangerous install plan. + when (dryRun || containsReinstalls && not overrideReinstall) $ + printPlan (dryRun || breaksPkgs && not overrideReinstall) + adaptedVerbosity lPlan sourcePkgDb + + -- If the install plan is dangerous, we print various warning messages. In + -- particular, if we can see that packages are likely to be broken, we even + -- bail out (unless installation has been forced with --force-reinstalls). + when containsReinstalls $ do + if breaksPkgs + then do + (if dryRun || overrideReinstall then warn else die') verbosity $ unlines $ + "The following packages are likely to be broken by the reinstalls:" + : map (display . mungedId) newBrokenPkgs + ++ if overrideReinstall + then if dryRun then [] else + ["Continuing even though " ++ + "the plan contains dangerous reinstalls."] + else + ["Use --force-reinstalls if you want to install anyway."] + else unless dryRun $ warn verbosity + "Note that reinstalls are always dangerous. Continuing anyway..." 
+ + -- If we are explicitly told to not download anything, check that all packages + -- are already fetched. + let offline = fromFlagOrDefault False (installOfflineMode installFlags) + when offline $ do + let pkgs = [ confPkgSource cpkg + | InstallPlan.Configured cpkg <- InstallPlan.toList installPlan ] + notFetched <- fmap (map packageInfoId) + . filterM (fmap isNothing . checkFetched . packageSource) + $ pkgs + unless (null notFetched) $ + die' verbosity $ "Can't download packages in offline mode. " + ++ "Must download the following packages to proceed:\n" + ++ intercalate ", " (map display notFetched) + ++ "\nTry using 'cabal fetch'." + + where + nothingToInstall = null (fst (InstallPlan.ready installPlan)) + + dryRun = fromFlag (installDryRun installFlags) + overrideReinstall = fromFlag (installOverrideReinstall installFlags) + +data PackageStatus = NewPackage + | NewVersion [Version] + | Reinstall [UnitId] [PackageChange] + +type PackageChange = MergeResult MungedPackageId MungedPackageId + +extractReinstalls :: PackageStatus -> [UnitId] +extractReinstalls (Reinstall ipids _) = ipids +extractReinstalls _ = [] + +packageStatus :: InstalledPackageIndex + -> ReadyPackage + -> PackageStatus +packageStatus installedPkgIndex cpkg = + case PackageIndex.lookupPackageName installedPkgIndex + (packageName cpkg) of + [] -> NewPackage + ps -> case filter ((== mungedId cpkg) + . 
mungedId) (concatMap snd ps) of + [] -> NewVersion (map fst ps) + pkgs@(pkg:_) -> Reinstall (map Installed.installedUnitId pkgs) + (changes pkg cpkg) + + where + + changes :: Installed.InstalledPackageInfo + -> ReadyPackage + -> [PackageChange] + changes pkg (ReadyPackage pkg') = filter changed $ + mergeBy (comparing mungedName) + -- deps of installed pkg + (resolveInstalledIds $ Installed.depends pkg) + -- deps of configured pkg + (resolveInstalledIds $ CD.nonSetupDeps (depends pkg')) + + -- convert to source pkg ids via index + resolveInstalledIds :: [UnitId] -> [MungedPackageId] + resolveInstalledIds = + nub + . sort + . map mungedId + . mapMaybe (PackageIndex.lookupUnitId installedPkgIndex) + + changed (InBoth pkgid pkgid') = pkgid /= pkgid' + changed _ = True + +printPlan :: Bool -- is dry run + -> Verbosity + -> [(ReadyPackage, PackageStatus)] + -> SourcePackageDb + -> IO () +printPlan dryRun verbosity plan sourcePkgDb = case plan of + [] -> return () + pkgs + | verbosity >= Verbosity.verbose -> notice verbosity $ unlines $ + ("In order, the following " ++ wouldWill ++ " be installed:") + : map showPkgAndReason pkgs + | otherwise -> notice verbosity $ unlines $ + ("In order, the following " ++ wouldWill + ++ " be installed (use -v for more details):") + : map showPkg pkgs + where + wouldWill | dryRun = "would" + | otherwise = "will" + + showPkg (pkg, _) = display (packageId pkg) ++ + showLatest (pkg) + + showPkgAndReason (ReadyPackage pkg', pr) = display (packageId pkg') ++ + showLatest pkg' ++ + showFlagAssignment (nonDefaultFlags pkg') ++ + showStanzas (confPkgStanzas pkg') ++ + showDep pkg' ++ + case pr of + NewPackage -> " (new package)" + NewVersion _ -> " (new version)" + Reinstall _ cs -> " (reinstall)" ++ case cs of + [] -> "" + diff -> " (changes: " ++ intercalate ", " (map change diff) + ++ ")" + + showLatest :: Package srcpkg => srcpkg -> String + showLatest pkg = case mLatestVersion of + Just latestVersion -> + if packageVersion pkg < 
latestVersion + then (" (latest: " ++ display latestVersion ++ ")") + else "" + Nothing -> "" + where + mLatestVersion :: Maybe Version + mLatestVersion = case SourcePackageIndex.lookupPackageName + (packageIndex sourcePkgDb) + (packageName pkg) of + [] -> Nothing + x -> Just $ packageVersion $ last x + + toFlagAssignment :: [Flag] -> FlagAssignment + toFlagAssignment = mkFlagAssignment . map (\ f -> (flagName f, flagDefault f)) + + nonDefaultFlags :: ConfiguredPackage loc -> FlagAssignment + nonDefaultFlags cpkg = + let defaultAssignment = + toFlagAssignment + (genPackageFlags (SourcePackage.packageDescription $ + confPkgSource cpkg)) + in confPkgFlags cpkg `diffFlagAssignment` defaultAssignment + + showStanzas :: [OptionalStanza] -> String + showStanzas = concatMap ((" *" ++) . showStanza) + + showFlagAssignment :: FlagAssignment -> String + showFlagAssignment = concatMap ((' ' :) . showFlagValue) . unFlagAssignment + + change (OnlyInLeft pkgid) = display pkgid ++ " removed" + change (InBoth pkgid pkgid') = display pkgid ++ " -> " + ++ display (mungedVersion pkgid') + change (OnlyInRight pkgid') = display pkgid' ++ " added" + + showDep pkg | Just rdeps <- Map.lookup (packageId pkg) revDeps + = " (via: " ++ unwords (map display rdeps) ++ ")" + | otherwise = "" + + revDepGraphEdges :: [(PackageId, PackageId)] + revDepGraphEdges = [ (rpid, packageId cpkg) + | (ReadyPackage cpkg, _) <- plan + , ConfiguredId rpid (Just PackageDescription.CLibName) _ + <- CD.flatDeps (confPkgDeps cpkg) ] + + revDeps :: Map.Map PackageId [PackageId] + revDeps = Map.fromListWith (++) (map (fmap (:[])) revDepGraphEdges) + +-- ------------------------------------------------------------ +-- * Post installation stuff +-- ------------------------------------------------------------ + +-- | Report a solver failure. This works slightly differently to +-- 'postInstallActions', as (by definition) we don't have an install plan. 
+reportPlanningFailure :: Verbosity -> InstallArgs -> InstallContext -> String + -> IO () +reportPlanningFailure verbosity + (_, _, comp, platform, _, _, _ + ,_, configFlags, _, installFlags, _) + (_, sourcePkgDb, _, _, pkgSpecifiers, _) + message = do + + when reportFailure $ do + + -- Only create reports for explicitly named packages + let pkgids = filter + (SourcePackageIndex.elemByPackageId (packageIndex sourcePkgDb)) $ + mapMaybe theSpecifiedPackage pkgSpecifiers + + buildReports = BuildReports.fromPlanningFailure platform + (compilerId comp) pkgids + (configConfigurationsFlags configFlags) + + unless (null buildReports) $ + info verbosity $ + "Solver failure will be reported for " + ++ intercalate "," (map display pkgids) + + -- Save reports + BuildReports.storeLocal (compilerInfo comp) + (fromNubList $ installSummaryFile installFlags) + buildReports platform + + -- Save solver log + case logFile of + Nothing -> return () + Just template -> forM_ pkgids $ \pkgid -> + let env = initialPathTemplateEnv pkgid dummyIpid + (compilerInfo comp) platform + path = fromPathTemplate $ substPathTemplate env template + in writeFile path message + + where + reportFailure = fromFlag (installReportPlanningFailure installFlags) + logFile = flagToMaybe (installLogFile installFlags) + + -- A IPID is calculated from the transitive closure of + -- dependencies, but when the solver fails we don't have that. + -- So we fail. + dummyIpid = error "reportPlanningFailure: installed package ID not available" + +-- | If a 'PackageSpecifier' refers to a single package, return Just that +-- package. 
+theSpecifiedPackage :: Package pkg => PackageSpecifier pkg -> Maybe PackageId +theSpecifiedPackage pkgSpec = + case pkgSpec of + NamedPackage name [PackagePropertyVersion version] + -> PackageIdentifier name <$> trivialRange version + NamedPackage _ _ -> Nothing + SpecificSourcePackage pkg -> Just $ packageId pkg + where + -- | If a range includes only a single version, return Just that version. + trivialRange :: VersionRange -> Maybe Version + trivialRange = foldVersionRange + Nothing + Just -- "== v" + (\_ -> Nothing) + (\_ -> Nothing) + (\_ _ -> Nothing) + (\_ _ -> Nothing) + +-- | Various stuff we do after successful or unsuccessfully installing a bunch +-- of packages. This includes: +-- +-- * build reporting, local and remote +-- * symlinking binaries +-- * updating indexes +-- * updating world file +-- * error reporting +-- +postInstallActions :: Verbosity + -> InstallArgs + -> [UserTarget] + -> InstallPlan + -> BuildOutcomes + -> IO () +postInstallActions verbosity + (packageDBs, _, comp, platform, progdb, useSandbox, mSandboxPkgInfo + ,globalFlags, configFlags, _, installFlags, _) + targets installPlan buildOutcomes = do + + updateSandboxTimestampsFile verbosity useSandbox mSandboxPkgInfo + comp platform installPlan buildOutcomes + + unless oneShot $ + World.insert verbosity worldFile + --FIXME: does not handle flags + [ World.WorldPkgInfo dep mempty + | UserTargetNamed dep <- targets ] + + let buildReports = BuildReports.fromInstallPlan platform (compilerId comp) + installPlan buildOutcomes + BuildReports.storeLocal (compilerInfo comp) + (fromNubList $ installSummaryFile installFlags) + buildReports + platform + when (reportingLevel >= AnonymousReports) $ + BuildReports.storeAnonymous buildReports + when (reportingLevel == DetailedReports) $ + storeDetailedBuildReports verbosity logsDir buildReports + + regenerateHaddockIndex verbosity packageDBs comp platform progdb useSandbox + configFlags installFlags buildOutcomes + + symlinkBinaries verbosity 
platform comp configFlags installFlags + installPlan buildOutcomes + + printBuildFailures verbosity buildOutcomes + + where + reportingLevel = fromFlag (installBuildReports installFlags) + logsDir = fromFlag (globalLogsDir globalFlags) + oneShot = fromFlag (installOneShot installFlags) + worldFile = fromFlag $ globalWorldFile globalFlags + +storeDetailedBuildReports :: Verbosity -> FilePath + -> [(BuildReports.BuildReport, Maybe Repo)] -> IO () +storeDetailedBuildReports verbosity logsDir reports = sequence_ + [ do dotCabal <- getCabalDir + let logFileName = display (BuildReports.package report) <.> "log" + logFile = logsDir logFileName + reportsDir = dotCabal "reports" remoteRepoName remoteRepo + reportFile = reportsDir logFileName + + handleMissingLogFile $ do + buildLog <- readFile logFile + createDirectoryIfMissing True reportsDir -- FIXME + writeFile reportFile (show (BuildReports.show report, buildLog)) + + | (report, Just repo) <- reports + , Just remoteRepo <- [maybeRepoRemote repo] + , isLikelyToHaveLogFile (BuildReports.installOutcome report) ] + + where + isLikelyToHaveLogFile BuildReports.ConfigureFailed {} = True + isLikelyToHaveLogFile BuildReports.BuildFailed {} = True + isLikelyToHaveLogFile BuildReports.InstallFailed {} = True + isLikelyToHaveLogFile BuildReports.InstallOk {} = True + isLikelyToHaveLogFile _ = False + + handleMissingLogFile = Exception.handleJust missingFile $ \ioe -> + warn verbosity $ "Missing log file for build report: " + ++ fromMaybe "" (ioeGetFileName ioe) + + missingFile ioe + | isDoesNotExistError ioe = Just ioe + missingFile _ = Nothing + + +regenerateHaddockIndex :: Verbosity + -> [PackageDB] + -> Compiler + -> Platform + -> ProgramDb + -> UseSandbox + -> ConfigFlags + -> InstallFlags + -> BuildOutcomes + -> IO () +regenerateHaddockIndex verbosity packageDBs comp platform progdb useSandbox + configFlags installFlags buildOutcomes + | haddockIndexFileIsRequested && shouldRegenerateHaddockIndex = do + + defaultDirs <- 
InstallDirs.defaultInstallDirs + (compilerFlavor comp) + (fromFlag (configUserInstall configFlags)) + True + let indexFileTemplate = fromFlag (installHaddockIndex installFlags) + indexFile = substHaddockIndexFileName defaultDirs indexFileTemplate + + notice verbosity $ + "Updating documentation index " ++ indexFile + + --TODO: might be nice if the install plan gave us the new InstalledPackageInfo + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb + Haddock.regenerateHaddockIndex verbosity installedPkgIndex progdb indexFile + + | otherwise = return () + where + haddockIndexFileIsRequested = + fromFlag (installDocumentation installFlags) + && isJust (flagToMaybe (installHaddockIndex installFlags)) + + -- We want to regenerate the index if some new documentation was actually + -- installed. Since the index can be only per-user or per-sandbox (see + -- #1337), we don't do it for global installs or special cases where we're + -- installing into a specific db. + shouldRegenerateHaddockIndex = (isUseSandbox useSandbox || normalUserInstall) + && someDocsWereInstalled buildOutcomes + where + someDocsWereInstalled = any installedDocs . Map.elems + installedDocs (Right (BuildResult DocsOk _ _)) = True + installedDocs _ = False + + normalUserInstall = (UserPackageDB `elem` packageDBs) + && all (not . isSpecificPackageDB) packageDBs + isSpecificPackageDB (SpecificPackageDB _) = True + isSpecificPackageDB _ = False + + substHaddockIndexFileName defaultDirs = fromPathTemplate + . 
substPathTemplate env + where + env = env0 ++ installDirsTemplateEnv absoluteDirs + env0 = InstallDirs.compilerTemplateEnv (compilerInfo comp) + ++ InstallDirs.platformTemplateEnv platform + ++ InstallDirs.abiTemplateEnv (compilerInfo comp) platform + absoluteDirs = InstallDirs.substituteInstallDirTemplates + env0 templateDirs + templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault + defaultDirs (configInstallDirs configFlags) + + +symlinkBinaries :: Verbosity + -> Platform -> Compiler + -> ConfigFlags + -> InstallFlags + -> InstallPlan + -> BuildOutcomes + -> IO () +symlinkBinaries verbosity platform comp configFlags installFlags + plan buildOutcomes = do + failed <- InstallSymlink.symlinkBinaries platform comp + InstallSymlink.NeverOverwrite + configFlags installFlags + plan buildOutcomes + case failed of + [] -> return () + [(_, exe, path)] -> + warn verbosity $ + "could not create a symlink in " ++ bindir ++ " for " + ++ display exe ++ " because the file exists there already but is not " + ++ "managed by cabal. You can create a symlink for this executable " + ++ "manually if you wish. The executable file has been installed at " + ++ path + exes -> + warn verbosity $ + "could not create symlinks in " ++ bindir ++ " for " + ++ intercalate ", " [ display exe | (_, exe, _) <- exes ] + ++ " because the files exist there already and are not " + ++ "managed by cabal. You can create symlinks for these executables " + ++ "manually if you wish. The executable files have been installed at " + ++ intercalate ", " [ path | (_, _, path) <- exes ] + where + bindir = fromFlag (installSymlinkBinDir installFlags) + + +printBuildFailures :: Verbosity -> BuildOutcomes -> IO () +printBuildFailures verbosity buildOutcomes = + case [ (pkgid, failure) + | (pkgid, Left failure) <- Map.toList buildOutcomes ] of + [] -> return () + failed -> die' verbosity . 
unlines + $ "Error: some packages failed to install:" + : [ display pkgid ++ printFailureReason reason + | (pkgid, reason) <- failed ] + where + printFailureReason reason = case reason of + DependentFailed pkgid -> " depends on " ++ display pkgid + ++ " which failed to install." + DownloadFailed e -> " failed while downloading the package." + ++ showException e + UnpackFailed e -> " failed while unpacking the package." + ++ showException e + ConfigureFailed e -> " failed during the configure step." + ++ showException e + BuildFailed e -> " failed during the building phase." + ++ showException e + TestsFailed e -> " failed during the tests phase." + ++ showException e + InstallFailed e -> " failed during the final install step." + ++ showException e + + -- This will never happen, but we include it for completeness + PlanningFailed -> " failed during the planning phase." + + showException e = " The exception was:\n " ++ show e ++ maybeOOM e +#ifdef mingw32_HOST_OS + maybeOOM _ = "" +#else + maybeOOM e = maybe "" onExitFailure (fromException e) + onExitFailure (ExitFailure n) + | n == 9 || n == -9 = + "\nThis may be due to an out-of-memory condition." + onExitFailure _ = "" +#endif + + +-- | If we're working inside a sandbox and some add-source deps were installed, +-- update the timestamps of those deps. 
+updateSandboxTimestampsFile :: Verbosity -> UseSandbox -> Maybe SandboxPackageInfo + -> Compiler -> Platform + -> InstallPlan + -> BuildOutcomes + -> IO () +updateSandboxTimestampsFile verbosity (UseSandbox sandboxDir) + (Just (SandboxPackageInfo _ _ _ allAddSourceDeps)) + comp platform installPlan buildOutcomes = + withUpdateTimestamps verbosity sandboxDir (compilerId comp) platform $ \_ -> do + let allInstalled = [ pkg + | InstallPlan.Configured pkg + <- InstallPlan.toList installPlan + , case InstallPlan.lookupBuildOutcome pkg buildOutcomes of + Just (Right _success) -> True + _ -> False + ] + allSrcPkgs = [ confPkgSource cpkg | cpkg <- allInstalled ] + allPaths = [ pth | LocalUnpackedPackage pth + <- map packageSource allSrcPkgs] + allPathsCanonical <- mapM tryCanonicalizePath allPaths + return $! filter (`S.member` allAddSourceDeps) allPathsCanonical + +updateSandboxTimestampsFile _ _ _ _ _ _ _ = return () + +-- ------------------------------------------------------------ +-- * Actually do the installations +-- ------------------------------------------------------------ + +data InstallMisc = InstallMisc { + libVersion :: Maybe Version + } + +-- | If logging is enabled, contains location of the log file and the verbosity +-- level for logging. +type UseLogFile = Maybe (PackageIdentifier -> UnitId -> FilePath, Verbosity) + +performInstallations :: Verbosity + -> InstallArgs + -> InstalledPackageIndex + -> InstallPlan + -> IO BuildOutcomes +performInstallations verbosity + (packageDBs, repoCtxt, comp, platform, progdb, useSandbox, _, + globalFlags, configFlags, configExFlags, installFlags, haddockFlags) + installedPkgIndex installPlan = do + + -- With 'install -j' it can be a bit hard to tell whether a sandbox is used. + whenUsingSandbox useSandbox $ \sandboxDir -> + when parallelInstall $ + notice verbosity $ "Notice: installing into a sandbox located at " + ++ sandboxDir + info verbosity $ "Number of threads used: " ++ (show numJobs) ++ "." 
+ + jobControl <- if parallelInstall then newParallelJobControl numJobs + else newSerialJobControl + fetchLimit <- newJobLimit (min numJobs numFetchJobs) + installLock <- newLock -- serialise installation + cacheLock <- newLock -- serialise access to setup exe cache + + executeInstallPlan verbosity jobControl keepGoing useLogFile + installPlan $ \rpkg -> + installReadyPackage platform cinfo configFlags + rpkg $ \configFlags' src pkg pkgoverride -> + fetchSourcePackage verbosity repoCtxt fetchLimit src $ \src' -> + installLocalPackage verbosity (packageId pkg) src' distPref $ \mpath -> + installUnpackedPackage verbosity installLock numJobs + (setupScriptOptions installedPkgIndex + cacheLock rpkg) + configFlags' + installFlags haddockFlags comp progdb + platform pkg rpkg pkgoverride mpath useLogFile + + where + cinfo = compilerInfo comp + + numJobs = determineNumJobs (installNumJobs installFlags) + numFetchJobs = 2 + parallelInstall = numJobs >= 2 + keepGoing = fromFlag (installKeepGoing installFlags) + distPref = fromFlagOrDefault (useDistPref defaultSetupScriptOptions) + (configDistPref configFlags) + + setupScriptOptions index lock rpkg = + configureSetupScript + packageDBs + comp + platform + progdb + distPref + (chooseCabalVersion configExFlags (libVersion miscOptions)) + (Just lock) + parallelInstall + index + (Just rpkg) + + reportingLevel = fromFlag (installBuildReports installFlags) + logsDir = fromFlag (globalLogsDir globalFlags) + + -- Should the build output be written to a log file instead of stdout? + useLogFile :: UseLogFile + useLogFile = fmap ((\f -> (f, loggingVerbosity)) . substLogFileName) + logFileTemplate + where + installLogFile' = flagToMaybe $ installLogFile installFlags + defaultTemplate = toPathTemplate $ + logsDir "$compiler" "$libname" <.> "log" + + -- If the user has specified --remote-build-reporting=detailed, use the + -- default log file location. If the --build-log option is set, use the + -- provided location. 
Otherwise don't use logging, unless building in + -- parallel (in which case the default location is used). + logFileTemplate :: Maybe PathTemplate + logFileTemplate + | useDefaultTemplate = Just defaultTemplate + | otherwise = installLogFile' + + -- If the user has specified --remote-build-reporting=detailed or + -- --build-log, use more verbose logging. + loggingVerbosity :: Verbosity + loggingVerbosity | overrideVerbosity = modifyVerbosity (max verbose) verbosity + | otherwise = verbosity + + useDefaultTemplate :: Bool + useDefaultTemplate + | reportingLevel == DetailedReports = True + | isJust installLogFile' = False + | parallelInstall = True + | otherwise = False + + overrideVerbosity :: Bool + overrideVerbosity + | reportingLevel == DetailedReports = True + | isJust installLogFile' = True + | parallelInstall = False + | otherwise = False + + substLogFileName :: PathTemplate -> PackageIdentifier -> UnitId -> FilePath + substLogFileName template pkg uid = fromPathTemplate + . substPathTemplate env + $ template + where env = initialPathTemplateEnv (packageId pkg) uid + (compilerInfo comp) platform + + miscOptions = InstallMisc { + libVersion = flagToMaybe (configCabalVersion configExFlags) + } + + +executeInstallPlan :: Verbosity + -> JobControl IO (UnitId, BuildOutcome) + -> Bool + -> UseLogFile + -> InstallPlan + -> (ReadyPackage -> IO BuildOutcome) + -> IO BuildOutcomes +executeInstallPlan verbosity jobCtl keepGoing useLogFile plan0 installPkg = + InstallPlan.execute + jobCtl keepGoing depsFailure plan0 $ \pkg -> do + buildOutcome <- installPkg pkg + printBuildResult (packageId pkg) (installedUnitId pkg) buildOutcome + return buildOutcome + + where + depsFailure = DependentFailed . packageId + + -- Print build log if something went wrong, and 'Installed $PKGID' + -- otherwise. 
+ printBuildResult :: PackageId -> UnitId -> BuildOutcome -> IO () + printBuildResult pkgid uid buildOutcome = case buildOutcome of + (Right _) -> progressMessage verbosity ProgressCompleted (display pkgid) + (Left _) -> do + notice verbosity $ "Failed to install " ++ display pkgid + when (verbosity >= normal) $ + case useLogFile of + Nothing -> return () + Just (mkLogFileName, _) -> do + let logName = mkLogFileName pkgid uid + putStr $ "Build log ( " ++ logName ++ " ):\n" + printFile logName + + printFile :: FilePath -> IO () + printFile path = readFile path >>= putStr + +-- | Call an installer for an 'SourcePackage' but override the configure +-- flags with the ones given by the 'ReadyPackage'. In particular the +-- 'ReadyPackage' specifies an exact 'FlagAssignment' and exactly +-- versioned package dependencies. So we ignore any previous partial flag +-- assignment or dependency constraints and use the new ones. +-- +-- NB: when updating this function, don't forget to also update +-- 'configurePackage' in D.C.Configure. +installReadyPackage :: Platform -> CompilerInfo + -> ConfigFlags + -> ReadyPackage + -> (ConfigFlags -> UnresolvedPkgLoc + -> PackageDescription + -> PackageDescriptionOverride + -> a) + -> a +installReadyPackage platform cinfo configFlags + (ReadyPackage (ConfiguredPackage ipid + (SourcePackage _ gpkg source pkgoverride) + flags stanzas deps)) + installPkg = + installPkg configFlags { + configIPID = toFlag (display ipid), + configConfigurationsFlags = flags, + -- We generate the legacy constraints as well as the new style precise deps. + -- In the end only one set gets passed to Setup.hs configure, depending on + -- the Cabal version we are talking to. 
+ configConstraints = [ thisPackageVersion srcid + | ConfiguredId srcid (Just PackageDescription.CLibName) _ipid + <- CD.nonSetupDeps deps ], + configDependencies = [ (packageName srcid, dep_ipid) + | ConfiguredId srcid (Just PackageDescription.CLibName) dep_ipid + <- CD.nonSetupDeps deps ], + -- Use '--exact-configuration' if supported. + configExactConfiguration = toFlag True, + configBenchmarks = toFlag False, + configTests = toFlag (TestStanzas `elem` stanzas) + } source pkg pkgoverride + where + pkg = case finalizePD flags (enableStanzas stanzas) + (const True) + platform cinfo [] gpkg of + Left _ -> error "finalizePD ReadyPackage failed" + Right (desc, _) -> desc + +fetchSourcePackage + :: Verbosity + -> RepoContext + -> JobLimit + -> UnresolvedPkgLoc + -> (ResolvedPkgLoc -> IO BuildOutcome) + -> IO BuildOutcome +fetchSourcePackage verbosity repoCtxt fetchLimit src installPkg = do + fetched <- checkFetched src + case fetched of + Just src' -> installPkg src' + Nothing -> onFailure DownloadFailed $ do + loc <- withJobLimit fetchLimit $ + fetchPackage verbosity repoCtxt src + installPkg loc + + +installLocalPackage + :: Verbosity + -> PackageIdentifier -> ResolvedPkgLoc -> FilePath + -> (Maybe FilePath -> IO BuildOutcome) + -> IO BuildOutcome +installLocalPackage verbosity pkgid location distPref installPkg = + + case location of + + LocalUnpackedPackage dir -> + installPkg (Just dir) + + RemoteSourceRepoPackage _repo dir -> + installPkg (Just dir) + + LocalTarballPackage tarballPath -> + installLocalTarballPackage verbosity + pkgid tarballPath distPref installPkg + + RemoteTarballPackage _ tarballPath -> + installLocalTarballPackage verbosity + pkgid tarballPath distPref installPkg + + RepoTarballPackage _ _ tarballPath -> + installLocalTarballPackage verbosity + pkgid tarballPath distPref installPkg + +installLocalTarballPackage + :: Verbosity + -> PackageIdentifier -> FilePath -> FilePath + -> (Maybe FilePath -> IO BuildOutcome) + -> IO BuildOutcome 
+installLocalTarballPackage verbosity pkgid + tarballPath distPref installPkg = do + tmp <- getTemporaryDirectory + withTempDirectory verbosity tmp "cabal-tmp" $ \tmpDirPath -> + onFailure UnpackFailed $ do + let relUnpackedPath = display pkgid + absUnpackedPath = tmpDirPath relUnpackedPath + descFilePath = absUnpackedPath + display (packageName pkgid) <.> "cabal" + info verbosity $ "Extracting " ++ tarballPath + ++ " to " ++ tmpDirPath ++ "..." + extractTarGzFile tmpDirPath relUnpackedPath tarballPath + exists <- doesFileExist descFilePath + unless exists $ + die' verbosity $ "Package .cabal file not found: " ++ show descFilePath + maybeRenameDistDir absUnpackedPath + installPkg (Just absUnpackedPath) + + where + -- 'cabal sdist' puts pre-generated files in the 'dist' + -- directory. This fails when a nonstandard build directory name + -- is used (as is the case with sandboxes), so we need to rename + -- the 'dist' dir here. + -- + -- TODO: 'cabal get happy && cd sandbox && cabal install ../happy' still + -- fails even with this workaround. We probably can live with that. + maybeRenameDistDir :: FilePath -> IO () + maybeRenameDistDir absUnpackedPath = do + let distDirPath = absUnpackedPath defaultDistPref + distDirPathTmp = absUnpackedPath (defaultDistPref ++ "-tmp") + distDirPathNew = absUnpackedPath distPref + distDirExists <- doesDirectoryExist distDirPath + when (distDirExists + && (not $ distDirPath `equalFilePath` distDirPathNew)) $ do + -- NB: we need to handle the case when 'distDirPathNew' is a + -- subdirectory of 'distDirPath' (e.g. the former is + -- 'dist/dist-sandbox-3688fbc2' and the latter is 'dist'). + debug verbosity $ "Renaming '" ++ distDirPath ++ "' to '" + ++ distDirPathTmp ++ "'." + renameDirectory distDirPath distDirPathTmp + when (distDirPath `isPrefixOf` distDirPathNew) $ + createDirectoryIfMissingVerbose verbosity False distDirPath + debug verbosity $ "Renaming '" ++ distDirPathTmp ++ "' to '" + ++ distDirPathNew ++ "'." 
+ renameDirectory distDirPathTmp distDirPathNew + +installUnpackedPackage + :: Verbosity + -> Lock + -> Int + -> SetupScriptOptions + -> ConfigFlags + -> InstallFlags + -> HaddockFlags + -> Compiler + -> ProgramDb + -> Platform + -> PackageDescription + -> ReadyPackage + -> PackageDescriptionOverride + -> Maybe FilePath -- ^ Directory to change to before starting the installation. + -> UseLogFile -- ^ File to log output to (if any) + -> IO BuildOutcome +installUnpackedPackage verbosity installLock numJobs + scriptOptions + configFlags installFlags haddockFlags comp progdb + platform pkg rpkg pkgoverride workingDir useLogFile = do + -- Override the .cabal file if necessary + case pkgoverride of + Nothing -> return () + Just pkgtxt -> do + let descFilePath = fromMaybe "." workingDir + display (packageName pkgid) <.> "cabal" + info verbosity $ + "Updating " ++ display (packageName pkgid) <.> "cabal" + ++ " with the latest revision from the index." + writeFileAtomic descFilePath pkgtxt + + -- Make sure that we pass --libsubdir etc to 'setup configure' (necessary if + -- the setup script was compiled against an old version of the Cabal lib). + configFlags' <- addDefaultInstallDirs configFlags + -- Filter out flags not supported by the old versions of the Cabal lib. + let configureFlags :: Version -> ConfigFlags + configureFlags = filterConfigureFlags configFlags' { + configVerbosity = toFlag verbosity' + } + + -- Path to the optional log file. 
+ mLogPath <- maybeLogPath + + logDirChange (maybe (const (return ())) appendFile mLogPath) workingDir $ do + -- Configure phase + onFailure ConfigureFailed $ do + noticeProgress ProgressStarting + setup configureCommand configureFlags mLogPath + + -- Build phase + onFailure BuildFailed $ do + noticeProgress ProgressBuilding + setup buildCommand' buildFlags mLogPath + + -- Doc generation phase + docsResult <- if shouldHaddock + then (do setup haddockCommand haddockFlags' mLogPath + return DocsOk) + `catchIO` (\_ -> return DocsFailed) + `catchExit` (\_ -> return DocsFailed) + else return DocsNotTried + + -- Tests phase + onFailure TestsFailed $ do + when (testsEnabled && PackageDescription.hasTests pkg) $ + setup Cabal.testCommand testFlags mLogPath + + let testsResult | testsEnabled = TestsOk + | otherwise = TestsNotTried + + -- Install phase + onFailure InstallFailed $ criticalSection installLock $ do + -- Actual installation + withWin32SelfUpgrade verbosity uid configFlags + cinfo platform pkg $ do + setup Cabal.copyCommand copyFlags mLogPath + + -- Capture installed package configuration file, so that + -- it can be incorporated into the final InstallPlan + ipkgs <- genPkgConfs mLogPath + let ipkgs' = case ipkgs of + [ipkg] -> [ipkg { Installed.installedUnitId = uid }] + _ -> ipkgs + let packageDBs = interpretPackageDbFlags + (fromFlag (configUserInstall configFlags)) + (configPackageDBs configFlags) + forM_ ipkgs' $ \ipkg' -> + registerPackage verbosity comp progdb + packageDBs ipkg' + defaultRegisterOptions + + return (Right (BuildResult docsResult testsResult (find ((==uid).installedUnitId) ipkgs'))) + + where + pkgid = packageId pkg + uid = installedUnitId rpkg + cinfo = compilerInfo comp + buildCommand' = buildCommand progdb + dispname = display pkgid + isParallelBuild = numJobs >= 2 + + noticeProgress phase = when isParallelBuild $ + progressMessage verbosity phase dispname + + buildFlags _ = emptyBuildFlags { + buildDistPref = configDistPref configFlags, 
+ buildVerbosity = toFlag verbosity' + } + shouldHaddock = fromFlag (installDocumentation installFlags) + haddockFlags' _ = haddockFlags { + haddockVerbosity = toFlag verbosity', + haddockDistPref = configDistPref configFlags + } + testsEnabled = fromFlag (configTests configFlags) + && fromFlagOrDefault False (installRunTests installFlags) + testFlags _ = Cabal.emptyTestFlags { + Cabal.testDistPref = configDistPref configFlags + } + copyFlags _ = Cabal.emptyCopyFlags { + Cabal.copyDistPref = configDistPref configFlags, + Cabal.copyDest = toFlag InstallDirs.NoCopyDest, + Cabal.copyVerbosity = toFlag verbosity' + } + shouldRegister = PackageDescription.hasLibs pkg + registerFlags _ = Cabal.emptyRegisterFlags { + Cabal.regDistPref = configDistPref configFlags, + Cabal.regVerbosity = toFlag verbosity' + } + verbosity' = maybe verbosity snd useLogFile + tempTemplate name = name ++ "-" ++ display pkgid + + addDefaultInstallDirs :: ConfigFlags -> IO ConfigFlags + addDefaultInstallDirs configFlags' = do + defInstallDirs <- InstallDirs.defaultInstallDirs flavor userInstall False + return $ configFlags' { + configInstallDirs = fmap Cabal.Flag . + InstallDirs.substituteInstallDirTemplates env $ + InstallDirs.combineInstallDirs fromFlagOrDefault + defInstallDirs (configInstallDirs configFlags) + } + where + CompilerId flavor _ = compilerInfoId cinfo + env = initialPathTemplateEnv pkgid uid cinfo platform + userInstall = fromFlagOrDefault defaultUserInstall + (configUserInstall configFlags') + + genPkgConfs :: Maybe FilePath + -> IO [Installed.InstalledPackageInfo] + genPkgConfs mLogPath = + if shouldRegister then do + tmp <- getTemporaryDirectory + withTempDirectory verbosity tmp (tempTemplate "pkgConf") $ \dir -> do + let pkgConfDest = dir "pkgConf" + registerFlags' version = (registerFlags version) { + Cabal.regGenPkgConf = toFlag (Just pkgConfDest) + } + setup Cabal.registerCommand registerFlags' mLogPath + is_dir <- doesDirectoryExist pkgConfDest + let notHidden = not . 
isHidden + isHidden name = "." `isPrefixOf` name + if is_dir + -- Sort so that each prefix of the package + -- configurations is well formed + then mapM (readPkgConf pkgConfDest) . sort . filter notHidden + =<< getDirectoryContents pkgConfDest + else fmap (:[]) $ readPkgConf "." pkgConfDest + else return [] + + readPkgConf :: FilePath -> FilePath + -> IO Installed.InstalledPackageInfo + readPkgConf pkgConfDir pkgConfFile = + (withUTF8FileContents (pkgConfDir pkgConfFile) $ \pkgConfText -> + case Installed.parseInstalledPackageInfo pkgConfText of + Installed.ParseFailed perror -> pkgConfParseFailed perror + Installed.ParseOk warns pkgConf -> do + unless (null warns) $ + warn verbosity $ unlines (map (showPWarning pkgConfFile) warns) + return pkgConf) + + pkgConfParseFailed :: Installed.PError -> IO a + pkgConfParseFailed perror = + die' verbosity $ "Couldn't parse the output of 'setup register --gen-pkg-config':" + ++ show perror + + maybeLogPath :: IO (Maybe FilePath) + maybeLogPath = + case useLogFile of + Nothing -> return Nothing + Just (mkLogFileName, _) -> do + let logFileName = mkLogFileName (packageId pkg) uid + logDir = takeDirectory logFileName + unless (null logDir) $ createDirectoryIfMissing True logDir + logFileExists <- doesFileExist logFileName + when logFileExists $ removeFile logFileName + return (Just logFileName) + + setup cmd flags mLogPath = + Exception.bracket + (traverse (\path -> openFile path AppendMode) mLogPath) + (traverse_ hClose) + (\logFileHandle -> + setupWrapper verbosity + scriptOptions { useLoggingHandle = logFileHandle + , useWorkingDir = workingDir } + (Just pkg) + cmd flags (const [])) + + +-- helper +onFailure :: (SomeException -> BuildFailure) -> IO BuildOutcome -> IO BuildOutcome +onFailure result action = + action `catches` + [ Handler $ \ioe -> handler (ioe :: IOException) + , Handler $ \exit -> handler (exit :: ExitCode) + ] + where + handler :: Exception e => e -> IO BuildOutcome + handler = return . Left . result . 
toException + + +-- ------------------------------------------------------------ +-- * Weird windows hacks +-- ------------------------------------------------------------ + +withWin32SelfUpgrade :: Verbosity + -> UnitId + -> ConfigFlags + -> CompilerInfo + -> Platform + -> PackageDescription + -> IO a -> IO a +withWin32SelfUpgrade _ _ _ _ _ _ action | buildOS /= Windows = action +withWin32SelfUpgrade verbosity uid configFlags cinfo platform pkg action = do + + defaultDirs <- InstallDirs.defaultInstallDirs + compFlavor + (fromFlag (configUserInstall configFlags)) + (PackageDescription.hasLibs pkg) + + Win32SelfUpgrade.possibleSelfUpgrade verbosity + (exeInstallPaths defaultDirs) action + + where + pkgid = packageId pkg + (CompilerId compFlavor _) = compilerInfoId cinfo + + exeInstallPaths defaultDirs = + [ InstallDirs.bindir absoluteDirs exeName <.> exeExtension buildPlatform + | exe <- PackageDescription.executables pkg + , PackageDescription.buildable (PackageDescription.buildInfo exe) + , let exeName = prefix ++ display (PackageDescription.exeName exe) ++ suffix + prefix = substTemplate prefixTemplate + suffix = substTemplate suffixTemplate ] + where + fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "") + prefixTemplate = fromFlagTemplate (configProgPrefix configFlags) + suffixTemplate = fromFlagTemplate (configProgSuffix configFlags) + templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault + defaultDirs (configInstallDirs configFlags) + absoluteDirs = InstallDirs.absoluteInstallDirs + pkgid uid + cinfo InstallDirs.NoCopyDest + platform templateDirs + substTemplate = InstallDirs.fromPathTemplate + . 
InstallDirs.substPathTemplate env + where env = InstallDirs.initialPathTemplateEnv pkgid uid + cinfo platform diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/InstallPlan.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/InstallPlan.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/InstallPlan.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/InstallPlan.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,952 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE ConstraintKinds #-} +{-# LANGUAGE FlexibleContexts #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.InstallPlan +-- Copyright : (c) Duncan Coutts 2008 +-- License : BSD-like +-- +-- Maintainer : duncan@community.haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Package installation plan +-- +----------------------------------------------------------------------------- +module Distribution.Client.InstallPlan ( + InstallPlan, + GenericInstallPlan, + PlanPackage, + GenericPlanPackage(..), + foldPlanPackage, + IsUnit, + + -- * Operations on 'InstallPlan's + new, + toGraph, + toList, + toMap, + keys, + keysSet, + planIndepGoals, + depends, + + fromSolverInstallPlan, + fromSolverInstallPlanWithProgress, + configureInstallPlan, + remove, + installed, + lookup, + directDeps, + revDirectDeps, + + -- * Traversal + executionOrder, + execute, + BuildOutcomes, + lookupBuildOutcome, + -- ** Traversal helpers + -- $traversal + Processing, + ready, + completed, + failed, + + -- * Display + showPlanGraph, + showInstallPlan, + + -- * Graph-like operations + dependencyClosure, + 
reverseTopologicalOrder, + reverseDependencyClosure, + ) where + +import Distribution.Client.Types hiding (BuildOutcomes) +import qualified Distribution.PackageDescription as PD +import qualified Distribution.Simple.Configure as Configure +import qualified Distribution.Simple.Setup as Cabal + +import Distribution.InstalledPackageInfo + ( InstalledPackageInfo ) +import Distribution.Package + ( Package(..), HasMungedPackageId(..) + , HasUnitId(..), UnitId ) +import Distribution.Solver.Types.SolverPackage +import Distribution.Client.JobControl +import Distribution.Text +import Text.PrettyPrint +import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan +import Distribution.Client.SolverInstallPlan (SolverInstallPlan) + +import qualified Distribution.Solver.Types.ComponentDeps as CD +import Distribution.Solver.Types.Settings +import Distribution.Solver.Types.SolverId +import Distribution.Solver.Types.InstSolverPackage + +import Distribution.Utils.LogProgress + +-- TODO: Need this when we compute final UnitIds +-- import qualified Distribution.Simple.Configure as Configure + +import Data.List + ( foldl', intercalate ) +import qualified Data.Foldable as Foldable (all) +import Data.Maybe + ( fromMaybe, mapMaybe ) +import qualified Distribution.Compat.Graph as Graph +import Distribution.Compat.Graph (Graph, IsNode(..)) +import Distribution.Compat.Binary (Binary(..)) +import GHC.Generics +import Data.Typeable +import Control.Monad +import Control.Exception + ( assert ) +import qualified Data.Map as Map +import Data.Map (Map) +import qualified Data.Set as Set +import Data.Set (Set) + +import Prelude hiding (lookup) + + +-- When cabal tries to install a number of packages, including all their +-- dependencies it has a non-trivial problem to solve. +-- +-- The Problem: +-- +-- In general we start with a set of installed packages and a set of source +-- packages. +-- +-- Installed packages have fixed dependencies. 
They have already been built and +-- we know exactly what packages they were built against, including their exact +-- versions. +-- +-- Source package have somewhat flexible dependencies. They are specified as +-- version ranges, though really they're predicates. To make matters worse they +-- have conditional flexible dependencies. Configuration flags can affect which +-- packages are required and can place additional constraints on their +-- versions. +-- +-- These two sets of package can and usually do overlap. There can be installed +-- packages that are also available as source packages which means they could +-- be re-installed if required, though there will also be packages which are +-- not available as source and cannot be re-installed. Very often there will be +-- extra versions available than are installed. Sometimes we may like to prefer +-- installed packages over source ones or perhaps always prefer the latest +-- available version whether installed or not. +-- +-- The goal is to calculate an installation plan that is closed, acyclic and +-- consistent and where every configured package is valid. +-- +-- An installation plan is a set of packages that are going to be used +-- together. It will consist of a mixture of installed packages and source +-- packages along with their exact version dependencies. An installation plan +-- is closed if for every package in the set, all of its dependencies are +-- also in the set. It is consistent if for every package in the set, all +-- dependencies which target that package have the same version. + +-- Note that plans do not necessarily compose. You might have a valid plan for +-- package A and a valid plan for package B. That does not mean the composition +-- is simultaneously valid for A and B. In particular you're most likely to +-- have problems with inconsistent dependencies. +-- On the other hand it is true that every closed sub plan is valid. 
+ +-- | Packages in an install plan +-- +-- NOTE: 'ConfiguredPackage', 'GenericReadyPackage' and 'GenericPlanPackage' +-- intentionally have no 'PackageInstalled' instance. `This is important: +-- PackageInstalled returns only library dependencies, but for package that +-- aren't yet installed we know many more kinds of dependencies (setup +-- dependencies, exe, test-suite, benchmark, ..). Any functions that operate on +-- dependencies in cabal-install should consider what to do with these +-- dependencies; if we give a 'PackageInstalled' instance it would be too easy +-- to get this wrong (and, for instance, call graph traversal functions from +-- Cabal rather than from cabal-install). Instead, see 'PackageInstalled'. +data GenericPlanPackage ipkg srcpkg + = PreExisting ipkg + | Configured srcpkg + | Installed srcpkg + deriving (Eq, Show, Generic) + +-- | Convenience combinator for destructing 'GenericPlanPackage'. +-- This is handy because if you case manually, you have to handle +-- 'Configured' and 'Installed' separately (where often you want +-- them to be the same.) +foldPlanPackage :: (ipkg -> a) + -> (srcpkg -> a) + -> GenericPlanPackage ipkg srcpkg + -> a +foldPlanPackage f _ (PreExisting ipkg) = f ipkg +foldPlanPackage _ g (Configured srcpkg) = g srcpkg +foldPlanPackage _ g (Installed srcpkg) = g srcpkg + +type IsUnit a = (IsNode a, Key a ~ UnitId) + +depends :: IsUnit a => a -> [UnitId] +depends = nodeNeighbors + +-- NB: Expanded constraint synonym here to avoid undecidable +-- instance errors in GHC 7.8 and earlier. 
+instance (IsNode ipkg, IsNode srcpkg, Key ipkg ~ UnitId, Key srcpkg ~ UnitId) + => IsNode (GenericPlanPackage ipkg srcpkg) where + type Key (GenericPlanPackage ipkg srcpkg) = UnitId + nodeKey (PreExisting ipkg) = nodeKey ipkg + nodeKey (Configured spkg) = nodeKey spkg + nodeKey (Installed spkg) = nodeKey spkg + nodeNeighbors (PreExisting ipkg) = nodeNeighbors ipkg + nodeNeighbors (Configured spkg) = nodeNeighbors spkg + nodeNeighbors (Installed spkg) = nodeNeighbors spkg + +instance (Binary ipkg, Binary srcpkg) + => Binary (GenericPlanPackage ipkg srcpkg) + +type PlanPackage = GenericPlanPackage + InstalledPackageInfo (ConfiguredPackage UnresolvedPkgLoc) + +instance (Package ipkg, Package srcpkg) => + Package (GenericPlanPackage ipkg srcpkg) where + packageId (PreExisting ipkg) = packageId ipkg + packageId (Configured spkg) = packageId spkg + packageId (Installed spkg) = packageId spkg + +instance (HasMungedPackageId ipkg, HasMungedPackageId srcpkg) => + HasMungedPackageId (GenericPlanPackage ipkg srcpkg) where + mungedId (PreExisting ipkg) = mungedId ipkg + mungedId (Configured spkg) = mungedId spkg + mungedId (Installed spkg) = mungedId spkg + +instance (HasUnitId ipkg, HasUnitId srcpkg) => + HasUnitId + (GenericPlanPackage ipkg srcpkg) where + installedUnitId (PreExisting ipkg) = installedUnitId ipkg + installedUnitId (Configured spkg) = installedUnitId spkg + installedUnitId (Installed spkg) = installedUnitId spkg + +instance (HasConfiguredId ipkg, HasConfiguredId srcpkg) => + HasConfiguredId (GenericPlanPackage ipkg srcpkg) where + configuredId (PreExisting ipkg) = configuredId ipkg + configuredId (Configured spkg) = configuredId spkg + configuredId (Installed spkg) = configuredId spkg + +data GenericInstallPlan ipkg srcpkg = GenericInstallPlan { + planGraph :: !(Graph (GenericPlanPackage ipkg srcpkg)), + planIndepGoals :: !IndependentGoals + } + deriving (Typeable) + +-- | 'GenericInstallPlan' specialised to most commonly used types. 
+type InstallPlan = GenericInstallPlan + InstalledPackageInfo (ConfiguredPackage UnresolvedPkgLoc) + +-- | Smart constructor that deals with caching the 'Graph' representation. +-- +mkInstallPlan :: (IsUnit ipkg, IsUnit srcpkg) + => String + -> Graph (GenericPlanPackage ipkg srcpkg) + -> IndependentGoals + -> GenericInstallPlan ipkg srcpkg +mkInstallPlan loc graph indepGoals = + assert (valid loc graph) + GenericInstallPlan { + planGraph = graph, + planIndepGoals = indepGoals + } + +internalError :: String -> String -> a +internalError loc msg = error $ "internal error in InstallPlan." ++ loc + ++ if null msg then "" else ": " ++ msg + +instance (IsNode ipkg, Key ipkg ~ UnitId, IsNode srcpkg, Key srcpkg ~ UnitId, + Binary ipkg, Binary srcpkg) + => Binary (GenericInstallPlan ipkg srcpkg) where + put GenericInstallPlan { + planGraph = graph, + planIndepGoals = indepGoals + } = put (graph, indepGoals) + + get = do + (graph, indepGoals) <- get + return $! mkInstallPlan "(instance Binary)" graph indepGoals + +showPlanGraph :: (Package ipkg, Package srcpkg, + IsUnit ipkg, IsUnit srcpkg) + => Graph (GenericPlanPackage ipkg srcpkg) -> String +showPlanGraph graph = renderStyle defaultStyle $ + vcat (map dispPlanPackage (Graph.toList graph)) + where dispPlanPackage p = + hang (hsep [ text (showPlanPackageTag p) + , disp (packageId p) + , parens (disp (nodeKey p))]) 2 + (vcat (map disp (nodeNeighbors p))) + +showInstallPlan :: (Package ipkg, Package srcpkg, + IsUnit ipkg, IsUnit srcpkg) + => GenericInstallPlan ipkg srcpkg -> String +showInstallPlan = showPlanGraph . planGraph + +showPlanPackageTag :: GenericPlanPackage ipkg srcpkg -> String +showPlanPackageTag (PreExisting _) = "PreExisting" +showPlanPackageTag (Configured _) = "Configured" +showPlanPackageTag (Installed _) = "Installed" + +-- | Build an installation plan from a valid set of resolved packages. 
+-- +new :: (IsUnit ipkg, IsUnit srcpkg) + => IndependentGoals + -> Graph (GenericPlanPackage ipkg srcpkg) + -> GenericInstallPlan ipkg srcpkg +new indepGoals graph = mkInstallPlan "new" graph indepGoals + +toGraph :: GenericInstallPlan ipkg srcpkg + -> Graph (GenericPlanPackage ipkg srcpkg) +toGraph = planGraph + +toList :: GenericInstallPlan ipkg srcpkg + -> [GenericPlanPackage ipkg srcpkg] +toList = Graph.toList . planGraph + +toMap :: GenericInstallPlan ipkg srcpkg + -> Map UnitId (GenericPlanPackage ipkg srcpkg) +toMap = Graph.toMap . planGraph + +keys :: GenericInstallPlan ipkg srcpkg -> [UnitId] +keys = Graph.keys . planGraph + +keysSet :: GenericInstallPlan ipkg srcpkg -> Set UnitId +keysSet = Graph.keysSet . planGraph + +-- | Remove packages from the install plan. This will result in an +-- error if there are remaining packages that depend on any matching +-- package. This is primarily useful for obtaining an install plan for +-- the dependencies of a package or set of packages without actually +-- installing the package itself, as when doing development. +-- +remove :: (IsUnit ipkg, IsUnit srcpkg) + => (GenericPlanPackage ipkg srcpkg -> Bool) + -> GenericInstallPlan ipkg srcpkg + -> GenericInstallPlan ipkg srcpkg +remove shouldRemove plan = + mkInstallPlan "remove" newGraph (planIndepGoals plan) + where + newGraph = Graph.fromDistinctList $ + filter (not . shouldRemove) (toList plan) + +-- | Change a number of packages in the 'Configured' state to the 'Installed' +-- state. +-- +-- To preserve invariants, the package must have all of its dependencies +-- already installed too (that is 'PreExisting' or 'Installed'). 
+-- +installed :: (IsUnit ipkg, IsUnit srcpkg) + => (srcpkg -> Bool) + -> GenericInstallPlan ipkg srcpkg + -> GenericInstallPlan ipkg srcpkg +installed shouldBeInstalled installPlan = + foldl' markInstalled installPlan + [ pkg + | Configured pkg <- reverseTopologicalOrder installPlan + , shouldBeInstalled pkg ] + where + markInstalled plan pkg = + assert (all isInstalled (directDeps plan (nodeKey pkg))) $ + plan { + planGraph = Graph.insert (Installed pkg) (planGraph plan) + } + +-- | Lookup a package in the plan. +-- +lookup :: (IsUnit ipkg, IsUnit srcpkg) + => GenericInstallPlan ipkg srcpkg + -> UnitId + -> Maybe (GenericPlanPackage ipkg srcpkg) +lookup plan pkgid = Graph.lookup pkgid (planGraph plan) + +-- | Find all the direct dependencies of the given package. +-- +-- Note that the package must exist in the plan or it is an error. +-- +directDeps :: GenericInstallPlan ipkg srcpkg + -> UnitId + -> [GenericPlanPackage ipkg srcpkg] +directDeps plan pkgid = + case Graph.neighbors (planGraph plan) pkgid of + Just deps -> deps + Nothing -> internalError "directDeps" "package not in graph" + +-- | Find all the direct reverse dependencies of the given package. +-- +-- Note that the package must exist in the plan or it is an error. +-- +revDirectDeps :: GenericInstallPlan ipkg srcpkg + -> UnitId + -> [GenericPlanPackage ipkg srcpkg] +revDirectDeps plan pkgid = + case Graph.revNeighbors (planGraph plan) pkgid of + Just deps -> deps + Nothing -> internalError "revDirectDeps" "package not in graph" + +-- | Return all the packages in the 'InstallPlan' in reverse topological order. +-- That is, for each package, all dependencies of the package appear first. +-- +-- Compared to 'executionOrder', this function returns all the installed and +-- source packages rather than just the source ones. Also, while both this +-- and 'executionOrder' produce reverse topological orderings of the package +-- dependency graph, it is not necessarily exactly the same order. 
+-- +reverseTopologicalOrder :: GenericInstallPlan ipkg srcpkg + -> [GenericPlanPackage ipkg srcpkg] +reverseTopologicalOrder plan = Graph.revTopSort (planGraph plan) + + +-- | Return the packages in the plan that are direct or indirect dependencies of +-- the given packages. +-- +dependencyClosure :: GenericInstallPlan ipkg srcpkg + -> [UnitId] + -> [GenericPlanPackage ipkg srcpkg] +dependencyClosure plan = fromMaybe [] + . Graph.closure (planGraph plan) + +-- | Return the packages in the plan that depend directly or indirectly on the +-- given packages. +-- +reverseDependencyClosure :: GenericInstallPlan ipkg srcpkg + -> [UnitId] + -> [GenericPlanPackage ipkg srcpkg] +reverseDependencyClosure plan = fromMaybe [] + . Graph.revClosure (planGraph plan) + + +-- Alert alert! Why does SolverId map to a LIST of plan packages? +-- The sordid story has to do with 'build-depends' on a package +-- with libraries and executables. In an ideal world, we would +-- ONLY depend on the library in this situation. But c.f. #3661 +-- some people rely on the build-depends to ALSO implicitly +-- depend on an executable. +-- +-- I don't want to commit to a strategy yet, so the only possible +-- thing you can do in this case is return EVERYTHING and let +-- the client filter out what they want (executables? libraries? +-- etc). This similarly implies we can't return a 'ConfiguredId' +-- because that's not enough information. 
+ +fromSolverInstallPlan :: + (IsUnit ipkg, IsUnit srcpkg) + => ( (SolverId -> [GenericPlanPackage ipkg srcpkg]) + -> SolverInstallPlan.SolverPlanPackage + -> [GenericPlanPackage ipkg srcpkg] ) + -> SolverInstallPlan + -> GenericInstallPlan ipkg srcpkg +fromSolverInstallPlan f plan = + mkInstallPlan "fromSolverInstallPlan" + (Graph.fromDistinctList pkgs'') + (SolverInstallPlan.planIndepGoals plan) + where + (_, _, pkgs'') = foldl' f' (Map.empty, Map.empty, []) + (SolverInstallPlan.reverseTopologicalOrder plan) + + f' (pidMap, ipiMap, pkgs) pkg = (pidMap', ipiMap', pkgs' ++ pkgs) + where + pkgs' = f (mapDep pidMap ipiMap) pkg + + (pidMap', ipiMap') + = case nodeKey pkg of + PreExistingId _ uid -> (pidMap, Map.insert uid pkgs' ipiMap) + PlannedId pid -> (Map.insert pid pkgs' pidMap, ipiMap) + + mapDep _ ipiMap (PreExistingId _pid uid) + | Just pkgs <- Map.lookup uid ipiMap = pkgs + | otherwise = error ("fromSolverInstallPlan: PreExistingId " ++ display uid) + mapDep pidMap _ (PlannedId pid) + | Just pkgs <- Map.lookup pid pidMap = pkgs + | otherwise = error ("fromSolverInstallPlan: PlannedId " ++ display pid) + -- This shouldn't happen, since mapDep should only be called + -- on neighbor SolverId, which must have all been done already + -- by the reverse top-sort (we assume the graph is not broken). 
+ + +fromSolverInstallPlanWithProgress :: + (IsUnit ipkg, IsUnit srcpkg) + => ( (SolverId -> [GenericPlanPackage ipkg srcpkg]) + -> SolverInstallPlan.SolverPlanPackage + -> LogProgress [GenericPlanPackage ipkg srcpkg] ) + -> SolverInstallPlan + -> LogProgress (GenericInstallPlan ipkg srcpkg) +fromSolverInstallPlanWithProgress f plan = do + (_, _, pkgs'') <- foldM f' (Map.empty, Map.empty, []) + (SolverInstallPlan.reverseTopologicalOrder plan) + return $ mkInstallPlan "fromSolverInstallPlanWithProgress" + (Graph.fromDistinctList pkgs'') + (SolverInstallPlan.planIndepGoals plan) + where + f' (pidMap, ipiMap, pkgs) pkg = do + pkgs' <- f (mapDep pidMap ipiMap) pkg + let (pidMap', ipiMap') + = case nodeKey pkg of + PreExistingId _ uid -> (pidMap, Map.insert uid pkgs' ipiMap) + PlannedId pid -> (Map.insert pid pkgs' pidMap, ipiMap) + return (pidMap', ipiMap', pkgs' ++ pkgs) + + mapDep _ ipiMap (PreExistingId _pid uid) + | Just pkgs <- Map.lookup uid ipiMap = pkgs + | otherwise = error ("fromSolverInstallPlan: PreExistingId " ++ display uid) + mapDep pidMap _ (PlannedId pid) + | Just pkgs <- Map.lookup pid pidMap = pkgs + | otherwise = error ("fromSolverInstallPlan: PlannedId " ++ display pid) + -- This shouldn't happen, since mapDep should only be called + -- on neighbor SolverId, which must have all been done already + -- by the reverse top-sort (we assume the graph is not broken). + +-- | Conversion of 'SolverInstallPlan' to 'InstallPlan'. 
+-- Similar to 'elaboratedInstallPlan' +configureInstallPlan :: Cabal.ConfigFlags -> SolverInstallPlan -> InstallPlan +configureInstallPlan configFlags solverPlan = + flip fromSolverInstallPlan solverPlan $ \mapDep planpkg -> + [case planpkg of + SolverInstallPlan.PreExisting pkg -> + PreExisting (instSolverPkgIPI pkg) + + SolverInstallPlan.Configured pkg -> + Configured (configureSolverPackage mapDep pkg) + ] + where + configureSolverPackage :: (SolverId -> [PlanPackage]) + -> SolverPackage UnresolvedPkgLoc + -> ConfiguredPackage UnresolvedPkgLoc + configureSolverPackage mapDep spkg = + ConfiguredPackage { + confPkgId = Configure.computeComponentId + (Cabal.fromFlagOrDefault False + (Cabal.configDeterministic configFlags)) + Cabal.NoFlag + Cabal.NoFlag + (packageId spkg) + PD.CLibName + (Just (map confInstId (CD.libraryDeps deps), + solverPkgFlags spkg)), + confPkgSource = solverPkgSource spkg, + confPkgFlags = solverPkgFlags spkg, + confPkgStanzas = solverPkgStanzas spkg, + confPkgDeps = deps + -- NB: no support for executable dependencies + } + where + deps = fmap (concatMap (map configuredId . mapDep)) (solverPkgLibDeps spkg) + + +-- ------------------------------------------------------------ +-- * Primitives for traversing plans +-- ------------------------------------------------------------ + +-- $traversal +-- +-- Algorithms to traverse or execute an 'InstallPlan', especially in parallel, +-- may make use of the 'Processing' type and the associated operations +-- 'ready', 'completed' and 'failed'. +-- +-- The 'Processing' type is used to keep track of the state of a traversal and +-- includes the set of packages that are in the processing state, e.g. in the +-- process of being installed, plus those that have been completed and those +-- where processing failed. 
+-- +-- Traversal algorithms start with an 'InstallPlan': +-- +-- * Initially there will be certain packages that can be processed immediately +-- (since they are configured source packages and have all their dependencies +-- installed already). The function 'ready' returns these packages plus a +-- 'Processing' state that marks these same packages as being in the +-- processing state. +-- +-- * The algorithm must now arrange for these packages to be processed +-- (possibly in parallel). When a package has completed processing, the +-- algorithm needs to know which other packages (if any) are now ready to +-- process as a result. The 'completed' function marks a package as completed +-- and returns any packages that are newly in the processing state (ie ready +-- to process), along with the updated 'Processing' state. +-- +-- * If failure is possible then when processing a package fails, the algorithm +-- needs to know which other packages have also failed as a result. The +-- 'failed' function marks the given package as failed as well as all the +-- other packages that depend on the failed package. In addition it returns +-- the other failed packages. + + +-- | The 'Processing' type is used to keep track of the state of a traversal +-- and includes the set of packages that are in the processing state, e.g. in +-- the process of being installed, plus those that have been completed and +-- those where processing failed. +-- +data Processing = Processing !(Set UnitId) !(Set UnitId) !(Set UnitId) + -- processing, completed, failed + +-- | The packages in the plan that are initially ready to be installed. +-- That is they are in the configured state and have all their dependencies +-- installed already. +-- +-- The result is both the packages that are now ready to be installed and also +-- a 'Processing' state containing those same packages. 
The assumption is that +-- all the packages that are ready will now be processed and so we can consider +-- them to be in the processing state. +-- +ready :: (IsUnit ipkg, IsUnit srcpkg) + => GenericInstallPlan ipkg srcpkg + -> ([GenericReadyPackage srcpkg], Processing) +ready plan = + assert (processingInvariant plan processing) $ + (readyPackages, processing) + where + !processing = + Processing + (Set.fromList [ nodeKey pkg | pkg <- readyPackages ]) + (Set.fromList [ nodeKey pkg | pkg <- toList plan, isInstalled pkg ]) + Set.empty + readyPackages = + [ ReadyPackage pkg + | Configured pkg <- toList plan + , all isInstalled (directDeps plan (nodeKey pkg)) + ] + +isInstalled :: GenericPlanPackage a b -> Bool +isInstalled (PreExisting {}) = True +isInstalled (Installed {}) = True +isInstalled _ = False + +-- | Given a package in the processing state, mark the package as completed +-- and return any packages that are newly in the processing state (ie ready to +-- process), along with the updated 'Processing' state. +-- +completed :: (IsUnit ipkg, IsUnit srcpkg) + => GenericInstallPlan ipkg srcpkg + -> Processing -> UnitId + -> ([GenericReadyPackage srcpkg], Processing) +completed plan (Processing processingSet completedSet failedSet) pkgid = + assert (pkgid `Set.member` processingSet) $ + assert (processingInvariant plan processing') $ + + ( map asReadyPackage newlyReady + , processing' ) + where + completedSet' = Set.insert pkgid completedSet + + -- each direct reverse dep where all direct deps are completed + newlyReady = [ dep + | dep <- revDirectDeps plan pkgid + , all ((`Set.member` completedSet') . 
nodeKey) + (directDeps plan (nodeKey dep)) + ] + + processingSet' = foldl' (flip Set.insert) + (Set.delete pkgid processingSet) + (map nodeKey newlyReady) + processing' = Processing processingSet' completedSet' failedSet + + asReadyPackage (Configured pkg) = ReadyPackage pkg + asReadyPackage _ = internalError "completed" "" + +failed :: (IsUnit ipkg, IsUnit srcpkg) + => GenericInstallPlan ipkg srcpkg + -> Processing -> UnitId + -> ([srcpkg], Processing) +failed plan (Processing processingSet completedSet failedSet) pkgid = + assert (pkgid `Set.member` processingSet) $ + assert (all (`Set.notMember` processingSet) (tail newlyFailedIds)) $ + assert (all (`Set.notMember` completedSet) (tail newlyFailedIds)) $ + -- but note that some newlyFailed may already be in the failed set + -- since one package can depend on two packages that both fail and + -- so would be in the rev-dep closure for both. + assert (processingInvariant plan processing') $ + + ( map asConfiguredPackage (tail newlyFailed) + , processing' ) + where + processingSet' = Set.delete pkgid processingSet + failedSet' = failedSet `Set.union` Set.fromList newlyFailedIds + newlyFailedIds = map nodeKey newlyFailed + newlyFailed = fromMaybe (internalError "failed" "package not in graph") + $ Graph.revClosure (planGraph plan) [pkgid] + processing' = Processing processingSet' completedSet failedSet' + + asConfiguredPackage (Configured pkg) = pkg + asConfiguredPackage _ = internalError "failed" "not in configured state" + +processingInvariant :: (IsUnit ipkg, IsUnit srcpkg) + => GenericInstallPlan ipkg srcpkg + -> Processing -> Bool +processingInvariant plan (Processing processingSet completedSet failedSet) = + + -- All the packages in the three sets are actually in the graph + assert (Foldable.all (flip Graph.member (planGraph plan)) processingSet) $ + assert (Foldable.all (flip Graph.member (planGraph plan)) completedSet) $ + assert (Foldable.all (flip Graph.member (planGraph plan)) failedSet) $ + + -- The 
processing, completed and failed sets are disjoint from each other + assert (noIntersection processingSet completedSet) $ + assert (noIntersection processingSet failedSet) $ + assert (noIntersection failedSet completedSet) $ + + -- Packages that depend on a package that's still processing cannot be + -- completed + assert (noIntersection (reverseClosure processingSet) completedSet) $ + + -- On the other hand, packages that depend on a package that's still + -- processing /can/ have failed (since they may have depended on multiple + -- packages that were processing, but it only takes one to fail to cause + -- knock-on failures) so it is quite possible to have an + -- intersection (reverseClosure processingSet) failedSet + + -- The failed set is upwards closed, i.e. equal to its own rev dep closure + assert (failedSet == reverseClosure failedSet) $ + + -- All immediate reverse deps of packges that are currently processing + -- are not currently being processed (ie not in the processing set). + assert (and [ rdeppkgid `Set.notMember` processingSet + | pkgid <- Set.toList processingSet + , rdeppkgid <- maybe (internalError "processingInvariant" "") + (map nodeKey) + (Graph.revNeighbors (planGraph plan) pkgid) + ]) $ + + -- Packages from the processing or failed sets are only ever in the + -- configured state. + assert (and [ case Graph.lookup pkgid (planGraph plan) of + Just (Configured _) -> True + Just (PreExisting _) -> False + Just (Installed _) -> False + Nothing -> False + | pkgid <- Set.toList processingSet ++ Set.toList failedSet ]) + + -- We use asserts rather than returning False so that on failure we get + -- better details on which bit of the invariant was violated. + True + where + reverseClosure = Set.fromList + . map nodeKey + . fromMaybe (internalError "processingInvariant" "") + . Graph.revClosure (planGraph plan) + . 
Set.toList + noIntersection a b = Set.null (Set.intersection a b) + + +-- ------------------------------------------------------------ +-- * Traversing plans +-- ------------------------------------------------------------ + +-- | Flatten an 'InstallPlan', producing the sequence of source packages in +-- the order in which they would be processed when the plan is executed. This +-- can be used for simultations or presenting execution dry-runs. +-- +-- It is guaranteed to give the same order as using 'execute' (with a serial +-- in-order 'JobControl'), which is a reverse topological orderings of the +-- source packages in the dependency graph, albeit not necessarily exactly the +-- same ordering as that produced by 'reverseTopologicalOrder'. +-- +executionOrder :: (IsUnit ipkg, IsUnit srcpkg) + => GenericInstallPlan ipkg srcpkg + -> [GenericReadyPackage srcpkg] +executionOrder plan = + let (newpkgs, processing) = ready plan + in tryNewTasks processing newpkgs + where + tryNewTasks _processing [] = [] + tryNewTasks processing (p:todo) = waitForTasks processing p todo + + waitForTasks processing p todo = + p : tryNewTasks processing' (todo++nextpkgs) + where + (nextpkgs, processing') = completed plan processing (nodeKey p) + + +-- ------------------------------------------------------------ +-- * Executing plans +-- ------------------------------------------------------------ + +-- | The set of results we get from executing an install plan. +-- +type BuildOutcomes failure result = Map UnitId (Either failure result) + +-- | Lookup the build result for a single package. +-- +lookupBuildOutcome :: HasUnitId pkg + => pkg -> BuildOutcomes failure result + -> Maybe (Either failure result) +lookupBuildOutcome = Map.lookup . installedUnitId + +-- | Execute an install plan. This traverses the plan in dependency order. +-- +-- Executing each individual package can fail and if so all dependents fail +-- too. The result for each package is collected as a 'BuildOutcomes' map. 
+-- +-- Visiting each package happens with optional parallelism, as determined by +-- the 'JobControl'. By default, after any failure we stop as soon as possible +-- (using the 'JobControl' to try to cancel in-progress tasks). This behaviour +-- can be reversed to keep going and build as many packages as possible. +-- +-- Note that the 'BuildOutcomes' is /not/ guaranteed to cover all the packages +-- in the plan. In particular in the default mode where we stop as soon as +-- possible after a failure then there may be packages which are skipped and +-- these will have no 'BuildOutcome'. +-- +execute :: forall m ipkg srcpkg result failure. + (IsUnit ipkg, IsUnit srcpkg, + Monad m) + => JobControl m (UnitId, Either failure result) + -> Bool -- ^ Keep going after failure + -> (srcpkg -> failure) -- ^ Value for dependents of failed packages + -> GenericInstallPlan ipkg srcpkg + -> (GenericReadyPackage srcpkg -> m (Either failure result)) + -> m (BuildOutcomes failure result) +execute jobCtl keepGoing depFailure plan installPkg = + let (newpkgs, processing) = ready plan + in tryNewTasks Map.empty False False processing newpkgs + where + tryNewTasks :: BuildOutcomes failure result + -> Bool -> Bool -> Processing + -> [GenericReadyPackage srcpkg] + -> m (BuildOutcomes failure result) + + tryNewTasks !results tasksFailed tasksRemaining !processing newpkgs + -- we were in the process of cancelling and now we're finished + | tasksFailed && not keepGoing && not tasksRemaining + = return results + + -- we are still in the process of cancelling, wait for remaining tasks + | tasksFailed && not keepGoing && tasksRemaining + = waitForTasks results tasksFailed processing + + -- no new tasks to do and all tasks are done so we're finished + | null newpkgs && not tasksRemaining + = return results + + -- no new tasks to do, remaining tasks to wait for + | null newpkgs + = waitForTasks results tasksFailed processing + + -- new tasks to do, spawn them, then wait for tasks to complete + | 
otherwise + = do sequence_ [ spawnJob jobCtl $ do + result <- installPkg pkg + return (nodeKey pkg, result) + | pkg <- newpkgs ] + waitForTasks results tasksFailed processing + + waitForTasks :: BuildOutcomes failure result + -> Bool -> Processing + -> m (BuildOutcomes failure result) + waitForTasks !results tasksFailed !processing = do + (pkgid, result) <- collectJob jobCtl + + case result of + + Right _success -> do + tasksRemaining <- remainingJobs jobCtl + tryNewTasks results' tasksFailed tasksRemaining + processing' nextpkgs + where + results' = Map.insert pkgid result results + (nextpkgs, processing') = completed plan processing pkgid + + Left _failure -> do + -- if this is the first failure and we're not trying to keep going + -- then try to cancel as many of the remaining jobs as possible + when (not tasksFailed && not keepGoing) $ + cancelJobs jobCtl + + tasksRemaining <- remainingJobs jobCtl + tryNewTasks results' True tasksRemaining processing' [] + where + (depsfailed, processing') = failed plan processing pkgid + results' = Map.insert pkgid result results `Map.union` depResults + depResults = Map.fromList + [ (nodeKey deppkg, Left (depFailure deppkg)) + | deppkg <- depsfailed ] + +-- ------------------------------------------------------------ +-- * Checking validity of plans +-- ------------------------------------------------------------ + +-- | A valid installation plan is a set of packages that is closed, acyclic +-- and respects the package state relation. +-- +-- * if the result is @False@ use 'problems' to get a detailed list. 
+-- +valid :: (IsUnit ipkg, IsUnit srcpkg) + => String -> Graph (GenericPlanPackage ipkg srcpkg) -> Bool +valid loc graph = + case problems graph of + [] -> True + ps -> internalError loc ('\n' : unlines (map showPlanProblem ps)) + +data PlanProblem ipkg srcpkg = + PackageMissingDeps (GenericPlanPackage ipkg srcpkg) [UnitId] + | PackageCycle [GenericPlanPackage ipkg srcpkg] + | PackageStateInvalid (GenericPlanPackage ipkg srcpkg) + (GenericPlanPackage ipkg srcpkg) + +showPlanProblem :: (IsUnit ipkg, IsUnit srcpkg) + => PlanProblem ipkg srcpkg -> String +showPlanProblem (PackageMissingDeps pkg missingDeps) = + "Package " ++ display (nodeKey pkg) + ++ " depends on the following packages which are missing from the plan: " + ++ intercalate ", " (map display missingDeps) + +showPlanProblem (PackageCycle cycleGroup) = + "The following packages are involved in a dependency cycle " + ++ intercalate ", " (map (display . nodeKey) cycleGroup) +showPlanProblem (PackageStateInvalid pkg pkg') = + "Package " ++ display (nodeKey pkg) + ++ " is in the " ++ showPlanPackageTag pkg + ++ " state but it depends on package " ++ display (nodeKey pkg') + ++ " which is in the " ++ showPlanPackageTag pkg' + ++ " state" + +-- | For an invalid plan, produce a detailed list of problems as human readable +-- error messages. This is mainly intended for debugging purposes. +-- Use 'showPlanProblem' for a human readable explanation. +-- +problems :: (IsUnit ipkg, IsUnit srcpkg) + => Graph (GenericPlanPackage ipkg srcpkg) + -> [PlanProblem ipkg srcpkg] +problems graph = + + [ PackageMissingDeps pkg + (mapMaybe + (fmap nodeKey . 
flip Graph.lookup graph) + missingDeps) + | (pkg, missingDeps) <- Graph.broken graph ] + + ++ [ PackageCycle cycleGroup + | cycleGroup <- Graph.cycles graph ] +{- + ++ [ PackageInconsistency name inconsistencies + | (name, inconsistencies) <- + dependencyInconsistencies indepGoals graph ] + --TODO: consider re-enabling this one, see SolverInstallPlan +-} + ++ [ PackageStateInvalid pkg pkg' + | pkg <- Graph.toList graph + , Just pkg' <- map (flip Graph.lookup graph) + (nodeNeighbors pkg) + , not (stateDependencyRelation pkg pkg') ] + +-- | The states of packages have that depend on each other must respect +-- this relation. That is for very case where package @a@ depends on +-- package @b@ we require that @stateDependencyRelation a b = True@. +-- +stateDependencyRelation :: GenericPlanPackage ipkg srcpkg + -> GenericPlanPackage ipkg srcpkg -> Bool +stateDependencyRelation PreExisting{} PreExisting{} = True + +stateDependencyRelation Installed{} PreExisting{} = True +stateDependencyRelation Installed{} Installed{} = True + +stateDependencyRelation Configured{} PreExisting{} = True +stateDependencyRelation Configured{} Installed{} = True +stateDependencyRelation Configured{} Configured{} = True + +stateDependencyRelation _ _ = False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/InstallSymlink.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/InstallSymlink.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/InstallSymlink.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/InstallSymlink.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,280 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.InstallSymlink +-- Copyright : (c) Duncan Coutts 2008 +-- License : 
BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Managing installing binaries with symlinks. +----------------------------------------------------------------------------- +module Distribution.Client.InstallSymlink ( + OverwritePolicy(..), + symlinkBinaries, + symlinkBinary, + ) where + +#ifdef mingw32_HOST_OS + +import Distribution.Package (PackageIdentifier) +import Distribution.Types.UnqualComponentName +import Distribution.Client.InstallPlan (InstallPlan) +import Distribution.Client.Types (BuildOutcomes) +import Distribution.Client.Setup (InstallFlags) +import Distribution.Simple.Setup (ConfigFlags) +import Distribution.Simple.Compiler +import Distribution.System + +data OverwritePolicy = NeverOverwrite | AlwaysOverwrite + deriving (Show, Eq) + +symlinkBinaries :: Platform -> Compiler + -> OverwritePolicy + -> ConfigFlags + -> InstallFlags + -> InstallPlan + -> BuildOutcomes + -> IO [(PackageIdentifier, UnqualComponentName, FilePath)] +symlinkBinaries _ _ _ _ _ _ _ = return [] + +symlinkBinary :: OverwritePolicy + -> FilePath -> FilePath -> UnqualComponentName -> String + -> IO Bool +symlinkBinary _ _ _ _ _ = fail "Symlinking feature not available on Windows" + +#else + +import Distribution.Client.Types + ( ConfiguredPackage(..), BuildOutcomes ) +import Distribution.Client.Setup + ( InstallFlags(installSymlinkBinDir) ) +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Client.InstallPlan (InstallPlan) + +import Distribution.Solver.Types.SourcePackage +import Distribution.Solver.Types.OptionalStanza + +import Distribution.Package + ( PackageIdentifier, Package(packageId), UnitId, installedUnitId ) +import Distribution.Types.UnqualComponentName +import Distribution.Compiler + ( CompilerId(..) 
) +import qualified Distribution.PackageDescription as PackageDescription +import Distribution.PackageDescription + ( PackageDescription ) +import Distribution.PackageDescription.Configuration + ( finalizePD ) +import Distribution.Simple.Setup + ( ConfigFlags(..), fromFlag, fromFlagOrDefault, flagToMaybe ) +import qualified Distribution.Simple.InstallDirs as InstallDirs +import Distribution.Simple.Compiler + ( Compiler, compilerInfo, CompilerInfo(..) ) +import Distribution.System + ( Platform ) +import Distribution.Text + ( display ) + +import System.Posix.Files + ( getSymbolicLinkStatus, isSymbolicLink, createSymbolicLink + , removeLink ) +import System.Directory + ( canonicalizePath ) +import System.FilePath + ( (), splitPath, joinPath, isAbsolute ) + +import Prelude hiding (ioError) +import System.IO.Error + ( isDoesNotExistError, ioError ) +import Distribution.Compat.Exception ( catchIO ) +import Control.Exception + ( assert ) +import Data.Maybe + ( catMaybes ) + +data OverwritePolicy = NeverOverwrite | AlwaysOverwrite + deriving (Show, Eq) + +-- | We would like by default to install binaries into some location that is on +-- the user's PATH. For per-user installations on Unix systems that basically +-- means the @~/bin/@ directory. On the majority of platforms the @~/bin/@ +-- directory will be on the user's PATH. However some people are a bit nervous +-- about letting a package manager install programs into @~/bin/@. +-- +-- A compromise solution is that instead of installing binaries directly into +-- @~/bin/@, we could install them in a private location under @~/.cabal/bin@ +-- and then create symlinks in @~/bin/@. We can be careful when setting up the +-- symlinks that we do not overwrite any binary that the user installed. We can +-- check if it was a symlink we made because it would point to the private dir +-- where we install our binaries. 
This means we can install normally without +-- worrying and in a later phase set up symlinks, and if that fails then we +-- report it to the user, but even in this case the package is still in an OK +-- installed state. +-- +-- This is an optional feature that users can choose to use or not. It is +-- controlled from the config file. Of course it only works on POSIX systems +-- with symlinks so is not available to Windows users. +-- +symlinkBinaries :: Platform -> Compiler + -> OverwritePolicy + -> ConfigFlags + -> InstallFlags + -> InstallPlan + -> BuildOutcomes + -> IO [(PackageIdentifier, UnqualComponentName, FilePath)] +symlinkBinaries platform comp overwritePolicy + configFlags installFlags + plan buildOutcomes = + case flagToMaybe (installSymlinkBinDir installFlags) of + Nothing -> return [] + Just symlinkBinDir + | null exes -> return [] + | otherwise -> do + publicBinDir <- canonicalizePath symlinkBinDir +-- TODO: do we want to do this here? : +-- createDirectoryIfMissing True publicBinDir + fmap catMaybes $ sequence + [ do privateBinDir <- pkgBinDir pkg ipid + ok <- symlinkBinary + overwritePolicy + publicBinDir privateBinDir + publicExeName privateExeName + if ok + then return Nothing + else return (Just (pkgid, publicExeName, + privateBinDir privateExeName)) + | (rpkg, pkg, exe) <- exes + , let pkgid = packageId pkg + -- This is a bit dodgy; probably won't work for Backpack packages + ipid = installedUnitId rpkg + publicExeName = PackageDescription.exeName exe + privateExeName = prefix ++ unUnqualComponentName publicExeName ++ suffix + prefix = substTemplate pkgid ipid prefixTemplate + suffix = substTemplate pkgid ipid suffixTemplate ] + where + exes = + [ (cpkg, pkg, exe) + | InstallPlan.Configured cpkg <- InstallPlan.toList plan + , case InstallPlan.lookupBuildOutcome cpkg buildOutcomes of + Just (Right _success) -> True + _ -> False + , let pkg :: PackageDescription + pkg = pkgDescription cpkg + , exe <- PackageDescription.executables pkg + , 
PackageDescription.buildable (PackageDescription.buildInfo exe) ] + + pkgDescription (ConfiguredPackage _ (SourcePackage _ pkg _ _) + flags stanzas _) = + case finalizePD flags (enableStanzas stanzas) + (const True) + platform cinfo [] pkg of + Left _ -> error "finalizePD ReadyPackage failed" + Right (desc, _) -> desc + + -- This is sadly rather complicated. We're kind of re-doing part of the + -- configuration for the package. :-( + pkgBinDir :: PackageDescription -> UnitId -> IO FilePath + pkgBinDir pkg ipid = do + defaultDirs <- InstallDirs.defaultInstallDirs + compilerFlavor + (fromFlag (configUserInstall configFlags)) + (PackageDescription.hasLibs pkg) + let templateDirs = InstallDirs.combineInstallDirs fromFlagOrDefault + defaultDirs (configInstallDirs configFlags) + absoluteDirs = InstallDirs.absoluteInstallDirs + (packageId pkg) ipid + cinfo InstallDirs.NoCopyDest + platform templateDirs + canonicalizePath (InstallDirs.bindir absoluteDirs) + + substTemplate pkgid ipid = InstallDirs.fromPathTemplate + . InstallDirs.substPathTemplate env + where env = InstallDirs.initialPathTemplateEnv pkgid ipid + cinfo platform + + fromFlagTemplate = fromFlagOrDefault (InstallDirs.toPathTemplate "") + prefixTemplate = fromFlagTemplate (configProgPrefix configFlags) + suffixTemplate = fromFlagTemplate (configProgSuffix configFlags) + cinfo = compilerInfo comp + (CompilerId compilerFlavor _) = compilerInfoId cinfo + +symlinkBinary :: + OverwritePolicy -- ^ Whether to force overwrite an existing file + -> FilePath -- ^ The canonical path of the public bin dir eg + -- @/home/user/bin@ + -> FilePath -- ^ The canonical path of the private bin dir eg + -- @/home/user/.cabal/bin@ + -> UnqualComponentName -- ^ The name of the executable to go in the public bin + -- dir, eg @foo@ + -> String -- ^ The name of the executable to in the private bin + -- dir, eg @foo-1.0@ + -> IO Bool -- ^ If creating the symlink was successful. 
@False@ if + -- there was another file there already that we did + -- not own. Other errors like permission errors just + -- propagate as exceptions. +symlinkBinary overwritePolicy publicBindir privateBindir publicName privateName = do + ok <- targetOkToOverwrite (publicBindir publicName') + (privateBindir privateName) + case ok of + NotExists -> mkLink >> return True + OkToOverwrite -> rmLink >> mkLink >> return True + NotOurFile -> + case overwritePolicy of + NeverOverwrite -> return False + AlwaysOverwrite -> rmLink >> mkLink >> return True + where + publicName' = display publicName + relativeBindir = makeRelative publicBindir privateBindir + mkLink = createSymbolicLink (relativeBindir privateName) + (publicBindir publicName') + rmLink = removeLink (publicBindir publicName') + +-- | Check a file path of a symlink that we would like to create to see if it +-- is OK. For it to be OK to overwrite it must either not already exist yet or +-- be a symlink to our target (in which case we can assume ownership). +-- +targetOkToOverwrite :: FilePath -- ^ The file path of the symlink to the private + -- binary that we would like to create + -> FilePath -- ^ The canonical path of the private binary. + -- Use 'canonicalizePath' to make this. + -> IO SymlinkStatus +targetOkToOverwrite symlink target = handleNotExist $ do + status <- getSymbolicLinkStatus symlink + if not (isSymbolicLink status) + then return NotOurFile + else do target' <- canonicalizePath symlink + -- This relies on canonicalizePath handling symlinks + if target == target' + then return OkToOverwrite + else return NotOurFile + + where + handleNotExist action = catchIO action $ \ioexception -> + -- If the target doesn't exist then there's no problem overwriting it! + if isDoesNotExistError ioexception + then return NotExists + else ioError ioexception + +data SymlinkStatus + = NotExists -- ^ The file doesn't exist so we can make a symlink. + | OkToOverwrite -- ^ A symlink already exists, though it is ours. 
We'll + -- have to delete it first before we make a new symlink. + | NotOurFile -- ^ A file already exists and it is not one of our existing + -- symlinks (either because it is not a symlink or because + -- it points somewhere other than our managed space). + deriving Show + +-- | Take two canonical paths and produce a relative path to get from the first +-- to the second, even if it means adding @..@ path components. +-- +makeRelative :: FilePath -> FilePath -> FilePath +makeRelative a b = assert (isAbsolute a && isAbsolute b) $ + let as = splitPath a + bs = splitPath b + commonLen = length $ takeWhile id $ zipWith (==) as bs + in joinPath $ [ ".." | _ <- drop commonLen as ] + ++ drop commonLen bs + +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/JobControl.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/JobControl.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/JobControl.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/JobControl.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,174 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.JobControl +-- Copyright : (c) Duncan Coutts 2012 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- A job control concurrency abstraction +----------------------------------------------------------------------------- +module Distribution.Client.JobControl ( + JobControl, + newSerialJobControl, + newParallelJobControl, + spawnJob, + collectJob, + remainingJobs, + cancelJobs, + + JobLimit, + newJobLimit, + withJobLimit, + + Lock, + newLock, + criticalSection + ) where + +import Control.Monad +import Control.Concurrent (forkIO) +import 
Control.Concurrent.MVar +import Control.Concurrent.STM (STM, atomically) +import Control.Concurrent.STM.TVar +import Control.Concurrent.STM.TChan +import Control.Exception (SomeException, bracket_, throwIO, try) +import Distribution.Client.Compat.Semaphore + + +-- | A simple concurrency abstraction. Jobs can be spawned and can complete +-- in any order. This allows both serial and parallel implementations. +-- +data JobControl m a = JobControl { + -- | Add a new job to the pool of jobs + spawnJob :: m a -> m (), + + -- | Wait until one job is complete + collectJob :: m a, + + -- | Returns True if there are any outstanding jobs + -- (ie spawned but yet to be collected) + remainingJobs :: m Bool, + + -- | Try to cancel any outstanding but not-yet-started jobs. + -- Call 'remainingJobs' after this to find out if any jobs are left + -- (ie could not be cancelled). + cancelJobs :: m () + } + + +-- | Make a 'JobControl' that executes all jobs serially and in order. +-- It only executes jobs on demand when they are collected, not eagerly. +-- +-- Cancelling will cancel /all/ jobs that have not been collected yet. +-- +newSerialJobControl :: IO (JobControl IO a) +newSerialJobControl = do + qVar <- newTChanIO + return JobControl { + spawnJob = spawn qVar, + collectJob = collect qVar, + remainingJobs = remaining qVar, + cancelJobs = cancel qVar + } + where + spawn :: TChan (IO a) -> IO a -> IO () + spawn qVar job = atomically $ writeTChan qVar job + + collect :: TChan (IO a) -> IO a + collect qVar = + join $ atomically $ readTChan qVar + + remaining :: TChan (IO a) -> IO Bool + remaining qVar = fmap not $ atomically $ isEmptyTChan qVar + + cancel :: TChan (IO a) -> IO () + cancel qVar = do + _ <- atomically $ readAllTChan qVar + return () + +-- | Make a 'JobControl' that eagerly executes jobs in parallel, with a given +-- maximum degree of parallelism. 
+-- +-- Cancelling will cancel jobs that have not yet begun executing, but jobs +-- that have already been executed or are currently executing cannot be +-- cancelled. +-- +newParallelJobControl :: Int -> IO (JobControl IO a) +newParallelJobControl n | n < 1 || n > 1000 = + error $ "newParallelJobControl: not a sensible number of jobs: " ++ show n +newParallelJobControl maxJobLimit = do + inqVar <- newTChanIO + outqVar <- newTChanIO + countVar <- newTVarIO 0 + replicateM_ maxJobLimit $ + forkIO $ + worker inqVar outqVar + return JobControl { + spawnJob = spawn inqVar countVar, + collectJob = collect outqVar countVar, + remainingJobs = remaining countVar, + cancelJobs = cancel inqVar countVar + } + where + worker :: TChan (IO a) -> TChan (Either SomeException a) -> IO () + worker inqVar outqVar = + forever $ do + job <- atomically $ readTChan inqVar + res <- try job + atomically $ writeTChan outqVar res + + spawn :: TChan (IO a) -> TVar Int -> IO a -> IO () + spawn inqVar countVar job = + atomically $ do + modifyTVar' countVar (+1) + writeTChan inqVar job + + collect :: TChan (Either SomeException a) -> TVar Int -> IO a + collect outqVar countVar = do + res <- atomically $ do + modifyTVar' countVar (subtract 1) + readTChan outqVar + either throwIO return res + + remaining :: TVar Int -> IO Bool + remaining countVar = fmap (/=0) $ atomically $ readTVar countVar + + cancel :: TChan (IO a) -> TVar Int -> IO () + cancel inqVar countVar = + atomically $ do + xs <- readAllTChan inqVar + modifyTVar' countVar (subtract (length xs)) + +readAllTChan :: TChan a -> STM [a] +readAllTChan qvar = go [] + where + go xs = do + mx <- tryReadTChan qvar + case mx of + Nothing -> return (reverse xs) + Just x -> go (x:xs) + +------------------------- +-- Job limits and locks +-- + +data JobLimit = JobLimit QSem + +newJobLimit :: Int -> IO JobLimit +newJobLimit n = + fmap JobLimit (newQSem n) + +withJobLimit :: JobLimit -> IO a -> IO a +withJobLimit (JobLimit sem) = + bracket_ (waitQSem 
sem) (signalQSem sem) + +newtype Lock = Lock (MVar ()) + +newLock :: IO Lock +newLock = fmap Lock $ newMVar () + +criticalSection :: Lock -> IO a -> IO a +criticalSection (Lock lck) act = bracket_ (takeMVar lck) (putMVar lck ()) act diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/List.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/List.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/List.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/List.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,603 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.List +-- Copyright : (c) David Himmelstrup 2005 +-- Duncan Coutts 2008-2011 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- +-- Search for and print information about packages +----------------------------------------------------------------------------- +module Distribution.Client.List ( + list, info + ) where + +import Distribution.Package + ( PackageName, Package(..), packageName + , packageVersion, UnitId ) +import Distribution.Types.Dependency +import Distribution.Types.UnqualComponentName +import Distribution.ModuleName (ModuleName) +import Distribution.License (License) +import qualified Distribution.InstalledPackageInfo as Installed +import qualified Distribution.PackageDescription as Source +import Distribution.PackageDescription + ( Flag(..), unFlagName ) +import Distribution.PackageDescription.Configuration + ( flattenPackageDescription ) +import Distribution.Pretty (pretty) + +import Distribution.Simple.Compiler + ( Compiler, PackageDBStack ) +import Distribution.Simple.Program (ProgramDb) +import Distribution.Simple.Utils + ( equating, comparing, die', notice ) +import Distribution.Simple.Setup 
(fromFlag) +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex +import Distribution.Version + ( Version, mkVersion, versionNumbers, VersionRange, withinRange, anyVersion + , intersectVersionRanges, simplifyVersionRange ) +import Distribution.Verbosity (Verbosity) +import Distribution.Text + ( Text(disp), display ) + +import qualified Distribution.SPDX as SPDX + +import Distribution.Solver.Types.PackageConstraint +import qualified Distribution.Solver.Types.PackageIndex as PackageIndex +import Distribution.Solver.Types.SourcePackage + +import Distribution.Client.Types + ( SourcePackageDb(..), PackageSpecifier(..), UnresolvedSourcePackage ) +import Distribution.Client.Targets + ( UserTarget, resolveUserTargets ) +import Distribution.Client.Setup + ( GlobalFlags(..), ListFlags(..), InfoFlags(..) + , RepoContext(..) ) +import Distribution.Client.Utils + ( mergeBy, MergeResult(..) ) +import Distribution.Client.IndexUtils as IndexUtils + ( getSourcePackages, getInstalledPackages ) +import Distribution.Client.FetchUtils + ( isFetched ) + +import Data.List + ( sortBy, groupBy, sort, nub, intersperse, maximumBy, partition ) +import Data.Maybe + ( listToMaybe, fromJust, fromMaybe, isJust, maybeToList ) +import qualified Data.Map as Map +import Data.Tree as Tree +import Control.Monad + ( MonadPlus(mplus), join ) +import Control.Exception + ( assert ) +import Text.PrettyPrint as Disp +import System.Directory + ( doesDirectoryExist ) + + +-- | Return a list of packages matching given search strings. 
+getPkgList :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> ProgramDb + -> ListFlags + -> [String] + -> IO [PackageDisplayInfo] +getPkgList verbosity packageDBs repoCtxt comp progdb listFlags pats = do + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb + sourcePkgDb <- getSourcePackages verbosity repoCtxt + let sourcePkgIndex = packageIndex sourcePkgDb + prefs name = fromMaybe anyVersion + (Map.lookup name (packagePreferences sourcePkgDb)) + + pkgsInfo :: + [(PackageName, [Installed.InstalledPackageInfo], [UnresolvedSourcePackage])] + pkgsInfo + -- gather info for all packages + | null pats = mergePackages + (InstalledPackageIndex.allPackages installedPkgIndex) + ( PackageIndex.allPackages sourcePkgIndex) + + -- gather info for packages matching search term + | otherwise = pkgsInfoMatching + + pkgsInfoMatching :: + [(PackageName, [Installed.InstalledPackageInfo], [UnresolvedSourcePackage])] + pkgsInfoMatching = + let matchingInstalled = matchingPackages + InstalledPackageIndex.searchByNameSubstring + installedPkgIndex + matchingSource = matchingPackages + (\ idx n -> + concatMap snd + (PackageIndex.searchByNameSubstring idx n)) + sourcePkgIndex + in mergePackages matchingInstalled matchingSource + + matches :: [PackageDisplayInfo] + matches = [ mergePackageInfo pref + installedPkgs sourcePkgs selectedPkg False + | (pkgname, installedPkgs, sourcePkgs) <- pkgsInfo + , not onlyInstalled || not (null installedPkgs) + , let pref = prefs pkgname + selectedPkg = latestWithPref pref sourcePkgs ] + return matches + where + onlyInstalled = fromFlag (listInstalled listFlags) + matchingPackages search index = + [ pkg + | pat <- pats + , pkg <- search index pat ] + + +-- | Show information about packages. 
+list :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> ProgramDb + -> ListFlags + -> [String] + -> IO () +list verbosity packageDBs repos comp progdb listFlags pats = do + matches <- getPkgList verbosity packageDBs repos comp progdb listFlags pats + + if simpleOutput + then putStr $ unlines + [ display (pkgName pkg) ++ " " ++ display version + | pkg <- matches + , version <- if onlyInstalled + then installedVersions pkg + else nub . sort $ installedVersions pkg + ++ sourceVersions pkg ] + -- Note: this only works because for 'list', one cannot currently + -- specify any version constraints, so listing all installed + -- and source ones works. + else + if null matches + then notice verbosity "No matches found." + else putStr $ unlines (map showPackageSummaryInfo matches) + where + onlyInstalled = fromFlag (listInstalled listFlags) + simpleOutput = fromFlag (listSimpleOutput listFlags) + +info :: Verbosity + -> PackageDBStack + -> RepoContext + -> Compiler + -> ProgramDb + -> GlobalFlags + -> InfoFlags + -> [UserTarget] + -> IO () +info verbosity _ _ _ _ _ _ [] = + notice verbosity "No packages requested. Nothing to do." + +info verbosity packageDBs repoCtxt comp progdb + globalFlags _listFlags userTargets = do + + installedPkgIndex <- getInstalledPackages verbosity comp packageDBs progdb + sourcePkgDb <- getSourcePackages verbosity repoCtxt + let sourcePkgIndex = packageIndex sourcePkgDb + prefs name = fromMaybe anyVersion + (Map.lookup name (packagePreferences sourcePkgDb)) + + -- Users may specify names of packages that are only installed, not + -- just available source packages, so we must resolve targets using + -- the combination of installed and source packages. 
+ let sourcePkgs' = PackageIndex.fromList + $ map packageId + (InstalledPackageIndex.allPackages installedPkgIndex) + ++ map packageId + (PackageIndex.allPackages sourcePkgIndex) + pkgSpecifiers <- resolveUserTargets verbosity repoCtxt + (fromFlag $ globalWorldFile globalFlags) + sourcePkgs' userTargets + + pkgsinfo <- sequence + [ do pkginfo <- either (die' verbosity) return $ + gatherPkgInfo prefs + installedPkgIndex sourcePkgIndex + pkgSpecifier + updateFileSystemPackageDetails pkginfo + | pkgSpecifier <- pkgSpecifiers ] + + putStr $ unlines (map showPackageDetailedInfo pkgsinfo) + + where + gatherPkgInfo :: (PackageName -> VersionRange) -> + InstalledPackageIndex -> + PackageIndex.PackageIndex UnresolvedSourcePackage -> + PackageSpecifier UnresolvedSourcePackage -> + Either String PackageDisplayInfo + gatherPkgInfo prefs installedPkgIndex sourcePkgIndex + (NamedPackage name props) + | null (selectedInstalledPkgs) && null (selectedSourcePkgs) + = Left $ "There is no available version of " ++ display name + ++ " that satisfies " + ++ display (simplifyVersionRange verConstraint) + + | otherwise + = Right $ mergePackageInfo pref installedPkgs + sourcePkgs selectedSourcePkg' + showPkgVersion + where + (pref, installedPkgs, sourcePkgs) = + sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex + + selectedInstalledPkgs = InstalledPackageIndex.lookupDependency + installedPkgIndex + (Dependency name verConstraint) + selectedSourcePkgs = PackageIndex.lookupDependency sourcePkgIndex + (Dependency name verConstraint) + selectedSourcePkg' = latestWithPref pref selectedSourcePkgs + + -- display a specific package version if the user + -- supplied a non-trivial version constraint + showPkgVersion = not (null verConstraints) + verConstraint = foldr intersectVersionRanges anyVersion verConstraints + verConstraints = [ vr | PackagePropertyVersion vr <- props ] + + gatherPkgInfo prefs installedPkgIndex sourcePkgIndex + (SpecificSourcePackage pkg) = + Right $ mergePackageInfo 
pref installedPkgs sourcePkgs + selectedPkg True + where + name = packageName pkg + selectedPkg = Just pkg + (pref, installedPkgs, sourcePkgs) = + sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex + +sourcePkgsInfo :: + (PackageName -> VersionRange) + -> PackageName + -> InstalledPackageIndex + -> PackageIndex.PackageIndex UnresolvedSourcePackage + -> (VersionRange, [Installed.InstalledPackageInfo], [UnresolvedSourcePackage]) +sourcePkgsInfo prefs name installedPkgIndex sourcePkgIndex = + (pref, installedPkgs, sourcePkgs) + where + pref = prefs name + installedPkgs = concatMap snd (InstalledPackageIndex.lookupPackageName + installedPkgIndex name) + sourcePkgs = PackageIndex.lookupPackageName sourcePkgIndex name + + +-- | The info that we can display for each package. It is information per +-- package name and covers all installed and available versions. +-- +data PackageDisplayInfo = PackageDisplayInfo { + pkgName :: PackageName, + selectedVersion :: Maybe Version, + selectedSourcePkg :: Maybe UnresolvedSourcePackage, + installedVersions :: [Version], + sourceVersions :: [Version], + preferredVersions :: VersionRange, + homepage :: String, + bugReports :: String, + sourceRepo :: String, + synopsis :: String, + description :: String, + category :: String, + license :: Either SPDX.License License, + author :: String, + maintainer :: String, + dependencies :: [ExtDependency], + flags :: [Flag], + hasLib :: Bool, + hasExe :: Bool, + executables :: [UnqualComponentName], + modules :: [ModuleName], + haddockHtml :: FilePath, + haveTarball :: Bool + } + +-- | Covers source dependencies and installed dependencies in +-- one type. 
+data ExtDependency = SourceDependency Dependency + | InstalledDependency UnitId + +showPackageSummaryInfo :: PackageDisplayInfo -> String +showPackageSummaryInfo pkginfo = + renderStyle (style {lineLength = 80, ribbonsPerLine = 1}) $ + char '*' <+> disp (pkgName pkginfo) + $+$ + (nest 4 $ vcat [ + maybeShow (synopsis pkginfo) "Synopsis:" reflowParagraphs + , text "Default available version:" <+> + case selectedSourcePkg pkginfo of + Nothing -> text "[ Not available from any configured repository ]" + Just pkg -> disp (packageVersion pkg) + , text "Installed versions:" <+> + case installedVersions pkginfo of + [] | hasLib pkginfo -> text "[ Not installed ]" + | otherwise -> text "[ Unknown ]" + versions -> dispTopVersions 4 + (preferredVersions pkginfo) versions + , maybeShow (homepage pkginfo) "Homepage:" text + , text "License: " <+> either pretty pretty (license pkginfo) + ]) + $+$ text "" + where + maybeShow [] _ _ = empty + maybeShow l s f = text s <+> (f l) + +showPackageDetailedInfo :: PackageDisplayInfo -> String +showPackageDetailedInfo pkginfo = + renderStyle (style {lineLength = 80, ribbonsPerLine = 1}) $ + char '*' <+> disp (pkgName pkginfo) + Disp.<> maybe empty (\v -> char '-' Disp.<> disp v) (selectedVersion pkginfo) + <+> text (replicate (16 - length (display (pkgName pkginfo))) ' ') + Disp.<> parens pkgkind + $+$ + (nest 4 $ vcat [ + entry "Synopsis" synopsis hideIfNull reflowParagraphs + , entry "Versions available" sourceVersions + (altText null "[ Not available from server ]") + (dispTopVersions 9 (preferredVersions pkginfo)) + , entry "Versions installed" installedVersions + (altText null (if hasLib pkginfo then "[ Not installed ]" + else "[ Unknown ]")) + (dispTopVersions 4 (preferredVersions pkginfo)) + , entry "Homepage" homepage orNotSpecified text + , entry "Bug reports" bugReports orNotSpecified text + , entry "Description" description hideIfNull reflowParagraphs + , entry "Category" category hideIfNull text + , entry "License" license 
alwaysShow (either pretty pretty) + , entry "Author" author hideIfNull reflowLines + , entry "Maintainer" maintainer hideIfNull reflowLines + , entry "Source repo" sourceRepo orNotSpecified text + , entry "Executables" executables hideIfNull (commaSep disp) + , entry "Flags" flags hideIfNull (commaSep dispFlag) + , entry "Dependencies" dependencies hideIfNull (commaSep dispExtDep) + , entry "Documentation" haddockHtml showIfInstalled text + , entry "Cached" haveTarball alwaysShow dispYesNo + , if not (hasLib pkginfo) then empty else + text "Modules:" $+$ nest 4 (vcat (map disp . sort . modules $ pkginfo)) + ]) + $+$ text "" + where + entry fname field cond format = case cond (field pkginfo) of + Nothing -> label <+> format (field pkginfo) + Just Nothing -> empty + Just (Just other) -> label <+> text other + where + label = text fname Disp.<> char ':' Disp.<> padding + padding = text (replicate (13 - length fname ) ' ') + + normal = Nothing + hide = Just Nothing + replace msg = Just (Just msg) + + alwaysShow = const normal + hideIfNull v = if null v then hide else normal + showIfInstalled v + | not isInstalled = hide + | null v = replace "[ Not installed ]" + | otherwise = normal + altText nul msg v = if nul v then replace msg else normal + orNotSpecified = altText null "[ Not specified ]" + + commaSep f = Disp.fsep . Disp.punctuate (Disp.char ',') . map f + dispFlag = text . unFlagName . 
flagName + dispYesNo True = text "Yes" + dispYesNo False = text "No" + + dispExtDep (SourceDependency dep) = disp dep + dispExtDep (InstalledDependency dep) = disp dep + + isInstalled = not (null (installedVersions pkginfo)) + hasExes = length (executables pkginfo) >= 2 + --TODO: exclude non-buildable exes + pkgkind | hasLib pkginfo && hasExes = text "programs and library" + | hasLib pkginfo && hasExe pkginfo = text "program and library" + | hasLib pkginfo = text "library" + | hasExes = text "programs" + | hasExe pkginfo = text "program" + | otherwise = empty + + +reflowParagraphs :: String -> Doc +reflowParagraphs = + vcat + . intersperse (text "") -- re-insert blank lines + . map (fsep . map text . concatMap words) -- reflow paragraphs + . filter (/= [""]) + . groupBy (\x y -> "" `notElem` [x,y]) -- break on blank lines + . lines + +reflowLines :: String -> Doc +reflowLines = vcat . map text . lines + +-- | We get the 'PackageDisplayInfo' by combining the info for the installed +-- and available versions of a package. +-- +-- * We're building info about a various versions of a single named package so +-- the input package info records are all supposed to refer to the same +-- package name. 
+-- +mergePackageInfo :: VersionRange + -> [Installed.InstalledPackageInfo] + -> [UnresolvedSourcePackage] + -> Maybe UnresolvedSourcePackage + -> Bool + -> PackageDisplayInfo +mergePackageInfo versionPref installedPkgs sourcePkgs selectedPkg showVer = + assert (length installedPkgs + length sourcePkgs > 0) $ + PackageDisplayInfo { + pkgName = combine packageName source + packageName installed, + selectedVersion = if showVer then fmap packageVersion selectedPkg + else Nothing, + selectedSourcePkg = sourceSelected, + installedVersions = map packageVersion installedPkgs, + sourceVersions = map packageVersion sourcePkgs, + preferredVersions = versionPref, + + license = combine Source.licenseRaw source + Installed.license installed, + maintainer = combine Source.maintainer source + Installed.maintainer installed, + author = combine Source.author source + Installed.author installed, + homepage = combine Source.homepage source + Installed.homepage installed, + bugReports = maybe "" Source.bugReports source, + sourceRepo = fromMaybe "" . join + . fmap (uncons Nothing Source.repoLocation + . sortBy (comparing Source.repoKind) + . Source.sourceRepos) + $ source, + --TODO: installed package info is missing synopsis + synopsis = maybe "" Source.synopsis source, + description = combine Source.description source + Installed.description installed, + category = combine Source.category source + Installed.category installed, + flags = maybe [] Source.genPackageFlags sourceGeneric, + hasLib = isJust installed + || maybe False (isJust . Source.condLibrary) sourceGeneric, + hasExe = maybe False (not . null . Source.condExecutables) sourceGeneric, + executables = map fst (maybe [] Source.condExecutables sourceGeneric), + modules = combine (map Installed.exposedName . Installed.exposedModules) + installed + -- NB: only for the PUBLIC library + (concatMap getListOfExposedModules . maybeToList . Source.library) + source, + dependencies = + combine (map (SourceDependency . 
simplifyDependency) + . Source.allBuildDepends) source + (map InstalledDependency . Installed.depends) installed, + haddockHtml = fromMaybe "" . join + . fmap (listToMaybe . Installed.haddockHTMLs) + $ installed, + haveTarball = False + } + where + combine f x g y = fromJust (fmap f x `mplus` fmap g y) + installed :: Maybe Installed.InstalledPackageInfo + installed = latestWithPref versionPref installedPkgs + + getListOfExposedModules lib = Source.exposedModules lib + ++ map Source.moduleReexportName + (Source.reexportedModules lib) + + sourceSelected + | isJust selectedPkg = selectedPkg + | otherwise = latestWithPref versionPref sourcePkgs + sourceGeneric = fmap packageDescription sourceSelected + source = fmap flattenPackageDescription sourceGeneric + + uncons :: b -> (a -> b) -> [a] -> b + uncons z _ [] = z + uncons _ f (x:_) = f x + + +-- | Not all the info is pure. We have to check if the docs really are +-- installed, because the registered package info lies. Similarly we have to +-- check if the tarball has indeed been fetched. +-- +updateFileSystemPackageDetails :: PackageDisplayInfo -> IO PackageDisplayInfo +updateFileSystemPackageDetails pkginfo = do + fetched <- maybe (return False) (isFetched . packageSource) + (selectedSourcePkg pkginfo) + docsExist <- doesDirectoryExist (haddockHtml pkginfo) + return pkginfo { + haveTarball = fetched, + haddockHtml = if docsExist then haddockHtml pkginfo else "" + } + +latestWithPref :: Package pkg => VersionRange -> [pkg] -> Maybe pkg +latestWithPref _ [] = Nothing +latestWithPref pref pkgs = Just (maximumBy (comparing prefThenVersion) pkgs) + where + prefThenVersion pkg = let ver = packageVersion pkg + in (withinRange ver pref, ver) + + +-- | Rearrange installed and source packages into groups referring to the +-- same package by name. In the result pairs, the lists are guaranteed to not +-- both be empty. 
+-- +mergePackages :: [Installed.InstalledPackageInfo] + -> [UnresolvedSourcePackage] + -> [( PackageName + , [Installed.InstalledPackageInfo] + , [UnresolvedSourcePackage] )] +mergePackages installedPkgs sourcePkgs = + map collect + $ mergeBy (\i a -> fst i `compare` fst a) + (groupOn packageName installedPkgs) + (groupOn packageName sourcePkgs) + where + collect (OnlyInLeft (name,is) ) = (name, is, []) + collect ( InBoth (_,is) (name,as)) = (name, is, as) + collect (OnlyInRight (name,as)) = (name, [], as) + +groupOn :: Ord key => (a -> key) -> [a] -> [(key,[a])] +groupOn key = map (\xs -> (key (head xs), xs)) + . groupBy (equating key) + . sortBy (comparing key) + +dispTopVersions :: Int -> VersionRange -> [Version] -> Doc +dispTopVersions n pref vs = + (Disp.fsep . Disp.punctuate (Disp.char ',') + . map (\ver -> if ispref ver then disp ver else parens (disp ver)) + . sort . take n . interestingVersions ispref + $ vs) + <+> trailingMessage + + where + ispref ver = withinRange ver pref + extra = length vs - n + trailingMessage + | extra <= 0 = Disp.empty + | otherwise = Disp.parens $ Disp.text "and" + <+> Disp.int (length vs - n) + <+> if extra == 1 then Disp.text "other" + else Disp.text "others" + +-- | Reorder a bunch of versions to put the most interesting / significant +-- versions first. A preferred version range is taken into account. +-- +-- This may be used in a user interface to select a small number of versions +-- to present to the user, e.g. +-- +-- > let selectVersions = sort . take 5 . interestingVersions pref +-- +interestingVersions :: (Version -> Bool) -> [Version] -> [Version] +interestingVersions pref = + map (mkVersion . fst) . filter snd + . concat . Tree.levels + . swizzleTree + . reorderTree (\(Node (v,_) _) -> pref (mkVersion v)) + . reverseTree + . mkTree + . 
map versionNumbers + + where + swizzleTree = unfoldTree (spine []) + where + spine ts' (Node x []) = (x, ts') + spine ts' (Node x (t:ts)) = spine (Node x ts:ts') t + + reorderTree _ (Node x []) = Node x [] + reorderTree p (Node x ts) = Node x (ts' ++ ts'') + where + (ts',ts'') = partition p (map (reorderTree p) ts) + + reverseTree (Node x cs) = Node x (reverse (map reverseTree cs)) + + mkTree xs = unfoldTree step (False, [], xs) + where + step (node,ns,vs) = + ( (reverse ns, node) + , [ (any null vs', n:ns, filter (not . null) vs') + | (n, vs') <- groups vs ] + ) + groups = map (\g -> (head (head g), map tail g)) + . groupBy (equating head) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Manpage.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Manpage.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Manpage.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Manpage.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,171 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Manpage +-- Copyright : (c) Maciek Makowski 2015 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Functions for building the manual page. + +module Distribution.Client.Manpage + ( -- * Manual page generation + manpage + ) where + +import Distribution.Simple.Command +import Distribution.Client.Setup (globalCommand) + +import Data.Char (toUpper) +import Data.List (intercalate) + +data FileInfo = FileInfo String String -- ^ path, description + +-- | A list of files that should be documented in the manual page. 
+files :: [FileInfo] +files = + [ (FileInfo "~/.cabal/config" "The defaults that can be overridden with command-line options.") + , (FileInfo "~/.cabal/world" "A list of all packages whose installation has been explicitly requested.") + ] + +-- | Produces a manual page with @troff@ markup. +manpage :: String -> [CommandSpec a] -> String +manpage pname commands = unlines $ + [ ".TH " ++ map toUpper pname ++ " 1" + , ".SH NAME" + , pname ++ " \\- a system for building and packaging Haskell libraries and programs" + , ".SH SYNOPSIS" + , ".B " ++ pname + , ".I command" + , ".RI < arguments |[ options ]>..." + , "" + , "Where the" + , ".I commands" + , "are" + , "" + ] ++ + concatMap (commandSynopsisLines pname) commands ++ + [ ".SH DESCRIPTION" + , "Cabal is the standard package system for Haskell software. It helps people to configure, " + , "build and install Haskell software and to distribute it easily to other users and developers." + , "" + , "The command line " ++ pname ++ " tool (also referred to as cabal-install) helps with " + , "installing existing packages and developing new packages. " + , "It can be used to work with local packages or to install packages from online package archives, " + , "including automatically installing dependencies. By default it is configured to use Hackage, " + , "which is Haskell's central package archive that contains thousands of libraries and applications " + , "in the Cabal package format." + , ".SH OPTIONS" + , "Global options:" + , "" + ] ++ + optionsLines (globalCommand []) ++ + [ ".SH COMMANDS" + ] ++ + concatMap (commandDetailsLines pname) commands ++ + [ ".SH FILES" + ] ++ + concatMap fileLines files ++ + [ ".SH BUGS" + , "To browse the list of known issues or report a new one please see " + , "https://github.com/haskell/cabal/labels/cabal-install." 
+ ] + +commandSynopsisLines :: String -> CommandSpec action -> [String] +commandSynopsisLines pname (CommandSpec ui _ NormalCommand) = + [ ".B " ++ pname ++ " " ++ (commandName ui) + , ".R - " ++ commandSynopsis ui + , ".br" + ] +commandSynopsisLines _ (CommandSpec _ _ HiddenCommand) = [] + +commandDetailsLines :: String -> CommandSpec action -> [String] +commandDetailsLines pname (CommandSpec ui _ NormalCommand) = + [ ".B " ++ pname ++ " " ++ (commandName ui) + , "" + , commandUsage ui pname + , "" + ] ++ + optional commandDescription ++ + optional commandNotes ++ + [ "Flags:" + , ".RS" + ] ++ + optionsLines ui ++ + [ ".RE" + , "" + ] + where + optional field = + case field ui of + Just text -> [text pname, ""] + Nothing -> [] +commandDetailsLines _ (CommandSpec _ _ HiddenCommand) = [] + +optionsLines :: CommandUI flags -> [String] +optionsLines command = concatMap optionLines (concatMap optionDescr (commandOptions command ParseArgs)) + +data ArgumentRequired = Optional | Required +type OptionArg = (ArgumentRequired, ArgPlaceHolder) + +optionLines :: OptDescr flags -> [String] +optionLines (ReqArg description (optionChars, optionStrings) placeHolder _ _) = + argOptionLines description optionChars optionStrings (Required, placeHolder) +optionLines (OptArg description (optionChars, optionStrings) placeHolder _ _ _) = + argOptionLines description optionChars optionStrings (Optional, placeHolder) +optionLines (BoolOpt description (trueChars, trueStrings) (falseChars, falseStrings) _ _) = + optionLinesIfPresent trueChars trueStrings ++ + optionLinesIfPresent falseChars falseStrings ++ + optionDescriptionLines description +optionLines (ChoiceOpt options) = + concatMap choiceLines options + where + choiceLines (description, (optionChars, optionStrings), _, _) = + [ optionsLine optionChars optionStrings ] ++ + optionDescriptionLines description + +argOptionLines :: String -> [Char] -> [String] -> OptionArg -> [String] +argOptionLines description optionChars optionStrings 
arg = + [ optionsLine optionChars optionStrings + , optionArgLine arg + ] ++ + optionDescriptionLines description + +optionLinesIfPresent :: [Char] -> [String] -> [String] +optionLinesIfPresent optionChars optionStrings = + if null optionChars && null optionStrings then [] + else [ optionsLine optionChars optionStrings, ".br" ] + +optionDescriptionLines :: String -> [String] +optionDescriptionLines description = + [ ".RS" + , description + , ".RE" + , "" + ] + +optionsLine :: [Char] -> [String] -> String +optionsLine optionChars optionStrings = + intercalate ", " (shortOptions optionChars ++ longOptions optionStrings) + +shortOptions :: [Char] -> [String] +shortOptions = map (\c -> "\\-" ++ [c]) + +longOptions :: [String] -> [String] +longOptions = map (\s -> "\\-\\-" ++ s) + +optionArgLine :: OptionArg -> String +optionArgLine (Required, placeHolder) = ".I " ++ placeHolder +optionArgLine (Optional, placeHolder) = ".RI [ " ++ placeHolder ++ " ]" + +fileLines :: FileInfo -> [String] +fileLines (FileInfo path description) = + [ path + , ".RS" + , description + , ".RE" + , "" + ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Nix.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Nix.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Nix.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Nix.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,202 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE ViewPatterns #-} + +module Distribution.Client.Nix + ( findNixExpr + , inNixShell + , nixInstantiate + , nixShell + , nixShellIfSandboxed + ) where + +import Distribution.Client.Compat.Prelude + +import Control.Exception (bracket, catch) +import System.Directory + ( canonicalizePath, createDirectoryIfMissing, doesDirectoryExist + , doesFileExist, 
removeDirectoryRecursive, removeFile ) +import System.Environment (getArgs, getExecutablePath) +import System.FilePath + ( (), replaceExtension, takeDirectory, takeFileName ) +import System.IO (IOMode(..), hClose, openFile) +import System.IO.Error (isDoesNotExistError) +import System.Process (showCommandForUser) + +import Distribution.Compat.Environment + ( lookupEnv, setEnv, unsetEnv ) + +import Distribution.Verbosity + +import Distribution.Simple.Program + ( Program(..), ProgramDb + , addKnownProgram, configureProgram, emptyProgramDb, getDbProgramOutput + , runDbProgram, simpleProgram ) +import Distribution.Simple.Setup (fromFlagOrDefault) +import Distribution.Simple.Utils (debug, existsAndIsMoreRecentThan) + +import Distribution.Client.Config (SavedConfig(..)) +import Distribution.Client.GlobalFlags (GlobalFlags(..)) +import Distribution.Client.Sandbox.Types (UseSandbox(..)) + + +configureOneProgram :: Verbosity -> Program -> IO ProgramDb +configureOneProgram verb prog = + configureProgram verb prog (addKnownProgram prog emptyProgramDb) + + +touchFile :: FilePath -> IO () +touchFile path = do + catch (removeFile path) (\e -> when (isDoesNotExistError e) (return ())) + createDirectoryIfMissing True (takeDirectory path) + openFile path WriteMode >>= hClose + + +findNixExpr :: GlobalFlags -> SavedConfig -> IO (Maybe FilePath) +findNixExpr globalFlags config = do + -- criteria for deciding to run nix-shell + let nixEnabled = + fromFlagOrDefault False + (globalNix (savedGlobalFlags config) <> globalNix globalFlags) + + if nixEnabled + then do + let exprPaths = [ "shell.nix", "default.nix" ] + filterM doesFileExist exprPaths >>= \case + [] -> return Nothing + (path : _) -> return (Just path) + else return Nothing + + +-- set IN_NIX_SHELL so that builtins.getEnv in Nix works as in nix-shell +inFakeNixShell :: IO a -> IO a +inFakeNixShell f = + bracket (fakeEnv "IN_NIX_SHELL" "1") (resetEnv "IN_NIX_SHELL") (\_ -> f) + where + fakeEnv var new = do + old <- lookupEnv var 
+ setEnv var new + return old + resetEnv var = maybe (unsetEnv var) (setEnv var) + + +nixInstantiate + :: Verbosity + -> FilePath + -> Bool + -> GlobalFlags + -> SavedConfig + -> IO () +nixInstantiate verb dist force globalFlags config = + findNixExpr globalFlags config >>= \case + Nothing -> return () + Just shellNix -> do + alreadyInShell <- inNixShell + shellDrv <- drvPath dist shellNix + instantiated <- doesFileExist shellDrv + -- an extra timestamp file is necessary because the derivation lives in + -- the store so its mtime is always 1. + let timestamp = timestampPath dist shellNix + upToDate <- existsAndIsMoreRecentThan timestamp shellNix + + let ready = alreadyInShell || (instantiated && upToDate && not force) + unless ready $ do + + let prog = simpleProgram "nix-instantiate" + progdb <- configureOneProgram verb prog + + removeGCRoots verb dist + touchFile timestamp + + _ <- inFakeNixShell + (getDbProgramOutput verb prog progdb + [ "--add-root", shellDrv, "--indirect", shellNix ]) + return () + + +nixShell + :: Verbosity + -> FilePath + -> GlobalFlags + -> SavedConfig + -> IO () + -- ^ The action to perform inside a nix-shell. This is also the action + -- that will be performed immediately if Nix is disabled. + -> IO () +nixShell verb dist globalFlags config go = do + + alreadyInShell <- inNixShell + + if alreadyInShell + then go + else do + findNixExpr globalFlags config >>= \case + Nothing -> go + Just shellNix -> do + + let prog = simpleProgram "nix-shell" + progdb <- configureOneProgram verb prog + + cabal <- getExecutablePath + + -- alreadyInShell == True in child process + setEnv "CABAL_IN_NIX_SHELL" "1" + + -- Run cabal with the same arguments inside nix-shell. + -- When the child process reaches the top of nixShell, it will + -- detect that it is running inside the shell and fall back + -- automatically. 
+ shellDrv <- drvPath dist shellNix + args <- getArgs + runDbProgram verb prog progdb + [ "--add-root", gcrootPath dist "result", "--indirect", shellDrv + , "--run", showCommandForUser cabal args + ] + + +drvPath :: FilePath -> FilePath -> IO FilePath +drvPath dist path = do + -- We do not actually care about canonicity, but makeAbsolute is only + -- available in newer versions of directory. + -- We expect the path to be a symlink if it exists, so we do not canonicalize + -- the entire path because that would dereference the symlink. + distNix <- canonicalizePath (dist "nix") + -- Nix garbage collector roots must be absolute paths + return (distNix replaceExtension (takeFileName path) "drv") + + +timestampPath :: FilePath -> FilePath -> FilePath +timestampPath dist path = + dist "nix" replaceExtension (takeFileName path) "drv.timestamp" + + +gcrootPath :: FilePath -> FilePath +gcrootPath dist = dist "nix" "gcroots" + + +inNixShell :: IO Bool +inNixShell = isJust <$> lookupEnv "CABAL_IN_NIX_SHELL" + + +removeGCRoots :: Verbosity -> FilePath -> IO () +removeGCRoots verb dist = do + let tgt = gcrootPath dist + exists <- doesDirectoryExist tgt + when exists $ do + debug verb ("removing Nix gcroots from " ++ tgt) + removeDirectoryRecursive tgt + + +nixShellIfSandboxed + :: Verbosity + -> FilePath + -> GlobalFlags + -> SavedConfig + -> UseSandbox + -> IO () + -- ^ The action to perform inside a nix-shell. This is also the action + -- that will be performed immediately if Nix is disabled. 
+ -> IO () +nixShellIfSandboxed verb dist globalFlags config useSandbox go = + case useSandbox of + NoSandbox -> go + UseSandbox _ -> nixShell verb dist globalFlags config go diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Outdated.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Outdated.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Outdated.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Outdated.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,211 @@ +{-# LANGUAGE CPP #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Outdated +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Implementation of the 'outdated' command. Checks for outdated +-- dependencies in the package description file or freeze file. 
+----------------------------------------------------------------------------- + +module Distribution.Client.Outdated ( outdated + , ListOutdatedSettings(..), listOutdated ) +where + +import Prelude () +import Distribution.Client.Config +import Distribution.Client.IndexUtils as IndexUtils +import Distribution.Client.Compat.Prelude +import Distribution.Client.ProjectConfig +import Distribution.Client.DistDirLayout +import Distribution.Client.RebuildMonad +import Distribution.Client.Setup hiding (quiet) +import Distribution.Client.Targets +import Distribution.Client.Types +import Distribution.Solver.Types.PackageConstraint +import Distribution.Solver.Types.PackageIndex +import Distribution.Client.Sandbox.PackageEnvironment + +import Distribution.Package (PackageName, packageVersion) +import Distribution.PackageDescription (allBuildDepends) +import Distribution.PackageDescription.Configuration (finalizePD) +import Distribution.Simple.Compiler (Compiler, compilerInfo) +import Distribution.Simple.Setup + (fromFlagOrDefault, flagToMaybe) +import Distribution.Simple.Utils + (die', notice, debug, tryFindPackageDesc) +import Distribution.System (Platform) +import Distribution.Text (display) +import Distribution.Types.ComponentRequestedSpec + (ComponentRequestedSpec(..)) +import Distribution.Types.Dependency + (Dependency(..), depPkgName, simplifyDependency) +import Distribution.Verbosity (Verbosity, silent) +import Distribution.Version + (Version, LowerBound(..), UpperBound(..) + ,asVersionIntervals, majorBoundVersion) +import Distribution.PackageDescription.Parsec + (readGenericPackageDescription) + +import qualified Data.Set as S +import System.Directory (getCurrentDirectory) +import System.Exit (exitFailure) +import Control.Exception (throwIO) + +-- | Entry point for the 'outdated' command. 
+outdated :: Verbosity -> OutdatedFlags -> RepoContext + -> Compiler -> Platform + -> IO () +outdated verbosity0 outdatedFlags repoContext comp platform = do + let freezeFile = fromFlagOrDefault False (outdatedFreezeFile outdatedFlags) + newFreezeFile = fromFlagOrDefault False + (outdatedNewFreezeFile outdatedFlags) + mprojectFile = flagToMaybe + (outdatedProjectFile outdatedFlags) + simpleOutput = fromFlagOrDefault False + (outdatedSimpleOutput outdatedFlags) + quiet = fromFlagOrDefault False (outdatedQuiet outdatedFlags) + exitCode = fromFlagOrDefault quiet (outdatedExitCode outdatedFlags) + ignorePred = let ignoreSet = S.fromList (outdatedIgnore outdatedFlags) + in \pkgname -> pkgname `S.member` ignoreSet + minorPred = case outdatedMinor outdatedFlags of + Nothing -> const False + Just IgnoreMajorVersionBumpsNone -> const False + Just IgnoreMajorVersionBumpsAll -> const True + Just (IgnoreMajorVersionBumpsSome pkgs) -> + let minorSet = S.fromList pkgs + in \pkgname -> pkgname `S.member` minorSet + verbosity = if quiet then silent else verbosity0 + + when (not newFreezeFile && isJust mprojectFile) $ + die' verbosity $ + "--project-file must only be used with --new-freeze-file." + + sourcePkgDb <- IndexUtils.getSourcePackages verbosity repoContext + let pkgIndex = packageIndex sourcePkgDb + deps <- if freezeFile + then depsFromFreezeFile verbosity + else if newFreezeFile + then depsFromNewFreezeFile verbosity mprojectFile + else depsFromPkgDesc verbosity comp platform + debug verbosity $ "Dependencies loaded: " + ++ (intercalate ", " $ map display deps) + let outdatedDeps = listOutdated deps pkgIndex + (ListOutdatedSettings ignorePred minorPred) + when (not quiet) $ + showResult verbosity outdatedDeps simpleOutput + if (exitCode && (not . null $ outdatedDeps)) + then exitFailure + else return () + +-- | Print either the list of all outdated dependencies, or a message +-- that there are none. 
+showResult :: Verbosity -> [(Dependency,Version)] -> Bool -> IO () +showResult verbosity outdatedDeps simpleOutput = + if (not . null $ outdatedDeps) + then + do when (not simpleOutput) $ + notice verbosity "Outdated dependencies:" + for_ outdatedDeps $ \(d@(Dependency pn _), v) -> + let outdatedDep = if simpleOutput then display pn + else display d ++ " (latest: " ++ display v ++ ")" + in notice verbosity outdatedDep + else notice verbosity "All dependencies are up to date." + +-- | Convert a list of 'UserConstraint's to a 'Dependency' list. +userConstraintsToDependencies :: [UserConstraint] -> [Dependency] +userConstraintsToDependencies ucnstrs = + mapMaybe (packageConstraintToDependency . userToPackageConstraint) ucnstrs + +-- | Read the list of dependencies from the freeze file. +depsFromFreezeFile :: Verbosity -> IO [Dependency] +depsFromFreezeFile verbosity = do + cwd <- getCurrentDirectory + userConfig <- loadUserConfig verbosity cwd Nothing + let ucnstrs = map fst . configExConstraints . savedConfigureExFlags $ + userConfig + deps = userConstraintsToDependencies ucnstrs + debug verbosity "Reading the list of dependencies from the freeze file" + return deps + +-- | Read the list of dependencies from the new-style freeze file. +depsFromNewFreezeFile :: Verbosity -> Maybe FilePath -> IO [Dependency] +depsFromNewFreezeFile verbosity mprojectFile = do + projectRoot <- either throwIO return =<< + findProjectRoot Nothing mprojectFile + let distDirLayout = defaultDistDirLayout projectRoot + {- TODO: Support dist dir override -} Nothing + projectConfig <- runRebuild (distProjectRootDirectory distDirLayout) $ + readProjectLocalFreezeConfig verbosity distDirLayout + let ucnstrs = map fst . projectConfigConstraints . 
projectConfigShared + $ projectConfig + deps = userConstraintsToDependencies ucnstrs + debug verbosity $ + "Reading the list of dependencies from the new-style freeze file " ++ distProjectFile distDirLayout "freeze" + return deps + +-- | Read the list of dependencies from the package description. +depsFromPkgDesc :: Verbosity -> Compiler -> Platform -> IO [Dependency] +depsFromPkgDesc verbosity comp platform = do + cwd <- getCurrentDirectory + path <- tryFindPackageDesc cwd + gpd <- readGenericPackageDescription verbosity path + let cinfo = compilerInfo comp + epd = finalizePD mempty (ComponentRequestedSpec True True) + (const True) platform cinfo [] gpd + case epd of + Left _ -> die' verbosity "finalizePD failed" + Right (pd, _) -> do + let bd = allBuildDepends pd + debug verbosity + "Reading the list of dependencies from the package description" + return bd + +-- | Various knobs for customising the behaviour of 'listOutdated'. +data ListOutdatedSettings = ListOutdatedSettings { + -- | Should this package be ignored? + listOutdatedIgnorePred :: PackageName -> Bool, + -- | Should major version bumps should be ignored for this package? + listOutdatedMinorPred :: PackageName -> Bool + } + +-- | Find all outdated dependencies. 
+listOutdated :: [Dependency] + -> PackageIndex UnresolvedSourcePackage + -> ListOutdatedSettings + -> [(Dependency, Version)] +listOutdated deps pkgIndex (ListOutdatedSettings ignorePred minorPred) = + mapMaybe isOutdated $ map simplifyDependency deps + where + isOutdated :: Dependency -> Maybe (Dependency, Version) + isOutdated dep + | ignorePred (depPkgName dep) = Nothing + | otherwise = + let this = map packageVersion $ lookupDependency pkgIndex dep + latest = lookupLatest dep + in (\v -> (dep, v)) `fmap` isOutdated' this latest + + isOutdated' :: [Version] -> [Version] -> Maybe Version + isOutdated' [] _ = Nothing + isOutdated' _ [] = Nothing + isOutdated' this latest = + let this' = maximum this + latest' = maximum latest + in if this' < latest' then Just latest' else Nothing + + lookupLatest :: Dependency -> [Version] + lookupLatest dep + | minorPred (depPkgName dep) = + map packageVersion $ lookupDependency pkgIndex (relaxMinor dep) + | otherwise = + map packageVersion $ lookupPackageName pkgIndex (depPkgName dep) + + relaxMinor :: Dependency -> Dependency + relaxMinor (Dependency pn vr) = (Dependency pn vr') + where + vr' = let vis = asVersionIntervals vr + (LowerBound v0 _,upper) = last vis + in case upper of + NoUpperBound -> vr + UpperBound _v1 _ -> majorBoundVersion v0 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/PackageHash.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/PackageHash.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/PackageHash.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/PackageHash.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,385 @@ +{-# LANGUAGE RecordWildCards, NamedFieldPuns #-} +{-# LANGUAGE DeriveDataTypeable, DeriveGeneric #-} + +-- | Functions to calculate nix-style hashes for package ids. 
+-- +-- The basic idea is simple, hash the combination of: +-- +-- * the package tarball +-- * the ids of all the direct dependencies +-- * other local configuration (flags, profiling, etc) +-- +module Distribution.Client.PackageHash ( + -- * Calculating package hashes + PackageHashInputs(..), + PackageHashConfigInputs(..), + PackageSourceHash, + hashedInstalledPackageId, + hashPackageHashInputs, + renderPackageHashInputs, + -- ** Platform-specific variations + hashedInstalledPackageIdLong, + hashedInstalledPackageIdShort, + + -- * Low level hash choice + HashValue, + hashValue, + showHashValue, + readFileHashValue, + hashFromTUF, + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Package + ( PackageId, PackageIdentifier(..), mkComponentId + , PkgconfigName ) +import Distribution.System + ( Platform, OS(Windows, OSX), buildOS ) +import Distribution.PackageDescription + ( FlagAssignment, unFlagAssignment, showFlagValue ) +import Distribution.Simple.Compiler + ( CompilerId, OptimisationLevel(..), DebugInfoLevel(..) 
+ , ProfDetailLevel(..), showProfDetailLevel ) +import Distribution.Simple.InstallDirs + ( PathTemplate, fromPathTemplate ) +import Distribution.Text + ( display ) +import Distribution.Version +import Distribution.Client.Types + ( InstalledPackageId ) +import qualified Distribution.Solver.Types.ComponentDeps as CD + +import qualified Hackage.Security.Client as Sec + +import qualified Crypto.Hash.SHA256 as SHA256 +import qualified Data.ByteString.Base16 as Base16 +import qualified Data.ByteString.Char8 as BS +import qualified Data.ByteString.Lazy.Char8 as LBS +import qualified Data.Map as Map +import qualified Data.Set as Set +import Data.Set (Set) + +import Data.Function (on) +import Control.Exception (evaluate) +import System.IO (withBinaryFile, IOMode(..)) + + +------------------------------- +-- Calculating package hashes +-- + +-- | Calculate a 'InstalledPackageId' for a package using our nix-style +-- inputs hashing method. +-- +-- Note that due to path length limitations on Windows, this function uses +-- a different method on Windows that produces shorted package ids. +-- See 'hashedInstalledPackageIdLong' vs 'hashedInstalledPackageIdShort'. +-- +hashedInstalledPackageId :: PackageHashInputs -> InstalledPackageId +hashedInstalledPackageId + | buildOS == Windows = hashedInstalledPackageIdShort + | buildOS == OSX = hashedInstalledPackageIdVeryShort + | otherwise = hashedInstalledPackageIdLong + +-- | Calculate a 'InstalledPackageId' for a package using our nix-style +-- inputs hashing method. +-- +-- This produces large ids with big hashes. It is only suitable for systems +-- without significant path length limitations (ie not Windows). 
+-- +hashedInstalledPackageIdLong :: PackageHashInputs -> InstalledPackageId +hashedInstalledPackageIdLong pkghashinputs@PackageHashInputs{pkgHashPkgId} = + mkComponentId $ + display pkgHashPkgId -- to be a bit user friendly + ++ "-" + ++ showHashValue (hashPackageHashInputs pkghashinputs) + +-- | On Windows we have serious problems with path lengths. Windows imposes a +-- maximum path length of 260 chars, and even if we can use the windows long +-- path APIs ourselves, we cannot guarantee that ghc, gcc, ld, ar, etc etc all +-- do so too. +-- +-- So our only choice is to limit the lengths of the paths, and the only real +-- way to do that is to limit the size of the 'InstalledPackageId's that we +-- generate. We do this by truncating the package names and versions and also +-- by truncating the hash sizes. +-- +-- Truncating the package names and versions is technically ok because they are +-- just included for human convenience, the full source package id is included +-- in the hash. +-- +-- Truncating the hash size is disappointing but also technically ok. We +-- rely on the hash primarily for collision avoidance not for any security +-- properties (at least for now). +-- +hashedInstalledPackageIdShort :: PackageHashInputs -> InstalledPackageId +hashedInstalledPackageIdShort pkghashinputs@PackageHashInputs{pkgHashPkgId} = + mkComponentId $ + intercalate "-" + -- max length now 64 + [ truncateStr 14 (display name) + , truncateStr 8 (display version) + , showHashValue (truncateHash (hashPackageHashInputs pkghashinputs)) + ] + where + PackageIdentifier name version = pkgHashPkgId + + -- Truncate a 32 byte SHA256 hash to 160bits, 20 bytes :-( + -- It'll render as 40 hex chars. + truncateHash (HashValue h) = HashValue (BS.take 20 h) + + -- Truncate a string, with a visual indication that it is truncated. + truncateStr n s | length s <= n = s + | otherwise = take (n-1) s ++ "_" + +-- | On macOS we shorten the name very aggressively. 
The mach-o linker on +-- macOS has a limited load command size, to which the name of the library +-- as well as its relative path (\@rpath) entry count. To circumvent this, +-- on macOS the libraries are not stored as +-- @store//libHS.dylib@ +-- where libraryname contains the libraries name, version and abi hash, but in +-- @store/lib/libHS.dylib@ +-- where the very short library name drops all vowels from the package name, +-- and truncates the hash to 4 bytes. +-- +-- We therefore we only need one \@rpath entry to @store/lib@ instead of one +-- \@rpath entry for each library. And the reduced library name saves some +-- additional space. +-- +-- This however has two major drawbacks: +-- 1) Packages can collide more easily due to the shortened hash. +-- 2) The libraries are *not* prefix relocatable anymore as they all end up +-- in the same @store/lib@ folder. +-- +-- The ultimate solution would have to include generating proxy dynamic +-- libraries on macOS, such that the proxy libraries and the linked libraries +-- stay under the load command limit, and the recursive linker is still able +-- to link all of them. +hashedInstalledPackageIdVeryShort :: PackageHashInputs -> InstalledPackageId +hashedInstalledPackageIdVeryShort pkghashinputs@PackageHashInputs{pkgHashPkgId} = + mkComponentId $ + intercalate "-" + [ filter (not . flip elem "aeiou") (display name) + , display version + , showHashValue (truncateHash (hashPackageHashInputs pkghashinputs)) + ] + where + PackageIdentifier name version = pkgHashPkgId + truncateHash (HashValue h) = HashValue (BS.take 4 h) + +-- | All the information that contribues to a package's hash, and thus its +-- 'InstalledPackageId'. 
+-- +data PackageHashInputs = PackageHashInputs { + pkgHashPkgId :: PackageId, + pkgHashComponent :: Maybe CD.Component, + pkgHashSourceHash :: PackageSourceHash, + pkgHashPkgConfigDeps :: Set (PkgconfigName, Maybe Version), + pkgHashDirectDeps :: Set InstalledPackageId, + pkgHashOtherConfig :: PackageHashConfigInputs + } + +type PackageSourceHash = HashValue + +-- | Those parts of the package configuration that contribute to the +-- package hash. +-- +data PackageHashConfigInputs = PackageHashConfigInputs { + pkgHashCompilerId :: CompilerId, + pkgHashPlatform :: Platform, + pkgHashFlagAssignment :: FlagAssignment, -- complete not partial + pkgHashConfigureScriptArgs :: [String], -- just ./configure for build-type Configure + pkgHashVanillaLib :: Bool, + pkgHashSharedLib :: Bool, + pkgHashDynExe :: Bool, + pkgHashGHCiLib :: Bool, + pkgHashProfLib :: Bool, + pkgHashProfExe :: Bool, + pkgHashProfLibDetail :: ProfDetailLevel, + pkgHashProfExeDetail :: ProfDetailLevel, + pkgHashCoverage :: Bool, + pkgHashOptimization :: OptimisationLevel, + pkgHashSplitObjs :: Bool, + pkgHashSplitSections :: Bool, + pkgHashStripLibs :: Bool, + pkgHashStripExes :: Bool, + pkgHashDebugInfo :: DebugInfoLevel, + pkgHashProgramArgs :: Map String [String], + pkgHashExtraLibDirs :: [FilePath], + pkgHashExtraFrameworkDirs :: [FilePath], + pkgHashExtraIncludeDirs :: [FilePath], + pkgHashProgPrefix :: Maybe PathTemplate, + pkgHashProgSuffix :: Maybe PathTemplate, + + -- Haddock options + pkgHashDocumentation :: Bool, + pkgHashHaddockHoogle :: Bool, + pkgHashHaddockHtml :: Bool, + pkgHashHaddockHtmlLocation :: Maybe String, + pkgHashHaddockForeignLibs :: Bool, + pkgHashHaddockExecutables :: Bool, + pkgHashHaddockTestSuites :: Bool, + pkgHashHaddockBenchmarks :: Bool, + pkgHashHaddockInternal :: Bool, + pkgHashHaddockCss :: Maybe FilePath, + pkgHashHaddockLinkedSource :: Bool, + pkgHashHaddockQuickJump :: Bool, + pkgHashHaddockContents :: Maybe PathTemplate + +-- TODO: [required eventually] 
pkgHashToolsVersions ? +-- TODO: [required eventually] pkgHashToolsExtraOptions ? + } + deriving Show + + +-- | Calculate the overall hash to be used for an 'InstalledPackageId'. +-- +hashPackageHashInputs :: PackageHashInputs -> HashValue +hashPackageHashInputs = hashValue . renderPackageHashInputs + +-- | Render a textual representation of the 'PackageHashInputs'. +-- +-- The 'hashValue' of this text is the overall package hash. +-- +renderPackageHashInputs :: PackageHashInputs -> LBS.ByteString +renderPackageHashInputs PackageHashInputs{ + pkgHashPkgId, + pkgHashComponent, + pkgHashSourceHash, + pkgHashDirectDeps, + pkgHashPkgConfigDeps, + pkgHashOtherConfig = + PackageHashConfigInputs{..} + } = + -- The purpose of this somewhat laboured rendering (e.g. why not just + -- use show?) is so that existing package hashes do not change + -- unnecessarily when new configuration inputs are added into the hash. + + -- In particular, the assumption is that when a new configuration input + -- is included into the hash, that existing packages will typically get + -- the default value for that feature. So if we avoid adding entries with + -- the default value then most of the time adding new features will not + -- change the hashes of existing packages and so fewer packages will need + -- to be rebuilt. + + --TODO: [nice to have] ultimately we probably want to put this config info + -- into the ghc-pkg db. At that point this should probably be changed to + -- use the config file infrastructure so it can be read back in again. + LBS.pack $ unlines $ catMaybes $ + [ entry "pkgid" display pkgHashPkgId + , mentry "component" show pkgHashComponent + , entry "src" showHashValue pkgHashSourceHash + , entry "pkg-config-deps" + (intercalate ", " . map (\(pn, mb_v) -> display pn ++ + case mb_v of + Nothing -> "" + Just v -> " " ++ display v) + . Set.toList) pkgHashPkgConfigDeps + , entry "deps" (intercalate ", " . map display + . 
Set.toList) pkgHashDirectDeps + -- and then all the config + , entry "compilerid" display pkgHashCompilerId + , entry "platform" display pkgHashPlatform + , opt "flags" mempty showFlagAssignment pkgHashFlagAssignment + , opt "configure-script" [] unwords pkgHashConfigureScriptArgs + , opt "vanilla-lib" True display pkgHashVanillaLib + , opt "shared-lib" False display pkgHashSharedLib + , opt "dynamic-exe" False display pkgHashDynExe + , opt "ghci-lib" False display pkgHashGHCiLib + , opt "prof-lib" False display pkgHashProfLib + , opt "prof-exe" False display pkgHashProfExe + , opt "prof-lib-detail" ProfDetailDefault showProfDetailLevel pkgHashProfLibDetail + , opt "prof-exe-detail" ProfDetailDefault showProfDetailLevel pkgHashProfExeDetail + , opt "hpc" False display pkgHashCoverage + , opt "optimisation" NormalOptimisation (show . fromEnum) pkgHashOptimization + , opt "split-objs" False display pkgHashSplitObjs + , opt "split-sections" False display pkgHashSplitSections + , opt "stripped-lib" False display pkgHashStripLibs + , opt "stripped-exe" True display pkgHashStripExes + , opt "debug-info" NormalDebugInfo (show . 
fromEnum) pkgHashDebugInfo + , opt "extra-lib-dirs" [] unwords pkgHashExtraLibDirs + , opt "extra-framework-dirs" [] unwords pkgHashExtraFrameworkDirs + , opt "extra-include-dirs" [] unwords pkgHashExtraIncludeDirs + , opt "prog-prefix" Nothing (maybe "" fromPathTemplate) pkgHashProgPrefix + , opt "prog-suffix" Nothing (maybe "" fromPathTemplate) pkgHashProgSuffix + + , opt "documentation" False display pkgHashDocumentation + , opt "haddock-hoogle" False display pkgHashHaddockHoogle + , opt "haddock-html" False display pkgHashHaddockHtml + , opt "haddock-html-location" Nothing (fromMaybe "") pkgHashHaddockHtmlLocation + , opt "haddock-foreign-libraries" False display pkgHashHaddockForeignLibs + , opt "haddock-executables" False display pkgHashHaddockExecutables + , opt "haddock-tests" False display pkgHashHaddockTestSuites + , opt "haddock-benchmarks" False display pkgHashHaddockBenchmarks + , opt "haddock-internal" False display pkgHashHaddockInternal + , opt "haddock-css" Nothing (fromMaybe "") pkgHashHaddockCss + , opt "haddock-hyperlink-source" False display pkgHashHaddockLinkedSource + , opt "haddock-quickjump" False display pkgHashHaddockQuickJump + , opt "haddock-contents-location" Nothing (maybe "" fromPathTemplate) pkgHashHaddockContents + + ] ++ Map.foldrWithKey (\prog args acc -> opt (prog ++ "-options") [] unwords args : acc) [] pkgHashProgramArgs + where + entry key format value = Just (key ++ ": " ++ format value) + mentry key format value = fmap (\v -> key ++ ": " ++ format v) value + opt key def format value + | value == def = Nothing + | otherwise = entry key format value + + showFlagAssignment = unwords . map showFlagValue . sortBy (compare `on` fst) . unFlagAssignment + +----------------------------------------------- +-- The specific choice of hash implementation +-- + +-- Is a crypto hash necessary here? One thing to consider is who controls the +-- inputs and what's the result of a hash collision. 
Obviously we should not +-- install packages we don't trust because they can run all sorts of code, but +-- if I've checked there's no TH, no custom Setup etc, is there still a +-- problem? If someone provided us a tarball that hashed to the same value as +-- some other package and we installed it, we could end up re-using that +-- installed package in place of another one we wanted. So yes, in general +-- there is some value in preventing intentional hash collisions in installed +-- package ids. + +newtype HashValue = HashValue BS.ByteString + deriving (Eq, Generic, Show, Typeable) + +instance Binary HashValue where + put (HashValue digest) = put digest + get = do + digest <- get + -- Cannot do any sensible validation here. Although we use SHA256 + -- for stuff we hash ourselves, we can also get hashes from TUF + -- and that can in principle use different hash functions in future. + return (HashValue digest) + +-- | Hash some data. Currently uses SHA256. +-- +hashValue :: LBS.ByteString -> HashValue +hashValue = HashValue . SHA256.hashlazy + +showHashValue :: HashValue -> String +showHashValue (HashValue digest) = BS.unpack (Base16.encode digest) + +-- | Hash the content of a file. Uses SHA256. +-- +readFileHashValue :: FilePath -> IO HashValue +readFileHashValue tarball = + withBinaryFile tarball ReadMode $ \hnd -> + evaluate . hashValue =<< LBS.hGetContents hnd + +-- | Convert a hash from TUF metadata into a 'PackageSourceHash'. +-- +-- Note that TUF hashes don't neessarily have to be SHA256, since it can +-- support new algorithms in future. +-- +hashFromTUF :: Sec.Hash -> HashValue +hashFromTUF (Sec.Hash hashstr) = + --TODO: [code cleanup] either we should get TUF to use raw bytestrings or + -- perhaps we should also just use a base16 string as the internal rep. 
+ case Base16.decode (BS.pack hashstr) of + (hash, trailing) | not (BS.null hash) && BS.null trailing + -> HashValue hash + _ -> error "hashFromTUF: cannot decode base16 hash" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/PackageUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/PackageUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/PackageUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/PackageUtils.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,40 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.PackageUtils +-- Copyright : (c) Duncan Coutts 2010 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- Various package description utils that should be in the Cabal lib +----------------------------------------------------------------------------- +module Distribution.Client.PackageUtils ( + externalBuildDepends, + ) where + +import Distribution.Package + ( packageVersion, packageName ) +import Distribution.Types.ComponentRequestedSpec + ( ComponentRequestedSpec ) +import Distribution.Types.Dependency +import Distribution.Types.UnqualComponentName +import Distribution.PackageDescription + ( PackageDescription(..), libName, enabledBuildDepends ) +import Distribution.Version + ( withinRange, isAnyVersion ) + +-- | The list of dependencies that refer to external packages +-- rather than internal package components. +-- +externalBuildDepends :: PackageDescription -> ComponentRequestedSpec -> [Dependency] +externalBuildDepends pkg spec = filter (not . 
internal) (enabledBuildDepends pkg spec) + where + -- True if this dependency is an internal one (depends on a library + -- defined in the same package). + internal (Dependency depName versionRange) = + (depName == packageName pkg && + packageVersion pkg `withinRange` versionRange) || + (Just (packageNameToUnqualComponentName depName) `elem` map libName (subLibraries pkg) && + isAnyVersion versionRange) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ParseUtils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ParseUtils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ParseUtils.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ParseUtils.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,279 @@ +{-# LANGUAGE ExistentialQuantification, NamedFieldPuns #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.ParseUtils +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Parsing utilities. 
+----------------------------------------------------------------------------- + +module Distribution.Client.ParseUtils ( + + -- * Fields and field utilities + FieldDescr(..), + liftField, + liftFields, + filterFields, + mapFieldNames, + commandOptionToField, + commandOptionsToFields, + + -- * Sections and utilities + SectionDescr(..), + liftSection, + + -- * Parsing and printing flat config + parseFields, + ppFields, + ppSection, + + -- * Parsing and printing config with sections and subsections + parseFieldsAndSections, + ppFieldsAndSections, + + -- ** Top level of config files + parseConfig, + showConfig, + ) + where + +import Distribution.ParseUtils + ( FieldDescr(..), ParseResult(..), warning, LineNo, lineNo + , Field(..), liftField, readFieldsFlat ) +import Distribution.Simple.Command + ( OptionField, viewAsFieldDescr ) + +import Control.Monad ( foldM ) +import Text.PrettyPrint ( (<+>), ($+$) ) +import qualified Data.Map as Map +import qualified Text.PrettyPrint as Disp + ( (<>), Doc, text, colon, vcat, empty, isEmpty, nest ) + + +------------------------- +-- FieldDescr utilities +-- + +liftFields :: (b -> a) + -> (a -> b -> b) + -> [FieldDescr a] + -> [FieldDescr b] +liftFields get set = map (liftField get set) + + +-- | Given a collection of field descriptions, keep only a given list of them, +-- identified by name. +-- +filterFields :: [String] -> [FieldDescr a] -> [FieldDescr a] +filterFields includeFields = filter ((`elem` includeFields) . fieldName) + +-- | Apply a name mangling function to the field names of all the field +-- descriptions. The typical use case is to apply some prefix. +-- +mapFieldNames :: (String -> String) -> [FieldDescr a] -> [FieldDescr a] +mapFieldNames mangleName = + map (\descr -> descr { fieldName = mangleName (fieldName descr) }) + + +-- | Reuse a command line 'OptionField' as a config file 'FieldDescr'. 
+-- +commandOptionToField :: OptionField a -> FieldDescr a +commandOptionToField = viewAsFieldDescr + +-- | Reuse a bunch of command line 'OptionField's as config file 'FieldDescr's. +-- +commandOptionsToFields :: [OptionField a] -> [FieldDescr a] +commandOptionsToFields = map viewAsFieldDescr + + +------------------------------------------ +-- SectionDescr definition and utilities +-- + +-- | The description of a section in a config file. It can contain both +-- fields and optionally further subsections. See also 'FieldDescr'. +-- +data SectionDescr a = forall b. SectionDescr { + sectionName :: String, + sectionFields :: [FieldDescr b], + sectionSubsections :: [SectionDescr b], + sectionGet :: a -> [(String, b)], + sectionSet :: LineNo -> String -> b -> a -> ParseResult a, + sectionEmpty :: b + } + +-- | To help construction of config file descriptions in a modular way it is +-- useful to define fields and sections on local types and then hoist them +-- into the parent types when combining them in bigger descriptions. +-- +-- This is essentially a lens operation for 'SectionDescr' to help embedding +-- one inside another. +-- +liftSection :: (b -> a) + -> (a -> b -> b) + -> SectionDescr a + -> SectionDescr b +liftSection get' set' (SectionDescr name fields sections get set empty) = + let sectionGet' = get . get' + sectionSet' lineno param x y = do + x' <- set lineno param x (get' y) + return (set' x' y) + in SectionDescr name fields sections sectionGet' sectionSet' empty + + +------------------------------------- +-- Parsing and printing flat config +-- + +-- | Parse a bunch of semi-parsed 'Field's according to a set of field +-- descriptions. It accumulates the result on top of a given initial value. +-- +-- This only covers the case of flat configuration without subsections. See +-- also 'parseFieldsAndSections'. 
+-- +parseFields :: [FieldDescr a] -> a -> [Field] -> ParseResult a +parseFields fieldDescrs = + foldM setField + where + fieldMap = Map.fromList [ (fieldName f, f) | f <- fieldDescrs ] + + setField accum (F line name value) = + case Map.lookup name fieldMap of + Just (FieldDescr _ _ set) -> set line value accum + Nothing -> do + warning $ "Unrecognized field " ++ name ++ " on line " ++ show line + return accum + + setField accum f = do + warning $ "Unrecognized stanza on line " ++ show (lineNo f) + return accum + +-- | This is a customised version of the functions from Distribution.ParseUtils +-- that also optionally print default values for empty fields as comments. +-- +ppFields :: [FieldDescr a] -> (Maybe a) -> a -> Disp.Doc +ppFields fields def cur = + Disp.vcat [ ppField name (fmap getter def) (getter cur) + | FieldDescr name getter _ <- fields] + +ppField :: String -> (Maybe Disp.Doc) -> Disp.Doc -> Disp.Doc +ppField name mdef cur + | Disp.isEmpty cur = maybe Disp.empty + (\def -> Disp.text "--" <+> Disp.text name + Disp.<> Disp.colon <+> def) mdef + | otherwise = Disp.text name Disp.<> Disp.colon <+> cur + +-- | Pretty print a section. +-- +-- Since 'ppFields' does not cover subsections you can use this to add them. +-- Or alternatively use a 'SectionDescr' and use 'ppFieldsAndSections'. +-- +ppSection :: String -> String -> [FieldDescr a] -> (Maybe a) -> a -> Disp.Doc +ppSection name arg fields def cur + | Disp.isEmpty fieldsDoc = Disp.empty + | otherwise = Disp.text name <+> argDoc + $+$ (Disp.nest 2 fieldsDoc) + where + fieldsDoc = ppFields fields def cur + argDoc | arg == "" = Disp.empty + | otherwise = Disp.text arg + + +----------------------------------------- +-- Parsing and printing non-flat config +-- + +-- | Much like 'parseFields' but it also allows subsections. The permitted +-- subsections are given by a list of 'SectionDescr's. 
+-- +parseFieldsAndSections :: [FieldDescr a] -> [SectionDescr a] -> a + -> [Field] -> ParseResult a +parseFieldsAndSections fieldDescrs sectionDescrs = + foldM setField + where + fieldMap = Map.fromList [ (fieldName f, f) | f <- fieldDescrs ] + sectionMap = Map.fromList [ (sectionName s, s) | s <- sectionDescrs ] + + setField a (F line name value) = + case Map.lookup name fieldMap of + Just (FieldDescr _ _ set) -> set line value a + Nothing -> do + warning $ "Unrecognized field '" ++ name + ++ "' on line " ++ show line + return a + + setField a (Section line name param fields) = + case Map.lookup name sectionMap of + Just (SectionDescr _ fieldDescrs' sectionDescrs' _ set sectionEmpty) -> do + b <- parseFieldsAndSections fieldDescrs' sectionDescrs' sectionEmpty fields + set line param b a + Nothing -> do + warning $ "Unrecognized section '" ++ name + ++ "' on line " ++ show line + return a + + setField accum (block@IfBlock {}) = do + warning $ "Unrecognized stanza on line " ++ show (lineNo block) + return accum + +-- | Much like 'ppFields' but also pretty prints any subsections. Subsection +-- are only shown if they are non-empty. +-- +-- Note that unlike 'ppFields', at present it does not support printing +-- default values. If needed, adding such support would be quite reasonable. +-- +ppFieldsAndSections :: [FieldDescr a] -> [SectionDescr a] -> a -> Disp.Doc +ppFieldsAndSections fieldDescrs sectionDescrs val = + ppFields fieldDescrs Nothing val + $+$ + Disp.vcat + [ Disp.text "" $+$ sectionDoc + | SectionDescr { + sectionName, sectionGet, + sectionFields, sectionSubsections + } <- sectionDescrs + , (param, x) <- sectionGet val + , let sectionDoc = ppSectionAndSubsections + sectionName param + sectionFields sectionSubsections x + , not (Disp.isEmpty sectionDoc) + ] + +-- | Unlike 'ppSection' which has to be called directly, this gets used via +-- 'ppFieldsAndSections' and so does not need to be exported. 
+-- +ppSectionAndSubsections :: String -> String + -> [FieldDescr a] -> [SectionDescr a] -> a -> Disp.Doc +ppSectionAndSubsections name arg fields sections cur + | Disp.isEmpty fieldsDoc = Disp.empty + | otherwise = Disp.text name <+> argDoc + $+$ (Disp.nest 2 fieldsDoc) + where + fieldsDoc = showConfig fields sections cur + argDoc | arg == "" = Disp.empty + | otherwise = Disp.text arg + + +----------------------------------------------- +-- Top level config file parsing and printing +-- + +-- | Parse a string in the config file syntax into a value, based on a +-- description of the configuration file in terms of its fields and sections. +-- +-- It accumulates the result on top of a given initial (typically empty) value. +-- +parseConfig :: [FieldDescr a] -> [SectionDescr a] -> a + -> String -> ParseResult a +parseConfig fieldDescrs sectionDescrs empty str = + parseFieldsAndSections fieldDescrs sectionDescrs empty + =<< readFieldsFlat str + +-- | Render a value in the config file syntax, based on a description of the +-- configuration file in terms of its fields and sections. +-- +showConfig :: [FieldDescr a] -> [SectionDescr a] -> a -> Disp.Doc +showConfig = ppFieldsAndSections + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectBuilding/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectBuilding/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectBuilding/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectBuilding/Types.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,206 @@ +{-# LANGUAGE DeriveDataTypeable #-} + +-- | Types for the "Distribution.Client.ProjectBuilding" +-- +-- Moved out to avoid module cycles. 
+-- +module Distribution.Client.ProjectBuilding.Types ( + -- * Pre-build status + BuildStatusMap, + BuildStatus(..), + buildStatusRequiresBuild, + buildStatusToString, + BuildStatusRebuild(..), + BuildReason(..), + MonitorChangedReason(..), + + -- * Build outcomes + BuildOutcomes, + BuildOutcome, + BuildResult(..), + BuildFailure(..), + BuildFailureReason(..), + ) where + +import Distribution.Client.Types (DocsResult, TestsResult) +import Distribution.Client.FileMonitor (MonitorChangedReason(..)) + +import Distribution.Package (UnitId, PackageId) +import Distribution.InstalledPackageInfo (InstalledPackageInfo) +import Distribution.Simple.LocalBuildInfo (ComponentName) + +import Data.Map (Map) +import Data.Set (Set) +import Data.Typeable (Typeable) +import Control.Exception (Exception, SomeException) + + +------------------------------------------------------------------------------ +-- Pre-build status: result of the dry run +-- + +-- | The 'BuildStatus' of every package in the 'ElaboratedInstallPlan'. +-- +-- This is used as the result of the dry-run of building an install plan. +-- +type BuildStatusMap = Map UnitId BuildStatus + +-- | The build status for an individual package is the state that the +-- package is in /prior/ to initiating a (re)build. +-- +-- This should not be confused with a 'BuildResult' which is the result +-- /after/ successfully building a package. +-- +-- It serves two purposes: +-- +-- * For dry-run output, it lets us explain to the user if and why a package +-- is going to be (re)built. +-- +-- * It tell us what step to start or resume building from, and carries +-- enough information for us to be able to do so. +-- +data BuildStatus = + + -- | The package is in the 'InstallPlan.PreExisting' state, so does not + -- need building. + BuildStatusPreExisting + + -- | The package is in the 'InstallPlan.Installed' state, so does not + -- need building. 
+ | BuildStatusInstalled + + -- | The package has not been downloaded yet, so it will have to be + -- downloaded, unpacked and built. + | BuildStatusDownload + + -- | The package has not been unpacked yet, so it will have to be + -- unpacked and built. + | BuildStatusUnpack FilePath + + -- | The package exists in a local dir already, and just needs building + -- or rebuilding. So this can only happen for 'BuildInplaceOnly' style + -- packages. + | BuildStatusRebuild FilePath BuildStatusRebuild + + -- | The package exists in a local dir already, and is fully up to date. + -- So this package can be put into the 'InstallPlan.Installed' state + -- and it does not need to be built. + | BuildStatusUpToDate BuildResult + + +-- | Which 'BuildStatus' values indicate we'll have to do some build work of +-- some sort. In particular we use this as part of checking if any of a +-- package's deps have changed. +-- +buildStatusRequiresBuild :: BuildStatus -> Bool +buildStatusRequiresBuild BuildStatusPreExisting = False +buildStatusRequiresBuild BuildStatusInstalled = False +buildStatusRequiresBuild BuildStatusUpToDate {} = False +buildStatusRequiresBuild _ = True + +-- | This is primarily here for debugging. It's not actually used anywhere. +-- +buildStatusToString :: BuildStatus -> String +buildStatusToString BuildStatusPreExisting = "BuildStatusPreExisting" +buildStatusToString BuildStatusInstalled = "BuildStatusInstalled" +buildStatusToString BuildStatusDownload = "BuildStatusDownload" +buildStatusToString (BuildStatusUnpack fp) = "BuildStatusUnpack " ++ show fp +buildStatusToString (BuildStatusRebuild fp _) = "BuildStatusRebuild " ++ show fp +buildStatusToString (BuildStatusUpToDate _) = "BuildStatusUpToDate" + + +-- | For a package that is going to be built or rebuilt, the state it's in now. +-- +-- So again, this tells us why a package needs to be rebuilt and what build +-- phases need to be run. 
The 'MonitorChangedReason' gives us details like +-- which file changed, which is mainly for high verbosity debug output. +-- +data BuildStatusRebuild = + + -- | The package configuration changed, so the configure and build phases + -- needs to be (re)run. + BuildStatusConfigure (MonitorChangedReason ()) + + -- | The configuration has not changed but the build phase needs to be + -- rerun. We record the reason the (re)build is needed. + -- + -- The optional registration info here tells us if we've registered the + -- package already, or if we still need to do that after building. + -- @Just Nothing@ indicates that we know that no registration is + -- necessary (e.g., executable.) + -- + | BuildStatusBuild (Maybe (Maybe InstalledPackageInfo)) BuildReason + +data BuildReason = + -- | The dependencies of this package have been (re)built so the build + -- phase needs to be rerun. + -- + BuildReasonDepsRebuilt + + -- | Changes in files within the package (or first run or corrupt cache) + | BuildReasonFilesChanged (MonitorChangedReason ()) + + -- | An important special case is that no files have changed but the + -- set of components the /user asked to build/ has changed. We track the + -- set of components /we have built/, which of course only grows (until + -- some other change resets it). + -- + -- The @Set 'ComponentName'@ is the set of components we have built + -- previously. When we update the monitor we take the union of the ones + -- we have built previously with the ones the user has asked for this + -- time and save those. See 'updatePackageBuildFileMonitor'. + -- + | BuildReasonExtraTargets (Set ComponentName) + + -- | Although we're not going to build any additional targets as a whole, + -- we're going to build some part of a component or run a repl or any + -- other action that does not result in additional persistent artifacts. 
+ -- + | BuildReasonEphemeralTargets + + +------------------------------------------------------------------------------ +-- Build outcomes: result of the build +-- + +-- | A summary of the outcome for building a whole set of packages. +-- +type BuildOutcomes = Map UnitId BuildOutcome + +-- | A summary of the outcome for building a single package: either success +-- or failure. +-- +type BuildOutcome = Either BuildFailure BuildResult + +-- | Information arising from successfully building a single package. +-- +data BuildResult = BuildResult { + buildResultDocs :: DocsResult, + buildResultTests :: TestsResult, + buildResultLogFile :: Maybe FilePath + } + deriving Show + +-- | Information arising from the failure to build a single package. +-- +data BuildFailure = BuildFailure { + buildFailureLogFile :: Maybe FilePath, + buildFailureReason :: BuildFailureReason + } + deriving (Show, Typeable) + +instance Exception BuildFailure + +-- | Detail on the reason that a package failed to build. +-- +data BuildFailureReason = DependentFailed PackageId + | DownloadFailed SomeException + | UnpackFailed SomeException + | ConfigureFailed SomeException + | BuildFailed SomeException + | ReplFailed SomeException + | HaddocksFailed SomeException + | TestsFailed SomeException + | BenchFailed SomeException + | InstallFailed SomeException + deriving Show diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectBuilding.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectBuilding.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectBuilding.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectBuilding.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,1451 @@ +{-# LANGUAGE CPP, BangPatterns, RecordWildCards, NamedFieldPuns, + ScopedTypeVariables #-} +{-# 
LANGUAGE TypeFamilies #-} +{-# LANGUAGE NoMonoLocalBinds #-} +{-# LANGUAGE ConstraintKinds #-} + +-- | +-- +module Distribution.Client.ProjectBuilding ( + -- * Dry run phase + -- | What bits of the plan will we execute? The dry run does not change + -- anything but tells us what will need to be built. + rebuildTargetsDryRun, + improveInstallPlanWithUpToDatePackages, + + -- ** Build status + -- | This is the detailed status information we get from the dry run. + BuildStatusMap, + BuildStatus(..), + BuildStatusRebuild(..), + BuildReason(..), + MonitorChangedReason(..), + buildStatusToString, + + -- * Build phase + -- | Now we actually execute the plan. + rebuildTargets, + -- ** Build outcomes + -- | This is the outcome for each package of executing the plan. + -- For each package, did the build succeed or fail? + BuildOutcomes, + BuildOutcome, + BuildResult(..), + BuildFailure(..), + BuildFailureReason(..), + ) where + +#if !MIN_VERSION_base(4,8,0) +import Control.Applicative ((<$>)) +#endif + +import Distribution.Client.PackageHash (renderPackageHashInputs) +import Distribution.Client.RebuildMonad +import Distribution.Client.ProjectConfig +import Distribution.Client.ProjectPlanning +import Distribution.Client.ProjectPlanning.Types +import Distribution.Client.ProjectBuilding.Types +import Distribution.Client.Store + +import Distribution.Client.Types + hiding (BuildOutcomes, BuildOutcome, + BuildResult(..), BuildFailure(..)) +import Distribution.Client.InstallPlan + ( GenericInstallPlan, GenericPlanPackage, IsUnit ) +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Client.DistDirLayout +import Distribution.Client.FileMonitor +import Distribution.Client.SetupWrapper +import Distribution.Client.JobControl +import Distribution.Client.FetchUtils +import Distribution.Client.GlobalFlags (RepoContext) +import qualified Distribution.Client.Tar as Tar +import Distribution.Client.Setup + ( filterConfigureFlags, filterHaddockArgs + , 
filterHaddockFlags ) +import Distribution.Client.SourceFiles +import Distribution.Client.SrcDist (allPackageSourceFiles) +import Distribution.Client.Utils + ( ProgressPhase(..), progressMessage, removeExistingFile ) + +import Distribution.Compat.Lens +import Distribution.Package hiding (InstalledPackageId, installedPackageId) +import qualified Distribution.PackageDescription as PD +import Distribution.InstalledPackageInfo (InstalledPackageInfo) +import qualified Distribution.InstalledPackageInfo as Installed +import Distribution.Simple.BuildPaths (haddockDirName) +import qualified Distribution.Simple.InstallDirs as InstallDirs +import Distribution.Types.BuildType +import Distribution.Types.PackageDescription.Lens (componentModules) +import Distribution.Simple.Program +import qualified Distribution.Simple.Setup as Cabal +import Distribution.Simple.Command (CommandUI) +import qualified Distribution.Simple.Register as Cabal +import Distribution.Simple.LocalBuildInfo (ComponentName(..)) +import Distribution.Simple.Compiler + ( Compiler, compilerId, PackageDB(..) ) + +import Distribution.Simple.Utils +import Distribution.Version +import Distribution.Verbosity +import Distribution.Text +import Distribution.ParseUtils ( showPWarning ) +import Distribution.Compat.Graph (IsNode(..)) + +import Data.Map (Map) +import qualified Data.Map as Map +import Data.Set (Set) +import qualified Data.Set as Set +import qualified Data.ByteString.Lazy as LBS +import Data.List (isPrefixOf) + +import Control.Monad +import Control.Exception +import Data.Function (on) +import Data.Maybe + +import System.FilePath +import System.IO +import System.Directory + +#if !MIN_VERSION_directory(1,2,5) +listDirectory :: FilePath -> IO [FilePath] +listDirectory path = + (filter f) <$> (getDirectoryContents path) + where f filename = filename /= "." && filename /= ".." +#endif + +------------------------------------------------------------------------------ +-- * Overall building strategy. 
+------------------------------------------------------------------------------ +-- +-- We start with an 'ElaboratedInstallPlan' that has already been improved by +-- reusing packages from the store, and pruned to include only the targets of +-- interest and their dependencies. So the remaining packages in the +-- 'InstallPlan.Configured' state are ones we either need to build or rebuild. +-- +-- First, we do a preliminary dry run phase where we work out which packages +-- we really need to (re)build, and for the ones we do need to build which +-- build phase to start at. +-- +-- We use this to improve the 'ElaboratedInstallPlan' again by changing +-- up-to-date 'InstallPlan.Configured' packages to 'InstallPlan.Installed' +-- so that the build phase will skip them. +-- +-- Then we execute the plan, that is actually build packages. The outcomes of +-- trying to build all the packages are collected and returned. +-- +-- We split things like this (dry run and execute) for a couple reasons. +-- Firstly we need to be able to do dry runs anyway, and these need to be +-- reasonably accurate in terms of letting users know what (and why) things +-- are going to be (re)built. +-- +-- Given that we need to be able to do dry runs, it would not be great if +-- we had to repeat all the same work when we do it for real. Not only is +-- it duplicate work, but it's duplicate code which is likely to get out of +-- sync. So we do things only once. We preserve info we discover in the dry +-- run phase and rely on it later when we build things for real. This also +-- somewhat simplifies the build phase. So this way the dry run can't so +-- easily drift out of sync with the real thing since we're relying on the +-- info it produces. 
+-- +-- An additional advantage is that it makes it easier to debug rebuild +-- errors (ie rebuilding too much or too little), since all the rebuild +-- decisions are made without making any state changes at the same time +-- (that would make it harder to reproduce the problem situation). +-- +-- Finally, we can use the dry run build status and the build outcomes to +-- give us some information on the overall status of packages in the project. +-- This includes limited information about the status of things that were +-- not actually in the subset of the plan that was used for the dry run or +-- execution phases. In particular we may know that some packages are now +-- definitely out of date. See "Distribution.Client.ProjectPlanOutput" for +-- details. + + +------------------------------------------------------------------------------ +-- * Dry run: what bits of the 'ElaboratedInstallPlan' will we execute? +------------------------------------------------------------------------------ + +-- Refer to ProjectBuilding.Types for details of these important types: + +-- type BuildStatusMap = ... +-- data BuildStatus = ... +-- data BuildStatusRebuild = ... +-- data BuildReason = ... + +-- | Do the dry run pass. This is a prerequisite of 'rebuildTargets'. +-- +-- It gives us the 'BuildStatusMap'. This should be used with +-- 'improveInstallPlanWithUpToDatePackages' to give an improved version of +-- the 'ElaboratedInstallPlan' with packages switched to the +-- 'InstallPlan.Installed' state when we find that they're already up to date. 
+-- +rebuildTargetsDryRun :: DistDirLayout + -> ElaboratedSharedConfig + -> ElaboratedInstallPlan + -> IO BuildStatusMap +rebuildTargetsDryRun distDirLayout@DistDirLayout{..} shared = + -- Do the various checks to work out the 'BuildStatus' of each package + foldMInstallPlanDepOrder dryRunPkg + where + dryRunPkg :: ElaboratedPlanPackage + -> [BuildStatus] + -> IO BuildStatus + dryRunPkg (InstallPlan.PreExisting _pkg) _depsBuildStatus = + return BuildStatusPreExisting + + dryRunPkg (InstallPlan.Installed _pkg) _depsBuildStatus = + return BuildStatusInstalled + + dryRunPkg (InstallPlan.Configured pkg) depsBuildStatus = do + mloc <- checkFetched (elabPkgSourceLocation pkg) + case mloc of + Nothing -> return BuildStatusDownload + + Just (LocalUnpackedPackage srcdir) -> + -- For the case of a user-managed local dir, irrespective of the + -- build style, we build from that directory and put build + -- artifacts under the shared dist directory. + dryRunLocalPkg pkg depsBuildStatus srcdir + + Just (RemoteSourceRepoPackage _repo srcdir) -> + -- At this point, source repos are essentially the same as local + -- dirs, since we've already download them. + dryRunLocalPkg pkg depsBuildStatus srcdir + + -- The three tarball cases are handled the same as each other, + -- though depending on the build style. 
+ Just (LocalTarballPackage tarball) -> + dryRunTarballPkg pkg depsBuildStatus tarball + + Just (RemoteTarballPackage _ tarball) -> + dryRunTarballPkg pkg depsBuildStatus tarball + + Just (RepoTarballPackage _ _ tarball) -> + dryRunTarballPkg pkg depsBuildStatus tarball + + dryRunTarballPkg :: ElaboratedConfiguredPackage + -> [BuildStatus] + -> FilePath + -> IO BuildStatus + dryRunTarballPkg pkg depsBuildStatus tarball = + case elabBuildStyle pkg of + BuildAndInstall -> return (BuildStatusUnpack tarball) + BuildInplaceOnly -> do + -- TODO: [nice to have] use a proper file monitor rather than this dir exists test + exists <- doesDirectoryExist srcdir + if exists + then dryRunLocalPkg pkg depsBuildStatus srcdir + else return (BuildStatusUnpack tarball) + where + srcdir = distUnpackedSrcDirectory (packageId pkg) + + dryRunLocalPkg :: ElaboratedConfiguredPackage + -> [BuildStatus] + -> FilePath + -> IO BuildStatus + dryRunLocalPkg pkg depsBuildStatus srcdir = do + -- Go and do lots of I/O, reading caches and probing files to work out + -- if anything has changed + change <- checkPackageFileMonitorChanged + packageFileMonitor pkg srcdir depsBuildStatus + case change of + -- It did change, giving us 'BuildStatusRebuild' info on why + Left rebuild -> + return (BuildStatusRebuild srcdir rebuild) + + -- No changes, the package is up to date. Use the saved build results. + Right buildResult -> + return (BuildStatusUpToDate buildResult) + where + packageFileMonitor = + newPackageFileMonitor shared distDirLayout (elabDistDirParams shared pkg) + + +-- | A specialised traversal over the packages in an install plan. +-- +-- The packages are visited in dependency order, starting with packages with no +-- dependencies. The result for each package is accumulated into a 'Map' and +-- returned as the final result. In addition, when visting a package, the +-- visiting function is passed the results for all the immediate package +-- dependencies. 
This can be used to propagate information from dependencies. +-- +foldMInstallPlanDepOrder + :: forall m ipkg srcpkg b. + (Monad m, IsUnit ipkg, IsUnit srcpkg) + => (GenericPlanPackage ipkg srcpkg -> + [b] -> m b) + -> GenericInstallPlan ipkg srcpkg + -> m (Map UnitId b) +foldMInstallPlanDepOrder visit = + go Map.empty . InstallPlan.reverseTopologicalOrder + where + go :: Map UnitId b + -> [GenericPlanPackage ipkg srcpkg] + -> m (Map UnitId b) + go !results [] = return results + + go !results (pkg : pkgs) = do + -- we go in the right order so the results map has entries for all deps + let depresults :: [b] + depresults = + map (\ipkgid -> let Just result = Map.lookup ipkgid results + in result) + (InstallPlan.depends pkg) + result <- visit pkg depresults + let results' = Map.insert (nodeKey pkg) result results + go results' pkgs + +improveInstallPlanWithUpToDatePackages :: BuildStatusMap + -> ElaboratedInstallPlan + -> ElaboratedInstallPlan +improveInstallPlanWithUpToDatePackages pkgsBuildStatus = + InstallPlan.installed canPackageBeImproved + where + canPackageBeImproved pkg = + case Map.lookup (installedUnitId pkg) pkgsBuildStatus of + Just BuildStatusUpToDate {} -> True + Just _ -> False + Nothing -> error $ "improveInstallPlanWithUpToDatePackages: " + ++ display (packageId pkg) ++ " not in status map" + + +----------------------------- +-- Package change detection +-- + +-- | As part of the dry run for local unpacked packages we have to check if the +-- package config or files have changed. That is the purpose of +-- 'PackageFileMonitor' and 'checkPackageFileMonitorChanged'. +-- +-- When a package is (re)built, the monitor must be updated to reflect the new +-- state of the package. Because we sometimes build without reconfiguring the +-- state updates are split into two, one for package config changes and one +-- for other changes. This is the purpose of 'updatePackageConfigFileMonitor' +-- and 'updatePackageBuildFileMonitor'. 
+-- +data PackageFileMonitor = PackageFileMonitor { + pkgFileMonitorConfig :: FileMonitor ElaboratedConfiguredPackage (), + pkgFileMonitorBuild :: FileMonitor (Set ComponentName) BuildResultMisc, + pkgFileMonitorReg :: FileMonitor () (Maybe InstalledPackageInfo) + } + +-- | This is all the components of the 'BuildResult' other than the +-- @['InstalledPackageInfo']@. +-- +-- We have to split up the 'BuildResult' components since they get produced +-- at different times (or rather, when different things change). +-- +type BuildResultMisc = (DocsResult, TestsResult) + +newPackageFileMonitor :: ElaboratedSharedConfig + -> DistDirLayout + -> DistDirParams + -> PackageFileMonitor +newPackageFileMonitor shared + DistDirLayout{distPackageCacheFile} + dparams = + PackageFileMonitor { + pkgFileMonitorConfig = + FileMonitor { + fileMonitorCacheFile = distPackageCacheFile dparams "config", + fileMonitorKeyValid = (==) `on` normaliseConfiguredPackage shared, + fileMonitorCheckIfOnlyValueChanged = False + }, + + pkgFileMonitorBuild = + FileMonitor { + fileMonitorCacheFile = distPackageCacheFile dparams "build", + fileMonitorKeyValid = \componentsToBuild componentsAlreadyBuilt -> + componentsToBuild `Set.isSubsetOf` componentsAlreadyBuilt, + fileMonitorCheckIfOnlyValueChanged = True + }, + + pkgFileMonitorReg = + newFileMonitor (distPackageCacheFile dparams "registration") + } + +-- | Helper function for 'checkPackageFileMonitorChanged', +-- 'updatePackageConfigFileMonitor' and 'updatePackageBuildFileMonitor'. +-- +-- It selects the info from a 'ElaboratedConfiguredPackage' that are used by +-- the 'FileMonitor's (in the 'PackageFileMonitor') to detect value changes. +-- +packageFileMonitorKeyValues :: ElaboratedConfiguredPackage + -> (ElaboratedConfiguredPackage, Set ComponentName) +packageFileMonitorKeyValues elab = + (elab_config, buildComponents) + where + -- The first part is the value used to guard (re)configuring the package. 
+ -- That is, if this value changes then we will reconfigure. + -- The ElaboratedConfiguredPackage consists mostly (but not entirely) of + -- information that affects the (re)configure step. But those parts that + -- do not affect the configure step need to be nulled out. Those parts are + -- the specific targets that we're going to build. + -- + elab_config = + elab { + elabBuildTargets = [], + elabTestTargets = [], + elabBenchTargets = [], + elabReplTarget = Nothing, + elabHaddockTargets = [], + elabBuildHaddocks = False + } + + -- The second part is the value used to guard the build step. So this is + -- more or less the opposite of the first part, as it's just the info about + -- what targets we're going to build. + -- + buildComponents = elabBuildTargetWholeComponents elab + +-- | Do all the checks on whether a package has changed and thus needs either +-- rebuilding or reconfiguring and rebuilding. +-- +checkPackageFileMonitorChanged :: PackageFileMonitor + -> ElaboratedConfiguredPackage + -> FilePath + -> [BuildStatus] + -> IO (Either BuildStatusRebuild BuildResult) +checkPackageFileMonitorChanged PackageFileMonitor{..} + pkg srcdir depsBuildStatus = do + --TODO: [nice to have] some debug-level message about file changes, like rerunIfChanged + configChanged <- checkFileMonitorChanged + pkgFileMonitorConfig srcdir pkgconfig + case configChanged of + MonitorChanged monitorReason -> + return (Left (BuildStatusConfigure monitorReason')) + where + monitorReason' = fmap (const ()) monitorReason + + MonitorUnchanged () _ + -- The configChanged here includes the identity of the dependencies, + -- so depsBuildStatus is just needed for the changes in the content + -- of dependencies. 
+ | any buildStatusRequiresBuild depsBuildStatus -> do + regChanged <- checkFileMonitorChanged pkgFileMonitorReg srcdir () + let mreg = changedToMaybe regChanged + return (Left (BuildStatusBuild mreg BuildReasonDepsRebuilt)) + + | otherwise -> do + buildChanged <- checkFileMonitorChanged + pkgFileMonitorBuild srcdir buildComponents + regChanged <- checkFileMonitorChanged + pkgFileMonitorReg srcdir () + let mreg = changedToMaybe regChanged + case (buildChanged, regChanged) of + (MonitorChanged (MonitoredValueChanged prevBuildComponents), _) -> + return (Left (BuildStatusBuild mreg buildReason)) + where + buildReason = BuildReasonExtraTargets prevBuildComponents + + (MonitorChanged monitorReason, _) -> + return (Left (BuildStatusBuild mreg buildReason)) + where + buildReason = BuildReasonFilesChanged monitorReason' + monitorReason' = fmap (const ()) monitorReason + + (MonitorUnchanged _ _, MonitorChanged monitorReason) -> + -- this should only happen if the file is corrupt or been + -- manually deleted. We don't want to bother with another + -- phase just for this, so we'll reregister by doing a build. 
+ return (Left (BuildStatusBuild Nothing buildReason)) + where + buildReason = BuildReasonFilesChanged monitorReason' + monitorReason' = fmap (const ()) monitorReason + + (MonitorUnchanged _ _, MonitorUnchanged _ _) + | pkgHasEphemeralBuildTargets pkg -> + return (Left (BuildStatusBuild mreg buildReason)) + where + buildReason = BuildReasonEphemeralTargets + + (MonitorUnchanged buildResult _, MonitorUnchanged _ _) -> + return $ Right BuildResult { + buildResultDocs = docsResult, + buildResultTests = testsResult, + buildResultLogFile = Nothing + } + where + (docsResult, testsResult) = buildResult + where + (pkgconfig, buildComponents) = packageFileMonitorKeyValues pkg + changedToMaybe (MonitorChanged _) = Nothing + changedToMaybe (MonitorUnchanged x _) = Just x + + +updatePackageConfigFileMonitor :: PackageFileMonitor + -> FilePath + -> ElaboratedConfiguredPackage + -> IO () +updatePackageConfigFileMonitor PackageFileMonitor{pkgFileMonitorConfig} + srcdir pkg = + updateFileMonitor pkgFileMonitorConfig srcdir Nothing + [] pkgconfig () + where + (pkgconfig, _buildComponents) = packageFileMonitorKeyValues pkg + +updatePackageBuildFileMonitor :: PackageFileMonitor + -> FilePath + -> MonitorTimestamp + -> ElaboratedConfiguredPackage + -> BuildStatusRebuild + -> [MonitorFilePath] + -> BuildResultMisc + -> IO () +updatePackageBuildFileMonitor PackageFileMonitor{pkgFileMonitorBuild} + srcdir timestamp pkg pkgBuildStatus + monitors buildResult = + updateFileMonitor pkgFileMonitorBuild srcdir (Just timestamp) + monitors buildComponents' buildResult + where + (_pkgconfig, buildComponents) = packageFileMonitorKeyValues pkg + + -- If the only thing that's changed is that we're now building extra + -- components, then we can avoid later unnecessary rebuilds by saving the + -- total set of components that have been built, namely the union of the + -- existing ones plus the new ones. If files also changed this would be + -- the wrong thing to do. 
Note that we rely on the + -- fileMonitorCheckIfOnlyValueChanged = True mode to get this guarantee + -- that it's /only/ the value that changed not any files that changed. + buildComponents' = + case pkgBuildStatus of + BuildStatusBuild _ (BuildReasonExtraTargets prevBuildComponents) + -> buildComponents `Set.union` prevBuildComponents + _ -> buildComponents + +updatePackageRegFileMonitor :: PackageFileMonitor + -> FilePath + -> Maybe InstalledPackageInfo + -> IO () +updatePackageRegFileMonitor PackageFileMonitor{pkgFileMonitorReg} + srcdir mipkg = + updateFileMonitor pkgFileMonitorReg srcdir Nothing + [] () mipkg + +invalidatePackageRegFileMonitor :: PackageFileMonitor -> IO () +invalidatePackageRegFileMonitor PackageFileMonitor{pkgFileMonitorReg} = + removeExistingFile (fileMonitorCacheFile pkgFileMonitorReg) + + +------------------------------------------------------------------------------ +-- * Doing it: executing an 'ElaboratedInstallPlan' +------------------------------------------------------------------------------ + +-- Refer to ProjectBuilding.Types for details of these important types: + +-- type BuildOutcomes = ... +-- type BuildOutcome = ... +-- data BuildResult = ... +-- data BuildFailure = ... +-- data BuildFailureReason = ... + +-- | Build things for real. +-- +-- It requires the 'BuildStatusMap' gathered by 'rebuildTargetsDryRun'. 
+-- +rebuildTargets :: Verbosity + -> DistDirLayout + -> StoreDirLayout + -> ElaboratedInstallPlan + -> ElaboratedSharedConfig + -> BuildStatusMap + -> BuildTimeSettings + -> IO BuildOutcomes +rebuildTargets verbosity + distDirLayout@DistDirLayout{..} + storeDirLayout + installPlan + sharedPackageConfig@ElaboratedSharedConfig { + pkgConfigCompiler = compiler, + pkgConfigCompilerProgs = progdb + } + pkgsBuildStatus + buildSettings@BuildTimeSettings{ + buildSettingNumJobs, + buildSettingKeepGoing + } = do + + -- Concurrency control: create the job controller and concurrency limits + -- for downloading, building and installing. + jobControl <- if isParallelBuild + then newParallelJobControl buildSettingNumJobs + else newSerialJobControl + registerLock <- newLock -- serialise registration + cacheLock <- newLock -- serialise access to setup exe cache + --TODO: [code cleanup] eliminate setup exe cache + + debug verbosity $ + "Executing install plan " + ++ if isParallelBuild + then " in parallel using " ++ show buildSettingNumJobs ++ " threads." + else " serially." + + createDirectoryIfMissingVerbose verbosity True distBuildRootDirectory + createDirectoryIfMissingVerbose verbosity True distTempDirectory + mapM_ (createPackageDBIfMissing verbosity compiler progdb) packageDBsToUse + + -- Before traversing the install plan, pre-emptively find all packages that + -- will need to be downloaded and start downloading them. + asyncDownloadPackages verbosity withRepoCtx + installPlan pkgsBuildStatus $ \downloadMap -> + + -- For each package in the plan, in dependency order, but in parallel... + InstallPlan.execute jobControl keepGoing + (BuildFailure Nothing . DependentFailed . 
packageId) + installPlan $ \pkg -> + --TODO: review exception handling + handle (\(e :: BuildFailure) -> return (Left e)) $ fmap Right $ + + let uid = installedUnitId pkg + Just pkgBuildStatus = Map.lookup uid pkgsBuildStatus in + + rebuildTarget + verbosity + distDirLayout + storeDirLayout + buildSettings downloadMap + registerLock cacheLock + sharedPackageConfig + installPlan pkg + pkgBuildStatus + where + isParallelBuild = buildSettingNumJobs >= 2 + keepGoing = buildSettingKeepGoing + withRepoCtx = projectConfigWithBuilderRepoContext verbosity + buildSettings + packageDBsToUse = -- all the package dbs we may need to create + (Set.toList . Set.fromList) + [ pkgdb + | InstallPlan.Configured elab <- InstallPlan.toList installPlan + , pkgdb <- concat [ elabBuildPackageDBStack elab + , elabRegisterPackageDBStack elab + , elabSetupPackageDBStack elab ] + ] + + +-- | Create a package DB if it does not currently exist. Note that this action +-- is /not/ safe to run concurrently. +-- +createPackageDBIfMissing :: Verbosity -> Compiler -> ProgramDb + -> PackageDB -> IO () +createPackageDBIfMissing verbosity compiler progdb + (SpecificPackageDB dbPath) = do + exists <- Cabal.doesPackageDBExist dbPath + unless exists $ do + createDirectoryIfMissingVerbose verbosity True (takeDirectory dbPath) + Cabal.createPackageDB verbosity compiler progdb False dbPath +createPackageDBIfMissing _ _ _ _ = return () + + +-- | Given all the context and resources, (re)build an individual package. 
+-- +rebuildTarget :: Verbosity + -> DistDirLayout + -> StoreDirLayout + -> BuildTimeSettings + -> AsyncFetchMap + -> Lock -> Lock + -> ElaboratedSharedConfig + -> ElaboratedInstallPlan + -> ElaboratedReadyPackage + -> BuildStatus + -> IO BuildResult +rebuildTarget verbosity + distDirLayout@DistDirLayout{distBuildDirectory} + storeDirLayout + buildSettings downloadMap + registerLock cacheLock + sharedPackageConfig + plan rpkg@(ReadyPackage pkg) + pkgBuildStatus = + + -- We rely on the 'BuildStatus' to decide which phase to start from: + case pkgBuildStatus of + BuildStatusDownload -> downloadPhase + BuildStatusUnpack tarball -> unpackTarballPhase tarball + BuildStatusRebuild srcdir status -> rebuildPhase status srcdir + + -- TODO: perhaps re-nest the types to make these impossible + BuildStatusPreExisting {} -> unexpectedState + BuildStatusInstalled {} -> unexpectedState + BuildStatusUpToDate {} -> unexpectedState + where + unexpectedState = error "rebuildTarget: unexpected package status" + + downloadPhase = do + downsrcloc <- annotateFailureNoLog DownloadFailed $ + waitAsyncPackageDownload verbosity downloadMap pkg + case downsrcloc of + DownloadedTarball tarball -> unpackTarballPhase tarball + --TODO: [nice to have] git/darcs repos etc + + + unpackTarballPhase tarball = + withTarballLocalDirectory + verbosity distDirLayout tarball + (packageId pkg) (elabDistDirParams sharedPackageConfig pkg) (elabBuildStyle pkg) + (elabPkgDescriptionOverride pkg) $ + + case elabBuildStyle pkg of + BuildAndInstall -> buildAndInstall + BuildInplaceOnly -> buildInplace buildStatus + where + buildStatus = BuildStatusConfigure MonitorFirstRun + + -- Note that this really is rebuild, not build. It can only happen for + -- 'BuildInplaceOnly' style packages. 'BuildAndInstall' style packages + -- would only start from download or unpack phases. 
+ -- + rebuildPhase buildStatus srcdir = + assert (elabBuildStyle pkg == BuildInplaceOnly) $ + + buildInplace buildStatus srcdir builddir + where + builddir = distBuildDirectory (elabDistDirParams sharedPackageConfig pkg) + + buildAndInstall srcdir builddir = + buildAndInstallUnpackedPackage + verbosity distDirLayout storeDirLayout + buildSettings registerLock cacheLock + sharedPackageConfig + plan rpkg + srcdir builddir' + where + builddir' = makeRelative srcdir builddir + --TODO: [nice to have] ^^ do this relative stuff better + + buildInplace buildStatus srcdir builddir = + --TODO: [nice to have] use a relative build dir rather than absolute + buildInplaceUnpackedPackage + verbosity distDirLayout + buildSettings registerLock cacheLock + sharedPackageConfig + plan rpkg + buildStatus + srcdir builddir + +-- TODO: [nice to have] do we need to use a with-style for the temp +-- files for downloading http packages, or are we going to cache them +-- persistently? + +-- | Given the current 'InstallPlan' and 'BuildStatusMap', select all the +-- packages we have to download and fork off an async action to download them. +-- We download them in dependency order so that the one's we'll need +-- first are the ones we will start downloading first. +-- +-- The body action is passed a map from those packages (identified by their +-- location) to a completion var for that package. So the body action should +-- lookup the location and use 'waitAsyncPackageDownload' to get the result. 
+-- +asyncDownloadPackages :: Verbosity + -> ((RepoContext -> IO a) -> IO a) + -> ElaboratedInstallPlan + -> BuildStatusMap + -> (AsyncFetchMap -> IO a) + -> IO a +asyncDownloadPackages verbosity withRepoCtx installPlan pkgsBuildStatus body + | null pkgsToDownload = body Map.empty + | otherwise = withRepoCtx $ \repoctx -> + asyncFetchPackages verbosity repoctx + pkgsToDownload body + where + pkgsToDownload = + ordNub $ + [ elabPkgSourceLocation elab + | InstallPlan.Configured elab + <- InstallPlan.reverseTopologicalOrder installPlan + , let uid = installedUnitId elab + Just pkgBuildStatus = Map.lookup uid pkgsBuildStatus + , BuildStatusDownload <- [pkgBuildStatus] + ] + + +-- | Check if a package needs downloading, and if so expect to find a download +-- in progress in the given 'AsyncFetchMap' and wait on it to finish. +-- +waitAsyncPackageDownload :: Verbosity + -> AsyncFetchMap + -> ElaboratedConfiguredPackage + -> IO DownloadedSourceLocation +waitAsyncPackageDownload verbosity downloadMap elab = do + pkgloc <- waitAsyncFetchPackage verbosity downloadMap + (elabPkgSourceLocation elab) + case downloadedSourceLocation pkgloc of + Just loc -> return loc + Nothing -> fail "waitAsyncPackageDownload: unexpected source location" + +data DownloadedSourceLocation = DownloadedTarball FilePath + --TODO: [nice to have] git/darcs repos etc + +downloadedSourceLocation :: PackageLocation FilePath + -> Maybe DownloadedSourceLocation +downloadedSourceLocation pkgloc = + case pkgloc of + RemoteTarballPackage _ tarball -> Just (DownloadedTarball tarball) + RepoTarballPackage _ _ tarball -> Just (DownloadedTarball tarball) + _ -> Nothing + + + + +-- | Ensure that the package is unpacked in an appropriate directory, either +-- a temporary one or a persistent one under the shared dist directory. 
+-- +withTarballLocalDirectory + :: Verbosity + -> DistDirLayout + -> FilePath + -> PackageId + -> DistDirParams + -> BuildStyle + -> Maybe CabalFileText + -> (FilePath -> -- Source directory + FilePath -> -- Build directory + IO a) + -> IO a +withTarballLocalDirectory verbosity distDirLayout@DistDirLayout{..} + tarball pkgid dparams buildstyle pkgTextOverride + buildPkg = + case buildstyle of + -- In this case we make a temp dir (e.g. tmp/src2345/), unpack + -- the tarball to it (e.g. tmp/src2345/foo-1.0/), and for + -- compatibility we put the dist dir within it + -- (i.e. tmp/src2345/foo-1.0/dist/). + -- + -- Unfortunately, a few custom Setup.hs scripts do not respect + -- the --builddir flag and always look for it at ./dist/ so + -- this way we avoid breaking those packages + BuildAndInstall -> + let tmpdir = distTempDirectory in + withTempDirectory verbosity tmpdir "src" $ \unpackdir -> do + unpackPackageTarball verbosity tarball unpackdir + pkgid pkgTextOverride + let srcdir = unpackdir display pkgid + builddir = srcdir "dist" + buildPkg srcdir builddir + + -- In this case we make sure the tarball has been unpacked to the + -- appropriate location under the shared dist dir, and then build it + -- inplace there + BuildInplaceOnly -> do + let srcrootdir = distUnpackedSrcRootDirectory + srcdir = distUnpackedSrcDirectory pkgid + builddir = distBuildDirectory dparams + -- TODO: [nice to have] use a proper file monitor rather than this dir exists test + exists <- doesDirectoryExist srcdir + unless exists $ do + createDirectoryIfMissingVerbose verbosity True srcrootdir + unpackPackageTarball verbosity tarball srcrootdir + pkgid pkgTextOverride + moveTarballShippedDistDirectory verbosity distDirLayout + srcrootdir pkgid dparams + buildPkg srcdir builddir + + +unpackPackageTarball :: Verbosity -> FilePath -> FilePath + -> PackageId -> Maybe CabalFileText + -> IO () +unpackPackageTarball verbosity tarball parentdir pkgid pkgTextOverride = + --TODO: [nice to have] 
switch to tar package and catch tar exceptions + annotateFailureNoLog UnpackFailed $ do + + -- Unpack the tarball + -- + info verbosity $ "Extracting " ++ tarball ++ " to " ++ parentdir ++ "..." + Tar.extractTarGzFile parentdir pkgsubdir tarball + + -- Sanity check + -- + exists <- doesFileExist cabalFile + unless exists $ + die' verbosity $ "Package .cabal file not found in the tarball: " ++ cabalFile + + -- Overwrite the .cabal with the one from the index, when appropriate + -- + case pkgTextOverride of + Nothing -> return () + Just pkgtxt -> do + info verbosity $ "Updating " ++ display pkgname <.> "cabal" + ++ " with the latest revision from the index." + writeFileAtomic cabalFile pkgtxt + + where + cabalFile = parentdir pkgsubdir + display pkgname <.> "cabal" + pkgsubdir = display pkgid + pkgname = packageName pkgid + + +-- | This is a bit of a hacky workaround. A number of packages ship +-- pre-processed .hs files in a dist directory inside the tarball. We don't +-- use the standard 'dist' location so unless we move this dist dir to the +-- right place then we'll miss the shipped pre-procssed files. This hacky +-- approach to shipped pre-procssed files ought to be replaced by a proper +-- system, though we'll still need to keep this hack for older packages. 
+-- +moveTarballShippedDistDirectory :: Verbosity -> DistDirLayout + -> FilePath -> PackageId -> DistDirParams -> IO () +moveTarballShippedDistDirectory verbosity DistDirLayout{distBuildDirectory} + parentdir pkgid dparams = do + distDirExists <- doesDirectoryExist tarballDistDir + when distDirExists $ do + debug verbosity $ "Moving '" ++ tarballDistDir ++ "' to '" + ++ targetDistDir ++ "'" + --TODO: [nice to have] or perhaps better to copy, and use a file monitor + renameDirectory tarballDistDir targetDistDir + where + tarballDistDir = parentdir display pkgid "dist" + targetDistDir = distBuildDirectory dparams + + +buildAndInstallUnpackedPackage :: Verbosity + -> DistDirLayout + -> StoreDirLayout + -> BuildTimeSettings -> Lock -> Lock + -> ElaboratedSharedConfig + -> ElaboratedInstallPlan + -> ElaboratedReadyPackage + -> FilePath -> FilePath + -> IO BuildResult +buildAndInstallUnpackedPackage verbosity + distDirLayout@DistDirLayout{distTempDirectory} + storeDirLayout@StoreDirLayout { + storePackageDBStack + } + BuildTimeSettings { + buildSettingNumJobs, + buildSettingLogFile + } + registerLock cacheLock + pkgshared@ElaboratedSharedConfig { + pkgConfigPlatform = platform, + pkgConfigCompiler = compiler, + pkgConfigCompilerProgs = progdb + } + plan rpkg@(ReadyPackage pkg) + srcdir builddir = do + + createDirectoryIfMissingVerbose verbosity True builddir + initLogFile + + --TODO: [code cleanup] deal consistently with talking to older Setup.hs versions, much like + -- we do for ghc, with a proper options type and rendering step + -- which will also let us call directly into the lib, rather than always + -- going via the lib's command line interface, which would also allow + -- passing data like installed packages, compiler, and program db for a + -- quicker configure. 
+ + --TODO: [required feature] docs and tests + --TODO: [required feature] sudo re-exec + + -- Configure phase + noticeProgress ProgressStarting + + annotateFailure mlogFile ConfigureFailed $ + setup' configureCommand configureFlags configureArgs + + -- Build phase + noticeProgress ProgressBuilding + + annotateFailure mlogFile BuildFailed $ + setup buildCommand buildFlags + + -- Haddock phase + whenHaddock $ do + noticeProgress ProgressHaddock + annotateFailureNoLog HaddocksFailed $ + setup haddockCommand haddockFlags + + -- Install phase + noticeProgress ProgressInstalling + annotateFailure mlogFile InstallFailed $ do + + let copyPkgFiles tmpDir = do + let tmpDirNormalised = normalise tmpDir + setup Cabal.copyCommand (copyFlags tmpDirNormalised) + -- Note that the copy command has put the files into + -- @$tmpDir/$prefix@ so we need to return this dir so + -- the store knows which dir will be the final store entry. + let prefix = normalise $ dropDrive (InstallDirs.prefix (elabInstallDirs pkg)) + entryDir = tmpDirNormalised prefix + LBS.writeFile + (entryDir "cabal-hash.txt") + (renderPackageHashInputs (packageHashInputs pkgshared pkg)) + + -- Ensure that there are no files in `tmpDir`, that are not in `entryDir` + -- While this breaks the prefix-relocatable property of the lirbaries + -- it is necessary on macOS to stay under the load command limit of the + -- macOS mach-o linker. See also @PackageHash.hashedInstalledPackageIdVeryShort@. + -- We also normalise paths to ensure that there are no different representations + -- for the same path. Like / and \\ on windows under msys. + otherFiles <- filter (not . 
isPrefixOf entryDir) <$> listFilesRecursive tmpDirNormalised + -- here's where we could keep track of the installed files ourselves + -- if we wanted to by making a manifest of the files in the tmp dir + return (entryDir, otherFiles) + where + listFilesRecursive :: FilePath -> IO [FilePath] + listFilesRecursive path = do + files <- fmap (path ) <$> (listDirectory path) + allFiles <- forM files $ \file -> do + isDir <- doesDirectoryExist file + if isDir + then listFilesRecursive file + else return [file] + return (concat allFiles) + + registerPkg + | not (elabRequiresRegistration pkg) = + debug verbosity $ + "registerPkg: elab does NOT require registration for " ++ display uid + | otherwise = do + -- We register ourselves rather than via Setup.hs. We need to + -- grab and modify the InstalledPackageInfo. We decide what + -- the installed package id is, not the build system. + ipkg0 <- generateInstalledPackageInfo + let ipkg = ipkg0 { Installed.installedUnitId = uid } + assert ( elabRegisterPackageDBStack pkg + == storePackageDBStack compid) (return ()) + criticalSection registerLock $ + Cabal.registerPackage + verbosity compiler progdb + (storePackageDBStack compid) ipkg + Cabal.defaultRegisterOptions { + Cabal.registerMultiInstance = True, + Cabal.registerSuppressFilesCheck = True + } + + + -- Actual installation + void $ newStoreEntry verbosity storeDirLayout + compid uid + copyPkgFiles registerPkg + + --TODO: [nice to have] we currently rely on Setup.hs copy to do the right + -- thing. Although we do copy into an image dir and do the move into the + -- final location ourselves, perhaps we ought to do some sanity checks on + -- the image dir first. + + -- TODO: [required eventually] note that for nix-style installations it is not necessary to do + -- the 'withWin32SelfUpgrade' dance, but it would be necessary for a + -- shared bin dir. 
+ + --TODO: [required feature] docs and test phases + let docsResult = DocsNotTried + testsResult = TestsNotTried + + noticeProgress ProgressCompleted + + return BuildResult { + buildResultDocs = docsResult, + buildResultTests = testsResult, + buildResultLogFile = mlogFile + } + + where + pkgid = packageId rpkg + uid = installedUnitId rpkg + compid = compilerId compiler + + dispname = case elabPkgOrComp pkg of + ElabPackage _ -> display pkgid + ++ " (all, legacy fallback)" + ElabComponent comp -> display pkgid + ++ " (" ++ maybe "custom" display (compComponentName comp) ++ ")" + + noticeProgress phase = when isParallelBuild $ + progressMessage verbosity phase dispname + + isParallelBuild = buildSettingNumJobs >= 2 + + whenHaddock action + | hasValidHaddockTargets pkg = action + | otherwise = return () + + configureCommand = Cabal.configureCommand defaultProgramDb + configureFlags v = flip filterConfigureFlags v $ + setupHsConfigureFlags rpkg pkgshared + verbosity builddir + configureArgs _ = setupHsConfigureArgs pkg + + buildCommand = Cabal.buildCommand defaultProgramDb + buildFlags _ = setupHsBuildFlags pkg pkgshared verbosity builddir + + haddockCommand = Cabal.haddockCommand + haddockFlags _ = setupHsHaddockFlags pkg pkgshared + verbosity builddir + + generateInstalledPackageInfo :: IO InstalledPackageInfo + generateInstalledPackageInfo = + withTempInstalledPackageInfoFile + verbosity distTempDirectory $ \pkgConfDest -> do + let registerFlags _ = setupHsRegisterFlags + pkg pkgshared + verbosity builddir + pkgConfDest + setup Cabal.registerCommand registerFlags + + copyFlags destdir _ = setupHsCopyFlags pkg pkgshared verbosity + builddir destdir + + scriptOptions = setupHsScriptOptions rpkg plan pkgshared + distDirLayout srcdir builddir + isParallelBuild cacheLock + + setup :: CommandUI flags -> (Version -> flags) -> IO () + setup cmd flags = setup' cmd flags (const []) + + setup' :: CommandUI flags -> (Version -> flags) -> (Version -> [String]) -> IO () + setup' 
cmd flags args = + withLogging $ \mLogFileHandle -> + setupWrapper + verbosity + scriptOptions + { useLoggingHandle = mLogFileHandle + , useExtraEnvOverrides = dataDirsEnvironmentForPlan distDirLayout plan } + (Just (elabPkgDescription pkg)) + cmd flags args + + mlogFile :: Maybe FilePath + mlogFile = + case buildSettingLogFile of + Nothing -> Nothing + Just mkLogFile -> Just (mkLogFile compiler platform pkgid uid) + + initLogFile = + case mlogFile of + Nothing -> return () + Just logFile -> do + createDirectoryIfMissing True (takeDirectory logFile) + exists <- doesFileExist logFile + when exists $ removeFile logFile + + withLogging action = + case mlogFile of + Nothing -> action Nothing + Just logFile -> withFile logFile AppendMode (action . Just) + + +hasValidHaddockTargets :: ElaboratedConfiguredPackage -> Bool +hasValidHaddockTargets ElaboratedConfiguredPackage{..} + | not elabBuildHaddocks = False + | otherwise = any componentHasHaddocks components + where + components = elabBuildTargets ++ elabTestTargets ++ elabBenchTargets + ++ maybeToList elabReplTarget ++ elabHaddockTargets + + componentHasHaddocks :: ComponentTarget -> Bool + componentHasHaddocks (ComponentTarget name _) = + case name of + CLibName -> hasHaddocks + CSubLibName _ -> elabHaddockInternal && hasHaddocks + CFLibName _ -> elabHaddockForeignLibs && hasHaddocks + CExeName _ -> elabHaddockExecutables && hasHaddocks + CTestName _ -> elabHaddockTestSuites && hasHaddocks + CBenchName _ -> elabHaddockBenchmarks && hasHaddocks + where + hasHaddocks = not (null (elabPkgDescription ^. 
componentModules name)) + + +buildInplaceUnpackedPackage :: Verbosity + -> DistDirLayout + -> BuildTimeSettings -> Lock -> Lock + -> ElaboratedSharedConfig + -> ElaboratedInstallPlan + -> ElaboratedReadyPackage + -> BuildStatusRebuild + -> FilePath -> FilePath + -> IO BuildResult +buildInplaceUnpackedPackage verbosity + distDirLayout@DistDirLayout { + distTempDirectory, + distPackageCacheDirectory, + distDirectory + } + BuildTimeSettings{buildSettingNumJobs} + registerLock cacheLock + pkgshared@ElaboratedSharedConfig { + pkgConfigCompiler = compiler, + pkgConfigCompilerProgs = progdb + } + plan + rpkg@(ReadyPackage pkg) + buildStatus + srcdir builddir = do + + --TODO: [code cleanup] there is duplication between the distdirlayout and the builddir here + -- builddir is not enough, we also need the per-package cachedir + createDirectoryIfMissingVerbose verbosity True builddir + createDirectoryIfMissingVerbose verbosity True (distPackageCacheDirectory dparams) + + -- Configure phase + -- + whenReConfigure $ do + annotateFailureNoLog ConfigureFailed $ + setup configureCommand configureFlags configureArgs + invalidatePackageRegFileMonitor packageFileMonitor + updatePackageConfigFileMonitor packageFileMonitor srcdir pkg + + -- Build phase + -- + let docsResult = DocsNotTried + testsResult = TestsNotTried + + buildResult :: BuildResultMisc + buildResult = (docsResult, testsResult) + + whenRebuild $ do + timestamp <- beginUpdateFileMonitor + annotateFailureNoLog BuildFailed $ + setup buildCommand buildFlags buildArgs + + let listSimple = + execRebuild srcdir (needElaboratedConfiguredPackage pkg) + listSdist = + fmap (map monitorFileHashed) $ + allPackageSourceFiles verbosity scriptOptions srcdir + ifNullThen m m' = do xs <- m + if null xs then m' else return xs + monitors <- case PD.buildType (elabPkgDescription pkg) of + Simple -> listSimple + -- If a Custom setup was used, AND the Cabal is recent + -- enough to have sdist --list-sources, use that to + -- determine the 
files that we need to track. This can + -- cause unnecessary rebuilding (for example, if README + -- is edited, we will try to rebuild) but there isn't + -- a more accurate Custom interface we can use to get + -- this info. We prefer not to use listSimple here + -- as it can miss extra source files that are considered + -- by the Custom setup. + _ | elabSetupScriptCliVersion pkg >= mkVersion [1,17] + -- However, sometimes sdist --list-sources will fail + -- and return an empty list. In that case, fall + -- back on the (inaccurate) simple tracking. + -> listSdist `ifNullThen` listSimple + | otherwise + -> listSimple + + let dep_monitors = map monitorFileHashed + $ elabInplaceDependencyBuildCacheFiles + distDirLayout pkgshared plan pkg + updatePackageBuildFileMonitor packageFileMonitor srcdir timestamp + pkg buildStatus + (monitors ++ dep_monitors) buildResult + + -- PURPOSELY omitted: no copy! + + whenReRegister $ annotateFailureNoLog InstallFailed $ do + -- Register locally + mipkg <- if elabRequiresRegistration pkg + then do + ipkg0 <- generateInstalledPackageInfo + -- We register ourselves rather than via Setup.hs. We need to + -- grab and modify the InstalledPackageInfo. We decide what + -- the installed package id is, not the build system. 
+ let ipkg = ipkg0 { Installed.installedUnitId = ipkgid } + criticalSection registerLock $ + Cabal.registerPackage verbosity compiler progdb + (elabRegisterPackageDBStack pkg) + ipkg Cabal.defaultRegisterOptions + return (Just ipkg) + + else return Nothing + + updatePackageRegFileMonitor packageFileMonitor srcdir mipkg + + whenTest $ do + annotateFailureNoLog TestsFailed $ + setup testCommand testFlags testArgs + + whenBench $ + annotateFailureNoLog BenchFailed $ + setup benchCommand benchFlags benchArgs + + -- Repl phase + -- + whenRepl $ + annotateFailureNoLog ReplFailed $ + setupInteractive replCommand replFlags replArgs + + -- Haddock phase + whenHaddock $ + annotateFailureNoLog HaddocksFailed $ do + setup haddockCommand haddockFlags haddockArgs + let haddockTarget = elabHaddockForHackage pkg + when (haddockTarget == Cabal.ForHackage) $ do + let dest = distDirectory name <.> "tar.gz" + name = haddockDirName haddockTarget (elabPkgDescription pkg) + docDir = distBuildDirectory distDirLayout dparams "doc" "html" + Tar.createTarGzFile dest docDir name + notice verbosity $ "Documentation tarball created: " ++ dest + + return BuildResult { + buildResultDocs = docsResult, + buildResultTests = testsResult, + buildResultLogFile = Nothing + } + + where + ipkgid = installedUnitId pkg + dparams = elabDistDirParams pkgshared pkg + + isParallelBuild = buildSettingNumJobs >= 2 + + packageFileMonitor = newPackageFileMonitor pkgshared distDirLayout dparams + + whenReConfigure action = case buildStatus of + BuildStatusConfigure _ -> action + _ -> return () + + whenRebuild action + | null (elabBuildTargets pkg) + -- NB: we have to build the test/bench suite! 
+ , null (elabTestTargets pkg) + , null (elabBenchTargets pkg) = return () + | otherwise = action + + whenTest action + | null (elabTestTargets pkg) = return () + | otherwise = action + + whenBench action + | null (elabBenchTargets pkg) = return () + | otherwise = action + + whenRepl action + | isNothing (elabReplTarget pkg) = return () + | otherwise = action + + whenHaddock action + | hasValidHaddockTargets pkg = action + | otherwise = return () + + whenReRegister action + = case buildStatus of + -- We registered the package already + BuildStatusBuild (Just _) _ -> info verbosity "whenReRegister: previously registered" + -- There is nothing to register + _ | null (elabBuildTargets pkg) -> info verbosity "whenReRegister: nothing to register" + | otherwise -> action + + configureCommand = Cabal.configureCommand defaultProgramDb + configureFlags v = flip filterConfigureFlags v $ + setupHsConfigureFlags rpkg pkgshared + verbosity builddir + configureArgs _ = setupHsConfigureArgs pkg + + buildCommand = Cabal.buildCommand defaultProgramDb + buildFlags _ = setupHsBuildFlags pkg pkgshared + verbosity builddir + buildArgs _ = setupHsBuildArgs pkg + + testCommand = Cabal.testCommand -- defaultProgramDb + testFlags _ = setupHsTestFlags pkg pkgshared + verbosity builddir + testArgs _ = setupHsTestArgs pkg + + benchCommand = Cabal.benchmarkCommand + benchFlags _ = setupHsBenchFlags pkg pkgshared + verbosity builddir + benchArgs _ = setupHsBenchArgs pkg + + replCommand = Cabal.replCommand defaultProgramDb + replFlags _ = setupHsReplFlags pkg pkgshared + verbosity builddir + replArgs _ = setupHsReplArgs pkg + + haddockCommand = Cabal.haddockCommand + haddockFlags v = flip filterHaddockFlags v $ + setupHsHaddockFlags pkg pkgshared + verbosity builddir + haddockArgs v = flip filterHaddockArgs v $ + setupHsHaddockArgs pkg + + scriptOptions = setupHsScriptOptions rpkg plan pkgshared + distDirLayout srcdir builddir + isParallelBuild cacheLock + + setupInteractive :: CommandUI flags + 
-> (Version -> flags) -> (Version -> [String]) -> IO () + setupInteractive cmd flags args = + setupWrapper verbosity + scriptOptions { isInteractive = True } + (Just (elabPkgDescription pkg)) + cmd flags args + + setup :: CommandUI flags -> (Version -> flags) -> (Version -> [String]) -> IO () + setup cmd flags args = + setupWrapper verbosity + scriptOptions + (Just (elabPkgDescription pkg)) + cmd flags args + + generateInstalledPackageInfo :: IO InstalledPackageInfo + generateInstalledPackageInfo = + withTempInstalledPackageInfoFile + verbosity distTempDirectory $ \pkgConfDest -> do + let registerFlags _ = setupHsRegisterFlags + pkg pkgshared + verbosity builddir + pkgConfDest + setup Cabal.registerCommand registerFlags (const []) + +withTempInstalledPackageInfoFile :: Verbosity -> FilePath + -> (FilePath -> IO ()) + -> IO InstalledPackageInfo +withTempInstalledPackageInfoFile verbosity tempdir action = + withTempDirectory verbosity tempdir "package-registration-" $ \dir -> do + -- make absolute since @action@ will often change directory + abs_dir <- canonicalizePath dir + + let pkgConfDest = abs_dir "pkgConf" + action pkgConfDest + + readPkgConf "." 
pkgConfDest + where + pkgConfParseFailed :: Installed.PError -> IO a + pkgConfParseFailed perror = + die' verbosity $ "Couldn't parse the output of 'setup register --gen-pkg-config':" + ++ show perror + + readPkgConf pkgConfDir pkgConfFile = do + (warns, ipkg) <- withUTF8FileContents (pkgConfDir pkgConfFile) $ \pkgConfStr -> + case Installed.parseInstalledPackageInfo pkgConfStr of + Installed.ParseFailed perror -> pkgConfParseFailed perror + Installed.ParseOk warns ipkg -> return (warns, ipkg) + + unless (null warns) $ + warn verbosity $ unlines (map (showPWarning pkgConfFile) warns) + + return ipkg + + +------------------------------------------------------------------------------ +-- * Utilities +------------------------------------------------------------------------------ + +annotateFailureNoLog :: (SomeException -> BuildFailureReason) + -> IO a -> IO a +annotateFailureNoLog annotate action = + annotateFailure Nothing annotate action + +annotateFailure :: Maybe FilePath + -> (SomeException -> BuildFailureReason) + -> IO a -> IO a +annotateFailure mlogFile annotate action = + action `catches` + -- It's not just IOException and ExitCode we have to deal with, there's + -- lots, including exceptions from the hackage-security and tar packages. + -- So we take the strategy of catching everything except async exceptions. + [ +#if MIN_VERSION_base(4,7,0) + Handler $ \async -> throwIO (async :: SomeAsyncException) +#else + Handler $ \async -> throwIO (async :: AsyncException) +#endif + , Handler $ \other -> handler (other :: SomeException) + ] + where + handler :: Exception e => e -> IO a + handler = throwIO . BuildFailure mlogFile . annotate . 
toException diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectConfig/Legacy.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectConfig/Legacy.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectConfig/Legacy.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectConfig/Legacy.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,1373 @@ +{-# LANGUAGE RecordWildCards, NamedFieldPuns, DeriveGeneric #-} + +-- | Project configuration, implementation in terms of legacy types. +-- +module Distribution.Client.ProjectConfig.Legacy ( + + -- * Project config in terms of legacy types + LegacyProjectConfig, + parseLegacyProjectConfig, + showLegacyProjectConfig, + + -- * Conversion to and from legacy config types + commandLineFlagsToProjectConfig, + convertLegacyProjectConfig, + convertLegacyGlobalConfig, + convertToLegacyProjectConfig, + + -- * Internals, just for tests + parsePackageLocationTokenQ, + renderPackageLocationToken, + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Client.ProjectConfig.Types +import Distribution.Client.Types + ( RemoteRepo(..), emptyRemoteRepo + , AllowNewer(..), AllowOlder(..) ) + +import Distribution.Client.Config + ( SavedConfig(..), remoteRepoFields ) + +import Distribution.Solver.Types.ConstraintSource + +import Distribution.Package +import Distribution.PackageDescription + ( SourceRepo(..), RepoKind(..) + , dispFlagAssignment, parseFlagAssignment ) +import Distribution.Client.SourceRepoParse + ( sourceRepoFieldDescrs ) +import Distribution.Simple.Compiler + ( OptimisationLevel(..), DebugInfoLevel(..) 
) +import Distribution.Simple.InstallDirs ( CopyDest (NoCopyDest) ) +import Distribution.Simple.Setup + ( Flag(Flag), toFlag, fromFlagOrDefault + , ConfigFlags(..), configureOptions + , HaddockFlags(..), haddockOptions, defaultHaddockFlags + , programDbPaths', splitArgs + ) +import Distribution.Client.Setup + ( GlobalFlags(..), globalCommand + , ConfigExFlags(..), configureExOptions, defaultConfigExFlags + , InstallFlags(..), installOptions, defaultInstallFlags ) +import Distribution.Simple.Program + ( programName, knownPrograms ) +import Distribution.Simple.Program.Db + ( ProgramDb, defaultProgramDb ) +import Distribution.Simple.Utils + ( lowercase ) +import Distribution.Utils.NubList + ( toNubList, fromNubList, overNubList ) +import Distribution.Simple.LocalBuildInfo + ( toPathTemplate, fromPathTemplate ) + +import Distribution.Text +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.ReadP + ( ReadP, (+++), (<++) ) +import qualified Text.Read as Read +import qualified Text.PrettyPrint as Disp +import Text.PrettyPrint + ( Doc, ($+$) ) +import qualified Distribution.ParseUtils as ParseUtils (field) +import Distribution.ParseUtils + ( ParseResult(..), PError(..), syntaxError, PWarning(..), warning + , simpleField, commaNewLineListField + , showToken ) +import Distribution.Client.ParseUtils +import Distribution.Simple.Command + ( CommandUI(commandOptions), ShowOrParseArgs(..) + , OptionField, option, reqArg' ) + +import qualified Data.Map as Map +------------------------------------------------------------------ +-- Representing the project config file in terms of legacy types +-- + +-- | We already have parsers\/pretty-printers for almost all the fields in the +-- project config file, but they're in terms of the types used for the command +-- line flags for Setup.hs or cabal commands. We don't want to redefine them +-- all, at least not yet so for the moment we use the parsers at the old types +-- and use conversion functions. 
+-- +-- Ultimately if\/when this project-based approach becomes the default then we +-- can redefine the parsers directly for the new types. +-- +data LegacyProjectConfig = LegacyProjectConfig { + legacyPackages :: [String], + legacyPackagesOptional :: [String], + legacyPackagesRepo :: [SourceRepo], + legacyPackagesNamed :: [Dependency], + + legacySharedConfig :: LegacySharedConfig, + legacyAllConfig :: LegacyPackageConfig, + legacyLocalConfig :: LegacyPackageConfig, + legacySpecificConfig :: MapMappend PackageName LegacyPackageConfig + } deriving Generic + +instance Monoid LegacyProjectConfig where + mempty = gmempty + mappend = (<>) + +instance Semigroup LegacyProjectConfig where + (<>) = gmappend + +data LegacyPackageConfig = LegacyPackageConfig { + legacyConfigureFlags :: ConfigFlags, + legacyInstallPkgFlags :: InstallFlags, + legacyHaddockFlags :: HaddockFlags + } deriving Generic + +instance Monoid LegacyPackageConfig where + mempty = gmempty + mappend = (<>) + +instance Semigroup LegacyPackageConfig where + (<>) = gmappend + +data LegacySharedConfig = LegacySharedConfig { + legacyGlobalFlags :: GlobalFlags, + legacyConfigureShFlags :: ConfigFlags, + legacyConfigureExFlags :: ConfigExFlags, + legacyInstallFlags :: InstallFlags + } deriving Generic + +instance Monoid LegacySharedConfig where + mempty = gmempty + mappend = (<>) + +instance Semigroup LegacySharedConfig where + (<>) = gmappend + + +------------------------------------------------------------------ +-- Converting from and to the legacy types +-- + +-- | Convert configuration from the @cabal configure@ or @cabal build@ command +-- line into a 'ProjectConfig' value that can combined with configuration from +-- other sources. +-- +-- At the moment this uses the legacy command line flag types. See +-- 'LegacyProjectConfig' for an explanation. 
+-- +commandLineFlagsToProjectConfig :: GlobalFlags + -> ConfigFlags -> ConfigExFlags + -> InstallFlags -> HaddockFlags + -> ProjectConfig +commandLineFlagsToProjectConfig globalFlags configFlags configExFlags + installFlags haddockFlags = + mempty { + projectConfigBuildOnly = convertLegacyBuildOnlyFlags + globalFlags configFlags + installFlags haddockFlags, + projectConfigShared = convertLegacyAllPackageFlags + globalFlags configFlags + configExFlags installFlags, + projectConfigLocalPackages = localConfig, + projectConfigAllPackages = allConfig + } + where (localConfig, allConfig) = splitConfig + (convertLegacyPerPackageFlags + configFlags installFlags haddockFlags) + -- split the package config (from command line arguments) into + -- those applied to all packages and those to local only. + -- + -- for now we will just copy over the ProgramPaths/Args/Extra into + -- the AllPackages. The LocalPackages do not inherit them from + -- AllPackages, and as such need to retain them. + -- + -- The general decision rule for what to put into allConfig + -- into localConfig is the following: + -- + -- - anything that is host/toolchain/env specific should be applied + -- to all packages, as packagesets have to be host/toolchain/env + -- consistent. + -- - anything else should be in the local config and could potentially + -- be lifted into all-packages vial the `package *` cabal.project + -- section. + -- + splitConfig :: PackageConfig -> (PackageConfig, PackageConfig) + splitConfig pc = (pc + , mempty { packageConfigProgramPaths = packageConfigProgramPaths pc + , packageConfigProgramArgs = packageConfigProgramArgs pc + , packageConfigProgramPathExtra = packageConfigProgramPathExtra pc + , packageConfigDocumentation = packageConfigDocumentation pc }) + +-- | Convert from the types currently used for the user-wide @~/.cabal/config@ +-- file into the 'ProjectConfig' type. +-- +-- Only a subset of the 'ProjectConfig' can be represented in the user-wide +-- config. 
In particular it does not include packages that are in the project, +-- and it also doesn't support package-specific configuration (only +-- configuration that applies to all packages). +-- +convertLegacyGlobalConfig :: SavedConfig -> ProjectConfig +convertLegacyGlobalConfig + SavedConfig { + savedGlobalFlags = globalFlags, + savedInstallFlags = installFlags, + savedConfigureFlags = configFlags, + savedConfigureExFlags = configExFlags, + savedUserInstallDirs = _, + savedGlobalInstallDirs = _, + savedUploadFlags = _, + savedReportFlags = _, + savedHaddockFlags = haddockFlags + } = + mempty { + projectConfigBuildOnly = configBuildOnly, + projectConfigShared = configShared, + projectConfigAllPackages = configAllPackages + } + where + --TODO: [code cleanup] eliminate use of default*Flags here and specify the + -- defaults in the various resolve functions in terms of the new types. + configExFlags' = defaultConfigExFlags <> configExFlags + installFlags' = defaultInstallFlags <> installFlags + haddockFlags' = defaultHaddockFlags <> haddockFlags + + configAllPackages = convertLegacyPerPackageFlags + configFlags installFlags' haddockFlags' + configShared = convertLegacyAllPackageFlags + globalFlags configFlags + configExFlags' installFlags' + configBuildOnly = convertLegacyBuildOnlyFlags + globalFlags configFlags + installFlags' haddockFlags' + + +-- | Convert the project config from the legacy types to the 'ProjectConfig' +-- and associated types. See 'LegacyProjectConfig' for an explanation of the +-- approach. 
+-- +convertLegacyProjectConfig :: LegacyProjectConfig -> ProjectConfig +convertLegacyProjectConfig + LegacyProjectConfig { + legacyPackages, + legacyPackagesOptional, + legacyPackagesRepo, + legacyPackagesNamed, + legacySharedConfig = LegacySharedConfig globalFlags configShFlags + configExFlags installSharedFlags, + legacyAllConfig, + legacyLocalConfig = LegacyPackageConfig configFlags installPerPkgFlags + haddockFlags, + legacySpecificConfig + } = + + ProjectConfig { + projectPackages = legacyPackages, + projectPackagesOptional = legacyPackagesOptional, + projectPackagesRepo = legacyPackagesRepo, + projectPackagesNamed = legacyPackagesNamed, + + projectConfigBuildOnly = configBuildOnly, + projectConfigShared = configPackagesShared, + projectConfigProvenance = mempty, + projectConfigAllPackages = configAllPackages, + projectConfigLocalPackages = configLocalPackages, + projectConfigSpecificPackage = fmap perPackage legacySpecificConfig + } + where + configAllPackages = convertLegacyPerPackageFlags g i h + where LegacyPackageConfig g i h = legacyAllConfig + configLocalPackages = convertLegacyPerPackageFlags + configFlags installPerPkgFlags haddockFlags + configPackagesShared= convertLegacyAllPackageFlags + globalFlags (configFlags <> configShFlags) + configExFlags installSharedFlags + configBuildOnly = convertLegacyBuildOnlyFlags + globalFlags configShFlags + installSharedFlags haddockFlags + + perPackage (LegacyPackageConfig perPkgConfigFlags perPkgInstallFlags + perPkgHaddockFlags) = + convertLegacyPerPackageFlags + perPkgConfigFlags perPkgInstallFlags perPkgHaddockFlags + + +-- | Helper used by other conversion functions that returns the +-- 'ProjectConfigShared' subset of the 'ProjectConfig'. 
+-- +convertLegacyAllPackageFlags :: GlobalFlags -> ConfigFlags + -> ConfigExFlags -> InstallFlags + -> ProjectConfigShared +convertLegacyAllPackageFlags globalFlags configFlags + configExFlags installFlags = + ProjectConfigShared{..} + where + GlobalFlags { + globalConfigFile = projectConfigConfigFile, + globalSandboxConfigFile = _, -- ?? + globalRemoteRepos = projectConfigRemoteRepos, + globalLocalRepos = projectConfigLocalRepos, + globalProgPathExtra = projectConfigProgPathExtra, + globalStoreDir = projectConfigStoreDir + } = globalFlags + + ConfigFlags { + configDistPref = projectConfigDistDir, + configHcFlavor = projectConfigHcFlavor, + configHcPath = projectConfigHcPath, + configHcPkg = projectConfigHcPkg + --configProgramPathExtra = projectConfigProgPathExtra DELETE ME + --configInstallDirs = projectConfigInstallDirs, + --configUserInstall = projectConfigUserInstall, + --configPackageDBs = projectConfigPackageDBs, + } = configFlags + + ConfigExFlags { + configCabalVersion = projectConfigCabalVersion, + configExConstraints = projectConfigConstraints, + configPreferences = projectConfigPreferences, + configSolver = projectConfigSolver, + configAllowOlder = projectConfigAllowOlder, + configAllowNewer = projectConfigAllowNewer, + configWriteGhcEnvironmentFilesPolicy + = projectConfigWriteGhcEnvironmentFilesPolicy + } = configExFlags + + InstallFlags { + installProjectFileName = projectConfigProjectFile, + installHaddockIndex = projectConfigHaddockIndex, + --installReinstall = projectConfigReinstall, + --installAvoidReinstalls = projectConfigAvoidReinstalls, + --installOverrideReinstall = projectConfigOverrideReinstall, + installIndexState = projectConfigIndexState, + installMaxBackjumps = projectConfigMaxBackjumps, + --installUpgradeDeps = projectConfigUpgradeDeps, + installReorderGoals = projectConfigReorderGoals, + installCountConflicts = projectConfigCountConflicts, + installPerComponent = projectConfigPerComponent, + installIndependentGoals = 
projectConfigIndependentGoals, + --installShadowPkgs = projectConfigShadowPkgs, + installStrongFlags = projectConfigStrongFlags, + installAllowBootLibInstalls = projectConfigAllowBootLibInstalls + } = installFlags + + + +-- | Helper used by other conversion functions that returns the +-- 'PackageConfig' subset of the 'ProjectConfig'. +-- +convertLegacyPerPackageFlags :: ConfigFlags -> InstallFlags -> HaddockFlags + -> PackageConfig +convertLegacyPerPackageFlags configFlags installFlags haddockFlags = + PackageConfig{..} + where + ConfigFlags { + configProgramPaths, + configProgramArgs, + configProgramPathExtra = packageConfigProgramPathExtra, + configVanillaLib = packageConfigVanillaLib, + configProfLib = packageConfigProfLib, + configSharedLib = packageConfigSharedLib, + configStaticLib = packageConfigStaticLib, + configDynExe = packageConfigDynExe, + configProfExe = packageConfigProfExe, + configProf = packageConfigProf, + configProfDetail = packageConfigProfDetail, + configProfLibDetail = packageConfigProfLibDetail, + configConfigureArgs = packageConfigConfigureArgs, + configOptimization = packageConfigOptimization, + configProgPrefix = packageConfigProgPrefix, + configProgSuffix = packageConfigProgSuffix, + configGHCiLib = packageConfigGHCiLib, + configSplitSections = packageConfigSplitSections, + configSplitObjs = packageConfigSplitObjs, + configStripExes = packageConfigStripExes, + configStripLibs = packageConfigStripLibs, + configExtraLibDirs = packageConfigExtraLibDirs, + configExtraFrameworkDirs = packageConfigExtraFrameworkDirs, + configExtraIncludeDirs = packageConfigExtraIncludeDirs, + configConfigurationsFlags = packageConfigFlagAssignment, + configTests = packageConfigTests, + configBenchmarks = packageConfigBenchmarks, + configCoverage = coverage, + configLibCoverage = libcoverage, --deprecated + configDebugInfo = packageConfigDebugInfo, + configRelocatable = packageConfigRelocatable + } = configFlags + packageConfigProgramPaths = MapLast 
(Map.fromList configProgramPaths) + packageConfigProgramArgs = MapMappend (Map.fromListWith (++) configProgramArgs) + + packageConfigCoverage = coverage <> libcoverage + --TODO: defer this merging to the resolve phase + + InstallFlags { + installDocumentation = packageConfigDocumentation, + installRunTests = packageConfigRunTests + } = installFlags + + HaddockFlags { + haddockHoogle = packageConfigHaddockHoogle, + haddockHtml = packageConfigHaddockHtml, + haddockHtmlLocation = packageConfigHaddockHtmlLocation, + haddockForeignLibs = packageConfigHaddockForeignLibs, + haddockForHackage = packageConfigHaddockForHackage, + haddockExecutables = packageConfigHaddockExecutables, + haddockTestSuites = packageConfigHaddockTestSuites, + haddockBenchmarks = packageConfigHaddockBenchmarks, + haddockInternal = packageConfigHaddockInternal, + haddockCss = packageConfigHaddockCss, + haddockLinkedSource = packageConfigHaddockLinkedSource, + haddockQuickJump = packageConfigHaddockQuickJump, + haddockHscolourCss = packageConfigHaddockHscolourCss, + haddockContents = packageConfigHaddockContents + } = haddockFlags + + + +-- | Helper used by other conversion functions that returns the +-- 'ProjectConfigBuildOnly' subset of the 'ProjectConfig'. 
+-- +convertLegacyBuildOnlyFlags :: GlobalFlags -> ConfigFlags + -> InstallFlags -> HaddockFlags + -> ProjectConfigBuildOnly +convertLegacyBuildOnlyFlags globalFlags configFlags + installFlags haddockFlags = + ProjectConfigBuildOnly{..} + where + GlobalFlags { + globalCacheDir = projectConfigCacheDir, + globalLogsDir = projectConfigLogsDir, + globalWorldFile = _, + globalHttpTransport = projectConfigHttpTransport, + globalIgnoreExpiry = projectConfigIgnoreExpiry + } = globalFlags + + ConfigFlags { + configVerbosity = projectConfigVerbosity + } = configFlags + + InstallFlags { + installDryRun = projectConfigDryRun, + installOnly = _, + installOnlyDeps = projectConfigOnlyDeps, + installRootCmd = _, + installSummaryFile = projectConfigSummaryFile, + installLogFile = projectConfigLogFile, + installBuildReports = projectConfigBuildReports, + installReportPlanningFailure = projectConfigReportPlanningFailure, + installSymlinkBinDir = projectConfigSymlinkBinDir, + installOneShot = projectConfigOneShot, + installNumJobs = projectConfigNumJobs, + installKeepGoing = projectConfigKeepGoing, + installOfflineMode = projectConfigOfflineMode + } = installFlags + + HaddockFlags { + haddockKeepTempFiles = projectConfigKeepTempFiles --TODO: this ought to live elsewhere + } = haddockFlags + + +convertToLegacyProjectConfig :: ProjectConfig -> LegacyProjectConfig +convertToLegacyProjectConfig + projectConfig@ProjectConfig { + projectPackages, + projectPackagesOptional, + projectPackagesRepo, + projectPackagesNamed, + projectConfigAllPackages, + projectConfigLocalPackages, + projectConfigSpecificPackage + } = + LegacyProjectConfig { + legacyPackages = projectPackages, + legacyPackagesOptional = projectPackagesOptional, + legacyPackagesRepo = projectPackagesRepo, + legacyPackagesNamed = projectPackagesNamed, + legacySharedConfig = convertToLegacySharedConfig projectConfig, + legacyAllConfig = convertToLegacyPerPackageConfig + projectConfigAllPackages, + legacyLocalConfig = 
convertToLegacyAllPackageConfig projectConfig + <> convertToLegacyPerPackageConfig + projectConfigLocalPackages, + legacySpecificConfig = fmap convertToLegacyPerPackageConfig + projectConfigSpecificPackage + } + +convertToLegacySharedConfig :: ProjectConfig -> LegacySharedConfig +convertToLegacySharedConfig + ProjectConfig { + projectConfigBuildOnly = ProjectConfigBuildOnly {..}, + projectConfigShared = ProjectConfigShared {..}, + projectConfigAllPackages = PackageConfig { + packageConfigDocumentation + } + } = + + LegacySharedConfig { + legacyGlobalFlags = globalFlags, + legacyConfigureShFlags = configFlags, + legacyConfigureExFlags = configExFlags, + legacyInstallFlags = installFlags + } + where + globalFlags = GlobalFlags { + globalVersion = mempty, + globalNumericVersion = mempty, + globalConfigFile = projectConfigConfigFile, + globalSandboxConfigFile = mempty, + globalConstraintsFile = mempty, + globalRemoteRepos = projectConfigRemoteRepos, + globalCacheDir = projectConfigCacheDir, + globalLocalRepos = projectConfigLocalRepos, + globalLogsDir = projectConfigLogsDir, + globalWorldFile = mempty, + globalRequireSandbox = mempty, + globalIgnoreSandbox = mempty, + globalIgnoreExpiry = projectConfigIgnoreExpiry, + globalHttpTransport = projectConfigHttpTransport, + globalNix = mempty, + globalStoreDir = projectConfigStoreDir, + globalProgPathExtra = projectConfigProgPathExtra + } + + configFlags = mempty { + configVerbosity = projectConfigVerbosity, + configDistPref = projectConfigDistDir + } + + configExFlags = ConfigExFlags { + configCabalVersion = projectConfigCabalVersion, + configExConstraints = projectConfigConstraints, + configPreferences = projectConfigPreferences, + configSolver = projectConfigSolver, + configAllowOlder = projectConfigAllowOlder, + configAllowNewer = projectConfigAllowNewer, + configWriteGhcEnvironmentFilesPolicy + = projectConfigWriteGhcEnvironmentFilesPolicy + } + + installFlags = InstallFlags { + installDocumentation = 
packageConfigDocumentation, + installHaddockIndex = projectConfigHaddockIndex, + installDest = Flag NoCopyDest, + installDryRun = projectConfigDryRun, + installReinstall = mempty, --projectConfigReinstall, + installAvoidReinstalls = mempty, --projectConfigAvoidReinstalls, + installOverrideReinstall = mempty, --projectConfigOverrideReinstall, + installMaxBackjumps = projectConfigMaxBackjumps, + installUpgradeDeps = mempty, --projectConfigUpgradeDeps, + installReorderGoals = projectConfigReorderGoals, + installCountConflicts = projectConfigCountConflicts, + installIndependentGoals = projectConfigIndependentGoals, + installShadowPkgs = mempty, --projectConfigShadowPkgs, + installStrongFlags = projectConfigStrongFlags, + installAllowBootLibInstalls = projectConfigAllowBootLibInstalls, + installOnly = mempty, + installOnlyDeps = projectConfigOnlyDeps, + installIndexState = projectConfigIndexState, + installRootCmd = mempty, --no longer supported + installSummaryFile = projectConfigSummaryFile, + installLogFile = projectConfigLogFile, + installBuildReports = projectConfigBuildReports, + installReportPlanningFailure = projectConfigReportPlanningFailure, + installSymlinkBinDir = projectConfigSymlinkBinDir, + installPerComponent = projectConfigPerComponent, + installOneShot = projectConfigOneShot, + installNumJobs = projectConfigNumJobs, + installKeepGoing = projectConfigKeepGoing, + installRunTests = mempty, + installOfflineMode = projectConfigOfflineMode, + installProjectFileName = projectConfigProjectFile + } + + +convertToLegacyAllPackageConfig :: ProjectConfig -> LegacyPackageConfig +convertToLegacyAllPackageConfig + ProjectConfig { + projectConfigBuildOnly = ProjectConfigBuildOnly {..}, + projectConfigShared = ProjectConfigShared {..} + } = + + LegacyPackageConfig { + legacyConfigureFlags = configFlags, + legacyInstallPkgFlags= mempty, + legacyHaddockFlags = haddockFlags + } + where + configFlags = ConfigFlags { + configArgs = mempty, + configPrograms_ = mempty, + 
configProgramPaths = mempty, + configProgramArgs = mempty, + configProgramPathExtra = mempty, + configHcFlavor = projectConfigHcFlavor, + configHcPath = projectConfigHcPath, + configHcPkg = projectConfigHcPkg, + configInstantiateWith = mempty, + configVanillaLib = mempty, + configProfLib = mempty, + configSharedLib = mempty, + configStaticLib = mempty, + configDynExe = mempty, + configProfExe = mempty, + configProf = mempty, + configProfDetail = mempty, + configProfLibDetail = mempty, + configConfigureArgs = mempty, + configOptimization = mempty, + configProgPrefix = mempty, + configProgSuffix = mempty, + configInstallDirs = mempty, + configScratchDir = mempty, + configDistPref = mempty, + configCabalFilePath = mempty, + configVerbosity = mempty, + configUserInstall = mempty, --projectConfigUserInstall, + configPackageDBs = mempty, --projectConfigPackageDBs, + configGHCiLib = mempty, + configSplitSections = mempty, + configSplitObjs = mempty, + configStripExes = mempty, + configStripLibs = mempty, + configExtraLibDirs = mempty, + configExtraFrameworkDirs = mempty, + configConstraints = mempty, + configDependencies = mempty, + configExtraIncludeDirs = mempty, + configDeterministic = mempty, + configIPID = mempty, + configCID = mempty, + configConfigurationsFlags = mempty, + configTests = mempty, + configCoverage = mempty, --TODO: don't merge + configLibCoverage = mempty, --TODO: don't merge + configExactConfiguration = mempty, + configBenchmarks = mempty, + configFlagError = mempty, --TODO: ??? 
+ configRelocatable = mempty, + configDebugInfo = mempty, + configUseResponseFiles = mempty + } + + haddockFlags = mempty { + haddockKeepTempFiles = projectConfigKeepTempFiles + } + + +convertToLegacyPerPackageConfig :: PackageConfig -> LegacyPackageConfig +convertToLegacyPerPackageConfig PackageConfig {..} = + LegacyPackageConfig { + legacyConfigureFlags = configFlags, + legacyInstallPkgFlags = installFlags, + legacyHaddockFlags = haddockFlags + } + where + configFlags = ConfigFlags { + configArgs = mempty, + configPrograms_ = configPrograms_ mempty, + configProgramPaths = Map.toList (getMapLast packageConfigProgramPaths), + configProgramArgs = Map.toList (getMapMappend packageConfigProgramArgs), + configProgramPathExtra = packageConfigProgramPathExtra, + configHcFlavor = mempty, + configHcPath = mempty, + configHcPkg = mempty, + configInstantiateWith = mempty, + configVanillaLib = packageConfigVanillaLib, + configProfLib = packageConfigProfLib, + configSharedLib = packageConfigSharedLib, + configStaticLib = packageConfigStaticLib, + configDynExe = packageConfigDynExe, + configProfExe = packageConfigProfExe, + configProf = packageConfigProf, + configProfDetail = packageConfigProfDetail, + configProfLibDetail = packageConfigProfLibDetail, + configConfigureArgs = packageConfigConfigureArgs, + configOptimization = packageConfigOptimization, + configProgPrefix = packageConfigProgPrefix, + configProgSuffix = packageConfigProgSuffix, + configInstallDirs = mempty, + configScratchDir = mempty, + configDistPref = mempty, + configCabalFilePath = mempty, + configVerbosity = mempty, + configUserInstall = mempty, + configPackageDBs = mempty, + configGHCiLib = packageConfigGHCiLib, + configSplitSections = packageConfigSplitSections, + configSplitObjs = packageConfigSplitObjs, + configStripExes = packageConfigStripExes, + configStripLibs = packageConfigStripLibs, + configExtraLibDirs = packageConfigExtraLibDirs, + configExtraFrameworkDirs = packageConfigExtraFrameworkDirs, + 
configConstraints = mempty, + configDependencies = mempty, + configExtraIncludeDirs = packageConfigExtraIncludeDirs, + configIPID = mempty, + configCID = mempty, + configDeterministic = mempty, + configConfigurationsFlags = packageConfigFlagAssignment, + configTests = packageConfigTests, + configCoverage = packageConfigCoverage, --TODO: don't merge + configLibCoverage = packageConfigCoverage, --TODO: don't merge + configExactConfiguration = mempty, + configBenchmarks = packageConfigBenchmarks, + configFlagError = mempty, --TODO: ??? + configRelocatable = packageConfigRelocatable, + configDebugInfo = packageConfigDebugInfo, + configUseResponseFiles = mempty + } + + installFlags = mempty { + installDocumentation = packageConfigDocumentation, + installRunTests = packageConfigRunTests + } + + haddockFlags = HaddockFlags { + haddockProgramPaths = mempty, + haddockProgramArgs = mempty, + haddockHoogle = packageConfigHaddockHoogle, + haddockHtml = packageConfigHaddockHtml, + haddockHtmlLocation = packageConfigHaddockHtmlLocation, + haddockForHackage = packageConfigHaddockForHackage, + haddockForeignLibs = packageConfigHaddockForeignLibs, + haddockExecutables = packageConfigHaddockExecutables, + haddockTestSuites = packageConfigHaddockTestSuites, + haddockBenchmarks = packageConfigHaddockBenchmarks, + haddockInternal = packageConfigHaddockInternal, + haddockCss = packageConfigHaddockCss, + haddockLinkedSource = packageConfigHaddockLinkedSource, + haddockQuickJump = packageConfigHaddockQuickJump, + haddockHscolourCss = packageConfigHaddockHscolourCss, + haddockContents = packageConfigHaddockContents, + haddockDistPref = mempty, + haddockKeepTempFiles = mempty, + haddockVerbosity = mempty, + haddockCabalFilePath = mempty, + haddockArgs = mempty + } + + +------------------------------------------------ +-- Parsing and showing the project config file +-- + +parseLegacyProjectConfig :: String -> ParseResult LegacyProjectConfig +parseLegacyProjectConfig = + parseConfig 
legacyProjectConfigFieldDescrs + legacyPackageConfigSectionDescrs + mempty + +showLegacyProjectConfig :: LegacyProjectConfig -> String +showLegacyProjectConfig config = + Disp.render $ + showConfig legacyProjectConfigFieldDescrs + legacyPackageConfigSectionDescrs + config + $+$ + Disp.text "" + + +legacyProjectConfigFieldDescrs :: [FieldDescr LegacyProjectConfig] +legacyProjectConfigFieldDescrs = + + [ newLineListField "packages" + (Disp.text . renderPackageLocationToken) parsePackageLocationTokenQ + legacyPackages + (\v flags -> flags { legacyPackages = v }) + , newLineListField "optional-packages" + (Disp.text . renderPackageLocationToken) parsePackageLocationTokenQ + legacyPackagesOptional + (\v flags -> flags { legacyPackagesOptional = v }) + , commaNewLineListField "extra-packages" + disp parse + legacyPackagesNamed + (\v flags -> flags { legacyPackagesNamed = v }) + ] + + ++ map (liftField + legacySharedConfig + (\flags conf -> conf { legacySharedConfig = flags })) + legacySharedConfigFieldDescrs + + ++ map (liftField + legacyLocalConfig + (\flags conf -> conf { legacyLocalConfig = flags })) + legacyPackageConfigFieldDescrs + +-- | This is a bit tricky since it has to cover globs which have embedded @,@ +-- chars. But we don't just want to parse strictly as a glob since we want to +-- allow http urls which don't parse as globs, and possibly some +-- system-dependent file paths. So we parse fairly liberally as a token, but +-- we allow @,@ inside matched @{}@ braces. 
+-- +parsePackageLocationTokenQ :: ReadP r String +parsePackageLocationTokenQ = parseHaskellString + Parse.<++ parsePackageLocationToken + where + parsePackageLocationToken :: ReadP r String + parsePackageLocationToken = fmap fst (Parse.gather outerTerm) + where + outerTerm = alternateEither1 outerToken (braces innerTerm) + innerTerm = alternateEither innerToken (braces innerTerm) + outerToken = Parse.munch1 outerChar >> return () + innerToken = Parse.munch1 innerChar >> return () + outerChar c = not (isSpace c || c == '{' || c == '}' || c == ',') + innerChar c = not (isSpace c || c == '{' || c == '}') + braces = Parse.between (Parse.char '{') (Parse.char '}') + + alternateEither, alternateEither1, + alternatePQs, alternate1PQs, alternateQsP, alternate1QsP + :: ReadP r () -> ReadP r () -> ReadP r () + + alternateEither1 p q = alternate1PQs p q +++ alternate1QsP q p + alternateEither p q = alternateEither1 p q +++ return () + alternate1PQs p q = p >> alternateQsP q p + alternatePQs p q = alternate1PQs p q +++ return () + alternate1QsP q p = Parse.many1 q >> alternatePQs p q + alternateQsP q p = alternate1QsP q p +++ return () + +renderPackageLocationToken :: String -> String +renderPackageLocationToken s | needsQuoting = show s + | otherwise = s + where + needsQuoting = not (ok 0 s) + || s == "." -- . on its own on a line has special meaning + || take 2 s == "--" -- on its own line is comment syntax + --TODO: [code cleanup] these "." and "--" escaping issues + -- ought to be dealt with systematically in ParseUtils. + ok :: Int -> String -> Bool + ok n [] = n == 0 + ok _ ('"':_) = False + ok n ('{':cs) = ok (n+1) cs + ok n ('}':cs) = ok (n-1) cs + ok n (',':cs) = (n > 0) && ok n cs + ok _ (c:_) + | isSpace c = False + ok n (_ :cs) = ok n cs + + +legacySharedConfigFieldDescrs :: [FieldDescr LegacySharedConfig] +legacySharedConfigFieldDescrs = + + ( liftFields + legacyGlobalFlags + (\flags conf -> conf { legacyGlobalFlags = flags }) + . 
addFields + [ newLineListField "local-repo" + showTokenQ parseTokenQ + (fromNubList . globalLocalRepos) + (\v conf -> conf { globalLocalRepos = toNubList v }), + newLineListField "extra-prog-path-shared-only" + showTokenQ parseTokenQ + (fromNubList . globalProgPathExtra) + (\v conf -> conf { globalProgPathExtra = toNubList v }) + ] + . filterFields + [ "remote-repo-cache" + , "logs-dir", "store-dir", "ignore-expiry", "http-transport" + ] + . commandOptionsToFields + ) (commandOptions (globalCommand []) ParseArgs) + ++ + ( liftFields + legacyConfigureShFlags + (\flags conf -> conf { legacyConfigureShFlags = flags }) + . filterFields ["verbose", "builddir" ] + . commandOptionsToFields + ) (configureOptions ParseArgs) + ++ + ( liftFields + legacyConfigureExFlags + (\flags conf -> conf { legacyConfigureExFlags = flags }) + . addFields + [ commaNewLineListField "constraints" + (disp . fst) (fmap (\constraint -> (constraint, constraintSrc)) parse) + configExConstraints (\v conf -> conf { configExConstraints = v }) + + , commaNewLineListField "preferences" + disp parse + configPreferences (\v conf -> conf { configPreferences = v }) + + , monoidField "allow-older" + (maybe mempty disp) (fmap Just parse) + (fmap unAllowOlder . configAllowOlder) + (\v conf -> conf { configAllowOlder = fmap AllowOlder v }) + + , monoidField "allow-newer" + (maybe mempty disp) (fmap Just parse) + (fmap unAllowNewer . configAllowNewer) + (\v conf -> conf { configAllowNewer = fmap AllowNewer v }) + ] + . filterFields + [ "cabal-lib-version", "solver", "write-ghc-environment-files" + -- not "constraint" or "preference", we use our own plural ones above + ] + . commandOptionsToFields + ) (configureExOptions ParseArgs constraintSrc) + ++ + ( liftFields + legacyInstallFlags + (\flags conf -> conf { legacyInstallFlags = flags }) + . addFields + [ newLineListField "build-summary" + (showTokenQ . fromPathTemplate) (fmap toPathTemplate parseTokenQ) + (fromNubList . 
installSummaryFile) + (\v conf -> conf { installSummaryFile = toNubList v }) + ] + . filterFields + [ "doc-index-file" + , "root-cmd", "symlink-bindir" + , "build-log" + , "remote-build-reporting", "report-planning-failure" + , "one-shot", "jobs", "keep-going", "offline", "per-component" + -- solver flags: + , "max-backjumps", "reorder-goals", "count-conflicts", "independent-goals" + , "strong-flags" , "allow-boot-library-installs", "index-state" + ] + . commandOptionsToFields + ) (installOptions ParseArgs) + where + constraintSrc = ConstraintSourceProjectConfig "TODO" + + +legacyPackageConfigFieldDescrs :: [FieldDescr LegacyPackageConfig] +legacyPackageConfigFieldDescrs = + ( liftFields + legacyConfigureFlags + (\flags conf -> conf { legacyConfigureFlags = flags }) + . addFields + [ newLineListField "extra-include-dirs" + showTokenQ parseTokenQ + configExtraIncludeDirs + (\v conf -> conf { configExtraIncludeDirs = v }) + , newLineListField "extra-lib-dirs" + showTokenQ parseTokenQ + configExtraLibDirs + (\v conf -> conf { configExtraLibDirs = v }) + , newLineListField "extra-framework-dirs" + showTokenQ parseTokenQ + configExtraFrameworkDirs + (\v conf -> conf { configExtraFrameworkDirs = v }) + , newLineListField "extra-prog-path" + showTokenQ parseTokenQ + (fromNubList . configProgramPathExtra) + (\v conf -> conf { configProgramPathExtra = toNubList v }) + , newLineListField "configure-options" + showTokenQ parseTokenQ + configConfigureArgs + (\v conf -> conf { configConfigureArgs = v }) + , simpleField "flags" + dispFlagAssignment parseFlagAssignment + configConfigurationsFlags + (\v conf -> conf { configConfigurationsFlags = v }) + ] + . 
filterFields + [ "with-compiler", "with-hc-pkg" + , "program-prefix", "program-suffix" + , "library-vanilla", "library-profiling" + , "shared", "static", "executable-dynamic" + , "profiling", "executable-profiling" + , "profiling-detail", "library-profiling-detail" + , "library-for-ghci", "split-objs", "split-sections" + , "executable-stripping", "library-stripping" + , "tests", "benchmarks" + , "coverage", "library-coverage" + , "relocatable" + -- not "extra-include-dirs", "extra-lib-dirs", "extra-framework-dirs" + -- or "extra-prog-path". We use corrected ones above that parse + -- as list fields. + ] + . commandOptionsToFields + ) (configureOptions ParseArgs) + ++ + liftFields + legacyConfigureFlags + (\flags conf -> conf { legacyConfigureFlags = flags }) + [ overrideFieldCompiler + , overrideFieldOptimization + , overrideFieldDebugInfo + ] + ++ + ( liftFields + legacyInstallPkgFlags + (\flags conf -> conf { legacyInstallPkgFlags = flags }) + . filterFields + [ "documentation", "run-tests" + ] + . commandOptionsToFields + ) (installOptions ParseArgs) + ++ + ( liftFields + legacyHaddockFlags + (\flags conf -> conf { legacyHaddockFlags = flags }) + . mapFieldNames + ("haddock-"++) + . addFields + [ simpleField "for-hackage" + -- TODO: turn this into a library function + (fromFlagOrDefault Disp.empty . fmap disp) (Parse.option mempty (fmap toFlag parse)) + haddockForHackage (\v conf -> conf { haddockForHackage = v }) + ] + . filterFields + [ "hoogle", "html", "html-location" + , "foreign-libraries" + , "executables", "tests", "benchmarks", "all", "internal", "css" + , "hyperlink-source", "quickjump", "hscolour-css" + , "contents-location", "keep-temp-files" + ] + . commandOptionsToFields + ) (haddockOptions ParseArgs) + + where + overrideFieldCompiler = + simpleField "compiler" + (fromFlagOrDefault Disp.empty . 
fmap disp) + (Parse.option mempty (fmap toFlag parse)) + configHcFlavor (\v flags -> flags { configHcFlavor = v }) + + + -- TODO: [code cleanup] The following is a hack. The "optimization" and + -- "debug-info" fields are OptArg, and viewAsFieldDescr fails on that. + -- Instead of a hand-written parser and printer, we should handle this case + -- properly in the library. + + overrideFieldOptimization = + liftField configOptimization + (\v flags -> flags { configOptimization = v }) $ + let name = "optimization" in + FieldDescr name + (\f -> case f of + Flag NoOptimisation -> Disp.text "False" + Flag NormalOptimisation -> Disp.text "True" + Flag MaximumOptimisation -> Disp.text "2" + _ -> Disp.empty) + (\line str _ -> case () of + _ | str == "False" -> ParseOk [] (Flag NoOptimisation) + | str == "True" -> ParseOk [] (Flag NormalOptimisation) + | str == "0" -> ParseOk [] (Flag NoOptimisation) + | str == "1" -> ParseOk [] (Flag NormalOptimisation) + | str == "2" -> ParseOk [] (Flag MaximumOptimisation) + | lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation) + | lstr == "true" -> ParseOk [caseWarning] (Flag NormalOptimisation) + | otherwise -> ParseFailed (NoParse name line) + where + lstr = lowercase str + caseWarning = PWarning $ + "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.") + + overrideFieldDebugInfo = + liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $ + let name = "debug-info" in + FieldDescr name + (\f -> case f of + Flag NoDebugInfo -> Disp.text "False" + Flag MinimalDebugInfo -> Disp.text "1" + Flag NormalDebugInfo -> Disp.text "True" + Flag MaximalDebugInfo -> Disp.text "3" + _ -> Disp.empty) + (\line str _ -> case () of + _ | str == "False" -> ParseOk [] (Flag NoDebugInfo) + | str == "True" -> ParseOk [] (Flag NormalDebugInfo) + | str == "0" -> ParseOk [] (Flag NoDebugInfo) + | str == "1" -> ParseOk [] (Flag MinimalDebugInfo) + | str == "2" -> ParseOk [] (Flag NormalDebugInfo) + | str == "3" -> 
ParseOk [] (Flag MaximalDebugInfo) + | lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo) + | lstr == "true" -> ParseOk [caseWarning] (Flag NormalDebugInfo) + | otherwise -> ParseFailed (NoParse name line) + where + lstr = lowercase str + caseWarning = PWarning $ + "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.") + + +legacyPackageConfigSectionDescrs :: [SectionDescr LegacyProjectConfig] +legacyPackageConfigSectionDescrs = + [ packageRepoSectionDescr + , packageSpecificOptionsSectionDescr + , liftSection + legacyLocalConfig + (\flags conf -> conf { legacyLocalConfig = flags }) + programOptionsSectionDescr + , liftSection + legacyLocalConfig + (\flags conf -> conf { legacyLocalConfig = flags }) + programLocationsSectionDescr + , liftSection + legacySharedConfig + (\flags conf -> conf { legacySharedConfig = flags }) $ + liftSection + legacyGlobalFlags + (\flags conf -> conf { legacyGlobalFlags = flags }) + remoteRepoSectionDescr + ] + +packageRepoSectionDescr :: SectionDescr LegacyProjectConfig +packageRepoSectionDescr = + SectionDescr { + sectionName = "source-repository-package", + sectionFields = sourceRepoFieldDescrs, + sectionSubsections = [], + sectionGet = map (\x->("", x)) + . legacyPackagesRepo, + sectionSet = + \lineno unused pkgrepo projconf -> do + unless (null unused) $ + syntaxError lineno "the section 'source-repository-package' takes no arguments" + return projconf { + legacyPackagesRepo = legacyPackagesRepo projconf ++ [pkgrepo] + }, + sectionEmpty = SourceRepo { + repoKind = RepoThis, -- hopefully unused + repoType = Nothing, + repoLocation = Nothing, + repoModule = Nothing, + repoBranch = Nothing, + repoTag = Nothing, + repoSubdir = Nothing + } + } + +-- | The definitions of all the fields that can appear in the @package pkgfoo@ +-- and @package *@ sections of the @cabal.project@-format files. 
+-- +packageSpecificOptionsFieldDescrs :: [FieldDescr LegacyPackageConfig] +packageSpecificOptionsFieldDescrs = + legacyPackageConfigFieldDescrs + ++ programOptionsFieldDescrs + (configProgramArgs . legacyConfigureFlags) + (\args pkgconf -> pkgconf { + legacyConfigureFlags = (legacyConfigureFlags pkgconf) { + configProgramArgs = args + } + } + ) + ++ liftFields + legacyConfigureFlags + (\flags pkgconf -> pkgconf { + legacyConfigureFlags = flags + } + ) + programLocationsFieldDescrs + +-- | The definition of the @package pkgfoo@ sections of the @cabal.project@-format +-- files. This section is per-package name. The special package @*@ applies to all +-- packages used anywhere by the project, locally or as dependencies. +-- +packageSpecificOptionsSectionDescr :: SectionDescr LegacyProjectConfig +packageSpecificOptionsSectionDescr = + SectionDescr { + sectionName = "package", + sectionFields = packageSpecificOptionsFieldDescrs, + sectionSubsections = [], + sectionGet = \projconf -> + [ (display pkgname, pkgconf) + | (pkgname, pkgconf) <- + Map.toList . getMapMappend + . 
legacySpecificConfig $ projconf ] + ++ [ ("*", legacyAllConfig projconf) ], + sectionSet = + \lineno pkgnamestr pkgconf projconf -> case pkgnamestr of + "*" -> return projconf { + legacyAllConfig = legacyAllConfig projconf <> pkgconf + } + _ -> do + pkgname <- case simpleParse pkgnamestr of + Just pkgname -> return pkgname + Nothing -> syntaxError lineno $ + "a 'package' section requires a package name " + ++ "as an argument" + return projconf { + legacySpecificConfig = + MapMappend $ + Map.insertWith mappend pkgname pkgconf + (getMapMappend $ legacySpecificConfig projconf) + }, + sectionEmpty = mempty + } + +programOptionsFieldDescrs :: (a -> [(String, [String])]) + -> ([(String, [String])] -> a -> a) + -> [FieldDescr a] +programOptionsFieldDescrs get' set = + commandOptionsToFields + $ programDbOptions + defaultProgramDb + ParseArgs get' set + +programOptionsSectionDescr :: SectionDescr LegacyPackageConfig +programOptionsSectionDescr = + SectionDescr { + sectionName = "program-options", + sectionFields = programOptionsFieldDescrs + configProgramArgs + (\args conf -> conf { configProgramArgs = args }), + sectionSubsections = [], + sectionGet = (\x->[("", x)]) + . 
legacyConfigureFlags, + sectionSet = + \lineno unused confflags pkgconf -> do + unless (null unused) $ + syntaxError lineno "the section 'program-options' takes no arguments" + return pkgconf { + legacyConfigureFlags = legacyConfigureFlags pkgconf <> confflags + }, + sectionEmpty = mempty + } + +programLocationsFieldDescrs :: [FieldDescr ConfigFlags] +programLocationsFieldDescrs = + commandOptionsToFields + $ programDbPaths' + (++ "-location") + defaultProgramDb + ParseArgs + configProgramPaths + (\paths conf -> conf { configProgramPaths = paths }) + +programLocationsSectionDescr :: SectionDescr LegacyPackageConfig +programLocationsSectionDescr = + SectionDescr { + sectionName = "program-locations", + sectionFields = programLocationsFieldDescrs, + sectionSubsections = [], + sectionGet = (\x->[("", x)]) + . legacyConfigureFlags, + sectionSet = + \lineno unused confflags pkgconf -> do + unless (null unused) $ + syntaxError lineno "the section 'program-locations' takes no arguments" + return pkgconf { + legacyConfigureFlags = legacyConfigureFlags pkgconf <> confflags + }, + sectionEmpty = mempty + } + + +-- | For each known program @PROG@ in 'progDb', produce a @PROG-options@ +-- 'OptionField'. +programDbOptions + :: ProgramDb + -> ShowOrParseArgs + -> (flags -> [(String, [String])]) + -> ([(String, [String])] -> (flags -> flags)) + -> [OptionField flags] +programDbOptions progDb showOrParseArgs get' set = + case showOrParseArgs of + -- we don't want a verbose help text list so we just show a generic one: + ShowArgs -> [programOptions "PROG"] + ParseArgs -> map (programOptions . programName . fst) + (knownPrograms progDb) + where + programOptions prog = + option "" [prog ++ "-options"] + ("give extra options to " ++ prog) + get' set + (reqArg' "OPTS" (\args -> [(prog, splitArgs args)]) + (\progArgs -> [ joinsArgs args + | (prog', args) <- progArgs, prog==prog' ])) + + + joinsArgs = unwords . 
map escape + escape arg | any isSpace arg = "\"" ++ arg ++ "\"" + | otherwise = arg + + +remoteRepoSectionDescr :: SectionDescr GlobalFlags +remoteRepoSectionDescr = + SectionDescr { + sectionName = "repository", + sectionFields = remoteRepoFields, + sectionSubsections = [], + sectionGet = map (\x->(remoteRepoName x, x)) . fromNubList + . globalRemoteRepos, + sectionSet = + \lineno reponame repo0 conf -> do + when (null reponame) $ + syntaxError lineno $ "a 'repository' section requires the " + ++ "repository name as an argument" + let repo = repo0 { remoteRepoName = reponame } + when (remoteRepoKeyThreshold repo + > length (remoteRepoRootKeys repo)) $ + warning $ "'key-threshold' for repository " + ++ show (remoteRepoName repo) + ++ " higher than number of keys" + when (not (null (remoteRepoRootKeys repo)) + && remoteRepoSecure repo /= Just True) $ + warning $ "'root-keys' for repository " + ++ show (remoteRepoName repo) + ++ " non-empty, but 'secure' not set to True." + return conf { + globalRemoteRepos = overNubList (++[repo]) (globalRemoteRepos conf) + }, + sectionEmpty = emptyRemoteRepo "" + } + + +------------------------------- +-- Local field utils +-- + +--TODO: [code cleanup] all these utils should move to Distribution.ParseUtils +-- either augmenting or replacing the ones there + +--TODO: [code cleanup] this is a different definition from listField, like +-- commaNewLineListField it pretty prints on multiple lines +newLineListField :: String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +newLineListField = listFieldWithSep Disp.sep + +--TODO: [code cleanup] local copy purely so we can use the fixed version +-- of parseOptCommaList below +listFieldWithSep :: ([Doc] -> Doc) -> String -> (a -> Doc) -> ReadP [a] a + -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b +listFieldWithSep separator name showF readF get' set = + liftField get' set' $ + ParseUtils.field name showF' (parseOptCommaList readF) + where + set' xs b = set 
(get' b ++ xs) b + showF' = separator . map showF + +-- | Parser combinator for simple fields which uses the field type's +-- 'Monoid' instance for combining multiple occurences of the field. +monoidField :: Monoid a => String -> (a -> Doc) -> ReadP a a + -> (b -> a) -> (a -> b -> b) -> FieldDescr b +monoidField name showF readF get' set = + liftField get' set' $ ParseUtils.field name showF readF + where + set' xs b = set (get' b `mappend` xs) b + +--TODO: [code cleanup] local redefinition that should replace the version in +-- D.ParseUtils. This version avoid parse ambiguity for list element parsers +-- that have multiple valid parses of prefixes. +parseOptCommaList :: ReadP r a -> ReadP r [a] +parseOptCommaList p = Parse.sepBy p sep + where + -- The separator must not be empty or it introduces ambiguity + sep = (Parse.skipSpaces >> Parse.char ',' >> Parse.skipSpaces) + +++ (Parse.satisfy isSpace >> Parse.skipSpaces) + +--TODO: [code cleanup] local redefinition that should replace the version in +-- D.ParseUtils called showFilePath. This version escapes "." and "--" which +-- otherwise are special syntax. +showTokenQ :: String -> Doc +showTokenQ "" = Disp.empty +showTokenQ x@('-':'-':_) = Disp.text (show x) +showTokenQ x@('.':[]) = Disp.text (show x) +showTokenQ x = showToken x + +-- This is just a copy of parseTokenQ, using the fixed parseHaskellString +parseTokenQ :: ReadP r String +parseTokenQ = parseHaskellString + <++ Parse.munch1 (\x -> not (isSpace x) && x /= ',') + +--TODO: [code cleanup] use this to replace the parseHaskellString in +-- Distribution.ParseUtils. It turns out Read instance for String accepts +-- the ['a', 'b'] syntax, which we do not want. In particular it messes +-- up any token starting with []. 
+parseHaskellString :: ReadP r String +parseHaskellString = + Parse.readS_to_P $ + Read.readPrec_to_S (do Read.String s <- Read.lexP; return s) 0 + +-- Handy util +addFields :: [FieldDescr a] + -> ([FieldDescr a] -> [FieldDescr a]) +addFields = (++) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectConfig/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectConfig/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectConfig/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectConfig/Types.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,425 @@ +{-# LANGUAGE DeriveGeneric, DeriveDataTypeable, GeneralizedNewtypeDeriving #-} + +-- | Handling project configuration, types. +-- +module Distribution.Client.ProjectConfig.Types ( + + -- * Types for project config + ProjectConfig(..), + ProjectConfigBuildOnly(..), + ProjectConfigShared(..), + ProjectConfigProvenance(..), + PackageConfig(..), + + -- * Resolving configuration + SolverSettings(..), + BuildTimeSettings(..), + + -- * Extra useful Monoids + MapLast(..), + MapMappend(..), + ) where + +import Distribution.Client.Types + ( RemoteRepo, AllowNewer(..), AllowOlder(..) + , WriteGhcEnvironmentFilesPolicy ) +import Distribution.Client.Dependency.Types + ( PreSolver ) +import Distribution.Client.Targets + ( UserConstraint ) +import Distribution.Client.BuildReports.Types + ( ReportLevel(..) 
) + +import Distribution.Client.IndexUtils.Timestamp + ( IndexState ) + +import Distribution.Solver.Types.Settings +import Distribution.Solver.Types.ConstraintSource + +import Distribution.Package + ( PackageName, PackageId, UnitId ) +import Distribution.Types.Dependency +import Distribution.Version + ( Version ) +import Distribution.System + ( Platform ) +import Distribution.PackageDescription + ( FlagAssignment, SourceRepo(..) ) +import Distribution.Simple.Compiler + ( Compiler, CompilerFlavor + , OptimisationLevel(..), ProfDetailLevel, DebugInfoLevel(..) ) +import Distribution.Simple.Setup + ( Flag, HaddockTarget(..) ) +import Distribution.Simple.InstallDirs + ( PathTemplate ) +import Distribution.Utils.NubList + ( NubList ) +import Distribution.Verbosity + ( Verbosity ) + +import Data.Map (Map) +import qualified Data.Map as Map +import Data.Set (Set) +import Distribution.Compat.Binary (Binary) +import Distribution.Compat.Semigroup +import GHC.Generics (Generic) +import Data.Typeable + + +------------------------------- +-- Project config types +-- + +-- | This type corresponds directly to what can be written in the +-- @cabal.project@ file. Other sources of configuration can also be injected +-- into this type, such as the user-wide @~/.cabal/config@ file and the +-- command line of @cabal configure@ or @cabal build@. +-- +-- Since it corresponds to the external project file it is an instance of +-- 'Monoid' and all the fields can be empty. This also means there has to +-- be a step where we resolve configuration. At a minimum resolving means +-- applying defaults but it can also mean merging information from multiple +-- sources. For example for package-specific configuration the project file +-- can specify configuration that applies to all local packages, and then +-- additional configuration for a specific package. +-- +-- Future directions: multiple profiles, conditionals. 
If we add these +-- features then the gap between configuration as written in the config file +-- and resolved settings we actually use will become even bigger. +-- +data ProjectConfig + = ProjectConfig { + + -- | Packages in this project, including local dirs, local .cabal files + -- local and remote tarballs. When these are file globs, they must + -- match at least one package. + projectPackages :: [String], + + -- | Like 'projectConfigPackageGlobs' but /optional/ in the sense that + -- file globs are allowed to match nothing. The primary use case for + -- this is to be able to say @optional-packages: */@ to automagically + -- pick up deps that we unpack locally without erroring when + -- there aren't any. + projectPackagesOptional :: [String], + + -- | Packages in this project from remote source repositories. + projectPackagesRepo :: [SourceRepo], + + -- | Packages in this project from hackage repositories. + projectPackagesNamed :: [Dependency], + + -- See respective types for an explanation of what these + -- values are about: + projectConfigBuildOnly :: ProjectConfigBuildOnly, + projectConfigShared :: ProjectConfigShared, + projectConfigProvenance :: Set ProjectConfigProvenance, + + -- | Configuration to be applied to *all* packages, + -- whether named in `cabal.project` or not. + projectConfigAllPackages :: PackageConfig, + + -- | Configuration to be applied to *local* packages; i.e., + -- any packages which are explicitly named in `cabal.project`. + projectConfigLocalPackages :: PackageConfig, + projectConfigSpecificPackage :: MapMappend PackageName PackageConfig + } + deriving (Eq, Show, Generic, Typeable) + +-- | That part of the project configuration that only affects /how/ we build +-- and not the /value/ of the things we build. This means this information +-- does not need to be tracked for changes since it does not affect the +-- outcome. 
+-- +data ProjectConfigBuildOnly + = ProjectConfigBuildOnly { + projectConfigVerbosity :: Flag Verbosity, + projectConfigDryRun :: Flag Bool, + projectConfigOnlyDeps :: Flag Bool, + projectConfigSummaryFile :: NubList PathTemplate, + projectConfigLogFile :: Flag PathTemplate, + projectConfigBuildReports :: Flag ReportLevel, + projectConfigReportPlanningFailure :: Flag Bool, + projectConfigSymlinkBinDir :: Flag FilePath, + projectConfigOneShot :: Flag Bool, + projectConfigNumJobs :: Flag (Maybe Int), + projectConfigKeepGoing :: Flag Bool, + projectConfigOfflineMode :: Flag Bool, + projectConfigKeepTempFiles :: Flag Bool, + projectConfigHttpTransport :: Flag String, + projectConfigIgnoreExpiry :: Flag Bool, + projectConfigCacheDir :: Flag FilePath, + projectConfigLogsDir :: Flag FilePath + } + deriving (Eq, Show, Generic) + + +-- | Project configuration that is shared between all packages in the project. +-- In particular this includes configuration that affects the solver. +-- +data ProjectConfigShared + = ProjectConfigShared { + projectConfigDistDir :: Flag FilePath, + projectConfigConfigFile :: Flag FilePath, + projectConfigProjectFile :: Flag FilePath, + projectConfigHcFlavor :: Flag CompilerFlavor, + projectConfigHcPath :: Flag FilePath, + projectConfigHcPkg :: Flag FilePath, + projectConfigHaddockIndex :: Flag PathTemplate, + + -- Things that only make sense for manual mode, not --local mode + -- too much control! + --projectConfigUserInstall :: Flag Bool, + --projectConfigInstallDirs :: InstallDirs (Flag PathTemplate), + --TODO: [required eventually] decide what to do with InstallDirs + -- currently we don't allow it to be specified in the config file + --projectConfigPackageDBs :: [Maybe PackageDB], + + -- configuration used both by the solver and other phases + projectConfigRemoteRepos :: NubList RemoteRepo, -- ^ Available Hackage servers. 
+ projectConfigLocalRepos :: NubList FilePath, + projectConfigIndexState :: Flag IndexState, + projectConfigStoreDir :: Flag FilePath, + + -- solver configuration + projectConfigConstraints :: [(UserConstraint, ConstraintSource)], + projectConfigPreferences :: [Dependency], + projectConfigCabalVersion :: Flag Version, --TODO: [required eventually] unused + projectConfigSolver :: Flag PreSolver, + projectConfigAllowOlder :: Maybe AllowOlder, + projectConfigAllowNewer :: Maybe AllowNewer, + projectConfigWriteGhcEnvironmentFilesPolicy + :: Flag WriteGhcEnvironmentFilesPolicy, + projectConfigMaxBackjumps :: Flag Int, + projectConfigReorderGoals :: Flag ReorderGoals, + projectConfigCountConflicts :: Flag CountConflicts, + projectConfigStrongFlags :: Flag StrongFlags, + projectConfigAllowBootLibInstalls :: Flag AllowBootLibInstalls, + projectConfigPerComponent :: Flag Bool, + projectConfigIndependentGoals :: Flag IndependentGoals, + + projectConfigProgPathExtra :: NubList FilePath + + -- More things that only make sense for manual mode, not --local mode + -- too much control! + --projectConfigShadowPkgs :: Flag Bool, + --projectConfigReinstall :: Flag Bool, + --projectConfigAvoidReinstalls :: Flag Bool, + --projectConfigOverrideReinstall :: Flag Bool, + --projectConfigUpgradeDeps :: Flag Bool + } + deriving (Eq, Show, Generic) + + +-- | Specifies the provenance of project configuration, whether defaults were +-- used or if the configuration was read from an explicit file path. +data ProjectConfigProvenance + + -- | The configuration is implicit due to no explicit configuration + -- being found. See 'Distribution.Client.ProjectConfig.readProjectConfig' + -- for how implicit configuration is determined. + = Implicit + + -- | The path the project configuration was explicitly read from. + -- | The configuration was explicitly read from the specified 'FilePath'. 
+ | Explicit FilePath + deriving (Eq, Ord, Show, Generic) + + +-- | Project configuration that is specific to each package, that is where we +-- can in principle have different values for different packages in the same +-- project. +-- +data PackageConfig + = PackageConfig { + packageConfigProgramPaths :: MapLast String FilePath, + packageConfigProgramArgs :: MapMappend String [String], + packageConfigProgramPathExtra :: NubList FilePath, + packageConfigFlagAssignment :: FlagAssignment, + packageConfigVanillaLib :: Flag Bool, + packageConfigSharedLib :: Flag Bool, + packageConfigStaticLib :: Flag Bool, + packageConfigDynExe :: Flag Bool, + packageConfigProf :: Flag Bool, --TODO: [code cleanup] sort out + packageConfigProfLib :: Flag Bool, -- this duplication + packageConfigProfExe :: Flag Bool, -- and consistency + packageConfigProfDetail :: Flag ProfDetailLevel, + packageConfigProfLibDetail :: Flag ProfDetailLevel, + packageConfigConfigureArgs :: [String], + packageConfigOptimization :: Flag OptimisationLevel, + packageConfigProgPrefix :: Flag PathTemplate, + packageConfigProgSuffix :: Flag PathTemplate, + packageConfigExtraLibDirs :: [FilePath], + packageConfigExtraFrameworkDirs :: [FilePath], + packageConfigExtraIncludeDirs :: [FilePath], + packageConfigGHCiLib :: Flag Bool, + packageConfigSplitSections :: Flag Bool, + packageConfigSplitObjs :: Flag Bool, + packageConfigStripExes :: Flag Bool, + packageConfigStripLibs :: Flag Bool, + packageConfigTests :: Flag Bool, + packageConfigBenchmarks :: Flag Bool, + packageConfigCoverage :: Flag Bool, + packageConfigRelocatable :: Flag Bool, + packageConfigDebugInfo :: Flag DebugInfoLevel, + packageConfigRunTests :: Flag Bool, --TODO: [required eventually] use this + packageConfigDocumentation :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockHoogle :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockHtml :: Flag Bool, --TODO: [required eventually] use this + 
packageConfigHaddockHtmlLocation :: Flag String, --TODO: [required eventually] use this + packageConfigHaddockForeignLibs :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockExecutables :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockTestSuites :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockBenchmarks :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockInternal :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockCss :: Flag FilePath, --TODO: [required eventually] use this + packageConfigHaddockLinkedSource :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockQuickJump :: Flag Bool, --TODO: [required eventually] use this + packageConfigHaddockHscolourCss :: Flag FilePath, --TODO: [required eventually] use this + packageConfigHaddockContents :: Flag PathTemplate, --TODO: [required eventually] use this + packageConfigHaddockForHackage :: Flag HaddockTarget + } + deriving (Eq, Show, Generic) + +instance Binary ProjectConfig +instance Binary ProjectConfigBuildOnly +instance Binary ProjectConfigShared +instance Binary ProjectConfigProvenance +instance Binary PackageConfig + + +-- | Newtype wrapper for 'Map' that provides a 'Monoid' instance that takes +-- the last value rather than the first value for overlapping keys. +newtype MapLast k v = MapLast { getMapLast :: Map k v } + deriving (Eq, Show, Functor, Generic, Binary, Typeable) + +instance Ord k => Monoid (MapLast k v) where + mempty = MapLast Map.empty + mappend = (<>) + +instance Ord k => Semigroup (MapLast k v) where + MapLast a <> MapLast b = MapLast $ Map.union b a + -- rather than Map.union which is the normal Map monoid instance + + +-- | Newtype wrapper for 'Map' that provides a 'Monoid' instance that +-- 'mappend's values of overlapping keys rather than taking the first. 
+newtype MapMappend k v = MapMappend { getMapMappend :: Map k v } + deriving (Eq, Show, Functor, Generic, Binary, Typeable) + +instance (Semigroup v, Ord k) => Monoid (MapMappend k v) where + mempty = MapMappend Map.empty + mappend = (<>) + +instance (Semigroup v, Ord k) => Semigroup (MapMappend k v) where + MapMappend a <> MapMappend b = MapMappend (Map.unionWith (<>) a b) + -- rather than Map.union which is the normal Map monoid instance + + +instance Monoid ProjectConfig where + mempty = gmempty + mappend = (<>) + +instance Semigroup ProjectConfig where + (<>) = gmappend + + +instance Monoid ProjectConfigBuildOnly where + mempty = gmempty + mappend = (<>) + +instance Semigroup ProjectConfigBuildOnly where + (<>) = gmappend + + +instance Monoid ProjectConfigShared where + mempty = gmempty + mappend = (<>) + +instance Semigroup ProjectConfigShared where + (<>) = gmappend + + +instance Monoid PackageConfig where + mempty = gmempty + mappend = (<>) + +instance Semigroup PackageConfig where + (<>) = gmappend + +---------------------------------------- +-- Resolving configuration to settings +-- + +-- | Resolved configuration for the solver. The idea is that this is easier to +-- use than the raw configuration because in the raw configuration everything +-- is optional (monoidial). In the 'BuildTimeSettings' every field is filled +-- in, if only with the defaults. +-- +-- Use 'resolveSolverSettings' to make one from the project config (by +-- applying defaults etc). +-- +data SolverSettings + = SolverSettings { + solverSettingRemoteRepos :: [RemoteRepo], -- ^ Available Hackage servers. 
+ solverSettingLocalRepos :: [FilePath], + solverSettingConstraints :: [(UserConstraint, ConstraintSource)], + solverSettingPreferences :: [Dependency], + solverSettingFlagAssignment :: FlagAssignment, -- ^ For all local packages + solverSettingFlagAssignments :: Map PackageName FlagAssignment, + solverSettingCabalVersion :: Maybe Version, --TODO: [required eventually] unused + solverSettingSolver :: PreSolver, + solverSettingAllowOlder :: AllowOlder, + solverSettingAllowNewer :: AllowNewer, + solverSettingMaxBackjumps :: Maybe Int, + solverSettingReorderGoals :: ReorderGoals, + solverSettingCountConflicts :: CountConflicts, + solverSettingStrongFlags :: StrongFlags, + solverSettingAllowBootLibInstalls :: AllowBootLibInstalls, + solverSettingIndexState :: Maybe IndexState, + solverSettingIndependentGoals :: IndependentGoals + -- Things that only make sense for manual mode, not --local mode + -- too much control! + --solverSettingShadowPkgs :: Bool, + --solverSettingReinstall :: Bool, + --solverSettingAvoidReinstalls :: Bool, + --solverSettingOverrideReinstall :: Bool, + --solverSettingUpgradeDeps :: Bool + } + deriving (Eq, Show, Generic, Typeable) + +instance Binary SolverSettings + + +-- | Resolved configuration for things that affect how we build and not the +-- value of the things we build. The idea is that this is easier to use than +-- the raw configuration because in the raw configuration everything is +-- optional (monoidial). In the 'BuildTimeSettings' every field is filled in, +-- if only with the defaults. +-- +-- Use 'resolveBuildTimeSettings' to make one from the project config (by +-- applying defaults etc). 
+-- +data BuildTimeSettings + = BuildTimeSettings { + buildSettingDryRun :: Bool, + buildSettingOnlyDeps :: Bool, + buildSettingSummaryFile :: [PathTemplate], + buildSettingLogFile :: Maybe (Compiler -> Platform + -> PackageId -> UnitId + -> FilePath), + buildSettingLogVerbosity :: Verbosity, + buildSettingBuildReports :: ReportLevel, + buildSettingReportPlanningFailure :: Bool, + buildSettingSymlinkBinDir :: [FilePath], + buildSettingOneShot :: Bool, + buildSettingNumJobs :: Int, + buildSettingKeepGoing :: Bool, + buildSettingOfflineMode :: Bool, + buildSettingKeepTempFiles :: Bool, + buildSettingRemoteRepos :: [RemoteRepo], + buildSettingLocalRepos :: [FilePath], + buildSettingCacheDir :: FilePath, + buildSettingHttpTransport :: Maybe String, + buildSettingIgnoreExpiry :: Bool, + buildSettingProgPathExtra :: [FilePath] + } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectConfig.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectConfig.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectConfig.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectConfig.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,1411 @@ +{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, DeriveDataTypeable, LambdaCase #-} + +-- | Handling project configuration. 
+-- +module Distribution.Client.ProjectConfig ( + + -- * Types for project config + ProjectConfig(..), + ProjectConfigBuildOnly(..), + ProjectConfigShared(..), + ProjectConfigProvenance(..), + PackageConfig(..), + MapLast(..), + MapMappend(..), + + -- * Project root + findProjectRoot, + ProjectRoot(..), + BadProjectRoot(..), + + -- * Project config files + readProjectConfig, + readGlobalConfig, + readProjectLocalFreezeConfig, + withProjectOrGlobalConfig, + writeProjectLocalExtraConfig, + writeProjectLocalFreezeConfig, + writeProjectConfigFile, + commandLineFlagsToProjectConfig, + + -- * Packages within projects + ProjectPackageLocation(..), + BadPackageLocations(..), + BadPackageLocation(..), + BadPackageLocationMatch(..), + findProjectPackages, + fetchAndReadSourcePackages, + + -- * Resolving configuration + lookupLocalPackageConfig, + projectConfigWithBuilderRepoContext, + projectConfigWithSolverRepoContext, + SolverSettings(..), + resolveSolverSettings, + BuildTimeSettings(..), + resolveBuildTimeSettings, + + -- * Checking configuration + checkBadPerPackageCompilerPaths, + BadPerPackageCompilerPaths(..) + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Client.ProjectConfig.Types +import Distribution.Client.ProjectConfig.Legacy +import Distribution.Client.RebuildMonad +import Distribution.Client.Glob + ( isTrivialFilePathGlob ) +import Distribution.Client.VCS + ( validateSourceRepos, SourceRepoProblem(..) + , VCS(..), knownVCSs, configureVCS, syncSourceRepos ) + +import Distribution.Client.Types +import Distribution.Client.DistDirLayout + ( DistDirLayout(..), CabalDirLayout(..), ProjectRoot(..) ) +import Distribution.Client.GlobalFlags + ( RepoContext(..), withRepoContext' ) +import Distribution.Client.BuildReports.Types + ( ReportLevel(..) 
) +import Distribution.Client.Config + ( loadConfig, getConfigFilePath ) +import Distribution.Client.HttpUtils + ( HttpTransport, configureTransport, transportCheckHttps + , downloadURI ) + +import Distribution.Solver.Types.SourcePackage +import Distribution.Solver.Types.Settings +import Distribution.Solver.Types.PackageConstraint + ( PackageProperty(..) ) + +import Distribution.Package + ( PackageName, PackageId, packageId, UnitId ) +import Distribution.Types.Dependency +import Distribution.System + ( Platform ) +import Distribution.Types.GenericPackageDescription + ( GenericPackageDescription ) +import Distribution.PackageDescription.Parsec + ( parseGenericPackageDescription ) +import Distribution.Parsec.ParseResult + ( runParseResult ) +import Distribution.Parsec.Common as NewParser + ( PError, PWarning, showPWarning ) +import Distribution.Types.SourceRepo + ( SourceRepo(..), RepoType(..), ) +import Distribution.Simple.Compiler + ( Compiler, compilerInfo ) +import Distribution.Simple.Program + ( ConfiguredProgram(..) 
) +import Distribution.Simple.Setup + ( Flag(Flag), toFlag, flagToMaybe, flagToList + , fromFlag, fromFlagOrDefault ) +import Distribution.Client.Setup + ( defaultSolver, defaultMaxBackjumps ) +import Distribution.Simple.InstallDirs + ( PathTemplate, fromPathTemplate + , toPathTemplate, substPathTemplate, initialPathTemplateEnv ) +import Distribution.Simple.Utils + ( die', warn, notice, info, createDirectoryIfMissingVerbose ) +import Distribution.Client.Utils + ( determineNumJobs ) +import Distribution.Utils.NubList + ( fromNubList ) +import Distribution.Verbosity + ( Verbosity, modifyVerbosity, verbose ) +import Distribution.Version + ( Version ) +import Distribution.Text +import Distribution.ParseUtils as OldParser + ( ParseResult(..), locatedErrorMsg, showPWarning ) + +import qualified Codec.Archive.Tar as Tar +import qualified Codec.Archive.Tar.Entry as Tar +import qualified Distribution.Client.Tar as Tar +import qualified Distribution.Client.GZipUtils as GZipUtils + +import Control.Monad +import Control.Monad.Trans (liftIO) +import Control.Exception +import Data.Either +import qualified Data.ByteString as BS +import qualified Data.ByteString.Lazy as LBS +import qualified Data.Map as Map +import Data.Set (Set) +import qualified Data.Set as Set +import qualified Data.Hashable as Hashable +import Numeric (showHex) + +import System.FilePath hiding (combine) +import System.IO + ( withBinaryFile, IOMode(ReadMode) ) +import System.Directory +import Network.URI + ( URI(..), URIAuth(..), parseAbsoluteURI, uriToString ) + + +---------------------------------------- +-- Resolving configuration to settings +-- + +-- | Look up a 'PackageConfig' field in the 'ProjectConfig' for a specific +-- 'PackageName'. This returns the configuration that applies to all local +-- packages plus any package-specific configuration for this package. 
+-- +lookupLocalPackageConfig :: (Semigroup a, Monoid a) + => (PackageConfig -> a) + -> ProjectConfig + -> PackageName -> a +lookupLocalPackageConfig field ProjectConfig { + projectConfigLocalPackages, + projectConfigSpecificPackage + } pkgname = + field projectConfigLocalPackages + <> maybe mempty field + (Map.lookup pkgname (getMapMappend projectConfigSpecificPackage)) + + +-- | Use a 'RepoContext' based on the 'BuildTimeSettings'. +-- +projectConfigWithBuilderRepoContext :: Verbosity + -> BuildTimeSettings + -> (RepoContext -> IO a) -> IO a +projectConfigWithBuilderRepoContext verbosity BuildTimeSettings{..} = + withRepoContext' + verbosity + buildSettingRemoteRepos + buildSettingLocalRepos + buildSettingCacheDir + buildSettingHttpTransport + (Just buildSettingIgnoreExpiry) + buildSettingProgPathExtra + + +-- | Use a 'RepoContext', but only for the solver. The solver does not use the +-- full facilities of the 'RepoContext' so we can get away with making one +-- that doesn't have an http transport. And that avoids having to have access +-- to the 'BuildTimeSettings' +-- +projectConfigWithSolverRepoContext :: Verbosity + -> ProjectConfigShared + -> ProjectConfigBuildOnly + -> (RepoContext -> IO a) -> IO a +projectConfigWithSolverRepoContext verbosity + ProjectConfigShared{..} + ProjectConfigBuildOnly{..} = + withRepoContext' + verbosity + (fromNubList projectConfigRemoteRepos) + (fromNubList projectConfigLocalRepos) + (fromFlagOrDefault (error "projectConfigWithSolverRepoContext: projectConfigCacheDir") + projectConfigCacheDir) + (flagToMaybe projectConfigHttpTransport) + (flagToMaybe projectConfigIgnoreExpiry) + (fromNubList projectConfigProgPathExtra) + + +-- | Resolve the project configuration, with all its optional fields, into +-- 'SolverSettings' with no optional fields (by applying defaults). 
+-- +resolveSolverSettings :: ProjectConfig -> SolverSettings +resolveSolverSettings ProjectConfig{ + projectConfigShared, + projectConfigLocalPackages, + projectConfigSpecificPackage + } = + SolverSettings {..} + where + --TODO: [required eventually] some of these settings need validation, e.g. + -- the flag assignments need checking. + solverSettingRemoteRepos = fromNubList projectConfigRemoteRepos + solverSettingLocalRepos = fromNubList projectConfigLocalRepos + solverSettingConstraints = projectConfigConstraints + solverSettingPreferences = projectConfigPreferences + solverSettingFlagAssignment = packageConfigFlagAssignment projectConfigLocalPackages + solverSettingFlagAssignments = fmap packageConfigFlagAssignment + (getMapMappend projectConfigSpecificPackage) + solverSettingCabalVersion = flagToMaybe projectConfigCabalVersion + solverSettingSolver = fromFlag projectConfigSolver + solverSettingAllowOlder = fromMaybe mempty projectConfigAllowOlder + solverSettingAllowNewer = fromMaybe mempty projectConfigAllowNewer + solverSettingMaxBackjumps = case fromFlag projectConfigMaxBackjumps of + n | n < 0 -> Nothing + | otherwise -> Just n + solverSettingReorderGoals = fromFlag projectConfigReorderGoals + solverSettingCountConflicts = fromFlag projectConfigCountConflicts + solverSettingStrongFlags = fromFlag projectConfigStrongFlags + solverSettingAllowBootLibInstalls = fromFlag projectConfigAllowBootLibInstalls + solverSettingIndexState = flagToMaybe projectConfigIndexState + solverSettingIndependentGoals = fromFlag projectConfigIndependentGoals + --solverSettingShadowPkgs = fromFlag projectConfigShadowPkgs + --solverSettingReinstall = fromFlag projectConfigReinstall + --solverSettingAvoidReinstalls = fromFlag projectConfigAvoidReinstalls + --solverSettingOverrideReinstall = fromFlag projectConfigOverrideReinstall + --solverSettingUpgradeDeps = fromFlag projectConfigUpgradeDeps + + ProjectConfigShared {..} = defaults <> projectConfigShared + + defaults = mempty { + 
projectConfigSolver = Flag defaultSolver, + projectConfigAllowOlder = Just (AllowOlder mempty), + projectConfigAllowNewer = Just (AllowNewer mempty), + projectConfigMaxBackjumps = Flag defaultMaxBackjumps, + projectConfigReorderGoals = Flag (ReorderGoals False), + projectConfigCountConflicts = Flag (CountConflicts True), + projectConfigStrongFlags = Flag (StrongFlags False), + projectConfigAllowBootLibInstalls = Flag (AllowBootLibInstalls False), + projectConfigIndependentGoals = Flag (IndependentGoals False) + --projectConfigShadowPkgs = Flag False, + --projectConfigReinstall = Flag False, + --projectConfigAvoidReinstalls = Flag False, + --projectConfigOverrideReinstall = Flag False, + --projectConfigUpgradeDeps = Flag False + } + + +-- | Resolve the project configuration, with all its optional fields, into +-- 'BuildTimeSettings' with no optional fields (by applying defaults). +-- +resolveBuildTimeSettings :: Verbosity + -> CabalDirLayout + -> ProjectConfig + -> BuildTimeSettings +resolveBuildTimeSettings verbosity + CabalDirLayout { + cabalLogsDirectory + } + ProjectConfig { + projectConfigShared = ProjectConfigShared { + projectConfigRemoteRepos, + projectConfigLocalRepos, + projectConfigProgPathExtra + }, + projectConfigBuildOnly + } = + BuildTimeSettings {..} + where + buildSettingDryRun = fromFlag projectConfigDryRun + buildSettingOnlyDeps = fromFlag projectConfigOnlyDeps + buildSettingSummaryFile = fromNubList projectConfigSummaryFile + --buildSettingLogFile -- defined below, more complicated + --buildSettingLogVerbosity -- defined below, more complicated + buildSettingBuildReports = fromFlag projectConfigBuildReports + buildSettingSymlinkBinDir = flagToList projectConfigSymlinkBinDir + buildSettingOneShot = fromFlag projectConfigOneShot + buildSettingNumJobs = determineNumJobs projectConfigNumJobs + buildSettingKeepGoing = fromFlag projectConfigKeepGoing + buildSettingOfflineMode = fromFlag projectConfigOfflineMode + buildSettingKeepTempFiles = fromFlag 
projectConfigKeepTempFiles + buildSettingRemoteRepos = fromNubList projectConfigRemoteRepos + buildSettingLocalRepos = fromNubList projectConfigLocalRepos + buildSettingCacheDir = fromFlag projectConfigCacheDir + buildSettingHttpTransport = flagToMaybe projectConfigHttpTransport + buildSettingIgnoreExpiry = fromFlag projectConfigIgnoreExpiry + buildSettingReportPlanningFailure + = fromFlag projectConfigReportPlanningFailure + buildSettingProgPathExtra = fromNubList projectConfigProgPathExtra + + ProjectConfigBuildOnly{..} = defaults + <> projectConfigBuildOnly + + defaults = mempty { + projectConfigDryRun = toFlag False, + projectConfigOnlyDeps = toFlag False, + projectConfigBuildReports = toFlag NoReports, + projectConfigReportPlanningFailure = toFlag False, + projectConfigKeepGoing = toFlag False, + projectConfigOneShot = toFlag False, + projectConfigOfflineMode = toFlag False, + projectConfigKeepTempFiles = toFlag False, + projectConfigIgnoreExpiry = toFlag False + } + + -- The logging logic: what log file to use and what verbosity. + -- + -- If the user has specified --remote-build-reporting=detailed, use the + -- default log file location. If the --build-log option is set, use the + -- provided location. Otherwise don't use logging, unless building in + -- parallel (in which case the default location is used). 
+ -- + buildSettingLogFile :: Maybe (Compiler -> Platform + -> PackageId -> UnitId -> FilePath) + buildSettingLogFile + | useDefaultTemplate = Just (substLogFileName defaultTemplate) + | otherwise = fmap substLogFileName givenTemplate + + defaultTemplate = toPathTemplate $ + cabalLogsDirectory + "$compiler" "$libname" <.> "log" + givenTemplate = flagToMaybe projectConfigLogFile + + useDefaultTemplate + | buildSettingBuildReports == DetailedReports = True + | isJust givenTemplate = False + | isParallelBuild = True + | otherwise = False + + isParallelBuild = buildSettingNumJobs >= 2 + + substLogFileName :: PathTemplate + -> Compiler -> Platform + -> PackageId -> UnitId -> FilePath + substLogFileName template compiler platform pkgid uid = + fromPathTemplate (substPathTemplate env template) + where + env = initialPathTemplateEnv + pkgid uid (compilerInfo compiler) platform + + -- If the user has specified --remote-build-reporting=detailed or + -- --build-log, use more verbose logging. + -- + buildSettingLogVerbosity + | overrideVerbosity = modifyVerbosity (max verbose) verbosity + | otherwise = verbosity + + overrideVerbosity + | buildSettingBuildReports == DetailedReports = True + | isJust givenTemplate = True + | isParallelBuild = False + | otherwise = False + + +--------------------------------------------- +-- Reading and writing project config files +-- + +-- | Find the root of this project. +-- +-- Searches for an explicit @cabal.project@ file, in the current directory or +-- parent directories. If no project file is found then the current dir is the +-- project root (and the project will use an implicit config). 
+-- +findProjectRoot :: Maybe FilePath -- ^ starting directory, or current directory + -> Maybe FilePath -- ^ @cabal.project@ file name override + -> IO (Either BadProjectRoot ProjectRoot) +findProjectRoot _ (Just projectFile) + | isAbsolute projectFile = do + exists <- doesFileExist projectFile + if exists + then do projectFile' <- canonicalizePath projectFile + let projectRoot = ProjectRootExplicit (takeDirectory projectFile') + (takeFileName projectFile') + return (Right projectRoot) + else return (Left (BadProjectRootExplicitFile projectFile)) + +findProjectRoot mstartdir mprojectFile = do + startdir <- maybe getCurrentDirectory canonicalizePath mstartdir + homedir <- getHomeDirectory + probe startdir homedir + where + projectFileName = fromMaybe "cabal.project" mprojectFile + + -- Search upwards. If we get to the users home dir or the filesystem root, + -- then use the current dir + probe startdir homedir = go startdir + where + go dir | isDrive dir || dir == homedir = + case mprojectFile of + Nothing -> return (Right (ProjectRootImplicit startdir)) + Just file -> return (Left (BadProjectRootExplicitFile file)) + go dir = do + exists <- doesFileExist (dir projectFileName) + if exists + then return (Right (ProjectRootExplicit dir projectFileName)) + else go (takeDirectory dir) + + --TODO: [nice to have] add compat support for old style sandboxes + + +-- | Errors returned by 'findProjectRoot'. +-- +data BadProjectRoot = BadProjectRootExplicitFile FilePath +#if MIN_VERSION_base(4,8,0) + deriving (Show, Typeable) +#else + deriving (Typeable) + +instance Show BadProjectRoot where + show = renderBadProjectRoot +#endif + +instance Exception BadProjectRoot where +#if MIN_VERSION_base(4,8,0) + displayException = renderBadProjectRoot +#endif + +renderBadProjectRoot :: BadProjectRoot -> String +renderBadProjectRoot (BadProjectRootExplicitFile projectFile) = + "The given project file '" ++ projectFile ++ "' does not exist." 
+ +withProjectOrGlobalConfig :: Verbosity + -> Flag FilePath + -> IO a + -> (ProjectConfig -> IO a) + -> IO a +withProjectOrGlobalConfig verbosity globalConfigFlag with without = do + globalConfig <- runRebuild "" $ readGlobalConfig verbosity globalConfigFlag + + let + res' = catch with + $ \case + (BadPackageLocations prov locs) + | prov == Set.singleton Implicit + , let + isGlobErr (BadLocGlobEmptyMatch _) = True + isGlobErr _ = False + , any isGlobErr locs -> + without globalConfig + err -> throwIO err + + catch res' + $ \case + (BadProjectRootExplicitFile "") -> without globalConfig + err -> throwIO err + +-- | Read all the config relevant for a project. This includes the project +-- file if any, plus other global config. +-- +readProjectConfig :: Verbosity + -> Flag FilePath + -> DistDirLayout + -> Rebuild ProjectConfig +readProjectConfig verbosity configFileFlag distDirLayout = do + global <- readGlobalConfig verbosity configFileFlag + local <- readProjectLocalConfigOrDefault verbosity distDirLayout + freeze <- readProjectLocalFreezeConfig verbosity distDirLayout + extra <- readProjectLocalExtraConfig verbosity distDirLayout + return (global <> local <> freeze <> extra) + + +-- | Reads an explicit @cabal.project@ file in the given project root dir, +-- or returns the default project config for an implicitly defined project. +-- +readProjectLocalConfigOrDefault :: Verbosity + -> DistDirLayout + -> Rebuild ProjectConfig +readProjectLocalConfigOrDefault verbosity distDirLayout = do + usesExplicitProjectRoot <- liftIO $ doesFileExist projectFile + if usesExplicitProjectRoot + then do + readProjectFile verbosity distDirLayout "" "project file" + else do + monitorFiles [monitorNonExistentFile projectFile] + return defaultImplicitProjectConfig + + where + projectFile = distProjectFile distDirLayout "" + + defaultImplicitProjectConfig :: ProjectConfig + defaultImplicitProjectConfig = + mempty { + -- We expect a package in the current directory. 
+ projectPackages = [ "./*.cabal" ], + + -- This is to automatically pick up deps that we unpack locally. + projectPackagesOptional = [ "./*/*.cabal" ], + + projectConfigProvenance = Set.singleton Implicit + } + +-- | Reads a @cabal.project.local@ file in the given project root dir, +-- or returns empty. This file gets written by @cabal configure@, or in +-- principle can be edited manually or by other tools. +-- +readProjectLocalExtraConfig :: Verbosity -> DistDirLayout + -> Rebuild ProjectConfig +readProjectLocalExtraConfig verbosity distDirLayout = + readProjectFile verbosity distDirLayout "local" + "project local configuration file" + +-- | Reads a @cabal.project.freeze@ file in the given project root dir, +-- or returns empty. This file gets written by @cabal freeze@, or in +-- principle can be edited manually or by other tools. +-- +readProjectLocalFreezeConfig :: Verbosity -> DistDirLayout + -> Rebuild ProjectConfig +readProjectLocalFreezeConfig verbosity distDirLayout = + readProjectFile verbosity distDirLayout "freeze" + "project freeze file" + +-- | Reads a named config file in the given project root dir, or returns empty. +-- +readProjectFile :: Verbosity + -> DistDirLayout + -> String + -> String + -> Rebuild ProjectConfig +readProjectFile verbosity DistDirLayout{distProjectFile} + extensionName extensionDescription = do + exists <- liftIO $ doesFileExist extensionFile + if exists + then do monitorFiles [monitorFileHashed extensionFile] + addProjectFileProvenance <$> liftIO readExtensionFile + else do monitorFiles [monitorNonExistentFile extensionFile] + return mempty + where + extensionFile = distProjectFile extensionName + + readExtensionFile = + reportParseResult verbosity extensionDescription extensionFile + . parseProjectConfig + =<< readFile extensionFile + + addProjectFileProvenance config = + config { + projectConfigProvenance = + Set.insert (Explicit extensionFile) (projectConfigProvenance config) + } + + +-- | Parse the 'ProjectConfig' format. 
+-- +-- For the moment this is implemented in terms of parsers for legacy +-- configuration types, plus a conversion. +-- +parseProjectConfig :: String -> ParseResult ProjectConfig +parseProjectConfig content = + convertLegacyProjectConfig <$> + parseLegacyProjectConfig content + + +-- | Render the 'ProjectConfig' format. +-- +-- For the moment this is implemented in terms of a pretty printer for the +-- legacy configuration types, plus a conversion. +-- +showProjectConfig :: ProjectConfig -> String +showProjectConfig = + showLegacyProjectConfig . convertToLegacyProjectConfig + + +-- | Write a @cabal.project.local@ file in the given project root dir. +-- +writeProjectLocalExtraConfig :: DistDirLayout -> ProjectConfig -> IO () +writeProjectLocalExtraConfig DistDirLayout{distProjectFile} = + writeProjectConfigFile (distProjectFile "local") + + +-- | Write a @cabal.project.freeze@ file in the given project root dir. +-- +writeProjectLocalFreezeConfig :: DistDirLayout -> ProjectConfig -> IO () +writeProjectLocalFreezeConfig DistDirLayout{distProjectFile} = + writeProjectConfigFile (distProjectFile "freeze") + + +-- | Write in the @cabal.project@ format to the given file. +-- +writeProjectConfigFile :: FilePath -> ProjectConfig -> IO () +writeProjectConfigFile file = + writeFile file . showProjectConfig + + +-- | Read the user's @~/.cabal/config@ file. 
+-- +readGlobalConfig :: Verbosity -> Flag FilePath -> Rebuild ProjectConfig +readGlobalConfig verbosity configFileFlag = do + config <- liftIO (loadConfig verbosity configFileFlag) + configFile <- liftIO (getConfigFilePath configFileFlag) + monitorFiles [monitorFileHashed configFile] + return (convertLegacyGlobalConfig config) + +reportParseResult :: Verbosity -> String -> FilePath -> ParseResult a -> IO a +reportParseResult verbosity _filetype filename (ParseOk warnings x) = do + unless (null warnings) $ + let msg = unlines (map (OldParser.showPWarning filename) warnings) + in warn verbosity msg + return x +reportParseResult verbosity filetype filename (ParseFailed err) = + let (line, msg) = locatedErrorMsg err + in die' verbosity $ "Error parsing " ++ filetype ++ " " ++ filename + ++ maybe "" (\n -> ':' : show n) line ++ ":\n" ++ msg + + +--------------------------------------------- +-- Finding packages in the project +-- + +-- | The location of a package as part of a project. Local file paths are +-- either absolute (if the user specified it as such) or they are relative +-- to the project root. +-- +data ProjectPackageLocation = + ProjectPackageLocalCabalFile FilePath + | ProjectPackageLocalDirectory FilePath FilePath -- dir and .cabal file + | ProjectPackageLocalTarball FilePath + | ProjectPackageRemoteTarball URI + | ProjectPackageRemoteRepo SourceRepo + | ProjectPackageNamed Dependency + deriving Show + + +-- | Exception thrown by 'findProjectPackages'. 
+-- +data BadPackageLocations + = BadPackageLocations (Set ProjectConfigProvenance) [BadPackageLocation] +#if MIN_VERSION_base(4,8,0) + deriving (Show, Typeable) +#else + deriving (Typeable) + +instance Show BadPackageLocations where + show = renderBadPackageLocations +#endif + +instance Exception BadPackageLocations where +#if MIN_VERSION_base(4,8,0) + displayException = renderBadPackageLocations +#endif +--TODO: [nice to have] custom exception subclass for Doc rendering, colour etc + +data BadPackageLocation + = BadPackageLocationFile BadPackageLocationMatch + | BadLocGlobEmptyMatch String + | BadLocGlobBadMatches String [BadPackageLocationMatch] + | BadLocUnexpectedUriScheme String + | BadLocUnrecognisedUri String + | BadLocUnrecognised String + deriving Show + +data BadPackageLocationMatch + = BadLocUnexpectedFile String + | BadLocNonexistantFile String + | BadLocDirNoCabalFile String + | BadLocDirManyCabalFiles String + deriving Show + +renderBadPackageLocations :: BadPackageLocations -> String +renderBadPackageLocations (BadPackageLocations provenance bpls) + -- There is no provenance information, + -- render standard bad package error information. + | Set.null provenance = renderErrors renderBadPackageLocation + + -- The configuration is implicit, render bad package locations + -- using possibly specialized error messages. + | Set.singleton Implicit == provenance = + renderErrors renderImplicitBadPackageLocation + + -- The configuration contains both implicit and explicit provenance. + -- This should not occur, and a message is output to assist debugging. + | Implicit `Set.member` provenance = + "Warning: both implicit and explicit configuration is present." + ++ renderExplicit + + -- The configuration was read from one or more explicit path(s), + -- list the locations and render the bad package error information. + -- The intent is to supersede this with the relevant location information + -- per package error. 
+ | otherwise = renderExplicit + where + renderErrors f = unlines (map f bpls) + + renderExplicit = + "When using configuration(s) from " + ++ intercalate ", " (mapMaybe getExplicit (Set.toList provenance)) + ++ ", the following errors occurred:\n" + ++ renderErrors renderBadPackageLocation + + getExplicit (Explicit path) = Just path + getExplicit Implicit = Nothing + +--TODO: [nice to have] keep track of the config file (and src loc) packages +-- were listed, to use in error messages + +-- | Render bad package location error information for the implicit +-- @cabal.project@ configuration. +-- +-- TODO: This is currently not fully realized, with only one of the implicit +-- cases handled. More cases should be added with informative help text +-- about the issues related specifically when having no project configuration +-- is present. +renderImplicitBadPackageLocation :: BadPackageLocation -> String +renderImplicitBadPackageLocation bpl = case bpl of + BadLocGlobEmptyMatch pkglocstr -> + "No cabal.project file or cabal file matching the default glob '" + ++ pkglocstr ++ "' was found.\n" + ++ "Please create a package description file .cabal " + ++ "or a cabal.project file referencing the packages you " + ++ "want to build." + _ -> renderBadPackageLocation bpl + +renderBadPackageLocation :: BadPackageLocation -> String +renderBadPackageLocation bpl = case bpl of + BadPackageLocationFile badmatch -> + renderBadPackageLocationMatch badmatch + BadLocGlobEmptyMatch pkglocstr -> + "The package location glob '" ++ pkglocstr + ++ "' does not match any files or directories." + BadLocGlobBadMatches pkglocstr failures -> + "The package location glob '" ++ pkglocstr ++ "' does not match any " + ++ "recognised forms of package. " + ++ concatMap ((' ':) . renderBadPackageLocationMatch) failures + BadLocUnexpectedUriScheme pkglocstr -> + "The package location URI '" ++ pkglocstr ++ "' does not use a " + ++ "supported URI scheme. 
The supported URI schemes are http, https and " + ++ "file." + BadLocUnrecognisedUri pkglocstr -> + "The package location URI '" ++ pkglocstr ++ "' does not appear to " + ++ "be a valid absolute URI." + BadLocUnrecognised pkglocstr -> + "The package location syntax '" ++ pkglocstr ++ "' is not recognised." + +renderBadPackageLocationMatch :: BadPackageLocationMatch -> String +renderBadPackageLocationMatch bplm = case bplm of + BadLocUnexpectedFile pkglocstr -> + "The package location '" ++ pkglocstr ++ "' is not recognised. The " + ++ "supported file targets are .cabal files, .tar.gz tarballs or package " + ++ "directories (i.e. directories containing a .cabal file)." + BadLocNonexistantFile pkglocstr -> + "The package location '" ++ pkglocstr ++ "' does not exist." + BadLocDirNoCabalFile pkglocstr -> + "The package directory '" ++ pkglocstr ++ "' does not contain any " + ++ ".cabal file." + BadLocDirManyCabalFiles pkglocstr -> + "The package directory '" ++ pkglocstr ++ "' contains multiple " + ++ ".cabal files (which is not currently supported)." + +-- | Given the project config, +-- +-- Throws 'BadPackageLocations'. 
+-- +findProjectPackages :: DistDirLayout -> ProjectConfig + -> Rebuild [ProjectPackageLocation] +findProjectPackages DistDirLayout{distProjectRootDirectory} + ProjectConfig{..} = do + + requiredPkgs <- findPackageLocations True projectPackages + optionalPkgs <- findPackageLocations False projectPackagesOptional + let repoPkgs = map ProjectPackageRemoteRepo projectPackagesRepo + namedPkgs = map ProjectPackageNamed projectPackagesNamed + + return (concat [requiredPkgs, optionalPkgs, repoPkgs, namedPkgs]) + where + findPackageLocations required pkglocstr = do + (problems, pkglocs) <- + partitionEithers <$> mapM (findPackageLocation required) pkglocstr + unless (null problems) $ + liftIO $ throwIO $ BadPackageLocations projectConfigProvenance problems + return (concat pkglocs) + + + findPackageLocation :: Bool -> String + -> Rebuild (Either BadPackageLocation + [ProjectPackageLocation]) + findPackageLocation _required@True pkglocstr = + -- strategy: try first as a file:// or http(s):// URL. 
+ -- then as a file glob (usually encompassing single file) + -- finally as a single file, for files that fail to parse as globs + checkIsUriPackage pkglocstr + `mplusMaybeT` checkIsFileGlobPackage pkglocstr + `mplusMaybeT` checkIsSingleFilePackage pkglocstr + >>= maybe (return (Left (BadLocUnrecognised pkglocstr))) return + + + findPackageLocation _required@False pkglocstr = do + -- just globs for optional case + res <- checkIsFileGlobPackage pkglocstr + case res of + Nothing -> return (Left (BadLocUnrecognised pkglocstr)) + Just (Left _) -> return (Right []) -- it's optional + Just (Right pkglocs) -> return (Right pkglocs) + + + checkIsUriPackage, checkIsFileGlobPackage, checkIsSingleFilePackage + :: String -> Rebuild (Maybe (Either BadPackageLocation + [ProjectPackageLocation])) + checkIsUriPackage pkglocstr = + case parseAbsoluteURI pkglocstr of + Just uri@URI { + uriScheme = scheme, + uriAuthority = Just URIAuth { uriRegName = host }, + uriPath = path, + uriQuery = query, + uriFragment = frag + } + | recognisedScheme && not (null host) -> + return (Just (Right [ProjectPackageRemoteTarball uri])) + + | scheme == "file:" && null host && null query && null frag -> + checkIsSingleFilePackage path + + | not recognisedScheme && not (null host) -> + return (Just (Left (BadLocUnexpectedUriScheme pkglocstr))) + + | recognisedScheme && null host -> + return (Just (Left (BadLocUnrecognisedUri pkglocstr))) + where + recognisedScheme = scheme == "http:" || scheme == "https:" + || scheme == "file:" + + _ -> return Nothing + + + checkIsFileGlobPackage pkglocstr = + case simpleParse pkglocstr of + Nothing -> return Nothing + Just glob -> liftM Just $ do + matches <- matchFileGlob glob + case matches of + [] | isJust (isTrivialFilePathGlob glob) + -> return (Left (BadPackageLocationFile + (BadLocNonexistantFile pkglocstr))) + + [] -> return (Left (BadLocGlobEmptyMatch pkglocstr)) + + _ -> do + (failures, pkglocs) <- partitionEithers <$> + mapM checkFilePackageMatch matches + 
return $! case (failures, pkglocs) of + ([failure], []) | isJust (isTrivialFilePathGlob glob) + -> Left (BadPackageLocationFile failure) + (_, []) -> Left (BadLocGlobBadMatches pkglocstr failures) + _ -> Right pkglocs + + + checkIsSingleFilePackage pkglocstr = do + let filename = distProjectRootDirectory pkglocstr + isFile <- liftIO $ doesFileExist filename + isDir <- liftIO $ doesDirectoryExist filename + if isFile || isDir + then checkFilePackageMatch pkglocstr + >>= either (return . Just . Left . BadPackageLocationFile) + (return . Just . Right . (\x->[x])) + else return Nothing + + + checkFilePackageMatch :: String -> Rebuild (Either BadPackageLocationMatch + ProjectPackageLocation) + checkFilePackageMatch pkglocstr = do + -- The pkglocstr may be absolute or may be relative to the project root. + -- Either way, does the right thing here. We return relative paths if + -- they were relative in the first place. + let abspath = distProjectRootDirectory pkglocstr + isFile <- liftIO $ doesFileExist abspath + isDir <- liftIO $ doesDirectoryExist abspath + parentDirExists <- case takeDirectory abspath of + [] -> return False + dir -> liftIO $ doesDirectoryExist dir + case () of + _ | isDir + -> do matches <- matchFileGlob (globStarDotCabal pkglocstr) + case matches of + [cabalFile] + -> return (Right (ProjectPackageLocalDirectory + pkglocstr cabalFile)) + [] -> return (Left (BadLocDirNoCabalFile pkglocstr)) + _ -> return (Left (BadLocDirManyCabalFiles pkglocstr)) + + | extensionIsTarGz pkglocstr + -> return (Right (ProjectPackageLocalTarball pkglocstr)) + + | takeExtension pkglocstr == ".cabal" + -> return (Right (ProjectPackageLocalCabalFile pkglocstr)) + + | isFile + -> return (Left (BadLocUnexpectedFile pkglocstr)) + + | parentDirExists + -> return (Left (BadLocNonexistantFile pkglocstr)) + + | otherwise + -> return (Left (BadLocUnexpectedFile pkglocstr)) + + + extensionIsTarGz f = takeExtension f == ".gz" + && takeExtension (dropExtension f) == ".tar" + + +-- | A 
glob to find all the cabal files in a directory. +-- +-- For a directory @some/dir/@, this is a glob of the form @some/dir/\*.cabal@. +-- The directory part can be either absolute or relative. +-- +globStarDotCabal :: FilePath -> FilePathGlob +globStarDotCabal dir = + FilePathGlob + (if isAbsolute dir then FilePathRoot root else FilePathRelative) + (foldr (\d -> GlobDir [Literal d]) + (GlobFile [WildCard, Literal ".cabal"]) dirComponents) + where + (root, dirComponents) = fmap splitDirectories (splitDrive dir) + + +--TODO: [code cleanup] use sufficiently recent transformers package +mplusMaybeT :: Monad m => m (Maybe a) -> m (Maybe a) -> m (Maybe a) +mplusMaybeT ma mb = do + mx <- ma + case mx of + Nothing -> mb + Just x -> return (Just x) + + +------------------------------------------------- +-- Fetching and reading packages in the project +-- + +-- | Read the @.cabal@ files for a set of packages. For remote tarballs and +-- VCS source repos this also fetches them if needed. +-- +-- Note here is where we convert from project-root relative paths to absolute +-- paths. 
+-- +fetchAndReadSourcePackages + :: Verbosity + -> DistDirLayout + -> ProjectConfigShared + -> ProjectConfigBuildOnly + -> [ProjectPackageLocation] + -> Rebuild [PackageSpecifier (SourcePackage UnresolvedPkgLoc)] +fetchAndReadSourcePackages verbosity distDirLayout + projectConfigShared + projectConfigBuildOnly + pkgLocations = do + + pkgsLocalDirectory <- + sequence + [ readSourcePackageLocalDirectory verbosity dir cabalFile + | location <- pkgLocations + , (dir, cabalFile) <- projectPackageLocal location ] + + pkgsLocalTarball <- + sequence + [ readSourcePackageLocalTarball verbosity path + | ProjectPackageLocalTarball path <- pkgLocations ] + + pkgsRemoteTarball <- do + getTransport <- delayInitSharedResource $ + configureTransport verbosity progPathExtra + preferredHttpTransport + sequence + [ fetchAndReadSourcePackageRemoteTarball verbosity distDirLayout + getTransport uri + | ProjectPackageRemoteTarball uri <- pkgLocations ] + + pkgsRemoteRepo <- + syncAndReadSourcePackagesRemoteRepos + verbosity distDirLayout + projectConfigShared + [ repo | ProjectPackageRemoteRepo repo <- pkgLocations ] + + let pkgsNamed = + [ NamedPackage pkgname [PackagePropertyVersion verrange] + | ProjectPackageNamed (Dependency pkgname verrange) <- pkgLocations ] + + return $ concat + [ pkgsLocalDirectory + , pkgsLocalTarball + , pkgsRemoteTarball + , pkgsRemoteRepo + , pkgsNamed + ] + where + projectPackageLocal (ProjectPackageLocalDirectory dir file) = [(dir, file)] + projectPackageLocal (ProjectPackageLocalCabalFile file) = [(dir, file)] + where dir = takeDirectory file + projectPackageLocal _ = [] + + progPathExtra = fromNubList (projectConfigProgPathExtra projectConfigShared) + preferredHttpTransport = + flagToMaybe (projectConfigHttpTransport projectConfigBuildOnly) + +-- | A helper for 'fetchAndReadSourcePackages' to handle the case of +-- 'ProjectPackageLocalDirectory' and 'ProjectPackageLocalCabalFile'. +-- We simply read the @.cabal@ file. 
+-- +readSourcePackageLocalDirectory + :: Verbosity + -> FilePath -- ^ The package directory + -> FilePath -- ^ The package @.cabal@ file + -> Rebuild (PackageSpecifier (SourcePackage UnresolvedPkgLoc)) +readSourcePackageLocalDirectory verbosity dir cabalFile = do + monitorFiles [monitorFileHashed cabalFile] + root <- askRoot + let location = LocalUnpackedPackage (root dir) + liftIO $ fmap (mkSpecificSourcePackage location) + . readSourcePackageCabalFile verbosity cabalFile + =<< BS.readFile (root cabalFile) + + +-- | A helper for 'fetchAndReadSourcePackages' to handle the case of +-- 'ProjectPackageLocalTarball'. We scan through the @.tar.gz@ file to find +-- the @.cabal@ file and read that. +-- +readSourcePackageLocalTarball + :: Verbosity + -> FilePath + -> Rebuild (PackageSpecifier (SourcePackage UnresolvedPkgLoc)) +readSourcePackageLocalTarball verbosity tarballFile = do + monitorFiles [monitorFile tarballFile] + root <- askRoot + let location = LocalTarballPackage (root tarballFile) + liftIO $ fmap (mkSpecificSourcePackage location) + . uncurry (readSourcePackageCabalFile verbosity) + =<< extractTarballPackageCabalFile (root tarballFile) + + +-- | A helper for 'fetchAndReadSourcePackages' to handle the case of +-- 'ProjectPackageRemoteTarball'. We download the tarball to the dist src dir +-- and after that handle it like the local tarball case. +-- +fetchAndReadSourcePackageRemoteTarball + :: Verbosity + -> DistDirLayout + -> Rebuild HttpTransport + -> URI + -> Rebuild (PackageSpecifier (SourcePackage UnresolvedPkgLoc)) +fetchAndReadSourcePackageRemoteTarball verbosity + DistDirLayout { + distDownloadSrcDirectory + } + getTransport + tarballUri = + -- The tarball download is expensive so we use another layer of file + -- monitor to avoid it whenever possible. 
+ rerunIfChanged verbosity monitor tarballUri $ do + + -- Download + transport <- getTransport + liftIO $ do + transportCheckHttps verbosity transport tarballUri + notice verbosity ("Downloading " ++ show tarballUri) + createDirectoryIfMissingVerbose verbosity True + distDownloadSrcDirectory + _ <- downloadURI transport verbosity tarballUri tarballFile + return () + + -- Read + monitorFiles [monitorFile tarballFile] + let location = RemoteTarballPackage tarballUri tarballFile + liftIO $ fmap (mkSpecificSourcePackage location) + . uncurry (readSourcePackageCabalFile verbosity) + =<< extractTarballPackageCabalFile tarballFile + where + tarballStem = distDownloadSrcDirectory + localFileNameForRemoteTarball tarballUri + tarballFile = tarballStem <.> "tar.gz" + + monitor :: FileMonitor URI (PackageSpecifier (SourcePackage UnresolvedPkgLoc)) + monitor = newFileMonitor (tarballStem <.> "cache") + + +-- | A helper for 'fetchAndReadSourcePackages' to handle all the cases of +-- 'ProjectPackageRemoteRepo'. +-- +syncAndReadSourcePackagesRemoteRepos + :: Verbosity + -> DistDirLayout + -> ProjectConfigShared + -> [SourceRepo] + -> Rebuild [PackageSpecifier (SourcePackage UnresolvedPkgLoc)] +syncAndReadSourcePackagesRemoteRepos verbosity + DistDirLayout{distDownloadSrcDirectory} + ProjectConfigShared { + projectConfigProgPathExtra + } + repos = do + + repos' <- either reportSourceRepoProblems return $ + validateSourceRepos repos + + -- All 'SourceRepo's grouped by referring to the "same" remote repo + -- instance. So same location but can differ in commit/tag/branch/subdir. 
+ let reposByLocation :: Map (RepoType, String) + [(SourceRepo, RepoType)] + reposByLocation = Map.fromListWith (++) + [ ((rtype, rloc), [(repo, vcsRepoType vcs)]) + | (repo, rloc, rtype, vcs) <- repos' ] + + --TODO: pass progPathExtra on to 'configureVCS' + let _progPathExtra = fromNubList projectConfigProgPathExtra + getConfiguredVCS <- delayInitSharedResources $ \repoType -> + let Just vcs = Map.lookup repoType knownVCSs in + configureVCS verbosity {-progPathExtra-} vcs + + concat <$> sequence + [ rerunIfChanged verbosity monitor repoGroup' $ do + vcs' <- getConfiguredVCS repoType + syncRepoGroupAndReadSourcePackages vcs' pathStem repoGroup' + | repoGroup@((primaryRepo, repoType):_) <- Map.elems reposByLocation + , let repoGroup' = map fst repoGroup + pathStem = distDownloadSrcDirectory + localFileNameForRemoteRepo primaryRepo + monitor :: FileMonitor + [SourceRepo] + [PackageSpecifier (SourcePackage UnresolvedPkgLoc)] + monitor = newFileMonitor (pathStem <.> "cache") + ] + where + syncRepoGroupAndReadSourcePackages + :: VCS ConfiguredProgram + -> FilePath + -> [SourceRepo] + -> Rebuild [PackageSpecifier (SourcePackage UnresolvedPkgLoc)] + syncRepoGroupAndReadSourcePackages vcs pathStem repoGroup = do + liftIO $ createDirectoryIfMissingVerbose verbosity False + distDownloadSrcDirectory + + -- For syncing we don't care about different 'SourceRepo' values that + -- are just different subdirs in the same repo. + syncSourceRepos verbosity vcs + [ (repo, repoPath) + | (repo, _, repoPath) <- repoGroupWithPaths ] + + -- But for reading we go through each 'SourceRepo' including its subdir + -- value and have to know which path each one ended up in. + sequence + [ readPackageFromSourceRepo repoWithSubdir repoPath + | (_, reposWithSubdir, repoPath) <- repoGroupWithPaths + , repoWithSubdir <- reposWithSubdir ] + where + -- So to do both things above, we pair them up here. 
+ repoGroupWithPaths = + zipWith (\(x, y) z -> (x,y,z)) + (Map.toList + (Map.fromListWith (++) + [ (repo { repoSubdir = Nothing }, [repo]) + | repo <- repoGroup ])) + repoPaths + + -- The repos in a group are given distinct names by simple enumeration + -- foo, foo-2, foo-3 etc + repoPaths = pathStem + : [ pathStem ++ "-" ++ show (i :: Int) | i <- [2..] ] + + readPackageFromSourceRepo repo repoPath = do + let packageDir = maybe repoPath (repoPath ) (repoSubdir repo) + entries <- liftIO $ getDirectoryContents packageDir + --TODO: wrap exceptions + case filter (\e -> takeExtension e == ".cabal") entries of + [] -> liftIO $ throwIO NoCabalFileFound + (_:_:_) -> liftIO $ throwIO MultipleCabalFilesFound + [cabalFileName] -> do + monitorFiles [monitorFileHashed cabalFilePath] + liftIO $ fmap (mkSpecificSourcePackage location) + . readSourcePackageCabalFile verbosity cabalFilePath + =<< BS.readFile cabalFilePath + where + cabalFilePath = packageDir cabalFileName + location = RemoteSourceRepoPackage repo packageDir + + + reportSourceRepoProblems :: [(SourceRepo, SourceRepoProblem)] -> Rebuild a + reportSourceRepoProblems = liftIO . die' verbosity . renderSourceRepoProblems + + renderSourceRepoProblems :: [(SourceRepo, SourceRepoProblem)] -> String + renderSourceRepoProblems = unlines . map show -- "TODO: the repo problems" + + +-- | Utility used by all the helpers of 'fetchAndReadSourcePackages' to make an +-- appropriate @'PackageSpecifier' ('SourcePackage' (..))@ for a given package +-- from a given location. 
+-- +mkSpecificSourcePackage :: PackageLocation FilePath + -> GenericPackageDescription + -> PackageSpecifier + (SourcePackage (PackageLocation (Maybe FilePath))) +mkSpecificSourcePackage location pkg = + SpecificSourcePackage SourcePackage { + packageInfoId = packageId pkg, + packageDescription = pkg, + --TODO: it is silly that we still have to use a Maybe FilePath here + packageSource = fmap Just location, + packageDescrOverride = Nothing + } + + +-- | Errors reported upon failing to parse a @.cabal@ file. +-- +data CabalFileParseError = + CabalFileParseError + FilePath + [PError] + (Maybe Version) -- We might discover the spec version the package needs + [PWarning] + deriving (Show, Typeable) + +instance Exception CabalFileParseError + + +-- | Wrapper for the @.cabal@ file parser. It reports warnings on higher +-- verbosity levels and throws 'CabalFileParseError' on failure. +-- +readSourcePackageCabalFile :: Verbosity + -> FilePath + -> BS.ByteString + -> IO GenericPackageDescription +readSourcePackageCabalFile verbosity pkgfilename content = + case runParseResult (parseGenericPackageDescription content) of + (warnings, Right pkg) -> do + unless (null warnings) $ + info verbosity (formatWarnings warnings) + return pkg + + (warnings, Left (mspecVersion, errors)) -> + throwIO $ CabalFileParseError pkgfilename errors mspecVersion warnings + where + formatWarnings warnings = + "The package description file " ++ pkgfilename + ++ " has warnings: " + ++ unlines (map (NewParser.showPWarning pkgfilename) warnings) + + +-- | When looking for a package's @.cabal@ file we can find none, or several, +-- both of which are failures. +-- +data CabalFileSearchFailure = + NoCabalFileFound + | MultipleCabalFilesFound + deriving (Show, Typeable) + +instance Exception CabalFileSearchFailure + + +-- | Find the @.cabal@ file within a tarball file and return it by value. +-- +-- Can fail with a 'Tar.FormatError' or 'CabalFileSearchFailure' exception. 
+-- +extractTarballPackageCabalFile :: FilePath -> IO (FilePath, BS.ByteString) +extractTarballPackageCabalFile tarballFile = + withBinaryFile tarballFile ReadMode $ \hnd -> do + content <- LBS.hGetContents hnd + case extractTarballPackageCabalFilePure content of + Left (Left e) -> throwIO e + Left (Right e) -> throwIO e + Right (fileName, fileContent) -> + (,) fileName <$> evaluate (LBS.toStrict fileContent) + + +-- | Scan through a tar file stream and collect the @.cabal@ file, or fail. +-- +extractTarballPackageCabalFilePure :: LBS.ByteString + -> Either (Either Tar.FormatError + CabalFileSearchFailure) + (FilePath, LBS.ByteString) +extractTarballPackageCabalFilePure = + check + . accumEntryMap + . Tar.filterEntries isCabalFile + . Tar.read + . GZipUtils.maybeDecompress + where + accumEntryMap = Tar.foldlEntries + (\m e -> Map.insert (Tar.entryTarPath e) e m) + Map.empty + + check (Left (e, _m)) = Left (Left e) + check (Right m) = case Map.elems m of + [] -> Left (Right NoCabalFileFound) + [file] -> case Tar.entryContent file of + Tar.NormalFile content _ -> Right (Tar.entryPath file, content) + _ -> Left (Right NoCabalFileFound) + _files -> Left (Right MultipleCabalFilesFound) + + isCabalFile e = case splitPath (Tar.entryPath e) of + [ _dir, file] -> takeExtension file == ".cabal" + [".", _dir, file] -> takeExtension file == ".cabal" + _ -> False + + +-- | The name to use for a local file for a remote tarball 'SourceRepo'. +-- This is deterministic based on the remote tarball URI, and is intended +-- to produce non-clashing file names for different tarballs. +-- +localFileNameForRemoteTarball :: URI -> FilePath +localFileNameForRemoteTarball uri = + mangleName uri + ++ "-" ++ showHex locationHash "" + where + mangleName = truncateString 10 . dropExtension . dropExtension + . takeFileName . dropTrailingPathSeparator . 
uriPath + + locationHash :: Word + locationHash = fromIntegral (Hashable.hash (uriToString id uri "")) + + +-- | The name to use for a local file or dir for a remote 'SourceRepo'. +-- This is deterministic based on the source repo identity details, and +-- intended to produce non-clashing file names for different repos. +-- +localFileNameForRemoteRepo :: SourceRepo -> FilePath +localFileNameForRemoteRepo SourceRepo{repoType, repoLocation, repoModule} = + maybe "" ((++ "-") . mangleName) repoLocation + ++ showHex locationHash "" + where + mangleName = truncateString 10 . dropExtension + . takeFileName . dropTrailingPathSeparator + + -- just the parts that make up the "identity" of the repo + locationHash :: Word + locationHash = + fromIntegral (Hashable.hash (show repoType, repoLocation, repoModule)) + + +-- | Truncate a string, with a visual indication that it is truncated. +truncateString :: Int -> String -> String +truncateString n s | length s <= n = s + | otherwise = take (n-1) s ++ "_" + + +-- TODO: add something like this, here or in the project planning +-- Based on the package location, which packages will be built inplace in the +-- build tree vs placed in the store. This has various implications on what we +-- can do with the package, e.g. can we run tests, ghci etc. 
+-- +-- packageIsLocalToProject :: ProjectPackageLocation -> Bool + + +--------------------------------------------- +-- Checking configuration sanity +-- + +data BadPerPackageCompilerPaths + = BadPerPackageCompilerPaths [(PackageName, String)] +#if MIN_VERSION_base(4,8,0) + deriving (Show, Typeable) +#else + deriving (Typeable) + +instance Show BadPerPackageCompilerPaths where + show = renderBadPerPackageCompilerPaths +#endif + +instance Exception BadPerPackageCompilerPaths where +#if MIN_VERSION_base(4,8,0) + displayException = renderBadPerPackageCompilerPaths +#endif +--TODO: [nice to have] custom exception subclass for Doc rendering, colour etc + +renderBadPerPackageCompilerPaths :: BadPerPackageCompilerPaths -> String +renderBadPerPackageCompilerPaths + (BadPerPackageCompilerPaths ((pkgname, progname) : _)) = + "The path to the compiler program (or programs used by the compiler) " + ++ "cannot be specified on a per-package basis in the cabal.project file " + ++ "(i.e. setting the '" ++ progname ++ "-location' for package '" + ++ display pkgname ++ "'). All packages have to use the same compiler, so " + ++ "specify the path in a global 'program-locations' section." + --TODO: [nice to have] better format control so we can pretty-print the + -- offending part of the project file. Currently the line wrapping breaks any + -- formatting. +renderBadPerPackageCompilerPaths _ = error "renderBadPerPackageCompilerPaths" + +-- | The project configuration is not allowed to specify program locations for +-- programs used by the compiler as these have to be the same for each set of +-- packages. +-- +-- We cannot check this until we know which programs the compiler uses, which +-- in principle is not until we've configured the compiler. 
+-- +-- Throws 'BadPerPackageCompilerPaths' +-- +checkBadPerPackageCompilerPaths :: [ConfiguredProgram] + -> Map PackageName PackageConfig + -> IO () +checkBadPerPackageCompilerPaths compilerPrograms packagesConfig = + case [ (pkgname, progname) + | let compProgNames = Set.fromList (map programId compilerPrograms) + , (pkgname, pkgconf) <- Map.toList packagesConfig + , progname <- Map.keys (getMapLast (packageConfigProgramPaths pkgconf)) + , progname `Set.member` compProgNames ] of + [] -> return () + ps -> throwIO (BadPerPackageCompilerPaths ps) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectOrchestration.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectOrchestration.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectOrchestration.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectOrchestration.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,1181 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE RecordWildCards, NamedFieldPuns #-} +{-# LANGUAGE RankNTypes, ScopedTypeVariables #-} + +-- | This module deals with building and incrementally rebuilding a collection +-- of packages. It is what backs the @cabal build@ and @configure@ commands, +-- as well as being a core part of @run@, @test@, @bench@ and others. +-- +-- The primary thing is in fact rebuilding (and trying to make that quick by +-- not redoing unnecessary work), so building from scratch is just a special +-- case. +-- +-- The build process and the code can be understood by breaking it down into +-- three major parts: +-- +-- * The 'ElaboratedInstallPlan' type +-- +-- * The \"what to do\" phase, where we look at the all input configuration +-- (project files, .cabal files, command line etc) and produce a detailed +-- plan of what to do -- the 'ElaboratedInstallPlan'. 
+-- +-- * The \"do it\" phase, where we take the 'ElaboratedInstallPlan' and we +-- re-execute it. +-- +-- As far as possible, the \"what to do\" phase embodies all the policy, leaving +-- the \"do it\" phase policy free. The first phase contains more of the +-- complicated logic, but it is contained in code that is either pure or just +-- has read effects (except cache updates). Then the second phase does all the +-- actions to build packages, but as far as possible it just follows the +-- instructions and avoids any logic for deciding what to do (apart from +-- recompilation avoidance in executing the plan). +-- +-- This division helps us keep the code under control, making it easier to +-- understand, test and debug. So when you are extending these modules, please +-- think about which parts of your change belong in which part. It is +-- perfectly ok to extend the description of what to do (i.e. the +-- 'ElaboratedInstallPlan') if that helps keep the policy decisions in the +-- first phase. Also, the second phase does not have direct access to any of +-- the input configuration anyway; all the information has to flow via the +-- 'ElaboratedInstallPlan'. +-- +module Distribution.Client.ProjectOrchestration ( + -- * Discovery phase: what is in the project? + establishProjectBaseContext, + ProjectBaseContext(..), + BuildTimeSettings(..), + commandLineFlagsToProjectConfig, + + -- * Pre-build phase: decide what to do. 
+ withInstallPlan, + runProjectPreBuildPhase, + ProjectBuildContext(..), + + -- ** Selecting what targets we mean + readTargetSelectors, + reportTargetSelectorProblems, + resolveTargets, + TargetsMap, + TargetSelector(..), + TargetImplicitCwd(..), + PackageId, + AvailableTarget(..), + AvailableTargetStatus(..), + TargetRequested(..), + ComponentName(..), + ComponentKind(..), + ComponentTarget(..), + SubComponentTarget(..), + TargetProblemCommon(..), + selectComponentTargetBasic, + distinctTargetComponents, + -- ** Utils for selecting targets + filterTargetsKind, + filterTargetsKindWith, + selectBuildableTargets, + selectBuildableTargetsWith, + selectBuildableTargets', + selectBuildableTargetsWith', + forgetTargetsDetail, + + -- ** Adjusting the plan + pruneInstallPlanToTargets, + TargetAction(..), + pruneInstallPlanToDependencies, + CannotPruneDependencies(..), + printPlan, + + -- * Build phase: now do it. + runProjectBuildPhase, + + -- * Post build actions + runProjectPostBuildPhase, + dieOnBuildFailures, + + -- * Shared CLI utils + cmdCommonHelpTextNewBuildBeta, + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude +import Distribution.Compat.Directory + ( makeAbsolute ) + +import Distribution.Client.ProjectConfig +import Distribution.Client.ProjectPlanning + hiding ( pruneInstallPlanToTargets ) +import qualified Distribution.Client.ProjectPlanning as ProjectPlanning + ( pruneInstallPlanToTargets ) +import Distribution.Client.ProjectPlanning.Types +import Distribution.Client.ProjectBuilding +import Distribution.Client.ProjectPlanOutput + +import Distribution.Client.Types + ( GenericReadyPackage(..), UnresolvedSourcePackage + , PackageSpecifier(..) + , SourcePackageDb(..) + , WriteGhcEnvironmentFilesPolicy(..) ) +import Distribution.Solver.Types.PackageIndex + ( lookupPackageName ) +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Client.TargetSelector + ( TargetSelector(..), TargetImplicitCwd(..) 
+ , ComponentKind(..), componentKind + , readTargetSelectors, reportTargetSelectorProblems ) +import Distribution.Client.DistDirLayout +import Distribution.Client.Config (getCabalDir) +import Distribution.Client.Setup hiding (packageName) +import Distribution.Compiler + ( CompilerFlavor(GHC) ) +import Distribution.Types.ComponentName + ( componentNameString ) +import Distribution.Types.UnqualComponentName + ( UnqualComponentName, packageNameToUnqualComponentName ) + +import Distribution.Solver.Types.OptionalStanza + +import Distribution.Package + hiding (InstalledPackageId, installedPackageId) +import Distribution.PackageDescription + ( FlagAssignment, unFlagAssignment, showFlagValue + , diffFlagAssignment ) +import Distribution.Simple.LocalBuildInfo + ( ComponentName(..), pkgComponents ) +import Distribution.Simple.Flag + ( fromFlagOrDefault ) +import qualified Distribution.Simple.Setup as Setup +import Distribution.Simple.Command (commandShowOptions) +import Distribution.Simple.Configure (computeEffectiveProfiling) + +import Distribution.Simple.Utils + ( die', warn, notice, noticeNoWrap, debugNoWrap ) +import Distribution.Verbosity +import Distribution.Version + ( mkVersion ) +import Distribution.Text +import Distribution.Simple.Compiler + ( compilerCompatVersion, showCompilerId + , OptimisationLevel(..)) + +import qualified Data.Monoid as Mon +import qualified Data.Set as Set +import qualified Data.Map as Map +import Data.Either +import Control.Exception (Exception(..), throwIO, assert) +import System.Exit (ExitCode(..), exitFailure) +#ifdef MIN_VERSION_unix +import System.Posix.Signals (sigKILL, sigSEGV) +#endif + + +-- | This holds the context of a project prior to solving: the content of the +-- @cabal.project@ and all the local package @.cabal@ files. 
+-- +data ProjectBaseContext = ProjectBaseContext { + distDirLayout :: DistDirLayout, + cabalDirLayout :: CabalDirLayout, + projectConfig :: ProjectConfig, + localPackages :: [PackageSpecifier UnresolvedSourcePackage], + buildSettings :: BuildTimeSettings + } + +establishProjectBaseContext :: Verbosity + -> ProjectConfig + -> IO ProjectBaseContext +establishProjectBaseContext verbosity cliConfig = do + + cabalDir <- getCabalDir + projectRoot <- either throwIO return =<< + findProjectRoot Nothing mprojectFile + + let distDirLayout = defaultDistDirLayout projectRoot + mdistDirectory + + (projectConfig, localPackages) <- + rebuildProjectConfig verbosity + distDirLayout + cliConfig + + let ProjectConfigBuildOnly { + projectConfigLogsDir + } = projectConfigBuildOnly projectConfig + + ProjectConfigShared { + projectConfigStoreDir + } = projectConfigShared projectConfig + + mlogsDir = Setup.flagToMaybe projectConfigLogsDir + mstoreDir <- sequenceA $ makeAbsolute <$> Setup.flagToMaybe projectConfigStoreDir + let cabalDirLayout = mkCabalDirLayout cabalDir mstoreDir mlogsDir + + buildSettings = resolveBuildTimeSettings + verbosity cabalDirLayout + projectConfig + + return ProjectBaseContext { + distDirLayout, + cabalDirLayout, + projectConfig, + localPackages, + buildSettings + } + where + mdistDirectory = Setup.flagToMaybe projectConfigDistDir + mprojectFile = Setup.flagToMaybe projectConfigProjectFile + ProjectConfigShared { + projectConfigDistDir, + projectConfigProjectFile + } = projectConfigShared cliConfig + + +-- | This holds the context between the pre-build, build and post-build phases. +-- +data ProjectBuildContext = ProjectBuildContext { + -- | This is the improved plan, before we select a plan subset based on + -- the build targets, and before we do the dry-run. So this contains + -- all packages in the project. 
+ elaboratedPlanOriginal :: ElaboratedInstallPlan, + + -- | This is the 'elaboratedPlanOriginal' after we select a plan subset + -- and do the dry-run phase to find out what is up-to or out-of date. + -- This is the plan that will be executed during the build phase. So + -- this contains only a subset of packages in the project. + elaboratedPlanToExecute:: ElaboratedInstallPlan, + + -- | The part of the install plan that's shared between all packages in + -- the plan. This does not change between the two plan variants above, + -- so there is just the one copy. + elaboratedShared :: ElaboratedSharedConfig, + + -- | The result of the dry-run phase. This tells us about each member of + -- the 'elaboratedPlanToExecute'. + pkgsBuildStatus :: BuildStatusMap, + + -- | The targets selected by @selectPlanSubset@. This is useful eg. in + -- CmdRun, where we need a valid target to execute. + targetsMap :: TargetsMap + } + + +-- | Pre-build phase: decide what to do. +-- +withInstallPlan + :: Verbosity + -> ProjectBaseContext + -> (ElaboratedInstallPlan -> ElaboratedSharedConfig -> IO a) + -> IO a +withInstallPlan + verbosity + ProjectBaseContext { + distDirLayout, + cabalDirLayout, + projectConfig, + localPackages + } + action = do + -- Take the project configuration and make a plan for how to build + -- everything in the project. This is independent of any specific targets + -- the user has asked for. 
+ -- + (elaboratedPlan, _, elaboratedShared) <- + rebuildInstallPlan verbosity + distDirLayout cabalDirLayout + projectConfig + localPackages + action elaboratedPlan elaboratedShared + +runProjectPreBuildPhase + :: Verbosity + -> ProjectBaseContext + -> (ElaboratedInstallPlan -> IO (ElaboratedInstallPlan, TargetsMap)) + -> IO ProjectBuildContext +runProjectPreBuildPhase + verbosity + ProjectBaseContext { + distDirLayout, + cabalDirLayout, + projectConfig, + localPackages + } + selectPlanSubset = do + -- Take the project configuration and make a plan for how to build + -- everything in the project. This is independent of any specific targets + -- the user has asked for. + -- + (elaboratedPlan, _, elaboratedShared) <- + rebuildInstallPlan verbosity + distDirLayout cabalDirLayout + projectConfig + localPackages + + -- The plan for what to do is represented by an 'ElaboratedInstallPlan' + + -- Now given the specific targets the user has asked for, decide + -- which bits of the plan we will want to execute. + -- + (elaboratedPlan', targets) <- selectPlanSubset elaboratedPlan + + -- Check which packages need rebuilding. + -- This also gives us more accurate reasons for the --dry-run output. + -- + pkgsBuildStatus <- rebuildTargetsDryRun distDirLayout elaboratedShared + elaboratedPlan' + + -- Improve the plan by marking up-to-date packages as installed. + -- + let elaboratedPlan'' = improveInstallPlanWithUpToDatePackages + pkgsBuildStatus elaboratedPlan' + debugNoWrap verbosity (InstallPlan.showInstallPlan elaboratedPlan'') + + return ProjectBuildContext { + elaboratedPlanOriginal = elaboratedPlan, + elaboratedPlanToExecute = elaboratedPlan'', + elaboratedShared, + pkgsBuildStatus, + targetsMap = targets + } + + +-- | Build phase: now do it. +-- +-- Execute all or parts of the description of what to do to build or +-- rebuild the various packages needed. 
+-- +runProjectBuildPhase :: Verbosity + -> ProjectBaseContext + -> ProjectBuildContext + -> IO BuildOutcomes +runProjectBuildPhase _ ProjectBaseContext{buildSettings} _ + | buildSettingDryRun buildSettings + = return Map.empty + +runProjectBuildPhase verbosity + ProjectBaseContext{..} ProjectBuildContext {..} = + fmap (Map.union (previousBuildOutcomes pkgsBuildStatus)) $ + rebuildTargets verbosity + distDirLayout + (cabalStoreDirLayout cabalDirLayout) + elaboratedPlanToExecute + elaboratedShared + pkgsBuildStatus + buildSettings + where + previousBuildOutcomes :: BuildStatusMap -> BuildOutcomes + previousBuildOutcomes = + Map.mapMaybe $ \status -> case status of + BuildStatusUpToDate buildSuccess -> Just (Right buildSuccess) + --TODO: [nice to have] record build failures persistently + _ -> Nothing + +-- | Post-build phase: various administrative tasks +-- +-- Update bits of state based on the build outcomes and report any failures. +-- +runProjectPostBuildPhase :: Verbosity + -> ProjectBaseContext + -> ProjectBuildContext + -> BuildOutcomes + -> IO () +runProjectPostBuildPhase _ ProjectBaseContext{buildSettings} _ _ + | buildSettingDryRun buildSettings + = return () + +runProjectPostBuildPhase verbosity + ProjectBaseContext {..} ProjectBuildContext {..} + buildOutcomes = do + -- Update other build artefacts + -- TODO: currently none, but could include: + -- - bin symlinks/wrappers + -- - haddock/hoogle/ctags indexes + -- - delete stale lib registrations + -- - delete stale package dirs + + postBuildStatus <- updatePostBuildProjectStatus + verbosity + distDirLayout + elaboratedPlanOriginal + pkgsBuildStatus + buildOutcomes + + -- Write the .ghc.environment file (if allowed by the env file write policy). + let writeGhcEnvFilesPolicy = + projectConfigWriteGhcEnvironmentFilesPolicy . 
projectConfigShared + $ projectConfig + + shouldWriteGhcEnvironment = + case fromFlagOrDefault WriteGhcEnvironmentFilesOnlyForGhc844AndNewer + writeGhcEnvFilesPolicy + of + AlwaysWriteGhcEnvironmentFiles -> True + NeverWriteGhcEnvironmentFiles -> False + WriteGhcEnvironmentFilesOnlyForGhc844AndNewer -> + let compiler = pkgConfigCompiler elaboratedShared + ghcCompatVersion = compilerCompatVersion GHC compiler + in maybe False (>= mkVersion [8,4,4]) ghcCompatVersion + + when shouldWriteGhcEnvironment $ + void $ writePlanGhcEnvironment (distProjectRootDirectory distDirLayout) + elaboratedPlanOriginal + elaboratedShared + postBuildStatus + + -- Finally if there were any build failures then report them and throw + -- an exception to terminate the program + dieOnBuildFailures verbosity elaboratedPlanToExecute buildOutcomes + + -- Note that it is a deliberate design choice that the 'buildTargets' is + -- not passed to phase 1, and the various bits of input config is not + -- passed to phase 2. + -- + -- We make the install plan without looking at the particular targets the + -- user asks us to build. The set of available things we can build is + -- discovered from the env and config and is used to make the install plan. + -- The targets just tell us which parts of the install plan to execute. + -- + -- Conversely, executing the plan does not directly depend on any of the + -- input config. The bits that are needed (or better, the decisions based + -- on it) all go into the install plan. + + -- Notionally, the 'BuildFlags' should be things that do not affect what + -- we build, just how we do it. These ones of course do + + +------------------------------------------------------------------------------ +-- Taking targets into account, selecting what to build +-- + +-- | The set of components to build, represented as a mapping from 'UnitId's +-- to the 'ComponentTarget's within the unit that will be selected +-- (e.g. selected to build, test or repl). 
+-- +-- Associated with each 'ComponentTarget' is the set of 'TargetSelector's that +-- matched this target. Typically this is exactly one, but in general it is +-- possible to for different selectors to match the same target. This extra +-- information is primarily to help make helpful error messages. +-- +type TargetsMap = Map UnitId [(ComponentTarget, [TargetSelector])] + +-- | Given a set of 'TargetSelector's, resolve which 'UnitId's and +-- 'ComponentTarget's they ought to refer to. +-- +-- The idea is that every user target identifies one or more roots in the +-- 'ElaboratedInstallPlan', which we will use to determine the closure +-- of what packages need to be built, dropping everything from the plan +-- that is unnecessary. This closure and pruning is done by +-- 'pruneInstallPlanToTargets' and this needs to be told the roots in terms +-- of 'UnitId's and the 'ComponentTarget's within those. +-- +-- This means we first need to translate the 'TargetSelector's into the +-- 'UnitId's and 'ComponentTarget's. This translation has to be different for +-- the different command line commands, like @build@, @repl@ etc. For example +-- the command @build pkgfoo@ could select a different set of components in +-- pkgfoo than @repl pkgfoo@. The @build@ command would select any library and +-- all executables, whereas @repl@ would select the library or a single +-- executable. Furthermore, both of these examples could fail, and fail in +-- different ways and each needs to be able to produce helpful error messages. +-- +-- So 'resolveTargets' takes two helpers: one to select the targets to be used +-- by user targets that refer to a whole package ('TargetPackage'), and +-- another to check user targets that refer to a component (or a module or +-- file within a component). These helpers can fail, and use their own error +-- type. Both helpers get given the 'AvailableTarget' info about the +-- component(s). 
+-- +-- While commands vary quite a bit in their behaviour about which components to +-- select for a whole-package target, most commands have the same behaviour for +-- checking a user target that refers to a specific component. To help with +-- this commands can use 'selectComponentTargetBasic', either directly or as +-- a basis for their own @selectComponentTarget@ implementation. +-- +resolveTargets :: forall err. + (forall k. TargetSelector + -> [AvailableTarget k] + -> Either err [k]) + -> (forall k. SubComponentTarget + -> AvailableTarget k + -> Either err k ) + -> (TargetProblemCommon -> err) + -> ElaboratedInstallPlan + -> Maybe (SourcePackageDb) + -> [TargetSelector] + -> Either [err] TargetsMap +resolveTargets selectPackageTargets selectComponentTarget liftProblem + installPlan mPkgDb = + fmap mkTargetsMap + . checkErrors + . map (\ts -> (,) ts <$> checkTarget ts) + where + mkTargetsMap :: [(TargetSelector, [(UnitId, ComponentTarget)])] + -> TargetsMap + mkTargetsMap targets = + Map.map nubComponentTargets + $ Map.fromListWith (++) + [ (uid, [(ct, ts)]) + | (ts, cts) <- targets + , (uid, ct) <- cts ] + + AvailableTargetIndexes{..} = availableTargetIndexes installPlan + + checkTarget :: TargetSelector -> Either err [(UnitId, ComponentTarget)] + + -- We can ask to build any whole package, project-local or a dependency + checkTarget bt@(TargetPackage _ [pkgid] mkfilter) + | Just ats <- fmap (maybe id filterTargetsKind mkfilter) + $ Map.lookup pkgid availableTargetsByPackageId + = fmap (componentTargets WholeComponent) + $ selectPackageTargets bt ats + + | otherwise + = Left (liftProblem (TargetProblemNoSuchPackage pkgid)) + + checkTarget (TargetPackage _ _ _) + = error "TODO: add support for multiple packages in a directory" + -- For the moment this error cannot happen here, because it gets + -- detected when the package config is being constructed. This case + -- will need handling properly when we do add support. 
+ -- + -- TODO: how should this use case play together with the + -- '--cabal-file' option of 'configure' which allows using multiple + -- .cabal files for a single package? + + checkTarget bt@(TargetAllPackages mkfilter) = + fmap (componentTargets WholeComponent) + . selectPackageTargets bt + . maybe id filterTargetsKind mkfilter + . filter availableTargetLocalToProject + $ concat (Map.elems availableTargetsByPackageId) + + checkTarget (TargetComponent pkgid cname subtarget) + | Just ats <- Map.lookup (pkgid, cname) + availableTargetsByPackageIdAndComponentName + = fmap (componentTargets subtarget) + $ selectComponentTargets subtarget ats + + | Map.member pkgid availableTargetsByPackageId + = Left (liftProblem (TargetProblemNoSuchComponent pkgid cname)) + + | otherwise + = Left (liftProblem (TargetProblemNoSuchPackage pkgid)) + + checkTarget (TargetComponentUnknown pkgname ecname subtarget) + | Just ats <- case ecname of + Left ucname -> + Map.lookup (pkgname, ucname) + availableTargetsByPackageNameAndUnqualComponentName + Right cname -> + Map.lookup (pkgname, cname) + availableTargetsByPackageNameAndComponentName + = fmap (componentTargets subtarget) + $ selectComponentTargets subtarget ats + + | Map.member pkgname availableTargetsByPackageName + = Left (liftProblem (TargetProblemUnknownComponent pkgname ecname)) + + | otherwise + = Left (liftProblem (TargetNotInProject pkgname)) + + checkTarget bt@(TargetPackageNamed pkgname mkfilter) + | Just ats <- fmap (maybe id filterTargetsKind mkfilter) + $ Map.lookup pkgname availableTargetsByPackageName + = fmap (componentTargets WholeComponent) + . 
selectPackageTargets bt + $ ats + + | Just SourcePackageDb{ packageIndex } <- mPkgDb + , let pkg = lookupPackageName packageIndex pkgname + , not (null pkg) + = Left (liftProblem (TargetAvailableInIndex pkgname)) + + | otherwise + = Left (liftProblem (TargetNotInProject pkgname)) + + componentTargets :: SubComponentTarget + -> [(b, ComponentName)] + -> [(b, ComponentTarget)] + componentTargets subtarget = + map (fmap (\cname -> ComponentTarget cname subtarget)) + + selectComponentTargets :: SubComponentTarget + -> [AvailableTarget k] + -> Either err [k] + selectComponentTargets subtarget = + either (Left . head) Right + . checkErrors + . map (selectComponentTarget subtarget) + + checkErrors :: [Either e a] -> Either [e] [a] + checkErrors = (\(es, xs) -> if null es then Right xs else Left es) + . partitionEithers + + +data AvailableTargetIndexes = AvailableTargetIndexes { + availableTargetsByPackageIdAndComponentName + :: AvailableTargetsMap (PackageId, ComponentName), + + availableTargetsByPackageId + :: AvailableTargetsMap PackageId, + + availableTargetsByPackageName + :: AvailableTargetsMap PackageName, + + availableTargetsByPackageNameAndComponentName + :: AvailableTargetsMap (PackageName, ComponentName), + + availableTargetsByPackageNameAndUnqualComponentName + :: AvailableTargetsMap (PackageName, UnqualComponentName) + } +type AvailableTargetsMap k = Map k [AvailableTarget (UnitId, ComponentName)] + +-- We define a bunch of indexes to help 'resolveTargets' with resolving +-- 'TargetSelector's to specific 'UnitId's. +-- +-- They are all derived from the 'availableTargets' index. +-- The 'availableTargetsByPackageIdAndComponentName' is just that main index, +-- while the others are derived by re-grouping on the index key. +-- +-- They are all constructed lazily because they are not necessarily all used. 
+-- +availableTargetIndexes :: ElaboratedInstallPlan -> AvailableTargetIndexes +availableTargetIndexes installPlan = AvailableTargetIndexes{..} + where + availableTargetsByPackageIdAndComponentName = + availableTargets installPlan + + availableTargetsByPackageId = + Map.mapKeysWith + (++) (\(pkgid, _cname) -> pkgid) + availableTargetsByPackageIdAndComponentName + `Map.union` availableTargetsEmptyPackages + + availableTargetsByPackageName = + Map.mapKeysWith + (++) packageName + availableTargetsByPackageId + + availableTargetsByPackageNameAndComponentName = + Map.mapKeysWith + (++) (\(pkgid, cname) -> (packageName pkgid, cname)) + availableTargetsByPackageIdAndComponentName + + availableTargetsByPackageNameAndUnqualComponentName = + Map.mapKeysWith + (++) (\(pkgid, cname) -> let pname = packageName pkgid + cname' = unqualComponentName pname cname + in (pname, cname')) + availableTargetsByPackageIdAndComponentName + where + unqualComponentName :: PackageName -> ComponentName -> UnqualComponentName + unqualComponentName pkgname = + fromMaybe (packageNameToUnqualComponentName pkgname) + . componentNameString + + -- Add in all the empty packages. These do not appear in the + -- availableTargetsByComponent map, since that only contains components + -- so packages with no components are invisible from that perspective. + -- The empty packages need to be there for proper error reporting, so users + -- can select the empty package and then we can report that it is empty, + -- otherwise we falsely report there is no such package at all. + availableTargetsEmptyPackages = + Map.fromList + [ (packageId pkg, []) + | InstallPlan.Configured pkg <- InstallPlan.toList installPlan + , case elabPkgOrComp pkg of + ElabComponent _ -> False + ElabPackage _ -> null (pkgComponents (elabPkgDescription pkg)) + ] + + --TODO: [research required] what if the solution has multiple versions of this package? + -- e.g. 
due to setup deps or due to multiple independent sets of + -- packages being built (e.g. ghc + ghcjs in a project) + +filterTargetsKind :: ComponentKind -> [AvailableTarget k] -> [AvailableTarget k] +filterTargetsKind ckind = filterTargetsKindWith (== ckind) + +filterTargetsKindWith :: (ComponentKind -> Bool) + -> [AvailableTarget k] -> [AvailableTarget k] +filterTargetsKindWith p ts = + [ t | t@(AvailableTarget _ cname _ _) <- ts + , p (componentKind cname) ] + +selectBuildableTargets :: [AvailableTarget k] -> [k] +selectBuildableTargets ts = + [ k | AvailableTarget _ _ (TargetBuildable k _) _ <- ts ] + +selectBuildableTargetsWith :: (TargetRequested -> Bool) + -> [AvailableTarget k] -> [k] +selectBuildableTargetsWith p ts = + [ k | AvailableTarget _ _ (TargetBuildable k req) _ <- ts, p req ] + +selectBuildableTargets' :: [AvailableTarget k] -> ([k], [AvailableTarget ()]) +selectBuildableTargets' ts = + (,) [ k | AvailableTarget _ _ (TargetBuildable k _) _ <- ts ] + [ forgetTargetDetail t + | t@(AvailableTarget _ _ (TargetBuildable _ _) _) <- ts ] + +selectBuildableTargetsWith' :: (TargetRequested -> Bool) + -> [AvailableTarget k] -> ([k], [AvailableTarget ()]) +selectBuildableTargetsWith' p ts = + (,) [ k | AvailableTarget _ _ (TargetBuildable k req) _ <- ts, p req ] + [ forgetTargetDetail t + | t@(AvailableTarget _ _ (TargetBuildable _ req) _) <- ts, p req ] + + +forgetTargetDetail :: AvailableTarget k -> AvailableTarget () +forgetTargetDetail = fmap (const ()) + +forgetTargetsDetail :: [AvailableTarget k] -> [AvailableTarget ()] +forgetTargetsDetail = map forgetTargetDetail + +-- | A basic @selectComponentTarget@ implementation to use or pass to +-- 'resolveTargets', that does the basic checks that the component is +-- buildable and isn't a test suite or benchmark that is disabled. 
This +-- can also be used to do these basic checks as part of a custom impl that +-- +selectComponentTargetBasic :: SubComponentTarget + -> AvailableTarget k + -> Either TargetProblemCommon k +selectComponentTargetBasic subtarget + AvailableTarget { + availableTargetPackageId = pkgid, + availableTargetComponentName = cname, + availableTargetStatus + } = + case availableTargetStatus of + TargetDisabledByUser -> + Left (TargetOptionalStanzaDisabledByUser pkgid cname subtarget) + + TargetDisabledBySolver -> + Left (TargetOptionalStanzaDisabledBySolver pkgid cname subtarget) + + TargetNotLocal -> + Left (TargetComponentNotProjectLocal pkgid cname subtarget) + + TargetNotBuildable -> + Left (TargetComponentNotBuildable pkgid cname subtarget) + + TargetBuildable targetKey _ -> + Right targetKey + +data TargetProblemCommon + = TargetNotInProject PackageName + | TargetAvailableInIndex PackageName + | TargetComponentNotProjectLocal PackageId ComponentName SubComponentTarget + | TargetComponentNotBuildable PackageId ComponentName SubComponentTarget + | TargetOptionalStanzaDisabledByUser PackageId ComponentName SubComponentTarget + | TargetOptionalStanzaDisabledBySolver PackageId ComponentName SubComponentTarget + | TargetProblemUnknownComponent PackageName + (Either UnqualComponentName ComponentName) + + -- The target matching stuff only returns packages local to the project, + -- so these lookups should never fail, but if 'resolveTargets' is called + -- directly then of course it can. + | TargetProblemNoSuchPackage PackageId + | TargetProblemNoSuchComponent PackageId ComponentName + deriving (Eq, Show) + +-- | Wrapper around 'ProjectPlanning.pruneInstallPlanToTargets' that adjusts +-- for the extra unneeded info in the 'TargetsMap'. 
+-- +pruneInstallPlanToTargets :: TargetAction -> TargetsMap + -> ElaboratedInstallPlan -> ElaboratedInstallPlan +pruneInstallPlanToTargets targetActionType targetsMap elaboratedPlan = + assert (Map.size targetsMap > 0) $ + ProjectPlanning.pruneInstallPlanToTargets + targetActionType + (Map.map (map fst) targetsMap) + elaboratedPlan + +-- | Utility used by repl and run to check if the targets spans multiple +-- components, since those commands do not support multiple components. +-- +distinctTargetComponents :: TargetsMap -> Set.Set (UnitId, ComponentName) +distinctTargetComponents targetsMap = + Set.fromList [ (uid, cname) + | (uid, cts) <- Map.toList targetsMap + , (ComponentTarget cname _, _) <- cts ] + + +------------------------------------------------------------------------------ +-- Displaying what we plan to do +-- + +-- | Print a user-oriented presentation of the install plan, indicating what +-- will be built. +-- +printPlan :: Verbosity + -> ProjectBaseContext + -> ProjectBuildContext + -> IO () +printPlan verbosity + ProjectBaseContext { + buildSettings = BuildTimeSettings{buildSettingDryRun}, + projectConfig = ProjectConfig { + projectConfigLocalPackages = PackageConfig {packageConfigOptimization} + } + } + ProjectBuildContext { + elaboratedPlanToExecute = elaboratedPlan, + elaboratedShared, + pkgsBuildStatus + } + + | null pkgs + = notice verbosity "Up to date" + + | otherwise + = noticeNoWrap verbosity $ unlines $ + (showBuildProfile ++ "In order, the following " ++ wouldWill ++ " be built" ++ + ifNormal " (use -v for more details)" ++ ":") + : map showPkgAndReason pkgs + + where + pkgs = InstallPlan.executionOrder elaboratedPlan + + ifVerbose s | verbosity >= verbose = s + | otherwise = "" + + ifNormal s | verbosity >= verbose = "" + | otherwise = s + + wouldWill | buildSettingDryRun = "would" + | otherwise = "will" + + showPkgAndReason :: ElaboratedReadyPackage -> String + showPkgAndReason (ReadyPackage elab) = + " - " ++ + (if verbosity >= 
deafening + then display (installedUnitId elab) + else display (packageId elab) + ) ++ + (case elabPkgOrComp elab of + ElabPackage pkg -> showTargets elab ++ ifVerbose (showStanzas pkg) + ElabComponent comp -> + " (" ++ showComp elab comp ++ ")" + ) ++ + showFlagAssignment (nonDefaultFlags elab) ++ + showConfigureFlags elab ++ + let buildStatus = pkgsBuildStatus Map.! installedUnitId elab in + " (" ++ showBuildStatus buildStatus ++ ")" + + showComp elab comp = + maybe "custom" display (compComponentName comp) ++ + if Map.null (elabInstantiatedWith elab) + then "" + else " with " ++ + intercalate ", " + -- TODO: Abbreviate the UnitIds + [ display k ++ "=" ++ display v + | (k,v) <- Map.toList (elabInstantiatedWith elab) ] + + nonDefaultFlags :: ElaboratedConfiguredPackage -> FlagAssignment + nonDefaultFlags elab = elabFlagAssignment elab `diffFlagAssignment` elabFlagDefaults elab + + showStanzas pkg = concat + $ [ " *test" + | TestStanzas `Set.member` pkgStanzasEnabled pkg ] + ++ [ " *bench" + | BenchStanzas `Set.member` pkgStanzasEnabled pkg ] + + showTargets elab + | null (elabBuildTargets elab) = "" + | otherwise + = " (" ++ intercalate ", " [ showComponentTarget (packageId elab) t | t <- elabBuildTargets elab ] + ++ ")" + + showFlagAssignment :: FlagAssignment -> String + showFlagAssignment = concatMap ((' ' :) . showFlagValue) . unFlagAssignment + + showConfigureFlags elab = + let fullConfigureFlags + = setupHsConfigureFlags + (ReadyPackage elab) + elaboratedShared + verbosity + "$builddir" + -- | Given a default value @x@ for a flag, nub @Flag x@ + -- into @NoFlag@. This gives us a tidier command line + -- rendering. 
+ nubFlag :: Eq a => a -> Setup.Flag a -> Setup.Flag a + nubFlag x (Setup.Flag x') | x == x' = Setup.NoFlag + nubFlag _ f = f + (tryLibProfiling, tryExeProfiling) = computeEffectiveProfiling fullConfigureFlags + partialConfigureFlags + = Mon.mempty { + configProf = + nubFlag False (configProf fullConfigureFlags), + configProfExe = + nubFlag tryExeProfiling (configProfExe fullConfigureFlags), + configProfLib = + nubFlag tryLibProfiling (configProfLib fullConfigureFlags) + -- Maybe there are more we can add + } + -- Not necessary to "escape" it, it's just for user output + in unwords . ("":) $ + commandShowOptions + (Setup.configureCommand (pkgConfigCompilerProgs elaboratedShared)) + partialConfigureFlags + + showBuildStatus status = case status of + BuildStatusPreExisting -> "existing package" + BuildStatusInstalled -> "already installed" + BuildStatusDownload {} -> "requires download & build" + BuildStatusUnpack {} -> "requires build" + BuildStatusRebuild _ rebuild -> case rebuild of + BuildStatusConfigure + (MonitoredValueChanged _) -> "configuration changed" + BuildStatusConfigure mreason -> showMonitorChangedReason mreason + BuildStatusBuild _ buildreason -> case buildreason of + BuildReasonDepsRebuilt -> "dependency rebuilt" + BuildReasonFilesChanged + mreason -> showMonitorChangedReason mreason + BuildReasonExtraTargets _ -> "additional components to build" + BuildReasonEphemeralTargets -> "ephemeral targets" + BuildStatusUpToDate {} -> "up to date" -- doesn't happen + + showMonitorChangedReason (MonitoredFileChanged file) = "file " ++ file ++ " changed" + showMonitorChangedReason (MonitoredValueChanged _) = "value changed" + showMonitorChangedReason MonitorFirstRun = "first run" + showMonitorChangedReason MonitorCorruptCache = "cannot read state cache" + + showBuildProfile = "Build profile: " ++ unwords [ + "-w " ++ (showCompilerId . 
pkgConfigCompiler) elaboratedShared, + "-O" ++ (case packageConfigOptimization of + Setup.Flag NoOptimisation -> "0" + Setup.Flag NormalOptimisation -> "1" + Setup.Flag MaximumOptimisation -> "2" + Setup.NoFlag -> "1")] + ++ "\n" + +-- | If there are build failures then report them and throw an exception. +-- +dieOnBuildFailures :: Verbosity + -> ElaboratedInstallPlan -> BuildOutcomes -> IO () +dieOnBuildFailures verbosity plan buildOutcomes + | null failures = return () + + | isSimpleCase = exitFailure + + | otherwise = do + -- For failures where we have a build log, print the log plus a header + sequence_ + [ do notice verbosity $ + '\n' : renderFailureDetail False pkg reason + ++ "\nBuild log ( " ++ logfile ++ " ):" + readFile logfile >>= noticeNoWrap verbosity + | (pkg, ShowBuildSummaryAndLog reason logfile) + <- failuresClassification + ] + + -- For all failures, print either a short summary (if we showed the + -- build log) or all details + dieIfNotHaddockFailure verbosity $ unlines + [ case failureClassification of + ShowBuildSummaryAndLog reason _ + | verbosity > normal + -> renderFailureDetail mentionDepOf pkg reason + + | otherwise + -> renderFailureSummary mentionDepOf pkg reason + ++ ". See the build log above for details." 
+ + ShowBuildSummaryOnly reason -> + renderFailureDetail mentionDepOf pkg reason + + | let mentionDepOf = verbosity <= normal + , (pkg, failureClassification) <- failuresClassification ] + where + failures = [ (pkgid, failure) + | (pkgid, Left failure) <- Map.toList buildOutcomes ] + + failuresClassification = + [ (pkg, classifyBuildFailure failure) + | (pkgid, failure) <- failures + , case buildFailureReason failure of + DependentFailed {} -> verbosity > normal + _ -> True + , InstallPlan.Configured pkg <- + maybeToList (InstallPlan.lookup plan pkgid) + ] + + dieIfNotHaddockFailure + | all isHaddockFailure failuresClassification = warn + | otherwise = die' + where + isHaddockFailure (_, ShowBuildSummaryOnly (HaddocksFailed _) ) = True + isHaddockFailure (_, ShowBuildSummaryAndLog (HaddocksFailed _) _) = True + isHaddockFailure _ = False + + + classifyBuildFailure :: BuildFailure -> BuildFailurePresentation + classifyBuildFailure BuildFailure { + buildFailureReason = reason, + buildFailureLogFile = mlogfile + } = + maybe (ShowBuildSummaryOnly reason) + (ShowBuildSummaryAndLog reason) $ do + logfile <- mlogfile + e <- buildFailureException reason + ExitFailure 1 <- fromException e + return logfile + + -- Special case: we don't want to report anything complicated in the case + -- of just doing build on the current package, since it's clear from + -- context which package failed. + -- + -- We generalise this rule as follows: + -- - if only one failure occurs, and it is in a single root package (ie a + -- package with nothing else depending on it) + -- - and that failure is of a kind that always reports enough detail + -- itself (e.g. ghc reporting errors on stdout) + -- - then we do not report additional error detail or context. 
+ -- + isSimpleCase + | [(pkgid, failure)] <- failures + , [pkg] <- rootpkgs + , installedUnitId pkg == pkgid + , isFailureSelfExplanatory (buildFailureReason failure) + = True + | otherwise + = False + + -- NB: if the Setup script segfaulted or was interrupted, + -- we should give more detailed information. So only + -- assume that exit code 1 is "pedestrian failure." + isFailureSelfExplanatory (BuildFailed e) + | Just (ExitFailure 1) <- fromException e = True + + isFailureSelfExplanatory (ConfigureFailed e) + | Just (ExitFailure 1) <- fromException e = True + + isFailureSelfExplanatory _ = False + + rootpkgs = + [ pkg + | InstallPlan.Configured pkg <- InstallPlan.toList plan + , hasNoDependents pkg ] + + ultimateDeps pkgid = + filter (\pkg -> hasNoDependents pkg && installedUnitId pkg /= pkgid) + (InstallPlan.reverseDependencyClosure plan [pkgid]) + + hasNoDependents :: HasUnitId pkg => pkg -> Bool + hasNoDependents = null . InstallPlan.revDirectDeps plan . installedUnitId + + renderFailureDetail mentionDepOf pkg reason = + renderFailureSummary mentionDepOf pkg reason ++ "." 
+ ++ renderFailureExtraDetail reason + ++ maybe "" showException (buildFailureException reason) + + renderFailureSummary mentionDepOf pkg reason = + case reason of + DownloadFailed _ -> "Failed to download " ++ pkgstr + UnpackFailed _ -> "Failed to unpack " ++ pkgstr + ConfigureFailed _ -> "Failed to build " ++ pkgstr + BuildFailed _ -> "Failed to build " ++ pkgstr + ReplFailed _ -> "repl failed for " ++ pkgstr + HaddocksFailed _ -> "Failed to build documentation for " ++ pkgstr + TestsFailed _ -> "Tests failed for " ++ pkgstr + BenchFailed _ -> "Benchmarks failed for " ++ pkgstr + InstallFailed _ -> "Failed to build " ++ pkgstr + DependentFailed depid + -> "Failed to build " ++ display (packageId pkg) + ++ " because it depends on " ++ display depid + ++ " which itself failed to build" + where + pkgstr = elabConfiguredName verbosity pkg + ++ if mentionDepOf + then renderDependencyOf (installedUnitId pkg) + else "" + + renderFailureExtraDetail reason = + case reason of + ConfigureFailed _ -> " The failure occurred during the configure step." + InstallFailed _ -> " The failure occurred during the final install step." + _ -> "" + + renderDependencyOf pkgid = + case ultimateDeps pkgid of + [] -> "" + (p1:[]) -> " (which is required by " ++ elabPlanPackageName verbosity p1 ++ ")" + (p1:p2:[]) -> " (which is required by " ++ elabPlanPackageName verbosity p1 + ++ " and " ++ elabPlanPackageName verbosity p2 ++ ")" + (p1:p2:_) -> " (which is required by " ++ elabPlanPackageName verbosity p1 + ++ ", " ++ elabPlanPackageName verbosity p2 + ++ " and others)" + + showException e = case fromException e of + Just (ExitFailure 1) -> "" + +#ifdef MIN_VERSION_unix + -- Note [Positive "signal" exit code] + -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + -- What's the business with the test for negative and positive + -- signal values? The API for process specifies that if the + -- process died due to a signal, it returns a *negative* exit + -- code. So that's the negative test. 
+ -- + -- What about the positive test? Well, when we find out that + -- a process died due to a signal, we ourselves exit with that + -- exit code. However, we don't "kill ourselves" with the + -- signal; we just exit with the same code as the signal: thus + -- the caller sees a *positive* exit code. So that's what + -- happens when we get a positive exit code. + Just (ExitFailure n) + | -n == fromIntegral sigSEGV -> + " The build process segfaulted (i.e. SIGSEGV)." + + | n == fromIntegral sigSEGV -> + " The build process terminated with exit code " ++ show n + ++ " which may be because some part of it segfaulted. (i.e. SIGSEGV)." + + | -n == fromIntegral sigKILL -> + " The build process was killed (i.e. SIGKILL). " ++ explanation + + | n == fromIntegral sigKILL -> + " The build process terminated with exit code " ++ show n + ++ " which may be because some part of it was killed " + ++ "(i.e. SIGKILL). " ++ explanation + where + explanation = "The typical reason for this is that there is not " + ++ "enough memory available (e.g. the OS killed a process " + ++ "using lots of memory)." +#endif + Just (ExitFailure n) -> + " The build process terminated with exit code " ++ show n + + _ -> " The exception was:\n " +#if MIN_VERSION_base(4,8,0) + ++ displayException e +#else + ++ show e +#endif + + buildFailureException reason = + case reason of + DownloadFailed e -> Just e + UnpackFailed e -> Just e + ConfigureFailed e -> Just e + BuildFailed e -> Just e + ReplFailed e -> Just e + HaddocksFailed e -> Just e + TestsFailed e -> Just e + BenchFailed e -> Just e + InstallFailed e -> Just e + DependentFailed _ -> Nothing + +data BuildFailurePresentation = + ShowBuildSummaryOnly BuildFailureReason + | ShowBuildSummaryAndLog BuildFailureReason FilePath + + +cmdCommonHelpTextNewBuildBeta :: String +cmdCommonHelpTextNewBuildBeta = + "Note: this command is part of the new project-based system (aka " + ++ "nix-style\nlocal builds). These features are currently in beta. 
" + ++ "Please see\n" + ++ "http://cabal.readthedocs.io/en/latest/nix-local-build-overview.html " + ++ "for\ndetails and advice on what you can expect to work. If you " + ++ "encounter problems\nplease file issues at " + ++ "https://github.com/haskell/cabal/issues and if you\nhave any time " + ++ "to get involved and help with testing, fixing bugs etc then\nthat " + ++ "is very much appreciated.\n" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectPlanning/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectPlanning/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectPlanning/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectPlanning/Types.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,817 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE TypeFamilies #-} + +-- | Types used while planning how to build everything in a project. +-- +-- Primarily this is the 'ElaboratedInstallPlan'. 
+-- +module Distribution.Client.ProjectPlanning.Types ( + SolverInstallPlan, + + -- * Elaborated install plan types + ElaboratedInstallPlan, + normaliseConfiguredPackage, + ElaboratedConfiguredPackage(..), + + elabDistDirParams, + elabExeDependencyPaths, + elabLibDependencies, + elabOrderLibDependencies, + elabExeDependencies, + elabOrderExeDependencies, + elabSetupDependencies, + elabPkgConfigDependencies, + elabInplaceDependencyBuildCacheFiles, + elabRequiresRegistration, + dataDirsEnvironmentForPlan, + + elabPlanPackageName, + elabConfiguredName, + elabComponentName, + + ElaboratedPackageOrComponent(..), + ElaboratedComponent(..), + ElaboratedPackage(..), + pkgOrderDependencies, + ElaboratedPlanPackage, + ElaboratedSharedConfig(..), + ElaboratedReadyPackage, + BuildStyle(..), + CabalFileText, + + -- * Build targets + ComponentTarget(..), + showComponentTarget, + showTestComponentTarget, + showBenchComponentTarget, + SubComponentTarget(..), + + isSubLibComponentTarget, + isForeignLibComponentTarget, + isExeComponentTarget, + isTestComponentTarget, + isBenchComponentTarget, + + componentOptionalStanza, + + -- * Setup script + SetupScriptStyle(..), + ) where + +import Distribution.Client.TargetSelector + ( SubComponentTarget(..) ) +import Distribution.Client.PackageHash + +import Distribution.Client.Types +import qualified Distribution.Client.InstallPlan as InstallPlan +import Distribution.Client.InstallPlan + ( GenericInstallPlan, GenericPlanPackage(..) 
) +import Distribution.Client.SolverInstallPlan + ( SolverInstallPlan ) +import Distribution.Client.DistDirLayout + +import Distribution.Backpack +import Distribution.Backpack.ModuleShape + +import Distribution.Verbosity +import Distribution.Text +import Distribution.Types.ComponentRequestedSpec +import Distribution.Types.PackageDescription (PackageDescription(..)) +import Distribution.Package + hiding (InstalledPackageId, installedPackageId) +import Distribution.System +import qualified Distribution.PackageDescription as Cabal +import Distribution.InstalledPackageInfo (InstalledPackageInfo) +import Distribution.Simple.Compiler +import Distribution.Simple.Build.PathsModule (pkgPathEnvVar) +import qualified Distribution.Simple.BuildTarget as Cabal +import Distribution.Simple.Program +import Distribution.ModuleName (ModuleName) +import Distribution.Simple.LocalBuildInfo (ComponentName(..)) +import qualified Distribution.Simple.InstallDirs as InstallDirs +import Distribution.Simple.InstallDirs (PathTemplate) +import Distribution.Simple.Setup (HaddockTarget) +import Distribution.Version + +import qualified Distribution.Solver.Types.ComponentDeps as CD +import Distribution.Solver.Types.ComponentDeps (ComponentDeps) +import Distribution.Solver.Types.OptionalStanza +import Distribution.Compat.Graph (IsNode(..)) +import Distribution.Simple.Utils (ordNub) + +import Data.Map (Map) +import qualified Data.Map as Map +import Data.Maybe (catMaybes) +import Data.Set (Set) +import qualified Data.ByteString.Lazy as LBS +import Distribution.Compat.Binary +import GHC.Generics (Generic) +import qualified Data.Monoid as Mon +import Data.Typeable +import Control.Monad +import System.FilePath (()) + + +-- | The combination of an elaborated install plan plus a +-- 'ElaboratedSharedConfig' contains all the details necessary to be able +-- to execute the plan without having to make further policy decisions. 
+-- +-- It does not include dynamic elements such as resources (such as http +-- connections). +-- +type ElaboratedInstallPlan + = GenericInstallPlan InstalledPackageInfo + ElaboratedConfiguredPackage + +type ElaboratedPlanPackage + = GenericPlanPackage InstalledPackageInfo + ElaboratedConfiguredPackage + +-- | User-friendly display string for an 'ElaboratedPlanPackage'. +elabPlanPackageName :: Verbosity -> ElaboratedPlanPackage -> String +elabPlanPackageName verbosity (PreExisting ipkg) + | verbosity <= normal = display (packageName ipkg) + | otherwise = display (installedUnitId ipkg) +elabPlanPackageName verbosity (Configured elab) + = elabConfiguredName verbosity elab +elabPlanPackageName verbosity (Installed elab) + = elabConfiguredName verbosity elab + +--TODO: [code cleanup] decide if we really need this, there's not much in it, and in principle +-- even platform and compiler could be different if we're building things +-- like a server + client with ghc + ghcjs +data ElaboratedSharedConfig + = ElaboratedSharedConfig { + + pkgConfigPlatform :: Platform, + pkgConfigCompiler :: Compiler, --TODO: [code cleanup] replace with CompilerInfo + -- | The programs that the compiler configured (e.g. for GHC, the progs + -- ghc & ghc-pkg). Once constructed, only the 'configuredPrograms' are + -- used. + pkgConfigCompilerProgs :: ProgramDb, + pkgConfigReplOptions :: [String] + } + deriving (Show, Generic, Typeable) + --TODO: [code cleanup] no Eq instance + +instance Binary ElaboratedSharedConfig + +data ElaboratedConfiguredPackage + = ElaboratedConfiguredPackage { + -- | The 'UnitId' which uniquely identifies this item in a build plan + elabUnitId :: UnitId, + + elabComponentId :: ComponentId, + elabInstantiatedWith :: Map ModuleName Module, + elabLinkedInstantiatedWith :: Map ModuleName OpenModule, + + -- | This is true if this is an indefinite package, or this is a + -- package with no signatures. (Notably, it's not true for instantiated + -- packages.) 
The motivation for this is if you ask to build + -- @foo-indef@, this probably means that you want to typecheck + -- it, NOT that you want to rebuild all of the various + -- instantiations of it. + elabIsCanonical :: Bool, + + -- | The 'PackageId' of the originating package + elabPkgSourceId :: PackageId, + + -- | Shape of the package/component, for Backpack. + elabModuleShape :: ModuleShape, + + -- | A total flag assignment for the package. + -- TODO: Actually this can be per-component if we drop + -- all flags that don't affect a component. + elabFlagAssignment :: Cabal.FlagAssignment, + + -- | The original default flag assignment, used only for reporting. + elabFlagDefaults :: Cabal.FlagAssignment, + + elabPkgDescription :: Cabal.PackageDescription, + + -- | Where the package comes from, e.g. tarball, local dir etc. This + -- is not the same as where it may be unpacked to for the build. + elabPkgSourceLocation :: PackageLocation (Maybe FilePath), + + -- | The hash of the source, e.g. the tarball. We don't have this for + -- local source dir packages. + elabPkgSourceHash :: Maybe PackageSourceHash, + + -- | Is this package one of the ones specified by location in the + -- project file? (As opposed to a dependency, or a named package pulled + -- in) + elabLocalToProject :: Bool, + + -- | Are we going to build and install this package to the store, or are + -- we going to build it and register it locally. + elabBuildStyle :: BuildStyle, + + -- | Another way of phrasing 'pkgStanzasAvailable'. + elabEnabledSpec :: ComponentRequestedSpec, + + -- | Which optional stanzas (ie testsuites, benchmarks) can be built. + -- This means the solver produced a plan that has them available. + -- This doesn't necessary mean we build them by default. + elabStanzasAvailable :: Set OptionalStanza, + + -- | Which optional stanzas the user explicitly asked to enable or + -- to disable. 
This tells us which ones we build by default, and + -- helps with error messages when the user asks to build something + -- they explicitly disabled. + -- + -- TODO: The 'Bool' here should be refined into an ADT with three + -- cases: NotRequested, ExplicitlyRequested and + -- ImplicitlyRequested. A stanza is explicitly requested if + -- the user asked, for this *specific* package, that the stanza + -- be enabled; it's implicitly requested if the user asked for + -- all global packages to have this stanza enabled. The + -- difference between an explicit and implicit request is + -- error reporting behavior: if a user asks for tests to be + -- enabled for a specific package that doesn't have any tests, + -- we should warn them about it, but we shouldn't complain + -- that a user enabled tests globally, and some local packages + -- just happen not to have any tests. (But perhaps we should + -- warn if ALL local packages don't have any tests.) + elabStanzasRequested :: Map OptionalStanza Bool, + + elabSetupPackageDBStack :: PackageDBStack, + elabBuildPackageDBStack :: PackageDBStack, + elabRegisterPackageDBStack :: PackageDBStack, + + elabPkgDescriptionOverride :: Maybe CabalFileText, + + -- TODO: make per-component variants of these flags + elabVanillaLib :: Bool, + elabSharedLib :: Bool, + elabStaticLib :: Bool, + elabDynExe :: Bool, + elabGHCiLib :: Bool, + elabProfLib :: Bool, + elabProfExe :: Bool, + elabProfLibDetail :: ProfDetailLevel, + elabProfExeDetail :: ProfDetailLevel, + elabCoverage :: Bool, + elabOptimization :: OptimisationLevel, + elabSplitObjs :: Bool, + elabSplitSections :: Bool, + elabStripLibs :: Bool, + elabStripExes :: Bool, + elabDebugInfo :: DebugInfoLevel, + + elabProgramPaths :: Map String FilePath, + elabProgramArgs :: Map String [String], + elabProgramPathExtra :: [FilePath], + elabConfigureScriptArgs :: [String], + elabExtraLibDirs :: [FilePath], + elabExtraFrameworkDirs :: [FilePath], + elabExtraIncludeDirs :: [FilePath], + 
elabProgPrefix :: Maybe PathTemplate, + elabProgSuffix :: Maybe PathTemplate, + + elabInstallDirs :: InstallDirs.InstallDirs FilePath, + + elabHaddockHoogle :: Bool, + elabHaddockHtml :: Bool, + elabHaddockHtmlLocation :: Maybe String, + elabHaddockForeignLibs :: Bool, + elabHaddockForHackage :: HaddockTarget, + elabHaddockExecutables :: Bool, + elabHaddockTestSuites :: Bool, + elabHaddockBenchmarks :: Bool, + elabHaddockInternal :: Bool, + elabHaddockCss :: Maybe FilePath, + elabHaddockLinkedSource :: Bool, + elabHaddockQuickJump :: Bool, + elabHaddockHscolourCss :: Maybe FilePath, + elabHaddockContents :: Maybe PathTemplate, + + -- Setup.hs related things: + + -- | One of four modes for how we build and interact with the Setup.hs + -- script, based on whether it's a build-type Custom, with or without + -- explicit deps and the cabal spec version the .cabal file needs. + elabSetupScriptStyle :: SetupScriptStyle, + + -- | The version of the Cabal command line interface that we are using + -- for this package. This is typically the version of the Cabal lib + -- that the Setup.hs is built against. + elabSetupScriptCliVersion :: Version, + + -- Build time related: + elabConfigureTargets :: [ComponentTarget], + elabBuildTargets :: [ComponentTarget], + elabTestTargets :: [ComponentTarget], + elabBenchTargets :: [ComponentTarget], + elabReplTarget :: Maybe ComponentTarget, + elabHaddockTargets :: [ComponentTarget], + + elabBuildHaddocks :: Bool, + + --pkgSourceDir ? -- currently passed in later because they can use temp locations + --pkgBuildDir ? 
-- but could in principle still have it here, with optional instr to use temp loc + + -- | Component/package specific information + elabPkgOrComp :: ElaboratedPackageOrComponent + } + deriving (Eq, Show, Generic, Typeable) + +normaliseConfiguredPackage :: ElaboratedSharedConfig + -> ElaboratedConfiguredPackage + -> ElaboratedConfiguredPackage +normaliseConfiguredPackage ElaboratedSharedConfig{pkgConfigCompilerProgs} pkg = + pkg { elabProgramArgs = Map.mapMaybeWithKey lookupFilter (elabProgramArgs pkg) } + where + knownProgramDb = addKnownPrograms builtinPrograms pkgConfigCompilerProgs + + pkgDesc :: PackageDescription + pkgDesc = elabPkgDescription pkg + + removeEmpty :: [String] -> Maybe [String] + removeEmpty [] = Nothing + removeEmpty xs = Just xs + + lookupFilter :: String -> [String] -> Maybe [String] + lookupFilter n args = removeEmpty $ case lookupKnownProgram n knownProgramDb of + Just p -> programNormaliseArgs p (getVersion p) pkgDesc args + Nothing -> args + + getVersion :: Program -> Maybe Version + getVersion p = lookupProgram p knownProgramDb >>= programVersion + +-- | The package/component contains/is a library and so must be registered +elabRequiresRegistration :: ElaboratedConfiguredPackage -> Bool +elabRequiresRegistration elab = + case elabPkgOrComp elab of + ElabComponent comp -> + case compComponentName comp of + Just cn -> is_lib cn && build_target + _ -> False + ElabPackage pkg -> + -- Tricky! Not only do we have to test if the user selected + -- a library as a build target, we also have to test if + -- the library was TRANSITIVELY depended upon, since we will + -- also require a register in this case. + -- + -- NB: It would have been far nicer to just unconditionally + -- register in all cases, but some Custom Setups will fall + -- over if you try to do that, ESPECIALLY if there actually is + -- a library but they hadn't built it. 
+ -- + -- However, as the case of `cpphs-1.20.8` has shown in + -- #5379, in cases when a monolithic package gets + -- installed due to its executable components + -- (i.e. exe:cpphs) into the store we *have* to register + -- if there's a buildable public library (i.e. lib:cpphs) + -- that was built and installed into the same store folder + -- as otherwise this will cause build failures once a + -- target actually depends on lib:cpphs. + build_target || (elabBuildStyle elab == BuildAndInstall && + Cabal.hasPublicLib (elabPkgDescription elab)) + -- the next sub-condition below is currently redundant + -- (see discussion in #5604 for more details), but it's + -- being kept intentionally here as a safeguard because if + -- internal libraries ever start working with + -- non-per-component builds this condition won't be + -- redundant anymore. + || any (depends_on_lib pkg) (elabBuildTargets elab) + where + depends_on_lib pkg (ComponentTarget cn _) = + not (null (CD.select (== CD.componentNameToComponent cn) + (pkgDependsOnSelfLib pkg))) + build_target = + if not (null (elabBuildTargets elab)) + then any is_lib_target (elabBuildTargets elab) + -- Empty build targets mean we build /everything/; + -- that means we have to look more carefully to see + -- if there is anything to register + else Cabal.hasLibs (elabPkgDescription elab) + -- NB: this means we DO NOT reregister if you just built a + -- single file + is_lib_target (ComponentTarget cn WholeComponent) = is_lib cn + is_lib_target _ = False + is_lib CLibName = True + is_lib (CSubLibName _) = True + is_lib _ = False + +-- | Construct the environment needed for the data files to work. +-- This consists of a separate @*_datadir@ variable for each +-- inplace package in the plan. +dataDirsEnvironmentForPlan :: DistDirLayout + -> ElaboratedInstallPlan + -> [(String, Maybe FilePath)] +dataDirsEnvironmentForPlan distDirLayout = catMaybes + . 
fmap (InstallPlan.foldPlanPackage + (const Nothing) + (dataDirEnvVarForPackage distDirLayout)) + . InstallPlan.toList + +-- | Construct an environment variable that points +-- the package's datadir to its correct location. +-- This might be: +-- * 'Just' the package's source directory plus the data subdirectory +-- for inplace packages. +-- * 'Nothing' for packages installed in the store (the path was +-- already included in the package at install/build time). +dataDirEnvVarForPackage :: DistDirLayout + -> ElaboratedConfiguredPackage + -> Maybe (String, Maybe FilePath) +dataDirEnvVarForPackage distDirLayout pkg = + case elabBuildStyle pkg + of BuildAndInstall -> Nothing + BuildInplaceOnly -> Just + ( pkgPathEnvVar (elabPkgDescription pkg) "datadir" + , Just $ srcPath (elabPkgSourceLocation pkg) + dataDir (elabPkgDescription pkg)) + where + srcPath (LocalUnpackedPackage path) = path + srcPath (LocalTarballPackage _path) = unpackedPath + srcPath (RemoteTarballPackage _uri _localTar) = unpackedPath + srcPath (RepoTarballPackage _repo _packageId _localTar) = unpackedPath + srcPath (RemoteSourceRepoPackage _sourceRepo (Just localCheckout)) = localCheckout + -- TODO: see https://github.com/haskell/cabal/wiki/Potential-Refactors#unresolvedpkgloc + srcPath (RemoteSourceRepoPackage _sourceRepo Nothing) = error + "calling dataDirEnvVarForPackage on a not-downloaded repo is an error" + unpackedPath = + distUnpackedSrcDirectory distDirLayout $ elabPkgSourceId pkg + +instance Package ElaboratedConfiguredPackage where + packageId = elabPkgSourceId + +instance HasConfiguredId ElaboratedConfiguredPackage where + configuredId elab = + ConfiguredId (packageId elab) (elabComponentName elab) (elabComponentId elab) + +instance HasUnitId ElaboratedConfiguredPackage where + installedUnitId = elabUnitId + +instance IsNode ElaboratedConfiguredPackage where + type Key ElaboratedConfiguredPackage = UnitId + nodeKey = elabUnitId + nodeNeighbors = elabOrderDependencies + +instance Binary 
ElaboratedConfiguredPackage + +data ElaboratedPackageOrComponent + = ElabPackage ElaboratedPackage + | ElabComponent ElaboratedComponent + deriving (Eq, Show, Generic) + +instance Binary ElaboratedPackageOrComponent + +elabComponentName :: ElaboratedConfiguredPackage -> Maybe ComponentName +elabComponentName elab = + case elabPkgOrComp elab of + ElabPackage _ -> Just CLibName -- there could be more, but default this + ElabComponent comp -> compComponentName comp + +-- | A user-friendly descriptor for an 'ElaboratedConfiguredPackage'. +elabConfiguredName :: Verbosity -> ElaboratedConfiguredPackage -> String +elabConfiguredName verbosity elab + | verbosity <= normal + = (case elabPkgOrComp elab of + ElabPackage _ -> "" + ElabComponent comp -> + case compComponentName comp of + Nothing -> "setup from " + Just CLibName -> "" + Just cname -> display cname ++ " from ") + ++ display (packageId elab) + | otherwise + = display (elabUnitId elab) + +elabDistDirParams :: ElaboratedSharedConfig -> ElaboratedConfiguredPackage -> DistDirParams +elabDistDirParams shared elab = DistDirParams { + distParamUnitId = installedUnitId elab, + distParamComponentId = elabComponentId elab, + distParamPackageId = elabPkgSourceId elab, + distParamComponentName = case elabPkgOrComp elab of + ElabComponent comp -> compComponentName comp + ElabPackage _ -> Nothing, + distParamCompilerId = compilerId (pkgConfigCompiler shared), + distParamPlatform = pkgConfigPlatform shared, + distParamOptimization = elabOptimization elab + } + +-- | The full set of dependencies which dictate what order we +-- need to build things in the install plan: "order dependencies" +-- balls everything together. This is mostly only useful for +-- ordering; if you are, for example, trying to compute what +-- @--dependency@ flags to pass to a Setup script, you need to +-- use 'elabLibDependencies'. This method is the same as +-- 'nodeNeighbors'. +-- +-- NB: this method DOES include setup deps. 
+elabOrderDependencies :: ElaboratedConfiguredPackage -> [UnitId] +elabOrderDependencies elab = + case elabPkgOrComp elab of + -- Important not to have duplicates: otherwise InstallPlan gets + -- confused. + ElabPackage pkg -> ordNub (CD.flatDeps (pkgOrderDependencies pkg)) + ElabComponent comp -> compOrderDependencies comp + +-- | Like 'elabOrderDependencies', but only returns dependencies on +-- libraries. +elabOrderLibDependencies :: ElaboratedConfiguredPackage -> [UnitId] +elabOrderLibDependencies elab = + case elabPkgOrComp elab of + ElabPackage pkg -> map (newSimpleUnitId . confInstId) $ + ordNub $ CD.flatDeps (pkgLibDependencies pkg) + ElabComponent comp -> compOrderLibDependencies comp + +-- | The library dependencies (i.e., the libraries we depend on, NOT +-- the dependencies of the library), NOT including setup dependencies. +-- These are passed to the @Setup@ script via @--dependency@. +elabLibDependencies :: ElaboratedConfiguredPackage -> [ConfiguredId] +elabLibDependencies elab = + case elabPkgOrComp elab of + ElabPackage pkg -> ordNub (CD.nonSetupDeps (pkgLibDependencies pkg)) + ElabComponent comp -> compLibDependencies comp + +-- | Like 'elabOrderDependencies', but only returns dependencies on +-- executables. (This coincides with 'elabExeDependencies'.) +elabOrderExeDependencies :: ElaboratedConfiguredPackage -> [UnitId] +elabOrderExeDependencies = + map newSimpleUnitId . elabExeDependencies + +-- | The executable dependencies (i.e., the executables we depend on); +-- these are the executables we must add to the PATH before we invoke +-- the setup script. +elabExeDependencies :: ElaboratedConfiguredPackage -> [ComponentId] +elabExeDependencies elab = map confInstId $ + case elabPkgOrComp elab of + ElabPackage pkg -> CD.nonSetupDeps (pkgExeDependencies pkg) + ElabComponent comp -> compExeDependencies comp + +-- | This returns the paths of all the executables we depend on; we +-- must add these paths to PATH before invoking the setup script. 
+-- (This is usually what you want, not 'elabExeDependencies', if you +-- actually want to build something.) +elabExeDependencyPaths :: ElaboratedConfiguredPackage -> [FilePath] +elabExeDependencyPaths elab = + case elabPkgOrComp elab of + ElabPackage pkg -> map snd $ CD.nonSetupDeps (pkgExeDependencyPaths pkg) + ElabComponent comp -> map snd (compExeDependencyPaths comp) + +-- | The setup dependencies (the library dependencies of the setup executable; +-- note that it is not legal for setup scripts to have executable +-- dependencies at the moment.) +elabSetupDependencies :: ElaboratedConfiguredPackage -> [ConfiguredId] +elabSetupDependencies elab = + case elabPkgOrComp elab of + ElabPackage pkg -> CD.setupDeps (pkgLibDependencies pkg) + -- TODO: Custom setups not supported for components yet. When + -- they are, need to do this differently + ElabComponent _ -> [] + +elabPkgConfigDependencies :: ElaboratedConfiguredPackage -> [(PkgconfigName, Maybe Version)] +elabPkgConfigDependencies ElaboratedConfiguredPackage { elabPkgOrComp = ElabPackage pkg } + = pkgPkgConfigDependencies pkg +elabPkgConfigDependencies ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent comp } + = compPkgConfigDependencies comp + +-- | The cache files of all our inplace dependencies which, +-- when updated, require us to rebuild. See #4202 for +-- more details. Essentially, this is a list of filepaths +-- that, if our dependencies get rebuilt, will themselves +-- get updated. +-- +-- Note: the hash of these cache files gets built into +-- the build cache ourselves, which means that we end +-- up tracking transitive dependencies! +-- +-- Note: This tracks the "build" cache file, but not +-- "registration" or "config" cache files. Why not? +-- Arguably we should... +-- +-- Note: This is a bit of a hack, because it is not really +-- the hashes of the SOURCES of our (transitive) dependencies +-- that we should use to decide whether or not to rebuild, +-- but the output BUILD PRODUCTS. 
The strategy we use +-- here will never work if we want to implement unchanging +-- rebuilds. +elabInplaceDependencyBuildCacheFiles + :: DistDirLayout + -> ElaboratedSharedConfig + -> ElaboratedInstallPlan + -> ElaboratedConfiguredPackage + -> [FilePath] +elabInplaceDependencyBuildCacheFiles layout sconf plan root_elab = + go =<< InstallPlan.directDeps plan (nodeKey root_elab) + where + go = InstallPlan.foldPlanPackage (const []) $ \elab -> do + guard (elabBuildStyle elab == BuildInplaceOnly) + return $ distPackageCacheFile layout (elabDistDirParams sconf elab) "build" + +-- | Some extra metadata associated with an +-- 'ElaboratedConfiguredPackage' which indicates that the "package" +-- in question is actually a single component to be built. Arguably +-- it would be clearer if there were an ADT which branched into +-- package work items and component work items, but I've structured +-- it this way to minimize change to the existing code (which I +-- don't feel qualified to rewrite.) +data ElaboratedComponent + = ElaboratedComponent { + -- | The name of the component to be built according to the solver + compSolverName :: CD.Component, + -- | The name of the component to be built. Nothing if + -- it's a setup dep. + compComponentName :: Maybe ComponentName, + -- | The *external* library dependencies of this component. We + -- pass this to the configure script. + compLibDependencies :: [ConfiguredId], + -- | In a component prior to instantiation, this list specifies + -- the 'OpenUnitId's which, after instantiation, are the + -- actual dependencies of this package. Note that this does + -- NOT include signature packages, which do not turn into real + -- ordering dependencies when we instantiate. This is intended to be + -- a purely temporary field, to carry some information to the + -- instantiation phase. It's more precise than + -- 'compLibDependencies', and also stores information about internal + -- dependencies. 
+ compLinkedLibDependencies :: [OpenUnitId], + -- | The executable dependencies of this component (including + -- internal executables). + compExeDependencies :: [ConfiguredId], + -- | The @pkg-config@ dependencies of the component + compPkgConfigDependencies :: [(PkgconfigName, Maybe Version)], + -- | The paths all our executable dependencies will be installed + -- to once they are installed. + compExeDependencyPaths :: [(ConfiguredId, FilePath)], + compOrderLibDependencies :: [UnitId] + } + deriving (Eq, Show, Generic) + +instance Binary ElaboratedComponent + +-- | See 'elabOrderDependencies'. +compOrderDependencies :: ElaboratedComponent -> [UnitId] +compOrderDependencies comp = + compOrderLibDependencies comp + ++ compOrderExeDependencies comp + +-- | See 'elabOrderExeDependencies'. +compOrderExeDependencies :: ElaboratedComponent -> [UnitId] +compOrderExeDependencies = map (newSimpleUnitId . confInstId) . compExeDependencies + +data ElaboratedPackage + = ElaboratedPackage { + pkgInstalledId :: InstalledPackageId, + + -- | The exact dependencies (on other plan packages) + -- + pkgLibDependencies :: ComponentDeps [ConfiguredId], + + -- | Components which depend (transitively) on an internally + -- defined library. These are used by 'elabRequiresRegistration', + -- to determine if a user-requested build is going to need + -- a library registration + -- + pkgDependsOnSelfLib :: ComponentDeps [()], + + -- | Dependencies on executable packages. + -- + pkgExeDependencies :: ComponentDeps [ConfiguredId], + + -- | Paths where executable dependencies live. + -- + pkgExeDependencyPaths :: ComponentDeps [(ConfiguredId, FilePath)], + + -- | Dependencies on @pkg-config@ packages. + -- NB: this is NOT per-component (although it could be) + -- because Cabal library does not track per-component + -- pkg-config depends; it always does them all at once. 
+ -- + pkgPkgConfigDependencies :: [(PkgconfigName, Maybe Version)], + + -- | Which optional stanzas (ie testsuites, benchmarks) will actually + -- be enabled during the package configure step. + pkgStanzasEnabled :: Set OptionalStanza + } + deriving (Eq, Show, Generic) + +instance Binary ElaboratedPackage + +-- | See 'elabOrderDependencies'. This gives the unflattened version, +-- which can be useful in some circumstances. +pkgOrderDependencies :: ElaboratedPackage -> ComponentDeps [UnitId] +pkgOrderDependencies pkg = + fmap (map (newSimpleUnitId . confInstId)) (pkgLibDependencies pkg) `Mon.mappend` + fmap (map (newSimpleUnitId . confInstId)) (pkgExeDependencies pkg) + +-- | This is used in the install plan to indicate how the package will be +-- built. +-- +data BuildStyle = + -- | The classic approach where the package is built, then the files + -- installed into some location and the result registered in a package db. + -- + -- If the package came from a tarball then it's built in a temp dir and + -- the results discarded. + BuildAndInstall + + -- | The package is built, but the files are not installed anywhere, + -- rather the build dir is kept and the package is registered inplace. + -- + -- Such packages can still subsequently be installed. + -- + -- Typically 'BuildAndInstall' packages will only depend on other + -- 'BuildAndInstall' style packages and not on 'BuildInplaceOnly' ones. + -- + | BuildInplaceOnly + deriving (Eq, Show, Generic) + +instance Binary BuildStyle + +type CabalFileText = LBS.ByteString + +type ElaboratedReadyPackage = GenericReadyPackage ElaboratedConfiguredPackage + + +--------------------------- +-- Build targets +-- + +-- | Specific targets within a package or component to act on e.g. to build, +-- haddock or open a repl. 
+-- +data ComponentTarget = ComponentTarget ComponentName SubComponentTarget + deriving (Eq, Ord, Show, Generic) + +instance Binary ComponentTarget + +-- | Unambiguously render a 'ComponentTarget', e.g., to pass +-- to a Cabal Setup script. +showComponentTarget :: PackageId -> ComponentTarget -> String +showComponentTarget pkgid = + Cabal.showBuildTarget pkgid . toBuildTarget + where + toBuildTarget :: ComponentTarget -> Cabal.BuildTarget + toBuildTarget (ComponentTarget cname subtarget) = + case subtarget of + WholeComponent -> Cabal.BuildTargetComponent cname + ModuleTarget mname -> Cabal.BuildTargetModule cname mname + FileTarget fname -> Cabal.BuildTargetFile cname fname + +showTestComponentTarget :: PackageId -> ComponentTarget -> Maybe String +showTestComponentTarget _ (ComponentTarget (CTestName n) _) = Just $ display n +showTestComponentTarget _ _ = Nothing + +isTestComponentTarget :: ComponentTarget -> Bool +isTestComponentTarget (ComponentTarget (CTestName _) _) = True +isTestComponentTarget _ = False + +showBenchComponentTarget :: PackageId -> ComponentTarget -> Maybe String +showBenchComponentTarget _ (ComponentTarget (CBenchName n) _) = Just $ display n +showBenchComponentTarget _ _ = Nothing + +isBenchComponentTarget :: ComponentTarget -> Bool +isBenchComponentTarget (ComponentTarget (CBenchName _) _) = True +isBenchComponentTarget _ = False + +isForeignLibComponentTarget :: ComponentTarget -> Bool +isForeignLibComponentTarget (ComponentTarget (CFLibName _) _) = True +isForeignLibComponentTarget _ = False + +isExeComponentTarget :: ComponentTarget -> Bool +isExeComponentTarget (ComponentTarget (CExeName _) _ ) = True +isExeComponentTarget _ = False + +isSubLibComponentTarget :: ComponentTarget -> Bool +isSubLibComponentTarget (ComponentTarget (CSubLibName _) _) = True +isSubLibComponentTarget _ = False + +componentOptionalStanza :: CD.Component -> Maybe OptionalStanza +componentOptionalStanza (CD.ComponentTest _) = Just TestStanzas 
+componentOptionalStanza (CD.ComponentBench _) = Just BenchStanzas +componentOptionalStanza _ = Nothing + +--------------------------- +-- Setup.hs script policy +-- + +-- | There are four major cases for Setup.hs handling: +-- +-- 1. @build-type@ Custom with a @custom-setup@ section +-- 2. @build-type@ Custom without a @custom-setup@ section +-- 3. @build-type@ not Custom with @cabal-version > $our-cabal-version@ +-- 4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@ +-- +-- It's also worth noting that packages specifying @cabal-version: >= 1.23@ +-- or later that have @build-type@ Custom will always have a @custom-setup@ +-- section. Therefore in case 2, the specified @cabal-version@ will always be +-- less than 1.23. +-- +-- In cases 1 and 2 we obviously have to build an external Setup.hs script, +-- while in case 4 we can use the internal library API. In case 3 we also have +-- to build an external Setup.hs script because the package needs a later +-- Cabal lib version than we can support internally. 
+-- +data SetupScriptStyle = SetupCustomExplicitDeps + | SetupCustomImplicitDeps + | SetupNonCustomExternalLib + | SetupNonCustomInternalLib + deriving (Eq, Show, Generic, Typeable) + +instance Binary SetupScriptStyle diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectPlanning.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectPlanning.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectPlanning.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectPlanning.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,3699 @@ +{-# LANGUAGE CPP, RecordWildCards, NamedFieldPuns, RankNTypes #-} +{-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE NoMonoLocalBinds #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveFunctor #-} + +-- | Planning how to build everything in a project. 
+-- +module Distribution.Client.ProjectPlanning ( + -- * elaborated install plan types + ElaboratedInstallPlan, + ElaboratedConfiguredPackage(..), + ElaboratedPlanPackage, + ElaboratedSharedConfig(..), + ElaboratedReadyPackage, + BuildStyle(..), + CabalFileText, + + -- * Producing the elaborated install plan + rebuildProjectConfig, + rebuildInstallPlan, + + -- * Build targets + availableTargets, + AvailableTarget(..), + AvailableTargetStatus(..), + TargetRequested(..), + ComponentTarget(..), + SubComponentTarget(..), + showComponentTarget, + nubComponentTargets, + + -- * Selecting a plan subset + pruneInstallPlanToTargets, + TargetAction(..), + pruneInstallPlanToDependencies, + CannotPruneDependencies(..), + + -- * Utils required for building + pkgHasEphemeralBuildTargets, + elabBuildTargetWholeComponents, + + -- * Setup.hs CLI flags for building + setupHsScriptOptions, + setupHsConfigureFlags, + setupHsConfigureArgs, + setupHsBuildFlags, + setupHsBuildArgs, + setupHsReplFlags, + setupHsReplArgs, + setupHsTestFlags, + setupHsTestArgs, + setupHsBenchFlags, + setupHsBenchArgs, + setupHsCopyFlags, + setupHsRegisterFlags, + setupHsHaddockFlags, + setupHsHaddockArgs, + + packageHashInputs, + + -- * Path construction + binDirectoryFor, + binDirectories + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Client.ProjectPlanning.Types as Ty +import Distribution.Client.PackageHash +import Distribution.Client.RebuildMonad +import Distribution.Client.Store +import Distribution.Client.ProjectConfig +import Distribution.Client.ProjectPlanOutput + +import Distribution.Client.Types +import qualified Distribution.Client.InstallPlan as InstallPlan +import qualified Distribution.Client.SolverInstallPlan as SolverInstallPlan +import Distribution.Client.Dependency +import Distribution.Client.Dependency.Types +import qualified Distribution.Client.IndexUtils as IndexUtils +import Distribution.Client.Init (incVersion) +import 
Distribution.Client.Targets (userToPackageConstraint) +import Distribution.Client.DistDirLayout +import Distribution.Client.SetupWrapper +import Distribution.Client.JobControl +import Distribution.Client.FetchUtils +import Distribution.Client.Config +import qualified Hackage.Security.Client as Sec +import Distribution.Client.Setup hiding (packageName, cabalVersion) +import Distribution.Utils.NubList +import Distribution.Utils.LogProgress +import Distribution.Utils.MapAccum + +import qualified Distribution.Solver.Types.ComponentDeps as CD +import Distribution.Solver.Types.ComponentDeps (ComponentDeps) +import Distribution.Solver.Types.ConstraintSource +import Distribution.Solver.Types.LabeledPackageConstraint +import Distribution.Solver.Types.OptionalStanza +import Distribution.Solver.Types.PkgConfigDb +import Distribution.Solver.Types.ResolverPackage +import Distribution.Solver.Types.SolverId +import Distribution.Solver.Types.SolverPackage +import Distribution.Solver.Types.InstSolverPackage +import Distribution.Solver.Types.SourcePackage +import Distribution.Solver.Types.Settings + +import Distribution.ModuleName +import Distribution.Package hiding + (InstalledPackageId, installedPackageId) +import Distribution.Types.AnnotatedId +import Distribution.Types.ComponentName +import Distribution.Types.PkgconfigDependency +import Distribution.Types.UnqualComponentName +import Distribution.System +import qualified Distribution.PackageDescription as Cabal +import qualified Distribution.PackageDescription as PD +import qualified Distribution.PackageDescription.Configuration as PD +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import Distribution.Simple.Compiler hiding (Flag) +import qualified Distribution.Simple.GHC as GHC --TODO: [code cleanup] eliminate +import qualified Distribution.Simple.GHCJS as GHCJS --TODO: [code cleanup] eliminate +import Distribution.Simple.Program +import Distribution.Simple.Program.Db +import Distribution.Simple.Program.Find 
+import qualified Distribution.Simple.Setup as Cabal +import Distribution.Simple.Setup + (Flag, toFlag, flagToMaybe, flagToList, fromFlagOrDefault) +import qualified Distribution.Simple.Configure as Cabal +import qualified Distribution.Simple.LocalBuildInfo as Cabal +import Distribution.Simple.LocalBuildInfo + ( Component(..), pkgComponents, componentBuildInfo + , componentName ) +import qualified Distribution.Simple.InstallDirs as InstallDirs +import qualified Distribution.InstalledPackageInfo as IPI + +import Distribution.Backpack.ConfiguredComponent +import Distribution.Backpack.LinkedComponent +import Distribution.Backpack.ComponentsGraph +import Distribution.Backpack.ModuleShape +import Distribution.Backpack.FullUnitId +import Distribution.Backpack +import Distribution.Types.ComponentInclude + +import Distribution.Simple.Utils +import Distribution.Version +import Distribution.Verbosity +import Distribution.Text + +import qualified Distribution.Compat.Graph as Graph +import Distribution.Compat.Graph(IsNode(..)) + +import Text.PrettyPrint hiding ((<>)) +import qualified Text.PrettyPrint as Disp +import qualified Data.Map as Map +import Data.Set (Set) +import qualified Data.Set as Set +import Control.Monad +import qualified Data.Traversable as T +import Control.Monad.State as State +import Control.Exception +import Data.List (groupBy) +import Data.Either +import Data.Function +import System.FilePath + +------------------------------------------------------------------------------ +-- * Elaborated install plan +------------------------------------------------------------------------------ + +-- "Elaborated" -- worked out with great care and nicety of detail; +-- executed with great minuteness: elaborate preparations; +-- elaborate care. 
+-- +-- So here's the idea: +-- +-- Rather than a miscellaneous collection of 'ConfigFlags', 'InstallFlags' etc +-- all passed in as separate args and which are then further selected, +-- transformed etc during the execution of the build. Instead we construct +-- an elaborated install plan that includes everything we will need, and then +-- during the execution of the plan we do as little transformation of this +-- info as possible. +-- +-- So we're trying to split the work into two phases: construction of the +-- elaborated install plan (which as far as possible should be pure) and +-- then simple execution of that plan without any smarts, just doing what the +-- plan says to do. +-- +-- So that means we need a representation of this fully elaborated install +-- plan. The representation consists of two parts: +-- +-- * A 'ElaboratedInstallPlan'. This is a 'GenericInstallPlan' with a +-- representation of source packages that includes a lot more detail about +-- that package's individual configuration +-- +-- * A 'ElaboratedSharedConfig'. Some package configuration is the same for +-- every package in a plan. Rather than duplicate that info every entry in +-- the 'GenericInstallPlan' we keep that separately. +-- +-- The division between the shared and per-package config is /not set in stone +-- for all time/. For example if we wanted to generalise the install plan to +-- describe a situation where we want to build some packages with GHC and some +-- with GHCJS then the platform and compiler would no longer be shared between +-- all packages but would have to be per-package (probably with some sanity +-- condition on the graph structure). +-- + +-- Refer to ProjectPlanning.Types for details of these important types: + +-- type ElaboratedInstallPlan = ... +-- type ElaboratedPlanPackage = ... +-- data ElaboratedSharedConfig = ... +-- data ElaboratedConfiguredPackage = ... 
+-- data BuildStyle = + + +-- | Check that an 'ElaboratedConfiguredPackage' actually makes +-- sense under some 'ElaboratedSharedConfig'. +sanityCheckElaboratedConfiguredPackage + :: ElaboratedSharedConfig + -> ElaboratedConfiguredPackage + -> a + -> a +sanityCheckElaboratedConfiguredPackage sharedConfig + elab@ElaboratedConfiguredPackage{..} = + (case elabPkgOrComp of + ElabPackage pkg -> sanityCheckElaboratedPackage elab pkg + ElabComponent comp -> sanityCheckElaboratedComponent elab comp) + + -- either a package is being built inplace, or the + -- 'installedPackageId' we assigned is consistent with + -- the 'hashedInstalledPackageId' we would compute from + -- the elaborated configured package + . assert (elabBuildStyle == BuildInplaceOnly || + elabComponentId == hashedInstalledPackageId + (packageHashInputs sharedConfig elab)) + + -- the stanzas explicitly disabled should not be available + . assert (Set.null (Map.keysSet (Map.filter not elabStanzasRequested) + `Set.intersection` elabStanzasAvailable)) + + -- either a package is built inplace, or we are not attempting to + -- build any test suites or benchmarks (we never build these + -- for remote packages!) + . assert (elabBuildStyle == BuildInplaceOnly || + Set.null elabStanzasAvailable) + +sanityCheckElaboratedComponent + :: ElaboratedConfiguredPackage + -> ElaboratedComponent + -> a + -> a +sanityCheckElaboratedComponent ElaboratedConfiguredPackage{..} + ElaboratedComponent{..} = + + -- Should not be building bench or test if not inplace. 
+ assert (elabBuildStyle == BuildInplaceOnly || + case compComponentName of + Nothing -> True + Just CLibName -> True + Just (CSubLibName _) -> True + Just (CExeName _) -> True + -- This is interesting: there's no way to declare a dependency + -- on a foreign library at the moment, but you may still want + -- to install these to the store + Just (CFLibName _) -> True + Just (CBenchName _) -> False + Just (CTestName _) -> False) + + +sanityCheckElaboratedPackage + :: ElaboratedConfiguredPackage + -> ElaboratedPackage + -> a + -> a +sanityCheckElaboratedPackage ElaboratedConfiguredPackage{..} + ElaboratedPackage{..} = + -- we should only have enabled stanzas that actually can be built + -- (according to the solver) + assert (pkgStanzasEnabled `Set.isSubsetOf` elabStanzasAvailable) + + -- the stanzas that the user explicitly requested should be + -- enabled (by the previous test, they are also available) + . assert (Map.keysSet (Map.filter id elabStanzasRequested) + `Set.isSubsetOf` pkgStanzasEnabled) + +------------------------------------------------------------------------------ +-- * Deciding what to do: making an 'ElaboratedInstallPlan' +------------------------------------------------------------------------------ + +-- | Return the up-to-date project config and information about the local +-- packages within the project. 
+-- +rebuildProjectConfig :: Verbosity + -> DistDirLayout + -> ProjectConfig + -> IO ( ProjectConfig + , [PackageSpecifier UnresolvedSourcePackage] ) +rebuildProjectConfig verbosity + distDirLayout@DistDirLayout { + distProjectRootDirectory, + distDirectory, + distProjectCacheFile, + distProjectCacheDirectory, + distProjectFile + } + cliConfig = do + + fileMonitorProjectConfigKey <- do + configPath <- getConfigFilePath projectConfigConfigFile + return (configPath, distProjectFile "") + + (projectConfig, localPackages) <- + runRebuild distProjectRootDirectory + $ rerunIfChanged verbosity + fileMonitorProjectConfig + fileMonitorProjectConfigKey + $ do + liftIO $ info verbosity "Project settings changed, reconfiguring..." + projectConfig <- phaseReadProjectConfig + localPackages <- phaseReadLocalPackages projectConfig + return (projectConfig, localPackages) + + info verbosity + $ unlines + $ ("this build was affected by the following (project) config files:" :) + $ [ "- " ++ path + | Explicit path <- Set.toList $ projectConfigProvenance projectConfig + ] + + return (projectConfig <> cliConfig, localPackages) + + where + + ProjectConfigShared { projectConfigConfigFile } = + projectConfigShared cliConfig + + fileMonitorProjectConfig = + newFileMonitor (distProjectCacheFile "config") :: FileMonitor + (FilePath, FilePath) + (ProjectConfig, [PackageSpecifier UnresolvedSourcePackage]) + + -- Read the cabal.project (or implicit config) and combine it with + -- arguments from the command line + -- + phaseReadProjectConfig :: Rebuild ProjectConfig + phaseReadProjectConfig = do + readProjectConfig verbosity projectConfigConfigFile distDirLayout + + -- Look for all the cabal packages in the project + -- some of which may be local src dirs, tarballs etc + -- + phaseReadLocalPackages :: ProjectConfig + -> Rebuild [PackageSpecifier UnresolvedSourcePackage] + phaseReadLocalPackages projectConfig@ProjectConfig { + projectConfigShared, + projectConfigBuildOnly + } = do + pkgLocations 
<- findProjectPackages distDirLayout projectConfig + + -- Create folder only if findProjectPackages did not throw a + -- BadPackageLocations exception. + liftIO $ do + createDirectoryIfMissingVerbose verbosity True distDirectory + createDirectoryIfMissingVerbose verbosity True distProjectCacheDirectory + + fetchAndReadSourcePackages verbosity distDirLayout + projectConfigShared + projectConfigBuildOnly + pkgLocations + + +-- | Return an up-to-date elaborated install plan. +-- +-- Two variants of the install plan are returned: with and without packages +-- from the store. That is, the \"improved\" plan where source packages are +-- replaced by pre-existing installed packages from the store (when their ids +-- match), and also the original elaborated plan which uses primarily source +-- packages. + +-- The improved plan is what we use for building, but the original elaborated +-- plan is useful for reporting and configuration. For example the @freeze@ +-- command needs the source package info to know about flag choices and +-- dependencies of executables and setup scripts. 
+-- +rebuildInstallPlan :: Verbosity + -> DistDirLayout -> CabalDirLayout + -> ProjectConfig + -> [PackageSpecifier UnresolvedSourcePackage] + -> IO ( ElaboratedInstallPlan -- with store packages + , ElaboratedInstallPlan -- with source packages + , ElaboratedSharedConfig ) + -- ^ @(improvedPlan, elaboratedPlan, _, _)@ +rebuildInstallPlan verbosity + distDirLayout@DistDirLayout { + distProjectRootDirectory, + distProjectCacheFile + } + CabalDirLayout { + cabalStoreDirLayout + } = \projectConfig localPackages -> + runRebuild distProjectRootDirectory $ do + progsearchpath <- liftIO $ getSystemSearchPath + let projectConfigMonitored = projectConfig { projectConfigBuildOnly = mempty } + + -- The overall improved plan is cached + rerunIfChanged verbosity fileMonitorImprovedPlan + -- react to changes in the project config, + -- the package .cabal files and the path + (projectConfigMonitored, localPackages, progsearchpath) $ do + + -- And so is the elaborated plan that the improved plan based on + (elaboratedPlan, elaboratedShared) <- + rerunIfChanged verbosity fileMonitorElaboratedPlan + (projectConfigMonitored, localPackages, + progsearchpath) $ do + + compilerEtc <- phaseConfigureCompiler projectConfig + _ <- phaseConfigurePrograms projectConfig compilerEtc + (solverPlan, pkgConfigDB) + <- phaseRunSolver projectConfig + compilerEtc + localPackages + (elaboratedPlan, + elaboratedShared) <- phaseElaboratePlan projectConfig + compilerEtc pkgConfigDB + solverPlan + localPackages + + phaseMaintainPlanOutputs elaboratedPlan elaboratedShared + return (elaboratedPlan, elaboratedShared) + + -- The improved plan changes each time we install something, whereas + -- the underlying elaborated plan only changes when input config + -- changes, so it's worth caching them separately. 
+ improvedPlan <- phaseImprovePlan elaboratedPlan elaboratedShared + + return (improvedPlan, elaboratedPlan, elaboratedShared) + + where + fileMonitorCompiler = newFileMonitorInCacheDir "compiler" + fileMonitorSolverPlan = newFileMonitorInCacheDir "solver-plan" + fileMonitorSourceHashes = newFileMonitorInCacheDir "source-hashes" + fileMonitorElaboratedPlan = newFileMonitorInCacheDir "elaborated-plan" + fileMonitorImprovedPlan = newFileMonitorInCacheDir "improved-plan" + + newFileMonitorInCacheDir :: Eq a => FilePath -> FileMonitor a b + newFileMonitorInCacheDir = newFileMonitor . distProjectCacheFile + + + -- Configure the compiler we're using. + -- + -- This is moderately expensive and doesn't change that often so we cache + -- it independently. + -- + phaseConfigureCompiler :: ProjectConfig + -> Rebuild (Compiler, Platform, ProgramDb) + phaseConfigureCompiler ProjectConfig { + projectConfigShared = ProjectConfigShared { + projectConfigHcFlavor, + projectConfigHcPath, + projectConfigHcPkg + }, + projectConfigLocalPackages = PackageConfig { + packageConfigProgramPaths, + packageConfigProgramArgs, + packageConfigProgramPathExtra + } + } = do + progsearchpath <- liftIO $ getSystemSearchPath + rerunIfChanged verbosity fileMonitorCompiler + (hcFlavor, hcPath, hcPkg, progsearchpath, + packageConfigProgramPaths, + packageConfigProgramArgs, + packageConfigProgramPathExtra) $ do + + liftIO $ info verbosity "Compiler settings changed, reconfiguring..." + result@(_, _, progdb') <- liftIO $ + Cabal.configCompilerEx + hcFlavor hcPath hcPkg + progdb verbosity + + -- Note that we added the user-supplied program locations and args + -- for /all/ programs, not just those for the compiler prog and + -- compiler-related utils. In principle we don't know which programs + -- the compiler will configure (and it does vary between compilers). + -- We do know however that the compiler will only configure the + -- programs it cares about, and those are the ones we monitor here. 
+ monitorFiles (programsMonitorFiles progdb') + + return result + where + hcFlavor = flagToMaybe projectConfigHcFlavor + hcPath = flagToMaybe projectConfigHcPath + hcPkg = flagToMaybe projectConfigHcPkg + progdb = + userSpecifyPaths (Map.toList (getMapLast packageConfigProgramPaths)) + . userSpecifyArgss (Map.toList (getMapMappend packageConfigProgramArgs)) + . modifyProgramSearchPath + (++ [ ProgramSearchPathDir dir + | dir <- fromNubList packageConfigProgramPathExtra ]) + $ defaultProgramDb + + + -- Configuring other programs. + -- + -- Having configred the compiler, now we configure all the remaining + -- programs. This is to check we can find them, and to monitor them for + -- changes. + -- + -- TODO: [required eventually] we don't actually do this yet. + -- + -- We rely on the fact that the previous phase added the program config for + -- all local packages, but that all the programs configured so far are the + -- compiler program or related util programs. + -- + phaseConfigurePrograms :: ProjectConfig + -> (Compiler, Platform, ProgramDb) + -> Rebuild () + phaseConfigurePrograms projectConfig (_, _, compilerprogdb) = do + -- Users are allowed to specify program locations independently for + -- each package (e.g. to use a particular version of a pre-processor + -- for some packages). However they cannot do this for the compiler + -- itself as that's just not going to work. So we check for this. + liftIO $ checkBadPerPackageCompilerPaths + (configuredPrograms compilerprogdb) + (getMapMappend (projectConfigSpecificPackage projectConfig)) + + --TODO: [required eventually] find/configure other programs that the + -- user specifies. + + --TODO: [required eventually] find/configure all build-tools + -- but note that some of them may be built as part of the plan. + + + -- Run the solver to get the initial install plan. + -- This is expensive so we cache it independently. 
+ -- + phaseRunSolver :: ProjectConfig + -> (Compiler, Platform, ProgramDb) + -> [PackageSpecifier UnresolvedSourcePackage] + -> Rebuild (SolverInstallPlan, PkgConfigDb) + phaseRunSolver projectConfig@ProjectConfig { + projectConfigShared, + projectConfigBuildOnly + } + (compiler, platform, progdb) + localPackages = + rerunIfChanged verbosity fileMonitorSolverPlan + (solverSettings, + localPackages, localPackagesEnabledStanzas, + compiler, platform, programDbSignature progdb) $ do + + installedPkgIndex <- getInstalledPackages verbosity + compiler progdb platform + corePackageDbs + sourcePkgDb <- getSourcePackages verbosity withRepoCtx + (solverSettingIndexState solverSettings) + pkgConfigDB <- getPkgConfigDb verbosity progdb + + --TODO: [code cleanup] it'd be better if the Compiler contained the + -- ConfiguredPrograms that it needs, rather than relying on the progdb + -- since we don't need to depend on all the programs here, just the + -- ones relevant for the compiler. + + liftIO $ do + solver <- chooseSolver verbosity + (solverSettingSolver solverSettings) + (compilerInfo compiler) + + notice verbosity "Resolving dependencies..." 
+ plan <- foldProgress logMsg (die' verbosity) return $ + planPackages verbosity compiler platform solver solverSettings + installedPkgIndex sourcePkgDb pkgConfigDB + localPackages localPackagesEnabledStanzas + return (plan, pkgConfigDB) + where + corePackageDbs = [GlobalPackageDB] + withRepoCtx = projectConfigWithSolverRepoContext verbosity + projectConfigShared + projectConfigBuildOnly + solverSettings = resolveSolverSettings projectConfig + logMsg message rest = debugNoWrap verbosity message >> rest + + localPackagesEnabledStanzas = + Map.fromList + [ (pkgname, stanzas) + | pkg <- localPackages + , let pkgname = pkgSpecifierTarget pkg + testsEnabled = lookupLocalPackageConfig + packageConfigTests + projectConfig pkgname + benchmarksEnabled = lookupLocalPackageConfig + packageConfigBenchmarks + projectConfig pkgname + stanzas = + Map.fromList $ + [ (TestStanzas, enabled) + | enabled <- flagToList testsEnabled ] + ++ [ (BenchStanzas , enabled) + | enabled <- flagToList benchmarksEnabled ] + ] + + -- Elaborate the solver's install plan to get a fully detailed plan. This + -- version of the plan has the final nix-style hashed ids. + -- + phaseElaboratePlan :: ProjectConfig + -> (Compiler, Platform, ProgramDb) + -> PkgConfigDb + -> SolverInstallPlan + -> [PackageSpecifier (SourcePackage (PackageLocation loc))] + -> Rebuild ( ElaboratedInstallPlan + , ElaboratedSharedConfig ) + phaseElaboratePlan ProjectConfig { + projectConfigShared, + projectConfigAllPackages, + projectConfigLocalPackages, + projectConfigSpecificPackage, + projectConfigBuildOnly + } + (compiler, platform, progdb) pkgConfigDB + solverPlan localPackages = do + + liftIO $ debug verbosity "Elaborating the install plan..." 
+ + sourcePackageHashes <- + rerunIfChanged verbosity fileMonitorSourceHashes + (packageLocationsSignature solverPlan) $ + getPackageSourceHashes verbosity withRepoCtx solverPlan + + defaultInstallDirs <- liftIO $ userInstallDirTemplates compiler + (elaboratedPlan, elaboratedShared) + <- liftIO . runLogProgress verbosity $ + elaborateInstallPlan + verbosity + platform compiler progdb pkgConfigDB + distDirLayout + cabalStoreDirLayout + solverPlan + localPackages + sourcePackageHashes + defaultInstallDirs + projectConfigShared + projectConfigAllPackages + projectConfigLocalPackages + (getMapMappend projectConfigSpecificPackage) + let instantiatedPlan = instantiateInstallPlan elaboratedPlan + liftIO $ debugNoWrap verbosity (InstallPlan.showInstallPlan instantiatedPlan) + return (instantiatedPlan, elaboratedShared) + where + withRepoCtx = projectConfigWithSolverRepoContext verbosity + projectConfigShared + projectConfigBuildOnly + + -- Update the files we maintain that reflect our current build environment. + -- In particular we maintain a JSON representation of the elaborated + -- install plan (but not the improved plan since that reflects the state + -- of the build rather than just the input environment). + -- + phaseMaintainPlanOutputs :: ElaboratedInstallPlan + -> ElaboratedSharedConfig + -> Rebuild () + phaseMaintainPlanOutputs elaboratedPlan elaboratedShared = liftIO $ do + debug verbosity "Updating plan.json" + writePlanExternalRepresentation + distDirLayout + elaboratedPlan + elaboratedShared + + + -- Improve the elaborated install plan. The elaborated plan consists + -- mostly of source packages (with full nix-style hashed ids). Where + -- corresponding installed packages already exist in the store, replace + -- them in the plan. + -- + -- Note that we do monitor the store's package db here, so we will redo + -- this improvement phase when the db changes -- including as a result of + -- executing a plan and installing things. 
+ -- + phaseImprovePlan :: ElaboratedInstallPlan + -> ElaboratedSharedConfig + -> Rebuild ElaboratedInstallPlan + phaseImprovePlan elaboratedPlan elaboratedShared = do + + liftIO $ debug verbosity "Improving the install plan..." + storePkgIdSet <- getStoreEntries cabalStoreDirLayout compid + let improvedPlan = improveInstallPlanWithInstalledPackages + storePkgIdSet + elaboratedPlan + liftIO $ debugNoWrap verbosity (InstallPlan.showInstallPlan improvedPlan) + -- TODO: [nice to have] having checked which packages from the store + -- we're using, it may be sensible to sanity check those packages + -- by loading up the compiler package db and checking everything + -- matches up as expected, e.g. no dangling deps, files deleted. + return improvedPlan + where + compid = compilerId (pkgConfigCompiler elaboratedShared) + + +programsMonitorFiles :: ProgramDb -> [MonitorFilePath] +programsMonitorFiles progdb = + [ monitor + | prog <- configuredPrograms progdb + , monitor <- monitorFileSearchPath (programMonitorFiles prog) + (programPath prog) + ] + +-- | Select the bits of a 'ProgramDb' to monitor for value changes. +-- Use 'programsMonitorFiles' for the files to monitor. +-- +programDbSignature :: ProgramDb -> [ConfiguredProgram] +programDbSignature progdb = + [ prog { programMonitorFiles = [] + , programOverrideEnv = filter ((/="PATH") . fst) + (programOverrideEnv prog) } + | prog <- configuredPrograms progdb ] + +getInstalledPackages :: Verbosity + -> Compiler -> ProgramDb -> Platform + -> PackageDBStack + -> Rebuild InstalledPackageIndex +getInstalledPackages verbosity compiler progdb platform packagedbs = do + monitorFiles . 
map monitorFileOrDirectory + =<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles + verbosity compiler + packagedbs progdb platform) + liftIO $ IndexUtils.getInstalledPackages + verbosity compiler + packagedbs progdb + +{- +--TODO: [nice to have] use this but for sanity / consistency checking +getPackageDBContents :: Verbosity + -> Compiler -> ProgramDb -> Platform + -> PackageDB + -> Rebuild InstalledPackageIndex +getPackageDBContents verbosity compiler progdb platform packagedb = do + monitorFiles . map monitorFileOrDirectory + =<< liftIO (IndexUtils.getInstalledPackagesMonitorFiles + verbosity compiler + [packagedb] progdb platform) + liftIO $ do + createPackageDBIfMissing verbosity compiler progdb packagedb + Cabal.getPackageDBContents verbosity compiler + packagedb progdb +-} + +getSourcePackages :: Verbosity -> (forall a. (RepoContext -> IO a) -> IO a) + -> Maybe IndexUtils.IndexState -> Rebuild SourcePackageDb +getSourcePackages verbosity withRepoCtx idxState = do + (sourcePkgDb, repos) <- + liftIO $ + withRepoCtx $ \repoctx -> do + sourcePkgDb <- IndexUtils.getSourcePackagesAtIndexState verbosity + repoctx idxState + return (sourcePkgDb, repoContextRepos repoctx) + + mapM_ needIfExists + . IndexUtils.getSourcePackagesMonitorFiles + $ repos + return sourcePkgDb + + +getPkgConfigDb :: Verbosity -> ProgramDb -> Rebuild PkgConfigDb +getPkgConfigDb verbosity progdb = do + dirs <- liftIO $ getPkgConfigDbDirs verbosity progdb + -- Just monitor the dirs so we'll notice new .pc files. + -- Alternatively we could monitor all the .pc files too. + mapM_ monitorDirectoryStatus dirs + liftIO $ readPkgConfigDb verbosity progdb + + +-- | Select the config values to monitor for changes package source hashes. 
+packageLocationsSignature :: SolverInstallPlan + -> [(PackageId, PackageLocation (Maybe FilePath))] +packageLocationsSignature solverPlan = + [ (packageId pkg, packageSource pkg) + | SolverInstallPlan.Configured (SolverPackage { solverPkgSource = pkg}) + <- SolverInstallPlan.toList solverPlan + ] + + +-- | Get the 'HashValue' for all the source packages where we use hashes, +-- and download any packages required to do so. +-- +-- Note that we don't get hashes for local unpacked packages. +-- +getPackageSourceHashes :: Verbosity + -> (forall a. (RepoContext -> IO a) -> IO a) + -> SolverInstallPlan + -> Rebuild (Map PackageId PackageSourceHash) +getPackageSourceHashes verbosity withRepoCtx solverPlan = do + + -- Determine if and where to get the package's source hash from. + -- + let allPkgLocations :: [(PackageId, PackageLocation (Maybe FilePath))] + allPkgLocations = + [ (packageId pkg, packageSource pkg) + | SolverInstallPlan.Configured (SolverPackage { solverPkgSource = pkg}) + <- SolverInstallPlan.toList solverPlan ] + + -- Tarballs that were local in the first place. + -- We'll hash these tarball files directly. + localTarballPkgs :: [(PackageId, FilePath)] + localTarballPkgs = + [ (pkgid, tarball) + | (pkgid, LocalTarballPackage tarball) <- allPkgLocations ] + + -- Tarballs from remote URLs. We must have downloaded these already + -- (since we extracted the .cabal file earlier) + --TODO: [required eventually] finish remote tarball functionality +-- allRemoteTarballPkgs = +-- [ (pkgid, ) +-- | (pkgid, RemoteTarballPackage ) <- allPkgLocations ] + + -- Tarballs from repositories, either where the repository provides + -- hashes as part of the repo metadata, or where we will have to + -- download and hash the tarball. 
+ repoTarballPkgsWithMetadata :: [(PackageId, Repo)] + repoTarballPkgsWithoutMetadata :: [(PackageId, Repo)] + (repoTarballPkgsWithMetadata, + repoTarballPkgsWithoutMetadata) = + partitionEithers + [ case repo of + RepoSecure{} -> Left (pkgid, repo) + _ -> Right (pkgid, repo) + | (pkgid, RepoTarballPackage repo _ _) <- allPkgLocations ] + + -- For tarballs from repos that do not have hashes available we now have + -- to check if the packages were downloaded already. + -- + (repoTarballPkgsToDownload, + repoTarballPkgsDownloaded) + <- fmap partitionEithers $ + liftIO $ sequence + [ do mtarball <- checkRepoTarballFetched repo pkgid + case mtarball of + Nothing -> return (Left (pkgid, repo)) + Just tarball -> return (Right (pkgid, tarball)) + | (pkgid, repo) <- repoTarballPkgsWithoutMetadata ] + + (hashesFromRepoMetadata, + repoTarballPkgsNewlyDownloaded) <- + -- Avoid having to initialise the repository (ie 'withRepoCtx') if we + -- don't have to. (The main cost is configuring the http client.) + if null repoTarballPkgsToDownload && null repoTarballPkgsWithMetadata + then return (Map.empty, []) + else liftIO $ withRepoCtx $ \repoctx -> do + + -- For tarballs from repos that do have hashes available as part of the + -- repo metadata we now load up the index for each repo and retrieve + -- the hashes for the packages + -- + hashesFromRepoMetadata <- + Sec.uncheckClientErrors $ --TODO: [code cleanup] wrap in our own exceptions + fmap (Map.fromList . concat) $ + sequence + -- Reading the repo index is expensive so we group the packages by repo + [ repoContextWithSecureRepo repoctx repo $ \secureRepo -> + Sec.withIndex secureRepo $ \repoIndex -> + sequence + [ do hash <- Sec.trusted <$> -- strip off Trusted tag + Sec.indexLookupHash repoIndex pkgid + -- Note that hackage-security currently uses SHA256 + -- but this API could in principle give us some other + -- choice in future. 
+ return (pkgid, hashFromTUF hash) + | pkgid <- pkgids ] + | (repo, pkgids) <- + map (\grp@((_,repo):_) -> (repo, map fst grp)) + . groupBy ((==) `on` (remoteRepoName . repoRemote . snd)) + . sortBy (compare `on` (remoteRepoName . repoRemote . snd)) + $ repoTarballPkgsWithMetadata + ] + + -- For tarballs from repos that do not have hashes available, download + -- the ones we previously determined we need. + -- + repoTarballPkgsNewlyDownloaded <- + sequence + [ do tarball <- fetchRepoTarball verbosity repoctx repo pkgid + return (pkgid, tarball) + | (pkgid, repo) <- repoTarballPkgsToDownload ] + + return (hashesFromRepoMetadata, + repoTarballPkgsNewlyDownloaded) + + -- Hash tarball files for packages where we have to do that. This includes + -- tarballs that were local in the first place, plus tarballs from repos, + -- either previously cached or freshly downloaded. + -- + let allTarballFilePkgs :: [(PackageId, FilePath)] + allTarballFilePkgs = localTarballPkgs + ++ repoTarballPkgsDownloaded + ++ repoTarballPkgsNewlyDownloaded + hashesFromTarballFiles <- liftIO $ + fmap Map.fromList $ + sequence + [ do srchash <- readFileHashValue tarball + return (pkgid, srchash) + | (pkgid, tarball) <- allTarballFilePkgs + ] + monitorFiles [ monitorFile tarball + | (_pkgid, tarball) <- allTarballFilePkgs ] + + -- Return the combination + return $! 
hashesFromRepoMetadata + <> hashesFromTarballFiles + + +-- ------------------------------------------------------------ +-- * Installation planning +-- ------------------------------------------------------------ + +planPackages :: Verbosity + -> Compiler + -> Platform + -> Solver -> SolverSettings + -> InstalledPackageIndex + -> SourcePackageDb + -> PkgConfigDb + -> [PackageSpecifier UnresolvedSourcePackage] + -> Map PackageName (Map OptionalStanza Bool) + -> Progress String String SolverInstallPlan +planPackages verbosity comp platform solver SolverSettings{..} + installedPkgIndex sourcePkgDb pkgConfigDB + localPackages pkgStanzasEnable = + + resolveDependencies + platform (compilerInfo comp) + pkgConfigDB solver + resolverParams + + where + + --TODO: [nice to have] disable multiple instances restriction in the solver, but then + -- make sure we can cope with that in the output. + resolverParams = + + setMaxBackjumps solverSettingMaxBackjumps + + . setIndependentGoals solverSettingIndependentGoals + + . setReorderGoals solverSettingReorderGoals + + . setCountConflicts solverSettingCountConflicts + + --TODO: [required eventually] should only be configurable for custom installs + -- . setAvoidReinstalls solverSettingAvoidReinstalls + + --TODO: [required eventually] should only be configurable for custom installs + -- . setShadowPkgs solverSettingShadowPkgs + + . setStrongFlags solverSettingStrongFlags + + . setAllowBootLibInstalls solverSettingAllowBootLibInstalls + + . setSolverVerbosity verbosity + + --TODO: [required eventually] decide if we need to prefer installed for + -- global packages, or prefer latest even for global packages. Perhaps + -- should be configurable but with a different name than "upgrade-dependencies". + . setPreferenceDefault PreferLatestForSelected + {-(if solverSettingUpgradeDeps + then PreferAllLatest + else PreferLatestForSelected)-} + + . removeLowerBounds solverSettingAllowOlder + . removeUpperBounds solverSettingAllowNewer + + . 
addDefaultSetupDependencies (defaultSetupDeps comp platform + . PD.packageDescription + . packageDescription) + + . addSetupCabalMinVersionConstraint setupMinCabalVersionConstraint + . addSetupCabalMaxVersionConstraint setupMaxCabalVersionConstraint + + . addPreferences + -- preferences from the config file or command line + [ PackageVersionPreference name ver + | Dependency name ver <- solverSettingPreferences ] + + . addConstraints + -- version constraints from the config file or command line + [ LabeledPackageConstraint (userToPackageConstraint pc) src + | (pc, src) <- solverSettingConstraints ] + + . addPreferences + -- enable stanza preference where the user did not specify + [ PackageStanzasPreference pkgname stanzas + | pkg <- localPackages + , let pkgname = pkgSpecifierTarget pkg + stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable + stanzas = [ stanza | stanza <- [minBound..maxBound] + , Map.lookup stanza stanzaM == Nothing ] + , not (null stanzas) + ] + + . addConstraints + -- enable stanza constraints where the user asked to enable + [ LabeledPackageConstraint + (PackageConstraint (scopeToplevel pkgname) + (PackagePropertyStanzas stanzas)) + ConstraintSourceConfigFlagOrTarget + | pkg <- localPackages + , let pkgname = pkgSpecifierTarget pkg + stanzaM = Map.findWithDefault Map.empty pkgname pkgStanzasEnable + stanzas = [ stanza | stanza <- [minBound..maxBound] + , Map.lookup stanza stanzaM == Just True ] + , not (null stanzas) + ] + + . addConstraints + --TODO: [nice to have] should have checked at some point that the + -- package in question actually has these flags. + [ LabeledPackageConstraint + (PackageConstraint (scopeToplevel pkgname) + (PackagePropertyFlags flags)) + ConstraintSourceConfigFlagOrTarget + | (pkgname, flags) <- Map.toList solverSettingFlagAssignments ] + + . addConstraints + --TODO: [nice to have] we have user-supplied flags for unspecified + -- local packages (as well as specific per-package flags). 
For the + -- former we just apply all these flags to all local targets which + -- is silly. We should check if the flags are appropriate. + [ LabeledPackageConstraint + (PackageConstraint (scopeToplevel pkgname) + (PackagePropertyFlags flags)) + ConstraintSourceConfigFlagOrTarget + | let flags = solverSettingFlagAssignment + , not (PD.nullFlagAssignment flags) + , pkg <- localPackages + , let pkgname = pkgSpecifierTarget pkg ] + + $ stdResolverParams + + stdResolverParams = + -- Note: we don't use the standardInstallPolicy here, since that uses + -- its own addDefaultSetupDependencies that is not appropriate for us. + basicInstallPolicy + installedPkgIndex sourcePkgDb + localPackages + + -- While we can talk to older Cabal versions (we need to be able to + -- do so for custom Setup scripts that require older Cabal lib + -- versions), we have problems talking to some older versions that + -- don't support certain features. + -- + -- For example, Cabal-1.16 and older do not know about build targets. + -- Even worse, 1.18 and older only supported the --constraint flag + -- with source package ids, not --dependency with installed package + -- ids. That is bad because we cannot reliably select the right + -- dependencies in the presence of multiple instances (i.e. the + -- store). See issue #3932. So we require Cabal 1.20 as a minimum. + -- + -- Moreover, lib:Cabal generally only supports the interface of + -- current and past compilers; in fact recent lib:Cabal versions + -- will warn when they encounter a too new or unknown GHC compiler + -- version (c.f. #415). To avoid running into unsupported + -- configurations we encode the compatiblity matrix as lower + -- bounds on lib:Cabal here (effectively corresponding to the + -- respective major Cabal version bundled with the respective GHC + -- release). 
+ -- + -- GHC 8.4 needs Cabal >= 2.4 + -- GHC 8.4 needs Cabal >= 2.2 + -- GHC 8.2 needs Cabal >= 2.0 + -- GHC 8.0 needs Cabal >= 1.24 + -- GHC 7.10 needs Cabal >= 1.22 + -- + -- (NB: we don't need to consider older GHCs as Cabal >= 1.20 is + -- the absolute lower bound) + -- + -- TODO: long-term, this compatibility matrix should be + -- stored as a field inside 'Distribution.Compiler.Compiler' + setupMinCabalVersionConstraint + | isGHC, compVer >= mkVersion [8,6,1] = mkVersion [2,4] + -- GHC 8.6alpha2 (GHC 8.6.0.20180714) still shipped with a + -- devel snapshot of Cabal-2.3.0.0; the rule below can be + -- dropped at some point + | isGHC, compVer >= mkVersion [8,6] = mkVersion [2,3] + | isGHC, compVer >= mkVersion [8,4] = mkVersion [2,2] + | isGHC, compVer >= mkVersion [8,2] = mkVersion [2,0] + | isGHC, compVer >= mkVersion [8,0] = mkVersion [1,24] + | isGHC, compVer >= mkVersion [7,10] = mkVersion [1,22] + | otherwise = mkVersion [1,20] + where + isGHC = compFlav `elem` [GHC,GHCJS] + compFlav = compilerFlavor comp + compVer = compilerVersion comp + + -- As we can't predict the future, we also place a global upper + -- bound on the lib:Cabal version we know how to interact with: + -- + -- The upper bound is computed by incrementing the current major + -- version twice in order to allow for the current version, as + -- well as the next adjacent major version (one of which will not + -- be released, as only "even major" versions of Cabal are + -- released to Hackage or bundled with proper GHC releases). + -- + -- For instance, if the current version of cabal-install is an odd + -- development version, e.g. Cabal-2.1.0.0, then we impose an + -- upper bound `setup.Cabal < 2.3`; if `cabal-install` is on a + -- stable/release even version, e.g. Cabal-2.2.1.0, the upper + -- bound is `setup.Cabal < 2.4`. This gives us enough flexibility + -- when dealing with development snapshots of Cabal and cabal-install. 
+ -- + setupMaxCabalVersionConstraint = + alterVersion (take 2) $ incVersion 1 $ incVersion 1 cabalVersion + +------------------------------------------------------------------------------ +-- * Install plan post-processing +------------------------------------------------------------------------------ + +-- This phase goes from the InstallPlan we get from the solver and has to +-- make an elaborated install plan. +-- +-- We go in two steps: +-- +-- 1. elaborate all the source packages that the solver has chosen. +-- 2. swap source packages for pre-existing installed packages wherever +-- possible. +-- +-- We do it in this order, elaborating and then replacing, because the easiest +-- way to calculate the installed package ids used for the replacement step is +-- from the elaborated configuration for each package. + + + + +------------------------------------------------------------------------------ +-- * Install plan elaboration +------------------------------------------------------------------------------ + +-- Note [SolverId to ConfiguredId] +-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +-- Dependency solving is a per package affair, so after we're done, we +-- end up with 'SolverInstallPlan' that records in 'solverPkgLibDeps' +-- and 'solverPkgExeDeps' what packages provide the libraries and executables +-- needed by each component of the package (phew!) For example, if I have +-- +-- library +-- build-depends: lib +-- build-tool-depends: pkg:exe1 +-- build-tools: alex +-- +-- After dependency solving, I find out that this library component has +-- library dependencies on lib-0.2, and executable dependencies on pkg-0.1 +-- and alex-0.3 (other components of the package may have different +-- dependencies). Note that I've "lost" the knowledge that I depend +-- *specifically* on the exe1 executable from pkg. +-- +-- So, we have a this graph of packages, and we need to transform it into +-- a graph of components which we are actually going to build. 
In particular: +-- +-- NODE changes from PACKAGE (SolverPackage) to COMPONENTS (ElaboratedConfiguredPackage) +-- EDGE changes from PACKAGE DEP (SolverId) to COMPONENT DEPS (ConfiguredId) +-- +-- In both cases, what was previously a single node/edge may turn into multiple +-- nodes/edges. Multiple components, because there may be multiple components +-- in a package; multiple component deps, because we may depend upon multiple +-- executables from the same package (and maybe, some day, multiple libraries +-- from the same package.) +-- +-- Let's talk about how to do this transformation. Naively, we might consider +-- just processing each package, converting it into (zero or) one or more +-- components. But we also have to update the edges; this leads to +-- two complications: +-- +-- 1. We don't know what the ConfiguredId of a component is until +-- we've configured it, but we cannot configure a component unless +-- we know the ConfiguredId of all its dependencies. Thus, we must +-- process the 'SolverInstallPlan' in topological order. +-- +-- 2. When we process a package, we know the SolverIds of its +-- dependencies, but we have to do some work to turn these into +-- ConfiguredIds. For example, in the case of build-tool-depends, the +-- SolverId isn't enough to uniquely determine the ConfiguredId we should +-- elaborate to: we have to look at the executable name attached to +-- the package name in the package description to figure it out. +-- At the same time, we NEED to use the SolverId, because there might +-- be multiple versions of the same package in the build plan +-- (due to setup dependencies); we can't just look up the package name +-- from the package description. +-- +-- We can adopt the following strategy: +-- +-- * When a package is transformed into components, record +-- a mapping from SolverId to ALL of the components +-- which were elaborated. 
+-- +-- * When we look up an edge, we use our knowledge of the +-- component name to *filter* the list of components into +-- the ones we actually wanted to refer to. +-- +-- By the way, we can tell that SolverInstallPlan is not the "right" type +-- because a SolverId cannot adequately represent all possible dependency +-- solver states: we may need to record foo-0.1 multiple times in +-- the solver install plan with different dependencies. This imprecision in the +-- type currently doesn't cause any problems because the dependency solver +-- continues to enforce the single instance restriction regardless of compiler +-- version. The right way to solve this is to come up with something very much +-- like a 'ConfiguredId', in that it incorporates the version choices of its +-- dependencies, but less fine grained. + + +-- | Produce an elaborated install plan using the policy for local builds with +-- a nix-style shared store. +-- +-- In theory should be able to make an elaborated install plan with a policy +-- matching that of the classic @cabal install --user@ or @--global@ +-- +elaborateInstallPlan + :: Verbosity -> Platform -> Compiler -> ProgramDb -> PkgConfigDb + -> DistDirLayout + -> StoreDirLayout + -> SolverInstallPlan + -> [PackageSpecifier (SourcePackage (PackageLocation loc))] + -> Map PackageId PackageSourceHash + -> InstallDirs.InstallDirTemplates + -> ProjectConfigShared + -> PackageConfig + -> PackageConfig + -> Map PackageName PackageConfig + -> LogProgress (ElaboratedInstallPlan, ElaboratedSharedConfig) +elaborateInstallPlan verbosity platform compiler compilerprogdb pkgConfigDB + distDirLayout@DistDirLayout{..} + storeDirLayout@StoreDirLayout{storePackageDBStack} + solverPlan localPackages + sourcePackageHashes + defaultInstallDirs + sharedPackageConfig + allPackagesConfig + localPackagesConfig + perPackageConfig = do + x <- elaboratedInstallPlan + return (x, elaboratedSharedConfig) + where + elaboratedSharedConfig = + ElaboratedSharedConfig { + 
pkgConfigPlatform = platform, + pkgConfigCompiler = compiler, + pkgConfigCompilerProgs = compilerprogdb, + pkgConfigReplOptions = [] + } + + preexistingInstantiatedPkgs = + Map.fromList (mapMaybe f (SolverInstallPlan.toList solverPlan)) + where + f (SolverInstallPlan.PreExisting inst) + | let ipkg = instSolverPkgIPI inst + , not (IPI.indefinite ipkg) + = Just (IPI.installedUnitId ipkg, + (FullUnitId (IPI.installedComponentId ipkg) + (Map.fromList (IPI.instantiatedWith ipkg)))) + f _ = Nothing + + elaboratedInstallPlan = + flip InstallPlan.fromSolverInstallPlanWithProgress solverPlan $ \mapDep planpkg -> + case planpkg of + SolverInstallPlan.PreExisting pkg -> + return [InstallPlan.PreExisting (instSolverPkgIPI pkg)] + + SolverInstallPlan.Configured pkg -> + let inplace_doc | shouldBuildInplaceOnly pkg = text "inplace" + | otherwise = Disp.empty + in addProgressCtx (text "In the" <+> inplace_doc <+> text "package" <+> + quotes (disp (packageId pkg))) $ + map InstallPlan.Configured <$> elaborateSolverToComponents mapDep pkg + + -- NB: We don't INSTANTIATE packages at this point. That's + -- a post-pass. This makes it simpler to compute dependencies. 
+ elaborateSolverToComponents + :: (SolverId -> [ElaboratedPlanPackage]) + -> SolverPackage UnresolvedPkgLoc + -> LogProgress [ElaboratedConfiguredPackage] + elaborateSolverToComponents mapDep spkg@(SolverPackage _ _ _ deps0 exe_deps0) + = case mkComponentsGraph (elabEnabledSpec elab0) pd of + Right g -> do + let src_comps = componentsGraphToList g + infoProgress $ hang (text "Component graph for" <+> disp pkgid <<>> colon) + 4 (dispComponentsWithDeps src_comps) + (_, comps) <- mapAccumM buildComponent + (Map.empty, Map.empty, Map.empty) + (map fst src_comps) + let not_per_component_reasons = why_not_per_component src_comps + if null not_per_component_reasons + then return comps + else do checkPerPackageOk comps not_per_component_reasons + return [elaborateSolverToPackage spkg g $ + comps ++ maybeToList setupComponent] + Left cns -> + dieProgress $ + hang (text "Dependency cycle between the following components:") 4 + (vcat (map (text . componentNameStanza) cns)) + where + -- You are eligible to per-component build if this list is empty + why_not_per_component g + = cuz_buildtype ++ cuz_spec ++ cuz_length ++ cuz_flag ++ cuz_coverage + where + cuz reason = [text reason] + -- We have to disable per-component for now with + -- Configure-type scripts in order to prevent parallel + -- invocation of the same `./configure` script. + -- See https://github.com/haskell/cabal/issues/4548 + -- + -- Moreoever, at this point in time, only non-Custom setup scripts + -- are supported. Implementing per-component builds with + -- Custom would require us to create a new 'ElabSetup' + -- type, and teach all of the code paths how to handle it. + -- Once you've implemented this, swap it for the code below. 
+ cuz_buildtype = + case PD.buildType (elabPkgDescription elab0) of + PD.Configure -> cuz "build-type is Configure" + PD.Custom -> cuz "build-type is Custom" + _ -> [] + -- cabal-format versions prior to 1.8 have different build-depends semantics + -- for now it's easier to just fallback to legacy-mode when specVersion < 1.8 + -- see, https://github.com/haskell/cabal/issues/4121 + cuz_spec + | PD.specVersion pd >= mkVersion [1,8] = [] + | otherwise = cuz "cabal-version is less than 1.8" + -- In the odd corner case that a package has no components at all + -- then keep it as a whole package, since otherwise it turns into + -- 0 component graph nodes and effectively vanishes. We want to + -- keep it around at least for error reporting purposes. + cuz_length + | length g > 0 = [] + | otherwise = cuz "there are no buildable components" + -- For ease of testing, we let per-component builds be toggled + -- at the top level + cuz_flag + | fromFlagOrDefault True (projectConfigPerComponent sharedPackageConfig) + = [] + | otherwise = cuz "you passed --disable-per-component" + -- Enabling program coverage introduces odd runtime dependencies + -- between components. + cuz_coverage + | fromFlagOrDefault False (packageConfigCoverage localPackagesConfig) + = cuz "program coverage is enabled" + | otherwise = [] + + -- | Sometimes a package may make use of features which are only + -- supported in per-package mode. If this is the case, we should + -- give an error when this occurs. + checkPerPackageOk comps reasons = do + let is_sublib (CSubLibName _) = True + is_sublib _ = False + when (any (matchElabPkg is_sublib) comps) $ + dieProgress $ + text "Internal libraries only supported with per-component builds." 
$$ + text "Per-component builds were disabled because" <+> + fsep (punctuate comma reasons) + -- TODO: Maybe exclude Backpack too + + elab0 = elaborateSolverToCommon spkg + pkgid = elabPkgSourceId elab0 + pd = elabPkgDescription elab0 + + -- TODO: This is just a skeleton to get elaborateSolverToPackage + -- working correctly + -- TODO: When we actually support building these components, we + -- have to add dependencies on this from all other components + setupComponent :: Maybe ElaboratedConfiguredPackage + setupComponent + | PD.buildType (elabPkgDescription elab0) == PD.Custom + = Just elab0 { + elabModuleShape = emptyModuleShape, + elabUnitId = notImpl "elabUnitId", + elabComponentId = notImpl "elabComponentId", + elabLinkedInstantiatedWith = Map.empty, + elabInstallDirs = notImpl "elabInstallDirs", + elabPkgOrComp = ElabComponent (ElaboratedComponent {..}) + } + | otherwise + = Nothing + where + compSolverName = CD.ComponentSetup + compComponentName = Nothing + dep_pkgs = elaborateLibSolverId mapDep =<< CD.setupDeps deps0 + compLibDependencies + = map configuredId dep_pkgs + compLinkedLibDependencies = notImpl "compLinkedLibDependencies" + compOrderLibDependencies = notImpl "compOrderLibDependencies" + -- Not supported: + compExeDependencies = [] + compExeDependencyPaths = [] + compPkgConfigDependencies = [] + + notImpl f = + error $ "Distribution.Client.ProjectPlanning.setupComponent: " ++ + f ++ " not implemented yet" + + + buildComponent + :: (ConfiguredComponentMap, + LinkedComponentMap, + Map ComponentId FilePath) + -> Cabal.Component + -> LogProgress + ((ConfiguredComponentMap, + LinkedComponentMap, + Map ComponentId FilePath), + ElaboratedConfiguredPackage) + buildComponent (cc_map, lc_map, exe_map) comp = + addProgressCtx (text "In the stanza" <+> + quotes (text (componentNameStanza cname))) $ do + + -- 1. Configure the component, but with a place holder ComponentId. 
+ cc0 <- toConfiguredComponent + pd + (error "Distribution.Client.ProjectPlanning.cc_cid: filled in later") + (Map.unionWith Map.union external_lib_cc_map cc_map) + (Map.unionWith Map.union external_exe_cc_map cc_map) + comp + + + -- 2. Read out the dependencies from the ConfiguredComponent cc0 + let compLibDependencies = + -- Nub because includes can show up multiple times + ordNub (map (annotatedIdToConfiguredId . ci_ann_id) + (cc_includes cc0)) + compExeDependencies = + map annotatedIdToConfiguredId + (cc_exe_deps cc0) + compExeDependencyPaths = + [ (annotatedIdToConfiguredId aid', path) + | aid' <- cc_exe_deps cc0 + , Just paths <- [Map.lookup (ann_id aid') exe_map1] + , path <- paths ] + elab_comp = ElaboratedComponent {..} + + -- 3. Construct a preliminary ElaboratedConfiguredPackage, + -- and use this to compute the component ID. Fix up cc_id + -- correctly. + let elab1 = elab0 { + elabPkgOrComp = ElabComponent $ elab_comp + } + cid = case elabBuildStyle elab0 of + BuildInplaceOnly -> + mkComponentId $ + display pkgid ++ "-inplace" ++ + (case Cabal.componentNameString cname of + Nothing -> "" + Just s -> "-" ++ display s) + BuildAndInstall -> + hashedInstalledPackageId + (packageHashInputs + elaboratedSharedConfig + elab1) -- knot tied + cc = cc0 { cc_ann_id = fmap (const cid) (cc_ann_id cc0) } + infoProgress $ dispConfiguredComponent cc + + -- 4. Perform mix-in linking + let lookup_uid def_uid = + case Map.lookup (unDefUnitId def_uid) preexistingInstantiatedPkgs of + Just full -> full + Nothing -> error ("lookup_uid: " ++ display def_uid) + lc <- toLinkedComponent verbosity lookup_uid (elabPkgSourceId elab0) + (Map.union external_lc_map lc_map) cc + infoProgress $ dispLinkedComponent lc + -- NB: elab is setup to be the correct form for an + -- indefinite library, or a definite library with no holes. + -- We will modify it in 'instantiateInstallPlan' to handle + -- instantiated packages. + + -- 5. 
Construct the final ElaboratedConfiguredPackage + let + elab = elab1 { + elabModuleShape = lc_shape lc, + elabUnitId = abstractUnitId (lc_uid lc), + elabComponentId = lc_cid lc, + elabLinkedInstantiatedWith = Map.fromList (lc_insts lc), + elabPkgOrComp = ElabComponent $ elab_comp { + compLinkedLibDependencies = ordNub (map ci_id (lc_includes lc)), + compOrderLibDependencies = + ordNub (map (abstractUnitId . ci_id) + (lc_includes lc ++ lc_sig_includes lc)) + }, + elabInstallDirs = install_dirs cid + } + + -- 6. Construct the updated local maps + let cc_map' = extendConfiguredComponentMap cc cc_map + lc_map' = extendLinkedComponentMap lc lc_map + exe_map' = Map.insert cid (inplace_bin_dir elab) exe_map + + return ((cc_map', lc_map', exe_map'), elab) + where + compLinkedLibDependencies = error "buildComponent: compLinkedLibDependencies" + compOrderLibDependencies = error "buildComponent: compOrderLibDependencies" + + cname = Cabal.componentName comp + compComponentName = Just cname + compSolverName = CD.componentNameToComponent cname + + -- NB: compLinkedLibDependencies and + -- compOrderLibDependencies are defined when we define + -- 'elab'. + external_lib_dep_sids = CD.select (== compSolverName) deps0 + external_exe_dep_sids = CD.select (== compSolverName) exe_deps0 + + external_lib_dep_pkgs = concatMap mapDep external_lib_dep_sids + + -- Combine library and build-tool dependencies, for backwards + -- compatibility (See issue #5412 and the documentation for + -- InstallPlan.fromSolverInstallPlan), but prefer the versions + -- specified as build-tools. + external_exe_dep_pkgs = + concatMap mapDep $ + ordNubBy (pkgName . 
packageId) $ + external_exe_dep_sids ++ external_lib_dep_sids + + external_exe_map = Map.fromList $ + [ (getComponentId pkg, paths) + | pkg <- external_exe_dep_pkgs + , let paths = planPackageExePaths pkg ] + exe_map1 = Map.union external_exe_map $ fmap (\x -> [x]) exe_map + + external_lib_cc_map = Map.fromListWith Map.union + $ map mkCCMapping external_lib_dep_pkgs + external_exe_cc_map = Map.fromListWith Map.union + $ map mkCCMapping external_exe_dep_pkgs + external_lc_map = + Map.fromList $ map mkShapeMapping $ + external_lib_dep_pkgs ++ concatMap mapDep external_exe_dep_sids + + compPkgConfigDependencies = + [ (pn, fromMaybe (error $ "compPkgConfigDependencies: impossible! " + ++ display pn ++ " from " + ++ display (elabPkgSourceId elab0)) + (pkgConfigDbPkgVersion pkgConfigDB pn)) + | PkgconfigDependency pn _ <- PD.pkgconfigDepends + (Cabal.componentBuildInfo comp) ] + + install_dirs cid + | shouldBuildInplaceOnly spkg + -- use the ordinary default install dirs + = (InstallDirs.absoluteInstallDirs + pkgid + (newSimpleUnitId cid) + (compilerInfo compiler) + InstallDirs.NoCopyDest + platform + defaultInstallDirs) { + + -- absoluteInstallDirs sets these as 'undefined' but we have + -- to use them as "Setup.hs configure" args + InstallDirs.libsubdir = "", + InstallDirs.libexecsubdir = "", + InstallDirs.datasubdir = "" + } + + | otherwise + -- use special simplified install dirs + = storePackageInstallDirs + storeDirLayout + (compilerId compiler) + cid + + inplace_bin_dir elab = + binDirectoryFor + distDirLayout + elaboratedSharedConfig + elab $ + case Cabal.componentNameString cname of + Just n -> display n + Nothing -> "" + + + -- | Given a 'SolverId' referencing a dependency on a library, return + -- the 'ElaboratedPlanPackage' corresponding to the library. This + -- returns at most one result. 
+ elaborateLibSolverId :: (SolverId -> [ElaboratedPlanPackage]) + -> SolverId -> [ElaboratedPlanPackage] + elaborateLibSolverId mapDep = filter (matchPlanPkg (== CLibName)) . mapDep + + -- | Given an 'ElaboratedPlanPackage', return the paths to where the + -- executables that this package represents would be installed. + -- The only case where multiple paths can be returned is the inplace + -- monolithic package one, since there can be multiple exes and each one + -- has its own directory. + planPackageExePaths :: ElaboratedPlanPackage -> [FilePath] + planPackageExePaths = + -- Pre-existing executables are assumed to be in PATH + -- already. In fact, this should be impossible. + InstallPlan.foldPlanPackage (const []) $ \elab -> + let + executables :: [FilePath] + executables = + case elabPkgOrComp elab of + -- Monolithic mode: all exes of the package + ElabPackage _ -> unUnqualComponentName . PD.exeName + <$> PD.executables (elabPkgDescription elab) + -- Per-component mode: just the selected exe + ElabComponent comp -> + case fmap Cabal.componentNameString + (compComponentName comp) of + Just (Just n) -> [display n] + _ -> [""] + in + binDirectoryFor + distDirLayout + elaboratedSharedConfig + elab + <$> executables + + elaborateSolverToPackage :: SolverPackage UnresolvedPkgLoc + -> ComponentsGraph + -> [ElaboratedConfiguredPackage] + -> ElaboratedConfiguredPackage + elaborateSolverToPackage + pkg@(SolverPackage (SourcePackage pkgid _gdesc _srcloc _descOverride) + _flags _stanzas _deps0 _exe_deps0) + compGraph comps = + -- Knot tying: the final elab includes the + -- pkgInstalledId, which is calculated by hashing many + -- of the other fields of the elaboratedPackage. 
+ elab + where + elab0@ElaboratedConfiguredPackage{..} = elaborateSolverToCommon pkg + elab = elab0 { + elabUnitId = newSimpleUnitId pkgInstalledId, + elabComponentId = pkgInstalledId, + elabLinkedInstantiatedWith = Map.empty, + elabInstallDirs = install_dirs, + elabPkgOrComp = ElabPackage $ ElaboratedPackage {..}, + elabModuleShape = modShape + } + + modShape = case find (matchElabPkg (== CLibName)) comps of + Nothing -> emptyModuleShape + Just e -> Ty.elabModuleShape e + + pkgInstalledId + | shouldBuildInplaceOnly pkg + = mkComponentId (display pkgid ++ "-inplace") + + | otherwise + = assert (isJust elabPkgSourceHash) $ + hashedInstalledPackageId + (packageHashInputs + elaboratedSharedConfig + elab) -- recursive use of elab + + | otherwise + = error $ "elaborateInstallPlan: non-inplace package " + ++ " is missing a source hash: " ++ display pkgid + + -- Need to filter out internal dependencies, because they don't + -- correspond to anything real anymore. + isExt confid = confSrcId confid /= pkgid + filterExt = filter isExt + filterExt' = filter (isExt . fst) + + pkgLibDependencies + = buildComponentDeps (filterExt . compLibDependencies) + pkgExeDependencies + = buildComponentDeps (filterExt . compExeDependencies) + pkgExeDependencyPaths + = buildComponentDeps (filterExt' . compExeDependencyPaths) + -- TODO: Why is this flat? 
+ pkgPkgConfigDependencies + = CD.flatDeps $ buildComponentDeps compPkgConfigDependencies + + pkgDependsOnSelfLib + = CD.fromList [ (CD.componentNameToComponent cn, [()]) + | Graph.N _ cn _ <- fromMaybe [] mb_closure ] + where + mb_closure = Graph.revClosure compGraph [ k | k <- Graph.keys compGraph, is_lib k ] + is_lib CLibName = True + -- NB: this case should not occur, because sub-libraries + -- are not supported without per-component builds + is_lib (CSubLibName _) = True + is_lib _ = False + + buildComponentDeps f + = CD.fromList [ (compSolverName comp, f comp) + | ElaboratedConfiguredPackage{ + elabPkgOrComp = ElabComponent comp + } <- comps + ] + + -- NB: This is not the final setting of 'pkgStanzasEnabled'. + -- See [Sticky enabled testsuites]; we may enable some extra + -- stanzas opportunistically when it is cheap to do so. + -- + -- However, we start off by enabling everything that was + -- requested, so that we can maintain an invariant that + -- pkgStanzasEnabled is a superset of elabStanzasRequested + pkgStanzasEnabled = Map.keysSet (Map.filter (id :: Bool -> Bool) elabStanzasRequested) + + install_dirs + | shouldBuildInplaceOnly pkg + -- use the ordinary default install dirs + = (InstallDirs.absoluteInstallDirs + pkgid + (newSimpleUnitId pkgInstalledId) + (compilerInfo compiler) + InstallDirs.NoCopyDest + platform + defaultInstallDirs) { + + -- absoluteInstallDirs sets these as 'undefined' but we have to + -- use them as "Setup.hs configure" args + InstallDirs.libsubdir = "", + InstallDirs.libexecsubdir = "", + InstallDirs.datasubdir = "" + } + + | otherwise + -- use special simplified install dirs + = storePackageInstallDirs + storeDirLayout + (compilerId compiler) + pkgInstalledId + + elaborateSolverToCommon :: SolverPackage UnresolvedPkgLoc + -> ElaboratedConfiguredPackage + elaborateSolverToCommon + pkg@(SolverPackage (SourcePackage pkgid gdesc srcloc descOverride) + flags stanzas deps0 _exe_deps0) = + elaboratedPackage + where + 
elaboratedPackage = ElaboratedConfiguredPackage {..} + + -- These get filled in later + elabUnitId = error "elaborateSolverToCommon: elabUnitId" + elabComponentId = error "elaborateSolverToCommon: elabComponentId" + elabInstantiatedWith = Map.empty + elabLinkedInstantiatedWith = error "elaborateSolverToCommon: elabLinkedInstantiatedWith" + elabPkgOrComp = error "elaborateSolverToCommon: elabPkgOrComp" + elabInstallDirs = error "elaborateSolverToCommon: elabInstallDirs" + elabModuleShape = error "elaborateSolverToCommon: elabModuleShape" + + elabIsCanonical = True + elabPkgSourceId = pkgid + elabPkgDescription = let Right (desc, _) = + PD.finalizePD + flags elabEnabledSpec (const True) + platform (compilerInfo compiler) + [] gdesc + in desc + elabFlagAssignment = flags + elabFlagDefaults = PD.mkFlagAssignment + [ (Cabal.flagName flag, Cabal.flagDefault flag) + | flag <- PD.genPackageFlags gdesc ] + + elabEnabledSpec = enableStanzas stanzas + elabStanzasAvailable = Set.fromList stanzas + elabStanzasRequested = + -- NB: even if a package stanza is requested, if the package + -- doesn't actually have any of that stanza we omit it from + -- the request, to ensure that we don't decide that this + -- package needs to be rebuilt. (It needs to be done here, + -- because the ElaboratedConfiguredPackage is where we test + -- whether or not there have been changes.) + Map.fromList $ [ (TestStanzas, v) | v <- maybeToList tests + , _ <- PD.testSuites elabPkgDescription ] + ++ [ (BenchStanzas, v) | v <- maybeToList benchmarks + , _ <- PD.benchmarks elabPkgDescription ] + where + tests, benchmarks :: Maybe Bool + tests = perPkgOptionMaybe pkgid packageConfigTests + benchmarks = perPkgOptionMaybe pkgid packageConfigBenchmarks + + -- This is a placeholder which will get updated by 'pruneInstallPlanPass1' + -- and 'pruneInstallPlanPass2'. 
We can't populate it here + -- because whether or not tests/benchmarks should be enabled + -- is heuristically calculated based on whether or not the + -- dependencies of the test suite have already been installed, + -- but this function doesn't know what is installed (since + -- we haven't improved the plan yet), so we do it in another pass. + -- Check the comments of those functions for more details. + elabConfigureTargets = [] + elabBuildTargets = [] + elabTestTargets = [] + elabBenchTargets = [] + elabReplTarget = Nothing + elabHaddockTargets = [] + + elabBuildHaddocks = + perPkgOptionFlag pkgid False packageConfigDocumentation + + elabPkgSourceLocation = srcloc + elabPkgSourceHash = Map.lookup pkgid sourcePackageHashes + elabLocalToProject = isLocalToProject pkg + elabBuildStyle = if shouldBuildInplaceOnly pkg + then BuildInplaceOnly else BuildAndInstall + elabBuildPackageDBStack = buildAndRegisterDbs + elabRegisterPackageDBStack = buildAndRegisterDbs + + elabSetupScriptStyle = packageSetupScriptStyle elabPkgDescription + elabSetupScriptCliVersion = + packageSetupScriptSpecVersion + elabSetupScriptStyle elabPkgDescription libDepGraph deps0 + elabSetupPackageDBStack = buildAndRegisterDbs + + buildAndRegisterDbs + | shouldBuildInplaceOnly pkg = inplacePackageDbs + | otherwise = storePackageDbs + + elabPkgDescriptionOverride = descOverride + + elabVanillaLib = perPkgOptionFlag pkgid True packageConfigVanillaLib --TODO: [required feature]: also needs to be handled recursively + elabSharedLib = pkgid `Set.member` pkgsUseSharedLibrary + elabStaticLib = perPkgOptionFlag pkgid False packageConfigStaticLib + elabDynExe = perPkgOptionFlag pkgid False packageConfigDynExe + elabGHCiLib = perPkgOptionFlag pkgid False packageConfigGHCiLib --TODO: [required feature] needs to default to enabled on windows still + + elabProfExe = perPkgOptionFlag pkgid False packageConfigProf + elabProfLib = pkgid `Set.member` pkgsUseProfilingLibrary + + (elabProfExeDetail, + 
elabProfLibDetail) = perPkgOptionLibExeFlag pkgid ProfDetailDefault + packageConfigProfDetail + packageConfigProfLibDetail + elabCoverage = perPkgOptionFlag pkgid False packageConfigCoverage + + elabOptimization = perPkgOptionFlag pkgid NormalOptimisation packageConfigOptimization + elabSplitObjs = perPkgOptionFlag pkgid False packageConfigSplitObjs + elabSplitSections = perPkgOptionFlag pkgid False packageConfigSplitSections + elabStripLibs = perPkgOptionFlag pkgid False packageConfigStripLibs + elabStripExes = perPkgOptionFlag pkgid False packageConfigStripExes + elabDebugInfo = perPkgOptionFlag pkgid NoDebugInfo packageConfigDebugInfo + + -- Combine the configured compiler prog settings with the user-supplied + -- config. For the compiler progs any user-supplied config was taken + -- into account earlier when configuring the compiler so its ok that + -- our configured settings for the compiler override the user-supplied + -- config here. + elabProgramPaths = Map.fromList + [ (programId prog, programPath prog) + | prog <- configuredPrograms compilerprogdb ] + <> perPkgOptionMapLast pkgid packageConfigProgramPaths + elabProgramArgs = Map.fromList + [ (programId prog, args) + | prog <- configuredPrograms compilerprogdb + , let args = programOverrideArgs prog + , not (null args) + ] + <> perPkgOptionMapMappend pkgid packageConfigProgramArgs + elabProgramPathExtra = perPkgOptionNubList pkgid packageConfigProgramPathExtra + elabConfigureScriptArgs = perPkgOptionList pkgid packageConfigConfigureArgs + elabExtraLibDirs = perPkgOptionList pkgid packageConfigExtraLibDirs + elabExtraFrameworkDirs = perPkgOptionList pkgid packageConfigExtraFrameworkDirs + elabExtraIncludeDirs = perPkgOptionList pkgid packageConfigExtraIncludeDirs + elabProgPrefix = perPkgOptionMaybe pkgid packageConfigProgPrefix + elabProgSuffix = perPkgOptionMaybe pkgid packageConfigProgSuffix + + + elabHaddockHoogle = perPkgOptionFlag pkgid False packageConfigHaddockHoogle + elabHaddockHtml = 
perPkgOptionFlag pkgid False packageConfigHaddockHtml + elabHaddockHtmlLocation = perPkgOptionMaybe pkgid packageConfigHaddockHtmlLocation + elabHaddockForeignLibs = perPkgOptionFlag pkgid False packageConfigHaddockForeignLibs + elabHaddockForHackage = perPkgOptionFlag pkgid Cabal.ForDevelopment packageConfigHaddockForHackage + elabHaddockExecutables = perPkgOptionFlag pkgid False packageConfigHaddockExecutables + elabHaddockTestSuites = perPkgOptionFlag pkgid False packageConfigHaddockTestSuites + elabHaddockBenchmarks = perPkgOptionFlag pkgid False packageConfigHaddockBenchmarks + elabHaddockInternal = perPkgOptionFlag pkgid False packageConfigHaddockInternal + elabHaddockCss = perPkgOptionMaybe pkgid packageConfigHaddockCss + elabHaddockLinkedSource = perPkgOptionFlag pkgid False packageConfigHaddockLinkedSource + elabHaddockQuickJump = perPkgOptionFlag pkgid False packageConfigHaddockQuickJump + elabHaddockHscolourCss = perPkgOptionMaybe pkgid packageConfigHaddockHscolourCss + elabHaddockContents = perPkgOptionMaybe pkgid packageConfigHaddockContents + + perPkgOptionFlag :: PackageId -> a -> (PackageConfig -> Flag a) -> a + perPkgOptionMaybe :: PackageId -> (PackageConfig -> Flag a) -> Maybe a + perPkgOptionList :: PackageId -> (PackageConfig -> [a]) -> [a] + + perPkgOptionFlag pkgid def f = fromFlagOrDefault def (lookupPerPkgOption pkgid f) + perPkgOptionMaybe pkgid f = flagToMaybe (lookupPerPkgOption pkgid f) + perPkgOptionList pkgid f = lookupPerPkgOption pkgid f + perPkgOptionNubList pkgid f = fromNubList (lookupPerPkgOption pkgid f) + perPkgOptionMapLast pkgid f = getMapLast (lookupPerPkgOption pkgid f) + perPkgOptionMapMappend pkgid f = getMapMappend (lookupPerPkgOption pkgid f) + + perPkgOptionLibExeFlag pkgid def fboth flib = (exe, lib) + where + exe = fromFlagOrDefault def bothflag + lib = fromFlagOrDefault def (bothflag <> libflag) + + bothflag = lookupPerPkgOption pkgid fboth + libflag = lookupPerPkgOption pkgid flib + + lookupPerPkgOption :: 
(Package pkg, Monoid m) + => pkg -> (PackageConfig -> m) -> m + lookupPerPkgOption pkg f = + -- This is where we merge the options from the project config that + -- apply to all packages, all project local packages, and to specific + -- named packages + global `mappend` local `mappend` perpkg + where + global = f allPackagesConfig + local | isLocalToProject pkg + = f localPackagesConfig + | otherwise + = mempty + perpkg = maybe mempty f (Map.lookup (packageName pkg) perPackageConfig) + + inplacePackageDbs = storePackageDbs + ++ [ distPackageDB (compilerId compiler) ] + + storePackageDbs = storePackageDBStack (compilerId compiler) + + -- For this local build policy, every package that lives in a local source + -- dir (as opposed to a tarball), or depends on such a package, will be + -- built inplace into a shared dist dir. Tarball packages that depend on + -- source dir packages will also get unpacked locally. + shouldBuildInplaceOnly :: SolverPackage loc -> Bool + shouldBuildInplaceOnly pkg = Set.member (packageId pkg) + pkgsToBuildInplaceOnly + + pkgsToBuildInplaceOnly :: Set PackageId + pkgsToBuildInplaceOnly = + Set.fromList + $ map packageId + $ SolverInstallPlan.reverseDependencyClosure + solverPlan + (map PlannedId (Set.toList pkgsLocalToProject)) + + isLocalToProject :: Package pkg => pkg -> Bool + isLocalToProject pkg = Set.member (packageId pkg) + pkgsLocalToProject + + pkgsLocalToProject :: Set PackageId + pkgsLocalToProject = + Set.fromList (catMaybes (map shouldBeLocal localPackages)) + --TODO: localPackages is a misnomer, it's all project packages + -- here is where we decide which ones will be local! 
+ where + shouldBeLocal :: PackageSpecifier (SourcePackage (PackageLocation loc)) -> Maybe PackageId + shouldBeLocal NamedPackage{} = Nothing + shouldBeLocal (SpecificSourcePackage pkg) + | LocalTarballPackage _ <- packageSource pkg = Nothing + | otherwise = Just (packageId pkg) + -- TODO: Is it only LocalTarballPackages we can know about without + -- them being "local" in the sense meant here? + -- + -- Also, review use of SourcePackage's loc vs ProjectPackageLocation + + pkgsUseSharedLibrary :: Set PackageId + pkgsUseSharedLibrary = + packagesWithLibDepsDownwardClosedProperty needsSharedLib + where + needsSharedLib pkg = + fromMaybe compilerShouldUseSharedLibByDefault + (liftM2 (||) pkgSharedLib pkgDynExe) + where + pkgid = packageId pkg + pkgSharedLib = perPkgOptionMaybe pkgid packageConfigSharedLib + pkgDynExe = perPkgOptionMaybe pkgid packageConfigDynExe + + --TODO: [code cleanup] move this into the Cabal lib. It's currently open + -- coded in Distribution.Simple.Configure, but should be made a proper + -- function of the Compiler or CompilerInfo. + compilerShouldUseSharedLibByDefault = + case compilerFlavor compiler of + GHC -> GHC.isDynamic compiler + GHCJS -> GHCJS.isDynamic compiler + _ -> False + + pkgsUseProfilingLibrary :: Set PackageId + pkgsUseProfilingLibrary = + packagesWithLibDepsDownwardClosedProperty needsProfilingLib + where + needsProfilingLib pkg = + fromFlagOrDefault False (profBothFlag <> profLibFlag) + where + pkgid = packageId pkg + profBothFlag = lookupPerPkgOption pkgid packageConfigProf + profLibFlag = lookupPerPkgOption pkgid packageConfigProfLib + --TODO: [code cleanup] unused: the old deprecated packageConfigProfExe + + libDepGraph = Graph.fromDistinctList $ + map NonSetupLibDepSolverPlanPackage + (SolverInstallPlan.toList solverPlan) + + packagesWithLibDepsDownwardClosedProperty property = + Set.fromList + . map packageId + . 
fromMaybe [] + $ Graph.closure + libDepGraph + [ Graph.nodeKey pkg + | pkg <- SolverInstallPlan.toList solverPlan + , property pkg ] -- just the packages that satisfy the property + --TODO: [nice to have] this does not check the config consistency, + -- e.g. a package explicitly turning off profiling, but something + -- depending on it that needs profiling. This really needs a separate + -- package config validation/resolution pass. + + --TODO: [nice to have] config consistency checking: + -- + profiling libs & exes, exe needs lib, recursive + -- + shared libs & exes, exe needs lib, recursive + -- + vanilla libs & exes, exe needs lib, recursive + -- + ghci or shared lib needed by TH, recursive, ghc version dependent + +-- TODO: Drop matchPlanPkg/matchElabPkg in favor of mkCCMapping + +-- | Given a 'ElaboratedPlanPackage', report if it matches a 'ComponentName'. +matchPlanPkg :: (ComponentName -> Bool) -> ElaboratedPlanPackage -> Bool +matchPlanPkg p = InstallPlan.foldPlanPackage (p . ipiComponentName) (matchElabPkg p) + +-- | Get the appropriate 'ComponentName' which identifies an installed +-- component. +ipiComponentName :: IPI.InstalledPackageInfo -> ComponentName +ipiComponentName ipkg = + case IPI.sourceLibName ipkg of + Nothing -> CLibName + Just n -> (CSubLibName n) + +-- | Given a 'ElaboratedConfiguredPackage', report if it matches a +-- 'ComponentName'. +matchElabPkg :: (ComponentName -> Bool) -> ElaboratedConfiguredPackage -> Bool +matchElabPkg p elab = + case elabPkgOrComp elab of + ElabComponent comp -> maybe False p (compComponentName comp) + ElabPackage _ -> + -- So, what should we do here? One possibility is to + -- unconditionally return 'True', because whatever it is + -- that we're looking for, it better be in this package. + -- But this is a bit dodgy if the package doesn't actually + -- have, e.g., a library. 
Fortunately, it's not possible + -- for the build of the library/executables to be toggled + -- by 'pkgStanzasEnabled', so the only thing we have to + -- test is if the component in question is *buildable.* + any (p . componentName) + (Cabal.pkgBuildableComponents (elabPkgDescription elab)) + +-- | Given an 'ElaboratedPlanPackage', generate the mapping from 'PackageName' +-- and 'ComponentName' to the 'ComponentId' that that should be used +-- in this case. +mkCCMapping :: ElaboratedPlanPackage + -> (PackageName, Map ComponentName (AnnotatedId ComponentId)) +mkCCMapping = + InstallPlan.foldPlanPackage + (\ipkg -> (packageName ipkg, + Map.singleton (ipiComponentName ipkg) + -- TODO: libify + (AnnotatedId { + ann_id = IPI.installedComponentId ipkg, + ann_pid = packageId ipkg, + ann_cname = IPI.sourceComponentName ipkg + }))) + $ \elab -> + let mk_aid cn = AnnotatedId { + ann_id = elabComponentId elab, + ann_pid = packageId elab, + ann_cname = cn + } + in (packageName elab, + case elabPkgOrComp elab of + ElabComponent comp -> + case compComponentName comp of + Nothing -> Map.empty + Just n -> Map.singleton n (mk_aid n) + ElabPackage _ -> + Map.fromList $ + map (\comp -> let cn = Cabal.componentName comp in (cn, mk_aid cn)) + (Cabal.pkgBuildableComponents (elabPkgDescription elab))) + +-- | Given an 'ElaboratedPlanPackage', generate the mapping from 'ComponentId' +-- to the shape of this package, as per mix-in linking. +mkShapeMapping :: ElaboratedPlanPackage + -> (ComponentId, (OpenUnitId, ModuleShape)) +mkShapeMapping dpkg = + (getComponentId dpkg, (indef_uid, shape)) + where + (dcid, shape) = + InstallPlan.foldPlanPackage + -- Uses Monad (->) + (liftM2 (,) IPI.installedComponentId shapeInstalledPackage) + (liftM2 (,) elabComponentId elabModuleShape) + dpkg + indef_uid = + IndefFullUnitId dcid + (Map.fromList [ (req, OpenModuleVar req) + | req <- Set.toList (modShapeRequires shape)]) + +-- | Get the bin\/ directories that a package's executables should reside in. 
+-- +-- The result may be empty if the package does not build any executables. +-- +-- The result may have several entries if this is an inplace build of a package +-- with multiple executables. +binDirectories + :: DistDirLayout + -> ElaboratedSharedConfig + -> ElaboratedConfiguredPackage + -> [FilePath] +binDirectories layout config package = case elabBuildStyle package of + -- quick sanity check: no sense returning a bin directory if we're not going + -- to put any executables in it, that will just clog up the PATH + _ | noExecutables -> [] + BuildAndInstall -> [installedBinDirectory package] + BuildInplaceOnly -> map (root) $ case elabPkgOrComp package of + ElabComponent comp -> case compSolverName comp of + CD.ComponentExe n -> [display n] + _ -> [] + ElabPackage _ -> map (display . PD.exeName) + . PD.executables + . elabPkgDescription + $ package + where + noExecutables = null . PD.executables . elabPkgDescription $ package + root = distBuildDirectory layout (elabDistDirParams config package) + "build" + +-- | A newtype for 'SolverInstallPlan.SolverPlanPackage' for which the +-- dependency graph considers only dependencies on libraries which are +-- NOT from setup dependencies. Used to compute the set +-- of packages needed for profiling and dynamic libraries. +newtype NonSetupLibDepSolverPlanPackage + = NonSetupLibDepSolverPlanPackage + { unNonSetupLibDepSolverPlanPackage :: SolverInstallPlan.SolverPlanPackage } + +instance Package NonSetupLibDepSolverPlanPackage where + packageId = packageId . unNonSetupLibDepSolverPlanPackage + +instance IsNode NonSetupLibDepSolverPlanPackage where + type Key NonSetupLibDepSolverPlanPackage = SolverId + nodeKey = nodeKey . 
unNonSetupLibDepSolverPlanPackage + nodeNeighbors (NonSetupLibDepSolverPlanPackage spkg) + = ordNub $ CD.nonSetupDeps (resolverPackageLibDeps spkg) + +type InstS = Map UnitId ElaboratedPlanPackage +type InstM a = State InstS a + +getComponentId :: ElaboratedPlanPackage + -> ComponentId +getComponentId (InstallPlan.PreExisting dipkg) = IPI.installedComponentId dipkg +getComponentId (InstallPlan.Configured elab) = elabComponentId elab +getComponentId (InstallPlan.Installed elab) = elabComponentId elab + +instantiateInstallPlan :: ElaboratedInstallPlan -> ElaboratedInstallPlan +instantiateInstallPlan plan = + InstallPlan.new (IndependentGoals False) + (Graph.fromDistinctList (Map.elems ready_map)) + where + pkgs = InstallPlan.toList plan + + cmap = Map.fromList [ (getComponentId pkg, pkg) | pkg <- pkgs ] + + instantiateUnitId :: ComponentId -> Map ModuleName Module + -> InstM DefUnitId + instantiateUnitId cid insts = state $ \s -> + case Map.lookup uid s of + Nothing -> + -- Knot tied + let (r, s') = runState (instantiateComponent uid cid insts) + (Map.insert uid r s) + in (def_uid, Map.insert uid r s') + Just _ -> (def_uid, s) + where + def_uid = mkDefUnitId cid insts + uid = unDefUnitId def_uid + + instantiateComponent + :: UnitId -> ComponentId -> Map ModuleName Module + -> InstM ElaboratedPlanPackage + instantiateComponent uid cid insts + | Just planpkg <- Map.lookup cid cmap + = case planpkg of + InstallPlan.Configured (elab@ElaboratedConfiguredPackage + { elabPkgOrComp = ElabComponent comp }) -> do + deps <- mapM (substUnitId insts) + (compLinkedLibDependencies comp) + let getDep (Module dep_uid _) = [dep_uid] + return $ InstallPlan.Configured elab { + elabUnitId = uid, + elabComponentId = cid, + elabInstantiatedWith = insts, + elabIsCanonical = Map.null insts, + elabPkgOrComp = ElabComponent comp { + compOrderLibDependencies = + (if Map.null insts then [] else [newSimpleUnitId cid]) ++ + ordNub (map unDefUnitId + (deps ++ concatMap getDep (Map.elems insts))) + 
} + } + _ -> return planpkg + | otherwise = error ("instantiateComponent: " ++ display cid) + + substUnitId :: Map ModuleName Module -> OpenUnitId -> InstM DefUnitId + substUnitId _ (DefiniteUnitId uid) = + return uid + substUnitId subst (IndefFullUnitId cid insts) = do + insts' <- substSubst subst insts + instantiateUnitId cid insts' + + -- NB: NOT composition + substSubst :: Map ModuleName Module + -> Map ModuleName OpenModule + -> InstM (Map ModuleName Module) + substSubst subst insts = T.mapM (substModule subst) insts + + substModule :: Map ModuleName Module -> OpenModule -> InstM Module + substModule subst (OpenModuleVar mod_name) + | Just m <- Map.lookup mod_name subst = return m + | otherwise = error "substModule: non-closing substitution" + substModule subst (OpenModule uid mod_name) = do + uid' <- substUnitId subst uid + return (Module uid' mod_name) + + indefiniteUnitId :: ComponentId -> InstM UnitId + indefiniteUnitId cid = do + let uid = newSimpleUnitId cid + r <- indefiniteComponent uid cid + state $ \s -> (uid, Map.insert uid r s) + + indefiniteComponent :: UnitId -> ComponentId -> InstM ElaboratedPlanPackage + indefiniteComponent _uid cid + | Just planpkg <- Map.lookup cid cmap + = return planpkg + | otherwise = error ("indefiniteComponent: " ++ display cid) + + ready_map = execState work Map.empty + + work = forM_ pkgs $ \pkg -> + case pkg of + InstallPlan.Configured elab + | not (Map.null (elabLinkedInstantiatedWith elab)) + -> indefiniteUnitId (elabComponentId elab) + >> return () + _ -> instantiateUnitId (getComponentId pkg) Map.empty + >> return () + +--------------------------- +-- Build targets +-- + +-- Refer to ProjectPlanning.Types for details of these important types: + +-- data ComponentTarget = ... +-- data SubComponentTarget = ... 
+ +-- One step in the build system is to translate higher level intentions like +-- "build this package", "test that package", or "repl that component" into +-- a more detailed specification of exactly which components to build (or other +-- actions like repl or build docs). This translation is somewhat different for +-- different commands. For example "test" for a package will build a different +-- set of components than "build". In addition, the translation of these +-- intentions can fail. For example "run" for a package is only unambiguous +-- when the package has a single executable. +-- +-- So we need a little bit of infrastructure to make it easy for the command +-- implementations to select what component targets are meant when a user asks +-- to do something with a package or component. To do this (and to be able to +-- produce good error messages for mistakes and when targets are not available) +-- we need to gather and summarise accurate information about all the possible +-- targets, both available and unavailable. Then a command implementation can +-- decide which of the available component targets should be selected. + +-- | An available target represents a component within a package that a user +-- command could plausibly refer to. In this sense, all the components defined +-- within the package are things the user could refer to, whether or not it +-- would actually be possible to build that component. +-- +-- In particular the available target contains an 'AvailableTargetStatus' which +-- informs us about whether it's actually possible to select this component to +-- be built, and if not why not. This detail makes it possible for command +-- implementations (like @build@, @test@ etc) to accurately report why a target +-- cannot be used. 
+-- +-- Note that the type parameter is used to help enforce that command +-- implementations can only select targets that can actually be built (by +-- forcing them to return the @k@ value for the selected targets). +-- In particular 'resolveTargets' makes use of this (with @k@ as +-- @('UnitId', ComponentName')@) to identify the targets thus selected. +-- +data AvailableTarget k = AvailableTarget { + availableTargetPackageId :: PackageId, + availableTargetComponentName :: ComponentName, + availableTargetStatus :: AvailableTargetStatus k, + availableTargetLocalToProject :: Bool + } + deriving (Eq, Show, Functor) + +-- | The status of a an 'AvailableTarget' component. This tells us whether +-- it's actually possible to select this component to be built, and if not +-- why not. +-- +data AvailableTargetStatus k = + TargetDisabledByUser -- ^ When the user does @tests: False@ + | TargetDisabledBySolver -- ^ When the solver could not enable tests + | TargetNotBuildable -- ^ When the component has @buildable: False@ + | TargetNotLocal -- ^ When the component is non-core in a non-local package + | TargetBuildable k TargetRequested -- ^ The target can or should be built + deriving (Eq, Ord, Show, Functor) + +-- | This tells us whether a target ought to be built by default, or only if +-- specifically requested. The policy is that components like libraries and +-- executables are built by default by @build@, but test suites and benchmarks +-- are not, unless this is overridden in the project configuration. +-- +data TargetRequested = + TargetRequestedByDefault -- ^ To be built by default + | TargetNotRequestedByDefault -- ^ Not to be built by default + deriving (Eq, Ord, Show) + +-- | Given the install plan, produce the set of 'AvailableTarget's for each +-- package-component pair. 
+-- +-- Typically there will only be one such target for each component, but for +-- example if we have a plan with both normal and profiling variants of a +-- component then we would get both as available targets, or similarly if we +-- had a plan that contained two instances of the same version of a package. +-- This approach makes it relatively easy to select all instances\/variants +-- of a component. +-- +availableTargets :: ElaboratedInstallPlan + -> Map (PackageId, ComponentName) + [AvailableTarget (UnitId, ComponentName)] +availableTargets installPlan = + let rs = [ (pkgid, cname, fake, target) + | pkg <- InstallPlan.toList installPlan + , (pkgid, cname, fake, target) <- case pkg of + InstallPlan.PreExisting ipkg -> availableInstalledTargets ipkg + InstallPlan.Installed elab -> availableSourceTargets elab + InstallPlan.Configured elab -> availableSourceTargets elab + ] + in Map.union + (Map.fromListWith (++) + [ ((pkgid, cname), [target]) + | (pkgid, cname, fake, target) <- rs, not fake]) + (Map.fromList + [ ((pkgid, cname), [target]) + | (pkgid, cname, fake, target) <- rs, fake]) + -- The normal targets mask the fake ones. We get all instances of the + -- normal ones and only one copy of the fake ones (as there are many + -- duplicates of the fake ones). See 'availableSourceTargets' below for + -- more details on this fake stuff is about. 
+ +availableInstalledTargets :: IPI.InstalledPackageInfo + -> [(PackageId, ComponentName, Bool, + AvailableTarget (UnitId, ComponentName))] +availableInstalledTargets ipkg = + let unitid = installedUnitId ipkg + cname = CLibName + status = TargetBuildable (unitid, cname) TargetRequestedByDefault + target = AvailableTarget (packageId ipkg) cname status False + fake = False + in [(packageId ipkg, cname, fake, target)] + +availableSourceTargets :: ElaboratedConfiguredPackage + -> [(PackageId, ComponentName, Bool, + AvailableTarget (UnitId, ComponentName))] +availableSourceTargets elab = + -- We have a somewhat awkward problem here. We need to know /all/ the + -- components from /all/ the packages because these are the things that + -- users could refer to. Unfortunately, at this stage the elaborated install + -- plan does /not/ contain all components: some components have already + -- been deleted because they cannot possibly be built. This is the case + -- for components that are marked @buildable: False@ in their .cabal files. + -- (It's not unreasonable that the unbuildable components have been pruned + -- as the plan invariant is considerably simpler if all nodes can be built) + -- + -- We can recover the missing components but it's not exactly elegant. For + -- a graph node corresponding to a component we still have the information + -- about the package that it came from, and this includes the names of + -- /all/ the other components in the package. So in principle this lets us + -- find the names of all components, plus full details of the buildable + -- components. + -- + -- Consider for example a package with 3 exe components: foo, bar and baz + -- where foo and bar are buildable, but baz is not. So the plan contains + -- nodes for the components foo and bar. Now we look at each of these two + -- nodes and look at the package they come from and the names of the + -- components in this package. 
This will give us the names foo, bar and + -- baz, twice (once for each of the two buildable components foo and bar). + -- + -- We refer to these reconstructed missing components as fake targets. + -- It is an invariant that they are not available to be built. + -- + -- To produce the final set of targets we put the fake targets in a finite + -- map (thus eliminating the duplicates) and then we overlay that map with + -- the normal buildable targets. (This is done above in 'availableTargets'.) + -- + [ (packageId elab, cname, fake, target) + | component <- pkgComponents (elabPkgDescription elab) + , let cname = componentName component + status = componentAvailableTargetStatus component + target = AvailableTarget { + availableTargetPackageId = packageId elab, + availableTargetComponentName = cname, + availableTargetStatus = status, + availableTargetLocalToProject = elabLocalToProject elab + } + fake = isFakeTarget cname + + -- TODO: The goal of this test is to exclude "instantiated" + -- packages as available targets. This means that you can't + -- ask for a particular instantiated component to be built; + -- it will only get built by a dependency. Perhaps the + -- correct way to implement this is to run selection + -- prior to instantiating packages. If you refactor + -- this, then you can delete this test. 
+ , elabIsCanonical elab + + -- Filter out some bogus parts of the cross product that are never needed + , case status of + TargetBuildable{} | fake -> False + _ -> True + ] + where + isFakeTarget cname = + case elabPkgOrComp elab of + ElabPackage _ -> False + ElabComponent elabComponent -> compComponentName elabComponent + /= Just cname + + componentAvailableTargetStatus + :: Component -> AvailableTargetStatus (UnitId, ComponentName) + componentAvailableTargetStatus component = + case componentOptionalStanza $ CD.componentNameToComponent cname of + -- it is not an optional stanza, so a library, exe or foreign lib + Nothing + | not buildable -> TargetNotBuildable + | otherwise -> TargetBuildable (elabUnitId elab, cname) + TargetRequestedByDefault + + -- it is not an optional stanza, so a testsuite or benchmark + Just stanza -> + case (Map.lookup stanza (elabStanzasRequested elab), + Set.member stanza (elabStanzasAvailable elab)) of + _ | not withinPlan -> TargetNotLocal + (Just False, _) -> TargetDisabledByUser + (Nothing, False) -> TargetDisabledBySolver + _ | not buildable -> TargetNotBuildable + (Just True, True) -> TargetBuildable (elabUnitId elab, cname) + TargetRequestedByDefault + (Nothing, True) -> TargetBuildable (elabUnitId elab, cname) + TargetNotRequestedByDefault + (Just True, False) -> + error "componentAvailableTargetStatus: impossible" + where + cname = componentName component + buildable = PD.buildable (componentBuildInfo component) + withinPlan = elabLocalToProject elab + || case elabPkgOrComp elab of + ElabComponent elabComponent -> + compComponentName elabComponent == Just cname + ElabPackage _ -> + case componentName component of + CLibName -> True + CExeName _ -> True + --TODO: what about sub-libs and foreign libs? + _ -> False + +-- | Merge component targets that overlap each other. 
Specially when we have +-- multiple targets for the same component and one of them refers to the whole +-- component (rather than a module or file within) then all the other targets +-- for that component are subsumed. +-- +-- We also allow for information associated with each component target, and +-- whenever we targets subsume each other we aggregate their associated info. +-- +nubComponentTargets :: [(ComponentTarget, a)] -> [(ComponentTarget, [a])] +nubComponentTargets = + concatMap (wholeComponentOverrides . map snd) + . groupBy ((==) `on` fst) + . sortBy (compare `on` fst) + . map (\t@((ComponentTarget cname _, _)) -> (cname, t)) + . map compatSubComponentTargets + where + -- If we're building the whole component then that the only target all we + -- need, otherwise we can have several targets within the component. + wholeComponentOverrides :: [(ComponentTarget, a )] + -> [(ComponentTarget, [a])] + wholeComponentOverrides ts = + case [ t | (t@(ComponentTarget _ WholeComponent), _) <- ts ] of + (t:_) -> [ (t, map snd ts) ] + [] -> [ (t,[x]) | (t,x) <- ts ] + + -- Not all Cabal Setup.hs versions support sub-component targets, so switch + -- them over to the whole component + compatSubComponentTargets :: (ComponentTarget, a) -> (ComponentTarget, a) + compatSubComponentTargets target@(ComponentTarget cname _subtarget, x) + | not setupHsSupportsSubComponentTargets + = (ComponentTarget cname WholeComponent, x) + | otherwise = target + + -- Actually the reality is that no current version of Cabal's Setup.hs + -- build command actually support building specific files or modules. + setupHsSupportsSubComponentTargets = False + -- TODO: when that changes, adjust this test, e.g. + -- | pkgSetupScriptCliVersion >= Version [x,y] [] + +pkgHasEphemeralBuildTargets :: ElaboratedConfiguredPackage -> Bool +pkgHasEphemeralBuildTargets elab = + isJust (elabReplTarget elab) + || (not . null) (elabTestTargets elab) + || (not . null) (elabBenchTargets elab) + || (not . 
null) (elabHaddockTargets elab) + || (not . null) [ () | ComponentTarget _ subtarget <- elabBuildTargets elab + , subtarget /= WholeComponent ] + +-- | The components that we'll build all of, meaning that after they're built +-- we can skip building them again (unlike with building just some modules or +-- other files within a component). +-- +elabBuildTargetWholeComponents :: ElaboratedConfiguredPackage + -> Set ComponentName +elabBuildTargetWholeComponents elab = + Set.fromList + [ cname | ComponentTarget cname WholeComponent <- elabBuildTargets elab ] + + + +------------------------------------------------------------------------------ +-- * Install plan pruning +------------------------------------------------------------------------------ + +-- | How 'pruneInstallPlanToTargets' should interpret the per-package +-- 'ComponentTarget's: as build, repl or haddock targets. +-- +data TargetAction = TargetActionConfigure + | TargetActionBuild + | TargetActionRepl + | TargetActionTest + | TargetActionBench + | TargetActionHaddock + +-- | Given a set of per-package\/per-component targets, take the subset of the +-- install plan needed to build those targets. Also, update the package config +-- to specify which optional stanzas to enable, and which targets within each +-- package to build. +-- +-- NB: Pruning happens after improvement, which is important because we +-- will prune differently depending on what is already installed (to +-- implement "sticky" test suite enabling behavior). +-- +pruneInstallPlanToTargets :: TargetAction + -> Map UnitId [ComponentTarget] + -> ElaboratedInstallPlan -> ElaboratedInstallPlan +pruneInstallPlanToTargets targetActionType perPkgTargetsMap elaboratedPlan = + InstallPlan.new (InstallPlan.planIndepGoals elaboratedPlan) + . Graph.fromDistinctList + -- We have to do the pruning in two passes + . pruneInstallPlanPass2 + . pruneInstallPlanPass1 + -- Set the targets that will be the roots for pruning + . 
setRootTargets targetActionType perPkgTargetsMap + . InstallPlan.toList + $ elaboratedPlan + +-- | This is a temporary data type, where we temporarily +-- override the graph dependencies of an 'ElaboratedPackage', +-- so we can take a closure over them. We'll throw out the +-- overriden dependencies when we're done so it's strictly temporary. +-- +-- For 'ElaboratedComponent', this the cached unit IDs always +-- coincide with the real thing. +data PrunedPackage = PrunedPackage ElaboratedConfiguredPackage [UnitId] + +instance Package PrunedPackage where + packageId (PrunedPackage elab _) = packageId elab + +instance HasUnitId PrunedPackage where + installedUnitId = nodeKey + +instance IsNode PrunedPackage where + type Key PrunedPackage = UnitId + nodeKey (PrunedPackage elab _) = nodeKey elab + nodeNeighbors (PrunedPackage _ deps) = deps + +fromPrunedPackage :: PrunedPackage -> ElaboratedConfiguredPackage +fromPrunedPackage (PrunedPackage elab _) = elab + +-- | Set the build targets based on the user targets (but not rev deps yet). +-- This is required before we can prune anything. +-- +setRootTargets :: TargetAction + -> Map UnitId [ComponentTarget] + -> [ElaboratedPlanPackage] + -> [ElaboratedPlanPackage] +setRootTargets targetAction perPkgTargetsMap = + assert (not (Map.null perPkgTargetsMap)) $ + assert (all (not . null) (Map.elems perPkgTargetsMap)) $ + + map (mapConfiguredPackage setElabBuildTargets) + where + -- Set the targets we'll build for this package/component. This is just + -- based on the root targets from the user, not targets implied by reverse + -- dependencies. Those comes in the second pass once we know the rev deps. 
+ -- + setElabBuildTargets elab = + case (Map.lookup (installedUnitId elab) perPkgTargetsMap, + targetAction) of + (Nothing, _) -> elab + (Just tgts, TargetActionConfigure) -> elab { elabConfigureTargets = tgts } + (Just tgts, TargetActionBuild) -> elab { elabBuildTargets = tgts } + (Just tgts, TargetActionTest) -> elab { elabTestTargets = tgts } + (Just tgts, TargetActionBench) -> elab { elabBenchTargets = tgts } + (Just [tgt], TargetActionRepl) -> elab { elabReplTarget = Just tgt + , elabBuildHaddocks = False } + (Just tgts, TargetActionHaddock) -> + foldr setElabHaddockTargets (elab { elabHaddockTargets = tgts + , elabBuildHaddocks = True }) tgts + (Just _, TargetActionRepl) -> + error "pruneInstallPlanToTargets: multiple repl targets" + + setElabHaddockTargets tgt elab + | isTestComponentTarget tgt = elab { elabHaddockTestSuites = True } + | isBenchComponentTarget tgt = elab { elabHaddockBenchmarks = True } + | isForeignLibComponentTarget tgt = elab { elabHaddockForeignLibs = True } + | isExeComponentTarget tgt = elab { elabHaddockExecutables = True } + | isSubLibComponentTarget tgt = elab { elabHaddockInternal = True } + | otherwise = elab + +-- | Assuming we have previously set the root build targets (i.e. the user +-- targets but not rev deps yet), the first pruning pass does two things: +-- +-- * A first go at determining which optional stanzas (testsuites, benchmarks) +-- are needed. We have a second go in the next pass. +-- * Take the dependency closure using pruned dependencies. We prune deps that +-- are used only by unneeded optional stanzas. These pruned deps are only +-- used for the dependency closure and are not persisted in this pass. 
+-- +pruneInstallPlanPass1 :: [ElaboratedPlanPackage] + -> [ElaboratedPlanPackage] +pruneInstallPlanPass1 pkgs = + map (mapConfiguredPackage fromPrunedPackage) + (fromMaybe [] $ Graph.closure graph roots) + where + pkgs' = map (mapConfiguredPackage prune) pkgs + graph = Graph.fromDistinctList pkgs' + roots = mapMaybe find_root pkgs' + + prune elab = PrunedPackage elab' (pruneOptionalDependencies elab') + where elab' = + setDocumentation + $ addOptionalStanzas elab + + find_root (InstallPlan.Configured (PrunedPackage elab _)) = + if not $ and [ null (elabConfigureTargets elab) + , null (elabBuildTargets elab) + , null (elabTestTargets elab) + , null (elabBenchTargets elab) + , isNothing (elabReplTarget elab) + , null (elabHaddockTargets elab) + ] + then Just (installedUnitId elab) + else Nothing + find_root _ = Nothing + + -- Note [Sticky enabled testsuites] + -- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + -- The testsuite and benchmark targets are somewhat special in that we need + -- to configure the packages with them enabled, and we need to do that even + -- if we only want to build one of several testsuites. + -- + -- There are two cases in which we will enable the testsuites (or + -- benchmarks): if one of the targets is a testsuite, or if all of the + -- testsuite dependencies are already cached in the store. The rationale + -- for the latter is to minimise how often we have to reconfigure due to + -- the particular targets we choose to build. Otherwise choosing to build + -- a testsuite target, and then later choosing to build an exe target + -- would involve unnecessarily reconfiguring the package with testsuites + -- disabled. Technically this introduces a little bit of stateful + -- behaviour to make this "sticky", but it should be benign. + + -- Decide whether or not to enable testsuites and benchmarks. 
+ -- See [Sticky enabled testsuites] + addOptionalStanzas :: ElaboratedConfiguredPackage -> ElaboratedConfiguredPackage + addOptionalStanzas elab@ElaboratedConfiguredPackage{ elabPkgOrComp = ElabPackage pkg } = + elab { + elabPkgOrComp = ElabPackage (pkg { pkgStanzasEnabled = stanzas }) + } + where + stanzas :: Set OptionalStanza + -- By default, we enabled all stanzas requested by the user, + -- as per elabStanzasRequested, done in + -- 'elaborateSolverToPackage' + stanzas = pkgStanzasEnabled pkg + -- optionalStanzasRequiredByTargets has to be done at + -- prune-time because it depends on 'elabTestTargets' + -- et al, which is done by 'setRootTargets' at the + -- beginning of pruning. + <> optionalStanzasRequiredByTargets elab + -- optionalStanzasWithDepsAvailable has to be done at + -- prune-time because it depends on what packages are + -- installed, which is not known until after improvement + -- (pruning is done after improvement) + <> optionalStanzasWithDepsAvailable availablePkgs elab pkg + addOptionalStanzas elab = elab + + setDocumentation :: ElaboratedConfiguredPackage -> ElaboratedConfiguredPackage + setDocumentation elab@ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent comp } = + elab { + elabBuildHaddocks = + elabBuildHaddocks elab && documentationEnabled (compSolverName comp) elab + } + + where + documentationEnabled c = + case c of + CD.ComponentLib -> const True + CD.ComponentSubLib _ -> elabHaddockInternal + CD.ComponentFLib _ -> elabHaddockForeignLibs + CD.ComponentExe _ -> elabHaddockExecutables + CD.ComponentTest _ -> elabHaddockTestSuites + CD.ComponentBench _ -> elabHaddockBenchmarks + CD.ComponentSetup -> const False + + setDocumentation elab = elab + + -- Calculate package dependencies but cut out those needed only by + -- optional stanzas that we've determined we will not enable. 
+ -- These pruned deps are not persisted in this pass since they're based on + -- the optional stanzas and we'll make further tweaks to the optional + -- stanzas in the next pass. + -- + pruneOptionalDependencies :: ElaboratedConfiguredPackage -> [UnitId] + pruneOptionalDependencies elab@ElaboratedConfiguredPackage{ elabPkgOrComp = ElabComponent _ } + = InstallPlan.depends elab -- no pruning + pruneOptionalDependencies ElaboratedConfiguredPackage{ elabPkgOrComp = ElabPackage pkg } + = (CD.flatDeps . CD.filterDeps keepNeeded) (pkgOrderDependencies pkg) + where + keepNeeded (CD.ComponentTest _) _ = TestStanzas `Set.member` stanzas + keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas + keepNeeded _ _ = True + stanzas = pkgStanzasEnabled pkg + + optionalStanzasRequiredByTargets :: ElaboratedConfiguredPackage + -> Set OptionalStanza + optionalStanzasRequiredByTargets pkg = + Set.fromList + [ stanza + | ComponentTarget cname _ <- elabBuildTargets pkg + ++ elabTestTargets pkg + ++ elabBenchTargets pkg + ++ maybeToList (elabReplTarget pkg) + ++ elabHaddockTargets pkg + , stanza <- maybeToList $ + componentOptionalStanza $ + CD.componentNameToComponent cname + ] + + availablePkgs = + Set.fromList + [ installedUnitId pkg + | InstallPlan.PreExisting pkg <- pkgs ] + +-- | Given a set of already installed packages @availablePkgs@, +-- determine the set of available optional stanzas from @pkg@ +-- which have all of their dependencies already installed. This is used +-- to implement "sticky" testsuites, where once we have installed +-- all of the deps needed for the test suite, we go ahead and +-- enable it always. 
+optionalStanzasWithDepsAvailable :: Set UnitId + -> ElaboratedConfiguredPackage + -> ElaboratedPackage + -> Set OptionalStanza +optionalStanzasWithDepsAvailable availablePkgs elab pkg = + Set.fromList + [ stanza + | stanza <- Set.toList (elabStanzasAvailable elab) + , let deps :: [UnitId] + deps = CD.select (optionalStanzaDeps stanza) + -- TODO: probably need to select other + -- dep types too eventually + (pkgOrderDependencies pkg) + , all (`Set.member` availablePkgs) deps + ] + where + optionalStanzaDeps TestStanzas (CD.ComponentTest _) = True + optionalStanzaDeps BenchStanzas (CD.ComponentBench _) = True + optionalStanzaDeps _ _ = False + + +-- The second pass does three things: +-- +-- * A second go at deciding which optional stanzas to enable. +-- * Prune the dependencies based on the final choice of optional stanzas. +-- * Extend the targets within each package to build, now we know the reverse +-- dependencies, ie we know which libs are needed as deps by other packages. +-- +-- Achieving sticky behaviour with enabling\/disabling optional stanzas is +-- tricky. The first approximation was handled by the first pass above, but +-- it's not quite enough. That pass will enable stanzas if all of the deps +-- of the optional stanza are already installed /in the store/. That's important +-- but it does not account for dependencies that get built inplace as part of +-- the project. We cannot take those inplace build deps into account in the +-- pruning pass however because we don't yet know which ones we're going to +-- build. Once we do know, we can have another go and enable stanzas that have +-- all their deps available. Now we can consider all packages in the pruned +-- plan to be available, including ones we already decided to build from +-- source. +-- +-- Deciding which targets to build depends on knowing which packages have +-- reverse dependencies (ie are needed). 
This requires the result of first +-- pass, which is another reason we have to split it into two passes. +-- +-- Note that just because we might enable testsuites or benchmarks (in the +-- first or second pass) doesn't mean that we build all (or even any) of them. +-- That depends on which targets we picked in the first pass. +-- +pruneInstallPlanPass2 :: [ElaboratedPlanPackage] + -> [ElaboratedPlanPackage] +pruneInstallPlanPass2 pkgs = + map (mapConfiguredPackage setStanzasDepsAndTargets) pkgs + where + setStanzasDepsAndTargets elab = + elab { + elabBuildTargets = ordNub + $ elabBuildTargets elab + ++ libTargetsRequiredForRevDeps + ++ exeTargetsRequiredForRevDeps, + elabPkgOrComp = + case elabPkgOrComp elab of + ElabPackage pkg -> + let stanzas = pkgStanzasEnabled pkg + <> optionalStanzasWithDepsAvailable availablePkgs elab pkg + keepNeeded (CD.ComponentTest _) _ = TestStanzas `Set.member` stanzas + keepNeeded (CD.ComponentBench _) _ = BenchStanzas `Set.member` stanzas + keepNeeded _ _ = True + in ElabPackage $ pkg { + pkgStanzasEnabled = stanzas, + pkgLibDependencies = CD.filterDeps keepNeeded (pkgLibDependencies pkg), + pkgExeDependencies = CD.filterDeps keepNeeded (pkgExeDependencies pkg), + pkgExeDependencyPaths = CD.filterDeps keepNeeded (pkgExeDependencyPaths pkg) + } + r@(ElabComponent _) -> r + } + where + libTargetsRequiredForRevDeps = + [ ComponentTarget Cabal.defaultLibName WholeComponent + | installedUnitId elab `Set.member` hasReverseLibDeps + ] + exeTargetsRequiredForRevDeps = + -- TODO: allow requesting executable with different name + -- than package name + [ ComponentTarget (Cabal.CExeName + $ packageNameToUnqualComponentName + $ packageName $ elabPkgSourceId elab) + WholeComponent + | installedUnitId elab `Set.member` hasReverseExeDeps + ] + + + availablePkgs :: Set UnitId + availablePkgs = Set.fromList (map installedUnitId pkgs) + + hasReverseLibDeps :: Set UnitId + hasReverseLibDeps = + Set.fromList [ depid + | InstallPlan.Configured pkg <- 
pkgs + , depid <- elabOrderLibDependencies pkg ] + + hasReverseExeDeps :: Set UnitId + hasReverseExeDeps = + Set.fromList [ depid + | InstallPlan.Configured pkg <- pkgs + , depid <- elabOrderExeDependencies pkg ] + +mapConfiguredPackage :: (srcpkg -> srcpkg') + -> InstallPlan.GenericPlanPackage ipkg srcpkg + -> InstallPlan.GenericPlanPackage ipkg srcpkg' +mapConfiguredPackage f (InstallPlan.Configured pkg) = + InstallPlan.Configured (f pkg) +mapConfiguredPackage f (InstallPlan.Installed pkg) = + InstallPlan.Installed (f pkg) +mapConfiguredPackage _ (InstallPlan.PreExisting pkg) = + InstallPlan.PreExisting pkg + +------------------------------------ +-- Support for --only-dependencies +-- + +-- | Try to remove the given targets from the install plan. +-- +-- This is not always possible. +-- +pruneInstallPlanToDependencies :: Set UnitId + -> ElaboratedInstallPlan + -> Either CannotPruneDependencies + ElaboratedInstallPlan +pruneInstallPlanToDependencies pkgTargets installPlan = + assert (all (isJust . InstallPlan.lookup installPlan) + (Set.toList pkgTargets)) $ + + fmap (InstallPlan.new (InstallPlan.planIndepGoals installPlan)) + . checkBrokenDeps + . Graph.fromDistinctList + . filter (\pkg -> installedUnitId pkg `Set.notMember` pkgTargets) + . InstallPlan.toList + $ installPlan + where + -- Our strategy is to remove the packages we don't want and then check + -- if the remaining graph is broken or not, ie any packages with dangling + -- dependencies. If there are then we cannot prune the given targets. 
+ checkBrokenDeps :: Graph.Graph ElaboratedPlanPackage + -> Either CannotPruneDependencies + (Graph.Graph ElaboratedPlanPackage) + checkBrokenDeps graph = + case Graph.broken graph of + [] -> Right graph + brokenPackages -> + Left $ CannotPruneDependencies + [ (pkg, missingDeps) + | (pkg, missingDepIds) <- brokenPackages + , let missingDeps = mapMaybe lookupDep missingDepIds + ] + where + -- lookup in the original unpruned graph + lookupDep = InstallPlan.lookup installPlan + +-- | It is not always possible to prune to only the dependencies of a set of +-- targets. It may be the case that removing a package leaves something else +-- that still needed the pruned package. +-- +-- This lists all the packages that would be broken, and their dependencies +-- that would be missing if we did prune. +-- +newtype CannotPruneDependencies = + CannotPruneDependencies [(ElaboratedPlanPackage, + [ElaboratedPlanPackage])] + deriving (Show) + + +--------------------------- +-- Setup.hs script policy +-- + +-- Handling for Setup.hs scripts is a bit tricky, part of it lives in the +-- solver phase, and part in the elaboration phase. We keep the helper +-- functions for both phases together here so at least you can see all of it +-- in one place. +-- +-- There are four major cases for Setup.hs handling: +-- +-- 1. @build-type@ Custom with a @custom-setup@ section +-- 2. @build-type@ Custom without a @custom-setup@ section +-- 3. @build-type@ not Custom with @cabal-version > $our-cabal-version@ +-- 4. @build-type@ not Custom with @cabal-version <= $our-cabal-version@ +-- +-- It's also worth noting that packages specifying @cabal-version: >= 1.23@ +-- or later that have @build-type@ Custom will always have a @custom-setup@ +-- section. Therefore in case 2, the specified @cabal-version@ will always be +-- less than 1.23. +-- +-- In cases 1 and 2 we obviously have to build an external Setup.hs script, +-- while in case 4 we can use the internal library API. 
In case 3 we also have +-- to build an external Setup.hs script because the package needs a later +-- Cabal lib version than we can support internally. +-- +-- data SetupScriptStyle = ... -- see ProjectPlanning.Types + +-- | Work out the 'SetupScriptStyle' given the package description. +-- +packageSetupScriptStyle :: PD.PackageDescription -> SetupScriptStyle +packageSetupScriptStyle pkg + | buildType == PD.Custom + , Just setupbi <- PD.setupBuildInfo pkg -- does have a custom-setup stanza + , not (PD.defaultSetupDepends setupbi) -- but not one we added internally + = SetupCustomExplicitDeps + + | buildType == PD.Custom + , Just setupbi <- PD.setupBuildInfo pkg -- we get this case post-solver as + , PD.defaultSetupDepends setupbi -- the solver fills in the deps + = SetupCustomImplicitDeps + + | buildType == PD.Custom + , Nothing <- PD.setupBuildInfo pkg -- we get this case pre-solver + = SetupCustomImplicitDeps + + | PD.specVersion pkg > cabalVersion -- one cabal-install is built against + = SetupNonCustomExternalLib + + | otherwise + = SetupNonCustomInternalLib + where + buildType = PD.buildType pkg + + +-- | Part of our Setup.hs handling policy is implemented by getting the solver +-- to work out setup dependencies for packages. The solver already handles +-- packages that explicitly specify setup dependencies, but we can also tell +-- the solver to treat other packages as if they had setup dependencies. +-- That's what this function does, it gets called by the solver for all +-- packages that don't already have setup dependencies. +-- +-- The dependencies we want to add is different for each 'SetupScriptStyle'. +-- +-- Note that adding default deps means these deps are actually /added/ to the +-- packages that we get out of the solver in the 'SolverInstallPlan'. Making +-- implicit setup deps explicit is a problem in the post-solver stages because +-- we still need to distinguish the case of explicit and implict setup deps. +-- See 'rememberImplicitSetupDeps'. 
+-- +-- Note in addition to adding default setup deps, we also use +-- 'addSetupCabalMinVersionConstraint' (in 'planPackages') to require +-- @Cabal >= 1.20@ for Setup scripts. +-- +defaultSetupDeps :: Compiler -> Platform + -> PD.PackageDescription + -> Maybe [Dependency] +defaultSetupDeps compiler platform pkg = + case packageSetupScriptStyle pkg of + + -- For packages with build type custom that do not specify explicit + -- setup dependencies, we add a dependency on Cabal and a number + -- of other packages. + SetupCustomImplicitDeps -> + Just $ + [ Dependency depPkgname anyVersion + | depPkgname <- legacyCustomSetupPkgs compiler platform ] ++ + [ Dependency cabalPkgname cabalConstraint + | packageName pkg /= cabalPkgname ] + where + -- The Cabal dep is slightly special: + -- * We omit the dep for the Cabal lib itself, since it bootstraps. + -- * We constrain it to be < 1.25 + -- + -- Note: we also add a global constraint to require Cabal >= 1.20 + -- for Setup scripts (see use addSetupCabalMinVersionConstraint). + -- + cabalConstraint = orLaterVersion (PD.specVersion pkg) + `intersectVersionRanges` + earlierVersion cabalCompatMaxVer + -- The idea here is that at some point we will make significant + -- breaking changes to the Cabal API that Setup.hs scripts use. + -- So for old custom Setup scripts that do not specify explicit + -- constraints, we constrain them to use a compatible Cabal version. + cabalCompatMaxVer = mkVersion [1,25] + + -- For other build types (like Simple) if we still need to compile an + -- external Setup.hs, it'll be one of the simple ones that only depends + -- on Cabal and base. + SetupNonCustomExternalLib -> + Just [ Dependency cabalPkgname cabalConstraint + , Dependency basePkgname anyVersion ] + where + cabalConstraint = orLaterVersion (PD.specVersion pkg) + + -- The internal setup wrapper method has no deps at all. 
+ SetupNonCustomInternalLib -> Just [] + + -- This case gets ruled out by the caller, planPackages, see the note + -- above in the SetupCustomImplicitDeps case. + SetupCustomExplicitDeps -> + error $ "defaultSetupDeps: called for a package with explicit " + ++ "setup deps: " ++ display (packageId pkg) + + +-- | Work out which version of the Cabal spec we will be using to talk to the +-- Setup.hs interface for this package. +-- +-- This depends somewhat on the 'SetupScriptStyle' but most cases are a result +-- of what the solver picked for us, based on the explicit setup deps or the +-- ones added implicitly by 'defaultSetupDeps'. +-- +packageSetupScriptSpecVersion :: SetupScriptStyle + -> PD.PackageDescription + -> Graph.Graph NonSetupLibDepSolverPlanPackage + -> ComponentDeps [SolverId] + -> Version + +-- We're going to be using the internal Cabal library, so the spec version of +-- that is simply the version of the Cabal library that cabal-install has been +-- built with. +packageSetupScriptSpecVersion SetupNonCustomInternalLib _ _ _ = + cabalVersion + +-- If we happen to be building the Cabal lib itself then because that +-- bootstraps itself then we use the version of the lib we're building. +packageSetupScriptSpecVersion SetupCustomImplicitDeps pkg _ _ + | packageName pkg == cabalPkgname + = packageVersion pkg + +-- In all other cases we have a look at what version of the Cabal lib the +-- solver picked. Or if it didn't depend on Cabal at all (which is very rare) +-- then we look at the .cabal file to see what spec version it declares. +packageSetupScriptSpecVersion _ pkg libDepGraph deps = + case find ((cabalPkgname ==) . 
packageName) setupLibDeps of + Just dep -> packageVersion dep + Nothing -> PD.specVersion pkg + where + setupLibDeps = map packageId $ fromMaybe [] $ + Graph.closure libDepGraph (CD.setupDeps deps) + + +cabalPkgname, basePkgname :: PackageName +cabalPkgname = mkPackageName "Cabal" +basePkgname = mkPackageName "base" + + +legacyCustomSetupPkgs :: Compiler -> Platform -> [PackageName] +legacyCustomSetupPkgs compiler (Platform _ os) = + map mkPackageName $ + [ "array", "base", "binary", "bytestring", "containers" + , "deepseq", "directory", "filepath", "old-time", "pretty" + , "process", "time", "transformers" ] + ++ [ "Win32" | os == Windows ] + ++ [ "unix" | os /= Windows ] + ++ [ "ghc-prim" | isGHC ] + ++ [ "template-haskell" | isGHC ] + where + isGHC = compilerCompatFlavor GHC compiler + +-- The other aspects of our Setup.hs policy lives here where we decide on +-- the 'SetupScriptOptions'. +-- +-- Our current policy for the 'SetupCustomImplicitDeps' case is that we +-- try to make the implicit deps cover everything, and we don't allow the +-- compiler to pick up other deps. This may or may not be sustainable, and +-- we might have to allow the deps to be non-exclusive, but that itself would +-- be tricky since we would have to allow the Setup access to all the packages +-- in the store and local dbs. + +setupHsScriptOptions :: ElaboratedReadyPackage + -> ElaboratedInstallPlan + -> ElaboratedSharedConfig + -> DistDirLayout + -> FilePath + -> FilePath + -> Bool + -> Lock + -> SetupScriptOptions +-- TODO: Fix this so custom is a separate component. Custom can ALWAYS +-- be a separate component!!! 
+setupHsScriptOptions (ReadyPackage elab@ElaboratedConfiguredPackage{..}) + plan ElaboratedSharedConfig{..} distdir srcdir builddir + isParallelBuild cacheLock = + SetupScriptOptions { + useCabalVersion = thisVersion elabSetupScriptCliVersion, + useCabalSpecVersion = Just elabSetupScriptCliVersion, + useCompiler = Just pkgConfigCompiler, + usePlatform = Just pkgConfigPlatform, + usePackageDB = elabSetupPackageDBStack, + usePackageIndex = Nothing, + useDependencies = [ (uid, srcid) + | ConfiguredId srcid (Just CLibName) uid + <- elabSetupDependencies elab ], + useDependenciesExclusive = True, + useVersionMacros = elabSetupScriptStyle == SetupCustomExplicitDeps, + useProgramDb = pkgConfigCompilerProgs, + useDistPref = builddir, + useLoggingHandle = Nothing, -- this gets set later + useWorkingDir = Just srcdir, + useExtraPathEnv = elabExeDependencyPaths elab, + useExtraEnvOverrides = dataDirsEnvironmentForPlan distdir plan, + useWin32CleanHack = False, --TODO: [required eventually] + forceExternalSetupMethod = isParallelBuild, + setupCacheLock = Just cacheLock, + isInteractive = False + } + + +-- | To be used for the input for elaborateInstallPlan. +-- +-- TODO: [code cleanup] make InstallDirs.defaultInstallDirs pure. 
+-- +userInstallDirTemplates :: Compiler + -> IO InstallDirs.InstallDirTemplates +userInstallDirTemplates compiler = do + InstallDirs.defaultInstallDirs + (compilerFlavor compiler) + True -- user install + False -- unused + +storePackageInstallDirs :: StoreDirLayout + -> CompilerId + -> InstalledPackageId + -> InstallDirs.InstallDirs FilePath +storePackageInstallDirs StoreDirLayout{ storePackageDirectory + , storeDirectory } + compid ipkgid = + InstallDirs.InstallDirs {..} + where + store = storeDirectory compid + prefix = storePackageDirectory compid (newSimpleUnitId ipkgid) + bindir = prefix "bin" + libdir = prefix "lib" + libsubdir = "" + -- Note: on macOS, we place libraries into + -- @store/lib@ to work around the load + -- command size limit of macOSs mach-o linker. + -- See also @PackageHash.hashedInstalledPackageIdVeryShort@ + dynlibdir | buildOS == OSX = store "lib" + | otherwise = libdir + flibdir = libdir + libexecdir = prefix "libexec" + libexecsubdir= "" + includedir = libdir "include" + datadir = prefix "share" + datasubdir = "" + docdir = datadir "doc" + mandir = datadir "man" + htmldir = docdir "html" + haddockdir = htmldir + sysconfdir = prefix "etc" + + +--TODO: [code cleanup] perhaps reorder this code +-- based on the ElaboratedInstallPlan + ElaboratedSharedConfig, +-- make the various Setup.hs {configure,build,copy} flags + + +setupHsConfigureFlags :: ElaboratedReadyPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> Cabal.ConfigFlags +setupHsConfigureFlags (ReadyPackage elab@ElaboratedConfiguredPackage{..}) + sharedConfig@ElaboratedSharedConfig{..} + verbosity builddir = + sanityCheckElaboratedConfiguredPackage sharedConfig elab + (Cabal.ConfigFlags {..}) + where + configArgs = mempty -- unused, passed via args + configDistPref = toFlag builddir + configCabalFilePath = mempty + configVerbosity = toFlag verbosity + + configInstantiateWith = Map.toList elabInstantiatedWith + + configDeterministic = mempty -- doesn't matter, 
configIPID/configCID overridese + configIPID = case elabPkgOrComp of + ElabPackage pkg -> toFlag (display (pkgInstalledId pkg)) + ElabComponent _ -> mempty + configCID = case elabPkgOrComp of + ElabPackage _ -> mempty + ElabComponent _ -> toFlag elabComponentId + + configProgramPaths = Map.toList elabProgramPaths + configProgramArgs + | {- elabSetupScriptCliVersion < mkVersion [1,24,3] -} True + -- workaround for + -- + -- It turns out, that even with Cabal 2.0, there's still cases such as e.g. + -- custom Setup.hs scripts calling out to GHC even when going via + -- @runProgram ghcProgram@, as e.g. happy does in its + -- + -- (see also ) + -- + -- So for now, let's pass the rather harmless and idempotent + -- `-hide-all-packages` flag to all invocations (which has + -- the benefit that every GHC invocation starts with a + -- conistently well-defined clean slate) until we find a + -- better way. + = Map.toList $ + Map.insertWith (++) "ghc" ["-hide-all-packages"] + elabProgramArgs + | otherwise = Map.toList elabProgramArgs + configProgramPathExtra = toNubList elabProgramPathExtra + configHcFlavor = toFlag (compilerFlavor pkgConfigCompiler) + configHcPath = mempty -- we use configProgramPaths instead + configHcPkg = mempty -- we use configProgramPaths instead + + configVanillaLib = toFlag elabVanillaLib + configSharedLib = toFlag elabSharedLib + configStaticLib = toFlag elabStaticLib + + configDynExe = toFlag elabDynExe + configGHCiLib = toFlag elabGHCiLib + configProfExe = mempty + configProfLib = toFlag elabProfLib + configProf = toFlag elabProfExe + + -- configProfDetail is for exe+lib, but overridden by configProfLibDetail + -- so we specify both so we can specify independently + configProfDetail = toFlag elabProfExeDetail + configProfLibDetail = toFlag elabProfLibDetail + + configCoverage = toFlag elabCoverage + configLibCoverage = mempty + + configOptimization = toFlag elabOptimization + configSplitSections = toFlag elabSplitSections + configSplitObjs = toFlag 
elabSplitObjs + configStripExes = toFlag elabStripExes + configStripLibs = toFlag elabStripLibs + configDebugInfo = toFlag elabDebugInfo + + configConfigurationsFlags = elabFlagAssignment + configConfigureArgs = elabConfigureScriptArgs + configExtraLibDirs = elabExtraLibDirs + configExtraFrameworkDirs = elabExtraFrameworkDirs + configExtraIncludeDirs = elabExtraIncludeDirs + configProgPrefix = maybe mempty toFlag elabProgPrefix + configProgSuffix = maybe mempty toFlag elabProgSuffix + + configInstallDirs = fmap (toFlag . InstallDirs.toPathTemplate) + elabInstallDirs + + -- we only use configDependencies, unless we're talking to an old Cabal + -- in which case we use configConstraints + -- NB: This does NOT use InstallPlan.depends, which includes executable + -- dependencies which should NOT be fed in here (also you don't have + -- enough info anyway) + configDependencies = [ (case mb_cn of + -- Special case for internal libraries + Just (CSubLibName uqn) + | packageId elab == srcid + -> mkPackageName (unUnqualComponentName uqn) + _ -> packageName srcid, + cid) + | ConfiguredId srcid mb_cn cid <- elabLibDependencies elab ] + configConstraints = + case elabPkgOrComp of + ElabPackage _ -> + [ thisPackageVersion srcid + | ConfiguredId srcid _ _uid <- elabLibDependencies elab ] + ElabComponent _ -> [] + + + -- explicitly clear, then our package db stack + -- TODO: [required eventually] have to do this differently for older Cabal versions + configPackageDBs = Nothing : map Just elabBuildPackageDBStack + + configTests = case elabPkgOrComp of + ElabPackage pkg -> toFlag (TestStanzas `Set.member` pkgStanzasEnabled pkg) + ElabComponent _ -> mempty + configBenchmarks = case elabPkgOrComp of + ElabPackage pkg -> toFlag (BenchStanzas `Set.member` pkgStanzasEnabled pkg) + ElabComponent _ -> mempty + + configExactConfiguration = toFlag True + configFlagError = mempty --TODO: [research required] appears not to be implemented + configRelocatable = mempty --TODO: [research required] 
??? + configScratchDir = mempty -- never use + configUserInstall = mempty -- don't rely on defaults + configPrograms_ = mempty -- never use, shouldn't exist + configUseResponseFiles = mempty + +setupHsConfigureArgs :: ElaboratedConfiguredPackage + -> [String] +setupHsConfigureArgs (ElaboratedConfiguredPackage { elabPkgOrComp = ElabPackage _ }) = [] +setupHsConfigureArgs elab@(ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent comp }) = + [showComponentTarget (packageId elab) (ComponentTarget cname WholeComponent)] + where + cname = fromMaybe (error "setupHsConfigureArgs: trying to configure setup") + (compComponentName comp) + +setupHsBuildFlags :: ElaboratedConfiguredPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> Cabal.BuildFlags +setupHsBuildFlags _ _ verbosity builddir = + Cabal.BuildFlags { + buildProgramPaths = mempty, --unused, set at configure time + buildProgramArgs = mempty, --unused, set at configure time + buildVerbosity = toFlag verbosity, + buildDistPref = toFlag builddir, + buildNumJobs = mempty, --TODO: [nice to have] sometimes want to use toFlag (Just numBuildJobs), + buildArgs = mempty, -- unused, passed via args not flags + buildCabalFilePath= mempty + } + + +setupHsBuildArgs :: ElaboratedConfiguredPackage -> [String] +setupHsBuildArgs elab@(ElaboratedConfiguredPackage { elabPkgOrComp = ElabPackage _ }) + -- Fix for #3335, don't pass build arguments if it's not supported + | elabSetupScriptCliVersion elab >= mkVersion [1,17] + = map (showComponentTarget (packageId elab)) (elabBuildTargets elab) + | otherwise + = [] +setupHsBuildArgs (ElaboratedConfiguredPackage { elabPkgOrComp = ElabComponent _ }) + = [] + + +setupHsTestFlags :: ElaboratedConfiguredPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> Cabal.TestFlags +setupHsTestFlags _ _ verbosity builddir = Cabal.TestFlags + { testDistPref = toFlag builddir + , testVerbosity = toFlag verbosity + , testMachineLog = mempty + , testHumanLog = mempty + 
, testShowDetails = toFlag Cabal.Always + , testKeepTix = mempty + , testOptions = mempty + } + +setupHsTestArgs :: ElaboratedConfiguredPackage -> [String] +-- TODO: Does the issue #3335 affects test as well +setupHsTestArgs elab = + mapMaybe (showTestComponentTarget (packageId elab)) (elabTestTargets elab) + + +setupHsBenchFlags :: ElaboratedConfiguredPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> Cabal.BenchmarkFlags +setupHsBenchFlags _ _ verbosity builddir = Cabal.BenchmarkFlags + { benchmarkDistPref = toFlag builddir + , benchmarkVerbosity = toFlag verbosity + , benchmarkOptions = mempty + } + +setupHsBenchArgs :: ElaboratedConfiguredPackage -> [String] +setupHsBenchArgs elab = + mapMaybe (showBenchComponentTarget (packageId elab)) (elabBenchTargets elab) + + +setupHsReplFlags :: ElaboratedConfiguredPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> Cabal.ReplFlags +setupHsReplFlags _ sharedConfig verbosity builddir = + Cabal.ReplFlags { + replProgramPaths = mempty, --unused, set at configure time + replProgramArgs = mempty, --unused, set at configure time + replVerbosity = toFlag verbosity, + replDistPref = toFlag builddir, + replReload = mempty, --only used as callback from repl + replReplOptions = pkgConfigReplOptions sharedConfig --runtime override for repl flags + } + + +setupHsReplArgs :: ElaboratedConfiguredPackage -> [String] +setupHsReplArgs elab = + maybe [] (\t -> [showComponentTarget (packageId elab) t]) (elabReplTarget elab) + --TODO: should be able to give multiple modules in one component + + +setupHsCopyFlags :: ElaboratedConfiguredPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> FilePath + -> Cabal.CopyFlags +setupHsCopyFlags _ _ verbosity builddir destdir = + Cabal.CopyFlags { + copyArgs = [], -- TODO: could use this to only copy what we enabled + copyDest = toFlag (InstallDirs.CopyTo destdir), + copyDistPref = toFlag builddir, + copyVerbosity = toFlag verbosity, + 
copyCabalFilePath = mempty + } + +setupHsRegisterFlags :: ElaboratedConfiguredPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> FilePath + -> Cabal.RegisterFlags +setupHsRegisterFlags ElaboratedConfiguredPackage{..} _ + verbosity builddir pkgConfFile = + Cabal.RegisterFlags { + regPackageDB = mempty, -- misfeature + regGenScript = mempty, -- never use + regGenPkgConf = toFlag (Just pkgConfFile), + regInPlace = case elabBuildStyle of + BuildInplaceOnly -> toFlag True + _ -> toFlag False, + regPrintId = mempty, -- never use + regDistPref = toFlag builddir, + regArgs = [], + regVerbosity = toFlag verbosity, + regCabalFilePath = mempty + } + +setupHsHaddockFlags :: ElaboratedConfiguredPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> Cabal.HaddockFlags +setupHsHaddockFlags (ElaboratedConfiguredPackage{..}) _ verbosity builddir = + Cabal.HaddockFlags { + haddockProgramPaths = mempty, --unused, set at configure time + haddockProgramArgs = mempty, --unused, set at configure time + haddockHoogle = toFlag elabHaddockHoogle, + haddockHtml = toFlag elabHaddockHtml, + haddockHtmlLocation = maybe mempty toFlag elabHaddockHtmlLocation, + haddockForHackage = toFlag elabHaddockForHackage, + haddockForeignLibs = toFlag elabHaddockForeignLibs, + haddockExecutables = toFlag elabHaddockExecutables, + haddockTestSuites = toFlag elabHaddockTestSuites, + haddockBenchmarks = toFlag elabHaddockBenchmarks, + haddockInternal = toFlag elabHaddockInternal, + haddockCss = maybe mempty toFlag elabHaddockCss, + haddockLinkedSource = toFlag elabHaddockLinkedSource, + haddockQuickJump = toFlag elabHaddockQuickJump, + haddockHscolourCss = maybe mempty toFlag elabHaddockHscolourCss, + haddockContents = maybe mempty toFlag elabHaddockContents, + haddockDistPref = toFlag builddir, + haddockKeepTempFiles = mempty, --TODO: from build settings + haddockVerbosity = toFlag verbosity, + haddockCabalFilePath = mempty, + haddockArgs = mempty + } + +setupHsHaddockArgs :: 
ElaboratedConfiguredPackage -> [String] +-- TODO: Does the issue #3335 affects test as well +setupHsHaddockArgs elab = + map (showComponentTarget (packageId elab)) (elabHaddockTargets elab) + +{- +setupHsTestFlags :: ElaboratedConfiguredPackage + -> ElaboratedSharedConfig + -> Verbosity + -> FilePath + -> Cabal.TestFlags +setupHsTestFlags _ _ verbosity builddir = + Cabal.TestFlags { + } +-} + +------------------------------------------------------------------------------ +-- * Sharing installed packages +------------------------------------------------------------------------------ + +-- +-- Nix style store management for tarball packages +-- +-- So here's our strategy: +-- +-- We use a per-user nix-style hashed store, but /only/ for tarball packages. +-- So that includes packages from hackage repos (and other http and local +-- tarballs). For packages in local directories we do not register them into +-- the shared store by default, we just build them locally inplace. +-- +-- The reason we do it like this is that it's easy to make stable hashes for +-- tarball packages, and these packages benefit most from sharing. By contrast +-- unpacked dir packages are harder to hash and they tend to change more +-- frequently so there's less benefit to sharing them. +-- +-- When using the nix store approach we have to run the solver *without* +-- looking at the packages installed in the store, just at the source packages +-- (plus core\/global installed packages). Then we do a post-processing pass +-- to replace configured packages in the plan with pre-existing ones, where +-- possible. Where possible of course means where the nix-style package hash +-- equals one that's already in the store. +-- +-- One extra wrinkle is that unless we know package tarball hashes upfront, we +-- will have to download the tarballs to find their hashes. 
So we have two +-- options: delay replacing source with pre-existing installed packages until +-- the point during the execution of the install plan where we have the +-- tarball, or try to do as much up-front as possible and then check again +-- during plan execution. The former isn't great because we would end up +-- telling users we're going to re-install loads of packages when in fact we +-- would just share them. It'd be better to give as accurate a prediction as +-- we can. The latter is better for users, but we do still have to check +-- during plan execution because it's important that we don't replace existing +-- installed packages even if they have the same package hash, because we +-- don't guarantee ABI stability. + +-- TODO: [required eventually] for safety of concurrent installs, we must make sure we register but +-- not replace installed packages with ghc-pkg. + +packageHashInputs :: ElaboratedSharedConfig + -> ElaboratedConfiguredPackage + -> PackageHashInputs +packageHashInputs + pkgshared + elab@(ElaboratedConfiguredPackage { + elabPkgSourceHash = Just srchash + }) = + PackageHashInputs { + pkgHashPkgId = packageId elab, + pkgHashComponent = + case elabPkgOrComp elab of + ElabPackage _ -> Nothing + ElabComponent comp -> Just (compSolverName comp), + pkgHashSourceHash = srchash, + pkgHashPkgConfigDeps = Set.fromList (elabPkgConfigDependencies elab), + pkgHashDirectDeps = + case elabPkgOrComp elab of + ElabPackage (ElaboratedPackage{..}) -> + Set.fromList $ + [ confInstId dep + | dep <- CD.select relevantDeps pkgLibDependencies ] ++ + [ confInstId dep + | dep <- CD.select relevantDeps pkgExeDependencies ] + ElabComponent comp -> + Set.fromList (map confInstId (compLibDependencies comp + ++ compExeDependencies comp)), + pkgHashOtherConfig = packageHashConfigInputs pkgshared elab + } + where + -- Obviously the main deps are relevant + relevantDeps CD.ComponentLib = True + relevantDeps (CD.ComponentSubLib _) = True + relevantDeps (CD.ComponentFLib _) 
= True + relevantDeps (CD.ComponentExe _) = True + -- Setup deps can affect the Setup.hs behaviour and thus what is built + relevantDeps CD.ComponentSetup = True + -- However testsuites and benchmarks do not get installed and should not + -- affect the result, so we do not include them. + relevantDeps (CD.ComponentTest _) = False + relevantDeps (CD.ComponentBench _) = False + +packageHashInputs _ pkg = + error $ "packageHashInputs: only for packages with source hashes. " + ++ display (packageId pkg) + +packageHashConfigInputs :: ElaboratedSharedConfig + -> ElaboratedConfiguredPackage + -> PackageHashConfigInputs +packageHashConfigInputs shared@ElaboratedSharedConfig{..} pkg = + PackageHashConfigInputs { + pkgHashCompilerId = compilerId pkgConfigCompiler, + pkgHashPlatform = pkgConfigPlatform, + pkgHashFlagAssignment = elabFlagAssignment, + pkgHashConfigureScriptArgs = elabConfigureScriptArgs, + pkgHashVanillaLib = elabVanillaLib, + pkgHashSharedLib = elabSharedLib, + pkgHashDynExe = elabDynExe, + pkgHashGHCiLib = elabGHCiLib, + pkgHashProfLib = elabProfLib, + pkgHashProfExe = elabProfExe, + pkgHashProfLibDetail = elabProfLibDetail, + pkgHashProfExeDetail = elabProfExeDetail, + pkgHashCoverage = elabCoverage, + pkgHashOptimization = elabOptimization, + pkgHashSplitSections = elabSplitSections, + pkgHashSplitObjs = elabSplitObjs, + pkgHashStripLibs = elabStripLibs, + pkgHashStripExes = elabStripExes, + pkgHashDebugInfo = elabDebugInfo, + pkgHashProgramArgs = elabProgramArgs, + pkgHashExtraLibDirs = elabExtraLibDirs, + pkgHashExtraFrameworkDirs = elabExtraFrameworkDirs, + pkgHashExtraIncludeDirs = elabExtraIncludeDirs, + pkgHashProgPrefix = elabProgPrefix, + pkgHashProgSuffix = elabProgSuffix, + + pkgHashDocumentation = elabBuildHaddocks, + pkgHashHaddockHoogle = elabHaddockHoogle, + pkgHashHaddockHtml = elabHaddockHtml, + pkgHashHaddockHtmlLocation = elabHaddockHtmlLocation, + pkgHashHaddockForeignLibs = elabHaddockForeignLibs, + pkgHashHaddockExecutables = 
elabHaddockExecutables, + pkgHashHaddockTestSuites = elabHaddockTestSuites, + pkgHashHaddockBenchmarks = elabHaddockBenchmarks, + pkgHashHaddockInternal = elabHaddockInternal, + pkgHashHaddockCss = elabHaddockCss, + pkgHashHaddockLinkedSource = elabHaddockLinkedSource, + pkgHashHaddockQuickJump = elabHaddockQuickJump, + pkgHashHaddockContents = elabHaddockContents + } + where + ElaboratedConfiguredPackage{..} = normaliseConfiguredPackage shared pkg + +-- | Given the 'InstalledPackageIndex' for a nix-style package store, and an +-- 'ElaboratedInstallPlan', replace configured source packages by installed +-- packages from the store whenever they exist. +-- +improveInstallPlanWithInstalledPackages :: Set UnitId + -> ElaboratedInstallPlan + -> ElaboratedInstallPlan +improveInstallPlanWithInstalledPackages installedPkgIdSet = + InstallPlan.installed canPackageBeImproved + where + canPackageBeImproved pkg = + installedUnitId pkg `Set.member` installedPkgIdSet + --TODO: sanity checks: + -- * the installed package must have the expected deps etc + -- * the installed package must not be broken, valid dep closure + + --TODO: decide what to do if we encounter broken installed packages, + -- since overwriting is never safe. + + +-- Path construction +------ + +-- | The path to the directory that contains a specific executable. +-- NB: For inplace NOT InstallPaths.bindir installDirs; for an +-- inplace build those values are utter nonsense. So we +-- have to guess where the directory is going to be. +-- Fortunately this is "stable" part of Cabal API. +-- But the way we get the build directory is A HORRIBLE +-- HACK. +binDirectoryFor + :: DistDirLayout + -> ElaboratedSharedConfig + -> ElaboratedConfiguredPackage + -> FilePath + -> FilePath +binDirectoryFor layout config package exe = case elabBuildStyle package of + BuildAndInstall -> installedBinDirectory package + BuildInplaceOnly -> inplaceBinRoot layout config package exe + +-- package has been built and installed. 
+installedBinDirectory :: ElaboratedConfiguredPackage -> FilePath +installedBinDirectory = InstallDirs.bindir . elabInstallDirs + +-- | The path to the @build@ directory for an inplace build. +inplaceBinRoot + :: DistDirLayout + -> ElaboratedSharedConfig + -> ElaboratedConfiguredPackage + -> FilePath +inplaceBinRoot layout config package + = distBuildDirectory layout (elabDistDirParams config package) + "build" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectPlanOutput.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectPlanOutput.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/ProjectPlanOutput.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/ProjectPlanOutput.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,938 @@ +{-# LANGUAGE BangPatterns, RecordWildCards, NamedFieldPuns, + DeriveGeneric, DeriveDataTypeable, GeneralizedNewtypeDeriving, + ScopedTypeVariables #-} + +module Distribution.Client.ProjectPlanOutput ( + -- * Plan output + writePlanExternalRepresentation, + + -- * Project status + -- | Several outputs rely on having a general overview of + PostBuildProjectStatus(..), + updatePostBuildProjectStatus, + createPackageEnvironment, + writePlanGhcEnvironment, + argsEquivalentOfGhcEnvironmentFile, + ) where + +import Distribution.Client.ProjectPlanning.Types +import Distribution.Client.ProjectBuilding.Types +import Distribution.Client.DistDirLayout +import Distribution.Client.Types (Repo(..), RemoteRepo(..), PackageLocation(..), confInstId) +import Distribution.Client.PackageHash (showHashValue, hashValue) + +import qualified Distribution.Client.InstallPlan as InstallPlan +import qualified Distribution.Client.Utils.Json as J +import qualified Distribution.Simple.InstallDirs as InstallDirs + +import qualified 
Distribution.Solver.Types.ComponentDeps as ComponentDeps + +import Distribution.Package +import Distribution.System +import Distribution.InstalledPackageInfo (InstalledPackageInfo) +import qualified Distribution.PackageDescription as PD +import Distribution.Compiler (CompilerFlavor(GHC, GHCJS)) +import Distribution.Simple.Compiler + ( PackageDBStack, PackageDB(..) + , compilerVersion, compilerFlavor, showCompilerId + , compilerId, CompilerId(..), Compiler ) +import Distribution.Simple.GHC + ( getImplInfo, GhcImplInfo(supportsPkgEnvFiles) + , GhcEnvironmentFileEntry(..), simpleGhcEnvironmentFile + , writeGhcEnvironmentFile ) +import Distribution.Text +import qualified Distribution.Compat.Graph as Graph +import Distribution.Compat.Graph (Graph, Node) +import qualified Distribution.Compat.Binary as Binary +import Distribution.Simple.Utils +import Distribution.Verbosity +import qualified Paths_cabal_install as Our (version) + +import Prelude () +import Distribution.Client.Compat.Prelude + +import qualified Data.Map as Map +import Data.Set (Set) +import qualified Data.Set as Set +import qualified Data.ByteString.Lazy as BS +import qualified Data.ByteString.Builder as BB + +import System.FilePath +import System.IO + +import Distribution.Simple.Program.GHC (packageDbArgsDb) + +----------------------------------------------------------------------------- +-- Writing plan.json files +-- + +-- | Write out a representation of the elaborated install plan. +-- +-- This is for the benefit of debugging and external tools like editors. +-- +writePlanExternalRepresentation :: DistDirLayout + -> ElaboratedInstallPlan + -> ElaboratedSharedConfig + -> IO () +writePlanExternalRepresentation distDirLayout elaboratedInstallPlan + elaboratedSharedConfig = + writeFileAtomic (distProjectCacheFile distDirLayout "plan.json") $ + BB.toLazyByteString + . 
J.encodeToBuilder + $ encodePlanAsJson distDirLayout elaboratedInstallPlan elaboratedSharedConfig + +-- | Renders a subset of the elaborated install plan in a semi-stable JSON +-- format. +-- +encodePlanAsJson :: DistDirLayout -> ElaboratedInstallPlan -> ElaboratedSharedConfig -> J.Value +encodePlanAsJson distDirLayout elaboratedInstallPlan elaboratedSharedConfig = + --TODO: [nice to have] include all of the sharedPackageConfig and all of + -- the parts of the elaboratedInstallPlan + J.object [ "cabal-version" J..= jdisplay Our.version + , "cabal-lib-version" J..= jdisplay cabalVersion + , "compiler-id" J..= (J.String . showCompilerId . pkgConfigCompiler) + elaboratedSharedConfig + , "os" J..= jdisplay os + , "arch" J..= jdisplay arch + , "install-plan" J..= installPlanToJ elaboratedInstallPlan + ] + where + Platform arch os = pkgConfigPlatform elaboratedSharedConfig + + installPlanToJ :: ElaboratedInstallPlan -> [J.Value] + installPlanToJ = map planPackageToJ . InstallPlan.toList + + planPackageToJ :: ElaboratedPlanPackage -> J.Value + planPackageToJ pkg = + case pkg of + InstallPlan.PreExisting ipi -> installedPackageInfoToJ ipi + InstallPlan.Configured elab -> elaboratedPackageToJ False elab + InstallPlan.Installed elab -> elaboratedPackageToJ True elab + -- Note that the plan.json currently only uses the elaborated plan, + -- not the improved plan. So we will not get the Installed state for + -- that case, but the code supports it in case we want to use this + -- later in some use case where we want the status of the build. + + installedPackageInfoToJ :: InstalledPackageInfo -> J.Value + installedPackageInfoToJ ipi = + -- Pre-existing packages lack configuration information such as their flag + -- settings or non-lib components. We only get pre-existing packages for + -- the global/core packages however, so this isn't generally a problem. + -- So these packages are never local to the project. 
+ -- + J.object + [ "type" J..= J.String "pre-existing" + , "id" J..= (jdisplay . installedUnitId) ipi + , "pkg-name" J..= (jdisplay . pkgName . packageId) ipi + , "pkg-version" J..= (jdisplay . pkgVersion . packageId) ipi + , "depends" J..= map jdisplay (installedDepends ipi) + ] + + elaboratedPackageToJ :: Bool -> ElaboratedConfiguredPackage -> J.Value + elaboratedPackageToJ isInstalled elab = + J.object $ + [ "type" J..= J.String (if isInstalled then "installed" + else "configured") + , "id" J..= (jdisplay . installedUnitId) elab + , "pkg-name" J..= (jdisplay . pkgName . packageId) elab + , "pkg-version" J..= (jdisplay . pkgVersion . packageId) elab + , "flags" J..= J.object [ PD.unFlagName fn J..= v + | (fn,v) <- PD.unFlagAssignment (elabFlagAssignment elab) ] + , "style" J..= J.String (style2str (elabLocalToProject elab) (elabBuildStyle elab)) + , "pkg-src" J..= packageLocationToJ (elabPkgSourceLocation elab) + ] ++ + [ "pkg-cabal-sha256" J..= J.String (showHashValue hash) + | Just hash <- [ fmap hashValue (elabPkgDescriptionOverride elab) ] ] ++ + [ "pkg-src-sha256" J..= J.String (showHashValue hash) + | Just hash <- [elabPkgSourceHash elab] ] ++ + (case elabBuildStyle elab of + BuildInplaceOnly -> + ["dist-dir" J..= J.String dist_dir] + BuildAndInstall -> + -- TODO: install dirs? + [] + ) ++ + case elabPkgOrComp elab of + ElabPackage pkg -> + let components = J.object $ + [ comp2str c J..= (J.object $ + [ "depends" J..= map (jdisplay . confInstId) ldeps + , "exe-depends" J..= map (jdisplay . confInstId) edeps + ] ++ + bin_file c) + | (c,(ldeps,edeps)) + <- ComponentDeps.toList $ + ComponentDeps.zip (pkgLibDependencies pkg) + (pkgExeDependencies pkg) ] + in ["components" J..= components] + ElabComponent comp -> + ["depends" J..= map (jdisplay . 
confInstId) (elabLibDependencies elab) + ,"exe-depends" J..= map jdisplay (elabExeDependencies elab) + ,"component-name" J..= J.String (comp2str (compSolverName comp)) + ] ++ + bin_file (compSolverName comp) + where + packageLocationToJ :: PackageLocation (Maybe FilePath) -> J.Value + packageLocationToJ pkgloc = + case pkgloc of + LocalUnpackedPackage local -> + J.object [ "type" J..= J.String "local" + , "path" J..= J.String local + ] + LocalTarballPackage local -> + J.object [ "type" J..= J.String "local-tar" + , "path" J..= J.String local + ] + RemoteTarballPackage uri _ -> + J.object [ "type" J..= J.String "remote-tar" + , "uri" J..= J.String (show uri) + ] + RepoTarballPackage repo _ _ -> + J.object [ "type" J..= J.String "repo-tar" + , "repo" J..= repoToJ repo + ] + RemoteSourceRepoPackage srcRepo _ -> + J.object [ "type" J..= J.String "source-repo" + , "source-repo" J..= sourceRepoToJ srcRepo + ] + + repoToJ :: Repo -> J.Value + repoToJ repo = + case repo of + RepoLocal{..} -> + J.object [ "type" J..= J.String "local-repo" + , "path" J..= J.String repoLocalDir + ] + RepoRemote{..} -> + J.object [ "type" J..= J.String "remote-repo" + , "uri" J..= J.String (show (remoteRepoURI repoRemote)) + ] + RepoSecure{..} -> + J.object [ "type" J..= J.String "secure-repo" + , "uri" J..= J.String (show (remoteRepoURI repoRemote)) + ] + + sourceRepoToJ :: PD.SourceRepo -> J.Value + sourceRepoToJ PD.SourceRepo{..} = + J.object $ filter ((/= J.Null) . 
snd) $ + [ "type" J..= fmap jdisplay repoType + , "location" J..= fmap J.String repoLocation + , "module" J..= fmap J.String repoModule + , "branch" J..= fmap J.String repoBranch + , "tag" J..= fmap J.String repoTag + , "subdir" J..= fmap J.String repoSubdir + ] + + dist_dir = distBuildDirectory distDirLayout + (elabDistDirParams elaboratedSharedConfig elab) + + bin_file c = case c of + ComponentDeps.ComponentExe s -> bin_file' s + ComponentDeps.ComponentTest s -> bin_file' s + ComponentDeps.ComponentBench s -> bin_file' s + _ -> [] + bin_file' s = + ["bin-file" J..= J.String bin] + where + bin = if elabBuildStyle elab == BuildInplaceOnly + then dist_dir "build" display s display s + else InstallDirs.bindir (elabInstallDirs elab) display s + + -- TODO: maybe move this helper to "ComponentDeps" module? + -- Or maybe define a 'Text' instance? + comp2str :: ComponentDeps.Component -> String + comp2str c = case c of + ComponentDeps.ComponentLib -> "lib" + ComponentDeps.ComponentSubLib s -> "lib:" <> display s + ComponentDeps.ComponentFLib s -> "flib:" <> display s + ComponentDeps.ComponentExe s -> "exe:" <> display s + ComponentDeps.ComponentTest s -> "test:" <> display s + ComponentDeps.ComponentBench s -> "bench:" <> display s + ComponentDeps.ComponentSetup -> "setup" + + style2str :: Bool -> BuildStyle -> String + style2str True _ = "local" + style2str False BuildInplaceOnly = "inplace" + style2str False BuildAndInstall = "global" + + jdisplay :: Text a => a -> J.Value + jdisplay = J.String . display + + +----------------------------------------------------------------------------- +-- Project status +-- + +-- So, what is the status of a project after a build? That is, how do the +-- inputs (package source files etc) compare to the output artefacts (build +-- libs, exes etc)? Do the outputs reflect the current values of the inputs +-- or are outputs out of date or invalid? +-- +-- First of all, what do we mean by out-of-date and what do we mean by +-- invalid? 
We think of the build system as a morally pure function that +-- computes the output artefacts given input values. We say an output artefact +-- is out of date when its value is not the value that would be computed by a +-- build given the current values of the inputs. An output artefact can be +-- out-of-date but still be perfectly usable; it simply correspond to a +-- previous state of the inputs. +-- +-- On the other hand there are cases where output artefacts cannot safely be +-- used. For example libraries and dynamically linked executables cannot be +-- used when the libs they depend on change without them being recompiled +-- themselves. Whether an artefact is still usable depends on what it is, e.g. +-- dynamically linked vs statically linked and on how it gets updated (e.g. +-- only atomically on success or if failure can leave invalid states). We need +-- a definition (or two) that is independent of the kind of artefact and can +-- be computed just in terms of changes in package graphs, but are still +-- useful for determining when particular kinds of artefacts are invalid. +-- +-- Note that when we talk about packages in this context we just mean nodes +-- in the elaborated install plan, which can be components or packages. +-- +-- There's obviously a close connection between packages being out of date and +-- their output artefacts being unusable: most of the time if a package +-- remains out of date at the end of a build then some of its output artefacts +-- will be unusable. That is true most of the time because a build will have +-- attempted to build one of the out-of-date package's dependencies. If the +-- build of the dependency succeeded then it changed output artefacts (like +-- libs) and if it failed then it may have failed after already changing +-- things (think failure after updating some but not all .hi files). 
+-- +-- There are a few reasons we may end up with still-usable output artefacts +-- for a package even when it remains out of date at the end of a build. +-- Firstly if executing a plan fails then packages can be skipped, and thus we +-- may have packages where all their dependencies were skipped. Secondly we +-- have artefacts like statically linked executables which are not affected by +-- libs they depend on being recompiled. Furthermore, packages can be out of +-- date due to changes in build tools or Setup.hs scripts they depend on, but +-- again libraries or executables in those out-of-date packages remain usable. +-- +-- So we have two useful definitions of invalid. Both are useful, for +-- different purposes, so we will compute both. The first corresponds to the +-- invalid libraries and dynamic executables. We say a package is invalid by +-- changed deps if any of the packages it depends on (via library dep edges) +-- were rebuilt (successfully or unsuccessfully). The second definition +-- corresponds to invalid static executables. We say a package is invalid by +-- a failed build simply if the package was built but unsuccessfully. +-- +-- So how do we find out what packages are out of date or invalid? +-- +-- Obviously we know something for all the packages that were part of the plan +-- that was executed, but that is just a subset since we prune the plan down +-- to the targets and their dependencies. +-- +-- Recall the steps we go though: +-- +-- + starting with the initial improved plan (this is the full project); +-- +-- + prune the plan to the user's build targets; +-- +-- + rebuildTargetsDryRun on the pruned plan giving us a BuildStatusMap +-- covering the pruned subset of the original plan; +-- +-- + execute the plan giving us BuildOutcomes which tell us success/failure +-- for each package. +-- +-- So given that the BuildStatusMap and BuildOutcomes do not cover everything +-- in the original plan, what can they tell us about the original plan? 
+-- +-- The BuildStatusMap tells us directly that some packages are up to date and +-- others out of date (but only for the pruned subset). But we know that +-- everything that is a reverse dependency of an out-of-date package is itself +-- out-of-date (whether or not it is in the pruned subset). Of course after +-- a build the BuildOutcomes may tell us that some of those out-of-date +-- packages are now up to date (ie a successful build outcome). +-- +-- The difference is packages that are reverse dependencies of out-of-date +-- packages but are not brought up-to-date by the build (i.e. did not have +-- successful outcomes, either because they failed or were not in the pruned +-- subset to be built). We also know which packages were rebuilt, so we can +-- use this to find the now-invalid packages. +-- +-- Note that there are still packages for which we cannot discover full status +-- information. There may be packages outside of the pruned plan that do not +-- depend on packages within the pruned plan that were discovered to be +-- out-of-date. For these packages we do not know if their build artefacts +-- are out-of-date or not. We do know however that they are not invalid, as +-- that's not possible given our definition of invalid. Intuitively it is +-- because we have not disturbed anything that these packages depend on, e.g. +-- we've not rebuilt any libs they depend on. Recall that our widest +-- definition of invalid was only concerned about dependencies on libraries +-- (to cover problems like shared libs or GHC seeing inconsistent .hi files). +-- +-- So our algorithm for out-of-date packages is relatively simple: take the +-- reverse dependency closure in the original improved plan (pre-pruning) of +-- the out-of-date packages (as determined by the BuildStatusMap from the dry +-- run). That gives a set of packages that were definitely out of date after +-- the dry run. 
Now we remove from this set the packages that the +-- BuildOutcomes tells us are now up-to-date after the build. The remaining +-- set is the out-of-date packages. +-- +-- As for packages that are invalid by changed deps, we start with the plan +-- dependency graph but keep only those edges that point to libraries (so +-- ignoring deps on exes and setup scripts). We take the packages for which a +-- build was attempted (successfully or unsuccessfully, but not counting +-- knock-on failures) and take the reverse dependency closure. We delete from +-- this set all the packages that were built successfully. Note that we do not +-- need to intersect with the out-of-date packages since this follows +-- automatically: all rev deps of packages we attempted to build must have +-- been out of date at the start of the build, and if they were not built +-- successfully then they're still out of date -- meeting our definition of +-- invalid. + + +type PackageIdSet = Set UnitId +type PackagesUpToDate = PackageIdSet + +data PostBuildProjectStatus = PostBuildProjectStatus { + + -- | Packages that are known to be up to date. These were found to be + -- up to date before the build, or they have a successful build outcome + -- afterwards. + -- + -- This does not include any packages outside of the subset of the plan + -- that was executed because we did not check those and so don't know + -- for sure that they're still up to date. + -- + packagesDefinitelyUpToDate :: PackageIdSet, + + -- | Packages that are probably still up to date (and at least not + -- known to be out of date, and certainly not invalid). This includes + -- 'packagesDefinitelyUpToDate' plus packages that were up to date + -- previously and are outside of the subset of the plan that was + -- executed. It excludes 'packagesOutOfDate'. + -- + packagesProbablyUpToDate :: PackageIdSet, + + -- | Packages that are known to be out of date. 
These are packages + -- that were determined to be out of date before the build, and they + -- do not have a successful build outcome afterwards. + -- + -- Note that this can sometimes include packages outside of the subset + -- of the plan that was executed. For example suppose package A and B + -- depend on C, and A is the target so only A and C are in the subset + -- to be built. Now suppose C is found to have changed, then both A + -- and B are out-of-date before the build and since B is outside the + -- subset to be built then it will remain out of date. + -- + -- Note also that this is /not/ the inverse of + -- 'packagesDefinitelyUpToDate' or 'packagesProbablyUpToDate'. + -- There are packages where we have no information (ones that were not + -- in the subset of the plan that was executed). + -- + packagesOutOfDate :: PackageIdSet, + + -- | Packages that depend on libraries that have changed during the + -- build (either build success or failure). + -- + -- This corresponds to the fact that libraries and dynamic executables + -- are invalid once any of the libs they depend on change. + -- + -- This does include packages that themselves failed (i.e. it is a + -- superset of 'packagesInvalidByFailedBuild'). It does not include + -- changes in dependencies on executables (i.e. build tools). + -- + packagesInvalidByChangedLibDeps :: PackageIdSet, + + -- | Packages that themselves failed during the build (i.e. them + -- directly not a dep). + -- + -- This corresponds to the fact that static executables are invalid + -- in unlucky circumstances such as linking failing half way though, + -- or data file generation failing. + -- + -- This is a subset of 'packagesInvalidByChangedLibDeps'. + -- + packagesInvalidByFailedBuild :: PackageIdSet, + + -- | A subset of the plan graph, including only dependency-on-library + -- edges. That is, dependencies /on/ libraries, not dependencies /of/ + -- libraries. This tells us all the libraries that packages link to. 
+ -- + -- This is here as a convenience, as strictly speaking it's not status + -- as it's just a function of the original 'ElaboratedInstallPlan'. + -- + packagesLibDepGraph :: Graph (Node UnitId ElaboratedPlanPackage), + + -- | As a convenience for 'Set.intersection' with any of the other + -- 'PackageIdSet's to select only packages that are part of the + -- project locally (i.e. with a local source dir). + -- + packagesBuildLocal :: PackageIdSet, + + -- | As a convenience for 'Set.intersection' with any of the other + -- 'PackageIdSet's to select only packages that are being built + -- in-place within the project (i.e. not destined for the store). + -- + packagesBuildInplace :: PackageIdSet, + + -- | As a convenience for 'Set.intersection' or 'Set.difference' with + -- any of the other 'PackageIdSet's to select only packages that were + -- pre-installed or already in the store prior to the build. + -- + packagesAlreadyInStore :: PackageIdSet + } + +-- | Work out which packages are out of date or invalid after a build. 
+-- +postBuildProjectStatus :: ElaboratedInstallPlan + -> PackagesUpToDate + -> BuildStatusMap + -> BuildOutcomes + -> PostBuildProjectStatus +postBuildProjectStatus plan previousPackagesUpToDate + pkgBuildStatus buildOutcomes = + PostBuildProjectStatus { + packagesDefinitelyUpToDate, + packagesProbablyUpToDate, + packagesOutOfDate, + packagesInvalidByChangedLibDeps, + packagesInvalidByFailedBuild, + -- convenience stuff + packagesLibDepGraph, + packagesBuildLocal, + packagesBuildInplace, + packagesAlreadyInStore + } + where + packagesDefinitelyUpToDate = + packagesUpToDatePreBuild + `Set.union` + packagesSuccessfulPostBuild + + packagesProbablyUpToDate = + packagesDefinitelyUpToDate + `Set.union` + (previousPackagesUpToDate' `Set.difference` packagesOutOfDatePreBuild) + + packagesOutOfDate = + packagesOutOfDatePreBuild `Set.difference` packagesSuccessfulPostBuild + + packagesInvalidByChangedLibDeps = + packagesDepOnChangedLib `Set.difference` packagesSuccessfulPostBuild + + packagesInvalidByFailedBuild = + packagesFailurePostBuild + + -- Note: if any of the intermediate values below turn out to be useful in + -- their own right then we can simply promote them to the result record + + -- The previous set of up-to-date packages will contain bogus package ids + -- when the solver plan or config contributing to the hash changes. + -- So keep only the ones where the package id (i.e. hash) is the same. + previousPackagesUpToDate' = + Set.intersection + previousPackagesUpToDate + (InstallPlan.keysSet plan) + + packagesUpToDatePreBuild = + Set.filter + (\ipkgid -> not (lookupBuildStatusRequiresBuild True ipkgid)) + -- For packages not in the plan subset we did the dry-run on we don't + -- know anything about their status, so not known to be /up to date/. + (InstallPlan.keysSet plan) + + packagesOutOfDatePreBuild = + Set.fromList . 
map installedUnitId $ + InstallPlan.reverseDependencyClosure plan + [ ipkgid + | pkg <- InstallPlan.toList plan + , let ipkgid = installedUnitId pkg + , lookupBuildStatusRequiresBuild False ipkgid + -- For packages not in the plan subset we did the dry-run on we don't + -- know anything about their status, so not known to be /out of date/. + ] + + packagesSuccessfulPostBuild = + Set.fromList + [ ikgid | (ikgid, Right _) <- Map.toList buildOutcomes ] + + -- direct failures, not failures due to deps + packagesFailurePostBuild = + Set.fromList + [ ikgid + | (ikgid, Left failure) <- Map.toList buildOutcomes + , case buildFailureReason failure of + DependentFailed _ -> False + _ -> True + ] + + -- Packages that have a library dependency on a package for which a build + -- was attempted + packagesDepOnChangedLib = + Set.fromList . map Graph.nodeKey $ + fromMaybe (error "packagesBuildStatusAfterBuild: broken dep closure") $ + Graph.revClosure packagesLibDepGraph + ( Map.keys + . Map.filter (uncurry buildAttempted) + $ Map.intersectionWith (,) pkgBuildStatus buildOutcomes + ) + + -- The plan graph but only counting dependency-on-library edges + packagesLibDepGraph :: Graph (Node UnitId ElaboratedPlanPackage) + packagesLibDepGraph = + Graph.fromDistinctList + [ Graph.N pkg (installedUnitId pkg) libdeps + | pkg <- InstallPlan.toList plan + , let libdeps = case pkg of + InstallPlan.PreExisting ipkg -> installedDepends ipkg + InstallPlan.Configured srcpkg -> elabLibDeps srcpkg + InstallPlan.Installed srcpkg -> elabLibDeps srcpkg + ] + elabLibDeps = map (newSimpleUnitId . confInstId) . elabLibDependencies + + -- Was a build was attempted for this package? + -- If it doesn't have both a build status and outcome then the answer is no. + buildAttempted :: BuildStatus -> BuildOutcome -> Bool + -- And not if it didn't need rebuilding in the first place. 
+ buildAttempted buildStatus _buildOutcome + | not (buildStatusRequiresBuild buildStatus) + = False + + -- And not if it was skipped due to a dep failing first. + buildAttempted _ (Left BuildFailure {buildFailureReason}) + | DependentFailed _ <- buildFailureReason + = False + + -- Otherwise, succeeded or failed, yes the build was tried. + buildAttempted _ (Left BuildFailure {}) = True + buildAttempted _ (Right _) = True + + lookupBuildStatusRequiresBuild def ipkgid = + case Map.lookup ipkgid pkgBuildStatus of + Nothing -> def -- Not in the plan subset we did the dry-run on + Just buildStatus -> buildStatusRequiresBuild buildStatus + + packagesBuildLocal = + selectPlanPackageIdSet $ \pkg -> + case pkg of + InstallPlan.PreExisting _ -> False + InstallPlan.Installed _ -> False + InstallPlan.Configured srcpkg -> elabLocalToProject srcpkg + + packagesBuildInplace = + selectPlanPackageIdSet $ \pkg -> + case pkg of + InstallPlan.PreExisting _ -> False + InstallPlan.Installed _ -> False + InstallPlan.Configured srcpkg -> elabBuildStyle srcpkg + == BuildInplaceOnly + + packagesAlreadyInStore = + selectPlanPackageIdSet $ \pkg -> + case pkg of + InstallPlan.PreExisting _ -> True + InstallPlan.Installed _ -> True + InstallPlan.Configured _ -> False + + selectPlanPackageIdSet p = Map.keysSet + . 
Map.filter p + $ InstallPlan.toMap plan + + + +updatePostBuildProjectStatus :: Verbosity + -> DistDirLayout + -> ElaboratedInstallPlan + -> BuildStatusMap + -> BuildOutcomes + -> IO PostBuildProjectStatus +updatePostBuildProjectStatus verbosity distDirLayout + elaboratedInstallPlan + pkgsBuildStatus buildOutcomes = do + + -- Read the previous up-to-date set, update it and write it back + previousUpToDate <- readPackagesUpToDateCacheFile distDirLayout + let currentBuildStatus@PostBuildProjectStatus{..} + = postBuildProjectStatus + elaboratedInstallPlan + previousUpToDate + pkgsBuildStatus + buildOutcomes + let currentUpToDate = packagesProbablyUpToDate + writePackagesUpToDateCacheFile distDirLayout currentUpToDate + + -- Report various possibly interesting things + -- We additionally intersect with the packagesBuildInplace so that + -- we don't show huge numbers of boring packages from the store. + debugNoWrap verbosity $ + "packages definitely up to date: " + ++ displayPackageIdSet (packagesDefinitelyUpToDate + `Set.intersection` packagesBuildInplace) + + debugNoWrap verbosity $ + "packages previously probably up to date: " + ++ displayPackageIdSet (previousUpToDate + `Set.intersection` packagesBuildInplace) + + debugNoWrap verbosity $ + "packages now probably up to date: " + ++ displayPackageIdSet (packagesProbablyUpToDate + `Set.intersection` packagesBuildInplace) + + debugNoWrap verbosity $ + "packages newly up to date: " + ++ displayPackageIdSet (packagesDefinitelyUpToDate + `Set.difference` previousUpToDate + `Set.intersection` packagesBuildInplace) + + debugNoWrap verbosity $ + "packages out to date: " + ++ displayPackageIdSet (packagesOutOfDate + `Set.intersection` packagesBuildInplace) + + debugNoWrap verbosity $ + "packages invalid due to dep change: " + ++ displayPackageIdSet packagesInvalidByChangedLibDeps + + debugNoWrap verbosity $ + "packages invalid due to build failure: " + ++ displayPackageIdSet packagesInvalidByFailedBuild + + return 
currentBuildStatus + where + displayPackageIdSet = intercalate ", " . map display . Set.toList + +-- | Helper for reading the cache file. +-- +-- This determines the type and format of the binary cache file. +-- +readPackagesUpToDateCacheFile :: DistDirLayout -> IO PackagesUpToDate +readPackagesUpToDateCacheFile DistDirLayout{distProjectCacheFile} = + handleDoesNotExist Set.empty $ + handleDecodeFailure $ + withBinaryFile (distProjectCacheFile "up-to-date") ReadMode $ \hnd -> + Binary.decodeOrFailIO =<< BS.hGetContents hnd + where + handleDecodeFailure = fmap (either (const Set.empty) id) + +-- | Helper for writing the package up-to-date cache file. +-- +-- This determines the type and format of the binary cache file. +-- +writePackagesUpToDateCacheFile :: DistDirLayout -> PackagesUpToDate -> IO () +writePackagesUpToDateCacheFile DistDirLayout{distProjectCacheFile} upToDate = + writeFileAtomic (distProjectCacheFile "up-to-date") $ + Binary.encode upToDate + +-- | Prepare a package environment that includes all the library dependencies +-- for a plan. +-- +-- When running cabal new-exec, we want to set things up so that the compiler +-- can find all the right packages (and nothing else). This function is +-- intended to do that work. It takes a location where it can write files +-- temporarily, in case the compiler wants to learn this information via the +-- filesystem, and returns any environment variable overrides the compiler +-- needs. 
+createPackageEnvironment :: Verbosity + -> FilePath + -> ElaboratedInstallPlan + -> ElaboratedSharedConfig + -> PostBuildProjectStatus + -> IO [(String, Maybe String)] +createPackageEnvironment verbosity + path + elaboratedPlan + elaboratedShared + buildStatus + | compilerFlavor (pkgConfigCompiler elaboratedShared) == GHC + = do + envFileM <- writePlanGhcEnvironment + path + elaboratedPlan + elaboratedShared + buildStatus + case envFileM of + Just envFile -> return [("GHC_ENVIRONMENT", Just envFile)] + Nothing -> do + warn verbosity "the configured version of GHC does not support reading package lists from the environment; commands that need the current project's package database are likely to fail" + return [] + | otherwise + = do + warn verbosity "package environment configuration is not supported for the currently configured compiler; commands that need the current project's package database are likely to fail" + return [] + +-- Writing .ghc.environment files +-- + +writePlanGhcEnvironment :: FilePath + -> ElaboratedInstallPlan + -> ElaboratedSharedConfig + -> PostBuildProjectStatus + -> IO (Maybe FilePath) +writePlanGhcEnvironment path + elaboratedInstallPlan + ElaboratedSharedConfig { + pkgConfigCompiler = compiler, + pkgConfigPlatform = platform + } + postBuildStatus + | compilerFlavor compiler == GHC + , supportsPkgEnvFiles (getImplInfo compiler) + --TODO: check ghcjs compat + = fmap Just $ writeGhcEnvironmentFile + path + platform (compilerVersion compiler) + (renderGhcEnvironmentFile path + elaboratedInstallPlan + postBuildStatus) + --TODO: [required eventually] support for writing user-wide package + -- environments, e.g. 
like a global project, but we would not put the + -- env file in the home dir, rather it lives under ~/.ghc/ + +writePlanGhcEnvironment _ _ _ _ = return Nothing + +renderGhcEnvironmentFile :: FilePath + -> ElaboratedInstallPlan + -> PostBuildProjectStatus + -> [GhcEnvironmentFileEntry] +renderGhcEnvironmentFile projectRootDir elaboratedInstallPlan + postBuildStatus = + headerComment + : simpleGhcEnvironmentFile packageDBs unitIds + where + headerComment = + GhcEnvFileComment + $ "This is a GHC environment file written by cabal. This means you can\n" + ++ "run ghc or ghci and get the environment of the project as a whole.\n" + ++ "But you still need to use cabal repl $target to get the environment\n" + ++ "of specific components (libs, exes, tests etc) because each one can\n" + ++ "have its own source dirs, cpp flags etc.\n\n" + unitIds = selectGhcEnvironmentFileLibraries postBuildStatus + packageDBs = relativePackageDBPaths projectRootDir $ + selectGhcEnvironmentFilePackageDbs elaboratedInstallPlan + + +argsEquivalentOfGhcEnvironmentFile + :: Compiler + -> DistDirLayout + -> ElaboratedInstallPlan + -> PostBuildProjectStatus + -> [String] +argsEquivalentOfGhcEnvironmentFile compiler = + case compilerId compiler + of CompilerId GHC _ -> argsEquivalentOfGhcEnvironmentFileGhc + CompilerId GHCJS _ -> argsEquivalentOfGhcEnvironmentFileGhc + CompilerId _ _ -> error "Only GHC and GHCJS are supported" + +-- TODO remove this when we drop support for non-.ghc.env ghc +argsEquivalentOfGhcEnvironmentFileGhc + :: DistDirLayout + -> ElaboratedInstallPlan + -> PostBuildProjectStatus + -> [String] +argsEquivalentOfGhcEnvironmentFileGhc + distDirLayout + elaboratedInstallPlan + postBuildStatus = + clearPackageDbStackFlag + ++ packageDbArgsDb packageDBs + ++ foldMap packageIdFlag packageIds + where + projectRootDir = distProjectRootDirectory distDirLayout + packageIds = selectGhcEnvironmentFileLibraries postBuildStatus + packageDBs = relativePackageDBPaths projectRootDir $ + 
selectGhcEnvironmentFilePackageDbs elaboratedInstallPlan + -- TODO use proper flags? but packageDbArgsDb is private + clearPackageDbStackFlag = ["-clear-package-db", "-global-package-db"] + packageIdFlag uid = ["-package-id", display uid] + + +-- We're producing an environment for users to use in ghci, so of course +-- that means libraries only (can't put exes into the ghc package env!). +-- The library environment should be /consistent/ with the environment +-- that each of the packages in the project use (ie same lib versions). +-- So that means all the normal library dependencies of all the things +-- in the project (including deps of exes that are local to the project). +-- We do not however want to include the dependencies of Setup.hs scripts, +-- since these are generally uninteresting but also they need not in +-- general be consistent with the library versions that packages local to +-- the project use (recall that Setup.hs script's deps can be picked +-- independently of other packages in the project). +-- +-- So, our strategy is as follows: +-- +-- produce a dependency graph of all the packages in the install plan, +-- but only consider normal library deps as edges in the graph. Thus we +-- exclude the dependencies on Setup.hs scripts (in the case of +-- per-component granularity) or of Setup.hs scripts (in the case of +-- per-package granularity). Then take a dependency closure, using as +-- roots all the packages/components local to the project. This will +-- exclude Setup scripts and their dependencies. +-- +-- Note: this algorithm will have to be adapted if/when the install plan +-- is extended to cover multiple compilers at once, and may also have to +-- change if we start to treat unshared deps of test suites in a similar +-- way to how we treat Setup.hs script deps (ie being able to pick them +-- independently). 
+-- +-- Since we had to use all the local packages, including exes, (as roots +-- to find the libs) then those exes still end up in our list so we have +-- to filter them out at the end. +-- +selectGhcEnvironmentFileLibraries :: PostBuildProjectStatus -> [UnitId] +selectGhcEnvironmentFileLibraries PostBuildProjectStatus{..} = + case Graph.closure packagesLibDepGraph (Set.toList packagesBuildLocal) of + Nothing -> error "renderGhcEnvironmentFile: broken dep closure" + Just nodes -> [ pkgid | Graph.N pkg pkgid _ <- nodes + , hasUpToDateLib pkg ] + where + hasUpToDateLib planpkg = case planpkg of + -- A pre-existing global lib + InstallPlan.PreExisting _ -> True + + -- A package in the store. Check it's a lib. + InstallPlan.Installed pkg -> elabRequiresRegistration pkg + + -- A package we were installing this time, either destined for the store + -- or just locally. Check it's a lib and that it is probably up to date. + InstallPlan.Configured pkg -> + elabRequiresRegistration pkg + && installedUnitId pkg `Set.member` packagesProbablyUpToDate + + +selectGhcEnvironmentFilePackageDbs :: ElaboratedInstallPlan -> PackageDBStack +selectGhcEnvironmentFilePackageDbs elaboratedInstallPlan = + -- If we have any inplace packages then their package db stack is the + -- one we should use since it'll include the store + the local db but + -- it's certainly possible to have no local inplace packages + -- e.g. just "extra" packages coming from the store. 
+ case (inplacePackages, sourcePackages) of + ([], pkgs) -> checkSamePackageDBs pkgs + (pkgs, _) -> checkSamePackageDBs pkgs + where + checkSamePackageDBs pkgs = + case ordNub (map elabBuildPackageDBStack pkgs) of + [packageDbs] -> packageDbs + [] -> [] + _ -> error $ "renderGhcEnvironmentFile: packages with " + ++ "different package db stacks" + -- This should not happen at the moment but will happen as soon + -- as we support projects where we build packages with different + -- compilers, at which point we have to consider how to adapt + -- this feature, e.g. write out multiple env files, one for each + -- compiler / project profile. + + inplacePackages = + [ srcpkg + | srcpkg <- sourcePackages + , elabBuildStyle srcpkg == BuildInplaceOnly ] + sourcePackages = + [ srcpkg + | pkg <- InstallPlan.toList elaboratedInstallPlan + , srcpkg <- maybeToList $ case pkg of + InstallPlan.Configured srcpkg -> Just srcpkg + InstallPlan.Installed srcpkg -> Just srcpkg + InstallPlan.PreExisting _ -> Nothing + ] + +relativePackageDBPaths :: FilePath -> PackageDBStack -> PackageDBStack +relativePackageDBPaths relroot = map (relativePackageDBPath relroot) + +relativePackageDBPath :: FilePath -> PackageDB -> PackageDB +relativePackageDBPath relroot pkgdb = + case pkgdb of + GlobalPackageDB -> GlobalPackageDB + UserPackageDB -> UserPackageDB + SpecificPackageDB path -> SpecificPackageDB relpath + where relpath = makeRelative relroot path diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/RebuildMonad.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/RebuildMonad.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/RebuildMonad.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/RebuildMonad.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,311 @@ +{-# LANGUAGE 
GeneralizedNewtypeDeriving, ScopedTypeVariables, BangPatterns #-} + +-- | An abstraction for re-running actions if values or files have changed. +-- +-- This is not a full-blown make-style incremental build system, it's a bit +-- more ad-hoc than that, but it's easier to integrate with existing code. +-- +-- It's a convenient interface to the "Distribution.Client.FileMonitor" +-- functions. +-- +module Distribution.Client.RebuildMonad ( + -- * Rebuild monad + Rebuild, + runRebuild, + execRebuild, + askRoot, + + -- * Setting up file monitoring + monitorFiles, + MonitorFilePath, + monitorFile, + monitorFileHashed, + monitorNonExistentFile, + monitorDirectory, + monitorNonExistentDirectory, + monitorDirectoryExistence, + monitorFileOrDirectory, + monitorFileSearchPath, + monitorFileHashedSearchPath, + -- ** Monitoring file globs + monitorFileGlob, + monitorFileGlobExistence, + FilePathGlob(..), + FilePathRoot(..), + FilePathGlobRel(..), + GlobPiece(..), + + -- * Using a file monitor + FileMonitor(..), + newFileMonitor, + rerunIfChanged, + + -- * Utils + delayInitSharedResource, + delayInitSharedResources, + matchFileGlob, + getDirectoryContentsMonitored, + createDirectoryMonitored, + monitorDirectoryStatus, + doesFileExistMonitored, + need, + needIfExists, + findFileWithExtensionMonitored, + findFirstFileMonitored, + findFileMonitored, + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Client.FileMonitor +import Distribution.Client.Glob hiding (matchFileGlob) +import qualified Distribution.Client.Glob as Glob (matchFileGlob) + +import Distribution.Simple.Utils (debug) +import Distribution.Verbosity (Verbosity) + +import qualified Data.Map.Strict as Map +import Control.Monad.State as State +import Control.Monad.Reader as Reader +import Control.Concurrent.MVar (MVar, newMVar, modifyMVar) +import System.FilePath +import System.Directory + + +-- | A monad layered on top of 'IO' to help with re-running actions when the +-- 
input files and values they depend on change. The crucial operations are +-- 'rerunIfChanged' and 'monitorFiles'. +-- +newtype Rebuild a = Rebuild (ReaderT FilePath (StateT [MonitorFilePath] IO) a) + deriving (Functor, Applicative, Monad, MonadIO) + +-- | Use this wihin the body action of 'rerunIfChanged' to declare that the +-- action depends on the given files. This can be based on what the action +-- actually did. It is these files that will be checked for changes next +-- time 'rerunIfChanged' is called for that 'FileMonitor'. +-- +-- Relative paths are interpreted as relative to an implicit root, ultimately +-- passed in to 'runRebuild'. +-- +monitorFiles :: [MonitorFilePath] -> Rebuild () +monitorFiles filespecs = Rebuild (State.modify (filespecs++)) + +-- | Run a 'Rebuild' IO action. +unRebuild :: FilePath -> Rebuild a -> IO (a, [MonitorFilePath]) +unRebuild rootDir (Rebuild action) = runStateT (runReaderT action rootDir) [] + +-- | Run a 'Rebuild' IO action. +runRebuild :: FilePath -> Rebuild a -> IO a +runRebuild rootDir (Rebuild action) = evalStateT (runReaderT action rootDir) [] + +-- | Run a 'Rebuild' IO action. +execRebuild :: FilePath -> Rebuild a -> IO [MonitorFilePath] +execRebuild rootDir (Rebuild action) = execStateT (runReaderT action rootDir) [] + +-- | The root that relative paths are interpreted as being relative to. +askRoot :: Rebuild FilePath +askRoot = Rebuild Reader.ask + +-- | This captures the standard use pattern for a 'FileMonitor': given a +-- monitor, an action and the input value the action depends on, either +-- re-run the action to get its output, or if the value and files the action +-- depends on have not changed then return a previously cached action result. +-- +-- The result is still in the 'Rebuild' monad, so these can be nested. +-- +-- Do not share 'FileMonitor's between different uses of 'rerunIfChanged'. 
+-- +rerunIfChanged :: (Binary a, Binary b) + => Verbosity + -> FileMonitor a b + -> a + -> Rebuild b + -> Rebuild b +rerunIfChanged verbosity monitor key action = do + rootDir <- askRoot + changed <- liftIO $ checkFileMonitorChanged monitor rootDir key + case changed of + MonitorUnchanged result files -> do + liftIO $ debug verbosity $ "File monitor '" ++ monitorName + ++ "' unchanged." + monitorFiles files + return result + + MonitorChanged reason -> do + liftIO $ debug verbosity $ "File monitor '" ++ monitorName + ++ "' changed: " ++ showReason reason + startTime <- liftIO $ beginUpdateFileMonitor + (result, files) <- liftIO $ unRebuild rootDir action + liftIO $ updateFileMonitor monitor rootDir + (Just startTime) files key result + monitorFiles files + return result + where + monitorName = takeFileName (fileMonitorCacheFile monitor) + + showReason (MonitoredFileChanged file) = "file " ++ file + showReason (MonitoredValueChanged _) = "monitor value changed" + showReason MonitorFirstRun = "first run" + showReason MonitorCorruptCache = "invalid cache file" + + +-- | When using 'rerunIfChanged' for each element of a list of actions, it is +-- sometimes the case that each action needs to make use of some resource. e.g. +-- +-- > sequence +-- > [ rerunIfChanged verbosity monitor key $ do +-- > resource <- mkResource +-- > ... -- use the resource +-- > | ... ] +-- +-- For efficiency one would like to share the resource between the actions +-- but the straightforward way of doing this means initialising it every time +-- even when no actions need re-running. +-- +-- > resource <- mkResource +-- > sequence +-- > [ rerunIfChanged verbosity monitor key $ do +-- > ... -- use the resource +-- > | ... ] +-- +-- This utility allows one to get the best of both worlds: +-- +-- > getResource <- delayInitSharedResource mkResource +-- > sequence +-- > [ rerunIfChanged verbosity monitor key $ do +-- > resource <- getResource +-- > ... -- use the resource +-- > | ... 
] +-- +delayInitSharedResource :: forall a. IO a -> Rebuild (Rebuild a) +delayInitSharedResource action = do + var <- liftIO (newMVar Nothing) + return (liftIO (getOrInitResource var)) + where + getOrInitResource :: MVar (Maybe a) -> IO a + getOrInitResource var = + modifyMVar var $ \mx -> + case mx of + Just x -> return (Just x, x) + Nothing -> do + x <- action + return (Just x, x) + + +-- | Much like 'delayInitSharedResource' but for a keyed set of resources. +-- +-- > getResource <- delayInitSharedResource mkResource +-- > sequence +-- > [ rerunIfChanged verbosity monitor key $ do +-- > resource <- getResource key +-- > ... -- use the resource +-- > | ... ] +-- +delayInitSharedResources :: forall k v. Ord k + => (k -> IO v) + -> Rebuild (k -> Rebuild v) +delayInitSharedResources action = do + var <- liftIO (newMVar Map.empty) + return (liftIO . getOrInitResource var) + where + getOrInitResource :: MVar (Map k v) -> k -> IO v + getOrInitResource var k = + modifyMVar var $ \m -> + case Map.lookup k m of + Just x -> return (m, x) + Nothing -> do + x <- action k + let !m' = Map.insert k x m + return (m', x) + + +-- | Utility to match a file glob against the file system, starting from a +-- given root directory. The results are all relative to the given root. +-- +-- Since this operates in the 'Rebuild' monad, it also monitors the given glob +-- for changes. 
+-- +matchFileGlob :: FilePathGlob -> Rebuild [FilePath] +matchFileGlob glob = do + root <- askRoot + monitorFiles [monitorFileGlobExistence glob] + liftIO $ Glob.matchFileGlob root glob + +getDirectoryContentsMonitored :: FilePath -> Rebuild [FilePath] +getDirectoryContentsMonitored dir = do + exists <- monitorDirectoryStatus dir + if exists + then liftIO $ getDirectoryContents dir + else return [] + +createDirectoryMonitored :: Bool -> FilePath -> Rebuild () +createDirectoryMonitored createParents dir = do + monitorFiles [monitorDirectoryExistence dir] + liftIO $ createDirectoryIfMissing createParents dir + +-- | Monitor a directory as in 'monitorDirectory' if it currently exists or +-- as 'monitorNonExistentDirectory' if it does not. +monitorDirectoryStatus :: FilePath -> Rebuild Bool +monitorDirectoryStatus dir = do + exists <- liftIO $ doesDirectoryExist dir + monitorFiles [if exists + then monitorDirectory dir + else monitorNonExistentDirectory dir] + return exists + +-- | Like 'doesFileExist', but in the 'Rebuild' monad. This does +-- NOT track the contents of 'FilePath'; use 'need' in that case. +doesFileExistMonitored :: FilePath -> Rebuild Bool +doesFileExistMonitored f = do + root <- askRoot + exists <- liftIO $ doesFileExist (root f) + monitorFiles [if exists + then monitorFileExistence f + else monitorNonExistentFile f] + return exists + +-- | Monitor a single file +need :: FilePath -> Rebuild () +need f = monitorFiles [monitorFileHashed f] + +-- | Monitor a file if it exists; otherwise check for when it +-- gets created. This is a bit better for recompilation avoidance +-- because sometimes users give bad package metadata, and we don't +-- want to repeatedly rebuild in this case (which we would if we +-- need'ed a non-existent file). 
+needIfExists :: FilePath -> Rebuild () +needIfExists f = do + root <- askRoot + exists <- liftIO $ doesFileExist (root f) + monitorFiles [if exists + then monitorFileHashed f + else monitorNonExistentFile f] + +-- | Like 'findFileWithExtension', but in the 'Rebuild' monad. +findFileWithExtensionMonitored + :: [String] + -> [FilePath] + -> FilePath + -> Rebuild (Maybe FilePath) +findFileWithExtensionMonitored extensions searchPath baseName = + findFirstFileMonitored id + [ path baseName <.> ext + | path <- nub searchPath + , ext <- nub extensions ] + +-- | Like 'findFirstFile', but in the 'Rebuild' monad. +findFirstFileMonitored :: (a -> FilePath) -> [a] -> Rebuild (Maybe a) +findFirstFileMonitored file = findFirst + where findFirst [] = return Nothing + findFirst (x:xs) = do exists <- doesFileExistMonitored (file x) + if exists + then return (Just x) + else findFirst xs + +-- | Like 'findFile', but in the 'Rebuild' monad. +findFileMonitored :: [FilePath] -> FilePath -> Rebuild (Maybe FilePath) +findFileMonitored searchPath fileName = + findFirstFileMonitored id + [ path fileName + | path <- nub searchPath] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Reconfigure.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Reconfigure.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Reconfigure.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Reconfigure.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,233 @@ +module Distribution.Client.Reconfigure ( Check(..), reconfigure ) where + +import Distribution.Client.Compat.Prelude + +import Data.Monoid ( Any(..) 
) +import System.Directory ( doesFileExist ) + +import Distribution.Verbosity + +import Distribution.Simple.Configure ( localBuildInfoFile ) +import Distribution.Simple.Setup ( Flag, flagToMaybe, toFlag ) +import Distribution.Simple.Utils + ( existsAndIsMoreRecentThan, defaultPackageDesc, info ) + +import Distribution.Client.Config ( SavedConfig(..) ) +import Distribution.Client.Configure ( readConfigFlags ) +import Distribution.Client.Nix ( findNixExpr, inNixShell, nixInstantiate ) +import Distribution.Client.Sandbox + ( WereDepsReinstalled(..), findSavedDistPref, getSandboxConfigFilePath + , maybeReinstallAddSourceDeps, updateInstallDirs ) +import Distribution.Client.Sandbox.PackageEnvironment + ( userPackageEnvironmentFile ) +import Distribution.Client.Sandbox.Types ( UseSandbox(..) ) +import Distribution.Client.Setup + ( ConfigFlags(..), ConfigExFlags, GlobalFlags(..) + , SkipAddSourceDepsCheck(..) ) + + +-- | @Check@ represents a function to check some condition on type @a@. The +-- returned 'Any' is 'True' if any part of the condition failed. +newtype Check a = Check { + runCheck :: Any -- Did any previous check fail? + -> a -- value returned by previous checks + -> IO (Any, a) -- Did this check fail? What value is returned? +} + +instance Semigroup (Check a) where + (<>) c d = Check $ \any0 a0 -> do + (any1, a1) <- runCheck c any0 a0 + (any2, a2) <- runCheck d (any0 <> any1) a1 + return (any0 <> any1 <> any2, a2) + +instance Monoid (Check a) where + mempty = Check $ \_ a -> return (mempty, a) + mappend = (<>) + + +-- | Re-configure the package in the current directory if needed. Deciding +-- when to reconfigure and with which options is convoluted: +-- +-- If we are reconfiguring, we must always run @configure@ with the +-- verbosity option we are given; however, that a previous configuration +-- uses a different verbosity setting is not reason enough to reconfigure. 
+-- +-- The package should be configured to use the same \"dist\" prefix as +-- given to the @build@ command, otherwise the build will probably +-- fail. Not only does this determine the \"dist\" prefix setting if we +-- need to reconfigure anyway, but an existing configuration should be +-- invalidated if its \"dist\" prefix differs. +-- +-- If the package has never been configured (i.e., there is no +-- LocalBuildInfo), we must configure first, using the default options. +-- +-- If the package has been configured, there will be a 'LocalBuildInfo'. +-- If there no package description file, we assume that the +-- 'PackageDescription' is up to date, though the configuration may need +-- to be updated for other reasons (see above). If there is a package +-- description file, and it has been modified since the 'LocalBuildInfo' +-- was generated, then we need to reconfigure. +-- +-- The caller of this function may also have specific requirements +-- regarding the flags the last configuration used. For example, +-- 'testAction' requires that the package be configured with test suites +-- enabled. The caller may pass the required settings to this function +-- along with a function to check the validity of the saved 'ConfigFlags'; +-- these required settings will be checked first upon determining that +-- a previous configuration exists. +reconfigure + :: ((ConfigFlags, ConfigExFlags) -> [String] -> GlobalFlags -> IO ()) + -- ^ configure action + -> Verbosity + -- ^ Verbosity setting + -> FilePath + -- ^ \"dist\" prefix + -> UseSandbox + -> SkipAddSourceDepsCheck + -- ^ Should we skip the timestamp check for modified + -- add-source dependencies? + -> Flag (Maybe Int) + -- ^ -j flag for reinstalling add-source deps. + -> Check (ConfigFlags, ConfigExFlags) + -- ^ Check that the required flags are set. + -- If they are not set, provide a message explaining the + -- reason for reconfiguration. 
+ -> [String] -- ^ Extra arguments + -> GlobalFlags -- ^ Global flags + -> SavedConfig + -> IO SavedConfig +reconfigure + configureAction + verbosity + dist + useSandbox + skipAddSourceDepsCheck + numJobsFlag + check + extraArgs + globalFlags + config + = do + + savedFlags@(_, _) <- readConfigFlags dist + + useNix <- fmap isJust (findNixExpr globalFlags config) + alreadyInNixShell <- inNixShell + + if useNix && not alreadyInNixShell + then do + + -- If we are using Nix, we must reinstantiate the derivation outside + -- the shell. Eventually, the caller will invoke 'nixShell' which will + -- rerun cabal inside the shell. That will bring us back to 'reconfigure', + -- but inside the shell we'll take the second branch, below. + + -- This seems to have a problem: won't 'configureAction' call 'nixShell' + -- yet again, spawning an infinite tree of subprocesses? + -- No, because 'nixShell' doesn't spawn a new process if it is already + -- running in a Nix shell. + + nixInstantiate verbosity dist False globalFlags config + return config + + else do + + let checks = + checkVerb + <> checkDist + <> checkOutdated + <> check + <> checkAddSourceDeps + (Any force, flags@(configFlags, _)) <- runCheck checks mempty savedFlags + + let (_, config') = + updateInstallDirs + (configUserInstall configFlags) + (useSandbox, config) + + when force $ configureAction flags extraArgs globalFlags + return config' + + where + + -- Changing the verbosity does not require reconfiguration, but the new + -- verbosity should be used if reconfiguring. + checkVerb = Check $ \_ (configFlags, configExFlags) -> do + let configFlags' = configFlags { configVerbosity = toFlag verbosity} + return (mempty, (configFlags', configExFlags)) + + -- Reconfiguration is required if @--build-dir@ changes. + checkDist = Check $ \_ (configFlags, configExFlags) -> do + -- Always set the chosen @--build-dir@ before saving the flags, + -- or bad things could happen. 
+ savedDist <- findSavedDistPref config (configDistPref configFlags) + let distChanged = dist /= savedDist + when distChanged $ info verbosity "build directory changed" + let configFlags' = configFlags { configDistPref = toFlag dist } + return (Any distChanged, (configFlags', configExFlags)) + + checkOutdated = Check $ \_ flags@(configFlags, _) -> do + let buildConfig = localBuildInfoFile dist + + -- Has the package ever been configured? If not, reconfiguration is + -- required. + configured <- doesFileExist buildConfig + unless configured $ info verbosity "package has never been configured" + + -- Is the configuration older than the sandbox configuration file? + -- If so, reconfiguration is required. + sandboxConfig <- getSandboxConfigFilePath globalFlags + sandboxConfigNewer <- existsAndIsMoreRecentThan sandboxConfig buildConfig + when sandboxConfigNewer $ + info verbosity "sandbox was created after the package was configured" + + -- Is the @cabal.config@ file newer than @dist/setup.config@? Then we need + -- to force reconfigure. Note that it's possible to use @cabal.config@ + -- even without sandboxes. + userPackageEnvironmentFileModified <- + existsAndIsMoreRecentThan userPackageEnvironmentFile buildConfig + when userPackageEnvironmentFileModified $ + info verbosity ("user package environment file ('" + ++ userPackageEnvironmentFile ++ "') was modified") + + -- Is the configuration older than the package description? 
+ descrFile <- maybe (defaultPackageDesc verbosity) return + (flagToMaybe (configCabalFilePath configFlags)) + outdated <- existsAndIsMoreRecentThan descrFile buildConfig + when outdated $ info verbosity (descrFile ++ " was changed") + + let failed = + Any outdated + <> Any userPackageEnvironmentFileModified + <> Any sandboxConfigNewer + <> Any (not configured) + return (failed, flags) + + checkAddSourceDeps = Check $ \(Any force') flags@(configFlags, _) -> do + let (_, config') = + updateInstallDirs + (configUserInstall configFlags) + (useSandbox, config) + + skipAddSourceDepsCheck' + | force' = SkipAddSourceDepsCheck + | otherwise = skipAddSourceDepsCheck + + when (skipAddSourceDepsCheck' == SkipAddSourceDepsCheck) $ + info verbosity "skipping add-source deps check" + + -- Were any add-source dependencies reinstalled in the sandbox? + depsReinstalled <- + case skipAddSourceDepsCheck' of + DontSkipAddSourceDepsCheck -> + maybeReinstallAddSourceDeps + verbosity numJobsFlag configFlags globalFlags + (useSandbox, config') + SkipAddSourceDepsCheck -> do + return NoDepsReinstalled + + case depsReinstalled of + NoDepsReinstalled -> return (mempty, flags) + ReinstalledSomeDeps -> do + info verbosity "some add-source dependencies were reinstalled" + return (Any True, flags) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Run.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Run.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Run.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Run.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,143 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Run +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Implementation of 
the 'run' command. +----------------------------------------------------------------------------- + +module Distribution.Client.Run ( run, splitRunArgs ) + where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Types.TargetInfo (targetCLBI) +import Distribution.Types.LocalBuildInfo (componentNameTargets') + +import Distribution.Client.Utils (tryCanonicalizePath) + +import Distribution.Types.UnqualComponentName +import Distribution.PackageDescription (Executable (..), + TestSuite(..), + Benchmark(..), + PackageDescription (..), + BuildInfo(buildable)) +import Distribution.Simple.Compiler (compilerFlavor, CompilerFlavor(..)) +import Distribution.Simple.Build.PathsModule (pkgPathEnvVar) +import Distribution.Simple.BuildPaths (exeExtension) +import Distribution.Simple.LocalBuildInfo (ComponentName (..), + LocalBuildInfo (..), + depLibraryPaths) +import Distribution.Simple.Utils (die', notice, warn, + rawSystemExitWithEnv, + addLibraryPath) +import Distribution.System (Platform (..)) +import Distribution.Verbosity (Verbosity) +import Distribution.Text (display) + +import qualified Distribution.Simple.GHCJS as GHCJS + +import System.Directory (getCurrentDirectory) +import Distribution.Compat.Environment (getEnvironment) +import System.FilePath ((<.>), ()) + + +-- | Return the executable to run and any extra arguments that should be +-- forwarded to it. Die in case of error. +splitRunArgs :: Verbosity -> LocalBuildInfo -> [String] + -> IO (Executable, [String]) +splitRunArgs verbosity lbi args = + case whichExecutable of -- Either err (wasManuallyChosen, exe, paramsRest) + Left err -> do + warn verbosity `traverse_` maybeWarning -- If there is a warning, print it. + die' verbosity err + Right (True, exe, xs) -> return (exe, xs) + Right (False, exe, xs) -> do + let addition = " Interpreting all parameters to `run` as a parameter to" + ++ " the default executable." + -- If there is a warning, print it together with the addition. 
+ warn verbosity `traverse_` fmap (++addition) maybeWarning + return (exe, xs) + where + pkg_descr = localPkgDescr lbi + whichExecutable :: Either String -- Error string. + ( Bool -- If it was manually chosen. + , Executable -- The executable. + , [String] -- The remaining parameters. + ) + whichExecutable = case (enabledExes, args) of + ([] , _) -> Left "Couldn't find any enabled executables." + ([exe], []) -> return (False, exe, []) + ([exe], (x:xs)) + | x == unUnqualComponentName (exeName exe) -> return (True, exe, xs) + | otherwise -> return (False, exe, args) + (_ , []) -> Left + $ "This package contains multiple executables. " + ++ "You must pass the executable name as the first argument " + ++ "to 'cabal run'." + (_ , (x:xs)) -> + case find (\exe -> unUnqualComponentName (exeName exe) == x) enabledExes of + Nothing -> Left $ "No executable named '" ++ x ++ "'." + Just exe -> return (True, exe, xs) + where + enabledExes = filter (buildable . buildInfo) (executables pkg_descr) + + maybeWarning :: Maybe String + maybeWarning = case args of + [] -> Nothing + (x:_) -> lookup (mkUnqualComponentName x) components + where + components :: [(UnqualComponentName, String)] -- Component name, message. + components = + [ (name, "The executable '" ++ display name ++ "' is disabled.") + | e <- executables pkg_descr + , not . buildable . buildInfo $ e, let name = exeName e] + + ++ [ (name, "There is a test-suite '" ++ display name ++ "'," + ++ " but the `run` command is only for executables.") + | t <- testSuites pkg_descr + , let name = testName t] + + ++ [ (name, "There is a benchmark '" ++ display name ++ "'," + ++ " but the `run` command is only for executables.") + | b <- benchmarks pkg_descr + , let name = benchmarkName b] + +-- | Run a given executable. 
+run :: Verbosity -> LocalBuildInfo -> Executable -> [String] -> IO () +run verbosity lbi exe exeArgs = do + curDir <- getCurrentDirectory + let buildPref = buildDir lbi + pkg_descr = localPkgDescr lbi + dataDirEnvVar = (pkgPathEnvVar pkg_descr "datadir", + curDir dataDir pkg_descr) + + (path, runArgs) <- + let exeName' = display $ exeName exe + in case compilerFlavor (compiler lbi) of + GHCJS -> do + let (script, cmd, cmdArgs) = + GHCJS.runCmd (withPrograms lbi) + (buildPref exeName' exeName') + script' <- tryCanonicalizePath script + return (cmd, cmdArgs ++ [script']) + _ -> do + p <- tryCanonicalizePath $ + buildPref exeName' (exeName' <.> exeExtension (hostPlatform lbi)) + return (p, []) + + env <- (dataDirEnvVar:) <$> getEnvironment + -- Add (DY)LD_LIBRARY_PATH if needed + env' <- if withDynExe lbi + then do let (Platform _ os) = hostPlatform lbi + clbi <- case componentNameTargets' pkg_descr lbi (CExeName (exeName exe)) of + [target] -> return (targetCLBI target) + [] -> die' verbosity "run: Could not find executable in LocalBuildInfo" + _ -> die' verbosity "run: Found multiple matching exes in LocalBuildInfo" + paths <- depLibraryPaths True False lbi clbi + return (addLibraryPath os paths env) + else return env + notice verbosity $ "Running " ++ display (exeName exe) ++ "..." 
+ rawSystemExitWithEnv verbosity path (runArgs++exeArgs) env' diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/Index.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/Index.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/Index.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/Index.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,285 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Sandbox.Index +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Querying and modifying local build tree references in the package index. +----------------------------------------------------------------------------- + +module Distribution.Client.Sandbox.Index ( + createEmpty, + addBuildTreeRefs, + removeBuildTreeRefs, + ListIgnoredBuildTreeRefs(..), RefTypesToList(..), + DeleteSourceError(..), + listBuildTreeRefs, + validateIndexPath, + + defaultIndexFileName + ) where + +import qualified Codec.Archive.Tar as Tar +import qualified Codec.Archive.Tar.Entry as Tar +import qualified Codec.Archive.Tar.Index as Tar +import qualified Distribution.Client.Tar as Tar +import Distribution.Client.IndexUtils ( BuildTreeRefType(..) + , refTypeFromTypeCode + , typeCodeFromRefType + , updatePackageIndexCacheFile + , readCacheStrict + , Index(..) 
) +import qualified Distribution.Client.IndexUtils as IndexUtils +import Distribution.Client.Utils ( byteStringToFilePath, filePathToByteString + , makeAbsoluteToCwd, tryCanonicalizePath + , tryFindAddSourcePackageDesc ) + +import Distribution.Simple.Utils ( die', debug ) +import Distribution.Compat.Exception ( tryIO ) +import Distribution.Verbosity ( Verbosity ) + +import qualified Data.ByteString.Lazy as BS +import Control.DeepSeq ( NFData(rnf) ) +import Control.Exception ( evaluate, throw, Exception ) +import Control.Monad ( liftM, unless ) +import Control.Monad.Writer.Lazy (WriterT(..), runWriterT, tell) +import Data.List ( (\\), intersect, nub, find ) +import Data.Maybe ( catMaybes ) +import Data.Either (partitionEithers) +import System.Directory ( createDirectoryIfMissing, + doesDirectoryExist, doesFileExist, + renameFile, canonicalizePath) +import System.FilePath ( (), (<.>), takeDirectory, takeExtension ) +import System.IO ( IOMode(..), withBinaryFile ) + +-- | A reference to a local build tree. +data BuildTreeRef = BuildTreeRef { + buildTreeRefType :: !BuildTreeRefType, + buildTreePath :: !FilePath + } + +instance NFData BuildTreeRef where + rnf (BuildTreeRef _ fp) = rnf fp + +defaultIndexFileName :: FilePath +defaultIndexFileName = "00-index.tar" + +-- | Given a path, ensure that it refers to a local build tree. +buildTreeRefFromPath :: Verbosity -> BuildTreeRefType -> FilePath -> IO (Maybe BuildTreeRef) +buildTreeRefFromPath verbosity refType dir = do + dirExists <- doesDirectoryExist dir + unless dirExists $ + die' verbosity $ "directory '" ++ dir ++ "' does not exist" + _ <- tryFindAddSourcePackageDesc verbosity dir "Error adding source reference." + return . Just $ BuildTreeRef refType dir + +-- | Given a tar archive entry, try to parse it as a local build tree reference. 
+readBuildTreeRef :: Tar.Entry -> Maybe BuildTreeRef +readBuildTreeRef entry = case Tar.entryContent entry of + (Tar.OtherEntryType typeCode bs size) + | (Tar.isBuildTreeRefTypeCode typeCode) + && (size == BS.length bs) -> Just $! BuildTreeRef + (refTypeFromTypeCode typeCode) + (byteStringToFilePath bs) + | otherwise -> Nothing + _ -> Nothing + +-- | Given a sequence of tar archive entries, extract all references to local +-- build trees. +readBuildTreeRefs :: Exception e => Tar.Entries e -> [BuildTreeRef] +readBuildTreeRefs = + catMaybes + . Tar.foldEntries (\e r -> readBuildTreeRef e : r) + [] throw + +-- | Given a path to a tar archive, extract all references to local build trees. +readBuildTreeRefsFromFile :: FilePath -> IO [BuildTreeRef] +readBuildTreeRefsFromFile = liftM (readBuildTreeRefs . Tar.read) . BS.readFile + +-- | Read build tree references from an index cache +readBuildTreeRefsFromCache :: Verbosity -> FilePath -> IO [BuildTreeRef] +readBuildTreeRefsFromCache verbosity indexPath = do + (mRefs, _prefs) <- readCacheStrict verbosity (SandboxIndex indexPath) buildTreeRef + return (catMaybes mRefs) + where + buildTreeRef pkgEntry = + case pkgEntry of + IndexUtils.NormalPackage _ _ _ _ -> Nothing + IndexUtils.BuildTreeRef typ _ _ path _ -> Just $ BuildTreeRef typ path + +-- | Given a local build tree ref, serialise it to a tar archive entry. +writeBuildTreeRef :: BuildTreeRef -> Tar.Entry +writeBuildTreeRef (BuildTreeRef refType path) = Tar.simpleEntry tarPath content + where + bs = filePathToByteString path + -- Provide a filename for tools that treat custom entries as ordinary files. + tarPath' = "local-build-tree-reference" + -- fromRight can't fail because the path is shorter than 255 characters. + tarPath = fromRight $ Tar.toTarPath True tarPath' + content = Tar.OtherEntryType (typeCodeFromRefType refType) bs (BS.length bs) + + -- TODO: Move this to D.C.Utils? 
+ fromRight (Left err) = error err + fromRight (Right a) = a + +-- | Check that the provided path is either an existing directory, or a tar +-- archive in an existing directory. +validateIndexPath :: Verbosity -> FilePath -> IO FilePath +validateIndexPath verbosity path' = do + path <- makeAbsoluteToCwd path' + if (== ".tar") . takeExtension $ path + then return path + else do dirExists <- doesDirectoryExist path + unless dirExists $ + die' verbosity $ "directory does not exist: '" ++ path ++ "'" + return $ path defaultIndexFileName + +-- | Create an empty index file. +createEmpty :: Verbosity -> FilePath -> IO () +createEmpty verbosity path = do + indexExists <- doesFileExist path + if indexExists + then debug verbosity $ "Package index already exists: " ++ path + else do + debug verbosity $ "Creating the index file '" ++ path ++ "'" + createDirectoryIfMissing True (takeDirectory path) + -- Equivalent to 'tar cvf empty.tar --files-from /dev/null'. + let zeros = BS.replicate (512*20) 0 + BS.writeFile path zeros + +-- | Add given local build tree references to the index. +addBuildTreeRefs :: Verbosity -> FilePath -> [FilePath] -> BuildTreeRefType + -> IO () +addBuildTreeRefs _ _ [] _ = + error "Distribution.Client.Sandbox.Index.addBuildTreeRefs: unexpected" +addBuildTreeRefs verbosity path l' refType = do + checkIndexExists verbosity path + l <- liftM nub . mapM tryCanonicalizePath $ l' + treesInIndex <- fmap (map buildTreePath) (readBuildTreeRefsFromFile path) + -- Add only those paths that aren't already in the index. 
+ treesToAdd <- mapM (buildTreeRefFromPath verbosity refType) (l \\ treesInIndex) + let entries = map writeBuildTreeRef (catMaybes treesToAdd) + unless (null entries) $ do + withBinaryFile path ReadWriteMode $ \h -> do + block <- Tar.hSeekEndEntryOffset h Nothing + debug verbosity $ "Writing at tar block: " ++ show block + BS.hPut h (Tar.write entries) + debug verbosity $ "Successfully appended to '" ++ path ++ "'" + updatePackageIndexCacheFile verbosity $ SandboxIndex path + +data DeleteSourceError = ErrNonregisteredSource { nrPath :: FilePath } + | ErrNonexistentSource { nePath :: FilePath } deriving Show + +-- | Remove given local build tree references from the index. +-- +-- Returns a tuple with either removed build tree refs or errors and a function +-- that converts from a provided build tree ref to corresponding full directory path. +removeBuildTreeRefs :: Verbosity -> FilePath -> [FilePath] + -> IO ([Either DeleteSourceError FilePath], + (FilePath -> FilePath)) +removeBuildTreeRefs _ _ [] = + error "Distribution.Client.Sandbox.Index.removeBuildTreeRefs: unexpected" +removeBuildTreeRefs verbosity indexPath l = do + checkIndexExists verbosity indexPath + let tmpFile = indexPath <.> "tmp" + + canonRes <- mapM (\btr -> do res <- tryIO $ canonicalizePath btr + return $ case res of + Right pth -> Right (btr, pth) + Left _ -> Left $ ErrNonexistentSource btr) l + let (failures, convDict) = partitionEithers canonRes + allRefs = fmap snd convDict + + -- Performance note: on my system, it takes 'index --remove-source' + -- approx. 3,5s to filter a 65M file. Real-life indices are expected to be + -- much smaller. + removedRefs <- doRemove convDict tmpFile + + renameFile tmpFile indexPath + debug verbosity $ "Successfully renamed '" ++ tmpFile + ++ "' to '" ++ indexPath ++ "'" + + unless (null removedRefs) $ + updatePackageIndexCacheFile verbosity $ SandboxIndex indexPath + + let results = fmap Right removedRefs + ++ fmap Left failures + ++ fmap (Left . 
ErrNonregisteredSource) + (fmap (convertWith convDict) (allRefs \\ removedRefs)) + + return (results, convertWith convDict) + + where + doRemove :: [(FilePath, FilePath)] -> FilePath -> IO [FilePath] + doRemove srcRefs tmpFile = do + (newIdx, changedPaths) <- + Tar.read `fmap` BS.readFile indexPath + >>= runWriterT . Tar.filterEntriesM (p $ fmap snd srcRefs) + BS.writeFile tmpFile . Tar.write . Tar.entriesToList $ newIdx + return changedPaths + + p :: [FilePath] -> Tar.Entry -> WriterT [FilePath] IO Bool + p refs entry = case readBuildTreeRef entry of + Nothing -> return True + -- FIXME: removing snapshot deps is done with `delete-source + -- .cabal-sandbox/snapshots/$SNAPSHOT_NAME`. Perhaps we also want to + -- support removing snapshots by providing the original path. + (Just (BuildTreeRef _ pth)) -> if pth `elem` refs + then tell [pth] >> return False + else return True + + convertWith dict pth = maybe pth fst $ find ((==pth) . snd) dict + +-- | A build tree ref can become ignored if the user later adds a build tree ref +-- with the same package ID. We display ignored build tree refs when the user +-- runs 'cabal sandbox list-sources', but do not look at their timestamps in +-- 'reinstallAddSourceDeps'. +data ListIgnoredBuildTreeRefs = ListIgnored | DontListIgnored + +-- | Which types of build tree refs should be listed? +data RefTypesToList = OnlySnapshots | OnlyLinks | LinksAndSnapshots + +-- | List the local build trees that are referred to from the index. +listBuildTreeRefs :: Verbosity -> ListIgnoredBuildTreeRefs -> RefTypesToList + -> FilePath + -> IO [FilePath] +listBuildTreeRefs verbosity listIgnored refTypesToList path = do + checkIndexExists verbosity path + buildTreeRefs <- + case listIgnored of + DontListIgnored -> do + paths <- listWithoutIgnored + case refTypesToList of + LinksAndSnapshots -> return paths + _ -> do + allPathsFiltered <- fmap (map buildTreePath . 
filter predicate) + listWithIgnored + _ <- evaluate (length allPathsFiltered) + return (paths `intersect` allPathsFiltered) + + ListIgnored -> fmap (map buildTreePath . filter predicate) listWithIgnored + + _ <- evaluate (length buildTreeRefs) + return buildTreeRefs + + where + predicate :: BuildTreeRef -> Bool + predicate = case refTypesToList of + OnlySnapshots -> (==) SnapshotRef . buildTreeRefType + OnlyLinks -> (==) LinkRef . buildTreeRefType + LinksAndSnapshots -> const True + + listWithIgnored :: IO [BuildTreeRef] + listWithIgnored = readBuildTreeRefsFromFile path + + listWithoutIgnored :: IO [FilePath] + listWithoutIgnored = fmap (map buildTreePath) + $ readBuildTreeRefsFromCache verbosity path + + +-- | Check that the package index file exists and exit with error if it does not. +checkIndexExists :: Verbosity -> FilePath -> IO () +checkIndexExists verbosity path = do + indexExists <- doesFileExist path + unless indexExists $ + die' verbosity $ "index does not exist: '" ++ path ++ "'" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/PackageEnvironment.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/PackageEnvironment.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/PackageEnvironment.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/PackageEnvironment.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,573 @@ +{-# LANGUAGE DeriveGeneric #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Sandbox.PackageEnvironment +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Utilities for working with the package environment file. Patterned after +-- Distribution.Client.Config. 
+----------------------------------------------------------------------------- + +module Distribution.Client.Sandbox.PackageEnvironment ( + PackageEnvironment(..) + , PackageEnvironmentType(..) + , classifyPackageEnvironment + , createPackageEnvironmentFile + , tryLoadSandboxPackageEnvironmentFile + , readPackageEnvironmentFile + , showPackageEnvironment + , showPackageEnvironmentWithComments + , setPackageDB + , sandboxPackageDBPath + , loadUserConfig + + , basePackageEnvironment + , initialPackageEnvironment + , commentPackageEnvironment + , sandboxPackageEnvironmentFile + , userPackageEnvironmentFile + ) where + +import Distribution.Client.Config ( SavedConfig(..), commentSavedConfig + , loadConfig, configFieldDescriptions + , haddockFlagsFields + , installDirsFields, withProgramsFields + , withProgramOptionsFields + , defaultCompiler ) +import Distribution.Client.ParseUtils ( parseFields, ppFields, ppSection ) +import Distribution.Client.Setup ( GlobalFlags(..), ConfigExFlags(..) + , InstallFlags(..) + , defaultSandboxLocation ) +import Distribution.Client.Targets ( userConstraintPackageName ) +import Distribution.Utils.NubList ( toNubList ) +import Distribution.Simple.Compiler ( Compiler, PackageDB(..) + , compilerFlavor, showCompilerIdWithAbi ) +import Distribution.Simple.InstallDirs ( InstallDirs(..), PathTemplate + , defaultInstallDirs, combineInstallDirs + , fromPathTemplate, toPathTemplate ) +import Distribution.Simple.Setup ( Flag(..) + , ConfigFlags(..), HaddockFlags(..) + , fromFlagOrDefault, toFlag, flagToMaybe ) +import Distribution.Simple.Utils ( die', info, notice, warn, debug ) +import Distribution.Solver.Types.ConstraintSource +import Distribution.ParseUtils ( FieldDescr(..), ParseResult(..) 
+ , commaListField, commaNewLineListField + , liftField, lineNo, locatedErrorMsg + , parseFilePathQ, readFields + , showPWarning, simpleField + , syntaxError, warning ) +import Distribution.System ( Platform ) +import Distribution.Verbosity ( Verbosity, normal ) +import Control.Monad ( foldM, liftM2, unless ) +import Data.List ( partition, sortBy ) +import Data.Maybe ( isJust ) +import Data.Ord ( comparing ) +import Distribution.Compat.Exception ( catchIO ) +import Distribution.Compat.Semigroup +import System.Directory ( doesDirectoryExist, doesFileExist + , renameFile ) +import System.FilePath ( (<.>), (), takeDirectory ) +import System.IO.Error ( isDoesNotExistError ) +import Text.PrettyPrint ( ($+$) ) + +import qualified Text.PrettyPrint as Disp +import qualified Distribution.Compat.ReadP as Parse +import qualified Distribution.ParseUtils as ParseUtils ( Field(..) ) +import qualified Distribution.Text as Text +import GHC.Generics ( Generic ) + + +-- +-- * Configuration saved in the package environment file +-- + +-- TODO: would be nice to remove duplication between +-- D.C.Sandbox.PackageEnvironment and D.C.Config. +data PackageEnvironment = PackageEnvironment { + -- The 'inherit' feature is not used ATM, but could be useful in the future + -- for constructing nested sandboxes (see discussion in #1196). + pkgEnvInherit :: Flag FilePath, + pkgEnvSavedConfig :: SavedConfig +} deriving Generic + +instance Monoid PackageEnvironment where + mempty = gmempty + mappend = (<>) + +instance Semigroup PackageEnvironment where + (<>) = gmappend + +-- | The automatically-created package environment file that should not be +-- touched by the user. +sandboxPackageEnvironmentFile :: FilePath +sandboxPackageEnvironmentFile = "cabal.sandbox.config" + +-- | Optional package environment file that can be used to customize the default +-- settings. Created by the user. 
+userPackageEnvironmentFile :: FilePath +userPackageEnvironmentFile = "cabal.config" + +-- | Type of the current package environment. +data PackageEnvironmentType = + SandboxPackageEnvironment -- ^ './cabal.sandbox.config' + | UserPackageEnvironment -- ^ './cabal.config' + | AmbientPackageEnvironment -- ^ '~/.cabal/config' + +-- | Is there a 'cabal.sandbox.config' or 'cabal.config' in this +-- directory? +classifyPackageEnvironment :: FilePath -> Flag FilePath -> Flag Bool + -> IO PackageEnvironmentType +classifyPackageEnvironment pkgEnvDir sandboxConfigFileFlag ignoreSandboxFlag = + do isSandbox <- liftM2 (||) (return forceSandboxConfig) + (configExists sandboxPackageEnvironmentFile) + isUser <- configExists userPackageEnvironmentFile + return (classify isSandbox isUser) + where + configExists fname = doesFileExist (pkgEnvDir fname) + ignoreSandbox = fromFlagOrDefault False ignoreSandboxFlag + forceSandboxConfig = isJust . flagToMaybe $ sandboxConfigFileFlag + + classify :: Bool -> Bool -> PackageEnvironmentType + classify True _ + | not ignoreSandbox = SandboxPackageEnvironment + classify _ True = UserPackageEnvironment + classify _ False = AmbientPackageEnvironment + +-- | Defaults common to 'initialPackageEnvironment' and +-- 'commentPackageEnvironment'. +commonPackageEnvironmentConfig :: FilePath -> SavedConfig +commonPackageEnvironmentConfig sandboxDir = + mempty { + savedConfigureFlags = mempty { + -- TODO: Currently, we follow cabal-dev and set 'user-install: False' in + -- the config file. In the future we may want to distinguish between + -- global, sandbox and user install types. + configUserInstall = toFlag False, + configInstallDirs = installDirs + }, + savedUserInstallDirs = installDirs, + savedGlobalInstallDirs = installDirs, + savedGlobalFlags = mempty { + globalLogsDir = toFlag $ sandboxDir "logs", + -- Is this right? cabal-dev uses the global world file. 
+ globalWorldFile = toFlag $ sandboxDir "world" + } + } + where + installDirs = sandboxInstallDirs sandboxDir + +-- | 'commonPackageEnvironmentConfig' wrapped inside a 'PackageEnvironment'. +commonPackageEnvironment :: FilePath -> PackageEnvironment +commonPackageEnvironment sandboxDir = mempty { + pkgEnvSavedConfig = commonPackageEnvironmentConfig sandboxDir + } + +-- | Given a path to a sandbox, return the corresponding InstallDirs record. +sandboxInstallDirs :: FilePath -> InstallDirs (Flag PathTemplate) +sandboxInstallDirs sandboxDir = mempty { + prefix = toFlag (toPathTemplate sandboxDir) + } + +-- | These are the absolute basic defaults, the fields that must be +-- initialised. When we load the package environment from the file we layer the +-- loaded values over these ones. +basePackageEnvironment :: PackageEnvironment +basePackageEnvironment = + mempty { + pkgEnvSavedConfig = mempty { + savedConfigureFlags = mempty { + configHcFlavor = toFlag defaultCompiler, + configVerbosity = toFlag normal + } + } + } + +-- | Initial configuration that we write out to the package environment file if +-- it does not exist. When the package environment gets loaded this +-- configuration gets layered on top of 'basePackageEnvironment'. 
+initialPackageEnvironment :: FilePath -> Compiler -> Platform + -> IO PackageEnvironment +initialPackageEnvironment sandboxDir compiler platform = do + defInstallDirs <- defaultInstallDirs (compilerFlavor compiler) + {- userInstall= -} False {- _hasLibs= -} False + let initialConfig = commonPackageEnvironmentConfig sandboxDir + installDirs = combineInstallDirs (\d f -> Flag $ fromFlagOrDefault d f) + defInstallDirs (savedUserInstallDirs initialConfig) + return $ mempty { + pkgEnvSavedConfig = initialConfig { + savedUserInstallDirs = installDirs, + savedGlobalInstallDirs = installDirs, + savedGlobalFlags = (savedGlobalFlags initialConfig) { + globalLocalRepos = toNubList [sandboxDir "packages"] + }, + savedConfigureFlags = setPackageDB sandboxDir compiler platform + (savedConfigureFlags initialConfig), + savedInstallFlags = (savedInstallFlags initialConfig) { + installSummaryFile = toNubList [toPathTemplate (sandboxDir + "logs" "build.log")] + } + } + } + +-- | Return the path to the sandbox package database. +sandboxPackageDBPath :: FilePath -> Compiler -> Platform -> String +sandboxPackageDBPath sandboxDir compiler platform = + sandboxDir + (Text.display platform ++ "-" + ++ showCompilerIdWithAbi compiler + ++ "-packages.conf.d") +-- The path in sandboxPackageDBPath should be kept in sync with the +-- path in the bootstrap.sh which is used to bootstrap cabal-install +-- into a sandbox. + +-- | Use the package DB location specific for this compiler. +setPackageDB :: FilePath -> Compiler -> Platform -> ConfigFlags -> ConfigFlags +setPackageDB sandboxDir compiler platform configFlags = + configFlags { + configPackageDBs = [Just (SpecificPackageDB $ sandboxPackageDBPath + sandboxDir + compiler + platform)] + } + +-- | Almost the same as 'savedConf `mappend` pkgEnv', but some settings are +-- overridden instead of mappend'ed. 
+overrideSandboxSettings :: PackageEnvironment -> PackageEnvironment -> + PackageEnvironment +overrideSandboxSettings pkgEnv0 pkgEnv = + pkgEnv { + pkgEnvSavedConfig = mappendedConf { + savedConfigureFlags = (savedConfigureFlags mappendedConf) { + configPackageDBs = configPackageDBs pkgEnvConfigureFlags + } + , savedInstallFlags = (savedInstallFlags mappendedConf) { + installSummaryFile = installSummaryFile pkgEnvInstallFlags + } + }, + pkgEnvInherit = pkgEnvInherit pkgEnv0 + } + where + pkgEnvConf = pkgEnvSavedConfig pkgEnv + mappendedConf = (pkgEnvSavedConfig pkgEnv0) `mappend` pkgEnvConf + pkgEnvConfigureFlags = savedConfigureFlags pkgEnvConf + pkgEnvInstallFlags = savedInstallFlags pkgEnvConf + +-- | Default values that get used if no value is given. Used here to include in +-- comments when we write out the initial package environment. +commentPackageEnvironment :: FilePath -> IO PackageEnvironment +commentPackageEnvironment sandboxDir = do + commentConf <- commentSavedConfig + let baseConf = commonPackageEnvironmentConfig sandboxDir + return $ mempty { + pkgEnvSavedConfig = commentConf `mappend` baseConf + } + +-- | If this package environment inherits from some other package environment, +-- return that package environment; otherwise return mempty. +inheritedPackageEnvironment :: Verbosity -> PackageEnvironment + -> IO PackageEnvironment +inheritedPackageEnvironment verbosity pkgEnv = do + case (pkgEnvInherit pkgEnv) of + NoFlag -> return mempty + confPathFlag@(Flag _) -> do + conf <- loadConfig verbosity confPathFlag + return $ mempty { pkgEnvSavedConfig = conf } + +-- | Load the user package environment if it exists (the optional "cabal.config" +-- file). If it does not exist locally, attempt to load an optional global one. 
+userPackageEnvironment :: Verbosity -> FilePath -> Maybe FilePath + -> IO PackageEnvironment +userPackageEnvironment verbosity pkgEnvDir globalConfigLocation = do + let path = pkgEnvDir userPackageEnvironmentFile + minp <- readPackageEnvironmentFile (ConstraintSourceUserConfig path) + mempty path + case (minp, globalConfigLocation) of + (Just parseRes, _) -> processConfigParse path parseRes + (_, Just globalLoc) -> do + minp' <- readPackageEnvironmentFile (ConstraintSourceUserConfig globalLoc) + mempty globalLoc + maybe (warn verbosity ("no constraints file found at " ++ globalLoc) + >> return mempty) + (processConfigParse globalLoc) + minp' + _ -> do + debug verbosity ("no user package environment file found at " ++ pkgEnvDir) + return mempty + where + processConfigParse path (ParseOk warns parseResult) = do + unless (null warns) $ warn verbosity $ + unlines (map (showPWarning path) warns) + return parseResult + processConfigParse path (ParseFailed err) = do + let (line, msg) = locatedErrorMsg err + warn verbosity $ "Error parsing package environment file " ++ path + ++ maybe "" (\n -> ":" ++ show n) line ++ ":\n" ++ msg + return mempty + +-- | Same as @userPackageEnvironmentFile@, but returns a SavedConfig. +loadUserConfig :: Verbosity -> FilePath -> Maybe FilePath -> IO SavedConfig +loadUserConfig verbosity pkgEnvDir globalConfigLocation = + fmap pkgEnvSavedConfig $ + userPackageEnvironment verbosity pkgEnvDir globalConfigLocation + +-- | Common error handling code used by 'tryLoadSandboxPackageEnvironment' and +-- 'updatePackageEnvironment'. 
+handleParseResult :: Verbosity -> FilePath + -> Maybe (ParseResult PackageEnvironment) + -> IO PackageEnvironment +handleParseResult verbosity path minp = + case minp of + Nothing -> die' verbosity $ + "The package environment file '" ++ path ++ "' doesn't exist" + Just (ParseOk warns parseResult) -> do + unless (null warns) $ warn verbosity $ + unlines (map (showPWarning path) warns) + return parseResult + Just (ParseFailed err) -> do + let (line, msg) = locatedErrorMsg err + die' verbosity $ "Error parsing package environment file " ++ path + ++ maybe "" (\n -> ":" ++ show n) line ++ ":\n" ++ msg + +-- | Try to load the given package environment file, exiting with error if it +-- doesn't exist. Also returns the path to the sandbox directory. The path +-- parameter should refer to an existing file. +tryLoadSandboxPackageEnvironmentFile :: Verbosity -> FilePath -> (Flag FilePath) + -> IO (FilePath, PackageEnvironment) +tryLoadSandboxPackageEnvironmentFile verbosity pkgEnvFile configFileFlag = do + let pkgEnvDir = takeDirectory pkgEnvFile + minp <- readPackageEnvironmentFile + (ConstraintSourceSandboxConfig pkgEnvFile) mempty pkgEnvFile + pkgEnv <- handleParseResult verbosity pkgEnvFile minp + + -- Get the saved sandbox directory. + -- TODO: Use substPathTemplate with + -- compilerTemplateEnv ++ platformTemplateEnv ++ abiTemplateEnv. + let sandboxDir = fromFlagOrDefault defaultSandboxLocation + . fmap fromPathTemplate . prefix . savedUserInstallDirs + . pkgEnvSavedConfig $ pkgEnv + + -- Do some sanity checks + dirExists <- doesDirectoryExist sandboxDir + -- TODO: Also check for an initialised package DB? 
+ unless dirExists $ + die' verbosity ("No sandbox exists at " ++ sandboxDir) + info verbosity $ "Using a sandbox located at " ++ sandboxDir + + let base = basePackageEnvironment + let common = commonPackageEnvironment sandboxDir + user <- userPackageEnvironment verbosity pkgEnvDir Nothing --TODO + inherited <- inheritedPackageEnvironment verbosity user + + -- Layer the package environment settings over settings from ~/.cabal/config. + cabalConfig <- fmap unsetSymlinkBinDir $ loadConfig verbosity configFileFlag + return (sandboxDir, + updateInstallDirs $ + (base `mappend` (toPkgEnv cabalConfig) `mappend` + common `mappend` inherited `mappend` user) + `overrideSandboxSettings` pkgEnv) + where + toPkgEnv config = mempty { pkgEnvSavedConfig = config } + + updateInstallDirs pkgEnv = + let config = pkgEnvSavedConfig pkgEnv + configureFlags = savedConfigureFlags config + installDirs = savedUserInstallDirs config + in pkgEnv { + pkgEnvSavedConfig = config { + savedConfigureFlags = configureFlags { + configInstallDirs = installDirs + } + } + } + + -- We don't want to inherit the value of 'symlink-bindir' from + -- '~/.cabal/config'. See #1514. + unsetSymlinkBinDir config = + let installFlags = savedInstallFlags config + in config { + savedInstallFlags = installFlags { + installSymlinkBinDir = NoFlag + } + } + +-- | Create a new package environment file, replacing the existing one if it +-- exists. Note that the path parameters should point to existing directories. +createPackageEnvironmentFile :: Verbosity -> FilePath -> FilePath + -> Compiler + -> Platform + -> IO () +createPackageEnvironmentFile verbosity sandboxDir pkgEnvFile compiler platform = do + notice verbosity $ "Writing a default package environment file to " ++ pkgEnvFile + initialPkgEnv <- initialPackageEnvironment sandboxDir compiler platform + writePackageEnvironmentFile pkgEnvFile initialPkgEnv + +-- | Descriptions of all fields in the package environment file. 
+pkgEnvFieldDescrs :: ConstraintSource -> [FieldDescr PackageEnvironment] +pkgEnvFieldDescrs src = [ + simpleField "inherit" + (fromFlagOrDefault Disp.empty . fmap Disp.text) (optional parseFilePathQ) + pkgEnvInherit (\v pkgEnv -> pkgEnv { pkgEnvInherit = v }) + + , commaNewLineListField "constraints" + (Text.disp . fst) ((\pc -> (pc, src)) `fmap` Text.parse) + (sortConstraints . configExConstraints + . savedConfigureExFlags . pkgEnvSavedConfig) + (\v pkgEnv -> updateConfigureExFlags pkgEnv + (\flags -> flags { configExConstraints = v })) + + , commaListField "preferences" + Text.disp Text.parse + (configPreferences . savedConfigureExFlags . pkgEnvSavedConfig) + (\v pkgEnv -> updateConfigureExFlags pkgEnv + (\flags -> flags { configPreferences = v })) + ] + ++ map toPkgEnv configFieldDescriptions' + where + optional = Parse.option mempty . fmap toFlag + + configFieldDescriptions' :: [FieldDescr SavedConfig] + configFieldDescriptions' = filter + (\(FieldDescr name _ _) -> name /= "preference" && name /= "constraint") + (configFieldDescriptions src) + + toPkgEnv :: FieldDescr SavedConfig -> FieldDescr PackageEnvironment + toPkgEnv fieldDescr = + liftField pkgEnvSavedConfig + (\savedConfig pkgEnv -> pkgEnv { pkgEnvSavedConfig = savedConfig}) + fieldDescr + + updateConfigureExFlags :: PackageEnvironment + -> (ConfigExFlags -> ConfigExFlags) + -> PackageEnvironment + updateConfigureExFlags pkgEnv f = pkgEnv { + pkgEnvSavedConfig = (pkgEnvSavedConfig pkgEnv) { + savedConfigureExFlags = f . savedConfigureExFlags . pkgEnvSavedConfig + $ pkgEnv + } + } + + sortConstraints = sortBy (comparing $ userConstraintPackageName . fst) + +-- | Read the package environment file. +readPackageEnvironmentFile :: ConstraintSource -> PackageEnvironment -> FilePath + -> IO (Maybe (ParseResult PackageEnvironment)) +readPackageEnvironmentFile src initial file = + handleNotExists $ + fmap (Just . 
parsePackageEnvironment src initial) (readFile file) + where + handleNotExists action = catchIO action $ \ioe -> + if isDoesNotExistError ioe + then return Nothing + else ioError ioe + +-- | Parse the package environment file. +parsePackageEnvironment :: ConstraintSource -> PackageEnvironment -> String + -> ParseResult PackageEnvironment +parsePackageEnvironment src initial str = do + fields <- readFields str + let (knownSections, others) = partition isKnownSection fields + pkgEnv <- parse others + let config = pkgEnvSavedConfig pkgEnv + installDirs0 = savedUserInstallDirs config + (haddockFlags, installDirs, paths, args) <- + foldM parseSections + (savedHaddockFlags config, installDirs0, [], []) + knownSections + return pkgEnv { + pkgEnvSavedConfig = config { + savedConfigureFlags = (savedConfigureFlags config) { + configProgramPaths = paths, + configProgramArgs = args + }, + savedHaddockFlags = haddockFlags, + savedUserInstallDirs = installDirs, + savedGlobalInstallDirs = installDirs + } + } + + where + isKnownSection :: ParseUtils.Field -> Bool + isKnownSection (ParseUtils.Section _ "haddock" _ _) = True + isKnownSection (ParseUtils.Section _ "install-dirs" _ _) = True + isKnownSection (ParseUtils.Section _ "program-locations" _ _) = True + isKnownSection (ParseUtils.Section _ "program-default-options" _ _) = True + isKnownSection _ = False + + parse :: [ParseUtils.Field] -> ParseResult PackageEnvironment + parse = parseFields (pkgEnvFieldDescrs src) initial + + parseSections :: SectionsAccum -> ParseUtils.Field + -> ParseResult SectionsAccum + parseSections accum@(h,d,p,a) + (ParseUtils.Section _ "haddock" name fs) + | name == "" = do h' <- parseFields haddockFlagsFields h fs + return (h', d, p, a) + | otherwise = do + warning "The 'haddock' section should be unnamed" + return accum + parseSections (h,d,p,a) + (ParseUtils.Section line "install-dirs" name fs) + | name == "" = do d' <- parseFields installDirsFields d fs + return (h, d',p,a) + | otherwise = + 
syntaxError line $ + "Named 'install-dirs' section: '" ++ name + ++ "'. Note that named 'install-dirs' sections are not allowed in the '" + ++ userPackageEnvironmentFile ++ "' file." + parseSections accum@(h, d,p,a) + (ParseUtils.Section _ "program-locations" name fs) + | name == "" = do p' <- parseFields withProgramsFields p fs + return (h, d, p', a) + | otherwise = do + warning "The 'program-locations' section should be unnamed" + return accum + parseSections accum@(h, d, p, a) + (ParseUtils.Section _ "program-default-options" name fs) + | name == "" = do a' <- parseFields withProgramOptionsFields a fs + return (h, d, p, a') + | otherwise = do + warning "The 'program-default-options' section should be unnamed" + return accum + parseSections accum f = do + warning $ "Unrecognized stanza on line " ++ show (lineNo f) + return accum + +-- | Accumulator type for 'parseSections'. +type SectionsAccum = (HaddockFlags, InstallDirs (Flag PathTemplate) + , [(String, FilePath)], [(String, [String])]) + +-- | Write out the package environment file. +writePackageEnvironmentFile :: FilePath -> PackageEnvironment -> IO () +writePackageEnvironmentFile path pkgEnv = do + let tmpPath = (path <.> "tmp") + writeFile tmpPath $ explanation ++ pkgEnvStr ++ "\n" + renameFile tmpPath path + where + pkgEnvStr = showPackageEnvironment pkgEnv + explanation = unlines + ["-- This is a Cabal package environment file." + ,"-- THIS FILE IS AUTO-GENERATED. DO NOT EDIT DIRECTLY." + ,"-- Please create a 'cabal.config' file in the same directory" + ,"-- if you want to change the default settings for this sandbox." + ,"","" + ] + +-- | Pretty-print the package environment. +showPackageEnvironment :: PackageEnvironment -> String +showPackageEnvironment pkgEnv = showPackageEnvironmentWithComments Nothing pkgEnv + +-- | Pretty-print the package environment with default values for empty fields +-- commented out (just like the default ~/.cabal/config). 
+showPackageEnvironmentWithComments :: (Maybe PackageEnvironment) + -> PackageEnvironment + -> String +showPackageEnvironmentWithComments mdefPkgEnv pkgEnv = Disp.render $ + ppFields (pkgEnvFieldDescrs ConstraintSourceUnknown) + mdefPkgEnv pkgEnv + $+$ Disp.text "" + $+$ ppSection "install-dirs" "" installDirsFields + (fmap installDirsSection mdefPkgEnv) (installDirsSection pkgEnv) + where + installDirsSection = savedUserInstallDirs . pkgEnvSavedConfig diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/Timestamp.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/Timestamp.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/Timestamp.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/Timestamp.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,273 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Sandbox.Timestamp +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Timestamp file handling (for add-source dependencies). 
+----------------------------------------------------------------------------- + +module Distribution.Client.Sandbox.Timestamp ( + AddSourceTimestamp, + withAddTimestamps, + withUpdateTimestamps, + maybeAddCompilerTimestampRecord, + listModifiedDeps, + removeTimestamps, + + -- * For testing + TimestampFileRecord, + readTimestampFile, + writeTimestampFile + ) where + +import Control.Monad (filterM, forM, when) +import Data.Char (isSpace) +import Data.List (partition) +import System.Directory (renameFile) +import System.FilePath ((<.>), ()) +import qualified Data.Map as M + +import Distribution.Compiler (CompilerId) +import Distribution.Simple.Utils (debug, die', warn) +import Distribution.System (Platform) +import Distribution.Text (display) +import Distribution.Verbosity (Verbosity) + +import Distribution.Client.SrcDist (allPackageSourceFiles) +import Distribution.Client.Sandbox.Index + (ListIgnoredBuildTreeRefs (ListIgnored), RefTypesToList(OnlyLinks) + ,listBuildTreeRefs) +import Distribution.Client.SetupWrapper + +import Distribution.Compat.Exception (catchIO) +import Distribution.Compat.Time (ModTime, getCurTime, + getModTime, + posixSecondsToModTime) + + +-- | Timestamp of an add-source dependency. +type AddSourceTimestamp = (FilePath, ModTime) +-- | Timestamp file record - a string identifying the compiler & platform plus a +-- list of add-source timestamps. +type TimestampFileRecord = (String, [AddSourceTimestamp]) + +timestampRecordKey :: CompilerId -> Platform -> String +timestampRecordKey compId platform = display platform ++ "-" ++ display compId + +-- | The 'add-source-timestamps' file keeps the timestamps of all add-source +-- dependencies. It is initially populated by 'sandbox add-source' and kept +-- current by 'reinstallAddSourceDeps' and 'configure -w'. The user can install +-- add-source deps manually with 'cabal install' after having edited them, so we +-- can err on the side of caution sometimes. 
+-- FIXME: We should keep this info in the index file, together with build tree +-- refs. +timestampFileName :: FilePath +timestampFileName = "add-source-timestamps" + +-- | Read the timestamp file. Exits with error if the timestamp file is +-- corrupted. Returns an empty list if the file doesn't exist. +readTimestampFile :: Verbosity -> FilePath -> IO [TimestampFileRecord] +readTimestampFile verbosity timestampFile = do + timestampString <- readFile timestampFile `catchIO` \_ -> return "[]" + case reads timestampString of + [(version, s)] + | version == (2::Int) -> + case reads s of + [(timestamps, s')] | all isSpace s' -> return timestamps + _ -> dieCorrupted + | otherwise -> dieWrongFormat + + -- Old format (timestamps are POSIX seconds). Convert to new format. + [] -> + case reads timestampString of + [(timestamps, s)] | all isSpace s -> do + let timestamps' = map (\(i, ts) -> + (i, map (\(p, t) -> + (p, posixSecondsToModTime t)) ts)) + timestamps + writeTimestampFile timestampFile timestamps' + return timestamps' + _ -> dieCorrupted + _ -> dieCorrupted + where + dieWrongFormat = die' verbosity $ wrongFormat ++ deleteAndRecreate + dieCorrupted = die' verbosity $ corrupted ++ deleteAndRecreate + wrongFormat = "The timestamps file is in the wrong format." + corrupted = "The timestamps file is corrupted." + deleteAndRecreate = " Please delete and recreate the sandbox." + +-- | Write the timestamp file, atomically. +writeTimestampFile :: FilePath -> [TimestampFileRecord] -> IO () +writeTimestampFile timestampFile timestamps = do + writeFile timestampTmpFile "2\n" -- version + appendFile timestampTmpFile (show timestamps ++ "\n") + renameFile timestampTmpFile timestampFile + where + timestampTmpFile = timestampFile <.> "tmp" + +-- | Read, process and write the timestamp file in one go. 
+withTimestampFile :: Verbosity -> FilePath + -> ([TimestampFileRecord] -> IO [TimestampFileRecord]) + -> IO () +withTimestampFile verbosity sandboxDir process = do + let timestampFile = sandboxDir timestampFileName + timestampRecords <- readTimestampFile verbosity timestampFile >>= process + writeTimestampFile timestampFile timestampRecords + +-- | Given a list of 'AddSourceTimestamp's, a list of paths to add-source deps +-- we've added and an initial timestamp, add an 'AddSourceTimestamp' to the list +-- for each path. If a timestamp for a given path already exists in the list, +-- update it. +addTimestamps :: ModTime -> [AddSourceTimestamp] -> [FilePath] + -> [AddSourceTimestamp] +addTimestamps initial timestamps newPaths = + [ (p, initial) | p <- newPaths ] ++ oldTimestamps + where + (oldTimestamps, _toBeUpdated) = + partition (\(path, _) -> path `notElem` newPaths) timestamps + +-- | Given a list of 'AddSourceTimestamp's, a list of paths to add-source deps +-- we've reinstalled and a new timestamp value, update the timestamp value for +-- the deps in the list. If there are new paths in the list, ignore them. +updateTimestamps :: [AddSourceTimestamp] -> [FilePath] -> ModTime + -> [AddSourceTimestamp] +updateTimestamps timestamps pathsToUpdate newTimestamp = + foldr updateTimestamp [] timestamps + where + updateTimestamp t@(path, _oldTimestamp) rest + | path `elem` pathsToUpdate = (path, newTimestamp) : rest + | otherwise = t : rest + +-- | Given a list of 'TimestampFileRecord's and a list of paths to add-source +-- deps we've removed, remove those deps from the list. +removeTimestamps' :: [AddSourceTimestamp] -> [FilePath] -> [AddSourceTimestamp] +removeTimestamps' l pathsToRemove = foldr removeTimestamp [] l + where + removeTimestamp t@(path, _oldTimestamp) rest = + if path `elem` pathsToRemove + then rest + else t : rest + +-- | If a timestamp record for this compiler doesn't exist, add a new one. 
+maybeAddCompilerTimestampRecord :: Verbosity -> FilePath -> FilePath + -> CompilerId -> Platform + -> IO () +maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile + compId platform = do + let key = timestampRecordKey compId platform + withTimestampFile verbosity sandboxDir $ \timestampRecords -> do + case lookup key timestampRecords of + Just _ -> return timestampRecords + Nothing -> do + buildTreeRefs <- listBuildTreeRefs verbosity ListIgnored OnlyLinks + indexFile + now <- getCurTime + let timestamps = map (\p -> (p, now)) buildTreeRefs + return $ (key, timestamps):timestampRecords + +-- | Given an IO action that returns a list of build tree refs, add those +-- build tree refs to the timestamps file (for all compilers). +withAddTimestamps :: Verbosity -> FilePath -> IO [FilePath] -> IO () +withAddTimestamps verbosity sandboxDir act = do + let initialTimestamp = minBound + withActionOnAllTimestamps (addTimestamps initialTimestamp) verbosity sandboxDir act + +-- | Given a list of build tree refs, remove those +-- build tree refs from the timestamps file (for all compilers). +removeTimestamps :: Verbosity -> FilePath -> [FilePath] -> IO () +removeTimestamps verbosity idxFile = + withActionOnAllTimestamps removeTimestamps' verbosity idxFile . return + +-- | Given an IO action that returns a list of build tree refs, update the +-- timestamps of the returned build tree refs to the current time (only for the +-- given compiler & platform). +withUpdateTimestamps :: Verbosity -> FilePath -> CompilerId -> Platform + ->([AddSourceTimestamp] -> IO [FilePath]) + -> IO () +withUpdateTimestamps = + withActionOnCompilerTimestamps updateTimestamps + +-- | Helper for implementing 'withAddTimestamps' and +-- 'withRemoveTimestamps'. Runs a given action on the list of +-- 'AddSourceTimestamp's for all compilers, applies 'f' to the result and then +-- updates the timestamp file. The IO action is run only once. 
+withActionOnAllTimestamps :: ([AddSourceTimestamp] -> [FilePath] + -> [AddSourceTimestamp]) + -> Verbosity + -> FilePath + -> IO [FilePath] + -> IO () +withActionOnAllTimestamps f verbosity sandboxDir act = + withTimestampFile verbosity sandboxDir $ \timestampRecords -> do + paths <- act + return [(key, f timestamps paths) | (key, timestamps) <- timestampRecords] + +-- | Helper for implementing 'withUpdateTimestamps'. Runs a given action on the +-- list of 'AddSourceTimestamp's for this compiler, applies 'f' to the result +-- and then updates the timestamp file record. The IO action is run only once. +withActionOnCompilerTimestamps :: ([AddSourceTimestamp] + -> [FilePath] -> ModTime + -> [AddSourceTimestamp]) + -> Verbosity + -> FilePath + -> CompilerId + -> Platform + -> ([AddSourceTimestamp] -> IO [FilePath]) + -> IO () +withActionOnCompilerTimestamps f verbosity sandboxDir compId platform act = do + let needle = timestampRecordKey compId platform + withTimestampFile verbosity sandboxDir $ \timestampRecords -> do + timestampRecords' <- forM timestampRecords $ \r@(key, timestamps) -> + if key == needle + then do paths <- act timestamps + now <- getCurTime + return (key, f timestamps paths now) + else return r + return timestampRecords' + +-- | Has this dependency been modified since we have last looked at it? +isDepModified :: Verbosity -> ModTime -> AddSourceTimestamp -> IO Bool +isDepModified verbosity now (packageDir, timestamp) = do + debug verbosity ("Checking whether the dependency is modified: " ++ packageDir) + -- TODO: we should properly plumb the correct options through + -- instead of using defaultSetupScriptOptions + depSources <- allPackageSourceFiles verbosity defaultSetupScriptOptions packageDir + go depSources + + where + go [] = return False + go (dep0:rest) = do + -- FIXME: What if the clock jumps backwards at any point? For now we only + -- print a warning. 
+ let dep = packageDir dep0 + modTime <- getModTime dep + when (modTime > now) $ + warn verbosity $ "File '" ++ dep + ++ "' has a modification time that is in the future." + if modTime >= timestamp + then do + debug verbosity ("Dependency has a modified source file: " ++ dep) + return True + else go rest + +-- | List all modified dependencies. +listModifiedDeps :: Verbosity -> FilePath -> CompilerId -> Platform + -> M.Map FilePath a + -- ^ The set of all installed add-source deps. + -> IO [FilePath] +listModifiedDeps verbosity sandboxDir compId platform installedDepsMap = do + timestampRecords <- readTimestampFile verbosity (sandboxDir timestampFileName) + let needle = timestampRecordKey compId platform + timestamps <- maybe noTimestampRecord return + (lookup needle timestampRecords) + now <- getCurTime + fmap (map fst) . filterM (isDepModified verbosity now) + . filter (\ts -> fst ts `M.member` installedDepsMap) + $ timestamps + + where + noTimestampRecord = die' verbosity $ "Сouldn't find a timestamp record for the given " + ++ "compiler/platform pair. " + ++ "Please report this on the Cabal bug tracker: " + ++ "https://github.com/haskell/cabal/issues/new ." diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox/Types.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,65 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Sandbox.Types +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Helpers for writing code that works both inside and outside a sandbox. 
+----------------------------------------------------------------------------- + +module Distribution.Client.Sandbox.Types ( + UseSandbox(..), isUseSandbox, whenUsingSandbox, + SandboxPackageInfo(..) + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex +import Distribution.Client.Types (UnresolvedSourcePackage) + +import qualified Data.Set as S + +-- | Are we using a sandbox? +data UseSandbox = UseSandbox FilePath | NoSandbox + +instance Monoid UseSandbox where + mempty = NoSandbox + mappend = (<>) + +instance Semigroup UseSandbox where + NoSandbox <> s = s + u0@(UseSandbox _) <> NoSandbox = u0 + (UseSandbox _) <> u1@(UseSandbox _) = u1 + +-- | Convert a @UseSandbox@ value to a boolean. Useful in conjunction with +-- @when@. +isUseSandbox :: UseSandbox -> Bool +isUseSandbox (UseSandbox _) = True +isUseSandbox NoSandbox = False + +-- | Execute an action only if we're in a sandbox, feeding to it the path to the +-- sandbox directory. +whenUsingSandbox :: UseSandbox -> (FilePath -> IO ()) -> IO () +whenUsingSandbox NoSandbox _ = return () +whenUsingSandbox (UseSandbox sandboxDir) act = act sandboxDir + +-- | Data about the packages installed in the sandbox that is passed from +-- 'reinstallAddSourceDeps' to the solver. +data SandboxPackageInfo = SandboxPackageInfo { + modifiedAddSourceDependencies :: ![UnresolvedSourcePackage], + -- ^ Modified add-source deps that we want to reinstall. These are guaranteed + -- to be already installed in the sandbox. + + otherAddSourceDependencies :: ![UnresolvedSourcePackage], + -- ^ Remaining add-source deps. Some of these may be not installed in the + -- sandbox. + + otherInstalledSandboxPackages :: !InstalledPackageIndex.InstalledPackageIndex, + -- ^ All packages installed in the sandbox. Intersection with + -- 'modifiedAddSourceDependencies' and/or 'otherAddSourceDependencies' can be + -- non-empty. 
+ + allAddSourceDependencies :: !(S.Set FilePath) + -- ^ A set of paths to all add-source dependencies, for convenience. + } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Sandbox.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,867 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE FlexibleContexts #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Sandbox +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- UI for the sandboxing functionality. +----------------------------------------------------------------------------- + +module Distribution.Client.Sandbox ( + sandboxInit, + sandboxDelete, + sandboxAddSource, + sandboxAddSourceSnapshot, + sandboxDeleteSource, + sandboxListSources, + sandboxHcPkg, + dumpPackageEnvironment, + withSandboxBinDirOnSearchPath, + + getSandboxConfigFilePath, + loadConfigOrSandboxConfig, + findSavedDistPref, + initPackageDBIfNeeded, + maybeWithSandboxDirOnSearchPath, + + WereDepsReinstalled(..), + reinstallAddSourceDeps, + maybeReinstallAddSourceDeps, + + SandboxPackageInfo(..), + maybeWithSandboxPackageInfo, + + tryGetIndexFilePath, + sandboxBuildDir, + getInstalledPackagesInSandbox, + updateSandboxConfigFileFlag, + updateInstallDirs, + + getPersistOrConfigCompiler + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Client.Setup + ( SandboxFlags(..), ConfigFlags(..), ConfigExFlags(..), InstallFlags(..) 
+ , GlobalFlags(..), configCompilerAux', configPackageDB' + , defaultConfigExFlags, defaultInstallFlags + , defaultSandboxLocation, withRepoContext ) +import Distribution.Client.Sandbox.Timestamp ( listModifiedDeps + , maybeAddCompilerTimestampRecord + , withAddTimestamps + , removeTimestamps ) +import Distribution.Client.Config + ( SavedConfig(..), defaultUserInstall, loadConfig ) +import Distribution.Client.Dependency ( foldProgress ) +import Distribution.Client.IndexUtils ( BuildTreeRefType(..) ) +import Distribution.Client.Install ( InstallArgs, + makeInstallContext, + makeInstallPlan, + processInstallPlan ) +import Distribution.Utils.NubList ( fromNubList ) + +import Distribution.Client.Sandbox.PackageEnvironment + ( PackageEnvironment(..), PackageEnvironmentType(..) + , createPackageEnvironmentFile, classifyPackageEnvironment + , tryLoadSandboxPackageEnvironmentFile, loadUserConfig + , commentPackageEnvironment, showPackageEnvironmentWithComments + , sandboxPackageEnvironmentFile, userPackageEnvironmentFile + , sandboxPackageDBPath ) +import Distribution.Client.Sandbox.Types ( SandboxPackageInfo(..) + , UseSandbox(..) ) +import Distribution.Client.SetupWrapper + ( SetupScriptOptions(..), defaultSetupScriptOptions ) +import Distribution.Client.Types ( PackageLocation(..) ) +import Distribution.Client.Utils ( inDir, tryCanonicalizePath + , tryFindAddSourcePackageDesc) +import Distribution.PackageDescription.Configuration + ( flattenPackageDescription ) +import Distribution.PackageDescription.Parsec ( readGenericPackageDescription ) +import Distribution.Simple.Compiler ( Compiler(..), PackageDB(..) 
) +import Distribution.Simple.Configure ( configCompilerAuxEx + , getPackageDBContents + , maybeGetPersistBuildConfig + , findDistPrefOrDefault + , findDistPref ) +import qualified Distribution.Simple.LocalBuildInfo as LocalBuildInfo +import Distribution.Simple.PreProcess ( knownSuffixHandlers ) +import Distribution.Simple.Program ( ProgramDb ) +import Distribution.Simple.Setup ( Flag(..), HaddockFlags(..) + , fromFlagOrDefault, flagToMaybe ) +import Distribution.Simple.SrcDist ( prepareTree ) +import Distribution.Simple.Utils ( die', debug, notice, info, warn + , debugNoWrap, defaultPackageDesc + , topHandlerWith + , createDirectoryIfMissingVerbose ) +import Distribution.Package ( Package(..) ) +import Distribution.System ( Platform ) +import Distribution.Text ( display ) +import Distribution.Verbosity ( Verbosity ) +import Distribution.Compat.Environment ( lookupEnv, setEnv ) +import Distribution.Client.Compat.FilePerms ( setFileHidden ) +import qualified Distribution.Client.Sandbox.Index as Index +import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) +import qualified Distribution.Simple.PackageIndex as InstalledPackageIndex +import qualified Distribution.Simple.Register as Register + +import Distribution.Solver.Types.SourcePackage + +import qualified Data.Map as M +import qualified Data.Set as S +import Data.Either (partitionEithers) +import Control.Exception ( assert, bracket_ ) +import Control.Monad ( forM, mapM, mapM_ ) +import Data.Bits ( shiftL, shiftR, xor ) +import Data.IORef ( newIORef, writeIORef, readIORef ) +import Data.List ( delete + , groupBy ) +import Data.Maybe ( fromJust ) +import Numeric ( showHex ) +import System.Directory ( canonicalizePath + , createDirectory + , doesDirectoryExist + , doesFileExist + , getCurrentDirectory + , removeDirectoryRecursive + , removeFile + , renameDirectory ) +import System.FilePath ( (), equalFilePath + , getSearchPath + , searchPathSeparator + , splitSearchPath + , takeDirectory ) + +-- +-- * 
Constants +-- + +-- | The name of the sandbox subdirectory where we keep snapshots of add-source +-- dependencies. +snapshotDirectoryName :: FilePath +snapshotDirectoryName = "snapshots" + +-- | Non-standard build dir that is used for building add-source deps instead of +-- "dist". Fixes surprising behaviour in some cases (see issue #1281). +sandboxBuildDir :: FilePath -> FilePath +sandboxBuildDir sandboxDir = "dist/dist-sandbox-" ++ showHex sandboxDirHash "" + where + sandboxDirHash = jenkins sandboxDir + + -- See http://en.wikipedia.org/wiki/Jenkins_hash_function + jenkins :: String -> Word32 + jenkins str = loop_finish $ foldl' loop 0 str + where + loop :: Word32 -> Char -> Word32 + loop hash key_i' = hash''' + where + key_i = toEnum . ord $ key_i' + hash' = hash + key_i + hash'' = hash' + (shiftL hash' 10) + hash''' = hash'' `xor` (shiftR hash'' 6) + + loop_finish :: Word32 -> Word32 + loop_finish hash = hash''' + where + hash' = hash + (shiftL hash 3) + hash'' = hash' `xor` (shiftR hash' 11) + hash''' = hash'' + (shiftL hash'' 15) + +-- +-- * Basic sandbox functions. +-- + +-- | If @--sandbox-config-file@ wasn't given on the command-line, set it to the +-- value of the @CABAL_SANDBOX_CONFIG@ environment variable, or else to +-- 'NoFlag'. +updateSandboxConfigFileFlag :: GlobalFlags -> IO GlobalFlags +updateSandboxConfigFileFlag globalFlags = + case globalSandboxConfigFile globalFlags of + Flag _ -> return globalFlags + NoFlag -> do + f' <- fmap (maybe NoFlag Flag) . lookupEnv $ "CABAL_SANDBOX_CONFIG" + return globalFlags { globalSandboxConfigFile = f' } + +-- | Return the path to the sandbox config file - either the default or the one +-- specified with @--sandbox-config-file@. 
+getSandboxConfigFilePath :: GlobalFlags -> IO FilePath +getSandboxConfigFilePath globalFlags = do + let sandboxConfigFileFlag = globalSandboxConfigFile globalFlags + case sandboxConfigFileFlag of + NoFlag -> do pkgEnvDir <- getCurrentDirectory + return (pkgEnvDir sandboxPackageEnvironmentFile) + Flag path -> return path + +-- | Load the @cabal.sandbox.config@ file (and possibly the optional +-- @cabal.config@). In addition to a @PackageEnvironment@, also return a +-- canonical path to the sandbox. Exit with error if the sandbox directory or +-- the package environment file do not exist. +tryLoadSandboxConfig :: Verbosity -> GlobalFlags + -> IO (FilePath, PackageEnvironment) +tryLoadSandboxConfig verbosity globalFlags = do + path <- getSandboxConfigFilePath globalFlags + tryLoadSandboxPackageEnvironmentFile verbosity path + (globalConfigFile globalFlags) + +-- | Return the name of the package index file for this package environment. +tryGetIndexFilePath :: Verbosity -> SavedConfig -> IO FilePath +tryGetIndexFilePath verbosity config = tryGetIndexFilePath' verbosity (savedGlobalFlags config) + +-- | The same as 'tryGetIndexFilePath', but takes 'GlobalFlags' instead of +-- 'SavedConfig'. +tryGetIndexFilePath' :: Verbosity -> GlobalFlags -> IO FilePath +tryGetIndexFilePath' verbosity globalFlags = do + let paths = fromNubList $ globalLocalRepos globalFlags + case paths of + [] -> die' verbosity $ "Distribution.Client.Sandbox.tryGetIndexFilePath: " ++ + "no local repos found. " ++ checkConfiguration + _ -> return $ (last paths) Index.defaultIndexFileName + where + checkConfiguration = "Please check your configuration ('" + ++ userPackageEnvironmentFile ++ "')." + +-- | Try to extract a 'PackageDB' from 'ConfigFlags'. Gives a better error +-- message than just pattern-matching. 
+getSandboxPackageDB :: Verbosity -> ConfigFlags -> IO PackageDB +getSandboxPackageDB verbosity configFlags = do + case configPackageDBs configFlags of + [Just sandboxDB@(SpecificPackageDB _)] -> return sandboxDB + -- TODO: should we allow multiple package DBs (e.g. with 'inherit')? + + [] -> + die' verbosity $ "Sandbox package DB is not specified. " ++ sandboxConfigCorrupt + [_] -> + die' verbosity $ "Unexpected contents of the 'package-db' field. " + ++ sandboxConfigCorrupt + _ -> + die' verbosity $ "Too many package DBs provided. " ++ sandboxConfigCorrupt + + where + sandboxConfigCorrupt = "Your 'cabal.sandbox.config' is probably corrupt." + + +-- | Which packages are installed in the sandbox package DB? +getInstalledPackagesInSandbox :: Verbosity -> ConfigFlags + -> Compiler -> ProgramDb + -> IO InstalledPackageIndex +getInstalledPackagesInSandbox verbosity configFlags comp progdb = do + sandboxDB <- getSandboxPackageDB verbosity configFlags + getPackageDBContents verbosity comp sandboxDB progdb + +-- | Temporarily add $SANDBOX_DIR/bin to $PATH. +withSandboxBinDirOnSearchPath :: FilePath -> IO a -> IO a +withSandboxBinDirOnSearchPath sandboxDir = bracket_ addBinDir rmBinDir + where + -- TODO: Instead of modifying the global process state, it'd be better to + -- set the environment individually for each subprocess invocation. This + -- will have to wait until the Shell monad is implemented; without it the + -- required changes are too intrusive. + addBinDir :: IO () + addBinDir = do + mbOldPath <- lookupEnv "PATH" + let newPath = maybe sandboxBin ((++) sandboxBin . (:) searchPathSeparator) + mbOldPath + setEnv "PATH" newPath + + rmBinDir :: IO () + rmBinDir = do + oldPath <- getSearchPath + let newPath = intercalate [searchPathSeparator] + (delete sandboxBin oldPath) + setEnv "PATH" newPath + + sandboxBin = sandboxDir "bin" + +-- | Initialise a package DB for this compiler if it doesn't exist. 
+initPackageDBIfNeeded :: Verbosity -> ConfigFlags + -> Compiler -> ProgramDb + -> IO () +initPackageDBIfNeeded verbosity configFlags comp progdb = do + SpecificPackageDB dbPath <- getSandboxPackageDB verbosity configFlags + packageDBExists <- doesDirectoryExist dbPath + unless packageDBExists $ + Register.initPackageDB verbosity comp progdb dbPath + when packageDBExists $ + debug verbosity $ "The package database already exists: " ++ dbPath + +-- | Entry point for the 'cabal sandbox dump-pkgenv' command. +dumpPackageEnvironment :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () +dumpPackageEnvironment verbosity _sandboxFlags globalFlags = do + (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags + commentPkgEnv <- commentPackageEnvironment sandboxDir + putStrLn . showPackageEnvironmentWithComments (Just commentPkgEnv) $ pkgEnv + +-- | Entry point for the 'cabal sandbox init' command. +sandboxInit :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () +sandboxInit verbosity sandboxFlags globalFlags = do + -- Warn if there's a 'cabal-dev' sandbox. + isCabalDevSandbox <- liftM2 (&&) (doesDirectoryExist "cabal-dev") + (doesFileExist $ "cabal-dev" "cabal.config") + when isCabalDevSandbox $ + warn verbosity $ + "You are apparently using a legacy (cabal-dev) sandbox. " + ++ "Legacy sandboxes may interact badly with native Cabal sandboxes. " + ++ "You may want to delete the 'cabal-dev' directory to prevent issues." + + -- Create the sandbox directory. + let sandboxDir' = fromFlagOrDefault defaultSandboxLocation + (sandboxLocation sandboxFlags) + createDirectoryIfMissingVerbose verbosity True sandboxDir' + sandboxDir <- tryCanonicalizePath sandboxDir' + setFileHidden sandboxDir + + -- Determine which compiler to use (using the value from ~/.cabal/config). + userConfig <- loadConfig verbosity (globalConfigFile globalFlags) + (comp, platform, progdb) <- configCompilerAuxEx (savedConfigureFlags userConfig) + + -- Create the package environment file. 
+ pkgEnvFile <- getSandboxConfigFilePath globalFlags + createPackageEnvironmentFile verbosity sandboxDir pkgEnvFile comp platform + (_sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags + let config = pkgEnvSavedConfig pkgEnv + configFlags = savedConfigureFlags config + + -- Create the index file if it doesn't exist. + indexFile <- tryGetIndexFilePath verbosity config + indexFileExists <- doesFileExist indexFile + if indexFileExists + then notice verbosity $ "Using an existing sandbox located at " ++ sandboxDir + else notice verbosity $ "Creating a new sandbox at " ++ sandboxDir + Index.createEmpty verbosity indexFile + + -- Create the package DB for the default compiler. + initPackageDBIfNeeded verbosity configFlags comp progdb + maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile + (compilerId comp) platform + +-- | Entry point for the 'cabal sandbox delete' command. +sandboxDelete :: Verbosity -> SandboxFlags -> GlobalFlags -> IO () +sandboxDelete verbosity _sandboxFlags globalFlags = do + (useSandbox, _) <- loadConfigOrSandboxConfig + verbosity + globalFlags { globalRequireSandbox = Flag False } + case useSandbox of + NoSandbox -> warn verbosity "Not in a sandbox." + UseSandbox sandboxDir -> do + curDir <- getCurrentDirectory + pkgEnvFile <- getSandboxConfigFilePath globalFlags + + -- Remove the @cabal.sandbox.config@ file, unless it's in a non-standard + -- location. + let isNonDefaultConfigLocation = not $ equalFilePath pkgEnvFile $ + curDir sandboxPackageEnvironmentFile + + if isNonDefaultConfigLocation + then warn verbosity $ "Sandbox config file is in non-default location: '" + ++ pkgEnvFile ++ "'.\n Please delete manually." + else removeFile pkgEnvFile + + -- Remove the sandbox directory, unless we're using a shared sandbox. 
+ let isNonDefaultSandboxLocation = not $ equalFilePath sandboxDir $ + curDir defaultSandboxLocation + + when isNonDefaultSandboxLocation $ + die' verbosity $ "Non-default sandbox location used: '" ++ sandboxDir + ++ "'.\nAssuming a shared sandbox. Please delete '" + ++ sandboxDir ++ "' manually." + + absSandboxDir <- canonicalizePath sandboxDir + notice verbosity $ "Deleting the sandbox located at " ++ absSandboxDir + removeDirectoryRecursive absSandboxDir + + let + pathInsideSandbox = isPrefixOf absSandboxDir + + -- Warn the user if deleting the sandbox deleted a package database + -- referenced in the current environment. + checkPackagePaths var = do + let + checkPath path = do + absPath <- canonicalizePath path + (when (pathInsideSandbox absPath) . warn verbosity) + (var ++ " refers to package database " ++ path + ++ " inside the deleted sandbox.") + liftM (maybe [] splitSearchPath) (lookupEnv var) >>= mapM_ checkPath + + checkPackagePaths "CABAL_SANDBOX_PACKAGE_PATH" + checkPackagePaths "GHC_PACKAGE_PATH" + checkPackagePaths "GHCJS_PACKAGE_PATH" + +-- Common implementation of 'sandboxAddSource' and 'sandboxAddSourceSnapshot'. +doAddSource :: Verbosity -> [FilePath] -> FilePath -> PackageEnvironment + -> BuildTreeRefType + -> IO () +doAddSource verbosity buildTreeRefs sandboxDir pkgEnv refType = do + let savedConfig = pkgEnvSavedConfig pkgEnv + indexFile <- tryGetIndexFilePath verbosity savedConfig + + -- If we're running 'sandbox add-source' for the first time for this compiler, + -- we need to create an initial timestamp record. + (comp, platform, _) <- configCompilerAuxEx . savedConfigureFlags $ savedConfig + maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile + (compilerId comp) platform + + withAddTimestamps verbosity sandboxDir $ do + -- Path canonicalisation is done in addBuildTreeRefs, but we do it + -- twice because of the timestamps file. 
+ buildTreeRefs' <- mapM tryCanonicalizePath buildTreeRefs + Index.addBuildTreeRefs verbosity indexFile buildTreeRefs' refType + return buildTreeRefs' + +-- | Entry point for the 'cabal sandbox add-source' command. +sandboxAddSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags + -> IO () +sandboxAddSource verbosity buildTreeRefs sandboxFlags globalFlags = do + (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags + + if fromFlagOrDefault False (sandboxSnapshot sandboxFlags) + then sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv + else doAddSource verbosity buildTreeRefs sandboxDir pkgEnv LinkRef + +-- | Entry point for the 'cabal sandbox add-source --snapshot' command. +sandboxAddSourceSnapshot :: Verbosity -> [FilePath] -> FilePath + -> PackageEnvironment + -> IO () +sandboxAddSourceSnapshot verbosity buildTreeRefs sandboxDir pkgEnv = do + let snapshotDir = sandboxDir snapshotDirectoryName + + -- Use 'D.S.SrcDist.prepareTree' to copy each package's files to our private + -- location. + createDirectoryIfMissingVerbose verbosity True snapshotDir + + -- Collect the package descriptions first, so that if some path does not refer + -- to a cabal package, we fail immediately. + pkgs <- forM buildTreeRefs $ \buildTreeRef -> + inDir (Just buildTreeRef) $ + return . flattenPackageDescription + =<< readGenericPackageDescription verbosity + =<< defaultPackageDesc verbosity + + -- Copy the package sources to "snapshots/$PKGNAME-$VERSION-tmp". If + -- 'prepareTree' throws an error at any point, the old snapshots will still be + -- in consistent state. + tmpDirs <- forM (zip buildTreeRefs pkgs) $ \(buildTreeRef, pkg) -> + inDir (Just buildTreeRef) $ do + let targetDir = snapshotDir (display . 
packageId $ pkg) + targetTmpDir = targetDir ++ "-tmp" + dirExists <- doesDirectoryExist targetTmpDir + when dirExists $ + removeDirectoryRecursive targetDir + createDirectory targetTmpDir + prepareTree verbosity pkg Nothing targetTmpDir knownSuffixHandlers + return (targetTmpDir, targetDir) + + -- Now rename the "snapshots/$PKGNAME-$VERSION-tmp" dirs to + -- "snapshots/$PKGNAME-$VERSION". + snapshots <- forM tmpDirs $ \(targetTmpDir, targetDir) -> do + dirExists <- doesDirectoryExist targetDir + when dirExists $ + removeDirectoryRecursive targetDir + renameDirectory targetTmpDir targetDir + return targetDir + + -- Once the packages are copied, just 'add-source' them as usual. + doAddSource verbosity snapshots sandboxDir pkgEnv SnapshotRef + +-- | Entry point for the 'cabal sandbox delete-source' command. +sandboxDeleteSource :: Verbosity -> [FilePath] -> SandboxFlags -> GlobalFlags + -> IO () +sandboxDeleteSource verbosity buildTreeRefs _sandboxFlags globalFlags = do + (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags + indexFile <- tryGetIndexFilePath verbosity (pkgEnvSavedConfig pkgEnv) + + (results, convDict) <- + Index.removeBuildTreeRefs verbosity indexFile buildTreeRefs + + let (failedPaths, removedPaths) = partitionEithers results + removedRefs = fmap convDict removedPaths + + unless (null removedPaths) $ do + removeTimestamps verbosity sandboxDir removedPaths + + notice verbosity $ "Success deleting sources: " ++ + showL removedRefs ++ "\n\n" + + unless (null failedPaths) $ do + let groupedFailures = groupBy errorType failedPaths + mapM_ handleErrors groupedFailures + die' verbosity $ "The sources with the above errors were skipped. (" ++ + showL (fmap getPath failedPaths) ++ ")" + + notice verbosity $ "Note: 'sandbox delete-source' only unregisters the " ++ + "source dependency, but does not remove the package " ++ + "from the sandbox package DB.\n\n" ++ + "Use 'sandbox hc-pkg -- unregister' to do that." 
+ where + getPath (Index.ErrNonregisteredSource p) = p + getPath (Index.ErrNonexistentSource p) = p + + showPaths f = concat . intersperse " " . fmap (show . f) + + showL = showPaths id + + showE [] = return ' ' + showE errs = showPaths getPath errs + + errorType Index.ErrNonregisteredSource{} Index.ErrNonregisteredSource{} = + True + errorType Index.ErrNonexistentSource{} Index.ErrNonexistentSource{} = True + errorType _ _ = False + + handleErrors [] = return () + handleErrors errs@(Index.ErrNonregisteredSource{}:_) = + warn verbosity ("Sources not registered: " ++ showE errs ++ "\n\n") + handleErrors errs@(Index.ErrNonexistentSource{}:_) = + warn verbosity + ("Source directory not found for paths: " ++ showE errs ++ "\n" + ++ "If you are trying to delete a reference to a removed directory, " + ++ "please provide the full absolute path " + ++ "(as given by `sandbox list-sources`).\n\n") + +-- | Entry point for the 'cabal sandbox list-sources' command. +sandboxListSources :: Verbosity -> SandboxFlags -> GlobalFlags + -> IO () +sandboxListSources verbosity _sandboxFlags globalFlags = do + (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags + indexFile <- tryGetIndexFilePath verbosity (pkgEnvSavedConfig pkgEnv) + + refs <- Index.listBuildTreeRefs verbosity + Index.ListIgnored Index.LinksAndSnapshots indexFile + when (null refs) $ + notice verbosity $ "Index file '" ++ indexFile + ++ "' has no references to local build trees." + when (not . null $ refs) $ do + notice verbosity $ "Source dependencies registered " + ++ "in the current sandbox ('" ++ sandboxDir ++ "'):\n\n" + mapM_ putStrLn refs + notice verbosity $ "\nTo unregister source dependencies, " + ++ "use the 'sandbox delete-source' command." + +-- | Entry point for the 'cabal sandbox hc-pkg' command. Invokes the @hc-pkg@ +-- tool with provided arguments, restricted to the sandbox. 
+sandboxHcPkg :: Verbosity -> SandboxFlags -> GlobalFlags -> [String] -> IO () +sandboxHcPkg verbosity _sandboxFlags globalFlags extraArgs = do + (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags + let configFlags = savedConfigureFlags . pkgEnvSavedConfig $ pkgEnv + -- Invoke hc-pkg for the most recently configured compiler (if any), + -- using the right package-db for the compiler (see #1935). + (comp, platform, progdb) <- getPersistOrConfigCompiler configFlags + let dir = sandboxPackageDBPath sandboxDir comp platform + dbStack = [GlobalPackageDB, SpecificPackageDB dir] + Register.invokeHcPkg verbosity comp progdb dbStack extraArgs + +updateInstallDirs :: Flag Bool + -> (UseSandbox, SavedConfig) -> (UseSandbox, SavedConfig) +updateInstallDirs userInstallFlag (useSandbox, savedConfig) = + case useSandbox of + NoSandbox -> + let savedConfig' = savedConfig { + savedConfigureFlags = configureFlags { + configInstallDirs = installDirs + } + } + in (useSandbox, savedConfig') + _ -> (useSandbox, savedConfig) + where + configureFlags = savedConfigureFlags savedConfig + userInstallDirs = savedUserInstallDirs savedConfig + globalInstallDirs = savedGlobalInstallDirs savedConfig + installDirs | userInstall = userInstallDirs + | otherwise = globalInstallDirs + userInstall = fromFlagOrDefault defaultUserInstall + (configUserInstall configureFlags `mappend` userInstallFlag) + +-- | Check which type of package environment we're in and return a +-- correctly-initialised @SavedConfig@ and a @UseSandbox@ value that indicates +-- whether we're working in a sandbox. +loadConfigOrSandboxConfig :: Verbosity + -> GlobalFlags -- ^ For @--config-file@ and + -- @--sandbox-config-file@. 
+ -> IO (UseSandbox, SavedConfig) +loadConfigOrSandboxConfig verbosity globalFlags = do + let configFileFlag = globalConfigFile globalFlags + sandboxConfigFileFlag = globalSandboxConfigFile globalFlags + ignoreSandboxFlag = globalIgnoreSandbox globalFlags + + pkgEnvDir <- getPkgEnvDir sandboxConfigFileFlag + pkgEnvType <- classifyPackageEnvironment pkgEnvDir sandboxConfigFileFlag + ignoreSandboxFlag + case pkgEnvType of + -- A @cabal.sandbox.config@ file (and possibly @cabal.config@) is present. + SandboxPackageEnvironment -> do + (sandboxDir, pkgEnv) <- tryLoadSandboxConfig verbosity globalFlags + -- Prints an error message and exits on error. + let config = pkgEnvSavedConfig pkgEnv + return (UseSandbox sandboxDir, config) + + -- Only @cabal.config@ is present. + UserPackageEnvironment -> do + config <- loadConfig verbosity configFileFlag + userConfig <- loadUserConfig verbosity pkgEnvDir Nothing + let config' = config `mappend` userConfig + dieIfSandboxRequired config' + return (NoSandbox, config') + + -- Neither @cabal.sandbox.config@ nor @cabal.config@ are present. + AmbientPackageEnvironment -> do + config <- loadConfig verbosity configFileFlag + let globalConstraintsOpt = + flagToMaybe . globalConstraintsFile . savedGlobalFlags $ config + globalConstraintConfig <- + loadUserConfig verbosity pkgEnvDir globalConstraintsOpt + let config' = config `mappend` globalConstraintConfig + dieIfSandboxRequired config + return (NoSandbox, config') + + where + -- Return the path to the package environment directory - either the + -- current directory or the one that @--sandbox-config-file@ resides in. + getPkgEnvDir :: (Flag FilePath) -> IO FilePath + getPkgEnvDir sandboxConfigFileFlag = do + case sandboxConfigFileFlag of + NoFlag -> getCurrentDirectory + Flag path -> tryCanonicalizePath . takeDirectory $ path + + -- Die if @--require-sandbox@ was specified and we're not inside a sandbox. 
+ dieIfSandboxRequired :: SavedConfig -> IO () + dieIfSandboxRequired config = checkFlag flag + where + flag = (globalRequireSandbox . savedGlobalFlags $ config) + `mappend` (globalRequireSandbox globalFlags) + checkFlag (Flag True) = + die' verbosity $ "'require-sandbox' is set to True, but no sandbox is present. " + ++ "Use '--no-require-sandbox' if you want to override " + ++ "'require-sandbox' temporarily." + checkFlag (Flag False) = return () + checkFlag (NoFlag) = return () + +-- | Return the saved \"dist/\" prefix, or the default prefix. +findSavedDistPref :: SavedConfig -> Flag FilePath -> IO FilePath +findSavedDistPref config flagDistPref = do + let defDistPref = useDistPref defaultSetupScriptOptions + flagDistPref' = configDistPref (savedConfigureFlags config) + `mappend` flagDistPref + findDistPref defDistPref flagDistPref' + +-- | If we're in a sandbox, call @withSandboxBinDirOnSearchPath@, otherwise do +-- nothing. +maybeWithSandboxDirOnSearchPath :: UseSandbox -> IO a -> IO a +maybeWithSandboxDirOnSearchPath NoSandbox act = act +maybeWithSandboxDirOnSearchPath (UseSandbox sandboxDir) act = + withSandboxBinDirOnSearchPath sandboxDir $ act + +-- | Had reinstallAddSourceDeps actually reinstalled any dependencies? +data WereDepsReinstalled = ReinstalledSomeDeps | NoDepsReinstalled + +-- | Reinstall those add-source dependencies that have been modified since +-- we've last installed them. Assumes that we're working inside a sandbox. 
+reinstallAddSourceDeps :: Verbosity + -> ConfigFlags -> ConfigExFlags + -> InstallFlags -> GlobalFlags + -> FilePath + -> IO WereDepsReinstalled +reinstallAddSourceDeps verbosity configFlags' configExFlags + installFlags globalFlags sandboxDir = topHandler' $ do + let sandboxDistPref = sandboxBuildDir sandboxDir + configFlags = configFlags' + { configDistPref = Flag sandboxDistPref } + haddockFlags = mempty + { haddockDistPref = Flag sandboxDistPref } + (comp, platform, progdb) <- configCompilerAux' configFlags + retVal <- newIORef NoDepsReinstalled + + withSandboxPackageInfo verbosity configFlags globalFlags + comp platform progdb sandboxDir $ \sandboxPkgInfo -> + unless (null $ modifiedAddSourceDependencies sandboxPkgInfo) $ do + + withRepoContext verbosity globalFlags $ \repoContext -> do + let args :: InstallArgs + args = ((configPackageDB' configFlags) + ,repoContext + ,comp, platform, progdb + ,UseSandbox sandboxDir, Just sandboxPkgInfo + ,globalFlags, configFlags, configExFlags, installFlags + ,haddockFlags) + + -- This can actually be replaced by a call to 'install', but we use a + -- lower-level API because of layer separation reasons. Additionally, we + -- might want to use some lower-level features this in the future. + withSandboxBinDirOnSearchPath sandboxDir $ do + installContext <- makeInstallContext verbosity args Nothing + installPlan <- foldProgress logMsg die'' return =<< + makeInstallPlan verbosity args installContext + + processInstallPlan verbosity args installContext installPlan + writeIORef retVal ReinstalledSomeDeps + + readIORef retVal + + where + die'' message = die' verbosity (message ++ installFailedInSandbox) + -- TODO: use a better error message, remove duplication. + installFailedInSandbox = + "Note: when using a sandbox, all packages are required to have " + ++ "consistent dependencies. Try reinstalling/unregistering the " + ++ "offending packages or recreating the sandbox." 
+ logMsg message rest = debugNoWrap verbosity message >> rest + + topHandler' = topHandlerWith $ \_ -> do + warn verbosity "Couldn't reinstall some add-source dependencies." + -- Here we can't know whether any deps have been reinstalled, so we have + -- to be conservative. + return ReinstalledSomeDeps + +-- | Produce a 'SandboxPackageInfo' and feed it to the given action. Note that +-- we don't update the timestamp file here - this is done in +-- 'postInstallActions'. +withSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags + -> Compiler -> Platform -> ProgramDb + -> FilePath + -> (SandboxPackageInfo -> IO ()) + -> IO () +withSandboxPackageInfo verbosity configFlags globalFlags + comp platform progdb sandboxDir cont = do + -- List all add-source deps. + indexFile <- tryGetIndexFilePath' verbosity globalFlags + buildTreeRefs <- Index.listBuildTreeRefs verbosity + Index.DontListIgnored Index.OnlyLinks indexFile + let allAddSourceDepsSet = S.fromList buildTreeRefs + + -- List all packages installed in the sandbox. + installedPkgIndex <- getInstalledPackagesInSandbox verbosity + configFlags comp progdb + let err = "Error reading sandbox package information." + -- Get the package descriptions for all add-source deps. + depsCabalFiles <- mapM (flip (tryFindAddSourcePackageDesc verbosity) err) buildTreeRefs + depsPkgDescs <- mapM (readGenericPackageDescription verbosity) depsCabalFiles + let depsMap = M.fromList (zip buildTreeRefs depsPkgDescs) + isInstalled pkgid = not . null + . InstalledPackageIndex.lookupSourcePackageId installedPkgIndex $ pkgid + installedDepsMap = M.filter (isInstalled . packageId) depsMap + + -- Get the package ids of modified (and installed) add-source deps. + modifiedAddSourceDeps <- listModifiedDeps verbosity sandboxDir + (compilerId comp) platform installedDepsMap + -- 'fromJust' here is safe because 'modifiedAddSourceDeps' are guaranteed to + -- be a subset of the keys of 'depsMap'. 
+ let modifiedDeps = [ (modDepPath, fromJust $ M.lookup modDepPath depsMap) + | modDepPath <- modifiedAddSourceDeps ] + modifiedDepsMap = M.fromList modifiedDeps + + assert (all (`S.member` allAddSourceDepsSet) modifiedAddSourceDeps) (return ()) + if (null modifiedDeps) + then info verbosity $ "Found no modified add-source deps." + else notice verbosity $ "Some add-source dependencies have been modified. " + ++ "They will be reinstalled..." + + -- Get the package ids of the remaining add-source deps (some are possibly not + -- installed). + let otherDeps = M.assocs (depsMap `M.difference` modifiedDepsMap) + + -- Finally, assemble a 'SandboxPackageInfo'. + cont $ SandboxPackageInfo (map toSourcePackage modifiedDeps) + (map toSourcePackage otherDeps) installedPkgIndex allAddSourceDepsSet + + where + toSourcePackage (path, pkgDesc) = SourcePackage + (packageId pkgDesc) pkgDesc (LocalUnpackedPackage path) Nothing + +-- | Same as 'withSandboxPackageInfo' if we're inside a sandbox and the +-- identity otherwise. +maybeWithSandboxPackageInfo :: Verbosity -> ConfigFlags -> GlobalFlags + -> Compiler -> Platform -> ProgramDb + -> UseSandbox + -> (Maybe SandboxPackageInfo -> IO ()) + -> IO () +maybeWithSandboxPackageInfo verbosity configFlags globalFlags + comp platform progdb useSandbox cont = + case useSandbox of + NoSandbox -> cont Nothing + UseSandbox sandboxDir -> withSandboxPackageInfo verbosity + configFlags globalFlags + comp platform progdb sandboxDir + (\spi -> cont (Just spi)) + +-- | Check if a sandbox is present and call @reinstallAddSourceDeps@ in that +-- case. 
+maybeReinstallAddSourceDeps :: Verbosity + -> Flag (Maybe Int) -- ^ The '-j' flag + -> ConfigFlags -- ^ Saved configure flags + -- (from dist/setup-config) + -> GlobalFlags + -> (UseSandbox, SavedConfig) + -> IO WereDepsReinstalled +maybeReinstallAddSourceDeps verbosity numJobsFlag configFlags' + globalFlags' (useSandbox, config) = do + case useSandbox of + NoSandbox -> return NoDepsReinstalled + UseSandbox sandboxDir -> do + -- Reinstall the modified add-source deps. + let configFlags = savedConfigureFlags config + `mappendSomeSavedFlags` + configFlags' + configExFlags = defaultConfigExFlags + `mappend` savedConfigureExFlags config + installFlags' = defaultInstallFlags + `mappend` savedInstallFlags config + installFlags = installFlags' { + installNumJobs = installNumJobs installFlags' + `mappend` numJobsFlag + } + globalFlags = savedGlobalFlags config + -- This makes it possible to override things like 'remote-repo-cache' + -- from the command line. These options are hidden, and are only + -- useful for debugging, so this should be fine. + `mappend` globalFlags' + reinstallAddSourceDeps + verbosity configFlags configExFlags + installFlags globalFlags sandboxDir + + where + + -- NOTE: we can't simply do @sandboxConfigFlags `mappend` savedFlags@ + -- because we don't want to auto-enable things like 'library-profiling' for + -- all add-source dependencies even if the user has passed + -- '--enable-library-profiling' to 'cabal configure'. These options are + -- supposed to be set in 'cabal.config'. 
+ mappendSomeSavedFlags :: ConfigFlags -> ConfigFlags -> ConfigFlags + mappendSomeSavedFlags sandboxConfigFlags savedFlags = + sandboxConfigFlags { + configHcFlavor = configHcFlavor sandboxConfigFlags + `mappend` configHcFlavor savedFlags, + configHcPath = configHcPath sandboxConfigFlags + `mappend` configHcPath savedFlags, + configHcPkg = configHcPkg sandboxConfigFlags + `mappend` configHcPkg savedFlags, + configProgramPaths = configProgramPaths sandboxConfigFlags + `mappend` configProgramPaths savedFlags, + configProgramArgs = configProgramArgs sandboxConfigFlags + `mappend` configProgramArgs savedFlags, + -- NOTE: Unconditionally choosing the value from + -- 'dist/setup-config'. Sandbox package DB location may have been + -- changed by 'configure -w'. + configPackageDBs = configPackageDBs savedFlags + -- FIXME: Is this compatible with the 'inherit' feature? + } + +-- +-- Utils (transitionary) +-- + +-- | Try to read the most recently configured compiler from the +-- 'localBuildInfoFile', falling back on 'configCompilerAuxEx' if it +-- cannot be read. 
+getPersistOrConfigCompiler :: ConfigFlags + -> IO (Compiler, Platform, ProgramDb) +getPersistOrConfigCompiler configFlags = do + distPref <- findDistPrefOrDefault (configDistPref configFlags) + mlbi <- maybeGetPersistBuildConfig distPref + case mlbi of + Nothing -> do configCompilerAux' configFlags + Just lbi -> return ( LocalBuildInfo.compiler lbi + , LocalBuildInfo.hostPlatform lbi + , LocalBuildInfo.withPrograms lbi + ) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/SavedFlags.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/SavedFlags.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/SavedFlags.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/SavedFlags.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,83 @@ +{-# LANGUAGE DeriveDataTypeable #-} + +module Distribution.Client.SavedFlags + ( readCommandFlags, writeCommandFlags + , readSavedArgs, writeSavedArgs + ) where + +import Distribution.Simple.Command +import Distribution.Simple.UserHooks ( Args ) +import Distribution.Simple.Utils + ( createDirectoryIfMissingVerbose, unintersperse ) +import Distribution.Verbosity + +import Control.Exception ( Exception, throwIO ) +import Control.Monad ( liftM ) +import Data.List ( intercalate ) +import Data.Maybe ( fromMaybe ) +import Data.Typeable +import System.Directory ( doesFileExist ) +import System.FilePath ( takeDirectory ) + + +writeSavedArgs :: Verbosity -> FilePath -> [String] -> IO () +writeSavedArgs verbosity path args = do + createDirectoryIfMissingVerbose + (lessVerbose verbosity) True (takeDirectory path) + writeFile path (intercalate "\0" args) + + +-- | Write command-line flags to a file, separated by null characters. This +-- format is also suitable for the @xargs -0@ command. 
Using the null +-- character also avoids the problem of escaping newlines or spaces, +-- because unlike other whitespace characters, the null character is +-- not valid in command-line arguments. +writeCommandFlags :: Verbosity -> FilePath -> CommandUI flags -> flags -> IO () +writeCommandFlags verbosity path command flags = + writeSavedArgs verbosity path (commandShowOptions command flags) + + +readSavedArgs :: FilePath -> IO (Maybe [String]) +readSavedArgs path = do + exists <- doesFileExist path + if exists + then liftM (Just . unintersperse '\0') (readFile path) + else return Nothing + + +-- | Read command-line arguments, separated by null characters, from a file. +-- Returns the default flags if the file does not exist. +readCommandFlags :: FilePath -> CommandUI flags -> IO flags +readCommandFlags path command = do + savedArgs <- liftM (fromMaybe []) (readSavedArgs path) + case (commandParseArgs command True savedArgs) of + CommandHelp _ -> throwIO (SavedArgsErrorHelp savedArgs) + CommandList _ -> throwIO (SavedArgsErrorList savedArgs) + CommandErrors errs -> throwIO (SavedArgsErrorOther savedArgs errs) + CommandReadyToGo (mkFlags, _) -> + return (mkFlags (commandDefaultFlags command)) + +-- ----------------------------------------------------------------------------- +-- * Exceptions +-- ----------------------------------------------------------------------------- + +data SavedArgsError + = SavedArgsErrorHelp Args + | SavedArgsErrorList Args + | SavedArgsErrorOther Args [String] + deriving (Typeable) + +instance Show SavedArgsError where + show (SavedArgsErrorHelp args) = + "unexpected flag '--help', saved command line was:\n" + ++ intercalate " " args + show (SavedArgsErrorList args) = + "unexpected flag '--list-options', saved command line was:\n" + ++ intercalate " " args + show (SavedArgsErrorOther args errs) = + "saved command line was:\n" + ++ intercalate " " args ++ "\n" + ++ "encountered errors:\n" + ++ intercalate "\n" errs + +instance Exception 
SavedArgsError diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Security/DNS.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Security/DNS.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Security/DNS.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Security/DNS.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,196 @@ +{-# LANGUAGE CPP #-} + +module Distribution.Client.Security.DNS + ( queryBootstrapMirrors + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude +import Network.URI (URI(..), URIAuth(..), parseURI) +import Distribution.Verbosity +import Control.Monad +import Control.DeepSeq (force) +import Control.Exception (SomeException, evaluate, try) +import Distribution.Simple.Utils +import Distribution.Compat.Exception (displayException) + +#if defined(MIN_VERSION_resolv) || defined(MIN_VERSION_windns) +import Network.DNS (queryTXT, Name(..), CharStr(..)) +import qualified Data.ByteString.Char8 as BS.Char8 +#else +import Distribution.Simple.Program.Db + ( emptyProgramDb, addKnownProgram + , configureAllKnownPrograms, lookupProgram ) +import Distribution.Simple.Program + ( simpleProgram + , programInvocation + , getProgramInvocationOutput ) +#endif + +-- | Try to lookup RFC1464-encoded mirror urls for a Hackage +-- repository url by performing a DNS TXT lookup on the +-- @_mirrors.@-prefixed URL hostname. +-- +-- Example: for @http://hackage.haskell.org/@ +-- perform a DNS TXT query for the hostname +-- @_mirrors.hackage.haskell.org@ which may look like e.g. +-- +-- > _mirrors.hackage.haskell.org. 
300 IN TXT +-- > "0.urlbase=http://hackage.fpcomplete.com/" +-- > "1.urlbase=http://objects-us-west-1.dream.io/hackage-mirror/" +-- +-- NB: hackage-security doesn't require DNS lookups being trustworthy, +-- as the trust is established via the cryptographically signed TUF +-- meta-data that is retrieved from the resolved Hackage repository. +-- Moreover, we already have to protect against a compromised +-- @hackage.haskell.org@ DNS entry, so an the additional +-- @_mirrors.hackage.haskell.org@ DNS entry in the same SOA doesn't +-- constitute a significant new attack vector anyway. +-- +queryBootstrapMirrors :: Verbosity -> URI -> IO [URI] + +#if defined(MIN_VERSION_resolv) || defined(MIN_VERSION_windns) +-- use @resolv@ package for performing DNS queries +queryBootstrapMirrors verbosity repoUri + | Just auth <- uriAuthority repoUri = do + let mirrorsDnsName = Name (BS.Char8.pack ("_mirrors." ++ uriRegName auth)) + + mirrors' <- try $ do + txts <- queryTXT mirrorsDnsName + evaluate (force $ extractMirrors (map snd txts)) + + mirrors <- case mirrors' of + Left e -> do + warn verbosity ("Caught exception during _mirrors lookup:"++ + displayException (e :: SomeException)) + return [] + Right v -> return v + + if null mirrors + then warn verbosity ("No mirrors found for " ++ show repoUri) + else do info verbosity ("located " ++ show (length mirrors) ++ + " mirrors for " ++ show repoUri ++ " :") + forM_ mirrors $ \url -> info verbosity ("- " ++ show url) + + return mirrors + + | otherwise = return [] + +-- | Extract list of mirrors from 'queryTXT' result +extractMirrors :: [[CharStr]] -> [URI] +extractMirrors txtChunks = mapMaybe (parseURI . snd) . 
sort $ vals + where + vals = [ (kn,v) | CharStr e <- concat txtChunks + , Just (k,v) <- [splitRfc1464 (BS.Char8.unpack e)] + , Just kn <- [isUrlBase k] + ] + +---------------------------------------------------------------------------- +#else /* !defined(MIN_VERSION_resolv) */ +-- use external method via @nslookup@ +queryBootstrapMirrors verbosity repoUri + | Just auth <- uriAuthority repoUri = do + progdb <- configureAllKnownPrograms verbosity $ + addKnownProgram nslookupProg emptyProgramDb + + case lookupProgram nslookupProg progdb of + Nothing -> do + warn verbosity "'nslookup' tool missing - can't locate mirrors" + return [] + + Just nslookup -> do + let mirrorsDnsName = "_mirrors." ++ uriRegName auth + + mirrors' <- try $ do + out <- getProgramInvocationOutput verbosity $ + programInvocation nslookup ["-query=TXT", mirrorsDnsName] + evaluate (force $ extractMirrors mirrorsDnsName out) + + mirrors <- case mirrors' of + Left e -> do + warn verbosity ("Caught exception during _mirrors lookup:"++ + displayException (e :: SomeException)) + return [] + Right v -> return v + + if null mirrors + then warn verbosity ("No mirrors found for " ++ show repoUri) + else do info verbosity ("located " ++ show (length mirrors) ++ + " mirrors for " ++ show repoUri ++ " :") + forM_ mirrors $ \url -> info verbosity ("- " ++ show url) + + return mirrors + + | otherwise = return [] + where + nslookupProg = simpleProgram "nslookup" + +-- | Extract list of mirrors from @nslookup -query=TXT@ output. +extractMirrors :: String -> String -> [URI] +extractMirrors hostname s0 = mapMaybe (parseURI . snd) . 
sort $ vals + where + vals = [ (kn,v) | (h,ents) <- fromMaybe [] $ parseNsLookupTxt s0 + , h == hostname + , e <- ents + , Just (k,v) <- [splitRfc1464 e] + , Just kn <- [isUrlBase k] + ] + +-- | Parse output of @nslookup -query=TXT $HOSTNAME@ tolerantly +parseNsLookupTxt :: String -> Maybe [(String,[String])] +parseNsLookupTxt = go0 [] [] + where + -- approximate grammar: + -- := { } + -- ( starts at begin of line, but may span multiple lines) + -- := ^ TAB "text =" { } + -- := string enclosed by '"'s ('\' and '"' are \-escaped) + + -- scan for ^ "text =" + go0 [] _ [] = Nothing + go0 res _ [] = Just (reverse res) + go0 res _ ('\n':xs) = go0 res [] xs + go0 res lw ('\t':'t':'e':'x':'t':' ':'=':xs) = go1 res (reverse lw) [] (dropWhile isSpace xs) + go0 res lw (x:xs) = go0 res (x:lw) xs + + -- collect at least one + go1 res lw qs ('"':xs) = case qstr "" xs of + Just (s, xs') -> go1 res lw (s:qs) (dropWhile isSpace xs') + Nothing -> Nothing -- bad quoting + go1 _ _ [] _ = Nothing -- missing qstring + go1 res lw qs xs = go0 ((lw,reverse qs):res) [] xs + + qstr _ ('\n':_) = Nothing -- We don't support unquoted LFs + qstr acc ('\\':'\\':cs) = qstr ('\\':acc) cs + qstr acc ('\\':'"':cs) = qstr ('"':acc) cs + qstr acc ('"':cs) = Just (reverse acc, cs) + qstr acc (c:cs) = qstr (c:acc) cs + qstr _ [] = Nothing + +#endif +---------------------------------------------------------------------------- + +-- | Helper used by 'extractMirrors' for extracting @urlbase@ keys from Rfc1464-encoded data +isUrlBase :: String -> Maybe Int +isUrlBase s + | ".urlbase" `isSuffixOf` s, not (null ns), all isDigit ns = readMaybe ns + | otherwise = Nothing + where + ns = take (length s - 8) s + +-- | Split a TXT string into key and value according to RFC1464. +-- Returns 'Nothing' if parsing fails. 
+splitRfc1464 :: String -> Maybe (String,String) +splitRfc1464 = go "" + where + go _ [] = Nothing + go acc ('`':c:cs) = go (c:acc) cs + go acc ('=':cs) = go2 (reverse acc) "" cs + go acc (c:cs) + | isSpace c = go acc cs + | otherwise = go (c:acc) cs + + go2 k acc [] = Just (k,reverse acc) + go2 _ _ ['`'] = Nothing + go2 k acc ('`':c:cs) = go2 k (c:acc) cs + go2 k acc (c:cs) = go2 k (c:acc) cs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Security/HTTP.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Security/HTTP.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Security/HTTP.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Security/HTTP.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,174 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE StandaloneDeriving #-} +-- | Implementation of 'HttpLib' using cabal-install's own 'HttpTransport' +module Distribution.Client.Security.HTTP (HttpLib, transportAdapter) where + +-- stdlibs +import Control.Exception + ( Exception(..), IOException ) +import Data.List + ( intercalate ) +import Data.Typeable + ( Typeable ) +import System.Directory + ( getTemporaryDirectory ) +import Network.URI + ( URI ) +import qualified Data.ByteString.Lazy as BS.L +import qualified Network.HTTP as HTTP + +-- Cabal/cabal-install +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Client.HttpUtils + ( HttpTransport(..), HttpCode ) +import Distribution.Client.Utils + ( withTempFileName ) + +-- hackage-security +import Hackage.Security.Client +import Hackage.Security.Client.Repository.HttpLib +import Hackage.Security.Util.Checked +import 
Hackage.Security.Util.Pretty +import qualified Hackage.Security.Util.Lens as Lens + +{------------------------------------------------------------------------------- + 'HttpLib' implementation +-------------------------------------------------------------------------------} + +-- | Translate from hackage-security's 'HttpLib' to cabal-install's 'HttpTransport' +-- +-- NOTE: The match between these two APIs is currently not perfect: +-- +-- * We don't get any response headers back from the 'HttpTransport', so we +-- don't know if the server supports range requests. For now we optimistically +-- assume that it does. +-- * The 'HttpTransport' wants to know where to place the resulting file, +-- whereas the 'HttpLib' expects an 'IO' action which streams the download; +-- the security library then makes sure that the file gets written to a +-- location which is suitable (in particular, to a temporary file in the +-- directory where the file needs to end up, so that it can "finalize" the +-- file simply by doing 'renameFile'). Right now we write the file to a +-- temporary file in the system temp directory here and then read it again +-- to pass it to the security library; this is a problem for two reasons: it +-- is a source of inefficiency; and it means that the security library cannot +-- insist on a minimum download rate (potential security attack). +-- Fixing it however would require changing the 'HttpTransport'. 
+transportAdapter :: Verbosity -> IO HttpTransport -> HttpLib +transportAdapter verbosity getTransport = HttpLib{ + httpGet = \headers uri callback -> do + transport <- getTransport + get verbosity transport headers uri callback + , httpGetRange = \headers uri range callback -> do + transport <- getTransport + getRange verbosity transport headers uri range callback + } + +get :: Throws SomeRemoteError + => Verbosity + -> HttpTransport + -> [HttpRequestHeader] -> URI + -> ([HttpResponseHeader] -> BodyReader -> IO a) + -> IO a +get verbosity transport reqHeaders uri callback = wrapCustomEx $ do + get' verbosity transport reqHeaders uri Nothing $ \code respHeaders br -> + case code of + 200 -> callback respHeaders br + _ -> throwChecked $ UnexpectedResponse uri code + +getRange :: Throws SomeRemoteError + => Verbosity + -> HttpTransport + -> [HttpRequestHeader] -> URI -> (Int, Int) + -> (HttpStatus -> [HttpResponseHeader] -> BodyReader -> IO a) + -> IO a +getRange verbosity transport reqHeaders uri range callback = wrapCustomEx $ do + get' verbosity transport reqHeaders uri (Just range) $ \code respHeaders br -> + case code of + 200 -> callback HttpStatus200OK respHeaders br + 206 -> callback HttpStatus206PartialContent respHeaders br + _ -> throwChecked $ UnexpectedResponse uri code + +-- | Internal generalization of 'get' and 'getRange' +get' :: Verbosity + -> HttpTransport + -> [HttpRequestHeader] -> URI -> Maybe (Int, Int) + -> (HttpCode -> [HttpResponseHeader] -> BodyReader -> IO a) + -> IO a +get' verbosity transport reqHeaders uri mRange callback = do + tempDir <- getTemporaryDirectory + withTempFileName tempDir "transportAdapterGet" $ \temp -> do + (code, _etag) <- getHttp transport verbosity uri Nothing temp reqHeaders' + br <- bodyReaderFromBS =<< BS.L.readFile temp + callback code [HttpResponseAcceptRangesBytes] br + where + reqHeaders' = mkReqHeaders reqHeaders mRange + +{------------------------------------------------------------------------------- + 
Request headers +-------------------------------------------------------------------------------} + +mkRangeHeader :: Int -> Int -> HTTP.Header +mkRangeHeader from to = HTTP.Header HTTP.HdrRange rangeHeader + where + -- Content-Range header uses inclusive rather than exclusive bounds + -- See + rangeHeader = "bytes=" ++ show from ++ "-" ++ show (to - 1) + +mkReqHeaders :: [HttpRequestHeader] -> Maybe (Int, Int) -> [HTTP.Header] +mkReqHeaders reqHeaders mRange = concat [ + tr [] reqHeaders + , [mkRangeHeader fr to | Just (fr, to) <- [mRange]] + ] + where + tr :: [(HTTP.HeaderName, [String])] -> [HttpRequestHeader] -> [HTTP.Header] + tr acc [] = + concatMap finalize acc + tr acc (HttpRequestMaxAge0:os) = + tr (insert HTTP.HdrCacheControl ["max-age=0"] acc) os + tr acc (HttpRequestNoTransform:os) = + tr (insert HTTP.HdrCacheControl ["no-transform"] acc) os + + -- Some headers are comma-separated, others need multiple headers for + -- multiple options. + -- + -- TODO: Right we we just comma-separate all of them. 
+ finalize :: (HTTP.HeaderName, [String]) -> [HTTP.Header] + finalize (name, strs) = [HTTP.Header name (intercalate ", " (reverse strs))] + + insert :: Eq a => a -> [b] -> [(a, [b])] -> [(a, [b])] + insert x y = Lens.modify (Lens.lookupM x) (++ y) + +{------------------------------------------------------------------------------- + Custom exceptions +-------------------------------------------------------------------------------} + +data UnexpectedResponse = UnexpectedResponse URI Int + deriving (Typeable) + +instance Pretty UnexpectedResponse where + pretty (UnexpectedResponse uri code) = "Unexpected response " ++ show code + ++ "for " ++ show uri + +#if MIN_VERSION_base(4,8,0) +deriving instance Show UnexpectedResponse +instance Exception UnexpectedResponse where displayException = pretty +#else +instance Show UnexpectedResponse where show = pretty +instance Exception UnexpectedResponse +#endif + +wrapCustomEx :: ( ( Throws UnexpectedResponse + , Throws IOException + ) => IO a) + -> (Throws SomeRemoteError => IO a) +wrapCustomEx act = handleChecked (\(ex :: UnexpectedResponse) -> go ex) + $ handleChecked (\(ex :: IOException) -> go ex) + $ act + where + go ex = throwChecked (SomeRemoteError ex) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Setup.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,2883 @@ +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE LambdaCase #-} +----------------------------------------------------------------------------- +-- | +-- Module : 
Distribution.Client.Setup +-- Copyright : (c) David Himmelstrup 2005 +-- License : BSD-like +-- +-- Maintainer : lemmih@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- +----------------------------------------------------------------------------- +module Distribution.Client.Setup + ( globalCommand, GlobalFlags(..), defaultGlobalFlags + , RepoContext(..), withRepoContext + , configureCommand, ConfigFlags(..), filterConfigureFlags + , configPackageDB', configCompilerAux' + , configureExCommand, ConfigExFlags(..), defaultConfigExFlags + , buildCommand, BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..) + , replCommand, testCommand, benchmarkCommand + , configureExOptions, reconfigureCommand + , installCommand, InstallFlags(..), installOptions, defaultInstallFlags + , filterHaddockArgs, filterHaddockFlags + , defaultSolver, defaultMaxBackjumps + , listCommand, ListFlags(..) + , updateCommand, UpdateFlags(..), defaultUpdateFlags + , upgradeCommand + , uninstallCommand + , infoCommand, InfoFlags(..) + , fetchCommand, FetchFlags(..) + , freezeCommand, FreezeFlags(..) + , genBoundsCommand + , outdatedCommand, OutdatedFlags(..), IgnoreMajorVersionBumps(..) + , getCommand, unpackCommand, GetFlags(..) + , checkCommand + , formatCommand + , uploadCommand, UploadFlags(..), IsCandidate(..) + , reportCommand, ReportFlags(..) + , runCommand + , initCommand, IT.InitFlags(..) + , sdistCommand, SDistFlags(..), SDistExFlags(..), ArchiveFormat(..) + , win32SelfUpgradeCommand, Win32SelfUpgradeFlags(..) + , actAsSetupCommand, ActAsSetupFlags(..) + , sandboxCommand, defaultSandboxLocation, SandboxFlags(..) + , execCommand, ExecFlags(..), defaultExecFlags + , userConfigCommand, UserConfigFlags(..) 
+ , manpageCommand + , haddockCommand + , cleanCommand + , doctestCommand + , copyCommand + , registerCommand + + , parsePackageArgs + , liftOptions + , yesNoOpt + --TODO: stop exporting these: + , showRepo + , parseRepo + , readRepo + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude hiding (get) + +import Distribution.Client.Types + ( Username(..), Password(..), RemoteRepo(..) + , AllowNewer(..), AllowOlder(..), RelaxDeps(..) + , WriteGhcEnvironmentFilesPolicy(..) + ) +import Distribution.Client.BuildReports.Types + ( ReportLevel(..) ) +import Distribution.Client.Dependency.Types + ( PreSolver(..) ) +import Distribution.Client.IndexUtils.Timestamp + ( IndexState(..) ) +import qualified Distribution.Client.Init.Types as IT + ( InitFlags(..), PackageType(..) ) +import Distribution.Client.Targets + ( UserConstraint, readUserConstraint ) +import Distribution.Utils.NubList + ( NubList, toNubList, fromNubList) + +import Distribution.Solver.Types.ConstraintSource +import Distribution.Solver.Types.Settings + +import Distribution.Simple.Compiler ( Compiler, PackageDB, PackageDBStack ) +import Distribution.Simple.Program (ProgramDb, defaultProgramDb) +import Distribution.Simple.Command hiding (boolOpt, boolOpt') +import qualified Distribution.Simple.Command as Command +import Distribution.Simple.Configure + ( configCompilerAuxEx, interpretPackageDbFlags, computeEffectiveProfiling ) +import qualified Distribution.Simple.Setup as Cabal +import Distribution.Simple.Setup + ( ConfigFlags(..), BuildFlags(..), ReplFlags + , TestFlags(..), BenchmarkFlags(..) + , SDistFlags(..), HaddockFlags(..) + , CleanFlags(..), DoctestFlags(..) + , CopyFlags(..), RegisterFlags(..) + , readPackageDbList, showPackageDbList + , Flag(..), toFlag, flagToMaybe, flagToList, maybeToFlag + , BooleanFlag(..), optionVerbosity + , boolOpt, boolOpt', trueArg, falseArg + , optionNumJobs ) +import Distribution.Simple.InstallDirs + ( PathTemplate, InstallDirs(..) 
+ , toPathTemplate, fromPathTemplate, combinePathTemplate ) +import Distribution.Version + ( Version, mkVersion, nullVersion, anyVersion, thisVersion ) +import Distribution.Package + ( PackageIdentifier, PackageName, packageName, packageVersion ) +import Distribution.Types.Dependency +import Distribution.PackageDescription + ( BuildType(..), RepoKind(..) ) +import Distribution.System ( Platform ) +import Distribution.Text + ( Text(..), display ) +import Distribution.ReadE + ( ReadE(..), readP_to_E, succeedReadE ) +import qualified Distribution.Compat.ReadP as Parse + ( ReadP, char, munch1, pfail, sepBy1, (+++) ) +import Distribution.ParseUtils + ( readPToMaybe ) +import Distribution.Verbosity + ( Verbosity, lessVerbose, normal, verboseNoFlags, verboseNoTimestamp ) +import Distribution.Simple.Utils + ( wrapText, wrapLine ) +import Distribution.Client.GlobalFlags + ( GlobalFlags(..), defaultGlobalFlags + , RepoContext(..), withRepoContext + ) + +import Data.List + ( deleteFirstsBy ) +import System.FilePath + ( () ) +import Network.URI + ( parseAbsoluteURI, uriToString ) + +globalCommand :: [Command action] -> CommandUI GlobalFlags +globalCommand commands = CommandUI { + commandName = "", + commandSynopsis = + "Command line interface to the Haskell Cabal infrastructure.", + commandUsage = \pname -> + "See http://www.haskell.org/cabal/ for more information.\n" + ++ "\n" + ++ "Usage: " ++ pname ++ " [GLOBAL FLAGS] [COMMAND [FLAGS]]\n", + commandDescription = Just $ \pname -> + let + commands' = commands ++ [commandAddAction helpCommandUI undefined] + cmdDescs = getNormalCommandDescriptions commands' + -- if new commands are added, we want them to appear even if they + -- are not included in the custom listing below. Thus, we calculate + -- the `otherCmds` list and append it under the `other` category. + -- Alternatively, a new testcase could be added that ensures that + -- the set of commands listed here is equal to the set of commands + -- that are actually available. 
+ otherCmds = deleteFirstsBy (==) (map fst cmdDescs) + [ "help" + , "update" + , "install" + , "fetch" + , "list" + , "info" + , "user-config" + , "get" + , "init" + , "configure" + , "reconfigure" + , "build" + , "clean" + , "run" + , "repl" + , "test" + , "bench" + , "check" + , "sdist" + , "upload" + , "report" + , "freeze" + , "gen-bounds" + , "outdated" + , "doctest" + , "haddock" + , "hscolour" + , "copy" + , "register" + , "sandbox" + , "exec" + , "new-build" + , "new-configure" + , "new-repl" + , "new-freeze" + , "new-run" + , "new-test" + , "new-bench" + , "new-haddock" + , "new-exec" + , "new-update" + , "new-install" + , "new-clean" + , "new-sdist" + -- v1 commands, stateful style + , "v1-build" + , "v1-configure" + , "v1-repl" + , "v1-freeze" + , "v1-run" + , "v1-test" + , "v1-bench" + , "v1-haddock" + , "v1-exec" + , "v1-update" + , "v1-install" + , "v1-clean" + , "v1-sdist" + , "v1-doctest" + , "v1-copy" + , "v1-register" + , "v1-reconfigure" + , "v1-sandbox" + -- v2 commands, nix-style + , "v2-build" + , "v2-configure" + , "v2-repl" + , "v2-freeze" + , "v2-run" + , "v2-test" + , "v2-bench" + , "v2-haddock" + , "v2-exec" + , "v2-update" + , "v2-install" + , "v2-clean" + , "v2-sdist" + ] + maxlen = maximum $ [length name | (name, _) <- cmdDescs] + align str = str ++ replicate (maxlen - length str) ' ' + startGroup n = " ["++n++"]" + par = "" + addCmd n = case lookup n cmdDescs of + Nothing -> "" + Just d -> " " ++ align n ++ " " ++ d + addCmdCustom n d = case lookup n cmdDescs of -- make sure that the + -- command still exists. 
+ Nothing -> "" + Just _ -> " " ++ align n ++ " " ++ d + in + "Commands:\n" + ++ unlines ( + [ startGroup "global" + , addCmd "update" + , addCmd "install" + , par + , addCmd "help" + , addCmd "info" + , addCmd "list" + , addCmd "fetch" + , addCmd "user-config" + , par + , startGroup "package" + , addCmd "get" + , addCmd "init" + , par + , addCmd "configure" + , addCmd "build" + , addCmd "clean" + , par + , addCmd "run" + , addCmd "repl" + , addCmd "test" + , addCmd "bench" + , par + , addCmd "check" + , addCmd "sdist" + , addCmd "upload" + , addCmd "report" + , par + , addCmd "freeze" + , addCmd "gen-bounds" + , addCmd "outdated" + , addCmd "doctest" + , addCmd "haddock" + , addCmd "hscolour" + , addCmd "copy" + , addCmd "register" + , addCmd "reconfigure" + , par + , startGroup "sandbox" + , addCmd "sandbox" + , addCmd "exec" + , addCmdCustom "repl" "Open interpreter with access to sandbox packages." + , par + , startGroup "new-style projects (beta)" + , addCmd "new-build" + , addCmd "new-configure" + , addCmd "new-repl" + , addCmd "new-run" + , addCmd "new-test" + , addCmd "new-bench" + , addCmd "new-freeze" + , addCmd "new-haddock" + , addCmd "new-exec" + , addCmd "new-update" + , addCmd "new-install" + , addCmd "new-clean" + , addCmd "new-sdist" + , par + , startGroup "new-style projects (forwards-compatible aliases)" + , addCmd "v2-build" + , addCmd "v2-configure" + , addCmd "v2-repl" + , addCmd "v2-run" + , addCmd "v2-test" + , addCmd "v2-bench" + , addCmd "v2-freeze" + , addCmd "v2-haddock" + , addCmd "v2-exec" + , addCmd "v2-update" + , addCmd "v2-install" + , addCmd "v2-clean" + , addCmd "v2-sdist" + , par + , startGroup "legacy command aliases" + , addCmd "v1-build" + , addCmd "v1-configure" + , addCmd "v1-repl" + , addCmd "v1-run" + , addCmd "v1-test" + , addCmd "v1-bench" + , addCmd "v1-freeze" + , addCmd "v1-haddock" + , addCmd "v1-exec" + , addCmd "v1-update" + , addCmd "v1-install" + , addCmd "v1-clean" + , addCmd "v1-sdist" + , addCmd "v1-doctest" 
+ , addCmd "v1-copy" + , addCmd "v1-register" + , addCmd "v1-reconfigure" + , addCmd "v1-sandbox" + ] ++ if null otherCmds then [] else par + :startGroup "other" + :[addCmd n | n <- otherCmds]) + ++ "\n" + ++ "For more information about a command use:\n" + ++ " " ++ pname ++ " COMMAND --help\n" + ++ "or " ++ pname ++ " help COMMAND\n" + ++ "\n" + ++ "To install Cabal packages from hackage use:\n" + ++ " " ++ pname ++ " install foo [--dry-run]\n" + ++ "\n" + ++ "Occasionally you need to update the list of available packages:\n" + ++ " " ++ pname ++ " update\n", + commandNotes = Nothing, + commandDefaultFlags = mempty, + commandOptions = args + } + where + args :: ShowOrParseArgs -> [OptionField GlobalFlags] + args ShowArgs = argsShown + args ParseArgs = argsShown ++ argsNotShown + + -- arguments we want to show in the help + argsShown :: [OptionField GlobalFlags] + argsShown = [ + option ['V'] ["version"] + "Print version information" + globalVersion (\v flags -> flags { globalVersion = v }) + trueArg + + ,option [] ["numeric-version"] + "Print just the version number" + globalNumericVersion (\v flags -> flags { globalNumericVersion = v }) + trueArg + + ,option [] ["config-file"] + "Set an alternate location for the config file" + globalConfigFile (\v flags -> flags { globalConfigFile = v }) + (reqArgFlag "FILE") + + ,option [] ["sandbox-config-file"] + "Set an alternate location for the sandbox config file (default: './cabal.sandbox.config')" + globalSandboxConfigFile (\v flags -> flags { globalSandboxConfigFile = v }) + (reqArgFlag "FILE") + + ,option [] ["default-user-config"] + "Set a location for a cabal.config file for projects without their own cabal.config freeze file." 
+ globalConstraintsFile (\v flags -> flags {globalConstraintsFile = v}) + (reqArgFlag "FILE") + + ,option [] ["require-sandbox"] + "requiring the presence of a sandbox for sandbox-aware commands" + globalRequireSandbox (\v flags -> flags { globalRequireSandbox = v }) + (boolOpt' ([], ["require-sandbox"]) ([], ["no-require-sandbox"])) + + ,option [] ["ignore-sandbox"] + "Ignore any existing sandbox" + globalIgnoreSandbox (\v flags -> flags { globalIgnoreSandbox = v }) + trueArg + + ,option [] ["ignore-expiry"] + "Ignore expiry dates on signed metadata (use only in exceptional circumstances)" + globalIgnoreExpiry (\v flags -> flags { globalIgnoreExpiry = v }) + trueArg + + ,option [] ["http-transport"] + "Set a transport for http(s) requests. Accepts 'curl', 'wget', 'powershell', and 'plain-http'. (default: 'curl')" + globalHttpTransport (\v flags -> flags { globalHttpTransport = v }) + (reqArgFlag "HttpTransport") + ,option [] ["nix"] + "Nix integration: run commands through nix-shell if a 'shell.nix' file exists" + globalNix (\v flags -> flags { globalNix = v }) + (boolOpt [] []) + ] + + -- arguments we don't want shown in the help + argsNotShown :: [OptionField GlobalFlags] + argsNotShown = [ + option [] ["remote-repo"] + "The name and url for a remote repository" + globalRemoteRepos (\v flags -> flags { globalRemoteRepos = v }) + (reqArg' "NAME:URL" (toNubList . maybeToList . readRepo) (map showRepo . 
fromNubList)) + + ,option [] ["remote-repo-cache"] + "The location where downloads from all remote repos are cached" + globalCacheDir (\v flags -> flags { globalCacheDir = v }) + (reqArgFlag "DIR") + + ,option [] ["local-repo"] + "The location of a local repository" + globalLocalRepos (\v flags -> flags { globalLocalRepos = v }) + (reqArg' "DIR" (\x -> toNubList [x]) fromNubList) + + ,option [] ["logs-dir", "logsdir"] + "The location to put log files" + globalLogsDir (\v flags -> flags { globalLogsDir = v }) + (reqArgFlag "DIR") + + ,option [] ["world-file"] + "The location of the world file" + globalWorldFile (\v flags -> flags { globalWorldFile = v }) + (reqArgFlag "FILE") + + ,option [] ["store-dir", "storedir"] + "The location of the nix-local-build store" + globalStoreDir (\v flags -> flags { globalStoreDir = v }) + (reqArgFlag "DIR") + ] + +-- ------------------------------------------------------------ +-- * Config flags +-- ------------------------------------------------------------ + +configureCommand :: CommandUI ConfigFlags +configureCommand = c + { commandName = "configure" + , commandDefaultFlags = mempty + , commandDescription = Just $ \_ -> wrapText $ + "Configure how the package is built by setting " + ++ "package (and other) flags.\n" + ++ "\n" + ++ "The configuration affects several other commands, " + ++ "including v1-build, v1-test, v1-bench, v1-run, v1-repl.\n" + , commandUsage = \pname -> + "Usage: " ++ pname ++ " v1-configure [FLAGS]\n" + , commandNotes = Just $ \pname -> + (Cabal.programFlagsDescription defaultProgramDb ++ "\n") + ++ "Examples:\n" + ++ " " ++ pname ++ " v1-configure\n" + ++ " Configure with defaults;\n" + ++ " " ++ pname ++ " v1-configure --enable-tests -fcustomflag\n" + ++ " Configure building package including tests,\n" + ++ " with some package-specific flag.\n" + } + where + c = Cabal.configureCommand defaultProgramDb + +configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags] +configureOptions = commandOptions 
configureCommand + +-- | Given some 'ConfigFlags' for the version of Cabal that +-- cabal-install was built with, and a target older 'Version' of +-- Cabal that we want to pass these flags to, convert the +-- flags into a form that will be accepted by the older +-- Setup script. Generally speaking, this just means filtering +-- out flags that the old Cabal library doesn't understand, but +-- in some cases it may also mean "emulating" a feature using +-- some more legacy flags. +filterConfigureFlags :: ConfigFlags -> Version -> ConfigFlags +filterConfigureFlags flags cabalLibVersion + -- NB: we expect the latest version to be the most common case, + -- so test it first. + | cabalLibVersion >= mkVersion [2,1,0] = flags_latest + -- The naming convention is that flags_version gives flags with + -- all flags *introduced* in version eliminated. + -- It is NOT the latest version of Cabal library that + -- these flags work for; version of introduction is a more + -- natural metric. + | cabalLibVersion < mkVersion [1,3,10] = flags_1_3_10 + | cabalLibVersion < mkVersion [1,10,0] = flags_1_10_0 + | cabalLibVersion < mkVersion [1,12,0] = flags_1_12_0 + | cabalLibVersion < mkVersion [1,14,0] = flags_1_14_0 + | cabalLibVersion < mkVersion [1,18,0] = flags_1_18_0 + | cabalLibVersion < mkVersion [1,19,1] = flags_1_19_1 + | cabalLibVersion < mkVersion [1,19,2] = flags_1_19_2 + | cabalLibVersion < mkVersion [1,21,1] = flags_1_21_1 + | cabalLibVersion < mkVersion [1,22,0] = flags_1_22_0 + | cabalLibVersion < mkVersion [1,23,0] = flags_1_23_0 + | cabalLibVersion < mkVersion [1,25,0] = flags_1_25_0 + | cabalLibVersion < mkVersion [2,1,0] = flags_2_1_0 + | otherwise = flags_latest + where + flags_latest = flags { + -- Cabal >= 1.19.1 uses '--dependency' and does not need '--constraint'. 
+ configConstraints = [] + } + + flags_2_1_0 = flags_latest { + -- Cabal < 2.1 doesn't know about -v +timestamp modifier + configVerbosity = fmap verboseNoTimestamp (configVerbosity flags_latest) + -- Cabal < 2.1 doesn't know about ---static + , configStaticLib = NoFlag + , configSplitSections = NoFlag + } + + flags_1_25_0 = flags_2_1_0 { + -- Cabal < 1.25.0 doesn't know about --dynlibdir. + configInstallDirs = configInstallDirs_1_25_0, + -- Cabal < 1.25 doesn't have extended verbosity syntax + configVerbosity = fmap verboseNoFlags (configVerbosity flags_2_1_0), + -- Cabal < 1.25 doesn't support --deterministic + configDeterministic = mempty + } + configInstallDirs_1_25_0 = let dirs = configInstallDirs flags in + dirs { dynlibdir = NoFlag + , libexecsubdir = NoFlag + , libexecdir = maybeToFlag $ + combinePathTemplate <$> flagToMaybe (libexecdir dirs) + <*> flagToMaybe (libexecsubdir dirs) + } + -- Cabal < 1.23 doesn't know about '--profiling-detail'. + -- Cabal < 1.23 has a hacked up version of 'enable-profiling' + -- which we shouldn't use. + (tryLibProfiling, tryExeProfiling) = computeEffectiveProfiling flags + flags_1_23_0 = flags_1_25_0 { configProfDetail = NoFlag + , configProfLibDetail = NoFlag + , configIPID = NoFlag + , configProf = NoFlag + , configProfExe = Flag tryExeProfiling + , configProfLib = Flag tryLibProfiling + } + + -- Cabal < 1.22 doesn't know about '--disable-debug-info'. + flags_1_22_0 = flags_1_23_0 { configDebugInfo = NoFlag } + + -- Cabal < 1.21.1 doesn't know about 'disable-relocatable' + -- Cabal < 1.21.1 doesn't know about 'enable-profiling' + -- (but we already dealt with it in flags_1_23_0) + flags_1_21_1 = + flags_1_22_0 { configRelocatable = NoFlag + , configCoverage = NoFlag + , configLibCoverage = configCoverage flags + } + -- Cabal < 1.19.2 doesn't know about '--exact-configuration' and + -- '--enable-library-stripping'. 
+ flags_1_19_2 = flags_1_21_1 { configExactConfiguration = NoFlag + , configStripLibs = NoFlag } + -- Cabal < 1.19.1 uses '--constraint' instead of '--dependency'. + flags_1_19_1 = flags_1_19_2 { configDependencies = [] + , configConstraints = configConstraints flags } + -- Cabal < 1.18.0 doesn't know about --extra-prog-path and --sysconfdir. + flags_1_18_0 = flags_1_19_1 { configProgramPathExtra = toNubList [] + , configInstallDirs = configInstallDirs_1_18_0} + configInstallDirs_1_18_0 = (configInstallDirs flags_1_19_1) { sysconfdir = NoFlag } + -- Cabal < 1.14.0 doesn't know about '--disable-benchmarks'. + flags_1_14_0 = flags_1_18_0 { configBenchmarks = NoFlag } + -- Cabal < 1.12.0 doesn't know about '--enable/disable-executable-dynamic' + -- and '--enable/disable-library-coverage'. + flags_1_12_0 = flags_1_14_0 { configLibCoverage = NoFlag + , configDynExe = NoFlag } + -- Cabal < 1.10.0 doesn't know about '--disable-tests'. + flags_1_10_0 = flags_1_12_0 { configTests = NoFlag } + -- Cabal < 1.3.10 does not grok the '--constraints' flag. + flags_1_3_10 = flags_1_10_0 { configConstraints = [] } + +-- | Get the package database settings from 'ConfigFlags', accounting for +-- @--package-db@ and @--user@ flags. +configPackageDB' :: ConfigFlags -> PackageDBStack +configPackageDB' cfg = + interpretPackageDbFlags userInstall (configPackageDBs cfg) + where + userInstall = Cabal.fromFlagOrDefault True (configUserInstall cfg) + +-- | Configure the compiler, but reduce verbosity during this step. 
+configCompilerAux' :: ConfigFlags -> IO (Compiler, Platform, ProgramDb) +configCompilerAux' configFlags = + configCompilerAuxEx configFlags + --FIXME: make configCompilerAux use a sensible verbosity + { configVerbosity = fmap lessVerbose (configVerbosity configFlags) } + +-- ------------------------------------------------------------ +-- * Config extra flags +-- ------------------------------------------------------------ + +-- | cabal configure takes some extra flags beyond runghc Setup configure +-- +data ConfigExFlags = ConfigExFlags { + configCabalVersion :: Flag Version, + configExConstraints :: [(UserConstraint, ConstraintSource)], + configPreferences :: [Dependency], + configSolver :: Flag PreSolver, + configAllowNewer :: Maybe AllowNewer, + configAllowOlder :: Maybe AllowOlder, + configWriteGhcEnvironmentFilesPolicy + :: Flag WriteGhcEnvironmentFilesPolicy + } + deriving (Eq, Generic) + +defaultConfigExFlags :: ConfigExFlags +defaultConfigExFlags = mempty { configSolver = Flag defaultSolver } + +configureExCommand :: CommandUI (ConfigFlags, ConfigExFlags) +configureExCommand = configureCommand { + commandDefaultFlags = (mempty, defaultConfigExFlags), + commandOptions = \showOrParseArgs -> + liftOptions fst setFst + (filter ((`notElem` ["constraint", "dependency", "exact-configuration"]) + . optionName) $ configureOptions showOrParseArgs) + ++ liftOptions snd setSnd + (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag) + } + where + setFst a (_,b) = (a,b) + setSnd b (a,_) = (a,b) + +configureExOptions :: ShowOrParseArgs + -> ConstraintSource + -> [OptionField ConfigExFlags] +configureExOptions _showOrParseArgs src = + [ option [] ["cabal-lib-version"] + ("Select which version of the Cabal lib to use to build packages " + ++ "(useful for testing).") + configCabalVersion (\v flags -> flags { configCabalVersion = v }) + (reqArg "VERSION" (readP_to_E ("Cannot parse cabal lib version: "++) + (fmap toFlag parse)) + (map display . 
flagToList)) + , option [] ["constraint"] + "Specify constraints on a package (version, installed/source, flags)" + configExConstraints (\v flags -> flags { configExConstraints = v }) + (reqArg "CONSTRAINT" + ((\x -> [(x, src)]) `fmap` ReadE readUserConstraint) + (map $ display . fst)) + + , option [] ["preference"] + "Specify preferences (soft constraints) on the version of a package" + configPreferences (\v flags -> flags { configPreferences = v }) + (reqArg "CONSTRAINT" + (readP_to_E (const "dependency expected") + (fmap (\x -> [x]) parse)) + (map display)) + + , optionSolver configSolver (\v flags -> flags { configSolver = v }) + + , option [] ["allow-older"] + ("Ignore lower bounds in all dependencies or DEPS") + (fmap unAllowOlder . configAllowOlder) + (\v flags -> flags { configAllowOlder = fmap AllowOlder v}) + (optArg "DEPS" + (readP_to_E ("Cannot parse the list of packages: " ++) relaxDepsParser) + (Just RelaxDepsAll) relaxDepsPrinter) + + , option [] ["allow-newer"] + ("Ignore upper bounds in all dependencies or DEPS") + (fmap unAllowNewer . 
configAllowNewer) + (\v flags -> flags { configAllowNewer = fmap AllowNewer v}) + (optArg "DEPS" + (readP_to_E ("Cannot parse the list of packages: " ++) relaxDepsParser) + (Just RelaxDepsAll) relaxDepsPrinter) + + , option [] ["write-ghc-environment-files"] + ("Whether to create a .ghc.environment file after a successful build" + ++ " (v2-build only)") + configWriteGhcEnvironmentFilesPolicy + (\v flags -> flags { configWriteGhcEnvironmentFilesPolicy = v}) + (reqArg "always|never|ghc8.4.4+" + writeGhcEnvironmentFilesPolicyParser + writeGhcEnvironmentFilesPolicyPrinter) + ] + + +writeGhcEnvironmentFilesPolicyParser :: ReadE (Flag WriteGhcEnvironmentFilesPolicy) +writeGhcEnvironmentFilesPolicyParser = ReadE $ \case + "always" -> Right $ Flag AlwaysWriteGhcEnvironmentFiles + "never" -> Right $ Flag NeverWriteGhcEnvironmentFiles + "ghc8.4.4+" -> Right $ Flag WriteGhcEnvironmentFilesOnlyForGhc844AndNewer + policy -> Left $ "Cannot parse the GHC environment file write policy '" + <> policy <> "'" + +writeGhcEnvironmentFilesPolicyPrinter + :: Flag WriteGhcEnvironmentFilesPolicy -> [String] +writeGhcEnvironmentFilesPolicyPrinter = \case + (Flag AlwaysWriteGhcEnvironmentFiles) -> ["always"] + (Flag NeverWriteGhcEnvironmentFiles) -> ["never"] + (Flag WriteGhcEnvironmentFilesOnlyForGhc844AndNewer) -> ["ghc8.4.4+"] + NoFlag -> [] + + +relaxDepsParser :: Parse.ReadP r (Maybe RelaxDeps) +relaxDepsParser = + (Just . RelaxDepsSome) `fmap` Parse.sepBy1 parse (Parse.char ',') + +relaxDepsPrinter :: (Maybe RelaxDeps) -> [Maybe String] +relaxDepsPrinter Nothing = [] +relaxDepsPrinter (Just RelaxDepsAll) = [Nothing] +relaxDepsPrinter (Just (RelaxDepsSome pkgs)) = map (Just . 
display) $ pkgs + + +instance Monoid ConfigExFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup ConfigExFlags where + (<>) = gmappend + +reconfigureCommand :: CommandUI (ConfigFlags, ConfigExFlags) +reconfigureCommand + = configureExCommand + { commandName = "reconfigure" + , commandSynopsis = "Reconfigure the package if necessary." + , commandDescription = Just $ \pname -> wrapText $ + "Run `configure` with the most recently used flags, or append FLAGS " + ++ "to the most recently used configuration. " + ++ "Accepts the same flags as `" ++ pname ++ " v1-configure'. " + ++ "If the package has never been configured, the default flags are " + ++ "used." + , commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " v1-reconfigure\n" + ++ " Configure with the most recently used flags.\n" + ++ " " ++ pname ++ " v1-reconfigure -w PATH\n" + ++ " Reconfigure with the most recently used flags,\n" + ++ " but use the compiler at PATH.\n\n" + , commandUsage = usageAlternatives "v1-reconfigure" [ "[FLAGS]" ] + , commandDefaultFlags = mempty + } + +-- ------------------------------------------------------------ +-- * Build flags +-- ------------------------------------------------------------ + +data SkipAddSourceDepsCheck = + SkipAddSourceDepsCheck | DontSkipAddSourceDepsCheck + deriving Eq + +data BuildExFlags = BuildExFlags { + buildOnly :: Flag SkipAddSourceDepsCheck +} deriving Generic + +buildExOptions :: ShowOrParseArgs -> [OptionField BuildExFlags] +buildExOptions _showOrParseArgs = + option [] ["only"] + "Don't reinstall add-source dependencies (sandbox-only)" + buildOnly (\v flags -> flags { buildOnly = v }) + (noArg (Flag SkipAddSourceDepsCheck)) + + : [] + +buildCommand :: CommandUI (BuildFlags, BuildExFlags) +buildCommand = parent { + commandName = "build", + commandDescription = Just $ \_ -> wrapText $ + "Components encompass executables, tests, and benchmarks.\n" + ++ "\n" + ++ "Affected by configuration options, see 
`v1-configure`.\n", + commandDefaultFlags = (commandDefaultFlags parent, mempty), + commandUsage = usageAlternatives "v1-build" $ + [ "[FLAGS]", "COMPONENTS [FLAGS]" ], + commandOptions = + \showOrParseArgs -> liftOptions fst setFst + (commandOptions parent showOrParseArgs) + ++ + liftOptions snd setSnd (buildExOptions showOrParseArgs) + , commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " v1-build " + ++ " All the components in the package\n" + ++ " " ++ pname ++ " v1-build foo " + ++ " A component (i.e. lib, exe, test suite)\n\n" + ++ Cabal.programFlagsDescription defaultProgramDb + } + where + setFst a (_,b) = (a,b) + setSnd b (a,_) = (a,b) + + parent = Cabal.buildCommand defaultProgramDb + +instance Monoid BuildExFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup BuildExFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Repl command +-- ------------------------------------------------------------ + +replCommand :: CommandUI (ReplFlags, BuildExFlags) +replCommand = parent { + commandName = "repl", + commandDescription = Just $ \pname -> wrapText $ + "If the current directory contains no package, ignores COMPONENT " + ++ "parameters and opens an interactive interpreter session; if a " + ++ "sandbox is present, its package database will be used.\n" + ++ "\n" + ++ "Otherwise, (re)configures with the given or default flags, and " + ++ "loads the interpreter with the relevant modules. For executables, " + ++ "tests and benchmarks, loads the main module (and its " + ++ "dependencies); for libraries all exposed/other modules.\n" + ++ "\n" + ++ "The default component is the library itself, or the executable " + ++ "if that is the only component.\n" + ++ "\n" + ++ "Support for loading specific modules is planned but not " + ++ "implemented yet. For certain scenarios, `" ++ pname + ++ " v1-exec -- ghci :l Foo` may be used instead. 
Note that `v1-exec` will " + ++ "not (re)configure and you will have to specify the location of " + ++ "other modules, if required.\n", + commandUsage = \pname -> "Usage: " ++ pname ++ " v1-repl [COMPONENT] [FLAGS]\n", + commandDefaultFlags = (commandDefaultFlags parent, mempty), + commandOptions = + \showOrParseArgs -> liftOptions fst setFst + (commandOptions parent showOrParseArgs) + ++ + liftOptions snd setSnd (buildExOptions showOrParseArgs), + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " v1-repl " + ++ " The first component in the package\n" + ++ " " ++ pname ++ " v1-repl foo " + ++ " A named component (i.e. lib, exe, test suite)\n" + ++ " " ++ pname ++ " v1-repl --ghc-options=\"-lstdc++\"" + ++ " Specifying flags for interpreter\n" + } + where + setFst a (_,b) = (a,b) + setSnd b (a,_) = (a,b) + + parent = Cabal.replCommand defaultProgramDb + +-- ------------------------------------------------------------ +-- * Test command +-- ------------------------------------------------------------ + +testCommand :: CommandUI (TestFlags, BuildFlags, BuildExFlags) +testCommand = parent { + commandName = "test", + commandDescription = Just $ \pname -> wrapText $ + "If necessary (re)configures with `--enable-tests` flag and builds" + ++ " the test suite.\n" + ++ "\n" + ++ "Remember that the tests' dependencies must be installed if there" + ++ " are additional ones; e.g. 
with `" ++ pname + ++ " v1-install --only-dependencies --enable-tests`.\n" + ++ "\n" + ++ "By defining UserHooks in a custom Setup.hs, the package can" + ++ " define actions to be executed before and after running tests.\n", + commandUsage = usageAlternatives "v1-test" + [ "[FLAGS]", "TESTCOMPONENTS [FLAGS]" ], + commandDefaultFlags = (commandDefaultFlags parent, + Cabal.defaultBuildFlags, mempty), + commandOptions = + \showOrParseArgs -> liftOptions get1 set1 + (commandOptions parent showOrParseArgs) + ++ + liftOptions get2 set2 + (Cabal.buildOptions progDb showOrParseArgs) + ++ + liftOptions get3 set3 (buildExOptions showOrParseArgs) + } + where + get1 (a,_,_) = a; set1 a (_,b,c) = (a,b,c) + get2 (_,b,_) = b; set2 b (a,_,c) = (a,b,c) + get3 (_,_,c) = c; set3 c (a,b,_) = (a,b,c) + + parent = Cabal.testCommand + progDb = defaultProgramDb + +-- ------------------------------------------------------------ +-- * Bench command +-- ------------------------------------------------------------ + +benchmarkCommand :: CommandUI (BenchmarkFlags, BuildFlags, BuildExFlags) +benchmarkCommand = parent { + commandName = "bench", + commandUsage = usageAlternatives "v1-bench" + [ "[FLAGS]", "BENCHCOMPONENTS [FLAGS]" ], + commandDescription = Just $ \pname -> wrapText $ + "If necessary (re)configures with `--enable-benchmarks` flag and" + ++ " builds the benchmarks.\n" + ++ "\n" + ++ "Remember that the benchmarks' dependencies must be installed if" + ++ " there are additional ones; e.g. 
with `" ++ pname + ++ " v1-install --only-dependencies --enable-benchmarks`.\n" + ++ "\n" + ++ "By defining UserHooks in a custom Setup.hs, the package can" + ++ " define actions to be executed before and after running" + ++ " benchmarks.\n", + commandDefaultFlags = (commandDefaultFlags parent, + Cabal.defaultBuildFlags, mempty), + commandOptions = + \showOrParseArgs -> liftOptions get1 set1 + (commandOptions parent showOrParseArgs) + ++ + liftOptions get2 set2 + (Cabal.buildOptions progDb showOrParseArgs) + ++ + liftOptions get3 set3 (buildExOptions showOrParseArgs) + } + where + get1 (a,_,_) = a; set1 a (_,b,c) = (a,b,c) + get2 (_,b,_) = b; set2 b (a,_,c) = (a,b,c) + get3 (_,_,c) = c; set3 c (a,b,_) = (a,b,c) + + parent = Cabal.benchmarkCommand + progDb = defaultProgramDb + +-- ------------------------------------------------------------ +-- * Fetch command +-- ------------------------------------------------------------ + +data FetchFlags = FetchFlags { +-- fetchOutput :: Flag FilePath, + fetchDeps :: Flag Bool, + fetchDryRun :: Flag Bool, + fetchSolver :: Flag PreSolver, + fetchMaxBackjumps :: Flag Int, + fetchReorderGoals :: Flag ReorderGoals, + fetchCountConflicts :: Flag CountConflicts, + fetchIndependentGoals :: Flag IndependentGoals, + fetchShadowPkgs :: Flag ShadowPkgs, + fetchStrongFlags :: Flag StrongFlags, + fetchAllowBootLibInstalls :: Flag AllowBootLibInstalls, + fetchTests :: Flag Bool, + fetchBenchmarks :: Flag Bool, + fetchVerbosity :: Flag Verbosity + } + +defaultFetchFlags :: FetchFlags +defaultFetchFlags = FetchFlags { +-- fetchOutput = mempty, + fetchDeps = toFlag True, + fetchDryRun = toFlag False, + fetchSolver = Flag defaultSolver, + fetchMaxBackjumps = Flag defaultMaxBackjumps, + fetchReorderGoals = Flag (ReorderGoals False), + fetchCountConflicts = Flag (CountConflicts True), + fetchIndependentGoals = Flag (IndependentGoals False), + fetchShadowPkgs = Flag (ShadowPkgs False), + fetchStrongFlags = Flag (StrongFlags False), + 
fetchAllowBootLibInstalls = Flag (AllowBootLibInstalls False), + fetchTests = toFlag False, + fetchBenchmarks = toFlag False, + fetchVerbosity = toFlag normal + } + +fetchCommand :: CommandUI FetchFlags +fetchCommand = CommandUI { + commandName = "fetch", + commandSynopsis = "Downloads packages for later installation.", + commandUsage = usageAlternatives "fetch" [ "[FLAGS] PACKAGES" + ], + commandDescription = Just $ \_ -> + "Note that it currently is not possible to fetch the dependencies for a\n" + ++ "package in the current directory.\n", + commandNotes = Nothing, + commandDefaultFlags = defaultFetchFlags, + commandOptions = \ showOrParseArgs -> [ + optionVerbosity fetchVerbosity (\v flags -> flags { fetchVerbosity = v }) + +-- , option "o" ["output"] +-- "Put the package(s) somewhere specific rather than the usual cache." +-- fetchOutput (\v flags -> flags { fetchOutput = v }) +-- (reqArgFlag "PATH") + + , option [] ["dependencies", "deps"] + "Resolve and fetch dependencies (default)" + fetchDeps (\v flags -> flags { fetchDeps = v }) + trueArg + + , option [] ["no-dependencies", "no-deps"] + "Ignore dependencies" + fetchDeps (\v flags -> flags { fetchDeps = v }) + falseArg + + , option [] ["dry-run"] + "Do not install anything, only print what would be installed." + fetchDryRun (\v flags -> flags { fetchDryRun = v }) + trueArg + + , option "" ["tests"] + "dependency checking and compilation for test suites listed in the package description file." + fetchTests (\v flags -> flags { fetchTests = v }) + (boolOpt [] []) + + , option "" ["benchmarks"] + "dependency checking and compilation for benchmarks listed in the package description file." 
+ fetchBenchmarks (\v flags -> flags { fetchBenchmarks = v }) + (boolOpt [] []) + + ] ++ + + optionSolver fetchSolver (\v flags -> flags { fetchSolver = v }) : + optionSolverFlags showOrParseArgs + fetchMaxBackjumps (\v flags -> flags { fetchMaxBackjumps = v }) + fetchReorderGoals (\v flags -> flags { fetchReorderGoals = v }) + fetchCountConflicts (\v flags -> flags { fetchCountConflicts = v }) + fetchIndependentGoals (\v flags -> flags { fetchIndependentGoals = v }) + fetchShadowPkgs (\v flags -> flags { fetchShadowPkgs = v }) + fetchStrongFlags (\v flags -> flags { fetchStrongFlags = v }) + fetchAllowBootLibInstalls (\v flags -> flags { fetchAllowBootLibInstalls = v }) + + } + +-- ------------------------------------------------------------ +-- * Freeze command +-- ------------------------------------------------------------ + +data FreezeFlags = FreezeFlags { + freezeDryRun :: Flag Bool, + freezeTests :: Flag Bool, + freezeBenchmarks :: Flag Bool, + freezeSolver :: Flag PreSolver, + freezeMaxBackjumps :: Flag Int, + freezeReorderGoals :: Flag ReorderGoals, + freezeCountConflicts :: Flag CountConflicts, + freezeIndependentGoals :: Flag IndependentGoals, + freezeShadowPkgs :: Flag ShadowPkgs, + freezeStrongFlags :: Flag StrongFlags, + freezeAllowBootLibInstalls :: Flag AllowBootLibInstalls, + freezeVerbosity :: Flag Verbosity + } + +defaultFreezeFlags :: FreezeFlags +defaultFreezeFlags = FreezeFlags { + freezeDryRun = toFlag False, + freezeTests = toFlag False, + freezeBenchmarks = toFlag False, + freezeSolver = Flag defaultSolver, + freezeMaxBackjumps = Flag defaultMaxBackjumps, + freezeReorderGoals = Flag (ReorderGoals False), + freezeCountConflicts = Flag (CountConflicts True), + freezeIndependentGoals = Flag (IndependentGoals False), + freezeShadowPkgs = Flag (ShadowPkgs False), + freezeStrongFlags = Flag (StrongFlags False), + freezeAllowBootLibInstalls = Flag (AllowBootLibInstalls False), + freezeVerbosity = toFlag normal + } + +freezeCommand :: CommandUI 
FreezeFlags +freezeCommand = CommandUI { + commandName = "freeze", + commandSynopsis = "Freeze dependencies.", + commandDescription = Just $ \_ -> wrapText $ + "Calculates a valid set of dependencies and their exact versions. " + ++ "If successful, saves the result to the file `cabal.config`.\n" + ++ "\n" + ++ "The package versions specified in `cabal.config` will be used for " + ++ "any future installs.\n" + ++ "\n" + ++ "An existing `cabal.config` is ignored and overwritten.\n", + commandNotes = Nothing, + commandUsage = usageFlags "freeze", + commandDefaultFlags = defaultFreezeFlags, + commandOptions = \ showOrParseArgs -> [ + optionVerbosity freezeVerbosity + (\v flags -> flags { freezeVerbosity = v }) + + , option [] ["dry-run"] + "Do not freeze anything, only print what would be frozen" + freezeDryRun (\v flags -> flags { freezeDryRun = v }) + trueArg + + , option [] ["tests"] + ("freezing of the dependencies of any tests suites " + ++ "in the package description file.") + freezeTests (\v flags -> flags { freezeTests = v }) + (boolOpt [] []) + + , option [] ["benchmarks"] + ("freezing of the dependencies of any benchmarks suites " + ++ "in the package description file.") + freezeBenchmarks (\v flags -> flags { freezeBenchmarks = v }) + (boolOpt [] []) + + ] ++ + + optionSolver + freezeSolver (\v flags -> flags { freezeSolver = v }): + optionSolverFlags showOrParseArgs + freezeMaxBackjumps (\v flags -> flags { freezeMaxBackjumps = v }) + freezeReorderGoals (\v flags -> flags { freezeReorderGoals = v }) + freezeCountConflicts (\v flags -> flags { freezeCountConflicts = v }) + freezeIndependentGoals (\v flags -> flags { freezeIndependentGoals = v }) + freezeShadowPkgs (\v flags -> flags { freezeShadowPkgs = v }) + freezeStrongFlags (\v flags -> flags { freezeStrongFlags = v }) + freezeAllowBootLibInstalls (\v flags -> flags { freezeAllowBootLibInstalls = v }) + + } + +-- ------------------------------------------------------------ +-- * 'gen-bounds' command +-- 
------------------------------------------------------------ + +genBoundsCommand :: CommandUI FreezeFlags +genBoundsCommand = CommandUI { + commandName = "gen-bounds", + commandSynopsis = "Generate dependency bounds.", + commandDescription = Just $ \_ -> wrapText $ + "Generates bounds for all dependencies that do not currently have them. " + ++ "Generated bounds are printed to stdout. " + ++ "You can then paste them into your .cabal file.\n" + ++ "\n", + commandNotes = Nothing, + commandUsage = usageFlags "gen-bounds", + commandDefaultFlags = defaultFreezeFlags, + commandOptions = \ _ -> [ + optionVerbosity freezeVerbosity (\v flags -> flags { freezeVerbosity = v }) + ] + } + +-- ------------------------------------------------------------ +-- * 'outdated' command +-- ------------------------------------------------------------ + +data IgnoreMajorVersionBumps = IgnoreMajorVersionBumpsNone + | IgnoreMajorVersionBumpsAll + | IgnoreMajorVersionBumpsSome [PackageName] + +instance Monoid IgnoreMajorVersionBumps where + mempty = IgnoreMajorVersionBumpsNone + mappend = (<>) + +instance Semigroup IgnoreMajorVersionBumps where + IgnoreMajorVersionBumpsNone <> r = r + l@IgnoreMajorVersionBumpsAll <> _ = l + l@(IgnoreMajorVersionBumpsSome _) <> IgnoreMajorVersionBumpsNone = l + (IgnoreMajorVersionBumpsSome _) <> r@IgnoreMajorVersionBumpsAll = r + (IgnoreMajorVersionBumpsSome a) <> (IgnoreMajorVersionBumpsSome b) = + IgnoreMajorVersionBumpsSome (a ++ b) + +data OutdatedFlags = OutdatedFlags { + outdatedVerbosity :: Flag Verbosity, + outdatedFreezeFile :: Flag Bool, + outdatedNewFreezeFile :: Flag Bool, + outdatedProjectFile :: Flag FilePath, + outdatedSimpleOutput :: Flag Bool, + outdatedExitCode :: Flag Bool, + outdatedQuiet :: Flag Bool, + outdatedIgnore :: [PackageName], + outdatedMinor :: Maybe IgnoreMajorVersionBumps + } + +defaultOutdatedFlags :: OutdatedFlags +defaultOutdatedFlags = OutdatedFlags { + outdatedVerbosity = toFlag normal, + outdatedFreezeFile = mempty, + 
outdatedNewFreezeFile = mempty, + outdatedProjectFile = mempty, + outdatedSimpleOutput = mempty, + outdatedExitCode = mempty, + outdatedQuiet = mempty, + outdatedIgnore = mempty, + outdatedMinor = mempty + } + +outdatedCommand :: CommandUI OutdatedFlags +outdatedCommand = CommandUI { + commandName = "outdated", + commandSynopsis = "Check for outdated dependencies", + commandDescription = Just $ \_ -> wrapText $ + "Checks for outdated dependencies in the package description file " + ++ "or freeze file", + commandNotes = Nothing, + commandUsage = usageFlags "outdated", + commandDefaultFlags = defaultOutdatedFlags, + commandOptions = \ _ -> [ + optionVerbosity outdatedVerbosity + (\v flags -> flags { outdatedVerbosity = v }) + + ,option [] ["freeze-file", "v1-freeze-file"] + "Act on the freeze file" + outdatedFreezeFile (\v flags -> flags { outdatedFreezeFile = v }) + trueArg + + ,option [] ["new-freeze-file", "v2-freeze-file"] + "Act on the new-style freeze file (default: cabal.project.freeze)" + outdatedNewFreezeFile (\v flags -> flags { outdatedNewFreezeFile = v }) + trueArg + + ,option [] ["project-file"] + "Act on the new-style freeze file named PROJECTFILE.freeze rather than the default cabal.project.freeze" + outdatedProjectFile (\v flags -> flags { outdatedProjectFile = v }) + (reqArgFlag "PROJECTFILE") + + ,option [] ["simple-output"] + "Only print names of outdated dependencies, one per line" + outdatedSimpleOutput (\v flags -> flags { outdatedSimpleOutput = v }) + trueArg + + ,option [] ["exit-code"] + "Exit with non-zero when there are outdated dependencies" + outdatedExitCode (\v flags -> flags { outdatedExitCode = v }) + trueArg + + ,option ['q'] ["quiet"] + "Don't print any output. 
Implies '--exit-code' and '-v0'" + outdatedQuiet (\v flags -> flags { outdatedQuiet = v }) + trueArg + + ,option [] ["ignore"] + "Packages to ignore" + outdatedIgnore (\v flags -> flags { outdatedIgnore = v }) + (reqArg "PKGS" pkgNameListParser (map display)) + + ,option [] ["minor"] + "Ignore major version bumps for these packages" + outdatedMinor (\v flags -> flags { outdatedMinor = v }) + (optArg "PKGS" ignoreMajorVersionBumpsParser + (Just IgnoreMajorVersionBumpsAll) ignoreMajorVersionBumpsPrinter) + ] + } + where + ignoreMajorVersionBumpsPrinter :: (Maybe IgnoreMajorVersionBumps) + -> [Maybe String] + ignoreMajorVersionBumpsPrinter Nothing = [] + ignoreMajorVersionBumpsPrinter (Just IgnoreMajorVersionBumpsNone)= [] + ignoreMajorVersionBumpsPrinter (Just IgnoreMajorVersionBumpsAll) = [Nothing] + ignoreMajorVersionBumpsPrinter (Just (IgnoreMajorVersionBumpsSome pkgs)) = + map (Just . display) $ pkgs + + ignoreMajorVersionBumpsParser = + (Just . IgnoreMajorVersionBumpsSome) `fmap` pkgNameListParser + + pkgNameListParser = readP_to_E + ("Couldn't parse the list of package names: " ++) + (Parse.sepBy1 parse (Parse.char ',')) + +-- ------------------------------------------------------------ +-- * Update command +-- ------------------------------------------------------------ + +data UpdateFlags + = UpdateFlags { + updateVerbosity :: Flag Verbosity, + updateIndexState :: Flag IndexState + } deriving Generic + +defaultUpdateFlags :: UpdateFlags +defaultUpdateFlags + = UpdateFlags { + updateVerbosity = toFlag normal, + updateIndexState = toFlag IndexStateHead + } + +updateCommand :: CommandUI UpdateFlags +updateCommand = CommandUI { + commandName = "update", + commandSynopsis = "Updates list of known packages.", + commandDescription = Just $ \_ -> + "For all known remote repositories, download the package list.\n", + commandNotes = Just $ \_ -> + relevantConfigValuesText ["remote-repo" + ,"remote-repo-cache" + ,"local-repo"], + commandUsage = usageFlags "v1-update", + 
commandDefaultFlags = defaultUpdateFlags, + commandOptions = \_ -> [ + optionVerbosity updateVerbosity (\v flags -> flags { updateVerbosity = v }), + option [] ["index-state"] + ("Update the source package index to its state as it existed at a previous time. " ++ + "Accepts unix-timestamps (e.g. '@1474732068'), ISO8601 UTC timestamps " ++ + "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD' (default: 'HEAD').") + updateIndexState (\v flags -> flags { updateIndexState = v }) + (reqArg "STATE" (readP_to_E (const $ "index-state must be a " ++ + "unix-timestamps (e.g. '@1474732068'), " ++ + "a ISO8601 UTC timestamp " ++ + "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD'") + (toFlag `fmap` parse)) + (flagToList . fmap display)) + ] + } + +-- ------------------------------------------------------------ +-- * Other commands +-- ------------------------------------------------------------ + +upgradeCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) +upgradeCommand = configureCommand { + commandName = "upgrade", + commandSynopsis = "(command disabled, use install instead)", + commandDescription = Nothing, + commandUsage = usageFlagsOrPackages "upgrade", + commandDefaultFlags = (mempty, mempty, mempty, mempty), + commandOptions = commandOptions installCommand + } + +cleanCommand :: CommandUI CleanFlags +cleanCommand = Cabal.cleanCommand + { commandUsage = \pname -> + "Usage: " ++ pname ++ " v1-clean [FLAGS]\n" + } + +checkCommand :: CommandUI (Flag Verbosity) +checkCommand = CommandUI { + commandName = "check", + commandSynopsis = "Check the package for common mistakes.", + commandDescription = Just $ \_ -> wrapText $ + "Expects a .cabal package file in the current directory.\n" + ++ "\n" + ++ "The checks correspond to the requirements to packages on Hackage. 
" + ++ "If no errors and warnings are reported, Hackage will accept this " + ++ "package.\n", + commandNotes = Nothing, + commandUsage = usageFlags "check", + commandDefaultFlags = toFlag normal, + commandOptions = \_ -> [optionVerbosity id const] + } + +formatCommand :: CommandUI (Flag Verbosity) +formatCommand = CommandUI { + commandName = "format", + commandSynopsis = "Reformat the .cabal file using the standard style.", + commandDescription = Nothing, + commandNotes = Nothing, + commandUsage = usageAlternatives "format" ["[FILE]"], + commandDefaultFlags = toFlag normal, + commandOptions = \_ -> [] + } + +uninstallCommand :: CommandUI (Flag Verbosity) +uninstallCommand = CommandUI { + commandName = "uninstall", + commandSynopsis = "Warn about 'uninstall' not being implemented.", + commandDescription = Nothing, + commandNotes = Nothing, + commandUsage = usageAlternatives "uninstall" ["PACKAGES"], + commandDefaultFlags = toFlag normal, + commandOptions = \_ -> [] + } + +manpageCommand :: CommandUI (Flag Verbosity) +manpageCommand = CommandUI { + commandName = "manpage", + commandSynopsis = "Outputs manpage source.", + commandDescription = Just $ \_ -> + "Output manpage source to STDOUT.\n", + commandNotes = Nothing, + commandUsage = usageFlags "manpage", + commandDefaultFlags = toFlag normal, + commandOptions = \_ -> [optionVerbosity id const] + } + +runCommand :: CommandUI (BuildFlags, BuildExFlags) +runCommand = CommandUI { + commandName = "run", + commandSynopsis = "Builds and runs an executable.", + commandDescription = Just $ \pname -> wrapText $ + "Builds and then runs the specified executable. 
If no executable is " + ++ "specified, but the package contains just one executable, that one " + ++ "is built and executed.\n" + ++ "\n" + ++ "Use `" ++ pname ++ " v1-test --show-details=streaming` to run a " + ++ "test-suite and get its full output.\n", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " v1-run\n" + ++ " Run the only executable in the current package;\n" + ++ " " ++ pname ++ " v1-run foo -- --fooflag\n" + ++ " Works similar to `./foo --fooflag`.\n", + commandUsage = usageAlternatives "v1-run" + ["[FLAGS] [EXECUTABLE] [-- EXECUTABLE_FLAGS]"], + commandDefaultFlags = mempty, + commandOptions = + \showOrParseArgs -> liftOptions fst setFst + (commandOptions parent showOrParseArgs) + ++ + liftOptions snd setSnd + (buildExOptions showOrParseArgs) + } + where + setFst a (_,b) = (a,b) + setSnd b (a,_) = (a,b) + + parent = Cabal.buildCommand defaultProgramDb + +-- ------------------------------------------------------------ +-- * Report flags +-- ------------------------------------------------------------ + +data ReportFlags = ReportFlags { + reportUsername :: Flag Username, + reportPassword :: Flag Password, + reportVerbosity :: Flag Verbosity + } deriving Generic + +defaultReportFlags :: ReportFlags +defaultReportFlags = ReportFlags { + reportUsername = mempty, + reportPassword = mempty, + reportVerbosity = toFlag normal + } + +reportCommand :: CommandUI ReportFlags +reportCommand = CommandUI { + commandName = "report", + commandSynopsis = "Upload build reports to a remote server.", + commandDescription = Nothing, + commandNotes = Just $ \_ -> + "You can store your Hackage login in the ~/.cabal/config file\n", + commandUsage = usageAlternatives "report" ["[FLAGS]"], + commandDefaultFlags = defaultReportFlags, + commandOptions = \_ -> + [optionVerbosity reportVerbosity (\v flags -> flags { reportVerbosity = v }) + + ,option ['u'] ["username"] + "Hackage username." 
+ reportUsername (\v flags -> flags { reportUsername = v }) + (reqArg' "USERNAME" (toFlag . Username) + (flagToList . fmap unUsername)) + + ,option ['p'] ["password"] + "Hackage password." + reportPassword (\v flags -> flags { reportPassword = v }) + (reqArg' "PASSWORD" (toFlag . Password) + (flagToList . fmap unPassword)) + ] + } + +instance Monoid ReportFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup ReportFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Get flags +-- ------------------------------------------------------------ + +data GetFlags = GetFlags { + getDestDir :: Flag FilePath, + getPristine :: Flag Bool, + getIndexState :: Flag IndexState, + getSourceRepository :: Flag (Maybe RepoKind), + getVerbosity :: Flag Verbosity + } deriving Generic + +defaultGetFlags :: GetFlags +defaultGetFlags = GetFlags { + getDestDir = mempty, + getPristine = mempty, + getIndexState = mempty, + getSourceRepository = mempty, + getVerbosity = toFlag normal + } + +getCommand :: CommandUI GetFlags +getCommand = CommandUI { + commandName = "get", + commandSynopsis = "Download/Extract a package's source code (repository).", + commandDescription = Just $ \_ -> wrapText $ + "Creates a local copy of a package's source code. By default it gets " + ++ "the source\ntarball and unpacks it in a local subdirectory. " + ++ "Alternatively, with -s it will\nget the code from the source " + ++ "repository specified by the package.\n", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " get hlint\n" + ++ " Download the latest stable version of hlint;\n" + ++ " " ++ pname ++ " get lens --source-repository=head\n" + ++ " Download the source repository (i.e. 
git clone from github).\n", + commandUsage = usagePackages "get", + commandDefaultFlags = defaultGetFlags, + commandOptions = \_ -> [ + optionVerbosity getVerbosity (\v flags -> flags { getVerbosity = v }) + + ,option "d" ["destdir"] + "Where to place the package source, defaults to the current directory." + getDestDir (\v flags -> flags { getDestDir = v }) + (reqArgFlag "PATH") + + ,option "s" ["source-repository"] + "Copy the package's source repository (ie git clone, darcs get, etc as appropriate)." + getSourceRepository (\v flags -> flags { getSourceRepository = v }) + (optArg "[head|this|...]" (readP_to_E (const "invalid source-repository") + (fmap (toFlag . Just) parse)) + (Flag Nothing) + (map (fmap show) . flagToList)) + + , option [] ["index-state"] + ("Use source package index state as it existed at a previous time. " ++ + "Accepts unix-timestamps (e.g. '@1474732068'), ISO8601 UTC timestamps " ++ + "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD' (default: 'HEAD'). " ++ + "This determines which package versions are available as well as " ++ + ".cabal file revision is selected (unless --pristine is used).") + getIndexState (\v flags -> flags { getIndexState = v }) + (reqArg "STATE" (readP_to_E (const $ "index-state must be a " ++ + "unix-timestamps (e.g. '@1474732068'), " ++ + "a ISO8601 UTC timestamp " ++ + "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD'") + (toFlag `fmap` parse)) + (flagToList . fmap display)) + + , option [] ["pristine"] + ("Unpack the original pristine tarball, rather than updating the " + ++ ".cabal file with the latest revision from the package archive.") + getPristine (\v flags -> flags { getPristine = v }) + trueArg + ] + } + +-- 'cabal unpack' is a deprecated alias for 'cabal get'. 
+unpackCommand :: CommandUI GetFlags +unpackCommand = getCommand { + commandName = "unpack", + commandUsage = usagePackages "unpack" + } + +instance Monoid GetFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup GetFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * List flags +-- ------------------------------------------------------------ + +data ListFlags = ListFlags { + listInstalled :: Flag Bool, + listSimpleOutput :: Flag Bool, + listVerbosity :: Flag Verbosity, + listPackageDBs :: [Maybe PackageDB] + } deriving Generic + +defaultListFlags :: ListFlags +defaultListFlags = ListFlags { + listInstalled = Flag False, + listSimpleOutput = Flag False, + listVerbosity = toFlag normal, + listPackageDBs = [] + } + +listCommand :: CommandUI ListFlags +listCommand = CommandUI { + commandName = "list", + commandSynopsis = "List packages matching a search string.", + commandDescription = Just $ \_ -> wrapText $ + "List all packages, or all packages matching one of the search" + ++ " strings.\n" + ++ "\n" + ++ "If there is a sandbox in the current directory and " + ++ "config:ignore-sandbox is False, use the sandbox package database. " + ++ "Otherwise, use the package database specified with --package-db. 
" + ++ "If not specified, use the user package database.\n", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " list pandoc\n" + ++ " Will find pandoc, pandoc-citeproc, pandoc-lens, ...\n", + commandUsage = usageAlternatives "list" [ "[FLAGS]" + , "[FLAGS] STRINGS"], + commandDefaultFlags = defaultListFlags, + commandOptions = \_ -> [ + optionVerbosity listVerbosity (\v flags -> flags { listVerbosity = v }) + + , option [] ["installed"] + "Only print installed packages" + listInstalled (\v flags -> flags { listInstalled = v }) + trueArg + + , option [] ["simple-output"] + "Print in a easy-to-parse format" + listSimpleOutput (\v flags -> flags { listSimpleOutput = v }) + trueArg + + , option "" ["package-db"] + ( "Append the given package database to the list of package" + ++ " databases used (to satisfy dependencies and register into)." + ++ " May be a specific file, 'global' or 'user'. The initial list" + ++ " is ['global'], ['global', 'user'], or ['global', $sandbox]," + ++ " depending on context. Use 'clear' to reset the list to empty." 
+ ++ " See the user guide for details.") + listPackageDBs (\v flags -> flags { listPackageDBs = v }) + (reqArg' "DB" readPackageDbList showPackageDbList) + + ] + } + +instance Monoid ListFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup ListFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Info flags +-- ------------------------------------------------------------ + +data InfoFlags = InfoFlags { + infoVerbosity :: Flag Verbosity, + infoPackageDBs :: [Maybe PackageDB] + } deriving Generic + +defaultInfoFlags :: InfoFlags +defaultInfoFlags = InfoFlags { + infoVerbosity = toFlag normal, + infoPackageDBs = [] + } + +infoCommand :: CommandUI InfoFlags +infoCommand = CommandUI { + commandName = "info", + commandSynopsis = "Display detailed information about a particular package.", + commandDescription = Just $ \_ -> wrapText $ + "If there is a sandbox in the current directory and " + ++ "config:ignore-sandbox is False, use the sandbox package database. " + ++ "Otherwise, use the package database specified with --package-db. " + ++ "If not specified, use the user package database.\n", + commandNotes = Nothing, + commandUsage = usageAlternatives "info" ["[FLAGS] PACKAGES"], + commandDefaultFlags = defaultInfoFlags, + commandOptions = \_ -> [ + optionVerbosity infoVerbosity (\v flags -> flags { infoVerbosity = v }) + + , option "" ["package-db"] + ( "Append the given package database to the list of package" + ++ " databases used (to satisfy dependencies and register into)." + ++ " May be a specific file, 'global' or 'user'. The initial list" + ++ " is ['global'], ['global', 'user'], or ['global', $sandbox]," + ++ " depending on context. Use 'clear' to reset the list to empty." 
+ ++ " See the user guide for details.") + infoPackageDBs (\v flags -> flags { infoPackageDBs = v }) + (reqArg' "DB" readPackageDbList showPackageDbList) + + ] + } + +instance Monoid InfoFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup InfoFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Install flags +-- ------------------------------------------------------------ + +-- | Install takes the same flags as configure along with a few extras. +-- +data InstallFlags = InstallFlags { + installDocumentation :: Flag Bool, + installHaddockIndex :: Flag PathTemplate, + installDest :: Flag Cabal.CopyDest, + installDryRun :: Flag Bool, + installMaxBackjumps :: Flag Int, + installReorderGoals :: Flag ReorderGoals, + installCountConflicts :: Flag CountConflicts, + installIndependentGoals :: Flag IndependentGoals, + installShadowPkgs :: Flag ShadowPkgs, + installStrongFlags :: Flag StrongFlags, + installAllowBootLibInstalls :: Flag AllowBootLibInstalls, + installReinstall :: Flag Bool, + installAvoidReinstalls :: Flag AvoidReinstalls, + installOverrideReinstall :: Flag Bool, + installUpgradeDeps :: Flag Bool, + installOnly :: Flag Bool, + installOnlyDeps :: Flag Bool, + installIndexState :: Flag IndexState, + installRootCmd :: Flag String, + installSummaryFile :: NubList PathTemplate, + installLogFile :: Flag PathTemplate, + installBuildReports :: Flag ReportLevel, + installReportPlanningFailure :: Flag Bool, + installSymlinkBinDir :: Flag FilePath, + installPerComponent :: Flag Bool, + installOneShot :: Flag Bool, + installNumJobs :: Flag (Maybe Int), + installKeepGoing :: Flag Bool, + installRunTests :: Flag Bool, + installOfflineMode :: Flag Bool, + -- | The cabal project file name; defaults to @cabal.project@. 
+ -- Th name itself denotes the cabal project file name, but it also + -- is the base of auxiliary project files, such as + -- @cabal.project.local@ and @cabal.project.freeze@ which are also + -- read and written out in some cases. If the path is not found + -- in the current working directory, we will successively probe + -- relative to parent directories until this name is found. + installProjectFileName :: Flag FilePath + } + deriving (Eq, Generic) + +instance Binary InstallFlags + +defaultInstallFlags :: InstallFlags +defaultInstallFlags = InstallFlags { + installDocumentation = Flag False, + installHaddockIndex = Flag docIndexFile, + installDest = Flag Cabal.NoCopyDest, + installDryRun = Flag False, + installMaxBackjumps = Flag defaultMaxBackjumps, + installReorderGoals = Flag (ReorderGoals False), + installCountConflicts = Flag (CountConflicts True), + installIndependentGoals= Flag (IndependentGoals False), + installShadowPkgs = Flag (ShadowPkgs False), + installStrongFlags = Flag (StrongFlags False), + installAllowBootLibInstalls = Flag (AllowBootLibInstalls False), + installReinstall = Flag False, + installAvoidReinstalls = Flag (AvoidReinstalls False), + installOverrideReinstall = Flag False, + installUpgradeDeps = Flag False, + installOnly = Flag False, + installOnlyDeps = Flag False, + installIndexState = mempty, + installRootCmd = mempty, + installSummaryFile = mempty, + installLogFile = mempty, + installBuildReports = Flag NoReports, + installReportPlanningFailure = Flag False, + installSymlinkBinDir = mempty, + installPerComponent = Flag True, + installOneShot = Flag False, + installNumJobs = mempty, + installKeepGoing = Flag False, + installRunTests = mempty, + installOfflineMode = Flag False, + installProjectFileName = mempty + } + where + docIndexFile = toPathTemplate ("$datadir" "doc" + "$arch-$os-$compiler" "index.html") + +defaultMaxBackjumps :: Int +defaultMaxBackjumps = 2000 + +defaultSolver :: PreSolver +defaultSolver = AlwaysModular + 
+allSolvers :: String +allSolvers = intercalate ", " (map display ([minBound .. maxBound] :: [PreSolver])) + +installCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) +installCommand = CommandUI { + commandName = "install", + commandSynopsis = "Install packages.", + commandUsage = usageAlternatives "v1-install" [ "[FLAGS]" + , "[FLAGS] PACKAGES" + ], + commandDescription = Just $ \_ -> wrapText $ + "Installs one or more packages. By default, the installed package" + ++ " will be registered in the user's package database or, if a sandbox" + ++ " is present in the current directory, inside the sandbox.\n" + ++ "\n" + ++ "If PACKAGES are specified, downloads and installs those packages." + ++ " Otherwise, install the package in the current directory (and/or its" + ++ " dependencies) (there must be exactly one .cabal file in the current" + ++ " directory).\n" + ++ "\n" + ++ "When using a sandbox, the flags for `v1-install` only affect the" + ++ " current command and have no effect on future commands. (To achieve" + ++ " that, `v1-configure` must be used.)\n" + ++ " In contrast, without a sandbox, the flags to `v1-install` are saved and" + ++ " affect future commands such as `v1-build` and `v1-repl`. See the help for" + ++ " `v1-configure` for a list of commands being affected.\n" + ++ "\n" + ++ "Installed executables will by default (and without a sandbox)" + ++ " be put into `~/.cabal/bin/`." 
+ ++ " If you want installed executable to be available globally, make" + ++ " sure that the PATH environment variable contains that directory.\n" + ++ "When using a sandbox, executables will be put into" + ++ " `$SANDBOX/bin/` (by default: `./.cabal-sandbox/bin/`).\n" + ++ "\n" + ++ "When specifying --bindir, consider also specifying --datadir;" + ++ " this way the sandbox can be deleted and the executable should" + ++ " continue working as long as bindir and datadir are left untouched.", + commandNotes = Just $ \pname -> + ( case commandNotes + $ Cabal.configureCommand defaultProgramDb + of Just desc -> desc pname ++ "\n" + Nothing -> "" + ) + ++ "Examples:\n" + ++ " " ++ pname ++ " v1-install " + ++ " Package in the current directory\n" + ++ " " ++ pname ++ " v1-install foo " + ++ " Package from the hackage server\n" + ++ " " ++ pname ++ " v1-install foo-1.0 " + ++ " Specific version of a package\n" + ++ " " ++ pname ++ " v1-install 'foo < 2' " + ++ " Constrained package version\n" + ++ " " ++ pname ++ " v1-install haddock --bindir=$HOME/hask-bin/ --datadir=$HOME/hask-data/\n" + ++ " " ++ (map (const ' ') pname) + ++ " " + ++ " Change installation destination\n", + commandDefaultFlags = (mempty, mempty, mempty, mempty), + commandOptions = \showOrParseArgs -> + liftOptions get1 set1 + -- Note: [Hidden Flags] + -- hide "constraint", "dependency", and + -- "exact-configuration" from the configure options. + (filter ((`notElem` ["constraint", "dependency" + , "exact-configuration"]) + . optionName) $ + configureOptions showOrParseArgs) + ++ liftOptions get2 set2 (configureExOptions showOrParseArgs ConstraintSourceCommandlineFlag) + ++ liftOptions get3 set3 + -- hide "target-package-db" flag from the + -- install options. + (filter ((`notElem` ["target-package-db"]) + . 
optionName) $ + installOptions showOrParseArgs) + ++ liftOptions get4 set4 (haddockOptions showOrParseArgs) + } + where + get1 (a,_,_,_) = a; set1 a (_,b,c,d) = (a,b,c,d) + get2 (_,b,_,_) = b; set2 b (a,_,c,d) = (a,b,c,d) + get3 (_,_,c,_) = c; set3 c (a,b,_,d) = (a,b,c,d) + get4 (_,_,_,d) = d; set4 d (a,b,c,_) = (a,b,c,d) + +haddockCommand :: CommandUI HaddockFlags +haddockCommand = Cabal.haddockCommand + { commandUsage = usageAlternatives "v1-haddock" $ + [ "[FLAGS]", "COMPONENTS [FLAGS]" ] + } + +filterHaddockArgs :: [String] -> Version -> [String] +filterHaddockArgs args cabalLibVersion + | cabalLibVersion >= mkVersion [2,3,0] = args_latest + | cabalLibVersion < mkVersion [2,3,0] = args_2_3_0 + | otherwise = args_latest + where + args_latest = args + + -- Cabal < 2.3 doesn't know about per-component haddock + args_2_3_0 = [] + +filterHaddockFlags :: HaddockFlags -> Version -> HaddockFlags +filterHaddockFlags flags cabalLibVersion + | cabalLibVersion >= mkVersion [2,3,0] = flags_latest + | cabalLibVersion < mkVersion [2,3,0] = flags_2_3_0 + | otherwise = flags_latest + where + flags_latest = flags + + flags_2_3_0 = flags_latest { + -- Cabal < 2.3 doesn't know about per-component haddock + haddockArgs = [] + } + +haddockOptions :: ShowOrParseArgs -> [OptionField HaddockFlags] +haddockOptions showOrParseArgs + = [ opt { optionName = "haddock-" ++ name, + optionDescr = [ fmapOptFlags (\(_, lflags) -> ([], map ("haddock-" ++) lflags)) descr + | descr <- optionDescr opt] } + | opt <- commandOptions Cabal.haddockCommand showOrParseArgs + , let name = optionName opt + , name `elem` ["hoogle", "html", "html-location" + ,"executables", "tests", "benchmarks", "all", "internal", "css" + ,"hyperlink-source", "quickjump", "hscolour-css" + ,"contents-location", "for-hackage"] + ] + where + fmapOptFlags :: (OptFlags -> OptFlags) -> OptDescr a -> OptDescr a + fmapOptFlags modify (ReqArg d f p r w) = ReqArg d (modify f) p r w + fmapOptFlags modify (OptArg d f p r i w) = OptArg d 
(modify f) p r i w + fmapOptFlags modify (ChoiceOpt xs) = ChoiceOpt [(d, modify f, i, w) | (d, f, i, w) <- xs] + fmapOptFlags modify (BoolOpt d f1 f2 r w) = BoolOpt d (modify f1) (modify f2) r w + +installOptions :: ShowOrParseArgs -> [OptionField InstallFlags] +installOptions showOrParseArgs = + [ option "" ["documentation"] + "building of documentation" + installDocumentation (\v flags -> flags { installDocumentation = v }) + (boolOpt [] []) + + , option [] ["doc-index-file"] + "A central index of haddock API documentation (template cannot use $pkgid)" + installHaddockIndex (\v flags -> flags { installHaddockIndex = v }) + (reqArg' "TEMPLATE" (toFlag.toPathTemplate) + (flagToList . fmap fromPathTemplate)) + + , option [] ["dry-run"] + "Do not install anything, only print what would be installed." + installDryRun (\v flags -> flags { installDryRun = v }) + trueArg + + , option "" ["target-package-db"] + "package database to install into. Required when using ${pkgroot} prefix." + installDest (\v flags -> flags { installDest = v }) + (reqArg "DATABASE" (succeedReadE (Flag . Cabal.CopyToDb)) + (\f -> case f of Flag (Cabal.CopyToDb p) -> [p]; _ -> [])) + ] ++ + + optionSolverFlags showOrParseArgs + installMaxBackjumps (\v flags -> flags { installMaxBackjumps = v }) + installReorderGoals (\v flags -> flags { installReorderGoals = v }) + installCountConflicts (\v flags -> flags { installCountConflicts = v }) + installIndependentGoals (\v flags -> flags { installIndependentGoals = v }) + installShadowPkgs (\v flags -> flags { installShadowPkgs = v }) + installStrongFlags (\v flags -> flags { installStrongFlags = v }) + installAllowBootLibInstalls (\v flags -> flags { installAllowBootLibInstalls = v }) ++ + + [ option [] ["reinstall"] + "Install even if it means installing the same version again." 
+ installReinstall (\v flags -> flags { installReinstall = v }) + (yesNoOpt showOrParseArgs) + + , option [] ["avoid-reinstalls"] + "Do not select versions that would destructively overwrite installed packages." + (fmap asBool . installAvoidReinstalls) + (\v flags -> flags { installAvoidReinstalls = fmap AvoidReinstalls v }) + (yesNoOpt showOrParseArgs) + + , option [] ["force-reinstalls"] + "Reinstall packages even if they will most likely break other installed packages." + installOverrideReinstall (\v flags -> flags { installOverrideReinstall = v }) + (yesNoOpt showOrParseArgs) + + , option [] ["upgrade-dependencies"] + "Pick the latest version for all dependencies, rather than trying to pick an installed version." + installUpgradeDeps (\v flags -> flags { installUpgradeDeps = v }) + (yesNoOpt showOrParseArgs) + + , option [] ["only-dependencies"] + "Install only the dependencies necessary to build the given packages" + installOnlyDeps (\v flags -> flags { installOnlyDeps = v }) + (yesNoOpt showOrParseArgs) + + , option [] ["dependencies-only"] + "A synonym for --only-dependencies" + installOnlyDeps (\v flags -> flags { installOnlyDeps = v }) + (yesNoOpt showOrParseArgs) + + , option [] ["index-state"] + ("Use source package index state as it existed at a previous time. " ++ + "Accepts unix-timestamps (e.g. '@1474732068'), ISO8601 UTC timestamps " ++ + "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD' (default: 'HEAD').") + installIndexState (\v flags -> flags { installIndexState = v }) + (reqArg "STATE" (readP_to_E (const $ "index-state must be a " ++ + "unix-timestamps (e.g. '@1474732068'), " ++ + "a ISO8601 UTC timestamp " ++ + "(e.g. '2016-09-24T17:47:48Z'), or 'HEAD'") + (toFlag `fmap` parse)) + (flagToList . 
fmap display)) + + , option [] ["root-cmd"] + "(No longer supported, do not use.)" + installRootCmd (\v flags -> flags { installRootCmd = v }) + (reqArg' "COMMAND" toFlag flagToList) + + , option [] ["symlink-bindir"] + "Add symlinks to installed executables into this directory." + installSymlinkBinDir (\v flags -> flags { installSymlinkBinDir = v }) + (reqArgFlag "DIR") + + , option [] ["build-summary"] + "Save build summaries to file (name template can use $pkgid, $compiler, $os, $arch)" + installSummaryFile (\v flags -> flags { installSummaryFile = v }) + (reqArg' "TEMPLATE" (\x -> toNubList [toPathTemplate x]) (map fromPathTemplate . fromNubList)) + + , option [] ["build-log"] + "Log all builds to file (name template can use $pkgid, $compiler, $os, $arch)" + installLogFile (\v flags -> flags { installLogFile = v }) + (reqArg' "TEMPLATE" (toFlag.toPathTemplate) + (flagToList . fmap fromPathTemplate)) + + , option [] ["remote-build-reporting"] + "Generate build reports to send to a remote server (none, anonymous or detailed)." + installBuildReports (\v flags -> flags { installBuildReports = v }) + (reqArg "LEVEL" (readP_to_E (const $ "report level must be 'none', " + ++ "'anonymous' or 'detailed'") + (toFlag `fmap` parse)) + (flagToList . fmap display)) + + , option [] ["report-planning-failure"] + "Generate build reports when the dependency solver fails. This is used by the Hackage build bot." + installReportPlanningFailure (\v flags -> flags { installReportPlanningFailure = v }) + trueArg + + , option "" ["per-component"] + "Per-component builds when possible" + installPerComponent (\v flags -> flags { installPerComponent = v }) + (boolOpt [] []) + + , option [] ["one-shot"] + "Do not record the packages in the world file." + installOneShot (\v flags -> flags { installOneShot = v }) + (yesNoOpt showOrParseArgs) + + , option [] ["run-tests"] + "Run package test suites during installation." 
+ installRunTests (\v flags -> flags { installRunTests = v }) + trueArg + + , optionNumJobs + installNumJobs (\v flags -> flags { installNumJobs = v }) + + , option [] ["keep-going"] + "After a build failure, continue to build other unaffected packages." + installKeepGoing (\v flags -> flags { installKeepGoing = v }) + trueArg + + , option [] ["offline"] + "Don't download packages from the Internet." + installOfflineMode (\v flags -> flags { installOfflineMode = v }) + (yesNoOpt showOrParseArgs) + + , option [] ["project-file"] + "Set the name of the cabal.project file to search for in parent directories" + installProjectFileName (\v flags -> flags {installProjectFileName = v}) + (reqArgFlag "FILE") + ] ++ case showOrParseArgs of -- TODO: remove when "cabal install" + -- avoids + ParseArgs -> + [ option [] ["only"] + "Only installs the package in the current directory." + installOnly (\v flags -> flags { installOnly = v }) + trueArg ] + _ -> [] + + +instance Monoid InstallFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup InstallFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Upload flags +-- ------------------------------------------------------------ + +-- | Is this a candidate package or a package to be published? 
+data IsCandidate = IsCandidate | IsPublished + deriving Eq + +data UploadFlags = UploadFlags { + uploadCandidate :: Flag IsCandidate, + uploadDoc :: Flag Bool, + uploadUsername :: Flag Username, + uploadPassword :: Flag Password, + uploadPasswordCmd :: Flag [String], + uploadVerbosity :: Flag Verbosity + } deriving Generic + +defaultUploadFlags :: UploadFlags +defaultUploadFlags = UploadFlags { + uploadCandidate = toFlag IsCandidate, + uploadDoc = toFlag False, + uploadUsername = mempty, + uploadPassword = mempty, + uploadPasswordCmd = mempty, + uploadVerbosity = toFlag normal + } + +uploadCommand :: CommandUI UploadFlags +uploadCommand = CommandUI { + commandName = "upload", + commandSynopsis = "Uploads source packages or documentation to Hackage.", + commandDescription = Nothing, + commandNotes = Just $ \_ -> + "You can store your Hackage login in the ~/.cabal/config file\n" + ++ relevantConfigValuesText ["username", "password"], + commandUsage = \pname -> + "Usage: " ++ pname ++ " upload [FLAGS] TARFILES\n", + commandDefaultFlags = defaultUploadFlags, + commandOptions = \_ -> + [optionVerbosity uploadVerbosity + (\v flags -> flags { uploadVerbosity = v }) + + ,option [] ["publish"] + "Publish the package instead of uploading it as a candidate." + uploadCandidate (\v flags -> flags { uploadCandidate = v }) + (noArg (Flag IsPublished)) + + ,option ['d'] ["documentation"] + ("Upload documentation instead of a source package. " + ++ "By default, this uploads documentation for a package candidate. " + ++ "To upload documentation for " + ++ "a published package, combine with --publish.") + uploadDoc (\v flags -> flags { uploadDoc = v }) + trueArg + + ,option ['u'] ["username"] + "Hackage username." + uploadUsername (\v flags -> flags { uploadUsername = v }) + (reqArg' "USERNAME" (toFlag . Username) + (flagToList . fmap unUsername)) + + ,option ['p'] ["password"] + "Hackage password." 
+ uploadPassword (\v flags -> flags { uploadPassword = v }) + (reqArg' "PASSWORD" (toFlag . Password) + (flagToList . fmap unPassword)) + + ,option ['P'] ["password-command"] + "Command to get Hackage password." + uploadPasswordCmd (\v flags -> flags { uploadPasswordCmd = v }) + (reqArg' "PASSWORD" (Flag . words) (fromMaybe [] . flagToMaybe)) + ] + } + +instance Monoid UploadFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup UploadFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Init flags +-- ------------------------------------------------------------ + +emptyInitFlags :: IT.InitFlags +emptyInitFlags = mempty + +defaultInitFlags :: IT.InitFlags +defaultInitFlags = emptyInitFlags { IT.initVerbosity = toFlag normal } + +initCommand :: CommandUI IT.InitFlags +initCommand = CommandUI { + commandName = "init", + commandSynopsis = "Create a new .cabal package file (interactively).", + commandDescription = Just $ \_ -> wrapText $ + "Cabalise a project by creating a .cabal, Setup.hs, and " + ++ "optionally a LICENSE file.\n" + ++ "\n" + ++ "Calling init with no arguments (recommended) uses an " + ++ "interactive mode, which will try to guess as much as " + ++ "possible and prompt you for the rest. Command-line " + ++ "arguments are provided for scripting purposes. " + ++ "If you don't want interactive mode, be sure to pass " + ++ "the -n flag.\n", + commandNotes = Nothing, + commandUsage = \pname -> + "Usage: " ++ pname ++ " init [FLAGS]\n", + commandDefaultFlags = defaultInitFlags, + commandOptions = \_ -> + [ option ['n'] ["non-interactive"] + "Non-interactive mode." + IT.nonInteractive (\v flags -> flags { IT.nonInteractive = v }) + trueArg + + , option ['q'] ["quiet"] + "Do not generate log messages to stdout." + IT.quiet (\v flags -> flags { IT.quiet = v }) + trueArg + + , option [] ["no-comments"] + "Do not generate explanatory comments in the .cabal file." 
+ IT.noComments (\v flags -> flags { IT.noComments = v }) + trueArg + + , option ['m'] ["minimal"] + "Generate a minimal .cabal file, that is, do not include extra empty fields. Also implies --no-comments." + IT.minimal (\v flags -> flags { IT.minimal = v }) + trueArg + + , option [] ["overwrite"] + "Overwrite any existing .cabal, LICENSE, or Setup.hs files without warning." + IT.overwrite (\v flags -> flags { IT.overwrite = v }) + trueArg + + , option [] ["package-dir", "packagedir"] + "Root directory of the package (default = current directory)." + IT.packageDir (\v flags -> flags { IT.packageDir = v }) + (reqArgFlag "DIRECTORY") + + , option ['p'] ["package-name"] + "Name of the Cabal package to create." + IT.packageName (\v flags -> flags { IT.packageName = v }) + (reqArg "PACKAGE" (readP_to_E ("Cannot parse package name: "++) + (toFlag `fmap` parse)) + (flagToList . fmap display)) + + , option [] ["version"] + "Initial version of the package." + IT.version (\v flags -> flags { IT.version = v }) + (reqArg "VERSION" (readP_to_E ("Cannot parse package version: "++) + (toFlag `fmap` parse)) + (flagToList . fmap display)) + + , option [] ["cabal-version"] + "Version of the Cabal specification." + IT.cabalVersion (\v flags -> flags { IT.cabalVersion = v }) + (reqArg "VERSION_RANGE" (readP_to_E ("Cannot parse Cabal specification version: "++) + (toFlag `fmap` parse)) + (flagToList . fmap display)) + + , option ['l'] ["license"] + "Project license." + IT.license (\v flags -> flags { IT.license = v }) + (reqArg "LICENSE" (readP_to_E ("Cannot parse license: "++) + (toFlag `fmap` parse)) + (flagToList . fmap display)) + + , option ['a'] ["author"] + "Name of the project's author." + IT.author (\v flags -> flags { IT.author = v }) + (reqArgFlag "NAME") + + , option ['e'] ["email"] + "Email address of the maintainer." + IT.email (\v flags -> flags { IT.email = v }) + (reqArgFlag "EMAIL") + + , option ['u'] ["homepage"] + "Project homepage and/or repository." 
+ IT.homepage (\v flags -> flags { IT.homepage = v }) + (reqArgFlag "URL") + + , option ['s'] ["synopsis"] + "Short project synopsis." + IT.synopsis (\v flags -> flags { IT.synopsis = v }) + (reqArgFlag "TEXT") + + , option ['c'] ["category"] + "Project category." + IT.category (\v flags -> flags { IT.category = v }) + (reqArg' "CATEGORY" (\s -> toFlag $ maybe (Left s) Right (readMaybe s)) + (flagToList . fmap (either id show))) + + , option ['x'] ["extra-source-file"] + "Extra source file to be distributed with tarball." + IT.extraSrc (\v flags -> flags { IT.extraSrc = v }) + (reqArg' "FILE" (Just . (:[])) + (fromMaybe [])) + + , option [] ["is-library"] + "Build a library." + IT.packageType (\v flags -> flags { IT.packageType = v }) + (noArg (Flag IT.Library)) + + , option [] ["is-executable"] + "Build an executable." + IT.packageType + (\v flags -> flags { IT.packageType = v }) + (noArg (Flag IT.Executable)) + + , option [] ["is-libandexe"] + "Build a library and an executable." + IT.packageType + (\v flags -> flags { IT.packageType = v }) + (noArg (Flag IT.LibraryAndExecutable)) + + , option [] ["main-is"] + "Specify the main module." + IT.mainIs + (\v flags -> flags { IT.mainIs = v }) + (reqArgFlag "FILE") + + , option [] ["language"] + "Specify the default language." + IT.language + (\v flags -> flags { IT.language = v }) + (reqArg "LANGUAGE" (readP_to_E ("Cannot parse language: "++) + (toFlag `fmap` parse)) + (flagToList . fmap display)) + + , option ['o'] ["expose-module"] + "Export a module from the package." + IT.exposedModules + (\v flags -> flags { IT.exposedModules = v }) + (reqArg "MODULE" (readP_to_E ("Cannot parse module name: "++) + ((Just . (:[])) `fmap` parse)) + (maybe [] (fmap display))) + + , option [] ["extension"] + "Use a LANGUAGE extension (in the other-extensions field)." + IT.otherExts + (\v flags -> flags { IT.otherExts = v }) + (reqArg "EXTENSION" (readP_to_E ("Cannot parse extension: "++) + ((Just . 
(:[])) `fmap` parse)) + (maybe [] (fmap display))) + + , option ['d'] ["dependency"] + "Package dependency." + IT.dependencies (\v flags -> flags { IT.dependencies = v }) + (reqArg "PACKAGE" (readP_to_E ("Cannot parse dependency: "++) + ((Just . (:[])) `fmap` parse)) + (maybe [] (fmap display))) + + , option [] ["source-dir", "sourcedir"] + "Directory containing package source." + IT.sourceDirs (\v flags -> flags { IT.sourceDirs = v }) + (reqArg' "DIR" (Just . (:[])) + (fromMaybe [])) + + , option [] ["build-tool"] + "Required external build tool." + IT.buildTools (\v flags -> flags { IT.buildTools = v }) + (reqArg' "TOOL" (Just . (:[])) + (fromMaybe [])) + + -- NB: this is a bit of a transitional hack and will likely be + -- removed again if `cabal init` is migrated to the v2-* command + -- framework + , option "w" ["with-compiler"] + "give the path to a particular compiler" + IT.initHcPath (\v flags -> flags { IT.initHcPath = v }) + (reqArgFlag "PATH") + + , optionVerbosity IT.initVerbosity (\v flags -> flags { IT.initVerbosity = v }) + ] + } + +-- ------------------------------------------------------------ +-- * SDist flags +-- ------------------------------------------------------------ + +-- | Extra flags to @sdist@ beyond runghc Setup sdist +-- +data SDistExFlags = SDistExFlags { + sDistFormat :: Flag ArchiveFormat + } + deriving (Show, Generic) + +data ArchiveFormat = TargzFormat | ZipFormat -- ... 
+ deriving (Show, Eq) + +defaultSDistExFlags :: SDistExFlags +defaultSDistExFlags = SDistExFlags { + sDistFormat = Flag TargzFormat + } + +sdistCommand :: CommandUI (SDistFlags, SDistExFlags) +sdistCommand = Cabal.sdistCommand { + commandUsage = \pname -> + "Usage: " ++ pname ++ " v1-sdist [FLAGS]\n", + commandDefaultFlags = (commandDefaultFlags Cabal.sdistCommand, defaultSDistExFlags), + commandOptions = \showOrParseArgs -> + liftOptions fst setFst (commandOptions Cabal.sdistCommand showOrParseArgs) + ++ liftOptions snd setSnd sdistExOptions + } + where + setFst a (_,b) = (a,b) + setSnd b (a,_) = (a,b) + + sdistExOptions = + [option [] ["archive-format"] "archive-format" + sDistFormat (\v flags -> flags { sDistFormat = v }) + (choiceOpt + [ (Flag TargzFormat, ([], ["targz"]), + "Produce a '.tar.gz' format archive (default and required for uploading to hackage)") + , (Flag ZipFormat, ([], ["zip"]), + "Produce a '.zip' format archive") + ]) + ] + +instance Monoid SDistExFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup SDistExFlags where + (<>) = gmappend + +-- + +doctestCommand :: CommandUI DoctestFlags +doctestCommand = Cabal.doctestCommand + { commandUsage = \pname -> "Usage: " ++ pname ++ " v1-doctest [FLAGS]\n" } + +copyCommand :: CommandUI CopyFlags +copyCommand = Cabal.copyCommand + { commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " v1-copy " + ++ " All the components in the package\n" + ++ " " ++ pname ++ " v1-copy foo " + ++ " A component (i.e. 
lib, exe, test suite)" + , commandUsage = usageAlternatives "v1-copy" $ + [ "[FLAGS]" + , "COMPONENTS [FLAGS]" + ] + } + +registerCommand :: CommandUI RegisterFlags +registerCommand = Cabal.registerCommand + { commandUsage = \pname -> "Usage: " ++ pname ++ " v1-register [FLAGS]\n" } + +-- ------------------------------------------------------------ +-- * Win32SelfUpgrade flags +-- ------------------------------------------------------------ + +data Win32SelfUpgradeFlags = Win32SelfUpgradeFlags { + win32SelfUpgradeVerbosity :: Flag Verbosity +} deriving Generic + +defaultWin32SelfUpgradeFlags :: Win32SelfUpgradeFlags +defaultWin32SelfUpgradeFlags = Win32SelfUpgradeFlags { + win32SelfUpgradeVerbosity = toFlag normal +} + +win32SelfUpgradeCommand :: CommandUI Win32SelfUpgradeFlags +win32SelfUpgradeCommand = CommandUI { + commandName = "win32selfupgrade", + commandSynopsis = "Self-upgrade the executable on Windows", + commandDescription = Nothing, + commandNotes = Nothing, + commandUsage = \pname -> + "Usage: " ++ pname ++ " win32selfupgrade PID PATH\n", + commandDefaultFlags = defaultWin32SelfUpgradeFlags, + commandOptions = \_ -> + [optionVerbosity win32SelfUpgradeVerbosity + (\v flags -> flags { win32SelfUpgradeVerbosity = v}) + ] +} + +instance Monoid Win32SelfUpgradeFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup Win32SelfUpgradeFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * ActAsSetup flags +-- ------------------------------------------------------------ + +data ActAsSetupFlags = ActAsSetupFlags { + actAsSetupBuildType :: Flag BuildType +} deriving Generic + +defaultActAsSetupFlags :: ActAsSetupFlags +defaultActAsSetupFlags = ActAsSetupFlags { + actAsSetupBuildType = toFlag Simple +} + +actAsSetupCommand :: CommandUI ActAsSetupFlags +actAsSetupCommand = CommandUI { + commandName = "act-as-setup", + commandSynopsis = "Run as-if this was a Setup.hs", + commandDescription = Nothing, + 
commandNotes = Nothing, + commandUsage = \pname -> + "Usage: " ++ pname ++ " act-as-setup\n", + commandDefaultFlags = defaultActAsSetupFlags, + commandOptions = \_ -> + [option "" ["build-type"] + "Use the given build type." + actAsSetupBuildType (\v flags -> flags { actAsSetupBuildType = v }) + (reqArg "BUILD-TYPE" (readP_to_E ("Cannot parse build type: "++) + (fmap toFlag parse)) + (map display . flagToList)) + ] +} + +instance Monoid ActAsSetupFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup ActAsSetupFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Sandbox-related flags +-- ------------------------------------------------------------ + +data SandboxFlags = SandboxFlags { + sandboxVerbosity :: Flag Verbosity, + sandboxSnapshot :: Flag Bool, -- FIXME: this should be an 'add-source'-only + -- flag. + sandboxLocation :: Flag FilePath +} deriving Generic + +defaultSandboxLocation :: FilePath +defaultSandboxLocation = ".cabal-sandbox" + +defaultSandboxFlags :: SandboxFlags +defaultSandboxFlags = SandboxFlags { + sandboxVerbosity = toFlag normal, + sandboxSnapshot = toFlag False, + sandboxLocation = toFlag defaultSandboxLocation + } + +sandboxCommand :: CommandUI SandboxFlags +sandboxCommand = CommandUI { + commandName = "sandbox", + commandSynopsis = "Create/modify/delete a sandbox.", + commandDescription = Just $ \pname -> concat + [ paragraph $ "Sandboxes are isolated package databases that can be used" + ++ " to prevent dependency conflicts that arise when many different" + ++ " packages are installed in the same database (i.e. the user's" + ++ " database in the home directory)." + , paragraph $ "A sandbox in the current directory (created by" + ++ " `v1-sandbox init`) will be used instead of the user's database for" + ++ " commands such as `v1-install` and `v1-build`. 
Note that (a directly" + ++ " invoked) GHC will not automatically be aware of sandboxes;" + ++ " only if called via appropriate " ++ pname + ++ " commands, e.g. `v1-repl`, `v1-build`, `v1-exec`." + , paragraph $ "Currently, " ++ pname ++ " will not search for a sandbox" + ++ " in folders above the current one, so cabal will not see the sandbox" + ++ " if you are in a subfolder of a sandbox." + , paragraph "Subcommands:" + , headLine "init:" + , indentParagraph $ "Initialize a sandbox in the current directory." + ++ " An existing package database will not be modified, but settings" + ++ " (such as the location of the database) can be modified this way." + , headLine "delete:" + , indentParagraph $ "Remove the sandbox; deleting all the packages" + ++ " installed inside." + , headLine "add-source:" + , indentParagraph $ "Make one or more local packages available in the" + ++ " sandbox. PATHS may be relative or absolute." + ++ " Typical usecase is when you need" + ++ " to make a (temporary) modification to a dependency: You download" + ++ " the package into a different directory, make the modification," + ++ " and add that directory to the sandbox with `add-source`." + , indentParagraph $ "Unless given `--snapshot`, any add-source'd" + ++ " dependency that was modified since the last build will be" + ++ " re-installed automatically." + , headLine "delete-source:" + , indentParagraph $ "Remove an add-source dependency; however, this will" + ++ " not delete the package(s) that have been installed in the sandbox" + ++ " from this dependency. You can either unregister the package(s) via" + ++ " `" ++ pname ++ " v1-sandbox hc-pkg unregister` or re-create the" + ++ " sandbox (`v1-sandbox delete; v1-sandbox init`)." + , headLine "list-sources:" + , indentParagraph $ "List the directories of local packages made" + ++ " available via `" ++ pname ++ " v1-sandbox add-source`." 
+ , headLine "hc-pkg:" + , indentParagraph $ "Similar to `ghc-pkg`, but for the sandbox package" + ++ " database. Can be used to list specific/all packages that are" + ++ " installed in the sandbox. For subcommands, see the help for" + ++ " ghc-pkg. Affected by the compiler version specified by `v1-configure`." + ], + commandNotes = Just $ \pname -> + relevantConfigValuesText ["require-sandbox" + ,"ignore-sandbox"] + ++ "\n" + ++ "Examples:\n" + ++ " Set up a sandbox with one local dependency, located at ../foo:\n" + ++ " " ++ pname ++ " v1-sandbox init\n" + ++ " " ++ pname ++ " v1-sandbox add-source ../foo\n" + ++ " " ++ pname ++ " v1-install --only-dependencies\n" + ++ " Reset the sandbox:\n" + ++ " " ++ pname ++ " v1-sandbox delete\n" + ++ " " ++ pname ++ " v1-sandbox init\n" + ++ " " ++ pname ++ " v1-install --only-dependencies\n" + ++ " List the packages in the sandbox:\n" + ++ " " ++ pname ++ " v1-sandbox hc-pkg list\n" + ++ " Unregister the `broken` package from the sandbox:\n" + ++ " " ++ pname ++ " v1-sandbox hc-pkg -- --force unregister broken\n", + commandUsage = usageAlternatives "v1-sandbox" + [ "init [FLAGS]" + , "delete [FLAGS]" + , "add-source [FLAGS] PATHS" + , "delete-source [FLAGS] PATHS" + , "list-sources [FLAGS]" + , "hc-pkg [FLAGS] [--] COMMAND [--] [ARGS]" + ], + + commandDefaultFlags = defaultSandboxFlags, + commandOptions = \_ -> + [ optionVerbosity sandboxVerbosity + (\v flags -> flags { sandboxVerbosity = v }) + + , option [] ["snapshot"] + "Take a snapshot instead of creating a link (only applies to 'add-source')" + sandboxSnapshot (\v flags -> flags { sandboxSnapshot = v }) + trueArg + + , option [] ["sandbox"] + "Sandbox location (default: './.cabal-sandbox')." 
+ sandboxLocation (\v flags -> flags { sandboxLocation = v }) + (reqArgFlag "DIR") + ] + } + +instance Monoid SandboxFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup SandboxFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * Exec Flags +-- ------------------------------------------------------------ + +data ExecFlags = ExecFlags { + execVerbosity :: Flag Verbosity, + execDistPref :: Flag FilePath +} deriving Generic + +defaultExecFlags :: ExecFlags +defaultExecFlags = ExecFlags { + execVerbosity = toFlag normal, + execDistPref = NoFlag + } + +execCommand :: CommandUI ExecFlags +execCommand = CommandUI { + commandName = "exec", + commandSynopsis = "Give a command access to the sandbox package repository.", + commandDescription = Just $ \pname -> wrapText $ + -- TODO: this is too GHC-focused for my liking.. + "A directly invoked GHC will not automatically be aware of any" + ++ " sandboxes: the GHC_PACKAGE_PATH environment variable controls what" + ++ " GHC uses. `" ++ pname ++ " v1-exec` can be used to modify this variable:" + ++ " COMMAND will be executed in a modified environment and thereby uses" + ++ " the sandbox package database.\n" + ++ "\n" + ++ "If there is no sandbox, behaves as identity (executing COMMAND).\n" + ++ "\n" + ++ "Note that other " ++ pname ++ " commands change the environment" + ++ " variable appropriately already, so there is no need to wrap those" + ++ " in `" ++ pname ++ " v1-exec`. 
But with `" ++ pname ++ " v1-exec`, the user" + ++ " has more control and can, for example, execute custom scripts which" + ++ " indirectly execute GHC.\n" + ++ "\n" + ++ "Note that `" ++ pname ++ " v1-repl` is different from `" ++ pname + ++ " v1-exec -- ghci` as the latter will not forward any additional flags" + ++ " being defined in the local package to ghci.\n" + ++ "\n" + ++ "See `" ++ pname ++ " sandbox`.\n", + commandNotes = Just $ \pname -> + "Examples:\n" + ++ " " ++ pname ++ " v1-exec -- ghci -Wall\n" + ++ " Start a repl session with sandbox packages and all warnings;\n" + ++ " " ++ pname ++ " v1-exec gitit -- -f gitit.cnf\n" + ++ " Give gitit access to the sandbox packages, and pass it a flag;\n" + ++ " " ++ pname ++ " v1-exec runghc Foo.hs\n" + ++ " Execute runghc on Foo.hs with runghc configured to use the\n" + ++ " sandbox package database (if a sandbox is being used).\n", + commandUsage = \pname -> + "Usage: " ++ pname ++ " v1-exec [FLAGS] [--] COMMAND [--] [ARGS]\n", + + commandDefaultFlags = defaultExecFlags, + commandOptions = \showOrParseArgs -> + [ optionVerbosity execVerbosity + (\v flags -> flags { execVerbosity = v }) + , Cabal.optionDistPref + execDistPref (\d flags -> flags { execDistPref = d }) + showOrParseArgs + ] + } + +instance Monoid ExecFlags where + mempty = gmempty + mappend = (<>) + +instance Semigroup ExecFlags where + (<>) = gmappend + +-- ------------------------------------------------------------ +-- * UserConfig flags +-- ------------------------------------------------------------ + +data UserConfigFlags = UserConfigFlags { + userConfigVerbosity :: Flag Verbosity, + userConfigForce :: Flag Bool, + userConfigAppendLines :: Flag [String] + } deriving Generic + +instance Monoid UserConfigFlags where + mempty = UserConfigFlags { + userConfigVerbosity = toFlag normal, + userConfigForce = toFlag False, + userConfigAppendLines = toFlag [] + } + mappend = (<>) + +instance Semigroup UserConfigFlags where + (<>) = gmappend + 
+userConfigCommand :: CommandUI UserConfigFlags +userConfigCommand = CommandUI { + commandName = "user-config", + commandSynopsis = "Display and update the user's global cabal configuration.", + commandDescription = Just $ \_ -> wrapText $ + "When upgrading cabal, the set of configuration keys and their default" + ++ " values may change. This command provides means to merge the existing" + ++ " config in ~/.cabal/config" + ++ " (i.e. all bindings that are actually defined and not commented out)" + ++ " and the default config of the new version.\n" + ++ "\n" + ++ "init: Creates a new config file at either ~/.cabal/config or as" + ++ " specified by --config-file, if given. An existing file won't be " + ++ " overwritten unless -f or --force is given.\n" + ++ "diff: Shows a pseudo-diff of the user's ~/.cabal/config file and" + ++ " the default configuration that would be created by cabal if the" + ++ " config file did not exist.\n" + ++ "update: Applies the pseudo-diff to the configuration that would be" + ++ " created by default, and write the result back to ~/.cabal/config.", + + commandNotes = Nothing, + commandUsage = usageAlternatives "user-config" ["init", "diff", "update"], + commandDefaultFlags = mempty, + commandOptions = \ _ -> [ + optionVerbosity userConfigVerbosity (\v flags -> flags { userConfigVerbosity = v }) + , option ['f'] ["force"] + "Overwrite the config file if it already exists." + userConfigForce (\v flags -> flags { userConfigForce = v }) + trueArg + , option ['a'] ["augment"] + "Additional setting to augment the config file (replacing a previous setting if it existed)." + userConfigAppendLines (\v flags -> flags + {userConfigAppendLines = + Flag $ concat (flagToList (userConfigAppendLines flags) ++ flagToList v)}) + (reqArg' "CONFIGLINE" (Flag . (:[])) (fromMaybe [] . 
flagToMaybe)) + ] + } + +-- ------------------------------------------------------------ +-- * GetOpt Utils +-- ------------------------------------------------------------ + +reqArgFlag :: ArgPlaceHolder -> + MkOptDescr (b -> Flag String) (Flag String -> b -> b) b +reqArgFlag ad = reqArg ad (succeedReadE Flag) flagToList + +liftOptions :: (b -> a) -> (a -> b -> b) + -> [OptionField a] -> [OptionField b] +liftOptions get set = map (liftOption get set) + +yesNoOpt :: ShowOrParseArgs -> MkOptDescr (b -> Flag Bool) (Flag Bool -> b -> b) b +yesNoOpt ShowArgs sf lf = trueArg sf lf +yesNoOpt _ sf lf = Command.boolOpt' flagToMaybe Flag (sf, lf) ([], map ("no-" ++) lf) sf lf + +optionSolver :: (flags -> Flag PreSolver) + -> (Flag PreSolver -> flags -> flags) + -> OptionField flags +optionSolver get set = + option [] ["solver"] + ("Select dependency solver to use (default: " ++ display defaultSolver ++ "). Choices: " ++ allSolvers ++ ".") + get set + (reqArg "SOLVER" (readP_to_E (const $ "solver must be one of: " ++ allSolvers) + (toFlag `fmap` parse)) + (flagToList . fmap display)) + +optionSolverFlags :: ShowOrParseArgs + -> (flags -> Flag Int ) -> (Flag Int -> flags -> flags) + -> (flags -> Flag ReorderGoals) -> (Flag ReorderGoals -> flags -> flags) + -> (flags -> Flag CountConflicts) -> (Flag CountConflicts -> flags -> flags) + -> (flags -> Flag IndependentGoals) -> (Flag IndependentGoals -> flags -> flags) + -> (flags -> Flag ShadowPkgs) -> (Flag ShadowPkgs -> flags -> flags) + -> (flags -> Flag StrongFlags) -> (Flag StrongFlags -> flags -> flags) + -> (flags -> Flag AllowBootLibInstalls) -> (Flag AllowBootLibInstalls -> flags -> flags) + -> [OptionField flags] +optionSolverFlags showOrParseArgs getmbj setmbj getrg setrg getcc setcc getig setig + getsip setsip getstrfl setstrfl getib setib = + [ option [] ["max-backjumps"] + ("Maximum number of backjumps allowed while solving (default: " ++ show defaultMaxBackjumps ++ "). 
Use a negative number to enable unlimited backtracking. Use 0 to disable backtracking completely.") + getmbj setmbj + (reqArg "NUM" (readP_to_E ("Cannot parse number: "++) (fmap toFlag parse)) + (map show . flagToList)) + , option [] ["reorder-goals"] + "Try to reorder goals according to certain heuristics. Slows things down on average, but may make backtracking faster for some packages." + (fmap asBool . getrg) + (setrg . fmap ReorderGoals) + (yesNoOpt showOrParseArgs) + , option [] ["count-conflicts"] + "Try to speed up solving by preferring goals that are involved in a lot of conflicts (default)." + (fmap asBool . getcc) + (setcc . fmap CountConflicts) + (yesNoOpt showOrParseArgs) + , option [] ["independent-goals"] + "Treat several goals on the command line as independent. If several goals depend on the same package, different versions can be chosen." + (fmap asBool . getig) + (setig . fmap IndependentGoals) + (yesNoOpt showOrParseArgs) + , option [] ["shadow-installed-packages"] + "If multiple package instances of the same version are installed, treat all but one as shadowed." + (fmap asBool . getsip) + (setsip . fmap ShadowPkgs) + (yesNoOpt showOrParseArgs) + , option [] ["strong-flags"] + "Do not defer flag choices (this used to be the default in cabal-install <= 1.20)." + (fmap asBool . getstrfl) + (setstrfl . fmap StrongFlags) + (yesNoOpt showOrParseArgs) + , option [] ["allow-boot-library-installs"] + "Allow cabal to install base, ghc-prim, integer-simple, integer-gmp, and template-haskell." + (fmap asBool . getib) + (setib . 
fmap AllowBootLibInstalls) + (yesNoOpt showOrParseArgs) + ] + +usageFlagsOrPackages :: String -> String -> String +usageFlagsOrPackages name pname = + "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n" + ++ " or: " ++ pname ++ " " ++ name ++ " [PACKAGES]\n" + +usagePackages :: String -> String -> String +usagePackages name pname = + "Usage: " ++ pname ++ " " ++ name ++ " [PACKAGES]\n" + +usageFlags :: String -> String -> String +usageFlags name pname = + "Usage: " ++ pname ++ " " ++ name ++ " [FLAGS]\n" + +--TODO: do we want to allow per-package flags? +parsePackageArgs :: [String] -> Either String [Dependency] +parsePackageArgs = parsePkgArgs [] + where + parsePkgArgs ds [] = Right (reverse ds) + parsePkgArgs ds (arg:args) = + case readPToMaybe parseDependencyOrPackageId arg of + Just dep -> parsePkgArgs (dep:ds) args + Nothing -> Left $ + show arg ++ " is not valid syntax for a package name or" + ++ " package dependency." + +parseDependencyOrPackageId :: Parse.ReadP r Dependency +parseDependencyOrPackageId = parse Parse.+++ liftM pkgidToDependency parse + where + pkgidToDependency :: PackageIdentifier -> Dependency + pkgidToDependency p = case packageVersion p of + v | v == nullVersion -> Dependency (packageName p) anyVersion + | otherwise -> Dependency (packageName p) (thisVersion v) + +showRepo :: RemoteRepo -> String +showRepo repo = remoteRepoName repo ++ ":" + ++ uriToString id (remoteRepoURI repo) [] + +readRepo :: String -> Maybe RemoteRepo +readRepo = readPToMaybe parseRepo + +parseRepo :: Parse.ReadP r RemoteRepo +parseRepo = do + name <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "_-.") + _ <- Parse.char ':' + uriStr <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "+-=._/*()@'$:;&!?~") + uri <- maybe Parse.pfail return (parseAbsoluteURI uriStr) + return RemoteRepo { + remoteRepoName = name, + remoteRepoURI = uri, + remoteRepoSecure = Nothing, + remoteRepoRootKeys = [], + remoteRepoKeyThreshold = 0, + remoteRepoShouldTryHttps = False + } + +-- 
------------------------------------------------------------ +-- * Helpers for Documentation +-- ------------------------------------------------------------ + +headLine :: String -> String +headLine = unlines + . map unwords + . wrapLine 79 + . words + +paragraph :: String -> String +paragraph = (++"\n") + . unlines + . map unwords + . wrapLine 79 + . words + +indentParagraph :: String -> String +indentParagraph = unlines + . (flip (++)) [""] + . map ((" "++).unwords) + . wrapLine 77 + . words + +relevantConfigValuesText :: [String] -> String +relevantConfigValuesText vs = + "Relevant global configuration keys:\n" + ++ concat [" " ++ v ++ "\n" |v <- vs] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/SetupWrapper.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/SetupWrapper.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/SetupWrapper.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/SetupWrapper.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,917 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE FlexibleContexts #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.SetupWrapper +-- Copyright : (c) The University of Glasgow 2006, +-- Duncan Coutts 2008 +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : alpha +-- Portability : portable +-- +-- An interface to building and installing Cabal packages. +-- If the @Built-Type@ field is specified as something other than +-- 'Custom', and the current version of Cabal is acceptable, this performs +-- setup actions directly. Otherwise it builds the setup script and +-- runs it with the given arguments. 
+ +module Distribution.Client.SetupWrapper ( + getSetup, runSetup, runSetupCommand, setupWrapper, + SetupScriptOptions(..), + defaultSetupScriptOptions, + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import qualified Distribution.Make as Make +import qualified Distribution.Simple as Simple +import Distribution.Version + ( Version, mkVersion, versionNumbers, VersionRange, anyVersion + , intersectVersionRanges, orLaterVersion + , withinRange ) +import qualified Distribution.Backpack as Backpack +import Distribution.Package + ( newSimpleUnitId, unsafeMkDefUnitId, ComponentId + , PackageId, mkPackageName + , PackageIdentifier(..), packageVersion, packageName ) +import Distribution.Types.Dependency +import Distribution.PackageDescription + ( GenericPackageDescription(packageDescription) + , PackageDescription(..), specVersion, buildType + , BuildType(..), defaultRenaming ) +import Distribution.PackageDescription.Parsec + ( readGenericPackageDescription ) +import Distribution.Simple.Configure + ( configCompilerEx ) +import Distribution.Compiler + ( buildCompilerId, CompilerFlavor(GHC, GHCJS) ) +import Distribution.Simple.Compiler + ( Compiler(compilerId), compilerFlavor, PackageDB(..), PackageDBStack ) +import Distribution.Simple.PreProcess + ( runSimplePreProcessor, ppUnlit ) +import Distribution.Simple.Build.Macros + ( generatePackageVersionMacros ) +import Distribution.Simple.Program + ( ProgramDb, emptyProgramDb + , getProgramSearchPath, getDbProgramOutput, runDbProgram, ghcProgram + , ghcjsProgram ) +import Distribution.Simple.Program.Find + ( programSearchPathAsPATHVar + , ProgramSearchPathEntry(ProgramSearchPathDir) ) +import Distribution.Simple.Program.Run + ( getEffectiveEnvironment ) +import qualified Distribution.Simple.Program.Strip as Strip +import Distribution.Simple.BuildPaths + ( defaultDistPref, exeExtension ) + +import Distribution.Simple.Command + ( CommandUI(..), commandShowOptions ) +import 
Distribution.Simple.Program.GHC + ( GhcMode(..), GhcOptions(..), renderGhcOptions ) +import qualified Distribution.Simple.PackageIndex as PackageIndex +import Distribution.Simple.PackageIndex (InstalledPackageIndex) +import qualified Distribution.InstalledPackageInfo as IPI +import Distribution.Client.Types +import Distribution.Client.Config + ( getCabalDir ) +import Distribution.Client.IndexUtils + ( getInstalledPackages ) +import Distribution.Client.JobControl + ( Lock, criticalSection ) +import Distribution.Simple.Setup + ( Flag(..) ) +import Distribution.Simple.Utils + ( die', debug, info, infoNoWrap + , cabalVersion, tryFindPackageDesc, comparing + , createDirectoryIfMissingVerbose, installExecutableFile + , copyFileVerbose, rewriteFileEx ) +import Distribution.Client.Utils + ( inDir, tryCanonicalizePath, withExtraPathEnv + , existsAndIsMoreRecentThan, moreRecentFile, withEnv, withEnvOverrides +#ifdef mingw32_HOST_OS + , canonicalizePathNoThrow +#endif + ) + +import Distribution.ReadE +import Distribution.System ( Platform(..), buildPlatform ) +import Distribution.Text + ( display ) +import Distribution.Utils.NubList + ( toNubListR ) +import Distribution.Verbosity +import Distribution.Compat.Exception + ( catchIO ) +import Distribution.Compat.Stack + +import System.Directory ( doesFileExist ) +import System.FilePath ( (), (<.>) ) +import System.IO ( Handle, hPutStr ) +import System.Exit ( ExitCode(..), exitWith ) +import System.Process ( createProcess, StdStream(..), proc, waitForProcess + , ProcessHandle ) +import qualified System.Process as Process +import Data.List ( foldl1' ) +import Distribution.Client.Compat.ExecutablePath ( getExecutablePath ) + +#ifdef mingw32_HOST_OS +import Distribution.Simple.Utils + ( withTempDirectory ) + +import Control.Exception ( bracket ) +import System.FilePath ( equalFilePath, takeDirectory ) +import System.Directory ( doesDirectoryExist ) +import qualified System.Win32 as Win32 +#endif + +-- | @Setup@ encapsulates the 
outcome of configuring a setup method to build a +-- particular package. +data Setup = Setup { setupMethod :: SetupMethod + , setupScriptOptions :: SetupScriptOptions + , setupVersion :: Version + , setupBuildType :: BuildType + , setupPackage :: PackageDescription + } + +-- | @SetupMethod@ represents one of the methods used to run Cabal commands. +data SetupMethod = InternalMethod + -- ^ run Cabal commands through \"cabal\" in the + -- current process + | SelfExecMethod + -- ^ run Cabal commands through \"cabal\" as a + -- child process + | ExternalMethod FilePath + -- ^ run Cabal commands through a custom \"Setup\" executable + +-- TODO: The 'setupWrapper' and 'SetupScriptOptions' should be split into two +-- parts: one that has no policy and just does as it's told with all the +-- explicit options, and an optional initial part that applies certain +-- policies (like if we should add the Cabal lib as a dep, and if so which +-- version). This could be structured as an action that returns a fully +-- elaborated 'SetupScriptOptions' containing no remaining policy choices. +-- +-- See also the discussion at https://github.com/haskell/cabal/pull/3094 + +-- | @SetupScriptOptions@ are options used to configure and run 'Setup', as +-- opposed to options given to the Cabal command at runtime. +data SetupScriptOptions = SetupScriptOptions { + -- | The version of the Cabal library to use (if 'useDependenciesExclusive' + -- is not set). A suitable version of the Cabal library must be installed + -- (or for some build-types be the one cabal-install was built with). + -- + -- The version found also determines the version of the Cabal specification + -- that we us for talking to the Setup.hs, unless overridden by + -- 'useCabalSpecVersion'. + -- + useCabalVersion :: VersionRange, + + -- | This is the version of the Cabal specification that we believe that + -- this package uses. This affects the semantics and in particular the + -- Setup command line interface. 
+ -- + -- This is similar to 'useCabalVersion' but instead of probing the system + -- for a version of the /Cabal library/ you just say exactly which version + -- of the /spec/ we will use. Using this also avoid adding the Cabal + -- library as an additional dependency, so add it to 'useDependencies' + -- if needed. + -- + useCabalSpecVersion :: Maybe Version, + useCompiler :: Maybe Compiler, + usePlatform :: Maybe Platform, + usePackageDB :: PackageDBStack, + usePackageIndex :: Maybe InstalledPackageIndex, + useProgramDb :: ProgramDb, + useDistPref :: FilePath, + useLoggingHandle :: Maybe Handle, + useWorkingDir :: Maybe FilePath, + -- | Extra things to add to PATH when invoking the setup script. + useExtraPathEnv :: [FilePath], + -- | Extra environment variables paired with overrides, where + -- + -- * @'Just' v@ means \"set the environment variable's value to @v@\". + -- * 'Nothing' means \"unset the environment variable\". + useExtraEnvOverrides :: [(String, Maybe FilePath)], + forceExternalSetupMethod :: Bool, + + -- | List of dependencies to use when building Setup.hs. + useDependencies :: [(ComponentId, PackageId)], + + -- | Is the list of setup dependencies exclusive? + -- + -- When this is @False@, if we compile the Setup.hs script we do so with the + -- list in 'useDependencies' but all other packages in the environment are + -- also visible. A suitable version of @Cabal@ library (see + -- 'useCabalVersion') is also added to the list of dependencies, unless + -- 'useDependencies' already contains a Cabal dependency. + -- + -- When @True@, only the 'useDependencies' packages are used, with other + -- packages in the environment hidden. + -- + -- This feature is here to support the setup stanza in .cabal files that + -- specifies explicit (and exclusive) dependencies, as well as the old + -- style with no dependencies. + useDependenciesExclusive :: Bool, + + -- | Should we build the Setup.hs with CPP version macros available? 
+ -- We turn this on when we have a setup stanza in .cabal that declares + -- explicit setup dependencies. + -- + useVersionMacros :: Bool, + + -- Used only by 'cabal clean' on Windows. + -- + -- Note: win32 clean hack + ------------------------- + -- On Windows, running './dist/setup/setup clean' doesn't work because the + -- setup script will try to delete itself (which causes it to fail horribly, + -- unlike on Linux). So we have to move the setup exe out of the way first + -- and then delete it manually. This applies only to the external setup + -- method. + useWin32CleanHack :: Bool, + + -- Used only when calling setupWrapper from parallel code to serialise + -- access to the setup cache; should be Nothing otherwise. + -- + -- Note: setup exe cache + ------------------------ + -- When we are installing in parallel, we always use the external setup + -- method. Since compiling the setup script each time adds noticeable + -- overhead, we use a shared setup script cache + -- ('~/.cabal/setup-exe-cache'). For each (compiler, platform, Cabal + -- version) combination the cache holds a compiled setup script + -- executable. This only affects the Simple build type; for the Custom, + -- Configure and Make build types we always compile the setup script anew. + setupCacheLock :: Maybe Lock, + + -- | Is the task we are going to run an interactive foreground task, + -- or an non-interactive background task? 
Based on this flag we + -- decide whether or not to delegate ctrl+c to the spawned task + isInteractive :: Bool + } + +defaultSetupScriptOptions :: SetupScriptOptions +defaultSetupScriptOptions = SetupScriptOptions { + useCabalVersion = anyVersion, + useCabalSpecVersion = Nothing, + useCompiler = Nothing, + usePlatform = Nothing, + usePackageDB = [GlobalPackageDB, UserPackageDB], + usePackageIndex = Nothing, + useDependencies = [], + useDependenciesExclusive = False, + useVersionMacros = False, + useProgramDb = emptyProgramDb, + useDistPref = defaultDistPref, + useLoggingHandle = Nothing, + useWorkingDir = Nothing, + useExtraPathEnv = [], + useExtraEnvOverrides = [], + useWin32CleanHack = False, + forceExternalSetupMethod = False, + setupCacheLock = Nothing, + isInteractive = False + } + +workingDir :: SetupScriptOptions -> FilePath +workingDir options = + case fromMaybe "" (useWorkingDir options) of + [] -> "." + dir -> dir + +-- | A @SetupRunner@ implements a 'SetupMethod'. +type SetupRunner = Verbosity + -> SetupScriptOptions + -> BuildType + -> [String] + -> IO () + +-- | Prepare to build a package by configuring a 'SetupMethod'. The returned +-- 'Setup' object identifies the method. The 'SetupScriptOptions' may be changed +-- during the configuration process; the final values are given by +-- 'setupScriptOptions'. +getSetup :: Verbosity + -> SetupScriptOptions + -> Maybe PackageDescription + -> IO Setup +getSetup verbosity options mpkg = do + pkg <- maybe getPkg return mpkg + let options' = options { + useCabalVersion = intersectVersionRanges + (useCabalVersion options) + (orLaterVersion (specVersion pkg)) + } + buildType' = buildType pkg + (version, method, options'') <- + getSetupMethod verbosity options' pkg buildType' + return Setup { setupMethod = method + , setupScriptOptions = options'' + , setupVersion = version + , setupBuildType = buildType' + , setupPackage = pkg + } + where + getPkg = tryFindPackageDesc (fromMaybe "." 
(useWorkingDir options)) + >>= readGenericPackageDescription verbosity + >>= return . packageDescription + +-- | Decide if we're going to be able to do a direct internal call to the +-- entry point in the Cabal library or if we're going to have to compile +-- and execute an external Setup.hs script. +-- +getSetupMethod + :: Verbosity -> SetupScriptOptions -> PackageDescription -> BuildType + -> IO (Version, SetupMethod, SetupScriptOptions) +getSetupMethod verbosity options pkg buildType' + | buildType' == Custom + || maybe False (cabalVersion /=) (useCabalSpecVersion options) + || not (cabalVersion `withinRange` useCabalVersion options) = + getExternalSetupMethod verbosity options pkg buildType' + | isJust (useLoggingHandle options) + -- Forcing is done to use an external process e.g. due to parallel + -- build concerns. + || forceExternalSetupMethod options = + return (cabalVersion, SelfExecMethod, options) + | otherwise = return (cabalVersion, InternalMethod, options) + +runSetupMethod :: WithCallStack (SetupMethod -> SetupRunner) +runSetupMethod InternalMethod = internalSetupMethod +runSetupMethod (ExternalMethod path) = externalSetupMethod path +runSetupMethod SelfExecMethod = selfExecSetupMethod + +-- | Run a configured 'Setup' with specific arguments. +runSetup :: Verbosity -> Setup + -> [String] -- ^ command-line arguments + -> IO () +runSetup verbosity setup args0 = do + let method = setupMethod setup + options = setupScriptOptions setup + bt = setupBuildType setup + args = verbosityHack (setupVersion setup) args0 + when (verbosity >= deafening {- avoid test if not debug -} && args /= args0) $ + infoNoWrap verbose $ + "Applied verbosity hack:\n" ++ + " Before: " ++ show args0 ++ "\n" ++ + " After: " ++ show args ++ "\n" + runSetupMethod method verbosity options bt args + +-- | This is a horrible hack to make sure passing fancy verbosity +-- flags (e.g., @-v'info +callstack'@) doesn't break horribly on +-- old Setup. 
We can't do it in 'filterConfigureFlags' because +-- verbosity applies to ALL commands. +verbosityHack :: Version -> [String] -> [String] +verbosityHack ver args0 + | ver >= mkVersion [2,1] = args0 + | otherwise = go args0 + where + go (('-':'v':rest) : args) + | Just rest' <- munch rest = ("-v" ++ rest') : go args + go (('-':'-':'v':'e':'r':'b':'o':'s':'e':'=':rest) : args) + | Just rest' <- munch rest = ("--verbose=" ++ rest') : go args + go ("--verbose" : rest : args) + | Just rest' <- munch rest = "--verbose" : rest' : go args + go rest@("--" : _) = rest + go (arg:args) = arg : go args + go [] = [] + + munch rest = + case runReadE flagToVerbosity rest of + Right v + | ver < mkVersion [2,0], verboseHasFlags v + -- We could preserve the prefix, but since we're assuming + -- it's Cabal's verbosity flag, we can assume that + -- any format is OK + -> Just (showForCabal (verboseNoFlags v)) + | ver < mkVersion [2,1], isVerboseTimestamp v + -- +timestamp wasn't yet available in Cabal-2.0.0 + -> Just (showForCabal (verboseNoTimestamp v)) + _ -> Nothing + +-- | Run a command through a configured 'Setup'. +runSetupCommand :: Verbosity -> Setup + -> CommandUI flags -- ^ command definition + -> flags -- ^ command flags + -> [String] -- ^ extra command-line arguments + -> IO () +runSetupCommand verbosity setup cmd flags extraArgs = do + let args = commandName cmd : commandShowOptions cmd flags ++ extraArgs + runSetup verbosity setup args + +-- | Configure a 'Setup' and run a command in one step. The command flags +-- may depend on the Cabal library version in use. 
+setupWrapper :: Verbosity + -> SetupScriptOptions + -> Maybe PackageDescription + -> CommandUI flags + -> (Version -> flags) + -- ^ produce command flags given the Cabal library version + -> (Version -> [String]) + -> IO () +setupWrapper verbosity options mpkg cmd flags extraArgs = do + setup <- getSetup verbosity options mpkg + runSetupCommand verbosity setup + cmd (flags $ setupVersion setup) + (extraArgs $ setupVersion setup) + +-- ------------------------------------------------------------ +-- * Internal SetupMethod +-- ------------------------------------------------------------ + +internalSetupMethod :: SetupRunner +internalSetupMethod verbosity options bt args = do + info verbosity $ "Using internal setup method with build-type " ++ show bt + ++ " and args:\n " ++ show args + inDir (useWorkingDir options) $ do + withEnv "HASKELL_DIST_DIR" (useDistPref options) $ + withExtraPathEnv (useExtraPathEnv options) $ + withEnvOverrides (useExtraEnvOverrides options) $ + buildTypeAction bt args + +buildTypeAction :: BuildType -> ([String] -> IO ()) +buildTypeAction Simple = Simple.defaultMainArgs +buildTypeAction Configure = Simple.defaultMainWithHooksArgs + Simple.autoconfUserHooks +buildTypeAction Make = Make.defaultMainArgs +buildTypeAction Custom = error "buildTypeAction Custom" + + +-- | @runProcess'@ is a version of @runProcess@ where we have +-- the additional option to decide whether or not we should +-- delegate CTRL+C to the spawned process. +runProcess' :: FilePath -- ^ Filename of the executable + -> [String] -- ^ Arguments to pass to executable + -> Maybe FilePath -- ^ Optional path to working directory + -> Maybe [(String, String)] -- ^ Optional environment + -> Maybe Handle -- ^ Handle for @stdin@ + -> Maybe Handle -- ^ Handle for @stdout@ + -> Maybe Handle -- ^ Handle for @stderr@ + -> Bool -- ^ Delegate Ctrl+C ? 
+ -> IO ProcessHandle +runProcess' cmd args mb_cwd mb_env mb_stdin mb_stdout mb_stderr _delegate = do + (_,_,_,ph) <- + createProcess + (proc cmd args){ Process.cwd = mb_cwd + , Process.env = mb_env + , Process.std_in = mbToStd mb_stdin + , Process.std_out = mbToStd mb_stdout + , Process.std_err = mbToStd mb_stderr + , Process.delegate_ctlc = _delegate + } + return ph + where + mbToStd :: Maybe Handle -> StdStream + mbToStd Nothing = Inherit + mbToStd (Just hdl) = UseHandle hdl +-- ------------------------------------------------------------ +-- * Self-Exec SetupMethod +-- ------------------------------------------------------------ + +selfExecSetupMethod :: SetupRunner +selfExecSetupMethod verbosity options bt args0 = do + let args = ["act-as-setup", + "--build-type=" ++ display bt, + "--"] ++ args0 + info verbosity $ "Using self-exec internal setup method with build-type " + ++ show bt ++ " and args:\n " ++ show args + path <- getExecutablePath + info verbosity $ unwords (path : args) + case useLoggingHandle options of + Nothing -> return () + Just logHandle -> info verbosity $ "Redirecting build log to " + ++ show logHandle + + searchpath <- programSearchPathAsPATHVar + (map ProgramSearchPathDir (useExtraPathEnv options) ++ + getProgramSearchPath (useProgramDb options)) + env <- getEffectiveEnvironment $ + [ ("PATH", Just searchpath) + , ("HASKELL_DIST_DIR", Just (useDistPref options)) + ] ++ useExtraEnvOverrides options + process <- runProcess' path args + (useWorkingDir options) env Nothing + (useLoggingHandle options) (useLoggingHandle options) + (isInteractive options) + exitCode <- waitForProcess process + unless (exitCode == ExitSuccess) $ exitWith exitCode + +-- ------------------------------------------------------------ +-- * External SetupMethod +-- ------------------------------------------------------------ + +externalSetupMethod :: WithCallStack (FilePath -> SetupRunner) +externalSetupMethod path verbosity options _ args = do + info verbosity $ 
unwords (path : args) + case useLoggingHandle options of + Nothing -> return () + Just logHandle -> info verbosity $ "Redirecting build log to " + ++ show logHandle + + -- See 'Note: win32 clean hack' above. +#ifdef mingw32_HOST_OS + if useWin32CleanHack options then doWin32CleanHack path else doInvoke path +#else + doInvoke path +#endif + + where + doInvoke path' = do + searchpath <- programSearchPathAsPATHVar + (map ProgramSearchPathDir (useExtraPathEnv options) ++ + getProgramSearchPath (useProgramDb options)) + env <- getEffectiveEnvironment $ + [ ("PATH", Just searchpath) + , ("HASKELL_DIST_DIR", Just (useDistPref options)) + ] ++ useExtraEnvOverrides options + + debug verbosity $ "Setup arguments: "++unwords args + process <- runProcess' path' args + (useWorkingDir options) env Nothing + (useLoggingHandle options) (useLoggingHandle options) + (isInteractive options) + exitCode <- waitForProcess process + unless (exitCode == ExitSuccess) $ exitWith exitCode + +#ifdef mingw32_HOST_OS + doWin32CleanHack path' = do + info verbosity $ "Using the Win32 clean hack." + -- Recursively removes the temp dir on exit. + withTempDirectory verbosity (workingDir options) "cabal-tmp" $ \tmpDir -> + bracket (moveOutOfTheWay tmpDir path') + (maybeRestore path') + doInvoke + + moveOutOfTheWay tmpDir path' = do + let newPath = tmpDir "setup" <.> exeExtension buildPlatform + Win32.moveFile path' newPath + return newPath + + maybeRestore oldPath path' = do + let oldPathDir = takeDirectory oldPath + oldPathDirExists <- doesDirectoryExist oldPathDir + -- 'setup clean' didn't complete, 'dist/setup' still exists. 
+ when oldPathDirExists $ + Win32.moveFile path' oldPath +#endif + +getExternalSetupMethod + :: Verbosity -> SetupScriptOptions -> PackageDescription -> BuildType + -> IO (Version, SetupMethod, SetupScriptOptions) +getExternalSetupMethod verbosity options pkg bt = do + debug verbosity $ "Using external setup method with build-type " ++ show bt + debug verbosity $ "Using explicit dependencies: " + ++ show (useDependenciesExclusive options) + createDirectoryIfMissingVerbose verbosity True setupDir + (cabalLibVersion, mCabalLibInstalledPkgId, options') <- cabalLibVersionToUse + debug verbosity $ "Using Cabal library version " ++ display cabalLibVersion + path <- if useCachedSetupExecutable + then getCachedSetupExecutable options' + cabalLibVersion mCabalLibInstalledPkgId + else compileSetupExecutable options' + cabalLibVersion mCabalLibInstalledPkgId False + + -- Since useWorkingDir can change the relative path, the path argument must + -- be turned into an absolute path. On some systems, runProcess' will take + -- path as relative to the new working directory instead of the current + -- working directory. + path' <- tryCanonicalizePath path + + -- See 'Note: win32 clean hack' above. +#ifdef mingw32_HOST_OS + -- setupProgFile may not exist if we're using a cached program + setupProgFile' <- canonicalizePathNoThrow setupProgFile + let win32CleanHackNeeded = (useWin32CleanHack options) + -- Skip when a cached setup script is used. 
+ && setupProgFile' `equalFilePath` path' +#else + let win32CleanHackNeeded = False +#endif + let options'' = options' { useWin32CleanHack = win32CleanHackNeeded } + + return (cabalLibVersion, ExternalMethod path', options'') + + where + setupDir = workingDir options useDistPref options "setup" + setupVersionFile = setupDir "setup" <.> "version" + setupHs = setupDir "setup" <.> "hs" + setupProgFile = setupDir "setup" <.> exeExtension buildPlatform + platform = fromMaybe buildPlatform (usePlatform options) + + useCachedSetupExecutable = (bt == Simple || bt == Configure || bt == Make) + + maybeGetInstalledPackages :: SetupScriptOptions -> Compiler + -> ProgramDb -> IO InstalledPackageIndex + maybeGetInstalledPackages options' comp progdb = + case usePackageIndex options' of + Just index -> return index + Nothing -> getInstalledPackages verbosity + comp (usePackageDB options') progdb + + -- Choose the version of Cabal to use if the setup script has a dependency on + -- Cabal, and possibly update the setup script options. The version also + -- determines how to filter the flags to Setup. + -- + -- We first check whether the dependency solver has specified a Cabal version. + -- If it has, we use the solver's version without looking at the installed + -- package index (See issue #3436). Otherwise, we pick the Cabal version by + -- checking 'useCabalSpecVersion', then the saved version, and finally the + -- versions available in the index. + -- + -- The version chosen here must match the one used in 'compileSetupExecutable' + -- (See issue #3433). + cabalLibVersionToUse :: IO (Version, Maybe ComponentId + ,SetupScriptOptions) + cabalLibVersionToUse = + case find (isCabalPkgId . 
snd) (useDependencies options) of + Just (unitId, pkgId) -> do + let version = pkgVersion pkgId + updateSetupScript version bt + writeSetupVersionFile version + return (version, Just unitId, options) + Nothing -> + case useCabalSpecVersion options of + Just version -> do + updateSetupScript version bt + writeSetupVersionFile version + return (version, Nothing, options) + Nothing -> do + savedVer <- savedVersion + case savedVer of + Just version | version `withinRange` useCabalVersion options + -> do updateSetupScript version bt + -- Does the previously compiled setup executable + -- still exist and is it up-to date? + useExisting <- canUseExistingSetup version + if useExisting + then return (version, Nothing, options) + else installedVersion + _ -> installedVersion + where + -- This check duplicates the checks in 'getCachedSetupExecutable' / + -- 'compileSetupExecutable'. Unfortunately, we have to perform it twice + -- because the selected Cabal version may change as a result of this + -- check. 
+ canUseExistingSetup :: Version -> IO Bool + canUseExistingSetup version = + if useCachedSetupExecutable + then do + (_, cachedSetupProgFile) <- cachedSetupDirAndProg options version + doesFileExist cachedSetupProgFile + else + (&&) <$> setupProgFile `existsAndIsMoreRecentThan` setupHs + <*> setupProgFile `existsAndIsMoreRecentThan` setupVersionFile + + writeSetupVersionFile :: Version -> IO () + writeSetupVersionFile version = + writeFile setupVersionFile (show version ++ "\n") + + installedVersion :: IO (Version, Maybe InstalledPackageId + ,SetupScriptOptions) + installedVersion = do + (comp, progdb, options') <- configureCompiler options + (version, mipkgid, options'') <- installedCabalVersion options' + comp progdb + updateSetupScript version bt + writeSetupVersionFile version + return (version, mipkgid, options'') + + savedVersion :: IO (Maybe Version) + savedVersion = do + versionString <- readFile setupVersionFile `catchIO` \_ -> return "" + case reads versionString of + [(version,s)] | all isSpace s -> return (Just version) + _ -> return Nothing + + -- | Update a Setup.hs script, creating it if necessary. + updateSetupScript :: Version -> BuildType -> IO () + updateSetupScript _ Custom = do + useHs <- doesFileExist customSetupHs + useLhs <- doesFileExist customSetupLhs + unless (useHs || useLhs) $ die' verbosity + "Using 'build-type: Custom' but there is no Setup.hs or Setup.lhs script." 
+ let src = (if useHs then customSetupHs else customSetupLhs) + srcNewer <- src `moreRecentFile` setupHs + when srcNewer $ if useHs + then copyFileVerbose verbosity src setupHs + else runSimplePreProcessor ppUnlit src setupHs verbosity + where + customSetupHs = workingDir options "Setup.hs" + customSetupLhs = workingDir options "Setup.lhs" + + updateSetupScript cabalLibVersion _ = + rewriteFileEx verbosity setupHs (buildTypeScript cabalLibVersion) + + buildTypeScript :: Version -> String + buildTypeScript cabalLibVersion = case bt of + Simple -> "import Distribution.Simple; main = defaultMain\n" + Configure -> "import Distribution.Simple; main = defaultMainWithHooks " + ++ if cabalLibVersion >= mkVersion [1,3,10] + then "autoconfUserHooks\n" + else "defaultUserHooks\n" + Make -> "import Distribution.Make; main = defaultMain\n" + Custom -> error "buildTypeScript Custom" + + installedCabalVersion :: SetupScriptOptions -> Compiler -> ProgramDb + -> IO (Version, Maybe InstalledPackageId + ,SetupScriptOptions) + installedCabalVersion options' _ _ | packageName pkg == mkPackageName "Cabal" + && bt == Custom = + return (packageVersion pkg, Nothing, options') + installedCabalVersion options' compiler progdb = do + index <- maybeGetInstalledPackages options' compiler progdb + let cabalDep = Dependency (mkPackageName "Cabal") + (useCabalVersion options') + options'' = options' { usePackageIndex = Just index } + case PackageIndex.lookupDependency index cabalDep of + [] -> die' verbosity $ "The package '" ++ display (packageName pkg) + ++ "' requires Cabal library version " + ++ display (useCabalVersion options) + ++ " but no suitable version is installed." + pkgs -> let ipkginfo = head . snd . bestVersion fst $ pkgs + in return (packageVersion ipkginfo + ,Just . IPI.installedComponentId $ ipkginfo, options'') + + bestVersion :: (a -> Version) -> [a] -> a + bestVersion f = firstMaximumBy (comparing (preference . 
f)) + where + -- Like maximumBy, but picks the first maximum element instead of the + -- last. In general, we expect the preferred version to go first in the + -- list. For the default case, this has the effect of choosing the version + -- installed in the user package DB instead of the global one. See #1463. + -- + -- Note: firstMaximumBy could be written as just + -- `maximumBy cmp . reverse`, but the problem is that the behaviour of + -- maximumBy is not fully specified in the case when there is not a single + -- greatest element. + firstMaximumBy :: (a -> a -> Ordering) -> [a] -> a + firstMaximumBy _ [] = + error "Distribution.Client.firstMaximumBy: empty list" + firstMaximumBy cmp xs = foldl1' maxBy xs + where + maxBy x y = case cmp x y of { GT -> x; EQ -> x; LT -> y; } + + preference version = (sameVersion, sameMajorVersion + ,stableVersion, latestVersion) + where + sameVersion = version == cabalVersion + sameMajorVersion = majorVersion version == majorVersion cabalVersion + majorVersion = take 2 . versionNumbers + stableVersion = case versionNumbers version of + (_:x:_) -> even x + _ -> False + latestVersion = version + + configureCompiler :: SetupScriptOptions + -> IO (Compiler, ProgramDb, SetupScriptOptions) + configureCompiler options' = do + (comp, progdb) <- case useCompiler options' of + Just comp -> return (comp, useProgramDb options') + Nothing -> do (comp, _, progdb) <- + configCompilerEx (Just GHC) Nothing Nothing + (useProgramDb options') verbosity + return (comp, progdb) + -- Whenever we need to call configureCompiler, we also need to access the + -- package index, so let's cache it in SetupScriptOptions. + index <- maybeGetInstalledPackages options' comp progdb + return (comp, progdb, options' { useCompiler = Just comp, + usePackageIndex = Just index, + useProgramDb = progdb }) + + -- | Path to the setup exe cache directory and path to the cached setup + -- executable. 
+ cachedSetupDirAndProg :: SetupScriptOptions -> Version + -> IO (FilePath, FilePath) + cachedSetupDirAndProg options' cabalLibVersion = do + cabalDir <- getCabalDir + let setupCacheDir = cabalDir "setup-exe-cache" + cachedSetupProgFile = setupCacheDir + ("setup-" ++ buildTypeString ++ "-" + ++ cabalVersionString ++ "-" + ++ platformString ++ "-" + ++ compilerVersionString) + <.> exeExtension buildPlatform + return (setupCacheDir, cachedSetupProgFile) + where + buildTypeString = show bt + cabalVersionString = "Cabal-" ++ (display cabalLibVersion) + compilerVersionString = display $ + maybe buildCompilerId compilerId + $ useCompiler options' + platformString = display platform + + -- | Look up the setup executable in the cache; update the cache if the setup + -- executable is not found. + getCachedSetupExecutable :: SetupScriptOptions + -> Version -> Maybe InstalledPackageId + -> IO FilePath + getCachedSetupExecutable options' cabalLibVersion + maybeCabalLibInstalledPkgId = do + (setupCacheDir, cachedSetupProgFile) <- + cachedSetupDirAndProg options' cabalLibVersion + cachedSetupExists <- doesFileExist cachedSetupProgFile + if cachedSetupExists + then debug verbosity $ + "Found cached setup executable: " ++ cachedSetupProgFile + else criticalSection' $ do + -- The cache may have been populated while we were waiting. + cachedSetupExists' <- doesFileExist cachedSetupProgFile + if cachedSetupExists' + then debug verbosity $ + "Found cached setup executable: " ++ cachedSetupProgFile + else do + debug verbosity $ "Setup executable not found in the cache." 
+ src <- compileSetupExecutable options' + cabalLibVersion maybeCabalLibInstalledPkgId True + createDirectoryIfMissingVerbose verbosity True setupCacheDir + installExecutableFile verbosity src cachedSetupProgFile + -- Do not strip if we're using GHCJS, since the result may be a script + when (maybe True ((/=GHCJS).compilerFlavor) $ useCompiler options') $ + Strip.stripExe verbosity platform (useProgramDb options') + cachedSetupProgFile + return cachedSetupProgFile + where + criticalSection' = maybe id criticalSection $ setupCacheLock options' + + -- | If the Setup.hs is out of date wrt the executable then recompile it. + -- Currently this is GHC/GHCJS only. It should really be generalised. + -- + compileSetupExecutable :: SetupScriptOptions + -> Version -> Maybe ComponentId -> Bool + -> IO FilePath + compileSetupExecutable options' cabalLibVersion maybeCabalLibInstalledPkgId + forceCompile = do + setupHsNewer <- setupHs `moreRecentFile` setupProgFile + cabalVersionNewer <- setupVersionFile `moreRecentFile` setupProgFile + let outOfDate = setupHsNewer || cabalVersionNewer + when (outOfDate || forceCompile) $ do + debug verbosity "Setup executable needs to be updated, compiling..." + (compiler, progdb, options'') <- configureCompiler options' + let cabalPkgid = PackageIdentifier (mkPackageName "Cabal") cabalLibVersion + (program, extraOpts) + = case compilerFlavor compiler of + GHCJS -> (ghcjsProgram, ["-build-runner"]) + _ -> (ghcProgram, ["-threaded"]) + cabalDep = maybe [] (\ipkgid -> [(ipkgid, cabalPkgid)]) + maybeCabalLibInstalledPkgId + + -- With 'useDependenciesExclusive' we enforce the deps specified, + -- so only the given ones can be used. Otherwise we allow the use + -- of packages in the ambient environment, and add on a dep on the + -- Cabal library (unless 'useDependencies' already contains one). + -- + -- With 'useVersionMacros' we use a version CPP macros .h file. 
+ -- + -- Both of these options should be enabled for packages that have + -- opted-in and declared a custom-settup stanza. + -- + selectedDeps | useDependenciesExclusive options' + = useDependencies options' + | otherwise = useDependencies options' ++ + if any (isCabalPkgId . snd) + (useDependencies options') + then [] + else cabalDep + addRenaming (ipid, _) = + -- Assert 'DefUnitId' invariant + (Backpack.DefiniteUnitId (unsafeMkDefUnitId (newSimpleUnitId ipid)) + ,defaultRenaming) + cppMacrosFile = setupDir "setup_macros.h" + ghcOptions = mempty { + -- Respect -v0, but don't crank up verbosity on GHC if + -- Cabal verbosity is requested. For that, use + -- --ghc-option=-v instead! + ghcOptVerbosity = Flag (min verbosity normal) + , ghcOptMode = Flag GhcModeMake + , ghcOptInputFiles = toNubListR [setupHs] + , ghcOptOutputFile = Flag setupProgFile + , ghcOptObjDir = Flag setupDir + , ghcOptHiDir = Flag setupDir + , ghcOptSourcePathClear = Flag True + , ghcOptSourcePath = case bt of + Custom -> toNubListR [workingDir options'] + _ -> mempty + , ghcOptPackageDBs = usePackageDB options'' + , ghcOptHideAllPackages = Flag (useDependenciesExclusive options') + , ghcOptCabal = Flag (useDependenciesExclusive options') + , ghcOptPackages = toNubListR $ map addRenaming selectedDeps + , ghcOptCppIncludes = toNubListR [ cppMacrosFile + | useVersionMacros options' ] + , ghcOptExtra = extraOpts + } + let ghcCmdLine = renderGhcOptions compiler platform ghcOptions + when (useVersionMacros options') $ + rewriteFileEx verbosity cppMacrosFile + (generatePackageVersionMacros (map snd selectedDeps)) + case useLoggingHandle options of + Nothing -> runDbProgram verbosity program progdb ghcCmdLine + + -- If build logging is enabled, redirect compiler output to + -- the log file. 
+ (Just logHandle) -> do output <- getDbProgramOutput verbosity program + progdb ghcCmdLine + hPutStr logHandle output + return setupProgFile + + +isCabalPkgId :: PackageIdentifier -> Bool +isCabalPkgId (PackageIdentifier pname _) = pname == mkPackageName "Cabal" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/SolverInstallPlan.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/SolverInstallPlan.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/SolverInstallPlan.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/SolverInstallPlan.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,444 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE TypeFamilies #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.SolverInstallPlan +-- Copyright : (c) Duncan Coutts 2008 +-- License : BSD-like +-- +-- Maintainer : duncan@community.haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- The 'SolverInstallPlan' is the graph of packages produced by the +-- dependency solver, and specifies at the package-granularity what +-- things are going to be installed. To put it another way: the +-- dependency solver produces a 'SolverInstallPlan', which is then +-- consumed by various other parts of Cabal. 
+-- +----------------------------------------------------------------------------- +module Distribution.Client.SolverInstallPlan( + SolverInstallPlan(..), + SolverPlanPackage, + ResolverPackage(..), + + -- * Operations on 'SolverInstallPlan's + new, + toList, + toMap, + + remove, + + showPlanIndex, + showInstallPlan, + + -- * Checking validity of plans + valid, + closed, + consistent, + acyclic, + + -- ** Details on invalid plans + SolverPlanProblem(..), + showPlanProblem, + problems, + + -- ** Querying the install plan + dependencyClosure, + reverseDependencyClosure, + topologicalOrder, + reverseTopologicalOrder, +) where + +import Distribution.Package + ( PackageIdentifier(..), Package(..), PackageName + , HasUnitId(..), PackageId, packageVersion, packageName ) +import qualified Distribution.Solver.Types.ComponentDeps as CD +import Distribution.Text + ( display ) + +import Distribution.Client.Types + ( UnresolvedPkgLoc ) +import Distribution.Version + ( Version ) + +import Distribution.Solver.Types.Settings +import Distribution.Solver.Types.ResolverPackage +import Distribution.Solver.Types.SolverId + +import Data.List + ( intercalate ) +import Data.Maybe + ( fromMaybe, mapMaybe ) +import Distribution.Compat.Binary (Binary(..)) +import Distribution.Compat.Graph (Graph, IsNode(..)) +import qualified Data.Graph as OldGraph +import qualified Distribution.Compat.Graph as Graph +import qualified Data.Map as Map +import Data.Map (Map) +import Data.Array ((!)) +import Data.Typeable + +type SolverPlanPackage = ResolverPackage UnresolvedPkgLoc + +type SolverPlanIndex = Graph SolverPlanPackage + +data SolverInstallPlan = SolverInstallPlan { + planIndex :: !SolverPlanIndex, + planIndepGoals :: !IndependentGoals + } + deriving (Typeable) + +{- +-- | Much like 'planPkgIdOf', but mapping back to full packages. 
+planPkgOf :: SolverInstallPlan + -> Graph.Vertex + -> SolverPlanPackage +planPkgOf plan v = + case Graph.lookupKey (planIndex plan) + (planPkgIdOf plan v) of + Just pkg -> pkg + Nothing -> error "InstallPlan: internal error: planPkgOf lookup failed" +-} + +mkInstallPlan :: SolverPlanIndex + -> IndependentGoals + -> SolverInstallPlan +mkInstallPlan index indepGoals = + SolverInstallPlan { + planIndex = index, + planIndepGoals = indepGoals + } + +instance Binary SolverInstallPlan where + put SolverInstallPlan { + planIndex = index, + planIndepGoals = indepGoals + } = put (index, indepGoals) + + get = do + (index, indepGoals) <- get + return $! mkInstallPlan index indepGoals + +showPlanIndex :: [SolverPlanPackage] -> String +showPlanIndex = intercalate "\n" . map showPlanPackage + +showInstallPlan :: SolverInstallPlan -> String +showInstallPlan = showPlanIndex . toList + +showPlanPackage :: SolverPlanPackage -> String +showPlanPackage (PreExisting ipkg) = "PreExisting " ++ display (packageId ipkg) + ++ " (" ++ display (installedUnitId ipkg) + ++ ")" +showPlanPackage (Configured spkg) = "Configured " ++ display (packageId spkg) + +-- | Build an installation plan from a valid set of resolved packages. +-- +new :: IndependentGoals + -> SolverPlanIndex + -> Either [SolverPlanProblem] SolverInstallPlan +new indepGoals index = + case problems indepGoals index of + [] -> Right (mkInstallPlan index indepGoals) + probs -> Left probs + +toList :: SolverInstallPlan -> [SolverPlanPackage] +toList = Graph.toList . planIndex + +toMap :: SolverInstallPlan -> Map SolverId SolverPlanPackage +toMap = Graph.toMap . planIndex + +-- | Remove packages from the install plan. This will result in an +-- error if there are remaining packages that depend on any matching +-- package. This is primarily useful for obtaining an install plan for +-- the dependencies of a package or set of packages without actually +-- installing the package itself, as when doing development. 
+-- +remove :: (SolverPlanPackage -> Bool) + -> SolverInstallPlan + -> Either [SolverPlanProblem] + (SolverInstallPlan) +remove shouldRemove plan = + new (planIndepGoals plan) newIndex + where + newIndex = Graph.fromDistinctList $ + filter (not . shouldRemove) (toList plan) + +-- ------------------------------------------------------------ +-- * Checking validity of plans +-- ------------------------------------------------------------ + +-- | A valid installation plan is a set of packages that is 'acyclic', +-- 'closed' and 'consistent'. Also, every 'ConfiguredPackage' in the +-- plan has to have a valid configuration (see 'configuredPackageValid'). +-- +-- * if the result is @False@ use 'problems' to get a detailed list. +-- +valid :: IndependentGoals + -> SolverPlanIndex + -> Bool +valid indepGoals index = + null $ problems indepGoals index + +data SolverPlanProblem = + PackageMissingDeps SolverPlanPackage + [PackageIdentifier] + | PackageCycle [SolverPlanPackage] + | PackageInconsistency PackageName [(PackageIdentifier, Version)] + | PackageStateInvalid SolverPlanPackage SolverPlanPackage + +showPlanProblem :: SolverPlanProblem -> String +showPlanProblem (PackageMissingDeps pkg missingDeps) = + "Package " ++ display (packageId pkg) + ++ " depends on the following packages which are missing from the plan: " + ++ intercalate ", " (map display missingDeps) + +showPlanProblem (PackageCycle cycleGroup) = + "The following packages are involved in a dependency cycle " + ++ intercalate ", " (map (display.packageId) cycleGroup) + +showPlanProblem (PackageInconsistency name inconsistencies) = + "Package " ++ display name + ++ " is required by several packages," + ++ " but they require inconsistent versions:\n" + ++ unlines [ " package " ++ display pkg ++ " requires " + ++ display (PackageIdentifier name ver) + | (pkg, ver) <- inconsistencies ] + +showPlanProblem (PackageStateInvalid pkg pkg') = + "Package " ++ display (packageId pkg) + ++ " is in the " ++ showPlanState 
pkg + ++ " state but it depends on package " ++ display (packageId pkg') + ++ " which is in the " ++ showPlanState pkg' + ++ " state" + where + showPlanState (PreExisting _) = "pre-existing" + showPlanState (Configured _) = "configured" + +-- | For an invalid plan, produce a detailed list of problems as human readable +-- error messages. This is mainly intended for debugging purposes. +-- Use 'showPlanProblem' for a human readable explanation. +-- +problems :: IndependentGoals + -> SolverPlanIndex + -> [SolverPlanProblem] +problems indepGoals index = + + [ PackageMissingDeps pkg + (mapMaybe + (fmap packageId . flip Graph.lookup index) + missingDeps) + | (pkg, missingDeps) <- Graph.broken index ] + + ++ [ PackageCycle cycleGroup + | cycleGroup <- Graph.cycles index ] + + ++ [ PackageInconsistency name inconsistencies + | (name, inconsistencies) <- + dependencyInconsistencies indepGoals index ] + + ++ [ PackageStateInvalid pkg pkg' + | pkg <- Graph.toList index + , Just pkg' <- map (flip Graph.lookup index) + (nodeNeighbors pkg) + , not (stateDependencyRelation pkg pkg') ] + + +-- | Compute all roots of the install plan, and verify that the transitive +-- plans from those roots are all consistent. +-- +-- NOTE: This does not check for dependency cycles. Moreover, dependency cycles +-- may be absent from the subplans even if the larger plan contains a dependency +-- cycle. Such cycles may or may not be an issue; either way, we don't check +-- for them here. +dependencyInconsistencies :: IndependentGoals + -> SolverPlanIndex + -> [(PackageName, [(PackageIdentifier, Version)])] +dependencyInconsistencies indepGoals index = + concatMap dependencyInconsistencies' subplans + where + subplans :: [SolverPlanIndex] + subplans = -- Not Graph.closure!! 
+ map (nonSetupClosure index) + (rootSets indepGoals index) + +-- NB: When we check for inconsistencies, packages from the setup +-- scripts don't count as part of the closure (this way, we +-- can build, e.g., Cabal-1.24.1 even if its setup script is +-- built with Cabal-1.24.0). +-- +-- This is a best effort function that swallows any non-existent +-- SolverIds. +nonSetupClosure :: SolverPlanIndex + -> [SolverId] + -> SolverPlanIndex +nonSetupClosure index pkgids0 = closure Graph.empty pkgids0 + where + closure completed [] = completed + closure completed (pkgid:pkgids) = + case Graph.lookup pkgid index of + Nothing -> closure completed pkgids + Just pkg -> + case Graph.lookup (nodeKey pkg) completed of + Just _ -> closure completed pkgids + Nothing -> closure completed' pkgids' + where completed' = Graph.insert pkg completed + pkgids' = CD.nonSetupDeps (resolverPackageLibDeps pkg) ++ pkgids + +-- | Compute the root sets of a plan +-- +-- A root set is a set of packages whose dependency closure must be consistent. +-- This is the set of all top-level library roots (taken together normally, or +-- as singletons sets if we are considering them as independent goals), along +-- with all setup dependencies of all packages. +rootSets :: IndependentGoals -> SolverPlanIndex -> [[SolverId]] +rootSets (IndependentGoals indepGoals) index = + if indepGoals then map (:[]) libRoots else [libRoots] + ++ setupRoots index + where + libRoots = libraryRoots index + +-- | Compute the library roots of a plan +-- +-- The library roots are the set of packages with no reverse dependencies +-- (no reverse library dependencies but also no reverse setup dependencies). +libraryRoots :: SolverPlanIndex -> [SolverId] +libraryRoots index = + map (nodeKey . toPkgId) roots + where + (graph, toPkgId, _) = Graph.toGraph index + indegree = OldGraph.indegree graph + roots = filter isRoot (OldGraph.vertices graph) + isRoot v = indegree ! 
v == 0 + +-- | The setup dependencies of each package in the plan +setupRoots :: SolverPlanIndex -> [[SolverId]] +setupRoots = filter (not . null) + . map (CD.setupDeps . resolverPackageLibDeps) + . Graph.toList + +-- | Given a package index where we assume we want to use all the packages +-- (use 'dependencyClosure' if you need to get such a index subset) find out +-- if the dependencies within it use consistent versions of each package. +-- Return all cases where multiple packages depend on different versions of +-- some other package. +-- +-- Each element in the result is a package name along with the packages that +-- depend on it and the versions they require. These are guaranteed to be +-- distinct. +-- +dependencyInconsistencies' :: SolverPlanIndex + -> [(PackageName, [(PackageIdentifier, Version)])] +dependencyInconsistencies' index = + [ (name, [ (pid, packageVersion dep) | (dep,pids) <- uses, pid <- pids]) + | (name, ipid_map) <- Map.toList inverseIndex + , let uses = Map.elems ipid_map + , reallyIsInconsistent (map fst uses) + ] + where + -- For each package name (of a dependency, somewhere) + -- and each installed ID of that that package + -- the associated package instance + -- and a list of reverse dependencies (as source IDs) + inverseIndex :: Map PackageName (Map SolverId (SolverPlanPackage, [PackageId])) + inverseIndex = Map.fromListWith (Map.unionWith (\(a,b) (_,b') -> (a,b++b'))) + [ (packageName dep, Map.fromList [(sid,(dep,[packageId pkg]))]) + | -- For each package @pkg@ + pkg <- Graph.toList index + -- Find out which @sid@ @pkg@ depends on + , sid <- CD.nonSetupDeps (resolverPackageLibDeps pkg) + -- And look up those @sid@ (i.e., @sid@ is the ID of @dep@) + , Just dep <- [Graph.lookup sid index] + ] + + -- If, in a single install plan, we depend on more than one version of a + -- package, then this is ONLY okay in the (rather special) case that we + -- depend on precisely two versions of that package, and one of them + -- depends on the 
other. This is necessary for example for the base where + -- we have base-3 depending on base-4. + reallyIsInconsistent :: [SolverPlanPackage] -> Bool + reallyIsInconsistent [] = False + reallyIsInconsistent [_p] = False + reallyIsInconsistent [p1, p2] = + let pid1 = nodeKey p1 + pid2 = nodeKey p2 + in pid1 `notElem` CD.nonSetupDeps (resolverPackageLibDeps p2) + && pid2 `notElem` CD.nonSetupDeps (resolverPackageLibDeps p1) + reallyIsInconsistent _ = True + + +-- | The graph of packages (nodes) and dependencies (edges) must be acyclic. +-- +-- * if the result is @False@ use 'PackageIndex.dependencyCycles' to find out +-- which packages are involved in dependency cycles. +-- +acyclic :: SolverPlanIndex -> Bool +acyclic = null . Graph.cycles + +-- | An installation plan is closed if for every package in the set, all of +-- its dependencies are also in the set. That is, the set is closed under the +-- dependency relation. +-- +-- * if the result is @False@ use 'PackageIndex.brokenPackages' to find out +-- which packages depend on packages not in the index. +-- +closed :: SolverPlanIndex -> Bool +closed = null . Graph.broken + +-- | An installation plan is consistent if all dependencies that target a +-- single package name, target the same version. +-- +-- This is slightly subtle. It is not the same as requiring that there be at +-- most one version of any package in the set. It only requires that of +-- packages which have more than one other package depending on them. We could +-- actually make the condition even more precise and say that different +-- versions are OK so long as they are not both in the transitive closure of +-- any other package (or equivalently that their inverse closures do not +-- intersect). The point is we do not want to have any packages depending +-- directly or indirectly on two different versions of the same package. The +-- current definition is just a safe approximation of that. 
+-- +-- * if the result is @False@ use 'PackageIndex.dependencyInconsistencies' to +-- find out which packages are. +-- +consistent :: SolverPlanIndex -> Bool +consistent = null . dependencyInconsistencies (IndependentGoals False) + +-- | The states of packages have that depend on each other must respect +-- this relation. That is for very case where package @a@ depends on +-- package @b@ we require that @dependencyStatesOk a b = True@. +-- +stateDependencyRelation :: SolverPlanPackage + -> SolverPlanPackage + -> Bool +stateDependencyRelation PreExisting{} PreExisting{} = True + +stateDependencyRelation (Configured _) PreExisting{} = True +stateDependencyRelation (Configured _) (Configured _) = True + +stateDependencyRelation _ _ = False + + +-- | Compute the dependency closure of a package in a install plan +-- +dependencyClosure :: SolverInstallPlan + -> [SolverId] + -> [SolverPlanPackage] +dependencyClosure plan = fromMaybe [] . Graph.closure (planIndex plan) + + +reverseDependencyClosure :: SolverInstallPlan + -> [SolverId] + -> [SolverPlanPackage] +reverseDependencyClosure plan = fromMaybe [] . 
Graph.revClosure (planIndex plan) + + +topologicalOrder :: SolverInstallPlan + -> [SolverPlanPackage] +topologicalOrder plan = Graph.topSort (planIndex plan) + + +reverseTopologicalOrder :: SolverInstallPlan + -> [SolverPlanPackage] +reverseTopologicalOrder plan = Graph.revTopSort (planIndex plan) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/SourceFiles.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/SourceFiles.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/SourceFiles.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/SourceFiles.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,168 @@ +-- | Contains an @sdist@ like function which computes the source files +-- that we should track to determine if a rebuild is necessary. +-- Unlike @sdist@, we can operate directly on the true +-- 'PackageDescription' (not flattened). +-- +-- The naming convention, roughly, is that to declare we need the +-- source for some type T, you use the function needT; some functions +-- need auxiliary information. +-- +-- We can only use this code for non-Custom scripts; Custom scripts +-- may have arbitrary extra dependencies (esp. new preprocessors) which +-- we cannot "see" easily. 
+module Distribution.Client.SourceFiles (needElaboratedConfiguredPackage) where + +import Distribution.Client.ProjectPlanning.Types +import Distribution.Client.RebuildMonad + +import Distribution.Solver.Types.OptionalStanza + +import Distribution.Simple.PreProcess + +import Distribution.Types.PackageDescription +import Distribution.Types.Component +import Distribution.Types.ComponentRequestedSpec +import Distribution.Types.Library +import Distribution.Types.Executable +import Distribution.Types.Benchmark +import Distribution.Types.BenchmarkInterface +import Distribution.Types.TestSuite +import Distribution.Types.TestSuiteInterface +import Distribution.Types.BuildInfo +import Distribution.Types.ForeignLib + +import Distribution.ModuleName + +import Prelude () +import Distribution.Client.Compat.Prelude + +import System.FilePath +import Control.Monad +import qualified Data.Set as Set + +needElaboratedConfiguredPackage :: ElaboratedConfiguredPackage -> Rebuild () +needElaboratedConfiguredPackage elab = + case elabPkgOrComp elab of + ElabComponent ecomp -> needElaboratedComponent elab ecomp + ElabPackage epkg -> needElaboratedPackage elab epkg + +needElaboratedPackage :: ElaboratedConfiguredPackage -> ElaboratedPackage -> Rebuild () +needElaboratedPackage elab epkg = + mapM_ (needComponent pkg_descr) (enabledComponents pkg_descr enabled) + where + pkg_descr = elabPkgDescription elab + enabled_stanzas = pkgStanzasEnabled epkg + -- TODO: turn this into a helper function somewhere + enabled = + ComponentRequestedSpec { + testsRequested = TestStanzas `Set.member` enabled_stanzas, + benchmarksRequested = BenchStanzas `Set.member` enabled_stanzas + } + +needElaboratedComponent :: ElaboratedConfiguredPackage -> ElaboratedComponent -> Rebuild () +needElaboratedComponent elab ecomp = + case mb_comp of + Nothing -> needSetup + Just comp -> needComponent pkg_descr comp + where + pkg_descr = elabPkgDescription elab + mb_comp = fmap (getComponent pkg_descr) (compComponentName ecomp) 
+ +needComponent :: PackageDescription -> Component -> Rebuild () +needComponent pkg_descr comp = + case comp of + CLib lib -> needLibrary pkg_descr lib + CFLib flib -> needForeignLib pkg_descr flib + CExe exe -> needExecutable pkg_descr exe + CTest test -> needTestSuite pkg_descr test + CBench bench -> needBenchmark pkg_descr bench + +needSetup :: Rebuild () +needSetup = findFirstFileMonitored id ["Setup.hs", "Setup.lhs"] >> return () + +needLibrary :: PackageDescription -> Library -> Rebuild () +needLibrary pkg_descr (Library { exposedModules = modules + , signatures = sigs + , libBuildInfo = bi }) + = needBuildInfo pkg_descr bi (modules ++ sigs) + +needForeignLib :: PackageDescription -> ForeignLib -> Rebuild () +needForeignLib pkg_descr (ForeignLib { foreignLibModDefFile = fs + , foreignLibBuildInfo = bi }) + = do mapM_ needIfExists fs + needBuildInfo pkg_descr bi [] + +needExecutable :: PackageDescription -> Executable -> Rebuild () +needExecutable pkg_descr (Executable { modulePath = mainPath + , buildInfo = bi }) + = do needBuildInfo pkg_descr bi [] + needMainFile bi mainPath + +needTestSuite :: PackageDescription -> TestSuite -> Rebuild () +needTestSuite pkg_descr t + = case testInterface t of + TestSuiteExeV10 _ mainPath -> do + needBuildInfo pkg_descr bi [] + needMainFile bi mainPath + TestSuiteLibV09 _ m -> + needBuildInfo pkg_descr bi [m] + TestSuiteUnsupported _ -> return () -- soft fail + where + bi = testBuildInfo t + +needMainFile :: BuildInfo -> FilePath -> Rebuild () +needMainFile bi mainPath = do + -- The matter here is subtle. It might *seem* that we + -- should just search for mainPath, but as per + -- b61cb051f63ed5869b8f4a6af996ff7e833e4b39 'main-is' + -- will actually be the source file AFTER preprocessing, + -- whereas we need to get the file *prior* to preprocessing. 
+ ppFile <- findFileWithExtensionMonitored + (ppSuffixes knownSuffixHandlers) + (hsSourceDirs bi) + (dropExtension mainPath) + case ppFile of + -- But check the original path in the end, because + -- maybe it's a non-preprocessed file with a non-traditional + -- extension. + Nothing -> findFileMonitored (hsSourceDirs bi) mainPath + >>= maybe (return ()) need + Just pp -> need pp + +needBenchmark :: PackageDescription -> Benchmark -> Rebuild () +needBenchmark pkg_descr bm + = case benchmarkInterface bm of + BenchmarkExeV10 _ mainPath -> do + needBuildInfo pkg_descr bi [] + needMainFile bi mainPath + BenchmarkUnsupported _ -> return () -- soft fail + where + bi = benchmarkBuildInfo bm + +needBuildInfo :: PackageDescription -> BuildInfo -> [ModuleName] -> Rebuild () +needBuildInfo pkg_descr bi modules = do + -- NB: These are separate because there may be both A.hs and + -- A.hs-boot; need to track both. + findNeededModules ["hs", "lhs", "hsig", "lhsig"] + findNeededModules ["hs-boot", "lhs-boot"] + mapM_ needIfExists (cSources bi ++ jsSources bi) + -- A MASSIVE HACK to (1) make sure we rebuild when header + -- files change, but (2) not have to rebuild when anything + -- in extra-src-files changes (most of these won't affect + -- compilation). It would be even better if we knew on a + -- per-component basis which headers would be used but that + -- seems to be too difficult. + mapM_ needIfExists (filter ((==".h").takeExtension) (extraSrcFiles pkg_descr)) + forM_ (installIncludes bi) $ \f -> + findFileMonitored ("." 
: includeDirs bi) f + >>= maybe (return ()) need + where + findNeededModules exts = + mapM_ (findNeededModule exts) + (modules ++ otherModules bi) + findNeededModule exts m = + findFileWithExtensionMonitored + (ppSuffixes knownSuffixHandlers ++ exts) + (hsSourceDirs bi) + (toFilePath m) + >>= maybe (return ()) need diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/SourceRepoParse.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/SourceRepoParse.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/SourceRepoParse.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/SourceRepoParse.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,22 @@ +module Distribution.Client.SourceRepoParse where + +import Distribution.Client.Compat.Prelude +import Prelude () + +import Distribution.FieldGrammar.FieldDescrs (fieldDescrsToList) +import Distribution.PackageDescription.FieldGrammar (sourceRepoFieldGrammar) +import Distribution.Parsec.Class (explicitEitherParsec) +import Distribution.ParseUtils (FieldDescr (..), syntaxError) +import Distribution.Types.SourceRepo (SourceRepo, RepoKind (..)) + +sourceRepoFieldDescrs :: [FieldDescr SourceRepo] +sourceRepoFieldDescrs = + map toDescr . 
fieldDescrsToList $ sourceRepoFieldGrammar (RepoKindUnknown "unused") + where + toDescr (name, pretty, parse) = FieldDescr + { fieldName = name + , fieldGet = pretty + , fieldSet = \lineNo str x -> + either (syntaxError lineNo) return + $ explicitEitherParsec (parse x) str + } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/SrcDist.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/SrcDist.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/SrcDist.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/SrcDist.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,195 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE NondecreasingIndentation #-} +{-# LANGUAGE FlexibleContexts #-} +-- Implements the \"@.\/cabal sdist@\" command, which creates a source +-- distribution for this package. That is, packs up the source code +-- into a tarball, making use of the corresponding Cabal module. +module Distribution.Client.SrcDist ( + sdist, + allPackageSourceFiles + ) where + + +import Distribution.Client.SetupWrapper + ( SetupScriptOptions(..), defaultSetupScriptOptions, setupWrapper ) +import Distribution.Client.Tar (createTarGzFile) + +import Distribution.Package + ( Package(..), packageName ) +import Distribution.PackageDescription + ( PackageDescription ) +import Distribution.PackageDescription.Configuration + ( flattenPackageDescription ) +import Distribution.PackageDescription.Parsec + ( readGenericPackageDescription ) +import Distribution.Simple.Utils + ( createDirectoryIfMissingVerbose, defaultPackageDesc + , warn, die', notice, withTempDirectory ) +import Distribution.Client.Setup + ( SDistFlags(..), SDistExFlags(..), ArchiveFormat(..) 
) +import Distribution.Simple.Setup + ( Flag(..), sdistCommand, flagToList, fromFlag, fromFlagOrDefault + , defaultSDistFlags ) +import Distribution.Simple.BuildPaths ( srcPref) +import Distribution.Simple.Program (requireProgram, simpleProgram, programPath) +import Distribution.Simple.Program.Db (emptyProgramDb) +import Distribution.Text ( display ) +import Distribution.Verbosity (Verbosity, normal, lessVerbose) +import Distribution.Version (mkVersion, orLaterVersion, intersectVersionRanges) + +import Distribution.Client.Utils + (tryFindAddSourcePackageDesc) +import Distribution.Compat.Exception (catchIO) + +import System.FilePath ((), (<.>)) +import Control.Monad (when, unless, liftM) +import System.Directory (doesFileExist, removeFile, canonicalizePath, getTemporaryDirectory) +import System.Process (runProcess, waitForProcess) +import System.Exit (ExitCode(..)) +import Control.Exception (IOException, evaluate) + +-- |Create a source distribution. +sdist :: SDistFlags -> SDistExFlags -> IO () +sdist flags exflags = do + pkg <- liftM flattenPackageDescription + (readGenericPackageDescription verbosity =<< defaultPackageDesc verbosity) + let withDir :: (FilePath -> IO a) -> IO a + withDir = if not needMakeArchive then \f -> f tmpTargetDir + else withTempDirectory verbosity tmpTargetDir "sdist." + -- 'withTempDir' fails if we don't create 'tmpTargetDir'... + when needMakeArchive $ + createDirectoryIfMissingVerbose verbosity True tmpTargetDir + withDir $ \tmpDir -> do + let outDir = if isOutDirectory then tmpDir else tmpDir tarBallName pkg + flags' = (if not needMakeArchive then flags + else flags { sDistDirectory = Flag outDir }) + unless isListSources $ + createDirectoryIfMissingVerbose verbosity True outDir + + -- Run 'setup sdist --output-directory=tmpDir' (or + -- '--list-source'/'--output-directory=someOtherDir') in case we were passed + -- those options. 
+ setupWrapper verbosity setupOpts (Just pkg) sdistCommand (const flags') (const []) + + -- Unless we were given --list-sources or --output-directory ourselves, + -- create an archive. + when needMakeArchive $ + createArchive verbosity pkg tmpDir distPref + + when isOutDirectory $ + notice verbosity $ "Source directory created: " ++ tmpTargetDir + + when isListSources $ + notice verbosity $ "List of package sources written to file '" + ++ (fromFlag . sDistListSources $ flags) ++ "'" + + where + flagEnabled f = not . null . flagToList . f $ flags + + isListSources = flagEnabled sDistListSources + isOutDirectory = flagEnabled sDistDirectory + needMakeArchive = not (isListSources || isOutDirectory) + verbosity = fromFlag (sDistVerbosity flags) + distPref = fromFlag (sDistDistPref flags) + tmpTargetDir = fromFlagOrDefault (srcPref distPref) (sDistDirectory flags) + setupOpts = defaultSetupScriptOptions { + useDistPref = distPref, + -- The '--output-directory' sdist flag was introduced in Cabal 1.12, and + -- '--list-sources' in 1.17. + useCabalVersion = if isListSources + then orLaterVersion $ mkVersion [1,17,0] + else orLaterVersion $ mkVersion [1,12,0] + } + format = fromFlag (sDistFormat exflags) + createArchive = case format of + TargzFormat -> createTarGzArchive + ZipFormat -> createZipArchive + +tarBallName :: PackageDescription -> String +tarBallName = display . packageId + +-- | Create a tar.gz archive from a tree of source files. +createTarGzArchive :: Verbosity -> PackageDescription -> FilePath -> FilePath + -> IO () +createTarGzArchive verbosity pkg tmpDir targetPref = do + createTarGzFile tarBallFilePath tmpDir (tarBallName pkg) + notice verbosity $ "Source tarball created: " ++ tarBallFilePath + where + tarBallFilePath = targetPref tarBallName pkg <.> "tar.gz" + +-- | Create a zip archive from a tree of source files. 
+createZipArchive :: Verbosity -> PackageDescription -> FilePath -> FilePath + -> IO () +createZipArchive verbosity pkg tmpDir targetPref = do + let dir = tarBallName pkg + zipfile = targetPref dir <.> "zip" + (zipProg, _) <- requireProgram verbosity zipProgram emptyProgramDb + + -- zip has an annoying habit of updating the target rather than creating + -- it from scratch. While that might sound like an optimisation, it doesn't + -- remove files already in the archive that are no longer present in the + -- uncompressed tree. + alreadyExists <- doesFileExist zipfile + when alreadyExists $ removeFile zipfile + + -- We call zip with a different CWD, so have to make the path + -- absolute. Can't just use 'canonicalizePath zipfile' since this function + -- requires its argument to refer to an existing file. + zipfileAbs <- fmap ( dir <.> "zip") . canonicalizePath $ targetPref + + --TODO: use runProgramInvocation, but has to be able to set CWD + hnd <- runProcess (programPath zipProg) ["-q", "-r", zipfileAbs, dir] + (Just tmpDir) + Nothing Nothing Nothing Nothing + exitCode <- waitForProcess hnd + unless (exitCode == ExitSuccess) $ + die' verbosity $ "Generating the zip file failed " + ++ "(zip returned exit code " ++ show exitCode ++ ")" + notice verbosity $ "Source zip archive created: " ++ zipfile + where + zipProgram = simpleProgram "zip" + +-- | List all source files of a given add-source dependency. Exits with error if +-- something is wrong (e.g. there is no .cabal file in the given directory). +allPackageSourceFiles :: Verbosity -> SetupScriptOptions -> FilePath + -> IO [FilePath] +allPackageSourceFiles verbosity setupOpts0 packageDir = do + pkg <- do + let err = "Error reading source files of package." + desc <- tryFindAddSourcePackageDesc verbosity packageDir err + flattenPackageDescription `fmap` readGenericPackageDescription verbosity desc + globalTmp <- getTemporaryDirectory + withTempDirectory verbosity globalTmp "cabal-list-sources." 
$ \tempDir -> do + let file = tempDir "cabal-sdist-list-sources" + flags = defaultSDistFlags { + sDistVerbosity = Flag $ if verbosity == normal + then lessVerbose verbosity else verbosity, + sDistListSources = Flag file + } + setupOpts = setupOpts0 { + -- 'sdist --list-sources' was introduced in Cabal 1.18. + useCabalVersion = intersectVersionRanges + (orLaterVersion $ mkVersion [1,18,0]) + (useCabalVersion setupOpts0), + useWorkingDir = Just packageDir + } + + doListSources :: IO [FilePath] + doListSources = do + setupWrapper verbosity setupOpts (Just pkg) sdistCommand (const flags) (const []) + fmap lines . readFile $ file + + onFailedListSources :: IOException -> IO () + onFailedListSources e = do + warn verbosity $ + "Could not list sources of the package '" + ++ display (packageName pkg) ++ "'." + warn verbosity $ + "Exception was: " ++ show e + + -- Run setup sdist --list-sources=TMPFILE + r <- doListSources `catchIO` (\e -> onFailedListSources e >> return []) + -- Ensure that we've closed the 'readFile' handle before we exit the + -- temporary directory. + _ <- evaluate (length r) + return r diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Store.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Store.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Store.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Store.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,251 @@ +{-# LANGUAGE RecordWildCards, NamedFieldPuns #-} + + +-- | Management for the installed package store. 
+-- +module Distribution.Client.Store ( + + -- * The store layout + StoreDirLayout(..), + defaultStoreDirLayout, + + -- * Reading store entries + getStoreEntries, + doesStoreEntryExist, + + -- * Creating store entries + newStoreEntry, + NewStoreEntryOutcome(..), + + -- * Concurrency strategy + -- $concurrency + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude +import Distribution.Client.Compat.FileLock + +import Distribution.Client.DistDirLayout +import Distribution.Client.RebuildMonad + +import Distribution.Package (UnitId, mkUnitId) +import Distribution.Compiler (CompilerId) + +import Distribution.Simple.Utils + ( withTempDirectory, debug, info ) +import Distribution.Verbosity +import Distribution.Text + +import Data.Set (Set) +import qualified Data.Set as Set +import Control.Exception +import Control.Monad (forM_) +import System.FilePath +import System.Directory +import System.IO + + +-- $concurrency +-- +-- We access and update the store concurrently. Our strategy to do that safely +-- is as follows. +-- +-- The store entries once created are immutable. This alone simplifies matters +-- considerably. +-- +-- Additionally, the way 'UnitId' hashes are constructed means that if a store +-- entry exists already then we can assume its content is ok to reuse, rather +-- than having to re-recreate. This is the nix-style input hashing concept. +-- +-- A consequence of this is that with a little care it is /safe/ to race +-- updates against each other. Consider two independent concurrent builds that +-- both want to build a particular 'UnitId', where that entry does not yet +-- exist in the store. It is safe for both to build and try to install this +-- entry into the store provided that: +-- +-- * only one succeeds +-- * the looser discovers that they lost, they abandon their own build and +-- re-use the store entry installed by the winner. 
+-- +-- Note that because builds are not reproducible in general (nor even +-- necessarily ABI compatible) then it is essential that the loser abandon +-- their build and use the one installed by the winner, so that subsequent +-- packages are built against the exact package from the store rather than some +-- morally equivalent package that may not be ABI compatible. +-- +-- Our overriding goal is that store reads be simple, cheap and not require +-- locking. We will derive our write-side protocol to make this possible. +-- +-- The read-side protocol is simply: +-- +-- * check for the existence of a directory entry named after the 'UnitId' in +-- question. That is, if the dir entry @$root/foo-1.0-fe56a...@ exists then +-- the store entry can be assumed to be complete and immutable. +-- +-- Given our read-side protocol, the final step on the write side must be to +-- atomically rename a fully-formed store entry directory into its final +-- location. While this will indeed be the final step, the preparatory steps +-- are more complicated. The tricky aspect is that the store also contains a +-- number of shared package databases (one per compiler version). Our read +-- strategy means that by the time we install the store dir entry the package +-- db must already have been updated. We cannot do the package db update +-- as part of atomically renaming the store entry directory however. Furthermore +-- it is not safe to allow either package db update because the db entry +-- contains the ABI hash and this is not guaranteed to be deterministic. So we +-- must register the new package prior to the atomic dir rename. Since this +-- combination of steps are not atomic then we need locking. +-- +-- The write-side protocol is: +-- +-- * Create a unique temp dir and write all store entry files into it. +-- +-- * Take a lock named after the 'UnitId' in question. +-- +-- * Once holding the lock, check again for the existence of the final store +-- entry directory. 
If the entry exists then the process lost the race and it +-- must abandon, unlock and re-use the existing store entry. If the entry +-- does not exist then the process won the race and it can proceed. +-- +-- * Register the package into the package db. Note that the files are not in +-- their final location at this stage so registration file checks may need +-- to be disabled. +-- +-- * Atomically rename the temp dir to the final store entry location. +-- +-- * Release the previously-acquired lock. +-- +-- Obviously this means it is possible to fail after registering but before +-- installing the store entry, leaving a dangling package db entry. This is not +-- much of a problem because this entry does not determine package existence +-- for cabal. It does mean however that the package db update should be insert +-- or replace, i.e. not failing if the db entry already exists. + + +-- | Check if a particular 'UnitId' exists in the store. +-- +doesStoreEntryExist :: StoreDirLayout -> CompilerId -> UnitId -> IO Bool +doesStoreEntryExist StoreDirLayout{storePackageDirectory} compid unitid = + doesDirectoryExist (storePackageDirectory compid unitid) + + +-- | Return the 'UnitId's of all packages\/components already installed in the +-- store. +-- +getStoreEntries :: StoreDirLayout -> CompilerId -> Rebuild (Set UnitId) +getStoreEntries StoreDirLayout{storeDirectory} compid = do + paths <- getDirectoryContentsMonitored (storeDirectory compid) + return $! mkEntries paths + where + mkEntries = Set.delete (mkUnitId "package.db") + . Set.delete (mkUnitId "incoming") + . Set.fromList + . map mkUnitId + . filter valid + valid ('.':_) = False + valid _ = True + + +-- | The outcome of 'newStoreEntry': either the store entry was newly created +-- or it existed already. The latter case happens if there was a race between +-- two builds of the same store entry. 
+-- +data NewStoreEntryOutcome = UseNewStoreEntry + | UseExistingStoreEntry + deriving (Eq, Show) + +-- | Place a new entry into the store. See the concurrency strategy description +-- for full details. +-- +-- In particular, it takes two actions: one to place files into a temporary +-- location, and a second to perform any necessary registration. The first +-- action is executed without any locks held (the temp dir is unique). The +-- second action holds a lock that guarantees that only one cabal process is +-- able to install this store entry. This means it is safe to register into +-- the compiler package DB or do other similar actions. +-- +-- Note that if you need to use the registration information later then you +-- /must/ check the 'NewStoreEntryOutcome' and if it's'UseExistingStoreEntry' +-- then you must read the existing registration information (unless your +-- registration information is constructed fully deterministically). +-- +newStoreEntry :: Verbosity + -> StoreDirLayout + -> CompilerId + -> UnitId + -> (FilePath -> IO (FilePath, [FilePath])) -- ^ Action to place files. + -> IO () -- ^ Register action, if necessary. + -> IO NewStoreEntryOutcome +newStoreEntry verbosity storeDirLayout@StoreDirLayout{..} + compid unitid + copyFiles register = + -- See $concurrency above for an explanation of the concurrency protocol + + withTempIncomingDir storeDirLayout compid $ \incomingTmpDir -> do + + -- Write all store entry files within the temp dir and return the prefix. + (incomingEntryDir, otherFiles) <- copyFiles incomingTmpDir + + -- Take a lock named after the 'UnitId' in question. + withIncomingUnitIdLock verbosity storeDirLayout compid unitid $ do + + -- Check for the existence of the final store entry directory. + exists <- doesStoreEntryExist storeDirLayout compid unitid + + if exists + -- If the entry exists then we lost the race and we must abandon, + -- unlock and re-use the existing store entry. 
+ then do + info verbosity $ + "Concurrent build race: abandoning build in favour of existing " + ++ "store entry " ++ display compid display unitid + return UseExistingStoreEntry + + -- If the entry does not exist then we won the race and can proceed. + else do + + -- Register the package into the package db (if appropriate). + register + + -- Atomically rename the temp dir to the final store entry location. + renameDirectory incomingEntryDir finalEntryDir + forM_ otherFiles $ \file -> do + let finalStoreFile = storeDirectory compid makeRelative (incomingTmpDir (dropDrive (storeDirectory compid))) file + createDirectoryIfMissing True (takeDirectory finalStoreFile) + renameFile file finalStoreFile + + debug verbosity $ + "Installed store entry " ++ display compid display unitid + return UseNewStoreEntry + where + finalEntryDir = storePackageDirectory compid unitid + + +withTempIncomingDir :: StoreDirLayout -> CompilerId + -> (FilePath -> IO a) -> IO a +withTempIncomingDir StoreDirLayout{storeIncomingDirectory} compid action = do + createDirectoryIfMissing True incomingDir + withTempDirectory silent incomingDir "new" action + where + incomingDir = storeIncomingDirectory compid + + +withIncomingUnitIdLock :: Verbosity -> StoreDirLayout + -> CompilerId -> UnitId + -> IO a -> IO a +withIncomingUnitIdLock verbosity StoreDirLayout{storeIncomingLock} + compid unitid action = + bracket takeLock releaseLock (\_hnd -> action) + where + takeLock = do + h <- openFile (storeIncomingLock compid unitid) ReadWriteMode + -- First try non-blocking, but if we would have to wait then + -- log an explanation and do it again in blocking mode. 
+ gotlock <- hTryLock h ExclusiveLock + unless gotlock $ do + info verbosity $ "Waiting for file lock on store entry " + ++ display compid display unitid + hLock h ExclusiveLock + return h + + releaseLock = hClose + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/TargetSelector.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/TargetSelector.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/TargetSelector.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/TargetSelector.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,2461 @@ +{-# LANGUAGE CPP, DeriveGeneric, DeriveFunctor, + RecordWildCards, NamedFieldPuns #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.TargetSelector +-- Copyright : (c) Duncan Coutts 2012, 2015, 2016 +-- License : BSD-like +-- +-- Maintainer : duncan@community.haskell.org +-- +-- Handling for user-specified target selectors. 
+-- +----------------------------------------------------------------------------- +module Distribution.Client.TargetSelector ( + + -- * Target selectors + TargetSelector(..), + TargetImplicitCwd(..), + ComponentKind(..), + ComponentKindFilter, + SubComponentTarget(..), + QualLevel(..), + componentKind, + + -- * Reading target selectors + readTargetSelectors, + TargetSelectorProblem(..), + reportTargetSelectorProblems, + showTargetSelector, + TargetString(..), + showTargetString, + parseTargetString, + -- ** non-IO + readTargetSelectorsWith, + DirActions(..), + defaultDirActions, + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Package + ( Package(..), PackageId, PackageName, packageName ) +import Distribution.Types.UnqualComponentName + ( UnqualComponentName, mkUnqualComponentName, unUnqualComponentName + , packageNameToUnqualComponentName ) +import Distribution.Client.Types + ( PackageLocation(..), PackageSpecifier(..) ) + +import Distribution.Verbosity +import Distribution.PackageDescription + ( PackageDescription + , Executable(..) + , TestSuite(..), TestSuiteInterface(..), testModules + , Benchmark(..), BenchmarkInterface(..), benchmarkModules + , BuildInfo(..), explicitLibModules, exeModules ) +import Distribution.PackageDescription.Configuration + ( flattenPackageDescription ) +import Distribution.Solver.Types.SourcePackage + ( SourcePackage(..) ) +import Distribution.ModuleName + ( ModuleName, toFilePath ) +import Distribution.Simple.LocalBuildInfo + ( Component(..), ComponentName(..) 
+ , pkgComponents, componentName, componentBuildInfo ) +import Distribution.Types.ForeignLib + +import Distribution.Text + ( Text, display, simpleParse ) +import Distribution.Simple.Utils + ( die', lowercase, ordNub ) +import Distribution.Client.Utils + ( makeRelativeCanonical ) + +import Data.Either + ( partitionEithers ) +import Data.Function + ( on ) +import Data.List + ( stripPrefix, partition, groupBy ) +import Data.Ord + ( comparing ) +import qualified Data.Map.Lazy as Map.Lazy +import qualified Data.Map.Strict as Map +import qualified Data.Set as Set +import Control.Arrow ((&&&)) +import Control.Monad + hiding ( mfilter ) +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.ReadP + ( (+++), (<++) ) +import Distribution.ParseUtils + ( readPToMaybe ) +import System.FilePath as FilePath + ( takeExtension, dropExtension + , splitDirectories, joinPath, splitPath ) +import qualified System.Directory as IO + ( doesFileExist, doesDirectoryExist, canonicalizePath + , getCurrentDirectory ) +import System.FilePath + ( (), (<.>), normalise, dropTrailingPathSeparator ) +import Text.EditDistance + ( defaultEditCosts, restrictedDamerauLevenshteinDistance ) + + +-- ------------------------------------------------------------ +-- * Target selector terms +-- ------------------------------------------------------------ + +-- | A target selector is expression selecting a set of components (as targets +-- for a actions like @build@, @run@, @test@ etc). A target selector +-- corresponds to the user syntax for referring to targets on the command line. +-- +-- From the users point of view a target can be many things: packages, dirs, +-- component names, files etc. Internally we consider a target to be a specific +-- component (or module\/file within a component), and all the users' notions +-- of targets are just different ways of referring to these component targets. 
+-- +-- So target selectors are expressions in the sense that they are interpreted +-- to refer to one or more components. For example a 'TargetPackage' gets +-- interpreted differently by different commands to refer to all or a subset +-- of components within the package. +-- +-- The syntax has lots of optional parts: +-- +-- > [ package name | package dir | package .cabal file ] +-- > [ [lib:|exe:] component name ] +-- > [ module name | source file ] +-- +data TargetSelector = + + -- | One (or more) packages as a whole, or all the components of a + -- particular kind within the package(s). + -- + -- These are always packages that are local to the project. In the case + -- that there is more than one, they all share the same directory location. + -- + TargetPackage TargetImplicitCwd [PackageId] (Maybe ComponentKindFilter) + + -- | A package specified by name. This may refer to @extra-packages@ from + -- the @cabal.project@ file, or a dependency of a known project package or + -- could refer to a package from a hackage archive. It needs further + -- context to resolve to a specific package. + -- + | TargetPackageNamed PackageName (Maybe ComponentKindFilter) + + -- | All packages, or all components of a particular kind in all packages. + -- + | TargetAllPackages (Maybe ComponentKindFilter) + + -- | A specific component in a package within the project. + -- + | TargetComponent PackageId ComponentName SubComponentTarget + + -- | A component in a package, but where it cannot be verified that the + -- package has such a component, or because the package is itself not + -- known. + -- + | TargetComponentUnknown PackageName + (Either UnqualComponentName ComponentName) + SubComponentTarget + deriving (Eq, Ord, Show, Generic) + +-- | Does this 'TargetPackage' selector arise from syntax referring to a +-- package in the current directory (e.g. @tests@ or no giving no explicit +-- target at all) or does it come from syntax referring to a package name +-- or location. 
+-- +data TargetImplicitCwd = TargetImplicitCwd | TargetExplicitNamed + deriving (Eq, Ord, Show, Generic) + +data ComponentKind = LibKind | FLibKind | ExeKind | TestKind | BenchKind + deriving (Eq, Ord, Enum, Show) + +type ComponentKindFilter = ComponentKind + +-- | Either the component as a whole or detail about a file or module target +-- within a component. +-- +data SubComponentTarget = + + -- | The component as a whole + WholeComponent + + -- | A specific module within a component. + | ModuleTarget ModuleName + + -- | A specific file within a component. + | FileTarget FilePath + deriving (Eq, Ord, Show, Generic) + +instance Binary SubComponentTarget + + +-- ------------------------------------------------------------ +-- * Top level, do everything +-- ------------------------------------------------------------ + + +-- | Parse a bunch of command line args as 'TargetSelector's, failing with an +-- error if any are unrecognised. The possible target selectors are based on +-- the available packages (and their locations). +-- +readTargetSelectors :: [PackageSpecifier (SourcePackage (PackageLocation a))] + -> Maybe ComponentKindFilter + -- ^ This parameter is used when there are ambiguous selectors. + -- If it is 'Just', then we attempt to resolve ambiguitiy + -- by applying it, since otherwise there is no way to allow + -- contextually valid yet syntactically ambiguous selectors. 
+ -- (#4676, #5461) + -> [String] + -> IO (Either [TargetSelectorProblem] [TargetSelector]) +readTargetSelectors = readTargetSelectorsWith defaultDirActions + +readTargetSelectorsWith :: (Applicative m, Monad m) => DirActions m + -> [PackageSpecifier (SourcePackage (PackageLocation a))] + -> Maybe ComponentKindFilter + -> [String] + -> m (Either [TargetSelectorProblem] [TargetSelector]) +readTargetSelectorsWith dirActions@DirActions{..} pkgs mfilter targetStrs = + case parseTargetStrings targetStrs of + ([], usertargets) -> do + usertargets' <- mapM (getTargetStringFileStatus dirActions) usertargets + knowntargets <- getKnownTargets dirActions pkgs + case resolveTargetSelectors knowntargets usertargets' mfilter of + ([], btargets) -> return (Right btargets) + (problems, _) -> return (Left problems) + (strs, _) -> return (Left (map TargetSelectorUnrecognised strs)) + + +data DirActions m = DirActions { + doesFileExist :: FilePath -> m Bool, + doesDirectoryExist :: FilePath -> m Bool, + canonicalizePath :: FilePath -> m FilePath, + getCurrentDirectory :: m FilePath + } + +defaultDirActions :: DirActions IO +defaultDirActions = + DirActions { + doesFileExist = IO.doesFileExist, + doesDirectoryExist = IO.doesDirectoryExist, + -- Workaround for + canonicalizePath = IO.canonicalizePath . dropTrailingPathSeparator, + getCurrentDirectory = IO.getCurrentDirectory + } + +makeRelativeToCwd :: Applicative m => DirActions m -> FilePath -> m FilePath +makeRelativeToCwd DirActions{..} path = + makeRelativeCanonical <$> canonicalizePath path <*> getCurrentDirectory + + +-- ------------------------------------------------------------ +-- * Parsing target strings +-- ------------------------------------------------------------ + +-- | The outline parse of a target selector. 
It takes one of the forms: +-- +-- > str1 +-- > str1:str2 +-- > str1:str2:str3 +-- > str1:str2:str3:str4 +-- +data TargetString = + TargetString1 String + | TargetString2 String String + | TargetString3 String String String + | TargetString4 String String String String + | TargetString5 String String String String String + | TargetString7 String String String String String String String + deriving (Show, Eq) + +-- | Parse a bunch of 'TargetString's (purely without throwing exceptions). +-- +parseTargetStrings :: [String] -> ([String], [TargetString]) +parseTargetStrings = + partitionEithers + . map (\str -> maybe (Left str) Right (parseTargetString str)) + +parseTargetString :: String -> Maybe TargetString +parseTargetString = + readPToMaybe parseTargetApprox + where + parseTargetApprox :: Parse.ReadP r TargetString + parseTargetApprox = + (do a <- tokenQ + return (TargetString1 a)) + +++ (do a <- tokenQ0 + _ <- Parse.char ':' + b <- tokenQ + return (TargetString2 a b)) + +++ (do a <- tokenQ0 + _ <- Parse.char ':' + b <- tokenQ + _ <- Parse.char ':' + c <- tokenQ + return (TargetString3 a b c)) + +++ (do a <- tokenQ0 + _ <- Parse.char ':' + b <- token + _ <- Parse.char ':' + c <- tokenQ + _ <- Parse.char ':' + d <- tokenQ + return (TargetString4 a b c d)) + +++ (do a <- tokenQ0 + _ <- Parse.char ':' + b <- token + _ <- Parse.char ':' + c <- tokenQ + _ <- Parse.char ':' + d <- tokenQ + _ <- Parse.char ':' + e <- tokenQ + return (TargetString5 a b c d e)) + +++ (do a <- tokenQ0 + _ <- Parse.char ':' + b <- token + _ <- Parse.char ':' + c <- tokenQ + _ <- Parse.char ':' + d <- tokenQ + _ <- Parse.char ':' + e <- tokenQ + _ <- Parse.char ':' + f <- tokenQ + _ <- Parse.char ':' + g <- tokenQ + return (TargetString7 a b c d e f g)) + + token = Parse.munch1 (\x -> not (isSpace x) && x /= ':') + tokenQ = parseHaskellString <++ token + token0 = Parse.munch (\x -> not (isSpace x) && x /= ':') + tokenQ0= parseHaskellString <++ token0 + parseHaskellString :: Parse.ReadP r 
String + parseHaskellString = Parse.readS_to_P reads + + +-- | Render a 'TargetString' back as the external syntax. This is mainly for +-- error messages. +-- +showTargetString :: TargetString -> String +showTargetString = intercalate ":" . components + where + components (TargetString1 s1) = [s1] + components (TargetString2 s1 s2) = [s1,s2] + components (TargetString3 s1 s2 s3) = [s1,s2,s3] + components (TargetString4 s1 s2 s3 s4) = [s1,s2,s3,s4] + components (TargetString5 s1 s2 s3 s4 s5) = [s1,s2,s3,s4,s5] + components (TargetString7 s1 s2 s3 s4 s5 s6 s7) = [s1,s2,s3,s4,s5,s6,s7] + +showTargetSelector :: TargetSelector -> String +showTargetSelector ts = + case [ t | ql <- [QL1 .. QLFull] + , t <- renderTargetSelector ql ts ] + of (t':_) -> showTargetString (forgetFileStatus t') + [] -> "" + +showTargetSelectorKind :: TargetSelector -> String +showTargetSelectorKind bt = case bt of + TargetPackage TargetExplicitNamed _ Nothing -> "package" + TargetPackage TargetExplicitNamed _ (Just _) -> "package:filter" + TargetPackage TargetImplicitCwd _ Nothing -> "cwd-package" + TargetPackage TargetImplicitCwd _ (Just _) -> "cwd-package:filter" + TargetPackageNamed _ Nothing -> "named-package" + TargetPackageNamed _ (Just _) -> "named-package:filter" + TargetAllPackages Nothing -> "package *" + TargetAllPackages (Just _) -> "package *:filter" + TargetComponent _ _ WholeComponent -> "component" + TargetComponent _ _ ModuleTarget{} -> "module" + TargetComponent _ _ FileTarget{} -> "file" + TargetComponentUnknown _ _ WholeComponent -> "unknown-component" + TargetComponentUnknown _ _ ModuleTarget{} -> "unknown-module" + TargetComponentUnknown _ _ FileTarget{} -> "unknown-file" + + +-- ------------------------------------------------------------ +-- * Checking if targets exist as files +-- ------------------------------------------------------------ + +data TargetStringFileStatus = + TargetStringFileStatus1 String FileStatus + | TargetStringFileStatus2 String FileStatus String + 
| TargetStringFileStatus3 String FileStatus String String + | TargetStringFileStatus4 String String String String + | TargetStringFileStatus5 String String String String String + | TargetStringFileStatus7 String String String String String String String + deriving (Eq, Ord, Show) + +data FileStatus = FileStatusExistsFile FilePath -- the canonicalised filepath + | FileStatusExistsDir FilePath -- the canonicalised filepath + | FileStatusNotExists Bool -- does the parent dir exist even? + deriving (Eq, Ord, Show) + +noFileStatus :: FileStatus +noFileStatus = FileStatusNotExists False + +getTargetStringFileStatus :: (Applicative m, Monad m) => DirActions m + -> TargetString -> m TargetStringFileStatus +getTargetStringFileStatus DirActions{..} t = + case t of + TargetString1 s1 -> + (\f1 -> TargetStringFileStatus1 s1 f1) <$> fileStatus s1 + TargetString2 s1 s2 -> + (\f1 -> TargetStringFileStatus2 s1 f1 s2) <$> fileStatus s1 + TargetString3 s1 s2 s3 -> + (\f1 -> TargetStringFileStatus3 s1 f1 s2 s3) <$> fileStatus s1 + TargetString4 s1 s2 s3 s4 -> + return (TargetStringFileStatus4 s1 s2 s3 s4) + TargetString5 s1 s2 s3 s4 s5 -> + return (TargetStringFileStatus5 s1 s2 s3 s4 s5) + TargetString7 s1 s2 s3 s4 s5 s6 s7 -> + return (TargetStringFileStatus7 s1 s2 s3 s4 s5 s6 s7) + where + fileStatus f = do + fexists <- doesFileExist f + dexists <- doesDirectoryExist f + case splitPath f of + _ | fexists -> FileStatusExistsFile <$> canonicalizePath f + | dexists -> FileStatusExistsDir <$> canonicalizePath f + (d:_) -> FileStatusNotExists <$> doesDirectoryExist d + _ -> pure (FileStatusNotExists False) + +forgetFileStatus :: TargetStringFileStatus -> TargetString +forgetFileStatus t = case t of + TargetStringFileStatus1 s1 _ -> TargetString1 s1 + TargetStringFileStatus2 s1 _ s2 -> TargetString2 s1 s2 + TargetStringFileStatus3 s1 _ s2 s3 -> TargetString3 s1 s2 s3 + TargetStringFileStatus4 s1 s2 s3 s4 -> TargetString4 s1 s2 s3 s4 + TargetStringFileStatus5 s1 s2 s3 s4 + s5 -> 
TargetString5 s1 s2 s3 s4 s5 + TargetStringFileStatus7 s1 s2 s3 s4 + s5 s6 s7 -> TargetString7 s1 s2 s3 s4 s5 s6 s7 + + +-- ------------------------------------------------------------ +-- * Resolving target strings to target selectors +-- ------------------------------------------------------------ + + +-- | Given a bunch of user-specified targets, try to resolve what it is they +-- refer to. +-- +resolveTargetSelectors :: KnownTargets + -> [TargetStringFileStatus] + -> Maybe ComponentKindFilter + -> ([TargetSelectorProblem], + [TargetSelector]) +-- default local dir target if there's no given target: +resolveTargetSelectors (KnownTargets{knownPackagesAll = []}) [] _ = + ([TargetSelectorNoTargetsInProject], []) + +resolveTargetSelectors (KnownTargets{knownPackagesPrimary = []}) [] _ = + ([TargetSelectorNoTargetsInCwd], []) + +resolveTargetSelectors (KnownTargets{knownPackagesPrimary}) [] _ = + ([], [TargetPackage TargetImplicitCwd pkgids Nothing]) + where + pkgids = [ pinfoId | KnownPackage{pinfoId} <- knownPackagesPrimary ] + +resolveTargetSelectors knowntargets targetStrs mfilter = + partitionEithers + . 
map (resolveTargetSelector knowntargets mfilter) + $ targetStrs + +resolveTargetSelector :: KnownTargets + -> Maybe ComponentKindFilter + -> TargetStringFileStatus + -> Either TargetSelectorProblem TargetSelector +resolveTargetSelector knowntargets@KnownTargets{..} mfilter targetStrStatus = + case findMatch (matcher targetStrStatus) of + + Unambiguous _ + | projectIsEmpty -> Left TargetSelectorNoTargetsInProject + + Unambiguous (TargetPackage TargetImplicitCwd [] _) + -> Left (TargetSelectorNoCurrentPackage targetStr) + + Unambiguous target -> Right target + + None errs + | projectIsEmpty -> Left TargetSelectorNoTargetsInProject + | otherwise -> Left (classifyMatchErrors errs) + + Ambiguous _ targets + | Just kfilter <- mfilter + , [target] <- applyKindFilter kfilter targets -> Right target + + Ambiguous exactMatch targets -> + case disambiguateTargetSelectors + matcher targetStrStatus exactMatch + targets of + Right targets' -> Left (TargetSelectorAmbiguous targetStr targets') + Left ((m, ms):_) -> Left (MatchingInternalError targetStr m ms) + Left [] -> internalError "resolveTargetSelector" + where + matcher = matchTargetSelector knowntargets + + targetStr = forgetFileStatus targetStrStatus + + projectIsEmpty = null knownPackagesAll + + classifyMatchErrors errs + | not (null expected) + = let (things, got:_) = unzip expected in + TargetSelectorExpected targetStr things got + + | not (null nosuch) + = TargetSelectorNoSuch targetStr nosuch + + | otherwise + = internalError $ "classifyMatchErrors: " ++ show errs + where + expected = [ (thing, got) + | (_, MatchErrorExpected thing got) + <- map (innerErr Nothing) errs ] + -- Trim the list of alternatives by dropping duplicates and + -- retaining only at most three most similar (by edit distance) ones. 
+ nosuch = Map.foldrWithKey genResults [] $ Map.fromListWith Set.union $ + [ ((inside, thing, got), Set.fromList alts) + | (inside, MatchErrorNoSuch thing got alts) + <- map (innerErr Nothing) errs + ] + + genResults (inside, thing, got) alts acc = ( + inside + , thing + , got + , take maxResults + $ map fst + $ takeWhile distanceLow + $ sortBy (comparing snd) + $ map addLevDist + $ Set.toList alts + ) : acc + where + addLevDist = id &&& restrictedDamerauLevenshteinDistance + defaultEditCosts got + + distanceLow (_, dist) = dist < length got `div` 2 + + maxResults = 3 + + innerErr _ (MatchErrorIn kind thing m) + = innerErr (Just (kind,thing)) m + innerErr c m = (c,m) + + applyKindFilter :: ComponentKindFilter -> [TargetSelector] -> [TargetSelector] + applyKindFilter kfilter = filter go + where + go (TargetPackage _ _ (Just filter')) = kfilter == filter' + go (TargetPackageNamed _ (Just filter')) = kfilter == filter' + go (TargetAllPackages (Just filter')) = kfilter == filter' + go (TargetComponent _ cname _) + | CLibName <- cname = kfilter == LibKind + | CSubLibName _ <- cname = kfilter == LibKind + | CFLibName _ <- cname = kfilter == FLibKind + | CExeName _ <- cname = kfilter == ExeKind + | CTestName _ <- cname = kfilter == TestKind + | CBenchName _ <- cname = kfilter == BenchKind + go _ = True + +-- | The various ways that trying to resolve a 'TargetString' to a +-- 'TargetSelector' can fail. +-- +data TargetSelectorProblem + = TargetSelectorExpected TargetString [String] String + -- ^ [expected thing] (actually got) + | TargetSelectorNoSuch TargetString + [(Maybe (String, String), String, String, [String])] + -- ^ [([in thing], no such thing, actually got, alternatives)] + | TargetSelectorAmbiguous TargetString + [(TargetString, TargetSelector)] + + | MatchingInternalError TargetString TargetSelector + [(TargetString, [TargetSelector])] + | TargetSelectorUnrecognised String + -- ^ Syntax error when trying to parse a target string. 
+ | TargetSelectorNoCurrentPackage TargetString + | TargetSelectorNoTargetsInCwd + | TargetSelectorNoTargetsInProject + deriving (Show, Eq) + +data QualLevel = QL1 | QL2 | QL3 | QLFull + deriving (Eq, Enum, Show) + +disambiguateTargetSelectors + :: (TargetStringFileStatus -> Match TargetSelector) + -> TargetStringFileStatus -> MatchClass + -> [TargetSelector] + -> Either [(TargetSelector, [(TargetString, [TargetSelector])])] + [(TargetString, TargetSelector)] +disambiguateTargetSelectors matcher matchInput exactMatch matchResults = + case partitionEithers results of + (errs@(_:_), _) -> Left errs + ([], ok) -> Right ok + where + -- So, here's the strategy. We take the original match results, and make a + -- table of all their renderings at all qualification levels. + -- Note there can be multiple renderings at each qualification level. + matchResultsRenderings :: [(TargetSelector, [TargetStringFileStatus])] + matchResultsRenderings = + [ (matchResult, matchRenderings) + | matchResult <- matchResults + , let matchRenderings = + [ rendering + | ql <- [QL1 .. QLFull] + , rendering <- renderTargetSelector ql matchResult ] + ] + + -- Of course the point is that we're looking for renderings that are + -- unambiguous matches. So we build another memo table of all the matches + -- for all of those renderings. So by looking up in this table we can see + -- if we've got an unambiguous match. 
+ + memoisedMatches :: Map TargetStringFileStatus (Match TargetSelector) + memoisedMatches = + -- avoid recomputing the main one if it was an exact match + (if exactMatch == Exact + then Map.insert matchInput (Match Exact 0 matchResults) + else id) + $ Map.Lazy.fromList + [ (rendering, matcher rendering) + | rendering <- concatMap snd matchResultsRenderings ] + + -- Finally, for each of the match results, we go through all their + -- possible renderings (in order of qualification level, though remember + -- there can be multiple renderings per level), and find the first one + -- that has an unambiguous match. + results :: [Either (TargetSelector, [(TargetString, [TargetSelector])]) + (TargetString, TargetSelector)] + results = + [ case findUnambiguous originalMatch matchRenderings of + Just unambiguousRendering -> + Right ( forgetFileStatus unambiguousRendering + , originalMatch) + + -- This case is an internal error, but we bubble it up and report it + Nothing -> + Left ( originalMatch + , [ (forgetFileStatus rendering, matches) + | rendering <- matchRenderings + , let Match m _ matches = + memoisedMatches Map.! rendering + , m /= Inexact + ] ) + + | (originalMatch, matchRenderings) <- matchResultsRenderings ] + + findUnambiguous :: TargetSelector + -> [TargetStringFileStatus] + -> Maybe TargetStringFileStatus + findUnambiguous _ [] = Nothing + findUnambiguous t (r:rs) = + case memoisedMatches Map.! r of + Match Exact _ [t'] | t == t' + -> Just r + Match Exact _ _ -> findUnambiguous t rs + Match Unknown _ _ -> findUnambiguous t rs + Match Inexact _ _ -> internalError "Match Inexact" + NoMatch _ _ -> internalError "NoMatch" + +internalError :: String -> a +internalError msg = + error $ "TargetSelector: internal error: " ++ msg + + +-- | Throw an exception with a formatted message if there are any problems. 
+-- +reportTargetSelectorProblems :: Verbosity -> [TargetSelectorProblem] -> IO a +reportTargetSelectorProblems verbosity problems = do + + case [ str | TargetSelectorUnrecognised str <- problems ] of + [] -> return () + targets -> + die' verbosity $ unlines + [ "Unrecognised target syntax for '" ++ name ++ "'." + | name <- targets ] + + case [ (t, m, ms) | MatchingInternalError t m ms <- problems ] of + [] -> return () + ((target, originalMatch, renderingsAndMatches):_) -> + die' verbosity $ "Internal error in target matching. It should always " + ++ "be possible to find a syntax that's sufficiently qualified to " + ++ "give an unambiguous match. However when matching '" + ++ showTargetString target ++ "' we found " + ++ showTargetSelector originalMatch + ++ " (" ++ showTargetSelectorKind originalMatch ++ ") which does " + ++ "not have an unambiguous syntax. The possible syntax and the " + ++ "targets they match are as follows:\n" + ++ unlines + [ "'" ++ showTargetString rendering ++ "' which matches " + ++ intercalate ", " + [ showTargetSelector match ++ + " (" ++ showTargetSelectorKind match ++ ")" + | match <- matches ] + | (rendering, matches) <- renderingsAndMatches ] + + case [ (t, e, g) | TargetSelectorExpected t e g <- problems ] of + [] -> return () + targets -> + die' verbosity $ unlines + [ "Unrecognised target '" ++ showTargetString target + ++ "'.\n" + ++ "Expected a " ++ intercalate " or " expected + ++ ", rather than '" ++ got ++ "'." + | (target, expected, got) <- targets ] + + case [ (t, e) | TargetSelectorNoSuch t e <- problems ] of + [] -> return () + targets -> + die' verbosity $ unlines + [ "Unknown target '" ++ showTargetString target ++ + "'.\n" ++ unlines + [ (case inside of + Just (kind, "") + -> "The " ++ kind ++ " has no " + Just (kind, thing) + -> "The " ++ kind ++ " " ++ thing ++ " has no " + Nothing -> "There is no ") + ++ intercalate " or " [ mungeThing thing ++ " '" ++ got ++ "'" + | (thing, got, _alts) <- nosuch' ] ++ "." 
+ ++ if null alternatives then "" else + "\nPerhaps you meant " ++ intercalate ";\nor " + [ "the " ++ thing ++ " '" ++ intercalate "' or '" alts ++ "'?" + | (thing, alts) <- alternatives ] + | (inside, nosuch') <- groupByContainer nosuch + , let alternatives = + [ (thing, alts) + | (thing,_got,alts@(_:_)) <- nosuch' ] + ] + | (target, nosuch) <- targets + , let groupByContainer = + map (\g@((inside,_,_,_):_) -> + (inside, [ (thing,got,alts) + | (_,thing,got,alts) <- g ])) + . groupBy ((==) `on` (\(x,_,_,_) -> x)) + . sortBy (compare `on` (\(x,_,_,_) -> x)) + ] + where + mungeThing "file" = "file target" + mungeThing thing = thing + + case [ (t, ts) | TargetSelectorAmbiguous t ts <- problems ] of + [] -> return () + targets -> + die' verbosity $ unlines + [ "Ambiguous target '" ++ showTargetString target + ++ "'. It could be:\n " + ++ unlines [ " "++ showTargetString ut ++ + " (" ++ showTargetSelectorKind bt ++ ")" + | (ut, bt) <- amb ] + | (target, amb) <- targets ] + + case [ t | TargetSelectorNoCurrentPackage t <- problems ] of + [] -> return () + target:_ -> + die' verbosity $ + "The target '" ++ showTargetString target ++ "' refers to the " + ++ "components in the package in the current directory, but there " + ++ "is no package in the current directory (or at least not listed " + ++ "as part of the project)." + --TODO: report a different error if there is a .cabal file but it's + -- not a member of the project + + case [ () | TargetSelectorNoTargetsInCwd <- problems ] of + [] -> return () + _:_ -> + die' verbosity $ + "No targets given and there is no package in the current " + ++ "directory. Use the target 'all' for all packages in the " + ++ "project or specify packages or components by name or location. " + ++ "See 'cabal build --help' for more details on target options." + + case [ () | TargetSelectorNoTargetsInProject <- problems ] of + [] -> return () + _:_ -> + die' verbosity $ + "There is no .cabal package file or cabal.project file. 
" + ++ "To build packages locally you need at minimum a .cabal " + ++ "file. You can use 'cabal init' to create one.\n" + ++ "\n" + ++ "For non-trivial projects you will also want a cabal.project " + ++ "file in the root directory of your project. This file lists the " + ++ "packages in your project and all other build configuration. " + ++ "See the Cabal user guide for full details." + + fail "reportTargetSelectorProblems: internal error" + + +---------------------------------- +-- Syntax type +-- + +-- | Syntax for the 'TargetSelector': the matcher and renderer +-- +data Syntax = Syntax QualLevel Matcher Renderer + | AmbiguousAlternatives Syntax Syntax + | ShadowingAlternatives Syntax Syntax + +type Matcher = TargetStringFileStatus -> Match TargetSelector +type Renderer = TargetSelector -> [TargetStringFileStatus] + +foldSyntax :: (a -> a -> a) -> (a -> a -> a) + -> (QualLevel -> Matcher -> Renderer -> a) + -> (Syntax -> a) +foldSyntax ambiguous unambiguous syntax = go + where + go (Syntax ql match render) = syntax ql match render + go (AmbiguousAlternatives a b) = ambiguous (go a) (go b) + go (ShadowingAlternatives a b) = unambiguous (go a) (go b) + + +---------------------------------- +-- Top level renderer and matcher +-- + +renderTargetSelector :: QualLevel -> TargetSelector + -> [TargetStringFileStatus] +renderTargetSelector ql ts = + foldSyntax + (++) (++) + (\ql' _ render -> guard (ql == ql') >> render ts) + syntax + where + syntax = syntaxForms emptyKnownTargets + -- don't need known targets for rendering + +matchTargetSelector :: KnownTargets + -> TargetStringFileStatus + -> Match TargetSelector +matchTargetSelector knowntargets = \usertarget -> + nubMatchesBy (==) $ + + let ql = targetQualLevel usertarget in + foldSyntax + (<|>) () + (\ql' match _ -> guard (ql == ql') >> match usertarget) + syntax + where + syntax = syntaxForms knowntargets + + targetQualLevel TargetStringFileStatus1{} = QL1 + targetQualLevel TargetStringFileStatus2{} = QL2 + 
targetQualLevel TargetStringFileStatus3{} = QL3 + targetQualLevel TargetStringFileStatus4{} = QLFull + targetQualLevel TargetStringFileStatus5{} = QLFull + targetQualLevel TargetStringFileStatus7{} = QLFull + + +---------------------------------- +-- Syntax forms +-- + +-- | All the forms of syntax for 'TargetSelector'. +-- +syntaxForms :: KnownTargets -> Syntax +syntaxForms KnownTargets { + knownPackagesAll = pinfo, + knownPackagesPrimary = ppinfo, + knownComponentsAll = cinfo, + knownComponentsPrimary = pcinfo, + knownComponentsOther = ocinfo + } = + -- The various forms of syntax here are ambiguous in many cases. + -- Our policy is by default we expose that ambiguity and report + -- ambiguous matches. In certain cases we override the ambiguity + -- by having some forms shadow others. + -- + -- We make modules shadow files because module name "Q" clashes + -- with file "Q" with no extension but these refer to the same + -- thing anyway so it's not a useful ambiguity. Other cases are + -- not ambiguous like "Q" vs "Q.hs" or "Data.Q" vs "Data/Q". 
+ + ambiguousAlternatives + -- convenient single-component forms + [ shadowingAlternatives + [ ambiguousAlternatives + [ syntaxForm1All + , syntaxForm1Filter ppinfo + , shadowingAlternatives + [ syntaxForm1Component pcinfo + , syntaxForm1Package pinfo + ] + ] + , syntaxForm1Component ocinfo + , syntaxForm1Module cinfo + , syntaxForm1File pinfo + ] + + -- two-component partially qualified forms + -- fully qualified form for 'all' + , syntaxForm2MetaAll + , syntaxForm2AllFilter + , syntaxForm2NamespacePackage pinfo + , syntaxForm2PackageComponent pinfo + , syntaxForm2PackageFilter pinfo + , syntaxForm2KindComponent cinfo + , shadowingAlternatives + [ syntaxForm2PackageModule pinfo + , syntaxForm2PackageFile pinfo + ] + , shadowingAlternatives + [ syntaxForm2ComponentModule cinfo + , syntaxForm2ComponentFile cinfo + ] + + -- rarely used partially qualified forms + , syntaxForm3PackageKindComponent pinfo + , shadowingAlternatives + [ syntaxForm3PackageComponentModule pinfo + , syntaxForm3PackageComponentFile pinfo + ] + , shadowingAlternatives + [ syntaxForm3KindComponentModule cinfo + , syntaxForm3KindComponentFile cinfo + ] + , syntaxForm3NamespacePackageFilter pinfo + + -- fully-qualified forms for all and cwd with filter + , syntaxForm3MetaAllFilter + , syntaxForm3MetaCwdFilter ppinfo + + -- fully-qualified form for package and package with filter + , syntaxForm3MetaNamespacePackage pinfo + , syntaxForm4MetaNamespacePackageFilter pinfo + + -- fully-qualified forms for component, module and file + , syntaxForm5MetaNamespacePackageKindComponent pinfo + , syntaxForm7MetaNamespacePackageKindComponentNamespaceModule pinfo + , syntaxForm7MetaNamespacePackageKindComponentNamespaceFile pinfo + ] + where + ambiguousAlternatives = foldr1 AmbiguousAlternatives + shadowingAlternatives = foldr1 ShadowingAlternatives + + +-- | Syntax: "all" to select all packages in the project +-- +-- > cabal build all +-- +syntaxForm1All :: Syntax +syntaxForm1All = + syntaxForm1 render $ \str1 
_fstatus1 -> do + guardMetaAll str1 + return (TargetAllPackages Nothing) + where + render (TargetAllPackages Nothing) = + [TargetStringFileStatus1 "all" noFileStatus] + render _ = [] + +-- | Syntax: filter +-- +-- > cabal build tests +-- +syntaxForm1Filter :: [KnownPackage] -> Syntax +syntaxForm1Filter ps = + syntaxForm1 render $ \str1 _fstatus1 -> do + kfilter <- matchComponentKindFilter str1 + return (TargetPackage TargetImplicitCwd pids (Just kfilter)) + where + pids = [ pinfoId | KnownPackage{pinfoId} <- ps ] + render (TargetPackage TargetImplicitCwd _ (Just kfilter)) = + [TargetStringFileStatus1 (dispF kfilter) noFileStatus] + render _ = [] + + +-- | Syntax: package (name, dir or file) +-- +-- > cabal build foo +-- > cabal build ../bar ../bar/bar.cabal +-- +syntaxForm1Package :: [KnownPackage] -> Syntax +syntaxForm1Package pinfo = + syntaxForm1 render $ \str1 fstatus1 -> do + guardPackage str1 fstatus1 + p <- matchPackage pinfo str1 fstatus1 + case p of + KnownPackage{pinfoId} -> + return (TargetPackage TargetExplicitNamed [pinfoId] Nothing) + KnownPackageName pn -> + return (TargetPackageNamed pn Nothing) + where + render (TargetPackage TargetExplicitNamed [p] Nothing) = + [TargetStringFileStatus1 (dispP p) noFileStatus] + render (TargetPackageNamed pn Nothing) = + [TargetStringFileStatus1 (dispPN pn) noFileStatus] + render _ = [] + +-- | Syntax: component +-- +-- > cabal build foo +-- +syntaxForm1Component :: [KnownComponent] -> Syntax +syntaxForm1Component cs = + syntaxForm1 render $ \str1 _fstatus1 -> do + guardComponentName str1 + c <- matchComponentName cs str1 + return (TargetComponent (cinfoPackageId c) (cinfoName c) WholeComponent) + where + render (TargetComponent p c WholeComponent) = + [TargetStringFileStatus1 (dispC p c) noFileStatus] + render _ = [] + +-- | Syntax: module +-- +-- > cabal build Data.Foo +-- +syntaxForm1Module :: [KnownComponent] -> Syntax +syntaxForm1Module cs = + syntaxForm1 render $ \str1 _fstatus1 -> do + guardModuleName str1 + 
let ms = [ (m,c) | c <- cs, m <- cinfoModules c ] + (m,c) <- matchModuleNameAnd ms str1 + return (TargetComponent (cinfoPackageId c) (cinfoName c) (ModuleTarget m)) + where + render (TargetComponent _p _c (ModuleTarget m)) = + [TargetStringFileStatus1 (dispM m) noFileStatus] + render _ = [] + +-- | Syntax: file name +-- +-- > cabal build Data/Foo.hs bar/Main.hsc +-- +syntaxForm1File :: [KnownPackage] -> Syntax +syntaxForm1File ps = + -- Note there's a bit of an inconsistency here vs the other syntax forms + -- for files. For the single-part syntax the target has to point to a file + -- that exists (due to our use of matchPackageDirectoryPrefix), whereas for + -- all the other forms we don't require that. + syntaxForm1 render $ \str1 fstatus1 -> + expecting "file" str1 $ do + (pkgfile, ~KnownPackage{pinfoId, pinfoComponents}) + -- always returns the KnownPackage case + <- matchPackageDirectoryPrefix ps fstatus1 + orNoThingIn "package" (display (packageName pinfoId)) $ do + (filepath, c) <- matchComponentFile pinfoComponents pkgfile + return (TargetComponent pinfoId (cinfoName c) (FileTarget filepath)) + where + render (TargetComponent _p _c (FileTarget f)) = + [TargetStringFileStatus1 f noFileStatus] + render _ = [] + +--- + +-- | Syntax: :all +-- +-- > cabal build :all +-- +syntaxForm2MetaAll :: Syntax +syntaxForm2MetaAll = + syntaxForm2 render $ \str1 _fstatus1 str2 -> do + guardNamespaceMeta str1 + guardMetaAll str2 + return (TargetAllPackages Nothing) + where + render (TargetAllPackages Nothing) = + [TargetStringFileStatus2 "" noFileStatus "all"] + render _ = [] + +-- | Syntax: all : filer +-- +-- > cabal build all:tests +-- +syntaxForm2AllFilter :: Syntax +syntaxForm2AllFilter = + syntaxForm2 render $ \str1 _fstatus1 str2 -> do + guardMetaAll str1 + kfilter <- matchComponentKindFilter str2 + return (TargetAllPackages (Just kfilter)) + where + render (TargetAllPackages (Just kfilter)) = + [TargetStringFileStatus2 "all" noFileStatus (dispF kfilter)] + render _ = 
[] + +-- | Syntax: package : filer +-- +-- > cabal build foo:tests +-- +syntaxForm2PackageFilter :: [KnownPackage] -> Syntax +syntaxForm2PackageFilter ps = + syntaxForm2 render $ \str1 fstatus1 str2 -> do + guardPackage str1 fstatus1 + p <- matchPackage ps str1 fstatus1 + kfilter <- matchComponentKindFilter str2 + case p of + KnownPackage{pinfoId} -> + return (TargetPackage TargetExplicitNamed [pinfoId] (Just kfilter)) + KnownPackageName pn -> + return (TargetPackageNamed pn (Just kfilter)) + where + render (TargetPackage TargetExplicitNamed [p] (Just kfilter)) = + [TargetStringFileStatus2 (dispP p) noFileStatus (dispF kfilter)] + render (TargetPackageNamed pn (Just kfilter)) = + [TargetStringFileStatus2 (dispPN pn) noFileStatus (dispF kfilter)] + render _ = [] + +-- | Syntax: pkg : package name +-- +-- > cabal build pkg:foo +-- +syntaxForm2NamespacePackage :: [KnownPackage] -> Syntax +syntaxForm2NamespacePackage pinfo = + syntaxForm2 render $ \str1 _fstatus1 str2 -> do + guardNamespacePackage str1 + guardPackageName str2 + p <- matchPackage pinfo str2 noFileStatus + case p of + KnownPackage{pinfoId} -> + return (TargetPackage TargetExplicitNamed [pinfoId] Nothing) + KnownPackageName pn -> + return (TargetPackageNamed pn Nothing) + where + render (TargetPackage TargetExplicitNamed [p] Nothing) = + [TargetStringFileStatus2 "pkg" noFileStatus (dispP p)] + render (TargetPackageNamed pn Nothing) = + [TargetStringFileStatus2 "pkg" noFileStatus (dispPN pn)] + render _ = [] + +-- | Syntax: package : component +-- +-- > cabal build foo:foo +-- > cabal build ./foo:foo +-- > cabal build ./foo.cabal:foo +-- +syntaxForm2PackageComponent :: [KnownPackage] -> Syntax +syntaxForm2PackageComponent ps = + syntaxForm2 render $ \str1 fstatus1 str2 -> do + guardPackage str1 fstatus1 + guardComponentName str2 + p <- matchPackage ps str1 fstatus1 + case p of + KnownPackage{pinfoId, pinfoComponents} -> + orNoThingIn "package" (display (packageName pinfoId)) $ do + c <- matchComponentName 
pinfoComponents str2 + return (TargetComponent pinfoId (cinfoName c) WholeComponent) + --TODO: the error here ought to say there's no component by that name in + -- this package, and name the package + KnownPackageName pn -> + let cn = mkUnqualComponentName str2 in + return (TargetComponentUnknown pn (Left cn) WholeComponent) + where + render (TargetComponent p c WholeComponent) = + [TargetStringFileStatus2 (dispP p) noFileStatus (dispC p c)] + render (TargetComponentUnknown pn (Left cn) WholeComponent) = + [TargetStringFileStatus2 (dispPN pn) noFileStatus (display cn)] + render _ = [] + +-- | Syntax: namespace : component +-- +-- > cabal build lib:foo exe:foo +-- +syntaxForm2KindComponent :: [KnownComponent] -> Syntax +syntaxForm2KindComponent cs = + syntaxForm2 render $ \str1 _fstatus1 str2 -> do + ckind <- matchComponentKind str1 + guardComponentName str2 + c <- matchComponentKindAndName cs ckind str2 + return (TargetComponent (cinfoPackageId c) (cinfoName c) WholeComponent) + where + render (TargetComponent p c WholeComponent) = + [TargetStringFileStatus2 (dispCK c) noFileStatus (dispC p c)] + render _ = [] + +-- | Syntax: package : module +-- +-- > cabal build foo:Data.Foo +-- > cabal build ./foo:Data.Foo +-- > cabal build ./foo.cabal:Data.Foo +-- +syntaxForm2PackageModule :: [KnownPackage] -> Syntax +syntaxForm2PackageModule ps = + syntaxForm2 render $ \str1 fstatus1 str2 -> do + guardPackage str1 fstatus1 + guardModuleName str2 + p <- matchPackage ps str1 fstatus1 + case p of + KnownPackage{pinfoId, pinfoComponents} -> + orNoThingIn "package" (display (packageName pinfoId)) $ do + let ms = [ (m,c) | c <- pinfoComponents, m <- cinfoModules c ] + (m,c) <- matchModuleNameAnd ms str2 + return (TargetComponent pinfoId (cinfoName c) (ModuleTarget m)) + KnownPackageName pn -> do + m <- matchModuleNameUnknown str2 + -- We assume the primary library component of the package: + return (TargetComponentUnknown pn (Right CLibName) (ModuleTarget m)) + where + render 
(TargetComponent p _c (ModuleTarget m)) = + [TargetStringFileStatus2 (dispP p) noFileStatus (dispM m)] + render _ = [] + +-- | Syntax: component : module +-- +-- > cabal build foo:Data.Foo +-- +syntaxForm2ComponentModule :: [KnownComponent] -> Syntax +syntaxForm2ComponentModule cs = + syntaxForm2 render $ \str1 _fstatus1 str2 -> do + guardComponentName str1 + guardModuleName str2 + c <- matchComponentName cs str1 + orNoThingIn "component" (cinfoStrName c) $ do + let ms = cinfoModules c + m <- matchModuleName ms str2 + return (TargetComponent (cinfoPackageId c) (cinfoName c) + (ModuleTarget m)) + where + render (TargetComponent p c (ModuleTarget m)) = + [TargetStringFileStatus2 (dispC p c) noFileStatus (dispM m)] + render _ = [] + +-- | Syntax: package : filename +-- +-- > cabal build foo:Data/Foo.hs +-- > cabal build ./foo:Data/Foo.hs +-- > cabal build ./foo.cabal:Data/Foo.hs +-- +syntaxForm2PackageFile :: [KnownPackage] -> Syntax +syntaxForm2PackageFile ps = + syntaxForm2 render $ \str1 fstatus1 str2 -> do + guardPackage str1 fstatus1 + p <- matchPackage ps str1 fstatus1 + case p of + KnownPackage{pinfoId, pinfoComponents} -> + orNoThingIn "package" (display (packageName pinfoId)) $ do + (filepath, c) <- matchComponentFile pinfoComponents str2 + return (TargetComponent pinfoId (cinfoName c) (FileTarget filepath)) + KnownPackageName pn -> + let filepath = str2 in + -- We assume the primary library component of the package: + return (TargetComponentUnknown pn (Right CLibName) (FileTarget filepath)) + where + render (TargetComponent p _c (FileTarget f)) = + [TargetStringFileStatus2 (dispP p) noFileStatus f] + render _ = [] + +-- | Syntax: component : filename +-- +-- > cabal build foo:Data/Foo.hs +-- +syntaxForm2ComponentFile :: [KnownComponent] -> Syntax +syntaxForm2ComponentFile cs = + syntaxForm2 render $ \str1 _fstatus1 str2 -> do + guardComponentName str1 + c <- matchComponentName cs str1 + orNoThingIn "component" (cinfoStrName c) $ do + (filepath, _) <- 
matchComponentFile [c] str2 + return (TargetComponent (cinfoPackageId c) (cinfoName c) + (FileTarget filepath)) + where + render (TargetComponent p c (FileTarget f)) = + [TargetStringFileStatus2 (dispC p c) noFileStatus f] + render _ = [] + +--- + +-- | Syntax: :all : filter +-- +-- > cabal build :all:tests +-- +syntaxForm3MetaAllFilter :: Syntax +syntaxForm3MetaAllFilter = + syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do + guardNamespaceMeta str1 + guardMetaAll str2 + kfilter <- matchComponentKindFilter str3 + return (TargetAllPackages (Just kfilter)) + where + render (TargetAllPackages (Just kfilter)) = + [TargetStringFileStatus3 "" noFileStatus "all" (dispF kfilter)] + render _ = [] + +syntaxForm3MetaCwdFilter :: [KnownPackage] -> Syntax +syntaxForm3MetaCwdFilter ps = + syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do + guardNamespaceMeta str1 + guardNamespaceCwd str2 + kfilter <- matchComponentKindFilter str3 + return (TargetPackage TargetImplicitCwd pids (Just kfilter)) + where + pids = [ pinfoId | KnownPackage{pinfoId} <- ps ] + render (TargetPackage TargetImplicitCwd _ (Just kfilter)) = + [TargetStringFileStatus3 "" noFileStatus "cwd" (dispF kfilter)] + render _ = [] + +-- | Syntax: :pkg : package name +-- +-- > cabal build :pkg:foo +-- +syntaxForm3MetaNamespacePackage :: [KnownPackage] -> Syntax +syntaxForm3MetaNamespacePackage pinfo = + syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do + guardNamespaceMeta str1 + guardNamespacePackage str2 + guardPackageName str3 + p <- matchPackage pinfo str3 noFileStatus + case p of + KnownPackage{pinfoId} -> + return (TargetPackage TargetExplicitNamed [pinfoId] Nothing) + KnownPackageName pn -> + return (TargetPackageNamed pn Nothing) + where + render (TargetPackage TargetExplicitNamed [p] Nothing) = + [TargetStringFileStatus3 "" noFileStatus "pkg" (dispP p)] + render (TargetPackageNamed pn Nothing) = + [TargetStringFileStatus3 "" noFileStatus "pkg" (dispPN pn)] + render _ = [] + +-- | Syntax: package : 
namespace : component +-- +-- > cabal build foo:lib:foo +-- > cabal build foo/:lib:foo +-- > cabal build foo.cabal:lib:foo +-- +syntaxForm3PackageKindComponent :: [KnownPackage] -> Syntax +syntaxForm3PackageKindComponent ps = + syntaxForm3 render $ \str1 fstatus1 str2 str3 -> do + guardPackage str1 fstatus1 + ckind <- matchComponentKind str2 + guardComponentName str3 + p <- matchPackage ps str1 fstatus1 + case p of + KnownPackage{pinfoId, pinfoComponents} -> + orNoThingIn "package" (display (packageName pinfoId)) $ do + c <- matchComponentKindAndName pinfoComponents ckind str3 + return (TargetComponent pinfoId (cinfoName c) WholeComponent) + KnownPackageName pn -> + let cn = mkComponentName pn ckind (mkUnqualComponentName str3) in + return (TargetComponentUnknown pn (Right cn) WholeComponent) + where + render (TargetComponent p c WholeComponent) = + [TargetStringFileStatus3 (dispP p) noFileStatus (dispCK c) (dispC p c)] + render (TargetComponentUnknown pn (Right c) WholeComponent) = + [TargetStringFileStatus3 (dispPN pn) noFileStatus (dispCK c) (dispC' pn c)] + render _ = [] + +-- | Syntax: package : component : module +-- +-- > cabal build foo:foo:Data.Foo +-- > cabal build foo/:foo:Data.Foo +-- > cabal build foo.cabal:foo:Data.Foo +-- +syntaxForm3PackageComponentModule :: [KnownPackage] -> Syntax +syntaxForm3PackageComponentModule ps = + syntaxForm3 render $ \str1 fstatus1 str2 str3 -> do + guardPackage str1 fstatus1 + guardComponentName str2 + guardModuleName str3 + p <- matchPackage ps str1 fstatus1 + case p of + KnownPackage{pinfoId, pinfoComponents} -> + orNoThingIn "package" (display (packageName pinfoId)) $ do + c <- matchComponentName pinfoComponents str2 + orNoThingIn "component" (cinfoStrName c) $ do + let ms = cinfoModules c + m <- matchModuleName ms str3 + return (TargetComponent pinfoId (cinfoName c) (ModuleTarget m)) + KnownPackageName pn -> do + let cn = mkUnqualComponentName str2 + m <- matchModuleNameUnknown str3 + return (TargetComponentUnknown 
pn (Left cn) (ModuleTarget m)) + where + render (TargetComponent p c (ModuleTarget m)) = + [TargetStringFileStatus3 (dispP p) noFileStatus (dispC p c) (dispM m)] + render (TargetComponentUnknown pn (Left c) (ModuleTarget m)) = + [TargetStringFileStatus3 (dispPN pn) noFileStatus (dispCN c) (dispM m)] + render _ = [] + +-- | Syntax: namespace : component : module +-- +-- > cabal build lib:foo:Data.Foo +-- +syntaxForm3KindComponentModule :: [KnownComponent] -> Syntax +syntaxForm3KindComponentModule cs = + syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do + ckind <- matchComponentKind str1 + guardComponentName str2 + guardModuleName str3 + c <- matchComponentKindAndName cs ckind str2 + orNoThingIn "component" (cinfoStrName c) $ do + let ms = cinfoModules c + m <- matchModuleName ms str3 + return (TargetComponent (cinfoPackageId c) (cinfoName c) + (ModuleTarget m)) + where + render (TargetComponent p c (ModuleTarget m)) = + [TargetStringFileStatus3 (dispCK c) noFileStatus (dispC p c) (dispM m)] + render _ = [] + +-- | Syntax: package : component : filename +-- +-- > cabal build foo:foo:Data/Foo.hs +-- > cabal build foo/:foo:Data/Foo.hs +-- > cabal build foo.cabal:foo:Data/Foo.hs +-- +syntaxForm3PackageComponentFile :: [KnownPackage] -> Syntax +syntaxForm3PackageComponentFile ps = + syntaxForm3 render $ \str1 fstatus1 str2 str3 -> do + guardPackage str1 fstatus1 + guardComponentName str2 + p <- matchPackage ps str1 fstatus1 + case p of + KnownPackage{pinfoId, pinfoComponents} -> + orNoThingIn "package" (display (packageName pinfoId)) $ do + c <- matchComponentName pinfoComponents str2 + orNoThingIn "component" (cinfoStrName c) $ do + (filepath, _) <- matchComponentFile [c] str3 + return (TargetComponent pinfoId (cinfoName c) (FileTarget filepath)) + KnownPackageName pn -> + let cn = mkUnqualComponentName str2 + filepath = str3 in + return (TargetComponentUnknown pn (Left cn) (FileTarget filepath)) + where + render (TargetComponent p c (FileTarget f)) = + 
[TargetStringFileStatus3 (dispP p) noFileStatus (dispC p c) f] + render (TargetComponentUnknown pn (Left c) (FileTarget f)) = + [TargetStringFileStatus3 (dispPN pn) noFileStatus (dispCN c) f] + render _ = [] + +-- | Syntax: namespace : component : filename +-- +-- > cabal build lib:foo:Data/Foo.hs +-- +syntaxForm3KindComponentFile :: [KnownComponent] -> Syntax +syntaxForm3KindComponentFile cs = + syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do + ckind <- matchComponentKind str1 + guardComponentName str2 + c <- matchComponentKindAndName cs ckind str2 + orNoThingIn "component" (cinfoStrName c) $ do + (filepath, _) <- matchComponentFile [c] str3 + return (TargetComponent (cinfoPackageId c) (cinfoName c) + (FileTarget filepath)) + where + render (TargetComponent p c (FileTarget f)) = + [TargetStringFileStatus3 (dispCK c) noFileStatus (dispC p c) f] + render _ = [] + +syntaxForm3NamespacePackageFilter :: [KnownPackage] -> Syntax +syntaxForm3NamespacePackageFilter ps = + syntaxForm3 render $ \str1 _fstatus1 str2 str3 -> do + guardNamespacePackage str1 + guardPackageName str2 + p <- matchPackage ps str2 noFileStatus + kfilter <- matchComponentKindFilter str3 + case p of + KnownPackage{pinfoId} -> + return (TargetPackage TargetExplicitNamed [pinfoId] (Just kfilter)) + KnownPackageName pn -> + return (TargetPackageNamed pn (Just kfilter)) + where + render (TargetPackage TargetExplicitNamed [p] (Just kfilter)) = + [TargetStringFileStatus3 "pkg" noFileStatus (dispP p) (dispF kfilter)] + render (TargetPackageNamed pn (Just kfilter)) = + [TargetStringFileStatus3 "pkg" noFileStatus (dispPN pn) (dispF kfilter)] + render _ = [] + +-- + +syntaxForm4MetaNamespacePackageFilter :: [KnownPackage] -> Syntax +syntaxForm4MetaNamespacePackageFilter ps = + syntaxForm4 render $ \str1 str2 str3 str4 -> do + guardNamespaceMeta str1 + guardNamespacePackage str2 + guardPackageName str3 + p <- matchPackage ps str3 noFileStatus + kfilter <- matchComponentKindFilter str4 + case p of + 
KnownPackage{pinfoId} -> + return (TargetPackage TargetExplicitNamed [pinfoId] (Just kfilter)) + KnownPackageName pn -> + return (TargetPackageNamed pn (Just kfilter)) + where + render (TargetPackage TargetExplicitNamed [p] (Just kfilter)) = + [TargetStringFileStatus4 "" "pkg" (dispP p) (dispF kfilter)] + render (TargetPackageNamed pn (Just kfilter)) = + [TargetStringFileStatus4 "" "pkg" (dispPN pn) (dispF kfilter)] + render _ = [] + +-- | Syntax: :pkg : package : namespace : component +-- +-- > cabal build :pkg:foo:lib:foo +-- +syntaxForm5MetaNamespacePackageKindComponent :: [KnownPackage] -> Syntax +syntaxForm5MetaNamespacePackageKindComponent ps = + syntaxForm5 render $ \str1 str2 str3 str4 str5 -> do + guardNamespaceMeta str1 + guardNamespacePackage str2 + guardPackageName str3 + ckind <- matchComponentKind str4 + guardComponentName str5 + p <- matchPackage ps str3 noFileStatus + case p of + KnownPackage{pinfoId, pinfoComponents} -> + orNoThingIn "package" (display (packageName pinfoId)) $ do + c <- matchComponentKindAndName pinfoComponents ckind str5 + return (TargetComponent pinfoId (cinfoName c) WholeComponent) + KnownPackageName pn -> + let cn = mkComponentName pn ckind (mkUnqualComponentName str5) in + return (TargetComponentUnknown pn (Right cn) WholeComponent) + where + render (TargetComponent p c WholeComponent) = + [TargetStringFileStatus5 "" "pkg" (dispP p) (dispCK c) (dispC p c)] + render (TargetComponentUnknown pn (Right c) WholeComponent) = + [TargetStringFileStatus5 "" "pkg" (dispPN pn) (dispCK c) (dispC' pn c)] + render _ = [] + +-- | Syntax: :pkg : package : namespace : component : module : module +-- +-- > cabal build :pkg:foo:lib:foo:module:Data.Foo +-- +syntaxForm7MetaNamespacePackageKindComponentNamespaceModule + :: [KnownPackage] -> Syntax +syntaxForm7MetaNamespacePackageKindComponentNamespaceModule ps = + syntaxForm7 render $ \str1 str2 str3 str4 str5 str6 str7 -> do + guardNamespaceMeta str1 + guardNamespacePackage str2 + guardPackageName 
str3 + ckind <- matchComponentKind str4 + guardComponentName str5 + guardNamespaceModule str6 + p <- matchPackage ps str3 noFileStatus + case p of + KnownPackage{pinfoId, pinfoComponents} -> + orNoThingIn "package" (display (packageName pinfoId)) $ do + c <- matchComponentKindAndName pinfoComponents ckind str5 + orNoThingIn "component" (cinfoStrName c) $ do + let ms = cinfoModules c + m <- matchModuleName ms str7 + return (TargetComponent pinfoId (cinfoName c) (ModuleTarget m)) + KnownPackageName pn -> do + let cn = mkComponentName pn ckind (mkUnqualComponentName str2) + m <- matchModuleNameUnknown str7 + return (TargetComponentUnknown pn (Right cn) (ModuleTarget m)) + where + render (TargetComponent p c (ModuleTarget m)) = + [TargetStringFileStatus7 "" "pkg" (dispP p) + (dispCK c) (dispC p c) + "module" (dispM m)] + render (TargetComponentUnknown pn (Right c) (ModuleTarget m)) = + [TargetStringFileStatus7 "" "pkg" (dispPN pn) + (dispCK c) (dispC' pn c) + "module" (dispM m)] + render _ = [] + +-- | Syntax: :pkg : package : namespace : component : file : filename +-- +-- > cabal build :pkg:foo:lib:foo:file:Data/Foo.hs +-- +syntaxForm7MetaNamespacePackageKindComponentNamespaceFile + :: [KnownPackage] -> Syntax +syntaxForm7MetaNamespacePackageKindComponentNamespaceFile ps = + syntaxForm7 render $ \str1 str2 str3 str4 str5 str6 str7 -> do + guardNamespaceMeta str1 + guardNamespacePackage str2 + guardPackageName str3 + ckind <- matchComponentKind str4 + guardComponentName str5 + guardNamespaceFile str6 + p <- matchPackage ps str3 noFileStatus + case p of + KnownPackage{pinfoId, pinfoComponents} -> + orNoThingIn "package" (display (packageName pinfoId)) $ do + c <- matchComponentKindAndName pinfoComponents ckind str5 + orNoThingIn "component" (cinfoStrName c) $ do + (filepath,_) <- matchComponentFile [c] str7 + return (TargetComponent pinfoId (cinfoName c) (FileTarget filepath)) + KnownPackageName pn -> + let cn = mkComponentName pn ckind (mkUnqualComponentName str5) + 
filepath = str7 in + return (TargetComponentUnknown pn (Right cn) (FileTarget filepath)) + where + render (TargetComponent p c (FileTarget f)) = + [TargetStringFileStatus7 "" "pkg" (dispP p) + (dispCK c) (dispC p c) + "file" f] + render (TargetComponentUnknown pn (Right c) (FileTarget f)) = + [TargetStringFileStatus7 "" "pkg" (dispPN pn) + (dispCK c) (dispC' pn c) + "file" f] + render _ = [] + + +--------------------------------------- +-- Syntax utils +-- + +type Match1 = String -> FileStatus -> Match TargetSelector +type Match2 = String -> FileStatus -> String + -> Match TargetSelector +type Match3 = String -> FileStatus -> String -> String + -> Match TargetSelector +type Match4 = String -> String -> String -> String + -> Match TargetSelector +type Match5 = String -> String -> String -> String -> String + -> Match TargetSelector +type Match7 = String -> String -> String -> String -> String -> String -> String + -> Match TargetSelector + +syntaxForm1 :: Renderer -> Match1 -> Syntax +syntaxForm2 :: Renderer -> Match2 -> Syntax +syntaxForm3 :: Renderer -> Match3 -> Syntax +syntaxForm4 :: Renderer -> Match4 -> Syntax +syntaxForm5 :: Renderer -> Match5 -> Syntax +syntaxForm7 :: Renderer -> Match7 -> Syntax + +syntaxForm1 render f = + Syntax QL1 match render + where + match = \(TargetStringFileStatus1 str1 fstatus1) -> + f str1 fstatus1 + +syntaxForm2 render f = + Syntax QL2 match render + where + match = \(TargetStringFileStatus2 str1 fstatus1 str2) -> + f str1 fstatus1 str2 + +syntaxForm3 render f = + Syntax QL3 match render + where + match = \(TargetStringFileStatus3 str1 fstatus1 str2 str3) -> + f str1 fstatus1 str2 str3 + +syntaxForm4 render f = + Syntax QLFull match render + where + match (TargetStringFileStatus4 str1 str2 str3 str4) + = f str1 str2 str3 str4 + match _ = mzero + +syntaxForm5 render f = + Syntax QLFull match render + where + match (TargetStringFileStatus5 str1 str2 str3 str4 str5) + = f str1 str2 str3 str4 str5 + match _ = mzero + +syntaxForm7 
render f = + Syntax QLFull match render + where + match (TargetStringFileStatus7 str1 str2 str3 str4 str5 str6 str7) + = f str1 str2 str3 str4 str5 str6 str7 + match _ = mzero + +dispP :: Package p => p -> String +dispP = display . packageName + +dispPN :: PackageName -> String +dispPN = display + +dispC :: PackageId -> ComponentName -> String +dispC = componentStringName . packageName + +dispC' :: PackageName -> ComponentName -> String +dispC' = componentStringName + +dispCN :: UnqualComponentName -> String +dispCN = display + +dispK :: ComponentKind -> String +dispK = showComponentKindShort + +dispCK :: ComponentName -> String +dispCK = dispK . componentKind + +dispF :: ComponentKind -> String +dispF = showComponentKindFilterShort + +dispM :: ModuleName -> String +dispM = display + + +------------------------------- +-- Package and component info +-- + +data KnownTargets = KnownTargets { + knownPackagesAll :: [KnownPackage], + knownPackagesPrimary :: [KnownPackage], + knownPackagesOther :: [KnownPackage], + knownComponentsAll :: [KnownComponent], + knownComponentsPrimary :: [KnownComponent], + knownComponentsOther :: [KnownComponent] + } + deriving Show + +data KnownPackage = + KnownPackage { + pinfoId :: PackageId, + pinfoDirectory :: Maybe (FilePath, FilePath), + pinfoPackageFile :: Maybe (FilePath, FilePath), + pinfoComponents :: [KnownComponent] + } + | KnownPackageName { + pinfoName :: PackageName + } + deriving Show + +data KnownComponent = KnownComponent { + cinfoName :: ComponentName, + cinfoStrName :: ComponentStringName, + cinfoPackageId :: PackageId, + cinfoSrcDirs :: [FilePath], + cinfoModules :: [ModuleName], + cinfoHsFiles :: [FilePath], -- other hs files (like main.hs) + cinfoCFiles :: [FilePath], + cinfoJsFiles :: [FilePath] + } + deriving Show + +type ComponentStringName = String + +knownPackageName :: KnownPackage -> PackageName +knownPackageName KnownPackage{pinfoId} = packageName pinfoId +knownPackageName KnownPackageName{pinfoName} = 
pinfoName + +emptyKnownTargets :: KnownTargets +emptyKnownTargets = KnownTargets [] [] [] [] [] [] + +getKnownTargets :: (Applicative m, Monad m) + => DirActions m + -> [PackageSpecifier (SourcePackage (PackageLocation a))] + -> m KnownTargets +getKnownTargets dirActions@DirActions{..} pkgs = do + pinfo <- mapM (collectKnownPackageInfo dirActions) pkgs + cwd <- getCurrentDirectory + let (ppinfo, opinfo) = selectPrimaryPackage cwd pinfo + return KnownTargets { + knownPackagesAll = pinfo, + knownPackagesPrimary = ppinfo, + knownPackagesOther = opinfo, + knownComponentsAll = allComponentsIn pinfo, + knownComponentsPrimary = allComponentsIn ppinfo, + knownComponentsOther = allComponentsIn opinfo + } + where + selectPrimaryPackage :: FilePath + -> [KnownPackage] + -> ([KnownPackage], [KnownPackage]) + selectPrimaryPackage cwd = partition isPkgDirCwd + where + isPkgDirCwd KnownPackage { pinfoDirectory = Just (dir,_) } + | dir == cwd = True + isPkgDirCwd _ = False + allComponentsIn ps = + [ c | KnownPackage{pinfoComponents} <- ps, c <- pinfoComponents ] + + +collectKnownPackageInfo :: (Applicative m, Monad m) => DirActions m + -> PackageSpecifier (SourcePackage (PackageLocation a)) + -> m KnownPackage +collectKnownPackageInfo _ (NamedPackage pkgname _props) = + return (KnownPackageName pkgname) +collectKnownPackageInfo dirActions@DirActions{..} + (SpecificSourcePackage SourcePackage { + packageDescription = pkg, + packageSource = loc + }) = do + (pkgdir, pkgfile) <- + case loc of + --TODO: local tarballs, remote tarballs etc + LocalUnpackedPackage dir -> do + dirabs <- canonicalizePath dir + dirrel <- makeRelativeToCwd dirActions dirabs + --TODO: ought to get this earlier in project reading + let fileabs = dirabs display (packageName pkg) <.> "cabal" + filerel = dirrel display (packageName pkg) <.> "cabal" + exists <- doesFileExist fileabs + return ( Just (dirabs, dirrel) + , if exists then Just (fileabs, filerel) else Nothing + ) + _ -> return (Nothing, Nothing) + let 
pinfo = + KnownPackage { + pinfoId = packageId pkg, + pinfoDirectory = pkgdir, + pinfoPackageFile = pkgfile, + pinfoComponents = collectKnownComponentInfo + (flattenPackageDescription pkg) + } + return pinfo + + +collectKnownComponentInfo :: PackageDescription -> [KnownComponent] +collectKnownComponentInfo pkg = + [ KnownComponent { + cinfoName = componentName c, + cinfoStrName = componentStringName (packageName pkg) (componentName c), + cinfoPackageId = packageId pkg, + cinfoSrcDirs = ordNub (hsSourceDirs bi), + cinfoModules = ordNub (componentModules c), + cinfoHsFiles = ordNub (componentHsFiles c), + cinfoCFiles = ordNub (cSources bi), + cinfoJsFiles = ordNub (jsSources bi) + } + | c <- pkgComponents pkg + , let bi = componentBuildInfo c ] + + +componentStringName :: PackageName -> ComponentName -> ComponentStringName +componentStringName pkgname CLibName = display pkgname +componentStringName _ (CSubLibName name) = unUnqualComponentName name +componentStringName _ (CFLibName name) = unUnqualComponentName name +componentStringName _ (CExeName name) = unUnqualComponentName name +componentStringName _ (CTestName name) = unUnqualComponentName name +componentStringName _ (CBenchName name) = unUnqualComponentName name + +componentModules :: Component -> [ModuleName] +-- I think it's unlikely users will ask to build a requirement +-- which is not mentioned locally. 
+componentModules (CLib lib) = explicitLibModules lib +componentModules (CFLib flib) = foreignLibModules flib +componentModules (CExe exe) = exeModules exe +componentModules (CTest test) = testModules test +componentModules (CBench bench) = benchmarkModules bench + +componentHsFiles :: Component -> [FilePath] +componentHsFiles (CExe exe) = [modulePath exe] +componentHsFiles (CTest TestSuite { + testInterface = TestSuiteExeV10 _ mainfile + }) = [mainfile] +componentHsFiles (CBench Benchmark { + benchmarkInterface = BenchmarkExeV10 _ mainfile + }) = [mainfile] +componentHsFiles _ = [] + + +------------------------------ +-- Matching meta targets +-- + +guardNamespaceMeta :: String -> Match () +guardNamespaceMeta = guardToken [""] "meta namespace" + +guardMetaAll :: String -> Match () +guardMetaAll = guardToken ["all"] "meta-target 'all'" + +guardNamespacePackage :: String -> Match () +guardNamespacePackage = guardToken ["pkg", "package"] "'pkg' namespace" + +guardNamespaceCwd :: String -> Match () +guardNamespaceCwd = guardToken ["cwd"] "'cwd' namespace" + +guardNamespaceModule :: String -> Match () +guardNamespaceModule = guardToken ["mod", "module"] "'module' namespace" + +guardNamespaceFile :: String -> Match () +guardNamespaceFile = guardToken ["file"] "'file' namespace" + +guardToken :: [String] -> String -> String -> Match () +guardToken tokens msg s + | caseFold s `elem` tokens = increaseConfidence + | otherwise = matchErrorExpected msg s + + +------------------------------ +-- Matching component kinds +-- + +componentKind :: ComponentName -> ComponentKind +componentKind CLibName = LibKind +componentKind (CSubLibName _) = LibKind +componentKind (CFLibName _) = FLibKind +componentKind (CExeName _) = ExeKind +componentKind (CTestName _) = TestKind +componentKind (CBenchName _) = BenchKind + +cinfoKind :: KnownComponent -> ComponentKind +cinfoKind = componentKind . 
cinfoName + +matchComponentKind :: String -> Match ComponentKind +matchComponentKind s + | s' `elem` liblabels = increaseConfidence >> return LibKind + | s' `elem` fliblabels = increaseConfidence >> return FLibKind + | s' `elem` exelabels = increaseConfidence >> return ExeKind + | s' `elem` testlabels = increaseConfidence >> return TestKind + | s' `elem` benchlabels = increaseConfidence >> return BenchKind + | otherwise = matchErrorExpected "component kind" s + where + s' = caseFold s + liblabels = ["lib", "library"] + fliblabels = ["flib", "foreign-library"] + exelabels = ["exe", "executable"] + testlabels = ["tst", "test", "test-suite"] + benchlabels = ["bench", "benchmark"] + +matchComponentKindFilter :: String -> Match ComponentKind +matchComponentKindFilter s + | s' `elem` liblabels = increaseConfidence >> return LibKind + | s' `elem` fliblabels = increaseConfidence >> return FLibKind + | s' `elem` exelabels = increaseConfidence >> return ExeKind + | s' `elem` testlabels = increaseConfidence >> return TestKind + | s' `elem` benchlabels = increaseConfidence >> return BenchKind + | otherwise = matchErrorExpected "component kind filter" s + where + s' = caseFold s + liblabels = ["libs", "libraries"] + fliblabels = ["flibs", "foreign-libraries"] + exelabels = ["exes", "executables"] + testlabels = ["tests", "test-suites"] + benchlabels = ["benches", "benchmarks"] + +showComponentKind :: ComponentKind -> String +showComponentKind LibKind = "library" +showComponentKind FLibKind = "foreign library" +showComponentKind ExeKind = "executable" +showComponentKind TestKind = "test-suite" +showComponentKind BenchKind = "benchmark" + +showComponentKindShort :: ComponentKind -> String +showComponentKindShort LibKind = "lib" +showComponentKindShort FLibKind = "flib" +showComponentKindShort ExeKind = "exe" +showComponentKindShort TestKind = "test" +showComponentKindShort BenchKind = "bench" + +showComponentKindFilterShort :: ComponentKind -> String +showComponentKindFilterShort 
LibKind = "libs" +showComponentKindFilterShort FLibKind = "flibs" +showComponentKindFilterShort ExeKind = "exes" +showComponentKindFilterShort TestKind = "tests" +showComponentKindFilterShort BenchKind = "benchmarks" + + +------------------------------ +-- Matching package targets +-- + +guardPackage :: String -> FileStatus -> Match () +guardPackage str fstatus = + guardPackageName str + <|> guardPackageDir str fstatus + <|> guardPackageFile str fstatus + + +guardPackageName :: String -> Match () +guardPackageName s + | validPackageName s = increaseConfidence + | otherwise = matchErrorExpected "package name" s + +validPackageName :: String -> Bool +validPackageName s = + all validPackageNameChar s + && not (null s) + where + validPackageNameChar c = isAlphaNum c || c == '-' + + +guardPackageDir :: String -> FileStatus -> Match () +guardPackageDir _ (FileStatusExistsDir _) = increaseConfidence +guardPackageDir str _ = matchErrorExpected "package directory" str + + +guardPackageFile :: String -> FileStatus -> Match () +guardPackageFile _ (FileStatusExistsFile file) + | takeExtension file == ".cabal" + = increaseConfidence +guardPackageFile str _ = matchErrorExpected "package .cabal file" str + + +matchPackage :: [KnownPackage] -> String -> FileStatus -> Match KnownPackage +matchPackage pinfo = \str fstatus -> + orNoThingIn "project" "" $ + matchPackageName pinfo str + (matchPackageNameUnknown str + <|> matchPackageDir pinfo str fstatus + <|> matchPackageFile pinfo str fstatus) + + +matchPackageName :: [KnownPackage] -> String -> Match KnownPackage +matchPackageName ps = \str -> do + guard (validPackageName str) + orNoSuchThing "package" str + (map (display . knownPackageName) ps) $ + increaseConfidenceFor $ + matchInexactly caseFold (display . 
knownPackageName) ps str + + +matchPackageNameUnknown :: String -> Match KnownPackage +matchPackageNameUnknown str = do + pn <- matchParse str + unknownMatch (KnownPackageName pn) + + +matchPackageDir :: [KnownPackage] + -> String -> FileStatus -> Match KnownPackage +matchPackageDir ps = \str fstatus -> + case fstatus of + FileStatusExistsDir canondir -> + orNoSuchThing "package directory" str (map (snd . fst) dirs) $ + increaseConfidenceFor $ + fmap snd $ matchExactly (fst . fst) dirs canondir + _ -> mzero + where + dirs = [ ((dabs,drel),p) + | p@KnownPackage{ pinfoDirectory = Just (dabs,drel) } <- ps ] + + +matchPackageFile :: [KnownPackage] -> String -> FileStatus -> Match KnownPackage +matchPackageFile ps = \str fstatus -> do + case fstatus of + FileStatusExistsFile canonfile -> + orNoSuchThing "package .cabal file" str (map (snd . fst) files) $ + increaseConfidenceFor $ + fmap snd $ matchExactly (fst . fst) files canonfile + _ -> mzero + where + files = [ ((fabs,frel),p) + | p@KnownPackage{ pinfoPackageFile = Just (fabs,frel) } <- ps ] + +--TODO: test outcome when dir exists but doesn't match any known one + +--TODO: perhaps need another distinction, vs no such thing, point is the +-- thing is not known, within the project, but could be outside project + + +------------------------------ +-- Matching component targets +-- + + +guardComponentName :: String -> Match () +guardComponentName s + | all validComponentChar s + && not (null s) = increaseConfidence + | otherwise = matchErrorExpected "component name" s + where + validComponentChar c = isAlphaNum c || c == '.' 
+ || c == '_' || c == '-' || c == '\'' + + +matchComponentName :: [KnownComponent] -> String -> Match KnownComponent +matchComponentName cs str = + orNoSuchThing "component" str (map cinfoStrName cs) + $ increaseConfidenceFor + $ matchInexactly caseFold cinfoStrName cs str + + +matchComponentKindAndName :: [KnownComponent] -> ComponentKind -> String + -> Match KnownComponent +matchComponentKindAndName cs ckind str = + orNoSuchThing (showComponentKind ckind ++ " component") str + (map render cs) + $ increaseConfidenceFor + $ matchInexactly (\(ck, cn) -> (ck, caseFold cn)) + (\c -> (cinfoKind c, cinfoStrName c)) + cs + (ckind, str) + where + render c = showComponentKindShort (cinfoKind c) ++ ":" ++ cinfoStrName c + + +------------------------------ +-- Matching module targets +-- + +guardModuleName :: String -> Match () +guardModuleName s = + case simpleParse s :: Maybe ModuleName of + Just _ -> increaseConfidence + _ | all validModuleChar s + && not (null s) -> return () + | otherwise -> matchErrorExpected "module name" s + where + validModuleChar c = isAlphaNum c || c == '.' || c == '_' || c == '\'' + + +matchModuleName :: [ModuleName] -> String -> Match ModuleName +matchModuleName ms str = + orNoSuchThing "module" str (map display ms) + $ increaseConfidenceFor + $ matchInexactly caseFold display ms str + + +matchModuleNameAnd :: [(ModuleName, a)] -> String -> Match (ModuleName, a) +matchModuleNameAnd ms str = + orNoSuchThing "module" str (map (display . fst) ms) + $ increaseConfidenceFor + $ matchInexactly caseFold (display . 
fst) ms str + + +matchModuleNameUnknown :: String -> Match ModuleName +matchModuleNameUnknown str = + expecting "module" str + $ increaseConfidenceFor + $ matchParse str + + +------------------------------ +-- Matching file targets +-- + +matchPackageDirectoryPrefix :: [KnownPackage] -> FileStatus + -> Match (FilePath, KnownPackage) +matchPackageDirectoryPrefix ps (FileStatusExistsFile filepath) = + increaseConfidenceFor $ + matchDirectoryPrefix pkgdirs filepath + where + pkgdirs = [ (dir, p) + | p@KnownPackage { pinfoDirectory = Just (dir,_) } <- ps ] +matchPackageDirectoryPrefix _ _ = mzero + + +matchComponentFile :: [KnownComponent] -> String + -> Match (FilePath, KnownComponent) +matchComponentFile cs str = + orNoSuchThing "file" str [] $ + matchComponentModuleFile cs str + <|> matchComponentOtherFile cs str + + +matchComponentOtherFile :: [KnownComponent] -> String + -> Match (FilePath, KnownComponent) +matchComponentOtherFile cs = + matchFile + [ (file, c) + | c <- cs + , file <- cinfoHsFiles c + ++ cinfoCFiles c + ++ cinfoJsFiles c + ] + + +matchComponentModuleFile :: [KnownComponent] -> String + -> Match (FilePath, KnownComponent) +matchComponentModuleFile cs str = do + matchFile + [ (normalise (d toFilePath m), c) + | c <- cs + , d <- cinfoSrcDirs c + , m <- cinfoModules c + ] + (dropExtension (normalise str)) + +-- utils + +matchFile :: [(FilePath, a)] -> FilePath -> Match (FilePath, a) +matchFile fs = + increaseConfidenceFor + . 
matchInexactly caseFold fst fs + +matchDirectoryPrefix :: [(FilePath, a)] -> FilePath -> Match (FilePath, a) +matchDirectoryPrefix dirs filepath = + tryEach $ + [ (file, x) + | (dir,x) <- dirs + , file <- maybeToList (stripDirectory dir) ] + where + stripDirectory :: FilePath -> Maybe FilePath + stripDirectory dir = + joinPath `fmap` stripPrefix (splitDirectories dir) filepathsplit + + filepathsplit = splitDirectories filepath + + +------------------------------ +-- Matching monad +-- + +-- | A matcher embodies a way to match some input as being some recognised +-- value. In particular it deals with multiple and ambiguous matches. +-- +-- There are various matcher primitives ('matchExactly', 'matchInexactly'), +-- ways to combine matchers ('matchPlus', 'matchPlusShadowing') and finally we +-- can run a matcher against an input using 'findMatch'. +-- +data Match a = NoMatch !Confidence [MatchError] + | Match !MatchClass !Confidence [a] + deriving Show + +-- | The kind of match, inexact or exact. We keep track of this so we can +-- prefer exact over inexact matches. The 'Ord' here is important: we try +-- to maximise this, so 'Exact' is the top value and 'Inexact' the bottom. +-- +data MatchClass = Unknown -- ^ Matches an unknown thing e.g. parses as a package + -- name without it being a specific known package + | Inexact -- ^ Matches a known thing inexactly + -- e.g. matches a known package case insensitively + | Exact -- ^ Exactly matches a known thing, + -- e.g. 
matches a known package case sensitively + deriving (Show, Eq, Ord) + +type Confidence = Int + +data MatchError = MatchErrorExpected String String -- thing got + | MatchErrorNoSuch String String [String] -- thing got alts + | MatchErrorIn String String MatchError -- kind thing + deriving (Show, Eq) + + +instance Functor Match where + fmap _ (NoMatch d ms) = NoMatch d ms + fmap f (Match m d xs) = Match m d (fmap f xs) + +instance Applicative Match where + pure a = Match Exact 0 [a] + (<*>) = ap + +instance Alternative Match where + empty = NoMatch 0 [] + (<|>) = matchPlus + +instance Monad Match where + return = pure + NoMatch d ms >>= _ = NoMatch d ms + Match m d xs >>= f = + -- To understand this, it needs to be read in context with the + -- implementation of 'matchPlus' below + case msum (map f xs) of + Match m' d' xs' -> Match (min m m') (d + d') xs' + -- The minimum match class is the one we keep. The match depth is + -- tracked but not used in the Match case. + + NoMatch d' ms -> NoMatch (d + d') ms + -- Here is where we transfer the depth we were keeping track of in + -- the Match case over to the NoMatch case where it finally gets used. + +instance MonadPlus Match where + mzero = empty + mplus = matchPlus + +() :: Match a -> Match a -> Match a +() = matchPlusShadowing + +infixl 3 + +-- | Combine two matchers. Exact matches are used over inexact matches +-- but if we have multiple exact, or inexact then the we collect all the +-- ambiguous matches. +-- +-- This operator is associative, has unit 'mzero' and is also commutative. +-- +matchPlus :: Match a -> Match a -> Match a +matchPlus a@(Match _ _ _ ) (NoMatch _ _) = a +matchPlus (NoMatch _ _ ) b@(Match _ _ _) = b +matchPlus a@(NoMatch d_a ms_a) b@(NoMatch d_b ms_b) + | d_a > d_b = a -- We only really make use of the depth in the NoMatch case. 
+ | d_a < d_b = b + | otherwise = NoMatch d_a (ms_a ++ ms_b) +matchPlus a@(Match m_a d_a xs_a) b@(Match m_b d_b xs_b) + | m_a > m_b = a -- exact over inexact + | m_a < m_b = b -- exact over inexact + | otherwise = Match m_a (max d_a d_b) (xs_a ++ xs_b) + +-- | Combine two matchers. This is similar to 'matchPlus' with the +-- difference that an exact match from the left matcher shadows any exact +-- match on the right. Inexact matches are still collected however. +-- +-- This operator is associative, has unit 'mzero' and is not commutative. +-- +matchPlusShadowing :: Match a -> Match a -> Match a +matchPlusShadowing a@(Match Exact _ _) _ = a +matchPlusShadowing a b = matchPlus a b + + +------------------------------ +-- Various match primitives +-- + +matchErrorExpected :: String -> String -> Match a +matchErrorExpected thing got = NoMatch 0 [MatchErrorExpected thing got] + +matchErrorNoSuch :: String -> String -> [String] -> Match a +matchErrorNoSuch thing got alts = NoMatch 0 [MatchErrorNoSuch thing got alts] + +expecting :: String -> String -> Match a -> Match a +expecting thing got (NoMatch 0 _) = matchErrorExpected thing got +expecting _ _ m = m + +orNoSuchThing :: String -> String -> [String] -> Match a -> Match a +orNoSuchThing thing got alts (NoMatch 0 _) = matchErrorNoSuch thing got alts +orNoSuchThing _ _ _ m = m + +orNoThingIn :: String -> String -> Match a -> Match a +orNoThingIn kind name (NoMatch n ms) = + NoMatch n [ MatchErrorIn kind name m | m <- ms ] +orNoThingIn _ _ m = m + +increaseConfidence :: Match () +increaseConfidence = Match Exact 1 [()] + +increaseConfidenceFor :: Match a -> Match a +increaseConfidenceFor m = m >>= \r -> increaseConfidence >> return r + +nubMatchesBy :: (a -> a -> Bool) -> Match a -> Match a +nubMatchesBy _ (NoMatch d msgs) = NoMatch d msgs +nubMatchesBy eq (Match m d xs) = Match m d (nubBy eq xs) + +-- | Lift a list of matches to an exact match. 
+-- +exactMatches, inexactMatches :: [a] -> Match a + +exactMatches [] = mzero +exactMatches xs = Match Exact 0 xs + +inexactMatches [] = mzero +inexactMatches xs = Match Inexact 0 xs + +unknownMatch :: a -> Match a +unknownMatch x = Match Unknown 0 [x] + +tryEach :: [a] -> Match a +tryEach = exactMatches + + +------------------------------ +-- Top level match runner +-- + +-- | Given a matcher and a key to look up, use the matcher to find all the +-- possible matches. There may be 'None', a single 'Unambiguous' match or +-- you may have an 'Ambiguous' match with several possibilities. +-- +findMatch :: Match a -> MaybeAmbiguous a +findMatch match = case match of + NoMatch _ msgs -> None msgs + Match _ _ [x] -> Unambiguous x + Match m d [] -> error $ "findMatch: impossible: " ++ show match' + where match' = Match m d [] :: Match () + -- TODO: Maybe use Data.List.NonEmpty inside + -- Match so that this case would be correct + -- by construction? + Match m _ xs -> Ambiguous m xs + +data MaybeAmbiguous a = None [MatchError] + | Unambiguous a + | Ambiguous MatchClass [a] + deriving Show + + +------------------------------ +-- Basic matchers +-- + +-- | A primitive matcher that looks up a value in a finite 'Map'. The +-- value must match exactly. +-- +matchExactly :: Ord k => (a -> k) -> [a] -> (k -> Match a) +matchExactly key xs = + \k -> case Map.lookup k m of + Nothing -> mzero + Just ys -> exactMatches ys + where + m = Map.fromListWith (++) [ (key x, [x]) | x <- xs ] + +-- | A primitive matcher that looks up a value in a finite 'Map'. It checks +-- for an exact or inexact match. We get an inexact match if the match +-- is not exact, but the canonical forms match. It takes a canonicalisation +-- function for this purpose. +-- +-- So for example if we used string case fold as the canonicalisation +-- function, then we would get case insensitive matching (but it will still +-- report an exact match when the case matches too). 
+-- +matchInexactly :: (Ord k, Ord k') => (k -> k') -> (a -> k) + -> [a] -> (k -> Match a) +matchInexactly cannonicalise key xs = + \k -> case Map.lookup k m of + Just ys -> exactMatches ys + Nothing -> case Map.lookup (cannonicalise k) m' of + Just ys -> inexactMatches ys + Nothing -> mzero + where + m = Map.fromListWith (++) [ (key x, [x]) | x <- xs ] + + -- the map of canonicalised keys to groups of inexact matches + m' = Map.mapKeysWith (++) cannonicalise m + +matchParse :: Text a => String -> Match a +matchParse = maybe mzero return . simpleParse + + +------------------------------ +-- Utils +-- + +caseFold :: String -> String +caseFold = lowercase + +-- | Make a 'ComponentName' given an 'UnqualComponentName' and knowing the +-- 'ComponentKind'. We also need the 'PackageName' to distinguish the package's +-- primary library from named private libraries. +-- +mkComponentName :: PackageName + -> ComponentKind + -> UnqualComponentName + -> ComponentName +mkComponentName pkgname ckind ucname = + case ckind of + LibKind + | packageNameToUnqualComponentName pkgname == ucname + -> CLibName + | otherwise -> CSubLibName ucname + FLibKind -> CFLibName ucname + ExeKind -> CExeName ucname + TestKind -> CTestName ucname + BenchKind -> CBenchName ucname + + +------------------------------ +-- Example inputs +-- + +{- +ex1pinfo :: [KnownPackage] +ex1pinfo = + [ addComponent (CExeName (mkUnqualComponentName "foo-exe")) [] ["Data.Foo"] $ + KnownPackage { + pinfoId = PackageIdentifier (mkPackageName "foo") (mkVersion [1]), + pinfoDirectory = Just ("/the/foo", "foo"), + pinfoPackageFile = Just ("/the/foo/foo.cabal", "foo/foo.cabal"), + pinfoComponents = [] + } + , KnownPackage { + pinfoId = PackageIdentifier (mkPackageName "bar") (mkVersion [1]), + pinfoDirectory = Just ("/the/bar", "bar"), + pinfoPackageFile = Just ("/the/bar/bar.cabal", "bar/bar.cabal"), + pinfoComponents = [] + } + ] + where + addComponent n ds ms p = + p { + pinfoComponents = + KnownComponent n 
(componentStringName (pinfoId p) n) + p ds (map mkMn ms) + [] [] [] + : pinfoComponents p + } + + mkMn :: String -> ModuleName + mkMn = ModuleName.fromString +-} +{- +stargets = + [ TargetComponent (CExeName "foo") WholeComponent + , TargetComponent (CExeName "foo") (ModuleTarget (mkMn "Foo")) + , TargetComponent (CExeName "tst") (ModuleTarget (mkMn "Foo")) + ] + where + mkMn :: String -> ModuleName + mkMn = fromJust . simpleParse + +ex_pkgid :: PackageIdentifier +Just ex_pkgid = simpleParse "thelib" +-} + +{- +ex_cs :: [KnownComponent] +ex_cs = + [ (mkC (CExeName "foo") ["src1", "src1/src2"] ["Foo", "Src2.Bar", "Bar"]) + , (mkC (CExeName "tst") ["src1", "test"] ["Foo"]) + ] + where + mkC n ds ms = KnownComponent n (componentStringName n) ds (map mkMn ms) + mkMn :: String -> ModuleName + mkMn = fromJust . simpleParse + pkgid :: PackageIdentifier + Just pkgid = simpleParse "thelib" +-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Targets.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Targets.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Targets.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Targets.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,781 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Targets +-- Copyright : (c) Duncan Coutts 2011 +-- License : BSD-like +-- +-- Maintainer : duncan@community.haskell.org +-- +-- Handling for user-specified targets +----------------------------------------------------------------------------- +module Distribution.Client.Targets ( + -- * User targets + 
UserTarget(..), + readUserTargets, + + -- * Resolving user targets to package specifiers + resolveUserTargets, + + -- ** Detailed interface + UserTargetProblem(..), + readUserTarget, + reportUserTargetProblems, + expandUserTarget, + + PackageTarget(..), + fetchPackageTarget, + readPackageTarget, + + PackageTargetProblem(..), + reportPackageTargetProblems, + + disambiguatePackageTargets, + disambiguatePackageName, + + -- * User constraints + UserQualifier(..), + UserConstraintScope(..), + UserConstraint(..), + userConstraintPackageName, + readUserConstraint, + userToPackageConstraint, + + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Package + ( Package(..), PackageName, unPackageName, mkPackageName + , PackageIdentifier(..), packageName, packageVersion ) +import Distribution.Types.Dependency +import Distribution.Client.Types + ( PackageLocation(..), ResolvedPkgLoc, UnresolvedSourcePackage + , PackageSpecifier(..) ) + +import Distribution.Solver.Types.OptionalStanza +import Distribution.Solver.Types.PackageConstraint +import Distribution.Solver.Types.PackagePath +import Distribution.Solver.Types.PackageIndex (PackageIndex) +import qualified Distribution.Solver.Types.PackageIndex as PackageIndex +import Distribution.Solver.Types.SourcePackage + +import qualified Distribution.Client.World as World +import qualified Codec.Archive.Tar as Tar +import qualified Codec.Archive.Tar.Entry as Tar +import qualified Distribution.Client.Tar as Tar +import Distribution.Client.FetchUtils +import Distribution.Client.Utils ( tryFindPackageDesc ) +import Distribution.Client.GlobalFlags + ( RepoContext(..) 
) + +import Distribution.PackageDescription + ( GenericPackageDescription, parseFlagAssignment, nullFlagAssignment ) +import Distribution.Version + ( nullVersion, thisVersion, anyVersion, isAnyVersion ) +import Distribution.Text + ( Text(..), display ) +import Distribution.Verbosity (Verbosity) +import Distribution.Simple.Utils + ( die', warn, lowercase ) + +import Distribution.PackageDescription.Parsec + ( readGenericPackageDescription, parseGenericPackageDescriptionMaybe ) + +-- import Data.List ( find, nub ) +import Data.Either + ( partitionEithers ) +import qualified Data.Map as Map +import qualified Data.ByteString.Lazy as BS +import qualified Distribution.Client.GZipUtils as GZipUtils +import Control.Monad (mapM) +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.ReadP + ( (+++), (<++) ) +import Distribution.ParseUtils + ( readPToMaybe ) +import System.FilePath + ( takeExtension, dropExtension, takeDirectory, splitPath ) +import System.Directory + ( doesFileExist, doesDirectoryExist ) +import Network.URI + ( URI(..), URIAuth(..), parseAbsoluteURI ) + +-- ------------------------------------------------------------ +-- * User targets +-- ------------------------------------------------------------ + +-- | Various ways that a user may specify a package or package collection. +-- +data UserTarget = + + -- | A partially specified package, identified by name and possibly with + -- an exact version or a version constraint. + -- + -- > cabal install foo + -- > cabal install foo-1.0 + -- > cabal install 'foo < 2' + -- + UserTargetNamed Dependency + + -- | A special virtual package that refers to the collection of packages + -- recorded in the world file that the user specifically installed. + -- + -- > cabal install world + -- + | UserTargetWorld + + -- | A specific package that is unpacked in a local directory, often the + -- current directory. + -- + -- > cabal install . 
+ -- > cabal install ../lib/other + -- + -- * Note: in future, if multiple @.cabal@ files are allowed in a single + -- directory then this will refer to the collection of packages. + -- + | UserTargetLocalDir FilePath + + -- | A specific local unpacked package, identified by its @.cabal@ file. + -- + -- > cabal install foo.cabal + -- > cabal install ../lib/other/bar.cabal + -- + | UserTargetLocalCabalFile FilePath + + -- | A specific package that is available as a local tarball file + -- + -- > cabal install dist/foo-1.0.tar.gz + -- > cabal install ../build/baz-1.0.tar.gz + -- + | UserTargetLocalTarball FilePath + + -- | A specific package that is available as a remote tarball file + -- + -- > cabal install http://code.haskell.org/~user/foo/foo-0.9.tar.gz + -- + | UserTargetRemoteTarball URI + deriving (Show,Eq) + + +-- ------------------------------------------------------------ +-- * Parsing and checking user targets +-- ------------------------------------------------------------ + +readUserTargets :: Verbosity -> [String] -> IO [UserTarget] +readUserTargets verbosity targetStrs = do + (problems, targets) <- liftM partitionEithers + (mapM readUserTarget targetStrs) + reportUserTargetProblems verbosity problems + return targets + + +data UserTargetProblem + = UserTargetUnexpectedFile String + | UserTargetNonexistantFile String + | UserTargetUnexpectedUriScheme String + | UserTargetUnrecognisedUri String + | UserTargetUnrecognised String + | UserTargetBadWorldPkg + deriving Show + +readUserTarget :: String -> IO (Either UserTargetProblem UserTarget) +readUserTarget targetstr = + case testNamedTargets targetstr of + Just (Dependency pkgn verrange) + | pkgn == mkPackageName "world" + -> return $ if verrange == anyVersion + then Right UserTargetWorld + else Left UserTargetBadWorldPkg + Just dep -> return (Right (UserTargetNamed dep)) + Nothing -> do + fileTarget <- testFileTargets targetstr + case fileTarget of + Just target -> return target + Nothing -> + case 
testUriTargets targetstr of + Just target -> return target + Nothing -> return (Left (UserTargetUnrecognised targetstr)) + where + testNamedTargets = readPToMaybe parseDependencyOrPackageId + + testFileTargets filename = do + isDir <- doesDirectoryExist filename + isFile <- doesFileExist filename + parentDirExists <- case takeDirectory filename of + [] -> return False + dir -> doesDirectoryExist dir + let result + | isDir + = Just (Right (UserTargetLocalDir filename)) + + | isFile && extensionIsTarGz filename + = Just (Right (UserTargetLocalTarball filename)) + + | isFile && takeExtension filename == ".cabal" + = Just (Right (UserTargetLocalCabalFile filename)) + + | isFile + = Just (Left (UserTargetUnexpectedFile filename)) + + | parentDirExists + = Just (Left (UserTargetNonexistantFile filename)) + + | otherwise + = Nothing + return result + + testUriTargets str = + case parseAbsoluteURI str of + Just uri@URI { + uriScheme = scheme, + uriAuthority = Just URIAuth { uriRegName = host } + } + | scheme /= "http:" && scheme /= "https:" -> + Just (Left (UserTargetUnexpectedUriScheme targetstr)) + + | null host -> + Just (Left (UserTargetUnrecognisedUri targetstr)) + + | otherwise -> + Just (Right (UserTargetRemoteTarball uri)) + _ -> Nothing + + extensionIsTarGz f = takeExtension f == ".gz" + && takeExtension (dropExtension f) == ".tar" + + parseDependencyOrPackageId :: Parse.ReadP r Dependency + parseDependencyOrPackageId = parse + +++ liftM pkgidToDependency parse + where + pkgidToDependency :: PackageIdentifier -> Dependency + pkgidToDependency p = case packageVersion p of + v | v == nullVersion -> Dependency (packageName p) anyVersion + | otherwise -> Dependency (packageName p) (thisVersion v) + + +reportUserTargetProblems :: Verbosity -> [UserTargetProblem] -> IO () +reportUserTargetProblems verbosity problems = do + case [ target | UserTargetUnrecognised target <- problems ] of + [] -> return () + target -> die' verbosity + $ unlines + [ "Unrecognised target '" 
++ name ++ "'." + | name <- target ] + ++ "Targets can be:\n" + ++ " - package names, e.g. 'pkgname', 'pkgname-1.0.1', 'pkgname < 2.0'\n" + ++ " - the special 'world' target\n" + ++ " - cabal files 'pkgname.cabal' or package directories 'pkgname/'\n" + ++ " - package tarballs 'pkgname.tar.gz' or 'http://example.com/pkgname.tar.gz'" + + case [ () | UserTargetBadWorldPkg <- problems ] of + [] -> return () + _ -> die' verbosity "The special 'world' target does not take any version." + + case [ target | UserTargetNonexistantFile target <- problems ] of + [] -> return () + target -> die' verbosity + $ unlines + [ "The file does not exist '" ++ name ++ "'." + | name <- target ] + + case [ target | UserTargetUnexpectedFile target <- problems ] of + [] -> return () + target -> die' verbosity + $ unlines + [ "Unrecognised file target '" ++ name ++ "'." + | name <- target ] + ++ "File targets can be either package tarballs 'pkgname.tar.gz' " + ++ "or cabal files 'pkgname.cabal'." + + case [ target | UserTargetUnexpectedUriScheme target <- problems ] of + [] -> return () + target -> die' verbosity + $ unlines + [ "URL target not supported '" ++ name ++ "'." + | name <- target ] + ++ "Only 'http://' and 'https://' URLs are supported." + + case [ target | UserTargetUnrecognisedUri target <- problems ] of + [] -> return () + target -> die' verbosity + $ unlines + [ "Unrecognise URL target '" ++ name ++ "'." + | name <- target ] + + +-- ------------------------------------------------------------ +-- * Resolving user targets to package specifiers +-- ------------------------------------------------------------ + +-- | Given a bunch of user-specified targets, try to resolve what it is they +-- refer to. They can either be specific packages (local dirs, tarballs etc) +-- or they can be named packages (with or without version info). 
+-- +resolveUserTargets :: Package pkg + => Verbosity + -> RepoContext + -> FilePath + -> PackageIndex pkg + -> [UserTarget] + -> IO [PackageSpecifier UnresolvedSourcePackage] +resolveUserTargets verbosity repoCtxt worldFile available userTargets = do + + -- given the user targets, get a list of fully or partially resolved + -- package references + packageTargets <- mapM (readPackageTarget verbosity) + =<< mapM (fetchPackageTarget verbosity repoCtxt) . concat + =<< mapM (expandUserTarget verbosity worldFile) userTargets + + -- users are allowed to give package names case-insensitively, so we must + -- disambiguate named package references + let (problems, packageSpecifiers) = + disambiguatePackageTargets available availableExtra packageTargets + + -- use any extra specific available packages to help us disambiguate + availableExtra = [ packageName pkg + | PackageTargetLocation pkg <- packageTargets ] + + reportPackageTargetProblems verbosity problems + + return packageSpecifiers + + +-- ------------------------------------------------------------ +-- * Package targets +-- ------------------------------------------------------------ + +-- | An intermediate between a 'UserTarget' and a resolved 'PackageSpecifier'. +-- Unlike a 'UserTarget', a 'PackageTarget' refers only to a single package. +-- +data PackageTarget pkg = + PackageTargetNamed PackageName [PackageProperty] UserTarget + + -- | A package identified by name, but case insensitively, so it needs + -- to be resolved to the right case-sensitive name. + | PackageTargetNamedFuzzy PackageName [PackageProperty] UserTarget + | PackageTargetLocation pkg + deriving (Show, Functor, Foldable, Traversable) + + +-- ------------------------------------------------------------ +-- * Converting user targets to package targets +-- ------------------------------------------------------------ + +-- | Given a user-specified target, expand it to a bunch of package targets +-- (each of which refers to only one package). 
+-- +expandUserTarget :: Verbosity + -> FilePath + -> UserTarget + -> IO [PackageTarget (PackageLocation ())] +expandUserTarget verbosity worldFile userTarget = case userTarget of + + UserTargetNamed (Dependency name vrange) -> + let props = [ PackagePropertyVersion vrange + | not (isAnyVersion vrange) ] + in return [PackageTargetNamedFuzzy name props userTarget] + + UserTargetWorld -> do + worldPkgs <- World.getContents verbosity worldFile + --TODO: should we warn if there are no world targets? + return [ PackageTargetNamed name props userTarget + | World.WorldPkgInfo (Dependency name vrange) flags <- worldPkgs + , let props = [ PackagePropertyVersion vrange + | not (isAnyVersion vrange) ] + ++ [ PackagePropertyFlags flags + | not (nullFlagAssignment flags) ] ] + + UserTargetLocalDir dir -> + return [ PackageTargetLocation (LocalUnpackedPackage dir) ] + + UserTargetLocalCabalFile file -> do + let dir = takeDirectory file + _ <- tryFindPackageDesc verbosity dir (localPackageError dir) -- just as a check + return [ PackageTargetLocation (LocalUnpackedPackage dir) ] + + UserTargetLocalTarball tarballFile -> + return [ PackageTargetLocation (LocalTarballPackage tarballFile) ] + + UserTargetRemoteTarball tarballURL -> + return [ PackageTargetLocation (RemoteTarballPackage tarballURL ()) ] + +localPackageError :: FilePath -> String +localPackageError dir = + "Error reading local package.\nCouldn't find .cabal file in: " ++ dir + +-- ------------------------------------------------------------ +-- * Fetching and reading package targets +-- ------------------------------------------------------------ + + +-- | Fetch any remote targets so that they can be read. +-- +fetchPackageTarget :: Verbosity + -> RepoContext + -> PackageTarget (PackageLocation ()) + -> IO (PackageTarget ResolvedPkgLoc) +fetchPackageTarget verbosity repoCtxt = traverse $ + fetchPackage verbosity repoCtxt . 
fmap (const Nothing) + + +-- | Given a package target that has been fetched, read the .cabal file. +-- +-- This only affects targets given by location, named targets are unaffected. +-- +readPackageTarget :: Verbosity + -> PackageTarget ResolvedPkgLoc + -> IO (PackageTarget UnresolvedSourcePackage) +readPackageTarget verbosity = traverse modifyLocation + where + modifyLocation location = case location of + + LocalUnpackedPackage dir -> do + pkg <- tryFindPackageDesc verbosity dir (localPackageError dir) >>= + readGenericPackageDescription verbosity + return $ SourcePackage { + packageInfoId = packageId pkg, + packageDescription = pkg, + packageSource = fmap Just location, + packageDescrOverride = Nothing + } + + LocalTarballPackage tarballFile -> + readTarballPackageTarget location tarballFile tarballFile + + RemoteTarballPackage tarballURL tarballFile -> + readTarballPackageTarget location tarballFile (show tarballURL) + + RepoTarballPackage _repo _pkgid _ -> + error "TODO: readPackageTarget RepoTarballPackage" + -- For repo tarballs this info should be obtained from the index. + + RemoteSourceRepoPackage _srcRepo _ -> + error "TODO: readPackageTarget RemoteSourceRepoPackage" + -- This can't happen, because it would have errored out already + -- in fetchPackage, via fetchPackageTarget before it gets to this + -- function. + -- + -- When that is corrected, this will also need to be fixed. 
+ + readTarballPackageTarget location tarballFile tarballOriginalLoc = do + (filename, content) <- extractTarballPackageCabalFile + tarballFile tarballOriginalLoc + case parsePackageDescription' content of + Nothing -> die' verbosity $ "Could not parse the cabal file " + ++ filename ++ " in " ++ tarballFile + Just pkg -> + return $ SourcePackage { + packageInfoId = packageId pkg, + packageDescription = pkg, + packageSource = fmap Just location, + packageDescrOverride = Nothing + } + + extractTarballPackageCabalFile :: FilePath -> String + -> IO (FilePath, BS.ByteString) + extractTarballPackageCabalFile tarballFile tarballOriginalLoc = + either (die' verbosity . formatErr) return + . check + . accumEntryMap + . Tar.filterEntries isCabalFile + . Tar.read + . GZipUtils.maybeDecompress + =<< BS.readFile tarballFile + where + formatErr msg = "Error reading " ++ tarballOriginalLoc ++ ": " ++ msg + + accumEntryMap = Tar.foldlEntries + (\m e -> Map.insert (Tar.entryTarPath e) e m) + Map.empty + + check (Left e) = Left (show e) + check (Right m) = case Map.elems m of + [] -> Left noCabalFile + [file] -> case Tar.entryContent file of + Tar.NormalFile content _ -> Right (Tar.entryPath file, content) + _ -> Left noCabalFile + _files -> Left multipleCabalFiles + where + noCabalFile = "No cabal file found" + multipleCabalFiles = "Multiple cabal files found" + + isCabalFile e = case splitPath (Tar.entryPath e) of + [ _dir, file] -> takeExtension file == ".cabal" + [".", _dir, file] -> takeExtension file == ".cabal" + _ -> False + + parsePackageDescription' :: BS.ByteString -> Maybe GenericPackageDescription + parsePackageDescription' bs = + parseGenericPackageDescriptionMaybe (BS.toStrict bs) + +-- ------------------------------------------------------------ +-- * Checking package targets +-- ------------------------------------------------------------ + +data PackageTargetProblem + = PackageNameUnknown PackageName UserTarget + | PackageNameAmbiguous PackageName [PackageName] 
UserTarget + deriving Show + + +-- | Users are allowed to give package names case-insensitively, so we must +-- disambiguate named package references. +-- +disambiguatePackageTargets :: Package pkg' + => PackageIndex pkg' + -> [PackageName] + -> [PackageTarget pkg] + -> ( [PackageTargetProblem] + , [PackageSpecifier pkg] ) +disambiguatePackageTargets availablePkgIndex availableExtra targets = + partitionEithers (map disambiguatePackageTarget targets) + where + disambiguatePackageTarget packageTarget = case packageTarget of + PackageTargetLocation pkg -> Right (SpecificSourcePackage pkg) + + PackageTargetNamed pkgname props userTarget + | null (PackageIndex.lookupPackageName availablePkgIndex pkgname) + -> Left (PackageNameUnknown pkgname userTarget) + | otherwise -> Right (NamedPackage pkgname props) + + PackageTargetNamedFuzzy pkgname props userTarget -> + case disambiguatePackageName packageNameEnv pkgname of + None -> Left (PackageNameUnknown + pkgname userTarget) + Ambiguous pkgnames -> Left (PackageNameAmbiguous + pkgname pkgnames userTarget) + Unambiguous pkgname' -> Right (NamedPackage pkgname' props) + + -- use any extra specific available packages to help us disambiguate + packageNameEnv :: PackageNameEnv + packageNameEnv = mappend (indexPackageNameEnv availablePkgIndex) + (extraPackageNameEnv availableExtra) + + +-- | Report problems to the user. That is, if there are any problems +-- then raise an exception. +reportPackageTargetProblems :: Verbosity + -> [PackageTargetProblem] -> IO () +reportPackageTargetProblems verbosity problems = do + case [ pkg | PackageNameUnknown pkg originalTarget <- problems + , not (isUserTagetWorld originalTarget) ] of + [] -> return () + pkgs -> die' verbosity $ unlines + [ "There is no package named '" ++ display name ++ "'. " + | name <- pkgs ] + ++ "You may need to run 'cabal update' to get the latest " + ++ "list of available packages." 
+ + case [ (pkg, matches) | PackageNameAmbiguous pkg matches _ <- problems ] of + [] -> return () + ambiguities -> die' verbosity $ unlines + [ "There is no package named '" ++ display name ++ "'. " + ++ (if length matches > 1 + then "However, the following package names exist: " + else "However, the following package name exists: ") + ++ intercalate ", " [ "'" ++ display m ++ "'" | m <- matches] + ++ "." + | (name, matches) <- ambiguities ] + + case [ pkg | PackageNameUnknown pkg UserTargetWorld <- problems ] of + [] -> return () + pkgs -> warn verbosity $ + "The following 'world' packages will be ignored because " + ++ "they refer to packages that cannot be found: " + ++ intercalate ", " (map display pkgs) ++ "\n" + ++ "You can suppress this warning by correcting the world file." + where + isUserTagetWorld UserTargetWorld = True; isUserTagetWorld _ = False + + +-- ------------------------------------------------------------ +-- * Disambiguating package names +-- ------------------------------------------------------------ + +data MaybeAmbiguous a = None | Unambiguous a | Ambiguous [a] + +-- | Given a package name and a list of matching names, figure out +-- which one it might be referring to. If there is an exact +-- case-sensitive match then that's ok (i.e. returned via +-- 'Unambiguous'). If it matches just one package case-insensitively +-- or if it matches multiple packages case-insensitively, in that case +-- the result is 'Ambiguous'. +-- +-- Note: Before cabal 2.2, when only a single package matched +-- case-insensitively it would be considered 'Unambigious'. 
+-- +disambiguatePackageName :: PackageNameEnv + -> PackageName + -> MaybeAmbiguous PackageName +disambiguatePackageName (PackageNameEnv pkgNameLookup) name = + case nub (pkgNameLookup name) of + [] -> None + names -> case find (name==) names of + Just name' -> Unambiguous name' + Nothing -> Ambiguous names + + +newtype PackageNameEnv = PackageNameEnv (PackageName -> [PackageName]) + +instance Monoid PackageNameEnv where + mempty = PackageNameEnv (const []) + mappend = (<>) + +instance Semigroup PackageNameEnv where + PackageNameEnv lookupA <> PackageNameEnv lookupB = + PackageNameEnv (\name -> lookupA name ++ lookupB name) + +indexPackageNameEnv :: PackageIndex pkg -> PackageNameEnv +indexPackageNameEnv pkgIndex = PackageNameEnv pkgNameLookup + where + pkgNameLookup pname = + map fst (PackageIndex.searchByName pkgIndex $ unPackageName pname) + +extraPackageNameEnv :: [PackageName] -> PackageNameEnv +extraPackageNameEnv names = PackageNameEnv pkgNameLookup + where + pkgNameLookup pname = + [ pname' + | let lname = lowercase (unPackageName pname) + , pname' <- names + , lowercase (unPackageName pname') == lname ] + + +-- ------------------------------------------------------------ +-- * Package constraints +-- ------------------------------------------------------------ + +-- | Version of 'Qualifier' that a user may specify on the +-- command line. +data UserQualifier = + -- | Top-level dependency. + UserQualToplevel + + -- | Setup dependency. + | UserQualSetup PackageName + + -- | Executable dependency. + | UserQualExe PackageName PackageName + deriving (Eq, Show, Generic) + +instance Binary UserQualifier + +-- | Version of 'ConstraintScope' that a user may specify on the +-- command line. +data UserConstraintScope = + -- | Scope that applies to the package when it has the specified qualifier. + UserQualified UserQualifier PackageName + + -- | Scope that applies to the package when it has a setup qualifier. 
+ | UserAnySetupQualifier PackageName + + -- | Scope that applies to the package when it has any qualifier. + | UserAnyQualifier PackageName + deriving (Eq, Show, Generic) + +instance Binary UserConstraintScope + +fromUserQualifier :: UserQualifier -> Qualifier +fromUserQualifier UserQualToplevel = QualToplevel +fromUserQualifier (UserQualSetup name) = QualSetup name +fromUserQualifier (UserQualExe name1 name2) = QualExe name1 name2 + +fromUserConstraintScope :: UserConstraintScope -> ConstraintScope +fromUserConstraintScope (UserQualified q pn) = + ScopeQualified (fromUserQualifier q) pn +fromUserConstraintScope (UserAnySetupQualifier pn) = ScopeAnySetupQualifier pn +fromUserConstraintScope (UserAnyQualifier pn) = ScopeAnyQualifier pn + +-- | Version of 'PackageConstraint' that the user can specify on +-- the command line. +data UserConstraint = + UserConstraint UserConstraintScope PackageProperty + deriving (Eq, Show, Generic) + +instance Binary UserConstraint + +userConstraintPackageName :: UserConstraint -> PackageName +userConstraintPackageName (UserConstraint scope _) = scopePN scope + where + scopePN (UserQualified _ pn) = pn + scopePN (UserAnyQualifier pn) = pn + scopePN (UserAnySetupQualifier pn) = pn + +userToPackageConstraint :: UserConstraint -> PackageConstraint +userToPackageConstraint (UserConstraint scope prop) = + PackageConstraint (fromUserConstraintScope scope) prop + +readUserConstraint :: String -> Either String UserConstraint +readUserConstraint str = + case readPToMaybe parse str of + Nothing -> Left msgCannotParse + Just c -> Right c + where + msgCannotParse = + "expected a (possibly qualified) package name followed by a " ++ + "constraint, which is either a version range, 'installed', " ++ + "'source', 'test', 'bench', or flags" + +instance Text UserConstraint where + disp (UserConstraint scope prop) = + dispPackageConstraint $ PackageConstraint (fromUserConstraintScope scope) prop + + parse = + let parseConstraintScope :: Parse.ReadP a 
UserConstraintScope + parseConstraintScope = + do + _ <- Parse.string "any." + pn <- parse + return (UserAnyQualifier pn) + +++ + do + _ <- Parse.string "setup." + pn <- parse + return (UserAnySetupQualifier pn) + +++ + do + -- Qualified name + pn <- parse + (return (UserQualified UserQualToplevel pn) + +++ + do _ <- Parse.string ":setup." + pn2 <- parse + return (UserQualified (UserQualSetup pn) pn2)) + + -- -- TODO: Re-enable parsing of UserQualExe once we decide on a syntax. + -- + -- +++ + -- do _ <- Parse.string ":" + -- pn2 <- parse + -- _ <- Parse.string ":exe." + -- pn3 <- parse + -- return (UserQualExe pn pn2, pn3) + in do + scope <- parseConstraintScope + + -- Package property + let keyword str x = Parse.skipSpaces1 >> Parse.string str >> return x + prop <- ((parse >>= return . PackagePropertyVersion) + +++ + keyword "installed" PackagePropertyInstalled + +++ + keyword "source" PackagePropertySource + +++ + keyword "test" (PackagePropertyStanzas [TestStanzas]) + +++ + keyword "bench" (PackagePropertyStanzas [BenchStanzas])) + -- Note: the parser is left-biased here so that we + -- don't get an ambiguous parse from 'installed', + -- 'source', etc. being regarded as flags. + <++ + (Parse.skipSpaces1 >> parseFlagAssignment + >>= return . 
PackagePropertyFlags) + + -- Result + return (UserConstraint scope prop) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Tar.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Tar.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Tar.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Tar.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,110 @@ +{-# LANGUAGE DeriveFunctor #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Tar +-- Copyright : (c) 2007 Bjorn Bringert, +-- 2008 Andrea Vezzosi, +-- 2008-2009 Duncan Coutts +-- License : BSD3 +-- +-- Maintainer : duncan@community.haskell.org +-- Portability : portable +-- +-- Reading, writing and manipulating \"@.tar@\" archive files. +-- +----------------------------------------------------------------------------- +module Distribution.Client.Tar ( + -- * @tar.gz@ operations + createTarGzFile, + extractTarGzFile, + + -- * Other local utils + buildTreeRefTypeCode, + buildTreeSnapshotTypeCode, + isBuildTreeRefTypeCode, + filterEntries, + filterEntriesM, + entriesToList, + ) where + +import qualified Data.ByteString.Lazy as BS +import qualified Codec.Archive.Tar as Tar +import qualified Codec.Archive.Tar.Entry as Tar +import qualified Codec.Archive.Tar.Check as Tar +import qualified Codec.Compression.GZip as GZip +import qualified Distribution.Client.GZipUtils as GZipUtils + +import Control.Exception (Exception(..), throw) + +-- +-- * High level operations +-- + +createTarGzFile :: FilePath -- ^ Full Tarball path + -> FilePath -- ^ Base directory + -> FilePath -- ^ Directory to archive, relative to base dir + -> IO () +createTarGzFile tar base dir = + BS.writeFile tar . GZip.compress . 
Tar.write =<< Tar.pack base [dir] + +extractTarGzFile :: FilePath -- ^ Destination directory + -> FilePath -- ^ Expected subdir (to check for tarbombs) + -> FilePath -- ^ Tarball + -> IO () +extractTarGzFile dir expected tar = + Tar.unpack dir . Tar.checkTarbomb expected . Tar.read + . GZipUtils.maybeDecompress =<< BS.readFile tar + +instance (Exception a, Exception b) => Exception (Either a b) where + toException (Left e) = toException e + toException (Right e) = toException e + + fromException e = + case fromException e of + Just e' -> Just (Left e') + Nothing -> case fromException e of + Just e' -> Just (Right e') + Nothing -> Nothing + + +-- | Type code for the local build tree reference entry type. We don't use the +-- symbolic link entry type because it allows only 100 ASCII characters for the +-- path. +buildTreeRefTypeCode :: Tar.TypeCode +buildTreeRefTypeCode = 'C' + +-- | Type code for the local build tree snapshot entry type. +buildTreeSnapshotTypeCode :: Tar.TypeCode +buildTreeSnapshotTypeCode = 'S' + +-- | Is this a type code for a build tree reference? +isBuildTreeRefTypeCode :: Tar.TypeCode -> Bool +isBuildTreeRefTypeCode typeCode + | (typeCode == buildTreeRefTypeCode + || typeCode == buildTreeSnapshotTypeCode) = True + | otherwise = False + +filterEntries :: (Tar.Entry -> Bool) -> Tar.Entries e -> Tar.Entries e +filterEntries p = + Tar.foldEntries + (\e es -> if p e then Tar.Next e es else es) + Tar.Done + Tar.Fail + +filterEntriesM :: Monad m => (Tar.Entry -> m Bool) + -> Tar.Entries e -> m (Tar.Entries e) +filterEntriesM p = + Tar.foldEntries + (\entry rest -> do + keep <- p entry + xs <- rest + if keep + then return (Tar.Next entry xs) + else return xs) + (return Tar.Done) + (return . 
Tar.Fail) + +entriesToList :: Exception e => Tar.Entries e -> [Tar.Entry] +entriesToList = Tar.foldEntries (:) [] throw + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Types.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Types.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Types.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Types.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,607 @@ +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE TypeFamilies #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Types +-- Copyright : (c) David Himmelstrup 2005 +-- Duncan Coutts 2011 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Various common data types for the entire cabal-install system +----------------------------------------------------------------------------- +module Distribution.Client.Types where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Package + ( Package(..), HasMungedPackageId(..), HasUnitId(..) 
+ , PackageIdentifier(..), packageVersion, packageName + , PackageInstalled(..), newSimpleUnitId ) +import Distribution.InstalledPackageInfo + ( InstalledPackageInfo, installedComponentId, sourceComponentName ) +import Distribution.PackageDescription + ( FlagAssignment ) +import Distribution.Version + ( VersionRange, nullVersion, thisVersion ) +import Distribution.Types.ComponentId + ( ComponentId ) +import Distribution.Types.MungedPackageId + ( computeCompatPackageId ) +import Distribution.Types.PackageId + ( PackageId ) +import Distribution.Types.AnnotatedId +import Distribution.Types.UnitId + ( UnitId ) +import Distribution.Types.PackageName + ( PackageName, mkPackageName ) +import Distribution.Types.ComponentName + ( ComponentName(..) ) +import Distribution.Types.SourceRepo + ( SourceRepo ) + +import Distribution.Solver.Types.PackageIndex + ( PackageIndex ) +import qualified Distribution.Solver.Types.ComponentDeps as CD +import Distribution.Solver.Types.ComponentDeps + ( ComponentDeps ) +import Distribution.Solver.Types.ConstraintSource +import Distribution.Solver.Types.LabeledPackageConstraint +import Distribution.Solver.Types.OptionalStanza +import Distribution.Solver.Types.PackageConstraint +import Distribution.Solver.Types.PackageFixedDeps +import Distribution.Solver.Types.SourcePackage +import Distribution.Compat.Graph (IsNode(..)) +import qualified Distribution.Compat.ReadP as Parse +import Distribution.ParseUtils (parseOptCommaList) +import Distribution.Simple.Utils (ordNub) +import Distribution.Text (Text(..)) + +import Network.URI (URI(..), URIAuth(..), nullURI) +import Control.Exception + ( Exception, SomeException ) +import qualified Text.PrettyPrint as Disp + + +newtype Username = Username { unUsername :: String } +newtype Password = Password { unPassword :: String } + +-- | This is the information we get from a @00-index.tar.gz@ hackage index. 
+-- +data SourcePackageDb = SourcePackageDb { + packageIndex :: PackageIndex UnresolvedSourcePackage, + packagePreferences :: Map PackageName VersionRange +} + deriving (Eq, Generic) + +instance Binary SourcePackageDb + +-- ------------------------------------------------------------ +-- * Various kinds of information about packages +-- ------------------------------------------------------------ + +-- | Within Cabal the library we no longer have a @InstalledPackageId@ type. +-- That's because it deals with the compilers' notion of a registered library, +-- and those really are libraries not packages. Those are now named units. +-- +-- The package management layer does however deal with installed packages, as +-- whole packages not just as libraries. So we do still need a type for +-- installed package ids. At the moment however we track instaled packages via +-- their primary library, which is a unit id. In future this may change +-- slightly and we may distinguish these two types and have an explicit +-- conversion when we register units with the compiler. +-- +type InstalledPackageId = ComponentId + + +-- | A 'ConfiguredPackage' is a not-yet-installed package along with the +-- total configuration information. The configuration information is total in +-- the sense that it provides all the configuration information and so the +-- final configure process will be independent of the environment. +-- +-- 'ConfiguredPackage' is assumed to not support Backpack. Only the +-- @new-build@ codepath supports Backpack. +-- +data ConfiguredPackage loc = ConfiguredPackage { + confPkgId :: InstalledPackageId, + confPkgSource :: SourcePackage loc, -- package info, including repo + confPkgFlags :: FlagAssignment, -- complete flag assignment for the package + confPkgStanzas :: [OptionalStanza], -- list of enabled optional stanzas for the package + confPkgDeps :: ComponentDeps [ConfiguredId] + -- set of exact dependencies (installed or source). 
+ -- These must be consistent with the 'buildDepends' + -- in the 'PackageDescription' that you'd get by + -- applying the flag assignment and optional stanzas. + } + deriving (Eq, Show, Generic) + +-- | 'HasConfiguredId' indicates data types which have a 'ConfiguredId'. +-- This type class is mostly used to conveniently finesse between +-- 'ElaboratedPackage' and 'ElaboratedComponent'. +-- +instance HasConfiguredId (ConfiguredPackage loc) where + configuredId pkg = ConfiguredId (packageId pkg) (Just CLibName) (confPkgId pkg) + +-- 'ConfiguredPackage' is the legacy codepath, we are guaranteed +-- to never have a nontrivial 'UnitId' +instance PackageFixedDeps (ConfiguredPackage loc) where + depends = fmap (map (newSimpleUnitId . confInstId)) . confPkgDeps + +instance IsNode (ConfiguredPackage loc) where + type Key (ConfiguredPackage loc) = UnitId + nodeKey = newSimpleUnitId . confPkgId + -- TODO: if we update ConfiguredPackage to support order-only + -- dependencies, need to include those here. + -- NB: have to deduplicate, otherwise the planner gets confused + nodeNeighbors = ordNub . CD.flatDeps . depends + +instance (Binary loc) => Binary (ConfiguredPackage loc) + + +-- | A ConfiguredId is a package ID for a configured package. +-- +-- Once we configure a source package we know its UnitId. It is still +-- however useful in lots of places to also know the source ID for the package. +-- We therefore bundle the two. +-- +-- An already installed package of course is also "configured" (all its +-- configuration parameters and dependencies have been specified). 
+data ConfiguredId = ConfiguredId { + confSrcId :: PackageId + , confCompName :: Maybe ComponentName + , confInstId :: ComponentId + } + deriving (Eq, Ord, Generic) + +annotatedIdToConfiguredId :: AnnotatedId ComponentId -> ConfiguredId +annotatedIdToConfiguredId aid = ConfiguredId { + confSrcId = ann_pid aid, + confCompName = Just (ann_cname aid), + confInstId = ann_id aid + } + +instance Binary ConfiguredId + +instance Show ConfiguredId where + show cid = show (confInstId cid) + +instance Package ConfiguredId where + packageId = confSrcId + +instance Package (ConfiguredPackage loc) where + packageId cpkg = packageId (confPkgSource cpkg) + +instance HasMungedPackageId (ConfiguredPackage loc) where + mungedId cpkg = computeCompatPackageId (packageId cpkg) Nothing + +-- Never has nontrivial UnitId +instance HasUnitId (ConfiguredPackage loc) where + installedUnitId = newSimpleUnitId . confPkgId + +instance PackageInstalled (ConfiguredPackage loc) where + installedDepends = CD.flatDeps . depends + +class HasConfiguredId a where + configuredId :: a -> ConfiguredId + +-- NB: This instance is slightly dangerous, in that you'll lose +-- information about the specific UnitId you depended on. +instance HasConfiguredId InstalledPackageInfo where + configuredId ipkg = ConfiguredId (packageId ipkg) + (Just (sourceComponentName ipkg)) + (installedComponentId ipkg) + +-- | Like 'ConfiguredPackage', but with all dependencies guaranteed to be +-- installed already, hence itself ready to be installed. +newtype GenericReadyPackage srcpkg = ReadyPackage srcpkg -- see 'ConfiguredPackage'. 
+ deriving (Eq, Show, Generic, Package, PackageFixedDeps, + HasMungedPackageId, HasUnitId, PackageInstalled, Binary) + +-- Can't newtype derive this +instance IsNode srcpkg => IsNode (GenericReadyPackage srcpkg) where + type Key (GenericReadyPackage srcpkg) = Key srcpkg + nodeKey (ReadyPackage spkg) = nodeKey spkg + nodeNeighbors (ReadyPackage spkg) = nodeNeighbors spkg + +type ReadyPackage = GenericReadyPackage (ConfiguredPackage UnresolvedPkgLoc) + +-- | Convenience alias for 'SourcePackage UnresolvedPkgLoc'. +type UnresolvedSourcePackage = SourcePackage UnresolvedPkgLoc + + +-- ------------------------------------------------------------ +-- * Package specifier +-- ------------------------------------------------------------ + +-- | A fully or partially resolved reference to a package. +-- +data PackageSpecifier pkg = + + -- | A partially specified reference to a package (either source or + -- installed). It is specified by package name and optionally some + -- required properties. Use a dependency resolver to pick a specific + -- package satisfying these properties. + -- + NamedPackage PackageName [PackageProperty] + + -- | A fully specified source package. 
+ -- + | SpecificSourcePackage pkg + deriving (Eq, Show, Functor, Generic) + +instance Binary pkg => Binary (PackageSpecifier pkg) + +pkgSpecifierTarget :: Package pkg => PackageSpecifier pkg -> PackageName +pkgSpecifierTarget (NamedPackage name _) = name +pkgSpecifierTarget (SpecificSourcePackage pkg) = packageName pkg + +pkgSpecifierConstraints :: Package pkg + => PackageSpecifier pkg -> [LabeledPackageConstraint] +pkgSpecifierConstraints (NamedPackage name props) = map toLpc props + where + toLpc prop = LabeledPackageConstraint + (PackageConstraint (scopeToplevel name) prop) + ConstraintSourceUserTarget +pkgSpecifierConstraints (SpecificSourcePackage pkg) = + [LabeledPackageConstraint pc ConstraintSourceUserTarget] + where + pc = PackageConstraint + (ScopeTarget $ packageName pkg) + (PackagePropertyVersion $ thisVersion (packageVersion pkg)) + + +-- ------------------------------------------------------------ +-- * Package locations and repositories +-- ------------------------------------------------------------ + +type UnresolvedPkgLoc = PackageLocation (Maybe FilePath) + +type ResolvedPkgLoc = PackageLocation FilePath + +data PackageLocation local = + + -- | An unpacked package in the given dir, or current dir + LocalUnpackedPackage FilePath + + -- | A package as a tarball that's available as a local tarball + | LocalTarballPackage FilePath + + -- | A package as a tarball from a remote URI + | RemoteTarballPackage URI local + + -- | A package available as a tarball from a repository. + -- + -- It may be from a local repository or from a remote repository, with a + -- locally cached copy. 
ie a package available from hackage + | RepoTarballPackage Repo PackageId local + + -- | A package available from a version control system source repository + | RemoteSourceRepoPackage SourceRepo local + deriving (Show, Functor, Eq, Ord, Generic, Typeable) + +instance Binary local => Binary (PackageLocation local) + +-- note, network-uri-2.6.0.3+ provide a Generic instance but earlier +-- versions do not, so we use manual Binary instances here +instance Binary URI where + put (URI a b c d e) = do put a; put b; put c; put d; put e + get = do !a <- get; !b <- get; !c <- get; !d <- get; !e <- get + return (URI a b c d e) + +instance Binary URIAuth where + put (URIAuth a b c) = do put a; put b; put c + get = do !a <- get; !b <- get; !c <- get; return (URIAuth a b c) + +data RemoteRepo = + RemoteRepo { + remoteRepoName :: String, + remoteRepoURI :: URI, + + -- | Enable secure access? + -- + -- 'Nothing' here represents "whatever the default is"; this is important + -- to allow for a smooth transition from opt-in to opt-out security + -- (once we switch to opt-out, all access to the central Hackage + -- repository should be secure by default) + remoteRepoSecure :: Maybe Bool, + + -- | Root key IDs (for bootstrapping) + remoteRepoRootKeys :: [String], + + -- | Threshold for verification during bootstrapping + remoteRepoKeyThreshold :: Int, + + -- | Normally a repo just specifies an HTTP or HTTPS URI, but as a + -- special case we may know a repo supports both and want to try HTTPS + -- if we can, but still allow falling back to HTTP. + -- + -- This field is not currently stored in the config file, but is filled + -- in automagically for known repos. + remoteRepoShouldTryHttps :: Bool + } + + deriving (Show, Eq, Ord, Generic) + +instance Binary RemoteRepo + +-- | Construct a partial 'RemoteRepo' value to fold the field parser list over. 
+emptyRemoteRepo :: String -> RemoteRepo +emptyRemoteRepo name = RemoteRepo name nullURI Nothing [] 0 False + +-- | Different kinds of repositories +-- +-- NOTE: It is important that this type remains serializable. +data Repo = + -- | Local repositories + RepoLocal { + repoLocalDir :: FilePath + } + + -- | Standard (unsecured) remote repositores + | RepoRemote { + repoRemote :: RemoteRepo + , repoLocalDir :: FilePath + } + + -- | Secure repositories + -- + -- Although this contains the same fields as 'RepoRemote', we use a separate + -- constructor to avoid confusing the two. + -- + -- Not all access to a secure repo goes through the hackage-security + -- library currently; code paths that do not still make use of the + -- 'repoRemote' and 'repoLocalDir' fields directly. + | RepoSecure { + repoRemote :: RemoteRepo + , repoLocalDir :: FilePath + } + deriving (Show, Eq, Ord, Generic) + +instance Binary Repo + +-- | Check if this is a remote repo +isRepoRemote :: Repo -> Bool +isRepoRemote RepoLocal{} = False +isRepoRemote _ = True + +-- | Extract @RemoteRepo@ from @Repo@ if remote. +maybeRepoRemote :: Repo -> Maybe RemoteRepo +maybeRepoRemote (RepoLocal _localDir) = Nothing +maybeRepoRemote (RepoRemote r _localDir) = Just r +maybeRepoRemote (RepoSecure r _localDir) = Just r + +-- ------------------------------------------------------------ +-- * Build results +-- ------------------------------------------------------------ + +-- | A summary of the outcome for building a single package. +-- +type BuildOutcome = Either BuildFailure BuildResult + +-- | A summary of the outcome for building a whole set of packages. 
+-- +type BuildOutcomes = Map UnitId BuildOutcome + +data BuildFailure = PlanningFailed + | DependentFailed PackageId + | DownloadFailed SomeException + | UnpackFailed SomeException + | ConfigureFailed SomeException + | BuildFailed SomeException + | TestsFailed SomeException + | InstallFailed SomeException + deriving (Show, Typeable, Generic) + +instance Exception BuildFailure + +-- Note that the @Maybe InstalledPackageInfo@ is a slight hack: we only +-- the public library's 'InstalledPackageInfo' is stored here, even if +-- there were 'InstalledPackageInfo' from internal libraries. This +-- 'InstalledPackageInfo' is not used anyway, so it makes no difference. +data BuildResult = BuildResult DocsResult TestsResult + (Maybe InstalledPackageInfo) + deriving (Show, Generic) + +data DocsResult = DocsNotTried | DocsFailed | DocsOk + deriving (Show, Generic, Typeable) +data TestsResult = TestsNotTried | TestsOk + deriving (Show, Generic, Typeable) + +instance Binary BuildFailure +instance Binary BuildResult +instance Binary DocsResult +instance Binary TestsResult + +--FIXME: this is a total cheat +instance Binary SomeException where + put _ = return () + get = fail "cannot serialise exceptions" + + +-- ------------------------------------------------------------ +-- * --allow-newer/--allow-older +-- ------------------------------------------------------------ + +-- TODO: When https://github.com/haskell/cabal/issues/4203 gets tackled, +-- it may make sense to move these definitions to the Solver.Types +-- module + +-- | 'RelaxDeps' in the context of upper bounds (i.e. for @--allow-newer@ flag) +newtype AllowNewer = AllowNewer { unAllowNewer :: RelaxDeps } + deriving (Eq, Read, Show, Generic) + +-- | 'RelaxDeps' in the context of lower bounds (i.e. for @--allow-older@ flag) +newtype AllowOlder = AllowOlder { unAllowOlder :: RelaxDeps } + deriving (Eq, Read, Show, Generic) + +-- | Generic data type for policy when relaxing bounds in dependencies. 
+-- Don't use this directly: use 'AllowOlder' or 'AllowNewer' depending +-- on whether or not you are relaxing an lower or upper bound +-- (respectively). +data RelaxDeps = + + -- | Ignore upper (resp. lower) bounds in some (or no) dependencies on the given packages. + -- + -- @RelaxDepsSome []@ is the default, i.e. honor the bounds in all + -- dependencies, never choose versions newer (resp. older) than allowed. + RelaxDepsSome [RelaxedDep] + + -- | Ignore upper (resp. lower) bounds in dependencies on all packages. + -- + -- __Note__: This is should be semantically equivalent to + -- + -- > RelaxDepsSome [RelaxedDep RelaxDepScopeAll RelaxDepModNone RelaxDepSubjectAll] + -- + -- (TODO: consider normalising 'RelaxDeps' and/or 'RelaxedDep') + | RelaxDepsAll + deriving (Eq, Read, Show, Generic) + +-- | Dependencies can be relaxed either for all packages in the install plan, or +-- only for some packages. +data RelaxedDep = RelaxedDep !RelaxDepScope !RelaxDepMod !RelaxDepSubject + deriving (Eq, Read, Show, Generic) + +-- | Specify the scope of a relaxation, i.e. limit which depending +-- packages are allowed to have their version constraints relaxed. 
+data RelaxDepScope = RelaxDepScopeAll + -- ^ Apply relaxation in any package + | RelaxDepScopePackage !PackageName + -- ^ Apply relaxation to in all versions of a package + | RelaxDepScopePackageId !PackageId + -- ^ Apply relaxation to a specific version of a package only + deriving (Eq, Read, Show, Generic) + +-- | Modifier for dependency relaxation +data RelaxDepMod = RelaxDepModNone -- ^ Default semantics + | RelaxDepModCaret -- ^ Apply relaxation only to @^>=@ constraints + deriving (Eq, Read, Show, Generic) + +-- | Express whether to relax bounds /on/ @all@ packages, or a single package +data RelaxDepSubject = RelaxDepSubjectAll + | RelaxDepSubjectPkg !PackageName + deriving (Eq, Ord, Read, Show, Generic) + +instance Text RelaxedDep where + disp (RelaxedDep scope rdmod subj) = case scope of + RelaxDepScopeAll -> Disp.text "all:" Disp.<> modDep + RelaxDepScopePackage p0 -> disp p0 Disp.<> Disp.colon Disp.<> modDep + RelaxDepScopePackageId p0 -> disp p0 Disp.<> Disp.colon Disp.<> modDep + where + modDep = case rdmod of + RelaxDepModNone -> disp subj + RelaxDepModCaret -> Disp.char '^' Disp.<> disp subj + + parse = RelaxedDep <$> scopeP <*> modP <*> parse + where + -- "greedy" choices + scopeP = (pure RelaxDepScopeAll <* Parse.char '*' <* Parse.char ':') + Parse.<++ (pure RelaxDepScopeAll <* Parse.string "all:") + Parse.<++ (RelaxDepScopePackageId <$> pidP <* Parse.char ':') + Parse.<++ (RelaxDepScopePackage <$> parse <* Parse.char ':') + Parse.<++ (pure RelaxDepScopeAll) + + modP = (pure RelaxDepModCaret <* Parse.char '^') + Parse.<++ (pure RelaxDepModNone) + + -- | Stricter 'PackageId' parser which doesn't overlap with 'PackageName' parser + pidP = do + p0 <- parse + when (pkgVersion p0 == nullVersion) Parse.pfail + pure p0 + +instance Text RelaxDepSubject where + disp RelaxDepSubjectAll = Disp.text "all" + disp (RelaxDepSubjectPkg pn) = disp pn + + parse = (pure RelaxDepSubjectAll <* Parse.char '*') Parse.<++ pkgn + where + pkgn = do + pn <- parse + pure (if 
(pn == mkPackageName "all") + then RelaxDepSubjectAll + else RelaxDepSubjectPkg pn) + +instance Text RelaxDeps where + disp rd | not (isRelaxDeps rd) = Disp.text "none" + disp (RelaxDepsSome pkgs) = Disp.fsep . + Disp.punctuate Disp.comma . + map disp $ pkgs + disp RelaxDepsAll = Disp.text "all" + + parse = (const mempty <$> ((Parse.string "none" Parse.+++ + Parse.string "None") <* Parse.eof)) + Parse.<++ (const RelaxDepsAll <$> ((Parse.string "all" Parse.+++ + Parse.string "All" Parse.+++ + Parse.string "*") <* Parse.eof)) + Parse.<++ ( RelaxDepsSome <$> parseOptCommaList parse) + +instance Binary RelaxDeps +instance Binary RelaxDepMod +instance Binary RelaxDepScope +instance Binary RelaxDepSubject +instance Binary RelaxedDep +instance Binary AllowNewer +instance Binary AllowOlder + +-- | Return 'True' if 'RelaxDeps' specifies a non-empty set of relaxations +-- +-- Equivalent to @isRelaxDeps = (/= 'mempty')@ +isRelaxDeps :: RelaxDeps -> Bool +isRelaxDeps (RelaxDepsSome []) = False +isRelaxDeps (RelaxDepsSome (_:_)) = True +isRelaxDeps RelaxDepsAll = True + +-- | 'RelaxDepsAll' is the /absorbing element/ +instance Semigroup RelaxDeps where + -- identity element + RelaxDepsSome [] <> r = r + l@(RelaxDepsSome _) <> RelaxDepsSome [] = l + -- absorbing element + l@RelaxDepsAll <> _ = l + (RelaxDepsSome _) <> r@RelaxDepsAll = r + -- combining non-{identity,absorbing} elements + (RelaxDepsSome a) <> (RelaxDepsSome b) = RelaxDepsSome (a ++ b) + +-- | @'RelaxDepsSome' []@ is the /identity element/ +instance Monoid RelaxDeps where + mempty = RelaxDepsSome [] + mappend = (<>) + +instance Semigroup AllowNewer where + AllowNewer x <> AllowNewer y = AllowNewer (x <> y) + +instance Semigroup AllowOlder where + AllowOlder x <> AllowOlder y = AllowOlder (x <> y) + +instance Monoid AllowNewer where + mempty = AllowNewer mempty + mappend = (<>) + +instance Monoid AllowOlder where + mempty = AllowOlder mempty + mappend = (<>) + +-- 
------------------------------------------------------------ +-- * --write-ghc-environment-file +-- ------------------------------------------------------------ + +-- | Whether 'v2-build' should write a .ghc.environment file after +-- success. Possible values: 'always', 'never', 'ghc8.4.4+' (the +-- default; GHC 8.4.4 is the earliest version that supports +-- '-package-env -'). +data WriteGhcEnvironmentFilesPolicy + = AlwaysWriteGhcEnvironmentFiles + | NeverWriteGhcEnvironmentFiles + | WriteGhcEnvironmentFilesOnlyForGhc844AndNewer + deriving (Eq, Enum, Bounded, Generic, Show) + +instance Binary WriteGhcEnvironmentFilesPolicy diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Update.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Update.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Update.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Update.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,110 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Update +-- Copyright : (c) David Himmelstrup 2005 +-- License : BSD-like +-- +-- Maintainer : lemmih@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- +----------------------------------------------------------------------------- +{-# LANGUAGE RecordWildCards #-} +module Distribution.Client.Update + ( update + ) where + +import Distribution.Simple.Setup + ( fromFlag ) +import Distribution.Client.Compat.Directory + ( setModificationTime ) +import Distribution.Client.Types + ( Repo(..), RemoteRepo(..), maybeRepoRemote ) +import Distribution.Client.HttpUtils + ( DownloadResult(..) 
) +import Distribution.Client.FetchUtils + ( downloadIndex ) +import Distribution.Client.IndexUtils.Timestamp +import Distribution.Client.IndexUtils + ( updateRepoIndexCache, Index(..), writeIndexTimestamp + , currentIndexTimestamp, indexBaseName ) +import Distribution.Client.JobControl + ( newParallelJobControl, spawnJob, collectJob ) +import Distribution.Client.Setup + ( RepoContext(..), UpdateFlags(..) ) +import Distribution.Text + ( display ) +import Distribution.Verbosity + +import Distribution.Simple.Utils + ( writeFileAtomic, warn, notice, noticeNoWrap ) + +import qualified Data.ByteString.Lazy as BS +import Distribution.Client.GZipUtils (maybeDecompress) +import System.FilePath ((<.>), dropExtension) +import Data.Maybe (mapMaybe) +import Data.Time (getCurrentTime) +import Control.Monad + +import qualified Hackage.Security.Client as Sec + +-- | 'update' downloads the package list from all known servers +update :: Verbosity -> UpdateFlags -> RepoContext -> IO () +update verbosity _ repoCtxt | null (repoContextRepos repoCtxt) = do + warn verbosity $ "No remote package servers have been specified. Usually " + ++ "you would have one specified in the config file." +update verbosity updateFlags repoCtxt = do + let repos = repoContextRepos repoCtxt + remoteRepos = mapMaybe maybeRepoRemote repos + case remoteRepos of + [] -> return () + [remoteRepo] -> + notice verbosity $ "Downloading the latest package list from " + ++ remoteRepoName remoteRepo + _ -> notice verbosity . unlines + $ "Downloading the latest package lists from: " + : map (("- " ++) . remoteRepoName) remoteRepos + jobCtrl <- newParallelJobControl (length repos) + mapM_ (spawnJob jobCtrl . 
updateRepo verbosity updateFlags repoCtxt) repos + mapM_ (\_ -> collectJob jobCtrl) repos + +updateRepo :: Verbosity -> UpdateFlags -> RepoContext -> Repo -> IO () +updateRepo verbosity updateFlags repoCtxt repo = do + transport <- repoContextGetTransport repoCtxt + case repo of + RepoLocal{..} -> return () + RepoRemote{..} -> do + downloadResult <- downloadIndex transport verbosity repoRemote repoLocalDir + case downloadResult of + FileAlreadyInCache -> + setModificationTime (indexBaseName repo <.> "tar") =<< getCurrentTime + FileDownloaded indexPath -> do + writeFileAtomic (dropExtension indexPath) . maybeDecompress + =<< BS.readFile indexPath + updateRepoIndexCache verbosity (RepoIndex repoCtxt repo) + RepoSecure{} -> repoContextWithSecureRepo repoCtxt repo $ \repoSecure -> do + let index = RepoIndex repoCtxt repo + -- NB: This may be a nullTimestamp if we've never updated before + current_ts <- currentIndexTimestamp (lessVerbose verbosity) repoCtxt repo + -- NB: always update the timestamp, even if we didn't actually + -- download anything + writeIndexTimestamp index (fromFlag (updateIndexState updateFlags)) + ce <- if repoContextIgnoreExpiry repoCtxt + then Just `fmap` getCurrentTime + else return Nothing + updated <- Sec.uncheckClientErrors $ Sec.checkForUpdates repoSecure ce + -- Update cabal's internal index as well so that it's not out of sync + -- (If all access to the cache goes through hackage-security this can go) + case updated of + Sec.NoUpdates -> + setModificationTime (indexBaseName repo <.> "tar") =<< getCurrentTime + Sec.HasUpdates -> + updateRepoIndexCache verbosity index + -- TODO: This will print multiple times if there are multiple + -- repositories: main problem is we don't have a way of updating + -- a specific repo. Once we implement that, update this. 
+ when (current_ts /= nullTimestamp) $ + noticeNoWrap verbosity $ + "To revert to previous state run:\n" ++ + " cabal update --index-state='" ++ display current_ts ++ "'\n" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Upload.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Upload.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Upload.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Upload.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,222 @@ +module Distribution.Client.Upload (upload, uploadDoc, report) where + +import Distribution.Client.Types ( Username(..), Password(..) + , RemoteRepo(..), maybeRepoRemote ) +import Distribution.Client.HttpUtils + ( HttpTransport(..), remoteRepoTryUpgradeToHttps ) +import Distribution.Client.Setup + ( IsCandidate(..), RepoContext(..) 
) + +import Distribution.Simple.Utils (notice, warn, info, die') +import Distribution.Verbosity (Verbosity) +import Distribution.Text (display) +import Distribution.Client.Config + +import qualified Distribution.Client.BuildReports.Anonymous as BuildReport +import qualified Distribution.Client.BuildReports.Upload as BuildReport + +import Network.URI (URI(uriPath)) +import Network.HTTP (Header(..), HeaderName(..)) + +import System.IO (hFlush, stdout) +import System.IO.Echo (withoutInputEcho) +import System.Exit (exitFailure) +import System.FilePath ((), takeExtension, takeFileName, dropExtension) +import qualified System.FilePath.Posix as FilePath.Posix (()) +import System.Directory +import Control.Monad (forM_, when, foldM) +import Data.Maybe (mapMaybe) +import Data.Char (isSpace) + +type Auth = Maybe (String, String) + +-- > stripExtensions ["tar", "gz"] "foo.tar.gz" +-- Just "foo" +-- > stripExtensions ["tar", "gz"] "foo.gz.tar" +-- Nothing +stripExtensions :: [String] -> FilePath -> Maybe String +stripExtensions exts path = foldM f path (reverse exts) + where + f p e + | takeExtension p == '.':e = Just (dropExtension p) + | otherwise = Nothing + +upload :: Verbosity -> RepoContext + -> Maybe Username -> Maybe Password -> IsCandidate -> [FilePath] + -> IO () +upload verbosity repoCtxt mUsername mPassword isCandidate paths = do + let repos = repoContextRepos repoCtxt + transport <- repoContextGetTransport repoCtxt + targetRepo <- + case [ remoteRepo | Just remoteRepo <- map maybeRepoRemote repos ] of + [] -> die' verbosity "Cannot upload. No remote repositories are configured." + rs -> remoteRepoTryUpgradeToHttps verbosity transport (last rs) + let targetRepoURI = remoteRepoURI targetRepo + rootIfEmpty x = if null x then "/" else x + uploadURI = targetRepoURI { + uriPath = rootIfEmpty (uriPath targetRepoURI) FilePath.Posix. 
+ case isCandidate of + IsCandidate -> "packages/candidates" + IsPublished -> "upload" + } + packageURI pkgid = targetRepoURI { + uriPath = rootIfEmpty (uriPath targetRepoURI) + FilePath.Posix. concat + [ "package/", pkgid + , case isCandidate of + IsCandidate -> "/candidate" + IsPublished -> "" + ] + } + Username username <- maybe promptUsername return mUsername + Password password <- maybe promptPassword return mPassword + let auth = Just (username,password) + forM_ paths $ \path -> do + notice verbosity $ "Uploading " ++ path ++ "... " + case fmap takeFileName (stripExtensions ["tar", "gz"] path) of + Just pkgid -> handlePackage transport verbosity uploadURI + (packageURI pkgid) auth isCandidate path + -- This case shouldn't really happen, since we check in Main that we + -- only pass tar.gz files to upload. + Nothing -> die' verbosity $ "Not a tar.gz file: " ++ path + +uploadDoc :: Verbosity -> RepoContext + -> Maybe Username -> Maybe Password -> IsCandidate -> FilePath + -> IO () +uploadDoc verbosity repoCtxt mUsername mPassword isCandidate path = do + let repos = repoContextRepos repoCtxt + transport <- repoContextGetTransport repoCtxt + targetRepo <- + case [ remoteRepo | Just remoteRepo <- map maybeRepoRemote repos ] of + [] -> die' verbosity $ "Cannot upload. No remote repositories are configured." + rs -> remoteRepoTryUpgradeToHttps verbosity transport (last rs) + let targetRepoURI = remoteRepoURI targetRepo + rootIfEmpty x = if null x then "/" else x + uploadURI = targetRepoURI { + uriPath = rootIfEmpty (uriPath targetRepoURI) + FilePath.Posix. concat + [ "package/", pkgid + , case isCandidate of + IsCandidate -> "/candidate" + IsPublished -> "" + , "/docs" + ] + } + packageUri = targetRepoURI { + uriPath = rootIfEmpty (uriPath targetRepoURI) + FilePath.Posix. 
concat + [ "package/", pkgid + , case isCandidate of + IsCandidate -> "/candidate" + IsPublished -> "" + ] + } + (reverseSuffix, reversePkgid) = break (== '-') + (reverse (takeFileName path)) + pkgid = reverse $ tail reversePkgid + when (reverse reverseSuffix /= "docs.tar.gz" + || null reversePkgid || head reversePkgid /= '-') $ + die' verbosity "Expected a file name matching the pattern -docs.tar.gz" + Username username <- maybe promptUsername return mUsername + Password password <- maybe promptPassword return mPassword + + let auth = Just (username,password) + headers = + [ Header HdrContentType "application/x-tar" + , Header HdrContentEncoding "gzip" + ] + notice verbosity $ "Uploading documentation " ++ path ++ "... " + resp <- putHttpFile transport verbosity uploadURI path auth headers + case resp of + -- Hackage responds with 204 No Content when docs are uploaded + -- successfully. + (code,_) | code `elem` [200,204] -> do + notice verbosity $ okMessage packageUri + (code,err) -> do + notice verbosity $ "Error uploading documentation " + ++ path ++ ": " + ++ "http code " ++ show code ++ "\n" + ++ err + exitFailure + where + okMessage packageUri = case isCandidate of + IsCandidate -> + "Documentation successfully uploaded for package candidate. " + ++ "You can now preview the result at '" ++ show packageUri + ++ "'. To upload non-candidate documentation, use 'cabal upload --publish'." + IsPublished -> + "Package documentation successfully published. You can now view it at '" + ++ show packageUri ++ "'." 
+ + +promptUsername :: IO Username +promptUsername = do + putStr "Hackage username: " + hFlush stdout + fmap Username getLine + +promptPassword :: IO Password +promptPassword = do + putStr "Hackage password: " + hFlush stdout + -- save/restore the terminal echoing status (no echoing for entering the password) + passwd <- withoutInputEcho $ fmap Password getLine + putStrLn "" + return passwd + +report :: Verbosity -> RepoContext -> Maybe Username -> Maybe Password -> IO () +report verbosity repoCtxt mUsername mPassword = do + Username username <- maybe promptUsername return mUsername + Password password <- maybe promptPassword return mPassword + let auth = (username, password) + repos = repoContextRepos repoCtxt + remoteRepos = mapMaybe maybeRepoRemote repos + forM_ remoteRepos $ \remoteRepo -> + do dotCabal <- getCabalDir + let srcDir = dotCabal "reports" remoteRepoName remoteRepo + -- We don't want to bomb out just because we haven't built any packages + -- from this repo yet. + srcExists <- doesDirectoryExist srcDir + when srcExists $ do + contents <- getDirectoryContents srcDir + forM_ (filter (\c -> takeExtension c ==".log") contents) $ \logFile -> + do inp <- readFile (srcDir logFile) + let (reportStr, buildLog) = read inp :: (String,String) -- TODO: eradicateNoParse + case BuildReport.parse reportStr of + Left errs -> warn verbosity $ "Errors: " ++ errs -- FIXME + Right report' -> + do info verbosity $ "Uploading report for " + ++ display (BuildReport.package report') + BuildReport.uploadReports verbosity repoCtxt auth + (remoteRepoURI remoteRepo) [(report', Just buildLog)] + return () + +handlePackage :: HttpTransport -> Verbosity -> URI -> URI -> Auth + -> IsCandidate -> FilePath -> IO () +handlePackage transport verbosity uri packageUri auth isCandidate path = + do resp <- postHttpFile transport verbosity uri path auth + case resp of + (code,warnings) | code `elem` [200, 204] -> + notice verbosity $ okMessage isCandidate ++ + if null warnings then "" else 
"\n" ++ formatWarnings (trim warnings) + (code,err) -> do + notice verbosity $ "Error uploading " ++ path ++ ": " + ++ "http code " ++ show code ++ "\n" + ++ err + exitFailure + where + okMessage IsCandidate = + "Package successfully uploaded as candidate. " + ++ "You can now preview the result at '" ++ show packageUri + ++ "'. To publish the candidate, use 'cabal upload --publish'." + okMessage IsPublished = + "Package successfully published. You can now view it at '" + ++ show packageUri ++ "'." + +formatWarnings :: String -> String +formatWarnings x = "Warnings:\n" ++ (unlines . map ("- " ++) . lines) x + +-- Trim +trim :: String -> String +trim = f . f + where f = reverse . dropWhile isSpace diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Utils/Assertion.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Utils/Assertion.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Utils/Assertion.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Utils/Assertion.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,18 @@ +{-# LANGUAGE CPP #-} +module Distribution.Client.Utils.Assertion (expensiveAssert) where + +#ifdef DEBUG_EXPENSIVE_ASSERTIONS +import Control.Exception (assert) +import Distribution.Compat.Stack +#endif + +-- | Like 'assert', but only enabled with -fdebug-expensive-assertions. This +-- function can be used for expensive assertions that should only be turned on +-- during testing or debugging. 
+#ifdef DEBUG_EXPENSIVE_ASSERTIONS +expensiveAssert :: WithCallStack (Bool -> a -> a) +expensiveAssert = assert +#else +expensiveAssert :: Bool -> a -> a +expensiveAssert _ = id +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Utils/Json.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Utils/Json.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Utils/Json.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Utils/Json.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,225 @@ +{-# LANGUAGE OverloadedStrings #-} + +-- | Minimal JSON / RFC 7159 support +-- +-- The API is heavily inspired by @aeson@'s API but puts emphasis on +-- simplicity rather than performance. The 'ToJSON' instances are +-- intended to have an encoding compatible with @aeson@'s encoding. +-- +module Distribution.Client.Utils.Json + ( Value(..) + , Object, object, Pair, (.=) + , encodeToString + , encodeToBuilder + , ToJSON(toJSON) + ) + where + +import Data.Char +import Data.Int +import Data.String +import Data.Word +import Data.List +import Data.Monoid + +import Data.ByteString.Builder (Builder) +import qualified Data.ByteString.Builder as BB + +-- TODO: We may want to replace 'String' with 'Text' or 'ByteString' + +-- | A JSON value represented as a Haskell value. +data Value = Object !Object + | Array [Value] + | String String + | Number !Double + | Bool !Bool + | Null + deriving (Eq, Read, Show) + +-- | A key\/value pair for an 'Object' +type Pair = (String, Value) + +-- | A JSON \"object\" (key/value map). +type Object = [Pair] + +infixr 8 .= + +-- | A key-value pair for encoding a JSON object. +(.=) :: ToJSON v => String -> v -> Pair +k .= v = (k, toJSON v) + +-- | Create a 'Value' from a list of name\/value 'Pair's. 
+object :: [Pair] -> Value +object = Object + +instance IsString Value where + fromString = String + + +-- | A type that can be converted to JSON. +class ToJSON a where + -- | Convert a Haskell value to a JSON-friendly intermediate type. + toJSON :: a -> Value + +instance ToJSON () where + toJSON () = Array [] + +instance ToJSON Value where + toJSON = id + +instance ToJSON Bool where + toJSON = Bool + +instance ToJSON a => ToJSON [a] where + toJSON = Array . map toJSON + +instance ToJSON a => ToJSON (Maybe a) where + toJSON Nothing = Null + toJSON (Just a) = toJSON a + +instance (ToJSON a,ToJSON b) => ToJSON (a,b) where + toJSON (a,b) = Array [toJSON a, toJSON b] + +instance (ToJSON a,ToJSON b,ToJSON c) => ToJSON (a,b,c) where + toJSON (a,b,c) = Array [toJSON a, toJSON b, toJSON c] + +instance (ToJSON a,ToJSON b,ToJSON c, ToJSON d) => ToJSON (a,b,c,d) where + toJSON (a,b,c,d) = Array [toJSON a, toJSON b, toJSON c, toJSON d] + +instance ToJSON Float where + toJSON = Number . realToFrac + +instance ToJSON Double where + toJSON = Number + +instance ToJSON Int where toJSON = Number . realToFrac +instance ToJSON Int8 where toJSON = Number . realToFrac +instance ToJSON Int16 where toJSON = Number . realToFrac +instance ToJSON Int32 where toJSON = Number . realToFrac + +instance ToJSON Word where toJSON = Number . realToFrac +instance ToJSON Word8 where toJSON = Number . realToFrac +instance ToJSON Word16 where toJSON = Number . realToFrac +instance ToJSON Word32 where toJSON = Number . realToFrac + +-- | Possibly lossy due to conversion to 'Double' +instance ToJSON Int64 where toJSON = Number . realToFrac + +-- | Possibly lossy due to conversion to 'Double' +instance ToJSON Word64 where toJSON = Number . realToFrac + +-- | Possibly lossy due to conversion to 'Double' +instance ToJSON Integer where toJSON = Number . 
fromInteger + +------------------------------------------------------------------------------ +-- 'BB.Builder'-based encoding + +-- | Serialise value as JSON/UTF8-encoded 'Builder' +encodeToBuilder :: ToJSON a => a -> Builder +encodeToBuilder = encodeValueBB . toJSON + +encodeValueBB :: Value -> Builder +encodeValueBB jv = case jv of + Bool True -> "true" + Bool False -> "false" + Null -> "null" + Number n + | isNaN n || isInfinite n -> encodeValueBB Null + | Just i <- doubleToInt64 n -> BB.int64Dec i + | otherwise -> BB.doubleDec n + Array a -> encodeArrayBB a + String s -> encodeStringBB s + Object o -> encodeObjectBB o + +encodeArrayBB :: [Value] -> Builder +encodeArrayBB [] = "[]" +encodeArrayBB jvs = BB.char8 '[' <> go jvs <> BB.char8 ']' + where + go = Data.Monoid.mconcat . intersperse (BB.char8 ',') . map encodeValueBB + +encodeObjectBB :: Object -> Builder +encodeObjectBB [] = "{}" +encodeObjectBB jvs = BB.char8 '{' <> go jvs <> BB.char8 '}' + where + go = Data.Monoid.mconcat . intersperse (BB.char8 ',') . map encPair + encPair (l,x) = encodeStringBB l <> BB.char8 ':' <> encodeValueBB x + +encodeStringBB :: String -> Builder +encodeStringBB str = BB.char8 '"' <> go str <> BB.char8 '"' + where + go = BB.stringUtf8 . escapeString + +------------------------------------------------------------------------------ +-- 'String'-based encoding + +-- | Serialise value as JSON-encoded Unicode 'String' +encodeToString :: ToJSON a => a -> String +encodeToString jv = encodeValue (toJSON jv) [] + +encodeValue :: Value -> ShowS +encodeValue jv = case jv of + Bool b -> showString (if b then "true" else "false") + Null -> showString "null" + Number n + | isNaN n || isInfinite n -> encodeValue Null + | Just i <- doubleToInt64 n -> shows i + | otherwise -> shows n + Array a -> encodeArray a + String s -> encodeString s + Object o -> encodeObject o + +encodeArray :: [Value] -> ShowS +encodeArray [] = showString "[]" +encodeArray jvs = ('[':) . go jvs . 
(']':) + where + go [] = id + go [x] = encodeValue x + go (x:xs) = encodeValue x . (',':) . go xs + +encodeObject :: Object -> ShowS +encodeObject [] = showString "{}" +encodeObject jvs = ('{':) . go jvs . ('}':) + where + go [] = id + go [(l,x)] = encodeString l . (':':) . encodeValue x + go ((l,x):lxs) = encodeString l . (':':) . encodeValue x . (',':) . go lxs + +encodeString :: String -> ShowS +encodeString str = ('"':) . showString (escapeString str) . ('"':) + +------------------------------------------------------------------------------ +-- helpers + +-- | Try to convert 'Double' into 'Int64', return 'Nothing' if not +-- representable loss-free as integral 'Int64' value. +doubleToInt64 :: Double -> Maybe Int64 +doubleToInt64 x + | fromInteger x' == x + , x' <= toInteger (maxBound :: Int64) + , x' >= toInteger (minBound :: Int64) + = Just (fromIntegral x') + | otherwise = Nothing + where + x' = round x + +-- | Minimally escape a 'String' in accordance with RFC 7159, "7. Strings" +escapeString :: String -> String +escapeString s + | not (any needsEscape s) = s + | otherwise = escape s + where + escape [] = [] + escape (x:xs) = case x of + '\\' -> '\\':'\\':escape xs + '"' -> '\\':'"':escape xs + '\b' -> '\\':'b':escape xs + '\f' -> '\\':'f':escape xs + '\n' -> '\\':'n':escape xs + '\r' -> '\\':'r':escape xs + '\t' -> '\\':'t':escape xs + c | ord c < 0x10 -> '\\':'u':'0':'0':'0':intToDigit (ord c):escape xs + | ord c < 0x20 -> '\\':'u':'0':'0':'1':intToDigit (ord c - 0x10):escape xs + | otherwise -> c : escape xs + + -- unescaped = %x20-21 / %x23-5B / %x5D-10FFFF + needsEscape c = ord c < 0x20 || c `elem` ['\\','"'] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Utils.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Utils.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Utils.hs 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Utils.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,358 @@ +{-# LANGUAGE ForeignFunctionInterface, CPP #-} + +module Distribution.Client.Utils ( MergeResult(..) + , mergeBy, duplicates, duplicatesBy + , readMaybe + , inDir, withEnv, withEnvOverrides + , logDirChange, withExtraPathEnv + , determineNumJobs, numberOfProcessors + , removeExistingFile + , withTempFileName + , makeAbsoluteToCwd + , makeRelativeToCwd, makeRelativeToDir + , makeRelativeCanonical + , filePathToByteString + , byteStringToFilePath, tryCanonicalizePath + , canonicalizePathNoThrow + , moreRecentFile, existsAndIsMoreRecentThan + , tryFindAddSourcePackageDesc + , tryFindPackageDesc + , relaxEncodingErrors + , ProgressPhase (..) + , progressMessage) + where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Compat.Environment +import Distribution.Compat.Exception ( catchIO ) +import Distribution.Compat.Time ( getModTime ) +import Distribution.Simple.Setup ( Flag(..) ) +import Distribution.Verbosity +import Distribution.Simple.Utils ( die', findPackageDesc, noticeNoWrap ) +import qualified Data.ByteString.Lazy as BS +import Data.Bits + ( (.|.), shiftL, shiftR ) +import System.FilePath +import Control.Monad + ( mapM, mapM_, zipWithM_ ) +import Data.List + ( groupBy ) +import Foreign.C.Types ( CInt(..) 
) +import qualified Control.Exception as Exception + ( finally, bracket ) +import System.Directory + ( canonicalizePath, doesFileExist, getCurrentDirectory + , removeFile, setCurrentDirectory ) +import System.IO + ( Handle, hClose, openTempFile + , hGetEncoding, hSetEncoding + ) +import System.IO.Unsafe ( unsafePerformIO ) + +import GHC.IO.Encoding + ( recover, TextEncoding(TextEncoding) ) +import GHC.IO.Encoding.Failure + ( recoverEncode, CodingFailureMode(TransliterateCodingFailure) ) + +#if defined(mingw32_HOST_OS) || MIN_VERSION_directory(1,2,3) +import qualified System.Directory as Dir +import qualified System.IO.Error as IOError +#endif + +-- | Generic merging utility. For sorted input lists this is a full outer join. +-- +mergeBy :: (a -> b -> Ordering) -> [a] -> [b] -> [MergeResult a b] +mergeBy cmp = merge + where + merge [] ys = [ OnlyInRight y | y <- ys] + merge xs [] = [ OnlyInLeft x | x <- xs] + merge (x:xs) (y:ys) = + case x `cmp` y of + GT -> OnlyInRight y : merge (x:xs) ys + EQ -> InBoth x y : merge xs ys + LT -> OnlyInLeft x : merge xs (y:ys) + +data MergeResult a b = OnlyInLeft a | InBoth a b | OnlyInRight b + +duplicates :: Ord a => [a] -> [[a]] +duplicates = duplicatesBy compare + +duplicatesBy :: (a -> a -> Ordering) -> [a] -> [[a]] +duplicatesBy cmp = filter moreThanOne . groupBy eq . sortBy cmp + where + eq a b = case cmp a b of + EQ -> True + _ -> False + moreThanOne (_:_:_) = True + moreThanOne _ = False + +-- | Like 'removeFile', but does not throw an exception when the file does not +-- exist. +removeExistingFile :: FilePath -> IO () +removeExistingFile path = do + exists <- doesFileExist path + when exists $ + removeFile path + +-- | A variant of 'withTempFile' that only gives us the file name, and while +-- it will clean up the file afterwards, it's lenient if the file is +-- moved\/deleted. 
+-- +withTempFileName :: FilePath + -> String + -> (FilePath -> IO a) -> IO a +withTempFileName tmpDir template action = + Exception.bracket + (openTempFile tmpDir template) + (\(name, _) -> removeExistingFile name) + (\(name, h) -> hClose h >> action name) + +-- | Executes the action in the specified directory. +-- +-- Warning: This operation is NOT thread-safe, because current +-- working directory is a process-global concept. +inDir :: Maybe FilePath -> IO a -> IO a +inDir Nothing m = m +inDir (Just d) m = do + old <- getCurrentDirectory + setCurrentDirectory d + m `Exception.finally` setCurrentDirectory old + +-- | Executes the action with an environment variable set to some +-- value. +-- +-- Warning: This operation is NOT thread-safe, because current +-- environment is a process-global concept. +withEnv :: String -> String -> IO a -> IO a +withEnv k v m = do + mb_old <- lookupEnv k + setEnv k v + m `Exception.finally` (case mb_old of + Nothing -> unsetEnv k + Just old -> setEnv k old) + +-- | Executes the action with a list of environment variables and +-- corresponding overrides, where +-- +-- * @'Just' v@ means \"set the environment variable's value to @v@\". +-- * 'Nothing' means \"unset the environment variable\". +-- +-- Warning: This operation is NOT thread-safe, because current +-- environment is a process-global concept. +withEnvOverrides :: [(String, Maybe FilePath)] -> IO a -> IO a +withEnvOverrides overrides m = do + mb_olds <- mapM lookupEnv envVars + mapM_ (uncurry update) overrides + m `Exception.finally` zipWithM_ update envVars mb_olds + where + envVars :: [String] + envVars = map fst overrides + + update :: String -> Maybe FilePath -> IO () + update var Nothing = unsetEnv var + update var (Just val) = setEnv var val + +-- | Executes the action, increasing the PATH environment +-- in some way +-- +-- Warning: This operation is NOT thread-safe, because the +-- environment variables are a process-global concept. 
+withExtraPathEnv :: [FilePath] -> IO a -> IO a +withExtraPathEnv paths m = do + oldPathSplit <- getSearchPath + let newPath = mungePath $ intercalate [searchPathSeparator] (paths ++ oldPathSplit) + oldPath = mungePath $ intercalate [searchPathSeparator] oldPathSplit + -- TODO: This is a horrible hack to work around the fact that + -- setEnv can't take empty values as an argument + mungePath p | p == "" = "/dev/null" + | otherwise = p + setEnv "PATH" newPath + m `Exception.finally` setEnv "PATH" oldPath + +-- | Log directory change in 'make' compatible syntax +logDirChange :: (String -> IO ()) -> Maybe FilePath -> IO a -> IO a +logDirChange _ Nothing m = m +logDirChange l (Just d) m = do + l $ "cabal: Entering directory '" ++ d ++ "'\n" + m `Exception.finally` + (l $ "cabal: Leaving directory '" ++ d ++ "'\n") + +foreign import ccall "getNumberOfProcessors" c_getNumberOfProcessors :: IO CInt + +-- The number of processors is not going to change during the duration of the +-- program, so unsafePerformIO is safe here. +numberOfProcessors :: Int +numberOfProcessors = fromEnum $ unsafePerformIO c_getNumberOfProcessors + +-- | Determine the number of jobs to use given the value of the '-j' flag. +determineNumJobs :: Flag (Maybe Int) -> Int +determineNumJobs numJobsFlag = + case numJobsFlag of + NoFlag -> 1 + Flag Nothing -> numberOfProcessors + Flag (Just n) -> n + +-- | Given a relative path, make it absolute relative to the current +-- directory. Absolute paths are returned unmodified. +makeAbsoluteToCwd :: FilePath -> IO FilePath +makeAbsoluteToCwd path | isAbsolute path = return path + | otherwise = do cwd <- getCurrentDirectory + return $! cwd path + +-- | Given a path (relative or absolute), make it relative to the current +-- directory, including using @../..@ if necessary. 
+makeRelativeToCwd :: FilePath -> IO FilePath +makeRelativeToCwd path = + makeRelativeCanonical <$> canonicalizePath path <*> getCurrentDirectory + +-- | Given a path (relative or absolute), make it relative to the given +-- directory, including using @../..@ if necessary. +makeRelativeToDir :: FilePath -> FilePath -> IO FilePath +makeRelativeToDir path dir = + makeRelativeCanonical <$> canonicalizePath path <*> canonicalizePath dir + +-- | Given a canonical absolute path and canonical absolute dir, make the path +-- relative to the directory, including using @../..@ if necessary. Returns +-- the original absolute path if it is not on the same drive as the given dir. +makeRelativeCanonical :: FilePath -> FilePath -> FilePath +makeRelativeCanonical path dir + | takeDrive path /= takeDrive dir = path + | otherwise = go (splitPath path) (splitPath dir) + where + go (p:ps) (d:ds) | p == d = go ps ds + go [] [] = "./" + go ps ds = joinPath (replicate (length ds) ".." ++ ps) + +-- | Convert a 'FilePath' to a lazy 'ByteString'. Each 'Char' is +-- encoded as a little-endian 'Word32'. +filePathToByteString :: FilePath -> BS.ByteString +filePathToByteString p = + BS.pack $ foldr conv [] codepts + where + codepts :: [Word32] + codepts = map (fromIntegral . ord) p + + conv :: Word32 -> [Word8] -> [Word8] + conv w32 rest = b0:b1:b2:b3:rest + where + b0 = fromIntegral $ w32 + b1 = fromIntegral $ w32 `shiftR` 8 + b2 = fromIntegral $ w32 `shiftR` 16 + b3 = fromIntegral $ w32 `shiftR` 24 + +-- | Reverse operation to 'filePathToByteString'. +byteStringToFilePath :: BS.ByteString -> FilePath +byteStringToFilePath bs | bslen `mod` 4 /= 0 = unexpected + | otherwise = go 0 + where + unexpected = "Distribution.Client.Utils.byteStringToFilePath: unexpected" + bslen = BS.length bs + + go i | i == bslen = [] + | otherwise = (chr . fromIntegral $ w32) : go (i+4) + where + w32 :: Word32 + w32 = b0 .|. (b1 `shiftL` 8) .|. (b2 `shiftL` 16) .|. 
(b3 `shiftL` 24) + b0 = fromIntegral $ BS.index bs i + b1 = fromIntegral $ BS.index bs (i + 1) + b2 = fromIntegral $ BS.index bs (i + 2) + b3 = fromIntegral $ BS.index bs (i + 3) + +-- | Workaround for the inconsistent behaviour of 'canonicalizePath'. Always +-- throws an error if the path refers to a non-existent file. +tryCanonicalizePath :: FilePath -> IO FilePath +tryCanonicalizePath path = do + ret <- canonicalizePath path +#if defined(mingw32_HOST_OS) || MIN_VERSION_directory(1,2,3) + exists <- liftM2 (||) (doesFileExist ret) (Dir.doesDirectoryExist ret) + unless exists $ + IOError.ioError $ IOError.mkIOError IOError.doesNotExistErrorType "canonicalizePath" + Nothing (Just ret) +#endif + return ret + +-- | A non-throwing wrapper for 'canonicalizePath'. If 'canonicalizePath' throws +-- an exception, returns the path argument unmodified. +canonicalizePathNoThrow :: FilePath -> IO FilePath +canonicalizePathNoThrow path = do + canonicalizePath path `catchIO` (\_ -> return path) + +-------------------- +-- Modification time + +-- | Like Distribution.Simple.Utils.moreRecentFile, but uses getModTime instead +-- of getModificationTime for higher precision. We can't merge the two because +-- Distribution.Client.Time uses MIN_VERSION macros. +moreRecentFile :: FilePath -> FilePath -> IO Bool +moreRecentFile a b = do + exists <- doesFileExist b + if not exists + then return True + else do tb <- getModTime b + ta <- getModTime a + return (ta > tb) + +-- | Like 'moreRecentFile', but also checks that the first file exists. +existsAndIsMoreRecentThan :: FilePath -> FilePath -> IO Bool +existsAndIsMoreRecentThan a b = do + exists <- doesFileExist a + if not exists + then return False + else a `moreRecentFile` b + +-- | Sets the handler for encoding errors to one that transliterates invalid +-- characters into one present in the encoding (i.e., \'?\'). +-- This is opposed to the default behavior, which is to throw an exception on +-- error. 
This function will ignore file handles that have a Unicode encoding +-- set. It's a no-op for versions of `base` less than 4.4. +relaxEncodingErrors :: Handle -> IO () +relaxEncodingErrors handle = do + maybeEncoding <- hGetEncoding handle + case maybeEncoding of + Just (TextEncoding name decoder encoder) | not ("UTF" `isPrefixOf` name) -> + let relax x = x { recover = recoverEncode TransliterateCodingFailure } + in hSetEncoding handle (TextEncoding name decoder (fmap relax encoder)) + _ -> + return () + +-- |Like 'tryFindPackageDesc', but with error specific to add-source deps. +tryFindAddSourcePackageDesc :: Verbosity -> FilePath -> String -> IO FilePath +tryFindAddSourcePackageDesc verbosity depPath err = tryFindPackageDesc verbosity depPath $ + err ++ "\n" ++ "Failed to read cabal file of add-source dependency: " + ++ depPath + +-- |Try to find a @.cabal@ file, in directory @depPath@. Fails if one cannot be +-- found, with @err@ prefixing the error message. This function simply allows +-- us to give a more descriptive error than that provided by @findPackageDesc@. +tryFindPackageDesc :: Verbosity -> FilePath -> String -> IO FilePath +tryFindPackageDesc verbosity depPath err = do + errOrCabalFile <- findPackageDesc depPath + case errOrCabalFile of + Right file -> return file + Left _ -> die' verbosity err + +-- | Phase of building a dependency. Represents current status of package +-- dependency processing. See #4040 for details. 
+data ProgressPhase + = ProgressDownloading + | ProgressDownloaded + | ProgressStarting + | ProgressBuilding + | ProgressHaddock + | ProgressInstalling + | ProgressCompleted + +progressMessage :: Verbosity -> ProgressPhase -> String -> IO () +progressMessage verbosity phase subject = do + noticeNoWrap verbosity $ phaseStr ++ subject ++ "\n" + where + phaseStr = case phase of + ProgressDownloading -> "Downloading " + ProgressDownloaded -> "Downloaded " + ProgressStarting -> "Starting " + ProgressBuilding -> "Building " + ProgressHaddock -> "Haddock " + ProgressInstalling -> "Installing " + ProgressCompleted -> "Completed " diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/VCS.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/VCS.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/VCS.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/VCS.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,518 @@ +{-# LANGUAGE NamedFieldPuns, RecordWildCards #-} +module Distribution.Client.VCS ( + -- * VCS driver type + VCS, + vcsRepoType, + vcsProgram, + -- ** Type re-exports + SourceRepo, + RepoType, + RepoKind, + Program, + ConfiguredProgram, + + -- * Selecting amongst source repos + selectPackageSourceRepo, + + -- * Validating 'SourceRepo's and configuring VCS drivers + validateSourceRepo, + validateSourceRepos, + SourceRepoProblem(..), + configureVCS, + configureVCSs, + + -- * Running the VCS driver + cloneSourceRepo, + syncSourceRepos, + + -- * The individual VCS drivers + knownVCSs, + vcsBzr, + vcsDarcs, + vcsGit, + vcsHg, + vcsSvn, + ) where + +import Prelude () +import Distribution.Client.Compat.Prelude + +import Distribution.Types.SourceRepo + ( SourceRepo(..), RepoType(..), RepoKind(..) 
) +import Distribution.Client.RebuildMonad + ( Rebuild, monitorFiles, MonitorFilePath, monitorDirectoryExistence ) +import Distribution.Verbosity as Verbosity + ( Verbosity, normal ) +import Distribution.Simple.Program + ( Program(programFindVersion) + , ConfiguredProgram(programVersion) + , simpleProgram, findProgramVersion + , ProgramInvocation(..), programInvocation, runProgramInvocation + , emptyProgramDb, requireProgram ) +import Distribution.Version + ( mkVersion ) + +import Control.Monad + ( mapM_ ) +import Control.Monad.Trans + ( liftIO ) +import qualified Data.Char as Char +import qualified Data.Map as Map +import Data.Ord + ( comparing ) +import Data.Either + ( partitionEithers ) +import System.FilePath + ( takeDirectory ) +import System.Directory + ( doesDirectoryExist ) + + +-- | A driver for a version control system, e.g. git, darcs etc. +-- +data VCS program = VCS { + -- | The type of repository this driver is for. + vcsRepoType :: RepoType, + + -- | The vcs program itself. + -- This is used at type 'Program' and 'ConfiguredProgram'. + vcsProgram :: program, + + -- | The program invocation(s) to get\/clone a repository into a fresh + -- local directory. + vcsCloneRepo :: Verbosity + -> ConfiguredProgram + -> SourceRepo + -> FilePath -- Source URI + -> FilePath -- Destination directory + -> [ProgramInvocation], + + -- | The program invocation(s) to synchronise a whole set of /related/ + -- repositories with corresponding local directories. Also returns the + -- files that the command depends on, for change monitoring. + vcsSyncRepos :: Verbosity + -> ConfiguredProgram + -> [(SourceRepo, FilePath)] + -> IO [MonitorFilePath] + } + + +-- ------------------------------------------------------------ +-- * Selecting repos and drivers +-- ------------------------------------------------------------ + +-- | Pick the 'SourceRepo' to use to get the package sources from. 
+-- +-- Note that this does /not/ depend on what 'VCS' drivers we are able to +-- successfully configure. It is based only on the 'SourceRepo's declared +-- in the package, and optionally on a preferred 'RepoKind'. +-- +selectPackageSourceRepo :: Maybe RepoKind + -> [SourceRepo] + -> Maybe SourceRepo +selectPackageSourceRepo preferredRepoKind = + listToMaybe + -- Sort repositories by kind, from This to Head to Unknown. Repositories + -- with equivalent kinds are selected based on the order they appear in + -- the Cabal description file. + . sortBy (comparing thisFirst) + -- If the user has specified the repo kind, filter out the repositories + -- they're not interested in. + . filter (\repo -> maybe True (repoKind repo ==) preferredRepoKind) + where + thisFirst :: SourceRepo -> Int + thisFirst r = case repoKind r of + RepoThis -> 0 + RepoHead -> case repoTag r of + -- If the type is 'head' but the author specified a tag, they + -- probably meant to create a 'this' repository but screwed up. + Just _ -> 0 + Nothing -> 1 + RepoKindUnknown _ -> 2 + +data SourceRepoProblem = SourceRepoRepoTypeUnspecified + | SourceRepoRepoTypeUnsupported RepoType + | SourceRepoLocationUnspecified + deriving Show + +-- | Validates that the 'SourceRepo' specifies a location URI and a repository +-- type that is supported by a VCS driver. +-- +-- | It also returns the 'VCS' driver we should use to work with it. +-- +validateSourceRepo :: SourceRepo + -> Either SourceRepoProblem + (SourceRepo, String, RepoType, VCS Program) +validateSourceRepo = \repo -> do + rtype <- repoType repo ?! SourceRepoRepoTypeUnspecified + vcs <- Map.lookup rtype knownVCSs ?! SourceRepoRepoTypeUnsupported rtype + uri <- repoLocation repo ?! SourceRepoLocationUnspecified + return (repo, uri, rtype, vcs) + where + a ?! e = maybe (Left e) Right a + + +-- | As 'validateSourceRepo' but for a bunch of 'SourceRepo's, and return +-- things in a convenient form to pass to 'configureVCSs', or to report +-- problems. 
+-- +validateSourceRepos :: [SourceRepo] + -> Either [(SourceRepo, SourceRepoProblem)] + [(SourceRepo, String, RepoType, VCS Program)] +validateSourceRepos rs = + case partitionEithers (map validateSourceRepo' rs) of + (problems@(_:_), _) -> Left problems + ([], vcss) -> Right vcss + where + validateSourceRepo' r = either (Left . (,) r) Right + (validateSourceRepo r) + + +configureVCS :: Verbosity + -> VCS Program + -> IO (VCS ConfiguredProgram) +configureVCS verbosity vcs@VCS{vcsProgram = prog} = + asVcsConfigured <$> requireProgram verbosity prog emptyProgramDb + where + asVcsConfigured (prog', _) = vcs { vcsProgram = prog' } + +configureVCSs :: Verbosity + -> Map RepoType (VCS Program) + -> IO (Map RepoType (VCS ConfiguredProgram)) +configureVCSs verbosity = traverse (configureVCS verbosity) + + +-- ------------------------------------------------------------ +-- * Running the driver +-- ------------------------------------------------------------ + +-- | Clone a single source repo into a fresh directory, using a configured VCS. +-- +-- This is for making a new copy, not synchronising an existing copy. It will +-- fail if the destination directory already exists. +-- +-- Make sure to validate the 'SourceRepo' using 'validateSourceRepo' first. +-- +cloneSourceRepo :: Verbosity + -> VCS ConfiguredProgram + -> SourceRepo -- ^ Must have 'repoLocation' filled. + -> FilePath -- ^ Destination directory + -> IO () +cloneSourceRepo _ _ repo@SourceRepo{ repoLocation = Nothing } _ = + error $ "cloneSourceRepo: precondition violation, missing repoLocation: \"" + ++ show repo ++ "\". Validate using validateSourceRepo first." + +cloneSourceRepo verbosity vcs + repo@SourceRepo{ repoLocation = Just srcuri } destdir = + mapM_ (runProgramInvocation verbosity) invocations + where + invocations = vcsCloneRepo vcs verbosity + (vcsProgram vcs) repo + srcuri destdir + + +-- | Syncronise a set of 'SourceRepo's referring to the same repository with +-- corresponding local directories. 
The local directories may or may not +-- already exist. +-- +-- The 'SourceRepo' values used in a single invocation of 'syncSourceRepos', +-- or used across a series of invocations with any local directory must refer +-- to the /same/ repository. That means it must be the same location but they +-- can differ in the branch, or tag or subdir. +-- +-- The reason to allow multiple related 'SourceRepo's is to allow for the +-- network or storage to be shared between different checkouts of the repo. +-- For example if a single repo contains multiple packages in different subdirs +-- and in some project it may make sense to use a different state of the repo +-- for one subdir compared to another. +-- +syncSourceRepos :: Verbosity + -> VCS ConfiguredProgram + -> [(SourceRepo, FilePath)] + -> Rebuild () +syncSourceRepos verbosity vcs repos = do + files <- liftIO $ vcsSyncRepos vcs verbosity (vcsProgram vcs) repos + monitorFiles files + + +-- ------------------------------------------------------------ +-- * The various VCS drivers +-- ------------------------------------------------------------ + +-- | The set of all supported VCS drivers, organised by 'RepoType'. +-- +knownVCSs :: Map RepoType (VCS Program) +knownVCSs = Map.fromList [ (vcsRepoType vcs, vcs) | vcs <- vcss ] + where + vcss = [ vcsBzr, vcsDarcs, vcsGit, vcsHg, vcsSvn ] + + +-- | VCS driver for Bazaar. 
+-- +vcsBzr :: VCS Program +vcsBzr = + VCS { + vcsRepoType = Bazaar, + vcsProgram = bzrProgram, + vcsCloneRepo, + vcsSyncRepos + } + where + vcsCloneRepo :: Verbosity + -> ConfiguredProgram + -> SourceRepo + -> FilePath + -> FilePath + -> [ProgramInvocation] + vcsCloneRepo verbosity prog repo srcuri destdir = + [ programInvocation prog + ([branchCmd, srcuri, destdir] ++ tagArgs ++ verboseArg) ] + where + -- The @get@ command was deprecated in version 2.4 in favour of + -- the alias @branch@ + branchCmd | programVersion prog >= Just (mkVersion [2,4]) + = "branch" + | otherwise = "get" + + tagArgs = case repoTag repo of + Nothing -> [] + Just tag -> ["-r", "tag:" ++ tag] + verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] + + vcsSyncRepos :: Verbosity -> ConfiguredProgram + -> [(SourceRepo, FilePath)] -> IO [MonitorFilePath] + vcsSyncRepos _v _p _rs = fail "sync repo not yet supported for bzr" + +bzrProgram :: Program +bzrProgram = (simpleProgram "bzr") { + programFindVersion = findProgramVersion "--version" $ \str -> + case words str of + -- "Bazaar (bzr) 2.6.0\n ... lots of extra stuff" + (_:_:ver:_) -> ver + _ -> "" + } + + +-- | VCS driver for Darcs. +-- +vcsDarcs :: VCS Program +vcsDarcs = + VCS { + vcsRepoType = Darcs, + vcsProgram = darcsProgram, + vcsCloneRepo, + vcsSyncRepos + } + where + vcsCloneRepo :: Verbosity + -> ConfiguredProgram + -> SourceRepo + -> FilePath + -> FilePath + -> [ProgramInvocation] + vcsCloneRepo verbosity prog repo srcuri destdir = + [ programInvocation prog cloneArgs ] + where + cloneArgs = [cloneCmd, srcuri, destdir] ++ tagArgs ++ verboseArg + -- At some point the @clone@ command was introduced as an alias for + -- @get@, and @clone@ seems to be the recommended one now. 
+ cloneCmd | programVersion prog >= Just (mkVersion [2,8]) + = "clone" + | otherwise = "get" + tagArgs = case repoTag repo of + Nothing -> [] + Just tag -> ["-t", tag] + verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] + + vcsSyncRepos :: Verbosity -> ConfiguredProgram + -> [(SourceRepo, FilePath)] -> IO [MonitorFilePath] + vcsSyncRepos _v _p _rs = fail "sync repo not yet supported for darcs" + +darcsProgram :: Program +darcsProgram = (simpleProgram "darcs") { + programFindVersion = findProgramVersion "--version" $ \str -> + case words str of + -- "2.8.5 (release)" + (ver:_) -> ver + _ -> "" + } + + +-- | VCS driver for Git. +-- +vcsGit :: VCS Program +vcsGit = + VCS { + vcsRepoType = Git, + vcsProgram = gitProgram, + vcsCloneRepo, + vcsSyncRepos + } + where + vcsCloneRepo :: Verbosity + -> ConfiguredProgram + -> SourceRepo + -> FilePath + -> FilePath + -> [ProgramInvocation] + vcsCloneRepo verbosity prog repo srcuri destdir = + [ programInvocation prog cloneArgs ] + -- And if there's a tag, we have to do that in a second step: + ++ [ (programInvocation prog (checkoutArgs tag)) { + progInvokeCwd = Just destdir + } + | tag <- maybeToList (repoTag repo) ] + where + cloneArgs = ["clone", srcuri, destdir] + ++ branchArgs ++ verboseArg + branchArgs = case repoBranch repo of + Just b -> ["--branch", b] + Nothing -> [] + checkoutArgs tag = "checkout" : verboseArg ++ [tag, "--"] + verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] + + vcsSyncRepos :: Verbosity + -> ConfiguredProgram + -> [(SourceRepo, FilePath)] + -> IO [MonitorFilePath] + vcsSyncRepos _ _ [] = return [] + vcsSyncRepos verbosity gitProg + ((primaryRepo, primaryLocalDir) : secondaryRepos) = do + + vcsSyncRepo verbosity gitProg primaryRepo primaryLocalDir Nothing + sequence_ + [ vcsSyncRepo verbosity gitProg repo localDir (Just primaryLocalDir) + | (repo, localDir) <- secondaryRepos ] + return [ monitorDirectoryExistence dir + | dir <- (primaryLocalDir : map snd secondaryRepos) ] + + 
vcsSyncRepo verbosity gitProg SourceRepo{..} localDir peer = do + exists <- doesDirectoryExist localDir + if exists + then git localDir ["fetch"] + else git (takeDirectory localDir) cloneArgs + git localDir checkoutArgs + where + git :: FilePath -> [String] -> IO () + git cwd args = runProgramInvocation verbosity $ + (programInvocation gitProg args) { + progInvokeCwd = Just cwd + } + + cloneArgs = ["clone", "--no-checkout", loc, localDir] + ++ case peer of + Nothing -> [] + Just peerLocalDir -> ["--reference", peerLocalDir] + ++ verboseArg + where Just loc = repoLocation + checkoutArgs = "checkout" : verboseArg ++ ["--detach", "--force" + , checkoutTarget, "--" ] + checkoutTarget = fromMaybe "HEAD" (repoBranch `mplus` repoTag) + verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] + +gitProgram :: Program +gitProgram = (simpleProgram "git") { + programFindVersion = findProgramVersion "--version" $ \str -> + case words str of + -- "git version 2.5.5" + (_:_:ver:_) | all isTypical ver -> ver + + -- or annoyingly "git version 2.17.1.windows.2" yes, really + (_:_:ver:_) -> intercalate "." + . takeWhile (all isNum) + . split + $ ver + _ -> "" + } + where + isNum c = c >= '0' && c <= '9' + isTypical c = isNum c || c == '.' + split cs = case break (=='.') cs of + (chunk,[]) -> chunk : [] + (chunk,_:rest) -> chunk : split rest + +-- | VCS driver for Mercurial. 
+-- +vcsHg :: VCS Program +vcsHg = + VCS { + vcsRepoType = Mercurial, + vcsProgram = hgProgram, + vcsCloneRepo, + vcsSyncRepos + } + where + vcsCloneRepo :: Verbosity + -> ConfiguredProgram + -> SourceRepo + -> FilePath + -> FilePath + -> [ProgramInvocation] + vcsCloneRepo verbosity prog repo srcuri destdir = + [ programInvocation prog cloneArgs ] + where + cloneArgs = ["clone", srcuri, destdir] + ++ branchArgs ++ tagArgs ++ verboseArg + branchArgs = case repoBranch repo of + Just b -> ["--branch", b] + Nothing -> [] + tagArgs = case repoTag repo of + Just t -> ["--rev", t] + Nothing -> [] + verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] + + vcsSyncRepos :: Verbosity + -> ConfiguredProgram + -> [(SourceRepo, FilePath)] + -> IO [MonitorFilePath] + vcsSyncRepos _v _p _rs = fail "sync repo not yet supported for hg" + +hgProgram :: Program +hgProgram = (simpleProgram "hg") { + programFindVersion = findProgramVersion "--version" $ \str -> + case words str of + -- Mercurial Distributed SCM (version 3.5.2)\n ... long message + (_:_:_:_:ver:_) -> takeWhile (\c -> Char.isDigit c || c == '.') ver + _ -> "" + } + + +-- | VCS driver for Subversion. +-- +vcsSvn :: VCS Program +vcsSvn = + VCS { + vcsRepoType = SVN, + vcsProgram = svnProgram, + vcsCloneRepo, + vcsSyncRepos + } + where + vcsCloneRepo :: Verbosity + -> ConfiguredProgram + -> SourceRepo + -> FilePath + -> FilePath + -> [ProgramInvocation] + vcsCloneRepo verbosity prog _repo srcuri destdir = + [ programInvocation prog checkoutArgs ] + where + checkoutArgs = ["checkout", srcuri, destdir] ++ verboseArg + verboseArg = [ "--quiet" | verbosity < Verbosity.normal ] + --TODO: branch or tag? 
+ + vcsSyncRepos :: Verbosity + -> ConfiguredProgram + -> [(SourceRepo, FilePath)] + -> IO [MonitorFilePath] + vcsSyncRepos _v _p _rs = fail "sync repo not yet supported for svn" + +svnProgram :: Program +svnProgram = (simpleProgram "svn") { + programFindVersion = findProgramVersion "--version" $ \str -> + case words str of + -- svn, version 1.9.4 (r1740329)\n ... long message + (_:_:ver:_) -> ver + _ -> "" + } + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Win32SelfUpgrade.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Win32SelfUpgrade.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/Win32SelfUpgrade.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/Win32SelfUpgrade.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,225 @@ +{-# LANGUAGE CPP, ForeignFunctionInterface #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.Win32SelfUpgrade +-- Copyright : (c) Duncan Coutts 2008 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- Support for self-upgrading executables on Windows platforms. +----------------------------------------------------------------------------- +module Distribution.Client.Win32SelfUpgrade ( +-- * Explanation +-- +-- | Windows inherited a design choice from DOS that while initially innocuous +-- has rather unfortunate consequences. It maintains the invariant that every +-- open file has a corresponding name on disk. One positive consequence of this +-- is that an executable can always find its own executable file. The downside +-- is that a program cannot be deleted or upgraded while it is running without +-- hideous workarounds. 
This module implements one such hideous workaround. +-- +-- The basic idea is: +-- +-- * Move our own exe file to a new name +-- * Copy a new exe file to the previous name +-- * Run the new exe file, passing our own PID and new path +-- * Wait for the new process to start +-- * Close the new exe file +-- * Exit old process +-- +-- Then in the new process: +-- +-- * Inform the old process that we've started +-- * Wait for the old process to die +-- * Delete the old exe file +-- * Exit new process +-- + + possibleSelfUpgrade, + deleteOldExeFile, + ) where + +#ifdef mingw32_HOST_OS + +import qualified System.Win32 as Win32 +import System.Win32 (DWORD, BOOL, HANDLE, LPCTSTR) +import Foreign.Ptr (Ptr, nullPtr) +import System.Process (runProcess) +import System.Directory (canonicalizePath) +import System.FilePath (takeBaseName, replaceBaseName, equalFilePath) + +import Distribution.Verbosity as Verbosity (Verbosity, showForCabal) +import Distribution.Simple.Utils (debug, info) + +import Prelude hiding (log) + +-- | If one of the given files is our own exe file then we arrange things such +-- that the nested action can replace our own exe file. +-- +-- We require that the new process accepts a command line invocation that +-- calls 'deleteOldExeFile', passing in the PID and exe file. +-- +possibleSelfUpgrade :: Verbosity + -> [FilePath] + -> IO a -> IO a +possibleSelfUpgrade verbosity newPaths action = do + dstPath <- canonicalizePath =<< Win32.getModuleFileName Win32.nullHANDLE + + newPaths' <- mapM canonicalizePath newPaths + let doingSelfUpgrade = any (equalFilePath dstPath) newPaths' + + if not doingSelfUpgrade + then action + else do + info verbosity $ "cabal-install does the replace-own-exe-file dance..." 
+ tmpPath <- moveOurExeOutOfTheWay verbosity + result <- action + scheduleOurDemise verbosity dstPath tmpPath + (\pid path -> ["win32selfupgrade", pid, path + ,"--verbose=" ++ Verbosity.showForCabal verbosity]) + return result + +-- | The name of a Win32 Event object that we use to synchronise between the +-- old and new processes. We need to synchronise to make sure that the old +-- process has not yet terminated by the time the new one starts up and looks +-- for the old process. Otherwise the old one might have already terminated +-- and we could not wait on it terminating reliably (eg the PID might get +-- re-used). +-- +syncEventName :: String +syncEventName = "Local\\cabal-install-upgrade" + +-- | The first part of allowing our exe file to be replaced is to move the +-- existing exe file out of the way. Although we cannot delete our exe file +-- while we're still running, fortunately we can rename it, at least within +-- the same directory. +-- +moveOurExeOutOfTheWay :: Verbosity -> IO FilePath +moveOurExeOutOfTheWay verbosity = do + ourPID <- getCurrentProcessId + dstPath <- Win32.getModuleFileName Win32.nullHANDLE + + let tmpPath = replaceBaseName dstPath (takeBaseName dstPath ++ show ourPID) + + debug verbosity $ "moving " ++ dstPath ++ " to " ++ tmpPath + Win32.moveFile dstPath tmpPath + return tmpPath + +-- | Assuming we've now installed the new exe file in the right place, we +-- launch it and ask it to delete our exe file when we eventually terminate. 
+-- +scheduleOurDemise :: Verbosity -> FilePath -> FilePath + -> (String -> FilePath -> [String]) -> IO () +scheduleOurDemise verbosity dstPath tmpPath mkArgs = do + ourPID <- getCurrentProcessId + event <- createEvent syncEventName + + let args = mkArgs (show ourPID) tmpPath + log $ "launching child " ++ unwords (dstPath : map show args) + _ <- runProcess dstPath args Nothing Nothing Nothing Nothing Nothing + + log $ "waiting for the child to start up" + waitForSingleObject event (10*1000) -- wait at most 10 sec + log $ "child started ok" + + where + log msg = debug verbosity ("Win32Reinstall.parent: " ++ msg) + +-- | Assuming we're now in the new child process, we've been asked by the old +-- process to wait for it to terminate and then we can remove the old exe file +-- that it renamed itself to. +-- +deleteOldExeFile :: Verbosity -> Int -> FilePath -> IO () +deleteOldExeFile verbosity oldPID tmpPath = do + log $ "process started. Will delete exe file of process " + ++ show oldPID ++ " at path " ++ tmpPath + + log $ "getting handle of parent process " ++ show oldPID + oldPHANDLE <- Win32.openProcess Win32.sYNCHORNIZE False (fromIntegral oldPID) + + log $ "synchronising with parent" + event <- openEvent syncEventName + setEvent event + + log $ "waiting for parent process to terminate" + waitForSingleObject oldPHANDLE Win32.iNFINITE + log $ "parent process terminated" + + log $ "deleting parent's old .exe file" + Win32.deleteFile tmpPath + + where + log msg = debug verbosity ("Win32Reinstall.child: " ++ msg) + +------------------------ +-- Win32 foreign imports +-- + +-- A bunch of functions sadly not provided by the Win32 package. 
+ +#ifdef x86_64_HOST_ARCH +#define CALLCONV ccall +#else +#define CALLCONV stdcall +#endif + +foreign import CALLCONV unsafe "windows.h GetCurrentProcessId" + getCurrentProcessId :: IO DWORD + +foreign import CALLCONV unsafe "windows.h WaitForSingleObject" + waitForSingleObject_ :: HANDLE -> DWORD -> IO DWORD + +waitForSingleObject :: HANDLE -> DWORD -> IO () +waitForSingleObject handle timeout = + Win32.failIf_ bad "WaitForSingleObject" $ + waitForSingleObject_ handle timeout + where + bad result = not (result == 0 || result == wAIT_TIMEOUT) + wAIT_TIMEOUT = 0x00000102 + +foreign import CALLCONV unsafe "windows.h CreateEventW" + createEvent_ :: Ptr () -> BOOL -> BOOL -> LPCTSTR -> IO HANDLE + +createEvent :: String -> IO HANDLE +createEvent name = do + Win32.failIfNull "CreateEvent" $ + Win32.withTString name $ + createEvent_ nullPtr False False + +foreign import CALLCONV unsafe "windows.h OpenEventW" + openEvent_ :: DWORD -> BOOL -> LPCTSTR -> IO HANDLE + +openEvent :: String -> IO HANDLE +openEvent name = do + Win32.failIfNull "OpenEvent" $ + Win32.withTString name $ + openEvent_ eVENT_MODIFY_STATE False + where + eVENT_MODIFY_STATE :: DWORD + eVENT_MODIFY_STATE = 0x0002 + +foreign import CALLCONV unsafe "windows.h SetEvent" + setEvent_ :: HANDLE -> IO BOOL + +setEvent :: HANDLE -> IO () +setEvent handle = + Win32.failIfFalse_ "SetEvent" $ + setEvent_ handle + +#else + +import Distribution.Verbosity (Verbosity) +import Distribution.Simple.Utils (die') + +possibleSelfUpgrade :: Verbosity + -> [FilePath] + -> IO a -> IO a +possibleSelfUpgrade _ _ action = action + +deleteOldExeFile :: Verbosity -> Int -> FilePath -> IO () +deleteOldExeFile verbosity _ _ = die' verbosity "win32selfupgrade not needed except on win32" + +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/World.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/World.hs --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Client/World.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Client/World.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,173 @@ +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Client.World +-- Copyright : (c) Peter Robinson 2009 +-- License : BSD-like +-- +-- Maintainer : thaldyron@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- Interface to the world-file that contains a list of explicitly +-- requested packages. Meant to be imported qualified. +-- +-- A world file entry stores the package-name, package-version, and +-- user flags. +-- For example, the entry generated by +-- # cabal install stm-io-hooks --flags="-debug" +-- looks like this: +-- # stm-io-hooks -any --flags="-debug" +-- To rebuild/upgrade the packages in world (e.g. when updating the compiler) +-- use +-- # cabal install world +-- +----------------------------------------------------------------------------- +module Distribution.Client.World ( + WorldPkgInfo(..), + insert, + delete, + getContents, + ) where + +import Prelude (sequence) +import Distribution.Client.Compat.Prelude hiding (getContents) + +import Distribution.Types.Dependency +import Distribution.PackageDescription + ( FlagAssignment, mkFlagAssignment, unFlagAssignment + , mkFlagName, unFlagName ) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Simple.Utils + ( die', info, chattyTry, writeFileAtomic ) +import Distribution.Text + ( Text(..), display, simpleParse ) +import qualified Distribution.Compat.ReadP as Parse +import Distribution.Compat.Exception ( catchIO ) +import qualified Text.PrettyPrint as Disp + + +import Data.Char as Char + +import Data.List + ( unionBy, deleteFirstsBy ) +import System.IO.Error + ( isDoesNotExistError ) +import qualified 
Data.ByteString.Lazy.Char8 as B + + +data WorldPkgInfo = WorldPkgInfo Dependency FlagAssignment + deriving (Show,Eq) + +-- | Adds packages to the world file; creates the file if it doesn't +-- exist yet. Version constraints and flag assignments for a package are +-- updated if already present. IO errors are non-fatal. +insert :: Verbosity -> FilePath -> [WorldPkgInfo] -> IO () +insert = modifyWorld $ unionBy equalUDep + +-- | Removes packages from the world file. +-- Note: Currently unused as there is no mechanism in Cabal (yet) to +-- handle uninstalls. IO errors are non-fatal. +delete :: Verbosity -> FilePath -> [WorldPkgInfo] -> IO () +delete = modifyWorld $ flip (deleteFirstsBy equalUDep) + +-- | WorldPkgInfo values are considered equal if they refer to +-- the same package, i.e., we don't care about differing versions or flags. +equalUDep :: WorldPkgInfo -> WorldPkgInfo -> Bool +equalUDep (WorldPkgInfo (Dependency pkg1 _) _) + (WorldPkgInfo (Dependency pkg2 _) _) = pkg1 == pkg2 + +-- | Modifies the world file by applying an update-function ('unionBy' +-- for 'insert', 'deleteFirstsBy' for 'delete') to the given list of +-- packages. IO errors are considered non-fatal. +modifyWorld :: ([WorldPkgInfo] -> [WorldPkgInfo] + -> [WorldPkgInfo]) + -- ^ Function that defines how + -- the list of user packages are merged with + -- existing world packages. + -> Verbosity + -> FilePath -- ^ Location of the world file + -> [WorldPkgInfo] -- ^ list of user supplied packages + -> IO () +modifyWorld _ _ _ [] = return () +modifyWorld f verbosity world pkgs = + chattyTry "Error while updating world-file. 
" $ do + pkgsOldWorld <- getContents verbosity world + -- Filter out packages that are not in the world file: + let pkgsNewWorld = nubBy equalUDep $ f pkgs pkgsOldWorld + -- 'Dependency' is not an Ord instance, so we need to check for + -- equivalence the awkward way: + if not (all (`elem` pkgsOldWorld) pkgsNewWorld && + all (`elem` pkgsNewWorld) pkgsOldWorld) + then do + info verbosity "Updating world file..." + writeFileAtomic world . B.pack $ unlines + [ (display pkg) | pkg <- pkgsNewWorld] + else + info verbosity "World file is already up to date." + + +-- | Returns the content of the world file as a list +getContents :: Verbosity -> FilePath -> IO [WorldPkgInfo] +getContents verbosity world = do + content <- safelyReadFile world + let result = map simpleParse (lines $ B.unpack content) + case sequence result of + Nothing -> die' verbosity "Could not parse world file." + Just xs -> return xs + where + safelyReadFile :: FilePath -> IO B.ByteString + safelyReadFile file = B.readFile file `catchIO` handler + where + handler e | isDoesNotExistError e = return B.empty + | otherwise = ioError e + + +instance Text WorldPkgInfo where + disp (WorldPkgInfo dep flags) = disp dep Disp.<+> dispFlags (unFlagAssignment flags) + where + dispFlags [] = Disp.empty + dispFlags fs = Disp.text "--flags=" + <<>> Disp.doubleQuotes (flagAssToDoc fs) + flagAssToDoc = foldr (\(fname,val) flagAssDoc -> + (if not val then Disp.char '-' + else Disp.empty) + <<>> Disp.text (unFlagName fname) + Disp.<+> flagAssDoc) + Disp.empty + parse = do + dep <- parse + Parse.skipSpaces + flagAss <- Parse.option mempty parseFlagAssignment + return $ WorldPkgInfo dep flagAss + where + parseFlagAssignment :: Parse.ReadP r FlagAssignment + parseFlagAssignment = do + _ <- Parse.string "--flags" + Parse.skipSpaces + _ <- Parse.char '=' + Parse.skipSpaces + mkFlagAssignment <$> (inDoubleQuotes $ Parse.many1 flag) + where + inDoubleQuotes :: Parse.ReadP r a -> Parse.ReadP r a + inDoubleQuotes = Parse.between 
(Parse.char '"') (Parse.char '"') + + flag = do + Parse.skipSpaces + val <- negative Parse.+++ positive + name <- ident + Parse.skipSpaces + return (mkFlagName name,val) + negative = do + _ <- Parse.char '-' + return False + positive = return True + + ident :: Parse.ReadP r String + ident = do + -- First character must be a letter/digit to avoid flags + -- like "+-debug": + c <- Parse.satisfy Char.isAlphaNum + cs <- Parse.munch (\ch -> Char.isAlphaNum ch || ch == '_' + || ch == '-') + return (c:cs) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Compat/Prelude.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Compat/Prelude.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Compat/Prelude.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Compat/Prelude.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,19 @@ +-- to suppress WARNING in "Distribution.Compat.Prelude.Internal" +{-# OPTIONS_GHC -fno-warn-deprecations #-} + +-- | This module does two things: +-- +-- * Acts as a compatiblity layer, like @base-compat@. +-- +-- * Provides commonly used imports. 
+-- +-- This module is a superset of "Distribution.Compat.Prelude" (which +-- this module re-exports) +-- +module Distribution.Solver.Compat.Prelude + ( module Distribution.Compat.Prelude.Internal + , Prelude.IO + ) where + +import Prelude (IO) +import Distribution.Compat.Prelude.Internal hiding (IO) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Assignment.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Assignment.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Assignment.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Assignment.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,94 @@ +module Distribution.Solver.Modular.Assignment + ( Assignment(..) + , PAssignment + , FAssignment + , SAssignment + , toCPs + ) where + +import Prelude () +import Distribution.Solver.Compat.Prelude hiding (pi) + +import Data.Array as A +import Data.List as L +import Data.Map as M +import Data.Maybe + +import Distribution.PackageDescription (FlagAssignment, mkFlagAssignment) -- from Cabal + +import Distribution.Solver.Types.ComponentDeps (ComponentDeps, Component) +import qualified Distribution.Solver.Types.ComponentDeps as CD +import Distribution.Solver.Types.OptionalStanza +import Distribution.Solver.Types.PackagePath + +import Distribution.Solver.Modular.Configured +import Distribution.Solver.Modular.Dependency +import Distribution.Solver.Modular.Flag +import Distribution.Solver.Modular.LabeledGraph +import Distribution.Solver.Modular.Package + +-- | A (partial) package assignment. Qualified package names +-- are associated with instances. +type PAssignment = Map QPN I + +type FAssignment = Map QFN Bool +type SAssignment = Map QSN Bool + +-- | A (partial) assignment of variables. 
+data Assignment = A PAssignment FAssignment SAssignment + deriving (Show, Eq) + +-- | Delivers an ordered list of fully configured packages. +-- +-- TODO: This function is (sort of) ok. However, there's an open bug +-- w.r.t. unqualification. There might be several different instances +-- of one package version chosen by the solver, which will lead to +-- clashes. +toCPs :: Assignment -> RevDepMap -> [CP QPN] +toCPs (A pa fa sa) rdm = + let + -- get hold of the graph + g :: Graph Component + vm :: Vertex -> ((), QPN, [(Component, QPN)]) + cvm :: QPN -> Maybe Vertex + -- Note that the RevDepMap contains duplicate dependencies. Therefore the nub. + (g, vm, cvm) = graphFromEdges (L.map (\ (x, xs) -> ((), x, nub xs)) + (M.toList rdm)) + tg :: Graph Component + tg = transposeG g + -- Topsort the dependency graph, yielding a list of pkgs in the right order. + -- The graph will still contain all the installed packages, and it might + -- contain duplicates, because several variables might actually resolve to + -- the same package in the presence of qualified package names. + ps :: [PI QPN] + ps = L.map ((\ (_, x, _) -> PI x (pa M.! x)) . vm) $ + topSort g + -- Determine the flags per package, by walking over and regrouping the + -- complete flag assignment by package. + fapp :: Map QPN FlagAssignment + fapp = M.fromListWith mappend $ + L.map (\ ((FN qpn fn), b) -> (qpn, mkFlagAssignment [(fn, b)])) $ + M.toList $ + fa + -- Stanzas per package. + sapp :: Map QPN [OptionalStanza] + sapp = M.fromListWith (++) $ + L.map (\ ((SN qpn sn), b) -> (qpn, if b then [sn] else [])) $ + M.toList $ + sa + -- Dependencies per package. + depp :: QPN -> [(Component, PI QPN)] + depp qpn = let v :: Vertex + v = fromJust (cvm qpn) + dvs :: [(Component, Vertex)] + dvs = tg A.! v + in L.map (\ (comp, dv) -> case vm dv of (_, x, _) -> (comp, PI x (pa M.! x))) dvs + -- Translated to PackageDeps + depp' :: QPN -> ComponentDeps [PI QPN] + depp' = CD.fromList . L.map (\(comp, d) -> (comp, [d])) . 
depp + in + L.map (\ pi@(PI qpn _) -> CP pi + (M.findWithDefault mempty qpn fapp) + (M.findWithDefault mempty qpn sapp) + (depp' qpn)) + ps diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Builder.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Builder.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Builder.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Builder.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,298 @@ +{-# LANGUAGE ScopedTypeVariables #-} +module Distribution.Solver.Modular.Builder ( + buildTree + , splits -- for testing + ) where + +-- Building the search tree. +-- +-- In this phase, we build a search tree that is too large, i.e, it contains +-- invalid solutions. We keep track of the open goals at each point. We +-- nondeterministically pick an open goal (via a goal choice node), create +-- subtrees according to the index and the available solutions, and extend the +-- set of open goals by superficially looking at the dependencies recorded in +-- the index. +-- +-- For each goal, we keep track of all the *reasons* why it is being +-- introduced. These are for debugging and error messages, mainly. A little bit +-- of care has to be taken due to the way we treat flags. If a package has +-- flag-guarded dependencies, we cannot introduce them immediately. Instead, we +-- store the entire dependency. 
+ +import Data.List as L +import Data.Map as M +import Data.Set as S +import Prelude hiding (sequence, mapM) + +import Distribution.Solver.Modular.Dependency +import Distribution.Solver.Modular.Flag +import Distribution.Solver.Modular.Index +import Distribution.Solver.Modular.Package +import qualified Distribution.Solver.Modular.PSQ as P +import Distribution.Solver.Modular.Tree +import qualified Distribution.Solver.Modular.WeightedPSQ as W + +import Distribution.Solver.Types.ComponentDeps +import Distribution.Solver.Types.PackagePath +import Distribution.Solver.Types.Settings + +-- | All state needed to build and link the search tree. It has a type variable +-- because the linking phase doesn't need to know about the state used to build +-- the tree. +data Linker a = Linker { + buildState :: a, + linkingState :: LinkingState +} + +-- | The state needed to build the search tree without creating any linked nodes. +data BuildState = BS { + index :: Index, -- ^ information about packages and their dependencies + rdeps :: RevDepMap, -- ^ set of all package goals, completed and open, with reverse dependencies + open :: [OpenGoal], -- ^ set of still open goals (flag and package goals) + next :: BuildType, -- ^ kind of node to generate next + qualifyOptions :: QualifyOptions -- ^ qualification options +} + +-- | Map of available linking targets. +type LinkingState = Map (PN, I) [PackagePath] + +-- | Extend the set of open goals with the new goals listed. +-- +-- We also adjust the map of overall goals, and keep track of the +-- reverse dependencies of each of the goals. 
+extendOpen :: QPN -> [FlaggedDep QPN] -> BuildState -> BuildState +extendOpen qpn' gs s@(BS { rdeps = gs', open = o' }) = go gs' o' gs + where + go :: RevDepMap -> [OpenGoal] -> [FlaggedDep QPN] -> BuildState + go g o [] = s { rdeps = g, open = o } + go g o ((Flagged fn@(FN qpn _) fInfo t f) : ngs) = + go g (FlagGoal fn fInfo t f (flagGR qpn) : o) ngs + -- Note: for 'Flagged' goals, we always insert, so later additions win. + -- This is important, because in general, if a goal is inserted twice, + -- the later addition will have better dependency information. + go g o ((Stanza sn@(SN qpn _) t) : ngs) = + go g (StanzaGoal sn t (flagGR qpn) : o) ngs + go g o ((Simple (LDep dr (Dep (PkgComponent qpn _) _)) c) : ngs) + | qpn == qpn' = + -- We currently only add a self-dependency to the graph if it is + -- between a package and its setup script. The edge creates a cycle + -- and causes the solver to backtrack and choose a different + -- instance for the setup script. We may need to track other + -- self-dependencies once we implement component-based solving. + case c of + ComponentSetup -> go (M.adjust (addIfAbsent (ComponentSetup, qpn')) qpn g) o ngs + _ -> go g o ngs + | qpn `M.member` g = go (M.adjust (addIfAbsent (c, qpn')) qpn g) o ngs + | otherwise = go (M.insert qpn [(c, qpn')] g) (PkgGoal qpn (DependencyGoal dr) : o) ngs + -- code above is correct; insert/adjust have different arg order + go g o ((Simple (LDep _dr (Ext _ext )) _) : ngs) = go g o ngs + go g o ((Simple (LDep _dr (Lang _lang))_) : ngs) = go g o ngs + go g o ((Simple (LDep _dr (Pkg _pn _vr))_) : ngs) = go g o ngs + + addIfAbsent :: Eq a => a -> [a] -> [a] + addIfAbsent x xs = if x `elem` xs then xs else x : xs + + -- GoalReason for a flag or stanza. Each flag/stanza is introduced only by + -- its containing package. 
+ flagGR :: qpn -> GoalReason qpn + flagGR qpn = DependencyGoal (DependencyReason qpn M.empty S.empty) + +-- | Given the current scope, qualify all the package names in the given set of +-- dependencies and then extend the set of open goals accordingly. +scopedExtendOpen :: QPN -> FlaggedDeps PN -> FlagInfo -> + BuildState -> BuildState +scopedExtendOpen qpn fdeps fdefs s = extendOpen qpn gs s + where + -- Qualify all package names + qfdeps = qualifyDeps (qualifyOptions s) qpn fdeps + -- Introduce all package flags + qfdefs = L.map (\ (fn, b) -> Flagged (FN qpn fn) b [] []) $ M.toList fdefs + -- Combine new package and flag goals + gs = qfdefs ++ qfdeps + -- NOTE: + -- + -- In the expression @qfdefs ++ qfdeps@ above, flags occur potentially + -- multiple times, both via the flag declaration and via dependencies. + +-- | Datatype that encodes what to build next +data BuildType = + Goals -- ^ build a goal choice node + | OneGoal OpenGoal -- ^ build a node for this goal + | Instance QPN PInfo -- ^ build a tree for a concrete instance + +build :: Linker BuildState -> Tree () QGoalReason +build = ana go + where + go :: Linker BuildState -> TreeF () QGoalReason (Linker BuildState) + go s = addLinking (linkingState s) $ addChildren (buildState s) + +addChildren :: BuildState -> TreeF () QGoalReason BuildState + +-- If we have a choice between many goals, we just record the choice in +-- the tree. We select each open goal in turn, and before we descend, remove +-- it from the queue of open goals. +addChildren bs@(BS { rdeps = rdm, open = gs, next = Goals }) + | L.null gs = DoneF rdm () + | otherwise = GoalChoiceF rdm $ P.fromList + $ L.map (\ (g, gs') -> (close g, bs { next = OneGoal g, open = gs' })) + $ splits gs + +-- If we have already picked a goal, then the choice depends on the kind +-- of goal. +-- +-- For a package, we look up the instances available in the global info, +-- and then handle each instance in turn. 
+addChildren bs@(BS { rdeps = rdm, index = idx, next = OneGoal (PkgGoal qpn@(Q _ pn) gr) }) = + -- If the package does not exist in the index, we construct an emty PChoiceF node for it + -- After all, we have no choices here. Alternatively, we could immediately construct + -- a Fail node here, but that would complicate the construction of conflict sets. + -- We will probably want to give this case special treatment when generating error + -- messages though. + case M.lookup pn idx of + Nothing -> PChoiceF qpn rdm gr (W.fromList []) + Just pis -> PChoiceF qpn rdm gr (W.fromList (L.map (\ (i, info) -> + ([], POption i Nothing, bs { next = Instance qpn info })) + (M.toList pis))) + -- TODO: data structure conversion is rather ugly here + +-- For a flag, we create only two subtrees, and we create them in the order +-- that is indicated by the flag default. +addChildren bs@(BS { rdeps = rdm, next = OneGoal (FlagGoal qfn@(FN qpn _) (FInfo b m w) t f gr) }) = + FChoiceF qfn rdm gr weak m b (W.fromList + [([if b then 0 else 1], True, (extendOpen qpn t bs) { next = Goals }), + ([if b then 1 else 0], False, (extendOpen qpn f bs) { next = Goals })]) + where + trivial = L.null t && L.null f + weak = WeakOrTrivial $ unWeakOrTrivial w || trivial + +-- For a stanza, we also create only two subtrees. The order is initially +-- False, True. This can be changed later by constraints (force enabling +-- the stanza by replacing the False branch with failure) or preferences +-- (try enabling the stanza if possible by moving the True branch first). + +addChildren bs@(BS { rdeps = rdm, next = OneGoal (StanzaGoal qsn@(SN qpn _) t gr) }) = + SChoiceF qsn rdm gr trivial (W.fromList + [([0], False, bs { next = Goals }), + ([1], True, (extendOpen qpn t bs) { next = Goals })]) + where + trivial = WeakOrTrivial (L.null t) + +-- For a particular instance, we change the state: we update the scope, +-- and furthermore we update the set of goals. +-- +-- TODO: We could inline this above. 
+addChildren bs@(BS { next = Instance qpn (PInfo fdeps _ fdefs _) }) = + addChildren ((scopedExtendOpen qpn fdeps fdefs bs) + { next = Goals }) + +{------------------------------------------------------------------------------- + Add linking +-------------------------------------------------------------------------------} + +-- | Introduce link nodes into the tree +-- +-- Linking is a phase that adapts package choice nodes and adds the option to +-- link wherever appropriate: Package goals are called "related" if they are for +-- the same instance of the same package (but have different prefixes). A link +-- option is available in a package choice node whenever we can choose an +-- instance that has already been chosen for a related goal at a higher position +-- in the tree. We only create link options for related goals that are not +-- themselves linked, because the choice to link to a linked goal is the same as +-- the choice to link to the target of that goal's linking. +-- +-- The code here proceeds by maintaining a finite map recording choices that +-- have been made at higher positions in the tree. For each pair of package name +-- and instance, it stores the prefixes at which we have made a choice for this +-- package instance. Whenever we make an unlinked choice, we extend the map. +-- Whenever we find a choice, we look into the map in order to find out what +-- link options we have to add. +-- +-- A separate tree traversal would be simpler. However, 'addLinking' creates +-- linked nodes from existing unlinked nodes, which leads to sharing between the +-- nodes. If we copied the nodes when they were full trees of type +-- 'Tree () QGoalReason', then the sharing would cause a space leak during +-- exploration of the tree. Instead, we only copy the 'BuildState', which is +-- relatively small, while the tree is being constructed. 
See +-- https://github.com/haskell/cabal/issues/2899 +addLinking :: LinkingState -> TreeF () c a -> TreeF () c (Linker a) +-- The only nodes of interest are package nodes +addLinking ls (PChoiceF qpn@(Q pp pn) rdm gr cs) = + let linkedCs = fmap (\bs -> Linker bs ls) $ + W.fromList $ concatMap (linkChoices ls qpn) (W.toList cs) + unlinkedCs = W.mapWithKey goP cs + allCs = unlinkedCs `W.union` linkedCs + + -- Recurse underneath package choices. Here we just need to make sure + -- that we record the package choice so that it is available below + goP :: POption -> a -> Linker a + goP (POption i Nothing) bs = Linker bs $ M.insertWith (++) (pn, i) [pp] ls + goP _ _ = alreadyLinked + in PChoiceF qpn rdm gr allCs +addLinking ls t = fmap (\bs -> Linker bs ls) t + +linkChoices :: forall a w . LinkingState + -> QPN + -> (w, POption, a) + -> [(w, POption, a)] +linkChoices related (Q _pp pn) (weight, POption i Nothing, subtree) = + L.map aux (M.findWithDefault [] (pn, i) related) + where + aux :: PackagePath -> (w, POption, a) + aux pp = (weight, POption i (Just pp), subtree) +linkChoices _ _ (_, POption _ (Just _), _) = + alreadyLinked + +alreadyLinked :: a +alreadyLinked = error "addLinking called on tree that already contains linked nodes" + +------------------------------------------------------------------------------- + +-- | Interface to the tree builder. Just takes an index and a list of package names, +-- and computes the initial state and then the tree from there. 
+buildTree :: Index -> IndependentGoals -> [PN] -> Tree () QGoalReason +buildTree idx (IndependentGoals ind) igs = + build Linker { + buildState = BS { + index = idx + , rdeps = M.fromList (L.map (\ qpn -> (qpn, [])) qpns) + , open = L.map topLevelGoal qpns + , next = Goals + , qualifyOptions = defaultQualifyOptions idx + } + , linkingState = M.empty + } + where + topLevelGoal qpn = PkgGoal qpn UserGoal + + qpns | ind = L.map makeIndependent igs + | otherwise = L.map (Q (PackagePath DefaultNamespace QualToplevel)) igs + +{------------------------------------------------------------------------------- + Goals +-------------------------------------------------------------------------------} + +-- | Information needed about a dependency before it is converted into a Goal. +data OpenGoal = + FlagGoal (FN QPN) FInfo (FlaggedDeps QPN) (FlaggedDeps QPN) QGoalReason + | StanzaGoal (SN QPN) (FlaggedDeps QPN) QGoalReason + | PkgGoal QPN QGoalReason + +-- | Closes a goal, i.e., removes all the extraneous information that we +-- need only during the build phase. +close :: OpenGoal -> Goal QPN +close (FlagGoal qfn _ _ _ gr) = Goal (F qfn) gr +close (StanzaGoal qsn _ gr) = Goal (S qsn) gr +close (PkgGoal qpn gr) = Goal (P qpn) gr + +{------------------------------------------------------------------------------- + Auxiliary +-------------------------------------------------------------------------------} + +-- | Pairs each element of a list with the list resulting from removal of that +-- element from the original list. +splits :: [a] -> [(a, [a])] +splits = go id + where + go :: ([a] -> [a]) -> [a] -> [(a, [a])] + go _ [] = [] + go f (x : xs) = (x, f xs) : go (f . 
(x :)) xs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/ConfiguredConversion.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/ConfiguredConversion.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/ConfiguredConversion.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/ConfiguredConversion.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,72 @@ +module Distribution.Solver.Modular.ConfiguredConversion + ( convCP + ) where + +import Data.Maybe +import Prelude hiding (pi) +import Data.Either (partitionEithers) + +import Distribution.Package (UnitId, packageId) + +import qualified Distribution.Simple.PackageIndex as SI + +import Distribution.Solver.Modular.Configured +import Distribution.Solver.Modular.Package + +import Distribution.Solver.Types.ComponentDeps (ComponentDeps) +import qualified Distribution.Solver.Types.PackageIndex as CI +import Distribution.Solver.Types.PackagePath +import Distribution.Solver.Types.ResolverPackage +import Distribution.Solver.Types.SolverId +import Distribution.Solver.Types.SolverPackage +import Distribution.Solver.Types.InstSolverPackage +import Distribution.Solver.Types.SourcePackage + +-- | Converts from the solver specific result @CP QPN@ into +-- a 'ResolverPackage', which can then be converted into +-- the install plan. 
+convCP :: SI.InstalledPackageIndex -> + CI.PackageIndex (SourcePackage loc) -> + CP QPN -> ResolverPackage loc +convCP iidx sidx (CP qpi fa es ds) = + case convPI qpi of + Left pi -> PreExisting $ + InstSolverPackage { + instSolverPkgIPI = fromJust $ SI.lookupUnitId iidx pi, + instSolverPkgLibDeps = fmap fst ds', + instSolverPkgExeDeps = fmap snd ds' + } + Right pi -> Configured $ + SolverPackage { + solverPkgSource = srcpkg, + solverPkgFlags = fa, + solverPkgStanzas = es, + solverPkgLibDeps = fmap fst ds', + solverPkgExeDeps = fmap snd ds' + } + where + Just srcpkg = CI.lookupPackageId sidx pi + where + ds' :: ComponentDeps ([SolverId] {- lib -}, [SolverId] {- exe -}) + ds' = fmap (partitionEithers . map convConfId) ds + +convPI :: PI QPN -> Either UnitId PackageId +convPI (PI _ (I _ (Inst pi))) = Left pi +convPI pi = Right (packageId (either id id (convConfId pi))) + +convConfId :: PI QPN -> Either SolverId {- is lib -} SolverId {- is exe -} +convConfId (PI (Q (PackagePath _ q) pn) (I v loc)) = + case loc of + Inst pi -> Left (PreExistingId sourceId pi) + _otherwise + | QualExe _ pn' <- q + -- NB: the dependencies of the executable are also + -- qualified. So the way to tell if this is an executable + -- dependency is to make sure the qualifier is pointing + -- at the actual thing. Fortunately for us, I was + -- silly and didn't allow arbitrarily nested build-tools + -- dependencies, so a shallow check works. 
+ , pn == pn' -> Right (PlannedId sourceId) + | otherwise -> Left (PlannedId sourceId) + where + sourceId = PackageIdentifier pn v diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Configured.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Configured.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Configured.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Configured.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,13 @@ +module Distribution.Solver.Modular.Configured + ( CP(..) + ) where + +import Distribution.PackageDescription (FlagAssignment) + +import Distribution.Solver.Modular.Package +import Distribution.Solver.Types.ComponentDeps (ComponentDeps) +import Distribution.Solver.Types.OptionalStanza + +-- | A configured package is a package instance together with +-- a flag assignment and complete dependencies. 
+data CP qpn = CP (PI qpn) FlagAssignment [OptionalStanza] (ComponentDeps [PI qpn]) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/ConflictSet.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/ConflictSet.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/ConflictSet.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/ConflictSet.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,192 @@ +{-# LANGUAGE CPP #-} +#ifdef DEBUG_CONFLICT_SETS +{-# LANGUAGE ImplicitParams #-} +#endif +-- | Conflict sets +-- +-- Intended for double import +-- +-- > import Distribution.Solver.Modular.ConflictSet (ConflictSet) +-- > import qualified Distribution.Solver.Modular.ConflictSet as CS +module Distribution.Solver.Modular.ConflictSet ( + ConflictSet -- opaque + , ConflictMap +#ifdef DEBUG_CONFLICT_SETS + , conflictSetOrigin +#endif + , showConflictSet + , showCSSortedByFrequency + , showCSWithFrequency + -- Set-like operations + , toList + , union + , unions + , insert + , empty + , singleton + , member + , filter + , fromList + ) where + +import Prelude hiding (filter) +import Data.List (intercalate, sortBy) +import Data.Map (Map) +import Data.Set (Set) +import Data.Function (on) +import qualified Data.Set as S +import qualified Data.Map as M + +#ifdef DEBUG_CONFLICT_SETS +import Data.Tree +import GHC.Stack +#endif + +import Distribution.Solver.Modular.Var +import Distribution.Solver.Types.PackagePath + +-- | The set of variables involved in a solver conflict +-- +-- Since these variables should be preprocessed in some way, this type is +-- kept abstract. 
+data ConflictSet = CS { + -- | The set of variables involved on the conflict + conflictSetToSet :: !(Set (Var QPN)) + +#ifdef DEBUG_CONFLICT_SETS + -- | The origin of the conflict set + -- + -- When @DEBUG_CONFLICT_SETS@ is defined @(-f debug-conflict-sets)@, + -- we record the origin of every conflict set. For new conflict sets + -- ('empty', 'fromVars', ..) we just record the 'CallStack'; for operations + -- that construct new conflict sets from existing conflict sets ('union', + -- 'filter', ..) we record the 'CallStack' to the call to the combinator + -- as well as the 'CallStack's of the input conflict sets. + -- + -- Requires @GHC >= 7.10@. + , conflictSetOrigin :: Tree CallStack +#endif + } + deriving (Show) + +instance Eq ConflictSet where + (==) = (==) `on` conflictSetToSet + +instance Ord ConflictSet where + compare = compare `on` conflictSetToSet + +showConflictSet :: ConflictSet -> String +showConflictSet = intercalate ", " . map showVar . toList + +showCSSortedByFrequency :: ConflictMap -> ConflictSet -> String +showCSSortedByFrequency = showCS False + +showCSWithFrequency :: ConflictMap -> ConflictSet -> String +showCSWithFrequency = showCS True + +showCS :: Bool -> ConflictMap -> ConflictSet -> String +showCS showCount cm = + intercalate ", " . map showWithFrequency . indexByFrequency + where + indexByFrequency = sortBy (flip compare `on` snd) . map (\c -> (c, M.lookup c cm)) . toList + showWithFrequency (conflict, maybeFrequency) = case maybeFrequency of + Just frequency + | showCount -> showVar conflict ++ " (" ++ show frequency ++ ")" + _ -> showVar conflict + +{------------------------------------------------------------------------------- + Set-like operations +-------------------------------------------------------------------------------} + +toList :: ConflictSet -> [Var QPN] +toList = S.toList . 
conflictSetToSet + +union :: +#ifdef DEBUG_CONFLICT_SETS + (?loc :: CallStack) => +#endif + ConflictSet -> ConflictSet -> ConflictSet +union cs cs' = CS { + conflictSetToSet = S.union (conflictSetToSet cs) (conflictSetToSet cs') +#ifdef DEBUG_CONFLICT_SETS + , conflictSetOrigin = Node ?loc (map conflictSetOrigin [cs, cs']) +#endif + } + +unions :: +#ifdef DEBUG_CONFLICT_SETS + (?loc :: CallStack) => +#endif + [ConflictSet] -> ConflictSet +unions css = CS { + conflictSetToSet = S.unions (map conflictSetToSet css) +#ifdef DEBUG_CONFLICT_SETS + , conflictSetOrigin = Node ?loc (map conflictSetOrigin css) +#endif + } + +insert :: +#ifdef DEBUG_CONFLICT_SETS + (?loc :: CallStack) => +#endif + Var QPN -> ConflictSet -> ConflictSet +insert var cs = CS { + conflictSetToSet = S.insert var (conflictSetToSet cs) +#ifdef DEBUG_CONFLICT_SETS + , conflictSetOrigin = Node ?loc [conflictSetOrigin cs] +#endif + } + +empty :: +#ifdef DEBUG_CONFLICT_SETS + (?loc :: CallStack) => +#endif + ConflictSet +empty = CS { + conflictSetToSet = S.empty +#ifdef DEBUG_CONFLICT_SETS + , conflictSetOrigin = Node ?loc [] +#endif + } + +singleton :: +#ifdef DEBUG_CONFLICT_SETS + (?loc :: CallStack) => +#endif + Var QPN -> ConflictSet +singleton var = CS { + conflictSetToSet = S.singleton var +#ifdef DEBUG_CONFLICT_SETS + , conflictSetOrigin = Node ?loc [] +#endif + } + +member :: Var QPN -> ConflictSet -> Bool +member var = S.member var . 
conflictSetToSet + +filter :: +#ifdef DEBUG_CONFLICT_SETS + (?loc :: CallStack) => +#endif + (Var QPN -> Bool) -> ConflictSet -> ConflictSet +filter p cs = CS { + conflictSetToSet = S.filter p (conflictSetToSet cs) +#ifdef DEBUG_CONFLICT_SETS + , conflictSetOrigin = Node ?loc [conflictSetOrigin cs] +#endif + } + +fromList :: +#ifdef DEBUG_CONFLICT_SETS + (?loc :: CallStack) => +#endif + [Var QPN] -> ConflictSet +fromList vars = CS { + conflictSetToSet = S.fromList vars +#ifdef DEBUG_CONFLICT_SETS + , conflictSetOrigin = Node ?loc [] +#endif + } + +type ConflictMap = Map (Var QPN) Int + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Cycles.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Cycles.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Cycles.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Cycles.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,118 @@ +{-# LANGUAGE TypeFamilies #-} +module Distribution.Solver.Modular.Cycles ( + detectCyclesPhase + ) where + +import Prelude hiding (cycle) +import qualified Data.Map as M +import qualified Data.Set as S + +import qualified Distribution.Compat.Graph as G +import Distribution.Simple.Utils (ordNub) +import Distribution.Solver.Modular.Dependency +import Distribution.Solver.Modular.Flag +import Distribution.Solver.Modular.Tree +import qualified Distribution.Solver.Modular.ConflictSet as CS +import Distribution.Solver.Types.ComponentDeps (Component) +import Distribution.Solver.Types.PackagePath + +-- | Find and reject any nodes with cyclic dependencies +detectCyclesPhase :: Tree d c -> Tree d c +detectCyclesPhase = cata go + where + -- Only check children of choice nodes. 
+ go :: TreeF d c (Tree d c) -> Tree d c + go (PChoiceF qpn rdm gr cs) = + PChoice qpn rdm gr $ fmap (checkChild qpn) cs + go (FChoiceF qfn@(FN qpn _) rdm gr w m d cs) = + FChoice qfn rdm gr w m d $ fmap (checkChild qpn) cs + go (SChoiceF qsn@(SN qpn _) rdm gr w cs) = + SChoice qsn rdm gr w $ fmap (checkChild qpn) cs + go x = inn x + + checkChild :: QPN -> Tree d c -> Tree d c + checkChild qpn x@(PChoice _ rdm _ _) = failIfCycle qpn rdm x + checkChild qpn x@(FChoice _ rdm _ _ _ _ _) = failIfCycle qpn rdm x + checkChild qpn x@(SChoice _ rdm _ _ _) = failIfCycle qpn rdm x + checkChild qpn x@(GoalChoice rdm _) = failIfCycle qpn rdm x + checkChild _ x@(Fail _ _) = x + checkChild qpn x@(Done rdm _) = failIfCycle qpn rdm x + + failIfCycle :: QPN -> RevDepMap -> Tree d c -> Tree d c + failIfCycle qpn rdm x = + case findCycles qpn rdm of + Nothing -> x + Just relSet -> Fail relSet CyclicDependencies + +-- | Given the reverse dependency map from a node in the tree, check +-- if the solution is cyclic. If it is, return the conflict set containing +-- all decisions that could potentially break the cycle. +-- +-- TODO: The conflict set should also contain flag and stanza variables. +findCycles :: QPN -> RevDepMap -> Maybe ConflictSet +findCycles pkg rdm = + -- This function has two parts: a faster cycle check that is called at every + -- step and a slower calculation of the conflict set. + -- + -- 'hasCycle' checks for cycles incrementally by only looking for cycles + -- containing the current package, 'pkg'. It searches for cycles in the + -- 'RevDepMap', which is the data structure used to store reverse + -- dependencies in the search tree. We store the reverse dependencies in a + -- map, because Data.Map is smaller and/or has better sharing than + -- Distribution.Compat.Graph. + -- + -- If there is a cycle, we call G.cycles to find a strongly connected + -- component. Then we choose one cycle from the component to use for the + -- conflict set. 
Choosing only one cycle can lead to a smaller conflict set, + -- such as when a choice to enable testing introduces many cycles at once. + -- In that case, all cycles contain the current package and are in one large + -- strongly connected component. + -- + if hasCycle + then let scc :: G.Graph RevDepMapNode + scc = case G.cycles $ revDepMapToGraph rdm of + [] -> findCyclesError "cannot find a strongly connected component" + c : _ -> G.fromDistinctList c + + next :: QPN -> QPN + next p = case G.neighbors scc p of + Just (n : _) -> G.nodeKey n + _ -> findCyclesError "cannot find next node in the cycle" + + -- This function also assumes that all cycles contain 'pkg'. + oneCycle :: [QPN] + oneCycle = case iterate next pkg of + [] -> findCyclesError "empty cycle" + x : xs -> x : takeWhile (/= x) xs + in Just $ CS.fromList $ map P oneCycle + else Nothing + where + hasCycle :: Bool + hasCycle = pkg `S.member` closure (neighbors pkg) + + closure :: [QPN] -> S.Set QPN + closure = foldl go S.empty + where + go :: S.Set QPN -> QPN -> S.Set QPN + go s x = + if x `S.member` s + then s + else foldl go (S.insert x s) $ neighbors x + + neighbors :: QPN -> [QPN] + neighbors x = case x `M.lookup` rdm of + Nothing -> findCyclesError "cannot find node" + Just xs -> map snd xs + + findCyclesError = error . 
("Distribution.Solver.Modular.Cycles.findCycles: " ++) + +data RevDepMapNode = RevDepMapNode QPN [(Component, QPN)] + +instance G.IsNode RevDepMapNode where + type Key RevDepMapNode = QPN + nodeKey (RevDepMapNode qpn _) = qpn + nodeNeighbors (RevDepMapNode _ ns) = ordNub $ map snd ns + +revDepMapToGraph :: RevDepMap -> G.Graph RevDepMapNode +revDepMapToGraph rdm = G.fromDistinctList + [RevDepMapNode qpn ns | (qpn, ns) <- M.toList rdm] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Dependency.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Dependency.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Dependency.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Dependency.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,298 @@ +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE RecordWildCards #-} +module Distribution.Solver.Modular.Dependency ( + -- * Variables + Var(..) + , showVar + , varPN + -- * Conflict sets + , ConflictSet + , ConflictMap + , CS.showConflictSet + -- * Constrained instances + , CI(..) + -- * Flagged dependencies + , FlaggedDeps + , FlaggedDep(..) + , LDep(..) + , Dep(..) + , PkgComponent(..) + , ExposedComponent(..) + , DependencyReason(..) + , showDependencyReason + , flattenFlaggedDeps + , QualifyOptions(..) + , qualifyDeps + , unqualifyDeps + -- * Reverse dependency map + , RevDepMap + -- * Goals + , Goal(..) + , GoalReason(..) 
+ , QGoalReason + , goalToVar + , varToConflictSet + , goalReasonToCS + , dependencyReasonToCS + ) where + +import Prelude () +import qualified Data.Map as M +import qualified Data.Set as S +import Distribution.Solver.Compat.Prelude hiding (pi) + +import Language.Haskell.Extension (Extension(..), Language(..)) + +import Distribution.Solver.Modular.ConflictSet (ConflictSet, ConflictMap) +import Distribution.Solver.Modular.Flag +import Distribution.Solver.Modular.Package +import Distribution.Solver.Modular.Var +import Distribution.Solver.Modular.Version +import qualified Distribution.Solver.Modular.ConflictSet as CS + +import Distribution.Solver.Types.ComponentDeps (Component(..)) +import Distribution.Solver.Types.PackagePath +import Distribution.Types.UnqualComponentName + +{------------------------------------------------------------------------------- + Constrained instances +-------------------------------------------------------------------------------} + +-- | Constrained instance. It represents the allowed instances for a package, +-- which can be either a fixed instance or a version range. +data CI = Fixed I | Constrained VR + deriving (Eq, Show) + +{------------------------------------------------------------------------------- + Flagged dependencies +-------------------------------------------------------------------------------} + +-- | Flagged dependencies +-- +-- 'FlaggedDeps' is the modular solver's view of a packages dependencies: +-- rather than having the dependencies indexed by component, each dependency +-- defines what component it is in. +-- +-- Note that each dependency is associated with a Component. We must know what +-- component the dependencies belong to, or else we won't be able to construct +-- fine-grained reverse dependencies. +type FlaggedDeps qpn = [FlaggedDep qpn] + +-- | Flagged dependencies can either be plain dependency constraints, +-- or flag-dependent dependency trees. 
+data FlaggedDep qpn = + -- | Dependencies which are conditional on a flag choice. + Flagged (FN qpn) FInfo (TrueFlaggedDeps qpn) (FalseFlaggedDeps qpn) + -- | Dependencies which are conditional on whether or not a stanza + -- (e.g., a test suite or benchmark) is enabled. + | Stanza (SN qpn) (TrueFlaggedDeps qpn) + -- | Dependencies which are always enabled, for the component 'comp'. + | Simple (LDep qpn) Component + +-- | Conversatively flatten out flagged dependencies +-- +-- NOTE: We do not filter out duplicates. +flattenFlaggedDeps :: FlaggedDeps qpn -> [(LDep qpn, Component)] +flattenFlaggedDeps = concatMap aux + where + aux :: FlaggedDep qpn -> [(LDep qpn, Component)] + aux (Flagged _ _ t f) = flattenFlaggedDeps t ++ flattenFlaggedDeps f + aux (Stanza _ t) = flattenFlaggedDeps t + aux (Simple d c) = [(d, c)] + +type TrueFlaggedDeps qpn = FlaggedDeps qpn +type FalseFlaggedDeps qpn = FlaggedDeps qpn + +-- | A 'Dep' labeled with the reason it was introduced. +-- +-- 'LDep' intentionally has no 'Functor' instance because the type variable +-- is used both to record the dependencies as well as who's doing the +-- depending; having a 'Functor' instance makes bugs where we don't distinguish +-- these two far too likely. (By rights 'LDep' ought to have two type variables.) +data LDep qpn = LDep (DependencyReason qpn) (Dep qpn) + +-- | A dependency (constraint) associates a package name with a constrained +-- instance. It can also represent other types of dependencies, such as +-- dependencies on language extensions. +data Dep qpn = Dep (PkgComponent qpn) CI -- ^ dependency on a package component + | Ext Extension -- ^ dependency on a language extension + | Lang Language -- ^ dependency on a language version + | Pkg PkgconfigName VR -- ^ dependency on a pkg-config package + deriving Functor + +-- | An exposed component within a package. This type is used to represent +-- build-depends and build-tool-depends dependencies. 
+data PkgComponent qpn = PkgComponent qpn ExposedComponent + deriving (Eq, Ord, Functor, Show) + +-- | A component that can be depended upon by another package, i.e., a library +-- or an executable. +data ExposedComponent = ExposedLib | ExposedExe UnqualComponentName + deriving (Eq, Ord, Show) + +-- | The reason that a dependency is active. It identifies the package and any +-- flag and stanza choices that introduced the dependency. It contains +-- everything needed for creating ConflictSets or describing conflicts in solver +-- log messages. +data DependencyReason qpn = DependencyReason qpn (Map Flag FlagValue) (S.Set Stanza) + deriving (Functor, Eq, Show) + +-- | Print the reason that a dependency was introduced. +showDependencyReason :: DependencyReason QPN -> String +showDependencyReason (DependencyReason qpn flags stanzas) = + intercalate " " $ + showQPN qpn + : map (uncurry showFlagValue) (M.toList flags) + ++ map (\s -> showSBool s True) (S.toList stanzas) + +-- | Options for goal qualification (used in 'qualifyDeps') +-- +-- See also 'defaultQualifyOptions' +data QualifyOptions = QO { + -- | Do we have a version of base relying on another version of base? + qoBaseShim :: Bool + + -- Should dependencies of the setup script be treated as independent? + , qoSetupIndependent :: Bool + } + deriving Show + +-- | Apply built-in rules for package qualifiers +-- +-- Although the behaviour of 'qualifyDeps' depends on the 'QualifyOptions', +-- it is important that these 'QualifyOptions' are _static_. Qualification +-- does NOT depend on flag assignment; in other words, it behaves the same no +-- matter which choices the solver makes (modulo the global 'QualifyOptions'); +-- we rely on this in 'linkDeps' (see comment there). +-- +-- NOTE: It's the _dependencies_ of a package that may or may not be independent +-- from the package itself. Package flag choices must of course be consistent. 
+qualifyDeps :: QualifyOptions -> QPN -> FlaggedDeps PN -> FlaggedDeps QPN +qualifyDeps QO{..} (Q pp@(PackagePath ns q) pn) = go + where + go :: FlaggedDeps PN -> FlaggedDeps QPN + go = map go1 + + go1 :: FlaggedDep PN -> FlaggedDep QPN + go1 (Flagged fn nfo t f) = Flagged (fmap (Q pp) fn) nfo (go t) (go f) + go1 (Stanza sn t) = Stanza (fmap (Q pp) sn) (go t) + go1 (Simple dep comp) = Simple (goLDep dep comp) comp + + -- Suppose package B has a setup dependency on package A. + -- This will be recorded as something like + -- + -- > LDep (DependencyReason "B") (Dep (PkgComponent "A" ExposedLib) (Constrained AnyVersion)) + -- + -- Observe that when we qualify this dependency, we need to turn that + -- @"A"@ into @"B-setup.A"@, but we should not apply that same qualifier + -- to the DependencyReason. + goLDep :: LDep PN -> Component -> LDep QPN + goLDep (LDep dr dep) comp = LDep (fmap (Q pp) dr) (goD dep comp) + + goD :: Dep PN -> Component -> Dep QPN + goD (Ext ext) _ = Ext ext + goD (Lang lang) _ = Lang lang + goD (Pkg pkn vr) _ = Pkg pkn vr + goD (Dep dep@(PkgComponent qpn (ExposedExe _)) ci) _ = + Dep (Q (PackagePath ns (QualExe pn qpn)) <$> dep) ci + goD (Dep dep@(PkgComponent qpn ExposedLib) ci) comp + | qBase qpn = Dep (Q (PackagePath ns (QualBase pn)) <$> dep) ci + | qSetup comp = Dep (Q (PackagePath ns (QualSetup pn)) <$> dep) ci + | otherwise = Dep (Q (PackagePath ns inheritedQ ) <$> dep) ci + + -- If P has a setup dependency on Q, and Q has a regular dependency on R, then + -- we say that the 'Setup' qualifier is inherited: P has an (indirect) setup + -- dependency on R. We do not do this for the base qualifier however. + -- + -- The inherited qualifier is only used for regular dependencies; for setup + -- and base deppendencies we override the existing qualifier. See #3160 for + -- a detailed discussion. 
+ inheritedQ :: Qualifier + inheritedQ = case q of + QualSetup _ -> q + QualExe _ _ -> q + QualToplevel -> q + QualBase _ -> QualToplevel + + -- Should we qualify this goal with the 'Base' package path? + qBase :: PN -> Bool + qBase dep = qoBaseShim && unPackageName dep == "base" + + -- Should we qualify this goal with the 'Setup' package path? + qSetup :: Component -> Bool + qSetup comp = qoSetupIndependent && comp == ComponentSetup + +-- | Remove qualifiers from set of dependencies +-- +-- This is used during link validation: when we link package @Q.A@ to @Q'.A@, +-- then all dependencies @Q.B@ need to be linked to @Q'.B@. In order to compute +-- what to link these dependencies to, we need to requalify @Q.B@ to become +-- @Q'.B@; we do this by first removing all qualifiers and then calling +-- 'qualifyDeps' again. +unqualifyDeps :: FlaggedDeps QPN -> FlaggedDeps PN +unqualifyDeps = go + where + go :: FlaggedDeps QPN -> FlaggedDeps PN + go = map go1 + + go1 :: FlaggedDep QPN -> FlaggedDep PN + go1 (Flagged fn nfo t f) = Flagged (fmap unq fn) nfo (go t) (go f) + go1 (Stanza sn t) = Stanza (fmap unq sn) (go t) + go1 (Simple dep comp) = Simple (goLDep dep) comp + + goLDep :: LDep QPN -> LDep PN + goLDep (LDep dr dep) = LDep (fmap unq dr) (fmap unq dep) + + unq :: QPN -> PN + unq (Q _ pn) = pn + +{------------------------------------------------------------------------------- + Reverse dependency map +-------------------------------------------------------------------------------} + +-- | A map containing reverse dependencies between qualified +-- package names. +type RevDepMap = Map QPN [(Component, QPN)] + +{------------------------------------------------------------------------------- + Goals +-------------------------------------------------------------------------------} + +-- | A goal is just a solver variable paired with a reason. +-- The reason is only used for tracing. 
+data Goal qpn = Goal (Var qpn) (GoalReason qpn) + deriving (Eq, Show, Functor) + +-- | Reason why a goal is being added to a goal set. +data GoalReason qpn = + UserGoal -- introduced by a build target + | DependencyGoal (DependencyReason qpn) -- introduced by a package + deriving (Eq, Show, Functor) + +type QGoalReason = GoalReason QPN + +goalToVar :: Goal a -> Var a +goalToVar (Goal v _) = v + +-- | Compute a singleton conflict set from a 'Var' +varToConflictSet :: Var QPN -> ConflictSet +varToConflictSet = CS.singleton + +goalReasonToCS :: GoalReason QPN -> ConflictSet +goalReasonToCS UserGoal = CS.empty +goalReasonToCS (DependencyGoal dr) = dependencyReasonToCS dr + +-- | This function returns the solver variables responsible for the dependency. +-- It drops the flag and stanza values, which are only needed for log messages. +dependencyReasonToCS :: DependencyReason QPN -> ConflictSet +dependencyReasonToCS (DependencyReason qpn flags stanzas) = + CS.fromList $ P qpn : flagVars ++ map stanzaToVar (S.toList stanzas) + where + -- Filter out any flags that introduced the dependency with both values. + -- They don't need to be included in the conflict set, because changing the + -- flag value can't remove the dependency. + flagVars :: [Var QPN] + flagVars = [F (FN qpn fn) | (fn, fv) <- M.toList flags, fv /= FlagBoth] + + stanzaToVar :: Stanza -> Var QPN + stanzaToVar = S . 
SN qpn diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Explore.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Explore.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Explore.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Explore.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,220 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE ScopedTypeVariables #-} +module Distribution.Solver.Modular.Explore + ( backjump + , backjumpAndExplore + ) where + +import qualified Distribution.Solver.Types.Progress as P + +import Data.Foldable as F +import Data.List as L (foldl') +import Data.Map.Strict as M + +import Distribution.Solver.Modular.Assignment +import Distribution.Solver.Modular.Dependency +import Distribution.Solver.Modular.Log +import Distribution.Solver.Modular.Message +import qualified Distribution.Solver.Modular.PSQ as P +import qualified Distribution.Solver.Modular.ConflictSet as CS +import Distribution.Solver.Modular.RetryLog +import Distribution.Solver.Modular.Tree +import qualified Distribution.Solver.Modular.WeightedPSQ as W +import Distribution.Solver.Types.PackagePath +import Distribution.Solver.Types.Settings (EnableBackjumping(..), CountConflicts(..)) + +-- | This function takes the variable we're currently considering, a +-- last conflict set and a list of children's logs. Each log yields +-- either a solution or a conflict set. The result is a combined log for +-- the parent node that has explored a prefix of the children. +-- +-- We can stop traversing the children's logs if we find an individual +-- conflict set that does not contain the current variable. 
In this +-- case, we can just lift the conflict set to the current level, +-- because the current level cannot possibly have contributed to this +-- conflict, so no other choice at the current level would avoid the +-- conflict. +-- +-- If any of the children might contain a successful solution, we can +-- return it immediately. If all children contain conflict sets, we can +-- take the union as the combined conflict set. +-- +-- The last conflict set corresponds to the justification that we +-- have to choose this goal at all. There is a reason why we have +-- introduced the goal in the first place, and this reason is in conflict +-- with the (virtual) option not to choose anything for the current +-- variable. See also the comments for 'avoidSet'. +-- +backjump :: Maybe Int -> EnableBackjumping -> Var QPN + -> ConflictSet -> W.WeightedPSQ w k (ExploreState -> ConflictSetLog a) + -> ExploreState -> ConflictSetLog a +backjump mbj (EnableBackjumping enableBj) var lastCS xs = + F.foldr combine avoidGoal xs CS.empty + where + combine :: forall a . (ExploreState -> ConflictSetLog a) + -> (ConflictSet -> ExploreState -> ConflictSetLog a) + -> ConflictSet -> ExploreState -> ConflictSetLog a + combine x f csAcc es = retry (x es) next + where + next :: IntermediateFailure -> ConflictSetLog a + next BackjumpLimit = fromProgress (P.Fail BackjumpLimit) + next (NoSolution !cs es') + | enableBj && not (var `CS.member` cs) = skipLoggingBackjump cs es' + | otherwise = f (csAcc `CS.union` cs) es' + + -- This function represents the option to not choose a value for this goal. + avoidGoal :: ConflictSet -> ExploreState -> ConflictSetLog a + avoidGoal cs !es = + logBackjump (cs `CS.union` lastCS) $ + + -- Use 'lastCS' below instead of 'cs' since we do not want to + -- double-count the additionally accumulated conflicts. 
+ es { esConflictMap = updateCM lastCS (esConflictMap es) } + + logBackjump :: ConflictSet -> ExploreState -> ConflictSetLog a + logBackjump cs es = + failWith (Failure cs Backjump) $ + if reachedBjLimit (esBackjumps es) + then BackjumpLimit + else NoSolution cs es { esBackjumps = esBackjumps es + 1 } + where + reachedBjLimit = case mbj of + Nothing -> const False + Just limit -> (== limit) + + -- The solver does not count or log backjumps at levels where the conflict + -- set does not contain the current variable. Otherwise, there would be many + -- consecutive log messages about backjumping with the same conflict set. + skipLoggingBackjump :: ConflictSet -> ExploreState -> ConflictSetLog a + skipLoggingBackjump cs es = fromProgress $ P.Fail (NoSolution cs es) + +-- | The state that is read and written while exploring the search tree. +data ExploreState = ES { + esConflictMap :: !ConflictMap + , esBackjumps :: !Int + } + +data IntermediateFailure = + NoSolution ConflictSet ExploreState + | BackjumpLimit + +type ConflictSetLog = RetryLog Message IntermediateFailure + +getBestGoal :: ConflictMap -> P.PSQ (Goal QPN) a -> (Goal QPN, a) +getBestGoal cm = + P.maximumBy + ( flip (M.findWithDefault 0) cm + . (\ (Goal v _) -> v) + ) + +getFirstGoal :: P.PSQ (Goal QPN) a -> (Goal QPN, a) +getFirstGoal ts = + P.casePSQ ts + (error "getFirstGoal: empty goal choice") -- empty goal choice is an internal error + (\ k v _xs -> (k, v)) -- commit to the first goal choice + +updateCM :: ConflictSet -> ConflictMap -> ConflictMap +updateCM cs cm = + L.foldl' (\ cmc k -> M.insertWith (+) k 1 cmc) cm (CS.toList cs) + +-- | Record complete assignments on 'Done' nodes. 
+assign :: Tree d c -> Tree Assignment c +assign tree = cata go tree $ A M.empty M.empty M.empty + where + go :: TreeF d c (Assignment -> Tree Assignment c) + -> (Assignment -> Tree Assignment c) + go (FailF c fr) _ = Fail c fr + go (DoneF rdm _) a = Done rdm a + go (PChoiceF qpn rdm y ts) (A pa fa sa) = PChoice qpn rdm y $ W.mapWithKey f ts + where f (POption k _) r = r (A (M.insert qpn k pa) fa sa) + go (FChoiceF qfn rdm y t m d ts) (A pa fa sa) = FChoice qfn rdm y t m d $ W.mapWithKey f ts + where f k r = r (A pa (M.insert qfn k fa) sa) + go (SChoiceF qsn rdm y t ts) (A pa fa sa) = SChoice qsn rdm y t $ W.mapWithKey f ts + where f k r = r (A pa fa (M.insert qsn k sa)) + go (GoalChoiceF rdm ts) a = GoalChoice rdm $ fmap ($ a) ts + +-- | A tree traversal that simultaneously propagates conflict sets up +-- the tree from the leaves and creates a log. +exploreLog :: Maybe Int -> EnableBackjumping -> CountConflicts + -> Tree Assignment QGoalReason + -> ConflictSetLog (Assignment, RevDepMap) +exploreLog mbj enableBj (CountConflicts countConflicts) t = cata go t initES + where + getBestGoal' :: P.PSQ (Goal QPN) a -> ConflictMap -> (Goal QPN, a) + getBestGoal' + | countConflicts = \ ts cm -> getBestGoal cm ts + | otherwise = \ ts _ -> getFirstGoal ts + + go :: TreeF Assignment QGoalReason (ExploreState -> ConflictSetLog (Assignment, RevDepMap)) + -> (ExploreState -> ConflictSetLog (Assignment, RevDepMap)) + go (FailF c fr) = \ !es -> + let es' = es { esConflictMap = updateCM c (esConflictMap es) } + in failWith (Failure c fr) (NoSolution c es') + go (DoneF rdm a) = \ _ -> succeedWith Success (a, rdm) + go (PChoiceF qpn _ gr ts) = + backjump mbj enableBj (P qpn) (avoidSet (P qpn) gr) $ -- try children in order, + W.mapWithKey -- when descending ... + (\ k r es -> tryWith (TryP qpn k) (r es)) + ts + go (FChoiceF qfn _ gr _ _ _ ts) = + backjump mbj enableBj (F qfn) (avoidSet (F qfn) gr) $ -- try children in order, + W.mapWithKey -- when descending ... 
+ (\ k r es -> tryWith (TryF qfn k) (r es)) + ts + go (SChoiceF qsn _ gr _ ts) = + backjump mbj enableBj (S qsn) (avoidSet (S qsn) gr) $ -- try children in order, + W.mapWithKey -- when descending ... + (\ k r es -> tryWith (TryS qsn k) (r es)) + ts + go (GoalChoiceF _ ts) = \ es -> + let (k, v) = getBestGoal' ts (esConflictMap es) + in continueWith (Next k) (v es) + + initES = ES { + esConflictMap = M.empty + , esBackjumps = 0 + } + +-- | Build a conflict set corresponding to the (virtual) option not to +-- choose a solution for a goal at all. +-- +-- In the solver, the set of goals is not statically determined, but depends +-- on the choices we make. Therefore, when dealing with conflict sets, we +-- always have to consider that we could perhaps make choices that would +-- avoid the existence of the goal completely. +-- +-- Whenever we actually introduce a choice in the tree, we have already established +-- that the goal cannot be avoided. This is tracked in the "goal reason". +-- The choice to avoid the goal therefore is a conflict between the goal itself +-- and its goal reason. We build this set here, and pass it to the 'backjump' +-- function as the last conflict set. +-- +-- This has two effects: +-- +-- - In a situation where there are no choices available at all (this happens +-- if an unknown package is requested), the last conflict set becomes the +-- actual conflict set. +-- +-- - In a situation where all of the children's conflict sets contain the +-- current variable, the goal reason of the current node will be added to the +-- conflict set. +-- +avoidSet :: Var QPN -> QGoalReason -> ConflictSet +avoidSet var gr = + CS.union (CS.singleton var) (goalReasonToCS gr) + +-- | Interface. +-- +-- Takes as an argument a limit on allowed backjumps. If the limit is 'Nothing', +-- then infinitely many backjumps are allowed. If the limit is 'Just 0', +-- backtracking is completely disabled. 
+backjumpAndExplore :: Maybe Int + -> EnableBackjumping + -> CountConflicts + -> Tree d QGoalReason + -> RetryLog Message SolverFailure (Assignment, RevDepMap) +backjumpAndExplore mbj enableBj countConflicts = + mapFailure convertFailure . exploreLog mbj enableBj countConflicts . assign + where + convertFailure (NoSolution cs es) = ExhaustiveSearch cs (esConflictMap es) + convertFailure BackjumpLimit = BackjumpLimitReached diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Flag.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Flag.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Flag.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Flag.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,108 @@ +{-# LANGUAGE DeriveFunctor #-} +module Distribution.Solver.Modular.Flag + ( FInfo(..) + , Flag + , FlagInfo + , FN(..) + , QFN + , QSN + , Stanza + , SN(..) + , WeakOrTrivial(..) + , FlagValue(..) + , mkFlag + , showQFN + , showQFNBool + , showFlagValue + , showQSN + , showQSNBool + , showSBool + ) where + +import Data.Map as M +import Prelude hiding (pi) + +import qualified Distribution.PackageDescription as P -- from Cabal + +import Distribution.Solver.Types.Flag +import Distribution.Solver.Types.OptionalStanza +import Distribution.Solver.Types.PackagePath + +-- | Flag name. Consists of a package instance and the flag identifier itself. +data FN qpn = FN qpn Flag + deriving (Eq, Ord, Show, Functor) + +-- | Flag identifier. Just a string. +type Flag = P.FlagName + +-- | Stanza identifier. +type Stanza = OptionalStanza + +unFlag :: Flag -> String +unFlag = P.unFlagName + +mkFlag :: String -> Flag +mkFlag = P.mkFlagName + +-- | Flag info. Default value, whether the flag is manual, and +-- whether the flag is weak. 
Manual flags can only be set explicitly. +-- Weak flags are typically deferred by the solver. +data FInfo = FInfo { fdefault :: Bool, fmanual :: FlagType, fweak :: WeakOrTrivial } + deriving (Eq, Show) + +-- | Flag defaults. +type FlagInfo = Map Flag FInfo + +-- | Qualified flag name. +type QFN = FN QPN + +-- | Stanza name. Paired with a package name, much like a flag. +data SN qpn = SN qpn Stanza + deriving (Eq, Ord, Show, Functor) + +-- | Qualified stanza name. +type QSN = SN QPN + +-- | A property of flag and stanza choices that determines whether the +-- choice should be deferred in the solving process. +-- +-- A choice is called weak if we do want to defer it. This is the +-- case for flags that should be implied by what's currently installed on +-- the system, as opposed to flags that are used to explicitly enable or +-- disable some functionality. +-- +-- A choice is called trivial if it clearly does not matter. The +-- special case of triviality we actually consider is if there are no new +-- dependencies introduced by the choice. +newtype WeakOrTrivial = WeakOrTrivial { unWeakOrTrivial :: Bool } + deriving (Eq, Ord, Show) + +-- | Value shown for a flag in a solver log message. The message can refer to +-- only the true choice, only the false choice, or both choices. +data FlagValue = FlagTrue | FlagFalse | FlagBoth + deriving (Eq, Show) + +showQFNBool :: QFN -> Bool -> String +showQFNBool qfn@(FN qpn _f) b = showQPN qpn ++ ":" ++ showFBool qfn b + +showQSNBool :: QSN -> Bool -> String +showQSNBool (SN qpn s) b = showQPN qpn ++ ":" ++ showSBool s b + +showFBool :: FN qpn -> Bool -> String +showFBool (FN _ f) v = P.showFlagValue (f, v) + +-- | String representation of a flag-value pair. 
+showFlagValue :: P.FlagName -> FlagValue -> String +showFlagValue f FlagTrue = '+' : unFlag f +showFlagValue f FlagFalse = '-' : unFlag f +showFlagValue f FlagBoth = "+/-" ++ unFlag f + +showSBool :: Stanza -> Bool -> String +showSBool s True = "*" ++ showStanza s +showSBool s False = "!" ++ showStanza s + +showQFN :: QFN -> String +showQFN (FN qpn f) = showQPN qpn ++ ":" ++ unFlag f + +showQSN :: QSN -> String +showQSN (SN qpn s) = showQPN qpn ++ ":" ++ showStanza s diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/IndexConversion.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/IndexConversion.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/IndexConversion.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/IndexConversion.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,575 @@ +module Distribution.Solver.Modular.IndexConversion + ( convPIs + ) where + +import Data.List as L +import Data.Map.Strict (Map) +import qualified Data.Map.Strict as M +import Data.Maybe +import Data.Monoid as Mon +import Data.Set as S + +import Distribution.Compiler +import Distribution.InstalledPackageInfo as IPI +import Distribution.Package -- from Cabal +import Distribution.Simple.BuildToolDepends -- from Cabal +import Distribution.Simple.Utils (cabalVersion) -- from Cabal +import Distribution.Types.ExeDependency -- from Cabal +import Distribution.Types.PkgconfigDependency -- from Cabal +import Distribution.Types.ComponentName -- from Cabal +import Distribution.Types.UnqualComponentName -- from Cabal +import Distribution.Types.CondTree -- from Cabal +import Distribution.Types.MungedPackageId -- from Cabal +import Distribution.Types.MungedPackageName -- from Cabal +import Distribution.PackageDescription as PD -- from Cabal +import 
Distribution.PackageDescription.Configuration as PDC +import qualified Distribution.Simple.PackageIndex as SI +import Distribution.System +import Distribution.Types.ForeignLib + +import Distribution.Solver.Types.ComponentDeps + ( Component(..), componentNameToComponent ) +import Distribution.Solver.Types.Flag +import Distribution.Solver.Types.LabeledPackageConstraint +import Distribution.Solver.Types.OptionalStanza +import Distribution.Solver.Types.PackageConstraint +import qualified Distribution.Solver.Types.PackageIndex as CI +import Distribution.Solver.Types.Settings +import Distribution.Solver.Types.SourcePackage + +import Distribution.Solver.Modular.Dependency as D +import Distribution.Solver.Modular.Flag as F +import Distribution.Solver.Modular.Index +import Distribution.Solver.Modular.Package +import Distribution.Solver.Modular.Tree +import Distribution.Solver.Modular.Version + +-- | Convert both the installed package index and the source package +-- index into one uniform solver index. +-- +-- We use 'allPackagesBySourcePackageId' for the installed package index +-- because that returns us several instances of the same package and version +-- in order of preference. This allows us in principle to \"shadow\" +-- packages if there are several installed packages of the same version. +-- There are currently some shortcomings in both GHC and Cabal in +-- resolving these situations. However, the right thing to do is to +-- fix the problem there, so for now, shadowing is only activated if +-- explicitly requested. 
+convPIs :: OS -> Arch -> CompilerInfo -> Map PN [LabeledPackageConstraint] + -> ShadowPkgs -> StrongFlags -> SolveExecutables + -> SI.InstalledPackageIndex -> CI.PackageIndex (SourcePackage loc) + -> Index +convPIs os arch comp constraints sip strfl solveExes iidx sidx = + mkIndex $ + convIPI' sip iidx ++ convSPI' os arch comp constraints strfl solveExes sidx + +-- | Convert a Cabal installed package index to the simpler, +-- more uniform index format of the solver. +convIPI' :: ShadowPkgs -> SI.InstalledPackageIndex -> [(PN, I, PInfo)] +convIPI' (ShadowPkgs sip) idx = + -- apply shadowing whenever there are multiple installed packages with + -- the same version + [ maybeShadow (convIP idx pkg) + -- IMPORTANT to get internal libraries. See + -- Note [Index conversion with internal libraries] + | (_, pkgs) <- SI.allPackagesBySourcePackageIdAndLibName idx + , (maybeShadow, pkg) <- zip (id : repeat shadow) pkgs ] + where + + -- shadowing is recorded in the package info + shadow (pn, i, PInfo fdeps comps fds _) + | sip = (pn, i, PInfo fdeps comps fds (Just Shadowed)) + shadow x = x + +-- | Extract/recover the the package ID from an installed package info, and convert it to a solver's I. +convId :: InstalledPackageInfo -> (PN, I) +convId ipi = (pn, I ver $ Inst $ IPI.installedUnitId ipi) + where MungedPackageId mpn ver = mungedId ipi + -- HACK. See Note [Index conversion with internal libraries] + pn = mkPackageName (unMungedPackageName mpn) + +-- | Convert a single installed package into the solver-specific format. 
+convIP :: SI.InstalledPackageIndex -> InstalledPackageInfo -> (PN, I, PInfo) +convIP idx ipi = + case mapM (convIPId (DependencyReason pn M.empty S.empty) comp idx) (IPI.depends ipi) of + Nothing -> (pn, i, PInfo [] M.empty M.empty (Just Broken)) + Just fds -> ( pn + , i + , PInfo fds (M.singleton ExposedLib (IsBuildable True)) M.empty Nothing) + where + (pn, i) = convId ipi + -- 'sourceLibName' is unreliable, but for now we only really use this for + -- primary libs anyways + comp = componentNameToComponent $ libraryComponentName $ sourceLibName ipi +-- TODO: Installed packages should also store their encapsulations! + +-- Note [Index conversion with internal libraries] +-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +-- Something very interesting happens when we have internal libraries +-- in our index. In this case, we maybe have p-0.1, which itself +-- depends on the internal library p-internal ALSO from p-0.1. +-- Here's the danger: +-- +-- - If we treat both of these packages as having PN "p", +-- then the solver will try to pick one or the other, +-- but never both. +-- +-- - If we drop the internal packages, now p-0.1 has a +-- dangling dependency on an "installed" package we know +-- nothing about. Oops. +-- +-- An expedient hack is to put p-internal into cabal-install's +-- index as a MUNGED package name, so that it doesn't conflict +-- with anyone else (except other instances of itself). But +-- yet, we ought NOT to say that PNs in the solver are munged +-- package names, because they're not; for source packages, +-- we really will never see munged package names. +-- +-- The tension here is that the installed package index is actually +-- per library, but the solver is per package. We need to smooth +-- it over, and munging the package names is a pretty good way to +-- do it. + +-- | Convert dependencies specified by an installed package id into +-- flagged dependencies of the solver. 
+-- +-- May return Nothing if the package can't be found in the index. That +-- indicates that the original package having this dependency is broken +-- and should be ignored. +convIPId :: DependencyReason PN -> Component -> SI.InstalledPackageIndex -> UnitId -> Maybe (FlaggedDep PN) +convIPId dr comp idx ipid = + case SI.lookupUnitId idx ipid of + Nothing -> Nothing + Just ipi -> let (pn, i) = convId ipi + in Just (D.Simple (LDep dr (Dep (PkgComponent pn ExposedLib) (Fixed i))) comp) + -- NB: something we pick up from the + -- InstalledPackageIndex is NEVER an executable + +-- | Convert a cabal-install source package index to the simpler, +-- more uniform index format of the solver. +convSPI' :: OS -> Arch -> CompilerInfo -> Map PN [LabeledPackageConstraint] + -> StrongFlags -> SolveExecutables + -> CI.PackageIndex (SourcePackage loc) -> [(PN, I, PInfo)] +convSPI' os arch cinfo constraints strfl solveExes = + L.map (convSP os arch cinfo constraints strfl solveExes) . CI.allPackages + +-- | Convert a single source package into the solver-specific format. +convSP :: OS -> Arch -> CompilerInfo -> Map PN [LabeledPackageConstraint] + -> StrongFlags -> SolveExecutables -> SourcePackage loc -> (PN, I, PInfo) +convSP os arch cinfo constraints strfl solveExes (SourcePackage (PackageIdentifier pn pv) gpd _ _pl) = + let i = I pv InRepo + pkgConstraints = fromMaybe [] $ M.lookup pn constraints + in (pn, i, convGPD os arch cinfo pkgConstraints strfl solveExes pn gpd) + +-- We do not use 'flattenPackageDescription' or 'finalizePD' +-- from 'Distribution.PackageDescription.Configuration' here, because we +-- want to keep the condition tree, but simplify much of the test. + +-- | Convert a generic package description to a solver-specific 'PInfo'. 
+convGPD :: OS -> Arch -> CompilerInfo -> [LabeledPackageConstraint] + -> StrongFlags -> SolveExecutables -> PN -> GenericPackageDescription + -> PInfo +convGPD os arch cinfo constraints strfl solveExes pn + (GenericPackageDescription pkg flags mlib sub_libs flibs exes tests benchs) = + let + fds = flagInfo strfl flags + + -- | We have to be careful to filter out dependencies on + -- internal libraries, since they don't refer to real packages + -- and thus cannot actually be solved over. We'll do this + -- by creating a set of package names which are "internal" + -- and dropping them as we convert. + + ipns = S.fromList $ [ unqualComponentNameToPackageName nm + | (nm, _) <- sub_libs ] + + conv :: Mon.Monoid a => Component -> (a -> BuildInfo) -> DependencyReason PN -> + CondTree ConfVar [Dependency] a -> FlaggedDeps PN + conv comp getInfo dr = + convCondTree M.empty dr pkg os arch cinfo pn fds comp getInfo ipns solveExes . + PDC.addBuildableCondition getInfo + + initDR = DependencyReason pn M.empty S.empty + + flagged_deps + = concatMap (\ds -> conv ComponentLib libBuildInfo initDR ds) (maybeToList mlib) + ++ concatMap (\(nm, ds) -> conv (ComponentSubLib nm) libBuildInfo initDR ds) sub_libs + ++ concatMap (\(nm, ds) -> conv (ComponentFLib nm) foreignLibBuildInfo initDR ds) flibs + ++ concatMap (\(nm, ds) -> conv (ComponentExe nm) buildInfo initDR ds) exes + ++ prefix (Stanza (SN pn TestStanzas)) + (L.map (\(nm, ds) -> conv (ComponentTest nm) testBuildInfo (addStanza TestStanzas initDR) ds) + tests) + ++ prefix (Stanza (SN pn BenchStanzas)) + (L.map (\(nm, ds) -> conv (ComponentBench nm) benchmarkBuildInfo (addStanza BenchStanzas initDR) ds) + benchs) + ++ maybe [] (convSetupBuildInfo pn) (setupBuildInfo pkg) + + addStanza :: Stanza -> DependencyReason pn -> DependencyReason pn + addStanza s (DependencyReason pn' fs ss) = DependencyReason pn' fs (S.insert s ss) + + -- | We infer the maximally supported spec-version from @lib:Cabal@'s version + -- + -- As we cannot 
predict the future, we can only properly support + -- spec-versions predating (and including) the @lib:Cabal@ version + -- used by @cabal-install@. + -- + -- This relies on 'cabalVersion' having always at least 3 components to avoid + -- comparisons like @2.0.0 > 2.0@ which would result in confusing results. + -- + -- NOTE: Before we can switch to a /normalised/ spec-version + -- comparison (e.g. by truncating to 3 components, and removing + -- trailing zeroes) we'd have to make sure all other places where + -- the spec-version is compared against a bound do it + -- consistently. + maxSpecVer = cabalVersion + + -- | Required/declared spec-version of the package + -- + -- We don't truncate patch-levels, as specifying a patch-level + -- spec-version is discouraged and not supported anymore starting + -- with spec-version 2.2. + reqSpecVer = specVersion pkg + + -- | A too-new specVersion is turned into a global 'FailReason' + -- which prevents the solver from selecting this release (and if + -- forced to, emit a meaningful solver error message). + fr | reqSpecVer > maxSpecVer = Just (UnsupportedSpecVer reqSpecVer) + | otherwise = Nothing + + components :: Map ExposedComponent IsBuildable + components = M.fromList $ libComps ++ exeComps + where + libComps = [ (ExposedLib, IsBuildable $ isBuildable libBuildInfo lib) + | lib <- maybeToList mlib ] + exeComps = [ (ExposedExe name, IsBuildable $ isBuildable buildInfo exe) + | (name, exe) <- exes ] + isBuildable = isBuildableComponent os arch cinfo constraints + + in PInfo flagged_deps components fds fr + +-- | Returns true if the component is buildable in the given environment. +-- This function can give false-positives. For example, it only considers flags +-- that are set by unqualified flag constraints, and it doesn't check whether +-- the intra-package dependencies of a component are buildable. It is also +-- possible for the solver to later assign a value to an automatic flag that +-- makes the component unbuildable. 
+isBuildableComponent :: OS + -> Arch + -> CompilerInfo + -> [LabeledPackageConstraint] + -> (a -> BuildInfo) + -> CondTree ConfVar [Dependency] a + -> Bool +isBuildableComponent os arch cinfo constraints getInfo tree = + case simplifyCondition $ extractCondition (buildable . getInfo) tree of + Lit False -> False + _ -> True + where + flagAssignment :: [(FlagName, Bool)] + flagAssignment = + mconcat [ unFlagAssignment fa + | PackageConstraint (ScopeAnyQualifier _) (PackagePropertyFlags fa) + <- L.map unlabelPackageConstraint constraints] + + -- Simplify the condition, using the current environment. Most of this + -- function was copied from convBranch and + -- Distribution.Types.Condition.simplifyCondition. + simplifyCondition :: Condition ConfVar -> Condition ConfVar + simplifyCondition (Var (OS os')) = Lit (os == os') + simplifyCondition (Var (Arch arch')) = Lit (arch == arch') + simplifyCondition (Var (Impl cf cvr)) + | matchImpl (compilerInfoId cinfo) || + -- fixme: Nothing should be treated as unknown, rather than empty + -- list. This code should eventually be changed to either + -- support partial resolution of compiler flags or to + -- complain about incompletely configured compilers. 
+ any matchImpl (fromMaybe [] $ compilerInfoCompat cinfo) = Lit True + | otherwise = Lit False + where + matchImpl (CompilerId cf' cv) = cf == cf' && checkVR cvr cv + simplifyCondition (Var (Flag f)) + | Just b <- L.lookup f flagAssignment = Lit b + simplifyCondition (Var v) = Var v + simplifyCondition (Lit b) = Lit b + simplifyCondition (CNot c) = + case simplifyCondition c of + Lit True -> Lit False + Lit False -> Lit True + c' -> CNot c' + simplifyCondition (COr c d) = + case (simplifyCondition c, simplifyCondition d) of + (Lit False, d') -> d' + (Lit True, _) -> Lit True + (c', Lit False) -> c' + (_, Lit True) -> Lit True + (c', d') -> COr c' d' + simplifyCondition (CAnd c d) = + case (simplifyCondition c, simplifyCondition d) of + (Lit False, _) -> Lit False + (Lit True, d') -> d' + (_, Lit False) -> Lit False + (c', Lit True) -> c' + (c', d') -> CAnd c' d' + +-- | Create a flagged dependency tree from a list @fds@ of flagged +-- dependencies, using @f@ to form the tree node (@f@ will be +-- something like @Stanza sn@). +prefix :: (FlaggedDeps qpn -> FlaggedDep qpn) + -> [FlaggedDeps qpn] -> FlaggedDeps qpn +prefix _ [] = [] +prefix f fds = [f (concat fds)] + +-- | Convert flag information. Automatic flags are now considered weak +-- unless strong flags have been selected explicitly. +flagInfo :: StrongFlags -> [PD.Flag] -> FlagInfo +flagInfo (StrongFlags strfl) = + M.fromList . L.map (\ (MkFlag fn _ b m) -> (fn, FInfo b (flagType m) (weak m))) + where + weak m = WeakOrTrivial $ not (strfl || m) + flagType m = if m then Manual else Automatic + +-- | Internal package names, which should not be interpreted as true +-- dependencies. +type IPNs = Set PN + +-- | Convenience function to delete a 'Dependency' if it's +-- for a 'PN' that isn't actually real. +filterIPNs :: IPNs -> Dependency -> Maybe Dependency +filterIPNs ipns d@(Dependency pn _) + | S.notMember pn ipns = Just d + | otherwise = Nothing + +-- | Convert condition trees to flagged dependencies. 
Mutually +-- recursive with 'convBranch'. See 'convBranch' for an explanation +-- of all arguments preceeding the input 'CondTree'. +convCondTree :: Map FlagName Bool -> DependencyReason PN -> PackageDescription -> OS -> Arch -> CompilerInfo -> PN -> FlagInfo -> + Component -> + (a -> BuildInfo) -> + IPNs -> + SolveExecutables -> + CondTree ConfVar [Dependency] a -> FlaggedDeps PN +convCondTree flags dr pkg os arch cinfo pn fds comp getInfo ipns solveExes@(SolveExecutables solveExes') (CondNode info ds branches) = + -- Merge all library and build-tool dependencies at every level in + -- the tree of flagged dependencies. Otherwise 'extractCommon' + -- could create duplicate dependencies, and the number of + -- duplicates could grow exponentially from the leaves to the root + -- of the tree. + mergeSimpleDeps $ + L.map (\d -> D.Simple (convLibDep dr d) comp) + (mapMaybe (filterIPNs ipns) ds) -- unconditional package dependencies + ++ L.map (\e -> D.Simple (LDep dr (Ext e)) comp) (PD.allExtensions bi) -- unconditional extension dependencies + ++ L.map (\l -> D.Simple (LDep dr (Lang l)) comp) (PD.allLanguages bi) -- unconditional language dependencies + ++ L.map (\(PkgconfigDependency pkn vr) -> D.Simple (LDep dr (Pkg pkn vr)) comp) (PD.pkgconfigDepends bi) -- unconditional pkg-config dependencies + ++ concatMap (convBranch flags dr pkg os arch cinfo pn fds comp getInfo ipns solveExes) branches + -- build-tools dependencies + -- NB: Only include these dependencies if SolveExecutables + -- is True. It might be false in the legacy solver + -- codepath, in which case there won't be any record of + -- an executable we need. 
+ ++ [ D.Simple (convExeDep dr exeDep) comp + | solveExes' + , exeDep <- getAllToolDependencies pkg bi + , not $ isInternal pkg exeDep + ] + where + bi = getInfo info + +data SimpleFlaggedDepKey qpn = + SimpleFlaggedDepKey (PkgComponent qpn) Component + deriving (Eq, Ord) + +data SimpleFlaggedDepValue qpn = SimpleFlaggedDepValue (DependencyReason qpn) VR + +-- | Merge 'Simple' dependencies that apply to the same library or build-tool. +-- This function should be able to merge any two dependencies that can be merged +-- by extractCommon, in order to prevent the exponential growth of dependencies. +-- +-- Note that this function can merge dependencies that have different +-- DependencyReasons, which can make the DependencyReasons less precise. This +-- loss of precision only affects performance and log messages, not correctness. +-- However, when 'mergeSimpleDeps' is only called on dependencies at a single +-- location in the dependency tree, the only difference between +-- DependencyReasons should be flags that have value FlagBoth. Adding extra +-- flags with value FlagBoth should not affect performance, since they are not +-- added to the conflict set. The only downside is the possibility of the log +-- incorrectly saying that the flag contributed to excluding a specific version +-- of a dependency. For example, if +/-flagA introduces pkg >=2 and +/-flagB +-- introduces pkg <5, the merged dependency would mean that +-- +/-flagA and +/-flagB introduce pkg >=2 && <5, which would incorrectly imply +-- that +/-flagA excludes pkg-6. 
+mergeSimpleDeps :: Ord qpn => FlaggedDeps qpn -> FlaggedDeps qpn +mergeSimpleDeps deps = L.map (uncurry toFlaggedDep) (M.toList merged) ++ unmerged + where + (merged, unmerged) = L.foldl' f (M.empty, []) deps + where + f :: Ord qpn + => (Map (SimpleFlaggedDepKey qpn) (SimpleFlaggedDepValue qpn), FlaggedDeps qpn) + -> FlaggedDep qpn + -> (Map (SimpleFlaggedDepKey qpn) (SimpleFlaggedDepValue qpn), FlaggedDeps qpn) + f (merged', unmerged') (D.Simple (LDep dr (Dep dep (Constrained vr))) comp) = + ( M.insertWith mergeValues + (SimpleFlaggedDepKey dep comp) + (SimpleFlaggedDepValue dr vr) + merged' + , unmerged') + f (merged', unmerged') unmergeableDep = (merged', unmergeableDep : unmerged') + + mergeValues :: SimpleFlaggedDepValue qpn + -> SimpleFlaggedDepValue qpn + -> SimpleFlaggedDepValue qpn + mergeValues (SimpleFlaggedDepValue dr1 vr1) (SimpleFlaggedDepValue dr2 vr2) = + SimpleFlaggedDepValue (unionDRs dr1 dr2) (vr1 .&&. vr2) + + toFlaggedDep :: SimpleFlaggedDepKey qpn + -> SimpleFlaggedDepValue qpn + -> FlaggedDep qpn + toFlaggedDep (SimpleFlaggedDepKey dep comp) (SimpleFlaggedDepValue dr vr) = + D.Simple (LDep dr (Dep dep (Constrained vr))) comp + +-- | Branch interpreter. Mutually recursive with 'convCondTree'. +-- +-- Here, we try to simplify one of Cabal's condition tree branches into the +-- solver's flagged dependency format, which is weaker. Condition trees can +-- contain complex logical expression composed from flag choices and special +-- flags (such as architecture, or compiler flavour). We try to evaluate the +-- special flags and subsequently simplify to a tree that only depends on +-- simple flag choices. +-- +-- This function takes a number of arguments: +-- +-- 1. A map of flag values that have already been chosen. It allows +-- convBranch to avoid creating nested FlaggedDeps that are +-- controlled by the same flag and avoid creating DependencyReasons with +-- conflicting values for the same flag. +-- +-- 2. 
The DependencyReason calculated at this point in the tree of +-- conditionals. The flag values in the DependencyReason are similar to +-- the values in the map above, except for the use of FlagBoth. +-- +-- 3. Some pre dependency-solving known information ('OS', 'Arch', +-- 'CompilerInfo') for @os()@, @arch()@ and @impl()@ variables, +-- +-- 4. The package name @'PN'@ which this condition tree +-- came from, so that we can correctly associate @flag()@ +-- variables with the correct package name qualifier, +-- +-- 5. The flag defaults 'FlagInfo' so that we can populate +-- 'Flagged' dependencies with 'FInfo', +-- +-- 6. The name of the component 'Component' so we can record where +-- the fine-grained information about where the component came +-- from (see 'convCondTree'), and +-- +-- 7. A selector to extract the 'BuildInfo' from the leaves of +-- the 'CondTree' (which actually contains the needed +-- dependency information.) +-- +-- 8. The set of package names which should be considered internal +-- dependencies, and thus not handled as dependencies. 
+convBranch :: Map FlagName Bool + -> DependencyReason PN + -> PackageDescription + -> OS + -> Arch + -> CompilerInfo + -> PN + -> FlagInfo + -> Component + -> (a -> BuildInfo) + -> IPNs + -> SolveExecutables + -> CondBranch ConfVar [Dependency] a + -> FlaggedDeps PN +convBranch flags dr pkg os arch cinfo pn fds comp getInfo ipns solveExes (CondBranch c' t' mf') = + go c' + (\flags' dr' -> convCondTree flags' dr' pkg os arch cinfo pn fds comp getInfo ipns solveExes t') + (\flags' dr' -> maybe [] (convCondTree flags' dr' pkg os arch cinfo pn fds comp getInfo ipns solveExes) mf') + flags dr + where + go :: Condition ConfVar + -> (Map FlagName Bool -> DependencyReason PN -> FlaggedDeps PN) + -> (Map FlagName Bool -> DependencyReason PN -> FlaggedDeps PN) + -> Map FlagName Bool -> DependencyReason PN -> FlaggedDeps PN + go (Lit True) t _ = t + go (Lit False) _ f = f + go (CNot c) t f = go c f t + go (CAnd c d) t f = go c (go d t f) f + go (COr c d) t f = go c t (go d t f) + go (Var (Flag fn)) t f = \flags' -> + case M.lookup fn flags' of + Just True -> t flags' + Just False -> f flags' + Nothing -> \dr' -> + -- Add each flag to the DependencyReason for all dependencies below, + -- including any extracted dependencies. Extracted dependencies are + -- introduced by both flag values (FlagBoth). Note that we don't + -- actually need to add the flag to the extracted dependencies for + -- correct backjumping; the information only improves log messages + -- by giving the user the full reason for each dependency. + let addFlagValue v = addFlagToDependencyReason fn v dr' + addFlag v = M.insert fn v flags' + in extractCommon (t (addFlag True) (addFlagValue FlagBoth)) + (f (addFlag False) (addFlagValue FlagBoth)) + ++ [ Flagged (FN pn fn) (fds M.! 
fn) (t (addFlag True) (addFlagValue FlagTrue)) + (f (addFlag False) (addFlagValue FlagFalse)) ] + go (Var (OS os')) t f + | os == os' = t + | otherwise = f + go (Var (Arch arch')) t f + | arch == arch' = t + | otherwise = f + go (Var (Impl cf cvr)) t f + | matchImpl (compilerInfoId cinfo) || + -- fixme: Nothing should be treated as unknown, rather than empty + -- list. This code should eventually be changed to either + -- support partial resolution of compiler flags or to + -- complain about incompletely configured compilers. + any matchImpl (fromMaybe [] $ compilerInfoCompat cinfo) = t + | otherwise = f + where + matchImpl (CompilerId cf' cv) = cf == cf' && checkVR cvr cv + + addFlagToDependencyReason :: FlagName -> FlagValue -> DependencyReason pn -> DependencyReason pn + addFlagToDependencyReason fn v (DependencyReason pn' fs ss) = + DependencyReason pn' (M.insert fn v fs) ss + + -- If both branches contain the same package as a simple dep, we lift it to + -- the next higher-level, but with the union of version ranges. This + -- heuristic together with deferring flag choices will then usually first + -- resolve this package, and try an already installed version before imposing + -- a default flag choice that might not be what we want. + -- + -- Note that we make assumptions here on the form of the dependencies that + -- can occur at this point. In particular, no occurrences of Fixed, as all + -- dependencies below this point have been generated using 'convLibDep'. + -- + -- WARNING: This is quadratic! + extractCommon :: Eq pn => FlaggedDeps pn -> FlaggedDeps pn -> FlaggedDeps pn + extractCommon ps ps' = + -- Union the DependencyReasons, because the extracted dependency can be + -- avoided by removing the dependency from either side of the + -- conditional. + [ D.Simple (LDep (unionDRs vs1 vs2) (Dep dep1 (Constrained $ vr1 .||. 
vr2))) comp + | D.Simple (LDep vs1 (Dep dep1 (Constrained vr1))) _ <- ps + , D.Simple (LDep vs2 (Dep dep2 (Constrained vr2))) _ <- ps' + , dep1 == dep2 + ] + +-- | Merge DependencyReasons by unioning their variables. +unionDRs :: DependencyReason pn -> DependencyReason pn -> DependencyReason pn +unionDRs (DependencyReason pn' fs1 ss1) (DependencyReason _ fs2 ss2) = + DependencyReason pn' (M.union fs1 fs2) (S.union ss1 ss2) + +-- | Convert a Cabal dependency on a library to a solver-specific dependency. +convLibDep :: DependencyReason PN -> Dependency -> LDep PN +convLibDep dr (Dependency pn vr) = LDep dr $ Dep (PkgComponent pn ExposedLib) (Constrained vr) + +-- | Convert a Cabal dependency on an executable (build-tools) to a solver-specific dependency. +convExeDep :: DependencyReason PN -> ExeDependency -> LDep PN +convExeDep dr (ExeDependency pn exe vr) = LDep dr $ Dep (PkgComponent pn (ExposedExe exe)) (Constrained vr) + +-- | Convert setup dependencies +convSetupBuildInfo :: PN -> SetupBuildInfo -> FlaggedDeps PN +convSetupBuildInfo pn nfo = + L.map (\d -> D.Simple (convLibDep (DependencyReason pn M.empty S.empty) d) ComponentSetup) + (PD.setupDepends nfo) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Index.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Index.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Index.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Index.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,55 @@ +module Distribution.Solver.Modular.Index + ( Index + , PInfo(..) + , IsBuildable(..) 
+ , defaultQualifyOptions + , mkIndex + ) where + +import Data.List as L +import Data.Map as M +import Prelude hiding (pi) + +import Distribution.Solver.Modular.Dependency +import Distribution.Solver.Modular.Flag +import Distribution.Solver.Modular.Package +import Distribution.Solver.Modular.Tree + +-- | An index contains information about package instances. This is a nested +-- dictionary. Package names are mapped to instances, which in turn is mapped +-- to info. +type Index = Map PN (Map I PInfo) + +-- | Info associated with a package instance. +-- Currently, dependencies, component names, flags and failure reasons. +-- The component map records whether any components are unbuildable in the +-- current environment (compiler, os, arch, and global flag constraints). +-- Packages that have a failure reason recorded for them are disabled +-- globally, for reasons external to the solver. We currently use this +-- for shadowing which essentially is a GHC limitation, and for +-- installed packages that are broken. +data PInfo = PInfo (FlaggedDeps PN) (Map ExposedComponent IsBuildable) FlagInfo (Maybe FailReason) + +-- | Whether a component is made unbuildable by a "buildable: False" field. +newtype IsBuildable = IsBuildable Bool + +mkIndex :: [(PN, I, PInfo)] -> Index +mkIndex xs = M.map M.fromList (groupMap (L.map (\ (pn, i, pi) -> (pn, (i, pi))) xs)) + +groupMap :: Ord a => [(a, b)] -> Map a [b] +groupMap xs = M.fromListWith (flip (++)) (L.map (\ (x, y) -> (x, [y])) xs) + +defaultQualifyOptions :: Index -> QualifyOptions +defaultQualifyOptions idx = QO { + qoBaseShim = or [ dep == base + | -- Find all versions of base .. + Just is <- [M.lookup base idx] + -- .. which are installed .. + , (I _ver (Inst _), PInfo deps _comps _flagNfo _fr) <- M.toList is + -- .. and flatten all their dependencies .. 
+ , (LDep _ (Dep (PkgComponent dep _) _ci), _comp) <- flattenFlaggedDeps deps + ] + , qoSetupIndependent = True + } + where + base = mkPackageName "base" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/LabeledGraph.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/LabeledGraph.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/LabeledGraph.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/LabeledGraph.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,116 @@ +-- | Wrapper around Data.Graph with support for edge labels +{-# LANGUAGE ScopedTypeVariables #-} +module Distribution.Solver.Modular.LabeledGraph ( + -- * Graphs + Graph + , Vertex + -- ** Building graphs + , graphFromEdges + , graphFromEdges' + , buildG + , transposeG + -- ** Graph properties + , vertices + , edges + -- ** Operations on the underlying unlabeled graph + , forgetLabels + , topSort + ) where + +import Data.Array +import Data.Graph (Vertex, Bounds) +import Data.List (sortBy) +import Data.Maybe (mapMaybe) +import qualified Data.Graph as G + +{------------------------------------------------------------------------------- + Types +-------------------------------------------------------------------------------} + +type Graph e = Array Vertex [(e, Vertex)] +type Edge e = (Vertex, e, Vertex) + +{------------------------------------------------------------------------------- + Building graphs +-------------------------------------------------------------------------------} + +-- | Construct an edge-labeled graph +-- +-- This is a simple adaptation of the definition in Data.Graph +graphFromEdges :: forall key node edge. 
Ord key + => [ (node, key, [(edge, key)]) ] + -> ( Graph edge + , Vertex -> (node, key, [(edge, key)]) + , key -> Maybe Vertex + ) +graphFromEdges edges0 = + (graph, \v -> vertex_map ! v, key_vertex) + where + max_v = length edges0 - 1 + bounds0 = (0, max_v) :: (Vertex, Vertex) + sorted_edges = sortBy lt edges0 + edges1 = zip [0..] sorted_edges + + graph = array bounds0 [(v, (mapMaybe mk_edge ks)) + | (v, (_, _, ks)) <- edges1] + key_map = array bounds0 [(v, k ) + | (v, (_, k, _ )) <- edges1] + vertex_map = array bounds0 edges1 + + (_,k1,_) `lt` (_,k2,_) = k1 `compare` k2 + + mk_edge :: (edge, key) -> Maybe (edge, Vertex) + mk_edge (edge, key) = do v <- key_vertex key ; return (edge, v) + + -- returns Nothing for non-interesting vertices + key_vertex :: key -> Maybe Vertex + key_vertex k = findVertex 0 max_v + where + findVertex a b + | a > b = Nothing + | otherwise = case compare k (key_map ! mid) of + LT -> findVertex a (mid-1) + EQ -> Just mid + GT -> findVertex (mid+1) b + where + mid = a + (b - a) `div` 2 + +graphFromEdges' :: Ord key + => [ (node, key, [(edge, key)]) ] + -> ( Graph edge + , Vertex -> (node, key, [(edge, key)]) + ) +graphFromEdges' x = (a,b) + where + (a,b,_) = graphFromEdges x + +transposeG :: Graph e -> Graph e +transposeG g = buildG (bounds g) (reverseE g) + +buildG :: Bounds -> [Edge e] -> Graph e +buildG bounds0 edges0 = accumArray (flip (:)) [] bounds0 (map reassoc edges0) + where + reassoc (v, e, w) = (v, (e, w)) + +reverseE :: Graph e -> [Edge e] +reverseE g = [ (w, e, v) | (v, e, w) <- edges g ] + +{------------------------------------------------------------------------------- + Graph properties +-------------------------------------------------------------------------------} + +vertices :: Graph e -> [Vertex] +vertices = indices + +edges :: Graph e -> [Edge e] +edges g = [ (v, e, w) | v <- vertices g, (e, w) <- g!v ] + +{------------------------------------------------------------------------------- + Operations on the underlying 
unlabelled graph +-------------------------------------------------------------------------------} + +forgetLabels :: Graph e -> G.Graph +forgetLabels = fmap (map snd) + +topSort :: Graph e -> [Vertex] +topSort = G.topSort . forgetLabels diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Linking.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Linking.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Linking.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Linking.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,518 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +module Distribution.Solver.Modular.Linking ( + validateLinking + ) where + +import Prelude () +import Distribution.Solver.Compat.Prelude hiding (get,put) + +import Control.Exception (assert) +import Control.Monad.Reader +import Control.Monad.State +import Data.Function (on) +import Data.Map ((!)) +import Data.Set (Set) +import qualified Data.Map as M +import qualified Data.Set as S +import qualified Data.Traversable as T + +import Distribution.Client.Utils.Assertion +import Distribution.Solver.Modular.Assignment +import Distribution.Solver.Modular.Dependency +import Distribution.Solver.Modular.Flag +import Distribution.Solver.Modular.Index +import Distribution.Solver.Modular.Package +import Distribution.Solver.Modular.Tree +import qualified Distribution.Solver.Modular.ConflictSet as CS +import qualified Distribution.Solver.Modular.WeightedPSQ as W + +import Distribution.Solver.Types.OptionalStanza +import Distribution.Solver.Types.PackagePath +import Distribution.Types.GenericPackageDescription (unFlagName) + +{------------------------------------------------------------------------------- + Validation + + 
Validation of links is a separate pass that's performed after normal + validation. Validation of links checks that if the tree indicates that a + package is linked, then everything underneath that choice really matches the + package we have linked to. + + This is interesting because it isn't unidirectional. Consider that we've + chosen a.foo to be version 1 and later decide that b.foo should link to a.foo. + Now foo depends on bar. Because a.foo and b.foo are linked, it's required that + a.bar and b.bar are also linked. However, it's not required that we actually + choose a.bar before b.bar. Goal choice order is relatively free. It's possible + that we choose a.bar first, but also possible that we choose b.bar first. In + both cases, we have to recognize that we have freedom of choice for the first + of the two, but no freedom of choice for the second. + + This is what LinkGroups are all about. Using LinkGroup, we can record (in the + situation above) that a.bar and b.bar need to be linked even if we haven't + chosen either of them yet. +-------------------------------------------------------------------------------} + +data ValidateState = VS { + vsIndex :: Index + , vsLinks :: Map QPN LinkGroup + , vsFlags :: FAssignment + , vsStanzas :: SAssignment + , vsQualifyOptions :: QualifyOptions + + -- Saved qualified dependencies. Every time 'validateLinking' makes a + -- package choice, it qualifies the package's dependencies and saves them in + -- this map. Then the qualified dependencies are available for subsequent + -- flag and stanza choices for the same package. + , vsSaved :: Map QPN (FlaggedDeps QPN) + } + +type Validate = Reader ValidateState + +-- | Validate linked packages +-- +-- Verify that linked packages have +-- +-- * Linked dependencies, +-- * Equal flag assignments +-- * Equal stanza assignments +validateLinking :: Index -> Tree d c -> Tree d c +validateLinking index = (`runReader` initVS) . 
cata go + where + go :: TreeF d c (Validate (Tree d c)) -> Validate (Tree d c) + + go (PChoiceF qpn rdm gr cs) = + PChoice qpn rdm gr <$> T.sequence (W.mapWithKey (goP qpn) cs) + go (FChoiceF qfn rdm gr t m d cs) = + FChoice qfn rdm gr t m d <$> T.sequence (W.mapWithKey (goF qfn) cs) + go (SChoiceF qsn rdm gr t cs) = + SChoice qsn rdm gr t <$> T.sequence (W.mapWithKey (goS qsn) cs) + + -- For the other nodes we just recurse + go (GoalChoiceF rdm cs) = GoalChoice rdm <$> T.sequence cs + go (DoneF revDepMap s) = return $ Done revDepMap s + go (FailF conflictSet failReason) = return $ Fail conflictSet failReason + + -- Package choices + goP :: QPN -> POption -> Validate (Tree d c) -> Validate (Tree d c) + goP qpn@(Q _pp pn) opt@(POption i _) r = do + vs <- ask + let PInfo deps _ _ _ = vsIndex vs ! pn ! i + qdeps = qualifyDeps (vsQualifyOptions vs) qpn deps + newSaved = M.insert qpn qdeps (vsSaved vs) + case execUpdateState (pickPOption qpn opt qdeps) vs of + Left (cs, err) -> return $ Fail cs (DependenciesNotLinked err) + Right vs' -> local (const vs' { vsSaved = newSaved }) r + + -- Flag choices + goF :: QFN -> Bool -> Validate (Tree d c) -> Validate (Tree d c) + goF qfn b r = do + vs <- ask + case execUpdateState (pickFlag qfn b) vs of + Left (cs, err) -> return $ Fail cs (DependenciesNotLinked err) + Right vs' -> local (const vs') r + + -- Stanza choices (much the same as flag choices) + goS :: QSN -> Bool -> Validate (Tree d c) -> Validate (Tree d c) + goS qsn b r = do + vs <- ask + case execUpdateState (pickStanza qsn b) vs of + Left (cs, err) -> return $ Fail cs (DependenciesNotLinked err) + Right vs' -> local (const vs') r + + initVS :: ValidateState + initVS = VS { + vsIndex = index + , vsLinks = M.empty + , vsFlags = M.empty + , vsStanzas = M.empty + , vsQualifyOptions = defaultQualifyOptions index + , vsSaved = M.empty + } + +{------------------------------------------------------------------------------- + Updating the validation state 
+-------------------------------------------------------------------------------} + +type Conflict = (ConflictSet, String) + +newtype UpdateState a = UpdateState { + unUpdateState :: StateT ValidateState (Either Conflict) a + } + deriving (Functor, Applicative, Monad) + +instance MonadState ValidateState UpdateState where + get = UpdateState $ get + put st = UpdateState $ do + expensiveAssert (lgInvariant $ vsLinks st) $ return () + put st + +lift' :: Either Conflict a -> UpdateState a +lift' = UpdateState . lift + +conflict :: Conflict -> UpdateState a +conflict = lift' . Left + +execUpdateState :: UpdateState () -> ValidateState -> Either Conflict ValidateState +execUpdateState = execStateT . unUpdateState + +pickPOption :: QPN -> POption -> FlaggedDeps QPN -> UpdateState () +pickPOption qpn (POption i Nothing) _deps = pickConcrete qpn i +pickPOption qpn (POption i (Just pp')) deps = pickLink qpn i pp' deps + +pickConcrete :: QPN -> I -> UpdateState () +pickConcrete qpn@(Q pp _) i = do + vs <- get + case M.lookup qpn (vsLinks vs) of + -- Package is not yet in a LinkGroup. Create a new singleton link group. + Nothing -> do + let lg = lgSingleton qpn (Just $ PI pp i) + updateLinkGroup lg + + -- Package is already in a link group. Since we are picking a concrete + -- instance here, it must by definition be the canonical package. 
+ Just lg -> + makeCanonical lg qpn i + +pickLink :: QPN -> I -> PackagePath -> FlaggedDeps QPN -> UpdateState () +pickLink qpn@(Q _pp pn) i pp' deps = do + vs <- get + + -- The package might already be in a link group + -- (because one of its reverse dependencies is) + let lgSource = case M.lookup qpn (vsLinks vs) of + Nothing -> lgSingleton qpn Nothing + Just lg -> lg + + -- Find the link group for the package we are linking to + -- + -- Since the builder never links to a package without having first picked a + -- concrete instance for that package, and since we create singleton link + -- groups for concrete instances, this link group must exist (and must + -- in fact already have a canonical member). + let target = Q pp' pn + lgTarget = vsLinks vs ! target + + -- Verify here that the member we add is in fact for the same package and + -- matches the version of the canonical instance. However, violations of + -- these checks would indicate a bug in the linker, not a true conflict. + let sanityCheck :: Maybe (PI PackagePath) -> Bool + sanityCheck Nothing = False + sanityCheck (Just (PI _ canonI)) = pn == lgPackage lgTarget && i == canonI + assert (sanityCheck (lgCanon lgTarget)) $ return () + + -- Merge the two link groups (updateLinkGroup will propagate the change) + lgTarget' <- lift' $ lgMerge CS.empty lgSource lgTarget + updateLinkGroup lgTarget' + + -- Make sure all dependencies are linked as well + linkDeps target deps + +makeCanonical :: LinkGroup -> QPN -> I -> UpdateState () +makeCanonical lg qpn@(Q pp _) i = + case lgCanon lg of + -- There is already a canonical member. Fail. + Just _ -> + conflict ( CS.insert (P qpn) (lgConflictSet lg) + , "cannot make " ++ showQPN qpn + ++ " canonical member of " ++ showLinkGroup lg + ) + Nothing -> do + let lg' = lg { lgCanon = Just (PI pp i) } + updateLinkGroup lg' + +-- | Link the dependencies of linked parents. 
+-- +-- When we decide to link one package against another we walk through the +-- package's direct depedencies and make sure that they're all linked to each +-- other by merging their link groups (or creating new singleton link groups if +-- they don't have link groups yet). We do not need to do this recursively, +-- because having the direct dependencies in a link group means that we must +-- have already made or will make sooner or later a link choice for one of these +-- as well, and cover their dependencies at that point. +linkDeps :: QPN -> FlaggedDeps QPN -> UpdateState () +linkDeps target = \deps -> do + -- linkDeps is called in two places: when we first link one package to + -- another, and when we discover more dependencies of an already linked + -- package after doing some flag assignment. It is therefore important that + -- flag assignments cannot influence _how_ dependencies are qualified; + -- fortunately this is a documented property of 'qualifyDeps'. + rdeps <- requalify deps + go deps rdeps + where + go :: FlaggedDeps QPN -> FlaggedDeps QPN -> UpdateState () + go = zipWithM_ go1 + + go1 :: FlaggedDep QPN -> FlaggedDep QPN -> UpdateState () + go1 dep rdep = case (dep, rdep) of + (Simple (LDep dr1 (Dep (PkgComponent qpn _) _)) _, ~(Simple (LDep dr2 (Dep (PkgComponent qpn' _) _)) _)) -> do + vs <- get + let lg = M.findWithDefault (lgSingleton qpn Nothing) qpn $ vsLinks vs + lg' = M.findWithDefault (lgSingleton qpn' Nothing) qpn' $ vsLinks vs + lg'' <- lift' $ lgMerge ((CS.union `on` dependencyReasonToCS) dr1 dr2) lg lg' + updateLinkGroup lg'' + (Flagged fn _ t f, ~(Flagged _ _ t' f')) -> do + vs <- get + case M.lookup fn (vsFlags vs) of + Nothing -> return () -- flag assignment not yet known + Just True -> go t t' + Just False -> go f f' + (Stanza sn t, ~(Stanza _ t')) -> do + vs <- get + case M.lookup sn (vsStanzas vs) of + Nothing -> return () -- stanza assignment not yet known + Just True -> go t t' + Just False -> return () -- stanza not enabled; 
no new deps + -- For extensions and language dependencies, there is nothing to do. + -- No choice is involved, just checking, so there is nothing to link. + -- The same goes for for pkg-config constraints. + (Simple (LDep _ (Ext _)) _, _) -> return () + (Simple (LDep _ (Lang _)) _, _) -> return () + (Simple (LDep _ (Pkg _ _)) _, _) -> return () + + requalify :: FlaggedDeps QPN -> UpdateState (FlaggedDeps QPN) + requalify deps = do + vs <- get + return $ qualifyDeps (vsQualifyOptions vs) target (unqualifyDeps deps) + +pickFlag :: QFN -> Bool -> UpdateState () +pickFlag qfn b = do + modify $ \vs -> vs { vsFlags = M.insert qfn b (vsFlags vs) } + verifyFlag qfn + linkNewDeps (F qfn) b + +pickStanza :: QSN -> Bool -> UpdateState () +pickStanza qsn b = do + modify $ \vs -> vs { vsStanzas = M.insert qsn b (vsStanzas vs) } + verifyStanza qsn + linkNewDeps (S qsn) b + +-- | Link dependencies that we discover after making a flag or stanza choice. +-- +-- When we make a flag choice for a package, then new dependencies for that +-- package might become available. If the package under consideration is in a +-- non-trivial link group, then these new dependencies have to be linked as +-- well. In linkNewDeps, we compute such new dependencies and make sure they are +-- linked. +linkNewDeps :: Var QPN -> Bool -> UpdateState () +linkNewDeps var b = do + vs <- get + let qpn@(Q pp pn) = varPN var + qdeps = vsSaved vs ! qpn + lg = vsLinks vs ! 
qpn + newDeps = findNewDeps vs qdeps + linkedTo = S.delete pp (lgMembers lg) + forM_ (S.toList linkedTo) $ \pp' -> linkDeps (Q pp' pn) newDeps + where + findNewDeps :: ValidateState -> FlaggedDeps QPN -> FlaggedDeps QPN + findNewDeps vs = concatMap (findNewDeps' vs) + + findNewDeps' :: ValidateState -> FlaggedDep QPN -> FlaggedDeps QPN + findNewDeps' _ (Simple _ _) = [] + findNewDeps' vs (Flagged qfn _ t f) = + case (F qfn == var, M.lookup qfn (vsFlags vs)) of + (True, _) -> if b then t else f + (_, Nothing) -> [] -- not yet known + (_, Just b') -> findNewDeps vs (if b' then t else f) + findNewDeps' vs (Stanza qsn t) = + case (S qsn == var, M.lookup qsn (vsStanzas vs)) of + (True, _) -> if b then t else [] + (_, Nothing) -> [] -- not yet known + (_, Just b') -> findNewDeps vs (if b' then t else []) + +updateLinkGroup :: LinkGroup -> UpdateState () +updateLinkGroup lg = do + verifyLinkGroup lg + modify $ \vs -> vs { + vsLinks = M.fromList (map aux (S.toList (lgMembers lg))) + `M.union` vsLinks vs + } + where + aux pp = (Q pp (lgPackage lg), lg) + +{------------------------------------------------------------------------------- + Verification +-------------------------------------------------------------------------------} + +verifyLinkGroup :: LinkGroup -> UpdateState () +verifyLinkGroup lg = + case lgInstance lg of + -- No instance picked yet. Nothing to verify + Nothing -> + return () + + -- We picked an instance. Verify flags and stanzas + -- TODO: The enumeration of OptionalStanza names is very brittle; + -- if a constructor is added to the datatype we won't notice it here + Just i -> do + vs <- get + let PInfo _deps _exes finfo _ = vsIndex vs ! lgPackage lg ! 
i + flags = M.keys finfo + stanzas = [TestStanzas, BenchStanzas] + forM_ flags $ \fn -> do + let flag = FN (lgPackage lg) fn + verifyFlag' flag lg + forM_ stanzas $ \sn -> do + let stanza = SN (lgPackage lg) sn + verifyStanza' stanza lg + +verifyFlag :: QFN -> UpdateState () +verifyFlag (FN qpn@(Q _pp pn) fn) = do + vs <- get + -- We can only pick a flag after picking an instance; link group must exist + verifyFlag' (FN pn fn) (vsLinks vs ! qpn) + +verifyStanza :: QSN -> UpdateState () +verifyStanza (SN qpn@(Q _pp pn) sn) = do + vs <- get + -- We can only pick a stanza after picking an instance; link group must exist + verifyStanza' (SN pn sn) (vsLinks vs ! qpn) + +-- | Verify that all packages in the link group agree on flag assignments +-- +-- For the given flag and the link group, obtain all assignments for the flag +-- that have already been made for link group members, and check that they are +-- equal. +verifyFlag' :: FN PN -> LinkGroup -> UpdateState () +verifyFlag' (FN pn fn) lg = do + vs <- get + let flags = map (\pp' -> FN (Q pp' pn) fn) (S.toList (lgMembers lg)) + vals = map (`M.lookup` vsFlags vs) flags + if allEqual (catMaybes vals) -- We ignore not-yet assigned flags + then return () + else conflict ( CS.fromList (map F flags) `CS.union` lgConflictSet lg + , "flag \"" ++ unFlagName fn ++ "\" incompatible" + ) + +-- | Verify that all packages in the link group agree on stanza assignments +-- +-- For the given stanza and the link group, obtain all assignments for the +-- stanza that have already been made for link group members, and check that +-- they are equal. +-- +-- This function closely mirrors 'verifyFlag''. 
+verifyStanza' :: SN PN -> LinkGroup -> UpdateState () +verifyStanza' (SN pn sn) lg = do + vs <- get + let stanzas = map (\pp' -> SN (Q pp' pn) sn) (S.toList (lgMembers lg)) + vals = map (`M.lookup` vsStanzas vs) stanzas + if allEqual (catMaybes vals) -- We ignore not-yet assigned stanzas + then return () + else conflict ( CS.fromList (map S stanzas) `CS.union` lgConflictSet lg + , "stanza \"" ++ showStanza sn ++ "\" incompatible" + ) + +{------------------------------------------------------------------------------- + Link groups +-------------------------------------------------------------------------------} + +-- | Set of packages that must be linked together +-- +-- A LinkGroup is between several qualified package names. In the validation +-- state, we maintain a map vsLinks from qualified package names to link groups. +-- There is an invariant that for all members of a link group, vsLinks must map +-- to the same link group. The function updateLinkGroup can be used to +-- re-establish this invariant after creating or expanding a LinkGroup. +data LinkGroup = LinkGroup { + -- | The name of the package of this link group + lgPackage :: PN + + -- | The canonical member of this link group (the one where we picked + -- a concrete instance). Once we have picked a canonical member, all + -- other packages must link to this one. 
+ -- + -- We may not know this yet (if we are constructing link groups + -- for dependencies) + , lgCanon :: Maybe (PI PackagePath) + + -- | The members of the link group + , lgMembers :: Set PackagePath + + -- | The set of variables that should be added to the conflict set if + -- something goes wrong with this link set (in addition to the members + -- of the link group itself) + , lgBlame :: ConflictSet + } + deriving (Show, Eq) + +-- | Invariant for the set of link groups: every element in the link group +-- must be pointing to the /same/ link group +lgInvariant :: Map QPN LinkGroup -> Bool +lgInvariant links = all invGroup (M.elems links) + where + invGroup :: LinkGroup -> Bool + invGroup lg = allEqual $ map (`M.lookup` links) members + where + members :: [QPN] + members = map (`Q` lgPackage lg) $ S.toList (lgMembers lg) + +-- | Package version of this group +-- +-- This is only known once we have picked a canonical element. +lgInstance :: LinkGroup -> Maybe I +lgInstance = fmap (\(PI _ i) -> i) . lgCanon + +showLinkGroup :: LinkGroup -> String +showLinkGroup lg = + "{" ++ intercalate "," (map showMember (S.toList (lgMembers lg))) ++ "}" + where + showMember :: PackagePath -> String + showMember pp = case lgCanon lg of + Just (PI pp' _i) | pp == pp' -> "*" + _otherwise -> "" + ++ case lgInstance lg of + Nothing -> showQPN (qpn pp) + Just i -> showPI (PI (qpn pp) i) + + qpn :: PackagePath -> QPN + qpn pp = Q pp (lgPackage lg) + +-- | Creates a link group that contains a single member. 
+lgSingleton :: QPN -> Maybe (PI PackagePath) -> LinkGroup +lgSingleton (Q pp pn) canon = LinkGroup { + lgPackage = pn + , lgCanon = canon + , lgMembers = S.singleton pp + , lgBlame = CS.empty + } + +lgMerge :: ConflictSet -> LinkGroup -> LinkGroup -> Either Conflict LinkGroup +lgMerge blame lg lg' = do + canon <- pick (lgCanon lg) (lgCanon lg') + return LinkGroup { + lgPackage = lgPackage lg + , lgCanon = canon + , lgMembers = lgMembers lg `S.union` lgMembers lg' + , lgBlame = CS.unions [blame, lgBlame lg, lgBlame lg'] + } + where + pick :: Eq a => Maybe a -> Maybe a -> Either Conflict (Maybe a) + pick Nothing Nothing = Right Nothing + pick (Just x) Nothing = Right $ Just x + pick Nothing (Just y) = Right $ Just y + pick (Just x) (Just y) = + if x == y then Right $ Just x + else Left ( CS.unions [ + blame + , lgConflictSet lg + , lgConflictSet lg' + ] + , "cannot merge " ++ showLinkGroup lg + ++ " and " ++ showLinkGroup lg' + ) + +lgConflictSet :: LinkGroup -> ConflictSet +lgConflictSet lg = + CS.fromList (map aux (S.toList (lgMembers lg))) + `CS.union` lgBlame lg + where + aux pp = P (Q pp (lgPackage lg)) + +{------------------------------------------------------------------------------- + Auxiliary +-------------------------------------------------------------------------------} + +allEqual :: Eq a => [a] -> Bool +allEqual [] = True +allEqual [_] = True +allEqual (x:y:ys) = x == y && allEqual (y:ys) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Log.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Log.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Log.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Log.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,56 @@ +module Distribution.Solver.Modular.Log + ( 
logToProgress + , SolverFailure(..) + ) where + +import Prelude () +import Distribution.Solver.Compat.Prelude + +import Distribution.Solver.Types.Progress + +import Distribution.Solver.Modular.Dependency +import Distribution.Solver.Modular.Message +import qualified Distribution.Solver.Modular.ConflictSet as CS +import Distribution.Solver.Modular.RetryLog +import Distribution.Verbosity + +-- | Information about a dependency solver failure. +data SolverFailure = + ExhaustiveSearch ConflictSet ConflictMap + | BackjumpLimitReached + +-- | Postprocesses a log file. When the dependency solver fails to find a +-- solution, the log ends with a SolverFailure and a message describing the +-- failure. This function discards all log messages and avoids calling +-- 'showMessages' if the log isn't needed (specified by 'keepLog'), for +-- efficiency. +logToProgress :: Bool + -> Verbosity + -> Maybe Int + -> RetryLog Message SolverFailure a + -> Progress String (SolverFailure, String) a +logToProgress keepLog verbosity mbj lg = + if keepLog + then showMessages progress + else foldProgress (const id) Fail Done progress + where + progress = + -- Convert the RetryLog to a Progress (with toProgress) as late as + -- possible, to take advantage of efficient updates at failures. 
+ toProgress $ + mapFailure (\failure -> (failure, finalErrorMsg failure)) lg + + finalErrorMsg :: SolverFailure -> String + finalErrorMsg (ExhaustiveSearch cs cm) = + "After searching the rest of the dependency tree exhaustively, " + ++ "these were the goals I've had most trouble fulfilling: " + ++ showCS cm cs + where + showCS = if verbosity > normal + then CS.showCSWithFrequency + else CS.showCSSortedByFrequency + finalErrorMsg BackjumpLimitReached = + "Backjump limit reached (" ++ currlimit mbj ++ + "change with --max-backjumps or try to run with --reorder-goals).\n" + where currlimit (Just n) = "currently " ++ show n ++ ", " + currlimit Nothing = "" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Message.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Message.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Message.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Message.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,154 @@ +{-# LANGUAGE BangPatterns #-} + +module Distribution.Solver.Modular.Message ( + Message(..), + showMessages + ) where + +import qualified Data.List as L +import Prelude hiding (pi) + +import Distribution.Text -- from Cabal + +import Distribution.Solver.Modular.Dependency +import Distribution.Solver.Modular.Flag +import Distribution.Solver.Modular.Package +import Distribution.Solver.Modular.Tree + ( FailReason(..), POption(..), ConflictingDep(..) 
) +import Distribution.Solver.Modular.Version +import Distribution.Solver.Types.ConstraintSource +import Distribution.Solver.Types.PackagePath +import Distribution.Solver.Types.Progress +import Distribution.Types.UnqualComponentName + +data Message = + Enter -- ^ increase indentation level + | Leave -- ^ decrease indentation level + | TryP QPN POption + | TryF QFN Bool + | TryS QSN Bool + | Next (Goal QPN) + | Success + | Failure ConflictSet FailReason + +-- | Transforms the structured message type to actual messages (strings). +-- +-- The log contains level numbers, which are useful for any trace that involves +-- backtracking, because only the level numbers will allow to keep track of +-- backjumps. +showMessages :: Progress Message a b -> Progress String a b +showMessages = go 0 + where + -- 'go' increments the level for a recursive call when it encounters + -- 'TryP', 'TryF', or 'TryS' and decrements the level when it encounters 'Leave'. + go :: Int -> Progress Message a b -> Progress String a b + go !_ (Done x) = Done x + go !_ (Fail x) = Fail x + -- complex patterns + go !l (Step (TryP qpn i) (Step Enter (Step (Failure c fr) (Step Leave ms)))) = + goPReject l qpn [i] c fr ms + go !l (Step (TryF qfn b) (Step Enter (Step (Failure c fr) (Step Leave ms)))) = + (atLevel l $ "rejecting: " ++ showQFNBool qfn b ++ showFR c fr) (go l ms) + go !l (Step (TryS qsn b) (Step Enter (Step (Failure c fr) (Step Leave ms)))) = + (atLevel l $ "rejecting: " ++ showQSNBool qsn b ++ showFR c fr) (go l ms) + go !l (Step (Next (Goal (P _ ) gr)) (Step (TryP qpn' i) ms@(Step Enter (Step (Next _) _)))) = + (atLevel l $ "trying: " ++ showQPNPOpt qpn' i ++ showGR gr) (go l ms) + go !l (Step (Next (Goal (P qpn) gr)) ms@(Step (Failure _c Backjump) _)) = + (atLevel l $ "unknown package: " ++ showQPN qpn ++ showGR gr) $ go l ms + go !l (Step (Next (Goal (P qpn) gr)) (Step (Failure c fr) ms)) = + (atLevel l $ showPackageGoal qpn gr) $ (atLevel l $ showFailure c fr) (go l ms) + -- standard 
display + go !l (Step Enter ms) = go (l+1) ms + go !l (Step Leave ms) = go (l-1) ms + go !l (Step (TryP qpn i) ms) = (atLevel l $ "trying: " ++ showQPNPOpt qpn i) (go l ms) + go !l (Step (TryF qfn b) ms) = (atLevel l $ "trying: " ++ showQFNBool qfn b) (go l ms) + go !l (Step (TryS qsn b) ms) = (atLevel l $ "trying: " ++ showQSNBool qsn b) (go l ms) + go !l (Step (Next (Goal (P qpn) gr)) ms) = (atLevel l $ showPackageGoal qpn gr) (go l ms) + go !l (Step (Next _) ms) = go l ms -- ignore flag goals in the log + go !l (Step (Success) ms) = (atLevel l $ "done") (go l ms) + go !l (Step (Failure c fr) ms) = (atLevel l $ showFailure c fr) (go l ms) + + showPackageGoal :: QPN -> QGoalReason -> String + showPackageGoal qpn gr = "next goal: " ++ showQPN qpn ++ showGR gr + + showFailure :: ConflictSet -> FailReason -> String + showFailure c fr = "fail" ++ showFR c fr + + -- special handler for many subsequent package rejections + goPReject :: Int + -> QPN + -> [POption] + -> ConflictSet + -> FailReason + -> Progress Message a b + -> Progress String a b + goPReject l qpn is c fr (Step (TryP qpn' i) (Step Enter (Step (Failure _ fr') (Step Leave ms)))) + | qpn == qpn' && fr == fr' = goPReject l qpn (i : is) c fr ms + goPReject l qpn is c fr ms = + (atLevel l $ "rejecting: " ++ L.intercalate ", " (map (showQPNPOpt qpn) (reverse is)) ++ showFR c fr) (go l ms) + + -- write a message with the current level number + atLevel :: Int -> String -> Progress String a b -> Progress String a b + atLevel l x xs = + let s = show l + in Step ("[" ++ replicate (3 - length s) '_' ++ s ++ "] " ++ x) xs + +showQPNPOpt :: QPN -> POption -> String +showQPNPOpt qpn@(Q _pp pn) (POption i linkedTo) = + case linkedTo of + Nothing -> showPI (PI qpn i) -- Consistent with prior to POption + Just pp' -> showQPN qpn ++ "~>" ++ showPI (PI (Q pp' pn) i) + +showGR :: QGoalReason -> String +showGR UserGoal = " (user goal)" +showGR (DependencyGoal dr) = " (dependency of " ++ showDependencyReason dr ++ ")" + +showFR 
:: ConflictSet -> FailReason -> String +showFR _ (UnsupportedExtension ext) = " (conflict: requires " ++ display ext ++ ")" +showFR _ (UnsupportedLanguage lang) = " (conflict: requires " ++ display lang ++ ")" +showFR _ (MissingPkgconfigPackage pn vr) = " (conflict: pkg-config package " ++ display pn ++ display vr ++ ", not found in the pkg-config database)" +showFR _ (NewPackageDoesNotMatchExistingConstraint d) = " (conflict: " ++ showConflictingDep d ++ ")" +showFR _ (ConflictingConstraints d1 d2) = " (conflict: " ++ L.intercalate ", " (L.map showConflictingDep [d1, d2]) ++ ")" +showFR _ (NewPackageIsMissingRequiredComponent comp dr) = " (does not contain " ++ showExposedComponent comp ++ ", which is required by " ++ showDependencyReason dr ++ ")" +showFR _ (NewPackageHasUnbuildableRequiredComponent comp dr) = " (" ++ showExposedComponent comp ++ " is not buildable in the current environment, but it is required by " ++ showDependencyReason dr ++ ")" +showFR _ (PackageRequiresMissingComponent qpn comp) = " (requires " ++ showExposedComponent comp ++ " from " ++ showQPN qpn ++ ", but the component does not exist)" +showFR _ (PackageRequiresUnbuildableComponent qpn comp) = " (requires " ++ showExposedComponent comp ++ " from " ++ showQPN qpn ++ ", but the component is not buildable in the current environment)" +showFR _ CannotInstall = " (only already installed instances can be used)" +showFR _ CannotReinstall = " (avoiding to reinstall a package with same version but new dependencies)" +showFR _ Shadowed = " (shadowed by another installed package with same version)" +showFR _ Broken = " (package is broken)" +showFR _ (GlobalConstraintVersion vr src) = " (" ++ constraintSource src ++ " requires " ++ display vr ++ ")" +showFR _ (GlobalConstraintInstalled src) = " (" ++ constraintSource src ++ " requires installed instance)" +showFR _ (GlobalConstraintSource src) = " (" ++ constraintSource src ++ " requires source instance)" +showFR _ (GlobalConstraintFlag src) = " (" 
++ constraintSource src ++ " requires opposite flag selection)" +showFR _ ManualFlag = " (manual flag can only be changed explicitly)" +showFR c Backjump = " (backjumping, conflict set: " ++ showConflictSet c ++ ")" +showFR _ MultipleInstances = " (multiple instances)" +showFR c (DependenciesNotLinked msg) = " (dependencies not linked: " ++ msg ++ "; conflict set: " ++ showConflictSet c ++ ")" +showFR c CyclicDependencies = " (cyclic dependencies; conflict set: " ++ showConflictSet c ++ ")" +showFR _ (UnsupportedSpecVer ver) = " (unsupported spec-version " ++ display ver ++ ")" +-- The following are internal failures. They should not occur. In the +-- interest of not crashing unnecessarily, we still just print an error +-- message though. +showFR _ (MalformedFlagChoice qfn) = " (INTERNAL ERROR: MALFORMED FLAG CHOICE: " ++ showQFN qfn ++ ")" +showFR _ (MalformedStanzaChoice qsn) = " (INTERNAL ERROR: MALFORMED STANZA CHOICE: " ++ showQSN qsn ++ ")" +showFR _ EmptyGoalChoice = " (INTERNAL ERROR: EMPTY GOAL CHOICE)" + +showExposedComponent :: ExposedComponent -> String +showExposedComponent ExposedLib = "library" +showExposedComponent (ExposedExe name) = "executable '" ++ unUnqualComponentName name ++ "'" + +constraintSource :: ConstraintSource -> String +constraintSource src = "constraint from " ++ showConstraintSource src + +showConflictingDep :: ConflictingDep -> String +showConflictingDep (ConflictingDep dr (PkgComponent qpn comp) ci) = + let DependencyReason qpn' _ _ = dr + componentStr = case comp of + ExposedExe exe -> " (exe " ++ unUnqualComponentName exe ++ ")" + ExposedLib -> "" + in case ci of + Fixed i -> (if qpn /= qpn' then showDependencyReason dr ++ " => " else "") ++ + showQPN qpn ++ componentStr ++ "==" ++ showI i + Constrained vr -> showDependencyReason dr ++ " => " ++ showQPN qpn ++ + componentStr ++ showVR vr diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Package.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Package.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Package.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Package.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,107 @@ +{-# LANGUAGE DeriveFunctor #-} +module Distribution.Solver.Modular.Package + ( I(..) + , Loc(..) + , PackageId + , PackageIdentifier(..) + , PackageName, mkPackageName, unPackageName + , PkgconfigName, mkPkgconfigName, unPkgconfigName + , PI(..) + , PN + , QPV + , instI + , makeIndependent + , primaryPP + , setupPP + , showI + , showPI + , unPN + ) where + +import Data.List as L + +import Distribution.Package -- from Cabal +import Distribution.Text (display) + +import Distribution.Solver.Modular.Version +import Distribution.Solver.Types.PackagePath + +-- | A package name. +type PN = PackageName + +-- | Unpacking a package name. +unPN :: PN -> String +unPN = unPackageName + +-- | Package version. A package name plus a version number. +type PV = PackageId + +-- | Qualified package version. +type QPV = Qualified PV + +-- | Package id. Currently just a black-box string. +type PId = UnitId + +-- | Location. Info about whether a package is installed or not, and where +-- exactly it is located. For installed packages, uniquely identifies the +-- package instance via its 'PId'. +-- +-- TODO: More information is needed about the repo. +data Loc = Inst PId | InRepo + deriving (Eq, Ord, Show) + +-- | Instance. A version number and a location. +data I = I Ver Loc + deriving (Eq, Ord, Show) + +-- | String representation of an instance. 
+showI :: I -> String +showI (I v InRepo) = showVer v +showI (I v (Inst uid)) = showVer v ++ "/installed" ++ shortId uid + where + -- A hack to extract the beginning of the package ABI hash + shortId = snip (splitAt 4) (++ "...") + . snip ((\ (x, y) -> (reverse x, y)) . break (=='-') . reverse) ('-':) + . display + snip p f xs = case p xs of + (ys, zs) -> (if L.null zs then id else f) ys + +-- | Package instance. A package name and an instance. +data PI qpn = PI qpn I + deriving (Eq, Ord, Show, Functor) + +-- | String representation of a package instance. +showPI :: PI QPN -> String +showPI (PI qpn i) = showQPN qpn ++ "-" ++ showI i + +instI :: I -> Bool +instI (I _ (Inst _)) = True +instI _ = False + +-- | Is the package in the primary group of packages. This is used to +-- determine (1) if we should try to establish stanza preferences +-- for this goal, and (2) whether or not a user specified @--constraint@ +-- should apply to this dependency (grep 'primaryPP' to see the +-- use sites). In particular this does not include packages pulled in +-- as setup deps. +-- +primaryPP :: PackagePath -> Bool +primaryPP (PackagePath _ns q) = go q + where + go QualToplevel = True + go (QualBase _) = True + go (QualSetup _) = False + go (QualExe _ _) = False + +-- | Is the package a dependency of a setup script. This is used to +-- establish whether or not certain constraints should apply to this +-- dependency (grep 'setupPP' to see the use sites). +-- +setupPP :: PackagePath -> Bool +setupPP (PackagePath _ns (QualSetup _)) = True +setupPP (PackagePath _ns _) = False + +-- | Qualify a target package with its own name so that its dependencies are not +-- required to be consistent with other targets. 
+makeIndependent :: PN -> QPN +makeIndependent pn = Q (PackagePath (Independent pn) QualToplevel) pn diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Preference.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Preference.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Preference.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Preference.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,471 @@ +{-# LANGUAGE ScopedTypeVariables #-} +-- | Reordering or pruning the tree in order to prefer or make certain choices. +module Distribution.Solver.Modular.Preference + ( avoidReinstalls + , deferSetupChoices + , deferWeakFlagChoices + , enforceManualFlags + , enforcePackageConstraints + , enforceSingleInstanceRestriction + , firstGoal + , preferBaseGoalChoice + , preferLinked + , preferPackagePreferences + , preferReallyEasyGoalChoices + , requireInstalled + , sortGoals + , pruneAfterFirstSuccess + ) where + +import Prelude () +import Distribution.Solver.Compat.Prelude + +import Data.Function (on) +import qualified Data.List as L +import qualified Data.Map as M +import Control.Monad.Reader hiding (sequence) +import Data.Traversable (sequence) + +import Distribution.PackageDescription (lookupFlagAssignment, unFlagAssignment) -- from Cabal + +import Distribution.Solver.Types.Flag +import Distribution.Solver.Types.InstalledPreference +import Distribution.Solver.Types.LabeledPackageConstraint +import Distribution.Solver.Types.OptionalStanza +import Distribution.Solver.Types.PackageConstraint +import Distribution.Solver.Types.PackagePath +import Distribution.Solver.Types.PackagePreferences +import Distribution.Solver.Types.Variable + +import Distribution.Solver.Modular.Dependency +import Distribution.Solver.Modular.Flag 
+import Distribution.Solver.Modular.Package +import qualified Distribution.Solver.Modular.PSQ as P +import Distribution.Solver.Modular.Tree +import Distribution.Solver.Modular.Version +import qualified Distribution.Solver.Modular.ConflictSet as CS +import qualified Distribution.Solver.Modular.WeightedPSQ as W + +-- | Update the weights of children under 'PChoice' nodes. 'addWeights' takes a +-- list of weight-calculating functions in order to avoid sorting the package +-- choices multiple times. Each function takes the package name, sorted list of +-- children's versions, and package option. 'addWeights' prepends the new +-- weights to the existing weights, which gives precedence to preferences that +-- are applied later. +addWeights :: [PN -> [Ver] -> POption -> Weight] -> Tree d c -> Tree d c +addWeights fs = trav go + where + go :: TreeF d c (Tree d c) -> TreeF d c (Tree d c) + go (PChoiceF qpn@(Q _ pn) rdm x cs) = + let sortedVersions = L.sortBy (flip compare) $ L.map version (W.keys cs) + weights k = [f pn sortedVersions k | f <- fs] + + elemsToWhnf :: [a] -> () + elemsToWhnf = foldr seq () + in PChoiceF qpn rdm x + -- Evaluate the children's versions before evaluating any of the + -- subtrees, so that 'sortedVersions' doesn't hold onto all of the + -- subtrees (referenced by cs) and cause a space leak. + (elemsToWhnf sortedVersions `seq` + W.mapWeightsWithKey (\k w -> weights k ++ w) cs) + go x = x + +addWeight :: (PN -> [Ver] -> POption -> Weight) -> Tree d c -> Tree d c +addWeight f = addWeights [f] + +version :: POption -> Ver +version (POption (I v _) _) = v + +-- | Prefer to link packages whenever possible. +preferLinked :: Tree d c -> Tree d c +preferLinked = addWeight (const (const linked)) + where + linked (POption _ Nothing) = 1 + linked (POption _ (Just _)) = 0 + +-- Works by setting weights on choice nodes. Also applies stanza preferences. 
+preferPackagePreferences :: (PN -> PackagePreferences) -> Tree d c -> Tree d c +preferPackagePreferences pcs = + preferPackageStanzaPreferences pcs . + addWeights [ + \pn _ opt -> preferred pn opt + + -- Note that we always rank installed before uninstalled, and later + -- versions before earlier, but we can change the priority of the + -- two orderings. + , \pn vs opt -> case preference pn of + PreferInstalled -> installed opt + PreferLatest -> latest vs opt + , \pn vs opt -> case preference pn of + PreferInstalled -> latest vs opt + PreferLatest -> installed opt + ] + where + -- Prefer packages with higher version numbers over packages with + -- lower version numbers. + latest :: [Ver] -> POption -> Weight + latest sortedVersions opt = + let l = length sortedVersions + index = fromMaybe l $ L.findIndex (<= version opt) sortedVersions + in fromIntegral index / fromIntegral l + + preference :: PN -> InstalledPreference + preference pn = + let PackagePreferences _ ipref _ = pcs pn + in ipref + + -- | Prefer versions satisfying more preferred version ranges. + preferred :: PN -> POption -> Weight + preferred pn opt = + let PackagePreferences vrs _ _ = pcs pn + in fromIntegral . negate . L.length $ + L.filter (flip checkVR (version opt)) vrs + + -- Prefer installed packages over non-installed packages. + installed :: POption -> Weight + installed (POption (I _ (Inst _)) _) = 0 + installed _ = 1 + +-- | Traversal that tries to establish package stanza enable\/disable +-- preferences. Works by reordering the branches of stanza choices. 
+preferPackageStanzaPreferences :: (PN -> PackagePreferences) -> Tree d c -> Tree d c +preferPackageStanzaPreferences pcs = trav go + where + go (SChoiceF qsn@(SN (Q pp pn) s) rdm gr _tr ts) + | primaryPP pp && enableStanzaPref pn s = + -- move True case first to try enabling the stanza + let ts' = W.mapWeightsWithKey (\k w -> weight k : w) ts + weight k = if k then 0 else 1 + -- defer the choice by setting it to weak + in SChoiceF qsn rdm gr (WeakOrTrivial True) ts' + go x = x + + enableStanzaPref :: PN -> OptionalStanza -> Bool + enableStanzaPref pn s = + let PackagePreferences _ _ spref = pcs pn + in s `elem` spref + +-- | Helper function that tries to enforce a single package constraint on a +-- given instance for a P-node. Translates the constraint into a +-- tree-transformer that either leaves the subtree untouched, or replaces it +-- with an appropriate failure node. +processPackageConstraintP :: forall d c. QPN + -> ConflictSet + -> I + -> LabeledPackageConstraint + -> Tree d c + -> Tree d c +processPackageConstraintP qpn c i (LabeledPackageConstraint (PackageConstraint scope prop) src) r = + if constraintScopeMatches scope qpn + then go i prop + else r + where + go :: I -> PackageProperty -> Tree d c + go (I v _) (PackagePropertyVersion vr) + | checkVR vr v = r + | otherwise = Fail c (GlobalConstraintVersion vr src) + go _ PackagePropertyInstalled + | instI i = r + | otherwise = Fail c (GlobalConstraintInstalled src) + go _ PackagePropertySource + | not (instI i) = r + | otherwise = Fail c (GlobalConstraintSource src) + go _ _ = r + +-- | Helper function that tries to enforce a single package constraint on a +-- given flag setting for an F-node. Translates the constraint into a +-- tree-transformer that either leaves the subtree untouched, or replaces it +-- with an appropriate failure node. +processPackageConstraintF :: forall d c. 
QPN + -> Flag + -> ConflictSet + -> Bool + -> LabeledPackageConstraint + -> Tree d c + -> Tree d c +processPackageConstraintF qpn f c b' (LabeledPackageConstraint (PackageConstraint scope prop) src) r = + if constraintScopeMatches scope qpn + then go prop + else r + where + go :: PackageProperty -> Tree d c + go (PackagePropertyFlags fa) = + case lookupFlagAssignment f fa of + Nothing -> r + Just b | b == b' -> r + | otherwise -> Fail c (GlobalConstraintFlag src) + go _ = r + +-- | Helper function that tries to enforce a single package constraint on a +-- given flag setting for an F-node. Translates the constraint into a +-- tree-transformer that either leaves the subtree untouched, or replaces it +-- with an appropriate failure node. +processPackageConstraintS :: forall d c. QPN + -> OptionalStanza + -> ConflictSet + -> Bool + -> LabeledPackageConstraint + -> Tree d c + -> Tree d c +processPackageConstraintS qpn s c b' (LabeledPackageConstraint (PackageConstraint scope prop) src) r = + if constraintScopeMatches scope qpn + then go prop + else r + where + go :: PackageProperty -> Tree d c + go (PackagePropertyStanzas ss) = + if not b' && s `elem` ss then Fail c (GlobalConstraintFlag src) + else r + go _ = r + +-- | Traversal that tries to establish various kinds of user constraints. Works +-- by selectively disabling choices that have been ruled out by global user +-- constraints. +enforcePackageConstraints :: M.Map PN [LabeledPackageConstraint] + -> Tree d c + -> Tree d c +enforcePackageConstraints pcs = trav go + where + go (PChoiceF qpn@(Q _ pn) rdm gr ts) = + let c = varToConflictSet (P qpn) + -- compose the transformation functions for each of the relevant constraint + g = \ (POption i _) -> foldl (\ h pc -> h . 
processPackageConstraintP qpn c i pc) + id + (M.findWithDefault [] pn pcs) + in PChoiceF qpn rdm gr (W.mapWithKey g ts) + go (FChoiceF qfn@(FN qpn@(Q _ pn) f) rdm gr tr m d ts) = + let c = varToConflictSet (F qfn) + -- compose the transformation functions for each of the relevant constraint + g = \ b -> foldl (\ h pc -> h . processPackageConstraintF qpn f c b pc) + id + (M.findWithDefault [] pn pcs) + in FChoiceF qfn rdm gr tr m d (W.mapWithKey g ts) + go (SChoiceF qsn@(SN qpn@(Q _ pn) f) rdm gr tr ts) = + let c = varToConflictSet (S qsn) + -- compose the transformation functions for each of the relevant constraint + g = \ b -> foldl (\ h pc -> h . processPackageConstraintS qpn f c b pc) + id + (M.findWithDefault [] pn pcs) + in SChoiceF qsn rdm gr tr (W.mapWithKey g ts) + go x = x + +-- | Transformation that tries to enforce the rule that manual flags can only be +-- set by the user. +-- +-- If there are no constraints on a manual flag, this function prunes all but +-- the default value. If there are constraints, then the flag is allowed to have +-- the values specified by the constraints. Note that the type used for flag +-- values doesn't need to be Bool. +-- +-- This function makes an exception for the case where there are multiple goals +-- for a single package (with different qualifiers), and flag constraints for +-- manual flag x only apply to some of those goals. In that case, we allow the +-- unconstrained goals to use the default value for x OR any of the values in +-- the constraints on x (even though the constraints don't apply), in order to +-- allow the unconstrained goals to be linked to the constrained goals. See +-- https://github.com/haskell/cabal/issues/4299. Removing the single instance +-- restriction (SIR) would also fix #4299, so we may want to remove this +-- exception and only let the user toggle manual flags if we remove the SIR. 
+-- +-- This function does not enforce any of the constraints, since that is done by +-- 'enforcePackageConstraints'. +enforceManualFlags :: M.Map PN [LabeledPackageConstraint] -> Tree d c -> Tree d c +enforceManualFlags pcs = trav go + where + go (FChoiceF qfn@(FN (Q _ pn) fn) rdm gr tr Manual d ts) = + FChoiceF qfn rdm gr tr Manual d $ + let -- A list of all values specified by constraints on 'fn'. + -- We ignore the constraint scope in order to handle issue #4299. + flagConstraintValues :: [Bool] + flagConstraintValues = + [ flagVal + | let lpcs = M.findWithDefault [] pn pcs + , (LabeledPackageConstraint (PackageConstraint _ (PackagePropertyFlags fa)) _) <- lpcs + , (fn', flagVal) <- unFlagAssignment fa + , fn' == fn ] + + -- Prune flag values that are not the default and do not match any + -- of the constraints. + restrictToggling :: Eq a => a -> [a] -> a -> Tree d c -> Tree d c + restrictToggling flagDefault constraintVals flagVal r = + if flagVal `elem` constraintVals || flagVal == flagDefault + then r + else Fail (varToConflictSet (F qfn)) ManualFlag + + in W.mapWithKey (restrictToggling d flagConstraintValues) ts + go x = x + +-- | Require installed packages. +requireInstalled :: (PN -> Bool) -> Tree d c -> Tree d c +requireInstalled p = trav go + where + go (PChoiceF v@(Q _ pn) rdm gr cs) + | p pn = PChoiceF v rdm gr (W.mapWithKey installed cs) + | otherwise = PChoiceF v rdm gr cs + where + installed (POption (I _ (Inst _)) _) x = x + installed _ _ = Fail (varToConflictSet (P v)) CannotInstall + go x = x + +-- | Avoid reinstalls. +-- +-- This is a tricky strategy. If a package version is installed already and the +-- same version is available from a repo, the repo version will never be chosen. +-- This would result in a reinstall (either destructively, or potentially, +-- shadowing). The old instance won't be visible or even present anymore, but +-- other packages might have depended on it. 
+-- +-- TODO: It would be better to actually check the reverse dependencies of installed +-- packages. If they're not depended on, then reinstalling should be fine. Even if +-- they are, perhaps this should just result in trying to reinstall those other +-- packages as well. However, doing this all neatly in one pass would require to +-- change the builder, or at least to change the goal set after building. +avoidReinstalls :: (PN -> Bool) -> Tree d c -> Tree d c +avoidReinstalls p = trav go + where + go (PChoiceF qpn@(Q _ pn) rdm gr cs) + | p pn = PChoiceF qpn rdm gr disableReinstalls + | otherwise = PChoiceF qpn rdm gr cs + where + disableReinstalls = + let installed = [ v | (_, POption (I v (Inst _)) _, _) <- W.toList cs ] + in W.mapWithKey (notReinstall installed) cs + + notReinstall vs (POption (I v InRepo) _) _ | v `elem` vs = + Fail (varToConflictSet (P qpn)) CannotReinstall + notReinstall _ _ x = + x + go x = x + +-- | Sort all goals using the provided function. +sortGoals :: (Variable QPN -> Variable QPN -> Ordering) -> Tree d c -> Tree d c +sortGoals variableOrder = trav go + where + go (GoalChoiceF rdm xs) = GoalChoiceF rdm (P.sortByKeys goalOrder xs) + go x = x + + goalOrder :: Goal QPN -> Goal QPN -> Ordering + goalOrder = variableOrder `on` (varToVariable . goalToVar) + + varToVariable :: Var QPN -> Variable QPN + varToVariable (P qpn) = PackageVar qpn + varToVariable (F (FN qpn fn)) = FlagVar qpn fn + varToVariable (S (SN qpn stanza)) = StanzaVar qpn stanza + +-- | Reduce the branching degree of the search tree by removing all choices +-- after the first successful choice at each level. The returned tree is the +-- minimal subtree containing the path to the first backjump. 
+pruneAfterFirstSuccess :: Tree d c -> Tree d c +pruneAfterFirstSuccess = trav go + where + go (PChoiceF qpn rdm gr ts) = PChoiceF qpn rdm gr (W.takeUntil active ts) + go (FChoiceF qfn rdm gr w m d ts) = FChoiceF qfn rdm gr w m d (W.takeUntil active ts) + go (SChoiceF qsn rdm gr w ts) = SChoiceF qsn rdm gr w (W.takeUntil active ts) + go x = x + +-- | Always choose the first goal in the list next, abandoning all +-- other choices. +-- +-- This is unnecessary for the default search strategy, because +-- it descends only into the first goal choice anyway, +-- but may still make sense to just reduce the tree size a bit. +firstGoal :: Tree d c -> Tree d c +firstGoal = trav go + where + go (GoalChoiceF rdm xs) = GoalChoiceF rdm (P.firstOnly xs) + go x = x + -- Note that we keep empty choice nodes, because they mean success. + +-- | Transformation that tries to make a decision on base as early as +-- possible by pruning all other goals when base is available. In nearly +-- all cases, there's a single choice for the base package. Also, fixing +-- base early should lead to better error messages. +preferBaseGoalChoice :: Tree d c -> Tree d c +preferBaseGoalChoice = trav go + where + go (GoalChoiceF rdm xs) = GoalChoiceF rdm (P.filterIfAnyByKeys isBase xs) + go x = x + + isBase :: Goal QPN -> Bool + isBase (Goal (P (Q _pp pn)) _) = unPN pn == "base" + isBase _ = False + +-- | Deal with setup dependencies after regular dependencies, so that we can +-- will link setup dependencies against package dependencies when possible +deferSetupChoices :: Tree d c -> Tree d c +deferSetupChoices = trav go + where + go (GoalChoiceF rdm xs) = GoalChoiceF rdm (P.preferByKeys noSetup xs) + go x = x + + noSetup :: Goal QPN -> Bool + noSetup (Goal (P (Q (PackagePath _ns (QualSetup _)) _)) _) = False + noSetup _ = True + +-- | Transformation that tries to avoid making weak flag choices early. 
+-- Weak flags are trivial flags (not influencing dependencies) or such +-- flags that are explicitly declared to be weak in the index. +deferWeakFlagChoices :: Tree d c -> Tree d c +deferWeakFlagChoices = trav go + where + go (GoalChoiceF rdm xs) = GoalChoiceF rdm (P.prefer noWeakFlag (P.prefer noWeakStanza xs)) + go x = x + + noWeakStanza :: Tree d c -> Bool + noWeakStanza (SChoice _ _ _ (WeakOrTrivial True) _) = False + noWeakStanza _ = True + + noWeakFlag :: Tree d c -> Bool + noWeakFlag (FChoice _ _ _ (WeakOrTrivial True) _ _ _) = False + noWeakFlag _ = True + +-- | Transformation that prefers goals with lower branching degrees. +-- +-- When a goal choice node has at least one goal with zero or one children, this +-- function prunes all other goals. This transformation can help the solver find +-- a solution in fewer steps by allowing it to backtrack sooner when it is +-- exploring a subtree with no solutions. However, each step is more expensive. +preferReallyEasyGoalChoices :: Tree d c -> Tree d c +preferReallyEasyGoalChoices = trav go + where + go (GoalChoiceF rdm xs) = GoalChoiceF rdm (P.filterIfAny zeroOrOneChoices xs) + go x = x + +-- | Monad used internally in enforceSingleInstanceRestriction +-- +-- For each package instance we record the goal for which we picked a concrete +-- instance. The SIR means that for any package instance there can only be one. +type EnforceSIR = Reader (Map (PI PN) QPN) + +-- | Enforce ghc's single instance restriction +-- +-- From the solver's perspective, this means that for any package instance +-- (that is, package name + package version) there can be at most one qualified +-- goal resolving to that instance (there may be other goals _linking_ to that +-- instance however). +enforceSingleInstanceRestriction :: Tree d c -> Tree d c +enforceSingleInstanceRestriction = (`runReader` M.empty) . cata go + where + go :: TreeF d c (EnforceSIR (Tree d c)) -> EnforceSIR (Tree d c) + + -- We just verify package choices. 
+ go (PChoiceF qpn rdm gr cs) = + PChoice qpn rdm gr <$> sequence (W.mapWithKey (goP qpn) cs) + go _otherwise = + innM _otherwise + + -- The check proper + goP :: QPN -> POption -> EnforceSIR (Tree d c) -> EnforceSIR (Tree d c) + goP qpn@(Q _ pn) (POption i linkedTo) r = do + let inst = PI pn i + env <- ask + case (linkedTo, M.lookup inst env) of + (Just _, _) -> + -- For linked nodes we don't check anything + r + (Nothing, Nothing) -> + -- Not linked, not already used + local (M.insert inst qpn) r + (Nothing, Just qpn') -> do + -- Not linked, already used. This is an error + return $ Fail (CS.union (varToConflictSet (P qpn)) (varToConflictSet (P qpn'))) MultipleInstances diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/PSQ.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/PSQ.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/PSQ.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/PSQ.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,149 @@ +{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} +module Distribution.Solver.Modular.PSQ + ( PSQ(..) -- Unit test needs constructor access + , casePSQ + , cons + , length + , lookup + , filter + , filterIfAny + , filterIfAnyByKeys + , filterKeys + , firstOnly + , fromList + , isZeroOrOne + , keys + , map + , mapKeys + , mapWithKey + , maximumBy + , minimumBy + , null + , prefer + , preferByKeys + , snoc + , sortBy + , sortByKeys + , toList + , union + ) where + +-- Priority search queues. +-- +-- I am not yet sure what exactly is needed. But we need a data structure with +-- key-based lookup that can be sorted. 
We're using a sequence right now with +-- (inefficiently implemented) lookup, because I think that queue-based +-- operations and sorting turn out to be more efficiency-critical in practice. + +import Control.Arrow (first, second) + +import qualified Data.Foldable as F +import Data.Function +import qualified Data.List as S +import Data.Ord (comparing) +import Data.Traversable +import Prelude hiding (foldr, length, lookup, filter, null, map) + +newtype PSQ k v = PSQ [(k, v)] + deriving (Eq, Show, Functor, F.Foldable, Traversable) -- Qualified Foldable to avoid issues with FTP + +keys :: PSQ k v -> [k] +keys (PSQ xs) = fmap fst xs + +lookup :: Eq k => k -> PSQ k v -> Maybe v +lookup k (PSQ xs) = S.lookup k xs + +map :: (v1 -> v2) -> PSQ k v1 -> PSQ k v2 +map f (PSQ xs) = PSQ (fmap (second f) xs) + +mapKeys :: (k1 -> k2) -> PSQ k1 v -> PSQ k2 v +mapKeys f (PSQ xs) = PSQ (fmap (first f) xs) + +mapWithKey :: (k -> a -> b) -> PSQ k a -> PSQ k b +mapWithKey f (PSQ xs) = PSQ (fmap (\ (k, v) -> (k, f k v)) xs) + +fromList :: [(k, a)] -> PSQ k a +fromList = PSQ + +cons :: k -> a -> PSQ k a -> PSQ k a +cons k x (PSQ xs) = PSQ ((k, x) : xs) + +snoc :: PSQ k a -> k -> a -> PSQ k a +snoc (PSQ xs) k x = PSQ (xs ++ [(k, x)]) + +casePSQ :: PSQ k a -> r -> (k -> a -> PSQ k a -> r) -> r +casePSQ (PSQ xs) n c = + case xs of + [] -> n + (k, v) : ys -> c k v (PSQ ys) + +sortBy :: (a -> a -> Ordering) -> PSQ k a -> PSQ k a +sortBy cmp (PSQ xs) = PSQ (S.sortBy (cmp `on` snd) xs) + +sortByKeys :: (k -> k -> Ordering) -> PSQ k a -> PSQ k a +sortByKeys cmp (PSQ xs) = PSQ (S.sortBy (cmp `on` fst) xs) + +maximumBy :: (k -> Int) -> PSQ k a -> (k, a) +maximumBy sel (PSQ xs) = + S.minimumBy (flip (comparing (sel . fst))) xs + +minimumBy :: (a -> Int) -> PSQ k a -> PSQ k a +minimumBy sel (PSQ xs) = + PSQ [snd (S.minimumBy (comparing fst) (S.map (\ x -> (sel (snd x), x)) xs))] + +-- | Sort the list so that values satisfying the predicate are first. 
+prefer :: (a -> Bool) -> PSQ k a -> PSQ k a +prefer p = sortBy $ flip (comparing p) + +-- | Sort the list so that keys satisfying the predicate are first. +preferByKeys :: (k -> Bool) -> PSQ k a -> PSQ k a +preferByKeys p = sortByKeys $ flip (comparing p) + +-- | Will partition the list according to the predicate. If +-- there is any element that satisfies the precidate, then only +-- the elements satisfying the predicate are returned. +-- Otherwise, the rest is returned. +-- +filterIfAny :: (a -> Bool) -> PSQ k a -> PSQ k a +filterIfAny p (PSQ xs) = + let + (pro, con) = S.partition (p . snd) xs + in + if S.null pro then PSQ con else PSQ pro + +-- | Variant of 'filterIfAny' that takes a predicate on the keys +-- rather than on the values. +-- +filterIfAnyByKeys :: (k -> Bool) -> PSQ k a -> PSQ k a +filterIfAnyByKeys p (PSQ xs) = + let + (pro, con) = S.partition (p . fst) xs + in + if S.null pro then PSQ con else PSQ pro + +filterKeys :: (k -> Bool) -> PSQ k a -> PSQ k a +filterKeys p (PSQ xs) = PSQ (S.filter (p . fst) xs) + +filter :: (a -> Bool) -> PSQ k a -> PSQ k a +filter p (PSQ xs) = PSQ (S.filter (p . 
snd) xs) + +length :: PSQ k a -> Int +length (PSQ xs) = S.length xs + +null :: PSQ k a -> Bool +null (PSQ xs) = S.null xs + +isZeroOrOne :: PSQ k a -> Bool +isZeroOrOne (PSQ []) = True +isZeroOrOne (PSQ [_]) = True +isZeroOrOne _ = False + +firstOnly :: PSQ k a -> PSQ k a +firstOnly (PSQ []) = PSQ [] +firstOnly (PSQ (x : _)) = PSQ [x] + +toList :: PSQ k a -> [(k, a)] +toList (PSQ xs) = xs + +union :: PSQ k a -> PSQ k a -> PSQ k a +union (PSQ xs) (PSQ ys) = PSQ (xs ++ ys) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/RetryLog.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/RetryLog.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/RetryLog.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/RetryLog.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,69 @@ +{-# LANGUAGE Rank2Types #-} +module Distribution.Solver.Modular.RetryLog + ( RetryLog + , toProgress + , fromProgress + , mapFailure + , retry + , failWith + , succeedWith + , continueWith + , tryWith + ) where + +import Distribution.Solver.Modular.Message +import Distribution.Solver.Types.Progress + +-- | 'Progress' as a difference list that allows efficient appends at failures. +newtype RetryLog step fail done = RetryLog { + unRetryLog :: forall fail2 . (fail -> Progress step fail2 done) + -> Progress step fail2 done + } + +-- | /O(1)/. Convert a 'RetryLog' to a 'Progress'. +toProgress :: RetryLog step fail done -> Progress step fail done +toProgress (RetryLog f) = f Fail + +-- | /O(N)/. Convert a 'Progress' to a 'RetryLog'. 
+fromProgress :: Progress step fail done -> RetryLog step fail done +fromProgress l = RetryLog $ \f -> go f l + where + go :: (fail1 -> Progress step fail2 done) + -> Progress step fail1 done + -> Progress step fail2 done + go _ (Done d) = Done d + go f (Fail failure) = f failure + go f (Step m ms) = Step m (go f ms) + +-- | /O(1)/. Apply a function to the failure value in a log. +mapFailure :: (fail1 -> fail2) + -> RetryLog step fail1 done + -> RetryLog step fail2 done +mapFailure f l = retry l $ \failure -> RetryLog $ \g -> g (f failure) + +-- | /O(1)/. If the first log leads to failure, continue with the second. +retry :: RetryLog step fail1 done + -> (fail1 -> RetryLog step fail2 done) + -> RetryLog step fail2 done +retry (RetryLog f) g = + RetryLog $ \extendLog -> f $ \failure -> unRetryLog (g failure) extendLog + +-- | /O(1)/. Create a log with one message before a failure. +failWith :: step -> fail -> RetryLog step fail done +failWith m failure = RetryLog $ \f -> Step m (f failure) + +-- | /O(1)/. Create a log with one message before a success. +succeedWith :: step -> done -> RetryLog step fail done +succeedWith m d = RetryLog $ const $ Step m (Done d) + +-- | /O(1)/. Prepend a message to a log. +continueWith :: step + -> RetryLog step fail done + -> RetryLog step fail done +continueWith m (RetryLog f) = RetryLog $ Step m . f + +-- | /O(1)/. Prepend the given message and 'Enter' to the log, and insert +-- 'Leave' before the failure if the log fails. +tryWith :: Message -> RetryLog Message fail done -> RetryLog Message fail done +tryWith m f = + RetryLog $ Step m . Step Enter . 
unRetryLog (retry f (failWith Leave)) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Solver.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Solver.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Solver.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Solver.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,247 @@ +{-# LANGUAGE CPP #-} +#ifdef DEBUG_TRACETREE +{-# LANGUAGE FlexibleInstances #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} +#endif +module Distribution.Solver.Modular.Solver + ( SolverConfig(..) + , solve + , PruneAfterFirstSuccess(..) + ) where + +import Data.Map as M +import Data.List as L +import Data.Set as S +import Distribution.Verbosity + +import Distribution.Compiler (CompilerInfo) + +import Distribution.Solver.Types.PackagePath +import Distribution.Solver.Types.PackagePreferences +import Distribution.Solver.Types.PkgConfigDb (PkgConfigDb) +import Distribution.Solver.Types.LabeledPackageConstraint +import Distribution.Solver.Types.Settings +import Distribution.Solver.Types.Variable + +import Distribution.Solver.Modular.Assignment +import Distribution.Solver.Modular.Builder +import Distribution.Solver.Modular.Cycles +import Distribution.Solver.Modular.Dependency +import Distribution.Solver.Modular.Explore +import Distribution.Solver.Modular.Index +import Distribution.Solver.Modular.Log +import Distribution.Solver.Modular.Message +import Distribution.Solver.Modular.Package +import qualified Distribution.Solver.Modular.Preference as P +import Distribution.Solver.Modular.Validate +import Distribution.Solver.Modular.Linking +import Distribution.Solver.Modular.PSQ (PSQ) +import Distribution.Solver.Modular.RetryLog +import Distribution.Solver.Modular.Tree +import qualified 
Distribution.Solver.Modular.PSQ as PSQ + +import Distribution.Simple.Setup (BooleanFlag(..)) + +#ifdef DEBUG_TRACETREE +import qualified Distribution.Solver.Modular.ConflictSet as CS +import qualified Distribution.Solver.Modular.WeightedPSQ as W +import qualified Distribution.Text as T + +import Debug.Trace.Tree (gtraceJson) +import Debug.Trace.Tree.Simple +import Debug.Trace.Tree.Generic +import Debug.Trace.Tree.Assoc (Assoc(..)) +#endif + +-- | Various options for the modular solver. +data SolverConfig = SolverConfig { + reorderGoals :: ReorderGoals, + countConflicts :: CountConflicts, + independentGoals :: IndependentGoals, + avoidReinstalls :: AvoidReinstalls, + shadowPkgs :: ShadowPkgs, + strongFlags :: StrongFlags, + allowBootLibInstalls :: AllowBootLibInstalls, + maxBackjumps :: Maybe Int, + enableBackjumping :: EnableBackjumping, + solveExecutables :: SolveExecutables, + goalOrder :: Maybe (Variable QPN -> Variable QPN -> Ordering), + solverVerbosity :: Verbosity, + pruneAfterFirstSuccess :: PruneAfterFirstSuccess +} + +-- | Whether to remove all choices after the first successful choice at each +-- level in the search tree. +newtype PruneAfterFirstSuccess = PruneAfterFirstSuccess Bool + +-- | Run all solver phases. +-- +-- In principle, we have a valid tree after 'validationPhase', which +-- means that every 'Done' node should correspond to valid solution. +-- +-- There is one exception, though, and that is cycle detection, which +-- has been added relatively recently. Cycles are only removed directly +-- before exploration. 
+-- +solve :: SolverConfig -- ^ solver parameters + -> CompilerInfo + -> Index -- ^ all available packages as an index + -> PkgConfigDb -- ^ available pkg-config pkgs + -> (PN -> PackagePreferences) -- ^ preferences + -> Map PN [LabeledPackageConstraint] -- ^ global constraints + -> Set PN -- ^ global goals + -> RetryLog Message SolverFailure (Assignment, RevDepMap) +solve sc cinfo idx pkgConfigDB userPrefs userConstraints userGoals = + explorePhase $ + detectCycles $ + heuristicsPhase $ + preferencesPhase $ + validationPhase $ + prunePhase $ + buildPhase + where + explorePhase = backjumpAndExplore (maxBackjumps sc) + (enableBackjumping sc) + (countConflicts sc) + detectCycles = traceTree "cycles.json" id . detectCyclesPhase + heuristicsPhase = + let heuristicsTree = traceTree "heuristics.json" id + sortGoals = case goalOrder sc of + Nothing -> goalChoiceHeuristics . + heuristicsTree . + P.deferSetupChoices . + P.deferWeakFlagChoices . + P.preferBaseGoalChoice + Just order -> P.firstGoal . + heuristicsTree . + P.sortGoals order + PruneAfterFirstSuccess prune = pruneAfterFirstSuccess sc + in sortGoals . + (if prune then P.pruneAfterFirstSuccess else id) + preferencesPhase = P.preferLinked . + P.preferPackagePreferences userPrefs + validationPhase = traceTree "validated.json" id . + P.enforcePackageConstraints userConstraints . + P.enforceManualFlags userConstraints . + P.enforceSingleInstanceRestriction . + validateLinking idx . + validateTree cinfo idx pkgConfigDB + prunePhase = (if asBool (avoidReinstalls sc) then P.avoidReinstalls (const True) else id) . 
+ (if asBool (allowBootLibInstalls sc) + then id + else P.requireInstalled (`elem` nonInstallable)) + buildPhase = traceTree "build.json" id + $ buildTree idx (independentGoals sc) (S.toList userGoals) + + -- packages that can never be installed or upgraded + -- If you change this enumeration, make sure to update the list in + -- "Distribution.Client.Dependency" as well + nonInstallable :: [PackageName] + nonInstallable = + L.map mkPackageName + [ "base" + , "ghc-prim" + , "integer-gmp" + , "integer-simple" + , "template-haskell" + ] + + -- When --reorder-goals is set, we use preferReallyEasyGoalChoices, which + -- prefers (keeps) goals only if the have 0 or 1 enabled choice. + -- + -- In the past, we furthermore used P.firstGoal to trim down the goal choice nodes + -- to just a single option. This was a way to work around a space leak that was + -- unnecessary and is now fixed, so we no longer do it. + -- + -- If --count-conflicts is active, it will then choose among the remaining goals + -- the one that has been responsible for the most conflicts so far. + -- + -- Otherwise, we simply choose the first remaining goal. + -- + goalChoiceHeuristics + | asBool (reorderGoals sc) = P.preferReallyEasyGoalChoices + | otherwise = id {- P.firstGoal -} + +-- | Dump solver tree to a file (in debugging mode) +-- +-- This only does something if the @debug-tracetree@ configure argument was +-- given; otherwise this is just the identity function. 
+traceTree :: +#ifdef DEBUG_TRACETREE + GSimpleTree a => +#endif + FilePath -- ^ Output file + -> (a -> a) -- ^ Function to summarize the tree before dumping + -> a -> a +#ifdef DEBUG_TRACETREE +traceTree = gtraceJson +#else +traceTree _ _ = id +#endif + +#ifdef DEBUG_TRACETREE +instance GSimpleTree (Tree d c) where + fromGeneric = go + where + go :: Tree d c -> SimpleTree + go (PChoice qpn _ _ psq) = Node "P" $ Assoc $ L.map (uncurry (goP qpn)) $ psqToList psq + go (FChoice _ _ _ _ _ _ psq) = Node "F" $ Assoc $ L.map (uncurry goFS) $ psqToList psq + go (SChoice _ _ _ _ psq) = Node "S" $ Assoc $ L.map (uncurry goFS) $ psqToList psq + go (GoalChoice _ psq) = Node "G" $ Assoc $ L.map (uncurry goG) $ PSQ.toList psq + go (Done _rdm _s) = Node "D" $ Assoc [] + go (Fail cs _reason) = Node "X" $ Assoc [("CS", Leaf $ goCS cs)] + + psqToList :: W.WeightedPSQ w k v -> [(k, v)] + psqToList = L.map (\(_, k, v) -> (k, v)) . W.toList + + -- Show package choice + goP :: QPN -> POption -> Tree d c -> (String, SimpleTree) + goP _ (POption (I ver _loc) Nothing) subtree = (T.display ver, go subtree) + goP (Q _ pn) (POption _ (Just pp)) subtree = (showQPN (Q pp pn), go subtree) + + -- Show flag or stanza choice + goFS :: Bool -> Tree d c -> (String, SimpleTree) + goFS val subtree = (show val, go subtree) + + -- Show goal choice + goG :: Goal QPN -> Tree d c -> (String, SimpleTree) + goG (Goal var gr) subtree = (showVar var ++ " (" ++ shortGR gr ++ ")", go subtree) + + -- Variation on 'showGR' that produces shorter strings + -- (Actually, QGoalReason records more info than necessary: we only need + -- to know the variable that introduced the goal, not the value assigned + -- to that variable) + shortGR :: QGoalReason -> String + shortGR UserGoal = "user" + shortGR (DependencyGoal dr) = showDependencyReason dr + + -- Show conflict set + goCS :: ConflictSet -> String + goCS cs = "{" ++ (intercalate "," . L.map showVar . 
CS.toList $ cs) ++ "}" +#endif + +-- | Replace all goal reasons with a dummy goal reason in the tree +-- +-- This is useful for debugging (when experimenting with the impact of GRs) +_removeGR :: Tree d c -> Tree d QGoalReason +_removeGR = trav go + where + go :: TreeF d c (Tree d QGoalReason) -> TreeF d QGoalReason (Tree d QGoalReason) + go (PChoiceF qpn rdm _ psq) = PChoiceF qpn rdm dummy psq + go (FChoiceF qfn rdm _ a b d psq) = FChoiceF qfn rdm dummy a b d psq + go (SChoiceF qsn rdm _ a psq) = SChoiceF qsn rdm dummy a psq + go (GoalChoiceF rdm psq) = GoalChoiceF rdm (goG psq) + go (DoneF rdm s) = DoneF rdm s + go (FailF cs reason) = FailF cs reason + + goG :: PSQ (Goal QPN) (Tree d QGoalReason) -> PSQ (Goal QPN) (Tree d QGoalReason) + goG = PSQ.fromList + . L.map (\(Goal var _, subtree) -> (Goal var dummy, subtree)) + . PSQ.toList + + dummy :: QGoalReason + dummy = + DependencyGoal $ + DependencyReason + (Q (PackagePath DefaultNamespace QualToplevel) (mkPackageName "$")) + M.empty S.empty diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Tree.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Tree.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Tree.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Tree.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,193 @@ +{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} +module Distribution.Solver.Modular.Tree + ( POption(..) + , Tree(..) + , TreeF(..) + , Weight + , FailReason(..) + , ConflictingDep(..) 
+ , ana + , cata + , inn + , innM + , para + , trav + , zeroOrOneChoices + , active + ) where + +import Control.Monad hiding (mapM, sequence) +import Data.Foldable +import Data.Traversable +import Prelude hiding (foldr, mapM, sequence) + +import Distribution.Solver.Modular.Dependency +import Distribution.Solver.Modular.Flag +import Distribution.Solver.Modular.Package +import Distribution.Solver.Modular.PSQ (PSQ) +import Distribution.Solver.Modular.Version +import Distribution.Solver.Modular.WeightedPSQ (WeightedPSQ) +import qualified Distribution.Solver.Modular.WeightedPSQ as W +import Distribution.Solver.Types.ConstraintSource +import Distribution.Solver.Types.Flag +import Distribution.Solver.Types.PackagePath +import Language.Haskell.Extension (Extension, Language) + +type Weight = Double + +-- | Type of the search tree. Inlining the choice nodes for now. Weights on +-- package, flag, and stanza choices control the traversal order. +-- +-- The tree can hold additional data on 'Done' nodes (type 'd') and choice nodes +-- (type 'c'). For example, during the final traversal, choice nodes contain the +-- variables that introduced the choices, and 'Done' nodes contain the +-- assignments for all variables. +-- +-- TODO: The weight type should be changed from [Double] to Double to avoid +-- giving too much weight to preferences that are applied later. +data Tree d c = + -- | Choose a version for a package (or choose to link) + PChoice QPN RevDepMap c (WeightedPSQ [Weight] POption (Tree d c)) + + -- | Choose a value for a flag + -- + -- The Bool is the default value. 
+ | FChoice QFN RevDepMap c WeakOrTrivial FlagType Bool (WeightedPSQ [Weight] Bool (Tree d c)) + + -- | Choose whether or not to enable a stanza + | SChoice QSN RevDepMap c WeakOrTrivial (WeightedPSQ [Weight] Bool (Tree d c)) + + -- | Choose which choice to make next + -- + -- Invariants: + -- + -- * PSQ should never be empty + -- * For each choice we additionally record the 'QGoalReason' why we are + -- introducing that goal into tree. Note that most of the time we are + -- working with @Tree QGoalReason@; in that case, we must have the + -- invariant that the 'QGoalReason' cached in the 'PChoice', 'FChoice' + -- or 'SChoice' directly below a 'GoalChoice' node must equal the reason + -- recorded on that 'GoalChoice' node. + | GoalChoice RevDepMap (PSQ (Goal QPN) (Tree d c)) + + -- | We're done -- we found a solution! + | Done RevDepMap d + + -- | We failed to find a solution in this path through the tree + | Fail ConflictSet FailReason + +-- | A package option is a package instance with an optional linking annotation +-- +-- The modular solver has a number of package goals to solve for, and can only +-- pick a single package version for a single goal. In order to allow to +-- install multiple versions of the same package as part of a single solution +-- the solver uses qualified goals. For example, @0.P@ and @1.P@ might both +-- be qualified goals for @P@, allowing to pick a difference version of package +-- @P@ for @0.P@ and @1.P@. +-- +-- Linking is an essential part of this story. In addition to picking a specific +-- version for @1.P@, the solver can also decide to link @1.P@ to @0.P@ (or +-- vice versa). It means that @1.P@ and @0.P@ really must be the very same package +-- (and hence must have the same build time configuration, and their +-- dependencies must also be the exact same). +-- +-- See for details. 
+data POption = POption I (Maybe PackagePath) + deriving (Eq, Show) + +data FailReason = UnsupportedExtension Extension + | UnsupportedLanguage Language + | MissingPkgconfigPackage PkgconfigName VR + | NewPackageDoesNotMatchExistingConstraint ConflictingDep + | ConflictingConstraints ConflictingDep ConflictingDep + | NewPackageIsMissingRequiredComponent ExposedComponent (DependencyReason QPN) + | NewPackageHasUnbuildableRequiredComponent ExposedComponent (DependencyReason QPN) + | PackageRequiresMissingComponent QPN ExposedComponent + | PackageRequiresUnbuildableComponent QPN ExposedComponent + | CannotInstall + | CannotReinstall + | Shadowed + | Broken + | GlobalConstraintVersion VR ConstraintSource + | GlobalConstraintInstalled ConstraintSource + | GlobalConstraintSource ConstraintSource + | GlobalConstraintFlag ConstraintSource + | ManualFlag + | MalformedFlagChoice QFN + | MalformedStanzaChoice QSN + | EmptyGoalChoice + | Backjump + | MultipleInstances + | DependenciesNotLinked String + | CyclicDependencies + | UnsupportedSpecVer Ver + deriving (Eq, Show) + +-- | Information about a dependency involved in a conflict, for error messages. +data ConflictingDep = ConflictingDep (DependencyReason QPN) (PkgComponent QPN) CI + deriving (Eq, Show) + +-- | Functor for the tree type. 'a' is the type of nodes' children. 'd' and 'c' +-- have the same meaning as in 'Tree'. 
+data TreeF d c a = + PChoiceF QPN RevDepMap c (WeightedPSQ [Weight] POption a) + | FChoiceF QFN RevDepMap c WeakOrTrivial FlagType Bool (WeightedPSQ [Weight] Bool a) + | SChoiceF QSN RevDepMap c WeakOrTrivial (WeightedPSQ [Weight] Bool a) + | GoalChoiceF RevDepMap (PSQ (Goal QPN) a) + | DoneF RevDepMap d + | FailF ConflictSet FailReason + deriving (Functor, Foldable, Traversable) + +out :: Tree d c -> TreeF d c (Tree d c) +out (PChoice p s i ts) = PChoiceF p s i ts +out (FChoice p s i b m d ts) = FChoiceF p s i b m d ts +out (SChoice p s i b ts) = SChoiceF p s i b ts +out (GoalChoice s ts) = GoalChoiceF s ts +out (Done x s ) = DoneF x s +out (Fail c x ) = FailF c x + +inn :: TreeF d c (Tree d c) -> Tree d c +inn (PChoiceF p s i ts) = PChoice p s i ts +inn (FChoiceF p s i b m d ts) = FChoice p s i b m d ts +inn (SChoiceF p s i b ts) = SChoice p s i b ts +inn (GoalChoiceF s ts) = GoalChoice s ts +inn (DoneF x s ) = Done x s +inn (FailF c x ) = Fail c x + +innM :: Monad m => TreeF d c (m (Tree d c)) -> m (Tree d c) +innM (PChoiceF p s i ts) = liftM (PChoice p s i ) (sequence ts) +innM (FChoiceF p s i b m d ts) = liftM (FChoice p s i b m d) (sequence ts) +innM (SChoiceF p s i b ts) = liftM (SChoice p s i b ) (sequence ts) +innM (GoalChoiceF s ts) = liftM (GoalChoice s ) (sequence ts) +innM (DoneF x s ) = return $ Done x s +innM (FailF c x ) = return $ Fail c x + +-- | Determines whether a tree is active, i.e., isn't a failure node. +active :: Tree d c -> Bool +active (Fail _ _) = False +active _ = True + +-- | Approximates the number of active choices that are available in a node. +-- Note that we count goal choices as having one choice, always. 
+zeroOrOneChoices :: Tree d c -> Bool +zeroOrOneChoices (PChoice _ _ _ ts) = W.isZeroOrOne (W.filter active ts) +zeroOrOneChoices (FChoice _ _ _ _ _ _ ts) = W.isZeroOrOne (W.filter active ts) +zeroOrOneChoices (SChoice _ _ _ _ ts) = W.isZeroOrOne (W.filter active ts) +zeroOrOneChoices (GoalChoice _ _ ) = True +zeroOrOneChoices (Done _ _ ) = True +zeroOrOneChoices (Fail _ _ ) = True + +-- | Catamorphism on trees. +cata :: (TreeF d c a -> a) -> Tree d c -> a +cata phi x = (phi . fmap (cata phi) . out) x + +trav :: (TreeF d c (Tree d a) -> TreeF d a (Tree d a)) -> Tree d c -> Tree d a +trav psi x = cata (inn . psi) x + +-- | Paramorphism on trees. +para :: (TreeF d c (a, Tree d c) -> a) -> Tree d c -> a +para phi = phi . fmap (\ x -> (para phi x, x)) . out + +-- | Anamorphism on trees. +ana :: (a -> TreeF d c a) -> a -> Tree d c +ana psi = inn . fmap (ana psi) . psi diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Validate.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Validate.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Validate.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Validate.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,536 @@ +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE CPP #-} +#ifdef DEBUG_CONFLICT_SETS +{-# LANGUAGE ImplicitParams #-} +#endif +module Distribution.Solver.Modular.Validate (validateTree) where + +-- Validation of the tree. +-- +-- The task here is to make sure all constraints hold. After validation, any +-- assignment returned by exploration of the tree should be a complete valid +-- assignment, i.e., actually constitute a solution. 
+ +import Control.Applicative +import Control.Monad.Reader hiding (sequence) +import Data.Function (on) +import Data.List as L +import Data.Set as S +import Data.Traversable +import Prelude hiding (sequence) + +import Language.Haskell.Extension (Extension, Language) + +import Data.Map.Strict as M +import Distribution.Compiler (CompilerInfo(..)) + +import Distribution.Solver.Modular.Assignment +import qualified Distribution.Solver.Modular.ConflictSet as CS +import Distribution.Solver.Modular.Dependency +import Distribution.Solver.Modular.Flag +import Distribution.Solver.Modular.Index +import Distribution.Solver.Modular.Package +import Distribution.Solver.Modular.Tree +import Distribution.Solver.Modular.Version +import qualified Distribution.Solver.Modular.WeightedPSQ as W + +import Distribution.Solver.Types.PackagePath +import Distribution.Solver.Types.PkgConfigDb (PkgConfigDb, pkgConfigPkgIsPresent) + +#ifdef DEBUG_CONFLICT_SETS +import GHC.Stack (CallStack) +#endif + +-- In practice, most constraints are implication constraints (IF we have made +-- a number of choices, THEN we also have to ensure that). We call constraints +-- that for which the preconditions are fulfilled ACTIVE. We maintain a set +-- of currently active constraints that we pass down the node. +-- +-- We aim at detecting inconsistent states as early as possible. +-- +-- Whenever we make a choice, there are two things that need to happen: +-- +-- (1) We must check that the choice is consistent with the currently +-- active constraints. +-- +-- (2) The choice increases the set of active constraints. For the new +-- active constraints, we must check that they are consistent with +-- the current state. +-- +-- We can actually merge (1) and (2) by saying the the current choice is +-- a new active constraint, fixing the choice. +-- +-- If a test fails, we have detected an inconsistent state. We can +-- disable the current subtree and do not have to traverse it any further. 
+-- +-- We need a good way to represent the current state, i.e., the current +-- set of active constraints. Since the main situation where we have to +-- search in it is (1), it seems best to store the state by package: for +-- every package, we store which versions are still allowed. If for any +-- package, we have inconsistent active constraints, we can also stop. +-- This is a particular way to read task (2): +-- +-- (2, weak) We only check if the new constraints are consistent with +-- the choices we've already made, and add them to the active set. +-- +-- (2, strong) We check if the new constraints are consistent with the +-- choices we've already made, and the constraints we already have. +-- +-- It currently seems as if we're implementing the weak variant. However, +-- when used together with 'preferEasyGoalChoices', we will find an +-- inconsistent state in the very next step. +-- +-- What do we do about flags? +-- +-- Like for packages, we store the flag choices we have already made. +-- Now, regarding (1), we only have to test whether we've decided the +-- current flag before. Regarding (2), the interesting bit is in discovering +-- the new active constraints. To this end, we look up the constraints for +-- the package the flag belongs to, and traverse its flagged dependencies. +-- Wherever we find the flag in question, we start recording dependencies +-- underneath as new active dependencies. If we encounter other flags, we +-- check if we've chosen them already and either proceed or stop. + +-- | The state needed during validation. +data ValidateState = VS { + supportedExt :: Extension -> Bool, + supportedLang :: Language -> Bool, + presentPkgs :: PkgconfigName -> VR -> Bool, + index :: Index, + + -- Saved, scoped, dependencies. Every time 'validate' makes a package choice, + -- it qualifies the package's dependencies and saves them in this map. 
Then + -- the qualified dependencies are available for subsequent flag and stanza + -- choices for the same package. + saved :: Map QPN (FlaggedDeps QPN), + + pa :: PreAssignment, + + -- Map from package name to the components that are provided by the chosen + -- instance of that package, and whether those components are buildable. + availableComponents :: Map QPN (Map ExposedComponent IsBuildable), + + -- Map from package name to the components that are required from that + -- package. + requiredComponents :: Map QPN ComponentDependencyReasons, + + qualifyOptions :: QualifyOptions +} + +newtype Validate a = Validate (Reader ValidateState a) + deriving (Functor, Applicative, Monad, MonadReader ValidateState) + +runValidate :: Validate a -> ValidateState -> a +runValidate (Validate r) = runReader r + +-- | A preassignment comprises knowledge about variables, but not +-- necessarily fixed values. +data PreAssignment = PA PPreAssignment FAssignment SAssignment + +-- | A (partial) package preassignment. Qualified package names +-- are associated with MergedPkgDeps. +type PPreAssignment = Map QPN MergedPkgDep + +-- | A dependency on a component, including its DependencyReason. +data PkgDep = PkgDep (DependencyReason QPN) (PkgComponent QPN) CI + +-- | Map from component name to one of the reasons that the component is +-- required. +type ComponentDependencyReasons = Map ExposedComponent (DependencyReason QPN) + +-- | MergedPkgDep records constraints about the instances that can still be +-- chosen, and in the extreme case fixes a concrete instance. Otherwise, it is a +-- list of version ranges paired with the goals / variables that introduced +-- them. It also records whether a package is a build-tool dependency, for each +-- reason that it was introduced. 
+-- +-- It is important to store the component name with the version constraint, for +-- error messages, because whether something is a build-tool dependency affects +-- its qualifier, which affects which constraint is applied. +data MergedPkgDep = + MergedDepFixed ExposedComponent (DependencyReason QPN) I + | MergedDepConstrained [VROrigin] + +-- | Version ranges paired with origins. +type VROrigin = (VR, ExposedComponent, DependencyReason QPN) + +-- | The information needed to create a 'Fail' node. +type Conflict = (ConflictSet, FailReason) + +validate :: Tree d c -> Validate (Tree d c) +validate = cata go + where + go :: TreeF d c (Validate (Tree d c)) -> Validate (Tree d c) + + go (PChoiceF qpn rdm gr ts) = PChoice qpn rdm gr <$> sequence (W.mapWithKey (goP qpn) ts) + go (FChoiceF qfn rdm gr b m d ts) = + do + -- Flag choices may occur repeatedly (because they can introduce new constraints + -- in various places). However, subsequent choices must be consistent. We thereby + -- collapse repeated flag choice nodes. + PA _ pfa _ <- asks pa -- obtain current flag-preassignment + case M.lookup qfn pfa of + Just rb -> -- flag has already been assigned; collapse choice to the correct branch + case W.lookup rb ts of + Just t -> goF qfn rb t + Nothing -> return $ Fail (varToConflictSet (F qfn)) (MalformedFlagChoice qfn) + Nothing -> -- flag choice is new, follow both branches + FChoice qfn rdm gr b m d <$> sequence (W.mapWithKey (goF qfn) ts) + go (SChoiceF qsn rdm gr b ts) = + do + -- Optional stanza choices are very similar to flag choices. 
+ PA _ _ psa <- asks pa -- obtain current stanza-preassignment + case M.lookup qsn psa of + Just rb -> -- stanza choice has already been made; collapse choice to the correct branch + case W.lookup rb ts of + Just t -> goS qsn rb t + Nothing -> return $ Fail (varToConflictSet (S qsn)) (MalformedStanzaChoice qsn) + Nothing -> -- stanza choice is new, follow both branches + SChoice qsn rdm gr b <$> sequence (W.mapWithKey (goS qsn) ts) + + -- We don't need to do anything for goal choices or failure nodes. + go (GoalChoiceF rdm ts) = GoalChoice rdm <$> sequence ts + go (DoneF rdm s ) = pure (Done rdm s) + go (FailF c fr ) = pure (Fail c fr) + + -- What to do for package nodes ... + goP :: QPN -> POption -> Validate (Tree d c) -> Validate (Tree d c) + goP qpn@(Q _pp pn) (POption i _) r = do + PA ppa pfa psa <- asks pa -- obtain current preassignment + extSupported <- asks supportedExt -- obtain the supported extensions + langSupported <- asks supportedLang -- obtain the supported languages + pkgPresent <- asks presentPkgs -- obtain the present pkg-config pkgs + idx <- asks index -- obtain the index + svd <- asks saved -- obtain saved dependencies + aComps <- asks availableComponents + rComps <- asks requiredComponents + qo <- asks qualifyOptions + -- obtain dependencies and index-dictated exclusions introduced by the choice + let (PInfo deps comps _ mfr) = idx ! pn ! i + -- qualify the deps in the current scope + let qdeps = qualifyDeps qo qpn deps + -- the new active constraints are given by the instance we have chosen, + -- plus the dependency information we have for that instance + let newactives = extractAllDeps pfa psa qdeps + -- We now try to extend the partial assignment with the new active constraints. 
+ let mnppa = extend extSupported langSupported pkgPresent newactives + =<< extendWithPackageChoice (PI qpn i) ppa + -- In case we continue, we save the scoped dependencies + let nsvd = M.insert qpn qdeps svd + case mfr of + Just fr -> -- The index marks this as an invalid choice. We can stop. + return (Fail (varToConflictSet (P qpn)) fr) + Nothing -> + let newDeps :: Either Conflict (PPreAssignment, Map QPN ComponentDependencyReasons) + newDeps = do + nppa <- mnppa + rComps' <- extendRequiredComponents aComps rComps newactives + checkComponentsInNewPackage (M.findWithDefault M.empty qpn rComps) qpn comps + return (nppa, rComps') + in case newDeps of + Left (c, fr) -> -- We have an inconsistency. We can stop. + return (Fail c fr) + Right (nppa, rComps') -> -- We have an updated partial assignment for the recursive validation. + local (\ s -> s { pa = PA nppa pfa psa + , saved = nsvd + , availableComponents = M.insert qpn comps aComps + , requiredComponents = rComps' + }) r + + -- What to do for flag nodes ... + goF :: QFN -> Bool -> Validate (Tree d c) -> Validate (Tree d c) + goF qfn@(FN qpn _f) b r = do + PA ppa pfa psa <- asks pa -- obtain current preassignment + extSupported <- asks supportedExt -- obtain the supported extensions + langSupported <- asks supportedLang -- obtain the supported languages + pkgPresent <- asks presentPkgs -- obtain the present pkg-config pkgs + svd <- asks saved -- obtain saved dependencies + aComps <- asks availableComponents + rComps <- asks requiredComponents + -- Note that there should be saved dependencies for the package in question, + -- because while building, we do not choose flags before we see the packages + -- that define them. + let qdeps = svd ! qpn + -- We take the *saved* dependencies, because these have been qualified in the + -- correct scope. 
+ -- + -- Extend the flag assignment + let npfa = M.insert qfn b pfa + -- We now try to get the new active dependencies we might learn about because + -- we have chosen a new flag. + let newactives = extractNewDeps (F qfn) b npfa psa qdeps + mNewRequiredComps = extendRequiredComponents aComps rComps newactives + -- As in the package case, we try to extend the partial assignment. + let mnppa = extend extSupported langSupported pkgPresent newactives ppa + case liftM2 (,) mnppa mNewRequiredComps of + Left (c, fr) -> return (Fail c fr) -- inconsistency found + Right (nppa, rComps') -> + local (\ s -> s { pa = PA nppa npfa psa, requiredComponents = rComps' }) r + + -- What to do for stanza nodes (similar to flag nodes) ... + goS :: QSN -> Bool -> Validate (Tree d c) -> Validate (Tree d c) + goS qsn@(SN qpn _f) b r = do + PA ppa pfa psa <- asks pa -- obtain current preassignment + extSupported <- asks supportedExt -- obtain the supported extensions + langSupported <- asks supportedLang -- obtain the supported languages + pkgPresent <- asks presentPkgs -- obtain the present pkg-config pkgs + svd <- asks saved -- obtain saved dependencies + aComps <- asks availableComponents + rComps <- asks requiredComponents + -- Note that there should be saved dependencies for the package in question, + -- because while building, we do not choose flags before we see the packages + -- that define them. + let qdeps = svd ! qpn + -- We take the *saved* dependencies, because these have been qualified in the + -- correct scope. + -- + -- Extend the flag assignment + let npsa = M.insert qsn b psa + -- We now try to get the new active dependencies we might learn about because + -- we have chosen a new flag. + let newactives = extractNewDeps (S qsn) b pfa npsa qdeps + mNewRequiredComps = extendRequiredComponents aComps rComps newactives + -- As in the package case, we try to extend the partial assignment. 
+ let mnppa = extend extSupported langSupported pkgPresent newactives ppa + case liftM2 (,) mnppa mNewRequiredComps of + Left (c, fr) -> return (Fail c fr) -- inconsistency found + Right (nppa, rComps') -> + local (\ s -> s { pa = PA nppa pfa npsa, requiredComponents = rComps' }) r + +-- | Check that a newly chosen package instance contains all components that +-- are required from that package so far. The components must also be buildable. +checkComponentsInNewPackage :: ComponentDependencyReasons + -> QPN + -> Map ExposedComponent IsBuildable + -> Either Conflict () +checkComponentsInNewPackage required qpn providedComps = + case M.toList $ deleteKeys (M.keys providedComps) required of + (missingComp, dr) : _ -> + Left $ mkConflict missingComp dr NewPackageIsMissingRequiredComponent + [] -> + case M.toList $ deleteKeys buildableProvidedComps required of + (unbuildableComp, dr) : _ -> + Left $ mkConflict unbuildableComp dr NewPackageHasUnbuildableRequiredComponent + [] -> Right () + where + mkConflict :: ExposedComponent + -> DependencyReason QPN + -> (ExposedComponent -> DependencyReason QPN -> FailReason) + -> Conflict + mkConflict comp dr mkFailure = + (CS.insert (P qpn) (dependencyReasonToCS dr), mkFailure comp dr) + + buildableProvidedComps :: [ExposedComponent] + buildableProvidedComps = [comp | (comp, IsBuildable True) <- M.toList providedComps] + + deleteKeys :: Ord k => [k] -> Map k v -> Map k v + deleteKeys ks m = L.foldr M.delete m ks + +-- | We try to extract as many concrete dependencies from the given flagged +-- dependencies as possible. We make use of all the flag knowledge we have +-- already acquired. 
+extractAllDeps :: FAssignment -> SAssignment -> FlaggedDeps QPN -> [LDep QPN] +extractAllDeps fa sa deps = do + d <- deps + case d of + Simple sd _ -> return sd + Flagged qfn _ td fd -> case M.lookup qfn fa of + Nothing -> mzero + Just True -> extractAllDeps fa sa td + Just False -> extractAllDeps fa sa fd + Stanza qsn td -> case M.lookup qsn sa of + Nothing -> mzero + Just True -> extractAllDeps fa sa td + Just False -> [] + +-- | We try to find new dependencies that become available due to the given +-- flag or stanza choice. We therefore look for the choice in question, and then call +-- 'extractAllDeps' for everything underneath. +extractNewDeps :: Var QPN -> Bool -> FAssignment -> SAssignment -> FlaggedDeps QPN -> [LDep QPN] +extractNewDeps v b fa sa = go + where + go :: FlaggedDeps QPN -> [LDep QPN] + go deps = do + d <- deps + case d of + Simple _ _ -> mzero + Flagged qfn' _ td fd + | v == F qfn' -> if b then extractAllDeps fa sa td else extractAllDeps fa sa fd + | otherwise -> case M.lookup qfn' fa of + Nothing -> mzero + Just True -> go td + Just False -> go fd + Stanza qsn' td + | v == S qsn' -> if b then extractAllDeps fa sa td else [] + | otherwise -> case M.lookup qsn' sa of + Nothing -> mzero + Just True -> go td + Just False -> [] + +-- | Extend a package preassignment. +-- +-- Takes the variable that causes the new constraints, a current preassignment +-- and a set of new dependency constraints. +-- +-- We're trying to extend the preassignment with each dependency one by one. +-- Each dependency is for a particular variable. We check if we already have +-- constraints for that variable in the current preassignment. If so, we're +-- trying to merge the constraints. +-- +-- Either returns a witness of the conflict that would arise during the merge, +-- or the successfully extended assignment. 
+extend :: (Extension -> Bool) -- ^ is a given extension supported + -> (Language -> Bool) -- ^ is a given language supported + -> (PkgconfigName -> VR -> Bool) -- ^ is a given pkg-config requirement satisfiable + -> [LDep QPN] + -> PPreAssignment + -> Either Conflict PPreAssignment +extend extSupported langSupported pkgPresent newactives ppa = foldM extendSingle ppa newactives + where + + extendSingle :: PPreAssignment -> LDep QPN -> Either Conflict PPreAssignment + extendSingle a (LDep dr (Ext ext )) = + if extSupported ext then Right a + else Left (dependencyReasonToCS dr, UnsupportedExtension ext) + extendSingle a (LDep dr (Lang lang)) = + if langSupported lang then Right a + else Left (dependencyReasonToCS dr, UnsupportedLanguage lang) + extendSingle a (LDep dr (Pkg pn vr)) = + if pkgPresent pn vr then Right a + else Left (dependencyReasonToCS dr, MissingPkgconfigPackage pn vr) + extendSingle a (LDep dr (Dep dep@(PkgComponent qpn _) ci)) = + let mergedDep = M.findWithDefault (MergedDepConstrained []) qpn a + in case (\ x -> M.insert qpn x a) <$> merge mergedDep (PkgDep dr dep ci) of + Left (c, (d, d')) -> Left (c, ConflictingConstraints d d') + Right x -> Right x + +-- | Extend a package preassignment with a package choice. For example, when +-- the solver chooses foo-2.0, it tries to add the constraint foo==2.0. +-- +-- TODO: The new constraint is implemented as a dependency from foo to foo's +-- library. That isn't correct, because foo might only be needed as a build +-- tool dependency. The implemention may need to change when we support +-- component-based dependency solving. 
+extendWithPackageChoice :: PI QPN -> PPreAssignment -> Either Conflict PPreAssignment +extendWithPackageChoice (PI qpn i) ppa = + let mergedDep = M.findWithDefault (MergedDepConstrained []) qpn ppa + newChoice = PkgDep (DependencyReason qpn M.empty S.empty) (PkgComponent qpn ExposedLib) (Fixed i) + in case (\ x -> M.insert qpn x ppa) <$> merge mergedDep newChoice of + Left (c, (d, _d')) -> -- Don't include the package choice in the + -- FailReason, because it is redundant. + Left (c, NewPackageDoesNotMatchExistingConstraint d) + Right x -> Right x + +-- | Merge constrained instances. We currently adopt a lazy strategy for +-- merging, i.e., we only perform actual checking if one of the two choices +-- is fixed. If the merge fails, we return a conflict set indicating the +-- variables responsible for the failure, as well as the two conflicting +-- fragments. +-- +-- Note that while there may be more than one conflicting pair of version +-- ranges, we only return the first we find. +-- +-- The ConflictingDeps are returned in order, i.e., the first describes the +-- conflicting part of the MergedPkgDep, and the second describes the PkgDep. +-- +-- TODO: Different pairs might have different conflict sets. We're +-- obviously interested to return a conflict that has a "better" conflict +-- set in the sense the it contains variables that allow us to backjump +-- further. We might apply some heuristics here, such as to change the +-- order in which we check the constraints. 
+merge :: +#ifdef DEBUG_CONFLICT_SETS + (?loc :: CallStack) => +#endif + MergedPkgDep -> PkgDep -> Either (ConflictSet, (ConflictingDep, ConflictingDep)) MergedPkgDep +merge (MergedDepFixed comp1 vs1 i1) (PkgDep vs2 (PkgComponent p comp2) ci@(Fixed i2)) + | i1 == i2 = Right $ MergedDepFixed comp1 vs1 i1 + | otherwise = + Left ( (CS.union `on` dependencyReasonToCS) vs1 vs2 + , ( ConflictingDep vs1 (PkgComponent p comp1) (Fixed i1) + , ConflictingDep vs2 (PkgComponent p comp2) ci ) ) + +merge (MergedDepFixed comp1 vs1 i@(I v _)) (PkgDep vs2 (PkgComponent p comp2) ci@(Constrained vr)) + | checkVR vr v = Right $ MergedDepFixed comp1 vs1 i + | otherwise = + Left ( (CS.union `on` dependencyReasonToCS) vs1 vs2 + , ( ConflictingDep vs1 (PkgComponent p comp1) (Fixed i) + , ConflictingDep vs2 (PkgComponent p comp2) ci ) ) + +merge (MergedDepConstrained vrOrigins) (PkgDep vs2 (PkgComponent p comp2) ci@(Fixed i@(I v _))) = + go vrOrigins -- I tried "reverse vrOrigins" here, but it seems to slow things down ... + where + go :: [VROrigin] -> Either (ConflictSet, (ConflictingDep, ConflictingDep)) MergedPkgDep + go [] = Right (MergedDepFixed comp2 vs2 i) + go ((vr, comp1, vs1) : vros) + | checkVR vr v = go vros + | otherwise = + Left ( (CS.union `on` dependencyReasonToCS) vs1 vs2 + , ( ConflictingDep vs1 (PkgComponent p comp1) (Constrained vr) + , ConflictingDep vs2 (PkgComponent p comp2) ci ) ) + +merge (MergedDepConstrained vrOrigins) (PkgDep vs2 (PkgComponent _ comp2) (Constrained vr)) = + Right (MergedDepConstrained $ + + -- TODO: This line appends the new version range, to preserve the order used + -- before a refactoring. Consider prepending the version range, if there is + -- no negative performance impact. + vrOrigins ++ [(vr, comp2, vs2)]) + +-- | Takes a list of new dependencies and uses it to try to update the map of +-- known component dependencies. 
It returns a failure when a new dependency +-- requires a component that is missing or unbuildable in a previously chosen +-- packages. +extendRequiredComponents :: Map QPN (Map ExposedComponent IsBuildable) + -> Map QPN ComponentDependencyReasons + -> [LDep QPN] + -> Either Conflict (Map QPN ComponentDependencyReasons) +extendRequiredComponents available = foldM extendSingle + where + extendSingle :: Map QPN ComponentDependencyReasons + -> LDep QPN + -> Either Conflict (Map QPN ComponentDependencyReasons) + extendSingle required (LDep dr (Dep (PkgComponent qpn comp) _)) = + let compDeps = M.findWithDefault M.empty qpn required + in -- Only check for the existence of the component if its package has + -- already been chosen. + case M.lookup qpn available of + Just comps + | M.notMember comp comps -> + Left $ mkConflict qpn comp dr PackageRequiresMissingComponent + | L.notElem comp (buildableComps comps) -> + Left $ mkConflict qpn comp dr PackageRequiresUnbuildableComponent + _ -> + Right $ M.insertWith M.union qpn (M.insert comp dr compDeps) required + extendSingle required _ = Right required + + mkConflict :: QPN + -> ExposedComponent + -> DependencyReason QPN + -> (QPN -> ExposedComponent -> FailReason) + -> Conflict + mkConflict qpn comp dr mkFailure = + (CS.insert (P qpn) (dependencyReasonToCS dr), mkFailure qpn comp) + + buildableComps :: Map comp IsBuildable -> [comp] + buildableComps comps = [comp | (comp, IsBuildable True) <- M.toList comps] + + +-- | Interface. 
+validateTree :: CompilerInfo -> Index -> PkgConfigDb -> Tree d c -> Tree d c +validateTree cinfo idx pkgConfigDb t = runValidate (validate t) VS { + supportedExt = maybe (const True) -- if compiler has no list of extensions, we assume everything is supported + (\ es -> let s = S.fromList es in \ x -> S.member x s) + (compilerInfoExtensions cinfo) + , supportedLang = maybe (const True) + (flip L.elem) -- use list lookup because language list is small and no Ord instance + (compilerInfoLanguages cinfo) + , presentPkgs = pkgConfigPkgIsPresent pkgConfigDb + , index = idx + , saved = M.empty + , pa = PA M.empty M.empty M.empty + , availableComponents = M.empty + , requiredComponents = M.empty + , qualifyOptions = defaultQualifyOptions idx + } diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Var.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Var.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Var.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Var.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,34 @@ +{-# LANGUAGE DeriveFunctor #-} +module Distribution.Solver.Modular.Var ( + Var(..) + , showVar + , varPN + ) where + +import Prelude hiding (pi) + +import Distribution.Solver.Modular.Flag +import Distribution.Solver.Types.PackagePath + +{------------------------------------------------------------------------------- + Variables +-------------------------------------------------------------------------------} + +-- | The type of variables that play a role in the solver. +-- Note that the tree currently does not use this type directly, +-- and rather has separate tree nodes for the different types of +-- variables. This fits better with the fact that in most cases, +-- these have to be treated differently. 
+data Var qpn = P qpn | F (FN qpn) | S (SN qpn) + deriving (Eq, Ord, Show, Functor) + +showVar :: Var QPN -> String +showVar (P qpn) = showQPN qpn +showVar (F qfn) = showQFN qfn +showVar (S qsn) = showQSN qsn + +-- | Extract the package name from a Var +varPN :: Var qpn -> qpn +varPN (P qpn) = qpn +varPN (F (FN qpn _)) = qpn +varPN (S (SN qpn _)) = qpn diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Version.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Version.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Version.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/Version.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,53 @@ +module Distribution.Solver.Modular.Version + ( Ver + , VR + , anyVR + , checkVR + , eqVR + , showVer + , showVR + , simplifyVR + , (.&&.) + , (.||.) + ) where + +import qualified Distribution.Version as CV -- from Cabal +import Distribution.Text -- from Cabal + +-- | Preliminary type for versions. +type Ver = CV.Version + +-- | String representation of a version. +showVer :: Ver -> String +showVer = display + +-- | Version range. Consists of a lower and upper bound. +type VR = CV.VersionRange + +-- | String representation of a version range. +showVR :: VR -> String +showVR = display + +-- | Unconstrained version range. +anyVR :: VR +anyVR = CV.anyVersion + +-- | Version range fixing a single version. +eqVR :: Ver -> VR +eqVR = CV.thisVersion + +-- | Intersect two version ranges. +(.&&.) :: VR -> VR -> VR +v1 .&&. v2 = simplifyVR $ CV.intersectVersionRanges v1 v2 + +-- | Union of two version ranges. +(.||.) :: VR -> VR -> VR +v1 .||. v2 = simplifyVR $ CV.unionVersionRanges v1 v2 + +-- | Simplify a version range. 
+simplifyVR :: VR -> VR +simplifyVR = CV.simplifyVersionRange + +-- | Checking a version against a version range. +checkVR :: VR -> Ver -> Bool +checkVR = flip CV.withinRange diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/WeightedPSQ.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/WeightedPSQ.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/WeightedPSQ.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular/WeightedPSQ.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,97 @@ +{-# LANGUAGE DeriveFunctor, DeriveFoldable, DeriveTraversable #-} +{-# LANGUAGE ScopedTypeVariables #-} +module Distribution.Solver.Modular.WeightedPSQ ( + WeightedPSQ + , fromList + , toList + , keys + , weights + , isZeroOrOne + , filter + , lookup + , mapWithKey + , mapWeightsWithKey + , union + , takeUntil + ) where + +import qualified Data.Foldable as F +import qualified Data.List as L +import Data.Ord (comparing) +import qualified Data.Traversable as T +import Prelude hiding (filter, lookup) + +-- | An association list that is sorted by weight. +-- +-- Each element has a key ('k'), value ('v'), and weight ('w'). All operations +-- that add elements or modify weights stably sort the elements by weight. +newtype WeightedPSQ w k v = WeightedPSQ [(w, k, v)] + deriving (Eq, Show, Functor, F.Foldable, T.Traversable) + +-- | /O(N)/. +filter :: (v -> Bool) -> WeightedPSQ k w v -> WeightedPSQ k w v +filter p (WeightedPSQ xs) = WeightedPSQ (L.filter (p . triple_3) xs) + +-- | /O(1)/. Return @True@ if the @WeightedPSQ@ contains zero or one elements. +isZeroOrOne :: WeightedPSQ w k v -> Bool +isZeroOrOne (WeightedPSQ []) = True +isZeroOrOne (WeightedPSQ [_]) = True +isZeroOrOne _ = False + +-- | /O(1)/. Return the elements in order. 
+toList :: WeightedPSQ w k v -> [(w, k, v)] +toList (WeightedPSQ xs) = xs + +-- | /O(N log N)/. +fromList :: Ord w => [(w, k, v)] -> WeightedPSQ w k v +fromList = WeightedPSQ . L.sortBy (comparing triple_1) + +-- | /O(N)/. Return the weights in order. +weights :: WeightedPSQ w k v -> [w] +weights (WeightedPSQ xs) = L.map triple_1 xs + +-- | /O(N)/. Return the keys in order. +keys :: WeightedPSQ w k v -> [k] +keys (WeightedPSQ xs) = L.map triple_2 xs + +-- | /O(N)/. Return the value associated with the first occurrence of the give +-- key, if it exists. +lookup :: Eq k => k -> WeightedPSQ w k v -> Maybe v +lookup k (WeightedPSQ xs) = triple_3 `fmap` L.find ((k ==) . triple_2) xs + +-- | /O(N log N)/. Update the weights. +mapWeightsWithKey :: Ord w2 + => (k -> w1 -> w2) + -> WeightedPSQ w1 k v + -> WeightedPSQ w2 k v +mapWeightsWithKey f (WeightedPSQ xs) = fromList $ + L.map (\ (w, k, v) -> (f k w, k, v)) xs + +-- | /O(N)/. Update the values. +mapWithKey :: (k -> v1 -> v2) -> WeightedPSQ w k v1 -> WeightedPSQ w k v2 +mapWithKey f (WeightedPSQ xs) = WeightedPSQ $ + L.map (\ (w, k, v) -> (w, k, f k v)) xs + +-- | /O((N + M) log (N + M))/. Combine two @WeightedPSQ@s, preserving all +-- elements. Elements from the first @WeightedPSQ@ come before elements in the +-- second when they have the same weight. +union :: Ord w => WeightedPSQ w k v -> WeightedPSQ w k v -> WeightedPSQ w k v +union (WeightedPSQ xs) (WeightedPSQ ys) = fromList (xs ++ ys) + +-- | /O(N)/. Return the prefix of values ending with the first element that +-- satisfies p, or all elements if none satisfy p. +takeUntil :: forall w k v. 
(v -> Bool) -> WeightedPSQ w k v -> WeightedPSQ w k v +takeUntil p (WeightedPSQ xs) = WeightedPSQ (go xs) + where + go :: [(w, k, v)] -> [(w, k, v)] + go [] = [] + go (y : ys) = y : if p (triple_3 y) then [] else go ys + +triple_1 :: (x, y, z) -> x +triple_1 (x, _, _) = x + +triple_2 :: (x, y, z) -> y +triple_2 (_, y, _) = y + +triple_3 :: (x, y, z) -> z +triple_3 (_, _, z) = z diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Modular.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Modular.hs 2018-11-26 08:42:58.000000000 +0000 @@ -0,0 +1,175 @@ +module Distribution.Solver.Modular + ( modularResolver, SolverConfig(..), PruneAfterFirstSuccess(..)) where + +-- Here, we try to map between the external cabal-install solver +-- interface and the internal interface that the solver actually +-- expects. There are a number of type conversions to perform: we +-- have to convert the package indices to the uniform index used +-- by the solver; we also have to convert the initial constraints; +-- and finally, we have to convert back the resulting install +-- plan. + +import Prelude () +import Distribution.Solver.Compat.Prelude + +import qualified Data.Map as M +import Data.Set (Set) +import Data.Ord +import Distribution.Compat.Graph + ( IsNode(..) 
) +import Distribution.Compiler + ( CompilerInfo ) +import Distribution.Solver.Modular.Assignment + ( Assignment, toCPs ) +import Distribution.Solver.Modular.ConfiguredConversion + ( convCP ) +import qualified Distribution.Solver.Modular.ConflictSet as CS +import Distribution.Solver.Modular.Dependency +import Distribution.Solver.Modular.Flag +import Distribution.Solver.Modular.Index +import Distribution.Solver.Modular.IndexConversion + ( convPIs ) +import Distribution.Solver.Modular.Log + ( SolverFailure(..), logToProgress ) +import Distribution.Solver.Modular.Package + ( PN ) +import Distribution.Solver.Modular.Solver + ( SolverConfig(..), PruneAfterFirstSuccess(..), solve ) +import Distribution.Solver.Types.DependencyResolver +import Distribution.Solver.Types.LabeledPackageConstraint +import Distribution.Solver.Types.PackageConstraint +import Distribution.Solver.Types.PackagePath +import Distribution.Solver.Types.PackagePreferences +import Distribution.Solver.Types.PkgConfigDb + ( PkgConfigDb ) +import Distribution.Solver.Types.Progress +import Distribution.Solver.Types.Variable +import Distribution.System + ( Platform(..) ) +import Distribution.Simple.Utils + ( ordNubBy ) +import Distribution.Verbosity + + +-- | Ties the two worlds together: classic cabal-install vs. the modular +-- solver. Performs the necessary translations before and after. +modularResolver :: SolverConfig -> DependencyResolver loc +modularResolver sc (Platform arch os) cinfo iidx sidx pkgConfigDB pprefs pcs pns = + fmap (uncurry postprocess) $ -- convert install plan + solve' sc cinfo idx pkgConfigDB pprefs gcs pns + where + -- Indices have to be converted into solver-specific uniform index. + idx = convPIs os arch cinfo gcs (shadowPkgs sc) (strongFlags sc) (solveExecutables sc) iidx sidx + -- Constraints have to be converted into a finite map indexed by PN. 
+ gcs = M.fromListWith (++) (map pair pcs) + where + pair lpc = (pcName $ unlabelPackageConstraint lpc, [lpc]) + + -- Results have to be converted into an install plan. 'convCP' removes + -- package qualifiers, which means that linked packages become duplicates + -- and can be removed. + postprocess a rdm = ordNubBy nodeKey $ + map (convCP iidx sidx) (toCPs a rdm) + + -- Helper function to extract the PN from a constraint. + pcName :: PackageConstraint -> PN + pcName (PackageConstraint scope _) = scopeToPackageName scope + +-- | Run 'D.S.Modular.Solver.solve' and then produce a summarized log to display +-- in the error case. +-- +-- When there is no solution, we produce the error message by rerunning the +-- solver but making it prefer the goals from the final conflict set from the +-- first run. We also set the backjump limit to 0, so that the log stops at the +-- first backjump and is relatively short. Preferring goals from the final +-- conflict set increases the probability that the log to the first backjump +-- contains package, flag, and stanza choices that are relevant to the final +-- failure. The solver shouldn't need to choose any packages that aren't in the +-- final conflict set. (For every variable in the final conflict set, the final +-- conflict set should also contain the variable that introduced that variable. +-- The solver can then follow that chain of variables in reverse order from the +-- user target to the conflict.) However, it is possible that the conflict set +-- contains unnecessary variables. +-- +-- Producing an error message when the solver reaches the backjump limit is more +-- complicated. There is no final conflict set, so we create one for the minimal +-- subtree containing the path that the solver took to the first backjump. This +-- conflict set helps explain why the solver reached the backjump limit, because +-- the first backjump contributes to reaching the backjump limit. 
Additionally, +-- the solver is much more likely to be able to finish traversing this subtree +-- before the backjump limit, since its size is linear (not exponential) in the +-- number of goal choices. We create it by pruning all children after the first +-- successful child under each node in the original tree, so that there is at +-- most one valid choice at each level. Then we use the final conflict set from +-- that run to generate an error message, as in the case where the solver found +-- that there was no solution. +-- +-- Using the full log from a rerun of the solver ensures that the log is +-- complete, i.e., it shows the whole chain of dependencies from the user +-- targets to the conflicting packages. +solve' :: SolverConfig + -> CompilerInfo + -> Index + -> PkgConfigDb + -> (PN -> PackagePreferences) + -> Map PN [LabeledPackageConstraint] + -> Set PN + -> Progress String String (Assignment, RevDepMap) +solve' sc cinfo idx pkgConfigDB pprefs gcs pns = + foldProgress Step (uncurry createErrorMsg) Done (runSolver printFullLog sc) + where + runSolver :: Bool -> SolverConfig + -> Progress String (SolverFailure, String) (Assignment, RevDepMap) + runSolver keepLog sc' = + logToProgress keepLog (solverVerbosity sc') (maxBackjumps sc') $ + solve sc' cinfo idx pkgConfigDB pprefs gcs pns + + createErrorMsg :: SolverFailure -> String + -> Progress String String (Assignment, RevDepMap) + createErrorMsg (ExhaustiveSearch cs _) msg = + Fail $ rerunSolverForErrorMsg cs ++ msg + createErrorMsg BackjumpLimitReached msg = + Step ("Backjump limit reached. Rerunning dependency solver to generate " + ++ "a final conflict set for the search tree containing the " + ++ "first backjump.") $ + foldProgress Step (f . 
fst) Done $ + runSolver printFullLog + sc { pruneAfterFirstSuccess = PruneAfterFirstSuccess True } + where + f :: SolverFailure -> Progress String String (Assignment, RevDepMap) + f (ExhaustiveSearch cs _) = Fail $ rerunSolverForErrorMsg cs ++ msg + f BackjumpLimitReached = + -- This case is possible when the number of goals involved in + -- conflicts is greater than the backjump limit. + Fail $ msg ++ "Failed to generate a summarized dependency solver " + ++ "log due to low backjump limit." + + rerunSolverForErrorMsg :: ConflictSet -> String + rerunSolverForErrorMsg cs = + let sc' = sc { + goalOrder = Just goalOrder' + , maxBackjumps = Just 0 + } + + -- Preferring goals from the conflict set takes precedence over the + -- original goal order. + goalOrder' = preferGoalsFromConflictSet cs <> fromMaybe mempty (goalOrder sc) + + in unlines ("Could not resolve dependencies:" : messages (runSolver True sc')) + + printFullLog = solverVerbosity sc >= verbose + + messages :: Progress step fail done -> [step] + messages = foldProgress (:) (const []) (const []) + +-- | Goal ordering that chooses goals contained in the conflict set before +-- other goals. 
+preferGoalsFromConflictSet :: ConflictSet + -> Variable QPN -> Variable QPN -> Ordering +preferGoalsFromConflictSet cs = + comparing $ \v -> not $ CS.member (toVar v) cs + where + toVar :: Variable QPN -> Var QPN + toVar (PackageVar qpn) = P qpn + toVar (FlagVar qpn fn) = F (FN qpn fn) + toVar (StanzaVar qpn sn) = S (SN qpn sn) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/ComponentDeps.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/ComponentDeps.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/ComponentDeps.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/ComponentDeps.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,194 @@ +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveGeneric #-} + +-- | Fine-grained package dependencies +-- +-- Like many others, this module is meant to be "double-imported": +-- +-- > import Distribution.Solver.Types.ComponentDeps ( +-- > Component +-- > , ComponentDep +-- > , ComponentDeps +-- > ) +-- > import qualified Distribution.Solver.Types.ComponentDeps as CD +module Distribution.Solver.Types.ComponentDeps ( + -- * Fine-grained package dependencies + Component(..) 
+ , componentNameToComponent + , ComponentDep + , ComponentDeps -- opaque + -- ** Constructing ComponentDeps + , empty + , fromList + , singleton + , insert + , zip + , filterDeps + , fromLibraryDeps + , fromSetupDeps + , fromInstalled + -- ** Deconstructing ComponentDeps + , toList + , flatDeps + , nonSetupDeps + , libraryDeps + , setupDeps + , select + ) where + +import Prelude () +import Distribution.Types.UnqualComponentName +import Distribution.Solver.Compat.Prelude hiding (empty,zip) + +import qualified Data.Map as Map +import Data.Foldable (fold) + +import qualified Distribution.Types.ComponentName as CN + +{------------------------------------------------------------------------------- + Types +-------------------------------------------------------------------------------} + +-- | Component of a package. +data Component = + ComponentLib + | ComponentSubLib UnqualComponentName + | ComponentFLib UnqualComponentName + | ComponentExe UnqualComponentName + | ComponentTest UnqualComponentName + | ComponentBench UnqualComponentName + | ComponentSetup + deriving (Show, Eq, Ord, Generic) + +instance Binary Component + +-- | Dependency for a single component. +type ComponentDep a = (Component, a) + +-- | Fine-grained dependencies for a package. +-- +-- Typically used as @ComponentDeps [Dependency]@, to represent the list of +-- dependencies for each named component within a package. +-- +newtype ComponentDeps a = ComponentDeps { unComponentDeps :: Map Component a } + deriving (Show, Functor, Eq, Ord, Generic) + +instance Semigroup a => Monoid (ComponentDeps a) where + mempty = ComponentDeps Map.empty + mappend = (<>) + +instance Semigroup a => Semigroup (ComponentDeps a) where + ComponentDeps d <> ComponentDeps d' = + ComponentDeps (Map.unionWith (<>) d d') + +instance Foldable ComponentDeps where + foldMap f = foldMap f . unComponentDeps + +instance Traversable ComponentDeps where + traverse f = fmap ComponentDeps . traverse f . 
unComponentDeps + +instance Binary a => Binary (ComponentDeps a) + +componentNameToComponent :: CN.ComponentName -> Component +componentNameToComponent (CN.CLibName) = ComponentLib +componentNameToComponent (CN.CSubLibName s) = ComponentSubLib s +componentNameToComponent (CN.CFLibName s) = ComponentFLib s +componentNameToComponent (CN.CExeName s) = ComponentExe s +componentNameToComponent (CN.CTestName s) = ComponentTest s +componentNameToComponent (CN.CBenchName s) = ComponentBench s + +{------------------------------------------------------------------------------- + Construction +-------------------------------------------------------------------------------} + +empty :: ComponentDeps a +empty = ComponentDeps $ Map.empty + +fromList :: Monoid a => [ComponentDep a] -> ComponentDeps a +fromList = ComponentDeps . Map.fromListWith mappend + +singleton :: Component -> a -> ComponentDeps a +singleton comp = ComponentDeps . Map.singleton comp + +insert :: Monoid a => Component -> a -> ComponentDeps a -> ComponentDeps a +insert comp a = ComponentDeps . Map.alter aux comp . unComponentDeps + where + aux Nothing = Just a + aux (Just a') = Just $ a `mappend` a' + +-- | Zip two 'ComponentDeps' together by 'Component', using 'mempty' +-- as the neutral element when a 'Component' is present only in one. +zip :: (Monoid a, Monoid b) => ComponentDeps a -> ComponentDeps b -> ComponentDeps (a, b) +{- TODO/FIXME: Once we can expect containers>=0.5, switch to the more efficient version below: + +zip (ComponentDeps d1) (ComponentDeps d2) = + ComponentDeps $ + Map.mergeWithKey + (\_ a b -> Just (a,b)) + (fmap (\a -> (a, mempty))) + (fmap (\b -> (mempty, b))) + d1 d2 + +-} +zip (ComponentDeps d1) (ComponentDeps d2) = + ComponentDeps $ + Map.unionWith + mappend + (Map.map (\a -> (a, mempty)) d1) + (Map.map (\b -> (mempty, b)) d2) + + +-- | Keep only selected components (and their associated deps info). 
+filterDeps :: (Component -> a -> Bool) -> ComponentDeps a -> ComponentDeps a +filterDeps p = ComponentDeps . Map.filterWithKey p . unComponentDeps + +-- | ComponentDeps containing library dependencies only +fromLibraryDeps :: a -> ComponentDeps a +fromLibraryDeps = singleton ComponentLib + +-- | ComponentDeps containing setup dependencies only. +fromSetupDeps :: a -> ComponentDeps a +fromSetupDeps = singleton ComponentSetup + +-- | ComponentDeps for installed packages. +-- +-- We assume that installed packages only record their library dependencies. +fromInstalled :: a -> ComponentDeps a +fromInstalled = fromLibraryDeps + +{------------------------------------------------------------------------------- + Deconstruction +-------------------------------------------------------------------------------} + +toList :: ComponentDeps a -> [ComponentDep a] +toList = Map.toList . unComponentDeps + +-- | All dependencies of a package. +-- +-- This is just a synonym for 'fold', but perhaps a use of 'flatDeps' is more +-- obvious than a use of 'fold', and moreover this avoids introducing lots of +-- @#ifdef@s for 7.10 just for the use of 'fold'. +flatDeps :: Monoid a => ComponentDeps a -> a +flatDeps = fold + +-- | All dependencies except the setup dependencies. +-- +-- Prior to the introduction of setup dependencies in version 1.24 this +-- would have been _all_ dependencies. +nonSetupDeps :: Monoid a => ComponentDeps a -> a +nonSetupDeps = select (/= ComponentSetup) + +-- | Library dependencies proper only. (Includes dependencies +-- of internal libraries.) +libraryDeps :: Monoid a => ComponentDeps a -> a +libraryDeps = select (\c -> case c of ComponentSubLib _ -> True + ComponentLib -> True + _ -> False) + +-- | Setup dependencies. +setupDeps :: Monoid a => ComponentDeps a -> a +setupDeps = select (== ComponentSetup) + +-- | Select dependencies satisfying a given predicate. +select :: Monoid a => (Component -> Bool) -> ComponentDeps a -> a +select p = foldMap snd . 
filter (p . fst) . toList diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/ConstraintSource.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/ConstraintSource.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/ConstraintSource.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/ConstraintSource.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,82 @@ +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Solver.Types.ConstraintSource + ( ConstraintSource(..) + , showConstraintSource + ) where + +import GHC.Generics (Generic) +import Distribution.Compat.Binary (Binary(..)) + +-- | Source of a 'PackageConstraint'. +data ConstraintSource = + + -- | Main config file, which is ~/.cabal/config by default. + ConstraintSourceMainConfig FilePath + + -- | Local cabal.project file + | ConstraintSourceProjectConfig FilePath + + -- | Sandbox config file, which is ./cabal.sandbox.config by default. + | ConstraintSourceSandboxConfig FilePath + + -- | User config file, which is ./cabal.config by default. + | ConstraintSourceUserConfig FilePath + + -- | Flag specified on the command line. + | ConstraintSourceCommandlineFlag + + -- | Target specified by the user, e.g., @cabal install package-0.1.0.0@ + -- implies @package==0.1.0.0@. + | ConstraintSourceUserTarget + + -- | Internal requirement to use installed versions of packages like ghc-prim. + | ConstraintSourceNonUpgradeablePackage + + -- | Internal requirement to use the add-source version of a package when that + -- version is installed and the source is modified. + | ConstraintSourceModifiedAddSourceDep + + -- | Internal constraint used by @cabal freeze@. 
+ | ConstraintSourceFreeze + + -- | Constraint specified by a config file, a command line flag, or a user + -- target, when a more specific source is not known. + | ConstraintSourceConfigFlagOrTarget + + -- | The source of the constraint is not specified. + | ConstraintSourceUnknown + + -- | An internal constraint due to compatibility issues with the Setup.hs + -- command line interface requires a minimum lower bound on Cabal + | ConstraintSetupCabalMinVersion + + -- | An internal constraint due to compatibility issues with the Setup.hs + -- command line interface requires a maximum upper bound on Cabal + | ConstraintSetupCabalMaxVersion + deriving (Eq, Show, Generic) + +instance Binary ConstraintSource + +-- | Description of a 'ConstraintSource'. +showConstraintSource :: ConstraintSource -> String +showConstraintSource (ConstraintSourceMainConfig path) = + "main config " ++ path +showConstraintSource (ConstraintSourceProjectConfig path) = + "project config " ++ path +showConstraintSource (ConstraintSourceSandboxConfig path) = + "sandbox config " ++ path +showConstraintSource (ConstraintSourceUserConfig path)= "user config " ++ path +showConstraintSource ConstraintSourceCommandlineFlag = "command line flag" +showConstraintSource ConstraintSourceUserTarget = "user target" +showConstraintSource ConstraintSourceNonUpgradeablePackage = + "non-upgradeable package" +showConstraintSource ConstraintSourceModifiedAddSourceDep = + "modified add-source dependency" +showConstraintSource ConstraintSourceFreeze = "cabal freeze" +showConstraintSource ConstraintSourceConfigFlagOrTarget = + "config file, command line flag, or user target" +showConstraintSource ConstraintSourceUnknown = "unknown source" +showConstraintSource ConstraintSetupCabalMinVersion = + "minimum version of Cabal used by Setup.hs" +showConstraintSource ConstraintSetupCabalMaxVersion = + "maximum version of Cabal used by Setup.hs" diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/DependencyResolver.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/DependencyResolver.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/DependencyResolver.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/DependencyResolver.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,36 @@ +module Distribution.Solver.Types.DependencyResolver + ( DependencyResolver + ) where + +import Data.Set (Set) + +import Distribution.Solver.Types.LabeledPackageConstraint +import Distribution.Solver.Types.PkgConfigDb ( PkgConfigDb ) +import Distribution.Solver.Types.PackagePreferences +import Distribution.Solver.Types.PackageIndex ( PackageIndex ) +import Distribution.Solver.Types.Progress +import Distribution.Solver.Types.ResolverPackage +import Distribution.Solver.Types.SourcePackage + +import Distribution.Simple.PackageIndex ( InstalledPackageIndex ) +import Distribution.Package ( PackageName ) +import Distribution.Compiler ( CompilerInfo ) +import Distribution.System ( Platform ) + +-- | A dependency resolver is a function that works out an installation plan +-- given the set of installed and available packages and a set of deps to +-- solve for. +-- +-- The reason for this interface is because there are dozens of approaches to +-- solving the package dependency problem and we want to make it easy to swap +-- in alternatives. 
+-- +type DependencyResolver loc = Platform + -> CompilerInfo + -> InstalledPackageIndex + -> PackageIndex (SourcePackage loc) + -> PkgConfigDb + -> (PackageName -> PackagePreferences) + -> [LabeledPackageConstraint] + -> Set PackageName + -> Progress String String [ResolverPackage loc] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Flag.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Flag.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Flag.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Flag.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,6 @@ +module Distribution.Solver.Types.Flag + ( FlagType(..) + ) where + +data FlagType = Manual | Automatic + deriving (Eq, Show) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/InstalledPreference.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/InstalledPreference.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/InstalledPreference.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/InstalledPreference.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,9 @@ +module Distribution.Solver.Types.InstalledPreference + ( InstalledPreference(..), + ) where + +-- | Whether we prefer an installed version of a package or simply the latest +-- version. 
+-- +data InstalledPreference = PreferInstalled | PreferLatest + deriving Show diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/InstSolverPackage.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/InstSolverPackage.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/InstSolverPackage.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/InstSolverPackage.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,38 @@ +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Solver.Types.InstSolverPackage + ( InstSolverPackage(..) + ) where + +import Distribution.Compat.Binary (Binary(..)) +import Distribution.Package ( Package(..), HasMungedPackageId(..), HasUnitId(..) ) +import Distribution.Solver.Types.ComponentDeps ( ComponentDeps ) +import Distribution.Solver.Types.SolverId +import Distribution.Types.MungedPackageId +import Distribution.Types.PackageId +import Distribution.Types.PackageName +import Distribution.Types.MungedPackageName +import Distribution.InstalledPackageInfo (InstalledPackageInfo) +import GHC.Generics (Generic) + +-- | An 'InstSolverPackage' is a pre-existing installed pacakge +-- specified by the dependency solver. +data InstSolverPackage = InstSolverPackage { + instSolverPkgIPI :: InstalledPackageInfo, + instSolverPkgLibDeps :: ComponentDeps [SolverId], + instSolverPkgExeDeps :: ComponentDeps [SolverId] + } + deriving (Eq, Show, Generic) + +instance Binary InstSolverPackage + +instance Package InstSolverPackage where + packageId i = + -- HACK! See Note [Index conversion with internal libraries] + let MungedPackageId mpn v = mungedId i + in PackageIdentifier (mkPackageName (unMungedPackageName mpn)) v + +instance HasMungedPackageId InstSolverPackage where + mungedId = mungedId . 
instSolverPkgIPI + +instance HasUnitId InstSolverPackage where + installedUnitId = installedUnitId . instSolverPkgIPI diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/LabeledPackageConstraint.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/LabeledPackageConstraint.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/LabeledPackageConstraint.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/LabeledPackageConstraint.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,14 @@ +module Distribution.Solver.Types.LabeledPackageConstraint + ( LabeledPackageConstraint(..) + , unlabelPackageConstraint + ) where + +import Distribution.Solver.Types.ConstraintSource +import Distribution.Solver.Types.PackageConstraint + +-- | 'PackageConstraint' labeled with its source. +data LabeledPackageConstraint + = LabeledPackageConstraint PackageConstraint ConstraintSource + +unlabelPackageConstraint :: LabeledPackageConstraint -> PackageConstraint +unlabelPackageConstraint (LabeledPackageConstraint pc _) = pc diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/OptionalStanza.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/OptionalStanza.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/OptionalStanza.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/OptionalStanza.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,34 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveDataTypeable #-} +module Distribution.Solver.Types.OptionalStanza + ( OptionalStanza(..) 
+ , showStanza + , enableStanzas + ) where + +import GHC.Generics (Generic) +import Data.Typeable +import Distribution.Compat.Binary (Binary(..)) +import Distribution.Types.ComponentRequestedSpec + (ComponentRequestedSpec(..), defaultComponentRequestedSpec) +import Data.List (foldl') + +data OptionalStanza + = TestStanzas + | BenchStanzas + deriving (Eq, Ord, Enum, Bounded, Show, Generic, Typeable) + +-- | String representation of an OptionalStanza. +showStanza :: OptionalStanza -> String +showStanza TestStanzas = "test" +showStanza BenchStanzas = "bench" + +-- | Convert a list of 'OptionalStanza' into the corresponding +-- 'ComponentRequestedSpec' which records what components are enabled. +enableStanzas :: [OptionalStanza] -> ComponentRequestedSpec +enableStanzas = foldl' addStanza defaultComponentRequestedSpec + where + addStanza enabled TestStanzas = enabled { testsRequested = True } + addStanza enabled BenchStanzas = enabled { benchmarksRequested = True } + +instance Binary OptionalStanza diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackageConstraint.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackageConstraint.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackageConstraint.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackageConstraint.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,149 @@ +{-# LANGUAGE DeriveGeneric #-} + +-- | Per-package constraints. Package constraints must be respected by the +-- solver. Multiple constraints for each package can be given, though obviously +-- it is possible to construct conflicting constraints (eg impossible version +-- range or inconsistent flag assignment). 
+-- +module Distribution.Solver.Types.PackageConstraint ( + ConstraintScope(..), + scopeToplevel, + scopeToPackageName, + constraintScopeMatches, + PackageProperty(..), + dispPackageProperty, + PackageConstraint(..), + dispPackageConstraint, + showPackageConstraint, + packageConstraintToDependency + ) where + +import Distribution.Compat.Binary (Binary(..)) +import Distribution.Package (PackageName) +import Distribution.PackageDescription (FlagAssignment, dispFlagAssignment) +import Distribution.Types.Dependency (Dependency(..)) +import Distribution.Version (VersionRange, simplifyVersionRange) + +import Distribution.Solver.Compat.Prelude ((<<>>)) +import Distribution.Solver.Types.OptionalStanza +import Distribution.Solver.Types.PackagePath + +import Distribution.Text (disp, flatStyle) +import GHC.Generics (Generic) +import Text.PrettyPrint ((<+>)) +import qualified Text.PrettyPrint as Disp + + +-- | Determines to what packages and in what contexts a +-- constraint applies. +data ConstraintScope + -- | A scope that applies when the given package is used as a build target. + -- In other words, the scope applies iff a goal has a top-level qualifier + -- and its namespace matches the given package name. A namespace is + -- considered to match a package name when it is either the default + -- namespace (for --no-independent-goals) or it is an independent namespace + -- with the given package name (for --independent-goals). + + -- TODO: Try to generalize the ConstraintScopes once component-based + -- solving is implemented, and remove this special case for targets. + = ScopeTarget PackageName + -- | The package with the specified name and qualifier. + | ScopeQualified Qualifier PackageName + -- | The package with the specified name when it has a + -- setup qualifier. + | ScopeAnySetupQualifier PackageName + -- | The package with the specified name regardless of + -- qualifier. 
+ | ScopeAnyQualifier PackageName + deriving (Eq, Show) + +-- | Constructor for a common use case: the constraint applies to +-- the package with the specified name when that package is a +-- top-level dependency in the default namespace. +scopeToplevel :: PackageName -> ConstraintScope +scopeToplevel = ScopeQualified QualToplevel + +-- | Returns the package name associated with a constraint scope. +scopeToPackageName :: ConstraintScope -> PackageName +scopeToPackageName (ScopeTarget pn) = pn +scopeToPackageName (ScopeQualified _ pn) = pn +scopeToPackageName (ScopeAnySetupQualifier pn) = pn +scopeToPackageName (ScopeAnyQualifier pn) = pn + +constraintScopeMatches :: ConstraintScope -> QPN -> Bool +constraintScopeMatches (ScopeTarget pn) (Q (PackagePath ns q) pn') = + let namespaceMatches DefaultNamespace = True + namespaceMatches (Independent namespacePn) = pn == namespacePn + in namespaceMatches ns && q == QualToplevel && pn == pn' +constraintScopeMatches (ScopeQualified q pn) (Q (PackagePath _ q') pn') = + q == q' && pn == pn' +constraintScopeMatches (ScopeAnySetupQualifier pn) (Q pp pn') = + let setup (PackagePath _ (QualSetup _)) = True + setup _ = False + in setup pp && pn == pn' +constraintScopeMatches (ScopeAnyQualifier pn) (Q _ pn') = pn == pn' + +-- | Pretty-prints a constraint scope. +dispConstraintScope :: ConstraintScope -> Disp.Doc +dispConstraintScope (ScopeTarget pn) = disp pn <<>> Disp.text "." <<>> disp pn +dispConstraintScope (ScopeQualified q pn) = dispQualifier q <<>> disp pn +dispConstraintScope (ScopeAnySetupQualifier pn) = Disp.text "setup." <<>> disp pn +dispConstraintScope (ScopeAnyQualifier pn) = Disp.text "any." <<>> disp pn + +-- | A package property is a logical predicate on packages. 
+data PackageProperty + = PackagePropertyVersion VersionRange + | PackagePropertyInstalled + | PackagePropertySource + | PackagePropertyFlags FlagAssignment + | PackagePropertyStanzas [OptionalStanza] + deriving (Eq, Show, Generic) + +instance Binary PackageProperty + +-- | Pretty-prints a package property. +dispPackageProperty :: PackageProperty -> Disp.Doc +dispPackageProperty (PackagePropertyVersion verrange) = disp verrange +dispPackageProperty PackagePropertyInstalled = Disp.text "installed" +dispPackageProperty PackagePropertySource = Disp.text "source" +dispPackageProperty (PackagePropertyFlags flags) = dispFlagAssignment flags +dispPackageProperty (PackagePropertyStanzas stanzas) = + Disp.hsep $ map (Disp.text . showStanza) stanzas + +-- | A package constraint consists of a scope plus a property +-- that must hold for all packages within that scope. +data PackageConstraint = PackageConstraint ConstraintScope PackageProperty + deriving (Eq, Show) + +-- | Pretty-prints a package constraint. +dispPackageConstraint :: PackageConstraint -> Disp.Doc +dispPackageConstraint (PackageConstraint scope prop) = + dispConstraintScope scope <+> dispPackageProperty prop + +-- | Alternative textual representation of a package constraint +-- for debugging purposes (slightly more verbose than that +-- produced by 'dispPackageConstraint'). +-- +showPackageConstraint :: PackageConstraint -> String +showPackageConstraint pc@(PackageConstraint scope prop) = + Disp.renderStyle flatStyle . postprocess $ dispPackageConstraint pc2 + where + pc2 = case prop of + PackagePropertyVersion vr -> + PackageConstraint scope $ PackagePropertyVersion (simplifyVersionRange vr) + _ -> pc + postprocess = case prop of + PackagePropertyFlags _ -> (Disp.text "flags" <+>) + PackagePropertyStanzas _ -> (Disp.text "stanzas" <+>) + _ -> id + +-- | Lossily convert a 'PackageConstraint' to a 'Dependency'. 
+packageConstraintToDependency :: PackageConstraint -> Maybe Dependency +packageConstraintToDependency (PackageConstraint scope prop) = toDep prop + where + toDep (PackagePropertyVersion vr) = + Just $ Dependency (scopeToPackageName scope) vr + toDep (PackagePropertyInstalled) = Nothing + toDep (PackagePropertySource) = Nothing + toDep (PackagePropertyFlags _) = Nothing + toDep (PackagePropertyStanzas _) = Nothing diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackageFixedDeps.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackageFixedDeps.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackageFixedDeps.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackageFixedDeps.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,23 @@ +module Distribution.Solver.Types.PackageFixedDeps + ( PackageFixedDeps(..) + ) where + +import Distribution.InstalledPackageInfo ( InstalledPackageInfo ) +import Distribution.Package + ( Package(..), UnitId, installedDepends) +import Distribution.Solver.Types.ComponentDeps ( ComponentDeps ) +import qualified Distribution.Solver.Types.ComponentDeps as CD + +-- | Subclass of packages that have specific versioned dependencies. +-- +-- So for example a not-yet-configured package has dependencies on version +-- ranges, not specific versions. A configured or an already installed package +-- depends on exact versions. Some operations or data structures (like +-- dependency graphs) only make sense on this subclass of package types. 
+-- +class Package pkg => PackageFixedDeps pkg where + depends :: pkg -> ComponentDeps [UnitId] + +instance PackageFixedDeps InstalledPackageInfo where + depends pkg = CD.fromInstalled (installedDepends pkg) + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackageIndex.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackageIndex.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackageIndex.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackageIndex.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,316 @@ +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveGeneric #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Solver.Types.PackageIndex +-- Copyright : (c) David Himmelstrup 2005, +-- Bjorn Bringert 2007, +-- Duncan Coutts 2008 +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- An index of packages. +-- +module Distribution.Solver.Types.PackageIndex ( + -- * Package index data type + PackageIndex, + + -- * Creating an index + fromList, + + -- * Updates + merge, + insert, + deletePackageName, + deletePackageId, + deleteDependency, + + -- * Queries + + -- ** Precise lookups + elemByPackageId, + elemByPackageName, + lookupPackageName, + lookupPackageId, + lookupDependency, + + -- ** Case-insensitive searches + searchByName, + SearchResult(..), + searchByNameSubstring, + + -- ** Bulk queries + allPackages, + allPackagesByName, + ) where + +import Prelude () +import Distribution.Solver.Compat.Prelude hiding (lookup) + +import Control.Exception (assert) +import qualified Data.Map as Map +import Data.List (groupBy, isInfixOf) + +import Distribution.Package + ( PackageName, unPackageName, PackageIdentifier(..) 
+ , Package(..), packageName, packageVersion ) +import Distribution.Types.Dependency +import Distribution.Version + ( withinRange ) +import Distribution.Simple.Utils + ( lowercase, comparing ) + + +-- | The collection of information about packages from one or more 'PackageDB's. +-- +-- It can be searched efficiently by package name and version. +-- +newtype PackageIndex pkg = PackageIndex + -- This index package names to all the package records matching that package + -- name case-sensitively. It includes all versions. + -- + -- This allows us to find all versions satisfying a dependency. + -- Most queries are a map lookup followed by a linear scan of the bucket. + -- + (Map PackageName [pkg]) + + deriving (Eq, Show, Read, Functor, Generic) +--FIXME: the Functor instance here relies on no package id changes + +instance Package pkg => Semigroup (PackageIndex pkg) where + (<>) = merge + +instance Package pkg => Monoid (PackageIndex pkg) where + mempty = PackageIndex Map.empty + mappend = (<>) + --save one mappend with empty in the common case: + mconcat [] = mempty + mconcat xs = foldr1 mappend xs + +instance Binary pkg => Binary (PackageIndex pkg) + +invariant :: Package pkg => PackageIndex pkg -> Bool +invariant (PackageIndex m) = all (uncurry goodBucket) (Map.toList m) + where + goodBucket _ [] = False + goodBucket name (pkg0:pkgs0) = check (packageId pkg0) pkgs0 + where + check pkgid [] = packageName pkgid == name + check pkgid (pkg':pkgs) = packageName pkgid == name + && pkgid < pkgid' + && check pkgid' pkgs + where pkgid' = packageId pkg' + +-- +-- * Internal helpers +-- + +mkPackageIndex :: Package pkg => Map PackageName [pkg] -> PackageIndex pkg +mkPackageIndex index = assert (invariant (PackageIndex index)) + (PackageIndex index) + +internalError :: String -> a +internalError name = error ("PackageIndex." ++ name ++ ": internal error") + +-- | Lookup a name in the index to get all packages that match that name +-- case-sensitively. 
+-- +lookup :: PackageIndex pkg -> PackageName -> [pkg] +lookup (PackageIndex m) name = fromMaybe [] $ Map.lookup name m + +-- +-- * Construction +-- + +-- | Build an index out of a bunch of packages. +-- +-- If there are duplicates, later ones mask earlier ones. +-- +fromList :: Package pkg => [pkg] -> PackageIndex pkg +fromList pkgs = mkPackageIndex + . Map.map fixBucket + . Map.fromListWith (++) + $ [ (packageName pkg, [pkg]) + | pkg <- pkgs ] + where + fixBucket = -- out of groups of duplicates, later ones mask earlier ones + -- but Map.fromListWith (++) constructs groups in reverse order + map head + -- Eq instance for PackageIdentifier is wrong, so use Ord: + . groupBy (\a b -> EQ == comparing packageId a b) + -- relies on sortBy being a stable sort so we + -- can pick consistently among duplicates + . sortBy (comparing packageId) + +-- +-- * Updates +-- + +-- | Merge two indexes. +-- +-- Packages from the second mask packages of the same exact name +-- (case-sensitively) from the first. +-- +merge :: Package pkg => PackageIndex pkg -> PackageIndex pkg -> PackageIndex pkg +merge i1@(PackageIndex m1) i2@(PackageIndex m2) = + assert (invariant i1 && invariant i2) $ + mkPackageIndex (Map.unionWith mergeBuckets m1 m2) + +-- | Elements in the second list mask those in the first. +mergeBuckets :: Package pkg => [pkg] -> [pkg] -> [pkg] +mergeBuckets [] ys = ys +mergeBuckets xs [] = xs +mergeBuckets xs@(x:xs') ys@(y:ys') = + case packageId x `compare` packageId y of + GT -> y : mergeBuckets xs ys' + EQ -> y : mergeBuckets xs' ys' + LT -> x : mergeBuckets xs' ys + +-- | Inserts a single package into the index. +-- +-- This is equivalent to (but slightly quicker than) using 'mappend' or +-- 'merge' with a singleton index. 
+-- +insert :: Package pkg => pkg -> PackageIndex pkg -> PackageIndex pkg +insert pkg (PackageIndex index) = mkPackageIndex $ + Map.insertWith (\_ -> insertNoDup) (packageName pkg) [pkg] index + where + pkgid = packageId pkg + insertNoDup [] = [pkg] + insertNoDup pkgs@(pkg':pkgs') = case compare pkgid (packageId pkg') of + LT -> pkg : pkgs + EQ -> pkg : pkgs' + GT -> pkg' : insertNoDup pkgs' + +-- | Internal delete helper. +-- +delete :: Package pkg => PackageName -> (pkg -> Bool) -> PackageIndex pkg + -> PackageIndex pkg +delete name p (PackageIndex index) = mkPackageIndex $ + Map.update filterBucket name index + where + filterBucket = deleteEmptyBucket + . filter (not . p) + deleteEmptyBucket [] = Nothing + deleteEmptyBucket remaining = Just remaining + +-- | Removes a single package from the index. +-- +deletePackageId :: Package pkg => PackageIdentifier -> PackageIndex pkg + -> PackageIndex pkg +deletePackageId pkgid = + delete (packageName pkgid) (\pkg -> packageId pkg == pkgid) + +-- | Removes all packages with this (case-sensitive) name from the index. +-- +deletePackageName :: Package pkg => PackageName -> PackageIndex pkg + -> PackageIndex pkg +deletePackageName name = + delete name (\pkg -> packageName pkg == name) + +-- | Removes all packages satisfying this dependency from the index. +-- +deleteDependency :: Package pkg => Dependency -> PackageIndex pkg + -> PackageIndex pkg +deleteDependency (Dependency name verstionRange) = + delete name (\pkg -> packageVersion pkg `withinRange` verstionRange) + +-- +-- * Bulk queries +-- + +-- | Get all the packages from the index. +-- +allPackages :: PackageIndex pkg -> [pkg] +allPackages (PackageIndex m) = concat (Map.elems m) + +-- | Get all the packages from the index. +-- +-- They are grouped by package name, case-sensitively. 
+-- +allPackagesByName :: PackageIndex pkg -> [[pkg]] +allPackagesByName (PackageIndex m) = Map.elems m + +-- +-- * Lookups +-- + +elemByPackageId :: Package pkg => PackageIndex pkg -> PackageIdentifier -> Bool +elemByPackageId index = isJust . lookupPackageId index + +elemByPackageName :: Package pkg => PackageIndex pkg -> PackageName -> Bool +elemByPackageName index = not . null . lookupPackageName index + + +-- | Does a lookup by package id (name & version). +-- +-- Since multiple package DBs mask each other case-sensitively by package name, +-- then we get back at most one package. +-- +lookupPackageId :: Package pkg => PackageIndex pkg -> PackageIdentifier + -> Maybe pkg +lookupPackageId index pkgid = + case [ pkg | pkg <- lookup index (packageName pkgid) + , packageId pkg == pkgid ] of + [] -> Nothing + [pkg] -> Just pkg + _ -> internalError "lookupPackageIdentifier" + +-- | Does a case-sensitive search by package name. +-- +lookupPackageName :: Package pkg => PackageIndex pkg -> PackageName -> [pkg] +lookupPackageName index name = + [ pkg | pkg <- lookup index name + , packageName pkg == name ] + +-- | Does a case-sensitive search by package name and a range of versions. +-- +-- We get back any number of versions of the specified package name, all +-- satisfying the version range constraint. +-- +lookupDependency :: Package pkg => PackageIndex pkg -> Dependency -> [pkg] +lookupDependency index (Dependency name versionRange) = + [ pkg | pkg <- lookup index name + , packageName pkg == name + , packageVersion pkg `withinRange` versionRange ] + +-- +-- * Case insensitive name lookups +-- + +-- | Does a case-insensitive search by package name. +-- +-- If there is only one package that compares case-insensitively to this name +-- then the search is unambiguous and we get back all versions of that package. +-- If several match case-insensitively but one matches exactly then it is also +-- unambiguous. 
+-- +-- If however several match case-insensitively and none match exactly then we +-- have an ambiguous result, and we get back all the versions of all the +-- packages. The list of ambiguous results is split by exact package name. So +-- it is a non-empty list of non-empty lists. +-- +searchByName :: PackageIndex pkg + -> String -> [(PackageName, [pkg])] +searchByName (PackageIndex m) name = + [ pkgs + | pkgs@(pname,_) <- Map.toList m + , lowercase (unPackageName pname) == lname ] + where + lname = lowercase name + +data SearchResult a = None | Unambiguous a | Ambiguous [a] + +-- | Does a case-insensitive substring search by package name. +-- +-- That is, all packages that contain the given string in their name. +-- +searchByNameSubstring :: PackageIndex pkg + -> String -> [(PackageName, [pkg])] +searchByNameSubstring (PackageIndex m) searchterm = + [ pkgs + | pkgs@(pname, _) <- Map.toList m + , lsearchterm `isInfixOf` lowercase (unPackageName pname) ] + where + lsearchterm = lowercase searchterm diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackagePath.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackagePath.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackagePath.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackagePath.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,101 @@ +module Distribution.Solver.Types.PackagePath + ( PackagePath(..) + , Namespace(..) + , Qualifier(..) + , dispQualifier + , Qualified(..) + , QPN + , dispQPN + , showQPN + ) where + +import Distribution.Package +import Distribution.Text +import qualified Text.PrettyPrint as Disp +import Distribution.Solver.Compat.Prelude ((<<>>)) + +-- | A package path consists of a namespace and a package path inside that +-- namespace. 
+data PackagePath = PackagePath Namespace Qualifier + deriving (Eq, Ord, Show) + +-- | Top-level namespace +-- +-- Package choices in different namespaces are considered completely independent +-- by the solver. +data Namespace = + -- | The default namespace + DefaultNamespace + + -- | A namespace for a specific build target + | Independent PackageName + deriving (Eq, Ord, Show) + +-- | Pretty-prints a namespace. The result is either empty or +-- ends in a period, so it can be prepended onto a qualifier. +dispNamespace :: Namespace -> Disp.Doc +dispNamespace DefaultNamespace = Disp.empty +dispNamespace (Independent i) = disp i <<>> Disp.text "." + +-- | Qualifier of a package within a namespace (see 'PackagePath') +data Qualifier = + -- | Top-level dependency in this namespace + QualToplevel + + -- | Any dependency on base is considered independent + -- + -- This makes it possible to have base shims. + | QualBase PackageName + + -- | Setup dependency + -- + -- By rights setup dependencies ought to be nestable; after all, the setup + -- dependencies of a package might themselves have setup dependencies, which + -- are independent from everything else. However, this very quickly leads to + -- infinite search trees in the solver. Therefore we limit ourselves to + -- a single qualifier (within a given namespace). + | QualSetup PackageName + + -- | If we depend on an executable from a package (via + -- @build-tools@), we should solve for the dependencies of that + -- package separately (since we're not going to actually try to + -- link it.) We qualify for EACH package separately; e.g., + -- @'Exe' pn1 pn2@ qualifies the @build-tools@ dependency on + -- @pn2@ from package @pn1@. (If we tracked only @pn1@, that + -- would require a consistent dependency resolution for all + -- of the depended upon executables from a package; if we + -- tracked only @pn2@, that would require us to pick only one + -- version of an executable over the entire install plan.) 
+ | QualExe PackageName PackageName + deriving (Eq, Ord, Show) + +-- | Pretty-prints a qualifier. The result is either empty or +-- ends in a period, so it can be prepended onto a package name. +-- +-- NOTE: the base qualifier is for a dependency _on_ base; the qualifier is +-- there to make sure different dependencies on base are all independent. +-- So we want to print something like @"A.base"@, where the @"A."@ part +-- is the qualifier and @"base"@ is the actual dependency (which, for the +-- 'Base' qualifier, will always be @base@). +dispQualifier :: Qualifier -> Disp.Doc +dispQualifier QualToplevel = Disp.empty +dispQualifier (QualSetup pn) = disp pn <<>> Disp.text ":setup." +dispQualifier (QualExe pn pn2) = disp pn <<>> Disp.text ":" <<>> + disp pn2 <<>> Disp.text ":exe." +dispQualifier (QualBase pn) = disp pn <<>> Disp.text "." + +-- | A qualified entity. Pairs a package path with the entity. +data Qualified a = Q PackagePath a + deriving (Eq, Ord, Show) + +-- | Qualified package name. +type QPN = Qualified PackageName + +-- | Pretty-prints a qualified package name. +dispQPN :: QPN -> Disp.Doc +dispQPN (Q (PackagePath ns qual) pn) = + dispNamespace ns <<>> dispQualifier qual <<>> disp pn + +-- | String representation of a qualified package name. +showQPN :: QPN -> String +showQPN = Disp.renderStyle flatStyle . 
dispQPN diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackagePreferences.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackagePreferences.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackagePreferences.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PackagePreferences.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,22 @@ +module Distribution.Solver.Types.PackagePreferences + ( PackagePreferences(..) + ) where + +import Distribution.Solver.Types.InstalledPreference +import Distribution.Solver.Types.OptionalStanza +import Distribution.Version (VersionRange) + +-- | Per-package preferences on the version. It is a soft constraint that the +-- 'DependencyResolver' should try to respect where possible. It consists of +-- an 'InstalledPreference' which says if we prefer versions of packages +-- that are already installed. It also has (possibly multiple) +-- 'PackageVersionPreference's which are suggested constraints on the version +-- number. The resolver should try to use package versions that satisfy +-- the maximum number of the suggested version constraints. +-- +-- It is not specified if preferences on some packages are more important than +-- others. 
+-- +data PackagePreferences = PackagePreferences [VersionRange] + InstalledPreference + [OptionalStanza] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PkgConfigDb.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PkgConfigDb.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PkgConfigDb.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/PkgConfigDb.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,159 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveDataTypeable #-} +----------------------------------------------------------------------------- +-- | +-- Module : Distribution.Solver.Types.PkgConfigDb +-- Copyright : (c) Iñaki García Etxebarria 2016 +-- License : BSD-like +-- +-- Maintainer : cabal-devel@haskell.org +-- Portability : portable +-- +-- Read the list of packages available to pkg-config. 
+----------------------------------------------------------------------------- +module Distribution.Solver.Types.PkgConfigDb + ( PkgConfigDb + , readPkgConfigDb + , pkgConfigDbFromList + , pkgConfigPkgIsPresent + , pkgConfigDbPkgVersion + , getPkgConfigDbDirs + ) where + +import Prelude () +import Distribution.Solver.Compat.Prelude + +import Control.Exception (IOException, handle) +import qualified Data.Map as M +import Data.Version (parseVersion) +import Text.ParserCombinators.ReadP (readP_to_S) +import System.FilePath (splitSearchPath) + +import Distribution.Package + ( PkgconfigName, mkPkgconfigName ) +import Distribution.Verbosity + ( Verbosity ) +import Distribution.Version + ( Version, mkVersion', VersionRange, withinRange ) + +import Distribution.Compat.Environment + ( lookupEnv ) +import Distribution.Simple.Program + ( ProgramDb, pkgConfigProgram, getProgramOutput, requireProgram ) +import Distribution.Simple.Utils + ( info ) + +-- | The list of packages installed in the system visible to +-- @pkg-config@. This is an opaque datatype, to be constructed with +-- `readPkgConfigDb` and queried with `pkgConfigPkgPresent`. +data PkgConfigDb = PkgConfigDb (M.Map PkgconfigName (Maybe Version)) + -- ^ If an entry is `Nothing`, this means that the + -- package seems to be present, but we don't know the + -- exact version (because parsing of the version + -- number failed). + | NoPkgConfigDb + -- ^ For when we could not run pkg-config successfully. + deriving (Show, Generic, Typeable) + +instance Binary PkgConfigDb + +-- | Query pkg-config for the list of installed packages, together +-- with their versions. Return a `PkgConfigDb` encapsulating this +-- information. 
+readPkgConfigDb :: Verbosity -> ProgramDb -> IO PkgConfigDb +readPkgConfigDb verbosity progdb = handle ioErrorHandler $ do + (pkgConfig, _) <- requireProgram verbosity pkgConfigProgram progdb + pkgList <- lines <$> getProgramOutput verbosity pkgConfig ["--list-all"] + -- The output of @pkg-config --list-all@ also includes a description + -- for each package, which we do not need. + let pkgNames = map (takeWhile (not . isSpace)) pkgList + pkgVersions <- lines <$> getProgramOutput verbosity pkgConfig + ("--modversion" : pkgNames) + (return . pkgConfigDbFromList . zip pkgNames) pkgVersions + where + -- For when pkg-config invocation fails (possibly because of a + -- too long command line). + ioErrorHandler :: IOException -> IO PkgConfigDb + ioErrorHandler e = do + info verbosity ("Failed to query pkg-config, Cabal will continue" + ++ " without solving for pkg-config constraints: " + ++ show e) + return NoPkgConfigDb + +-- | Create a `PkgConfigDb` from a list of @(packageName, version)@ pairs. +pkgConfigDbFromList :: [(String, String)] -> PkgConfigDb +pkgConfigDbFromList pairs = (PkgConfigDb . M.fromList . map convert) pairs + where + convert :: (String, String) -> (PkgconfigName, Maybe Version) + convert (n,vs) = (mkPkgconfigName n, + case (reverse . readP_to_S parseVersion) vs of + (v, "") : _ -> Just (mkVersion' v) + _ -> Nothing -- Version not (fully) + -- understood. + ) + +-- | Check whether a given package range is satisfiable in the given +-- @pkg-config@ database. +pkgConfigPkgIsPresent :: PkgConfigDb -> PkgconfigName -> VersionRange -> Bool +pkgConfigPkgIsPresent (PkgConfigDb db) pn vr = + case M.lookup pn db of + Nothing -> False -- Package not present in the DB. + Just Nothing -> True -- Package present, but version unknown. + Just (Just v) -> withinRange v vr +-- If we could not read the pkg-config database successfully we allow +-- the check to succeed. 
The plan found by the solver may fail to be +-- executed later on, but we have no grounds for rejecting the plan at +-- this stage. +pkgConfigPkgIsPresent NoPkgConfigDb _ _ = True + + +-- | Query the version of a package in the @pkg-config@ database. +-- @Nothing@ indicates the package is not in the database, while +-- @Just Nothing@ indicates that the package is in the database, +-- but its version is not known. +pkgConfigDbPkgVersion :: PkgConfigDb -> PkgconfigName -> Maybe (Maybe Version) +pkgConfigDbPkgVersion (PkgConfigDb db) pn = M.lookup pn db +-- NB: Since the solver allows solving to succeed if there is +-- NoPkgConfigDb, we should report that we *guess* that there +-- is a matching pkg-config configuration, but that we just +-- don't know about it. +pkgConfigDbPkgVersion NoPkgConfigDb _ = Just Nothing + + +-- | Query pkg-config for the locations of pkg-config's package files. Use this +-- to monitor for changes in the pkg-config DB. +-- +getPkgConfigDbDirs :: Verbosity -> ProgramDb -> IO [FilePath] +getPkgConfigDbDirs verbosity progdb = + (++) <$> getEnvPath <*> getDefPath + where + -- According to @man pkg-config@: + -- + -- PKG_CONFIG_PATH + -- A colon-separated (on Windows, semicolon-separated) list of directories + -- to search for .pc files. 
The default directory will always be searched + -- after searching the path + -- + getEnvPath = maybe [] parseSearchPath + <$> lookupEnv "PKG_CONFIG_PATH" + + -- Again according to @man pkg-config@: + -- + -- pkg-config can be used to query itself for the default search path, + -- version number and other information, for instance using: + -- + -- > pkg-config --variable pc_path pkg-config + -- + getDefPath = handle ioErrorHandler $ do + (pkgConfig, _) <- requireProgram verbosity pkgConfigProgram progdb + parseSearchPath <$> + getProgramOutput verbosity pkgConfig + ["--variable", "pc_path", "pkg-config"] + + parseSearchPath str = + case lines str of + [p] | not (null p) -> splitSearchPath p + _ -> [] + + ioErrorHandler :: IOException -> IO [FilePath] + ioErrorHandler _e = return [] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Progress.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Progress.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Progress.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Progress.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,49 @@ +module Distribution.Solver.Types.Progress + ( Progress(..) + , foldProgress + ) where + +import Prelude () +import Distribution.Solver.Compat.Prelude hiding (fail) + +-- | A type to represent the unfolding of an expensive long running +-- calculation that may fail. We may get intermediate steps before the final +-- result which may be used to indicate progress and\/or logging messages. +-- +data Progress step fail done = Step step (Progress step fail done) + | Fail fail + | Done done + +-- This Functor instance works around a bug in GHC 7.6.3. +-- See https://ghc.haskell.org/trac/ghc/ticket/7436#comment:6. 
+-- The derived functor instance caused a space leak in the solver. +instance Functor (Progress step fail) where + fmap f (Step s p) = Step s (fmap f p) + fmap _ (Fail x) = Fail x + fmap f (Done r) = Done (f r) + +-- | Consume a 'Progress' calculation. Much like 'foldr' for lists but with two +-- base cases, one for a final result and one for failure. +-- +-- Eg to convert into a simple 'Either' result use: +-- +-- > foldProgress (flip const) Left Right +-- +foldProgress :: (step -> a -> a) -> (fail -> a) -> (done -> a) + -> Progress step fail done -> a +foldProgress step fail done = fold + where fold (Step s p) = step s (fold p) + fold (Fail f) = fail f + fold (Done r) = done r + +instance Monad (Progress step fail) where + return = pure + p >>= f = foldProgress Step Fail f p + +instance Applicative (Progress step fail) where + pure a = Done a + p <*> x = foldProgress Step Fail (flip fmap x) p + +instance Monoid fail => Alternative (Progress step fail) where + empty = Fail mempty + p <|> q = foldProgress Step (const q) Done p diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/ResolverPackage.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/ResolverPackage.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/ResolverPackage.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/ResolverPackage.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,50 @@ +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Solver.Types.ResolverPackage + ( ResolverPackage(..) 
+ , resolverPackageLibDeps + , resolverPackageExeDeps + ) where + +import Distribution.Solver.Types.InstSolverPackage +import Distribution.Solver.Types.SolverId +import Distribution.Solver.Types.SolverPackage +import qualified Distribution.Solver.Types.ComponentDeps as CD + +import Distribution.Compat.Binary (Binary(..)) +import Distribution.Compat.Graph (IsNode(..)) +import Distribution.Package (Package(..), HasUnitId(..)) +import Distribution.Simple.Utils (ordNub) +import GHC.Generics (Generic) + +-- | The dependency resolver picks either pre-existing installed packages +-- or it picks source packages along with package configuration. +-- +-- This is like the 'InstallPlan.PlanPackage' but with fewer cases. +-- +data ResolverPackage loc = PreExisting InstSolverPackage + | Configured (SolverPackage loc) + deriving (Eq, Show, Generic) + +instance Binary loc => Binary (ResolverPackage loc) + +instance Package (ResolverPackage loc) where + packageId (PreExisting ipkg) = packageId ipkg + packageId (Configured spkg) = packageId spkg + +resolverPackageLibDeps :: ResolverPackage loc -> CD.ComponentDeps [SolverId] +resolverPackageLibDeps (PreExisting ipkg) = instSolverPkgLibDeps ipkg +resolverPackageLibDeps (Configured spkg) = solverPkgLibDeps spkg + +resolverPackageExeDeps :: ResolverPackage loc -> CD.ComponentDeps [SolverId] +resolverPackageExeDeps (PreExisting ipkg) = instSolverPkgExeDeps ipkg +resolverPackageExeDeps (Configured spkg) = solverPkgExeDeps spkg + +instance IsNode (ResolverPackage loc) where + type Key (ResolverPackage loc) = SolverId + nodeKey (PreExisting ipkg) = PreExistingId (packageId ipkg) (installedUnitId ipkg) + nodeKey (Configured spkg) = PlannedId (packageId spkg) + -- Use dependencies for ALL components + nodeNeighbors pkg = + ordNub $ CD.flatDeps (resolverPackageLibDeps pkg) ++ + CD.flatDeps (resolverPackageExeDeps pkg) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Settings.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Settings.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Settings.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Settings.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,53 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +module Distribution.Solver.Types.Settings + ( ReorderGoals(..) + , IndependentGoals(..) + , AvoidReinstalls(..) + , ShadowPkgs(..) + , StrongFlags(..) + , AllowBootLibInstalls(..) + , EnableBackjumping(..) + , CountConflicts(..) + , SolveExecutables(..) + ) where + +import Distribution.Simple.Setup ( BooleanFlag(..) ) +import Distribution.Compat.Binary (Binary(..)) +import GHC.Generics (Generic) + +newtype ReorderGoals = ReorderGoals Bool + deriving (BooleanFlag, Eq, Generic, Show) + +newtype CountConflicts = CountConflicts Bool + deriving (BooleanFlag, Eq, Generic, Show) + +newtype IndependentGoals = IndependentGoals Bool + deriving (BooleanFlag, Eq, Generic, Show) + +newtype AvoidReinstalls = AvoidReinstalls Bool + deriving (BooleanFlag, Eq, Generic, Show) + +newtype ShadowPkgs = ShadowPkgs Bool + deriving (BooleanFlag, Eq, Generic, Show) + +newtype StrongFlags = StrongFlags Bool + deriving (BooleanFlag, Eq, Generic, Show) + +newtype AllowBootLibInstalls = AllowBootLibInstalls Bool + deriving (BooleanFlag, Eq, Generic, Show) + +newtype EnableBackjumping = EnableBackjumping Bool + deriving (BooleanFlag, Eq, Generic, Show) + +newtype SolveExecutables = SolveExecutables Bool + deriving (BooleanFlag, Eq, Generic, Show) + +instance Binary ReorderGoals +instance Binary CountConflicts +instance Binary IndependentGoals +instance Binary AvoidReinstalls +instance Binary ShadowPkgs +instance Binary StrongFlags +instance Binary AllowBootLibInstalls +instance Binary SolveExecutables 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/SolverId.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/SolverId.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/SolverId.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/SolverId.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,27 @@ +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Solver.Types.SolverId + ( SolverId(..) + ) + +where + +import Distribution.Compat.Binary (Binary(..)) +import Distribution.Package (PackageId, Package(..), UnitId) +import GHC.Generics (Generic) + +-- | The solver can produce references to existing packages or +-- packages we plan to install. Unlike 'ConfiguredId' we don't +-- yet know the 'UnitId' for planned packages, because it's +-- not the solver's job to compute them. +-- +data SolverId = PreExistingId { solverSrcId :: PackageId, solverInstId :: UnitId } + | PlannedId { solverSrcId :: PackageId } + deriving (Eq, Ord, Generic) + +instance Binary SolverId + +instance Show SolverId where + show = show . solverSrcId + +instance Package SolverId where + packageId = solverSrcId diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/SolverPackage.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/SolverPackage.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/SolverPackage.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/SolverPackage.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,34 @@ +{-# LANGUAGE DeriveGeneric #-} +module Distribution.Solver.Types.SolverPackage + ( SolverPackage(..) 
+ ) where + +import Distribution.Compat.Binary (Binary(..)) +import Distribution.Package ( Package(..) ) +import Distribution.PackageDescription ( FlagAssignment ) +import Distribution.Solver.Types.ComponentDeps ( ComponentDeps ) +import Distribution.Solver.Types.OptionalStanza +import Distribution.Solver.Types.SolverId +import Distribution.Solver.Types.SourcePackage +import GHC.Generics (Generic) + +-- | A 'SolverPackage' is a package specified by the dependency solver. +-- It will get elaborated into a 'ConfiguredPackage' or even an +-- 'ElaboratedConfiguredPackage'. +-- +-- NB: 'SolverPackage's are essentially always with 'UnresolvedPkgLoc', +-- but for symmetry we have the parameter. (Maybe it can be removed.) +-- +data SolverPackage loc = SolverPackage { + solverPkgSource :: SourcePackage loc, + solverPkgFlags :: FlagAssignment, + solverPkgStanzas :: [OptionalStanza], + solverPkgLibDeps :: ComponentDeps [SolverId], + solverPkgExeDeps :: ComponentDeps [SolverId] + } + deriving (Eq, Show, Generic) + +instance Binary loc => Binary (SolverPackage loc) + +instance Package (SolverPackage loc) where + packageId = packageId . solverPkgSource diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/SourcePackage.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/SourcePackage.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/SourcePackage.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/SourcePackage.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,34 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveDataTypeable #-} +module Distribution.Solver.Types.SourcePackage + ( PackageDescriptionOverride + , SourcePackage(..) + ) where + +import Distribution.Package + ( PackageId, Package(..) 
) +import Distribution.PackageDescription + ( GenericPackageDescription(..) ) + +import Data.ByteString.Lazy (ByteString) +import GHC.Generics (Generic) +import Distribution.Compat.Binary (Binary(..)) +import Data.Typeable + +-- | A package description along with the location of the package sources. +-- +data SourcePackage loc = SourcePackage { + packageInfoId :: PackageId, + packageDescription :: GenericPackageDescription, + packageSource :: loc, + packageDescrOverride :: PackageDescriptionOverride + } + deriving (Eq, Show, Generic, Typeable) + +instance (Binary loc) => Binary (SourcePackage loc) + +instance Package (SourcePackage a) where packageId = packageInfoId + +-- | We sometimes need to override the .cabal file in the tarball with +-- the newer one from the package index. +type PackageDescriptionOverride = Maybe ByteString diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Variable.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Variable.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Variable.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Distribution/Solver/Types/Variable.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,13 @@ +module Distribution.Solver.Types.Variable where + +import Distribution.Solver.Types.OptionalStanza + +import Distribution.PackageDescription (FlagName) + +-- | Variables used by the dependency solver. This type is similar to the +-- internal 'Var' type. 
+data Variable qpn = + PackageVar qpn + | FlagVar qpn FlagName + | StanzaVar qpn OptionalStanza + deriving (Eq, Show) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/LICENSE cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/LICENSE --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/LICENSE 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/LICENSE 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,31 @@ +Copyright (c) 2003-2017, Cabal Development Team. +See the AUTHORS file for the full list of copyright holders. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * Neither the name of Isaac Jones nor the names of other + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/main/Main.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/main/Main.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/main/Main.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/main/Main.hs 2018-11-26 08:42:57.000000000 +0000 @@ -0,0 +1,1253 @@ +{-# LANGUAGE CPP #-} + +----------------------------------------------------------------------------- +-- | +-- Module : Main +-- Copyright : (c) David Himmelstrup 2005 +-- License : BSD-like +-- +-- Maintainer : lemmih@gmail.com +-- Stability : provisional +-- Portability : portable +-- +-- Entry point to the default cabal-install front-end. +----------------------------------------------------------------------------- + +module Main (main) where + +import Distribution.Client.Setup + ( GlobalFlags(..), globalCommand, withRepoContext + , ConfigFlags(..) + , ConfigExFlags(..), defaultConfigExFlags, configureExCommand + , reconfigureCommand + , configCompilerAux', configPackageDB' + , BuildFlags(..), BuildExFlags(..), SkipAddSourceDepsCheck(..) 
+ , buildCommand, replCommand, testCommand, benchmarkCommand + , InstallFlags(..), defaultInstallFlags + , installCommand, upgradeCommand, uninstallCommand + , FetchFlags(..), fetchCommand + , FreezeFlags(..), freezeCommand + , genBoundsCommand + , OutdatedFlags(..), outdatedCommand + , GetFlags(..), getCommand, unpackCommand + , checkCommand + , formatCommand + , UpdateFlags(..), updateCommand + , ListFlags(..), listCommand + , InfoFlags(..), infoCommand + , UploadFlags(..), uploadCommand + , ReportFlags(..), reportCommand + , runCommand + , InitFlags(initVerbosity, initHcPath), initCommand + , SDistFlags(..), SDistExFlags(..), sdistCommand + , Win32SelfUpgradeFlags(..), win32SelfUpgradeCommand + , ActAsSetupFlags(..), actAsSetupCommand + , SandboxFlags(..), sandboxCommand + , ExecFlags(..), execCommand + , UserConfigFlags(..), userConfigCommand + , reportCommand + , manpageCommand + , haddockCommand + , cleanCommand + , doctestCommand + , copyCommand + , registerCommand + ) +import Distribution.Simple.Setup + ( HaddockTarget(..) + , DoctestFlags(..) + , HaddockFlags(..), defaultHaddockFlags + , HscolourFlags(..), hscolourCommand + , ReplFlags(..) + , CopyFlags(..) + , RegisterFlags(..) + , CleanFlags(..) + , TestFlags(..), BenchmarkFlags(..) 
+ , Flag(..), fromFlag, fromFlagOrDefault, flagToMaybe, toFlag + , configAbsolutePaths + ) + +import Prelude () +import Distribution.Solver.Compat.Prelude hiding (get) + +import Distribution.Client.SetupWrapper + ( setupWrapper, SetupScriptOptions(..), defaultSetupScriptOptions ) +import Distribution.Client.Config + ( SavedConfig(..), loadConfig, defaultConfigFile, userConfigDiff + , userConfigUpdate, createDefaultConfigFile, getConfigFilePath ) +import Distribution.Client.Targets + ( readUserTargets ) +import qualified Distribution.Client.List as List + ( list, info ) + + +import qualified Distribution.Client.CmdConfigure as CmdConfigure +import qualified Distribution.Client.CmdUpdate as CmdUpdate +import qualified Distribution.Client.CmdBuild as CmdBuild +import qualified Distribution.Client.CmdRepl as CmdRepl +import qualified Distribution.Client.CmdFreeze as CmdFreeze +import qualified Distribution.Client.CmdHaddock as CmdHaddock +import qualified Distribution.Client.CmdInstall as CmdInstall +import qualified Distribution.Client.CmdRun as CmdRun +import qualified Distribution.Client.CmdTest as CmdTest +import qualified Distribution.Client.CmdBench as CmdBench +import qualified Distribution.Client.CmdExec as CmdExec +import qualified Distribution.Client.CmdClean as CmdClean +import qualified Distribution.Client.CmdSdist as CmdSdist +import Distribution.Client.CmdLegacy + +import Distribution.Client.Install (install) +import Distribution.Client.Configure (configure, writeConfigFlags) +import Distribution.Client.Update (update) +import Distribution.Client.Exec (exec) +import Distribution.Client.Fetch (fetch) +import Distribution.Client.Freeze (freeze) +import Distribution.Client.GenBounds (genBounds) +import Distribution.Client.Outdated (outdated) +import Distribution.Client.Check as Check (check) +--import Distribution.Client.Clean (clean) +import qualified Distribution.Client.Upload as Upload +import Distribution.Client.Run (run, splitRunArgs) +import 
Distribution.Client.SrcDist (sdist) +import Distribution.Client.Get (get) +import Distribution.Client.Reconfigure (Check(..), reconfigure) +import Distribution.Client.Nix (nixInstantiate + ,nixShell + ,nixShellIfSandboxed) +import Distribution.Client.Sandbox (sandboxInit + ,sandboxAddSource + ,sandboxDelete + ,sandboxDeleteSource + ,sandboxListSources + ,sandboxHcPkg + ,dumpPackageEnvironment + + ,loadConfigOrSandboxConfig + ,findSavedDistPref + ,initPackageDBIfNeeded + ,maybeWithSandboxDirOnSearchPath + ,maybeWithSandboxPackageInfo + ,tryGetIndexFilePath + ,sandboxBuildDir + ,updateSandboxConfigFileFlag + ,updateInstallDirs + + ,getPersistOrConfigCompiler) +import Distribution.Client.Sandbox.PackageEnvironment (setPackageDB) +import Distribution.Client.Sandbox.Timestamp (maybeAddCompilerTimestampRecord) +import Distribution.Client.Sandbox.Types (UseSandbox(..), whenUsingSandbox) +import Distribution.Client.Tar (createTarGzFile) +import Distribution.Client.Types (Password (..)) +import Distribution.Client.Init (initCabal) +import Distribution.Client.Manpage (manpage) +import qualified Distribution.Client.Win32SelfUpgrade as Win32SelfUpgrade +import Distribution.Client.Utils (determineNumJobs +#if defined(mingw32_HOST_OS) + ,relaxEncodingErrors +#endif + ) + +import Distribution.Package (packageId) +import Distribution.PackageDescription + ( BuildType(..), Executable(..), buildable ) +import Distribution.PackageDescription.Parsec ( readGenericPackageDescription ) + +import Distribution.PackageDescription.PrettyPrint + ( writeGenericPackageDescription ) +import qualified Distribution.Simple as Simple +import qualified Distribution.Make as Make +import qualified Distribution.Types.UnqualComponentName as Make +import Distribution.Simple.Build + ( startInterpreter ) +import Distribution.Simple.Command + ( CommandParse(..), CommandUI(..), Command, CommandSpec(..) 
+ , CommandType(..), commandsRun, commandAddAction, hiddenCommand + , commandFromSpec, commandShowOptions ) +import Distribution.Simple.Compiler (Compiler(..), PackageDBStack) +import Distribution.Simple.Configure + ( configCompilerAuxEx, ConfigStateFileError(..) + , getPersistBuildConfig, interpretPackageDbFlags + , tryGetPersistBuildConfig ) +import qualified Distribution.Simple.LocalBuildInfo as LBI +import Distribution.Simple.Program (defaultProgramDb + ,configureAllKnownPrograms + ,simpleProgramInvocation + ,getProgramInvocationOutput) +import Distribution.Simple.Program.Db (reconfigurePrograms) +import qualified Distribution.Simple.Setup as Cabal +import Distribution.Simple.Utils + ( cabalVersion, die', dieNoVerbosity, info, notice, topHandler + , findPackageDesc, tryFindPackageDesc ) +import Distribution.Text + ( display ) +import Distribution.Verbosity as Verbosity + ( Verbosity, normal ) +import Distribution.Version + ( Version, mkVersion, orLaterVersion ) +import qualified Paths_cabal_install (version) + +import System.Environment (getArgs, getProgName) +import System.Exit (exitFailure, exitSuccess) +import System.FilePath ( dropExtension, splitExtension + , takeExtension, (), (<.>)) +import System.IO ( BufferMode(LineBuffering), hSetBuffering +#ifdef mingw32_HOST_OS + , stderr +#endif + , stdout ) +import System.Directory (doesFileExist, getCurrentDirectory) +import Data.Monoid (Any(..)) +import Control.Exception (SomeException(..), try) +import Control.Monad (mapM_) + +#ifdef MONOLITHIC +import qualified UnitTests +import qualified MemoryUsageTests +import qualified SolverQuickCheck +import qualified IntegrationTests2 +import qualified System.Environment as Monolithic +#endif + +-- | Entry point +-- +main :: IO () +#ifdef MONOLITHIC +main = do + mb_exec <- Monolithic.lookupEnv "CABAL_INSTALL_MONOLITHIC_MODE" + case mb_exec of + Just "UnitTests" -> UnitTests.main + Just "MemoryUsageTests" -> MemoryUsageTests.main + Just "SolverQuickCheck" -> 
SolverQuickCheck.main + Just "IntegrationTests2" -> IntegrationTests2.main + Just s -> error $ "Unrecognized mode '" ++ show s ++ "' in CABAL_INSTALL_MONOLITHIC_MODE" + Nothing -> main' +#else +main = main' +#endif + +main' :: IO () +main' = do + -- Enable line buffering so that we can get fast feedback even when piped. + -- This is especially important for CI and build systems. + hSetBuffering stdout LineBuffering + -- The default locale encoding for Windows CLI is not UTF-8 and printing + -- Unicode characters to it will fail unless we relax the handling of encoding + -- errors when writing to stderr and stdout. +#ifdef mingw32_HOST_OS + relaxEncodingErrors stdout + relaxEncodingErrors stderr +#endif + getArgs >>= mainWorker + +mainWorker :: [String] -> IO () +mainWorker args = do + validScript <- + if null args + then return False + else doesFileExist (last args) + + topHandler $ + case commandsRun (globalCommand commands) commands args of + CommandHelp help -> printGlobalHelp help + CommandList opts -> printOptionsList opts + CommandErrors errs -> printErrors errs + CommandReadyToGo (globalFlags, commandParse) -> + case commandParse of + _ | fromFlagOrDefault False (globalVersion globalFlags) + -> printVersion + | fromFlagOrDefault False (globalNumericVersion globalFlags) + -> printNumericVersion + CommandHelp help -> printCommandHelp help + CommandList opts -> printOptionsList opts + CommandErrors errs + | validScript -> CmdRun.handleShebang (last args) + | otherwise -> printErrors errs + CommandReadyToGo action -> do + globalFlags' <- updateSandboxConfigFileFlag globalFlags + action globalFlags' + + where + printCommandHelp help = do + pname <- getProgName + putStr (help pname) + printGlobalHelp help = do + pname <- getProgName + configFile <- defaultConfigFile + putStr (help pname) + putStr $ "\nYou can edit the cabal configuration file to set defaults:\n" + ++ " " ++ configFile ++ "\n" + exists <- doesFileExist configFile + unless exists $ + putStrLn $ 
"This file will be generated with sensible " + ++ "defaults if you run 'cabal update'." + printOptionsList = putStr . unlines + printErrors errs = dieNoVerbosity $ intercalate "\n" errs + printNumericVersion = putStrLn $ display Paths_cabal_install.version + printVersion = putStrLn $ "cabal-install version " + ++ display Paths_cabal_install.version + ++ "\ncompiled using version " + ++ display cabalVersion + ++ " of the Cabal library " + + commands = map commandFromSpec commandSpecs + commandSpecs = + [ regularCmd listCommand listAction + , regularCmd infoCommand infoAction + , regularCmd fetchCommand fetchAction + , regularCmd getCommand getAction + , hiddenCmd unpackCommand unpackAction + , regularCmd checkCommand checkAction + , regularCmd uploadCommand uploadAction + , regularCmd reportCommand reportAction + , regularCmd initCommand initAction + , regularCmd userConfigCommand userConfigAction + , regularCmd genBoundsCommand genBoundsAction + , regularCmd outdatedCommand outdatedAction + , wrapperCmd hscolourCommand hscolourVerbosity hscolourDistPref + , hiddenCmd uninstallCommand uninstallAction + , hiddenCmd formatCommand formatAction + , hiddenCmd upgradeCommand upgradeAction + , hiddenCmd win32SelfUpgradeCommand win32SelfUpgradeAction + , hiddenCmd actAsSetupCommand actAsSetupAction + , hiddenCmd manpageCommand (manpageAction commandSpecs) + + ] ++ concat + [ newCmd CmdConfigure.configureCommand CmdConfigure.configureAction + , newCmd CmdUpdate.updateCommand CmdUpdate.updateAction + , newCmd CmdBuild.buildCommand CmdBuild.buildAction + , newCmd CmdRepl.replCommand CmdRepl.replAction + , newCmd CmdFreeze.freezeCommand CmdFreeze.freezeAction + , newCmd CmdHaddock.haddockCommand CmdHaddock.haddockAction + , newCmd CmdInstall.installCommand CmdInstall.installAction + , newCmd CmdRun.runCommand CmdRun.runAction + , newCmd CmdTest.testCommand CmdTest.testAction + , newCmd CmdBench.benchCommand CmdBench.benchAction + , newCmd CmdExec.execCommand CmdExec.execAction 
+ , newCmd CmdClean.cleanCommand CmdClean.cleanAction + , newCmd CmdSdist.sdistCommand CmdSdist.sdistAction + + , legacyCmd configureExCommand configureAction + , legacyCmd updateCommand updateAction + , legacyCmd buildCommand buildAction + , legacyCmd replCommand replAction + , legacyCmd freezeCommand freezeAction + , legacyCmd haddockCommand haddockAction + , legacyCmd installCommand installAction + , legacyCmd runCommand runAction + , legacyCmd testCommand testAction + , legacyCmd benchmarkCommand benchmarkAction + , legacyCmd execCommand execAction + , legacyCmd cleanCommand cleanAction + , legacyCmd sdistCommand sdistAction + , legacyCmd doctestCommand doctestAction + , legacyWrapperCmd copyCommand copyVerbosity copyDistPref + , legacyWrapperCmd registerCommand regVerbosity regDistPref + , legacyCmd reconfigureCommand reconfigureAction + , legacyCmd sandboxCommand sandboxAction + ] + +type Action = GlobalFlags -> IO () + +-- Duplicated in Distribution.Client.CmdLegacy. Any changes must be +-- reflected there, as well. 
+regularCmd :: CommandUI flags -> (flags -> [String] -> action) + -> CommandSpec action +regularCmd ui action = + CommandSpec ui ((flip commandAddAction) action) NormalCommand + +hiddenCmd :: CommandUI flags -> (flags -> [String] -> action) + -> CommandSpec action +hiddenCmd ui action = + CommandSpec ui (\ui' -> hiddenCommand (commandAddAction ui' action)) + HiddenCommand + +wrapperCmd :: Monoid flags => CommandUI flags -> (flags -> Flag Verbosity) + -> (flags -> Flag String) -> CommandSpec Action +wrapperCmd ui verbosity distPref = + CommandSpec ui (\ui' -> wrapperAction ui' verbosity distPref) NormalCommand + +wrapperAction :: Monoid flags + => CommandUI flags + -> (flags -> Flag Verbosity) + -> (flags -> Flag String) + -> Command Action +wrapperAction command verbosityFlag distPrefFlag = + commandAddAction command + { commandDefaultFlags = mempty } $ \flags extraArgs globalFlags -> do + let verbosity = fromFlagOrDefault normal (verbosityFlag flags) + load <- try (loadConfigOrSandboxConfig verbosity globalFlags) + let config = either (\(SomeException _) -> mempty) snd load + distPref <- findSavedDistPref config (distPrefFlag flags) + let setupScriptOptions = defaultSetupScriptOptions { useDistPref = distPref } + setupWrapper verbosity setupScriptOptions Nothing + command (const flags) (const extraArgs) + +configureAction :: (ConfigFlags, ConfigExFlags) + -> [String] -> Action +configureAction (configFlags, configExFlags) extraArgs globalFlags = do + let verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + (useSandbox, config) <- updateInstallDirs (configUserInstall configFlags) + <$> loadConfigOrSandboxConfig verbosity globalFlags + distPref <- findSavedDistPref config (configDistPref configFlags) + nixInstantiate verbosity distPref True globalFlags config + nixShell verbosity distPref globalFlags config $ do + let configFlags' = savedConfigureFlags config `mappend` configFlags + configExFlags' = savedConfigureExFlags config `mappend` 
configExFlags + globalFlags' = savedGlobalFlags config `mappend` globalFlags + (comp, platform, progdb) <- configCompilerAuxEx configFlags' + + -- If we're working inside a sandbox and the user has set the -w option, we + -- may need to create a sandbox-local package DB for this compiler and add a + -- timestamp record for this compiler to the timestamp file. + let configFlags'' = case useSandbox of + NoSandbox -> configFlags' + (UseSandbox sandboxDir) -> setPackageDB sandboxDir + comp platform configFlags' + + writeConfigFlags verbosity distPref (configFlags'', configExFlags') + + -- What package database(s) to use + let packageDBs :: PackageDBStack + packageDBs + = interpretPackageDbFlags + (fromFlag (configUserInstall configFlags'')) + (configPackageDBs configFlags'') + + whenUsingSandbox useSandbox $ \sandboxDir -> do + initPackageDBIfNeeded verbosity configFlags'' comp progdb + -- NOTE: We do not write the new sandbox package DB location to + -- 'cabal.sandbox.config' here because 'configure -w' must not affect + -- subsequent 'install' (for UI compatibility with non-sandboxed mode). 
+ + indexFile <- tryGetIndexFilePath verbosity config + maybeAddCompilerTimestampRecord verbosity sandboxDir indexFile + (compilerId comp) platform + + maybeWithSandboxDirOnSearchPath useSandbox $ + withRepoContext verbosity globalFlags' $ \repoContext -> + configure verbosity packageDBs repoContext + comp platform progdb configFlags'' configExFlags' extraArgs + +reconfigureAction :: (ConfigFlags, ConfigExFlags) + -> [String] -> Action +reconfigureAction flags@(configFlags, _) _ globalFlags = do + let verbosity = fromFlagOrDefault normal (configVerbosity configFlags) + (useSandbox, config) <- updateInstallDirs (configUserInstall configFlags) + <$> loadConfigOrSandboxConfig verbosity globalFlags + distPref <- findSavedDistPref config (configDistPref configFlags) + let checkFlags = Check $ \_ saved -> do + let flags' = saved <> flags + unless (saved == flags') $ info verbosity message + pure (Any True, flags') + where + -- This message is correct, but not very specific: it will list all + -- of the new flags, even if some have not actually changed. The + -- *minimal* set of changes is more difficult to determine. + message = + "flags changed: " + ++ unwords (commandShowOptions configureExCommand flags) + nixInstantiate verbosity distPref True globalFlags config + _ <- + reconfigure configureAction + verbosity distPref useSandbox DontSkipAddSourceDepsCheck NoFlag + checkFlags [] globalFlags config + pure () + +buildAction :: (BuildFlags, BuildExFlags) -> [String] -> Action +buildAction (buildFlags, buildExFlags) extraArgs globalFlags = do + let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags) + noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck + (buildOnly buildExFlags) + (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + distPref <- findSavedDistPref config (buildDistPref buildFlags) + -- Calls 'configureAction' to do the real work, so nothing special has to be + -- done to support sandboxes. 
+ config' <- + reconfigure configureAction + verbosity distPref useSandbox noAddSource (buildNumJobs buildFlags) + mempty [] globalFlags config + nixShell verbosity distPref globalFlags config $ do + maybeWithSandboxDirOnSearchPath useSandbox $ + build verbosity config' distPref buildFlags extraArgs + + +-- | Actually do the work of building the package. This is separate from +-- 'buildAction' so that 'testAction' and 'benchmarkAction' do not invoke +-- 'reconfigure' twice. +build :: Verbosity -> SavedConfig -> FilePath -> BuildFlags -> [String] -> IO () +build verbosity config distPref buildFlags extraArgs = + setupWrapper verbosity setupOptions Nothing + (Cabal.buildCommand progDb) mkBuildFlags (const extraArgs) + where + progDb = defaultProgramDb + setupOptions = defaultSetupScriptOptions { useDistPref = distPref } + + mkBuildFlags version = filterBuildFlags version config buildFlags' + buildFlags' = buildFlags + { buildVerbosity = toFlag verbosity + , buildDistPref = toFlag distPref + } + +-- | Make sure that we don't pass new flags to setup scripts compiled against +-- old versions of Cabal. +filterBuildFlags :: Version -> SavedConfig -> BuildFlags -> BuildFlags +filterBuildFlags version config buildFlags + | version >= mkVersion [1,19,1] = buildFlags_latest + -- Cabal < 1.19.1 doesn't support 'build -j'. + | otherwise = buildFlags_pre_1_19_1 + where + buildFlags_pre_1_19_1 = buildFlags { + buildNumJobs = NoFlag + } + buildFlags_latest = buildFlags { + -- Take the 'jobs' setting '~/.cabal/config' into account. + buildNumJobs = Flag . Just . determineNumJobs $ + (numJobsConfigFlag `mappend` numJobsCmdLineFlag) + } + numJobsConfigFlag = installNumJobs . 
savedInstallFlags $ config + numJobsCmdLineFlag = buildNumJobs buildFlags + + +replAction :: (ReplFlags, BuildExFlags) -> [String] -> Action +replAction (replFlags, buildExFlags) extraArgs globalFlags = do + let verbosity = fromFlagOrDefault normal (replVerbosity replFlags) + (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + distPref <- findSavedDistPref config (replDistPref replFlags) + cwd <- getCurrentDirectory + pkgDesc <- findPackageDesc cwd + let + -- There is a .cabal file in the current directory: start a REPL and load + -- the project's modules. + onPkgDesc = do + let noAddSource = case replReload replFlags of + Flag True -> SkipAddSourceDepsCheck + _ -> fromFlagOrDefault DontSkipAddSourceDepsCheck + (buildOnly buildExFlags) + + -- Calls 'configureAction' to do the real work, so nothing special has to + -- be done to support sandboxes. + _ <- + reconfigure configureAction + verbosity distPref useSandbox noAddSource NoFlag + mempty [] globalFlags config + let progDb = defaultProgramDb + setupOptions = defaultSetupScriptOptions + { useCabalVersion = orLaterVersion $ mkVersion [1,18,0] + , useDistPref = distPref + } + replFlags' = replFlags + { replVerbosity = toFlag verbosity + , replDistPref = toFlag distPref + } + + nixShell verbosity distPref globalFlags config $ do + maybeWithSandboxDirOnSearchPath useSandbox $ + setupWrapper verbosity setupOptions Nothing + (Cabal.replCommand progDb) (const replFlags') (const extraArgs) + + -- No .cabal file in the current directory: just start the REPL (possibly + -- using the sandbox package DB). 
+ onNoPkgDesc = do + let configFlags = savedConfigureFlags config + (comp, platform, programDb) <- configCompilerAux' configFlags + programDb' <- reconfigurePrograms verbosity + (replProgramPaths replFlags) + (replProgramArgs replFlags) + programDb + nixShell verbosity distPref globalFlags config $ do + startInterpreter verbosity programDb' comp platform + (configPackageDB' configFlags) + + either (const onNoPkgDesc) (const onPkgDesc) pkgDesc + +installAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> Action +installAction (configFlags, _, installFlags, _) _ globalFlags + | fromFlagOrDefault False (installOnly installFlags) = do + let verb = fromFlagOrDefault normal (configVerbosity configFlags) + (useSandbox, config) <- loadConfigOrSandboxConfig verb globalFlags + dist <- findSavedDistPref config (configDistPref configFlags) + let setupOpts = defaultSetupScriptOptions { useDistPref = dist } + nixShellIfSandboxed verb dist globalFlags config useSandbox $ + setupWrapper + verb setupOpts Nothing + installCommand (const mempty) (const []) + +installAction + (configFlags, configExFlags, installFlags, haddockFlags) + extraArgs globalFlags = do + let verb = fromFlagOrDefault normal (configVerbosity configFlags) + (useSandbox, config) <- updateInstallDirs (configUserInstall configFlags) + <$> loadConfigOrSandboxConfig verb globalFlags + + let sandboxDist = + case useSandbox of + NoSandbox -> NoFlag + UseSandbox sandboxDir -> Flag $ sandboxBuildDir sandboxDir + dist <- findSavedDistPref config + (configDistPref configFlags `mappend` sandboxDist) + + nixShellIfSandboxed verb dist globalFlags config useSandbox $ do + targets <- readUserTargets verb extraArgs + + -- TODO: It'd be nice if 'cabal install' picked up the '-w' flag passed to + -- 'configure' when run inside a sandbox. 
Right now, running + -- + -- $ cabal sandbox init && cabal configure -w /path/to/ghc + -- && cabal build && cabal install + -- + -- performs the compilation twice unless you also pass -w to 'install'. + -- However, this is the same behaviour that 'cabal install' has in the normal + -- mode of operation, so we stick to it for consistency. + + let configFlags' = maybeForceTests installFlags' $ + savedConfigureFlags config `mappend` + configFlags { configDistPref = toFlag dist } + configExFlags' = defaultConfigExFlags `mappend` + savedConfigureExFlags config `mappend` configExFlags + installFlags' = defaultInstallFlags `mappend` + savedInstallFlags config `mappend` installFlags + haddockFlags' = defaultHaddockFlags `mappend` + savedHaddockFlags config `mappend` + haddockFlags { haddockDistPref = toFlag dist } + globalFlags' = savedGlobalFlags config `mappend` globalFlags + (comp, platform, progdb) <- configCompilerAux' configFlags' + -- TODO: Redesign ProgramDB API to prevent such problems as #2241 in the + -- future. + progdb' <- configureAllKnownPrograms verb progdb + + -- If we're working inside a sandbox and the user has set the -w option, we + -- may need to create a sandbox-local package DB for this compiler and add a + -- timestamp record for this compiler to the timestamp file. + configFlags'' <- case useSandbox of + NoSandbox -> configAbsolutePaths $ configFlags' + (UseSandbox sandboxDir) -> return $ setPackageDB sandboxDir comp platform + configFlags' + + whenUsingSandbox useSandbox $ \sandboxDir -> do + initPackageDBIfNeeded verb configFlags'' comp progdb' + + indexFile <- tryGetIndexFilePath verb config + maybeAddCompilerTimestampRecord verb sandboxDir indexFile + (compilerId comp) platform + + -- TODO: Passing 'SandboxPackageInfo' to install unconditionally here means + -- that 'cabal install some-package' inside a sandbox will sometimes reinstall + -- modified add-source deps, even if they are not among the dependencies of + -- 'some-package'. 
This can also prevent packages that depend on older + -- versions of add-source'd packages from building (see #1362). + maybeWithSandboxPackageInfo verb configFlags'' globalFlags' + comp platform progdb useSandbox $ \mSandboxPkgInfo -> + maybeWithSandboxDirOnSearchPath useSandbox $ + withRepoContext verb globalFlags' $ \repoContext -> + install verb + (configPackageDB' configFlags'') + repoContext + comp platform progdb' + useSandbox mSandboxPkgInfo + globalFlags' configFlags'' configExFlags' + installFlags' haddockFlags' + targets + + where + -- '--run-tests' implies '--enable-tests'. + maybeForceTests installFlags' configFlags' = + if fromFlagOrDefault False (installRunTests installFlags') + then configFlags' { configTests = toFlag True } + else configFlags' + +testAction :: (TestFlags, BuildFlags, BuildExFlags) -> [String] -> GlobalFlags + -> IO () +testAction (testFlags, buildFlags, buildExFlags) extraArgs globalFlags = do + let verbosity = fromFlagOrDefault normal (testVerbosity testFlags) + (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + distPref <- findSavedDistPref config (testDistPref testFlags) + let noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck + (buildOnly buildExFlags) + buildFlags' = buildFlags + { buildVerbosity = testVerbosity testFlags } + checkFlags = Check $ \_ flags@(configFlags, configExFlags) -> + if fromFlagOrDefault False (configTests configFlags) + then pure (mempty, flags) + else do + info verbosity "reconfiguring to enable tests" + let flags' = ( configFlags { configTests = toFlag True } + , configExFlags + ) + pure (Any True, flags') + + -- reconfigure also checks if we're in a sandbox and reinstalls add-source + -- deps if needed. 
+ _ <- + reconfigure configureAction + verbosity distPref useSandbox noAddSource (buildNumJobs buildFlags') + checkFlags [] globalFlags config + nixShell verbosity distPref globalFlags config $ do + let setupOptions = defaultSetupScriptOptions { useDistPref = distPref } + testFlags' = testFlags { testDistPref = toFlag distPref } + + -- The package was just configured, so the LBI must be available. + names <- componentNamesFromLBI verbosity distPref "test suites" + (\c -> case c of { LBI.CTest{} -> True; _ -> False }) + let extraArgs' + | null extraArgs = case names of + ComponentNamesUnknown -> [] + ComponentNames names' -> [ Make.unUnqualComponentName name + | LBI.CTestName name <- names' ] + | otherwise = extraArgs + + maybeWithSandboxDirOnSearchPath useSandbox $ + build verbosity config distPref buildFlags' extraArgs' + + maybeWithSandboxDirOnSearchPath useSandbox $ + setupWrapper verbosity setupOptions Nothing + Cabal.testCommand (const testFlags') (const extraArgs') + +data ComponentNames = ComponentNamesUnknown + | ComponentNames [LBI.ComponentName] + +-- | Return the names of all buildable components matching a given predicate. +componentNamesFromLBI :: Verbosity -> FilePath -> String + -> (LBI.Component -> Bool) + -> IO ComponentNames +componentNamesFromLBI verbosity distPref targetsDescr compPred = do + eLBI <- tryGetPersistBuildConfig distPref + case eLBI of + Left err -> case err of + -- Note: the build config could have been generated by a custom setup + -- script built against a different Cabal version, so it's crucial that + -- we ignore the bad version error here. + ConfigStateFileBadVersion _ _ _ -> return ComponentNamesUnknown + _ -> die' verbosity (show err) + Right lbi -> do + let pkgDescr = LBI.localPkgDescr lbi + names = map LBI.componentName + . filter (buildable . LBI.componentBuildInfo) + . filter compPred $ + LBI.pkgComponents pkgDescr + if null names + then do notice verbosity $ "Package has no buildable " + ++ targetsDescr ++ "." 
+ exitSuccess -- See #3215. + + else return $! (ComponentNames names) + +benchmarkAction :: (BenchmarkFlags, BuildFlags, BuildExFlags) + -> [String] -> GlobalFlags + -> IO () +benchmarkAction + (benchmarkFlags, buildFlags, buildExFlags) + extraArgs globalFlags = do + let verbosity = fromFlagOrDefault normal + (benchmarkVerbosity benchmarkFlags) + + (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + distPref <- findSavedDistPref config (benchmarkDistPref benchmarkFlags) + let buildFlags' = buildFlags + { buildVerbosity = benchmarkVerbosity benchmarkFlags } + noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck + (buildOnly buildExFlags) + + let checkFlags = Check $ \_ flags@(configFlags, configExFlags) -> + if fromFlagOrDefault False (configBenchmarks configFlags) + then pure (mempty, flags) + else do + info verbosity "reconfiguring to enable benchmarks" + let flags' = ( configFlags { configBenchmarks = toFlag True } + , configExFlags + ) + pure (Any True, flags') + + + -- reconfigure also checks if we're in a sandbox and reinstalls add-source + -- deps if needed. + config' <- + reconfigure configureAction + verbosity distPref useSandbox noAddSource (buildNumJobs buildFlags') + checkFlags [] globalFlags config + nixShell verbosity distPref globalFlags config $ do + let setupOptions = defaultSetupScriptOptions { useDistPref = distPref } + benchmarkFlags'= benchmarkFlags { benchmarkDistPref = toFlag distPref } + + -- The package was just configured, so the LBI must be available. 
+ names <- componentNamesFromLBI verbosity distPref "benchmarks" + (\c -> case c of { LBI.CBench{} -> True; _ -> False; }) + let extraArgs' + | null extraArgs = case names of + ComponentNamesUnknown -> [] + ComponentNames names' -> [ Make.unUnqualComponentName name + | LBI.CBenchName name <- names'] + | otherwise = extraArgs + + maybeWithSandboxDirOnSearchPath useSandbox $ + build verbosity config' distPref buildFlags' extraArgs' + + maybeWithSandboxDirOnSearchPath useSandbox $ + setupWrapper verbosity setupOptions Nothing + Cabal.benchmarkCommand (const benchmarkFlags') (const extraArgs') + +haddockAction :: HaddockFlags -> [String] -> Action +haddockAction haddockFlags extraArgs globalFlags = do + let verbosity = fromFlag (haddockVerbosity haddockFlags) + (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + distPref <- findSavedDistPref config (haddockDistPref haddockFlags) + config' <- + reconfigure configureAction + verbosity distPref useSandbox DontSkipAddSourceDepsCheck NoFlag + mempty [] globalFlags config + nixShell verbosity distPref globalFlags config $ do + let haddockFlags' = defaultHaddockFlags `mappend` + savedHaddockFlags config' `mappend` + haddockFlags { haddockDistPref = toFlag distPref } + setupScriptOptions = defaultSetupScriptOptions + { useDistPref = distPref } + setupWrapper verbosity setupScriptOptions Nothing + haddockCommand (const haddockFlags') (const extraArgs) + when (haddockForHackage haddockFlags == Flag ForHackage) $ do + pkg <- fmap LBI.localPkgDescr (getPersistBuildConfig distPref) + let dest = distPref name <.> "tar.gz" + name = display (packageId pkg) ++ "-docs" + docDir = distPref "doc" "html" + createTarGzFile dest docDir name + notice verbosity $ "Documentation tarball created: " ++ dest + +doctestAction :: DoctestFlags -> [String] -> Action +doctestAction doctestFlags extraArgs _globalFlags = do + let verbosity = fromFlag (doctestVerbosity doctestFlags) + + setupWrapper verbosity 
defaultSetupScriptOptions Nothing + doctestCommand (const doctestFlags) (const extraArgs) + +cleanAction :: CleanFlags -> [String] -> Action +cleanAction cleanFlags extraArgs globalFlags = do + load <- try (loadConfigOrSandboxConfig verbosity globalFlags) + let config = either (\(SomeException _) -> mempty) snd load + distPref <- findSavedDistPref config (cleanDistPref cleanFlags) + let setupScriptOptions = defaultSetupScriptOptions + { useDistPref = distPref + , useWin32CleanHack = True + } + cleanFlags' = cleanFlags { cleanDistPref = toFlag distPref } + setupWrapper verbosity setupScriptOptions Nothing + cleanCommand (const cleanFlags') (const extraArgs) + where + verbosity = fromFlagOrDefault normal (cleanVerbosity cleanFlags) + +listAction :: ListFlags -> [String] -> Action +listAction listFlags extraArgs globalFlags = do + let verbosity = fromFlag (listVerbosity listFlags) + (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity + (globalFlags { globalRequireSandbox = Flag False }) + let configFlags' = savedConfigureFlags config + configFlags = configFlags' { + configPackageDBs = configPackageDBs configFlags' + `mappend` listPackageDBs listFlags + } + globalFlags' = savedGlobalFlags config `mappend` globalFlags + (comp, _, progdb) <- configCompilerAux' configFlags + withRepoContext verbosity globalFlags' $ \repoContext -> + List.list verbosity + (configPackageDB' configFlags) + repoContext + comp + progdb + listFlags + extraArgs + +infoAction :: InfoFlags -> [String] -> Action +infoAction infoFlags extraArgs globalFlags = do + let verbosity = fromFlag (infoVerbosity infoFlags) + targets <- readUserTargets verbosity extraArgs + (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity + (globalFlags { globalRequireSandbox = Flag False }) + let configFlags' = savedConfigureFlags config + configFlags = configFlags' { + configPackageDBs = configPackageDBs configFlags' + `mappend` infoPackageDBs infoFlags + } + globalFlags' = savedGlobalFlags config 
`mappend` globalFlags + (comp, _, progdb) <- configCompilerAuxEx configFlags + withRepoContext verbosity globalFlags' $ \repoContext -> + List.info verbosity + (configPackageDB' configFlags) + repoContext + comp + progdb + globalFlags' + infoFlags + targets + +updateAction :: UpdateFlags -> [String] -> Action +updateAction updateFlags extraArgs globalFlags = do + let verbosity = fromFlag (updateVerbosity updateFlags) + unless (null extraArgs) $ + die' verbosity $ "'update' doesn't take any extra arguments: " ++ unwords extraArgs + (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity + (globalFlags { globalRequireSandbox = Flag False }) + let globalFlags' = savedGlobalFlags config `mappend` globalFlags + withRepoContext verbosity globalFlags' $ \repoContext -> + update verbosity updateFlags repoContext + +upgradeAction :: (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags) + -> [String] -> Action +upgradeAction (configFlags, _, _, _) _ _ = die' verbosity $ + "Use the 'cabal install' command instead of 'cabal upgrade'.\n" + ++ "You can install the latest version of a package using 'cabal install'. " + ++ "The 'cabal upgrade' command has been removed because people found it " + ++ "confusing and it often led to broken packages.\n" + ++ "If you want the old upgrade behaviour then use the install command " + ++ "with the --upgrade-dependencies flag (but check first with --dry-run " + ++ "to see what would happen). This will try to pick the latest versions " + ++ "of all dependencies, rather than the usual behaviour of trying to pick " + ++ "installed versions of all dependencies. If you do use " + ++ "--upgrade-dependencies, it is recommended that you do not upgrade core " + ++ "packages (e.g. by using appropriate --constraint= flags)." 
+ where + verbosity = fromFlag (configVerbosity configFlags) + +fetchAction :: FetchFlags -> [String] -> Action +fetchAction fetchFlags extraArgs globalFlags = do + let verbosity = fromFlag (fetchVerbosity fetchFlags) + targets <- readUserTargets verbosity extraArgs + config <- loadConfig verbosity (globalConfigFile globalFlags) + let configFlags = savedConfigureFlags config + globalFlags' = savedGlobalFlags config `mappend` globalFlags + (comp, platform, progdb) <- configCompilerAux' configFlags + withRepoContext verbosity globalFlags' $ \repoContext -> + fetch verbosity + (configPackageDB' configFlags) + repoContext + comp platform progdb globalFlags' fetchFlags + targets + +freezeAction :: FreezeFlags -> [String] -> Action +freezeAction freezeFlags _extraArgs globalFlags = do + let verbosity = fromFlag (freezeVerbosity freezeFlags) + (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + distPref <- findSavedDistPref config NoFlag + nixShell verbosity distPref globalFlags config $ do + let configFlags = savedConfigureFlags config + globalFlags' = savedGlobalFlags config `mappend` globalFlags + (comp, platform, progdb) <- configCompilerAux' configFlags + + maybeWithSandboxPackageInfo + verbosity configFlags globalFlags' + comp platform progdb useSandbox $ \mSandboxPkgInfo -> + maybeWithSandboxDirOnSearchPath useSandbox $ + withRepoContext verbosity globalFlags' $ \repoContext -> + freeze verbosity + (configPackageDB' configFlags) + repoContext + comp platform progdb + mSandboxPkgInfo + globalFlags' freezeFlags + +genBoundsAction :: FreezeFlags -> [String] -> GlobalFlags -> IO () +genBoundsAction freezeFlags _extraArgs globalFlags = do + let verbosity = fromFlag (freezeVerbosity freezeFlags) + (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + distPref <- findSavedDistPref config NoFlag + nixShell verbosity distPref globalFlags config $ do + let configFlags = savedConfigureFlags config + globalFlags' = savedGlobalFlags 
config `mappend` globalFlags + (comp, platform, progdb) <- configCompilerAux' configFlags + + maybeWithSandboxPackageInfo + verbosity configFlags globalFlags' + comp platform progdb useSandbox $ \mSandboxPkgInfo -> + maybeWithSandboxDirOnSearchPath useSandbox $ + withRepoContext verbosity globalFlags' $ \repoContext -> + genBounds verbosity + (configPackageDB' configFlags) + repoContext + comp platform progdb + mSandboxPkgInfo + globalFlags' freezeFlags + +outdatedAction :: OutdatedFlags -> [String] -> GlobalFlags -> IO () +outdatedAction outdatedFlags _extraArgs globalFlags = do + let verbosity = fromFlag (outdatedVerbosity outdatedFlags) + (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + let configFlags = savedConfigureFlags config + globalFlags' = savedGlobalFlags config `mappend` globalFlags + (comp, platform, _progdb) <- configCompilerAux' configFlags + withRepoContext verbosity globalFlags' $ \repoContext -> + outdated verbosity outdatedFlags repoContext + comp platform + +uploadAction :: UploadFlags -> [String] -> Action +uploadAction uploadFlags extraArgs globalFlags = do + config <- loadConfig verbosity (globalConfigFile globalFlags) + let uploadFlags' = savedUploadFlags config `mappend` uploadFlags + globalFlags' = savedGlobalFlags config `mappend` globalFlags + tarfiles = extraArgs + when (null tarfiles && not (fromFlag (uploadDoc uploadFlags'))) $ + die' verbosity "the 'upload' command expects at least one .tar.gz archive." + checkTarFiles extraArgs + maybe_password <- + case uploadPasswordCmd uploadFlags' + of Flag (xs:xss) -> Just . Password <$> + getProgramInvocationOutput verbosity + (simpleProgramInvocation xs xss) + _ -> pure $ flagToMaybe $ uploadPassword uploadFlags' + withRepoContext verbosity globalFlags' $ \repoContext -> do + if fromFlag (uploadDoc uploadFlags') + then do + when (length tarfiles > 1) $ + die' verbosity $ "the 'upload' command can only upload documentation " + ++ "for one package at a time." 
+ tarfile <- maybe (generateDocTarball config) return $ listToMaybe tarfiles + Upload.uploadDoc verbosity + repoContext + (flagToMaybe $ uploadUsername uploadFlags') + maybe_password + (fromFlag (uploadCandidate uploadFlags')) + tarfile + else do + Upload.upload verbosity + repoContext + (flagToMaybe $ uploadUsername uploadFlags') + maybe_password + (fromFlag (uploadCandidate uploadFlags')) + tarfiles + where + verbosity = fromFlag (uploadVerbosity uploadFlags) + checkTarFiles tarfiles + | not (null otherFiles) + = die' verbosity $ "the 'upload' command expects only .tar.gz archives: " + ++ intercalate ", " otherFiles + | otherwise = sequence_ + [ do exists <- doesFileExist tarfile + unless exists $ die' verbosity $ "file not found: " ++ tarfile + | tarfile <- tarfiles ] + + where otherFiles = filter (not . isTarGzFile) tarfiles + isTarGzFile file = case splitExtension file of + (file', ".gz") -> takeExtension file' == ".tar" + _ -> False + generateDocTarball config = do + notice verbosity $ + "No documentation tarball specified. " + ++ "Building a documentation tarball with default settings...\n" + ++ "If you need to customise Haddock options, " + ++ "run 'haddock --for-hackage' first " + ++ "to generate a documentation tarball." 
+ haddockAction (defaultHaddockFlags { haddockForHackage = Flag ForHackage }) + [] globalFlags + distPref <- findSavedDistPref config NoFlag + pkg <- fmap LBI.localPkgDescr (getPersistBuildConfig distPref) + return $ distPref display (packageId pkg) ++ "-docs" <.> "tar.gz" + +checkAction :: Flag Verbosity -> [String] -> Action +checkAction verbosityFlag extraArgs _globalFlags = do + let verbosity = fromFlag verbosityFlag + unless (null extraArgs) $ + die' verbosity $ "'check' doesn't take any extra arguments: " ++ unwords extraArgs + allOk <- Check.check (fromFlag verbosityFlag) + unless allOk exitFailure + +formatAction :: Flag Verbosity -> [String] -> Action +formatAction verbosityFlag extraArgs _globalFlags = do + let verbosity = fromFlag verbosityFlag + path <- case extraArgs of + [] -> do cwd <- getCurrentDirectory + tryFindPackageDesc cwd + (p:_) -> return p + pkgDesc <- readGenericPackageDescription verbosity path + -- Uses 'writeFileAtomic' under the hood. + writeGenericPackageDescription path pkgDesc + +uninstallAction :: Flag Verbosity -> [String] -> Action +uninstallAction verbosityFlag extraArgs _globalFlags = do + let verbosity = fromFlag verbosityFlag + package = case extraArgs of + p:_ -> p + _ -> "PACKAGE_NAME" + die' verbosity $ "This version of 'cabal-install' does not support the 'uninstall' " + ++ "operation. " + ++ "It will likely be implemented at some point in the future; " + ++ "in the meantime you're advised to use either 'ghc-pkg unregister " + ++ package ++ "' or 'cabal sandbox hc-pkg -- unregister " ++ package ++ "'." 
+ + +sdistAction :: (SDistFlags, SDistExFlags) -> [String] -> Action +sdistAction (sdistFlags, sdistExFlags) extraArgs globalFlags = do + let verbosity = fromFlag (sDistVerbosity sdistFlags) + unless (null extraArgs) $ + die' verbosity $ "'sdist' doesn't take any extra arguments: " ++ unwords extraArgs + load <- try (loadConfigOrSandboxConfig verbosity globalFlags) + let config = either (\(SomeException _) -> mempty) snd load + distPref <- findSavedDistPref config (sDistDistPref sdistFlags) + let sdistFlags' = sdistFlags { sDistDistPref = toFlag distPref } + sdist sdistFlags' sdistExFlags + +reportAction :: ReportFlags -> [String] -> Action +reportAction reportFlags extraArgs globalFlags = do + let verbosity = fromFlag (reportVerbosity reportFlags) + unless (null extraArgs) $ + die' verbosity $ "'report' doesn't take any extra arguments: " ++ unwords extraArgs + config <- loadConfig verbosity (globalConfigFile globalFlags) + let globalFlags' = savedGlobalFlags config `mappend` globalFlags + reportFlags' = savedReportFlags config `mappend` reportFlags + + withRepoContext verbosity globalFlags' $ \repoContext -> + Upload.report verbosity repoContext + (flagToMaybe $ reportUsername reportFlags') + (flagToMaybe $ reportPassword reportFlags') + +runAction :: (BuildFlags, BuildExFlags) -> [String] -> Action +runAction (buildFlags, buildExFlags) extraArgs globalFlags = do + let verbosity = fromFlagOrDefault normal (buildVerbosity buildFlags) + (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + distPref <- findSavedDistPref config (buildDistPref buildFlags) + let noAddSource = fromFlagOrDefault DontSkipAddSourceDepsCheck + (buildOnly buildExFlags) + -- reconfigure also checks if we're in a sandbox and reinstalls add-source + -- deps if needed. 
+ config' <- + reconfigure configureAction + verbosity distPref useSandbox noAddSource (buildNumJobs buildFlags) + mempty [] globalFlags config + nixShell verbosity distPref globalFlags config $ do + lbi <- getPersistBuildConfig distPref + (exe, exeArgs) <- splitRunArgs verbosity lbi extraArgs + + maybeWithSandboxDirOnSearchPath useSandbox $ + build verbosity config' distPref buildFlags ["exe:" ++ display (exeName exe)] + + maybeWithSandboxDirOnSearchPath useSandbox $ + run verbosity lbi exe exeArgs + +getAction :: GetFlags -> [String] -> Action +getAction getFlags extraArgs globalFlags = do + let verbosity = fromFlag (getVerbosity getFlags) + targets <- readUserTargets verbosity extraArgs + (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity + (globalFlags { globalRequireSandbox = Flag False }) + let globalFlags' = savedGlobalFlags config `mappend` globalFlags + withRepoContext verbosity (savedGlobalFlags config) $ \repoContext -> + get verbosity + repoContext + globalFlags' + getFlags + targets + +unpackAction :: GetFlags -> [String] -> Action +unpackAction getFlags extraArgs globalFlags = do + getAction getFlags extraArgs globalFlags + +initAction :: InitFlags -> [String] -> Action +initAction initFlags extraArgs globalFlags = do + let verbosity = fromFlag (initVerbosity initFlags) + when (extraArgs /= []) $ + die' verbosity $ "'init' doesn't take any extra arguments: " ++ unwords extraArgs + (_useSandbox, config) <- loadConfigOrSandboxConfig verbosity + (globalFlags { globalRequireSandbox = Flag False }) + let configFlags = savedConfigureFlags config `mappend` + -- override with `--with-compiler` from CLI if available + mempty { configHcPath = initHcPath initFlags } + let globalFlags' = savedGlobalFlags config `mappend` globalFlags + (comp, _, progdb) <- configCompilerAux' configFlags + withRepoContext verbosity globalFlags' $ \repoContext -> + initCabal verbosity + (configPackageDB' configFlags) + repoContext + comp + progdb + initFlags + 
+sandboxAction :: SandboxFlags -> [String] -> Action +sandboxAction sandboxFlags extraArgs globalFlags = do + let verbosity = fromFlag (sandboxVerbosity sandboxFlags) + case extraArgs of + -- Basic sandbox commands. + ["init"] -> sandboxInit verbosity sandboxFlags globalFlags + ["delete"] -> sandboxDelete verbosity sandboxFlags globalFlags + ("add-source":extra) -> do + when (noExtraArgs extra) $ + die' verbosity "The 'sandbox add-source' command expects at least one argument" + sandboxAddSource verbosity extra sandboxFlags globalFlags + ("delete-source":extra) -> do + when (noExtraArgs extra) $ + die' verbosity ("The 'sandbox delete-source' command expects " ++ + "at least one argument") + sandboxDeleteSource verbosity extra sandboxFlags globalFlags + ["list-sources"] -> sandboxListSources verbosity sandboxFlags globalFlags + + -- More advanced commands. + ("hc-pkg":extra) -> do + when (noExtraArgs extra) $ + die' verbosity $ "The 'sandbox hc-pkg' command expects at least one argument" + sandboxHcPkg verbosity sandboxFlags globalFlags extra + ["buildopts"] -> die' verbosity "Not implemented!" + + -- Hidden commands. + ["dump-pkgenv"] -> dumpPackageEnvironment verbosity sandboxFlags globalFlags + + -- Error handling. + [] -> die' verbosity $ "Please specify a subcommand (see 'help sandbox')" + _ -> die' verbosity $ "Unknown 'sandbox' subcommand: " ++ unwords extraArgs + + where + noExtraArgs = (<1) . 
length + +execAction :: ExecFlags -> [String] -> Action +execAction execFlags extraArgs globalFlags = do + let verbosity = fromFlag (execVerbosity execFlags) + (useSandbox, config) <- loadConfigOrSandboxConfig verbosity globalFlags + distPref <- findSavedDistPref config (execDistPref execFlags) + let configFlags = savedConfigureFlags config + configFlags' = configFlags { configDistPref = Flag distPref } + (comp, platform, progdb) <- getPersistOrConfigCompiler configFlags' + exec verbosity useSandbox comp platform progdb extraArgs + +userConfigAction :: UserConfigFlags -> [String] -> Action +userConfigAction ucflags extraArgs globalFlags = do + let verbosity = fromFlag (userConfigVerbosity ucflags) + force = fromFlag (userConfigForce ucflags) + extraLines = fromFlag (userConfigAppendLines ucflags) + case extraArgs of + ("init":_) -> do + path <- configFile + fileExists <- doesFileExist path + if (not fileExists || (fileExists && force)) + then void $ createDefaultConfigFile verbosity extraLines path + else die' verbosity $ path ++ " already exists." + ("diff":_) -> mapM_ putStrLn =<< userConfigDiff verbosity globalFlags extraLines + ("update":_) -> userConfigUpdate verbosity globalFlags extraLines + -- Error handling. + [] -> die' verbosity $ "Please specify a subcommand (see 'help user-config')" + _ -> die' verbosity $ "Unknown 'user-config' subcommand: " ++ unwords extraArgs + where configFile = getConfigFilePath (globalConfigFile globalFlags) + +-- | See 'Distribution.Client.Install.withWin32SelfUpgrade' for details. +-- +win32SelfUpgradeAction :: Win32SelfUpgradeFlags -> [String] -> Action +win32SelfUpgradeAction selfUpgradeFlags (pid:path:_extraArgs) _globalFlags = do + let verbosity = fromFlag (win32SelfUpgradeVerbosity selfUpgradeFlags) + Win32SelfUpgrade.deleteOldExeFile verbosity (read pid) path -- TODO: eradicateNoParse +win32SelfUpgradeAction _ _ _ = return () + +-- | Used as an entry point when cabal-install needs to invoke itself +-- as a setup script. 
This can happen e.g. when doing parallel builds. +-- +actAsSetupAction :: ActAsSetupFlags -> [String] -> Action +actAsSetupAction actAsSetupFlags args _globalFlags = + let bt = fromFlag (actAsSetupBuildType actAsSetupFlags) + in case bt of + Simple -> Simple.defaultMainArgs args + Configure -> Simple.defaultMainWithHooksArgs + Simple.autoconfUserHooks args + Make -> Make.defaultMainArgs args + Custom -> error "actAsSetupAction Custom" + +manpageAction :: [CommandSpec action] -> Flag Verbosity -> [String] -> Action +manpageAction commands flagVerbosity extraArgs _ = do + let verbosity = fromFlag flagVerbosity + unless (null extraArgs) $ + die' verbosity $ "'manpage' doesn't take any extra arguments: " ++ unwords extraArgs + pname <- getProgName + let cabalCmd = if takeExtension pname == ".exe" + then dropExtension pname + else pname + putStrLn $ manpage cabalCmd commands diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/README.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/README.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/README.md 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/README.md 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,155 @@ +The cabal-install package +========================= + +See the [Cabal web site] for more information. + +The `cabal-install` package provides a command line tool named `cabal`. +It uses the [Cabal] library and provides a user interface to the +Cabal/[Hackage] build automation and package management system. It can +build and install both local and remote packages, including +dependencies. + +[Cabal web site]: http://www.haskell.org/cabal/ +[Cabal]: ../Cabal/README.md + +Installing the `cabal` command-line tool +======================================== + +The `cabal-install` package requires a number of other packages, most of +which come with a standard GHC installation. 
It requires the [network] +package, which is sometimes packaged separately by Linux distributions; +for example, on Debian or Ubuntu, it is located in the +"libghc6-network-dev" package. + +`cabal` requires a few other Haskell packages that are not always +installed. The exact list is specified in the [.cabal] file or in the +[bootstrap.sh] file. All these packages are available from [Hackage]. + +Note that on some Unix systems you may need to install an additional +zlib development package using your system package manager; for example, +on Debian or Ubuntu, it is located in the "zlib1g-dev" package; on +Fedora, it is located in the "zlib-devel" package. It is required +because the Haskell zlib package uses the system zlib C library and +header files. + +The `cabal-install` package is now part of the [Haskell Platform], so you +do not usually need to install it separately. However, if you are +starting from a minimal GHC installation, you need to install +`cabal-install` manually. Since it is an ordinary Cabal package, +`cabal-install` can be built the standard way; to facilitate this, the +process has been partially automated. It is described below. + +[.cabal]: cabal-install.cabal +[network]: http://hackage.haskell.org/package/network +[Haskell Platform]: http://www.haskell.org/platform/ + +Quick start on Unix-like systems +-------------------------------- + +As a convenience for users on Unix-like systems, there is a +[bootstrap.sh] script that will download and install each of +`cabal-install`'s dependencies in turn. + + $ ./bootstrap.sh + +It will download and install the dependencies. The script will install the +library packages (vanilla, profiling and shared) into `$HOME/.cabal/` and the +`cabal` program into `$HOME/.cabal/bin/`. 
If you don't want to install profiling +and shared versions of the libraries, use + + $ EXTRA_CONFIGURE_OPTS="" ./bootstrap.sh + +You then have the choice either to place `$HOME/.cabal/bin` on your +`$PATH` or move the `cabal` program to somewhere on your `$PATH`. Next, +you can get the latest list of packages by running: + + $ cabal update + +This will also create a default configuration file, if it does not +already exist, at `$HOME/.cabal/config`. + +By default, `cabal` will install programs to `$HOME/.cabal/bin`. If you +do not want to add this directory to your `$PATH`, you can change +the setting in the config file; for example, you could use the +following: + + symlink-bindir: $HOME/bin + + +Quick start on Windows systems +------------------------------ + +For Windows users, a precompiled program ([cabal.exe]) is provided. +Download and put it somewhere on your `%PATH%` (for example, +`C:\Program Files\Haskell\bin`.) + +Next, you can get the latest list of packages by running: + + $ cabal update + +This will also create a default configuration file (if it does not +already exist) at +`C:\Documents and Settings\%USERNAME%\Application Data\cabal\config`. + +[cabal.exe]: http://www.haskell.org/cabal/release/cabal-install-latest/ + +Using `cabal` +============= + +There are two sets of commands: commands for working with a local +project build tree and those for working with packages distributed +from [Hackage]. + +For the list of the full set of commands and flags for each command, +run: + + $ cabal help + + +Commands for developers for local build trees +--------------------------------------------- + +The commands for local project build trees are almost the same as the +`runghc Setup` command-line interface you may already be familiar with. 
+In particular, it has the following commands: + + * `cabal configure` + * `cabal build` + * `cabal haddock` + * `cabal clean` + * `cabal sdist` + +The `install` command is somewhat different; it is an all-in-one +operation. If you run `cabal install` in your build tree, it will +configure, build, and install. It takes all the flags that `configure` +takes such as `--global` and `--prefix`. + +In addition, `cabal` will download and install any dependencies that are +not already installed. It can also rebuild packages to ensure a +consistent set of dependencies. + + +Commands for released Hackage packages +-------------------------------------- + + $ cabal update + +This command gets the latest list of packages from the [Hackage] server. +On occasion, this command must be run manually--for instance, if you +want to install a newly released package. + + $ cabal install xmonad + +This command installs one or more named packages, and all their +dependencies, from Hackage. By default, it installs the latest available +version; however, you may specify exact versions or version ranges. For +example, `cabal install alex-2.2` or `cabal install parsec < 3`. + + $ cabal list xml + +This does a search of the installed and available packages. It does a +case-insensitive substring match on the package name. 
+ + +[Hackage]: http://hackage.haskell.org +[bootstrap.sh]: bootstrap.sh diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/Setup.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,63 @@ +import Distribution.PackageDescription ( PackageDescription ) +import Distribution.Simple ( defaultMainWithHooks + , simpleUserHooks + , postBuild + , postCopy + , postInst + ) +import Distribution.Simple.InstallDirs ( mandir + , CopyDest (NoCopyDest) + ) +import Distribution.Simple.LocalBuildInfo ( LocalBuildInfo(..) + , absoluteInstallDirs + ) +import Distribution.Simple.Utils ( installOrdinaryFiles + , notice ) +import Distribution.Simple.Setup ( buildVerbosity + , copyDest + , copyVerbosity + , fromFlag + , installVerbosity + ) +import Distribution.Verbosity ( Verbosity ) + +import System.IO ( openFile + , IOMode (WriteMode) + ) +import System.Process ( runProcess ) +import System.FilePath ( () ) + +-- WARNING to editors of this file: +-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +-- At this moment (Cabal 1.23), whatever you write here must be +-- compatible with ALL Cabal libraries which we support bootstrapping +-- with. This is because pre-setup-depends versions of cabal-install will +-- build Setup.hs against the version of Cabal which MATCHES the library +-- that cabal-install was built against. There is no way of overriding +-- this behavior without bumping the required 'cabal-version' in our +-- Cabal file. Travis will let you know if we fail to install from +-- tarball! 
+ +main :: IO () +main = defaultMainWithHooks $ simpleUserHooks + { postBuild = \ _ flags _ lbi -> + buildManpage lbi (fromFlag $ buildVerbosity flags) + , postCopy = \ _ flags pkg lbi -> + installManpage pkg lbi (fromFlag $ copyVerbosity flags) (fromFlag $ copyDest flags) + , postInst = \ _ flags pkg lbi -> + installManpage pkg lbi (fromFlag $ installVerbosity flags) NoCopyDest + } + +buildManpage :: LocalBuildInfo -> Verbosity -> IO () +buildManpage lbi verbosity = do + let cabal = buildDir lbi "cabal/cabal" + manpage = buildDir lbi "cabal/cabal.1" + manpageHandle <- openFile manpage WriteMode + notice verbosity ("Generating manual page " ++ manpage ++ " ...") + _ <- runProcess cabal ["manpage"] Nothing Nothing Nothing (Just manpageHandle) Nothing + return () + +installManpage :: PackageDescription -> LocalBuildInfo -> Verbosity -> CopyDest -> IO () +installManpage pkg lbi verbosity copy = do + let destDir = mandir (absoluteInstallDirs pkg lbi copy) "man1" + installOrdinaryFiles verbosity destDir [(buildDir lbi "cabal", "cabal.1")] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: p q diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/p/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/p/p.cabal --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/p/p.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,8 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +library + exposed-modules: P + build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/p/P.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/p/P.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/p/P.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/p/P.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,4 @@ +module P where + +p :: Int +p = this_is_not_expected_to_compile diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/q/q.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,9 @@ +name: q +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +library + exposed-modules: Q + build-depends: base + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/q/Q.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/q/Q.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/q/Q.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/keep-going/q/Q.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,4 @@ +module Q where + +q :: Int +q = 42 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/local-tarball/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/local-tarball/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/local-tarball/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/local-tarball/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,2 @@ +packages: p-0.1.tar.gz + q/ Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/local-tarball/p-0.1.tar.gz and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/local-tarball/p-0.1.tar.gz differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/local-tarball/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/local-tarball/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/local-tarball/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/local-tarball/q/q.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,8 @@ +name: q +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +library + exposed-modules: Q + build-depends: base, p diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/local-tarball/q/Q.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/local-tarball/q/Q.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/local-tarball/q/Q.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/local-tarball/q/Q.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,5 @@ +module Q where + +import P + +q = p ++ " world" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom1/a.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom1/a.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom1/a.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom1/a.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,13 @@ +name: a +version: 0.1 +build-type: Custom +cabal-version: >= 1.10 + +-- explicit setup deps: +custom-setup + setup-depends: base, Cabal >= 1.18 + +library + exposed-modules: A + build-depends: base + default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom1/A.hs 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom1/A.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom1/A.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom1/A.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,4 @@ +module A where + +a :: Int +a = 42 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom1/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom1/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom1/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom1/Setup.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain >> writeFile "marker" "ok" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom2/a.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom2/a.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom2/a.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom2/a.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,11 @@ +name: a +version: 0.1 +build-type: Custom +cabal-version: >= 1.10 + +-- no explicit setup deps + +library + exposed-modules: A + build-depends: base + default-language: Haskell2010 diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom2/A.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom2/A.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom2/A.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom2/A.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,4 @@ +module A where + +a :: Int +a = 42 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom2/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom2/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom2/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-custom2/Setup.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain >> writeFile "marker" "ok" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-simple/a.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-simple/a.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-simple/a.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-simple/a.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,9 @@ +name: a +version: 0.1 +build-type: Simple +cabal-version: >= 1.10 + +library + exposed-modules: A + build-depends: base + 
default-language: Haskell2010 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-simple/A.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-simple/A.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-simple/A.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-simple/A.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,4 @@ +module A where + +a :: Int +a = 42 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-simple/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-simple/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-simple/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/build/setup-simple/Setup.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/bad-config/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/bad-config/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/bad-config/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/bad-config/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,4 @@ +packages: + +package foo + ghc-location: bar diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/build/a.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/build/a.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/build/a.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/build/a.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,8 @@ +name: a +version: 1 +build-type: Simple +cabal-version: >= 1.2 + +executable a + main-is: Main.hs + build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/build/Main.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/build/Main.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/build/Main.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/build/Main.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +main = thisNameDoesNotExist diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/configure/a.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/configure/a.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/configure/a.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/configure/a.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,9 @@ +name: a +version: 1 +build-type: Simple +-- This used to be a blank package with no components, +-- but I 
refactored new-build so that if a package has +-- no buildable components, we skip configuring it. +-- So put in a (failing) component so that we try to +-- configure. +executable a diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/no-pkg/empty.in cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/no-pkg/empty.in --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/no-pkg/empty.in 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/no-pkg/empty.in 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +this is just here to ensure the source control creates the dir diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/no-pkg2/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/no-pkg2/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/no-pkg2/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/exception/no-pkg2/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: ./ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: p q diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/p/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/p/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/p/p.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,8 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +library + exposed-modules: P + build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/p/P.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/p/P.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/p/P.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/p/P.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,4 @@ +module P where + +p :: Int +p = 42 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/q/q.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,9 @@ +name: q +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +library + exposed-modules: Q + build-depends: base + -- missing a dep on p here, so expect failure initially diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/q/Q.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/q/Q.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/q/Q.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/regression/3324/q/Q.hs 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,6 @@ +module Q where + +import P + +q :: Int +q = p diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/all-disabled/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/all-disabled/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/all-disabled/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/all-disabled/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: ./ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/all-disabled/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/all-disabled/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/all-disabled/p.cabal 1970-01-01 
00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/all-disabled/p.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,23 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +library + exposed-modules: Q + build-depends: base, filepath + buildable: False + +executable buildable-false + main-is: Main.hs + buildable: False + +test-suite solver-disabled + type: exitcode-stdio-1.0 + main-is: Test.hs + build-depends: a-package-that-does-not-exist + +benchmark user-disabled + type: exitcode-stdio-1.0 + main-is: Test.hs + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/benchmarks-disabled/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/benchmarks-disabled/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/benchmarks-disabled/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/benchmarks-disabled/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: ./ ./q/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/benchmarks-disabled/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/benchmarks-disabled/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/benchmarks-disabled/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/benchmarks-disabled/p.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,15 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.10 + +benchmark 
solver-disabled + type: exitcode-stdio-1.0 + main-is: Test.hs + build-depends: a-package-that-does-not-exist + +benchmark user-disabled + type: exitcode-stdio-1.0 + main-is: Test.hs + build-depends: base + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/benchmarks-disabled/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/benchmarks-disabled/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/benchmarks-disabled/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/benchmarks-disabled/q/q.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,10 @@ +name: q +version: 0.1 +build-type: Simple +cabal-version: >= 1.10 + +benchmark buildable-false + type: exitcode-stdio-1.0 + main-is: Main.hs + buildable: False + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/complex/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/complex/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/complex/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/complex/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: q/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/complex/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/complex/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/complex/q/q.cabal 
1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/complex/q/q.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,22 @@ +name: q +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +library + exposed-modules: Q + build-depends: base, filepath + +executable buildable-false + main-is: Main.hs + buildable: False + +test-suite solver-disabled + type: exitcode-stdio-1.0 + main-is: Test.hs + build-depends: a-package-that-does-not-exist + +benchmark user-disabled + type: exitcode-stdio-1.0 + main-is: Test.hs + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/empty/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/empty/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/empty/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/empty/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/empty-pkg/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/empty-pkg/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/empty-pkg/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/empty-pkg/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: ./ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/empty-pkg/p.cabal 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/empty-pkg/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/empty-pkg/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/empty-pkg/p.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,5 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/exes-disabled/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/exes-disabled/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/exes-disabled/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/exes-disabled/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: p/ q/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/exes-disabled/p/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/exes-disabled/p/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/exes-disabled/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/exes-disabled/p/p.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,9 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +executable p + main-is: P.hs + build-depends: base + buildable: False diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/exes-disabled/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/exes-disabled/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/exes-disabled/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/exes-disabled/q/q.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,9 @@ +name: q +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +executable q + main-is: Q.hs + build-depends: base + buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/lib-only/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/lib-only/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/lib-only/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/lib-only/p.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,8 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +library + exposed-modules: P + build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/libs-disabled/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/libs-disabled/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/libs-disabled/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/libs-disabled/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: p/ q/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/libs-disabled/p/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/libs-disabled/p/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/libs-disabled/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/libs-disabled/p/p.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,9 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +library + exposed-modules: P + build-depends: base + buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/libs-disabled/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/libs-disabled/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/libs-disabled/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/libs-disabled/q/q.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,9 @@ +name: q +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +library + exposed-modules: Q + build-depends: base + buildable: False diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-exes/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-exes/cabal.project --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-exes/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-exes/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: ./ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-exes/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-exes/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-exes/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-exes/p.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,12 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.10 + +executable p1 + main-is: P1.hs + build-depends: base + +executable p2 + main-is: P2.hs + build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-libs/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-libs/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-libs/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-libs/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: p/ q/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-libs/p/p.cabal 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-libs/p/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-libs/p/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-libs/p/p.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,8 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +library + exposed-modules: P + build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-libs/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-libs/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-libs/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-libs/q/q.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,8 @@ +name: q +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +library + exposed-modules: Q + build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-tests/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-tests/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-tests/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-tests/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: ./ diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-tests/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-tests/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-tests/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/multiple-tests/p.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,14 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.10 + +test-suite p1 + type: exitcode-stdio-1.0 + main-is: P1.hs + build-depends: base + +test-suite p2 + type: exitcode-stdio-1.0 + main-is: P2.hs + build-depends: base diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/simple/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/simple/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/simple/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/simple/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: ./ q/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/simple/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/simple/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/simple/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/simple/p.cabal 2018-11-26 08:42:59.000000000 
+0000 @@ -0,0 +1,12 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +library + exposed-modules: P + build-depends: base + +executable pexe + main-is: Main.hs + other-modules: PMain diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/simple/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/simple/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/simple/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/simple/q/q.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,12 @@ +name: q +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +library + exposed-modules: QQ + build-depends: base + +executable qexe + main-is: Main.hs + other-modules: QMain diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/test-only/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/test-only/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/test-only/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/test-only/p.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,9 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.2 + +test-suite pexe + type: exitcode-stdio-1.0 + main-is: Main.hs + other-modules: PMain diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/tests-disabled/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/tests-disabled/cabal.project --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/tests-disabled/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/tests-disabled/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: ./ ./q/ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/tests-disabled/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/tests-disabled/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/tests-disabled/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/tests-disabled/p.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,15 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.10 + +test-suite solver-disabled + type: exitcode-stdio-1.0 + main-is: Test.hs + build-depends: a-package-that-does-not-exist + +test-suite user-disabled + type: exitcode-stdio-1.0 + main-is: Test.hs + build-depends: base + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/tests-disabled/q/q.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/tests-disabled/q/q.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/tests-disabled/q/q.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/tests-disabled/q/q.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,10 @@ +name: q +version: 0.1 +build-type: Simple +cabal-version: >= 1.10 + +test-suite buildable-false + type: 
exitcode-stdio-1.0 + main-is: Main.hs + buildable: False + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/variety/cabal.project cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/variety/cabal.project --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/variety/cabal.project 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/variety/cabal.project 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1 @@ +packages: ./ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/variety/p.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/variety/p.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/variety/p.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/IntegrationTests2/targets/variety/p.cabal 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,27 @@ +name: p +version: 0.1 +build-type: Simple +cabal-version: >= 1.10 + +library + exposed-modules: P + build-depends: base + +foreign-library libp + type: native-shared + other-modules: FLib + +executable an-exe + main-is: Main.hs + other-modules: AModule + +test-suite a-testsuite + type: exitcode-stdio-1.0 + main-is: Test.hs + other-modules: AModule + +benchmark a-benchmark + type: exitcode-stdio-1.0 + main-is: Test.hs + other-modules: AModule + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/README.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/README.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/cabal-install-2.4.1.0/tests/README.md 1970-01-01 
00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/cabal-install-2.4.1.0/tests/README.md 2018-11-26 08:42:59.000000000 +0000 @@ -0,0 +1,27 @@ +Integration Tests +================= + +Each test is a shell script. Tests that share files (e.g., `.cabal` files) are +grouped under a common sub-directory of [IntegrationTests]. The framework +copies the whole group's directory before running each test, which allows tests +to reuse files, yet run independently. A group's tests are further divided into +`should_run` and `should_fail` directories, based on the expected exit status. +For example, the test +`IntegrationTests/exec/should_fail/exit_with_failure_without_args.sh` has access +to all files under `exec` and is expected to fail. + +Tests can specify their expected output. For a test named `x.sh`, `x.out` +specifies `stdout` and `x.err` specifies `stderr`. Both files are optional. +The framework expects an exact match between lines in the file and output, +except for lines beginning with "RE:", which are interpreted as regular +expressions. + +[IntegrationTests.hs] defines several environment variables: + +* `CABAL` - The path to the executable being tested. +* `GHC_PKG` - The path to ghc-pkg. +* `CABAL_ARGS` - A common set of arguments for running cabal. +* `CABAL_ARGS_NO_CONFIG_FILE` - `CABAL_ARGS` without `--config-file`. + +[IntegrationTests]: IntegrationTests +[IntegrationTests.hs]: IntegrationTests.hs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/cbits/ancilData.c cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/cbits/ancilData.c --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/cbits/ancilData.c 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/cbits/ancilData.c 1970-01-01 00:00:00.000000000 +0000 @@ -1,132 +0,0 @@ -/* - * Copyright(c), 2002 The GHC Team. 
- */ - -#ifdef aix_HOST_OS -#define _LINUX_SOURCE_COMPAT -// Required to get CMSG_SPACE/CMSG_LEN macros. See #265. -// Alternative is to #define COMPAT_43 and use the -// HAVE_STRUCT_MSGHDR_MSG_ACCRIGHTS code instead, but that means -// fiddling with the configure script too. -#endif - -#include "HsNet.h" -#include - -#if HAVE_STRUCT_MSGHDR_MSG_CONTROL || HAVE_STRUCT_MSGHDR_MSG_ACCRIGHTS /* until end */ - -/* - * Support for transmitting file descriptors. - * - * - */ - - -/* - * sendmsg() and recvmsg() wrappers for transmitting - * ancillary socket data. - * - * Doesn't provide the full generality of either, specifically: - * - * - no support for scattered read/writes. - * - only possible to send one ancillary chunk of data at a time. - */ - -int -sendFd(int sock, - int outfd) -{ - struct msghdr msg = {0}; - struct iovec iov[1]; - char buf[2]; -#if HAVE_STRUCT_MSGHDR_MSG_ACCRIGHTS - msg.msg_accrights = (void*)&outfd; - msg.msg_accrightslen = sizeof(int); -#else - struct cmsghdr *cmsg; - char ancBuffer[CMSG_SPACE(sizeof(int))]; - char* dPtr; - - msg.msg_control = ancBuffer; - msg.msg_controllen = sizeof(ancBuffer); - - cmsg = CMSG_FIRSTHDR(&msg); - cmsg->cmsg_level = SOL_SOCKET; - cmsg->cmsg_type = SCM_RIGHTS; - cmsg->cmsg_len = CMSG_LEN(sizeof(int)); - dPtr = (char*)CMSG_DATA(cmsg); - - *(int*)dPtr = outfd; - msg.msg_controllen = cmsg->cmsg_len; -#endif - - buf[0] = 0; buf[1] = '\0'; - iov[0].iov_base = buf; - iov[0].iov_len = 2; - - msg.msg_iov = iov; - msg.msg_iovlen = 1; - - return sendmsg(sock,&msg,0); -} - -int -recvFd(int sock) -{ - struct msghdr msg = {0}; - char duffBuf[10]; - int rc; - int len = sizeof(int); - struct iovec iov[1]; -#if HAVE_STRUCT_MSGHDR_MSG_CONTROL - struct cmsghdr *cmsg = NULL; - struct cmsghdr *cptr; -#else - int* fdBuffer; -#endif - int fd; - - iov[0].iov_base = duffBuf; - iov[0].iov_len = sizeof(duffBuf); - msg.msg_iov = iov; - msg.msg_iovlen = 1; - -#if HAVE_STRUCT_MSGHDR_MSG_CONTROL - cmsg = (struct 
cmsghdr*)malloc(CMSG_SPACE(len)); - if (cmsg==NULL) { - return -1; - } - - msg.msg_control = (void *)cmsg; - msg.msg_controllen = CMSG_LEN(len); -#else - fdBuffer = (int*)malloc(len); - if (fdBuffer) { - msg.msg_accrights = (void *)fdBuffer; - } else { - return -1; - } - msg.msg_accrightslen = len; -#endif - - if ((rc = recvmsg(sock,&msg,0)) < 0) { -#if HAVE_STRUCT_MSGHDR_MSG_CONTROL - free(cmsg); -#else - free(fdBuffer); -#endif - return rc; - } - -#if HAVE_STRUCT_MSGHDR_MSG_CONTROL - cptr = (struct cmsghdr*)CMSG_FIRSTHDR(&msg); - fd = *(int*)CMSG_DATA(cptr); - free(cmsg); -#else - fd = *(int*)fdBuffer; - free(fdBuffer); -#endif - return fd; -} - -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/cbits/asyncAccept.c cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/cbits/asyncAccept.c --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/cbits/asyncAccept.c 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/cbits/asyncAccept.c 1970-01-01 00:00:00.000000000 +0000 @@ -1,72 +0,0 @@ -/* - * (c) sof, 2003. - */ - -#include "HsNet.h" -#include "HsFFI.h" - -#if defined(HAVE_WINSOCK2_H) - -/* all the way to the end */ - -/* - * To support non-blocking accept()s with WinSock, we use the asyncDoProc# - * primop, which lets a Haskell thread call an external routine without - * blocking the progress of other threads. - * - * As can readily be seen, this is a low-level mechanism. 
- * - */ - -typedef struct AcceptData { - int fdSock; - int newSock; - void* sockAddr; - int size; -} AcceptData; - -/* - * Fill in parameter block that's passed along when the RTS invokes the - * accept()-calling proc below (acceptDoProc()) - */ -void* -newAcceptParams(int sock, - int sz, - void* sockaddr) -{ - AcceptData* data = (AcceptData*)malloc(sizeof(AcceptData)); - if (!data) return NULL; - data->fdSock = sock; - data->newSock = 0; - data->sockAddr = sockaddr; - data->size = sz; - - return data; -} - -/* Accessors for return code and accept()'s socket result. */ - -int -acceptNewSock(void* d) -{ - return (((AcceptData*)d)->newSock); -} - -/* Routine invoked by an RTS worker thread */ -int -acceptDoProc(void* param) -{ - SOCKET s; - - AcceptData* data = (AcceptData*)param; - s = accept( data->fdSock, - data->sockAddr, - &data->size); - data->newSock = s; - if ( s == INVALID_SOCKET ) { - return GetLastError(); - } else { - return 0; - } -} -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/cbits/HsNet.c cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/cbits/HsNet.c --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/cbits/HsNet.c 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/cbits/HsNet.c 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -/* ----------------------------------------------------------------------------- - * (c) The University of Glasgow 2002 - * - * static versions of the inline functions from HsNet.h - * -------------------------------------------------------------------------- */ - -#define INLINE -#include "HsNet.h" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/cbits/initWinSock.c cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/cbits/initWinSock.c --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/cbits/initWinSock.c 2018-07-07 14:47:29.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/cbits/initWinSock.c 1970-01-01 00:00:00.000000000 +0000 @@ -1,43 +0,0 @@ -#include "HsNet.h" -#include "HsFFI.h" - -#if defined(HAVE_WINSOCK2_H) - -static int winsock_inited = 0; - -static void -shutdownHandler(void) -{ - WSACleanup(); -} - -/* Initialising WinSock... */ -int -initWinSock () -{ - WORD wVersionRequested; - WSADATA wsaData; - int err; - - if (!winsock_inited) { - wVersionRequested = MAKEWORD( 2, 2 ); - - err = WSAStartup ( wVersionRequested, &wsaData ); - - if ( err != 0 ) { - return err; - } - - if ( LOBYTE( wsaData.wVersion ) != 2 || - HIBYTE( wsaData.wVersion ) != 2 ) { - WSACleanup(); - return (-1); - } - - atexit(shutdownHandler); - winsock_inited = 1; - } - return 0; -} - -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/cbits/winSockErr.c cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/cbits/winSockErr.c --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/cbits/winSockErr.c 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/cbits/winSockErr.c 1970-01-01 00:00:00.000000000 +0000 @@ -1,76 +0,0 @@ -#include "HsNet.h" -#include "HsFFI.h" - -#if defined(HAVE_WINSOCK2_H) -#include - -/* to the end */ - -const char* -getWSErrorDescr(int err) -{ - static char otherErrMsg[256]; - - switch (err) { - case WSAEINTR: return "Interrupted function call (WSAEINTR)"; - case WSAEBADF: return "bad socket descriptor (WSAEBADF)"; - case WSAEACCES: return "Permission denied (WSAEACCESS)"; - case WSAEFAULT: return "Bad address (WSAEFAULT)"; - case WSAEINVAL: return "Invalid argument (WSAEINVAL)"; - case WSAEMFILE: return "Too many open files (WSAEMFILE)"; - case WSAEWOULDBLOCK: return "Resource temporarily unavailable (WSAEWOULDBLOCK)"; - case WSAEINPROGRESS: return "Operation now in progress (WSAEINPROGRESS)"; - case WSAEALREADY: return "Operation already in progress (WSAEALREADY)"; - 
case WSAENOTSOCK: return "Socket operation on non-socket (WSAENOTSOCK)"; - case WSAEDESTADDRREQ: return "Destination address required (WSAEDESTADDRREQ)"; - case WSAEMSGSIZE: return "Message too long (WSAEMSGSIZE)"; - case WSAEPROTOTYPE: return "Protocol wrong type for socket (WSAEPROTOTYPE)"; - case WSAENOPROTOOPT: return "Bad protocol option (WSAENOPROTOOPT)"; - case WSAEPROTONOSUPPORT: return "Protocol not supported (WSAEPROTONOSUPPORT)"; - case WSAESOCKTNOSUPPORT: return "Socket type not supported (WSAESOCKTNOSUPPORT)"; - case WSAEOPNOTSUPP: return "Operation not supported (WSAEOPNOTSUPP)"; - case WSAEPFNOSUPPORT: return "Protocol family not supported (WSAEPFNOSUPPORT)"; - case WSAEAFNOSUPPORT: return "Address family not supported by protocol family (WSAEAFNOSUPPORT)"; - case WSAEADDRINUSE: return "Address already in use (WSAEADDRINUSE)"; - case WSAEADDRNOTAVAIL: return "Cannot assign requested address (WSAEADDRNOTAVAIL)"; - case WSAENETDOWN: return "Network is down (WSAENETDOWN)"; - case WSAENETUNREACH: return "Network is unreachable (WSAENETUNREACH)"; - case WSAENETRESET: return "Network dropped connection on reset (WSAENETRESET)"; - case WSAECONNABORTED: return "Software caused connection abort (WSAECONNABORTED)"; - case WSAECONNRESET: return "Connection reset by peer (WSAECONNRESET)"; - case WSAENOBUFS: return "No buffer space available (WSAENOBUFS)"; - case WSAEISCONN: return "Socket is already connected (WSAEISCONN)"; - case WSAENOTCONN: return "Socket is not connected (WSAENOTCONN)"; - case WSAESHUTDOWN: return "Cannot send after socket shutdown (WSAESHUTDOWN)"; - case WSAETOOMANYREFS: return "Too many references (WSAETOOMANYREFS)"; - case WSAETIMEDOUT: return "Connection timed out (WSAETIMEDOUT)"; - case WSAECONNREFUSED: return "Connection refused (WSAECONNREFUSED)"; - case WSAELOOP: return "Too many levels of symbolic links (WSAELOOP)"; - case WSAENAMETOOLONG: return "Filename too long (WSAENAMETOOLONG)"; - case WSAEHOSTDOWN: return "Host is down 
(WSAEHOSTDOWN)"; - case WSAEHOSTUNREACH: return "Host is unreachable (WSAEHOSTUNREACH)"; - case WSAENOTEMPTY: return "Resource not empty (WSAENOTEMPTY)"; - case WSAEPROCLIM: return "Too many processes (WSAEPROCLIM)"; - case WSAEUSERS: return "Too many users (WSAEUSERS)"; - case WSAEDQUOT: return "Disk quota exceeded (WSAEDQUOT)"; - case WSAESTALE: return "Stale NFS file handle (WSAESTALE)"; - case WSAEREMOTE: return "Too many levels of remote in path (WSAEREMOTE)"; - case WSAEDISCON: return "Graceful shutdown in progress (WSAEDISCON)"; - case WSASYSNOTREADY: return "Network subsystem is unavailable (WSASYSNOTREADY)"; - case WSAVERNOTSUPPORTED: return "Winsock.dll version out of range (WSAVERNOTSUPPORTED)"; - case WSANOTINITIALISED: return "Successful WSAStartup not yet performed (WSANOTINITIALISED)"; -#ifdef WSATYPE_NOT_FOUND - case WSATYPE_NOT_FOUND: return "Class type not found (WSATYPE_NOT_FOUND)"; -#endif - case WSAHOST_NOT_FOUND: return "Host not found (WSAHOST_NOT_FOUND)"; - case WSATRY_AGAIN: return "Nonauthoritative host not found (WSATRY_AGAIN)"; - case WSANO_RECOVERY: return "This is a nonrecoverable error (WSANO_RECOVERY)"; - case WSANO_DATA: return "Valid name, no data record of requested type (WSANO_DATA)"; - default: - sprintf(otherErrMsg, "Unknown WinSock error: %u", err); - return otherErrMsg; - } -} - -#endif - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/CHANGELOG.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/CHANGELOG.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/CHANGELOG.md 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/CHANGELOG.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,138 +0,0 @@ -## Version 2.7.0.2 - -* Removing withMVar to avoid the deadlock between "accept" and "close" - [#330](https://github.com/haskell/network/pull/330) -* "close" does not throw exceptions. 
A new API: "close'" throws - exceptions when necessary. - [#337](https://github.com/haskell/network/pull/337) -* Fixing the hang of lazy sendAll. - [#340](https://github.com/haskell/network/pull/340) -* Installing NetDef.h (#334) - [#334](https://github.com/haskell/network/pull/334) - -## Version 2.7.0.1 - - * A new API: socketPortSafe. - [#319](https://github.com/haskell/network/pull/319) - * Fixing a drain bug of sendAll. - [#320](https://github.com/haskell/network/pull/320) - * Porting the new CALLCONV convention from master. - [#313](https://github.com/haskell/network/pull/313) - * Withdrawing the deprecations of packFamily and unpackFamily. - [#324](https://github.com/haskell/network/pull/324) - -## Version 2.7.0.0 - - * Obsoleting the Network module. - * Obsoleting the Network.BSD module. - * Obsoleting APIs: MkSocket, htonl, ntohl, - getPeerCred, getPeerEid, - send, sendTo, recv, recvFrom, recvLen, - inet_addr, inet_ntoa, - isConnected, isBound, isListening, isReadable, isWritable, - aNY_PORT, iNADDR_ANY, iN6ADDR_ANY, sOMAXCONN, - sOL_SOCKET, sCM_RIGHTS, - packFamily, unpackFamily, packSocketType - * Do not closeFd within sendFd. - [#271](https://github.com/haskell/network/pull/271) - * Exporting ifNameToIndex and ifIndexToName from Network.Socket. - * New APIs: setCloseOnExecIfNeeded, getCloseOnExec and getNonBlock - * New APIs: isUnixDomainSocketAvailable and getPeerCredential - * socketPair, sendFd and recvFd are exported even on Windows. - -## Version 2.6.3.5 - - * Reverting "Do not closeFd within sendFd" - [#271](https://github.com/haskell/network/pull/271) - -## Version 2.6.3.4 - - * Don't touch IPv6Only when running on OpenBSD - [#227](https://github.com/haskell/network/pull/227) - * Do not closeFd within sendFd - [#271](https://github.com/haskell/network/pull/271) - * Updating examples and docs. 
- -## Version 2.6.3.3 - - * Adds a function to show the defaultHints without reading their undefined fields - [#291](https://github.com/haskell/network/pull/291) - * Improve exception error messages for getAddrInfo and getNameInfo - [#289](https://github.com/haskell/network/pull/289) - * Deprecating SockAddrCan. - -## Version 2.6.3.2 - - * Zero memory of `sockaddr_un` if abstract socket - [#220](https://github.com/haskell/network/pull/220) - - * Improving error messages - [#232](https://github.com/haskell/network/pull/232) - - * Allow non-blocking file descriptors via `setNonBlockIfNeeded` - [#242](https://github.com/haskell/network/pull/242) - - * Update config.{guess,sub} to latest version - [#244](https://github.com/haskell/network/pull/244) - - * Rename `my_inet_ntoa` to avoid symbol conflicts - [#228](https://github.com/haskell/network/pull/228) - - * Test infrastructure improvements - [#219](https://github.com/haskell/network/pull/219) - [#217](https://github.com/haskell/network/pull/217) - [#218](https://github.com/haskell/network/pull/218) - - * House keeping and cleanup - [#238](https://github.com/haskell/network/pull/238) - [#237](https://github.com/haskell/network/pull/237) - -## Version 2.6.3.1 - - * Reverse breaking exception change in `Network.Socket.ByteString.recv` - [#215](https://github.com/haskell/network/issues/215) - -## Version 2.6.3.0 - - * New maintainers: Evan Borden (@eborden) and Kazu Yamamoto (@kazu-yamamoto). - The maintainer for a long period, Johan Tibell (@tibbe) stepped down. - Thank you, Johan, for your hard work for a long time. - - * New APIs: ntohl, htonl,hostAddressToTuple{,6} and tupleToHostAddress{,6}. - [#210](https://github.com/haskell/network/pull/210) - - * Added a Read instance for PortNumber. [#145](https://github.com/haskell/network/pull/145) - - * We only set the IPV6_V6ONLY flag to 0 for stream and datagram socket types, - as opposed to all of them. This makes it possible to use ICMPv6. 
- [#180](https://github.com/haskell/network/pull/180) - [#181](https://github.com/haskell/network/pull/181) - - * Work around GHC bug #12020. Socket errors no longer cause segfaults or - hangs on Windows. [#192](https://github.com/haskell/network/pull/192) - - * Various documentation improvements and the deprecated pragmas. - [#186](https://github.com/haskell/network/pull/186) - [#201](https://github.com/haskell/network/issues/201) - [#205](https://github.com/haskell/network/pull/205) - [#206](https://github.com/haskell/network/pull/206) - [#211](https://github.com/haskell/network/issues/211) - - * Various internal improvements. - [#193](https://github.com/haskell/network/pull/193) - [#200](https://github.com/haskell/network/pull/200) - -## Version 2.6.2.1 - - * Regenerate configure and HsNetworkConfig.h.in. - - * Better detection of CAN sockets. - -## Version 2.6.2.0 - - * Add support for TCP_USER_TIMEOUT. - - * Don't conditionally export the SockAddr constructors. - - * Add isSupportSockAddr to allow checking for supported address types - at runtime. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/config.guess cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/config.guess --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/config.guess 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/config.guess 1970-01-01 00:00:00.000000000 +0000 @@ -1,1466 +0,0 @@ -#! /bin/sh -# Attempt to guess a canonical system name. -# Copyright 1992-2017 Free Software Foundation, Inc. - -timestamp='2017-03-05' - -# This file is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 3 of the License, or -# (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, see . -# -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that -# program. This Exception is an additional permission under section 7 -# of the GNU General Public License, version 3 ("GPLv3"). -# -# Originally written by Per Bothner; maintained since 2000 by Ben Elliston. -# -# You can get the latest version of this script from: -# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess -# -# Please send patches to . - - -me=`echo "$0" | sed -e 's,.*/,,'` - -usage="\ -Usage: $0 [OPTION] - -Output the configuration name of the system \`$me' is run on. - -Operation modes: - -h, --help print this help, then exit - -t, --time-stamp print date of last modification, then exit - -v, --version print version number, then exit - -Report bugs and patches to ." - -version="\ -GNU config.guess ($timestamp) - -Originally written by Per Bothner. -Copyright 1992-2017 Free Software Foundation, Inc. - -This is free software; see the source for copying conditions. There is NO -warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." - -help=" -Try \`$me --help' for more information." - -# Parse command line -while test $# -gt 0 ; do - case $1 in - --time-stamp | --time* | -t ) - echo "$timestamp" ; exit ;; - --version | -v ) - echo "$version" ; exit ;; - --help | --h* | -h ) - echo "$usage"; exit ;; - -- ) # Stop option processing - shift; break ;; - - ) # Use stdin as input. 
- break ;; - -* ) - echo "$me: invalid option $1$help" >&2 - exit 1 ;; - * ) - break ;; - esac -done - -if test $# != 0; then - echo "$me: too many arguments$help" >&2 - exit 1 -fi - -trap 'exit 1' 1 2 15 - -# CC_FOR_BUILD -- compiler used by this script. Note that the use of a -# compiler to aid in system detection is discouraged as it requires -# temporary files to be created and, as you can see below, it is a -# headache to deal with in a portable fashion. - -# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still -# use `HOST_CC' if defined, but it is deprecated. - -# Portable tmp directory creation inspired by the Autoconf team. - -set_cc_for_build=' -trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ; -trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ; -: ${TMPDIR=/tmp} ; - { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } || - { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } || - { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } || - { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ; -dummy=$tmp/dummy ; -tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ; -case $CC_FOR_BUILD,$HOST_CC,$CC in - ,,) echo "int x;" > $dummy.c ; - for c in cc gcc c89 c99 ; do - if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then - CC_FOR_BUILD="$c"; break ; - fi ; - done ; - if test x"$CC_FOR_BUILD" = x ; then - CC_FOR_BUILD=no_compiler_found ; - fi - ;; - ,,*) CC_FOR_BUILD=$CC ;; - ,*,*) CC_FOR_BUILD=$HOST_CC ;; -esac ; set_cc_for_build= ;' - -# This is needed to find uname on a Pyramid OSx when run in the BSD universe. 
-# (ghazi@noc.rutgers.edu 1994-08-24) -if (test -f /.attbin/uname) >/dev/null 2>&1 ; then - PATH=$PATH:/.attbin ; export PATH -fi - -UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown -UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown -UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown -UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown - -case "${UNAME_SYSTEM}" in -Linux|GNU|GNU/*) - # If the system lacks a compiler, then just pick glibc. - # We could probably try harder. - LIBC=gnu - - eval $set_cc_for_build - cat <<-EOF > $dummy.c - #include - #if defined(__UCLIBC__) - LIBC=uclibc - #elif defined(__dietlibc__) - LIBC=dietlibc - #else - LIBC=gnu - #endif - EOF - eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC' | sed 's, ,,g'` - ;; -esac - -# Note: order is significant - the case branches are not exclusive. - -case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in - *:NetBSD:*:*) - # NetBSD (nbsd) targets should (where applicable) match one or - # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*, - # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently - # switched to ELF, *-*-netbsd* would select the old - # object file format. This provides both forward - # compatibility and a consistent mechanism for selecting the - # object file format. - # - # Note: NetBSD doesn't particularly care about the vendor - # portion of the name. We always set it to "unknown". 
- sysctl="sysctl -n hw.machine_arch" - UNAME_MACHINE_ARCH=`(uname -p 2>/dev/null || \ - /sbin/$sysctl 2>/dev/null || \ - /usr/sbin/$sysctl 2>/dev/null || \ - echo unknown)` - case "${UNAME_MACHINE_ARCH}" in - armeb) machine=armeb-unknown ;; - arm*) machine=arm-unknown ;; - sh3el) machine=shl-unknown ;; - sh3eb) machine=sh-unknown ;; - sh5el) machine=sh5le-unknown ;; - earmv*) - arch=`echo ${UNAME_MACHINE_ARCH} | sed -e 's,^e\(armv[0-9]\).*$,\1,'` - endian=`echo ${UNAME_MACHINE_ARCH} | sed -ne 's,^.*\(eb\)$,\1,p'` - machine=${arch}${endian}-unknown - ;; - *) machine=${UNAME_MACHINE_ARCH}-unknown ;; - esac - # The Operating System including object format, if it has switched - # to ELF recently (or will in the future) and ABI. - case "${UNAME_MACHINE_ARCH}" in - earm*) - os=netbsdelf - ;; - arm*|i386|m68k|ns32k|sh3*|sparc|vax) - eval $set_cc_for_build - if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ - | grep -q __ELF__ - then - # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout). - # Return netbsd for either. FIX? - os=netbsd - else - os=netbsdelf - fi - ;; - *) - os=netbsd - ;; - esac - # Determine ABI tags. - case "${UNAME_MACHINE_ARCH}" in - earm*) - expr='s/^earmv[0-9]/-eabi/;s/eb$//' - abi=`echo ${UNAME_MACHINE_ARCH} | sed -e "$expr"` - ;; - esac - # The OS release - # Debian GNU/NetBSD machines have a different userland, and - # thus, need a distinct triplet. However, they do not need - # kernel version information, so it can be replaced with a - # suitable tag, in the style of linux-gnu. - case "${UNAME_VERSION}" in - Debian*) - release='-gnu' - ;; - *) - release=`echo ${UNAME_RELEASE} | sed -e 's/[-_].*//' | cut -d. -f1,2` - ;; - esac - # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM: - # contains redundant information, the shorter form: - # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. 
- echo "${machine}-${os}${release}${abi}" - exit ;; - *:Bitrig:*:*) - UNAME_MACHINE_ARCH=`arch | sed 's/Bitrig.//'` - echo ${UNAME_MACHINE_ARCH}-unknown-bitrig${UNAME_RELEASE} - exit ;; - *:OpenBSD:*:*) - UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'` - echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE} - exit ;; - *:LibertyBSD:*:*) - UNAME_MACHINE_ARCH=`arch | sed 's/^.*BSD\.//'` - echo ${UNAME_MACHINE_ARCH}-unknown-libertybsd${UNAME_RELEASE} - exit ;; - *:ekkoBSD:*:*) - echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE} - exit ;; - *:SolidBSD:*:*) - echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE} - exit ;; - macppc:MirBSD:*:*) - echo powerpc-unknown-mirbsd${UNAME_RELEASE} - exit ;; - *:MirBSD:*:*) - echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE} - exit ;; - *:Sortix:*:*) - echo ${UNAME_MACHINE}-unknown-sortix - exit ;; - alpha:OSF1:*:*) - case $UNAME_RELEASE in - *4.0) - UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'` - ;; - *5.*) - UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'` - ;; - esac - # According to Compaq, /usr/sbin/psrinfo has been available on - # OSF/1 and Tru64 systems produced since 1995. I hope that - # covers most systems running today. This code pipes the CPU - # types through head -n 1, so we only detect the type of CPU 0. 
- ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1` - case "$ALPHA_CPU_TYPE" in - "EV4 (21064)") - UNAME_MACHINE=alpha ;; - "EV4.5 (21064)") - UNAME_MACHINE=alpha ;; - "LCA4 (21066/21068)") - UNAME_MACHINE=alpha ;; - "EV5 (21164)") - UNAME_MACHINE=alphaev5 ;; - "EV5.6 (21164A)") - UNAME_MACHINE=alphaev56 ;; - "EV5.6 (21164PC)") - UNAME_MACHINE=alphapca56 ;; - "EV5.7 (21164PC)") - UNAME_MACHINE=alphapca57 ;; - "EV6 (21264)") - UNAME_MACHINE=alphaev6 ;; - "EV6.7 (21264A)") - UNAME_MACHINE=alphaev67 ;; - "EV6.8CB (21264C)") - UNAME_MACHINE=alphaev68 ;; - "EV6.8AL (21264B)") - UNAME_MACHINE=alphaev68 ;; - "EV6.8CX (21264D)") - UNAME_MACHINE=alphaev68 ;; - "EV6.9A (21264/EV69A)") - UNAME_MACHINE=alphaev69 ;; - "EV7 (21364)") - UNAME_MACHINE=alphaev7 ;; - "EV7.9 (21364A)") - UNAME_MACHINE=alphaev79 ;; - esac - # A Pn.n version is a patched version. - # A Vn.n version is a released version. - # A Tn.n version is a released field test version. - # A Xn.n version is an unreleased experimental baselevel. - # 1.2 uses "1.2" for uname -r. - echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz` - # Reset EXIT trap before exiting to avoid spurious non-zero exit code. - exitcode=$? - trap '' 0 - exit $exitcode ;; - Alpha\ *:Windows_NT*:*) - # How do we know it's Interix rather than the generic POSIX subsystem? - # Should we change UNAME_MACHINE based on the output of uname instead - # of the specific Alpha model? 
- echo alpha-pc-interix - exit ;; - 21064:Windows_NT:50:3) - echo alpha-dec-winnt3.5 - exit ;; - Amiga*:UNIX_System_V:4.0:*) - echo m68k-unknown-sysv4 - exit ;; - *:[Aa]miga[Oo][Ss]:*:*) - echo ${UNAME_MACHINE}-unknown-amigaos - exit ;; - *:[Mm]orph[Oo][Ss]:*:*) - echo ${UNAME_MACHINE}-unknown-morphos - exit ;; - *:OS/390:*:*) - echo i370-ibm-openedition - exit ;; - *:z/VM:*:*) - echo s390-ibm-zvmoe - exit ;; - *:OS400:*:*) - echo powerpc-ibm-os400 - exit ;; - arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) - echo arm-acorn-riscix${UNAME_RELEASE} - exit ;; - arm*:riscos:*:*|arm*:RISCOS:*:*) - echo arm-unknown-riscos - exit ;; - SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) - echo hppa1.1-hitachi-hiuxmpp - exit ;; - Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*) - # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE. - if test "`(/bin/universe) 2>/dev/null`" = att ; then - echo pyramid-pyramid-sysv3 - else - echo pyramid-pyramid-bsd - fi - exit ;; - NILE*:*:*:dcosx) - echo pyramid-pyramid-svr4 - exit ;; - DRS?6000:unix:4.0:6*) - echo sparc-icl-nx6 - exit ;; - DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*) - case `/usr/bin/uname -p` in - sparc) echo sparc-icl-nx7; exit ;; - esac ;; - s390x:SunOS:*:*) - echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` - exit ;; - sun4H:SunOS:5.*:*) - echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` - exit ;; - sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) - echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` - exit ;; - i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*) - echo i386-pc-auroraux${UNAME_RELEASE} - exit ;; - i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*) - eval $set_cc_for_build - SUN_ARCH=i386 - # If there is a compiler, see if it is configured for 64-bit objects. - # Note that the Sun cc does not turn __LP64__ into 1 like gcc does. - # This test works for both compilers. 
- if [ "$CC_FOR_BUILD" != no_compiler_found ]; then - if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \ - (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ - grep IS_64BIT_ARCH >/dev/null - then - SUN_ARCH=x86_64 - fi - fi - echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` - exit ;; - sun4*:SunOS:6*:*) - # According to config.sub, this is the proper way to canonicalize - # SunOS6. Hard to guess exactly what SunOS6 will be like, but - # it's likely to be more like Solaris than SunOS4. - echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` - exit ;; - sun4*:SunOS:*:*) - case "`/usr/bin/arch -k`" in - Series*|S4*) - UNAME_RELEASE=`uname -v` - ;; - esac - # Japanese Language versions have a version number like `4.1.3-JL'. - echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'` - exit ;; - sun3*:SunOS:*:*) - echo m68k-sun-sunos${UNAME_RELEASE} - exit ;; - sun*:*:4.2BSD:*) - UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null` - test "x${UNAME_RELEASE}" = x && UNAME_RELEASE=3 - case "`/bin/arch`" in - sun3) - echo m68k-sun-sunos${UNAME_RELEASE} - ;; - sun4) - echo sparc-sun-sunos${UNAME_RELEASE} - ;; - esac - exit ;; - aushp:SunOS:*:*) - echo sparc-auspex-sunos${UNAME_RELEASE} - exit ;; - # The situation for MiNT is a little confusing. The machine name - # can be virtually everything (everything which is not - # "atarist" or "atariste" at least should have a processor - # > m68000). The system name ranges from "MiNT" over "FreeMiNT" - # to the lowercase version "mint" (or "freemint"). Finally - # the system name "TOS" denotes a system which is actually not - # MiNT. But MiNT is downward compatible to TOS, so this should - # be no problem. 
- atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) - echo m68k-atari-mint${UNAME_RELEASE} - exit ;; - atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) - echo m68k-atari-mint${UNAME_RELEASE} - exit ;; - *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) - echo m68k-atari-mint${UNAME_RELEASE} - exit ;; - milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) - echo m68k-milan-mint${UNAME_RELEASE} - exit ;; - hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) - echo m68k-hades-mint${UNAME_RELEASE} - exit ;; - *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) - echo m68k-unknown-mint${UNAME_RELEASE} - exit ;; - m68k:machten:*:*) - echo m68k-apple-machten${UNAME_RELEASE} - exit ;; - powerpc:machten:*:*) - echo powerpc-apple-machten${UNAME_RELEASE} - exit ;; - RISC*:Mach:*:*) - echo mips-dec-mach_bsd4.3 - exit ;; - RISC*:ULTRIX:*:*) - echo mips-dec-ultrix${UNAME_RELEASE} - exit ;; - VAX*:ULTRIX*:*:*) - echo vax-dec-ultrix${UNAME_RELEASE} - exit ;; - 2020:CLIX:*:* | 2430:CLIX:*:*) - echo clipper-intergraph-clix${UNAME_RELEASE} - exit ;; - mips:*:*:UMIPS | mips:*:*:RISCos) - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c -#ifdef __cplusplus -#include /* for printf() prototype */ - int main (int argc, char *argv[]) { -#else - int main (argc, argv) int argc; char *argv[]; { -#endif - #if defined (host_mips) && defined (MIPSEB) - #if defined (SYSTYPE_SYSV) - printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0); - #endif - #if defined (SYSTYPE_SVR4) - printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0); - #endif - #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD) - printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0); - #endif - #endif - exit (-1); - } -EOF - $CC_FOR_BUILD -o $dummy $dummy.c && - dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` && - SYSTEM_NAME=`$dummy $dummyarg` && - { echo "$SYSTEM_NAME"; exit; } - echo mips-mips-riscos${UNAME_RELEASE} - exit ;; - Motorola:PowerMAX_OS:*:*) - echo 
powerpc-motorola-powermax - exit ;; - Motorola:*:4.3:PL8-*) - echo powerpc-harris-powermax - exit ;; - Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*) - echo powerpc-harris-powermax - exit ;; - Night_Hawk:Power_UNIX:*:*) - echo powerpc-harris-powerunix - exit ;; - m88k:CX/UX:7*:*) - echo m88k-harris-cxux7 - exit ;; - m88k:*:4*:R4*) - echo m88k-motorola-sysv4 - exit ;; - m88k:*:3*:R3*) - echo m88k-motorola-sysv3 - exit ;; - AViiON:dgux:*:*) - # DG/UX returns AViiON for all architectures - UNAME_PROCESSOR=`/usr/bin/uname -p` - if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ] - then - if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \ - [ ${TARGET_BINARY_INTERFACE}x = x ] - then - echo m88k-dg-dgux${UNAME_RELEASE} - else - echo m88k-dg-dguxbcs${UNAME_RELEASE} - fi - else - echo i586-dg-dgux${UNAME_RELEASE} - fi - exit ;; - M88*:DolphinOS:*:*) # DolphinOS (SVR3) - echo m88k-dolphin-sysv3 - exit ;; - M88*:*:R3*:*) - # Delta 88k system running SVR3 - echo m88k-motorola-sysv3 - exit ;; - XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3) - echo m88k-tektronix-sysv3 - exit ;; - Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD) - echo m68k-tektronix-bsd - exit ;; - *:IRIX*:*:*) - echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'` - exit ;; - ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX. 
- echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id - exit ;; # Note that: echo "'`uname -s`'" gives 'AIX ' - i*86:AIX:*:*) - echo i386-ibm-aix - exit ;; - ia64:AIX:*:*) - if [ -x /usr/bin/oslevel ] ; then - IBM_REV=`/usr/bin/oslevel` - else - IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} - fi - echo ${UNAME_MACHINE}-ibm-aix${IBM_REV} - exit ;; - *:AIX:2:3) - if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c - #include - - main() - { - if (!__power_pc()) - exit(1); - puts("powerpc-ibm-aix3.2.5"); - exit(0); - } -EOF - if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` - then - echo "$SYSTEM_NAME" - else - echo rs6000-ibm-aix3.2.5 - fi - elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then - echo rs6000-ibm-aix3.2.4 - else - echo rs6000-ibm-aix3.2 - fi - exit ;; - *:AIX:*:[4567]) - IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'` - if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then - IBM_ARCH=rs6000 - else - IBM_ARCH=powerpc - fi - if [ -x /usr/bin/lslpp ] ; then - IBM_REV=`/usr/bin/lslpp -Lqc bos.rte.libc | - awk -F: '{ print $3 }' | sed s/[0-9]*$/0/` - else - IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} - fi - echo ${IBM_ARCH}-ibm-aix${IBM_REV} - exit ;; - *:AIX:*:*) - echo rs6000-ibm-aix - exit ;; - ibmrt:4.4BSD:*|romp-ibm:BSD:*) - echo romp-ibm-bsd4.4 - exit ;; - ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and - echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to - exit ;; # report: romp-ibm BSD 4.3 - *:BOSX:*:*) - echo rs6000-bull-bosx - exit ;; - DPX/2?00:B.O.S.:*:*) - echo m68k-bull-sysv3 - exit ;; - 9000/[34]??:4.3bsd:1.*:*) - echo m68k-hp-bsd - exit ;; - hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*) - echo m68k-hp-bsd4.4 - exit ;; - 9000/[34678]??:HP-UX:*:*) - HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` - case "${UNAME_MACHINE}" in - 9000/31? ) HP_ARCH=m68000 ;; - 9000/[34]?? 
) HP_ARCH=m68k ;; - 9000/[678][0-9][0-9]) - if [ -x /usr/bin/getconf ]; then - sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null` - sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` - case "${sc_cpu_version}" in - 523) HP_ARCH=hppa1.0 ;; # CPU_PA_RISC1_0 - 528) HP_ARCH=hppa1.1 ;; # CPU_PA_RISC1_1 - 532) # CPU_PA_RISC2_0 - case "${sc_kernel_bits}" in - 32) HP_ARCH=hppa2.0n ;; - 64) HP_ARCH=hppa2.0w ;; - '') HP_ARCH=hppa2.0 ;; # HP-UX 10.20 - esac ;; - esac - fi - if [ "${HP_ARCH}" = "" ]; then - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c - - #define _HPUX_SOURCE - #include - #include - - int main () - { - #if defined(_SC_KERNEL_BITS) - long bits = sysconf(_SC_KERNEL_BITS); - #endif - long cpu = sysconf (_SC_CPU_VERSION); - - switch (cpu) - { - case CPU_PA_RISC1_0: puts ("hppa1.0"); break; - case CPU_PA_RISC1_1: puts ("hppa1.1"); break; - case CPU_PA_RISC2_0: - #if defined(_SC_KERNEL_BITS) - switch (bits) - { - case 64: puts ("hppa2.0w"); break; - case 32: puts ("hppa2.0n"); break; - default: puts ("hppa2.0"); break; - } break; - #else /* !defined(_SC_KERNEL_BITS) */ - puts ("hppa2.0"); break; - #endif - default: puts ("hppa1.0"); break; - } - exit (0); - } -EOF - (CCOPTS="" $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy` - test -z "$HP_ARCH" && HP_ARCH=hppa - fi ;; - esac - if [ ${HP_ARCH} = hppa2.0w ] - then - eval $set_cc_for_build - - # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating - # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler - # generating 64-bit code. 
GNU and HP use different nomenclature: - # - # $ CC_FOR_BUILD=cc ./config.guess - # => hppa2.0w-hp-hpux11.23 - # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess - # => hppa64-hp-hpux11.23 - - if echo __LP64__ | (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | - grep -q __LP64__ - then - HP_ARCH=hppa2.0w - else - HP_ARCH=hppa64 - fi - fi - echo ${HP_ARCH}-hp-hpux${HPUX_REV} - exit ;; - ia64:HP-UX:*:*) - HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` - echo ia64-hp-hpux${HPUX_REV} - exit ;; - 3050*:HI-UX:*:*) - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c - #include - int - main () - { - long cpu = sysconf (_SC_CPU_VERSION); - /* The order matters, because CPU_IS_HP_MC68K erroneously returns - true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct - results, however. */ - if (CPU_IS_PA_RISC (cpu)) - { - switch (cpu) - { - case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break; - case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break; - case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break; - default: puts ("hppa-hitachi-hiuxwe2"); break; - } - } - else if (CPU_IS_HP_MC68K (cpu)) - puts ("m68k-hitachi-hiuxwe2"); - else puts ("unknown-hitachi-hiuxwe2"); - exit (0); - } -EOF - $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` && - { echo "$SYSTEM_NAME"; exit; } - echo unknown-hitachi-hiuxwe2 - exit ;; - 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* ) - echo hppa1.1-hp-bsd - exit ;; - 9000/8??:4.3bsd:*:*) - echo hppa1.0-hp-bsd - exit ;; - *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*) - echo hppa1.0-hp-mpeix - exit ;; - hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* ) - echo hppa1.1-hp-osf - exit ;; - hp8??:OSF1:*:*) - echo hppa1.0-hp-osf - exit ;; - i*86:OSF1:*:*) - if [ -x /usr/sbin/sysversion ] ; then - echo ${UNAME_MACHINE}-unknown-osf1mk - else - echo ${UNAME_MACHINE}-unknown-osf1 - fi - exit ;; - parisc*:Lites*:*:*) - echo hppa1.1-hp-lites - exit ;; - C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*) - echo c1-convex-bsd - exit ;; - C2*:ConvexOS:*:* | 
convex:ConvexOS:C2*:*) - if getsysinfo -f scalar_acc - then echo c32-convex-bsd - else echo c2-convex-bsd - fi - exit ;; - C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*) - echo c34-convex-bsd - exit ;; - C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*) - echo c38-convex-bsd - exit ;; - C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*) - echo c4-convex-bsd - exit ;; - CRAY*Y-MP:*:*:*) - echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' - exit ;; - CRAY*[A-Z]90:*:*:*) - echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \ - | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \ - -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \ - -e 's/\.[^.]*$/.X/' - exit ;; - CRAY*TS:*:*:*) - echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' - exit ;; - CRAY*T3E:*:*:*) - echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' - exit ;; - CRAY*SV1:*:*:*) - echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' - exit ;; - *:UNICOS/mp:*:*) - echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' - exit ;; - F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) - FUJITSU_PROC=`uname -m | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz` - FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'` - FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'` - echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" - exit ;; - 5000:UNIX_System_V:4.*:*) - FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'` - FUJITSU_REL=`echo ${UNAME_RELEASE} | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/ /_/'` - echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" - exit ;; - i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) - echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE} - exit ;; - sparc*:BSD/OS:*:*) - echo sparc-unknown-bsdi${UNAME_RELEASE} - exit ;; - *:BSD/OS:*:*) - echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE} - exit ;; - 
*:FreeBSD:*:*) - UNAME_PROCESSOR=`/usr/bin/uname -p` - case ${UNAME_PROCESSOR} in - amd64) - UNAME_PROCESSOR=x86_64 ;; - i386) - UNAME_PROCESSOR=i586 ;; - esac - echo ${UNAME_PROCESSOR}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` - exit ;; - i*:CYGWIN*:*) - echo ${UNAME_MACHINE}-pc-cygwin - exit ;; - *:MINGW64*:*) - echo ${UNAME_MACHINE}-pc-mingw64 - exit ;; - *:MINGW*:*) - echo ${UNAME_MACHINE}-pc-mingw32 - exit ;; - *:MSYS*:*) - echo ${UNAME_MACHINE}-pc-msys - exit ;; - i*:windows32*:*) - # uname -m includes "-pc" on this system. - echo ${UNAME_MACHINE}-mingw32 - exit ;; - i*:PW*:*) - echo ${UNAME_MACHINE}-pc-pw32 - exit ;; - *:Interix*:*) - case ${UNAME_MACHINE} in - x86) - echo i586-pc-interix${UNAME_RELEASE} - exit ;; - authenticamd | genuineintel | EM64T) - echo x86_64-unknown-interix${UNAME_RELEASE} - exit ;; - IA64) - echo ia64-unknown-interix${UNAME_RELEASE} - exit ;; - esac ;; - [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*) - echo i${UNAME_MACHINE}-pc-mks - exit ;; - 8664:Windows_NT:*) - echo x86_64-pc-mks - exit ;; - i*:Windows_NT*:* | Pentium*:Windows_NT*:*) - # How do we know it's Interix rather than the generic POSIX subsystem? - # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we - # UNAME_MACHINE based on the output of uname instead of i386? 
- echo i586-pc-interix - exit ;; - i*:UWIN*:*) - echo ${UNAME_MACHINE}-pc-uwin - exit ;; - amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*) - echo x86_64-unknown-cygwin - exit ;; - p*:CYGWIN*:*) - echo powerpcle-unknown-cygwin - exit ;; - prep*:SunOS:5.*:*) - echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` - exit ;; - *:GNU:*:*) - # the GNU system - echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-${LIBC}`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` - exit ;; - *:GNU/*:*:*) - # other systems with GNU libc and userland - echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr "[:upper:]" "[:lower:]"``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-${LIBC} - exit ;; - i*86:Minix:*:*) - echo ${UNAME_MACHINE}-pc-minix - exit ;; - aarch64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - aarch64_be:Linux:*:*) - UNAME_MACHINE=aarch64_be - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - alpha:Linux:*:*) - case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in - EV5) UNAME_MACHINE=alphaev5 ;; - EV56) UNAME_MACHINE=alphaev56 ;; - PCA56) UNAME_MACHINE=alphapca56 ;; - PCA57) UNAME_MACHINE=alphapca56 ;; - EV6) UNAME_MACHINE=alphaev6 ;; - EV67) UNAME_MACHINE=alphaev67 ;; - EV68*) UNAME_MACHINE=alphaev68 ;; - esac - objdump --private-headers /bin/sh | grep -q ld.so.1 - if test "$?" 
= 0 ; then LIBC=gnulibc1 ; fi - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - arc:Linux:*:* | arceb:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - arm*:Linux:*:*) - eval $set_cc_for_build - if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \ - | grep -q __ARM_EABI__ - then - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - else - if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ - | grep -q __ARM_PCS_VFP - then - echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabi - else - echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabihf - fi - fi - exit ;; - avr32*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - cris:Linux:*:*) - echo ${UNAME_MACHINE}-axis-linux-${LIBC} - exit ;; - crisv32:Linux:*:*) - echo ${UNAME_MACHINE}-axis-linux-${LIBC} - exit ;; - e2k:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - frv:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - hexagon:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - i*86:Linux:*:*) - echo ${UNAME_MACHINE}-pc-linux-${LIBC} - exit ;; - ia64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - k1om:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - m32r*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - m68*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - mips:Linux:*:* | mips64:Linux:*:*) - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c - #undef CPU - #undef ${UNAME_MACHINE} - #undef ${UNAME_MACHINE}el - #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) - CPU=${UNAME_MACHINE}el - #else - #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) - CPU=${UNAME_MACHINE} - #else - CPU= - #endif - #endif -EOF - eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'` - test x"${CPU}" != x && { echo "${CPU}-unknown-linux-${LIBC}"; exit; } - ;; - mips64el:Linux:*:*) - echo 
${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - openrisc*:Linux:*:*) - echo or1k-unknown-linux-${LIBC} - exit ;; - or32:Linux:*:* | or1k*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - padre:Linux:*:*) - echo sparc-unknown-linux-${LIBC} - exit ;; - parisc64:Linux:*:* | hppa64:Linux:*:*) - echo hppa64-unknown-linux-${LIBC} - exit ;; - parisc:Linux:*:* | hppa:Linux:*:*) - # Look for CPU level - case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in - PA7*) echo hppa1.1-unknown-linux-${LIBC} ;; - PA8*) echo hppa2.0-unknown-linux-${LIBC} ;; - *) echo hppa-unknown-linux-${LIBC} ;; - esac - exit ;; - ppc64:Linux:*:*) - echo powerpc64-unknown-linux-${LIBC} - exit ;; - ppc:Linux:*:*) - echo powerpc-unknown-linux-${LIBC} - exit ;; - ppc64le:Linux:*:*) - echo powerpc64le-unknown-linux-${LIBC} - exit ;; - ppcle:Linux:*:*) - echo powerpcle-unknown-linux-${LIBC} - exit ;; - riscv32:Linux:*:* | riscv64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - s390:Linux:*:* | s390x:Linux:*:*) - echo ${UNAME_MACHINE}-ibm-linux-${LIBC} - exit ;; - sh64*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - sh*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - sparc:Linux:*:* | sparc64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - tile*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - vax:Linux:*:*) - echo ${UNAME_MACHINE}-dec-linux-${LIBC} - exit ;; - x86_64:Linux:*:*) - echo ${UNAME_MACHINE}-pc-linux-${LIBC} - exit ;; - xtensa*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-${LIBC} - exit ;; - i*86:DYNIX/ptx:4*:*) - # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. - # earlier versions are messed up and put the nodename in both - # sysname and nodename. - echo i386-sequent-sysv4 - exit ;; - i*86:UNIX_SV:4.2MP:2.*) - # Unixware is an offshoot of SVR4, but it has its own version - # number series starting with 2... 
- # I am not positive that other SVR4 systems won't match this, - # I just have to hope. -- rms. - # Use sysv4.2uw... so that sysv4* matches it. - echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION} - exit ;; - i*86:OS/2:*:*) - # If we were able to find `uname', then EMX Unix compatibility - # is probably installed. - echo ${UNAME_MACHINE}-pc-os2-emx - exit ;; - i*86:XTS-300:*:STOP) - echo ${UNAME_MACHINE}-unknown-stop - exit ;; - i*86:atheos:*:*) - echo ${UNAME_MACHINE}-unknown-atheos - exit ;; - i*86:syllable:*:*) - echo ${UNAME_MACHINE}-pc-syllable - exit ;; - i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*) - echo i386-unknown-lynxos${UNAME_RELEASE} - exit ;; - i*86:*DOS:*:*) - echo ${UNAME_MACHINE}-pc-msdosdjgpp - exit ;; - i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*) - UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'` - if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then - echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL} - else - echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL} - fi - exit ;; - i*86:*:5:[678]*) - # UnixWare 7.x, OpenUNIX and OpenServer 6. 
- case `/bin/uname -X | grep "^Machine"` in - *486*) UNAME_MACHINE=i486 ;; - *Pentium) UNAME_MACHINE=i586 ;; - *Pent*|*Celeron) UNAME_MACHINE=i686 ;; - esac - echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION} - exit ;; - i*86:*:3.2:*) - if test -f /usr/options/cb.name; then - UNAME_REL=`sed -n 's/.*Version //p' /dev/null >/dev/null ; then - UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')` - (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486 - (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \ - && UNAME_MACHINE=i586 - (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \ - && UNAME_MACHINE=i686 - (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \ - && UNAME_MACHINE=i686 - echo ${UNAME_MACHINE}-pc-sco$UNAME_REL - else - echo ${UNAME_MACHINE}-pc-sysv32 - fi - exit ;; - pc:*:*:*) - # Left here for compatibility: - # uname -m prints for DJGPP always 'pc', but it prints nothing about - # the processor, so we play safe by assuming i586. - # Note: whatever this is, it MUST be the same as what config.sub - # prints for the "djgpp" host, or else GDB configure will decide that - # this is a cross-build. - echo i586-pc-msdosdjgpp - exit ;; - Intel:Mach:3*:*) - echo i386-pc-mach3 - exit ;; - paragon:*:*:*) - echo i860-intel-osf1 - exit ;; - i860:*:4.*:*) # i860-SVR4 - if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then - echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4 - else # Add other i860-SVR4 vendors below as they are discovered. 
- echo i860-unknown-sysv${UNAME_RELEASE} # Unknown i860-SVR4 - fi - exit ;; - mini*:CTIX:SYS*5:*) - # "miniframe" - echo m68010-convergent-sysv - exit ;; - mc68k:UNIX:SYSTEM5:3.51m) - echo m68k-convergent-sysv - exit ;; - M680?0:D-NIX:5.3:*) - echo m68k-diab-dnix - exit ;; - M68*:*:R3V[5678]*:*) - test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;; - 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0) - OS_REL='' - test -r /etc/.relid \ - && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` - /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ - && { echo i486-ncr-sysv4.3${OS_REL}; exit; } - /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ - && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; - 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) - /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ - && { echo i486-ncr-sysv4; exit; } ;; - NCR*:*:4.2:* | MPRAS*:*:4.2:*) - OS_REL='.3' - test -r /etc/.relid \ - && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` - /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ - && { echo i486-ncr-sysv4.3${OS_REL}; exit; } - /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ - && { echo i586-ncr-sysv4.3${OS_REL}; exit; } - /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \ - && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; - m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) - echo m68k-unknown-lynxos${UNAME_RELEASE} - exit ;; - mc68030:UNIX_System_V:4.*:*) - echo m68k-atari-sysv4 - exit ;; - TSUNAMI:LynxOS:2.*:*) - echo sparc-unknown-lynxos${UNAME_RELEASE} - exit ;; - rs6000:LynxOS:2.*:*) - echo rs6000-unknown-lynxos${UNAME_RELEASE} - exit ;; - PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*) - echo powerpc-unknown-lynxos${UNAME_RELEASE} - exit ;; - SM[BE]S:UNIX_SV:*:*) - echo mips-dde-sysv${UNAME_RELEASE} - exit ;; - 
RM*:ReliantUNIX-*:*:*) - echo mips-sni-sysv4 - exit ;; - RM*:SINIX-*:*:*) - echo mips-sni-sysv4 - exit ;; - *:SINIX-*:*:*) - if uname -p 2>/dev/null >/dev/null ; then - UNAME_MACHINE=`(uname -p) 2>/dev/null` - echo ${UNAME_MACHINE}-sni-sysv4 - else - echo ns32k-sni-sysv - fi - exit ;; - PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort - # says - echo i586-unisys-sysv4 - exit ;; - *:UNIX_System_V:4*:FTX*) - # From Gerald Hewes . - # How about differentiating between stratus architectures? -djm - echo hppa1.1-stratus-sysv4 - exit ;; - *:*:*:FTX*) - # From seanf@swdc.stratus.com. - echo i860-stratus-sysv4 - exit ;; - i*86:VOS:*:*) - # From Paul.Green@stratus.com. - echo ${UNAME_MACHINE}-stratus-vos - exit ;; - *:VOS:*:*) - # From Paul.Green@stratus.com. - echo hppa1.1-stratus-vos - exit ;; - mc68*:A/UX:*:*) - echo m68k-apple-aux${UNAME_RELEASE} - exit ;; - news*:NEWS-OS:6*:*) - echo mips-sony-newsos6 - exit ;; - R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) - if [ -d /usr/nec ]; then - echo mips-nec-sysv${UNAME_RELEASE} - else - echo mips-unknown-sysv${UNAME_RELEASE} - fi - exit ;; - BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. - echo powerpc-be-beos - exit ;; - BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only. - echo powerpc-apple-beos - exit ;; - BePC:BeOS:*:*) # BeOS running on Intel PC compatible. - echo i586-pc-beos - exit ;; - BePC:Haiku:*:*) # Haiku running on Intel PC compatible. 
- echo i586-pc-haiku - exit ;; - x86_64:Haiku:*:*) - echo x86_64-unknown-haiku - exit ;; - SX-4:SUPER-UX:*:*) - echo sx4-nec-superux${UNAME_RELEASE} - exit ;; - SX-5:SUPER-UX:*:*) - echo sx5-nec-superux${UNAME_RELEASE} - exit ;; - SX-6:SUPER-UX:*:*) - echo sx6-nec-superux${UNAME_RELEASE} - exit ;; - SX-7:SUPER-UX:*:*) - echo sx7-nec-superux${UNAME_RELEASE} - exit ;; - SX-8:SUPER-UX:*:*) - echo sx8-nec-superux${UNAME_RELEASE} - exit ;; - SX-8R:SUPER-UX:*:*) - echo sx8r-nec-superux${UNAME_RELEASE} - exit ;; - SX-ACE:SUPER-UX:*:*) - echo sxace-nec-superux${UNAME_RELEASE} - exit ;; - Power*:Rhapsody:*:*) - echo powerpc-apple-rhapsody${UNAME_RELEASE} - exit ;; - *:Rhapsody:*:*) - echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE} - exit ;; - *:Darwin:*:*) - UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown - eval $set_cc_for_build - if test "$UNAME_PROCESSOR" = unknown ; then - UNAME_PROCESSOR=powerpc - fi - if test `echo "$UNAME_RELEASE" | sed -e 's/\..*//'` -le 10 ; then - if [ "$CC_FOR_BUILD" != no_compiler_found ]; then - if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ - (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ - grep IS_64BIT_ARCH >/dev/null - then - case $UNAME_PROCESSOR in - i386) UNAME_PROCESSOR=x86_64 ;; - powerpc) UNAME_PROCESSOR=powerpc64 ;; - esac - fi - fi - elif test "$UNAME_PROCESSOR" = i386 ; then - # Avoid executing cc on OS X 10.9, as it ships with a stub - # that puts up a graphical alert prompting to install - # developer tools. Any system running Mac OS X 10.7 or - # later (Darwin 11 and later) is required to have a 64-bit - # processor. This is not true of the ARM version of Darwin - # that Apple uses in portable devices. 
- UNAME_PROCESSOR=x86_64 - fi - echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} - exit ;; - *:procnto*:*:* | *:QNX:[0123456789]*:*) - UNAME_PROCESSOR=`uname -p` - if test "$UNAME_PROCESSOR" = x86; then - UNAME_PROCESSOR=i386 - UNAME_MACHINE=pc - fi - echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE} - exit ;; - *:QNX:*:4*) - echo i386-pc-qnx - exit ;; - NEO-?:NONSTOP_KERNEL:*:*) - echo neo-tandem-nsk${UNAME_RELEASE} - exit ;; - NSE-*:NONSTOP_KERNEL:*:*) - echo nse-tandem-nsk${UNAME_RELEASE} - exit ;; - NSR-?:NONSTOP_KERNEL:*:*) - echo nsr-tandem-nsk${UNAME_RELEASE} - exit ;; - NSX-?:NONSTOP_KERNEL:*:*) - echo nsx-tandem-nsk${UNAME_RELEASE} - exit ;; - *:NonStop-UX:*:*) - echo mips-compaq-nonstopux - exit ;; - BS2000:POSIX*:*:*) - echo bs2000-siemens-sysv - exit ;; - DS/*:UNIX_System_V:*:*) - echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE} - exit ;; - *:Plan9:*:*) - # "uname -m" is not consistent, so use $cputype instead. 386 - # is converted to i386 for consistency with other x86 - # operating systems. 
- if test "$cputype" = 386; then - UNAME_MACHINE=i386 - else - UNAME_MACHINE="$cputype" - fi - echo ${UNAME_MACHINE}-unknown-plan9 - exit ;; - *:TOPS-10:*:*) - echo pdp10-unknown-tops10 - exit ;; - *:TENEX:*:*) - echo pdp10-unknown-tenex - exit ;; - KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*) - echo pdp10-dec-tops20 - exit ;; - XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*) - echo pdp10-xkl-tops20 - exit ;; - *:TOPS-20:*:*) - echo pdp10-unknown-tops20 - exit ;; - *:ITS:*:*) - echo pdp10-unknown-its - exit ;; - SEI:*:*:SEIUX) - echo mips-sei-seiux${UNAME_RELEASE} - exit ;; - *:DragonFly:*:*) - echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` - exit ;; - *:*VMS:*:*) - UNAME_MACHINE=`(uname -p) 2>/dev/null` - case "${UNAME_MACHINE}" in - A*) echo alpha-dec-vms ; exit ;; - I*) echo ia64-dec-vms ; exit ;; - V*) echo vax-dec-vms ; exit ;; - esac ;; - *:XENIX:*:SysV) - echo i386-pc-xenix - exit ;; - i*86:skyos:*:*) - echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE} | sed -e 's/ .*$//'` - exit ;; - i*86:rdos:*:*) - echo ${UNAME_MACHINE}-pc-rdos - exit ;; - i*86:AROS:*:*) - echo ${UNAME_MACHINE}-pc-aros - exit ;; - x86_64:VMkernel:*:*) - echo ${UNAME_MACHINE}-unknown-esx - exit ;; - amd64:Isilon\ OneFS:*:*) - echo x86_64-unknown-onefs - exit ;; -esac - -cat >&2 </dev/null || echo unknown` -uname -r = `(uname -r) 2>/dev/null || echo unknown` -uname -s = `(uname -s) 2>/dev/null || echo unknown` -uname -v = `(uname -v) 2>/dev/null || echo unknown` - -/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null` -/bin/uname -X = `(/bin/uname -X) 2>/dev/null` - -hostinfo = `(hostinfo) 2>/dev/null` -/bin/universe = `(/bin/universe) 2>/dev/null` -/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null` -/bin/arch = `(/bin/arch) 2>/dev/null` -/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null` -/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null` - -UNAME_MACHINE = ${UNAME_MACHINE} -UNAME_RELEASE = ${UNAME_RELEASE} -UNAME_SYSTEM = 
${UNAME_SYSTEM} -UNAME_VERSION = ${UNAME_VERSION} -EOF - -exit 1 - -# Local variables: -# eval: (add-hook 'write-file-hooks 'time-stamp) -# time-stamp-start: "timestamp='" -# time-stamp-format: "%:y-%02m-%02d" -# time-stamp-end: "'" -# End: diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/config.sub cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/config.sub --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/config.sub 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/config.sub 1970-01-01 00:00:00.000000000 +0000 @@ -1,1836 +0,0 @@ -#! /bin/sh -# Configuration validation subroutine script. -# Copyright 1992-2017 Free Software Foundation, Inc. - -timestamp='2017-04-02' - -# This file is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, see . -# -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that -# program. This Exception is an additional permission under section 7 -# of the GNU General Public License, version 3 ("GPLv3"). - - -# Please send patches to . -# -# Configuration subroutine to validate and canonicalize a configuration type. -# Supply the specified configuration type as an argument. 
-# If it is invalid, we print an error message on stderr and exit with code 1. -# Otherwise, we print the canonical config type on stdout and succeed. - -# You can get the latest version of this script from: -# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub - -# This file is supposed to be the same for all GNU packages -# and recognize all the CPU types, system types and aliases -# that are meaningful with *any* GNU software. -# Each package is responsible for reporting which valid configurations -# it does not support. The user should be able to distinguish -# a failure to support a valid configuration from a meaningless -# configuration. - -# The goal of this file is to map all the various variations of a given -# machine specification into a single specification in the form: -# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM -# or in some cases, the newer four-part form: -# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM -# It is wrong to echo any other type of specification. - -me=`echo "$0" | sed -e 's,.*/,,'` - -usage="\ -Usage: $0 [OPTION] CPU-MFR-OPSYS or ALIAS - -Canonicalize a configuration name. - -Operation modes: - -h, --help print this help, then exit - -t, --time-stamp print date of last modification, then exit - -v, --version print version number, then exit - -Report bugs and patches to ." - -version="\ -GNU config.sub ($timestamp) - -Copyright 1992-2017 Free Software Foundation, Inc. - -This is free software; see the source for copying conditions. There is NO -warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." - -help=" -Try \`$me --help' for more information." - -# Parse command line -while test $# -gt 0 ; do - case $1 in - --time-stamp | --time* | -t ) - echo "$timestamp" ; exit ;; - --version | -v ) - echo "$version" ; exit ;; - --help | --h* | -h ) - echo "$usage"; exit ;; - -- ) # Stop option processing - shift; break ;; - - ) # Use stdin as input. 
- break ;; - -* ) - echo "$me: invalid option $1$help" - exit 1 ;; - - *local*) - # First pass through any local machine types. - echo $1 - exit ;; - - * ) - break ;; - esac -done - -case $# in - 0) echo "$me: missing argument$help" >&2 - exit 1;; - 1) ;; - *) echo "$me: too many arguments$help" >&2 - exit 1;; -esac - -# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any). -# Here we must recognize all the valid KERNEL-OS combinations. -maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` -case $maybe_os in - nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \ - linux-musl* | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \ - knetbsd*-gnu* | netbsd*-gnu* | netbsd*-eabi* | \ - kopensolaris*-gnu* | cloudabi*-eabi* | \ - storm-chaos* | os2-emx* | rtmk-nova*) - os=-$maybe_os - basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` - ;; - android-linux) - os=-linux-android - basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`-unknown - ;; - *) - basic_machine=`echo $1 | sed 's/-[^-]*$//'` - if [ $basic_machine != $1 ] - then os=`echo $1 | sed 's/.*-/-/'` - else os=; fi - ;; -esac - -### Let's recognize common machines as not being operating systems so -### that things like config.sub decstation-3100 work. We also -### recognize some manufacturers as not being operating systems, so we -### can provide default operating systems below. -case $os in - -sun*os*) - # Prevent following clause from handling this invalid input. 
- ;; - -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \ - -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \ - -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \ - -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\ - -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \ - -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \ - -apple | -axis | -knuth | -cray | -microblaze*) - os= - basic_machine=$1 - ;; - -bluegene*) - os=-cnk - ;; - -sim | -cisco | -oki | -wec | -winbond) - os= - basic_machine=$1 - ;; - -scout) - ;; - -wrs) - os=-vxworks - basic_machine=$1 - ;; - -chorusos*) - os=-chorusos - basic_machine=$1 - ;; - -chorusrdb) - os=-chorusrdb - basic_machine=$1 - ;; - -hiux*) - os=-hiuxwe2 - ;; - -sco6) - os=-sco5v6 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -sco5) - os=-sco3.2v5 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -sco4) - os=-sco3.2v4 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -sco3.2.[4-9]*) - os=`echo $os | sed -e 's/sco3.2./sco3.2v/'` - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -sco3.2v[4-9]*) - # Don't forget version if it is 3.2v4 or newer. - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -sco5v6*) - # Don't forget version if it is 3.2v4 or newer. 
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -sco*) - os=-sco3.2v2 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -udk*) - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -isc) - os=-isc2.2 - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -clix*) - basic_machine=clipper-intergraph - ;; - -isc*) - basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` - ;; - -lynx*178) - os=-lynxos178 - ;; - -lynx*5) - os=-lynxos5 - ;; - -lynx*) - os=-lynxos - ;; - -ptx*) - basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'` - ;; - -windowsnt*) - os=`echo $os | sed -e 's/windowsnt/winnt/'` - ;; - -psos*) - os=-psos - ;; - -mint | -mint[0-9]*) - basic_machine=m68k-atari - os=-mint - ;; -esac - -# Decode aliases for certain CPU-COMPANY combinations. -case $basic_machine in - # Recognize the basic CPU types without company name. - # Some are omitted here because they have special meanings below. - 1750a | 580 \ - | a29k \ - | aarch64 | aarch64_be \ - | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ - | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ - | am33_2.0 \ - | arc | arceb \ - | arm | arm[bl]e | arme[lb] | armv[2-8] | armv[3-8][lb] | armv7[arm] \ - | avr | avr32 \ - | ba \ - | be32 | be64 \ - | bfin \ - | c4x | c8051 | clipper \ - | d10v | d30v | dlx | dsp16xx \ - | e2k | epiphany \ - | fido | fr30 | frv | ft32 \ - | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ - | hexagon \ - | i370 | i860 | i960 | ia16 | ia64 \ - | ip2k | iq2000 \ - | k1om \ - | le32 | le64 \ - | lm32 \ - | m32c | m32r | m32rle | m68000 | m68k | m88k \ - | maxq | mb | microblaze | microblazeel | mcore | mep | metag \ - | mips | mipsbe | mipseb | mipsel | mipsle \ - | mips16 \ - | mips64 | mips64el \ - | mips64octeon | mips64octeonel \ - | mips64orion | mips64orionel \ - | mips64r5900 | mips64r5900el \ - | mips64vr | mips64vrel \ - | mips64vr4100 | mips64vr4100el \ - | mips64vr4300 | mips64vr4300el 
\ - | mips64vr5000 | mips64vr5000el \ - | mips64vr5900 | mips64vr5900el \ - | mipsisa32 | mipsisa32el \ - | mipsisa32r2 | mipsisa32r2el \ - | mipsisa32r6 | mipsisa32r6el \ - | mipsisa64 | mipsisa64el \ - | mipsisa64r2 | mipsisa64r2el \ - | mipsisa64r6 | mipsisa64r6el \ - | mipsisa64sb1 | mipsisa64sb1el \ - | mipsisa64sr71k | mipsisa64sr71kel \ - | mipsr5900 | mipsr5900el \ - | mipstx39 | mipstx39el \ - | mn10200 | mn10300 \ - | moxie \ - | mt \ - | msp430 \ - | nds32 | nds32le | nds32be \ - | nios | nios2 | nios2eb | nios2el \ - | ns16k | ns32k \ - | open8 | or1k | or1knd | or32 \ - | pdp10 | pdp11 | pj | pjl \ - | powerpc | powerpc64 | powerpc64le | powerpcle \ - | pru \ - | pyramid \ - | riscv32 | riscv64 \ - | rl78 | rx \ - | score \ - | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[234]eb | sheb | shbe | shle | sh[1234]le | sh3ele \ - | sh64 | sh64le \ - | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \ - | sparcv8 | sparcv9 | sparcv9b | sparcv9v \ - | spu \ - | tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \ - | ubicom32 \ - | v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \ - | visium \ - | wasm32 \ - | we32k \ - | x86 | xc16x | xstormy16 | xtensa \ - | z8k | z80) - basic_machine=$basic_machine-unknown - ;; - c54x) - basic_machine=tic54x-unknown - ;; - c55x) - basic_machine=tic55x-unknown - ;; - c6x) - basic_machine=tic6x-unknown - ;; - leon|leon[3-9]) - basic_machine=sparc-$basic_machine - ;; - m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | nvptx | picochip) - basic_machine=$basic_machine-unknown - os=-none - ;; - m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k) - ;; - ms1) - basic_machine=mt-unknown - ;; - - strongarm | thumb | xscale) - basic_machine=arm-unknown - ;; - xgate) - basic_machine=$basic_machine-unknown - os=-none - ;; - xscaleeb) - basic_machine=armeb-unknown - ;; - - xscaleel) - basic_machine=armel-unknown - ;; - - # We use `pc' rather than `unknown' - # because (1) that's what 
they normally are, and - # (2) the word "unknown" tends to confuse beginning users. - i*86 | x86_64) - basic_machine=$basic_machine-pc - ;; - # Object if more than one company name word. - *-*-*) - echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 - exit 1 - ;; - # Recognize the basic CPU types with company name. - 580-* \ - | a29k-* \ - | aarch64-* | aarch64_be-* \ - | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ - | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ - | alphapca5[67]-* | alpha64pca5[67]-* | arc-* | arceb-* \ - | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ - | avr-* | avr32-* \ - | ba-* \ - | be32-* | be64-* \ - | bfin-* | bs2000-* \ - | c[123]* | c30-* | [cjt]90-* | c4x-* \ - | c8051-* | clipper-* | craynv-* | cydra-* \ - | d10v-* | d30v-* | dlx-* \ - | e2k-* | elxsi-* \ - | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ - | h8300-* | h8500-* \ - | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ - | hexagon-* \ - | i*86-* | i860-* | i960-* | ia16-* | ia64-* \ - | ip2k-* | iq2000-* \ - | k1om-* \ - | le32-* | le64-* \ - | lm32-* \ - | m32c-* | m32r-* | m32rle-* \ - | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ - | m88110-* | m88k-* | maxq-* | mcore-* | metag-* \ - | microblaze-* | microblazeel-* \ - | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ - | mips16-* \ - | mips64-* | mips64el-* \ - | mips64octeon-* | mips64octeonel-* \ - | mips64orion-* | mips64orionel-* \ - | mips64r5900-* | mips64r5900el-* \ - | mips64vr-* | mips64vrel-* \ - | mips64vr4100-* | mips64vr4100el-* \ - | mips64vr4300-* | mips64vr4300el-* \ - | mips64vr5000-* | mips64vr5000el-* \ - | mips64vr5900-* | mips64vr5900el-* \ - | mipsisa32-* | mipsisa32el-* \ - | mipsisa32r2-* | mipsisa32r2el-* \ - | mipsisa32r6-* | mipsisa32r6el-* \ - | mipsisa64-* | mipsisa64el-* \ - | mipsisa64r2-* | mipsisa64r2el-* \ - | mipsisa64r6-* | mipsisa64r6el-* \ - | mipsisa64sb1-* | 
mipsisa64sb1el-* \ - | mipsisa64sr71k-* | mipsisa64sr71kel-* \ - | mipsr5900-* | mipsr5900el-* \ - | mipstx39-* | mipstx39el-* \ - | mmix-* \ - | mt-* \ - | msp430-* \ - | nds32-* | nds32le-* | nds32be-* \ - | nios-* | nios2-* | nios2eb-* | nios2el-* \ - | none-* | np1-* | ns16k-* | ns32k-* \ - | open8-* \ - | or1k*-* \ - | orion-* \ - | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ - | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \ - | pru-* \ - | pyramid-* \ - | riscv32-* | riscv64-* \ - | rl78-* | romp-* | rs6000-* | rx-* \ - | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \ - | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ - | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \ - | sparclite-* \ - | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx*-* \ - | tahoe-* \ - | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ - | tile*-* \ - | tron-* \ - | ubicom32-* \ - | v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \ - | vax-* \ - | visium-* \ - | wasm32-* \ - | we32k-* \ - | x86-* | x86_64-* | xc16x-* | xps100-* \ - | xstormy16-* | xtensa*-* \ - | ymp-* \ - | z8k-* | z80-*) - ;; - # Recognize the basic CPU types without company name, with glob match. - xtensa*) - basic_machine=$basic_machine-unknown - ;; - # Recognize the various machine names and aliases which stand - # for a CPU type and a company and sometimes even an OS. 
- 386bsd) - basic_machine=i386-unknown - os=-bsd - ;; - 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) - basic_machine=m68000-att - ;; - 3b*) - basic_machine=we32k-att - ;; - a29khif) - basic_machine=a29k-amd - os=-udi - ;; - abacus) - basic_machine=abacus-unknown - ;; - adobe68k) - basic_machine=m68010-adobe - os=-scout - ;; - alliant | fx80) - basic_machine=fx80-alliant - ;; - altos | altos3068) - basic_machine=m68k-altos - ;; - am29k) - basic_machine=a29k-none - os=-bsd - ;; - amd64) - basic_machine=x86_64-pc - ;; - amd64-*) - basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - amdahl) - basic_machine=580-amdahl - os=-sysv - ;; - amiga | amiga-*) - basic_machine=m68k-unknown - ;; - amigaos | amigados) - basic_machine=m68k-unknown - os=-amigaos - ;; - amigaunix | amix) - basic_machine=m68k-unknown - os=-sysv4 - ;; - apollo68) - basic_machine=m68k-apollo - os=-sysv - ;; - apollo68bsd) - basic_machine=m68k-apollo - os=-bsd - ;; - aros) - basic_machine=i386-pc - os=-aros - ;; - asmjs) - basic_machine=asmjs-unknown - ;; - aux) - basic_machine=m68k-apple - os=-aux - ;; - balance) - basic_machine=ns32k-sequent - os=-dynix - ;; - blackfin) - basic_machine=bfin-unknown - os=-linux - ;; - blackfin-*) - basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'` - os=-linux - ;; - bluegene*) - basic_machine=powerpc-ibm - os=-cnk - ;; - c54x-*) - basic_machine=tic54x-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - c55x-*) - basic_machine=tic55x-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - c6x-*) - basic_machine=tic6x-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - c90) - basic_machine=c90-cray - os=-unicos - ;; - cegcc) - basic_machine=arm-unknown - os=-cegcc - ;; - convex-c1) - basic_machine=c1-convex - os=-bsd - ;; - convex-c2) - basic_machine=c2-convex - os=-bsd - ;; - convex-c32) - basic_machine=c32-convex - os=-bsd - ;; - convex-c34) - basic_machine=c34-convex - os=-bsd - ;; - convex-c38) - basic_machine=c38-convex - os=-bsd - ;; 
- cray | j90) - basic_machine=j90-cray - os=-unicos - ;; - craynv) - basic_machine=craynv-cray - os=-unicosmp - ;; - cr16 | cr16-*) - basic_machine=cr16-unknown - os=-elf - ;; - crds | unos) - basic_machine=m68k-crds - ;; - crisv32 | crisv32-* | etraxfs*) - basic_machine=crisv32-axis - ;; - cris | cris-* | etrax*) - basic_machine=cris-axis - ;; - crx) - basic_machine=crx-unknown - os=-elf - ;; - da30 | da30-*) - basic_machine=m68k-da30 - ;; - decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn) - basic_machine=mips-dec - ;; - decsystem10* | dec10*) - basic_machine=pdp10-dec - os=-tops10 - ;; - decsystem20* | dec20*) - basic_machine=pdp10-dec - os=-tops20 - ;; - delta | 3300 | motorola-3300 | motorola-delta \ - | 3300-motorola | delta-motorola) - basic_machine=m68k-motorola - ;; - delta88) - basic_machine=m88k-motorola - os=-sysv3 - ;; - dicos) - basic_machine=i686-pc - os=-dicos - ;; - djgpp) - basic_machine=i586-pc - os=-msdosdjgpp - ;; - dpx20 | dpx20-*) - basic_machine=rs6000-bull - os=-bosx - ;; - dpx2* | dpx2*-bull) - basic_machine=m68k-bull - os=-sysv3 - ;; - e500v[12]) - basic_machine=powerpc-unknown - os=$os"spe" - ;; - e500v[12]-*) - basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` - os=$os"spe" - ;; - ebmon29k) - basic_machine=a29k-amd - os=-ebmon - ;; - elxsi) - basic_machine=elxsi-elxsi - os=-bsd - ;; - encore | umax | mmax) - basic_machine=ns32k-encore - ;; - es1800 | OSE68k | ose68k | ose | OSE) - basic_machine=m68k-ericsson - os=-ose - ;; - fx2800) - basic_machine=i860-alliant - ;; - genix) - basic_machine=ns32k-ns - ;; - gmicro) - basic_machine=tron-gmicro - os=-sysv - ;; - go32) - basic_machine=i386-pc - os=-go32 - ;; - h3050r* | hiux*) - basic_machine=hppa1.1-hitachi - os=-hiuxwe2 - ;; - h8300hms) - basic_machine=h8300-hitachi - os=-hms - ;; - h8300xray) - basic_machine=h8300-hitachi - os=-xray - ;; - h8500hms) - basic_machine=h8500-hitachi - os=-hms - ;; - harris) - basic_machine=m88k-harris - os=-sysv3 - ;; - 
hp300-*) - basic_machine=m68k-hp - ;; - hp300bsd) - basic_machine=m68k-hp - os=-bsd - ;; - hp300hpux) - basic_machine=m68k-hp - os=-hpux - ;; - hp3k9[0-9][0-9] | hp9[0-9][0-9]) - basic_machine=hppa1.0-hp - ;; - hp9k2[0-9][0-9] | hp9k31[0-9]) - basic_machine=m68000-hp - ;; - hp9k3[2-9][0-9]) - basic_machine=m68k-hp - ;; - hp9k6[0-9][0-9] | hp6[0-9][0-9]) - basic_machine=hppa1.0-hp - ;; - hp9k7[0-79][0-9] | hp7[0-79][0-9]) - basic_machine=hppa1.1-hp - ;; - hp9k78[0-9] | hp78[0-9]) - # FIXME: really hppa2.0-hp - basic_machine=hppa1.1-hp - ;; - hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893) - # FIXME: really hppa2.0-hp - basic_machine=hppa1.1-hp - ;; - hp9k8[0-9][13679] | hp8[0-9][13679]) - basic_machine=hppa1.1-hp - ;; - hp9k8[0-9][0-9] | hp8[0-9][0-9]) - basic_machine=hppa1.0-hp - ;; - hppa-next) - os=-nextstep3 - ;; - hppaosf) - basic_machine=hppa1.1-hp - os=-osf - ;; - hppro) - basic_machine=hppa1.1-hp - os=-proelf - ;; - i370-ibm* | ibm*) - basic_machine=i370-ibm - ;; - i*86v32) - basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` - os=-sysv32 - ;; - i*86v4*) - basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` - os=-sysv4 - ;; - i*86v) - basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` - os=-sysv - ;; - i*86sol2) - basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` - os=-solaris2 - ;; - i386mach) - basic_machine=i386-mach - os=-mach - ;; - i386-vsta | vsta) - basic_machine=i386-unknown - os=-vsta - ;; - iris | iris4d) - basic_machine=mips-sgi - case $os in - -irix*) - ;; - *) - os=-irix4 - ;; - esac - ;; - isi68 | isi) - basic_machine=m68k-isi - os=-sysv - ;; - leon-*|leon[3-9]-*) - basic_machine=sparc-`echo $basic_machine | sed 's/-.*//'` - ;; - m68knommu) - basic_machine=m68k-unknown - os=-linux - ;; - m68knommu-*) - basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'` - os=-linux - ;; - m88k-omron*) - basic_machine=m88k-omron - ;; - magnum | m3230) - basic_machine=mips-mips - os=-sysv - ;; - merlin) - 
basic_machine=ns32k-utek - os=-sysv - ;; - microblaze*) - basic_machine=microblaze-xilinx - ;; - mingw64) - basic_machine=x86_64-pc - os=-mingw64 - ;; - mingw32) - basic_machine=i686-pc - os=-mingw32 - ;; - mingw32ce) - basic_machine=arm-unknown - os=-mingw32ce - ;; - miniframe) - basic_machine=m68000-convergent - ;; - *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*) - basic_machine=m68k-atari - os=-mint - ;; - mips3*-*) - basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'` - ;; - mips3*) - basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown - ;; - monitor) - basic_machine=m68k-rom68k - os=-coff - ;; - morphos) - basic_machine=powerpc-unknown - os=-morphos - ;; - moxiebox) - basic_machine=moxie-unknown - os=-moxiebox - ;; - msdos) - basic_machine=i386-pc - os=-msdos - ;; - ms1-*) - basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'` - ;; - msys) - basic_machine=i686-pc - os=-msys - ;; - mvs) - basic_machine=i370-ibm - os=-mvs - ;; - nacl) - basic_machine=le32-unknown - os=-nacl - ;; - ncr3000) - basic_machine=i486-ncr - os=-sysv4 - ;; - netbsd386) - basic_machine=i386-unknown - os=-netbsd - ;; - netwinder) - basic_machine=armv4l-rebel - os=-linux - ;; - news | news700 | news800 | news900) - basic_machine=m68k-sony - os=-newsos - ;; - news1000) - basic_machine=m68030-sony - os=-newsos - ;; - news-3600 | risc-news) - basic_machine=mips-sony - os=-newsos - ;; - necv70) - basic_machine=v70-nec - os=-sysv - ;; - next | m*-next ) - basic_machine=m68k-next - case $os in - -nextstep* ) - ;; - -ns2*) - os=-nextstep2 - ;; - *) - os=-nextstep3 - ;; - esac - ;; - nh3000) - basic_machine=m68k-harris - os=-cxux - ;; - nh[45]000) - basic_machine=m88k-harris - os=-cxux - ;; - nindy960) - basic_machine=i960-intel - os=-nindy - ;; - mon960) - basic_machine=i960-intel - os=-mon960 - ;; - nonstopux) - basic_machine=mips-compaq - os=-nonstopux - ;; - np1) - basic_machine=np1-gould - ;; - neo-tandem) - basic_machine=neo-tandem - ;; - nse-tandem) - 
basic_machine=nse-tandem - ;; - nsr-tandem) - basic_machine=nsr-tandem - ;; - nsx-tandem) - basic_machine=nsx-tandem - ;; - op50n-* | op60c-*) - basic_machine=hppa1.1-oki - os=-proelf - ;; - openrisc | openrisc-*) - basic_machine=or32-unknown - ;; - os400) - basic_machine=powerpc-ibm - os=-os400 - ;; - OSE68000 | ose68000) - basic_machine=m68000-ericsson - os=-ose - ;; - os68k) - basic_machine=m68k-none - os=-os68k - ;; - pa-hitachi) - basic_machine=hppa1.1-hitachi - os=-hiuxwe2 - ;; - paragon) - basic_machine=i860-intel - os=-osf - ;; - parisc) - basic_machine=hppa-unknown - os=-linux - ;; - parisc-*) - basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'` - os=-linux - ;; - pbd) - basic_machine=sparc-tti - ;; - pbb) - basic_machine=m68k-tti - ;; - pc532 | pc532-*) - basic_machine=ns32k-pc532 - ;; - pc98) - basic_machine=i386-pc - ;; - pc98-*) - basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - pentium | p5 | k5 | k6 | nexgen | viac3) - basic_machine=i586-pc - ;; - pentiumpro | p6 | 6x86 | athlon | athlon_*) - basic_machine=i686-pc - ;; - pentiumii | pentium2 | pentiumiii | pentium3) - basic_machine=i686-pc - ;; - pentium4) - basic_machine=i786-pc - ;; - pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*) - basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - pentiumpro-* | p6-* | 6x86-* | athlon-*) - basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*) - basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - pentium4-*) - basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - pn) - basic_machine=pn-gould - ;; - power) basic_machine=power-ibm - ;; - ppc | ppcbe) basic_machine=powerpc-unknown - ;; - ppc-* | ppcbe-*) - basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - ppcle | powerpclittle) - basic_machine=powerpcle-unknown - ;; - ppcle-* | powerpclittle-*) - basic_machine=powerpcle-`echo $basic_machine | sed 
's/^[^-]*-//'` - ;; - ppc64) basic_machine=powerpc64-unknown - ;; - ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - ppc64le | powerpc64little) - basic_machine=powerpc64le-unknown - ;; - ppc64le-* | powerpc64little-*) - basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - ps2) - basic_machine=i386-ibm - ;; - pw32) - basic_machine=i586-unknown - os=-pw32 - ;; - rdos | rdos64) - basic_machine=x86_64-pc - os=-rdos - ;; - rdos32) - basic_machine=i386-pc - os=-rdos - ;; - rom68k) - basic_machine=m68k-rom68k - os=-coff - ;; - rm[46]00) - basic_machine=mips-siemens - ;; - rtpc | rtpc-*) - basic_machine=romp-ibm - ;; - s390 | s390-*) - basic_machine=s390-ibm - ;; - s390x | s390x-*) - basic_machine=s390x-ibm - ;; - sa29200) - basic_machine=a29k-amd - os=-udi - ;; - sb1) - basic_machine=mipsisa64sb1-unknown - ;; - sb1el) - basic_machine=mipsisa64sb1el-unknown - ;; - sde) - basic_machine=mipsisa32-sde - os=-elf - ;; - sei) - basic_machine=mips-sei - os=-seiux - ;; - sequent) - basic_machine=i386-sequent - ;; - sh) - basic_machine=sh-hitachi - os=-hms - ;; - sh5el) - basic_machine=sh5le-unknown - ;; - sh64) - basic_machine=sh64-unknown - ;; - sparclite-wrs | simso-wrs) - basic_machine=sparclite-wrs - os=-vxworks - ;; - sps7) - basic_machine=m68k-bull - os=-sysv2 - ;; - spur) - basic_machine=spur-unknown - ;; - st2000) - basic_machine=m68k-tandem - ;; - stratus) - basic_machine=i860-stratus - os=-sysv4 - ;; - strongarm-* | thumb-*) - basic_machine=arm-`echo $basic_machine | sed 's/^[^-]*-//'` - ;; - sun2) - basic_machine=m68000-sun - ;; - sun2os3) - basic_machine=m68000-sun - os=-sunos3 - ;; - sun2os4) - basic_machine=m68000-sun - os=-sunos4 - ;; - sun3os3) - basic_machine=m68k-sun - os=-sunos3 - ;; - sun3os4) - basic_machine=m68k-sun - os=-sunos4 - ;; - sun4os3) - basic_machine=sparc-sun - os=-sunos3 - ;; - sun4os4) - basic_machine=sparc-sun - os=-sunos4 - ;; - sun4sol2) - basic_machine=sparc-sun - os=-solaris2 - ;; - sun3 
| sun3-*) - basic_machine=m68k-sun - ;; - sun4) - basic_machine=sparc-sun - ;; - sun386 | sun386i | roadrunner) - basic_machine=i386-sun - ;; - sv1) - basic_machine=sv1-cray - os=-unicos - ;; - symmetry) - basic_machine=i386-sequent - os=-dynix - ;; - t3e) - basic_machine=alphaev5-cray - os=-unicos - ;; - t90) - basic_machine=t90-cray - os=-unicos - ;; - tile*) - basic_machine=$basic_machine-unknown - os=-linux-gnu - ;; - tx39) - basic_machine=mipstx39-unknown - ;; - tx39el) - basic_machine=mipstx39el-unknown - ;; - toad1) - basic_machine=pdp10-xkl - os=-tops20 - ;; - tower | tower-32) - basic_machine=m68k-ncr - ;; - tpf) - basic_machine=s390x-ibm - os=-tpf - ;; - udi29k) - basic_machine=a29k-amd - os=-udi - ;; - ultra3) - basic_machine=a29k-nyu - os=-sym1 - ;; - v810 | necv810) - basic_machine=v810-nec - os=-none - ;; - vaxv) - basic_machine=vax-dec - os=-sysv - ;; - vms) - basic_machine=vax-dec - os=-vms - ;; - vpp*|vx|vx-*) - basic_machine=f301-fujitsu - ;; - vxworks960) - basic_machine=i960-wrs - os=-vxworks - ;; - vxworks68) - basic_machine=m68k-wrs - os=-vxworks - ;; - vxworks29k) - basic_machine=a29k-wrs - os=-vxworks - ;; - wasm32) - basic_machine=wasm32-unknown - ;; - w65*) - basic_machine=w65-wdc - os=-none - ;; - w89k-*) - basic_machine=hppa1.1-winbond - os=-proelf - ;; - xbox) - basic_machine=i686-pc - os=-mingw32 - ;; - xps | xps100) - basic_machine=xps100-honeywell - ;; - xscale-* | xscalee[bl]-*) - basic_machine=`echo $basic_machine | sed 's/^xscale/arm/'` - ;; - ymp) - basic_machine=ymp-cray - os=-unicos - ;; - z8k-*-coff) - basic_machine=z8k-unknown - os=-sim - ;; - z80-*-coff) - basic_machine=z80-unknown - os=-sim - ;; - none) - basic_machine=none-none - os=-none - ;; - -# Here we handle the default manufacturer of certain CPU types. It is in -# some cases the only manufacturer, in others, it is the most popular. 
- w89k) - basic_machine=hppa1.1-winbond - ;; - op50n) - basic_machine=hppa1.1-oki - ;; - op60c) - basic_machine=hppa1.1-oki - ;; - romp) - basic_machine=romp-ibm - ;; - mmix) - basic_machine=mmix-knuth - ;; - rs6000) - basic_machine=rs6000-ibm - ;; - vax) - basic_machine=vax-dec - ;; - pdp10) - # there are many clones, so DEC is not a safe bet - basic_machine=pdp10-unknown - ;; - pdp11) - basic_machine=pdp11-dec - ;; - we32k) - basic_machine=we32k-att - ;; - sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele) - basic_machine=sh-unknown - ;; - sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v) - basic_machine=sparc-sun - ;; - cydra) - basic_machine=cydra-cydrome - ;; - orion) - basic_machine=orion-highlevel - ;; - orion105) - basic_machine=clipper-highlevel - ;; - mac | mpw | mac-mpw) - basic_machine=m68k-apple - ;; - pmac | pmac-mpw) - basic_machine=powerpc-apple - ;; - *-unknown) - # Make sure to match an already-canonicalized machine name. - ;; - *) - echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 - exit 1 - ;; -esac - -# Here we canonicalize certain aliases for manufacturers. -case $basic_machine in - *-digital*) - basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'` - ;; - *-commodore*) - basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'` - ;; - *) - ;; -esac - -# Decode manufacturer-specific aliases for certain operating systems. - -if [ x"$os" != x"" ] -then -case $os in - # First match some system type aliases - # that might get confused with valid system types. - # -solaris* is a basic system type, with this one exception. - -auroraux) - os=-auroraux - ;; - -solaris1 | -solaris1.*) - os=`echo $os | sed -e 's|solaris1|sunos4|'` - ;; - -solaris) - os=-solaris2 - ;; - -svr4*) - os=-sysv4 - ;; - -unixware*) - os=-sysv4.2uw - ;; - -gnu/linux*) - os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'` - ;; - # First accept the basic system types. - # The portable systems comes first. 
- # Each alternative MUST END IN A *, to match a version number. - # -sysv* is not here because it comes later, after sysvr4. - -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ - | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\ - | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \ - | -sym* | -kopensolaris* | -plan9* \ - | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ - | -aos* | -aros* | -cloudabi* | -sortix* \ - | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ - | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ - | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \ - | -bitrig* | -openbsd* | -solidbsd* | -libertybsd* \ - | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \ - | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ - | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ - | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ - | -chorusos* | -chorusrdb* | -cegcc* | -glidix* \ - | -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ - | -midipix* | -mingw32* | -mingw64* | -linux-gnu* | -linux-android* \ - | -linux-newlib* | -linux-musl* | -linux-uclibc* \ - | -uxpv* | -beos* | -mpeix* | -udk* | -moxiebox* \ - | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \ - | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ - | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \ - | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \ - | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \ - | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \ - | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es* \ - | -onefs* | -tirtos* | -phoenix* | -fuchsia* | -redox*) - # Remember, each alternative MUST END IN *, to match a version number. 
- ;; - -qnx*) - case $basic_machine in - x86-* | i*86-*) - ;; - *) - os=-nto$os - ;; - esac - ;; - -nto-qnx*) - ;; - -nto*) - os=`echo $os | sed -e 's|nto|nto-qnx|'` - ;; - -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \ - | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \ - | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*) - ;; - -mac*) - os=`echo $os | sed -e 's|mac|macos|'` - ;; - -linux-dietlibc) - os=-linux-dietlibc - ;; - -linux*) - os=`echo $os | sed -e 's|linux|linux-gnu|'` - ;; - -sunos5*) - os=`echo $os | sed -e 's|sunos5|solaris2|'` - ;; - -sunos6*) - os=`echo $os | sed -e 's|sunos6|solaris3|'` - ;; - -opened*) - os=-openedition - ;; - -os400*) - os=-os400 - ;; - -wince*) - os=-wince - ;; - -osfrose*) - os=-osfrose - ;; - -osf*) - os=-osf - ;; - -utek*) - os=-bsd - ;; - -dynix*) - os=-bsd - ;; - -acis*) - os=-aos - ;; - -atheos*) - os=-atheos - ;; - -syllable*) - os=-syllable - ;; - -386bsd) - os=-bsd - ;; - -ctix* | -uts*) - os=-sysv - ;; - -nova*) - os=-rtmk-nova - ;; - -ns2 ) - os=-nextstep2 - ;; - -nsk*) - os=-nsk - ;; - # Preserve the version number of sinix5. - -sinix5.*) - os=`echo $os | sed -e 's|sinix|sysv|'` - ;; - -sinix*) - os=-sysv4 - ;; - -tpf*) - os=-tpf - ;; - -triton*) - os=-sysv3 - ;; - -oss*) - os=-sysv3 - ;; - -svr4) - os=-sysv4 - ;; - -svr3) - os=-sysv3 - ;; - -sysvr4) - os=-sysv4 - ;; - # This must come after -sysvr4. - -sysv*) - ;; - -ose*) - os=-ose - ;; - -es1800*) - os=-ose - ;; - -xenix) - os=-xenix - ;; - -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) - os=-mint - ;; - -aros*) - os=-aros - ;; - -zvmoe) - os=-zvmoe - ;; - -dicos*) - os=-dicos - ;; - -nacl*) - ;; - -ios) - ;; - -none) - ;; - *) - # Get rid of the `-' at the beginning of $os. - os=`echo $os | sed 's/[^-]*-//'` - echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2 - exit 1 - ;; -esac -else - -# Here we handle the default operating systems that come with various machines. 
-# The value should be what the vendor currently ships out the door with their -# machine or put another way, the most popular os provided with the machine. - -# Note that if you're going to try to match "-MANUFACTURER" here (say, -# "-sun"), then you have to tell the case statement up towards the top -# that MANUFACTURER isn't an operating system. Otherwise, code above -# will signal an error saying that MANUFACTURER isn't an operating -# system, and we'll never get to this point. - -case $basic_machine in - score-*) - os=-elf - ;; - spu-*) - os=-elf - ;; - *-acorn) - os=-riscix1.2 - ;; - arm*-rebel) - os=-linux - ;; - arm*-semi) - os=-aout - ;; - c4x-* | tic4x-*) - os=-coff - ;; - c8051-*) - os=-elf - ;; - hexagon-*) - os=-elf - ;; - tic54x-*) - os=-coff - ;; - tic55x-*) - os=-coff - ;; - tic6x-*) - os=-coff - ;; - # This must come before the *-dec entry. - pdp10-*) - os=-tops20 - ;; - pdp11-*) - os=-none - ;; - *-dec | vax-*) - os=-ultrix4.2 - ;; - m68*-apollo) - os=-domain - ;; - i386-sun) - os=-sunos4.0.2 - ;; - m68000-sun) - os=-sunos3 - ;; - m68*-cisco) - os=-aout - ;; - mep-*) - os=-elf - ;; - mips*-cisco) - os=-elf - ;; - mips*-*) - os=-elf - ;; - or32-*) - os=-coff - ;; - *-tti) # must be before sparc entry or we get the wrong os. 
- os=-sysv3 - ;; - sparc-* | *-sun) - os=-sunos4.1.1 - ;; - pru-*) - os=-elf - ;; - *-be) - os=-beos - ;; - *-haiku) - os=-haiku - ;; - *-ibm) - os=-aix - ;; - *-knuth) - os=-mmixware - ;; - *-wec) - os=-proelf - ;; - *-winbond) - os=-proelf - ;; - *-oki) - os=-proelf - ;; - *-hp) - os=-hpux - ;; - *-hitachi) - os=-hiux - ;; - i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent) - os=-sysv - ;; - *-cbm) - os=-amigaos - ;; - *-dg) - os=-dgux - ;; - *-dolphin) - os=-sysv3 - ;; - m68k-ccur) - os=-rtu - ;; - m88k-omron*) - os=-luna - ;; - *-next ) - os=-nextstep - ;; - *-sequent) - os=-ptx - ;; - *-crds) - os=-unos - ;; - *-ns) - os=-genix - ;; - i370-*) - os=-mvs - ;; - *-next) - os=-nextstep3 - ;; - *-gould) - os=-sysv - ;; - *-highlevel) - os=-bsd - ;; - *-encore) - os=-bsd - ;; - *-sgi) - os=-irix - ;; - *-siemens) - os=-sysv4 - ;; - *-masscomp) - os=-rtu - ;; - f30[01]-fujitsu | f700-fujitsu) - os=-uxpv - ;; - *-rom68k) - os=-coff - ;; - *-*bug) - os=-coff - ;; - *-apple) - os=-macos - ;; - *-atari*) - os=-mint - ;; - *) - os=-none - ;; -esac -fi - -# Here we handle the case where we know the os, and the CPU type, but not the -# manufacturer. We pick the logical manufacturer. 
-vendor=unknown -case $basic_machine in - *-unknown) - case $os in - -riscix*) - vendor=acorn - ;; - -sunos*) - vendor=sun - ;; - -cnk*|-aix*) - vendor=ibm - ;; - -beos*) - vendor=be - ;; - -hpux*) - vendor=hp - ;; - -mpeix*) - vendor=hp - ;; - -hiux*) - vendor=hitachi - ;; - -unos*) - vendor=crds - ;; - -dgux*) - vendor=dg - ;; - -luna*) - vendor=omron - ;; - -genix*) - vendor=ns - ;; - -mvs* | -opened*) - vendor=ibm - ;; - -os400*) - vendor=ibm - ;; - -ptx*) - vendor=sequent - ;; - -tpf*) - vendor=ibm - ;; - -vxsim* | -vxworks* | -windiss*) - vendor=wrs - ;; - -aux*) - vendor=apple - ;; - -hms*) - vendor=hitachi - ;; - -mpw* | -macos*) - vendor=apple - ;; - -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) - vendor=atari - ;; - -vos*) - vendor=stratus - ;; - esac - basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"` - ;; -esac - -echo $basic_machine$os -exit - -# Local variables: -# eval: (add-hook 'write-file-hooks 'time-stamp) -# time-stamp-start: "timestamp='" -# time-stamp-format: "%:y-%02m-%02d" -# time-stamp-end: "'" -# End: diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/configure cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/configure --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/configure 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/configure 1970-01-01 00:00:00.000000000 +0000 @@ -1,5389 +0,0 @@ -#! /bin/sh -# Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.69 for Haskell network package 2.6.3.5. -# -# Report bugs to . -# -# -# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. -# -# -# This configure script is free software; the Free Software Foundation -# gives unlimited permission to copy, distribute and modify it. -## -------------------- ## -## M4sh Initialization. 
## -## -------------------- ## - -# Be more Bourne compatible -DUALCASE=1; export DUALCASE # for MKS sh -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi - - -as_nl=' -' -export as_nl -# Printing a long string crashes Solaris 7 /usr/bin/printf. -as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo -# Prefer a ksh shell builtin over an external printf program on Solaris, -# but without wasting forks for bash or zsh. -if test -z "$BASH_VERSION$ZSH_VERSION" \ - && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='print -r --' - as_echo_n='print -rn --' -elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='printf %s\n' - as_echo_n='printf %s' -else - if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then - as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' - as_echo_n='/usr/ucb/echo -n' - else - as_echo_body='eval expr "X$1" : "X\\(.*\\)"' - as_echo_n_body='eval - arg=$1; - case $arg in #( - *"$as_nl"*) - expr "X$arg" : "X\\(.*\\)$as_nl"; - arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; - esac; - expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" - ' - export as_echo_n_body - as_echo_n='sh -c $as_echo_n_body as_echo' - fi - export as_echo_body - as_echo='sh -c $as_echo_body as_echo' -fi - -# The user is always right. 
-if test "${PATH_SEPARATOR+set}" != set; then - PATH_SEPARATOR=: - (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { - (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || - PATH_SEPARATOR=';' - } -fi - - -# IFS -# We need space, tab and new line, in precisely that order. Quoting is -# there to prevent editors from complaining about space-tab. -# (If _AS_PATH_WALK were called with IFS unset, it would disable word -# splitting by setting IFS to empty value.) -IFS=" "" $as_nl" - -# Find who we are. Look in the path if we contain no directory separator. -as_myself= -case $0 in #(( - *[\\/]* ) as_myself=$0 ;; - *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break - done -IFS=$as_save_IFS - - ;; -esac -# We did not find ourselves, most probably we were run as `sh COMMAND' -# in which case we are not to be found in the path. -if test "x$as_myself" = x; then - as_myself=$0 -fi -if test ! -f "$as_myself"; then - $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - exit 1 -fi - -# Unset variables that we do not need and which cause bugs (e.g. in -# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" -# suppresses any "Segmentation fault" message there. '((' could -# trigger a bug in pdksh 5.2.14. -for as_var in BASH_ENV ENV MAIL MAILPATH -do eval test x\${$as_var+set} = xset \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done -PS1='$ ' -PS2='> ' -PS4='+ ' - -# NLS nuisances. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# CDPATH. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - -# Use a proper internal environment variable to ensure we don't fall - # into an infinite loop, continuously re-executing ourselves. 
- if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then - _as_can_reexec=no; export _as_can_reexec; - # We cannot yet assume a decent shell, so we have to provide a -# neutralization value for shells without unset; and this also -# works around shells that cannot unset nonexistent variables. -# Preserve -v and -x to the replacement shell. -BASH_ENV=/dev/null -ENV=/dev/null -(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV -case $- in # (((( - *v*x* | *x*v* ) as_opts=-vx ;; - *v* ) as_opts=-v ;; - *x* ) as_opts=-x ;; - * ) as_opts= ;; -esac -exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} -# Admittedly, this is quite paranoid, since all the known shells bail -# out after a failed `exec'. -$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 -as_fn_exit 255 - fi - # We don't want this to propagate to other subprocesses. - { _as_can_reexec=; unset _as_can_reexec;} -if test "x$CONFIG_SHELL" = x; then - as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which - # is contrary to our usage. Disable this feature. - alias -g '\${1+\"\$@\"}'='\"\$@\"' - setopt NO_GLOB_SUBST -else - case \`(set -o) 2>/dev/null\` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi -" - as_required="as_fn_return () { (exit \$1); } -as_fn_success () { as_fn_return 0; } -as_fn_failure () { as_fn_return 1; } -as_fn_ret_success () { return 0; } -as_fn_ret_failure () { return 1; } - -exitcode=0 -as_fn_success || { exitcode=1; echo as_fn_success failed.; } -as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } -as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } -as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } -if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : - -else - exitcode=1; echo positional parameters were not saved. 
-fi -test x\$exitcode = x0 || exit 1 -test -x / || exit 1" - as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO - as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO - eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && - test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 -test \$(( 1 + 1 )) = 2 || exit 1" - if (eval "$as_required") 2>/dev/null; then : - as_have_required=yes -else - as_have_required=no -fi - if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : - -else - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -as_found=false -for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - as_found=: - case $as_dir in #( - /*) - for as_base in sh bash ksh sh5; do - # Try only shells that exist, to save several forks. - as_shell=$as_dir/$as_base - if { test -f "$as_shell" || test -f "$as_shell.exe"; } && - { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : - CONFIG_SHELL=$as_shell as_have_required=yes - if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : - break 2 -fi -fi - done;; - esac - as_found=false -done -$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && - { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : - CONFIG_SHELL=$SHELL as_have_required=yes -fi; } -IFS=$as_save_IFS - - - if test "x$CONFIG_SHELL" != x; then : - export CONFIG_SHELL - # We cannot yet assume a decent shell, so we have to provide a -# neutralization value for shells without unset; and this also -# works around shells that cannot unset nonexistent variables. -# Preserve -v and -x to the replacement shell. 
-BASH_ENV=/dev/null -ENV=/dev/null -(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV -case $- in # (((( - *v*x* | *x*v* ) as_opts=-vx ;; - *v* ) as_opts=-v ;; - *x* ) as_opts=-x ;; - * ) as_opts= ;; -esac -exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} -# Admittedly, this is quite paranoid, since all the known shells bail -# out after a failed `exec'. -$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 -exit 255 -fi - - if test x$as_have_required = xno; then : - $as_echo "$0: This script requires a shell more modern than all" - $as_echo "$0: the shells that I found on your system." - if test x${ZSH_VERSION+set} = xset ; then - $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" - $as_echo "$0: be upgraded to zsh 4.3.4 or later." - else - $as_echo "$0: Please tell bug-autoconf@gnu.org and -$0: libraries@haskell.org about your system, including any -$0: error possibly output before this message. Then install -$0: a modern shell, or manually run the script under such a -$0: shell if you do have one." - fi - exit 1 -fi -fi -fi -SHELL=${CONFIG_SHELL-/bin/sh} -export SHELL -# Unset more variables known to interfere with behavior of common tools. -CLICOLOR_FORCE= GREP_OPTIONS= -unset CLICOLOR_FORCE GREP_OPTIONS - -## --------------------- ## -## M4sh Shell Functions. ## -## --------------------- ## -# as_fn_unset VAR -# --------------- -# Portably unset VAR. -as_fn_unset () -{ - { eval $1=; unset $1;} -} -as_unset=as_fn_unset - -# as_fn_set_status STATUS -# ----------------------- -# Set $? to STATUS, without forking. -as_fn_set_status () -{ - return $1 -} # as_fn_set_status - -# as_fn_exit STATUS -# ----------------- -# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. -as_fn_exit () -{ - set +e - as_fn_set_status $1 - exit $1 -} # as_fn_exit - -# as_fn_mkdir_p -# ------------- -# Create "$as_dir" as a directory, including parents if necessary. 
-as_fn_mkdir_p () -{ - - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || eval $as_mkdir_p || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" - - -} # as_fn_mkdir_p - -# as_fn_executable_p FILE -# ----------------------- -# Test if FILE is an executable regular file. -as_fn_executable_p () -{ - test -f "$1" && test -x "$1" -} # as_fn_executable_p -# as_fn_append VAR VALUE -# ---------------------- -# Append the text in VALUE to the end of the definition contained in VAR. Take -# advantage of any shell optimizations that allow amortized linear growth over -# repeated appends, instead of the typical quadratic growth present in naive -# implementations. -if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : - eval 'as_fn_append () - { - eval $1+=\$2 - }' -else - as_fn_append () - { - eval $1=\$$1\$2 - } -fi # as_fn_append - -# as_fn_arith ARG... -# ------------------ -# Perform arithmetic evaluation on the ARGs, and store the result in the -# global $as_val. Take advantage of shells that can avoid forks. The arguments -# must be portable across $(()) and expr. -if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : - eval 'as_fn_arith () - { - as_val=$(( $* )) - }' -else - as_fn_arith () - { - as_val=`expr "$@" || test $? 
-eq 1` - } -fi # as_fn_arith - - -# as_fn_error STATUS ERROR [LINENO LOG_FD] -# ---------------------------------------- -# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are -# provided, also output the error to LOG_FD, referencing LINENO. Then exit the -# script with STATUS, using 1 if that was 0. -as_fn_error () -{ - as_status=$1; test $as_status -eq 0 && as_status=1 - if test "$4"; then - as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 - fi - $as_echo "$as_me: error: $2" >&2 - as_fn_exit $as_status -} # as_fn_error - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then - as_basename=basename -else - as_basename=false -fi - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -as_me=`$as_basename -- "$0" || -$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X/"$0" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - -# Avoid depending upon Character Ranges. -as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - - - as_lineno_1=$LINENO as_lineno_1a=$LINENO - as_lineno_2=$LINENO as_lineno_2a=$LINENO - eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && - test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { - # Blame Lee E. McMahon (1931-1989) for sed's syntax. 
:-) - sed -n ' - p - /[$]LINENO/= - ' <$as_myself | - sed ' - s/[$]LINENO.*/&-/ - t lineno - b - :lineno - N - :loop - s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ - t loop - s/-\n.*// - ' >$as_me.lineno && - chmod +x "$as_me.lineno" || - { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } - - # If we had to re-execute with $CONFIG_SHELL, we're ensured to have - # already done that, so ensure we don't try to do so again and fall - # in an infinite loop. This has already happened in practice. - _as_can_reexec=no; export _as_can_reexec - # Don't try to exec as it changes $[0], causing all sort of problems - # (the dirname of $[0] is not the place where we might find the - # original and so on. Autoconf is especially sensitive to this). - . "./$as_me.lineno" - # Exit status is that of the last command. - exit -} - -ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in #((((( --n*) - case `echo 'xy\c'` in - *c*) ECHO_T=' ';; # ECHO_T is single tab character. - xy) ECHO_C='\c';; - *) echo `echo ksh88 bug on AIX 6.1` > /dev/null - ECHO_T=' ';; - esac;; -*) - ECHO_N='-n';; -esac - -rm -f conf$$ conf$$.exe conf$$.file -if test -d conf$$.dir; then - rm -f conf$$.dir/conf$$.file -else - rm -f conf$$.dir - mkdir conf$$.dir 2>/dev/null -fi -if (echo >conf$$.file) 2>/dev/null; then - if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -pR'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -pR' - elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln - else - as_ln_s='cp -pR' - fi -else - as_ln_s='cp -pR' -fi -rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file -rmdir conf$$.dir 2>/dev/null - -if mkdir -p . 
2>/dev/null; then - as_mkdir_p='mkdir -p "$as_dir"' -else - test -d ./-p && rmdir ./-p - as_mkdir_p=false -fi - -as_test_x='test -x' -as_executable_p=as_fn_executable_p - -# Sed expression to map a string onto a valid CPP name. -as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - -# Sed expression to map a string onto a valid variable name. -as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - - -test -n "$DJDIR" || exec 7<&0 &1 - -# Name of the host. -# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, -# so uname gets run too. -ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` - -# -# Initializations. -# -ac_default_prefix=/usr/local -ac_clean_files= -ac_config_libobj_dir=. -LIBOBJS= -cross_compiling=no -subdirs= -MFLAGS= -MAKEFLAGS= - -# Identity of this package. -PACKAGE_NAME='Haskell network package' -PACKAGE_TARNAME='network' -PACKAGE_VERSION='2.6.3.5' -PACKAGE_STRING='Haskell network package 2.6.3.5' -PACKAGE_BUGREPORT='libraries@haskell.org' -PACKAGE_URL='' - -ac_unique_file="include/HsNet.h" -# Factoring default headers for most tests. 
-ac_includes_default="\ -#include -#ifdef HAVE_SYS_TYPES_H -# include -#endif -#ifdef HAVE_SYS_STAT_H -# include -#endif -#ifdef STDC_HEADERS -# include -# include -#else -# ifdef HAVE_STDLIB_H -# include -# endif -#endif -#ifdef HAVE_STRING_H -# if !defined STDC_HEADERS && defined HAVE_MEMORY_H -# include -# endif -# include -#endif -#ifdef HAVE_STRINGS_H -# include -#endif -#ifdef HAVE_INTTYPES_H -# include -#endif -#ifdef HAVE_STDINT_H -# include -#endif -#ifdef HAVE_UNISTD_H -# include -#endif" - -ac_subst_vars='LTLIBOBJS -LIBOBJS -EXTRA_SRCS -EXTRA_LIBS -EXTRA_CPPFLAGS -CALLCONV -EGREP -GREP -CPP -OBJEXT -EXEEXT -ac_ct_CC -CPPFLAGS -LDFLAGS -CFLAGS -CC -host_os -host_vendor -host_cpu -host -build_os -build_vendor -build_cpu -build -target_alias -host_alias -build_alias -LIBS -ECHO_T -ECHO_N -ECHO_C -DEFS -mandir -localedir -libdir -psdir -pdfdir -dvidir -htmldir -infodir -docdir -oldincludedir -includedir -runstatedir -localstatedir -sharedstatedir -sysconfdir -datadir -datarootdir -libexecdir -sbindir -bindir -program_transform_name -prefix -exec_prefix -PACKAGE_URL -PACKAGE_BUGREPORT -PACKAGE_STRING -PACKAGE_VERSION -PACKAGE_TARNAME -PACKAGE_NAME -PATH_SEPARATOR -SHELL' -ac_subst_files='' -ac_user_opts=' -enable_option_checking -with_cc -' - ac_precious_vars='build_alias -host_alias -target_alias -CC -CFLAGS -LDFLAGS -LIBS -CPPFLAGS -CPP' - - -# Initialize some variables set by options. -ac_init_help= -ac_init_version=false -ac_unrecognized_opts= -ac_unrecognized_sep= -# The variables have the same names as the options, with -# dashes changed to underlines. -cache_file=/dev/null -exec_prefix=NONE -no_create= -no_recursion= -prefix=NONE -program_prefix=NONE -program_suffix=NONE -program_transform_name=s,x,x, -silent= -site= -srcdir= -verbose= -x_includes=NONE -x_libraries=NONE - -# Installation directory options. 
-# These are left unexpanded so users can "make install exec_prefix=/foo" -# and all the variables that are supposed to be based on exec_prefix -# by default will actually change. -# Use braces instead of parens because sh, perl, etc. also accept them. -# (The list follows the same order as the GNU Coding Standards.) -bindir='${exec_prefix}/bin' -sbindir='${exec_prefix}/sbin' -libexecdir='${exec_prefix}/libexec' -datarootdir='${prefix}/share' -datadir='${datarootdir}' -sysconfdir='${prefix}/etc' -sharedstatedir='${prefix}/com' -localstatedir='${prefix}/var' -runstatedir='${localstatedir}/run' -includedir='${prefix}/include' -oldincludedir='/usr/include' -docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' -infodir='${datarootdir}/info' -htmldir='${docdir}' -dvidir='${docdir}' -pdfdir='${docdir}' -psdir='${docdir}' -libdir='${exec_prefix}/lib' -localedir='${datarootdir}/locale' -mandir='${datarootdir}/man' - -ac_prev= -ac_dashdash= -for ac_option -do - # If the previous option needs an argument, assign it. - if test -n "$ac_prev"; then - eval $ac_prev=\$ac_option - ac_prev= - continue - fi - - case $ac_option in - *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; - *=) ac_optarg= ;; - *) ac_optarg=yes ;; - esac - - # Accept the important Cygnus configure options, so we can diagnose typos. 
- - case $ac_dashdash$ac_option in - --) - ac_dashdash=yes ;; - - -bindir | --bindir | --bindi | --bind | --bin | --bi) - ac_prev=bindir ;; - -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) - bindir=$ac_optarg ;; - - -build | --build | --buil | --bui | --bu) - ac_prev=build_alias ;; - -build=* | --build=* | --buil=* | --bui=* | --bu=*) - build_alias=$ac_optarg ;; - - -cache-file | --cache-file | --cache-fil | --cache-fi \ - | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) - ac_prev=cache_file ;; - -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ - | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) - cache_file=$ac_optarg ;; - - --config-cache | -C) - cache_file=config.cache ;; - - -datadir | --datadir | --datadi | --datad) - ac_prev=datadir ;; - -datadir=* | --datadir=* | --datadi=* | --datad=*) - datadir=$ac_optarg ;; - - -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ - | --dataroo | --dataro | --datar) - ac_prev=datarootdir ;; - -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ - | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) - datarootdir=$ac_optarg ;; - - -disable-* | --disable-*) - ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? 
"invalid feature name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"enable_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval enable_$ac_useropt=no ;; - - -docdir | --docdir | --docdi | --doc | --do) - ac_prev=docdir ;; - -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) - docdir=$ac_optarg ;; - - -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) - ac_prev=dvidir ;; - -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) - dvidir=$ac_optarg ;; - - -enable-* | --enable-*) - ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid feature name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"enable_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval enable_$ac_useropt=\$ac_optarg ;; - - -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ - | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ - | --exec | --exe | --ex) - ac_prev=exec_prefix ;; - -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ - | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ - | --exec=* | --exe=* | --ex=*) - exec_prefix=$ac_optarg ;; - - -gas | --gas | --ga | --g) - # Obsolete; use --with-gas. 
- with_gas=yes ;; - - -help | --help | --hel | --he | -h) - ac_init_help=long ;; - -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) - ac_init_help=recursive ;; - -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) - ac_init_help=short ;; - - -host | --host | --hos | --ho) - ac_prev=host_alias ;; - -host=* | --host=* | --hos=* | --ho=*) - host_alias=$ac_optarg ;; - - -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) - ac_prev=htmldir ;; - -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ - | --ht=*) - htmldir=$ac_optarg ;; - - -includedir | --includedir | --includedi | --included | --include \ - | --includ | --inclu | --incl | --inc) - ac_prev=includedir ;; - -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ - | --includ=* | --inclu=* | --incl=* | --inc=*) - includedir=$ac_optarg ;; - - -infodir | --infodir | --infodi | --infod | --info | --inf) - ac_prev=infodir ;; - -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) - infodir=$ac_optarg ;; - - -libdir | --libdir | --libdi | --libd) - ac_prev=libdir ;; - -libdir=* | --libdir=* | --libdi=* | --libd=*) - libdir=$ac_optarg ;; - - -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ - | --libexe | --libex | --libe) - ac_prev=libexecdir ;; - -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ - | --libexe=* | --libex=* | --libe=*) - libexecdir=$ac_optarg ;; - - -localedir | --localedir | --localedi | --localed | --locale) - ac_prev=localedir ;; - -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) - localedir=$ac_optarg ;; - - -localstatedir | --localstatedir | --localstatedi | --localstated \ - | --localstate | --localstat | --localsta | --localst | --locals) - ac_prev=localstatedir ;; - -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ - | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) - localstatedir=$ac_optarg ;; - - 
-mandir | --mandir | --mandi | --mand | --man | --ma | --m) - ac_prev=mandir ;; - -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) - mandir=$ac_optarg ;; - - -nfp | --nfp | --nf) - # Obsolete; use --without-fp. - with_fp=no ;; - - -no-create | --no-create | --no-creat | --no-crea | --no-cre \ - | --no-cr | --no-c | -n) - no_create=yes ;; - - -no-recursion | --no-recursion | --no-recursio | --no-recursi \ - | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) - no_recursion=yes ;; - - -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ - | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ - | --oldin | --oldi | --old | --ol | --o) - ac_prev=oldincludedir ;; - -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ - | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ - | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) - oldincludedir=$ac_optarg ;; - - -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) - ac_prev=prefix ;; - -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) - prefix=$ac_optarg ;; - - -program-prefix | --program-prefix | --program-prefi | --program-pref \ - | --program-pre | --program-pr | --program-p) - ac_prev=program_prefix ;; - -program-prefix=* | --program-prefix=* | --program-prefi=* \ - | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) - program_prefix=$ac_optarg ;; - - -program-suffix | --program-suffix | --program-suffi | --program-suff \ - | --program-suf | --program-su | --program-s) - ac_prev=program_suffix ;; - -program-suffix=* | --program-suffix=* | --program-suffi=* \ - | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) - program_suffix=$ac_optarg ;; - - -program-transform-name | --program-transform-name \ - | --program-transform-nam | --program-transform-na \ - | --program-transform-n | --program-transform- \ - | --program-transform | 
--program-transfor \ - | --program-transfo | --program-transf \ - | --program-trans | --program-tran \ - | --progr-tra | --program-tr | --program-t) - ac_prev=program_transform_name ;; - -program-transform-name=* | --program-transform-name=* \ - | --program-transform-nam=* | --program-transform-na=* \ - | --program-transform-n=* | --program-transform-=* \ - | --program-transform=* | --program-transfor=* \ - | --program-transfo=* | --program-transf=* \ - | --program-trans=* | --program-tran=* \ - | --progr-tra=* | --program-tr=* | --program-t=*) - program_transform_name=$ac_optarg ;; - - -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) - ac_prev=pdfdir ;; - -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) - pdfdir=$ac_optarg ;; - - -psdir | --psdir | --psdi | --psd | --ps) - ac_prev=psdir ;; - -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) - psdir=$ac_optarg ;; - - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil) - silent=yes ;; - - -runstatedir | --runstatedir | --runstatedi | --runstated \ - | --runstate | --runstat | --runsta | --runst | --runs \ - | --run | --ru | --r) - ac_prev=runstatedir ;; - -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ - | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ - | --run=* | --ru=* | --r=*) - runstatedir=$ac_optarg ;; - - -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) - ac_prev=sbindir ;; - -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ - | --sbi=* | --sb=*) - sbindir=$ac_optarg ;; - - -sharedstatedir | --sharedstatedir | --sharedstatedi \ - | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ - | --sharedst | --shareds | --shared | --share | --shar \ - | --sha | --sh) - ac_prev=sharedstatedir ;; - -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ - | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ - | --sharedst=* | --shareds=* | 
--shared=* | --share=* | --shar=* \ - | --sha=* | --sh=*) - sharedstatedir=$ac_optarg ;; - - -site | --site | --sit) - ac_prev=site ;; - -site=* | --site=* | --sit=*) - site=$ac_optarg ;; - - -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) - ac_prev=srcdir ;; - -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) - srcdir=$ac_optarg ;; - - -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ - | --syscon | --sysco | --sysc | --sys | --sy) - ac_prev=sysconfdir ;; - -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ - | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) - sysconfdir=$ac_optarg ;; - - -target | --target | --targe | --targ | --tar | --ta | --t) - ac_prev=target_alias ;; - -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) - target_alias=$ac_optarg ;; - - -v | -verbose | --verbose | --verbos | --verbo | --verb) - verbose=yes ;; - - -version | --version | --versio | --versi | --vers | -V) - ac_init_version=: ;; - - -with-* | --with-*) - ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid package name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"with_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval with_$ac_useropt=\$ac_optarg ;; - - -without-* | --without-*) - ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? 
"invalid package name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"with_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval with_$ac_useropt=no ;; - - --x) - # Obsolete; use --with-x. - with_x=yes ;; - - -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ - | --x-incl | --x-inc | --x-in | --x-i) - ac_prev=x_includes ;; - -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ - | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) - x_includes=$ac_optarg ;; - - -x-libraries | --x-libraries | --x-librarie | --x-librari \ - | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) - ac_prev=x_libraries ;; - -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ - | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) - x_libraries=$ac_optarg ;; - - -*) as_fn_error $? "unrecognized option: \`$ac_option' -Try \`$0 --help' for more information" - ;; - - *=*) - ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` - # Reject names that are not valid shell variable names. - case $ac_envvar in #( - '' | [0-9]* | *[!_$as_cr_alnum]* ) - as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; - esac - eval $ac_envvar=\$ac_optarg - export $ac_envvar ;; - - *) - # FIXME: should be removed in autoconf 3.0. - $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 - expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && - $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 - : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" - ;; - - esac -done - -if test -n "$ac_prev"; then - ac_option=--`echo $ac_prev | sed 's/_/-/g'` - as_fn_error $? 
"missing argument to $ac_option" -fi - -if test -n "$ac_unrecognized_opts"; then - case $enable_option_checking in - no) ;; - fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; - *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; - esac -fi - -# Check all directory arguments for consistency. -for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ - datadir sysconfdir sharedstatedir localstatedir includedir \ - oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ - libdir localedir mandir runstatedir -do - eval ac_val=\$$ac_var - # Remove trailing slashes. - case $ac_val in - */ ) - ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` - eval $ac_var=\$ac_val;; - esac - # Be sure to have absolute directory names. - case $ac_val in - [\\/$]* | ?:[\\/]* ) continue;; - NONE | '' ) case $ac_var in *prefix ) continue;; esac;; - esac - as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" -done - -# There might be people who depend on the old broken behavior: `$host' -# used to hold the argument of --host etc. -# FIXME: To remove some day. -build=$build_alias -host=$host_alias -target=$target_alias - -# FIXME: To remove some day. -if test "x$host_alias" != x; then - if test "x$build_alias" = x; then - cross_compiling=maybe - elif test "x$build_alias" != "x$host_alias"; then - cross_compiling=yes - fi -fi - -ac_tool_prefix= -test -n "$host_alias" && ac_tool_prefix=$host_alias- - -test "$silent" = yes && exec 6>/dev/null - - -ac_pwd=`pwd` && test -n "$ac_pwd" && -ac_ls_di=`ls -di .` && -ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || - as_fn_error $? "working directory cannot be determined" -test "X$ac_ls_di" = "X$ac_pwd_ls_di" || - as_fn_error $? "pwd does not report name of working directory" - - -# Find the source files, if location was not specified. 
-if test -z "$srcdir"; then - ac_srcdir_defaulted=yes - # Try the directory containing this script, then the parent directory. - ac_confdir=`$as_dirname -- "$as_myself" || -$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_myself" : 'X\(//\)[^/]' \| \ - X"$as_myself" : 'X\(//\)$' \| \ - X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_myself" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - srcdir=$ac_confdir - if test ! -r "$srcdir/$ac_unique_file"; then - srcdir=.. - fi -else - ac_srcdir_defaulted=no -fi -if test ! -r "$srcdir/$ac_unique_file"; then - test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." - as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" -fi -ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" -ac_abs_confdir=`( - cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" - pwd)` -# When building in place, set srcdir=. -if test "$ac_abs_confdir" = "$ac_pwd"; then - srcdir=. -fi -# Remove unnecessary trailing slashes from srcdir. -# Double slashes in file names in object file debugging info -# mess up M-x gdb in Emacs. -case $srcdir in -*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; -esac -for ac_var in $ac_precious_vars; do - eval ac_env_${ac_var}_set=\${${ac_var}+set} - eval ac_env_${ac_var}_value=\$${ac_var} - eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} - eval ac_cv_env_${ac_var}_value=\$${ac_var} -done - -# -# Report the --help message. -# -if test "$ac_init_help" = "long"; then - # Omit some internal or obsolete options to make the list less imposing. - # This message is too long to be a string in the A/UX 3.1 sh. - cat <<_ACEOF -\`configure' configures Haskell network package 2.6.3.5 to adapt to many kinds of systems. - -Usage: $0 [OPTION]... [VAR=VALUE]... 
- -To assign environment variables (e.g., CC, CFLAGS...), specify them as -VAR=VALUE. See below for descriptions of some of the useful variables. - -Defaults for the options are specified in brackets. - -Configuration: - -h, --help display this help and exit - --help=short display options specific to this package - --help=recursive display the short help of all the included packages - -V, --version display version information and exit - -q, --quiet, --silent do not print \`checking ...' messages - --cache-file=FILE cache test results in FILE [disabled] - -C, --config-cache alias for \`--cache-file=config.cache' - -n, --no-create do not create output files - --srcdir=DIR find the sources in DIR [configure dir or \`..'] - -Installation directories: - --prefix=PREFIX install architecture-independent files in PREFIX - [$ac_default_prefix] - --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX - [PREFIX] - -By default, \`make install' will install all the files in -\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify -an installation prefix other than \`$ac_default_prefix' using \`--prefix', -for instance \`--prefix=\$HOME'. - -For better control, use the options below. 
- -Fine tuning of the installation directories: - --bindir=DIR user executables [EPREFIX/bin] - --sbindir=DIR system admin executables [EPREFIX/sbin] - --libexecdir=DIR program executables [EPREFIX/libexec] - --sysconfdir=DIR read-only single-machine data [PREFIX/etc] - --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] - --localstatedir=DIR modifiable single-machine data [PREFIX/var] - --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run] - --libdir=DIR object code libraries [EPREFIX/lib] - --includedir=DIR C header files [PREFIX/include] - --oldincludedir=DIR C header files for non-gcc [/usr/include] - --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] - --datadir=DIR read-only architecture-independent data [DATAROOTDIR] - --infodir=DIR info documentation [DATAROOTDIR/info] - --localedir=DIR locale-dependent data [DATAROOTDIR/locale] - --mandir=DIR man documentation [DATAROOTDIR/man] - --docdir=DIR documentation root [DATAROOTDIR/doc/network] - --htmldir=DIR html documentation [DOCDIR] - --dvidir=DIR dvi documentation [DOCDIR] - --pdfdir=DIR pdf documentation [DOCDIR] - --psdir=DIR ps documentation [DOCDIR] -_ACEOF - - cat <<\_ACEOF - -System types: - --build=BUILD configure for building on BUILD [guessed] - --host=HOST cross-compile to build programs to run on HOST [BUILD] -_ACEOF -fi - -if test -n "$ac_init_help"; then - case $ac_init_help in - short | recursive ) echo "Configuration of Haskell network package 2.6.3.5:";; - esac - cat <<\_ACEOF - -Optional Packages: - --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] - --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) -C compiler - -Some influential environment variables: - CC C compiler command - CFLAGS C compiler flags - LDFLAGS linker flags, e.g. -L if you have libraries in a - nonstandard directory - LIBS libraries to pass to the linker, e.g. -l - CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. 
-I if - you have headers in a nonstandard directory - CPP C preprocessor - -Use these variables to override the choices made by `configure' or to help -it to find libraries and programs with nonstandard names/locations. - -Report bugs to . -_ACEOF -ac_status=$? -fi - -if test "$ac_init_help" = "recursive"; then - # If there are subdirs, report their specific --help. - for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue - test -d "$ac_dir" || - { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || - continue - ac_builddir=. - -case "$ac_dir" in -.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; -*) - ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` - # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` - case $ac_top_builddir_sub in - "") ac_top_builddir_sub=. ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix - -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. - ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; -esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix - - cd "$ac_dir" || { ac_status=$?; continue; } - # Check for guested configure. 
- if test -f "$ac_srcdir/configure.gnu"; then - echo && - $SHELL "$ac_srcdir/configure.gnu" --help=recursive - elif test -f "$ac_srcdir/configure"; then - echo && - $SHELL "$ac_srcdir/configure" --help=recursive - else - $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 - fi || ac_status=$? - cd "$ac_pwd" || { ac_status=$?; break; } - done -fi - -test -n "$ac_init_help" && exit $ac_status -if $ac_init_version; then - cat <<\_ACEOF -Haskell network package configure 2.6.3.5 -generated by GNU Autoconf 2.69 - -Copyright (C) 2012 Free Software Foundation, Inc. -This configure script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it. -_ACEOF - exit -fi - -## ------------------------ ## -## Autoconf initialization. ## -## ------------------------ ## - -# ac_fn_c_try_compile LINENO -# -------------------------- -# Try to compile conftest.$ac_ext, and return whether this succeeded. -ac_fn_c_try_compile () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext - if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_c_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest.$ac_objext; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_compile - -# ac_fn_c_try_cpp LINENO -# ---------------------- -# Try to preprocess conftest.$ac_ext, and return whether this succeeded. -ac_fn_c_try_cpp () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - if { { ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } > conftest.i && { - test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || - test ! -s conftest.err - }; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_cpp - -# ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES -# ------------------------------------------------------- -# Tests whether HEADER exists, giving a warning if it cannot be compiled using -# the include files in INCLUDES and setting the cache variable VAR -# accordingly. -ac_fn_c_check_header_mongrel () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - if eval \${$3+:} false; then : - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... 
" >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } -else - # Is the header compilable? -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 -$as_echo_n "checking $2 usability... " >&6; } -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$4 -#include <$2> -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_header_compiler=yes -else - ac_header_compiler=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 -$as_echo "$ac_header_compiler" >&6; } - -# Is the header present? -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 -$as_echo_n "checking $2 presence... " >&6; } -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include <$2> -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - ac_header_preproc=yes -else - ac_header_preproc=no -fi -rm -f conftest.err conftest.i conftest.$ac_ext -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 -$as_echo "$ac_header_preproc" >&6; } - -# So? What about this header? -case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #(( - yes:no: ) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 -$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 -$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} - ;; - no:yes:* ) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 -$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" 
>&5 -$as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 -$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 -$as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 -$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} -( $as_echo "## ------------------------------------ ## -## Report this to libraries@haskell.org ## -## ------------------------------------ ##" - ) | sed "s/^/$as_me: WARNING: /" >&2 - ;; -esac - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - eval "$3=\$ac_header_compiler" -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_header_mongrel - -# ac_fn_c_try_run LINENO -# ---------------------- -# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes -# that executables *can* be run. -ac_fn_c_try_run () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' - { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; }; then : - ac_retval=0 -else - $as_echo "$as_me: program exited with status $ac_status" >&5 - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=$ac_status -fi - rm -rf conftest.dSYM conftest_ipa8_conftest.oo - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_run - -# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES -# ------------------------------------------------------- -# Tests whether HEADER exists and can be compiled using the include files in -# INCLUDES, setting the cache variable VAR accordingly. -ac_fn_c_check_header_compile () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$4 -#include <$2> -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - eval "$3=yes" -else - eval "$3=no" -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_header_compile - -# ac_fn_c_try_link LINENO -# ----------------------- -# Try to link conftest.$ac_ext, and return whether this succeeded. 
-ac_fn_c_try_link () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext conftest$ac_exeext - if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && { - test "$cross_compiling" = yes || - test -x conftest$ac_exeext - }; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information - # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would - # interfere with the next link command; also delete a directory that is - # left behind by Apple's compiler. We do this before executing the actions. - rm -rf conftest.dSYM conftest_ipa8_conftest.oo - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_link - -# ac_fn_c_check_func LINENO FUNC VAR -# ---------------------------------- -# Tests whether FUNC exists, setting the cache variable VAR accordingly -ac_fn_c_check_func () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -/* Define $2 to an innocuous variant, in case declares $2. 
- For example, HP-UX 11i declares gettimeofday. */ -#define $2 innocuous_$2 - -/* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $2 (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ - -#ifdef __STDC__ -# include -#else -# include -#endif - -#undef $2 - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char $2 (); -/* The GNU C library defines this for functions which it implements - to always fail with ENOSYS. Some functions are actually named - something starting with __ and the normal name is an alias. */ -#if defined __stub_$2 || defined __stub___$2 -choke me -#endif - -int -main () -{ -return $2 (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - eval "$3=yes" -else - eval "$3=no" -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_func - -# ac_fn_c_check_member LINENO AGGR MEMBER VAR INCLUDES -# ---------------------------------------------------- -# Tries to find if the field MEMBER exists in type AGGR, after including -# INCLUDES, setting cache variable VAR accordingly. -ac_fn_c_check_member () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2.$3" >&5 -$as_echo_n "checking for $2.$3... " >&6; } -if eval \${$4+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ -$5 -int -main () -{ -static $2 ac_aggr; -if (ac_aggr.$3) -return 0; - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - eval "$4=yes" -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$5 -int -main () -{ -static $2 ac_aggr; -if (sizeof ac_aggr.$3) -return 0; - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - eval "$4=yes" -else - eval "$4=no" -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -eval ac_res=\$$4 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_member - -# ac_fn_c_check_decl LINENO SYMBOL VAR INCLUDES -# --------------------------------------------- -# Tests whether SYMBOL is declared in INCLUDES, setting cache variable VAR -# accordingly. -ac_fn_c_check_decl () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - as_decl_name=`echo $2|sed 's/ *(.*//'` - as_decl_use=`echo $2|sed -e 's/(/((/' -e 's/)/) 0&/' -e 's/,/) 0& (/g'` - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $as_decl_name is declared" >&5 -$as_echo_n "checking whether $as_decl_name is declared... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ -$4 -int -main () -{ -#ifndef $as_decl_name -#ifdef __cplusplus - (void) $as_decl_use; -#else - (void) $as_decl_name; -#endif -#endif - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - eval "$3=yes" -else - eval "$3=no" -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_decl -cat >config.log <<_ACEOF -This file contains any messages produced by compilers while -running configure, to aid debugging if configure makes a mistake. - -It was created by Haskell network package $as_me 2.6.3.5, which was -generated by GNU Autoconf 2.69. Invocation command line was - - $ $0 $@ - -_ACEOF -exec 5>>config.log -{ -cat <<_ASUNAME -## --------- ## -## Platform. ## -## --------- ## - -hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` -uname -m = `(uname -m) 2>/dev/null || echo unknown` -uname -r = `(uname -r) 2>/dev/null || echo unknown` -uname -s = `(uname -s) 2>/dev/null || echo unknown` -uname -v = `(uname -v) 2>/dev/null || echo unknown` - -/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` -/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` - -/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` -/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` -/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` -/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` -/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` -/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` -/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` - -_ASUNAME - -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- $as_echo "PATH: $as_dir" - done -IFS=$as_save_IFS - -} >&5 - -cat >&5 <<_ACEOF - - -## ----------- ## -## Core tests. ## -## ----------- ## - -_ACEOF - - -# Keep a trace of the command line. -# Strip out --no-create and --no-recursion so they do not pile up. -# Strip out --silent because we don't want to record it for future runs. -# Also quote any args containing shell meta-characters. -# Make two passes to allow for proper duplicate-argument suppression. -ac_configure_args= -ac_configure_args0= -ac_configure_args1= -ac_must_keep_next=false -for ac_pass in 1 2 -do - for ac_arg - do - case $ac_arg in - -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil) - continue ;; - *\'*) - ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; - esac - case $ac_pass in - 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; - 2) - as_fn_append ac_configure_args1 " '$ac_arg'" - if test $ac_must_keep_next = true; then - ac_must_keep_next=false # Got value, back to normal. - else - case $ac_arg in - *=* | --config-cache | -C | -disable-* | --disable-* \ - | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ - | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ - | -with-* | --with-* | -without-* | --without-* | --x) - case "$ac_configure_args0 " in - "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; - esac - ;; - -* ) ac_must_keep_next=true ;; - esac - fi - as_fn_append ac_configure_args " '$ac_arg'" - ;; - esac - done -done -{ ac_configure_args0=; unset ac_configure_args0;} -{ ac_configure_args1=; unset ac_configure_args1;} - -# When interrupted or exit'd, cleanup temporary files, and complete -# config.log. We remove comments because anyway the quotes in there -# would cause problems or look ugly. -# WARNING: Use '\'' to represent an apostrophe within the trap. -# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. 
-trap 'exit_status=$? - # Save into config.log some information that might help in debugging. - { - echo - - $as_echo "## ---------------- ## -## Cache variables. ## -## ---------------- ##" - echo - # The following way of writing the cache mishandles newlines in values, -( - for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do - eval ac_val=\$$ac_var - case $ac_val in #( - *${as_nl}*) - case $ac_var in #( - *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 -$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; - esac - case $ac_var in #( - _ | IFS | as_nl) ;; #( - BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( - *) { eval $ac_var=; unset $ac_var;} ;; - esac ;; - esac - done - (set) 2>&1 | - case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( - *${as_nl}ac_space=\ *) - sed -n \ - "s/'\''/'\''\\\\'\'''\''/g; - s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" - ;; #( - *) - sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" - ;; - esac | - sort -) - echo - - $as_echo "## ----------------- ## -## Output variables. ## -## ----------------- ##" - echo - for ac_var in $ac_subst_vars - do - eval ac_val=\$$ac_var - case $ac_val in - *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; - esac - $as_echo "$ac_var='\''$ac_val'\''" - done | sort - echo - - if test -n "$ac_subst_files"; then - $as_echo "## ------------------- ## -## File substitutions. ## -## ------------------- ##" - echo - for ac_var in $ac_subst_files - do - eval ac_val=\$$ac_var - case $ac_val in - *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; - esac - $as_echo "$ac_var='\''$ac_val'\''" - done | sort - echo - fi - - if test -s confdefs.h; then - $as_echo "## ----------- ## -## confdefs.h. 
## -## ----------- ##" - echo - cat confdefs.h - echo - fi - test "$ac_signal" != 0 && - $as_echo "$as_me: caught signal $ac_signal" - $as_echo "$as_me: exit $exit_status" - } >&5 - rm -f core *.core core.conftest.* && - rm -f -r conftest* confdefs* conf$$* $ac_clean_files && - exit $exit_status -' 0 -for ac_signal in 1 2 13 15; do - trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal -done -ac_signal=0 - -# confdefs.h avoids OS command line length limits that DEFS can exceed. -rm -f -r conftest* confdefs.h - -$as_echo "/* confdefs.h */" > confdefs.h - -# Predefined preprocessor variables. - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_NAME "$PACKAGE_NAME" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_TARNAME "$PACKAGE_TARNAME" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_VERSION "$PACKAGE_VERSION" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_STRING "$PACKAGE_STRING" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_URL "$PACKAGE_URL" -_ACEOF - - -# Let the site file select an alternate cache file if it wants to. -# Prefer an explicitly selected file to automatically selected ones. -ac_site_file1=NONE -ac_site_file2=NONE -if test -n "$CONFIG_SITE"; then - # We do not want a PATH search for config.site. 
- case $CONFIG_SITE in #(( - -*) ac_site_file1=./$CONFIG_SITE;; - */*) ac_site_file1=$CONFIG_SITE;; - *) ac_site_file1=./$CONFIG_SITE;; - esac -elif test "x$prefix" != xNONE; then - ac_site_file1=$prefix/share/config.site - ac_site_file2=$prefix/etc/config.site -else - ac_site_file1=$ac_default_prefix/share/config.site - ac_site_file2=$ac_default_prefix/etc/config.site -fi -for ac_site_file in "$ac_site_file1" "$ac_site_file2" -do - test "x$ac_site_file" = xNONE && continue - if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 -$as_echo "$as_me: loading site script $ac_site_file" >&6;} - sed 's/^/| /' "$ac_site_file" >&5 - . "$ac_site_file" \ - || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "failed to load site script $ac_site_file -See \`config.log' for more details" "$LINENO" 5; } - fi -done - -if test -r "$cache_file"; then - # Some versions of bash will fail to source /dev/null (special files - # actually), so we avoid doing that. DJGPP emulates it as a regular file. - if test /dev/null != "$cache_file" && test -f "$cache_file"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 -$as_echo "$as_me: loading cache $cache_file" >&6;} - case $cache_file in - [\\/]* | ?:[\\/]* ) . "$cache_file";; - *) . "./$cache_file";; - esac - fi -else - { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 -$as_echo "$as_me: creating cache $cache_file" >&6;} - >$cache_file -fi - -# Check that the precious variables saved in the cache have kept the same -# value. 
-ac_cache_corrupted=false -for ac_var in $ac_precious_vars; do - eval ac_old_set=\$ac_cv_env_${ac_var}_set - eval ac_new_set=\$ac_env_${ac_var}_set - eval ac_old_val=\$ac_cv_env_${ac_var}_value - eval ac_new_val=\$ac_env_${ac_var}_value - case $ac_old_set,$ac_new_set in - set,) - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 -$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} - ac_cache_corrupted=: ;; - ,set) - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 -$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} - ac_cache_corrupted=: ;; - ,);; - *) - if test "x$ac_old_val" != "x$ac_new_val"; then - # differences in whitespace do not lead to failure. - ac_old_val_w=`echo x $ac_old_val` - ac_new_val_w=`echo x $ac_new_val` - if test "$ac_old_val_w" != "$ac_new_val_w"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 -$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} - ac_cache_corrupted=: - else - { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 -$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} - eval $ac_var=\$ac_old_val - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 -$as_echo "$as_me: former value: \`$ac_old_val'" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 -$as_echo "$as_me: current value: \`$ac_new_val'" >&2;} - fi;; - esac - # Pass precious variables to config.status. - if test "$ac_new_set" = set; then - case $ac_new_val in - *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; - *) ac_arg=$ac_var=$ac_new_val ;; - esac - case " $ac_configure_args " in - *" '$ac_arg' "*) ;; # Avoid dups. 
Use of quotes ensures accuracy. - *) as_fn_append ac_configure_args " '$ac_arg'" ;; - esac - fi -done -if $ac_cache_corrupted; then - { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 -$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} - as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 -fi -## -------------------- ## -## Main body of script. ## -## -------------------- ## - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - - -ac_includes_default="$ac_includes_default -#ifdef HAVE_SYS_SOCKET_H -# include -#endif -#ifdef HAVE_NETINET_IN_H -# include -#endif -#ifdef HAVE_NETDB_H -# include -#endif -#ifdef HAVE_WINSOCK2_H -# include -#endif -#ifdef HAVE_WS2TCPIP_H -# include -// fix for MingW not defining IPV6_V6ONLY -# define IPV6_V6ONLY 27 -#endif" - -# Safety check: Ensure that we are in the correct source directory. - - -ac_config_headers="$ac_config_headers include/HsNetworkConfig.h" - - -ac_aux_dir= -for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do - if test -f "$ac_dir/install-sh"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/install-sh -c" - break - elif test -f "$ac_dir/install.sh"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/install.sh -c" - break - elif test -f "$ac_dir/shtool"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/shtool install -c" - break - fi -done -if test -z "$ac_aux_dir"; then - as_fn_error $? 
"cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5 -fi - -# These three variables are undocumented and unsupported, -# and are intended to be withdrawn in a future Autoconf release. -# They can cause serious problems if a builder's source tree is in a directory -# whose full name contains unusual characters. -ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. -ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. -ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. - - -# Make sure we can run config.sub. -$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 || - as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5 - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5 -$as_echo_n "checking build system type... " >&6; } -if ${ac_cv_build+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_build_alias=$build_alias -test "x$ac_build_alias" = x && - ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"` -test "x$ac_build_alias" = x && - as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5 -ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` || - as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5 - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5 -$as_echo "$ac_cv_build" >&6; } -case $ac_cv_build in -*-*-*) ;; -*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;; -esac -build=$ac_cv_build -ac_save_IFS=$IFS; IFS='-' -set x $ac_cv_build -shift -build_cpu=$1 -build_vendor=$2 -shift; shift -# Remember, the first character of IFS is used to create $*, -# except with old shells: -build_os=$* -IFS=$ac_save_IFS -case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5 -$as_echo_n "checking host system type... 
" >&6; } -if ${ac_cv_host+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test "x$host_alias" = x; then - ac_cv_host=$ac_cv_build -else - ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` || - as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5 -fi - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5 -$as_echo "$ac_cv_host" >&6; } -case $ac_cv_host in -*-*-*) ;; -*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;; -esac -host=$ac_cv_host -ac_save_IFS=$IFS; IFS='-' -set x $ac_cv_host -shift -host_cpu=$1 -host_vendor=$2 -shift; shift -# Remember, the first character of IFS is used to create $*, -# except with old shells: -host_os=$* -IFS=$ac_save_IFS -case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac - - - - -# Check whether --with-cc was given. -if test "${with_cc+set}" = set; then : - withval=$with_cc; CC=$withval -fi - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. -set dummy ${ac_tool_prefix}gcc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_CC="${ac_tool_prefix}gcc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_CC"; then - ac_ct_CC=$CC - # Extract the first word of "gcc", so it can be a program name with args. -set dummy gcc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_CC"; then - ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_ac_ct_CC="gcc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_CC=$ac_cv_prog_ac_ct_CC -if test -n "$ac_ct_CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 -$as_echo "$ac_ct_CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_CC" = x; then - CC="" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - CC=$ac_ct_CC - fi -else - CC="$ac_cv_prog_CC" -fi - -if test -z "$CC"; then - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. -set dummy ${ac_tool_prefix}cc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_CC="${ac_tool_prefix}cc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - fi -fi -if test -z "$CC"; then - # Extract the first word of "cc", so it can be a program name with args. -set dummy cc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else - ac_prog_rejected=no -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then - ac_prog_rejected=yes - continue - fi - ac_cv_prog_CC="cc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -if test $ac_prog_rejected = yes; then - # We found a bogon in the path, so make sure we never use it. - set dummy $ac_cv_prog_CC - shift - if test $# != 0; then - # We chose a different compiler from the bogus one. - # However, it has the same basename, so the bogon will be chosen - # first if we set CC to just the basename; use the full file name. 
- shift - ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" - fi -fi -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$CC"; then - if test -n "$ac_tool_prefix"; then - for ac_prog in cl.exe - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_CC="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$CC" && break - done -fi -if test -z "$CC"; then - ac_ct_CC=$CC - for ac_prog in cl.exe -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_CC"; then - ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. 
-else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_ac_ct_CC="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_CC=$ac_cv_prog_ac_ct_CC -if test -n "$ac_ct_CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 -$as_echo "$ac_ct_CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_CC" && break -done - - if test "x$ac_ct_CC" = x; then - CC="" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - CC=$ac_ct_CC - fi -fi - -fi - - -test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "no acceptable C compiler found in \$PATH -See \`config.log' for more details" "$LINENO" 5; } - -# Provide some information about the compiler. -$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 -set X $ac_compile -ac_compiler=$2 -for ac_option in --version -v -V -qversion; do - { { ac_try="$ac_compiler $ac_option >&5" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compiler $ac_option >&5") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - sed '10a\ -... rest of stderr output deleted ... 
- 10q' conftest.err >conftest.er1 - cat conftest.er1 >&5 - fi - rm -f conftest.er1 conftest.err - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -done - -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -ac_clean_files_save=$ac_clean_files -ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" -# Try to create an executable without -o first, disregard a.out. -# It will help us diagnose broken compilers, and finding out an intuition -# of exeext. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 -$as_echo_n "checking whether the C compiler works... " >&6; } -ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` - -# The possible output files: -ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" - -ac_rmfiles= -for ac_file in $ac_files -do - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; - * ) ac_rmfiles="$ac_rmfiles $ac_file";; - esac -done -rm -f $ac_rmfiles - -if { { ac_try="$ac_link_default" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link_default") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. -# So ignore a value of `no', otherwise this would lead to `EXEEXT = no' -# in a Makefile. We should not override ac_cv_exeext if it was cached, -# so that the user can short-circuit this test for compilers unknown to -# Autoconf. 
-for ac_file in $ac_files '' -do - test -f "$ac_file" || continue - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) - ;; - [ab].out ) - # We found the default executable, but exeext='' is most - # certainly right. - break;; - *.* ) - if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; - then :; else - ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` - fi - # We set ac_cv_exeext here because the later test for it is not - # safe: cross compilers may not add the suffix if given an `-o' - # argument, so we may need to know it at that point already. - # Even if this section looks crufty: it has the advantage of - # actually working. - break;; - * ) - break;; - esac -done -test "$ac_cv_exeext" = no && ac_cv_exeext= - -else - ac_file='' -fi -if test -z "$ac_file"; then : - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -$as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - -{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error 77 "C compiler cannot create executables -See \`config.log' for more details" "$LINENO" 5; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 -$as_echo_n "checking for C compiler default output file name... " >&6; } -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 -$as_echo "$ac_file" >&6; } -ac_exeext=$ac_cv_exeext - -rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out -ac_clean_files=$ac_clean_files_save -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 -$as_echo_n "checking for suffix of executables... 
" >&6; } -if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - # If both `conftest.exe' and `conftest' are `present' (well, observable) -# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will -# work properly (i.e., refer to `conftest.exe'), while it won't with -# `rm'. -for ac_file in conftest.exe conftest conftest.*; do - test -f "$ac_file" || continue - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; - *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` - break;; - * ) break;; - esac -done -else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot compute suffix of executables: cannot compile and link -See \`config.log' for more details" "$LINENO" 5; } -fi -rm -f conftest conftest$ac_cv_exeext -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 -$as_echo "$ac_cv_exeext" >&6; } - -rm -f conftest.$ac_ext -EXEEXT=$ac_cv_exeext -ac_exeext=$EXEEXT -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -int -main () -{ -FILE *f = fopen ("conftest.out", "w"); - return ferror (f) || fclose (f) != 0; - - ; - return 0; -} -_ACEOF -ac_clean_files="$ac_clean_files conftest.out" -# Check that the compiler produces executables we can run. If not, either -# the compiler is broken, or we cross compile. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 -$as_echo_n "checking whether we are cross compiling... 
" >&6; } -if test "$cross_compiling" != yes; then - { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } - if { ac_try='./conftest$ac_cv_exeext' - { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; }; then - cross_compiling=no - else - if test "$cross_compiling" = maybe; then - cross_compiling=yes - else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot run C compiled programs. -If you meant to cross compile, use \`--host'. -See \`config.log' for more details" "$LINENO" 5; } - fi - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 -$as_echo "$cross_compiling" >&6; } - -rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out -ac_clean_files=$ac_clean_files_save -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 -$as_echo_n "checking for suffix of object files... " >&6; } -if ${ac_cv_objext+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -rm -f conftest.o conftest.obj -if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>&5 - ac_status=$? 
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - for ac_file in conftest.o conftest.obj conftest.*; do - test -f "$ac_file" || continue; - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; - *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` - break;; - esac -done -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - -{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot compute suffix of object files: cannot compile -See \`config.log' for more details" "$LINENO" 5; } -fi -rm -f conftest.$ac_cv_objext conftest.$ac_ext -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 -$as_echo "$ac_cv_objext" >&6; } -OBJEXT=$ac_cv_objext -ac_objext=$OBJEXT -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 -$as_echo_n "checking whether we are using the GNU C compiler... " >&6; } -if ${ac_cv_c_compiler_gnu+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ -#ifndef __GNUC__ - choke me -#endif - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_compiler_gnu=yes -else - ac_compiler_gnu=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -ac_cv_c_compiler_gnu=$ac_compiler_gnu - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 -$as_echo "$ac_cv_c_compiler_gnu" >&6; } -if test $ac_compiler_gnu = yes; then - GCC=yes -else - GCC= -fi -ac_test_CFLAGS=${CFLAGS+set} -ac_save_CFLAGS=$CFLAGS -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 -$as_echo_n "checking whether $CC accepts -g... 
" >&6; } -if ${ac_cv_prog_cc_g+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_save_c_werror_flag=$ac_c_werror_flag - ac_c_werror_flag=yes - ac_cv_prog_cc_g=no - CFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_g=yes -else - CFLAGS="" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - -else - ac_c_werror_flag=$ac_save_c_werror_flag - CFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_g=yes -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - ac_c_werror_flag=$ac_save_c_werror_flag -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 -$as_echo "$ac_cv_prog_cc_g" >&6; } -if test "$ac_test_CFLAGS" = set; then - CFLAGS=$ac_save_CFLAGS -elif test $ac_cv_prog_cc_g = yes; then - if test "$GCC" = yes; then - CFLAGS="-g -O2" - else - CFLAGS="-g" - fi -else - if test "$GCC" = yes; then - CFLAGS="-O2" - else - CFLAGS= - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 -$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } -if ${ac_cv_prog_cc_c89+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_cv_prog_cc_c89=no -ac_save_CC=$CC -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -#include -struct stat; -/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. 
*/ -struct buf { int x; }; -FILE * (*rcsopen) (struct buf *, struct stat *, int); -static char *e (p, i) - char **p; - int i; -{ - return p[i]; -} -static char *f (char * (*g) (char **, int), char **p, ...) -{ - char *s; - va_list v; - va_start (v,p); - s = g (p, va_arg (v,int)); - va_end (v); - return s; -} - -/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has - function prototypes and stuff, but not '\xHH' hex character constants. - These don't provoke an error unfortunately, instead are silently treated - as 'x'. The following induces an error, until -std is added to get - proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an - array size at least. It's necessary to write '\x00'==0 to get something - that's true only with -std. */ -int osf4_cc_array ['\x00' == 0 ? 1 : -1]; - -/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters - inside strings and character constants. */ -#define FOO(x) 'x' -int xlc6_cc_array[FOO(a) == 'x' ? 
1 : -1]; - -int test (int i, double x); -struct s1 {int (*f) (int a);}; -struct s2 {int (*f) (double a);}; -int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); -int argc; -char **argv; -int -main () -{ -return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; - ; - return 0; -} -_ACEOF -for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ - -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" -do - CC="$ac_save_CC $ac_arg" - if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_c89=$ac_arg -fi -rm -f core conftest.err conftest.$ac_objext - test "x$ac_cv_prog_cc_c89" != "xno" && break -done -rm -f conftest.$ac_ext -CC=$ac_save_CC - -fi -# AC_CACHE_VAL -case "x$ac_cv_prog_cc_c89" in - x) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 -$as_echo "none needed" >&6; } ;; - xno) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 -$as_echo "unsupported" >&6; } ;; - *) - CC="$CC $ac_cv_prog_cc_c89" - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 -$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; -esac -if test "x$ac_cv_prog_cc_c89" != xno; then : - -fi - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for an ANSI C-conforming const" >&5 -$as_echo_n "checking for an ANSI C-conforming const... " >&6; } -if ${ac_cv_c_const+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - -#ifndef __cplusplus - /* Ultrix mips cc rejects this sort of thing. */ - typedef int charset[2]; - const charset cs = { 0, 0 }; - /* SunOS 4.1.1 cc rejects this. */ - char const *const *pcpcc; - char **ppc; - /* NEC SVR4.0.2 mips cc rejects this. 
*/ - struct point {int x, y;}; - static struct point const zero = {0,0}; - /* AIX XL C 1.02.0.0 rejects this. - It does not let you subtract one const X* pointer from another in - an arm of an if-expression whose if-part is not a constant - expression */ - const char *g = "string"; - pcpcc = &g + (g ? g-g : 0); - /* HPUX 7.0 cc rejects these. */ - ++pcpcc; - ppc = (char**) pcpcc; - pcpcc = (char const *const *) ppc; - { /* SCO 3.2v4 cc rejects this sort of thing. */ - char tx; - char *t = &tx; - char const *s = 0 ? (char *) 0 : (char const *) 0; - - *t++ = 0; - if (s) return 0; - } - { /* Someone thinks the Sun supposedly-ANSI compiler will reject this. */ - int x[] = {25, 17}; - const int *foo = &x[0]; - ++foo; - } - { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */ - typedef const int *iptr; - iptr p = 0; - ++p; - } - { /* AIX XL C 1.02.0.0 rejects this sort of thing, saying - "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. */ - struct s { int j; const int *ap[3]; } bx; - struct s *b = &bx; b->j = 5; - } - { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ - const int foo = 10; - if (!foo) return 0; - } - return !cs[0] && !zero.x; -#endif - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_c_const=yes -else - ac_cv_c_const=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_const" >&5 -$as_echo "$ac_cv_c_const" >&6; } -if test $ac_cv_c_const = no; then - -$as_echo "#define const /**/" >>confdefs.h - -fi - - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 -$as_echo_n "checking how to run the C preprocessor... " >&6; } -# On Suns, sometimes $CPP names a directory. 
-if test -n "$CPP" && test -d "$CPP"; then - CPP= -fi -if test -z "$CPP"; then - if ${ac_cv_prog_CPP+:} false; then : - $as_echo_n "(cached) " >&6 -else - # Double quotes because CPP needs to be expanded - for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" - do - ac_preproc_ok=false -for ac_c_preproc_warn_flag in '' yes -do - # Use a header file that comes with gcc, so configuring glibc - # with a fresh cross-compiler works. - # Prefer to if __STDC__ is defined, since - # exists even on freestanding compilers. - # On the NeXT, cc -E runs the code through the compiler's parser, - # not just through cpp. "Syntax error" is here to catch this case. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#ifdef __STDC__ -# include -#else -# include -#endif - Syntax error -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - -else - # Broken: fails on valid input. -continue -fi -rm -f conftest.err conftest.i conftest.$ac_ext - - # OK, works on sane cases. Now check whether nonexistent headers - # can be detected and how. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - # Broken: success on invalid input. -continue -else - # Passes both tests. -ac_preproc_ok=: -break -fi -rm -f conftest.err conftest.i conftest.$ac_ext - -done -# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. -rm -f conftest.i conftest.err conftest.$ac_ext -if $ac_preproc_ok; then : - break -fi - - done - ac_cv_prog_CPP=$CPP - -fi - CPP=$ac_cv_prog_CPP -else - ac_cv_prog_CPP=$CPP -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 -$as_echo "$CPP" >&6; } -ac_preproc_ok=false -for ac_c_preproc_warn_flag in '' yes -do - # Use a header file that comes with gcc, so configuring glibc - # with a fresh cross-compiler works. - # Prefer to if __STDC__ is defined, since - # exists even on freestanding compilers. 
- # On the NeXT, cc -E runs the code through the compiler's parser, - # not just through cpp. "Syntax error" is here to catch this case. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#ifdef __STDC__ -# include -#else -# include -#endif - Syntax error -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - -else - # Broken: fails on valid input. -continue -fi -rm -f conftest.err conftest.i conftest.$ac_ext - - # OK, works on sane cases. Now check whether nonexistent headers - # can be detected and how. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - # Broken: success on invalid input. -continue -else - # Passes both tests. -ac_preproc_ok=: -break -fi -rm -f conftest.err conftest.i conftest.$ac_ext - -done -# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. -rm -f conftest.i conftest.err conftest.$ac_ext -if $ac_preproc_ok; then : - -else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "C preprocessor \"$CPP\" fails sanity check -See \`config.log' for more details" "$LINENO" 5; } -fi - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 -$as_echo_n "checking for grep that handles long lines and -e... " >&6; } -if ${ac_cv_path_GREP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -z "$GREP"; then - ac_path_GREP_found=false - # Loop through the user's path and test for each of PROGNAME-LIST - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_prog in grep ggrep; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" - as_fn_executable_p "$ac_path_GREP" || continue -# Check for GNU ac_path_GREP and select it if it is found. - # Check for GNU $ac_path_GREP -case `"$ac_path_GREP" --version 2>&1` in -*GNU*) - ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; -*) - ac_count=0 - $as_echo_n 0123456789 >"conftest.in" - while : - do - cat "conftest.in" "conftest.in" >"conftest.tmp" - mv "conftest.tmp" "conftest.in" - cp "conftest.in" "conftest.nl" - $as_echo 'GREP' >> "conftest.nl" - "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break - diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - as_fn_arith $ac_count + 1 && ac_count=$as_val - if test $ac_count -gt ${ac_path_GREP_max-0}; then - # Best one so far, save it but keep looking for a better one - ac_cv_path_GREP="$ac_path_GREP" - ac_path_GREP_max=$ac_count - fi - # 10*(2^10) chars as input seems more than enough - test $ac_count -gt 10 && break - done - rm -f conftest.in conftest.tmp conftest.nl conftest.out;; -esac - - $ac_path_GREP_found && break 3 - done - done - done -IFS=$as_save_IFS - if test -z "$ac_cv_path_GREP"; then - as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 - fi -else - ac_cv_path_GREP=$GREP -fi - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 -$as_echo "$ac_cv_path_GREP" >&6; } - GREP="$ac_cv_path_GREP" - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 -$as_echo_n "checking for egrep... 
" >&6; } -if ${ac_cv_path_EGREP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 - then ac_cv_path_EGREP="$GREP -E" - else - if test -z "$EGREP"; then - ac_path_EGREP_found=false - # Loop through the user's path and test for each of PROGNAME-LIST - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_prog in egrep; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" - as_fn_executable_p "$ac_path_EGREP" || continue -# Check for GNU ac_path_EGREP and select it if it is found. - # Check for GNU $ac_path_EGREP -case `"$ac_path_EGREP" --version 2>&1` in -*GNU*) - ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; -*) - ac_count=0 - $as_echo_n 0123456789 >"conftest.in" - while : - do - cat "conftest.in" "conftest.in" >"conftest.tmp" - mv "conftest.tmp" "conftest.in" - cp "conftest.in" "conftest.nl" - $as_echo 'EGREP' >> "conftest.nl" - "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break - diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - as_fn_arith $ac_count + 1 && ac_count=$as_val - if test $ac_count -gt ${ac_path_EGREP_max-0}; then - # Best one so far, save it but keep looking for a better one - ac_cv_path_EGREP="$ac_path_EGREP" - ac_path_EGREP_max=$ac_count - fi - # 10*(2^10) chars as input seems more than enough - test $ac_count -gt 10 && break - done - rm -f conftest.in conftest.tmp conftest.nl conftest.out;; -esac - - $ac_path_EGREP_found && break 3 - done - done - done -IFS=$as_save_IFS - if test -z "$ac_cv_path_EGREP"; then - as_fn_error $? 
"no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 - fi -else - ac_cv_path_EGREP=$EGREP -fi - - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 -$as_echo "$ac_cv_path_EGREP" >&6; } - EGREP="$ac_cv_path_EGREP" - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 -$as_echo_n "checking for ANSI C header files... " >&6; } -if ${ac_cv_header_stdc+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -#include -#include -#include - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_header_stdc=yes -else - ac_cv_header_stdc=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - -if test $ac_cv_header_stdc = yes; then - # SunOS 4.x string.h does not declare mem*, contrary to ANSI. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include - -_ACEOF -if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | - $EGREP "memchr" >/dev/null 2>&1; then : - -else - ac_cv_header_stdc=no -fi -rm -f conftest* - -fi - -if test $ac_cv_header_stdc = yes; then - # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include - -_ACEOF -if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | - $EGREP "free" >/dev/null 2>&1; then : - -else - ac_cv_header_stdc=no -fi -rm -f conftest* - -fi - -if test $ac_cv_header_stdc = yes; then - # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. - if test "$cross_compiling" = yes; then : - : -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -#include -#if ((' ' & 0x0FF) == 0x020) -# define ISLOWER(c) ('a' <= (c) && (c) <= 'z') -# define TOUPPER(c) (ISLOWER(c) ? 
'A' + ((c) - 'a') : (c)) -#else -# define ISLOWER(c) \ - (('a' <= (c) && (c) <= 'i') \ - || ('j' <= (c) && (c) <= 'r') \ - || ('s' <= (c) && (c) <= 'z')) -# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) -#endif - -#define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) -int -main () -{ - int i; - for (i = 0; i < 256; i++) - if (XOR (islower (i), ISLOWER (i)) - || toupper (i) != TOUPPER (i)) - return 2; - return 0; -} -_ACEOF -if ac_fn_c_try_run "$LINENO"; then : - -else - ac_cv_header_stdc=no -fi -rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ - conftest.$ac_objext conftest.beam conftest.$ac_ext -fi - -fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 -$as_echo "$ac_cv_header_stdc" >&6; } -if test $ac_cv_header_stdc = yes; then - -$as_echo "#define STDC_HEADERS 1" >>confdefs.h - -fi - -# On IRIX 5.3, sys/types and inttypes.h are conflicting. -for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ - inttypes.h stdint.h unistd.h -do : - as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` -ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default -" -if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : - cat >>confdefs.h <<_ACEOF -#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 -_ACEOF - -fi - -done - - -for ac_header in fcntl.h limits.h stdlib.h sys/types.h unistd.h winsock2.h ws2tcpip.h -do : - as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` -ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" -if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : - cat >>confdefs.h <<_ACEOF -#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 -_ACEOF - -fi - -done - -for ac_header in arpa/inet.h netdb.h netinet/in.h netinet/tcp.h sys/socket.h sys/uio.h sys/un.h linux/can.h linux/tcp.h -do : - as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` -ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" 
"$as_ac_Header" "$ac_includes_default" -if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : - cat >>confdefs.h <<_ACEOF -#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 -_ACEOF - -fi - -done - -for ac_header in net/if.h -do : - ac_fn_c_check_header_mongrel "$LINENO" "net/if.h" "ac_cv_header_net_if_h" "$ac_includes_default" -if test "x$ac_cv_header_net_if_h" = xyes; then : - cat >>confdefs.h <<_ACEOF -#define HAVE_NET_IF_H 1 -_ACEOF - -fi - -done - - -for ac_func in readlink symlink if_nametoindex -do : - as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` -ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" -if eval test \"x\$"$as_ac_var"\" = x"yes"; then : - cat >>confdefs.h <<_ACEOF -#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 -_ACEOF - -fi -done - - -ac_fn_c_check_member "$LINENO" "struct msghdr" "msg_control" "ac_cv_member_struct_msghdr_msg_control" "#if HAVE_SYS_TYPES_H -# include -#endif -#if HAVE_SYS_SOCKET_H -# include -#endif -#if HAVE_SYS_UIO_H -# include -#endif -" -if test "x$ac_cv_member_struct_msghdr_msg_control" = xyes; then : - -cat >>confdefs.h <<_ACEOF -#define HAVE_STRUCT_MSGHDR_MSG_CONTROL 1 -_ACEOF - - -fi -ac_fn_c_check_member "$LINENO" "struct msghdr" "msg_accrights" "ac_cv_member_struct_msghdr_msg_accrights" "#if HAVE_SYS_TYPES_H -# include -#endif -#if HAVE_SYS_SOCKET_H -# include -#endif -#if HAVE_SYS_UIO_H -# include -#endif -" -if test "x$ac_cv_member_struct_msghdr_msg_accrights" = xyes; then : - -cat >>confdefs.h <<_ACEOF -#define HAVE_STRUCT_MSGHDR_MSG_ACCRIGHTS 1 -_ACEOF - - -fi - - -ac_fn_c_check_member "$LINENO" "struct sockaddr" "sa_len" "ac_cv_member_struct_sockaddr_sa_len" "#if HAVE_SYS_TYPES_H -# include -#endif -#if HAVE_SYS_SOCKET_H -# include -#endif -" -if test "x$ac_cv_member_struct_sockaddr_sa_len" = xyes; then : - -cat >>confdefs.h <<_ACEOF -#define HAVE_STRUCT_SOCKADDR_SA_LEN 1 -_ACEOF - - -fi - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for in_addr_t in netinet/in.h" >&5 -$as_echo_n "checking 
for in_addr_t in netinet/in.h... " >&6; } -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include - -_ACEOF -if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | - $EGREP "in_addr_t" >/dev/null 2>&1; then : - -$as_echo "#define HAVE_IN_ADDR_T 1" >>confdefs.h - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi -rm -f conftest* - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for SO_PEERCRED and struct ucred in sys/socket.h" >&5 -$as_echo_n "checking for SO_PEERCRED and struct ucred in sys/socket.h... " >&6; } -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -#include -#ifndef SO_PEERCRED -# error no SO_PEERCRED -#endif -struct ucred u; -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_ucred=yes -else - ac_cv_ucred=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -if test "x$ac_cv_ucred" = xno; then - old_CFLAGS="$CFLAGS" - CFLAGS="-D_GNU_SOURCE $CFLAGS" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -#include -#ifndef SO_PEERCRED -# error no SO_PEERCRED -#endif -struct ucred u; -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_ucred=yes -else - ac_cv_ucred=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - if test "x$ac_cv_ucred" = xyes; then - EXTRA_CPPFLAGS=-D_GNU_SOURCE - fi -else - old_CFLAGS="$CFLAGS" -fi -if test "x$ac_cv_ucred" = xno; then - CFLAGS="$old_CFLAGS" - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -else - -$as_echo "#define HAVE_STRUCT_UCRED 1" >>confdefs.h - - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -fi - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for getpeereid in unistd.h" >&5 -$as_echo_n "checking for getpeereid in unistd.h... 
" >&6; } -ac_fn_c_check_func "$LINENO" "getpeereid" "ac_cv_func_getpeereid" -if test "x$ac_cv_func_getpeereid" = xyes; then : - -$as_echo "#define HAVE_GETPEEREID 1" >>confdefs.h - -fi - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for _head_libws2_32_a in -lws2_32" >&5 -$as_echo_n "checking for _head_libws2_32_a in -lws2_32... " >&6; } -if ${ac_cv_lib_ws2_32__head_libws2_32_a+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-lws2_32 $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char _head_libws2_32_a (); -int -main () -{ -return _head_libws2_32_a (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_ws2_32__head_libws2_32_a=yes -else - ac_cv_lib_ws2_32__head_libws2_32_a=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ws2_32__head_libws2_32_a" >&5 -$as_echo "$ac_cv_lib_ws2_32__head_libws2_32_a" >&6; } -if test "x$ac_cv_lib_ws2_32__head_libws2_32_a" = xyes; then : - cat >>confdefs.h <<_ACEOF -#define HAVE_LIBWS2_32 1 -_ACEOF - - LIBS="-lws2_32 $LIBS" - -fi - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for getaddrinfo" >&5 -$as_echo_n "checking for getaddrinfo... " >&6; } -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ -$ac_includes_default -int testme(){ getaddrinfo; } -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - -$as_echo "#define HAVE_GETADDRINFO 1" >>confdefs.h - ac_have_getaddrinfo=yes; { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - -if test "x$ac_have_getaddrinfo" = x; then - old_CFLAGS="$CFLAGS" - if test "z$ac_cv_lib_ws2_32__head_libws2_32_a" = zyes; then - CFLAGS="-DWINVER=0x0501 $CFLAGS" - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for getaddrinfo if WINVER is 0x0501" >&5 -$as_echo_n "checking for getaddrinfo if WINVER is 0x0501... " >&6; } - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$ac_includes_default - int testme(){ getaddrinfo; } -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - -$as_echo "#define HAVE_GETADDRINFO 1" >>confdefs.h - -$as_echo "#define NEED_WINVER_XP 1" >>confdefs.h - EXTRA_CPPFLAGS="-DWINVER=0x0501 $EXTRA_CPPFLAGS"; { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -else - CFLAGS="$old_CFLAGS"; { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - fi -fi - -for ac_func in gai_strerror -do : - ac_fn_c_check_func "$LINENO" "gai_strerror" "ac_cv_func_gai_strerror" -if test "x$ac_cv_func_gai_strerror" = xyes; then : - cat >>confdefs.h <<_ACEOF -#define HAVE_GAI_STRERROR 1 -_ACEOF - -fi -done - - -ac_fn_c_check_decl "$LINENO" "AI_ADDRCONFIG" "ac_cv_have_decl_AI_ADDRCONFIG" "$ac_includes_default" -if test "x$ac_cv_have_decl_AI_ADDRCONFIG" = xyes; then : - ac_have_decl=1 -else - ac_have_decl=0 -fi - -cat >>confdefs.h <<_ACEOF -#define HAVE_DECL_AI_ADDRCONFIG $ac_have_decl -_ACEOF -ac_fn_c_check_decl "$LINENO" "AI_ALL" 
"ac_cv_have_decl_AI_ALL" "$ac_includes_default" -if test "x$ac_cv_have_decl_AI_ALL" = xyes; then : - ac_have_decl=1 -else - ac_have_decl=0 -fi - -cat >>confdefs.h <<_ACEOF -#define HAVE_DECL_AI_ALL $ac_have_decl -_ACEOF -ac_fn_c_check_decl "$LINENO" "AI_NUMERICSERV" "ac_cv_have_decl_AI_NUMERICSERV" "$ac_includes_default" -if test "x$ac_cv_have_decl_AI_NUMERICSERV" = xyes; then : - ac_have_decl=1 -else - ac_have_decl=0 -fi - -cat >>confdefs.h <<_ACEOF -#define HAVE_DECL_AI_NUMERICSERV $ac_have_decl -_ACEOF -ac_fn_c_check_decl "$LINENO" "AI_V4MAPPED" "ac_cv_have_decl_AI_V4MAPPED" "$ac_includes_default" -if test "x$ac_cv_have_decl_AI_V4MAPPED" = xyes; then : - ac_have_decl=1 -else - ac_have_decl=0 -fi - -cat >>confdefs.h <<_ACEOF -#define HAVE_DECL_AI_V4MAPPED $ac_have_decl -_ACEOF - - -ac_fn_c_check_decl "$LINENO" "IPV6_V6ONLY" "ac_cv_have_decl_IPV6_V6ONLY" "$ac_includes_default" -if test "x$ac_cv_have_decl_IPV6_V6ONLY" = xyes; then : - ac_have_decl=1 -else - ac_have_decl=0 -fi - -cat >>confdefs.h <<_ACEOF -#define HAVE_DECL_IPV6_V6ONLY $ac_have_decl -_ACEOF - - -ac_fn_c_check_decl "$LINENO" "IPPROTO_IP" "ac_cv_have_decl_IPPROTO_IP" "$ac_includes_default" -if test "x$ac_cv_have_decl_IPPROTO_IP" = xyes; then : - ac_have_decl=1 -else - ac_have_decl=0 -fi - -cat >>confdefs.h <<_ACEOF -#define HAVE_DECL_IPPROTO_IP $ac_have_decl -_ACEOF -ac_fn_c_check_decl "$LINENO" "IPPROTO_TCP" "ac_cv_have_decl_IPPROTO_TCP" "$ac_includes_default" -if test "x$ac_cv_have_decl_IPPROTO_TCP" = xyes; then : - ac_have_decl=1 -else - ac_have_decl=0 -fi - -cat >>confdefs.h <<_ACEOF -#define HAVE_DECL_IPPROTO_TCP $ac_have_decl -_ACEOF -ac_fn_c_check_decl "$LINENO" "IPPROTO_IPV6" "ac_cv_have_decl_IPPROTO_IPV6" "$ac_includes_default" -if test "x$ac_cv_have_decl_IPPROTO_IPV6" = xyes; then : - ac_have_decl=1 -else - ac_have_decl=0 -fi - -cat >>confdefs.h <<_ACEOF -#define HAVE_DECL_IPPROTO_IPV6 $ac_have_decl -_ACEOF - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sendfile in 
sys/sendfile.h" >&5 -$as_echo_n "checking for sendfile in sys/sendfile.h... " >&6; } -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include - -_ACEOF -if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | - $EGREP "sendfile" >/dev/null 2>&1; then : - -$as_echo "#define HAVE_LINUX_SENDFILE 1" >>confdefs.h - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi -rm -f conftest* - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sendfile in sys/socket.h" >&5 -$as_echo_n "checking for sendfile in sys/socket.h... " >&6; } -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include - -_ACEOF -if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | - $EGREP "sendfile" >/dev/null 2>&1; then : - -$as_echo "#define HAVE_BSD_SENDFILE 1" >>confdefs.h - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi -rm -f conftest* - - -for ac_func in gethostent -do : - ac_fn_c_check_func "$LINENO" "gethostent" "ac_cv_func_gethostent" -if test "x$ac_cv_func_gethostent" = xyes; then : - cat >>confdefs.h <<_ACEOF -#define HAVE_GETHOSTENT 1 -_ACEOF - -fi -done - - -for ac_func in accept4 -do : - ac_fn_c_check_func "$LINENO" "accept4" "ac_cv_func_accept4" -if test "x$ac_cv_func_accept4" = xyes; then : - cat >>confdefs.h <<_ACEOF -#define HAVE_ACCEPT4 1 -_ACEOF - -fi -done - - -case "$host" in -*-mingw* | *-msys*) - EXTRA_SRCS="cbits/initWinSock.c, cbits/winSockErr.c, cbits/asyncAccept.c" - EXTRA_LIBS=ws2_32 - CALLCONV=stdcall ;; -*-solaris2*) - EXTRA_SRCS="cbits/ancilData.c" - EXTRA_LIBS="nsl, socket" - CALLCONV=ccall ;; -*) - EXTRA_SRCS="cbits/ancilData.c" - EXTRA_LIBS= - CALLCONV=ccall ;; -esac - - - - - -ac_config_files="$ac_config_files network.buildinfo" - - -cat >confcache <<\_ACEOF -# This file is a shell script that caches 
the results of configure -# tests run on this system so they can be shared between configure -# scripts and configure runs, see configure's option --config-cache. -# It is not useful on other systems. If it contains results you don't -# want to keep, you may remove or edit it. -# -# config.status only pays attention to the cache file if you give it -# the --recheck option to rerun configure. -# -# `ac_cv_env_foo' variables (set or unset) will be overridden when -# loading this file, other *unset* `ac_cv_foo' will be assigned the -# following values. - -_ACEOF - -# The following way of writing the cache mishandles newlines in values, -# but we know of no workaround that is simple, portable, and efficient. -# So, we kill variables containing newlines. -# Ultrix sh set writes to stderr and can't be redirected directly, -# and sets the high bit in the cache file unless we assign to the vars. -( - for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do - eval ac_val=\$$ac_var - case $ac_val in #( - *${as_nl}*) - case $ac_var in #( - *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 -$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; - esac - case $ac_var in #( - _ | IFS | as_nl) ;; #( - BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( - *) { eval $ac_var=; unset $ac_var;} ;; - esac ;; - esac - done - - (set) 2>&1 | - case $as_nl`(ac_space=' '; set) 2>&1` in #( - *${as_nl}ac_space=\ *) - # `set' does not quote correctly, so add quotes: double-quote - # substitution turns \\\\ into \\, and sed turns \\ into \. - sed -n \ - "s/'/'\\\\''/g; - s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" - ;; #( - *) - # `set' quotes correctly as required by POSIX, so do not add quotes. 
- sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" - ;; - esac | - sort -) | - sed ' - /^ac_cv_env_/b end - t clear - :clear - s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ - t end - s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ - :end' >>confcache -if diff "$cache_file" confcache >/dev/null 2>&1; then :; else - if test -w "$cache_file"; then - if test "x$cache_file" != "x/dev/null"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 -$as_echo "$as_me: updating cache $cache_file" >&6;} - if test ! -f "$cache_file" || test -h "$cache_file"; then - cat confcache >"$cache_file" - else - case $cache_file in #( - */* | ?:*) - mv -f confcache "$cache_file"$$ && - mv -f "$cache_file"$$ "$cache_file" ;; #( - *) - mv -f confcache "$cache_file" ;; - esac - fi - fi - else - { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 -$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} - fi -fi -rm -f confcache - -test "x$prefix" = xNONE && prefix=$ac_default_prefix -# Let make expand exec_prefix. -test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' - -DEFS=-DHAVE_CONFIG_H - -ac_libobjs= -ac_ltlibobjs= -U= -for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue - # 1. Remove the extension, and $U if already installed. - ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' - ac_i=`$as_echo "$ac_i" | sed "$ac_script"` - # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR - # will be set to the directory where LIBOBJS objects are built. 
- as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" - as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' -done -LIBOBJS=$ac_libobjs - -LTLIBOBJS=$ac_ltlibobjs - - - -: "${CONFIG_STATUS=./config.status}" -ac_write_fail=0 -ac_clean_files_save=$ac_clean_files -ac_clean_files="$ac_clean_files $CONFIG_STATUS" -{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 -$as_echo "$as_me: creating $CONFIG_STATUS" >&6;} -as_write_fail=0 -cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 -#! $SHELL -# Generated by $as_me. -# Run this file to recreate the current configuration. -# Compiler output produced by configure, useful for debugging -# configure, is in config.log if it exists. - -debug=false -ac_cs_recheck=false -ac_cs_silent=false - -SHELL=\${CONFIG_SHELL-$SHELL} -export SHELL -_ASEOF -cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 -## -------------------- ## -## M4sh Initialization. ## -## -------------------- ## - -# Be more Bourne compatible -DUALCASE=1; export DUALCASE # for MKS sh -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi - - -as_nl=' -' -export as_nl -# Printing a long string crashes Solaris 7 /usr/bin/printf. -as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo -# Prefer a ksh shell builtin over an external printf program on Solaris, -# but without wasting forks for bash or zsh. 
-if test -z "$BASH_VERSION$ZSH_VERSION" \ - && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='print -r --' - as_echo_n='print -rn --' -elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='printf %s\n' - as_echo_n='printf %s' -else - if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then - as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' - as_echo_n='/usr/ucb/echo -n' - else - as_echo_body='eval expr "X$1" : "X\\(.*\\)"' - as_echo_n_body='eval - arg=$1; - case $arg in #( - *"$as_nl"*) - expr "X$arg" : "X\\(.*\\)$as_nl"; - arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; - esac; - expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" - ' - export as_echo_n_body - as_echo_n='sh -c $as_echo_n_body as_echo' - fi - export as_echo_body - as_echo='sh -c $as_echo_body as_echo' -fi - -# The user is always right. -if test "${PATH_SEPARATOR+set}" != set; then - PATH_SEPARATOR=: - (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { - (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || - PATH_SEPARATOR=';' - } -fi - - -# IFS -# We need space, tab and new line, in precisely that order. Quoting is -# there to prevent editors from complaining about space-tab. -# (If _AS_PATH_WALK were called with IFS unset, it would disable word -# splitting by setting IFS to empty value.) -IFS=" "" $as_nl" - -# Find who we are. Look in the path if we contain no directory separator. -as_myself= -case $0 in #(( - *[\\/]* ) as_myself=$0 ;; - *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break - done -IFS=$as_save_IFS - - ;; -esac -# We did not find ourselves, most probably we were run as `sh COMMAND' -# in which case we are not to be found in the path. -if test "x$as_myself" = x; then - as_myself=$0 -fi -if test ! 
-f "$as_myself"; then - $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - exit 1 -fi - -# Unset variables that we do not need and which cause bugs (e.g. in -# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" -# suppresses any "Segmentation fault" message there. '((' could -# trigger a bug in pdksh 5.2.14. -for as_var in BASH_ENV ENV MAIL MAILPATH -do eval test x\${$as_var+set} = xset \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done -PS1='$ ' -PS2='> ' -PS4='+ ' - -# NLS nuisances. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# CDPATH. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - - -# as_fn_error STATUS ERROR [LINENO LOG_FD] -# ---------------------------------------- -# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are -# provided, also output the error to LOG_FD, referencing LINENO. Then exit the -# script with STATUS, using 1 if that was 0. -as_fn_error () -{ - as_status=$1; test $as_status -eq 0 && as_status=1 - if test "$4"; then - as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 - fi - $as_echo "$as_me: error: $2" >&2 - as_fn_exit $as_status -} # as_fn_error - - -# as_fn_set_status STATUS -# ----------------------- -# Set $? to STATUS, without forking. -as_fn_set_status () -{ - return $1 -} # as_fn_set_status - -# as_fn_exit STATUS -# ----------------- -# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. -as_fn_exit () -{ - set +e - as_fn_set_status $1 - exit $1 -} # as_fn_exit - -# as_fn_unset VAR -# --------------- -# Portably unset VAR. -as_fn_unset () -{ - { eval $1=; unset $1;} -} -as_unset=as_fn_unset -# as_fn_append VAR VALUE -# ---------------------- -# Append the text in VALUE to the end of the definition contained in VAR. 
Take -# advantage of any shell optimizations that allow amortized linear growth over -# repeated appends, instead of the typical quadratic growth present in naive -# implementations. -if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : - eval 'as_fn_append () - { - eval $1+=\$2 - }' -else - as_fn_append () - { - eval $1=\$$1\$2 - } -fi # as_fn_append - -# as_fn_arith ARG... -# ------------------ -# Perform arithmetic evaluation on the ARGs, and store the result in the -# global $as_val. Take advantage of shells that can avoid forks. The arguments -# must be portable across $(()) and expr. -if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : - eval 'as_fn_arith () - { - as_val=$(( $* )) - }' -else - as_fn_arith () - { - as_val=`expr "$@" || test $? -eq 1` - } -fi # as_fn_arith - - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then - as_basename=basename -else - as_basename=false -fi - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -as_me=`$as_basename -- "$0" || -$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X/"$0" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - -# Avoid depending upon Character Ranges. -as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - -ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in #((((( --n*) - case `echo 'xy\c'` in - *c*) ECHO_T=' ';; # ECHO_T is single tab character. 
- xy) ECHO_C='\c';; - *) echo `echo ksh88 bug on AIX 6.1` > /dev/null - ECHO_T=' ';; - esac;; -*) - ECHO_N='-n';; -esac - -rm -f conf$$ conf$$.exe conf$$.file -if test -d conf$$.dir; then - rm -f conf$$.dir/conf$$.file -else - rm -f conf$$.dir - mkdir conf$$.dir 2>/dev/null -fi -if (echo >conf$$.file) 2>/dev/null; then - if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -pR'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -pR' - elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln - else - as_ln_s='cp -pR' - fi -else - as_ln_s='cp -pR' -fi -rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file -rmdir conf$$.dir 2>/dev/null - - -# as_fn_mkdir_p -# ------------- -# Create "$as_dir" as a directory, including parents if necessary. -as_fn_mkdir_p () -{ - - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || eval $as_mkdir_p || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" - - -} # as_fn_mkdir_p -if mkdir -p . 
2>/dev/null; then - as_mkdir_p='mkdir -p "$as_dir"' -else - test -d ./-p && rmdir ./-p - as_mkdir_p=false -fi - - -# as_fn_executable_p FILE -# ----------------------- -# Test if FILE is an executable regular file. -as_fn_executable_p () -{ - test -f "$1" && test -x "$1" -} # as_fn_executable_p -as_test_x='test -x' -as_executable_p=as_fn_executable_p - -# Sed expression to map a string onto a valid CPP name. -as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - -# Sed expression to map a string onto a valid variable name. -as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - - -exec 6>&1 -## ----------------------------------- ## -## Main body of $CONFIG_STATUS script. ## -## ----------------------------------- ## -_ASEOF -test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# Save the log message, to keep $0 and so on meaningful, and to -# report actual input values of CONFIG_FILES etc. instead of their -# values after options handling. -ac_log=" -This file was extended by Haskell network package $as_me 2.6.3.5, which was -generated by GNU Autoconf 2.69. Invocation command line was - - CONFIG_FILES = $CONFIG_FILES - CONFIG_HEADERS = $CONFIG_HEADERS - CONFIG_LINKS = $CONFIG_LINKS - CONFIG_COMMANDS = $CONFIG_COMMANDS - $ $0 $@ - -on `(hostname || uname -n) 2>/dev/null | sed 1q` -" - -_ACEOF - -case $ac_config_files in *" -"*) set x $ac_config_files; shift; ac_config_files=$*;; -esac - -case $ac_config_headers in *" -"*) set x $ac_config_headers; shift; ac_config_headers=$*;; -esac - - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -# Files that config.status was made for. -config_files="$ac_config_files" -config_headers="$ac_config_headers" - -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -ac_cs_usage="\ -\`$as_me' instantiates files and other configuration actions -from templates according to the current configuration. 
Unless the files -and actions are specified as TAGs, all are instantiated by default. - -Usage: $0 [OPTION]... [TAG]... - - -h, --help print this help, then exit - -V, --version print version number and configuration settings, then exit - --config print configuration, then exit - -q, --quiet, --silent - do not print progress messages - -d, --debug don't remove temporary files - --recheck update $as_me by reconfiguring in the same conditions - --file=FILE[:TEMPLATE] - instantiate the configuration file FILE - --header=FILE[:TEMPLATE] - instantiate the configuration header FILE - -Configuration files: -$config_files - -Configuration headers: -$config_headers - -Report bugs to ." - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" -ac_cs_version="\\ -Haskell network package config.status 2.6.3.5 -configured by $0, generated by GNU Autoconf 2.69, - with options \\"\$ac_cs_config\\" - -Copyright (C) 2012 Free Software Foundation, Inc. -This config.status script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it." - -ac_pwd='$ac_pwd' -srcdir='$srcdir' -test -n "\$AWK" || AWK=awk -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# The default lists apply if the user does not specify any file. -ac_need_defaults=: -while test $# != 0 -do - case $1 in - --*=?*) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` - ac_shift=: - ;; - --*=) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg= - ac_shift=: - ;; - *) - ac_option=$1 - ac_optarg=$2 - ac_shift=shift - ;; - esac - - case $ac_option in - # Handling of the options. 
- -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) - ac_cs_recheck=: ;; - --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) - $as_echo "$ac_cs_version"; exit ;; - --config | --confi | --conf | --con | --co | --c ) - $as_echo "$ac_cs_config"; exit ;; - --debug | --debu | --deb | --de | --d | -d ) - debug=: ;; - --file | --fil | --fi | --f ) - $ac_shift - case $ac_optarg in - *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; - '') as_fn_error $? "missing file argument" ;; - esac - as_fn_append CONFIG_FILES " '$ac_optarg'" - ac_need_defaults=false;; - --header | --heade | --head | --hea ) - $ac_shift - case $ac_optarg in - *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; - esac - as_fn_append CONFIG_HEADERS " '$ac_optarg'" - ac_need_defaults=false;; - --he | --h) - # Conflict between --help and --header - as_fn_error $? "ambiguous option: \`$1' -Try \`$0 --help' for more information.";; - --help | --hel | -h ) - $as_echo "$ac_cs_usage"; exit ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil | --si | --s) - ac_cs_silent=: ;; - - # This is an error. - -*) as_fn_error $? "unrecognized option: \`$1' -Try \`$0 --help' for more information." ;; - - *) as_fn_append ac_config_targets " $1" - ac_need_defaults=false ;; - - esac - shift -done - -ac_configure_extra_args= - -if $ac_cs_silent; then - exec 6>/dev/null - ac_configure_extra_args="$ac_configure_extra_args --silent" -fi - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -if \$ac_cs_recheck; then - set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion - shift - \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 - CONFIG_SHELL='$SHELL' - export CONFIG_SHELL - exec "\$@" -fi - -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -exec 5>>config.log -{ - echo - sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX -## Running $as_me. 
## -_ASBOX - $as_echo "$ac_log" -} >&5 - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 - -# Handling of arguments. -for ac_config_target in $ac_config_targets -do - case $ac_config_target in - "include/HsNetworkConfig.h") CONFIG_HEADERS="$CONFIG_HEADERS include/HsNetworkConfig.h" ;; - "network.buildinfo") CONFIG_FILES="$CONFIG_FILES network.buildinfo" ;; - - *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; - esac -done - - -# If the user did not use the arguments to specify the items to instantiate, -# then the envvar interface is used. Set only those that are not. -# We use the long form for the default assignment because of an extremely -# bizarre bug on SunOS 4.1.3. -if $ac_need_defaults; then - test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files - test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers -fi - -# Have a temporary directory for convenience. Make it in the build tree -# simply because there is no reason against having it here, and in addition, -# creating and moving files from /tmp can sometimes cause problems. -# Hook for its removal unless debugging. -# Note that there is a small window in which the directory will not be cleaned: -# after its creation but before its name has been assigned to `$tmp'. -$debug || -{ - tmp= ac_tmp= - trap 'exit_status=$? - : "${ac_tmp:=$tmp}" - { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status -' 0 - trap 'as_fn_exit 1' 1 2 13 15 -} -# Create a (secure) tmp directory for tmp files. - -{ - tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && - test -d "$tmp" -} || -{ - tmp=./conf$$-$RANDOM - (umask 077 && mkdir "$tmp") -} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 -ac_tmp=$tmp - -# Set up the scripts for CONFIG_FILES section. -# No need to generate them if there are no CONFIG_FILES. -# This happens for instance with `./config.status config.h'. 
-if test -n "$CONFIG_FILES"; then - - -ac_cr=`echo X | tr X '\015'` -# On cygwin, bash can eat \r inside `` if the user requested igncr. -# But we know of no other shell where ac_cr would be empty at this -# point, so we can use a bashism as a fallback. -if test "x$ac_cr" = x; then - eval ac_cr=\$\'\\r\' -fi -ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` -if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then - ac_cs_awk_cr='\\r' -else - ac_cs_awk_cr=$ac_cr -fi - -echo 'BEGIN {' >"$ac_tmp/subs1.awk" && -_ACEOF - - -{ - echo "cat >conf$$subs.awk <<_ACEOF" && - echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && - echo "_ACEOF" -} >conf$$subs.sh || - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 -ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` -ac_delim='%!_!# ' -for ac_last_try in false false false false false :; do - . ./conf$$subs.sh || - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 - - ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` - if test $ac_delim_n = $ac_delim_num; then - break - elif $ac_last_try; then - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 - else - ac_delim="$ac_delim!$ac_delim _$ac_delim!! 
" - fi -done -rm -f conf$$subs.sh - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && -_ACEOF -sed -n ' -h -s/^/S["/; s/!.*/"]=/ -p -g -s/^[^!]*!// -:repl -t repl -s/'"$ac_delim"'$// -t delim -:nl -h -s/\(.\{148\}\)..*/\1/ -t more1 -s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ -p -n -b repl -:more1 -s/["\\]/\\&/g; s/^/"/; s/$/"\\/ -p -g -s/.\{148\}// -t nl -:delim -h -s/\(.\{148\}\)..*/\1/ -t more2 -s/["\\]/\\&/g; s/^/"/; s/$/"/ -p -b -:more2 -s/["\\]/\\&/g; s/^/"/; s/$/"\\/ -p -g -s/.\{148\}// -t delim -' >$CONFIG_STATUS || ac_write_fail=1 -rm -f conf$$subs.awk -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -_ACAWK -cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && - for (key in S) S_is_set[key] = 1 - FS = "" - -} -{ - line = $ 0 - nfields = split(line, field, "@") - substed = 0 - len = length(field[1]) - for (i = 2; i < nfields; i++) { - key = field[i] - keylen = length(key) - if (S_is_set[key]) { - value = S[key] - line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) - len += length(value) + length(field[++i]) - substed = 1 - } else - len += 1 + keylen - } - - print line -} - -_ACAWK -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then - sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" -else - cat -fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ - || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 -_ACEOF - -# VPATH may cause trouble with some makes, so we remove sole $(srcdir), -# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and -# trailing colons and then remove the whole line if VPATH becomes empty -# (actually we leave an empty line to preserve line numbers). 
-if test "x$srcdir" = x.; then - ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ -h -s/// -s/^/:/ -s/[ ]*$/:/ -s/:\$(srcdir):/:/g -s/:\${srcdir}:/:/g -s/:@srcdir@:/:/g -s/^:*// -s/:*$// -x -s/\(=[ ]*\).*/\1/ -G -s/\n// -s/^[^=]*=[ ]*$// -}' -fi - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -fi # test -n "$CONFIG_FILES" - -# Set up the scripts for CONFIG_HEADERS section. -# No need to generate them if there are no CONFIG_HEADERS. -# This happens for instance with `./config.status Makefile'. -if test -n "$CONFIG_HEADERS"; then -cat >"$ac_tmp/defines.awk" <<\_ACAWK || -BEGIN { -_ACEOF - -# Transform confdefs.h into an awk script `defines.awk', embedded as -# here-document in config.status, that substitutes the proper values into -# config.h.in to produce config.h. - -# Create a delimiter string that does not exist in confdefs.h, to ease -# handling of long lines. -ac_delim='%!_!# ' -for ac_last_try in false false :; do - ac_tt=`sed -n "/$ac_delim/p" confdefs.h` - if test -z "$ac_tt"; then - break - elif $ac_last_try; then - as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5 - else - ac_delim="$ac_delim!$ac_delim _$ac_delim!! " - fi -done - -# For the awk script, D is an array of macro values keyed by name, -# likewise P contains macro parameters if any. Preserve backslash -# newline sequences. 
- -ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* -sed -n ' -s/.\{148\}/&'"$ac_delim"'/g -t rset -:rset -s/^[ ]*#[ ]*define[ ][ ]*/ / -t def -d -:def -s/\\$// -t bsnl -s/["\\]/\\&/g -s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ -D["\1"]=" \3"/p -s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p -d -:bsnl -s/["\\]/\\&/g -s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ -D["\1"]=" \3\\\\\\n"\\/p -t cont -s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p -t cont -d -:cont -n -s/.\{148\}/&'"$ac_delim"'/g -t clear -:clear -s/\\$// -t bsnlc -s/["\\]/\\&/g; s/^/"/; s/$/"/p -d -:bsnlc -s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p -b cont -' >$CONFIG_STATUS || ac_write_fail=1 - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 - for (key in D) D_is_set[key] = 1 - FS = "" -} -/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { - line = \$ 0 - split(line, arg, " ") - if (arg[1] == "#") { - defundef = arg[2] - mac1 = arg[3] - } else { - defundef = substr(arg[1], 2) - mac1 = arg[2] - } - split(mac1, mac2, "(") #) - macro = mac2[1] - prefix = substr(line, 1, index(line, defundef) - 1) - if (D_is_set[macro]) { - # Preserve the white space surrounding the "#". - print prefix "define", macro P[macro] D[macro] - next - } else { - # Replace #undef with comments. This is necessary, for example, - # in the case of _POSIX_SOURCE, which is predefined and required - # on some systems where configure will not decide to define it. - if (defundef == "undef") { - print "/*", prefix defundef, macro, "*/" - next - } - } -} -{ print } -_ACAWK -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 - as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 -fi # test -n "$CONFIG_HEADERS" - - -eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS " -shift -for ac_tag -do - case $ac_tag in - :[FHLC]) ac_mode=$ac_tag; continue;; - esac - case $ac_mode$ac_tag in - :[FHL]*:*);; - :L* | :C*:*) as_fn_error $? 
"invalid tag \`$ac_tag'" "$LINENO" 5;; - :[FH]-) ac_tag=-:-;; - :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; - esac - ac_save_IFS=$IFS - IFS=: - set x $ac_tag - IFS=$ac_save_IFS - shift - ac_file=$1 - shift - - case $ac_mode in - :L) ac_source=$1;; - :[FH]) - ac_file_inputs= - for ac_f - do - case $ac_f in - -) ac_f="$ac_tmp/stdin";; - *) # Look for the file first in the build tree, then in the source tree - # (if the path is not absolute). The absolute path cannot be DOS-style, - # because $ac_f cannot contain `:'. - test -f "$ac_f" || - case $ac_f in - [\\/$]*) false;; - *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; - esac || - as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; - esac - case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac - as_fn_append ac_file_inputs " '$ac_f'" - done - - # Let's still pretend it is `configure' which instantiates (i.e., don't - # use $as_me), people would be surprised to read: - # /* config.h. Generated by config.status. */ - configure_input='Generated from '` - $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' - `' by configure.' - if test x"$ac_file" != x-; then - configure_input="$ac_file. $configure_input" - { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 -$as_echo "$as_me: creating $ac_file" >&6;} - fi - # Neutralize special characters interpreted by sed in replacement strings. - case $configure_input in #( - *\&* | *\|* | *\\* ) - ac_sed_conf_input=`$as_echo "$configure_input" | - sed 's/[\\\\&|]/\\\\&/g'`;; #( - *) ac_sed_conf_input=$configure_input;; - esac - - case $ac_tag in - *:-:* | *:-) cat >"$ac_tmp/stdin" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; - esac - ;; - esac - - ac_dir=`$as_dirname -- "$ac_file" || -$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$ac_file" : 'X\(//\)[^/]' \| \ - X"$ac_file" : 'X\(//\)$' \| \ - X"$ac_file" : 'X\(/\)' \| . 
2>/dev/null || -$as_echo X"$ac_file" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - as_dir="$ac_dir"; as_fn_mkdir_p - ac_builddir=. - -case "$ac_dir" in -.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; -*) - ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` - # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` - case $ac_top_builddir_sub in - "") ac_top_builddir_sub=. ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix - -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. - ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; -esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix - - - case $ac_mode in - :F) - # - # CONFIG_FILE - # - -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# If the template does not know about datarootdir, expand it. -# FIXME: This hack should be removed a few years after 2.60. 
-ac_datarootdir_hack=; ac_datarootdir_seen= -ac_sed_dataroot=' -/datarootdir/ { - p - q -} -/@datadir@/p -/@docdir@/p -/@infodir@/p -/@localedir@/p -/@mandir@/p' -case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in -*datarootdir*) ac_datarootdir_seen=yes;; -*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 -$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 - ac_datarootdir_hack=' - s&@datadir@&$datadir&g - s&@docdir@&$docdir&g - s&@infodir@&$infodir&g - s&@localedir@&$localedir&g - s&@mandir@&$mandir&g - s&\\\${datarootdir}&$datarootdir&g' ;; -esac -_ACEOF - -# Neutralize VPATH when `$srcdir' = `.'. -# Shell code in configure.ac might set extrasub. -# FIXME: do we really want to maintain this feature? -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -ac_sed_extra="$ac_vpsub -$extrasub -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -:t -/@[a-zA-Z_][a-zA-Z_0-9]*@/!b -s|@configure_input@|$ac_sed_conf_input|;t t -s&@top_builddir@&$ac_top_builddir_sub&;t t -s&@top_build_prefix@&$ac_top_build_prefix&;t t -s&@srcdir@&$ac_srcdir&;t t -s&@abs_srcdir@&$ac_abs_srcdir&;t t -s&@top_srcdir@&$ac_top_srcdir&;t t -s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t -s&@builddir@&$ac_builddir&;t t -s&@abs_builddir@&$ac_abs_builddir&;t t -s&@abs_top_builddir@&$ac_abs_top_builddir&;t t -$ac_datarootdir_hack -" -eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ - >$ac_tmp/out || as_fn_error $? 
"could not create $ac_file" "$LINENO" 5 - -test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && - { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && - { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ - "$ac_tmp/out"`; test -z "$ac_out"; } && - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&5 -$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&2;} - - rm -f "$ac_tmp/stdin" - case $ac_file in - -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; - *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; - esac \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - ;; - :H) - # - # CONFIG_HEADER - # - if test x"$ac_file" != x-; then - { - $as_echo "/* $configure_input */" \ - && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" - } >"$ac_tmp/config.h" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then - { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 -$as_echo "$as_me: $ac_file is unchanged" >&6;} - else - rm -f "$ac_file" - mv "$ac_tmp/config.h" "$ac_file" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - fi - else - $as_echo "/* $configure_input */" \ - && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \ - || as_fn_error $? "could not create -" "$LINENO" 5 - fi - ;; - - - esac - -done # for ac_tag - - -as_fn_exit 0 -_ACEOF -ac_clean_files=$ac_clean_files_save - -test $ac_write_fail = 0 || - as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 - - -# configure is writing to config.log, and then calls config.status. -# config.status does its own redirection, appending to config.log. 
-# Unfortunately, on DOS this fails, as config.log is still kept open -# by configure, so config.status won't be able to write to it; its -# output is simply discarded. So we exec the FD to /dev/null, -# effectively closing config.log, so it can be properly (re)opened and -# appended to by config.status. When coming back to configure, we -# need to make the FD available again. -if test "$no_create" != yes; then - ac_cs_success=: - ac_config_status_args= - test "$silent" = yes && - ac_config_status_args="$ac_config_status_args --quiet" - exec 5>/dev/null - $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false - exec 5>>config.log - # Use ||, not &&, to avoid exiting from the if with $? = 1, which - # would make configure fail if this is the last instruction. - $ac_cs_success || as_fn_exit 1 -fi -if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 -$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} -fi - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/configure.ac cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/configure.ac --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/configure.ac 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/configure.ac 1970-01-01 00:00:00.000000000 +0000 @@ -1,192 +0,0 @@ -AC_INIT([Haskell network package], [2.7.0.2], [libraries@haskell.org], [network]) - -ac_includes_default="$ac_includes_default -#ifdef HAVE_SYS_SOCKET_H -# include -#endif -#ifdef HAVE_NETINET_IN_H -# include -#endif -#ifdef HAVE_NETDB_H -# include -#endif -#ifdef HAVE_WINSOCK2_H -# include -#endif -#ifdef HAVE_WS2TCPIP_H -# include -// fix for MingW not defining IPV6_V6ONLY -# define IPV6_V6ONLY 27 -#endif" - -# Safety check: Ensure that we are in the correct source directory. 
-AC_CONFIG_SRCDIR([include/HsNet.h]) - -AC_CONFIG_HEADERS([include/HsNetworkConfig.h]) - -AC_CANONICAL_HOST - -AC_ARG_WITH([cc], - [C compiler], - [CC=$withval]) -AC_PROG_CC() - -AC_C_CONST - -dnl ** check for specific header (.h) files that we are interested in -AC_CHECK_HEADERS([fcntl.h limits.h stdlib.h sys/types.h unistd.h winsock2.h ws2tcpip.h]) -AC_CHECK_HEADERS([arpa/inet.h netdb.h netinet/in.h netinet/tcp.h sys/socket.h sys/uio.h sys/un.h linux/can.h linux/tcp.h]) -AC_CHECK_HEADERS([net/if.h]) - -AC_CHECK_FUNCS([readlink symlink if_nametoindex]) - -dnl ** check what fields struct msghdr contains -AC_CHECK_MEMBERS([struct msghdr.msg_control, struct msghdr.msg_accrights], [], [], [#if HAVE_SYS_TYPES_H -# include -#endif -#if HAVE_SYS_SOCKET_H -# include -#endif -#if HAVE_SYS_UIO_H -# include -#endif]) - -dnl ** check if struct sockaddr contains sa_len -AC_CHECK_MEMBERS([struct sockaddr.sa_len], [], [], [#if HAVE_SYS_TYPES_H -# include -#endif -#if HAVE_SYS_SOCKET_H -# include -#endif]) - -dnl -------------------------------------------------- -dnl * test for in_addr_t -dnl -------------------------------------------------- -AC_MSG_CHECKING(for in_addr_t in netinet/in.h) -AC_EGREP_HEADER(in_addr_t, netinet/in.h, - [ AC_DEFINE([HAVE_IN_ADDR_T], [1], [Define to 1 if in_addr_t is available.]) AC_MSG_RESULT(yes) ], - AC_MSG_RESULT(no)) - -dnl -------------------------------------------------- -dnl * test for SO_PEERCRED and struct ucred -dnl -------------------------------------------------- -AC_MSG_CHECKING(for SO_PEERCRED and struct ucred in sys/socket.h) -AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include -#include -#ifndef SO_PEERCRED -# error no SO_PEERCRED -#endif -struct ucred u;]])],ac_cv_ucred=yes,ac_cv_ucred=no) -if test "x$ac_cv_ucred" = xno; then - old_CFLAGS="$CFLAGS" - CFLAGS="-D_GNU_SOURCE $CFLAGS" - AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include -#include -#ifndef SO_PEERCRED -# error no SO_PEERCRED -#endif -struct ucred 
u;]])],ac_cv_ucred=yes,ac_cv_ucred=no) - if test "x$ac_cv_ucred" = xyes; then - EXTRA_CPPFLAGS=-D_GNU_SOURCE - fi -else - old_CFLAGS="$CFLAGS" -fi -if test "x$ac_cv_ucred" = xno; then - CFLAGS="$old_CFLAGS" - AC_MSG_RESULT(no) -else - AC_DEFINE([HAVE_STRUCT_UCRED], [1], [Define to 1 if you have both SO_PEERCRED and struct ucred.]) - AC_MSG_RESULT(yes) -fi - -dnl -------------------------------------------------- -dnl * test for GETPEEREID(3) -dnl -------------------------------------------------- -AC_MSG_CHECKING(for getpeereid in unistd.h) -AC_CHECK_FUNC( getpeereid, AC_DEFINE([HAVE_GETPEEREID], [1], [Define to 1 if you have getpeereid.] )) - -dnl -------------------------------------------------- -dnl * check for Windows networking libraries -dnl -------------------------------------------------- -AC_CHECK_LIB(ws2_32, _head_libws2_32_a) - -dnl -------------------------------------------------- -dnl * test for getaddrinfo as proxy for IPv6 support -dnl -------------------------------------------------- -AC_MSG_CHECKING(for getaddrinfo) -dnl Can't use AC_CHECK_FUNC here, because it doesn't do the right -dnl thing on Windows. -AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[$ac_includes_default -int testme(){ getaddrinfo; }]])],[AC_DEFINE([HAVE_GETADDRINFO], [1], [Define to 1 if you have the `getaddrinfo' function.]) ac_have_getaddrinfo=yes; AC_MSG_RESULT(yes)],[AC_MSG_RESULT(no)]) - -dnl Under mingw, we may need to set WINVER to 0x0501 to expose getaddrinfo. 
-if test "x$ac_have_getaddrinfo" = x; then - old_CFLAGS="$CFLAGS" - if test "z$ac_cv_lib_ws2_32__head_libws2_32_a" = zyes; then - CFLAGS="-DWINVER=0x0501 $CFLAGS" - AC_MSG_CHECKING(for getaddrinfo if WINVER is 0x0501) - AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[$ac_includes_default - int testme(){ getaddrinfo; }]])],[AC_DEFINE([HAVE_GETADDRINFO], [1], [Define to 1 if you have the `getaddrinfo' function.]) AC_DEFINE([NEED_WINVER_XP], [1], [Define to 1 if the `getaddrinfo' function needs WINVER set.]) EXTRA_CPPFLAGS="-DWINVER=0x0501 $EXTRA_CPPFLAGS"; AC_MSG_RESULT(yes)],[CFLAGS="$old_CFLAGS"; AC_MSG_RESULT(no)]) - fi -fi - -dnl Missing under mingw, sigh. -AC_CHECK_FUNCS(gai_strerror) - -dnl ------------------------------------------------------- -dnl * test for AI_* flags that not all implementations have -dnl ------------------------------------------------------- -AC_CHECK_DECLS([AI_ADDRCONFIG, AI_ALL, AI_NUMERICSERV, AI_V4MAPPED]) - -dnl ------------------------------------------------------- -dnl * test for IPV6_V6ONLY flags that not all implementations have -dnl ------------------------------------------------------- -AC_CHECK_DECLS([IPV6_V6ONLY]) - -dnl ------------------------------------------------------- -dnl * test for IPPROTO_* macros/constants -dnl ------------------------------------------------------- -AC_CHECK_DECLS([IPPROTO_IP, IPPROTO_TCP, IPPROTO_IPV6]) - -dnl -------------------------------------------------- -dnl * test for Linux sendfile(2) -dnl -------------------------------------------------- -AC_MSG_CHECKING(for sendfile in sys/sendfile.h) -AC_EGREP_HEADER(sendfile, sys/sendfile.h, - [ AC_DEFINE([HAVE_LINUX_SENDFILE], [1], [Define to 1 if you have a Linux sendfile(2) implementation.]) AC_MSG_RESULT(yes) ], - AC_MSG_RESULT(no)) - -dnl -------------------------------------------------- -dnl * test for BSD sendfile(2) -dnl -------------------------------------------------- -AC_MSG_CHECKING(for sendfile in sys/socket.h) -AC_EGREP_HEADER(sendfile, 
sys/socket.h, - [ AC_DEFINE([HAVE_BSD_SENDFILE], [1], [Define to 1 if you have a BSDish sendfile(2) implementation.]) AC_MSG_RESULT(yes) ], - AC_MSG_RESULT(no)) - -AC_CHECK_FUNCS(gethostent) - -AC_CHECK_FUNCS(accept4) - -case "$host" in -*-mingw* | *-msys*) - EXTRA_SRCS="cbits/initWinSock.c, cbits/winSockErr.c, cbits/asyncAccept.c" - EXTRA_LIBS=ws2_32 - ;; -*-solaris2*) - EXTRA_SRCS="cbits/ancilData.c" - EXTRA_LIBS="nsl, socket" - ;; -*) - EXTRA_SRCS="cbits/ancilData.c" - EXTRA_LIBS= - ;; -esac -AC_SUBST([EXTRA_CPPFLAGS]) -AC_SUBST([EXTRA_LIBS]) -AC_SUBST([EXTRA_SRCS]) - -AC_CONFIG_FILES([network.buildinfo]) - -AC_OUTPUT diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/examples/EchoClient.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/examples/EchoClient.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/examples/EchoClient.hs 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/examples/EchoClient.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} --- Echo client program -module Main (main) where - -import qualified Control.Exception as E -import qualified Data.ByteString.Char8 as C -import Network.Socket hiding (recv) -import Network.Socket.ByteString (recv, sendAll) - -main :: IO () -main = withSocketsDo $ do - addr <- resolve "127.0.0.1" "3000" - E.bracket (open addr) close talk - where - resolve host port = do - let hints = defaultHints { addrSocketType = Stream } - addr:_ <- getAddrInfo (Just hints) (Just host) (Just port) - return addr - open addr = do - sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) - connect sock $ addrAddress addr - return sock - talk sock = do - sendAll sock "Hello, world!" 
- msg <- recv sock 1024 - putStr "Received: " - C.putStrLn msg diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/examples/EchoServer.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/examples/EchoServer.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/examples/EchoServer.hs 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/examples/EchoServer.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,41 +0,0 @@ --- Echo server program -module Main (main) where - -import Control.Concurrent (forkFinally) -import qualified Control.Exception as E -import Control.Monad (unless, forever, void) -import qualified Data.ByteString as S -import Network.Socket hiding (recv) -import Network.Socket.ByteString (recv, sendAll) - -main :: IO () -main = withSocketsDo $ do - addr <- resolve "3000" - E.bracket (open addr) close loop - where - resolve port = do - let hints = defaultHints { - addrFlags = [AI_PASSIVE] - , addrSocketType = Stream - } - addr:_ <- getAddrInfo (Just hints) Nothing (Just port) - return addr - open addr = do - sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) - setSocketOption sock ReuseAddr 1 - -- If the prefork technique is not used, - -- set CloseOnExec for the security reasons. 
- let fd = fdSocket sock - setCloseOnExecIfNeeded fd - bind sock (addrAddress addr) - listen sock 10 - return sock - loop sock = forever $ do - (conn, peer) <- accept sock - putStrLn $ "Connection from " ++ show peer - void $ forkFinally (talk conn) (\_ -> close conn) - talk conn = do - msg <- recv conn 1024 - unless (S.null msg) $ do - sendAll conn msg - talk conn diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/include/HsNetDef.h cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/include/HsNetDef.h --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/include/HsNetDef.h 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/include/HsNetDef.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -#ifndef HSNETDEF_H -#define HSNETDEF_H - -#include "HsNetworkConfig.h" - -/* ultra-evil... */ -#undef PACKAGE_BUGREPORT -#undef PACKAGE_NAME -#undef PACKAGE_STRING -#undef PACKAGE_TARNAME -#undef PACKAGE_VERSION - -#if defined(HAVE_WINSOCK2_H) -# define WITH_WINSOCK 1 -#endif - -#if !defined(mingw32_HOST_OS) && !defined(_WIN32) -# define DOMAIN_SOCKET_SUPPORT 1 -#endif - -/* stdcall is for Windows 32. - Haskell FFI does not have a keyword for Windows 64. - If ccall/stdcall is specified on Windows 64, - GHC ignores it and use a proper ABI for Windows 64. - But if stdcall is specified, GHC displays a warning. - So, let's use ccall for Windows 64. 
- */ -#if defined(mingw32_HOST_OS) -# if defined(i386_HOST_ARCH) -# define CALLCONV stdcall -# elif defined(x86_64_HOST_ARCH) -# define CALLCONV ccall -# else -# error Unknown mingw32 arch -# endif -#else -# define CALLCONV ccall -#endif -#if defined(mingw32_HOST_OS) -# define SAFE_ON_WIN safe -#else -# define SAFE_ON_WIN unsafe -#endif - -#endif /* HSNETDEF_H */ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/include/HsNet.h cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/include/HsNet.h --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/include/HsNet.h 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/include/HsNet.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,165 +0,0 @@ -/* ----------------------------------------------------------------------------- - * - * Definitions for package `network' which are visible in Haskell land. - * - * ---------------------------------------------------------------------------*/ - -#ifndef HSNET_H -#define HSNET_H - -#include "HsNetDef.h" - -#ifdef NEED_WINVER -# define WINVER 0x0501 -#endif - -#ifndef INLINE -# if defined(_MSC_VER) -# define INLINE extern __inline -# elif defined(__GNUC_GNU_INLINE__) -# define INLINE extern inline -# else -# define INLINE inline -# endif -#endif - -#ifdef HAVE_GETADDRINFO -# define IPV6_SOCKET_SUPPORT 1 -#else -# undef IPV6_SOCKET_SUPPORT -#endif - -#if defined(HAVE_WINSOCK2_H) -#include -# ifdef HAVE_WS2TCPIP_H -# include -// fix for MingW not defining IPV6_V6ONLY -# define IPV6_V6ONLY 27 -# endif - -extern int initWinSock (); -extern const char* getWSErrorDescr(int err); -extern void* newAcceptParams(int sock, - int sz, - void* sockaddr); -extern int acceptNewSock(void* d); -extern int acceptDoProc(void* param); - -#else - -#ifdef HAVE_LIMITS_H -# include -#endif -#ifdef HAVE_STDLIB_H -# include -#endif -#ifdef HAVE_UNISTD_H -#include -#endif -#ifdef HAVE_SYS_TYPES_H -# include 
-#endif -#ifdef HAVE_FCNTL_H -# include -#endif -#ifdef HAVE_SYS_UIO_H -# include -#endif -#ifdef HAVE_SYS_SOCKET_H -# include -#endif -#ifdef HAVE_LINUX_TCP_H -# include -#elif HAVE_NETINET_TCP_H -# include -#endif -#ifdef HAVE_NETINET_IN_H -# include -#endif -#ifdef HAVE_SYS_UN_H -# include -#endif -#ifdef HAVE_ARPA_INET_H -# include -#endif -#ifdef HAVE_NETDB_H -#include -#endif -#ifdef HAVE_LINUX_CAN_H -# include -# define CAN_SOCKET_SUPPORT 1 -#endif -#ifdef HAVE_NET_IF -# include -#endif - -#ifdef HAVE_BSD_SENDFILE -#include -#endif -#ifdef HAVE_LINUX_SENDFILE -#if !defined(__USE_FILE_OFFSET64) -#include -#endif -#endif - -extern int -sendFd(int sock, int outfd); - -extern int -recvFd(int sock); - -#endif /* HAVE_WINSOCK2_H */ - -INLINE char * -hsnet_inet_ntoa( -#if defined(HAVE_WINSOCK2_H) - u_long addr -#elif defined(HAVE_IN_ADDR_T) - in_addr_t addr -#elif defined(HAVE_INTTYPES_H) - u_int32_t addr -#else - unsigned long addr -#endif - ) -{ - struct in_addr a; - a.s_addr = addr; - return inet_ntoa(a); -} - -#ifdef HAVE_GETADDRINFO -INLINE int -hsnet_getnameinfo(const struct sockaddr* a,socklen_t b, char* c, -# if defined(HAVE_WINSOCK2_H) - DWORD d, char* e, DWORD f, int g) -# else - socklen_t d, char* e, socklen_t f, int g) -# endif -{ - return getnameinfo(a,b,c,d,e,f,g); -} - -INLINE int -hsnet_getaddrinfo(const char *hostname, const char *servname, - const struct addrinfo *hints, struct addrinfo **res) -{ - return getaddrinfo(hostname, servname, hints, res); -} - -INLINE void -hsnet_freeaddrinfo(struct addrinfo *ai) -{ - freeaddrinfo(ai); -} -#endif - -#if !defined(IOV_MAX) -# define IOV_MAX 1024 -#endif - -#if !defined(SOCK_NONBLOCK) // Missing define in Bionic libc (Android) -# define SOCK_NONBLOCK O_NONBLOCK -#endif - -#endif /* HSNET_H */ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/include/HsNetworkConfig.h.in cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/include/HsNetworkConfig.h.in --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/include/HsNetworkConfig.h.in 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/include/HsNetworkConfig.h.in 1970-01-01 00:00:00.000000000 +0000 @@ -1,177 +0,0 @@ -/* include/HsNetworkConfig.h.in. Generated from configure.ac by autoheader. */ - -/* Define to 1 if you have the `accept4' function. */ -#undef HAVE_ACCEPT4 - -/* Define to 1 if you have the header file. */ -#undef HAVE_ARPA_INET_H - -/* Define to 1 if you have a BSDish sendfile(2) implementation. */ -#undef HAVE_BSD_SENDFILE - -/* Define to 1 if you have the declaration of `AI_ADDRCONFIG', and to 0 if you - don't. */ -#undef HAVE_DECL_AI_ADDRCONFIG - -/* Define to 1 if you have the declaration of `AI_ALL', and to 0 if you don't. - */ -#undef HAVE_DECL_AI_ALL - -/* Define to 1 if you have the declaration of `AI_NUMERICSERV', and to 0 if - you don't. */ -#undef HAVE_DECL_AI_NUMERICSERV - -/* Define to 1 if you have the declaration of `AI_V4MAPPED', and to 0 if you - don't. */ -#undef HAVE_DECL_AI_V4MAPPED - -/* Define to 1 if you have the declaration of `IPPROTO_IP', and to 0 if you - don't. */ -#undef HAVE_DECL_IPPROTO_IP - -/* Define to 1 if you have the declaration of `IPPROTO_IPV6', and to 0 if you - don't. */ -#undef HAVE_DECL_IPPROTO_IPV6 - -/* Define to 1 if you have the declaration of `IPPROTO_TCP', and to 0 if you - don't. */ -#undef HAVE_DECL_IPPROTO_TCP - -/* Define to 1 if you have the declaration of `IPV6_V6ONLY', and to 0 if you - don't. */ -#undef HAVE_DECL_IPV6_V6ONLY - -/* Define to 1 if you have the header file. */ -#undef HAVE_FCNTL_H - -/* Define to 1 if you have the `gai_strerror' function. */ -#undef HAVE_GAI_STRERROR - -/* Define to 1 if you have the `getaddrinfo' function. */ -#undef HAVE_GETADDRINFO - -/* Define to 1 if you have the `gethostent' function. */ -#undef HAVE_GETHOSTENT - -/* Define to 1 if you have getpeereid. 
*/ -#undef HAVE_GETPEEREID - -/* Define to 1 if you have the `if_nametoindex' function. */ -#undef HAVE_IF_NAMETOINDEX - -/* Define to 1 if you have the header file. */ -#undef HAVE_INTTYPES_H - -/* Define to 1 if in_addr_t is available. */ -#undef HAVE_IN_ADDR_T - -/* Define to 1 if you have the `ws2_32' library (-lws2_32). */ -#undef HAVE_LIBWS2_32 - -/* Define to 1 if you have the header file. */ -#undef HAVE_LIMITS_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_LINUX_CAN_H - -/* Define to 1 if you have a Linux sendfile(2) implementation. */ -#undef HAVE_LINUX_SENDFILE - -/* Define to 1 if you have the header file. */ -#undef HAVE_LINUX_TCP_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_MEMORY_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_NETDB_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_NETINET_IN_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_NETINET_TCP_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_NET_IF_H - -/* Define to 1 if you have the `readlink' function. */ -#undef HAVE_READLINK - -/* Define to 1 if you have the header file. */ -#undef HAVE_STDINT_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_STDLIB_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_STRINGS_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_STRING_H - -/* Define to 1 if `msg_accrights' is a member of `struct msghdr'. */ -#undef HAVE_STRUCT_MSGHDR_MSG_ACCRIGHTS - -/* Define to 1 if `msg_control' is a member of `struct msghdr'. */ -#undef HAVE_STRUCT_MSGHDR_MSG_CONTROL - -/* Define to 1 if `sa_len' is a member of `struct sockaddr'. */ -#undef HAVE_STRUCT_SOCKADDR_SA_LEN - -/* Define to 1 if you have both SO_PEERCRED and struct ucred. */ -#undef HAVE_STRUCT_UCRED - -/* Define to 1 if you have the `symlink' function. */ -#undef HAVE_SYMLINK - -/* Define to 1 if you have the header file. 
*/ -#undef HAVE_SYS_SOCKET_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_SYS_STAT_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_SYS_TYPES_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_SYS_UIO_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_SYS_UN_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_UNISTD_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_WINSOCK2_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_WS2TCPIP_H - -/* Define to 1 if the `getaddrinfo' function needs WINVER set. */ -#undef NEED_WINVER_XP - -/* Define to the address where bug reports for this package should be sent. */ -#undef PACKAGE_BUGREPORT - -/* Define to the full name of this package. */ -#undef PACKAGE_NAME - -/* Define to the full name and version of this package. */ -#undef PACKAGE_STRING - -/* Define to the one symbol short name of this package. */ -#undef PACKAGE_TARNAME - -/* Define to the home page for this package. */ -#undef PACKAGE_URL - -/* Define to the version of this package. */ -#undef PACKAGE_VERSION - -/* Define to 1 if you have the ANSI C header files. */ -#undef STDC_HEADERS - -/* Define to empty if `const' does not conform to ANSI C. */ -#undef const diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/install-sh cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/install-sh --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/install-sh 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/install-sh 1970-01-01 00:00:00.000000000 +0000 @@ -1,295 +0,0 @@ -#!/bin/sh -# install - install a program, script, or datafile - -scriptversion=2003-09-24.23 - -# This originates from X11R5 (mit/util/scripts/install.sh), which was -# later released in X11R6 (xc/config/util/install.sh) with the -# following copyright and license. 
-# -# Copyright (C) 1994 X Consortium -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN -# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC- -# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -# Except as contained in this notice, the name of the X Consortium shall not -# be used in advertising or otherwise to promote the sale, use or other deal- -# ings in this Software without prior written authorization from the X Consor- -# tium. -# -# -# FSF changes to this file are in the public domain. -# -# Calling this script install-sh is preferred over install.sh, to prevent -# `make' implicit rules from creating a file called install from it -# when there is no Makefile. -# -# This script is compatible with the BSD install script, but was written -# from scratch. It can only install one file at a time, a restriction -# shared with many OS's install programs. - -# set DOITPROG to echo to test this script - -# Don't use :- since 4.3BSD and earlier shells don't like it. -doit="${DOITPROG-}" - -# put in absolute paths if you don't have them in your path; or use env. vars. 
- -mvprog="${MVPROG-mv}" -cpprog="${CPPROG-cp}" -chmodprog="${CHMODPROG-chmod}" -chownprog="${CHOWNPROG-chown}" -chgrpprog="${CHGRPPROG-chgrp}" -stripprog="${STRIPPROG-strip}" -rmprog="${RMPROG-rm}" -mkdirprog="${MKDIRPROG-mkdir}" - -transformbasename= -transform_arg= -instcmd="$mvprog" -chmodcmd="$chmodprog 0755" -chowncmd= -chgrpcmd= -stripcmd= -rmcmd="$rmprog -f" -mvcmd="$mvprog" -src= -dst= -dir_arg= - -usage="Usage: $0 [OPTION]... SRCFILE DSTFILE - or: $0 -d DIR1 DIR2... - -In the first form, install SRCFILE to DSTFILE, removing SRCFILE by default. -In the second, create the directory path DIR. - -Options: --b=TRANSFORMBASENAME --c copy source (using $cpprog) instead of moving (using $mvprog). --d create directories instead of installing files. --g GROUP $chgrp installed files to GROUP. --m MODE $chmod installed files to MODE. --o USER $chown installed files to USER. --s strip installed files (using $stripprog). --t=TRANSFORM ---help display this help and exit. ---version display version info and exit. - -Environment variables override the default commands: - CHGRPPROG CHMODPROG CHOWNPROG CPPROG MKDIRPROG MVPROG RMPROG STRIPPROG -" - -while test -n "$1"; do - case $1 in - -b=*) transformbasename=`echo $1 | sed 's/-b=//'` - shift - continue;; - - -c) instcmd=$cpprog - shift - continue;; - - -d) dir_arg=true - shift - continue;; - - -g) chgrpcmd="$chgrpprog $2" - shift - shift - continue;; - - --help) echo "$usage"; exit 0;; - - -m) chmodcmd="$chmodprog $2" - shift - shift - continue;; - - -o) chowncmd="$chownprog $2" - shift - shift - continue;; - - -s) stripcmd=$stripprog - shift - continue;; - - -t=*) transformarg=`echo $1 | sed 's/-t=//'` - shift - continue;; - - --version) echo "$0 $scriptversion"; exit 0;; - - *) if test -z "$src"; then - src=$1 - else - # this colon is to work around a 386BSD /bin/sh bug - : - dst=$1 - fi - shift - continue;; - esac -done - -if test -z "$src"; then - echo "$0: no input file specified." 
>&2 - exit 1 -fi - -# Protect names starting with `-'. -case $src in - -*) src=./$src ;; -esac - -if test -n "$dir_arg"; then - dst=$src - src= - - if test -d "$dst"; then - instcmd=: - chmodcmd= - else - instcmd=$mkdirprog - fi -else - # Waiting for this to be detected by the "$instcmd $src $dsttmp" command - # might cause directories to be created, which would be especially bad - # if $src (and thus $dsttmp) contains '*'. - if test ! -f "$src" && test ! -d "$src"; then - echo "$0: $src does not exist." >&2 - exit 1 - fi - - if test -z "$dst"; then - echo "$0: no destination specified." >&2 - exit 1 - fi - - # Protect names starting with `-'. - case $dst in - -*) dst=./$dst ;; - esac - - # If destination is a directory, append the input filename; won't work - # if double slashes aren't ignored. - if test -d "$dst"; then - dst=$dst/`basename "$src"` - fi -fi - -# This sed command emulates the dirname command. -dstdir=`echo "$dst" | sed -e 's,[^/]*$,,;s,/$,,;s,^$,.,'` - -# Make sure that the destination directory exists. - -# Skip lots of stat calls in the usual case. -if test ! -d "$dstdir"; then - defaultIFS=' - ' - IFS="${IFS-$defaultIFS}" - - oIFS=$IFS - # Some sh's can't handle IFS=/ for some reason. - IFS='%' - set - `echo "$dstdir" | sed -e 's@/@%@g' -e 's@^%@/@'` - IFS=$oIFS - - pathcomp= - - while test $# -ne 0 ; do - pathcomp=$pathcomp$1 - shift - test -d "$pathcomp" || $mkdirprog "$pathcomp" - pathcomp=$pathcomp/ - done -fi - -if test -n "$dir_arg"; then - $doit $instcmd "$dst" \ - && { test -z "$chowncmd" || $doit $chowncmd "$dst"; } \ - && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } \ - && { test -z "$stripcmd" || $doit $stripcmd "$dst"; } \ - && { test -z "$chmodcmd" || $doit $chmodcmd "$dst"; } - -else - # If we're going to rename the final executable, determine the name now. 
- if test -z "$transformarg"; then - dstfile=`basename "$dst"` - else - dstfile=`basename "$dst" $transformbasename \ - | sed $transformarg`$transformbasename - fi - - # don't allow the sed command to completely eliminate the filename. - test -z "$dstfile" && dstfile=`basename "$dst"` - - # Make a couple of temp file names in the proper directory. - dsttmp=$dstdir/_inst.$$_ - rmtmp=$dstdir/_rm.$$_ - - # Trap to clean up those temp files at exit. - trap 'status=$?; rm -f "$dsttmp" "$rmtmp" && exit $status' 0 - trap '(exit $?); exit' 1 2 13 15 - - # Move or copy the file name to the temp name - $doit $instcmd "$src" "$dsttmp" && - - # and set any options; do chmod last to preserve setuid bits. - # - # If any of these fail, we abort the whole thing. If we want to - # ignore errors from any of these, just make sure not to ignore - # errors from the above "$doit $instcmd $src $dsttmp" command. - # - { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } \ - && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } \ - && { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } \ - && { test -z "$chmodcmd" || $doit $chmodcmd "$dsttmp"; } && - - # Now remove or move aside any old file at destination location. We - # try this two ways since rm can't unlink itself on some systems and - # the destination file might be busy for other reasons. In this case, - # the final cleanup might fail but the new file should still install - # successfully. - { - if test -f "$dstdir/$dstfile"; then - $doit $rmcmd -f "$dstdir/$dstfile" 2>/dev/null \ - || $doit $mvcmd -f "$dstdir/$dstfile" "$rmtmp" 2>/dev/null \ - || { - echo "$0: cannot unlink or rename $dstdir/$dstfile" >&2 - (exit 1); exit - } - else - : - fi - } && - - # Now rename the file to the real destination. - $doit $mvcmd "$dsttmp" "$dstdir/$dstfile" -fi && - -# The final little trick to "correctly" pass the exit status to the exit trap. 
-{ - (exit 0); exit -} - -# Local variables: -# eval: (add-hook 'write-file-hooks 'time-stamp) -# time-stamp-start: "scriptversion=" -# time-stamp-format: "%:y-%02m-%02d.%02H" -# time-stamp-end: "$" -# End: diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/LICENSE cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/LICENSE --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/LICENSE 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -Copyright (c) 2002-2010, The University Court of the University of Glasgow. -Copyright (c) 2007-2010, Johan Tibell - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -- Redistributions of source code must retain the above copyright notice, -this list of conditions and the following disclaimer. - -- Redistributions in binary form must reproduce the above copyright notice, -this list of conditions and the following disclaimer in the documentation -and/or other materials provided with the distribution. - -- Neither name of the University nor the names of its contributors may be -used to endorse or promote products derived from this software without -specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY COURT OF THE UNIVERSITY OF -GLASGOW AND THE CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, -INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE -UNIVERSITY COURT OF THE UNIVERSITY OF GLASGOW OR THE CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT -LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY -OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH -DAMAGE. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/BSD.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/BSD.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/BSD.hsc 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/BSD.hsc 1970-01-01 00:00:00.000000000 +0000 @@ -1,574 +0,0 @@ -{-# LANGUAGE CPP, ForeignFunctionInterface #-} -{-# OPTIONS_HADDOCK hide #-} -{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-} ------------------------------------------------------------------------------ --- | --- Module : Network.BSD --- Copyright : (c) The University of Glasgow 2001 --- License : BSD-style (see the file libraries/network/LICENSE) --- --- Maintainer : libraries@haskell.org --- Stability : experimental --- Portability : non-portable --- --- The "Network.BSD" module defines Haskell bindings to network --- programming functionality provided by BSD Unix derivatives. --- ------------------------------------------------------------------------------ - -#include "HsNet.h" -##include "HsNetDef.h" - -module Network.BSD {-# DEPRECATED "This platform dependent module is no longer supported." #-} - ( - -- * Host names - HostName - , getHostName - - , HostEntry(..) 
- , getHostByName - , getHostByAddr - , hostAddress - -#if defined(HAVE_GETHOSTENT) && !defined(mingw32_HOST_OS) - , getHostEntries - - -- ** Low level functionality - , setHostEntry - , getHostEntry - , endHostEntry -#endif - - -- * Service names - , ServiceEntry(..) - , ServiceName - , getServiceByName - , getServiceByPort - , getServicePortNumber - -#if !defined(mingw32_HOST_OS) - , getServiceEntries - - -- ** Low level functionality - , getServiceEntry - , setServiceEntry - , endServiceEntry -#endif - - -- * Protocol names - , ProtocolName - , ProtocolNumber - , ProtocolEntry(..) - , getProtocolByName - , getProtocolByNumber - , getProtocolNumber - , defaultProtocol - -#if !defined(mingw32_HOST_OS) - , getProtocolEntries - -- ** Low level functionality - , setProtocolEntry - , getProtocolEntry - , endProtocolEntry -#endif - - -- * Port numbers - , PortNumber - - -- * Network names - , NetworkName - , NetworkAddr - , NetworkEntry(..) - -#if !defined(mingw32_HOST_OS) - , getNetworkByName - , getNetworkByAddr - , getNetworkEntries - -- ** Low level functionality - , setNetworkEntry - , getNetworkEntry - , endNetworkEntry -#endif - -#if defined(HAVE_IF_NAMETOINDEX) - -- * Interface names - , ifNameToIndex -#endif - - ) where - -import Network.Socket - -import Control.Concurrent (MVar, newMVar, withMVar) -import qualified Control.Exception as E -import Foreign.C.String (CString, peekCString, withCString) -#if defined(HAVE_WINSOCK2_H) -import Foreign.C.Types ( CShort ) -#endif -import Foreign.C.Types ( CInt(..), CULong(..), CSize(..) 
) -import Foreign.Ptr (Ptr, nullPtr) -import Foreign.Storable (Storable(..)) -import Foreign.Marshal.Array (allocaArray0, peekArray0) -import Foreign.Marshal.Utils (with, fromBool) -import Data.Typeable -import System.IO.Error (ioeSetErrorString, mkIOError) -import System.IO.Unsafe (unsafePerformIO) - -import GHC.IO.Exception - -import Control.Monad (liftM) - -import Network.Socket.Internal (throwSocketErrorIfMinus1_) - --- --------------------------------------------------------------------------- --- Basic Types - -type ProtocolName = String - --- --------------------------------------------------------------------------- --- Service Database Access - --- Calling getServiceByName for a given service and protocol returns --- the systems service entry. This should be used to find the port --- numbers for standard protocols such as SMTP and FTP. The remaining --- three functions should be used for browsing the service database --- sequentially. - --- Calling setServiceEntry with True indicates that the service --- database should be left open between calls to getServiceEntry. To --- close the database a call to endServiceEntry is required. This --- database file is usually stored in the file /etc/services. - -data ServiceEntry = - ServiceEntry { - serviceName :: ServiceName, -- Official Name - serviceAliases :: [ServiceName], -- aliases - servicePort :: PortNumber, -- Port Number ( network byte order ) - serviceProtocol :: ProtocolName -- Protocol - } deriving (Show, Typeable) - -instance Storable ServiceEntry where - sizeOf _ = #const sizeof(struct servent) - alignment _ = alignment (undefined :: CInt) -- ??? 
- - peek p = do - s_name <- (#peek struct servent, s_name) p >>= peekCString - s_aliases <- (#peek struct servent, s_aliases) p - >>= peekArray0 nullPtr - >>= mapM peekCString - s_port <- (#peek struct servent, s_port) p - s_proto <- (#peek struct servent, s_proto) p >>= peekCString - return (ServiceEntry { - serviceName = s_name, - serviceAliases = s_aliases, -#if defined(HAVE_WINSOCK2_H) - servicePort = (fromIntegral (s_port :: CShort)), -#else - -- s_port is already in network byte order, but it - -- might be the wrong size. - servicePort = (fromIntegral (s_port :: CInt)), -#endif - serviceProtocol = s_proto - }) - - poke = throwUnsupportedOperationPoke "ServiceEntry" - - --- | Get service by name. -getServiceByName :: ServiceName -- Service Name - -> ProtocolName -- Protocol Name - -> IO ServiceEntry -- Service Entry -getServiceByName name proto = withLock $ do - withCString name $ \ cstr_name -> do - withCString proto $ \ cstr_proto -> do - throwNoSuchThingIfNull "Network.BSD.getServiceByName" "no such service entry" - $ c_getservbyname cstr_name cstr_proto - >>= peek - -foreign import CALLCONV unsafe "getservbyname" - c_getservbyname :: CString -> CString -> IO (Ptr ServiceEntry) - --- | Get the service given a 'PortNumber' and 'ProtocolName'. -getServiceByPort :: PortNumber -> ProtocolName -> IO ServiceEntry -getServiceByPort port proto = withLock $ do - withCString proto $ \ cstr_proto -> do - throwNoSuchThingIfNull "Network.BSD.getServiceByPort" "no such service entry" - $ c_getservbyport (fromIntegral port) cstr_proto - >>= peek - -foreign import CALLCONV unsafe "getservbyport" - c_getservbyport :: CInt -> CString -> IO (Ptr ServiceEntry) - --- | Get the 'PortNumber' corresponding to the 'ServiceName'. 
-getServicePortNumber :: ServiceName -> IO PortNumber -getServicePortNumber name = do - (ServiceEntry _ _ port _) <- getServiceByName name "tcp" - return port - -#if !defined(mingw32_HOST_OS) -getServiceEntry :: IO ServiceEntry -getServiceEntry = withLock $ do - throwNoSuchThingIfNull "Network.BSD.getServiceEntry" "no such service entry" - $ c_getservent - >>= peek - -foreign import ccall unsafe "getservent" c_getservent :: IO (Ptr ServiceEntry) - -setServiceEntry :: Bool -> IO () -setServiceEntry flg = withLock $ c_setservent (fromBool flg) - -foreign import ccall unsafe "setservent" c_setservent :: CInt -> IO () - -endServiceEntry :: IO () -endServiceEntry = withLock $ c_endservent - -foreign import ccall unsafe "endservent" c_endservent :: IO () - -getServiceEntries :: Bool -> IO [ServiceEntry] -getServiceEntries stayOpen = do - setServiceEntry stayOpen - getEntries (getServiceEntry) (endServiceEntry) -#endif - --- --------------------------------------------------------------------------- --- Protocol Entries - --- The following relate directly to the corresponding UNIX C --- calls for returning the protocol entries. The protocol entry is --- represented by the Haskell type ProtocolEntry. - --- As for setServiceEntry above, calling setProtocolEntry. --- determines whether or not the protocol database file, usually --- @/etc/protocols@, is to be kept open between calls of --- getProtocolEntry. Similarly, - -data ProtocolEntry = - ProtocolEntry { - protoName :: ProtocolName, -- Official Name - protoAliases :: [ProtocolName], -- aliases - protoNumber :: ProtocolNumber -- Protocol Number - } deriving (Read, Show, Typeable) - -instance Storable ProtocolEntry where - sizeOf _ = #const sizeof(struct protoent) - alignment _ = alignment (undefined :: CInt) -- ??? 
- - peek p = do - p_name <- (#peek struct protoent, p_name) p >>= peekCString - p_aliases <- (#peek struct protoent, p_aliases) p - >>= peekArray0 nullPtr - >>= mapM peekCString -#if defined(HAVE_WINSOCK2_H) - -- With WinSock, the protocol number is only a short; - -- hoist it in as such, but represent it on the Haskell side - -- as a CInt. - p_proto_short <- (#peek struct protoent, p_proto) p - let p_proto = fromIntegral (p_proto_short :: CShort) -#else - p_proto <- (#peek struct protoent, p_proto) p -#endif - return (ProtocolEntry { - protoName = p_name, - protoAliases = p_aliases, - protoNumber = p_proto - }) - - poke = throwUnsupportedOperationPoke "ProtocolEntry" - - -getProtocolByName :: ProtocolName -> IO ProtocolEntry -getProtocolByName name = withLock $ do - withCString name $ \ name_cstr -> do - throwNoSuchThingIfNull "Network.BSD.getProtocolByName" ("no such protocol name: " ++ name) - $ c_getprotobyname name_cstr - >>= peek - -foreign import CALLCONV unsafe "getprotobyname" - c_getprotobyname :: CString -> IO (Ptr ProtocolEntry) - - -getProtocolByNumber :: ProtocolNumber -> IO ProtocolEntry -getProtocolByNumber num = withLock $ do - throwNoSuchThingIfNull "Network.BSD.getProtocolByNumber" ("no such protocol number: " ++ show num) - $ c_getprotobynumber (fromIntegral num) - >>= peek - -foreign import CALLCONV unsafe "getprotobynumber" - c_getprotobynumber :: CInt -> IO (Ptr ProtocolEntry) - - -getProtocolNumber :: ProtocolName -> IO ProtocolNumber -getProtocolNumber proto = do - (ProtocolEntry _ _ num) <- getProtocolByName proto - return num - -#if !defined(mingw32_HOST_OS) -getProtocolEntry :: IO ProtocolEntry -- Next Protocol Entry from DB -getProtocolEntry = withLock $ do - ent <- throwNoSuchThingIfNull "Network.BSD.getProtocolEntry" "no such protocol entry" - $ c_getprotoent - peek ent - -foreign import ccall unsafe "getprotoent" c_getprotoent :: IO (Ptr ProtocolEntry) - -setProtocolEntry :: Bool -> IO () -- Keep DB Open ? 
-setProtocolEntry flg = withLock $ c_setprotoent (fromBool flg) - -foreign import ccall unsafe "setprotoent" c_setprotoent :: CInt -> IO () - -endProtocolEntry :: IO () -endProtocolEntry = withLock $ c_endprotoent - -foreign import ccall unsafe "endprotoent" c_endprotoent :: IO () - -getProtocolEntries :: Bool -> IO [ProtocolEntry] -getProtocolEntries stayOpen = withLock $ do - setProtocolEntry stayOpen - getEntries (getProtocolEntry) (endProtocolEntry) -#endif - --- --------------------------------------------------------------------------- --- Host lookups - -data HostEntry = - HostEntry { - hostName :: HostName, -- Official Name - hostAliases :: [HostName], -- aliases - hostFamily :: Family, -- Host Type (currently AF_INET) - hostAddresses :: [HostAddress] -- Set of Network Addresses (in network byte order) - } deriving (Read, Show, Typeable) - -instance Storable HostEntry where - sizeOf _ = #const sizeof(struct hostent) - alignment _ = alignment (undefined :: CInt) -- ??? - - peek p = do - h_name <- (#peek struct hostent, h_name) p >>= peekCString - h_aliases <- (#peek struct hostent, h_aliases) p - >>= peekArray0 nullPtr - >>= mapM peekCString - h_addrtype <- (#peek struct hostent, h_addrtype) p - -- h_length <- (#peek struct hostent, h_length) p - h_addr_list <- (#peek struct hostent, h_addr_list) p - >>= peekArray0 nullPtr - >>= mapM peek - return (HostEntry { - hostName = h_name, - hostAliases = h_aliases, -#if defined(HAVE_WINSOCK2_H) - hostFamily = unpackFamily (fromIntegral (h_addrtype :: CShort)), -#else - hostFamily = unpackFamily h_addrtype, -#endif - hostAddresses = h_addr_list - }) - - poke = throwUnsupportedOperationPoke "HostEntry" - - --- convenience function: -hostAddress :: HostEntry -> HostAddress -hostAddress (HostEntry nm _ _ ls) = - case ls of - [] -> error $ "Network.BSD.hostAddress: empty network address list for " ++ nm - (x:_) -> x - --- getHostByName must use the same lock as the *hostent functions --- may cause problems if called 
concurrently. - --- | Resolve a 'HostName' to IPv4 address. -getHostByName :: HostName -> IO HostEntry -getHostByName name = withLock $ do - withCString name $ \ name_cstr -> do - ent <- throwNoSuchThingIfNull "Network.BSD.getHostByName" "no such host entry" - $ c_gethostbyname name_cstr - peek ent - -foreign import CALLCONV safe "gethostbyname" - c_gethostbyname :: CString -> IO (Ptr HostEntry) - - --- The locking of gethostbyaddr is similar to gethostbyname. --- | Get a 'HostEntry' corresponding to the given address and family. --- Note that only IPv4 is currently supported. -getHostByAddr :: Family -> HostAddress -> IO HostEntry -getHostByAddr family addr = do - with addr $ \ ptr_addr -> withLock $ do - throwNoSuchThingIfNull "Network.BSD.getHostByAddr" "no such host entry" - $ c_gethostbyaddr ptr_addr (fromIntegral (sizeOf addr)) (packFamily family) - >>= peek - -foreign import CALLCONV safe "gethostbyaddr" - c_gethostbyaddr :: Ptr HostAddress -> CInt -> CInt -> IO (Ptr HostEntry) - -#if defined(HAVE_GETHOSTENT) && !defined(mingw32_HOST_OS) -getHostEntry :: IO HostEntry -getHostEntry = withLock $ do - throwNoSuchThingIfNull "Network.BSD.getHostEntry" "unable to retrieve host entry" - $ c_gethostent - >>= peek - -foreign import ccall unsafe "gethostent" c_gethostent :: IO (Ptr HostEntry) - -setHostEntry :: Bool -> IO () -setHostEntry flg = withLock $ c_sethostent (fromBool flg) - -foreign import ccall unsafe "sethostent" c_sethostent :: CInt -> IO () - -endHostEntry :: IO () -endHostEntry = withLock $ c_endhostent - -foreign import ccall unsafe "endhostent" c_endhostent :: IO () - -getHostEntries :: Bool -> IO [HostEntry] -getHostEntries stayOpen = do - setHostEntry stayOpen - getEntries (getHostEntry) (endHostEntry) -#endif - --- --------------------------------------------------------------------------- --- Accessing network information - --- Same set of access functions as for accessing host,protocol and --- service system info, this time for the types of 
networks supported. - --- network addresses are represented in host byte order. -type NetworkAddr = CULong - -type NetworkName = String - -data NetworkEntry = - NetworkEntry { - networkName :: NetworkName, -- official name - networkAliases :: [NetworkName], -- aliases - networkFamily :: Family, -- type - networkAddress :: NetworkAddr - } deriving (Read, Show, Typeable) - -instance Storable NetworkEntry where - sizeOf _ = #const sizeof(struct hostent) - alignment _ = alignment (undefined :: CInt) -- ??? - - peek p = do - n_name <- (#peek struct netent, n_name) p >>= peekCString - n_aliases <- (#peek struct netent, n_aliases) p - >>= peekArray0 nullPtr - >>= mapM peekCString - n_addrtype <- (#peek struct netent, n_addrtype) p - n_net <- (#peek struct netent, n_net) p - return (NetworkEntry { - networkName = n_name, - networkAliases = n_aliases, - networkFamily = unpackFamily (fromIntegral - (n_addrtype :: CInt)), - networkAddress = n_net - }) - - poke = throwUnsupportedOperationPoke "NetworkEntry" - - -#if !defined(mingw32_HOST_OS) -getNetworkByName :: NetworkName -> IO NetworkEntry -getNetworkByName name = withLock $ do - withCString name $ \ name_cstr -> do - throwNoSuchThingIfNull "Network.BSD.getNetworkByName" "no such network entry" - $ c_getnetbyname name_cstr - >>= peek - -foreign import ccall unsafe "getnetbyname" - c_getnetbyname :: CString -> IO (Ptr NetworkEntry) - -getNetworkByAddr :: NetworkAddr -> Family -> IO NetworkEntry -getNetworkByAddr addr family = withLock $ do - throwNoSuchThingIfNull "Network.BSD.getNetworkByAddr" "no such network entry" - $ c_getnetbyaddr addr (packFamily family) - >>= peek - -foreign import ccall unsafe "getnetbyaddr" - c_getnetbyaddr :: NetworkAddr -> CInt -> IO (Ptr NetworkEntry) - -getNetworkEntry :: IO NetworkEntry -getNetworkEntry = withLock $ do - throwNoSuchThingIfNull "Network.BSD.getNetworkEntry" "no more network entries" - $ c_getnetent - >>= peek - -foreign import ccall unsafe "getnetent" c_getnetent :: IO (Ptr 
NetworkEntry) - --- | Open the network name database. The parameter specifies --- whether a connection is maintained open between various --- networkEntry calls -setNetworkEntry :: Bool -> IO () -setNetworkEntry flg = withLock $ c_setnetent (fromBool flg) - -foreign import ccall unsafe "setnetent" c_setnetent :: CInt -> IO () - --- | Close the connection to the network name database. -endNetworkEntry :: IO () -endNetworkEntry = withLock $ c_endnetent - -foreign import ccall unsafe "endnetent" c_endnetent :: IO () - --- | Get the list of network entries. -getNetworkEntries :: Bool -> IO [NetworkEntry] -getNetworkEntries stayOpen = do - setNetworkEntry stayOpen - getEntries (getNetworkEntry) (endNetworkEntry) -#endif - --- Mutex for name service lockdown - -{-# NOINLINE lock #-} -lock :: MVar () -lock = unsafePerformIO $ withSocketsDo $ newMVar () - -withLock :: IO a -> IO a -withLock act = withMVar lock (\_ -> act) - --- --------------------------------------------------------------------------- --- Miscellaneous Functions - --- | Calling getHostName returns the standard host name for the current --- processor, as set at boot time. 
- -getHostName :: IO HostName -getHostName = do - let size = 256 - allocaArray0 size $ \ cstr -> do - throwSocketErrorIfMinus1_ "Network.BSD.getHostName" $ c_gethostname cstr (fromIntegral size) - peekCString cstr - -foreign import CALLCONV unsafe "gethostname" - c_gethostname :: CString -> CSize -> IO CInt - --- Helper function used by the exported functions that provides a --- Haskellised view of the enumerator functions: - -getEntries :: IO a -- read - -> IO () -- at end - -> IO [a] -getEntries getOne atEnd = loop - where - loop = do - vv <- E.catch (liftM Just getOne) - (\ e -> let _types = e :: IOException in return Nothing) - case vv of - Nothing -> return [] - Just v -> loop >>= \ vs -> atEnd >> return (v:vs) - - -throwNoSuchThingIfNull :: String -> String -> IO (Ptr a) -> IO (Ptr a) -throwNoSuchThingIfNull loc desc act = do - ptr <- act - if (ptr == nullPtr) - then ioError (ioeSetErrorString (mkIOError NoSuchThing loc Nothing Nothing) desc) - else return ptr - -throwUnsupportedOperationPoke :: String -> Ptr a -> a -> IO () -throwUnsupportedOperationPoke typ _ _ = - ioError $ ioeSetErrorString ioe "Operation not implemented" - where - ioe = mkIOError UnsupportedOperation - ("Network.BSD: instance Storable " ++ typ ++ ": poke") - Nothing - Nothing diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/ByteString/Internal.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/ByteString/Internal.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/ByteString/Internal.hs 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/ByteString/Internal.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,55 +0,0 @@ -{-# LANGUAGE CPP, ForeignFunctionInterface #-} - --- | --- Module : Network.Socket.ByteString.Internal --- Copyright : (c) Johan Tibell 2007-2010 --- License : BSD-style --- --- Maintainer : 
johan.tibell@gmail.com --- Stability : stable --- Portability : portable --- -module Network.Socket.ByteString.Internal - ( - mkInvalidRecvArgError -#if !defined(mingw32_HOST_OS) - , c_writev - , c_sendmsg -#endif - , waitWhen0 - ) where - -import System.IO.Error (ioeSetErrorString, mkIOError) - -#if !defined(mingw32_HOST_OS) -import Foreign.C.Types (CInt(..)) -import System.Posix.Types (CSsize(..)) -import Foreign.Ptr (Ptr) - -import Network.Socket.ByteString.IOVec (IOVec) -import Network.Socket.ByteString.MsgHdr (MsgHdr) -#endif - -import Control.Concurrent (threadWaitWrite, rtsSupportsBoundThreads) -import Control.Monad (when) -import GHC.IO.Exception (IOErrorType(..)) -import Network.Socket.Types - -mkInvalidRecvArgError :: String -> IOError -mkInvalidRecvArgError loc = ioeSetErrorString (mkIOError - InvalidArgument - loc Nothing Nothing) "non-positive length" - -#if !defined(mingw32_HOST_OS) -foreign import ccall unsafe "writev" - c_writev :: CInt -> Ptr IOVec -> CInt -> IO CSsize - -foreign import ccall unsafe "sendmsg" - c_sendmsg :: CInt -> Ptr MsgHdr -> CInt -> IO CSsize -#endif - -waitWhen0 :: Int -> Socket -> IO () -waitWhen0 0 s = when rtsSupportsBoundThreads $ do - let fd = fromIntegral $ fdSocket s - threadWaitWrite fd -waitWhen0 _ _ = return () diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/ByteString/IOVec.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/ByteString/IOVec.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/ByteString/IOVec.hsc 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/ByteString/IOVec.hsc 1970-01-01 00:00:00.000000000 +0000 @@ -1,31 +0,0 @@ -{-# OPTIONS_GHC -funbox-strict-fields #-} - --- | Support module for the POSIX writev system call. -module Network.Socket.ByteString.IOVec - ( IOVec(..) 
- ) where - -import Foreign.C.Types (CChar, CInt, CSize) -import Foreign.Ptr (Ptr) -import Foreign.Storable (Storable(..)) - -#include -#include - -data IOVec = IOVec - { iovBase :: !(Ptr CChar) - , iovLen :: !CSize - } - -instance Storable IOVec where - sizeOf _ = (#const sizeof(struct iovec)) - alignment _ = alignment (undefined :: CInt) - - peek p = do - base <- (#peek struct iovec, iov_base) p - len <- (#peek struct iovec, iov_len) p - return $ IOVec base len - - poke p iov = do - (#poke struct iovec, iov_base) p (iovBase iov) - (#poke struct iovec, iov_len) p (iovLen iov) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/ByteString/Lazy/Posix.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/ByteString/Lazy/Posix.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/ByteString/Lazy/Posix.hs 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/ByteString/Lazy/Posix.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-} -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE OverloadedStrings #-} - -module Network.Socket.ByteString.Lazy.Posix - ( - -- * Send data to a socket - send - , sendAll - ) where - -import Control.Monad (liftM, when) -import qualified Data.ByteString.Lazy as L -import Data.ByteString.Lazy.Internal (ByteString(..)) -import Data.ByteString.Unsafe (unsafeUseAsCStringLen) -import Data.Int (Int64) -import Foreign.Marshal.Array (allocaArray) -import Foreign.Ptr (plusPtr) -import Foreign.Storable (Storable(..)) - -import Network.Socket (Socket(..)) -import Network.Socket.ByteString.IOVec (IOVec(IOVec)) -import Network.Socket.ByteString.Internal (c_writev, waitWhen0) -import Network.Socket.Internal - --- ----------------------------------------------------------------------------- --- Sending - -send :: Socket -- ^ Connected socket - -> 
ByteString -- ^ Data to send - -> IO Int64 -- ^ Number of bytes sent -send sock@(MkSocket fd _ _ _ _) s = do - let cs = take maxNumChunks (L.toChunks s) - len = length cs - liftM fromIntegral . allocaArray len $ \ptr -> - withPokes cs ptr $ \niovs -> - throwSocketErrorWaitWrite sock "writev" $ - c_writev (fromIntegral fd) ptr niovs - where - withPokes ss p f = loop ss p 0 0 - where loop (c:cs) q k !niovs - | k < maxNumBytes = - unsafeUseAsCStringLen c $ \(ptr,len) -> do - poke q $ IOVec ptr (fromIntegral len) - loop cs (q `plusPtr` sizeOf (undefined :: IOVec)) - (k + fromIntegral len) (niovs + 1) - | otherwise = f niovs - loop _ _ _ niovs = f niovs - maxNumBytes = 4194304 :: Int -- maximum number of bytes to transmit in one system call - maxNumChunks = 1024 :: Int -- maximum number of chunks to transmit in one system call - -sendAll :: Socket -- ^ Connected socket - -> ByteString -- ^ Data to send - -> IO () -sendAll _ "" = return () -sendAll sock bs = do - sent <- send sock bs - waitWhen0 (fromIntegral sent) sock - when (sent >= 0) $ sendAll sock $ L.drop sent bs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/ByteString/Lazy/Windows.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/ByteString/Lazy/Windows.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/ByteString/Lazy/Windows.hs 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/ByteString/Lazy/Windows.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,40 +0,0 @@ -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE OverloadedStrings #-} - -module Network.Socket.ByteString.Lazy.Windows - ( - -- * Send data to a socket - send - , sendAll - ) where - -import Control.Applicative ((<$>)) -import Control.Monad (when) -import qualified Data.ByteString as S -import qualified Data.ByteString.Lazy as L -import Data.Int (Int64) - -import Network.Socket 
(Socket(..)) -import qualified Network.Socket.ByteString as Socket -import Network.Socket.ByteString.Internal (waitWhen0) - --- ----------------------------------------------------------------------------- --- Sending - -send :: Socket -- ^ Connected socket - -> L.ByteString -- ^ Data to send - -> IO Int64 -- ^ Number of bytes sent -send sock s = do - fromIntegral <$> case L.toChunks s of - -- TODO: Consider doing nothing if the string is empty. - [] -> Socket.send sock S.empty - (x:_) -> Socket.send sock x - -sendAll :: Socket -- ^ Connected socket - -> L.ByteString -- ^ Data to send - -> IO () -sendAll _ "" = return () -sendAll sock bs = do - sent <- send sock bs - waitWhen0 (fromIntegral sent) sock - when (sent >= 0) $ sendAll sock $ L.drop sent bs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/ByteString/Lazy.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/ByteString/Lazy.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/ByteString/Lazy.hs 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/ByteString/Lazy.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,88 +0,0 @@ -{-# LANGUAGE CPP #-} - --- | --- Module : Network.Socket.ByteString.Lazy --- Copyright : (c) Bryan O'Sullivan 2009 --- License : BSD-style --- --- Maintainer : bos@serpentine.com --- Stability : experimental --- Portability : POSIX, GHC --- --- This module provides access to the BSD /socket/ interface. This --- module is generally more efficient than the 'String' based network --- functions in 'Network.Socket'. For detailed documentation, consult --- your favorite POSIX socket reference. All functions communicate --- failures by converting the error number to 'System.IO.IOError'. 
--- --- This module is made to be imported with 'Network.Socket' like so: --- --- > import Network.Socket hiding (send, sendTo, recv, recvFrom) --- > import Network.Socket.ByteString.Lazy --- > import Prelude hiding (getContents) --- -module Network.Socket.ByteString.Lazy - ( - -- * Send data to a socket - send - , sendAll - , - - -- * Receive data from a socket - getContents - , recv - ) where - -import Control.Monad (liftM) -import Data.ByteString.Lazy.Internal (ByteString(..), defaultChunkSize) -import Data.Int (Int64) -import Network.Socket (Socket(..), ShutdownCmd(..), shutdown) -import Prelude hiding (getContents) -import System.IO.Unsafe (unsafeInterleaveIO) - -import qualified Data.ByteString as S -import qualified Network.Socket.ByteString as N - -#if defined(mingw32_HOST_OS) -import Network.Socket.ByteString.Lazy.Windows (send, sendAll) -#else -import Network.Socket.ByteString.Lazy.Posix (send, sendAll) -#endif - --- ----------------------------------------------------------------------------- --- Receiving - --- | Receive data from the socket. The socket must be in a connected --- state. Data is received on demand, in chunks; each chunk will be --- sized to reflect the amount of data received by individual 'recv' --- calls. --- --- All remaining data from the socket is consumed. When there is no --- more data to be received, the receiving side of the socket is shut --- down. If there is an error and an exception is thrown, the socket --- is not shut down. -getContents :: Socket -- ^ Connected socket - -> IO ByteString -- ^ Data received -getContents sock = loop where - loop = unsafeInterleaveIO $ do - s <- N.recv sock defaultChunkSize - if S.null s - then shutdown sock ShutdownReceive >> return Empty - else Chunk s `liftM` loop - --- | Receive data from the socket. The socket must be in a connected --- state. This function may return fewer bytes than specified. 
If --- the received data is longer than the specified length, it may be --- discarded depending on the type of socket. This function may block --- until a message arrives. --- --- If there is no more data to be received, returns an empty 'ByteString'. --- --- Receiving data from closed socket may lead to undefined behaviour. -recv :: Socket -- ^ Connected socket - -> Int64 -- ^ Maximum number of bytes to receive - -> IO ByteString -- ^ Data received -recv sock nbytes = chunk `liftM` N.recv sock (fromIntegral nbytes) where - chunk k - | S.null k = Empty - | otherwise = Chunk k Empty diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/ByteString/MsgHdr.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/ByteString/MsgHdr.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/ByteString/MsgHdr.hsc 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/ByteString/MsgHdr.hsc 1970-01-01 00:00:00.000000000 +0000 @@ -1,48 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# OPTIONS_GHC -funbox-strict-fields #-} - --- | Support module for the POSIX 'sendmsg' system call. -module Network.Socket.ByteString.MsgHdr - ( MsgHdr(..) - ) where - -#include -#include - -import Foreign.C.Types (CInt, CSize, CUInt) -import Foreign.Ptr (Ptr) -import Foreign.Storable (Storable(..)) -import Network.Socket (SockAddr) -import Network.Socket.Internal (zeroMemory) - -import Network.Socket.ByteString.IOVec (IOVec) - --- We don't use msg_control, msg_controllen, and msg_flags as these --- don't exist on OpenSolaris. 
-data MsgHdr = MsgHdr - { msgName :: !(Ptr SockAddr) - , msgNameLen :: !CUInt - , msgIov :: !(Ptr IOVec) - , msgIovLen :: !CSize - } - -instance Storable MsgHdr where - sizeOf _ = (#const sizeof(struct msghdr)) - alignment _ = alignment (undefined :: CInt) - - peek p = do - name <- (#peek struct msghdr, msg_name) p - nameLen <- (#peek struct msghdr, msg_namelen) p - iov <- (#peek struct msghdr, msg_iov) p - iovLen <- (#peek struct msghdr, msg_iovlen) p - return $ MsgHdr name nameLen iov iovLen - - poke p mh = do - -- We need to zero the msg_control, msg_controllen, and msg_flags - -- fields, but they only exist on some platforms (e.g. not on - -- Solaris). Instead of using CPP, we zero the entire struct. - zeroMemory p (#const sizeof(struct msghdr)) - (#poke struct msghdr, msg_name) p (msgName mh) - (#poke struct msghdr, msg_namelen) p (msgNameLen mh) - (#poke struct msghdr, msg_iov) p (msgIov mh) - (#poke struct msghdr, msg_iovlen) p (msgIovLen mh) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/ByteString.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/ByteString.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/ByteString.hsc 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/ByteString.hsc 1970-01-01 00:00:00.000000000 +0000 @@ -1,285 +0,0 @@ -{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-} -{-# LANGUAGE CPP, ForeignFunctionInterface #-} -{-# LANGUAGE OverloadedStrings #-} - -#include "HsNet.h" - --- | --- Module : Network.Socket.ByteString --- Copyright : (c) Johan Tibell 2007-2010 --- License : BSD-style --- --- Maintainer : johan.tibell@gmail.com --- Stability : stable --- Portability : portable --- --- This module provides access to the BSD /socket/ interface. This --- module is generally more efficient than the 'String' based network --- functions in 'Network.Socket'. 
For detailed documentation, consult --- your favorite POSIX socket reference. All functions communicate --- failures by converting the error number to 'System.IO.IOError'. --- --- This module is made to be imported with 'Network.Socket' like so: --- --- > import Network.Socket hiding (send, sendTo, recv, recvFrom) --- > import Network.Socket.ByteString --- -module Network.Socket.ByteString - ( - -- * Send data to a socket - send - , sendAll - , sendTo - , sendAllTo - - -- ** Vectored I/O - -- $vectored - , sendMany - , sendManyTo - - -- * Receive data from a socket - , recv - , recvFrom - ) where - -import Control.Exception as E (catch, throwIO) -import Control.Monad (when) -import Data.ByteString (ByteString) -import Data.ByteString.Internal (createAndTrim) -import Data.ByteString.Unsafe (unsafeUseAsCStringLen) -import Foreign.Marshal.Alloc (allocaBytes) -import Foreign.Ptr (castPtr) -import Network.Socket (sendBuf, sendBufTo, recvBuf, recvBufFrom) -import System.IO.Error (isEOFError) - -import qualified Data.ByteString as B - -import Network.Socket.ByteString.Internal -import Network.Socket.Internal -import Network.Socket.Types - -#if !defined(mingw32_HOST_OS) -import Control.Monad (liftM, zipWithM_) -import Foreign.Marshal.Array (allocaArray) -import Foreign.Marshal.Utils (with) -import Foreign.Ptr (Ptr, plusPtr) -import Foreign.Storable (Storable(..)) - -import Network.Socket.ByteString.IOVec (IOVec(..)) -import Network.Socket.ByteString.MsgHdr (MsgHdr(..)) -#endif - --- ---------------------------------------------------------------------------- --- Sending - --- | Send data to the socket. The socket must be connected to a --- remote socket. Returns the number of bytes sent. Applications are --- responsible for ensuring that all data has been sent. --- --- Sending data to closed socket may lead to undefined behaviour. 
-send :: Socket -- ^ Connected socket - -> ByteString -- ^ Data to send - -> IO Int -- ^ Number of bytes sent -send sock xs = unsafeUseAsCStringLen xs $ \(str, len) -> - sendBuf sock (castPtr str) len - --- | Send data to the socket. The socket must be connected to a --- remote socket. Unlike 'send', this function continues to send data --- until either all data has been sent or an error occurs. On error, --- an exception is raised, and there is no way to determine how much --- data, if any, was successfully sent. --- --- Sending data to closed socket may lead to undefined behaviour. -sendAll :: Socket -- ^ Connected socket - -> ByteString -- ^ Data to send - -> IO () -sendAll _ "" = return () -sendAll sock bs = do - sent <- send sock bs - waitWhen0 sent sock - when (sent >= 0) $ sendAll sock $ B.drop sent bs - --- | Send data to the socket. The recipient can be specified --- explicitly, so the socket need not be in a connected state. --- Returns the number of bytes sent. Applications are responsible for --- ensuring that all data has been sent. --- --- Sending data to closed socket may lead to undefined behaviour. -sendTo :: Socket -- ^ Socket - -> ByteString -- ^ Data to send - -> SockAddr -- ^ Recipient address - -> IO Int -- ^ Number of bytes sent -sendTo sock xs addr = - unsafeUseAsCStringLen xs $ \(str, len) -> sendBufTo sock str len addr - --- | Send data to the socket. The recipient can be specified --- explicitly, so the socket need not be in a connected state. Unlike --- 'sendTo', this function continues to send data until either all --- data has been sent or an error occurs. On error, an exception is --- raised, and there is no way to determine how much data, if any, was --- successfully sent. --- --- Sending data to closed socket may lead to undefined behaviour. 
-sendAllTo :: Socket -- ^ Socket - -> ByteString -- ^ Data to send - -> SockAddr -- ^ Recipient address - -> IO () -sendAllTo _ "" _ = return () -sendAllTo sock xs addr = do - sent <- sendTo sock xs addr - waitWhen0 sent sock - when (sent >= 0) $ sendAllTo sock (B.drop sent xs) addr - --- ---------------------------------------------------------------------------- --- ** Vectored I/O - --- $vectored --- --- Vectored I\/O, also known as scatter\/gather I\/O, allows multiple --- data segments to be sent using a single system call, without first --- concatenating the segments. For example, given a list of --- @ByteString@s, @xs@, --- --- > sendMany sock xs --- --- is equivalent to --- --- > sendAll sock (concat xs) --- --- but potentially more efficient. --- --- Vectored I\/O are often useful when implementing network protocols --- that, for example, group data into segments consisting of one or --- more fixed-length headers followed by a variable-length body. - --- | Send data to the socket. The socket must be in a connected --- state. The data is sent as if the parts have been concatenated. --- This function continues to send data until either all data has been --- sent or an error occurs. On error, an exception is raised, and --- there is no way to determine how much data, if any, was --- successfully sent. --- --- Sending data to closed socket may lead to undefined behaviour. -sendMany :: Socket -- ^ Connected socket - -> [ByteString] -- ^ Data to send - -> IO () -#if !defined(mingw32_HOST_OS) -sendMany _ [] = return () -sendMany sock@(MkSocket fd _ _ _ _) cs = do - sent <- sendManyInner - waitWhen0 sent sock - when (sent >= 0) $ sendMany sock (remainingChunks sent cs) - where - sendManyInner = - liftM fromIntegral . withIOVec cs $ \(iovsPtr, iovsLen) -> - throwSocketErrorWaitWrite sock "Network.Socket.ByteString.sendMany" $ - c_writev (fromIntegral fd) iovsPtr - (fromIntegral (min iovsLen (#const IOV_MAX))) -#else -sendMany sock = sendAll sock . 
B.concat -#endif - --- | Send data to the socket. The recipient can be specified --- explicitly, so the socket need not be in a connected state. The --- data is sent as if the parts have been concatenated. This function --- continues to send data until either all data has been sent or an --- error occurs. On error, an exception is raised, and there is no --- way to determine how much data, if any, was successfully sent. --- --- Sending data to closed socket may lead to undefined behaviour. -sendManyTo :: Socket -- ^ Socket - -> [ByteString] -- ^ Data to send - -> SockAddr -- ^ Recipient address - -> IO () -#if !defined(mingw32_HOST_OS) -sendManyTo _ [] _ = return () -sendManyTo sock@(MkSocket fd _ _ _ _) cs addr = do - sent <- liftM fromIntegral sendManyToInner - waitWhen0 sent sock - when (sent >= 0) $ sendManyTo sock (remainingChunks sent cs) addr - where - sendManyToInner = - withSockAddr addr $ \addrPtr addrSize -> - withIOVec cs $ \(iovsPtr, iovsLen) -> do - let msgHdr = MsgHdr - addrPtr (fromIntegral addrSize) - iovsPtr (fromIntegral iovsLen) - with msgHdr $ \msgHdrPtr -> - throwSocketErrorWaitWrite sock "Network.Socket.ByteString.sendManyTo" $ - c_sendmsg (fromIntegral fd) msgHdrPtr 0 -#else -sendManyTo sock cs = sendAllTo sock (B.concat cs) -#endif - --- ---------------------------------------------------------------------------- --- Receiving - --- | Receive data from the socket. The socket must be in a connected --- state. This function may return fewer bytes than specified. If --- the message is longer than the specified length, it may be --- discarded depending on the type of socket. This function may block --- until a message arrives. --- --- Considering hardware and network realities, the maximum number of bytes to --- receive should be a small power of 2, e.g., 4096. --- --- For TCP sockets, a zero length return value means the peer has --- closed its half side of the connection. 
--- --- Receiving data from closed socket may lead to undefined behaviour. -recv :: Socket -- ^ Connected socket - -> Int -- ^ Maximum number of bytes to receive - -> IO ByteString -- ^ Data received -recv sock nbytes - | nbytes < 0 = ioError (mkInvalidRecvArgError "Network.Socket.ByteString.recv") - | otherwise = createAndTrim nbytes $ \ptr -> - E.catch - (recvBuf sock ptr nbytes) - (\e -> if isEOFError e then return 0 else throwIO e) - --- | Receive data from the socket. The socket need not be in a --- connected state. Returns @(bytes, address)@ where @bytes@ is a --- 'ByteString' representing the data received and @address@ is a --- 'SockAddr' representing the address of the sending socket. --- --- Receiving data from closed socket may lead to undefined behaviour. -recvFrom :: Socket -- ^ Socket - -> Int -- ^ Maximum number of bytes to receive - -> IO (ByteString, SockAddr) -- ^ Data received and sender address -recvFrom sock nbytes = - allocaBytes nbytes $ \ptr -> do - (len, sockaddr) <- recvBufFrom sock ptr nbytes - str <- B.packCStringLen (ptr, len) - return (str, sockaddr) - --- ---------------------------------------------------------------------------- --- Not exported - -#if !defined(mingw32_HOST_OS) --- | Suppose we try to transmit a list of chunks @cs@ via a gathering write --- operation and find that @n@ bytes were sent. Then @remainingChunks n cs@ is --- list of chunks remaining to be sent. -remainingChunks :: Int -> [ByteString] -> [ByteString] -remainingChunks _ [] = [] -remainingChunks i (x:xs) - | i < len = B.drop i x : xs - | otherwise = let i' = i - len in i' `seq` remainingChunks i' xs - where - len = B.length x - --- | @withIOVec cs f@ executes the computation @f@, passing as argument a pair --- consisting of a pointer to a temporarily allocated array of pointers to --- IOVec made from @cs@ and the number of pointers (@length cs@). --- /Unix only/. 
-withIOVec :: [ByteString] -> ((Ptr IOVec, Int) -> IO a) -> IO a -withIOVec cs f = - allocaArray csLen $ \aPtr -> do - zipWithM_ pokeIov (ptrs aPtr) cs - f (aPtr, csLen) - where - csLen = length cs - ptrs = iterate (`plusPtr` sizeOf (undefined :: IOVec)) - pokeIov ptr s = - unsafeUseAsCStringLen s $ \(sPtr, sLen) -> - poke ptr $ IOVec sPtr (fromIntegral sLen) -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/Internal.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/Internal.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/Internal.hsc 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/Internal.hsc 1970-01-01 00:00:00.000000000 +0000 @@ -1,277 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE ForeignFunctionInterface #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} ------------------------------------------------------------------------------ --- | --- Module : Network.Socket.Internal --- Copyright : (c) The University of Glasgow 2001 --- License : BSD-style (see the file libraries/network/LICENSE) --- --- Maintainer : libraries@haskell.org --- Stability : provisional --- Portability : portable --- --- A module containing semi-public 'Network.Socket' internals. --- Modules which extend the 'Network.Socket' module will need to use --- this module while ideally most users will be able to make do with --- the public interface. --- ------------------------------------------------------------------------------ - -#include "HsNet.h" -##include "HsNetDef.h" - -module Network.Socket.Internal - ( - -- * Socket addresses - HostAddress -#if defined(IPV6_SOCKET_SUPPORT) - , HostAddress6 - , FlowInfo - , ScopeID -#endif - , PortNumber(..) - , SockAddr(..) 
- - , peekSockAddr - , pokeSockAddr - , sizeOfSockAddr - , sizeOfSockAddrByFamily - , withSockAddr - , withNewSockAddr - - -- * Protocol families - , Family(..) - - -- * Socket error functions -#if defined(HAVE_WINSOCK2_H) - , c_getLastError -#endif - , throwSocketError - , throwSocketErrorCode - - -- * Guards for socket operations that may fail - , throwSocketErrorIfMinus1_ - , throwSocketErrorIfMinus1Retry - , throwSocketErrorIfMinus1Retry_ - , throwSocketErrorIfMinus1RetryMayBlock - - -- ** Guards that wait and retry if the operation would block - -- | These guards are based on 'throwSocketErrorIfMinus1RetryMayBlock'. - -- They wait for socket readiness if the action fails with @EWOULDBLOCK@ - -- or similar. - , throwSocketErrorWaitRead - , throwSocketErrorWaitWrite - - -- * Initialization - , withSocketsDo - - -- * Low-level helpers - , zeroMemory - ) where - -import Foreign.C.Error (throwErrno, throwErrnoIfMinus1Retry, - throwErrnoIfMinus1RetryMayBlock, throwErrnoIfMinus1_, - Errno(..), errnoToIOError) -#if defined(HAVE_WINSOCK2_H) -import Foreign.C.String (peekCString) -import Foreign.Ptr (Ptr) -#endif -import Foreign.C.Types (CInt(..)) -import GHC.Conc (threadWaitRead, threadWaitWrite) - -#if defined(HAVE_WINSOCK2_H) -import Control.Exception ( evaluate ) -import System.IO.Unsafe ( unsafePerformIO ) -import Control.Monad ( when ) -# if __GLASGOW_HASKELL__ >= 707 -import GHC.IO.Exception ( IOErrorType(..) ) -# else -import GHC.IOBase ( IOErrorType(..) ) -# endif -import Foreign.C.Types ( CChar ) -import System.IO.Error ( ioeSetErrorString, mkIOError ) -#endif - -import Network.Socket.Types - --- --------------------------------------------------------------------- --- Guards for socket operations that may fail - --- | Throw an 'IOError' corresponding to the current socket error. -throwSocketError :: String -- ^ textual description of the error location - -> IO a - --- | Like 'throwSocketError', but the error code is supplied as an argument. 
--- --- On Windows, do not use errno. Use a system error code instead. -throwSocketErrorCode :: String -> CInt -> IO a - --- | Throw an 'IOError' corresponding to the current socket error if --- the IO action returns a result of @-1@. Discards the result of the --- IO action after error handling. -throwSocketErrorIfMinus1_ - :: (Eq a, Num a) - => String -- ^ textual description of the location - -> IO a -- ^ the 'IO' operation to be executed - -> IO () - -{-# SPECIALIZE throwSocketErrorIfMinus1_ :: String -> IO CInt -> IO () #-} - --- | Throw an 'IOError' corresponding to the current socket error if --- the IO action returns a result of @-1@, but retries in case of an --- interrupted operation. -throwSocketErrorIfMinus1Retry - :: (Eq a, Num a) - => String -- ^ textual description of the location - -> IO a -- ^ the 'IO' operation to be executed - -> IO a - -{-# SPECIALIZE throwSocketErrorIfMinus1Retry :: String -> IO CInt -> IO CInt #-} - --- | Throw an 'IOError' corresponding to the current socket error if --- the IO action returns a result of @-1@, but retries in case of an --- interrupted operation. Discards the result of the IO action after --- error handling. -throwSocketErrorIfMinus1Retry_ - :: (Eq a, Num a) - => String -- ^ textual description of the location - -> IO a -- ^ the 'IO' operation to be executed - -> IO () -throwSocketErrorIfMinus1Retry_ loc m = - throwSocketErrorIfMinus1Retry loc m >> return () -{-# SPECIALIZE throwSocketErrorIfMinus1Retry_ :: String -> IO CInt -> IO () #-} - --- | Throw an 'IOError' corresponding to the current socket error if --- the IO action returns a result of @-1@, but retries in case of an --- interrupted operation. Checks for operations that would block and --- executes an alternative action before retrying in that case. 
-throwSocketErrorIfMinus1RetryMayBlock - :: (Eq a, Num a) - => String -- ^ textual description of the location - -> IO b -- ^ action to execute before retrying if an - -- immediate retry would block - -> IO a -- ^ the 'IO' operation to be executed - -> IO a - -{-# SPECIALIZE throwSocketErrorIfMinus1RetryMayBlock - :: String -> IO b -> IO CInt -> IO CInt #-} - -#if (!defined(HAVE_WINSOCK2_H)) - -throwSocketErrorIfMinus1RetryMayBlock name on_block act = - throwErrnoIfMinus1RetryMayBlock name act on_block - -throwSocketErrorIfMinus1Retry = throwErrnoIfMinus1Retry - -throwSocketErrorIfMinus1_ = throwErrnoIfMinus1_ - -throwSocketError = throwErrno - -throwSocketErrorCode loc errno = - ioError (errnoToIOError loc (Errno errno) Nothing Nothing) - -#else - -throwSocketErrorIfMinus1RetryMayBlock name _ act - = throwSocketErrorIfMinus1Retry name act - -throwSocketErrorIfMinus1_ name act = do - throwSocketErrorIfMinus1Retry name act - return () - -# if defined(HAVE_WINSOCK2_H) -throwSocketErrorIfMinus1Retry name act = do - r <- act - if (r == -1) - then do - rc <- c_getLastError - case rc of - #{const WSANOTINITIALISED} -> do - withSocketsDo (return ()) - r <- act - if (r == -1) - then throwSocketError name - else return r - _ -> throwSocketError name - else return r - -throwSocketErrorCode name rc = do - pstr <- c_getWSError rc - str <- peekCString pstr - ioError (ioeSetErrorString (mkIOError OtherError name Nothing Nothing) str) - -throwSocketError name = - c_getLastError >>= throwSocketErrorCode name - -foreign import CALLCONV unsafe "WSAGetLastError" - c_getLastError :: IO CInt - -foreign import ccall unsafe "getWSErrorDescr" - c_getWSError :: CInt -> IO (Ptr CChar) - - -# else -throwSocketErrorIfMinus1Retry = throwErrnoIfMinus1Retry -throwSocketError = throwErrno -throwSocketErrorCode loc errno = - ioError (errnoToIOError loc (Errno errno) Nothing Nothing) -# endif -#endif - --- | Like 'throwSocketErrorIfMinus1Retry', but if the action fails with --- @EWOULDBLOCK@ or 
similar, wait for the socket to be read-ready, --- and try again. -throwSocketErrorWaitRead :: (Eq a, Num a) => Socket -> String -> IO a -> IO a -throwSocketErrorWaitRead sock name io = - throwSocketErrorIfMinus1RetryMayBlock name - (threadWaitRead $ fromIntegral $ fdSocket sock) - io - --- | Like 'throwSocketErrorIfMinus1Retry', but if the action fails with --- @EWOULDBLOCK@ or similar, wait for the socket to be write-ready, --- and try again. -throwSocketErrorWaitWrite :: (Eq a, Num a) => Socket -> String -> IO a -> IO a -throwSocketErrorWaitWrite sock name io = - throwSocketErrorIfMinus1RetryMayBlock name - (threadWaitWrite $ fromIntegral $ fdSocket sock) - io - --- --------------------------------------------------------------------------- --- WinSock support - -{-| With older versions of the @network@ library (version 2.6.0.2 or earlier) -on Windows operating systems, -the networking subsystem must be initialised using 'withSocketsDo' before -any networking operations can be used. eg. - -> main = withSocketsDo $ do {...} - -It is fine to nest calls to 'withSocketsDo', and to perform networking operations -after 'withSocketsDo' has returned. - -In newer versions of the @network@ library (version v2.6.1.0 or later) -it is only necessary to call -'withSocketsDo' if you are calling the 'MkSocket' constructor directly. -However, for compatibility with older versions on Windows, it is good practice -to always call 'withSocketsDo' (it's very cheap). 
--} -{-# INLINE withSocketsDo #-} -withSocketsDo :: IO a -> IO a -#if !defined(WITH_WINSOCK) -withSocketsDo x = x -#else -withSocketsDo act = evaluate withSocketsInit >> act - - -{-# NOINLINE withSocketsInit #-} -withSocketsInit :: () --- Use a CAF to make forcing it do initialisation once, but subsequent forces will be cheap -withSocketsInit = unsafePerformIO $ do - x <- initWinSock - when (x /= 0) $ ioError $ - userError "Network.Socket.Internal.withSocketsDo: Failed to initialise WinSock" - -foreign import ccall unsafe "initWinSock" initWinSock :: IO Int - -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/Types.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/Types.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket/Types.hsc 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket/Types.hsc 1970-01-01 00:00:00.000000000 +0000 @@ -1,1135 +0,0 @@ -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE ForeignFunctionInterface #-} - -#include "HsNet.h" -##include "HsNetDef.h" - -module Network.Socket.Types - ( - -- * Socket - Socket(..) - , fdSocket - , sockFamily - , sockType - , sockProtocol - , sockStatus - , SocketStatus(..) - - -- * Socket types - , SocketType(..) - , isSupportedSocketType - , packSocketType - , packSocketType' - , packSocketTypeOrThrow - , unpackSocketType - , unpackSocketType' - - -- * Family - , Family(..) - , isSupportedFamily - , packFamily - , unpackFamily - - -- * Socket addresses - , SockAddr(..) 
- , isSupportedSockAddr - , HostAddress - , hostAddressToTuple - , tupleToHostAddress -#if defined(IPV6_SOCKET_SUPPORT) - , HostAddress6 - , hostAddress6ToTuple - , tupleToHostAddress6 - , FlowInfo - , ScopeID -#endif - , peekSockAddr - , pokeSockAddr - , sizeOfSockAddr - , sizeOfSockAddrByFamily - , withSockAddr - , withNewSockAddr - - -- * Unsorted - , ProtocolNumber - , PortNumber(..) - - -- * Low-level helpers - , zeroMemory - ) where - -import Control.Concurrent.MVar -import Control.Monad -import Data.Bits -import Data.Maybe -import Data.Ratio -import Data.Typeable -import Data.Word -import Data.Int -import Foreign.C -import Foreign.Marshal.Alloc -import Foreign.Marshal.Array -import Foreign.Ptr -import Foreign.Storable - --- | A socket data type. --- 'Socket's are not GCed unless they are closed by 'close'. -data Socket - = MkSocket - CInt -- File Descriptor - Family - SocketType - ProtocolNumber -- Protocol Number - (MVar SocketStatus) -- Status Flag - deriving Typeable - -{-# DEPRECATED MkSocket "'MkSocket' will not be available in version 3.0.0.0 or later. Use fdSocket instead" #-} - --- | Obtaining the file descriptor from a socket. --- --- If a 'Socket' is shared with multiple threads and --- one uses 'fdSocket', unexpected issues may happen. --- Consider the following scenario: --- --- 1) Thread A acquires a 'Fd' from 'Socket' by 'fdSocket'. --- --- 2) Thread B close the 'Socket'. --- --- 3) Thread C opens a new 'Socket'. Unfortunately it gets the same 'Fd' --- number which thread A is holding. --- --- In this case, it is safer for Thread A to clone 'Fd' by --- 'System.Posix.IO.dup'. But this would still suffer from --- a rase condition between 'fdSocket' and 'close'. 
-fdSocket :: Socket -> CInt -fdSocket (MkSocket fd _ _ _ _) = fd - -sockFamily :: Socket -> Family -sockFamily (MkSocket _ f _ _ _) = f - -sockType :: Socket -> SocketType -sockType (MkSocket _ _ t _ _) = t - -sockProtocol :: Socket -> ProtocolNumber -sockProtocol (MkSocket _ _ _ p _) = p - -sockStatus :: Socket -> MVar SocketStatus -sockStatus (MkSocket _ _ _ _ s) = s - -instance Eq Socket where - (MkSocket _ _ _ _ m1) == (MkSocket _ _ _ _ m2) = m1 == m2 - -instance Show Socket where - showsPrec _n (MkSocket fd _ _ _ _) = - showString "" - -type ProtocolNumber = CInt - --- | The status of the socket as /determined by this library/, not --- necessarily reflecting the state of the connection itself. --- --- For example, the 'Closed' status is applied when the 'close' --- function is called. -data SocketStatus - -- Returned Status Function called - = NotConnected -- ^ Newly created, unconnected socket - | Bound -- ^ Bound, via 'bind' - | Listening -- ^ Listening, via 'listen' - | Connected -- ^ Connected or accepted, via 'connect' or 'accept' - | ConvertedToHandle -- ^ Is now a 'Handle' (via 'socketToHandle'), don't touch - | Closed -- ^ Closed was closed by 'close' - deriving (Eq, Show, Typeable) - -{-# DEPRECATED SocketStatus "SocketStatus will be removed" #-} - ------------------------------------------------------------------------------ --- Socket types - --- There are a few possible ways to do this. The first is convert the --- structs used in the C library into an equivalent Haskell type. An --- other possible implementation is to keep all the internals in the C --- code and use an Int## and a status flag. The second method is used --- here since a lot of the C structures are not required to be --- manipulated. - --- Originally the status was non-mutable so we had to return a new --- socket each time we changed the status. This version now uses --- mutable variables to avoid the need to do this. 
The result is a --- cleaner interface and better security since the application --- programmer now can't circumvent the status information to perform --- invalid operations on sockets. - --- | Socket Types. --- --- The existence of a constructor does not necessarily imply that that --- socket type is supported on your system: see 'isSupportedSocketType'. -data SocketType - = NoSocketType -- ^ 0, used in getAddrInfo hints, for example - | Stream -- ^ SOCK_STREAM - | Datagram -- ^ SOCK_DGRAM - | Raw -- ^ SOCK_RAW - | RDM -- ^ SOCK_RDM - | SeqPacket -- ^ SOCK_SEQPACKET - deriving (Eq, Ord, Read, Show, Typeable) - --- | Does the SOCK_ constant corresponding to the given SocketType exist on --- this system? -isSupportedSocketType :: SocketType -> Bool -isSupportedSocketType = isJust . packSocketType' - --- | Find the SOCK_ constant corresponding to the SocketType value. -packSocketType' :: SocketType -> Maybe CInt -packSocketType' stype = case Just stype of - -- the Just above is to disable GHC's overlapping pattern - -- detection: see comments for packSocketOption - Just NoSocketType -> Just 0 -#ifdef SOCK_STREAM - Just Stream -> Just #const SOCK_STREAM -#endif -#ifdef SOCK_DGRAM - Just Datagram -> Just #const SOCK_DGRAM -#endif -#ifdef SOCK_RAW - Just Raw -> Just #const SOCK_RAW -#endif -#ifdef SOCK_RDM - Just RDM -> Just #const SOCK_RDM -#endif -#ifdef SOCK_SEQPACKET - Just SeqPacket -> Just #const SOCK_SEQPACKET -#endif - _ -> Nothing - -{-# DEPRECATED packSocketType "packSocketType will not be available in version 3.0.0.0 or later." #-} - -packSocketType :: SocketType -> CInt -packSocketType stype = fromMaybe (error errMsg) (packSocketType' stype) - where - errMsg = concat ["Network.Socket.packSocketType: ", - "socket type ", show stype, " unsupported on this system"] - --- | Try packSocketType' on the SocketType, if it fails throw an error with --- message starting "Network.Socket." 
++ the String parameter -packSocketTypeOrThrow :: String -> SocketType -> IO CInt -packSocketTypeOrThrow caller stype = maybe err return (packSocketType' stype) - where - err = ioError . userError . concat $ ["Network.Socket.", caller, ": ", - "socket type ", show stype, " unsupported on this system"] - - -unpackSocketType:: CInt -> Maybe SocketType -unpackSocketType t = case t of - 0 -> Just NoSocketType -#ifdef SOCK_STREAM - (#const SOCK_STREAM) -> Just Stream -#endif -#ifdef SOCK_DGRAM - (#const SOCK_DGRAM) -> Just Datagram -#endif -#ifdef SOCK_RAW - (#const SOCK_RAW) -> Just Raw -#endif -#ifdef SOCK_RDM - (#const SOCK_RDM) -> Just RDM -#endif -#ifdef SOCK_SEQPACKET - (#const SOCK_SEQPACKET) -> Just SeqPacket -#endif - _ -> Nothing - --- | Try unpackSocketType on the CInt, if it fails throw an error with --- message starting "Network.Socket." ++ the String parameter -unpackSocketType' :: String -> CInt -> IO SocketType -unpackSocketType' caller ty = maybe err return (unpackSocketType ty) - where - err = ioError . userError . concat $ ["Network.Socket.", caller, ": ", - "socket type ", show ty, " unsupported on this system"] - ------------------------------------------------------------------------- --- Protocol Families. - --- | Address families. --- --- A constructor being present here does not mean it is supported by the --- operating system: see 'isSupportedFamily'. -data Family - = AF_UNSPEC -- unspecified - | AF_UNIX -- local to host (pipes, portals - | AF_INET -- internetwork: UDP, TCP, etc - | AF_INET6 -- Internet Protocol version 6 - | AF_IMPLINK -- arpanet imp addresses - | AF_PUP -- pup protocols: e.g. 
BSP - | AF_CHAOS -- mit CHAOS protocols - | AF_NS -- XEROX NS protocols - | AF_NBS -- nbs protocols - | AF_ECMA -- european computer manufacturers - | AF_DATAKIT -- datakit protocols - | AF_CCITT -- CCITT protocols, X.25 etc - | AF_SNA -- IBM SNA - | AF_DECnet -- DECnet - | AF_DLI -- Direct data link interface - | AF_LAT -- LAT - | AF_HYLINK -- NSC Hyperchannel - | AF_APPLETALK -- Apple Talk - | AF_ROUTE -- Internal Routing Protocol (aka AF_NETLINK) - | AF_NETBIOS -- NetBios-style addresses - | AF_NIT -- Network Interface Tap - | AF_802 -- IEEE 802.2, also ISO 8802 - | AF_ISO -- ISO protocols - | AF_OSI -- umbrella of all families used by OSI - | AF_NETMAN -- DNA Network Management - | AF_X25 -- CCITT X.25 - | AF_AX25 - | AF_OSINET -- AFI - | AF_GOSSIP -- US Government OSI - | AF_IPX -- Novell Internet Protocol - | Pseudo_AF_XTP -- eXpress Transfer Protocol (no AF) - | AF_CTF -- Common Trace Facility - | AF_WAN -- Wide Area Network protocols - | AF_SDL -- SGI Data Link for DLPI - | AF_NETWARE - | AF_NDD - | AF_INTF -- Debugging use only - | AF_COIP -- connection-oriented IP, aka ST II - | AF_CNT -- Computer Network Technology - | Pseudo_AF_RTIP -- Help Identify RTIP packets - | Pseudo_AF_PIP -- Help Identify PIP packets - | AF_SIP -- Simple Internet Protocol - | AF_ISDN -- Integrated Services Digital Network - | Pseudo_AF_KEY -- Internal key-management function - | AF_NATM -- native ATM access - | AF_ARP -- (rev.) addr. res. prot. 
(RFC 826) - | Pseudo_AF_HDRCMPLT -- Used by BPF to not rewrite hdrs in iface output - | AF_ENCAP - | AF_LINK -- Link layer interface - | AF_RAW -- Link layer interface - | AF_RIF -- raw interface - | AF_NETROM -- Amateur radio NetROM - | AF_BRIDGE -- multiprotocol bridge - | AF_ATMPVC -- ATM PVCs - | AF_ROSE -- Amateur Radio X.25 PLP - | AF_NETBEUI -- 802.2LLC - | AF_SECURITY -- Security callback pseudo AF - | AF_PACKET -- Packet family - | AF_ASH -- Ash - | AF_ECONET -- Acorn Econet - | AF_ATMSVC -- ATM SVCs - | AF_IRDA -- IRDA sockets - | AF_PPPOX -- PPPoX sockets - | AF_WANPIPE -- Wanpipe API sockets - | AF_BLUETOOTH -- bluetooth sockets - | AF_CAN -- Controller Area Network - deriving (Eq, Ord, Read, Show) - -packFamily :: Family -> CInt -packFamily f = case packFamily' f of - Just fam -> fam - Nothing -> error $ - "Network.Socket.packFamily: unsupported address family: " ++ - show f - --- | Does the AF_ constant corresponding to the given family exist on this --- system? -isSupportedFamily :: Family -> Bool -isSupportedFamily = isJust . 
packFamily' - -packFamily' :: Family -> Maybe CInt -packFamily' f = case Just f of - -- the Just above is to disable GHC's overlapping pattern - -- detection: see comments for packSocketOption - Just AF_UNSPEC -> Just #const AF_UNSPEC -#ifdef AF_UNIX - Just AF_UNIX -> Just #const AF_UNIX -#endif -#ifdef AF_INET - Just AF_INET -> Just #const AF_INET -#endif -#ifdef AF_INET6 - Just AF_INET6 -> Just #const AF_INET6 -#endif -#ifdef AF_IMPLINK - Just AF_IMPLINK -> Just #const AF_IMPLINK -#endif -#ifdef AF_PUP - Just AF_PUP -> Just #const AF_PUP -#endif -#ifdef AF_CHAOS - Just AF_CHAOS -> Just #const AF_CHAOS -#endif -#ifdef AF_NS - Just AF_NS -> Just #const AF_NS -#endif -#ifdef AF_NBS - Just AF_NBS -> Just #const AF_NBS -#endif -#ifdef AF_ECMA - Just AF_ECMA -> Just #const AF_ECMA -#endif -#ifdef AF_DATAKIT - Just AF_DATAKIT -> Just #const AF_DATAKIT -#endif -#ifdef AF_CCITT - Just AF_CCITT -> Just #const AF_CCITT -#endif -#ifdef AF_SNA - Just AF_SNA -> Just #const AF_SNA -#endif -#ifdef AF_DECnet - Just AF_DECnet -> Just #const AF_DECnet -#endif -#ifdef AF_DLI - Just AF_DLI -> Just #const AF_DLI -#endif -#ifdef AF_LAT - Just AF_LAT -> Just #const AF_LAT -#endif -#ifdef AF_HYLINK - Just AF_HYLINK -> Just #const AF_HYLINK -#endif -#ifdef AF_APPLETALK - Just AF_APPLETALK -> Just #const AF_APPLETALK -#endif -#ifdef AF_ROUTE - Just AF_ROUTE -> Just #const AF_ROUTE -#endif -#ifdef AF_NETBIOS - Just AF_NETBIOS -> Just #const AF_NETBIOS -#endif -#ifdef AF_NIT - Just AF_NIT -> Just #const AF_NIT -#endif -#ifdef AF_802 - Just AF_802 -> Just #const AF_802 -#endif -#ifdef AF_ISO - Just AF_ISO -> Just #const AF_ISO -#endif -#ifdef AF_OSI - Just AF_OSI -> Just #const AF_OSI -#endif -#ifdef AF_NETMAN - Just AF_NETMAN -> Just #const AF_NETMAN -#endif -#ifdef AF_X25 - Just AF_X25 -> Just #const AF_X25 -#endif -#ifdef AF_AX25 - Just AF_AX25 -> Just #const AF_AX25 -#endif -#ifdef AF_OSINET - Just AF_OSINET -> Just #const AF_OSINET -#endif -#ifdef AF_GOSSIP - Just AF_GOSSIP -> Just 
#const AF_GOSSIP -#endif -#ifdef AF_IPX - Just AF_IPX -> Just #const AF_IPX -#endif -#ifdef Pseudo_AF_XTP - Just Pseudo_AF_XTP -> Just #const Pseudo_AF_XTP -#endif -#ifdef AF_CTF - Just AF_CTF -> Just #const AF_CTF -#endif -#ifdef AF_WAN - Just AF_WAN -> Just #const AF_WAN -#endif -#ifdef AF_SDL - Just AF_SDL -> Just #const AF_SDL -#endif -#ifdef AF_NETWARE - Just AF_NETWARE -> Just #const AF_NETWARE -#endif -#ifdef AF_NDD - Just AF_NDD -> Just #const AF_NDD -#endif -#ifdef AF_INTF - Just AF_INTF -> Just #const AF_INTF -#endif -#ifdef AF_COIP - Just AF_COIP -> Just #const AF_COIP -#endif -#ifdef AF_CNT - Just AF_CNT -> Just #const AF_CNT -#endif -#ifdef Pseudo_AF_RTIP - Just Pseudo_AF_RTIP -> Just #const Pseudo_AF_RTIP -#endif -#ifdef Pseudo_AF_PIP - Just Pseudo_AF_PIP -> Just #const Pseudo_AF_PIP -#endif -#ifdef AF_SIP - Just AF_SIP -> Just #const AF_SIP -#endif -#ifdef AF_ISDN - Just AF_ISDN -> Just #const AF_ISDN -#endif -#ifdef Pseudo_AF_KEY - Just Pseudo_AF_KEY -> Just #const Pseudo_AF_KEY -#endif -#ifdef AF_NATM - Just AF_NATM -> Just #const AF_NATM -#endif -#ifdef AF_ARP - Just AF_ARP -> Just #const AF_ARP -#endif -#ifdef Pseudo_AF_HDRCMPLT - Just Pseudo_AF_HDRCMPLT -> Just #const Pseudo_AF_HDRCMPLT -#endif -#ifdef AF_ENCAP - Just AF_ENCAP -> Just #const AF_ENCAP -#endif -#ifdef AF_LINK - Just AF_LINK -> Just #const AF_LINK -#endif -#ifdef AF_RAW - Just AF_RAW -> Just #const AF_RAW -#endif -#ifdef AF_RIF - Just AF_RIF -> Just #const AF_RIF -#endif -#ifdef AF_NETROM - Just AF_NETROM -> Just #const AF_NETROM -#endif -#ifdef AF_BRIDGE - Just AF_BRIDGE -> Just #const AF_BRIDGE -#endif -#ifdef AF_ATMPVC - Just AF_ATMPVC -> Just #const AF_ATMPVC -#endif -#ifdef AF_ROSE - Just AF_ROSE -> Just #const AF_ROSE -#endif -#ifdef AF_NETBEUI - Just AF_NETBEUI -> Just #const AF_NETBEUI -#endif -#ifdef AF_SECURITY - Just AF_SECURITY -> Just #const AF_SECURITY -#endif -#ifdef AF_PACKET - Just AF_PACKET -> Just #const AF_PACKET -#endif -#ifdef AF_ASH - Just AF_ASH -> Just 
#const AF_ASH -#endif -#ifdef AF_ECONET - Just AF_ECONET -> Just #const AF_ECONET -#endif -#ifdef AF_ATMSVC - Just AF_ATMSVC -> Just #const AF_ATMSVC -#endif -#ifdef AF_IRDA - Just AF_IRDA -> Just #const AF_IRDA -#endif -#ifdef AF_PPPOX - Just AF_PPPOX -> Just #const AF_PPPOX -#endif -#ifdef AF_WANPIPE - Just AF_WANPIPE -> Just #const AF_WANPIPE -#endif -#ifdef AF_BLUETOOTH - Just AF_BLUETOOTH -> Just #const AF_BLUETOOTH -#endif -#ifdef AF_CAN - Just AF_CAN -> Just #const AF_CAN -#endif - _ -> Nothing - ---------- ---------- - -unpackFamily :: CInt -> Family -unpackFamily f = case f of - (#const AF_UNSPEC) -> AF_UNSPEC -#ifdef AF_UNIX - (#const AF_UNIX) -> AF_UNIX -#endif -#ifdef AF_INET - (#const AF_INET) -> AF_INET -#endif -#ifdef AF_INET6 - (#const AF_INET6) -> AF_INET6 -#endif -#ifdef AF_IMPLINK - (#const AF_IMPLINK) -> AF_IMPLINK -#endif -#ifdef AF_PUP - (#const AF_PUP) -> AF_PUP -#endif -#ifdef AF_CHAOS - (#const AF_CHAOS) -> AF_CHAOS -#endif -#ifdef AF_NS - (#const AF_NS) -> AF_NS -#endif -#ifdef AF_NBS - (#const AF_NBS) -> AF_NBS -#endif -#ifdef AF_ECMA - (#const AF_ECMA) -> AF_ECMA -#endif -#ifdef AF_DATAKIT - (#const AF_DATAKIT) -> AF_DATAKIT -#endif -#ifdef AF_CCITT - (#const AF_CCITT) -> AF_CCITT -#endif -#ifdef AF_SNA - (#const AF_SNA) -> AF_SNA -#endif -#ifdef AF_DECnet - (#const AF_DECnet) -> AF_DECnet -#endif -#ifdef AF_DLI - (#const AF_DLI) -> AF_DLI -#endif -#ifdef AF_LAT - (#const AF_LAT) -> AF_LAT -#endif -#ifdef AF_HYLINK - (#const AF_HYLINK) -> AF_HYLINK -#endif -#ifdef AF_APPLETALK - (#const AF_APPLETALK) -> AF_APPLETALK -#endif -#ifdef AF_ROUTE - (#const AF_ROUTE) -> AF_ROUTE -#endif -#ifdef AF_NETBIOS - (#const AF_NETBIOS) -> AF_NETBIOS -#endif -#ifdef AF_NIT - (#const AF_NIT) -> AF_NIT -#endif -#ifdef AF_802 - (#const AF_802) -> AF_802 -#endif -#ifdef AF_ISO - (#const AF_ISO) -> AF_ISO -#endif -#ifdef AF_OSI -# if (!defined(AF_ISO)) || (defined(AF_ISO) && (AF_ISO != AF_OSI)) - (#const AF_OSI) -> AF_OSI -# endif -#endif -#ifdef AF_NETMAN - 
(#const AF_NETMAN) -> AF_NETMAN -#endif -#ifdef AF_X25 - (#const AF_X25) -> AF_X25 -#endif -#ifdef AF_AX25 - (#const AF_AX25) -> AF_AX25 -#endif -#ifdef AF_OSINET - (#const AF_OSINET) -> AF_OSINET -#endif -#ifdef AF_GOSSIP - (#const AF_GOSSIP) -> AF_GOSSIP -#endif -#if defined(AF_IPX) && (!defined(AF_NS) || AF_NS != AF_IPX) - (#const AF_IPX) -> AF_IPX -#endif -#ifdef Pseudo_AF_XTP - (#const Pseudo_AF_XTP) -> Pseudo_AF_XTP -#endif -#ifdef AF_CTF - (#const AF_CTF) -> AF_CTF -#endif -#ifdef AF_WAN - (#const AF_WAN) -> AF_WAN -#endif -#ifdef AF_SDL - (#const AF_SDL) -> AF_SDL -#endif -#ifdef AF_NETWARE - (#const AF_NETWARE) -> AF_NETWARE -#endif -#ifdef AF_NDD - (#const AF_NDD) -> AF_NDD -#endif -#ifdef AF_INTF - (#const AF_INTF) -> AF_INTF -#endif -#ifdef AF_COIP - (#const AF_COIP) -> AF_COIP -#endif -#ifdef AF_CNT - (#const AF_CNT) -> AF_CNT -#endif -#ifdef Pseudo_AF_RTIP - (#const Pseudo_AF_RTIP) -> Pseudo_AF_RTIP -#endif -#ifdef Pseudo_AF_PIP - (#const Pseudo_AF_PIP) -> Pseudo_AF_PIP -#endif -#ifdef AF_SIP - (#const AF_SIP) -> AF_SIP -#endif -#ifdef AF_ISDN - (#const AF_ISDN) -> AF_ISDN -#endif -#ifdef Pseudo_AF_KEY - (#const Pseudo_AF_KEY) -> Pseudo_AF_KEY -#endif -#ifdef AF_NATM - (#const AF_NATM) -> AF_NATM -#endif -#ifdef AF_ARP - (#const AF_ARP) -> AF_ARP -#endif -#ifdef Pseudo_AF_HDRCMPLT - (#const Pseudo_AF_HDRCMPLT) -> Pseudo_AF_HDRCMPLT -#endif -#ifdef AF_ENCAP - (#const AF_ENCAP) -> AF_ENCAP -#endif -#ifdef AF_LINK - (#const AF_LINK) -> AF_LINK -#endif -#ifdef AF_RAW - (#const AF_RAW) -> AF_RAW -#endif -#ifdef AF_RIF - (#const AF_RIF) -> AF_RIF -#endif -#ifdef AF_NETROM - (#const AF_NETROM) -> AF_NETROM -#endif -#ifdef AF_BRIDGE - (#const AF_BRIDGE) -> AF_BRIDGE -#endif -#ifdef AF_ATMPVC - (#const AF_ATMPVC) -> AF_ATMPVC -#endif -#ifdef AF_ROSE - (#const AF_ROSE) -> AF_ROSE -#endif -#ifdef AF_NETBEUI - (#const AF_NETBEUI) -> AF_NETBEUI -#endif -#ifdef AF_SECURITY - (#const AF_SECURITY) -> AF_SECURITY -#endif -#ifdef AF_PACKET - (#const AF_PACKET) -> 
AF_PACKET -#endif -#ifdef AF_ASH - (#const AF_ASH) -> AF_ASH -#endif -#ifdef AF_ECONET - (#const AF_ECONET) -> AF_ECONET -#endif -#ifdef AF_ATMSVC - (#const AF_ATMSVC) -> AF_ATMSVC -#endif -#ifdef AF_IRDA - (#const AF_IRDA) -> AF_IRDA -#endif -#ifdef AF_PPPOX - (#const AF_PPPOX) -> AF_PPPOX -#endif -#ifdef AF_WANPIPE - (#const AF_WANPIPE) -> AF_WANPIPE -#endif -#ifdef AF_BLUETOOTH - (#const AF_BLUETOOTH) -> AF_BLUETOOTH -#endif -#ifdef AF_CAN - (#const AF_CAN) -> AF_CAN -#endif - unknown -> error $ - "Network.Socket.Types.unpackFamily: unknown address family: " ++ - show unknown - ------------------------------------------------------------------------- --- Port Numbers - --- | Use the @Num@ instance (i.e. use a literal) to create a --- @PortNumber@ value with the correct network-byte-ordering. You --- should not use the PortNum constructor. It will be removed in the --- next release. --- --- >>> 1 :: PortNumber --- 1 --- >>> read "1" :: PortNumber --- 1 -newtype PortNumber = PortNum Word16 deriving (Eq, Ord, Typeable) --- newtyped to prevent accidental use of sane-looking --- port numbers that haven't actually been converted to --- network-byte-order first. - -{-# DEPRECATED PortNum "Do not use the PortNum constructor. Use the Num instance. PortNum will be removed in the next release." #-} - -instance Show PortNumber where - showsPrec p pn = showsPrec p (portNumberToInt pn) - -instance Read PortNumber where - readsPrec n = map (\(x,y) -> (intToPortNumber x, y)) . 
readsPrec n - -intToPortNumber :: Int -> PortNumber -intToPortNumber v = PortNum (htons (fromIntegral v)) - -portNumberToInt :: PortNumber -> Int -portNumberToInt (PortNum po) = fromIntegral (ntohs po) - -foreign import CALLCONV unsafe "ntohs" ntohs :: Word16 -> Word16 -foreign import CALLCONV unsafe "htons" htons :: Word16 -> Word16 -foreign import CALLCONV unsafe "ntohl" ntohl :: Word32 -> Word32 -foreign import CALLCONV unsafe "htonl" htonl :: Word32 -> Word32 - -instance Enum PortNumber where - toEnum = intToPortNumber - fromEnum = portNumberToInt - -instance Num PortNumber where - fromInteger i = intToPortNumber (fromInteger i) - -- for completeness. - (+) x y = intToPortNumber (portNumberToInt x + portNumberToInt y) - (-) x y = intToPortNumber (portNumberToInt x - portNumberToInt y) - negate x = intToPortNumber (-portNumberToInt x) - (*) x y = intToPortNumber (portNumberToInt x * portNumberToInt y) - abs n = intToPortNumber (abs (portNumberToInt n)) - signum n = intToPortNumber (signum (portNumberToInt n)) - -instance Real PortNumber where - toRational x = toInteger x % 1 - -instance Integral PortNumber where - quotRem a b = let (c,d) = quotRem (portNumberToInt a) (portNumberToInt b) in - (intToPortNumber c, intToPortNumber d) - toInteger a = toInteger (portNumberToInt a) - -instance Storable PortNumber where - sizeOf _ = sizeOf (undefined :: Word16) - alignment _ = alignment (undefined :: Word16) - poke p (PortNum po) = poke (castPtr p) po - peek p = PortNum `liftM` peek (castPtr p) - ------------------------------------------------------------------------- --- Socket addresses - --- The scheme used for addressing sockets is somewhat quirky. The --- calls in the BSD socket API that need to know the socket address --- all operate in terms of struct sockaddr, a `virtual' type of --- socket address. 
- --- The Internet family of sockets are addressed as struct sockaddr_in, --- so when calling functions that operate on struct sockaddr, we have --- to type cast the Internet socket address into a struct sockaddr. --- Instances of the structure for different families might *not* be --- the same size. Same casting is required of other families of --- sockets such as Xerox NS. Similarly for UNIX-domain sockets. - --- To represent these socket addresses in Haskell-land, we do what BSD --- didn't do, and use a union/algebraic type for the different --- families. Currently only UNIX-domain sockets and the Internet --- families are supported. - -#if defined(IPV6_SOCKET_SUPPORT) -type FlowInfo = Word32 -type ScopeID = Word32 -#endif - --- | The existence of a constructor does not necessarily imply that --- that socket address type is supported on your system: see --- 'isSupportedSockAddr'. -data SockAddr -- C Names - = SockAddrInet - PortNumber -- sin_port (network byte order) - HostAddress -- sin_addr (ditto) - | SockAddrInet6 - PortNumber -- sin6_port (network byte order) - FlowInfo -- sin6_flowinfo (ditto) - HostAddress6 -- sin6_addr (ditto) - ScopeID -- sin6_scope_id (ditto) - | SockAddrUnix - String -- sun_path - | SockAddrCan - Int32 -- can_ifindex (can be get by Network.BSD.ifNameToIndex "can0") - -- TODO: Extend this to include transport protocol information - deriving (Eq, Ord, Typeable) - --- | Is the socket address type supported on this system? 
-isSupportedSockAddr :: SockAddr -> Bool -isSupportedSockAddr addr = case addr of - SockAddrInet {} -> True -#if defined(IPV6_SOCKET_SUPPORT) - SockAddrInet6 {} -> True -#endif -#if defined(DOMAIN_SOCKET_SUPPORT) - SockAddrUnix{} -> True -#endif -#if defined(CAN_SOCKET_SUPPORT) - SockAddrCan{} -> True -#endif -#if !(defined(IPV6_SOCKET_SUPPORT) \ - && defined(DOMAIN_SOCKET_SUPPORT) && defined(CAN_SOCKET_SUPPORT)) - _ -> False -#endif - -{-# DEPRECATED SockAddrCan "This will be removed in 3.0" #-} - -#if defined(WITH_WINSOCK) -type CSaFamily = (#type unsigned short) -#elif defined(darwin_HOST_OS) -type CSaFamily = (#type u_char) -#else -type CSaFamily = (#type sa_family_t) -#endif - --- | Computes the storage requirements (in bytes) of the given --- 'SockAddr'. This function differs from 'Foreign.Storable.sizeOf' --- in that the value of the argument /is/ used. -sizeOfSockAddr :: SockAddr -> Int -#if defined(DOMAIN_SOCKET_SUPPORT) -sizeOfSockAddr (SockAddrUnix path) = - case path of - '\0':_ -> (#const sizeof(sa_family_t)) + length path - _ -> #const sizeof(struct sockaddr_un) -#endif -sizeOfSockAddr (SockAddrInet _ _) = #const sizeof(struct sockaddr_in) -#if defined(IPV6_SOCKET_SUPPORT) -sizeOfSockAddr (SockAddrInet6 _ _ _ _) = #const sizeof(struct sockaddr_in6) -#endif -#if defined(CAN_SOCKET_SUPPORT) -sizeOfSockAddr (SockAddrCan _) = #const sizeof(struct sockaddr_can) -#endif -#if !(defined(IPV6_SOCKET_SUPPORT) \ - && defined(DOMAIN_SOCKET_SUPPORT) && defined(CAN_SOCKET_SUPPORT)) -sizeOfSockAddr _ = error "sizeOfSockAddr: not supported" -#endif - --- | Computes the storage requirements (in bytes) required for a --- 'SockAddr' with the given 'Family'. 
-sizeOfSockAddrByFamily :: Family -> Int -#if defined(DOMAIN_SOCKET_SUPPORT) -sizeOfSockAddrByFamily AF_UNIX = #const sizeof(struct sockaddr_un) -#endif -#if defined(IPV6_SOCKET_SUPPORT) -sizeOfSockAddrByFamily AF_INET6 = #const sizeof(struct sockaddr_in6) -#endif -sizeOfSockAddrByFamily AF_INET = #const sizeof(struct sockaddr_in) -#if defined(CAN_SOCKET_SUPPORT) -sizeOfSockAddrByFamily AF_CAN = #const sizeof(struct sockaddr_can) -#endif -sizeOfSockAddrByFamily family = error $ - "Network.Socket.Types.sizeOfSockAddrByFamily: address family '" ++ - show family ++ "' not supported." - --- | Use a 'SockAddr' with a function requiring a pointer to a --- 'SockAddr' and the length of that 'SockAddr'. -withSockAddr :: SockAddr -> (Ptr SockAddr -> Int -> IO a) -> IO a -withSockAddr addr f = do - let sz = sizeOfSockAddr addr - allocaBytes sz $ \p -> pokeSockAddr p addr >> f (castPtr p) sz - --- | Create a new 'SockAddr' for use with a function requiring a --- pointer to a 'SockAddr' and the length of that 'SockAddr'. -withNewSockAddr :: Family -> (Ptr SockAddr -> Int -> IO a) -> IO a -withNewSockAddr family f = do - let sz = sizeOfSockAddrByFamily family - allocaBytes sz $ \ptr -> f ptr sz - --- We can't write an instance of 'Storable' for 'SockAddr' because --- @sockaddr@ is a sum type of variable size but --- 'Foreign.Storable.sizeOf' is required to be constant. - --- Note that on Darwin, the sockaddr structure must be zeroed before --- use. - --- | Write the given 'SockAddr' to the given memory location. 
-pokeSockAddr :: Ptr a -> SockAddr -> IO () -#if defined(DOMAIN_SOCKET_SUPPORT) -pokeSockAddr p (SockAddrUnix path) = do -#if defined(darwin_HOST_OS) - zeroMemory p (#const sizeof(struct sockaddr_un)) -#else - case path of - ('\0':_) -> zeroMemory p (#const sizeof(struct sockaddr_un)) - _ -> return () -#endif -#if defined(HAVE_STRUCT_SOCKADDR_SA_LEN) - (#poke struct sockaddr_un, sun_len) p ((#const sizeof(struct sockaddr_un)) :: Word8) -#endif - (#poke struct sockaddr_un, sun_family) p ((#const AF_UNIX) :: CSaFamily) - let pathC = map castCharToCChar path - poker = case path of ('\0':_) -> pokeArray; _ -> pokeArray0 0 - poker ((#ptr struct sockaddr_un, sun_path) p) pathC -#endif -pokeSockAddr p (SockAddrInet (PortNum port) addr) = do -#if defined(darwin_HOST_OS) - zeroMemory p (#const sizeof(struct sockaddr_in)) -#endif -#if defined(HAVE_STRUCT_SOCKADDR_SA_LEN) - (#poke struct sockaddr_in, sin_len) p ((#const sizeof(struct sockaddr_in)) :: Word8) -#endif - (#poke struct sockaddr_in, sin_family) p ((#const AF_INET) :: CSaFamily) - (#poke struct sockaddr_in, sin_port) p port - (#poke struct sockaddr_in, sin_addr) p addr -#if defined(IPV6_SOCKET_SUPPORT) -pokeSockAddr p (SockAddrInet6 (PortNum port) flow addr scope) = do -#if defined(darwin_HOST_OS) - zeroMemory p (#const sizeof(struct sockaddr_in6)) -#endif -#if defined(HAVE_STRUCT_SOCKADDR_SA_LEN) - (#poke struct sockaddr_in6, sin6_len) p ((#const sizeof(struct sockaddr_in6)) :: Word8) -#endif - (#poke struct sockaddr_in6, sin6_family) p ((#const AF_INET6) :: CSaFamily) - (#poke struct sockaddr_in6, sin6_port) p port - (#poke struct sockaddr_in6, sin6_flowinfo) p flow - (#poke struct sockaddr_in6, sin6_addr) p (In6Addr addr) - (#poke struct sockaddr_in6, sin6_scope_id) p scope -#endif -#if defined(CAN_SOCKET_SUPPORT) -pokeSockAddr p (SockAddrCan ifIndex) = do -#if defined(darwin_HOST_OS) - zeroMemory p (#const sizeof(struct sockaddr_can)) -#endif - (#poke struct sockaddr_can, can_ifindex) p ifIndex -#endif -#if 
!(defined(IPV6_SOCKET_SUPPORT) \ - && defined(DOMAIN_SOCKET_SUPPORT) && defined(CAN_SOCKET_SUPPORT)) -pokeSockAddr _ _ = error "pokeSockAddr: not supported" -#endif - --- | Read a 'SockAddr' from the given memory location. -peekSockAddr :: Ptr SockAddr -> IO SockAddr -peekSockAddr p = do - family <- (#peek struct sockaddr, sa_family) p - case family :: CSaFamily of -#if defined(DOMAIN_SOCKET_SUPPORT) - (#const AF_UNIX) -> do - str <- peekCString ((#ptr struct sockaddr_un, sun_path) p) - return (SockAddrUnix str) -#endif - (#const AF_INET) -> do - addr <- (#peek struct sockaddr_in, sin_addr) p - port <- (#peek struct sockaddr_in, sin_port) p - return (SockAddrInet (PortNum port) addr) -#if defined(IPV6_SOCKET_SUPPORT) - (#const AF_INET6) -> do - port <- (#peek struct sockaddr_in6, sin6_port) p - flow <- (#peek struct sockaddr_in6, sin6_flowinfo) p - In6Addr addr <- (#peek struct sockaddr_in6, sin6_addr) p - scope <- (#peek struct sockaddr_in6, sin6_scope_id) p - return (SockAddrInet6 (PortNum port) flow addr scope) -#endif -#if defined(CAN_SOCKET_SUPPORT) - (#const AF_CAN) -> do - ifidx <- (#peek struct sockaddr_can, can_ifindex) p - return (SockAddrCan ifidx) -#endif - _ -> ioError $ userError $ - "Network.Socket.Types.peekSockAddr: address family '" ++ - show family ++ "' not supported." - ------------------------------------------------------------------------- - --- | The raw network byte order number is read using host byte order. --- Therefore on little-endian architectures the byte order is swapped. For --- example @127.0.0.1@ is represented as @0x0100007f@ on little-endian hosts --- and as @0x7f000001@ on big-endian hosts. --- --- For direct manipulation prefer 'hostAddressToTuple' and --- 'tupleToHostAddress'. -type HostAddress = Word32 - --- | Converts 'HostAddress' to representation-independent IPv4 quadruple. --- For example for @127.0.0.1@ the function will return @(0x7f, 0, 0, 1)@ --- regardless of host endianness. 
-hostAddressToTuple :: HostAddress -> (Word8, Word8, Word8, Word8) -hostAddressToTuple ha' = - let ha = htonl ha' - byte i = fromIntegral (ha `shiftR` i) :: Word8 - in (byte 24, byte 16, byte 8, byte 0) - --- | Converts IPv4 quadruple to 'HostAddress'. -tupleToHostAddress :: (Word8, Word8, Word8, Word8) -> HostAddress -tupleToHostAddress (b3, b2, b1, b0) = - let x `sl` i = fromIntegral x `shiftL` i :: Word32 - in ntohl $ (b3 `sl` 24) .|. (b2 `sl` 16) .|. (b1 `sl` 8) .|. (b0 `sl` 0) - -#if defined(IPV6_SOCKET_SUPPORT) --- | Independent of endianness. For example @::1@ is stored as @(0, 0, 0, 1)@. --- --- For direct manipulation prefer 'hostAddress6ToTuple' and --- 'tupleToHostAddress6'. -type HostAddress6 = (Word32, Word32, Word32, Word32) - -hostAddress6ToTuple :: HostAddress6 -> (Word16, Word16, Word16, Word16, - Word16, Word16, Word16, Word16) -hostAddress6ToTuple (w3, w2, w1, w0) = - let high, low :: Word32 -> Word16 - high w = fromIntegral (w `shiftR` 16) - low w = fromIntegral w - in (high w3, low w3, high w2, low w2, high w1, low w1, high w0, low w0) - -tupleToHostAddress6 :: (Word16, Word16, Word16, Word16, - Word16, Word16, Word16, Word16) -> HostAddress6 -tupleToHostAddress6 (w7, w6, w5, w4, w3, w2, w1, w0) = - let add :: Word16 -> Word16 -> Word32 - high `add` low = (fromIntegral high `shiftL` 16) .|. (fromIntegral low) - in (w7 `add` w6, w5 `add` w4, w3 `add` w2, w1 `add` w0) - --- The peek32 and poke32 functions work around the fact that the RFCs --- don't require 32-bit-wide address fields to be present. We can --- only portably rely on an 8-bit field, s6_addr. - -s6_addr_offset :: Int -s6_addr_offset = (#offset struct in6_addr, s6_addr) - -peek32 :: Ptr a -> Int -> IO Word32 -peek32 p i0 = do - let i' = i0 * 4 - peekByte n = peekByteOff p (s6_addr_offset + i' + n) :: IO Word8 - a `sl` i = fromIntegral a `shiftL` i - a0 <- peekByte 0 - a1 <- peekByte 1 - a2 <- peekByte 2 - a3 <- peekByte 3 - return ((a0 `sl` 24) .|. (a1 `sl` 16) .|. (a2 `sl` 8) .|. 
(a3 `sl` 0)) - -poke32 :: Ptr a -> Int -> Word32 -> IO () -poke32 p i0 a = do - let i' = i0 * 4 - pokeByte n = pokeByteOff p (s6_addr_offset + i' + n) - x `sr` i = fromIntegral (x `shiftR` i) :: Word8 - pokeByte 0 (a `sr` 24) - pokeByte 1 (a `sr` 16) - pokeByte 2 (a `sr` 8) - pokeByte 3 (a `sr` 0) - --- | Private newtype proxy for the Storable instance. To avoid orphan instances. -newtype In6Addr = In6Addr HostAddress6 - -#if __GLASGOW_HASKELL__ < 800 -#let alignment t = "%lu", (unsigned long)offsetof(struct {char x__; t (y__); }, y__) -#endif - -instance Storable In6Addr where - sizeOf _ = #const sizeof(struct in6_addr) - alignment _ = #alignment struct in6_addr - - peek p = do - a <- peek32 p 0 - b <- peek32 p 1 - c <- peek32 p 2 - d <- peek32 p 3 - return $ In6Addr (a, b, c, d) - - poke p (In6Addr (a, b, c, d)) = do - poke32 p 0 a - poke32 p 1 b - poke32 p 2 c - poke32 p 3 d -#endif - ------------------------------------------------------------------------- --- Helper functions - -foreign import ccall unsafe "string.h" memset :: Ptr a -> CInt -> CSize -> IO () - --- | Zero a structure. 
-zeroMemory :: Ptr a -> CSize -> IO () -zeroMemory dest nbytes = memset dest 0 (fromIntegral nbytes) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network/Socket.hsc 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network/Socket.hsc 1970-01-01 00:00:00.000000000 +0000 @@ -1,1965 +0,0 @@ -{-# LANGUAGE CPP, ScopedTypeVariables, RecordWildCards #-} -{-# OPTIONS_GHC -fno-warn-orphans #-} -{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-} ------------------------------------------------------------------------------ --- | --- Module : Network.Socket --- Copyright : (c) The University of Glasgow 2001 --- License : BSD-style (see the file libraries/network/LICENSE) --- --- Maintainer : libraries@haskell.org --- Stability : provisional --- Portability : portable --- --- This is the main module of the network package supposed to be --- used with either "Network.Socket.ByteString" or --- "Network.Socket.ByteString.Lazy" for sending/receiving. --- --- Here are two minimal example programs using the TCP/IP protocol: a --- server that echoes all data that it receives back (servicing only --- one client) and a client using it. 
--- --- > -- Echo server program --- > module Main (main) where --- > --- > import Control.Concurrent (forkFinally) --- > import qualified Control.Exception as E --- > import Control.Monad (unless, forever, void) --- > import qualified Data.ByteString as S --- > import Network.Socket hiding (recv) --- > import Network.Socket.ByteString (recv, sendAll) --- > --- > main :: IO () --- > main = withSocketsDo $ do --- > addr <- resolve "3000" --- > E.bracket (open addr) close loop --- > where --- > resolve port = do --- > let hints = defaultHints { --- > addrFlags = [AI_PASSIVE] --- > , addrSocketType = Stream --- > } --- > addr:_ <- getAddrInfo (Just hints) Nothing (Just port) --- > return addr --- > open addr = do --- > sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) --- > setSocketOption sock ReuseAddr 1 --- > bind sock (addrAddress addr) --- > -- If the prefork technique is not used, --- > -- set CloseOnExec for the security reasons. --- > let fd = fdSocket sock --- > setCloseOnExecIfNeeded fd --- > listen sock 10 --- > return sock --- > loop sock = forever $ do --- > (conn, peer) <- accept sock --- > putStrLn $ "Connection from " ++ show peer --- > void $ forkFinally (talk conn) (\_ -> close conn) --- > talk conn = do --- > msg <- recv conn 1024 --- > unless (S.null msg) $ do --- > sendAll conn msg --- > talk conn --- --- > {-# LANGUAGE OverloadedStrings #-} --- > -- Echo client program --- > module Main (main) where --- > --- > import qualified Control.Exception as E --- > import qualified Data.ByteString.Char8 as C --- > import Network.Socket hiding (recv) --- > import Network.Socket.ByteString (recv, sendAll) --- > --- > main :: IO () --- > main = withSocketsDo $ do --- > addr <- resolve "127.0.0.1" "3000" --- > E.bracket (open addr) close talk --- > where --- > resolve host port = do --- > let hints = defaultHints { addrSocketType = Stream } --- > addr:_ <- getAddrInfo (Just hints) (Just host) (Just port) --- > return addr --- > open 
addr = do --- > sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) --- > connect sock $ addrAddress addr --- > return sock --- > talk sock = do --- > sendAll sock "Hello, world!" --- > msg <- recv sock 1024 --- > putStr "Received: " --- > C.putStrLn msg --- --- The proper programming model is that one 'Socket' is handled by --- a single thread. If multiple threads use one 'Socket' concurrently, --- unexpected things would happen. There is one exception for multiple --- threads vs a single 'Socket': one thread reads data from a 'Socket' --- only and the other thread writes data to the 'Socket' only. ------------------------------------------------------------------------------ - -#include "HsNet.h" -##include "HsNetDef.h" - -module Network.Socket - ( - -- * Initialisation - withSocketsDo - -- * Address information - , getAddrInfo - -- ** Types - , HostName - , ServiceName - , AddrInfo(..) - , defaultHints - -- ** Flags - , AddrInfoFlag(..) - , addrInfoFlagImplemented - -- * Socket operations - , connect - , bind - , listen - , accept - -- ** Closing - , close - , close' - , shutdown - , ShutdownCmd(..) - -- * Socket options - , SocketOption(..) - , isSupportedSocketOption - , getSocketOption - , setSocketOption - -- * Socket - , Socket(..) - , socket - , fdSocket - , mkSocket - , socketToHandle - -- ** Types of Socket - , SocketType(..) - , isSupportedSocketType - -- ** Family - , Family(..) - , isSupportedFamily - -- ** Protocol number - , ProtocolNumber - , defaultProtocol - -- * Socket address - , SockAddr(..) 
- , isSupportedSockAddr - , getPeerName - , getSocketName - -- ** Host address - , HostAddress - , hostAddressToTuple - , tupleToHostAddress -#if defined(IPV6_SOCKET_SUPPORT) - -- ** Host address6 - , HostAddress6 - , hostAddress6ToTuple - , tupleToHostAddress6 - -- ** Flow Info - , FlowInfo - -- ** Scope ID - , ScopeID -# if defined(HAVE_IF_NAMETOINDEX) - , ifNameToIndex - , ifIndexToName -# endif -#endif - -- ** Port number - , PortNumber(..) - , defaultPort - , socketPortSafe - , socketPort - -- * UNIX-domain socket - , isUnixDomainSocketAvailable - , socketPair - , sendFd - , recvFd - , getPeerCredential -#if defined(IPV6_SOCKET_SUPPORT) - -- * Name information - , NameInfoFlag(..) - , getNameInfo -#endif - -- * Low level operations - , setCloseOnExecIfNeeded - , getCloseOnExec - , setNonBlockIfNeeded - , getNonBlock - -- * Sending and receiving data - , sendBuf - , recvBuf - , sendBufTo - , recvBufFrom - -- * Special constants - , maxListenQueue - -- * Deprecated - -- ** Deprecated sending and receiving - , send - , sendTo - , recv - , recvFrom - , recvLen - -- ** Deprecated address functions - , htonl - , ntohl - , inet_addr - , inet_ntoa - -- ** Deprecated socket operations - , bindSocket - , sClose - -- ** Deprecated socket status - , SocketStatus(..) -- fixme - , isConnected - , isBound - , isListening - , isReadable - , isWritable - , sIsConnected - , sIsBound - , sIsListening - , sIsReadable - , sIsWritable - -- ** Deprecated special constants - , aNY_PORT - , iNADDR_ANY -#if defined(IPV6_SOCKET_SUPPORT) - , iN6ADDR_ANY -#endif - , sOMAXCONN - , sOL_SOCKET -#ifdef SCM_RIGHTS - , sCM_RIGHTS -#endif - -- ** Decrecated internal functions - , packFamily - , unpackFamily - , packSocketType - -- ** Decrecated UNIX-domain functions -#if defined(HAVE_STRUCT_UCRED) || defined(HAVE_GETPEEREID) - -- get the credentials of our domain socket peer. 
- , getPeerCred -#if defined(HAVE_GETPEEREID) - , getPeerEid -#endif -#endif - ) where - -import Data.Bits -import Data.Functor -import Data.List (foldl') -import Data.Maybe (isJust) -import Data.Word (Word8, Word32) -import Foreign.Ptr (Ptr, castPtr, nullPtr) -import Foreign.Storable (Storable(..)) -import Foreign.C.Error -import Foreign.C.String (CString, withCString, withCStringLen, peekCString, peekCStringLen) -import Foreign.C.Types (CUInt(..), CChar) -import Foreign.C.Types (CInt(..), CSize(..)) -import Foreign.Marshal.Alloc ( alloca, allocaBytes ) -import Foreign.Marshal.Array ( peekArray ) -import Foreign.Marshal.Utils ( maybeWith, with ) - -import System.IO -import Control.Monad (liftM, when) - -import qualified Control.Exception as E -import Control.Concurrent.MVar -import Data.Typeable -import System.IO.Error - -import GHC.Conc (threadWaitWrite) -# ifdef HAVE_ACCEPT4 -import GHC.Conc (threadWaitRead) -# endif -##if MIN_VERSION_base(4,3,1) -import GHC.Conc (closeFdWith) -##endif -# if defined(mingw32_HOST_OS) -import GHC.Conc (asyncDoProc) -import GHC.IO.FD (FD(..), readRawBufferPtr, writeRawBufferPtr) -import Foreign (FunPtr) -# endif -# if defined(darwin_HOST_OS) -import Data.List (delete) -# endif -import qualified GHC.IO.Device -import GHC.IO.Handle.FD -import GHC.IO.Exception -import GHC.IO -import qualified System.Posix.Internals - -import Network.Socket.Internal -import Network.Socket.Types - -import Prelude -- Silence AMP warnings - --- | Either a host name e.g., @\"haskell.org\"@ or a numeric host --- address string consisting of a dotted decimal IPv4 address or an --- IPv6 address e.g., @\"192.168.0.1\"@. -type HostName = String -type ServiceName = String - --- ---------------------------------------------------------------------------- --- On Windows, our sockets are not put in non-blocking mode (non-blocking --- is not supported for regular file descriptors on Windows, and it would --- be a pain to support it only for sockets). 
So there are two cases: --- --- - the threaded RTS uses safe calls for socket operations to get --- non-blocking I/O, just like the rest of the I/O library --- --- - with the non-threaded RTS, only some operations on sockets will be --- non-blocking. Reads and writes go through the normal async I/O --- system. accept() uses asyncDoProc so is non-blocking. A handful --- of others (recvFrom, sendFd, recvFd) will block all threads - if this --- is a problem, -threaded is the workaround. --- -##if defined(mingw32_HOST_OS) -##define SAFE_ON_WIN safe -##else -##define SAFE_ON_WIN unsafe -##endif - ------------------------------------------------------------------------------ --- Socket types - -#if defined(mingw32_HOST_OS) -socket2FD (MkSocket fd _ _ _ _) = - -- HACK, 1 means True - FD{fdFD = fd,fdIsSocket_ = 1} -#endif - --- | Smart constructor for constructing a 'Socket'. It should only be --- called once for every new file descriptor. The caller must make --- sure that the socket is in non-blocking mode. See --- 'setNonBlockIfNeeded'. -mkSocket :: CInt - -> Family - -> SocketType - -> ProtocolNumber - -> SocketStatus - -> IO Socket -mkSocket fd fam sType pNum stat = do - mStat <- newMVar stat - withSocketsDo $ return () - return $ MkSocket fd fam sType pNum mStat - --- | This is the default protocol for a given service. -defaultProtocol :: ProtocolNumber -defaultProtocol = 0 - ------------------------------------------------------------------------------ --- SockAddr - -instance Show SockAddr where -#if defined(DOMAIN_SOCKET_SUPPORT) - showsPrec _ (SockAddrUnix str) = showString str -#endif - showsPrec _ (SockAddrInet port ha) - = showString (unsafePerformIO (inet_ntoa ha)) - . showString ":" - . shows port -#if defined(IPV6_SOCKET_SUPPORT) - showsPrec _ addr@(SockAddrInet6 port _ _ _) - = showChar '[' - . showString (unsafePerformIO $ - fst `liftM` getNameInfo [NI_NUMERICHOST] True False addr >>= - maybe (fail "showsPrec: impossible internal error") return) - . 
showString "]:" - . shows port -#endif -#if defined(CAN_SOCKET_SUPPORT) - showsPrec _ (SockAddrCan ifidx) = shows ifidx -#endif -#if !(defined(IPV6_SOCKET_SUPPORT) \ - && defined(DOMAIN_SOCKET_SUPPORT) && defined(CAN_SOCKET_SUPPORT)) - showsPrec _ _ = error "showsPrec: not supported" -#endif - ------------------------------------------------------------------------------ --- Connection Functions - --- In the following connection and binding primitives. The names of --- the equivalent C functions have been preserved where possible. It --- should be noted that some of these names used in the C library, --- \tr{bind} in particular, have a different meaning to many Haskell --- programmers and have thus been renamed by appending the prefix --- Socket. - --- | Create a new socket using the given address family, socket type --- and protocol number. The address family is usually 'AF_INET', --- 'AF_INET6', or 'AF_UNIX'. The socket type is usually 'Stream' or --- 'Datagram'. The protocol number is usually 'defaultProtocol'. --- If 'AF_INET6' is used and the socket type is 'Stream' or 'Datagram', --- the 'IPv6Only' socket option is set to 0 so that both IPv4 and IPv6 --- can be handled with one socket. 
--- --- >>> let hints = defaultHints { addrFlags = [AI_NUMERICHOST, AI_NUMERICSERV], addrSocketType = Stream } --- >>> addr:_ <- getAddrInfo (Just hints) (Just "127.0.0.1") (Just "5000") --- >>> sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) --- >>> bind sock (addrAddress addr) --- >>> getSocketName sock --- 127.0.0.1:5000 -socket :: Family -- Family Name (usually AF_INET) - -> SocketType -- Socket Type (usually Stream) - -> ProtocolNumber -- Protocol Number (getProtocolByName to find value) - -> IO Socket -- Unconnected Socket -socket family stype protocol = do - c_stype <- packSocketTypeOrThrow "socket" stype - fd <- throwSocketErrorIfMinus1Retry "Network.Socket.socket" $ - c_socket (packFamily family) c_stype protocol - setNonBlockIfNeeded fd - sock <- mkSocket fd family stype protocol NotConnected -#if HAVE_DECL_IPV6_V6ONLY - -- The default value of the IPv6Only option is platform specific, - -- so we explicitly set it to 0 to provide a common default. -# if defined(mingw32_HOST_OS) - -- The IPv6Only option is only supported on Windows Vista and later, - -- so trying to change it might throw an error. - when (family == AF_INET6 && (stype == Stream || stype == Datagram)) $ - E.catch (setSocketOption sock IPv6Only 0) $ (\(_ :: E.IOException) -> return ()) -# elif !defined(__OpenBSD__) - when (family == AF_INET6 && (stype == Stream || stype == Datagram)) $ - setSocketOption sock IPv6Only 0 `onException` close sock -# endif -#endif - return sock - --- | Build a pair of connected socket objects using the given address --- family, socket type, and protocol number. Address family, socket --- type, and protocol number are as for the 'socket' function above. --- Availability: Unix. -socketPair :: Family -- Family Name (usually AF_INET or AF_INET6) - -> SocketType -- Socket Type (usually Stream) - -> ProtocolNumber -- Protocol Number - -> IO (Socket, Socket) -- unnamed and connected. 
-#if defined(DOMAIN_SOCKET_SUPPORT) -socketPair family stype protocol = do - allocaBytes (2 * sizeOf (1 :: CInt)) $ \ fdArr -> do - c_stype <- packSocketTypeOrThrow "socketPair" stype - _rc <- throwSocketErrorIfMinus1Retry "Network.Socket.socketpair" $ - c_socketpair (packFamily family) c_stype protocol fdArr - [fd1,fd2] <- peekArray 2 fdArr - s1 <- mkNonBlockingSocket fd1 - s2 <- mkNonBlockingSocket fd2 - return (s1,s2) - where - mkNonBlockingSocket fd = do - setNonBlockIfNeeded fd - mkSocket fd family stype protocol Connected - -foreign import ccall unsafe "socketpair" - c_socketpair :: CInt -> CInt -> CInt -> Ptr CInt -> IO CInt -#else -socketPair _ _ _ = error "Network.Socket.socketPair" -#endif - ------------------------------------------------------------------------------ - -#if defined(mingw32_HOST_OS) -#else -fGetFd :: CInt -fGetFd = #const F_GETFD -fGetFl :: CInt -fGetFl = #const F_GETFL -fdCloexec :: CInt -fdCloexec = #const FD_CLOEXEC -oNonBlock :: CInt -oNonBlock = #const O_NONBLOCK -# if defined(HAVE_ACCEPT4) -sockNonBlock :: CInt -sockNonBlock = #const SOCK_NONBLOCK -sockCloexec :: CInt -sockCloexec = #const SOCK_CLOEXEC -# endif -#endif - --- | Set the nonblocking flag on Unix. --- On Windows, nothing is done. -setNonBlockIfNeeded :: CInt -> IO () -setNonBlockIfNeeded fd = - System.Posix.Internals.setNonBlockingFD fd True - --- | Set the close_on_exec flag on Unix. --- On Windows, nothing is done. --- --- Since 2.7.0.0. -setCloseOnExecIfNeeded :: CInt -> IO () -#if defined(mingw32_HOST_OS) -setCloseOnExecIfNeeded _ = return () -#else -setCloseOnExecIfNeeded fd = System.Posix.Internals.setCloseOnExec fd -#endif - -#if !defined(mingw32_HOST_OS) -foreign import ccall unsafe "fcntl" - c_fcntl_read :: CInt -> CInt -> CInt -> IO CInt -#endif - --- | Get the nonblocking flag. --- On Windows, this function always returns 'False'. --- --- Since 2.7.0.0. 
-getCloseOnExec :: CInt -> IO Bool -#if defined(mingw32_HOST_OS) -getCloseOnExec _ = return False -#else -getCloseOnExec fd = do - flags <- c_fcntl_read fd fGetFd 0 - let ret = flags .&. fdCloexec - return (ret /= 0) -#endif - --- | Get the close_on_exec flag. --- On Windows, this function always returns 'False'. --- --- Since 2.7.0.0. -getNonBlock :: CInt -> IO Bool -#if defined(mingw32_HOST_OS) -getNonBlock _ = return False -#else -getNonBlock fd = do - flags <- c_fcntl_read fd fGetFl 0 - let ret = flags .&. oNonBlock - return (ret /= 0) -#endif - ------------------------------------------------------------------------------ --- Binding a socket - --- | Bind the socket to an address. The socket must not already be --- bound. The 'Family' passed to @bind@ must be the --- same as that passed to 'socket'. If the special port number --- 'defaultPort' is passed then the system assigns the next available --- use port. -bind :: Socket -- Unconnected Socket - -> SockAddr -- Address to Bind to - -> IO () -bind (MkSocket s _family _stype _protocol socketStatus) addr = do - modifyMVar_ socketStatus $ \ status -> do - if status /= NotConnected - then - ioError $ userError $ - "Network.Socket.bind: can't bind to socket with status " ++ show status - else do - withSockAddr addr $ \p_addr sz -> do - _status <- throwSocketErrorIfMinus1Retry "Network.Socket.bind" $ - c_bind s p_addr (fromIntegral sz) - return Bound - ------------------------------------------------------------------------------ --- Connecting a socket - --- | Connect to a remote socket at address. 
-connect :: Socket -- Unconnected Socket - -> SockAddr -- Socket address stuff - -> IO () -connect sock@(MkSocket s _family _stype _protocol socketStatus) addr = withSocketsDo $ do - modifyMVar_ socketStatus $ \currentStatus -> do - if currentStatus /= NotConnected && currentStatus /= Bound - then - ioError $ userError $ - errLoc ++ ": can't connect to socket with status " ++ show currentStatus - else do - withSockAddr addr $ \p_addr sz -> do - - let connectLoop = do - r <- c_connect s p_addr (fromIntegral sz) - if r == -1 - then do -#if !(defined(HAVE_WINSOCK2_H)) - err <- getErrno - case () of - _ | err == eINTR -> connectLoop - _ | err == eINPROGRESS -> connectBlocked --- _ | err == eAGAIN -> connectBlocked - _otherwise -> throwSocketError errLoc -#else - throwSocketError errLoc -#endif - else return () - - connectBlocked = do - threadWaitWrite (fromIntegral s) - err <- getSocketOption sock SoError - if (err == 0) - then return () - else throwSocketErrorCode errLoc (fromIntegral err) - - connectLoop - return Connected - where - errLoc = "Network.Socket.connect: " ++ show sock - ------------------------------------------------------------------------------ --- Listen - --- | Listen for connections made to the socket. The second argument --- specifies the maximum number of queued connections and should be at --- least 1; the maximum value is system-dependent (usually 5). 
-listen :: Socket -- Connected & Bound Socket - -> Int -- Queue Length - -> IO () -listen (MkSocket s _family _stype _protocol socketStatus) backlog = do - modifyMVar_ socketStatus $ \ status -> do - if status /= Bound - then - ioError $ userError $ - "Network.Socket.listen: can't listen on socket with status " ++ show status - else do - throwSocketErrorIfMinus1Retry_ "Network.Socket.listen" $ - c_listen s (fromIntegral backlog) - return Listening - ------------------------------------------------------------------------------ --- Accept --- --- A call to `accept' only returns when data is available on the given --- socket, unless the socket has been set to non-blocking. It will --- return a new socket which should be used to read the incoming data and --- should then be closed. Using the socket returned by `accept' allows --- incoming requests to be queued on the original socket. - --- | Accept a connection. The socket must be bound to an address and --- listening for connections. The return value is a pair @(conn, --- address)@ where @conn@ is a new socket object usable to send and --- receive data on the connection, and @address@ is the address bound --- to the socket on the other end of the connection. 
-accept :: Socket -- Queue Socket - -> IO (Socket, -- Readable Socket - SockAddr) -- Peer details - -accept sock@(MkSocket s family stype protocol status) = do - currentStatus <- readMVar status - if not $ isAcceptable family stype currentStatus - then - ioError $ userError $ - "Network.Socket.accept: can't accept socket (" ++ - show (family, stype, protocol) ++ ") with status " ++ - show currentStatus - else do - let sz = sizeOfSockAddrByFamily family - allocaBytes sz $ \ sockaddr -> do -#if defined(mingw32_HOST_OS) - new_sock <- - if threaded - then with (fromIntegral sz) $ \ ptr_len -> - throwSocketErrorIfMinus1Retry "Network.Socket.accept" $ - c_accept_safe s sockaddr ptr_len - else do - paramData <- c_newAcceptParams s (fromIntegral sz) sockaddr - rc <- asyncDoProc c_acceptDoProc paramData - new_sock <- c_acceptNewSock paramData - c_free paramData - when (rc /= 0) $ - throwSocketErrorCode "Network.Socket.accept" (fromIntegral rc) - return new_sock -#else - with (fromIntegral sz) $ \ ptr_len -> do -# ifdef HAVE_ACCEPT4 - new_sock <- throwSocketErrorIfMinus1RetryMayBlock "Network.Socket.accept" - (threadWaitRead (fromIntegral s)) - (c_accept4 s sockaddr ptr_len (sockNonBlock .|. 
sockCloexec)) -# else - new_sock <- throwSocketErrorWaitRead sock "Network.Socket.accept" - (c_accept s sockaddr ptr_len) - setNonBlockIfNeeded new_sock - setCloseOnExecIfNeeded new_sock -# endif /* HAVE_ACCEPT4 */ -#endif - addr <- peekSockAddr sockaddr - sock' <- mkSocket new_sock family stype protocol Connected - return (sock', addr) - -#if defined(mingw32_HOST_OS) -foreign import ccall unsafe "HsNet.h acceptNewSock" - c_acceptNewSock :: Ptr () -> IO CInt -foreign import ccall unsafe "HsNet.h newAcceptParams" - c_newAcceptParams :: CInt -> CInt -> Ptr a -> IO (Ptr ()) -foreign import ccall unsafe "HsNet.h &acceptDoProc" - c_acceptDoProc :: FunPtr (Ptr () -> IO Int) -foreign import ccall unsafe "free" - c_free:: Ptr a -> IO () -#endif - ------------------------------------------------------------------------------ --- ** Sending and receiving data - --- $sendrecv --- --- Do not use the @send@ and @recv@ functions defined in this section --- in new code, as they incorrectly represent binary data as a Unicode --- string. As a result, these functions are inefficient and may lead --- to bugs in the program. Instead use the @send@ and @recv@ --- functions defined in the "Network.Socket.ByteString" module. - ------------------------------------------------------------------------------ --- sendTo & recvFrom - --- | Send data to the socket. The recipient can be specified --- explicitly, so the socket need not be in a connected state. --- Returns the number of bytes sent. Applications are responsible for --- ensuring that all data has been sent. 
--- --- NOTE: blocking on Windows unless you compile with -threaded (see --- GHC ticket #1129) -{-# DEPRECATED sendTo "Use sendTo defined in \"Network.Socket.ByteString\"" #-} -sendTo :: Socket -- (possibly) bound/connected Socket - -> String -- Data to send - -> SockAddr - -> IO Int -- Number of Bytes sent -sendTo sock xs addr = do - withCStringLen xs $ \(str, len) -> do - sendBufTo sock str len addr - --- | Send data to the socket. The recipient can be specified --- explicitly, so the socket need not be in a connected state. --- Returns the number of bytes sent. Applications are responsible for --- ensuring that all data has been sent. -sendBufTo :: Socket -- (possibly) bound/connected Socket - -> Ptr a -> Int -- Data to send - -> SockAddr - -> IO Int -- Number of Bytes sent -sendBufTo sock@(MkSocket s _family _stype _protocol _status) ptr nbytes addr = do - withSockAddr addr $ \p_addr sz -> do - liftM fromIntegral $ - throwSocketErrorWaitWrite sock "Network.Socket.sendBufTo" $ - c_sendto s ptr (fromIntegral $ nbytes) 0{-flags-} - p_addr (fromIntegral sz) - --- | Receive data from the socket. The socket need not be in a --- connected state. Returns @(bytes, nbytes, address)@ where @bytes@ --- is a @String@ of length @nbytes@ representing the data received and --- @address@ is a 'SockAddr' representing the address of the sending --- socket. --- --- NOTE: blocking on Windows unless you compile with -threaded (see --- GHC ticket #1129) -{-# DEPRECATED recvFrom "Use recvFrom defined in \"Network.Socket.ByteString\"" #-} -recvFrom :: Socket -> Int -> IO (String, Int, SockAddr) -recvFrom sock nbytes = - allocaBytes nbytes $ \ptr -> do - (len, sockaddr) <- recvBufFrom sock ptr nbytes - str <- peekCStringLen (ptr, len) - return (str, len, sockaddr) - --- | Receive data from the socket, writing it into buffer instead of --- creating a new string. The socket need not be in a connected --- state. 
Returns @(nbytes, address)@ where @nbytes@ is the number of --- bytes received and @address@ is a 'SockAddr' representing the --- address of the sending socket. --- --- NOTE: blocking on Windows unless you compile with -threaded (see --- GHC ticket #1129) -recvBufFrom :: Socket -> Ptr a -> Int -> IO (Int, SockAddr) -recvBufFrom sock@(MkSocket s family _stype _protocol _status) ptr nbytes - | nbytes <= 0 = ioError (mkInvalidRecvArgError "Network.Socket.recvBufFrom") - | otherwise = - withNewSockAddr family $ \ptr_addr sz -> do - alloca $ \ptr_len -> do - poke ptr_len (fromIntegral sz) - len <- throwSocketErrorWaitRead sock "Network.Socket.recvBufFrom" $ - c_recvfrom s ptr (fromIntegral nbytes) 0{-flags-} - ptr_addr ptr_len - let len' = fromIntegral len - if len' == 0 - then ioError (mkEOFError "Network.Socket.recvFrom") - else do - flg <- isConnected sock - -- For at least one implementation (WinSock 2), recvfrom() ignores - -- filling in the sockaddr for connected TCP sockets. Cope with - -- this by using getPeerName instead. - sockaddr <- - if flg then - getPeerName sock - else - peekSockAddr ptr_addr - return (len', sockaddr) - ------------------------------------------------------------------------------ --- send & recv - --- | Send data to the socket. The socket must be connected to a remote --- socket. Returns the number of bytes sent. Applications are --- responsible for ensuring that all data has been sent. --- --- Sending data to closed socket may lead to undefined behaviour. -{-# DEPRECATED send "Use send defined in \"Network.Socket.ByteString\"" #-} -send :: Socket -- Bound/Connected Socket - -> String -- Data to send - -> IO Int -- Number of Bytes sent -send sock xs = withCStringLen xs $ \(str, len) -> - sendBuf sock (castPtr str) len - --- | Send data to the socket. The socket must be connected to a remote --- socket. Returns the number of bytes sent. Applications are --- responsible for ensuring that all data has been sent. 
--- --- Sending data to closed socket may lead to undefined behaviour. -sendBuf :: Socket -- Bound/Connected Socket - -> Ptr Word8 -- Pointer to the data to send - -> Int -- Length of the buffer - -> IO Int -- Number of Bytes sent -sendBuf sock@(MkSocket s _family _stype _protocol _status) str len = do - liftM fromIntegral $ -#if defined(mingw32_HOST_OS) --- writeRawBufferPtr is supposed to handle checking for errors, but it's broken --- on x86_64 because of GHC bug #12010 so we duplicate the check here. The call --- to throwSocketErrorIfMinus1Retry can be removed when no GHC version with the --- bug is supported. - throwSocketErrorIfMinus1Retry "Network.Socket.sendBuf" $ writeRawBufferPtr - "Network.Socket.sendBuf" - (socket2FD sock) - (castPtr str) - 0 - (fromIntegral len) -#else - throwSocketErrorWaitWrite sock "Network.Socket.sendBuf" $ - c_send s str (fromIntegral len) 0{-flags-} -#endif - - --- | Receive data from the socket. The socket must be in a connected --- state. This function may return fewer bytes than specified. If the --- message is longer than the specified length, it may be discarded --- depending on the type of socket. This function may block until a --- message arrives. --- --- Considering hardware and network realities, the maximum number of --- bytes to receive should be a small power of 2, e.g., 4096. --- --- For TCP sockets, a zero length return value means the peer has --- closed its half side of the connection. --- --- Receiving data from closed socket may lead to undefined behaviour. 
-{-# DEPRECATED recv "Use recv defined in \"Network.Socket.ByteString\"" #-} -recv :: Socket -> Int -> IO String -recv sock l = fst <$> recvLen sock l - -{-# DEPRECATED recvLen "Use recv defined in \"Network.Socket.ByteString\" with \"Data.Bytestring.length\"" #-} -recvLen :: Socket -> Int -> IO (String, Int) -recvLen sock nbytes = - allocaBytes nbytes $ \ptr -> do - len <- recvBuf sock ptr nbytes - s <- peekCStringLen (castPtr ptr,len) - return (s, len) - --- | Receive data from the socket. The socket must be in a connected --- state. This function may return fewer bytes than specified. If the --- message is longer than the specified length, it may be discarded --- depending on the type of socket. This function may block until a --- message arrives. --- --- Considering hardware and network realities, the maximum number of --- bytes to receive should be a small power of 2, e.g., 4096. --- --- For TCP sockets, a zero length return value means the peer has --- closed its half side of the connection. --- --- Receiving data from closed socket may lead to undefined behaviour. -recvBuf :: Socket -> Ptr Word8 -> Int -> IO Int -recvBuf sock@(MkSocket s _family _stype _protocol _status) ptr nbytes - | nbytes <= 0 = ioError (mkInvalidRecvArgError "Network.Socket.recvBuf") - | otherwise = do - len <- -#if defined(mingw32_HOST_OS) --- see comment in sendBuf above. 
- throwSocketErrorIfMinus1Retry "Network.Socket.recvBuf" $ - readRawBufferPtr "Network.Socket.recvBuf" - (socket2FD sock) ptr 0 (fromIntegral nbytes) -#else - throwSocketErrorWaitRead sock "Network.Socket.recvBuf" $ - c_recv s (castPtr ptr) (fromIntegral nbytes) 0{-flags-} -#endif - let len' = fromIntegral len - if len' == 0 - then ioError (mkEOFError "Network.Socket.recvBuf") - else return len' - - --- --------------------------------------------------------------------------- --- socketPort --- --- The port number the given socket is currently connected to can be --- determined by calling $port$, is generally only useful when bind --- was given $aNY\_PORT$. - --- | Getting the port of socket. --- `IOError` is thrown if a port is not available. -socketPort :: Socket -- Connected & Bound Socket - -> IO PortNumber -- Port Number of Socket -socketPort sock@(MkSocket _ AF_INET _ _ _) = do - (SockAddrInet port _) <- getSocketName sock - return port -#if defined(IPV6_SOCKET_SUPPORT) -socketPort sock@(MkSocket _ AF_INET6 _ _ _) = do - (SockAddrInet6 port _ _ _) <- getSocketName sock - return port -#endif -socketPort (MkSocket _ family _ _ _) = - ioError $ userError $ - "Network.Socket.socketPort: address family '" ++ show family ++ - "' not supported." - - --- --------------------------------------------------------------------------- --- socketPortSafe --- | Getting the port of socket. -socketPortSafe :: Socket -- Connected & Bound Socket - -> IO (Maybe PortNumber) -- Port Number of Socket -socketPortSafe s = do - sa <- getSocketName s - return $ case sa of - SockAddrInet port _ -> Just port -#if defined(IPV6_SOCKET_SUPPORT) - SockAddrInet6 port _ _ _ -> Just port -#endif - _ -> Nothing - --- --------------------------------------------------------------------------- --- getPeerName - --- Calling $getPeerName$ returns the address details of the machine, --- other than the local one, which is connected to the socket. 
This is --- used in programs such as FTP to determine where to send the --- returning data. The corresponding call to get the details of the --- local machine is $getSocketName$. - -getPeerName :: Socket -> IO SockAddr -getPeerName (MkSocket s family _ _ _) = do - withNewSockAddr family $ \ptr sz -> do - with (fromIntegral sz) $ \int_star -> do - throwSocketErrorIfMinus1Retry_ "Network.Socket.getPeerName" $ - c_getpeername s ptr int_star - _sz <- peek int_star - peekSockAddr ptr - -getSocketName :: Socket -> IO SockAddr -getSocketName (MkSocket s family _ _ _) = do - withNewSockAddr family $ \ptr sz -> do - with (fromIntegral sz) $ \int_star -> do - throwSocketErrorIfMinus1Retry_ "Network.Socket.getSocketName" $ - c_getsockname s ptr int_star - peekSockAddr ptr - ------------------------------------------------------------------------------ --- Socket Properties - --- | Socket options for use with 'setSocketOption' and 'getSocketOption'. --- --- The existence of a constructor does not imply that the relevant option --- is supported on your system: see 'isSupportedSocketOption' -data SocketOption - = Debug -- ^ SO_DEBUG - | ReuseAddr -- ^ SO_REUSEADDR - | Type -- ^ SO_TYPE - | SoError -- ^ SO_ERROR - | DontRoute -- ^ SO_DONTROUTE - | Broadcast -- ^ SO_BROADCAST - | SendBuffer -- ^ SO_SNDBUF - | RecvBuffer -- ^ SO_RCVBUF - | KeepAlive -- ^ SO_KEEPALIVE - | OOBInline -- ^ SO_OOBINLINE - | TimeToLive -- ^ IP_TTL - | MaxSegment -- ^ TCP_MAXSEG - | NoDelay -- ^ TCP_NODELAY - | Cork -- ^ TCP_CORK - | Linger -- ^ SO_LINGER - | ReusePort -- ^ SO_REUSEPORT - | RecvLowWater -- ^ SO_RCVLOWAT - | SendLowWater -- ^ SO_SNDLOWAT - | RecvTimeOut -- ^ SO_RCVTIMEO - | SendTimeOut -- ^ SO_SNDTIMEO - | UseLoopBack -- ^ SO_USELOOPBACK - | UserTimeout -- ^ TCP_USER_TIMEOUT - | IPv6Only -- ^ IPV6_V6ONLY - | CustomSockOpt (CInt, CInt) - deriving (Show, Typeable) - --- | Does the 'SocketOption' exist on this system? 
-isSupportedSocketOption :: SocketOption -> Bool -isSupportedSocketOption = isJust . packSocketOption - --- | For a socket option, return Just (level, value) where level is the --- corresponding C option level constant (e.g. SOL_SOCKET) and value is --- the option constant itself (e.g. SO_DEBUG) --- If either constant does not exist, return Nothing. -packSocketOption :: SocketOption -> Maybe (CInt, CInt) -packSocketOption so = - -- The Just here is a hack to disable GHC's overlapping pattern detection: - -- the problem is if all constants are present, the fallback pattern is - -- redundant, but if they aren't then it isn't. Hence we introduce an - -- extra pattern (Nothing) that can't possibly happen, so that the - -- fallback is always (in principle) necessary. - -- I feel a little bad for including this, but such are the sacrifices we - -- make while working with CPP - excluding the fallback pattern correctly - -- would be a serious nuisance. - -- (NB: comments elsewhere in this file refer to this one) - case Just so of -#ifdef SOL_SOCKET -#ifdef SO_DEBUG - Just Debug -> Just ((#const SOL_SOCKET), (#const SO_DEBUG)) -#endif -#ifdef SO_REUSEADDR - Just ReuseAddr -> Just ((#const SOL_SOCKET), (#const SO_REUSEADDR)) -#endif -#ifdef SO_TYPE - Just Type -> Just ((#const SOL_SOCKET), (#const SO_TYPE)) -#endif -#ifdef SO_ERROR - Just SoError -> Just ((#const SOL_SOCKET), (#const SO_ERROR)) -#endif -#ifdef SO_DONTROUTE - Just DontRoute -> Just ((#const SOL_SOCKET), (#const SO_DONTROUTE)) -#endif -#ifdef SO_BROADCAST - Just Broadcast -> Just ((#const SOL_SOCKET), (#const SO_BROADCAST)) -#endif -#ifdef SO_SNDBUF - Just SendBuffer -> Just ((#const SOL_SOCKET), (#const SO_SNDBUF)) -#endif -#ifdef SO_RCVBUF - Just RecvBuffer -> Just ((#const SOL_SOCKET), (#const SO_RCVBUF)) -#endif -#ifdef SO_KEEPALIVE - Just KeepAlive -> Just ((#const SOL_SOCKET), (#const SO_KEEPALIVE)) -#endif -#ifdef SO_OOBINLINE - Just OOBInline -> Just ((#const SOL_SOCKET), (#const SO_OOBINLINE)) -#endif 
-#ifdef SO_LINGER - Just Linger -> Just ((#const SOL_SOCKET), (#const SO_LINGER)) -#endif -#ifdef SO_REUSEPORT - Just ReusePort -> Just ((#const SOL_SOCKET), (#const SO_REUSEPORT)) -#endif -#ifdef SO_RCVLOWAT - Just RecvLowWater -> Just ((#const SOL_SOCKET), (#const SO_RCVLOWAT)) -#endif -#ifdef SO_SNDLOWAT - Just SendLowWater -> Just ((#const SOL_SOCKET), (#const SO_SNDLOWAT)) -#endif -#ifdef SO_RCVTIMEO - Just RecvTimeOut -> Just ((#const SOL_SOCKET), (#const SO_RCVTIMEO)) -#endif -#ifdef SO_SNDTIMEO - Just SendTimeOut -> Just ((#const SOL_SOCKET), (#const SO_SNDTIMEO)) -#endif -#ifdef SO_USELOOPBACK - Just UseLoopBack -> Just ((#const SOL_SOCKET), (#const SO_USELOOPBACK)) -#endif -#endif // SOL_SOCKET -#if HAVE_DECL_IPPROTO_IP -#ifdef IP_TTL - Just TimeToLive -> Just ((#const IPPROTO_IP), (#const IP_TTL)) -#endif -#endif // HAVE_DECL_IPPROTO_IP -#if HAVE_DECL_IPPROTO_TCP -#ifdef TCP_MAXSEG - Just MaxSegment -> Just ((#const IPPROTO_TCP), (#const TCP_MAXSEG)) -#endif -#ifdef TCP_NODELAY - Just NoDelay -> Just ((#const IPPROTO_TCP), (#const TCP_NODELAY)) -#endif -#ifdef TCP_USER_TIMEOUT - Just UserTimeout -> Just ((#const IPPROTO_TCP), (#const TCP_USER_TIMEOUT)) -#endif -#ifdef TCP_CORK - Just Cork -> Just ((#const IPPROTO_TCP), (#const TCP_CORK)) -#endif -#endif // HAVE_DECL_IPPROTO_TCP -#if HAVE_DECL_IPPROTO_IPV6 -#if HAVE_DECL_IPV6_V6ONLY - Just IPv6Only -> Just ((#const IPPROTO_IPV6), (#const IPV6_V6ONLY)) -#endif -#endif // HAVE_DECL_IPPROTO_IPV6 - Just (CustomSockOpt opt) -> Just opt - _ -> Nothing - --- | Return the option level and option value if they exist, --- otherwise throw an error that begins "Network.Socket." ++ the String --- parameter -packSocketOption' :: String -> SocketOption -> IO (CInt, CInt) -packSocketOption' caller so = maybe err return (packSocketOption so) - where - err = ioError . userError . 
concat $ ["Network.Socket.", caller, - ": socket option ", show so, " unsupported on this system"] - --- | Set a socket option that expects an Int value. --- There is currently no API to set e.g. the timeval socket options -setSocketOption :: Socket - -> SocketOption -- Option Name - -> Int -- Option Value - -> IO () -setSocketOption (MkSocket s _ _ _ _) so v = do - (level, opt) <- packSocketOption' "setSocketOption" so - with (fromIntegral v) $ \ptr_v -> do - throwSocketErrorIfMinus1_ "Network.Socket.setSocketOption" $ - c_setsockopt s level opt ptr_v - (fromIntegral (sizeOf (undefined :: CInt))) - return () - - --- | Get a socket option that gives an Int value. --- There is currently no API to get e.g. the timeval socket options -getSocketOption :: Socket - -> SocketOption -- Option Name - -> IO Int -- Option Value -getSocketOption (MkSocket s _ _ _ _) so = do - (level, opt) <- packSocketOption' "getSocketOption" so - alloca $ \ptr_v -> - with (fromIntegral (sizeOf (undefined :: CInt))) $ \ptr_sz -> do - throwSocketErrorIfMinus1Retry_ "Network.Socket.getSocketOption" $ - c_getsockopt s level opt ptr_v ptr_sz - fromIntegral `liftM` peek ptr_v - - --- | Getting process ID, user ID and group ID for UNIX-domain sockets. --- --- This is implemented with SO_PEERCRED on Linux and getpeereid() --- on BSD variants. Unfortunately, on some BSD variants --- getpeereid() returns unexpected results, rather than an error, --- for AF_INET sockets. It is the user's responsibility to make sure --- that the socket is a UNIX-domain socket. --- Also, on some BSD variants, getpeereid() does not return credentials --- for sockets created via 'socketPair', only separately created and then --- explicitly connected UNIX-domain sockets work on such systems. --- --- Since 2.7.0.0. 
-getPeerCredential :: Socket -> IO (Maybe CUInt, Maybe CUInt, Maybe CUInt) -#ifdef HAVE_STRUCT_UCRED -getPeerCredential sock = do - (pid, uid, gid) <- getPeerCred sock - if uid == maxBound then - return (Nothing, Nothing, Nothing) - else - return (Just pid, Just uid, Just gid) -#elif defined(HAVE_GETPEEREID) -getPeerCredential sock = E.handle (\(E.SomeException _) -> return (Nothing,Nothing,Nothing)) $ do - (uid, gid) <- getPeerEid sock - return (Nothing, Just uid, Just gid) -#else -getPeerCredential _ = return (Nothing, Nothing, Nothing) -#endif - -#if defined(HAVE_STRUCT_UCRED) || defined(HAVE_GETPEEREID) -{-# DEPRECATED getPeerCred "Use getPeerCredential instead" #-} --- | Returns the processID, userID and groupID of the socket's peer. --- --- Only available on platforms that support SO_PEERCRED or GETPEEREID(3) --- on domain sockets. --- GETPEEREID(3) returns userID and groupID. processID is always 0. -getPeerCred :: Socket -> IO (CUInt, CUInt, CUInt) -getPeerCred sock = do -#ifdef HAVE_STRUCT_UCRED - let fd = fdSocket sock - let sz = (#const sizeof(struct ucred)) - allocaBytes sz $ \ ptr_cr -> - with (fromIntegral sz) $ \ ptr_sz -> do - _ <- ($) throwSocketErrorIfMinus1Retry "Network.Socket.getPeerCred" $ - c_getsockopt fd (#const SOL_SOCKET) (#const SO_PEERCRED) ptr_cr ptr_sz - pid <- (#peek struct ucred, pid) ptr_cr - uid <- (#peek struct ucred, uid) ptr_cr - gid <- (#peek struct ucred, gid) ptr_cr - return (pid, uid, gid) -#else - (uid,gid) <- getPeerEid sock - return (0,uid,gid) -#endif - -#ifdef HAVE_GETPEEREID -{-# DEPRECATED getPeerEid "Use getPeerCredential instead" #-} --- | The getpeereid() function returns the effective user and group IDs of the --- peer connected to a UNIX-domain socket -getPeerEid :: Socket -> IO (CUInt, CUInt) -getPeerEid sock = do - let fd = fdSocket sock - alloca $ \ ptr_uid -> - alloca $ \ ptr_gid -> do - throwSocketErrorIfMinus1Retry_ "Network.Socket.getPeerEid" $ - c_getpeereid fd ptr_uid ptr_gid - uid <- peek ptr_uid - gid 
<- peek ptr_gid - return (uid, gid) -#endif -#endif - --- | Whether or not UNIX-domain sockets are available. --- --- Since 3.0.0.0. -isUnixDomainSocketAvailable :: Bool -#if defined(DOMAIN_SOCKET_SUPPORT) -isUnixDomainSocketAvailable = True -#else -isUnixDomainSocketAvailable = False -#endif - -##if !(MIN_VERSION_base(4,3,1)) -closeFdWith closer fd = closer fd -##endif - --- sending/receiving ancillary socket data; low-level mechanism --- for transmitting file descriptors, mainly. -sendFd :: Socket -> CInt -> IO () -#if defined(DOMAIN_SOCKET_SUPPORT) -sendFd sock outfd = do - _ <- throwSocketErrorWaitWrite sock "Network.Socket.sendFd" $ c_sendFd (fdSocket sock) outfd - return () -foreign import ccall SAFE_ON_WIN "sendFd" c_sendFd :: CInt -> CInt -> IO CInt -#else -sendFd _ _ = error "Network.Socket.sendFd" -#endif - --- | Receive a file descriptor over a domain socket. Note that the resulting --- file descriptor may have to be put into non-blocking mode in order to be --- used safely. See 'setNonBlockIfNeeded'. -recvFd :: Socket -> IO CInt -#if defined(DOMAIN_SOCKET_SUPPORT) -recvFd sock = do - theFd <- throwSocketErrorWaitRead sock "Network.Socket.recvFd" $ - c_recvFd (fdSocket sock) - return theFd -foreign import ccall SAFE_ON_WIN "recvFd" c_recvFd :: CInt -> IO CInt -#else -recvFd _ = error "Network.Socket.recvFd" -#endif - --- --------------------------------------------------------------------------- --- Utility Functions - -{-# DEPRECATED aNY_PORT "Use defaultPort instead" #-} -aNY_PORT :: PortNumber -aNY_PORT = 0 - -defaultPort :: PortNumber -defaultPort = 0 - --- | The IPv4 wild card address. - -{-# DEPRECATED iNADDR_ANY "Use getAddrInfo instead" #-} -iNADDR_ANY :: HostAddress -iNADDR_ANY = htonl (#const INADDR_ANY) - --- | Converts the from host byte order to network byte order. -foreign import CALLCONV unsafe "htonl" htonl :: Word32 -> Word32 --- | Converts the from network byte order to host byte order. 
-foreign import CALLCONV unsafe "ntohl" ntohl :: Word32 -> Word32 - -{-# DEPRECATED htonl "Use getAddrInfo instead" #-} -{-# DEPRECATED ntohl "Use getAddrInfo instead" #-} - -#if defined(IPV6_SOCKET_SUPPORT) --- | The IPv6 wild card address. - -{-# DEPRECATED iN6ADDR_ANY "Use getAddrInfo instead" #-} -iN6ADDR_ANY :: HostAddress6 -iN6ADDR_ANY = (0, 0, 0, 0) -#endif - -{-# DEPRECATED sOMAXCONN "Use maxListenQueue instead" #-} -sOMAXCONN :: Int -sOMAXCONN = #const SOMAXCONN - -{-# DEPRECATED sOL_SOCKET "This is not necessary anymore" #-} -sOL_SOCKET :: Int -sOL_SOCKET = #const SOL_SOCKET - -#ifdef SCM_RIGHTS -{-# DEPRECATED sCM_RIGHTS "This is not necessary anymore" #-} -sCM_RIGHTS :: Int -sCM_RIGHTS = #const SCM_RIGHTS -#endif - --- | This is the value of SOMAXCONN, typically 128. --- 128 is good enough for normal network servers but --- is too small for high performance servers. -maxListenQueue :: Int -maxListenQueue = sOMAXCONN - --- ----------------------------------------------------------------------------- - -data ShutdownCmd - = ShutdownReceive - | ShutdownSend - | ShutdownBoth - deriving Typeable - -sdownCmdToInt :: ShutdownCmd -> CInt -sdownCmdToInt ShutdownReceive = 0 -sdownCmdToInt ShutdownSend = 1 -sdownCmdToInt ShutdownBoth = 2 - --- | Shut down one or both halves of the connection, depending on the --- second argument to the function. If the second argument is --- 'ShutdownReceive', further receives are disallowed. If it is --- 'ShutdownSend', further sends are disallowed. If it is --- 'ShutdownBoth', further sends and receives are disallowed. -shutdown :: Socket -> ShutdownCmd -> IO () -shutdown (MkSocket s _ _ _ _) stype = do - throwSocketErrorIfMinus1Retry_ "Network.Socket.shutdown" $ - c_shutdown s (sdownCmdToInt stype) - return () - --- ----------------------------------------------------------------------------- - --- | Close the socket. This function does not throw exceptions even if --- the underlying system call returns errors. 
--- --- Sending data to or receiving data from closed socket --- may lead to undefined behaviour. --- --- If multiple threads use the same socket and one uses 'fdSocket' and --- the other use 'close', unexpected behavior may happen. --- For more information, please refer to the documentation of 'fdSocket'. -close :: Socket -> IO () -close (MkSocket s _ _ _ socketStatus) = modifyMVar_ socketStatus $ \ status -> - case status of - ConvertedToHandle -> return ConvertedToHandle - Closed -> return Closed - _ -> do - -- closeFdWith avoids the deadlock of IO manager. - closeFdWith (void . c_close . fromIntegral) (fromIntegral s) - return Closed - --- | Close the socket. This function throws exceptions if --- the underlying system call returns errors. --- --- Sending data to or receiving data from closed socket --- may lead to undefined behaviour. -close' :: Socket -> IO () -close' (MkSocket s _ _ _ socketStatus) = modifyMVar_ socketStatus $ \ status -> - case status of - ConvertedToHandle -> ioError (userError ("close: converted to a Handle, use hClose instead")) - Closed -> return Closed - _ -> do - -- closeFdWith avoids the deadlock of IO manager. - -- closeFd throws exceptions. - closeFdWith (closeFd . fromIntegral) (fromIntegral s) - return Closed - --- ----------------------------------------------------------------------------- - --- | Determines whether 'close' has been used on the 'Socket'. This --- does /not/ indicate any status about the socket beyond this. If the --- socket has been closed remotely, this function can still return --- 'True'. 
-isConnected :: Socket -> IO Bool -isConnected (MkSocket _ _ _ _ status) = do - value <- readMVar status - return (value == Connected) -{-# DEPRECATED isConnected "SocketStatus will be removed" #-} - --- ----------------------------------------------------------------------------- --- Socket Predicates - -isBound :: Socket -> IO Bool -isBound (MkSocket _ _ _ _ status) = do - value <- readMVar status - return (value == Bound) -{-# DEPRECATED isBound "SocketStatus will be removed" #-} - -isListening :: Socket -> IO Bool -isListening (MkSocket _ _ _ _ status) = do - value <- readMVar status - return (value == Listening) -{-# DEPRECATED isListening "SocketStatus will be removed" #-} - -isReadable :: Socket -> IO Bool -isReadable (MkSocket _ _ _ _ status) = do - value <- readMVar status - return (value == Listening || value == Connected) -{-# DEPRECATED isReadable "SocketStatus will be removed" #-} - -isWritable :: Socket -> IO Bool -isWritable = isReadable -- sort of. -{-# DEPRECATED isWritable "SocketStatus will be removed" #-} - -isAcceptable :: Family -> SocketType -> SocketStatus -> Bool -#if defined(DOMAIN_SOCKET_SUPPORT) -isAcceptable AF_UNIX sockTyp status - | sockTyp == Stream || sockTyp == SeqPacket = - status == Connected || status == Bound || status == Listening -isAcceptable AF_UNIX _ _ = False -#endif -isAcceptable _ _ status = status == Connected || status == Listening -{-# DEPRECATED isAcceptable "SocketStatus will be removed" #-} - --- ----------------------------------------------------------------------------- --- Internet address manipulation routines: - -{-# DEPRECATED inet_addr "Use \"getAddrInfo\" instead" #-} -inet_addr :: String -> IO HostAddress -inet_addr ipstr = withSocketsDo $ do - withCString ipstr $ \str -> do - had <- c_inet_addr str - if had == maxBound - then ioError $ userError $ - "Network.Socket.inet_addr: Malformed address: " ++ ipstr - else return had -- network byte order - -{-# DEPRECATED inet_ntoa "Use \"getNameInfo\" instead" 
#-} -inet_ntoa :: HostAddress -> IO String -inet_ntoa haddr = withSocketsDo $ do - pstr <- c_inet_ntoa haddr - peekCString pstr - --- | Turns a Socket into an 'Handle'. By default, the new handle is --- unbuffered. Use 'System.IO.hSetBuffering' to change the buffering. --- --- Note that since a 'Handle' is automatically closed by a finalizer --- when it is no longer referenced, you should avoid doing any more --- operations on the 'Socket' after calling 'socketToHandle'. To --- close the 'Socket' after 'socketToHandle', call 'System.IO.hClose' --- on the 'Handle'. - -socketToHandle :: Socket -> IOMode -> IO Handle -socketToHandle s@(MkSocket fd _ _ _ socketStatus) mode = do - modifyMVar socketStatus $ \ status -> - if status == ConvertedToHandle - then ioError (userError ("socketToHandle: already a Handle")) - else do - h <- fdToHandle' (fromIntegral fd) (Just GHC.IO.Device.Stream) True (show s) mode True{-bin-} - hSetBuffering h NoBuffering - return (ConvertedToHandle, h) - --- | Pack a list of values into a bitmask. The possible mappings from --- value to bit-to-set are given as the first argument. We assume --- that each value can cause exactly one bit to be set; unpackBits will --- break if this property is not true. - -packBits :: (Eq a, Num b, Bits b) => [(a, b)] -> [a] -> b - -packBits mapping xs = foldl' pack 0 mapping - where pack acc (k, v) | k `elem` xs = acc .|. v - | otherwise = acc - --- | Unpack a bitmask into a list of values. - -unpackBits :: (Num b, Bits b) => [(a, b)] -> b -> [a] - --- Be permissive and ignore unknown bit values. At least on OS X, --- getaddrinfo returns an ai_flags field with bits set that have no --- entry in . -unpackBits [] _ = [] -unpackBits ((k,v):xs) r - | r .&. v /= 0 = k : unpackBits xs (r .&. 
complement v) - | otherwise = unpackBits xs r - ------------------------------------------------------------------------------ --- Address and service lookups - -#if defined(IPV6_SOCKET_SUPPORT) - --- | Flags that control the querying behaviour of 'getAddrInfo'. --- For more information, see -data AddrInfoFlag = - -- | The list of returned 'AddrInfo' values will - -- only contain IPv4 addresses if the local system has at least - -- one IPv4 interface configured, and likewise for IPv6. - -- (Only some platforms support this.) - AI_ADDRCONFIG - -- | If 'AI_ALL' is specified, return all matching IPv6 and - -- IPv4 addresses. Otherwise, this flag has no effect. - -- (Only some platforms support this.) - | AI_ALL - -- | The 'addrCanonName' field of the first returned - -- 'AddrInfo' will contain the "canonical name" of the host. - | AI_CANONNAME - -- | The 'HostName' argument /must/ be a numeric - -- address in string form, and network name lookups will not be - -- attempted. - | AI_NUMERICHOST - -- | The 'ServiceName' argument /must/ be a port - -- number in string form, and service name lookups will not be - -- attempted. (Only some platforms support this.) - | AI_NUMERICSERV - -- | If no 'HostName' value is provided, the network - -- address in each 'SockAddr' - -- will be left as a "wild card". - -- This is useful for server applications that - -- will accept connections from any client. - | AI_PASSIVE - -- | If an IPv6 lookup is performed, and no IPv6 - -- addresses are found, IPv6-mapped IPv4 addresses will be - -- returned. (Only some platforms support this.) 
- | AI_V4MAPPED - deriving (Eq, Read, Show, Typeable) - -aiFlagMapping :: [(AddrInfoFlag, CInt)] - -aiFlagMapping = - [ -#if HAVE_DECL_AI_ADDRCONFIG - (AI_ADDRCONFIG, #const AI_ADDRCONFIG), -#else - (AI_ADDRCONFIG, 0), -#endif -#if HAVE_DECL_AI_ALL - (AI_ALL, #const AI_ALL), -#else - (AI_ALL, 0), -#endif - (AI_CANONNAME, #const AI_CANONNAME), - (AI_NUMERICHOST, #const AI_NUMERICHOST), -#if HAVE_DECL_AI_NUMERICSERV - (AI_NUMERICSERV, #const AI_NUMERICSERV), -#else - (AI_NUMERICSERV, 0), -#endif - (AI_PASSIVE, #const AI_PASSIVE), -#if HAVE_DECL_AI_V4MAPPED - (AI_V4MAPPED, #const AI_V4MAPPED) -#else - (AI_V4MAPPED, 0) -#endif - ] - --- | Indicate whether the given 'AddrInfoFlag' will have any effect on --- this system. -addrInfoFlagImplemented :: AddrInfoFlag -> Bool -addrInfoFlagImplemented f = packBits aiFlagMapping [f] /= 0 - -data AddrInfo = - AddrInfo { - addrFlags :: [AddrInfoFlag], - addrFamily :: Family, - addrSocketType :: SocketType, - addrProtocol :: ProtocolNumber, - addrAddress :: SockAddr, - addrCanonName :: Maybe String - } - deriving (Eq, Show, Typeable) - -instance Storable AddrInfo where - sizeOf _ = #const sizeof(struct addrinfo) - alignment _ = alignment (undefined :: CInt) - - peek p = do - ai_flags <- (#peek struct addrinfo, ai_flags) p - ai_family <- (#peek struct addrinfo, ai_family) p - ai_socktype <- (#peek struct addrinfo, ai_socktype) p - ai_protocol <- (#peek struct addrinfo, ai_protocol) p - ai_addr <- (#peek struct addrinfo, ai_addr) p >>= peekSockAddr - ai_canonname_ptr <- (#peek struct addrinfo, ai_canonname) p - - ai_canonname <- if ai_canonname_ptr == nullPtr - then return Nothing - else liftM Just $ peekCString ai_canonname_ptr - - socktype <- unpackSocketType' "AddrInfo.peek" ai_socktype - return (AddrInfo - { - addrFlags = unpackBits aiFlagMapping ai_flags, - addrFamily = unpackFamily ai_family, - addrSocketType = socktype, - addrProtocol = ai_protocol, - addrAddress = ai_addr, - addrCanonName = ai_canonname - }) - - poke p 
(AddrInfo flags family socketType protocol _ _) = do - c_stype <- packSocketTypeOrThrow "AddrInfo.poke" socketType - - (#poke struct addrinfo, ai_flags) p (packBits aiFlagMapping flags) - (#poke struct addrinfo, ai_family) p (packFamily family) - (#poke struct addrinfo, ai_socktype) p c_stype - (#poke struct addrinfo, ai_protocol) p protocol - - -- stuff below is probably not needed, but let's zero it for safety - - (#poke struct addrinfo, ai_addrlen) p (0::CSize) - (#poke struct addrinfo, ai_addr) p nullPtr - (#poke struct addrinfo, ai_canonname) p nullPtr - (#poke struct addrinfo, ai_next) p nullPtr - --- | Flags that control the querying behaviour of 'getNameInfo'. --- For more information, see -data NameInfoFlag = - -- | Resolve a datagram-based service name. This is - -- required only for the few protocols that have different port - -- numbers for their datagram-based versions than for their - -- stream-based versions. - NI_DGRAM - -- | If the hostname cannot be looked up, an IO error is thrown. - | NI_NAMEREQD - -- | If a host is local, return only the hostname part of the FQDN. - | NI_NOFQDN - -- | The name of the host is not looked up. - -- Instead, a numeric representation of the host's - -- address is returned. For an IPv4 address, this will be a - -- dotted-quad string. For IPv6, it will be colon-separated - -- hexadecimal. - | NI_NUMERICHOST - -- | The name of the service is not - -- looked up. Instead, a numeric representation of the - -- service is returned. - | NI_NUMERICSERV - deriving (Eq, Read, Show, Typeable) - -niFlagMapping :: [(NameInfoFlag, CInt)] - -niFlagMapping = [(NI_DGRAM, #const NI_DGRAM), - (NI_NAMEREQD, #const NI_NAMEREQD), - (NI_NOFQDN, #const NI_NOFQDN), - (NI_NUMERICHOST, #const NI_NUMERICHOST), - (NI_NUMERICSERV, #const NI_NUMERICSERV)] - --- | Default hints for address lookup with 'getAddrInfo'. The values --- of the 'addrAddress' and 'addrCanonName' fields are 'undefined', --- and are never inspected by 'getAddrInfo'. 
--- --- >>> addrFlags defaultHints --- [] --- >>> addrFamily defaultHints --- AF_UNSPEC --- >>> addrSocketType defaultHints --- NoSocketType --- >>> addrProtocol defaultHints --- 0 - -defaultHints :: AddrInfo -defaultHints = AddrInfo { - addrFlags = [], - addrFamily = AF_UNSPEC, - addrSocketType = NoSocketType, - addrProtocol = defaultProtocol, - addrAddress = undefined, - addrCanonName = undefined - } - --- | Shows the fields of 'defaultHints', without inspecting the by-default undefined fields 'addrAddress' and 'addrCanonName'. -showDefaultHints :: AddrInfo -> String -showDefaultHints AddrInfo{..} = concat - [ "AddrInfo {" - , "addrFlags = " - , show addrFlags - , ", addrFamily = " - , show addrFamily - , ", addrSocketType = " - , show addrSocketType - , ", addrProtocol = " - , show addrProtocol - , ", addrAddress = " - , "" - , ", addrCanonName = " - , "" - , "}" - ] - --- | Resolve a host or service name to one or more addresses. --- The 'AddrInfo' values that this function returns contain 'SockAddr' --- values that you can pass directly to 'connect' or --- 'bind'. --- --- This function is protocol independent. It can return both IPv4 and --- IPv6 address information. --- --- The 'AddrInfo' argument specifies the preferred query behaviour, --- socket options, or protocol. You can override these conveniently --- using Haskell's record update syntax on 'defaultHints', for example --- as follows: --- --- >>> let hints = defaultHints { addrFlags = [AI_NUMERICHOST], addrSocketType = Stream } --- --- You must provide a 'Just' value for at least one of the 'HostName' --- or 'ServiceName' arguments. 'HostName' can be either a numeric --- network address (dotted quad for IPv4, colon-separated hex for --- IPv6) or a hostname. In the latter case, its addresses will be --- looked up unless 'AI_NUMERICHOST' is specified as a hint. 
If you --- do not provide a 'HostName' value /and/ do not set 'AI_PASSIVE' as --- a hint, network addresses in the result will contain the address of --- the loopback interface. --- --- If the query fails, this function throws an IO exception instead of --- returning an empty list. Otherwise, it returns a non-empty list --- of 'AddrInfo' values. --- --- There are several reasons why a query might result in several --- values. For example, the queried-for host could be multihomed, or --- the service might be available via several protocols. --- --- Note: the order of arguments is slightly different to that defined --- for @getaddrinfo@ in RFC 2553. The 'AddrInfo' parameter comes first --- to make partial application easier. --- --- >>> addr:_ <- getAddrInfo (Just hints) (Just "127.0.0.1") (Just "http") --- >>> addrAddress addr --- 127.0.0.1:80 - -getAddrInfo :: Maybe AddrInfo -- ^ preferred socket type or protocol - -> Maybe HostName -- ^ host name to look up - -> Maybe ServiceName -- ^ service name to look up - -> IO [AddrInfo] -- ^ resolved addresses, with "best" first - -getAddrInfo hints node service = withSocketsDo $ - maybeWith withCString node $ \c_node -> - maybeWith withCString service $ \c_service -> - maybeWith with filteredHints $ \c_hints -> - alloca $ \ptr_ptr_addrs -> do - ret <- c_getaddrinfo c_node c_service c_hints ptr_ptr_addrs - case ret of - 0 -> do ptr_addrs <- peek ptr_ptr_addrs - ais <- followAddrInfo ptr_addrs - c_freeaddrinfo ptr_addrs - return ais - _ -> do err <- gai_strerror ret - let message = concat - [ "Network.Socket.getAddrInfo (called with preferred socket type/protocol: " - , maybe (show hints) showDefaultHints hints - , ", host name: " - , show node - , ", service name: " - , show service - , ")" - ] - ioError (ioeSetErrorString - (mkIOError NoSuchThing message Nothing - Nothing) err) - -- Leaving out the service and using AI_NUMERICSERV causes a - -- segfault on OS X 10.8.2. 
This code removes AI_NUMERICSERV - -- (which has no effect) in that case. - where -#if defined(darwin_HOST_OS) - filteredHints = case service of - Nothing -> fmap (\ h -> h { addrFlags = delete AI_NUMERICSERV (addrFlags h) }) hints - _ -> hints -#else - filteredHints = hints -#endif - -followAddrInfo :: Ptr AddrInfo -> IO [AddrInfo] - -followAddrInfo ptr_ai | ptr_ai == nullPtr = return [] - | otherwise = do - a <- peek ptr_ai - as <- (#peek struct addrinfo, ai_next) ptr_ai >>= followAddrInfo - return (a:as) - -foreign import ccall safe "hsnet_getaddrinfo" - c_getaddrinfo :: CString -> CString -> Ptr AddrInfo -> Ptr (Ptr AddrInfo) - -> IO CInt - -foreign import ccall safe "hsnet_freeaddrinfo" - c_freeaddrinfo :: Ptr AddrInfo -> IO () - -gai_strerror :: CInt -> IO String - -#ifdef HAVE_GAI_STRERROR -gai_strerror n = c_gai_strerror n >>= peekCString - -foreign import ccall safe "gai_strerror" - c_gai_strerror :: CInt -> IO CString -#else -gai_strerror n = ioError $ userError $ "Network.Socket.gai_strerror not supported: " ++ show n -#endif - -withCStringIf :: Bool -> Int -> (CSize -> CString -> IO a) -> IO a -withCStringIf False _ f = f 0 nullPtr -withCStringIf True n f = allocaBytes n (f (fromIntegral n)) - --- | Resolve an address to a host or service name. --- This function is protocol independent. --- The list of 'NameInfoFlag' values controls query behaviour. --- --- If a host or service's name cannot be looked up, then the numeric --- form of the address or service will be returned. --- --- If the query fails, this function throws an IO exception. 
--- --- Example: --- @ --- (hostName, _) <- getNameInfo [] True False myAddress --- @ - -getNameInfo :: [NameInfoFlag] -- ^ flags to control lookup behaviour - -> Bool -- ^ whether to look up a hostname - -> Bool -- ^ whether to look up a service name - -> SockAddr -- ^ the address to look up - -> IO (Maybe HostName, Maybe ServiceName) - -getNameInfo flags doHost doService addr = withSocketsDo $ - withCStringIf doHost (#const NI_MAXHOST) $ \c_hostlen c_host -> - withCStringIf doService (#const NI_MAXSERV) $ \c_servlen c_serv -> do - withSockAddr addr $ \ptr_addr sz -> do - ret <- c_getnameinfo ptr_addr (fromIntegral sz) c_host c_hostlen - c_serv c_servlen (packBits niFlagMapping flags) - case ret of - 0 -> do - let peekIf doIf c_val = if doIf - then liftM Just $ peekCString c_val - else return Nothing - host <- peekIf doHost c_host - serv <- peekIf doService c_serv - return (host, serv) - _ -> do err <- gai_strerror ret - let message = concat - [ "Network.Socket.getNameInfo (called with flags: " - , show flags - , ", hostname lookup: " - , show doHost - , ", service name lookup: " - , show doService - , ", socket address: " - , show addr - , ")" - ] - ioError (ioeSetErrorString - (mkIOError NoSuchThing message Nothing - Nothing) err) - -foreign import ccall safe "hsnet_getnameinfo" - c_getnameinfo :: Ptr SockAddr -> CInt{-CSockLen???-} -> CString -> CSize -> CString - -> CSize -> CInt -> IO CInt -#endif - -mkInvalidRecvArgError :: String -> IOError -mkInvalidRecvArgError loc = ioeSetErrorString (mkIOError - InvalidArgument - loc Nothing Nothing) "non-positive length" - -mkEOFError :: String -> IOError -mkEOFError loc = ioeSetErrorString (mkIOError EOF loc Nothing Nothing) "end of file" - --- --------------------------------------------------------------------------- --- foreign imports from the C library - -foreign import ccall unsafe "hsnet_inet_ntoa" - c_inet_ntoa :: HostAddress -> IO (Ptr CChar) - -foreign import CALLCONV unsafe "inet_addr" - c_inet_addr :: Ptr 
CChar -> IO HostAddress - -foreign import CALLCONV unsafe "shutdown" - c_shutdown :: CInt -> CInt -> IO CInt - -closeFd :: CInt -> IO () -closeFd fd = throwSocketErrorIfMinus1_ "Network.Socket.close" $ c_close fd - -#if !defined(WITH_WINSOCK) -foreign import ccall unsafe "close" - c_close :: CInt -> IO CInt -#else -foreign import stdcall unsafe "closesocket" - c_close :: CInt -> IO CInt -#endif - -foreign import CALLCONV unsafe "socket" - c_socket :: CInt -> CInt -> CInt -> IO CInt -foreign import CALLCONV unsafe "bind" - c_bind :: CInt -> Ptr SockAddr -> CInt{-CSockLen???-} -> IO CInt -foreign import CALLCONV SAFE_ON_WIN "connect" - c_connect :: CInt -> Ptr SockAddr -> CInt{-CSockLen???-} -> IO CInt -#ifdef HAVE_ACCEPT4 -foreign import CALLCONV unsafe "accept4" - c_accept4 :: CInt -> Ptr SockAddr -> Ptr CInt{-CSockLen???-} -> CInt -> IO CInt -#else -foreign import CALLCONV unsafe "accept" - c_accept :: CInt -> Ptr SockAddr -> Ptr CInt{-CSockLen???-} -> IO CInt -#endif -foreign import CALLCONV unsafe "listen" - c_listen :: CInt -> CInt -> IO CInt - -#if defined(mingw32_HOST_OS) -foreign import CALLCONV safe "accept" - c_accept_safe :: CInt -> Ptr SockAddr -> Ptr CInt{-CSockLen???-} -> IO CInt - -foreign import ccall unsafe "rtsSupportsBoundThreads" threaded :: Bool -#endif - -foreign import CALLCONV unsafe "send" - c_send :: CInt -> Ptr a -> CSize -> CInt -> IO CInt -foreign import CALLCONV SAFE_ON_WIN "sendto" - c_sendto :: CInt -> Ptr a -> CSize -> CInt -> Ptr SockAddr -> CInt -> IO CInt -foreign import CALLCONV unsafe "recv" - c_recv :: CInt -> Ptr CChar -> CSize -> CInt -> IO CInt -foreign import CALLCONV SAFE_ON_WIN "recvfrom" - c_recvfrom :: CInt -> Ptr a -> CSize -> CInt -> Ptr SockAddr -> Ptr CInt -> IO CInt -foreign import CALLCONV unsafe "getpeername" - c_getpeername :: CInt -> Ptr SockAddr -> Ptr CInt -> IO CInt -foreign import CALLCONV unsafe "getsockname" - c_getsockname :: CInt -> Ptr SockAddr -> Ptr CInt -> IO CInt - -foreign import CALLCONV unsafe 
"getsockopt" - c_getsockopt :: CInt -> CInt -> CInt -> Ptr CInt -> Ptr CInt -> IO CInt -foreign import CALLCONV unsafe "setsockopt" - c_setsockopt :: CInt -> CInt -> CInt -> Ptr CInt -> CInt -> IO CInt - -#if defined(HAVE_GETPEEREID) -foreign import CALLCONV unsafe "getpeereid" - c_getpeereid :: CInt -> Ptr CUInt -> Ptr CUInt -> IO CInt -#endif --- --------------------------------------------------------------------------- --- * Deprecated aliases - --- $deprecated-aliases --- --- These aliases are deprecated and should not be used in new code. --- They will be removed in some future version of the package. - -{-# DEPRECATED bindSocket "use 'bind'" #-} - --- | Deprecated alias for 'bind'. -bindSocket :: Socket -- Unconnected Socket - -> SockAddr -- Address to Bind to - -> IO () -bindSocket = bind - -{-# DEPRECATED sClose "use 'close'" #-} - --- | Deprecated alias for 'close'. -sClose :: Socket -> IO () -sClose = close - -{-# DEPRECATED sIsConnected "SocketStatus will be removed" #-} - -sIsConnected :: Socket -> IO Bool -sIsConnected = isConnected - -{-# DEPRECATED sIsBound "SocketStatus will be removed" #-} - -sIsBound :: Socket -> IO Bool -sIsBound = isBound - -{-# DEPRECATED sIsListening "SocketStatus will be removed" #-} - -sIsListening :: Socket -> IO Bool -sIsListening = isListening - -{-# DEPRECATED sIsReadable "SocketStatus will be removed" #-} - -sIsReadable :: Socket -> IO Bool -sIsReadable = isReadable - -{-# DEPRECATED sIsWritable "SocketStatus will be removed" #-} - -sIsWritable :: Socket -> IO Bool -sIsWritable = isWritable - -#if defined(HAVE_IF_NAMETOINDEX) --- | Returns the index corresponding to the interface name. --- --- Since 2.7.0.0. -ifNameToIndex :: String -> IO (Maybe Int) -ifNameToIndex ifname = do - index <- withCString ifname c_if_nametoindex - -- On failure zero is returned. We'll return Nothing. - return $ if index == 0 then Nothing else Just $ fromIntegral index - --- | Returns the interface name corresponding to the index. 
--- --- Since 2.7.0.0. -ifIndexToName :: Int -> IO (Maybe String) -ifIndexToName ifn = allocaBytes 16 $ \ptr -> do -- 16 == IFNAMSIZ - r <- c_if_indextoname (fromIntegral ifn) ptr - if r == nullPtr then - return Nothing - else - Just <$> peekCString ptr - -foreign import CALLCONV safe "if_nametoindex" - c_if_nametoindex :: CString -> IO CUInt - -foreign import CALLCONV safe "if_indextoname" - c_if_indextoname :: CUInt -> CString -> IO CString -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/network.buildinfo.in cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/network.buildinfo.in --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/network.buildinfo.in 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/network.buildinfo.in 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -ghc-options: @EXTRA_CPPFLAGS@ -ghc-prof-options: @EXTRA_CPPFLAGS@ -ld-options: @LDFLAGS@ -cc-options: @EXTRA_CPPFLAGS@ -c-sources: @EXTRA_SRCS@ -extra-libraries: @EXTRA_LIBS@ -install-includes: HsNetworkConfig.h diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/network.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/network.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/network.cabal 2018-10-17 15:59:15.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/network.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,106 +0,0 @@ -name: network -version: 2.7.0.2 -x-revision: 2 -license: BSD3 -license-file: LICENSE -maintainer: Kazu Yamamoto, Evan Borden -synopsis: Low-level networking interface -description: - This package provides a low-level networking interface. - . - In network-2.6 the @Network.URI@ module was split off into its own - package, network-uri-2.6. 
If you're using the @Network.URI@ module - you can automatically get it from the right package by adding this - to your .cabal file: - . - > library - > build-depends: network-uri-flag -category: Network -build-type: Configure -cabal-version: >=1.8 -extra-tmp-files: - config.log config.status autom4te.cache network.buildinfo - include/HsNetworkConfig.h -extra-source-files: - README.md CHANGELOG.md - examples/*.hs tests/*.hs config.guess config.sub install-sh - configure.ac configure network.buildinfo.in - include/HsNetworkConfig.h.in include/HsNet.h include/HsNetDef.h - -- C sources only used on some systems - cbits/ancilData.c cbits/asyncAccept.c cbits/initWinSock.c - cbits/winSockErr.c -homepage: https://github.com/haskell/network -bug-reports: https://github.com/haskell/network/issues -tested-with: GHC == 7.4.2 - , GHC == 7.6.3 - , GHC == 7.8.4 - , GHC == 7.10.3 - , GHC == 8.0.2 - , GHC == 8.2.2 - -library - exposed-modules: - Network - Network.BSD - Network.Socket - Network.Socket.ByteString - Network.Socket.ByteString.Lazy - Network.Socket.Internal - other-modules: - Network.Socket.ByteString.Internal - Network.Socket.Types - - if !os(windows) - other-modules: - Network.Socket.ByteString.IOVec - Network.Socket.ByteString.Lazy.Posix - Network.Socket.ByteString.MsgHdr - if os(windows) - other-modules: - Network.Socket.ByteString.Lazy.Windows - - build-depends: - base >= 4.7 && < 5, - bytestring == 0.10.* - - if !os(windows) - build-depends: - unix >= 2 - - extensions: - CPP, DeriveDataTypeable, ForeignFunctionInterface, TypeSynonymInstances - include-dirs: include - includes: HsNet.h HsNetDef.h - install-includes: HsNet.h HsNetDef.h - c-sources: cbits/HsNet.c - ghc-options: -Wall -fwarn-tabs - -test-suite spec - hs-source-dirs: tests - main-is: Spec.hs - other-modules: RegressionSpec - SimpleSpec - type: exitcode-stdio-1.0 - ghc-options: -Wall -threaded - build-depends: - base < 5, - bytestring, - directory, - HUnit, - network, - hspec - -test-suite doctest - 
hs-source-dirs: tests - main-is: doctests.hs - type: exitcode-stdio-1.0 - - build-depends: - base < 5, - doctest >= 0.10.1 - - ghc-options: -Wall - -source-repository head - type: git - location: git://github.com/haskell/network.git diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Network.hs 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Network.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,480 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-} -{-# OPTIONS_HADDOCK hide #-} ------------------------------------------------------------------------------ --- | --- Module : Network --- Copyright : (c) The University of Glasgow 2001 --- License : BSD-style (see the file libraries/network/LICENSE) --- --- Maintainer : libraries@haskell.org --- Stability : provisional --- Portability : portable --- --- This module is kept for backwards-compatibility. New users are --- encouraged to use "Network.Socket" instead. --- --- "Network" was intended as a \"higher-level\" interface to networking --- facilities, and only supports TCP. --- ------------------------------------------------------------------------------ - -#include "HsNetworkConfig.h" - -#ifdef HAVE_GETADDRINFO --- Use IPv6-capable function definitions if the OS supports it. -#define IPV6_SOCKET_SUPPORT 1 -#endif - -module Network {-# DEPRECATED "The high level Network interface is no longer supported. Please use Network.Socket." #-} - ( - -- * Basic data types - Socket - , PortID(..) 
- , HostName - , PortNumber - - -- * Initialisation - , withSocketsDo - - -- * Server-side connections - , listenOn - , accept - , sClose - - -- * Client-side connections - , connectTo - - -- * Simple sending and receiving - {-$sendrecv-} - , sendTo - , recvFrom - - -- * Miscellaneous - , socketPort - - -- * Networking Issues - -- ** Buffering - {-$buffering-} - - -- ** Improving I\/O Performance over sockets - {-$performance-} - ) where - -import Control.Monad (liftM) -import Data.Maybe (fromJust) -import Network.BSD -import Network.Socket hiding (accept, socketPort, recvFrom, - sendTo, PortNumber, sClose) -import qualified Network.Socket as Socket (accept) -import System.IO -import Prelude -import qualified Control.Exception as Exception - --- --------------------------------------------------------------------------- --- High Level ``Setup'' functions - --- If the @PortID@ specifies a unix family socket and the @Hostname@ --- differs from that returned by @getHostname@ then an error is --- raised. Alternatively an empty string may be given to @connectTo@ --- signalling that the current hostname applies. - -data PortID = - Service String -- Service Name eg "ftp" - | PortNumber PortNumber -- User defined Port Number -#if !defined(mingw32_HOST_OS) - | UnixSocket String -- Unix family socket in file system -#endif - deriving (Show, Eq) - --- | Calling 'connectTo' creates a client side socket which is --- connected to the given host and port. The Protocol and socket type is --- derived from the given port identifier. If a port number is given --- then the result is always an internet family 'Stream' socket. - -connectTo :: HostName -- Hostname - -> PortID -- Port Identifier - -> IO Handle -- Connected Socket - -#if defined(IPV6_SOCKET_SUPPORT) --- IPv6 and IPv4. - -connectTo hostname (Service serv) = connect' "Network.connectTo" hostname serv - -connectTo hostname (PortNumber port) = connect' "Network.connectTo" hostname (show port) -#else --- IPv4 only. 
- -connectTo hostname (Service serv) = do - proto <- getProtocolNumber "tcp" - bracketOnError - (socket AF_INET Stream proto) - (sClose) -- only done if there's an error - (\sock -> do - port <- getServicePortNumber serv - he <- getHostByName hostname - connect sock (SockAddrInet port (hostAddress he)) - socketToHandle sock ReadWriteMode - ) - -connectTo hostname (PortNumber port) = do - proto <- getProtocolNumber "tcp" - bracketOnError - (socket AF_INET Stream proto) - (sClose) -- only done if there's an error - (\sock -> do - he <- getHostByName hostname - connect sock (SockAddrInet port (hostAddress he)) - socketToHandle sock ReadWriteMode - ) -#endif - -#if !defined(mingw32_HOST_OS) -connectTo _ (UnixSocket path) = do - bracketOnError - (socket AF_UNIX Stream 0) - (sClose) - (\sock -> do - connect sock (SockAddrUnix path) - socketToHandle sock ReadWriteMode - ) -#endif - -#if defined(IPV6_SOCKET_SUPPORT) -connect' :: String -> HostName -> ServiceName -> IO Handle - -connect' caller host serv = do - proto <- getProtocolNumber "tcp" - let hints = defaultHints { addrFlags = [AI_ADDRCONFIG] - , addrProtocol = proto - , addrSocketType = Stream } - addrs <- getAddrInfo (Just hints) (Just host) (Just serv) - firstSuccessful caller $ map tryToConnect addrs - where - tryToConnect addr = - bracketOnError - (socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)) - (sClose) -- only done if there's an error - (\sock -> do - connect sock (addrAddress addr) - socketToHandle sock ReadWriteMode - ) -#endif - --- | Creates the server side socket which has been bound to the --- specified port. --- --- 'maxListenQueue' (typically 128) is specified to the listen queue. --- This is good enough for normal network servers but is too small --- for high performance servers. --- --- To avoid the \"Address already in use\" problems, --- the 'ReuseAddr' socket option is set on the listening socket. 
--- --- If available, the 'IPv6Only' socket option is set to 0 --- so that both IPv4 and IPv6 can be accepted with this socket. --- --- If you don't like the behavior above, please use the lower level --- 'Network.Socket.listen' instead. - -listenOn :: PortID -- ^ Port Identifier - -> IO Socket -- ^ Listening Socket - -#if defined(IPV6_SOCKET_SUPPORT) --- IPv6 and IPv4. - -listenOn (Service serv) = listen' serv - -listenOn (PortNumber port) = listen' (show port) -#else --- IPv4 only. - -listenOn (Service serv) = do - proto <- getProtocolNumber "tcp" - bracketOnError - (socket AF_INET Stream proto) - (sClose) - (\sock -> do - port <- getServicePortNumber serv - setSocketOption sock ReuseAddr 1 - bind sock (SockAddrInet port iNADDR_ANY) - listen sock maxListenQueue - return sock - ) - -listenOn (PortNumber port) = do - proto <- getProtocolNumber "tcp" - bracketOnError - (socket AF_INET Stream proto) - (sClose) - (\sock -> do - setSocketOption sock ReuseAddr 1 - bind sock (SockAddrInet port iNADDR_ANY) - listen sock maxListenQueue - return sock - ) -#endif - -#if !defined(mingw32_HOST_OS) -listenOn (UnixSocket path) = - bracketOnError - (socket AF_UNIX Stream 0) - (sClose) - (\sock -> do - setSocketOption sock ReuseAddr 1 - bind sock (SockAddrUnix path) - listen sock maxListenQueue - return sock - ) -#endif - -#if defined(IPV6_SOCKET_SUPPORT) -listen' :: ServiceName -> IO Socket - -listen' serv = do - proto <- getProtocolNumber "tcp" - -- We should probably specify addrFamily = AF_INET6 and the filter - -- code below should be removed. AI_ADDRCONFIG is probably not - -- necessary. But this code is well-tested. So, let's keep it. - let hints = defaultHints { addrFlags = [AI_ADDRCONFIG, AI_PASSIVE] - , addrSocketType = Stream - , addrProtocol = proto } - addrs <- getAddrInfo (Just hints) Nothing (Just serv) - -- Choose an IPv6 socket if exists. This ensures the socket can - -- handle both IPv4 and IPv6 if v6only is false. 
- let addrs' = filter (\x -> addrFamily x == AF_INET6) addrs - addr = if null addrs' then head addrs else head addrs' - bracketOnError - (socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)) - (sClose) - (\sock -> do - setSocketOption sock ReuseAddr 1 - bind sock (addrAddress addr) - listen sock maxListenQueue - return sock - ) -#endif - --- ----------------------------------------------------------------------------- --- accept - --- | Accept a connection on a socket created by 'listenOn'. Normal --- I\/O operations (see "System.IO") can be used on the 'Handle' --- returned to communicate with the client. --- Notice that although you can pass any Socket to Network.accept, --- only sockets of either AF_UNIX, AF_INET, or AF_INET6 will work --- (this shouldn't be a problem, though). When using AF_UNIX, HostName --- will be set to the path of the socket and PortNumber to -1. --- -accept :: Socket -- ^ Listening Socket - -> IO (Handle, - HostName, - PortNumber) -- ^ Triple of: read\/write 'Handle' for - -- communicating with the client, - -- the 'HostName' of the peer socket, and - -- the 'PortNumber' of the remote connection. -accept sock@(MkSocket _ AF_INET _ _ _) = do - ~(sock', (SockAddrInet port haddr)) <- Socket.accept sock - peer <- catchIO - (do - (HostEntry peer _ _ _) <- getHostByAddr AF_INET haddr - return peer - ) - (\_e -> inet_ntoa haddr) - -- if getHostByName fails, we fall back to the IP address - handle <- socketToHandle sock' ReadWriteMode - return (handle, peer, port) -#if defined(IPV6_SOCKET_SUPPORT) -accept sock@(MkSocket _ AF_INET6 _ _ _) = do - (sock', addr) <- Socket.accept sock - peer <- catchIO ((fromJust . fst) `liftM` getNameInfo [] True False addr) $ - \_ -> case addr of - SockAddrInet _ a -> inet_ntoa a - SockAddrInet6 _ _ a _ -> return (show a) -#if defined(mingw32_HOST_OS) - SockAddrUnix {} -> ioError $ userError "Network.accept: peer socket address 'SockAddrUnix' not supported on this platform." 
-#else - SockAddrUnix a -> return a -#endif -#if defined(CAN_SOCKET_SUPPORT) - SockAddrCan {} -> ioError $ userError "Network.accept: peer socket address 'SockAddrCan' not supported." -#else - SockAddrCan {} -> ioError $ userError "Network.accept: peer socket address 'SockAddrCan' not supported on this platform." -#endif - handle <- socketToHandle sock' ReadWriteMode - let port = case addr of - SockAddrInet p _ -> p - SockAddrInet6 p _ _ _ -> p - _ -> -1 - return (handle, peer, port) -#endif -#if !defined(mingw32_HOST_OS) -accept sock@(MkSocket _ AF_UNIX _ _ _) = do - ~(sock', (SockAddrUnix path)) <- Socket.accept sock - handle <- socketToHandle sock' ReadWriteMode - return (handle, path, -1) -#endif -accept (MkSocket _ family _ _ _) = - ioError $ userError $ "Network.accept: address family '" ++ - show family ++ "' not supported." - - --- | Close the socket. Sending data to or receiving data from closed socket --- may lead to undefined behaviour. -sClose :: Socket -> IO () -sClose = close -- Explicit redefinition because Network.sClose is deprecated, - -- hence the re-export would also be marked as such. - --- ----------------------------------------------------------------------------- --- sendTo/recvFrom - -{-$sendrecv -Send and receive data from\/to the given host and port number. These -should normally only be used where the socket will not be required for -further calls. Also, note that due to the use of 'hGetContents' in 'recvFrom' -the socket will remain open (i.e. not available) even if the function already -returned. Their use is strongly discouraged except for small test-applications -or invocations from the command line. 
--} - -sendTo :: HostName -- Hostname - -> PortID -- Port Number - -> String -- Message to send - -> IO () -sendTo h p msg = do - s <- connectTo h p - hPutStr s msg - hClose s - -recvFrom :: HostName -- Hostname - -> PortID -- Port Number - -> IO String -- Received Data - -#if defined(IPV6_SOCKET_SUPPORT) -recvFrom host port = do - proto <- getProtocolNumber "tcp" - let hints = defaultHints { addrFlags = [AI_ADDRCONFIG] - , addrProtocol = proto - , addrSocketType = Stream } - allowed <- map addrAddress `liftM` getAddrInfo (Just hints) (Just host) - Nothing - s <- listenOn port - let waiting = do - (s', addr) <- Socket.accept s - if not (addr `oneOf` allowed) - then sClose s' >> waiting - else socketToHandle s' ReadMode >>= hGetContents - waiting - where - a@(SockAddrInet _ ha) `oneOf` ((SockAddrInet _ hb):bs) - | ha == hb = True - | otherwise = a `oneOf` bs - a@(SockAddrInet6 _ _ ha _) `oneOf` ((SockAddrInet6 _ _ hb _):bs) - | ha == hb = True - | otherwise = a `oneOf` bs - _ `oneOf` _ = False -#else -recvFrom host port = do - ip <- getHostByName host - let ipHs = hostAddresses ip - s <- listenOn port - let - waiting = do - ~(s', SockAddrInet _ haddr) <- Socket.accept s - he <- getHostByAddr AF_INET haddr - if not (any (`elem` ipHs) (hostAddresses he)) - then do - sClose s' - waiting - else do - h <- socketToHandle s' ReadMode - msg <- hGetContents h - return msg - - message <- waiting - return message -#endif - --- --------------------------------------------------------------------------- --- Access function returning the port type/id of socket. - --- | Returns the 'PortID' associated with a given socket. 
-socketPort :: Socket -> IO PortID -socketPort s = do - sockaddr <- getSocketName s - case sockaddr of - SockAddrInet port _ -> return $ PortNumber port -#if defined(IPV6_SOCKET_SUPPORT) - SockAddrInet6 port _ _ _ -> return $ PortNumber port -#else - SockAddrInet6 {} -> ioError $ userError "Network.socketPort: socket address 'SockAddrInet6' not supported on this platform." -#endif -#if defined(mingw32_HOST_OS) - SockAddrUnix {} -> ioError $ userError "Network.socketPort: socket address 'SockAddrUnix' not supported on this platform." -#else - SockAddrUnix path -> return $ UnixSocket path -#endif - SockAddrCan {} -> ioError $ userError "Network.socketPort: socket address 'SockAddrCan' not supported." - --- --------------------------------------------------------------------------- --- Utils - --- Like bracket, but only performs the final action if there was an --- exception raised by the middle bit. -bracketOnError - :: IO a -- ^ computation to run first (\"acquire resource\") - -> (a -> IO b) -- ^ computation to run last (\"release resource\") - -> (a -> IO c) -- ^ computation to run in-between - -> IO c -- returns the value from the in-between computation -bracketOnError = Exception.bracketOnError - ------------------------------------------------------------------------------ --- Extra documentation - -{-$buffering - -The 'Handle' returned by 'connectTo' and 'accept' is 'NoBuffering' by -default. For an interactive application you may want to set the -buffering mode on the 'Handle' to -'LineBuffering' or 'BlockBuffering', like so: - -> h <- connectTo host port -> hSetBuffering h LineBuffering --} - -{-$performance - -For really fast I\/O, it might be worth looking at the 'hGetBuf' and -'hPutBuf' family of functions in "System.IO". 
--} - -catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a -#if MIN_VERSION_base(4,0,0) -catchIO = Exception.catch -#else -catchIO = Exception.catchJust Exception.ioErrors -#endif - --- Version of try implemented in terms of the locally defined catchIO -tryIO :: IO a -> IO (Either Exception.IOException a) -tryIO m = catchIO (liftM Right m) (return . Left) - --- Returns the first action from a list which does not throw an exception. --- If all the actions throw exceptions (and the list of actions is not empty), --- the last exception is thrown. --- The operations are run outside of the catchIO cleanup handler because --- catchIO masks asynchronous exceptions in the cleanup handler. --- In the case of complete failure, the last exception is actually thrown. -firstSuccessful :: String -> [IO a] -> IO a -firstSuccessful caller = go Nothing - where - -- Attempt the next operation, remember exception on failure - go _ (p:ps) = - do r <- tryIO p - case r of - Right x -> return x - Left e -> go (Just e) ps - - -- All operations failed, throw error if one exists - go Nothing [] = ioError $ userError $ caller ++ ": firstSuccessful: empty list" - go (Just e) [] = Exception.throwIO e diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/README.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/README.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/README.md 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/README.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,19 +0,0 @@ -# [`network`](http://hackage.haskell.org/package/network) [![Build Status](https://travis-ci.org/haskell/network.svg?branch=master)](https://travis-ci.org/haskell/network) [![Build status](https://ci.appveyor.com/api/projects/status/5erq63o4m29bhl57/branch/master?svg=true)](https://ci.appveyor.com/project/eborden/network/branch/master) - -To build this package using Cabal directly from git, you 
must run -`autoreconf` before the usual Cabal build steps -(configure/build/install). `autoreconf` is included in the -[GNU Autoconf](http://www.gnu.org/software/autoconf/) tools. There is -no need to run the `configure` script: the `setup configure` step will -do this for you. - -## Support Policy - -### GHC - -`network`'s GHC policy supports 3 [stable](https://downloads.haskell.org/~ghc/8.0.2/docs/html/users_guide/intro.html#ghc-version-numbering-policy) versions. The current stable -version and two previous stable versions are supported. - -### Hugs, JHC, UHC - -`network` does not officially support these compilers. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/Setup.hs 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,6 +0,0 @@ -module Main (main) where - -import Distribution.Simple - -main :: IO () -main = defaultMainWithHooks autoconfUserHooks diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/tests/BadFileDescriptor.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/tests/BadFileDescriptor.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/tests/BadFileDescriptor.hs 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/tests/BadFileDescriptor.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,51 +0,0 @@ --- Test code for "threadWait: invalid argument (Bad file descriptor)" --- See https://ghc.haskell.org/trac/ghc/ticket/14621 --- See https://github.com/haskell/network/issues/287 --- --- % runghc BadFileDescriptor.hs --- BadFileDescriptor.hs: threadWait: invalid argument (Bad file descriptor) -module Main where - -import Control.Concurrent (forkIO) -import Control.Monad (void, 
forever) -import Network.Socket hiding (recv) -import Network.Socket.ByteString (recv, sendAll) - -main :: IO () -main = do - let localhost = "localhost" - listenPort = "9876" - connectPort = "6789" - proxy localhost listenPort connectPort - -proxy :: HostName -> ServiceName -> ServiceName -> IO () -proxy localhost listenPort connectPort = do - fromClient <- serverSocket localhost listenPort - toServer <- clientSocket localhost connectPort - void $ forkIO $ relay toServer fromClient - relay fromClient toServer - -relay :: Socket -> Socket -> IO () -relay s1 s2 = forever $ do - payload <- recv s1 4096 - sendAll s2 payload - -serverSocket :: HostName -> ServiceName -> IO Socket -serverSocket host port = do - let hints = defaultHints { - addrFlags = [AI_PASSIVE] - , addrSocketType = Stream - } - addr:_ <- getAddrInfo (Just hints) (Just host) (Just port) - sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) - bind sock (addrAddress addr) - listen sock 1 - fst <$> accept sock - -clientSocket :: HostName -> ServiceName -> IO Socket -clientSocket host port = do - let hints = defaultHints { addrSocketType = Stream } - addr:_ <- getAddrInfo (Just hints) (Just host) (Just port) - sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) - connect sock (addrAddress addr) - return sock diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/tests/doctests.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/tests/doctests.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/tests/doctests.hs 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/tests/doctests.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,20 +0,0 @@ -import Test.DocTest - -main :: IO () -main = doctest [ - "-i" - , "-idist/build" - , "-i." 
- , "-idist/build/autogen" - , "-Idist/build/autogen" - , "-Idist/build" - , "-Iinclude" - , "-optP-include" - , "-optPdist/build/autogen/cabal_macros.h" - , "-DCALLCONV=ccall" - , "-XCPP" - , "-XDeriveDataTypeable" - , "-package-db dist/package.conf.inplace" - , "-package network" - , "Network" - ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/tests/RegressionSpec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/tests/RegressionSpec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/tests/RegressionSpec.hs 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/tests/RegressionSpec.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,29 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} --- | Tests for things that didn't work in the past. -module RegressionSpec (main, spec) where - -import Control.Monad -import Network.Socket hiding (send, sendTo, recv, recvFrom) -import Network.Socket.ByteString - -import Test.Hspec - -main :: IO () -main = hspec spec - -spec :: Spec -spec = do - describe "getAddrInfo" $ do - it "does not cause segfault on macOS 10.8.2 due to AI_NUMERICSERV" $ do - let hints = defaultHints { addrFlags = [AI_NUMERICSERV] } - void $ getAddrInfo (Just hints) (Just "localhost") Nothing - - describe "Network.Socket.ByteString.recv" $ do - it "checks -1 correctly on Windows" $ do - sock <- socket AF_INET Stream defaultProtocol - recv sock 1024 `shouldThrow` anyException - - describe "Network.Socket.ByteString.send" $ do - it "checks -1 correctly on Windows" $ do - sock <- socket AF_INET Stream defaultProtocol - send sock "hello world" `shouldThrow` anyException diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/tests/SimpleSpec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/tests/SimpleSpec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/tests/SimpleSpec.hs 2018-07-07 14:47:29.000000000 
+0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/tests/SimpleSpec.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,344 +0,0 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE ScopedTypeVariables #-} - -module SimpleSpec (main, spec) where - -import Control.Concurrent (ThreadId, forkIO, myThreadId) -import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar, readMVar) -import qualified Control.Exception as E -import Control.Monad -import Data.ByteString (ByteString) -import qualified Data.ByteString as S -import qualified Data.ByteString.Char8 as C -import qualified Data.ByteString.Lazy as L -import Network.Socket hiding (send, sendTo, recv, recvFrom) -import Network.Socket.ByteString -import qualified Network.Socket.ByteString.Lazy as Lazy -import System.Directory -import System.Timeout (timeout) - -import Test.Hspec - -main :: IO () -main = hspec spec - -spec :: Spec -spec = do - describe "send" $ do - it "works well" $ do - let server sock = recv sock 1024 `shouldReturn` testMsg - client sock = send sock testMsg - tcpTest client server - - describe "sendAll" $ do - it "works well" $ do - let server sock = recv sock 1024 `shouldReturn` testMsg - client sock = sendAll sock testMsg - tcpTest client server - - describe "Lazy.sendAll" $ do - it "works well" $ do - let server sock = recv sock 1024 `shouldReturn` testMsg - client sock = Lazy.sendAll sock $ L.fromChunks [testMsg] - tcpTest client server - - describe "sendTo" $ do - it "works well" $ do - let server sock = recv sock 1024 `shouldReturn` testMsg - client sock serverPort = do - let hints = defaultHints { addrFlags = [AI_NUMERICHOST], addrSocketType = Datagram } - addr:_ <- getAddrInfo (Just hints) (Just serverAddr) (Just $ show serverPort) - sendTo sock testMsg $ addrAddress addr - udpTest client server - - describe "sendAllTo" $ do - it "works well" $ do - let server sock = recv sock 1024 `shouldReturn` testMsg - client sock serverPort = do - let hints = 
defaultHints { addrFlags = [AI_NUMERICHOST], addrSocketType = Datagram } - addr:_ <- getAddrInfo (Just hints) (Just serverAddr) (Just $ show serverPort) - sendAllTo sock testMsg $ addrAddress addr - udpTest client server - - describe "sendMany" $ do - it "works well" $ do - let server sock = recv sock 1024 `shouldReturn` (S.append seg1 seg2) - client sock = sendMany sock [seg1, seg2] - - seg1 = C.pack "This is a " - seg2 = C.pack "test message." - tcpTest client server - - describe "sendManyTo" $ do - it "works well" $ do - let server sock = recv sock 1024 `shouldReturn` (S.append seg1 seg2) - client sock serverPort = do - let hints = defaultHints { addrFlags = [AI_NUMERICHOST], addrSocketType = Datagram } - addr:_ <- getAddrInfo (Just hints) (Just serverAddr) (Just $ show serverPort) - sendManyTo sock [seg1, seg2] $ addrAddress addr - - seg1 = C.pack "This is a " - seg2 = C.pack "test message." - udpTest client server - - describe "recv" $ do - it "works well" $ do - let server sock = recv sock 1024 `shouldReturn` testMsg - client sock = send sock testMsg - tcpTest client server - - it "can treat overflow" $ do - let server sock = do seg1 <- recv sock (S.length testMsg - 3) - seg2 <- recv sock 1024 - let msg = S.append seg1 seg2 - msg `shouldBe` testMsg - client sock = send sock testMsg - tcpTest client server - - it "returns empty string at EOF" $ do - let client s = recv s 4096 `shouldReturn` S.empty - server s = shutdown s ShutdownSend - tcpTest client server - - describe "recvFrom" $ do - it "works well" $ do - let server sock = do (msg, _) <- recvFrom sock 1024 - testMsg `shouldBe` msg - client sock = do - addr <- getPeerName sock - sendTo sock testMsg addr - tcpTest client server - it "can treat overflow" $ do - let server sock = do (seg1, _) <- recvFrom sock (S.length testMsg - 3) - (seg2, _) <- recvFrom sock 1024 - let msg = S.append seg1 seg2 - testMsg `shouldBe` msg - - client sock = send sock testMsg - tcpTest client server - - describe "UserTimeout" $ 
do - it "can be set" $ do - when (isSupportedSocketOption UserTimeout) $ do - sock <- socket AF_INET Stream defaultProtocol - setSocketOption sock UserTimeout 1000 - getSocketOption sock UserTimeout `shouldReturn` 1000 - setSocketOption sock UserTimeout 2000 - getSocketOption sock UserTimeout `shouldReturn` 2000 - close sock - - -- On various BSD systems the peer credentials are exchanged during - -- connect(), and this does not happen with `socketpair()`. Therefore, - -- we must actually set up a listener and connect, rather than use a - -- socketpair(). - -- - describe "getPeerCredential" $ do - it "can return something" $ do - when isUnixDomainSocketAvailable $ do - -- It would be useful to check that we did not get garbage - -- back, but rather the actual uid of the test program. For - -- that we'd need System.Posix.User, but that is not available - -- under Windows. For now, accept the risk that we did not get - -- the right answer. - -- - let client sock = do - (_, uid, _) <- getPeerCredential sock - uid `shouldNotBe` Nothing - server (sock, _) = do - (_, uid, _) <- getPeerCredential sock - uid `shouldNotBe` Nothing - unixTest client server - {- The below test fails on many *BSD systems, because the getsockopt() - call that underlies getpeereid() does not have the same meaning for - all address families, but the C-library was not checking that the - provided sock is an AF_UNIX socket. This will fixed some day, but - we should not fail on those systems in the mean-time. The upstream - C-library fix is to call getsockname() and check the address family - before calling `getpeereid()`. We could duplicate that in our own - code, and then this test would work on those platforms that have - `getpeereid()` and not the SO_PEERCRED socket option. 
- - it "return nothing for non-UNIX-domain socket" $ do - when isUnixDomainSocketAvailable $ do - s <- socket AF_INET Stream defaultProtocol - cred1 <- getPeerCredential s - cred1 `shouldBe` (Nothing,Nothing,Nothing) - -} - - describe "getAddrInfo" $ do - it "works for IPv4 address" $ do - let hints = defaultHints { addrFlags = [AI_NUMERICHOST, AI_ADDRCONFIG] } - AddrInfo{addrAddress = (SockAddrInet _ hostAddr)}:_ <- - getAddrInfo (Just hints) (Just "127.128.129.130") Nothing - hostAddressToTuple hostAddr `shouldBe` (0x7f, 0x80, 0x81, 0x82) -#if defined(IPV6_SOCKET_SUPPORT) - it "works for IPv6 address" $ do - let hints = defaultHints { addrFlags = [AI_NUMERICHOST, AI_ADDRCONFIG] } - host = "2001:0db8:85a3:0000:0000:8a2e:0370:7334" - AddrInfo{addrAddress = (SockAddrInet6 _ _ hostAddr _)}:_ <- - getAddrInfo (Just hints) (Just host) Nothing - hostAddress6ToTuple hostAddr - `shouldBe` (0x2001, 0x0db8, 0x85a3, 0x0000, 0x0000, 0x8a2e, 0x0370, 0x7334) -#endif - ------------------------------------------------------------------------- - -serverAddr :: String -serverAddr = "127.0.0.1" - -testMsg :: ByteString -testMsg = "This is a test message." - -unixAddr :: String -unixAddr = "/tmp/network-test" - ------------------------------------------------------------------------- --- Test helpers - --- | Establish a connection between client and server and then run --- 'clientAct' and 'serverAct', in different threads. Both actions --- get passed a connected 'Socket', used for communicating between --- client and server. 'unixTest' makes sure that the 'Socket' is --- closed after the actions have run. 
-unixTest :: (Socket -> IO a) -> ((Socket, SockAddr) -> IO b) -> IO () -unixTest clientAct serverAct = do - test clientSetup clientAct serverSetup server - where - clientSetup = do - sock <- socket AF_UNIX Stream defaultProtocol - connect sock (SockAddrUnix unixAddr) - return sock - - serverSetup = do - sock <- socket AF_UNIX Stream defaultProtocol - unlink unixAddr -- just in case - bind sock (SockAddrUnix unixAddr) - listen sock 1 - return sock - - server sock = E.bracket (accept sock) (killClientSock . fst) serverAct - - unlink file = do - exist <- doesFileExist file - when exist $ removeFile file - - killClientSock sock = do - shutdown sock ShutdownBoth - close sock - unlink unixAddr - --- | Establish a connection between client and server and then run --- 'clientAct' and 'serverAct', in different threads. Both actions --- get passed a connected 'Socket', used for communicating between --- client and server. 'tcpTest' makes sure that the 'Socket' is --- closed after the actions have run. 
-tcpTest :: (Socket -> IO a) -> (Socket -> IO b) -> IO () -tcpTest clientAct serverAct = do - portVar <- newEmptyMVar - test (clientSetup portVar) clientAct (serverSetup portVar) server - where - clientSetup portVar = do - let hints = defaultHints { addrSocketType = Stream } - serverPort <- readMVar portVar - addr:_ <- getAddrInfo (Just hints) (Just serverAddr) (Just $ show serverPort) - sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) -#if !defined(mingw32_HOST_OS) - let fd = fdSocket sock - getNonBlock fd `shouldReturn` True - getCloseOnExec fd `shouldReturn` False -#endif - connect sock $ addrAddress addr - return sock - - serverSetup portVar = do - let hints = defaultHints { - addrFlags = [AI_PASSIVE] - , addrSocketType = Stream - } - addr:_ <- getAddrInfo (Just hints) (Just serverAddr) Nothing - sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) - let fd = fdSocket sock -#if !defined(mingw32_HOST_OS) - getNonBlock fd `shouldReturn` True - getCloseOnExec fd `shouldReturn` False -#endif - setSocketOption sock ReuseAddr 1 - setCloseOnExecIfNeeded fd -#if !defined(mingw32_HOST_OS) - getCloseOnExec fd `shouldReturn` True -#endif - bind sock $ addrAddress addr - listen sock 1 - serverPort <- socketPort sock - putMVar portVar serverPort - return sock - - server sock = do - (clientSock, _) <- accept sock -#if !defined(mingw32_HOST_OS) - let fd = fdSocket clientSock - getNonBlock fd `shouldReturn` True - getCloseOnExec fd `shouldReturn` True -#endif - _ <- serverAct clientSock - close clientSock - --- | Create an unconnected 'Socket' for sending UDP and receiving --- datagrams and then run 'clientAct' and 'serverAct'. 
-udpTest :: (Socket -> PortNumber -> IO a) -> (Socket -> IO b) -> IO () -udpTest clientAct serverAct = do - portVar <- newEmptyMVar - test clientSetup (client portVar) (serverSetup portVar) serverAct - where - clientSetup = socket AF_INET Datagram defaultProtocol - - client portVar sock = do - serverPort <- readMVar portVar - clientAct sock serverPort - - serverSetup portVar = do - let hints = defaultHints { - addrFlags = [AI_PASSIVE] - , addrSocketType = Datagram - } - addr:_ <- getAddrInfo (Just hints) (Just serverAddr) Nothing - sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) - setSocketOption sock ReuseAddr 1 - bind sock $ addrAddress addr - serverPort <- socketPort sock - putMVar portVar serverPort - return sock - --- | Run a client/server pair and synchronize them so that the server --- is started before the client and the specified server action is --- finished before the client closes the 'Socket'. -test :: IO Socket -> (Socket -> IO b) -> IO Socket -> (Socket -> IO c) -> IO () -test clientSetup clientAct serverSetup serverAct = do - tid <- myThreadId - barrier <- newEmptyMVar - _ <- forkIO $ server barrier - client tid barrier - where - server barrier = do - E.bracket serverSetup close $ \sock -> do - serverReady - Just _ <- timeout 1000000 $ serverAct sock - putMVar barrier () - where - -- | Signal to the client that it can proceed. - serverReady = putMVar barrier () - - client tid barrier = do - takeMVar barrier - -- Transfer exceptions to the main thread. - bracketWithReraise tid clientSetup close $ \res -> do - Just _ <- timeout 1000000 $ clientAct res - takeMVar barrier - --- | Like 'bracket' but catches and reraises the exception in another --- thread, specified by the first argument. 
-bracketWithReraise :: ThreadId -> IO a -> (a -> IO b) -> (a -> IO ()) -> IO () -bracketWithReraise tid setup teardown thing = - E.bracket setup teardown thing - `E.catch` \ (e :: E.SomeException) -> E.throwTo tid e diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/tests/Spec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/tests/Spec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.7.0.2/tests/Spec.hs 2018-07-07 14:47:29.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.7.0.2/tests/Spec.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -{-# OPTIONS_GHC -F -pgmF hspec-discover #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/cbits/ancilData.c cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/cbits/ancilData.c --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/cbits/ancilData.c 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/cbits/ancilData.c 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,132 @@ +/* + * Copyright(c), 2002 The GHC Team. + */ + +#ifdef aix_HOST_OS +#define _LINUX_SOURCE_COMPAT +// Required to get CMSG_SPACE/CMSG_LEN macros. See #265. +// Alternative is to #define COMPAT_43 and use the +// HAVE_STRUCT_MSGHDR_MSG_ACCRIGHTS code instead, but that means +// fiddling with the configure script too. +#endif + +#include "HsNet.h" +#include + +#if HAVE_STRUCT_MSGHDR_MSG_CONTROL || HAVE_STRUCT_MSGHDR_MSG_ACCRIGHTS /* until end */ + +/* + * Support for transmitting file descriptors. + * + * + */ + + +/* + * sendmsg() and recvmsg() wrappers for transmitting + * ancillary socket data. + * + * Doesn't provide the full generality of either, specifically: + * + * - no support for scattered read/writes. + * - only possible to send one ancillary chunk of data at a time. 
+ */ + +int +sendFd(int sock, + int outfd) +{ + struct msghdr msg = {0}; + struct iovec iov[1]; + char buf[2]; +#if HAVE_STRUCT_MSGHDR_MSG_ACCRIGHTS + msg.msg_accrights = (void*)&outfd; + msg.msg_accrightslen = sizeof(int); +#else + struct cmsghdr *cmsg; + char ancBuffer[CMSG_SPACE(sizeof(int))]; + char* dPtr; + + msg.msg_control = ancBuffer; + msg.msg_controllen = sizeof(ancBuffer); + + cmsg = CMSG_FIRSTHDR(&msg); + cmsg->cmsg_level = SOL_SOCKET; + cmsg->cmsg_type = SCM_RIGHTS; + cmsg->cmsg_len = CMSG_LEN(sizeof(int)); + dPtr = (char*)CMSG_DATA(cmsg); + + *(int*)dPtr = outfd; + msg.msg_controllen = cmsg->cmsg_len; +#endif + + buf[0] = 0; buf[1] = '\0'; + iov[0].iov_base = buf; + iov[0].iov_len = 2; + + msg.msg_iov = iov; + msg.msg_iovlen = 1; + + return sendmsg(sock,&msg,0); +} + +int +recvFd(int sock) +{ + struct msghdr msg = {0}; + char duffBuf[10]; + int rc; + int len = sizeof(int); + struct iovec iov[1]; +#if HAVE_STRUCT_MSGHDR_MSG_CONTROL + struct cmsghdr *cmsg = NULL; + struct cmsghdr *cptr; +#else + int* fdBuffer; +#endif + int fd; + + iov[0].iov_base = duffBuf; + iov[0].iov_len = sizeof(duffBuf); + msg.msg_iov = iov; + msg.msg_iovlen = 1; + +#if HAVE_STRUCT_MSGHDR_MSG_CONTROL + cmsg = (struct cmsghdr*)malloc(CMSG_SPACE(len)); + if (cmsg==NULL) { + return -1; + } + + msg.msg_control = (void *)cmsg; + msg.msg_controllen = CMSG_LEN(len); +#else + fdBuffer = (int*)malloc(len); + if (fdBuffer) { + msg.msg_accrights = (void *)fdBuffer; + } else { + return -1; + } + msg.msg_accrightslen = len; +#endif + + if ((rc = recvmsg(sock,&msg,0)) < 0) { +#if HAVE_STRUCT_MSGHDR_MSG_CONTROL + free(cmsg); +#else + free(fdBuffer); +#endif + return rc; + } + +#if HAVE_STRUCT_MSGHDR_MSG_CONTROL + cptr = (struct cmsghdr*)CMSG_FIRSTHDR(&msg); + fd = *(int*)CMSG_DATA(cptr); + free(cmsg); +#else + fd = *(int*)fdBuffer; + free(fdBuffer); +#endif + return fd; +} + +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/cbits/asyncAccept.c 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/cbits/asyncAccept.c --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/cbits/asyncAccept.c 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/cbits/asyncAccept.c 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,72 @@ +/* + * (c) sof, 2003. + */ + +#include "HsNet.h" +#include "HsFFI.h" + +#if defined(HAVE_WINSOCK2_H) + +/* all the way to the end */ + +/* + * To support non-blocking accept()s with WinSock, we use the asyncDoProc# + * primop, which lets a Haskell thread call an external routine without + * blocking the progress of other threads. + * + * As can readily be seen, this is a low-level mechanism. + * + */ + +typedef struct AcceptData { + int fdSock; + int newSock; + void* sockAddr; + int size; +} AcceptData; + +/* + * Fill in parameter block that's passed along when the RTS invokes the + * accept()-calling proc below (acceptDoProc()) + */ +void* +newAcceptParams(int sock, + int sz, + void* sockaddr) +{ + AcceptData* data = (AcceptData*)malloc(sizeof(AcceptData)); + if (!data) return NULL; + data->fdSock = sock; + data->newSock = 0; + data->sockAddr = sockaddr; + data->size = sz; + + return data; +} + +/* Accessors for return code and accept()'s socket result. 
*/ + +int +acceptNewSock(void* d) +{ + return (((AcceptData*)d)->newSock); +} + +/* Routine invoked by an RTS worker thread */ +int +acceptDoProc(void* param) +{ + SOCKET s; + + AcceptData* data = (AcceptData*)param; + s = accept( data->fdSock, + data->sockAddr, + &data->size); + data->newSock = s; + if ( s == INVALID_SOCKET ) { + return GetLastError(); + } else { + return 0; + } +} +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/cbits/HsNet.c cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/cbits/HsNet.c --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/cbits/HsNet.c 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/cbits/HsNet.c 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,8 @@ +/* ----------------------------------------------------------------------------- + * (c) The University of Glasgow 2002 + * + * static versions of the inline functions from HsNet.h + * -------------------------------------------------------------------------- */ + +#define INLINE +#include "HsNet.h" diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/cbits/initWinSock.c cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/cbits/initWinSock.c --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/cbits/initWinSock.c 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/cbits/initWinSock.c 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,43 @@ +#include "HsNet.h" +#include "HsFFI.h" + +#if defined(HAVE_WINSOCK2_H) + +static int winsock_inited = 0; + +static void +shutdownHandler(void) +{ + WSACleanup(); +} + +/* Initialising WinSock... 
*/ +int +initWinSock () +{ + WORD wVersionRequested; + WSADATA wsaData; + int err; + + if (!winsock_inited) { + wVersionRequested = MAKEWORD( 2, 2 ); + + err = WSAStartup ( wVersionRequested, &wsaData ); + + if ( err != 0 ) { + return err; + } + + if ( LOBYTE( wsaData.wVersion ) != 2 || + HIBYTE( wsaData.wVersion ) != 2 ) { + WSACleanup(); + return (-1); + } + + atexit(shutdownHandler); + winsock_inited = 1; + } + return 0; +} + +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/cbits/winSockErr.c cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/cbits/winSockErr.c --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/cbits/winSockErr.c 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/cbits/winSockErr.c 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,76 @@ +#include "HsNet.h" +#include "HsFFI.h" + +#if defined(HAVE_WINSOCK2_H) +#include + +/* to the end */ + +const char* +getWSErrorDescr(int err) +{ + static char otherErrMsg[256]; + + switch (err) { + case WSAEINTR: return "Interrupted function call (WSAEINTR)"; + case WSAEBADF: return "bad socket descriptor (WSAEBADF)"; + case WSAEACCES: return "Permission denied (WSAEACCESS)"; + case WSAEFAULT: return "Bad address (WSAEFAULT)"; + case WSAEINVAL: return "Invalid argument (WSAEINVAL)"; + case WSAEMFILE: return "Too many open files (WSAEMFILE)"; + case WSAEWOULDBLOCK: return "Resource temporarily unavailable (WSAEWOULDBLOCK)"; + case WSAEINPROGRESS: return "Operation now in progress (WSAEINPROGRESS)"; + case WSAEALREADY: return "Operation already in progress (WSAEALREADY)"; + case WSAENOTSOCK: return "Socket operation on non-socket (WSAENOTSOCK)"; + case WSAEDESTADDRREQ: return "Destination address required (WSAEDESTADDRREQ)"; + case WSAEMSGSIZE: return "Message too long (WSAEMSGSIZE)"; + case WSAEPROTOTYPE: return "Protocol wrong type for socket (WSAEPROTOTYPE)"; + case WSAENOPROTOOPT: return "Bad 
protocol option (WSAENOPROTOOPT)"; + case WSAEPROTONOSUPPORT: return "Protocol not supported (WSAEPROTONOSUPPORT)"; + case WSAESOCKTNOSUPPORT: return "Socket type not supported (WSAESOCKTNOSUPPORT)"; + case WSAEOPNOTSUPP: return "Operation not supported (WSAEOPNOTSUPP)"; + case WSAEPFNOSUPPORT: return "Protocol family not supported (WSAEPFNOSUPPORT)"; + case WSAEAFNOSUPPORT: return "Address family not supported by protocol family (WSAEAFNOSUPPORT)"; + case WSAEADDRINUSE: return "Address already in use (WSAEADDRINUSE)"; + case WSAEADDRNOTAVAIL: return "Cannot assign requested address (WSAEADDRNOTAVAIL)"; + case WSAENETDOWN: return "Network is down (WSAENETDOWN)"; + case WSAENETUNREACH: return "Network is unreachable (WSAENETUNREACH)"; + case WSAENETRESET: return "Network dropped connection on reset (WSAENETRESET)"; + case WSAECONNABORTED: return "Software caused connection abort (WSAECONNABORTED)"; + case WSAECONNRESET: return "Connection reset by peer (WSAECONNRESET)"; + case WSAENOBUFS: return "No buffer space available (WSAENOBUFS)"; + case WSAEISCONN: return "Socket is already connected (WSAEISCONN)"; + case WSAENOTCONN: return "Socket is not connected (WSAENOTCONN)"; + case WSAESHUTDOWN: return "Cannot send after socket shutdown (WSAESHUTDOWN)"; + case WSAETOOMANYREFS: return "Too many references (WSAETOOMANYREFS)"; + case WSAETIMEDOUT: return "Connection timed out (WSAETIMEDOUT)"; + case WSAECONNREFUSED: return "Connection refused (WSAECONNREFUSED)"; + case WSAELOOP: return "Too many levels of symbolic links (WSAELOOP)"; + case WSAENAMETOOLONG: return "Filename too long (WSAENAMETOOLONG)"; + case WSAEHOSTDOWN: return "Host is down (WSAEHOSTDOWN)"; + case WSAEHOSTUNREACH: return "Host is unreachable (WSAEHOSTUNREACH)"; + case WSAENOTEMPTY: return "Resource not empty (WSAENOTEMPTY)"; + case WSAEPROCLIM: return "Too many processes (WSAEPROCLIM)"; + case WSAEUSERS: return "Too many users (WSAEUSERS)"; + case WSAEDQUOT: return "Disk quota exceeded (WSAEDQUOT)"; + 
case WSAESTALE: return "Stale NFS file handle (WSAESTALE)"; + case WSAEREMOTE: return "Too many levels of remote in path (WSAEREMOTE)"; + case WSAEDISCON: return "Graceful shutdown in progress (WSAEDISCON)"; + case WSASYSNOTREADY: return "Network subsystem is unavailable (WSASYSNOTREADY)"; + case WSAVERNOTSUPPORTED: return "Winsock.dll version out of range (WSAVERNOTSUPPORTED)"; + case WSANOTINITIALISED: return "Successful WSAStartup not yet performed (WSANOTINITIALISED)"; +#ifdef WSATYPE_NOT_FOUND + case WSATYPE_NOT_FOUND: return "Class type not found (WSATYPE_NOT_FOUND)"; +#endif + case WSAHOST_NOT_FOUND: return "Host not found (WSAHOST_NOT_FOUND)"; + case WSATRY_AGAIN: return "Nonauthoritative host not found (WSATRY_AGAIN)"; + case WSANO_RECOVERY: return "This is a nonrecoverable error (WSANO_RECOVERY)"; + case WSANO_DATA: return "Valid name, no data record of requested type (WSANO_DATA)"; + default: + sprintf(otherErrMsg, "Unknown WinSock error: %u", err); + return otherErrMsg; + } +} + +#endif + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/CHANGELOG.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/CHANGELOG.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/CHANGELOG.md 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/CHANGELOG.md 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,149 @@ +## Version 2.8.0.0 + +* Breaking change: PortNumber originally contained Word32 in network + byte order and used "deriving Ord". This results in strange behavior + on the Ord instance. Now PortNumber holds Word32 in host byte order. + [#347](https://github.com/haskell/network/pull/347) +* Breaking change: stopping the export of the PortNum constructor in + PortNumber. +* Use bytestring == 0.10.* only. +* Use base >= 4.7 && < 5. 
+ +## Version 2.7.0.2 + +* Removing withMVar to avoid the deadlock between "accept" and "close" + [#330](https://github.com/haskell/network/pull/330) +* "close" does not throw exceptions. A new API: "close'" throws + exceptions when necessary. + [#337](https://github.com/haskell/network/pull/337) +* Fixing the hang of lazy sendAll. + [#340](https://github.com/haskell/network/pull/340) +* Installing NetDef.h (#334) + [#334](https://github.com/haskell/network/pull/334) + +## Version 2.7.0.1 + + * A new API: socketPortSafe. + [#319](https://github.com/haskell/network/pull/319) + * Fixing a drain bug of sendAll. + [#320](https://github.com/haskell/network/pull/320) + * Porting the new CALLCONV convention from master. + [#313](https://github.com/haskell/network/pull/313) + * Withdrawing the deprecations of packFamily and unpackFamily. + [#324](https://github.com/haskell/network/pull/324) + +## Version 2.7.0.0 + + * Obsoleting the Network module. + * Obsoleting the Network.BSD module. + * Obsoleting APIs: MkSocket, htonl, ntohl, + getPeerCred, getPeerEid, + send, sendTo, recv, recvFrom, recvLen, + inet_addr, inet_ntoa, + isConnected, isBound, isListening, isReadable, isWritable, + aNY_PORT, iNADDR_ANY, iN6ADDR_ANY, sOMAXCONN, + sOL_SOCKET, sCM_RIGHTS, + packFamily, unpackFamily, packSocketType + * Do not closeFd within sendFd. + [#271](https://github.com/haskell/network/pull/271) + * Exporting ifNameToIndex and ifIndexToName from Network.Socket. + * New APIs: setCloseOnExecIfNeeded, getCloseOnExec and getNonBlock + * New APIs: isUnixDomainSocketAvailable and getPeerCredential + * socketPair, sendFd and recvFd are exported even on Windows. 
+ +## Version 2.6.3.5 + + * Reverting "Do not closeFd within sendFd" + [#271](https://github.com/haskell/network/pull/271) + +## Version 2.6.3.4 + + * Don't touch IPv6Only when running on OpenBSD + [#227](https://github.com/haskell/network/pull/227) + * Do not closeFd within sendFd + [#271](https://github.com/haskell/network/pull/271) + * Updating examples and docs. + +## Version 2.6.3.3 + + * Adds a function to show the defaultHints without reading their undefined fields + [#291](https://github.com/haskell/network/pull/291) + * Improve exception error messages for getAddrInfo and getNameInfo + [#289](https://github.com/haskell/network/pull/289) + * Deprecating SockAddrCan. + +## Version 2.6.3.2 + + * Zero memory of `sockaddr_un` if abstract socket + [#220](https://github.com/haskell/network/pull/220) + + * Improving error messages + [#232](https://github.com/haskell/network/pull/232) + + * Allow non-blocking file descriptors via `setNonBlockIfNeeded` + [#242](https://github.com/haskell/network/pull/242) + + * Update config.{guess,sub} to latest version + [#244](https://github.com/haskell/network/pull/244) + + * Rename `my_inet_ntoa` to avoid symbol conflicts + [#228](https://github.com/haskell/network/pull/228) + + * Test infrastructure improvements + [#219](https://github.com/haskell/network/pull/219) + [#217](https://github.com/haskell/network/pull/217) + [#218](https://github.com/haskell/network/pull/218) + + * House keeping and cleanup + [#238](https://github.com/haskell/network/pull/238) + [#237](https://github.com/haskell/network/pull/237) + +## Version 2.6.3.1 + + * Reverse breaking exception change in `Network.Socket.ByteString.recv` + [#215](https://github.com/haskell/network/issues/215) + +## Version 2.6.3.0 + + * New maintainers: Evan Borden (@eborden) and Kazu Yamamoto (@kazu-yamamoto). + The maintainer for a long period, Johan Tibell (@tibbe) stepped down. + Thank you, Johan, for your hard work for a long time. 
+ + * New APIs: ntohl, htonl,hostAddressToTuple{,6} and tupleToHostAddress{,6}. + [#210](https://github.com/haskell/network/pull/210) + + * Added a Read instance for PortNumber. [#145](https://github.com/haskell/network/pull/145) + + * We only set the IPV6_V6ONLY flag to 0 for stream and datagram socket types, + as opposed to all of them. This makes it possible to use ICMPv6. + [#180](https://github.com/haskell/network/pull/180) + [#181](https://github.com/haskell/network/pull/181) + + * Work around GHC bug #12020. Socket errors no longer cause segfaults or + hangs on Windows. [#192](https://github.com/haskell/network/pull/192) + + * Various documentation improvements and the deprecated pragmas. + [#186](https://github.com/haskell/network/pull/186) + [#201](https://github.com/haskell/network/issues/201) + [#205](https://github.com/haskell/network/pull/205) + [#206](https://github.com/haskell/network/pull/206) + [#211](https://github.com/haskell/network/issues/211) + + * Various internal improvements. + [#193](https://github.com/haskell/network/pull/193) + [#200](https://github.com/haskell/network/pull/200) + +## Version 2.6.2.1 + + * Regenerate configure and HsNetworkConfig.h.in. + + * Better detection of CAN sockets. + +## Version 2.6.2.0 + + * Add support for TCP_USER_TIMEOUT. + + * Don't conditionally export the SockAddr constructors. + + * Add isSupportSockAddr to allow checking for supported address types + at runtime. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/config.guess cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/config.guess --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/config.guess 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/config.guess 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,1466 @@ +#! /bin/sh +# Attempt to guess a canonical system name. +# Copyright 1992-2017 Free Software Foundation, Inc. 
+ +timestamp='2017-03-05' + +# This file is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, see . +# +# As a special exception to the GNU General Public License, if you +# distribute this file as part of a program that contains a +# configuration script generated by Autoconf, you may include it under +# the same distribution terms that you use for the rest of that +# program. This Exception is an additional permission under section 7 +# of the GNU General Public License, version 3 ("GPLv3"). +# +# Originally written by Per Bothner; maintained since 2000 by Ben Elliston. +# +# You can get the latest version of this script from: +# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess +# +# Please send patches to . + + +me=`echo "$0" | sed -e 's,.*/,,'` + +usage="\ +Usage: $0 [OPTION] + +Output the configuration name of the system \`$me' is run on. + +Operation modes: + -h, --help print this help, then exit + -t, --time-stamp print date of last modification, then exit + -v, --version print version number, then exit + +Report bugs and patches to ." + +version="\ +GNU config.guess ($timestamp) + +Originally written by Per Bothner. +Copyright 1992-2017 Free Software Foundation, Inc. + +This is free software; see the source for copying conditions. There is NO +warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." + +help=" +Try \`$me --help' for more information." 
+ +# Parse command line +while test $# -gt 0 ; do + case $1 in + --time-stamp | --time* | -t ) + echo "$timestamp" ; exit ;; + --version | -v ) + echo "$version" ; exit ;; + --help | --h* | -h ) + echo "$usage"; exit ;; + -- ) # Stop option processing + shift; break ;; + - ) # Use stdin as input. + break ;; + -* ) + echo "$me: invalid option $1$help" >&2 + exit 1 ;; + * ) + break ;; + esac +done + +if test $# != 0; then + echo "$me: too many arguments$help" >&2 + exit 1 +fi + +trap 'exit 1' 1 2 15 + +# CC_FOR_BUILD -- compiler used by this script. Note that the use of a +# compiler to aid in system detection is discouraged as it requires +# temporary files to be created and, as you can see below, it is a +# headache to deal with in a portable fashion. + +# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still +# use `HOST_CC' if defined, but it is deprecated. + +# Portable tmp directory creation inspired by the Autoconf team. + +set_cc_for_build=' +trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ; +trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ; +: ${TMPDIR=/tmp} ; + { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } || + { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } || + { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } || + { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ; +dummy=$tmp/dummy ; +tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ; +case $CC_FOR_BUILD,$HOST_CC,$CC in + ,,) echo "int x;" > $dummy.c ; + for c in cc gcc c89 c99 ; do + if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then + CC_FOR_BUILD="$c"; break ; + fi ; + done ; + if test x"$CC_FOR_BUILD" = x ; then + CC_FOR_BUILD=no_compiler_found ; + fi + ;; + ,,*) CC_FOR_BUILD=$CC ;; + ,*,*) CC_FOR_BUILD=$HOST_CC ;; +esac ; 
set_cc_for_build= ;' + +# This is needed to find uname on a Pyramid OSx when run in the BSD universe. +# (ghazi@noc.rutgers.edu 1994-08-24) +if (test -f /.attbin/uname) >/dev/null 2>&1 ; then + PATH=$PATH:/.attbin ; export PATH +fi + +UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown +UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown +UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown +UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown + +case "${UNAME_SYSTEM}" in +Linux|GNU|GNU/*) + # If the system lacks a compiler, then just pick glibc. + # We could probably try harder. + LIBC=gnu + + eval $set_cc_for_build + cat <<-EOF > $dummy.c + #include + #if defined(__UCLIBC__) + LIBC=uclibc + #elif defined(__dietlibc__) + LIBC=dietlibc + #else + LIBC=gnu + #endif + EOF + eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC' | sed 's, ,,g'` + ;; +esac + +# Note: order is significant - the case branches are not exclusive. + +case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in + *:NetBSD:*:*) + # NetBSD (nbsd) targets should (where applicable) match one or + # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*, + # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently + # switched to ELF, *-*-netbsd* would select the old + # object file format. This provides both forward + # compatibility and a consistent mechanism for selecting the + # object file format. + # + # Note: NetBSD doesn't particularly care about the vendor + # portion of the name. We always set it to "unknown". 
+ sysctl="sysctl -n hw.machine_arch" + UNAME_MACHINE_ARCH=`(uname -p 2>/dev/null || \ + /sbin/$sysctl 2>/dev/null || \ + /usr/sbin/$sysctl 2>/dev/null || \ + echo unknown)` + case "${UNAME_MACHINE_ARCH}" in + armeb) machine=armeb-unknown ;; + arm*) machine=arm-unknown ;; + sh3el) machine=shl-unknown ;; + sh3eb) machine=sh-unknown ;; + sh5el) machine=sh5le-unknown ;; + earmv*) + arch=`echo ${UNAME_MACHINE_ARCH} | sed -e 's,^e\(armv[0-9]\).*$,\1,'` + endian=`echo ${UNAME_MACHINE_ARCH} | sed -ne 's,^.*\(eb\)$,\1,p'` + machine=${arch}${endian}-unknown + ;; + *) machine=${UNAME_MACHINE_ARCH}-unknown ;; + esac + # The Operating System including object format, if it has switched + # to ELF recently (or will in the future) and ABI. + case "${UNAME_MACHINE_ARCH}" in + earm*) + os=netbsdelf + ;; + arm*|i386|m68k|ns32k|sh3*|sparc|vax) + eval $set_cc_for_build + if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ELF__ + then + # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout). + # Return netbsd for either. FIX? + os=netbsd + else + os=netbsdelf + fi + ;; + *) + os=netbsd + ;; + esac + # Determine ABI tags. + case "${UNAME_MACHINE_ARCH}" in + earm*) + expr='s/^earmv[0-9]/-eabi/;s/eb$//' + abi=`echo ${UNAME_MACHINE_ARCH} | sed -e "$expr"` + ;; + esac + # The OS release + # Debian GNU/NetBSD machines have a different userland, and + # thus, need a distinct triplet. However, they do not need + # kernel version information, so it can be replaced with a + # suitable tag, in the style of linux-gnu. + case "${UNAME_VERSION}" in + Debian*) + release='-gnu' + ;; + *) + release=`echo ${UNAME_RELEASE} | sed -e 's/[-_].*//' | cut -d. -f1,2` + ;; + esac + # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM: + # contains redundant information, the shorter form: + # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. 
+ echo "${machine}-${os}${release}${abi}" + exit ;; + *:Bitrig:*:*) + UNAME_MACHINE_ARCH=`arch | sed 's/Bitrig.//'` + echo ${UNAME_MACHINE_ARCH}-unknown-bitrig${UNAME_RELEASE} + exit ;; + *:OpenBSD:*:*) + UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'` + echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE} + exit ;; + *:LibertyBSD:*:*) + UNAME_MACHINE_ARCH=`arch | sed 's/^.*BSD\.//'` + echo ${UNAME_MACHINE_ARCH}-unknown-libertybsd${UNAME_RELEASE} + exit ;; + *:ekkoBSD:*:*) + echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE} + exit ;; + *:SolidBSD:*:*) + echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE} + exit ;; + macppc:MirBSD:*:*) + echo powerpc-unknown-mirbsd${UNAME_RELEASE} + exit ;; + *:MirBSD:*:*) + echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE} + exit ;; + *:Sortix:*:*) + echo ${UNAME_MACHINE}-unknown-sortix + exit ;; + alpha:OSF1:*:*) + case $UNAME_RELEASE in + *4.0) + UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'` + ;; + *5.*) + UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'` + ;; + esac + # According to Compaq, /usr/sbin/psrinfo has been available on + # OSF/1 and Tru64 systems produced since 1995. I hope that + # covers most systems running today. This code pipes the CPU + # types through head -n 1, so we only detect the type of CPU 0. 
+ ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1` + case "$ALPHA_CPU_TYPE" in + "EV4 (21064)") + UNAME_MACHINE=alpha ;; + "EV4.5 (21064)") + UNAME_MACHINE=alpha ;; + "LCA4 (21066/21068)") + UNAME_MACHINE=alpha ;; + "EV5 (21164)") + UNAME_MACHINE=alphaev5 ;; + "EV5.6 (21164A)") + UNAME_MACHINE=alphaev56 ;; + "EV5.6 (21164PC)") + UNAME_MACHINE=alphapca56 ;; + "EV5.7 (21164PC)") + UNAME_MACHINE=alphapca57 ;; + "EV6 (21264)") + UNAME_MACHINE=alphaev6 ;; + "EV6.7 (21264A)") + UNAME_MACHINE=alphaev67 ;; + "EV6.8CB (21264C)") + UNAME_MACHINE=alphaev68 ;; + "EV6.8AL (21264B)") + UNAME_MACHINE=alphaev68 ;; + "EV6.8CX (21264D)") + UNAME_MACHINE=alphaev68 ;; + "EV6.9A (21264/EV69A)") + UNAME_MACHINE=alphaev69 ;; + "EV7 (21364)") + UNAME_MACHINE=alphaev7 ;; + "EV7.9 (21364A)") + UNAME_MACHINE=alphaev79 ;; + esac + # A Pn.n version is a patched version. + # A Vn.n version is a released version. + # A Tn.n version is a released field test version. + # A Xn.n version is an unreleased experimental baselevel. + # 1.2 uses "1.2" for uname -r. + echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz` + # Reset EXIT trap before exiting to avoid spurious non-zero exit code. + exitcode=$? + trap '' 0 + exit $exitcode ;; + Alpha\ *:Windows_NT*:*) + # How do we know it's Interix rather than the generic POSIX subsystem? + # Should we change UNAME_MACHINE based on the output of uname instead + # of the specific Alpha model? 
+ echo alpha-pc-interix + exit ;; + 21064:Windows_NT:50:3) + echo alpha-dec-winnt3.5 + exit ;; + Amiga*:UNIX_System_V:4.0:*) + echo m68k-unknown-sysv4 + exit ;; + *:[Aa]miga[Oo][Ss]:*:*) + echo ${UNAME_MACHINE}-unknown-amigaos + exit ;; + *:[Mm]orph[Oo][Ss]:*:*) + echo ${UNAME_MACHINE}-unknown-morphos + exit ;; + *:OS/390:*:*) + echo i370-ibm-openedition + exit ;; + *:z/VM:*:*) + echo s390-ibm-zvmoe + exit ;; + *:OS400:*:*) + echo powerpc-ibm-os400 + exit ;; + arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) + echo arm-acorn-riscix${UNAME_RELEASE} + exit ;; + arm*:riscos:*:*|arm*:RISCOS:*:*) + echo arm-unknown-riscos + exit ;; + SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) + echo hppa1.1-hitachi-hiuxmpp + exit ;; + Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*) + # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE. + if test "`(/bin/universe) 2>/dev/null`" = att ; then + echo pyramid-pyramid-sysv3 + else + echo pyramid-pyramid-bsd + fi + exit ;; + NILE*:*:*:dcosx) + echo pyramid-pyramid-svr4 + exit ;; + DRS?6000:unix:4.0:6*) + echo sparc-icl-nx6 + exit ;; + DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*) + case `/usr/bin/uname -p` in + sparc) echo sparc-icl-nx7; exit ;; + esac ;; + s390x:SunOS:*:*) + echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + sun4H:SunOS:5.*:*) + echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) + echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*) + echo i386-pc-auroraux${UNAME_RELEASE} + exit ;; + i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*) + eval $set_cc_for_build + SUN_ARCH=i386 + # If there is a compiler, see if it is configured for 64-bit objects. + # Note that the Sun cc does not turn __LP64__ into 1 like gcc does. + # This test works for both compilers. 
+ if [ "$CC_FOR_BUILD" != no_compiler_found ]; then + if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \ + (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_64BIT_ARCH >/dev/null + then + SUN_ARCH=x86_64 + fi + fi + echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + sun4*:SunOS:6*:*) + # According to config.sub, this is the proper way to canonicalize + # SunOS6. Hard to guess exactly what SunOS6 will be like, but + # it's likely to be more like Solaris than SunOS4. + echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + sun4*:SunOS:*:*) + case "`/usr/bin/arch -k`" in + Series*|S4*) + UNAME_RELEASE=`uname -v` + ;; + esac + # Japanese Language versions have a version number like `4.1.3-JL'. + echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'` + exit ;; + sun3*:SunOS:*:*) + echo m68k-sun-sunos${UNAME_RELEASE} + exit ;; + sun*:*:4.2BSD:*) + UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null` + test "x${UNAME_RELEASE}" = x && UNAME_RELEASE=3 + case "`/bin/arch`" in + sun3) + echo m68k-sun-sunos${UNAME_RELEASE} + ;; + sun4) + echo sparc-sun-sunos${UNAME_RELEASE} + ;; + esac + exit ;; + aushp:SunOS:*:*) + echo sparc-auspex-sunos${UNAME_RELEASE} + exit ;; + # The situation for MiNT is a little confusing. The machine name + # can be virtually everything (everything which is not + # "atarist" or "atariste" at least should have a processor + # > m68000). The system name ranges from "MiNT" over "FreeMiNT" + # to the lowercase version "mint" (or "freemint"). Finally + # the system name "TOS" denotes a system which is actually not + # MiNT. But MiNT is downward compatible to TOS, so this should + # be no problem. 
+ atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) + echo m68k-atari-mint${UNAME_RELEASE} + exit ;; + atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) + echo m68k-atari-mint${UNAME_RELEASE} + exit ;; + *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) + echo m68k-atari-mint${UNAME_RELEASE} + exit ;; + milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) + echo m68k-milan-mint${UNAME_RELEASE} + exit ;; + hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) + echo m68k-hades-mint${UNAME_RELEASE} + exit ;; + *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) + echo m68k-unknown-mint${UNAME_RELEASE} + exit ;; + m68k:machten:*:*) + echo m68k-apple-machten${UNAME_RELEASE} + exit ;; + powerpc:machten:*:*) + echo powerpc-apple-machten${UNAME_RELEASE} + exit ;; + RISC*:Mach:*:*) + echo mips-dec-mach_bsd4.3 + exit ;; + RISC*:ULTRIX:*:*) + echo mips-dec-ultrix${UNAME_RELEASE} + exit ;; + VAX*:ULTRIX*:*:*) + echo vax-dec-ultrix${UNAME_RELEASE} + exit ;; + 2020:CLIX:*:* | 2430:CLIX:*:*) + echo clipper-intergraph-clix${UNAME_RELEASE} + exit ;; + mips:*:*:UMIPS | mips:*:*:RISCos) + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c +#ifdef __cplusplus +#include /* for printf() prototype */ + int main (int argc, char *argv[]) { +#else + int main (argc, argv) int argc; char *argv[]; { +#endif + #if defined (host_mips) && defined (MIPSEB) + #if defined (SYSTYPE_SYSV) + printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0); + #endif + #if defined (SYSTYPE_SVR4) + printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0); + #endif + #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD) + printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0); + #endif + #endif + exit (-1); + } +EOF + $CC_FOR_BUILD -o $dummy $dummy.c && + dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` && + SYSTEM_NAME=`$dummy $dummyarg` && + { echo "$SYSTEM_NAME"; exit; } + echo mips-mips-riscos${UNAME_RELEASE} + exit ;; + Motorola:PowerMAX_OS:*:*) + echo 
powerpc-motorola-powermax + exit ;; + Motorola:*:4.3:PL8-*) + echo powerpc-harris-powermax + exit ;; + Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*) + echo powerpc-harris-powermax + exit ;; + Night_Hawk:Power_UNIX:*:*) + echo powerpc-harris-powerunix + exit ;; + m88k:CX/UX:7*:*) + echo m88k-harris-cxux7 + exit ;; + m88k:*:4*:R4*) + echo m88k-motorola-sysv4 + exit ;; + m88k:*:3*:R3*) + echo m88k-motorola-sysv3 + exit ;; + AViiON:dgux:*:*) + # DG/UX returns AViiON for all architectures + UNAME_PROCESSOR=`/usr/bin/uname -p` + if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ] + then + if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \ + [ ${TARGET_BINARY_INTERFACE}x = x ] + then + echo m88k-dg-dgux${UNAME_RELEASE} + else + echo m88k-dg-dguxbcs${UNAME_RELEASE} + fi + else + echo i586-dg-dgux${UNAME_RELEASE} + fi + exit ;; + M88*:DolphinOS:*:*) # DolphinOS (SVR3) + echo m88k-dolphin-sysv3 + exit ;; + M88*:*:R3*:*) + # Delta 88k system running SVR3 + echo m88k-motorola-sysv3 + exit ;; + XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3) + echo m88k-tektronix-sysv3 + exit ;; + Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD) + echo m68k-tektronix-bsd + exit ;; + *:IRIX*:*:*) + echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'` + exit ;; + ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX. 
+ echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id + exit ;; # Note that: echo "'`uname -s`'" gives 'AIX ' + i*86:AIX:*:*) + echo i386-ibm-aix + exit ;; + ia64:AIX:*:*) + if [ -x /usr/bin/oslevel ] ; then + IBM_REV=`/usr/bin/oslevel` + else + IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} + fi + echo ${UNAME_MACHINE}-ibm-aix${IBM_REV} + exit ;; + *:AIX:2:3) + if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c + #include + + main() + { + if (!__power_pc()) + exit(1); + puts("powerpc-ibm-aix3.2.5"); + exit(0); + } +EOF + if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` + then + echo "$SYSTEM_NAME" + else + echo rs6000-ibm-aix3.2.5 + fi + elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then + echo rs6000-ibm-aix3.2.4 + else + echo rs6000-ibm-aix3.2 + fi + exit ;; + *:AIX:*:[4567]) + IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'` + if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then + IBM_ARCH=rs6000 + else + IBM_ARCH=powerpc + fi + if [ -x /usr/bin/lslpp ] ; then + IBM_REV=`/usr/bin/lslpp -Lqc bos.rte.libc | + awk -F: '{ print $3 }' | sed s/[0-9]*$/0/` + else + IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} + fi + echo ${IBM_ARCH}-ibm-aix${IBM_REV} + exit ;; + *:AIX:*:*) + echo rs6000-ibm-aix + exit ;; + ibmrt:4.4BSD:*|romp-ibm:BSD:*) + echo romp-ibm-bsd4.4 + exit ;; + ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and + echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to + exit ;; # report: romp-ibm BSD 4.3 + *:BOSX:*:*) + echo rs6000-bull-bosx + exit ;; + DPX/2?00:B.O.S.:*:*) + echo m68k-bull-sysv3 + exit ;; + 9000/[34]??:4.3bsd:1.*:*) + echo m68k-hp-bsd + exit ;; + hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*) + echo m68k-hp-bsd4.4 + exit ;; + 9000/[34678]??:HP-UX:*:*) + HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` + case "${UNAME_MACHINE}" in + 9000/31? ) HP_ARCH=m68000 ;; + 9000/[34]?? 
) HP_ARCH=m68k ;; + 9000/[678][0-9][0-9]) + if [ -x /usr/bin/getconf ]; then + sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null` + sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` + case "${sc_cpu_version}" in + 523) HP_ARCH=hppa1.0 ;; # CPU_PA_RISC1_0 + 528) HP_ARCH=hppa1.1 ;; # CPU_PA_RISC1_1 + 532) # CPU_PA_RISC2_0 + case "${sc_kernel_bits}" in + 32) HP_ARCH=hppa2.0n ;; + 64) HP_ARCH=hppa2.0w ;; + '') HP_ARCH=hppa2.0 ;; # HP-UX 10.20 + esac ;; + esac + fi + if [ "${HP_ARCH}" = "" ]; then + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c + + #define _HPUX_SOURCE + #include + #include + + int main () + { + #if defined(_SC_KERNEL_BITS) + long bits = sysconf(_SC_KERNEL_BITS); + #endif + long cpu = sysconf (_SC_CPU_VERSION); + + switch (cpu) + { + case CPU_PA_RISC1_0: puts ("hppa1.0"); break; + case CPU_PA_RISC1_1: puts ("hppa1.1"); break; + case CPU_PA_RISC2_0: + #if defined(_SC_KERNEL_BITS) + switch (bits) + { + case 64: puts ("hppa2.0w"); break; + case 32: puts ("hppa2.0n"); break; + default: puts ("hppa2.0"); break; + } break; + #else /* !defined(_SC_KERNEL_BITS) */ + puts ("hppa2.0"); break; + #endif + default: puts ("hppa1.0"); break; + } + exit (0); + } +EOF + (CCOPTS="" $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy` + test -z "$HP_ARCH" && HP_ARCH=hppa + fi ;; + esac + if [ ${HP_ARCH} = hppa2.0w ] + then + eval $set_cc_for_build + + # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating + # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler + # generating 64-bit code. 
GNU and HP use different nomenclature: + # + # $ CC_FOR_BUILD=cc ./config.guess + # => hppa2.0w-hp-hpux11.23 + # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess + # => hppa64-hp-hpux11.23 + + if echo __LP64__ | (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | + grep -q __LP64__ + then + HP_ARCH=hppa2.0w + else + HP_ARCH=hppa64 + fi + fi + echo ${HP_ARCH}-hp-hpux${HPUX_REV} + exit ;; + ia64:HP-UX:*:*) + HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` + echo ia64-hp-hpux${HPUX_REV} + exit ;; + 3050*:HI-UX:*:*) + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c + #include + int + main () + { + long cpu = sysconf (_SC_CPU_VERSION); + /* The order matters, because CPU_IS_HP_MC68K erroneously returns + true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct + results, however. */ + if (CPU_IS_PA_RISC (cpu)) + { + switch (cpu) + { + case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break; + case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break; + case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break; + default: puts ("hppa-hitachi-hiuxwe2"); break; + } + } + else if (CPU_IS_HP_MC68K (cpu)) + puts ("m68k-hitachi-hiuxwe2"); + else puts ("unknown-hitachi-hiuxwe2"); + exit (0); + } +EOF + $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` && + { echo "$SYSTEM_NAME"; exit; } + echo unknown-hitachi-hiuxwe2 + exit ;; + 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* ) + echo hppa1.1-hp-bsd + exit ;; + 9000/8??:4.3bsd:*:*) + echo hppa1.0-hp-bsd + exit ;; + *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*) + echo hppa1.0-hp-mpeix + exit ;; + hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* ) + echo hppa1.1-hp-osf + exit ;; + hp8??:OSF1:*:*) + echo hppa1.0-hp-osf + exit ;; + i*86:OSF1:*:*) + if [ -x /usr/sbin/sysversion ] ; then + echo ${UNAME_MACHINE}-unknown-osf1mk + else + echo ${UNAME_MACHINE}-unknown-osf1 + fi + exit ;; + parisc*:Lites*:*:*) + echo hppa1.1-hp-lites + exit ;; + C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*) + echo c1-convex-bsd + exit ;; + C2*:ConvexOS:*:* | 
convex:ConvexOS:C2*:*) + if getsysinfo -f scalar_acc + then echo c32-convex-bsd + else echo c2-convex-bsd + fi + exit ;; + C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*) + echo c34-convex-bsd + exit ;; + C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*) + echo c38-convex-bsd + exit ;; + C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*) + echo c4-convex-bsd + exit ;; + CRAY*Y-MP:*:*:*) + echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*[A-Z]90:*:*:*) + echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \ + | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \ + -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \ + -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*TS:*:*:*) + echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*T3E:*:*:*) + echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + CRAY*SV1:*:*:*) + echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + *:UNICOS/mp:*:*) + echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' + exit ;; + F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) + FUJITSU_PROC=`uname -m | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz` + FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'` + FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'` + echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" + exit ;; + 5000:UNIX_System_V:4.*:*) + FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'` + FUJITSU_REL=`echo ${UNAME_RELEASE} | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/ /_/'` + echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" + exit ;; + i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) + echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE} + exit ;; + sparc*:BSD/OS:*:*) + echo sparc-unknown-bsdi${UNAME_RELEASE} + exit ;; + *:BSD/OS:*:*) + echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE} + exit ;; + 
*:FreeBSD:*:*) + UNAME_PROCESSOR=`/usr/bin/uname -p` + case ${UNAME_PROCESSOR} in + amd64) + UNAME_PROCESSOR=x86_64 ;; + i386) + UNAME_PROCESSOR=i586 ;; + esac + echo ${UNAME_PROCESSOR}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` + exit ;; + i*:CYGWIN*:*) + echo ${UNAME_MACHINE}-pc-cygwin + exit ;; + *:MINGW64*:*) + echo ${UNAME_MACHINE}-pc-mingw64 + exit ;; + *:MINGW*:*) + echo ${UNAME_MACHINE}-pc-mingw32 + exit ;; + *:MSYS*:*) + echo ${UNAME_MACHINE}-pc-msys + exit ;; + i*:windows32*:*) + # uname -m includes "-pc" on this system. + echo ${UNAME_MACHINE}-mingw32 + exit ;; + i*:PW*:*) + echo ${UNAME_MACHINE}-pc-pw32 + exit ;; + *:Interix*:*) + case ${UNAME_MACHINE} in + x86) + echo i586-pc-interix${UNAME_RELEASE} + exit ;; + authenticamd | genuineintel | EM64T) + echo x86_64-unknown-interix${UNAME_RELEASE} + exit ;; + IA64) + echo ia64-unknown-interix${UNAME_RELEASE} + exit ;; + esac ;; + [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*) + echo i${UNAME_MACHINE}-pc-mks + exit ;; + 8664:Windows_NT:*) + echo x86_64-pc-mks + exit ;; + i*:Windows_NT*:* | Pentium*:Windows_NT*:*) + # How do we know it's Interix rather than the generic POSIX subsystem? + # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we + # UNAME_MACHINE based on the output of uname instead of i386? 
+ echo i586-pc-interix + exit ;; + i*:UWIN*:*) + echo ${UNAME_MACHINE}-pc-uwin + exit ;; + amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*) + echo x86_64-unknown-cygwin + exit ;; + p*:CYGWIN*:*) + echo powerpcle-unknown-cygwin + exit ;; + prep*:SunOS:5.*:*) + echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; + *:GNU:*:*) + # the GNU system + echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-${LIBC}`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` + exit ;; + *:GNU/*:*:*) + # other systems with GNU libc and userland + echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr "[:upper:]" "[:lower:]"``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-${LIBC} + exit ;; + i*86:Minix:*:*) + echo ${UNAME_MACHINE}-pc-minix + exit ;; + aarch64:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + aarch64_be:Linux:*:*) + UNAME_MACHINE=aarch64_be + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + alpha:Linux:*:*) + case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in + EV5) UNAME_MACHINE=alphaev5 ;; + EV56) UNAME_MACHINE=alphaev56 ;; + PCA56) UNAME_MACHINE=alphapca56 ;; + PCA57) UNAME_MACHINE=alphapca56 ;; + EV6) UNAME_MACHINE=alphaev6 ;; + EV67) UNAME_MACHINE=alphaev67 ;; + EV68*) UNAME_MACHINE=alphaev68 ;; + esac + objdump --private-headers /bin/sh | grep -q ld.so.1 + if test "$?" 
= 0 ; then LIBC=gnulibc1 ; fi + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + arc:Linux:*:* | arceb:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + arm*:Linux:*:*) + eval $set_cc_for_build + if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ARM_EABI__ + then + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + else + if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ARM_PCS_VFP + then + echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabi + else + echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabihf + fi + fi + exit ;; + avr32*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + cris:Linux:*:*) + echo ${UNAME_MACHINE}-axis-linux-${LIBC} + exit ;; + crisv32:Linux:*:*) + echo ${UNAME_MACHINE}-axis-linux-${LIBC} + exit ;; + e2k:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + frv:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + hexagon:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + i*86:Linux:*:*) + echo ${UNAME_MACHINE}-pc-linux-${LIBC} + exit ;; + ia64:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + k1om:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + m32r*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + m68*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + mips:Linux:*:* | mips64:Linux:*:*) + eval $set_cc_for_build + sed 's/^ //' << EOF >$dummy.c + #undef CPU + #undef ${UNAME_MACHINE} + #undef ${UNAME_MACHINE}el + #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) + CPU=${UNAME_MACHINE}el + #else + #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) + CPU=${UNAME_MACHINE} + #else + CPU= + #endif + #endif +EOF + eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'` + test x"${CPU}" != x && { echo "${CPU}-unknown-linux-${LIBC}"; exit; } + ;; + mips64el:Linux:*:*) + echo 
${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + openrisc*:Linux:*:*) + echo or1k-unknown-linux-${LIBC} + exit ;; + or32:Linux:*:* | or1k*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + padre:Linux:*:*) + echo sparc-unknown-linux-${LIBC} + exit ;; + parisc64:Linux:*:* | hppa64:Linux:*:*) + echo hppa64-unknown-linux-${LIBC} + exit ;; + parisc:Linux:*:* | hppa:Linux:*:*) + # Look for CPU level + case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in + PA7*) echo hppa1.1-unknown-linux-${LIBC} ;; + PA8*) echo hppa2.0-unknown-linux-${LIBC} ;; + *) echo hppa-unknown-linux-${LIBC} ;; + esac + exit ;; + ppc64:Linux:*:*) + echo powerpc64-unknown-linux-${LIBC} + exit ;; + ppc:Linux:*:*) + echo powerpc-unknown-linux-${LIBC} + exit ;; + ppc64le:Linux:*:*) + echo powerpc64le-unknown-linux-${LIBC} + exit ;; + ppcle:Linux:*:*) + echo powerpcle-unknown-linux-${LIBC} + exit ;; + riscv32:Linux:*:* | riscv64:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + s390:Linux:*:* | s390x:Linux:*:*) + echo ${UNAME_MACHINE}-ibm-linux-${LIBC} + exit ;; + sh64*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + sh*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + sparc:Linux:*:* | sparc64:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + tile*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + vax:Linux:*:*) + echo ${UNAME_MACHINE}-dec-linux-${LIBC} + exit ;; + x86_64:Linux:*:*) + echo ${UNAME_MACHINE}-pc-linux-${LIBC} + exit ;; + xtensa*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + i*86:DYNIX/ptx:4*:*) + # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. + # earlier versions are messed up and put the nodename in both + # sysname and nodename. + echo i386-sequent-sysv4 + exit ;; + i*86:UNIX_SV:4.2MP:2.*) + # Unixware is an offshoot of SVR4, but it has its own version + # number series starting with 2... 
+ # I am not positive that other SVR4 systems won't match this, + # I just have to hope. -- rms. + # Use sysv4.2uw... so that sysv4* matches it. + echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION} + exit ;; + i*86:OS/2:*:*) + # If we were able to find `uname', then EMX Unix compatibility + # is probably installed. + echo ${UNAME_MACHINE}-pc-os2-emx + exit ;; + i*86:XTS-300:*:STOP) + echo ${UNAME_MACHINE}-unknown-stop + exit ;; + i*86:atheos:*:*) + echo ${UNAME_MACHINE}-unknown-atheos + exit ;; + i*86:syllable:*:*) + echo ${UNAME_MACHINE}-pc-syllable + exit ;; + i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*) + echo i386-unknown-lynxos${UNAME_RELEASE} + exit ;; + i*86:*DOS:*:*) + echo ${UNAME_MACHINE}-pc-msdosdjgpp + exit ;; + i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*) + UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'` + if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then + echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL} + else + echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL} + fi + exit ;; + i*86:*:5:[678]*) + # UnixWare 7.x, OpenUNIX and OpenServer 6. 
+ case `/bin/uname -X | grep "^Machine"` in + *486*) UNAME_MACHINE=i486 ;; + *Pentium) UNAME_MACHINE=i586 ;; + *Pent*|*Celeron) UNAME_MACHINE=i686 ;; + esac + echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION} + exit ;; + i*86:*:3.2:*) + if test -f /usr/options/cb.name; then + UNAME_REL=`sed -n 's/.*Version //p' /dev/null >/dev/null ; then + UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')` + (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486 + (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \ + && UNAME_MACHINE=i586 + (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \ + && UNAME_MACHINE=i686 + (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \ + && UNAME_MACHINE=i686 + echo ${UNAME_MACHINE}-pc-sco$UNAME_REL + else + echo ${UNAME_MACHINE}-pc-sysv32 + fi + exit ;; + pc:*:*:*) + # Left here for compatibility: + # uname -m prints for DJGPP always 'pc', but it prints nothing about + # the processor, so we play safe by assuming i586. + # Note: whatever this is, it MUST be the same as what config.sub + # prints for the "djgpp" host, or else GDB configure will decide that + # this is a cross-build. + echo i586-pc-msdosdjgpp + exit ;; + Intel:Mach:3*:*) + echo i386-pc-mach3 + exit ;; + paragon:*:*:*) + echo i860-intel-osf1 + exit ;; + i860:*:4.*:*) # i860-SVR4 + if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then + echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4 + else # Add other i860-SVR4 vendors below as they are discovered. 
+ echo i860-unknown-sysv${UNAME_RELEASE} # Unknown i860-SVR4 + fi + exit ;; + mini*:CTIX:SYS*5:*) + # "miniframe" + echo m68010-convergent-sysv + exit ;; + mc68k:UNIX:SYSTEM5:3.51m) + echo m68k-convergent-sysv + exit ;; + M680?0:D-NIX:5.3:*) + echo m68k-diab-dnix + exit ;; + M68*:*:R3V[5678]*:*) + test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;; + 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0) + OS_REL='' + test -r /etc/.relid \ + && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` + /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ + && { echo i486-ncr-sysv4.3${OS_REL}; exit; } + /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ + && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; + 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) + /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ + && { echo i486-ncr-sysv4; exit; } ;; + NCR*:*:4.2:* | MPRAS*:*:4.2:*) + OS_REL='.3' + test -r /etc/.relid \ + && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` + /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ + && { echo i486-ncr-sysv4.3${OS_REL}; exit; } + /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ + && { echo i586-ncr-sysv4.3${OS_REL}; exit; } + /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \ + && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; + m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) + echo m68k-unknown-lynxos${UNAME_RELEASE} + exit ;; + mc68030:UNIX_System_V:4.*:*) + echo m68k-atari-sysv4 + exit ;; + TSUNAMI:LynxOS:2.*:*) + echo sparc-unknown-lynxos${UNAME_RELEASE} + exit ;; + rs6000:LynxOS:2.*:*) + echo rs6000-unknown-lynxos${UNAME_RELEASE} + exit ;; + PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*) + echo powerpc-unknown-lynxos${UNAME_RELEASE} + exit ;; + SM[BE]S:UNIX_SV:*:*) + echo mips-dde-sysv${UNAME_RELEASE} + exit ;; + 
RM*:ReliantUNIX-*:*:*) + echo mips-sni-sysv4 + exit ;; + RM*:SINIX-*:*:*) + echo mips-sni-sysv4 + exit ;; + *:SINIX-*:*:*) + if uname -p 2>/dev/null >/dev/null ; then + UNAME_MACHINE=`(uname -p) 2>/dev/null` + echo ${UNAME_MACHINE}-sni-sysv4 + else + echo ns32k-sni-sysv + fi + exit ;; + PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort + # says + echo i586-unisys-sysv4 + exit ;; + *:UNIX_System_V:4*:FTX*) + # From Gerald Hewes . + # How about differentiating between stratus architectures? -djm + echo hppa1.1-stratus-sysv4 + exit ;; + *:*:*:FTX*) + # From seanf@swdc.stratus.com. + echo i860-stratus-sysv4 + exit ;; + i*86:VOS:*:*) + # From Paul.Green@stratus.com. + echo ${UNAME_MACHINE}-stratus-vos + exit ;; + *:VOS:*:*) + # From Paul.Green@stratus.com. + echo hppa1.1-stratus-vos + exit ;; + mc68*:A/UX:*:*) + echo m68k-apple-aux${UNAME_RELEASE} + exit ;; + news*:NEWS-OS:6*:*) + echo mips-sony-newsos6 + exit ;; + R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) + if [ -d /usr/nec ]; then + echo mips-nec-sysv${UNAME_RELEASE} + else + echo mips-unknown-sysv${UNAME_RELEASE} + fi + exit ;; + BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. + echo powerpc-be-beos + exit ;; + BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only. + echo powerpc-apple-beos + exit ;; + BePC:BeOS:*:*) # BeOS running on Intel PC compatible. + echo i586-pc-beos + exit ;; + BePC:Haiku:*:*) # Haiku running on Intel PC compatible. 
+ echo i586-pc-haiku + exit ;; + x86_64:Haiku:*:*) + echo x86_64-unknown-haiku + exit ;; + SX-4:SUPER-UX:*:*) + echo sx4-nec-superux${UNAME_RELEASE} + exit ;; + SX-5:SUPER-UX:*:*) + echo sx5-nec-superux${UNAME_RELEASE} + exit ;; + SX-6:SUPER-UX:*:*) + echo sx6-nec-superux${UNAME_RELEASE} + exit ;; + SX-7:SUPER-UX:*:*) + echo sx7-nec-superux${UNAME_RELEASE} + exit ;; + SX-8:SUPER-UX:*:*) + echo sx8-nec-superux${UNAME_RELEASE} + exit ;; + SX-8R:SUPER-UX:*:*) + echo sx8r-nec-superux${UNAME_RELEASE} + exit ;; + SX-ACE:SUPER-UX:*:*) + echo sxace-nec-superux${UNAME_RELEASE} + exit ;; + Power*:Rhapsody:*:*) + echo powerpc-apple-rhapsody${UNAME_RELEASE} + exit ;; + *:Rhapsody:*:*) + echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE} + exit ;; + *:Darwin:*:*) + UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown + eval $set_cc_for_build + if test "$UNAME_PROCESSOR" = unknown ; then + UNAME_PROCESSOR=powerpc + fi + if test `echo "$UNAME_RELEASE" | sed -e 's/\..*//'` -le 10 ; then + if [ "$CC_FOR_BUILD" != no_compiler_found ]; then + if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ + (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_64BIT_ARCH >/dev/null + then + case $UNAME_PROCESSOR in + i386) UNAME_PROCESSOR=x86_64 ;; + powerpc) UNAME_PROCESSOR=powerpc64 ;; + esac + fi + fi + elif test "$UNAME_PROCESSOR" = i386 ; then + # Avoid executing cc on OS X 10.9, as it ships with a stub + # that puts up a graphical alert prompting to install + # developer tools. Any system running Mac OS X 10.7 or + # later (Darwin 11 and later) is required to have a 64-bit + # processor. This is not true of the ARM version of Darwin + # that Apple uses in portable devices. 
+ UNAME_PROCESSOR=x86_64 + fi + echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} + exit ;; + *:procnto*:*:* | *:QNX:[0123456789]*:*) + UNAME_PROCESSOR=`uname -p` + if test "$UNAME_PROCESSOR" = x86; then + UNAME_PROCESSOR=i386 + UNAME_MACHINE=pc + fi + echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE} + exit ;; + *:QNX:*:4*) + echo i386-pc-qnx + exit ;; + NEO-?:NONSTOP_KERNEL:*:*) + echo neo-tandem-nsk${UNAME_RELEASE} + exit ;; + NSE-*:NONSTOP_KERNEL:*:*) + echo nse-tandem-nsk${UNAME_RELEASE} + exit ;; + NSR-?:NONSTOP_KERNEL:*:*) + echo nsr-tandem-nsk${UNAME_RELEASE} + exit ;; + NSX-?:NONSTOP_KERNEL:*:*) + echo nsx-tandem-nsk${UNAME_RELEASE} + exit ;; + *:NonStop-UX:*:*) + echo mips-compaq-nonstopux + exit ;; + BS2000:POSIX*:*:*) + echo bs2000-siemens-sysv + exit ;; + DS/*:UNIX_System_V:*:*) + echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE} + exit ;; + *:Plan9:*:*) + # "uname -m" is not consistent, so use $cputype instead. 386 + # is converted to i386 for consistency with other x86 + # operating systems. 
+ if test "$cputype" = 386; then + UNAME_MACHINE=i386 + else + UNAME_MACHINE="$cputype" + fi + echo ${UNAME_MACHINE}-unknown-plan9 + exit ;; + *:TOPS-10:*:*) + echo pdp10-unknown-tops10 + exit ;; + *:TENEX:*:*) + echo pdp10-unknown-tenex + exit ;; + KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*) + echo pdp10-dec-tops20 + exit ;; + XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*) + echo pdp10-xkl-tops20 + exit ;; + *:TOPS-20:*:*) + echo pdp10-unknown-tops20 + exit ;; + *:ITS:*:*) + echo pdp10-unknown-its + exit ;; + SEI:*:*:SEIUX) + echo mips-sei-seiux${UNAME_RELEASE} + exit ;; + *:DragonFly:*:*) + echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` + exit ;; + *:*VMS:*:*) + UNAME_MACHINE=`(uname -p) 2>/dev/null` + case "${UNAME_MACHINE}" in + A*) echo alpha-dec-vms ; exit ;; + I*) echo ia64-dec-vms ; exit ;; + V*) echo vax-dec-vms ; exit ;; + esac ;; + *:XENIX:*:SysV) + echo i386-pc-xenix + exit ;; + i*86:skyos:*:*) + echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE} | sed -e 's/ .*$//'` + exit ;; + i*86:rdos:*:*) + echo ${UNAME_MACHINE}-pc-rdos + exit ;; + i*86:AROS:*:*) + echo ${UNAME_MACHINE}-pc-aros + exit ;; + x86_64:VMkernel:*:*) + echo ${UNAME_MACHINE}-unknown-esx + exit ;; + amd64:Isilon\ OneFS:*:*) + echo x86_64-unknown-onefs + exit ;; +esac + +cat >&2 </dev/null || echo unknown` +uname -r = `(uname -r) 2>/dev/null || echo unknown` +uname -s = `(uname -s) 2>/dev/null || echo unknown` +uname -v = `(uname -v) 2>/dev/null || echo unknown` + +/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null` +/bin/uname -X = `(/bin/uname -X) 2>/dev/null` + +hostinfo = `(hostinfo) 2>/dev/null` +/bin/universe = `(/bin/universe) 2>/dev/null` +/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null` +/bin/arch = `(/bin/arch) 2>/dev/null` +/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null` +/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null` + +UNAME_MACHINE = ${UNAME_MACHINE} +UNAME_RELEASE = ${UNAME_RELEASE} +UNAME_SYSTEM = 
${UNAME_SYSTEM} +UNAME_VERSION = ${UNAME_VERSION} +EOF + +exit 1 + +# Local variables: +# eval: (add-hook 'write-file-hooks 'time-stamp) +# time-stamp-start: "timestamp='" +# time-stamp-format: "%:y-%02m-%02d" +# time-stamp-end: "'" +# End: diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/config.sub cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/config.sub --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/config.sub 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/config.sub 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,1836 @@ +#! /bin/sh +# Configuration validation subroutine script. +# Copyright 1992-2017 Free Software Foundation, Inc. + +timestamp='2017-04-02' + +# This file is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, see . +# +# As a special exception to the GNU General Public License, if you +# distribute this file as part of a program that contains a +# configuration script generated by Autoconf, you may include it under +# the same distribution terms that you use for the rest of that +# program. This Exception is an additional permission under section 7 +# of the GNU General Public License, version 3 ("GPLv3"). + + +# Please send patches to . +# +# Configuration subroutine to validate and canonicalize a configuration type. +# Supply the specified configuration type as an argument. 
+# If it is invalid, we print an error message on stderr and exit with code 1. +# Otherwise, we print the canonical config type on stdout and succeed. + +# You can get the latest version of this script from: +# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub + +# This file is supposed to be the same for all GNU packages +# and recognize all the CPU types, system types and aliases +# that are meaningful with *any* GNU software. +# Each package is responsible for reporting which valid configurations +# it does not support. The user should be able to distinguish +# a failure to support a valid configuration from a meaningless +# configuration. + +# The goal of this file is to map all the various variations of a given +# machine specification into a single specification in the form: +# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM +# or in some cases, the newer four-part form: +# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM +# It is wrong to echo any other type of specification. + +me=`echo "$0" | sed -e 's,.*/,,'` + +usage="\ +Usage: $0 [OPTION] CPU-MFR-OPSYS or ALIAS + +Canonicalize a configuration name. + +Operation modes: + -h, --help print this help, then exit + -t, --time-stamp print date of last modification, then exit + -v, --version print version number, then exit + +Report bugs and patches to ." + +version="\ +GNU config.sub ($timestamp) + +Copyright 1992-2017 Free Software Foundation, Inc. + +This is free software; see the source for copying conditions. There is NO +warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." + +help=" +Try \`$me --help' for more information." + +# Parse command line +while test $# -gt 0 ; do + case $1 in + --time-stamp | --time* | -t ) + echo "$timestamp" ; exit ;; + --version | -v ) + echo "$version" ; exit ;; + --help | --h* | -h ) + echo "$usage"; exit ;; + -- ) # Stop option processing + shift; break ;; + - ) # Use stdin as input. 
+ break ;; + -* ) + echo "$me: invalid option $1$help" + exit 1 ;; + + *local*) + # First pass through any local machine types. + echo $1 + exit ;; + + * ) + break ;; + esac +done + +case $# in + 0) echo "$me: missing argument$help" >&2 + exit 1;; + 1) ;; + *) echo "$me: too many arguments$help" >&2 + exit 1;; +esac + +# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any). +# Here we must recognize all the valid KERNEL-OS combinations. +maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` +case $maybe_os in + nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \ + linux-musl* | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \ + knetbsd*-gnu* | netbsd*-gnu* | netbsd*-eabi* | \ + kopensolaris*-gnu* | cloudabi*-eabi* | \ + storm-chaos* | os2-emx* | rtmk-nova*) + os=-$maybe_os + basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` + ;; + android-linux) + os=-linux-android + basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`-unknown + ;; + *) + basic_machine=`echo $1 | sed 's/-[^-]*$//'` + if [ $basic_machine != $1 ] + then os=`echo $1 | sed 's/.*-/-/'` + else os=; fi + ;; +esac + +### Let's recognize common machines as not being operating systems so +### that things like config.sub decstation-3100 work. We also +### recognize some manufacturers as not being operating systems, so we +### can provide default operating systems below. +case $os in + -sun*os*) + # Prevent following clause from handling this invalid input. 
+ ;; + -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \ + -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \ + -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \ + -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\ + -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \ + -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \ + -apple | -axis | -knuth | -cray | -microblaze*) + os= + basic_machine=$1 + ;; + -bluegene*) + os=-cnk + ;; + -sim | -cisco | -oki | -wec | -winbond) + os= + basic_machine=$1 + ;; + -scout) + ;; + -wrs) + os=-vxworks + basic_machine=$1 + ;; + -chorusos*) + os=-chorusos + basic_machine=$1 + ;; + -chorusrdb) + os=-chorusrdb + basic_machine=$1 + ;; + -hiux*) + os=-hiuxwe2 + ;; + -sco6) + os=-sco5v6 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco5) + os=-sco3.2v5 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco4) + os=-sco3.2v4 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco3.2.[4-9]*) + os=`echo $os | sed -e 's/sco3.2./sco3.2v/'` + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco3.2v[4-9]*) + # Don't forget version if it is 3.2v4 or newer. + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco5v6*) + # Don't forget version if it is 3.2v4 or newer. 
+ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -sco*) + os=-sco3.2v2 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -udk*) + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -isc) + os=-isc2.2 + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -clix*) + basic_machine=clipper-intergraph + ;; + -isc*) + basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` + ;; + -lynx*178) + os=-lynxos178 + ;; + -lynx*5) + os=-lynxos5 + ;; + -lynx*) + os=-lynxos + ;; + -ptx*) + basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'` + ;; + -windowsnt*) + os=`echo $os | sed -e 's/windowsnt/winnt/'` + ;; + -psos*) + os=-psos + ;; + -mint | -mint[0-9]*) + basic_machine=m68k-atari + os=-mint + ;; +esac + +# Decode aliases for certain CPU-COMPANY combinations. +case $basic_machine in + # Recognize the basic CPU types without company name. + # Some are omitted here because they have special meanings below. + 1750a | 580 \ + | a29k \ + | aarch64 | aarch64_be \ + | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ + | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ + | am33_2.0 \ + | arc | arceb \ + | arm | arm[bl]e | arme[lb] | armv[2-8] | armv[3-8][lb] | armv7[arm] \ + | avr | avr32 \ + | ba \ + | be32 | be64 \ + | bfin \ + | c4x | c8051 | clipper \ + | d10v | d30v | dlx | dsp16xx \ + | e2k | epiphany \ + | fido | fr30 | frv | ft32 \ + | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ + | hexagon \ + | i370 | i860 | i960 | ia16 | ia64 \ + | ip2k | iq2000 \ + | k1om \ + | le32 | le64 \ + | lm32 \ + | m32c | m32r | m32rle | m68000 | m68k | m88k \ + | maxq | mb | microblaze | microblazeel | mcore | mep | metag \ + | mips | mipsbe | mipseb | mipsel | mipsle \ + | mips16 \ + | mips64 | mips64el \ + | mips64octeon | mips64octeonel \ + | mips64orion | mips64orionel \ + | mips64r5900 | mips64r5900el \ + | mips64vr | mips64vrel \ + | mips64vr4100 | mips64vr4100el \ + | mips64vr4300 | mips64vr4300el 
\ + | mips64vr5000 | mips64vr5000el \ + | mips64vr5900 | mips64vr5900el \ + | mipsisa32 | mipsisa32el \ + | mipsisa32r2 | mipsisa32r2el \ + | mipsisa32r6 | mipsisa32r6el \ + | mipsisa64 | mipsisa64el \ + | mipsisa64r2 | mipsisa64r2el \ + | mipsisa64r6 | mipsisa64r6el \ + | mipsisa64sb1 | mipsisa64sb1el \ + | mipsisa64sr71k | mipsisa64sr71kel \ + | mipsr5900 | mipsr5900el \ + | mipstx39 | mipstx39el \ + | mn10200 | mn10300 \ + | moxie \ + | mt \ + | msp430 \ + | nds32 | nds32le | nds32be \ + | nios | nios2 | nios2eb | nios2el \ + | ns16k | ns32k \ + | open8 | or1k | or1knd | or32 \ + | pdp10 | pdp11 | pj | pjl \ + | powerpc | powerpc64 | powerpc64le | powerpcle \ + | pru \ + | pyramid \ + | riscv32 | riscv64 \ + | rl78 | rx \ + | score \ + | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[234]eb | sheb | shbe | shle | sh[1234]le | sh3ele \ + | sh64 | sh64le \ + | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \ + | sparcv8 | sparcv9 | sparcv9b | sparcv9v \ + | spu \ + | tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \ + | ubicom32 \ + | v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \ + | visium \ + | wasm32 \ + | we32k \ + | x86 | xc16x | xstormy16 | xtensa \ + | z8k | z80) + basic_machine=$basic_machine-unknown + ;; + c54x) + basic_machine=tic54x-unknown + ;; + c55x) + basic_machine=tic55x-unknown + ;; + c6x) + basic_machine=tic6x-unknown + ;; + leon|leon[3-9]) + basic_machine=sparc-$basic_machine + ;; + m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | nvptx | picochip) + basic_machine=$basic_machine-unknown + os=-none + ;; + m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k) + ;; + ms1) + basic_machine=mt-unknown + ;; + + strongarm | thumb | xscale) + basic_machine=arm-unknown + ;; + xgate) + basic_machine=$basic_machine-unknown + os=-none + ;; + xscaleeb) + basic_machine=armeb-unknown + ;; + + xscaleel) + basic_machine=armel-unknown + ;; + + # We use `pc' rather than `unknown' + # because (1) that's what 
they normally are, and + # (2) the word "unknown" tends to confuse beginning users. + i*86 | x86_64) + basic_machine=$basic_machine-pc + ;; + # Object if more than one company name word. + *-*-*) + echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 + exit 1 + ;; + # Recognize the basic CPU types with company name. + 580-* \ + | a29k-* \ + | aarch64-* | aarch64_be-* \ + | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ + | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ + | alphapca5[67]-* | alpha64pca5[67]-* | arc-* | arceb-* \ + | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ + | avr-* | avr32-* \ + | ba-* \ + | be32-* | be64-* \ + | bfin-* | bs2000-* \ + | c[123]* | c30-* | [cjt]90-* | c4x-* \ + | c8051-* | clipper-* | craynv-* | cydra-* \ + | d10v-* | d30v-* | dlx-* \ + | e2k-* | elxsi-* \ + | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ + | h8300-* | h8500-* \ + | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ + | hexagon-* \ + | i*86-* | i860-* | i960-* | ia16-* | ia64-* \ + | ip2k-* | iq2000-* \ + | k1om-* \ + | le32-* | le64-* \ + | lm32-* \ + | m32c-* | m32r-* | m32rle-* \ + | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ + | m88110-* | m88k-* | maxq-* | mcore-* | metag-* \ + | microblaze-* | microblazeel-* \ + | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ + | mips16-* \ + | mips64-* | mips64el-* \ + | mips64octeon-* | mips64octeonel-* \ + | mips64orion-* | mips64orionel-* \ + | mips64r5900-* | mips64r5900el-* \ + | mips64vr-* | mips64vrel-* \ + | mips64vr4100-* | mips64vr4100el-* \ + | mips64vr4300-* | mips64vr4300el-* \ + | mips64vr5000-* | mips64vr5000el-* \ + | mips64vr5900-* | mips64vr5900el-* \ + | mipsisa32-* | mipsisa32el-* \ + | mipsisa32r2-* | mipsisa32r2el-* \ + | mipsisa32r6-* | mipsisa32r6el-* \ + | mipsisa64-* | mipsisa64el-* \ + | mipsisa64r2-* | mipsisa64r2el-* \ + | mipsisa64r6-* | mipsisa64r6el-* \ + | mipsisa64sb1-* | 
mipsisa64sb1el-* \ + | mipsisa64sr71k-* | mipsisa64sr71kel-* \ + | mipsr5900-* | mipsr5900el-* \ + | mipstx39-* | mipstx39el-* \ + | mmix-* \ + | mt-* \ + | msp430-* \ + | nds32-* | nds32le-* | nds32be-* \ + | nios-* | nios2-* | nios2eb-* | nios2el-* \ + | none-* | np1-* | ns16k-* | ns32k-* \ + | open8-* \ + | or1k*-* \ + | orion-* \ + | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ + | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \ + | pru-* \ + | pyramid-* \ + | riscv32-* | riscv64-* \ + | rl78-* | romp-* | rs6000-* | rx-* \ + | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \ + | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ + | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \ + | sparclite-* \ + | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx*-* \ + | tahoe-* \ + | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ + | tile*-* \ + | tron-* \ + | ubicom32-* \ + | v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \ + | vax-* \ + | visium-* \ + | wasm32-* \ + | we32k-* \ + | x86-* | x86_64-* | xc16x-* | xps100-* \ + | xstormy16-* | xtensa*-* \ + | ymp-* \ + | z8k-* | z80-*) + ;; + # Recognize the basic CPU types without company name, with glob match. + xtensa*) + basic_machine=$basic_machine-unknown + ;; + # Recognize the various machine names and aliases which stand + # for a CPU type and a company and sometimes even an OS. 
+ 386bsd) + basic_machine=i386-unknown + os=-bsd + ;; + 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) + basic_machine=m68000-att + ;; + 3b*) + basic_machine=we32k-att + ;; + a29khif) + basic_machine=a29k-amd + os=-udi + ;; + abacus) + basic_machine=abacus-unknown + ;; + adobe68k) + basic_machine=m68010-adobe + os=-scout + ;; + alliant | fx80) + basic_machine=fx80-alliant + ;; + altos | altos3068) + basic_machine=m68k-altos + ;; + am29k) + basic_machine=a29k-none + os=-bsd + ;; + amd64) + basic_machine=x86_64-pc + ;; + amd64-*) + basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + amdahl) + basic_machine=580-amdahl + os=-sysv + ;; + amiga | amiga-*) + basic_machine=m68k-unknown + ;; + amigaos | amigados) + basic_machine=m68k-unknown + os=-amigaos + ;; + amigaunix | amix) + basic_machine=m68k-unknown + os=-sysv4 + ;; + apollo68) + basic_machine=m68k-apollo + os=-sysv + ;; + apollo68bsd) + basic_machine=m68k-apollo + os=-bsd + ;; + aros) + basic_machine=i386-pc + os=-aros + ;; + asmjs) + basic_machine=asmjs-unknown + ;; + aux) + basic_machine=m68k-apple + os=-aux + ;; + balance) + basic_machine=ns32k-sequent + os=-dynix + ;; + blackfin) + basic_machine=bfin-unknown + os=-linux + ;; + blackfin-*) + basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'` + os=-linux + ;; + bluegene*) + basic_machine=powerpc-ibm + os=-cnk + ;; + c54x-*) + basic_machine=tic54x-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + c55x-*) + basic_machine=tic55x-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + c6x-*) + basic_machine=tic6x-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + c90) + basic_machine=c90-cray + os=-unicos + ;; + cegcc) + basic_machine=arm-unknown + os=-cegcc + ;; + convex-c1) + basic_machine=c1-convex + os=-bsd + ;; + convex-c2) + basic_machine=c2-convex + os=-bsd + ;; + convex-c32) + basic_machine=c32-convex + os=-bsd + ;; + convex-c34) + basic_machine=c34-convex + os=-bsd + ;; + convex-c38) + basic_machine=c38-convex + os=-bsd + ;; 
+ cray | j90) + basic_machine=j90-cray + os=-unicos + ;; + craynv) + basic_machine=craynv-cray + os=-unicosmp + ;; + cr16 | cr16-*) + basic_machine=cr16-unknown + os=-elf + ;; + crds | unos) + basic_machine=m68k-crds + ;; + crisv32 | crisv32-* | etraxfs*) + basic_machine=crisv32-axis + ;; + cris | cris-* | etrax*) + basic_machine=cris-axis + ;; + crx) + basic_machine=crx-unknown + os=-elf + ;; + da30 | da30-*) + basic_machine=m68k-da30 + ;; + decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn) + basic_machine=mips-dec + ;; + decsystem10* | dec10*) + basic_machine=pdp10-dec + os=-tops10 + ;; + decsystem20* | dec20*) + basic_machine=pdp10-dec + os=-tops20 + ;; + delta | 3300 | motorola-3300 | motorola-delta \ + | 3300-motorola | delta-motorola) + basic_machine=m68k-motorola + ;; + delta88) + basic_machine=m88k-motorola + os=-sysv3 + ;; + dicos) + basic_machine=i686-pc + os=-dicos + ;; + djgpp) + basic_machine=i586-pc + os=-msdosdjgpp + ;; + dpx20 | dpx20-*) + basic_machine=rs6000-bull + os=-bosx + ;; + dpx2* | dpx2*-bull) + basic_machine=m68k-bull + os=-sysv3 + ;; + e500v[12]) + basic_machine=powerpc-unknown + os=$os"spe" + ;; + e500v[12]-*) + basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` + os=$os"spe" + ;; + ebmon29k) + basic_machine=a29k-amd + os=-ebmon + ;; + elxsi) + basic_machine=elxsi-elxsi + os=-bsd + ;; + encore | umax | mmax) + basic_machine=ns32k-encore + ;; + es1800 | OSE68k | ose68k | ose | OSE) + basic_machine=m68k-ericsson + os=-ose + ;; + fx2800) + basic_machine=i860-alliant + ;; + genix) + basic_machine=ns32k-ns + ;; + gmicro) + basic_machine=tron-gmicro + os=-sysv + ;; + go32) + basic_machine=i386-pc + os=-go32 + ;; + h3050r* | hiux*) + basic_machine=hppa1.1-hitachi + os=-hiuxwe2 + ;; + h8300hms) + basic_machine=h8300-hitachi + os=-hms + ;; + h8300xray) + basic_machine=h8300-hitachi + os=-xray + ;; + h8500hms) + basic_machine=h8500-hitachi + os=-hms + ;; + harris) + basic_machine=m88k-harris + os=-sysv3 + ;; + 
hp300-*) + basic_machine=m68k-hp + ;; + hp300bsd) + basic_machine=m68k-hp + os=-bsd + ;; + hp300hpux) + basic_machine=m68k-hp + os=-hpux + ;; + hp3k9[0-9][0-9] | hp9[0-9][0-9]) + basic_machine=hppa1.0-hp + ;; + hp9k2[0-9][0-9] | hp9k31[0-9]) + basic_machine=m68000-hp + ;; + hp9k3[2-9][0-9]) + basic_machine=m68k-hp + ;; + hp9k6[0-9][0-9] | hp6[0-9][0-9]) + basic_machine=hppa1.0-hp + ;; + hp9k7[0-79][0-9] | hp7[0-79][0-9]) + basic_machine=hppa1.1-hp + ;; + hp9k78[0-9] | hp78[0-9]) + # FIXME: really hppa2.0-hp + basic_machine=hppa1.1-hp + ;; + hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893) + # FIXME: really hppa2.0-hp + basic_machine=hppa1.1-hp + ;; + hp9k8[0-9][13679] | hp8[0-9][13679]) + basic_machine=hppa1.1-hp + ;; + hp9k8[0-9][0-9] | hp8[0-9][0-9]) + basic_machine=hppa1.0-hp + ;; + hppa-next) + os=-nextstep3 + ;; + hppaosf) + basic_machine=hppa1.1-hp + os=-osf + ;; + hppro) + basic_machine=hppa1.1-hp + os=-proelf + ;; + i370-ibm* | ibm*) + basic_machine=i370-ibm + ;; + i*86v32) + basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` + os=-sysv32 + ;; + i*86v4*) + basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` + os=-sysv4 + ;; + i*86v) + basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` + os=-sysv + ;; + i*86sol2) + basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` + os=-solaris2 + ;; + i386mach) + basic_machine=i386-mach + os=-mach + ;; + i386-vsta | vsta) + basic_machine=i386-unknown + os=-vsta + ;; + iris | iris4d) + basic_machine=mips-sgi + case $os in + -irix*) + ;; + *) + os=-irix4 + ;; + esac + ;; + isi68 | isi) + basic_machine=m68k-isi + os=-sysv + ;; + leon-*|leon[3-9]-*) + basic_machine=sparc-`echo $basic_machine | sed 's/-.*//'` + ;; + m68knommu) + basic_machine=m68k-unknown + os=-linux + ;; + m68knommu-*) + basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'` + os=-linux + ;; + m88k-omron*) + basic_machine=m88k-omron + ;; + magnum | m3230) + basic_machine=mips-mips + os=-sysv + ;; + merlin) + 
basic_machine=ns32k-utek + os=-sysv + ;; + microblaze*) + basic_machine=microblaze-xilinx + ;; + mingw64) + basic_machine=x86_64-pc + os=-mingw64 + ;; + mingw32) + basic_machine=i686-pc + os=-mingw32 + ;; + mingw32ce) + basic_machine=arm-unknown + os=-mingw32ce + ;; + miniframe) + basic_machine=m68000-convergent + ;; + *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*) + basic_machine=m68k-atari + os=-mint + ;; + mips3*-*) + basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'` + ;; + mips3*) + basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown + ;; + monitor) + basic_machine=m68k-rom68k + os=-coff + ;; + morphos) + basic_machine=powerpc-unknown + os=-morphos + ;; + moxiebox) + basic_machine=moxie-unknown + os=-moxiebox + ;; + msdos) + basic_machine=i386-pc + os=-msdos + ;; + ms1-*) + basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'` + ;; + msys) + basic_machine=i686-pc + os=-msys + ;; + mvs) + basic_machine=i370-ibm + os=-mvs + ;; + nacl) + basic_machine=le32-unknown + os=-nacl + ;; + ncr3000) + basic_machine=i486-ncr + os=-sysv4 + ;; + netbsd386) + basic_machine=i386-unknown + os=-netbsd + ;; + netwinder) + basic_machine=armv4l-rebel + os=-linux + ;; + news | news700 | news800 | news900) + basic_machine=m68k-sony + os=-newsos + ;; + news1000) + basic_machine=m68030-sony + os=-newsos + ;; + news-3600 | risc-news) + basic_machine=mips-sony + os=-newsos + ;; + necv70) + basic_machine=v70-nec + os=-sysv + ;; + next | m*-next ) + basic_machine=m68k-next + case $os in + -nextstep* ) + ;; + -ns2*) + os=-nextstep2 + ;; + *) + os=-nextstep3 + ;; + esac + ;; + nh3000) + basic_machine=m68k-harris + os=-cxux + ;; + nh[45]000) + basic_machine=m88k-harris + os=-cxux + ;; + nindy960) + basic_machine=i960-intel + os=-nindy + ;; + mon960) + basic_machine=i960-intel + os=-mon960 + ;; + nonstopux) + basic_machine=mips-compaq + os=-nonstopux + ;; + np1) + basic_machine=np1-gould + ;; + neo-tandem) + basic_machine=neo-tandem + ;; + nse-tandem) + 
basic_machine=nse-tandem + ;; + nsr-tandem) + basic_machine=nsr-tandem + ;; + nsx-tandem) + basic_machine=nsx-tandem + ;; + op50n-* | op60c-*) + basic_machine=hppa1.1-oki + os=-proelf + ;; + openrisc | openrisc-*) + basic_machine=or32-unknown + ;; + os400) + basic_machine=powerpc-ibm + os=-os400 + ;; + OSE68000 | ose68000) + basic_machine=m68000-ericsson + os=-ose + ;; + os68k) + basic_machine=m68k-none + os=-os68k + ;; + pa-hitachi) + basic_machine=hppa1.1-hitachi + os=-hiuxwe2 + ;; + paragon) + basic_machine=i860-intel + os=-osf + ;; + parisc) + basic_machine=hppa-unknown + os=-linux + ;; + parisc-*) + basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'` + os=-linux + ;; + pbd) + basic_machine=sparc-tti + ;; + pbb) + basic_machine=m68k-tti + ;; + pc532 | pc532-*) + basic_machine=ns32k-pc532 + ;; + pc98) + basic_machine=i386-pc + ;; + pc98-*) + basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + pentium | p5 | k5 | k6 | nexgen | viac3) + basic_machine=i586-pc + ;; + pentiumpro | p6 | 6x86 | athlon | athlon_*) + basic_machine=i686-pc + ;; + pentiumii | pentium2 | pentiumiii | pentium3) + basic_machine=i686-pc + ;; + pentium4) + basic_machine=i786-pc + ;; + pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*) + basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + pentiumpro-* | p6-* | 6x86-* | athlon-*) + basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*) + basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + pentium4-*) + basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + pn) + basic_machine=pn-gould + ;; + power) basic_machine=power-ibm + ;; + ppc | ppcbe) basic_machine=powerpc-unknown + ;; + ppc-* | ppcbe-*) + basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + ppcle | powerpclittle) + basic_machine=powerpcle-unknown + ;; + ppcle-* | powerpclittle-*) + basic_machine=powerpcle-`echo $basic_machine | sed 
's/^[^-]*-//'` + ;; + ppc64) basic_machine=powerpc64-unknown + ;; + ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + ppc64le | powerpc64little) + basic_machine=powerpc64le-unknown + ;; + ppc64le-* | powerpc64little-*) + basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + ps2) + basic_machine=i386-ibm + ;; + pw32) + basic_machine=i586-unknown + os=-pw32 + ;; + rdos | rdos64) + basic_machine=x86_64-pc + os=-rdos + ;; + rdos32) + basic_machine=i386-pc + os=-rdos + ;; + rom68k) + basic_machine=m68k-rom68k + os=-coff + ;; + rm[46]00) + basic_machine=mips-siemens + ;; + rtpc | rtpc-*) + basic_machine=romp-ibm + ;; + s390 | s390-*) + basic_machine=s390-ibm + ;; + s390x | s390x-*) + basic_machine=s390x-ibm + ;; + sa29200) + basic_machine=a29k-amd + os=-udi + ;; + sb1) + basic_machine=mipsisa64sb1-unknown + ;; + sb1el) + basic_machine=mipsisa64sb1el-unknown + ;; + sde) + basic_machine=mipsisa32-sde + os=-elf + ;; + sei) + basic_machine=mips-sei + os=-seiux + ;; + sequent) + basic_machine=i386-sequent + ;; + sh) + basic_machine=sh-hitachi + os=-hms + ;; + sh5el) + basic_machine=sh5le-unknown + ;; + sh64) + basic_machine=sh64-unknown + ;; + sparclite-wrs | simso-wrs) + basic_machine=sparclite-wrs + os=-vxworks + ;; + sps7) + basic_machine=m68k-bull + os=-sysv2 + ;; + spur) + basic_machine=spur-unknown + ;; + st2000) + basic_machine=m68k-tandem + ;; + stratus) + basic_machine=i860-stratus + os=-sysv4 + ;; + strongarm-* | thumb-*) + basic_machine=arm-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + sun2) + basic_machine=m68000-sun + ;; + sun2os3) + basic_machine=m68000-sun + os=-sunos3 + ;; + sun2os4) + basic_machine=m68000-sun + os=-sunos4 + ;; + sun3os3) + basic_machine=m68k-sun + os=-sunos3 + ;; + sun3os4) + basic_machine=m68k-sun + os=-sunos4 + ;; + sun4os3) + basic_machine=sparc-sun + os=-sunos3 + ;; + sun4os4) + basic_machine=sparc-sun + os=-sunos4 + ;; + sun4sol2) + basic_machine=sparc-sun + os=-solaris2 + ;; + sun3 
| sun3-*) + basic_machine=m68k-sun + ;; + sun4) + basic_machine=sparc-sun + ;; + sun386 | sun386i | roadrunner) + basic_machine=i386-sun + ;; + sv1) + basic_machine=sv1-cray + os=-unicos + ;; + symmetry) + basic_machine=i386-sequent + os=-dynix + ;; + t3e) + basic_machine=alphaev5-cray + os=-unicos + ;; + t90) + basic_machine=t90-cray + os=-unicos + ;; + tile*) + basic_machine=$basic_machine-unknown + os=-linux-gnu + ;; + tx39) + basic_machine=mipstx39-unknown + ;; + tx39el) + basic_machine=mipstx39el-unknown + ;; + toad1) + basic_machine=pdp10-xkl + os=-tops20 + ;; + tower | tower-32) + basic_machine=m68k-ncr + ;; + tpf) + basic_machine=s390x-ibm + os=-tpf + ;; + udi29k) + basic_machine=a29k-amd + os=-udi + ;; + ultra3) + basic_machine=a29k-nyu + os=-sym1 + ;; + v810 | necv810) + basic_machine=v810-nec + os=-none + ;; + vaxv) + basic_machine=vax-dec + os=-sysv + ;; + vms) + basic_machine=vax-dec + os=-vms + ;; + vpp*|vx|vx-*) + basic_machine=f301-fujitsu + ;; + vxworks960) + basic_machine=i960-wrs + os=-vxworks + ;; + vxworks68) + basic_machine=m68k-wrs + os=-vxworks + ;; + vxworks29k) + basic_machine=a29k-wrs + os=-vxworks + ;; + wasm32) + basic_machine=wasm32-unknown + ;; + w65*) + basic_machine=w65-wdc + os=-none + ;; + w89k-*) + basic_machine=hppa1.1-winbond + os=-proelf + ;; + xbox) + basic_machine=i686-pc + os=-mingw32 + ;; + xps | xps100) + basic_machine=xps100-honeywell + ;; + xscale-* | xscalee[bl]-*) + basic_machine=`echo $basic_machine | sed 's/^xscale/arm/'` + ;; + ymp) + basic_machine=ymp-cray + os=-unicos + ;; + z8k-*-coff) + basic_machine=z8k-unknown + os=-sim + ;; + z80-*-coff) + basic_machine=z80-unknown + os=-sim + ;; + none) + basic_machine=none-none + os=-none + ;; + +# Here we handle the default manufacturer of certain CPU types. It is in +# some cases the only manufacturer, in others, it is the most popular. 
+ w89k) + basic_machine=hppa1.1-winbond + ;; + op50n) + basic_machine=hppa1.1-oki + ;; + op60c) + basic_machine=hppa1.1-oki + ;; + romp) + basic_machine=romp-ibm + ;; + mmix) + basic_machine=mmix-knuth + ;; + rs6000) + basic_machine=rs6000-ibm + ;; + vax) + basic_machine=vax-dec + ;; + pdp10) + # there are many clones, so DEC is not a safe bet + basic_machine=pdp10-unknown + ;; + pdp11) + basic_machine=pdp11-dec + ;; + we32k) + basic_machine=we32k-att + ;; + sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele) + basic_machine=sh-unknown + ;; + sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v) + basic_machine=sparc-sun + ;; + cydra) + basic_machine=cydra-cydrome + ;; + orion) + basic_machine=orion-highlevel + ;; + orion105) + basic_machine=clipper-highlevel + ;; + mac | mpw | mac-mpw) + basic_machine=m68k-apple + ;; + pmac | pmac-mpw) + basic_machine=powerpc-apple + ;; + *-unknown) + # Make sure to match an already-canonicalized machine name. + ;; + *) + echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 + exit 1 + ;; +esac + +# Here we canonicalize certain aliases for manufacturers. +case $basic_machine in + *-digital*) + basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'` + ;; + *-commodore*) + basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'` + ;; + *) + ;; +esac + +# Decode manufacturer-specific aliases for certain operating systems. + +if [ x"$os" != x"" ] +then +case $os in + # First match some system type aliases + # that might get confused with valid system types. + # -solaris* is a basic system type, with this one exception. + -auroraux) + os=-auroraux + ;; + -solaris1 | -solaris1.*) + os=`echo $os | sed -e 's|solaris1|sunos4|'` + ;; + -solaris) + os=-solaris2 + ;; + -svr4*) + os=-sysv4 + ;; + -unixware*) + os=-sysv4.2uw + ;; + -gnu/linux*) + os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'` + ;; + # First accept the basic system types. + # The portable systems comes first. 
+ # Each alternative MUST END IN A *, to match a version number. + # -sysv* is not here because it comes later, after sysvr4. + -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ + | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\ + | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \ + | -sym* | -kopensolaris* | -plan9* \ + | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ + | -aos* | -aros* | -cloudabi* | -sortix* \ + | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ + | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ + | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \ + | -bitrig* | -openbsd* | -solidbsd* | -libertybsd* \ + | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \ + | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ + | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ + | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ + | -chorusos* | -chorusrdb* | -cegcc* | -glidix* \ + | -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ + | -midipix* | -mingw32* | -mingw64* | -linux-gnu* | -linux-android* \ + | -linux-newlib* | -linux-musl* | -linux-uclibc* \ + | -uxpv* | -beos* | -mpeix* | -udk* | -moxiebox* \ + | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \ + | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ + | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \ + | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \ + | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \ + | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \ + | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es* \ + | -onefs* | -tirtos* | -phoenix* | -fuchsia* | -redox*) + # Remember, each alternative MUST END IN *, to match a version number. 
+ ;; + -qnx*) + case $basic_machine in + x86-* | i*86-*) + ;; + *) + os=-nto$os + ;; + esac + ;; + -nto-qnx*) + ;; + -nto*) + os=`echo $os | sed -e 's|nto|nto-qnx|'` + ;; + -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \ + | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \ + | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*) + ;; + -mac*) + os=`echo $os | sed -e 's|mac|macos|'` + ;; + -linux-dietlibc) + os=-linux-dietlibc + ;; + -linux*) + os=`echo $os | sed -e 's|linux|linux-gnu|'` + ;; + -sunos5*) + os=`echo $os | sed -e 's|sunos5|solaris2|'` + ;; + -sunos6*) + os=`echo $os | sed -e 's|sunos6|solaris3|'` + ;; + -opened*) + os=-openedition + ;; + -os400*) + os=-os400 + ;; + -wince*) + os=-wince + ;; + -osfrose*) + os=-osfrose + ;; + -osf*) + os=-osf + ;; + -utek*) + os=-bsd + ;; + -dynix*) + os=-bsd + ;; + -acis*) + os=-aos + ;; + -atheos*) + os=-atheos + ;; + -syllable*) + os=-syllable + ;; + -386bsd) + os=-bsd + ;; + -ctix* | -uts*) + os=-sysv + ;; + -nova*) + os=-rtmk-nova + ;; + -ns2 ) + os=-nextstep2 + ;; + -nsk*) + os=-nsk + ;; + # Preserve the version number of sinix5. + -sinix5.*) + os=`echo $os | sed -e 's|sinix|sysv|'` + ;; + -sinix*) + os=-sysv4 + ;; + -tpf*) + os=-tpf + ;; + -triton*) + os=-sysv3 + ;; + -oss*) + os=-sysv3 + ;; + -svr4) + os=-sysv4 + ;; + -svr3) + os=-sysv3 + ;; + -sysvr4) + os=-sysv4 + ;; + # This must come after -sysvr4. + -sysv*) + ;; + -ose*) + os=-ose + ;; + -es1800*) + os=-ose + ;; + -xenix) + os=-xenix + ;; + -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) + os=-mint + ;; + -aros*) + os=-aros + ;; + -zvmoe) + os=-zvmoe + ;; + -dicos*) + os=-dicos + ;; + -nacl*) + ;; + -ios) + ;; + -none) + ;; + *) + # Get rid of the `-' at the beginning of $os. + os=`echo $os | sed 's/[^-]*-//'` + echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2 + exit 1 + ;; +esac +else + +# Here we handle the default operating systems that come with various machines. 
+# The value should be what the vendor currently ships out the door with their +# machine or put another way, the most popular os provided with the machine. + +# Note that if you're going to try to match "-MANUFACTURER" here (say, +# "-sun"), then you have to tell the case statement up towards the top +# that MANUFACTURER isn't an operating system. Otherwise, code above +# will signal an error saying that MANUFACTURER isn't an operating +# system, and we'll never get to this point. + +case $basic_machine in + score-*) + os=-elf + ;; + spu-*) + os=-elf + ;; + *-acorn) + os=-riscix1.2 + ;; + arm*-rebel) + os=-linux + ;; + arm*-semi) + os=-aout + ;; + c4x-* | tic4x-*) + os=-coff + ;; + c8051-*) + os=-elf + ;; + hexagon-*) + os=-elf + ;; + tic54x-*) + os=-coff + ;; + tic55x-*) + os=-coff + ;; + tic6x-*) + os=-coff + ;; + # This must come before the *-dec entry. + pdp10-*) + os=-tops20 + ;; + pdp11-*) + os=-none + ;; + *-dec | vax-*) + os=-ultrix4.2 + ;; + m68*-apollo) + os=-domain + ;; + i386-sun) + os=-sunos4.0.2 + ;; + m68000-sun) + os=-sunos3 + ;; + m68*-cisco) + os=-aout + ;; + mep-*) + os=-elf + ;; + mips*-cisco) + os=-elf + ;; + mips*-*) + os=-elf + ;; + or32-*) + os=-coff + ;; + *-tti) # must be before sparc entry or we get the wrong os. 
+ os=-sysv3 + ;; + sparc-* | *-sun) + os=-sunos4.1.1 + ;; + pru-*) + os=-elf + ;; + *-be) + os=-beos + ;; + *-haiku) + os=-haiku + ;; + *-ibm) + os=-aix + ;; + *-knuth) + os=-mmixware + ;; + *-wec) + os=-proelf + ;; + *-winbond) + os=-proelf + ;; + *-oki) + os=-proelf + ;; + *-hp) + os=-hpux + ;; + *-hitachi) + os=-hiux + ;; + i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent) + os=-sysv + ;; + *-cbm) + os=-amigaos + ;; + *-dg) + os=-dgux + ;; + *-dolphin) + os=-sysv3 + ;; + m68k-ccur) + os=-rtu + ;; + m88k-omron*) + os=-luna + ;; + *-next ) + os=-nextstep + ;; + *-sequent) + os=-ptx + ;; + *-crds) + os=-unos + ;; + *-ns) + os=-genix + ;; + i370-*) + os=-mvs + ;; + *-next) + os=-nextstep3 + ;; + *-gould) + os=-sysv + ;; + *-highlevel) + os=-bsd + ;; + *-encore) + os=-bsd + ;; + *-sgi) + os=-irix + ;; + *-siemens) + os=-sysv4 + ;; + *-masscomp) + os=-rtu + ;; + f30[01]-fujitsu | f700-fujitsu) + os=-uxpv + ;; + *-rom68k) + os=-coff + ;; + *-*bug) + os=-coff + ;; + *-apple) + os=-macos + ;; + *-atari*) + os=-mint + ;; + *) + os=-none + ;; +esac +fi + +# Here we handle the case where we know the os, and the CPU type, but not the +# manufacturer. We pick the logical manufacturer. 
+vendor=unknown +case $basic_machine in + *-unknown) + case $os in + -riscix*) + vendor=acorn + ;; + -sunos*) + vendor=sun + ;; + -cnk*|-aix*) + vendor=ibm + ;; + -beos*) + vendor=be + ;; + -hpux*) + vendor=hp + ;; + -mpeix*) + vendor=hp + ;; + -hiux*) + vendor=hitachi + ;; + -unos*) + vendor=crds + ;; + -dgux*) + vendor=dg + ;; + -luna*) + vendor=omron + ;; + -genix*) + vendor=ns + ;; + -mvs* | -opened*) + vendor=ibm + ;; + -os400*) + vendor=ibm + ;; + -ptx*) + vendor=sequent + ;; + -tpf*) + vendor=ibm + ;; + -vxsim* | -vxworks* | -windiss*) + vendor=wrs + ;; + -aux*) + vendor=apple + ;; + -hms*) + vendor=hitachi + ;; + -mpw* | -macos*) + vendor=apple + ;; + -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) + vendor=atari + ;; + -vos*) + vendor=stratus + ;; + esac + basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"` + ;; +esac + +echo $basic_machine$os +exit + +# Local variables: +# eval: (add-hook 'write-file-hooks 'time-stamp) +# time-stamp-start: "timestamp='" +# time-stamp-format: "%:y-%02m-%02d" +# time-stamp-end: "'" +# End: diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/configure cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/configure --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/configure 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/configure 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,5389 @@ +#! /bin/sh +# Guess values for system-dependent variables and create Makefiles. +# Generated by GNU Autoconf 2.69 for Haskell network package 2.6.3.1. +# +# Report bugs to . +# +# +# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. +# +# +# This configure script is free software; the Free Software Foundation +# gives unlimited permission to copy, distribute and modify it. +## -------------------- ## +## M4sh Initialization. 
## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi + + +as_nl=' +' +export as_nl +# Printing a long string crashes Solaris 7 /usr/bin/printf. +as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo +# Prefer a ksh shell builtin over an external printf program on Solaris, +# but without wasting forks for bash or zsh. +if test -z "$BASH_VERSION$ZSH_VERSION" \ + && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='print -r --' + as_echo_n='print -rn --' +elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='printf %s\n' + as_echo_n='printf %s' +else + if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then + as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' + as_echo_n='/usr/ucb/echo -n' + else + as_echo_body='eval expr "X$1" : "X\\(.*\\)"' + as_echo_n_body='eval + arg=$1; + case $arg in #( + *"$as_nl"*) + expr "X$arg" : "X\\(.*\\)$as_nl"; + arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; + esac; + expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" + ' + export as_echo_n_body + as_echo_n='sh -c $as_echo_n_body as_echo' + fi + export as_echo_body + as_echo='sh -c $as_echo_body as_echo' +fi + +# The user is always right. 
+if test "${PATH_SEPARATOR+set}" != set; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# IFS +# We need space, tab and new line, in precisely that order. Quoting is +# there to prevent editors from complaining about space-tab. +# (If _AS_PATH_WALK were called with IFS unset, it would disable word +# splitting by setting IFS to empty value.) +IFS=" "" $as_nl" + +# Find who we are. Look in the path if we contain no directory separator. +as_myself= +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as `sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! -f "$as_myself"; then + $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + +# Unset variables that we do not need and which cause bugs (e.g. in +# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" +# suppresses any "Segmentation fault" message there. '((' could +# trigger a bug in pdksh 5.2.14. +for as_var in BASH_ENV ENV MAIL MAILPATH +do eval test x\${$as_var+set} = xset \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done +PS1='$ ' +PS2='> ' +PS4='+ ' + +# NLS nuisances. +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# CDPATH. +(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + +# Use a proper internal environment variable to ensure we don't fall + # into an infinite loop, continuously re-executing ourselves. 
+ if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then + _as_can_reexec=no; export _as_can_reexec; + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. +BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +as_fn_exit 255 + fi + # We don't want this to propagate to other subprocesses. + { _as_can_reexec=; unset _as_can_reexec;} +if test "x$CONFIG_SHELL" = x; then + as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which + # is contrary to our usage. Disable this feature. + alias -g '\${1+\"\$@\"}'='\"\$@\"' + setopt NO_GLOB_SUBST +else + case \`(set -o) 2>/dev/null\` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi +" + as_required="as_fn_return () { (exit \$1); } +as_fn_success () { as_fn_return 0; } +as_fn_failure () { as_fn_return 1; } +as_fn_ret_success () { return 0; } +as_fn_ret_failure () { return 1; } + +exitcode=0 +as_fn_success || { exitcode=1; echo as_fn_success failed.; } +as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } +as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } +as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } +if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : + +else + exitcode=1; echo positional parameters were not saved. 
+fi +test x\$exitcode = x0 || exit 1 +test -x / || exit 1" + as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO + as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO + eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && + test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 +test \$(( 1 + 1 )) = 2 || exit 1" + if (eval "$as_required") 2>/dev/null; then : + as_have_required=yes +else + as_have_required=no +fi + if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : + +else + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +as_found=false +for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + as_found=: + case $as_dir in #( + /*) + for as_base in sh bash ksh sh5; do + # Try only shells that exist, to save several forks. + as_shell=$as_dir/$as_base + if { test -f "$as_shell" || test -f "$as_shell.exe"; } && + { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : + CONFIG_SHELL=$as_shell as_have_required=yes + if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : + break 2 +fi +fi + done;; + esac + as_found=false +done +$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && + { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : + CONFIG_SHELL=$SHELL as_have_required=yes +fi; } +IFS=$as_save_IFS + + + if test "x$CONFIG_SHELL" != x; then : + export CONFIG_SHELL + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. 
+BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +exit 255 +fi + + if test x$as_have_required = xno; then : + $as_echo "$0: This script requires a shell more modern than all" + $as_echo "$0: the shells that I found on your system." + if test x${ZSH_VERSION+set} = xset ; then + $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" + $as_echo "$0: be upgraded to zsh 4.3.4 or later." + else + $as_echo "$0: Please tell bug-autoconf@gnu.org and +$0: libraries@haskell.org about your system, including any +$0: error possibly output before this message. Then install +$0: a modern shell, or manually run the script under such a +$0: shell if you do have one." + fi + exit 1 +fi +fi +fi +SHELL=${CONFIG_SHELL-/bin/sh} +export SHELL +# Unset more variables known to interfere with behavior of common tools. +CLICOLOR_FORCE= GREP_OPTIONS= +unset CLICOLOR_FORCE GREP_OPTIONS + +## --------------------- ## +## M4sh Shell Functions. ## +## --------------------- ## +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. 
+as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else + as_fn_arith () + { + as_val=`expr "$@" || test $? 
-eq 1` + } +fi # as_fn_arith + + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. +as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + $as_echo "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + + + as_lineno_1=$LINENO as_lineno_1a=$LINENO + as_lineno_2=$LINENO as_lineno_2a=$LINENO + eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && + test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { + # Blame Lee E. McMahon (1931-1989) for sed's syntax. 
:-) + sed -n ' + p + /[$]LINENO/= + ' <$as_myself | + sed ' + s/[$]LINENO.*/&-/ + t lineno + b + :lineno + N + :loop + s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ + t loop + s/-\n.*// + ' >$as_me.lineno && + chmod +x "$as_me.lineno" || + { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } + + # If we had to re-execute with $CONFIG_SHELL, we're ensured to have + # already done that, so ensure we don't try to do so again and fall + # in an infinite loop. This has already happened in practice. + _as_can_reexec=no; export _as_can_reexec + # Don't try to exec as it changes $[0], causing all sort of problems + # (the dirname of $[0] is not the place where we might find the + # original and so on. Autoconf is especially sensitive to this). + . "./$as_me.lineno" + # Exit status is that of the last command. + exit +} + +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + +if mkdir -p . 
2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + +as_test_x='test -x' +as_executable_p=as_fn_executable_p + +# Sed expression to map a string onto a valid CPP name. +as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" + +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" + + +test -n "$DJDIR" || exec 7<&0 &1 + +# Name of the host. +# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, +# so uname gets run too. +ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` + +# +# Initializations. +# +ac_default_prefix=/usr/local +ac_clean_files= +ac_config_libobj_dir=. +LIBOBJS= +cross_compiling=no +subdirs= +MFLAGS= +MAKEFLAGS= + +# Identity of this package. +PACKAGE_NAME='Haskell network package' +PACKAGE_TARNAME='network' +PACKAGE_VERSION='2.6.3.1' +PACKAGE_STRING='Haskell network package 2.6.3.1' +PACKAGE_BUGREPORT='libraries@haskell.org' +PACKAGE_URL='' + +ac_unique_file="include/HsNet.h" +# Factoring default headers for most tests. 
+ac_includes_default="\ +#include +#ifdef HAVE_SYS_TYPES_H +# include +#endif +#ifdef HAVE_SYS_STAT_H +# include +#endif +#ifdef STDC_HEADERS +# include +# include +#else +# ifdef HAVE_STDLIB_H +# include +# endif +#endif +#ifdef HAVE_STRING_H +# if !defined STDC_HEADERS && defined HAVE_MEMORY_H +# include +# endif +# include +#endif +#ifdef HAVE_STRINGS_H +# include +#endif +#ifdef HAVE_INTTYPES_H +# include +#endif +#ifdef HAVE_STDINT_H +# include +#endif +#ifdef HAVE_UNISTD_H +# include +#endif" + +ac_subst_vars='LTLIBOBJS +LIBOBJS +EXTRA_SRCS +EXTRA_LIBS +EXTRA_CPPFLAGS +CALLCONV +EGREP +GREP +CPP +OBJEXT +EXEEXT +ac_ct_CC +CPPFLAGS +LDFLAGS +CFLAGS +CC +host_os +host_vendor +host_cpu +host +build_os +build_vendor +build_cpu +build +target_alias +host_alias +build_alias +LIBS +ECHO_T +ECHO_N +ECHO_C +DEFS +mandir +localedir +libdir +psdir +pdfdir +dvidir +htmldir +infodir +docdir +oldincludedir +includedir +runstatedir +localstatedir +sharedstatedir +sysconfdir +datadir +datarootdir +libexecdir +sbindir +bindir +program_transform_name +prefix +exec_prefix +PACKAGE_URL +PACKAGE_BUGREPORT +PACKAGE_STRING +PACKAGE_VERSION +PACKAGE_TARNAME +PACKAGE_NAME +PATH_SEPARATOR +SHELL' +ac_subst_files='' +ac_user_opts=' +enable_option_checking +with_cc +' + ac_precious_vars='build_alias +host_alias +target_alias +CC +CFLAGS +LDFLAGS +LIBS +CPPFLAGS +CPP' + + +# Initialize some variables set by options. +ac_init_help= +ac_init_version=false +ac_unrecognized_opts= +ac_unrecognized_sep= +# The variables have the same names as the options, with +# dashes changed to underlines. +cache_file=/dev/null +exec_prefix=NONE +no_create= +no_recursion= +prefix=NONE +program_prefix=NONE +program_suffix=NONE +program_transform_name=s,x,x, +silent= +site= +srcdir= +verbose= +x_includes=NONE +x_libraries=NONE + +# Installation directory options. 
+# These are left unexpanded so users can "make install exec_prefix=/foo" +# and all the variables that are supposed to be based on exec_prefix +# by default will actually change. +# Use braces instead of parens because sh, perl, etc. also accept them. +# (The list follows the same order as the GNU Coding Standards.) +bindir='${exec_prefix}/bin' +sbindir='${exec_prefix}/sbin' +libexecdir='${exec_prefix}/libexec' +datarootdir='${prefix}/share' +datadir='${datarootdir}' +sysconfdir='${prefix}/etc' +sharedstatedir='${prefix}/com' +localstatedir='${prefix}/var' +runstatedir='${localstatedir}/run' +includedir='${prefix}/include' +oldincludedir='/usr/include' +docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' +infodir='${datarootdir}/info' +htmldir='${docdir}' +dvidir='${docdir}' +pdfdir='${docdir}' +psdir='${docdir}' +libdir='${exec_prefix}/lib' +localedir='${datarootdir}/locale' +mandir='${datarootdir}/man' + +ac_prev= +ac_dashdash= +for ac_option +do + # If the previous option needs an argument, assign it. + if test -n "$ac_prev"; then + eval $ac_prev=\$ac_option + ac_prev= + continue + fi + + case $ac_option in + *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; + *=) ac_optarg= ;; + *) ac_optarg=yes ;; + esac + + # Accept the important Cygnus configure options, so we can diagnose typos. 
+ + case $ac_dashdash$ac_option in + --) + ac_dashdash=yes ;; + + -bindir | --bindir | --bindi | --bind | --bin | --bi) + ac_prev=bindir ;; + -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) + bindir=$ac_optarg ;; + + -build | --build | --buil | --bui | --bu) + ac_prev=build_alias ;; + -build=* | --build=* | --buil=* | --bui=* | --bu=*) + build_alias=$ac_optarg ;; + + -cache-file | --cache-file | --cache-fil | --cache-fi \ + | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) + ac_prev=cache_file ;; + -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ + | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) + cache_file=$ac_optarg ;; + + --config-cache | -C) + cache_file=config.cache ;; + + -datadir | --datadir | --datadi | --datad) + ac_prev=datadir ;; + -datadir=* | --datadir=* | --datadi=* | --datad=*) + datadir=$ac_optarg ;; + + -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ + | --dataroo | --dataro | --datar) + ac_prev=datarootdir ;; + -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ + | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) + datarootdir=$ac_optarg ;; + + -disable-* | --disable-*) + ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? 
"invalid feature name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=no ;; + + -docdir | --docdir | --docdi | --doc | --do) + ac_prev=docdir ;; + -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) + docdir=$ac_optarg ;; + + -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) + ac_prev=dvidir ;; + -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) + dvidir=$ac_optarg ;; + + -enable-* | --enable-*) + ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid feature name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=\$ac_optarg ;; + + -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ + | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ + | --exec | --exe | --ex) + ac_prev=exec_prefix ;; + -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ + | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ + | --exec=* | --exe=* | --ex=*) + exec_prefix=$ac_optarg ;; + + -gas | --gas | --ga | --g) + # Obsolete; use --with-gas. 
+ with_gas=yes ;; + + -help | --help | --hel | --he | -h) + ac_init_help=long ;; + -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) + ac_init_help=recursive ;; + -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) + ac_init_help=short ;; + + -host | --host | --hos | --ho) + ac_prev=host_alias ;; + -host=* | --host=* | --hos=* | --ho=*) + host_alias=$ac_optarg ;; + + -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) + ac_prev=htmldir ;; + -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ + | --ht=*) + htmldir=$ac_optarg ;; + + -includedir | --includedir | --includedi | --included | --include \ + | --includ | --inclu | --incl | --inc) + ac_prev=includedir ;; + -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ + | --includ=* | --inclu=* | --incl=* | --inc=*) + includedir=$ac_optarg ;; + + -infodir | --infodir | --infodi | --infod | --info | --inf) + ac_prev=infodir ;; + -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) + infodir=$ac_optarg ;; + + -libdir | --libdir | --libdi | --libd) + ac_prev=libdir ;; + -libdir=* | --libdir=* | --libdi=* | --libd=*) + libdir=$ac_optarg ;; + + -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ + | --libexe | --libex | --libe) + ac_prev=libexecdir ;; + -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ + | --libexe=* | --libex=* | --libe=*) + libexecdir=$ac_optarg ;; + + -localedir | --localedir | --localedi | --localed | --locale) + ac_prev=localedir ;; + -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) + localedir=$ac_optarg ;; + + -localstatedir | --localstatedir | --localstatedi | --localstated \ + | --localstate | --localstat | --localsta | --localst | --locals) + ac_prev=localstatedir ;; + -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ + | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) + localstatedir=$ac_optarg ;; + + 
-mandir | --mandir | --mandi | --mand | --man | --ma | --m) + ac_prev=mandir ;; + -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) + mandir=$ac_optarg ;; + + -nfp | --nfp | --nf) + # Obsolete; use --without-fp. + with_fp=no ;; + + -no-create | --no-create | --no-creat | --no-crea | --no-cre \ + | --no-cr | --no-c | -n) + no_create=yes ;; + + -no-recursion | --no-recursion | --no-recursio | --no-recursi \ + | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) + no_recursion=yes ;; + + -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ + | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ + | --oldin | --oldi | --old | --ol | --o) + ac_prev=oldincludedir ;; + -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ + | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ + | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) + oldincludedir=$ac_optarg ;; + + -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) + ac_prev=prefix ;; + -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) + prefix=$ac_optarg ;; + + -program-prefix | --program-prefix | --program-prefi | --program-pref \ + | --program-pre | --program-pr | --program-p) + ac_prev=program_prefix ;; + -program-prefix=* | --program-prefix=* | --program-prefi=* \ + | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) + program_prefix=$ac_optarg ;; + + -program-suffix | --program-suffix | --program-suffi | --program-suff \ + | --program-suf | --program-su | --program-s) + ac_prev=program_suffix ;; + -program-suffix=* | --program-suffix=* | --program-suffi=* \ + | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) + program_suffix=$ac_optarg ;; + + -program-transform-name | --program-transform-name \ + | --program-transform-nam | --program-transform-na \ + | --program-transform-n | --program-transform- \ + | --program-transform | 
--program-transfor \ + | --program-transfo | --program-transf \ + | --program-trans | --program-tran \ + | --progr-tra | --program-tr | --program-t) + ac_prev=program_transform_name ;; + -program-transform-name=* | --program-transform-name=* \ + | --program-transform-nam=* | --program-transform-na=* \ + | --program-transform-n=* | --program-transform-=* \ + | --program-transform=* | --program-transfor=* \ + | --program-transfo=* | --program-transf=* \ + | --program-trans=* | --program-tran=* \ + | --progr-tra=* | --program-tr=* | --program-t=*) + program_transform_name=$ac_optarg ;; + + -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) + ac_prev=pdfdir ;; + -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) + pdfdir=$ac_optarg ;; + + -psdir | --psdir | --psdi | --psd | --ps) + ac_prev=psdir ;; + -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) + psdir=$ac_optarg ;; + + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + silent=yes ;; + + -runstatedir | --runstatedir | --runstatedi | --runstated \ + | --runstate | --runstat | --runsta | --runst | --runs \ + | --run | --ru | --r) + ac_prev=runstatedir ;; + -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ + | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ + | --run=* | --ru=* | --r=*) + runstatedir=$ac_optarg ;; + + -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) + ac_prev=sbindir ;; + -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ + | --sbi=* | --sb=*) + sbindir=$ac_optarg ;; + + -sharedstatedir | --sharedstatedir | --sharedstatedi \ + | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ + | --sharedst | --shareds | --shared | --share | --shar \ + | --sha | --sh) + ac_prev=sharedstatedir ;; + -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ + | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ + | --sharedst=* | --shareds=* | 
--shared=* | --share=* | --shar=* \ + | --sha=* | --sh=*) + sharedstatedir=$ac_optarg ;; + + -site | --site | --sit) + ac_prev=site ;; + -site=* | --site=* | --sit=*) + site=$ac_optarg ;; + + -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) + ac_prev=srcdir ;; + -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) + srcdir=$ac_optarg ;; + + -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ + | --syscon | --sysco | --sysc | --sys | --sy) + ac_prev=sysconfdir ;; + -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ + | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) + sysconfdir=$ac_optarg ;; + + -target | --target | --targe | --targ | --tar | --ta | --t) + ac_prev=target_alias ;; + -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) + target_alias=$ac_optarg ;; + + -v | -verbose | --verbose | --verbos | --verbo | --verb) + verbose=yes ;; + + -version | --version | --versio | --versi | --vers | -V) + ac_init_version=: ;; + + -with-* | --with-*) + ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid package name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=\$ac_optarg ;; + + -without-* | --without-*) + ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? 
"invalid package name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=no ;; + + --x) + # Obsolete; use --with-x. + with_x=yes ;; + + -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ + | --x-incl | --x-inc | --x-in | --x-i) + ac_prev=x_includes ;; + -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ + | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) + x_includes=$ac_optarg ;; + + -x-libraries | --x-libraries | --x-librarie | --x-librari \ + | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) + ac_prev=x_libraries ;; + -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ + | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) + x_libraries=$ac_optarg ;; + + -*) as_fn_error $? "unrecognized option: \`$ac_option' +Try \`$0 --help' for more information" + ;; + + *=*) + ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` + # Reject names that are not valid shell variable names. + case $ac_envvar in #( + '' | [0-9]* | *[!_$as_cr_alnum]* ) + as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; + esac + eval $ac_envvar=\$ac_optarg + export $ac_envvar ;; + + *) + # FIXME: should be removed in autoconf 3.0. + $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 + expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && + $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 + : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" + ;; + + esac +done + +if test -n "$ac_prev"; then + ac_option=--`echo $ac_prev | sed 's/_/-/g'` + as_fn_error $? 
"missing argument to $ac_option" +fi + +if test -n "$ac_unrecognized_opts"; then + case $enable_option_checking in + no) ;; + fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; + *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; + esac +fi + +# Check all directory arguments for consistency. +for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ + datadir sysconfdir sharedstatedir localstatedir includedir \ + oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ + libdir localedir mandir runstatedir +do + eval ac_val=\$$ac_var + # Remove trailing slashes. + case $ac_val in + */ ) + ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` + eval $ac_var=\$ac_val;; + esac + # Be sure to have absolute directory names. + case $ac_val in + [\\/$]* | ?:[\\/]* ) continue;; + NONE | '' ) case $ac_var in *prefix ) continue;; esac;; + esac + as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" +done + +# There might be people who depend on the old broken behavior: `$host' +# used to hold the argument of --host etc. +# FIXME: To remove some day. +build=$build_alias +host=$host_alias +target=$target_alias + +# FIXME: To remove some day. +if test "x$host_alias" != x; then + if test "x$build_alias" = x; then + cross_compiling=maybe + elif test "x$build_alias" != "x$host_alias"; then + cross_compiling=yes + fi +fi + +ac_tool_prefix= +test -n "$host_alias" && ac_tool_prefix=$host_alias- + +test "$silent" = yes && exec 6>/dev/null + + +ac_pwd=`pwd` && test -n "$ac_pwd" && +ac_ls_di=`ls -di .` && +ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || + as_fn_error $? "working directory cannot be determined" +test "X$ac_ls_di" = "X$ac_pwd_ls_di" || + as_fn_error $? "pwd does not report name of working directory" + + +# Find the source files, if location was not specified. 
+if test -z "$srcdir"; then + ac_srcdir_defaulted=yes + # Try the directory containing this script, then the parent directory. + ac_confdir=`$as_dirname -- "$as_myself" || +$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_myself" : 'X\(//\)[^/]' \| \ + X"$as_myself" : 'X\(//\)$' \| \ + X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_myself" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + srcdir=$ac_confdir + if test ! -r "$srcdir/$ac_unique_file"; then + srcdir=.. + fi +else + ac_srcdir_defaulted=no +fi +if test ! -r "$srcdir/$ac_unique_file"; then + test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." + as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" +fi +ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" +ac_abs_confdir=`( + cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" + pwd)` +# When building in place, set srcdir=. +if test "$ac_abs_confdir" = "$ac_pwd"; then + srcdir=. +fi +# Remove unnecessary trailing slashes from srcdir. +# Double slashes in file names in object file debugging info +# mess up M-x gdb in Emacs. +case $srcdir in +*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; +esac +for ac_var in $ac_precious_vars; do + eval ac_env_${ac_var}_set=\${${ac_var}+set} + eval ac_env_${ac_var}_value=\$${ac_var} + eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} + eval ac_cv_env_${ac_var}_value=\$${ac_var} +done + +# +# Report the --help message. +# +if test "$ac_init_help" = "long"; then + # Omit some internal or obsolete options to make the list less imposing. + # This message is too long to be a string in the A/UX 3.1 sh. + cat <<_ACEOF +\`configure' configures Haskell network package 2.6.3.1 to adapt to many kinds of systems. + +Usage: $0 [OPTION]... [VAR=VALUE]... 
+ +To assign environment variables (e.g., CC, CFLAGS...), specify them as +VAR=VALUE. See below for descriptions of some of the useful variables. + +Defaults for the options are specified in brackets. + +Configuration: + -h, --help display this help and exit + --help=short display options specific to this package + --help=recursive display the short help of all the included packages + -V, --version display version information and exit + -q, --quiet, --silent do not print \`checking ...' messages + --cache-file=FILE cache test results in FILE [disabled] + -C, --config-cache alias for \`--cache-file=config.cache' + -n, --no-create do not create output files + --srcdir=DIR find the sources in DIR [configure dir or \`..'] + +Installation directories: + --prefix=PREFIX install architecture-independent files in PREFIX + [$ac_default_prefix] + --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX + [PREFIX] + +By default, \`make install' will install all the files in +\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify +an installation prefix other than \`$ac_default_prefix' using \`--prefix', +for instance \`--prefix=\$HOME'. + +For better control, use the options below. 
+ +Fine tuning of the installation directories: + --bindir=DIR user executables [EPREFIX/bin] + --sbindir=DIR system admin executables [EPREFIX/sbin] + --libexecdir=DIR program executables [EPREFIX/libexec] + --sysconfdir=DIR read-only single-machine data [PREFIX/etc] + --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] + --localstatedir=DIR modifiable single-machine data [PREFIX/var] + --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run] + --libdir=DIR object code libraries [EPREFIX/lib] + --includedir=DIR C header files [PREFIX/include] + --oldincludedir=DIR C header files for non-gcc [/usr/include] + --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] + --datadir=DIR read-only architecture-independent data [DATAROOTDIR] + --infodir=DIR info documentation [DATAROOTDIR/info] + --localedir=DIR locale-dependent data [DATAROOTDIR/locale] + --mandir=DIR man documentation [DATAROOTDIR/man] + --docdir=DIR documentation root [DATAROOTDIR/doc/network] + --htmldir=DIR html documentation [DOCDIR] + --dvidir=DIR dvi documentation [DOCDIR] + --pdfdir=DIR pdf documentation [DOCDIR] + --psdir=DIR ps documentation [DOCDIR] +_ACEOF + + cat <<\_ACEOF + +System types: + --build=BUILD configure for building on BUILD [guessed] + --host=HOST cross-compile to build programs to run on HOST [BUILD] +_ACEOF +fi + +if test -n "$ac_init_help"; then + case $ac_init_help in + short | recursive ) echo "Configuration of Haskell network package 2.6.3.1:";; + esac + cat <<\_ACEOF + +Optional Packages: + --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] + --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) +C compiler + +Some influential environment variables: + CC C compiler command + CFLAGS C compiler flags + LDFLAGS linker flags, e.g. -L if you have libraries in a + nonstandard directory + LIBS libraries to pass to the linker, e.g. -l + CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. 
-I if + you have headers in a nonstandard directory + CPP C preprocessor + +Use these variables to override the choices made by `configure' or to help +it to find libraries and programs with nonstandard names/locations. + +Report bugs to . +_ACEOF +ac_status=$? +fi + +if test "$ac_init_help" = "recursive"; then + # If there are subdirs, report their specific --help. + for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue + test -d "$ac_dir" || + { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || + continue + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. + ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + cd "$ac_dir" || { ac_status=$?; continue; } + # Check for guested configure. 
+ if test -f "$ac_srcdir/configure.gnu"; then + echo && + $SHELL "$ac_srcdir/configure.gnu" --help=recursive + elif test -f "$ac_srcdir/configure"; then + echo && + $SHELL "$ac_srcdir/configure" --help=recursive + else + $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 + fi || ac_status=$? + cd "$ac_pwd" || { ac_status=$?; break; } + done +fi + +test -n "$ac_init_help" && exit $ac_status +if $ac_init_version; then + cat <<\_ACEOF +Haskell network package configure 2.6.3.1 +generated by GNU Autoconf 2.69 + +Copyright (C) 2012 Free Software Foundation, Inc. +This configure script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it. +_ACEOF + exit +fi + +## ------------------------ ## +## Autoconf initialization. ## +## ------------------------ ## + +# ac_fn_c_try_compile LINENO +# -------------------------- +# Try to compile conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext + if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_c_werror_flag" || + test ! 
-s conftest.err + } && test -s conftest.$ac_objext; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_compile + +# ac_fn_c_try_cpp LINENO +# ---------------------- +# Try to preprocess conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_cpp () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { { ac_try="$ac_cpp conftest.$ac_ext" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } > conftest.i && { + test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || + test ! -s conftest.err + }; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_cpp + +# ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES +# ------------------------------------------------------- +# Tests whether HEADER exists, giving a warning if it cannot be compiled using +# the include files in INCLUDES and setting the cache variable VAR +# accordingly. +ac_fn_c_check_header_mongrel () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if eval \${$3+:} false; then : + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... 
" >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +else + # Is the header compilable? +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 +$as_echo_n "checking $2 usability... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +#include <$2> +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_header_compiler=yes +else + ac_header_compiler=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 +$as_echo "$ac_header_compiler" >&6; } + +# Is the header present? +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 +$as_echo_n "checking $2 presence... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include <$2> +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + ac_header_preproc=yes +else + ac_header_preproc=no +fi +rm -f conftest.err conftest.i conftest.$ac_ext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 +$as_echo "$ac_header_preproc" >&6; } + +# So? What about this header? +case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #(( + yes:no: ) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 +$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 +$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} + ;; + no:yes:* ) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 +$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" 
>&5 +$as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 +$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 +$as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 +$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} +( $as_echo "## ------------------------------------ ## +## Report this to libraries@haskell.org ## +## ------------------------------------ ##" + ) | sed "s/^/$as_me: WARNING: /" >&2 + ;; +esac + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + eval "$3=\$ac_header_compiler" +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_header_mongrel + +# ac_fn_c_try_run LINENO +# ---------------------- +# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes +# that executables *can* be run. +ac_fn_c_try_run () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' + { { case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_try") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; }; then : + ac_retval=0 +else + $as_echo "$as_me: program exited with status $ac_status" >&5 + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=$ac_status +fi + rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_run + +# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES +# ------------------------------------------------------- +# Tests whether HEADER exists and can be compiled using the include files in +# INCLUDES, setting the cache variable VAR accordingly. +ac_fn_c_check_header_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +#include <$2> +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_header_compile + +# ac_fn_c_try_link LINENO +# ----------------------- +# Try to link conftest.$ac_ext, and return whether this succeeded. 
+ac_fn_c_try_link () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext conftest$ac_exeext + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_c_werror_flag" || + test ! -s conftest.err + } && test -s conftest$ac_exeext && { + test "$cross_compiling" = yes || + test -x conftest$ac_exeext + }; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information + # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would + # interfere with the next link command; also delete a directory that is + # left behind by Apple's compiler. We do this before executing the actions. + rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_link + +# ac_fn_c_check_func LINENO FUNC VAR +# ---------------------------------- +# Tests whether FUNC exists, setting the cache variable VAR accordingly +ac_fn_c_check_func () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +/* Define $2 to an innocuous variant, in case declares $2. 
+ For example, HP-UX 11i declares gettimeofday. */ +#define $2 innocuous_$2 + +/* System header to define __stub macros and hopefully few prototypes, + which can conflict with char $2 (); below. + Prefer to if __STDC__ is defined, since + exists even on freestanding compilers. */ + +#ifdef __STDC__ +# include +#else +# include +#endif + +#undef $2 + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char $2 (); +/* The GNU C library defines this for functions which it implements + to always fail with ENOSYS. Some functions are actually named + something starting with __ and the normal name is an alias. */ +#if defined __stub_$2 || defined __stub___$2 +choke me +#endif + +int +main () +{ +return $2 (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_func + +# ac_fn_c_check_member LINENO AGGR MEMBER VAR INCLUDES +# ---------------------------------------------------- +# Tries to find if the field MEMBER exists in type AGGR, after including +# INCLUDES, setting cache variable VAR accordingly. +ac_fn_c_check_member () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2.$3" >&5 +$as_echo_n "checking for $2.$3... " >&6; } +if eval \${$4+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +$5 +int +main () +{ +static $2 ac_aggr; +if (ac_aggr.$3) +return 0; + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + eval "$4=yes" +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$5 +int +main () +{ +static $2 ac_aggr; +if (sizeof ac_aggr.$3) +return 0; + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + eval "$4=yes" +else + eval "$4=no" +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +eval ac_res=\$$4 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_member + +# ac_fn_c_check_decl LINENO SYMBOL VAR INCLUDES +# --------------------------------------------- +# Tests whether SYMBOL is declared in INCLUDES, setting cache variable VAR +# accordingly. +ac_fn_c_check_decl () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + as_decl_name=`echo $2|sed 's/ *(.*//'` + as_decl_use=`echo $2|sed -e 's/(/((/' -e 's/)/) 0&/' -e 's/,/) 0& (/g'` + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $as_decl_name is declared" >&5 +$as_echo_n "checking whether $as_decl_name is declared... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +$4 +int +main () +{ +#ifndef $as_decl_name +#ifdef __cplusplus + (void) $as_decl_use; +#else + (void) $as_decl_name; +#endif +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_decl +cat >config.log <<_ACEOF +This file contains any messages produced by compilers while +running configure, to aid debugging if configure makes a mistake. + +It was created by Haskell network package $as_me 2.6.3.1, which was +generated by GNU Autoconf 2.69. Invocation command line was + + $ $0 $@ + +_ACEOF +exec 5>>config.log +{ +cat <<_ASUNAME +## --------- ## +## Platform. ## +## --------- ## + +hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` +uname -m = `(uname -m) 2>/dev/null || echo unknown` +uname -r = `(uname -r) 2>/dev/null || echo unknown` +uname -s = `(uname -s) 2>/dev/null || echo unknown` +uname -v = `(uname -v) 2>/dev/null || echo unknown` + +/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` +/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` + +/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` +/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` +/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` +/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` +/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` +/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` +/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` + +_ASUNAME + +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ $as_echo "PATH: $as_dir" + done +IFS=$as_save_IFS + +} >&5 + +cat >&5 <<_ACEOF + + +## ----------- ## +## Core tests. ## +## ----------- ## + +_ACEOF + + +# Keep a trace of the command line. +# Strip out --no-create and --no-recursion so they do not pile up. +# Strip out --silent because we don't want to record it for future runs. +# Also quote any args containing shell meta-characters. +# Make two passes to allow for proper duplicate-argument suppression. +ac_configure_args= +ac_configure_args0= +ac_configure_args1= +ac_must_keep_next=false +for ac_pass in 1 2 +do + for ac_arg + do + case $ac_arg in + -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + continue ;; + *\'*) + ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + case $ac_pass in + 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; + 2) + as_fn_append ac_configure_args1 " '$ac_arg'" + if test $ac_must_keep_next = true; then + ac_must_keep_next=false # Got value, back to normal. + else + case $ac_arg in + *=* | --config-cache | -C | -disable-* | --disable-* \ + | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ + | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ + | -with-* | --with-* | -without-* | --without-* | --x) + case "$ac_configure_args0 " in + "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; + esac + ;; + -* ) ac_must_keep_next=true ;; + esac + fi + as_fn_append ac_configure_args " '$ac_arg'" + ;; + esac + done +done +{ ac_configure_args0=; unset ac_configure_args0;} +{ ac_configure_args1=; unset ac_configure_args1;} + +# When interrupted or exit'd, cleanup temporary files, and complete +# config.log. We remove comments because anyway the quotes in there +# would cause problems or look ugly. +# WARNING: Use '\'' to represent an apostrophe within the trap. +# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. 
+trap 'exit_status=$? + # Save into config.log some information that might help in debugging. + { + echo + + $as_echo "## ---------------- ## +## Cache variables. ## +## ---------------- ##" + echo + # The following way of writing the cache mishandles newlines in values, +( + for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + (set) 2>&1 | + case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + sed -n \ + "s/'\''/'\''\\\\'\'''\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" + ;; #( + *) + sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) + echo + + $as_echo "## ----------------- ## +## Output variables. ## +## ----------------- ##" + echo + for ac_var in $ac_subst_vars + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + $as_echo "$ac_var='\''$ac_val'\''" + done | sort + echo + + if test -n "$ac_subst_files"; then + $as_echo "## ------------------- ## +## File substitutions. ## +## ------------------- ##" + echo + for ac_var in $ac_subst_files + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + $as_echo "$ac_var='\''$ac_val'\''" + done | sort + echo + fi + + if test -s confdefs.h; then + $as_echo "## ----------- ## +## confdefs.h. 
## +## ----------- ##" + echo + cat confdefs.h + echo + fi + test "$ac_signal" != 0 && + $as_echo "$as_me: caught signal $ac_signal" + $as_echo "$as_me: exit $exit_status" + } >&5 + rm -f core *.core core.conftest.* && + rm -f -r conftest* confdefs* conf$$* $ac_clean_files && + exit $exit_status +' 0 +for ac_signal in 1 2 13 15; do + trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal +done +ac_signal=0 + +# confdefs.h avoids OS command line length limits that DEFS can exceed. +rm -f -r conftest* confdefs.h + +$as_echo "/* confdefs.h */" > confdefs.h + +# Predefined preprocessor variables. + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_NAME "$PACKAGE_NAME" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_TARNAME "$PACKAGE_TARNAME" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_VERSION "$PACKAGE_VERSION" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_STRING "$PACKAGE_STRING" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_URL "$PACKAGE_URL" +_ACEOF + + +# Let the site file select an alternate cache file if it wants to. +# Prefer an explicitly selected file to automatically selected ones. +ac_site_file1=NONE +ac_site_file2=NONE +if test -n "$CONFIG_SITE"; then + # We do not want a PATH search for config.site. 
+ case $CONFIG_SITE in #(( + -*) ac_site_file1=./$CONFIG_SITE;; + */*) ac_site_file1=$CONFIG_SITE;; + *) ac_site_file1=./$CONFIG_SITE;; + esac +elif test "x$prefix" != xNONE; then + ac_site_file1=$prefix/share/config.site + ac_site_file2=$prefix/etc/config.site +else + ac_site_file1=$ac_default_prefix/share/config.site + ac_site_file2=$ac_default_prefix/etc/config.site +fi +for ac_site_file in "$ac_site_file1" "$ac_site_file2" +do + test "x$ac_site_file" = xNONE && continue + if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 +$as_echo "$as_me: loading site script $ac_site_file" >&6;} + sed 's/^/| /' "$ac_site_file" >&5 + . "$ac_site_file" \ + || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "failed to load site script $ac_site_file +See \`config.log' for more details" "$LINENO" 5; } + fi +done + +if test -r "$cache_file"; then + # Some versions of bash will fail to source /dev/null (special files + # actually), so we avoid doing that. DJGPP emulates it as a regular file. + if test /dev/null != "$cache_file" && test -f "$cache_file"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 +$as_echo "$as_me: loading cache $cache_file" >&6;} + case $cache_file in + [\\/]* | ?:[\\/]* ) . "$cache_file";; + *) . "./$cache_file";; + esac + fi +else + { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 +$as_echo "$as_me: creating cache $cache_file" >&6;} + >$cache_file +fi + +# Check that the precious variables saved in the cache have kept the same +# value. 
+ac_cache_corrupted=false +for ac_var in $ac_precious_vars; do + eval ac_old_set=\$ac_cv_env_${ac_var}_set + eval ac_new_set=\$ac_env_${ac_var}_set + eval ac_old_val=\$ac_cv_env_${ac_var}_value + eval ac_new_val=\$ac_env_${ac_var}_value + case $ac_old_set,$ac_new_set in + set,) + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 +$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,set) + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 +$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,);; + *) + if test "x$ac_old_val" != "x$ac_new_val"; then + # differences in whitespace do not lead to failure. + ac_old_val_w=`echo x $ac_old_val` + ac_new_val_w=`echo x $ac_new_val` + if test "$ac_old_val_w" != "$ac_new_val_w"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 +$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} + ac_cache_corrupted=: + else + { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 +$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} + eval $ac_var=\$ac_old_val + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 +$as_echo "$as_me: former value: \`$ac_old_val'" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 +$as_echo "$as_me: current value: \`$ac_new_val'" >&2;} + fi;; + esac + # Pass precious variables to config.status. + if test "$ac_new_set" = set; then + case $ac_new_val in + *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; + *) ac_arg=$ac_var=$ac_new_val ;; + esac + case " $ac_configure_args " in + *" '$ac_arg' "*) ;; # Avoid dups. 
Use of quotes ensures accuracy. + *) as_fn_append ac_configure_args " '$ac_arg'" ;; + esac + fi +done +if $ac_cache_corrupted; then + { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 +$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} + as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 +fi +## -------------------- ## +## Main body of script. ## +## -------------------- ## + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + +ac_includes_default="$ac_includes_default +#ifdef HAVE_SYS_SOCKET_H +# include +#endif +#ifdef HAVE_NETINET_IN_H +# include +#endif +#ifdef HAVE_NETDB_H +# include +#endif +#ifdef HAVE_WINSOCK2_H +# include +#endif +#ifdef HAVE_WS2TCPIP_H +# include +// fix for MingW not defining IPV6_V6ONLY +# define IPV6_V6ONLY 27 +#endif" + +# Safety check: Ensure that we are in the correct source directory. + + +ac_config_headers="$ac_config_headers include/HsNetworkConfig.h" + + +ac_aux_dir= +for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do + if test -f "$ac_dir/install-sh"; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/install-sh -c" + break + elif test -f "$ac_dir/install.sh"; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/install.sh -c" + break + elif test -f "$ac_dir/shtool"; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/shtool install -c" + break + fi +done +if test -z "$ac_aux_dir"; then + as_fn_error $? 
"cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5 +fi + +# These three variables are undocumented and unsupported, +# and are intended to be withdrawn in a future Autoconf release. +# They can cause serious problems if a builder's source tree is in a directory +# whose full name contains unusual characters. +ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. +ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. +ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. + + +# Make sure we can run config.sub. +$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 || + as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5 + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5 +$as_echo_n "checking build system type... " >&6; } +if ${ac_cv_build+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_build_alias=$build_alias +test "x$ac_build_alias" = x && + ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"` +test "x$ac_build_alias" = x && + as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5 +ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` || + as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5 + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5 +$as_echo "$ac_cv_build" >&6; } +case $ac_cv_build in +*-*-*) ;; +*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;; +esac +build=$ac_cv_build +ac_save_IFS=$IFS; IFS='-' +set x $ac_cv_build +shift +build_cpu=$1 +build_vendor=$2 +shift; shift +# Remember, the first character of IFS is used to create $*, +# except with old shells: +build_os=$* +IFS=$ac_save_IFS +case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5 +$as_echo_n "checking host system type... 
" >&6; } +if ${ac_cv_host+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test "x$host_alias" = x; then + ac_cv_host=$ac_cv_build +else + ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` || + as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5 +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5 +$as_echo "$ac_cv_host" >&6; } +case $ac_cv_host in +*-*-*) ;; +*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;; +esac +host=$ac_cv_host +ac_save_IFS=$IFS; IFS='-' +set x $ac_cv_host +shift +host_cpu=$1 +host_vendor=$2 +shift; shift +# Remember, the first character of IFS is used to create $*, +# except with old shells: +host_os=$* +IFS=$ac_save_IFS +case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac + + + + +# Check whether --with-cc was given. +if test "${with_cc+set}" = set; then : + withval=$with_cc; CC=$withval +fi + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. +set dummy ${ac_tool_prefix}gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_CC"; then + ac_ct_CC=$CC + # Extract the first word of "gcc", so it can be a program name with args. +set dummy gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +else + CC="$ac_cv_prog_CC" +fi + +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. +set dummy ${ac_tool_prefix}cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + fi +fi +if test -z "$CC"; then + # Extract the first word of "cc", so it can be a program name with args. +set dummy cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else + ac_prog_rejected=no +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then + ac_prog_rejected=yes + continue + fi + ac_cv_prog_CC="cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +if test $ac_prog_rejected = yes; then + # We found a bogon in the path, so make sure we never use it. + set dummy $ac_cv_prog_CC + shift + if test $# != 0; then + # We chose a different compiler from the bogus one. + # However, it has the same basename, so the bogon will be chosen + # first if we set CC to just the basename; use the full file name. 
+ shift + ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" + fi +fi +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + for ac_prog in cl.exe + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$CC" && break + done +fi +if test -z "$CC"; then + ac_ct_CC=$CC + for ac_prog in cl.exe +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$ac_ct_CC" && break +done + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +fi + +fi + + +test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "no acceptable C compiler found in \$PATH +See \`config.log' for more details" "$LINENO" 5; } + +# Provide some information about the compiler. +$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion; do + { { ac_try="$ac_compiler $ac_option >&5" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... 
+ 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + fi + rm -f conftest.er1 conftest.err + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +done + +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" +# Try to create an executable without -o first, disregard a.out. +# It will help us diagnose broken compilers, and finding out an intuition +# of exeext. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 +$as_echo_n "checking whether the C compiler works... " >&6; } +ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` + +# The possible output files: +ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" + +ac_rmfiles= +for ac_file in $ac_files +do + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; + * ) ac_rmfiles="$ac_rmfiles $ac_file";; + esac +done +rm -f $ac_rmfiles + +if { { ac_try="$ac_link_default" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link_default") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. +# So ignore a value of `no', otherwise this would lead to `EXEEXT = no' +# in a Makefile. We should not override ac_cv_exeext if it was cached, +# so that the user can short-circuit this test for compilers unknown to +# Autoconf. 
+for ac_file in $ac_files '' +do + test -f "$ac_file" || continue + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) + ;; + [ab].out ) + # We found the default executable, but exeext='' is most + # certainly right. + break;; + *.* ) + if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; + then :; else + ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` + fi + # We set ac_cv_exeext here because the later test for it is not + # safe: cross compilers may not add the suffix if given an `-o' + # argument, so we may need to know it at that point already. + # Even if this section looks crufty: it has the advantage of + # actually working. + break;; + * ) + break;; + esac +done +test "$ac_cv_exeext" = no && ac_cv_exeext= + +else + ac_file='' +fi +if test -z "$ac_file"; then : + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +$as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + +{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error 77 "C compiler cannot create executables +See \`config.log' for more details" "$LINENO" 5; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 +$as_echo_n "checking for C compiler default output file name... " >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 +$as_echo "$ac_file" >&6; } +ac_exeext=$ac_cv_exeext + +rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out +ac_clean_files=$ac_clean_files_save +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 +$as_echo_n "checking for suffix of executables... 
" >&6; } +if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + # If both `conftest.exe' and `conftest' are `present' (well, observable) +# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will +# work properly (i.e., refer to `conftest.exe'), while it won't with +# `rm'. +for ac_file in conftest.exe conftest conftest.*; do + test -f "$ac_file" || continue + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; + *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` + break;; + * ) break;; + esac +done +else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot compute suffix of executables: cannot compile and link +See \`config.log' for more details" "$LINENO" 5; } +fi +rm -f conftest conftest$ac_cv_exeext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 +$as_echo "$ac_cv_exeext" >&6; } + +rm -f conftest.$ac_ext +EXEEXT=$ac_cv_exeext +ac_exeext=$EXEEXT +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +int +main () +{ +FILE *f = fopen ("conftest.out", "w"); + return ferror (f) || fclose (f) != 0; + + ; + return 0; +} +_ACEOF +ac_clean_files="$ac_clean_files conftest.out" +# Check that the compiler produces executables we can run. If not, either +# the compiler is broken, or we cross compile. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 +$as_echo_n "checking whether we are cross compiling... 
" >&6; } +if test "$cross_compiling" != yes; then + { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } + if { ac_try='./conftest$ac_cv_exeext' + { { case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_try") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; }; then + cross_compiling=no + else + if test "$cross_compiling" = maybe; then + cross_compiling=yes + else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot run C compiled programs. +If you meant to cross compile, use \`--host'. +See \`config.log' for more details" "$LINENO" 5; } + fi + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 +$as_echo "$cross_compiling" >&6; } + +rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out +ac_clean_files=$ac_clean_files_save +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 +$as_echo_n "checking for suffix of object files... " >&6; } +if ${ac_cv_objext+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +rm -f conftest.o conftest.obj +if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>&5 + ac_status=$? 
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + for ac_file in conftest.o conftest.obj conftest.*; do + test -f "$ac_file" || continue; + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; + *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` + break;; + esac +done +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + +{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot compute suffix of object files: cannot compile +See \`config.log' for more details" "$LINENO" 5; } +fi +rm -f conftest.$ac_cv_objext conftest.$ac_ext +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 +$as_echo "$ac_cv_objext" >&6; } +OBJEXT=$ac_cv_objext +ac_objext=$OBJEXT +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 +$as_echo_n "checking whether we are using the GNU C compiler... " >&6; } +if ${ac_cv_c_compiler_gnu+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ +#ifndef __GNUC__ + choke me +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_compiler_gnu=yes +else + ac_compiler_gnu=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +ac_cv_c_compiler_gnu=$ac_compiler_gnu + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 +$as_echo "$ac_cv_c_compiler_gnu" >&6; } +if test $ac_compiler_gnu = yes; then + GCC=yes +else + GCC= +fi +ac_test_CFLAGS=${CFLAGS+set} +ac_save_CFLAGS=$CFLAGS +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 +$as_echo_n "checking whether $CC accepts -g... 
" >&6; } +if ${ac_cv_prog_cc_g+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_save_c_werror_flag=$ac_c_werror_flag + ac_c_werror_flag=yes + ac_cv_prog_cc_g=no + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_g=yes +else + CFLAGS="" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + +else + ac_c_werror_flag=$ac_save_c_werror_flag + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_g=yes +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ac_c_werror_flag=$ac_save_c_werror_flag +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 +$as_echo "$ac_cv_prog_cc_g" >&6; } +if test "$ac_test_CFLAGS" = set; then + CFLAGS=$ac_save_CFLAGS +elif test $ac_cv_prog_cc_g = yes; then + if test "$GCC" = yes; then + CFLAGS="-g -O2" + else + CFLAGS="-g" + fi +else + if test "$GCC" = yes; then + CFLAGS="-O2" + else + CFLAGS= + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 +$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } +if ${ac_cv_prog_cc_c89+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_cv_prog_cc_c89=no +ac_save_CC=$CC +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +struct stat; +/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. 
*/ +struct buf { int x; }; +FILE * (*rcsopen) (struct buf *, struct stat *, int); +static char *e (p, i) + char **p; + int i; +{ + return p[i]; +} +static char *f (char * (*g) (char **, int), char **p, ...) +{ + char *s; + va_list v; + va_start (v,p); + s = g (p, va_arg (v,int)); + va_end (v); + return s; +} + +/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has + function prototypes and stuff, but not '\xHH' hex character constants. + These don't provoke an error unfortunately, instead are silently treated + as 'x'. The following induces an error, until -std is added to get + proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an + array size at least. It's necessary to write '\x00'==0 to get something + that's true only with -std. */ +int osf4_cc_array ['\x00' == 0 ? 1 : -1]; + +/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters + inside strings and character constants. */ +#define FOO(x) 'x' +int xlc6_cc_array[FOO(a) == 'x' ? 
1 : -1]; + +int test (int i, double x); +struct s1 {int (*f) (int a);}; +struct s2 {int (*f) (double a);}; +int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); +int argc; +char **argv; +int +main () +{ +return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; + ; + return 0; +} +_ACEOF +for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ + -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" +do + CC="$ac_save_CC $ac_arg" + if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_c89=$ac_arg +fi +rm -f core conftest.err conftest.$ac_objext + test "x$ac_cv_prog_cc_c89" != "xno" && break +done +rm -f conftest.$ac_ext +CC=$ac_save_CC + +fi +# AC_CACHE_VAL +case "x$ac_cv_prog_cc_c89" in + x) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 +$as_echo "none needed" >&6; } ;; + xno) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 +$as_echo "unsupported" >&6; } ;; + *) + CC="$CC $ac_cv_prog_cc_c89" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 +$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; +esac +if test "x$ac_cv_prog_cc_c89" != xno; then : + +fi + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for an ANSI C-conforming const" >&5 +$as_echo_n "checking for an ANSI C-conforming const... " >&6; } +if ${ac_cv_c_const+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + +#ifndef __cplusplus + /* Ultrix mips cc rejects this sort of thing. */ + typedef int charset[2]; + const charset cs = { 0, 0 }; + /* SunOS 4.1.1 cc rejects this. */ + char const *const *pcpcc; + char **ppc; + /* NEC SVR4.0.2 mips cc rejects this. 
*/ + struct point {int x, y;}; + static struct point const zero = {0,0}; + /* AIX XL C 1.02.0.0 rejects this. + It does not let you subtract one const X* pointer from another in + an arm of an if-expression whose if-part is not a constant + expression */ + const char *g = "string"; + pcpcc = &g + (g ? g-g : 0); + /* HPUX 7.0 cc rejects these. */ + ++pcpcc; + ppc = (char**) pcpcc; + pcpcc = (char const *const *) ppc; + { /* SCO 3.2v4 cc rejects this sort of thing. */ + char tx; + char *t = &tx; + char const *s = 0 ? (char *) 0 : (char const *) 0; + + *t++ = 0; + if (s) return 0; + } + { /* Someone thinks the Sun supposedly-ANSI compiler will reject this. */ + int x[] = {25, 17}; + const int *foo = &x[0]; + ++foo; + } + { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */ + typedef const int *iptr; + iptr p = 0; + ++p; + } + { /* AIX XL C 1.02.0.0 rejects this sort of thing, saying + "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. */ + struct s { int j; const int *ap[3]; } bx; + struct s *b = &bx; b->j = 5; + } + { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ + const int foo = 10; + if (!foo) return 0; + } + return !cs[0] && !zero.x; +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_c_const=yes +else + ac_cv_c_const=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_const" >&5 +$as_echo "$ac_cv_c_const" >&6; } +if test $ac_cv_c_const = no; then + +$as_echo "#define const /**/" >>confdefs.h + +fi + + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 +$as_echo_n "checking how to run the C preprocessor... " >&6; } +# On Suns, sometimes $CPP names a directory. 
+if test -n "$CPP" && test -d "$CPP"; then + CPP= +fi +if test -z "$CPP"; then + if ${ac_cv_prog_CPP+:} false; then : + $as_echo_n "(cached) " >&6 +else + # Double quotes because CPP needs to be expanded + for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" + do + ac_preproc_ok=false +for ac_c_preproc_warn_flag in '' yes +do + # Use a header file that comes with gcc, so configuring glibc + # with a fresh cross-compiler works. + # Prefer to if __STDC__ is defined, since + # exists even on freestanding compilers. + # On the NeXT, cc -E runs the code through the compiler's parser, + # not just through cpp. "Syntax error" is here to catch this case. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#ifdef __STDC__ +# include +#else +# include +#endif + Syntax error +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + +else + # Broken: fails on valid input. +continue +fi +rm -f conftest.err conftest.i conftest.$ac_ext + + # OK, works on sane cases. Now check whether nonexistent headers + # can be detected and how. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + # Broken: success on invalid input. +continue +else + # Passes both tests. +ac_preproc_ok=: +break +fi +rm -f conftest.err conftest.i conftest.$ac_ext + +done +# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. +rm -f conftest.i conftest.err conftest.$ac_ext +if $ac_preproc_ok; then : + break +fi + + done + ac_cv_prog_CPP=$CPP + +fi + CPP=$ac_cv_prog_CPP +else + ac_cv_prog_CPP=$CPP +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 +$as_echo "$CPP" >&6; } +ac_preproc_ok=false +for ac_c_preproc_warn_flag in '' yes +do + # Use a header file that comes with gcc, so configuring glibc + # with a fresh cross-compiler works. + # Prefer to if __STDC__ is defined, since + # exists even on freestanding compilers. 
+ # On the NeXT, cc -E runs the code through the compiler's parser, + # not just through cpp. "Syntax error" is here to catch this case. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#ifdef __STDC__ +# include +#else +# include +#endif + Syntax error +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + +else + # Broken: fails on valid input. +continue +fi +rm -f conftest.err conftest.i conftest.$ac_ext + + # OK, works on sane cases. Now check whether nonexistent headers + # can be detected and how. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + # Broken: success on invalid input. +continue +else + # Passes both tests. +ac_preproc_ok=: +break +fi +rm -f conftest.err conftest.i conftest.$ac_ext + +done +# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. +rm -f conftest.i conftest.err conftest.$ac_ext +if $ac_preproc_ok; then : + +else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "C preprocessor \"$CPP\" fails sanity check +See \`config.log' for more details" "$LINENO" 5; } +fi + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 +$as_echo_n "checking for grep that handles long lines and -e... " >&6; } +if ${ac_cv_path_GREP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -z "$GREP"; then + ac_path_GREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_prog in grep ggrep; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_GREP" || continue +# Check for GNU ac_path_GREP and select it if it is found. + # Check for GNU $ac_path_GREP +case `"$ac_path_GREP" --version 2>&1` in +*GNU*) + ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo 'GREP' >> "conftest.nl" + "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_GREP_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_GREP="$ac_path_GREP" + ac_path_GREP_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_GREP_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_GREP"; then + as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 + fi +else + ac_cv_path_GREP=$GREP +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 +$as_echo "$ac_cv_path_GREP" >&6; } + GREP="$ac_cv_path_GREP" + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 +$as_echo_n "checking for egrep... 
" >&6; } +if ${ac_cv_path_EGREP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 + then ac_cv_path_EGREP="$GREP -E" + else + if test -z "$EGREP"; then + ac_path_EGREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in egrep; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_EGREP" || continue +# Check for GNU ac_path_EGREP and select it if it is found. + # Check for GNU $ac_path_EGREP +case `"$ac_path_EGREP" --version 2>&1` in +*GNU*) + ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo 'EGREP' >> "conftest.nl" + "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_EGREP_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_EGREP="$ac_path_EGREP" + ac_path_EGREP_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_EGREP_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_EGREP"; then + as_fn_error $? 
"no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 + fi +else + ac_cv_path_EGREP=$EGREP +fi + + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 +$as_echo "$ac_cv_path_EGREP" >&6; } + EGREP="$ac_cv_path_EGREP" + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 +$as_echo_n "checking for ANSI C header files... " >&6; } +if ${ac_cv_header_stdc+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +#include +#include + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_header_stdc=yes +else + ac_cv_header_stdc=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + +if test $ac_cv_header_stdc = yes; then + # SunOS 4.x string.h does not declare mem*, contrary to ANSI. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + +_ACEOF +if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | + $EGREP "memchr" >/dev/null 2>&1; then : + +else + ac_cv_header_stdc=no +fi +rm -f conftest* + +fi + +if test $ac_cv_header_stdc = yes; then + # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + +_ACEOF +if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | + $EGREP "free" >/dev/null 2>&1; then : + +else + ac_cv_header_stdc=no +fi +rm -f conftest* + +fi + +if test $ac_cv_header_stdc = yes; then + # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. + if test "$cross_compiling" = yes; then : + : +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +#if ((' ' & 0x0FF) == 0x020) +# define ISLOWER(c) ('a' <= (c) && (c) <= 'z') +# define TOUPPER(c) (ISLOWER(c) ? 
'A' + ((c) - 'a') : (c)) +#else +# define ISLOWER(c) \ + (('a' <= (c) && (c) <= 'i') \ + || ('j' <= (c) && (c) <= 'r') \ + || ('s' <= (c) && (c) <= 'z')) +# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) +#endif + +#define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) +int +main () +{ + int i; + for (i = 0; i < 256; i++) + if (XOR (islower (i), ISLOWER (i)) + || toupper (i) != TOUPPER (i)) + return 2; + return 0; +} +_ACEOF +if ac_fn_c_try_run "$LINENO"; then : + +else + ac_cv_header_stdc=no +fi +rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ + conftest.$ac_objext conftest.beam conftest.$ac_ext +fi + +fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 +$as_echo "$ac_cv_header_stdc" >&6; } +if test $ac_cv_header_stdc = yes; then + +$as_echo "#define STDC_HEADERS 1" >>confdefs.h + +fi + +# On IRIX 5.3, sys/types and inttypes.h are conflicting. +for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ + inttypes.h stdint.h unistd.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default +" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF + +fi + +done + + +for ac_header in fcntl.h limits.h stdlib.h sys/types.h unistd.h winsock2.h ws2tcpip.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF + +fi + +done + +for ac_header in arpa/inet.h netdb.h netinet/in.h netinet/tcp.h sys/socket.h sys/uio.h sys/un.h linux/can.h linux/tcp.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" 
"$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF + +fi + +done + +for ac_header in net/if.h +do : + ac_fn_c_check_header_mongrel "$LINENO" "net/if.h" "ac_cv_header_net_if_h" "$ac_includes_default" +if test "x$ac_cv_header_net_if_h" = xyes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_NET_IF_H 1 +_ACEOF + +fi + +done + + +for ac_func in readlink symlink if_nametoindex +do : + as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` +ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" +if eval test \"x\$"$as_ac_var"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 +_ACEOF + +fi +done + + +ac_fn_c_check_member "$LINENO" "struct msghdr" "msg_control" "ac_cv_member_struct_msghdr_msg_control" "#if HAVE_SYS_TYPES_H +# include +#endif +#if HAVE_SYS_SOCKET_H +# include +#endif +#if HAVE_SYS_UIO_H +# include +#endif +" +if test "x$ac_cv_member_struct_msghdr_msg_control" = xyes; then : + +cat >>confdefs.h <<_ACEOF +#define HAVE_STRUCT_MSGHDR_MSG_CONTROL 1 +_ACEOF + + +fi +ac_fn_c_check_member "$LINENO" "struct msghdr" "msg_accrights" "ac_cv_member_struct_msghdr_msg_accrights" "#if HAVE_SYS_TYPES_H +# include +#endif +#if HAVE_SYS_SOCKET_H +# include +#endif +#if HAVE_SYS_UIO_H +# include +#endif +" +if test "x$ac_cv_member_struct_msghdr_msg_accrights" = xyes; then : + +cat >>confdefs.h <<_ACEOF +#define HAVE_STRUCT_MSGHDR_MSG_ACCRIGHTS 1 +_ACEOF + + +fi + + +ac_fn_c_check_member "$LINENO" "struct sockaddr" "sa_len" "ac_cv_member_struct_sockaddr_sa_len" "#if HAVE_SYS_TYPES_H +# include +#endif +#if HAVE_SYS_SOCKET_H +# include +#endif +" +if test "x$ac_cv_member_struct_sockaddr_sa_len" = xyes; then : + +cat >>confdefs.h <<_ACEOF +#define HAVE_STRUCT_SOCKADDR_SA_LEN 1 +_ACEOF + + +fi + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for in_addr_t in netinet/in.h" >&5 +$as_echo_n "checking 
for in_addr_t in netinet/in.h... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + +_ACEOF +if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | + $EGREP "in_addr_t" >/dev/null 2>&1; then : + +$as_echo "#define HAVE_IN_ADDR_T 1" >>confdefs.h + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi +rm -f conftest* + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for SO_PEERCRED and struct ucred in sys/socket.h" >&5 +$as_echo_n "checking for SO_PEERCRED and struct ucred in sys/socket.h... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +#ifndef SO_PEERCRED +# error no SO_PEERCRED +#endif +struct ucred u; +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_ucred=yes +else + ac_cv_ucred=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +if test "x$ac_cv_ucred" = xno; then + old_CFLAGS="$CFLAGS" + CFLAGS="-D_GNU_SOURCE $CFLAGS" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +#ifndef SO_PEERCRED +# error no SO_PEERCRED +#endif +struct ucred u; +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_ucred=yes +else + ac_cv_ucred=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + if test "x$ac_cv_ucred" = xyes; then + EXTRA_CPPFLAGS=-D_GNU_SOURCE + fi +else + old_CFLAGS="$CFLAGS" +fi +if test "x$ac_cv_ucred" = xno; then + CFLAGS="$old_CFLAGS" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +else + +$as_echo "#define HAVE_STRUCT_UCRED 1" >>confdefs.h + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +fi + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for getpeereid in unistd.h" >&5 +$as_echo_n "checking for getpeereid in unistd.h... 
" >&6; } +ac_fn_c_check_func "$LINENO" "getpeereid" "ac_cv_func_getpeereid" +if test "x$ac_cv_func_getpeereid" = xyes; then : + +$as_echo "#define HAVE_GETPEEREID 1" >>confdefs.h + +fi + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for _head_libws2_32_a in -lws2_32" >&5 +$as_echo_n "checking for _head_libws2_32_a in -lws2_32... " >&6; } +if ${ac_cv_lib_ws2_32__head_libws2_32_a+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_check_lib_save_LIBS=$LIBS +LIBS="-lws2_32 $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char _head_libws2_32_a (); +int +main () +{ +return _head_libws2_32_a (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + ac_cv_lib_ws2_32__head_libws2_32_a=yes +else + ac_cv_lib_ws2_32__head_libws2_32_a=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +LIBS=$ac_check_lib_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_ws2_32__head_libws2_32_a" >&5 +$as_echo "$ac_cv_lib_ws2_32__head_libws2_32_a" >&6; } +if test "x$ac_cv_lib_ws2_32__head_libws2_32_a" = xyes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_LIBWS2_32 1 +_ACEOF + + LIBS="-lws2_32 $LIBS" + +fi + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for getaddrinfo" >&5 +$as_echo_n "checking for getaddrinfo... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +$ac_includes_default +int testme(){ getaddrinfo; } +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + +$as_echo "#define HAVE_GETADDRINFO 1" >>confdefs.h + ac_have_getaddrinfo=yes; { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + +if test "x$ac_have_getaddrinfo" = x; then + old_CFLAGS="$CFLAGS" + if test "z$ac_cv_lib_ws2_32__head_libws2_32_a" = zyes; then + CFLAGS="-DWINVER=0x0501 $CFLAGS" + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for getaddrinfo if WINVER is 0x0501" >&5 +$as_echo_n "checking for getaddrinfo if WINVER is 0x0501... " >&6; } + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$ac_includes_default + int testme(){ getaddrinfo; } +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + +$as_echo "#define HAVE_GETADDRINFO 1" >>confdefs.h + +$as_echo "#define NEED_WINVER_XP 1" >>confdefs.h + EXTRA_CPPFLAGS="-DWINVER=0x0501 $EXTRA_CPPFLAGS"; { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +else + CFLAGS="$old_CFLAGS"; { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + fi +fi + +for ac_func in gai_strerror +do : + ac_fn_c_check_func "$LINENO" "gai_strerror" "ac_cv_func_gai_strerror" +if test "x$ac_cv_func_gai_strerror" = xyes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_GAI_STRERROR 1 +_ACEOF + +fi +done + + +ac_fn_c_check_decl "$LINENO" "AI_ADDRCONFIG" "ac_cv_have_decl_AI_ADDRCONFIG" "$ac_includes_default" +if test "x$ac_cv_have_decl_AI_ADDRCONFIG" = xyes; then : + ac_have_decl=1 +else + ac_have_decl=0 +fi + +cat >>confdefs.h <<_ACEOF +#define HAVE_DECL_AI_ADDRCONFIG $ac_have_decl +_ACEOF +ac_fn_c_check_decl "$LINENO" "AI_ALL" 
"ac_cv_have_decl_AI_ALL" "$ac_includes_default" +if test "x$ac_cv_have_decl_AI_ALL" = xyes; then : + ac_have_decl=1 +else + ac_have_decl=0 +fi + +cat >>confdefs.h <<_ACEOF +#define HAVE_DECL_AI_ALL $ac_have_decl +_ACEOF +ac_fn_c_check_decl "$LINENO" "AI_NUMERICSERV" "ac_cv_have_decl_AI_NUMERICSERV" "$ac_includes_default" +if test "x$ac_cv_have_decl_AI_NUMERICSERV" = xyes; then : + ac_have_decl=1 +else + ac_have_decl=0 +fi + +cat >>confdefs.h <<_ACEOF +#define HAVE_DECL_AI_NUMERICSERV $ac_have_decl +_ACEOF +ac_fn_c_check_decl "$LINENO" "AI_V4MAPPED" "ac_cv_have_decl_AI_V4MAPPED" "$ac_includes_default" +if test "x$ac_cv_have_decl_AI_V4MAPPED" = xyes; then : + ac_have_decl=1 +else + ac_have_decl=0 +fi + +cat >>confdefs.h <<_ACEOF +#define HAVE_DECL_AI_V4MAPPED $ac_have_decl +_ACEOF + + +ac_fn_c_check_decl "$LINENO" "IPV6_V6ONLY" "ac_cv_have_decl_IPV6_V6ONLY" "$ac_includes_default" +if test "x$ac_cv_have_decl_IPV6_V6ONLY" = xyes; then : + ac_have_decl=1 +else + ac_have_decl=0 +fi + +cat >>confdefs.h <<_ACEOF +#define HAVE_DECL_IPV6_V6ONLY $ac_have_decl +_ACEOF + + +ac_fn_c_check_decl "$LINENO" "IPPROTO_IP" "ac_cv_have_decl_IPPROTO_IP" "$ac_includes_default" +if test "x$ac_cv_have_decl_IPPROTO_IP" = xyes; then : + ac_have_decl=1 +else + ac_have_decl=0 +fi + +cat >>confdefs.h <<_ACEOF +#define HAVE_DECL_IPPROTO_IP $ac_have_decl +_ACEOF +ac_fn_c_check_decl "$LINENO" "IPPROTO_TCP" "ac_cv_have_decl_IPPROTO_TCP" "$ac_includes_default" +if test "x$ac_cv_have_decl_IPPROTO_TCP" = xyes; then : + ac_have_decl=1 +else + ac_have_decl=0 +fi + +cat >>confdefs.h <<_ACEOF +#define HAVE_DECL_IPPROTO_TCP $ac_have_decl +_ACEOF +ac_fn_c_check_decl "$LINENO" "IPPROTO_IPV6" "ac_cv_have_decl_IPPROTO_IPV6" "$ac_includes_default" +if test "x$ac_cv_have_decl_IPPROTO_IPV6" = xyes; then : + ac_have_decl=1 +else + ac_have_decl=0 +fi + +cat >>confdefs.h <<_ACEOF +#define HAVE_DECL_IPPROTO_IPV6 $ac_have_decl +_ACEOF + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sendfile in 
sys/sendfile.h" >&5 +$as_echo_n "checking for sendfile in sys/sendfile.h... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + +_ACEOF +if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | + $EGREP "sendfile" >/dev/null 2>&1; then : + +$as_echo "#define HAVE_LINUX_SENDFILE 1" >>confdefs.h + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi +rm -f conftest* + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sendfile in sys/socket.h" >&5 +$as_echo_n "checking for sendfile in sys/socket.h... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + +_ACEOF +if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | + $EGREP "sendfile" >/dev/null 2>&1; then : + +$as_echo "#define HAVE_BSD_SENDFILE 1" >>confdefs.h + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi +rm -f conftest* + + +for ac_func in gethostent +do : + ac_fn_c_check_func "$LINENO" "gethostent" "ac_cv_func_gethostent" +if test "x$ac_cv_func_gethostent" = xyes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_GETHOSTENT 1 +_ACEOF + +fi +done + + +for ac_func in accept4 +do : + ac_fn_c_check_func "$LINENO" "accept4" "ac_cv_func_accept4" +if test "x$ac_cv_func_accept4" = xyes; then : + cat >>confdefs.h <<_ACEOF +#define HAVE_ACCEPT4 1 +_ACEOF + +fi +done + + +case "$host" in +*-mingw* | *-msys*) + EXTRA_SRCS="cbits/initWinSock.c, cbits/winSockErr.c, cbits/asyncAccept.c" + EXTRA_LIBS=ws2_32 + CALLCONV=stdcall ;; +*-solaris2*) + EXTRA_SRCS="cbits/ancilData.c" + EXTRA_LIBS="nsl, socket" + CALLCONV=ccall ;; +*) + EXTRA_SRCS="cbits/ancilData.c" + EXTRA_LIBS= + CALLCONV=ccall ;; +esac + + + + + +ac_config_files="$ac_config_files network.buildinfo" + + +cat >confcache <<\_ACEOF +# This file is a shell script that caches 
the results of configure +# tests run on this system so they can be shared between configure +# scripts and configure runs, see configure's option --config-cache. +# It is not useful on other systems. If it contains results you don't +# want to keep, you may remove or edit it. +# +# config.status only pays attention to the cache file if you give it +# the --recheck option to rerun configure. +# +# `ac_cv_env_foo' variables (set or unset) will be overridden when +# loading this file, other *unset* `ac_cv_foo' will be assigned the +# following values. + +_ACEOF + +# The following way of writing the cache mishandles newlines in values, +# but we know of no workaround that is simple, portable, and efficient. +# So, we kill variables containing newlines. +# Ultrix sh set writes to stderr and can't be redirected directly, +# and sets the high bit in the cache file unless we assign to the vars. +( + for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + + (set) 2>&1 | + case $as_nl`(ac_space=' '; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + # `set' does not quote correctly, so add quotes: double-quote + # substitution turns \\\\ into \\, and sed turns \\ into \. + sed -n \ + "s/'/'\\\\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" + ;; #( + *) + # `set' quotes correctly as required by POSIX, so do not add quotes. 
+ sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) | + sed ' + /^ac_cv_env_/b end + t clear + :clear + s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ + t end + s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ + :end' >>confcache +if diff "$cache_file" confcache >/dev/null 2>&1; then :; else + if test -w "$cache_file"; then + if test "x$cache_file" != "x/dev/null"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 +$as_echo "$as_me: updating cache $cache_file" >&6;} + if test ! -f "$cache_file" || test -h "$cache_file"; then + cat confcache >"$cache_file" + else + case $cache_file in #( + */* | ?:*) + mv -f confcache "$cache_file"$$ && + mv -f "$cache_file"$$ "$cache_file" ;; #( + *) + mv -f confcache "$cache_file" ;; + esac + fi + fi + else + { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 +$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} + fi +fi +rm -f confcache + +test "x$prefix" = xNONE && prefix=$ac_default_prefix +# Let make expand exec_prefix. +test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' + +DEFS=-DHAVE_CONFIG_H + +ac_libobjs= +ac_ltlibobjs= +U= +for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue + # 1. Remove the extension, and $U if already installed. + ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' + ac_i=`$as_echo "$ac_i" | sed "$ac_script"` + # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR + # will be set to the directory where LIBOBJS objects are built. 
+ as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" + as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' +done +LIBOBJS=$ac_libobjs + +LTLIBOBJS=$ac_ltlibobjs + + + +: "${CONFIG_STATUS=./config.status}" +ac_write_fail=0 +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files $CONFIG_STATUS" +{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 +$as_echo "$as_me: creating $CONFIG_STATUS" >&6;} +as_write_fail=0 +cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 +#! $SHELL +# Generated by $as_me. +# Run this file to recreate the current configuration. +# Compiler output produced by configure, useful for debugging +# configure, is in config.log if it exists. + +debug=false +ac_cs_recheck=false +ac_cs_silent=false + +SHELL=\${CONFIG_SHELL-$SHELL} +export SHELL +_ASEOF +cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 +## -------------------- ## +## M4sh Initialization. ## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi + + +as_nl=' +' +export as_nl +# Printing a long string crashes Solaris 7 /usr/bin/printf. +as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo +# Prefer a ksh shell builtin over an external printf program on Solaris, +# but without wasting forks for bash or zsh. 
+if test -z "$BASH_VERSION$ZSH_VERSION" \ + && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='print -r --' + as_echo_n='print -rn --' +elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='printf %s\n' + as_echo_n='printf %s' +else + if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then + as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' + as_echo_n='/usr/ucb/echo -n' + else + as_echo_body='eval expr "X$1" : "X\\(.*\\)"' + as_echo_n_body='eval + arg=$1; + case $arg in #( + *"$as_nl"*) + expr "X$arg" : "X\\(.*\\)$as_nl"; + arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; + esac; + expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" + ' + export as_echo_n_body + as_echo_n='sh -c $as_echo_n_body as_echo' + fi + export as_echo_body + as_echo='sh -c $as_echo_body as_echo' +fi + +# The user is always right. +if test "${PATH_SEPARATOR+set}" != set; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# IFS +# We need space, tab and new line, in precisely that order. Quoting is +# there to prevent editors from complaining about space-tab. +# (If _AS_PATH_WALK were called with IFS unset, it would disable word +# splitting by setting IFS to empty value.) +IFS=" "" $as_nl" + +# Find who we are. Look in the path if we contain no directory separator. +as_myself= +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as `sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! 
-f "$as_myself"; then + $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + +# Unset variables that we do not need and which cause bugs (e.g. in +# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" +# suppresses any "Segmentation fault" message there. '((' could +# trigger a bug in pdksh 5.2.14. +for as_var in BASH_ENV ENV MAIL MAILPATH +do eval test x\${$as_var+set} = xset \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done +PS1='$ ' +PS2='> ' +PS4='+ ' + +# NLS nuisances. +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# CDPATH. +(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. +as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + $as_echo "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. 
Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else + as_fn_arith () + { + as_val=`expr "$@" || test $? -eq 1` + } +fi # as_fn_arith + + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. 
+ xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p +if mkdir -p . 
2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +as_test_x='test -x' +as_executable_p=as_fn_executable_p + +# Sed expression to map a string onto a valid CPP name. +as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" + +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" + + +exec 6>&1 +## ----------------------------------- ## +## Main body of $CONFIG_STATUS script. ## +## ----------------------------------- ## +_ASEOF +test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# Save the log message, to keep $0 and so on meaningful, and to +# report actual input values of CONFIG_FILES etc. instead of their +# values after options handling. +ac_log=" +This file was extended by Haskell network package $as_me 2.6.3.1, which was +generated by GNU Autoconf 2.69. Invocation command line was + + CONFIG_FILES = $CONFIG_FILES + CONFIG_HEADERS = $CONFIG_HEADERS + CONFIG_LINKS = $CONFIG_LINKS + CONFIG_COMMANDS = $CONFIG_COMMANDS + $ $0 $@ + +on `(hostname || uname -n) 2>/dev/null | sed 1q` +" + +_ACEOF + +case $ac_config_files in *" +"*) set x $ac_config_files; shift; ac_config_files=$*;; +esac + +case $ac_config_headers in *" +"*) set x $ac_config_headers; shift; ac_config_headers=$*;; +esac + + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +# Files that config.status was made for. +config_files="$ac_config_files" +config_headers="$ac_config_headers" + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +ac_cs_usage="\ +\`$as_me' instantiates files and other configuration actions +from templates according to the current configuration. 
Unless the files +and actions are specified as TAGs, all are instantiated by default. + +Usage: $0 [OPTION]... [TAG]... + + -h, --help print this help, then exit + -V, --version print version number and configuration settings, then exit + --config print configuration, then exit + -q, --quiet, --silent + do not print progress messages + -d, --debug don't remove temporary files + --recheck update $as_me by reconfiguring in the same conditions + --file=FILE[:TEMPLATE] + instantiate the configuration file FILE + --header=FILE[:TEMPLATE] + instantiate the configuration header FILE + +Configuration files: +$config_files + +Configuration headers: +$config_headers + +Report bugs to ." + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" +ac_cs_version="\\ +Haskell network package config.status 2.6.3.1 +configured by $0, generated by GNU Autoconf 2.69, + with options \\"\$ac_cs_config\\" + +Copyright (C) 2012 Free Software Foundation, Inc. +This config.status script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it." + +ac_pwd='$ac_pwd' +srcdir='$srcdir' +test -n "\$AWK" || AWK=awk +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# The default lists apply if the user does not specify any file. +ac_need_defaults=: +while test $# != 0 +do + case $1 in + --*=?*) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` + ac_shift=: + ;; + --*=) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg= + ac_shift=: + ;; + *) + ac_option=$1 + ac_optarg=$2 + ac_shift=shift + ;; + esac + + case $ac_option in + # Handling of the options. 
+ -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) + ac_cs_recheck=: ;; + --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) + $as_echo "$ac_cs_version"; exit ;; + --config | --confi | --conf | --con | --co | --c ) + $as_echo "$ac_cs_config"; exit ;; + --debug | --debu | --deb | --de | --d | -d ) + debug=: ;; + --file | --fil | --fi | --f ) + $ac_shift + case $ac_optarg in + *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; + '') as_fn_error $? "missing file argument" ;; + esac + as_fn_append CONFIG_FILES " '$ac_optarg'" + ac_need_defaults=false;; + --header | --heade | --head | --hea ) + $ac_shift + case $ac_optarg in + *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + as_fn_append CONFIG_HEADERS " '$ac_optarg'" + ac_need_defaults=false;; + --he | --h) + # Conflict between --help and --header + as_fn_error $? "ambiguous option: \`$1' +Try \`$0 --help' for more information.";; + --help | --hel | -h ) + $as_echo "$ac_cs_usage"; exit ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil | --si | --s) + ac_cs_silent=: ;; + + # This is an error. + -*) as_fn_error $? "unrecognized option: \`$1' +Try \`$0 --help' for more information." ;; + + *) as_fn_append ac_config_targets " $1" + ac_need_defaults=false ;; + + esac + shift +done + +ac_configure_extra_args= + +if $ac_cs_silent; then + exec 6>/dev/null + ac_configure_extra_args="$ac_configure_extra_args --silent" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +if \$ac_cs_recheck; then + set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion + shift + \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 + CONFIG_SHELL='$SHELL' + export CONFIG_SHELL + exec "\$@" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +exec 5>>config.log +{ + echo + sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX +## Running $as_me. 
## +_ASBOX + $as_echo "$ac_log" +} >&5 + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 + +# Handling of arguments. +for ac_config_target in $ac_config_targets +do + case $ac_config_target in + "include/HsNetworkConfig.h") CONFIG_HEADERS="$CONFIG_HEADERS include/HsNetworkConfig.h" ;; + "network.buildinfo") CONFIG_FILES="$CONFIG_FILES network.buildinfo" ;; + + *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; + esac +done + + +# If the user did not use the arguments to specify the items to instantiate, +# then the envvar interface is used. Set only those that are not. +# We use the long form for the default assignment because of an extremely +# bizarre bug on SunOS 4.1.3. +if $ac_need_defaults; then + test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files + test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers +fi + +# Have a temporary directory for convenience. Make it in the build tree +# simply because there is no reason against having it here, and in addition, +# creating and moving files from /tmp can sometimes cause problems. +# Hook for its removal unless debugging. +# Note that there is a small window in which the directory will not be cleaned: +# after its creation but before its name has been assigned to `$tmp'. +$debug || +{ + tmp= ac_tmp= + trap 'exit_status=$? + : "${ac_tmp:=$tmp}" + { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status +' 0 + trap 'as_fn_exit 1' 1 2 13 15 +} +# Create a (secure) tmp directory for tmp files. + +{ + tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && + test -d "$tmp" +} || +{ + tmp=./conf$$-$RANDOM + (umask 077 && mkdir "$tmp") +} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 +ac_tmp=$tmp + +# Set up the scripts for CONFIG_FILES section. +# No need to generate them if there are no CONFIG_FILES. +# This happens for instance with `./config.status config.h'. 
+if test -n "$CONFIG_FILES"; then + + +ac_cr=`echo X | tr X '\015'` +# On cygwin, bash can eat \r inside `` if the user requested igncr. +# But we know of no other shell where ac_cr would be empty at this +# point, so we can use a bashism as a fallback. +if test "x$ac_cr" = x; then + eval ac_cr=\$\'\\r\' +fi +ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` +if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then + ac_cs_awk_cr='\\r' +else + ac_cs_awk_cr=$ac_cr +fi + +echo 'BEGIN {' >"$ac_tmp/subs1.awk" && +_ACEOF + + +{ + echo "cat >conf$$subs.awk <<_ACEOF" && + echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && + echo "_ACEOF" +} >conf$$subs.sh || + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 +ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` +ac_delim='%!_!# ' +for ac_last_try in false false false false false :; do + . ./conf$$subs.sh || + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 + + ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` + if test $ac_delim_n = $ac_delim_num; then + break + elif $ac_last_try; then + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 + else + ac_delim="$ac_delim!$ac_delim _$ac_delim!! 
" + fi +done +rm -f conf$$subs.sh + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && +_ACEOF +sed -n ' +h +s/^/S["/; s/!.*/"]=/ +p +g +s/^[^!]*!// +:repl +t repl +s/'"$ac_delim"'$// +t delim +:nl +h +s/\(.\{148\}\)..*/\1/ +t more1 +s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ +p +n +b repl +:more1 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t nl +:delim +h +s/\(.\{148\}\)..*/\1/ +t more2 +s/["\\]/\\&/g; s/^/"/; s/$/"/ +p +b +:more2 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t delim +' >$CONFIG_STATUS || ac_write_fail=1 +rm -f conf$$subs.awk +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +_ACAWK +cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && + for (key in S) S_is_set[key] = 1 + FS = "" + +} +{ + line = $ 0 + nfields = split(line, field, "@") + substed = 0 + len = length(field[1]) + for (i = 2; i < nfields; i++) { + key = field[i] + keylen = length(key) + if (S_is_set[key]) { + value = S[key] + line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) + len += length(value) + length(field[++i]) + substed = 1 + } else + len += 1 + keylen + } + + print line +} + +_ACAWK +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then + sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" +else + cat +fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ + || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 +_ACEOF + +# VPATH may cause trouble with some makes, so we remove sole $(srcdir), +# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and +# trailing colons and then remove the whole line if VPATH becomes empty +# (actually we leave an empty line to preserve line numbers). 
+if test "x$srcdir" = x.; then + ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ +h +s/// +s/^/:/ +s/[ ]*$/:/ +s/:\$(srcdir):/:/g +s/:\${srcdir}:/:/g +s/:@srcdir@:/:/g +s/^:*// +s/:*$// +x +s/\(=[ ]*\).*/\1/ +G +s/\n// +s/^[^=]*=[ ]*$// +}' +fi + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +fi # test -n "$CONFIG_FILES" + +# Set up the scripts for CONFIG_HEADERS section. +# No need to generate them if there are no CONFIG_HEADERS. +# This happens for instance with `./config.status Makefile'. +if test -n "$CONFIG_HEADERS"; then +cat >"$ac_tmp/defines.awk" <<\_ACAWK || +BEGIN { +_ACEOF + +# Transform confdefs.h into an awk script `defines.awk', embedded as +# here-document in config.status, that substitutes the proper values into +# config.h.in to produce config.h. + +# Create a delimiter string that does not exist in confdefs.h, to ease +# handling of long lines. +ac_delim='%!_!# ' +for ac_last_try in false false :; do + ac_tt=`sed -n "/$ac_delim/p" confdefs.h` + if test -z "$ac_tt"; then + break + elif $ac_last_try; then + as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5 + else + ac_delim="$ac_delim!$ac_delim _$ac_delim!! " + fi +done + +# For the awk script, D is an array of macro values keyed by name, +# likewise P contains macro parameters if any. Preserve backslash +# newline sequences. 
+ +ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* +sed -n ' +s/.\{148\}/&'"$ac_delim"'/g +t rset +:rset +s/^[ ]*#[ ]*define[ ][ ]*/ / +t def +d +:def +s/\\$// +t bsnl +s/["\\]/\\&/g +s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ +D["\1"]=" \3"/p +s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p +d +:bsnl +s/["\\]/\\&/g +s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ +D["\1"]=" \3\\\\\\n"\\/p +t cont +s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p +t cont +d +:cont +n +s/.\{148\}/&'"$ac_delim"'/g +t clear +:clear +s/\\$// +t bsnlc +s/["\\]/\\&/g; s/^/"/; s/$/"/p +d +:bsnlc +s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p +b cont +' >$CONFIG_STATUS || ac_write_fail=1 + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 + for (key in D) D_is_set[key] = 1 + FS = "" +} +/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { + line = \$ 0 + split(line, arg, " ") + if (arg[1] == "#") { + defundef = arg[2] + mac1 = arg[3] + } else { + defundef = substr(arg[1], 2) + mac1 = arg[2] + } + split(mac1, mac2, "(") #) + macro = mac2[1] + prefix = substr(line, 1, index(line, defundef) - 1) + if (D_is_set[macro]) { + # Preserve the white space surrounding the "#". + print prefix "define", macro P[macro] D[macro] + next + } else { + # Replace #undef with comments. This is necessary, for example, + # in the case of _POSIX_SOURCE, which is predefined and required + # on some systems where configure will not decide to define it. + if (defundef == "undef") { + print "/*", prefix defundef, macro, "*/" + next + } + } +} +{ print } +_ACAWK +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 + as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 +fi # test -n "$CONFIG_HEADERS" + + +eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS " +shift +for ac_tag +do + case $ac_tag in + :[FHLC]) ac_mode=$ac_tag; continue;; + esac + case $ac_mode$ac_tag in + :[FHL]*:*);; + :L* | :C*:*) as_fn_error $? 
"invalid tag \`$ac_tag'" "$LINENO" 5;; + :[FH]-) ac_tag=-:-;; + :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; + esac + ac_save_IFS=$IFS + IFS=: + set x $ac_tag + IFS=$ac_save_IFS + shift + ac_file=$1 + shift + + case $ac_mode in + :L) ac_source=$1;; + :[FH]) + ac_file_inputs= + for ac_f + do + case $ac_f in + -) ac_f="$ac_tmp/stdin";; + *) # Look for the file first in the build tree, then in the source tree + # (if the path is not absolute). The absolute path cannot be DOS-style, + # because $ac_f cannot contain `:'. + test -f "$ac_f" || + case $ac_f in + [\\/$]*) false;; + *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; + esac || + as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; + esac + case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac + as_fn_append ac_file_inputs " '$ac_f'" + done + + # Let's still pretend it is `configure' which instantiates (i.e., don't + # use $as_me), people would be surprised to read: + # /* config.h. Generated by config.status. */ + configure_input='Generated from '` + $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' + `' by configure.' + if test x"$ac_file" != x-; then + configure_input="$ac_file. $configure_input" + { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 +$as_echo "$as_me: creating $ac_file" >&6;} + fi + # Neutralize special characters interpreted by sed in replacement strings. + case $configure_input in #( + *\&* | *\|* | *\\* ) + ac_sed_conf_input=`$as_echo "$configure_input" | + sed 's/[\\\\&|]/\\\\&/g'`;; #( + *) ac_sed_conf_input=$configure_input;; + esac + + case $ac_tag in + *:-:* | *:-) cat >"$ac_tmp/stdin" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; + esac + ;; + esac + + ac_dir=`$as_dirname -- "$ac_file" || +$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$ac_file" : 'X\(//\)[^/]' \| \ + X"$ac_file" : 'X\(//\)$' \| \ + X"$ac_file" : 'X\(/\)' \| . 
2>/dev/null || +$as_echo X"$ac_file" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + as_dir="$ac_dir"; as_fn_mkdir_p + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. + ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + + case $ac_mode in + :F) + # + # CONFIG_FILE + # + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# If the template does not know about datarootdir, expand it. +# FIXME: This hack should be removed a few years after 2.60. 
+ac_datarootdir_hack=; ac_datarootdir_seen= +ac_sed_dataroot=' +/datarootdir/ { + p + q +} +/@datadir@/p +/@docdir@/p +/@infodir@/p +/@localedir@/p +/@mandir@/p' +case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in +*datarootdir*) ac_datarootdir_seen=yes;; +*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 +$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 + ac_datarootdir_hack=' + s&@datadir@&$datadir&g + s&@docdir@&$docdir&g + s&@infodir@&$infodir&g + s&@localedir@&$localedir&g + s&@mandir@&$mandir&g + s&\\\${datarootdir}&$datarootdir&g' ;; +esac +_ACEOF + +# Neutralize VPATH when `$srcdir' = `.'. +# Shell code in configure.ac might set extrasub. +# FIXME: do we really want to maintain this feature? +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_sed_extra="$ac_vpsub +$extrasub +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +:t +/@[a-zA-Z_][a-zA-Z_0-9]*@/!b +s|@configure_input@|$ac_sed_conf_input|;t t +s&@top_builddir@&$ac_top_builddir_sub&;t t +s&@top_build_prefix@&$ac_top_build_prefix&;t t +s&@srcdir@&$ac_srcdir&;t t +s&@abs_srcdir@&$ac_abs_srcdir&;t t +s&@top_srcdir@&$ac_top_srcdir&;t t +s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t +s&@builddir@&$ac_builddir&;t t +s&@abs_builddir@&$ac_abs_builddir&;t t +s&@abs_top_builddir@&$ac_abs_top_builddir&;t t +$ac_datarootdir_hack +" +eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ + >$ac_tmp/out || as_fn_error $? 
"could not create $ac_file" "$LINENO" 5 + +test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && + { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && + { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ + "$ac_tmp/out"`; test -z "$ac_out"; } && + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined" >&5 +$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined" >&2;} + + rm -f "$ac_tmp/stdin" + case $ac_file in + -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; + *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; + esac \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + ;; + :H) + # + # CONFIG_HEADER + # + if test x"$ac_file" != x-; then + { + $as_echo "/* $configure_input */" \ + && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" + } >"$ac_tmp/config.h" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then + { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 +$as_echo "$as_me: $ac_file is unchanged" >&6;} + else + rm -f "$ac_file" + mv "$ac_tmp/config.h" "$ac_file" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + fi + else + $as_echo "/* $configure_input */" \ + && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \ + || as_fn_error $? "could not create -" "$LINENO" 5 + fi + ;; + + + esac + +done # for ac_tag + + +as_fn_exit 0 +_ACEOF +ac_clean_files=$ac_clean_files_save + +test $ac_write_fail = 0 || + as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 + + +# configure is writing to config.log, and then calls config.status. +# config.status does its own redirection, appending to config.log. 
+# Unfortunately, on DOS this fails, as config.log is still kept open +# by configure, so config.status won't be able to write to it; its +# output is simply discarded. So we exec the FD to /dev/null, +# effectively closing config.log, so it can be properly (re)opened and +# appended to by config.status. When coming back to configure, we +# need to make the FD available again. +if test "$no_create" != yes; then + ac_cs_success=: + ac_config_status_args= + test "$silent" = yes && + ac_config_status_args="$ac_config_status_args --quiet" + exec 5>/dev/null + $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false + exec 5>>config.log + # Use ||, not &&, to avoid exiting from the if with $? = 1, which + # would make configure fail if this is the last instruction. + $ac_cs_success || as_fn_exit 1 +fi +if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 +$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} +fi + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/configure.ac cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/configure.ac --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/configure.ac 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/configure.ac 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,192 @@ +AC_INIT([Haskell network package], [2.8.0.0], [libraries@haskell.org], [network]) + +ac_includes_default="$ac_includes_default +#ifdef HAVE_SYS_SOCKET_H +# include +#endif +#ifdef HAVE_NETINET_IN_H +# include +#endif +#ifdef HAVE_NETDB_H +# include +#endif +#ifdef HAVE_WINSOCK2_H +# include +#endif +#ifdef HAVE_WS2TCPIP_H +# include +// fix for MingW not defining IPV6_V6ONLY +# define IPV6_V6ONLY 27 +#endif" + +# Safety check: Ensure that we are in the correct source directory. 
+AC_CONFIG_SRCDIR([include/HsNet.h]) + +AC_CONFIG_HEADERS([include/HsNetworkConfig.h]) + +AC_CANONICAL_HOST + +AC_ARG_WITH([cc], + [C compiler], + [CC=$withval]) +AC_PROG_CC() + +AC_C_CONST + +dnl ** check for specific header (.h) files that we are interested in +AC_CHECK_HEADERS([fcntl.h limits.h stdlib.h sys/types.h unistd.h winsock2.h ws2tcpip.h]) +AC_CHECK_HEADERS([arpa/inet.h netdb.h netinet/in.h netinet/tcp.h sys/socket.h sys/uio.h sys/un.h linux/can.h linux/tcp.h]) +AC_CHECK_HEADERS([net/if.h]) + +AC_CHECK_FUNCS([readlink symlink if_nametoindex]) + +dnl ** check what fields struct msghdr contains +AC_CHECK_MEMBERS([struct msghdr.msg_control, struct msghdr.msg_accrights], [], [], [#if HAVE_SYS_TYPES_H +# include +#endif +#if HAVE_SYS_SOCKET_H +# include +#endif +#if HAVE_SYS_UIO_H +# include +#endif]) + +dnl ** check if struct sockaddr contains sa_len +AC_CHECK_MEMBERS([struct sockaddr.sa_len], [], [], [#if HAVE_SYS_TYPES_H +# include +#endif +#if HAVE_SYS_SOCKET_H +# include +#endif]) + +dnl -------------------------------------------------- +dnl * test for in_addr_t +dnl -------------------------------------------------- +AC_MSG_CHECKING(for in_addr_t in netinet/in.h) +AC_EGREP_HEADER(in_addr_t, netinet/in.h, + [ AC_DEFINE([HAVE_IN_ADDR_T], [1], [Define to 1 if in_addr_t is available.]) AC_MSG_RESULT(yes) ], + AC_MSG_RESULT(no)) + +dnl -------------------------------------------------- +dnl * test for SO_PEERCRED and struct ucred +dnl -------------------------------------------------- +AC_MSG_CHECKING(for SO_PEERCRED and struct ucred in sys/socket.h) +AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include +#include +#ifndef SO_PEERCRED +# error no SO_PEERCRED +#endif +struct ucred u;]])],ac_cv_ucred=yes,ac_cv_ucred=no) +if test "x$ac_cv_ucred" = xno; then + old_CFLAGS="$CFLAGS" + CFLAGS="-D_GNU_SOURCE $CFLAGS" + AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[#include +#include +#ifndef SO_PEERCRED +# error no SO_PEERCRED +#endif +struct ucred 
u;]])],ac_cv_ucred=yes,ac_cv_ucred=no) + if test "x$ac_cv_ucred" = xyes; then + EXTRA_CPPFLAGS=-D_GNU_SOURCE + fi +else + old_CFLAGS="$CFLAGS" +fi +if test "x$ac_cv_ucred" = xno; then + CFLAGS="$old_CFLAGS" + AC_MSG_RESULT(no) +else + AC_DEFINE([HAVE_STRUCT_UCRED], [1], [Define to 1 if you have both SO_PEERCRED and struct ucred.]) + AC_MSG_RESULT(yes) +fi + +dnl -------------------------------------------------- +dnl * test for GETPEEREID(3) +dnl -------------------------------------------------- +AC_MSG_CHECKING(for getpeereid in unistd.h) +AC_CHECK_FUNC( getpeereid, AC_DEFINE([HAVE_GETPEEREID], [1], [Define to 1 if you have getpeereid.] )) + +dnl -------------------------------------------------- +dnl * check for Windows networking libraries +dnl -------------------------------------------------- +AC_CHECK_LIB(ws2_32, _head_libws2_32_a) + +dnl -------------------------------------------------- +dnl * test for getaddrinfo as proxy for IPv6 support +dnl -------------------------------------------------- +AC_MSG_CHECKING(for getaddrinfo) +dnl Can't use AC_CHECK_FUNC here, because it doesn't do the right +dnl thing on Windows. +AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[$ac_includes_default +int testme(){ getaddrinfo; }]])],[AC_DEFINE([HAVE_GETADDRINFO], [1], [Define to 1 if you have the `getaddrinfo' function.]) ac_have_getaddrinfo=yes; AC_MSG_RESULT(yes)],[AC_MSG_RESULT(no)]) + +dnl Under mingw, we may need to set WINVER to 0x0501 to expose getaddrinfo. 
+if test "x$ac_have_getaddrinfo" = x; then + old_CFLAGS="$CFLAGS" + if test "z$ac_cv_lib_ws2_32__head_libws2_32_a" = zyes; then + CFLAGS="-DWINVER=0x0501 $CFLAGS" + AC_MSG_CHECKING(for getaddrinfo if WINVER is 0x0501) + AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[$ac_includes_default + int testme(){ getaddrinfo; }]])],[AC_DEFINE([HAVE_GETADDRINFO], [1], [Define to 1 if you have the `getaddrinfo' function.]) AC_DEFINE([NEED_WINVER_XP], [1], [Define to 1 if the `getaddrinfo' function needs WINVER set.]) EXTRA_CPPFLAGS="-DWINVER=0x0501 $EXTRA_CPPFLAGS"; AC_MSG_RESULT(yes)],[CFLAGS="$old_CFLAGS"; AC_MSG_RESULT(no)]) + fi +fi + +dnl Missing under mingw, sigh. +AC_CHECK_FUNCS(gai_strerror) + +dnl ------------------------------------------------------- +dnl * test for AI_* flags that not all implementations have +dnl ------------------------------------------------------- +AC_CHECK_DECLS([AI_ADDRCONFIG, AI_ALL, AI_NUMERICSERV, AI_V4MAPPED]) + +dnl ------------------------------------------------------- +dnl * test for IPV6_V6ONLY flags that not all implementations have +dnl ------------------------------------------------------- +AC_CHECK_DECLS([IPV6_V6ONLY]) + +dnl ------------------------------------------------------- +dnl * test for IPPROTO_* macros/constants +dnl ------------------------------------------------------- +AC_CHECK_DECLS([IPPROTO_IP, IPPROTO_TCP, IPPROTO_IPV6]) + +dnl -------------------------------------------------- +dnl * test for Linux sendfile(2) +dnl -------------------------------------------------- +AC_MSG_CHECKING(for sendfile in sys/sendfile.h) +AC_EGREP_HEADER(sendfile, sys/sendfile.h, + [ AC_DEFINE([HAVE_LINUX_SENDFILE], [1], [Define to 1 if you have a Linux sendfile(2) implementation.]) AC_MSG_RESULT(yes) ], + AC_MSG_RESULT(no)) + +dnl -------------------------------------------------- +dnl * test for BSD sendfile(2) +dnl -------------------------------------------------- +AC_MSG_CHECKING(for sendfile in sys/socket.h) +AC_EGREP_HEADER(sendfile, 
sys/socket.h, + [ AC_DEFINE([HAVE_BSD_SENDFILE], [1], [Define to 1 if you have a BSDish sendfile(2) implementation.]) AC_MSG_RESULT(yes) ], + AC_MSG_RESULT(no)) + +AC_CHECK_FUNCS(gethostent) + +AC_CHECK_FUNCS(accept4) + +case "$host" in +*-mingw* | *-msys*) + EXTRA_SRCS="cbits/initWinSock.c, cbits/winSockErr.c, cbits/asyncAccept.c" + EXTRA_LIBS=ws2_32 + ;; +*-solaris2*) + EXTRA_SRCS="cbits/ancilData.c" + EXTRA_LIBS="nsl, socket" + ;; +*) + EXTRA_SRCS="cbits/ancilData.c" + EXTRA_LIBS= + ;; +esac +AC_SUBST([EXTRA_CPPFLAGS]) +AC_SUBST([EXTRA_LIBS]) +AC_SUBST([EXTRA_SRCS]) + +AC_CONFIG_FILES([network.buildinfo]) + +AC_OUTPUT diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/examples/EchoClient.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/examples/EchoClient.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/examples/EchoClient.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/examples/EchoClient.hs 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,27 @@ +{-# LANGUAGE OverloadedStrings #-} +-- Echo client program +module Main (main) where + +import qualified Control.Exception as E +import qualified Data.ByteString.Char8 as C +import Network.Socket hiding (recv) +import Network.Socket.ByteString (recv, sendAll) + +main :: IO () +main = withSocketsDo $ do + addr <- resolve "127.0.0.1" "3000" + E.bracket (open addr) close talk + where + resolve host port = do + let hints = defaultHints { addrSocketType = Stream } + addr:_ <- getAddrInfo (Just hints) (Just host) (Just port) + return addr + open addr = do + sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) + connect sock $ addrAddress addr + return sock + talk sock = do + sendAll sock "Hello, world!" 
+ msg <- recv sock 1024 + putStr "Received: " + C.putStrLn msg diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/examples/EchoServer.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/examples/EchoServer.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/examples/EchoServer.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/examples/EchoServer.hs 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,41 @@ +-- Echo server program +module Main (main) where + +import Control.Concurrent (forkFinally) +import qualified Control.Exception as E +import Control.Monad (unless, forever, void) +import qualified Data.ByteString as S +import Network.Socket hiding (recv) +import Network.Socket.ByteString (recv, sendAll) + +main :: IO () +main = withSocketsDo $ do + addr <- resolve "3000" + E.bracket (open addr) close loop + where + resolve port = do + let hints = defaultHints { + addrFlags = [AI_PASSIVE] + , addrSocketType = Stream + } + addr:_ <- getAddrInfo (Just hints) Nothing (Just port) + return addr + open addr = do + sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) + setSocketOption sock ReuseAddr 1 + -- If the prefork technique is not used, + -- set CloseOnExec for the security reasons. 
+ let fd = fdSocket sock + setCloseOnExecIfNeeded fd + bind sock (addrAddress addr) + listen sock 10 + return sock + loop sock = forever $ do + (conn, peer) <- accept sock + putStrLn $ "Connection from " ++ show peer + void $ forkFinally (talk conn) (\_ -> close conn) + talk conn = do + msg <- recv conn 1024 + unless (S.null msg) $ do + sendAll conn msg + talk conn diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/include/HsNetDef.h cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/include/HsNetDef.h --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/include/HsNetDef.h 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/include/HsNetDef.h 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,45 @@ +#ifndef HSNETDEF_H +#define HSNETDEF_H + +#include "HsNetworkConfig.h" + +/* ultra-evil... */ +#undef PACKAGE_BUGREPORT +#undef PACKAGE_NAME +#undef PACKAGE_STRING +#undef PACKAGE_TARNAME +#undef PACKAGE_VERSION + +#if defined(HAVE_WINSOCK2_H) +# define WITH_WINSOCK 1 +#endif + +#if !defined(mingw32_HOST_OS) && !defined(_WIN32) +# define DOMAIN_SOCKET_SUPPORT 1 +#endif + +/* stdcall is for Windows 32. + Haskell FFI does not have a keyword for Windows 64. + If ccall/stdcall is specified on Windows 64, + GHC ignores it and use a proper ABI for Windows 64. + But if stdcall is specified, GHC displays a warning. + So, let's use ccall for Windows 64. 
+ */ +#if defined(mingw32_HOST_OS) +# if defined(i386_HOST_ARCH) +# define CALLCONV stdcall +# elif defined(x86_64_HOST_ARCH) +# define CALLCONV ccall +# else +# error Unknown mingw32 arch +# endif +#else +# define CALLCONV ccall +#endif +#if defined(mingw32_HOST_OS) +# define SAFE_ON_WIN safe +#else +# define SAFE_ON_WIN unsafe +#endif + +#endif /* HSNETDEF_H */ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/include/HsNet.h cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/include/HsNet.h --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/include/HsNet.h 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/include/HsNet.h 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,165 @@ +/* ----------------------------------------------------------------------------- + * + * Definitions for package `network' which are visible in Haskell land. + * + * ---------------------------------------------------------------------------*/ + +#ifndef HSNET_H +#define HSNET_H + +#include "HsNetDef.h" + +#ifdef NEED_WINVER +# define WINVER 0x0501 +#endif + +#ifndef INLINE +# if defined(_MSC_VER) +# define INLINE extern __inline +# elif defined(__GNUC_GNU_INLINE__) +# define INLINE extern inline +# else +# define INLINE inline +# endif +#endif + +#ifdef HAVE_GETADDRINFO +# define IPV6_SOCKET_SUPPORT 1 +#else +# undef IPV6_SOCKET_SUPPORT +#endif + +#if defined(HAVE_WINSOCK2_H) +#include +# ifdef HAVE_WS2TCPIP_H +# include +// fix for MingW not defining IPV6_V6ONLY +# define IPV6_V6ONLY 27 +# endif + +extern int initWinSock (); +extern const char* getWSErrorDescr(int err); +extern void* newAcceptParams(int sock, + int sz, + void* sockaddr); +extern int acceptNewSock(void* d); +extern int acceptDoProc(void* param); + +#else + +#ifdef HAVE_LIMITS_H +# include +#endif +#ifdef HAVE_STDLIB_H +# include +#endif +#ifdef HAVE_UNISTD_H +#include +#endif +#ifdef HAVE_SYS_TYPES_H +# include 
+#endif +#ifdef HAVE_FCNTL_H +# include +#endif +#ifdef HAVE_SYS_UIO_H +# include +#endif +#ifdef HAVE_SYS_SOCKET_H +# include +#endif +#ifdef HAVE_LINUX_TCP_H +# include +#elif HAVE_NETINET_TCP_H +# include +#endif +#ifdef HAVE_NETINET_IN_H +# include +#endif +#ifdef HAVE_SYS_UN_H +# include +#endif +#ifdef HAVE_ARPA_INET_H +# include +#endif +#ifdef HAVE_NETDB_H +#include +#endif +#ifdef HAVE_LINUX_CAN_H +# include +# define CAN_SOCKET_SUPPORT 1 +#endif +#ifdef HAVE_NET_IF +# include +#endif + +#ifdef HAVE_BSD_SENDFILE +#include +#endif +#ifdef HAVE_LINUX_SENDFILE +#if !defined(__USE_FILE_OFFSET64) +#include +#endif +#endif + +extern int +sendFd(int sock, int outfd); + +extern int +recvFd(int sock); + +#endif /* HAVE_WINSOCK2_H */ + +INLINE char * +hsnet_inet_ntoa( +#if defined(HAVE_WINSOCK2_H) + u_long addr +#elif defined(HAVE_IN_ADDR_T) + in_addr_t addr +#elif defined(HAVE_INTTYPES_H) + u_int32_t addr +#else + unsigned long addr +#endif + ) +{ + struct in_addr a; + a.s_addr = addr; + return inet_ntoa(a); +} + +#ifdef HAVE_GETADDRINFO +INLINE int +hsnet_getnameinfo(const struct sockaddr* a,socklen_t b, char* c, +# if defined(HAVE_WINSOCK2_H) + DWORD d, char* e, DWORD f, int g) +# else + socklen_t d, char* e, socklen_t f, int g) +# endif +{ + return getnameinfo(a,b,c,d,e,f,g); +} + +INLINE int +hsnet_getaddrinfo(const char *hostname, const char *servname, + const struct addrinfo *hints, struct addrinfo **res) +{ + return getaddrinfo(hostname, servname, hints, res); +} + +INLINE void +hsnet_freeaddrinfo(struct addrinfo *ai) +{ + freeaddrinfo(ai); +} +#endif + +#if !defined(IOV_MAX) +# define IOV_MAX 1024 +#endif + +#if !defined(SOCK_NONBLOCK) // Missing define in Bionic libc (Android) +# define SOCK_NONBLOCK O_NONBLOCK +#endif + +#endif /* HSNET_H */ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/include/HsNetworkConfig.h.in cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/include/HsNetworkConfig.h.in --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/include/HsNetworkConfig.h.in 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/include/HsNetworkConfig.h.in 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,177 @@ +/* include/HsNetworkConfig.h.in. Generated from configure.ac by autoheader. */ + +/* Define to 1 if you have the `accept4' function. */ +#undef HAVE_ACCEPT4 + +/* Define to 1 if you have the header file. */ +#undef HAVE_ARPA_INET_H + +/* Define to 1 if you have a BSDish sendfile(2) implementation. */ +#undef HAVE_BSD_SENDFILE + +/* Define to 1 if you have the declaration of `AI_ADDRCONFIG', and to 0 if you + don't. */ +#undef HAVE_DECL_AI_ADDRCONFIG + +/* Define to 1 if you have the declaration of `AI_ALL', and to 0 if you don't. + */ +#undef HAVE_DECL_AI_ALL + +/* Define to 1 if you have the declaration of `AI_NUMERICSERV', and to 0 if + you don't. */ +#undef HAVE_DECL_AI_NUMERICSERV + +/* Define to 1 if you have the declaration of `AI_V4MAPPED', and to 0 if you + don't. */ +#undef HAVE_DECL_AI_V4MAPPED + +/* Define to 1 if you have the declaration of `IPPROTO_IP', and to 0 if you + don't. */ +#undef HAVE_DECL_IPPROTO_IP + +/* Define to 1 if you have the declaration of `IPPROTO_IPV6', and to 0 if you + don't. */ +#undef HAVE_DECL_IPPROTO_IPV6 + +/* Define to 1 if you have the declaration of `IPPROTO_TCP', and to 0 if you + don't. */ +#undef HAVE_DECL_IPPROTO_TCP + +/* Define to 1 if you have the declaration of `IPV6_V6ONLY', and to 0 if you + don't. */ +#undef HAVE_DECL_IPV6_V6ONLY + +/* Define to 1 if you have the header file. */ +#undef HAVE_FCNTL_H + +/* Define to 1 if you have the `gai_strerror' function. */ +#undef HAVE_GAI_STRERROR + +/* Define to 1 if you have the `getaddrinfo' function. */ +#undef HAVE_GETADDRINFO + +/* Define to 1 if you have the `gethostent' function. */ +#undef HAVE_GETHOSTENT + +/* Define to 1 if you have getpeereid. 
*/ +#undef HAVE_GETPEEREID + +/* Define to 1 if you have the `if_nametoindex' function. */ +#undef HAVE_IF_NAMETOINDEX + +/* Define to 1 if you have the header file. */ +#undef HAVE_INTTYPES_H + +/* Define to 1 if in_addr_t is available. */ +#undef HAVE_IN_ADDR_T + +/* Define to 1 if you have the `ws2_32' library (-lws2_32). */ +#undef HAVE_LIBWS2_32 + +/* Define to 1 if you have the header file. */ +#undef HAVE_LIMITS_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_LINUX_CAN_H + +/* Define to 1 if you have a Linux sendfile(2) implementation. */ +#undef HAVE_LINUX_SENDFILE + +/* Define to 1 if you have the header file. */ +#undef HAVE_LINUX_TCP_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_MEMORY_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_NETDB_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_NETINET_IN_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_NETINET_TCP_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_NET_IF_H + +/* Define to 1 if you have the `readlink' function. */ +#undef HAVE_READLINK + +/* Define to 1 if you have the header file. */ +#undef HAVE_STDINT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STDLIB_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STRINGS_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STRING_H + +/* Define to 1 if `msg_accrights' is a member of `struct msghdr'. */ +#undef HAVE_STRUCT_MSGHDR_MSG_ACCRIGHTS + +/* Define to 1 if `msg_control' is a member of `struct msghdr'. */ +#undef HAVE_STRUCT_MSGHDR_MSG_CONTROL + +/* Define to 1 if `sa_len' is a member of `struct sockaddr'. */ +#undef HAVE_STRUCT_SOCKADDR_SA_LEN + +/* Define to 1 if you have both SO_PEERCRED and struct ucred. */ +#undef HAVE_STRUCT_UCRED + +/* Define to 1 if you have the `symlink' function. */ +#undef HAVE_SYMLINK + +/* Define to 1 if you have the header file. 
*/ +#undef HAVE_SYS_SOCKET_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_STAT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_TYPES_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_UIO_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_UN_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_UNISTD_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_WINSOCK2_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_WS2TCPIP_H + +/* Define to 1 if the `getaddrinfo' function needs WINVER set. */ +#undef NEED_WINVER_XP + +/* Define to the address where bug reports for this package should be sent. */ +#undef PACKAGE_BUGREPORT + +/* Define to the full name of this package. */ +#undef PACKAGE_NAME + +/* Define to the full name and version of this package. */ +#undef PACKAGE_STRING + +/* Define to the one symbol short name of this package. */ +#undef PACKAGE_TARNAME + +/* Define to the home page for this package. */ +#undef PACKAGE_URL + +/* Define to the version of this package. */ +#undef PACKAGE_VERSION + +/* Define to 1 if you have the ANSI C header files. */ +#undef STDC_HEADERS + +/* Define to empty if `const' does not conform to ANSI C. */ +#undef const diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/install-sh cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/install-sh --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/install-sh 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/install-sh 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,295 @@ +#!/bin/sh +# install - install a program, script, or datafile + +scriptversion=2003-09-24.23 + +# This originates from X11R5 (mit/util/scripts/install.sh), which was +# later released in X11R6 (xc/config/util/install.sh) with the +# following copyright and license. 
+# +# Copyright (C) 1994 X Consortium +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN +# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC- +# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# +# Except as contained in this notice, the name of the X Consortium shall not +# be used in advertising or otherwise to promote the sale, use or other deal- +# ings in this Software without prior written authorization from the X Consor- +# tium. +# +# +# FSF changes to this file are in the public domain. +# +# Calling this script install-sh is preferred over install.sh, to prevent +# `make' implicit rules from creating a file called install from it +# when there is no Makefile. +# +# This script is compatible with the BSD install script, but was written +# from scratch. It can only install one file at a time, a restriction +# shared with many OS's install programs. + +# set DOITPROG to echo to test this script + +# Don't use :- since 4.3BSD and earlier shells don't like it. +doit="${DOITPROG-}" + +# put in absolute paths if you don't have them in your path; or use env. vars. 
+ +mvprog="${MVPROG-mv}" +cpprog="${CPPROG-cp}" +chmodprog="${CHMODPROG-chmod}" +chownprog="${CHOWNPROG-chown}" +chgrpprog="${CHGRPPROG-chgrp}" +stripprog="${STRIPPROG-strip}" +rmprog="${RMPROG-rm}" +mkdirprog="${MKDIRPROG-mkdir}" + +transformbasename= +transform_arg= +instcmd="$mvprog" +chmodcmd="$chmodprog 0755" +chowncmd= +chgrpcmd= +stripcmd= +rmcmd="$rmprog -f" +mvcmd="$mvprog" +src= +dst= +dir_arg= + +usage="Usage: $0 [OPTION]... SRCFILE DSTFILE + or: $0 -d DIR1 DIR2... + +In the first form, install SRCFILE to DSTFILE, removing SRCFILE by default. +In the second, create the directory path DIR. + +Options: +-b=TRANSFORMBASENAME +-c copy source (using $cpprog) instead of moving (using $mvprog). +-d create directories instead of installing files. +-g GROUP $chgrp installed files to GROUP. +-m MODE $chmod installed files to MODE. +-o USER $chown installed files to USER. +-s strip installed files (using $stripprog). +-t=TRANSFORM +--help display this help and exit. +--version display version info and exit. + +Environment variables override the default commands: + CHGRPPROG CHMODPROG CHOWNPROG CPPROG MKDIRPROG MVPROG RMPROG STRIPPROG +" + +while test -n "$1"; do + case $1 in + -b=*) transformbasename=`echo $1 | sed 's/-b=//'` + shift + continue;; + + -c) instcmd=$cpprog + shift + continue;; + + -d) dir_arg=true + shift + continue;; + + -g) chgrpcmd="$chgrpprog $2" + shift + shift + continue;; + + --help) echo "$usage"; exit 0;; + + -m) chmodcmd="$chmodprog $2" + shift + shift + continue;; + + -o) chowncmd="$chownprog $2" + shift + shift + continue;; + + -s) stripcmd=$stripprog + shift + continue;; + + -t=*) transformarg=`echo $1 | sed 's/-t=//'` + shift + continue;; + + --version) echo "$0 $scriptversion"; exit 0;; + + *) if test -z "$src"; then + src=$1 + else + # this colon is to work around a 386BSD /bin/sh bug + : + dst=$1 + fi + shift + continue;; + esac +done + +if test -z "$src"; then + echo "$0: no input file specified." 
>&2 + exit 1 +fi + +# Protect names starting with `-'. +case $src in + -*) src=./$src ;; +esac + +if test -n "$dir_arg"; then + dst=$src + src= + + if test -d "$dst"; then + instcmd=: + chmodcmd= + else + instcmd=$mkdirprog + fi +else + # Waiting for this to be detected by the "$instcmd $src $dsttmp" command + # might cause directories to be created, which would be especially bad + # if $src (and thus $dsttmp) contains '*'. + if test ! -f "$src" && test ! -d "$src"; then + echo "$0: $src does not exist." >&2 + exit 1 + fi + + if test -z "$dst"; then + echo "$0: no destination specified." >&2 + exit 1 + fi + + # Protect names starting with `-'. + case $dst in + -*) dst=./$dst ;; + esac + + # If destination is a directory, append the input filename; won't work + # if double slashes aren't ignored. + if test -d "$dst"; then + dst=$dst/`basename "$src"` + fi +fi + +# This sed command emulates the dirname command. +dstdir=`echo "$dst" | sed -e 's,[^/]*$,,;s,/$,,;s,^$,.,'` + +# Make sure that the destination directory exists. + +# Skip lots of stat calls in the usual case. +if test ! -d "$dstdir"; then + defaultIFS=' + ' + IFS="${IFS-$defaultIFS}" + + oIFS=$IFS + # Some sh's can't handle IFS=/ for some reason. + IFS='%' + set - `echo "$dstdir" | sed -e 's@/@%@g' -e 's@^%@/@'` + IFS=$oIFS + + pathcomp= + + while test $# -ne 0 ; do + pathcomp=$pathcomp$1 + shift + test -d "$pathcomp" || $mkdirprog "$pathcomp" + pathcomp=$pathcomp/ + done +fi + +if test -n "$dir_arg"; then + $doit $instcmd "$dst" \ + && { test -z "$chowncmd" || $doit $chowncmd "$dst"; } \ + && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } \ + && { test -z "$stripcmd" || $doit $stripcmd "$dst"; } \ + && { test -z "$chmodcmd" || $doit $chmodcmd "$dst"; } + +else + # If we're going to rename the final executable, determine the name now. 
+ if test -z "$transformarg"; then + dstfile=`basename "$dst"` + else + dstfile=`basename "$dst" $transformbasename \ + | sed $transformarg`$transformbasename + fi + + # don't allow the sed command to completely eliminate the filename. + test -z "$dstfile" && dstfile=`basename "$dst"` + + # Make a couple of temp file names in the proper directory. + dsttmp=$dstdir/_inst.$$_ + rmtmp=$dstdir/_rm.$$_ + + # Trap to clean up those temp files at exit. + trap 'status=$?; rm -f "$dsttmp" "$rmtmp" && exit $status' 0 + trap '(exit $?); exit' 1 2 13 15 + + # Move or copy the file name to the temp name + $doit $instcmd "$src" "$dsttmp" && + + # and set any options; do chmod last to preserve setuid bits. + # + # If any of these fail, we abort the whole thing. If we want to + # ignore errors from any of these, just make sure not to ignore + # errors from the above "$doit $instcmd $src $dsttmp" command. + # + { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } \ + && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } \ + && { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } \ + && { test -z "$chmodcmd" || $doit $chmodcmd "$dsttmp"; } && + + # Now remove or move aside any old file at destination location. We + # try this two ways since rm can't unlink itself on some systems and + # the destination file might be busy for other reasons. In this case, + # the final cleanup might fail but the new file should still install + # successfully. + { + if test -f "$dstdir/$dstfile"; then + $doit $rmcmd -f "$dstdir/$dstfile" 2>/dev/null \ + || $doit $mvcmd -f "$dstdir/$dstfile" "$rmtmp" 2>/dev/null \ + || { + echo "$0: cannot unlink or rename $dstdir/$dstfile" >&2 + (exit 1); exit + } + else + : + fi + } && + + # Now rename the file to the real destination. + $doit $mvcmd "$dsttmp" "$dstdir/$dstfile" +fi && + +# The final little trick to "correctly" pass the exit status to the exit trap. 
+{ + (exit 0); exit +} + +# Local variables: +# eval: (add-hook 'write-file-hooks 'time-stamp) +# time-stamp-start: "scriptversion=" +# time-stamp-format: "%:y-%02m-%02d.%02H" +# time-stamp-end: "$" +# End: diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/LICENSE cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/LICENSE --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/LICENSE 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/LICENSE 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,29 @@ +Copyright (c) 2002-2010, The University Court of the University of Glasgow. +Copyright (c) 2007-2010, Johan Tibell + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +- Redistributions of source code must retain the above copyright notice, +this list of conditions and the following disclaimer. + +- Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. + +- Neither name of the University nor the names of its contributors may be +used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY COURT OF THE UNIVERSITY OF +GLASGOW AND THE CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, +INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE +UNIVERSITY COURT OF THE UNIVERSITY OF GLASGOW OR THE CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/BSD.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/BSD.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/BSD.hsc 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/BSD.hsc 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,574 @@ +{-# LANGUAGE CPP, ForeignFunctionInterface #-} +{-# OPTIONS_HADDOCK hide #-} +{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-} +----------------------------------------------------------------------------- +-- | +-- Module : Network.BSD +-- Copyright : (c) The University of Glasgow 2001 +-- License : BSD-style (see the file libraries/network/LICENSE) +-- +-- Maintainer : libraries@haskell.org +-- Stability : experimental +-- Portability : non-portable +-- +-- The "Network.BSD" module defines Haskell bindings to network +-- programming functionality provided by BSD Unix derivatives. +-- +----------------------------------------------------------------------------- + +#include "HsNet.h" +##include "HsNetDef.h" + +module Network.BSD {-# DEPRECATED "This platform dependent module is no longer supported." #-} + ( + -- * Host names + HostName + , getHostName + + , HostEntry(..) 
+ , getHostByName + , getHostByAddr + , hostAddress + +#if defined(HAVE_GETHOSTENT) && !defined(mingw32_HOST_OS) + , getHostEntries + + -- ** Low level functionality + , setHostEntry + , getHostEntry + , endHostEntry +#endif + + -- * Service names + , ServiceEntry(..) + , ServiceName + , getServiceByName + , getServiceByPort + , getServicePortNumber + +#if !defined(mingw32_HOST_OS) + , getServiceEntries + + -- ** Low level functionality + , getServiceEntry + , setServiceEntry + , endServiceEntry +#endif + + -- * Protocol names + , ProtocolName + , ProtocolNumber + , ProtocolEntry(..) + , getProtocolByName + , getProtocolByNumber + , getProtocolNumber + , defaultProtocol + +#if !defined(mingw32_HOST_OS) + , getProtocolEntries + -- ** Low level functionality + , setProtocolEntry + , getProtocolEntry + , endProtocolEntry +#endif + + -- * Port numbers + , PortNumber + + -- * Network names + , NetworkName + , NetworkAddr + , NetworkEntry(..) + +#if !defined(mingw32_HOST_OS) + , getNetworkByName + , getNetworkByAddr + , getNetworkEntries + -- ** Low level functionality + , setNetworkEntry + , getNetworkEntry + , endNetworkEntry +#endif + +#if defined(HAVE_IF_NAMETOINDEX) + -- * Interface names + , ifNameToIndex +#endif + + ) where + +import Network.Socket + +import Control.Concurrent (MVar, newMVar, withMVar) +import qualified Control.Exception as E +import Foreign.C.String (CString, peekCString, withCString) +#if defined(HAVE_WINSOCK2_H) +import Foreign.C.Types ( CShort ) +#endif +import Foreign.C.Types ( CInt(..), CULong(..), CSize(..) 
) +import Foreign.Ptr (Ptr, nullPtr) +import Foreign.Storable (Storable(..)) +import Foreign.Marshal.Array (allocaArray0, peekArray0) +import Foreign.Marshal.Utils (with, fromBool) +import Data.Typeable +import System.IO.Error (ioeSetErrorString, mkIOError) +import System.IO.Unsafe (unsafePerformIO) + +import GHC.IO.Exception + +import Control.Monad (liftM) + +import Network.Socket.Internal (throwSocketErrorIfMinus1_) + +-- --------------------------------------------------------------------------- +-- Basic Types + +type ProtocolName = String + +-- --------------------------------------------------------------------------- +-- Service Database Access + +-- Calling getServiceByName for a given service and protocol returns +-- the systems service entry. This should be used to find the port +-- numbers for standard protocols such as SMTP and FTP. The remaining +-- three functions should be used for browsing the service database +-- sequentially. + +-- Calling setServiceEntry with True indicates that the service +-- database should be left open between calls to getServiceEntry. To +-- close the database a call to endServiceEntry is required. This +-- database file is usually stored in the file /etc/services. + +data ServiceEntry = + ServiceEntry { + serviceName :: ServiceName, -- Official Name + serviceAliases :: [ServiceName], -- aliases + servicePort :: PortNumber, -- Port Number ( network byte order ) + serviceProtocol :: ProtocolName -- Protocol + } deriving (Show, Typeable) + +instance Storable ServiceEntry where + sizeOf _ = #const sizeof(struct servent) + alignment _ = alignment (undefined :: CInt) -- ??? 
+ + peek p = do + s_name <- (#peek struct servent, s_name) p >>= peekCString + s_aliases <- (#peek struct servent, s_aliases) p + >>= peekArray0 nullPtr + >>= mapM peekCString + s_port <- (#peek struct servent, s_port) p + s_proto <- (#peek struct servent, s_proto) p >>= peekCString + return (ServiceEntry { + serviceName = s_name, + serviceAliases = s_aliases, +#if defined(HAVE_WINSOCK2_H) + servicePort = (fromIntegral (s_port :: CShort)), +#else + -- s_port is already in network byte order, but it + -- might be the wrong size. + servicePort = (fromIntegral (s_port :: CInt)), +#endif + serviceProtocol = s_proto + }) + + poke = throwUnsupportedOperationPoke "ServiceEntry" + + +-- | Get service by name. +getServiceByName :: ServiceName -- Service Name + -> ProtocolName -- Protocol Name + -> IO ServiceEntry -- Service Entry +getServiceByName name proto = withLock $ do + withCString name $ \ cstr_name -> do + withCString proto $ \ cstr_proto -> do + throwNoSuchThingIfNull "Network.BSD.getServiceByName" "no such service entry" + $ c_getservbyname cstr_name cstr_proto + >>= peek + +foreign import CALLCONV unsafe "getservbyname" + c_getservbyname :: CString -> CString -> IO (Ptr ServiceEntry) + +-- | Get the service given a 'PortNumber' and 'ProtocolName'. +getServiceByPort :: PortNumber -> ProtocolName -> IO ServiceEntry +getServiceByPort port proto = withLock $ do + withCString proto $ \ cstr_proto -> do + throwNoSuchThingIfNull "Network.BSD.getServiceByPort" "no such service entry" + $ c_getservbyport (fromIntegral port) cstr_proto + >>= peek + +foreign import CALLCONV unsafe "getservbyport" + c_getservbyport :: CInt -> CString -> IO (Ptr ServiceEntry) + +-- | Get the 'PortNumber' corresponding to the 'ServiceName'. 
+getServicePortNumber :: ServiceName -> IO PortNumber +getServicePortNumber name = do + (ServiceEntry _ _ port _) <- getServiceByName name "tcp" + return port + +#if !defined(mingw32_HOST_OS) +getServiceEntry :: IO ServiceEntry +getServiceEntry = withLock $ do + throwNoSuchThingIfNull "Network.BSD.getServiceEntry" "no such service entry" + $ c_getservent + >>= peek + +foreign import ccall unsafe "getservent" c_getservent :: IO (Ptr ServiceEntry) + +setServiceEntry :: Bool -> IO () +setServiceEntry flg = withLock $ c_setservent (fromBool flg) + +foreign import ccall unsafe "setservent" c_setservent :: CInt -> IO () + +endServiceEntry :: IO () +endServiceEntry = withLock $ c_endservent + +foreign import ccall unsafe "endservent" c_endservent :: IO () + +getServiceEntries :: Bool -> IO [ServiceEntry] +getServiceEntries stayOpen = do + setServiceEntry stayOpen + getEntries (getServiceEntry) (endServiceEntry) +#endif + +-- --------------------------------------------------------------------------- +-- Protocol Entries + +-- The following relate directly to the corresponding UNIX C +-- calls for returning the protocol entries. The protocol entry is +-- represented by the Haskell type ProtocolEntry. + +-- As for setServiceEntry above, calling setProtocolEntry. +-- determines whether or not the protocol database file, usually +-- @/etc/protocols@, is to be kept open between calls of +-- getProtocolEntry. Similarly, + +data ProtocolEntry = + ProtocolEntry { + protoName :: ProtocolName, -- Official Name + protoAliases :: [ProtocolName], -- aliases + protoNumber :: ProtocolNumber -- Protocol Number + } deriving (Read, Show, Typeable) + +instance Storable ProtocolEntry where + sizeOf _ = #const sizeof(struct protoent) + alignment _ = alignment (undefined :: CInt) -- ??? 
+ + peek p = do + p_name <- (#peek struct protoent, p_name) p >>= peekCString + p_aliases <- (#peek struct protoent, p_aliases) p + >>= peekArray0 nullPtr + >>= mapM peekCString +#if defined(HAVE_WINSOCK2_H) + -- With WinSock, the protocol number is only a short; + -- hoist it in as such, but represent it on the Haskell side + -- as a CInt. + p_proto_short <- (#peek struct protoent, p_proto) p + let p_proto = fromIntegral (p_proto_short :: CShort) +#else + p_proto <- (#peek struct protoent, p_proto) p +#endif + return (ProtocolEntry { + protoName = p_name, + protoAliases = p_aliases, + protoNumber = p_proto + }) + + poke = throwUnsupportedOperationPoke "ProtocolEntry" + + +getProtocolByName :: ProtocolName -> IO ProtocolEntry +getProtocolByName name = withLock $ do + withCString name $ \ name_cstr -> do + throwNoSuchThingIfNull "Network.BSD.getProtocolByName" ("no such protocol name: " ++ name) + $ c_getprotobyname name_cstr + >>= peek + +foreign import CALLCONV unsafe "getprotobyname" + c_getprotobyname :: CString -> IO (Ptr ProtocolEntry) + + +getProtocolByNumber :: ProtocolNumber -> IO ProtocolEntry +getProtocolByNumber num = withLock $ do + throwNoSuchThingIfNull "Network.BSD.getProtocolByNumber" ("no such protocol number: " ++ show num) + $ c_getprotobynumber (fromIntegral num) + >>= peek + +foreign import CALLCONV unsafe "getprotobynumber" + c_getprotobynumber :: CInt -> IO (Ptr ProtocolEntry) + + +getProtocolNumber :: ProtocolName -> IO ProtocolNumber +getProtocolNumber proto = do + (ProtocolEntry _ _ num) <- getProtocolByName proto + return num + +#if !defined(mingw32_HOST_OS) +getProtocolEntry :: IO ProtocolEntry -- Next Protocol Entry from DB +getProtocolEntry = withLock $ do + ent <- throwNoSuchThingIfNull "Network.BSD.getProtocolEntry" "no such protocol entry" + $ c_getprotoent + peek ent + +foreign import ccall unsafe "getprotoent" c_getprotoent :: IO (Ptr ProtocolEntry) + +setProtocolEntry :: Bool -> IO () -- Keep DB Open ? 
+setProtocolEntry flg = withLock $ c_setprotoent (fromBool flg) + +foreign import ccall unsafe "setprotoent" c_setprotoent :: CInt -> IO () + +endProtocolEntry :: IO () +endProtocolEntry = withLock $ c_endprotoent + +foreign import ccall unsafe "endprotoent" c_endprotoent :: IO () + +getProtocolEntries :: Bool -> IO [ProtocolEntry] +getProtocolEntries stayOpen = withLock $ do + setProtocolEntry stayOpen + getEntries (getProtocolEntry) (endProtocolEntry) +#endif + +-- --------------------------------------------------------------------------- +-- Host lookups + +data HostEntry = + HostEntry { + hostName :: HostName, -- Official Name + hostAliases :: [HostName], -- aliases + hostFamily :: Family, -- Host Type (currently AF_INET) + hostAddresses :: [HostAddress] -- Set of Network Addresses (in network byte order) + } deriving (Read, Show, Typeable) + +instance Storable HostEntry where + sizeOf _ = #const sizeof(struct hostent) + alignment _ = alignment (undefined :: CInt) -- ??? + + peek p = do + h_name <- (#peek struct hostent, h_name) p >>= peekCString + h_aliases <- (#peek struct hostent, h_aliases) p + >>= peekArray0 nullPtr + >>= mapM peekCString + h_addrtype <- (#peek struct hostent, h_addrtype) p + -- h_length <- (#peek struct hostent, h_length) p + h_addr_list <- (#peek struct hostent, h_addr_list) p + >>= peekArray0 nullPtr + >>= mapM peek + return (HostEntry { + hostName = h_name, + hostAliases = h_aliases, +#if defined(HAVE_WINSOCK2_H) + hostFamily = unpackFamily (fromIntegral (h_addrtype :: CShort)), +#else + hostFamily = unpackFamily h_addrtype, +#endif + hostAddresses = h_addr_list + }) + + poke = throwUnsupportedOperationPoke "HostEntry" + + +-- convenience function: +hostAddress :: HostEntry -> HostAddress +hostAddress (HostEntry nm _ _ ls) = + case ls of + [] -> error $ "Network.BSD.hostAddress: empty network address list for " ++ nm + (x:_) -> x + +-- getHostByName must use the same lock as the *hostent functions +-- may cause problems if called 
concurrently. + +-- | Resolve a 'HostName' to IPv4 address. +getHostByName :: HostName -> IO HostEntry +getHostByName name = withLock $ do + withCString name $ \ name_cstr -> do + ent <- throwNoSuchThingIfNull "Network.BSD.getHostByName" "no such host entry" + $ c_gethostbyname name_cstr + peek ent + +foreign import CALLCONV safe "gethostbyname" + c_gethostbyname :: CString -> IO (Ptr HostEntry) + + +-- The locking of gethostbyaddr is similar to gethostbyname. +-- | Get a 'HostEntry' corresponding to the given address and family. +-- Note that only IPv4 is currently supported. +getHostByAddr :: Family -> HostAddress -> IO HostEntry +getHostByAddr family addr = do + with addr $ \ ptr_addr -> withLock $ do + throwNoSuchThingIfNull "Network.BSD.getHostByAddr" "no such host entry" + $ c_gethostbyaddr ptr_addr (fromIntegral (sizeOf addr)) (packFamily family) + >>= peek + +foreign import CALLCONV safe "gethostbyaddr" + c_gethostbyaddr :: Ptr HostAddress -> CInt -> CInt -> IO (Ptr HostEntry) + +#if defined(HAVE_GETHOSTENT) && !defined(mingw32_HOST_OS) +getHostEntry :: IO HostEntry +getHostEntry = withLock $ do + throwNoSuchThingIfNull "Network.BSD.getHostEntry" "unable to retrieve host entry" + $ c_gethostent + >>= peek + +foreign import ccall unsafe "gethostent" c_gethostent :: IO (Ptr HostEntry) + +setHostEntry :: Bool -> IO () +setHostEntry flg = withLock $ c_sethostent (fromBool flg) + +foreign import ccall unsafe "sethostent" c_sethostent :: CInt -> IO () + +endHostEntry :: IO () +endHostEntry = withLock $ c_endhostent + +foreign import ccall unsafe "endhostent" c_endhostent :: IO () + +getHostEntries :: Bool -> IO [HostEntry] +getHostEntries stayOpen = do + setHostEntry stayOpen + getEntries (getHostEntry) (endHostEntry) +#endif + +-- --------------------------------------------------------------------------- +-- Accessing network information + +-- Same set of access functions as for accessing host,protocol and +-- service system info, this time for the types of 
networks supported. + +-- network addresses are represented in host byte order. +type NetworkAddr = CULong + +type NetworkName = String + +data NetworkEntry = + NetworkEntry { + networkName :: NetworkName, -- official name + networkAliases :: [NetworkName], -- aliases + networkFamily :: Family, -- type + networkAddress :: NetworkAddr + } deriving (Read, Show, Typeable) + +instance Storable NetworkEntry where + sizeOf _ = #const sizeof(struct hostent) + alignment _ = alignment (undefined :: CInt) -- ??? + + peek p = do + n_name <- (#peek struct netent, n_name) p >>= peekCString + n_aliases <- (#peek struct netent, n_aliases) p + >>= peekArray0 nullPtr + >>= mapM peekCString + n_addrtype <- (#peek struct netent, n_addrtype) p + n_net <- (#peek struct netent, n_net) p + return (NetworkEntry { + networkName = n_name, + networkAliases = n_aliases, + networkFamily = unpackFamily (fromIntegral + (n_addrtype :: CInt)), + networkAddress = n_net + }) + + poke = throwUnsupportedOperationPoke "NetworkEntry" + + +#if !defined(mingw32_HOST_OS) +getNetworkByName :: NetworkName -> IO NetworkEntry +getNetworkByName name = withLock $ do + withCString name $ \ name_cstr -> do + throwNoSuchThingIfNull "Network.BSD.getNetworkByName" "no such network entry" + $ c_getnetbyname name_cstr + >>= peek + +foreign import ccall unsafe "getnetbyname" + c_getnetbyname :: CString -> IO (Ptr NetworkEntry) + +getNetworkByAddr :: NetworkAddr -> Family -> IO NetworkEntry +getNetworkByAddr addr family = withLock $ do + throwNoSuchThingIfNull "Network.BSD.getNetworkByAddr" "no such network entry" + $ c_getnetbyaddr addr (packFamily family) + >>= peek + +foreign import ccall unsafe "getnetbyaddr" + c_getnetbyaddr :: NetworkAddr -> CInt -> IO (Ptr NetworkEntry) + +getNetworkEntry :: IO NetworkEntry +getNetworkEntry = withLock $ do + throwNoSuchThingIfNull "Network.BSD.getNetworkEntry" "no more network entries" + $ c_getnetent + >>= peek + +foreign import ccall unsafe "getnetent" c_getnetent :: IO (Ptr 
NetworkEntry) + +-- | Open the network name database. The parameter specifies +-- whether a connection is maintained open between various +-- networkEntry calls +setNetworkEntry :: Bool -> IO () +setNetworkEntry flg = withLock $ c_setnetent (fromBool flg) + +foreign import ccall unsafe "setnetent" c_setnetent :: CInt -> IO () + +-- | Close the connection to the network name database. +endNetworkEntry :: IO () +endNetworkEntry = withLock $ c_endnetent + +foreign import ccall unsafe "endnetent" c_endnetent :: IO () + +-- | Get the list of network entries. +getNetworkEntries :: Bool -> IO [NetworkEntry] +getNetworkEntries stayOpen = do + setNetworkEntry stayOpen + getEntries (getNetworkEntry) (endNetworkEntry) +#endif + +-- Mutex for name service lockdown + +{-# NOINLINE lock #-} +lock :: MVar () +lock = unsafePerformIO $ withSocketsDo $ newMVar () + +withLock :: IO a -> IO a +withLock act = withMVar lock (\_ -> act) + +-- --------------------------------------------------------------------------- +-- Miscellaneous Functions + +-- | Calling getHostName returns the standard host name for the current +-- processor, as set at boot time. 
+ +getHostName :: IO HostName +getHostName = do + let size = 256 + allocaArray0 size $ \ cstr -> do + throwSocketErrorIfMinus1_ "Network.BSD.getHostName" $ c_gethostname cstr (fromIntegral size) + peekCString cstr + +foreign import CALLCONV unsafe "gethostname" + c_gethostname :: CString -> CSize -> IO CInt + +-- Helper function used by the exported functions that provides a +-- Haskellised view of the enumerator functions: + +getEntries :: IO a -- read + -> IO () -- at end + -> IO [a] +getEntries getOne atEnd = loop + where + loop = do + vv <- E.catch (liftM Just getOne) + (\ e -> let _types = e :: IOException in return Nothing) + case vv of + Nothing -> return [] + Just v -> loop >>= \ vs -> atEnd >> return (v:vs) + + +throwNoSuchThingIfNull :: String -> String -> IO (Ptr a) -> IO (Ptr a) +throwNoSuchThingIfNull loc desc act = do + ptr <- act + if (ptr == nullPtr) + then ioError (ioeSetErrorString (mkIOError NoSuchThing loc Nothing Nothing) desc) + else return ptr + +throwUnsupportedOperationPoke :: String -> Ptr a -> a -> IO () +throwUnsupportedOperationPoke typ _ _ = + ioError $ ioeSetErrorString ioe "Operation not implemented" + where + ioe = mkIOError UnsupportedOperation + ("Network.BSD: instance Storable " ++ typ ++ ": poke") + Nothing + Nothing diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/ByteString/Internal.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/ByteString/Internal.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/ByteString/Internal.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/ByteString/Internal.hs 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,55 @@ +{-# LANGUAGE CPP, ForeignFunctionInterface #-} + +-- | +-- Module : Network.Socket.ByteString.Internal +-- Copyright : (c) Johan Tibell 2007-2010 +-- License : BSD-style +-- +-- Maintainer : 
johan.tibell@gmail.com +-- Stability : stable +-- Portability : portable +-- +module Network.Socket.ByteString.Internal + ( + mkInvalidRecvArgError +#if !defined(mingw32_HOST_OS) + , c_writev + , c_sendmsg +#endif + , waitWhen0 + ) where + +import System.IO.Error (ioeSetErrorString, mkIOError) + +#if !defined(mingw32_HOST_OS) +import Foreign.C.Types (CInt(..)) +import System.Posix.Types (CSsize(..)) +import Foreign.Ptr (Ptr) + +import Network.Socket.ByteString.IOVec (IOVec) +import Network.Socket.ByteString.MsgHdr (MsgHdr) +#endif + +import Control.Concurrent (threadWaitWrite, rtsSupportsBoundThreads) +import Control.Monad (when) +import GHC.IO.Exception (IOErrorType(..)) +import Network.Socket.Types + +mkInvalidRecvArgError :: String -> IOError +mkInvalidRecvArgError loc = ioeSetErrorString (mkIOError + InvalidArgument + loc Nothing Nothing) "non-positive length" + +#if !defined(mingw32_HOST_OS) +foreign import ccall unsafe "writev" + c_writev :: CInt -> Ptr IOVec -> CInt -> IO CSsize + +foreign import ccall unsafe "sendmsg" + c_sendmsg :: CInt -> Ptr MsgHdr -> CInt -> IO CSsize +#endif + +waitWhen0 :: Int -> Socket -> IO () +waitWhen0 0 s = when rtsSupportsBoundThreads $ do + let fd = fromIntegral $ fdSocket s + threadWaitWrite fd +waitWhen0 _ _ = return () diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/ByteString/IOVec.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/ByteString/IOVec.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/ByteString/IOVec.hsc 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/ByteString/IOVec.hsc 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,31 @@ +{-# OPTIONS_GHC -funbox-strict-fields #-} + +-- | Support module for the POSIX writev system call. +module Network.Socket.ByteString.IOVec + ( IOVec(..) 
+ ) where + +import Foreign.C.Types (CChar, CInt, CSize) +import Foreign.Ptr (Ptr) +import Foreign.Storable (Storable(..)) + +#include +#include + +data IOVec = IOVec + { iovBase :: !(Ptr CChar) + , iovLen :: !CSize + } + +instance Storable IOVec where + sizeOf _ = (#const sizeof(struct iovec)) + alignment _ = alignment (undefined :: CInt) + + peek p = do + base <- (#peek struct iovec, iov_base) p + len <- (#peek struct iovec, iov_len) p + return $ IOVec base len + + poke p iov = do + (#poke struct iovec, iov_base) p (iovBase iov) + (#poke struct iovec, iov_len) p (iovLen iov) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/ByteString/Lazy/Posix.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/ByteString/Lazy/Posix.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/ByteString/Lazy/Posix.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/ByteString/Lazy/Posix.hs 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,59 @@ +{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-} +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE OverloadedStrings #-} + +module Network.Socket.ByteString.Lazy.Posix + ( + -- * Send data to a socket + send + , sendAll + ) where + +import Control.Monad (liftM, when) +import qualified Data.ByteString.Lazy as L +import Data.ByteString.Lazy.Internal (ByteString(..)) +import Data.ByteString.Unsafe (unsafeUseAsCStringLen) +import Data.Int (Int64) +import Foreign.Marshal.Array (allocaArray) +import Foreign.Ptr (plusPtr) +import Foreign.Storable (Storable(..)) + +import Network.Socket (Socket(..)) +import Network.Socket.ByteString.IOVec (IOVec(IOVec)) +import Network.Socket.ByteString.Internal (c_writev, waitWhen0) +import Network.Socket.Internal + +-- ----------------------------------------------------------------------------- +-- Sending + +send :: Socket -- ^ Connected socket + -> 
ByteString -- ^ Data to send + -> IO Int64 -- ^ Number of bytes sent +send sock@(MkSocket fd _ _ _ _) s = do + let cs = take maxNumChunks (L.toChunks s) + len = length cs + liftM fromIntegral . allocaArray len $ \ptr -> + withPokes cs ptr $ \niovs -> + throwSocketErrorWaitWrite sock "writev" $ + c_writev (fromIntegral fd) ptr niovs + where + withPokes ss p f = loop ss p 0 0 + where loop (c:cs) q k !niovs + | k < maxNumBytes = + unsafeUseAsCStringLen c $ \(ptr,len) -> do + poke q $ IOVec ptr (fromIntegral len) + loop cs (q `plusPtr` sizeOf (undefined :: IOVec)) + (k + fromIntegral len) (niovs + 1) + | otherwise = f niovs + loop _ _ _ niovs = f niovs + maxNumBytes = 4194304 :: Int -- maximum number of bytes to transmit in one system call + maxNumChunks = 1024 :: Int -- maximum number of chunks to transmit in one system call + +sendAll :: Socket -- ^ Connected socket + -> ByteString -- ^ Data to send + -> IO () +sendAll _ "" = return () +sendAll sock bs = do + sent <- send sock bs + waitWhen0 (fromIntegral sent) sock + when (sent >= 0) $ sendAll sock $ L.drop sent bs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/ByteString/Lazy/Windows.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/ByteString/Lazy/Windows.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/ByteString/Lazy/Windows.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/ByteString/Lazy/Windows.hs 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,40 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE OverloadedStrings #-} + +module Network.Socket.ByteString.Lazy.Windows + ( + -- * Send data to a socket + send + , sendAll + ) where + +import Control.Applicative ((<$>)) +import Control.Monad (when) +import qualified Data.ByteString as S +import qualified Data.ByteString.Lazy as L +import Data.Int (Int64) + +import Network.Socket 
(Socket(..)) +import qualified Network.Socket.ByteString as Socket +import Network.Socket.ByteString.Internal (waitWhen0) + +-- ----------------------------------------------------------------------------- +-- Sending + +send :: Socket -- ^ Connected socket + -> L.ByteString -- ^ Data to send + -> IO Int64 -- ^ Number of bytes sent +send sock s = do + fromIntegral <$> case L.toChunks s of + -- TODO: Consider doing nothing if the string is empty. + [] -> Socket.send sock S.empty + (x:_) -> Socket.send sock x + +sendAll :: Socket -- ^ Connected socket + -> L.ByteString -- ^ Data to send + -> IO () +sendAll _ "" = return () +sendAll sock bs = do + sent <- send sock bs + waitWhen0 (fromIntegral sent) sock + when (sent >= 0) $ sendAll sock $ L.drop sent bs diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/ByteString/Lazy.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/ByteString/Lazy.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/ByteString/Lazy.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/ByteString/Lazy.hs 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,88 @@ +{-# LANGUAGE CPP #-} + +-- | +-- Module : Network.Socket.ByteString.Lazy +-- Copyright : (c) Bryan O'Sullivan 2009 +-- License : BSD-style +-- +-- Maintainer : bos@serpentine.com +-- Stability : experimental +-- Portability : POSIX, GHC +-- +-- This module provides access to the BSD /socket/ interface. This +-- module is generally more efficient than the 'String' based network +-- functions in 'Network.Socket'. For detailed documentation, consult +-- your favorite POSIX socket reference. All functions communicate +-- failures by converting the error number to 'System.IO.IOError'. 
+-- +-- This module is made to be imported with 'Network.Socket' like so: +-- +-- > import Network.Socket hiding (send, sendTo, recv, recvFrom) +-- > import Network.Socket.ByteString.Lazy +-- > import Prelude hiding (getContents) +-- +module Network.Socket.ByteString.Lazy + ( + -- * Send data to a socket + send + , sendAll + , + + -- * Receive data from a socket + getContents + , recv + ) where + +import Control.Monad (liftM) +import Data.ByteString.Lazy.Internal (ByteString(..), defaultChunkSize) +import Data.Int (Int64) +import Network.Socket (Socket(..), ShutdownCmd(..), shutdown) +import Prelude hiding (getContents) +import System.IO.Unsafe (unsafeInterleaveIO) + +import qualified Data.ByteString as S +import qualified Network.Socket.ByteString as N + +#if defined(mingw32_HOST_OS) +import Network.Socket.ByteString.Lazy.Windows (send, sendAll) +#else +import Network.Socket.ByteString.Lazy.Posix (send, sendAll) +#endif + +-- ----------------------------------------------------------------------------- +-- Receiving + +-- | Receive data from the socket. The socket must be in a connected +-- state. Data is received on demand, in chunks; each chunk will be +-- sized to reflect the amount of data received by individual 'recv' +-- calls. +-- +-- All remaining data from the socket is consumed. When there is no +-- more data to be received, the receiving side of the socket is shut +-- down. If there is an error and an exception is thrown, the socket +-- is not shut down. +getContents :: Socket -- ^ Connected socket + -> IO ByteString -- ^ Data received +getContents sock = loop where + loop = unsafeInterleaveIO $ do + s <- N.recv sock defaultChunkSize + if S.null s + then shutdown sock ShutdownReceive >> return Empty + else Chunk s `liftM` loop + +-- | Receive data from the socket. The socket must be in a connected +-- state. This function may return fewer bytes than specified. 
If +-- the received data is longer than the specified length, it may be +-- discarded depending on the type of socket. This function may block +-- until a message arrives. +-- +-- If there is no more data to be received, returns an empty 'ByteString'. +-- +-- Receiving data from closed socket may lead to undefined behaviour. +recv :: Socket -- ^ Connected socket + -> Int64 -- ^ Maximum number of bytes to receive + -> IO ByteString -- ^ Data received +recv sock nbytes = chunk `liftM` N.recv sock (fromIntegral nbytes) where + chunk k + | S.null k = Empty + | otherwise = Chunk k Empty diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/ByteString/MsgHdr.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/ByteString/MsgHdr.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/ByteString/MsgHdr.hsc 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/ByteString/MsgHdr.hsc 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,48 @@ +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -funbox-strict-fields #-} + +-- | Support module for the POSIX 'sendmsg' system call. +module Network.Socket.ByteString.MsgHdr + ( MsgHdr(..) + ) where + +#include +#include + +import Foreign.C.Types (CInt, CSize, CUInt) +import Foreign.Ptr (Ptr) +import Foreign.Storable (Storable(..)) +import Network.Socket (SockAddr) +import Network.Socket.Internal (zeroMemory) + +import Network.Socket.ByteString.IOVec (IOVec) + +-- We don't use msg_control, msg_controllen, and msg_flags as these +-- don't exist on OpenSolaris. 
+data MsgHdr = MsgHdr + { msgName :: !(Ptr SockAddr) + , msgNameLen :: !CUInt + , msgIov :: !(Ptr IOVec) + , msgIovLen :: !CSize + } + +instance Storable MsgHdr where + sizeOf _ = (#const sizeof(struct msghdr)) + alignment _ = alignment (undefined :: CInt) + + peek p = do + name <- (#peek struct msghdr, msg_name) p + nameLen <- (#peek struct msghdr, msg_namelen) p + iov <- (#peek struct msghdr, msg_iov) p + iovLen <- (#peek struct msghdr, msg_iovlen) p + return $ MsgHdr name nameLen iov iovLen + + poke p mh = do + -- We need to zero the msg_control, msg_controllen, and msg_flags + -- fields, but they only exist on some platforms (e.g. not on + -- Solaris). Instead of using CPP, we zero the entire struct. + zeroMemory p (#const sizeof(struct msghdr)) + (#poke struct msghdr, msg_name) p (msgName mh) + (#poke struct msghdr, msg_namelen) p (msgNameLen mh) + (#poke struct msghdr, msg_iov) p (msgIov mh) + (#poke struct msghdr, msg_iovlen) p (msgIovLen mh) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/ByteString.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/ByteString.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/ByteString.hsc 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/ByteString.hsc 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,285 @@ +{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-} +{-# LANGUAGE CPP, ForeignFunctionInterface #-} +{-# LANGUAGE OverloadedStrings #-} + +#include "HsNet.h" + +-- | +-- Module : Network.Socket.ByteString +-- Copyright : (c) Johan Tibell 2007-2010 +-- License : BSD-style +-- +-- Maintainer : johan.tibell@gmail.com +-- Stability : stable +-- Portability : portable +-- +-- This module provides access to the BSD /socket/ interface. This +-- module is generally more efficient than the 'String' based network +-- functions in 'Network.Socket'. 
For detailed documentation, consult +-- your favorite POSIX socket reference. All functions communicate +-- failures by converting the error number to 'System.IO.IOError'. +-- +-- This module is made to be imported with 'Network.Socket' like so: +-- +-- > import Network.Socket hiding (send, sendTo, recv, recvFrom) +-- > import Network.Socket.ByteString +-- +module Network.Socket.ByteString + ( + -- * Send data to a socket + send + , sendAll + , sendTo + , sendAllTo + + -- ** Vectored I/O + -- $vectored + , sendMany + , sendManyTo + + -- * Receive data from a socket + , recv + , recvFrom + ) where + +import Control.Exception as E (catch, throwIO) +import Control.Monad (when) +import Data.ByteString (ByteString) +import Data.ByteString.Internal (createAndTrim) +import Data.ByteString.Unsafe (unsafeUseAsCStringLen) +import Foreign.Marshal.Alloc (allocaBytes) +import Foreign.Ptr (castPtr) +import Network.Socket (sendBuf, sendBufTo, recvBuf, recvBufFrom) +import System.IO.Error (isEOFError) + +import qualified Data.ByteString as B + +import Network.Socket.ByteString.Internal +import Network.Socket.Internal +import Network.Socket.Types + +#if !defined(mingw32_HOST_OS) +import Control.Monad (liftM, zipWithM_) +import Foreign.Marshal.Array (allocaArray) +import Foreign.Marshal.Utils (with) +import Foreign.Ptr (Ptr, plusPtr) +import Foreign.Storable (Storable(..)) + +import Network.Socket.ByteString.IOVec (IOVec(..)) +import Network.Socket.ByteString.MsgHdr (MsgHdr(..)) +#endif + +-- ---------------------------------------------------------------------------- +-- Sending + +-- | Send data to the socket. The socket must be connected to a +-- remote socket. Returns the number of bytes sent. Applications are +-- responsible for ensuring that all data has been sent. +-- +-- Sending data to closed socket may lead to undefined behaviour. 
+send :: Socket -- ^ Connected socket + -> ByteString -- ^ Data to send + -> IO Int -- ^ Number of bytes sent +send sock xs = unsafeUseAsCStringLen xs $ \(str, len) -> + sendBuf sock (castPtr str) len + +-- | Send data to the socket. The socket must be connected to a +-- remote socket. Unlike 'send', this function continues to send data +-- until either all data has been sent or an error occurs. On error, +-- an exception is raised, and there is no way to determine how much +-- data, if any, was successfully sent. +-- +-- Sending data to closed socket may lead to undefined behaviour. +sendAll :: Socket -- ^ Connected socket + -> ByteString -- ^ Data to send + -> IO () +sendAll _ "" = return () +sendAll sock bs = do + sent <- send sock bs + waitWhen0 sent sock + when (sent >= 0) $ sendAll sock $ B.drop sent bs + +-- | Send data to the socket. The recipient can be specified +-- explicitly, so the socket need not be in a connected state. +-- Returns the number of bytes sent. Applications are responsible for +-- ensuring that all data has been sent. +-- +-- Sending data to closed socket may lead to undefined behaviour. +sendTo :: Socket -- ^ Socket + -> ByteString -- ^ Data to send + -> SockAddr -- ^ Recipient address + -> IO Int -- ^ Number of bytes sent +sendTo sock xs addr = + unsafeUseAsCStringLen xs $ \(str, len) -> sendBufTo sock str len addr + +-- | Send data to the socket. The recipient can be specified +-- explicitly, so the socket need not be in a connected state. Unlike +-- 'sendTo', this function continues to send data until either all +-- data has been sent or an error occurs. On error, an exception is +-- raised, and there is no way to determine how much data, if any, was +-- successfully sent. +-- +-- Sending data to closed socket may lead to undefined behaviour. 
+sendAllTo :: Socket -- ^ Socket + -> ByteString -- ^ Data to send + -> SockAddr -- ^ Recipient address + -> IO () +sendAllTo _ "" _ = return () +sendAllTo sock xs addr = do + sent <- sendTo sock xs addr + waitWhen0 sent sock + when (sent >= 0) $ sendAllTo sock (B.drop sent xs) addr + +-- ---------------------------------------------------------------------------- +-- ** Vectored I/O + +-- $vectored +-- +-- Vectored I\/O, also known as scatter\/gather I\/O, allows multiple +-- data segments to be sent using a single system call, without first +-- concatenating the segments. For example, given a list of +-- @ByteString@s, @xs@, +-- +-- > sendMany sock xs +-- +-- is equivalent to +-- +-- > sendAll sock (concat xs) +-- +-- but potentially more efficient. +-- +-- Vectored I\/O are often useful when implementing network protocols +-- that, for example, group data into segments consisting of one or +-- more fixed-length headers followed by a variable-length body. + +-- | Send data to the socket. The socket must be in a connected +-- state. The data is sent as if the parts have been concatenated. +-- This function continues to send data until either all data has been +-- sent or an error occurs. On error, an exception is raised, and +-- there is no way to determine how much data, if any, was +-- successfully sent. +-- +-- Sending data to closed socket may lead to undefined behaviour. +sendMany :: Socket -- ^ Connected socket + -> [ByteString] -- ^ Data to send + -> IO () +#if !defined(mingw32_HOST_OS) +sendMany _ [] = return () +sendMany sock@(MkSocket fd _ _ _ _) cs = do + sent <- sendManyInner + waitWhen0 sent sock + when (sent >= 0) $ sendMany sock (remainingChunks sent cs) + where + sendManyInner = + liftM fromIntegral . withIOVec cs $ \(iovsPtr, iovsLen) -> + throwSocketErrorWaitWrite sock "Network.Socket.ByteString.sendMany" $ + c_writev (fromIntegral fd) iovsPtr + (fromIntegral (min iovsLen (#const IOV_MAX))) +#else +sendMany sock = sendAll sock . 
B.concat +#endif + +-- | Send data to the socket. The recipient can be specified +-- explicitly, so the socket need not be in a connected state. The +-- data is sent as if the parts have been concatenated. This function +-- continues to send data until either all data has been sent or an +-- error occurs. On error, an exception is raised, and there is no +-- way to determine how much data, if any, was successfully sent. +-- +-- Sending data to closed socket may lead to undefined behaviour. +sendManyTo :: Socket -- ^ Socket + -> [ByteString] -- ^ Data to send + -> SockAddr -- ^ Recipient address + -> IO () +#if !defined(mingw32_HOST_OS) +sendManyTo _ [] _ = return () +sendManyTo sock@(MkSocket fd _ _ _ _) cs addr = do + sent <- liftM fromIntegral sendManyToInner + waitWhen0 sent sock + when (sent >= 0) $ sendManyTo sock (remainingChunks sent cs) addr + where + sendManyToInner = + withSockAddr addr $ \addrPtr addrSize -> + withIOVec cs $ \(iovsPtr, iovsLen) -> do + let msgHdr = MsgHdr + addrPtr (fromIntegral addrSize) + iovsPtr (fromIntegral iovsLen) + with msgHdr $ \msgHdrPtr -> + throwSocketErrorWaitWrite sock "Network.Socket.ByteString.sendManyTo" $ + c_sendmsg (fromIntegral fd) msgHdrPtr 0 +#else +sendManyTo sock cs = sendAllTo sock (B.concat cs) +#endif + +-- ---------------------------------------------------------------------------- +-- Receiving + +-- | Receive data from the socket. The socket must be in a connected +-- state. This function may return fewer bytes than specified. If +-- the message is longer than the specified length, it may be +-- discarded depending on the type of socket. This function may block +-- until a message arrives. +-- +-- Considering hardware and network realities, the maximum number of bytes to +-- receive should be a small power of 2, e.g., 4096. +-- +-- For TCP sockets, a zero length return value means the peer has +-- closed its half side of the connection. 
+-- +-- Receiving data from closed socket may lead to undefined behaviour. +recv :: Socket -- ^ Connected socket + -> Int -- ^ Maximum number of bytes to receive + -> IO ByteString -- ^ Data received +recv sock nbytes + | nbytes < 0 = ioError (mkInvalidRecvArgError "Network.Socket.ByteString.recv") + | otherwise = createAndTrim nbytes $ \ptr -> + E.catch + (recvBuf sock ptr nbytes) + (\e -> if isEOFError e then return 0 else throwIO e) + +-- | Receive data from the socket. The socket need not be in a +-- connected state. Returns @(bytes, address)@ where @bytes@ is a +-- 'ByteString' representing the data received and @address@ is a +-- 'SockAddr' representing the address of the sending socket. +-- +-- Receiving data from closed socket may lead to undefined behaviour. +recvFrom :: Socket -- ^ Socket + -> Int -- ^ Maximum number of bytes to receive + -> IO (ByteString, SockAddr) -- ^ Data received and sender address +recvFrom sock nbytes = + allocaBytes nbytes $ \ptr -> do + (len, sockaddr) <- recvBufFrom sock ptr nbytes + str <- B.packCStringLen (ptr, len) + return (str, sockaddr) + +-- ---------------------------------------------------------------------------- +-- Not exported + +#if !defined(mingw32_HOST_OS) +-- | Suppose we try to transmit a list of chunks @cs@ via a gathering write +-- operation and find that @n@ bytes were sent. Then @remainingChunks n cs@ is +-- list of chunks remaining to be sent. +remainingChunks :: Int -> [ByteString] -> [ByteString] +remainingChunks _ [] = [] +remainingChunks i (x:xs) + | i < len = B.drop i x : xs + | otherwise = let i' = i - len in i' `seq` remainingChunks i' xs + where + len = B.length x + +-- | @withIOVec cs f@ executes the computation @f@, passing as argument a pair +-- consisting of a pointer to a temporarily allocated array of pointers to +-- IOVec made from @cs@ and the number of pointers (@length cs@). +-- /Unix only/. 
+withIOVec :: [ByteString] -> ((Ptr IOVec, Int) -> IO a) -> IO a +withIOVec cs f = + allocaArray csLen $ \aPtr -> do + zipWithM_ pokeIov (ptrs aPtr) cs + f (aPtr, csLen) + where + csLen = length cs + ptrs = iterate (`plusPtr` sizeOf (undefined :: IOVec)) + pokeIov ptr s = + unsafeUseAsCStringLen s $ \(sPtr, sLen) -> + poke ptr $ IOVec sPtr (fromIntegral sLen) +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/Internal.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/Internal.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/Internal.hsc 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/Internal.hsc 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,277 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE ForeignFunctionInterface #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} +----------------------------------------------------------------------------- +-- | +-- Module : Network.Socket.Internal +-- Copyright : (c) The University of Glasgow 2001 +-- License : BSD-style (see the file libraries/network/LICENSE) +-- +-- Maintainer : libraries@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- A module containing semi-public 'Network.Socket' internals. +-- Modules which extend the 'Network.Socket' module will need to use +-- this module while ideally most users will be able to make do with +-- the public interface. +-- +----------------------------------------------------------------------------- + +#include "HsNet.h" +##include "HsNetDef.h" + +module Network.Socket.Internal + ( + -- * Socket addresses + HostAddress +#if defined(IPV6_SOCKET_SUPPORT) + , HostAddress6 + , FlowInfo + , ScopeID +#endif + , PortNumber(..) + , SockAddr(..) 
+ + , peekSockAddr + , pokeSockAddr + , sizeOfSockAddr + , sizeOfSockAddrByFamily + , withSockAddr + , withNewSockAddr + + -- * Protocol families + , Family(..) + + -- * Socket error functions +#if defined(HAVE_WINSOCK2_H) + , c_getLastError +#endif + , throwSocketError + , throwSocketErrorCode + + -- * Guards for socket operations that may fail + , throwSocketErrorIfMinus1_ + , throwSocketErrorIfMinus1Retry + , throwSocketErrorIfMinus1Retry_ + , throwSocketErrorIfMinus1RetryMayBlock + + -- ** Guards that wait and retry if the operation would block + -- | These guards are based on 'throwSocketErrorIfMinus1RetryMayBlock'. + -- They wait for socket readiness if the action fails with @EWOULDBLOCK@ + -- or similar. + , throwSocketErrorWaitRead + , throwSocketErrorWaitWrite + + -- * Initialization + , withSocketsDo + + -- * Low-level helpers + , zeroMemory + ) where + +import Foreign.C.Error (throwErrno, throwErrnoIfMinus1Retry, + throwErrnoIfMinus1RetryMayBlock, throwErrnoIfMinus1_, + Errno(..), errnoToIOError) +#if defined(HAVE_WINSOCK2_H) +import Foreign.C.String (peekCString) +import Foreign.Ptr (Ptr) +#endif +import Foreign.C.Types (CInt(..)) +import GHC.Conc (threadWaitRead, threadWaitWrite) + +#if defined(HAVE_WINSOCK2_H) +import Control.Exception ( evaluate ) +import System.IO.Unsafe ( unsafePerformIO ) +import Control.Monad ( when ) +# if __GLASGOW_HASKELL__ >= 707 +import GHC.IO.Exception ( IOErrorType(..) ) +# else +import GHC.IOBase ( IOErrorType(..) ) +# endif +import Foreign.C.Types ( CChar ) +import System.IO.Error ( ioeSetErrorString, mkIOError ) +#endif + +import Network.Socket.Types + +-- --------------------------------------------------------------------- +-- Guards for socket operations that may fail + +-- | Throw an 'IOError' corresponding to the current socket error. +throwSocketError :: String -- ^ textual description of the error location + -> IO a + +-- | Like 'throwSocketError', but the error code is supplied as an argument. 
+-- +-- On Windows, do not use errno. Use a system error code instead. +throwSocketErrorCode :: String -> CInt -> IO a + +-- | Throw an 'IOError' corresponding to the current socket error if +-- the IO action returns a result of @-1@. Discards the result of the +-- IO action after error handling. +throwSocketErrorIfMinus1_ + :: (Eq a, Num a) + => String -- ^ textual description of the location + -> IO a -- ^ the 'IO' operation to be executed + -> IO () + +{-# SPECIALIZE throwSocketErrorIfMinus1_ :: String -> IO CInt -> IO () #-} + +-- | Throw an 'IOError' corresponding to the current socket error if +-- the IO action returns a result of @-1@, but retries in case of an +-- interrupted operation. +throwSocketErrorIfMinus1Retry + :: (Eq a, Num a) + => String -- ^ textual description of the location + -> IO a -- ^ the 'IO' operation to be executed + -> IO a + +{-# SPECIALIZE throwSocketErrorIfMinus1Retry :: String -> IO CInt -> IO CInt #-} + +-- | Throw an 'IOError' corresponding to the current socket error if +-- the IO action returns a result of @-1@, but retries in case of an +-- interrupted operation. Discards the result of the IO action after +-- error handling. +throwSocketErrorIfMinus1Retry_ + :: (Eq a, Num a) + => String -- ^ textual description of the location + -> IO a -- ^ the 'IO' operation to be executed + -> IO () +throwSocketErrorIfMinus1Retry_ loc m = + throwSocketErrorIfMinus1Retry loc m >> return () +{-# SPECIALIZE throwSocketErrorIfMinus1Retry_ :: String -> IO CInt -> IO () #-} + +-- | Throw an 'IOError' corresponding to the current socket error if +-- the IO action returns a result of @-1@, but retries in case of an +-- interrupted operation. Checks for operations that would block and +-- executes an alternative action before retrying in that case. 
+throwSocketErrorIfMinus1RetryMayBlock + :: (Eq a, Num a) + => String -- ^ textual description of the location + -> IO b -- ^ action to execute before retrying if an + -- immediate retry would block + -> IO a -- ^ the 'IO' operation to be executed + -> IO a + +{-# SPECIALIZE throwSocketErrorIfMinus1RetryMayBlock + :: String -> IO b -> IO CInt -> IO CInt #-} + +#if (!defined(HAVE_WINSOCK2_H)) + +throwSocketErrorIfMinus1RetryMayBlock name on_block act = + throwErrnoIfMinus1RetryMayBlock name act on_block + +throwSocketErrorIfMinus1Retry = throwErrnoIfMinus1Retry + +throwSocketErrorIfMinus1_ = throwErrnoIfMinus1_ + +throwSocketError = throwErrno + +throwSocketErrorCode loc errno = + ioError (errnoToIOError loc (Errno errno) Nothing Nothing) + +#else + +throwSocketErrorIfMinus1RetryMayBlock name _ act + = throwSocketErrorIfMinus1Retry name act + +throwSocketErrorIfMinus1_ name act = do + throwSocketErrorIfMinus1Retry name act + return () + +# if defined(HAVE_WINSOCK2_H) +throwSocketErrorIfMinus1Retry name act = do + r <- act + if (r == -1) + then do + rc <- c_getLastError + case rc of + #{const WSANOTINITIALISED} -> do + withSocketsDo (return ()) + r <- act + if (r == -1) + then throwSocketError name + else return r + _ -> throwSocketError name + else return r + +throwSocketErrorCode name rc = do + pstr <- c_getWSError rc + str <- peekCString pstr + ioError (ioeSetErrorString (mkIOError OtherError name Nothing Nothing) str) + +throwSocketError name = + c_getLastError >>= throwSocketErrorCode name + +foreign import CALLCONV unsafe "WSAGetLastError" + c_getLastError :: IO CInt + +foreign import ccall unsafe "getWSErrorDescr" + c_getWSError :: CInt -> IO (Ptr CChar) + + +# else +throwSocketErrorIfMinus1Retry = throwErrnoIfMinus1Retry +throwSocketError = throwErrno +throwSocketErrorCode loc errno = + ioError (errnoToIOError loc (Errno errno) Nothing Nothing) +# endif +#endif + +-- | Like 'throwSocketErrorIfMinus1Retry', but if the action fails with +-- @EWOULDBLOCK@ or 
similar, wait for the socket to be read-ready, +-- and try again. +throwSocketErrorWaitRead :: (Eq a, Num a) => Socket -> String -> IO a -> IO a +throwSocketErrorWaitRead sock name io = + throwSocketErrorIfMinus1RetryMayBlock name + (threadWaitRead $ fromIntegral $ fdSocket sock) + io + +-- | Like 'throwSocketErrorIfMinus1Retry', but if the action fails with +-- @EWOULDBLOCK@ or similar, wait for the socket to be write-ready, +-- and try again. +throwSocketErrorWaitWrite :: (Eq a, Num a) => Socket -> String -> IO a -> IO a +throwSocketErrorWaitWrite sock name io = + throwSocketErrorIfMinus1RetryMayBlock name + (threadWaitWrite $ fromIntegral $ fdSocket sock) + io + +-- --------------------------------------------------------------------------- +-- WinSock support + +{-| With older versions of the @network@ library (version 2.6.0.2 or earlier) +on Windows operating systems, +the networking subsystem must be initialised using 'withSocketsDo' before +any networking operations can be used. eg. + +> main = withSocketsDo $ do {...} + +It is fine to nest calls to 'withSocketsDo', and to perform networking operations +after 'withSocketsDo' has returned. + +In newer versions of the @network@ library (version v2.6.1.0 or later) +it is only necessary to call +'withSocketsDo' if you are calling the 'MkSocket' constructor directly. +However, for compatibility with older versions on Windows, it is good practice +to always call 'withSocketsDo' (it's very cheap). 
+-} +{-# INLINE withSocketsDo #-} +withSocketsDo :: IO a -> IO a +#if !defined(WITH_WINSOCK) +withSocketsDo x = x +#else +withSocketsDo act = evaluate withSocketsInit >> act + + +{-# NOINLINE withSocketsInit #-} +withSocketsInit :: () +-- Use a CAF to make forcing it do initialisation once, but subsequent forces will be cheap +withSocketsInit = unsafePerformIO $ do + x <- initWinSock + when (x /= 0) $ ioError $ + userError "Network.Socket.Internal.withSocketsDo: Failed to initialise WinSock" + +foreign import ccall unsafe "initWinSock" initWinSock :: IO Int + +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/Types.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/Types.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket/Types.hsc 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket/Types.hsc 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,1110 @@ +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE ForeignFunctionInterface #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} + +#include "HsNet.h" +##include "HsNetDef.h" + +module Network.Socket.Types + ( + -- * Socket + Socket(..) + , fdSocket + , sockFamily + , sockType + , sockProtocol + , sockStatus + , SocketStatus(..) + + -- * Socket types + , SocketType(..) + , isSupportedSocketType + , packSocketType + , packSocketType' + , packSocketTypeOrThrow + , unpackSocketType + , unpackSocketType' + + -- * Family + , Family(..) + , isSupportedFamily + , packFamily + , unpackFamily + + -- * Socket addresses + , SockAddr(..) 
+ , isSupportedSockAddr + , HostAddress + , hostAddressToTuple + , tupleToHostAddress +#if defined(IPV6_SOCKET_SUPPORT) + , HostAddress6 + , hostAddress6ToTuple + , tupleToHostAddress6 + , FlowInfo + , ScopeID +#endif + , peekSockAddr + , pokeSockAddr + , sizeOfSockAddr + , sizeOfSockAddrByFamily + , withSockAddr + , withNewSockAddr + + -- * Unsorted + , ProtocolNumber + , PortNumber(..) + + -- * Low-level helpers + , zeroMemory + ) where + +import Control.Concurrent.MVar +import Control.Monad +import Data.Bits +import Data.Maybe +import Data.Typeable +import Data.Word +import Data.Int +import Foreign.C +import Foreign.Marshal.Alloc +import Foreign.Marshal.Array +import Foreign.Ptr +import Foreign.Storable + +-- | A socket data type. +-- 'Socket's are not GCed unless they are closed by 'close'. +data Socket + = MkSocket + CInt -- File Descriptor + Family + SocketType + ProtocolNumber -- Protocol Number + (MVar SocketStatus) -- Status Flag + deriving Typeable + +{-# DEPRECATED MkSocket "'MkSocket' will not be available in version 3.0.0.0 or later. Use fdSocket instead" #-} + +-- | Obtaining the file descriptor from a socket. +-- +-- If a 'Socket' is shared with multiple threads and +-- one uses 'fdSocket', unexpected issues may happen. +-- Consider the following scenario: +-- +-- 1) Thread A acquires a 'Fd' from 'Socket' by 'fdSocket'. +-- +-- 2) Thread B close the 'Socket'. +-- +-- 3) Thread C opens a new 'Socket'. Unfortunately it gets the same 'Fd' +-- number which thread A is holding. +-- +-- In this case, it is safer for Thread A to clone 'Fd' by +-- 'System.Posix.IO.dup'. But this would still suffer from +-- a rase condition between 'fdSocket' and 'close'. 
+fdSocket :: Socket -> CInt +fdSocket (MkSocket fd _ _ _ _) = fd + +sockFamily :: Socket -> Family +sockFamily (MkSocket _ f _ _ _) = f + +sockType :: Socket -> SocketType +sockType (MkSocket _ _ t _ _) = t + +sockProtocol :: Socket -> ProtocolNumber +sockProtocol (MkSocket _ _ _ p _) = p + +sockStatus :: Socket -> MVar SocketStatus +sockStatus (MkSocket _ _ _ _ s) = s + +instance Eq Socket where + (MkSocket _ _ _ _ m1) == (MkSocket _ _ _ _ m2) = m1 == m2 + +instance Show Socket where + showsPrec _n (MkSocket fd _ _ _ _) = + showString "" + +type ProtocolNumber = CInt + +-- | The status of the socket as /determined by this library/, not +-- necessarily reflecting the state of the connection itself. +-- +-- For example, the 'Closed' status is applied when the 'close' +-- function is called. +data SocketStatus + -- Returned Status Function called + = NotConnected -- ^ Newly created, unconnected socket + | Bound -- ^ Bound, via 'bind' + | Listening -- ^ Listening, via 'listen' + | Connected -- ^ Connected or accepted, via 'connect' or 'accept' + | ConvertedToHandle -- ^ Is now a 'Handle' (via 'socketToHandle'), don't touch + | Closed -- ^ Closed was closed by 'close' + deriving (Eq, Show, Typeable) + +{-# DEPRECATED SocketStatus "SocketStatus will be removed" #-} + +----------------------------------------------------------------------------- +-- Socket types + +-- There are a few possible ways to do this. The first is convert the +-- structs used in the C library into an equivalent Haskell type. An +-- other possible implementation is to keep all the internals in the C +-- code and use an Int## and a status flag. The second method is used +-- here since a lot of the C structures are not required to be +-- manipulated. + +-- Originally the status was non-mutable so we had to return a new +-- socket each time we changed the status. This version now uses +-- mutable variables to avoid the need to do this. 
The result is a +-- cleaner interface and better security since the application +-- programmer now can't circumvent the status information to perform +-- invalid operations on sockets. + +-- | Socket Types. +-- +-- The existence of a constructor does not necessarily imply that that +-- socket type is supported on your system: see 'isSupportedSocketType'. +data SocketType + = NoSocketType -- ^ 0, used in getAddrInfo hints, for example + | Stream -- ^ SOCK_STREAM + | Datagram -- ^ SOCK_DGRAM + | Raw -- ^ SOCK_RAW + | RDM -- ^ SOCK_RDM + | SeqPacket -- ^ SOCK_SEQPACKET + deriving (Eq, Ord, Read, Show, Typeable) + +-- | Does the SOCK_ constant corresponding to the given SocketType exist on +-- this system? +isSupportedSocketType :: SocketType -> Bool +isSupportedSocketType = isJust . packSocketType' + +-- | Find the SOCK_ constant corresponding to the SocketType value. +packSocketType' :: SocketType -> Maybe CInt +packSocketType' stype = case Just stype of + -- the Just above is to disable GHC's overlapping pattern + -- detection: see comments for packSocketOption + Just NoSocketType -> Just 0 +#ifdef SOCK_STREAM + Just Stream -> Just #const SOCK_STREAM +#endif +#ifdef SOCK_DGRAM + Just Datagram -> Just #const SOCK_DGRAM +#endif +#ifdef SOCK_RAW + Just Raw -> Just #const SOCK_RAW +#endif +#ifdef SOCK_RDM + Just RDM -> Just #const SOCK_RDM +#endif +#ifdef SOCK_SEQPACKET + Just SeqPacket -> Just #const SOCK_SEQPACKET +#endif + _ -> Nothing + +{-# DEPRECATED packSocketType "packSocketType will not be available in version 3.0.0.0 or later." #-} + +packSocketType :: SocketType -> CInt +packSocketType stype = fromMaybe (error errMsg) (packSocketType' stype) + where + errMsg = concat ["Network.Socket.packSocketType: ", + "socket type ", show stype, " unsupported on this system"] + +-- | Try packSocketType' on the SocketType, if it fails throw an error with +-- message starting "Network.Socket." 
++ the String parameter +packSocketTypeOrThrow :: String -> SocketType -> IO CInt +packSocketTypeOrThrow caller stype = maybe err return (packSocketType' stype) + where + err = ioError . userError . concat $ ["Network.Socket.", caller, ": ", + "socket type ", show stype, " unsupported on this system"] + + +unpackSocketType:: CInt -> Maybe SocketType +unpackSocketType t = case t of + 0 -> Just NoSocketType +#ifdef SOCK_STREAM + (#const SOCK_STREAM) -> Just Stream +#endif +#ifdef SOCK_DGRAM + (#const SOCK_DGRAM) -> Just Datagram +#endif +#ifdef SOCK_RAW + (#const SOCK_RAW) -> Just Raw +#endif +#ifdef SOCK_RDM + (#const SOCK_RDM) -> Just RDM +#endif +#ifdef SOCK_SEQPACKET + (#const SOCK_SEQPACKET) -> Just SeqPacket +#endif + _ -> Nothing + +-- | Try unpackSocketType on the CInt, if it fails throw an error with +-- message starting "Network.Socket." ++ the String parameter +unpackSocketType' :: String -> CInt -> IO SocketType +unpackSocketType' caller ty = maybe err return (unpackSocketType ty) + where + err = ioError . userError . concat $ ["Network.Socket.", caller, ": ", + "socket type ", show ty, " unsupported on this system"] + +------------------------------------------------------------------------ +-- Protocol Families. + +-- | Address families. +-- +-- A constructor being present here does not mean it is supported by the +-- operating system: see 'isSupportedFamily'. +data Family + = AF_UNSPEC -- unspecified + | AF_UNIX -- local to host (pipes, portals + | AF_INET -- internetwork: UDP, TCP, etc + | AF_INET6 -- Internet Protocol version 6 + | AF_IMPLINK -- arpanet imp addresses + | AF_PUP -- pup protocols: e.g. 
BSP + | AF_CHAOS -- mit CHAOS protocols + | AF_NS -- XEROX NS protocols + | AF_NBS -- nbs protocols + | AF_ECMA -- european computer manufacturers + | AF_DATAKIT -- datakit protocols + | AF_CCITT -- CCITT protocols, X.25 etc + | AF_SNA -- IBM SNA + | AF_DECnet -- DECnet + | AF_DLI -- Direct data link interface + | AF_LAT -- LAT + | AF_HYLINK -- NSC Hyperchannel + | AF_APPLETALK -- Apple Talk + | AF_ROUTE -- Internal Routing Protocol (aka AF_NETLINK) + | AF_NETBIOS -- NetBios-style addresses + | AF_NIT -- Network Interface Tap + | AF_802 -- IEEE 802.2, also ISO 8802 + | AF_ISO -- ISO protocols + | AF_OSI -- umbrella of all families used by OSI + | AF_NETMAN -- DNA Network Management + | AF_X25 -- CCITT X.25 + | AF_AX25 + | AF_OSINET -- AFI + | AF_GOSSIP -- US Government OSI + | AF_IPX -- Novell Internet Protocol + | Pseudo_AF_XTP -- eXpress Transfer Protocol (no AF) + | AF_CTF -- Common Trace Facility + | AF_WAN -- Wide Area Network protocols + | AF_SDL -- SGI Data Link for DLPI + | AF_NETWARE + | AF_NDD + | AF_INTF -- Debugging use only + | AF_COIP -- connection-oriented IP, aka ST II + | AF_CNT -- Computer Network Technology + | Pseudo_AF_RTIP -- Help Identify RTIP packets + | Pseudo_AF_PIP -- Help Identify PIP packets + | AF_SIP -- Simple Internet Protocol + | AF_ISDN -- Integrated Services Digital Network + | Pseudo_AF_KEY -- Internal key-management function + | AF_NATM -- native ATM access + | AF_ARP -- (rev.) addr. res. prot. 
(RFC 826) + | Pseudo_AF_HDRCMPLT -- Used by BPF to not rewrite hdrs in iface output + | AF_ENCAP + | AF_LINK -- Link layer interface + | AF_RAW -- Link layer interface + | AF_RIF -- raw interface + | AF_NETROM -- Amateur radio NetROM + | AF_BRIDGE -- multiprotocol bridge + | AF_ATMPVC -- ATM PVCs + | AF_ROSE -- Amateur Radio X.25 PLP + | AF_NETBEUI -- 802.2LLC + | AF_SECURITY -- Security callback pseudo AF + | AF_PACKET -- Packet family + | AF_ASH -- Ash + | AF_ECONET -- Acorn Econet + | AF_ATMSVC -- ATM SVCs + | AF_IRDA -- IRDA sockets + | AF_PPPOX -- PPPoX sockets + | AF_WANPIPE -- Wanpipe API sockets + | AF_BLUETOOTH -- bluetooth sockets + | AF_CAN -- Controller Area Network + deriving (Eq, Ord, Read, Show) + +packFamily :: Family -> CInt +packFamily f = case packFamily' f of + Just fam -> fam + Nothing -> error $ + "Network.Socket.packFamily: unsupported address family: " ++ + show f + +-- | Does the AF_ constant corresponding to the given family exist on this +-- system? +isSupportedFamily :: Family -> Bool +isSupportedFamily = isJust . 
packFamily' + +packFamily' :: Family -> Maybe CInt +packFamily' f = case Just f of + -- the Just above is to disable GHC's overlapping pattern + -- detection: see comments for packSocketOption + Just AF_UNSPEC -> Just #const AF_UNSPEC +#ifdef AF_UNIX + Just AF_UNIX -> Just #const AF_UNIX +#endif +#ifdef AF_INET + Just AF_INET -> Just #const AF_INET +#endif +#ifdef AF_INET6 + Just AF_INET6 -> Just #const AF_INET6 +#endif +#ifdef AF_IMPLINK + Just AF_IMPLINK -> Just #const AF_IMPLINK +#endif +#ifdef AF_PUP + Just AF_PUP -> Just #const AF_PUP +#endif +#ifdef AF_CHAOS + Just AF_CHAOS -> Just #const AF_CHAOS +#endif +#ifdef AF_NS + Just AF_NS -> Just #const AF_NS +#endif +#ifdef AF_NBS + Just AF_NBS -> Just #const AF_NBS +#endif +#ifdef AF_ECMA + Just AF_ECMA -> Just #const AF_ECMA +#endif +#ifdef AF_DATAKIT + Just AF_DATAKIT -> Just #const AF_DATAKIT +#endif +#ifdef AF_CCITT + Just AF_CCITT -> Just #const AF_CCITT +#endif +#ifdef AF_SNA + Just AF_SNA -> Just #const AF_SNA +#endif +#ifdef AF_DECnet + Just AF_DECnet -> Just #const AF_DECnet +#endif +#ifdef AF_DLI + Just AF_DLI -> Just #const AF_DLI +#endif +#ifdef AF_LAT + Just AF_LAT -> Just #const AF_LAT +#endif +#ifdef AF_HYLINK + Just AF_HYLINK -> Just #const AF_HYLINK +#endif +#ifdef AF_APPLETALK + Just AF_APPLETALK -> Just #const AF_APPLETALK +#endif +#ifdef AF_ROUTE + Just AF_ROUTE -> Just #const AF_ROUTE +#endif +#ifdef AF_NETBIOS + Just AF_NETBIOS -> Just #const AF_NETBIOS +#endif +#ifdef AF_NIT + Just AF_NIT -> Just #const AF_NIT +#endif +#ifdef AF_802 + Just AF_802 -> Just #const AF_802 +#endif +#ifdef AF_ISO + Just AF_ISO -> Just #const AF_ISO +#endif +#ifdef AF_OSI + Just AF_OSI -> Just #const AF_OSI +#endif +#ifdef AF_NETMAN + Just AF_NETMAN -> Just #const AF_NETMAN +#endif +#ifdef AF_X25 + Just AF_X25 -> Just #const AF_X25 +#endif +#ifdef AF_AX25 + Just AF_AX25 -> Just #const AF_AX25 +#endif +#ifdef AF_OSINET + Just AF_OSINET -> Just #const AF_OSINET +#endif +#ifdef AF_GOSSIP + Just AF_GOSSIP -> Just 
#const AF_GOSSIP +#endif +#ifdef AF_IPX + Just AF_IPX -> Just #const AF_IPX +#endif +#ifdef Pseudo_AF_XTP + Just Pseudo_AF_XTP -> Just #const Pseudo_AF_XTP +#endif +#ifdef AF_CTF + Just AF_CTF -> Just #const AF_CTF +#endif +#ifdef AF_WAN + Just AF_WAN -> Just #const AF_WAN +#endif +#ifdef AF_SDL + Just AF_SDL -> Just #const AF_SDL +#endif +#ifdef AF_NETWARE + Just AF_NETWARE -> Just #const AF_NETWARE +#endif +#ifdef AF_NDD + Just AF_NDD -> Just #const AF_NDD +#endif +#ifdef AF_INTF + Just AF_INTF -> Just #const AF_INTF +#endif +#ifdef AF_COIP + Just AF_COIP -> Just #const AF_COIP +#endif +#ifdef AF_CNT + Just AF_CNT -> Just #const AF_CNT +#endif +#ifdef Pseudo_AF_RTIP + Just Pseudo_AF_RTIP -> Just #const Pseudo_AF_RTIP +#endif +#ifdef Pseudo_AF_PIP + Just Pseudo_AF_PIP -> Just #const Pseudo_AF_PIP +#endif +#ifdef AF_SIP + Just AF_SIP -> Just #const AF_SIP +#endif +#ifdef AF_ISDN + Just AF_ISDN -> Just #const AF_ISDN +#endif +#ifdef Pseudo_AF_KEY + Just Pseudo_AF_KEY -> Just #const Pseudo_AF_KEY +#endif +#ifdef AF_NATM + Just AF_NATM -> Just #const AF_NATM +#endif +#ifdef AF_ARP + Just AF_ARP -> Just #const AF_ARP +#endif +#ifdef Pseudo_AF_HDRCMPLT + Just Pseudo_AF_HDRCMPLT -> Just #const Pseudo_AF_HDRCMPLT +#endif +#ifdef AF_ENCAP + Just AF_ENCAP -> Just #const AF_ENCAP +#endif +#ifdef AF_LINK + Just AF_LINK -> Just #const AF_LINK +#endif +#ifdef AF_RAW + Just AF_RAW -> Just #const AF_RAW +#endif +#ifdef AF_RIF + Just AF_RIF -> Just #const AF_RIF +#endif +#ifdef AF_NETROM + Just AF_NETROM -> Just #const AF_NETROM +#endif +#ifdef AF_BRIDGE + Just AF_BRIDGE -> Just #const AF_BRIDGE +#endif +#ifdef AF_ATMPVC + Just AF_ATMPVC -> Just #const AF_ATMPVC +#endif +#ifdef AF_ROSE + Just AF_ROSE -> Just #const AF_ROSE +#endif +#ifdef AF_NETBEUI + Just AF_NETBEUI -> Just #const AF_NETBEUI +#endif +#ifdef AF_SECURITY + Just AF_SECURITY -> Just #const AF_SECURITY +#endif +#ifdef AF_PACKET + Just AF_PACKET -> Just #const AF_PACKET +#endif +#ifdef AF_ASH + Just AF_ASH -> Just 
#const AF_ASH +#endif +#ifdef AF_ECONET + Just AF_ECONET -> Just #const AF_ECONET +#endif +#ifdef AF_ATMSVC + Just AF_ATMSVC -> Just #const AF_ATMSVC +#endif +#ifdef AF_IRDA + Just AF_IRDA -> Just #const AF_IRDA +#endif +#ifdef AF_PPPOX + Just AF_PPPOX -> Just #const AF_PPPOX +#endif +#ifdef AF_WANPIPE + Just AF_WANPIPE -> Just #const AF_WANPIPE +#endif +#ifdef AF_BLUETOOTH + Just AF_BLUETOOTH -> Just #const AF_BLUETOOTH +#endif +#ifdef AF_CAN + Just AF_CAN -> Just #const AF_CAN +#endif + _ -> Nothing + +--------- ---------- + +unpackFamily :: CInt -> Family +unpackFamily f = case f of + (#const AF_UNSPEC) -> AF_UNSPEC +#ifdef AF_UNIX + (#const AF_UNIX) -> AF_UNIX +#endif +#ifdef AF_INET + (#const AF_INET) -> AF_INET +#endif +#ifdef AF_INET6 + (#const AF_INET6) -> AF_INET6 +#endif +#ifdef AF_IMPLINK + (#const AF_IMPLINK) -> AF_IMPLINK +#endif +#ifdef AF_PUP + (#const AF_PUP) -> AF_PUP +#endif +#ifdef AF_CHAOS + (#const AF_CHAOS) -> AF_CHAOS +#endif +#ifdef AF_NS + (#const AF_NS) -> AF_NS +#endif +#ifdef AF_NBS + (#const AF_NBS) -> AF_NBS +#endif +#ifdef AF_ECMA + (#const AF_ECMA) -> AF_ECMA +#endif +#ifdef AF_DATAKIT + (#const AF_DATAKIT) -> AF_DATAKIT +#endif +#ifdef AF_CCITT + (#const AF_CCITT) -> AF_CCITT +#endif +#ifdef AF_SNA + (#const AF_SNA) -> AF_SNA +#endif +#ifdef AF_DECnet + (#const AF_DECnet) -> AF_DECnet +#endif +#ifdef AF_DLI + (#const AF_DLI) -> AF_DLI +#endif +#ifdef AF_LAT + (#const AF_LAT) -> AF_LAT +#endif +#ifdef AF_HYLINK + (#const AF_HYLINK) -> AF_HYLINK +#endif +#ifdef AF_APPLETALK + (#const AF_APPLETALK) -> AF_APPLETALK +#endif +#ifdef AF_ROUTE + (#const AF_ROUTE) -> AF_ROUTE +#endif +#ifdef AF_NETBIOS + (#const AF_NETBIOS) -> AF_NETBIOS +#endif +#ifdef AF_NIT + (#const AF_NIT) -> AF_NIT +#endif +#ifdef AF_802 + (#const AF_802) -> AF_802 +#endif +#ifdef AF_ISO + (#const AF_ISO) -> AF_ISO +#endif +#ifdef AF_OSI +# if (!defined(AF_ISO)) || (defined(AF_ISO) && (AF_ISO != AF_OSI)) + (#const AF_OSI) -> AF_OSI +# endif +#endif +#ifdef AF_NETMAN + 
(#const AF_NETMAN) -> AF_NETMAN +#endif +#ifdef AF_X25 + (#const AF_X25) -> AF_X25 +#endif +#ifdef AF_AX25 + (#const AF_AX25) -> AF_AX25 +#endif +#ifdef AF_OSINET + (#const AF_OSINET) -> AF_OSINET +#endif +#ifdef AF_GOSSIP + (#const AF_GOSSIP) -> AF_GOSSIP +#endif +#if defined(AF_IPX) && (!defined(AF_NS) || AF_NS != AF_IPX) + (#const AF_IPX) -> AF_IPX +#endif +#ifdef Pseudo_AF_XTP + (#const Pseudo_AF_XTP) -> Pseudo_AF_XTP +#endif +#ifdef AF_CTF + (#const AF_CTF) -> AF_CTF +#endif +#ifdef AF_WAN + (#const AF_WAN) -> AF_WAN +#endif +#ifdef AF_SDL + (#const AF_SDL) -> AF_SDL +#endif +#ifdef AF_NETWARE + (#const AF_NETWARE) -> AF_NETWARE +#endif +#ifdef AF_NDD + (#const AF_NDD) -> AF_NDD +#endif +#ifdef AF_INTF + (#const AF_INTF) -> AF_INTF +#endif +#ifdef AF_COIP + (#const AF_COIP) -> AF_COIP +#endif +#ifdef AF_CNT + (#const AF_CNT) -> AF_CNT +#endif +#ifdef Pseudo_AF_RTIP + (#const Pseudo_AF_RTIP) -> Pseudo_AF_RTIP +#endif +#ifdef Pseudo_AF_PIP + (#const Pseudo_AF_PIP) -> Pseudo_AF_PIP +#endif +#ifdef AF_SIP + (#const AF_SIP) -> AF_SIP +#endif +#ifdef AF_ISDN + (#const AF_ISDN) -> AF_ISDN +#endif +#ifdef Pseudo_AF_KEY + (#const Pseudo_AF_KEY) -> Pseudo_AF_KEY +#endif +#ifdef AF_NATM + (#const AF_NATM) -> AF_NATM +#endif +#ifdef AF_ARP + (#const AF_ARP) -> AF_ARP +#endif +#ifdef Pseudo_AF_HDRCMPLT + (#const Pseudo_AF_HDRCMPLT) -> Pseudo_AF_HDRCMPLT +#endif +#ifdef AF_ENCAP + (#const AF_ENCAP) -> AF_ENCAP +#endif +#ifdef AF_LINK + (#const AF_LINK) -> AF_LINK +#endif +#ifdef AF_RAW + (#const AF_RAW) -> AF_RAW +#endif +#ifdef AF_RIF + (#const AF_RIF) -> AF_RIF +#endif +#ifdef AF_NETROM + (#const AF_NETROM) -> AF_NETROM +#endif +#ifdef AF_BRIDGE + (#const AF_BRIDGE) -> AF_BRIDGE +#endif +#ifdef AF_ATMPVC + (#const AF_ATMPVC) -> AF_ATMPVC +#endif +#ifdef AF_ROSE + (#const AF_ROSE) -> AF_ROSE +#endif +#ifdef AF_NETBEUI + (#const AF_NETBEUI) -> AF_NETBEUI +#endif +#ifdef AF_SECURITY + (#const AF_SECURITY) -> AF_SECURITY +#endif +#ifdef AF_PACKET + (#const AF_PACKET) -> 
AF_PACKET +#endif +#ifdef AF_ASH + (#const AF_ASH) -> AF_ASH +#endif +#ifdef AF_ECONET + (#const AF_ECONET) -> AF_ECONET +#endif +#ifdef AF_ATMSVC + (#const AF_ATMSVC) -> AF_ATMSVC +#endif +#ifdef AF_IRDA + (#const AF_IRDA) -> AF_IRDA +#endif +#ifdef AF_PPPOX + (#const AF_PPPOX) -> AF_PPPOX +#endif +#ifdef AF_WANPIPE + (#const AF_WANPIPE) -> AF_WANPIPE +#endif +#ifdef AF_BLUETOOTH + (#const AF_BLUETOOTH) -> AF_BLUETOOTH +#endif +#ifdef AF_CAN + (#const AF_CAN) -> AF_CAN +#endif + unknown -> error $ + "Network.Socket.Types.unpackFamily: unknown address family: " ++ + show unknown + +------------------------------------------------------------------------ +-- Port Numbers + +-- | Use the @Num@ instance (i.e. use a literal) to create a +-- @PortNumber@ value. +-- +-- >>> 1 :: PortNumber +-- 1 +-- >>> read "1" :: PortNumber +-- 1 +-- >>> show (12345 :: PortNumber) +-- "12345" +-- >>> 50000 < (51000 :: PortNumber) +-- True +-- >>> 50000 < (52000 :: PortNumber) +-- True +-- >>> 50000 + (10000 :: PortNumber) +-- 60000 +newtype PortNumber = PortNum Word16 deriving (Eq, Ord, Typeable, Num, Enum, Real, Integral) + +-- Print "n" instead of "PortNum n". +instance Show PortNumber where + showsPrec p (PortNum pn) = showsPrec p (fromIntegral pn :: Int) + +-- Read "n" instead of "PortNum n". +instance Read PortNumber where + readsPrec n = map (\(x,y) -> (fromIntegral (x :: Int), y)) . readsPrec n + +foreign import CALLCONV unsafe "ntohs" ntohs :: Word16 -> Word16 +foreign import CALLCONV unsafe "htons" htons :: Word16 -> Word16 +foreign import CALLCONV unsafe "ntohl" ntohl :: Word32 -> Word32 +foreign import CALLCONV unsafe "htonl" htonl :: Word32 -> Word32 + +instance Storable PortNumber where + sizeOf _ = sizeOf (undefined :: Word16) + alignment _ = alignment (undefined :: Word16) + poke p (PortNum po) = poke (castPtr p) (htons po) + peek p = (PortNum . 
ntohs) `liftM` peek (castPtr p) + +------------------------------------------------------------------------ +-- Socket addresses + +-- The scheme used for addressing sockets is somewhat quirky. The +-- calls in the BSD socket API that need to know the socket address +-- all operate in terms of struct sockaddr, a `virtual' type of +-- socket address. + +-- The Internet family of sockets are addressed as struct sockaddr_in, +-- so when calling functions that operate on struct sockaddr, we have +-- to type cast the Internet socket address into a struct sockaddr. +-- Instances of the structure for different families might *not* be +-- the same size. Same casting is required of other families of +-- sockets such as Xerox NS. Similarly for UNIX-domain sockets. + +-- To represent these socket addresses in Haskell-land, we do what BSD +-- didn't do, and use a union/algebraic type for the different +-- families. Currently only UNIX-domain sockets and the Internet +-- families are supported. + +#if defined(IPV6_SOCKET_SUPPORT) +type FlowInfo = Word32 +type ScopeID = Word32 +#endif + +-- | The existence of a constructor does not necessarily imply that +-- that socket address type is supported on your system: see +-- 'isSupportedSockAddr'. +data SockAddr -- C Names + = SockAddrInet + PortNumber -- sin_port + HostAddress -- sin_addr (ditto) + | SockAddrInet6 + PortNumber -- sin6_port + FlowInfo -- sin6_flowinfo (ditto) + HostAddress6 -- sin6_addr (ditto) + ScopeID -- sin6_scope_id (ditto) + | SockAddrUnix + String -- sun_path + | SockAddrCan + Int32 -- can_ifindex (can be get by Network.BSD.ifNameToIndex "can0") + -- TODO: Extend this to include transport protocol information + deriving (Eq, Ord, Typeable) + +-- | Is the socket address type supported on this system? 
+isSupportedSockAddr :: SockAddr -> Bool +isSupportedSockAddr addr = case addr of + SockAddrInet {} -> True +#if defined(IPV6_SOCKET_SUPPORT) + SockAddrInet6 {} -> True +#endif +#if defined(DOMAIN_SOCKET_SUPPORT) + SockAddrUnix{} -> True +#endif +#if defined(CAN_SOCKET_SUPPORT) + SockAddrCan{} -> True +#endif +#if !(defined(IPV6_SOCKET_SUPPORT) \ + && defined(DOMAIN_SOCKET_SUPPORT) && defined(CAN_SOCKET_SUPPORT)) + _ -> False +#endif + +{-# DEPRECATED SockAddrCan "This will be removed in 3.0" #-} + +#if defined(WITH_WINSOCK) +type CSaFamily = (#type unsigned short) +#elif defined(darwin_HOST_OS) +type CSaFamily = (#type u_char) +#else +type CSaFamily = (#type sa_family_t) +#endif + +-- | Computes the storage requirements (in bytes) of the given +-- 'SockAddr'. This function differs from 'Foreign.Storable.sizeOf' +-- in that the value of the argument /is/ used. +sizeOfSockAddr :: SockAddr -> Int +#if defined(DOMAIN_SOCKET_SUPPORT) +sizeOfSockAddr (SockAddrUnix path) = + case path of + '\0':_ -> (#const sizeof(sa_family_t)) + length path + _ -> #const sizeof(struct sockaddr_un) +#endif +sizeOfSockAddr (SockAddrInet _ _) = #const sizeof(struct sockaddr_in) +#if defined(IPV6_SOCKET_SUPPORT) +sizeOfSockAddr (SockAddrInet6 _ _ _ _) = #const sizeof(struct sockaddr_in6) +#endif +#if defined(CAN_SOCKET_SUPPORT) +sizeOfSockAddr (SockAddrCan _) = #const sizeof(struct sockaddr_can) +#endif +#if !(defined(IPV6_SOCKET_SUPPORT) \ + && defined(DOMAIN_SOCKET_SUPPORT) && defined(CAN_SOCKET_SUPPORT)) +sizeOfSockAddr _ = error "sizeOfSockAddr: not supported" +#endif + +-- | Computes the storage requirements (in bytes) required for a +-- 'SockAddr' with the given 'Family'. 
+sizeOfSockAddrByFamily :: Family -> Int +#if defined(DOMAIN_SOCKET_SUPPORT) +sizeOfSockAddrByFamily AF_UNIX = #const sizeof(struct sockaddr_un) +#endif +#if defined(IPV6_SOCKET_SUPPORT) +sizeOfSockAddrByFamily AF_INET6 = #const sizeof(struct sockaddr_in6) +#endif +sizeOfSockAddrByFamily AF_INET = #const sizeof(struct sockaddr_in) +#if defined(CAN_SOCKET_SUPPORT) +sizeOfSockAddrByFamily AF_CAN = #const sizeof(struct sockaddr_can) +#endif +sizeOfSockAddrByFamily family = error $ + "Network.Socket.Types.sizeOfSockAddrByFamily: address family '" ++ + show family ++ "' not supported." + +-- | Use a 'SockAddr' with a function requiring a pointer to a +-- 'SockAddr' and the length of that 'SockAddr'. +withSockAddr :: SockAddr -> (Ptr SockAddr -> Int -> IO a) -> IO a +withSockAddr addr f = do + let sz = sizeOfSockAddr addr + allocaBytes sz $ \p -> pokeSockAddr p addr >> f (castPtr p) sz + +-- | Create a new 'SockAddr' for use with a function requiring a +-- pointer to a 'SockAddr' and the length of that 'SockAddr'. +withNewSockAddr :: Family -> (Ptr SockAddr -> Int -> IO a) -> IO a +withNewSockAddr family f = do + let sz = sizeOfSockAddrByFamily family + allocaBytes sz $ \ptr -> f ptr sz + +-- We can't write an instance of 'Storable' for 'SockAddr' because +-- @sockaddr@ is a sum type of variable size but +-- 'Foreign.Storable.sizeOf' is required to be constant. + +-- Note that on Darwin, the sockaddr structure must be zeroed before +-- use. + +-- | Write the given 'SockAddr' to the given memory location. 
+pokeSockAddr :: Ptr a -> SockAddr -> IO () +#if defined(DOMAIN_SOCKET_SUPPORT) +pokeSockAddr p (SockAddrUnix path) = do +#if defined(darwin_HOST_OS) + zeroMemory p (#const sizeof(struct sockaddr_un)) +#else + case path of + ('\0':_) -> zeroMemory p (#const sizeof(struct sockaddr_un)) + _ -> return () +#endif +#if defined(HAVE_STRUCT_SOCKADDR_SA_LEN) + (#poke struct sockaddr_un, sun_len) p ((#const sizeof(struct sockaddr_un)) :: Word8) +#endif + (#poke struct sockaddr_un, sun_family) p ((#const AF_UNIX) :: CSaFamily) + let pathC = map castCharToCChar path + poker = case path of ('\0':_) -> pokeArray; _ -> pokeArray0 0 + poker ((#ptr struct sockaddr_un, sun_path) p) pathC +#endif +pokeSockAddr p (SockAddrInet port addr) = do +#if defined(darwin_HOST_OS) + zeroMemory p (#const sizeof(struct sockaddr_in)) +#endif +#if defined(HAVE_STRUCT_SOCKADDR_SA_LEN) + (#poke struct sockaddr_in, sin_len) p ((#const sizeof(struct sockaddr_in)) :: Word8) +#endif + (#poke struct sockaddr_in, sin_family) p ((#const AF_INET) :: CSaFamily) + (#poke struct sockaddr_in, sin_port) p port + (#poke struct sockaddr_in, sin_addr) p addr +#if defined(IPV6_SOCKET_SUPPORT) +pokeSockAddr p (SockAddrInet6 port flow addr scope) = do +#if defined(darwin_HOST_OS) + zeroMemory p (#const sizeof(struct sockaddr_in6)) +#endif +#if defined(HAVE_STRUCT_SOCKADDR_SA_LEN) + (#poke struct sockaddr_in6, sin6_len) p ((#const sizeof(struct sockaddr_in6)) :: Word8) +#endif + (#poke struct sockaddr_in6, sin6_family) p ((#const AF_INET6) :: CSaFamily) + (#poke struct sockaddr_in6, sin6_port) p port + (#poke struct sockaddr_in6, sin6_flowinfo) p flow + (#poke struct sockaddr_in6, sin6_addr) p (In6Addr addr) + (#poke struct sockaddr_in6, sin6_scope_id) p scope +#endif +#if defined(CAN_SOCKET_SUPPORT) +pokeSockAddr p (SockAddrCan ifIndex) = do +#if defined(darwin_HOST_OS) + zeroMemory p (#const sizeof(struct sockaddr_can)) +#endif + (#poke struct sockaddr_can, can_ifindex) p ifIndex +#endif +#if 
!(defined(IPV6_SOCKET_SUPPORT) \ + && defined(DOMAIN_SOCKET_SUPPORT) && defined(CAN_SOCKET_SUPPORT)) +pokeSockAddr _ _ = error "pokeSockAddr: not supported" +#endif + +-- | Read a 'SockAddr' from the given memory location. +peekSockAddr :: Ptr SockAddr -> IO SockAddr +peekSockAddr p = do + family <- (#peek struct sockaddr, sa_family) p + case family :: CSaFamily of +#if defined(DOMAIN_SOCKET_SUPPORT) + (#const AF_UNIX) -> do + str <- peekCString ((#ptr struct sockaddr_un, sun_path) p) + return (SockAddrUnix str) +#endif + (#const AF_INET) -> do + addr <- (#peek struct sockaddr_in, sin_addr) p + port <- (#peek struct sockaddr_in, sin_port) p + return (SockAddrInet port addr) +#if defined(IPV6_SOCKET_SUPPORT) + (#const AF_INET6) -> do + port <- (#peek struct sockaddr_in6, sin6_port) p + flow <- (#peek struct sockaddr_in6, sin6_flowinfo) p + In6Addr addr <- (#peek struct sockaddr_in6, sin6_addr) p + scope <- (#peek struct sockaddr_in6, sin6_scope_id) p + return (SockAddrInet6 port flow addr scope) +#endif +#if defined(CAN_SOCKET_SUPPORT) + (#const AF_CAN) -> do + ifidx <- (#peek struct sockaddr_can, can_ifindex) p + return (SockAddrCan ifidx) +#endif + _ -> ioError $ userError $ + "Network.Socket.Types.peekSockAddr: address family '" ++ + show family ++ "' not supported." + +------------------------------------------------------------------------ + +-- | The raw network byte order number is read using host byte order. +-- Therefore on little-endian architectures the byte order is swapped. For +-- example @127.0.0.1@ is represented as @0x0100007f@ on little-endian hosts +-- and as @0x7f000001@ on big-endian hosts. +-- +-- For direct manipulation prefer 'hostAddressToTuple' and +-- 'tupleToHostAddress'. +type HostAddress = Word32 + +-- | Converts 'HostAddress' to representation-independent IPv4 quadruple. +-- For example for @127.0.0.1@ the function will return @(0x7f, 0, 0, 1)@ +-- regardless of host endianness. 
+hostAddressToTuple :: HostAddress -> (Word8, Word8, Word8, Word8) +hostAddressToTuple ha' = + let ha = htonl ha' + byte i = fromIntegral (ha `shiftR` i) :: Word8 + in (byte 24, byte 16, byte 8, byte 0) + +-- | Converts IPv4 quadruple to 'HostAddress'. +tupleToHostAddress :: (Word8, Word8, Word8, Word8) -> HostAddress +tupleToHostAddress (b3, b2, b1, b0) = + let x `sl` i = fromIntegral x `shiftL` i :: Word32 + in ntohl $ (b3 `sl` 24) .|. (b2 `sl` 16) .|. (b1 `sl` 8) .|. (b0 `sl` 0) + +#if defined(IPV6_SOCKET_SUPPORT) +-- | Independent of endianness. For example @::1@ is stored as @(0, 0, 0, 1)@. +-- +-- For direct manipulation prefer 'hostAddress6ToTuple' and +-- 'tupleToHostAddress6'. +type HostAddress6 = (Word32, Word32, Word32, Word32) + +hostAddress6ToTuple :: HostAddress6 -> (Word16, Word16, Word16, Word16, + Word16, Word16, Word16, Word16) +hostAddress6ToTuple (w3, w2, w1, w0) = + let high, low :: Word32 -> Word16 + high w = fromIntegral (w `shiftR` 16) + low w = fromIntegral w + in (high w3, low w3, high w2, low w2, high w1, low w1, high w0, low w0) + +tupleToHostAddress6 :: (Word16, Word16, Word16, Word16, + Word16, Word16, Word16, Word16) -> HostAddress6 +tupleToHostAddress6 (w7, w6, w5, w4, w3, w2, w1, w0) = + let add :: Word16 -> Word16 -> Word32 + high `add` low = (fromIntegral high `shiftL` 16) .|. (fromIntegral low) + in (w7 `add` w6, w5 `add` w4, w3 `add` w2, w1 `add` w0) + +-- The peek32 and poke32 functions work around the fact that the RFCs +-- don't require 32-bit-wide address fields to be present. We can +-- only portably rely on an 8-bit field, s6_addr. + +s6_addr_offset :: Int +s6_addr_offset = (#offset struct in6_addr, s6_addr) + +peek32 :: Ptr a -> Int -> IO Word32 +peek32 p i0 = do + let i' = i0 * 4 + peekByte n = peekByteOff p (s6_addr_offset + i' + n) :: IO Word8 + a `sl` i = fromIntegral a `shiftL` i + a0 <- peekByte 0 + a1 <- peekByte 1 + a2 <- peekByte 2 + a3 <- peekByte 3 + return ((a0 `sl` 24) .|. (a1 `sl` 16) .|. (a2 `sl` 8) .|. 
(a3 `sl` 0)) + +poke32 :: Ptr a -> Int -> Word32 -> IO () +poke32 p i0 a = do + let i' = i0 * 4 + pokeByte n = pokeByteOff p (s6_addr_offset + i' + n) + x `sr` i = fromIntegral (x `shiftR` i) :: Word8 + pokeByte 0 (a `sr` 24) + pokeByte 1 (a `sr` 16) + pokeByte 2 (a `sr` 8) + pokeByte 3 (a `sr` 0) + +-- | Private newtype proxy for the Storable instance. To avoid orphan instances. +newtype In6Addr = In6Addr HostAddress6 + +#if __GLASGOW_HASKELL__ < 800 +#let alignment t = "%lu", (unsigned long)offsetof(struct {char x__; t (y__); }, y__) +#endif + +instance Storable In6Addr where + sizeOf _ = #const sizeof(struct in6_addr) + alignment _ = #alignment struct in6_addr + + peek p = do + a <- peek32 p 0 + b <- peek32 p 1 + c <- peek32 p 2 + d <- peek32 p 3 + return $ In6Addr (a, b, c, d) + + poke p (In6Addr (a, b, c, d)) = do + poke32 p 0 a + poke32 p 1 b + poke32 p 2 c + poke32 p 3 d +#endif + +------------------------------------------------------------------------ +-- Helper functions + +foreign import ccall unsafe "string.h" memset :: Ptr a -> CInt -> CSize -> IO () + +-- | Zero a structure. 
+zeroMemory :: Ptr a -> CSize -> IO () +zeroMemory dest nbytes = memset dest 0 (fromIntegral nbytes) diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket.hsc cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket.hsc --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network/Socket.hsc 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network/Socket.hsc 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,1965 @@ +{-# LANGUAGE CPP, ScopedTypeVariables, RecordWildCards #-} +{-# OPTIONS_GHC -fno-warn-orphans #-} +{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-} +----------------------------------------------------------------------------- +-- | +-- Module : Network.Socket +-- Copyright : (c) The University of Glasgow 2001 +-- License : BSD-style (see the file libraries/network/LICENSE) +-- +-- Maintainer : libraries@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- This is the main module of the network package supposed to be +-- used with either "Network.Socket.ByteString" or +-- "Network.Socket.ByteString.Lazy" for sending/receiving. +-- +-- Here are two minimal example programs using the TCP/IP protocol: a +-- server that echoes all data that it receives back (servicing only +-- one client) and a client using it. 
+-- +-- > -- Echo server program +-- > module Main (main) where +-- > +-- > import Control.Concurrent (forkFinally) +-- > import qualified Control.Exception as E +-- > import Control.Monad (unless, forever, void) +-- > import qualified Data.ByteString as S +-- > import Network.Socket hiding (recv) +-- > import Network.Socket.ByteString (recv, sendAll) +-- > +-- > main :: IO () +-- > main = withSocketsDo $ do +-- > addr <- resolve "3000" +-- > E.bracket (open addr) close loop +-- > where +-- > resolve port = do +-- > let hints = defaultHints { +-- > addrFlags = [AI_PASSIVE] +-- > , addrSocketType = Stream +-- > } +-- > addr:_ <- getAddrInfo (Just hints) Nothing (Just port) +-- > return addr +-- > open addr = do +-- > sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) +-- > setSocketOption sock ReuseAddr 1 +-- > bind sock (addrAddress addr) +-- > -- If the prefork technique is not used, +-- > -- set CloseOnExec for the security reasons. +-- > let fd = fdSocket sock +-- > setCloseOnExecIfNeeded fd +-- > listen sock 10 +-- > return sock +-- > loop sock = forever $ do +-- > (conn, peer) <- accept sock +-- > putStrLn $ "Connection from " ++ show peer +-- > void $ forkFinally (talk conn) (\_ -> close conn) +-- > talk conn = do +-- > msg <- recv conn 1024 +-- > unless (S.null msg) $ do +-- > sendAll conn msg +-- > talk conn +-- +-- > {-# LANGUAGE OverloadedStrings #-} +-- > -- Echo client program +-- > module Main (main) where +-- > +-- > import qualified Control.Exception as E +-- > import qualified Data.ByteString.Char8 as C +-- > import Network.Socket hiding (recv) +-- > import Network.Socket.ByteString (recv, sendAll) +-- > +-- > main :: IO () +-- > main = withSocketsDo $ do +-- > addr <- resolve "127.0.0.1" "3000" +-- > E.bracket (open addr) close talk +-- > where +-- > resolve host port = do +-- > let hints = defaultHints { addrSocketType = Stream } +-- > addr:_ <- getAddrInfo (Just hints) (Just host) (Just port) +-- > return addr +-- > open 
addr = do +-- > sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) +-- > connect sock $ addrAddress addr +-- > return sock +-- > talk sock = do +-- > sendAll sock "Hello, world!" +-- > msg <- recv sock 1024 +-- > putStr "Received: " +-- > C.putStrLn msg +-- +-- The proper programming model is that one 'Socket' is handled by +-- a single thread. If multiple threads use one 'Socket' concurrently, +-- unexpected things would happen. There is one exception for multiple +-- threads vs a single 'Socket': one thread reads data from a 'Socket' +-- only and the other thread writes data to the 'Socket' only. +----------------------------------------------------------------------------- + +#include "HsNet.h" +##include "HsNetDef.h" + +module Network.Socket + ( + -- * Initialisation + withSocketsDo + -- * Address information + , getAddrInfo + -- ** Types + , HostName + , ServiceName + , AddrInfo(..) + , defaultHints + -- ** Flags + , AddrInfoFlag(..) + , addrInfoFlagImplemented + -- * Socket operations + , connect + , bind + , listen + , accept + -- ** Closing + , close + , close' + , shutdown + , ShutdownCmd(..) + -- * Socket options + , SocketOption(..) + , isSupportedSocketOption + , getSocketOption + , setSocketOption + -- * Socket + , Socket(..) + , socket + , fdSocket + , mkSocket + , socketToHandle + -- ** Types of Socket + , SocketType(..) + , isSupportedSocketType + -- ** Family + , Family(..) + , isSupportedFamily + -- ** Protocol number + , ProtocolNumber + , defaultProtocol + -- * Socket address + , SockAddr(..) 
+ , isSupportedSockAddr + , getPeerName + , getSocketName + -- ** Host address + , HostAddress + , hostAddressToTuple + , tupleToHostAddress +#if defined(IPV6_SOCKET_SUPPORT) + -- ** Host address6 + , HostAddress6 + , hostAddress6ToTuple + , tupleToHostAddress6 + -- ** Flow Info + , FlowInfo + -- ** Scope ID + , ScopeID +# if defined(HAVE_IF_NAMETOINDEX) + , ifNameToIndex + , ifIndexToName +# endif +#endif + -- ** Port number + , PortNumber + , defaultPort + , socketPortSafe + , socketPort + -- * UNIX-domain socket + , isUnixDomainSocketAvailable + , socketPair + , sendFd + , recvFd + , getPeerCredential +#if defined(IPV6_SOCKET_SUPPORT) + -- * Name information + , NameInfoFlag(..) + , getNameInfo +#endif + -- * Low level operations + , setCloseOnExecIfNeeded + , getCloseOnExec + , setNonBlockIfNeeded + , getNonBlock + -- * Sending and receiving data + , sendBuf + , recvBuf + , sendBufTo + , recvBufFrom + -- * Special constants + , maxListenQueue + -- * Deprecated + -- ** Deprecated sending and receiving + , send + , sendTo + , recv + , recvFrom + , recvLen + -- ** Deprecated address functions + , htonl + , ntohl + , inet_addr + , inet_ntoa + -- ** Deprecated socket operations + , bindSocket + , sClose + -- ** Deprecated socket status + , SocketStatus(..) -- fixme + , isConnected + , isBound + , isListening + , isReadable + , isWritable + , sIsConnected + , sIsBound + , sIsListening + , sIsReadable + , sIsWritable + -- ** Deprecated special constants + , aNY_PORT + , iNADDR_ANY +#if defined(IPV6_SOCKET_SUPPORT) + , iN6ADDR_ANY +#endif + , sOMAXCONN + , sOL_SOCKET +#ifdef SCM_RIGHTS + , sCM_RIGHTS +#endif + -- ** Decrecated internal functions + , packFamily + , unpackFamily + , packSocketType + -- ** Decrecated UNIX-domain functions +#if defined(HAVE_STRUCT_UCRED) || defined(HAVE_GETPEEREID) + -- get the credentials of our domain socket peer. 
+ , getPeerCred +#if defined(HAVE_GETPEEREID) + , getPeerEid +#endif +#endif + ) where + +import Data.Bits +import Data.Functor +import Data.List (foldl') +import Data.Maybe (isJust) +import Data.Word (Word8, Word32) +import Foreign.Ptr (Ptr, castPtr, nullPtr) +import Foreign.Storable (Storable(..)) +import Foreign.C.Error +import Foreign.C.String (CString, withCString, withCStringLen, peekCString, peekCStringLen) +import Foreign.C.Types (CUInt(..), CChar) +import Foreign.C.Types (CInt(..), CSize(..)) +import Foreign.Marshal.Alloc ( alloca, allocaBytes ) +import Foreign.Marshal.Array ( peekArray ) +import Foreign.Marshal.Utils ( maybeWith, with ) + +import System.IO +import Control.Monad (liftM, when, void) + +import qualified Control.Exception as E +import Control.Concurrent.MVar +import Data.Typeable +import System.IO.Error + +import GHC.Conc (threadWaitWrite) +# ifdef HAVE_ACCEPT4 +import GHC.Conc (threadWaitRead) +# endif +##if MIN_VERSION_base(4,3,1) +import GHC.Conc (closeFdWith) +##endif +# if defined(mingw32_HOST_OS) +import GHC.Conc (asyncDoProc) +import GHC.IO.FD (FD(..), readRawBufferPtr, writeRawBufferPtr) +import Foreign (FunPtr) +# endif +# if defined(darwin_HOST_OS) +import Data.List (delete) +# endif +import qualified GHC.IO.Device +import GHC.IO.Handle.FD +import GHC.IO.Exception +import GHC.IO +import qualified System.Posix.Internals + +import Network.Socket.Internal +import Network.Socket.Types + +import Prelude -- Silence AMP warnings + +-- | Either a host name e.g., @\"haskell.org\"@ or a numeric host +-- address string consisting of a dotted decimal IPv4 address or an +-- IPv6 address e.g., @\"192.168.0.1\"@. +type HostName = String +type ServiceName = String + +-- ---------------------------------------------------------------------------- +-- On Windows, our sockets are not put in non-blocking mode (non-blocking +-- is not supported for regular file descriptors on Windows, and it would +-- be a pain to support it only for sockets). 
So there are two cases: +-- +-- - the threaded RTS uses safe calls for socket operations to get +-- non-blocking I/O, just like the rest of the I/O library +-- +-- - with the non-threaded RTS, only some operations on sockets will be +-- non-blocking. Reads and writes go through the normal async I/O +-- system. accept() uses asyncDoProc so is non-blocking. A handful +-- of others (recvFrom, sendFd, recvFd) will block all threads - if this +-- is a problem, -threaded is the workaround. +-- +##if defined(mingw32_HOST_OS) +##define SAFE_ON_WIN safe +##else +##define SAFE_ON_WIN unsafe +##endif + +----------------------------------------------------------------------------- +-- Socket types + +#if defined(mingw32_HOST_OS) +socket2FD (MkSocket fd _ _ _ _) = + -- HACK, 1 means True + FD{fdFD = fd,fdIsSocket_ = 1} +#endif + +-- | Smart constructor for constructing a 'Socket'. It should only be +-- called once for every new file descriptor. The caller must make +-- sure that the socket is in non-blocking mode. See +-- 'setNonBlockIfNeeded'. +mkSocket :: CInt + -> Family + -> SocketType + -> ProtocolNumber + -> SocketStatus + -> IO Socket +mkSocket fd fam sType pNum stat = do + mStat <- newMVar stat + withSocketsDo $ return () + return $ MkSocket fd fam sType pNum mStat + +-- | This is the default protocol for a given service. +defaultProtocol :: ProtocolNumber +defaultProtocol = 0 + +----------------------------------------------------------------------------- +-- SockAddr + +instance Show SockAddr where +#if defined(DOMAIN_SOCKET_SUPPORT) + showsPrec _ (SockAddrUnix str) = showString str +#endif + showsPrec _ (SockAddrInet port ha) + = showString (unsafePerformIO (inet_ntoa ha)) + . showString ":" + . shows port +#if defined(IPV6_SOCKET_SUPPORT) + showsPrec _ addr@(SockAddrInet6 port _ _ _) + = showChar '[' + . showString (unsafePerformIO $ + fst `liftM` getNameInfo [NI_NUMERICHOST] True False addr >>= + maybe (fail "showsPrec: impossible internal error") return) + . 
showString "]:" + . shows port +#endif +#if defined(CAN_SOCKET_SUPPORT) + showsPrec _ (SockAddrCan ifidx) = shows ifidx +#endif +#if !(defined(IPV6_SOCKET_SUPPORT) \ + && defined(DOMAIN_SOCKET_SUPPORT) && defined(CAN_SOCKET_SUPPORT)) + showsPrec _ _ = error "showsPrec: not supported" +#endif + +----------------------------------------------------------------------------- +-- Connection Functions + +-- In the following connection and binding primitives. The names of +-- the equivalent C functions have been preserved where possible. It +-- should be noted that some of these names used in the C library, +-- \tr{bind} in particular, have a different meaning to many Haskell +-- programmers and have thus been renamed by appending the prefix +-- Socket. + +-- | Create a new socket using the given address family, socket type +-- and protocol number. The address family is usually 'AF_INET', +-- 'AF_INET6', or 'AF_UNIX'. The socket type is usually 'Stream' or +-- 'Datagram'. The protocol number is usually 'defaultProtocol'. +-- If 'AF_INET6' is used and the socket type is 'Stream' or 'Datagram', +-- the 'IPv6Only' socket option is set to 0 so that both IPv4 and IPv6 +-- can be handled with one socket. 
+-- +-- >>> let hints = defaultHints { addrFlags = [AI_NUMERICHOST, AI_NUMERICSERV], addrSocketType = Stream } +-- >>> addr:_ <- getAddrInfo (Just hints) (Just "127.0.0.1") (Just "5000") +-- >>> sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) +-- >>> bind sock (addrAddress addr) +-- >>> getSocketName sock +-- 127.0.0.1:5000 +socket :: Family -- Family Name (usually AF_INET) + -> SocketType -- Socket Type (usually Stream) + -> ProtocolNumber -- Protocol Number (getProtocolByName to find value) + -> IO Socket -- Unconnected Socket +socket family stype protocol = do + c_stype <- packSocketTypeOrThrow "socket" stype + fd <- throwSocketErrorIfMinus1Retry "Network.Socket.socket" $ + c_socket (packFamily family) c_stype protocol + setNonBlockIfNeeded fd + sock <- mkSocket fd family stype protocol NotConnected +#if HAVE_DECL_IPV6_V6ONLY + -- The default value of the IPv6Only option is platform specific, + -- so we explicitly set it to 0 to provide a common default. +# if defined(mingw32_HOST_OS) + -- The IPv6Only option is only supported on Windows Vista and later, + -- so trying to change it might throw an error. + when (family == AF_INET6 && (stype == Stream || stype == Datagram)) $ + E.catch (setSocketOption sock IPv6Only 0) $ (\(_ :: E.IOException) -> return ()) +# elif !defined(__OpenBSD__) + when (family == AF_INET6 && (stype == Stream || stype == Datagram)) $ + setSocketOption sock IPv6Only 0 `onException` close sock +# endif +#endif + return sock + +-- | Build a pair of connected socket objects using the given address +-- family, socket type, and protocol number. Address family, socket +-- type, and protocol number are as for the 'socket' function above. +-- Availability: Unix. +socketPair :: Family -- Family Name (usually AF_INET or AF_INET6) + -> SocketType -- Socket Type (usually Stream) + -> ProtocolNumber -- Protocol Number + -> IO (Socket, Socket) -- unnamed and connected. 
+#if defined(DOMAIN_SOCKET_SUPPORT) +socketPair family stype protocol = do + allocaBytes (2 * sizeOf (1 :: CInt)) $ \ fdArr -> do + c_stype <- packSocketTypeOrThrow "socketPair" stype + _rc <- throwSocketErrorIfMinus1Retry "Network.Socket.socketpair" $ + c_socketpair (packFamily family) c_stype protocol fdArr + [fd1,fd2] <- peekArray 2 fdArr + s1 <- mkNonBlockingSocket fd1 + s2 <- mkNonBlockingSocket fd2 + return (s1,s2) + where + mkNonBlockingSocket fd = do + setNonBlockIfNeeded fd + mkSocket fd family stype protocol Connected + +foreign import ccall unsafe "socketpair" + c_socketpair :: CInt -> CInt -> CInt -> Ptr CInt -> IO CInt +#else +socketPair _ _ _ = error "Network.Socket.socketPair" +#endif + +----------------------------------------------------------------------------- + +#if defined(mingw32_HOST_OS) +#else +fGetFd :: CInt +fGetFd = #const F_GETFD +fGetFl :: CInt +fGetFl = #const F_GETFL +fdCloexec :: CInt +fdCloexec = #const FD_CLOEXEC +oNonBlock :: CInt +oNonBlock = #const O_NONBLOCK +# if defined(HAVE_ACCEPT4) +sockNonBlock :: CInt +sockNonBlock = #const SOCK_NONBLOCK +sockCloexec :: CInt +sockCloexec = #const SOCK_CLOEXEC +# endif +#endif + +-- | Set the nonblocking flag on Unix. +-- On Windows, nothing is done. +setNonBlockIfNeeded :: CInt -> IO () +setNonBlockIfNeeded fd = + System.Posix.Internals.setNonBlockingFD fd True + +-- | Set the close_on_exec flag on Unix. +-- On Windows, nothing is done. +-- +-- Since 2.7.0.0. +setCloseOnExecIfNeeded :: CInt -> IO () +#if defined(mingw32_HOST_OS) +setCloseOnExecIfNeeded _ = return () +#else +setCloseOnExecIfNeeded fd = System.Posix.Internals.setCloseOnExec fd +#endif + +#if !defined(mingw32_HOST_OS) +foreign import ccall unsafe "fcntl" + c_fcntl_read :: CInt -> CInt -> CInt -> IO CInt +#endif + +-- | Get the nonblocking flag. +-- On Windows, this function always returns 'False'. +-- +-- Since 2.7.0.0. 
+getCloseOnExec :: CInt -> IO Bool +#if defined(mingw32_HOST_OS) +getCloseOnExec _ = return False +#else +getCloseOnExec fd = do + flags <- c_fcntl_read fd fGetFd 0 + let ret = flags .&. fdCloexec + return (ret /= 0) +#endif + +-- | Get the close_on_exec flag. +-- On Windows, this function always returns 'False'. +-- +-- Since 2.7.0.0. +getNonBlock :: CInt -> IO Bool +#if defined(mingw32_HOST_OS) +getNonBlock _ = return False +#else +getNonBlock fd = do + flags <- c_fcntl_read fd fGetFl 0 + let ret = flags .&. oNonBlock + return (ret /= 0) +#endif + +----------------------------------------------------------------------------- +-- Binding a socket + +-- | Bind the socket to an address. The socket must not already be +-- bound. The 'Family' passed to @bind@ must be the +-- same as that passed to 'socket'. If the special port number +-- 'defaultPort' is passed then the system assigns the next available +-- use port. +bind :: Socket -- Unconnected Socket + -> SockAddr -- Address to Bind to + -> IO () +bind (MkSocket s _family _stype _protocol socketStatus) addr = do + modifyMVar_ socketStatus $ \ status -> do + if status /= NotConnected + then + ioError $ userError $ + "Network.Socket.bind: can't bind to socket with status " ++ show status + else do + withSockAddr addr $ \p_addr sz -> do + _status <- throwSocketErrorIfMinus1Retry "Network.Socket.bind" $ + c_bind s p_addr (fromIntegral sz) + return Bound + +----------------------------------------------------------------------------- +-- Connecting a socket + +-- | Connect to a remote socket at address. 
+connect :: Socket -- Unconnected Socket + -> SockAddr -- Socket address stuff + -> IO () +connect sock@(MkSocket s _family _stype _protocol socketStatus) addr = withSocketsDo $ do + modifyMVar_ socketStatus $ \currentStatus -> do + if currentStatus /= NotConnected && currentStatus /= Bound + then + ioError $ userError $ + errLoc ++ ": can't connect to socket with status " ++ show currentStatus + else do + withSockAddr addr $ \p_addr sz -> do + + let connectLoop = do + r <- c_connect s p_addr (fromIntegral sz) + if r == -1 + then do +#if !(defined(HAVE_WINSOCK2_H)) + err <- getErrno + case () of + _ | err == eINTR -> connectLoop + _ | err == eINPROGRESS -> connectBlocked +-- _ | err == eAGAIN -> connectBlocked + _otherwise -> throwSocketError errLoc +#else + throwSocketError errLoc +#endif + else return () + + connectBlocked = do + threadWaitWrite (fromIntegral s) + err <- getSocketOption sock SoError + if (err == 0) + then return () + else throwSocketErrorCode errLoc (fromIntegral err) + + connectLoop + return Connected + where + errLoc = "Network.Socket.connect: " ++ show sock + +----------------------------------------------------------------------------- +-- Listen + +-- | Listen for connections made to the socket. The second argument +-- specifies the maximum number of queued connections and should be at +-- least 1; the maximum value is system-dependent (usually 5). 
+listen :: Socket -- Connected & Bound Socket + -> Int -- Queue Length + -> IO () +listen (MkSocket s _family _stype _protocol socketStatus) backlog = do + modifyMVar_ socketStatus $ \ status -> do + if status /= Bound + then + ioError $ userError $ + "Network.Socket.listen: can't listen on socket with status " ++ show status + else do + throwSocketErrorIfMinus1Retry_ "Network.Socket.listen" $ + c_listen s (fromIntegral backlog) + return Listening + +----------------------------------------------------------------------------- +-- Accept +-- +-- A call to `accept' only returns when data is available on the given +-- socket, unless the socket has been set to non-blocking. It will +-- return a new socket which should be used to read the incoming data and +-- should then be closed. Using the socket returned by `accept' allows +-- incoming requests to be queued on the original socket. + +-- | Accept a connection. The socket must be bound to an address and +-- listening for connections. The return value is a pair @(conn, +-- address)@ where @conn@ is a new socket object usable to send and +-- receive data on the connection, and @address@ is the address bound +-- to the socket on the other end of the connection. 
+accept :: Socket -- Queue Socket + -> IO (Socket, -- Readable Socket + SockAddr) -- Peer details + +accept sock@(MkSocket s family stype protocol status) = do + currentStatus <- readMVar status + if not $ isAcceptable family stype currentStatus + then + ioError $ userError $ + "Network.Socket.accept: can't accept socket (" ++ + show (family, stype, protocol) ++ ") with status " ++ + show currentStatus + else do + let sz = sizeOfSockAddrByFamily family + allocaBytes sz $ \ sockaddr -> do +#if defined(mingw32_HOST_OS) + new_sock <- + if threaded + then with (fromIntegral sz) $ \ ptr_len -> + throwSocketErrorIfMinus1Retry "Network.Socket.accept" $ + c_accept_safe s sockaddr ptr_len + else do + paramData <- c_newAcceptParams s (fromIntegral sz) sockaddr + rc <- asyncDoProc c_acceptDoProc paramData + new_sock <- c_acceptNewSock paramData + c_free paramData + when (rc /= 0) $ + throwSocketErrorCode "Network.Socket.accept" (fromIntegral rc) + return new_sock +#else + with (fromIntegral sz) $ \ ptr_len -> do +# ifdef HAVE_ACCEPT4 + new_sock <- throwSocketErrorIfMinus1RetryMayBlock "Network.Socket.accept" + (threadWaitRead (fromIntegral s)) + (c_accept4 s sockaddr ptr_len (sockNonBlock .|. 
sockCloexec)) +# else + new_sock <- throwSocketErrorWaitRead sock "Network.Socket.accept" + (c_accept s sockaddr ptr_len) + setNonBlockIfNeeded new_sock + setCloseOnExecIfNeeded new_sock +# endif /* HAVE_ACCEPT4 */ +#endif + addr <- peekSockAddr sockaddr + sock' <- mkSocket new_sock family stype protocol Connected + return (sock', addr) + +#if defined(mingw32_HOST_OS) +foreign import ccall unsafe "HsNet.h acceptNewSock" + c_acceptNewSock :: Ptr () -> IO CInt +foreign import ccall unsafe "HsNet.h newAcceptParams" + c_newAcceptParams :: CInt -> CInt -> Ptr a -> IO (Ptr ()) +foreign import ccall unsafe "HsNet.h &acceptDoProc" + c_acceptDoProc :: FunPtr (Ptr () -> IO Int) +foreign import ccall unsafe "free" + c_free:: Ptr a -> IO () +#endif + +----------------------------------------------------------------------------- +-- ** Sending and receiving data + +-- $sendrecv +-- +-- Do not use the @send@ and @recv@ functions defined in this section +-- in new code, as they incorrectly represent binary data as a Unicode +-- string. As a result, these functions are inefficient and may lead +-- to bugs in the program. Instead use the @send@ and @recv@ +-- functions defined in the "Network.Socket.ByteString" module. + +----------------------------------------------------------------------------- +-- sendTo & recvFrom + +-- | Send data to the socket. The recipient can be specified +-- explicitly, so the socket need not be in a connected state. +-- Returns the number of bytes sent. Applications are responsible for +-- ensuring that all data has been sent. 
+-- +-- NOTE: blocking on Windows unless you compile with -threaded (see +-- GHC ticket #1129) +{-# DEPRECATED sendTo "Use sendTo defined in \"Network.Socket.ByteString\"" #-} +sendTo :: Socket -- (possibly) bound/connected Socket + -> String -- Data to send + -> SockAddr + -> IO Int -- Number of Bytes sent +sendTo sock xs addr = do + withCStringLen xs $ \(str, len) -> do + sendBufTo sock str len addr + +-- | Send data to the socket. The recipient can be specified +-- explicitly, so the socket need not be in a connected state. +-- Returns the number of bytes sent. Applications are responsible for +-- ensuring that all data has been sent. +sendBufTo :: Socket -- (possibly) bound/connected Socket + -> Ptr a -> Int -- Data to send + -> SockAddr + -> IO Int -- Number of Bytes sent +sendBufTo sock@(MkSocket s _family _stype _protocol _status) ptr nbytes addr = do + withSockAddr addr $ \p_addr sz -> do + liftM fromIntegral $ + throwSocketErrorWaitWrite sock "Network.Socket.sendBufTo" $ + c_sendto s ptr (fromIntegral $ nbytes) 0{-flags-} + p_addr (fromIntegral sz) + +-- | Receive data from the socket. The socket need not be in a +-- connected state. Returns @(bytes, nbytes, address)@ where @bytes@ +-- is a @String@ of length @nbytes@ representing the data received and +-- @address@ is a 'SockAddr' representing the address of the sending +-- socket. +-- +-- NOTE: blocking on Windows unless you compile with -threaded (see +-- GHC ticket #1129) +{-# DEPRECATED recvFrom "Use recvFrom defined in \"Network.Socket.ByteString\"" #-} +recvFrom :: Socket -> Int -> IO (String, Int, SockAddr) +recvFrom sock nbytes = + allocaBytes nbytes $ \ptr -> do + (len, sockaddr) <- recvBufFrom sock ptr nbytes + str <- peekCStringLen (ptr, len) + return (str, len, sockaddr) + +-- | Receive data from the socket, writing it into buffer instead of +-- creating a new string. The socket need not be in a connected +-- state. 
Returns @(nbytes, address)@ where @nbytes@ is the number of +-- bytes received and @address@ is a 'SockAddr' representing the +-- address of the sending socket. +-- +-- NOTE: blocking on Windows unless you compile with -threaded (see +-- GHC ticket #1129) +recvBufFrom :: Socket -> Ptr a -> Int -> IO (Int, SockAddr) +recvBufFrom sock@(MkSocket s family _stype _protocol _status) ptr nbytes + | nbytes <= 0 = ioError (mkInvalidRecvArgError "Network.Socket.recvBufFrom") + | otherwise = + withNewSockAddr family $ \ptr_addr sz -> do + alloca $ \ptr_len -> do + poke ptr_len (fromIntegral sz) + len <- throwSocketErrorWaitRead sock "Network.Socket.recvBufFrom" $ + c_recvfrom s ptr (fromIntegral nbytes) 0{-flags-} + ptr_addr ptr_len + let len' = fromIntegral len + if len' == 0 + then ioError (mkEOFError "Network.Socket.recvFrom") + else do + flg <- isConnected sock + -- For at least one implementation (WinSock 2), recvfrom() ignores + -- filling in the sockaddr for connected TCP sockets. Cope with + -- this by using getPeerName instead. + sockaddr <- + if flg then + getPeerName sock + else + peekSockAddr ptr_addr + return (len', sockaddr) + +----------------------------------------------------------------------------- +-- send & recv + +-- | Send data to the socket. The socket must be connected to a remote +-- socket. Returns the number of bytes sent. Applications are +-- responsible for ensuring that all data has been sent. +-- +-- Sending data to closed socket may lead to undefined behaviour. +{-# DEPRECATED send "Use send defined in \"Network.Socket.ByteString\"" #-} +send :: Socket -- Bound/Connected Socket + -> String -- Data to send + -> IO Int -- Number of Bytes sent +send sock xs = withCStringLen xs $ \(str, len) -> + sendBuf sock (castPtr str) len + +-- | Send data to the socket. The socket must be connected to a remote +-- socket. Returns the number of bytes sent. Applications are +-- responsible for ensuring that all data has been sent. 
+-- +-- Sending data to closed socket may lead to undefined behaviour. +sendBuf :: Socket -- Bound/Connected Socket + -> Ptr Word8 -- Pointer to the data to send + -> Int -- Length of the buffer + -> IO Int -- Number of Bytes sent +sendBuf sock@(MkSocket s _family _stype _protocol _status) str len = do + liftM fromIntegral $ +#if defined(mingw32_HOST_OS) +-- writeRawBufferPtr is supposed to handle checking for errors, but it's broken +-- on x86_64 because of GHC bug #12010 so we duplicate the check here. The call +-- to throwSocketErrorIfMinus1Retry can be removed when no GHC version with the +-- bug is supported. + throwSocketErrorIfMinus1Retry "Network.Socket.sendBuf" $ writeRawBufferPtr + "Network.Socket.sendBuf" + (socket2FD sock) + (castPtr str) + 0 + (fromIntegral len) +#else + throwSocketErrorWaitWrite sock "Network.Socket.sendBuf" $ + c_send s str (fromIntegral len) 0{-flags-} +#endif + + +-- | Receive data from the socket. The socket must be in a connected +-- state. This function may return fewer bytes than specified. If the +-- message is longer than the specified length, it may be discarded +-- depending on the type of socket. This function may block until a +-- message arrives. +-- +-- Considering hardware and network realities, the maximum number of +-- bytes to receive should be a small power of 2, e.g., 4096. +-- +-- For TCP sockets, a zero length return value means the peer has +-- closed its half side of the connection. +-- +-- Receiving data from closed socket may lead to undefined behaviour. 
+{-# DEPRECATED recv "Use recv defined in \"Network.Socket.ByteString\"" #-} +recv :: Socket -> Int -> IO String +recv sock l = fst <$> recvLen sock l + +{-# DEPRECATED recvLen "Use recv defined in \"Network.Socket.ByteString\" with \"Data.Bytestring.length\"" #-} +recvLen :: Socket -> Int -> IO (String, Int) +recvLen sock nbytes = + allocaBytes nbytes $ \ptr -> do + len <- recvBuf sock ptr nbytes + s <- peekCStringLen (castPtr ptr,len) + return (s, len) + +-- | Receive data from the socket. The socket must be in a connected +-- state. This function may return fewer bytes than specified. If the +-- message is longer than the specified length, it may be discarded +-- depending on the type of socket. This function may block until a +-- message arrives. +-- +-- Considering hardware and network realities, the maximum number of +-- bytes to receive should be a small power of 2, e.g., 4096. +-- +-- For TCP sockets, a zero length return value means the peer has +-- closed its half side of the connection. +-- +-- Receiving data from closed socket may lead to undefined behaviour. +recvBuf :: Socket -> Ptr Word8 -> Int -> IO Int +recvBuf sock@(MkSocket s _family _stype _protocol _status) ptr nbytes + | nbytes <= 0 = ioError (mkInvalidRecvArgError "Network.Socket.recvBuf") + | otherwise = do + len <- +#if defined(mingw32_HOST_OS) +-- see comment in sendBuf above. 
+ throwSocketErrorIfMinus1Retry "Network.Socket.recvBuf" $ + readRawBufferPtr "Network.Socket.recvBuf" + (socket2FD sock) ptr 0 (fromIntegral nbytes) +#else + throwSocketErrorWaitRead sock "Network.Socket.recvBuf" $ + c_recv s (castPtr ptr) (fromIntegral nbytes) 0{-flags-} +#endif + let len' = fromIntegral len + if len' == 0 + then ioError (mkEOFError "Network.Socket.recvBuf") + else return len' + + +-- --------------------------------------------------------------------------- +-- socketPort +-- +-- The port number the given socket is currently connected to can be +-- determined by calling $port$, is generally only useful when bind +-- was given $aNY\_PORT$. + +-- | Getting the port of socket. +-- `IOError` is thrown if a port is not available. +socketPort :: Socket -- Connected & Bound Socket + -> IO PortNumber -- Port Number of Socket +socketPort sock@(MkSocket _ AF_INET _ _ _) = do + (SockAddrInet port _) <- getSocketName sock + return port +#if defined(IPV6_SOCKET_SUPPORT) +socketPort sock@(MkSocket _ AF_INET6 _ _ _) = do + (SockAddrInet6 port _ _ _) <- getSocketName sock + return port +#endif +socketPort (MkSocket _ family _ _ _) = + ioError $ userError $ + "Network.Socket.socketPort: address family '" ++ show family ++ + "' not supported." + + +-- --------------------------------------------------------------------------- +-- socketPortSafe +-- | Getting the port of socket. +socketPortSafe :: Socket -- Connected & Bound Socket + -> IO (Maybe PortNumber) -- Port Number of Socket +socketPortSafe s = do + sa <- getSocketName s + return $ case sa of + SockAddrInet port _ -> Just port +#if defined(IPV6_SOCKET_SUPPORT) + SockAddrInet6 port _ _ _ -> Just port +#endif + _ -> Nothing + +-- --------------------------------------------------------------------------- +-- getPeerName + +-- Calling $getPeerName$ returns the address details of the machine, +-- other than the local one, which is connected to the socket. 
This is +-- used in programs such as FTP to determine where to send the +-- returning data. The corresponding call to get the details of the +-- local machine is $getSocketName$. + +getPeerName :: Socket -> IO SockAddr +getPeerName (MkSocket s family _ _ _) = do + withNewSockAddr family $ \ptr sz -> do + with (fromIntegral sz) $ \int_star -> do + throwSocketErrorIfMinus1Retry_ "Network.Socket.getPeerName" $ + c_getpeername s ptr int_star + _sz <- peek int_star + peekSockAddr ptr + +getSocketName :: Socket -> IO SockAddr +getSocketName (MkSocket s family _ _ _) = do + withNewSockAddr family $ \ptr sz -> do + with (fromIntegral sz) $ \int_star -> do + throwSocketErrorIfMinus1Retry_ "Network.Socket.getSocketName" $ + c_getsockname s ptr int_star + peekSockAddr ptr + +----------------------------------------------------------------------------- +-- Socket Properties + +-- | Socket options for use with 'setSocketOption' and 'getSocketOption'. +-- +-- The existence of a constructor does not imply that the relevant option +-- is supported on your system: see 'isSupportedSocketOption' +data SocketOption + = Debug -- ^ SO_DEBUG + | ReuseAddr -- ^ SO_REUSEADDR + | Type -- ^ SO_TYPE + | SoError -- ^ SO_ERROR + | DontRoute -- ^ SO_DONTROUTE + | Broadcast -- ^ SO_BROADCAST + | SendBuffer -- ^ SO_SNDBUF + | RecvBuffer -- ^ SO_RCVBUF + | KeepAlive -- ^ SO_KEEPALIVE + | OOBInline -- ^ SO_OOBINLINE + | TimeToLive -- ^ IP_TTL + | MaxSegment -- ^ TCP_MAXSEG + | NoDelay -- ^ TCP_NODELAY + | Cork -- ^ TCP_CORK + | Linger -- ^ SO_LINGER + | ReusePort -- ^ SO_REUSEPORT + | RecvLowWater -- ^ SO_RCVLOWAT + | SendLowWater -- ^ SO_SNDLOWAT + | RecvTimeOut -- ^ SO_RCVTIMEO + | SendTimeOut -- ^ SO_SNDTIMEO + | UseLoopBack -- ^ SO_USELOOPBACK + | UserTimeout -- ^ TCP_USER_TIMEOUT + | IPv6Only -- ^ IPV6_V6ONLY + | CustomSockOpt (CInt, CInt) + deriving (Show, Typeable) + +-- | Does the 'SocketOption' exist on this system? 
+isSupportedSocketOption :: SocketOption -> Bool +isSupportedSocketOption = isJust . packSocketOption + +-- | For a socket option, return Just (level, value) where level is the +-- corresponding C option level constant (e.g. SOL_SOCKET) and value is +-- the option constant itself (e.g. SO_DEBUG) +-- If either constant does not exist, return Nothing. +packSocketOption :: SocketOption -> Maybe (CInt, CInt) +packSocketOption so = + -- The Just here is a hack to disable GHC's overlapping pattern detection: + -- the problem is if all constants are present, the fallback pattern is + -- redundant, but if they aren't then it isn't. Hence we introduce an + -- extra pattern (Nothing) that can't possibly happen, so that the + -- fallback is always (in principle) necessary. + -- I feel a little bad for including this, but such are the sacrifices we + -- make while working with CPP - excluding the fallback pattern correctly + -- would be a serious nuisance. + -- (NB: comments elsewhere in this file refer to this one) + case Just so of +#ifdef SOL_SOCKET +#ifdef SO_DEBUG + Just Debug -> Just ((#const SOL_SOCKET), (#const SO_DEBUG)) +#endif +#ifdef SO_REUSEADDR + Just ReuseAddr -> Just ((#const SOL_SOCKET), (#const SO_REUSEADDR)) +#endif +#ifdef SO_TYPE + Just Type -> Just ((#const SOL_SOCKET), (#const SO_TYPE)) +#endif +#ifdef SO_ERROR + Just SoError -> Just ((#const SOL_SOCKET), (#const SO_ERROR)) +#endif +#ifdef SO_DONTROUTE + Just DontRoute -> Just ((#const SOL_SOCKET), (#const SO_DONTROUTE)) +#endif +#ifdef SO_BROADCAST + Just Broadcast -> Just ((#const SOL_SOCKET), (#const SO_BROADCAST)) +#endif +#ifdef SO_SNDBUF + Just SendBuffer -> Just ((#const SOL_SOCKET), (#const SO_SNDBUF)) +#endif +#ifdef SO_RCVBUF + Just RecvBuffer -> Just ((#const SOL_SOCKET), (#const SO_RCVBUF)) +#endif +#ifdef SO_KEEPALIVE + Just KeepAlive -> Just ((#const SOL_SOCKET), (#const SO_KEEPALIVE)) +#endif +#ifdef SO_OOBINLINE + Just OOBInline -> Just ((#const SOL_SOCKET), (#const SO_OOBINLINE)) +#endif 
+#ifdef SO_LINGER + Just Linger -> Just ((#const SOL_SOCKET), (#const SO_LINGER)) +#endif +#ifdef SO_REUSEPORT + Just ReusePort -> Just ((#const SOL_SOCKET), (#const SO_REUSEPORT)) +#endif +#ifdef SO_RCVLOWAT + Just RecvLowWater -> Just ((#const SOL_SOCKET), (#const SO_RCVLOWAT)) +#endif +#ifdef SO_SNDLOWAT + Just SendLowWater -> Just ((#const SOL_SOCKET), (#const SO_SNDLOWAT)) +#endif +#ifdef SO_RCVTIMEO + Just RecvTimeOut -> Just ((#const SOL_SOCKET), (#const SO_RCVTIMEO)) +#endif +#ifdef SO_SNDTIMEO + Just SendTimeOut -> Just ((#const SOL_SOCKET), (#const SO_SNDTIMEO)) +#endif +#ifdef SO_USELOOPBACK + Just UseLoopBack -> Just ((#const SOL_SOCKET), (#const SO_USELOOPBACK)) +#endif +#endif // SOL_SOCKET +#if HAVE_DECL_IPPROTO_IP +#ifdef IP_TTL + Just TimeToLive -> Just ((#const IPPROTO_IP), (#const IP_TTL)) +#endif +#endif // HAVE_DECL_IPPROTO_IP +#if HAVE_DECL_IPPROTO_TCP +#ifdef TCP_MAXSEG + Just MaxSegment -> Just ((#const IPPROTO_TCP), (#const TCP_MAXSEG)) +#endif +#ifdef TCP_NODELAY + Just NoDelay -> Just ((#const IPPROTO_TCP), (#const TCP_NODELAY)) +#endif +#ifdef TCP_USER_TIMEOUT + Just UserTimeout -> Just ((#const IPPROTO_TCP), (#const TCP_USER_TIMEOUT)) +#endif +#ifdef TCP_CORK + Just Cork -> Just ((#const IPPROTO_TCP), (#const TCP_CORK)) +#endif +#endif // HAVE_DECL_IPPROTO_TCP +#if HAVE_DECL_IPPROTO_IPV6 +#if HAVE_DECL_IPV6_V6ONLY + Just IPv6Only -> Just ((#const IPPROTO_IPV6), (#const IPV6_V6ONLY)) +#endif +#endif // HAVE_DECL_IPPROTO_IPV6 + Just (CustomSockOpt opt) -> Just opt + _ -> Nothing + +-- | Return the option level and option value if they exist, +-- otherwise throw an error that begins "Network.Socket." ++ the String +-- parameter +packSocketOption' :: String -> SocketOption -> IO (CInt, CInt) +packSocketOption' caller so = maybe err return (packSocketOption so) + where + err = ioError . userError . 
concat $ ["Network.Socket.", caller, + ": socket option ", show so, " unsupported on this system"] + +-- | Set a socket option that expects an Int value. +-- There is currently no API to set e.g. the timeval socket options +setSocketOption :: Socket + -> SocketOption -- Option Name + -> Int -- Option Value + -> IO () +setSocketOption (MkSocket s _ _ _ _) so v = do + (level, opt) <- packSocketOption' "setSocketOption" so + with (fromIntegral v) $ \ptr_v -> do + throwSocketErrorIfMinus1_ "Network.Socket.setSocketOption" $ + c_setsockopt s level opt ptr_v + (fromIntegral (sizeOf (undefined :: CInt))) + return () + + +-- | Get a socket option that gives an Int value. +-- There is currently no API to get e.g. the timeval socket options +getSocketOption :: Socket + -> SocketOption -- Option Name + -> IO Int -- Option Value +getSocketOption (MkSocket s _ _ _ _) so = do + (level, opt) <- packSocketOption' "getSocketOption" so + alloca $ \ptr_v -> + with (fromIntegral (sizeOf (undefined :: CInt))) $ \ptr_sz -> do + throwSocketErrorIfMinus1Retry_ "Network.Socket.getSocketOption" $ + c_getsockopt s level opt ptr_v ptr_sz + fromIntegral `liftM` peek ptr_v + + +-- | Getting process ID, user ID and group ID for UNIX-domain sockets. +-- +-- This is implemented with SO_PEERCRED on Linux and getpeereid() +-- on BSD variants. Unfortunately, on some BSD variants +-- getpeereid() returns unexpected results, rather than an error, +-- for AF_INET sockets. It is the user's responsibility to make sure +-- that the socket is a UNIX-domain socket. +-- Also, on some BSD variants, getpeereid() does not return credentials +-- for sockets created via 'socketPair', only separately created and then +-- explicitly connected UNIX-domain sockets work on such systems. +-- +-- Since 2.7.0.0. 
+getPeerCredential :: Socket -> IO (Maybe CUInt, Maybe CUInt, Maybe CUInt) +#ifdef HAVE_STRUCT_UCRED +getPeerCredential sock = do + (pid, uid, gid) <- getPeerCred sock + if uid == maxBound then + return (Nothing, Nothing, Nothing) + else + return (Just pid, Just uid, Just gid) +#elif defined(HAVE_GETPEEREID) +getPeerCredential sock = E.handle (\(E.SomeException _) -> return (Nothing,Nothing,Nothing)) $ do + (uid, gid) <- getPeerEid sock + return (Nothing, Just uid, Just gid) +#else +getPeerCredential _ = return (Nothing, Nothing, Nothing) +#endif + +#if defined(HAVE_STRUCT_UCRED) || defined(HAVE_GETPEEREID) +{-# DEPRECATED getPeerCred "Use getPeerCredential instead" #-} +-- | Returns the processID, userID and groupID of the socket's peer. +-- +-- Only available on platforms that support SO_PEERCRED or GETPEEREID(3) +-- on domain sockets. +-- GETPEEREID(3) returns userID and groupID. processID is always 0. +getPeerCred :: Socket -> IO (CUInt, CUInt, CUInt) +getPeerCred sock = do +#ifdef HAVE_STRUCT_UCRED + let fd = fdSocket sock + let sz = (#const sizeof(struct ucred)) + allocaBytes sz $ \ ptr_cr -> + with (fromIntegral sz) $ \ ptr_sz -> do + _ <- ($) throwSocketErrorIfMinus1Retry "Network.Socket.getPeerCred" $ + c_getsockopt fd (#const SOL_SOCKET) (#const SO_PEERCRED) ptr_cr ptr_sz + pid <- (#peek struct ucred, pid) ptr_cr + uid <- (#peek struct ucred, uid) ptr_cr + gid <- (#peek struct ucred, gid) ptr_cr + return (pid, uid, gid) +#else + (uid,gid) <- getPeerEid sock + return (0,uid,gid) +#endif + +#ifdef HAVE_GETPEEREID +{-# DEPRECATED getPeerEid "Use getPeerCredential instead" #-} +-- | The getpeereid() function returns the effective user and group IDs of the +-- peer connected to a UNIX-domain socket +getPeerEid :: Socket -> IO (CUInt, CUInt) +getPeerEid sock = do + let fd = fdSocket sock + alloca $ \ ptr_uid -> + alloca $ \ ptr_gid -> do + throwSocketErrorIfMinus1Retry_ "Network.Socket.getPeerEid" $ + c_getpeereid fd ptr_uid ptr_gid + uid <- peek ptr_uid + gid 
<- peek ptr_gid + return (uid, gid) +#endif +#endif + +-- | Whether or not UNIX-domain sockets are available. +-- +-- Since 3.0.0.0. +isUnixDomainSocketAvailable :: Bool +#if defined(DOMAIN_SOCKET_SUPPORT) +isUnixDomainSocketAvailable = True +#else +isUnixDomainSocketAvailable = False +#endif + +##if !(MIN_VERSION_base(4,3,1)) +closeFdWith closer fd = closer fd +##endif + +-- sending/receiving ancillary socket data; low-level mechanism +-- for transmitting file descriptors, mainly. +sendFd :: Socket -> CInt -> IO () +#if defined(DOMAIN_SOCKET_SUPPORT) +sendFd sock outfd = do + _ <- throwSocketErrorWaitWrite sock "Network.Socket.sendFd" $ c_sendFd (fdSocket sock) outfd + return () +foreign import ccall SAFE_ON_WIN "sendFd" c_sendFd :: CInt -> CInt -> IO CInt +#else +sendFd _ _ = error "Network.Socket.sendFd" +#endif + +-- | Receive a file descriptor over a domain socket. Note that the resulting +-- file descriptor may have to be put into non-blocking mode in order to be +-- used safely. See 'setNonBlockIfNeeded'. +recvFd :: Socket -> IO CInt +#if defined(DOMAIN_SOCKET_SUPPORT) +recvFd sock = do + theFd <- throwSocketErrorWaitRead sock "Network.Socket.recvFd" $ + c_recvFd (fdSocket sock) + return theFd +foreign import ccall SAFE_ON_WIN "recvFd" c_recvFd :: CInt -> IO CInt +#else +recvFd _ = error "Network.Socket.recvFd" +#endif + +-- --------------------------------------------------------------------------- +-- Utility Functions + +{-# DEPRECATED aNY_PORT "Use defaultPort instead" #-} +aNY_PORT :: PortNumber +aNY_PORT = 0 + +defaultPort :: PortNumber +defaultPort = 0 + +-- | The IPv4 wild card address. + +{-# DEPRECATED iNADDR_ANY "Use getAddrInfo instead" #-} +iNADDR_ANY :: HostAddress +iNADDR_ANY = htonl (#const INADDR_ANY) + +-- | Converts the from host byte order to network byte order. +foreign import CALLCONV unsafe "htonl" htonl :: Word32 -> Word32 +-- | Converts the from network byte order to host byte order. 
+foreign import CALLCONV unsafe "ntohl" ntohl :: Word32 -> Word32 + +{-# DEPRECATED htonl "Use getAddrInfo instead" #-} +{-# DEPRECATED ntohl "Use getAddrInfo instead" #-} + +#if defined(IPV6_SOCKET_SUPPORT) +-- | The IPv6 wild card address. + +{-# DEPRECATED iN6ADDR_ANY "Use getAddrInfo instead" #-} +iN6ADDR_ANY :: HostAddress6 +iN6ADDR_ANY = (0, 0, 0, 0) +#endif + +{-# DEPRECATED sOMAXCONN "Use maxListenQueue instead" #-} +sOMAXCONN :: Int +sOMAXCONN = #const SOMAXCONN + +{-# DEPRECATED sOL_SOCKET "This is not necessary anymore" #-} +sOL_SOCKET :: Int +sOL_SOCKET = #const SOL_SOCKET + +#ifdef SCM_RIGHTS +{-# DEPRECATED sCM_RIGHTS "This is not necessary anymore" #-} +sCM_RIGHTS :: Int +sCM_RIGHTS = #const SCM_RIGHTS +#endif + +-- | This is the value of SOMAXCONN, typically 128. +-- 128 is good enough for normal network servers but +-- is too small for high performance servers. +maxListenQueue :: Int +maxListenQueue = sOMAXCONN + +-- ----------------------------------------------------------------------------- + +data ShutdownCmd + = ShutdownReceive + | ShutdownSend + | ShutdownBoth + deriving Typeable + +sdownCmdToInt :: ShutdownCmd -> CInt +sdownCmdToInt ShutdownReceive = 0 +sdownCmdToInt ShutdownSend = 1 +sdownCmdToInt ShutdownBoth = 2 + +-- | Shut down one or both halves of the connection, depending on the +-- second argument to the function. If the second argument is +-- 'ShutdownReceive', further receives are disallowed. If it is +-- 'ShutdownSend', further sends are disallowed. If it is +-- 'ShutdownBoth', further sends and receives are disallowed. +shutdown :: Socket -> ShutdownCmd -> IO () +shutdown (MkSocket s _ _ _ _) stype = do + throwSocketErrorIfMinus1Retry_ "Network.Socket.shutdown" $ + c_shutdown s (sdownCmdToInt stype) + return () + +-- ----------------------------------------------------------------------------- + +-- | Close the socket. This function does not throw exceptions even if +-- the underlying system call returns errors. 
+-- +-- Sending data to or receiving data from closed socket +-- may lead to undefined behaviour. +-- +-- If multiple threads use the same socket and one uses 'fdSocket' and +-- the other use 'close', unexpected behavior may happen. +-- For more information, please refer to the documentation of 'fdSocket'. +close :: Socket -> IO () +close (MkSocket s _ _ _ socketStatus) = modifyMVar_ socketStatus $ \ status -> + case status of + ConvertedToHandle -> return ConvertedToHandle + Closed -> return Closed + _ -> do + -- closeFdWith avoids the deadlock of IO manager. + closeFdWith (void . c_close . fromIntegral) (fromIntegral s) + return Closed + +-- | Close the socket. This function throws exceptions if +-- the underlying system call returns errors. +-- +-- Sending data to or receiving data from closed socket +-- may lead to undefined behaviour. +close' :: Socket -> IO () +close' (MkSocket s _ _ _ socketStatus) = modifyMVar_ socketStatus $ \ status -> + case status of + ConvertedToHandle -> ioError (userError ("close: converted to a Handle, use hClose instead")) + Closed -> return Closed + _ -> do + -- closeFdWith avoids the deadlock of IO manager. + -- closeFd throws exceptions. + closeFdWith (closeFd . fromIntegral) (fromIntegral s) + return Closed + +-- ----------------------------------------------------------------------------- + +-- | Determines whether 'close' has been used on the 'Socket'. This +-- does /not/ indicate any status about the socket beyond this. If the +-- socket has been closed remotely, this function can still return +-- 'True'. 
+isConnected :: Socket -> IO Bool +isConnected (MkSocket _ _ _ _ status) = do + value <- readMVar status + return (value == Connected) +{-# DEPRECATED isConnected "SocketStatus will be removed" #-} + +-- ----------------------------------------------------------------------------- +-- Socket Predicates + +isBound :: Socket -> IO Bool +isBound (MkSocket _ _ _ _ status) = do + value <- readMVar status + return (value == Bound) +{-# DEPRECATED isBound "SocketStatus will be removed" #-} + +isListening :: Socket -> IO Bool +isListening (MkSocket _ _ _ _ status) = do + value <- readMVar status + return (value == Listening) +{-# DEPRECATED isListening "SocketStatus will be removed" #-} + +isReadable :: Socket -> IO Bool +isReadable (MkSocket _ _ _ _ status) = do + value <- readMVar status + return (value == Listening || value == Connected) +{-# DEPRECATED isReadable "SocketStatus will be removed" #-} + +isWritable :: Socket -> IO Bool +isWritable = isReadable -- sort of. +{-# DEPRECATED isWritable "SocketStatus will be removed" #-} + +isAcceptable :: Family -> SocketType -> SocketStatus -> Bool +#if defined(DOMAIN_SOCKET_SUPPORT) +isAcceptable AF_UNIX sockTyp status + | sockTyp == Stream || sockTyp == SeqPacket = + status == Connected || status == Bound || status == Listening +isAcceptable AF_UNIX _ _ = False +#endif +isAcceptable _ _ status = status == Connected || status == Listening +{-# DEPRECATED isAcceptable "SocketStatus will be removed" #-} + +-- ----------------------------------------------------------------------------- +-- Internet address manipulation routines: + +{-# DEPRECATED inet_addr "Use \"getAddrInfo\" instead" #-} +inet_addr :: String -> IO HostAddress +inet_addr ipstr = withSocketsDo $ do + withCString ipstr $ \str -> do + had <- c_inet_addr str + if had == maxBound + then ioError $ userError $ + "Network.Socket.inet_addr: Malformed address: " ++ ipstr + else return had -- network byte order + +{-# DEPRECATED inet_ntoa "Use \"getNameInfo\" instead" 
#-} +inet_ntoa :: HostAddress -> IO String +inet_ntoa haddr = withSocketsDo $ do + pstr <- c_inet_ntoa haddr + peekCString pstr + +-- | Turns a Socket into an 'Handle'. By default, the new handle is +-- unbuffered. Use 'System.IO.hSetBuffering' to change the buffering. +-- +-- Note that since a 'Handle' is automatically closed by a finalizer +-- when it is no longer referenced, you should avoid doing any more +-- operations on the 'Socket' after calling 'socketToHandle'. To +-- close the 'Socket' after 'socketToHandle', call 'System.IO.hClose' +-- on the 'Handle'. + +socketToHandle :: Socket -> IOMode -> IO Handle +socketToHandle s@(MkSocket fd _ _ _ socketStatus) mode = do + modifyMVar socketStatus $ \ status -> + if status == ConvertedToHandle + then ioError (userError ("socketToHandle: already a Handle")) + else do + h <- fdToHandle' (fromIntegral fd) (Just GHC.IO.Device.Stream) True (show s) mode True{-bin-} + hSetBuffering h NoBuffering + return (ConvertedToHandle, h) + +-- | Pack a list of values into a bitmask. The possible mappings from +-- value to bit-to-set are given as the first argument. We assume +-- that each value can cause exactly one bit to be set; unpackBits will +-- break if this property is not true. + +packBits :: (Eq a, Num b, Bits b) => [(a, b)] -> [a] -> b + +packBits mapping xs = foldl' pack 0 mapping + where pack acc (k, v) | k `elem` xs = acc .|. v + | otherwise = acc + +-- | Unpack a bitmask into a list of values. + +unpackBits :: (Num b, Bits b) => [(a, b)] -> b -> [a] + +-- Be permissive and ignore unknown bit values. At least on OS X, +-- getaddrinfo returns an ai_flags field with bits set that have no +-- entry in . +unpackBits [] _ = [] +unpackBits ((k,v):xs) r + | r .&. v /= 0 = k : unpackBits xs (r .&. 
complement v) + | otherwise = unpackBits xs r + +----------------------------------------------------------------------------- +-- Address and service lookups + +#if defined(IPV6_SOCKET_SUPPORT) + +-- | Flags that control the querying behaviour of 'getAddrInfo'. +-- For more information, see +data AddrInfoFlag = + -- | The list of returned 'AddrInfo' values will + -- only contain IPv4 addresses if the local system has at least + -- one IPv4 interface configured, and likewise for IPv6. + -- (Only some platforms support this.) + AI_ADDRCONFIG + -- | If 'AI_ALL' is specified, return all matching IPv6 and + -- IPv4 addresses. Otherwise, this flag has no effect. + -- (Only some platforms support this.) + | AI_ALL + -- | The 'addrCanonName' field of the first returned + -- 'AddrInfo' will contain the "canonical name" of the host. + | AI_CANONNAME + -- | The 'HostName' argument /must/ be a numeric + -- address in string form, and network name lookups will not be + -- attempted. + | AI_NUMERICHOST + -- | The 'ServiceName' argument /must/ be a port + -- number in string form, and service name lookups will not be + -- attempted. (Only some platforms support this.) + | AI_NUMERICSERV + -- | If no 'HostName' value is provided, the network + -- address in each 'SockAddr' + -- will be left as a "wild card". + -- This is useful for server applications that + -- will accept connections from any client. + | AI_PASSIVE + -- | If an IPv6 lookup is performed, and no IPv6 + -- addresses are found, IPv6-mapped IPv4 addresses will be + -- returned. (Only some platforms support this.) 
+ | AI_V4MAPPED + deriving (Eq, Read, Show, Typeable) + +aiFlagMapping :: [(AddrInfoFlag, CInt)] + +aiFlagMapping = + [ +#if HAVE_DECL_AI_ADDRCONFIG + (AI_ADDRCONFIG, #const AI_ADDRCONFIG), +#else + (AI_ADDRCONFIG, 0), +#endif +#if HAVE_DECL_AI_ALL + (AI_ALL, #const AI_ALL), +#else + (AI_ALL, 0), +#endif + (AI_CANONNAME, #const AI_CANONNAME), + (AI_NUMERICHOST, #const AI_NUMERICHOST), +#if HAVE_DECL_AI_NUMERICSERV + (AI_NUMERICSERV, #const AI_NUMERICSERV), +#else + (AI_NUMERICSERV, 0), +#endif + (AI_PASSIVE, #const AI_PASSIVE), +#if HAVE_DECL_AI_V4MAPPED + (AI_V4MAPPED, #const AI_V4MAPPED) +#else + (AI_V4MAPPED, 0) +#endif + ] + +-- | Indicate whether the given 'AddrInfoFlag' will have any effect on +-- this system. +addrInfoFlagImplemented :: AddrInfoFlag -> Bool +addrInfoFlagImplemented f = packBits aiFlagMapping [f] /= 0 + +data AddrInfo = + AddrInfo { + addrFlags :: [AddrInfoFlag], + addrFamily :: Family, + addrSocketType :: SocketType, + addrProtocol :: ProtocolNumber, + addrAddress :: SockAddr, + addrCanonName :: Maybe String + } + deriving (Eq, Show, Typeable) + +instance Storable AddrInfo where + sizeOf _ = #const sizeof(struct addrinfo) + alignment _ = alignment (undefined :: CInt) + + peek p = do + ai_flags <- (#peek struct addrinfo, ai_flags) p + ai_family <- (#peek struct addrinfo, ai_family) p + ai_socktype <- (#peek struct addrinfo, ai_socktype) p + ai_protocol <- (#peek struct addrinfo, ai_protocol) p + ai_addr <- (#peek struct addrinfo, ai_addr) p >>= peekSockAddr + ai_canonname_ptr <- (#peek struct addrinfo, ai_canonname) p + + ai_canonname <- if ai_canonname_ptr == nullPtr + then return Nothing + else liftM Just $ peekCString ai_canonname_ptr + + socktype <- unpackSocketType' "AddrInfo.peek" ai_socktype + return (AddrInfo + { + addrFlags = unpackBits aiFlagMapping ai_flags, + addrFamily = unpackFamily ai_family, + addrSocketType = socktype, + addrProtocol = ai_protocol, + addrAddress = ai_addr, + addrCanonName = ai_canonname + }) + + poke p 
(AddrInfo flags family socketType protocol _ _) = do + c_stype <- packSocketTypeOrThrow "AddrInfo.poke" socketType + + (#poke struct addrinfo, ai_flags) p (packBits aiFlagMapping flags) + (#poke struct addrinfo, ai_family) p (packFamily family) + (#poke struct addrinfo, ai_socktype) p c_stype + (#poke struct addrinfo, ai_protocol) p protocol + + -- stuff below is probably not needed, but let's zero it for safety + + (#poke struct addrinfo, ai_addrlen) p (0::CSize) + (#poke struct addrinfo, ai_addr) p nullPtr + (#poke struct addrinfo, ai_canonname) p nullPtr + (#poke struct addrinfo, ai_next) p nullPtr + +-- | Flags that control the querying behaviour of 'getNameInfo'. +-- For more information, see +data NameInfoFlag = + -- | Resolve a datagram-based service name. This is + -- required only for the few protocols that have different port + -- numbers for their datagram-based versions than for their + -- stream-based versions. + NI_DGRAM + -- | If the hostname cannot be looked up, an IO error is thrown. + | NI_NAMEREQD + -- | If a host is local, return only the hostname part of the FQDN. + | NI_NOFQDN + -- | The name of the host is not looked up. + -- Instead, a numeric representation of the host's + -- address is returned. For an IPv4 address, this will be a + -- dotted-quad string. For IPv6, it will be colon-separated + -- hexadecimal. + | NI_NUMERICHOST + -- | The name of the service is not + -- looked up. Instead, a numeric representation of the + -- service is returned. + | NI_NUMERICSERV + deriving (Eq, Read, Show, Typeable) + +niFlagMapping :: [(NameInfoFlag, CInt)] + +niFlagMapping = [(NI_DGRAM, #const NI_DGRAM), + (NI_NAMEREQD, #const NI_NAMEREQD), + (NI_NOFQDN, #const NI_NOFQDN), + (NI_NUMERICHOST, #const NI_NUMERICHOST), + (NI_NUMERICSERV, #const NI_NUMERICSERV)] + +-- | Default hints for address lookup with 'getAddrInfo'. The values +-- of the 'addrAddress' and 'addrCanonName' fields are 'undefined', +-- and are never inspected by 'getAddrInfo'. 
+-- +-- >>> addrFlags defaultHints +-- [] +-- >>> addrFamily defaultHints +-- AF_UNSPEC +-- >>> addrSocketType defaultHints +-- NoSocketType +-- >>> addrProtocol defaultHints +-- 0 + +defaultHints :: AddrInfo +defaultHints = AddrInfo { + addrFlags = [], + addrFamily = AF_UNSPEC, + addrSocketType = NoSocketType, + addrProtocol = defaultProtocol, + addrAddress = undefined, + addrCanonName = undefined + } + +-- | Shows the fields of 'defaultHints', without inspecting the by-default undefined fields 'addrAddress' and 'addrCanonName'. +showDefaultHints :: AddrInfo -> String +showDefaultHints AddrInfo{..} = concat + [ "AddrInfo {" + , "addrFlags = " + , show addrFlags + , ", addrFamily = " + , show addrFamily + , ", addrSocketType = " + , show addrSocketType + , ", addrProtocol = " + , show addrProtocol + , ", addrAddress = " + , "" + , ", addrCanonName = " + , "" + , "}" + ] + +-- | Resolve a host or service name to one or more addresses. +-- The 'AddrInfo' values that this function returns contain 'SockAddr' +-- values that you can pass directly to 'connect' or +-- 'bind'. +-- +-- This function is protocol independent. It can return both IPv4 and +-- IPv6 address information. +-- +-- The 'AddrInfo' argument specifies the preferred query behaviour, +-- socket options, or protocol. You can override these conveniently +-- using Haskell's record update syntax on 'defaultHints', for example +-- as follows: +-- +-- >>> let hints = defaultHints { addrFlags = [AI_NUMERICHOST], addrSocketType = Stream } +-- +-- You must provide a 'Just' value for at least one of the 'HostName' +-- or 'ServiceName' arguments. 'HostName' can be either a numeric +-- network address (dotted quad for IPv4, colon-separated hex for +-- IPv6) or a hostname. In the latter case, its addresses will be +-- looked up unless 'AI_NUMERICHOST' is specified as a hint. 
If you +-- do not provide a 'HostName' value /and/ do not set 'AI_PASSIVE' as +-- a hint, network addresses in the result will contain the address of +-- the loopback interface. +-- +-- If the query fails, this function throws an IO exception instead of +-- returning an empty list. Otherwise, it returns a non-empty list +-- of 'AddrInfo' values. +-- +-- There are several reasons why a query might result in several +-- values. For example, the queried-for host could be multihomed, or +-- the service might be available via several protocols. +-- +-- Note: the order of arguments is slightly different to that defined +-- for @getaddrinfo@ in RFC 2553. The 'AddrInfo' parameter comes first +-- to make partial application easier. +-- +-- >>> addr:_ <- getAddrInfo (Just hints) (Just "127.0.0.1") (Just "http") +-- >>> addrAddress addr +-- 127.0.0.1:80 + +getAddrInfo :: Maybe AddrInfo -- ^ preferred socket type or protocol + -> Maybe HostName -- ^ host name to look up + -> Maybe ServiceName -- ^ service name to look up + -> IO [AddrInfo] -- ^ resolved addresses, with "best" first + +getAddrInfo hints node service = withSocketsDo $ + maybeWith withCString node $ \c_node -> + maybeWith withCString service $ \c_service -> + maybeWith with filteredHints $ \c_hints -> + alloca $ \ptr_ptr_addrs -> do + ret <- c_getaddrinfo c_node c_service c_hints ptr_ptr_addrs + case ret of + 0 -> do ptr_addrs <- peek ptr_ptr_addrs + ais <- followAddrInfo ptr_addrs + c_freeaddrinfo ptr_addrs + return ais + _ -> do err <- gai_strerror ret + let message = concat + [ "Network.Socket.getAddrInfo (called with preferred socket type/protocol: " + , maybe (show hints) showDefaultHints hints + , ", host name: " + , show node + , ", service name: " + , show service + , ")" + ] + ioError (ioeSetErrorString + (mkIOError NoSuchThing message Nothing + Nothing) err) + -- Leaving out the service and using AI_NUMERICSERV causes a + -- segfault on OS X 10.8.2. 
This code removes AI_NUMERICSERV + -- (which has no effect) in that case. + where +#if defined(darwin_HOST_OS) + filteredHints = case service of + Nothing -> fmap (\ h -> h { addrFlags = delete AI_NUMERICSERV (addrFlags h) }) hints + _ -> hints +#else + filteredHints = hints +#endif + +followAddrInfo :: Ptr AddrInfo -> IO [AddrInfo] + +followAddrInfo ptr_ai | ptr_ai == nullPtr = return [] + | otherwise = do + a <- peek ptr_ai + as <- (#peek struct addrinfo, ai_next) ptr_ai >>= followAddrInfo + return (a:as) + +foreign import ccall safe "hsnet_getaddrinfo" + c_getaddrinfo :: CString -> CString -> Ptr AddrInfo -> Ptr (Ptr AddrInfo) + -> IO CInt + +foreign import ccall safe "hsnet_freeaddrinfo" + c_freeaddrinfo :: Ptr AddrInfo -> IO () + +gai_strerror :: CInt -> IO String + +#ifdef HAVE_GAI_STRERROR +gai_strerror n = c_gai_strerror n >>= peekCString + +foreign import ccall safe "gai_strerror" + c_gai_strerror :: CInt -> IO CString +#else +gai_strerror n = ioError $ userError $ "Network.Socket.gai_strerror not supported: " ++ show n +#endif + +withCStringIf :: Bool -> Int -> (CSize -> CString -> IO a) -> IO a +withCStringIf False _ f = f 0 nullPtr +withCStringIf True n f = allocaBytes n (f (fromIntegral n)) + +-- | Resolve an address to a host or service name. +-- This function is protocol independent. +-- The list of 'NameInfoFlag' values controls query behaviour. +-- +-- If a host or service's name cannot be looked up, then the numeric +-- form of the address or service will be returned. +-- +-- If the query fails, this function throws an IO exception. 
+-- +-- Example: +-- @ +-- (hostName, _) <- getNameInfo [] True False myAddress +-- @ + +getNameInfo :: [NameInfoFlag] -- ^ flags to control lookup behaviour + -> Bool -- ^ whether to look up a hostname + -> Bool -- ^ whether to look up a service name + -> SockAddr -- ^ the address to look up + -> IO (Maybe HostName, Maybe ServiceName) + +getNameInfo flags doHost doService addr = withSocketsDo $ + withCStringIf doHost (#const NI_MAXHOST) $ \c_hostlen c_host -> + withCStringIf doService (#const NI_MAXSERV) $ \c_servlen c_serv -> do + withSockAddr addr $ \ptr_addr sz -> do + ret <- c_getnameinfo ptr_addr (fromIntegral sz) c_host c_hostlen + c_serv c_servlen (packBits niFlagMapping flags) + case ret of + 0 -> do + let peekIf doIf c_val = if doIf + then liftM Just $ peekCString c_val + else return Nothing + host <- peekIf doHost c_host + serv <- peekIf doService c_serv + return (host, serv) + _ -> do err <- gai_strerror ret + let message = concat + [ "Network.Socket.getNameInfo (called with flags: " + , show flags + , ", hostname lookup: " + , show doHost + , ", service name lookup: " + , show doService + , ", socket address: " + , show addr + , ")" + ] + ioError (ioeSetErrorString + (mkIOError NoSuchThing message Nothing + Nothing) err) + +foreign import ccall safe "hsnet_getnameinfo" + c_getnameinfo :: Ptr SockAddr -> CInt{-CSockLen???-} -> CString -> CSize -> CString + -> CSize -> CInt -> IO CInt +#endif + +mkInvalidRecvArgError :: String -> IOError +mkInvalidRecvArgError loc = ioeSetErrorString (mkIOError + InvalidArgument + loc Nothing Nothing) "non-positive length" + +mkEOFError :: String -> IOError +mkEOFError loc = ioeSetErrorString (mkIOError EOF loc Nothing Nothing) "end of file" + +-- --------------------------------------------------------------------------- +-- foreign imports from the C library + +foreign import ccall unsafe "hsnet_inet_ntoa" + c_inet_ntoa :: HostAddress -> IO (Ptr CChar) + +foreign import CALLCONV unsafe "inet_addr" + c_inet_addr :: Ptr 
CChar -> IO HostAddress + +foreign import CALLCONV unsafe "shutdown" + c_shutdown :: CInt -> CInt -> IO CInt + +closeFd :: CInt -> IO () +closeFd fd = throwSocketErrorIfMinus1_ "Network.Socket.close" $ c_close fd + +#if !defined(WITH_WINSOCK) +foreign import ccall unsafe "close" + c_close :: CInt -> IO CInt +#else +foreign import stdcall unsafe "closesocket" + c_close :: CInt -> IO CInt +#endif + +foreign import CALLCONV unsafe "socket" + c_socket :: CInt -> CInt -> CInt -> IO CInt +foreign import CALLCONV unsafe "bind" + c_bind :: CInt -> Ptr SockAddr -> CInt{-CSockLen???-} -> IO CInt +foreign import CALLCONV SAFE_ON_WIN "connect" + c_connect :: CInt -> Ptr SockAddr -> CInt{-CSockLen???-} -> IO CInt +#ifdef HAVE_ACCEPT4 +foreign import CALLCONV unsafe "accept4" + c_accept4 :: CInt -> Ptr SockAddr -> Ptr CInt{-CSockLen???-} -> CInt -> IO CInt +#else +foreign import CALLCONV unsafe "accept" + c_accept :: CInt -> Ptr SockAddr -> Ptr CInt{-CSockLen???-} -> IO CInt +#endif +foreign import CALLCONV unsafe "listen" + c_listen :: CInt -> CInt -> IO CInt + +#if defined(mingw32_HOST_OS) +foreign import CALLCONV safe "accept" + c_accept_safe :: CInt -> Ptr SockAddr -> Ptr CInt{-CSockLen???-} -> IO CInt + +foreign import ccall unsafe "rtsSupportsBoundThreads" threaded :: Bool +#endif + +foreign import CALLCONV unsafe "send" + c_send :: CInt -> Ptr a -> CSize -> CInt -> IO CInt +foreign import CALLCONV SAFE_ON_WIN "sendto" + c_sendto :: CInt -> Ptr a -> CSize -> CInt -> Ptr SockAddr -> CInt -> IO CInt +foreign import CALLCONV unsafe "recv" + c_recv :: CInt -> Ptr CChar -> CSize -> CInt -> IO CInt +foreign import CALLCONV SAFE_ON_WIN "recvfrom" + c_recvfrom :: CInt -> Ptr a -> CSize -> CInt -> Ptr SockAddr -> Ptr CInt -> IO CInt +foreign import CALLCONV unsafe "getpeername" + c_getpeername :: CInt -> Ptr SockAddr -> Ptr CInt -> IO CInt +foreign import CALLCONV unsafe "getsockname" + c_getsockname :: CInt -> Ptr SockAddr -> Ptr CInt -> IO CInt + +foreign import CALLCONV unsafe 
"getsockopt" + c_getsockopt :: CInt -> CInt -> CInt -> Ptr CInt -> Ptr CInt -> IO CInt +foreign import CALLCONV unsafe "setsockopt" + c_setsockopt :: CInt -> CInt -> CInt -> Ptr CInt -> CInt -> IO CInt + +#if defined(HAVE_GETPEEREID) +foreign import CALLCONV unsafe "getpeereid" + c_getpeereid :: CInt -> Ptr CUInt -> Ptr CUInt -> IO CInt +#endif +-- --------------------------------------------------------------------------- +-- * Deprecated aliases + +-- $deprecated-aliases +-- +-- These aliases are deprecated and should not be used in new code. +-- They will be removed in some future version of the package. + +{-# DEPRECATED bindSocket "use 'bind'" #-} + +-- | Deprecated alias for 'bind'. +bindSocket :: Socket -- Unconnected Socket + -> SockAddr -- Address to Bind to + -> IO () +bindSocket = bind + +{-# DEPRECATED sClose "use 'close'" #-} + +-- | Deprecated alias for 'close'. +sClose :: Socket -> IO () +sClose = close + +{-# DEPRECATED sIsConnected "SocketStatus will be removed" #-} + +sIsConnected :: Socket -> IO Bool +sIsConnected = isConnected + +{-# DEPRECATED sIsBound "SocketStatus will be removed" #-} + +sIsBound :: Socket -> IO Bool +sIsBound = isBound + +{-# DEPRECATED sIsListening "SocketStatus will be removed" #-} + +sIsListening :: Socket -> IO Bool +sIsListening = isListening + +{-# DEPRECATED sIsReadable "SocketStatus will be removed" #-} + +sIsReadable :: Socket -> IO Bool +sIsReadable = isReadable + +{-# DEPRECATED sIsWritable "SocketStatus will be removed" #-} + +sIsWritable :: Socket -> IO Bool +sIsWritable = isWritable + +#if defined(HAVE_IF_NAMETOINDEX) +-- | Returns the index corresponding to the interface name. +-- +-- Since 2.7.0.0. +ifNameToIndex :: String -> IO (Maybe Int) +ifNameToIndex ifname = do + index <- withCString ifname c_if_nametoindex + -- On failure zero is returned. We'll return Nothing. + return $ if index == 0 then Nothing else Just $ fromIntegral index + +-- | Returns the interface name corresponding to the index. 
+-- +-- Since 2.7.0.0. +ifIndexToName :: Int -> IO (Maybe String) +ifIndexToName ifn = allocaBytes 16 $ \ptr -> do -- 16 == IFNAMSIZ + r <- c_if_indextoname (fromIntegral ifn) ptr + if r == nullPtr then + return Nothing + else + Just <$> peekCString ptr + +foreign import CALLCONV safe "if_nametoindex" + c_if_nametoindex :: CString -> IO CUInt + +foreign import CALLCONV safe "if_indextoname" + c_if_indextoname :: CUInt -> CString -> IO CString +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/network.buildinfo.in cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/network.buildinfo.in --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/network.buildinfo.in 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/network.buildinfo.in 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,7 @@ +ghc-options: @EXTRA_CPPFLAGS@ +ghc-prof-options: @EXTRA_CPPFLAGS@ +ld-options: @LDFLAGS@ +cc-options: @EXTRA_CPPFLAGS@ +c-sources: @EXTRA_SRCS@ +extra-libraries: @EXTRA_LIBS@ +install-includes: HsNetworkConfig.h diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/network.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/network.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/network.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/network.cabal 2018-11-26 08:43:16.000000000 +0000 @@ -0,0 +1,104 @@ +name: network +version: 2.8.0.0 +license: BSD3 +license-file: LICENSE +maintainer: Kazu Yamamoto, Evan Borden +synopsis: Low-level networking interface +description: + This package provides a low-level networking interface. + . + In network-2.6 the @Network.URI@ module was split off into its own + package, network-uri-2.6. If you're using the @Network.URI@ module + you can automatically get it from the right package by adding this + to your .cabal file: + . 
+ > library + > build-depends: network-uri-flag +category: Network +build-type: Configure +cabal-version: >=1.8 +extra-tmp-files: + config.log config.status autom4te.cache network.buildinfo + include/HsNetworkConfig.h +extra-source-files: + README.md CHANGELOG.md + examples/*.hs tests/*.hs config.guess config.sub install-sh + configure.ac configure network.buildinfo.in + include/HsNetworkConfig.h.in include/HsNet.h include/HsNetDef.h + -- C sources only used on some systems + cbits/ancilData.c cbits/asyncAccept.c cbits/initWinSock.c + cbits/winSockErr.c +homepage: https://github.com/haskell/network +bug-reports: https://github.com/haskell/network/issues +tested-with: GHC == 7.8.4 + , GHC == 7.10.3 + , GHC == 8.0.2 + , GHC == 8.2.2 + , GHC == 8.4.3 + +library + exposed-modules: + Network + Network.BSD + Network.Socket + Network.Socket.ByteString + Network.Socket.ByteString.Lazy + Network.Socket.Internal + other-modules: + Network.Socket.ByteString.Internal + Network.Socket.Types + + if !os(windows) + other-modules: + Network.Socket.ByteString.IOVec + Network.Socket.ByteString.Lazy.Posix + Network.Socket.ByteString.MsgHdr + if os(windows) + other-modules: + Network.Socket.ByteString.Lazy.Windows + + build-depends: + base >= 4.7 && < 5, + bytestring == 0.10.* + + if !os(windows) + build-depends: + unix >= 2 + + extensions: + CPP, DeriveDataTypeable, ForeignFunctionInterface, TypeSynonymInstances + include-dirs: include + includes: HsNet.h HsNetDef.h + install-includes: HsNet.h HsNetDef.h + c-sources: cbits/HsNet.c + ghc-options: -Wall -fwarn-tabs + +test-suite spec + hs-source-dirs: tests + main-is: Spec.hs + other-modules: RegressionSpec + SimpleSpec + type: exitcode-stdio-1.0 + ghc-options: -Wall -threaded + build-depends: + base >= 4.7 && < 5, + bytestring, + directory, + HUnit, + network, + hspec + +test-suite doctest + hs-source-dirs: tests + main-is: doctests.hs + type: exitcode-stdio-1.0 + + build-depends: + base >= 4.7 && < 5, + doctest >= 0.10.1 + + 
ghc-options: -Wall + +source-repository head + type: git + location: git://github.com/haskell/network.git diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Network.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Network.hs 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,480 @@ +{-# LANGUAGE CPP #-} +{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-} +{-# OPTIONS_HADDOCK hide #-} +----------------------------------------------------------------------------- +-- | +-- Module : Network +-- Copyright : (c) The University of Glasgow 2001 +-- License : BSD-style (see the file libraries/network/LICENSE) +-- +-- Maintainer : libraries@haskell.org +-- Stability : provisional +-- Portability : portable +-- +-- This module is kept for backwards-compatibility. New users are +-- encouraged to use "Network.Socket" instead. +-- +-- "Network" was intended as a \"higher-level\" interface to networking +-- facilities, and only supports TCP. +-- +----------------------------------------------------------------------------- + +#include "HsNetworkConfig.h" + +#ifdef HAVE_GETADDRINFO +-- Use IPv6-capable function definitions if the OS supports it. +#define IPV6_SOCKET_SUPPORT 1 +#endif + +module Network {-# DEPRECATED "The high level Network interface is no longer supported. Please use Network.Socket." #-} + ( + -- * Basic data types + Socket + , PortID(..) 
+ , HostName + , PortNumber + + -- * Initialisation + , withSocketsDo + + -- * Server-side connections + , listenOn + , accept + , sClose + + -- * Client-side connections + , connectTo + + -- * Simple sending and receiving + {-$sendrecv-} + , sendTo + , recvFrom + + -- * Miscellaneous + , socketPort + + -- * Networking Issues + -- ** Buffering + {-$buffering-} + + -- ** Improving I\/O Performance over sockets + {-$performance-} + ) where + +import Control.Monad (liftM) +import Data.Maybe (fromJust) +import Network.BSD +import Network.Socket hiding (accept, socketPort, recvFrom, + sendTo, PortNumber, sClose) +import qualified Network.Socket as Socket (accept) +import System.IO +import Prelude +import qualified Control.Exception as Exception + +-- --------------------------------------------------------------------------- +-- High Level ``Setup'' functions + +-- If the @PortID@ specifies a unix family socket and the @Hostname@ +-- differs from that returned by @getHostname@ then an error is +-- raised. Alternatively an empty string may be given to @connectTo@ +-- signalling that the current hostname applies. + +data PortID = + Service String -- Service Name eg "ftp" + | PortNumber PortNumber -- User defined Port Number +#if !defined(mingw32_HOST_OS) + | UnixSocket String -- Unix family socket in file system +#endif + deriving (Show, Eq) + +-- | Calling 'connectTo' creates a client side socket which is +-- connected to the given host and port. The Protocol and socket type is +-- derived from the given port identifier. If a port number is given +-- then the result is always an internet family 'Stream' socket. + +connectTo :: HostName -- Hostname + -> PortID -- Port Identifier + -> IO Handle -- Connected Socket + +#if defined(IPV6_SOCKET_SUPPORT) +-- IPv6 and IPv4. + +connectTo hostname (Service serv) = connect' "Network.connectTo" hostname serv + +connectTo hostname (PortNumber port) = connect' "Network.connectTo" hostname (show port) +#else +-- IPv4 only. 
+ +connectTo hostname (Service serv) = do + proto <- getProtocolNumber "tcp" + bracketOnError + (socket AF_INET Stream proto) + (sClose) -- only done if there's an error + (\sock -> do + port <- getServicePortNumber serv + he <- getHostByName hostname + connect sock (SockAddrInet port (hostAddress he)) + socketToHandle sock ReadWriteMode + ) + +connectTo hostname (PortNumber port) = do + proto <- getProtocolNumber "tcp" + bracketOnError + (socket AF_INET Stream proto) + (sClose) -- only done if there's an error + (\sock -> do + he <- getHostByName hostname + connect sock (SockAddrInet port (hostAddress he)) + socketToHandle sock ReadWriteMode + ) +#endif + +#if !defined(mingw32_HOST_OS) +connectTo _ (UnixSocket path) = do + bracketOnError + (socket AF_UNIX Stream 0) + (sClose) + (\sock -> do + connect sock (SockAddrUnix path) + socketToHandle sock ReadWriteMode + ) +#endif + +#if defined(IPV6_SOCKET_SUPPORT) +connect' :: String -> HostName -> ServiceName -> IO Handle + +connect' caller host serv = do + proto <- getProtocolNumber "tcp" + let hints = defaultHints { addrFlags = [AI_ADDRCONFIG] + , addrProtocol = proto + , addrSocketType = Stream } + addrs <- getAddrInfo (Just hints) (Just host) (Just serv) + firstSuccessful caller $ map tryToConnect addrs + where + tryToConnect addr = + bracketOnError + (socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)) + (sClose) -- only done if there's an error + (\sock -> do + connect sock (addrAddress addr) + socketToHandle sock ReadWriteMode + ) +#endif + +-- | Creates the server side socket which has been bound to the +-- specified port. +-- +-- 'maxListenQueue' (typically 128) is specified to the listen queue. +-- This is good enough for normal network servers but is too small +-- for high performance servers. +-- +-- To avoid the \"Address already in use\" problems, +-- the 'ReuseAddr' socket option is set on the listening socket. 
+-- +-- If available, the 'IPv6Only' socket option is set to 0 +-- so that both IPv4 and IPv6 can be accepted with this socket. +-- +-- If you don't like the behavior above, please use the lower level +-- 'Network.Socket.listen' instead. + +listenOn :: PortID -- ^ Port Identifier + -> IO Socket -- ^ Listening Socket + +#if defined(IPV6_SOCKET_SUPPORT) +-- IPv6 and IPv4. + +listenOn (Service serv) = listen' serv + +listenOn (PortNumber port) = listen' (show port) +#else +-- IPv4 only. + +listenOn (Service serv) = do + proto <- getProtocolNumber "tcp" + bracketOnError + (socket AF_INET Stream proto) + (sClose) + (\sock -> do + port <- getServicePortNumber serv + setSocketOption sock ReuseAddr 1 + bind sock (SockAddrInet port iNADDR_ANY) + listen sock maxListenQueue + return sock + ) + +listenOn (PortNumber port) = do + proto <- getProtocolNumber "tcp" + bracketOnError + (socket AF_INET Stream proto) + (sClose) + (\sock -> do + setSocketOption sock ReuseAddr 1 + bind sock (SockAddrInet port iNADDR_ANY) + listen sock maxListenQueue + return sock + ) +#endif + +#if !defined(mingw32_HOST_OS) +listenOn (UnixSocket path) = + bracketOnError + (socket AF_UNIX Stream 0) + (sClose) + (\sock -> do + setSocketOption sock ReuseAddr 1 + bind sock (SockAddrUnix path) + listen sock maxListenQueue + return sock + ) +#endif + +#if defined(IPV6_SOCKET_SUPPORT) +listen' :: ServiceName -> IO Socket + +listen' serv = do + proto <- getProtocolNumber "tcp" + -- We should probably specify addrFamily = AF_INET6 and the filter + -- code below should be removed. AI_ADDRCONFIG is probably not + -- necessary. But this code is well-tested. So, let's keep it. + let hints = defaultHints { addrFlags = [AI_ADDRCONFIG, AI_PASSIVE] + , addrSocketType = Stream + , addrProtocol = proto } + addrs <- getAddrInfo (Just hints) Nothing (Just serv) + -- Choose an IPv6 socket if exists. This ensures the socket can + -- handle both IPv4 and IPv6 if v6only is false. 
+ let addrs' = filter (\x -> addrFamily x == AF_INET6) addrs + addr = if null addrs' then head addrs else head addrs' + bracketOnError + (socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)) + (sClose) + (\sock -> do + setSocketOption sock ReuseAddr 1 + bind sock (addrAddress addr) + listen sock maxListenQueue + return sock + ) +#endif + +-- ----------------------------------------------------------------------------- +-- accept + +-- | Accept a connection on a socket created by 'listenOn'. Normal +-- I\/O operations (see "System.IO") can be used on the 'Handle' +-- returned to communicate with the client. +-- Notice that although you can pass any Socket to Network.accept, +-- only sockets of either AF_UNIX, AF_INET, or AF_INET6 will work +-- (this shouldn't be a problem, though). When using AF_UNIX, HostName +-- will be set to the path of the socket and PortNumber to -1. +-- +accept :: Socket -- ^ Listening Socket + -> IO (Handle, + HostName, + PortNumber) -- ^ Triple of: read\/write 'Handle' for + -- communicating with the client, + -- the 'HostName' of the peer socket, and + -- the 'PortNumber' of the remote connection. +accept sock@(MkSocket _ AF_INET _ _ _) = do + ~(sock', (SockAddrInet port haddr)) <- Socket.accept sock + peer <- catchIO + (do + (HostEntry peer _ _ _) <- getHostByAddr AF_INET haddr + return peer + ) + (\_e -> inet_ntoa haddr) + -- if getHostByName fails, we fall back to the IP address + handle <- socketToHandle sock' ReadWriteMode + return (handle, peer, port) +#if defined(IPV6_SOCKET_SUPPORT) +accept sock@(MkSocket _ AF_INET6 _ _ _) = do + (sock', addr) <- Socket.accept sock + peer <- catchIO ((fromJust . fst) `liftM` getNameInfo [] True False addr) $ + \_ -> case addr of + SockAddrInet _ a -> inet_ntoa a + SockAddrInet6 _ _ a _ -> return (show a) +#if defined(mingw32_HOST_OS) + SockAddrUnix {} -> ioError $ userError "Network.accept: peer socket address 'SockAddrUnix' not supported on this platform." 
+#else + SockAddrUnix a -> return a +#endif +#if defined(CAN_SOCKET_SUPPORT) + SockAddrCan {} -> ioError $ userError "Network.accept: peer socket address 'SockAddrCan' not supported." +#else + SockAddrCan {} -> ioError $ userError "Network.accept: peer socket address 'SockAddrCan' not supported on this platform." +#endif + handle <- socketToHandle sock' ReadWriteMode + let port = case addr of + SockAddrInet p _ -> p + SockAddrInet6 p _ _ _ -> p + _ -> -1 + return (handle, peer, port) +#endif +#if !defined(mingw32_HOST_OS) +accept sock@(MkSocket _ AF_UNIX _ _ _) = do + ~(sock', (SockAddrUnix path)) <- Socket.accept sock + handle <- socketToHandle sock' ReadWriteMode + return (handle, path, -1) +#endif +accept (MkSocket _ family _ _ _) = + ioError $ userError $ "Network.accept: address family '" ++ + show family ++ "' not supported." + + +-- | Close the socket. Sending data to or receiving data from closed socket +-- may lead to undefined behaviour. +sClose :: Socket -> IO () +sClose = close -- Explicit redefinition because Network.sClose is deprecated, + -- hence the re-export would also be marked as such. + +-- ----------------------------------------------------------------------------- +-- sendTo/recvFrom + +{-$sendrecv +Send and receive data from\/to the given host and port number. These +should normally only be used where the socket will not be required for +further calls. Also, note that due to the use of 'hGetContents' in 'recvFrom' +the socket will remain open (i.e. not available) even if the function already +returned. Their use is strongly discouraged except for small test-applications +or invocations from the command line. 
+-} + +sendTo :: HostName -- Hostname + -> PortID -- Port Number + -> String -- Message to send + -> IO () +sendTo h p msg = do + s <- connectTo h p + hPutStr s msg + hClose s + +recvFrom :: HostName -- Hostname + -> PortID -- Port Number + -> IO String -- Received Data + +#if defined(IPV6_SOCKET_SUPPORT) +recvFrom host port = do + proto <- getProtocolNumber "tcp" + let hints = defaultHints { addrFlags = [AI_ADDRCONFIG] + , addrProtocol = proto + , addrSocketType = Stream } + allowed <- map addrAddress `liftM` getAddrInfo (Just hints) (Just host) + Nothing + s <- listenOn port + let waiting = do + (s', addr) <- Socket.accept s + if not (addr `oneOf` allowed) + then sClose s' >> waiting + else socketToHandle s' ReadMode >>= hGetContents + waiting + where + a@(SockAddrInet _ ha) `oneOf` ((SockAddrInet _ hb):bs) + | ha == hb = True + | otherwise = a `oneOf` bs + a@(SockAddrInet6 _ _ ha _) `oneOf` ((SockAddrInet6 _ _ hb _):bs) + | ha == hb = True + | otherwise = a `oneOf` bs + _ `oneOf` _ = False +#else +recvFrom host port = do + ip <- getHostByName host + let ipHs = hostAddresses ip + s <- listenOn port + let + waiting = do + ~(s', SockAddrInet _ haddr) <- Socket.accept s + he <- getHostByAddr AF_INET haddr + if not (any (`elem` ipHs) (hostAddresses he)) + then do + sClose s' + waiting + else do + h <- socketToHandle s' ReadMode + msg <- hGetContents h + return msg + + message <- waiting + return message +#endif + +-- --------------------------------------------------------------------------- +-- Access function returning the port type/id of socket. + +-- | Returns the 'PortID' associated with a given socket. 
+socketPort :: Socket -> IO PortID +socketPort s = do + sockaddr <- getSocketName s + case sockaddr of + SockAddrInet port _ -> return $ PortNumber port +#if defined(IPV6_SOCKET_SUPPORT) + SockAddrInet6 port _ _ _ -> return $ PortNumber port +#else + SockAddrInet6 {} -> ioError $ userError "Network.socketPort: socket address 'SockAddrInet6' not supported on this platform." +#endif +#if defined(mingw32_HOST_OS) + SockAddrUnix {} -> ioError $ userError "Network.socketPort: socket address 'SockAddrUnix' not supported on this platform." +#else + SockAddrUnix path -> return $ UnixSocket path +#endif + SockAddrCan {} -> ioError $ userError "Network.socketPort: socket address 'SockAddrCan' not supported." + +-- --------------------------------------------------------------------------- +-- Utils + +-- Like bracket, but only performs the final action if there was an +-- exception raised by the middle bit. +bracketOnError + :: IO a -- ^ computation to run first (\"acquire resource\") + -> (a -> IO b) -- ^ computation to run last (\"release resource\") + -> (a -> IO c) -- ^ computation to run in-between + -> IO c -- returns the value from the in-between computation +bracketOnError = Exception.bracketOnError + +----------------------------------------------------------------------------- +-- Extra documentation + +{-$buffering + +The 'Handle' returned by 'connectTo' and 'accept' is 'NoBuffering' by +default. For an interactive application you may want to set the +buffering mode on the 'Handle' to +'LineBuffering' or 'BlockBuffering', like so: + +> h <- connectTo host port +> hSetBuffering h LineBuffering +-} + +{-$performance + +For really fast I\/O, it might be worth looking at the 'hGetBuf' and +'hPutBuf' family of functions in "System.IO". 
+-} + +catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a +#if MIN_VERSION_base(4,0,0) +catchIO = Exception.catch +#else +catchIO = Exception.catchJust Exception.ioErrors +#endif + +-- Version of try implemented in terms of the locally defined catchIO +tryIO :: IO a -> IO (Either Exception.IOException a) +tryIO m = catchIO (liftM Right m) (return . Left) + +-- Returns the first action from a list which does not throw an exception. +-- If all the actions throw exceptions (and the list of actions is not empty), +-- the last exception is thrown. +-- The operations are run outside of the catchIO cleanup handler because +-- catchIO masks asynchronous exceptions in the cleanup handler. +-- In the case of complete failure, the last exception is actually thrown. +firstSuccessful :: String -> [IO a] -> IO a +firstSuccessful caller = go Nothing + where + -- Attempt the next operation, remember exception on failure + go _ (p:ps) = + do r <- tryIO p + case r of + Right x -> return x + Left e -> go (Just e) ps + + -- All operations failed, throw error if one exists + go Nothing [] = ioError $ userError $ caller ++ ": firstSuccessful: empty list" + go (Just e) [] = Exception.throwIO e diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/README.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/README.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/README.md 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/README.md 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,19 @@ +# [`network`](http://hackage.haskell.org/package/network) [![Build Status](https://travis-ci.org/haskell/network.svg?branch=master)](https://travis-ci.org/haskell/network) [![Build status](https://ci.appveyor.com/api/projects/status/5erq63o4m29bhl57/branch/master?svg=true)](https://ci.appveyor.com/project/eborden/network/branch/master) + +To build this package using Cabal directly from git, you 
must run +`autoreconf` before the usual Cabal build steps +(configure/build/install). `autoreconf` is included in the +[GNU Autoconf](http://www.gnu.org/software/autoconf/) tools. There is +no need to run the `configure` script: the `setup configure` step will +do this for you. + +## Support Policy + +### GHC + +`network`'s GHC policy supports 3 [stable](https://downloads.haskell.org/~ghc/8.0.2/docs/html/users_guide/intro.html#ghc-version-numbering-policy) versions. The current stable +version and two previous stable versions are supported. + +### Hugs, JHC, UHC + +`network` does not officially support these compilers. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/Setup.hs 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,6 @@ +module Main (main) where + +import Distribution.Simple + +main :: IO () +main = defaultMainWithHooks autoconfUserHooks diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/tests/BadFileDescriptor.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/tests/BadFileDescriptor.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/tests/BadFileDescriptor.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/tests/BadFileDescriptor.hs 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,51 @@ +-- Test code for "threadWait: invalid argument (Bad file descriptor)" +-- See https://ghc.haskell.org/trac/ghc/ticket/14621 +-- See https://github.com/haskell/network/issues/287 +-- +-- % runghc BadFileDescriptor.hs +-- BadFileDescriptor.hs: threadWait: invalid argument (Bad file descriptor) +module Main where + +import Control.Concurrent (forkIO) +import Control.Monad (void, 
forever) +import Network.Socket hiding (recv) +import Network.Socket.ByteString (recv, sendAll) + +main :: IO () +main = do + let localhost = "localhost" + listenPort = "9876" + connectPort = "6789" + proxy localhost listenPort connectPort + +proxy :: HostName -> ServiceName -> ServiceName -> IO () +proxy localhost listenPort connectPort = do + fromClient <- serverSocket localhost listenPort + toServer <- clientSocket localhost connectPort + void $ forkIO $ relay toServer fromClient + relay fromClient toServer + +relay :: Socket -> Socket -> IO () +relay s1 s2 = forever $ do + payload <- recv s1 4096 + sendAll s2 payload + +serverSocket :: HostName -> ServiceName -> IO Socket +serverSocket host port = do + let hints = defaultHints { + addrFlags = [AI_PASSIVE] + , addrSocketType = Stream + } + addr:_ <- getAddrInfo (Just hints) (Just host) (Just port) + sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) + bind sock (addrAddress addr) + listen sock 1 + fst <$> accept sock + +clientSocket :: HostName -> ServiceName -> IO Socket +clientSocket host port = do + let hints = defaultHints { addrSocketType = Stream } + addr:_ <- getAddrInfo (Just hints) (Just host) (Just port) + sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) + connect sock (addrAddress addr) + return sock diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/tests/doctests.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/tests/doctests.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/tests/doctests.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/tests/doctests.hs 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,20 @@ +import Test.DocTest + +main :: IO () +main = doctest [ + "-i" + , "-idist/build" + , "-i." 
+ , "-idist/build/autogen" + , "-Idist/build/autogen" + , "-Idist/build" + , "-Iinclude" + , "-optP-include" + , "-optPdist/build/autogen/cabal_macros.h" + , "-DCALLCONV=ccall" + , "-XCPP" + , "-XDeriveDataTypeable" + , "-package-db dist/package.conf.inplace" + , "-package network" + , "Network" + ] diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/tests/RegressionSpec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/tests/RegressionSpec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/tests/RegressionSpec.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/tests/RegressionSpec.hs 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,29 @@ +{-# LANGUAGE OverloadedStrings #-} +-- | Tests for things that didn't work in the past. +module RegressionSpec (main, spec) where + +import Control.Monad +import Network.Socket hiding (send, sendTo, recv, recvFrom) +import Network.Socket.ByteString + +import Test.Hspec + +main :: IO () +main = hspec spec + +spec :: Spec +spec = do + describe "getAddrInfo" $ do + it "does not cause segfault on macOS 10.8.2 due to AI_NUMERICSERV" $ do + let hints = defaultHints { addrFlags = [AI_NUMERICSERV] } + void $ getAddrInfo (Just hints) (Just "localhost") Nothing + + describe "Network.Socket.ByteString.recv" $ do + it "checks -1 correctly on Windows" $ do + sock <- socket AF_INET Stream defaultProtocol + recv sock 1024 `shouldThrow` anyException + + describe "Network.Socket.ByteString.send" $ do + it "checks -1 correctly on Windows" $ do + sock <- socket AF_INET Stream defaultProtocol + send sock "hello world" `shouldThrow` anyException diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/tests/SimpleSpec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/tests/SimpleSpec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/tests/SimpleSpec.hs 1970-01-01 00:00:00.000000000 
+0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/tests/SimpleSpec.hs 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1,344 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} + +module SimpleSpec (main, spec) where + +import Control.Concurrent (ThreadId, forkIO, myThreadId) +import Control.Concurrent.MVar (newEmptyMVar, putMVar, takeMVar, readMVar) +import qualified Control.Exception as E +import Control.Monad +import Data.ByteString (ByteString) +import qualified Data.ByteString as S +import qualified Data.ByteString.Char8 as C +import qualified Data.ByteString.Lazy as L +import Network.Socket hiding (send, sendTo, recv, recvFrom) +import Network.Socket.ByteString +import qualified Network.Socket.ByteString.Lazy as Lazy +import System.Directory +import System.Timeout (timeout) + +import Test.Hspec + +main :: IO () +main = hspec spec + +spec :: Spec +spec = do + describe "send" $ do + it "works well" $ do + let server sock = recv sock 1024 `shouldReturn` testMsg + client sock = send sock testMsg + tcpTest client server + + describe "sendAll" $ do + it "works well" $ do + let server sock = recv sock 1024 `shouldReturn` testMsg + client sock = sendAll sock testMsg + tcpTest client server + + describe "Lazy.sendAll" $ do + it "works well" $ do + let server sock = recv sock 1024 `shouldReturn` testMsg + client sock = Lazy.sendAll sock $ L.fromChunks [testMsg] + tcpTest client server + + describe "sendTo" $ do + it "works well" $ do + let server sock = recv sock 1024 `shouldReturn` testMsg + client sock serverPort = do + let hints = defaultHints { addrFlags = [AI_NUMERICHOST], addrSocketType = Datagram } + addr:_ <- getAddrInfo (Just hints) (Just serverAddr) (Just $ show serverPort) + sendTo sock testMsg $ addrAddress addr + udpTest client server + + describe "sendAllTo" $ do + it "works well" $ do + let server sock = recv sock 1024 `shouldReturn` testMsg + client sock serverPort = do + let hints = 
defaultHints { addrFlags = [AI_NUMERICHOST], addrSocketType = Datagram } + addr:_ <- getAddrInfo (Just hints) (Just serverAddr) (Just $ show serverPort) + sendAllTo sock testMsg $ addrAddress addr + udpTest client server + + describe "sendMany" $ do + it "works well" $ do + let server sock = recv sock 1024 `shouldReturn` (S.append seg1 seg2) + client sock = sendMany sock [seg1, seg2] + + seg1 = C.pack "This is a " + seg2 = C.pack "test message." + tcpTest client server + + describe "sendManyTo" $ do + it "works well" $ do + let server sock = recv sock 1024 `shouldReturn` (S.append seg1 seg2) + client sock serverPort = do + let hints = defaultHints { addrFlags = [AI_NUMERICHOST], addrSocketType = Datagram } + addr:_ <- getAddrInfo (Just hints) (Just serverAddr) (Just $ show serverPort) + sendManyTo sock [seg1, seg2] $ addrAddress addr + + seg1 = C.pack "This is a " + seg2 = C.pack "test message." + udpTest client server + + describe "recv" $ do + it "works well" $ do + let server sock = recv sock 1024 `shouldReturn` testMsg + client sock = send sock testMsg + tcpTest client server + + it "can treat overflow" $ do + let server sock = do seg1 <- recv sock (S.length testMsg - 3) + seg2 <- recv sock 1024 + let msg = S.append seg1 seg2 + msg `shouldBe` testMsg + client sock = send sock testMsg + tcpTest client server + + it "returns empty string at EOF" $ do + let client s = recv s 4096 `shouldReturn` S.empty + server s = shutdown s ShutdownSend + tcpTest client server + + describe "recvFrom" $ do + it "works well" $ do + let server sock = do (msg, _) <- recvFrom sock 1024 + testMsg `shouldBe` msg + client sock = do + addr <- getPeerName sock + sendTo sock testMsg addr + tcpTest client server + it "can treat overflow" $ do + let server sock = do (seg1, _) <- recvFrom sock (S.length testMsg - 3) + (seg2, _) <- recvFrom sock 1024 + let msg = S.append seg1 seg2 + testMsg `shouldBe` msg + + client sock = send sock testMsg + tcpTest client server + + describe "UserTimeout" $ 
do + it "can be set" $ do + when (isSupportedSocketOption UserTimeout) $ do + sock <- socket AF_INET Stream defaultProtocol + setSocketOption sock UserTimeout 1000 + getSocketOption sock UserTimeout `shouldReturn` 1000 + setSocketOption sock UserTimeout 2000 + getSocketOption sock UserTimeout `shouldReturn` 2000 + close sock + + -- On various BSD systems the peer credentials are exchanged during + -- connect(), and this does not happen with `socketpair()`. Therefore, + -- we must actually set up a listener and connect, rather than use a + -- socketpair(). + -- + describe "getPeerCredential" $ do + it "can return something" $ do + when isUnixDomainSocketAvailable $ do + -- It would be useful to check that we did not get garbage + -- back, but rather the actual uid of the test program. For + -- that we'd need System.Posix.User, but that is not available + -- under Windows. For now, accept the risk that we did not get + -- the right answer. + -- + let client sock = do + (_, uid, _) <- getPeerCredential sock + uid `shouldNotBe` Nothing + server (sock, _) = do + (_, uid, _) <- getPeerCredential sock + uid `shouldNotBe` Nothing + unixTest client server + {- The below test fails on many *BSD systems, because the getsockopt() + call that underlies getpeereid() does not have the same meaning for + all address families, but the C-library was not checking that the + provided sock is an AF_UNIX socket. This will fixed some day, but + we should not fail on those systems in the mean-time. The upstream + C-library fix is to call getsockname() and check the address family + before calling `getpeereid()`. We could duplicate that in our own + code, and then this test would work on those platforms that have + `getpeereid()` and not the SO_PEERCRED socket option. 
+ + it "return nothing for non-UNIX-domain socket" $ do + when isUnixDomainSocketAvailable $ do + s <- socket AF_INET Stream defaultProtocol + cred1 <- getPeerCredential s + cred1 `shouldBe` (Nothing,Nothing,Nothing) + -} + + describe "getAddrInfo" $ do + it "works for IPv4 address" $ do + let hints = defaultHints { addrFlags = [AI_NUMERICHOST, AI_ADDRCONFIG] } + AddrInfo{addrAddress = (SockAddrInet _ hostAddr)}:_ <- + getAddrInfo (Just hints) (Just "127.128.129.130") Nothing + hostAddressToTuple hostAddr `shouldBe` (0x7f, 0x80, 0x81, 0x82) +#if defined(IPV6_SOCKET_SUPPORT) + it "works for IPv6 address" $ do + let hints = defaultHints { addrFlags = [AI_NUMERICHOST, AI_ADDRCONFIG] } + host = "2001:0db8:85a3:0000:0000:8a2e:0370:7334" + AddrInfo{addrAddress = (SockAddrInet6 _ _ hostAddr _)}:_ <- + getAddrInfo (Just hints) (Just host) Nothing + hostAddress6ToTuple hostAddr + `shouldBe` (0x2001, 0x0db8, 0x85a3, 0x0000, 0x0000, 0x8a2e, 0x0370, 0x7334) +#endif + +------------------------------------------------------------------------ + +serverAddr :: String +serverAddr = "127.0.0.1" + +testMsg :: ByteString +testMsg = "This is a test message." + +unixAddr :: String +unixAddr = "/tmp/network-test" + +------------------------------------------------------------------------ +-- Test helpers + +-- | Establish a connection between client and server and then run +-- 'clientAct' and 'serverAct', in different threads. Both actions +-- get passed a connected 'Socket', used for communicating between +-- client and server. 'unixTest' makes sure that the 'Socket' is +-- closed after the actions have run. 
+unixTest :: (Socket -> IO a) -> ((Socket, SockAddr) -> IO b) -> IO () +unixTest clientAct serverAct = do + test clientSetup clientAct serverSetup server + where + clientSetup = do + sock <- socket AF_UNIX Stream defaultProtocol + connect sock (SockAddrUnix unixAddr) + return sock + + serverSetup = do + sock <- socket AF_UNIX Stream defaultProtocol + unlink unixAddr -- just in case + bind sock (SockAddrUnix unixAddr) + listen sock 1 + return sock + + server sock = E.bracket (accept sock) (killClientSock . fst) serverAct + + unlink file = do + exist <- doesFileExist file + when exist $ removeFile file + + killClientSock sock = do + shutdown sock ShutdownBoth + close sock + unlink unixAddr + +-- | Establish a connection between client and server and then run +-- 'clientAct' and 'serverAct', in different threads. Both actions +-- get passed a connected 'Socket', used for communicating between +-- client and server. 'tcpTest' makes sure that the 'Socket' is +-- closed after the actions have run. 
+tcpTest :: (Socket -> IO a) -> (Socket -> IO b) -> IO () +tcpTest clientAct serverAct = do + portVar <- newEmptyMVar + test (clientSetup portVar) clientAct (serverSetup portVar) server + where + clientSetup portVar = do + let hints = defaultHints { addrSocketType = Stream } + serverPort <- readMVar portVar + addr:_ <- getAddrInfo (Just hints) (Just serverAddr) (Just $ show serverPort) + sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) +#if !defined(mingw32_HOST_OS) + let fd = fdSocket sock + getNonBlock fd `shouldReturn` True + getCloseOnExec fd `shouldReturn` False +#endif + connect sock $ addrAddress addr + return sock + + serverSetup portVar = do + let hints = defaultHints { + addrFlags = [AI_PASSIVE] + , addrSocketType = Stream + } + addr:_ <- getAddrInfo (Just hints) (Just serverAddr) Nothing + sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) + let fd = fdSocket sock +#if !defined(mingw32_HOST_OS) + getNonBlock fd `shouldReturn` True + getCloseOnExec fd `shouldReturn` False +#endif + setSocketOption sock ReuseAddr 1 + setCloseOnExecIfNeeded fd +#if !defined(mingw32_HOST_OS) + getCloseOnExec fd `shouldReturn` True +#endif + bind sock $ addrAddress addr + listen sock 1 + serverPort <- socketPort sock + putMVar portVar serverPort + return sock + + server sock = do + (clientSock, _) <- accept sock +#if !defined(mingw32_HOST_OS) + let fd = fdSocket clientSock + getNonBlock fd `shouldReturn` True + getCloseOnExec fd `shouldReturn` True +#endif + _ <- serverAct clientSock + close clientSock + +-- | Create an unconnected 'Socket' for sending UDP and receiving +-- datagrams and then run 'clientAct' and 'serverAct'. 
+udpTest :: (Socket -> PortNumber -> IO a) -> (Socket -> IO b) -> IO () +udpTest clientAct serverAct = do + portVar <- newEmptyMVar + test clientSetup (client portVar) (serverSetup portVar) serverAct + where + clientSetup = socket AF_INET Datagram defaultProtocol + + client portVar sock = do + serverPort <- readMVar portVar + clientAct sock serverPort + + serverSetup portVar = do + let hints = defaultHints { + addrFlags = [AI_PASSIVE] + , addrSocketType = Datagram + } + addr:_ <- getAddrInfo (Just hints) (Just serverAddr) Nothing + sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr) + setSocketOption sock ReuseAddr 1 + bind sock $ addrAddress addr + serverPort <- socketPort sock + putMVar portVar serverPort + return sock + +-- | Run a client/server pair and synchronize them so that the server +-- is started before the client and the specified server action is +-- finished before the client closes the 'Socket'. +test :: IO Socket -> (Socket -> IO b) -> IO Socket -> (Socket -> IO c) -> IO () +test clientSetup clientAct serverSetup serverAct = do + tid <- myThreadId + barrier <- newEmptyMVar + _ <- forkIO $ server barrier + client tid barrier + where + server barrier = do + E.bracket serverSetup close $ \sock -> do + serverReady + Just _ <- timeout 1000000 $ serverAct sock + putMVar barrier () + where + -- | Signal to the client that it can proceed. + serverReady = putMVar barrier () + + client tid barrier = do + takeMVar barrier + -- Transfer exceptions to the main thread. + bracketWithReraise tid clientSetup close $ \res -> do + Just _ <- timeout 1000000 $ clientAct res + takeMVar barrier + +-- | Like 'bracket' but catches and reraises the exception in another +-- thread, specified by the first argument. 
+bracketWithReraise :: ThreadId -> IO a -> (a -> IO b) -> (a -> IO ()) -> IO () +bracketWithReraise tid setup teardown thing = + E.bracket setup teardown thing + `E.catch` \ (e :: E.SomeException) -> E.throwTo tid e diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/tests/Spec.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/tests/Spec.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/network-2.8.0.0/tests/Spec.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/network-2.8.0.0/tests/Spec.hs 2018-09-04 22:14:52.000000000 +0000 @@ -0,0 +1 @@ +{-# OPTIONS_GHC -F -pgmF hspec-discover #-} diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/cbits/hs_resolv_config.h.in cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/cbits/hs_resolv_config.h.in --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/cbits/hs_resolv_config.h.in 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/cbits/hs_resolv_config.h.in 1970-01-01 00:00:00.000000000 +0000 @@ -1,72 +0,0 @@ -/* cbits/hs_resolv_config.h.in. Generated from configure.ac by autoheader. */ - -/* Define to 1 if you have the header file. */ -#undef HAVE_ARPA_NAMESER_H - -/* Define to 1 if you have the declaration of `res_nquery', and to 0 if you - don't. */ -#undef HAVE_DECL_RES_NQUERY - -/* Define to 1 if you have the declaration of `res_query', and to 0 if you - don't. */ -#undef HAVE_DECL_RES_QUERY - -/* Define to 1 if you have the header file. */ -#undef HAVE_INTTYPES_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_MEMORY_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_NETINET_IN_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_STDINT_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_STDLIB_H - -/* Define to 1 if you have the header file. 
*/ -#undef HAVE_STRINGS_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_STRING_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_SYS_STAT_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_SYS_TYPES_H - -/* Define to 1 if you have the header file. */ -#undef HAVE_UNISTD_H - -/* Define to the address where bug reports for this package should be sent. */ -#undef PACKAGE_BUGREPORT - -/* Define to the full name of this package. */ -#undef PACKAGE_NAME - -/* Define to the full name and version of this package. */ -#undef PACKAGE_STRING - -/* Define to the one symbol short name of this package. */ -#undef PACKAGE_TARNAME - -/* Define to the home page for this package. */ -#undef PACKAGE_URL - -/* Define to the version of this package. */ -#undef PACKAGE_VERSION - -/* The size of `struct __res_state', as computed by sizeof. */ -#undef SIZEOF_STRUCT___RES_STATE - -/* Define to 1 if you have the ANSI C header files. */ -#undef STDC_HEADERS - -/* Define to 1 in order to use res_nquery(3) API */ -#undef USE_RES_NQUERY - -/* Define to empty if `const' does not conform to ANSI C. 
*/ -#undef const diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/cbits/hs_resolv.h cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/cbits/hs_resolv.h --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/cbits/hs_resolv.h 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/cbits/hs_resolv.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,128 +0,0 @@ -#if !defined(HS_RESOLV_H) -#define HS_RESOLV_H - -#include "hs_resolv_config.h" - -#include - -#if defined(HAVE_NETINET_IN_H) -# include -#endif - -#if defined(HAVE_ARPA_NAMESER_H) -# include -#endif - -#include - -#include - -/* This is usually provided via */ -#if !defined(QUERY) -# define QUERY ns_o_query -#endif - -#if !defined(USE_RES_NQUERY) -# error USE_RES_NQUERY not defined -#endif - -#if USE_RES_NQUERY && (SIZEOF_STRUCT___RES_STATE <= 0) -# error broken invariant -#endif - -#if USE_RES_NQUERY - -inline static int -res_opt_set_use_dnssec(struct __res_state *s) -{ - assert(s); - - if (!(s->options & RES_INIT)) { - int rc = res_ninit(s); - if (rc) return rc; - } - - s->options |= RES_USE_DNSSEC | RES_USE_EDNS0; - - return 0; -} - -inline static int -hs_res_mkquery(struct __res_state *s, const char *dname, int class, int type, unsigned char *req, int reqlen0) -{ - assert(s); - - int reqlen = res_nmkquery(s, QUERY, dname, class, type, NULL, 0, NULL, req, reqlen0); - - assert(reqlen <= reqlen0); - - return reqlen; -} - -inline static int -hs_res_send(struct __res_state *s, const unsigned char *msg, int msglen, unsigned char *answer, int anslen) -{ - assert(s); - - return res_nsend(s, msg, msglen, answer, anslen); -} - -inline static int -hs_res_query(struct __res_state *s, const char *dname, int class, int type, unsigned char *answer, int anslen) -{ - assert(s); - - return res_nquery(s, dname, class, type, answer, anslen); -} - -#else - -/* use non-reentrant API */ - -inline static int -res_opt_set_use_dnssec(void *s) 
-{ - assert(!s); - - if (!(_res.options & RES_INIT)) { - int rc = res_init(); - if (rc) return rc; - } - - _res.options |= RES_USE_DNSSEC | RES_USE_EDNS0; - - return 0; -} - - -inline static int -hs_res_mkquery(void *s, const char *dname, int class, int type, unsigned char *req, int reqlen0) -{ - assert(!s); - - int reqlen = res_mkquery(QUERY, dname, class, type, NULL, 0, NULL, req, reqlen0); - - assert(reqlen <= reqlen0); - - return reqlen; -} - -inline static int -hs_res_send(void *s, const unsigned char *msg, int msglen, unsigned char *answer, int anslen) -{ - assert(!s); - - return res_send(msg, msglen, answer, anslen); -} - -inline static int -hs_res_query(void *s, const char *dname, int class, int type, unsigned char *answer, int anslen) -{ - assert(!s); - - return res_query(dname, class, type, answer, anslen); -} - -#endif - -#endif /* HS_RESOLV_H */ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/ChangeLog.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/ChangeLog.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/ChangeLog.md 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/ChangeLog.md 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -# Revision history for `resolv` - -## 0.1.1.1 - -* Improve Autoconf script - -## 0.1.1.0 - -* Use Autoconf to detect which library (if any) to link for `res_query(3)` -* Use reentrant `res_nquery(3)` API if available and signal via new `resIsReentrant :: Bool` constant -* Expose `DnsException` and `QR` - -## 0.1.0.0 - -* First version. Released on an unsuspecting world. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/configure cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/configure --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/configure 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/configure 1970-01-01 00:00:00.000000000 +0000 @@ -1,5099 +0,0 @@ -#! /bin/sh -# Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.69 for resolv 0.0. -# -# Report bugs to . -# -# -# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. -# -# -# This configure script is free software; the Free Software Foundation -# gives unlimited permission to copy, distribute and modify it. -## -------------------- ## -## M4sh Initialization. ## -## -------------------- ## - -# Be more Bourne compatible -DUALCASE=1; export DUALCASE # for MKS sh -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi - - -as_nl=' -' -export as_nl -# Printing a long string crashes Solaris 7 /usr/bin/printf. -as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo -# Prefer a ksh shell builtin over an external printf program on Solaris, -# but without wasting forks for bash or zsh. 
-if test -z "$BASH_VERSION$ZSH_VERSION" \ - && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='print -r --' - as_echo_n='print -rn --' -elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='printf %s\n' - as_echo_n='printf %s' -else - if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then - as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' - as_echo_n='/usr/ucb/echo -n' - else - as_echo_body='eval expr "X$1" : "X\\(.*\\)"' - as_echo_n_body='eval - arg=$1; - case $arg in #( - *"$as_nl"*) - expr "X$arg" : "X\\(.*\\)$as_nl"; - arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; - esac; - expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" - ' - export as_echo_n_body - as_echo_n='sh -c $as_echo_n_body as_echo' - fi - export as_echo_body - as_echo='sh -c $as_echo_body as_echo' -fi - -# The user is always right. -if test "${PATH_SEPARATOR+set}" != set; then - PATH_SEPARATOR=: - (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { - (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || - PATH_SEPARATOR=';' - } -fi - - -# IFS -# We need space, tab and new line, in precisely that order. Quoting is -# there to prevent editors from complaining about space-tab. -# (If _AS_PATH_WALK were called with IFS unset, it would disable word -# splitting by setting IFS to empty value.) -IFS=" "" $as_nl" - -# Find who we are. Look in the path if we contain no directory separator. -as_myself= -case $0 in #(( - *[\\/]* ) as_myself=$0 ;; - *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break - done -IFS=$as_save_IFS - - ;; -esac -# We did not find ourselves, most probably we were run as `sh COMMAND' -# in which case we are not to be found in the path. -if test "x$as_myself" = x; then - as_myself=$0 -fi -if test ! 
-f "$as_myself"; then - $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - exit 1 -fi - -# Unset variables that we do not need and which cause bugs (e.g. in -# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" -# suppresses any "Segmentation fault" message there. '((' could -# trigger a bug in pdksh 5.2.14. -for as_var in BASH_ENV ENV MAIL MAILPATH -do eval test x\${$as_var+set} = xset \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done -PS1='$ ' -PS2='> ' -PS4='+ ' - -# NLS nuisances. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# CDPATH. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - -# Use a proper internal environment variable to ensure we don't fall - # into an infinite loop, continuously re-executing ourselves. - if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then - _as_can_reexec=no; export _as_can_reexec; - # We cannot yet assume a decent shell, so we have to provide a -# neutralization value for shells without unset; and this also -# works around shells that cannot unset nonexistent variables. -# Preserve -v and -x to the replacement shell. -BASH_ENV=/dev/null -ENV=/dev/null -(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV -case $- in # (((( - *v*x* | *x*v* ) as_opts=-vx ;; - *v* ) as_opts=-v ;; - *x* ) as_opts=-x ;; - * ) as_opts= ;; -esac -exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} -# Admittedly, this is quite paranoid, since all the known shells bail -# out after a failed `exec'. -$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 -as_fn_exit 255 - fi - # We don't want this to propagate to other subprocesses. - { _as_can_reexec=; unset _as_can_reexec;} -if test "x$CONFIG_SHELL" = x; then - as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which - # is contrary to our usage. 
Disable this feature. - alias -g '\${1+\"\$@\"}'='\"\$@\"' - setopt NO_GLOB_SUBST -else - case \`(set -o) 2>/dev/null\` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi -" - as_required="as_fn_return () { (exit \$1); } -as_fn_success () { as_fn_return 0; } -as_fn_failure () { as_fn_return 1; } -as_fn_ret_success () { return 0; } -as_fn_ret_failure () { return 1; } - -exitcode=0 -as_fn_success || { exitcode=1; echo as_fn_success failed.; } -as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } -as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } -as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } -if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : - -else - exitcode=1; echo positional parameters were not saved. -fi -test x\$exitcode = x0 || exit 1 -test -x / || exit 1" - as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO - as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO - eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && - test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 -test \$(( 1 + 1 )) = 2 || exit 1" - if (eval "$as_required") 2>/dev/null; then : - as_have_required=yes -else - as_have_required=no -fi - if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : - -else - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -as_found=false -for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - as_found=: - case $as_dir in #( - /*) - for as_base in sh bash ksh sh5; do - # Try only shells that exist, to save several forks. 
- as_shell=$as_dir/$as_base - if { test -f "$as_shell" || test -f "$as_shell.exe"; } && - { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : - CONFIG_SHELL=$as_shell as_have_required=yes - if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : - break 2 -fi -fi - done;; - esac - as_found=false -done -$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && - { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : - CONFIG_SHELL=$SHELL as_have_required=yes -fi; } -IFS=$as_save_IFS - - - if test "x$CONFIG_SHELL" != x; then : - export CONFIG_SHELL - # We cannot yet assume a decent shell, so we have to provide a -# neutralization value for shells without unset; and this also -# works around shells that cannot unset nonexistent variables. -# Preserve -v and -x to the replacement shell. -BASH_ENV=/dev/null -ENV=/dev/null -(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV -case $- in # (((( - *v*x* | *x*v* ) as_opts=-vx ;; - *v* ) as_opts=-v ;; - *x* ) as_opts=-x ;; - * ) as_opts= ;; -esac -exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} -# Admittedly, this is quite paranoid, since all the known shells bail -# out after a failed `exec'. -$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 -exit 255 -fi - - if test x$as_have_required = xno; then : - $as_echo "$0: This script requires a shell more modern than all" - $as_echo "$0: the shells that I found on your system." - if test x${ZSH_VERSION+set} = xset ; then - $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" - $as_echo "$0: be upgraded to zsh 4.3.4 or later." - else - $as_echo "$0: Please tell bug-autoconf@gnu.org and hvr@gnu.org about -$0: your system, including any error possibly output before -$0: this message. Then install a modern shell, or manually -$0: run the script under such a shell if you do have one." 
- fi - exit 1 -fi -fi -fi -SHELL=${CONFIG_SHELL-/bin/sh} -export SHELL -# Unset more variables known to interfere with behavior of common tools. -CLICOLOR_FORCE= GREP_OPTIONS= -unset CLICOLOR_FORCE GREP_OPTIONS - -## --------------------- ## -## M4sh Shell Functions. ## -## --------------------- ## -# as_fn_unset VAR -# --------------- -# Portably unset VAR. -as_fn_unset () -{ - { eval $1=; unset $1;} -} -as_unset=as_fn_unset - -# as_fn_set_status STATUS -# ----------------------- -# Set $? to STATUS, without forking. -as_fn_set_status () -{ - return $1 -} # as_fn_set_status - -# as_fn_exit STATUS -# ----------------- -# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. -as_fn_exit () -{ - set +e - as_fn_set_status $1 - exit $1 -} # as_fn_exit - -# as_fn_mkdir_p -# ------------- -# Create "$as_dir" as a directory, including parents if necessary. -as_fn_mkdir_p () -{ - - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || eval $as_mkdir_p || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" - - -} # as_fn_mkdir_p - -# as_fn_executable_p FILE -# ----------------------- -# Test if FILE is an executable regular file. 
-as_fn_executable_p () -{ - test -f "$1" && test -x "$1" -} # as_fn_executable_p -# as_fn_append VAR VALUE -# ---------------------- -# Append the text in VALUE to the end of the definition contained in VAR. Take -# advantage of any shell optimizations that allow amortized linear growth over -# repeated appends, instead of the typical quadratic growth present in naive -# implementations. -if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : - eval 'as_fn_append () - { - eval $1+=\$2 - }' -else - as_fn_append () - { - eval $1=\$$1\$2 - } -fi # as_fn_append - -# as_fn_arith ARG... -# ------------------ -# Perform arithmetic evaluation on the ARGs, and store the result in the -# global $as_val. Take advantage of shells that can avoid forks. The arguments -# must be portable across $(()) and expr. -if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : - eval 'as_fn_arith () - { - as_val=$(( $* )) - }' -else - as_fn_arith () - { - as_val=`expr "$@" || test $? -eq 1` - } -fi # as_fn_arith - - -# as_fn_error STATUS ERROR [LINENO LOG_FD] -# ---------------------------------------- -# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are -# provided, also output the error to LOG_FD, referencing LINENO. Then exit the -# script with STATUS, using 1 if that was 0. 
-as_fn_error () -{ - as_status=$1; test $as_status -eq 0 && as_status=1 - if test "$4"; then - as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 - fi - $as_echo "$as_me: error: $2" >&2 - as_fn_exit $as_status -} # as_fn_error - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then - as_basename=basename -else - as_basename=false -fi - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -as_me=`$as_basename -- "$0" || -$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X/"$0" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - -# Avoid depending upon Character Ranges. -as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - - - as_lineno_1=$LINENO as_lineno_1a=$LINENO - as_lineno_2=$LINENO as_lineno_2a=$LINENO - eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && - test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { - # Blame Lee E. McMahon (1931-1989) for sed's syntax. 
:-) - sed -n ' - p - /[$]LINENO/= - ' <$as_myself | - sed ' - s/[$]LINENO.*/&-/ - t lineno - b - :lineno - N - :loop - s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ - t loop - s/-\n.*// - ' >$as_me.lineno && - chmod +x "$as_me.lineno" || - { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } - - # If we had to re-execute with $CONFIG_SHELL, we're ensured to have - # already done that, so ensure we don't try to do so again and fall - # in an infinite loop. This has already happened in practice. - _as_can_reexec=no; export _as_can_reexec - # Don't try to exec as it changes $[0], causing all sort of problems - # (the dirname of $[0] is not the place where we might find the - # original and so on. Autoconf is especially sensitive to this). - . "./$as_me.lineno" - # Exit status is that of the last command. - exit -} - -ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in #((((( --n*) - case `echo 'xy\c'` in - *c*) ECHO_T=' ';; # ECHO_T is single tab character. - xy) ECHO_C='\c';; - *) echo `echo ksh88 bug on AIX 6.1` > /dev/null - ECHO_T=' ';; - esac;; -*) - ECHO_N='-n';; -esac - -rm -f conf$$ conf$$.exe conf$$.file -if test -d conf$$.dir; then - rm -f conf$$.dir/conf$$.file -else - rm -f conf$$.dir - mkdir conf$$.dir 2>/dev/null -fi -if (echo >conf$$.file) 2>/dev/null; then - if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -pR'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -pR' - elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln - else - as_ln_s='cp -pR' - fi -else - as_ln_s='cp -pR' -fi -rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file -rmdir conf$$.dir 2>/dev/null - -if mkdir -p . 
2>/dev/null; then - as_mkdir_p='mkdir -p "$as_dir"' -else - test -d ./-p && rmdir ./-p - as_mkdir_p=false -fi - -as_test_x='test -x' -as_executable_p=as_fn_executable_p - -# Sed expression to map a string onto a valid CPP name. -as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - -# Sed expression to map a string onto a valid variable name. -as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - - -test -n "$DJDIR" || exec 7<&0 &1 - -# Name of the host. -# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, -# so uname gets run too. -ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` - -# -# Initializations. -# -ac_default_prefix=/usr/local -ac_clean_files= -ac_config_libobj_dir=. -LIBOBJS= -cross_compiling=no -subdirs= -MFLAGS= -MAKEFLAGS= - -# Identity of this package. -PACKAGE_NAME='resolv' -PACKAGE_TARNAME='resolv' -PACKAGE_VERSION='0.0' -PACKAGE_STRING='resolv 0.0' -PACKAGE_BUGREPORT='hvr@gnu.org' -PACKAGE_URL='' - -ac_unique_file="resolv.cabal" -# Factoring default headers for most tests. 
-ac_includes_default="\ -#include -#ifdef HAVE_SYS_TYPES_H -# include -#endif -#ifdef HAVE_SYS_STAT_H -# include -#endif -#ifdef STDC_HEADERS -# include -# include -#else -# ifdef HAVE_STDLIB_H -# include -# endif -#endif -#ifdef HAVE_STRING_H -# if !defined STDC_HEADERS && defined HAVE_MEMORY_H -# include -# endif -# include -#endif -#ifdef HAVE_STRINGS_H -# include -#endif -#ifdef HAVE_INTTYPES_H -# include -#endif -#ifdef HAVE_STDINT_H -# include -#endif -#ifdef HAVE_UNISTD_H -# include -#endif" - -ac_subst_vars='LTLIBOBJS -LIBOBJS -EXTRA_LIBS -CPP_OPTIONS -EGREP -GREP -CPP -OBJEXT -EXEEXT -ac_ct_CC -CPPFLAGS -LDFLAGS -CFLAGS -CC -target_alias -host_alias -build_alias -LIBS -ECHO_T -ECHO_N -ECHO_C -DEFS -mandir -localedir -libdir -psdir -pdfdir -dvidir -htmldir -infodir -docdir -oldincludedir -includedir -runstatedir -localstatedir -sharedstatedir -sysconfdir -datadir -datarootdir -libexecdir -sbindir -bindir -program_transform_name -prefix -exec_prefix -PACKAGE_URL -PACKAGE_BUGREPORT -PACKAGE_STRING -PACKAGE_VERSION -PACKAGE_TARNAME -PACKAGE_NAME -PATH_SEPARATOR -SHELL' -ac_subst_files='' -ac_user_opts=' -enable_option_checking -' - ac_precious_vars='build_alias -host_alias -target_alias -CC -CFLAGS -LDFLAGS -LIBS -CPPFLAGS -CPP' - - -# Initialize some variables set by options. -ac_init_help= -ac_init_version=false -ac_unrecognized_opts= -ac_unrecognized_sep= -# The variables have the same names as the options, with -# dashes changed to underlines. -cache_file=/dev/null -exec_prefix=NONE -no_create= -no_recursion= -prefix=NONE -program_prefix=NONE -program_suffix=NONE -program_transform_name=s,x,x, -silent= -site= -srcdir= -verbose= -x_includes=NONE -x_libraries=NONE - -# Installation directory options. -# These are left unexpanded so users can "make install exec_prefix=/foo" -# and all the variables that are supposed to be based on exec_prefix -# by default will actually change. -# Use braces instead of parens because sh, perl, etc. also accept them. 
-# (The list follows the same order as the GNU Coding Standards.) -bindir='${exec_prefix}/bin' -sbindir='${exec_prefix}/sbin' -libexecdir='${exec_prefix}/libexec' -datarootdir='${prefix}/share' -datadir='${datarootdir}' -sysconfdir='${prefix}/etc' -sharedstatedir='${prefix}/com' -localstatedir='${prefix}/var' -runstatedir='${localstatedir}/run' -includedir='${prefix}/include' -oldincludedir='/usr/include' -docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' -infodir='${datarootdir}/info' -htmldir='${docdir}' -dvidir='${docdir}' -pdfdir='${docdir}' -psdir='${docdir}' -libdir='${exec_prefix}/lib' -localedir='${datarootdir}/locale' -mandir='${datarootdir}/man' - -ac_prev= -ac_dashdash= -for ac_option -do - # If the previous option needs an argument, assign it. - if test -n "$ac_prev"; then - eval $ac_prev=\$ac_option - ac_prev= - continue - fi - - case $ac_option in - *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; - *=) ac_optarg= ;; - *) ac_optarg=yes ;; - esac - - # Accept the important Cygnus configure options, so we can diagnose typos. 
- - case $ac_dashdash$ac_option in - --) - ac_dashdash=yes ;; - - -bindir | --bindir | --bindi | --bind | --bin | --bi) - ac_prev=bindir ;; - -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) - bindir=$ac_optarg ;; - - -build | --build | --buil | --bui | --bu) - ac_prev=build_alias ;; - -build=* | --build=* | --buil=* | --bui=* | --bu=*) - build_alias=$ac_optarg ;; - - -cache-file | --cache-file | --cache-fil | --cache-fi \ - | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) - ac_prev=cache_file ;; - -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ - | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) - cache_file=$ac_optarg ;; - - --config-cache | -C) - cache_file=config.cache ;; - - -datadir | --datadir | --datadi | --datad) - ac_prev=datadir ;; - -datadir=* | --datadir=* | --datadi=* | --datad=*) - datadir=$ac_optarg ;; - - -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ - | --dataroo | --dataro | --datar) - ac_prev=datarootdir ;; - -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ - | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) - datarootdir=$ac_optarg ;; - - -disable-* | --disable-*) - ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? 
"invalid feature name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"enable_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval enable_$ac_useropt=no ;; - - -docdir | --docdir | --docdi | --doc | --do) - ac_prev=docdir ;; - -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) - docdir=$ac_optarg ;; - - -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) - ac_prev=dvidir ;; - -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) - dvidir=$ac_optarg ;; - - -enable-* | --enable-*) - ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid feature name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"enable_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval enable_$ac_useropt=\$ac_optarg ;; - - -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ - | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ - | --exec | --exe | --ex) - ac_prev=exec_prefix ;; - -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ - | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ - | --exec=* | --exe=* | --ex=*) - exec_prefix=$ac_optarg ;; - - -gas | --gas | --ga | --g) - # Obsolete; use --with-gas. 
- with_gas=yes ;; - - -help | --help | --hel | --he | -h) - ac_init_help=long ;; - -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) - ac_init_help=recursive ;; - -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) - ac_init_help=short ;; - - -host | --host | --hos | --ho) - ac_prev=host_alias ;; - -host=* | --host=* | --hos=* | --ho=*) - host_alias=$ac_optarg ;; - - -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) - ac_prev=htmldir ;; - -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ - | --ht=*) - htmldir=$ac_optarg ;; - - -includedir | --includedir | --includedi | --included | --include \ - | --includ | --inclu | --incl | --inc) - ac_prev=includedir ;; - -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ - | --includ=* | --inclu=* | --incl=* | --inc=*) - includedir=$ac_optarg ;; - - -infodir | --infodir | --infodi | --infod | --info | --inf) - ac_prev=infodir ;; - -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) - infodir=$ac_optarg ;; - - -libdir | --libdir | --libdi | --libd) - ac_prev=libdir ;; - -libdir=* | --libdir=* | --libdi=* | --libd=*) - libdir=$ac_optarg ;; - - -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ - | --libexe | --libex | --libe) - ac_prev=libexecdir ;; - -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ - | --libexe=* | --libex=* | --libe=*) - libexecdir=$ac_optarg ;; - - -localedir | --localedir | --localedi | --localed | --locale) - ac_prev=localedir ;; - -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) - localedir=$ac_optarg ;; - - -localstatedir | --localstatedir | --localstatedi | --localstated \ - | --localstate | --localstat | --localsta | --localst | --locals) - ac_prev=localstatedir ;; - -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ - | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) - localstatedir=$ac_optarg ;; - - 
-mandir | --mandir | --mandi | --mand | --man | --ma | --m) - ac_prev=mandir ;; - -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) - mandir=$ac_optarg ;; - - -nfp | --nfp | --nf) - # Obsolete; use --without-fp. - with_fp=no ;; - - -no-create | --no-create | --no-creat | --no-crea | --no-cre \ - | --no-cr | --no-c | -n) - no_create=yes ;; - - -no-recursion | --no-recursion | --no-recursio | --no-recursi \ - | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) - no_recursion=yes ;; - - -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ - | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ - | --oldin | --oldi | --old | --ol | --o) - ac_prev=oldincludedir ;; - -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ - | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ - | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) - oldincludedir=$ac_optarg ;; - - -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) - ac_prev=prefix ;; - -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) - prefix=$ac_optarg ;; - - -program-prefix | --program-prefix | --program-prefi | --program-pref \ - | --program-pre | --program-pr | --program-p) - ac_prev=program_prefix ;; - -program-prefix=* | --program-prefix=* | --program-prefi=* \ - | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) - program_prefix=$ac_optarg ;; - - -program-suffix | --program-suffix | --program-suffi | --program-suff \ - | --program-suf | --program-su | --program-s) - ac_prev=program_suffix ;; - -program-suffix=* | --program-suffix=* | --program-suffi=* \ - | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) - program_suffix=$ac_optarg ;; - - -program-transform-name | --program-transform-name \ - | --program-transform-nam | --program-transform-na \ - | --program-transform-n | --program-transform- \ - | --program-transform | 
--program-transfor \ - | --program-transfo | --program-transf \ - | --program-trans | --program-tran \ - | --progr-tra | --program-tr | --program-t) - ac_prev=program_transform_name ;; - -program-transform-name=* | --program-transform-name=* \ - | --program-transform-nam=* | --program-transform-na=* \ - | --program-transform-n=* | --program-transform-=* \ - | --program-transform=* | --program-transfor=* \ - | --program-transfo=* | --program-transf=* \ - | --program-trans=* | --program-tran=* \ - | --progr-tra=* | --program-tr=* | --program-t=*) - program_transform_name=$ac_optarg ;; - - -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) - ac_prev=pdfdir ;; - -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) - pdfdir=$ac_optarg ;; - - -psdir | --psdir | --psdi | --psd | --ps) - ac_prev=psdir ;; - -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) - psdir=$ac_optarg ;; - - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil) - silent=yes ;; - - -runstatedir | --runstatedir | --runstatedi | --runstated \ - | --runstate | --runstat | --runsta | --runst | --runs \ - | --run | --ru | --r) - ac_prev=runstatedir ;; - -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ - | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ - | --run=* | --ru=* | --r=*) - runstatedir=$ac_optarg ;; - - -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) - ac_prev=sbindir ;; - -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ - | --sbi=* | --sb=*) - sbindir=$ac_optarg ;; - - -sharedstatedir | --sharedstatedir | --sharedstatedi \ - | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ - | --sharedst | --shareds | --shared | --share | --shar \ - | --sha | --sh) - ac_prev=sharedstatedir ;; - -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ - | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ - | --sharedst=* | --shareds=* | 
--shared=* | --share=* | --shar=* \ - | --sha=* | --sh=*) - sharedstatedir=$ac_optarg ;; - - -site | --site | --sit) - ac_prev=site ;; - -site=* | --site=* | --sit=*) - site=$ac_optarg ;; - - -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) - ac_prev=srcdir ;; - -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) - srcdir=$ac_optarg ;; - - -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ - | --syscon | --sysco | --sysc | --sys | --sy) - ac_prev=sysconfdir ;; - -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ - | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) - sysconfdir=$ac_optarg ;; - - -target | --target | --targe | --targ | --tar | --ta | --t) - ac_prev=target_alias ;; - -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) - target_alias=$ac_optarg ;; - - -v | -verbose | --verbose | --verbos | --verbo | --verb) - verbose=yes ;; - - -version | --version | --versio | --versi | --vers | -V) - ac_init_version=: ;; - - -with-* | --with-*) - ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid package name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"with_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval with_$ac_useropt=\$ac_optarg ;; - - -without-* | --without-*) - ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? 
"invalid package name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"with_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval with_$ac_useropt=no ;; - - --x) - # Obsolete; use --with-x. - with_x=yes ;; - - -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ - | --x-incl | --x-inc | --x-in | --x-i) - ac_prev=x_includes ;; - -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ - | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) - x_includes=$ac_optarg ;; - - -x-libraries | --x-libraries | --x-librarie | --x-librari \ - | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) - ac_prev=x_libraries ;; - -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ - | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) - x_libraries=$ac_optarg ;; - - -*) as_fn_error $? "unrecognized option: \`$ac_option' -Try \`$0 --help' for more information" - ;; - - *=*) - ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` - # Reject names that are not valid shell variable names. - case $ac_envvar in #( - '' | [0-9]* | *[!_$as_cr_alnum]* ) - as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; - esac - eval $ac_envvar=\$ac_optarg - export $ac_envvar ;; - - *) - # FIXME: should be removed in autoconf 3.0. - $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 - expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && - $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 - : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" - ;; - - esac -done - -if test -n "$ac_prev"; then - ac_option=--`echo $ac_prev | sed 's/_/-/g'` - as_fn_error $? 
"missing argument to $ac_option" -fi - -if test -n "$ac_unrecognized_opts"; then - case $enable_option_checking in - no) ;; - fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; - *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; - esac -fi - -# Check all directory arguments for consistency. -for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ - datadir sysconfdir sharedstatedir localstatedir includedir \ - oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ - libdir localedir mandir runstatedir -do - eval ac_val=\$$ac_var - # Remove trailing slashes. - case $ac_val in - */ ) - ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` - eval $ac_var=\$ac_val;; - esac - # Be sure to have absolute directory names. - case $ac_val in - [\\/$]* | ?:[\\/]* ) continue;; - NONE | '' ) case $ac_var in *prefix ) continue;; esac;; - esac - as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" -done - -# There might be people who depend on the old broken behavior: `$host' -# used to hold the argument of --host etc. -# FIXME: To remove some day. -build=$build_alias -host=$host_alias -target=$target_alias - -# FIXME: To remove some day. -if test "x$host_alias" != x; then - if test "x$build_alias" = x; then - cross_compiling=maybe - elif test "x$build_alias" != "x$host_alias"; then - cross_compiling=yes - fi -fi - -ac_tool_prefix= -test -n "$host_alias" && ac_tool_prefix=$host_alias- - -test "$silent" = yes && exec 6>/dev/null - - -ac_pwd=`pwd` && test -n "$ac_pwd" && -ac_ls_di=`ls -di .` && -ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || - as_fn_error $? "working directory cannot be determined" -test "X$ac_ls_di" = "X$ac_pwd_ls_di" || - as_fn_error $? "pwd does not report name of working directory" - - -# Find the source files, if location was not specified. 
-if test -z "$srcdir"; then - ac_srcdir_defaulted=yes - # Try the directory containing this script, then the parent directory. - ac_confdir=`$as_dirname -- "$as_myself" || -$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_myself" : 'X\(//\)[^/]' \| \ - X"$as_myself" : 'X\(//\)$' \| \ - X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_myself" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - srcdir=$ac_confdir - if test ! -r "$srcdir/$ac_unique_file"; then - srcdir=.. - fi -else - ac_srcdir_defaulted=no -fi -if test ! -r "$srcdir/$ac_unique_file"; then - test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." - as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" -fi -ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" -ac_abs_confdir=`( - cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" - pwd)` -# When building in place, set srcdir=. -if test "$ac_abs_confdir" = "$ac_pwd"; then - srcdir=. -fi -# Remove unnecessary trailing slashes from srcdir. -# Double slashes in file names in object file debugging info -# mess up M-x gdb in Emacs. -case $srcdir in -*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; -esac -for ac_var in $ac_precious_vars; do - eval ac_env_${ac_var}_set=\${${ac_var}+set} - eval ac_env_${ac_var}_value=\$${ac_var} - eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} - eval ac_cv_env_${ac_var}_value=\$${ac_var} -done - -# -# Report the --help message. -# -if test "$ac_init_help" = "long"; then - # Omit some internal or obsolete options to make the list less imposing. - # This message is too long to be a string in the A/UX 3.1 sh. - cat <<_ACEOF -\`configure' configures resolv 0.0 to adapt to many kinds of systems. - -Usage: $0 [OPTION]... [VAR=VALUE]... 
- -To assign environment variables (e.g., CC, CFLAGS...), specify them as -VAR=VALUE. See below for descriptions of some of the useful variables. - -Defaults for the options are specified in brackets. - -Configuration: - -h, --help display this help and exit - --help=short display options specific to this package - --help=recursive display the short help of all the included packages - -V, --version display version information and exit - -q, --quiet, --silent do not print \`checking ...' messages - --cache-file=FILE cache test results in FILE [disabled] - -C, --config-cache alias for \`--cache-file=config.cache' - -n, --no-create do not create output files - --srcdir=DIR find the sources in DIR [configure dir or \`..'] - -Installation directories: - --prefix=PREFIX install architecture-independent files in PREFIX - [$ac_default_prefix] - --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX - [PREFIX] - -By default, \`make install' will install all the files in -\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify -an installation prefix other than \`$ac_default_prefix' using \`--prefix', -for instance \`--prefix=\$HOME'. - -For better control, use the options below. 
- -Fine tuning of the installation directories: - --bindir=DIR user executables [EPREFIX/bin] - --sbindir=DIR system admin executables [EPREFIX/sbin] - --libexecdir=DIR program executables [EPREFIX/libexec] - --sysconfdir=DIR read-only single-machine data [PREFIX/etc] - --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] - --localstatedir=DIR modifiable single-machine data [PREFIX/var] - --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run] - --libdir=DIR object code libraries [EPREFIX/lib] - --includedir=DIR C header files [PREFIX/include] - --oldincludedir=DIR C header files for non-gcc [/usr/include] - --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] - --datadir=DIR read-only architecture-independent data [DATAROOTDIR] - --infodir=DIR info documentation [DATAROOTDIR/info] - --localedir=DIR locale-dependent data [DATAROOTDIR/locale] - --mandir=DIR man documentation [DATAROOTDIR/man] - --docdir=DIR documentation root [DATAROOTDIR/doc/resolv] - --htmldir=DIR html documentation [DOCDIR] - --dvidir=DIR dvi documentation [DOCDIR] - --pdfdir=DIR pdf documentation [DOCDIR] - --psdir=DIR ps documentation [DOCDIR] -_ACEOF - - cat <<\_ACEOF -_ACEOF -fi - -if test -n "$ac_init_help"; then - case $ac_init_help in - short | recursive ) echo "Configuration of resolv 0.0:";; - esac - cat <<\_ACEOF - -Some influential environment variables: - CC C compiler command - CFLAGS C compiler flags - LDFLAGS linker flags, e.g. -L if you have libraries in a - nonstandard directory - LIBS libraries to pass to the linker, e.g. -l - CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I if - you have headers in a nonstandard directory - CPP C preprocessor - -Use these variables to override the choices made by `configure' or to help -it to find libraries and programs with nonstandard names/locations. - -Report bugs to . -_ACEOF -ac_status=$? 
-fi - -if test "$ac_init_help" = "recursive"; then - # If there are subdirs, report their specific --help. - for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue - test -d "$ac_dir" || - { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || - continue - ac_builddir=. - -case "$ac_dir" in -.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; -*) - ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` - # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` - case $ac_top_builddir_sub in - "") ac_top_builddir_sub=. ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix - -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. - ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; -esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix - - cd "$ac_dir" || { ac_status=$?; continue; } - # Check for guested configure. - if test -f "$ac_srcdir/configure.gnu"; then - echo && - $SHELL "$ac_srcdir/configure.gnu" --help=recursive - elif test -f "$ac_srcdir/configure"; then - echo && - $SHELL "$ac_srcdir/configure" --help=recursive - else - $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 - fi || ac_status=$? - cd "$ac_pwd" || { ac_status=$?; break; } - done -fi - -test -n "$ac_init_help" && exit $ac_status -if $ac_init_version; then - cat <<\_ACEOF -resolv configure 0.0 -generated by GNU Autoconf 2.69 - -Copyright (C) 2012 Free Software Foundation, Inc. 
-This configure script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it. -_ACEOF - exit -fi - -## ------------------------ ## -## Autoconf initialization. ## -## ------------------------ ## - -# ac_fn_c_try_compile LINENO -# -------------------------- -# Try to compile conftest.$ac_ext, and return whether this succeeded. -ac_fn_c_try_compile () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext - if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_compile - -# ac_fn_c_try_cpp LINENO -# ---------------------- -# Try to preprocess conftest.$ac_ext, and return whether this succeeded. -ac_fn_c_try_cpp () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - if { { ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err - ac_status=$? 
- if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } > conftest.i && { - test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || - test ! -s conftest.err - }; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_cpp - -# ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES -# ------------------------------------------------------- -# Tests whether HEADER exists, giving a warning if it cannot be compiled using -# the include files in INCLUDES and setting the cache variable VAR -# accordingly. -ac_fn_c_check_header_mongrel () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - if eval \${$3+:} false; then : - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } -else - # Is the header compilable? -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 -$as_echo_n "checking $2 usability... " >&6; } -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$4 -#include <$2> -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_header_compiler=yes -else - ac_header_compiler=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 -$as_echo "$ac_header_compiler" >&6; } - -# Is the header present? -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 -$as_echo_n "checking $2 presence... 
" >&6; } -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include <$2> -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - ac_header_preproc=yes -else - ac_header_preproc=no -fi -rm -f conftest.err conftest.i conftest.$ac_ext -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 -$as_echo "$ac_header_preproc" >&6; } - -# So? What about this header? -case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #(( - yes:no: ) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 -$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 -$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} - ;; - no:yes:* ) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 -$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 -$as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" 
>&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 -$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 -$as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 -$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} -( $as_echo "## -------------------------- ## -## Report this to hvr@gnu.org ## -## -------------------------- ##" - ) | sed "s/^/$as_me: WARNING: /" >&2 - ;; -esac - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - eval "$3=\$ac_header_compiler" -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_header_mongrel - -# ac_fn_c_try_run LINENO -# ---------------------- -# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes -# that executables *can* be run. -ac_fn_c_try_run () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' - { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; }; then : - ac_retval=0 -else - $as_echo "$as_me: program exited with status $ac_status" >&5 - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=$ac_status -fi - rm -rf conftest.dSYM conftest_ipa8_conftest.oo - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_run - -# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES -# ------------------------------------------------------- -# Tests whether HEADER exists and can be compiled using the include files in -# INCLUDES, setting the cache variable VAR accordingly. -ac_fn_c_check_header_compile () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$4 -#include <$2> -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - eval "$3=yes" -else - eval "$3=no" -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_header_compile - -# ac_fn_c_check_decl LINENO SYMBOL VAR INCLUDES -# --------------------------------------------- -# Tests whether SYMBOL is declared in INCLUDES, setting cache variable VAR -# accordingly. 
-ac_fn_c_check_decl () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - as_decl_name=`echo $2|sed 's/ *(.*//'` - as_decl_use=`echo $2|sed -e 's/(/((/' -e 's/)/) 0&/' -e 's/,/) 0& (/g'` - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $as_decl_name is declared" >&5 -$as_echo_n "checking whether $as_decl_name is declared... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$4 -int -main () -{ -#ifndef $as_decl_name -#ifdef __cplusplus - (void) $as_decl_use; -#else - (void) $as_decl_name; -#endif -#endif - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - eval "$3=yes" -else - eval "$3=no" -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_decl - -# ac_fn_c_try_link LINENO -# ----------------------- -# Try to link conftest.$ac_ext, and return whether this succeeded. -ac_fn_c_try_link () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext conftest$ac_exeext - if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_c_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest$ac_exeext && { - test "$cross_compiling" = yes || - test -x conftest$ac_exeext - }; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information - # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would - # interfere with the next link command; also delete a directory that is - # left behind by Apple's compiler. We do this before executing the actions. - rm -rf conftest.dSYM conftest_ipa8_conftest.oo - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_link - -# ac_fn_c_compute_int LINENO EXPR VAR INCLUDES -# -------------------------------------------- -# Tries to find the compile-time value of EXPR in a program that includes -# INCLUDES, setting VAR accordingly. Returns whether the value could be -# computed -ac_fn_c_compute_int () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - if test "$cross_compiling" = yes; then - # Depending upon the size, compute the lo and hi bounds. -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$4 -int -main () -{ -static int test_array [1 - 2 * !(($2) >= 0)]; -test_array [0] = 0; -return test_array [0]; - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_lo=0 ac_mid=0 - while :; do - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ -$4 -int -main () -{ -static int test_array [1 - 2 * !(($2) <= $ac_mid)]; -test_array [0] = 0; -return test_array [0]; - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_hi=$ac_mid; break -else - as_fn_arith $ac_mid + 1 && ac_lo=$as_val - if test $ac_lo -le $ac_mid; then - ac_lo= ac_hi= - break - fi - as_fn_arith 2 '*' $ac_mid + 1 && ac_mid=$as_val -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - done -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$4 -int -main () -{ -static int test_array [1 - 2 * !(($2) < 0)]; -test_array [0] = 0; -return test_array [0]; - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_hi=-1 ac_mid=-1 - while :; do - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$4 -int -main () -{ -static int test_array [1 - 2 * !(($2) >= $ac_mid)]; -test_array [0] = 0; -return test_array [0]; - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_lo=$ac_mid; break -else - as_fn_arith '(' $ac_mid ')' - 1 && ac_hi=$as_val - if test $ac_mid -le $ac_hi; then - ac_lo= ac_hi= - break - fi - as_fn_arith 2 '*' $ac_mid && ac_mid=$as_val -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - done -else - ac_lo= ac_hi= -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -# Binary search between lo and hi bounds. -while test "x$ac_lo" != "x$ac_hi"; do - as_fn_arith '(' $ac_hi - $ac_lo ')' / 2 + $ac_lo && ac_mid=$as_val - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ -$4 -int -main () -{ -static int test_array [1 - 2 * !(($2) <= $ac_mid)]; -test_array [0] = 0; -return test_array [0]; - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_hi=$ac_mid -else - as_fn_arith '(' $ac_mid ')' + 1 && ac_lo=$as_val -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -done -case $ac_lo in #(( -?*) eval "$3=\$ac_lo"; ac_retval=0 ;; -'') ac_retval=1 ;; -esac - else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$4 -static long int longval () { return $2; } -static unsigned long int ulongval () { return $2; } -#include -#include -int -main () -{ - - FILE *f = fopen ("conftest.val", "w"); - if (! f) - return 1; - if (($2) < 0) - { - long int i = longval (); - if (i != ($2)) - return 1; - fprintf (f, "%ld", i); - } - else - { - unsigned long int i = ulongval (); - if (i != ($2)) - return 1; - fprintf (f, "%lu", i); - } - /* Do not output a trailing newline, as this causes \r\n confusion - on some platforms. */ - return ferror (f) || fclose (f) != 0; - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_run "$LINENO"; then : - echo >>conftest.val; read $3 config.log <<_ACEOF -This file contains any messages produced by compilers while -running configure, to aid debugging if configure makes a mistake. - -It was created by resolv $as_me 0.0, which was -generated by GNU Autoconf 2.69. Invocation command line was - - $ $0 $@ - -_ACEOF -exec 5>>config.log -{ -cat <<_ASUNAME -## --------- ## -## Platform. 
## -## --------- ## - -hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` -uname -m = `(uname -m) 2>/dev/null || echo unknown` -uname -r = `(uname -r) 2>/dev/null || echo unknown` -uname -s = `(uname -s) 2>/dev/null || echo unknown` -uname -v = `(uname -v) 2>/dev/null || echo unknown` - -/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` -/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` - -/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` -/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` -/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` -/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` -/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` -/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` -/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` - -_ASUNAME - -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - $as_echo "PATH: $as_dir" - done -IFS=$as_save_IFS - -} >&5 - -cat >&5 <<_ACEOF - - -## ----------- ## -## Core tests. ## -## ----------- ## - -_ACEOF - - -# Keep a trace of the command line. -# Strip out --no-create and --no-recursion so they do not pile up. -# Strip out --silent because we don't want to record it for future runs. -# Also quote any args containing shell meta-characters. -# Make two passes to allow for proper duplicate-argument suppression. 
-ac_configure_args= -ac_configure_args0= -ac_configure_args1= -ac_must_keep_next=false -for ac_pass in 1 2 -do - for ac_arg - do - case $ac_arg in - -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil) - continue ;; - *\'*) - ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; - esac - case $ac_pass in - 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; - 2) - as_fn_append ac_configure_args1 " '$ac_arg'" - if test $ac_must_keep_next = true; then - ac_must_keep_next=false # Got value, back to normal. - else - case $ac_arg in - *=* | --config-cache | -C | -disable-* | --disable-* \ - | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ - | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ - | -with-* | --with-* | -without-* | --without-* | --x) - case "$ac_configure_args0 " in - "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; - esac - ;; - -* ) ac_must_keep_next=true ;; - esac - fi - as_fn_append ac_configure_args " '$ac_arg'" - ;; - esac - done -done -{ ac_configure_args0=; unset ac_configure_args0;} -{ ac_configure_args1=; unset ac_configure_args1;} - -# When interrupted or exit'd, cleanup temporary files, and complete -# config.log. We remove comments because anyway the quotes in there -# would cause problems or look ugly. -# WARNING: Use '\'' to represent an apostrophe within the trap. -# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. -trap 'exit_status=$? - # Save into config.log some information that might help in debugging. - { - echo - - $as_echo "## ---------------- ## -## Cache variables. 
## -## ---------------- ##" - echo - # The following way of writing the cache mishandles newlines in values, -( - for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do - eval ac_val=\$$ac_var - case $ac_val in #( - *${as_nl}*) - case $ac_var in #( - *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 -$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; - esac - case $ac_var in #( - _ | IFS | as_nl) ;; #( - BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( - *) { eval $ac_var=; unset $ac_var;} ;; - esac ;; - esac - done - (set) 2>&1 | - case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( - *${as_nl}ac_space=\ *) - sed -n \ - "s/'\''/'\''\\\\'\'''\''/g; - s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" - ;; #( - *) - sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" - ;; - esac | - sort -) - echo - - $as_echo "## ----------------- ## -## Output variables. ## -## ----------------- ##" - echo - for ac_var in $ac_subst_vars - do - eval ac_val=\$$ac_var - case $ac_val in - *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; - esac - $as_echo "$ac_var='\''$ac_val'\''" - done | sort - echo - - if test -n "$ac_subst_files"; then - $as_echo "## ------------------- ## -## File substitutions. ## -## ------------------- ##" - echo - for ac_var in $ac_subst_files - do - eval ac_val=\$$ac_var - case $ac_val in - *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; - esac - $as_echo "$ac_var='\''$ac_val'\''" - done | sort - echo - fi - - if test -s confdefs.h; then - $as_echo "## ----------- ## -## confdefs.h. 
## -## ----------- ##" - echo - cat confdefs.h - echo - fi - test "$ac_signal" != 0 && - $as_echo "$as_me: caught signal $ac_signal" - $as_echo "$as_me: exit $exit_status" - } >&5 - rm -f core *.core core.conftest.* && - rm -f -r conftest* confdefs* conf$$* $ac_clean_files && - exit $exit_status -' 0 -for ac_signal in 1 2 13 15; do - trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal -done -ac_signal=0 - -# confdefs.h avoids OS command line length limits that DEFS can exceed. -rm -f -r conftest* confdefs.h - -$as_echo "/* confdefs.h */" > confdefs.h - -# Predefined preprocessor variables. - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_NAME "$PACKAGE_NAME" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_TARNAME "$PACKAGE_TARNAME" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_VERSION "$PACKAGE_VERSION" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_STRING "$PACKAGE_STRING" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_URL "$PACKAGE_URL" -_ACEOF - - -# Let the site file select an alternate cache file if it wants to. -# Prefer an explicitly selected file to automatically selected ones. -ac_site_file1=NONE -ac_site_file2=NONE -if test -n "$CONFIG_SITE"; then - # We do not want a PATH search for config.site. 
- case $CONFIG_SITE in #(( - -*) ac_site_file1=./$CONFIG_SITE;; - */*) ac_site_file1=$CONFIG_SITE;; - *) ac_site_file1=./$CONFIG_SITE;; - esac -elif test "x$prefix" != xNONE; then - ac_site_file1=$prefix/share/config.site - ac_site_file2=$prefix/etc/config.site -else - ac_site_file1=$ac_default_prefix/share/config.site - ac_site_file2=$ac_default_prefix/etc/config.site -fi -for ac_site_file in "$ac_site_file1" "$ac_site_file2" -do - test "x$ac_site_file" = xNONE && continue - if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 -$as_echo "$as_me: loading site script $ac_site_file" >&6;} - sed 's/^/| /' "$ac_site_file" >&5 - . "$ac_site_file" \ - || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "failed to load site script $ac_site_file -See \`config.log' for more details" "$LINENO" 5; } - fi -done - -if test -r "$cache_file"; then - # Some versions of bash will fail to source /dev/null (special files - # actually), so we avoid doing that. DJGPP emulates it as a regular file. - if test /dev/null != "$cache_file" && test -f "$cache_file"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 -$as_echo "$as_me: loading cache $cache_file" >&6;} - case $cache_file in - [\\/]* | ?:[\\/]* ) . "$cache_file";; - *) . "./$cache_file";; - esac - fi -else - { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 -$as_echo "$as_me: creating cache $cache_file" >&6;} - >$cache_file -fi - -# Check that the precious variables saved in the cache have kept the same -# value. 
-ac_cache_corrupted=false -for ac_var in $ac_precious_vars; do - eval ac_old_set=\$ac_cv_env_${ac_var}_set - eval ac_new_set=\$ac_env_${ac_var}_set - eval ac_old_val=\$ac_cv_env_${ac_var}_value - eval ac_new_val=\$ac_env_${ac_var}_value - case $ac_old_set,$ac_new_set in - set,) - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 -$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} - ac_cache_corrupted=: ;; - ,set) - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 -$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} - ac_cache_corrupted=: ;; - ,);; - *) - if test "x$ac_old_val" != "x$ac_new_val"; then - # differences in whitespace do not lead to failure. - ac_old_val_w=`echo x $ac_old_val` - ac_new_val_w=`echo x $ac_new_val` - if test "$ac_old_val_w" != "$ac_new_val_w"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 -$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} - ac_cache_corrupted=: - else - { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 -$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} - eval $ac_var=\$ac_old_val - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 -$as_echo "$as_me: former value: \`$ac_old_val'" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 -$as_echo "$as_me: current value: \`$ac_new_val'" >&2;} - fi;; - esac - # Pass precious variables to config.status. - if test "$ac_new_set" = set; then - case $ac_new_val in - *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; - *) ac_arg=$ac_var=$ac_new_val ;; - esac - case " $ac_configure_args " in - *" '$ac_arg' "*) ;; # Avoid dups. 
Use of quotes ensures accuracy. - *) as_fn_append ac_configure_args " '$ac_arg'" ;; - esac - fi -done -if $ac_cache_corrupted; then - { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 -$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} - as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 -fi -## -------------------- ## -## Main body of script. ## -## -------------------- ## - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - - -ac_config_headers="$ac_config_headers cbits/hs_resolv_config.h" - - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. -set dummy ${ac_tool_prefix}gcc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_CC="${ac_tool_prefix}gcc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_CC"; then - ac_ct_CC=$CC - # Extract the first word of "gcc", so it can be a program name with args. -set dummy gcc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_CC"; then - ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_ac_ct_CC="gcc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_CC=$ac_cv_prog_ac_ct_CC -if test -n "$ac_ct_CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 -$as_echo "$ac_ct_CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_CC" = x; then - CC="" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - CC=$ac_ct_CC - fi -else - CC="$ac_cv_prog_CC" -fi - -if test -z "$CC"; then - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. -set dummy ${ac_tool_prefix}cc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_CC="${ac_tool_prefix}cc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - fi -fi -if test -z "$CC"; then - # Extract the first word of "cc", so it can be a program name with args. -set dummy cc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else - ac_prog_rejected=no -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then - ac_prog_rejected=yes - continue - fi - ac_cv_prog_CC="cc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -if test $ac_prog_rejected = yes; then - # We found a bogon in the path, so make sure we never use it. - set dummy $ac_cv_prog_CC - shift - if test $# != 0; then - # We chose a different compiler from the bogus one. - # However, it has the same basename, so the bogon will be chosen - # first if we set CC to just the basename; use the full file name. 
- shift - ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" - fi -fi -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$CC"; then - if test -n "$ac_tool_prefix"; then - for ac_prog in cl.exe - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_CC="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$CC" && break - done -fi -if test -z "$CC"; then - ac_ct_CC=$CC - for ac_prog in cl.exe -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_CC"; then - ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. 
-else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then - ac_cv_prog_ac_ct_CC="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_CC=$ac_cv_prog_ac_ct_CC -if test -n "$ac_ct_CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 -$as_echo "$ac_ct_CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_CC" && break -done - - if test "x$ac_ct_CC" = x; then - CC="" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - CC=$ac_ct_CC - fi -fi - -fi - - -test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "no acceptable C compiler found in \$PATH -See \`config.log' for more details" "$LINENO" 5; } - -# Provide some information about the compiler. -$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 -set X $ac_compile -ac_compiler=$2 -for ac_option in --version -v -V -qversion; do - { { ac_try="$ac_compiler $ac_option >&5" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compiler $ac_option >&5") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - sed '10a\ -... rest of stderr output deleted ... 
- 10q' conftest.err >conftest.er1 - cat conftest.er1 >&5 - fi - rm -f conftest.er1 conftest.err - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -done - -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -ac_clean_files_save=$ac_clean_files -ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" -# Try to create an executable without -o first, disregard a.out. -# It will help us diagnose broken compilers, and finding out an intuition -# of exeext. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 -$as_echo_n "checking whether the C compiler works... " >&6; } -ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` - -# The possible output files: -ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" - -ac_rmfiles= -for ac_file in $ac_files -do - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; - * ) ac_rmfiles="$ac_rmfiles $ac_file";; - esac -done -rm -f $ac_rmfiles - -if { { ac_try="$ac_link_default" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link_default") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. -# So ignore a value of `no', otherwise this would lead to `EXEEXT = no' -# in a Makefile. We should not override ac_cv_exeext if it was cached, -# so that the user can short-circuit this test for compilers unknown to -# Autoconf. 
-for ac_file in $ac_files '' -do - test -f "$ac_file" || continue - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) - ;; - [ab].out ) - # We found the default executable, but exeext='' is most - # certainly right. - break;; - *.* ) - if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; - then :; else - ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` - fi - # We set ac_cv_exeext here because the later test for it is not - # safe: cross compilers may not add the suffix if given an `-o' - # argument, so we may need to know it at that point already. - # Even if this section looks crufty: it has the advantage of - # actually working. - break;; - * ) - break;; - esac -done -test "$ac_cv_exeext" = no && ac_cv_exeext= - -else - ac_file='' -fi -if test -z "$ac_file"; then : - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -$as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - -{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error 77 "C compiler cannot create executables -See \`config.log' for more details" "$LINENO" 5; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 -$as_echo_n "checking for C compiler default output file name... " >&6; } -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 -$as_echo "$ac_file" >&6; } -ac_exeext=$ac_cv_exeext - -rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out -ac_clean_files=$ac_clean_files_save -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 -$as_echo_n "checking for suffix of executables... 
" >&6; } -if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - # If both `conftest.exe' and `conftest' are `present' (well, observable) -# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will -# work properly (i.e., refer to `conftest.exe'), while it won't with -# `rm'. -for ac_file in conftest.exe conftest conftest.*; do - test -f "$ac_file" || continue - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; - *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` - break;; - * ) break;; - esac -done -else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot compute suffix of executables: cannot compile and link -See \`config.log' for more details" "$LINENO" 5; } -fi -rm -f conftest conftest$ac_cv_exeext -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 -$as_echo "$ac_cv_exeext" >&6; } - -rm -f conftest.$ac_ext -EXEEXT=$ac_cv_exeext -ac_exeext=$EXEEXT -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -int -main () -{ -FILE *f = fopen ("conftest.out", "w"); - return ferror (f) || fclose (f) != 0; - - ; - return 0; -} -_ACEOF -ac_clean_files="$ac_clean_files conftest.out" -# Check that the compiler produces executables we can run. If not, either -# the compiler is broken, or we cross compile. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 -$as_echo_n "checking whether we are cross compiling... 
" >&6; } -if test "$cross_compiling" != yes; then - { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } - if { ac_try='./conftest$ac_cv_exeext' - { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; }; then - cross_compiling=no - else - if test "$cross_compiling" = maybe; then - cross_compiling=yes - else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot run C compiled programs. -If you meant to cross compile, use \`--host'. -See \`config.log' for more details" "$LINENO" 5; } - fi - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 -$as_echo "$cross_compiling" >&6; } - -rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out -ac_clean_files=$ac_clean_files_save -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 -$as_echo_n "checking for suffix of object files... " >&6; } -if ${ac_cv_objext+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -rm -f conftest.o conftest.obj -if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>&5 - ac_status=$? 
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - for ac_file in conftest.o conftest.obj conftest.*; do - test -f "$ac_file" || continue; - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; - *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` - break;; - esac -done -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - -{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot compute suffix of object files: cannot compile -See \`config.log' for more details" "$LINENO" 5; } -fi -rm -f conftest.$ac_cv_objext conftest.$ac_ext -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 -$as_echo "$ac_cv_objext" >&6; } -OBJEXT=$ac_cv_objext -ac_objext=$OBJEXT -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 -$as_echo_n "checking whether we are using the GNU C compiler... " >&6; } -if ${ac_cv_c_compiler_gnu+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ -#ifndef __GNUC__ - choke me -#endif - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_compiler_gnu=yes -else - ac_compiler_gnu=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -ac_cv_c_compiler_gnu=$ac_compiler_gnu - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 -$as_echo "$ac_cv_c_compiler_gnu" >&6; } -if test $ac_compiler_gnu = yes; then - GCC=yes -else - GCC= -fi -ac_test_CFLAGS=${CFLAGS+set} -ac_save_CFLAGS=$CFLAGS -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 -$as_echo_n "checking whether $CC accepts -g... 
" >&6; } -if ${ac_cv_prog_cc_g+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_save_c_werror_flag=$ac_c_werror_flag - ac_c_werror_flag=yes - ac_cv_prog_cc_g=no - CFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_g=yes -else - CFLAGS="" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - -else - ac_c_werror_flag=$ac_save_c_werror_flag - CFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_g=yes -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - ac_c_werror_flag=$ac_save_c_werror_flag -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 -$as_echo "$ac_cv_prog_cc_g" >&6; } -if test "$ac_test_CFLAGS" = set; then - CFLAGS=$ac_save_CFLAGS -elif test $ac_cv_prog_cc_g = yes; then - if test "$GCC" = yes; then - CFLAGS="-g -O2" - else - CFLAGS="-g" - fi -else - if test "$GCC" = yes; then - CFLAGS="-O2" - else - CFLAGS= - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 -$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } -if ${ac_cv_prog_cc_c89+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_cv_prog_cc_c89=no -ac_save_CC=$CC -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -#include -struct stat; -/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. 
*/ -struct buf { int x; }; -FILE * (*rcsopen) (struct buf *, struct stat *, int); -static char *e (p, i) - char **p; - int i; -{ - return p[i]; -} -static char *f (char * (*g) (char **, int), char **p, ...) -{ - char *s; - va_list v; - va_start (v,p); - s = g (p, va_arg (v,int)); - va_end (v); - return s; -} - -/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has - function prototypes and stuff, but not '\xHH' hex character constants. - These don't provoke an error unfortunately, instead are silently treated - as 'x'. The following induces an error, until -std is added to get - proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an - array size at least. It's necessary to write '\x00'==0 to get something - that's true only with -std. */ -int osf4_cc_array ['\x00' == 0 ? 1 : -1]; - -/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters - inside strings and character constants. */ -#define FOO(x) 'x' -int xlc6_cc_array[FOO(a) == 'x' ? 
1 : -1]; - -int test (int i, double x); -struct s1 {int (*f) (int a);}; -struct s2 {int (*f) (double a);}; -int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); -int argc; -char **argv; -int -main () -{ -return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; - ; - return 0; -} -_ACEOF -for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ - -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" -do - CC="$ac_save_CC $ac_arg" - if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_c89=$ac_arg -fi -rm -f core conftest.err conftest.$ac_objext - test "x$ac_cv_prog_cc_c89" != "xno" && break -done -rm -f conftest.$ac_ext -CC=$ac_save_CC - -fi -# AC_CACHE_VAL -case "x$ac_cv_prog_cc_c89" in - x) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 -$as_echo "none needed" >&6; } ;; - xno) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 -$as_echo "unsupported" >&6; } ;; - *) - CC="$CC $ac_cv_prog_cc_c89" - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 -$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; -esac -if test "x$ac_cv_prog_cc_c89" != xno; then : - -fi - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for an ANSI C-conforming const" >&5 -$as_echo_n "checking for an ANSI C-conforming const... " >&6; } -if ${ac_cv_c_const+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - -#ifndef __cplusplus - /* Ultrix mips cc rejects this sort of thing. */ - typedef int charset[2]; - const charset cs = { 0, 0 }; - /* SunOS 4.1.1 cc rejects this. */ - char const *const *pcpcc; - char **ppc; - /* NEC SVR4.0.2 mips cc rejects this. 
*/ - struct point {int x, y;}; - static struct point const zero = {0,0}; - /* AIX XL C 1.02.0.0 rejects this. - It does not let you subtract one const X* pointer from another in - an arm of an if-expression whose if-part is not a constant - expression */ - const char *g = "string"; - pcpcc = &g + (g ? g-g : 0); - /* HPUX 7.0 cc rejects these. */ - ++pcpcc; - ppc = (char**) pcpcc; - pcpcc = (char const *const *) ppc; - { /* SCO 3.2v4 cc rejects this sort of thing. */ - char tx; - char *t = &tx; - char const *s = 0 ? (char *) 0 : (char const *) 0; - - *t++ = 0; - if (s) return 0; - } - { /* Someone thinks the Sun supposedly-ANSI compiler will reject this. */ - int x[] = {25, 17}; - const int *foo = &x[0]; - ++foo; - } - { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */ - typedef const int *iptr; - iptr p = 0; - ++p; - } - { /* AIX XL C 1.02.0.0 rejects this sort of thing, saying - "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. */ - struct s { int j; const int *ap[3]; } bx; - struct s *b = &bx; b->j = 5; - } - { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ - const int foo = 10; - if (!foo) return 0; - } - return !cs[0] && !zero.x; -#endif - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_c_const=yes -else - ac_cv_c_const=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_const" >&5 -$as_echo "$ac_cv_c_const" >&6; } -if test $ac_cv_c_const = no; then - -$as_echo "#define const /**/" >>confdefs.h - -fi - - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 -$as_echo_n "checking how to run the C preprocessor... " >&6; } -# On Suns, sometimes $CPP names a directory. 
-if test -n "$CPP" && test -d "$CPP"; then - CPP= -fi -if test -z "$CPP"; then - if ${ac_cv_prog_CPP+:} false; then : - $as_echo_n "(cached) " >&6 -else - # Double quotes because CPP needs to be expanded - for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" - do - ac_preproc_ok=false -for ac_c_preproc_warn_flag in '' yes -do - # Use a header file that comes with gcc, so configuring glibc - # with a fresh cross-compiler works. - # Prefer to if __STDC__ is defined, since - # exists even on freestanding compilers. - # On the NeXT, cc -E runs the code through the compiler's parser, - # not just through cpp. "Syntax error" is here to catch this case. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#ifdef __STDC__ -# include -#else -# include -#endif - Syntax error -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - -else - # Broken: fails on valid input. -continue -fi -rm -f conftest.err conftest.i conftest.$ac_ext - - # OK, works on sane cases. Now check whether nonexistent headers - # can be detected and how. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - # Broken: success on invalid input. -continue -else - # Passes both tests. -ac_preproc_ok=: -break -fi -rm -f conftest.err conftest.i conftest.$ac_ext - -done -# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. -rm -f conftest.i conftest.err conftest.$ac_ext -if $ac_preproc_ok; then : - break -fi - - done - ac_cv_prog_CPP=$CPP - -fi - CPP=$ac_cv_prog_CPP -else - ac_cv_prog_CPP=$CPP -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 -$as_echo "$CPP" >&6; } -ac_preproc_ok=false -for ac_c_preproc_warn_flag in '' yes -do - # Use a header file that comes with gcc, so configuring glibc - # with a fresh cross-compiler works. - # Prefer to if __STDC__ is defined, since - # exists even on freestanding compilers. 
- # On the NeXT, cc -E runs the code through the compiler's parser, - # not just through cpp. "Syntax error" is here to catch this case. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#ifdef __STDC__ -# include -#else -# include -#endif - Syntax error -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - -else - # Broken: fails on valid input. -continue -fi -rm -f conftest.err conftest.i conftest.$ac_ext - - # OK, works on sane cases. Now check whether nonexistent headers - # can be detected and how. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - # Broken: success on invalid input. -continue -else - # Passes both tests. -ac_preproc_ok=: -break -fi -rm -f conftest.err conftest.i conftest.$ac_ext - -done -# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. -rm -f conftest.i conftest.err conftest.$ac_ext -if $ac_preproc_ok; then : - -else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "C preprocessor \"$CPP\" fails sanity check -See \`config.log' for more details" "$LINENO" 5; } -fi - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 -$as_echo_n "checking for grep that handles long lines and -e... " >&6; } -if ${ac_cv_path_GREP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -z "$GREP"; then - ac_path_GREP_found=false - # Loop through the user's path and test for each of PROGNAME-LIST - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_prog in grep ggrep; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" - as_fn_executable_p "$ac_path_GREP" || continue -# Check for GNU ac_path_GREP and select it if it is found. - # Check for GNU $ac_path_GREP -case `"$ac_path_GREP" --version 2>&1` in -*GNU*) - ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; -*) - ac_count=0 - $as_echo_n 0123456789 >"conftest.in" - while : - do - cat "conftest.in" "conftest.in" >"conftest.tmp" - mv "conftest.tmp" "conftest.in" - cp "conftest.in" "conftest.nl" - $as_echo 'GREP' >> "conftest.nl" - "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break - diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - as_fn_arith $ac_count + 1 && ac_count=$as_val - if test $ac_count -gt ${ac_path_GREP_max-0}; then - # Best one so far, save it but keep looking for a better one - ac_cv_path_GREP="$ac_path_GREP" - ac_path_GREP_max=$ac_count - fi - # 10*(2^10) chars as input seems more than enough - test $ac_count -gt 10 && break - done - rm -f conftest.in conftest.tmp conftest.nl conftest.out;; -esac - - $ac_path_GREP_found && break 3 - done - done - done -IFS=$as_save_IFS - if test -z "$ac_cv_path_GREP"; then - as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 - fi -else - ac_cv_path_GREP=$GREP -fi - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 -$as_echo "$ac_cv_path_GREP" >&6; } - GREP="$ac_cv_path_GREP" - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 -$as_echo_n "checking for egrep... 
" >&6; } -if ${ac_cv_path_EGREP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 - then ac_cv_path_EGREP="$GREP -E" - else - if test -z "$EGREP"; then - ac_path_EGREP_found=false - # Loop through the user's path and test for each of PROGNAME-LIST - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_prog in egrep; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" - as_fn_executable_p "$ac_path_EGREP" || continue -# Check for GNU ac_path_EGREP and select it if it is found. - # Check for GNU $ac_path_EGREP -case `"$ac_path_EGREP" --version 2>&1` in -*GNU*) - ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; -*) - ac_count=0 - $as_echo_n 0123456789 >"conftest.in" - while : - do - cat "conftest.in" "conftest.in" >"conftest.tmp" - mv "conftest.tmp" "conftest.in" - cp "conftest.in" "conftest.nl" - $as_echo 'EGREP' >> "conftest.nl" - "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break - diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - as_fn_arith $ac_count + 1 && ac_count=$as_val - if test $ac_count -gt ${ac_path_EGREP_max-0}; then - # Best one so far, save it but keep looking for a better one - ac_cv_path_EGREP="$ac_path_EGREP" - ac_path_EGREP_max=$ac_count - fi - # 10*(2^10) chars as input seems more than enough - test $ac_count -gt 10 && break - done - rm -f conftest.in conftest.tmp conftest.nl conftest.out;; -esac - - $ac_path_EGREP_found && break 3 - done - done - done -IFS=$as_save_IFS - if test -z "$ac_cv_path_EGREP"; then - as_fn_error $? 
"no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 - fi -else - ac_cv_path_EGREP=$EGREP -fi - - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 -$as_echo "$ac_cv_path_EGREP" >&6; } - EGREP="$ac_cv_path_EGREP" - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 -$as_echo_n "checking for ANSI C header files... " >&6; } -if ${ac_cv_header_stdc+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -#include -#include -#include - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_header_stdc=yes -else - ac_cv_header_stdc=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - -if test $ac_cv_header_stdc = yes; then - # SunOS 4.x string.h does not declare mem*, contrary to ANSI. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include - -_ACEOF -if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | - $EGREP "memchr" >/dev/null 2>&1; then : - -else - ac_cv_header_stdc=no -fi -rm -f conftest* - -fi - -if test $ac_cv_header_stdc = yes; then - # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include - -_ACEOF -if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | - $EGREP "free" >/dev/null 2>&1; then : - -else - ac_cv_header_stdc=no -fi -rm -f conftest* - -fi - -if test $ac_cv_header_stdc = yes; then - # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. - if test "$cross_compiling" = yes; then : - : -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -#include -#if ((' ' & 0x0FF) == 0x020) -# define ISLOWER(c) ('a' <= (c) && (c) <= 'z') -# define TOUPPER(c) (ISLOWER(c) ? 
'A' + ((c) - 'a') : (c)) -#else -# define ISLOWER(c) \ - (('a' <= (c) && (c) <= 'i') \ - || ('j' <= (c) && (c) <= 'r') \ - || ('s' <= (c) && (c) <= 'z')) -# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) -#endif - -#define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) -int -main () -{ - int i; - for (i = 0; i < 256; i++) - if (XOR (islower (i), ISLOWER (i)) - || toupper (i) != TOUPPER (i)) - return 2; - return 0; -} -_ACEOF -if ac_fn_c_try_run "$LINENO"; then : - -else - ac_cv_header_stdc=no -fi -rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ - conftest.$ac_objext conftest.beam conftest.$ac_ext -fi - -fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 -$as_echo "$ac_cv_header_stdc" >&6; } -if test $ac_cv_header_stdc = yes; then - -$as_echo "#define STDC_HEADERS 1" >>confdefs.h - -fi - -# On IRIX 5.3, sys/types and inttypes.h are conflicting. -for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ - inttypes.h stdint.h unistd.h -do : - as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` -ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default -" -if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : - cat >>confdefs.h <<_ACEOF -#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 -_ACEOF - -fi - -done - - -for ac_header in netinet/in.h arpa/nameser.h -do : - as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` -ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" -if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : - cat >>confdefs.h <<_ACEOF -#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 -_ACEOF - -fi - -done - -ac_fn_c_check_header_compile "$LINENO" "resolv.h" "ac_cv_header_resolv_h" " -#include -#ifdef HAVE_NETINET_IN_H -# include -#endif -#ifdef HAVE_ARPA_NAMESER_H -# include -#endif -#include - -" -if test "x$ac_cv_header_resolv_h" = xyes; then : - -else - as_fn_error $? 
"required header not found" "$LINENO" 5 -fi - - - -ac_fn_c_check_decl "$LINENO" "res_query" "ac_cv_have_decl_res_query" " -#include -#ifdef HAVE_NETINET_IN_H -# include -#endif -#ifdef HAVE_ARPA_NAMESER_H -# include -#endif -#include - -" -if test "x$ac_cv_have_decl_res_query" = xyes; then : - ac_have_decl=1 -else - ac_have_decl=0 -fi - -cat >>confdefs.h <<_ACEOF -#define HAVE_DECL_RES_QUERY $ac_have_decl -_ACEOF -ac_fn_c_check_decl "$LINENO" "res_nquery" "ac_cv_have_decl_res_nquery" " -#include -#ifdef HAVE_NETINET_IN_H -# include -#endif -#ifdef HAVE_ARPA_NAMESER_H -# include -#endif -#include - -" -if test "x$ac_cv_have_decl_res_nquery" = xyes; then : - ac_have_decl=1 -else - ac_have_decl=0 -fi - -cat >>confdefs.h <<_ACEOF -#define HAVE_DECL_RES_NQUERY $ac_have_decl -_ACEOF - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing res_query" >&5 -$as_echo_n "checking for library containing res_query... " >&6; } -if ${ac_cv_search_res_query+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_func_search_save_LIBS=$LIBS -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ - -#include -#ifdef HAVE_NETINET_IN_H -# include -#endif -#ifdef HAVE_ARPA_NAMESER_H -# include -#endif -#include - -int -main () -{ -res_query(0,0,0,0,0) - ; - return 0; -} -_ACEOF -for ac_lib in '' resolv bind; do - if test -z "$ac_lib"; then - ac_res="none required" - else - ac_res=-l$ac_lib - LIBS="-l$ac_lib $ac_func_search_save_LIBS" - fi - if ac_fn_c_try_link "$LINENO"; then : - ac_cv_search_res_query=$ac_res -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext - if ${ac_cv_search_res_query+:} false; then : - break -fi -done -if ${ac_cv_search_res_query+:} false; then : - -else - ac_cv_search_res_query=no -fi -rm conftest.$ac_ext -LIBS=$ac_func_search_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_res_query" >&5 -$as_echo "$ac_cv_search_res_query" >&6; } -ac_res=$ac_cv_search_res_query -if test "$ac_res" != no; then : - test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" - EXTRA_LIBS="$EXTRA_LIBS $ac_lib" -else - -as_fn_error $? "could not figure out which C library contains res_query(3)" "$LINENO" 5 - -fi - - - - -USE_RES_NQUERY=0 - -if test "x$ac_cv_have_decl_res_nquery" = xyes; then - -# The cast to long int works around a bug in the HP C Compiler -# version HP92453-01 B.11.11.23709.GP, which incorrectly rejects -# declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. -# This bug is HP SR number 8606223364. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking size of struct __res_state" >&5 -$as_echo_n "checking size of struct __res_state... 
" >&6; } -if ${ac_cv_sizeof_struct___res_state+:} false; then : - $as_echo_n "(cached) " >&6 -else - if ac_fn_c_compute_int "$LINENO" "(long int) (sizeof (struct __res_state))" "ac_cv_sizeof_struct___res_state" " -#include -#ifdef HAVE_NETINET_IN_H -# include -#endif -#ifdef HAVE_ARPA_NAMESER_H -# include -#endif -#include - -"; then : - -else - if test "$ac_cv_type_struct___res_state" = yes; then - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error 77 "cannot compute sizeof (struct __res_state) -See \`config.log' for more details" "$LINENO" 5; } - else - ac_cv_sizeof_struct___res_state=0 - fi -fi - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sizeof_struct___res_state" >&5 -$as_echo "$ac_cv_sizeof_struct___res_state" >&6; } - - - -cat >>confdefs.h <<_ACEOF -#define SIZEOF_STRUCT___RES_STATE $ac_cv_sizeof_struct___res_state -_ACEOF - - - -SIZEOF_RES_STATE="$ac_cv_sizeof_struct___res_state" - -if test "$SIZEOF_RES_STATE" -gt 0; then - -USE_RES_NQUERY=1 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing res_nquery" >&5 -$as_echo_n "checking for library containing res_nquery... " >&6; } -if ${ac_cv_search_res_nquery+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_func_search_save_LIBS=$LIBS -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ - -#include -#ifdef HAVE_NETINET_IN_H -# include -#endif -#ifdef HAVE_ARPA_NAMESER_H -# include -#endif -#include - -int -main () -{ -res_nquery(0,0,0,0,0,0) - ; - return 0; -} -_ACEOF -for ac_lib in '' resolv bind; do - if test -z "$ac_lib"; then - ac_res="none required" - else - ac_res=-l$ac_lib - LIBS="-l$ac_lib $ac_func_search_save_LIBS" - fi - if ac_fn_c_try_link "$LINENO"; then : - ac_cv_search_res_nquery=$ac_res -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext - if ${ac_cv_search_res_nquery+:} false; then : - break -fi -done -if ${ac_cv_search_res_nquery+:} false; then : - -else - ac_cv_search_res_nquery=no -fi -rm conftest.$ac_ext -LIBS=$ac_func_search_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_res_nquery" >&5 -$as_echo "$ac_cv_search_res_nquery" >&6; } -ac_res=$ac_cv_search_res_nquery -if test "$ac_res" != no; then : - test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" - EXTRA_LIBS="$EXTRA_LIBS $ac_lib" -else - -USE_RES_NQUERY=0 -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: could not figure out which C library contains res_nquery(3)" >&5 -$as_echo "$as_me: WARNING: could not figure out which C library contains res_nquery(3)" >&2;} - -fi - - -else - -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: could not determine sizeof(struct __res_state)" >&5 -$as_echo "$as_me: WARNING: could not determine sizeof(struct __res_state)" >&2;} - -fi - -fi - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking which DNS api to use" >&5 -$as_echo_n "checking which DNS api to use... " >&6; } - -case "x$USE_RES_NQUERY" in - x0) { $as_echo "$as_me:${as_lineno-$LINENO}: result: res_query(3)" >&5 -$as_echo "res_query(3)" >&6; } - CPP_OPTIONS="-DUSE_RES_NQUERY=0 -DSIZEOF_RES_STATE=0" - ;; - x1) { $as_echo "$as_me:${as_lineno-$LINENO}: result: res_nquery(3)" >&5 -$as_echo "res_nquery(3)" >&6; } - CPP_OPTIONS="-DUSE_RES_NQUERY=1 -DSIZEOF_RES_STATE=$SIZEOF_RES_STATE" - ;; - *) as_fn_error $? 
"no suitable DNS API detected" "$LINENO" 5 - ;; -esac - -cat >>confdefs.h <<_ACEOF -#define USE_RES_NQUERY $USE_RES_NQUERY -_ACEOF - - - - -ac_config_files="$ac_config_files resolv.buildinfo" - - -cat >confcache <<\_ACEOF -# This file is a shell script that caches the results of configure -# tests run on this system so they can be shared between configure -# scripts and configure runs, see configure's option --config-cache. -# It is not useful on other systems. If it contains results you don't -# want to keep, you may remove or edit it. -# -# config.status only pays attention to the cache file if you give it -# the --recheck option to rerun configure. -# -# `ac_cv_env_foo' variables (set or unset) will be overridden when -# loading this file, other *unset* `ac_cv_foo' will be assigned the -# following values. - -_ACEOF - -# The following way of writing the cache mishandles newlines in values, -# but we know of no workaround that is simple, portable, and efficient. -# So, we kill variables containing newlines. -# Ultrix sh set writes to stderr and can't be redirected directly, -# and sets the high bit in the cache file unless we assign to the vars. -( - for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do - eval ac_val=\$$ac_var - case $ac_val in #( - *${as_nl}*) - case $ac_var in #( - *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 -$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; - esac - case $ac_var in #( - _ | IFS | as_nl) ;; #( - BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( - *) { eval $ac_var=; unset $ac_var;} ;; - esac ;; - esac - done - - (set) 2>&1 | - case $as_nl`(ac_space=' '; set) 2>&1` in #( - *${as_nl}ac_space=\ *) - # `set' does not quote correctly, so add quotes: double-quote - # substitution turns \\\\ into \\, and sed turns \\ into \. 
- sed -n \ - "s/'/'\\\\''/g; - s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" - ;; #( - *) - # `set' quotes correctly as required by POSIX, so do not add quotes. - sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" - ;; - esac | - sort -) | - sed ' - /^ac_cv_env_/b end - t clear - :clear - s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ - t end - s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ - :end' >>confcache -if diff "$cache_file" confcache >/dev/null 2>&1; then :; else - if test -w "$cache_file"; then - if test "x$cache_file" != "x/dev/null"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 -$as_echo "$as_me: updating cache $cache_file" >&6;} - if test ! -f "$cache_file" || test -h "$cache_file"; then - cat confcache >"$cache_file" - else - case $cache_file in #( - */* | ?:*) - mv -f confcache "$cache_file"$$ && - mv -f "$cache_file"$$ "$cache_file" ;; #( - *) - mv -f confcache "$cache_file" ;; - esac - fi - fi - else - { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 -$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} - fi -fi -rm -f confcache - -test "x$prefix" = xNONE && prefix=$ac_default_prefix -# Let make expand exec_prefix. -test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' - -DEFS=-DHAVE_CONFIG_H - -ac_libobjs= -ac_ltlibobjs= -U= -for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue - # 1. Remove the extension, and $U if already installed. - ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' - ac_i=`$as_echo "$ac_i" | sed "$ac_script"` - # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR - # will be set to the directory where LIBOBJS objects are built. 
- as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" - as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' -done -LIBOBJS=$ac_libobjs - -LTLIBOBJS=$ac_ltlibobjs - - - -: "${CONFIG_STATUS=./config.status}" -ac_write_fail=0 -ac_clean_files_save=$ac_clean_files -ac_clean_files="$ac_clean_files $CONFIG_STATUS" -{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 -$as_echo "$as_me: creating $CONFIG_STATUS" >&6;} -as_write_fail=0 -cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 -#! $SHELL -# Generated by $as_me. -# Run this file to recreate the current configuration. -# Compiler output produced by configure, useful for debugging -# configure, is in config.log if it exists. - -debug=false -ac_cs_recheck=false -ac_cs_silent=false - -SHELL=\${CONFIG_SHELL-$SHELL} -export SHELL -_ASEOF -cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 -## -------------------- ## -## M4sh Initialization. ## -## -------------------- ## - -# Be more Bourne compatible -DUALCASE=1; export DUALCASE # for MKS sh -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi - - -as_nl=' -' -export as_nl -# Printing a long string crashes Solaris 7 /usr/bin/printf. -as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo -# Prefer a ksh shell builtin over an external printf program on Solaris, -# but without wasting forks for bash or zsh. 
-if test -z "$BASH_VERSION$ZSH_VERSION" \ - && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='print -r --' - as_echo_n='print -rn --' -elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='printf %s\n' - as_echo_n='printf %s' -else - if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then - as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' - as_echo_n='/usr/ucb/echo -n' - else - as_echo_body='eval expr "X$1" : "X\\(.*\\)"' - as_echo_n_body='eval - arg=$1; - case $arg in #( - *"$as_nl"*) - expr "X$arg" : "X\\(.*\\)$as_nl"; - arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; - esac; - expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" - ' - export as_echo_n_body - as_echo_n='sh -c $as_echo_n_body as_echo' - fi - export as_echo_body - as_echo='sh -c $as_echo_body as_echo' -fi - -# The user is always right. -if test "${PATH_SEPARATOR+set}" != set; then - PATH_SEPARATOR=: - (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { - (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || - PATH_SEPARATOR=';' - } -fi - - -# IFS -# We need space, tab and new line, in precisely that order. Quoting is -# there to prevent editors from complaining about space-tab. -# (If _AS_PATH_WALK were called with IFS unset, it would disable word -# splitting by setting IFS to empty value.) -IFS=" "" $as_nl" - -# Find who we are. Look in the path if we contain no directory separator. -as_myself= -case $0 in #(( - *[\\/]* ) as_myself=$0 ;; - *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break - done -IFS=$as_save_IFS - - ;; -esac -# We did not find ourselves, most probably we were run as `sh COMMAND' -# in which case we are not to be found in the path. -if test "x$as_myself" = x; then - as_myself=$0 -fi -if test ! 
-f "$as_myself"; then - $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - exit 1 -fi - -# Unset variables that we do not need and which cause bugs (e.g. in -# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" -# suppresses any "Segmentation fault" message there. '((' could -# trigger a bug in pdksh 5.2.14. -for as_var in BASH_ENV ENV MAIL MAILPATH -do eval test x\${$as_var+set} = xset \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done -PS1='$ ' -PS2='> ' -PS4='+ ' - -# NLS nuisances. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# CDPATH. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - - -# as_fn_error STATUS ERROR [LINENO LOG_FD] -# ---------------------------------------- -# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are -# provided, also output the error to LOG_FD, referencing LINENO. Then exit the -# script with STATUS, using 1 if that was 0. -as_fn_error () -{ - as_status=$1; test $as_status -eq 0 && as_status=1 - if test "$4"; then - as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 - fi - $as_echo "$as_me: error: $2" >&2 - as_fn_exit $as_status -} # as_fn_error - - -# as_fn_set_status STATUS -# ----------------------- -# Set $? to STATUS, without forking. -as_fn_set_status () -{ - return $1 -} # as_fn_set_status - -# as_fn_exit STATUS -# ----------------- -# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. -as_fn_exit () -{ - set +e - as_fn_set_status $1 - exit $1 -} # as_fn_exit - -# as_fn_unset VAR -# --------------- -# Portably unset VAR. -as_fn_unset () -{ - { eval $1=; unset $1;} -} -as_unset=as_fn_unset -# as_fn_append VAR VALUE -# ---------------------- -# Append the text in VALUE to the end of the definition contained in VAR. 
Take -# advantage of any shell optimizations that allow amortized linear growth over -# repeated appends, instead of the typical quadratic growth present in naive -# implementations. -if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : - eval 'as_fn_append () - { - eval $1+=\$2 - }' -else - as_fn_append () - { - eval $1=\$$1\$2 - } -fi # as_fn_append - -# as_fn_arith ARG... -# ------------------ -# Perform arithmetic evaluation on the ARGs, and store the result in the -# global $as_val. Take advantage of shells that can avoid forks. The arguments -# must be portable across $(()) and expr. -if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : - eval 'as_fn_arith () - { - as_val=$(( $* )) - }' -else - as_fn_arith () - { - as_val=`expr "$@" || test $? -eq 1` - } -fi # as_fn_arith - - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then - as_basename=basename -else - as_basename=false -fi - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -as_me=`$as_basename -- "$0" || -$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X/"$0" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - -# Avoid depending upon Character Ranges. -as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - -ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in #((((( --n*) - case `echo 'xy\c'` in - *c*) ECHO_T=' ';; # ECHO_T is single tab character. 
- xy) ECHO_C='\c';; - *) echo `echo ksh88 bug on AIX 6.1` > /dev/null - ECHO_T=' ';; - esac;; -*) - ECHO_N='-n';; -esac - -rm -f conf$$ conf$$.exe conf$$.file -if test -d conf$$.dir; then - rm -f conf$$.dir/conf$$.file -else - rm -f conf$$.dir - mkdir conf$$.dir 2>/dev/null -fi -if (echo >conf$$.file) 2>/dev/null; then - if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -pR'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -pR' - elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln - else - as_ln_s='cp -pR' - fi -else - as_ln_s='cp -pR' -fi -rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file -rmdir conf$$.dir 2>/dev/null - - -# as_fn_mkdir_p -# ------------- -# Create "$as_dir" as a directory, including parents if necessary. -as_fn_mkdir_p () -{ - - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || eval $as_mkdir_p || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" - - -} # as_fn_mkdir_p -if mkdir -p . 
2>/dev/null; then - as_mkdir_p='mkdir -p "$as_dir"' -else - test -d ./-p && rmdir ./-p - as_mkdir_p=false -fi - - -# as_fn_executable_p FILE -# ----------------------- -# Test if FILE is an executable regular file. -as_fn_executable_p () -{ - test -f "$1" && test -x "$1" -} # as_fn_executable_p -as_test_x='test -x' -as_executable_p=as_fn_executable_p - -# Sed expression to map a string onto a valid CPP name. -as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - -# Sed expression to map a string onto a valid variable name. -as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - - -exec 6>&1 -## ----------------------------------- ## -## Main body of $CONFIG_STATUS script. ## -## ----------------------------------- ## -_ASEOF -test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# Save the log message, to keep $0 and so on meaningful, and to -# report actual input values of CONFIG_FILES etc. instead of their -# values after options handling. -ac_log=" -This file was extended by resolv $as_me 0.0, which was -generated by GNU Autoconf 2.69. Invocation command line was - - CONFIG_FILES = $CONFIG_FILES - CONFIG_HEADERS = $CONFIG_HEADERS - CONFIG_LINKS = $CONFIG_LINKS - CONFIG_COMMANDS = $CONFIG_COMMANDS - $ $0 $@ - -on `(hostname || uname -n) 2>/dev/null | sed 1q` -" - -_ACEOF - -case $ac_config_files in *" -"*) set x $ac_config_files; shift; ac_config_files=$*;; -esac - -case $ac_config_headers in *" -"*) set x $ac_config_headers; shift; ac_config_headers=$*;; -esac - - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -# Files that config.status was made for. -config_files="$ac_config_files" -config_headers="$ac_config_headers" - -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -ac_cs_usage="\ -\`$as_me' instantiates files and other configuration actions -from templates according to the current configuration. 
Unless the files -and actions are specified as TAGs, all are instantiated by default. - -Usage: $0 [OPTION]... [TAG]... - - -h, --help print this help, then exit - -V, --version print version number and configuration settings, then exit - --config print configuration, then exit - -q, --quiet, --silent - do not print progress messages - -d, --debug don't remove temporary files - --recheck update $as_me by reconfiguring in the same conditions - --file=FILE[:TEMPLATE] - instantiate the configuration file FILE - --header=FILE[:TEMPLATE] - instantiate the configuration header FILE - -Configuration files: -$config_files - -Configuration headers: -$config_headers - -Report bugs to ." - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" -ac_cs_version="\\ -resolv config.status 0.0 -configured by $0, generated by GNU Autoconf 2.69, - with options \\"\$ac_cs_config\\" - -Copyright (C) 2012 Free Software Foundation, Inc. -This config.status script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it." - -ac_pwd='$ac_pwd' -srcdir='$srcdir' -test -n "\$AWK" || AWK=awk -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# The default lists apply if the user does not specify any file. -ac_need_defaults=: -while test $# != 0 -do - case $1 in - --*=?*) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` - ac_shift=: - ;; - --*=) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg= - ac_shift=: - ;; - *) - ac_option=$1 - ac_optarg=$2 - ac_shift=shift - ;; - esac - - case $ac_option in - # Handling of the options. 
- -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) - ac_cs_recheck=: ;; - --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) - $as_echo "$ac_cs_version"; exit ;; - --config | --confi | --conf | --con | --co | --c ) - $as_echo "$ac_cs_config"; exit ;; - --debug | --debu | --deb | --de | --d | -d ) - debug=: ;; - --file | --fil | --fi | --f ) - $ac_shift - case $ac_optarg in - *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; - '') as_fn_error $? "missing file argument" ;; - esac - as_fn_append CONFIG_FILES " '$ac_optarg'" - ac_need_defaults=false;; - --header | --heade | --head | --hea ) - $ac_shift - case $ac_optarg in - *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; - esac - as_fn_append CONFIG_HEADERS " '$ac_optarg'" - ac_need_defaults=false;; - --he | --h) - # Conflict between --help and --header - as_fn_error $? "ambiguous option: \`$1' -Try \`$0 --help' for more information.";; - --help | --hel | -h ) - $as_echo "$ac_cs_usage"; exit ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil | --si | --s) - ac_cs_silent=: ;; - - # This is an error. - -*) as_fn_error $? "unrecognized option: \`$1' -Try \`$0 --help' for more information." ;; - - *) as_fn_append ac_config_targets " $1" - ac_need_defaults=false ;; - - esac - shift -done - -ac_configure_extra_args= - -if $ac_cs_silent; then - exec 6>/dev/null - ac_configure_extra_args="$ac_configure_extra_args --silent" -fi - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -if \$ac_cs_recheck; then - set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion - shift - \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 - CONFIG_SHELL='$SHELL' - export CONFIG_SHELL - exec "\$@" -fi - -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -exec 5>>config.log -{ - echo - sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX -## Running $as_me. 
## -_ASBOX - $as_echo "$ac_log" -} >&5 - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 - -# Handling of arguments. -for ac_config_target in $ac_config_targets -do - case $ac_config_target in - "cbits/hs_resolv_config.h") CONFIG_HEADERS="$CONFIG_HEADERS cbits/hs_resolv_config.h" ;; - "resolv.buildinfo") CONFIG_FILES="$CONFIG_FILES resolv.buildinfo" ;; - - *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; - esac -done - - -# If the user did not use the arguments to specify the items to instantiate, -# then the envvar interface is used. Set only those that are not. -# We use the long form for the default assignment because of an extremely -# bizarre bug on SunOS 4.1.3. -if $ac_need_defaults; then - test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files - test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers -fi - -# Have a temporary directory for convenience. Make it in the build tree -# simply because there is no reason against having it here, and in addition, -# creating and moving files from /tmp can sometimes cause problems. -# Hook for its removal unless debugging. -# Note that there is a small window in which the directory will not be cleaned: -# after its creation but before its name has been assigned to `$tmp'. -$debug || -{ - tmp= ac_tmp= - trap 'exit_status=$? - : "${ac_tmp:=$tmp}" - { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status -' 0 - trap 'as_fn_exit 1' 1 2 13 15 -} -# Create a (secure) tmp directory for tmp files. - -{ - tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && - test -d "$tmp" -} || -{ - tmp=./conf$$-$RANDOM - (umask 077 && mkdir "$tmp") -} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 -ac_tmp=$tmp - -# Set up the scripts for CONFIG_FILES section. -# No need to generate them if there are no CONFIG_FILES. -# This happens for instance with `./config.status config.h'. 
-if test -n "$CONFIG_FILES"; then - - -ac_cr=`echo X | tr X '\015'` -# On cygwin, bash can eat \r inside `` if the user requested igncr. -# But we know of no other shell where ac_cr would be empty at this -# point, so we can use a bashism as a fallback. -if test "x$ac_cr" = x; then - eval ac_cr=\$\'\\r\' -fi -ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` -if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then - ac_cs_awk_cr='\\r' -else - ac_cs_awk_cr=$ac_cr -fi - -echo 'BEGIN {' >"$ac_tmp/subs1.awk" && -_ACEOF - - -{ - echo "cat >conf$$subs.awk <<_ACEOF" && - echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && - echo "_ACEOF" -} >conf$$subs.sh || - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 -ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` -ac_delim='%!_!# ' -for ac_last_try in false false false false false :; do - . ./conf$$subs.sh || - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 - - ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` - if test $ac_delim_n = $ac_delim_num; then - break - elif $ac_last_try; then - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 - else - ac_delim="$ac_delim!$ac_delim _$ac_delim!! 
" - fi -done -rm -f conf$$subs.sh - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && -_ACEOF -sed -n ' -h -s/^/S["/; s/!.*/"]=/ -p -g -s/^[^!]*!// -:repl -t repl -s/'"$ac_delim"'$// -t delim -:nl -h -s/\(.\{148\}\)..*/\1/ -t more1 -s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ -p -n -b repl -:more1 -s/["\\]/\\&/g; s/^/"/; s/$/"\\/ -p -g -s/.\{148\}// -t nl -:delim -h -s/\(.\{148\}\)..*/\1/ -t more2 -s/["\\]/\\&/g; s/^/"/; s/$/"/ -p -b -:more2 -s/["\\]/\\&/g; s/^/"/; s/$/"\\/ -p -g -s/.\{148\}// -t delim -' >$CONFIG_STATUS || ac_write_fail=1 -rm -f conf$$subs.awk -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -_ACAWK -cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && - for (key in S) S_is_set[key] = 1 - FS = "" - -} -{ - line = $ 0 - nfields = split(line, field, "@") - substed = 0 - len = length(field[1]) - for (i = 2; i < nfields; i++) { - key = field[i] - keylen = length(key) - if (S_is_set[key]) { - value = S[key] - line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) - len += length(value) + length(field[++i]) - substed = 1 - } else - len += 1 + keylen - } - - print line -} - -_ACAWK -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then - sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" -else - cat -fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ - || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 -_ACEOF - -# VPATH may cause trouble with some makes, so we remove sole $(srcdir), -# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and -# trailing colons and then remove the whole line if VPATH becomes empty -# (actually we leave an empty line to preserve line numbers). 
-if test "x$srcdir" = x.; then - ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ -h -s/// -s/^/:/ -s/[ ]*$/:/ -s/:\$(srcdir):/:/g -s/:\${srcdir}:/:/g -s/:@srcdir@:/:/g -s/^:*// -s/:*$// -x -s/\(=[ ]*\).*/\1/ -G -s/\n// -s/^[^=]*=[ ]*$// -}' -fi - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -fi # test -n "$CONFIG_FILES" - -# Set up the scripts for CONFIG_HEADERS section. -# No need to generate them if there are no CONFIG_HEADERS. -# This happens for instance with `./config.status Makefile'. -if test -n "$CONFIG_HEADERS"; then -cat >"$ac_tmp/defines.awk" <<\_ACAWK || -BEGIN { -_ACEOF - -# Transform confdefs.h into an awk script `defines.awk', embedded as -# here-document in config.status, that substitutes the proper values into -# config.h.in to produce config.h. - -# Create a delimiter string that does not exist in confdefs.h, to ease -# handling of long lines. -ac_delim='%!_!# ' -for ac_last_try in false false :; do - ac_tt=`sed -n "/$ac_delim/p" confdefs.h` - if test -z "$ac_tt"; then - break - elif $ac_last_try; then - as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5 - else - ac_delim="$ac_delim!$ac_delim _$ac_delim!! " - fi -done - -# For the awk script, D is an array of macro values keyed by name, -# likewise P contains macro parameters if any. Preserve backslash -# newline sequences. 
- -ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* -sed -n ' -s/.\{148\}/&'"$ac_delim"'/g -t rset -:rset -s/^[ ]*#[ ]*define[ ][ ]*/ / -t def -d -:def -s/\\$// -t bsnl -s/["\\]/\\&/g -s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ -D["\1"]=" \3"/p -s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p -d -:bsnl -s/["\\]/\\&/g -s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ -D["\1"]=" \3\\\\\\n"\\/p -t cont -s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p -t cont -d -:cont -n -s/.\{148\}/&'"$ac_delim"'/g -t clear -:clear -s/\\$// -t bsnlc -s/["\\]/\\&/g; s/^/"/; s/$/"/p -d -:bsnlc -s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p -b cont -' >$CONFIG_STATUS || ac_write_fail=1 - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 - for (key in D) D_is_set[key] = 1 - FS = "" -} -/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { - line = \$ 0 - split(line, arg, " ") - if (arg[1] == "#") { - defundef = arg[2] - mac1 = arg[3] - } else { - defundef = substr(arg[1], 2) - mac1 = arg[2] - } - split(mac1, mac2, "(") #) - macro = mac2[1] - prefix = substr(line, 1, index(line, defundef) - 1) - if (D_is_set[macro]) { - # Preserve the white space surrounding the "#". - print prefix "define", macro P[macro] D[macro] - next - } else { - # Replace #undef with comments. This is necessary, for example, - # in the case of _POSIX_SOURCE, which is predefined and required - # on some systems where configure will not decide to define it. - if (defundef == "undef") { - print "/*", prefix defundef, macro, "*/" - next - } - } -} -{ print } -_ACAWK -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 - as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 -fi # test -n "$CONFIG_HEADERS" - - -eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS " -shift -for ac_tag -do - case $ac_tag in - :[FHLC]) ac_mode=$ac_tag; continue;; - esac - case $ac_mode$ac_tag in - :[FHL]*:*);; - :L* | :C*:*) as_fn_error $? 
"invalid tag \`$ac_tag'" "$LINENO" 5;; - :[FH]-) ac_tag=-:-;; - :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; - esac - ac_save_IFS=$IFS - IFS=: - set x $ac_tag - IFS=$ac_save_IFS - shift - ac_file=$1 - shift - - case $ac_mode in - :L) ac_source=$1;; - :[FH]) - ac_file_inputs= - for ac_f - do - case $ac_f in - -) ac_f="$ac_tmp/stdin";; - *) # Look for the file first in the build tree, then in the source tree - # (if the path is not absolute). The absolute path cannot be DOS-style, - # because $ac_f cannot contain `:'. - test -f "$ac_f" || - case $ac_f in - [\\/$]*) false;; - *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; - esac || - as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; - esac - case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac - as_fn_append ac_file_inputs " '$ac_f'" - done - - # Let's still pretend it is `configure' which instantiates (i.e., don't - # use $as_me), people would be surprised to read: - # /* config.h. Generated by config.status. */ - configure_input='Generated from '` - $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' - `' by configure.' - if test x"$ac_file" != x-; then - configure_input="$ac_file. $configure_input" - { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 -$as_echo "$as_me: creating $ac_file" >&6;} - fi - # Neutralize special characters interpreted by sed in replacement strings. - case $configure_input in #( - *\&* | *\|* | *\\* ) - ac_sed_conf_input=`$as_echo "$configure_input" | - sed 's/[\\\\&|]/\\\\&/g'`;; #( - *) ac_sed_conf_input=$configure_input;; - esac - - case $ac_tag in - *:-:* | *:-) cat >"$ac_tmp/stdin" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; - esac - ;; - esac - - ac_dir=`$as_dirname -- "$ac_file" || -$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$ac_file" : 'X\(//\)[^/]' \| \ - X"$ac_file" : 'X\(//\)$' \| \ - X"$ac_file" : 'X\(/\)' \| . 
2>/dev/null || -$as_echo X"$ac_file" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - as_dir="$ac_dir"; as_fn_mkdir_p - ac_builddir=. - -case "$ac_dir" in -.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; -*) - ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` - # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` - case $ac_top_builddir_sub in - "") ac_top_builddir_sub=. ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix - -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. - ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; -esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix - - - case $ac_mode in - :F) - # - # CONFIG_FILE - # - -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# If the template does not know about datarootdir, expand it. -# FIXME: This hack should be removed a few years after 2.60. 
-ac_datarootdir_hack=; ac_datarootdir_seen= -ac_sed_dataroot=' -/datarootdir/ { - p - q -} -/@datadir@/p -/@docdir@/p -/@infodir@/p -/@localedir@/p -/@mandir@/p' -case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in -*datarootdir*) ac_datarootdir_seen=yes;; -*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 -$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 - ac_datarootdir_hack=' - s&@datadir@&$datadir&g - s&@docdir@&$docdir&g - s&@infodir@&$infodir&g - s&@localedir@&$localedir&g - s&@mandir@&$mandir&g - s&\\\${datarootdir}&$datarootdir&g' ;; -esac -_ACEOF - -# Neutralize VPATH when `$srcdir' = `.'. -# Shell code in configure.ac might set extrasub. -# FIXME: do we really want to maintain this feature? -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -ac_sed_extra="$ac_vpsub -$extrasub -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -:t -/@[a-zA-Z_][a-zA-Z_0-9]*@/!b -s|@configure_input@|$ac_sed_conf_input|;t t -s&@top_builddir@&$ac_top_builddir_sub&;t t -s&@top_build_prefix@&$ac_top_build_prefix&;t t -s&@srcdir@&$ac_srcdir&;t t -s&@abs_srcdir@&$ac_abs_srcdir&;t t -s&@top_srcdir@&$ac_top_srcdir&;t t -s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t -s&@builddir@&$ac_builddir&;t t -s&@abs_builddir@&$ac_abs_builddir&;t t -s&@abs_top_builddir@&$ac_abs_top_builddir&;t t -$ac_datarootdir_hack -" -eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ - >$ac_tmp/out || as_fn_error $? 
"could not create $ac_file" "$LINENO" 5 - -test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && - { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && - { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ - "$ac_tmp/out"`; test -z "$ac_out"; } && - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&5 -$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&2;} - - rm -f "$ac_tmp/stdin" - case $ac_file in - -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; - *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; - esac \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - ;; - :H) - # - # CONFIG_HEADER - # - if test x"$ac_file" != x-; then - { - $as_echo "/* $configure_input */" \ - && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" - } >"$ac_tmp/config.h" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then - { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 -$as_echo "$as_me: $ac_file is unchanged" >&6;} - else - rm -f "$ac_file" - mv "$ac_tmp/config.h" "$ac_file" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - fi - else - $as_echo "/* $configure_input */" \ - && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \ - || as_fn_error $? "could not create -" "$LINENO" 5 - fi - ;; - - - esac - -done # for ac_tag - - -as_fn_exit 0 -_ACEOF -ac_clean_files=$ac_clean_files_save - -test $ac_write_fail = 0 || - as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 - - -# configure is writing to config.log, and then calls config.status. -# config.status does its own redirection, appending to config.log. 
-# Unfortunately, on DOS this fails, as config.log is still kept open -# by configure, so config.status won't be able to write to it; its -# output is simply discarded. So we exec the FD to /dev/null, -# effectively closing config.log, so it can be properly (re)opened and -# appended to by config.status. When coming back to configure, we -# need to make the FD available again. -if test "$no_create" != yes; then - ac_cs_success=: - ac_config_status_args= - test "$silent" = yes && - ac_config_status_args="$ac_config_status_args --quiet" - exec 5>/dev/null - $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false - exec 5>>config.log - # Use ||, not &&, to avoid exiting from the if with $? = 1, which - # would make configure fail if this is the last instruction. - $ac_cs_success || as_fn_exit 1 -fi -if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 -$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} -fi - - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/LICENSE cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/LICENSE --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/LICENSE 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. 
By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. 
- - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. 
- - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. 
- - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. 
The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. 
- - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. 
- - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. 
This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. 
For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. 
Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. 
- - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. 
Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. 
- - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. 
- - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. 
You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. 
The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. 
- - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Copyright (C) - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/resolv.buildinfo.in cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/resolv.buildinfo.in --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/resolv.buildinfo.in 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/resolv.buildinfo.in 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -extra-libraries: @EXTRA_LIBS@ -cpp-options: @CPP_OPTIONS@ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/resolv.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/resolv.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/resolv.cabal 2018-10-17 15:59:19.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/resolv.cabal 1970-01-01 00:00:00.000000000 +0000 @@ -1,106 +0,0 @@ -cabal-version: 1.12 - -name: resolv -version: 0.1.1.1 -x-revision: 3 - -synopsis: Domain Name Service (DNS) lookup via the libresolv standard library routines -description: { - -This package implements an API for accessing -the [Domain Name Service (DNS)](https://tools.ietf.org/html/rfc1035) -resolver service via the standard @libresolv@ system library (whose -API is often available directly via the standard @libc@ C library) on -Unix systems. -. -This package also includes support for decoding message record types -as defined in the following RFCs: -. 
-- [RFC 1035](https://tools.ietf.org/html/rfc1035): Domain Names - Implementation And Specification -- [RFC 1183](https://tools.ietf.org/html/rfc1183): New DNS RR Definitions -- [RFC 2782](https://tools.ietf.org/html/rfc2782): A DNS RR for specifying the location of services (DNS SRV) -- [RFC 2915](https://tools.ietf.org/html/rfc2915): The Naming Authority Pointer (NAPTR) DNS Resource Record -- [RFC 3596](https://tools.ietf.org/html/rfc3596): DNS Extensions to Support IP Version 6 -- [RFC 4034](https://tools.ietf.org/html/rfc4034): Resource Records for the DNS Security Extensions -- [RFC 4255](https://tools.ietf.org/html/rfc4255): Using DNS to Securely Publish Secure Shell (SSH) Key Fingerprints -- [RFC 4408](https://tools.ietf.org/html/rfc4408): Sender Policy Framework (SPF) for Authorizing Use of Domains in E-Mail, Version 1 -- [RFC 5155](https://tools.ietf.org/html/rfc5155): DNS Security (DNSSEC) Hashed Authenticated Denial of Existence -- [RFC 6844](https://tools.ietf.org/html/rfc6844): DNS Certification Authority Authorization (CAA) Resource Record -- [RFC 6891](https://tools.ietf.org/html/rfc6891): Extension Mechanisms for DNS (EDNS(0)) -- [RFC 7553](https://tools.ietf.org/html/rfc7553): The Uniform Resource Identifier (URI) DNS Resource Record - -} - -license: GPL-3 -license-file: LICENSE -author: Herbert Valerio Riedel -maintainer: hvr@gnu.org -category: Network -build-type: Configure -bug-reports: https://github.com/hvr/resolv/issues -extra-source-files: ChangeLog.md - -extra-source-files: cbits/hs_resolv.h - cbits/hs_resolv_config.h.in - testdata/msg/*.bin - testdata/msg/*.show - resolv.buildinfo.in - configure - -extra-tmp-files: autom4te.cache - config.log - config.status - resolv.buildinfo - cbits/hs_resolv_config.h - -tested-with: GHC==8.2.1, GHC==8.0.2, GHC==7.10.3, GHC==7.10.1, GHC==7.8.4, GHC==7.6.3, GHC==7.4.2 - -source-repository head - type: git - location: https://github.com/hvr/resolv.git - -library - default-language: Haskell2010 - 
other-extensions: BangPatterns - CApiFFI - CPP - DeriveDataTypeable - DeriveFoldable - DeriveFunctor - DeriveTraversable - GeneralizedNewtypeDeriving - OverloadedStrings - RecordWildCards - Trustworthy - - hs-source-dirs: src - exposed-modules: Network.DNS - other-modules: Network.DNS.Message - Network.DNS.FFI - Compat - - build-depends: base >= 4.5 && <4.13 - , base16-bytestring == 0.1.* - , binary >= 0.7.3 && < 0.9 - , bytestring >= 0.9.2 && < 0.11 - , containers >= 0.4.2.1 && < 0.7 - - ghc-options: -Wall - include-dirs: cbits - -test-suite resolv. - default-language: Haskell2010 - hs-source-dirs: src-test - main-is: Tests1.hs - type: exitcode-stdio-1.0 - - -- dependencies whose version constraints are inherited via lib:resolv component - build-depends: resolv - , base - , bytestring - - -- additional dependencies not inherited - build-depends: tasty >= 0.11.2 && < 0.12 - , tasty-hunit >= 0.9.2 && < 0.10 - , directory >= 1.1.0.2 && < 1.4 - , filepath >= 1.3.0 && < 1.5 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/Setup.hs 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/Setup.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,2 +0,0 @@ -import Distribution.Simple -main = defaultMainWithHooks autoconfUserHooks diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/src/Compat.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/src/Compat.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/src/Compat.hs 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/src/Compat.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,61 +0,0 @@ -{-# LANGUAGE CPP #-} - --- | --- Copyright: © 2017 Herbert Valerio Riedel --- License: GPLv3 -module Compat - ( toStrict - , 
fromStrict - , guard - , replicateM - , unless - , when - , A.Applicative(..) - , (<$>) - , Mon.Monoid(..) - , Foldable - , F.forM_ - , toList - , traverse - , T.Traversable - , module Data.Word - , module Data.Int - , module Data.Maybe - , putInt32be - , getInt32be - ) where - ---import qualified Data.ByteString.Lazy as BSL - -import Control.Applicative as A -import Control.Monad as M -import Data.Binary.Get -import Data.Binary.Put -#if MIN_VERSION_bytestring(0,10,0) -import Data.ByteString.Lazy (fromStrict, toStrict) -#else -import qualified Data.ByteString as BS -import qualified Data.ByteString.Lazy as BS.L -#endif -import Data.Foldable as F -import Data.Int -import Data.Maybe -import Data.Monoid as Mon (Monoid (..)) -import Data.Traversable as T -import Data.Word - -#if !(MIN_VERSION_bytestring(0,10,0)) -fromStrict :: BS.ByteString -> BS.L.ByteString -fromStrict = BS.L.fromChunks . (:[]) - -toStrict :: BS.L.ByteString -> BS.ByteString -toStrict = mconcat . BS.L.toChunks -#endif - -#if !MIN_VERSION_binary(0,8,1) -putInt32be :: Int32 -> Put -putInt32be x = putWord32be (fromIntegral x) - -getInt32be :: Get Int32 -getInt32be = fromIntegral <$> getWord32be -#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/src/Network/DNS/FFI.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/src/Network/DNS/FFI.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/src/Network/DNS/FFI.hs 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/src/Network/DNS/FFI.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,70 +0,0 @@ -{-# LANGUAGE CApiFFI #-} -{-# LANGUAGE CPP #-} - -module Network.DNS.FFI where - -import Control.Concurrent.MVar -import Foreign.C -import Foreign.Marshal.Alloc -import Foreign.Ptr -import System.IO.Unsafe (unsafePerformIO) - -#if !defined(USE_RES_NQUERY) -# error USE_RES_NQUERY not defined -#endif - -{-# INLINE resIsReentrant #-} --- | Whether the reentrant DNS 
resolver C API (e.g. @res_nquery(3)@, @res_nsend(3)@) is being used. --- --- If this this 'False', then as a fall-back --- @res_query(3)@/@res_send(3)@ are used, protected by a global mutex. --- --- @since 0.1.1.0 -resIsReentrant :: Bool -#if USE_RES_NQUERY -resIsReentrant = True -#else -resIsReentrant = False -#endif - -#if !defined(SIZEOF_RES_STATE) -# error SIZEOF_RES_STATE not defined -#endif - -#if USE_RES_NQUERY && (SIZEOF_RES_STATE <= 0) -# error broken invariant -#endif - -{-# INLINE sizeOfResState #-} -sizeOfResState :: CSize -sizeOfResState = SIZEOF_RES_STATE - -data CResState - -{-# NOINLINE resolvLock #-} -resolvLock :: MVar () -resolvLock = unsafePerformIO $ newMVar () - -withCResState :: (Ptr CResState -> IO a) -> IO a -withCResState act - | resIsReentrant = allocaBytes (fromIntegral sizeOfResState) $ \ptr -> do - _ <- c_memset ptr 0 sizeOfResState - act ptr - | otherwise = withMVar resolvLock $ \() -> act nullPtr - - --- void *memset(void *s, int c, size_t n); -foreign import capi unsafe "string.h memset" c_memset :: Ptr a -> CInt -> CSize -> IO (Ptr a) - --- int res_query(void *, const char *dname, int class, int type, unsigned char *answer, int anslen); -foreign import capi safe "hs_resolv.h hs_res_query" c_res_query :: Ptr CResState -> CString -> CInt -> CInt -> Ptr CChar -> CInt -> IO CInt - --- int res_send(void *, const unsigned char *msg, int msglen, unsigned char *answer, int anslen); -foreign import capi safe "hs_resolv.h hs_res_send" c_res_send :: Ptr CResState -> Ptr CChar -> CInt -> Ptr CChar -> CInt -> IO CInt - --- int res_opt_set_use_dnssec(void *); -foreign import capi safe "hs_resolv.h res_opt_set_use_dnssec" c_res_opt_set_use_dnssec :: Ptr CResState -> IO CInt - --- int hs_res_mkquery(void *, const char *dname, int class, int type, unsigned char *req, int reqlen0); -foreign import capi safe "hs_resolv.h hs_res_mkquery" c_res_mkquery :: Ptr CResState -> CString -> CInt -> CInt -> Ptr CChar -> CInt -> IO CInt - diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/src/Network/DNS/Message.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/src/Network/DNS/Message.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/src/Network/DNS/Message.hs 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/src/Network/DNS/Message.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,1069 +0,0 @@ -{-# LANGUAGE BangPatterns #-} -{-# LANGUAGE CApiFFI #-} -{-# LANGUAGE DeriveFoldable #-} -{-# LANGUAGE DeriveFunctor #-} -{-# LANGUAGE DeriveTraversable #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} - --- | --- Copyright: © 2017 Herbert Valerio Riedel --- License: GPLv3 --- --- Internal module -module Network.DNS.Message where - -import qualified Data.ByteString.Base16 as B16 - -import qualified Data.ByteString as BS -import qualified Data.ByteString.Lazy as BSL -import Data.Function -import Data.List (groupBy) -import Data.String -import Numeric (showHex) -import Prelude - -import Data.Binary -import Data.Binary.Get -import Data.Binary.Put -import Data.Bits -import Data.Map (Map) -import qualified Data.Map as Map -import Data.Set (Set) -import qualified Data.Set as Set - -import Compat - --- | An IPv6 address --- --- The IP address is represented in network order, --- i.e. @2606:2800:220:1:248:1893:25c8:1946@ is --- represented as @(IPv6 0x2606280002200001 0x248189325c81946)@. -data IPv6 = IPv6 !Word64 !Word64 - deriving (Eq,Ord,Read) - -instance Show IPv6 where - showsPrec p (IPv6 hi lo) = showParen (p >= 11) (showString "IPv6 0x" . showHex hi . showString " 0x" . showHex lo) - -instance Binary IPv6 where - put (IPv6 hi lo) = putWord64be hi >> putWord64be lo - get = IPv6 <$> getWord64be <*> getWord64be - --- | An IPv4 address --- --- The IP address is represented in network order, i.e. @127.0.0.1@ is --- represented as @(IPv4 0x7f000001)@. 
-data IPv4 = IPv4 !Word32 - deriving (Eq,Ord,Read) - -instance Show IPv4 where - showsPrec p (IPv4 n) = showParen (p >= 11) (showString "IPv4 0x" . showHex n) - -instance Binary IPv4 where - put (IPv4 w) = putWord32be w - get = IPv4 <$> getWord32be - --- | @\@ as per [RFC 1035, section 3.3](https://tools.ietf.org/html/rfc1035#section-3.3). --- --- A domain-name represented as a series of labels separated by dots. --- --- See also 'Labels' for list-based representation. --- --- __NOTE__: The 'Labels' type is able to properly represent domain --- names whose components contain dots which the 'Name' representation --- cannot. -newtype Name = Name BS.ByteString - deriving (Read,Show,Eq,Ord) - --- | @\@ as per [RFC 1035, section 3.3](https://tools.ietf.org/html/rfc1035#section-3.3). --- --- A sequence of up to 255 octets --- --- The limit of 255 octets is caused by the encoding which uses by a --- prefixed octet denoting the length. -newtype CharStr = CharStr BS.ByteString - deriving (Eq,Ord,IsString) - -instance Show CharStr where - showsPrec p (CharStr bs) = showsPrec p bs - -instance Read CharStr where - readsPrec p = map (\(x,y) -> (CharStr x,y)) <$> readsPrec p - -instance Binary CharStr where - put (CharStr bs) - | BS.length bs > 0xff = fail "putString: string too long" - | otherwise = do - putWord8 (fromIntegral $ BS.length bs) - putByteString bs - get = do - len' <- getWord8 - CharStr <$> getByteString (fromIntegral len') - -{- Resource records - - -- https://en.wikipedia.org/wiki/List_of_DNS_record_types - - RFC 1035 - - A 1 a host address - NS 2 an authoritative name server - CNAME 5 the canonical name for an alias - SOA 6 marks the start of a zone of authority - PTR 12 a domain name pointer - MX 15 mail exchange - TXT 16 text strings - - RFC 3596 - - AAAA 28 IPv6 - - RFC 2782 - - SRV 33 Location of services - - ---- - - RFC3597 Handling of Unknown DNS Resource Record (RR) Types - --} - --- | Represents a DNS message as per [RFC 
1035](https://tools.ietf.org/html/rfc1035) -data Msg l - = Msg - { msgHeader :: !MsgHeader - , msgQD :: [MsgQuestion l] - , msgAN, msgNS, msgAR :: [MsgRR l] - } deriving (Read,Show,Functor,Foldable,Traversable) - --- | DNS message header section as per [RFC 1035, section 4.1.1](https://tools.ietf.org/html/rfc1035#section-4.1.1) -data MsgHeader - = MsgHeader - { mhId :: !Word16 - - , mhFlags :: !MsgHeaderFlags - - , mhQDCount :: !Word16 - , mhANCount :: !Word16 - , mhNSCount :: !Word16 - , mhARCount :: !Word16 - } deriving (Read,Show) - --- | DNS message header section as per [RFC 1035, section 4.1.2](https://tools.ietf.org/html/rfc1035#section-4.1.2) -data MsgQuestion l - = MsgQuestion !l !Type !Class - deriving (Eq,Read,Show,Functor,Foldable,Traversable) - --- | DNS message header flags as per [RFC 1035, section 4.1.1](https://tools.ietf.org/html/rfc1035#section-4.1.1) -data MsgHeaderFlags - = MsgHeaderFlags - { mhQR :: !QR - , mhOpcode :: !Word8 -- actually Word4 - , mhAA :: !Bool - , mhTC :: !Bool - , mhRD :: !Bool - , mhRA :: !Bool - , mhZ :: !Bool -- reserved/unused bit - , mhAD :: !Bool -- RFC4035 - , mhCD :: !Bool -- RFC4035 - , mhRCode :: !Word8 -- Word4 - } deriving (Read,Show) - --- | DNS resource record section as per [RFC 1035, section 4.1.3](https://tools.ietf.org/html/rfc1035#section-4.1.3) -data MsgRR l - = MsgRR - { rrName :: !l - , rrClass :: !Class - , rrTTL :: !TTL - , rrData :: !(RData l) - } deriving (Eq,Read,Show,Functor,Foldable,Traversable) - --- | DNS resource record data (see also 'MsgRR' and 'TypeSym') -data RData l - = RDataA !IPv4 - | RDataAAAA !IPv6 - | RDataCNAME !l - | RDataPTR !l - | RDataHINFO !CharStr !CharStr - | RDataNS !l - | RDataMX !Word16 !l - | RDataTXT ![CharStr] - | RDataSPF ![CharStr] - | RDataSOA !l !l !Word32 !Word32 !Word32 !Word32 !Word32 - | RDataSRV !(SRV l) - - -- RFC 1183 - | RDataAFSDB !Word16 !l - - -- RFC 2915 - | RDataNAPTR !Word16 !Word16 !CharStr !CharStr !CharStr !l - - -- RFC 7553 - | RDataURI !Word16 
!Word16 !BS.ByteString - - -- RFC 4034 - | RDataRRSIG !Word16 !Word8 !Word8 !Word32 !Word32 !Word32 !Word16 !l !BS.ByteString - | RDataDNSKEY !Word16 !Word8 !Word8 !BS.ByteString - | RDataDS !Word16 !Word8 !Word8 !BS.ByteString - | RDataNSEC !l !(Set Type) - - -- RFC 4255 - | RDataSSHFP !Word8 !Word8 !BS.ByteString - - -- RFC 5155 - | RDataNSEC3PARAM !Word8 !Word8 !Word16 !CharStr - | RDataNSEC3 !Word8 !Word8 !Word16 !CharStr !CharStr !(Set Type) - - -- RFC 6844 - | RDataCAA !Word8 !CharStr !BS.ByteString - - -- pseudo-record - | RDataOPT !BS.ByteString -- FIXME - - -- unknown/unsupported - | RData !Type !BS.ByteString -- ^ Unknown/undecoded resource record type - deriving (Eq,Read,Show,Functor,Foldable,Traversable) - - --- | @SRV@ Record data as per [RFC 2782](https://tools.ietf.org/html/rfc2782) -data SRV l = SRV { srvPriority :: !Word16 - , srvWeight :: !Word16 - , srvPort :: !Word16 - , srvTarget :: !l - } deriving (Eq,Read,Show,Functor,Foldable,Traversable) - ----------------------------------------------------------------------------- - -decodeMessage' :: BS.ByteString -> Maybe (Msg Labels) -decodeMessage' bs = do - (rest, _, v) <- either handleParseFail Just $ - decodeOrFail (fromStrict bs) - - -- don't allow trailing garbage - guard (BSL.null rest) - - let ofss = Set.fromList $ mapMaybe labelsPtr (toList v) - ofsmap <- retrieveLabelPtrs bs ofss - - traverse (resolveLabelPtr ofsmap) v - where - -- handleParseFail _ = Nothing - handleParseFail (rest, n, e) = error $ show (e, n, BSL.length rest, BS.length bs) ++ "\n" ++ show (B16.encode $ toStrict rest) - --- | Decode a raw DNS message (query or response) --- --- Returns 'Nothing' on decoding failures. -decodeMessage :: IsLabels n => BS.ByteString -> Maybe (Msg n) -decodeMessage = fmap (fmap fromLabels) . 
decodeMessage' - -encodeMessage' :: Msg Labels -> BS.ByteString -encodeMessage' m = toStrict $ encode (fmap labels2labelsPtr m) - --- | Construct a raw DNS message (query or response) --- --- May return 'Nothing' in input parameters are detected to be invalid. -encodeMessage :: IsLabels n => Msg n -> Maybe BS.ByteString -encodeMessage m = encodeMessage' <$> traverse toLabels m - - -instance Binary l => Binary (Msg l) where - get = do - hdr@MsgHeader{..} <- get - - Msg hdr <$> replicateM (fromIntegral mhQDCount) get - <*> replicateM (fromIntegral mhANCount) get - <*> replicateM (fromIntegral mhNSCount) get - <*> replicateM (fromIntegral mhARCount) get - - put (Msg hdr qds ans nss ars) = do - put hdr - mapM_ put qds - mapM_ put ans - mapM_ put nss - mapM_ put ars - -instance Binary MsgHeader where - get = MsgHeader <$> getWord16be - <*> get - <*> getWord16be - <*> getWord16be - <*> getWord16be - <*> getWord16be - - put (MsgHeader{..}) = do - putWord16be mhId - put mhFlags - putWord16be mhQDCount - putWord16be mhANCount - putWord16be mhNSCount - putWord16be mhARCount - -instance Binary MsgHeaderFlags where - put = putWord16be . encodeFlags - get = decodeFlags <$> getWord16be - --- | Decode message header flag field --- --- > +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+ --- > |QR| Opcode |AA|TC|RD|RA|??|AD|CD| RCODE | --- > +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+ --- -decodeFlags :: Word16 -> MsgHeaderFlags -decodeFlags w = MsgHeaderFlags{..} - where - mhQR = if testBit w 15 then IsResponse else IsQuery - mhOpcode = shiftR' 11 .&. 0xf - mhAA = testBit w 10 - mhTC = testBit w 9 - mhRD = testBit w 8 - mhRA = testBit w 7 - mhZ = testBit w 6 - mhAD = testBit w 5 - mhCD = testBit w 4 - mhRCode = fromIntegral w .&. 0xf - - shiftR' = fromIntegral . shiftR w - -encodeFlags :: MsgHeaderFlags -> Word16 -encodeFlags MsgHeaderFlags{..} = - (case mhQR of - IsResponse -> bit 15 - IsQuery -> 0) .|. - (fromIntegral mhOpcode `shiftL` 11) .|. 
- (if mhAA then bit 10 else 0) .|. - (if mhTC then bit 9 else 0) .|. - (if mhRD then bit 8 else 0) .|. - (if mhRA then bit 7 else 0) .|. - (if mhZ then bit 6 else 0) .|. - (if mhAD then bit 5 else 0) .|. - (if mhCD then bit 4 else 0) .|. - (fromIntegral mhRCode) - --- | Encodes whether message is a query or a response --- --- @since 0.1.1.0 -data QR = IsQuery | IsResponse - deriving (Eq,Read,Show) - ----------------------------------------------------------------------------- - -infixr 5 :.: - --- | A DNS Label --- --- Must be non-empty and at most 63 octets. -type Label = BS.ByteString - --- | A @@ as per [RFC 1035, section 3.3](https://tools.ietf.org/html/rfc1035#section-3.3) expressed as list of 'Label's. --- --- See also 'Name' -data Labels = !Label :.: !Labels | Root - deriving (Read,Show,Eq,Ord) - -labelsToList :: Labels -> [Label] -labelsToList (x :.: xs) = x : labelsToList xs -labelsToList Root = [""] - --- | Types that represent @@ as per [RFC 1035, section 3.3](https://tools.ietf.org/html/rfc1035#section-3.3) and can be converted to and from 'Labels'. -class IsLabels s where - toLabels :: s -> Maybe Labels - fromLabels :: Labels -> s - -instance IsLabels Labels where - fromLabels = id - - toLabels ls - | all isLabelValid (init (labelsToList ls)) = Just ls - | otherwise = Nothing - where - isLabelValid l = not (BS.null l) && BS.length l < 0x40 - -instance IsLabels Name where - fromLabels = labels2name - toLabels = name2labels - -toName :: IsLabels n => n -> Maybe Name -toName = fmap fromLabels . toLabels - -name2labels :: Name -> Maybe Labels -name2labels (Name n) - | all (\l -> not (BS.null l) && BS.length l < 0x40) n' = Just $! foldr (:.:) Root n' - | otherwise = Nothing - where - n' | BS.isSuffixOf "." n = BS.split 0x2e (BS.init n) - | otherwise = BS.split 0x2e n - -labels2name :: Labels -> Name -labels2name Root = Name "." -labels2name ls = Name (BS.intercalate "." 
$ labelsToList ls) - --- | IOW, a domain-name --- --- May contain pointers --- --- Can be resolved into a 'Labels' without label ptrs. -data LabelsPtr = Label !Label !LabelsPtr -- ^ See RC2181: a label must be between 1-63 octets; can be arbitrary binary data - | LPtr !Word16 - | LNul - deriving (Eq,Read,Show) - -labels2labelsPtr :: Labels -> LabelsPtr -labels2labelsPtr Root = LNul -labels2labelsPtr (l :.: rest) = Label l (labels2labelsPtr rest) - -instance Binary LabelsPtr where - get = go [] - where - go acc = do - l0 <- getLabel - case l0 of - Right bs | BS.null bs -> pure (foldr Label LNul $ reverse acc) - | otherwise -> go (bs:acc) - Left ofs -> pure (foldr Label (LPtr ofs) $ reverse acc) - - getLabel :: Get (Either Word16 BS.ByteString) - getLabel = do - len <- getWord8 - - if len >= 0x40 - then do - when (len .&. 0xc0 /= 0xc0) $ fail ("invalid length octet " ++ show len) - ofs <- fromIntegral <$> getWord8 - pure $ Left $ (fromIntegral (len .&. 0x3f) `shiftL` 8) .|. ofs - else Right <$> getByteString (fromIntegral len) - - put LNul = putWord8 0 - put (Label l next) - | BS.length l < 1 || BS.length l >= 0x40 = error "put (Label {}): invalid label size" - | otherwise = do - putWord8 (fromIntegral (BS.length l)) - putByteString l - put next - put (LPtr ofs) - | ofs < 0x4000 = putWord16be (0xc000 .|. ofs) - | otherwise = error "put (LPtr {}): invalid offset" - --- | Compute serialised size of 'LabelsPtr' -labelsSize :: LabelsPtr -> Word16 -labelsSize = fromIntegral . 
go 0 - where - go n (LPtr _) = n+2 - go n LNul = n+1 - go n (Label bs rest) = go (n + 1 + BS.length bs) rest - --- | Extract pointer-offset from 'LabelsPtr' (if it exists) -labelsPtr :: LabelsPtr -> Maybe Word16 -labelsPtr (Label _ ls) = labelsPtr ls -labelsPtr LNul = Nothing -labelsPtr (LPtr ofs) = Just ofs - ----------------------------------------------------------------------------- - -instance Binary l => Binary (MsgQuestion l) where - get = MsgQuestion <$> get <*> get <*> get - put (MsgQuestion l qt cls) = put l >> put qt >> put cls - - -instance Binary l => Binary (MsgRR l) where - get = do - rrName <- get - rrType <- get - rrClass <- get - rrTTL <- get - rrData <- getRData rrType - pure (MsgRR {..}) - - put (MsgRR{..}) = do - put rrName - put (either id typeFromSym $ rdType rrData) - put rrClass - put rrTTL - putRData rrData - -getRData :: Binary l => Type -> Get (RData l) -getRData qt = do - len <- fromIntegral <$> getWord16be - - let unknownRdata = RData qt <$> getByteString len - - getByteStringRest = consumeRestWith getByteString - - consumeRestWith act = do - curofs <- fromIntegral <$> bytesRead - act (len - curofs) - - isolate len $ - case typeToSym qt of - Nothing -> unknownRdata - Just ts -> case ts of - TypeA -> RDataA <$> get - - TypeAFSDB -> RDataAFSDB <$> getWord16be - <*> get - - TypeNS -> RDataNS <$> get - - TypeCNAME -> RDataCNAME <$> get - - TypeSOA -> RDataSOA <$> get - <*> get - <*> getWord32be - <*> getWord32be - <*> getWord32be - <*> getWord32be - <*> getWord32be - - TypePTR -> RDataPTR <$> get - - TypeHINFO -> RDataHINFO <$> get - <*> get - - TypeMX -> RDataMX <$> getWord16be - <*> get - - TypeTXT -> RDataTXT <$> getUntilEmpty - TypeSPF -> RDataSPF <$> getUntilEmpty - - TypeAAAA -> RDataAAAA <$> get - - TypeSRV -> RDataSRV <$> get - - TypeNAPTR -> RDataNAPTR <$> getWord16be -- order - <*> getWord16be --preference - <*> get -- flags - <*> get -- services - <*> get -- regexp - <*> get -- replacement - - TypeRRSIG -> RDataRRSIG <$> 
getWord16be - <*> getWord8 - <*> getWord8 - <*> getWord32be - <*> getWord32be - <*> getWord32be - <*> getWord16be - <*> get -- uncompressed - <*> getByteStringRest - - TypeDNSKEY -> RDataDNSKEY <$> getWord16be - <*> getWord8 - <*> getWord8 - <*> getByteString (len - 4) - - TypeDS -> RDataDS <$> getWord16be - <*> getWord8 - <*> getWord8 - <*> getByteString (len - 4) - - TypeNSEC -> RDataNSEC <$> get - <*> decodeNsecTypeMap - - TypeURI -> RDataURI <$> getWord16be -- prio - <*> getWord16be -- weight - <*> getByteString (len - 4) - - TypeSSHFP -> RDataSSHFP <$> getWord8 - <*> getWord8 - <*> getByteString (len - 2) - - TypeNSEC3PARAM -> RDataNSEC3PARAM <$> getWord8 - <*> getWord8 - <*> getWord16be - <*> get -- salt - - TypeNSEC3 -> RDataNSEC3 <$> getWord8 - <*> getWord8 - <*> getWord16be - <*> get -- salt - <*> get -- next hashed owner name - <*> decodeNsecTypeMap - - TypeCAA -> RDataCAA <$> getWord8 -- flags - <*> get -- tag -- TODO: must be non-empty - <*> getByteStringRest - - TypeOPT -> RDataOPT <$> getByteString len -- FIXME - - TypeANY -> unknownRdata -- shouldn't happen - -putRData :: Binary l => RData l -> Put -putRData rd = do - let rdata = runPut (putRData' rd) - rdataLen = BSL.length rdata - - unless (rdataLen < 0x10000) $ - fail "rdata too large" - - putWord16be (fromIntegral rdataLen) - putLazyByteString rdata - -putRData' :: Binary l => RData l -> Put -putRData' rd = case rd of - RDataA ip4 -> put ip4 - RDataAAAA ip6 -> put ip6 - RDataCNAME cname -> put cname - RDataOPT d -> putByteString d - RDataMX prio l -> putWord16be prio >> put l - RDataSOA l1 l2 w1 w2 w3 w4 w5 -> do - put l1 - put l2 - putWord32be w1 - putWord32be w2 - putWord32be w3 - putWord32be w4 - putWord32be w5 - - RDataPTR l -> put l - RDataNS l -> put l - RDataTXT ss -> mapM_ put ss - RDataSPF ss -> mapM_ put ss - RDataSRV srv -> put srv - - RDataAFSDB w l -> putWord16be w >> put l - - RDataHINFO s1 s2 -> put s1 >> put s2 - - RDataRRSIG w1 w2 w3 w4 w5 w6 w7 l s -> do - putWord16be w1 - 
putWord8 w2 - putWord8 w3 - putWord32be w4 - putWord32be w5 - putWord32be w6 - putWord16be w7 - put l - putByteString s - - RDataDNSKEY w1 w2 w3 s -> do - putWord16be w1 - putWord8 w2 - putWord8 w3 - putByteString s - - RDataNSEC3PARAM w1 w2 w3 s -> do - putWord8 w1 - putWord8 w2 - putWord16be w3 - put s - - RDataNSEC3 w1 w2 w3 s1 s2 tm -> do - putWord8 w1 - putWord8 w2 - putWord16be w3 - put s1 - put s2 - encodeNsecTypeMap tm - - RDataCAA fl s1 s2 -> do - putWord8 fl - put s1 - putByteString s2 - - RDataURI w1 w2 s -> do - putWord16be w1 - putWord16be w2 - putByteString s - - RDataDS w1 w2 w3 s -> do - putWord16be w1 - putWord8 w2 - putWord8 w3 - putByteString s - - RDataNSEC l tm -> do - put l - encodeNsecTypeMap tm - - RDataNAPTR w1 w2 s1 s2 s3 l -> do - putWord16be w1 - putWord16be w2 - put s1 - put s2 - put s3 - put l - - RDataSSHFP w1 w2 s -> do - putWord8 w1 - putWord8 w2 - putByteString s - - RData _ raw -> putByteString raw - - -- _ -> error ("putRData: " ++ show rd) - - -instance Binary l => Binary (SRV l) where - get = SRV <$> getWord16be - <*> getWord16be - <*> getWord16be - <*> get - - put (SRV w1 w2 w3 l) = do - putWord16be w1 - putWord16be w2 - putWord16be w3 - put l - -{- NSEC type-bitmap example: - - A NS SOA TXT AAAA RRSIG NSEC DNSKEY - -'00 07 62 00 80 08 00 03 80' -'00000000 00000111 01100010 00000000 10000000 00001000 00000000 00000011 10000000' - Win=#0 len=7 ^{SOA} ^{TXT} ^{AAAA} ^{DNSKEY} - ^^{A,NS} ^^{RRSIG,NSEC} --} - -decodeNsecTypeMap :: Get (Set Type) -decodeNsecTypeMap = do - r <- concat <$> untilEmptyWith decode1 - -- TODO: enforce uniqueness - pure (Set.fromList r) - where - -- decode single window - decode1 = do - wi <- getWord8 - l <- getWord8 - unless (0 < l && l <= 32) $ - fail "invalid bitmap length" - - bmap <- getByteString (fromIntegral l) - - let winofs = (fromIntegral wi)*0x100 :: Int - lst = [ Type (fromIntegral (winofs+j*8+7-i)) - | (j,x) <- zip [0..] 
(BS.unpack bmap) - , i <- [7,6..0] - , testBit x i ] - - pure lst - -encodeNsecTypeMap :: Set Type -> Put -encodeNsecTypeMap bmap = do - when (Set.null bmap) $ fail "invalid empty type-map" - -- when (Set.member 0 bmap) $ fail "invalid TYPE0 set in type-map" - -- TODO: verify that Meta-TYPES and QTYPEs aren't contained in bmap - - forM_ (Map.toList bmap') $ \(wi, tm) -> do - putWord8 wi - put (CharStr $ BS.pack tm) - where - bmap' = fmap set2bitmap . splitToBlocks $ Set.map (\(Type w)->w) bmap - -set2bitmap :: Set Word8 -> [Word8] -set2bitmap = go 0 0 . Set.toList - where - go _ acc [] = if acc == 0 then [] else [acc] - go j acc (i:is) - | j' > j = acc : go (j+1) 0 (i:is) - | j' == j = go j' (acc .|. bit (7 - fromIntegral i')) is - | otherwise = error "set2bitmap: the impossible happened" - where - (j',i') = i `quotRem` 8 - -splitToBlocks :: Set Word16 -> Map Word8 (Set Word8) -splitToBlocks js = Map.fromList $ map (\xs -> (fst $ head xs, Set.fromList (map snd xs))) js' - where - hi16 :: Word16 -> Word8 - hi16 = fromIntegral . flip shiftR 8 - - lo16 :: Word16 -> Word8 - lo16 = fromIntegral . (.&. 
0xff) - - js' :: [[(Word8,Word8)]] - js' = groupBy ((==) `on` fst) (map ((,) <$> hi16 <*> lo16) (Set.toList js)) - - --- | Resolves/parses label pointer used for label compressing --- --- Returns 'Nothing' on failure -retrieveLabelPtr :: BS.ByteString -> Word16 -> Maybe LabelsPtr -retrieveLabelPtr msg ofs - = case decodeOrFail (fromStrict $ BS.drop (fromIntegral ofs) msg) of - Left _ -> Nothing - Right (_, _, v) -> Just v - --- | Resolve set of label pointer offsets --- --- Invariants (/iff/ result is not 'Nothing') --- --- * all requested offsets will be contained in the result map --- --- * any offsets contained in the resolved 'Labels' will be part of --- the result map as well --- --- NB: No cycle detection is performed, nor are 'Labels' flattened -retrieveLabelPtrs :: BS.ByteString -> Set Word16 -> Maybe (Map Word16 LabelsPtr) -retrieveLabelPtrs msg ofss0 = go =<< lupPtrs1 ofss0 - where - go :: Map Word16 LabelsPtr -> Maybe (Map Word16 LabelsPtr) - go m0 = do - let missingOfss = Set.fromList (mapMaybe labelsPtr (toList m0)) Set.\\ Map.keysSet m0 - - if Set.null missingOfss - then pure m0 -- fix-point reached - else do - m1 <- lupPtrs1 missingOfss - go (Map.union m0 m1) - - -- single lookup step - lupPtrs1 :: Set Word16 -> Maybe (Map Word16 LabelsPtr) - lupPtrs1 ofss1 = Map.fromList . zip (toList ofss1) <$> traverse (retrieveLabelPtr msg) (toList ofss1) - --- | Checks for maximum name length (255) and (therefore indirectly) cycle-checking -resolveLabelPtr :: Map Word16 LabelsPtr -> LabelsPtr -> Maybe Labels -resolveLabelPtr ofsmap = go 0 [] - where - go :: Int -> [BS.ByteString] -> LabelsPtr -> Maybe Labels - go !n acc (Label x ls) = go (n+1+BS.length x) (x:acc) ls - go n acc LNul - | n < 255 = Just $! 
foldr (:.:) Root (reverse acc) - | otherwise = Nothing -- length violation - go n acc (LPtr ofs) - | n < 255 = go n acc =<< lup ofs - | otherwise = Nothing - - lup :: Word16 -> Maybe LabelsPtr - lup ofs = Map.lookup ofs ofsmap - - -{- Resource records - - -- https://en.wikipedia.org/wiki/List_of_DNS_record_types - - RFC 1035 - - A 1 a host address - NS 2 an authoritative name server - CNAME 5 the canonical name for an alias - SOA 6 marks the start of a zone of authority - PTR 12 a domain name pointer - MX 15 mail exchange - TXT 16 text strings - - RFC 3596 - - AAAA 28 IPv6 - - RFC 2782 - - SRV 33 Location of services - - ---- - - RFC3597 Handling of Unknown DNS Resource Record (RR) Types - --} - --- | Raw DNS record type code --- --- See also 'TypeSym' -newtype Type = Type Word16 - deriving (Eq,Ord,Read,Show) - -instance Binary Type where - put (Type w) = putWord16be w - get = Type <$> getWord16be - --- | DNS @CLASS@ code as per [RFC 1035, section 3.2.4](https://tools.ietf.org/html/rfc1035#section-3.2.4) --- --- The most commonly used value is 'classIN'. 
-newtype Class = Class Word16 - deriving (Eq,Ord,Read,Show) - --- | The 'Class' constant for @IN@ (Internet) -classIN :: Class -classIN = Class 1 - -instance Binary Class where - put (Class w) = putWord16be w - get = Class <$> getWord16be - --- | Cache time-to-live expressed in seconds -newtype TTL = TTL Int32 - deriving (Eq,Ord,Read,Show) - -instance Binary TTL where - put (TTL i) = putInt32be i - get = TTL <$> getInt32be - --- http://www.bind9.net/dns-parameters - --- | Symbolic DNS record type -data TypeSym - = TypeA -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) - | TypeAAAA -- ^ [RFC 3596](https://tools.ietf.org/html/rfc3596) - | TypeAFSDB -- ^ [RFC 1183](https://tools.ietf.org/html/rfc1183) - | TypeANY -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) (query) - | TypeCAA -- ^ [RFC 6844](https://tools.ietf.org/html/rfc6844) - | TypeCNAME -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) - | TypeDNSKEY -- ^ [RFC 4034](https://tools.ietf.org/html/rfc4034) - | TypeDS -- ^ [RFC 4034](https://tools.ietf.org/html/rfc4034) - | TypeHINFO -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) - | TypeMX -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) - | TypeNAPTR -- ^ [RFC 2915](https://tools.ietf.org/html/rfc2915) - | TypeNS -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) - | TypeNSEC -- ^ [RFC 4034](https://tools.ietf.org/html/rfc4034) - | TypeNSEC3 -- ^ [RFC 5155](https://tools.ietf.org/html/rfc5155) - | TypeNSEC3PARAM -- ^ [RFC 5155](https://tools.ietf.org/html/rfc5155) - | TypeOPT -- ^ [RFC 6891](https://tools.ietf.org/html/rfc6891) (meta) - | TypePTR -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) - | TypeRRSIG -- ^ [RFC 4034](https://tools.ietf.org/html/rfc4034) - | TypeSOA -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) - | TypeSPF -- ^ [RFC 4408](https://tools.ietf.org/html/rfc4408) - | TypeSRV -- ^ [RFC 2782](https://tools.ietf.org/html/rfc2782) - | TypeSSHFP -- ^ [RFC 4255](https://tools.ietf.org/html/rfc4255) - | TypeTXT -- ^ 
[RFC 1035](https://tools.ietf.org/html/rfc1035) - | TypeURI -- ^ [RFC 7553](https://tools.ietf.org/html/rfc7553) - deriving (Eq,Ord,Enum,Bounded,Read,Show) - --- | Convert symbolic 'TypeSym' to numeric 'Type' code -typeFromSym :: TypeSym -> Type -typeFromSym ts = Type $ case ts of - TypeA -> 1 - TypeNS -> 2 - TypeCNAME -> 5 - TypeSOA -> 6 - TypePTR -> 12 - TypeHINFO -> 13 - TypeMX -> 15 - TypeTXT -> 16 - TypeAFSDB -> 18 - TypeAAAA -> 28 - TypeSRV -> 33 - TypeNAPTR -> 35 - TypeOPT -> 41 - TypeDS -> 43 - TypeSSHFP -> 44 - TypeRRSIG -> 46 - TypeNSEC -> 47 - TypeDNSKEY -> 48 - TypeNSEC3 -> 50 - TypeNSEC3PARAM -> 51 - TypeSPF -> 99 - TypeANY -> 255 - TypeURI -> 256 - TypeCAA -> 257 - --- | Convert 'Type' code to symbolic 'TypeSym' -typeToSym :: Type -> Maybe TypeSym -typeToSym (Type w) = case w of - 1 -> Just TypeA - 2 -> Just TypeNS - 5 -> Just TypeCNAME - 6 -> Just TypeSOA - 12 -> Just TypePTR - 13 -> Just TypeHINFO - 15 -> Just TypeMX - 16 -> Just TypeTXT - 18 -> Just TypeAFSDB - 28 -> Just TypeAAAA - 33 -> Just TypeSRV - 35 -> Just TypeNAPTR - 41 -> Just TypeOPT - 43 -> Just TypeDS - 44 -> Just TypeSSHFP - 46 -> Just TypeRRSIG - 47 -> Just TypeNSEC - 48 -> Just TypeDNSKEY - 50 -> Just TypeNSEC3 - 51 -> Just TypeNSEC3PARAM - 99 -> Just TypeSPF - 255 -> Just TypeANY - 256 -> Just TypeURI - 257 -> Just TypeCAA - _ -> Nothing - --- | Extract the resource record type of a 'RData' object -rdType :: RData l -> Either Type TypeSym -rdType rd = case rd of - RDataA {} -> Right TypeA - RDataAAAA {} -> Right TypeAAAA - RDataAFSDB {} -> Right TypeAFSDB - RDataCAA {} -> Right TypeCAA - RDataCNAME {} -> Right TypeCNAME - RDataDNSKEY {} -> Right TypeDNSKEY - RDataDS {} -> Right TypeDS - RDataHINFO {} -> Right TypeHINFO - RDataMX {} -> Right TypeMX - RDataNAPTR {} -> Right TypeNAPTR - RDataNS {} -> Right TypeNS - RDataNSEC {} -> Right TypeNSEC - RDataNSEC3 {} -> Right TypeNSEC3 - RDataNSEC3PARAM {} -> Right TypeNSEC3PARAM - RDataOPT {} -> Right TypeOPT - RDataPTR {} -> Right TypePTR 
- RDataRRSIG {} -> Right TypeRRSIG - RDataSOA {} -> Right TypeSOA - RDataSRV {} -> Right TypeSRV - RDataTXT {} -> Right TypeTXT - RDataSPF {} -> Right TypeSPF - RDataURI {} -> Right TypeURI - RDataSSHFP {} -> Right TypeSSHFP - -- - RData ty _ -> maybe (Left ty) Right (typeToSym ty) - - -{- TODO: - - -type-bitmap: - - A NS SOA TXT AAAA RRSIG NSEC DNSKEY - -'00 07 62 00 80 08 00 03 80' -'00000000 00000111 01100010 00000000 10000000 00001000 00000000 00000011 10000000' - Win=#0 len=7 ^{SOA} ^{TXT} ^{AAAA} ^{DNSKEY} - ^^{A,NS} ^^{RRSIG,NSEC} - -" ".join(map("{:08b}".format,[0,7,98,0,128,8,0,3,128])) - - -"\NUL\a\"\NUL\NUL\NUL\NUL\ETX\128" NS SOA RRSIG NSEC DNSKEY - -[ (winofs+j*8+7-i) | (j,x) <- zip [0..] xs, i <- [7,6..0], testBit x i ] - --} - - - --- helpers - -getUntilEmpty :: Binary a => Get [a] -getUntilEmpty = untilEmptyWith get - -untilEmptyWith :: Get a -> Get [a] -untilEmptyWith g = go [] - where - go acc = do - e <- isEmpty - if e - then pure (reverse acc) - else do - v <- g - go (v : acc) - - - -{- TODO: - - - MsgRR{rrName = Name "stanford.edu.", rrClass = 1, rrTTL = 1799, - rrData = - RData 29 - "\NUL\DC2\SYN\DC3\136\a\244\212e\200\252\194\NUL\152\150\128"}, - - -https://en.wikipedia.org/wiki/LOC_record - - -LOC record statdns.net. IN LOC 52 22 23.000 N 4 53 32.000 E -2.00m 0.00m 10000m 10m - - -SW1A2AA.find.me.uk. 
86399 IN LOC 51 30 12.748 N 0 7 39.611 W 0.00m 0.00m 0.00m 0.00m - - -https://tools.ietf.org/html/rfc1876 - --} - diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/src/Network/DNS.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/src/Network/DNS.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/src/Network/DNS.hs 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/src/Network/DNS.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,373 +0,0 @@ -{-# LANGUAGE CApiFFI #-} -{-# LANGUAGE DeriveDataTypeable #-} -{-# LANGUAGE OverloadedStrings #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE Trustworthy #-} - --- | --- Copyright: © 2017 Herbert Valerio Riedel --- License: GPLv3 --- --- This module implements an API for accessing --- the [Domain Name Service (DNS)](https://tools.ietf.org/html/rfc1035) --- resolver service via the standard @libresolv@ system library --- on Unix systems. --- -module Network.DNS - ( -- ** High level API - queryA - , queryAAAA - , queryCNAME - , querySRV - , queryTXT - - -- * Mid-level API - , query - , DnsException(..) - - -- * Low-level API - , resIsReentrant - , queryRaw - , sendRaw - , mkQueryRaw - - , decodeMessage - , encodeMessage - , mkQueryMsg - - -- * Types - -- ** Basic types - - -- *** Names/Labels - , Label - , Labels(..) - , IsLabels(..) - - , Name(..) - , caseFoldName - - -- *** Character strings - , CharStr(..) - - -- *** IP addresses - , IPv4(..) - , IPv6(..) - - -- *** RR TTL & Class - , TTL(..) - - , Class(..) - , classIN - - -- *** Message types - , Type(..) - , TypeSym(..) - , typeFromSym - , typeToSym - - -- ** Messages - - , Msg(..) - - , MsgHeader(..) - , MsgHeaderFlags(..), QR(..) - , MsgQuestion(..) - , MsgRR(..) - - , RData(..) - , rdType - - , SRV(..) 
- ) - where - -import Control.Exception -import Data.Typeable (Typeable) -import Foreign.C -import Foreign.Marshal.Alloc -import Prelude - -import qualified Data.ByteString as BS - -import Compat - -import Network.DNS.FFI -import Network.DNS.Message - --- | Exception thrown in case of errors while encoding or decoding into a 'Msg'. --- --- @since 0.1.1.0 -data DnsException = DnsEncodeException - | DnsDecodeException - deriving (Show, Typeable) - -instance Exception DnsException - --- | Send a query via @res_query(3)@ and decode its response into a 'Msg' --- --- Throws 'DnsException' in case of encoding or decoding errors. May throw other IO exceptions in case of network errors. --- --- === Example --- --- >>> query classIN (Name "_mirrors.hackage.haskell.org") TypeTXT --- Just (Msg{msgHeader = MsgHeader{mhId = 56694, --- mhFlags = MsgHeaderFlags{mhQR = IsResponse, mhOpcode = 0, mhAA = False, --- mhTC = False, mhRD = True, mhRA = True, mhZ = False, --- mhAD = False, mhCD = False, mhRCode = 0}, --- mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, --- msgQD = [MsgQuestion (Name "_mirrors.hackage.haskell.org.") (Type 16) (Class 1)], --- msgAN = [MsgRR{rrName = Name "_mirrors.hackage.haskell.org.", --- rrClass = Class 1, rrTTL = TTL 299, --- rrData = RDataTXT ["0.urlbase=http://hackage.fpcomplete.com/", --- "1.urlbase=http://objects-us-west-1.dream.io/hackage-mirror/"]}], --- msgNS = [], --- msgAR = [MsgRR{rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}] --- }) --- -query :: IsLabels n => Class -> n -> TypeSym -> IO (Msg n) -query cls name0 qtype - | Just name <- toName name0 = do - bs <- queryRaw cls name (typeFromSym qtype) - msg <- evaluate (decodeMessage bs) - maybe (throwIO DnsDecodeException) pure msg - | otherwise = throwIO DnsEncodeException - --- | Send a query via @res_query(3)@, the return value is the raw binary response message. --- --- You can use 'decodeMessage' to decode the response message. 
-queryRaw :: Class -> Name -> Type -> IO BS.ByteString -queryRaw (Class cls) (Name name) qtype = withCResState $ \stptr -> do - allocaBytes max_msg_size $ \resptr -> do - _ <- c_memset resptr 0 max_msg_size - BS.useAsCString name $ \dn -> do - - rc1 <- c_res_opt_set_use_dnssec stptr - unless (rc1 == 0) $ - fail "res_init(3) failed" - - resetErrno - reslen <- c_res_query stptr dn (fromIntegral cls) qtypeVal resptr max_msg_size - - unless (reslen <= max_msg_size) $ - fail "res_query(3) message size overflow" - - errno <- getErrno - - when (reslen < 0) $ do - unless (errno == eOK) $ - throwErrno "res_query" - - fail "res_query(3) failed" - - BS.packCStringLen (resptr, fromIntegral reslen) - - where - -- The DNS protocol is inherently 16-bit-offset based; so 64KiB is - -- a reasonable maximum message size most implementations seem to - -- support. - max_msg_size :: Num a => a - max_msg_size = 0x10000 - - qtypeVal :: CInt - qtypeVal = case qtype of Type w -> fromIntegral w - --- | Send a raw preformatted query via @res_send(3)@. -sendRaw :: BS.ByteString -> IO BS.ByteString -sendRaw req = withCResState $ \stptr -> do - allocaBytes max_msg_size $ \resptr -> do - _ <- c_memset resptr 0 max_msg_size - BS.useAsCStringLen req $ \(reqptr,reqlen) -> do - rc1 <- c_res_opt_set_use_dnssec stptr - unless (rc1 == 0) $ - fail "res_init(3) failed" - - resetErrno - reslen <- c_res_send stptr reqptr (fromIntegral reqlen) resptr max_msg_size - - unless (reslen <= max_msg_size) $ - fail "res_send(3) message size overflow" - - errno <- getErrno - - when (reslen < 0) $ do - unless (errno == eOK) $ - throwErrno "res_send" - - fail "res_send(3) failed" - - BS.packCStringLen (resptr, fromIntegral reslen) - - where - -- The DNS protocol is inherently 16-bit-offset based; so 64KiB is - -- a reasonable maximum message size most implementations seem to - -- support. 
- max_msg_size :: Num a => a - max_msg_size = 0x10000 - --- | Construct a DNS query 'Msg' in the style of 'mkQueryRaw' -mkQueryMsg :: IsLabels n => Class -> n -> Type -> Msg n -mkQueryMsg cls l qtype = Msg (MsgHeader{..}) - [MsgQuestion l qtype cls] - [] - [] - [MsgRR {..}] - where - mhId = 31337 - mhFlags = MsgHeaderFlags - { mhQR = IsQuery - , mhOpcode = 0 - , mhAA = False - , mhTC = False - , mhRD = True - , mhRA = False - , mhZ = False - , mhAD = True - , mhCD = False - , mhRCode = 0 - } - - mhQDCount = 1 - mhANCount = 0 - mhNSCount = 0 - mhARCount = 1 - - rrName = fromLabels Root - rrClass = Class 512 - rrTTL = TTL 0x8000 - rrData = RDataOPT "" - - - --- | Use @res_mkquery(3)@ to construct a DNS query message. -mkQueryRaw :: Class -> Name -> Type -> IO BS.ByteString -mkQueryRaw (Class cls) (Name name) qtype = withCResState $ \stptr -> do - allocaBytes max_msg_size $ \resptr -> do - _ <- c_memset resptr 0 max_msg_size - BS.useAsCString name $ \dn -> do - - rc1 <- c_res_opt_set_use_dnssec stptr - unless (rc1 == 0) $ - fail "res_init(3) failed" - - resetErrno - reslen <- c_res_mkquery stptr dn (fromIntegral cls) qtypeVal resptr max_msg_size - - unless (reslen <= max_msg_size) $ - fail "res_mkquery(3) message size overflow" - - errno <- getErrno - - when (reslen < 0) $ do - unless (errno == eOK) $ - throwErrno "res_query" - - fail "res_mkquery(3) failed" - - BS.packCStringLen (resptr, fromIntegral reslen) - - where - -- The DNS protocol is inherently 16-bit-offset based; so 64KiB is - -- a reasonable maximum message size most implementations seem to - -- support. 
- max_msg_size :: Num a => a - max_msg_size = 0x10000 - - qtypeVal :: CInt - qtypeVal = case qtype of Type w -> fromIntegral w - - ----------------------------------------------------------------------------- --- Common High-level queries - --- | Normalise 'Name' --- --- This function case folds 'Name's as described in --- in [RFC 4343, section 3](https://tools.ietf.org/html/rfc4343#section-3) --- by subtracting @0x20@ from all octets in the inclusive range --- @[0x61..0x7A]@ (i.e. mapping @['a'..'z']@ to @['A'..'Z']@). --- --- This operation is idempotent. -caseFoldName :: Name -> Name -caseFoldName (Name n) = (Name n'') - where - n' = BS.map cf n - n'' | BS.null n' = "." - | BS.last n' == 0x2e {- '.' -} = n' - | otherwise = n' `mappend` "." - - -- case fold (c.f. RFC4343) - cf w | 0x61 <= w && w <= 0x7a = w - 0x20 - | otherwise = w - ----------------------------------------------------------------------------- - --- | Query @A@ record (see [RFC 1035, section 3.4.1](https://tools.ietf.org/html/rfc1035#section-3.4.1)). --- --- This query returns only exact matches (modulo 'foldCaseName'). --- E.g. in case of @CNAME@ responses even if the --- answer section would contain @A@ records for the hostnames pointed --- to by the @CNAME@. You can use 'query' if you need more control. --- --- >>> queryA (Name "www.google.com") --- [(TTL 72,IPv4 0xd83acde4)] --- -queryA :: Name -> IO [(TTL,IPv4)] -queryA n = do - res <- query classIN n' TypeA - pure [ (ttl,ip4) | MsgRR { rrData = RDataA ip4, rrTTL = ttl, rrName = n1, rrClass = Class 1 } <- msgAN res, caseFoldName n1 == n' ] - where - n' = caseFoldName n - --- | Query @AAAA@ records (see [RFC 3596](https://tools.ietf.org/html/rfc3596)). --- --- This query returns only exact matches (modulo 'foldCaseName'). --- E.g. in case of @CNAME@ responses even if the answer section would --- contain @A@ records for the hostnames pointed to by the --- @CNAME@. You can use 'query' if you need more control. 
--- --- >>> queryAAAA (Name "www.google.com") --- [(TTL 299,IPv6 0x2a0014504001081e 0x2004)] --- -queryAAAA :: Name -> IO [(TTL,IPv6)] -queryAAAA n = do - res <- query classIN n' TypeAAAA - pure [ (ttl,ip6) | MsgRR { rrData = RDataAAAA ip6, rrTTL = ttl, rrName = n1, rrClass = Class 1 } <- msgAN res, caseFoldName n1 == n' ] - where - n' = caseFoldName n - --- | Query @CNAME@ records (see [RFC 1035, section 3.3.1](https://tools.ietf.org/html/rfc1035#section-3.3.1)). --- --- >>> queryCNAME (Name "hackage.haskell.org") --- [(TTL 299,Name "j.global-ssl.fastly.net.")] --- -queryCNAME :: Name -> IO [(TTL,Name)] -queryCNAME n = do - res <- query classIN n' TypeAAAA - pure [ (ttl,cname) | MsgRR { rrData = RDataCNAME cname, rrTTL = ttl, rrName = n1, rrClass = Class 1 } <- msgAN res, caseFoldName n1 == n' ] - where - n' = caseFoldName n - --- | Query @TXT@ records (see [RFC 1035, section 3.3.14](https://tools.ietf.org/html/rfc1035#section-3.3.14)). --- --- >>> queryTXT (Name "_mirrors.hackage.haskell.org") --- [(TTL 299,["0.urlbase=http://hackage.fpcomplete.com/", --- "1.urlbase=http://objects-us-west-1.dream.io/hackage-mirror/"])] --- -queryTXT :: Name -> IO [(TTL,[CharStr])] -queryTXT n = do - res <- query classIN n' TypeTXT - pure [ (ttl,txts) | MsgRR { rrData = RDataTXT txts, rrTTL = ttl, rrName = n1, rrClass = Class 1 } <- msgAN res, caseFoldName n1 == n' ] - where - n' = caseFoldName n - --- | Query @SRV@ records (see [RFC 2782](https://tools.ietf.org/html/rfc2782)). 
--- --- >>> querySRV (Name "_imap._tcp.gmail.com") --- [(TTL 21599,SRV {srvPriority = 0, srvWeight = 0, srvPort = 0, srvTarget = Name "."})] --- -querySRV :: Name -> IO [(TTL,SRV Name)] -querySRV n = do - res <- query classIN n' TypeSRV - pure [ (ttl,srv) | MsgRR { rrData = RDataSRV srv, rrTTL = ttl, rrName = n1, rrClass = Class 1 } <- msgAN res, caseFoldName n1 == n' ] - where - n' = caseFoldName n diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/src-test/Tests1.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/src-test/Tests1.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/src-test/Tests1.hs 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/src-test/Tests1.hs 1970-01-01 00:00:00.000000000 +0000 @@ -1,91 +0,0 @@ -{-# LANGUAGE OverloadedStrings #-} - -module Main where - -import Control.Applicative as A -import qualified Control.Exception as E -import Control.Monad -import qualified Data.ByteString as BS -import System.Directory (getDirectoryContents, removeFile) -import System.FilePath (dropExtension, takeExtension, (<.>), - ()) - -import qualified Test.Tasty as T -import qualified Test.Tasty.HUnit as T - -import qualified Network.DNS as DNS - -main :: IO () -main = do - msgfiles <- filter ((== ".bin") . takeExtension) <$> getDirectoryContents "testdata/msg" - - let tests1 = [ msgFileTest1 (dropExtension fn) | fn <- msgfiles ] - tests2 = [ msgFileTest2 (dropExtension fn) | fn <- msgfiles ] - - T.defaultMain (T.testGroup "" [ T.testGroup "decode" tests1 - , T.testGroup "enc/dec" tests2 - , T.testGroup "Type/TypeSym" - [ testTypeToFromSym1, testTypeToFromSym2 ] - , T.testGroup "mkQueryRaw" [ mkQueryRawText1 ] - ]) - -testTypeToFromSym1 :: T.TestTree -testTypeToFromSym1 = T.testCase "testTypeToFromSym1" $ do - forM_ [minBound..maxBound] $ \sym -> do - T.assertEqual "" (Just sym) (DNS.typeToSym . 
DNS.typeFromSym $ sym) - -testTypeToFromSym2 :: T.TestTree -testTypeToFromSym2 = T.testCase "testTypeToFromSym2" $ do - forM_ (map DNS.Type [minBound..maxBound]) $ \ty -> - case DNS.typeToSym ty of - Nothing -> pure () - Just sym -> T.assertEqual "" (DNS.typeFromSym sym) ty - -msgFileTest1 :: FilePath -> T.TestTree -msgFileTest1 fn = T.testCase fn $ do - bs <- BS.readFile ("testdata" "msg" fn <.> "bin") - msg1 <- assertJust "failed to decode message" $ DNS.decodeMessage bs - - -- load reference value - let refFn = "testdata" "msg" fn <.> "show" - writeFile (refFn ++ "~") (show (msg1 :: DNS.Msg DNS.Name)) - msg0 <- read <$> readFile refFn - - assertEqShow (pure ()) msg0 msg1 - removeFile (refFn ++ "~") - -msgFileTest2 :: FilePath -> T.TestTree -msgFileTest2 fn = T.testCase fn $ do - -- use this as reference message - bs <- BS.readFile ("testdata" "msg" fn <.> "bin") - msg0 <- assertJust "failed to decode stored message" $ DNS.decodeMessage bs - --- print msg0 - - -- encode it now again - let Just msg0bin = DNS.encodeMessage (msg0 :: DNS.Msg DNS.Labels) - - msg1 <- assertJust "failed to decode re-encoded message" $ DNS.decodeMessage msg0bin - - assertEqShow (pure ()) msg0 msg1 - -mkQueryRawText1 :: T.TestTree -mkQueryRawText1 = T.testCase "mkQueryRawText1" $ do - msgraw <- DNS.mkQueryRaw DNS.classIN (DNS.Name "www.google.com") (DNS.typeFromSym DNS.TypeA) - - let Just msg = DNS.decodeMessage msgraw - - assertEqShow (pure ()) (head (DNS.msgQD msg)) (DNS.MsgQuestion (DNS.Name "www.google.com.") (DNS.Type 1) (DNS.Class 1)) - -assertJust :: String -> Maybe a -> IO a -assertJust msg Nothing = E.throwIO (T.HUnitFailure msg) -assertJust _ (Just v) = A.pure v - -assertEqShow :: Show a => IO () -> a -> a -> T.Assertion -assertEqShow onFail ref cur - | show ref /= show cur = do - onFail - T.assertFailure ("expected: " ++ show ref ++ "\n but got: " ++ show cur) - | otherwise = A.pure () - - Binary files 
/tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/01ca022e21220474ca6100f21d137b42.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/01ca022e21220474ca6100f21d137b42.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/01ca022e21220474ca6100f21d137b42.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/01ca022e21220474ca6100f21d137b42.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/01ca022e21220474ca6100f21d137b42.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/01ca022e21220474ca6100f21d137b42.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 50673, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 1, mhARCount = 0}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 15) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 185, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [MsgRR {rrName = Name "fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataSOA (Name "ns1.fastly.net.") (Name "hostmaster.fastly.com.") 2016110301 3600 600 604800 30}], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/033cb8330c8fba5fbb8bd48fd60fd6c3.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/033cb8330c8fba5fbb8bd48fd60fd6c3.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/033cb8330c8fba5fbb8bd48fd60fd6c3.show 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/033cb8330c8fba5fbb8bd48fd60fd6c3.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/033cb8330c8fba5fbb8bd48fd60fd6c3.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/033cb8330c8fba5fbb8bd48fd60fd6c3.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 24290, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "_mirrors.hackage.haskell.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "_mirrors.hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 3637, rrData = RDataHINFO "ANY obsoleted" "See draft-ietf-dnsop-refuse-any"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/03edcc6e3a8f04350e45b3c718df1cb5.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/03edcc6e3a8f04350e45b3c718df1cb5.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/03edcc6e3a8f04350e45b3c718df1cb5.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/03edcc6e3a8f04350e45b3c718df1cb5.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/03edcc6e3a8f04350e45b3c718df1cb5.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/03edcc6e3a8f04350e45b3c718df1cb5.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 
44451, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 1, mhARCount = 0}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 28) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [MsgRR {rrName = Name "fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataSOA (Name "ns1.fastly.net.") (Name "hostmaster.fastly.com.") 2016110301 3600 600 604800 30}], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/04e610d95902b1898ee6abce328905dd.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/04e610d95902b1898ee6abce328905dd.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/04e610d95902b1898ee6abce328905dd.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/04e610d95902b1898ee6abce328905dd.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/04e610d95902b1898ee6abce328905dd.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/04e610d95902b1898ee6abce328905dd.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 50214, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 1, mhARCount = 1}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 16) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 147, rrData = 
RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [MsgRR {rrName = Name "fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataSOA (Name "ns1.fastly.net.") (Name "hostmaster.fastly.com.") 2016110301 3600 600 604800 30}], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/0c2248ed621e903da402b64d8dc12fbc.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/0c2248ed621e903da402b64d8dc12fbc.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/0c2248ed621e903da402b64d8dc12fbc.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/0c2248ed621e903da402b64d8dc12fbc.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/0c2248ed621e903da402b64d8dc12fbc.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/0c2248ed621e903da402b64d8dc12fbc.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 0, mhNSCount = 4, mhARCount = 1}, msgQD = [MsgQuestion (Name "com.") (Type 50) (Class 1)], msgAN = [], msgNS = [MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 593, rrData = RDataSOA (Name "a.gtld-servers.net.") (Name "nstld.verisign-grs.com.") 1494230811 1800 900 604800 86400},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 593, rrData = RDataRRSIG 6 8 1 900 1494835611 1494226611 27302 (Name "com.") 
",\SYN\230\r\ESC\180\139\246|W?\207\SYN\218\185\223:\166\129\218\251\243]\"Vm\241\156\227.\145P\219N[\250\138\"\220k)j\133\&8B%\232\249xo\221\187\248\ESC\vO\143\135\219\174\161\ETB\199i\248\184X\DC3VrU\205o8\237\rN J\163L$\203\200\211\148\&6\157\b\NULj\204\185E\238\186\ETXmZ\226\240\162\202>Y\226\247\186\128\RS\232\250\DC18\196RenvB\193\185\202R1-\a\182\229\156\158\bB\241\173I\162:\SYN\212\156Z\161\167\182]\DC1vB\169\SOH2\DC1\244\236\226k\DC1\153\198\165\201\f\195X\199\237\205\NAK\GS\203\208=\229\142o\159\189\252n\144\155\191\228\DLE\199\244H)e\135\239z\134\240\&3\ENQ7Q\243xs\149\&0\233Pl\164\216\NAK\196&\SO\180\219w\249\176\163\136l\176\180\235\157y\ETB"},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "i.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "d.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "h.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "m.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "c.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "b.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "g.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "a.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "f.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "l.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "k.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name 
"e.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "j.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataRRSIG 2 8 0 518400 1495256400 1494129600 14796 (Name ".") "S/%\226\DC25\201D\151\140\161\243\248\192\a}\198\167!1\233\141h\205\203\ENQ\EM|8\204\184\224\b\203\223\r_'\187\US#\205YM\220\202?\240;z\136$\166\EOT\249M\251\146\&5\143\219\187\DLE\154\185Tx/\181\199\240s\184\135\171\247y\196\191\168E\216\159&\220\252\132\240\&0\a\217gI$\217\171\218}\ACK\144\DC3\199\t\tw\236\&7\151\SO\nQ\246\181aa_\251\156,\218[4\192\EOT(\165\NUL\163\233s\179$\tS\FS\147\176\228\179=\DC2pMP\186\155\FS\205njSC\159\187\142\b\215[\244\224\253z>\201\t\223\SI\t\165\153X\"\223\232\186sHg\DC2\196F\160\167E\144\ACKo\232[\132'\245\158Q\163\192\218i\175\214\157\ESC.\137mS\143W:\DC1\250\&8\251\DC3\226]\244\163\190uc\249\215\220\169V6\155H\DLE\216\180\NULh+v\150\255b2\168u\226u\175\177}T\206\163w\249]+h\EMRfx\US8\179\176\166l\186\190\199\229\155yB\184g\173\211\ACKA\208`g\136\216(\171~]P\159\203(q\159\234I\242\183j\153\&0\208>E\172\227r'yE\220\180Pl\184=\ESCI\243\155\SOHn\142\226\194\&9TI\192\r\192\FSu\129\143V\184\178\166\NAK\177\FSoI\239\179\GSs=\n]\237?\\?A\132\198\ACK\184\248\150\ETX\230\128\DC4+w\241\196\&5\160\246\131kG"},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 116905, rrData = RDataDNSKEY 257 3 8 "\ETX\SOH\NUL\SOH\168\NUL \169Uf\186B\232\134\187\128L\218\132\228~\245m\189z\236a&\NAKU,\236\144m!\SYN\208\239 
p(\197\NAKT\DC4M\254\175\231\199\203\143\NUL]\209\130\&4\DC3:\192q\n\129\CAN,\225\253\DC4\173\"\131\188\131C_\157\242\246\&12Q\147\SUB\ETBm\240\218Q\229OB\230\EOT\134\r\251\&5\149\128%\SIU\156\197C\196\255\213\FS\190=\232\207\208g\EM#\DEL\159\196~\231)\218\ACK\131_\164R\232%\233\161\142\188.\203\207V4te,3\207V\169\ETX;\205\245\217s\DC2\ETB\151\236\128\137\EOT\ESCn\ETX\161\183-\ns[\152N\ETXhs\t3#$\242|-\186\133\233\219\NAK\232:\SOHC8.\151K\ACK!\193\142b^\206\201\aW}\158{\173\233RA\168\RS\187\232\169\SOH\212\211'n@\177\DC4\192\162\230\252\&8\209\156.j\171\STXdK(\DC3\245u\252!`\RS\r\238I\205\158\233jC\DLE>RMb\135="},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 116905, rrData = RDataDNSKEY 256 3 8 "\ETX\SOH\NUL\SOH\146\236\190\243\223\253`\216\DC3\194\252eC\162\\C}=(76\172\170\187\197\150)[\184,\139\207[\190\216\209mf\ACK\t\171\160Q\a\199\&0=\204\205\158u\ACK\216 Xc\231~\172\199w\227\213\188x\159\204\138\193\r2\212\198$^\248\DC4\221\162\150\DELH\231\155\202\179\229\DC3\213k\136\132R\ETX\249\162\SI\251\210k\128^j7\241\ESC\181\209\171\FS\221#m7c\165\SOH=S\138\250~S\130T\149P< +\226z\181\SI\131b\160\DELh-4Xn\168\196\241\252\129\235\DC3\144\221W\230\DEL\174a\198\168D\158\254i\218\SI1\221\SYNr\178\236\198\185\205\195\211zI\176g\206\130\156o\175\&5c\DC3\235\b\142\231\166\DC2\SOH\168\135_H\210\n9\164/\178sO|i\167\251\237\161b\202\214\184\171\239H\186\&5\187\DC4\227\203\SId\225\203~\FS3n\140'{\NUL,\158\r\DLE\168\195\156U\201\161\151\168\159\DC3p\191\190\&1"},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 116905, rrData = RDataRRSIG 48 8 0 172800 1495411200 1493596800 19036 (Name ".") "Z\249\197\192\196\231A\191\251\179\224k_;\158N!<\182\173;\ENQz~D\183\SOH\129!\213\203:9\161\149f\201+\rD\nO\216v\230\191\234\176P\142\166.\180Z~M\161\249X\245\227\233\188\EM\249\212\186\172;\197n\217\145\221~\165\180G\152\181\ACK\216\149\205sg\v\186>J4\177\195b\237r\NAK\241~\220,\239\134O.\158,\189\ETB\193\bK\131t6\SYNL\245\SYN\206\140:;\163\235;'\134\239\157 
\225\138d\251x\fG\175%(\142\150\212]Q\202\229\254\&8Q_\SUB\NAK9\138\DC2PN\194Gf\132\180\162\154o\206\DC4\161+\137N1\197\162\238\148F\NUL\219|,\217S\179g\140\248[\234'\130u+\201\234\212\185\SYN\220bqf\a\193\DC4%\249\212\140'\242c\194\235u\RS\n\222\169\SUB\130S\141\142E/\248R\143\130\&3\222\222\200jVe'\npj\ETX@\SYN\150\DC3\135Q\222ca?\144\210"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/36187d17453931a5d1563c69ed34fb52.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/36187d17453931a5d1563c69ed34fb52.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/36187d17453931a5d1563c69ed34fb52.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/36187d17453931a5d1563c69ed34fb52.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/36187d17453931a5d1563c69ed34fb52.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/36187d17453931a5d1563c69ed34fb52.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 16363, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 14, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "br.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "a.dns.br.")},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "b.dns.br.")},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name 
"c.dns.br.")},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "d.dns.br.")},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "e.dns.br.")},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "f.dns.br.")},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 2 5 1 172800 1494583200 1493978400 16335 (Name "br.") "C\r\136\233\&4\251z\214\ENQ\v\202fm\142\251`3\DEL\181\DLE\aj(/\242\STXO\243\208X\223\145\ETX\227-\146\153*Q\a\ESC\149^.:L\171\170\&7\227<7\221L\ETB\165\220y,\226\226WPF\RS\227\205\154\183\136\227\217\157\232b\177\189 \213@\b:\212\132\RS\240\236\187\240tca\153K/\193\225\132\ETB\166\NAK[VQ\253\175\166K\153\218\213\SUB%\249#\231\143\134\DC1\169\189!}5\CAN0\EOT\179\144q]V\201\178\STX\202\190\151f\163Ks\NUL\DLE\188\140=\SO\178\218\180\194\174\210m\222\223.\252'"},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataNSEC (Name "0800.br.") (fromList [Type 2,Type 6,Type 46,Type 47,Type 48])},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataRRSIG 47 5 1 900 1494583200 1493978400 16335 (Name "br.") "\154\183\204(\130\144\188O\252\209\225\ENQ\SO\252CET@Ve\203\253\139\140\199\243\206\238m+\223\185f\DLE\152$\ETB\159sW\246#\"\CAN\223`\196.\200\181\212\252|\199\ACKN\201\197+\ETB;-\179*.\US\140\f\164g<\r\163\247\173|\157@s\247I\170\ESC'\203f\US\201\204\&3\204S\169\164 \233\203\&8Gw\254\EOT\202\217\SUB\US\180\237\174]Ok\251\246\234\SUB1\242\167i\247\168\150\EOT\249\200\ETB\234\SOH\234\DELm\197\156\194O\223C\245\236\198\160\242]\NAKV\181\180\n\207\128\217@U\EM\180\132\220m\153"},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 21599, rrData = RDataDNSKEY 257 3 5 
"\ETX\SOH\NUL\SOH\194\&8\150i\159U\208o\243\225G\214^\132fc\232\144h\"\135\188\SUB\SO\245\254\161\136\240\&4\146@\250\236\188\219\178\138\r\168\151\EOT\141\228\SUB'P\221\EM\SUB>6\NAK_P&\145\202zs\191'\238_f\252\240\230\165\201\SO%%{_o\249\SYNf\SIk\228s\234\DC4\229\223\234\144\r)V\152 =3\148\217~\192+K\243\163\ETX\151\141\160\137\151b_\218\156\209$\161\227\231\216\235\215\235\160r\199\223\159\223:\232\201\191\SIH\t\174\151\153\162UK\253x\247\195'\SO\240Ix\243\131\DEL_ws\DC3\188E\243`\252\170\242l\SYN\202\bz\194\RSI}U\\\205\178\166\136J\237}\249i\174\135\169\DC2\248Lg"},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 21599, rrData = RDataDNSKEY 256 3 5 "\ETX\SOH\NUL\SOH\209Zg\144K\201\185\174\150\154}\225\223\n\244\180\239\151\195\167\150\173\217\a#i(\189\166\192\"\175J\222\128\227\251U\217\235@t;@T\180\184\196\152\160b\NUL\217\231\245=\aX\b\ETB}{\228a\138@9\137\170\183\184\SO^\222Sh?\236\ENQ\130\151we\ESC\214:y\195\236%\254\139\194b\221!\161}\132!\190X]\217S\228\239:*\RSp\144\ENQ\202\228)\247\GS\169$\v5\254Na\192\131\135\197\242\ESChd\193\151`\162\EOT\235\252\195\130\179$\195\131\175\184H\SI\162^\156\248Z\162\194\143\143\246X \170\149\141\v@\184Z\b\DC1\SO\131\199\239>\166\166\&3y\235\&2\237\208m)\254\182(\140\&3z\144Ws\US\137\217\153Ujc\249\162'\138O\n$\230\179\&2\179\221u\140\r,G\209(\250\&8\137^0\210\174\224\154b09\188Hw\214m\173@#\226\SI\240\224\186\241\255(\176\DC1#\176l\f\231gBq\224l\SIXv\133\n\204\155\130S\212m \213u\196\200Z\235R}\244\216\&3l~^\136\189\174\DEL+\237.@\250\205\202\238-\184\206\222\&5v\186\SUB:\165%\DC3\NUL+\207t\189\CAN\200\n\140\172G\180\183\&6\200mc7M^\223GI\170\137\252\135\205Z\191\169\&8\b8P\216\130J\228 \EOT\187@\252\205\177\194Z\ETB\RS\190\165\218\158c\176\135\SUB:\198"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files 
/tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/3c973651b87bbb27d9249ef41ebd5c5e.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/3c973651b87bbb27d9249ef41ebd5c5e.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/3c973651b87bbb27d9249ef41ebd5c5e.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/3c973651b87bbb27d9249ef41ebd5c5e.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/3c973651b87bbb27d9249ef41ebd5c5e.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/3c973651b87bbb27d9249ef41ebd5c5e.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 39631, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 5, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1057, rrData = RDataA (IPv4 0x17603435)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1057, rrData = RDataA (IPv4 0x17647aaf)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1057, rrData = RDataA (IPv4 0x682bc3fb)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1057, rrData = RDataA (IPv4 0x6828d323)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1057, rrData = RDataA (IPv4 0xbfefd5c5)}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/419ae4b8b8770bad65e3bdf3cbc84ad3.bin and 
/tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/419ae4b8b8770bad65e3bdf3cbc84ad3.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/419ae4b8b8770bad65e3bdf3cbc84ad3.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/419ae4b8b8770bad65e3bdf3cbc84ad3.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/419ae4b8b8770bad65e3bdf3cbc84ad3.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/419ae4b8b8770bad65e3bdf3cbc84ad3.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 60637, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 3, mhNSCount = 1, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 16) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 3580, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 3480, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 633, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")}], msgNS = [MsgRR {rrName = Name "dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 733, rrData = RDataSOA (Name "n0dspb.akamaiedge.net.") (Name "hostmaster.akamai.com.") 1494154963 1000 1000 1000 1800}], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files 
/tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/47b446859b6bd9bf4d1e7348a356a43c.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/47b446859b6bd9bf4d1e7348a356a43c.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/47b446859b6bd9bf4d1e7348a356a43c.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/47b446859b6bd9bf4d1e7348a356a43c.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/47b446859b6bd9bf4d1e7348a356a43c.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/47b446859b6bd9bf4d1e7348a356a43c.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 39296, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 3428, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/4e49a68a7cbd8f6c69eab194c49b9888.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/4e49a68a7cbd8f6c69eab194c49b9888.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/4e49a68a7cbd8f6c69eab194c49b9888.show 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/4e49a68a7cbd8f6c69eab194c49b9888.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/4e49a68a7cbd8f6c69eab194c49b9888.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/4e49a68a7cbd8f6c69eab194c49b9888.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 21306, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 6) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2915, rrData = RDataSOA (Name "ns1.msft.net.") (Name "msnhst.microsoft.com.") 2017050703 7200 600 2419200 3600}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/4e86d529a6401b74f84956cd72682c15.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/4e86d529a6401b74f84956cd72682c15.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/4e86d529a6401b74f84956cd72682c15.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/4e86d529a6401b74f84956cd72682c15.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/4e86d529a6401b74f84956cd72682c15.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/4e86d529a6401b74f84956cd72682c15.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, 
mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 31, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "torproject.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 0, rrData = RDataNSEC3PARAM 1 0 16 "\185\181\DC2\245\247"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 0, rrData = RDataRRSIG 51 8 2 0 1497380687 1493921581 59061 (Name "torproject.org.") "\ESC\139\217>\186\USz(\219\227J\162\235\215t\166\171\198\136\243\146\202;\128\130<\152\232\SOH\167>\248Q \177\130'\NUL\231\199\199\222\172\fE\206g$sR'\205=\216\188\211\152\230\232\202mV\220\164\128xsj\129\138\173\ESC\217\SYN`\NUL\231RE{by\173\177\235\ACK\203IT\128\167\212\197J\174\205V\145\n\133\NUL\175 \187\200\232\b\200*/-\141R>|W\153\176 \242\201\160\SIK\SUB\146\160\227\242\146\181\STX%\129\195\b\209\210N\FS\GS\255\174e\FS\227!}\179g\211\148\188\159b),\149ym7\fi\155\163A\181T\169\154\196/\141\212\170\224\198\253q#F\DLE\ETX\141\217\216\133,0\163\183\207"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataSOA (Name "nevii.torproject.org.") (Name "hostmaster.torproject.org.") 2017050738 10800 3600 1814400 3601},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataRRSIG 28 8 2 300 1497627735 1494168135 59061 (Name "torproject.org.") 
"csn\208v\207*\137\134\225U\CAN\187\253\212\&2\176\223\250;\205`\DC2\b\\\a&\246\188\203\133\215q@U\DEL\247\170z\r\GS\172\193\243\177\CAN\183(\196J\253\192\240<\255\199\226]\252U\242\EMdJ\156\171p\245A\189\EOT\250]\253\220,\138\224-V\209\171R\158U\132B{\159cl\202#\ACK2\216\249+\213npY\EOT\133\129GFl\249b\GS\154\189\243+)\230\132\170\240'Q\167\&2r\210\SOH\DEL\196S\163\145\&9\136\208\181\NUL]\ACK\GS\142\140\164\241\253\134j\135\DEL\135\&0\159\145\US\ESC>\163\208\\\",\236\217h|\245\&1\DC4\179,\189\211\174bo\DEL?\\\235[\SI\229\176\DC3\244\190b\224/\129\ENQ\245"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x2620000006b0000b 0x1a1a000026e54810)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x2001085800020002 0xaabb0000563b1e28)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x200141b802020deb 0x21321fffe201426)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x2a0104f801721b46 0xabba00050001)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x200106b0005a5000 0x5)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 15 8 2 3600 1497532518 1494075620 59061 (Name "torproject.org.") "=KT\176\SYN\SYN\226\&6\213k\171{\174\213{\209\NUL\171\250\&9D\168\t\177\202v\249\143\&1\211~:8\220\214g!\134\167\250\175\136\&5J/\205\205&\231\DC4\161\252\144~\160\245\250\210\213\253\139\205\199\215\234P\248\b-\179\157%\138Y/R\201\171\189e\241\203\181rHQ\CANsB\248$\217\202\144a\237\&1U4\EOTe:W\201\SYNrU\\\EOTy\207\224\214\244+\255U}e\169\&4\243\SUB\192\CAN#.CY\DC2$\133np\170N\219\242\245\&2\163\154\134\232R\v(\171\ETB\242=\225V\196g\228\147\231\STX]\US\238m\175ok\238\160\CAN\172{w\ACK\"\173\ACK\240P\208;H\195Hf@u\228\&0l(\165\131"},MsgRR {rrName = Name "torproject.org.", rrClass 
= Class 1, rrTTL = TTL 299, rrData = RDataRRSIG 1 8 2 300 1497627735 1494168135 59061 (Name "torproject.org.") "C%\201\209\ai\SUB\SYN\150_\EOT\209@/\183\150\229s\156\&8\250\177\174\245!\154\USH\132\188\DC3\138]\167}l\182\&8=\166\233\DEL\153T\169\159O9\195\177\"T\130\146\223s\ACKkp\224\172\DC2\\\182\224\ESC\STX\DC1\202\224\243_u\204\246\159\146t=`\ENQ\UST\157)5-3\212=\151\250U\180J\EM\202\DC2\200\169a+\196T\226\228x2\NUL\208\141\189\178#\b\US\189\ACKU{\245\tq7\216\158\159\237;6\196l\193\146\178\178\&8\161\154e\166\210\150\183c]!\DEL\211>\US&\EOT\208\253Q\161x\FS\251\186\141T7\171%=\207c\ETX\t#H\208\157t\252\194$(rI\146\223\189<\SUB\211\161\&5\253D"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 2 8 2 86400 1497572963 1494115902 59061 (Name "torproject.org.") "$_\241\250\&5\246[\189\226iR\196\138PqqH\214\176@!\155\244V\195\224G\209n\234z\SYN.\151\SYN\187\194\151\167!#x\222\&4\163\169\170\232Ac\181Z\142\233\166\CAN\246\EOT\t\132\218\&0\213\140AK\220\162\237\248\144l\161z\RS\128\254\213\176\223\US\231\139\203\215\"\145\214$\192l\153F +[\175\201\"\136\189\r%\177\169\194\242\145\158\233\EM\202\NUL4w\238\168\SOH\a\DC15\n\160\221\DC2\rj'+\194\130c\ENQ\198k\249{/p]\181\159\202h\167\151\162dH\SOmt\242\176J\134\229\163d\203\222\214*\245\252cYqa\242Q\GS\192:\167\EM.\200E\171@\NAK2P\SUB0l\247=-(\f"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 6 8 2 3600 1497692652 1494233052 59061 (Name "torproject.org.") 
"'^\229\253\SO\135\241\160\t\195\229W\129\150\201\130\251q@\213\217\&3.;\243\218!\192\183<\217~q\ENQ+\ENQ\DC2M\230\135}\183\197\168#\143\&5\201\213_\SUBx\196\\{\218\"\234(\216\133\211\209\213N\237\USRURz\128P\NAK\253y\175\207\DC3\230m\202\164\222\&9\167\128\220\ESC\184\206dB\SI\152\189\133q#Ua\234\175\200G\FS\224\189'Kk\143\229u\136vX\SYNY\242n.~t\188\132\254|\244\150k4\217\180\181\b\167\DC2\181\174\150\139)Jo\145-\180\141$\225\DC1\245\a\154\GS\201Fb\213\150q\v\v\140\&608\158\216\144Yj\184\151\237?\192\190\EOT\162?,\249\STXb\166\187\254\199w\212"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 48 8 2 3600 1497462010 1494002410 17224 (Name "torproject.org.") "\143\&8*?\209\140\SI\160f\254h\203\v\219\226B\153\148U\SI\159\182\233\158\ETX\149\134\142\US\233`\\\138L\DLE\135txpz\148\198y\230\237\230h\228\&0W\179\n\186\ETB\194\146\DEL\r/\190\138t\188\&1\202\164\EM\RS\163\183\184\EM\DC1\CAN1\204\207 \rk\129\200^+z\" \134\SO\RSX\249\DC4\130\206\204,\158\158\130\159\176\&8\248p;}-M\139\r\133\152\&1\217\203\r\246\ACK\234\ETXd\246\247\135O\252\153\229(\250d9\159T\184u\202Q\195\SOH\146\176;\137OM\219\DEL\DEL\178\249\STX\b*{\186e\203\157\150\&1\194x\147#9\161\232\143\172\250\168\172\ENQZ\194\233S\EMr7\234\220\142\233\205x\132T\v \183a\158\225\214(\240\234\143h\217\230\DEL\229YX\213 \211\228\146\&2\ESC\187\229`\132\187k\231a\EM\193\129\DLEW\246Wm\231\197#\ESC\NAK\158p()\a\\V\164\152\164\252\158\&6\168\200\140\&8e\147c"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 48 8 2 3600 1497462010 1494002410 35740 (Name "torproject.org.") "bZI\194E(T\n\244~\160@\SYN\DC4L\164\167\251|\251\n\254\162\184\139\NULe\247\159\DEL\NUL[\180\152?\153\148\DC2\203\138\253\155\189\248\139\178b\192\149u 
\NAK\235\204\194\NUL\203\205|/7\130\&9\147>\139{1L\182/[\165\205R\233A\175=s\157\242,\230:\211\210\163\187v\DEL\160u\205\136\253\176\252\b\156\ENQ\158;\239\153sTw_7j\227w\129\194tA\161\DC2\227\182#\DEL\174\212\251\148\151\205\201,\167\SO\243\132\198\138]\161|\129\199\&9\160\151'\243\&7\226\150/\238\192v\159\129.\209\170RNV\233\238\212\254\255\158\v\251\137\206\175\180\214K\253L\144V)\179>m$\131\147\241|\242w[z\246cVV\216p\142e@)\232\253\146i\200\220\196\234[\230\203C\b\234\141\139\157:z\a\202\141\166a\ta\DC2g\188\239\196\194\186\220A\202\246K?M\217w\158\199\208d\216$\188\215\136t\175"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 48 8 2 3600 1497462010 1494002410 59061 (Name "torproject.org.") "D&\210'\154\164~\193;m\223\216\174\203\&0[\159\208\220\236\232\206u\DC3\ETXn\214l@\195\223g\252Z\254\147T\DELWc\246\129\185\141\v\133\147_\DC4\NAK\173\231[\195\216h\225\159F=\SO\188\230\&0\254\DC4+_E\155\174%A\ETX\229j\vuKz\ENQ\150\240\bV['\237u}P\255\155@\154\149\247\189\251\EOT\DLE\t\174\254\142$x\RS\245~p\248\139\172\240\254\249b\184\SYN$\228\DC4\ACK\168_\189-S\202\136\210\213\198\DC3y\169\144\SI\149\250\v,vW)C\187S+\147\183\SUB<\167c\SUB\188\CAN#\144\195\FS\182}\185\139N\199\251`2\139\140\255|V\238\158r\ESC\CAN\215\STXZ1\170\188R\f\228\221"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 256 3 8 "\ETX\SOH\NUL\SOH\180\DC4Zp{K?\191H\186\210^\193\197\EOTV\232\145\161\187\238\v!\209\240\NAK\192\191}>3\214\246\145o\152\142%\134\208CZR\US\157\147=\DEL\128\238\NAKH\138jA\222\155M\164\153\225a\172F\135\189F\135'g-N\197*\131\171\b>P\128\138b{\r\157u\230[rpa7\ESC\194\162-\255;>\170X\231\150\177\&3w\255\173\226X\143:\196\241z\131\CAN\146(\US|_\154)*\b\152\184\ENQ\182\159\STX\CAN\178\183\238!\248\b\172\EOT/\166\144\151\DC1U\140\&7W\NUL\200\246\a\STX\174\225\175\249\130\149\SO^\250o\"\173\250\143\178S\220c\178\197;\EOTz\149\SUBi\181\211\254\209\142md\209\135\201;"},MsgRR {rrName = 
Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 257 3 8 "\ETX\SOH\NUL\SOH\208\227\159\216\218\187\227H\b\208\244\f\139\EOT\159\210vW\RS\163:\SUB\180\179Y\255\178\156|0\160r\187\b\197\251\183\201\147xh\227\&9\202\194\209{\211xv\241\134Q\201EQ\151[\140,a\251<\DLE\211\230\196\&9s2\226\253\225\226\&2\173,09\192\217\bx\247>R\CAN\224] 6E$k3&\159P\205\163\248v\153\ETXi{\129,nhf\144LF\212\NUL\SUBkt~?\184\190\207v\228\165D\196\163\&3@\198\249![\224\148]\182Pv\212\165\238p\162\165\212\175a\167\206\152\138\130\130\176\167/>\164D\194,-\t\255\ETB\201\&3\143\230\DC4}\255\236\226\"E\224\138\150\168\SO\SOH\221\163\156c\EM&\148\&8\135\&7\v~\211\&1\ENQ\252_\234r\137{\184\239\174\233\&2\206\DC2\NAK5\RS\174\242\213K=T\194).\196\158\165\&0i\136Dh&\231\&0\SOH2\SYN\130\ACK\243\&5\131\198L\234}\188\242\&2\174\248\130\151"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 257 3 8 "\ETX\SOH\NUL\SOH\201eI\ACKo\150\NUL\219\&5d\"\\\t\226_\210\DC3#%\169r\169S\ESC\132\EOT\181\SI\EOT\DC3#\t^\220\178jNrh\DC3\SYN\162o\ESC\236\162\181\196\232\165\169\209\243\"\v\NULx4\ru\175a%\169\&5\165\SUBC\177\&1\SUB[\DELn\SO\230/\137\&8\136\205U\195&\245vC.V4`\180J~,c\198\233\245\186\184.\146\151pt\198IYI\198,\248\252OO8\172;*\190v$\181\195\128\147\DC4\n\140\DLE\RS5\159\139\136\253\&1}S\254\SYN\218pI\225~8\234@\144%\131\191;u\133S&\154\130\221\220\213G}@\153\141\218,\161\190\t\172Y\v\211\218\245\&6#9e}\188\ENQ\164\134n\130\222\191\239\190\200\202\132\241\157\211\FS\227\147\DC2\198\156kE\SI\247\133\170\145\206\210v\148\EM\148%\200\229\192\220^\222Ye}\194*\237(\149-\171q\242\157\SUB\170\249\213\199\153\RS\v\137\222\191\183\180\209\167\183"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns5.torproject.org.")},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns1.torproject.org.")},MsgRR {rrName = Name "torproject.org.", rrClass 
= Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns3.torproject.org.")},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns4.torproject.org.")},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns2.torproject.org.")},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x9a238446)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x8ac90ec5)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x26e54810)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x592deb15)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x563b1e28)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x52c34b65)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataMX 10 (Name "eugeni.torproject.org.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/53d71a9e72adf19251123b46f31769bb.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/53d71a9e72adf19251123b46f31769bb.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/53d71a9e72adf19251123b46f31769bb.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/53d71a9e72adf19251123b46f31769bb.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/53d71a9e72adf19251123b46f31769bb.show 2017-10-26 22:24:39.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/53d71a9e72adf19251123b46f31769bb.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 38143, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "c.f.f.c.7.f.e.0.0.6.b.7.5.a.4.0.4.0.1.0.5.2.8.7.1.0.8.4.1.0.0.2.ip6.arpa.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "c.f.f.c.7.f.e.0.0.6.b.7.5.a.4.0.4.0.1.0.5.2.8.7.1.0.8.4.1.0.0.2.ip6.arpa.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataPTR (Name "ghc.haskell.org.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/54f81230e47e2399d16a309e1227025e.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/54f81230e47e2399d16a309e1227025e.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/54f81230e47e2399d16a309e1227025e.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/54f81230e47e2399d16a309e1227025e.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/54f81230e47e2399d16a309e1227025e.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/54f81230e47e2399d16a309e1227025e.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 53536, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = True, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount 
= 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 5) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 2306, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 4096, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/552aec026306990d49a098d0a4608434.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/552aec026306990d49a098d0a4608434.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/552aec026306990d49a098d0a4608434.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/552aec026306990d49a098d0a4608434.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/552aec026306990d49a098d0a4608434.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/552aec026306990d49a098d0a4608434.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 2, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "4.4.2.2.3.3.5.6.8.1.4.4.e164.arpa.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "4.4.2.2.3.3.5.6.8.1.4.4.e164.arpa.", rrClass = Class 1, rrTTL = TTL 86046, rrData = RDataNAPTR 100 20 "u" "E2U+pstn:tel" "!^(.*)$!tel:\\1!" (Name ".")},MsgRR {rrName = Name "4.4.2.2.3.3.5.6.8.1.4.4.e164.arpa.", rrClass = Class 1, rrTTL = TTL 86046, rrData = RDataNAPTR 100 10 "u" "E2U+sip" "!^\\+441865332(.*)$!sip:\\1@nominet.org.uk!" 
(Name ".")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/57394dfc69f9e32c0c0cd9d4d2057d87.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/57394dfc69f9e32c0c0cd9d4d2057d87.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/57394dfc69f9e32c0c0cd9d4d2057d87.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/57394dfc69f9e32c0c0cd9d4d2057d87.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/57394dfc69f9e32c0c0cd9d4d2057d87.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/57394dfc69f9e32c0c0cd9d4d2057d87.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 45729, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 15) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 3542, rrData = RDataMX 10 (Name "microsoft-com.mail.protection.outlook.com.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/579ff887c8ea54e4173934be5e85faec.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/579ff887c8ea54e4173934be5e85faec.bin differ diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/579ff887c8ea54e4173934be5e85faec.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/579ff887c8ea54e4173934be5e85faec.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/579ff887c8ea54e4173934be5e85faec.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/579ff887c8ea54e4173934be5e85faec.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 33589, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 4, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 16) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1524, rrData = RDataTXT ["v=spf1 include:_spf-a.microsoft.com include:_spf-b.microsoft.com include:_spf-c.microsoft.com include:_spf-ssg-a.microsoft.com include:spf-a.hotmail.com ip4:147.243.128.24 ip4:147.243.128.26 ip4:147.243.1.153 ip4:147.243.1.47 ip4:147.243.1.48 -all"]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1524, rrData = RDataTXT ["google-site-verification=6P08Ow5E-8Q0m6vQ7FMAqAYIDprkVV8fUf_7hZ4Qvc8"]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1524, rrData = RDataTXT ["FbUF6DbkE+Aw1/wi9xgDi8KVrIIZus5v8L6tbIQZkGrQ/rVQKJi8CjQbBtWtE64ey4NJJwj5J65PIggVYNabdQ=="]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1524, rrData = RDataTXT ["docusign=d5a3737c-c23c-4bd0-9095-d2ff621f2840"]}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/585424227713068d541ca07b184abd89.bin and 
/tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/585424227713068d541ca07b184abd89.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/585424227713068d541ca07b184abd89.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/585424227713068d541ca07b184abd89.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/585424227713068d541ca07b184abd89.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/585424227713068d541ca07b184abd89.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 32, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "eff.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataSOA (Name "ns1.eff.org.") (Name "hostmaster.eff.org.") 2017042405 600 1800 604800 1800},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNS (Name "ns6.eff.org.")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNS (Name "ns1.eff.org.")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNS (Name "ns2.eff.org.")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataTXT ["v=spf1 mx ip4:173.239.79.202 include:spf1.eff.org include:spf2.eff.org -all"]},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataMX 10 (Name "dummy1.eff.org.")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataMX 15 (Name "dummy2.eff.org.")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, 
rrTTL = TTL 599, rrData = RDataMX 5 (Name "mail2.eff.org.")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataA (IPv4 0x4532e836)},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 220 10 "" " @@@@@@@@@@@@@@@@@@!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 230 10 "" " @@@@@@@!!!!!!!!!!!!!!!!!!!! !!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 240 10 "" " @@@@@@@!!!!!!!!!!!!!!!!!!!! !!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 250 10 "" " @@@@@@@!!!!!!!!!!!!!!!!!!!! !!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 260 10 "" " @@@@@@@@@@@@@@@@@@!!!!!!!!! !!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 270 10 "" " @@@@@@@@@@@@@@@@@@!!!!!!!!! !!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 280 10 "" " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 290 10 "" " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 300 10 "" " !!!!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 310 10 "" " !!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 320 10 "" " !!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 100 10 "" " !!!!!!!!! 
" "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 110 10 "" " !!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 120 10 "" " !!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 130 10 "" " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 140 10 "" " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 150 10 "" " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 160 10 "" " @@@@@@@@@@@@@@@@@@!!!!!!!!! !!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 170 10 "" " @@@@@@@@@@@@@@@@@@!!!!!!!!! !!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 180 10 "" " @@@@@@@!!!!!!!!!!!!!!!!!!!! !!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 190 10 "" " @@@@@@@!!!!!!!!!!!!!!!!!!!! !!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 200 10 "" " @@@@@@@!!!!!!!!!!!!!!!!!!!! !!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 210 10 "" " @@@@@@@@@@@@@@@@@@!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
" "" (Name ".")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/593b5d26fc010f953c99621a7d608c8d.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/593b5d26fc010f953c99621a7d608c8d.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/593b5d26fc010f953c99621a7d608c8d.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/593b5d26fc010f953c99621a7d608c8d.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/593b5d26fc010f953c99621a7d608c8d.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/593b5d26fc010f953c99621a7d608c8d.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 19974, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 3, mhNSCount = 1, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 12) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 2950, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 20003, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")}], msgNS = [MsgRR {rrName = Name "dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 999, rrData = RDataSOA (Name 
"n0dspb.akamaiedge.net.") (Name "hostmaster.akamai.com.") 1494155229 1000 1000 1000 1800}], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/5fb7059d3cc96ae5eb05b06b7212bf76.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/5fb7059d3cc96ae5eb05b06b7212bf76.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/5fb7059d3cc96ae5eb05b06b7212bf76.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/5fb7059d3cc96ae5eb05b06b7212bf76.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/5fb7059d3cc96ae5eb05b06b7212bf76.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/5fb7059d3cc96ae5eb05b06b7212bf76.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 64444, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 2, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.google.com.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "www.google.com.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0xd83ac9e4)},MsgRR {rrName = Name "www.google.com.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x2a00145040070816 0x2004)}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/60c1e26a578cd3007a592250dbad30be.bin and 
/tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/60c1e26a578cd3007a592250dbad30be.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/60c1e26a578cd3007a592250dbad30be.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/60c1e26a578cd3007a592250dbad30be.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/60c1e26a578cd3007a592250dbad30be.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/60c1e26a578cd3007a592250dbad30be.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 61653, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 5, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 148, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")},MsgRR {rrName = Name "j.global-ssl.fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataA (IPv4 0x97650044)},MsgRR {rrName = Name "j.global-ssl.fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataA (IPv4 0x97654044)},MsgRR {rrName = Name "j.global-ssl.fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataA (IPv4 0x97658044)},MsgRR {rrName = Name "j.global-ssl.fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataA (IPv4 0x9765c044)}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/616d307f2f3407a001c0ed31ae01daf8.bin and 
/tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/616d307f2f3407a001c0ed31ae01daf8.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/616d307f2f3407a001c0ed31ae01daf8.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/616d307f2f3407a001c0ed31ae01daf8.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/616d307f2f3407a001c0ed31ae01daf8.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/616d307f2f3407a001c0ed31ae01daf8.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 2963, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 1, mhARCount = 0}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 6) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [MsgRR {rrName = Name "fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataSOA (Name "ns1.fastly.net.") (Name "hostmaster.fastly.com.") 2016110301 3600 600 604800 30}], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/62a40ed1d8ac22ee6dcca1d8cc6e1733.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/62a40ed1d8ac22ee6dcca1d8cc6e1733.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/62a40ed1d8ac22ee6dcca1d8cc6e1733.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/62a40ed1d8ac22ee6dcca1d8cc6e1733.show --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/62a40ed1d8ac22ee6dcca1d8cc6e1733.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/62a40ed1d8ac22ee6dcca1d8cc6e1733.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 26891, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 3, mhNSCount = 1, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 16) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 2846, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 16652, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 785, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")}], msgNS = [MsgRR {rrName = Name "dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 999, rrData = RDataSOA (Name "n0dspb.akamaiedge.net.") (Name "hostmaster.akamai.com.") 1494155077 1000 1000 1000 1800}], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/62ce31cf45d2d095d384da330a6e6189.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/62ce31cf45d2d095d384da330a6e6189.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/62ce31cf45d2d095d384da330a6e6189.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/62ce31cf45d2d095d384da330a6e6189.show --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/62ce31cf45d2d095d384da330a6e6189.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/62ce31cf45d2d095d384da330a6e6189.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 32245, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/68c43b7f9e85fb0b77552dc905fe6537.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/68c43b7f9e85fb0b77552dc905fe6537.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/68c43b7f9e85fb0b77552dc905fe6537.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/68c43b7f9e85fb0b77552dc905fe6537.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/68c43b7f9e85fb0b77552dc905fe6537.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/68c43b7f9e85fb0b77552dc905fe6537.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 10072, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, 
msgQD = [MsgQuestion (Name "_mirrors.hackage.haskell.org.") (Type 16) (Class 1)], msgAN = [MsgRR {rrName = Name "_mirrors.hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataTXT ["0.urlbase=http://hackage.fpcomplete.com/","1.urlbase=http://objects-us-west-1.dream.io/hackage-mirror/"]}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/6a0b07a53da450663489f3ff62fc7866.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/6a0b07a53da450663489f3ff62fc7866.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/6a0b07a53da450663489f3ff62fc7866.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/6a0b07a53da450663489f3ff62fc7866.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/6a0b07a53da450663489f3ff62fc7866.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/6a0b07a53da450663489f3ff62fc7866.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 3, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "fencepost.gnu.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "fencepost.gnu.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x2001483001340003 0xe)},MsgRR {rrName = Name "fencepost.gnu.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0xd076eb0a)},MsgRR {rrName = Name "fencepost.gnu.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataSSHFP 1 1 "\ETB8\CAN\146I?\149\218`\185\b[f\189\CAN\185\142\190\252#"}], msgNS = [], 
msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/6b49a8a930ab6cdaf5ae91c822247811.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/6b49a8a930ab6cdaf5ae91c822247811.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/6b49a8a930ab6cdaf5ae91c822247811.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/6b49a8a930ab6cdaf5ae91c822247811.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/6b49a8a930ab6cdaf5ae91c822247811.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/6b49a8a930ab6cdaf5ae91c822247811.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 23209, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 3, mhNSCount = 1, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 15) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 2381, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 16755, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")}], msgNS = [MsgRR {rrName = Name "dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 590, rrData = RDataSOA (Name "n0dspb.akamaiedge.net.") (Name 
"hostmaster.akamai.com.") 1494154820 1000 1000 1000 1800}], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/6f5b2b7d7ffa62ab3b6bce45ab3b0b51.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/6f5b2b7d7ffa62ab3b6bce45ab3b0b51.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/6f5b2b7d7ffa62ab3b6bce45ab3b0b51.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/6f5b2b7d7ffa62ab3b6bce45ab3b0b51.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/6f5b2b7d7ffa62ab3b6bce45ab3b0b51.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/6f5b2b7d7ffa62ab3b6bce45ab3b0b51.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 3198, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 4, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 16) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataTXT ["google-site-verification=6P08Ow5E-8Q0m6vQ7FMAqAYIDprkVV8fUf_7hZ4Qvc8"]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataTXT ["v=spf1 include:_spf-a.microsoft.com include:_spf-b.microsoft.com include:_spf-c.microsoft.com include:_spf-ssg-a.microsoft.com include:spf-a.hotmail.com ip4:147.243.128.24 ip4:147.243.128.26 ip4:147.243.1.153 ip4:147.243.1.47 ip4:147.243.1.48 -all"]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = 
RDataTXT ["docusign=d5a3737c-c23c-4bd0-9095-d2ff621f2840"]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataTXT ["FbUF6DbkE+Aw1/wi9xgDi8KVrIIZus5v8L6tbIQZkGrQ/rVQKJi8CjQbBtWtE64ey4NJJwj5J65PIggVYNabdQ=="]}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/75330cfe5b4c19161dee2d9f578d8fb4.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/75330cfe5b4c19161dee2d9f578d8fb4.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/75330cfe5b4c19161dee2d9f578d8fb4.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/75330cfe5b4c19161dee2d9f578d8fb4.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/75330cfe5b4c19161dee2d9f578d8fb4.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/75330cfe5b4c19161dee2d9f578d8fb4.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 1192, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = True, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 5) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 3478, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/7a13aa49d0b498ee7b5073d1f1370273.bin and 
/tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/7a13aa49d0b498ee7b5073d1f1370273.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/7a13aa49d0b498ee7b5073d1f1370273.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/7a13aa49d0b498ee7b5073d1f1370273.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/7a13aa49d0b498ee7b5073d1f1370273.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/7a13aa49d0b498ee7b5073d1f1370273.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 51432, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 5, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "_xmpp-server._tcp.gmail.com.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, srvPort = 5269, srvTarget = Name "alt3.xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, srvPort = 5269, srvTarget = Name "alt4.xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 5, srvWeight = 0, srvPort = 5269, srvTarget = Name "xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, srvPort = 5269, srvTarget = Name "alt2.xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, 
rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, srvPort = 5269, srvTarget = Name "alt1.xmpp-server.l.google.com."})}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/7c47e44ab3c0e8d6f3d5f4246dc4a0d2.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/7c47e44ab3c0e8d6f3d5f4246dc4a0d2.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/7c47e44ab3c0e8d6f3d5f4246dc4a0d2.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/7c47e44ab3c0e8d6f3d5f4246dc4a0d2.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/7c47e44ab3c0e8d6f3d5f4246dc4a0d2.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/7c47e44ab3c0e8d6f3d5f4246dc4a0d2.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 24972, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "4.4.8.8.in-addr.arpa.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "4.4.8.8.in-addr.arpa.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataPTR (Name "google-public-dns-b.google.com.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/7e3570aada9975cbb2285ed217fe5016.bin and 
/tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/7e3570aada9975cbb2285ed217fe5016.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/7e3570aada9975cbb2285ed217fe5016.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/7e3570aada9975cbb2285ed217fe5016.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/7e3570aada9975cbb2285ed217fe5016.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/7e3570aada9975cbb2285ed217fe5016.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 7, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "example.com.") (Type 43) (Class 1)], msgAN = [MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 64454, rrData = RDataDS 31589 8 1 "4\144\166\128mG\241z4\194\158,\232\SO\138\153\159\251\228\190"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 64454, rrData = RDataDS 31589 8 2 "\205\224\215B\214\153\138\165T\169-\137\SI\129\132\198\152\207\172\138&\250Y\135Z\153\f\ETX\229v4<"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 64454, rrData = RDataDS 43547 8 1 "\182\"Z\178\204a>\r\202yb\189\194\&4.\164\241\181`\131"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 64454, rrData = RDataDS 43547 8 2 "aZd#5C\246oD\214\137\&3b[\ETBI|\137\167\SO\133\142\215j!E\153~\223\150\169\CAN"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 64454, rrData = RDataDS 31406 8 1 "\CAN\153h\129\RSn\186\134-\214\194\t\247V#\216\217\237\145B"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 
64454, rrData = RDataDS 31406 8 2 "\247\140\243\&4Or\DC3r5\t\142\203\189\b\148|,\144\SOH\199\246\160\133\161\DELQ\139]\143k\145m"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 64454, rrData = RDataRRSIG 43 8 2 86400 1494563200 1493954200 27302 (Name "com.") "\133-dQ\179\DC3\EMi\SI\166\249\140E\212\163\219\181k\SI\197\198M\231\EOTJ\203YO\189Z\151\170p\232U$\160\DC2A6\234c\233\DLE\183\150\200\SOH\177\164\DC2c\161\223\201AB\ENQr\241*\a\251]\tWQ\219\128vPs\145g@\153=\140\141\139\176\222\238:d\145\184\SYN`\ACK\156Z\SIavT@\ENQ\140\179D\200@ s\190\239r\237\160\213\225\FS\215\129\EM\252\ETX\135\ENQ]\225*\CAN\USg\240\242"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/7f4d4de8e74e86e10d19f1f7428609e4.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/7f4d4de8e74e86e10d19f1f7428609e4.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/7f4d4de8e74e86e10d19f1f7428609e4.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/7f4d4de8e74e86e10d19f1f7428609e4.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/7f4d4de8e74e86e10d19f1f7428609e4.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/7f4d4de8e74e86e10d19f1f7428609e4.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 65505, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 255) (Class 1)], msgAN = 
[MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 1318, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/816c6c332941a20f07c497ee16609971.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/816c6c332941a20f07c497ee16609971.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/816c6c332941a20f07c497ee16609971.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/816c6c332941a20f07c497ee16609971.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/816c6c332941a20f07c497ee16609971.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/816c6c332941a20f07c497ee16609971.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 10909, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 5, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 905, rrData = RDataA (IPv4 0x6828d323)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 905, rrData = RDataA (IPv4 0xbfefd5c5)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 905, rrData = RDataA (IPv4 0x17603435)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 905, rrData = RDataA (IPv4 0x17647aaf)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 905, rrData = RDataA (IPv4 0x682bc3fb)}], msgNS = [], msgAR = [MsgRR {rrName = 
Name ".", rrClass = Class 4096, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/81c9cbfb364a8be0302bd0f4f600c3f6.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/81c9cbfb364a8be0302bd0f4f600c3f6.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/81c9cbfb364a8be0302bd0f4f600c3f6.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/81c9cbfb364a8be0302bd0f4f600c3f6.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/81c9cbfb364a8be0302bd0f4f600c3f6.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/81c9cbfb364a8be0302bd0f4f600c3f6.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 31016, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "4.4.8.8.in-addr.arpa.") (Type 12) (Class 1)], msgAN = [MsgRR {rrName = Name "4.4.8.8.in-addr.arpa.", rrClass = Class 1, rrTTL = TTL 86133, rrData = RDataPTR (Name "google-public-dns-b.google.com.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 4096, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/87aace8db5b6cb0b4b8c6194967f345b.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/87aace8db5b6cb0b4b8c6194967f345b.bin differ diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/87aace8db5b6cb0b4b8c6194967f345b.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/87aace8db5b6cb0b4b8c6194967f345b.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/87aace8db5b6cb0b4b8c6194967f345b.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/87aace8db5b6cb0b4b8c6194967f345b.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 0, mhNSCount = 4, mhARCount = 1}, msgQD = [MsgQuestion (Name "debian.org.") (Type 50) (Class 1)], msgAN = [], msgNS = [MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataSOA (Name "denis.debian.org.") (Name "hostmaster.debian.org.") 2017050804 1800 600 1814400 600},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataRRSIG 6 8 2 3600 1497698216 1494238616 53598 (Name "debian.org.") "\141G\148\&8Ru\215~\167\220\201\a\234\142\f\136?p\174d\136E\130\135\136\&7vA\212\"x \188\189\SYN\166x\158y\236\DC18\163\241\217B\169;\200IUF\136\138\253A2Fq\150M:\174C/Y\GS\194XD\135\216\140P\DC1U\187\STX\163DM\182\205\243\185\170\165C\209,bp\195\246\236\162)\180\209\ETX\216\232\219\241<\219\158S\EOT+$C\250\174\b\237\155:\233\177@\208F\192\198\131\132zsO9@\f\247\138\SOHjM>ajU\179\t\n\222\203b\DLE\253\224\&4\240\DC4N\146ua\148\208\185\183:}\156\148\153\171\ve\180\SI\229\193\140\232\SOH\182<#?P\STX\164#3\167.\163\143\&9\205"},MsgRR {rrName = Name "r0ju544ltcbavo1k3br87djp168tl58j.debian.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNSEC3 1 0 16 "\SO\192\162\220\222" "\216>\235R\171\NAK\DC2a\203\134\DC37\173/E\197I$\182v" (fromList [Type 
1,Type 2,Type 6,Type 15,Type 28,Type 35,Type 46,Type 48,Type 51,Type 65534])},MsgRR {rrName = Name "r0ju544ltcbavo1k3br87djp168tl58j.debian.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataRRSIG 50 8 3 600 1497614849 1494156550 53598 (Name "debian.org.") "\234\DLE\144\162h\RSm\188|\SOHB\233\215\RSJ0\DLEoD \218\178\131|\251\205\&3\246WZ\227\208\v\EMD\251\153t\193\155\145\133w7\EM+\133\244\129\159\SUBgT\DLE~{\229\218\187\&7<\173D\169\204\169f*\v\189\149_\161\210\207\173\169\166\DC3\168\DLE\n\141F\221\138f\131\214'\194\131\ACKc\r,e\180\bON\235'\132\r\198\160\242\v*\ETX\ETB\140x<\135\165\219\ETXk\NUL\221\&0\216/};\223\241\US\"*@\230\135`\216\222\207\234\217dfI;\238g5u+\DC1e\155\SO\195\239\229\196("},MsgRR {rrName = Name "_kerberos.fedoraproject.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNSEC (Name "_openpgpkey.fedoraproject.org.") (fromList [Type 16,Type 46,Type 47,Type 256])},MsgRR {rrName = Name "_kerberos.fedoraproject.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 47 5 3 86400 1496578736 1493986736 7725 (Name "fedoraproject.org.") "\144\141\152~q\a\220}#\DC2\228\150\140\166\209\152B\202\r\SO\184\ENQ\176\202\SOH\167\ETB==\\WN\ETB\193\242\159v\141\218\158\232\DC3O3x\130=\188P0\SI(\140\129R)\223fd\183qB\187_\GS\165\128wGR\228p\204\&7\DC3\173\186\130\221l\135\STX\STX\147\DLEV\243\159\&9\STX\138\209\DC4=\DLE\DC4\"/X\232Y\190\134\198\216k-\160q:p\206\189\176~ \154\242\226\&9m\232Z\214\129 \228_"},MsgRR {rrName = Name "_kerberos.fedoraproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataURI 10 1 "krb5srv:m:kkdcp:https://id.fedoraproject.org/KdcProxy/"},MsgRR {rrName = Name "_kerberos.fedoraproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataRRSIG 256 5 3 300 1496578736 1493986736 7725 (Name "fedoraproject.org.") 
"\ESCg\174[\230\138Y_\218\200\&2\SUB^\ETX\243\128\&6\180\131\136\245\181\158'\224%\204n~%\CANN=n2L\146\203h\198\153\SYN\175\221\243\EOT\171}\137\136\181\SO\191\226b\152\SI\222k{\132R\143\220U@\250\220~\235\154\159\186E\211&\187\174Jjjs\196H:%\248\EOT\235\194\182\139q\181\158\187\131\164bE\f:\210^f\NAK\147\ETXF\RS\203\&4\174\188\164\153\198\v\142kJ\188\136\173~\150\128_"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/8e08e117a1d187bac5d498ffc2797f3a.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/8e08e117a1d187bac5d498ffc2797f3a.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/8e08e117a1d187bac5d498ffc2797f3a.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/8e08e117a1d187bac5d498ffc2797f3a.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/8e08e117a1d187bac5d498ffc2797f3a.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/8e08e117a1d187bac5d498ffc2797f3a.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 16846, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = True, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 5) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 185, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [], msgAR = []} \ No newline at end of file Binary files 
/tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/9499a890bd65e85e74e68ca5631941f7.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/9499a890bd65e85e74e68ca5631941f7.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/9499a890bd65e85e74e68ca5631941f7.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/9499a890bd65e85e74e68ca5631941f7.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/9499a890bd65e85e74e68ca5631941f7.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/9499a890bd65e85e74e68ca5631941f7.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 48779, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 5, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "_xmpp-server._tcp.gmail.com.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, srvPort = 5269, srvTarget = Name "alt3.xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, srvPort = 5269, srvTarget = Name "alt1.xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 5, srvWeight = 0, srvPort = 5269, srvTarget = Name "xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, 
srvPort = 5269, srvTarget = Name "alt2.xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, srvPort = 5269, srvTarget = Name "alt4.xmpp-server.l.google.com."})}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/949ea24aecb8e40432f6af14ceb91ab6.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/949ea24aecb8e40432f6af14ceb91ab6.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/949ea24aecb8e40432f6af14ceb91ab6.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/949ea24aecb8e40432f6af14ceb91ab6.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/949ea24aecb8e40432f6af14ceb91ab6.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/949ea24aecb8e40432f6af14ceb91ab6.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 3}, mhQDCount = 1, mhANCount = 0, mhNSCount = 8, mhARCount = 1}, msgQD = [MsgQuestion (Name "xfoo.org.") (Type 255) (Class 1)], msgAN = [], msgNS = [MsgRR {rrName = Name "dlbdq9qmbnmmf3v28ndnjkv55oc2f0ad.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNSEC3 1 1 1 "\211\153\234\171" "mW\246\229v\\-\n\185\148\151&\141\DEL\a\191\200\217%\231" (fromList [Type 2,Type 43,Type 46])},MsgRR {rrName = Name "dlbdq9qmbnmmf3v28ndnjkv55oc2f0ad.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 50 7 2 86400 1496158069 1494340069 3947 (Name "org.") 
"v\DLEJ\225\185\241\204\204jl\208\SYN\221(%\168G:eLV\241E\195$\239\CAN\223\164\195\CAN\246E|\179M\168G\159\&2RAR'\136\150\171\NUL\238%\ACK\EM\196\143\r\250\222N\244x\244\&1\255=g|dB/\239Y\b\205\255n\156\179&\140\ETBfG\232\207\172\f\ETB\251GK\201\DC4\180\242\209\172\149\218\210\222\145\161^yi\144\246\&2A\141\239\140\214(\190\EOT\251\165\211ySH\EOT8\232\ETX\DC2\222"},MsgRR {rrName = Name "h9p7u7tr2u91d0v0ljs9l1gidnp90u3h.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNSEC3 1 1 1 "\211\153\234\171" "\138r\173\236\198OM\228`0\228\DC2\224\214\210\237\b\215X\GS" (fromList [Type 2,Type 6,Type 46,Type 48,Type 51])},MsgRR {rrName = Name "h9p7u7tr2u91d0v0ljs9l1gidnp90u3h.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 50 7 2 86400 1496166683 1494348683 3947 (Name "org.") "\162,\189o\CAN@\248\170$\DEL\DC4|\128\134\232\SYN\246\230rE\153{A\173x\241#8|\231N\166\163{b\145[uB\f\148,\148(\242E_\192\225L\193\186*\232\154\US\173\174\133^\SUB[\139\208+B\CANOc=\144R\NAK\228\ACKc\237\171\159\181\b\SOH$;\138\216o\ETBq*\225\251\225\133WJlX\SOH=K\ETX\198\182\149\255\133\NAK\186R\213&\206\211\244\150\US\DC3'\f\186]\ETB\242BE\DLE\DC3"},MsgRR {rrName = Name "vaittv1g2ies9s3920soaumh73klnhs5.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNSEC3 1 1 1 "\211\153\234\171" "\250\166\180%\ru\DEL\210\250d\146Z\NAK\237\162~\250\190\&2\193" (fromList [Type 2,Type 43,Type 46])},MsgRR {rrName = Name "vaittv1g2ies9s3920soaumh73klnhs5.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 50 7 2 86400 1496158069 1494340069 3947 (Name "org.") "gO\136U\GSsg\199\162 \243\158\STX\190~\131\ETX\228=.\211x\191\SUB\194_\241\FS\158 \167\202\170\221\141\148\220\165\159\170\253\209\182\195\\\218N\132\&6\254\EM\158\200\251\\t\146\244\&1\254\241\DC41E\248\143)\227Hm\CAN,\EM\255\207~e|\163\200Eh\227q\ESC]}\169\188\229\139\245\166\206\221\254\145%\150\136\190T@\DC4:\SOH\249\177\148\189)le%_V\186u'\214\254C\245\NAK\212]l\237"},MsgRR {rrName = Name "org.", rrClass = Class 
1, rrTTL = TTL 899, rrData = RDataSOA (Name "a0.org.afilias-nst.info.") (Name "noc.afilias-nst.info.") 2012479177 1800 900 604800 86400},MsgRR {rrName = Name "org.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataRRSIG 6 7 1 900 1496166683 1494348683 3947 (Name "org.") "\DC1U\179b4\204\v\240\182\f-\170\206\195\144O2p\DC3\174\160\251\177\r\185\RS\199\RS\239\221/@\b\136\235\201[B\n\236P\SUBb\198>W\163~\221^\233\ETBr\\c\212\238\185\136w\USUG\128\201\161\&8\128WU\SUBS~~!\208\160\206\191H{u\158\192mP\253GE\128\155\nY\207\SOH_\DEL\163\208\195\139\US \225\253\182\237\231\244J\135-\ETB\204\130\187\217\209\162\161t\212\220\b\\l\ACK\NAK"}], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/98b101299888feb502f38066db59f0af.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/98b101299888feb502f38066db59f0af.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/98b101299888feb502f38066db59f0af.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/98b101299888feb502f38066db59f0af.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/98b101299888feb502f38066db59f0af.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/98b101299888feb502f38066db59f0af.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 18, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "openssl.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name 
"openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataSOA (Name "auth.openssl.org.") (Name "hostmaster.openssl.org.") 2017032573 86400 900 604800 3600},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataNS (Name "secondary.lp.se.")},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataNS (Name "primary.lp.se.")},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataNS (Name "ns.openssl.org.")},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataA (IPv4 0xc26196ea)},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataAAAA (IPv6 0x200106080c000180 0x100ea)},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataMX 50 (Name "mta.openssl.org.")},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 256 3 8 "\ETX\SOH\NUL\SOH\162l\158\239\255\&3\RS\144\175\162\&3}\143|\138\214Nk\NUL\200\217\214#*\171\176SW\ETX_-\200\235B\152\232\DC3\191\182\ACK\240\160\198R\188\156D\147\163L2\199\170\245\142\187\195\169\178\135\159\203\242o\177\196\230\129\234\206\171]\151\192S\225ufi\ACKKy>~D\134\192]\150}\168x\181O\250\214\ENQ8H\143\222.\221X\169_)\202\239\180\226}\200\189\252\v*\r/\210RP\145\184mk\197{"},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 257 3 8 "\ETX\SOH\NUL\SOH\188i\181\221\168\175\172\SOH\f>\207\ACK\209\158\184f\DC4\200\139e\164m\149\SO\140\240\198s\161\SYN#\ESC\151Vkd\145\193~\239\254\226\199\252\&9*\130M\DC1\157\148\132\169'\187\153\185R\139\186~\SYNg\153\239\128|\249\140\v+\194\a\r\220\128\150\&1\225\168_%\144\131\ETB>\DC3d\254\249M;\ESC\178\168\ETB7\216\193\198\173\165wT\EM\182and.\222\195\133\193\137\209+\130 
K\243\&77\150\211\234{\NULO\153s\ACK\230\212\143\186\141\CANl\247o+\171\136\217i/\SOH\255~\181I\255\128\239E\176\&0\224\205\SOH\141cK+q\253\t\162[\131\225o\248\144\140\173\151\204\144\&5}`\246\ESCn\180\t6U\US\251HY\210\249S6\190Q\207\195`\220H\237\165|\193+\NAKZz#\237\172\166\138\139\EOT!\STXC\210d\198\238\238\RS6\188\SYN\189\US|k\DLE\131\201\136\233e\166\\R\171\231\215\ENQK\b\179\EM[\153\139"},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 48 8 2 3600 1495160463 1493948580 22791 (Name "openssl.org.") "\SO\204\187\243\214>\239\247\172\187\164\216\194\&9W\140\163\148\211\223\a\212b\FS\DC1\t%QpK\185gy\247\158\n\140\227\182\233\185\131\216\167\160\158\240FO\240\218\195\242\179\&7\138_?}\202\EOT\237\206\218B\156\161$\DC2\241;\246ZR)\r#3cL\STXj\223\225\ENQ\206r\135E\191\195yu\243\228:{\131i\FS\SOH~6\236\230\177\ESCS\246p\254\f1T\170\140\236\EM\241\134\171??g\228\134\229\140"},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 48 8 2 3600 1495160463 1493948580 44671 (Name "openssl.org.") "NC1\231I\223\244N\ENQ\252\206(D\NAK\251\160\214\153\v9\NAK\DLE\224\136\200Q\173\173\239q/6\211\132\n\r\189\201%:\252\205P\ACK\168\DC2bV\167\129:s\239\211k\235}'\211\202\189\197\136\179\189\239\176\210\211fj\237\173t\243n\EM\143\215\CAN\NAKS!\243\US\197\132S\\\182\182l\165\130\CAN\175\&0\176\196P9MpMo\232\166\211\173l\rm%gy:=H\168[M\163\&7I\194\252\SOH\225!g\140\&7\DC1\215?\253k#^\184~\157\SO\189f\250`q\232\225"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 48 8 2 172800 1496626622 1494031022 2746 (Name "stanford.edu.") "\141\184\176\RS^(\172\138\t+\a}2\149L\STX\EM\140\n~\181\227l\245?\183\231\216\196\187\NUL\SOH*\219}\212h\140\161nn\176-]\168\239\135kPk* 
\198+\199\211l\226}\b\135\207{\ENQ\205\130\228\239\v\181\224\242\192\&5r;\151x\172\236\a\171d\128\194\CAN4\129\174\254\146^\143(>\160H\224\219\190\DC3\230K\219\220L4~\153\r\212I\FS\180\&6\225\235A\136D|\FSd\128\145*\135O\129>\185\&8\191\200`\236\NULh/K8\DC1\185\183\US&\DC1\233\209\193WP\SUB\241\155\252\a\236nie\SOI\149\ACKWb\175s\EOT\181%\251\194ihV\143v\141\198\154\165\&7+L\160\v\216\DC27\216\138/i\239\172\a_8\191\FSh\132:\215^\222b\242\&2\t\190\ETX\203\237\133+\245\RSm\255,\SOH\203\239&\204\204\142*D\166f\b\151\167\255\246\208\FS\219\v\218N\128\198n\146\205\189\130\163-27"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 48 8 2 172800 1496626622 1494031022 27840 (Name "stanford.edu.") ")\195\155\185.\192_\148h\249|4\139\174\USNY\152N\128Xh\161\225\220\191\153~\171\GS\232\193\135f\133\212\SOH\202\204?I|\233\FS\229\203\211o/\155\201\223\157#P\141\&1\199\230\136\&4\SOHC\246\&7\ESC\161\158\249#v#i\184)aQ\166\133\228\183\159C\DC1sI\FS=c\166U\248a\199\145\220\165+\255p\245\254\209\143ZxE\207 \218\198\RS\173a\195'\143\168bj\195\144\167nP\205+\230"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 6 8 2 172800 1496846541 1494250941 27840 (Name "stanford.edu.") "\DC1x\SUB\204}_\150 \168j\228\134\183\b0\158dX\216\199M\213\191\181!rK\194\149\234\169t\SUB\215:\158xr\169\164\ETB\206\243\174\171y)\245\159\225F:\139Kva\247\177\NUL\EM\171-.#Rmzt\CAN}\222\177\147\138*Y\244$\141\t\254\179\226-Q@O\v\238\216\132\169&\208\202d\237 F\v<\178<\203\&6w\157e\211H\144\233I|Y\"N\209`\v;\159\163\225\SUBq\230\GS"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 2 8 2 172800 1496248135 1493654460 29979 (Name "stanford.edu.") "B\164\247\168\186g\148\174\254f\201n0KD\182\DEL\191\199\133%g\246\&1\128\147:\168\140Xl\224\169k\135\&4\f\DC1P\248\143\215\185\203\&3?\128\r\137h\168\FSR 
\160\")\202vx\ETBb\131\&12\234\235D\141\244\220\212*\224\188v[\205\183nE\193\216s\140\186\RSH\221\&1VK\224\200\US&\137\r\220P\242\196\130\&0\ETBRD^\137\231\186/\193d-\SUB\168\243O\177\CANN\EOT4\167\"\138."},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 2 8 2 172800 1496626500 1494031022 27840 (Name "stanford.edu.") "\182\190W\128@\251\&7\176\172\SOH4\235|\SI\220\173\227\GS\160\EMk`\FS\155\186Z\190\220\STX\DLE\\\234\253\251hQ'\165\SI\SYN\239\236\f\129\203\RS\247H\222g\198\220\172\&6\155\186$E\160y\163\160\DC2e\242|V\225P\196\164n_.S\177\237i\ETXI\170D\238)\222*x3\157\SOH\NAK$\245\FSl\200\141\GS\DC2\EM\134I\144\201\192\223*Y\212\n\219\147u\ACK\224\ETXA\DC1\206\234\217\253\181\152 \205\187W"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 99 8 2 1800 1496759744 1494166881 27840 (Name "stanford.edu.") "4\STX^MH\DC2\235r\162\169\210\197\138\244V\227:\224\196q\204\218\173(\DC3\232\172:n\252\132\148\190\166\209\239\187\223\199\148\179}\ETBYU\th\NAK\210\r\STX\194\DLED\148 i\147tm\161`\149\234@\212B\146\241\SUB\r\146\229\220\149\173o\190#\247\txA\226f\165\180<\225\&95\245\197\214\204\215O:\190D\147\201\SOH\132G\185MC\163\&1\208Me\203\205Vp\163\175X\224\253 \245c\NUL\247\251"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 16 8 2 1800 1495233106 1492638863 29979 (Name "stanford.edu.") "r\140\152\186\197\176\159\&7\217g\215\&5\155\252\ESC\230^|\195K\201_\181N\191\194\214\a\234m\SYN\"3\172&\b\218t\132\134Q\134\131T\229\253\193\240\250T\DC1;T\SOH@I\248\255\148\185<\v\149\177ZPT\NAK\225\171\&6\ao~e|\EM^\144\151\170-i$\209\180\206\&8b\154\218\174A\133\235P\t\183\194\190p\199\163A\245\DC3Fb\ENQS\221\249\DLE\230\222\171\148\196\vG\251Z\187v\200\131\166\249"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 16 8 2 1800 1496626500 1494031022 27840 (Name "stanford.edu.") 
"D.\199\184\154\154s\223\200\139\195CN\200>\238\246\156v\185}\210\201\250>\177H\SOH\DC43\166\242\f\155Mi\246\246\&7G\130\231GzO\242\251\246\208\149\227\154H\142A\SUBB`\NAKG:=\196\238Zd\248\220\232\224\164\230\SOH:\254\202u\206iI\143\238\209]]\t\197\214\240Uw\226\191\169\152|\169r\139\219\212\251H\US\DC1R\ts\223 \bK\159j\US\195\132\148x/\SUB\244\216\197;q\130\154"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 29 8 2 1800 1496560398 1493966179 29979 (Name "stanford.edu.") "\SO\230 L/\255~\184\245\220VUp\a\244\CAN$[\163\223\242\SI\ETB\180\&5\206*\v\170\223NF6\209\167\211b\DC2\133xbL8\137v\f\198\243\136\&9\192K\155\142\151\&4O\161d\229z;\f\DC3p\223\147\254\223S\133W4:\156Z\231TOF\129[xPJ\244\224\EOT\176\253i\255C\SUB?\158\168@HS\169\178\240\143H\192\242"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 29 8 2 1800 1496626500 1494031022 27840 (Name "stanford.edu.") "\158H\SUB\\\r\SI\246\208\205\252c\167\240\&41\DC3\224\&5j\240\180\215\237\b\STXp\144#>\254\196\\9\134f\251\235\tS\162T\f\129\196\&66E\182\141\202:\202\153\EM\ETB2\NAK\161J\181\222\&8\252\&8\226\145\129\135p>\US\185U\214\SOH8\246\NUL\192\ETX\DC1\252\&0>\232\203\SUB\176\248\DC1[+\219zW|z\161\238J_\132\br}\178\&5\SO\207\192\251\247\152J-s\221\224!\241aQ\153y\"\177\141\210"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 1 8 2 1800 1496560398 1493966179 29979 (Name "stanford.edu.") "\DLEj,X\RS\CAN\SOH\162\157\222\203\202\192\&8\DC2\203\169\ETB\236`t\186,,\250QMq\NULcu\214\223\211RY\189h\EOT\163+u?\154F\188\216.\206\212\223Z\216\160C\163\":\191\151]\145\&6\162=\221\169=-\207\235'h\167\129\200@\tFl>UV\242\NULCC\187_nu\154\219\GS\f\165\128\163\193\DLE\217\215\226\ACK\158\FSY\144\220o\183$\192\184\203\248c}*/\212}\182\148N\246\232\&6"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 1 8 2 1800 1496626500 1494031022 27840 (Name 
"stanford.edu.") "\142\DC4\180&=/\GS\178\241\186\b\199\177\246\154\&8Y\240m\DC2\160\226,\213\139\182\190\164\197\188{\ACK\163+\228V\DC4H\143@\f2\SOH7\158\198Mr\206s\150\211\144\216qs\217\DC1Y\n\209\203\STX\US\137\154D\235\191\"f\151;\ACKK}\156\188\240\173\fnz\224j\184\215\145\186\220TYm\CAN\249\165\202\247#nf\200N\SUBM*(\154\181UQN\234}\230\212\FSN7\230^\203H\133\255\143\140="},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 28 8 2 1800 1496560398 1493966179 29979 (Name "stanford.edu.") "w\255\145C\SI]\247\147\190\254\147I`\198=\198\242\f\GS\153\183^g\236\238\191\142uLJ\222\186\246M\139\234#@\184\176\253\145\176\DC3\192O\GS3t\229\215\252\tC\225@\244t^u\134['\137\222\206\251\NUL\NAK\\\f\176\151\188\206B@\210\214$>GY\139\152\DC2s\181CB\143@\ENQ\EOT\195v\232^\161\GS\166\236\176\242Z\183\183\134\153\168\255\SUB\235a\177D\136\197@\135u\137u\246\137F\196\222"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 28 8 2 1800 1496626500 1494031022 27840 (Name "stanford.edu.") "\DC4\225\222\156\174r\231}*\162\190\SOo\DC2\163\212;i4\137OD\ENQf\135\179\FS{=;4\189|\n\164Rh+\217uo\135\198mRB9\223\DC3/\236B\222\229w#\170\252\ACK\217\a\182#\232\DELhb:\ESC\244!\SYN\130\200\174\173\213\186\218\223\202\171\FS\158\194\STX\205\236.@62\130\153\147\159\DC2\208B\SYN\188\148XB\217\SOH\195\EM2WF\235\&4\180+\231+ :\ACKO\198\133\130\128P\214~"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 15 8 2 1800 1496464420 1493870399 29979 (Name "stanford.edu.") "GH\218\173#\197\DEL\175`\213\ETXj\202R\132\170\&5\t\EOT\155_\165\239}\182\195S\143\GS\195\167\191\SUBN\237R&\213\ETX\221\207\US\242\230b{\234.|\214\STX\161,\150\138\a\159\202?]\217\n\175@bh\245\210\174\140\v\174\207;G\177B\189,\239\b\GSJ\159\t!\176.\209a\217!\146\n|\165Z\214/\209\235k%\US $\220r\215\196\151\216\CAN\ETB\212o i\216d\193\\\169,\NAK0%\247"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 
1, rrTTL = TTL 1799, rrData = RDataRRSIG 15 8 2 1800 1496626500 1494031022 27840 (Name "stanford.edu.") "\208\141)]A\138{\180Va\181\160[A\187\224\EOT):w\226\149Si\168\171.U\fW@\172\203\195/\136\156\222\161c\r\209\201P\\B\SOH\196\199\186\146\170x\188t\NAK\250q\246\210v\213S_\NAK.\141\RSIU\141.\202?\172\236\188\145\EOT\223P\251\DC3/+\SI\139|W\NAK%\DC2\162`)I\146\144\151\157\EOT\210\200\206}\225\NAK\216\160\147\STX\f\150\241\GS\224\DLE\162\173\"\STXA6:>\148\184U"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNSEC (Name "003004139284.stanford.edu.") (fromList [Type 1,Type 2,Type 6,Type 15,Type 16,Type 28,Type 29,Type 46,Type 47,Type 48,Type 99])},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 47 8 2 1800 1496624318 1494031186 27840 (Name "stanford.edu.") "i\218\201\NUL\144r\148\148.j\196H\245&'\138\152v\230@PO\226\188\192\210~\172}_K\168\150qO\198F&\128\220C\184\251\166\237\251\236\246.RvN\250\218\216\172T4\152b0\247\201\186\EOTg\185\&4ARl\153_s\242\212\136\f\135\191QBe\162Y\SIA\ENQ\173\233I\169\168(\SI\248\162\172'\130\197&\226\198\142<\ACK\vg\190Q\130\238n(T\235\&7\SOp^\194\187J\208\ENQ\181\151"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/9ebe8abd73d2cb61ac54d631a0305b61.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/9ebe8abd73d2cb61ac54d631a0305b61.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/9ebe8abd73d2cb61ac54d631a0305b61.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/9ebe8abd73d2cb61ac54d631a0305b61.show --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/9ebe8abd73d2cb61ac54d631a0305b61.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/9ebe8abd73d2cb61ac54d631a0305b61.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 46337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 1, mhARCount = 0}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 12) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [MsgRR {rrName = Name "fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataSOA (Name "ns1.fastly.net.") (Name "hostmaster.fastly.com.") 2016110301 3600 600 604800 30}], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/a2878248b0e2a6d30b50bc67d11b5a21.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/a2878248b0e2a6d30b50bc67d11b5a21.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/a2878248b0e2a6d30b50bc67d11b5a21.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/a2878248b0e2a6d30b50bc67d11b5a21.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/a2878248b0e2a6d30b50bc67d11b5a21.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/a2878248b0e2a6d30b50bc67d11b5a21.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 40786, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 
0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 21, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "com.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSOA (Name "a.gtld-servers.net.") (Name "nstld.verisign-grs.com.") 1494175849 1800 900 604800 86400},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataRRSIG 6 8 1 900 1494780649 1494171649 27302 (Name "com.") "y\199J)\ESCiLV\160\DC1y\146Y\202z\249\&6+\239\208\159\143\178\194V.u\EOT\189\220\137\&9X\ENQ\156\DC1\189\218\237v\193=\169nJ+3\133\SUB\240\151T\200\251'\146\209\DC3\aD\DC4\248q\173\216I\234\180\&4*gJ\231\170FV\212\186)\156\219\208\165>;\249V\161U\144\237\185\192\252\138\167\253\NAK%\209\180\240\174n+\239\128'CqB\182y\175\171\171\DC2\192:U\216%c\227\160Kw\215"},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "f.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "c.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "g.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "i.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "l.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "m.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "e.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "h.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "a.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = 
Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "b.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "j.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "k.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "d.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataRRSIG 2 8 1 172800 1494650866 1494041866 27302 (Name "com.") "P\166\202az\187\155\175\227w\195\202Hq2\163\228aU\231\144\203\159\196A\146!\172\USvn\193;n\170F\185\255\170\169=\235\CAN\181%\199\"\243\254\159\205\140\158\&5{p1\ETB\231vb\fv\209\203\179\DC2\132(v\140\147\222\"\181NXB\202\246s\242\n~\163\r\SUB\EOT\\\DC2\179"},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataDNSKEY 256 3 8 "\SOH\ETX\171\205/\235\a\130\203\ESC\EM\135\129\189e\EOT\229j\140\133~-\233\161\248\221?\181\200 ;m\173#\153\SUB\196\&0\192R\198]n\177Tt\214\224_\184V\141\215\154\145\209\FS\238e\244\ETX1\157\128\STX\135\a\184[\244 \254\172\&8\225'\142}\DC4\174\161$\230\217\226\149RG\143\EOT&\130@1'\a;\197/\229w\255\141Z\NAK=\160\205\207\v{\ESC\CAN\NUL\139w\175\214T\237\150J\135\229\231\132\152\130\233\171"},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataDNSKEY 257 3 8 "\SOH\ETX\195\206WM\152\203\217\NAK~\rp\210t\184I\202\SO\SO\237\154\255\197\220\204\144GIi\ACKe\\5\203\b\179\245.\RSj\245\&6\139>z\140]Ux\"\EOT\194#\153\204\238C6\153r\GS\163B8-\EOT\249\185b\194\227\223\249Y}'\166\242\f\SI\135\&9\225U\EOT\149\212\172<\SOH\193}\SO\205\DC1E\EOT_\145\ACK\131\158\253\228\196\&8\238\141\180\&4\133\GSc\234\DC1\234tl\220\233\&1\138s 
\204\168\129\239\135\222\203\141\139\216g\160\228\EOT\244\179\208\199\247_\243\228\176k\210.\DC2\132\240\157\139\176^\144M\191~w\170\USO\247\251\137Hu\204g\186\187{d\FS\166l\188\156\138S\140[\174\131\230p\214\US\231C!P\169h0\198\195W\179\232\157\243ZX\b?\246Kh;gA\184\174\195xou\157\143\137\205\239|\213\254\192\248#`m\ESC\247=\219/!\225\217)\195\243E\ETX/\128\228\162\EOTL\178cR\RS}\228I\183r\203\150?\SUB\189"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/a48ae1f9104e3732d4c5bd183c2a9ae1.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/a48ae1f9104e3732d4c5bd183c2a9ae1.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/a48ae1f9104e3732d4c5bd183c2a9ae1.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/a48ae1f9104e3732d4c5bd183c2a9ae1.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/a48ae1f9104e3732d4c5bd183c2a9ae1.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/a48ae1f9104e3732d4c5bd183c2a9ae1.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 15280, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 147, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData 
= RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b0beddd4cdcdc372a58192f3ae7264e2.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b0beddd4cdcdc372a58192f3ae7264e2.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b0beddd4cdcdc372a58192f3ae7264e2.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b0beddd4cdcdc372a58192f3ae7264e2.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b0beddd4cdcdc372a58192f3ae7264e2.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b0beddd4cdcdc372a58192f3ae7264e2.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 1949, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 15, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataA (IPv4 0x17647aaf)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataA (IPv4 0x17603435)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataA (IPv4 0xbfefd5c5)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataA (IPv4 0x6828d323)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataA (IPv4 0x682bc3fb)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 84943, rrData = RDataNS (Name "ns1.msft.net.")},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 
84943, rrData = RDataNS (Name "ns2.msft.net.")},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 84943, rrData = RDataNS (Name "ns3.msft.net.")},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 84943, rrData = RDataNS (Name "ns4.msft.net.")},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataSOA (Name "ns1.msft.net.") (Name "msnhst.microsoft.com.") 2017050703 7200 600 2419200 3600},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataMX 10 (Name "microsoft-com.mail.protection.outlook.com.")},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataTXT ["google-site-verification=6P08Ow5E-8Q0m6vQ7FMAqAYIDprkVV8fUf_7hZ4Qvc8"]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataTXT ["v=spf1 include:_spf-a.microsoft.com include:_spf-b.microsoft.com include:_spf-c.microsoft.com include:_spf-ssg-a.microsoft.com include:spf-a.hotmail.com ip4:147.243.128.24 ip4:147.243.128.26 ip4:147.243.1.153 ip4:147.243.1.47 ip4:147.243.1.48 -all"]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataTXT ["FbUF6DbkE+Aw1/wi9xgDi8KVrIIZus5v8L6tbIQZkGrQ/rVQKJi8CjQbBtWtE64ey4NJJwj5J65PIggVYNabdQ=="]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataTXT ["docusign=d5a3737c-c23c-4bd0-9095-d2ff621f2840"]}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b127444bb25b3d2947e972d3ba0c26dd.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b127444bb25b3d2947e972d3ba0c26dd.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b127444bb25b3d2947e972d3ba0c26dd.show 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b127444bb25b3d2947e972d3ba0c26dd.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b127444bb25b3d2947e972d3ba0c26dd.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b127444bb25b3d2947e972d3ba0c26dd.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 60351, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "_mirrors.hackage.haskell.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "_mirrors.hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 3788, rrData = RDataHINFO "ANY obsoleted" "See draft-ietf-dnsop-refuse-any"}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b28818a1324f6e327b4c31668e0e6d9d.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b28818a1324f6e327b4c31668e0e6d9d.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b28818a1324f6e327b4c31668e0e6d9d.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b28818a1324f6e327b4c31668e0e6d9d.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b28818a1324f6e327b4c31668e0e6d9d.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b28818a1324f6e327b4c31668e0e6d9d.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 26541, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC 
= False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 15) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1357, rrData = RDataMX 10 (Name "microsoft-com.mail.protection.outlook.com.")}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b473fc47ff9ce44a72ef479cbb6fa861.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b473fc47ff9ce44a72ef479cbb6fa861.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b473fc47ff9ce44a72ef479cbb6fa861.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b473fc47ff9ce44a72ef479cbb6fa861.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b473fc47ff9ce44a72ef479cbb6fa861.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b473fc47ff9ce44a72ef479cbb6fa861.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 17, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "google.com.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0xacd913ce)},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0xacd913ce)},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0xacd913ce)},MsgRR {rrName = Name "google.com.", 
rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x2a001450400e0808 0x200e)},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataMX 50 (Name "alt4.aspmx.l.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataMX 10 (Name "aspmx.l.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 59, rrData = RDataSOA (Name "ns4.google.com.") (Name "dns-admin.google.com.") 155321650 900 900 1800 60},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns4.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataMX 40 (Name "alt3.aspmx.l.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataMX 20 (Name "alt1.aspmx.l.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataMX 30 (Name "alt2.aspmx.l.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns3.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns2.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataCAA 0 "issue" "pki.goog"},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataCAA 0 "issue" "symantec.com"},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataTXT ["v=spf1 include:_spf.google.com ~all"]},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns1.google.com.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files 
/tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b64d9926db52e425c8747c143b9266c1.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b64d9926db52e425c8747c143b9266c1.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b64d9926db52e425c8747c143b9266c1.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b64d9926db52e425c8747c143b9266c1.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b64d9926db52e425c8747c143b9266c1.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b64d9926db52e425c8747c143b9266c1.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 60952, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = True, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 5) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 148, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 4096, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b6d7e514db902da80c8a8e124673139b.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b6d7e514db902da80c8a8e124673139b.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b6d7e514db902da80c8a8e124673139b.show 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b6d7e514db902da80c8a8e124673139b.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b6d7e514db902da80c8a8e124673139b.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b6d7e514db902da80c8a8e124673139b.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 56763, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.thelongestdomainnameintheworldandthensomeandthensomemoreandmore.com.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "www.thelongestdomainnameintheworldandthensomeandthensomemoreandmore.com.", rrClass = Class 1, rrTTL = TTL 35622, rrData = RDataA (IPv4 0x5e7e2a32)}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 4096, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b6e25f429f7bfb60a8a3b473e8d2b6a2.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b6e25f429f7bfb60a8a3b473e8d2b6a2.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b6e25f429f7bfb60a8a3b473e8d2b6a2.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b6e25f429f7bfb60a8a3b473e8d2b6a2.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b6e25f429f7bfb60a8a3b473e8d2b6a2.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b6e25f429f7bfb60a8a3b473e8d2b6a2.show 1970-01-01 00:00:00.000000000 +0000 @@ 
-1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 31, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "debian.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 0, rrData = RDataNSEC3PARAM 1 0 16 "\SO\192\162\220\222"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 0, rrData = RDataRRSIG 51 8 2 0 1497614849 1494156550 53598 (Name "debian.org.") "\186\DLE\205\fEe\vUb)\EM\156\201g\236\165r\226\178\136\STX~\US\v*Y\150\196\253\217c\236\&0fMO\157\184Q\227\170\CAN\190q\190\252\241\220d\230\249\175.\EOTw\f-n\164G\223\159P\149T\152p\217\147\238\178\208\147N\220\167c\224\212\138dI\169\217m\167\187\196\144\206G\224D\198\205d?]V|\134\226\r[n/\198\150\228G\198\v[q\FS\221`\223\RSP\218\DC4\142\138\"n\167\176{*S\248\\\129\141Z{a\152k\163b~\216\133~*\175\235IP\242\173\156\142ZOX\229\174\157\187\\\224\212\216~r\DEL\158|\135\NAK\135\130\233C\217\155bU\128\DEL>\177\150\161-u\177\227\191"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataSOA (Name "denis.debian.org.") (Name "hostmaster.debian.org.") 2017050804 1800 600 1814400 600},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataRRSIG 2 8 2 28800 1496661232 1493203448 53598 (Name "debian.org.") "\203\GS`3I\r\228\EM{\ACKZ\SUB\233\185\&2\154\159!\194\183\222}5\143Sz\239\240\186\240\129\251R\147\215\251\167 
@\DC1H\227\207\140\215y\236\202\208+\NULi\168q\183\NAK\r\DEL\159~x\GS\137\150&\NAKt\140\a2\GS4\SO:\174s\DC3\DLEHO_\v\147.x{8\156\214\SO\128\200\163\158\149/\175\190[B\149\175^\229\ESC\NAK\164\216\247D\243\t\SOP!?\255\DC4\ACKej\145\176\f\EOT|\ACK\166w\233\194l@Gp\188\208\238_\179\219\&0\CAN4\229\GS\148\196\135\200r\234\175\&3\rB\211\184R/i\150\SOHA\207\222iri\177K\242\NAKd\241\230-\STX\DC1Eh\222\199\250\157$7\SUBf\179\149\150"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataRRSIG 1 8 2 300 1497698216 1494238616 53598 (Name "debian.org.") "'\233%\DC3)/\188\165\236\&4\DC2f\231Q\231J\132\178\142\222\232\182\133\DEL6\220%\212\ACK\247\184\167\vb\175\CAN\232\162\170\190FOM\RS\f\235_\144W\ETB\215\158\244\223\vJ\210=\148d\142|\220\215\&9\220\204\233F4~=9J$\a}\177p\148\255\223AIP@\137\DLE\203;>i:\250\SUB\167\159\164Z]\149\150P\US/\157\198\181\ETX\165N\DC3\197\235\140\139I\US\238\184\193M\180\132\141{\213R\251\219&8\252"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataRRSIG 15 8 2 28800 1496661232 1493203448 53598 (Name "debian.org.") "\135\164\225\200\246u(\DC1\129\NAK\224\a\141\205\&0\206\201\221\156f[\ENQ9}\\\236\157+\SYN\128\170\197f\ETB\195Z\169\209\240\189\185\&8:\EM\209\219\128k0r\\Xc\NAKNSB\233\221\241\174\139\135\253\246\130PW\129\\\142\198\223\DLEpL\NUL\188z\252\169(\226kzu\225)\238\147n\227c\190V\236\249\213\182b|@\255\169H\167\128\145\184[\220\155K\238U\235\190\tJ\ETB\175[Ua\134G2\134\225\v6mx\179$\243\186HSK]\203\"\158\167r\SO\174I\f9\214o\170J\185\128\149y\vo\175\211Py\DC3\DC4\175\US\237\130\&1sE\229bt&o~\SUB\196\134\243\DLE\231\n\253\234A\180\175"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataRRSIG 28 8 2 300 1497698216 1494238616 53598 (Name "debian.org.") "h;I\216\162\211\209\224\SOHa+\"\a\222\245\241\SIL\134-\225\200\170\&0\149\148*\234\208~4UG\135M\DC1\rK\US\181M\158D\220+\205\215q\229M?\199x\250 
5\STX_}\167\&3\207\214\DEL\ETBAY\216=\252D\SO\174y\142\147\185s\STX\135\SO7^V:\160\153H/\151 \220\146\155\132\&5\158\143\184\164h\SUB\130\176\CAN\a}2\196\162\a\SYN\223f\130\240\211~b4-\252\151\223\217\DC2\223{z{s\134\251\171\ESCG\216\164\&6\223\211g\152\218\241\&88\SUB?\213\133\218@\175\255\EM\192\238\147\&8af\242\DC4B\DC3\220\140#\153\174\182\166\ETX\177%\DC4\f\240BC\195qL\228\196=\DC1\137\251\DC17"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 35 8 2 3600 1496661232 1493203448 53598 (Name "debian.org.") "\128\242P\211\136<\187\164D\208\213\195-\130\230q\153\156\180\161\\?)^\128\\\247\200\224i4\NULu\230|\t\215\DC4\149\254\ETB\192-\186\173tw\213\DC3\184\228\223:\202\&3\208 \179\165\161*\254&\149\138S\192.E\218\EMp\156\\\165\b\"\179v\231\132\188\222\DEL*\EM\155U\US#@\ENQh\241K.\223\&6\131\162\226o\203!\152$;\"\128\DC1B\231\187HZ\209\&3/\208\135?y\n\178c\177OX\CAN\202\161)\161\196\245\199\DC16*\210\248\174Jg#\247\161\145\177\&1A\243\212\200r\204\142\&7\165\GSl\134+-\ETX\129L\162-\154\207\SO5\248\145\252\162\v\149\169Z\154\240\DC4\242}i\182\216\187CL"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 6 8 2 3600 1497698216 1494238616 53598 (Name "debian.org.") "\141G\148\&8Ru\215~\167\220\201\a\234\142\f\136?p\174d\136E\130\135\136\&7vA\212\"x \188\189\SYN\166x\158y\236\DC18\163\241\217B\169;\200IUF\136\138\253A2Fq\150M:\174C/Y\GS\194XD\135\216\140P\DC1U\187\STX\163DM\182\205\243\185\170\165C\209,bp\195\246\236\162)\180\209\ETX\216\232\219\241<\219\158S\EOT+$C\250\174\b\237\155:\233\177@\208F\192\198\131\132zsO9@\f\247\138\SOHjM>ajU\179\t\n\222\203b\DLE\253\224\&4\240\DC4N\146ua\148\208\185\183:}\156\148\153\171\ve\180\SI\229\193\140\232\SOH\182<#?P\STX\164#3\167.\163\143\&9\205"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 0, rrData = RDataRRSIG 65534 8 2 0 1497615664 1494156551 53598 (Name "debian.org.") 
"\194\&5v\DEL\182\184\138\201\232=\183\ETB~4\172\DEL\168B`e\SUB\128\222\190L\141\154\151\236\\\176/\NUL\209a*:c\145`\164\t\179l\206\225\162\RS\208f\159S\RSLz\182\n\a\186\207\208$#>+\150\&5\151\129\220L&\173\188\141BA\SOH\130\136\&6\DC1\141\220 \134~\149\144=\229\v*\246m\SI\DC2\207`\180\237\SO\166\167\226y\186\139\182."},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 0, rrData = RData (Type 65534) "\b\EMW\NUL\SOH"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 0, rrData = RData (Type 65534) "\b\209^\NUL\SOH"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataDNSKEY 256 3 8 "\ETX\SOH\NUL\SOH\247\254\167,h*\ACK\241\143,\SYN)w\190\177I(\202\ACKiV\246nn\SUBM\bu_\239n\194\193\236\214is\140\SUB\SO\182/?\162\140\b\182\187\179\rD\223\139&\214\165y\236\r0\SOH\137\249\252z\EM9\247\165(\EM\143\">'\230\GSX?\188t\228r\185\168\&5\173\SI\180\226/\203\140\154D\180\239\207\&4\234\ETX\236\252\245\NAK\tK\199\138\243\NAK\177fwG^\253S\211\192B\186\151W\252\SYN\199\206\&4\FS\243M"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataDNSKEY 257 3 8 "\ETX\SOH\NUL\SOH\151\229\ETB\218\252+\173\&2\GSK\140\207\236\249\f\n\187M\129\160\216\169\186\152p\n\CAN\161\204}\153\207\241\ETBr\128WZ\237\CAN\245\151\184\&7\240\226\US^\243\&0\133\140\141\231 \139z\209\150\227x\182\191V\187e\SYN\169#\162W\222\157K\DC4\234mJ\172\130#4\146\f\175\191\255 \191\251\182\244A\\\134V\197\218\186\194\NAK\150L\173B#\222\242-\209\SIP\250b\DC1\231/\151\a\252\SYNC\160\DLE\US<\251liLSc\SOH\185,\247\218\143\239\181:\243\164\191I\165\157\184\177\134\187O\150\179GE\ESCaU02qR!\199R\172\STXita\157w4h\219\218\241\206\210v[\165\193}:\STX\DLE5\157\166\144\EOT\194\247\231\164\191-\222\186\\\144\222=\224o\230\194J\189\211\160\241\240\242\163\167\238\f\SOH\238L\159\DC4\132\208\171F$\161\169\GS\173\224w\167/\191\142\146wY\235\245\231\&1<\252\234\148-\NUL\137\223y"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, 
rrTTL = TTL 3599, rrData = RDataNAPTR 10 0 "s" "SIPS+D2T" "" (Name "_sips._tcp.debian.org.")},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x200104f80001000c 0x15)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x200106101908b000 0x1480014)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x200141c810000021 0x210004)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x2605bc8030100b00 0xdeb01660202)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataMX 0 (Name "muffat.debian.org.")},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataMX 0 (Name "mailly.debian.org.")},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x599e704)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x801f003e)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x8259940e)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x8cd3a6ca)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x9514040f)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataNS (Name "dnsnode.debian.org.")},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataNS (Name "sec1.rcode0.net.")},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataNS (Name "sec2.rcode0.net.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files 
/tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b90b636aee4f847240da4e2606eb64af.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b90b636aee4f847240da4e2606eb64af.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b90b636aee4f847240da4e2606eb64af.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b90b636aee4f847240da4e2606eb64af.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/b90b636aee4f847240da4e2606eb64af.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/b90b636aee4f847240da4e2606eb64af.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 8824, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.thelongestdomainnameintheworldandthensomeandthensomemoreandmore.com.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "www.thelongestdomainnameintheworldandthensomeandthensomemoreandmore.com.", rrClass = Class 1, rrTTL = TTL 35774, rrData = RDataA (IPv4 0x5e7e2a32)}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/bc73346b31d530d2fd1c40100e0abdb3.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/bc73346b31d530d2fd1c40100e0abdb3.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/bc73346b31d530d2fd1c40100e0abdb3.show 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/bc73346b31d530d2fd1c40100e0abdb3.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/bc73346b31d530d2fd1c40100e0abdb3.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/bc73346b31d530d2fd1c40100e0abdb3.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 49467, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 3, mhNSCount = 1, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 12) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 3376, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 17605, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 785, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")}], msgNS = [MsgRR {rrName = Name "dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 885, rrData = RDataSOA (Name "n0dspb.akamaiedge.net.") (Name "hostmaster.akamai.com.") 1494154963 1000 1000 1000 1800}], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/bd9f84e89a040ca3c405c8b0c61e7b95.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/bd9f84e89a040ca3c405c8b0c61e7b95.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/bd9f84e89a040ca3c405c8b0c61e7b95.show 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/bd9f84e89a040ca3c405c8b0c61e7b95.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/bd9f84e89a040ca3c405c8b0c61e7b95.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/bd9f84e89a040ca3c405c8b0c61e7b95.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 38134, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "c.f.f.c.7.f.e.0.0.6.b.7.5.a.4.0.4.0.1.0.5.2.8.7.1.0.8.4.1.0.0.2.ip6.arpa.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "c.f.f.c.7.f.e.0.0.6.b.7.5.a.4.0.4.0.1.0.5.2.8.7.1.0.8.4.1.0.0.2.ip6.arpa.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataPTR (Name "ghc.haskell.org.")}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/c1643eda6cc3d3ed3cee45c25027e5f4.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/c1643eda6cc3d3ed3cee45c25027e5f4.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/c1643eda6cc3d3ed3cee45c25027e5f4.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/c1643eda6cc3d3ed3cee45c25027e5f4.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/c1643eda6cc3d3ed3cee45c25027e5f4.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/c1643eda6cc3d3ed3cee45c25027e5f4.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 18218, mhFlags = 
MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 4, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 3478, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 19714, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 890, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")},MsgRR {rrName = Name "e1863.dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 10, rrData = RDataA (IPv4 0x5c7ab450)}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/c19a30769d1fac0f4bc2e4e20681ef4e.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/c19a30769d1fac0f4bc2e4e20681ef4e.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/c19a30769d1fac0f4bc2e4e20681ef4e.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/c19a30769d1fac0f4bc2e4e20681ef4e.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/c19a30769d1fac0f4bc2e4e20681ef4e.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/c19a30769d1fac0f4bc2e4e20681ef4e.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 1121, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ 
= False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.google.com.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "www.google.com.", rrClass = Class 1, rrTTL = TTL 129, rrData = RDataA (IPv4 0xd83ac9e4)}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/c2b9983a237cc1d0085a7579dd54f189.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/c2b9983a237cc1d0085a7579dd54f189.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/c2b9983a237cc1d0085a7579dd54f189.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/c2b9983a237cc1d0085a7579dd54f189.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/c2b9983a237cc1d0085a7579dd54f189.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/c2b9983a237cc1d0085a7579dd54f189.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 30614, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.google.com.") (Type 28) (Class 1)], msgAN = [MsgRR {rrName = Name "www.google.com.", rrClass = Class 1, rrTTL = TTL 186, rrData = RDataAAAA (IPv6 0x2a00145040070816 0x2004)}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/c4506200f514568ec9a2d42663a1e77d.bin and 
/tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/c4506200f514568ec9a2d42663a1e77d.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/c4506200f514568ec9a2d42663a1e77d.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/c4506200f514568ec9a2d42663a1e77d.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/c4506200f514568ec9a2d42663a1e77d.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/c4506200f514568ec9a2d42663a1e77d.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 25, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "ietf.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataSOA (Name "ns0.amsl.com.") (Name "glen.amsl.com.") 1200000348 1800 1800 604800 1800},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 6 5 2 1800 1524762969 1493223555 40452 (Name "ietf.org.") 
"P\143\164/\211\&9Cmr\144#p\138\f\131\ESC\220\RS\DC2\153v\209\SOHv\SOL\153\223\227\ETB\186\NAK\180\&9\189A\138}\160]\220!\212\244\t\131\DC2\176ag\RS_\136\250\233\159\&9\250\239\EM;\NAK_\156\171\212duq\194\211\211\207m0t\194;\246\222^m\249\189\252\131\210\189\175\ACK\144\199%\179J\206\190\139\ACK\220\230Y&\211&b\202'\227t\152\SYN\184\173\254\247\235X\243\197\&7\174\205\DELD<\231Gt\215o\228\ENQ\164\ESC\192~\166y1\232>n\202\217\252>\200\229\233\133\&7&\180\138\134\ETX\160\195\226\\\198\232\253\250@\154G\NUL\DC4\175\197\145\250\USa\143\DEL\149,\160._\DC2\162\216\242E\174:\"\236;\255\154\147\219iF\"\245e\236\&6\134$a\241c\217\DC1\193c\182O\160\238XL\203\230K\228\DC1\135\135\&7`Y\248\EOT2\156.\146\SUB\158\&4+\238\STX\142\182\164\FS\ACK&\143\186*F:@\222U\149"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNS (Name "ns1.mia1.afilias-nst.info.")},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNS (Name "ns1.ams1.afilias-nst.info.")},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNS (Name "ns0.amsl.com.")},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNS (Name "ns1.sea1.afilias-nst.info.")},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNS (Name "ns1.yyz1.afilias-nst.info.")},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNS (Name "ns1.hkg1.afilias-nst.info.")},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 2 5 2 1800 1524763120 1493223555 40452 (Name "ietf.org.") "\196-\224\241\235A5\213\186+\136\SUB\201\171\186f6Y\163\US\GS\211\152@R\154l\166\FS|\208\246F\251\185\180\SI\ETBR\130\131~\200\214\RSl\144\157\a\137\216\236\&0\ETX`\188\DC2\137t\ENQ\188A\255\144Uj\179\151\205\SO\197Kw6\169\157\133i\EMWA\GSo\135\224HP\US\174\243 
\219\208\a\248\187\220\218\240\ACK!\132R\243\150\&2P\EM\180\161\ESC\179%\ETXiBx\149\160\156\155J\SOH\239f\201\213)\173\240qD:&)\146Kf\202\195\b\175\161O\220\DC2\213\\*\209\200\EM\228\173\208\163q\154\EM^s\138\191S9\191\232\170\181\232 \131\227\131\151e?k\DC1\173\205\f\197/\183y<\ETB\205\170\244\143\133\232l[\149\&1\SYN\202s\220\&3\135\173+\STX\136z)=d\226>\188\139\151T_?\USe+\EOT\196\SYNC\196\128\144Du~9A\136*\154\219V\DC2\195@\DEL\209>\135\182\150\131\149~\132a\141\138"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataA (IPv4 0x41fc62c)},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 1 5 2 1800 1524763086 1493223555 40452 (Name "ietf.org.") "]t\133\144\&7}\NULV^\200\165\FS\187[u:<_\238\172\&6\217(}\226\b]\151}ve\180\186)5\255\FS\ESCj?>_\202\226\210\185\165\202\128\v\173\204kn\SUB\242qm\\\203Ce'\242\222\228r\223\243;\ESCa\DC3\NUL\165\&6wAf\196\"\253\200\231\198\140\200a\227\&6K=?\v\FS\162\245lB\161\160\\\ETX\f\r\176M\218\134\US\176Fi\208\174\209\202P\160\DLEC*\200\&8\191ou\190\DC3\226\v\156r\144\DLE\171\&2r=\187\157\v\184\169\235\163\ENQm\242U\f v\aOmR\142\227K\v_\n\239\249\177\161\217\134\&0\250\208\221\130\DC3\136\189\139k\228\210.em,M&\237vU\227\214\193\US\148|\198\DC4|\128,\177}\133\144^+\148\143\&6\197B\ACK[\204\DC3`\147\134U\219\170#\132\199\183\196\154\179ZGQ\247)l\187\&2\SYNf\237\202p\241\253\230\145\251j\NUL\224\146=\ENQf\SI\180"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataMX 0 (Name "mail.ietf.org.")},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 15 5 2 1800 1524763100 1493223555 40452 (Name "ietf.org.") 
"\ENQ>\186\\\164\202\156\DC1\201\251i\187\244\236\&3\189\195?,4t(\165r\\p\221\195f_\186$8M\b\147L.n`v\248\167\&5\220\240\156\179\&9m\242<2/\203\229\168}2$k\231\NUL\247R\\\155K\186\134U\ETBg\165g\196\190|#\US\176\ACK\192\242\ETB,\FS\151\231\217\NAK\143z\194j\138\&8s\164\US\252\247i0\RS\201d\DC2\170}\204;\NUL\187\ETX\249\DC4\219\165zg\DC3\241\201\223mp\215Yb\176\242;w\137ZV\193\192\243\211\SO\157\235y<\DC2;<16n\190\202\147\152\133\206\144\203]\148\204\224\172X^\211\200\192\225\223\128M\169\192<\253W;E\241\EM\253W\173\225$\226\159\t\195\200\222^\242a\196!\176x\149l\251\141\227ya8:\197\STX0\238\230\DC2\DLE\187\175\DEL\209\188E\133\131\179(\147\214\130ta\ETBK\188uQ\178\199/\228\a.\NAKi\149\172\NAK-^au\ACK.\215j"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataTXT ["v=spf1 ip4:12.22.58.0/24 ip4:64.170.98.0/24 ip4:4.31.198.32/27 ip4:209.208.19.192/27 ip4:72.167.123.204 ip6:2001:1890:123a::/56 ip6:2001:1890:126c::/56 ip6:2001:1900:3001:0011::0/64 ip6:2607:f170:8000:1500::0/64 -all"]},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 16 5 2 1800 1524763104 1493223555 40452 (Name "ietf.org.") "\136\157Fpx\200\192\178J\227\"cT\254\180\217r{\216\228AQ\EMsM\253 \180,\248&\149\220\196:l\153\202+\SYN\224\CAN\156=\SO\174\241\168\254\215\181\180\194N+\171\197e\156\NULE\141EMPg\247[\DC3\202\SUB\248\180\EM\144K\SI\210\240\129hhi+G\204\SO\152A\221_F:\FS\253}\DEL|\141pk&\GS\160\130\254i9M\STX\162\231&\142\222fW\210A\149\221y\185{\GS\156\237!\212NI\DC2\144\226\197\146\vJ)\212\134l\SO\243\RSu\178'\181y\NAK#\224<\233\230\186^\253\222\208\215\SYN6\151]oI\189\172@\DC1\129\195ga\221\182\ENQ\189\164\244\191\173\163\247gCl\131\177\196\164\226\184D\229@\132s\131\161\162w\165\"2\164\168;\135\199\225\251\&7&\134\194\177g\153\rJ6\164\"\220\136\160g\207\129\184\200\167\ESC:\238\EM\154y\177\&2\242\246\238\149\228\DC29R/\211`vs"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataAAAA 
(IPv6 0x2001190030010011 0x2c)},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 28 5 2 1800 1524763044 1493223555 40452 (Name "ietf.org.") "F\221\216\166\163C\171\ESC\210N\225! \155%4%@\ENQ\251\170\179\SYN\187UJiQ\214\&6\169\b2\198qz\145gY\240^k\232\247$\237\&9\156D\169\CAN<\138\128O\174\171j\207\NAK\129\254NH`nlY\SUB\NUL\238\206\176\156\aN(\186\&9\199\198C\140g\EOT\179\210\232Tq\203)\226\160\218\163\&9\203\NUL\133\223\NUL4\239\181\133\DELgh\143o\146\157H\DLE\NUL\168yJwc2\201\"\254\230q\201\bei\185\197\230\156\141\GS;\159\204\176\"\242\ETB\210T\217\254\236#-'l\187t/#'\236\183 \174/\181\200\160\198\SYN\fw\153\244\207?F\v\NUL|,Q\133\176\185\212]\207\ETB\DC29\148j\189\229\229\177\DC3\219|\244\146\215\STX\208\DC4C+\139\192\"\130\150\192\149\175\188\196|o,\148%\154\v\135\224\172ko\228\v\233`~#\151p\128\147\134\170\235\133\208Ekl\245r7*t\254\193=?\180"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNSEC (Name "ietf1._domainkey.ietf.org.") (fromList [Type 1,Type 2,Type 6,Type 15,Type 16,Type 28,Type 46,Type 47,Type 48,Type 99])},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 47 5 2 1800 1524763055 1493223555 40452 (Name "ietf.org.") "\ENQ\218r\154>Lin;\254\238F\NAK\ACK6\r\184-\192\237[\STX\250\184\&8\232\202\154b\t\204\129;\221\NUL\DEL\v*!8\178\169\201\ENQ_\199Yn\162\137\177$\a\159M[,\202U\DEL\155P c\254N\197\&7\233Y>)f\224}D\142\146\166\191\136\226)x\168\237I\133\194\233L\144c\DELU\185\255\158\DC2d\187\155\211~c\186\&7\169#\233\&0r\DC1\SYNg\181\196\179\141}V\ENQ\142\SUBW\165C\135\ETX\EMV\US\234%\243f3\213D\141\&1\DC2\195%\SYN\195\253\ENQ\239Q\a\245Z\RS\159'\223\159f)\185x\168\GS\a\195q@8?\190\150^\215$\140$\221\211\EM\144\246MK\vp\172+\158hE\135\254\b\164e\175\&1q\SYN\205\146|\178u\255\198\155\&8[\237\247\211\210\253\157A\222\FS\FSkHv\156\220\172!p7pv\174s\153\248\f\169BVY?\246\129b\250\255\DC4\174\238\EM\243h\237:`\146"},MsgRR {rrName = Name 
"ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataDNSKEY 256 3 5 "\ETX\SOH\NUL\SOH\208\196\t\168\199i8\223J\131Sc\159\SYNp\SYN\161\212\FSO)R\a\a>\b|\184\223\168\203\226\186n%\128\171Jd\183\189\236\&8\t\231\243P\172\209\ACK\EM\t\171\233\242\175R>\187q\250\217R\136\217\217\193\145\139\&5a\130C\170g\219\FSi5W6\229\203\&3\148\240\237U\142T\135\RSyp.\184}\166<\136\132W\163!%\133,[\167o\152\219\t\225|\STX\128\216+\160\t\151\134\131\EOT\137a#\EOT\US\176\141\134P\228kY\FS\DC1JUp\STXId\226\n\215]\GS\203\158\&9\228\149>.V\US\222\NAK\150\230\225\GS\248\178\180\211\217.\179\155\133@\135O\169\&4\180\209:\166\DC2\232\RSu\214S\238\175\166\198)\207\161\161\&3\SI\209\244\DC1q[\147\221\170\166\195\159' B\201PZ\fq\191\193u\DC2\224o$\222\186\177e\159\ESC"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 48 5 2 1800 1524762971 1493223555 40452 (Name "ietf.org.") "R\220uL\178\139\238\216\181br\231\GS\128\250\205\243\&9d\165\185;Q\151\&6+n\a\148\169\152Fy(\244R\FS\131\ACK\137\229\167\199>R\RS\192\238\NAK\215\241m\"\200\174\132u\FS0C\DC4]\249\244\198.\138\221[K\164\SOW\SI\SYNcV\146\196\229#4\173\216\201^\128~\tHcSY\131\SI\229\238\191\GS\161\180zV\167]\216\216\163\CANb\211\ETX\\\SYN\234%\152\&4\231q\200H\129\245\253\224S\248\no\GS\195D\149&\ENQ\152HRi`\181\228\189\214\203\176\CAN\209&,o\239\222p\147\203\213o\163\179d\161\229\132\170\r\ENQ\251S\168M\232G\151\189\206\&7M';\SI\201\136\DLE\168\212m\200c\210-H\n0\ESC\157W\DELg\NUL\240\"XE\228\243W\236\170P\202\167U\ETXm\166\&9\162r\151\134\223\179\DC3=T\STX\158\180\226`\131S\DC3\235\199;\128B\149H\170\134\ENQ\253\156\166\141\236Z\161\228\186\175\DC3"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 48 5 2 1800 1524763006 1493223555 45586 (Name "ietf.org.") "X\165A\218{\a\DC1\163\238\178 
=\153\220\241\150\231\ETX\164\230\FS\199\233\r\218\t5\ETX$\182B/^\215\235L\175GP\133\153\230#\204(\179-\243/\188\ETX\192=\148J\n)\160\a\247J\251\133\209\223-f\175\DEL\211\227\170\197\SOH\ACKo8\247_\DEL\189Gjf\251X\166\221\207\&7b\142k\208-g|\SO\169\128\f\221\139\130\181\128\185o\220g\151\219B%\204\v\237\129\&7\249\&4\149\217\205\209\160'=\211 \187\152i\213\213\192H\178\153\&2\161\181\128\189y\132e\136\DEL\235\244\180\207U\245C\128\207\230lf\187\196\128\&8\250\SUB\177\224^k\ETX\FS\195\&79M?\189\NUL\161\178\&5\183\228\DC3Bxy\244\221\153L\145\154~0\173o\130\160\155\213\254\&3\ENQf^\255\157\188\&1%\146\US|\232\SOH\n~\ESC\239d'=\245\&7\206\a\ETX\SO\245Q\ESC\227\242Vj2\182\175!\206\223%\231\ENQ\172`\222\b\209\EM\234\172\&2"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataSPF ["v=spf1 ip4:12.22.58.0/24 ip4:64.170.98.0/24 ip4:4.31.198.32/27 ip4:209.208.19.192/27 ip4:72.167.123.204 ip6:2001:1890:123a::/56 ip6:2001:1890:126c::/56 ip6:2001:1900:3001:0011::0/64 ip6:2607:f170:8000:1500::0/64 -all"]},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 99 5 2 1800 1524762978 1493223555 40452 (Name "ietf.org.") "\b\DC2k\193\248\172\212\218\130l\131\247\202O\248\132\254\237\158\178\180\167\229X\184\v<<7\152\233\178v\172\220\161\158\171\232\254\165\NUL\147\FS\STX\251\&1\189Mc\230\194\197\192y\140\221tc\SYN\USC_>T:\STXF\135Z\225\&8\130\FS\ETBhZ\178Hc\GSS\171g*t}\255\137\146\217>@\230\232\141\176\SI\185\202\202Ad\EOTA\133\240Vk\204^7\212\158\&9\135\CAN\226\188!\145\129\132\213%\234\207\219\tTsN\135\178\SYN56\DC3q\216\192>,\197\194K\198\229e\132\231\b\232("},MsgRR {rrName = Name "nasa.gov.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataNSEC (Name "3D-Printing.nasa.gov.") (fromList [Type 1,Type 2,Type 6,Type 15,Type 16,Type 46,Type 47,Type 48])},MsgRR {rrName = Name "nasa.gov.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataRRSIG 47 8 2 300 1496749007 1494154805 34113 (Name "nasa.gov.") 
"\bG\199@\n~\164\158\155\214\EMa\130\134\160\184C\250\147\156z\197q\165\249\DC3\155\250\b\NAK\EOT>%%3\132%[\n\179\137vh\135\205]\SI[\187\181@\190\a\168\178qW\252\245\229F\177Wt\188\170g\133\211A\236\154\SOH\159V\DC4o\NULA\EM\200^\182+9\170tR\220\&5\236\152em\222\161Y\159\&7c\211y\162R\228y\128,dz\156#jR\168&\214^\166\147\227\223\170\135\131b\156\226"},MsgRR {rrName = Name "nasa.gov.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataDNSKEY 256 3 8 "\ETX\SOH\NUL\SOH\188\&6\253\240\DEL\139M\ESC\154>W\159._q=\206\151\215\162\"\182M\218\218\rj\171\188A\172\197\170\213k\171\238\SO\206a\171\145\144\185ZH\235\212\145\&7SN\133\156]6R\DEL\141\216\236g\128\222\159G\ESC\205z\"A\176]\216\&35hB\221\187\248Qd\146\ETBt\221\151\168\EMg\159\156x\155\218\148\&8\143DC8\223\136:3\202=\226\198=\157)b\169\172R\128\175\DC4V\216\232\192\232\EOT\231\203"},MsgRR {rrName = Name "nasa.gov.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataDNSKEY 257 3 8 "\ETX\SOH\NUL\SOH\193\145\233\ETB\182\"R\233g\EM2\179\191\171\161h\159\143*\247\169q/\142&\SYNM\202V\224\SO\221\133\178U\223\DC3\175\158t9\220\128\ENQP=\FS\209,\133\130\159s\227\136e\234\195\136\ACK\140H\ETB\231\176\DEL\147\252c\SOF\FS\151\174\172v\140\&8U\139\137y\228\b\EOT4\DC2\136\174\255F\205\SO9\187a\190Q\ETB\234\182\DEL\ETBm\NAK\209y[:k\150\161\DC4>\134\131\ETB\245\167\199\134\226\244\222\214G\222D\129&\DC4\195\NAK\RSr\188\174\195D\217\129A\236 \DC4\NUL\200qzAwt\210\180\ETB\160|/,\167\"\193\137\188\154\185\165\DC2^]l_Qq\ETB\166\242 P\154\208Zm\SYN\\\229@\229\154F\229T@\254\&8\220\134t\237\156\220|\SOc\EOT\239\SI\240\FS\\']\185\157L\190\206\133\130\208\162\143\220\156\180\152y\US~\STX\207\EMA\173\235\ENQ+8\DC1#\236\ETB\162\161\"\f\SOH\168\177\180\201\138\t\STXv\133"},MsgRR {rrName = Name "nasa.gov.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataRRSIG 48 8 2 600 1496750400 1494154800 34113 (Name "nasa.gov.") 
"R\166/W\NAK\SYN\253e\153\EM\142\201m\170\246\205g%\230\\I-\134\251T\170\228\154\221P1\DC1\NUL\223\234\245\171\243\190:\225\133\218\&4\247\195\ACKU\180\NAKJ\147\140x\155O\250VK\155\205\201\&3\137\&1=\180\153\170A\229f\SO\ENQ\164\SOU\221\\\178RM\DEL8\217\146\245D\161r\182\140[6\247\164,qX\202\247X\173\193x\222\210!9i6n \192 \196@u\177fi&4\209b=\225\196\248\160\223\nT\250\131\254\183\254\221\202\RS\140|\134\171\252\EM,\225\234\199\247\DEL\149/\189\152\214\215\218\186\240\&2[P\152\208\180\US"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataNSEC (Name "www.example.com.") (fromList [Type 1,Type 2,Type 6,Type 16,Type 28,Type 46,Type 47,Type 48])},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 2 8 2 86400 1495006082 1493148678 21214 (Name "example.com.") "\a\223\141iM\f\bo\160\139\192`\255Y.\166VO\178\DLED\161|8\STXB\252\SYN*\164\129\ESC5\DC3\v\\ZV?U\216\135\DC3\f\248M#\SYN\142\164\143\217X\233\ESC\159\NUL_\DC4n(\157w\FS~\145R\178\166\229J(\159\209\184|\251\220\238\237SO\139\209\147\220\&0Xv\165\201\GSH \251\&2\135\154J\194\ETX\199' \RS\132bb\243\148E\168\158\172\US\228\ACK\241\174\169\201\239\225\243\&6\DC1\255](\153\161"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "a.iana-servers.net.")},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "b.iana-servers.net.")},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 28 8 2 86400 1494925308 1493148678 21214 (Name "example.com.") "\DC4\224\&0\233w\r&>\182\142\167\220\207t\ENQ\US\243\149\128\230u'P\233j\164\248U1\NAK\182\146\235&\ACK\DC4-l\163,\191n\153\248\r\ESCH\229\134\209eh\163\132\169\247\ETB\234\DC4\176\239\187\208w\US(\220\242\&5DpoD\207\167\r\GS\169\ENQDd\152\ETB\179\226xG\217\ETXm\177\196\182\215H\255\221\v\DLE\179\226\GS\SOH\171\167\174\155XX#q]\164*\DC3m\195\175\227\205 
\187\140\213\154\250Ea\253z\STX"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataAAAA (IPv6 0x2606280002200001 0x248189325c81946)},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 1 8 2 86400 1494974036 1493148678 21214 (Name "example.com.") "\DEL\147H\248t\140@\163\130\DC2\DC1\200\217\225\190\140\132\t\197\197C\164y\223\ENQ\228\195\201\153L\SYN$\179\177EE<\ACK`u\188F9\199[\165\171\177w\192\188l\159\229i\169\254\&4Q.\ACK\214\188\196\166\191AV\255\SO\232\&7,\225V5\255\NUL\186V\246\198\159\&4\164p\243\168+?rz\a\234l,B\208\224C\EM\142\221\181qw\154\164/&\144]\a>\188\223\&9\142h-3\STX\170\231\183Kr\164a\205\228"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataA (IPv4 0x5db8d822)},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 59, rrData = RDataRRSIG 16 8 2 60 1494968390 1493148678 21214 (Name "example.com.") "\149\vd\208\136+H\224\213\r\246\&2\CAN#\251\233q\SYN\157\220j@2/\157\242\215\173y\ETB\154\135\199\209R\253\\1\DC3Sof\139\143\229\251p\139O/\209\199\US\ACKL\138A+\134\a\187\145\222!n[\252\227L,\NULkj_\219K\163x\252_1\175\161\212.V\CAN\193\215\197\218\248\162\178\&1US\144NY\216#c\230\252\247'\135\160\237j\EOTf\152J\b\SOS+\213\242\SUB.}`@\130\133nm\225"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 59, rrData = RDataTXT ["v=spf1 -all"]},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 59, rrData = RDataTXT ["$Id: example.com 4415 2015-08-24 20:12:23Z davids $"]},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 48 8 2 3600 1495402611 1493547398 31406 (Name "example.com.") 
"&\236\\[/n#V\ENQ^\212\n\240\156\239\162\DELQ\211\241w\135\&9o\253\132\218\184s\218\210vn\180C\SI\255\SO\141<$\"\214r\141\n\173\223\248CCP'\132d\NUL,\DC1Q\134J\209\239\f\197e8\145->d\150\232=\153\191\&1\198\139\219\NAK\SUB\231\211\149t;\147[G\211C\190E\177,\GS\ETX\SOa\140\193\166\ETX1\248\EM\200#\\\130\231qF\177\230\140\243\164\SOD\162\201\165b~|\am\242\206j\197\STX\239\250\168\251~\USk\SOT\195Tbl3\136o\162\164\172\158!\128e\215y)\184\162zW\199V\246x\160\233\207S*s\ESC\240\203\&6\174|[\GSmz\NUL\249H\RS>\233\132\f[\ENQ\147\148Q\DLE\DC3\142\208c.\147t\133\229\140\rwTG\179\&4\ETB|&\163KP\a\182\178\174:b':\140o\DC2\f\143\167.S\143\FS@g\187?\209\132`Qf\FS\199\196\148\194\tX\233\166"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 48 8 2 3600 1495402611 1493547398 45620 (Name "example.com.") "\ETXNp\\\169\139\179\160\GS\DEL\225\247F\ACK\207\199M\197\218s\CAN\140B\194\199\218\SI*-\194\172\245\195d7\DC3\USa\236Q\225l 2E|\242\n~J\rq\234\175X%+\196y\189\&4\218\143vl\151\152\212e\223\134\135\145\166\226\201\213H\221V\254\216\177\242s\SYN\148?\192[\152\168\240\SI\130|\154\ACK~b\224wl\DC1g\236\b\149z\150a\SYN\\(\207\a\250\&8\154g\ETB\200\US-g`\EOTm\230$Jn\235\152K\222^\STXSf\136\130\CAN\248\170\179Y\243\152\&1y\143\DEL\\*]\r\SYN;\f\DC3\224~\ACK\150\214\130E\158\182\186\v\191j\131=\244\161/[\DC4\234ya\242\248\STXs\220\225K\203nM\v\241\252v\213\221\135\133\153,H8Z\138\DC4A\188\178`\136L\245\208\245\145\210c\t\154\159\244\146\"E\134\162\174\192\250\135\143$\143KT.\249}\SO\212R\215\140\214\153\DLE1r\247B\GS\161"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 257 3 8 
"\ETX\SOH\NUL\SOH\179\133\ETX\EM~.KtP\200%f,\202\DLE-@\197K\188\206X\250\228\166\SUB\181\RSp\ENQc+\135_\DC3c2\191\138\SO\152\214\222XM`\142\235\198\242\158\138\233\&6\239_\168\209@-~\219V_\DEL\131&\192\210\253\EOT\132_\157\129y\168Q\244W\238K\f\SUB\NULo\181\246\182\253\143Z\222IW4\186\164N\204\200C\131\196\&1P\163\182\188\165\215\208^\247\243\228\NAK\224\189!8\224\&1\DC4,B\EM\129\223\215\178\&1\137\218\151\231\247mLJ\147\135\234\237\203\132SG[F\155\158\224\DEL\205\234\&3\238qu\142\194#\NUL\145\&2a\130\SUB\160\203\234=\NAK\242)\250\212\DELzb\154\163\222?\194\149W\r\195\223\228\GS|\143\188s\217+\211O\CAN\174\168,\194\&2\219\&1\158)\EM\GS\202!\214> \249\141A\243\&2\f\"\250\196\&3\234Y\SUB\CAN\DELb\231\248G\NUL\129\129\166\STX\139\216i\136\197\149\189.\SYN\a}\217A\CANV\"\150\188ze\169X\248\147W\ESC*\162\DLEB\212'\230\228Q\128\218\162\252\233.L[ob\220\218{\ENQq\200\174\219\248z\190\216\227\156`K\ACK|\\!\DC4vbu\213W7\n|\174\USZK\139F\a\178\206\&5"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 256 3 8 "\ETX\SOH\NUL\SOH\169\210\128a\180\148\180\210W\214,\198\215\161\155\158\247\ETB\140\198l\223\200\157\181\187s\250\228\NAK\205Z\173t\251\ETX\246\234a\254SV\196HDL\SUBK'KH\221|\176\223\178+\219\237\247h\179Y\175\144y\171)\132\203N\221\160\&4\227\190\140\137\f\227\170g\142\161tw\247\ab\230b\169\238\211\218\191\161\145\183\196\134l\FSY\241\150\134\131\SUBc\240\140\DLE\243\172\FS\SO)\144\211\FSH\160\225\224\ETX\217\238\SO\142\233"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 257 3 8 "\ETX\SOH\NUL\SOH\157\SUB\170\237k'\170+')\235E\243i>f\178%\154\NUL\199\210\FS\219\244e\245T\SYN,\193\242\143\FS^\155uTJ\131T 
U\196E\ACK\227\208\SIN\130\157\&3\f\207X!\199\n-\ETBz.e\162\f+{P\148\&1U\208\254\133\230\249\DC1\206*\150\161\166\201\DELL\r\166\228\189}\141\188\204,Q\231\ESC`\SUB\188\161w\147O\210\209\152%-\244\165-\253c\162\232\&2\132\v\SUB\ACK\253\181\147\202J\215\200\DC4|zP\254I\ACK8\220\224\NAK\142U\171V[G\198\nx\254\184A\n\244[\153\215\229\186v\143\"\v\182\237\225\&6S\137\178\r\"\211\241\174\NUL\210\176y\184q\184>C\154\245\210\DC1\179\220\180\208\205\166Y\254%\200\247\158\238\248\185}\236\&6u\182\178\GSy\162xg\SUB\NULz\241\239\163F\NUL\NUL^c\DELw\198\&9f\f\215\DC4\144]`\164\213\148\149\174\USY\150\219i\132\&3\ETX\221\230-\218\231\131\227"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 6 8 2 3600 1495444812 1493601562 61845 (Name "example.com.") "t\209\CAN\EOT\EOT\GSwz\237\DC3x\151!\151\255\215\145\CAN\189l\233uxt &\218\137\ETB\205DU\RS\166\240\&7x8\246)\176>\138O\187j\215\148\250\142\213h\160tCR\208|zV&\167\243\130Y\233'\NAK\160k\184\209\SYNFZH\224]\138\235\181\253\219<+=\154\143jo2\213d\237\206\207\NULAY\151\130\f\nD;\137\ACK_\141\240vW\148i\196/\v\DC1s\221Sl\241Y7\STXy(Z9\209"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/e9036c06fc30e6d7582fe9e5238cb718.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/e9036c06fc30e6d7582fe9e5238cb718.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/e9036c06fc30e6d7582fe9e5238cb718.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/e9036c06fc30e6d7582fe9e5238cb718.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/e9036c06fc30e6d7582fe9e5238cb718.show 2017-10-26 22:24:39.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/e9036c06fc30e6d7582fe9e5238cb718.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 52146, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 4, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 2306, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 7192, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 891, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")},MsgRR {rrName = Name "e1863.dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 11, rrData = RDataA (IPv4 0x5c7ab450)}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/f07dc1a95f1663525bf32957288adb78.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/f07dc1a95f1663525bf32957288adb78.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/f07dc1a95f1663525bf32957288adb78.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/f07dc1a95f1663525bf32957288adb78.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/f07dc1a95f1663525bf32957288adb78.show 2017-10-26 22:24:39.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/f07dc1a95f1663525bf32957288adb78.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 56182, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 3, mhNSCount = 1, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 15) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 3342, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 20129, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 785, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")}], msgNS = [MsgRR {rrName = Name "dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 823, rrData = RDataSOA (Name "n0dspb.akamaiedge.net.") (Name "hostmaster.akamai.com.") 1494154901 1000 1000 1000 1800}], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/f3110fafd49cc625b0cc29e99d82ec79.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/f3110fafd49cc625b0cc29e99d82ec79.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/f3110fafd49cc625b0cc29e99d82ec79.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/f3110fafd49cc625b0cc29e99d82ec79.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/f3110fafd49cc625b0cc29e99d82ec79.show 2017-10-26 22:24:39.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/f3110fafd49cc625b0cc29e99d82ec79.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 62613, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "c.f.f.c.7.f.e.0.0.6.b.7.5.a.4.0.4.0.1.0.5.2.8.7.1.0.8.4.1.0.0.2.ip6.arpa.") (Type 12) (Class 1)], msgAN = [MsgRR {rrName = Name "c.f.f.c.7.f.e.0.0.6.b.7.5.a.4.0.4.0.1.0.5.2.8.7.1.0.8.4.1.0.0.2.ip6.arpa.", rrClass = Class 1, rrTTL = TTL 785, rrData = RDataPTR (Name "ghc.haskell.org.")}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/fc9b623aa6ba4673b84145ab0899a3bd.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/fc9b623aa6ba4673b84145ab0899a3bd.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/fc9b623aa6ba4673b84145ab0899a3bd.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/fc9b623aa6ba4673b84145ab0899a3bd.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.1/testdata/msg/fc9b623aa6ba4673b84145ab0899a3bd.show 2017-10-26 22:24:39.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.1/testdata/msg/fc9b623aa6ba4673b84145ab0899a3bd.show 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -Msg {msgHeader = MsgHeader {mhId = 15722, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "4.4.8.8.in-addr.arpa.") (Type 12) 
(Class 1)], msgAN = [MsgRR {rrName = Name "4.4.8.8.in-addr.arpa.", rrClass = Class 1, rrTTL = TTL 86285, rrData = RDataPTR (Name "google-public-dns-b.google.com.")}], msgNS = [], msgAR = []} \ No newline at end of file diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/cbits/hs_resolv_config.h.in cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/cbits/hs_resolv_config.h.in --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/cbits/hs_resolv_config.h.in 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/cbits/hs_resolv_config.h.in 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1,72 @@ +/* cbits/hs_resolv_config.h.in. Generated from configure.ac by autoheader. */ + +/* Define to 1 if you have the header file. */ +#undef HAVE_ARPA_NAMESER_H + +/* Define to 1 if you have the declaration of `res_nquery', and to 0 if you + don't. */ +#undef HAVE_DECL_RES_NQUERY + +/* Define to 1 if you have the declaration of `res_query', and to 0 if you + don't. */ +#undef HAVE_DECL_RES_QUERY + +/* Define to 1 if you have the header file. */ +#undef HAVE_INTTYPES_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_MEMORY_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_NETINET_IN_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STDINT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STDLIB_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STRINGS_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_STRING_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_STAT_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_SYS_TYPES_H + +/* Define to 1 if you have the header file. */ +#undef HAVE_UNISTD_H + +/* Define to the address where bug reports for this package should be sent. */ +#undef PACKAGE_BUGREPORT + +/* Define to the full name of this package. 
*/ +#undef PACKAGE_NAME + +/* Define to the full name and version of this package. */ +#undef PACKAGE_STRING + +/* Define to the one symbol short name of this package. */ +#undef PACKAGE_TARNAME + +/* Define to the home page for this package. */ +#undef PACKAGE_URL + +/* Define to the version of this package. */ +#undef PACKAGE_VERSION + +/* The size of `struct __res_state', as computed by sizeof. */ +#undef SIZEOF_STRUCT___RES_STATE + +/* Define to 1 if you have the ANSI C header files. */ +#undef STDC_HEADERS + +/* Define to 1 in order to use res_nquery(3) API */ +#undef USE_RES_NQUERY + +/* Define to empty if `const' does not conform to ANSI C. */ +#undef const diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/cbits/hs_resolv.h cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/cbits/hs_resolv.h --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/cbits/hs_resolv.h 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/cbits/hs_resolv.h 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1,128 @@ +#if !defined(HS_RESOLV_H) +#define HS_RESOLV_H + +#include "hs_resolv_config.h" + +#include + +#if defined(HAVE_NETINET_IN_H) +# include +#endif + +#if defined(HAVE_ARPA_NAMESER_H) +# include +#endif + +#include + +#include + +/* This is usually provided via */ +#if !defined(QUERY) +# define QUERY ns_o_query +#endif + +#if !defined(USE_RES_NQUERY) +# error USE_RES_NQUERY not defined +#endif + +#if USE_RES_NQUERY && (SIZEOF_STRUCT___RES_STATE <= 0) +# error broken invariant +#endif + +#if USE_RES_NQUERY + +inline static int +res_opt_set_use_dnssec(struct __res_state *s) +{ + assert(s); + + if (!(s->options & RES_INIT)) { + int rc = res_ninit(s); + if (rc) return rc; + } + + s->options |= RES_USE_DNSSEC | RES_USE_EDNS0; + + return 0; +} + +inline static int +hs_res_mkquery(struct __res_state *s, const char *dname, int class, int type, unsigned char *req, int reqlen0) +{ + assert(s); 
+ + int reqlen = res_nmkquery(s, QUERY, dname, class, type, NULL, 0, NULL, req, reqlen0); + + assert(reqlen <= reqlen0); + + return reqlen; +} + +inline static int +hs_res_send(struct __res_state *s, const unsigned char *msg, int msglen, unsigned char *answer, int anslen) +{ + assert(s); + + return res_nsend(s, msg, msglen, answer, anslen); +} + +inline static int +hs_res_query(struct __res_state *s, const char *dname, int class, int type, unsigned char *answer, int anslen) +{ + assert(s); + + return res_nquery(s, dname, class, type, answer, anslen); +} + +#else + +/* use non-reentrant API */ + +inline static int +res_opt_set_use_dnssec(void *s) +{ + assert(!s); + + if (!(_res.options & RES_INIT)) { + int rc = res_init(); + if (rc) return rc; + } + + _res.options |= RES_USE_DNSSEC | RES_USE_EDNS0; + + return 0; +} + + +inline static int +hs_res_mkquery(void *s, const char *dname, int class, int type, unsigned char *req, int reqlen0) +{ + assert(!s); + + int reqlen = res_mkquery(QUERY, dname, class, type, NULL, 0, NULL, req, reqlen0); + + assert(reqlen <= reqlen0); + + return reqlen; +} + +inline static int +hs_res_send(void *s, const unsigned char *msg, int msglen, unsigned char *answer, int anslen) +{ + assert(!s); + + return res_send(msg, msglen, answer, anslen); +} + +inline static int +hs_res_query(void *s, const char *dname, int class, int type, unsigned char *answer, int anslen) +{ + assert(!s); + + return res_query(dname, class, type, answer, anslen); +} + +#endif + +#endif /* HS_RESOLV_H */ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/ChangeLog.md cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/ChangeLog.md --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/ChangeLog.md 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/ChangeLog.md 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1,19 @@ +# Revision history for `resolv` + +## 0.1.1.2 + +* 
Clarify/relax licensing terms + +## 0.1.1.1 + +* Improve Autoconf script + +## 0.1.1.0 + +* Use Autoconf to detect which library (if any) to link for `res_query(3)` +* Use reentrant `res_nquery(3)` API if available and signal via new `resIsReentrant :: Bool` constant +* Expose `DnsException` and `QR` + +## 0.1.0.0 + +* First version. Released on an unsuspecting world. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/configure cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/configure --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/configure 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/configure 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1,5099 @@ +#! /bin/sh +# Guess values for system-dependent variables and create Makefiles. +# Generated by GNU Autoconf 2.69 for resolv 0.0. +# +# Report bugs to . +# +# +# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. +# +# +# This configure script is free software; the Free Software Foundation +# gives unlimited permission to copy, distribute and modify it. +## -------------------- ## +## M4sh Initialization. ## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi + + +as_nl=' +' +export as_nl +# Printing a long string crashes Solaris 7 /usr/bin/printf. 
+as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo +# Prefer a ksh shell builtin over an external printf program on Solaris, +# but without wasting forks for bash or zsh. +if test -z "$BASH_VERSION$ZSH_VERSION" \ + && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='print -r --' + as_echo_n='print -rn --' +elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='printf %s\n' + as_echo_n='printf %s' +else + if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then + as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' + as_echo_n='/usr/ucb/echo -n' + else + as_echo_body='eval expr "X$1" : "X\\(.*\\)"' + as_echo_n_body='eval + arg=$1; + case $arg in #( + *"$as_nl"*) + expr "X$arg" : "X\\(.*\\)$as_nl"; + arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; + esac; + expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" + ' + export as_echo_n_body + as_echo_n='sh -c $as_echo_n_body as_echo' + fi + export as_echo_body + as_echo='sh -c $as_echo_body as_echo' +fi + +# The user is always right. +if test "${PATH_SEPARATOR+set}" != set; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# IFS +# We need space, tab and new line, in precisely that order. Quoting is +# there to prevent editors from complaining about space-tab. +# (If _AS_PATH_WALK were called with IFS unset, it would disable word +# splitting by setting IFS to empty value.) +IFS=" "" $as_nl" + +# Find who we are. Look in the path if we contain no directory separator. +as_myself= +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as `sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! -f "$as_myself"; then + $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + +# Unset variables that we do not need and which cause bugs (e.g. in +# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" +# suppresses any "Segmentation fault" message there. '((' could +# trigger a bug in pdksh 5.2.14. +for as_var in BASH_ENV ENV MAIL MAILPATH +do eval test x\${$as_var+set} = xset \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done +PS1='$ ' +PS2='> ' +PS4='+ ' + +# NLS nuisances. +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# CDPATH. +(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + +# Use a proper internal environment variable to ensure we don't fall + # into an infinite loop, continuously re-executing ourselves. + if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then + _as_can_reexec=no; export _as_can_reexec; + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. +BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +as_fn_exit 255 + fi + # We don't want this to propagate to other subprocesses. 
+ { _as_can_reexec=; unset _as_can_reexec;} +if test "x$CONFIG_SHELL" = x; then + as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which + # is contrary to our usage. Disable this feature. + alias -g '\${1+\"\$@\"}'='\"\$@\"' + setopt NO_GLOB_SUBST +else + case \`(set -o) 2>/dev/null\` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi +" + as_required="as_fn_return () { (exit \$1); } +as_fn_success () { as_fn_return 0; } +as_fn_failure () { as_fn_return 1; } +as_fn_ret_success () { return 0; } +as_fn_ret_failure () { return 1; } + +exitcode=0 +as_fn_success || { exitcode=1; echo as_fn_success failed.; } +as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } +as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } +as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } +if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : + +else + exitcode=1; echo positional parameters were not saved. +fi +test x\$exitcode = x0 || exit 1 +test -x / || exit 1" + as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO + as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO + eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && + test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 +test \$(( 1 + 1 )) = 2 || exit 1" + if (eval "$as_required") 2>/dev/null; then : + as_have_required=yes +else + as_have_required=no +fi + if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : + +else + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +as_found=false +for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ as_found=: + case $as_dir in #( + /*) + for as_base in sh bash ksh sh5; do + # Try only shells that exist, to save several forks. + as_shell=$as_dir/$as_base + if { test -f "$as_shell" || test -f "$as_shell.exe"; } && + { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : + CONFIG_SHELL=$as_shell as_have_required=yes + if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : + break 2 +fi +fi + done;; + esac + as_found=false +done +$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && + { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : + CONFIG_SHELL=$SHELL as_have_required=yes +fi; } +IFS=$as_save_IFS + + + if test "x$CONFIG_SHELL" != x; then : + export CONFIG_SHELL + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. +BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +exit 255 +fi + + if test x$as_have_required = xno; then : + $as_echo "$0: This script requires a shell more modern than all" + $as_echo "$0: the shells that I found on your system." + if test x${ZSH_VERSION+set} = xset ; then + $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" + $as_echo "$0: be upgraded to zsh 4.3.4 or later." + else + $as_echo "$0: Please tell bug-autoconf@gnu.org and hvr@gnu.org about +$0: your system, including any error possibly output before +$0: this message. 
Then install a modern shell, or manually +$0: run the script under such a shell if you do have one." + fi + exit 1 +fi +fi +fi +SHELL=${CONFIG_SHELL-/bin/sh} +export SHELL +# Unset more variables known to interfere with behavior of common tools. +CLICOLOR_FORCE= GREP_OPTIONS= +unset CLICOLOR_FORCE GREP_OPTIONS + +## --------------------- ## +## M4sh Shell Functions. ## +## --------------------- ## +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. 
+as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else + as_fn_arith () + { + as_val=`expr "$@" || test $? -eq 1` + } +fi # as_fn_arith + + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. 
+as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + $as_echo "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + + + as_lineno_1=$LINENO as_lineno_1a=$LINENO + as_lineno_2=$LINENO as_lineno_2a=$LINENO + eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && + test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { + # Blame Lee E. McMahon (1931-1989) for sed's syntax. 
:-) + sed -n ' + p + /[$]LINENO/= + ' <$as_myself | + sed ' + s/[$]LINENO.*/&-/ + t lineno + b + :lineno + N + :loop + s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ + t loop + s/-\n.*// + ' >$as_me.lineno && + chmod +x "$as_me.lineno" || + { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } + + # If we had to re-execute with $CONFIG_SHELL, we're ensured to have + # already done that, so ensure we don't try to do so again and fall + # in an infinite loop. This has already happened in practice. + _as_can_reexec=no; export _as_can_reexec + # Don't try to exec as it changes $[0], causing all sort of problems + # (the dirname of $[0] is not the place where we might find the + # original and so on. Autoconf is especially sensitive to this). + . "./$as_me.lineno" + # Exit status is that of the last command. + exit +} + +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + +if mkdir -p . 
2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + +as_test_x='test -x' +as_executable_p=as_fn_executable_p + +# Sed expression to map a string onto a valid CPP name. +as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" + +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" + + +test -n "$DJDIR" || exec 7<&0 &1 + +# Name of the host. +# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, +# so uname gets run too. +ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` + +# +# Initializations. +# +ac_default_prefix=/usr/local +ac_clean_files= +ac_config_libobj_dir=. +LIBOBJS= +cross_compiling=no +subdirs= +MFLAGS= +MAKEFLAGS= + +# Identity of this package. +PACKAGE_NAME='resolv' +PACKAGE_TARNAME='resolv' +PACKAGE_VERSION='0.0' +PACKAGE_STRING='resolv 0.0' +PACKAGE_BUGREPORT='hvr@gnu.org' +PACKAGE_URL='' + +ac_unique_file="resolv.cabal" +# Factoring default headers for most tests. 
+ac_includes_default="\ +#include +#ifdef HAVE_SYS_TYPES_H +# include +#endif +#ifdef HAVE_SYS_STAT_H +# include +#endif +#ifdef STDC_HEADERS +# include +# include +#else +# ifdef HAVE_STDLIB_H +# include +# endif +#endif +#ifdef HAVE_STRING_H +# if !defined STDC_HEADERS && defined HAVE_MEMORY_H +# include +# endif +# include +#endif +#ifdef HAVE_STRINGS_H +# include +#endif +#ifdef HAVE_INTTYPES_H +# include +#endif +#ifdef HAVE_STDINT_H +# include +#endif +#ifdef HAVE_UNISTD_H +# include +#endif" + +ac_subst_vars='LTLIBOBJS +LIBOBJS +EXTRA_LIBS +CPP_OPTIONS +EGREP +GREP +CPP +OBJEXT +EXEEXT +ac_ct_CC +CPPFLAGS +LDFLAGS +CFLAGS +CC +target_alias +host_alias +build_alias +LIBS +ECHO_T +ECHO_N +ECHO_C +DEFS +mandir +localedir +libdir +psdir +pdfdir +dvidir +htmldir +infodir +docdir +oldincludedir +includedir +runstatedir +localstatedir +sharedstatedir +sysconfdir +datadir +datarootdir +libexecdir +sbindir +bindir +program_transform_name +prefix +exec_prefix +PACKAGE_URL +PACKAGE_BUGREPORT +PACKAGE_STRING +PACKAGE_VERSION +PACKAGE_TARNAME +PACKAGE_NAME +PATH_SEPARATOR +SHELL' +ac_subst_files='' +ac_user_opts=' +enable_option_checking +' + ac_precious_vars='build_alias +host_alias +target_alias +CC +CFLAGS +LDFLAGS +LIBS +CPPFLAGS +CPP' + + +# Initialize some variables set by options. +ac_init_help= +ac_init_version=false +ac_unrecognized_opts= +ac_unrecognized_sep= +# The variables have the same names as the options, with +# dashes changed to underlines. +cache_file=/dev/null +exec_prefix=NONE +no_create= +no_recursion= +prefix=NONE +program_prefix=NONE +program_suffix=NONE +program_transform_name=s,x,x, +silent= +site= +srcdir= +verbose= +x_includes=NONE +x_libraries=NONE + +# Installation directory options. +# These are left unexpanded so users can "make install exec_prefix=/foo" +# and all the variables that are supposed to be based on exec_prefix +# by default will actually change. +# Use braces instead of parens because sh, perl, etc. also accept them. 
+# (The list follows the same order as the GNU Coding Standards.) +bindir='${exec_prefix}/bin' +sbindir='${exec_prefix}/sbin' +libexecdir='${exec_prefix}/libexec' +datarootdir='${prefix}/share' +datadir='${datarootdir}' +sysconfdir='${prefix}/etc' +sharedstatedir='${prefix}/com' +localstatedir='${prefix}/var' +runstatedir='${localstatedir}/run' +includedir='${prefix}/include' +oldincludedir='/usr/include' +docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' +infodir='${datarootdir}/info' +htmldir='${docdir}' +dvidir='${docdir}' +pdfdir='${docdir}' +psdir='${docdir}' +libdir='${exec_prefix}/lib' +localedir='${datarootdir}/locale' +mandir='${datarootdir}/man' + +ac_prev= +ac_dashdash= +for ac_option +do + # If the previous option needs an argument, assign it. + if test -n "$ac_prev"; then + eval $ac_prev=\$ac_option + ac_prev= + continue + fi + + case $ac_option in + *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; + *=) ac_optarg= ;; + *) ac_optarg=yes ;; + esac + + # Accept the important Cygnus configure options, so we can diagnose typos. 
+ + case $ac_dashdash$ac_option in + --) + ac_dashdash=yes ;; + + -bindir | --bindir | --bindi | --bind | --bin | --bi) + ac_prev=bindir ;; + -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) + bindir=$ac_optarg ;; + + -build | --build | --buil | --bui | --bu) + ac_prev=build_alias ;; + -build=* | --build=* | --buil=* | --bui=* | --bu=*) + build_alias=$ac_optarg ;; + + -cache-file | --cache-file | --cache-fil | --cache-fi \ + | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) + ac_prev=cache_file ;; + -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ + | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) + cache_file=$ac_optarg ;; + + --config-cache | -C) + cache_file=config.cache ;; + + -datadir | --datadir | --datadi | --datad) + ac_prev=datadir ;; + -datadir=* | --datadir=* | --datadi=* | --datad=*) + datadir=$ac_optarg ;; + + -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ + | --dataroo | --dataro | --datar) + ac_prev=datarootdir ;; + -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ + | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) + datarootdir=$ac_optarg ;; + + -disable-* | --disable-*) + ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? 
"invalid feature name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=no ;; + + -docdir | --docdir | --docdi | --doc | --do) + ac_prev=docdir ;; + -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) + docdir=$ac_optarg ;; + + -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) + ac_prev=dvidir ;; + -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) + dvidir=$ac_optarg ;; + + -enable-* | --enable-*) + ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid feature name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=\$ac_optarg ;; + + -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ + | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ + | --exec | --exe | --ex) + ac_prev=exec_prefix ;; + -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ + | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ + | --exec=* | --exe=* | --ex=*) + exec_prefix=$ac_optarg ;; + + -gas | --gas | --ga | --g) + # Obsolete; use --with-gas. 
+ with_gas=yes ;; + + -help | --help | --hel | --he | -h) + ac_init_help=long ;; + -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) + ac_init_help=recursive ;; + -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) + ac_init_help=short ;; + + -host | --host | --hos | --ho) + ac_prev=host_alias ;; + -host=* | --host=* | --hos=* | --ho=*) + host_alias=$ac_optarg ;; + + -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) + ac_prev=htmldir ;; + -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ + | --ht=*) + htmldir=$ac_optarg ;; + + -includedir | --includedir | --includedi | --included | --include \ + | --includ | --inclu | --incl | --inc) + ac_prev=includedir ;; + -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ + | --includ=* | --inclu=* | --incl=* | --inc=*) + includedir=$ac_optarg ;; + + -infodir | --infodir | --infodi | --infod | --info | --inf) + ac_prev=infodir ;; + -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) + infodir=$ac_optarg ;; + + -libdir | --libdir | --libdi | --libd) + ac_prev=libdir ;; + -libdir=* | --libdir=* | --libdi=* | --libd=*) + libdir=$ac_optarg ;; + + -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ + | --libexe | --libex | --libe) + ac_prev=libexecdir ;; + -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ + | --libexe=* | --libex=* | --libe=*) + libexecdir=$ac_optarg ;; + + -localedir | --localedir | --localedi | --localed | --locale) + ac_prev=localedir ;; + -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) + localedir=$ac_optarg ;; + + -localstatedir | --localstatedir | --localstatedi | --localstated \ + | --localstate | --localstat | --localsta | --localst | --locals) + ac_prev=localstatedir ;; + -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ + | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) + localstatedir=$ac_optarg ;; + + 
-mandir | --mandir | --mandi | --mand | --man | --ma | --m) + ac_prev=mandir ;; + -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) + mandir=$ac_optarg ;; + + -nfp | --nfp | --nf) + # Obsolete; use --without-fp. + with_fp=no ;; + + -no-create | --no-create | --no-creat | --no-crea | --no-cre \ + | --no-cr | --no-c | -n) + no_create=yes ;; + + -no-recursion | --no-recursion | --no-recursio | --no-recursi \ + | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) + no_recursion=yes ;; + + -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ + | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ + | --oldin | --oldi | --old | --ol | --o) + ac_prev=oldincludedir ;; + -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ + | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ + | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) + oldincludedir=$ac_optarg ;; + + -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) + ac_prev=prefix ;; + -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) + prefix=$ac_optarg ;; + + -program-prefix | --program-prefix | --program-prefi | --program-pref \ + | --program-pre | --program-pr | --program-p) + ac_prev=program_prefix ;; + -program-prefix=* | --program-prefix=* | --program-prefi=* \ + | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) + program_prefix=$ac_optarg ;; + + -program-suffix | --program-suffix | --program-suffi | --program-suff \ + | --program-suf | --program-su | --program-s) + ac_prev=program_suffix ;; + -program-suffix=* | --program-suffix=* | --program-suffi=* \ + | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) + program_suffix=$ac_optarg ;; + + -program-transform-name | --program-transform-name \ + | --program-transform-nam | --program-transform-na \ + | --program-transform-n | --program-transform- \ + | --program-transform | 
--program-transfor \ + | --program-transfo | --program-transf \ + | --program-trans | --program-tran \ + | --progr-tra | --program-tr | --program-t) + ac_prev=program_transform_name ;; + -program-transform-name=* | --program-transform-name=* \ + | --program-transform-nam=* | --program-transform-na=* \ + | --program-transform-n=* | --program-transform-=* \ + | --program-transform=* | --program-transfor=* \ + | --program-transfo=* | --program-transf=* \ + | --program-trans=* | --program-tran=* \ + | --progr-tra=* | --program-tr=* | --program-t=*) + program_transform_name=$ac_optarg ;; + + -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) + ac_prev=pdfdir ;; + -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) + pdfdir=$ac_optarg ;; + + -psdir | --psdir | --psdi | --psd | --ps) + ac_prev=psdir ;; + -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) + psdir=$ac_optarg ;; + + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + silent=yes ;; + + -runstatedir | --runstatedir | --runstatedi | --runstated \ + | --runstate | --runstat | --runsta | --runst | --runs \ + | --run | --ru | --r) + ac_prev=runstatedir ;; + -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ + | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ + | --run=* | --ru=* | --r=*) + runstatedir=$ac_optarg ;; + + -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) + ac_prev=sbindir ;; + -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ + | --sbi=* | --sb=*) + sbindir=$ac_optarg ;; + + -sharedstatedir | --sharedstatedir | --sharedstatedi \ + | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ + | --sharedst | --shareds | --shared | --share | --shar \ + | --sha | --sh) + ac_prev=sharedstatedir ;; + -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ + | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ + | --sharedst=* | --shareds=* | 
--shared=* | --share=* | --shar=* \ + | --sha=* | --sh=*) + sharedstatedir=$ac_optarg ;; + + -site | --site | --sit) + ac_prev=site ;; + -site=* | --site=* | --sit=*) + site=$ac_optarg ;; + + -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) + ac_prev=srcdir ;; + -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) + srcdir=$ac_optarg ;; + + -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ + | --syscon | --sysco | --sysc | --sys | --sy) + ac_prev=sysconfdir ;; + -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ + | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) + sysconfdir=$ac_optarg ;; + + -target | --target | --targe | --targ | --tar | --ta | --t) + ac_prev=target_alias ;; + -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) + target_alias=$ac_optarg ;; + + -v | -verbose | --verbose | --verbos | --verbo | --verb) + verbose=yes ;; + + -version | --version | --versio | --versi | --vers | -V) + ac_init_version=: ;; + + -with-* | --with-*) + ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid package name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=\$ac_optarg ;; + + -without-* | --without-*) + ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? 
"invalid package name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=no ;; + + --x) + # Obsolete; use --with-x. + with_x=yes ;; + + -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ + | --x-incl | --x-inc | --x-in | --x-i) + ac_prev=x_includes ;; + -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ + | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) + x_includes=$ac_optarg ;; + + -x-libraries | --x-libraries | --x-librarie | --x-librari \ + | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) + ac_prev=x_libraries ;; + -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ + | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) + x_libraries=$ac_optarg ;; + + -*) as_fn_error $? "unrecognized option: \`$ac_option' +Try \`$0 --help' for more information" + ;; + + *=*) + ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` + # Reject names that are not valid shell variable names. + case $ac_envvar in #( + '' | [0-9]* | *[!_$as_cr_alnum]* ) + as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; + esac + eval $ac_envvar=\$ac_optarg + export $ac_envvar ;; + + *) + # FIXME: should be removed in autoconf 3.0. + $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 + expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && + $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 + : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" + ;; + + esac +done + +if test -n "$ac_prev"; then + ac_option=--`echo $ac_prev | sed 's/_/-/g'` + as_fn_error $? 
"missing argument to $ac_option" +fi + +if test -n "$ac_unrecognized_opts"; then + case $enable_option_checking in + no) ;; + fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; + *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; + esac +fi + +# Check all directory arguments for consistency. +for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ + datadir sysconfdir sharedstatedir localstatedir includedir \ + oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ + libdir localedir mandir runstatedir +do + eval ac_val=\$$ac_var + # Remove trailing slashes. + case $ac_val in + */ ) + ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` + eval $ac_var=\$ac_val;; + esac + # Be sure to have absolute directory names. + case $ac_val in + [\\/$]* | ?:[\\/]* ) continue;; + NONE | '' ) case $ac_var in *prefix ) continue;; esac;; + esac + as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" +done + +# There might be people who depend on the old broken behavior: `$host' +# used to hold the argument of --host etc. +# FIXME: To remove some day. +build=$build_alias +host=$host_alias +target=$target_alias + +# FIXME: To remove some day. +if test "x$host_alias" != x; then + if test "x$build_alias" = x; then + cross_compiling=maybe + elif test "x$build_alias" != "x$host_alias"; then + cross_compiling=yes + fi +fi + +ac_tool_prefix= +test -n "$host_alias" && ac_tool_prefix=$host_alias- + +test "$silent" = yes && exec 6>/dev/null + + +ac_pwd=`pwd` && test -n "$ac_pwd" && +ac_ls_di=`ls -di .` && +ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || + as_fn_error $? "working directory cannot be determined" +test "X$ac_ls_di" = "X$ac_pwd_ls_di" || + as_fn_error $? "pwd does not report name of working directory" + + +# Find the source files, if location was not specified. 
+if test -z "$srcdir"; then + ac_srcdir_defaulted=yes + # Try the directory containing this script, then the parent directory. + ac_confdir=`$as_dirname -- "$as_myself" || +$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_myself" : 'X\(//\)[^/]' \| \ + X"$as_myself" : 'X\(//\)$' \| \ + X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_myself" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + srcdir=$ac_confdir + if test ! -r "$srcdir/$ac_unique_file"; then + srcdir=.. + fi +else + ac_srcdir_defaulted=no +fi +if test ! -r "$srcdir/$ac_unique_file"; then + test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." + as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" +fi +ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" +ac_abs_confdir=`( + cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" + pwd)` +# When building in place, set srcdir=. +if test "$ac_abs_confdir" = "$ac_pwd"; then + srcdir=. +fi +# Remove unnecessary trailing slashes from srcdir. +# Double slashes in file names in object file debugging info +# mess up M-x gdb in Emacs. +case $srcdir in +*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; +esac +for ac_var in $ac_precious_vars; do + eval ac_env_${ac_var}_set=\${${ac_var}+set} + eval ac_env_${ac_var}_value=\$${ac_var} + eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} + eval ac_cv_env_${ac_var}_value=\$${ac_var} +done + +# +# Report the --help message. +# +if test "$ac_init_help" = "long"; then + # Omit some internal or obsolete options to make the list less imposing. + # This message is too long to be a string in the A/UX 3.1 sh. + cat <<_ACEOF +\`configure' configures resolv 0.0 to adapt to many kinds of systems. + +Usage: $0 [OPTION]... [VAR=VALUE]... 
+ +To assign environment variables (e.g., CC, CFLAGS...), specify them as +VAR=VALUE. See below for descriptions of some of the useful variables. + +Defaults for the options are specified in brackets. + +Configuration: + -h, --help display this help and exit + --help=short display options specific to this package + --help=recursive display the short help of all the included packages + -V, --version display version information and exit + -q, --quiet, --silent do not print \`checking ...' messages + --cache-file=FILE cache test results in FILE [disabled] + -C, --config-cache alias for \`--cache-file=config.cache' + -n, --no-create do not create output files + --srcdir=DIR find the sources in DIR [configure dir or \`..'] + +Installation directories: + --prefix=PREFIX install architecture-independent files in PREFIX + [$ac_default_prefix] + --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX + [PREFIX] + +By default, \`make install' will install all the files in +\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify +an installation prefix other than \`$ac_default_prefix' using \`--prefix', +for instance \`--prefix=\$HOME'. + +For better control, use the options below. 
+ +Fine tuning of the installation directories: + --bindir=DIR user executables [EPREFIX/bin] + --sbindir=DIR system admin executables [EPREFIX/sbin] + --libexecdir=DIR program executables [EPREFIX/libexec] + --sysconfdir=DIR read-only single-machine data [PREFIX/etc] + --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] + --localstatedir=DIR modifiable single-machine data [PREFIX/var] + --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run] + --libdir=DIR object code libraries [EPREFIX/lib] + --includedir=DIR C header files [PREFIX/include] + --oldincludedir=DIR C header files for non-gcc [/usr/include] + --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] + --datadir=DIR read-only architecture-independent data [DATAROOTDIR] + --infodir=DIR info documentation [DATAROOTDIR/info] + --localedir=DIR locale-dependent data [DATAROOTDIR/locale] + --mandir=DIR man documentation [DATAROOTDIR/man] + --docdir=DIR documentation root [DATAROOTDIR/doc/resolv] + --htmldir=DIR html documentation [DOCDIR] + --dvidir=DIR dvi documentation [DOCDIR] + --pdfdir=DIR pdf documentation [DOCDIR] + --psdir=DIR ps documentation [DOCDIR] +_ACEOF + + cat <<\_ACEOF +_ACEOF +fi + +if test -n "$ac_init_help"; then + case $ac_init_help in + short | recursive ) echo "Configuration of resolv 0.0:";; + esac + cat <<\_ACEOF + +Some influential environment variables: + CC C compiler command + CFLAGS C compiler flags + LDFLAGS linker flags, e.g. -L if you have libraries in a + nonstandard directory + LIBS libraries to pass to the linker, e.g. -l + CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I if + you have headers in a nonstandard directory + CPP C preprocessor + +Use these variables to override the choices made by `configure' or to help +it to find libraries and programs with nonstandard names/locations. + +Report bugs to . +_ACEOF +ac_status=$? 
+fi + +if test "$ac_init_help" = "recursive"; then + # If there are subdirs, report their specific --help. + for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue + test -d "$ac_dir" || + { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || + continue + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. + ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + cd "$ac_dir" || { ac_status=$?; continue; } + # Check for guested configure. + if test -f "$ac_srcdir/configure.gnu"; then + echo && + $SHELL "$ac_srcdir/configure.gnu" --help=recursive + elif test -f "$ac_srcdir/configure"; then + echo && + $SHELL "$ac_srcdir/configure" --help=recursive + else + $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 + fi || ac_status=$? + cd "$ac_pwd" || { ac_status=$?; break; } + done +fi + +test -n "$ac_init_help" && exit $ac_status +if $ac_init_version; then + cat <<\_ACEOF +resolv configure 0.0 +generated by GNU Autoconf 2.69 + +Copyright (C) 2012 Free Software Foundation, Inc. 
+This configure script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it. +_ACEOF + exit +fi + +## ------------------------ ## +## Autoconf initialization. ## +## ------------------------ ## + +# ac_fn_c_try_compile LINENO +# -------------------------- +# Try to compile conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext + if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_c_werror_flag" || + test ! -s conftest.err + } && test -s conftest.$ac_objext; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_compile + +# ac_fn_c_try_cpp LINENO +# ---------------------- +# Try to preprocess conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_cpp () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { { ac_try="$ac_cpp conftest.$ac_ext" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err + ac_status=$? 
+ if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } > conftest.i && { + test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || + test ! -s conftest.err + }; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_cpp + +# ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES +# ------------------------------------------------------- +# Tests whether HEADER exists, giving a warning if it cannot be compiled using +# the include files in INCLUDES and setting the cache variable VAR +# accordingly. +ac_fn_c_check_header_mongrel () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if eval \${$3+:} false; then : + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +else + # Is the header compilable? +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 +$as_echo_n "checking $2 usability... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +#include <$2> +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_header_compiler=yes +else + ac_header_compiler=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 +$as_echo "$ac_header_compiler" >&6; } + +# Is the header present? +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 +$as_echo_n "checking $2 presence... 
" >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include <$2> +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + ac_header_preproc=yes +else + ac_header_preproc=no +fi +rm -f conftest.err conftest.i conftest.$ac_ext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 +$as_echo "$ac_header_preproc" >&6; } + +# So? What about this header? +case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #(( + yes:no: ) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 +$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 +$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} + ;; + no:yes:* ) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 +$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 +$as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" 
>&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 +$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 +$as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 +$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} +( $as_echo "## -------------------------- ## +## Report this to hvr@gnu.org ## +## -------------------------- ##" + ) | sed "s/^/$as_me: WARNING: /" >&2 + ;; +esac + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + eval "$3=\$ac_header_compiler" +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_header_mongrel + +# ac_fn_c_try_run LINENO +# ---------------------- +# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes +# that executables *can* be run. +ac_fn_c_try_run () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' + { { case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_try") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; }; then : + ac_retval=0 +else + $as_echo "$as_me: program exited with status $ac_status" >&5 + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=$ac_status +fi + rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_run + +# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES +# ------------------------------------------------------- +# Tests whether HEADER exists and can be compiled using the include files in +# INCLUDES, setting the cache variable VAR accordingly. +ac_fn_c_check_header_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +#include <$2> +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_header_compile + +# ac_fn_c_check_decl LINENO SYMBOL VAR INCLUDES +# --------------------------------------------- +# Tests whether SYMBOL is declared in INCLUDES, setting cache variable VAR +# accordingly. 
+ac_fn_c_check_decl () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + as_decl_name=`echo $2|sed 's/ *(.*//'` + as_decl_use=`echo $2|sed -e 's/(/((/' -e 's/)/) 0&/' -e 's/,/) 0& (/g'` + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $as_decl_name is declared" >&5 +$as_echo_n "checking whether $as_decl_name is declared... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +int +main () +{ +#ifndef $as_decl_name +#ifdef __cplusplus + (void) $as_decl_use; +#else + (void) $as_decl_name; +#endif +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_decl + +# ac_fn_c_try_link LINENO +# ----------------------- +# Try to link conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_link () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext conftest$ac_exeext + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_c_werror_flag" || + test ! 
-s conftest.err + } && test -s conftest$ac_exeext && { + test "$cross_compiling" = yes || + test -x conftest$ac_exeext + }; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information + # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would + # interfere with the next link command; also delete a directory that is + # left behind by Apple's compiler. We do this before executing the actions. + rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_link + +# ac_fn_c_compute_int LINENO EXPR VAR INCLUDES +# -------------------------------------------- +# Tries to find the compile-time value of EXPR in a program that includes +# INCLUDES, setting VAR accordingly. Returns whether the value could be +# computed +ac_fn_c_compute_int () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if test "$cross_compiling" = yes; then + # Depending upon the size, compute the lo and hi bounds. +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +int +main () +{ +static int test_array [1 - 2 * !(($2) >= 0)]; +test_array [0] = 0; +return test_array [0]; + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_lo=0 ac_mid=0 + while :; do + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +$4 +int +main () +{ +static int test_array [1 - 2 * !(($2) <= $ac_mid)]; +test_array [0] = 0; +return test_array [0]; + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_hi=$ac_mid; break +else + as_fn_arith $ac_mid + 1 && ac_lo=$as_val + if test $ac_lo -le $ac_mid; then + ac_lo= ac_hi= + break + fi + as_fn_arith 2 '*' $ac_mid + 1 && ac_mid=$as_val +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + done +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +int +main () +{ +static int test_array [1 - 2 * !(($2) < 0)]; +test_array [0] = 0; +return test_array [0]; + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_hi=-1 ac_mid=-1 + while :; do + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +int +main () +{ +static int test_array [1 - 2 * !(($2) >= $ac_mid)]; +test_array [0] = 0; +return test_array [0]; + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_lo=$ac_mid; break +else + as_fn_arith '(' $ac_mid ')' - 1 && ac_hi=$as_val + if test $ac_mid -le $ac_hi; then + ac_lo= ac_hi= + break + fi + as_fn_arith 2 '*' $ac_mid && ac_mid=$as_val +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + done +else + ac_lo= ac_hi= +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +# Binary search between lo and hi bounds. +while test "x$ac_lo" != "x$ac_hi"; do + as_fn_arith '(' $ac_hi - $ac_lo ')' / 2 + $ac_lo && ac_mid=$as_val + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +$4 +int +main () +{ +static int test_array [1 - 2 * !(($2) <= $ac_mid)]; +test_array [0] = 0; +return test_array [0]; + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_hi=$ac_mid +else + as_fn_arith '(' $ac_mid ')' + 1 && ac_lo=$as_val +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +done +case $ac_lo in #(( +?*) eval "$3=\$ac_lo"; ac_retval=0 ;; +'') ac_retval=1 ;; +esac + else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +static long int longval () { return $2; } +static unsigned long int ulongval () { return $2; } +#include +#include +int +main () +{ + + FILE *f = fopen ("conftest.val", "w"); + if (! f) + return 1; + if (($2) < 0) + { + long int i = longval (); + if (i != ($2)) + return 1; + fprintf (f, "%ld", i); + } + else + { + unsigned long int i = ulongval (); + if (i != ($2)) + return 1; + fprintf (f, "%lu", i); + } + /* Do not output a trailing newline, as this causes \r\n confusion + on some platforms. */ + return ferror (f) || fclose (f) != 0; + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_run "$LINENO"; then : + echo >>conftest.val; read $3 config.log <<_ACEOF +This file contains any messages produced by compilers while +running configure, to aid debugging if configure makes a mistake. + +It was created by resolv $as_me 0.0, which was +generated by GNU Autoconf 2.69. Invocation command line was + + $ $0 $@ + +_ACEOF +exec 5>>config.log +{ +cat <<_ASUNAME +## --------- ## +## Platform. 
## +## --------- ## + +hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` +uname -m = `(uname -m) 2>/dev/null || echo unknown` +uname -r = `(uname -r) 2>/dev/null || echo unknown` +uname -s = `(uname -s) 2>/dev/null || echo unknown` +uname -v = `(uname -v) 2>/dev/null || echo unknown` + +/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` +/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` + +/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` +/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` +/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` +/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` +/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` +/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` +/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` + +_ASUNAME + +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + $as_echo "PATH: $as_dir" + done +IFS=$as_save_IFS + +} >&5 + +cat >&5 <<_ACEOF + + +## ----------- ## +## Core tests. ## +## ----------- ## + +_ACEOF + + +# Keep a trace of the command line. +# Strip out --no-create and --no-recursion so they do not pile up. +# Strip out --silent because we don't want to record it for future runs. +# Also quote any args containing shell meta-characters. +# Make two passes to allow for proper duplicate-argument suppression. 
+ac_configure_args= +ac_configure_args0= +ac_configure_args1= +ac_must_keep_next=false +for ac_pass in 1 2 +do + for ac_arg + do + case $ac_arg in + -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + continue ;; + *\'*) + ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + case $ac_pass in + 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; + 2) + as_fn_append ac_configure_args1 " '$ac_arg'" + if test $ac_must_keep_next = true; then + ac_must_keep_next=false # Got value, back to normal. + else + case $ac_arg in + *=* | --config-cache | -C | -disable-* | --disable-* \ + | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ + | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ + | -with-* | --with-* | -without-* | --without-* | --x) + case "$ac_configure_args0 " in + "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; + esac + ;; + -* ) ac_must_keep_next=true ;; + esac + fi + as_fn_append ac_configure_args " '$ac_arg'" + ;; + esac + done +done +{ ac_configure_args0=; unset ac_configure_args0;} +{ ac_configure_args1=; unset ac_configure_args1;} + +# When interrupted or exit'd, cleanup temporary files, and complete +# config.log. We remove comments because anyway the quotes in there +# would cause problems or look ugly. +# WARNING: Use '\'' to represent an apostrophe within the trap. +# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. +trap 'exit_status=$? + # Save into config.log some information that might help in debugging. + { + echo + + $as_echo "## ---------------- ## +## Cache variables. 
## +## ---------------- ##" + echo + # The following way of writing the cache mishandles newlines in values, +( + for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + (set) 2>&1 | + case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + sed -n \ + "s/'\''/'\''\\\\'\'''\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" + ;; #( + *) + sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) + echo + + $as_echo "## ----------------- ## +## Output variables. ## +## ----------------- ##" + echo + for ac_var in $ac_subst_vars + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + $as_echo "$ac_var='\''$ac_val'\''" + done | sort + echo + + if test -n "$ac_subst_files"; then + $as_echo "## ------------------- ## +## File substitutions. ## +## ------------------- ##" + echo + for ac_var in $ac_subst_files + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + $as_echo "$ac_var='\''$ac_val'\''" + done | sort + echo + fi + + if test -s confdefs.h; then + $as_echo "## ----------- ## +## confdefs.h. 
## +## ----------- ##" + echo + cat confdefs.h + echo + fi + test "$ac_signal" != 0 && + $as_echo "$as_me: caught signal $ac_signal" + $as_echo "$as_me: exit $exit_status" + } >&5 + rm -f core *.core core.conftest.* && + rm -f -r conftest* confdefs* conf$$* $ac_clean_files && + exit $exit_status +' 0 +for ac_signal in 1 2 13 15; do + trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal +done +ac_signal=0 + +# confdefs.h avoids OS command line length limits that DEFS can exceed. +rm -f -r conftest* confdefs.h + +$as_echo "/* confdefs.h */" > confdefs.h + +# Predefined preprocessor variables. + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_NAME "$PACKAGE_NAME" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_TARNAME "$PACKAGE_TARNAME" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_VERSION "$PACKAGE_VERSION" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_STRING "$PACKAGE_STRING" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_URL "$PACKAGE_URL" +_ACEOF + + +# Let the site file select an alternate cache file if it wants to. +# Prefer an explicitly selected file to automatically selected ones. +ac_site_file1=NONE +ac_site_file2=NONE +if test -n "$CONFIG_SITE"; then + # We do not want a PATH search for config.site. 
+ case $CONFIG_SITE in #(( + -*) ac_site_file1=./$CONFIG_SITE;; + */*) ac_site_file1=$CONFIG_SITE;; + *) ac_site_file1=./$CONFIG_SITE;; + esac +elif test "x$prefix" != xNONE; then + ac_site_file1=$prefix/share/config.site + ac_site_file2=$prefix/etc/config.site +else + ac_site_file1=$ac_default_prefix/share/config.site + ac_site_file2=$ac_default_prefix/etc/config.site +fi +for ac_site_file in "$ac_site_file1" "$ac_site_file2" +do + test "x$ac_site_file" = xNONE && continue + if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 +$as_echo "$as_me: loading site script $ac_site_file" >&6;} + sed 's/^/| /' "$ac_site_file" >&5 + . "$ac_site_file" \ + || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "failed to load site script $ac_site_file +See \`config.log' for more details" "$LINENO" 5; } + fi +done + +if test -r "$cache_file"; then + # Some versions of bash will fail to source /dev/null (special files + # actually), so we avoid doing that. DJGPP emulates it as a regular file. + if test /dev/null != "$cache_file" && test -f "$cache_file"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 +$as_echo "$as_me: loading cache $cache_file" >&6;} + case $cache_file in + [\\/]* | ?:[\\/]* ) . "$cache_file";; + *) . "./$cache_file";; + esac + fi +else + { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 +$as_echo "$as_me: creating cache $cache_file" >&6;} + >$cache_file +fi + +# Check that the precious variables saved in the cache have kept the same +# value. 
+ac_cache_corrupted=false +for ac_var in $ac_precious_vars; do + eval ac_old_set=\$ac_cv_env_${ac_var}_set + eval ac_new_set=\$ac_env_${ac_var}_set + eval ac_old_val=\$ac_cv_env_${ac_var}_value + eval ac_new_val=\$ac_env_${ac_var}_value + case $ac_old_set,$ac_new_set in + set,) + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 +$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,set) + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 +$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,);; + *) + if test "x$ac_old_val" != "x$ac_new_val"; then + # differences in whitespace do not lead to failure. + ac_old_val_w=`echo x $ac_old_val` + ac_new_val_w=`echo x $ac_new_val` + if test "$ac_old_val_w" != "$ac_new_val_w"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 +$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} + ac_cache_corrupted=: + else + { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 +$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} + eval $ac_var=\$ac_old_val + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 +$as_echo "$as_me: former value: \`$ac_old_val'" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 +$as_echo "$as_me: current value: \`$ac_new_val'" >&2;} + fi;; + esac + # Pass precious variables to config.status. + if test "$ac_new_set" = set; then + case $ac_new_val in + *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; + *) ac_arg=$ac_var=$ac_new_val ;; + esac + case " $ac_configure_args " in + *" '$ac_arg' "*) ;; # Avoid dups. 
Use of quotes ensures accuracy. + *) as_fn_append ac_configure_args " '$ac_arg'" ;; + esac + fi +done +if $ac_cache_corrupted; then + { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 +$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} + as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 +fi +## -------------------- ## +## Main body of script. ## +## -------------------- ## + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + +ac_config_headers="$ac_config_headers cbits/hs_resolv_config.h" + + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. +set dummy ${ac_tool_prefix}gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_CC"; then + ac_ct_CC=$CC + # Extract the first word of "gcc", so it can be a program name with args. +set dummy gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +else + CC="$ac_cv_prog_CC" +fi + +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. +set dummy ${ac_tool_prefix}cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + fi +fi +if test -z "$CC"; then + # Extract the first word of "cc", so it can be a program name with args. +set dummy cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else + ac_prog_rejected=no +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then + ac_prog_rejected=yes + continue + fi + ac_cv_prog_CC="cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +if test $ac_prog_rejected = yes; then + # We found a bogon in the path, so make sure we never use it. + set dummy $ac_cv_prog_CC + shift + if test $# != 0; then + # We chose a different compiler from the bogus one. + # However, it has the same basename, so the bogon will be chosen + # first if we set CC to just the basename; use the full file name. 
+ shift + ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" + fi +fi +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + for ac_prog in cl.exe + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$CC" && break + done +fi +if test -z "$CC"; then + ac_ct_CC=$CC + for ac_prog in cl.exe +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. 
+else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$ac_ct_CC" && break +done + + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +fi + +fi + + +test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "no acceptable C compiler found in \$PATH +See \`config.log' for more details" "$LINENO" 5; } + +# Provide some information about the compiler. +$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion; do + { { ac_try="$ac_compiler $ac_option >&5" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... 
+ 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + fi + rm -f conftest.er1 conftest.err + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +done + +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" +# Try to create an executable without -o first, disregard a.out. +# It will help us diagnose broken compilers, and finding out an intuition +# of exeext. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 +$as_echo_n "checking whether the C compiler works... " >&6; } +ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` + +# The possible output files: +ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" + +ac_rmfiles= +for ac_file in $ac_files +do + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; + * ) ac_rmfiles="$ac_rmfiles $ac_file";; + esac +done +rm -f $ac_rmfiles + +if { { ac_try="$ac_link_default" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link_default") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. +# So ignore a value of `no', otherwise this would lead to `EXEEXT = no' +# in a Makefile. We should not override ac_cv_exeext if it was cached, +# so that the user can short-circuit this test for compilers unknown to +# Autoconf. 
+for ac_file in $ac_files '' +do + test -f "$ac_file" || continue + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) + ;; + [ab].out ) + # We found the default executable, but exeext='' is most + # certainly right. + break;; + *.* ) + if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; + then :; else + ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` + fi + # We set ac_cv_exeext here because the later test for it is not + # safe: cross compilers may not add the suffix if given an `-o' + # argument, so we may need to know it at that point already. + # Even if this section looks crufty: it has the advantage of + # actually working. + break;; + * ) + break;; + esac +done +test "$ac_cv_exeext" = no && ac_cv_exeext= + +else + ac_file='' +fi +if test -z "$ac_file"; then : + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +$as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + +{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error 77 "C compiler cannot create executables +See \`config.log' for more details" "$LINENO" 5; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 +$as_echo_n "checking for C compiler default output file name... " >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 +$as_echo "$ac_file" >&6; } +ac_exeext=$ac_cv_exeext + +rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out +ac_clean_files=$ac_clean_files_save +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 +$as_echo_n "checking for suffix of executables... 
" >&6; } +if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + # If both `conftest.exe' and `conftest' are `present' (well, observable) +# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will +# work properly (i.e., refer to `conftest.exe'), while it won't with +# `rm'. +for ac_file in conftest.exe conftest conftest.*; do + test -f "$ac_file" || continue + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; + *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` + break;; + * ) break;; + esac +done +else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot compute suffix of executables: cannot compile and link +See \`config.log' for more details" "$LINENO" 5; } +fi +rm -f conftest conftest$ac_cv_exeext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 +$as_echo "$ac_cv_exeext" >&6; } + +rm -f conftest.$ac_ext +EXEEXT=$ac_cv_exeext +ac_exeext=$EXEEXT +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +int +main () +{ +FILE *f = fopen ("conftest.out", "w"); + return ferror (f) || fclose (f) != 0; + + ; + return 0; +} +_ACEOF +ac_clean_files="$ac_clean_files conftest.out" +# Check that the compiler produces executables we can run. If not, either +# the compiler is broken, or we cross compile. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 +$as_echo_n "checking whether we are cross compiling... 
" >&6; } +if test "$cross_compiling" != yes; then + { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } + if { ac_try='./conftest$ac_cv_exeext' + { { case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_try") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; }; then + cross_compiling=no + else + if test "$cross_compiling" = maybe; then + cross_compiling=yes + else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot run C compiled programs. +If you meant to cross compile, use \`--host'. +See \`config.log' for more details" "$LINENO" 5; } + fi + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 +$as_echo "$cross_compiling" >&6; } + +rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out +ac_clean_files=$ac_clean_files_save +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 +$as_echo_n "checking for suffix of object files... " >&6; } +if ${ac_cv_objext+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +rm -f conftest.o conftest.obj +if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>&5 + ac_status=$? 
+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + for ac_file in conftest.o conftest.obj conftest.*; do + test -f "$ac_file" || continue; + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; + *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` + break;; + esac +done +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + +{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot compute suffix of object files: cannot compile +See \`config.log' for more details" "$LINENO" 5; } +fi +rm -f conftest.$ac_cv_objext conftest.$ac_ext +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 +$as_echo "$ac_cv_objext" >&6; } +OBJEXT=$ac_cv_objext +ac_objext=$OBJEXT +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 +$as_echo_n "checking whether we are using the GNU C compiler... " >&6; } +if ${ac_cv_c_compiler_gnu+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ +#ifndef __GNUC__ + choke me +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_compiler_gnu=yes +else + ac_compiler_gnu=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +ac_cv_c_compiler_gnu=$ac_compiler_gnu + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 +$as_echo "$ac_cv_c_compiler_gnu" >&6; } +if test $ac_compiler_gnu = yes; then + GCC=yes +else + GCC= +fi +ac_test_CFLAGS=${CFLAGS+set} +ac_save_CFLAGS=$CFLAGS +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 +$as_echo_n "checking whether $CC accepts -g... 
" >&6; } +if ${ac_cv_prog_cc_g+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_save_c_werror_flag=$ac_c_werror_flag + ac_c_werror_flag=yes + ac_cv_prog_cc_g=no + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_g=yes +else + CFLAGS="" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + +else + ac_c_werror_flag=$ac_save_c_werror_flag + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_g=yes +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ac_c_werror_flag=$ac_save_c_werror_flag +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 +$as_echo "$ac_cv_prog_cc_g" >&6; } +if test "$ac_test_CFLAGS" = set; then + CFLAGS=$ac_save_CFLAGS +elif test $ac_cv_prog_cc_g = yes; then + if test "$GCC" = yes; then + CFLAGS="-g -O2" + else + CFLAGS="-g" + fi +else + if test "$GCC" = yes; then + CFLAGS="-O2" + else + CFLAGS= + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 +$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } +if ${ac_cv_prog_cc_c89+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_cv_prog_cc_c89=no +ac_save_CC=$CC +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +struct stat; +/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. 
*/ +struct buf { int x; }; +FILE * (*rcsopen) (struct buf *, struct stat *, int); +static char *e (p, i) + char **p; + int i; +{ + return p[i]; +} +static char *f (char * (*g) (char **, int), char **p, ...) +{ + char *s; + va_list v; + va_start (v,p); + s = g (p, va_arg (v,int)); + va_end (v); + return s; +} + +/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has + function prototypes and stuff, but not '\xHH' hex character constants. + These don't provoke an error unfortunately, instead are silently treated + as 'x'. The following induces an error, until -std is added to get + proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an + array size at least. It's necessary to write '\x00'==0 to get something + that's true only with -std. */ +int osf4_cc_array ['\x00' == 0 ? 1 : -1]; + +/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters + inside strings and character constants. */ +#define FOO(x) 'x' +int xlc6_cc_array[FOO(a) == 'x' ? 
1 : -1]; + +int test (int i, double x); +struct s1 {int (*f) (int a);}; +struct s2 {int (*f) (double a);}; +int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); +int argc; +char **argv; +int +main () +{ +return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; + ; + return 0; +} +_ACEOF +for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ + -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" +do + CC="$ac_save_CC $ac_arg" + if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_c89=$ac_arg +fi +rm -f core conftest.err conftest.$ac_objext + test "x$ac_cv_prog_cc_c89" != "xno" && break +done +rm -f conftest.$ac_ext +CC=$ac_save_CC + +fi +# AC_CACHE_VAL +case "x$ac_cv_prog_cc_c89" in + x) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 +$as_echo "none needed" >&6; } ;; + xno) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 +$as_echo "unsupported" >&6; } ;; + *) + CC="$CC $ac_cv_prog_cc_c89" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 +$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; +esac +if test "x$ac_cv_prog_cc_c89" != xno; then : + +fi + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for an ANSI C-conforming const" >&5 +$as_echo_n "checking for an ANSI C-conforming const... " >&6; } +if ${ac_cv_c_const+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + +#ifndef __cplusplus + /* Ultrix mips cc rejects this sort of thing. */ + typedef int charset[2]; + const charset cs = { 0, 0 }; + /* SunOS 4.1.1 cc rejects this. */ + char const *const *pcpcc; + char **ppc; + /* NEC SVR4.0.2 mips cc rejects this. 
*/ + struct point {int x, y;}; + static struct point const zero = {0,0}; + /* AIX XL C 1.02.0.0 rejects this. + It does not let you subtract one const X* pointer from another in + an arm of an if-expression whose if-part is not a constant + expression */ + const char *g = "string"; + pcpcc = &g + (g ? g-g : 0); + /* HPUX 7.0 cc rejects these. */ + ++pcpcc; + ppc = (char**) pcpcc; + pcpcc = (char const *const *) ppc; + { /* SCO 3.2v4 cc rejects this sort of thing. */ + char tx; + char *t = &tx; + char const *s = 0 ? (char *) 0 : (char const *) 0; + + *t++ = 0; + if (s) return 0; + } + { /* Someone thinks the Sun supposedly-ANSI compiler will reject this. */ + int x[] = {25, 17}; + const int *foo = &x[0]; + ++foo; + } + { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */ + typedef const int *iptr; + iptr p = 0; + ++p; + } + { /* AIX XL C 1.02.0.0 rejects this sort of thing, saying + "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. */ + struct s { int j; const int *ap[3]; } bx; + struct s *b = &bx; b->j = 5; + } + { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ + const int foo = 10; + if (!foo) return 0; + } + return !cs[0] && !zero.x; +#endif + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_c_const=yes +else + ac_cv_c_const=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_const" >&5 +$as_echo "$ac_cv_c_const" >&6; } +if test $ac_cv_c_const = no; then + +$as_echo "#define const /**/" >>confdefs.h + +fi + + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 +$as_echo_n "checking how to run the C preprocessor... " >&6; } +# On Suns, sometimes $CPP names a directory. 
+if test -n "$CPP" && test -d "$CPP"; then + CPP= +fi +if test -z "$CPP"; then + if ${ac_cv_prog_CPP+:} false; then : + $as_echo_n "(cached) " >&6 +else + # Double quotes because CPP needs to be expanded + for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" + do + ac_preproc_ok=false +for ac_c_preproc_warn_flag in '' yes +do + # Use a header file that comes with gcc, so configuring glibc + # with a fresh cross-compiler works. + # Prefer to if __STDC__ is defined, since + # exists even on freestanding compilers. + # On the NeXT, cc -E runs the code through the compiler's parser, + # not just through cpp. "Syntax error" is here to catch this case. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#ifdef __STDC__ +# include +#else +# include +#endif + Syntax error +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + +else + # Broken: fails on valid input. +continue +fi +rm -f conftest.err conftest.i conftest.$ac_ext + + # OK, works on sane cases. Now check whether nonexistent headers + # can be detected and how. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + # Broken: success on invalid input. +continue +else + # Passes both tests. +ac_preproc_ok=: +break +fi +rm -f conftest.err conftest.i conftest.$ac_ext + +done +# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. +rm -f conftest.i conftest.err conftest.$ac_ext +if $ac_preproc_ok; then : + break +fi + + done + ac_cv_prog_CPP=$CPP + +fi + CPP=$ac_cv_prog_CPP +else + ac_cv_prog_CPP=$CPP +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 +$as_echo "$CPP" >&6; } +ac_preproc_ok=false +for ac_c_preproc_warn_flag in '' yes +do + # Use a header file that comes with gcc, so configuring glibc + # with a fresh cross-compiler works. + # Prefer to if __STDC__ is defined, since + # exists even on freestanding compilers. 
+ # On the NeXT, cc -E runs the code through the compiler's parser, + # not just through cpp. "Syntax error" is here to catch this case. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#ifdef __STDC__ +# include +#else +# include +#endif + Syntax error +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + +else + # Broken: fails on valid input. +continue +fi +rm -f conftest.err conftest.i conftest.$ac_ext + + # OK, works on sane cases. Now check whether nonexistent headers + # can be detected and how. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + # Broken: success on invalid input. +continue +else + # Passes both tests. +ac_preproc_ok=: +break +fi +rm -f conftest.err conftest.i conftest.$ac_ext + +done +# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. +rm -f conftest.i conftest.err conftest.$ac_ext +if $ac_preproc_ok; then : + +else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "C preprocessor \"$CPP\" fails sanity check +See \`config.log' for more details" "$LINENO" 5; } +fi + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 +$as_echo_n "checking for grep that handles long lines and -e... " >&6; } +if ${ac_cv_path_GREP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -z "$GREP"; then + ac_path_GREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_prog in grep ggrep; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_GREP" || continue +# Check for GNU ac_path_GREP and select it if it is found. + # Check for GNU $ac_path_GREP +case `"$ac_path_GREP" --version 2>&1` in +*GNU*) + ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo 'GREP' >> "conftest.nl" + "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_GREP_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_GREP="$ac_path_GREP" + ac_path_GREP_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_GREP_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_GREP"; then + as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 + fi +else + ac_cv_path_GREP=$GREP +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 +$as_echo "$ac_cv_path_GREP" >&6; } + GREP="$ac_cv_path_GREP" + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 +$as_echo_n "checking for egrep... 
" >&6; } +if ${ac_cv_path_EGREP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 + then ac_cv_path_EGREP="$GREP -E" + else + if test -z "$EGREP"; then + ac_path_EGREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in egrep; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_EGREP" || continue +# Check for GNU ac_path_EGREP and select it if it is found. + # Check for GNU $ac_path_EGREP +case `"$ac_path_EGREP" --version 2>&1` in +*GNU*) + ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo 'EGREP' >> "conftest.nl" + "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_EGREP_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_EGREP="$ac_path_EGREP" + ac_path_EGREP_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac + + $ac_path_EGREP_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_EGREP"; then + as_fn_error $? 
"no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 + fi +else + ac_cv_path_EGREP=$EGREP +fi + + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 +$as_echo "$ac_cv_path_EGREP" >&6; } + EGREP="$ac_cv_path_EGREP" + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 +$as_echo_n "checking for ANSI C header files... " >&6; } +if ${ac_cv_header_stdc+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +#include +#include + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_header_stdc=yes +else + ac_cv_header_stdc=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + +if test $ac_cv_header_stdc = yes; then + # SunOS 4.x string.h does not declare mem*, contrary to ANSI. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + +_ACEOF +if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | + $EGREP "memchr" >/dev/null 2>&1; then : + +else + ac_cv_header_stdc=no +fi +rm -f conftest* + +fi + +if test $ac_cv_header_stdc = yes; then + # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include + +_ACEOF +if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | + $EGREP "free" >/dev/null 2>&1; then : + +else + ac_cv_header_stdc=no +fi +rm -f conftest* + +fi + +if test $ac_cv_header_stdc = yes; then + # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. + if test "$cross_compiling" = yes; then : + : +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +#if ((' ' & 0x0FF) == 0x020) +# define ISLOWER(c) ('a' <= (c) && (c) <= 'z') +# define TOUPPER(c) (ISLOWER(c) ? 
'A' + ((c) - 'a') : (c)) +#else +# define ISLOWER(c) \ + (('a' <= (c) && (c) <= 'i') \ + || ('j' <= (c) && (c) <= 'r') \ + || ('s' <= (c) && (c) <= 'z')) +# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) +#endif + +#define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) +int +main () +{ + int i; + for (i = 0; i < 256; i++) + if (XOR (islower (i), ISLOWER (i)) + || toupper (i) != TOUPPER (i)) + return 2; + return 0; +} +_ACEOF +if ac_fn_c_try_run "$LINENO"; then : + +else + ac_cv_header_stdc=no +fi +rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ + conftest.$ac_objext conftest.beam conftest.$ac_ext +fi + +fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 +$as_echo "$ac_cv_header_stdc" >&6; } +if test $ac_cv_header_stdc = yes; then + +$as_echo "#define STDC_HEADERS 1" >>confdefs.h + +fi + +# On IRIX 5.3, sys/types and inttypes.h are conflicting. +for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ + inttypes.h stdint.h unistd.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default +" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF + +fi + +done + + +for ac_header in netinet/in.h arpa/nameser.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF + +fi + +done + +ac_fn_c_check_header_compile "$LINENO" "resolv.h" "ac_cv_header_resolv_h" " +#include +#ifdef HAVE_NETINET_IN_H +# include +#endif +#ifdef HAVE_ARPA_NAMESER_H +# include +#endif +#include + +" +if test "x$ac_cv_header_resolv_h" = xyes; then : + +else + as_fn_error $? 
"required header not found" "$LINENO" 5 +fi + + + +ac_fn_c_check_decl "$LINENO" "res_query" "ac_cv_have_decl_res_query" " +#include +#ifdef HAVE_NETINET_IN_H +# include +#endif +#ifdef HAVE_ARPA_NAMESER_H +# include +#endif +#include + +" +if test "x$ac_cv_have_decl_res_query" = xyes; then : + ac_have_decl=1 +else + ac_have_decl=0 +fi + +cat >>confdefs.h <<_ACEOF +#define HAVE_DECL_RES_QUERY $ac_have_decl +_ACEOF +ac_fn_c_check_decl "$LINENO" "res_nquery" "ac_cv_have_decl_res_nquery" " +#include +#ifdef HAVE_NETINET_IN_H +# include +#endif +#ifdef HAVE_ARPA_NAMESER_H +# include +#endif +#include + +" +if test "x$ac_cv_have_decl_res_nquery" = xyes; then : + ac_have_decl=1 +else + ac_have_decl=0 +fi + +cat >>confdefs.h <<_ACEOF +#define HAVE_DECL_RES_NQUERY $ac_have_decl +_ACEOF + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing res_query" >&5 +$as_echo_n "checking for library containing res_query... " >&6; } +if ${ac_cv_search_res_query+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_func_search_save_LIBS=$LIBS +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +#include +#ifdef HAVE_NETINET_IN_H +# include +#endif +#ifdef HAVE_ARPA_NAMESER_H +# include +#endif +#include + +int +main () +{ +res_query(0,0,0,0,0) + ; + return 0; +} +_ACEOF +for ac_lib in '' resolv bind; do + if test -z "$ac_lib"; then + ac_res="none required" + else + ac_res=-l$ac_lib + LIBS="-l$ac_lib $ac_func_search_save_LIBS" + fi + if ac_fn_c_try_link "$LINENO"; then : + ac_cv_search_res_query=$ac_res +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext + if ${ac_cv_search_res_query+:} false; then : + break +fi +done +if ${ac_cv_search_res_query+:} false; then : + +else + ac_cv_search_res_query=no +fi +rm conftest.$ac_ext +LIBS=$ac_func_search_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_res_query" >&5 +$as_echo "$ac_cv_search_res_query" >&6; } +ac_res=$ac_cv_search_res_query +if test "$ac_res" != no; then : + test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" + EXTRA_LIBS="$EXTRA_LIBS $ac_lib" +else + +as_fn_error $? "could not figure out which C library contains res_query(3)" "$LINENO" 5 + +fi + + + + +USE_RES_NQUERY=0 + +if test "x$ac_cv_have_decl_res_nquery" = xyes; then + +# The cast to long int works around a bug in the HP C Compiler +# version HP92453-01 B.11.11.23709.GP, which incorrectly rejects +# declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. +# This bug is HP SR number 8606223364. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking size of struct __res_state" >&5 +$as_echo_n "checking size of struct __res_state... 
" >&6; } +if ${ac_cv_sizeof_struct___res_state+:} false; then : + $as_echo_n "(cached) " >&6 +else + if ac_fn_c_compute_int "$LINENO" "(long int) (sizeof (struct __res_state))" "ac_cv_sizeof_struct___res_state" " +#include +#ifdef HAVE_NETINET_IN_H +# include +#endif +#ifdef HAVE_ARPA_NAMESER_H +# include +#endif +#include + +"; then : + +else + if test "$ac_cv_type_struct___res_state" = yes; then + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error 77 "cannot compute sizeof (struct __res_state) +See \`config.log' for more details" "$LINENO" 5; } + else + ac_cv_sizeof_struct___res_state=0 + fi +fi + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sizeof_struct___res_state" >&5 +$as_echo "$ac_cv_sizeof_struct___res_state" >&6; } + + + +cat >>confdefs.h <<_ACEOF +#define SIZEOF_STRUCT___RES_STATE $ac_cv_sizeof_struct___res_state +_ACEOF + + + +SIZEOF_RES_STATE="$ac_cv_sizeof_struct___res_state" + +if test "$SIZEOF_RES_STATE" -gt 0; then + +USE_RES_NQUERY=1 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing res_nquery" >&5 +$as_echo_n "checking for library containing res_nquery... " >&6; } +if ${ac_cv_search_res_nquery+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_func_search_save_LIBS=$LIBS +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + +#include +#ifdef HAVE_NETINET_IN_H +# include +#endif +#ifdef HAVE_ARPA_NAMESER_H +# include +#endif +#include + +int +main () +{ +res_nquery(0,0,0,0,0,0) + ; + return 0; +} +_ACEOF +for ac_lib in '' resolv bind; do + if test -z "$ac_lib"; then + ac_res="none required" + else + ac_res=-l$ac_lib + LIBS="-l$ac_lib $ac_func_search_save_LIBS" + fi + if ac_fn_c_try_link "$LINENO"; then : + ac_cv_search_res_nquery=$ac_res +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext + if ${ac_cv_search_res_nquery+:} false; then : + break +fi +done +if ${ac_cv_search_res_nquery+:} false; then : + +else + ac_cv_search_res_nquery=no +fi +rm conftest.$ac_ext +LIBS=$ac_func_search_save_LIBS +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_res_nquery" >&5 +$as_echo "$ac_cv_search_res_nquery" >&6; } +ac_res=$ac_cv_search_res_nquery +if test "$ac_res" != no; then : + test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" + EXTRA_LIBS="$EXTRA_LIBS $ac_lib" +else + +USE_RES_NQUERY=0 +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: could not figure out which C library contains res_nquery(3)" >&5 +$as_echo "$as_me: WARNING: could not figure out which C library contains res_nquery(3)" >&2;} + +fi + + +else + +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: could not determine sizeof(struct __res_state)" >&5 +$as_echo "$as_me: WARNING: could not determine sizeof(struct __res_state)" >&2;} + +fi + +fi + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking which DNS api to use" >&5 +$as_echo_n "checking which DNS api to use... " >&6; } + +case "x$USE_RES_NQUERY" in + x0) { $as_echo "$as_me:${as_lineno-$LINENO}: result: res_query(3)" >&5 +$as_echo "res_query(3)" >&6; } + CPP_OPTIONS="-DUSE_RES_NQUERY=0 -DSIZEOF_RES_STATE=0" + ;; + x1) { $as_echo "$as_me:${as_lineno-$LINENO}: result: res_nquery(3)" >&5 +$as_echo "res_nquery(3)" >&6; } + CPP_OPTIONS="-DUSE_RES_NQUERY=1 -DSIZEOF_RES_STATE=$SIZEOF_RES_STATE" + ;; + *) as_fn_error $? 
"no suitable DNS API detected" "$LINENO" 5 + ;; +esac + +cat >>confdefs.h <<_ACEOF +#define USE_RES_NQUERY $USE_RES_NQUERY +_ACEOF + + + + +ac_config_files="$ac_config_files resolv.buildinfo" + + +cat >confcache <<\_ACEOF +# This file is a shell script that caches the results of configure +# tests run on this system so they can be shared between configure +# scripts and configure runs, see configure's option --config-cache. +# It is not useful on other systems. If it contains results you don't +# want to keep, you may remove or edit it. +# +# config.status only pays attention to the cache file if you give it +# the --recheck option to rerun configure. +# +# `ac_cv_env_foo' variables (set or unset) will be overridden when +# loading this file, other *unset* `ac_cv_foo' will be assigned the +# following values. + +_ACEOF + +# The following way of writing the cache mishandles newlines in values, +# but we know of no workaround that is simple, portable, and efficient. +# So, we kill variables containing newlines. +# Ultrix sh set writes to stderr and can't be redirected directly, +# and sets the high bit in the cache file unless we assign to the vars. +( + for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + + (set) 2>&1 | + case $as_nl`(ac_space=' '; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + # `set' does not quote correctly, so add quotes: double-quote + # substitution turns \\\\ into \\, and sed turns \\ into \. 
+ sed -n \ + "s/'/'\\\\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" + ;; #( + *) + # `set' quotes correctly as required by POSIX, so do not add quotes. + sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) | + sed ' + /^ac_cv_env_/b end + t clear + :clear + s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ + t end + s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ + :end' >>confcache +if diff "$cache_file" confcache >/dev/null 2>&1; then :; else + if test -w "$cache_file"; then + if test "x$cache_file" != "x/dev/null"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 +$as_echo "$as_me: updating cache $cache_file" >&6;} + if test ! -f "$cache_file" || test -h "$cache_file"; then + cat confcache >"$cache_file" + else + case $cache_file in #( + */* | ?:*) + mv -f confcache "$cache_file"$$ && + mv -f "$cache_file"$$ "$cache_file" ;; #( + *) + mv -f confcache "$cache_file" ;; + esac + fi + fi + else + { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 +$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} + fi +fi +rm -f confcache + +test "x$prefix" = xNONE && prefix=$ac_default_prefix +# Let make expand exec_prefix. +test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' + +DEFS=-DHAVE_CONFIG_H + +ac_libobjs= +ac_ltlibobjs= +U= +for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue + # 1. Remove the extension, and $U if already installed. + ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' + ac_i=`$as_echo "$ac_i" | sed "$ac_script"` + # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR + # will be set to the directory where LIBOBJS objects are built. 
+ as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" + as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' +done +LIBOBJS=$ac_libobjs + +LTLIBOBJS=$ac_ltlibobjs + + + +: "${CONFIG_STATUS=./config.status}" +ac_write_fail=0 +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files $CONFIG_STATUS" +{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 +$as_echo "$as_me: creating $CONFIG_STATUS" >&6;} +as_write_fail=0 +cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 +#! $SHELL +# Generated by $as_me. +# Run this file to recreate the current configuration. +# Compiler output produced by configure, useful for debugging +# configure, is in config.log if it exists. + +debug=false +ac_cs_recheck=false +ac_cs_silent=false + +SHELL=\${CONFIG_SHELL-$SHELL} +export SHELL +_ASEOF +cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 +## -------------------- ## +## M4sh Initialization. ## +## -------------------- ## + +# Be more Bourne compatible +DUALCASE=1; export DUALCASE # for MKS sh +if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which + # is contrary to our usage. Disable this feature. + alias -g '${1+"$@"}'='"$@"' + setopt NO_GLOB_SUBST +else + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi + + +as_nl=' +' +export as_nl +# Printing a long string crashes Solaris 7 /usr/bin/printf. +as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo +# Prefer a ksh shell builtin over an external printf program on Solaris, +# but without wasting forks for bash or zsh. 
+if test -z "$BASH_VERSION$ZSH_VERSION" \ + && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='print -r --' + as_echo_n='print -rn --' +elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='printf %s\n' + as_echo_n='printf %s' +else + if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then + as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' + as_echo_n='/usr/ucb/echo -n' + else + as_echo_body='eval expr "X$1" : "X\\(.*\\)"' + as_echo_n_body='eval + arg=$1; + case $arg in #( + *"$as_nl"*) + expr "X$arg" : "X\\(.*\\)$as_nl"; + arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; + esac; + expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" + ' + export as_echo_n_body + as_echo_n='sh -c $as_echo_n_body as_echo' + fi + export as_echo_body + as_echo='sh -c $as_echo_body as_echo' +fi + +# The user is always right. +if test "${PATH_SEPARATOR+set}" != set; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } +fi + + +# IFS +# We need space, tab and new line, in precisely that order. Quoting is +# there to prevent editors from complaining about space-tab. +# (If _AS_PATH_WALK were called with IFS unset, it would disable word +# splitting by setting IFS to empty value.) +IFS=" "" $as_nl" + +# Find who we are. Look in the path if we contain no directory separator. +as_myself= +case $0 in #(( + *[\\/]* ) as_myself=$0 ;; + *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break + done +IFS=$as_save_IFS + + ;; +esac +# We did not find ourselves, most probably we were run as `sh COMMAND' +# in which case we are not to be found in the path. +if test "x$as_myself" = x; then + as_myself=$0 +fi +if test ! 
-f "$as_myself"; then + $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 +fi + +# Unset variables that we do not need and which cause bugs (e.g. in +# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" +# suppresses any "Segmentation fault" message there. '((' could +# trigger a bug in pdksh 5.2.14. +for as_var in BASH_ENV ENV MAIL MAILPATH +do eval test x\${$as_var+set} = xset \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done +PS1='$ ' +PS2='> ' +PS4='+ ' + +# NLS nuisances. +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# CDPATH. +(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. +as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + $as_echo "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. 
Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else + as_fn_arith () + { + as_val=`expr "$@" || test $? -eq 1` + } +fi # as_fn_arith + + +if expr a : '\(a\)' >/dev/null 2>&1 && + test "X`expr 00001 : '.*\(...\)'`" = X001; then + as_expr=expr +else + as_expr=false +fi + +if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then + as_basename=basename +else + as_basename=false +fi + +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi + +as_me=`$as_basename -- "$0" || +$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ + X"$0" : 'X\(//\)$' \| \ + X"$0" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X/"$0" | + sed '/^.*\/\([^/][^/]*\)\/*$/{ + s//\1/ + q + } + /^X\/\(\/\/\)$/{ + s//\1/ + q + } + /^X\/\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits + +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. 
+ xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file +else + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null +fi +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' +fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null + + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p +if mkdir -p . 
2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' +else + test -d ./-p && rmdir ./-p + as_mkdir_p=false +fi + + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +as_test_x='test -x' +as_executable_p=as_fn_executable_p + +# Sed expression to map a string onto a valid CPP name. +as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" + +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" + + +exec 6>&1 +## ----------------------------------- ## +## Main body of $CONFIG_STATUS script. ## +## ----------------------------------- ## +_ASEOF +test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# Save the log message, to keep $0 and so on meaningful, and to +# report actual input values of CONFIG_FILES etc. instead of their +# values after options handling. +ac_log=" +This file was extended by resolv $as_me 0.0, which was +generated by GNU Autoconf 2.69. Invocation command line was + + CONFIG_FILES = $CONFIG_FILES + CONFIG_HEADERS = $CONFIG_HEADERS + CONFIG_LINKS = $CONFIG_LINKS + CONFIG_COMMANDS = $CONFIG_COMMANDS + $ $0 $@ + +on `(hostname || uname -n) 2>/dev/null | sed 1q` +" + +_ACEOF + +case $ac_config_files in *" +"*) set x $ac_config_files; shift; ac_config_files=$*;; +esac + +case $ac_config_headers in *" +"*) set x $ac_config_headers; shift; ac_config_headers=$*;; +esac + + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +# Files that config.status was made for. +config_files="$ac_config_files" +config_headers="$ac_config_headers" + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +ac_cs_usage="\ +\`$as_me' instantiates files and other configuration actions +from templates according to the current configuration. 
Unless the files +and actions are specified as TAGs, all are instantiated by default. + +Usage: $0 [OPTION]... [TAG]... + + -h, --help print this help, then exit + -V, --version print version number and configuration settings, then exit + --config print configuration, then exit + -q, --quiet, --silent + do not print progress messages + -d, --debug don't remove temporary files + --recheck update $as_me by reconfiguring in the same conditions + --file=FILE[:TEMPLATE] + instantiate the configuration file FILE + --header=FILE[:TEMPLATE] + instantiate the configuration header FILE + +Configuration files: +$config_files + +Configuration headers: +$config_headers + +Report bugs to ." + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" +ac_cs_version="\\ +resolv config.status 0.0 +configured by $0, generated by GNU Autoconf 2.69, + with options \\"\$ac_cs_config\\" + +Copyright (C) 2012 Free Software Foundation, Inc. +This config.status script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it." + +ac_pwd='$ac_pwd' +srcdir='$srcdir' +test -n "\$AWK" || AWK=awk +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# The default lists apply if the user does not specify any file. +ac_need_defaults=: +while test $# != 0 +do + case $1 in + --*=?*) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` + ac_shift=: + ;; + --*=) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg= + ac_shift=: + ;; + *) + ac_option=$1 + ac_optarg=$2 + ac_shift=shift + ;; + esac + + case $ac_option in + # Handling of the options. 
+ -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) + ac_cs_recheck=: ;; + --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) + $as_echo "$ac_cs_version"; exit ;; + --config | --confi | --conf | --con | --co | --c ) + $as_echo "$ac_cs_config"; exit ;; + --debug | --debu | --deb | --de | --d | -d ) + debug=: ;; + --file | --fil | --fi | --f ) + $ac_shift + case $ac_optarg in + *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; + '') as_fn_error $? "missing file argument" ;; + esac + as_fn_append CONFIG_FILES " '$ac_optarg'" + ac_need_defaults=false;; + --header | --heade | --head | --hea ) + $ac_shift + case $ac_optarg in + *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + as_fn_append CONFIG_HEADERS " '$ac_optarg'" + ac_need_defaults=false;; + --he | --h) + # Conflict between --help and --header + as_fn_error $? "ambiguous option: \`$1' +Try \`$0 --help' for more information.";; + --help | --hel | -h ) + $as_echo "$ac_cs_usage"; exit ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil | --si | --s) + ac_cs_silent=: ;; + + # This is an error. + -*) as_fn_error $? "unrecognized option: \`$1' +Try \`$0 --help' for more information." ;; + + *) as_fn_append ac_config_targets " $1" + ac_need_defaults=false ;; + + esac + shift +done + +ac_configure_extra_args= + +if $ac_cs_silent; then + exec 6>/dev/null + ac_configure_extra_args="$ac_configure_extra_args --silent" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +if \$ac_cs_recheck; then + set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion + shift + \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 + CONFIG_SHELL='$SHELL' + export CONFIG_SHELL + exec "\$@" +fi + +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +exec 5>>config.log +{ + echo + sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX +## Running $as_me. 
## +_ASBOX + $as_echo "$ac_log" +} >&5 + +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 + +# Handling of arguments. +for ac_config_target in $ac_config_targets +do + case $ac_config_target in + "cbits/hs_resolv_config.h") CONFIG_HEADERS="$CONFIG_HEADERS cbits/hs_resolv_config.h" ;; + "resolv.buildinfo") CONFIG_FILES="$CONFIG_FILES resolv.buildinfo" ;; + + *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; + esac +done + + +# If the user did not use the arguments to specify the items to instantiate, +# then the envvar interface is used. Set only those that are not. +# We use the long form for the default assignment because of an extremely +# bizarre bug on SunOS 4.1.3. +if $ac_need_defaults; then + test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files + test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers +fi + +# Have a temporary directory for convenience. Make it in the build tree +# simply because there is no reason against having it here, and in addition, +# creating and moving files from /tmp can sometimes cause problems. +# Hook for its removal unless debugging. +# Note that there is a small window in which the directory will not be cleaned: +# after its creation but before its name has been assigned to `$tmp'. +$debug || +{ + tmp= ac_tmp= + trap 'exit_status=$? + : "${ac_tmp:=$tmp}" + { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status +' 0 + trap 'as_fn_exit 1' 1 2 13 15 +} +# Create a (secure) tmp directory for tmp files. + +{ + tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && + test -d "$tmp" +} || +{ + tmp=./conf$$-$RANDOM + (umask 077 && mkdir "$tmp") +} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 +ac_tmp=$tmp + +# Set up the scripts for CONFIG_FILES section. +# No need to generate them if there are no CONFIG_FILES. +# This happens for instance with `./config.status config.h'. 
+if test -n "$CONFIG_FILES"; then + + +ac_cr=`echo X | tr X '\015'` +# On cygwin, bash can eat \r inside `` if the user requested igncr. +# But we know of no other shell where ac_cr would be empty at this +# point, so we can use a bashism as a fallback. +if test "x$ac_cr" = x; then + eval ac_cr=\$\'\\r\' +fi +ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` +if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then + ac_cs_awk_cr='\\r' +else + ac_cs_awk_cr=$ac_cr +fi + +echo 'BEGIN {' >"$ac_tmp/subs1.awk" && +_ACEOF + + +{ + echo "cat >conf$$subs.awk <<_ACEOF" && + echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && + echo "_ACEOF" +} >conf$$subs.sh || + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 +ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` +ac_delim='%!_!# ' +for ac_last_try in false false false false false :; do + . ./conf$$subs.sh || + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 + + ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` + if test $ac_delim_n = $ac_delim_num; then + break + elif $ac_last_try; then + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 + else + ac_delim="$ac_delim!$ac_delim _$ac_delim!! 
" + fi +done +rm -f conf$$subs.sh + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && +_ACEOF +sed -n ' +h +s/^/S["/; s/!.*/"]=/ +p +g +s/^[^!]*!// +:repl +t repl +s/'"$ac_delim"'$// +t delim +:nl +h +s/\(.\{148\}\)..*/\1/ +t more1 +s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ +p +n +b repl +:more1 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t nl +:delim +h +s/\(.\{148\}\)..*/\1/ +t more2 +s/["\\]/\\&/g; s/^/"/; s/$/"/ +p +b +:more2 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t delim +' >$CONFIG_STATUS || ac_write_fail=1 +rm -f conf$$subs.awk +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +_ACAWK +cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && + for (key in S) S_is_set[key] = 1 + FS = "" + +} +{ + line = $ 0 + nfields = split(line, field, "@") + substed = 0 + len = length(field[1]) + for (i = 2; i < nfields; i++) { + key = field[i] + keylen = length(key) + if (S_is_set[key]) { + value = S[key] + line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) + len += length(value) + length(field[++i]) + substed = 1 + } else + len += 1 + keylen + } + + print line +} + +_ACAWK +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then + sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" +else + cat +fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ + || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 +_ACEOF + +# VPATH may cause trouble with some makes, so we remove sole $(srcdir), +# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and +# trailing colons and then remove the whole line if VPATH becomes empty +# (actually we leave an empty line to preserve line numbers). 
+if test "x$srcdir" = x.; then + ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ +h +s/// +s/^/:/ +s/[ ]*$/:/ +s/:\$(srcdir):/:/g +s/:\${srcdir}:/:/g +s/:@srcdir@:/:/g +s/^:*// +s/:*$// +x +s/\(=[ ]*\).*/\1/ +G +s/\n// +s/^[^=]*=[ ]*$// +}' +fi + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +fi # test -n "$CONFIG_FILES" + +# Set up the scripts for CONFIG_HEADERS section. +# No need to generate them if there are no CONFIG_HEADERS. +# This happens for instance with `./config.status Makefile'. +if test -n "$CONFIG_HEADERS"; then +cat >"$ac_tmp/defines.awk" <<\_ACAWK || +BEGIN { +_ACEOF + +# Transform confdefs.h into an awk script `defines.awk', embedded as +# here-document in config.status, that substitutes the proper values into +# config.h.in to produce config.h. + +# Create a delimiter string that does not exist in confdefs.h, to ease +# handling of long lines. +ac_delim='%!_!# ' +for ac_last_try in false false :; do + ac_tt=`sed -n "/$ac_delim/p" confdefs.h` + if test -z "$ac_tt"; then + break + elif $ac_last_try; then + as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5 + else + ac_delim="$ac_delim!$ac_delim _$ac_delim!! " + fi +done + +# For the awk script, D is an array of macro values keyed by name, +# likewise P contains macro parameters if any. Preserve backslash +# newline sequences. 
+ +ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* +sed -n ' +s/.\{148\}/&'"$ac_delim"'/g +t rset +:rset +s/^[ ]*#[ ]*define[ ][ ]*/ / +t def +d +:def +s/\\$// +t bsnl +s/["\\]/\\&/g +s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ +D["\1"]=" \3"/p +s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p +d +:bsnl +s/["\\]/\\&/g +s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ +D["\1"]=" \3\\\\\\n"\\/p +t cont +s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p +t cont +d +:cont +n +s/.\{148\}/&'"$ac_delim"'/g +t clear +:clear +s/\\$// +t bsnlc +s/["\\]/\\&/g; s/^/"/; s/$/"/p +d +:bsnlc +s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p +b cont +' >$CONFIG_STATUS || ac_write_fail=1 + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 + for (key in D) D_is_set[key] = 1 + FS = "" +} +/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { + line = \$ 0 + split(line, arg, " ") + if (arg[1] == "#") { + defundef = arg[2] + mac1 = arg[3] + } else { + defundef = substr(arg[1], 2) + mac1 = arg[2] + } + split(mac1, mac2, "(") #) + macro = mac2[1] + prefix = substr(line, 1, index(line, defundef) - 1) + if (D_is_set[macro]) { + # Preserve the white space surrounding the "#". + print prefix "define", macro P[macro] D[macro] + next + } else { + # Replace #undef with comments. This is necessary, for example, + # in the case of _POSIX_SOURCE, which is predefined and required + # on some systems where configure will not decide to define it. + if (defundef == "undef") { + print "/*", prefix defundef, macro, "*/" + next + } + } +} +{ print } +_ACAWK +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 + as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 +fi # test -n "$CONFIG_HEADERS" + + +eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS " +shift +for ac_tag +do + case $ac_tag in + :[FHLC]) ac_mode=$ac_tag; continue;; + esac + case $ac_mode$ac_tag in + :[FHL]*:*);; + :L* | :C*:*) as_fn_error $? 
"invalid tag \`$ac_tag'" "$LINENO" 5;; + :[FH]-) ac_tag=-:-;; + :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; + esac + ac_save_IFS=$IFS + IFS=: + set x $ac_tag + IFS=$ac_save_IFS + shift + ac_file=$1 + shift + + case $ac_mode in + :L) ac_source=$1;; + :[FH]) + ac_file_inputs= + for ac_f + do + case $ac_f in + -) ac_f="$ac_tmp/stdin";; + *) # Look for the file first in the build tree, then in the source tree + # (if the path is not absolute). The absolute path cannot be DOS-style, + # because $ac_f cannot contain `:'. + test -f "$ac_f" || + case $ac_f in + [\\/$]*) false;; + *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; + esac || + as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; + esac + case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac + as_fn_append ac_file_inputs " '$ac_f'" + done + + # Let's still pretend it is `configure' which instantiates (i.e., don't + # use $as_me), people would be surprised to read: + # /* config.h. Generated by config.status. */ + configure_input='Generated from '` + $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' + `' by configure.' + if test x"$ac_file" != x-; then + configure_input="$ac_file. $configure_input" + { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 +$as_echo "$as_me: creating $ac_file" >&6;} + fi + # Neutralize special characters interpreted by sed in replacement strings. + case $configure_input in #( + *\&* | *\|* | *\\* ) + ac_sed_conf_input=`$as_echo "$configure_input" | + sed 's/[\\\\&|]/\\\\&/g'`;; #( + *) ac_sed_conf_input=$configure_input;; + esac + + case $ac_tag in + *:-:* | *:-) cat >"$ac_tmp/stdin" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; + esac + ;; + esac + + ac_dir=`$as_dirname -- "$ac_file" || +$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$ac_file" : 'X\(//\)[^/]' \| \ + X"$ac_file" : 'X\(//\)$' \| \ + X"$ac_file" : 'X\(/\)' \| . 
2>/dev/null || +$as_echo X"$ac_file" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + as_dir="$ac_dir"; as_fn_mkdir_p + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. + ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. + ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + + case $ac_mode in + :F) + # + # CONFIG_FILE + # + +_ACEOF + +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# If the template does not know about datarootdir, expand it. +# FIXME: This hack should be removed a few years after 2.60. 
+ac_datarootdir_hack=; ac_datarootdir_seen= +ac_sed_dataroot=' +/datarootdir/ { + p + q +} +/@datadir@/p +/@docdir@/p +/@infodir@/p +/@localedir@/p +/@mandir@/p' +case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in +*datarootdir*) ac_datarootdir_seen=yes;; +*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 +$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} +_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 + ac_datarootdir_hack=' + s&@datadir@&$datadir&g + s&@docdir@&$docdir&g + s&@infodir@&$infodir&g + s&@localedir@&$localedir&g + s&@mandir@&$mandir&g + s&\\\${datarootdir}&$datarootdir&g' ;; +esac +_ACEOF + +# Neutralize VPATH when `$srcdir' = `.'. +# Shell code in configure.ac might set extrasub. +# FIXME: do we really want to maintain this feature? +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_sed_extra="$ac_vpsub +$extrasub +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +:t +/@[a-zA-Z_][a-zA-Z_0-9]*@/!b +s|@configure_input@|$ac_sed_conf_input|;t t +s&@top_builddir@&$ac_top_builddir_sub&;t t +s&@top_build_prefix@&$ac_top_build_prefix&;t t +s&@srcdir@&$ac_srcdir&;t t +s&@abs_srcdir@&$ac_abs_srcdir&;t t +s&@top_srcdir@&$ac_top_srcdir&;t t +s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t +s&@builddir@&$ac_builddir&;t t +s&@abs_builddir@&$ac_abs_builddir&;t t +s&@abs_top_builddir@&$ac_abs_top_builddir&;t t +$ac_datarootdir_hack +" +eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ + >$ac_tmp/out || as_fn_error $? 
"could not create $ac_file" "$LINENO" 5 + +test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && + { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && + { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ + "$ac_tmp/out"`; test -z "$ac_out"; } && + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined" >&5 +$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined" >&2;} + + rm -f "$ac_tmp/stdin" + case $ac_file in + -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; + *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; + esac \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + ;; + :H) + # + # CONFIG_HEADER + # + if test x"$ac_file" != x-; then + { + $as_echo "/* $configure_input */" \ + && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" + } >"$ac_tmp/config.h" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then + { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 +$as_echo "$as_me: $ac_file is unchanged" >&6;} + else + rm -f "$ac_file" + mv "$ac_tmp/config.h" "$ac_file" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + fi + else + $as_echo "/* $configure_input */" \ + && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \ + || as_fn_error $? "could not create -" "$LINENO" 5 + fi + ;; + + + esac + +done # for ac_tag + + +as_fn_exit 0 +_ACEOF +ac_clean_files=$ac_clean_files_save + +test $ac_write_fail = 0 || + as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 + + +# configure is writing to config.log, and then calls config.status. +# config.status does its own redirection, appending to config.log. 
+# Unfortunately, on DOS this fails, as config.log is still kept open +# by configure, so config.status won't be able to write to it; its +# output is simply discarded. So we exec the FD to /dev/null, +# effectively closing config.log, so it can be properly (re)opened and +# appended to by config.status. When coming back to configure, we +# need to make the FD available again. +if test "$no_create" != yes; then + ac_cs_success=: + ac_config_status_args= + test "$silent" = yes && + ac_config_status_args="$ac_config_status_args --quiet" + exec 5>/dev/null + $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false + exec 5>>config.log + # Use ||, not &&, to avoid exiting from the if with $? = 1, which + # would make configure fail if this is the last instruction. + $ac_cs_success || as_fn_exit 1 +fi +if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 +$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} +fi + + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/LICENSE cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/LICENSE --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/LICENSE 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/LICENSE 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1,22 @@ +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; either version 2 of the License (a copy +is included as `LICENSE.GPLv2`), or (at your option) any later +version. 
+ +As a special exception, the copyright holders of this library give you +permission to link this library with the independent modules that +constitute the +package and its dependencies for the purpose of producing the `cabal` +executable, regardless of the license terms of these independent +modules, and to copy and distribute the resulting `cabal` executable +under terms of your choice, provided that you also meet, for each +linked independent module, the terms and conditions of the license of +that module. An independent module is a module which is not derived +from or based on this library. If you modify this library, you may +extend this exception to your version of the library, but you are not +obliged to do so. If you do not wish to do so, delete this exception +statement from your version. + +The aforementioned special exception applies (at your option) +retroactively to all prior releases of this library. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/LICENSE.GPLv2 cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/LICENSE.GPLv2 --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/LICENSE.GPLv2 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/LICENSE.GPLv2 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1,339 @@ + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. 
This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. 
We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. 
+ +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. 
But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) 
+ +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. 
Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. 
Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. 
Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. 
It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. 
Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/LICENSE.GPLv3 cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/LICENSE.GPLv3 --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/LICENSE.GPLv3 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/LICENSE.GPLv3 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. 
If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. 
Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. 
If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. 
+ + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the 
material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. 
If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. 
+ + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. 
diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/resolv.buildinfo.in cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/resolv.buildinfo.in --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/resolv.buildinfo.in 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/resolv.buildinfo.in 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1,2 @@ +extra-libraries: @EXTRA_LIBS@ +cpp-options: @CPP_OPTIONS@ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/resolv.cabal cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/resolv.cabal --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/resolv.cabal 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/resolv.cabal 2018-11-26 08:43:28.000000000 +0000 @@ -0,0 +1,108 @@ +cabal-version: 1.12 + +name: resolv +version: 0.1.1.2 + +synopsis: Domain Name Service (DNS) lookup via the libresolv standard library routines +description: { + +This package implements an API for accessing +the [Domain Name Service (DNS)](https://tools.ietf.org/html/rfc1035) +resolver service via the standard @libresolv@ system library (whose +API is often available directly via the standard @libc@ C library) on +Unix systems. +. +This package also includes support for decoding message record types +as defined in the following RFCs: +. 
+- [RFC 1035](https://tools.ietf.org/html/rfc1035): Domain Names - Implementation And Specification +- [RFC 1183](https://tools.ietf.org/html/rfc1183): New DNS RR Definitions +- [RFC 2782](https://tools.ietf.org/html/rfc2782): A DNS RR for specifying the location of services (DNS SRV) +- [RFC 2915](https://tools.ietf.org/html/rfc2915): The Naming Authority Pointer (NAPTR) DNS Resource Record +- [RFC 3596](https://tools.ietf.org/html/rfc3596): DNS Extensions to Support IP Version 6 +- [RFC 4034](https://tools.ietf.org/html/rfc4034): Resource Records for the DNS Security Extensions +- [RFC 4255](https://tools.ietf.org/html/rfc4255): Using DNS to Securely Publish Secure Shell (SSH) Key Fingerprints +- [RFC 4408](https://tools.ietf.org/html/rfc4408): Sender Policy Framework (SPF) for Authorizing Use of Domains in E-Mail, Version 1 +- [RFC 5155](https://tools.ietf.org/html/rfc5155): DNS Security (DNSSEC) Hashed Authenticated Denial of Existence +- [RFC 6844](https://tools.ietf.org/html/rfc6844): DNS Certification Authority Authorization (CAA) Resource Record +- [RFC 6891](https://tools.ietf.org/html/rfc6891): Extension Mechanisms for DNS (EDNS(0)) +- [RFC 7553](https://tools.ietf.org/html/rfc7553): The Uniform Resource Identifier (URI) DNS Resource Record +. +For Windows, the package [windns](https://hackage.haskell.org/package/windns) +provides a compatible subset of this package's API. 
+} + +X-SPDX-License-Identifier: GPL-2.0-or-later +license: GPL-2 +license-files: LICENSE LICENSE.GPLv2 LICENSE.GPLv3 +author: Herbert Valerio Riedel +maintainer: hvr@gnu.org +category: Network +build-type: Configure +bug-reports: https://github.com/hvr/resolv/issues +extra-source-files: ChangeLog.md + +extra-source-files: cbits/hs_resolv.h + cbits/hs_resolv_config.h.in + testdata/msg/*.bin + testdata/msg/*.show + resolv.buildinfo.in + configure + +extra-tmp-files: autom4te.cache + config.log + config.status + resolv.buildinfo + cbits/hs_resolv_config.h + +tested-with: GHC==8.6.2, GHC==8.4.4, GHC==8.2.2, GHC==8.0.2, GHC==7.10.3, GHC==7.10.1, GHC==7.8.4, GHC==7.6.3, GHC==7.4.2 + +source-repository head + type: git + location: https://github.com/hvr/resolv.git + +library + default-language: Haskell2010 + other-extensions: BangPatterns + CApiFFI + CPP + DeriveDataTypeable + DeriveFoldable + DeriveFunctor + DeriveTraversable + GeneralizedNewtypeDeriving + OverloadedStrings + RecordWildCards + Trustworthy + + hs-source-dirs: src + exposed-modules: Network.DNS + other-modules: Network.DNS.Message + Network.DNS.FFI + Compat + + build-depends: base >= 4.5 && <4.13 + , base16-bytestring == 0.1.* + , binary >= 0.7.3 && < 0.9 + , bytestring >= 0.9.2 && < 0.11 + , containers >= 0.4.2.1 && < 0.7 + + ghc-options: -Wall + include-dirs: cbits + +test-suite resolv. 
+ default-language: Haskell2010 + hs-source-dirs: src-test + main-is: Tests1.hs + type: exitcode-stdio-1.0 + + -- dependencies whose version constraints are inherited via lib:resolv component + build-depends: resolv + , base + , bytestring + + -- additional dependencies not inherited + build-depends: tasty >= 1.1 && < 1.2 + , tasty-hunit >= 0.10 && < 0.11 + , directory >= 1.1.0.2 && < 1.4 + , filepath >= 1.3.0 && < 1.5 diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/Setup.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/Setup.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/Setup.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/Setup.hs 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMainWithHooks autoconfUserHooks diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/src/Compat.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/src/Compat.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/src/Compat.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/src/Compat.hs 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1,61 @@ +{-# LANGUAGE CPP #-} + +-- | +-- Copyright: © 2017 Herbert Valerio Riedel +-- SPDX-License-Identifier: GPL-2.0-or-later +module Compat + ( toStrict + , fromStrict + , guard + , replicateM + , unless + , when + , A.Applicative(..) + , (<$>) + , Mon.Monoid(..) 
+ , Foldable + , F.forM_ + , toList + , traverse + , T.Traversable + , module Data.Word + , module Data.Int + , module Data.Maybe + , putInt32be + , getInt32be + ) where + +--import qualified Data.ByteString.Lazy as BSL + +import Control.Applicative as A +import Control.Monad as M +import Data.Binary.Get +import Data.Binary.Put +#if MIN_VERSION_bytestring(0,10,0) +import Data.ByteString.Lazy (fromStrict, toStrict) +#else +import qualified Data.ByteString as BS +import qualified Data.ByteString.Lazy as BS.L +#endif +import Data.Foldable as F +import Data.Int +import Data.Maybe +import Data.Monoid as Mon (Monoid (..)) +import Data.Traversable as T +import Data.Word + +#if !(MIN_VERSION_bytestring(0,10,0)) +fromStrict :: BS.ByteString -> BS.L.ByteString +fromStrict = BS.L.fromChunks . (:[]) + +toStrict :: BS.L.ByteString -> BS.ByteString +toStrict = mconcat . BS.L.toChunks +#endif + +#if !MIN_VERSION_binary(0,8,1) +putInt32be :: Int32 -> Put +putInt32be x = putWord32be (fromIntegral x) + +getInt32be :: Get Int32 +getInt32be = fromIntegral <$> getWord32be +#endif diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/src/Network/DNS/FFI.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/src/Network/DNS/FFI.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/src/Network/DNS/FFI.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/src/Network/DNS/FFI.hs 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1,70 @@ +{-# LANGUAGE CApiFFI #-} +{-# LANGUAGE CPP #-} + +module Network.DNS.FFI where + +import Control.Concurrent.MVar +import Foreign.C +import Foreign.Marshal.Alloc +import Foreign.Ptr +import System.IO.Unsafe (unsafePerformIO) + +#if !defined(USE_RES_NQUERY) +# error USE_RES_NQUERY not defined +#endif + +{-# INLINE resIsReentrant #-} +-- | Whether the reentrant DNS resolver C API (e.g. @res_nquery(3)@, @res_nsend(3)@) is being used. 
+-- +-- If this this 'False', then as a fall-back +-- @res_query(3)@/@res_send(3)@ are used, protected by a global mutex. +-- +-- @since 0.1.1.0 +resIsReentrant :: Bool +#if USE_RES_NQUERY +resIsReentrant = True +#else +resIsReentrant = False +#endif + +#if !defined(SIZEOF_RES_STATE) +# error SIZEOF_RES_STATE not defined +#endif + +#if USE_RES_NQUERY && (SIZEOF_RES_STATE <= 0) +# error broken invariant +#endif + +{-# INLINE sizeOfResState #-} +sizeOfResState :: CSize +sizeOfResState = SIZEOF_RES_STATE + +data CResState + +{-# NOINLINE resolvLock #-} +resolvLock :: MVar () +resolvLock = unsafePerformIO $ newMVar () + +withCResState :: (Ptr CResState -> IO a) -> IO a +withCResState act + | resIsReentrant = allocaBytes (fromIntegral sizeOfResState) $ \ptr -> do + _ <- c_memset ptr 0 sizeOfResState + act ptr + | otherwise = withMVar resolvLock $ \() -> act nullPtr + + +-- void *memset(void *s, int c, size_t n); +foreign import capi unsafe "string.h memset" c_memset :: Ptr a -> CInt -> CSize -> IO (Ptr a) + +-- int res_query(void *, const char *dname, int class, int type, unsigned char *answer, int anslen); +foreign import capi safe "hs_resolv.h hs_res_query" c_res_query :: Ptr CResState -> CString -> CInt -> CInt -> Ptr CChar -> CInt -> IO CInt + +-- int res_send(void *, const unsigned char *msg, int msglen, unsigned char *answer, int anslen); +foreign import capi safe "hs_resolv.h hs_res_send" c_res_send :: Ptr CResState -> Ptr CChar -> CInt -> Ptr CChar -> CInt -> IO CInt + +-- int res_opt_set_use_dnssec(void *); +foreign import capi safe "hs_resolv.h res_opt_set_use_dnssec" c_res_opt_set_use_dnssec :: Ptr CResState -> IO CInt + +-- int hs_res_mkquery(void *, const char *dname, int class, int type, unsigned char *req, int reqlen0); +foreign import capi safe "hs_resolv.h hs_res_mkquery" c_res_mkquery :: Ptr CResState -> CString -> CInt -> CInt -> Ptr CChar -> CInt -> IO CInt + diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/src/Network/DNS/Message.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/src/Network/DNS/Message.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/src/Network/DNS/Message.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/src/Network/DNS/Message.hs 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1,1069 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE CApiFFI #-} +{-# LANGUAGE DeriveFoldable #-} +{-# LANGUAGE DeriveFunctor #-} +{-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} + +-- | +-- Copyright: © 2017 Herbert Valerio Riedel +-- SPDX-License-Identifier: GPL-2.0-or-later +-- +-- Internal module +module Network.DNS.Message where + +import qualified Data.ByteString.Base16 as B16 + +import qualified Data.ByteString as BS +import qualified Data.ByteString.Lazy as BSL +import Data.Function +import Data.List (groupBy) +import Data.String +import Numeric (showHex) +import Prelude + +import Data.Binary +import Data.Binary.Get +import Data.Binary.Put +import Data.Bits +import Data.Map (Map) +import qualified Data.Map as Map +import Data.Set (Set) +import qualified Data.Set as Set + +import Compat + +-- | An IPv6 address +-- +-- The IP address is represented in network order, +-- i.e. @2606:2800:220:1:248:1893:25c8:1946@ is +-- represented as @(IPv6 0x2606280002200001 0x248189325c81946)@. +data IPv6 = IPv6 !Word64 !Word64 + deriving (Eq,Ord,Read) + +instance Show IPv6 where + showsPrec p (IPv6 hi lo) = showParen (p >= 11) (showString "IPv6 0x" . showHex hi . showString " 0x" . showHex lo) + +instance Binary IPv6 where + put (IPv6 hi lo) = putWord64be hi >> putWord64be lo + get = IPv6 <$> getWord64be <*> getWord64be + +-- | An IPv4 address +-- +-- The IP address is represented in network order, i.e. 
@127.0.0.1@ is +-- represented as @(IPv4 0x7f000001)@. +data IPv4 = IPv4 !Word32 + deriving (Eq,Ord,Read) + +instance Show IPv4 where + showsPrec p (IPv4 n) = showParen (p >= 11) (showString "IPv4 0x" . showHex n) + +instance Binary IPv4 where + put (IPv4 w) = putWord32be w + get = IPv4 <$> getWord32be + +-- | @\@ as per [RFC 1035, section 3.3](https://tools.ietf.org/html/rfc1035#section-3.3). +-- +-- A domain-name represented as a series of labels separated by dots. +-- +-- See also 'Labels' for list-based representation. +-- +-- __NOTE__: The 'Labels' type is able to properly represent domain +-- names whose components contain dots which the 'Name' representation +-- cannot. +newtype Name = Name BS.ByteString + deriving (Read,Show,Eq,Ord) + +-- | @\@ as per [RFC 1035, section 3.3](https://tools.ietf.org/html/rfc1035#section-3.3). +-- +-- A sequence of up to 255 octets +-- +-- The limit of 255 octets is caused by the encoding which uses by a +-- prefixed octet denoting the length. +newtype CharStr = CharStr BS.ByteString + deriving (Eq,Ord,IsString) + +instance Show CharStr where + showsPrec p (CharStr bs) = showsPrec p bs + +instance Read CharStr where + readsPrec p = map (\(x,y) -> (CharStr x,y)) <$> readsPrec p + +instance Binary CharStr where + put (CharStr bs) + | BS.length bs > 0xff = fail "putString: string too long" + | otherwise = do + putWord8 (fromIntegral $ BS.length bs) + putByteString bs + get = do + len' <- getWord8 + CharStr <$> getByteString (fromIntegral len') + +{- Resource records + + -- https://en.wikipedia.org/wiki/List_of_DNS_record_types + + RFC 1035 + + A 1 a host address + NS 2 an authoritative name server + CNAME 5 the canonical name for an alias + SOA 6 marks the start of a zone of authority + PTR 12 a domain name pointer + MX 15 mail exchange + TXT 16 text strings + + RFC 3596 + + AAAA 28 IPv6 + + RFC 2782 + + SRV 33 Location of services + + ---- + + RFC3597 Handling of Unknown DNS Resource Record (RR) Types + +-} + +-- | Represents a 
DNS message as per [RFC 1035](https://tools.ietf.org/html/rfc1035) +data Msg l + = Msg + { msgHeader :: !MsgHeader + , msgQD :: [MsgQuestion l] + , msgAN, msgNS, msgAR :: [MsgRR l] + } deriving (Read,Show,Functor,Foldable,Traversable) + +-- | DNS message header section as per [RFC 1035, section 4.1.1](https://tools.ietf.org/html/rfc1035#section-4.1.1) +data MsgHeader + = MsgHeader + { mhId :: !Word16 + + , mhFlags :: !MsgHeaderFlags + + , mhQDCount :: !Word16 + , mhANCount :: !Word16 + , mhNSCount :: !Word16 + , mhARCount :: !Word16 + } deriving (Read,Show) + +-- | DNS message header section as per [RFC 1035, section 4.1.2](https://tools.ietf.org/html/rfc1035#section-4.1.2) +data MsgQuestion l + = MsgQuestion !l !Type !Class + deriving (Eq,Read,Show,Functor,Foldable,Traversable) + +-- | DNS message header flags as per [RFC 1035, section 4.1.1](https://tools.ietf.org/html/rfc1035#section-4.1.1) +data MsgHeaderFlags + = MsgHeaderFlags + { mhQR :: !QR + , mhOpcode :: !Word8 -- actually Word4 + , mhAA :: !Bool + , mhTC :: !Bool + , mhRD :: !Bool + , mhRA :: !Bool + , mhZ :: !Bool -- reserved/unused bit + , mhAD :: !Bool -- RFC4035 + , mhCD :: !Bool -- RFC4035 + , mhRCode :: !Word8 -- Word4 + } deriving (Read,Show) + +-- | DNS resource record section as per [RFC 1035, section 4.1.3](https://tools.ietf.org/html/rfc1035#section-4.1.3) +data MsgRR l + = MsgRR + { rrName :: !l + , rrClass :: !Class + , rrTTL :: !TTL + , rrData :: !(RData l) + } deriving (Eq,Read,Show,Functor,Foldable,Traversable) + +-- | DNS resource record data (see also 'MsgRR' and 'TypeSym') +data RData l + = RDataA !IPv4 + | RDataAAAA !IPv6 + | RDataCNAME !l + | RDataPTR !l + | RDataHINFO !CharStr !CharStr + | RDataNS !l + | RDataMX !Word16 !l + | RDataTXT ![CharStr] + | RDataSPF ![CharStr] + | RDataSOA !l !l !Word32 !Word32 !Word32 !Word32 !Word32 + | RDataSRV !(SRV l) + + -- RFC 1183 + | RDataAFSDB !Word16 !l + + -- RFC 2915 + | RDataNAPTR !Word16 !Word16 !CharStr !CharStr !CharStr !l + + -- RFC 7553 
+ | RDataURI !Word16 !Word16 !BS.ByteString + + -- RFC 4034 + | RDataRRSIG !Word16 !Word8 !Word8 !Word32 !Word32 !Word32 !Word16 !l !BS.ByteString + | RDataDNSKEY !Word16 !Word8 !Word8 !BS.ByteString + | RDataDS !Word16 !Word8 !Word8 !BS.ByteString + | RDataNSEC !l !(Set Type) + + -- RFC 4255 + | RDataSSHFP !Word8 !Word8 !BS.ByteString + + -- RFC 5155 + | RDataNSEC3PARAM !Word8 !Word8 !Word16 !CharStr + | RDataNSEC3 !Word8 !Word8 !Word16 !CharStr !CharStr !(Set Type) + + -- RFC 6844 + | RDataCAA !Word8 !CharStr !BS.ByteString + + -- pseudo-record + | RDataOPT !BS.ByteString -- FIXME + + -- unknown/unsupported + | RData !Type !BS.ByteString -- ^ Unknown/undecoded resource record type + deriving (Eq,Read,Show,Functor,Foldable,Traversable) + + +-- | @SRV@ Record data as per [RFC 2782](https://tools.ietf.org/html/rfc2782) +data SRV l = SRV { srvPriority :: !Word16 + , srvWeight :: !Word16 + , srvPort :: !Word16 + , srvTarget :: !l + } deriving (Eq,Read,Show,Functor,Foldable,Traversable) + +---------------------------------------------------------------------------- + +decodeMessage' :: BS.ByteString -> Maybe (Msg Labels) +decodeMessage' bs = do + (rest, _, v) <- either handleParseFail Just $ + decodeOrFail (fromStrict bs) + + -- don't allow trailing garbage + guard (BSL.null rest) + + let ofss = Set.fromList $ mapMaybe labelsPtr (toList v) + ofsmap <- retrieveLabelPtrs bs ofss + + traverse (resolveLabelPtr ofsmap) v + where + -- handleParseFail _ = Nothing + handleParseFail (rest, n, e) = error $ show (e, n, BSL.length rest, BS.length bs) ++ "\n" ++ show (B16.encode $ toStrict rest) + +-- | Decode a raw DNS message (query or response) +-- +-- Returns 'Nothing' on decoding failures. +decodeMessage :: IsLabels n => BS.ByteString -> Maybe (Msg n) +decodeMessage = fmap (fmap fromLabels) . 
decodeMessage' + +encodeMessage' :: Msg Labels -> BS.ByteString +encodeMessage' m = toStrict $ encode (fmap labels2labelsPtr m) + +-- | Construct a raw DNS message (query or response) +-- +-- May return 'Nothing' in input parameters are detected to be invalid. +encodeMessage :: IsLabels n => Msg n -> Maybe BS.ByteString +encodeMessage m = encodeMessage' <$> traverse toLabels m + + +instance Binary l => Binary (Msg l) where + get = do + hdr@MsgHeader{..} <- get + + Msg hdr <$> replicateM (fromIntegral mhQDCount) get + <*> replicateM (fromIntegral mhANCount) get + <*> replicateM (fromIntegral mhNSCount) get + <*> replicateM (fromIntegral mhARCount) get + + put (Msg hdr qds ans nss ars) = do + put hdr + mapM_ put qds + mapM_ put ans + mapM_ put nss + mapM_ put ars + +instance Binary MsgHeader where + get = MsgHeader <$> getWord16be + <*> get + <*> getWord16be + <*> getWord16be + <*> getWord16be + <*> getWord16be + + put (MsgHeader{..}) = do + putWord16be mhId + put mhFlags + putWord16be mhQDCount + putWord16be mhANCount + putWord16be mhNSCount + putWord16be mhARCount + +instance Binary MsgHeaderFlags where + put = putWord16be . encodeFlags + get = decodeFlags <$> getWord16be + +-- | Decode message header flag field +-- +-- > +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+ +-- > |QR| Opcode |AA|TC|RD|RA|??|AD|CD| RCODE | +-- > +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+ +-- +decodeFlags :: Word16 -> MsgHeaderFlags +decodeFlags w = MsgHeaderFlags{..} + where + mhQR = if testBit w 15 then IsResponse else IsQuery + mhOpcode = shiftR' 11 .&. 0xf + mhAA = testBit w 10 + mhTC = testBit w 9 + mhRD = testBit w 8 + mhRA = testBit w 7 + mhZ = testBit w 6 + mhAD = testBit w 5 + mhCD = testBit w 4 + mhRCode = fromIntegral w .&. 0xf + + shiftR' = fromIntegral . shiftR w + +encodeFlags :: MsgHeaderFlags -> Word16 +encodeFlags MsgHeaderFlags{..} = + (case mhQR of + IsResponse -> bit 15 + IsQuery -> 0) .|. + (fromIntegral mhOpcode `shiftL` 11) .|. 
+ (if mhAA then bit 10 else 0) .|. + (if mhTC then bit 9 else 0) .|. + (if mhRD then bit 8 else 0) .|. + (if mhRA then bit 7 else 0) .|. + (if mhZ then bit 6 else 0) .|. + (if mhAD then bit 5 else 0) .|. + (if mhCD then bit 4 else 0) .|. + (fromIntegral mhRCode) + +-- | Encodes whether message is a query or a response +-- +-- @since 0.1.1.0 +data QR = IsQuery | IsResponse + deriving (Eq,Read,Show) + +---------------------------------------------------------------------------- + +infixr 5 :.: + +-- | A DNS Label +-- +-- Must be non-empty and at most 63 octets. +type Label = BS.ByteString + +-- | A @@ as per [RFC 1035, section 3.3](https://tools.ietf.org/html/rfc1035#section-3.3) expressed as list of 'Label's. +-- +-- See also 'Name' +data Labels = !Label :.: !Labels | Root + deriving (Read,Show,Eq,Ord) + +labelsToList :: Labels -> [Label] +labelsToList (x :.: xs) = x : labelsToList xs +labelsToList Root = [""] + +-- | Types that represent @@ as per [RFC 1035, section 3.3](https://tools.ietf.org/html/rfc1035#section-3.3) and can be converted to and from 'Labels'. +class IsLabels s where + toLabels :: s -> Maybe Labels + fromLabels :: Labels -> s + +instance IsLabels Labels where + fromLabels = id + + toLabels ls + | all isLabelValid (init (labelsToList ls)) = Just ls + | otherwise = Nothing + where + isLabelValid l = not (BS.null l) && BS.length l < 0x40 + +instance IsLabels Name where + fromLabels = labels2name + toLabels = name2labels + +toName :: IsLabels n => n -> Maybe Name +toName = fmap fromLabels . toLabels + +name2labels :: Name -> Maybe Labels +name2labels (Name n) + | all (\l -> not (BS.null l) && BS.length l < 0x40) n' = Just $! foldr (:.:) Root n' + | otherwise = Nothing + where + n' | BS.isSuffixOf "." n = BS.split 0x2e (BS.init n) + | otherwise = BS.split 0x2e n + +labels2name :: Labels -> Name +labels2name Root = Name "." +labels2name ls = Name (BS.intercalate "." 
$ labelsToList ls) + +-- | IOW, a domain-name +-- +-- May contain pointers +-- +-- Can be resolved into a 'Labels' without label ptrs. +data LabelsPtr = Label !Label !LabelsPtr -- ^ See RC2181: a label must be between 1-63 octets; can be arbitrary binary data + | LPtr !Word16 + | LNul + deriving (Eq,Read,Show) + +labels2labelsPtr :: Labels -> LabelsPtr +labels2labelsPtr Root = LNul +labels2labelsPtr (l :.: rest) = Label l (labels2labelsPtr rest) + +instance Binary LabelsPtr where + get = go [] + where + go acc = do + l0 <- getLabel + case l0 of + Right bs | BS.null bs -> pure (foldr Label LNul $ reverse acc) + | otherwise -> go (bs:acc) + Left ofs -> pure (foldr Label (LPtr ofs) $ reverse acc) + + getLabel :: Get (Either Word16 BS.ByteString) + getLabel = do + len <- getWord8 + + if len >= 0x40 + then do + when (len .&. 0xc0 /= 0xc0) $ fail ("invalid length octet " ++ show len) + ofs <- fromIntegral <$> getWord8 + pure $ Left $ (fromIntegral (len .&. 0x3f) `shiftL` 8) .|. ofs + else Right <$> getByteString (fromIntegral len) + + put LNul = putWord8 0 + put (Label l next) + | BS.length l < 1 || BS.length l >= 0x40 = error "put (Label {}): invalid label size" + | otherwise = do + putWord8 (fromIntegral (BS.length l)) + putByteString l + put next + put (LPtr ofs) + | ofs < 0x4000 = putWord16be (0xc000 .|. ofs) + | otherwise = error "put (LPtr {}): invalid offset" + +-- | Compute serialised size of 'LabelsPtr' +labelsSize :: LabelsPtr -> Word16 +labelsSize = fromIntegral . 
go 0 + where + go n (LPtr _) = n+2 + go n LNul = n+1 + go n (Label bs rest) = go (n + 1 + BS.length bs) rest + +-- | Extract pointer-offset from 'LabelsPtr' (if it exists) +labelsPtr :: LabelsPtr -> Maybe Word16 +labelsPtr (Label _ ls) = labelsPtr ls +labelsPtr LNul = Nothing +labelsPtr (LPtr ofs) = Just ofs + +---------------------------------------------------------------------------- + +instance Binary l => Binary (MsgQuestion l) where + get = MsgQuestion <$> get <*> get <*> get + put (MsgQuestion l qt cls) = put l >> put qt >> put cls + + +instance Binary l => Binary (MsgRR l) where + get = do + rrName <- get + rrType <- get + rrClass <- get + rrTTL <- get + rrData <- getRData rrType + pure (MsgRR {..}) + + put (MsgRR{..}) = do + put rrName + put (either id typeFromSym $ rdType rrData) + put rrClass + put rrTTL + putRData rrData + +getRData :: Binary l => Type -> Get (RData l) +getRData qt = do + len <- fromIntegral <$> getWord16be + + let unknownRdata = RData qt <$> getByteString len + + getByteStringRest = consumeRestWith getByteString + + consumeRestWith act = do + curofs <- fromIntegral <$> bytesRead + act (len - curofs) + + isolate len $ + case typeToSym qt of + Nothing -> unknownRdata + Just ts -> case ts of + TypeA -> RDataA <$> get + + TypeAFSDB -> RDataAFSDB <$> getWord16be + <*> get + + TypeNS -> RDataNS <$> get + + TypeCNAME -> RDataCNAME <$> get + + TypeSOA -> RDataSOA <$> get + <*> get + <*> getWord32be + <*> getWord32be + <*> getWord32be + <*> getWord32be + <*> getWord32be + + TypePTR -> RDataPTR <$> get + + TypeHINFO -> RDataHINFO <$> get + <*> get + + TypeMX -> RDataMX <$> getWord16be + <*> get + + TypeTXT -> RDataTXT <$> getUntilEmpty + TypeSPF -> RDataSPF <$> getUntilEmpty + + TypeAAAA -> RDataAAAA <$> get + + TypeSRV -> RDataSRV <$> get + + TypeNAPTR -> RDataNAPTR <$> getWord16be -- order + <*> getWord16be --preference + <*> get -- flags + <*> get -- services + <*> get -- regexp + <*> get -- replacement + + TypeRRSIG -> RDataRRSIG <$> 
getWord16be + <*> getWord8 + <*> getWord8 + <*> getWord32be + <*> getWord32be + <*> getWord32be + <*> getWord16be + <*> get -- uncompressed + <*> getByteStringRest + + TypeDNSKEY -> RDataDNSKEY <$> getWord16be + <*> getWord8 + <*> getWord8 + <*> getByteString (len - 4) + + TypeDS -> RDataDS <$> getWord16be + <*> getWord8 + <*> getWord8 + <*> getByteString (len - 4) + + TypeNSEC -> RDataNSEC <$> get + <*> decodeNsecTypeMap + + TypeURI -> RDataURI <$> getWord16be -- prio + <*> getWord16be -- weight + <*> getByteString (len - 4) + + TypeSSHFP -> RDataSSHFP <$> getWord8 + <*> getWord8 + <*> getByteString (len - 2) + + TypeNSEC3PARAM -> RDataNSEC3PARAM <$> getWord8 + <*> getWord8 + <*> getWord16be + <*> get -- salt + + TypeNSEC3 -> RDataNSEC3 <$> getWord8 + <*> getWord8 + <*> getWord16be + <*> get -- salt + <*> get -- next hashed owner name + <*> decodeNsecTypeMap + + TypeCAA -> RDataCAA <$> getWord8 -- flags + <*> get -- tag -- TODO: must be non-empty + <*> getByteStringRest + + TypeOPT -> RDataOPT <$> getByteString len -- FIXME + + TypeANY -> unknownRdata -- shouldn't happen + +putRData :: Binary l => RData l -> Put +putRData rd = do + let rdata = runPut (putRData' rd) + rdataLen = BSL.length rdata + + unless (rdataLen < 0x10000) $ + fail "rdata too large" + + putWord16be (fromIntegral rdataLen) + putLazyByteString rdata + +putRData' :: Binary l => RData l -> Put +putRData' rd = case rd of + RDataA ip4 -> put ip4 + RDataAAAA ip6 -> put ip6 + RDataCNAME cname -> put cname + RDataOPT d -> putByteString d + RDataMX prio l -> putWord16be prio >> put l + RDataSOA l1 l2 w1 w2 w3 w4 w5 -> do + put l1 + put l2 + putWord32be w1 + putWord32be w2 + putWord32be w3 + putWord32be w4 + putWord32be w5 + + RDataPTR l -> put l + RDataNS l -> put l + RDataTXT ss -> mapM_ put ss + RDataSPF ss -> mapM_ put ss + RDataSRV srv -> put srv + + RDataAFSDB w l -> putWord16be w >> put l + + RDataHINFO s1 s2 -> put s1 >> put s2 + + RDataRRSIG w1 w2 w3 w4 w5 w6 w7 l s -> do + putWord16be w1 + 
putWord8 w2 + putWord8 w3 + putWord32be w4 + putWord32be w5 + putWord32be w6 + putWord16be w7 + put l + putByteString s + + RDataDNSKEY w1 w2 w3 s -> do + putWord16be w1 + putWord8 w2 + putWord8 w3 + putByteString s + + RDataNSEC3PARAM w1 w2 w3 s -> do + putWord8 w1 + putWord8 w2 + putWord16be w3 + put s + + RDataNSEC3 w1 w2 w3 s1 s2 tm -> do + putWord8 w1 + putWord8 w2 + putWord16be w3 + put s1 + put s2 + encodeNsecTypeMap tm + + RDataCAA fl s1 s2 -> do + putWord8 fl + put s1 + putByteString s2 + + RDataURI w1 w2 s -> do + putWord16be w1 + putWord16be w2 + putByteString s + + RDataDS w1 w2 w3 s -> do + putWord16be w1 + putWord8 w2 + putWord8 w3 + putByteString s + + RDataNSEC l tm -> do + put l + encodeNsecTypeMap tm + + RDataNAPTR w1 w2 s1 s2 s3 l -> do + putWord16be w1 + putWord16be w2 + put s1 + put s2 + put s3 + put l + + RDataSSHFP w1 w2 s -> do + putWord8 w1 + putWord8 w2 + putByteString s + + RData _ raw -> putByteString raw + + -- _ -> error ("putRData: " ++ show rd) + + +instance Binary l => Binary (SRV l) where + get = SRV <$> getWord16be + <*> getWord16be + <*> getWord16be + <*> get + + put (SRV w1 w2 w3 l) = do + putWord16be w1 + putWord16be w2 + putWord16be w3 + put l + +{- NSEC type-bitmap example: + + A NS SOA TXT AAAA RRSIG NSEC DNSKEY + +'00 07 62 00 80 08 00 03 80' +'00000000 00000111 01100010 00000000 10000000 00001000 00000000 00000011 10000000' + Win=#0 len=7 ^{SOA} ^{TXT} ^{AAAA} ^{DNSKEY} + ^^{A,NS} ^^{RRSIG,NSEC} +-} + +decodeNsecTypeMap :: Get (Set Type) +decodeNsecTypeMap = do + r <- concat <$> untilEmptyWith decode1 + -- TODO: enforce uniqueness + pure (Set.fromList r) + where + -- decode single window + decode1 = do + wi <- getWord8 + l <- getWord8 + unless (0 < l && l <= 32) $ + fail "invalid bitmap length" + + bmap <- getByteString (fromIntegral l) + + let winofs = (fromIntegral wi)*0x100 :: Int + lst = [ Type (fromIntegral (winofs+j*8+7-i)) + | (j,x) <- zip [0..] 
(BS.unpack bmap) + , i <- [7,6..0] + , testBit x i ] + + pure lst + +encodeNsecTypeMap :: Set Type -> Put +encodeNsecTypeMap bmap = do + when (Set.null bmap) $ fail "invalid empty type-map" + -- when (Set.member 0 bmap) $ fail "invalid TYPE0 set in type-map" + -- TODO: verify that Meta-TYPES and QTYPEs aren't contained in bmap + + forM_ (Map.toList bmap') $ \(wi, tm) -> do + putWord8 wi + put (CharStr $ BS.pack tm) + where + bmap' = fmap set2bitmap . splitToBlocks $ Set.map (\(Type w)->w) bmap + +set2bitmap :: Set Word8 -> [Word8] +set2bitmap = go 0 0 . Set.toList + where + go _ acc [] = if acc == 0 then [] else [acc] + go j acc (i:is) + | j' > j = acc : go (j+1) 0 (i:is) + | j' == j = go j' (acc .|. bit (7 - fromIntegral i')) is + | otherwise = error "set2bitmap: the impossible happened" + where + (j',i') = i `quotRem` 8 + +splitToBlocks :: Set Word16 -> Map Word8 (Set Word8) +splitToBlocks js = Map.fromList $ map (\xs -> (fst $ head xs, Set.fromList (map snd xs))) js' + where + hi16 :: Word16 -> Word8 + hi16 = fromIntegral . flip shiftR 8 + + lo16 :: Word16 -> Word8 + lo16 = fromIntegral . (.&. 
0xff) + + js' :: [[(Word8,Word8)]] + js' = groupBy ((==) `on` fst) (map ((,) <$> hi16 <*> lo16) (Set.toList js)) + + +-- | Resolves/parses label pointer used for label compressing +-- +-- Returns 'Nothing' on failure +retrieveLabelPtr :: BS.ByteString -> Word16 -> Maybe LabelsPtr +retrieveLabelPtr msg ofs + = case decodeOrFail (fromStrict $ BS.drop (fromIntegral ofs) msg) of + Left _ -> Nothing + Right (_, _, v) -> Just v + +-- | Resolve set of label pointer offsets +-- +-- Invariants (/iff/ result is not 'Nothing') +-- +-- * all requested offsets will be contained in the result map +-- +-- * any offsets contained in the resolved 'Labels' will be part of +-- the result map as well +-- +-- NB: No cycle detection is performed, nor are 'Labels' flattened +retrieveLabelPtrs :: BS.ByteString -> Set Word16 -> Maybe (Map Word16 LabelsPtr) +retrieveLabelPtrs msg ofss0 = go =<< lupPtrs1 ofss0 + where + go :: Map Word16 LabelsPtr -> Maybe (Map Word16 LabelsPtr) + go m0 = do + let missingOfss = Set.fromList (mapMaybe labelsPtr (toList m0)) Set.\\ Map.keysSet m0 + + if Set.null missingOfss + then pure m0 -- fix-point reached + else do + m1 <- lupPtrs1 missingOfss + go (Map.union m0 m1) + + -- single lookup step + lupPtrs1 :: Set Word16 -> Maybe (Map Word16 LabelsPtr) + lupPtrs1 ofss1 = Map.fromList . zip (toList ofss1) <$> traverse (retrieveLabelPtr msg) (toList ofss1) + +-- | Checks for maximum name length (255) and (therefore indirectly) cycle-checking +resolveLabelPtr :: Map Word16 LabelsPtr -> LabelsPtr -> Maybe Labels +resolveLabelPtr ofsmap = go 0 [] + where + go :: Int -> [BS.ByteString] -> LabelsPtr -> Maybe Labels + go !n acc (Label x ls) = go (n+1+BS.length x) (x:acc) ls + go n acc LNul + | n < 255 = Just $! 
foldr (:.:) Root (reverse acc) + | otherwise = Nothing -- length violation + go n acc (LPtr ofs) + | n < 255 = go n acc =<< lup ofs + | otherwise = Nothing + + lup :: Word16 -> Maybe LabelsPtr + lup ofs = Map.lookup ofs ofsmap + + +{- Resource records + + -- https://en.wikipedia.org/wiki/List_of_DNS_record_types + + RFC 1035 + + A 1 a host address + NS 2 an authoritative name server + CNAME 5 the canonical name for an alias + SOA 6 marks the start of a zone of authority + PTR 12 a domain name pointer + MX 15 mail exchange + TXT 16 text strings + + RFC 3596 + + AAAA 28 IPv6 + + RFC 2782 + + SRV 33 Location of services + + ---- + + RFC3597 Handling of Unknown DNS Resource Record (RR) Types + +-} + +-- | Raw DNS record type code +-- +-- See also 'TypeSym' +newtype Type = Type Word16 + deriving (Eq,Ord,Read,Show) + +instance Binary Type where + put (Type w) = putWord16be w + get = Type <$> getWord16be + +-- | DNS @CLASS@ code as per [RFC 1035, section 3.2.4](https://tools.ietf.org/html/rfc1035#section-3.2.4) +-- +-- The most commonly used value is 'classIN'. 
+newtype Class = Class Word16 + deriving (Eq,Ord,Read,Show) + +-- | The 'Class' constant for @IN@ (Internet) +classIN :: Class +classIN = Class 1 + +instance Binary Class where + put (Class w) = putWord16be w + get = Class <$> getWord16be + +-- | Cache time-to-live expressed in seconds +newtype TTL = TTL Int32 + deriving (Eq,Ord,Read,Show) + +instance Binary TTL where + put (TTL i) = putInt32be i + get = TTL <$> getInt32be + +-- http://www.bind9.net/dns-parameters + +-- | Symbolic DNS record type +data TypeSym + = TypeA -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) + | TypeAAAA -- ^ [RFC 3596](https://tools.ietf.org/html/rfc3596) + | TypeAFSDB -- ^ [RFC 1183](https://tools.ietf.org/html/rfc1183) + | TypeANY -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) (query) + | TypeCAA -- ^ [RFC 6844](https://tools.ietf.org/html/rfc6844) + | TypeCNAME -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) + | TypeDNSKEY -- ^ [RFC 4034](https://tools.ietf.org/html/rfc4034) + | TypeDS -- ^ [RFC 4034](https://tools.ietf.org/html/rfc4034) + | TypeHINFO -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) + | TypeMX -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) + | TypeNAPTR -- ^ [RFC 2915](https://tools.ietf.org/html/rfc2915) + | TypeNS -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) + | TypeNSEC -- ^ [RFC 4034](https://tools.ietf.org/html/rfc4034) + | TypeNSEC3 -- ^ [RFC 5155](https://tools.ietf.org/html/rfc5155) + | TypeNSEC3PARAM -- ^ [RFC 5155](https://tools.ietf.org/html/rfc5155) + | TypeOPT -- ^ [RFC 6891](https://tools.ietf.org/html/rfc6891) (meta) + | TypePTR -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) + | TypeRRSIG -- ^ [RFC 4034](https://tools.ietf.org/html/rfc4034) + | TypeSOA -- ^ [RFC 1035](https://tools.ietf.org/html/rfc1035) + | TypeSPF -- ^ [RFC 4408](https://tools.ietf.org/html/rfc4408) + | TypeSRV -- ^ [RFC 2782](https://tools.ietf.org/html/rfc2782) + | TypeSSHFP -- ^ [RFC 4255](https://tools.ietf.org/html/rfc4255) + | TypeTXT -- ^ 
[RFC 1035](https://tools.ietf.org/html/rfc1035) + | TypeURI -- ^ [RFC 7553](https://tools.ietf.org/html/rfc7553) + deriving (Eq,Ord,Enum,Bounded,Read,Show) + +-- | Convert symbolic 'TypeSym' to numeric 'Type' code +typeFromSym :: TypeSym -> Type +typeFromSym ts = Type $ case ts of + TypeA -> 1 + TypeNS -> 2 + TypeCNAME -> 5 + TypeSOA -> 6 + TypePTR -> 12 + TypeHINFO -> 13 + TypeMX -> 15 + TypeTXT -> 16 + TypeAFSDB -> 18 + TypeAAAA -> 28 + TypeSRV -> 33 + TypeNAPTR -> 35 + TypeOPT -> 41 + TypeDS -> 43 + TypeSSHFP -> 44 + TypeRRSIG -> 46 + TypeNSEC -> 47 + TypeDNSKEY -> 48 + TypeNSEC3 -> 50 + TypeNSEC3PARAM -> 51 + TypeSPF -> 99 + TypeANY -> 255 + TypeURI -> 256 + TypeCAA -> 257 + +-- | Convert 'Type' code to symbolic 'TypeSym' +typeToSym :: Type -> Maybe TypeSym +typeToSym (Type w) = case w of + 1 -> Just TypeA + 2 -> Just TypeNS + 5 -> Just TypeCNAME + 6 -> Just TypeSOA + 12 -> Just TypePTR + 13 -> Just TypeHINFO + 15 -> Just TypeMX + 16 -> Just TypeTXT + 18 -> Just TypeAFSDB + 28 -> Just TypeAAAA + 33 -> Just TypeSRV + 35 -> Just TypeNAPTR + 41 -> Just TypeOPT + 43 -> Just TypeDS + 44 -> Just TypeSSHFP + 46 -> Just TypeRRSIG + 47 -> Just TypeNSEC + 48 -> Just TypeDNSKEY + 50 -> Just TypeNSEC3 + 51 -> Just TypeNSEC3PARAM + 99 -> Just TypeSPF + 255 -> Just TypeANY + 256 -> Just TypeURI + 257 -> Just TypeCAA + _ -> Nothing + +-- | Extract the resource record type of a 'RData' object +rdType :: RData l -> Either Type TypeSym +rdType rd = case rd of + RDataA {} -> Right TypeA + RDataAAAA {} -> Right TypeAAAA + RDataAFSDB {} -> Right TypeAFSDB + RDataCAA {} -> Right TypeCAA + RDataCNAME {} -> Right TypeCNAME + RDataDNSKEY {} -> Right TypeDNSKEY + RDataDS {} -> Right TypeDS + RDataHINFO {} -> Right TypeHINFO + RDataMX {} -> Right TypeMX + RDataNAPTR {} -> Right TypeNAPTR + RDataNS {} -> Right TypeNS + RDataNSEC {} -> Right TypeNSEC + RDataNSEC3 {} -> Right TypeNSEC3 + RDataNSEC3PARAM {} -> Right TypeNSEC3PARAM + RDataOPT {} -> Right TypeOPT + RDataPTR {} -> Right TypePTR 
+ RDataRRSIG {} -> Right TypeRRSIG + RDataSOA {} -> Right TypeSOA + RDataSRV {} -> Right TypeSRV + RDataTXT {} -> Right TypeTXT + RDataSPF {} -> Right TypeSPF + RDataURI {} -> Right TypeURI + RDataSSHFP {} -> Right TypeSSHFP + -- + RData ty _ -> maybe (Left ty) Right (typeToSym ty) + + +{- TODO: + + +type-bitmap: + + A NS SOA TXT AAAA RRSIG NSEC DNSKEY + +'00 07 62 00 80 08 00 03 80' +'00000000 00000111 01100010 00000000 10000000 00001000 00000000 00000011 10000000' + Win=#0 len=7 ^{SOA} ^{TXT} ^{AAAA} ^{DNSKEY} + ^^{A,NS} ^^{RRSIG,NSEC} + +" ".join(map("{:08b}".format,[0,7,98,0,128,8,0,3,128])) + + +"\NUL\a\"\NUL\NUL\NUL\NUL\ETX\128" NS SOA RRSIG NSEC DNSKEY + +[ (winofs+j*8+7-i) | (j,x) <- zip [0..] xs, i <- [7,6..0], testBit x i ] + +-} + + + +-- helpers + +getUntilEmpty :: Binary a => Get [a] +getUntilEmpty = untilEmptyWith get + +untilEmptyWith :: Get a -> Get [a] +untilEmptyWith g = go [] + where + go acc = do + e <- isEmpty + if e + then pure (reverse acc) + else do + v <- g + go (v : acc) + + + +{- TODO: + + + MsgRR{rrName = Name "stanford.edu.", rrClass = 1, rrTTL = 1799, + rrData = + RData 29 + "\NUL\DC2\SYN\DC3\136\a\244\212e\200\252\194\NUL\152\150\128"}, + + +https://en.wikipedia.org/wiki/LOC_record + + +LOC record statdns.net. IN LOC 52 22 23.000 N 4 53 32.000 E -2.00m 0.00m 10000m 10m + + +SW1A2AA.find.me.uk. 
86399 IN LOC 51 30 12.748 N 0 7 39.611 W 0.00m 0.00m 0.00m 0.00m + + +https://tools.ietf.org/html/rfc1876 + +-} + diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/src/Network/DNS.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/src/Network/DNS.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/src/Network/DNS.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/src/Network/DNS.hs 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1,373 @@ +{-# LANGUAGE CApiFFI #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE Trustworthy #-} + +-- | +-- Copyright: © 2017 Herbert Valerio Riedel +-- SPDX-License-Identifier: GPL-2.0-or-later +-- +-- This module implements an API for accessing +-- the [Domain Name Service (DNS)](https://tools.ietf.org/html/rfc1035) +-- resolver service via the standard @libresolv@ system library +-- on Unix systems. +-- +module Network.DNS + ( -- ** High level API + queryA + , queryAAAA + , queryCNAME + , querySRV + , queryTXT + + -- * Mid-level API + , query + , DnsException(..) + + -- * Low-level API + , resIsReentrant + , queryRaw + , sendRaw + , mkQueryRaw + + , decodeMessage + , encodeMessage + , mkQueryMsg + + -- * Types + -- ** Basic types + + -- *** Names/Labels + , Label + , Labels(..) + , IsLabels(..) + + , Name(..) + , caseFoldName + + -- *** Character strings + , CharStr(..) + + -- *** IP addresses + , IPv4(..) + , IPv6(..) + + -- *** RR TTL & Class + , TTL(..) + + , Class(..) + , classIN + + -- *** Message types + , Type(..) + , TypeSym(..) + , typeFromSym + , typeToSym + + -- ** Messages + + , Msg(..) + + , MsgHeader(..) + , MsgHeaderFlags(..), QR(..) + , MsgQuestion(..) + , MsgRR(..) + + , RData(..) + , rdType + + , SRV(..) 
+ ) + where + +import Control.Exception +import Data.Typeable (Typeable) +import Foreign.C +import Foreign.Marshal.Alloc +import Prelude + +import qualified Data.ByteString as BS + +import Compat + +import Network.DNS.FFI +import Network.DNS.Message + +-- | Exception thrown in case of errors while encoding or decoding into a 'Msg'. +-- +-- @since 0.1.1.0 +data DnsException = DnsEncodeException + | DnsDecodeException + deriving (Show, Typeable) + +instance Exception DnsException + +-- | Send a query via @res_query(3)@ and decode its response into a 'Msg' +-- +-- Throws 'DnsException' in case of encoding or decoding errors. May throw other IO exceptions in case of network errors. +-- +-- === Example +-- +-- >>> query classIN (Name "_mirrors.hackage.haskell.org") TypeTXT +-- Just (Msg{msgHeader = MsgHeader{mhId = 56694, +-- mhFlags = MsgHeaderFlags{mhQR = IsResponse, mhOpcode = 0, mhAA = False, +-- mhTC = False, mhRD = True, mhRA = True, mhZ = False, +-- mhAD = False, mhCD = False, mhRCode = 0}, +-- mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, +-- msgQD = [MsgQuestion (Name "_mirrors.hackage.haskell.org.") (Type 16) (Class 1)], +-- msgAN = [MsgRR{rrName = Name "_mirrors.hackage.haskell.org.", +-- rrClass = Class 1, rrTTL = TTL 299, +-- rrData = RDataTXT ["0.urlbase=http://hackage.fpcomplete.com/", +-- "1.urlbase=http://objects-us-west-1.dream.io/hackage-mirror/"]}], +-- msgNS = [], +-- msgAR = [MsgRR{rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}] +-- }) +-- +query :: IsLabels n => Class -> n -> TypeSym -> IO (Msg n) +query cls name0 qtype + | Just name <- toName name0 = do + bs <- queryRaw cls name (typeFromSym qtype) + msg <- evaluate (decodeMessage bs) + maybe (throwIO DnsDecodeException) pure msg + | otherwise = throwIO DnsEncodeException + +-- | Send a query via @res_query(3)@, the return value is the raw binary response message. +-- +-- You can use 'decodeMessage' to decode the response message. 
+queryRaw :: Class -> Name -> Type -> IO BS.ByteString +queryRaw (Class cls) (Name name) qtype = withCResState $ \stptr -> do + allocaBytes max_msg_size $ \resptr -> do + _ <- c_memset resptr 0 max_msg_size + BS.useAsCString name $ \dn -> do + + rc1 <- c_res_opt_set_use_dnssec stptr + unless (rc1 == 0) $ + fail "res_init(3) failed" + + resetErrno + reslen <- c_res_query stptr dn (fromIntegral cls) qtypeVal resptr max_msg_size + + unless (reslen <= max_msg_size) $ + fail "res_query(3) message size overflow" + + errno <- getErrno + + when (reslen < 0) $ do + unless (errno == eOK) $ + throwErrno "res_query" + + fail "res_query(3) failed" + + BS.packCStringLen (resptr, fromIntegral reslen) + + where + -- The DNS protocol is inherently 16-bit-offset based; so 64KiB is + -- a reasonable maximum message size most implementations seem to + -- support. + max_msg_size :: Num a => a + max_msg_size = 0x10000 + + qtypeVal :: CInt + qtypeVal = case qtype of Type w -> fromIntegral w + +-- | Send a raw preformatted query via @res_send(3)@. +sendRaw :: BS.ByteString -> IO BS.ByteString +sendRaw req = withCResState $ \stptr -> do + allocaBytes max_msg_size $ \resptr -> do + _ <- c_memset resptr 0 max_msg_size + BS.useAsCStringLen req $ \(reqptr,reqlen) -> do + rc1 <- c_res_opt_set_use_dnssec stptr + unless (rc1 == 0) $ + fail "res_init(3) failed" + + resetErrno + reslen <- c_res_send stptr reqptr (fromIntegral reqlen) resptr max_msg_size + + unless (reslen <= max_msg_size) $ + fail "res_send(3) message size overflow" + + errno <- getErrno + + when (reslen < 0) $ do + unless (errno == eOK) $ + throwErrno "res_send" + + fail "res_send(3) failed" + + BS.packCStringLen (resptr, fromIntegral reslen) + + where + -- The DNS protocol is inherently 16-bit-offset based; so 64KiB is + -- a reasonable maximum message size most implementations seem to + -- support. 
+ max_msg_size :: Num a => a + max_msg_size = 0x10000 + +-- | Construct a DNS query 'Msg' in the style of 'mkQueryRaw' +mkQueryMsg :: IsLabels n => Class -> n -> Type -> Msg n +mkQueryMsg cls l qtype = Msg (MsgHeader{..}) + [MsgQuestion l qtype cls] + [] + [] + [MsgRR {..}] + where + mhId = 31337 + mhFlags = MsgHeaderFlags + { mhQR = IsQuery + , mhOpcode = 0 + , mhAA = False + , mhTC = False + , mhRD = True + , mhRA = False + , mhZ = False + , mhAD = True + , mhCD = False + , mhRCode = 0 + } + + mhQDCount = 1 + mhANCount = 0 + mhNSCount = 0 + mhARCount = 1 + + rrName = fromLabels Root + rrClass = Class 512 + rrTTL = TTL 0x8000 + rrData = RDataOPT "" + + + +-- | Use @res_mkquery(3)@ to construct a DNS query message. +mkQueryRaw :: Class -> Name -> Type -> IO BS.ByteString +mkQueryRaw (Class cls) (Name name) qtype = withCResState $ \stptr -> do + allocaBytes max_msg_size $ \resptr -> do + _ <- c_memset resptr 0 max_msg_size + BS.useAsCString name $ \dn -> do + + rc1 <- c_res_opt_set_use_dnssec stptr + unless (rc1 == 0) $ + fail "res_init(3) failed" + + resetErrno + reslen <- c_res_mkquery stptr dn (fromIntegral cls) qtypeVal resptr max_msg_size + + unless (reslen <= max_msg_size) $ + fail "res_mkquery(3) message size overflow" + + errno <- getErrno + + when (reslen < 0) $ do + unless (errno == eOK) $ + throwErrno "res_query" + + fail "res_mkquery(3) failed" + + BS.packCStringLen (resptr, fromIntegral reslen) + + where + -- The DNS protocol is inherently 16-bit-offset based; so 64KiB is + -- a reasonable maximum message size most implementations seem to + -- support. 
+ max_msg_size :: Num a => a + max_msg_size = 0x10000 + + qtypeVal :: CInt + qtypeVal = case qtype of Type w -> fromIntegral w + + +---------------------------------------------------------------------------- +-- Common High-level queries + +-- | Normalise 'Name' +-- +-- This function case folds 'Name's as described +-- in [RFC 4343, section 3](https://tools.ietf.org/html/rfc4343#section-3) +-- by subtracting @0x20@ from all octets in the inclusive range +-- @[0x61..0x7A]@ (i.e. mapping @['a'..'z']@ to @['A'..'Z']@). +-- +-- This operation is idempotent. +caseFoldName :: Name -> Name +caseFoldName (Name n) = (Name n'') + where + n' = BS.map cf n + n'' | BS.null n' = "." + | BS.last n' == 0x2e {- '.' -} = n' + | otherwise = n' `mappend` "." + + -- case fold (c.f. RFC4343) + cf w | 0x61 <= w && w <= 0x7a = w - 0x20 + | otherwise = w + +---------------------------------------------------------------------------- + +-- | Query @A@ record (see [RFC 1035, section 3.4.1](https://tools.ietf.org/html/rfc1035#section-3.4.1)). +-- +-- This query returns only exact matches (modulo 'caseFoldName'). +-- E.g. in case of @CNAME@ responses even if the +-- answer section would contain @A@ records for the hostnames pointed +-- to by the @CNAME@. You can use 'query' if you need more control. +-- +-- >>> queryA (Name "www.google.com") +-- [(TTL 72,IPv4 0xd83acde4)] +-- +queryA :: Name -> IO [(TTL,IPv4)] +queryA n = do + res <- query classIN n' TypeA + pure [ (ttl,ip4) | MsgRR { rrData = RDataA ip4, rrTTL = ttl, rrName = n1, rrClass = Class 1 } <- msgAN res, caseFoldName n1 == n' ] + where + n' = caseFoldName n + +-- | Query @AAAA@ records (see [RFC 3596](https://tools.ietf.org/html/rfc3596)). +-- +-- This query returns only exact matches (modulo 'caseFoldName'). +-- E.g. in case of @CNAME@ responses even if the answer section would +-- contain @A@ records for the hostnames pointed to by the +-- @CNAME@. You can use 'query' if you need more control. 
+-- +-- >>> queryAAAA (Name "www.google.com") +-- [(TTL 299,IPv6 0x2a0014504001081e 0x2004)] +-- +queryAAAA :: Name -> IO [(TTL,IPv6)] +queryAAAA n = do + res <- query classIN n' TypeAAAA + pure [ (ttl,ip6) | MsgRR { rrData = RDataAAAA ip6, rrTTL = ttl, rrName = n1, rrClass = Class 1 } <- msgAN res, caseFoldName n1 == n' ] + where + n' = caseFoldName n + +-- | Query @CNAME@ records (see [RFC 1035, section 3.3.1](https://tools.ietf.org/html/rfc1035#section-3.3.1)). +-- +-- >>> queryCNAME (Name "hackage.haskell.org") +-- [(TTL 299,Name "j.global-ssl.fastly.net.")] +-- +queryCNAME :: Name -> IO [(TTL,Name)] +queryCNAME n = do + res <- query classIN n' TypeCNAME + pure [ (ttl,cname) | MsgRR { rrData = RDataCNAME cname, rrTTL = ttl, rrName = n1, rrClass = Class 1 } <- msgAN res, caseFoldName n1 == n' ] + where + n' = caseFoldName n + +-- | Query @TXT@ records (see [RFC 1035, section 3.3.14](https://tools.ietf.org/html/rfc1035#section-3.3.14)). +-- +-- >>> queryTXT (Name "_mirrors.hackage.haskell.org") +-- [(TTL 299,["0.urlbase=http://hackage.fpcomplete.com/", +-- "1.urlbase=http://objects-us-west-1.dream.io/hackage-mirror/"])] +-- +queryTXT :: Name -> IO [(TTL,[CharStr])] +queryTXT n = do + res <- query classIN n' TypeTXT + pure [ (ttl,txts) | MsgRR { rrData = RDataTXT txts, rrTTL = ttl, rrName = n1, rrClass = Class 1 } <- msgAN res, caseFoldName n1 == n' ] + where + n' = caseFoldName n + +-- | Query @SRV@ records (see [RFC 2782](https://tools.ietf.org/html/rfc2782)). 
+-- +-- >>> querySRV (Name "_imap._tcp.gmail.com") +-- [(TTL 21599,SRV {srvPriority = 0, srvWeight = 0, srvPort = 0, srvTarget = Name "."})] +-- +querySRV :: Name -> IO [(TTL,SRV Name)] +querySRV n = do + res <- query classIN n' TypeSRV + pure [ (ttl,srv) | MsgRR { rrData = RDataSRV srv, rrTTL = ttl, rrName = n1, rrClass = Class 1 } <- msgAN res, caseFoldName n1 == n' ] + where + n' = caseFoldName n diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/src-test/Tests1.hs cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/src-test/Tests1.hs --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/src-test/Tests1.hs 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/src-test/Tests1.hs 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1,91 @@ +{-# LANGUAGE OverloadedStrings #-} + +module Main where + +import Control.Applicative as A +import qualified Control.Exception as E +import Control.Monad +import qualified Data.ByteString as BS +import System.Directory (getDirectoryContents, removeFile) +import System.FilePath (dropExtension, takeExtension, (<.>), (</>)) + +import qualified Test.Tasty as T +import qualified Test.Tasty.HUnit as T + +import qualified Network.DNS as DNS + +main :: IO () +main = do + msgfiles <- filter ((== ".bin") . takeExtension) <$> getDirectoryContents "testdata/msg" + + let tests1 = [ msgFileTest1 (dropExtension fn) | fn <- msgfiles ] + tests2 = [ msgFileTest2 (dropExtension fn) | fn <- msgfiles ] + + T.defaultMain (T.testGroup "" [ T.testGroup "decode" tests1 + , T.testGroup "enc/dec" tests2 + , T.testGroup "Type/TypeSym" + [ testTypeToFromSym1, testTypeToFromSym2 ] + , T.testGroup "mkQueryRaw" [ mkQueryRawText1 ] + ]) + +testTypeToFromSym1 :: T.TestTree +testTypeToFromSym1 = T.testCase "testTypeToFromSym1" $ do + forM_ [minBound..maxBound] $ \sym -> do + T.assertEqual "" (Just sym) (DNS.typeToSym . 
DNS.typeFromSym $ sym) + +testTypeToFromSym2 :: T.TestTree +testTypeToFromSym2 = T.testCase "testTypeToFromSym2" $ do + forM_ (map DNS.Type [minBound..maxBound]) $ \ty -> + case DNS.typeToSym ty of + Nothing -> pure () + Just sym -> T.assertEqual "" (DNS.typeFromSym sym) ty + +msgFileTest1 :: FilePath -> T.TestTree +msgFileTest1 fn = T.testCase fn $ do + bs <- BS.readFile ("testdata" </> "msg" </> fn <.> "bin") + msg1 <- assertJust "failed to decode message" $ DNS.decodeMessage bs + + -- load reference value + let refFn = "testdata" </> "msg" </> fn <.> "show" + writeFile (refFn ++ "~") (show (msg1 :: DNS.Msg DNS.Name)) + msg0 <- read <$> readFile refFn + + assertEqShow (pure ()) msg0 msg1 + removeFile (refFn ++ "~") + +msgFileTest2 :: FilePath -> T.TestTree +msgFileTest2 fn = T.testCase fn $ do + -- use this as reference message + bs <- BS.readFile ("testdata" </> "msg" </> fn <.> "bin") + msg0 <- assertJust "failed to decode stored message" $ DNS.decodeMessage bs + +-- print msg0 + + -- encode it now again + let Just msg0bin = DNS.encodeMessage (msg0 :: DNS.Msg DNS.Labels) + + msg1 <- assertJust "failed to decode re-encoded message" $ DNS.decodeMessage msg0bin + + assertEqShow (pure ()) msg0 msg1 + +mkQueryRawText1 :: T.TestTree +mkQueryRawText1 = T.testCase "mkQueryRawText1" $ do + msgraw <- DNS.mkQueryRaw DNS.classIN (DNS.Name "www.google.com") (DNS.typeFromSym DNS.TypeA) + + let Just msg = DNS.decodeMessage msgraw + + assertEqShow (pure ()) (head (DNS.msgQD msg)) (DNS.MsgQuestion (DNS.Name "www.google.com.") (DNS.Type 1) (DNS.Class 1)) + +assertJust :: String -> Maybe a -> IO a +assertJust msg Nothing = T.assertFailure msg +assertJust _ (Just v) = A.pure v + +assertEqShow :: Show a => IO () -> a -> a -> T.Assertion +assertEqShow onFail ref cur + | show ref /= show cur = do + onFail + T.assertFailure ("expected: " ++ show ref ++ "\n but got: " ++ show cur) + | otherwise = A.pure () + + Binary files 
/tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/01ca022e21220474ca6100f21d137b42.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/01ca022e21220474ca6100f21d137b42.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/01ca022e21220474ca6100f21d137b42.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/01ca022e21220474ca6100f21d137b42.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/01ca022e21220474ca6100f21d137b42.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/01ca022e21220474ca6100f21d137b42.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 50673, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 1, mhARCount = 0}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 15) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 185, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [MsgRR {rrName = Name "fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataSOA (Name "ns1.fastly.net.") (Name "hostmaster.fastly.com.") 2016110301 3600 600 604800 30}], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/033cb8330c8fba5fbb8bd48fd60fd6c3.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/033cb8330c8fba5fbb8bd48fd60fd6c3.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/033cb8330c8fba5fbb8bd48fd60fd6c3.show 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/033cb8330c8fba5fbb8bd48fd60fd6c3.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/033cb8330c8fba5fbb8bd48fd60fd6c3.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/033cb8330c8fba5fbb8bd48fd60fd6c3.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 24290, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "_mirrors.hackage.haskell.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "_mirrors.hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 3637, rrData = RDataHINFO "ANY obsoleted" "See draft-ietf-dnsop-refuse-any"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/03edcc6e3a8f04350e45b3c718df1cb5.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/03edcc6e3a8f04350e45b3c718df1cb5.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/03edcc6e3a8f04350e45b3c718df1cb5.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/03edcc6e3a8f04350e45b3c718df1cb5.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/03edcc6e3a8f04350e45b3c718df1cb5.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/03edcc6e3a8f04350e45b3c718df1cb5.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 
44451, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 1, mhARCount = 0}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 28) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [MsgRR {rrName = Name "fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataSOA (Name "ns1.fastly.net.") (Name "hostmaster.fastly.com.") 2016110301 3600 600 604800 30}], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/04e610d95902b1898ee6abce328905dd.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/04e610d95902b1898ee6abce328905dd.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/04e610d95902b1898ee6abce328905dd.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/04e610d95902b1898ee6abce328905dd.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/04e610d95902b1898ee6abce328905dd.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/04e610d95902b1898ee6abce328905dd.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 50214, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 1, mhARCount = 1}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 16) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 147, rrData = 
RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [MsgRR {rrName = Name "fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataSOA (Name "ns1.fastly.net.") (Name "hostmaster.fastly.com.") 2016110301 3600 600 604800 30}], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/0c2248ed621e903da402b64d8dc12fbc.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/0c2248ed621e903da402b64d8dc12fbc.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/0c2248ed621e903da402b64d8dc12fbc.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/0c2248ed621e903da402b64d8dc12fbc.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/0c2248ed621e903da402b64d8dc12fbc.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/0c2248ed621e903da402b64d8dc12fbc.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 0, mhNSCount = 4, mhARCount = 1}, msgQD = [MsgQuestion (Name "com.") (Type 50) (Class 1)], msgAN = [], msgNS = [MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 593, rrData = RDataSOA (Name "a.gtld-servers.net.") (Name "nstld.verisign-grs.com.") 1494230811 1800 900 604800 86400},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 593, rrData = RDataRRSIG 6 8 1 900 1494835611 1494226611 27302 (Name "com.") 
",\SYN\230\r\ESC\180\139\246|W?\207\SYN\218\185\223:\166\129\218\251\243]\"Vm\241\156\227.\145P\219N[\250\138\"\220k)j\133\&8B%\232\249xo\221\187\248\ESC\vO\143\135\219\174\161\ETB\199i\248\184X\DC3VrU\205o8\237\rN J\163L$\203\200\211\148\&6\157\b\NULj\204\185E\238\186\ETXmZ\226\240\162\202>Y\226\247\186\128\RS\232\250\DC18\196RenvB\193\185\202R1-\a\182\229\156\158\bB\241\173I\162:\SYN\212\156Z\161\167\182]\DC1vB\169\SOH2\DC1\244\236\226k\DC1\153\198\165\201\f\195X\199\237\205\NAK\GS\203\208=\229\142o\159\189\252n\144\155\191\228\DLE\199\244H)e\135\239z\134\240\&3\ENQ7Q\243xs\149\&0\233Pl\164\216\NAK\196&\SO\180\219w\249\176\163\136l\176\180\235\157y\ETB"},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "i.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "d.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "h.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "m.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "c.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "b.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "g.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "a.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "f.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "l.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "k.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name 
"e.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataNS (Name "j.root-servers.net.")},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 203305, rrData = RDataRRSIG 2 8 0 518400 1495256400 1494129600 14796 (Name ".") "S/%\226\DC25\201D\151\140\161\243\248\192\a}\198\167!1\233\141h\205\203\ENQ\EM|8\204\184\224\b\203\223\r_'\187\US#\205YM\220\202?\240;z\136$\166\EOT\249M\251\146\&5\143\219\187\DLE\154\185Tx/\181\199\240s\184\135\171\247y\196\191\168E\216\159&\220\252\132\240\&0\a\217gI$\217\171\218}\ACK\144\DC3\199\t\tw\236\&7\151\SO\nQ\246\181aa_\251\156,\218[4\192\EOT(\165\NUL\163\233s\179$\tS\FS\147\176\228\179=\DC2pMP\186\155\FS\205njSC\159\187\142\b\215[\244\224\253z>\201\t\223\SI\t\165\153X\"\223\232\186sHg\DC2\196F\160\167E\144\ACKo\232[\132'\245\158Q\163\192\218i\175\214\157\ESC.\137mS\143W:\DC1\250\&8\251\DC3\226]\244\163\190uc\249\215\220\169V6\155H\DLE\216\180\NULh+v\150\255b2\168u\226u\175\177}T\206\163w\249]+h\EMRfx\US8\179\176\166l\186\190\199\229\155yB\184g\173\211\ACKA\208`g\136\216(\171~]P\159\203(q\159\234I\242\183j\153\&0\208>E\172\227r'yE\220\180Pl\184=\ESCI\243\155\SOHn\142\226\194\&9TI\192\r\192\FSu\129\143V\184\178\166\NAK\177\FSoI\239\179\GSs=\n]\237?\\?A\132\198\ACK\184\248\150\ETX\230\128\DC4+w\241\196\&5\160\246\131kG"},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 116905, rrData = RDataDNSKEY 257 3 8 "\ETX\SOH\NUL\SOH\168\NUL \169Uf\186B\232\134\187\128L\218\132\228~\245m\189z\236a&\NAKU,\236\144m!\SYN\208\239 
p(\197\NAKT\DC4M\254\175\231\199\203\143\NUL]\209\130\&4\DC3:\192q\n\129\CAN,\225\253\DC4\173\"\131\188\131C_\157\242\246\&12Q\147\SUB\ETBm\240\218Q\229OB\230\EOT\134\r\251\&5\149\128%\SIU\156\197C\196\255\213\FS\190=\232\207\208g\EM#\DEL\159\196~\231)\218\ACK\131_\164R\232%\233\161\142\188.\203\207V4te,3\207V\169\ETX;\205\245\217s\DC2\ETB\151\236\128\137\EOT\ESCn\ETX\161\183-\ns[\152N\ETXhs\t3#$\242|-\186\133\233\219\NAK\232:\SOHC8.\151K\ACK!\193\142b^\206\201\aW}\158{\173\233RA\168\RS\187\232\169\SOH\212\211'n@\177\DC4\192\162\230\252\&8\209\156.j\171\STXdK(\DC3\245u\252!`\RS\r\238I\205\158\233jC\DLE>RMb\135="},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 116905, rrData = RDataDNSKEY 256 3 8 "\ETX\SOH\NUL\SOH\146\236\190\243\223\253`\216\DC3\194\252eC\162\\C}=(76\172\170\187\197\150)[\184,\139\207[\190\216\209mf\ACK\t\171\160Q\a\199\&0=\204\205\158u\ACK\216 Xc\231~\172\199w\227\213\188x\159\204\138\193\r2\212\198$^\248\DC4\221\162\150\DELH\231\155\202\179\229\DC3\213k\136\132R\ETX\249\162\SI\251\210k\128^j7\241\ESC\181\209\171\FS\221#m7c\165\SOH=S\138\250~S\130T\149P< +\226z\181\SI\131b\160\DELh-4Xn\168\196\241\252\129\235\DC3\144\221W\230\DEL\174a\198\168D\158\254i\218\SI1\221\SYNr\178\236\198\185\205\195\211zI\176g\206\130\156o\175\&5c\DC3\235\b\142\231\166\DC2\SOH\168\135_H\210\n9\164/\178sO|i\167\251\237\161b\202\214\184\171\239H\186\&5\187\DC4\227\203\SId\225\203~\FS3n\140'{\NUL,\158\r\DLE\168\195\156U\201\161\151\168\159\DC3p\191\190\&1"},MsgRR {rrName = Name ".", rrClass = Class 1, rrTTL = TTL 116905, rrData = RDataRRSIG 48 8 0 172800 1495411200 1493596800 19036 (Name ".") "Z\249\197\192\196\231A\191\251\179\224k_;\158N!<\182\173;\ENQz~D\183\SOH\129!\213\203:9\161\149f\201+\rD\nO\216v\230\191\234\176P\142\166.\180Z~M\161\249X\245\227\233\188\EM\249\212\186\172;\197n\217\145\221~\165\180G\152\181\ACK\216\149\205sg\v\186>J4\177\195b\237r\NAK\241~\220,\239\134O.\158,\189\ETB\193\bK\131t6\SYNL\245\SYN\206\140:;\163\235;'\134\239\157 
\225\138d\251x\fG\175%(\142\150\212]Q\202\229\254\&8Q_\SUB\NAK9\138\DC2PN\194Gf\132\180\162\154o\206\DC4\161+\137N1\197\162\238\148F\NUL\219|,\217S\179g\140\248[\234'\130u+\201\234\212\185\SYN\220bqf\a\193\DC4%\249\212\140'\242c\194\235u\RS\n\222\169\SUB\130S\141\142E/\248R\143\130\&3\222\222\200jVe'\npj\ETX@\SYN\150\DC3\135Q\222ca?\144\210"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/36187d17453931a5d1563c69ed34fb52.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/36187d17453931a5d1563c69ed34fb52.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/36187d17453931a5d1563c69ed34fb52.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/36187d17453931a5d1563c69ed34fb52.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/36187d17453931a5d1563c69ed34fb52.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/36187d17453931a5d1563c69ed34fb52.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 16363, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 14, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "br.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "a.dns.br.")},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "b.dns.br.")},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name 
"c.dns.br.")},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "d.dns.br.")},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "e.dns.br.")},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "f.dns.br.")},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 2 5 1 172800 1494583200 1493978400 16335 (Name "br.") "C\r\136\233\&4\251z\214\ENQ\v\202fm\142\251`3\DEL\181\DLE\aj(/\242\STXO\243\208X\223\145\ETX\227-\146\153*Q\a\ESC\149^.:L\171\170\&7\227<7\221L\ETB\165\220y,\226\226WPF\RS\227\205\154\183\136\227\217\157\232b\177\189 \213@\b:\212\132\RS\240\236\187\240tca\153K/\193\225\132\ETB\166\NAK[VQ\253\175\166K\153\218\213\SUB%\249#\231\143\134\DC1\169\189!}5\CAN0\EOT\179\144q]V\201\178\STX\202\190\151f\163Ks\NUL\DLE\188\140=\SO\178\218\180\194\174\210m\222\223.\252'"},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataNSEC (Name "0800.br.") (fromList [Type 2,Type 6,Type 46,Type 47,Type 48])},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataRRSIG 47 5 1 900 1494583200 1493978400 16335 (Name "br.") "\154\183\204(\130\144\188O\252\209\225\ENQ\SO\252CET@Ve\203\253\139\140\199\243\206\238m+\223\185f\DLE\152$\ETB\159sW\246#\"\CAN\223`\196.\200\181\212\252|\199\ACKN\201\197+\ETB;-\179*.\US\140\f\164g<\r\163\247\173|\157@s\247I\170\ESC'\203f\US\201\204\&3\204S\169\164 \233\203\&8Gw\254\EOT\202\217\SUB\US\180\237\174]Ok\251\246\234\SUB1\242\167i\247\168\150\EOT\249\200\ETB\234\SOH\234\DELm\197\156\194O\223C\245\236\198\160\242]\NAKV\181\180\n\207\128\217@U\EM\180\132\220m\153"},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 21599, rrData = RDataDNSKEY 257 3 5 
"\ETX\SOH\NUL\SOH\194\&8\150i\159U\208o\243\225G\214^\132fc\232\144h\"\135\188\SUB\SO\245\254\161\136\240\&4\146@\250\236\188\219\178\138\r\168\151\EOT\141\228\SUB'P\221\EM\SUB>6\NAK_P&\145\202zs\191'\238_f\252\240\230\165\201\SO%%{_o\249\SYNf\SIk\228s\234\DC4\229\223\234\144\r)V\152 =3\148\217~\192+K\243\163\ETX\151\141\160\137\151b_\218\156\209$\161\227\231\216\235\215\235\160r\199\223\159\223:\232\201\191\SIH\t\174\151\153\162UK\253x\247\195'\SO\240Ix\243\131\DEL_ws\DC3\188E\243`\252\170\242l\SYN\202\bz\194\RSI}U\\\205\178\166\136J\237}\249i\174\135\169\DC2\248Lg"},MsgRR {rrName = Name "br.", rrClass = Class 1, rrTTL = TTL 21599, rrData = RDataDNSKEY 256 3 5 "\ETX\SOH\NUL\SOH\209Zg\144K\201\185\174\150\154}\225\223\n\244\180\239\151\195\167\150\173\217\a#i(\189\166\192\"\175J\222\128\227\251U\217\235@t;@T\180\184\196\152\160b\NUL\217\231\245=\aX\b\ETB}{\228a\138@9\137\170\183\184\SO^\222Sh?\236\ENQ\130\151we\ESC\214:y\195\236%\254\139\194b\221!\161}\132!\190X]\217S\228\239:*\RSp\144\ENQ\202\228)\247\GS\169$\v5\254Na\192\131\135\197\242\ESChd\193\151`\162\EOT\235\252\195\130\179$\195\131\175\184H\SI\162^\156\248Z\162\194\143\143\246X \170\149\141\v@\184Z\b\DC1\SO\131\199\239>\166\166\&3y\235\&2\237\208m)\254\182(\140\&3z\144Ws\US\137\217\153Ujc\249\162'\138O\n$\230\179\&2\179\221u\140\r,G\209(\250\&8\137^0\210\174\224\154b09\188Hw\214m\173@#\226\SI\240\224\186\241\255(\176\DC1#\176l\f\231gBq\224l\SIXv\133\n\204\155\130S\212m \213u\196\200Z\235R}\244\216\&3l~^\136\189\174\DEL+\237.@\250\205\202\238-\184\206\222\&5v\186\SUB:\165%\DC3\NUL+\207t\189\CAN\200\n\140\172G\180\183\&6\200mc7M^\223GI\170\137\252\135\205Z\191\169\&8\b8P\216\130J\228 \EOT\187@\252\205\177\194Z\ETB\RS\190\165\218\158c\176\135\SUB:\198"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files 
/tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/3c973651b87bbb27d9249ef41ebd5c5e.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/3c973651b87bbb27d9249ef41ebd5c5e.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/3c973651b87bbb27d9249ef41ebd5c5e.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/3c973651b87bbb27d9249ef41ebd5c5e.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/3c973651b87bbb27d9249ef41ebd5c5e.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/3c973651b87bbb27d9249ef41ebd5c5e.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 39631, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 5, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1057, rrData = RDataA (IPv4 0x17603435)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1057, rrData = RDataA (IPv4 0x17647aaf)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1057, rrData = RDataA (IPv4 0x682bc3fb)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1057, rrData = RDataA (IPv4 0x6828d323)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1057, rrData = RDataA (IPv4 0xbfefd5c5)}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/419ae4b8b8770bad65e3bdf3cbc84ad3.bin and 
/tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/419ae4b8b8770bad65e3bdf3cbc84ad3.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/419ae4b8b8770bad65e3bdf3cbc84ad3.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/419ae4b8b8770bad65e3bdf3cbc84ad3.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/419ae4b8b8770bad65e3bdf3cbc84ad3.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/419ae4b8b8770bad65e3bdf3cbc84ad3.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 60637, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 3, mhNSCount = 1, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 16) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 3580, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 3480, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 633, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")}], msgNS = [MsgRR {rrName = Name "dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 733, rrData = RDataSOA (Name "n0dspb.akamaiedge.net.") (Name "hostmaster.akamai.com.") 1494154963 1000 1000 1000 1800}], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files 
/tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/47b446859b6bd9bf4d1e7348a356a43c.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/47b446859b6bd9bf4d1e7348a356a43c.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/47b446859b6bd9bf4d1e7348a356a43c.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/47b446859b6bd9bf4d1e7348a356a43c.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/47b446859b6bd9bf4d1e7348a356a43c.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/47b446859b6bd9bf4d1e7348a356a43c.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 39296, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 3428, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/4e49a68a7cbd8f6c69eab194c49b9888.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/4e49a68a7cbd8f6c69eab194c49b9888.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/4e49a68a7cbd8f6c69eab194c49b9888.show 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/4e49a68a7cbd8f6c69eab194c49b9888.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/4e49a68a7cbd8f6c69eab194c49b9888.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/4e49a68a7cbd8f6c69eab194c49b9888.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 21306, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 6) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2915, rrData = RDataSOA (Name "ns1.msft.net.") (Name "msnhst.microsoft.com.") 2017050703 7200 600 2419200 3600}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/4e86d529a6401b74f84956cd72682c15.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/4e86d529a6401b74f84956cd72682c15.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/4e86d529a6401b74f84956cd72682c15.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/4e86d529a6401b74f84956cd72682c15.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/4e86d529a6401b74f84956cd72682c15.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/4e86d529a6401b74f84956cd72682c15.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, 
mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 31, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "torproject.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 0, rrData = RDataNSEC3PARAM 1 0 16 "\185\181\DC2\245\247"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 0, rrData = RDataRRSIG 51 8 2 0 1497380687 1493921581 59061 (Name "torproject.org.") "\ESC\139\217>\186\USz(\219\227J\162\235\215t\166\171\198\136\243\146\202;\128\130<\152\232\SOH\167>\248Q \177\130'\NUL\231\199\199\222\172\fE\206g$sR'\205=\216\188\211\152\230\232\202mV\220\164\128xsj\129\138\173\ESC\217\SYN`\NUL\231RE{by\173\177\235\ACK\203IT\128\167\212\197J\174\205V\145\n\133\NUL\175 \187\200\232\b\200*/-\141R>|W\153\176 \242\201\160\SIK\SUB\146\160\227\242\146\181\STX%\129\195\b\209\210N\FS\GS\255\174e\FS\227!}\179g\211\148\188\159b),\149ym7\fi\155\163A\181T\169\154\196/\141\212\170\224\198\253q#F\DLE\ETX\141\217\216\133,0\163\183\207"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataSOA (Name "nevii.torproject.org.") (Name "hostmaster.torproject.org.") 2017050738 10800 3600 1814400 3601},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataRRSIG 28 8 2 300 1497627735 1494168135 59061 (Name "torproject.org.") 
"csn\208v\207*\137\134\225U\CAN\187\253\212\&2\176\223\250;\205`\DC2\b\\\a&\246\188\203\133\215q@U\DEL\247\170z\r\GS\172\193\243\177\CAN\183(\196J\253\192\240<\255\199\226]\252U\242\EMdJ\156\171p\245A\189\EOT\250]\253\220,\138\224-V\209\171R\158U\132B{\159cl\202#\ACK2\216\249+\213npY\EOT\133\129GFl\249b\GS\154\189\243+)\230\132\170\240'Q\167\&2r\210\SOH\DEL\196S\163\145\&9\136\208\181\NUL]\ACK\GS\142\140\164\241\253\134j\135\DEL\135\&0\159\145\US\ESC>\163\208\\\",\236\217h|\245\&1\DC4\179,\189\211\174bo\DEL?\\\235[\SI\229\176\DC3\244\190b\224/\129\ENQ\245"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x2620000006b0000b 0x1a1a000026e54810)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x2001085800020002 0xaabb0000563b1e28)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x200141b802020deb 0x21321fffe201426)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x2a0104f801721b46 0xabba00050001)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x200106b0005a5000 0x5)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 15 8 2 3600 1497532518 1494075620 59061 (Name "torproject.org.") "=KT\176\SYN\SYN\226\&6\213k\171{\174\213{\209\NUL\171\250\&9D\168\t\177\202v\249\143\&1\211~:8\220\214g!\134\167\250\175\136\&5J/\205\205&\231\DC4\161\252\144~\160\245\250\210\213\253\139\205\199\215\234P\248\b-\179\157%\138Y/R\201\171\189e\241\203\181rHQ\CANsB\248$\217\202\144a\237\&1U4\EOTe:W\201\SYNrU\\\EOTy\207\224\214\244+\255U}e\169\&4\243\SUB\192\CAN#.CY\DC2$\133np\170N\219\242\245\&2\163\154\134\232R\v(\171\ETB\242=\225V\196g\228\147\231\STX]\US\238m\175ok\238\160\CAN\172{w\ACK\"\173\ACK\240P\208;H\195Hf@u\228\&0l(\165\131"},MsgRR {rrName = Name "torproject.org.", rrClass 
= Class 1, rrTTL = TTL 299, rrData = RDataRRSIG 1 8 2 300 1497627735 1494168135 59061 (Name "torproject.org.") "C%\201\209\ai\SUB\SYN\150_\EOT\209@/\183\150\229s\156\&8\250\177\174\245!\154\USH\132\188\DC3\138]\167}l\182\&8=\166\233\DEL\153T\169\159O9\195\177\"T\130\146\223s\ACKkp\224\172\DC2\\\182\224\ESC\STX\DC1\202\224\243_u\204\246\159\146t=`\ENQ\UST\157)5-3\212=\151\250U\180J\EM\202\DC2\200\169a+\196T\226\228x2\NUL\208\141\189\178#\b\US\189\ACKU{\245\tq7\216\158\159\237;6\196l\193\146\178\178\&8\161\154e\166\210\150\183c]!\DEL\211>\US&\EOT\208\253Q\161x\FS\251\186\141T7\171%=\207c\ETX\t#H\208\157t\252\194$(rI\146\223\189<\SUB\211\161\&5\253D"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 2 8 2 86400 1497572963 1494115902 59061 (Name "torproject.org.") "$_\241\250\&5\246[\189\226iR\196\138PqqH\214\176@!\155\244V\195\224G\209n\234z\SYN.\151\SYN\187\194\151\167!#x\222\&4\163\169\170\232Ac\181Z\142\233\166\CAN\246\EOT\t\132\218\&0\213\140AK\220\162\237\248\144l\161z\RS\128\254\213\176\223\US\231\139\203\215\"\145\214$\192l\153F +[\175\201\"\136\189\r%\177\169\194\242\145\158\233\EM\202\NUL4w\238\168\SOH\a\DC15\n\160\221\DC2\rj'+\194\130c\ENQ\198k\249{/p]\181\159\202h\167\151\162dH\SOmt\242\176J\134\229\163d\203\222\214*\245\252cYqa\242Q\GS\192:\167\EM.\200E\171@\NAK2P\SUB0l\247=-(\f"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 6 8 2 3600 1497692652 1494233052 59061 (Name "torproject.org.") 
"'^\229\253\SO\135\241\160\t\195\229W\129\150\201\130\251q@\213\217\&3.;\243\218!\192\183<\217~q\ENQ+\ENQ\DC2M\230\135}\183\197\168#\143\&5\201\213_\SUBx\196\\{\218\"\234(\216\133\211\209\213N\237\USRURz\128P\NAK\253y\175\207\DC3\230m\202\164\222\&9\167\128\220\ESC\184\206dB\SI\152\189\133q#Ua\234\175\200G\FS\224\189'Kk\143\229u\136vX\SYNY\242n.~t\188\132\254|\244\150k4\217\180\181\b\167\DC2\181\174\150\139)Jo\145-\180\141$\225\DC1\245\a\154\GS\201Fb\213\150q\v\v\140\&608\158\216\144Yj\184\151\237?\192\190\EOT\162?,\249\STXb\166\187\254\199w\212"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 48 8 2 3600 1497462010 1494002410 17224 (Name "torproject.org.") "\143\&8*?\209\140\SI\160f\254h\203\v\219\226B\153\148U\SI\159\182\233\158\ETX\149\134\142\US\233`\\\138L\DLE\135txpz\148\198y\230\237\230h\228\&0W\179\n\186\ETB\194\146\DEL\r/\190\138t\188\&1\202\164\EM\RS\163\183\184\EM\DC1\CAN1\204\207 \rk\129\200^+z\" \134\SO\RSX\249\DC4\130\206\204,\158\158\130\159\176\&8\248p;}-M\139\r\133\152\&1\217\203\r\246\ACK\234\ETXd\246\247\135O\252\153\229(\250d9\159T\184u\202Q\195\SOH\146\176;\137OM\219\DEL\DEL\178\249\STX\b*{\186e\203\157\150\&1\194x\147#9\161\232\143\172\250\168\172\ENQZ\194\233S\EMr7\234\220\142\233\205x\132T\v \183a\158\225\214(\240\234\143h\217\230\DEL\229YX\213 \211\228\146\&2\ESC\187\229`\132\187k\231a\EM\193\129\DLEW\246Wm\231\197#\ESC\NAK\158p()\a\\V\164\152\164\252\158\&6\168\200\140\&8e\147c"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 48 8 2 3600 1497462010 1494002410 35740 (Name "torproject.org.") "bZI\194E(T\n\244~\160@\SYN\DC4L\164\167\251|\251\n\254\162\184\139\NULe\247\159\DEL\NUL[\180\152?\153\148\DC2\203\138\253\155\189\248\139\178b\192\149u 
\NAK\235\204\194\NUL\203\205|/7\130\&9\147>\139{1L\182/[\165\205R\233A\175=s\157\242,\230:\211\210\163\187v\DEL\160u\205\136\253\176\252\b\156\ENQ\158;\239\153sTw_7j\227w\129\194tA\161\DC2\227\182#\DEL\174\212\251\148\151\205\201,\167\SO\243\132\198\138]\161|\129\199\&9\160\151'\243\&7\226\150/\238\192v\159\129.\209\170RNV\233\238\212\254\255\158\v\251\137\206\175\180\214K\253L\144V)\179>m$\131\147\241|\242w[z\246cVV\216p\142e@)\232\253\146i\200\220\196\234[\230\203C\b\234\141\139\157:z\a\202\141\166a\ta\DC2g\188\239\196\194\186\220A\202\246K?M\217w\158\199\208d\216$\188\215\136t\175"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 48 8 2 3600 1497462010 1494002410 59061 (Name "torproject.org.") "D&\210'\154\164~\193;m\223\216\174\203\&0[\159\208\220\236\232\206u\DC3\ETXn\214l@\195\223g\252Z\254\147T\DELWc\246\129\185\141\v\133\147_\DC4\NAK\173\231[\195\216h\225\159F=\SO\188\230\&0\254\DC4+_E\155\174%A\ETX\229j\vuKz\ENQ\150\240\bV['\237u}P\255\155@\154\149\247\189\251\EOT\DLE\t\174\254\142$x\RS\245~p\248\139\172\240\254\249b\184\SYN$\228\DC4\ACK\168_\189-S\202\136\210\213\198\DC3y\169\144\SI\149\250\v,vW)C\187S+\147\183\SUB<\167c\SUB\188\CAN#\144\195\FS\182}\185\139N\199\251`2\139\140\255|V\238\158r\ESC\CAN\215\STXZ1\170\188R\f\228\221"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 256 3 8 "\ETX\SOH\NUL\SOH\180\DC4Zp{K?\191H\186\210^\193\197\EOTV\232\145\161\187\238\v!\209\240\NAK\192\191}>3\214\246\145o\152\142%\134\208CZR\US\157\147=\DEL\128\238\NAKH\138jA\222\155M\164\153\225a\172F\135\189F\135'g-N\197*\131\171\b>P\128\138b{\r\157u\230[rpa7\ESC\194\162-\255;>\170X\231\150\177\&3w\255\173\226X\143:\196\241z\131\CAN\146(\US|_\154)*\b\152\184\ENQ\182\159\STX\CAN\178\183\238!\248\b\172\EOT/\166\144\151\DC1U\140\&7W\NUL\200\246\a\STX\174\225\175\249\130\149\SO^\250o\"\173\250\143\178S\220c\178\197;\EOTz\149\SUBi\181\211\254\209\142md\209\135\201;"},MsgRR {rrName = 
Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 257 3 8 "\ETX\SOH\NUL\SOH\208\227\159\216\218\187\227H\b\208\244\f\139\EOT\159\210vW\RS\163:\SUB\180\179Y\255\178\156|0\160r\187\b\197\251\183\201\147xh\227\&9\202\194\209{\211xv\241\134Q\201EQ\151[\140,a\251<\DLE\211\230\196\&9s2\226\253\225\226\&2\173,09\192\217\bx\247>R\CAN\224] 6E$k3&\159P\205\163\248v\153\ETXi{\129,nhf\144LF\212\NUL\SUBkt~?\184\190\207v\228\165D\196\163\&3@\198\249![\224\148]\182Pv\212\165\238p\162\165\212\175a\167\206\152\138\130\130\176\167/>\164D\194,-\t\255\ETB\201\&3\143\230\DC4}\255\236\226\"E\224\138\150\168\SO\SOH\221\163\156c\EM&\148\&8\135\&7\v~\211\&1\ENQ\252_\234r\137{\184\239\174\233\&2\206\DC2\NAK5\RS\174\242\213K=T\194).\196\158\165\&0i\136Dh&\231\&0\SOH2\SYN\130\ACK\243\&5\131\198L\234}\188\242\&2\174\248\130\151"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 257 3 8 "\ETX\SOH\NUL\SOH\201eI\ACKo\150\NUL\219\&5d\"\\\t\226_\210\DC3#%\169r\169S\ESC\132\EOT\181\SI\EOT\DC3#\t^\220\178jNrh\DC3\SYN\162o\ESC\236\162\181\196\232\165\169\209\243\"\v\NULx4\ru\175a%\169\&5\165\SUBC\177\&1\SUB[\DELn\SO\230/\137\&8\136\205U\195&\245vC.V4`\180J~,c\198\233\245\186\184.\146\151pt\198IYI\198,\248\252OO8\172;*\190v$\181\195\128\147\DC4\n\140\DLE\RS5\159\139\136\253\&1}S\254\SYN\218pI\225~8\234@\144%\131\191;u\133S&\154\130\221\220\213G}@\153\141\218,\161\190\t\172Y\v\211\218\245\&6#9e}\188\ENQ\164\134n\130\222\191\239\190\200\202\132\241\157\211\FS\227\147\DC2\198\156kE\SI\247\133\170\145\206\210v\148\EM\148%\200\229\192\220^\222Ye}\194*\237(\149-\171q\242\157\SUB\170\249\213\199\153\RS\v\137\222\191\183\180\209\167\183"},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns5.torproject.org.")},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns1.torproject.org.")},MsgRR {rrName = Name "torproject.org.", rrClass 
= Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns3.torproject.org.")},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns4.torproject.org.")},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns2.torproject.org.")},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x9a238446)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x8ac90ec5)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x26e54810)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x592deb15)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x563b1e28)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x52c34b65)},MsgRR {rrName = Name "torproject.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataMX 10 (Name "eugeni.torproject.org.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/53d71a9e72adf19251123b46f31769bb.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/53d71a9e72adf19251123b46f31769bb.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/53d71a9e72adf19251123b46f31769bb.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/53d71a9e72adf19251123b46f31769bb.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/53d71a9e72adf19251123b46f31769bb.show 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/53d71a9e72adf19251123b46f31769bb.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 38143, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "c.f.f.c.7.f.e.0.0.6.b.7.5.a.4.0.4.0.1.0.5.2.8.7.1.0.8.4.1.0.0.2.ip6.arpa.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "c.f.f.c.7.f.e.0.0.6.b.7.5.a.4.0.4.0.1.0.5.2.8.7.1.0.8.4.1.0.0.2.ip6.arpa.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataPTR (Name "ghc.haskell.org.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/54f81230e47e2399d16a309e1227025e.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/54f81230e47e2399d16a309e1227025e.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/54f81230e47e2399d16a309e1227025e.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/54f81230e47e2399d16a309e1227025e.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/54f81230e47e2399d16a309e1227025e.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/54f81230e47e2399d16a309e1227025e.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 53536, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = True, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount 
= 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 5) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 2306, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 4096, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/552aec026306990d49a098d0a4608434.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/552aec026306990d49a098d0a4608434.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/552aec026306990d49a098d0a4608434.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/552aec026306990d49a098d0a4608434.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/552aec026306990d49a098d0a4608434.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/552aec026306990d49a098d0a4608434.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 2, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "4.4.2.2.3.3.5.6.8.1.4.4.e164.arpa.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "4.4.2.2.3.3.5.6.8.1.4.4.e164.arpa.", rrClass = Class 1, rrTTL = TTL 86046, rrData = RDataNAPTR 100 20 "u" "E2U+pstn:tel" "!^(.*)$!tel:\\1!" (Name ".")},MsgRR {rrName = Name "4.4.2.2.3.3.5.6.8.1.4.4.e164.arpa.", rrClass = Class 1, rrTTL = TTL 86046, rrData = RDataNAPTR 100 10 "u" "E2U+sip" "!^\\+441865332(.*)$!sip:\\1@nominet.org.uk!" 
(Name ".")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/57394dfc69f9e32c0c0cd9d4d2057d87.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/57394dfc69f9e32c0c0cd9d4d2057d87.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/57394dfc69f9e32c0c0cd9d4d2057d87.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/57394dfc69f9e32c0c0cd9d4d2057d87.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/57394dfc69f9e32c0c0cd9d4d2057d87.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/57394dfc69f9e32c0c0cd9d4d2057d87.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 45729, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 15) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 3542, rrData = RDataMX 10 (Name "microsoft-com.mail.protection.outlook.com.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/579ff887c8ea54e4173934be5e85faec.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/579ff887c8ea54e4173934be5e85faec.bin differ diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/579ff887c8ea54e4173934be5e85faec.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/579ff887c8ea54e4173934be5e85faec.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/579ff887c8ea54e4173934be5e85faec.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/579ff887c8ea54e4173934be5e85faec.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 33589, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 4, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 16) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1524, rrData = RDataTXT ["v=spf1 include:_spf-a.microsoft.com include:_spf-b.microsoft.com include:_spf-c.microsoft.com include:_spf-ssg-a.microsoft.com include:spf-a.hotmail.com ip4:147.243.128.24 ip4:147.243.128.26 ip4:147.243.1.153 ip4:147.243.1.47 ip4:147.243.1.48 -all"]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1524, rrData = RDataTXT ["google-site-verification=6P08Ow5E-8Q0m6vQ7FMAqAYIDprkVV8fUf_7hZ4Qvc8"]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1524, rrData = RDataTXT ["FbUF6DbkE+Aw1/wi9xgDi8KVrIIZus5v8L6tbIQZkGrQ/rVQKJi8CjQbBtWtE64ey4NJJwj5J65PIggVYNabdQ=="]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1524, rrData = RDataTXT ["docusign=d5a3737c-c23c-4bd0-9095-d2ff621f2840"]}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/585424227713068d541ca07b184abd89.bin and 
/tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/585424227713068d541ca07b184abd89.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/585424227713068d541ca07b184abd89.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/585424227713068d541ca07b184abd89.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/585424227713068d541ca07b184abd89.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/585424227713068d541ca07b184abd89.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 32, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "eff.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataSOA (Name "ns1.eff.org.") (Name "hostmaster.eff.org.") 2017042405 600 1800 604800 1800},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNS (Name "ns6.eff.org.")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNS (Name "ns1.eff.org.")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNS (Name "ns2.eff.org.")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataTXT ["v=spf1 mx ip4:173.239.79.202 include:spf1.eff.org include:spf2.eff.org -all"]},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataMX 10 (Name "dummy1.eff.org.")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataMX 15 (Name "dummy2.eff.org.")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, 
rrTTL = TTL 599, rrData = RDataMX 5 (Name "mail2.eff.org.")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataA (IPv4 0x4532e836)},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 220 10 "" " @@@@@@@@@@@@@@@@@@!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 230 10 "" " @@@@@@@!!!!!!!!!!!!!!!!!!!! !!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 240 10 "" " @@@@@@@!!!!!!!!!!!!!!!!!!!! !!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 250 10 "" " @@@@@@@!!!!!!!!!!!!!!!!!!!! !!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 260 10 "" " @@@@@@@@@@@@@@@@@@!!!!!!!!! !!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 270 10 "" " @@@@@@@@@@@@@@@@@@!!!!!!!!! !!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 280 10 "" " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 290 10 "" " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 300 10 "" " !!!!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 310 10 "" " !!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 320 10 "" " !!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 100 10 "" " !!!!!!!!! 
" "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 110 10 "" " !!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 120 10 "" " !!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 130 10 "" " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 140 10 "" " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 150 10 "" " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 160 10 "" " @@@@@@@@@@@@@@@@@@!!!!!!!!! !!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 170 10 "" " @@@@@@@@@@@@@@@@@@!!!!!!!!! !!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 180 10 "" " @@@@@@@!!!!!!!!!!!!!!!!!!!! !!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 190 10 "" " @@@@@@@!!!!!!!!!!!!!!!!!!!! !!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 200 10 "" " @@@@@@@!!!!!!!!!!!!!!!!!!!! !!!!!!!!!!!!!!!!!!! " "" (Name ".")},MsgRR {rrName = Name "eff.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNAPTR 210 10 "" " @@@@@@@@@@@@@@@@@@!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
" "" (Name ".")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/593b5d26fc010f953c99621a7d608c8d.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/593b5d26fc010f953c99621a7d608c8d.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/593b5d26fc010f953c99621a7d608c8d.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/593b5d26fc010f953c99621a7d608c8d.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/593b5d26fc010f953c99621a7d608c8d.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/593b5d26fc010f953c99621a7d608c8d.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 19974, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 3, mhNSCount = 1, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 12) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 2950, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 20003, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")}], msgNS = [MsgRR {rrName = Name "dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 999, rrData = RDataSOA (Name 
"n0dspb.akamaiedge.net.") (Name "hostmaster.akamai.com.") 1494155229 1000 1000 1000 1800}], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/5fb7059d3cc96ae5eb05b06b7212bf76.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/5fb7059d3cc96ae5eb05b06b7212bf76.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/5fb7059d3cc96ae5eb05b06b7212bf76.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/5fb7059d3cc96ae5eb05b06b7212bf76.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/5fb7059d3cc96ae5eb05b06b7212bf76.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/5fb7059d3cc96ae5eb05b06b7212bf76.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 64444, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 2, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.google.com.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "www.google.com.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0xd83ac9e4)},MsgRR {rrName = Name "www.google.com.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x2a00145040070816 0x2004)}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/60c1e26a578cd3007a592250dbad30be.bin and 
/tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/60c1e26a578cd3007a592250dbad30be.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/60c1e26a578cd3007a592250dbad30be.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/60c1e26a578cd3007a592250dbad30be.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/60c1e26a578cd3007a592250dbad30be.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/60c1e26a578cd3007a592250dbad30be.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 61653, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 5, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 148, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")},MsgRR {rrName = Name "j.global-ssl.fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataA (IPv4 0x97650044)},MsgRR {rrName = Name "j.global-ssl.fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataA (IPv4 0x97654044)},MsgRR {rrName = Name "j.global-ssl.fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataA (IPv4 0x97658044)},MsgRR {rrName = Name "j.global-ssl.fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataA (IPv4 0x9765c044)}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/616d307f2f3407a001c0ed31ae01daf8.bin and 
/tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/616d307f2f3407a001c0ed31ae01daf8.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/616d307f2f3407a001c0ed31ae01daf8.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/616d307f2f3407a001c0ed31ae01daf8.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/616d307f2f3407a001c0ed31ae01daf8.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/616d307f2f3407a001c0ed31ae01daf8.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 2963, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 1, mhARCount = 0}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 6) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [MsgRR {rrName = Name "fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataSOA (Name "ns1.fastly.net.") (Name "hostmaster.fastly.com.") 2016110301 3600 600 604800 30}], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/62a40ed1d8ac22ee6dcca1d8cc6e1733.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/62a40ed1d8ac22ee6dcca1d8cc6e1733.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/62a40ed1d8ac22ee6dcca1d8cc6e1733.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/62a40ed1d8ac22ee6dcca1d8cc6e1733.show --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/62a40ed1d8ac22ee6dcca1d8cc6e1733.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/62a40ed1d8ac22ee6dcca1d8cc6e1733.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 26891, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 3, mhNSCount = 1, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 16) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 2846, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 16652, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 785, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")}], msgNS = [MsgRR {rrName = Name "dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 999, rrData = RDataSOA (Name "n0dspb.akamaiedge.net.") (Name "hostmaster.akamai.com.") 1494155077 1000 1000 1000 1800}], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/62ce31cf45d2d095d384da330a6e6189.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/62ce31cf45d2d095d384da330a6e6189.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/62ce31cf45d2d095d384da330a6e6189.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/62ce31cf45d2d095d384da330a6e6189.show --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/62ce31cf45d2d095d384da330a6e6189.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/62ce31cf45d2d095d384da330a6e6189.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 32245, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/68c43b7f9e85fb0b77552dc905fe6537.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/68c43b7f9e85fb0b77552dc905fe6537.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/68c43b7f9e85fb0b77552dc905fe6537.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/68c43b7f9e85fb0b77552dc905fe6537.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/68c43b7f9e85fb0b77552dc905fe6537.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/68c43b7f9e85fb0b77552dc905fe6537.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 10072, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, 
msgQD = [MsgQuestion (Name "_mirrors.hackage.haskell.org.") (Type 16) (Class 1)], msgAN = [MsgRR {rrName = Name "_mirrors.hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataTXT ["0.urlbase=http://hackage.fpcomplete.com/","1.urlbase=http://objects-us-west-1.dream.io/hackage-mirror/"]}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/6a0b07a53da450663489f3ff62fc7866.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/6a0b07a53da450663489f3ff62fc7866.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/6a0b07a53da450663489f3ff62fc7866.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/6a0b07a53da450663489f3ff62fc7866.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/6a0b07a53da450663489f3ff62fc7866.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/6a0b07a53da450663489f3ff62fc7866.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 3, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "fencepost.gnu.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "fencepost.gnu.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x2001483001340003 0xe)},MsgRR {rrName = Name "fencepost.gnu.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0xd076eb0a)},MsgRR {rrName = Name "fencepost.gnu.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataSSHFP 1 1 "\ETB8\CAN\146I?\149\218`\185\b[f\189\CAN\185\142\190\252#"}], msgNS = [], 
msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/6b49a8a930ab6cdaf5ae91c822247811.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/6b49a8a930ab6cdaf5ae91c822247811.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/6b49a8a930ab6cdaf5ae91c822247811.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/6b49a8a930ab6cdaf5ae91c822247811.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/6b49a8a930ab6cdaf5ae91c822247811.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/6b49a8a930ab6cdaf5ae91c822247811.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 23209, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 3, mhNSCount = 1, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 15) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 2381, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 16755, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")}], msgNS = [MsgRR {rrName = Name "dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 590, rrData = RDataSOA (Name "n0dspb.akamaiedge.net.") (Name 
"hostmaster.akamai.com.") 1494154820 1000 1000 1000 1800}], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/6f5b2b7d7ffa62ab3b6bce45ab3b0b51.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/6f5b2b7d7ffa62ab3b6bce45ab3b0b51.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/6f5b2b7d7ffa62ab3b6bce45ab3b0b51.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/6f5b2b7d7ffa62ab3b6bce45ab3b0b51.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/6f5b2b7d7ffa62ab3b6bce45ab3b0b51.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/6f5b2b7d7ffa62ab3b6bce45ab3b0b51.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 3198, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 4, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 16) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataTXT ["google-site-verification=6P08Ow5E-8Q0m6vQ7FMAqAYIDprkVV8fUf_7hZ4Qvc8"]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataTXT ["v=spf1 include:_spf-a.microsoft.com include:_spf-b.microsoft.com include:_spf-c.microsoft.com include:_spf-ssg-a.microsoft.com include:spf-a.hotmail.com ip4:147.243.128.24 ip4:147.243.128.26 ip4:147.243.1.153 ip4:147.243.1.47 ip4:147.243.1.48 -all"]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = 
RDataTXT ["docusign=d5a3737c-c23c-4bd0-9095-d2ff621f2840"]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataTXT ["FbUF6DbkE+Aw1/wi9xgDi8KVrIIZus5v8L6tbIQZkGrQ/rVQKJi8CjQbBtWtE64ey4NJJwj5J65PIggVYNabdQ=="]}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/75330cfe5b4c19161dee2d9f578d8fb4.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/75330cfe5b4c19161dee2d9f578d8fb4.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/75330cfe5b4c19161dee2d9f578d8fb4.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/75330cfe5b4c19161dee2d9f578d8fb4.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/75330cfe5b4c19161dee2d9f578d8fb4.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/75330cfe5b4c19161dee2d9f578d8fb4.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 1192, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = True, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 5) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 3478, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/7a13aa49d0b498ee7b5073d1f1370273.bin and 
/tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/7a13aa49d0b498ee7b5073d1f1370273.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/7a13aa49d0b498ee7b5073d1f1370273.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/7a13aa49d0b498ee7b5073d1f1370273.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/7a13aa49d0b498ee7b5073d1f1370273.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/7a13aa49d0b498ee7b5073d1f1370273.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 51432, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 5, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "_xmpp-server._tcp.gmail.com.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, srvPort = 5269, srvTarget = Name "alt3.xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, srvPort = 5269, srvTarget = Name "alt4.xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 5, srvWeight = 0, srvPort = 5269, srvTarget = Name "xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, srvPort = 5269, srvTarget = Name "alt2.xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, 
rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, srvPort = 5269, srvTarget = Name "alt1.xmpp-server.l.google.com."})}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/7c47e44ab3c0e8d6f3d5f4246dc4a0d2.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/7c47e44ab3c0e8d6f3d5f4246dc4a0d2.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/7c47e44ab3c0e8d6f3d5f4246dc4a0d2.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/7c47e44ab3c0e8d6f3d5f4246dc4a0d2.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/7c47e44ab3c0e8d6f3d5f4246dc4a0d2.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/7c47e44ab3c0e8d6f3d5f4246dc4a0d2.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 24972, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "4.4.8.8.in-addr.arpa.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "4.4.8.8.in-addr.arpa.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataPTR (Name "google-public-dns-b.google.com.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/7e3570aada9975cbb2285ed217fe5016.bin and 
/tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/7e3570aada9975cbb2285ed217fe5016.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/7e3570aada9975cbb2285ed217fe5016.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/7e3570aada9975cbb2285ed217fe5016.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/7e3570aada9975cbb2285ed217fe5016.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/7e3570aada9975cbb2285ed217fe5016.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 7, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "example.com.") (Type 43) (Class 1)], msgAN = [MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 64454, rrData = RDataDS 31589 8 1 "4\144\166\128mG\241z4\194\158,\232\SO\138\153\159\251\228\190"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 64454, rrData = RDataDS 31589 8 2 "\205\224\215B\214\153\138\165T\169-\137\SI\129\132\198\152\207\172\138&\250Y\135Z\153\f\ETX\229v4<"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 64454, rrData = RDataDS 43547 8 1 "\182\"Z\178\204a>\r\202yb\189\194\&4.\164\241\181`\131"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 64454, rrData = RDataDS 43547 8 2 "aZd#5C\246oD\214\137\&3b[\ETBI|\137\167\SO\133\142\215j!E\153~\223\150\169\CAN"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 64454, rrData = RDataDS 31406 8 1 "\CAN\153h\129\RSn\186\134-\214\194\t\247V#\216\217\237\145B"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 
64454, rrData = RDataDS 31406 8 2 "\247\140\243\&4Or\DC3r5\t\142\203\189\b\148|,\144\SOH\199\246\160\133\161\DELQ\139]\143k\145m"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 64454, rrData = RDataRRSIG 43 8 2 86400 1494563200 1493954200 27302 (Name "com.") "\133-dQ\179\DC3\EMi\SI\166\249\140E\212\163\219\181k\SI\197\198M\231\EOTJ\203YO\189Z\151\170p\232U$\160\DC2A6\234c\233\DLE\183\150\200\SOH\177\164\DC2c\161\223\201AB\ENQr\241*\a\251]\tWQ\219\128vPs\145g@\153=\140\141\139\176\222\238:d\145\184\SYN`\ACK\156Z\SIavT@\ENQ\140\179D\200@ s\190\239r\237\160\213\225\FS\215\129\EM\252\ETX\135\ENQ]\225*\CAN\USg\240\242"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/7f4d4de8e74e86e10d19f1f7428609e4.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/7f4d4de8e74e86e10d19f1f7428609e4.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/7f4d4de8e74e86e10d19f1f7428609e4.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/7f4d4de8e74e86e10d19f1f7428609e4.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/7f4d4de8e74e86e10d19f1f7428609e4.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/7f4d4de8e74e86e10d19f1f7428609e4.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 65505, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 255) (Class 1)], msgAN = 
[MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 1318, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/816c6c332941a20f07c497ee16609971.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/816c6c332941a20f07c497ee16609971.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/816c6c332941a20f07c497ee16609971.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/816c6c332941a20f07c497ee16609971.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/816c6c332941a20f07c497ee16609971.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/816c6c332941a20f07c497ee16609971.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 10909, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 5, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 905, rrData = RDataA (IPv4 0x6828d323)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 905, rrData = RDataA (IPv4 0xbfefd5c5)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 905, rrData = RDataA (IPv4 0x17603435)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 905, rrData = RDataA (IPv4 0x17647aaf)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 905, rrData = RDataA (IPv4 0x682bc3fb)}], msgNS = [], msgAR = [MsgRR {rrName = 
Name ".", rrClass = Class 4096, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/81c9cbfb364a8be0302bd0f4f600c3f6.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/81c9cbfb364a8be0302bd0f4f600c3f6.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/81c9cbfb364a8be0302bd0f4f600c3f6.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/81c9cbfb364a8be0302bd0f4f600c3f6.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/81c9cbfb364a8be0302bd0f4f600c3f6.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/81c9cbfb364a8be0302bd0f4f600c3f6.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 31016, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "4.4.8.8.in-addr.arpa.") (Type 12) (Class 1)], msgAN = [MsgRR {rrName = Name "4.4.8.8.in-addr.arpa.", rrClass = Class 1, rrTTL = TTL 86133, rrData = RDataPTR (Name "google-public-dns-b.google.com.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 4096, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/87aace8db5b6cb0b4b8c6194967f345b.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/87aace8db5b6cb0b4b8c6194967f345b.bin differ diff -Nru 
cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/87aace8db5b6cb0b4b8c6194967f345b.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/87aace8db5b6cb0b4b8c6194967f345b.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/87aace8db5b6cb0b4b8c6194967f345b.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/87aace8db5b6cb0b4b8c6194967f345b.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 0, mhNSCount = 4, mhARCount = 1}, msgQD = [MsgQuestion (Name "debian.org.") (Type 50) (Class 1)], msgAN = [], msgNS = [MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataSOA (Name "denis.debian.org.") (Name "hostmaster.debian.org.") 2017050804 1800 600 1814400 600},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataRRSIG 6 8 2 3600 1497698216 1494238616 53598 (Name "debian.org.") "\141G\148\&8Ru\215~\167\220\201\a\234\142\f\136?p\174d\136E\130\135\136\&7vA\212\"x \188\189\SYN\166x\158y\236\DC18\163\241\217B\169;\200IUF\136\138\253A2Fq\150M:\174C/Y\GS\194XD\135\216\140P\DC1U\187\STX\163DM\182\205\243\185\170\165C\209,bp\195\246\236\162)\180\209\ETX\216\232\219\241<\219\158S\EOT+$C\250\174\b\237\155:\233\177@\208F\192\198\131\132zsO9@\f\247\138\SOHjM>ajU\179\t\n\222\203b\DLE\253\224\&4\240\DC4N\146ua\148\208\185\183:}\156\148\153\171\ve\180\SI\229\193\140\232\SOH\182<#?P\STX\164#3\167.\163\143\&9\205"},MsgRR {rrName = Name "r0ju544ltcbavo1k3br87djp168tl58j.debian.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataNSEC3 1 0 16 "\SO\192\162\220\222" "\216>\235R\171\NAK\DC2a\203\134\DC37\173/E\197I$\182v" (fromList [Type 
1,Type 2,Type 6,Type 15,Type 28,Type 35,Type 46,Type 48,Type 51,Type 65534])},MsgRR {rrName = Name "r0ju544ltcbavo1k3br87djp168tl58j.debian.org.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataRRSIG 50 8 3 600 1497614849 1494156550 53598 (Name "debian.org.") "\234\DLE\144\162h\RSm\188|\SOHB\233\215\RSJ0\DLEoD \218\178\131|\251\205\&3\246WZ\227\208\v\EMD\251\153t\193\155\145\133w7\EM+\133\244\129\159\SUBgT\DLE~{\229\218\187\&7<\173D\169\204\169f*\v\189\149_\161\210\207\173\169\166\DC3\168\DLE\n\141F\221\138f\131\214'\194\131\ACKc\r,e\180\bON\235'\132\r\198\160\242\v*\ETX\ETB\140x<\135\165\219\ETXk\NUL\221\&0\216/};\223\241\US\"*@\230\135`\216\222\207\234\217dfI;\238g5u+\DC1e\155\SO\195\239\229\196("},MsgRR {rrName = Name "_kerberos.fedoraproject.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNSEC (Name "_openpgpkey.fedoraproject.org.") (fromList [Type 16,Type 46,Type 47,Type 256])},MsgRR {rrName = Name "_kerberos.fedoraproject.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 47 5 3 86400 1496578736 1493986736 7725 (Name "fedoraproject.org.") "\144\141\152~q\a\220}#\DC2\228\150\140\166\209\152B\202\r\SO\184\ENQ\176\202\SOH\167\ETB==\\WN\ETB\193\242\159v\141\218\158\232\DC3O3x\130=\188P0\SI(\140\129R)\223fd\183qB\187_\GS\165\128wGR\228p\204\&7\DC3\173\186\130\221l\135\STX\STX\147\DLEV\243\159\&9\STX\138\209\DC4=\DLE\DC4\"/X\232Y\190\134\198\216k-\160q:p\206\189\176~ \154\242\226\&9m\232Z\214\129 \228_"},MsgRR {rrName = Name "_kerberos.fedoraproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataURI 10 1 "krb5srv:m:kkdcp:https://id.fedoraproject.org/KdcProxy/"},MsgRR {rrName = Name "_kerberos.fedoraproject.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataRRSIG 256 5 3 300 1496578736 1493986736 7725 (Name "fedoraproject.org.") 
"\ESCg\174[\230\138Y_\218\200\&2\SUB^\ETX\243\128\&6\180\131\136\245\181\158'\224%\204n~%\CANN=n2L\146\203h\198\153\SYN\175\221\243\EOT\171}\137\136\181\SO\191\226b\152\SI\222k{\132R\143\220U@\250\220~\235\154\159\186E\211&\187\174Jjjs\196H:%\248\EOT\235\194\182\139q\181\158\187\131\164bE\f:\210^f\NAK\147\ETXF\RS\203\&4\174\188\164\153\198\v\142kJ\188\136\173~\150\128_"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/8e08e117a1d187bac5d498ffc2797f3a.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/8e08e117a1d187bac5d498ffc2797f3a.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/8e08e117a1d187bac5d498ffc2797f3a.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/8e08e117a1d187bac5d498ffc2797f3a.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/8e08e117a1d187bac5d498ffc2797f3a.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/8e08e117a1d187bac5d498ffc2797f3a.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 16846, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = True, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 5) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 185, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [], msgAR = []} \ No newline at end of file Binary files 
/tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/9499a890bd65e85e74e68ca5631941f7.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/9499a890bd65e85e74e68ca5631941f7.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/9499a890bd65e85e74e68ca5631941f7.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/9499a890bd65e85e74e68ca5631941f7.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/9499a890bd65e85e74e68ca5631941f7.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/9499a890bd65e85e74e68ca5631941f7.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 48779, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 5, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "_xmpp-server._tcp.gmail.com.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, srvPort = 5269, srvTarget = Name "alt3.xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, srvPort = 5269, srvTarget = Name "alt1.xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 5, srvWeight = 0, srvPort = 5269, srvTarget = Name "xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, 
srvPort = 5269, srvTarget = Name "alt2.xmpp-server.l.google.com."})},MsgRR {rrName = Name "_xmpp-server._tcp.gmail.com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSRV (SRV {srvPriority = 20, srvWeight = 0, srvPort = 5269, srvTarget = Name "alt4.xmpp-server.l.google.com."})}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/949ea24aecb8e40432f6af14ceb91ab6.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/949ea24aecb8e40432f6af14ceb91ab6.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/949ea24aecb8e40432f6af14ceb91ab6.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/949ea24aecb8e40432f6af14ceb91ab6.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/949ea24aecb8e40432f6af14ceb91ab6.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/949ea24aecb8e40432f6af14ceb91ab6.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 3}, mhQDCount = 1, mhANCount = 0, mhNSCount = 8, mhARCount = 1}, msgQD = [MsgQuestion (Name "xfoo.org.") (Type 255) (Class 1)], msgAN = [], msgNS = [MsgRR {rrName = Name "dlbdq9qmbnmmf3v28ndnjkv55oc2f0ad.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNSEC3 1 1 1 "\211\153\234\171" "mW\246\229v\\-\n\185\148\151&\141\DEL\a\191\200\217%\231" (fromList [Type 2,Type 43,Type 46])},MsgRR {rrName = Name "dlbdq9qmbnmmf3v28ndnjkv55oc2f0ad.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 50 7 2 86400 1496158069 1494340069 3947 (Name "org.") 
"v\DLEJ\225\185\241\204\204jl\208\SYN\221(%\168G:eLV\241E\195$\239\CAN\223\164\195\CAN\246E|\179M\168G\159\&2RAR'\136\150\171\NUL\238%\ACK\EM\196\143\r\250\222N\244x\244\&1\255=g|dB/\239Y\b\205\255n\156\179&\140\ETBfG\232\207\172\f\ETB\251GK\201\DC4\180\242\209\172\149\218\210\222\145\161^yi\144\246\&2A\141\239\140\214(\190\EOT\251\165\211ySH\EOT8\232\ETX\DC2\222"},MsgRR {rrName = Name "h9p7u7tr2u91d0v0ljs9l1gidnp90u3h.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNSEC3 1 1 1 "\211\153\234\171" "\138r\173\236\198OM\228`0\228\DC2\224\214\210\237\b\215X\GS" (fromList [Type 2,Type 6,Type 46,Type 48,Type 51])},MsgRR {rrName = Name "h9p7u7tr2u91d0v0ljs9l1gidnp90u3h.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 50 7 2 86400 1496166683 1494348683 3947 (Name "org.") "\162,\189o\CAN@\248\170$\DEL\DC4|\128\134\232\SYN\246\230rE\153{A\173x\241#8|\231N\166\163{b\145[uB\f\148,\148(\242E_\192\225L\193\186*\232\154\US\173\174\133^\SUB[\139\208+B\CANOc=\144R\NAK\228\ACKc\237\171\159\181\b\SOH$;\138\216o\ETBq*\225\251\225\133WJlX\SOH=K\ETX\198\182\149\255\133\NAK\186R\213&\206\211\244\150\US\DC3'\f\186]\ETB\242BE\DLE\DC3"},MsgRR {rrName = Name "vaittv1g2ies9s3920soaumh73klnhs5.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNSEC3 1 1 1 "\211\153\234\171" "\250\166\180%\ru\DEL\210\250d\146Z\NAK\237\162~\250\190\&2\193" (fromList [Type 2,Type 43,Type 46])},MsgRR {rrName = Name "vaittv1g2ies9s3920soaumh73klnhs5.org.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 50 7 2 86400 1496158069 1494340069 3947 (Name "org.") "gO\136U\GSsg\199\162 \243\158\STX\190~\131\ETX\228=.\211x\191\SUB\194_\241\FS\158 \167\202\170\221\141\148\220\165\159\170\253\209\182\195\\\218N\132\&6\254\EM\158\200\251\\t\146\244\&1\254\241\DC41E\248\143)\227Hm\CAN,\EM\255\207~e|\163\200Eh\227q\ESC]}\169\188\229\139\245\166\206\221\254\145%\150\136\190T@\DC4:\SOH\249\177\148\189)le%_V\186u'\214\254C\245\NAK\212]l\237"},MsgRR {rrName = Name "org.", rrClass = Class 
1, rrTTL = TTL 899, rrData = RDataSOA (Name "a0.org.afilias-nst.info.") (Name "noc.afilias-nst.info.") 2012479177 1800 900 604800 86400},MsgRR {rrName = Name "org.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataRRSIG 6 7 1 900 1496166683 1494348683 3947 (Name "org.") "\DC1U\179b4\204\v\240\182\f-\170\206\195\144O2p\DC3\174\160\251\177\r\185\RS\199\RS\239\221/@\b\136\235\201[B\n\236P\SUBb\198>W\163~\221^\233\ETBr\\c\212\238\185\136w\USUG\128\201\161\&8\128WU\SUBS~~!\208\160\206\191H{u\158\192mP\253GE\128\155\nY\207\SOH_\DEL\163\208\195\139\US \225\253\182\237\231\244J\135-\ETB\204\130\187\217\209\162\161t\212\220\b\\l\ACK\NAK"}], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/98b101299888feb502f38066db59f0af.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/98b101299888feb502f38066db59f0af.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/98b101299888feb502f38066db59f0af.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/98b101299888feb502f38066db59f0af.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/98b101299888feb502f38066db59f0af.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/98b101299888feb502f38066db59f0af.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 18, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "openssl.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name 
"openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataSOA (Name "auth.openssl.org.") (Name "hostmaster.openssl.org.") 2017032573 86400 900 604800 3600},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataNS (Name "secondary.lp.se.")},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataNS (Name "primary.lp.se.")},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataNS (Name "ns.openssl.org.")},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataA (IPv4 0xc26196ea)},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataAAAA (IPv6 0x200106080c000180 0x100ea)},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataMX 50 (Name "mta.openssl.org.")},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 256 3 8 "\ETX\SOH\NUL\SOH\162l\158\239\255\&3\RS\144\175\162\&3}\143|\138\214Nk\NUL\200\217\214#*\171\176SW\ETX_-\200\235B\152\232\DC3\191\182\ACK\240\160\198R\188\156D\147\163L2\199\170\245\142\187\195\169\178\135\159\203\242o\177\196\230\129\234\206\171]\151\192S\225ufi\ACKKy>~D\134\192]\150}\168x\181O\250\214\ENQ8H\143\222.\221X\169_)\202\239\180\226}\200\189\252\v*\r/\210RP\145\184mk\197{"},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 257 3 8 "\ETX\SOH\NUL\SOH\188i\181\221\168\175\172\SOH\f>\207\ACK\209\158\184f\DC4\200\139e\164m\149\SO\140\240\198s\161\SYN#\ESC\151Vkd\145\193~\239\254\226\199\252\&9*\130M\DC1\157\148\132\169'\187\153\185R\139\186~\SYNg\153\239\128|\249\140\v+\194\a\r\220\128\150\&1\225\168_%\144\131\ETB>\DC3d\254\249M;\ESC\178\168\ETB7\216\193\198\173\165wT\EM\182and.\222\195\133\193\137\209+\130 
K\243\&77\150\211\234{\NULO\153s\ACK\230\212\143\186\141\CANl\247o+\171\136\217i/\SOH\255~\181I\255\128\239E\176\&0\224\205\SOH\141cK+q\253\t\162[\131\225o\248\144\140\173\151\204\144\&5}`\246\ESCn\180\t6U\US\251HY\210\249S6\190Q\207\195`\220H\237\165|\193+\NAKZz#\237\172\166\138\139\EOT!\STXC\210d\198\238\238\RS6\188\SYN\189\US|k\DLE\131\201\136\233e\166\\R\171\231\215\ENQK\b\179\EM[\153\139"},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 48 8 2 3600 1495160463 1493948580 22791 (Name "openssl.org.") "\SO\204\187\243\214>\239\247\172\187\164\216\194\&9W\140\163\148\211\223\a\212b\FS\DC1\t%QpK\185gy\247\158\n\140\227\182\233\185\131\216\167\160\158\240FO\240\218\195\242\179\&7\138_?}\202\EOT\237\206\218B\156\161$\DC2\241;\246ZR)\r#3cL\STXj\223\225\ENQ\206r\135E\191\195yu\243\228:{\131i\FS\SOH~6\236\230\177\ESCS\246p\254\f1T\170\140\236\EM\241\134\171??g\228\134\229\140"},MsgRR {rrName = Name "openssl.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 48 8 2 3600 1495160463 1493948580 44671 (Name "openssl.org.") "NC1\231I\223\244N\ENQ\252\206(D\NAK\251\160\214\153\v9\NAK\DLE\224\136\200Q\173\173\239q/6\211\132\n\r\189\201%:\252\205P\ACK\168\DC2bV\167\129:s\239\211k\235}'\211\202\189\197\136\179\189\239\176\210\211fj\237\173t\243n\EM\143\215\CAN\NAKS!\243\US\197\132S\\\182\182l\165\130\CAN\175\&0\176\196P9MpMo\232\166\211\173l\rm%gy:=H\168[M\163\&7I\194\252\SOH\225!g\140\&7\DC1\215?\253k#^\184~\157\SO\189f\250`q\232\225"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 48 8 2 172800 1496626622 1494031022 2746 (Name "stanford.edu.") "\141\184\176\RS^(\172\138\t+\a}2\149L\STX\EM\140\n~\181\227l\245?\183\231\216\196\187\NUL\SOH*\219}\212h\140\161nn\176-]\168\239\135kPk* 
\198+\199\211l\226}\b\135\207{\ENQ\205\130\228\239\v\181\224\242\192\&5r;\151x\172\236\a\171d\128\194\CAN4\129\174\254\146^\143(>\160H\224\219\190\DC3\230K\219\220L4~\153\r\212I\FS\180\&6\225\235A\136D|\FSd\128\145*\135O\129>\185\&8\191\200`\236\NULh/K8\DC1\185\183\US&\DC1\233\209\193WP\SUB\241\155\252\a\236nie\SOI\149\ACKWb\175s\EOT\181%\251\194ihV\143v\141\198\154\165\&7+L\160\v\216\DC27\216\138/i\239\172\a_8\191\FSh\132:\215^\222b\242\&2\t\190\ETX\203\237\133+\245\RSm\255,\SOH\203\239&\204\204\142*D\166f\b\151\167\255\246\208\FS\219\v\218N\128\198n\146\205\189\130\163-27"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 48 8 2 172800 1496626622 1494031022 27840 (Name "stanford.edu.") ")\195\155\185.\192_\148h\249|4\139\174\USNY\152N\128Xh\161\225\220\191\153~\171\GS\232\193\135f\133\212\SOH\202\204?I|\233\FS\229\203\211o/\155\201\223\157#P\141\&1\199\230\136\&4\SOHC\246\&7\ESC\161\158\249#v#i\184)aQ\166\133\228\183\159C\DC1sI\FS=c\166U\248a\199\145\220\165+\255p\245\254\209\143ZxE\207 \218\198\RS\173a\195'\143\168bj\195\144\167nP\205+\230"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 6 8 2 172800 1496846541 1494250941 27840 (Name "stanford.edu.") "\DC1x\SUB\204}_\150 \168j\228\134\183\b0\158dX\216\199M\213\191\181!rK\194\149\234\169t\SUB\215:\158xr\169\164\ETB\206\243\174\171y)\245\159\225F:\139Kva\247\177\NUL\EM\171-.#Rmzt\CAN}\222\177\147\138*Y\244$\141\t\254\179\226-Q@O\v\238\216\132\169&\208\202d\237 F\v<\178<\203\&6w\157e\211H\144\233I|Y\"N\209`\v;\159\163\225\SUBq\230\GS"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 2 8 2 172800 1496248135 1493654460 29979 (Name "stanford.edu.") "B\164\247\168\186g\148\174\254f\201n0KD\182\DEL\191\199\133%g\246\&1\128\147:\168\140Xl\224\169k\135\&4\f\DC1P\248\143\215\185\203\&3?\128\r\137h\168\FSR 
\160\")\202vx\ETBb\131\&12\234\235D\141\244\220\212*\224\188v[\205\183nE\193\216s\140\186\RSH\221\&1VK\224\200\US&\137\r\220P\242\196\130\&0\ETBRD^\137\231\186/\193d-\SUB\168\243O\177\CANN\EOT4\167\"\138."},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 2 8 2 172800 1496626500 1494031022 27840 (Name "stanford.edu.") "\182\190W\128@\251\&7\176\172\SOH4\235|\SI\220\173\227\GS\160\EMk`\FS\155\186Z\190\220\STX\DLE\\\234\253\251hQ'\165\SI\SYN\239\236\f\129\203\RS\247H\222g\198\220\172\&6\155\186$E\160y\163\160\DC2e\242|V\225P\196\164n_.S\177\237i\ETXI\170D\238)\222*x3\157\SOH\NAK$\245\FSl\200\141\GS\DC2\EM\134I\144\201\192\223*Y\212\n\219\147u\ACK\224\ETXA\DC1\206\234\217\253\181\152 \205\187W"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 99 8 2 1800 1496759744 1494166881 27840 (Name "stanford.edu.") "4\STX^MH\DC2\235r\162\169\210\197\138\244V\227:\224\196q\204\218\173(\DC3\232\172:n\252\132\148\190\166\209\239\187\223\199\148\179}\ETBYU\th\NAK\210\r\STX\194\DLED\148 i\147tm\161`\149\234@\212B\146\241\SUB\r\146\229\220\149\173o\190#\247\txA\226f\165\180<\225\&95\245\197\214\204\215O:\190D\147\201\SOH\132G\185MC\163\&1\208Me\203\205Vp\163\175X\224\253 \245c\NUL\247\251"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 16 8 2 1800 1495233106 1492638863 29979 (Name "stanford.edu.") "r\140\152\186\197\176\159\&7\217g\215\&5\155\252\ESC\230^|\195K\201_\181N\191\194\214\a\234m\SYN\"3\172&\b\218t\132\134Q\134\131T\229\253\193\240\250T\DC1;T\SOH@I\248\255\148\185<\v\149\177ZPT\NAK\225\171\&6\ao~e|\EM^\144\151\170-i$\209\180\206\&8b\154\218\174A\133\235P\t\183\194\190p\199\163A\245\DC3Fb\ENQS\221\249\DLE\230\222\171\148\196\vG\251Z\187v\200\131\166\249"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 16 8 2 1800 1496626500 1494031022 27840 (Name "stanford.edu.") 
"D.\199\184\154\154s\223\200\139\195CN\200>\238\246\156v\185}\210\201\250>\177H\SOH\DC43\166\242\f\155Mi\246\246\&7G\130\231GzO\242\251\246\208\149\227\154H\142A\SUBB`\NAKG:=\196\238Zd\248\220\232\224\164\230\SOH:\254\202u\206iI\143\238\209]]\t\197\214\240Uw\226\191\169\152|\169r\139\219\212\251H\US\DC1R\ts\223 \bK\159j\US\195\132\148x/\SUB\244\216\197;q\130\154"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 29 8 2 1800 1496560398 1493966179 29979 (Name "stanford.edu.") "\SO\230 L/\255~\184\245\220VUp\a\244\CAN$[\163\223\242\SI\ETB\180\&5\206*\v\170\223NF6\209\167\211b\DC2\133xbL8\137v\f\198\243\136\&9\192K\155\142\151\&4O\161d\229z;\f\DC3p\223\147\254\223S\133W4:\156Z\231TOF\129[xPJ\244\224\EOT\176\253i\255C\SUB?\158\168@HS\169\178\240\143H\192\242"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 29 8 2 1800 1496626500 1494031022 27840 (Name "stanford.edu.") "\158H\SUB\\\r\SI\246\208\205\252c\167\240\&41\DC3\224\&5j\240\180\215\237\b\STXp\144#>\254\196\\9\134f\251\235\tS\162T\f\129\196\&66E\182\141\202:\202\153\EM\ETB2\NAK\161J\181\222\&8\252\&8\226\145\129\135p>\US\185U\214\SOH8\246\NUL\192\ETX\DC1\252\&0>\232\203\SUB\176\248\DC1[+\219zW|z\161\238J_\132\br}\178\&5\SO\207\192\251\247\152J-s\221\224!\241aQ\153y\"\177\141\210"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 1 8 2 1800 1496560398 1493966179 29979 (Name "stanford.edu.") "\DLEj,X\RS\CAN\SOH\162\157\222\203\202\192\&8\DC2\203\169\ETB\236`t\186,,\250QMq\NULcu\214\223\211RY\189h\EOT\163+u?\154F\188\216.\206\212\223Z\216\160C\163\":\191\151]\145\&6\162=\221\169=-\207\235'h\167\129\200@\tFl>UV\242\NULCC\187_nu\154\219\GS\f\165\128\163\193\DLE\217\215\226\ACK\158\FSY\144\220o\183$\192\184\203\248c}*/\212}\182\148N\246\232\&6"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 1 8 2 1800 1496626500 1494031022 27840 (Name 
"stanford.edu.") "\142\DC4\180&=/\GS\178\241\186\b\199\177\246\154\&8Y\240m\DC2\160\226,\213\139\182\190\164\197\188{\ACK\163+\228V\DC4H\143@\f2\SOH7\158\198Mr\206s\150\211\144\216qs\217\DC1Y\n\209\203\STX\US\137\154D\235\191\"f\151;\ACKK}\156\188\240\173\fnz\224j\184\215\145\186\220TYm\CAN\249\165\202\247#nf\200N\SUBM*(\154\181UQN\234}\230\212\FSN7\230^\203H\133\255\143\140="},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 28 8 2 1800 1496560398 1493966179 29979 (Name "stanford.edu.") "w\255\145C\SI]\247\147\190\254\147I`\198=\198\242\f\GS\153\183^g\236\238\191\142uLJ\222\186\246M\139\234#@\184\176\253\145\176\DC3\192O\GS3t\229\215\252\tC\225@\244t^u\134['\137\222\206\251\NUL\NAK\\\f\176\151\188\206B@\210\214$>GY\139\152\DC2s\181CB\143@\ENQ\EOT\195v\232^\161\GS\166\236\176\242Z\183\183\134\153\168\255\SUB\235a\177D\136\197@\135u\137u\246\137F\196\222"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 28 8 2 1800 1496626500 1494031022 27840 (Name "stanford.edu.") "\DC4\225\222\156\174r\231}*\162\190\SOo\DC2\163\212;i4\137OD\ENQf\135\179\FS{=;4\189|\n\164Rh+\217uo\135\198mRB9\223\DC3/\236B\222\229w#\170\252\ACK\217\a\182#\232\DELhb:\ESC\244!\SYN\130\200\174\173\213\186\218\223\202\171\FS\158\194\STX\205\236.@62\130\153\147\159\DC2\208B\SYN\188\148XB\217\SOH\195\EM2WF\235\&4\180+\231+ :\ACKO\198\133\130\128P\214~"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 15 8 2 1800 1496464420 1493870399 29979 (Name "stanford.edu.") "GH\218\173#\197\DEL\175`\213\ETXj\202R\132\170\&5\t\EOT\155_\165\239}\182\195S\143\GS\195\167\191\SUBN\237R&\213\ETX\221\207\US\242\230b{\234.|\214\STX\161,\150\138\a\159\202?]\217\n\175@bh\245\210\174\140\v\174\207;G\177B\189,\239\b\GSJ\159\t!\176.\209a\217!\146\n|\165Z\214/\209\235k%\US $\220r\215\196\151\216\CAN\ETB\212o i\216d\193\\\169,\NAK0%\247"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 
1, rrTTL = TTL 1799, rrData = RDataRRSIG 15 8 2 1800 1496626500 1494031022 27840 (Name "stanford.edu.") "\208\141)]A\138{\180Va\181\160[A\187\224\EOT):w\226\149Si\168\171.U\fW@\172\203\195/\136\156\222\161c\r\209\201P\\B\SOH\196\199\186\146\170x\188t\NAK\250q\246\210v\213S_\NAK.\141\RSIU\141.\202?\172\236\188\145\EOT\223P\251\DC3/+\SI\139|W\NAK%\DC2\162`)I\146\144\151\157\EOT\210\200\206}\225\NAK\216\160\147\STX\f\150\241\GS\224\DLE\162\173\"\STXA6:>\148\184U"},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNSEC (Name "003004139284.stanford.edu.") (fromList [Type 1,Type 2,Type 6,Type 15,Type 16,Type 28,Type 29,Type 46,Type 47,Type 48,Type 99])},MsgRR {rrName = Name "stanford.edu.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 47 8 2 1800 1496624318 1494031186 27840 (Name "stanford.edu.") "i\218\201\NUL\144r\148\148.j\196H\245&'\138\152v\230@PO\226\188\192\210~\172}_K\168\150qO\198F&\128\220C\184\251\166\237\251\236\246.RvN\250\218\216\172T4\152b0\247\201\186\EOTg\185\&4ARl\153_s\242\212\136\f\135\191QBe\162Y\SIA\ENQ\173\233I\169\168(\SI\248\162\172'\130\197&\226\198\142<\ACK\vg\190Q\130\238n(T\235\&7\SOp^\194\187J\208\ENQ\181\151"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/9ebe8abd73d2cb61ac54d631a0305b61.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/9ebe8abd73d2cb61ac54d631a0305b61.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/9ebe8abd73d2cb61ac54d631a0305b61.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/9ebe8abd73d2cb61ac54d631a0305b61.show --- 
cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/9ebe8abd73d2cb61ac54d631a0305b61.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/9ebe8abd73d2cb61ac54d631a0305b61.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 46337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 1, mhARCount = 0}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 12) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [MsgRR {rrName = Name "fastly.net.", rrClass = Class 1, rrTTL = TTL 29, rrData = RDataSOA (Name "ns1.fastly.net.") (Name "hostmaster.fastly.com.") 2016110301 3600 600 604800 30}], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/a2878248b0e2a6d30b50bc67d11b5a21.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/a2878248b0e2a6d30b50bc67d11b5a21.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/a2878248b0e2a6d30b50bc67d11b5a21.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/a2878248b0e2a6d30b50bc67d11b5a21.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/a2878248b0e2a6d30b50bc67d11b5a21.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/a2878248b0e2a6d30b50bc67d11b5a21.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 40786, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 
0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 21, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "com.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataSOA (Name "a.gtld-servers.net.") (Name "nstld.verisign-grs.com.") 1494175849 1800 900 604800 86400},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataRRSIG 6 8 1 900 1494780649 1494171649 27302 (Name "com.") "y\199J)\ESCiLV\160\DC1y\146Y\202z\249\&6+\239\208\159\143\178\194V.u\EOT\189\220\137\&9X\ENQ\156\DC1\189\218\237v\193=\169nJ+3\133\SUB\240\151T\200\251'\146\209\DC3\aD\DC4\248q\173\216I\234\180\&4*gJ\231\170FV\212\186)\156\219\208\165>;\249V\161U\144\237\185\192\252\138\167\253\NAK%\209\180\240\174n+\239\128'CqB\182y\175\171\171\DC2\192:U\216%c\227\160Kw\215"},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "f.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "c.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "g.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "i.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "l.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "m.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "e.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "h.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "a.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = 
Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "b.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "j.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "k.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataNS (Name "d.gtld-servers.net.")},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 172799, rrData = RDataRRSIG 2 8 1 172800 1494650866 1494041866 27302 (Name "com.") "P\166\202az\187\155\175\227w\195\202Hq2\163\228aU\231\144\203\159\196A\146!\172\USvn\193;n\170F\185\255\170\169=\235\CAN\181%\199\"\243\254\159\205\140\158\&5{p1\ETB\231vb\fv\209\203\179\DC2\132(v\140\147\222\"\181NXB\202\246s\242\n~\163\r\SUB\EOT\\\DC2\179"},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataDNSKEY 256 3 8 "\SOH\ETX\171\205/\235\a\130\203\ESC\EM\135\129\189e\EOT\229j\140\133~-\233\161\248\221?\181\200 ;m\173#\153\SUB\196\&0\192R\198]n\177Tt\214\224_\184V\141\215\154\145\209\FS\238e\244\ETX1\157\128\STX\135\a\184[\244 \254\172\&8\225'\142}\DC4\174\161$\230\217\226\149RG\143\EOT&\130@1'\a;\197/\229w\255\141Z\NAK=\160\205\207\v{\ESC\CAN\NUL\139w\175\214T\237\150J\135\229\231\132\152\130\233\171"},MsgRR {rrName = Name "com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataDNSKEY 257 3 8 "\SOH\ETX\195\206WM\152\203\217\NAK~\rp\210t\184I\202\SO\SO\237\154\255\197\220\204\144GIi\ACKe\\5\203\b\179\245.\RSj\245\&6\139>z\140]Ux\"\EOT\194#\153\204\238C6\153r\GS\163B8-\EOT\249\185b\194\227\223\249Y}'\166\242\f\SI\135\&9\225U\EOT\149\212\172<\SOH\193}\SO\205\DC1E\EOT_\145\ACK\131\158\253\228\196\&8\238\141\180\&4\133\GSc\234\DC1\234tl\220\233\&1\138s 
\204\168\129\239\135\222\203\141\139\216g\160\228\EOT\244\179\208\199\247_\243\228\176k\210.\DC2\132\240\157\139\176^\144M\191~w\170\USO\247\251\137Hu\204g\186\187{d\FS\166l\188\156\138S\140[\174\131\230p\214\US\231C!P\169h0\198\195W\179\232\157\243ZX\b?\246Kh;gA\184\174\195xou\157\143\137\205\239|\213\254\192\248#`m\ESC\247=\219/!\225\217)\195\243E\ETX/\128\228\162\EOTL\178cR\RS}\228I\183r\203\150?\SUB\189"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/a48ae1f9104e3732d4c5bd183c2a9ae1.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/a48ae1f9104e3732d4c5bd183c2a9ae1.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/a48ae1f9104e3732d4c5bd183c2a9ae1.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/a48ae1f9104e3732d4c5bd183c2a9ae1.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/a48ae1f9104e3732d4c5bd183c2a9ae1.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/a48ae1f9104e3732d4c5bd183c2a9ae1.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 15280, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 147, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData 
= RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b0beddd4cdcdc372a58192f3ae7264e2.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b0beddd4cdcdc372a58192f3ae7264e2.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b0beddd4cdcdc372a58192f3ae7264e2.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b0beddd4cdcdc372a58192f3ae7264e2.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b0beddd4cdcdc372a58192f3ae7264e2.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b0beddd4cdcdc372a58192f3ae7264e2.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 1949, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 15, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataA (IPv4 0x17647aaf)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataA (IPv4 0x17603435)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataA (IPv4 0xbfefd5c5)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataA (IPv4 0x6828d323)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataA (IPv4 0x682bc3fb)},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 84943, rrData = RDataNS (Name "ns1.msft.net.")},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 
84943, rrData = RDataNS (Name "ns2.msft.net.")},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 84943, rrData = RDataNS (Name "ns3.msft.net.")},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 84943, rrData = RDataNS (Name "ns4.msft.net.")},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataSOA (Name "ns1.msft.net.") (Name "msnhst.microsoft.com.") 2017050703 7200 600 2419200 3600},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataMX 10 (Name "microsoft-com.mail.protection.outlook.com.")},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataTXT ["google-site-verification=6P08Ow5E-8Q0m6vQ7FMAqAYIDprkVV8fUf_7hZ4Qvc8"]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataTXT ["v=spf1 include:_spf-a.microsoft.com include:_spf-b.microsoft.com include:_spf-c.microsoft.com include:_spf-ssg-a.microsoft.com include:spf-a.hotmail.com ip4:147.243.128.24 ip4:147.243.128.26 ip4:147.243.1.153 ip4:147.243.1.47 ip4:147.243.1.48 -all"]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataTXT ["FbUF6DbkE+Aw1/wi9xgDi8KVrIIZus5v8L6tbIQZkGrQ/rVQKJi8CjQbBtWtE64ey4NJJwj5J65PIggVYNabdQ=="]},MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 2143, rrData = RDataTXT ["docusign=d5a3737c-c23c-4bd0-9095-d2ff621f2840"]}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b127444bb25b3d2947e972d3ba0c26dd.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b127444bb25b3d2947e972d3ba0c26dd.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b127444bb25b3d2947e972d3ba0c26dd.show 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b127444bb25b3d2947e972d3ba0c26dd.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b127444bb25b3d2947e972d3ba0c26dd.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b127444bb25b3d2947e972d3ba0c26dd.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 60351, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "_mirrors.hackage.haskell.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "_mirrors.hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 3788, rrData = RDataHINFO "ANY obsoleted" "See draft-ietf-dnsop-refuse-any"}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b28818a1324f6e327b4c31668e0e6d9d.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b28818a1324f6e327b4c31668e0e6d9d.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b28818a1324f6e327b4c31668e0e6d9d.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b28818a1324f6e327b4c31668e0e6d9d.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b28818a1324f6e327b4c31668e0e6d9d.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b28818a1324f6e327b4c31668e0e6d9d.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 26541, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC 
= False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "microsoft.com.") (Type 15) (Class 1)], msgAN = [MsgRR {rrName = Name "microsoft.com.", rrClass = Class 1, rrTTL = TTL 1357, rrData = RDataMX 10 (Name "microsoft-com.mail.protection.outlook.com.")}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b473fc47ff9ce44a72ef479cbb6fa861.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b473fc47ff9ce44a72ef479cbb6fa861.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b473fc47ff9ce44a72ef479cbb6fa861.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b473fc47ff9ce44a72ef479cbb6fa861.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b473fc47ff9ce44a72ef479cbb6fa861.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b473fc47ff9ce44a72ef479cbb6fa861.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 17, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "google.com.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0xacd913ce)},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0xacd913ce)},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0xacd913ce)},MsgRR {rrName = Name "google.com.", 
rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x2a001450400e0808 0x200e)},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataMX 50 (Name "alt4.aspmx.l.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataMX 10 (Name "aspmx.l.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 59, rrData = RDataSOA (Name "ns4.google.com.") (Name "dns-admin.google.com.") 155321650 900 900 1800 60},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns4.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataMX 40 (Name "alt3.aspmx.l.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataMX 20 (Name "alt1.aspmx.l.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataMX 30 (Name "alt2.aspmx.l.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns3.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns2.google.com.")},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataCAA 0 "issue" "pki.goog"},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataCAA 0 "issue" "symantec.com"},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataTXT ["v=spf1 include:_spf.google.com ~all"]},MsgRR {rrName = Name "google.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "ns1.google.com.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files 
/tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b64d9926db52e425c8747c143b9266c1.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b64d9926db52e425c8747c143b9266c1.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b64d9926db52e425c8747c143b9266c1.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b64d9926db52e425c8747c143b9266c1.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b64d9926db52e425c8747c143b9266c1.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b64d9926db52e425c8747c143b9266c1.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 60952, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = True, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "hackage.haskell.org.") (Type 5) (Class 1)], msgAN = [MsgRR {rrName = Name "hackage.haskell.org.", rrClass = Class 1, rrTTL = TTL 148, rrData = RDataCNAME (Name "j.global-ssl.fastly.net.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 4096, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b6d7e514db902da80c8a8e124673139b.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b6d7e514db902da80c8a8e124673139b.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b6d7e514db902da80c8a8e124673139b.show 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b6d7e514db902da80c8a8e124673139b.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b6d7e514db902da80c8a8e124673139b.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b6d7e514db902da80c8a8e124673139b.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 56763, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.thelongestdomainnameintheworldandthensomeandthensomemoreandmore.com.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "www.thelongestdomainnameintheworldandthensomeandthensomemoreandmore.com.", rrClass = Class 1, rrTTL = TTL 35622, rrData = RDataA (IPv4 0x5e7e2a32)}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 4096, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b6e25f429f7bfb60a8a3b473e8d2b6a2.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b6e25f429f7bfb60a8a3b473e8d2b6a2.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b6e25f429f7bfb60a8a3b473e8d2b6a2.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b6e25f429f7bfb60a8a3b473e8d2b6a2.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b6e25f429f7bfb60a8a3b473e8d2b6a2.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b6e25f429f7bfb60a8a3b473e8d2b6a2.show 2018-10-27 22:43:14.000000000 +0000 @@ 
-0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 31, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "debian.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 0, rrData = RDataNSEC3PARAM 1 0 16 "\SO\192\162\220\222"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 0, rrData = RDataRRSIG 51 8 2 0 1497614849 1494156550 53598 (Name "debian.org.") "\186\DLE\205\fEe\vUb)\EM\156\201g\236\165r\226\178\136\STX~\US\v*Y\150\196\253\217c\236\&0fMO\157\184Q\227\170\CAN\190q\190\252\241\220d\230\249\175.\EOTw\f-n\164G\223\159P\149T\152p\217\147\238\178\208\147N\220\167c\224\212\138dI\169\217m\167\187\196\144\206G\224D\198\205d?]V|\134\226\r[n/\198\150\228G\198\v[q\FS\221`\223\RSP\218\DC4\142\138\"n\167\176{*S\248\\\129\141Z{a\152k\163b~\216\133~*\175\235IP\242\173\156\142ZOX\229\174\157\187\\\224\212\216~r\DEL\158|\135\NAK\135\130\233C\217\155bU\128\DEL>\177\150\161-u\177\227\191"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataSOA (Name "denis.debian.org.") (Name "hostmaster.debian.org.") 2017050804 1800 600 1814400 600},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataRRSIG 2 8 2 28800 1496661232 1493203448 53598 (Name "debian.org.") "\203\GS`3I\r\228\EM{\ACKZ\SUB\233\185\&2\154\159!\194\183\222}5\143Sz\239\240\186\240\129\251R\147\215\251\167 
@\DC1H\227\207\140\215y\236\202\208+\NULi\168q\183\NAK\r\DEL\159~x\GS\137\150&\NAKt\140\a2\GS4\SO:\174s\DC3\DLEHO_\v\147.x{8\156\214\SO\128\200\163\158\149/\175\190[B\149\175^\229\ESC\NAK\164\216\247D\243\t\SOP!?\255\DC4\ACKej\145\176\f\EOT|\ACK\166w\233\194l@Gp\188\208\238_\179\219\&0\CAN4\229\GS\148\196\135\200r\234\175\&3\rB\211\184R/i\150\SOHA\207\222iri\177K\242\NAKd\241\230-\STX\DC1Eh\222\199\250\157$7\SUBf\179\149\150"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataRRSIG 1 8 2 300 1497698216 1494238616 53598 (Name "debian.org.") "'\233%\DC3)/\188\165\236\&4\DC2f\231Q\231J\132\178\142\222\232\182\133\DEL6\220%\212\ACK\247\184\167\vb\175\CAN\232\162\170\190FOM\RS\f\235_\144W\ETB\215\158\244\223\vJ\210=\148d\142|\220\215\&9\220\204\233F4~=9J$\a}\177p\148\255\223AIP@\137\DLE\203;>i:\250\SUB\167\159\164Z]\149\150P\US/\157\198\181\ETX\165N\DC3\197\235\140\139I\US\238\184\193M\180\132\141{\213R\251\219&8\252"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataRRSIG 15 8 2 28800 1496661232 1493203448 53598 (Name "debian.org.") "\135\164\225\200\246u(\DC1\129\NAK\224\a\141\205\&0\206\201\221\156f[\ENQ9}\\\236\157+\SYN\128\170\197f\ETB\195Z\169\209\240\189\185\&8:\EM\209\219\128k0r\\Xc\NAKNSB\233\221\241\174\139\135\253\246\130PW\129\\\142\198\223\DLEpL\NUL\188z\252\169(\226kzu\225)\238\147n\227c\190V\236\249\213\182b|@\255\169H\167\128\145\184[\220\155K\238U\235\190\tJ\ETB\175[Ua\134G2\134\225\v6mx\179$\243\186HSK]\203\"\158\167r\SO\174I\f9\214o\170J\185\128\149y\vo\175\211Py\DC3\DC4\175\US\237\130\&1sE\229bt&o~\SUB\196\134\243\DLE\231\n\253\234A\180\175"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataRRSIG 28 8 2 300 1497698216 1494238616 53598 (Name "debian.org.") "h;I\216\162\211\209\224\SOHa+\"\a\222\245\241\SIL\134-\225\200\170\&0\149\148*\234\208~4UG\135M\DC1\rK\US\181M\158D\220+\205\215q\229M?\199x\250 
5\STX_}\167\&3\207\214\DEL\ETBAY\216=\252D\SO\174y\142\147\185s\STX\135\SO7^V:\160\153H/\151 \220\146\155\132\&5\158\143\184\164h\SUB\130\176\CAN\a}2\196\162\a\SYN\223f\130\240\211~b4-\252\151\223\217\DC2\223{z{s\134\251\171\ESCG\216\164\&6\223\211g\152\218\241\&88\SUB?\213\133\218@\175\255\EM\192\238\147\&8af\242\DC4B\DC3\220\140#\153\174\182\166\ETX\177%\DC4\f\240BC\195qL\228\196=\DC1\137\251\DC17"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 35 8 2 3600 1496661232 1493203448 53598 (Name "debian.org.") "\128\242P\211\136<\187\164D\208\213\195-\130\230q\153\156\180\161\\?)^\128\\\247\200\224i4\NULu\230|\t\215\DC4\149\254\ETB\192-\186\173tw\213\DC3\184\228\223:\202\&3\208 \179\165\161*\254&\149\138S\192.E\218\EMp\156\\\165\b\"\179v\231\132\188\222\DEL*\EM\155U\US#@\ENQh\241K.\223\&6\131\162\226o\203!\152$;\"\128\DC1B\231\187HZ\209\&3/\208\135?y\n\178c\177OX\CAN\202\161)\161\196\245\199\DC16*\210\248\174Jg#\247\161\145\177\&1A\243\212\200r\204\142\&7\165\GSl\134+-\ETX\129L\162-\154\207\SO5\248\145\252\162\v\149\169Z\154\240\DC4\242}i\182\216\187CL"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 6 8 2 3600 1497698216 1494238616 53598 (Name "debian.org.") "\141G\148\&8Ru\215~\167\220\201\a\234\142\f\136?p\174d\136E\130\135\136\&7vA\212\"x \188\189\SYN\166x\158y\236\DC18\163\241\217B\169;\200IUF\136\138\253A2Fq\150M:\174C/Y\GS\194XD\135\216\140P\DC1U\187\STX\163DM\182\205\243\185\170\165C\209,bp\195\246\236\162)\180\209\ETX\216\232\219\241<\219\158S\EOT+$C\250\174\b\237\155:\233\177@\208F\192\198\131\132zsO9@\f\247\138\SOHjM>ajU\179\t\n\222\203b\DLE\253\224\&4\240\DC4N\146ua\148\208\185\183:}\156\148\153\171\ve\180\SI\229\193\140\232\SOH\182<#?P\STX\164#3\167.\163\143\&9\205"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 0, rrData = RDataRRSIG 65534 8 2 0 1497615664 1494156551 53598 (Name "debian.org.") 
"\194\&5v\DEL\182\184\138\201\232=\183\ETB~4\172\DEL\168B`e\SUB\128\222\190L\141\154\151\236\\\176/\NUL\209a*:c\145`\164\t\179l\206\225\162\RS\208f\159S\RSLz\182\n\a\186\207\208$#>+\150\&5\151\129\220L&\173\188\141BA\SOH\130\136\&6\DC1\141\220 \134~\149\144=\229\v*\246m\SI\DC2\207`\180\237\SO\166\167\226y\186\139\182."},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 0, rrData = RData (Type 65534) "\b\EMW\NUL\SOH"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 0, rrData = RData (Type 65534) "\b\209^\NUL\SOH"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataDNSKEY 256 3 8 "\ETX\SOH\NUL\SOH\247\254\167,h*\ACK\241\143,\SYN)w\190\177I(\202\ACKiV\246nn\SUBM\bu_\239n\194\193\236\214is\140\SUB\SO\182/?\162\140\b\182\187\179\rD\223\139&\214\165y\236\r0\SOH\137\249\252z\EM9\247\165(\EM\143\">'\230\GSX?\188t\228r\185\168\&5\173\SI\180\226/\203\140\154D\180\239\207\&4\234\ETX\236\252\245\NAK\tK\199\138\243\NAK\177fwG^\253S\211\192B\186\151W\252\SYN\199\206\&4\FS\243M"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataDNSKEY 257 3 8 "\ETX\SOH\NUL\SOH\151\229\ETB\218\252+\173\&2\GSK\140\207\236\249\f\n\187M\129\160\216\169\186\152p\n\CAN\161\204}\153\207\241\ETBr\128WZ\237\CAN\245\151\184\&7\240\226\US^\243\&0\133\140\141\231 \139z\209\150\227x\182\191V\187e\SYN\169#\162W\222\157K\DC4\234mJ\172\130#4\146\f\175\191\255 \191\251\182\244A\\\134V\197\218\186\194\NAK\150L\173B#\222\242-\209\SIP\250b\DC1\231/\151\a\252\SYNC\160\DLE\US<\251liLSc\SOH\185,\247\218\143\239\181:\243\164\191I\165\157\184\177\134\187O\150\179GE\ESCaU02qR!\199R\172\STXita\157w4h\219\218\241\206\210v[\165\193}:\STX\DLE5\157\166\144\EOT\194\247\231\164\191-\222\186\\\144\222=\224o\230\194J\189\211\160\241\240\242\163\167\238\f\SOH\238L\159\DC4\132\208\171F$\161\169\GS\173\224w\167/\191\142\146wY\235\245\231\&1<\252\234\148-\NUL\137\223y"},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, 
rrTTL = TTL 3599, rrData = RDataNAPTR 10 0 "s" "SIPS+D2T" "" (Name "_sips._tcp.debian.org.")},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x200104f80001000c 0x15)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x200106101908b000 0x1480014)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x200141c810000021 0x210004)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataAAAA (IPv6 0x2605bc8030100b00 0xdeb01660202)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataMX 0 (Name "muffat.debian.org.")},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataMX 0 (Name "mailly.debian.org.")},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x599e704)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x801f003e)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x8259940e)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x8cd3a6ca)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataA (IPv4 0x9514040f)},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataNS (Name "dnsnode.debian.org.")},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataNS (Name "sec1.rcode0.net.")},MsgRR {rrName = Name "debian.org.", rrClass = Class 1, rrTTL = TTL 28799, rrData = RDataNS (Name "sec2.rcode0.net.")}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files 
/tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b90b636aee4f847240da4e2606eb64af.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b90b636aee4f847240da4e2606eb64af.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b90b636aee4f847240da4e2606eb64af.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b90b636aee4f847240da4e2606eb64af.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/b90b636aee4f847240da4e2606eb64af.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/b90b636aee4f847240da4e2606eb64af.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 8824, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.thelongestdomainnameintheworldandthensomeandthensomemoreandmore.com.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "www.thelongestdomainnameintheworldandthensomeandthensomemoreandmore.com.", rrClass = Class 1, rrTTL = TTL 35774, rrData = RDataA (IPv4 0x5e7e2a32)}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/bc73346b31d530d2fd1c40100e0abdb3.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/bc73346b31d530d2fd1c40100e0abdb3.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/bc73346b31d530d2fd1c40100e0abdb3.show 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/bc73346b31d530d2fd1c40100e0abdb3.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/bc73346b31d530d2fd1c40100e0abdb3.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/bc73346b31d530d2fd1c40100e0abdb3.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 49467, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 3, mhNSCount = 1, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 12) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 3376, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 17605, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 785, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")}], msgNS = [MsgRR {rrName = Name "dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 885, rrData = RDataSOA (Name "n0dspb.akamaiedge.net.") (Name "hostmaster.akamai.com.") 1494154963 1000 1000 1000 1800}], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/bd9f84e89a040ca3c405c8b0c61e7b95.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/bd9f84e89a040ca3c405c8b0c61e7b95.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/bd9f84e89a040ca3c405c8b0c61e7b95.show 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/bd9f84e89a040ca3c405c8b0c61e7b95.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/bd9f84e89a040ca3c405c8b0c61e7b95.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/bd9f84e89a040ca3c405c8b0c61e7b95.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 38134, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "c.f.f.c.7.f.e.0.0.6.b.7.5.a.4.0.4.0.1.0.5.2.8.7.1.0.8.4.1.0.0.2.ip6.arpa.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "c.f.f.c.7.f.e.0.0.6.b.7.5.a.4.0.4.0.1.0.5.2.8.7.1.0.8.4.1.0.0.2.ip6.arpa.", rrClass = Class 1, rrTTL = TTL 899, rrData = RDataPTR (Name "ghc.haskell.org.")}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/c1643eda6cc3d3ed3cee45c25027e5f4.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/c1643eda6cc3d3ed3cee45c25027e5f4.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/c1643eda6cc3d3ed3cee45c25027e5f4.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/c1643eda6cc3d3ed3cee45c25027e5f4.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/c1643eda6cc3d3ed3cee45c25027e5f4.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/c1643eda6cc3d3ed3cee45c25027e5f4.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 18218, mhFlags = 
MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 4, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 3478, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 19714, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 890, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")},MsgRR {rrName = Name "e1863.dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 10, rrData = RDataA (IPv4 0x5c7ab450)}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/c19a30769d1fac0f4bc2e4e20681ef4e.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/c19a30769d1fac0f4bc2e4e20681ef4e.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/c19a30769d1fac0f4bc2e4e20681ef4e.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/c19a30769d1fac0f4bc2e4e20681ef4e.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/c19a30769d1fac0f4bc2e4e20681ef4e.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/c19a30769d1fac0f4bc2e4e20681ef4e.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 1121, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ 
= False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.google.com.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "www.google.com.", rrClass = Class 1, rrTTL = TTL 129, rrData = RDataA (IPv4 0xd83ac9e4)}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/c2b9983a237cc1d0085a7579dd54f189.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/c2b9983a237cc1d0085a7579dd54f189.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/c2b9983a237cc1d0085a7579dd54f189.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/c2b9983a237cc1d0085a7579dd54f189.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/c2b9983a237cc1d0085a7579dd54f189.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/c2b9983a237cc1d0085a7579dd54f189.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 30614, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.google.com.") (Type 28) (Class 1)], msgAN = [MsgRR {rrName = Name "www.google.com.", rrClass = Class 1, rrTTL = TTL 186, rrData = RDataAAAA (IPv6 0x2a00145040070816 0x2004)}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/c4506200f514568ec9a2d42663a1e77d.bin and 
/tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/c4506200f514568ec9a2d42663a1e77d.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/c4506200f514568ec9a2d42663a1e77d.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/c4506200f514568ec9a2d42663a1e77d.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/c4506200f514568ec9a2d42663a1e77d.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/c4506200f514568ec9a2d42663a1e77d.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 31337, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 25, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "ietf.org.") (Type 255) (Class 1)], msgAN = [MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataSOA (Name "ns0.amsl.com.") (Name "glen.amsl.com.") 1200000348 1800 1800 604800 1800},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 6 5 2 1800 1524762969 1493223555 40452 (Name "ietf.org.") 
"P\143\164/\211\&9Cmr\144#p\138\f\131\ESC\220\RS\DC2\153v\209\SOHv\SOL\153\223\227\ETB\186\NAK\180\&9\189A\138}\160]\220!\212\244\t\131\DC2\176ag\RS_\136\250\233\159\&9\250\239\EM;\NAK_\156\171\212duq\194\211\211\207m0t\194;\246\222^m\249\189\252\131\210\189\175\ACK\144\199%\179J\206\190\139\ACK\220\230Y&\211&b\202'\227t\152\SYN\184\173\254\247\235X\243\197\&7\174\205\DELD<\231Gt\215o\228\ENQ\164\ESC\192~\166y1\232>n\202\217\252>\200\229\233\133\&7&\180\138\134\ETX\160\195\226\\\198\232\253\250@\154G\NUL\DC4\175\197\145\250\USa\143\DEL\149,\160._\DC2\162\216\242E\174:\"\236;\255\154\147\219iF\"\245e\236\&6\134$a\241c\217\DC1\193c\182O\160\238XL\203\230K\228\DC1\135\135\&7`Y\248\EOT2\156.\146\SUB\158\&4+\238\STX\142\182\164\FS\ACK&\143\186*F:@\222U\149"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNS (Name "ns1.mia1.afilias-nst.info.")},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNS (Name "ns1.ams1.afilias-nst.info.")},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNS (Name "ns0.amsl.com.")},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNS (Name "ns1.sea1.afilias-nst.info.")},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNS (Name "ns1.yyz1.afilias-nst.info.")},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNS (Name "ns1.hkg1.afilias-nst.info.")},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 2 5 2 1800 1524763120 1493223555 40452 (Name "ietf.org.") "\196-\224\241\235A5\213\186+\136\SUB\201\171\186f6Y\163\US\GS\211\152@R\154l\166\FS|\208\246F\251\185\180\SI\ETBR\130\131~\200\214\RSl\144\157\a\137\216\236\&0\ETX`\188\DC2\137t\ENQ\188A\255\144Uj\179\151\205\SO\197Kw6\169\157\133i\EMWA\GSo\135\224HP\US\174\243 
\219\208\a\248\187\220\218\240\ACK!\132R\243\150\&2P\EM\180\161\ESC\179%\ETXiBx\149\160\156\155J\SOH\239f\201\213)\173\240qD:&)\146Kf\202\195\b\175\161O\220\DC2\213\\*\209\200\EM\228\173\208\163q\154\EM^s\138\191S9\191\232\170\181\232 \131\227\131\151e?k\DC1\173\205\f\197/\183y<\ETB\205\170\244\143\133\232l[\149\&1\SYN\202s\220\&3\135\173+\STX\136z)=d\226>\188\139\151T_?\USe+\EOT\196\SYNC\196\128\144Du~9A\136*\154\219V\DC2\195@\DEL\209>\135\182\150\131\149~\132a\141\138"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataA (IPv4 0x41fc62c)},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 1 5 2 1800 1524763086 1493223555 40452 (Name "ietf.org.") "]t\133\144\&7}\NULV^\200\165\FS\187[u:<_\238\172\&6\217(}\226\b]\151}ve\180\186)5\255\FS\ESCj?>_\202\226\210\185\165\202\128\v\173\204kn\SUB\242qm\\\203Ce'\242\222\228r\223\243;\ESCa\DC3\NUL\165\&6wAf\196\"\253\200\231\198\140\200a\227\&6K=?\v\FS\162\245lB\161\160\\\ETX\f\r\176M\218\134\US\176Fi\208\174\209\202P\160\DLEC*\200\&8\191ou\190\DC3\226\v\156r\144\DLE\171\&2r=\187\157\v\184\169\235\163\ENQm\242U\f v\aOmR\142\227K\v_\n\239\249\177\161\217\134\&0\250\208\221\130\DC3\136\189\139k\228\210.em,M&\237vU\227\214\193\US\148|\198\DC4|\128,\177}\133\144^+\148\143\&6\197B\ACK[\204\DC3`\147\134U\219\170#\132\199\183\196\154\179ZGQ\247)l\187\&2\SYNf\237\202p\241\253\230\145\251j\NUL\224\146=\ENQf\SI\180"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataMX 0 (Name "mail.ietf.org.")},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 15 5 2 1800 1524763100 1493223555 40452 (Name "ietf.org.") 
"\ENQ>\186\\\164\202\156\DC1\201\251i\187\244\236\&3\189\195?,4t(\165r\\p\221\195f_\186$8M\b\147L.n`v\248\167\&5\220\240\156\179\&9m\242<2/\203\229\168}2$k\231\NUL\247R\\\155K\186\134U\ETBg\165g\196\190|#\US\176\ACK\192\242\ETB,\FS\151\231\217\NAK\143z\194j\138\&8s\164\US\252\247i0\RS\201d\DC2\170}\204;\NUL\187\ETX\249\DC4\219\165zg\DC3\241\201\223mp\215Yb\176\242;w\137ZV\193\192\243\211\SO\157\235y<\DC2;<16n\190\202\147\152\133\206\144\203]\148\204\224\172X^\211\200\192\225\223\128M\169\192<\253W;E\241\EM\253W\173\225$\226\159\t\195\200\222^\242a\196!\176x\149l\251\141\227ya8:\197\STX0\238\230\DC2\DLE\187\175\DEL\209\188E\133\131\179(\147\214\130ta\ETBK\188uQ\178\199/\228\a.\NAKi\149\172\NAK-^au\ACK.\215j"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataTXT ["v=spf1 ip4:12.22.58.0/24 ip4:64.170.98.0/24 ip4:4.31.198.32/27 ip4:209.208.19.192/27 ip4:72.167.123.204 ip6:2001:1890:123a::/56 ip6:2001:1890:126c::/56 ip6:2001:1900:3001:0011::0/64 ip6:2607:f170:8000:1500::0/64 -all"]},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 16 5 2 1800 1524763104 1493223555 40452 (Name "ietf.org.") "\136\157Fpx\200\192\178J\227\"cT\254\180\217r{\216\228AQ\EMsM\253 \180,\248&\149\220\196:l\153\202+\SYN\224\CAN\156=\SO\174\241\168\254\215\181\180\194N+\171\197e\156\NULE\141EMPg\247[\DC3\202\SUB\248\180\EM\144K\SI\210\240\129hhi+G\204\SO\152A\221_F:\FS\253}\DEL|\141pk&\GS\160\130\254i9M\STX\162\231&\142\222fW\210A\149\221y\185{\GS\156\237!\212NI\DC2\144\226\197\146\vJ)\212\134l\SO\243\RSu\178'\181y\NAK#\224<\233\230\186^\253\222\208\215\SYN6\151]oI\189\172@\DC1\129\195ga\221\182\ENQ\189\164\244\191\173\163\247gCl\131\177\196\164\226\184D\229@\132s\131\161\162w\165\"2\164\168;\135\199\225\251\&7&\134\194\177g\153\rJ6\164\"\220\136\160g\207\129\184\200\167\ESC:\238\EM\154y\177\&2\242\246\238\149\228\DC29R/\211`vs"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataAAAA 
(IPv6 0x2001190030010011 0x2c)},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 28 5 2 1800 1524763044 1493223555 40452 (Name "ietf.org.") "F\221\216\166\163C\171\ESC\210N\225! \155%4%@\ENQ\251\170\179\SYN\187UJiQ\214\&6\169\b2\198qz\145gY\240^k\232\247$\237\&9\156D\169\CAN<\138\128O\174\171j\207\NAK\129\254NH`nlY\SUB\NUL\238\206\176\156\aN(\186\&9\199\198C\140g\EOT\179\210\232Tq\203)\226\160\218\163\&9\203\NUL\133\223\NUL4\239\181\133\DELgh\143o\146\157H\DLE\NUL\168yJwc2\201\"\254\230q\201\bei\185\197\230\156\141\GS;\159\204\176\"\242\ETB\210T\217\254\236#-'l\187t/#'\236\183 \174/\181\200\160\198\SYN\fw\153\244\207?F\v\NUL|,Q\133\176\185\212]\207\ETB\DC29\148j\189\229\229\177\DC3\219|\244\146\215\STX\208\DC4C+\139\192\"\130\150\192\149\175\188\196|o,\148%\154\v\135\224\172ko\228\v\233`~#\151p\128\147\134\170\235\133\208Ekl\245r7*t\254\193=?\180"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataNSEC (Name "ietf1._domainkey.ietf.org.") (fromList [Type 1,Type 2,Type 6,Type 15,Type 16,Type 28,Type 46,Type 47,Type 48,Type 99])},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 47 5 2 1800 1524763055 1493223555 40452 (Name "ietf.org.") "\ENQ\218r\154>Lin;\254\238F\NAK\ACK6\r\184-\192\237[\STX\250\184\&8\232\202\154b\t\204\129;\221\NUL\DEL\v*!8\178\169\201\ENQ_\199Yn\162\137\177$\a\159M[,\202U\DEL\155P c\254N\197\&7\233Y>)f\224}D\142\146\166\191\136\226)x\168\237I\133\194\233L\144c\DELU\185\255\158\DC2d\187\155\211~c\186\&7\169#\233\&0r\DC1\SYNg\181\196\179\141}V\ENQ\142\SUBW\165C\135\ETX\EMV\US\234%\243f3\213D\141\&1\DC2\195%\SYN\195\253\ENQ\239Q\a\245Z\RS\159'\223\159f)\185x\168\GS\a\195q@8?\190\150^\215$\140$\221\211\EM\144\246MK\vp\172+\158hE\135\254\b\164e\175\&1q\SYN\205\146|\178u\255\198\155\&8[\237\247\211\210\253\157A\222\FS\FSkHv\156\220\172!p7pv\174s\153\248\f\169BVY?\246\129b\250\255\DC4\174\238\EM\243h\237:`\146"},MsgRR {rrName = Name 
"ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataDNSKEY 256 3 5 "\ETX\SOH\NUL\SOH\208\196\t\168\199i8\223J\131Sc\159\SYNp\SYN\161\212\FSO)R\a\a>\b|\184\223\168\203\226\186n%\128\171Jd\183\189\236\&8\t\231\243P\172\209\ACK\EM\t\171\233\242\175R>\187q\250\217R\136\217\217\193\145\139\&5a\130C\170g\219\FSi5W6\229\203\&3\148\240\237U\142T\135\RSyp.\184}\166<\136\132W\163!%\133,[\167o\152\219\t\225|\STX\128\216+\160\t\151\134\131\EOT\137a#\EOT\US\176\141\134P\228kY\FS\DC1JUp\STXId\226\n\215]\GS\203\158\&9\228\149>.V\US\222\NAK\150\230\225\GS\248\178\180\211\217.\179\155\133@\135O\169\&4\180\209:\166\DC2\232\RSu\214S\238\175\166\198)\207\161\161\&3\SI\209\244\DC1q[\147\221\170\166\195\159' B\201PZ\fq\191\193u\DC2\224o$\222\186\177e\159\ESC"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 48 5 2 1800 1524762971 1493223555 40452 (Name "ietf.org.") "R\220uL\178\139\238\216\181br\231\GS\128\250\205\243\&9d\165\185;Q\151\&6+n\a\148\169\152Fy(\244R\FS\131\ACK\137\229\167\199>R\RS\192\238\NAK\215\241m\"\200\174\132u\FS0C\DC4]\249\244\198.\138\221[K\164\SOW\SI\SYNcV\146\196\229#4\173\216\201^\128~\tHcSY\131\SI\229\238\191\GS\161\180zV\167]\216\216\163\CANb\211\ETX\\\SYN\234%\152\&4\231q\200H\129\245\253\224S\248\no\GS\195D\149&\ENQ\152HRi`\181\228\189\214\203\176\CAN\209&,o\239\222p\147\203\213o\163\179d\161\229\132\170\r\ENQ\251S\168M\232G\151\189\206\&7M';\SI\201\136\DLE\168\212m\200c\210-H\n0\ESC\157W\DELg\NUL\240\"XE\228\243W\236\170P\202\167U\ETXm\166\&9\162r\151\134\223\179\DC3=T\STX\158\180\226`\131S\DC3\235\199;\128B\149H\170\134\ENQ\253\156\166\141\236Z\161\228\186\175\DC3"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 48 5 2 1800 1524763006 1493223555 45586 (Name "ietf.org.") "X\165A\218{\a\DC1\163\238\178 
=\153\220\241\150\231\ETX\164\230\FS\199\233\r\218\t5\ETX$\182B/^\215\235L\175GP\133\153\230#\204(\179-\243/\188\ETX\192=\148J\n)\160\a\247J\251\133\209\223-f\175\DEL\211\227\170\197\SOH\ACKo8\247_\DEL\189Gjf\251X\166\221\207\&7b\142k\208-g|\SO\169\128\f\221\139\130\181\128\185o\220g\151\219B%\204\v\237\129\&7\249\&4\149\217\205\209\160'=\211 \187\152i\213\213\192H\178\153\&2\161\181\128\189y\132e\136\DEL\235\244\180\207U\245C\128\207\230lf\187\196\128\&8\250\SUB\177\224^k\ETX\FS\195\&79M?\189\NUL\161\178\&5\183\228\DC3Bxy\244\221\153L\145\154~0\173o\130\160\155\213\254\&3\ENQf^\255\157\188\&1%\146\US|\232\SOH\n~\ESC\239d'=\245\&7\206\a\ETX\SO\245Q\ESC\227\242Vj2\182\175!\206\223%\231\ENQ\172`\222\b\209\EM\234\172\&2"},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataSPF ["v=spf1 ip4:12.22.58.0/24 ip4:64.170.98.0/24 ip4:4.31.198.32/27 ip4:209.208.19.192/27 ip4:72.167.123.204 ip6:2001:1890:123a::/56 ip6:2001:1890:126c::/56 ip6:2001:1900:3001:0011::0/64 ip6:2607:f170:8000:1500::0/64 -all"]},MsgRR {rrName = Name "ietf.org.", rrClass = Class 1, rrTTL = TTL 1799, rrData = RDataRRSIG 99 5 2 1800 1524762978 1493223555 40452 (Name "ietf.org.") "\b\DC2k\193\248\172\212\218\130l\131\247\202O\248\132\254\237\158\178\180\167\229X\184\v<<7\152\233\178v\172\220\161\158\171\232\254\165\NUL\147\FS\STX\251\&1\189Mc\230\194\197\192y\140\221tc\SYN\USC_>T:\STXF\135Z\225\&8\130\FS\ETBhZ\178Hc\GSS\171g*t}\255\137\146\217>@\230\232\141\176\SI\185\202\202Ad\EOTA\133\240Vk\204^7\212\158\&9\135\CAN\226\188!\145\129\132\213%\234\207\219\tTsN\135\178\SYN56\DC3q\216\192>,\197\194K\198\229e\132\231\b\232("},MsgRR {rrName = Name "nasa.gov.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataNSEC (Name "3D-Printing.nasa.gov.") (fromList [Type 1,Type 2,Type 6,Type 15,Type 16,Type 46,Type 47,Type 48])},MsgRR {rrName = Name "nasa.gov.", rrClass = Class 1, rrTTL = TTL 299, rrData = RDataRRSIG 47 8 2 300 1496749007 1494154805 34113 (Name "nasa.gov.") 
"\bG\199@\n~\164\158\155\214\EMa\130\134\160\184C\250\147\156z\197q\165\249\DC3\155\250\b\NAK\EOT>%%3\132%[\n\179\137vh\135\205]\SI[\187\181@\190\a\168\178qW\252\245\229F\177Wt\188\170g\133\211A\236\154\SOH\159V\DC4o\NULA\EM\200^\182+9\170tR\220\&5\236\152em\222\161Y\159\&7c\211y\162R\228y\128,dz\156#jR\168&\214^\166\147\227\223\170\135\131b\156\226"},MsgRR {rrName = Name "nasa.gov.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataDNSKEY 256 3 8 "\ETX\SOH\NUL\SOH\188\&6\253\240\DEL\139M\ESC\154>W\159._q=\206\151\215\162\"\182M\218\218\rj\171\188A\172\197\170\213k\171\238\SO\206a\171\145\144\185ZH\235\212\145\&7SN\133\156]6R\DEL\141\216\236g\128\222\159G\ESC\205z\"A\176]\216\&35hB\221\187\248Qd\146\ETBt\221\151\168\EMg\159\156x\155\218\148\&8\143DC8\223\136:3\202=\226\198=\157)b\169\172R\128\175\DC4V\216\232\192\232\EOT\231\203"},MsgRR {rrName = Name "nasa.gov.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataDNSKEY 257 3 8 "\ETX\SOH\NUL\SOH\193\145\233\ETB\182\"R\233g\EM2\179\191\171\161h\159\143*\247\169q/\142&\SYNM\202V\224\SO\221\133\178U\223\DC3\175\158t9\220\128\ENQP=\FS\209,\133\130\159s\227\136e\234\195\136\ACK\140H\ETB\231\176\DEL\147\252c\SOF\FS\151\174\172v\140\&8U\139\137y\228\b\EOT4\DC2\136\174\255F\205\SO9\187a\190Q\ETB\234\182\DEL\ETBm\NAK\209y[:k\150\161\DC4>\134\131\ETB\245\167\199\134\226\244\222\214G\222D\129&\DC4\195\NAK\RSr\188\174\195D\217\129A\236 \DC4\NUL\200qzAwt\210\180\ETB\160|/,\167\"\193\137\188\154\185\165\DC2^]l_Qq\ETB\166\242 P\154\208Zm\SYN\\\229@\229\154F\229T@\254\&8\220\134t\237\156\220|\SOc\EOT\239\SI\240\FS\\']\185\157L\190\206\133\130\208\162\143\220\156\180\152y\US~\STX\207\EMA\173\235\ENQ+8\DC1#\236\ETB\162\161\"\f\SOH\168\177\180\201\138\t\STXv\133"},MsgRR {rrName = Name "nasa.gov.", rrClass = Class 1, rrTTL = TTL 599, rrData = RDataRRSIG 48 8 2 600 1496750400 1494154800 34113 (Name "nasa.gov.") 
"R\166/W\NAK\SYN\253e\153\EM\142\201m\170\246\205g%\230\\I-\134\251T\170\228\154\221P1\DC1\NUL\223\234\245\171\243\190:\225\133\218\&4\247\195\ACKU\180\NAKJ\147\140x\155O\250VK\155\205\201\&3\137\&1=\180\153\170A\229f\SO\ENQ\164\SOU\221\\\178RM\DEL8\217\146\245D\161r\182\140[6\247\164,qX\202\247X\173\193x\222\210!9i6n \192 \196@u\177fi&4\209b=\225\196\248\160\223\nT\250\131\254\183\254\221\202\RS\140|\134\171\252\EM,\225\234\199\247\DEL\149/\189\152\214\215\218\186\240\&2[P\152\208\180\US"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataNSEC (Name "www.example.com.") (fromList [Type 1,Type 2,Type 6,Type 16,Type 28,Type 46,Type 47,Type 48])},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 2 8 2 86400 1495006082 1493148678 21214 (Name "example.com.") "\a\223\141iM\f\bo\160\139\192`\255Y.\166VO\178\DLED\161|8\STXB\252\SYN*\164\129\ESC5\DC3\v\\ZV?U\216\135\DC3\f\248M#\SYN\142\164\143\217X\233\ESC\159\NUL_\DC4n(\157w\FS~\145R\178\166\229J(\159\209\184|\251\220\238\237SO\139\209\147\220\&0Xv\165\201\GSH \251\&2\135\154J\194\ETX\199' \RS\132bb\243\148E\168\158\172\US\228\ACK\241\174\169\201\239\225\243\&6\DC1\255](\153\161"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "a.iana-servers.net.")},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataNS (Name "b.iana-servers.net.")},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 28 8 2 86400 1494925308 1493148678 21214 (Name "example.com.") "\DC4\224\&0\233w\r&>\182\142\167\220\207t\ENQ\US\243\149\128\230u'P\233j\164\248U1\NAK\182\146\235&\ACK\DC4-l\163,\191n\153\248\r\ESCH\229\134\209eh\163\132\169\247\ETB\234\DC4\176\239\187\208w\US(\220\242\&5DpoD\207\167\r\GS\169\ENQDd\152\ETB\179\226xG\217\ETXm\177\196\182\215H\255\221\v\DLE\179\226\GS\SOH\171\167\174\155XX#q]\164*\DC3m\195\175\227\205 
\187\140\213\154\250Ea\253z\STX"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataAAAA (IPv6 0x2606280002200001 0x248189325c81946)},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataRRSIG 1 8 2 86400 1494974036 1493148678 21214 (Name "example.com.") "\DEL\147H\248t\140@\163\130\DC2\DC1\200\217\225\190\140\132\t\197\197C\164y\223\ENQ\228\195\201\153L\SYN$\179\177EE<\ACK`u\188F9\199[\165\171\177w\192\188l\159\229i\169\254\&4Q.\ACK\214\188\196\166\191AV\255\SO\232\&7,\225V5\255\NUL\186V\246\198\159\&4\164p\243\168+?rz\a\234l,B\208\224C\EM\142\221\181qw\154\164/&\144]\a>\188\223\&9\142h-3\STX\170\231\183Kr\164a\205\228"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 86399, rrData = RDataA (IPv4 0x5db8d822)},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 59, rrData = RDataRRSIG 16 8 2 60 1494968390 1493148678 21214 (Name "example.com.") "\149\vd\208\136+H\224\213\r\246\&2\CAN#\251\233q\SYN\157\220j@2/\157\242\215\173y\ETB\154\135\199\209R\253\\1\DC3Sof\139\143\229\251p\139O/\209\199\US\ACKL\138A+\134\a\187\145\222!n[\252\227L,\NULkj_\219K\163x\252_1\175\161\212.V\CAN\193\215\197\218\248\162\178\&1US\144NY\216#c\230\252\247'\135\160\237j\EOTf\152J\b\SOS+\213\242\SUB.}`@\130\133nm\225"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 59, rrData = RDataTXT ["v=spf1 -all"]},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 59, rrData = RDataTXT ["$Id: example.com 4415 2015-08-24 20:12:23Z davids $"]},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 48 8 2 3600 1495402611 1493547398 31406 (Name "example.com.") 
"&\236\\[/n#V\ENQ^\212\n\240\156\239\162\DELQ\211\241w\135\&9o\253\132\218\184s\218\210vn\180C\SI\255\SO\141<$\"\214r\141\n\173\223\248CCP'\132d\NUL,\DC1Q\134J\209\239\f\197e8\145->d\150\232=\153\191\&1\198\139\219\NAK\SUB\231\211\149t;\147[G\211C\190E\177,\GS\ETX\SOa\140\193\166\ETX1\248\EM\200#\\\130\231qF\177\230\140\243\164\SOD\162\201\165b~|\am\242\206j\197\STX\239\250\168\251~\USk\SOT\195Tbl3\136o\162\164\172\158!\128e\215y)\184\162zW\199V\246x\160\233\207S*s\ESC\240\203\&6\174|[\GSmz\NUL\249H\RS>\233\132\f[\ENQ\147\148Q\DLE\DC3\142\208c.\147t\133\229\140\rwTG\179\&4\ETB|&\163KP\a\182\178\174:b':\140o\DC2\f\143\167.S\143\FS@g\187?\209\132`Qf\FS\199\196\148\194\tX\233\166"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 48 8 2 3600 1495402611 1493547398 45620 (Name "example.com.") "\ETXNp\\\169\139\179\160\GS\DEL\225\247F\ACK\207\199M\197\218s\CAN\140B\194\199\218\SI*-\194\172\245\195d7\DC3\USa\236Q\225l 2E|\242\n~J\rq\234\175X%+\196y\189\&4\218\143vl\151\152\212e\223\134\135\145\166\226\201\213H\221V\254\216\177\242s\SYN\148?\192[\152\168\240\SI\130|\154\ACK~b\224wl\DC1g\236\b\149z\150a\SYN\\(\207\a\250\&8\154g\ETB\200\US-g`\EOTm\230$Jn\235\152K\222^\STXSf\136\130\CAN\248\170\179Y\243\152\&1y\143\DEL\\*]\r\SYN;\f\DC3\224~\ACK\150\214\130E\158\182\186\v\191j\131=\244\161/[\DC4\234ya\242\248\STXs\220\225K\203nM\v\241\252v\213\221\135\133\153,H8Z\138\DC4A\188\178`\136L\245\208\245\145\210c\t\154\159\244\146\"E\134\162\174\192\250\135\143$\143KT.\249}\SO\212R\215\140\214\153\DLE1r\247B\GS\161"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 257 3 8 
"\ETX\SOH\NUL\SOH\179\133\ETX\EM~.KtP\200%f,\202\DLE-@\197K\188\206X\250\228\166\SUB\181\RSp\ENQc+\135_\DC3c2\191\138\SO\152\214\222XM`\142\235\198\242\158\138\233\&6\239_\168\209@-~\219V_\DEL\131&\192\210\253\EOT\132_\157\129y\168Q\244W\238K\f\SUB\NULo\181\246\182\253\143Z\222IW4\186\164N\204\200C\131\196\&1P\163\182\188\165\215\208^\247\243\228\NAK\224\189!8\224\&1\DC4,B\EM\129\223\215\178\&1\137\218\151\231\247mLJ\147\135\234\237\203\132SG[F\155\158\224\DEL\205\234\&3\238qu\142\194#\NUL\145\&2a\130\SUB\160\203\234=\NAK\242)\250\212\DELzb\154\163\222?\194\149W\r\195\223\228\GS|\143\188s\217+\211O\CAN\174\168,\194\&2\219\&1\158)\EM\GS\202!\214> \249\141A\243\&2\f\"\250\196\&3\234Y\SUB\CAN\DELb\231\248G\NUL\129\129\166\STX\139\216i\136\197\149\189.\SYN\a}\217A\CANV\"\150\188ze\169X\248\147W\ESC*\162\DLEB\212'\230\228Q\128\218\162\252\233.L[ob\220\218{\ENQq\200\174\219\248z\190\216\227\156`K\ACK|\\!\DC4vbu\213W7\n|\174\USZK\139F\a\178\206\&5"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 256 3 8 "\ETX\SOH\NUL\SOH\169\210\128a\180\148\180\210W\214,\198\215\161\155\158\247\ETB\140\198l\223\200\157\181\187s\250\228\NAK\205Z\173t\251\ETX\246\234a\254SV\196HDL\SUBK'KH\221|\176\223\178+\219\237\247h\179Y\175\144y\171)\132\203N\221\160\&4\227\190\140\137\f\227\170g\142\161tw\247\ab\230b\169\238\211\218\191\161\145\183\196\134l\FSY\241\150\134\131\SUBc\240\140\DLE\243\172\FS\SO)\144\211\FSH\160\225\224\ETX\217\238\SO\142\233"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataDNSKEY 257 3 8 "\ETX\SOH\NUL\SOH\157\SUB\170\237k'\170+')\235E\243i>f\178%\154\NUL\199\210\FS\219\244e\245T\SYN,\193\242\143\FS^\155uTJ\131T 
U\196E\ACK\227\208\SIN\130\157\&3\f\207X!\199\n-\ETBz.e\162\f+{P\148\&1U\208\254\133\230\249\DC1\206*\150\161\166\201\DELL\r\166\228\189}\141\188\204,Q\231\ESC`\SUB\188\161w\147O\210\209\152%-\244\165-\253c\162\232\&2\132\v\SUB\ACK\253\181\147\202J\215\200\DC4|zP\254I\ACK8\220\224\NAK\142U\171V[G\198\nx\254\184A\n\244[\153\215\229\186v\143\"\v\182\237\225\&6S\137\178\r\"\211\241\174\NUL\210\176y\184q\184>C\154\245\210\DC1\179\220\180\208\205\166Y\254%\200\247\158\238\248\185}\236\&6u\182\178\GSy\162xg\SUB\NULz\241\239\163F\NUL\NUL^c\DELw\198\&9f\f\215\DC4\144]`\164\213\148\149\174\USY\150\219i\132\&3\ETX\221\230-\218\231\131\227"},MsgRR {rrName = Name "example.com.", rrClass = Class 1, rrTTL = TTL 3599, rrData = RDataRRSIG 6 8 2 3600 1495444812 1493601562 61845 (Name "example.com.") "t\209\CAN\EOT\EOT\GSwz\237\DC3x\151!\151\255\215\145\CAN\189l\233uxt &\218\137\ETB\205DU\RS\166\240\&7x8\246)\176>\138O\187j\215\148\250\142\213h\160tCR\208|zV&\167\243\130Y\233'\NAK\160k\184\209\SYNFZH\224]\138\235\181\253\219<+=\154\143jo2\213d\237\206\207\NULAY\151\130\f\nD;\137\ACK_\141\240vW\148i\196/\v\DC1s\221Sl\241Y7\STXy(Z9\209"}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 32768, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/e9036c06fc30e6d7582fe9e5238cb718.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/e9036c06fc30e6d7582fe9e5238cb718.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/e9036c06fc30e6d7582fe9e5238cb718.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/e9036c06fc30e6d7582fe9e5238cb718.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/e9036c06fc30e6d7582fe9e5238cb718.show 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/e9036c06fc30e6d7582fe9e5238cb718.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 52146, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 4, mhNSCount = 0, mhARCount = 1}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 1) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 2306, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 7192, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 891, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")},MsgRR {rrName = Name "e1863.dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 11, rrData = RDataA (IPv4 0x5c7ab450)}], msgNS = [], msgAR = [MsgRR {rrName = Name ".", rrClass = Class 512, rrTTL = TTL 0, rrData = RDataOPT ""}]} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/f07dc1a95f1663525bf32957288adb78.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/f07dc1a95f1663525bf32957288adb78.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/f07dc1a95f1663525bf32957288adb78.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/f07dc1a95f1663525bf32957288adb78.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/f07dc1a95f1663525bf32957288adb78.show 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/f07dc1a95f1663525bf32957288adb78.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 56182, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 3, mhNSCount = 1, mhARCount = 0}, msgQD = [MsgQuestion (Name "www.microsoft.com.") (Type 15) (Class 1)], msgAN = [MsgRR {rrName = Name "www.microsoft.com.", rrClass = Class 1, rrTTL = TTL 3342, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.", rrClass = Class 1, rrTTL = TTL 20129, rrData = RDataCNAME (Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.")},MsgRR {rrName = Name "www.microsoft.com-c-2.edgekey.net.globalredir.akadns.net.", rrClass = Class 1, rrTTL = TTL 785, rrData = RDataCNAME (Name "e1863.dspb.akamaiedge.net.")}], msgNS = [MsgRR {rrName = Name "dspb.akamaiedge.net.", rrClass = Class 1, rrTTL = TTL 823, rrData = RDataSOA (Name "n0dspb.akamaiedge.net.") (Name "hostmaster.akamai.com.") 1494154901 1000 1000 1000 1800}], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/f3110fafd49cc625b0cc29e99d82ec79.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/f3110fafd49cc625b0cc29e99d82ec79.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/f3110fafd49cc625b0cc29e99d82ec79.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/f3110fafd49cc625b0cc29e99d82ec79.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/f3110fafd49cc625b0cc29e99d82ec79.show 1970-01-01 00:00:00.000000000 +0000 +++ 
cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/f3110fafd49cc625b0cc29e99d82ec79.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 62613, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "c.f.f.c.7.f.e.0.0.6.b.7.5.a.4.0.4.0.1.0.5.2.8.7.1.0.8.4.1.0.0.2.ip6.arpa.") (Type 12) (Class 1)], msgAN = [MsgRR {rrName = Name "c.f.f.c.7.f.e.0.0.6.b.7.5.a.4.0.4.0.1.0.5.2.8.7.1.0.8.4.1.0.0.2.ip6.arpa.", rrClass = Class 1, rrTTL = TTL 785, rrData = RDataPTR (Name "ghc.haskell.org.")}], msgNS = [], msgAR = []} \ No newline at end of file Binary files /tmp/tmpB51Hyy/vNxGOjQ7bA/cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/fc9b623aa6ba4673b84145ab0899a3bd.bin and /tmp/tmpB51Hyy/uNL1qO3kBD/cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/fc9b623aa6ba4673b84145ab0899a3bd.bin differ diff -Nru cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/fc9b623aa6ba4673b84145ab0899a3bd.show cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/fc9b623aa6ba4673b84145ab0899a3bd.show --- cabal-install-2.4-2.4+git20181017.1.d899935/src/resolv-0.1.1.2/testdata/msg/fc9b623aa6ba4673b84145ab0899a3bd.show 1970-01-01 00:00:00.000000000 +0000 +++ cabal-install-2.4-2.4+git20181125.1.5e65672/src/resolv-0.1.1.2/testdata/msg/fc9b623aa6ba4673b84145ab0899a3bd.show 2018-10-27 22:43:14.000000000 +0000 @@ -0,0 +1 @@ +Msg {msgHeader = MsgHeader {mhId = 15722, mhFlags = MsgHeaderFlags {mhQR = IsResponse, mhOpcode = 0, mhAA = False, mhTC = False, mhRD = True, mhRA = True, mhZ = False, mhAD = False, mhCD = False, mhRCode = 0}, mhQDCount = 1, mhANCount = 1, mhNSCount = 0, mhARCount = 0}, msgQD = [MsgQuestion (Name "4.4.8.8.in-addr.arpa.") (Type 12) 
(Class 1)], msgAN = [MsgRR {rrName = Name "4.4.8.8.in-addr.arpa.", rrClass = Class 1, rrTTL = TTL 86285, rrData = RDataPTR (Name "google-public-dns-b.google.com.")}], msgNS = [], msgAR = []} \ No newline at end of file